莫烦TensorFlow_07: TensorBoard Visualization

import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt

def add_layer(inputs, in_size, out_size, activation_function=None):
    # group every op of this layer under one collapsible 'layer' node in the graph view
    with tf.name_scope('layer'):
        with tf.name_scope('Weights'):
            Weights = tf.Variable(tf.random_normal([in_size, out_size]), name='W')  # shape: rows x cols
        with tf.name_scope('biases'):
            biases = tf.Variable(tf.zeros([1, out_size]) + 0.1, name='b')
        with tf.name_scope('Wx_plus_b'):
            Wx_plus_b = tf.matmul(inputs, Weights) + biases
        if activation_function is None:
            outputs = Wx_plus_b
        else:
            outputs = activation_function(Wx_plus_b)
        return outputs

# define placeholders for the network inputs
with tf.name_scope('inputs'):
    xs = tf.placeholder(tf.float32, [None, 1], name='x_input')
    ys = tf.placeholder(tf.float32, [None, 1], name='y_input')

# add hidden layer
l1 = add_layer(xs, 1, 10, activation_function=tf.nn.relu)
# add output layer
prediction = add_layer(l1, 10, 1, activation_function=None)
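A quick sanity check (not part of the original tutorial) is to list the trainable variables after building the two layers; this shows how the nested name scopes compose, and how TF 1.x uniquifies the scope of the second add_layer call:

for var in tf.trainable_variables():
    print(var.name)  # e.g. layer/Weights/W:0, layer/biases/b:0, layer_1/Weights/W:0, ...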

# the error between prediction and real data: mean squared error over the batch
with tf.name_scope('loss'):
    loss = tf.reduce_mean(tf.reduce_sum(tf.square(ys - prediction),
                                        reduction_indices=[1]))  # reduction_indices is the older name for axis
with tf.name_scope('train'):
    train_step = tf.train.GradientDescentOptimizer(0.1).minimize(loss)

sess = tf.Session()
# write the graph definition to logs/ so TensorBoard can render it
writer = tf.summary.FileWriter("logs/", sess.graph)

# important step: initialize all variables before running the graph
sess.run(tf.global_variables_initializer())
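The script above only defines the graph and writes it out; nothing is trained yet. A minimal training loop, assuming the toy quadratic dataset (y = x² − 0.5 plus noise) used earlier in this tutorial series, might look like this:

x_data = np.linspace(-1, 1, 300)[:, np.newaxis]  # 300 samples, shape (300, 1)
noise = np.random.normal(0, 0.05, x_data.shape)
y_data = np.square(x_data) - 0.5 + noise         # assumed toy target from earlier tutorials

for i in range(1000):
    sess.run(train_step, feed_dict={xs: x_data, ys: y_data})
    if i % 50 == 0:
        print(sess.run(loss, feed_dict={xs: x_data, ys: y_data}))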

Note: some browsers do not render TensorBoard well; the latest version of Chrome is recommended.

Then run at the command line:

tensorboard --logdir=logs/
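TensorBoard prints a local address when it starts (http://localhost:6006 by default); open it in the browser and switch to the GRAPHS tab to see the name-scoped computation graph.

One caveat: the 1.x-style API used above (tf.placeholder, tf.Session, ...) does not run directly on TensorFlow 2.x. A minimal shim, assuming a TF 2.x install, is to import the compatibility module in place of the plain import at the top:

import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()  # restores graph mode, placeholders, Session, etc.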
