Install TensorFlow 1.0
Linux/Ubuntu:
- python2.7:
pip install https://storage.googleapis.com/tensorflow/linux/cpu/tensorflow-1.0.1-cp27-none-linux_x86_64.whl
- python3.5:
pip3 install https://storage.googleapis.com/tensorflow/linux/cpu/tensorflow-1.0.1-cp35-cp35m-linux_x86_64.whl
macOS:
- python2:
pip install https://storage.googleapis.com/tensorflow/mac/cpu/tensorflow-1.0.1-py2-none-any.whl
- python3:
pip3 install https://storage.googleapis.com/tensorflow/mac/cpu/tensorflow-1.0.1-py3-none-any.whl
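To verify the installation, a minimal check such as the following can be run (a sketch; use python or python3 to match the interpreter you installed for):

import tensorflow as tf

# Should print 1.0.1 if one of the wheels above was installed
print(tf.__version__)

# One-op sanity check: build a constant and run it in a session
hello = tf.constant('Hello, TensorFlow!')
with tf.Session() as sess:
    print(sess.run(hello))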
A complete TensorFlow addition example
import tensorflow as tf
# Suppress log warnings (a source build avoids them automatically)
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'

a = tf.constant(3.0)
b = tf.constant(4.0)

with tf.Session() as sess:
    a_b = tf.add(a, b)
    print("The added type is")
    print(a_b)
    print("The real result is:")
    print(sess.run(a_b))
[Figure: tf_add — output of the addition example]
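For reference, the printed Tensor object and the numeric result can also be distinguished with Tensor.eval(), which inside a session is equivalent to sess.run (a minimal sketch):

import tensorflow as tf

a = tf.constant(3.0)
b = tf.constant(4.0)
a_b = tf.add(a, b)

print(a_b)  # a Tensor object; nothing has been computed yet
with tf.Session() as sess:
    print(a_b.eval())  # 7.0 -- equivalent to sess.run(a_b) inside the session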
Graphical presentation of addition operations
- Add a statement that writes a TensorBoard log file for the session
import tensorflow as tf
# Suppress log warnings (a source build avoids them automatically)
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'

a = tf.constant(3.0)
b = tf.constant(4.0)

with tf.Session() as sess:
    a_b = tf.add(a, b)
    print("The added type is")
    print(a_b)
    print("The real result is:")
    print(sess.run(a_b))
    # Add the TensorBoard record file
    file_write = tf.summary.FileWriter('/Users/lijianzhao/tensorBoard/', graph=sess.graph)
- Run tensorboard --logdir="/Users/lijianzhao/tensorBoard/" in the terminal
[Figure: running tensorboard in the terminal]
- Open the address shown in the terminal prompt, e.g. http://192.168.199.213:6006, in a browser
[Figure: the TensorBoard main interface]
- Select GRAPHS
[Figure: the GRAPHS tab showing the computation graph of the addition]
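A condensed sketch of the graph-export step above (same log directory; FileWriter.close() flushes the event file to disk, and the graph can be written without running the session at all):

import tensorflow as tf

a = tf.constant(3.0)
b = tf.constant(4.0)
a_b = tf.add(a, b)

# Write only the graph definition for TensorBoard
file_write = tf.summary.FileWriter('/Users/lijianzhao/tensorBoard/', graph=tf.get_default_graph())
file_write.close()
# Then run: tensorboard --logdir="/Users/lijianzhao/tensorBoard/" and open the GRAPHS tab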
Simple linear regression
import tensorflow as tf
# Suppress log warnings (a source build avoids them automatically)
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'


# Regression function
def my_regression():
    # Prepare 100 samples of data x with a mean of 5.0 and a standard deviation of 1.0
    x = tf.random_normal([100, 1], mean=5.0, stddev=1.0, name="x")
    # The true relationship is y = 0.7x + 0.6
    y_true = tf.matmul(x, [[0.7]]) + 0.6

    # Create the weight variable
    weight = tf.Variable(tf.random_normal([1, 1], mean=1.0, stddev=0.1), name="weight")
    # Create the bias variable with an initial value of 1
    bias = tf.Variable(1.0, name="bias")

    # Predicted results
    y_predict = tf.matmul(x, weight) + bias

    # Calculate the loss
    loss = tf.reduce_mean(tf.square(y_predict - y_true))

    # Gradient descent reduces the loss; the learning rate is 0.01
    train_op = tf.train.GradientDescentOptimizer(0.01).minimize(loss)

    # Collect variables
    tf.summary.scalar("losses", loss)
    tf.summary.histogram("weightes", weight)
    # Merge the summaries
    merged = tf.summary.merge_all()

    # Initialize variables
    init_op = tf.global_variables_initializer()

    # Optimize the loss with gradient descent
    with tf.Session() as sess:
        sess.run(init_op)
        print("The initial weight is {}, the initial bias is {}".format(weight.eval(), bias.eval()))
        # Add the TensorBoard record file
        file_write = tf.summary.FileWriter('/Users/lijianzhao/tensorBoard/my_regression', graph=sess.graph)
        # Train the linear regression model in a loop
        for i in range(20000):
            sess.run(train_op)
            print("After training step {} the weight is {} and the bias is {}".format(i, weight.eval(), bias.eval()))
            # Observe the change of each value: run the merged summaries
            summary = sess.run(merged)
            # Append the values collected at each step to the file
            file_write.add_summary(summary, i)


if __name__ == '__main__':
    my_regression()
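For intuition, the update GradientDescentOptimizer applies each step is plain gradient descent on the mean squared error. A NumPy sketch of a single hand-written update for this model (illustrative only, not part of the program above):

import numpy as np

# One gradient-descent step for y = w*x + b with MSE loss
x = np.random.normal(5.0, 1.0, size=(100, 1))
y_true = 0.7 * x + 0.6
w, b, lr = 1.0, 1.0, 0.01

error = (w * x + b) - y_true
grad_w = 2.0 * np.mean(error * x)  # d/dw of mean((w*x + b - y)^2)
grad_b = 2.0 * np.mean(error)      # d/db of mean((w*x + b - y)^2)
w -= lr * grad_w
b -= lr * grad_b
print(w, b)  # nudged toward the true values 0.7 and 0.6

To inspect the collected scalar and histogram summaries, point TensorBoard at the new log directory: tensorboard --logdir="/Users/lijianzhao/tensorBoard/my_regression".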
[Figures: the training output, the program's flow chart (computation graph), the loss decreasing over the training steps, and the weight gradually approaching the true value]
Add scopes to the program
import tensorflow as tf
# Suppress log warnings (a source build avoids them automatically)
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'


# Regression function
def my_regression():
    # Prepare the data
    with tf.variable_scope("data"):
        # Prepare 100 samples of data x with a mean of 5.0 and a standard deviation of 1.0
        x = tf.random_normal([100, 1], mean=5.0, stddev=1.0, name="x")
        # The true relationship is y = 0.7x + 0.6
        y_true = tf.matmul(x, [[0.7]]) + 0.6

    # Create the model
    with tf.variable_scope("model"):
        # Create the weight variable
        weight = tf.Variable(tf.random_normal([1, 1], mean=1.0, stddev=0.1), name="weight")
        # Create the bias variable with an initial value of 1
        bias = tf.Variable(1.0, name="bias")
        # Predicted results
        y_predict = tf.matmul(x, weight) + bias

    # Calculate the loss
    with tf.variable_scope("loss"):
        loss = tf.reduce_mean(tf.square(y_predict - y_true))

    # Reduce the loss
    with tf.variable_scope("optimizer"):
        # Gradient descent reduces the loss; the learning rate is 0.01
        train_op = tf.train.GradientDescentOptimizer(0.01).minimize(loss)

    # Collect variables
    tf.summary.scalar("losses", loss)
    tf.summary.histogram("weightes", weight)
    # Merge the summaries
    merged = tf.summary.merge_all()

    # Initialize variables
    init_op = tf.global_variables_initializer()

    # Optimize the loss with gradient descent
    with tf.Session() as sess:
        sess.run(init_op)
        print("The initial weight is {}, the initial bias is {}".format(weight.eval(), bias.eval()))
        # Add the TensorBoard record file
        file_write = tf.summary.FileWriter('/Users/lijianzhao/tensorBoard/my_regression', graph=sess.graph)
        # Train the linear regression model in a loop
        for i in range(20000):
            sess.run(train_op)
            print("After training step {} the weight is {} and the bias is {}".format(i, weight.eval(), bias.eval()))
            # Observe the change of each value: run the merged summaries
            summary = sess.run(merged)
            # Append the values collected at each step to the file
            file_write.add_summary(summary, i)


if __name__ == '__main__':
    my_regression()
[Figure: the computation graph grouped by the named scopes]
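A minimal sketch of what a scope does: it prefixes the names of the ops created inside it, and TensorBoard collapses nodes that share a prefix into one expandable box. The op names in the comments are what TF 1.x typically assigns; treat them as illustrative:

import tensorflow as tf

with tf.variable_scope("data"):
    x = tf.constant([[5.0]], name="x")
with tf.variable_scope("model"):
    weight = tf.Variable([[1.0]], name="weight")
    y_predict = tf.matmul(x, weight)

print(x.op.name)          # data/x
print(weight.op.name)     # model/weight
print(y_predict.op.name)  # model/MatMul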
Save and restore a model (saving session resources)
- Create a saver to save the model
saver = tf.train.Saver()
- Save the model
saver.save(sess, "./tmp/ckpt/test")
- Restore the model
saver.restore(sess, "./tmp/ckpt/test")
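Putting the pieces together, a hedged sketch of how the saver could be wired into a session so that a later run resumes from the checkpoint (the directory check and the tf.train.latest_checkpoint lookup are one possible way to structure resuming, not the only one):

import os
import tensorflow as tf

os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'

# A minimal graph: one variable to save and restore
weight = tf.Variable(tf.random_normal([1, 1], mean=1.0, stddev=0.1), name="weight")

# Create the saver after the variables it should track are defined
saver = tf.train.Saver()

# Make sure the checkpoint directory exists before saving
if not os.path.isdir("./tmp/ckpt"):
    os.makedirs("./tmp/ckpt")

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())

    # Restore previously saved values if a checkpoint already exists
    ckpt = tf.train.latest_checkpoint("./tmp/ckpt/")
    if ckpt:
        saver.restore(sess, ckpt)

    print("weight:", weight.eval())

    # Save the current values; the next run picks them up via the restore above
    saver.save(sess, "./tmp/ckpt/test")

Saving should leave a checkpoint file plus test.index, test.meta, and test.data-* files under ./tmp/ckpt/.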