TensorFlow Series Tutorial — Learning Notes

Source: Internet
Author: User

1. Basic machine learning: fitting the coefficients of a linear function (y = 0.1x + 0.3)

# -*- coding: utf-8 -*-
"""Fit the coefficients of y = 0.1x + 0.3 with TensorFlow 1.x gradient descent."""
import tensorflow as tf
import numpy as np

# Generate training data: y = 0.1x + 0.3
x_data = np.random.rand(100).astype(np.float32)
y_data = x_data * 0.1 + 0.3

### Start building the TensorFlow graph ###
# One trainable weight, initialized uniformly in [-1, 1), and one bias at 0.
weight = tf.Variable(tf.random_uniform([1], -1.0, 1.0))
biases = tf.Variable(tf.zeros([1]))

# Model prediction
y = weight * x_data + biases

# Loss: mean squared error between prediction and target
loss = tf.reduce_mean(tf.square(y - y_data))
# Optimizer: plain gradient descent with learning rate 0.5
optimizer = tf.train.GradientDescentOptimizer(0.5)
# Training op: minimize the loss with the optimizer
train = optimizer.minimize(loss)

# Variable-initialization op (TF 1.x; superseded by tf.global_variables_initializer)
init = tf.initialize_all_variables()

### Finished building the TensorFlow graph ###

sess = tf.Session()
sess.run(init)  # run the init op to actually initialize the variables

for step in range(201):
    sess.run(train)
    if step % 20 == 0:
        # weight should converge toward 0.1 and biases toward 0.3
        print(step, sess.run(weight), sess.run(biases))

2. Build a simple neural network to predict y = x², with a live plot of the fit

# -*- coding: utf-8 -*-
"""Fit y = x^2 - 0.5 (plus noise) with a small 2-layer network and plot the fit live."""
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt


def add_layer(inputs, in_size, out_size, activation_function=None):
    """Add a fully connected layer.

    Args:
        inputs: input tensor of shape [batch, in_size].
        in_size: number of input features.
        out_size: number of output units.
        activation_function: optional activation applied to Wx+b; None means linear.

    Returns:
        The layer's output tensor of shape [batch, out_size].
    """
    weight = tf.Variable(tf.random_normal([in_size, out_size]))
    # Biases start slightly positive (0.1) rather than at zero.
    biases = tf.Variable(tf.zeros([1, out_size]) + 0.1)
    wx_plus_b = tf.matmul(inputs, weight) + biases
    if activation_function is None:
        outputs = wx_plus_b
    else:
        outputs = activation_function(wx_plus_b)
    return outputs


# np.linspace(start, stop, num) returns evenly spaced numbers over the interval.
# [:, np.newaxis] turns the 1-D vector into a column vector (shape [300, 1]).
x_data = np.linspace(-1, 1, 300)[:, np.newaxis]
noise = np.random.normal(0, 0.05, x_data.shape)
y_data = np.square(x_data) - 0.5 + noise

# Placeholders for feeding minibatches; None allows any batch size.
xs = tf.placeholder(tf.float32, [None, 1])
ys = tf.placeholder(tf.float32, [None, 1])

# Hidden layer (1 -> 10, ReLU) and linear output layer (10 -> 1).
l1 = add_layer(xs, 1, 10, activation_function=tf.nn.relu)
prediction = add_layer(l1, 10, 1, activation_function=None)

# Mean over the batch of per-example squared errors.
loss = tf.reduce_mean(
    tf.reduce_sum(tf.square(ys - prediction), reduction_indices=[1]))
train_step = tf.train.GradientDescentOptimizer(0.1).minimize(loss)

init = tf.initialize_all_variables()
sess = tf.Session()
sess.run(init)

# Bring up the plot window with the raw data.
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
ax.scatter(x_data, y_data)
# Interactive mode: plt.show() does not block, so the figure keeps updating.
plt.ion()
plt.show()

for i in range(1000):
    sess.run(train_step, feed_dict={xs: x_data, ys: y_data})
    if i % 50 == 0:
        # print(sess.run(loss, feed_dict={xs: x_data, ys: y_data}))
        # Remove the previous prediction curve before drawing the new one;
        # on the first pass `lines` does not exist yet, hence the try/except.
        try:
            ax.lines.remove(lines[0])
        except Exception:
            pass
        prediction_value = sess.run(prediction, feed_dict={xs: x_data})
        lines = ax.plot(x_data, prediction_value, 'r-', lw=5)
        plt.pause(0.1)

3. TensorBoard basics — the same network as above, with name scopes added so the graph is readable in TensorBoard.

# -*- coding: utf-8 -*-
"""Same 2-layer network as example 2, with tf.name_scope labels and a
tf.summary.FileWriter so the graph can be inspected in TensorBoard."""
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt


def add_layer(inputs, in_size, out_size, activation_function=None):
    """Add a fully connected layer, wrapping each part in a name scope.

    Args:
        inputs: input tensor of shape [batch, in_size].
        in_size: number of input features.
        out_size: number of output units.
        activation_function: optional activation applied to Wx+b; None means linear.

    Returns:
        The layer's output tensor of shape [batch, out_size].
    """
    with tf.name_scope('layer'):
        with tf.name_scope('weight'):
            weight = tf.Variable(tf.random_normal([in_size, out_size]), name='W')
        with tf.name_scope('biases'):
            biases = tf.Variable(tf.zeros([1, out_size]) + 0.1, name='b')
        with tf.name_scope('Wx_plus_b'):
            wx_plus_b = tf.matmul(inputs, weight) + biases
        if activation_function is None:
            outputs = wx_plus_b
        else:
            outputs = activation_function(wx_plus_b)
        return outputs


# np.linspace returns evenly spaced numbers over the interval;
# [:, np.newaxis] turns the 1-D vector into a column vector (shape [300, 1]).
x_data = np.linspace(-1, 1, 300)[:, np.newaxis]
noise = np.random.normal(0, 0.05, x_data.shape)
y_data = np.square(x_data) - 0.5 + noise

# Named inputs so the placeholders show up clearly in the TensorBoard graph.
with tf.name_scope('inputs'):
    xs = tf.placeholder(tf.float32, [None, 1], name='x_input')
    ys = tf.placeholder(tf.float32, [None, 1], name='y_input')

l1 = add_layer(xs, 1, 10, activation_function=tf.nn.relu)
prediction = add_layer(l1, 10, 1, activation_function=None)

with tf.name_scope('loss'):
    loss = tf.reduce_mean(
        tf.reduce_sum(tf.square(ys - prediction), reduction_indices=[1]))
with tf.name_scope('train'):
    train_step = tf.train.GradientDescentOptimizer(0.1).minimize(loss)

init = tf.initialize_all_variables()
sess = tf.Session()
# Dump the graph definition to logs/ for viewing with: tensorboard --logdir=logs
writer = tf.summary.FileWriter("logs/", sess.graph)
sess.run(init)

for i in range(1000):
    sess.run(train_step, feed_dict={xs: x_data, ys: y_data})
    if i % 50 == 0:
        print(sess.run(loss, feed_dict={xs: x_data, ys: y_data}))

Contact Us

The content source of this page is from Internet, which doesn't represent Alibaba Cloud's opinion; products and services mentioned on that page don't have any relationship with Alibaba Cloud. If the content of the page makes you feel confusing, please write us an email, we will handle the problem within 5 days after receiving your email.

If you find any instances of plagiarism from the community, please send an email to: info-contact@alibabacloud.com and provide relevant evidence. A staff member will contact you within 5 working days.

A Free Trial That Lets You Build Big!

Start building with 50+ products and up to 12 months usage for Elastic Compute Service

  • Sales Support

    1 on 1 presale consultation

  • After-Sales Support

    24/7 Technical Support 6 Free Tickets per Quarter Faster Response

  • Alibaba Cloud offers highly flexible support services tailored to meet your exact needs.