import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt

# Import MNIST data
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("/niu/mnist_data/", one_hot=False)

# Parameters
learning_rate = 0.001
training_epochs = 20   # assumed value: the epoch count is garbled in the source
batch_size = 256
display_step = 1
examples_to_show = 10

# Network parameters
n_input = 784  # MNIST data input (img shape: 28*28 = 784 features)

# tf Graph input (only pictures)
X = tf.placeholder("float", [None, n_input])

# Hidden layer settings: compress 784 -> ... -> 2 features
n_hidden_1 = 128  # assumed value: garbled in the source
n_hidden_2 = 64   # assumed value: garbled in the source
n_hidden_3 = 10
n_hidden_4 = 2

weights = {
    'encoder_h1': tf.Variable(tf.random_normal([n_input, n_hidden_1])),
    'encoder_h2': tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2])),
    'encoder_h3': tf.Variable(tf.random_normal([n_hidden_2, n_hidden_3])),
    'encoder_h4': tf.Variable(tf.random_normal([n_hidden_3, n_hidden_4])),
    'decoder_h1': tf.Variable(tf.random_normal([n_hidden_4, n_hidden_3])),
    'decoder_h2': tf.Variable(tf.random_normal([n_hidden_3, n_hidden_2])),
    'decoder_h3': tf.Variable(tf.random_normal([n_hidden_2, n_hidden_1])),
    'decoder_h4': tf.Variable(tf.random_normal([n_hidden_1, n_input])),
}
biases = {
    'encoder_b1': tf.Variable(tf.random_normal([n_hidden_1])),
    'encoder_b2': tf.Variable(tf.random_normal([n_hidden_2])),
    'encoder_b3': tf.Variable(tf.random_normal([n_hidden_3])),
    'encoder_b4': tf.Variable(tf.random_normal([n_hidden_4])),
    'decoder_b1': tf.Variable(tf.random_normal([n_hidden_3])),
    'decoder_b2': tf.Variable(tf.random_normal([n_hidden_2])),
    'decoder_b3': tf.Variable(tf.random_normal([n_hidden_1])),
    'decoder_b4': tf.Variable(tf.random_normal([n_input])),
}

def encoder(x):
    # Encoder: hidden layers with sigmoid activation
    layer_1 = tf.nn.sigmoid(tf.add(tf.matmul(x, weights['encoder_h1']), biases['encoder_b1']))
    layer_2 = tf.nn.sigmoid(tf.add(tf.matmul(layer_1, weights['encoder_h2']), biases['encoder_b2']))
    layer_3 = tf.nn.sigmoid(tf.add(tf.matmul(layer_2, weights['encoder_h3']), biases['encoder_b3']))
    # The final code layer is linear so the 2-D embedding is not squashed into (0, 1)
    layer_4 = tf.add(tf.matmul(layer_3, weights['encoder_h4']), biases['encoder_b4'])
    return layer_4

# Define the decoder
def decoder(x):
    # Decoder: hidden layers with sigmoid activation
    layer_1 = tf.nn.sigmoid(tf.add(tf.matmul(x, weights['decoder_h1']), biases['decoder_b1']))
    layer_2 = tf.nn.sigmoid(tf.add(tf.matmul(layer_1, weights['decoder_h2']), biases['decoder_b2']))
    layer_3 = tf.nn.sigmoid(tf.add(tf.matmul(layer_2, weights['decoder_h3']), biases['decoder_b3']))
    layer_4 = tf.nn.sigmoid(tf.add(tf.matmul(layer_3, weights['decoder_h4']), biases['decoder_b4']))
    return layer_4

# Construct the model
encoder_op = encoder(X)           # 2 features
decoder_op = decoder(encoder_op)  # 784 features

# Prediction
y_pred = decoder_op  # after
# Targets (labels) are the input data
y_true = X           # before

cost = tf.reduce_mean(tf.pow(y_true - y_pred, 2))
optimizer = tf.train.AdamOptimizer(learning_rate).minimize(cost)

# Launch the graph
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    total_batch = int(mnist.train.num_examples / batch_size)
    # Training cycle
    for epoch in range(training_epochs):
        # Loop over all batches
        for i in range(total_batch):
            batch_xs, batch_ys = mnist.train.next_batch(batch_size)  # max(x) = 1, min(x) = 0
            # Run optimization op (backprop) and cost op (to get the loss value)
            _, c = sess.run([optimizer, cost], feed_dict={X: batch_xs})
        # Display logs per epoch step
        if epoch % display_step == 0:
            print("Epoch:", '%04d' % (epoch + 1), "cost=", "{:.9f}".format(c))
    print("Optimization finished!")

    # Project the test images into the 2-D code space and color each point by its digit label
    encode_result = sess.run(encoder_op, feed_dict={X: mnist.test.images})
    plt.scatter(encode_result[:, 0], encode_result[:, 1], c=mnist.test.labels)
    plt.title('Matplotlib,AE,classification--Jason Niu')
    plt.show()
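The script above targets TensorFlow 1.x: tf.placeholder, tf.Session, and the tensorflow.examples.tutorials.mnist module were all removed in TensorFlow 2.x. For readers on TF 2.x, here is a minimal Keras sketch of the same 784-128-64-10-2 autoencoder; it is an equivalent under the assumptions stated above (the garbled layer sizes and epoch count), not the original post's code, and it loads MNIST through tf.keras.datasets instead of input_data.

import tensorflow as tf
import matplotlib.pyplot as plt

# Load MNIST via the TF 2.x API and scale pixels to [0, 1]
(x_train, _), (x_test, y_test) = tf.keras.datasets.mnist.load_data()
x_train = x_train.reshape(-1, 784).astype("float32") / 255.0
x_test = x_test.reshape(-1, 784).astype("float32") / 255.0

encoder = tf.keras.Sequential([
    tf.keras.layers.Dense(128, activation="sigmoid", input_shape=(784,)),
    tf.keras.layers.Dense(64, activation="sigmoid"),
    tf.keras.layers.Dense(10, activation="sigmoid"),
    tf.keras.layers.Dense(2),  # linear 2-D code layer, as in the TF 1.x version
])
decoder = tf.keras.Sequential([
    tf.keras.layers.Dense(10, activation="sigmoid", input_shape=(2,)),
    tf.keras.layers.Dense(64, activation="sigmoid"),
    tf.keras.layers.Dense(128, activation="sigmoid"),
    tf.keras.layers.Dense(784, activation="sigmoid"),
])
autoencoder = tf.keras.Sequential([encoder, decoder])

# Same objective as the TF 1.x script: mean squared reconstruction error, Adam(0.001)
autoencoder.compile(optimizer=tf.keras.optimizers.Adam(0.001), loss="mse")
autoencoder.fit(x_train, x_train, epochs=20, batch_size=256)

# Visualize the 2-D codes of the test set, colored by digit label
codes = encoder.predict(x_test)
plt.scatter(codes[:, 0], codes[:, 1], c=y_test)
plt.show()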
TF AE: an autoencoder (encoder plus decoder) trained on TensorFlow's built-in MNIST dataset realizes unsupervised-learning classification; the decoder exists only to drive reconstruction training, while the encoder's two-dimensional code layer supplies the features that separate the digit classes in the scatter plot.
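The scatter plot only shows that the digits separate in the code space; to turn that separation into an actual classification without labels, one option is to cluster the 2-D codes. The following sketch is a hypothetical follow-up, not part of the original post: it assumes scikit-learn is available and reuses encode_result and mnist.test.labels from the session above, using the labels purely to evaluate the clusters afterwards.

import numpy as np
from sklearn.cluster import KMeans

# Cluster the 2-D codes into 10 groups (one per digit, unsupervised)
kmeans = KMeans(n_clusters=10, n_init=10, random_state=0)
cluster_ids = kmeans.fit_predict(encode_result)

# Evaluate with majority-vote purity: assign each cluster its most common
# digit and count how many points agree (labels used only for scoring)
labels = mnist.test.labels.astype(int)
correct = 0
for k in range(10):
    members = labels[cluster_ids == k]
    if members.size:
        correct += np.sum(members == np.bincount(members).argmax())
print("cluster purity: %.3f" % (correct / labels.size))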