Reproduced from: http://blog.csdn.net/miangangzhen/article/details/51281989
#!usr/bin/env python3
# -*- coding: utf-8 -*-
import numpy as np
import math


# definition of the sigmoid function
# numpy.exp works element-wise on arrays
def sigmoid(x):
    return 1 / (1 + np.exp(-x))


# derivative of the sigmoid function
# the input must be the sigmoid function's output
def sigmoid_output_to_derivative(result):
    return result * (1 - result)


# initialize the training set
def getTrainingSet(nameOfSet):
    setDict = {
        "sin": getSinSet(),
    }
    return setDict[nameOfSet]


def getSinSet():
    x = 6.2 * np.random.rand(1) - 3.14
    x = x.reshape(1, 1)
    # y = np.array([5 * x]).reshape(1, 1)
    # y = np.array([math.sin(float(x))]).reshape(1, 1)
    y = np.array([math.sin(float(x)), 1]).reshape(1, 2)
    return x, y


def getW(synapse, delta):
    resultList = []
    # traverse each hidden unit's weights to each output: e.g. with 8 hidden
    # units and 2 outputs, each hidden unit has 2 outgoing weights
    for i in range(synapse.shape[0]):
        resultList.append((synapse[i, :] * delta).sum())
    resultArr = np.array(resultList).reshape(1, synapse.shape[0])
    return resultArr


def getT(delta, layer):
    result = np.dot(layer.T, delta)
    return result


def backPropagation(trainingExamples, etah, input_dim, output_dim, hidden_dim, hidden_num):
    # guard against an invalid configuration
    if hidden_num < 1:
        print("The number of hidden layers must not be less than 1")
        return

    # initialize the network weight matrices; this is the core
    synapseList = []
    # input layer -> hidden layer 1
    synapseList.append(2 * np.random.random((input_dim, hidden_dim)) - 1)
    # hidden layer 1 -> hidden layer 2, 2 -> 3, ..., n-1 -> n
    for i in range(hidden_num - 1):
        synapseList.append(2 * np.random.random((hidden_dim, hidden_dim)) - 1)
    # hidden layer n -> output layer
    synapseList.append(2 * np.random.random((hidden_dim, output_dim)) - 1)

    iCount = 0
    lastErrorMax = 99999
    # while True:
    for i in range(10000):
        errorMax = 0
        for x, y in trainingExamples:
            iCount += 1
            layerList = []

            # forward propagation
            layerList.append(sigmoid(np.dot(x, synapseList[0])))
            for j in range(hidden_num):
                layerList.append(sigmoid(np.dot(layerList[-1], synapseList[j + 1])))

            # for each output unit k in the network, compute its error term
            deltaList = []
            layerOutputError = y - layerList[-1]
            # convergence criterion: track the largest output error this pass
            errorMax = layerOutputError.sum() if layerOutputError.sum() > errorMax else errorMax
            deltaK = sigmoid_output_to_derivative(layerList[-1]) * layerOutputError
            deltaList.append(deltaK)

            iLength = len(synapseList)
            for j in range(hidden_num):
                w = getW(synapseList[iLength - 1 - j], deltaList[j])
                delta = sigmoid_output_to_derivative(layerList[iLength - 2 - j]) * w
                deltaList.append(delta)

            # update each network weight w(ji)
            for j in range(len(synapseList) - 1, 0, -1):
                t = getT(deltaList[iLength - 1 - j], layerList[j - 1])
                synapseList[j] = synapseList[j] + etah * t
            t = getT(deltaList[-1], x)
            synapseList[0] = synapseList[0] + etah * t

        print("Maximum output error:")
        print(errorMax)

        if abs(lastErrorMax - errorMax) < 0.0001:
            print("It's converging.")
            print("####################")
            break
        lastErrorMax = errorMax

    # test the trained network
    for i in range(5):
        xTest, yReal = getSinSet()
        layerTmp = sigmoid(np.dot(xTest, synapseList[0]))
        for j in range(1, len(synapseList), 1):
            layerTmp = sigmoid(np.dot(layerTmp, synapseList[j]))
        yTest = layerTmp
        print("x:")
        print(xTest)
        print("The actual y:")
        print(yReal)
        print("y of the neural network output:")
        print(yTest)
        print("Final output error:")
        print(np.abs(yReal - yTest))
        print("#####################")

    print("Number of iterations:")
    print(iCount)


if __name__ == '__main__':
    import datetime
    tStart = datetime.datetime.now()

    # which training sample set to use
    nameOfSet = "sin"
    x, y = getTrainingSet(nameOfSet)

    # parameter settings
    # the learning rate
    etah = 0.01
    # number of hidden layers
    hidden_num = 2
    # size of the network input layer
    input_dim = x.shape[1]
    # size of the hidden layers
    hidden_dim = 100
    # size of the output layer
    output_dim = y.shape[1]

    # build the training samples
    trainingExamples = []
    for i in range(10000):
        x, y = getTrainingSet(nameOfSet)
        trainingExamples.append((x, y))

    # train the network with the backpropagation algorithm
    backPropagation(trainingExamples, etah, input_dim, output_dim, hidden_dim, hidden_num)

    tEnd = datetime.datetime.now()
    print("Time cost:")
    print(tEnd - tStart)
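The script above updates the weights one example at a time with explicit Python loops. As a point of comparison, here is a minimal sketch of the same sigmoid-MLP update with a single hidden layer, vectorized over a whole batch. The names (W1, W2, H, deltaOut, deltaHid) are illustrative and not from the original script, and the sin targets are rescaled into (0, 1) so a sigmoid output can represent them.

import numpy as np

def sigmoid(x):
    return 1 / (1 + np.exp(-x))

rng = np.random.default_rng(0)
X = 6.2 * rng.random((200, 1)) - 3.14        # inputs in roughly (-3.14, 3.06)
Y = (np.sin(X) + 1) / 2                      # rescaled so the targets fit in (0, 1)
W1 = 2 * rng.random((1, 20)) - 1             # input -> hidden, uniform in (-1, 1)
W2 = 2 * rng.random((20, 1)) - 1             # hidden -> output
etah = 0.5                                   # illustrative learning rate

for epoch in range(5000):
    H = sigmoid(X @ W1)                      # forward pass: hidden activations
    out = sigmoid(H @ W2)                    # network output
    err = Y - out                            # same sign convention as the script
    deltaOut = err * out * (1 - out)         # output-layer error term
    deltaHid = (deltaOut @ W2.T) * H * (1 - H)   # error propagated to the hidden layer
    W2 += etah * (H.T @ deltaOut) / len(X)   # averaged gradient step
    W1 += etah * (X.T @ deltaHid) / len(X)

print("mean abs error:", np.abs(Y - sigmoid(sigmoid(X @ W1) @ W2)).mean())

Averaging the gradient over the batch keeps the step size independent of the number of samples, which is why a larger learning rate works here than in the per-example loop above.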
Analysis:
1. Forward propagation:

for j in range(1, len(synapseList), 1):
    layerTmp = sigmoid(np.dot(layerTmp, synapseList[j]))

synapseList is the list of weight matrices: entry j connects layer j to layer j+1, so the forward pass is one np.dot per layer pair, each followed by the sigmoid.
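Written as a standalone function (a sketch assuming the sigmoid and the synapseList layout defined above), the whole forward pass is:

def forward(x, synapseList):
    layer = x
    for synapse in synapseList:
        # one affine map per layer pair, followed by the sigmoid nonlinearity
        layer = sigmoid(np.dot(layer, synapse))
    return layer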
2. Backpropagation

a. Computing each hidden unit's error term as the weighted sum of the error terms of the units it feeds:

def getW(synapse, delta):
    resultList = []
    # traverse each hidden unit's weights to each output: e.g. with 8 hidden
    # units and 2 outputs, each hidden unit has 2 outgoing weights
    for i in range(synapse.shape[0]):
        resultList.append((synapse[i, :] * delta).sum())
    resultArr = np.array(resultList).reshape(1, synapse.shape[0])
    return resultArr
For example, with 8 hidden units and 2 output units, synapse has shape (8, 2) and getW returns a (1, 8) array: one accumulated error value per hidden unit.
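The loop in getW is element-wise bookkeeping for what is, in effect, a single matrix product: for a (1, n_out) row vector delta, getW(synapse, delta) equals np.dot(delta, synapse.T). A quick check of that equivalence (a sketch assuming the getW defined above is in scope, with shapes matching the 8-units/2-outputs example):

import numpy as np

synapse = np.random.random((8, 2))     # 8 hidden units, 2 output units
delta = np.random.random((1, 2))       # output-layer error terms

looped = getW(synapse, delta)          # the per-unit loop above
vectorized = np.dot(delta, synapse.T)  # the same result as one matrix product
assert np.allclose(looped, vectorized)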
Knowledge of Neural Networks (1): Implementing an MLP in Python