Reference: the "neural network in 11 lines of Python" example
and Andrew Ng's machine learning course.
http://blog.csdn.net/sadfasdgaaaasdfa/article/details/47185199
Some points are still unclear to me — for example, exactly how the error is propagated backward to the hidden layer.
# Minimal 3-input / 3-hidden-unit / 1-output sigmoid network trained on XOR.
# Four samples, three features each (the third feature is a constant bias term).
x <- data.frame(c(0, 0, 1), c(0, 1, 1), c(1, 0, 1), c(1, 1, 1))
y <- c(0, 1, 1, 0)
# Transpose so rows = samples (4) and columns = features (3).
x <- as.matrix(t(x))
col <- ncol(x)  # number of input features
set.seed(1)     # reproducible weight initialization for the demo
# Input -> hidden weights: 3 hidden units x `col` features.
syn0 <- matrix(rnorm(3 * col), ncol = col, nrow = 3)
# Hidden -> output weights: 1 output unit x 3 hidden units.
syn1 <- matrix(rnorm(3 * 1), ncol = 3, nrow = 1)
for (i in 1:10000) {
  # Forward pass: sigmoid activations, input -> hidden then hidden -> output.
  L1 <- 1 / (1 + exp(-(x %*% t(syn0))))
  L2 <- 1 / (1 + exp(-(L1 %*% t(syn1))))
  # Output-layer error (prediction minus target).
  erroroutput <- L2 - y
  # Hidden-layer error: back-propagated through syn1 and scaled by the
  # sigmoid derivative L1 * (1 - L1).
  errorhidden <- erroroutput %*% syn1 * L1 * (1 - L1)
  # Gradient-descent weight updates with learning rate 0.01.
  syn1 <- syn1 - t(0.01 * t(L1) %*% erroroutput)
  syn0 <- syn0 - t(0.01 * t(x) %*% errorhidden)
}
Run results:
#
#' Train a one-hidden-layer sigmoid network with batch gradient descent.
#'
#' @param x Samples, one per row after the internal transposition (the blog's
#'   example passes a data frame whose columns are samples).
#' @param y Numeric vector of targets, one per sample.
#' @param numberofunits Number of hidden units.
#' @param alpha Learning rate for gradient descent.
#' @param iter Number of gradient-descent iterations.
#' @return A matrix binding the true targets (column 1) to the final
#'   predictions (column 2). Plots the per-iteration cost as a side effect.
Nnetonelayer <- function(x, y, numberofunits = 3, alpha = 0.01, iter = 10000) {
  x <- as.matrix(t(x))
  # Number of input features.
  col <- ncol(x)
  # Input -> hidden weights: one row per hidden unit, one column per feature.
  syn0 <- matrix(rnorm(numberofunits * col), ncol = col, nrow = numberofunits)
  # Hidden -> output weights: a single output unit.
  syn1 <- matrix(rnorm(numberofunits * 1), ncol = numberofunits, nrow = 1)
  # Preallocate the per-iteration cost trace.
  cost <- numeric(iter)
  for (i in seq_len(iter)) {
    # Forward pass: sigmoid activations, input -> hidden then hidden -> output.
    L1 <- 1 / (1 + exp(-(x %*% t(syn0))))
    L2 <- 1 / (1 + exp(-(L1 %*% t(syn1))))
    # Output-layer error (prediction minus target).
    erroroutput <- L2 - y
    # Hidden-layer error: back-propagated through syn1 and scaled by the
    # sigmoid derivative L1 * (1 - L1).
    errorhidden <- erroroutput %*% syn1 * L1 * (1 - L1)
    # Gradient-descent weight updates.
    syn1 <- syn1 - t(alpha * t(L1) %*% erroroutput)
    syn0 <- syn0 - t(alpha * t(x) %*% errorhidden)
    # Record a scalar cost (sum of squared errors). The original assigned the
    # whole error vector to a single element, which is a bug.
    cost[i] <- sum(erroroutput^2)
  }
  plot(cost)
  # True targets alongside the final predictions.
  cbind(y, L2)
}
# Example: train the network on the XOR truth table (third input is a bias).
x <- data.frame(c(0, 0, 1), c(0, 1, 1), c(1, 0, 1), c(1, 1, 1))
y <- c(0, 1, 1, 0)
Nnetonelayer(x, y)
L2
Actual targets (left column) and predicted values (right column):
0 0.01942965
1 0.95005097
1 0.96147338
0 0.06028846
The cost function value plotted against the iteration number: