BP neural network algorithm (2)


// BpNet.h: interface for the Bp class.
//
// E-mail: zengzhijun369@163.com
//////////////////////////////////////////////////////////////////////

#include "stdafx.h"
#include "BpNet.h"
#include "math.h"

#ifdef _DEBUG
#undef THIS_FILE
static char THIS_FILE[] = __FILE__;
#define new DEBUG_NEW
#endif
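
The class declaration from BpNet.h is not part of this excerpt. As a reference while reading the code below, here is a minimal sketch consistent with the members and constants the implementation uses; the four array sizes are illustrative placeholders, not the original header's values:

// Hypothetical reconstruction of BpNet.h -- names and sizes are inferred
// from their use in this file, not copied from the original header.
const int innode = 2;       // number of input nodes (assumed)
const int hidenode = 4;     // number of hidden nodes (assumed)
const int outnode = 1;      // number of output nodes (assumed)
const int trainsample = 4;  // number of training samples (assumed)

class BpNet
{
public:
    BpNet();
    virtual ~BpNet();
    void init();                                // randomize weights and thresholds
    void train(double p[trainsample][innode],
               double t[trainsample][outnode]); // one pass over all samples
    double *recognize(double *p);               // forward pass only
    void writetrain();                          // save trained weights to disk

    double w[innode][hidenode];   // input-to-hidden weights
    double w1[hidenode][outnode]; // hidden-to-output weights
    double b1[hidenode];          // hidden-layer thresholds
    double b2[outnode];           // output-layer thresholds
    double rate_w, rate_w1;       // weight learning rates
    double rate_b1, rate_b2;      // threshold learning rates
    double e, error;              // per-sample and overall training error
    double shuchu[outnode];       // recognition result ("shuchu" = output)
};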

//////////////////////////////////////////////////////////////////////
// Construction/Destruction
//////////////////////////////////////////////////////////////////////

BpNet::BpNet()
{
    error = 1.0;
    e = 0.0;

    rate_w = 0.05;    // weight learning rate (input layer - hidden layer)
    rate_w1 = 0.047;  // weight learning rate (hidden layer - output layer)
    rate_b1 = 0.05;   // learning rate of the hidden-layer thresholds
    rate_b2 = 0.047;  // learning rate of the output-layer thresholds
}

BpNet::~BpNet()
{
}

double randx(); // forward declaration

void winit(double w[], int sl) // weight initialization
{
    int i;
    for (i = 0; i < sl; i++)
        *(w + i) = 0.2 * randx(); // small random initial weights
}

double randx() // uniform random number in [0, 1]
{
    double d;
    d = (double)rand() / 32767.0; // 32767 = RAND_MAX on the author's platform
    return d;
}

void BpNet::init()
{
    winit((double *)w, innode * hidenode);
    winit((double *)w1, hidenode * outnode);
    winit(b1, hidenode);
    winit(b2, outnode);
}


void BpNet::train(double p[trainsample][innode], double t[trainsample][outnode])
{
    double pp[hidenode];  // correction error of the hidden nodes
    double qq[outnode];   // deviation between expected and actual output
    double yd[outnode];   // expected output values

    double x[innode];     // input vector
    double x1[hidenode];  // hidden-node state values
    double x2[outnode];   // output-node state values
    double o1[hidenode];  // hidden-layer activation values
    double o2[outnode];   // output-layer activation values
    int i, j, k;

    for (int isamp = 0; isamp < trainsample; isamp++)
    {
        // construct the input and expected output of this sample
        for (i = 0; i < innode; i++)
            x[i] = p[isamp][i];
        for (i = 0; i < outnode; i++)
            yd[i] = t[isamp][i];

        for (j = 0; j < hidenode; j++)
        {
            o1[j] = 0.0;
            for (i = 0; i < innode; i++)
                o1[j] = o1[j] + w[i][j] * x[i];        // input activation of each hidden unit
            x1[j] = 1.0 / (1.0 + exp(-o1[j] - b1[j])); // sigmoid output of each hidden unit
            // if (o1[j] + b1[j] > 0) x1[j] = 1;
            // else x1[j] = 0;
        }

        for (k = 0; k < outnode; k++)
        {
            o2[k] = 0.0;
            for (j = 0; j < hidenode; j++)
                o2[k] = o2[k] + w1[j][k] * x1[j];      // input activation of each output unit
            x2[k] = 1.0 / (1.0 + exp(-o2[k] - b2[k])); // sigmoid output of each output unit
            // if (o2[k] + b2[k] > 0) x2[k] = 1;
            // else x2[k] = 0;
        }

        e = 0.0;
        for (k = 0; k < outnode; k++)
        {
            qq[k] = (yd[k] - x2[k]) * x2[k] * (1.0 - x2[k]); // output-layer error term
            e += fabs(yd[k] - x2[k]) * fabs(yd[k] - x2[k]);  // accumulate the squared error

            for (j = 0; j < hidenode; j++)
                w1[j][k] = w1[j][k] + rate_w1 * qq[k] * x1[j]; // new hidden-to-output weights
        }
        e = sqrt(e); // root of the summed squared error
        error = e;

        for (j = 0; j < hidenode; j++)
        {
            pp[j] = 0.0;
            for (k = 0; k < outnode; k++)
                pp[j] = pp[j] + qq[k] * w1[j][k];
            pp[j] = pp[j] * x1[j] * (1.0 - x1[j]);         // hidden-layer error term

            for (i = 0; i < innode; i++)
                w[i][j] = w[i][j] + rate_w * pp[j] * x[i]; // new input-to-hidden weights
        }

        for (k = 0; k < outnode; k++)
            b2[k] = b2[k] + rate_b2 * qq[k]; // new output-layer thresholds
        for (j = 0; j < hidenode; j++)
            b1[j] = b1[j] + rate_b1 * pp[j]; // new hidden-layer thresholds

    } // end of the sample loop
}
// end train
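
In standard notation, each iteration of the sample loop performs gradient descent on the squared error of one sample with sigmoid activations $\sigma(u) = 1/(1 + e^{-u})$:

$$x^{(1)}_j = \sigma\Big(\textstyle\sum_i w_{ij}\,x_i + b^{(1)}_j\Big), \qquad x^{(2)}_k = \sigma\Big(\textstyle\sum_j w^{(1)}_{jk}\,x^{(1)}_j + b^{(2)}_k\Big)$$

$$q_k = \big(y_k - x^{(2)}_k\big)\,x^{(2)}_k\big(1 - x^{(2)}_k\big), \qquad p_j = x^{(1)}_j\big(1 - x^{(1)}_j\big)\textstyle\sum_k q_k\,w^{(1)}_{jk}$$

$$w^{(1)}_{jk} \leftarrow w^{(1)}_{jk} + \eta_{w1}\,q_k\,x^{(1)}_j, \qquad w_{ij} \leftarrow w_{ij} + \eta_w\,p_j\,x_i, \qquad b^{(2)}_k \leftarrow b^{(2)}_k + \eta_{b2}\,q_k, \qquad b^{(1)}_j \leftarrow b^{(1)}_j + \eta_{b1}\,p_j$$

One quirk of the ordering: w1 is updated before pp is computed, so the hidden-layer error term uses the already-updated hidden-to-output weights rather than the ones from the forward pass.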

//////////////////////////////////////////////////////////////////////

double *BpNet::recognize(double *p)
{
    double x[innode];     // input vector
    double x1[hidenode];  // hidden-node state values
    double x2[outnode];   // output-node state values
    double o1[hidenode];  // hidden-layer activation values
    double o2[outnode];   // output-layer activation values
    int i, j, k;

    for (i = 0; i < innode; i++)
        x[i] = p[i];

    for (j = 0; j < hidenode; j++)
    {
        o1[j] = 0.0;
        for (i = 0; i < innode; i++)
            o1[j] = o1[j] + w[i][j] * x[i];        // activation of each hidden unit
        x1[j] = 1.0 / (1.0 + exp(-o1[j] - b1[j])); // output of each hidden unit
        // if (o1[j] + b1[j] > 0) x1[j] = 1;
        // else x1[j] = 0;
    }

    for (k = 0; k < outnode; k++)
    {
        o2[k] = 0.0;
        for (j = 0; j < hidenode; j++)
            o2[k] = o2[k] + w1[j][k] * x1[j];      // activation of each output unit
        x2[k] = 1.0 / (1.0 + exp(-o2[k] - b2[k])); // output of each output unit
        // if (o2[k] + b2[k] > 0) x2[k] = 1;
        // else x2[k] = 0;
    }

    for (k = 0; k < outnode; k++)
        shuchu[k] = x2[k];
    return shuchu;
}
// end recognize
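
To make the calling sequence concrete, here is a hypothetical driver that trains the network on XOR. It assumes the sketched BpNet.h above with innode = 2, hidenode = 4, outnode = 1, trainsample = 4; the data, epoch limit, and error target are illustrative only:

#include "stdafx.h"
#include "BpNet.h"
#include <cstdio>
#include <cstdlib>
#include <ctime>

int main()
{
    double p[trainsample][innode] = { {0, 0}, {0, 1}, {1, 0}, {1, 1} }; // XOR inputs
    double t[trainsample][outnode] = { {0}, {1}, {1}, {0} };            // XOR targets

    srand((unsigned)time(NULL)); // seed rand(), which randx() relies on

    BpNet net;
    net.init(); // random initial weights and thresholds

    // repeat the per-sample training pass until the error is small enough
    for (int epoch = 0; epoch < 100000 && net.error > 0.01; epoch++)
        net.train(p, t);

    double in[innode] = { 1, 0 };
    double *out = net.recognize(in); // forward pass with the trained weights
    printf("recognize(1, 0) = %f\n", out[0]);
    return 0;
}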

void BpNet::writetrain()
{   // Zeng Zhijun, July 2006
    AfxMessageBox("If you haven't trained yet, write the weights only after training! Don't write them carelessly: unless you think this training run is the best, it will overwrite the weights already trained, and you will have to spend time training again!");
    AfxMessageBox("If you think this training result is the best, save it, and you won't have to spend time training next time!", MB_YESNO, NULL);
    FILE *stream0;
    FILE *stream1;
    FILE *stream2;
    FILE *stream3;
