Neural network BP algorithm (implemented by C program)

Source: Internet
Author: User

File input/output directory: F:/BP/

Training sample file name: training sample.txt (xunlianyangben.txt)

Value:

1
1
-1
1
-1
1
0
1
0
1

Output file names: weight.txt (quanzhi, the learned weights) and threshold.txt (huozhi, the learned thresholds).

======================================

# Include "stdlib. H"
# Include "math. H"
# Include "conio. H"
# Include "stdio. H"
# Define N 2/*/number of learning samples */
# Define in 3/*/number of input-layer neurons */
# Define HN 3/*/number of hidden layer neurons */
# Define on 2/*/number of neurons in the output layer */
# Define z 20/*/Save the old weight-> Save the weight of study each time */
Double P [in];/*/input data of a single sample */
Double T [on];/*/single sample instructor data */
Double W [HN] [in];/*/input layer-to-hidden layer weight */
Double V [on] [HN]; // The weight from the hidden layer to the output layer */
Double X [HN];/*/hidden layer input */
Double Y [on];/*/input in the output layer */
Double H [HN];/*/output of the hidden layer */
Double O [on];/*/output at the output layer */
Double yu_hn [HN];/*/hidden layer threshold */
Double yu_on [on];/*/threshold of the output layer */
Double err_m [N]; // The total error of the M-th sample */
Double A;/*/learning efficiency from the output layer to the hidden layer */
Double B;/*/learning efficiency from the hidden layer to the input layer */
Double alpha;/*/Momentum factor, improved BP algorithm */
Double d_err [on];

File * FP;
/* Define the structure of a learning sample */
Struct {
Double input [in];
Double teach [on];
} Study_data [N];

/* Improved BP algorithm is used to save the weight of each calculation */
Struct {
Double old_w [HN] [in];
Double old_v [on] [HN];
} Old_wv [Z];

/*
 * Print the welcome banner and usage notes, then wait for a key press
 * and clear the screen.  Returns 0 (the original declared int but
 * returned nothing — undefined if the caller used the value).
 */
int start_show(void)
{
    clrscr();
    printf("\n************************\n");
    printf("*    Welcome to use    *\n");
    printf("*   this program of    *\n");
    printf("*  calculating the BP  *\n");
    printf("*        model!        *\n");
    printf("*   happy every day!   *\n");
    printf("************************\n");
    printf("\n\nBefore starting, please read the follows carefully:\n");
    printf("1. Please ensure the path of the \"training sample.txt\" (xunlianyangben.txt) is\ncorrect, like \"F:/BP/training sample.txt\"!\n");
    printf("2. The calculating results will be saved in the path of \"F:/BP/\"!\n");
    printf("3. The program will load 10 datas when running from \"F:/BP/training sample.txt\"!\n");
    printf("4. The program of BP can study itself for no more than 30000 times.\nAnd surpassing the number, the program will be ended by itself in\npreventing running infinitely because of error!\n");
    printf("\n");
    printf("Now press any key to start...\n");
    getch();   /* two reads: the original swallowed a buffered keystroke */
    getch();
    clrscr();
    return 0;
}

/*
 * Print the farewell banner, wait for a key press, and terminate the
 * process via exit(0).  Never returns.
 */
int end_show(void)
{
    printf("\n---------------------------------------------------\n");
    printf("The program has reached the end successfully!\n\nPress any key to exit!\n");
    printf("\n************************\n");
    printf("*   This is the end    *\n");
    printf("* of the program which *\n");
    printf("* can calculate the BP *\n");
    printf("*        model!        *\n");
    printf("************************\n");
    printf("*   Thanks for using!  *\n");
    printf("*   happy every day!   *\n");
    printf("************************\n");
    getch();
    exit(0);
}

Gettrainingdata ()/* OK */
{Int m, I, J;
Int datr;

If (FP = fopen ("F: // BP // training sample .txt", "R") = NULL)/* read the training sample */
{
Printf ("cannot open file strike any key exit! ");
Getch ();
Exit (1 );
}

M = 0;
I = 0;
J = 0;
While (fscanf (FP, "% d", & datr )! = EOF)
{J ++;
If (j <= (N * In ))
{If (I {
Study_data [M]. Input [I] = datr;
/* Printf ("/nthe study_datat [% d]. input [% d] = % F/N ", M, I, study_data [M]. input [I]); getch (); * // * use to check the loaded training datas */
}
If (M = (N-1) & I = (in-1 ))
{
M = 0;
I =-1;
}
If (I = (in-1 ))
{
M ++;
I =-1;
}
}
Else if (n * In) {if (I {study_data [M]. Teach [I] = datr;
/* Printf ("/nthe study_data [% d]. teach [% d] = % F ", M, I, study_data [M]. teach [I]); getch (); * // * use to check the loaded training datas */
}
If (M = (N-1) & I = (on-1 ))
Printf ("/N ");

If (I = (on-1 ))
{M ++;
I =-1;
}
}
I ++;
}
Fclose (FP );
Printf ("/nthere are [% d] datats that have been loaded successfully! /N ", J );

/* Show the data which has been loaded! */
Printf ("/nshow the data which has been loaded as follows:/N ");
For (m = 0; m {for (I = 0; I {printf ("/nstudy_data [% d]. input [% d] = % F ", M, I, study_data [M]. input [I]);
}
For (j = 0; J {printf ("/nstudy_data [% d]. teach [% d] = % F ", M, J, study_data [M]. teach [J]);
}
}
Printf ("/n/npress any key to start calculating ...");
Getch ();
Return 1;
}

/*///////////////////////////////////*/
/* Initialization permission and threshold subroutine */
/*///////////////////////////////////*/
Initial ()
{Int I;
Int II;
Int J;
Int JJ;
Int K;
Int KK;
/* Implicit layer permission and threshold value initialization */

For (I = 0; I {
For (j = 1; J {W [I] [J] = (double) (RAND ()/32767.0) * 2-1 ); /* initialize the weights from the input layer to the hidden layer, and randomly simulate 0 and 1-1 */
Printf ("W [% d] [% d] = % F/N", I, j, W [I] [J]);
}
}
For (II = 0; II {
For (JJ = 0; JJ {v [II] [JJ] = (double) (RAND ()/32767.0) * 2-1 ); /* initialize the weights from the hidden layer to the output layer, and randomly simulate 0 and 1-1 */
Printf ("V [% d] [% d] = % F/N", II, JJ, V [II] [JJ]);
}
}
For (k = 0; k {
Yu_hn [k] = (double) (RAND ()/32767.0) * 2-1);/* hidden layer threshold value initialization,-0.01 ~ Between 0.01 */
Printf ("yu_hn [% d] = % F/N", K, yu_hn [k]);
}
For (KK = 0; KK {
Yu_on [Kk] = (double) (RAND ()/32767.0) * 2-1);/* initialize the output layer threshold,-0.01 ~ Between 0.01 */
}
Return 1;
}/* Subprogram initial () ends */

/*////////////////////////////////////// ////*/
/* Input subroutine for the m learning sample */
/*////////////////////////////////////// ///*/
Input_p (INT m)
{Int I, J;

For (I = 0; I {P [I] = study_data [M]. Input [I];
Printf ("P [% d] = % F/N", I, P [I]);
}
/* Obtain the data of the MTH sample */
Return 1;
}/* Subprogram input_p (m) end */

/*////////////////////////////////////// ///*/
/* Signal subroutine of the M-level sample teacher */
/*////////////////////////////////////// ///*/
Input_t (INT m)
{Int K;

For (k = 0; k t [k] = study_data [M]. Teach [k];
Return 1;
}/* Subprogram input_t (m) end */

H_ I _o ()
{
Double sigma;
Int I, J;
For (j = 0; J {
Sigma = 0;
For (I = 0; I {Sigma + = W [J] [I] * P [I];/* calculate the Inner Product of the hidden layer */
}

X [J] = sigma-YU_HN [I];/* find the hidden layer net input, why do the hidden layer threshold value */
H [J] = 1.0/(1.0 + exp (-X [J]);/* evaluate the siglon algorithm output from the hidden layer */
}
Return 1;
}/* Subprogram h_ I _o () end */

 

O_ I _o ()
{Int K;
Int J;
Double sigma;
For (k = 0; k {
Sigma = 0.0;
For (j = 0; J {
Sigma + = V [k] [J] * H [k];
}
Y [k] = sigma-YU_ON [k];
O [k] = 1.0/(1.0 + exp (-y [k]);
}
Return 1;
}

Int err_o_h (INT m)
{Int K;
Double abs_err [on];
Double sqr_err = 0;
For (k = 0; k {
Abs_err [k] = T [k]-O [k];
Sqr_err + = (abs_err [k]) * (abs_err [k]);
D_err [k] = abs_err [k] * o [k] * (1.0-o [k]);
Err_m [m] = sqr_err/2;
}
Return 1;
}

 

Double e_err [HN];
Int err_h_ I ()
{
Int J, K;
Double sigma;
For (j = 0; J {
Sigma = 0.0;
For (k = 0; k {
Sigma = d_err [k] * V [k] [J];
}
E_err [J] = Sigma * H [J] * (1-H [J]);
}
Return 1;
}

Savewv (INT m)
{Int I;
Int II;
Int J;
Int JJ;
For (I = 0; I {
For (j = 0; J {
Old_wv [M]. old_w [I] [J] = W [I] [J];
}
}
For (II = 0; II {
For (JJ = 0; JJ {
Old_wv [M]. old_v [II] [JJ] = V [II] [JJ];
}
}
Return 1;
}

Int delta_o_h (int n)/* (int m, int N )*/
{Int K, J;
If (n <1)/* n <= 1 */
{
For (k = 0; k {
For (j = 0; J {
V [k] [J] = V [k] [J] + A * d_err [k] * H [J];
}
Yu_on [k] + = A * d_err [k];
}
}
Else if (n> 1)
{
For (k = 0; k {
For (j = 0; J {
V [k] [J] = V [k] [J] + A * d_err [k] * H [J] + Alpha * (V [k] [J]-old_wv [(n-1)]. old_v [k] [J]);
}
Yu_on [k] + = A * d_err [k];
}
}
Return 1;
}

Delta_h_ I (INT N)/* (INT M, int N )*/
{Int I, J;

If (n <= 1)/* n <= 1 */
{
For (j = 0; J {
For (I = 0; I {
W [J] [I] = W [J] [I] + B * e_err [J] * P [I];
}
Yu_hn [J] + = B * e_err [J];
}
}
Else if (n> 1)
{
For (j = 0; J {
For (I = 0; I {
W [J] [I] = W [J] [I] + B * e_err [J] * P [I] + Alpha * (W [J] [I]-old_wv [(n-1)]. old_w [J] [I]);
}
Yu_hn [J] + = B * e_err [J];
}
}
Return 1;
}

 

Double err_sum ()
{Int m;
Double total_err = 0;
For (m = 0; m {
Total_err + = err_m [m];
}
Return total_err;
}

 

Void savequan ()
{Int I, J, K;
Int II, JJ, KK;

If (FP = fopen ("F: // BP // weight .txt", "A") = NULL)/* Save the result at F: /hsz/BPC /*. TXT */
{
Printf ("cannot open file strike any key exit! ");
Getch ();
Exit (1 );
}

Fprintf (FP, "Save the result of" quanzhi "as follows:/N ");
For (I = 0; I {
For (j = 0; j fprintf (FP, "W [% d] [% d] = % F/N", I, j, W [I] [J]);
}
Fprintf (FP, "/N ");
For (II = 0; II {
For (JJ = 0; JJ fprintf (FP, "V [% d] [% d] = % F/N", II, JJ, V [II] [JJ]);
}
Fclose (FP );
Printf ("/nthe result of privilege value .txt" (quanzhi) has been saved successfully! /Npress any key to continue ...");
Getch ();

If (FP = fopen ("F: // BP // values: .txt", "A") = NULL)/* Save the result at F: /hsz/BPC /*/
{
Printf ("cannot open file strike any key exit! ");
Getch ();
Exit (1 );
}
Fprintf (FP, "Save the result of" output layer threshold "(huozhi) as follows:/N ");
For (k = 0; k fprintf (FP, "yu_on [% d] = % F/N", K, yu_on [k]);

Fprintf (FP, "/nsave the result of" The Hidden Layer threshold is "(huozhi) as follows:/N ");
For (KK = 0; KK fprintf (FP, "yu_hn [% d] = % F/N", KK, yu_hn [Kk]);

Fclose (FP );
Printf ("/nthe result of temporary partition value .txt" (huozhi) has been saved successfully! /Npress any key to continue ...");
Getch ();
}

/**********************/
/** Program entry, that is, the main program **/
/**********************/

Void main ()
{Double pre_error;
Double sum_err;
Int study;
Int flag;
Flag = 30000;
A = 0.7;
B = 0.7;
Alpha = 0.9;
Study = 0;
Pre_error = 0.0001;/* the actual value is pre_error = 0.0001 ;*/

Start_show ();
Gettrainingdata ();
Initial ();

Do
{Int m;
++ Study;
For (m = 0; m {
Input_p (m );
Input_t (m );
H_ I _o ();
O_ I _o ();
Err_o_h (m );
Err_h_ I ();
Savewv (m );/****************/
Delta_o_h (m);/* (M, study )*/
Delta_h_ I (m);/* (M, study )*/
}
Sum_err = err_sum ();
Printf ("sum_err = % F/N", sum_err );
Printf ("pre_error = % F/n", pre_error );

If (study> flag)
{
Printf ("/n *******************************/N ");
Printf ("the program is ended by itself because of error! /Nthe learning times is surpassed! /N ");
Printf ("*****************************/N ");
Getch ();
Break;
}

} While (sum_err> pre_error );

Printf ("/n *****************/N ");
Printf ("/nthe program have studyed for [% d] times! /N ", study );
Printf ("/n *****************/N ");
Savequan ();/* Save the results */
End_show ();
}

======================================

weight.txt (quanzhi)

{Save the result of "quanzhi" as follows:
W [0] [0] = 0.350578
W [0] [1] =-1.008697
W [0] [2] =-0.962250
W [1] [0] = 0.055661
W [1] [1] =-0.372367
W [1] [2] =-0.890795
W [2] [0] = 0.129752
W [2] [1] =-0.332591
W [2] [2] =-0.521561

V [0] [0] =-2.932654
V [0] [1] =-3.720583
V [0] [2] =-2.648183
V [1] [0] = 2.938970
V [1] [1] = 1.633281
V [1] [2] = 1.944077

}

threshold.txt (huozhi)

{Save the result of "output layer threshold" (huozhi) as follows:
Yu_on [0] =-4.226843
Yu_on [1] = 1.501791

Save the result of "the hidden layer threshold is" (huozhi) as follows:
Yu_hn [0] =-0.431459
Yu_hn [1] = 0.452127
Yu_hn [2] = 0.258449

}

========================================

The above program is restructured into a VC ++ program!

Thank you for your comments!

Thank you for sharing your learning experience!

 

From http://hshu.bokee.com/4806075.html

Contact Us

The content of this page is sourced from the Internet and does not represent Alibaba Cloud's opinion; the products and services mentioned on this page have no relationship with Alibaba Cloud. If the content of the page seems confusing, please write us an email, and we will handle the problem within 5 days of receiving it.

If you find any instances of plagiarism from the community, please send an email to: info-contact@alibabacloud.com and provide relevant evidence. A staff member will contact you within 5 working days.

A Free Trial That Lets You Build Big!

Start building with 50+ products and up to 12 months usage for Elastic Compute Service

  • Sales Support

    1 on 1 presale consultation

  • After-Sales Support

    24/7 Technical Support 6 Free Tickets per Quarter Faster Response

  • Alibaba Cloud offers highly flexible support services tailored to meet your exact needs.