#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <ctype.h>
#include <math.h>
#include <time.h>
#include <stdint.h>
#include "nmglobal.h"

/********************************
** BACK PROPAGATION NEURAL NET **
*********************************
** This code is a modified version of the code
** that was submitted to BYTE Magazine by
** Maureen Caudill.  It accompanied an article
** that I CANNOT NOW RECALL.
** The author's original heading/comment was
** as follows:
**
**  Backpropagation Network
**  Written by Maureen Caudill
**  in Think C 4.0 on a Macintosh
**
**  (c) Maureen Caudill 1988-1991
**  This network will accept 5x7 input patterns
**  and produce 8 bit output patterns.
**  The source code may be copied or modified without restriction,
**  but no fee may be charged for its use.
**
** ++++++++++++++
** I have modified the code so that it will work
** on systems other than a Macintosh -- RG
*/

/*
** DEFINES
*/
#define T 1             /* TRUE */
#define F 0             /* FALSE */
#define ERR -1
#define MAXPATS 10      /* max number of patterns in data file */
#define IN_X_SIZE 5     /* number of neurodes/row of input layer */
#define IN_Y_SIZE 7     /* number of neurodes/col of input layer */
#define IN_SIZE 35      /* equals IN_X_SIZE*IN_Y_SIZE */
#define MID_SIZE 8      /* number of neurodes in middle layer */
#define OUT_SIZE 8      /* number of neurodes in output layer */
#define MARGIN 0.1      /* how near to 1,0 do we have to come to stop? */
#define BETA 0.09       /* beta learning constant */
#define ALPHA 0.09      /* momentum term constant */
#define STOP 0.1        /* when worst_error less than STOP, training is done */

/*
** The Neural Net test requires an input data file.
** The name is specified here.
*/
const char *inpath="NNET.DAT";

/*
** GLOBALS
*/
double mid_wts[MID_SIZE][IN_SIZE];      /* middle layer weights */
double out_wts[OUT_SIZE][MID_SIZE];     /* output layer weights */
double mid_out[MID_SIZE];               /* middle layer output */
double out_out[OUT_SIZE];               /* output layer output */
double mid_error[MID_SIZE];             /* middle layer errors */
double out_error[OUT_SIZE];             /* output layer errors */
double mid_wt_change[MID_SIZE][IN_SIZE];  /* storage for last wt change */
double out_wt_change[OUT_SIZE][MID_SIZE]; /* storage for last wt change */
double in_pats[MAXPATS][IN_SIZE];       /* input patterns */
double out_pats[MAXPATS][OUT_SIZE];     /* desired output patterns */
double tot_out_error[MAXPATS];          /* measure of whether net is done */
double out_wt_cum_change[OUT_SIZE][MID_SIZE]; /* accumulated wt changes */
double mid_wt_cum_change[MID_SIZE][IN_SIZE];  /* accumulated wt changes */

double worst_error;     /* worst error each pass through the data */
double average_error;   /* average error each pass through the data */
double avg_out_error[MAXPATS];  /* average error each pattern */

int iteration_count;    /* number of passes thru network so far */
int numpats;            /* number of patterns in data file */
int numpasses;          /* number of training passes through data file */
int learned;            /* flag--if TRUE, network has learned all patterns */

/*
** PROTOTYPES
*/
static clock_t DoNNetIteration(unsigned long nloops);
static void do_mid_forward(int patt);
static void do_out_forward();
void display_output(int patt);
static void do_forward_pass(int patt);
static void do_out_error(int patt);
static void worst_pass_error();
static void do_mid_error();
static void adjust_out_wts();
static void adjust_mid_wts(int patt);
static void do_back_pass(int patt);
static void move_wt_changes();
static int check_out_error();
static void zero_changes();
static void randomize_wts();
static int read_data_file();
/* static int initialize_net(); */
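/*********************
** sigmoid() sketch **
**********************
** Illustration only -- NOT part of the original benchmark and
** never called.  Both layers squash their weighted input sums
** with the logistic sigmoid f(x) = 1/(1+exp(-x)); backprop then
** uses the fact that its derivative can be recovered from the
** output alone: f'(x) = f(x)*(1-f(x)).  A minimal sketch of
** both, matching the inline math in do_mid_forward(),
** do_out_forward() and do_mid_error() below:
**
** static double sigmoid(double x)
** {
**     return 1.0/(1.0+exp(-x));    map any sum into (0,1)
** }
**
** static double sigmoid_deriv(double fx)
** {
**     return fx*(1.0-fx);          fx is sigmoid(x), i.e. the
** }                                neurode's forward-pass output
*/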
/***********
** DoNNet **
************
** Perform the neural net benchmark.
** Note that this benchmark is one of the few that
** requires an input file.  That file is "NNET.DAT" and
** should be in the local directory (from which the
** benchmark program is launched).
*/
void DoNNET(void)
{
const char* context="CPU:NNET";
NNetStruct* locnnetstruct = &global_nnetstruct;
clock_t total_time = 0;
int iterations = 0;

/*
** Init random number generator.
** NOTE: It is important that the random number generator
** be re-initialized for every pass through this test.
** The NNET algorithm uses the random number generator
** to initialize the net.  Results are sensitive to
** the initial neural net state.
*/
randnum((int32_t)3);

/*
** Read in the input and output patterns.  We'll do this
** only once here at the beginning.  These values don't
** change once loaded.
*/
if(read_data_file()!=0)
{
	exit(1);
}

/*
** See if we need to perform self adjustment loop.
*/
if (locnnetstruct->adjust == FALSE)
{
	locnnetstruct->adjust = TRUE;
	/*
	** Do self-adjustment.  This involves initializing the
	** # of loops and increasing the loop count until we
	** get a number of loops that we can use.
	*/
	for (locnnetstruct->loops = 1;
	     locnnetstruct->loops < MAXNNETLOOPS;
	     locnnetstruct->loops++)
	{
		randnum((int32_t)3);
		if(DoNNetIteration(locnnetstruct->loops) > global_min_ticks)
		{
			break;
		}
	}
}

do {
	randnum((int32_t)3);    /* Gotta do this for Neural Net */
	total_time += DoNNetIteration(locnnetstruct->loops);
	iterations += locnnetstruct->loops;
} while (total_time < locnnetstruct->request_secs * CLOCKS_PER_SEC);

locnnetstruct->iterspersec = (double)(iterations * CLOCKS_PER_SEC) / total_time;
}

/********************
** DoNNetIteration **
*********************
** Do a single iteration of the neural net benchmark.
** By iteration, we mean a "learning" pass.
*/
static clock_t DoNNetIteration(unsigned long nloops)
{
clock_t start, stop;
int patt;

/*
** Run nloops learning cycles.  Notice that, counted with
** the learning cycle is the weight randomization and
** zeroing of changes.  This should reduce clock jitter,
** since we don't have to stop and start the clock for
** each iteration.
*/
start = clock();
while(nloops--)
{
	randomize_wts();
	zero_changes();
	iteration_count=1;
	learned = F;
	numpasses = 0;
	while (learned == F)
	{
		for (patt=0; patt<numpats; patt++)
		{
			worst_error = 0.0;  /* reset this every pass through data */
			move_wt_changes();  /* move last pass's wt changes to momentum array */
			do_forward_pass(patt);
			do_back_pass(patt);
			iteration_count++;
		}
		numpasses++;
		learned = check_out_error();
	}
}
stop = clock();
return stop - start;
}

/*************************
** do_mid_forward(patt) **
**************************
** Process the middle layer's forward pass.
** The activation of a middle layer neurode is the weighted
** sum of its inputs from the input pattern, with the sigmoid
** function applied to that sum.
**/
static void do_mid_forward(int patt)
{
double sum;
int neurode, i;

for (neurode=0; neurode<MID_SIZE; neurode++)
{
	sum = 0.0;
	for (i=0; i<IN_SIZE; i++)
	{       /* compute weighted sum of input signals */
		sum += mid_wts[neurode][i]*in_pats[patt][i];
	}
	/*
	** Apply the sigmoid function f(x) = 1/(1+exp(-x)) to the sum.
	*/
	sum = 1.0/(1.0+exp(-sum));
	mid_out[neurode] = sum;
}
return;
}

/*********************
** do_out_forward() **
**********************
** Process the forward pass through the output layer.
** The activation of the output layer is the weighted sum of
** the inputs (outputs from the middle layer), modified by the
** sigmoid function.
**/
static void do_out_forward()
{
double sum;
int neurode, i;

for (neurode=0; neurode<OUT_SIZE; neurode++)
{
	sum = 0.0;
	for (i=0; i<MID_SIZE; i++)
	{       /* compute weighted sum of signals from middle layer */
		sum += out_wts[neurode][i]*mid_out[i];
	}
	/*
	** Apply f(x) = 1/(1+exp(-x)) to the weighted input.
	*/
	sum = 1.0/(1.0+exp(-sum));
	out_out[neurode] = sum;
}
return;
}

/*************************
** display_output(patt) **
**************************
** Display the actual output vs. the desired output of the
** network.  Once training is complete, this is a way to see
** how well the network has learned the patterns.
** NOTE: This routine is not called while the benchmark is
** running.
**/
void display_output(int patt)
{
int i;

printf("\n Iteration # %d",iteration_count);
printf("\n Desired Output:  ");
for (i=0; i<OUT_SIZE; i++)
{
	printf("%6.3f  ",out_pats[patt][i]);
}
printf("\n Actual Output:   ");
for (i=0; i<OUT_SIZE; i++)
{
	printf("%6.3f  ",out_out[i]);
}
printf("\n");
return;
}

/**********************
** do_forward_pass() **
***********************
** Control function for the forward pass through the network.
** NOTE: the call to display_output() is disabled while the
** benchmark is running.
**/
static void do_forward_pass(int patt)
{
do_mid_forward(patt);   /* process forward pass, middle layer */
do_out_forward();       /* process forward pass, output layer */
/* display_output(patt);    ** display results of forward pass */
return;
}

/***********************
** do_out_error(patt) **
************************
** Compute the error for the output layer neurodes.
** This is simply Desired - Actual.
**/
static void do_out_error(int patt)
{
int neurode;
double error,tot_error,sum;

tot_error = 0.0;
sum = 0.0;
for (neurode=0; neurode<OUT_SIZE; neurode++)
{
	out_error[neurode] = out_pats[patt][neurode] - out_out[neurode];
	/*
	** While we're here, also compute the magnitude of the
	** total error and the worst error in this pass.
	** We use these to decide if we are done yet.
	*/
	error = out_error[neurode];
	if (error < 0.0)
	{
		sum += -error;
		if (-error > tot_error)
			tot_error = -error; /* worst error this pattern */
	}
	else
	{
		sum += error;
		if (error > tot_error)
			tot_error = error;  /* worst error this pattern */
	}
}
avg_out_error[patt] = sum/OUT_SIZE;
tot_out_error[patt] = tot_error;
return;
}

/***********************
** worst_pass_error() **
************************
** Find the worst and average error in the pass and save them.
**/
static void worst_pass_error()
{
double error,sum;
int i;

error = 0.0;
sum = 0.0;
for (i=0; i<numpats; i++)
{
	if (tot_out_error[i] > error)
		error = tot_out_error[i];
	sum += avg_out_error[i];
}
worst_error = error;
average_error = sum/numpats;
return;
}
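/****************************
** delta rule (worked ex.) **
*****************************
** Illustration only -- NOT part of the original benchmark.
** The weight adjustments in adjust_out_wts() and
** adjust_mid_wts() below follow the delta rule with a
** momentum term:
**
**     delta  = BETA * error[neurode] * input[weight];
**     delta += ALPHA * last_pass_change[neurode][weight];
**     wts[neurode][weight] += delta;
**
** Worked example with hypothetical numbers: with BETA = 0.09
** and ALPHA = 0.09, an output error of 0.5, a feeding
** activation of 0.8, and a previous change of 0.02 give
**
**     delta = 0.09*0.5*0.8 + 0.09*0.02
**           = 0.036 + 0.0018 = 0.0378
**
** so the weight grows by about 0.038 on this step.  The
** momentum term reuses last pass's accumulated change to damp
** oscillation in the weight updates.
*/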
/*******************
** do_mid_error() **
********************
** Compute the error for the middle layer neurodes.
** This is based on the output errors computed above.
** Note that the derivative of the sigmoid f(x) is
**    f'(x) = f(x)(1 - f(x))
** Recall that f(x) is merely the output of the middle
** layer neurode on the forward pass.
**/
static void do_mid_error()
{
double sum;
int neurode, i;

for (neurode=0; neurode<MID_SIZE; neurode++)
{
	sum = 0.0;
	for (i=0; i<OUT_SIZE; i++)
		sum += out_wts[i][neurode]*out_error[i];

	/*
	** Apply the derivative of the sigmoid here.
	** Because of the choice of sigmoid f(I), the derivative
	** of the sigmoid is f'(I) = f(I)(1 - f(I)).
	*/
	mid_error[neurode] = mid_out[neurode]*(1-mid_out[neurode])*sum;
}
return;
}

/*********************
** adjust_out_wts() **
**********************
** Adjust the weights of the output layer.  The error for
** the output layer has been previously propagated back to
** the middle layer.
** Use the delta rule with momentum term to adjust the weights.
**/
static void adjust_out_wts()
{
int weight, neurode;
double learn,delta,alph;

learn = BETA;
alph  = ALPHA;
for (neurode=0; neurode<OUT_SIZE; neurode++)
{
	for (weight=0; weight<MID_SIZE; weight++)
	{
		/* standard delta rule */
		delta = learn * out_error[neurode] * mid_out[weight];

		/* now the momentum term */
		delta += alph * out_wt_change[neurode][weight];
		out_wts[neurode][weight] += delta;

		/* keep track of this pass's cum wt changes for next pass's momentum */
		out_wt_cum_change[neurode][weight] += delta;
	}
}
return;
}

/*************************
** adjust_mid_wts(patt) **
**************************
** Adjust the middle layer weights using the previously computed
** errors.
** We use the generalized delta rule with momentum term.
**/
static void adjust_mid_wts(int patt)
{
int weight, neurode;
double learn,alph,delta;

learn = BETA;
alph  = ALPHA;
for (neurode=0; neurode<MID_SIZE; neurode++)
{
	for (weight=0; weight<IN_SIZE; weight++)
	{
		/* first the basic delta rule */
		delta = learn * mid_error[neurode] * in_pats[patt][weight];

		/* with the momentum term */
		delta += alph * mid_wt_change[neurode][weight];
		mid_wts[neurode][weight] += delta;

		/* keep track of this pass's cum wt changes for next pass's momentum */
		mid_wt_cum_change[neurode][weight] += delta;
	}
}
return;
}

/*******************
** do_back_pass() **
********************
** Process the backward propagation of error through the network.
**/
static void do_back_pass(int patt)
{
do_out_error(patt);
do_mid_error();
adjust_out_wts();
adjust_mid_wts(patt);
return;
}

/**********************
** move_wt_changes() **
***********************
** Move the weight changes accumulated last pass into the
** wt-change arrays for use by the momentum term in this pass.
** Also zero out the accumulating arrays after the move.
**/
static void move_wt_changes()
{
int i,j;

for (i = 0; i<MID_SIZE; i++)
	for (j = 0; j<IN_SIZE; j++)
	{
		mid_wt_change[i][j] = mid_wt_cum_change[i][j];
		/*
		** Zero it out for next pass accumulation.
		*/
		mid_wt_cum_change[i][j] = 0.0;
	}

for (i = 0; i<OUT_SIZE; i++)
	for (j = 0; j<MID_SIZE; j++)
	{
		out_wt_change[i][j] = out_wt_cum_change[i][j];
		out_wt_cum_change[i][j] = 0.0;
	}
return;
}

/**********************
** check_out_error() **
***********************
** Check to see if the error in the output layer is below
** MARGIN wrt the desired output.  If so, then indicate that
** the network has learned the patterns.
** Remember, OUT_SIZE is the number of neurodes in the output
** layer and MARGIN is the margin of error.
**/
static int check_out_error()
{
int result,i,error;

result = T;
error  = F;
worst_pass_error();     /* identify the worst error in this pass */

for (i=0; i<numpats; i++)
{
	if (worst_error >= STOP) result = F;
	if (tot_out_error[i] >= 16.0) error = T;
}

if (error == T) result = ERR;

#ifdef DEBUG
/* printf("\n Error this pass thru data:   Worst: %8.3f; Average: %8.3f",
	worst_error,average_error); */
/* fprintf(outfile,
	"\n Error this pass thru data:   Worst: %8.3f; Average: %8.3f",
	worst_error, average_error); */
#endif

return(result);
}

/*******************
** zero_changes() **
********************
** Zero out all the wt change arrays.
**/
static void zero_changes()
{
int i,j;

for (i = 0; i<MID_SIZE; i++)
{
	for (j = 0; j<IN_SIZE; j++)
	{
		mid_wt_change[i][j] = 0.0;
		mid_wt_cum_change[i][j] = 0.0;
	}
}

for (i = 0; i<OUT_SIZE; i++)
{
	for (j = 0; j<MID_SIZE; j++)
	{
		out_wt_change[i][j] = 0.0;
		out_wt_cum_change[i][j] = 0.0;
	}
}
return;
}

/********************
** randomize_wts() **
*********************
** Initialize the weights in the middle and output layers to
** random values between -0.25..+0.25.
**
** NOTE: Had to make alterations to how the random numbers were
** created.  -- RG.
**/
static void randomize_wts()
{
int neurode,i;
double value;

for (neurode = 0; neurode<MID_SIZE; neurode++)
{
	for (i=0; i<IN_SIZE; i++)
	{
		value = (double)abs_randwc((int32_t)100000);
		value = value/(double)100000.0 - (double)0.5;
		mid_wts[neurode][i] = value/2;
	}
}
for (neurode = 0; neurode<OUT_SIZE; neurode++)
{
	for (i=0; i<MID_SIZE; i++)
	{
		value = (double)abs_randwc((int32_t)100000);
		value = value/(double)100000.0 - (double)0.5;
		out_wts[neurode][i] = value/2;
	}
}
return;
}

/*********************
** read_data_file() **
**********************
** Read in the input data file and store the patterns in
** in_pats and out_pats.
** The format for the data file is as follows:
** line#   data expected
** -----   ------------------------------
** 1       in-x-size, in-y-size, out-size
** 2       number of patterns in file
** 3       1st row of 1st input pattern
** 4..     following rows of 1st input pattern,
**         then the output pattern for the 1st input pattern,
**         then the 1st row of the 2nd pattern, etc.
**
** Each row of data is separated by commas or spaces.
** The data is expected to be ascii text corresponding to
** either a +1 or a 0.
**
** Returns -1 if any file error occurred, otherwise 0.
**/
static int read_data_file()
{
FILE *infile;
int xinsize,yinsize,youtsize;
int patt, element, i, row;
int vals_read;
int val1,val2,val3,val4,val5,val6,val7,val8;

infile = fopen(inpath, "r");
if (infile == NULL)
{
	printf("error in opening file!\n");
	return -1;
}

vals_read = fscanf(infile,"%d %d %d",&xinsize,&yinsize,&youtsize);
if (vals_read != 3)
{
	printf("Should read 3 items in line one; did read %d\n",vals_read);
	return -1;
}
vals_read = fscanf(infile,"%d",&numpats);
if (vals_read != 1)
{
	printf("Should read 1 item in line 2; did read %d\n",vals_read);
	return -1;
}
if (numpats > MAXPATS)
	numpats = MAXPATS;

for (patt=0; patt<numpats; patt++)
{
	element = 0;
	for (row = 0; row<yinsize; row++)
	{
		for (i=0; i<xinsize; i++)
		{
			vals_read = fscanf(infile,"%d",&val1);
			if (vals_read != 1)
			{
				printf("failure in reading input!\n");
				return -1;
			}
			element = i + row*xinsize;
			in_pats[patt][element] = (double)val1;
		}
	}
	/*
	** Soften the 0/1 inputs so they stay inside the
	** sigmoid's active range.
	*/
	for (i=0; i<IN_SIZE; i++)
	{
		in_pats[patt][i] = in_pats[patt][i]/2;
		if (in_pats[patt][i] >= 0.9) in_pats[patt][i] = 0.9;
		if (in_pats[patt][i] <= 0.1) in_pats[patt][i] = 0.1;
	}
	element = 0;
	vals_read = fscanf(infile,"%d %d %d %d %d %d %d %d", &val1, &val2,
		&val3, &val4, &val5, &val6, &val7, &val8);
	out_pats[patt][element] = (double)val1; element++;
	out_pats[patt][element] = (double)val2; element++;
	out_pats[patt][element] = (double)val3; element++;
	out_pats[patt][element] = (double)val4; element++;
	out_pats[patt][element] = (double)val5; element++;
	out_pats[patt][element] = (double)val6; element++;
	out_pats[patt][element] = (double)val7; element++;
	out_pats[patt][element] = (double)val8; element++;
}

/* printf("\n Closing the input file now. "); */
fclose(infile);
return(0);
}

/*********************
** initialize_net() **
**********************
** Do all the initialization stuff before beginning.
*/
/*
static int initialize_net()
{
int err_code;

randomize_wts();
zero_changes();
err_code = read_data_file();
iteration_count = 1;
return(err_code);
}
*/

/**********************
** display_mid_wts() **
***********************
** Display the weights on the middle layer neurodes.
** NOTE: This routine is not used in the benchmark
** test -- RG
**/
/*
static void display_mid_wts()
{
int neurode, weight, row, col;

fprintf(outfile,"\n Weights of Middle Layer neurodes:");
for (neurode=0; neurode<MID_SIZE; neurode++)
{
	fprintf(outfile,"\n  Mid Neurode # %d",neurode);
	for (row=0; row<IN_Y_SIZE; row++)
	{
		fprintf(outfile,"\n ");
		for (col=0; col<IN_X_SIZE; col++)
		{
			weight = IN_X_SIZE*row + col;
			fprintf(outfile," %8.3f ",mid_wts[neurode][weight]);
		}
	}
}
return;
}
*/
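/*********************
** standalone usage **
**********************
** Illustration only -- NOT part of the benchmark.  A minimal,
** hypothetical standalone driver showing the intended call
** order of the routines above (assumes NNET.DAT is present in
** the current directory):
**
** int main(void)
** {
**     int patt;
**
**     if (read_data_file() != 0) return 1;   load patterns once
**     randomize_wts();                       random initial state
**     zero_changes();                        clear momentum arrays
**     learned = F;
**     numpasses = 0;
**     while (learned == F)                   train until converged
**     {
**         for (patt=0; patt<numpats; patt++)
**         {
**             worst_error = 0.0;
**             move_wt_changes();
**             do_forward_pass(patt);
**             do_back_pass(patt);
**         }
**         numpasses++;
**         learned = check_out_error();       T, F, or ERR (diverged)
**     }
**     for (patt=0; patt<numpats; patt++)
**         display_output(patt);              show learned outputs
**     return 0;
** }
*/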