#include <stdio.h>
/* #include <stdlib.h>
#include <string.h>
#include <malloc.h> */
#include <math.h>
#include "nmglobal.h"
#include "nbench1.h"

/*
** The Neural Net test requires an input data file.
** The name is specified here.
*/
char *inpath="NNET.DAT";

/********************************
** BACK PROPAGATION NEURAL NET **
*********************************
** This code is a modified version of the code
** that was submitted to BYTE Magazine by
** Maureen Caudill.  It accompanied an article
** that I CANNOT NOW RECALL.
** The author's original heading/comment was
** as follows:
**
**  Backpropagation Network
**  Written by Maureen Caudill
**  in Think C 4.0 on a Macintosh
**
**  (c) Maureen Caudill 1988-1991
**  This network will accept 5x7 input patterns
**  and produce 8 bit output patterns.
**  The source code may be copied or modified without restriction,
**  but no fee may be charged for its use.
**
** ++++++++++++++
** I have modified the code so that it will work
** on systems other than a Macintosh -- RG
*/

/***********
** DoNNET **
************
** Perform the neural net benchmark.
** Note that this benchmark is one of the few that
** requires an input file.  That file is "NNET.DAT" and
** should be in the local directory (the one from which the
** benchmark program is launched).
*/
void DoNNET(void)
{
NNetStruct *locnnetstruct;      /* Local ptr to global data */
char *errorcontext;
unsigned long accumtime;
double iterations;

/*
** Link to global data
*/
locnnetstruct=&global_nnetstruct;

/*
** Set error context
*/
errorcontext="CPU:NNET";

/*
** Init random number generator.
** NOTE: It is important that the random number generator
**  be re-initialized for every pass through this test.
**  The NNET algorithm uses the random number generator
**  to initialize the net.  Results are sensitive to
**  the initial neural net state.
*/
/* randnum(3L); */
randnum((int32_t)3);

/*
** Read in the input and output patterns.  We'll do this
** only once here at the beginning.  These values don't
** change once loaded.
*/
if(read_data_file()!=0)
	ErrorExit();

/*
** See if we need to perform the self-adjustment loop.
*/
if(locnnetstruct->adjust==0)
{
	/*
	** Do self-adjustment.  This involves initializing the
	** # of loops and increasing the loop count until we
	** get a number of loops that we can use.
	*/
	for(locnnetstruct->loops=1L;
		locnnetstruct->loops<MAXNNETLOOPS;
		locnnetstruct->loops++)
	{	/* randnum(3L); */
		randnum((int32_t)3);
		if(DoNNetIteration(locnnetstruct->loops)
			>global_min_ticks) break;
	}
}

/*
** All's well if we get here.  Do the test.
*/
accumtime=0L;
iterations=(double)0.0;

do {
	/* randnum(3L); */      /* Gotta do this for Neural Net */
	randnum((int32_t)3);    /* Gotta do this for Neural Net */
	accumtime+=DoNNetIteration(locnnetstruct->loops);
	iterations+=(double)locnnetstruct->loops;
} while(TicksToSecs(accumtime)<locnnetstruct->request_secs);

/*
** Clean up, calculate results, and go home.  Be sure to
** show that we don't have to rerun adjustment code.
*/
locnnetstruct->iterspersec=iterations / TicksToFracSecs(accumtime);

if(locnnetstruct->adjust==0)
	locnnetstruct->adjust=1;

return;
}
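/*
** The learning pass below repeatedly evaluates the logistic sigmoid
** f(x) = 1/(1+exp(-x)) on the forward pass and its derivative
** f'(x) = f(x)(1 - f(x)) on the backward pass (see do_mid_error()
** further down).  The following is a minimal, self-contained sketch
** -- not part of the benchmark, and compiled out with #if 0 --
** showing how that derivative identity can be spot-checked
** numerically.  The helper names here are illustrative only.
*/
#if 0
#include <stdio.h>
#include <math.h>

static double sigmoid(double x)
{
	return 1.0/(1.0+exp(-x));
}

int main(void)
{
	double x = 0.7;
	double h = 1.0e-6;
	double analytic = sigmoid(x)*(1.0-sigmoid(x));          /* f'(x) via the identity */
	double numeric  = (sigmoid(x+h)-sigmoid(x-h))/(2.0*h);  /* central difference */
	printf("analytic: %.9f  numeric: %.9f\n", analytic, numeric);
	return 0;
}
#endif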
/********************
** DoNNetIteration **
*********************
** Do a single iteration of the neural net benchmark.
** By iteration, we mean a "learning" pass.
*/
static unsigned long DoNNetIteration(unsigned long nloops)
{
unsigned long elapsed;          /* Elapsed time */
int patt;

/*
** Run nloops learning cycles.  Notice that, counted with
** the learning cycle is the weight randomization and
** zeroing of changes.  This should reduce clock jitter,
** since we don't have to stop and start the clock for
** each iteration.
*/
elapsed=StartStopwatch();
while(nloops--)
{
	randomize_wts();
	zero_changes();
	iteration_count=1;
	learned = F;
	numpasses = 0;
	while (learned == F)
	{
		for (patt=0; patt<numpats; patt++)
		{
			worst_error = 0.0;      /* reset this every pass through data */
			move_wt_changes();      /* move last pass's wt changes to momentum array */
			do_forward_pass(patt);
			do_back_pass(patt);
			iteration_count++;
		}
		numpasses++;
		learned = check_out_error();
	}
#ifdef DEBUG
printf("Learned in %d passes\n",numpasses);
#endif
}
return(StopStopwatch(elapsed));
}

/*************************
** do_mid_forward(patt) **
**************************
** Process the middle layer's forward pass.
** The activation of a middle layer neurode is the weighted
** sum of the inputs from the input pattern, with the sigmoid
** function applied to that sum.
**/
static void do_mid_forward(int patt)
{
double sum;
int neurode, i;

for (neurode=0; neurode<MID_SIZE; neurode++)
{
	sum = 0.0;
	for (i=0; i<IN_SIZE; i++)
	{       /* compute weighted sum of input signals */
		sum += mid_wts[neurode][i]*in_pats[patt][i];
	}
	/*
	** apply the sigmoid function f(x) = 1/(1+exp(-x))
	*/
	sum = 1.0/(1.0+exp(-sum));
	mid_out[neurode] = sum;
}
return;
}

/*********************
** do_out_forward() **
**********************
** Process the forward pass through the output layer.
** The activation of the output layer is the weighted sum of
** the inputs (the outputs from the middle layer), modified
** by the sigmoid function.
**/
static void do_out_forward()
{
double sum;
int neurode, i;

for (neurode=0; neurode<OUT_SIZE; neurode++)
{
	sum = 0.0;
	for (i=0; i<MID_SIZE; i++)
	{       /* compute weighted sum of signals from middle layer */
		sum += out_wts[neurode][i]*mid_out[i];
	}
	/*
	** apply the sigmoid function f(x) = 1/(1+exp(-x))
	*/
	sum = 1.0/(1.0+exp(-sum));
	out_out[neurode] = sum;
}
return;
}

/**************************
** do_forward_pass(patt) **
***************************
** Control function for the forward pass through the network.
** The member of the pattern set to be used is specified by patt.
**/
static void do_forward_pass(int patt)
{
do_mid_forward(patt);   /* process forward pass, middle layer */
do_out_forward();       /* process forward pass, output layer */
/* display_output(patt);   ** display results of forward pass */
return;
}

/***********************
** do_out_error(patt) **
************************
** Compute the error for the output layer neurodes.
** This is simply Desired - Actual.
**/
static void do_out_error(int patt)
{
int neurode;
double error, tot_error, sum;

tot_error = 0.0;
sum = 0.0;
for (neurode=0; neurode<OUT_SIZE; neurode++)
{
	out_error[neurode] = out_pats[patt][neurode] - out_out[neurode];
	/*
	** While we're here, also compute the magnitude of the
	** total error and the worst error in this pass.  We
	** use these to decide whether we are done yet.
	*/
	error = out_error[neurode];
	if (error < 0.0)
	{
		sum += -error;
		if (-error > tot_error)
			tot_error = -error; /* worst error this pattern */
	}
	else
	{
		sum += error;
		if (error > tot_error)
			tot_error = error; /* worst error this pattern */
	}
}
avg_out_error[patt] = sum/OUT_SIZE;
tot_out_error[patt] = tot_error;
return;
}

/***********************
** worst_pass_error() **
************************
** Find the worst and average error in the pass and save them.
**/
static void worst_pass_error()
{
double error, sum;
int i;

error = 0.0;
sum = 0.0;
for (i=0; i<numpats; i++)
{
	if (tot_out_error[i] > error) error = tot_out_error[i];
	sum += avg_out_error[i];
}
worst_error = error;
average_error = sum/numpats;
return;
}

/*******************
** do_mid_error() **
********************
** Compute the error for the middle layer neurodes.
** This is based on the output errors computed above.
** Note that the derivative of the sigmoid f(x) is
**        f'(x) = f(x)(1 - f(x))
** Recall that f(x) is merely the output of the middle
** layer neurode on the forward pass.
**/
static void do_mid_error()
{
double sum;
int neurode, i;

for (neurode=0; neurode<MID_SIZE; neurode++)
{
	sum = 0.0;
	for (i=0; i<OUT_SIZE; i++)
		sum += out_wts[i][neurode]*out_error[i];

	/*
	** apply the derivative of the sigmoid here
	*/
	mid_error[neurode] = mid_out[neurode]*(1-mid_out[neurode])*sum;
}
return;
}

/**********************
** adjust_out_wts() **
***********************
** Adjust the weights of the output layer.  The error for
** the output layer has been previously computed, so use
** the delta rule with a momentum term to adjust the weights.
**/
static void adjust_out_wts()
{
int weight, neurode;
double learn, delta, alph;

learn = BETA;
alph  = ALPHA;
for (neurode=0; neurode<OUT_SIZE; neurode++)
{
	for (weight=0; weight<MID_SIZE; weight++)
	{
		/* standard delta rule */
		delta = learn * out_error[neurode] * mid_out[weight];

		/* now the momentum term */
		delta += alph * out_wt_change[neurode][weight];
		out_wts[neurode][weight] += delta;

		/* keep track of this pass's cumulative wt changes
		** for the next pass's momentum term */
		out_wt_cum_change[neurode][weight] += delta;
	}
}
return;
}

/*************************
** adjust_mid_wts(patt) **
**************************
** Adjust the middle layer weights using the previously
** computed errors.  We use the generalized delta rule
** with a momentum term.
**/
static void adjust_mid_wts(int patt)
{
int weight, neurode;
double learn, delta, alph;

learn = BETA;
alph  = ALPHA;
for (neurode=0; neurode<MID_SIZE; neurode++)
{
	for (weight=0; weight<IN_SIZE; weight++)
	{
		/* first the basic delta rule */
		delta = learn * mid_error[neurode] * in_pats[patt][weight];

		/* with the momentum term */
		delta += alph * mid_wt_change[neurode][weight];
		mid_wts[neurode][weight] += delta;

		/* keep track of this pass's cumulative wt changes
		** for the next pass's momentum term */
		mid_wt_cum_change[neurode][weight] += delta;
	}
}
return;
}

/***********************
** do_back_pass(patt) **
************************
** Process the backward propagation of error through the network.
**/
static void do_back_pass(int patt)
{
do_out_error(patt);
do_mid_error();
adjust_out_wts();
adjust_mid_wts(patt);
return;
}

/**********************
** move_wt_changes() **
***********************
** Move the weight changes accumulated last pass into the
** wt-change arrays for use by the momentum term in this pass.
** Also zero out the accumulating arrays after the move.
**/
static void move_wt_changes()
{
int i, j;

for (i = 0; i<MID_SIZE; i++)
	for (j = 0; j<IN_SIZE; j++)
	{
		mid_wt_change[i][j] = mid_wt_cum_change[i][j];
		/*
		** Zero it out for the next pass's accumulation.
		*/
		mid_wt_cum_change[i][j] = 0.0;
	}

for (i = 0; i<OUT_SIZE; i++)
	for (j = 0; j<MID_SIZE; j++)
	{
		out_wt_change[i][j] = out_wt_cum_change[i][j];
		out_wt_cum_change[i][j] = 0.0;
	}

return;
}

/**********************
** check_out_error() **
***********************
** Check to see if the error in the output layer is below
** MARGIN*OUT_SIZE for all output patterns.  If so, then
** assume the network has learned acceptably well.  This
** is simply an arbitrary measure of how well the network
** has learned -- many other standards are possible.
**/
static int check_out_error()
{
int result, i, error;

result = T;
error  = F;
worst_pass_error();     /* identify the worst error in this pass */

for (i=0; i<numpats; i++)
{
	if (worst_error >= STOP) result = F;
	if (tot_out_error[i] >= 16.0) error = T;
}

if (error == T) result = ERR;

#ifdef DEBUG
/* printf("\n Error this pass thru data:   Worst: %8.3f; Average: %8.3f",
	worst_error,average_error); */
/* fprintf(outfile,
	"\n Error this pass thru data:   Worst: %8.3f; Average: %8.3f",
	worst_error, average_error); */
#endif

return(result);
}

/*******************
** zero_changes() **
********************
** Zero out all the wt change arrays.
**/
static void zero_changes()
{
int i, j;

for (i = 0; i<MID_SIZE; i++)
{
	for (j=0; j<IN_SIZE; j++)
	{
		mid_wt_change[i][j] = 0.0;
		mid_wt_cum_change[i][j] = 0.0;
	}
}

for (i = 0; i<OUT_SIZE; i++)
{
	for (j=0; j<MID_SIZE; j++)
	{
		out_wt_change[i][j] = 0.0;
		out_wt_cum_change[i][j] = 0.0;
	}
}
return;
}

/********************
** randomize_wts() **
*********************
** Initialize the weights in the middle and output layers to
** random values between -0.25..+0.25.
**
** NOTE: Had to make alterations to how the random numbers were
** created. -- RG
**/
static void randomize_wts()
{
int neurode, i;
double value;

for (neurode=0; neurode<MID_SIZE; neurode++)
{
	for (i=0; i<IN_SIZE; i++)
	{
		/* value=(double)abs_randwc(100000L); */
		value=(double)abs_randwc((int32_t)100000);
		value=value/(double)100000.0 - (double)0.5;
		mid_wts[neurode][i] = value/2;
	}
}
for (neurode=0; neurode<OUT_SIZE; neurode++)
{
	for (i=0; i<MID_SIZE; i++)
	{
		/* value=(double)abs_randwc(100000L); */
		value=(double)abs_randwc((int32_t)100000);
		value=value/(double)100000.0 - (double)0.5;
		out_wts[neurode][i] = value/2;
	}
}
return;
}

/*********************
** read_data_file() **
**********************
** Read in the input data file and store the patterns in
** in_pats and out_pats.
** The format for the data file is as follows:
**
** line#   data expected
** -----   ------------------------------
** 1       in-x-size, in-y-size, out-size
** 2       number of patterns in file
** 3       1st row of 1st input pattern
** 4..     remaining rows of the 1st input pattern,
**         followed by the output pattern,
**         then the rows of the 2nd input pattern, etc.
**
** Each row of data is separated by commas or spaces.
** The data is expected to be ASCII text corresponding to
** either a +1 or a 0.
**
** Returns -1 if any file error occurred, otherwise 0.
**/
static int read_data_file()
{
FILE *infile;
int xinsize, yinsize, youtsize;
int patt, element, i, row;
int vals_read;
int val1, val2, val3, val4, val5, val6, val7, val8;

infile = fopen(inpath, "r");
if (infile == NULL)
{
	printf("error in opening file!");
	return -1;
}

vals_read = fscanf(infile,"%d  %d  %d", &xinsize, &yinsize, &youtsize);
if (vals_read != 3)
{
	printf("NNET: error reading input and output sizes\n");
	return -1;
}

/*
** read in the number of patterns in the file
*/
vals_read = fscanf(infile,"%d", &numpats);
if (vals_read != 1)
{
	printf("NNET: error reading number of patterns\n");
	return -1;
}
if (numpats > MAXPATS)
	numpats = MAXPATS;

for (patt=0; patt<numpats; patt++)
{
	element = 0;
	for (row=0; row<yinsize; row++)
	{
		for (i=0; i<xinsize; i++)
		{
			vals_read = fscanf(infile,"%d", &val1);
			if (vals_read != 1)
			{
				printf("NNET: failure in reading input!\n");
				return -1;
			}
			element = row*xinsize + i;
			in_pats[patt][element] = (double)val1;
		}
	}

	/*
	** Clip the input values into the 0.1..0.9 range.
	*/
	for (i=0; i<IN_SIZE; i++)
	{
		if (in_pats[patt][i] >= 0.9) in_pats[patt][i] = 0.9;
		if (in_pats[patt][i] <= 0.1) in_pats[patt][i] = 0.1;
	}

	element = 0;
	vals_read = fscanf(infile,"%d  %d  %d  %d  %d  %d  %d  %d",
		&val1, &val2, &val3, &val4, &val5, &val6, &val7, &val8);
	out_pats[patt][element] = (double)val1; element++;
	out_pats[patt][element] = (double)val2; element++;
	out_pats[patt][element] = (double)val3; element++;
	out_pats[patt][element] = (double)val4; element++;
	out_pats[patt][element] = (double)val5; element++;
	out_pats[patt][element] = (double)val6; element++;
	out_pats[patt][element] = (double)val7; element++;
	out_pats[patt][element] = (double)val8; element++;
}

/* printf("\n Closing the input file now. "); */
fclose(infile);
return(0);
}

/*********************
** initialize_net() **
**********************
** Do all the initialization stuff before beginning.
*/
/*
static int initialize_net()
{
int err_code;

randomize_wts();
zero_changes();
err_code = read_data_file();
iteration_count = 1;
return(err_code);
}
*/

/**********************
** display_mid_wts() **
***********************
** Display the weights on the middle layer neurodes.
** NOTE: This routine is not used in the benchmark
**  test -- RG
**/
/*
static void display_mid_wts()
{
int neurode, weight, row, col;

fprintf(outfile,"\n Weights of Middle Layer neurodes:");

for (neurode=0; neurode<MID_SIZE; neurode++)
{
	fprintf(outfile,"\n  Mid Neurode # %d", neurode);
	for (row=0; row<IN_Y_SIZE; row++)
	{
		fprintf(outfile,"\n ");
		for (col=0; col<IN_X_SIZE; col++)
		{
			weight = IN_X_SIZE*row + col;
			fprintf(outfile," %8.3f ", mid_wts[neurode][weight]);
		}
	}
}
return;
}
*/
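/*
** For reference, the weight update performed in adjust_out_wts() and
** adjust_mid_wts() above is the delta rule with a momentum term:
**
**      delta = BETA * error * input  +  ALPHA * previous_change
**
** The fragment below is a minimal, self-contained sketch of that
** update for a single weight -- not part of the benchmark, and
** compiled out with #if 0.  The BETA/ALPHA values and variable
** names here are illustrative stand-ins for the constants defined
** in nbench1.h.
*/
#if 0
#include <stdio.h>

#define BETA  0.09      /* illustrative learning rate */
#define ALPHA 0.09      /* illustrative momentum coefficient */

int main(void)
{
	double wt = 0.10;          /* current weight */
	double error = 0.25;       /* back-propagated error for this neurode */
	double input = 0.66;       /* signal arriving over this weight */
	double prev_change = 0.01; /* cumulative change from the previous pass */

	double delta = BETA*error*input + ALPHA*prev_change;
	wt += delta;
	printf("delta = %f, new weight = %f\n", delta, wt);
	return 0;
}
#endif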