author    Matt Turner <mattst88@gmail.com>  2008-11-12 23:25:26 +0000
committer Matt Turner <mattst88@gmail.com>  2008-11-12 23:25:26 +0000
commit    37438ca202a9a1f4e782f1d1803686fc6b65e918
tree      7cf856979306a32f7817b528b306539cf914e93b /neural.c
parent    7a57eeccd8ded740d88aba3ea9dcb09050983dd0
Move static function prototypes from nbench1.h to appropriate files
git-svn-id: svn://mattst88.com/svn/cleanbench/trunk@14 0d43b9a7-5ab2-4d7b-af9d-f64450cef757
Diffstat (limited to 'neural.c')
-rw-r--r--  neural.c  75
1 file changed, 70 insertions(+), 5 deletions(-)
diff --git a/neural.c b/neural.c
index 04d49fe..a767fb4 100644
--- a/neural.c
+++ b/neural.c
@@ -6,11 +6,6 @@
#include "nmglobal.h"
#include "nbench1.h"
-/*
-** The Neural Net test requires an input data file.
-** The name is specified here.
-*/
-char *inpath="NNET.DAT";
/********************************
** BACK PROPAGATION NEURAL NET **
@@ -37,6 +32,76 @@ char *inpath="NNET.DAT";
** on systems other than a Macintosh -- RG
*/
+/*
+** DEFINES
+*/
+#define T 1 /* TRUE */
+#define F 0 /* FALSE */
+#define ERR -1
+#define MAXPATS 10 /* max number of patterns in data file */
+#define IN_X_SIZE 5 /* number of neurodes/row of input layer */
+#define IN_Y_SIZE 7 /* number of neurodes/col of input layer */
+#define IN_SIZE 35 /* equals IN_X_SIZE*IN_Y_SIZE */
+#define MID_SIZE 8 /* number of neurodes in middle layer */
+#define OUT_SIZE 8 /* number of neurodes in output layer */
+#define MARGIN 0.1 /* how near to 1,0 do we have to come to stop? */
+#define BETA 0.09 /* beta learning constant */
+#define ALPHA 0.09 /* momentum term constant */
+#define STOP 0.1 /* when worst_error less than STOP, training is done */
+
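For context, BETA and ALPHA are the usual back-propagation learning-rate and momentum constants: each weight change is the error gradient scaled by BETA, plus ALPHA times the previous change, which is what the mid_wt_change/out_wt_change arrays declared below exist to remember. A minimal standalone sketch of that update rule (update_weight and its arguments are illustrative names, not taken from this file):

#include <stdio.h>

#define BETA  0.09  /* learning rate, as defined above */
#define ALPHA 0.09  /* momentum constant, as defined above */

/* One momentum-smoothed weight update: the new change is the error
** gradient scaled by BETA, plus ALPHA times the previous change
** (the role played by the *_wt_change arrays in neural.c). */
static double update_weight(double weight, double gradient, double *prev_delta)
{
    double delta = BETA * gradient + ALPHA * (*prev_delta);
    *prev_delta = delta;               /* remembered for the next pass */
    return weight + delta;
}

int main(void)
{
    double w = 0.5, prev = 0.0;
    w = update_weight(w, 0.2, &prev);  /* first pass: pure gradient step */
    w = update_weight(w, 0.2, &prev);  /* second pass: momentum contributes */
    printf("w = %f\n", w);
    return 0;
}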
+/*
+** The Neural Net test requires an input data file.
+** The name is specified here.
+*/
+char *inpath="NNET.DAT";
+
+/*
+** GLOBALS
+*/
+double mid_wts[MID_SIZE][IN_SIZE]; /* middle layer weights */
+double out_wts[OUT_SIZE][MID_SIZE]; /* output layer weights */
+double mid_out[MID_SIZE]; /* middle layer output */
+double out_out[OUT_SIZE]; /* output layer output */
+double mid_error[MID_SIZE]; /* middle layer errors */
+double out_error[OUT_SIZE]; /* output layer errors */
+double mid_wt_change[MID_SIZE][IN_SIZE]; /* storage for last wt change */
+double out_wt_change[OUT_SIZE][MID_SIZE]; /* storage for last wt change */
+double in_pats[MAXPATS][IN_SIZE]; /* input patterns */
+double out_pats[MAXPATS][OUT_SIZE]; /* desired output patterns */
+double tot_out_error[MAXPATS]; /* measure of whether net is done */
+double out_wt_cum_change[OUT_SIZE][MID_SIZE]; /* accumulated wt changes */
+double mid_wt_cum_change[MID_SIZE][IN_SIZE]; /* accumulated wt changes */
+
+double worst_error; /* worst error each pass through the data */
+double average_error; /* average error each pass through the data */
+double avg_out_error[MAXPATS]; /* average error each pattern */
+
+int iteration_count; /* number of passes thru network so far */
+int numpats; /* number of patterns in data file */
+int numpasses; /* number of training passes through data file */
+int learned; /* flag--if TRUE, network has learned all patterns */
+
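The globals spell out the topology: mid_wts maps the 35 inputs onto the 8 middle neurodes, out_wts maps those onto the 8 outputs, and the *_out, *_error, and *_wt_change arrays hold per-layer activations, errors, and momentum history. A small standalone sketch of the sigmoid forward step these arrays support (mid_forward is an illustrative stand-in for do_mid_forward; the logistic sigmoid is assumed, not read from the function body):

#include <math.h>
#include <stdio.h>

#define IN_SIZE  35   /* matches the defines above */
#define MID_SIZE 8

static double mid_wts[MID_SIZE][IN_SIZE];
static double mid_out[MID_SIZE];

/* Forward one input pattern through the middle layer: each neurode
** takes the weighted sum of all 35 inputs and squashes it into (0,1)
** with the logistic sigmoid. */
static void mid_forward(const double in[IN_SIZE])
{
    for (int neurode = 0; neurode < MID_SIZE; neurode++) {
        double sum = 0.0;
        for (int i = 0; i < IN_SIZE; i++)
            sum += mid_wts[neurode][i] * in[i];
        mid_out[neurode] = 1.0 / (1.0 + exp(-sum));
    }
}

int main(void)
{
    double in[IN_SIZE] = { 1.0 };             /* dummy pattern */
    mid_forward(in);
    printf("mid_out[0] = %f\n", mid_out[0]);  /* 0.5 with all-zero weights */
    return 0;
}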
+/*
+** PROTOTYPES
+*/
+static unsigned long DoNNetIteration(unsigned long nloops);
+static void do_mid_forward(int patt);
+static void do_out_forward();
+void display_output(int patt);
+static void do_forward_pass(int patt);
+static void do_out_error(int patt);
+static void worst_pass_error();
+static void do_mid_error();
+static void adjust_out_wts();
+static void adjust_mid_wts();
+static void do_back_pass(int patt);
+static void move_wt_changes();
+static int check_out_error();
+static void zero_changes();
+static void randomize_wts();
+static int read_data_file();
+/* static int initialize_net(); */
+
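Read in order, the prototypes trace one training pass: forward-propagate a pattern, back-propagate its error into weight changes, and repeat until check_out_error() reports the worst error under STOP. A hedged skeleton of that control flow, inferred from the names alone rather than the function bodies (train is an illustrative wrapper and assumes the declarations above are in scope):

/* Plausible call order implied by the prototypes above; a sketch
** inferred from the names, not verified against the bodies. */
static void train(void)
{
    randomize_wts();                 /* start from random weights */
    zero_changes();                  /* clear the momentum history */
    learned = F;
    while (!learned) {
        for (int patt = 0; patt < numpats; patt++) {
            do_forward_pass(patt);   /* input -> middle -> output */
            do_back_pass(patt);      /* errors -> weight changes */
        }
        learned = check_out_error(); /* done when worst_error < STOP */
    }
}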
/***********
** DoNNet **
************