author	Matt Turner <mattst88@gmail.com>	2008-11-15 02:42:01 +0000
committer	Matt Turner <mattst88@gmail.com>	2008-11-15 02:42:01 +0000
commit	0f674b5b2f15c5cd30209230f22198cc71e74310 (patch)
tree	ef842746de021256b55b0c638a504d3329b03d02
parent	fd21c50949e99a978c211c46bc56852460f79bbc (diff)
Clean neural struct
git-svn-id: svn://mattst88.com/svn/cleanbench/trunk@51 0d43b9a7-5ab2-4d7b-af9d-f64450cef757
-rw-r--r--	cleanbench.c	2
-rw-r--r--	neural.c	24
-rw-r--r--	nmglobal.h	19
3 files changed, 14 insertions, 31 deletions
diff --git a/cleanbench.c b/cleanbench.c
index 3b17ee7..af58fc3 100644
--- a/cleanbench.c
+++ b/cleanbench.c
@@ -84,7 +84,7 @@ puts("\nTEST : Iterations/sec. : Old Index : New Index");
puts(" : : Pentium 90* : AMD K6/233*");
puts("--------------------:------------------:-------------:------------");
-for(i=HUFFMAN;i<NUMTESTS;i++)
+for(i=NEURAL;i<NUMTESTS;i++)
{
if(tests_to_do[i])
{ printf("%s :",ftestnames[i]);
diff --git a/neural.c b/neural.c
index ff418ad..e3e3c46 100644
--- a/neural.c
+++ b/neural.c
@@ -36,8 +36,15 @@
*/
/*
-** DEFINES
+** LOOP_MAX
+**
+** This constant sets the max number of loops through the neural
+** net that the system will attempt before giving up. This
+** is not a critical constant. You can alter it if your system
+** has sufficient horsepower.
*/
+#define LOOP_MAX 500000L
+
#define T 1 /* TRUE */
#define F 0 /* FALSE */
#define ERR -1
@@ -58,9 +65,6 @@
*/
const char *inpath="NNET.DAT";
-/*
-** GLOBALS
-*/
double mid_wts[MID_SIZE][IN_SIZE]; /* middle layer weights */
double out_wts[OUT_SIZE][MID_SIZE]; /* output layer weights */
double mid_out[MID_SIZE]; /* middle layer output */
@@ -84,9 +88,6 @@ int numpats; /* number of patterns in data file */
int numpasses; /* number of training passes through data file */
int learned; /* flag--if TRUE, network has learned all patterns */
-/*
-** PROTOTYPES
-*/
static clock_t DoNNetIteration(unsigned long nloops);
static void do_mid_forward(int patt);
static void do_out_forward();
@@ -122,6 +123,7 @@ DoNNET(void)
clock_t total_time = 0;
int iterations = 0;
static int is_adjusted = FALSE;
+ static int loops = 1;
/*
** Init random number generator.
@@ -152,9 +154,9 @@ DoNNET(void)
** # of loops and increasing the loop count until we
** get a number of loops that we can use.
*/
- for (locnnetstruct->loops = 1; locnnetstruct->loops < MAXNNETLOOPS; locnnetstruct->loops++) {
+ for (; loops < LOOP_MAX; loops++) {
randnum((int32_t)3);
- if(DoNNetIteration(locnnetstruct->loops) > MINIMUM_TICKS) {
+ if(DoNNetIteration(loops) > MINIMUM_TICKS) {
break;
}
}
@@ -162,8 +164,8 @@ DoNNET(void)
do {
randnum((int32_t)3); /* Gotta do this for Neural Net */
- total_time += DoNNetIteration(locnnetstruct->loops);
- iterations += locnnetstruct->loops;
+ total_time += DoNNetIteration(loops);
+ iterations += loops;
} while (total_time < MINIMUM_SECONDS * CLOCKS_PER_SEC);
locnnetstruct->results = (double)(iterations * CLOCKS_PER_SEC) / total_time;
diff --git a/nmglobal.h b/nmglobal.h
index 85f030b..432a2dc 100644
--- a/nmglobal.h
+++ b/nmglobal.h
@@ -66,27 +66,8 @@ typedef struct {
double results; /* Results */
} HuffStruct;
-/********************************
-** BACK PROPAGATION NEURAL NET **
-********************************/
-
-/*
-** MAXNNETLOOPS
-**
-** This constant sets the max number of loops through the neural
-** net that the system will attempt before giving up. This
-** is not a critical constant. You can alter it if your system
-** has sufficient horsepower.
-*/
-/*#define MAXNNETLOOPS 50000L*/
-#define MAXNNETLOOPS 500000L
-
-/*
-** TYPEDEFS
-*/
typedef struct {
double results; /* Results */
- unsigned long loops; /* # of times to learn */
} NNetStruct;
/***********************
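
The two rewritten hunks in neural.c keep the benchmark's usual self-calibration pattern, now driven by a file-local static instead of the removed NNetStruct loops field. Below is a minimal standalone C sketch of that pattern for reference only: do_work() is a hypothetical stand-in for DoNNetIteration(), and the MINIMUM_TICKS and MINIMUM_SECONDS values are assumed, not the benchmark's real thresholds.

/*
 * Sketch of the self-calibrating timing loop, assuming a hypothetical
 * do_work() workload and placeholder threshold values.
 */
#include <stdio.h>
#include <time.h>

#define LOOP_MAX        500000L                 /* same bound as the patch */
#define MINIMUM_TICKS   (CLOCKS_PER_SEC / 10)   /* assumed threshold */
#define MINIMUM_SECONDS 5                       /* assumed run length */

/* One timed pass over the workload; returns elapsed clock ticks. */
static clock_t do_work(unsigned long nloops)
{
	clock_t start = clock();
	volatile double x = 0.0;
	unsigned long i;

	for (i = 0; i < nloops * 1000; i++)
		x += (double)i * 0.5;

	return clock() - start;
}

int main(void)
{
	static unsigned long loops = 1;
	clock_t total_time = 0;
	unsigned long iterations = 0;

	/* Grow the loop count until a single pass takes measurable time. */
	for (; loops < LOOP_MAX; loops++) {
		if (do_work(loops) > MINIMUM_TICKS)
			break;
	}

	/* Accumulate timed passes until enough total time has elapsed. */
	do {
		total_time += do_work(loops);
		iterations += loops;
	} while (total_time < MINIMUM_SECONDS * CLOCKS_PER_SEC);

	printf("%.2f iterations/sec.\n",
	       (double)iterations * CLOCKS_PER_SEC / (double)total_time);
	return 0;
}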