author     Matt Turner <mattst88@gmail.com>  2008-11-15 05:23:15 +0000
committer  Matt Turner <mattst88@gmail.com>  2008-11-15 05:23:15 +0000
commit     0a0699aba70c04e3e6c5847b47cee75025e96255 (patch)
tree       1f56669779fd2d69b6802b9eae52ee89a20bc381 /neural.c
parent     991b62808450c54ac721f345d82426fde3e6289f (diff)
Use stdbool.h, bool, true, false instead of manually defining them
git-svn-id: svn://mattst88.com/svn/cleanbench/trunk@60 0d43b9a7-5ab2-4d7b-af9d-f64450cef757
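For context, this is the standard C99 cleanup: drop hand-rolled truth-value macros in favor of <stdbool.h>. A minimal sketch of the before/after pattern, using the names this patch touches (T and F are the macros removed here; the TRUE/FALSE used by is_adjusted are presumably defined elsewhere in the tree, and note that learned stays declared as int, with only its values changing):

    /* Before: truth values defined by hand */
    #define T 1 /* TRUE */
    #define F 0 /* FALSE */
    static int is_adjusted = FALSE;  /* FALSE defined elsewhere */
    learned = F;
    while (learned == F) { /* ...training pass... */ }

    /* After: C99 standard booleans */
    #include <stdbool.h>
    static bool is_adjusted = false;
    learned = false;                 /* learned itself is still an int */
    while (!learned) { /* ...training pass... */ }

One C99 detail that makes this more than cosmetic: any scalar value assigned to a bool is normalized to 0 or 1, so a bool flag compares predictably against true, which hand-defined macros on plain int flags do not guarantee.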
Diffstat (limited to 'neural.c')
-rw-r--r--  neural.c  28
1 file changed, 13 insertions(+), 15 deletions(-)
diff --git a/neural.c b/neural.c
index c3fe5e5..cf03dac 100644
--- a/neural.c
+++ b/neural.c
@@ -1,6 +1,7 @@
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
+#include <stdbool.h>
#include <string.h>
#include <math.h>
#include <limits.h>
@@ -45,8 +46,6 @@
*/
#define LOOP_MAX 500000L
-#define T 1 /* TRUE */
-#define F 0 /* FALSE */
#define ERR -1
#define MAXPATS 10 /* max number of patterns in data file */
#define IN_X_SIZE 5 /* number of neurodes/row of input layer */
@@ -86,7 +85,7 @@ double avg_out_error[MAXPATS]; /* average error each pattern */
int iteration_count; /* number of passes thru network so far */
int numpats; /* number of patterns in data file */
int numpasses; /* number of training passes through data file */
-int learned; /* flag--if TRUE, network has learned all patterns */
+int learned; /* flag--if true, network has learned all patterns */
static clock_t DoNNetIteration(unsigned long nloops);
static void do_mid_forward(int patt);
@@ -122,7 +121,7 @@ DoNNET(void)
/* const char* context="CPU:NNET"; */ /* Since we never fprintf errors here, we don't need this */
clock_t total_time = 0;
int iterations = 0;
- static int is_adjusted = FALSE;
+ static bool is_adjusted = false;
static int loops = 1;
/*
@@ -147,8 +146,8 @@ DoNNET(void)
/*
** See if we need to perform self adjustment loop.
*/
- if (is_adjusted == FALSE) {
- is_adjusted = TRUE;
+ if (is_adjusted == false) {
+ is_adjusted = true;
/*
** Do self-adjustment. This involves initializing the
** # of loops and increasing the loop count until we
@@ -196,9 +195,9 @@ while(nloops--)
randomize_wts();
zero_changes();
iteration_count=1;
- learned = F;
+ learned = false;
numpasses = 0;
- while (learned == F)
+ while (!learned)
{
for (patt=0; patt<numpats; patt++)
{
@@ -275,7 +274,6 @@ for (neurode=0; neurode<OUT_SIZE; neurode++)
sum = 1.0/(1.0+exp(-sum));
out_out[neurode] = sum;
}
-return;
}
/*************************
@@ -284,7 +282,7 @@ return;
** Display the actual output vs. the desired output of the
** network.
** Once the training is complete, and the "learned" flag set
-** to TRUE, then display_output sends its output to both
+** to true, then display_output sends its output to both
** the screen and to a text output file.
**
** NOTE: This routine has been disabled in the benchmark
@@ -548,8 +546,8 @@ static int check_out_error()
{
int result,i,error;
-result = T;
-error = F;
+result = true;
+error = false;
worst_pass_error(); /* identify the worst error in this pass */
/*
@@ -566,11 +564,11 @@ for (i=0; i<numpats; i++)
i+1,tot_out_error[i]);
*/
- if (worst_error >= STOP) result = F;
- if (tot_out_error[i] >= 16.0) error = T;
+ if (worst_error >= STOP) result = false;
+ if (tot_out_error[i] >= 16.0) error = true;
}
-if (error == T) result = ERR;
+if (error) result = ERR;
#ifdef DEBUG