/* MULTI-LAYER PERCEPTRON PROGRAM
   Source File: mlp.c
   Commentary:  mlpc.html
   Programmer:  Robert John Morton YE57226C
   Written:     November 1993 */

/* Note: this program contains some calls that are specific to Microsoft
   Windows 95. A more advanced Linux/Unix version for compiling with the
   gcc compiler is under development [circa November 2024]. */

/* The new Linux version will be tested by training it to replace the
   Vincenty Solution to the Geodetic INVERSE Problem function within the
   global navigator program 'nav.c', which obtains the distance and bearing
   of a waypoint given the latitudes & longitudes of a vehicle and a
   stationary reference point. It will be taught during flights in which
   the Vincenty function will be in control and the 'mlp' will have its
   errors with respect to the Vincenty function's output back-propagated
   over many flights along different routes until the errors are
   acceptably small. */

#include <stdlib.h> // for the random number generators rand() & srand()
#include <stdio.h>  // for standard file i/o streams
#include <io.h>     // for filelength() definition
#include <malloc.h> // dynamic allocation of input, output & weights arrays
#include <graph.h>  // for _clearscreen()
#include <time.h>   // for inter-pattern processing delay
#include <conio.h>  // for kbhit() & getch()

#define NL 4     // number of network layers
#define NAL 3    // number of active network layers
#define R 32767  // maximum value of a bipolar 16-bit integer (short)
#define FALSE 0
#define TRUE !FALSE

int delay,            // seconds delay between processing consecutive patterns
    NR,               // total number of patterns in the input file
    Training = FALSE, // TRUE = the program is in training mode
    BeepFlag = FALSE, // TRUE = beep when the program has finished
    SeedVal = 1;      // default value for the random weights generator seeder

short N[NL],      // neurons per layer
  *L[NL],         // access to the layer outputs
  **W[NL],        // access to all weights
  *E[NL],         // pointers to each layer's errors array
  **M[NL],        // pointers to the pointer arrays that point to each
                  // layer's delta-weights
  *pt,            // pointer to the target outputs for training mode
  SigTab[1025];   // sigmoid function's look-up table

FILE *Wfh,        // pointer to the weights file's data structure
  *Ifh,           // pointer to the input file's data structure
  *Ofh;           // pointer to the output file's data structure

/* SAMPLE COMMAND LINE:
   rob@neddy:~$ mlp robot.wts robot.in robot.out 5B 256 512 256 16 17

   argc           SAMPLE    EXPLANATION
    1  *argv[0]   MLP       PROGRAM NAME MLP.EXE
    2  *argv[1]   ROBOT.WTS WEIGHTS FILE (MUST HAVE .WTS EXTENSION)
    3  *argv[2]   ROBOT.IN  NAME OF INPUT SOURCE FILE (NO NAMING RESTRICTIONS)
    4  *argv[3]   ROBOT.OUT NAME OF OUTPUT SINK FILE          "
    5  *argv[4]   5B        FIRST CHARACTER:
                            / = DO NOT DISPLAY NEURAL OUTPUTS
                            0 = DISPLAY NEURAL OUTPUTS WITH NO DELAY BETWEEN THEM
                            1 - 9 = DISPLAY NEURAL OUTPUTS EVERY 1 - 9 SECONDS
                            SECOND CHARACTER:
                            B ACTIVATES A BEEP EVERY 10 SECONDS WHEN PROGRAM
                            FINISHED
    6  *argv[5]   256       NUMBER OF INPUT CHANNELS TO THE NETWORK
    7  *argv[6]   512       NUMBER OF NEURONS IN LAYER 1
    8  *argv[7]   256       NUMBER OF NEURONS IN LAYER 2
    9  *argv[8]   16        NUMBER OF NEURONS IN LAYER 3 (OUTPUTS FROM THE NETWORK)
   10  *argv[9]   17        SEED VALUE FOR THE RANDOM SERIES GENERATOR THAT
                            INITIALISES THE WEIGHTS FOR TRAINING

   FOR NORMAL RUNNING: ARGS 0 TO 3 MANDATORY, ARG 4 OPTIONAL
   FOR TRAINING RUNS:  ARGS 0 TO 8 MANDATORY, ARG 9 OPTIONAL */

/* ALLOCATE MEMORY FOR NEURAL INPUT, OUTPUT, ERROR, WEIGHT, MOMENTUM TERM
   AND TARGET OUTPUT ARRAYS AND LOAD IN NEURAL INPUT WEIGHTS */

int Walloc(void) {  // returns TRUE or FALSE for success or failure
  int nl, w = 0,    /* layer number, weight value (w is initialised so that
                       its first sign test below is not on an indeterminate
                       value) */
    f = TRUE,       // success/failure flag, tested after the main loop
    ni, NI = 0, nn; /* input number, number of inputs to a layer (carried
                       over from one pass of the main loop to the next),
                       neuron number */
  srand(SeedVal);   // initialise random number generator rand()
  for(nl = 0; nl < NL; nl++) {
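    /* A worked example of the weights-file layout this loop reads, derived
       from SaveWeights() below: for the sample command line at the head of
       this listing (a 256-512-256-16 network), robot.wts holds the word
       256 (the number of input channels), then, for each active layer, a
       word giving its neuron count followed by its weights: 512 then
       512 x 256 weights, 256 then 256 x 512 weights, 16 then 16 x 256
       weights, each stored as one machine word by putw(). */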
    /* FOR EACH NETWORK LAYER
       number of neurons in this network layer
       |      pointer to the 'neurons per layer' array N[]
       |      |     layer number of the network currently being dealt with
       |      |     |         file handle of the weights file
       |      |     |         |    */
    int NN = (*(N + nl) = getw(Wfh)); // get number of neurons in this layer
    if(NN < 1 || NN > 4096) {  // if this number is beyond the limit,
      printf("\nBad data in weights file");
      return(FALSE);           // print error message; exit Walloc()
    }
    /* EXIT IF UNABLE TO ALLOCATE & SET POINTER TO OUTPUTS ARRAYS:
       pointer to base of array of pointers to layer outputs
       |     layer number of the network currently being dealt with
       |     |                      number of neurons in this network layer
       |     |                      |   */
    if((*(L + nl) = (short *)malloc(NN * sizeof(short))) == NULL) {
      f = FALSE;
      break;  // break out of the outer loop: 'FOR EACH NETWORK LAYER'
    }
    if (nl) {
      /* PROVIDED WE ARE NOT DOING THE INPUT LAYER...
         pointer to a weight or delta-weight
         |    pointer to a pointer to a weight or delta-weight
         |    |     */
      short *pw, **ppw;
      /* ALLOCATE & SET BASE POINTER TO THE WEIGHTS POINTER ARRAY:
         base pointer to pointers to the weights arrays
         |     number of the layer of neurons being dealt with this pass
         |     |     pointer to a pointer to a weight or delta-weight
         |     |     |    */
      if(*(W + nl) = (ppw = (short **)malloc(NN * sizeof(short *))))
        /* total number of neurons in this network layer */
        for(nn = 0; nn < NN; nn++)
          /* FOR EACH NEURON IN THE CURRENT LAYER, ALLOCATE STORAGE FOR
             & SET POINTER TO INPUT WEIGHTS:
             pointer to a pointer to a weight or delta-weight
             |     neuron number within this layer
             |     |     pointer to a weight or delta-weight
             |     |     |    */
          if(*(ppw + nn) = (pw = (short *)malloc(NI * sizeof(short))))
            /* NI = total number of inputs to the neurons in this layer */
            for(ni = 0; ni < NI; ni++) { // FOR EACH INPUT TO THIS NEURON:
              /* If in training mode, generate the next random seed-weight;
                 else, in normal mode, input the next connection weight;
                 then store the weight in its appropriate array element. */
              if(Training) {    // if the mlp is in 'training' mode
                if(w < 0)       // if the previous weight was negative,
                  w = rand();   // create a new random positive weight
                else            // otherwise it was positive, so
                  w = -rand();  // create a new random negative weight
              } else            // but if the mlp is not in training mode,
                w = getw(Wfh);  // get weight for this input from data file
              *(pw + ni) = w;   /* store the weight in the weights array
                                   (pw = base of this neuron's weights
                                   array; ni = number of the input weight
                                   being dealt with) */
            } // end of loop: 'FOR EACH INPUT TO THIS NEURON'
          else {       // storage for input weights couldn't be allocated,
            f = FALSE; // so set the flag to indicate failure
            break;     // break 'FOR EACH NEURON IN THE CURRENT LAYER'
          }            // we are now back in the outer loop
      else             // unable to ALLOCATE & SET BASE POINTER TO THE
        f = FALSE;     // WEIGHTS POINTER ARRAY, so set failure flag
      /* if the memory allocation processes so far have succeeded and the
         mlp is in training mode: */
      if(Training && f == TRUE) {
        /* if we're doing the output layer, exit if unable to allocate &
           set pointer to the target output array */
        if((nl == NAL)
        && ((pt = (short *)malloc(NN * sizeof(short))) == NULL)) {
          f = FALSE;   // set failure flag
          break;       // break out of main loop
        }
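        /* For the sample network (256-512-256-16), the structures built so
           far for layer nl = 1 would be: L[1] -> 512 output shorts and
           W[1] -> 512 pointers, each to 256 weight shorts; in training
           mode, E[1] -> 512 error shorts and M[1] -> 512 pointers, each to
           256 delta-weight shorts, are allocated below. */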
        // if unable to allocate & set pointer to error arrays:
        if((*(E + nl) = (short *)malloc(NN * sizeof(short))) == NULL) {
          f = FALSE; // set failure flag
          break;     // break out of main loop
        }
        /* IF ABLE TO ALLOCATE & SET POINTER TO DELTA-WEIGHT POINTER ARRAY
           pointer to pointer arrays that point to each layer's delta-weights
           |     number of the layer of neurons being dealt with this pass
           |     |     pointer to a pointer to a weight or delta-weight
           |     |     |    */
        if(*(M + nl) = (ppw = (short **)malloc(NN * sizeof(short *)))) {
          /* NN = total number of neurons in this network layer */
          for(nn = 0; nn < NN; nn++) // FOR EACH NEURON IN THIS LAYER:
            // if possible, allocate storage for & set pointer to its inputs
            if(*(ppw + nn) = (pw = (short *)malloc(NI * sizeof(short))))
              /* NI = total number of inputs to the neurons in this layer */
              for(ni = 0; ni < NI; ni++) // for each input to this neuron,
                *(pw + ni) = 0;          // clear and store its delta-weight
            else {       // couldn't allocate memory for input weights
              f = FALSE; // for this neuron, so set error flag
              break;     // break out of 'FOR EACH NEURON IN THIS LAYER' loop
            }
          if(f == FALSE) // if the failure flag was set during above loop,
            break;       // break out of outer loop: 'FOR EACH NETWORK LAYER'
        } else {       // unable to allocate delta-weight memory
          f = FALSE;   // for this neuron, so set error flag
          break;       // break out of 'FOR EACH NETWORK LAYER' outer loop
        }
      } // end of 'if the mlp is in training mode'
    }   // end of 'PROVIDED WE ARE NOT DOING THE INPUT LAYER...'
    NI = NN; /* set this time's weights array height to next time's
                weights array width */
  } // end of outer loop: 'FOR EACH NETWORK LAYER'
  if(f == FALSE) { // if an error occurred during memory allocation,
    char *s;       // pointer to string for error message insertion
    if(Training)   // if mlp is running in training mode,
      s = "train"; // set the word "train" for insertion in error message
    else           // otherwise, if in normal running mode,
      s = "run";   // set the word "run" for insertion in error message
    printf("\nInsufficient memory to %s the network as specified.", s);
  }
  return(f);
}

// DISPLAYS THE OUTPUT VALUE OF A GIVEN NEURON IN A GIVEN LAYER
void ShowOut(int nl, int nn, short o) {
  #define X 50 // offset of neural outputs table from screen-left
  #define Y 3  // offset of FIRST ROW of neural outputs table from screen-top
  static int flag; // first time through flag
  if(flag == 0) {
    flag = 1;
    _settextposition(Y - 2, X); printf("--------NEURAL OUTPUTS--------");
    _settextposition(Y - 1, X); printf("LAYER0  LAYER1  LAYER2  LAYER3");
  }
  _settextposition(Y + nn, X + nl * 8);
  printf("%6d", o);
}

/* MULTI-LAYER PERCEPTRON NETWORK FUNCTION: this embodies the Input
   Summation and Sigmoid Transfer processes for each neuron in all layers
   of the multi-layer perceptron. */
void mlp(void) {
  int l;          // index number of current network layer
  short *po = *L; // pointer to input array
  for(l = 1; l < NL; l++) {   // for each layer of the network
    int n, NN = *(N + l);     // number of neurons in this layer
    short *pi = po,           // pointer to current layer's inputs array
      **ppw = *(W + l);       // pointer to this layer's weight pointer array
    po = *(L + l);            // pointer to this layer's outputs array
    for(n = 0; n < NN; n++) { // for each neuron in the layer
      int i, a, o, s,         // input number, activation level, output, sign
        NI = *(N - 1 + l);    // NI = number of inputs to this neuron
      short *pw = *(ppw + n); // pointer to neuron's first input weight
      long Hi = 0, Lo = 0;    /* Hi & Lo act as a split 48-bit accumulator

      THE SPLIT-LONG-ACCUMULATOR SUMMATION FUNCTION: to sum all the inputs
      of neuron 'n' in layer 'l' of the multi-layer perceptron.
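      For example, each product P of two shorts can reach about 2^30, so
      with NI = 256 inputs a plain 32-bit sum could overflow. Splitting P
      into its top 16 bits (added into Hi) and its bottom 16 bits (added
      into Lo) gives, in effect, a 48-bit accumulator: the full sum is
      (Hi << 16) + Lo, which, rescaled by >> 15 to undo the R-scaling of
      the weights, is the ((Hi << 1) + (Lo >> 15)) seen below.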
      */
      for(i = 0; i < NI; i++) {
        /* for each input to this neuron:
           activation level contributed by current input
           |         pointer to current layer's inputs array
           |         |           pointer to neuron's first input weight
           |         |           |    */
        long P = (long)*(pi + i) * *(pw + i);
        Hi += P >> 16;    // see the narrative on the weighted input
        Lo += P & 0xFFFF; // summation and sigmoid functions
      }
      // Form the final long value, preserving its sign.
      if((s = (a = ((Hi << 1) + (Lo >> 15)) / NI)) < 0)
        a = -a; // if it is negative, reverse its sign
      /* THE NEURON'S SIGMOID TRANSFER FUNCTION [FROM LOOK-UP TABLE]
         Find which two look-up table entries 'o' lies between and
         interpolate between them to get the true value of o. Please refer
         to https://robmorton.website/software/sigmoid.html#lin

         non-integral 'element number' of the output look-up table, which
         corresponds to the value 'a' of the sum of the neuron's inputs
         |    pointer to first element of look-up table
         |    |         required element number within the look-up table
         |    |         |    value of the sum of the neuron's inputs
         |    |         |    |   divide by 32
         |    |         |    |   |   */
      o = *(SigTab + (i = a >> 5)); // value of low entry in look-up table
      /* content of next higher integral element of look-up table, in
         order to get the fractional part of 'o'
         |                     remainder δa; divide δo by 32
         |                     |        |    */
      o += ((*(SigTab + i + 1) - o) * (a & 0x1F)) >> 5;
      /* add δo to o. δa is the remainder when total input signal strength
         'a' is divided by 32: in this context, a & 0x1F does exactly what
         a % 32 would do, except very much faster. δa is the extra input
         signal strength by which 'a' exceeds i << 5, namely, that which
         corresponds to SigTab[i]. */
      if (s < 0)       // if original sign of 'o' was negative,
        o = -o;        // reverse its sign
      *(po + n) = o;   // store neuron's output in layer output array
      // [for use during development only]
      if(delay != -1)     // if the display is active,
        ShowOut(l, n, o); // display the output value
    }
  }
}

/* MULTI-LAYER PERCEPTRON DELTA-RULE TRAINING FUNCTION
   shift factor corresponding to the weight gain term η
   |      shift factor corresponding to the momentum factor α
   |      |    */
void mlptrain(int h, int a) {
  int nl;         // current layer number
  short *pi = *L; // pointer to the input pattern array
  h += 15;        // combined shift: multiply by η and divide by R
  for(nl = NAL; nl > 0; nl--) { // for each layer of the network...
    short **ppw = *(W + nl),    // pointer to access this layer's weights
      **ppm = *(M + nl),        // pointer to this layer's delta-weights
      *pe = *(E + nl),          // pointer to this layer's output errors
      *po = *(L + nl);          // pointer to this layer's neural outputs
    int nn, ni,                 // neuron number, input number to neuron
      NN = *(N + nl),           // number of neurons in current layer
      NI = *(N - 1 + nl);       // number of inputs to this layer
    if (nl == NAL)                          // If doing the output layer,
      for (nn = 0; nn < NN; nn++)           // prime each element of error
        *(pe + nn) = *(po + nn) - *(pt + nn); // array with -(t[j] - o[j]).
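    /* The error term computed in the loop below is the fixed-point
       analogue of δ = f'(a) x e: with outputs scaled to ±R,
       ((R + o) * (R - o)) >> 15 is (R² - o²)/R ≈ R(1 - (o/R)²), the
       scaled slope of a bipolar (tanh-like) sigmoid; e.g. at o = 0 it is
       R (maximum slope) and at o = ±R it is 0, so saturated neurons
       receive almost no weight adjustment. */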
    pi = *(L + nl - 1); // pointer to start of this layer's inputs
    // COMPUTE OUTPUT ERROR FOR EACH NEURON IN CURRENT LAYER
    for(nn = 0; nn < NN; nn++) { // FOR EACH NEURON IN CURRENT LAYER:
      short m,             // momentum term
        *pw = *(ppw + nn), // pointer to neuron's first weight
        *pm = *(ppm + nn); // pointer to neuron's first delta-weight
      long // this neuron's output signal and output error
        o = *(po + nn),
        e = (((R + o) * (R - o)) >> 15) * *(pe + nn) >> 13;
      if (e > R) e = R;   // constrain it to within
      if (e < -R) e = -R; // the range of a short
      *(pe + nn) = e;     // dF/da = do/da * last time's summation
      for(ni = 0; ni < NI; ni++) { // adjust each input weight:
        /* new delta-weight x = last time's delta-weight m shifted right
           by a (the shift equivalent of the momentum factor α), minus
           this neuron's output error e times the corresponding input
           *(pi + ni), averaged over the NI inputs to this layer and
           scaled by h, the shift factor corresponding to the weight gain
           term η */
        short x = ((m = *(pm + ni)) >> a) - (((e * *(pi + ni)) / NI) >> h);
        *(pm + ni) = x;  // store weight adjustment in delta-weight array
        *(pw + ni) += x; // add weight adjustment to neuron's weights array
      }
    } // end of 'FOR EACH NEURON IN CURRENT LAYER'
    if (nl > 1) { // Provided we haven't yet reached the first active layer:
      short *ps = *(E + nl - 1); // pointer to previous layer's output errors
      for(ni = 0; ni < NI; ni++) { // for each input weight to this layer:
        // See mlp() for an explanation of the following block of code.
        long Hi = 0, Lo = 0;
        for(nn = 0; nn < NN; nn++) {
          long P = (long)*(pe + nn) * *(*(ppw + nn) + ni);
          Hi += P >> 16;
          Lo += P & 0xFFFF;
        }
        *(ps + ni) = ((Hi << 1) + (Lo >> 15)) / NN;
      }
    } // ...prime the previous layer's error array elements
  }   // with this layer's error * weight summations.
}

int GetInp(void) { // GET THE NEXT INPUT PATTERN FROM THE INPUT STREAM
  int i, nn, NN = *N; // input value, input number, total number of inputs
  short *pi = *L;     // points to the input array L1[]
  for(nn = 0; nn < NN; nn++) {    // for each input channel...
    *(pi + nn) = (i = getw(Ifh)); // get next input
    if(ferror(Ifh)) {             // if an error occurred...
      perror("\nInput source");   /* show standard system error message
                                     prefixed by "Input source" */
      clearerr(Ifh); // clear possible previous error against this stream
      return(FALSE); // bail out of the loop and return a FAIL indication
    }
    if(delay != -1)      // if required,
      ShowOut(0, nn, i); // display the input
  }
  return(TRUE); // return successfully
}

// GET THE NEXT TARGET OUTPUT PATTERN FROM THE TARGET OUTPUT STREAM
int GetOut(void) {
  int i, nn,           // target output value, output number
    NN = *(N + NAL);   // total number of target outputs
  short *pi = pt;      // points to the target output array
  for(nn = 0; nn < NN; nn++) {    // for each output channel...
    *(pi + nn) = (i = getw(Ofh)); // get next target output
    if(ferror(Ofh)) {             // if an error occurred...
      // show prefixed standard system error message
      perror("\nTarget output source");
      // clear possible error previously set against this stream
      clearerr(Ofh);
      return(FALSE); // bail out of the loop and return a FAIL indication
    }
    if(delay != -1) // display the target output if required
      ShowOut(4, nn, i);
  }
  return(TRUE); // return successfully
}

int PutOut(void) { // PUT CURRENT OUTPUT TO THE OUTPUT STREAM
  int o, nn,              // output value, current output number,
    NN = *(N + NAL);      // total number of outputs
  short *po = *(L + NAL); // points to the outputs array Ln[]
  for(nn = 0; nn < NN; nn++) { // for each output channel...
    putw(*(po + nn), Ofh);     // store this output.
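    /* Note: getw() and putw() transfer one machine word, which matches
       the 16-bit short of the original DOS target; the planned gcc/Linux
       build, where int is 32 bits, would need fread()/fwrite() of shorts
       instead to keep the file formats compatible. */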
    /* if a possible error value is returned and it is indeed found to be
       truly an error */
    if(ferror(Ofh)) {
      perror("\nOutput sink"); /* show standard system error message
                                  prefixed by "Output sink" */
      clearerr(Ofh); // clear possible error previously set against this stream
      return(FALSE); // bail out of the loop and return a FAIL indication
    }
  }
  return(TRUE); // return successfully
}

// DISPLAY THE WEIGHTS ARRAYS AFTER MEMORY ALLOCATION AND LOADING
void ShowWeights(void) {
  int nl; // network layer number
  _settextposition(11,1);
  printf("Neurons per layer:\n");
  for(nl = 0; nl < NL; nl++)
    printf("N[%d] = %d  ", nl, *(N + nl));
  printf("\nWEIGHTS:");
  for(nl = 1; nl < NL; nl++) { // for each network layer
    int nn, NN = *(N + nl);
    short **ppw = *(W + nl);     // point to appropriate *Wn[]
    for(nn = 0; nn < NN; nn++) { // for each neuron in layer
      int ni, NI = *(N - 1 + nl); // number of inputs to neuron
      short *pw = *(ppw + nn);    // points to appropriate Wnn[]
      printf("\nW%d%d[] = ", nl, nn);
      for(ni = 0; ni < NI; ni++)    // for each input to neuron,
        printf("%5d ", *(pw + ni)); // print its value
    }
  }
}

// STORE THE WEIGHTS IN THE .WTS FILE ON COMPLETION OF TRAINING RUN
void SaveWeights(void) {
  int nl;        // network layer number
  putw(*N, Wfh); // store the number of network input channels
  for(nl = 1; nl < NL; nl++) { // for each network layer
    int nn, NN = *(N + nl);    // number of neurons in layer
    short **ppw = *(W + nl);   // point to appropriate *Wn[]
    putw(NN, Wfh);             // store N§ of neurons in layer
    for(nn = 0; nn < NN; nn++) {  // for each neuron in layer
      int ni, Ni = *(N - 1 + nl); // number of inputs to neuron
      short *pw = *(ppw + nn);    // points to appropriate Wnn[]
      for(ni = 0; ni < Ni; ni++)  // for each input to neuron,
        putw(*(pw + ni), Wfh);    // store the weight
    }
  }
}

// DISPLAYS NUMBER OF NEURONS IN EACH LAYER OF THE NETWORK
void ShowLayers(void) {
  for(int i = 0; i < 4; i++) {  // for each network layer
    _settextposition(6 + i, 1); /* set line for display of 'N§ of neurons
                                   in layer' message and display message */
    printf("Nodes in Layer%d %d", i, *(N + i));
  }
}

void ShowTrainPass(void) { // DISPLAYS THE NUMBER OF THE TRAINING DATA PASS
  static int x; // counter
  if(x == 0) {
    _settextposition(9, 1);
    printf("Training Data Pass N§:");
  }
  _settextposition(9, 24);
  printf("%d", ++x);
}

// DISPLAYS THE NUMBER OF THE INPUT PATTERN CURRENTLY BEING PROCESSED
void ShowPatCnt(void) {
  static int x; // counter
  if(x == 0) {
    _settextposition(10, 1);
    printf("Total N§ of Patterns : %d\n", NR);
    printf("Processing Pattern N§:");
  }
  _settextposition(11, 24);
  printf("%d", ++x);
}

// DISPLAYS THE MAGNITUDE OF THE ERROR FUNCTION FOR CURRENT PATTERN
void ShowErrFun(int EF) {
  static int x; // first-time-through flag
  if(x == 0) {
    x = 1;
    _settextposition(12, 1);
    printf("Error Function Value:");
  }
  _settextposition(12, 24);
  printf("%d", EF);
}

// RETURNS MAGNITUDE OF ERROR FUNCTION FOR CURRENT INPUT PATTERN
short GetErrFun(void) {
  int nn,                 // current output-layer neuron number
    NN = *(N + NAL);      // number of neurons in output layer
  long Hi = 0, Lo = 0;    // Hi & Lo act as a split 48-bit accumulator
  short *po = *(L + NAL); // points to 1st element of output layer's output array
  for (nn = 0; nn < NN; nn++) { // for each neuron in the output layer
    // square of (target output) - (actual output) for this neuron
    long e, E = (e = *(pt + nn) - *(po + nn)) * e;
    Hi += E >> 16;    // accumulate the squares using the same split
    Lo += E & 0xFFFF; // accumulation as the summation in mlp()
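    /* Each squared error E can occupy most of the 32-bit range (e can
       approach 2R), hence the same split accumulation as in mlp() to keep
       the running total from overflowing; being a square, E is never
       negative, so the final mean below needs no sign handling. */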
  }
  // the sum of the squares as a long integer, returned as a short
  return((Hi + (Lo >> 16)) / NN);
}

// MANAGES THE MLP'S BACK-PROPAGATION DELTA-RULE TRAINING PROCESS
int TrainingManager(void) {
  #define AlphaShift 1 // make momentum ½ of last time's delta-weight
  /* the maximum acceptable magnitude of the error function for the
     neural network to be considered trained */
  #define AcceptedError 1
  // right-shift equivalent of the gain term denoted by the Greek η
  static int NetaShift;
  for(;;) { // while error != 0 for at least one training example
    int nr,          // number of [next] pattern
      flag = TRUE;   // 'acceptably low errors' flag
    short EF;        // error function
    ShowTrainPass(); // increment & show pass number of training data file
    for(nr = 0; nr < NR; nr++) { // for each pattern record in input file
      if(!KbDelay(nr)) // if abort keystroke detected,
        return(FALSE); // bail out of loop
      ShowPatCnt();    // show number of pattern being processed
      if(!GetInp())    // get the next input pattern;
        return(FALSE); // bail out of function if couldn't get it
      mlp();           // present it to the network
      if(!GetOut())    // get the next target output pattern;
        return(FALSE); // bail out of function if couldn't get it
      ShowErrFun(EF = GetErrFun()); // compute and display the error function
      if(EF > AcceptedError) { // if error not yet small enough,
        flag = FALSE;          // set the flag = 'not finished'
        mlptrain(NetaShift, AlphaShift); // and continue training
      }
    }
    if(flag == TRUE)   // if finished flag remained true for all patterns,
      break;           // break out of the for(;;) loop
    if(NetaShift < 12) // if the gain shift has not yet reached its upper
      NetaShift += 1;  // limit, increment it, thereby halving the gain η
    rewind(Ifh); // back to start of training input patterns
    rewind(Ofh); // back to start of training output patterns
  }
  SaveWeights(); // save the weights values for the trained network
  return(TRUE);
}

int SigLoad(void) { // LOAD THE SIGMOID FUNCTION'S LOOK-UP TABLE
  int i;
  FILE *fh;
  _settextposition(2,1);
  printf("Loading Sigmoid look-up table");
  _settextposition(2,1);
  /* if the sigmoid table file can be found and opened, load each value
     in the table into the SigTab[] array */
  if(fh = fopen("SIGMOID.DAT", "rb")) {
    for (i = 0; i < 1025; i++)
      *(SigTab + i) = getw(fh);
    fclose(fh);
    printf("Sigmoid look-up table loaded.");
    return(TRUE);
  }
  printf("Cannot find/load Sigmoid look-up table file sigmoid.dat");
  return(FALSE);
}

// Ctrl-C BREAK DETECTOR AND INTER-PATTERN DELAY TIMER
int KbDelay(int nr) {
  if(kbhit()) {           // if a keyboard key has been struck
    if(getch() == '\3') { // if it was a Ctrl-C break character,
      printf("\nProgram aborted by keystroke."); // display error message
      return(FALSE);      // set error indication
    }
  } else { // else (keyboard was not struck)
    /* if monitoring is required and it is not the first pass, set the
       target time to the current time + delay, then wait until the
       current time reaches the target time.
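       (This is a busy-wait; under Unix/Linux the planned port could
       simply call sleep(delay) from <unistd.h> instead.)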
    */
    if(delay != -1 && nr > 0) {
      time_t t = time(NULL) + delay;
      while(time(NULL) < t);
    }
  }
  return(TRUE); // signal OK to proceed
}

void BeepDelay(void) { // BEEP EVERY 10 SECONDS WHEN PROGRAM HAS FINISHED
  int flag = TRUE;
  printf("\nPress any key to exit.");
  while(flag) {
    time_t t = time(NULL) + 10; // set target time to current time + delay
    printf("\07");              // beep
    // while current time has not yet reached target time, wait
    while(time(NULL) < t && flag)
      if(kbhit())     // if a key is hit,
        flag = FALSE; // set exit flag
  }
}

/* VALIDATES WEIGHTS, INPUT & OUTPUT FILESPECS FROM COMMAND LINE
   Note: this function is specific to Microsoft Windows and will have to
   be modified significantly for Unix/Linux environments.
   points to the input argument string
   |        points to the prescribed file extension string
   |        |    */
char *CheckFileName(char *s, char *e) {
  int i, // char counter
    b,   // backslash location
    d,   // dot location
    c;   // ASCII character
  static char S[80]; // storage for the validated argument
  /* for each character in the input argument, locate the dot within the
     file name and locate the last backslash in the filespec */
  for(i = 0, b = 0, d = 0; *(S + i) = (c = *(s + i)); i++) {
    if(c == '.' && d == 0) // if char is a dot and none is yet located,
      d = i;               // set its location to character position 'i'
    if(c == '\\')          // if character is a backslash,
      b = i + 1;           // set its position to one after the '\' character
  }
  if(i - b < 1 || i - b > 12) // if input argument too long or too short,
    return(NULL);             // exit the function
  if(d) {           // if file name contains a dot
    if (d - b > 8)  // if more than 8 chars before the dot,
      return(NULL); // exit with a NULL pointer
    if (*e == '\0' && d == i - 1) // if a specific extension is not required,
      *(S + d) = '\0';            // change the dot to a null character
    else while (c = *e++) { // for each char of the prescribed extension
      int x;
      if ((x = *(S + ++d)) > 96 && x < 123) // if char is lower case,
        x -= 32;      // convert it to upper case
      if (c != x)     // if char not same as file extension,
        return(NULL); // return 'invalid filespec'
    }
  } else {          // else (if no dot was found)
    if(i - b > 8)   // if filename is more than 8 characters,
      return(NULL); // exit with a NULL pointer
    if(*e) {                     // if a specific extension is required,
      *(S + i++) = '.';          // replace the null character by a dot
      while (*(S + i++) = *e++); // and add each char of the prescribed
    }                            // extension, including terminating null
  }
  return(S);
}

// CHECK AND PROCESS THE NON-FILESPEC COMMAND LINE ARGUMENTS
int CheckNonFileArgs(int argc, char **argv) {
  int i, n, result = TRUE; // index, neuron count, result flag
  char *s;
  if(argc > 4) { // if a 4th argument is present on the command line...
    /* delay = number of seconds to wait between processing patterns; if
       the entered delay is out of range, set it to -1 to indicate no
       display */
    if((delay = *(s = *(argv + 4)) - 48) < 0 || delay > 9)
      delay = -1;
    // determine whether or not to beep every 10 secs when finished
    BeepFlag = *(s + 1);
  }
  // if more arguments are present, assume a training run is intended
  if(argc > 5) {
    if(argc < 9) { // 4 more arguments are mandatory for training
      printf("\nWrong number of arguments for a training session.");
      return(FALSE);
    }
    /* for each 'training' argument, get the number of neurons in this
       layer and display this number as a message in the terminal */
    for(Training = TRUE, i = 0; i < 4; i++) {
      *(N + i) = (n = atoi(*(argv + i + 5)));
      _settextposition(6 + i, 1);
      printf("Nodes in Layer%d %d", i, n);
      /* if the entered number of neurons is out of range, indicate an
         invalid number of neurons in at least one layer.
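         For example, 'mlp robot.wts robot.in robot.out 5B 256 512 0 16'
         would flag layer 2 as invalid and abandon the training run.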
      */
      if(n < 1 || n > 4096) {
        printf(" - invalid.");
        Training = FALSE;
      }
    }
    if(!Training)    // if an invalid neuron count was entered,
      return(FALSE); // exit with error
  }
  // input the random weights generator seed value if present
  if(argc > 9)
    SeedVal = atoi(*(argv + 9));
  return(result);
}

// CHECK FILESPECS ENTERED ON THE COMMAND LINE AND OPEN THE FILES
int CheckFileArgs(int argc, char **argv) {
  char *Text[] = {       // annotation text for filename
    "Application File:", // validation messages
    "Input Stream:    ",
    "Output Stream:   "
  },
  *Extn[] = {"WTS", "", ""},     /* mandatory extensions for application,
                                    input and output filenames */
  *Mode[] = {"rb", "rb", "wb"},  /* read/write modes in which each of the
                                    above files must be opened */
  *TMode[] = {"wb", "rb", "rb"}, /* ditto for when the network is being
                                    trained */
  *s, **m; /* pointers to validated filespec and appropriate file mode
              array */
  int i, result = TRUE; // index, result flag
  FILE *FH[3]; // array for the weights, input and output file pointers
  if(argc < 4) { // if < 3 arguments (excluding the exe program filename),
    printf("\nNot enough command line arguments.");
    return(FALSE); // display error message and exit FALSE
  }
  if(Training) // if running in training mode,
    m = TMode; // set to open files in training mode;
  else         // else, if in normal running mode,
    m = Mode;  // set to open files for running mode
  for(i = 0; i < 3; i++) // for each of the 3 filespec command line arguments
    if(s = CheckFileName(*(argv + i + 1), *(Extn + i))) // if filespec valid
      if(*(FH + i) = fopen(s, *(m + i)))   // if valid file handle returned,
        printf("\n%s %s", *(Text + i), s); // display the name of the file
      else {
        printf("\nCould not find/open %s %s.", *(Text + i), s);
        result = FALSE; // indicate that file opening attempt failed
      }
    else { // indicate that the filename was invalid
      printf("\nArg %d: invalid file name. ", i);
      if (*(s = *(Extn + i))) // if a definite filename extension is required
        printf(" .%s extension expected (and assumed when omitted).", s);
      result = FALSE; // indicate that the file opening phase has failed
    }
  Wfh = *FH;       // set pointer to weights file handle
  Ifh = *(FH + 1); // set pointer to input file handle
  Ofh = *(FH + 2); // set pointer to output file handle
  return(result);
}

// COMMAND LINE ARGUMENTS ARE EXPLAINED AT FRONT OF THIS LISTING
main(int argc, char *argv[]) {
  _clearscreen(_GWINDOW);               // clear the screen
  printf("EBS MULTI-LAYER PERCEPTRON"); // display the program title
  /* If the sigmoid function's look-up table is found and loaded OK, AND
     all non-filespec arguments are valid, AND all filespec arguments are
     valid AND all files found & opened, AND memory allocated and weights
     loaded OK... */
  if(SigLoad() && CheckNonFileArgs(argc, argv)
  && CheckFileArgs(argc, argv) && Walloc()) {
    _settextposition(11,1); // set it to (20,1) if ShowWeights() is active
    // total number of patterns in the input file
    NR = (filelength(fileno(Ifh)) / (*N)) >> 1;
    // if in training mode (i.e. layer sizes were given on the command line)...
    if(Training) {
      if(!TrainingManager()) // if training unsuccessful,
        printf("\nTraining process failed.");
    } else { // else, if in normal non-training mode...
      int nr; // number of [next] pattern
      ShowLayers();
      _settextposition(11,1); // set it to (20,1) if ShowWeights() is active
      for(nr = 0; nr < NR; nr++) { // for each pattern in the input file...
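        /* NR above: filelength() gives the file size in bytes and each
           input channel occupies a 2-byte word, so e.g. a 51200-byte
           robot.in with *N = 256 channels holds 51200 / 256 / 2 = 100
           patterns. */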
        // check for an abort keystroke and apply the inter-pattern delay
        if(!KbDelay(nr))
          break;
        ShowPatCnt(); // show the number of the pattern now being processed
        if(!GetInp()) // get the next input pattern
          break;
        mlp();        // pass it through the multi-layer perceptron network
        if(!PutOut()) // send the corresponding output pattern
          break;
      }
    }
    _settextposition(13,1);
    printf("Program finished.");
    if(BeepFlag)
      BeepDelay(); // keep beeping every 10 seconds when finished
  } else // one or more of the initialisation processes failed
    printf("\nCould not continue.");
}
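
/* APPENDIX: GENERATING SIGMOID.DAT
   The listing above expects a 1025-word look-up table file, SIGMOID.DAT,
   but does not show how it is produced. The sketch below is one plausible
   generator, not the author's original: it assumes a bipolar (tanh-style)
   curve scaled so that an activation 'a' in 0..32767 maps to table index
   a >> 5 and an output in 0..32767, matching the interpolation in mlp().
   The gain constant G and the framing as a separate file 'makesig.c' are
   illustrative assumptions; the exact curve is described at
   https://robmorton.website/software/sigmoid.html#lin. Like mlp.c, it
   writes each entry with putw(), so it must be built with the same word
   size (16-bit int) as the program that reads the file. Compile it as a
   separate program; the #if 0 guard merely keeps it out of the mlp.c
   build if the two listings are kept in one file. */
#if 0
#include <stdio.h>
#include <math.h>

#define RMAX 32767.0 // full-scale bipolar output, as R in mlp.c
#define G 4.0        // assumed gain: inputs 0..RMAX span arguments 0..G

int main(void) {
  FILE *fh = fopen("SIGMOID.DAT", "wb");
  int i;
  if(fh == NULL) {
    printf("Cannot create SIGMOID.DAT\n");
    return 1;
  }
  for(i = 0; i <= 1024; i++) {         // 1025 entries, as SigLoad() expects
    double x = G * (double)i / 1024.0; // table index -> sigmoid argument
    putw((int)(RMAX * tanh(x)), fh);   // scaled bipolar sigmoid value
  }
  fclose(fh);
  return 0;
}
#endif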