/*----------------------------------------------------------------------*/
/*  TDNN main routines                                                   */
/*----------------------------------------------------------------------*/

#include <stdio.h>              /* fprintf, fscanf, sprintf */
#include <math.h>               /* fabs */
#include "graph_exp.h"
#include "net_exp.h"
#include "function_exp.h"

/******** Function nrand generates a random number between 0 and 1 */
float nrand()
{
    unsigned long lvalue, rvalue, shrand, mvalue;

    /* Mix the global seed RANDVAL nibble-wise: the low and high nibbles
       are added separately so that carries never cross nibble boundaries,
       then the two halves are recombined */
    shrand = RANDVAL >> 12;
    lvalue = (shrand + (RANDVAL & 0x0f0f0f0f)) & 0x0f0f0f0f;
    rvalue = ((shrand & 0xf0f0f0f0) + (RANDVAL & 0xf0f0f0f0)) & 0xf0f0f0f0;
    mvalue = lvalue | rvalue;
    shrand = RANDVAL << 15;
    lvalue = ((shrand & 0x0f0f0f0f) + (mvalue & 0x0f0f0f0f)) & 0x0f0f0f0f;
    rvalue = ((shrand & 0xf0f0f0f0) + (mvalue & 0xf0f0f0f0)) & 0xf0f0f0f0;
    RANDVAL = lvalue | rvalue;

    /* Scale the 32-bit seed down to [0, 1] */
    return (float)RANDVAL / 4.294967e9;
}

/******* Function randomizeweights sets all Network weights to random
         values, -RanMag to +RanMag */
randomizeweights()
{
    int i, j, k, l;

    for (l = 0; l < Network.layers; l++) {
        for (k = 0; k < Network.nodes[l]; k++) {
            if (l > 0)
                Network.threshold[l-1][k] = 2.0 * RanMag * nrand() - RanMag;
            for (j = 0; j < Network.taps[l]; j++) {
                Network.state[l][j][k] = 0.0;
                /* weight[l] feeds layer l+1, so the last layer has no
                   outgoing weights; the guard must exclude l == layers-1
                   or nodes[l+1] reads past the end */
                if (l < Network.layers - 1)
                    for (i = 0; i < Network.nodes[l+1]; i++)
                        Network.weight[l][j][k][i] =
                            2.0 * RanMag * nrand() - RanMag;
            }
        }
    }
    weightnorm = 1.0;
    count = 0;
    sprintf(msgstring, "Weights reset and randomized; step count reset.");
    Wprintf();
}

/****** Function LoadGroup loads a pattern group into variables
        "traininput" and "traindesired" by taking input from a program
        running in the background */
LoadGroup()
{
    int p, k, i;

    /* Signal the child by announcing the expected number of
       inputs/outputs; flush in case stdout is block-buffered on a pipe */
    fprintf(stdout, "%d\n", Network.nodes[0]);
    fprintf(stdout, "%d\n", Network.nodes[Network.layers - 1]);
    fflush(stdout);

    /* Read the incoming data; a count of 1 or less apparently signals
       that the child has exited */
    fscanf(stdin, "%d", &no_patterns);
    if (no_patterns > 1) {
        sprintf(msgstring, "#patterns = %d", no_patterns);
        for (p = 0; p < no_patterns; p++) {
            for (i = 0; i < Network.nodes[0]; i++)
                fscanf(stdin, "%f", &traininput[p][i]);
            for (k = 0; k < Network.nodes[Network.layers - 1]; k++)
                fscanf(stdin, "%f", &traindesired[p][k]);
        }
    }
    else {
        sprintf(msgstring, "training program has exited.");
        LRunTrain = FALSE;
        Interrupt = TRUE;
    }
    Wprintf();
}

/****** Function LoadSamps loads a pattern group into variable
        "sampleinput" by taking input from a program running in the
        background */
LoadSamps()
{
    int p, i;

    /* Signal the child by announcing the expected number of inputs */
    fprintf(stdout, "%d\n", Network.nodes[0]);
    fflush(stdout);

    /* Read the incoming data; a count of 1 or less apparently signals
       that the child has exited */
    fscanf(stdin, "%d", &no_samples);
    if (no_samples > 1) {
        sprintf(msgstring, "#samples = %d", no_samples);
        for (p = 0; p < no_samples; p++) {
            for (i = 0; i < Network.nodes[0]; i++)
                fscanf(stdin, "%f", &sampleinput[p][i]);
        }
    }
    else {
        sprintf(msgstring, "training program has exited.");
        LRunSample = FALSE;
        Interrupt = TRUE;
    }
    Wprintf();
}
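/* A note on the effective epsilon used below: eps_eff serves both as the
   output tolerance in Checksolved and as a learning-rate mask in FeedBack
   (etanorm = eta * (1 - eps_eff)).  Patterns before 'starteps' get
   eps_eff = 1.0, so they pass the tolerance check trivially and contribute
   no weight change -- presumably masking the transient while the tap
   delay lines are still filling.  Patterns past 'endeps' use the full
   tolerance 'epsilon', and patterns in between interpolate linearly.
   For example (illustrative values, not from this file), starteps = 10,
   endeps = 20, epsilon = 0.1 gives pattern 15

       eps_eff = 0.1 + 0.9 * (15 - 20) / (10 - 20) = 0.55            */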
/****** Function TDNNTrain trains the time-delay neural network */
void TDNNTrain(w, client_data, call_data)
Widget w;
XtPointer client_data, call_data;
{
    int p, number;      /* # of correctly classified outputs per pattern */
    int correct;        /* total # of correct patterns */
    XEvent event;

    Interrupt = FALSE;
    correct = 0;
    if (LRunTrain)
        LoadGroup();    /* Load new set of patterns */
    sprintf(msgstring, "#patterns = %d", no_patterns);
    Wprintf();

    while (correct < no_patterns || LRunTrain) {
        correct = 0;
        if (Interrupt)
            break;      /* Generated externally */
        for (p = 0; p < no_patterns; p++) {
            pattern = p;

            /* Generate effective epsilon for time-delay training */
            if (pattern >= endeps)
                eps_eff = epsilon;
            else if (pattern < starteps)
                eps_eff = 1.0;
            else
                eps_eff = epsilon + (1. - epsilon) *
                          (pattern - endeps) / (starteps - endeps);

            FeedForward(traininput);
            number = Checksolved();

            if ((count % update) == 0) {
                (drawtype)(w, client_data, call_data);
                if (LRunTrain)
                    XClearWindow(dpy, XtWindow(graph));
                DrawGraph(w, client_data, call_data);
            }

            /* Allow interrupts */
            while (XtPending()) {
                XtNextEvent(&event);
                XtDispatchEvent(&event);
            }

            if (number == Network.nodes[Network.layers - 1])
                correct++;
            if (!LLastPat || (pattern == (no_patterns - 1)))
                FeedBack();
            count++;
        }
        if (LRunTrain)
            LoadGroup();
    }

    if (!Interrupt) {
        sprintf(msgstring,
                "Network successfully solved in %d steps (%d pattern sets).",
                count, (int)(count / no_patterns));
        Wprintf();
    }
    else {
        sprintf(msgstring,
                "Network training interrupted at step %d (%d pattern sets).",
                count, (int)(count / no_patterns));
        Wprintf();
    }
}

int Checksolved()
{
    int k, correct;

    correct = 0;
    /* An output counts as correct if the newest tap of the output layer
       is within eps_eff of the desired value */
    for (k = 0; k < Network.nodes[Network.layers - 1]; k++) {
        if (fabs(Network.state[Network.layers - 1][0][k] -
                 traindesired[pattern][k]) <= eps_eff)
            correct++;
    }
    return (correct);
}

FeedBack()
{
    float delta[MAXACCUM][MAXNODES];
    float newdelta[MAXACCUM][MAXNODES];
    float sumn, etanorm;
    int i, j, k, l, n, taps_accum, last_accum;

    etanorm = eta * (1 - eps_eff);

    /* Backpropagate from output -- output layer only */
    for (k = 0; k < Network.nodes[Network.layers - 1]; k++) {
        if (LSelf)
            traindesired[pattern][k] =
                (Limiter)(Network.state[Network.layers - 1][0][k]);
        delta[0][k] = -2 * (traindesired[pattern][k] -
                            Network.state[Network.layers - 1][0][k]) *
                      (DSig)(Network.state[Network.layers - 1][0][k]);
        for (j = 0; j < Network.nodes[Network.layers - 2]; j++)
            for (i = 0; i < Network.taps[Network.layers - 2]; i++)
                Network.weight[Network.layers - 2][i][j][k] -=
                    etanorm * delta[0][k] *
                    Network.state[Network.layers - 2][i][j];
        Network.threshold[Network.layers - 2][k] -= etanorm * delta[0][k];
    }

    /* Backpropagate errors to all other layers */
    taps_accum = Network.taps[Network.layers - 2];
    last_accum = 1;
    for (l = Network.layers - 2; l > 0; l--) {
        for (n = 0; n < (last_accum + taps_accum - 1); n++)
            for (j = 0; j < Network.nodes[l]; j++)
                newdelta[n][j] = 0.0;
        /* Each delta frame n at layer l+1 spreads across the taps i of
           layer l, accumulating into frame i+n */
        for (n = 0; n < last_accum; n++)
            for (j = 0; j < Network.nodes[l]; j++)
                for (i = 0; i < Network.taps[l]; i++)
                    for (k = 0; k < Network.nodes[l+1]; k++)
                        newdelta[i+n][j] +=
                            Network.weight[l][i][j][k] * delta[n][k];
        for (j = 0; j < Network.nodes[l]; j++) {
            sumn = 0;
            for (i = 0; i < taps_accum; i++) {
                newdelta[i][j] *= (DSig)(Network.state[l][i][j]);
                sumn += newdelta[i][j];
            }
            Network.threshold[l-1][j] -= etanorm * sumn;
            for (k = 0; k < Network.nodes[l-1]; k++)
                for (i = 0; i < Network.taps[l-1]; i++) {
                    sumn = 0;
                    for (n = 0; n < taps_accum; n++)
                        sumn += newdelta[n][j] *
                                Network.state[l-1][i+n][k];
                    Network.weight[l-1][i][k][j] -= etanorm * sumn;
                }
        }
        for (i = 0; i < taps_accum; i++)
            for (j = 0; j < Network.nodes[l]; j++)
                delta[i][j] = newdelta[i][j];
        last_accum = taps_accum;
        taps_accum += Network.taps[l - 1] - 1;
    }
}
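/* A note on taps_accum / last_accum in FeedBack above: the number of
   delta frames that must be kept at a layer is 1 plus the sum of
   (taps - 1) over the connections from that layer upward, because each
   tap of an upper layer shifts the frame it reads by one time step.
   For an illustrative three-layer net with taps[1] = 3 (hidden to
   output) and taps[0] = 5 (input to hidden), the single output delta
   spreads into 1 + (3 - 1) = 3 frames at the hidden layer, and each
   5-tap input kernel then reads 3 + (5 - 1) = 7 frames of input
   history.  FeedForward below maintains the same counts when shifting
   the per-layer state histories. */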
FeedForward(intype)
float intype[MAXPATTERNS][MAXNODES];
{
    int i, j, k, l, taps_accum;

    /* Propagate the signal along all taps and insert the training
       pattern into the network */
    for (k = 0; k < Network.nodes[Network.layers - 1]; k++)
        for (j = Network.taps[Network.layers - 1]; j > 0; j--)
            Network.state[Network.layers - 1][j][k] =
                Network.state[Network.layers - 1][j - 1][k];
    taps_accum = Network.taps[Network.layers - 2];
    for (l = Network.layers - 2; l >= 0; l--) {
        for (k = 0; k < Network.nodes[l]; k++)
            for (j = taps_accum; j > 0; j--)
                Network.state[l][j][k] = Network.state[l][j - 1][k];
        if (l != 0)
            taps_accum += Network.taps[l - 1] - 1;
    }
    for (k = 0; k < Network.nodes[0]; k++)
        Network.state[0][0][k] = intype[pattern][k];

    /* Calculate the states at the current taps */
    for (l = 1; l < Network.layers; l++) {
        for (k = 0; k < Network.nodes[l]; k++) {
            Network.state[l][0][k] = 0;
            for (j = 0; j < Network.nodes[l - 1]; j++)
                for (i = 0; i < Network.taps[l - 1]; i++)
                    Network.state[l][0][k] +=
                        Network.state[l - 1][i][j] *
                        Network.weight[l - 1][i][j][k];
            Network.state[l][0][k] =
                (Sigmoid)(Network.state[l][0][k] +
                          Network.threshold[l-1][k]);
        }
    }
    for (k = 0; k < Network.nodes[Network.layers - 1]; k++)
        outputhistory[pattern][k] =
            Network.state[Network.layers - 1][0][k];
}
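/* The forward pass above computes, for the newest tap of node k in
   layer l,

       state[l][0][k] = Sigmoid( sum over j, i of
                                 state[l-1][i][j] * weight[l-1][i][j][k]
                                 + threshold[l-1][k] )

   i.e. a weighted sum over every node j of the layer below and every
   delay tap i feeding layer l, squashed through the global Sigmoid
   function pointer. */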