/** @file perceptron.cpp

    @brief CPerceptronNN functions needed for network running
    (in the non-training mode)

    @author Jakub Adámek
    Last modified $Id: perceptron.cc,v 1.22 2002/04/23 15:49:41 jakubadamek Exp $
*/

#include "perceptron.h"
#include "vector.h"
#include "base.h"

#include <time.h>
#include <math.h>
#include <sys/timeb.h>

REGSMALLVECTOR (double, CFloat, TSmallVectorFloat)
REGVECTOR (CFloat, TVectorFloat)
REGVECTOR (CInt, TVectorInt)

REGVECTOR (TPerceptron, TVectorPerceptron)
REGVECTORC (CPerceptronNN, TConnectionsAttribs, TConAttribs)
REGVECTOR (TVectorPerceptron, TSeriesNeurons)

REGSTRUCT (TLearningGoal)
REGSTRUCT (CPerceptronNN)
REGSTRUCT (TPerceptron)
REGSTRUCTC (CPerceptronNN, TConnectionsAttribs)
REGSTRUCTC (CPerceptronNN, TPerceptrons)

REGSTRUCT (TConIndex)
REGSET (TConIndex, TConIndexes)

CPerceptronNN * globalDebugNN;

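/** @brief Propagates one input row through the network.

    Copies the inputs into the input layer, zeroes all other neurons and
    walks through the connections, accumulating each end neuron's weighted
    sum; when the end neuron changes, the finished neuron's transfer
    function is applied. For irow > -1 (recurrent mode) the resulting
    hidden and output states are stored into seriesNeurons[irow+1]. */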
void CPerceptronNN::runNetwork (const TSmallVectorFloat &inputs, int irow)
{
    TPerceptron * nStart, * nEnd=NULL, * noldEnd=NULL;
    int ineu, con;
    TVectorPerceptron *last = NULL;
    if (irow > -1) last = &seriesNeurons[irow];

    globalDebugNN = this;

    // feed the input row into the input layer, reset all other neurons
    for (ineu=0; ineu < (int)layerSizes[0]; ++ineu)
        neurons[ineu].output = inputs[ineu];
    for (; ineu < neurons.size(); ++ineu)
        neurons[ineu].output = 0;

    // connections come grouped by their end neuron: once the end neuron
    // changes, the previous one has its complete weighted sum and its
    // transfer function can be applied
    while (next_connection (nStart,nEnd,con,last)) {
        if (noldEnd == NULL) noldEnd = nEnd;
        else if (noldEnd != nEnd) {
            noldEnd->output = noldEnd->transfer();
            noldEnd = nEnd;
        }
        nEnd->output += nStart->output * weights->d[con];   //(**weights.d).d.d[con] into watch window
    }
    if (nEnd) nEnd->output = nEnd->transfer();

    if (irow > -1) {
        // store this row's hidden and output states for the recurrent
        // connections of the next row in the series
        if (seriesNeurons.size() <= irow+1)
            seriesNeurons.push_back (seriesNeurons[0]);
        for (ineu = 0; ineu < neurons.size() - layerSizes[0]; ++ineu) {
            seriesNeurons[irow+1][ineu].output = neurons[ineu+layerSizes[0]].output;
            seriesNeurons[irow+1][ineu].partialEk = 0;
        }
    }
}

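/** @brief Copies the outputs of the output-layer neurons into the given vector. */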
void CPerceptronNN::copyOutputs (TSmallVectorFloat &outputs) const
{
    for (int ineu = outOffset; ineu < neurons.size(); ++ineu)
        outputs[ineu-outOffset] = (double)neurons[ineu].output;
}

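/** @brief Returns the network error over the given data set.

    If trData is given, the network is first run on every row of the set
    and the squared differences between network and desired outputs are
    accumulated per output neuron. With postprocessed the per-column RMS
    error is passed through the output column's postprocessError
    (presumably mapping it back to the original data scale); otherwise
    half the sum of squared errors is returned. */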
double CPerceptronNN::getError (const CTrainingData *trData, CInt set, bool postprocessed)
{
    int ineu;
    int irow, series;

    if (trData) {
        for (ineu=outOffset; ineu < neurons.size(); ++ineu)
            neurons[ineu].error = 0;
        trData->moveToSetStart(set);
        // accumulate the squared output errors over all rows of the set,
        // series by series
        while (trData->getSet() == set) {
            irow = 0;
            series = trData->getSeries();
            while (series == trData->getSeries()) {
                runNetwork (trData->getInputs(), irow);
                ++irow;
                for (ineu=outOffset; ineu < neurons.size(); ++ineu)
                    neurons[ineu].error += sqr (neurons[ineu].output - trData->getOutputs()[ineu-outOffset]);
                trData->moveNext (set);
            }
        }
    }

    double retVal = 0;
    double val, val2;
    for (ineu=outOffset; ineu < neurons.size(); ++ineu) {
        if (postprocessed && trData) {
            // RMS error of this output column, postprocessed by the column
            val2 = sqrt (neurons[ineu].error / trData->getRowCount((enumSetType)(int)set));
            trData->getColumn(CU_OUTPUT,ineu-outOffset).postprocessError (val2,val);
            retVal += val;
        }
        else retVal += neurons[ineu].error / 2;
    }
    return retVal;
}

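/** @brief Runs the network on the training and evaluation sets, writing
    the computed outputs back into trData; if fileName is nonempty, the
    resulting data is also dumped to that file. */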
void CPerceptronNN::calculateOutputs (CTrainingData &trData, CString fileName)
{
    int ineu;
    int irow, series;

    for (ineu=outOffset; ineu < neurons.size(); ++ineu)
        neurons[ineu].error = 0;
    // process both the training and the evaluation set
    for (int s=0; s < 2; ++s) {
        enumSetType set = s == 0 ? ST_TRAIN : ST_EVAL;
        trData.moveToSetStart(set);
        while (trData.getSet() == set) {
            irow = 0;
            series = trData.getSeries();
            while (series == trData.getSeries()) {
                runNetwork (trData.getInputs(), irow);
                ++irow;
                for (ineu=outOffset; ineu < neurons.size(); ++ineu)
                    neurons[ineu].error += sqr (neurons[ineu].output - trData.getOutputs()[ineu-outOffset]);
                copyOutputs (trData.modifyOutputs());
                trData.moveNext (set);
            }
        }
    }
    CString data;
    trData.dumpData (data);
    if (fileName.length()) {
        bang_ofstream dump (fileName);
        if (dump) dump << ((const char*)data);
    }
}

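/** @brief Builds the connection index set and allocates the
    per-connection attribute vectors.

    With CR_LAYERED every hidden and output neuron is connected to the
    bias and to all neurons of the previous layer; otherwise it is
    connected to the bias and to all preceding neurons. Recurrent
    connections are added afterwards from allRecurrentConnections.
    The number of attribute vectors (weights, gradient, ...) depends on
    the training procedure. */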
void CPerceptronNN::initConAttribs ()
{
    int ilayer;
    CInt iStart;
    int neuronCount = 0;
    TConIndex conIndex;

    // layerStarts[i] = index of the first neuron of layer i
    TVectorInt layerStarts (layerSizes.size()+1);
    layerStarts[0] = 0;
    for (ilayer=1; ilayer <= layerSizes.size(); ++ilayer)
        layerStarts[ilayer] = layerStarts[ilayer-1] + layerSizes[ilayer-1];

    for (ilayer = 0; ilayer < layerSizes.size(); ++ilayer)
        neuronCount += (int)layerSizes[ilayer];

    conIndexes.DeleteAll();
    conIndex.recurrent = 0;

    switch ((int)conRestrict) {
    case CR_LAYERED:
        // connections lead from the bias and the previous layer
        // to each hidden and output neuron
        for (ilayer = 1; ilayer < layerSizes.size(); ++ilayer)
            for (conIndex.end = layerStarts[ilayer]; conIndex.end < layerStarts[ilayer+1]; ++conIndex.end) {
                neurons[conIndex.end].inputCount = layerSizes[ilayer-1] + 1;
                conIndex.start = -1;
                conIndexes.insert (conIndex);
                for (conIndex.start = layerStarts[ilayer-1]; conIndex.start < layerStarts[ilayer]; ++conIndex.start)
                    conIndexes.insert (conIndex);
            }
        break;
    // connections lead from all neurons including the bias
    // to all hidden and output neurons
    default:
        for (conIndex.end = layerSizes[0]; conIndex.end < (CInt)neuronCount; ++conIndex.end) {
            if (conIndex.end < layerStarts[1]) iStart = 0;
            else iStart = layerStarts[0];
            neurons[conIndex.end].inputCount = conIndex.end - iStart + 1;
            conIndex.start = -1;
            conIndexes.insert (conIndex);
            for (conIndex.start = iStart; conIndex.start < conIndex.end; ++conIndex.start)
                conIndexes.insert (conIndex);
        }
    }

    // RECURRENT CONNECTIONS:

    conIndex.recurrent = 1;
    CInt from[2], to[2], sfrom[2], sto[2];
    TAllRecurrentConnections::iterator recs;
    for (recs = allRecurrentConnections.begin(); recs != allRecurrentConnections.end(); ++recs) {
        sfrom[0] = recs->start.from; sfrom[1] = recs->end.from;
        sto[0] = recs->start.to; sto[1] = recs->end.to;
        setPercent (sto[0], layerSizes[recs->start.layer] - 1);
        setPercent (sfrom[0], layerSizes[recs->start.layer] - 1);
        setPercent (sto[1], layerSizes[recs->end.layer] - 1);
        setPercent (sfrom[1], layerSizes[recs->end.layer] - 1);
        // irix CC doesn't allow the explicit type info after Min
        from[0] = sfrom[0] + layerStarts[recs->start.layer];
        to[0] = Min/*<int>*/ ((int)(sto[0] + layerStarts[recs->start.layer]), (int)(neuronCount-1));
        from[1] = sfrom[1] + layerStarts[recs->end.layer];
        to[1] = Min/*<int>*/ ((int)(sto[1] + layerStarts[recs->end.layer]), (int)(neuronCount-1));

        for (conIndex.end = from[1]; conIndex.end <= to[1]; ++conIndex.end) {
            neurons[conIndex.end].inputCount += to[0] - from[0] + 1;
            for (conIndex.start = from[0]; conIndex.start <= to[0]; ++conIndex.start)
                conIndexes.insert (conIndex);
        }
    }

    // the number of per-connection attribute vectors depends
    // on the training procedure
    switch ((int) trainProcedure) {
        case TP_STABLE_CONJUGATE: conAttribs.Realloc (6); break;
        case TP_RUN_ONLY: conAttribs.Realloc (1); break;
        default: conAttribs.Realloc (4);
    }

    int iAttr = 0;
    for (TConAttribs::iterator citer=conAttribs.begin(); citer != conAttribs.end(); ++citer) {
        switch (iAttr++) {
            case 0: weights = citer; weights->name = "weights"; break;
            case 1: gradient = citer; gradient->name = "gradient"; break;
            case 2: bestWeights = citer; bestWeights->name = "bestWeights"; break;
            case 3: D = citer; D->name = "D"; break;
            case 4: oldGradient = citer; oldGradient->name = "oldGradient"; break;
            case 5: oldD = citer; oldD->name = "oldD"; break;
        }
        citer->d.Realloc (conIndexes.size());
    }
}

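/** @brief Allocates the neurons (including the bias at index -1),
    assigns layer numbers and transfer functions, and prepares
    seriesNeurons[0] as the zeroed initial state of a series. */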
void CPerceptronNN::initNeurons ()
{
    if (debug == 0)
        srand ((unsigned)time(NULL));

    int neuronCount = 0;
    int ilayer;

    for (ilayer = 0; ilayer < layerSizes.size(); ++ilayer)
        neuronCount += (int)layerSizes[ilayer];
    outOffset = neuronCount - layerSizes[layerSizes.size()-1];
    neurons.data.Realloc (neuronCount+1);
    // the bias neuron at index -1 outputs a constant 1
    neurons[-1].output = 1;

    int inbase = 0;
    for (ilayer=0; ilayer < layerSizes.size(); ++ilayer) {
        for (int ineuron=inbase; ineuron < (int)layerSizes[ilayer]+inbase; ++ineuron) {
            neurons[ineuron].layerSize = (int)layerSizes[ilayer];
            neurons[ineuron].layer = (CInt)ilayer;
            // input neurons have no transfer function
            if (ilayer)
                neurons[ineuron].transferFn = layerTrFuncs[ilayer];
        }
        inbase += (int)layerSizes[ilayer];
    }

    int neu;
    for (neu=-1; neu < neurons.size(); ++neu)
        neurons[neu].index = (CInt)neu;

    seriesNeurons.Realloc (1);
    seriesNeurons[0].Realloc (neurons.size() - layerSizes[0]);
    for (neu = 0; neu < neurons.size() - layerSizes[0]; ++neu) {
        seriesNeurons[0][neu] = neurons[neu+layerSizes[0]];
        seriesNeurons[0][neu].output = 0;
        seriesNeurons[0][neu].partialEk = 0;
    }
}

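/** @brief Prints the weights: into the file named by dumpWeights when it
    is set (remote stream), otherwise into the returned XML (local stream). */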
CXml * CPerceptronNN::printWeights (const CString &filename) const
{
    TStream str;
    if (dumpWeights.length()) {
        str.type = ST_REMOTE;
        str.value = findFileName (getFilePath(filename)+dumpWeights);
    }
    else str.type = ST_LOCAL;
    str.encoding = SE_TEXT;
    str.delimiter = dumpWeightsDelimiter;
    return printStream (str, weights->d.begin(), weights->d.end(), "");
}

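/** @brief Reads the weights from the given XML stream. Returns an error
    message when the weights are not yet initialized, otherwise the
    result of readStream. */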
CString CPerceptronNN::readWeights (CRox &stream, const CString &filename)
{
    assert (weights.valid());
    if (!weights.valid()) return "PerceptronNN: Can't read weights: not yet initialized.";
    TStream str;
    return readStream (str, stream, weights->d, filename);
}

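/** @brief Reads the network configuration; delegates to TPerceptronStructure::read. */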
CString CPerceptronNN::read (CRox *xml)
{
    return TPerceptronStructure::read (xml);
}

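/** @brief Prints the network configuration; delegates to TPerceptronStructure::print. */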
CXml * CPerceptronNN::print () const
{
    return TPerceptronStructure::print ();
}