1 /*****************************************************************************
2 FILE : $Source: /projects/higgs1/SNNS/CVS/SNNS/kernel/sources/kr_td.c,v $
3 SHORTNAME :
4 SNNS VERSION : 4.2
5
6 PURPOSE : SNNS-Kernel Learning Functions for Time Delay networks
7 NOTES : with following learning functions:
8 - Backpropagation
9
10 AUTHOR : Oliver Schnabel, Guenter Mamier
11 DATE :
12
13 CHANGED BY : Michael Vogt, Guenter Mamier
14 RCS VERSION : $Revision: 2.12 $
15 LAST CHANGE : $Date: 1998/03/13 16:23:57 $
16
17 Copyright (c) 1990-1995 SNNS Group, IPVR, Univ. Stuttgart, FRG
18 Copyright (c) 1996-1998 SNNS Group, WSI, Univ. Tuebingen, FRG
19
20 ******************************************************************************/
21 #include <config.h>
22
23 #include <stdio.h>
24 #include <math.h>
25 #ifdef HAVE_VALUES_H
26 #include <values.h>
27 #endif
28
29
30 #include "kr_typ.h" /* Kernel Types and Constants */
31 #include "kr_const.h" /* Constant Declarators for SNNS-Kernel */
32 #include "kr_def.h" /* Default Values */
33 #include "kernel.h" /* kernel function prototypes */
34 #include "kr_mac.h" /* Kernel Macros */
35 #include "kr_newpattern.h"
36
37 #include "kr_td.ph"
38
39
40
41 /*****************************************************************************
42 FUNCTION : initializeTDBackprop
43
44 PURPOSE : initialize the learning algorithm TD-backprop
45 NOTES :
46
47 RETURNS :
48 UPDATE : 19.02.1993
49 ******************************************************************************/
initializeTDBackprop(void)50 static krui_err initializeTDBackprop(void)
51 {
52 register FlagWord flags;
53 register struct Link *link_ptr;
54 register struct Unit *unit_ptr;
55 register struct Site *site_ptr;
56
57 /* set unit's bias to zero */
58 FOR_ALL_UNITS( unit_ptr ){
59 flags = unit_ptr->flags;
60
61 if ( (unit_ptr->flags & UFLAG_IN_USE) == UFLAG_IN_USE){
62 if (flags & UFLAG_SITES){ /* unit has sites */
63 FOR_ALL_SITES_AND_LINKS( unit_ptr, site_ptr, link_ptr )
64 link_ptr->value_b =
65 link_ptr->value_c =
66 unit_ptr->value_a =
67 unit_ptr->value_b =
68 unit_ptr->value_c = (FlintType) 0;
69 }else{ /* TD-units have no sites: direct links */
70 if (flags & UFLAG_DLINKS){ /* unit has direct links */
71 FOR_ALL_LINKS( unit_ptr, link_ptr )
72 link_ptr->value_b =
73 link_ptr->value_c =
74 unit_ptr->value_a =
75 unit_ptr->value_b =
76 unit_ptr->value_c = (FlintType) 0;
77 }
78 } /* else */
79 } /* if */
80 } /* FOR_ALL_UNITS */
81 return( KRERR_NO_ERROR );
82 }
83
84
85 /*****************************************************************************
86 FUNCTION : propagateTDNetForward
87
88 PURPOSE : topological TimeDelay forward propagation
89 NOTES : needs only the weight matrix of one receptive field for
90 propagating one pattern through the net
91 If the provided pattern_no is < 0, no pattern is loaded into
92 the input layer but all other layers are propagated as usual
93 RETURNS :
94 UPDATE : 19.02.1993
95 ******************************************************************************/
96 extern FlintType OUT_Custom_Python(FlintType act);
97
void propagateTDNetForward(int pattern_no, int sub_pat_no)
{
    register struct Unit *unit_ptr;
    register struct Link *link_ptr;
    register Patterns in_pat;
    register TopoPtrArray topo_ptr;
    int i;

    if (pattern_no >= 0){
        /* calculate startaddress for input pattern array */

        in_pat = kr_getSubPatData(pattern_no,sub_pat_no,INPUT,NULL);
        topo_ptr = topo_ptr_array;

        /* copy pattern into input unit's activation and calculate output of the
           input units. */
        /* order of the topoptrarray: input-, hidden- and then outputunits;
           each group is terminated by a NULL pointer */

        unit_ptr = *++topo_ptr;       /* skip leading entry, first input unit */
        while (unit_ptr != (struct Unit *) NULL){
            /* topo_ptr points to a (topological sorted) unit stucture
               (input units first) */

            if (unit_ptr->out_func == OUT_IDENTITY){
                /* identity output function: don't call the output function */
                unit_ptr->Out.output = unit_ptr->act = *in_pat++;
            }else if(unit_ptr->out_func == OUT_Custom_Python){
                /* output function implemented in Python: dispatch through
                   the kernel's Python bridge */
                unit_ptr->Out.output =
                    kr_PythonOutFunction(unit_ptr->python_out_func,
                                         unit_ptr->act = *in_pat++);
            }else{
                /* no identity output function: calculate unit's output also */
                unit_ptr->Out.output =
                    (*unit_ptr->out_func) (unit_ptr->act = *in_pat++);
            } /*if*/
            unit_ptr = *++topo_ptr;
        }
    }else{
        /* pattern_no < 0: no pattern is loaded; leave the input layer as it
           is and only advance topo_ptr past the input units so the layers
           below can be propagated as usual */
        topo_ptr = topo_ptr_array;
        unit_ptr = *++topo_ptr;
        while (unit_ptr != (struct Unit *) NULL)
        {
            unit_ptr = *++topo_ptr;
        }
    }

    /* Propagate input to hidden, hidden to hidden and hidden to output:
       the two iterations handle the hidden group and then the output group
       of the topological array */

    for (i=0; i<2; i++){
        unit_ptr = *++topo_ptr;      /* step over the NULL group separator */
        while (unit_ptr != NULL){
            /* initialization for propagating hidden units */
            /* clear error values (accumulated later by backward propagation) */
            unit_ptr->Aux.flint_no = 0.0;

            if (UNIT_HAS_DIRECT_INPUTS(unit_ptr)){
                /* this is a reference unit, initialize link weight change */
                /* and counter of link usage */
                FOR_ALL_LINKS(unit_ptr, link_ptr){
                    link_ptr->value_b = link_ptr->value_c = 0.0;
                }
            }

            /* reset bias-change and td-step-counter before each learning epoch */
            unit_ptr->value_b = unit_ptr->value_c = 0.0;

            /* compute the new activation, then derive the output value */
            unit_ptr->act = (*unit_ptr->act_func) (unit_ptr);
            if (unit_ptr->out_func == OUT_IDENTITY){
                /* identity output function: don't call the output function */
                unit_ptr->Out.output = unit_ptr->act;
            }else if(unit_ptr->out_func == OUT_Custom_Python){
                /* output function implemented in Python: dispatch through
                   the kernel's Python bridge */
                unit_ptr->Out.output =
                    kr_PythonOutFunction(unit_ptr->python_out_func,
                                         unit_ptr->act);
            }else{
                /* no identity output function: calculate unit's output also */
                unit_ptr->Out.output = (*unit_ptr->out_func) (unit_ptr->act);
            }
            unit_ptr = *++topo_ptr;
        }
    }
} /*endfunction*/
181
182
183
184 /*****************************************************************************
185 FUNCTION : propagateTDNetBackward
186
187 PURPOSE : Time Delay Backward error propagation (topological).
188 NOTES : Start calculating the average of the corresponding links in
189 all TD-steps. This average is used to update the links of the
190 1st. receptive field.
191 RETURNS : network error
192 UPDATE : 19.02.1993
193 ******************************************************************************/
static float propagateTDNetBackward(int pattern_no, int sub_pat_no,
                                    float learn_parameter,
                                    float delta_max)
{
    register struct Link *link_ptr;
    register struct Site *site_ptr;
    register struct Unit *unit_ptr, *unit_ptr1 ;
    register struct Unit *ref_unit;
    register Patterns out_pat;
    register float error, sum_error, eta, devit, learn_error;
    register TopoPtrArray topo_ptr;
    int last_log_layer, i;   /* NOTE(review): last_log_layer is assigned below
                                but never read — candidate for removal */
    int size;

    sum_error = 0.0;           /* reset network error */
    eta = learn_parameter;     /* store learn_parameter in CPU register */

    /* calculate address of the output pattern (with number pattern_no + 1) */

    topo_ptr = topo_ptr_array + (no_of_topo_units + 2);
    last_log_layer = (*topo_ptr)->lln;
    out_pat = kr_getSubPatData(pattern_no,sub_pat_no,OUTPUT,&size);
    out_pat += size;           /* target values are consumed back to front */

    /* calculate output units only: begin at the end of topo_pointer_array */
    unit_ptr = *topo_ptr;
    while (unit_ptr != (struct Unit *) NULL){
        devit = *(--out_pat) - unit_ptr->Out.output; /* calc. deviation */

        /* deviations within delta_max are tolerated: no error, no update */
        if ( (float) fabs( devit ) <= delta_max ){
            unit_ptr = *--topo_ptr;
            continue;
        }

        sum_error += devit * devit; /* sum up the error of the network */

        /* calculate error for output units */
        /* output layer cannot have time delay structure, so no
           distinction is necessary*/
        error = devit * (unit_ptr->act_deriv_func) ( unit_ptr );

        /* calc. the error for adjusting weights and bias of pred. units */
        learn_error = eta * error;

        /* accumulate bias change and count this TD step; the averaged
           update is applied in the final pass below */
        unit_ptr->value_b += learn_error;
        unit_ptr->value_c += 1.0;

        if (UNIT_HAS_DIRECT_INPUTS( unit_ptr )){ /* the unit has direct links */
            /* error must be saved for each unit of the hiddenlayer */
            FOR_ALL_LINKS( unit_ptr, link_ptr ){
                /* accumulate link weight changes and calc. sum of errors
                   of pred. units */
                link_ptr->to->Aux.flint_no += link_ptr->weight * error;
                link_ptr->value_b += learn_error * link_ptr->to->Out.output;
                link_ptr->value_c += 1.0;
            }
        }else{ /* the unit has sites: not necessary for TD-Network */
            FOR_ALL_SITES_AND_LINKS( unit_ptr, site_ptr, link_ptr ){
                /* adjust link weights immediately and calc. sum of errors
                   of pred. units */
                link_ptr->to->Aux.flint_no += link_ptr->weight * error;
                link_ptr->weight += learn_error * link_ptr->to->Out.output;
            }
        }

        unit_ptr = *--topo_ptr;
    } /* while */


    /* calculate hidden units only. add the weight changes of all receptive
       fields ; stored for every link in the Linkstructure value_c of only the
       first recept. field!
     */

    unit_ptr = *--topo_ptr;
    while (unit_ptr != (struct Unit *) NULL){
        /* calc. the error of the hidden units */
        error = (unit_ptr->act_deriv_func) (unit_ptr) * unit_ptr->Aux.flint_no;

        /* calc. the error for adjusting weights and bias of pred. units */
        learn_error = eta * error;


        if (unit_ptr->TD.td_connect_typ == 1){
            /* this is a time delay connection type layer */

            /* locate the reference unit of the first receptive field that
               this unit shares its weights with */
            ref_unit = *(unit_ptr->TD.my_topo_ptr + unit_ptr->TD.target_offset);
            /* accumulate bias change on the reference unit */
            ref_unit->value_b += learn_error;
            ref_unit->value_c += 1.0;

            if (UNIT_HAS_DIRECT_INPUTS( ref_unit )){
                /* the unit has direct links */

                FOR_ALL_LINKS( ref_unit, link_ptr ) {
                    /* adjust link weights and calc. sum of err of pred. units*/
                    /* unit_ptr1 points to the actual predecessor unit,
                       determined by the actual link */
                    unit_ptr1 = *(link_ptr->to->TD.my_topo_ptr
                                  + unit_ptr->TD.source_offset);

                    if (IS_HIDDEN_UNIT (unit_ptr1)) {
                        /* this unit is a hidden unit: add the error from
                           previous units dependent on type of this predecessor
                           unit */
                        (unit_ptr1)->Aux.flint_no += link_ptr->weight * error;
                    }
                    /* immediately updating the links cannot fit for TD
                       Networks! Add the delta(ij) of all td_steps in the
                       Linkarray(value_c) of the first recept. field */
                    link_ptr->value_b += learn_error * (unit_ptr1)->Out.output;
                    link_ptr->value_c += 1.0;
                }
            }
        }else{
            /* fully connected layers */
            /* immediate update of all physical links */
            unit_ptr->bias += learn_error;

            if (UNIT_HAS_DIRECT_INPUTS( unit_ptr )){
                /* the unit has direct links */
                /* error must be saved for each unit of the hiddenlayer */
                FOR_ALL_LINKS( unit_ptr, link_ptr ){
                    /* adjust link weights and calc. sum of err of pred units*/
                    if (IS_HIDDEN_UNIT (link_ptr->to))
                        link_ptr->to->Aux.flint_no += link_ptr->weight * error;

                    link_ptr->weight += learn_error * link_ptr->to->Out.output;
                }
            }else{ /* the unit has sites: not necessary for TD-Network */
                FOR_ALL_SITES_AND_LINKS( unit_ptr, site_ptr, link_ptr ){
                    /* adjust link weights and calc. sum of errors of the
                       predecessor units */
                    if (IS_HIDDEN_UNIT (link_ptr->to))
                        link_ptr->to->Aux.flint_no += link_ptr->weight * error;
                    link_ptr->weight += learn_error * link_ptr->to->Out.output;
                }
            }
        }
        unit_ptr = *--topo_ptr;
    } /* while */


    /* update receptive fields: propagate and calculate all featureunits of
       the first td-step of each layer. */
    /* topo_ptr points to the NULL pointer between input and hidden units */

    for (i=0; i<2; i++){
        unit_ptr = *++topo_ptr;
        while (unit_ptr != NULL){
            if (unit_ptr->TD.td_connect_typ==1 &&
                UNIT_HAS_DIRECT_INPUTS(unit_ptr) &&
                unit_ptr->value_c > 0.0){
                /* this is a reference unit of a time delay layer */

                /* update bias of reference unit by average bias change */
                unit_ptr->bias += unit_ptr->value_b / unit_ptr->value_c;

                FOR_ALL_LINKS( unit_ptr, link_ptr ){
                    /* apply the average link weight change of all TD-steps */
                    link_ptr->weight += link_ptr->value_b / link_ptr->value_c;
                } /*for_all_links*/
            }
            unit_ptr = *++topo_ptr;
        }
    }
    return( sum_error ); /* return the error of the network */
}
361
362
363
364 /*****************************************************************************
365 FUNCTION : LEARN_TDbackprop
366
367 PURPOSE : Time Delay Backpropagation Learning Function
368 NOTES : Input Parameters: 1 : learning parameter
369 2 : delta max
370
371 Output Parameters: 1 : error of the network (sum of all cycles)
372
373 RETURNS :
374 UPDATE : 19.02.1993
375 ******************************************************************************/
LEARN_TDbackprop(int start_pattern,int end_pattern,float parameterInArray[],int NoOfInParams,float ** parameterOutArray,int * NoOfOutParams)376 krui_err LEARN_TDbackprop( int start_pattern, int end_pattern,
377 float parameterInArray[], int NoOfInParams,
378 float * *parameterOutArray, int *NoOfOutParams )
379
380 {
381 static float OutParameter[1]; /* OutParameter[0] stores learning error */
382 int ret_code, pattern_no, sub_pat_no;
383 struct Unit *unit_ptr;
384
385 if (NoOfUnits == 0)
386 return( KRERR_NO_UNITS ); /* No Units defined */
387 if (NoOfInParams < 1) /* # has to be changed (must be 2) # */
388 return( KRERR_PARAMETERS ); /* Not enough input parameters */
389
390 *NoOfOutParams = 1; /* One return value is available (the learning error)*/
391 *parameterOutArray = OutParameter; /* set the output parameter reference */
392 ret_code = KRERR_NO_ERROR; /* reset return code */
393
394 if (NetModified || (TopoSortID != TOPOLOGIC_LOGICAL)){
395 /* Net has been modified or topologic array isn't initialized */
396 /* check the topology of the network */
397 /* first: save the logical layer numbers, restore them after check */
398 FOR_ALL_UNITS(unit_ptr)
399 unit_ptr -> Aux.int_no = unit_ptr -> lln;
400 ret_code = kr_topoCheck();
401
402 FOR_ALL_UNITS(unit_ptr)
403 unit_ptr -> lln = unit_ptr -> Aux.int_no;
404 if (ret_code < KRERR_NO_ERROR)
405 return( ret_code ); /* an error has occured */
406 if (ret_code < 2)
407 return( KRERR_NET_DEPTH ); /* the network has less then 2 layers */
408
409 /* count the no. of I/O units and check the patterns */
410 ret_code = kr_IOCheck();
411 if (ret_code < KRERR_NO_ERROR)
412 return( ret_code );
413
414 ret_code = kr_topoSort( TOPOLOGIC_LOGICAL );
415 if ((ret_code != KRERR_NO_ERROR) && (ret_code != KRERR_DEAD_UNITS))
416 return( ret_code );
417
418 #ifdef DEBUG
419 /* M.V. */
420 j=1;
421 while (krui_setCurrentUnit(j) == KRERR_NO_ERROR) {
422 printf("Unit %d: lln = %d, lun = %d\n",j,
423 kr_getUnitPtr(j) -> lln, kr_getUnitPtr(j) -> lun);
424 j++;
425 }
426 #endif
427
428 NetModified = FALSE;
429 }
430
431 if (NetInitialize || LearnFuncHasChanged){
432 /* Net has been modified or initialized, initialize TDbackprop now */
433 ret_code = initializeTDBackprop();
434 if (ret_code != KRERR_NO_ERROR)
435 return( ret_code );
436 }
437
438
439 /* compute the necessary sub patterns */
440
441 KernelErrorCode = kr_initSubPatternOrder(start_pattern,end_pattern);
442 if(KernelErrorCode != KRERR_NO_ERROR)
443 return (KernelErrorCode);
444
445
446 NET_ERROR(OutParameter) = 0.0; /* reset network error value */
447
448 while(kr_getSubPatternByOrder(&pattern_no,&sub_pat_no)){
449
450 propagateTDNetForward(pattern_no,sub_pat_no); /* Forward propagation */
451
452 /* Backward propagation */
453 /* 1st parameter is the learning parameter
454 2nd parameter is the max. devitation between output pattern and
455 the output of the output unit (delta max)
456 */
457 NET_ERROR( OutParameter ) +=
458 propagateTDNetBackward(pattern_no,sub_pat_no,
459 LEARN_PARAM1( parameterInArray ),
460 LEARN_PARAM2( parameterInArray ) );
461 }
462
463 return( ret_code );
464 }
465
466
467
468 /*****************************************************************************
469 FUNCTION : LEARN_TDBP_McClelland
470
471 PURPOSE : Time Delay Backpropagation Learning Function With McClelland
472 Error function: E = sum(log(1-(oi-ti)^2))
473
474 NOTES : Input Parameters: 1 : learning parameter
475 2 : delta max
476
477 Output Parameters: 1 : error of the network (sum of all cycles)
478
479 RETURNS :
480 UPDATE : 19.02.1993
481 ******************************************************************************/
LEARN_TDBP_McClelland(int start_pattern,int end_pattern,float parameterInArray[],int NoOfInParams,float ** parameterOutArray,int * NoOfOutParams)482 krui_err LEARN_TDBP_McClelland( int start_pattern, int end_pattern,
483 float parameterInArray[], int NoOfInParams,
484 float * *parameterOutArray, int *NoOfOutParams )
485
486 {
487 static float OutParameter[1]; /* OutParameter[0] stores learning error*/
488 int ret_code, pattern_no, sub_pat_no;
489 struct Unit *unit_ptr;
490
491 if (NoOfUnits == 0)
492 return( KRERR_NO_UNITS ); /* No Units defined */
493 if (NoOfInParams < 1) /* # has to be changed (must be 2) # */
494 return( KRERR_PARAMETERS ); /* Not enough input parameters */
495
496 *NoOfOutParams = 1; /* One return value is available (the learning error)*/
497 *parameterOutArray = OutParameter; /* set the output parameter reference */
498 ret_code = KRERR_NO_ERROR; /* reset return code */
499
500 if (NetModified || (TopoSortID != TOPOLOGIC_LOGICAL)){
501 /* Net has been modified or topologic array isn't initialized */
502 /* check the topology of the network */
503 /* first: save the logical layer numbers, restore them after check */
504 FOR_ALL_UNITS(unit_ptr)
505 unit_ptr -> Aux.int_no = unit_ptr -> lln;
506 ret_code = kr_topoCheck();
507
508 FOR_ALL_UNITS(unit_ptr)
509 unit_ptr -> lln = unit_ptr -> Aux.int_no;
510 if (ret_code < KRERR_NO_ERROR)
511 return( ret_code ); /* an error has occured */
512 if (ret_code < 2)
513 return( KRERR_NET_DEPTH ); /* the network has less then 2 layers */
514
515 /* count the no. of I/O units and check the patterns */
516 ret_code = kr_IOCheck();
517 if (ret_code < KRERR_NO_ERROR)
518 return( ret_code );
519
520 ret_code = kr_topoSort( TOPOLOGIC_LOGICAL );
521 if ((ret_code != KRERR_NO_ERROR) && (ret_code != KRERR_DEAD_UNITS))
522 return( ret_code );
523
524 #ifdef DEBUG
525 /* M.V. */
526 j=1;
527 while (krui_setCurrentUnit(j) == KRERR_NO_ERROR) {
528 printf("Unit %d: lln = %d, lun = %d\n", j,
529 kr_getUnitPtr(j) -> lln, kr_getUnitPtr(j) -> lun);
530 j++;
531 }
532 #endif
533
534 NetModified = FALSE;
535 }
536
537 if (NetInitialize || LearnFuncHasChanged){
538 /* Net has been modified or initialized, initialize TDbackprop now */
539 ret_code = initializeTDBackprop();
540 if (ret_code != KRERR_NO_ERROR)
541 return( ret_code );
542 }
543
544
545 /* compute the necessary sub patterns */
546
547 KernelErrorCode = kr_initSubPatternOrder(start_pattern,end_pattern);
548 if(KernelErrorCode != KRERR_NO_ERROR)
549 return (KernelErrorCode);
550
551
552 NET_ERROR(OutParameter) = 0.0; /* reset network error value */
553
554 while(kr_getSubPatternByOrder(&pattern_no,&sub_pat_no)){
555
556 propagateTDNetForward(pattern_no,sub_pat_no); /* Forward propagation */
557
558 /* Backward propagation */
559 /* 1st parameter is the learning parameter
560 2nd parameter is the max. devitation between output pattern and
561 the output of the output unit (delta max)
562 */
563 NET_ERROR( OutParameter ) +=
564 propagateTDNetBackMcClelland(pattern_no,sub_pat_no,
565 LEARN_PARAM1( parameterInArray ),
566 LEARN_PARAM2( parameterInArray ) );
567 }
568
569 return( ret_code );
570 }
571
572
573 /*****************************************************************************
574 FUNCTION : propagateTDNetBackMcClelland
575
576 PURPOSE : Time Delay Backward error propagation (topological).
577 NOTES : Start calculating the average of the corresponding links in
578 all TD-steps. This average is used to update the links of the
579 1st. receptive field.
580 RETURNS : network error
581 UPDATE : 19.02.1993
582 ******************************************************************************/
static float propagateTDNetBackMcClelland(int pattern_no, int sub_pat_no,
                                          float learn_parameter,
                                          float delta_max)
{
    register struct Link *link_ptr;
    register struct Site *site_ptr;
    register struct Unit *unit_ptr, *unit_ptr1 ;
    register struct Unit *ref_unit;
    register Patterns out_pat;
    register float error, sum_error, eta, devit, learn_error;
    register TopoPtrArray topo_ptr;
    int last_log_layer, i;   /* NOTE(review): last_log_layer is assigned below
                                but never read — candidate for removal */
    int size;


    sum_error = 0.0;           /* reset network error */
    eta = learn_parameter;     /* store learn_parameter in CPU register */

    /* calculate address of the output pattern (with number pattern_no + 1) */

    topo_ptr = topo_ptr_array + (no_of_topo_units + 2);
    last_log_layer = (*topo_ptr)->lln;
    out_pat = kr_getSubPatData(pattern_no,sub_pat_no,OUTPUT,&size);
    out_pat += size;           /* target values are consumed back to front */

    /* calculate output units only: begin at the end of topo_pointer_array */
    unit_ptr = *topo_ptr;
    while (unit_ptr != (struct Unit *) NULL){
        devit = *(--out_pat) - unit_ptr->Out.output; /* calc. deviation */

        /* deviations within delta_max are tolerated: no error, no update */
        if ( (float) fabs( devit ) <= delta_max ){
            unit_ptr = *--topo_ptr;
            continue;
        }

        /* NOTE(review): the function header advertises the McClelland error
           E = sum(log(1-(oi-ti)^2)) but the code computes -log10(1-|devit|);
           confirm which form is intended before changing anything */
        sum_error += -log10(1- fabs(devit)); /* sum up error of the network */

        /* calculate error for output units */
        /* output layer cannot have time delay structure, so no
           distinction is necessary*/
        error = log10(1- fabs(devit)) * (unit_ptr->act_deriv_func) (unit_ptr);
        if(devit > 0) error = -error;  /* restore the sign of the deviation */

        /* calc. error for adjusting weights and bias of predecessor units */
        learn_error = eta * error;

        /* accumulate bias change and count this TD step; the averaged
           update is applied in the final pass below */
        unit_ptr->value_b += learn_error;
        unit_ptr->value_c += 1.0;

        if (UNIT_HAS_DIRECT_INPUTS( unit_ptr )){
            /* the unit has direct links */
            /* error must be saved for each unit of the hiddenlayer */
            FOR_ALL_LINKS( unit_ptr, link_ptr ){
                /* accumulate link weight changes and calc. sum of errors
                   of pred. units */
                link_ptr->to->Aux.flint_no += link_ptr->weight * error;
                link_ptr->value_b += learn_error * link_ptr->to->Out.output;
                link_ptr->value_c += 1.0;
            }
        }else{ /* the unit has sites: not necessary for TD-Network */
            FOR_ALL_SITES_AND_LINKS( unit_ptr, site_ptr, link_ptr ){
                /* adjust link weights immediately and calc. sum of errors of
                   predecessor units */
                link_ptr->to->Aux.flint_no += link_ptr->weight * error;
                link_ptr->weight += learn_error * link_ptr->to->Out.output;
            }
        }

        unit_ptr = *--topo_ptr;
    } /* while */


    /* calculate hidden units only. add the weightchanges of all receptive
       fields ; stored for every link in the Linkstructure value_c of only
       the first recept. field!
     */

    unit_ptr = *--topo_ptr;
    while (unit_ptr != (struct Unit *) NULL){
        /* calc. the error of the hidden units */
        error = (unit_ptr->act_deriv_func) (unit_ptr) * unit_ptr->Aux.flint_no;

        /* calc. error for adjusting weights and bias of predecessor units */
        learn_error = eta * error;


        if (unit_ptr->TD.td_connect_typ == 1){
            /* this is a time delay connection type layer */

            /* locate the reference unit of the first receptive field that
               this unit shares its weights with */
            ref_unit = *(unit_ptr->TD.my_topo_ptr + unit_ptr->TD.target_offset);
            /* accumulate bias change on the reference unit */
            ref_unit->value_b += learn_error;
            ref_unit->value_c += 1.0;

            if (UNIT_HAS_DIRECT_INPUTS( ref_unit )){
                /* the unit has direct links */

                FOR_ALL_LINKS( ref_unit, link_ptr ){
                    /* adjust link weights and calc. sum of err of pred. units*/
                    /* unit_ptr1 points to the actual predecessor unit,
                       determined by the actual link */
                    unit_ptr1 = *(link_ptr->to->TD.my_topo_ptr
                                  + unit_ptr->TD.source_offset);

                    if (IS_HIDDEN_UNIT (unit_ptr1)){
                        /* this unit is a hidden unit: add the error from
                           previous units dependent on type of this predecessor
                           unit */
                        (unit_ptr1)->Aux.flint_no += link_ptr->weight * error;
                    }
                    /* immediately updating the links cannot fit for TD
                       Networks! Add the delta(ij) of all td_steps in the
                       Linkarray(value_c) of the first recept. field */
                    link_ptr->value_b += learn_error * (unit_ptr1)->Out.output;
                    link_ptr->value_c += 1.0;
                }
            }
        }else{
            /* fully connected layers */
            /* immediate update of all physical links */
            unit_ptr->bias += learn_error;

            if (UNIT_HAS_DIRECT_INPUTS( unit_ptr )){
                /* the unit has direct links */
                /* error must be saved for each unit of the hiddenlayer */
                FOR_ALL_LINKS( unit_ptr, link_ptr ){
                    /* adjust link weights and calc. sum of
                       errors of predecessor units */
                    if (IS_HIDDEN_UNIT (link_ptr->to))
                        link_ptr->to->Aux.flint_no += link_ptr->weight * error;

                    link_ptr->weight += learn_error * link_ptr->to->Out.output;
                }
            }else{/* the unit has sites: not necessary for TD-Network */
                FOR_ALL_SITES_AND_LINKS( unit_ptr, site_ptr, link_ptr ){
                    /* adjust link weights and calc. sum of errors of the
                       predecessor units */
                    if (IS_HIDDEN_UNIT (link_ptr->to))
                        link_ptr->to->Aux.flint_no += link_ptr->weight * error;
                    link_ptr->weight += learn_error * link_ptr->to->Out.output;
                }
            }
        }
        unit_ptr = *--topo_ptr;
    } /* while */


    /* update receptive fields: propagate and calculate all featureunits of
       the first td-step of each layer. */
    /* topo_ptr points to the NULL pointer between input and hidden units */

    for (i=0; i<2; i++){
        unit_ptr = *++topo_ptr;
        while (unit_ptr != NULL){
            if (unit_ptr->TD.td_connect_typ==1 &&
                UNIT_HAS_DIRECT_INPUTS(unit_ptr) &&
                unit_ptr->value_c > 0.0){
                /* this is a reference unit of a time delay layer */

                /* update bias of reference unit by average bias change */
                unit_ptr->bias += unit_ptr->value_b / unit_ptr->value_c;

                FOR_ALL_LINKS( unit_ptr, link_ptr ){
                    /* apply the average link weight change of all TD-steps */
                    link_ptr->weight += link_ptr->value_b / link_ptr->value_c;
                } /*for_all_links*/
            }
            unit_ptr = *++topo_ptr;
        }
    }
    return( sum_error );/* return the error of the network */
}
755
756
757
758
759 /*****************************************************************************
760 FUNCTION : TEST_TDbackprop
761
762 PURPOSE : Time Delay Backpropagation testing Function
763 NOTES : Input Parameters: 2 : delta max
764
765 Output Parameters: 1 : error of the network (sum of all cycles)
766
767 RETURNS :
768 UPDATE : 21.01.95
769 ******************************************************************************/
TEST_TDbackprop(int start_pattern,int end_pattern,float parameterInArray[],int NoOfInParams,float ** parameterOutArray,int * NoOfOutParams)770 krui_err TEST_TDbackprop( int start_pattern, int end_pattern,
771 float parameterInArray[], int NoOfInParams,
772 float * *parameterOutArray, int *NoOfOutParams )
773
774 {
775 static float OutParameter[1]; /* OutParameter[0] stores learning error */
776 int ret_code, pattern_no, sub_pat_no;
777
778 if (NoOfInParams < 1) /* # has to be changed (must be 2) # */
779 return( KRERR_PARAMETERS ); /* Not enough input parameters */
780
781 *NoOfOutParams = 1; /* One return value is available (the learning error)*/
782 *parameterOutArray = OutParameter; /* set the output parameter reference */
783 ret_code = KRERR_NO_ERROR; /* reset return code */
784
785
786 /* compute the necessary sub patterns */
787
788 KernelErrorCode = kr_initSubPatternOrder(start_pattern,end_pattern);
789 if(KernelErrorCode != KRERR_NO_ERROR)
790 return (KernelErrorCode);
791
792
793 NET_ERROR(OutParameter) = 0.0; /* reset network error value */
794
795 while(kr_getSubPatternByOrder(&pattern_no,&sub_pat_no)){
796
797 propagateTDNetForward(pattern_no,sub_pat_no); /* Forward propagation */
798
799 /* Backward propagation */
800 /* 1st parameter is the learning parameter
801 2nd parameter is the max. devitation between output pattern and
802 the output of the output unit (delta max)
803 */
804 NET_ERROR( OutParameter ) +=
805 testTDNetBackward(pattern_no,sub_pat_no,
806 LEARN_PARAM1( parameterInArray ),
807 LEARN_PARAM2( parameterInArray ) );
808 }
809
810 return( ret_code );
811 }
812
813
814
815
816 /*****************************************************************************
817 FUNCTION : testTDNetBackward
818
819 PURPOSE : Error Calculation for Time Delay Backward error propagation (topological).
820 NOTES : Start calculating the average of the corresponding links in
821 all TD-steps. This average is used to update the links of the
822 1st. receptive field.
823 RETURNS : network error
824 UPDATE : 21.01.95
825 ******************************************************************************/
testTDNetBackward(int pattern_no,int sub_pat_no,float learn_parameter,float delta_max)826 static float testTDNetBackward(int pattern_no, int sub_pat_no,
827 float learn_parameter,
828 float delta_max)
829 {
830 register struct Unit *unit_ptr;
831 register Patterns out_pat;
832 register float sum_error, eta, devit;
833 register TopoPtrArray topo_ptr;
834 int last_log_layer;
835 int size;
836
837 sum_error = 0.0; /* reset network error */
838 eta = learn_parameter; /* store learn_parameter in CPU register */
839
840 /* calculate address of the output pattern (with number pattern_no + 1) */
841
842 topo_ptr = topo_ptr_array + (no_of_topo_units + 2);
843 last_log_layer = (*topo_ptr)->lln;
844 out_pat = kr_getSubPatData(pattern_no,sub_pat_no,OUTPUT,&size);
845 out_pat += size;
846
847 /* calculate output units only: begin at the end of topo_pointer_array */
848 unit_ptr = *topo_ptr;
849 while (unit_ptr != (struct Unit *) NULL){
850 devit = *(--out_pat) - unit_ptr->Out.output; /* calc. devitation */
851
852 /* der Fehler wird auch summiert, wenn die Gewichtsfaktoren nicht adaptiert werden */
853 sum_error += devit * devit; /* sum up the error of the network */
854 if ( (float) fabs( devit ) <= delta_max ){
855 devit = 0; /* joe: sonst koennen Fehler auftreten*/
856 }
857 unit_ptr = *--topo_ptr;
858
859 }
860
861 return( sum_error ); /* return the error of the network */
862 }
863
864