1 /*****************************************************************************
2   FILE           : $Source: /projects/higgs1/SNNS/CVS/SNNS/kernel/sources/learn_f.c,v $
3   SHORTNAME      : learn_f
4   SNNS VERSION   : 4.2
5 
6   PURPOSE        : SNNS-Kernel Learning Functions
7   NOTES          : with following learning functions:
8 	           - Backpropagation
9 	           - Backpropagation with momentum term
10                    - Quickprop
11 	           - Counterpropagation
12 	           - BackPercolation
13                    - Backpropagation through time
14                    - Batch backpropagation through time
15                    - Quickprop through time
16 		   - Kohonen (by Univ. of Tuebingen)
17 
18   AUTHOR         : Niels Mache
19   DATE           : 01.10.90
20 
21   CHANGED BY     : Sven Doering, Michael Vogt, Martin Reczko ,Guenter Mamier
22   RCS VERSION    : $Revision: 2.46 $
23   LAST CHANGE    : $Date: 1998/05/20 09:35:23 $
24 
25     Copyright (c) 1990-1995  SNNS Group, IPVR, Univ. Stuttgart, FRG
26     Copyright (c) 1996-1998  SNNS Group, WSI, Univ. Tuebingen, FRG
27 
28 ******************************************************************************/
29 #include <config.h>
30 #include <stdlib.h>
31 #include <stdio.h>
32 #include <string.h>
33 #include <math.h>
34 #ifdef HAVE_VALUES_H
35 #include <values.h>
36 #else
37 #include <limits.h>
38 #endif
39 #include <ctype.h>
40 
41 #include <limits.h>
42 #define MAXINT INT_MAX
43 
44 #include "kr_typ.h"		/* Kernel Types and Constants  */
45 #include "kr_const.h"		/* Constant Declarators for SNNS-Kernel  */
46 #include "kr_def.h"		/* Default Values  */
47 
48 #ifndef rand
49 #include "random.h"	 /*  Randomize Library Function Prototypes  */
50 #endif
51 
52 #include "kernel.h"		/* kernel function prototypes  */
53 #include "kr_mac.h"		/* Kernel Macros   */
54 #include "glob_typ.h"
55 #include "kr_ui.h"
56 #include "kr_art.h"
57 #include "kr_art1.h"
58 #include "kr_art2.h"
59 #include "kr_amap.h"
60 #include "krart_df.h"
61 #include "kr_newpattern.h"
62 #include "kr_JordElm.h"
63 #include "prun_f.h"
64 
65 #ifdef PARAGON_KERNEL
66 #include PARAGON_INCLUDE
67 #include "kr_ipdef.h"
68 #endif
69 
70 
71 #include "learn_f.ph"
72 
73 extern FlintType OUT_Custom_Python(register FlintType activation);
74 extern FlintType ACT_Custom_Python(struct Unit * unit_ptr);
75 extern FlintType ACT_DERIV_Custom_Python(struct Unit * unit_ptr);
76 extern FlintType ACT_2_DERIV_Custom_Python(struct Unit * unit_ptr);
77 
78 
79 /*****************************************************************************
80  *****************************************************************************
81 
82   GROUP        : backpropagation learning algorithm
83 
84   AUTHOR       : Niels Mache
85 
86 ******************************************************************************
87 ******************************************************************************/
88 
89 
90 /*****************************************************************************
91   FUNCTION : propagateNetForward
92 
93   PURPOSE  : forward pass for most of the learning algorithms
94   RETURNS  :
95   NOTES    : topological forward propagation
96 
97   UPDATE   : 05.11.1993
98 ******************************************************************************/
99 
propagateNetForward(int pattern_no,int sub_pat_no)100 void propagateNetForward(int pattern_no, int sub_pat_no)
101 {
102     register struct Unit *unit_ptr;
103     register Patterns in_pat;
104     register TopoPtrArray topo_ptr;
105 
106 
107     /* calculate startaddress for input pattern array  */
108     in_pat = kr_getSubPatData(pattern_no,sub_pat_no,INPUT,NULL);
109     if(in_pat == NULL){
110 	KernelErrorCode = KRERR_NP_NO_SUCH_PATTERN;
111 	return;
112     }
113 
114     topo_ptr = topo_ptr_array;
115 
116     /* copy pattern into input unit's activation and calculate output of the
117        input units */
118     while ((unit_ptr = *++topo_ptr) != NULL) {
119 
120 	/* topo_ptr points to a (topological sorted) unit
121 	   stucture (input unit first)  */
122 	if (unit_ptr->out_func == OUT_IDENTITY)
123 	    /* identity output function: there is no need to call the output
124 	       function  */
125 	    unit_ptr->Out.output = unit_ptr->act = *in_pat++;
126 	else if(unit_ptr->out_func == OUT_Custom_Python) {
127 		unit_ptr->Out.output
128 		 = kr_PythonOutFunction(unit_ptr->python_out_func,
129 		                          unit_ptr->act = *in_pat++);
130 	} else
131 	    /* no identity output function: calculate unit's output also  */
132 	    unit_ptr->Out.output
133 		= (*unit_ptr->out_func) (unit_ptr->act = *in_pat++);
134     }
135 
136     /* popagate hidden units  */
137     while ((unit_ptr = *++topo_ptr) != NULL) {	/* topo_ptr points to a
138 						   (topological sorted) unit
139 						   stucture */
140 	/* clear error values  */
141 	unit_ptr->Aux.flint_no = 0.0;
142 
143 	/* calculate the activation value of the unit: call the activation
144 	   function if needed  */
145 	unit_ptr->act = ((unit_ptr->act_func == ACT_Custom_Python) ?
146 			kr_PythonActFunction(unit_ptr->python_act_func,
147 						unit_ptr) :
148 			(*unit_ptr->act_func) (unit_ptr)) ;
149 
150 	if (unit_ptr->out_func == OUT_IDENTITY)
151 	    /* identity output function: there is no need to call the output
152 	       function  */
153 	    unit_ptr->Out.output = unit_ptr->act;
154 	else if(unit_ptr->out_func == OUT_Custom_Python) {
155 		unit_ptr->Out.output
156 		 = kr_PythonOutFunction(unit_ptr->python_out_func,
157 		                          unit_ptr->act);
158 	} else
159 	    /* no identity output function: calculate unit's output also  */
160 	    unit_ptr->Out.output = (*unit_ptr->out_func) (unit_ptr->act);
161     }
162 
163     /* popagate output units  */
164     while ((unit_ptr = *++topo_ptr) != NULL) {	/* topo_ptr points to a
165 						   (topological sorted) unit
166 						   stucture */
167 	/* clear error values  */
168 	unit_ptr->Aux.flint_no = 0.0;
169 
170 	/* calculate the activation value of the unit: call the activation
171 	   function if needed  */
172 	if(unit_ptr->act_func == ACT_Custom_Python) {
173 		unit_ptr->act =
174 			kr_PythonActFunction(unit_ptr->python_act_func,
175 						unit_ptr);
176 	} else unit_ptr->act = (*unit_ptr->act_func) (unit_ptr);
177 
178 	if (unit_ptr->out_func == OUT_IDENTITY)
179 	    /* identity output function: there is no need to call the output
180 	       function  */
181 	    unit_ptr->Out.output = unit_ptr->act;
182 	else if(unit_ptr->out_func == OUT_Custom_Python) {
183 		unit_ptr->Out.output
184 		 = kr_PythonOutFunction(unit_ptr->python_out_func,
185 		                          unit_ptr->act);
186 	} else
187 	    /* no identity output function: calculate unit's output also  */
188 	    unit_ptr->Out.output = (*unit_ptr->out_func) (unit_ptr->act);
189     }
190 }
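
/*****************************************************************************
  NOTE : Layout of topo_ptr_array assumed by the passes in this file.  For a
         net sorted with kr_topoSort(TOPOLOGICAL_FF) the array holds the
         three unit groups separated by NULL pointers, with one leading and
         one trailing NULL -- four NULLs in total, which is why the backward
         passes below start at topo_ptr_array + (no_of_topo_units + 3):

             topo_ptr_array[0]                  : NULL
             topo_ptr_array[1 .. i]             : input units
             topo_ptr_array[i+1]                : NULL
             topo_ptr_array[i+2 .. j]           : hidden units
             topo_ptr_array[j+1]                : NULL
             topo_ptr_array[j+2 ..]             : output units
             topo_ptr_array[no_of_topo_units+3] : NULL

         The forward pass walks this array left to right, the backward
         passes right to left.
******************************************************************************/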


/*****************************************************************************
  FUNCTION : propagateNetBackward2

  PURPOSE  : backward pass of the backprop learning algorithm
  RETURNS  : network error
  NOTES    : network must be topologically sorted

  UPDATE   : 07.02.1994 by Sven Doering
******************************************************************************/
static float propagateNetBackward2(int pattern_no, int sub_pat_no,
                                   float learn_parameter, float delta_max)
{
    register struct Link *link_ptr;
    register struct Site *site_ptr;
    register struct Unit *unit_ptr;
    register Patterns out_pat;
    register float  error, sum_error, eta, devit, learn_error;
    register TopoPtrArray topo_ptr;
    int size;

    sum_error = 0.0;            /* reset network error  */
    eta = learn_parameter;      /* store learn_parameter in CPU register  */

    /* calculate the address of the output pattern (with number
       pattern_no + 1)  */
    out_pat = kr_getSubPatData(pattern_no, sub_pat_no, OUTPUT, &size);
    if (out_pat == NULL) {
        KernelErrorCode = KRERR_NP_NO_SUCH_PATTERN;
        return (-1);
    }
    out_pat += size;


    /* add 3 to no_of_topo_units because the topological array contains 4
       NULL pointers  */
    topo_ptr = topo_ptr_array + (no_of_topo_units + 3);

    /* calculate output units only  */
    while ((unit_ptr = *--topo_ptr) != NULL) {
        devit = *(--out_pat) - unit_ptr->Out.output; /* calc. deviation */
        if ((float) fabs(devit) <= delta_max)
            continue;

        sum_error += devit * devit; /* sum up the error of the network  */

        /* calc. error for output units  */
        error = devit * ((unit_ptr->act_deriv_func == ACT_DERIV_Custom_Python) ?
                         kr_PythonActFunction(unit_ptr->python_act_deriv_func,
                                              unit_ptr) :
                         (unit_ptr->act_deriv_func) (unit_ptr));
        /* calc. the error for adjusting weights and bias of the predecessor
           units  */
        if (IS_SPECIAL_UNIT(unit_ptr))
            learn_error = 0.0;
        else
            learn_error = eta * error;
        /* adjust bias value  */
        unit_ptr->bias += learn_error;

        if (UNIT_HAS_DIRECT_INPUTS(unit_ptr)) {
            /* the unit has direct links  */
            FOR_ALL_LINKS(unit_ptr, link_ptr) { /* adjust links and calc.
                                                   errors of the predecessor
                                                   units  */
                link_ptr->to->Aux.flint_no += link_ptr->weight * error;
                link_ptr->weight += learn_error * link_ptr->to->Out.output;
            }
        } else {                /* the unit has sites  */
            FOR_ALL_SITES_AND_LINKS(unit_ptr, site_ptr, link_ptr) {
                /* adjust links and calc. errors of the predecessor units */
                link_ptr->to->Aux.flint_no += link_ptr->weight * error;
                link_ptr->weight += learn_error * link_ptr->to->Out.output;
            }
        }
    }

    /* calculate hidden units only  */
    while ((unit_ptr = *--topo_ptr) != NULL) {
        /* calc. the error of the (hidden) unit  */
        error = ((unit_ptr->act_deriv_func == ACT_DERIV_Custom_Python) ?
                 kr_PythonActFunction(unit_ptr->python_act_deriv_func,
                                      unit_ptr) :
                 (unit_ptr->act_deriv_func) (unit_ptr)) *
                unit_ptr->Aux.flint_no;
        /* calc. the error for adjusting weights and bias of the predecessor
           units  */
        if (IS_SPECIAL_UNIT(unit_ptr))
            learn_error = 0.0;
        else
            learn_error = eta * error;
        /* adjust bias value  */
        unit_ptr->bias += learn_error;

        if (UNIT_HAS_DIRECT_INPUTS(unit_ptr)) {
            /* the unit has direct links  */
            FOR_ALL_LINKS(unit_ptr, link_ptr) { /* adjust links and calc. sum
                                                   of errors of the
                                                   predecessor units */
                if (IS_HIDDEN_UNIT(link_ptr->to))
                    /* this link points to a hidden unit: sum up the errors
                       from previous units  */
                    link_ptr->to->Aux.flint_no += link_ptr->weight * error;
                link_ptr->weight += learn_error * link_ptr->to->Out.output;
            }
        } else {                /* the unit has sites  */
            FOR_ALL_SITES_AND_LINKS(unit_ptr, site_ptr, link_ptr) {
                /* adjust links and calc. sum of errors of the predecessor
                   units */
                if (IS_HIDDEN_UNIT(link_ptr->to))
                    /* this link points to a hidden unit: sum up the errors
                       from previous units  */
                    link_ptr->to->Aux.flint_no += link_ptr->weight * error;
                link_ptr->weight += learn_error * link_ptr->to->Out.output;
            }
        }
    }

    return (sum_error);         /* return the error of the network */
}
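
/*****************************************************************************
  NOTE : The pass above is the plain online delta rule.  Written out, with
         t_j the target, o_j the output, f' the activation derivative and
         eta the learning rate, it computes

             output unit:  delta_j = (t_j - o_j) * f'(net_j)
                           (skipped entirely if |t_j - o_j| <= delta_max)
             hidden unit:  delta_j = f'(net_j) * SUM_k( w_kj * delta_k )

         and immediately applies

             w_ij   += eta * delta_j * o_i
             bias_j += eta * delta_j

         while accumulating the squared deviation (t_j - o_j)^2 into the
         returned network error.  Aux.flint_no holds the running sum
         SUM_k( w_kj * delta_k ) for each predecessor unit.
******************************************************************************/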


/*****************************************************************************
  FUNCTION : LEARN_backprop

  PURPOSE  : main routine for the backpropagation algorithm
  RETURNS  : kernel error code
  NOTES    : Input Parameters:   1 : learning parameter
                                 2 : delta max

             Output Parameters:  1 : error of the network (sum of all cycles)


  UPDATE   : 05.11.1993
******************************************************************************/
krui_err LEARN_backprop(int start_pattern, int end_pattern,
                        float *parameterInArray, int NoOfInParams,
                        float **parameterOutArray, int *NoOfOutParams)
{
    static float    OutParameter[1];    /* OutParameter[0] stores the
                                           learning error */
    int             pattern_no, sub_pat_no, no_of_layers;


    KernelErrorCode = KRERR_NO_ERROR;   /* reset return code  */

    /* ####  has to be changed (must be 2)  #### */
    if (NoOfInParams < 1) {     /* not enough input parameters  */
        KernelErrorCode = KRERR_PARAMETERS;
        return (KernelErrorCode);
    }
    *NoOfOutParams = 1;         /* one return value is available (the
                                   learning error)  */
    *parameterOutArray = OutParameter;  /* set the output parameter reference */

    if (NetModified || (TopoSortID != TOPOLOGICAL_FF)) {
        /* net has been modified or the topological array isn't initialized */
        /* check the topology of the network  */
        no_of_layers = kr_topoCheck();
        if (KernelErrorCode != KRERR_NO_ERROR)
            /* an error has occurred  */
            return (KernelErrorCode);

        if (no_of_layers < 2) { /* the network has fewer than 2 layers  */
            KernelErrorCode = KRERR_FEW_LAYERS;
            return (KernelErrorCode);
        }
        /* count the no. of I/O units and check the patterns  */
        if (kr_IOCheck() != KRERR_NO_ERROR)
            return (KernelErrorCode);

        /* sort units by topology and by topological type  */
        (void) kr_topoSort(TOPOLOGICAL_FF);
        if ((KernelErrorCode != KRERR_NO_ERROR) &&
            (KernelErrorCode != KRERR_DEAD_UNITS))
            return (KernelErrorCode);

        NetModified = FALSE;
    }


    /* compute the necessary sub patterns */

    KernelErrorCode = kr_initSubPatternOrder(start_pattern, end_pattern);
    if (KernelErrorCode != KRERR_NO_ERROR)
        return (KernelErrorCode);


    NET_ERROR(OutParameter) = 0.0;      /* reset network error value  */

    while (kr_getSubPatternByOrder(&pattern_no, &sub_pat_no)) {

        propagateNetForward(pattern_no, sub_pat_no);  /* forward propagation */

        /* backward propagation  */
        /* 1st parameter is the learning parameter, 2nd parameter is the max.
           deviation between output pattern and the output of the output
           unit (delta max) */
        NET_ERROR(OutParameter) +=
            propagateNetBackward2(pattern_no, sub_pat_no,
                                  LEARN_PARAM1(parameterInArray),
                                  LEARN_PARAM2(parameterInArray));
    }

    return (KernelErrorCode);
}
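
/*****************************************************************************
  NOTE : A minimal sketch of how this learning function is driven: one call
         is one epoch over [start_pattern, end_pattern].  The driver below
         is illustrative only and kept out of the build with #if 0; inside
         SNNS the function is normally dispatched through the krui
         interface, and the parameter values are placeholders.
******************************************************************************/
#if 0
static void example_train_backprop(void)
{
    float  inParams[2] = { 0.2f,     /* param 1: learning rate (eta)  */
                           0.1f };   /* param 2: delta max            */
    float *outParams;
    int    noOfOutParams, epoch;

    for (epoch = 0; epoch < 100; epoch++) {
        if (LEARN_backprop(0, krui_getNoOfPatterns() - 1,
                           inParams, 2,
                           &outParams, &noOfOutParams) != KRERR_NO_ERROR)
            break;
        /* outParams[0] now holds the summed squared error of this epoch */
    }
}
#endif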


/*****************************************************************************
  FUNCTION : TEST_backprop             joe

  PURPOSE  : main routine for the test of MLPs
  RETURNS  : kernel error code
  NOTES    : Output Parameters:  1 : error of the network (sum of all cycles)


  UPDATE   : 17.01.95
******************************************************************************/
krui_err TEST_backprop(int start_pattern, int end_pattern,
                       float *parameterInArray, int NoOfInParams,
                       float **parameterOutArray, int *NoOfOutParams)
{

    static float    OutParameter[1];    /* OutParameter[0] stores the
                                           learning error */
    int             pattern_no, sub_pat_no, no_of_layers;


    KernelErrorCode = KRERR_NO_ERROR;   /* reset return code  */

    if (NoOfInParams < 1) {     /* not enough input parameters  */
        KernelErrorCode = KRERR_PARAMETERS;
        return (KernelErrorCode);
    }
    *NoOfOutParams = 1;         /* one return value is available (the
                                   learning error)  */
    *parameterOutArray = OutParameter;  /* set the output parameter reference */

    if (NetModified || (TopoSortID != TOPOLOGICAL_FF)) {
        /* net has been modified or the topological array isn't initialized */
        /* check the topology of the network  */
        no_of_layers = kr_topoCheck();
        if (KernelErrorCode != KRERR_NO_ERROR)
            /* an error has occurred  */
            return (KernelErrorCode);

        if (no_of_layers < 2) { /* the network has fewer than 2 layers  */
            KernelErrorCode = KRERR_FEW_LAYERS;
            return (KernelErrorCode);
        }
        /* count the no. of I/O units and check the patterns  */
        if (kr_IOCheck() != KRERR_NO_ERROR)
            return (KernelErrorCode);

        /* sort units by topology and by topological type  */
        (void) kr_topoSort(TOPOLOGICAL_FF);
        if ((KernelErrorCode != KRERR_NO_ERROR) &&
            (KernelErrorCode != KRERR_DEAD_UNITS))
            return (KernelErrorCode);

        NetModified = FALSE;
    }


    /* compute the necessary sub patterns */

    KernelErrorCode = kr_initSubPatternOrder(start_pattern, end_pattern);
    if (KernelErrorCode != KRERR_NO_ERROR)
        return (KernelErrorCode);


    NET_ERROR(OutParameter) = 0.0;      /* reset network error value  */

    while (kr_getSubPatternByOrder(&pattern_no, &sub_pat_no)) {

        propagateNetForward(pattern_no, sub_pat_no);  /* forward propagation */

        NET_ERROR(OutParameter) +=
            testNetBackward2(pattern_no, sub_pat_no,
                             LEARN_PARAM1(parameterInArray),
                             LEARN_PARAM2(parameterInArray));

    }

    return (KernelErrorCode);
}

/*****************************************************************************
  FUNCTION : testNetBackward2                        joe

  PURPOSE  : calculates the network error for MLPs
  RETURNS  : network error
  NOTES    : network must be topologically sorted; no weights are changed

  UPDATE   : 19.02.95
******************************************************************************/
static float testNetBackward2(int pattern_no, int sub_pat_no,
                              float learn_parameter, float delta_max)
{
    register struct Unit *unit_ptr;
    register Patterns out_pat;
    register float sum_error, devit;
    register TopoPtrArray topo_ptr;
    int size;

    sum_error = 0.0;            /* reset network error  */

    /* calculate the address of the output pattern (with number
       pattern_no + 1)  */
    out_pat = kr_getSubPatData(pattern_no, sub_pat_no, OUTPUT, &size);
    if (out_pat == NULL) {
        KernelErrorCode = KRERR_NP_NO_SUCH_PATTERN;
        return (-1);
    }
    out_pat += size;


    /* add 3 to no_of_topo_units because the topological array contains 4
       NULL pointers  */
    topo_ptr = topo_ptr_array + (no_of_topo_units + 3);

    /* calculate output units only  */
    while ((unit_ptr = *--topo_ptr) != NULL) {
        devit = *(--out_pat) - unit_ptr->Out.output; /* calc. deviation */
        if ((float) fabs(devit) <= delta_max)
            continue;

        sum_error += devit * devit; /* sum up the error of the network  */

    }


    return (sum_error);         /* return the error of the network */
}



/*****************************************************************************
  FUNCTION : propagateNetBackwardBatch

  PURPOSE  : backward pass in batch mode for the backprop learning algorithm
  RETURNS  : network error
  NOTES    :

  UPDATE   : 05.11.1993 by Guenter Mamier
******************************************************************************/
float propagateNetBackwardBatch(int pattern_no, int sub_pat_no, float delta_max)
{
    register struct Link *link_ptr;
    register struct Site *site_ptr;
    register struct Unit *unit_ptr;
    register Patterns out_pat;
    register float  error, sum_error, devit;
    register TopoPtrArray topo_ptr;
    int size;

    sum_error = 0.0;            /* reset network error  */

    /* calculate the address of the output pattern (with number
       pattern_no + 1) */
    out_pat = kr_getSubPatData(pattern_no, sub_pat_no, OUTPUT, &size);
    if (out_pat == NULL) {
        KernelErrorCode = KRERR_NP_NO_SUCH_PATTERN;
        return (-1);
    }

    out_pat += size;

    /* add 3 to no_of_topo_units because the topological array contains 4
       NULL pointers  */
    topo_ptr = topo_ptr_array + (no_of_topo_units + 3);

    /* calculate output units only  */
    /* no test for special units takes place because the final weight */
    /* change is performed by updateWeights */
    while ((unit_ptr = *--topo_ptr) != NULL) {
        devit = *(--out_pat) - unit_ptr->Out.output; /* calc. deviation */
        if ((float) fabs(devit) <= delta_max)
            continue;

        sum_error += devit * devit; /* sum up the error of the network  */

        /* calc. error for output units  */
        error = devit * ((unit_ptr->act_deriv_func == ACT_DERIV_Custom_Python) ?
                         kr_PythonActFunction(unit_ptr->python_act_deriv_func,
                                              unit_ptr) :
                         (unit_ptr->act_deriv_func) (unit_ptr));
        /* calc. the error for adjusting weights and bias of the predecessor
           units  */
        /* accumulate bias delta  */
        unit_ptr->value_a += error;

        if (UNIT_HAS_DIRECT_INPUTS(unit_ptr)) {
            /* the unit has direct links  */
            FOR_ALL_LINKS(unit_ptr, link_ptr) { /* accumulate link deltas and
                                                   calc. sum of errors of the
                                                   predecessor units  */
                link_ptr->to->Aux.flint_no += link_ptr->weight * error;
                link_ptr->value_a += error * link_ptr->to->Out.output;
            }
        } else {
            /* the unit has sites  */
            FOR_ALL_SITES_AND_LINKS(unit_ptr, site_ptr, link_ptr) {
                /* accumulate link deltas and calc. sum of errors of the
                   predecessor units */
                link_ptr->to->Aux.flint_no += link_ptr->weight * error;
                link_ptr->value_a += error * link_ptr->to->Out.output;
            }
        }
    }

    /* calculate hidden units only  */
    /* no test for special units takes place because the final weight */
    /* change is performed by updateWeights */
    while ((unit_ptr = *--topo_ptr) != NULL) {
        /* calc. the error of the (hidden) unit  */
        error = ((unit_ptr->act_deriv_func == ACT_DERIV_Custom_Python) ?
                 kr_PythonActFunction(unit_ptr->python_act_deriv_func,
                                      unit_ptr) :
                 (unit_ptr->act_deriv_func) (unit_ptr)) *
                unit_ptr->Aux.flint_no;
        /* calc. the error for adjusting weights and bias of the predecessor
           units  */
        /* accumulate bias delta  */
        unit_ptr->value_a += error;

        if (UNIT_HAS_DIRECT_INPUTS(unit_ptr)) {
            /* the unit has direct links  */
            FOR_ALL_LINKS(unit_ptr, link_ptr) {
                /* accumulate link deltas and calc. sum of errors of the
                   predecessor units */
                if (IS_HIDDEN_UNIT(link_ptr->to))
                    /* this link points to a hidden unit: sum up the errors
                       from previous units  */
                    link_ptr->to->Aux.flint_no += link_ptr->weight * error;
                link_ptr->value_a += error * link_ptr->to->Out.output;
            }
        } else {
            /* the unit has sites  */
            FOR_ALL_SITES_AND_LINKS(unit_ptr, site_ptr, link_ptr) {
                /* accumulate link deltas and calc. sum of errors of the
                   predecessor units */
                if (IS_HIDDEN_UNIT(link_ptr->to))
                    /* this link points to a hidden unit: sum up the errors
                       from previous units  */
                    link_ptr->to->Aux.flint_no += link_ptr->weight * error;
                link_ptr->value_a += error * link_ptr->to->Out.output;
            }
        }
    }

    return (sum_error);         /* return the error of the network */
}
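
/*****************************************************************************
  NOTE : In contrast to propagateNetBackward2, this pass changes no weights:
         the per-pattern gradient terms are only accumulated, the bias term
         in unit_ptr->value_a and the weight term in link_ptr->value_a.
         After all patterns of the epoch have been presented,
         LEARN_backpropBatch applies them in one step via updateWeights,
         with the learning rate divided by the pattern count.
******************************************************************************/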

/*****************************************************************************
  FUNCTION : propagateClassNetBackwardBatch

  PURPOSE  : backward pass in batch mode for the backprop learning algorithm;
             only updates weights if the unit's usr_flags info matches the
             current pattern class (if classes are given)
  RETURNS  : network error
  NOTES    :

  UPDATE   : 31.03.98 Michael Vogt
******************************************************************************/
float propagateClassNetBackwardBatch(int pattern_no, int sub_pat_no,
                                     float delta_max)
{
    register struct Link *link_ptr;
    register struct Site *site_ptr;
    register struct Unit *unit_ptr;
    register Patterns out_pat;
    register float  error, sum_error, devit;
    register TopoPtrArray topo_ptr;
    int size;
    int pattern_class;
    unsigned long int class_flag = 0;   /* stays 0 if no class is given */
    const int maxclasses = 8 * sizeof(unsigned long int);
    int adjust_this;

    sum_error = 0.0;            /* reset network error  */

    /* calculate the address of the output pattern (with number
       pattern_no + 1) */
    out_pat = kr_getSubPatData(pattern_no, sub_pat_no, OUTPUT, &size);
    if (out_pat == NULL) {
        KernelErrorCode = KRERR_NP_NO_SUCH_PATTERN;
        return (-1);
    }
    pattern_class = kr_getSubPatClass(pattern_no, sub_pat_no);
    if (pattern_class >= maxclasses) {
        fprintf(stderr,
                "propagateClassNetBackwardBatch: pattern class index %d "
                "out of range\npattern is trained as usual\n", pattern_class);
        pattern_class = -1;
    }
    if (pattern_class >= 0)
        class_flag = ((unsigned long int) 1) << pattern_class;

#ifdef DEBUG
    printf("maxclasses: %d, pattern_class: %d, class_flag: %lx\n",
           maxclasses, pattern_class, class_flag);
#endif

    out_pat += size;

    /* add 3 to no_of_topo_units because the topological array contains 4
       NULL pointers  */
    topo_ptr = topo_ptr_array + (no_of_topo_units + 3);

    /* calculate output units only  */
    /* no test for special units takes place because the final weight */
    /* change is performed by updateWeights */
    while ((unit_ptr = *--topo_ptr) != NULL) {
        devit = *(--out_pat) - unit_ptr->Out.output; /* calc. deviation */
        if ((float) fabs(devit) <= delta_max)
            continue;

        adjust_this = (pattern_class == -1 ||
                       (unit_ptr->usr_flags & class_flag));
#ifdef DEBUG
        printf("%s%s ",
               adjust_this ? "+" : "-",
               unit_ptr->unit_name ? unit_ptr->unit_name : "");
#endif
        sum_error += devit * devit; /* sum up the error of the network  */

        /* calc. error for output units  */
        error = devit * ((unit_ptr->act_deriv_func == ACT_DERIV_Custom_Python) ?
                         kr_PythonActFunction(unit_ptr->python_act_deriv_func,
                                              unit_ptr) :
                         (unit_ptr->act_deriv_func) (unit_ptr));
        /* calc. the error for adjusting weights and bias of the predecessor
           units  */
        if (adjust_this) {
            /* accumulate bias delta and update count  */
            unit_ptr->value_a += error;
            unit_ptr->value_b += 1.0;

            if (UNIT_HAS_DIRECT_INPUTS(unit_ptr)) {
                /* the unit has direct links  */
                FOR_ALL_LINKS(unit_ptr, link_ptr) { /* accumulate link deltas
                                                       and calc. sum of errors
                                                       of the pred. units  */
                    link_ptr->to->Aux.flint_no += link_ptr->weight * error;
                    link_ptr->value_a += error * link_ptr->to->Out.output;
                    link_ptr->value_b += 1.0;
                }
            } else {
                /* the unit has sites  */
                FOR_ALL_SITES_AND_LINKS(unit_ptr, site_ptr, link_ptr) {
                    /* accumulate link deltas and calc. sum of errors of the
                       predecessor units */
                    link_ptr->to->Aux.flint_no += link_ptr->weight * error;
                    link_ptr->value_a += error * link_ptr->to->Out.output;
                    link_ptr->value_b += 1.0;
                }
            }
        }
    }

    /* calculate hidden units only  */
    /* no test for special units takes place because the final weight */
    /* change is performed by updateWeights */
    while ((unit_ptr = *--topo_ptr) != NULL) {
        adjust_this = (pattern_class == -1 ||
                       (unit_ptr->usr_flags & class_flag));
#ifdef DEBUG
        printf("%s%s ",
               adjust_this ? "+" : "-",
               unit_ptr->unit_name ? unit_ptr->unit_name : "");
#endif
        /* calc. the error of the (hidden) unit  */
        error = ((unit_ptr->act_deriv_func == ACT_DERIV_Custom_Python) ?
                 kr_PythonActFunction(unit_ptr->python_act_deriv_func,
                                      unit_ptr) :
                 (unit_ptr->act_deriv_func) (unit_ptr))
                * unit_ptr->Aux.flint_no;
        /* calc. the error for adjusting weights and bias of the predecessor
           units */
        if (adjust_this) {
            /* accumulate bias delta and update count  */
            unit_ptr->value_a += error;
            unit_ptr->value_b += 1.0;

            if (UNIT_HAS_DIRECT_INPUTS(unit_ptr)) {
                /* the unit has direct links  */
                FOR_ALL_LINKS(unit_ptr, link_ptr) {
                    /* accumulate link deltas and calc. sum of errors of the
                       predecessor units */
                    if (IS_HIDDEN_UNIT(link_ptr->to))
                        /* this link points to a hidden unit: sum up the
                           errors from previous units  */
                        link_ptr->to->Aux.flint_no += link_ptr->weight * error;

                    link_ptr->value_a += error * link_ptr->to->Out.output;
                    link_ptr->value_b += 1.0;
                }
            } else {
                /* the unit has sites  */
                FOR_ALL_SITES_AND_LINKS(unit_ptr, site_ptr, link_ptr) {
                    /* accumulate link deltas and calc. sum of errors of the
                       predecessor units */
                    if (IS_HIDDEN_UNIT(link_ptr->to))
                        /* this link points to a hidden unit: sum up the
                           errors from previous units  */
                        link_ptr->to->Aux.flint_no += link_ptr->weight * error;

                    link_ptr->value_a += error * link_ptr->to->Out.output;
                    link_ptr->value_b += 1.0;
                }
            }
        }
    }

#ifdef DEBUG
    printf("\n");
#endif
    return (sum_error);         /* return the error of the network */
}
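
/*****************************************************************************
  NOTE : The class gate is a plain bitmask.  A pattern of class c (0-based,
         c < 8 * sizeof(unsigned long)) selects bit c, and a unit takes part
         in training iff that bit is set in its usr_flags.  For example, a
         pattern of class 2 yields class_flag == 0x4, so a unit with
         usr_flags == 0x5 (bits 0 and 2 set) is trained, while one with
         usr_flags == 0x3 is skipped.  Patterns without class information
         (pattern_class == -1) train every unit.
******************************************************************************/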


/*****************************************************************************
  FUNCTION : clearDeltas

  PURPOSE  : clears the delta values for a new run of backprop batch
  RETURNS  : kernel error code
  NOTES    :

  UPDATE   : 05.11.1993 by Guenter Mamier
******************************************************************************/
krui_err clearDeltas(void)
{
    register FlagWord flags;
    register struct Link *link_ptr;
    register struct Unit *unit_ptr;
    register struct Site *site_ptr;
    register FlintType fastnull = 0.0;

    FOR_ALL_UNITS(unit_ptr) {
        flags = unit_ptr->flags;

        if ((flags & UFLAG_IN_USE) == UFLAG_IN_USE) {   /* unit is in use  */
            unit_ptr->value_a = fastnull;

            if (flags & UFLAG_SITES) {  /* unit has sites  */
                FOR_ALL_SITES_AND_LINKS(unit_ptr, site_ptr, link_ptr)
                    link_ptr->value_a = fastnull;
            } else {            /* unit has no sites  */
                if (flags & UFLAG_DLINKS) { /* unit has direct links  */
                    FOR_ALL_LINKS(unit_ptr, link_ptr)
                        link_ptr->value_a = fastnull;
                }
            }
        }
    }

    return (KRERR_NO_ERROR);
}

/*****************************************************************************
  FUNCTION : clearAllDeltas

  PURPOSE  : clears all delta values for a new run of special batch schemes
  RETURNS  : kernel error code
  NOTES    :

  UPDATE   : 31.03.98 by Michael Vogt
******************************************************************************/
krui_err clearAllDeltas(void)
{
    register FlagWord flags;
    register struct Link *link_ptr;
    register struct Unit *unit_ptr;
    register struct Site *site_ptr;
    register FlintType fastnull = 0.0;

    FOR_ALL_UNITS(unit_ptr) {
        flags = unit_ptr->flags;

        if ((flags & UFLAG_IN_USE) == UFLAG_IN_USE) {   /* unit is in use  */
            unit_ptr->value_a = fastnull;
            unit_ptr->value_b = fastnull;
            unit_ptr->value_c = fastnull;

            if (flags & UFLAG_SITES) {  /* unit has sites  */
                FOR_ALL_SITES_AND_LINKS(unit_ptr, site_ptr, link_ptr) {
                    link_ptr->value_a = fastnull;
                    link_ptr->value_b = fastnull;
                    link_ptr->value_c = fastnull;
                }
            } else if (flags & UFLAG_DLINKS) {
                /* unit has direct links  */
                FOR_ALL_LINKS(unit_ptr, link_ptr) {
                    link_ptr->value_a = fastnull;
                    link_ptr->value_b = fastnull;
                    link_ptr->value_c = fastnull;
                }
            }
        }
    }

    return (KRERR_NO_ERROR);
}



/*****************************************************************************
  FUNCTION : updateWeights

  PURPOSE  : update the weights after all patterns have been presented by
             LEARN_backpropBatch
  RETURNS  : kernel error code
  NOTES    :

  UPDATE   : 05.11.1993 by Guenter Mamier
******************************************************************************/
static krui_err updateWeights(float eta)
{
    register FlagWord flags;
    register struct Link *link_ptr;
    register struct Unit *unit_ptr;
    register struct Site *site_ptr;

    FOR_ALL_UNITS(unit_ptr) {
        if (!IS_SPECIAL_UNIT(unit_ptr)) {
            flags = unit_ptr->flags;

            if ((flags & UFLAG_IN_USE) == UFLAG_IN_USE) {
                /* unit is in use  */
                unit_ptr->bias += unit_ptr->value_a * eta;

                if (flags & UFLAG_SITES) {
                    /* unit has sites  */
                    FOR_ALL_SITES_AND_LINKS(unit_ptr, site_ptr, link_ptr)
                        link_ptr->weight += link_ptr->value_a * eta;
                } else {
                    /* unit has no sites  */
                    if (flags & UFLAG_DLINKS) { /* unit has direct links  */
                        FOR_ALL_LINKS(unit_ptr, link_ptr)
                            link_ptr->weight += link_ptr->value_a * eta;
                    }
                }
            }
        }
    }
    return (KRERR_NO_ERROR);
}
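
/*****************************************************************************
  NOTE : Written out, one epoch of the batch scheme therefore performs

             delta w_ij = (eta / P) * SUM_p( delta_j(p) * o_i(p) )

         where P is the number of (sub)patterns presented:
         propagateNetBackwardBatch leaves SUM_p( delta_j(p) * o_i(p) ) in
         link_ptr->value_a, and LEARN_backpropBatch calls this function with
         eta already divided by the pattern count.  The same holds for the
         bias via unit_ptr->value_a.  Special units are excluded here, which
         is why the batch backward pass itself needs no special-unit test.
******************************************************************************/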

/*****************************************************************************
  FUNCTION : updateNormalizedWeights

  PURPOSE  : update the weights after all patterns have been presented by
             a specialized batch learning scheme which places the number
             of weight changes in the value_b fields
  RETURNS  : kernel error code
  NOTES    : this function should not be called with a normalized learning
             rate

  UPDATE   : 31.03.98 Michael Vogt
******************************************************************************/
static krui_err updateNormalizedWeights(float eta)
{
    register FlagWord flags;
    register struct Link *link_ptr;
    register struct Unit *unit_ptr;
    register struct Site *site_ptr;
    register FlintType fastnull = 0.0;

    FOR_ALL_UNITS(unit_ptr) {
        if (!IS_SPECIAL_UNIT(unit_ptr)) {
            flags = unit_ptr->flags;

            if ((flags & UFLAG_IN_USE) == UFLAG_IN_USE) {
                /* unit is in use */
                if (unit_ptr->value_b > fastnull)
                    unit_ptr->bias +=
                        unit_ptr->value_a * eta / unit_ptr->value_b;
                if (flags & UFLAG_SITES) {
                    /* unit has sites */
                    FOR_ALL_SITES_AND_LINKS(unit_ptr, site_ptr, link_ptr) {
                        if (link_ptr->value_b > fastnull)
                            link_ptr->weight +=
                                link_ptr->value_a * eta / link_ptr->value_b;
                    }
                } else if (flags & UFLAG_DLINKS) {
                    /* unit has direct links */
                    FOR_ALL_LINKS(unit_ptr, link_ptr) {
                        if (link_ptr->value_b > fastnull)
                            link_ptr->weight +=
                                link_ptr->value_a * eta / link_ptr->value_b;
                    }
                }
            }
        }
    }
    return (KRERR_NO_ERROR);
}
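
/*****************************************************************************
  NOTE : Counterpart of updateWeights for class-based training
         (propagateClassNetBackwardBatch): because a unit may take part in
         only a subset of the patterns, every weight carries its own update
         count in value_b and is normalized individually,

             delta w_ij = eta * value_a / value_b

         Weights that never participated (value_b == 0) are left untouched.
         This per-weight division is also why the caller must pass the raw
         eta and not a rate already divided by the pattern count.
******************************************************************************/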


/*****************************************************************************
  FUNCTION : LEARN_backpropBatch

  PURPOSE  : main routine for the batch version of the backpropagation
             algorithm
  RETURNS  : kernel error code
  NOTES    : Input Parameters:   1 : learning parameter
                                 2 : delta max

             Output Parameters:  1 : error of the network (sum of all cycles)

  UPDATE   : 05.11.1993 by Guenter Mamier
******************************************************************************/
krui_err LEARN_backpropBatch(int start_pattern, int end_pattern,
                             float *parameterInArray, int NoOfInParams,
                             float **parameterOutArray, int *NoOfOutParams)
{
    static float    OutParameter[1];    /* OutParameter[0] stores the
                                           learning error  */
    int             pattern_no, sub_pat_no, no_of_layers;
    int             pattern_count;

    KernelErrorCode = KRERR_NO_ERROR;   /* reset return code  */

    /* ####  has to be changed (must be 2)  #### */
    if (NoOfInParams < 1) {     /* not enough input parameters  */
        KernelErrorCode = KRERR_PARAMETERS;
        return (KernelErrorCode);
    }
    *NoOfOutParams = 1;   /* one return value is available (learning error) */
    *parameterOutArray = OutParameter;  /* set the output parameter reference */

    if (NetModified || (TopoSortID != TOPOLOGICAL_FF)) {
        /* net has been modified or the topological array isn't initialized */
        /* check the topology of the network  */
        no_of_layers = kr_topoCheck();
        if (KernelErrorCode != KRERR_NO_ERROR)
            /* an error has occurred  */
            return (KernelErrorCode);

        if (no_of_layers < 2) { /* the network has fewer than 2 layers  */
            KernelErrorCode = KRERR_FEW_LAYERS;
            return (KernelErrorCode);
        }
        /* count the no. of I/O units and check the patterns  */
        if (kr_IOCheck() != KRERR_NO_ERROR)
            return (KernelErrorCode);

        /* sort units by topology and by topological type  */
        (void) kr_topoSort(TOPOLOGICAL_FF);
        if ((KernelErrorCode != KRERR_NO_ERROR) &&
            (KernelErrorCode != KRERR_DEAD_UNITS))
            return (KernelErrorCode);

        NetModified = FALSE;
    }
    clearDeltas();

    /* compute the necessary sub patterns */

    KernelErrorCode = kr_initSubPatternOrder(start_pattern, end_pattern);
    if (KernelErrorCode != KRERR_NO_ERROR)
        return (KernelErrorCode);


    NET_ERROR(OutParameter) = 0.0;      /* reset network error value  */
    pattern_count = 0;
    while (kr_getSubPatternByOrder(&pattern_no, &sub_pat_no)) {
        pattern_count++;
        propagateNetForward(pattern_no, sub_pat_no);  /* forward propagation */

        /* backward propagation  */
        /* the parameter is the max. deviation between output pattern and
           the output of the output unit (delta max); the learning parameter
           is applied later by updateWeights */
        NET_ERROR(OutParameter) +=
            propagateNetBackwardBatch(pattern_no, sub_pat_no,
                                      LEARN_PARAM2(parameterInArray));
    }

    if (pattern_count > 0)
        updateWeights(LEARN_PARAM1(parameterInArray) / pattern_count);


    return (KernelErrorCode);
}
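
/*****************************************************************************
  NOTE : One call to LEARN_backpropBatch is one epoch: the gradient is
         accumulated over all (sub)patterns of [start_pattern, end_pattern]
         and every weight is changed exactly once at the end.  A minimal,
         illustrative driver (kept out of the build with #if 0; parameter
         values are placeholders):
******************************************************************************/
#if 0
static void example_train_backprop_batch(void)
{
    float  inParams[2] = { 1.0f,     /* param 1: learning rate (eta)  */
                           0.0f };   /* param 2: delta max            */
    float *outParams;
    int    noOfOutParams, epoch;

    for (epoch = 0; epoch < 100; epoch++)
        if (LEARN_backpropBatch(0, krui_getNoOfPatterns() - 1,
                                inParams, 2,
                                &outParams, &noOfOutParams) != KRERR_NO_ERROR)
            break;
}
#endif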


/*****************************************************************************
 *****************************************************************************

  GROUP        : backpropagation learning algorithm with momentum term

  AUTHOR       : Niels Mache

******************************************************************************
******************************************************************************/


/*****************************************************************************
  FUNCTION : initializeBackpropMomentum

  PURPOSE  : backprop-momentum initialisation
  RETURNS  : kernel error code
  NOTES    :

  UPDATE   : 05.11.1993 by Guenter Mamier
******************************************************************************/
static krui_err initializeBackpropMomentum(void)
{
    register FlagWord flags;
    register struct Link *link_ptr;
    register struct Unit *unit_ptr;
    register struct Site *site_ptr;


    FOR_ALL_UNITS(unit_ptr) {
        flags = unit_ptr->flags;

        if ((flags & UFLAG_IN_USE) == UFLAG_IN_USE) {   /* unit is in use  */
            unit_ptr->value_a = (FlintType) 0;

            if (flags & UFLAG_SITES) {  /* unit has sites  */
                FOR_ALL_SITES_AND_LINKS(unit_ptr, site_ptr, link_ptr)
                    link_ptr->value_b = (FlintType) 0;
            } else {            /* unit has no sites  */
                if (flags & UFLAG_DLINKS) { /* unit has direct links  */
                    FOR_ALL_LINKS(unit_ptr, link_ptr)
                        link_ptr->value_b = (FlintType) 0;
                }
            }
        }
    }

    return (KRERR_NO_ERROR);
}




/*****************************************************************************
  FUNCTION : Backprop_momentum_FSE
  PURPOSE  : backward error propagation (topological) of the backpropagation
             learning function with momentum term and flat spot elimination

  RETURNS  : network error
  NOTES    :

  UPDATE   : 07.02.1994 by Sven Doering
******************************************************************************/
static float Backprop_momentum_FSE(int pattern_no, int sub_pat_no,
                                   float learn_parameter,
                                   float mu, float FSE_term, float delta_max)
{
    register struct Link *link_ptr;
    register struct Site *site_ptr;
    register struct Unit *unit_ptr;
    register Patterns out_pat;
    register float  error, sum_error, eta, devit, learn_error, mu_help;
    register TopoPtrArray topo_ptr;
    int size;


    sum_error = 0.0;            /* reset network error  */
    eta = learn_parameter;      /* store learn_parameter in CPU register  */

    /* calculate the address of the output pattern (with number
       pattern_no + 1)  */
    out_pat = kr_getSubPatData(pattern_no, sub_pat_no, OUTPUT, &size);
    if (out_pat == NULL) {
        KernelErrorCode = KRERR_NP_NO_SUCH_PATTERN;
        return (-1);
    }
    out_pat += size;


    /* add 3 to no_of_topo_units because the topological array contains 4
       NULL pointers  */
    topo_ptr = topo_ptr_array + (no_of_topo_units + 3);

    /* calculate output units only  */
    while ((unit_ptr = *--topo_ptr) != NULL) {
        devit = *(--out_pat) - unit_ptr->Out.output;    /* calc. deviation */
        if ((float) fabs(devit) <= delta_max)
            continue;

        sum_error += devit * devit;     /* sum up the error of the network  */
        /* calc. error for output units  */
        error = devit * (((unit_ptr->act_deriv_func == ACT_DERIV_Custom_Python) ?
                          kr_PythonActFunction(unit_ptr->python_act_deriv_func,
                                               unit_ptr) :
                          (unit_ptr->act_deriv_func) (unit_ptr)) + FSE_term);

        /* calc. the error for adjusting weights and bias of the predecessor
           units  */
        mu_help = mu;
        learn_error = eta * error;
        if (IS_SPECIAL_UNIT(unit_ptr)) {
            learn_error = 0.0;
            mu = 0.0;
        }
        unit_ptr->value_a = learn_error + mu * unit_ptr->value_a;
        /* adjust bias value  */
        unit_ptr->bias += unit_ptr->value_a;

        if (UNIT_HAS_DIRECT_INPUTS(unit_ptr)) { /* the unit has direct links */
            FOR_ALL_LINKS(unit_ptr, link_ptr) { /* adjust link weights and
                                                   calc. sum of errors of the
                                                   predecessor units  */
                link_ptr->to->Aux.flint_no += link_ptr->weight * error;
                link_ptr->value_b = learn_error * link_ptr->to->Out.output +
                                    mu * link_ptr->value_b;
                link_ptr->weight += link_ptr->value_b;
            }
        } else {                /* the unit has sites  */
            FOR_ALL_SITES_AND_LINKS(unit_ptr, site_ptr, link_ptr) {
                /* adjust links and calc. sum of errors of the predecessor
                   units */
                link_ptr->to->Aux.flint_no += link_ptr->weight * error;
                link_ptr->value_b = learn_error * link_ptr->to->Out.output +
                                    mu * link_ptr->value_b;
                link_ptr->weight += link_ptr->value_b;
            }
        }
        mu = mu_help;
    }


    /* calculate hidden units only  */
    while ((unit_ptr = *--topo_ptr) != NULL) {
        /* calc. the error of the (hidden) unit  */
        error = unit_ptr->Aux.flint_no *
                (((unit_ptr->act_deriv_func == ACT_DERIV_Custom_Python) ?
                  kr_PythonActFunction(unit_ptr->python_act_deriv_func,
                                       unit_ptr) :
                  (unit_ptr->act_deriv_func) (unit_ptr)) + FSE_term);

        /* calc. the error for adjusting weights and bias of the predecessor
           units */
        mu_help = mu;
        learn_error = eta * error;
        if (IS_SPECIAL_UNIT(unit_ptr)) {
            learn_error = 0.0;
            mu = 0.0;
        }
        unit_ptr->value_a = learn_error + mu * unit_ptr->value_a;
        /* adjust bias value  */
        unit_ptr->bias += unit_ptr->value_a;

        if (UNIT_HAS_DIRECT_INPUTS(unit_ptr)) { /* the unit has direct links */
            FOR_ALL_LINKS(unit_ptr, link_ptr) { /* adjust link weights and
                                                   calc. sum of errors of the
                                                   predecessor units  */
                if (IS_HIDDEN_UNIT(link_ptr->to))
                    /* this link points to a hidden unit: sum up the errors
                       from previous units  */
                    link_ptr->to->Aux.flint_no += link_ptr->weight * error;

                link_ptr->value_b = learn_error * link_ptr->to->Out.output +
                                    mu * link_ptr->value_b;
                link_ptr->weight += link_ptr->value_b;
            }
        } else {                /* the unit has sites  */
            FOR_ALL_SITES_AND_LINKS(unit_ptr, site_ptr, link_ptr) {
                /* adjust links and calc. sum of errors of the predecessor
                   units */
                if (IS_HIDDEN_UNIT(link_ptr->to))
                    /* this link points to a hidden unit: sum up the errors
                       from previous units  */
                    link_ptr->to->Aux.flint_no += link_ptr->weight * error;

                link_ptr->value_b = learn_error * link_ptr->to->Out.output +
                                    mu * link_ptr->value_b;
                link_ptr->weight += link_ptr->value_b;
            }
        }
        mu = mu_help;
    }

    return (sum_error);         /* return the error of the network */
}
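
/*****************************************************************************
  NOTE : The two extensions over plain backprop, as implemented above:

         momentum    -- the previous step is kept per weight (in
                        link_ptr->value_b, per bias in unit_ptr->value_a)
                        and re-applied scaled by mu:

                            dw(t) = eta * delta_j * o_i + mu * dw(t-1)

         flat spot   -- the constant FSE_term is added to the activation
         elimination    derivative, delta_j = (f'(net_j) + FSE_term) * ...,
                        so learning does not stall where f' is nearly zero
                        (e.g. a saturated sigmoid).  With mu == 0 and
                        FSE_term == 0 this pass reduces exactly to
                        propagateNetBackward2.
******************************************************************************/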


/*****************************************************************************
  FUNCTION : LEARN_backpropMomentum

  PURPOSE  : main routine for backpropagation with momentum
  RETURNS  : kernel error code
  NOTES    : Input Parameters:   1 : learning parameter
                                 2 : momentum term
                                 3 : flat-spot-elimination value
                                 4 : delta max

             Output Parameters:  1 : error of the network (sum of all cycles)

  UPDATE   : 05.11.1993 by Guenter Mamier
******************************************************************************/
krui_err LEARN_backpropMomentum(int start_pattern, int end_pattern,
                                float *parameterInArray, int NoOfInParams,
                                float **parameterOutArray, int *NoOfOutParams)
{
    static float OutParameter[1];  /* OutParameter[0] stores the learning
                                      error */
    int          ret_code, pattern_no, sub_pat_no;


    if (NoOfInParams < 1)       /* ####  has to be changed (must be 2)  #### */
        return (KRERR_PARAMETERS);      /* not enough input parameters  */

    *NoOfOutParams = 1;         /* one return value is available (the
                                   learning error)  */
    *parameterOutArray = OutParameter;  /* set the output parameter reference */
    ret_code = KRERR_NO_ERROR;  /* reset return code  */

    if (NetModified || (TopoSortID != TOPOLOGICAL_FF)) {
        /* net has been modified or the topological array isn't initialized */
        /* check the topology of the network  */
        ret_code = kr_topoCheck();
        if (ret_code < KRERR_NO_ERROR)
            return (ret_code);  /* an error has occurred  */
        if (ret_code < 2)
            return (KRERR_FEW_LAYERS);  /* the network has fewer than 2
                                           layers */

        /* count the no. of I/O units and check the patterns  */
        ret_code = kr_IOCheck();
        if (ret_code < KRERR_NO_ERROR)
            return (ret_code);

        /* sort units by topology and by topological type  */
        ret_code = kr_topoSort(TOPOLOGICAL_FF);
        if ((ret_code != KRERR_NO_ERROR) && (ret_code != KRERR_DEAD_UNITS))
            return (ret_code);

        NetModified = FALSE;
    }
    if (NetInitialize || LearnFuncHasChanged) { /* net has been modified or
                                                   initialized, initialize
                                                   backprop now  */
        ret_code = initializeBackpropMomentum();
        if (ret_code != KRERR_NO_ERROR)
            return (ret_code);
    }


    /* compute the necessary sub patterns */

    KernelErrorCode = kr_initSubPatternOrder(start_pattern, end_pattern);
    if (KernelErrorCode != KRERR_NO_ERROR)
        return (KernelErrorCode);


    NET_ERROR(OutParameter) = 0.0;      /* reset network error value  */

    while (kr_getSubPatternByOrder(&pattern_no, &sub_pat_no)) {

        propagateNetForward(pattern_no, sub_pat_no);  /* forward propagation */

        /* backward propagation  */
        /* 1st parameter is the learning parameter, 2nd parameter is the
           momentum term, 3rd parameter is the flat-spot-elimination value,
           4th parameter is the max. deviation between output pattern and
           the output of the output unit (delta max) */
        NET_ERROR(OutParameter) +=
            Backprop_momentum_FSE(pattern_no, sub_pat_no,
                                  LEARN_PARAM1(parameterInArray),
                                  LEARN_PARAM2(parameterInArray),
                                  LEARN_PARAM3(parameterInArray),
                                  LEARN_PARAM4(parameterInArray));
    }

    return (ret_code);
}

/*****************************************************************************
  FUNCTION : TEST_backpropMomentum              joe

  PURPOSE  : main routine for testing backpropagation with momentum
  RETURNS  : kernel error code
  NOTES    : Input Parameters:   1 : learning parameter
                                 2 : momentum term
                                 3 : flat-spot-elimination value
                                 4 : delta max

             Output Parameters:  1 : error of the network (sum of all cycles)

  UPDATE   : 20.01.95 by Joachim Danz
******************************************************************************/
krui_err TEST_backpropMomentum(int start_pattern, int end_pattern,
                               float *parameterInArray, int NoOfInParams,
                               float **parameterOutArray, int *NoOfOutParams)
{
    static float OutParameter[1];  /* OutParameter[0] stores the learning
                                      error */
    int          ret_code, pattern_no, sub_pat_no;


    if (NoOfInParams < 1)       /* ####  has to be changed (must be 2)  #### */
        return (KRERR_PARAMETERS);      /* not enough input parameters  */

    *NoOfOutParams = 1;         /* one return value is available (the
                                   learning error)  */
    *parameterOutArray = OutParameter;  /* set the output parameter reference */
    ret_code = KRERR_NO_ERROR;  /* reset return code  */

    if (NetModified || (TopoSortID != TOPOLOGICAL_FF)) {
        /* net has been modified or the topological array isn't initialized */
        /* check the topology of the network  */
        ret_code = kr_topoCheck();
        if (ret_code < KRERR_NO_ERROR)
            return (ret_code);  /* an error has occurred  */
        if (ret_code < 2)
            return (KRERR_FEW_LAYERS);  /* the network has fewer than 2
                                           layers */

        /* count the no. of I/O units and check the patterns  */
        ret_code = kr_IOCheck();
        if (ret_code < KRERR_NO_ERROR)
            return (ret_code);

        /* sort units by topology and by topological type  */
        ret_code = kr_topoSort(TOPOLOGICAL_FF);
        if ((ret_code != KRERR_NO_ERROR) && (ret_code != KRERR_DEAD_UNITS))
            return (ret_code);

        NetModified = FALSE;
    }


    /* compute the necessary sub patterns */

    KernelErrorCode = kr_initSubPatternOrder(start_pattern, end_pattern);
    if (KernelErrorCode != KRERR_NO_ERROR)
        return (KernelErrorCode);


    NET_ERROR(OutParameter) = 0.0;      /* reset network error value  */

    while (kr_getSubPatternByOrder(&pattern_no, &sub_pat_no)) {

        propagateNetForward(pattern_no, sub_pat_no);  /* forward propagation */

        /* error computation: only the 1st (learning) parameter and the 4th
           parameter, the max. deviation between output pattern and the
           output of the output unit (delta max), are passed on */
        NET_ERROR(OutParameter) +=
            testNetBackward2(pattern_no, sub_pat_no,
                             LEARN_PARAM1(parameterInArray),
                             LEARN_PARAM4(parameterInArray));
    }

    return (ret_code);
}
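
/*****************************************************************************
  NOTE : TEST_backpropMomentum reuses testNetBackward2, so no weights are
         touched and the momentum and flat-spot parameters (2 and 3) have no
         influence on the reported test error; only delta max (parameter 4)
         determines which deviations are counted.
******************************************************************************/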
1443 
1444 
1445 
1446 /*****************************************************************************
1447  *****************************************************************************
1448 
1449   GROUP        : backpropagation learning algorithm with weight decay
1450 
1451   AUTHOR       : Tobias Schreiner
1452 
1453 ******************************************************************************
1454 ******************************************************************************/
1455 
1456 
1457 /*****************************************************************************
1458   FUNCTION : Backprop_weightdecay
1459   PURPOSE  : Backward error propagation (topological) of backpropagation
1460              learnig function with weigth decay
1461 
1462   RETURNS  : network error
1463   NOTES    :
1464 
1465   UPDATE   :
1466 ******************************************************************************/
Backprop_weightdecay(int pattern_no,int sub_pat_no,float learn_parameter,float wd_gamma,float min_weight,float delta_max)1467 static float Backprop_weightdecay (int pattern_no, int sub_pat_no,
1468 				   float learn_parameter,
1469 				   float wd_gamma, float min_weight,
1470 				   float delta_max)
1471 
1472 {
1473 
1474     register Patterns out_pat;
1475     register float error, sum_error, eta, devit, learn_error;
1476     register TopoPtrArray topo_ptr;
1477     int size;
1478 
1479     /* reset network error  */
1480     sum_error = 0.0;
1481 
1482     /* store learn_parameter in CPU register  */
1483     eta = learn_parameter;
1484 
1485     /* calculate address of the output pattern (with number pattern_no + 1)  */
1486 /*    out_pat = out_patterns + (pattern_no + 1) * NoOfOutputPatterns;*/
1487 
1488     out_pat = kr_getSubPatData(pattern_no,sub_pat_no,OUTPUT,&size);
1489     if(out_pat == NULL)
1490     {
1491 	KernelErrorCode = KRERR_NP_NO_SUCH_PATTERN;
1492 	return(-1);
1493     }
1494     out_pat += size;
1495 
1496     /* add 3 to no_of_topo_units because the topologic array contains */
1497     /* 4 NULL pointers  */
1498     topo_ptr = topo_ptr_array + (no_of_topo_units + 3);
1499 
1500     /* calculate output units only  */
1501     while ((unitPtr = *--topo_ptr) != NULL)
1502     {
1503 	/* update unitNo */
1504 	unitNo = unitPtr - unit_array;
1505 
1506 	/* calculate deviation */
1507 	devit = *(--out_pat) - unitPtr->Out.output;
1508 	if ((float) fabs (devit) <= delta_max)
1509 	    continue;
1510 
1511 	/* sum up the error of the network  */
1512 	sum_error += devit * devit;
1513 
1514 	/* calculate error for output units	 */
1515 	error = devit * ((unitPtr->act_deriv_func == ACT_DERIV_Custom_Python) ?
1516 			kr_PythonActFunction(unitPtr->python_act_deriv_func,
1517 						unitPtr) :
1518 			(unitPtr->act_deriv_func) (unitPtr)) ;
1519 
1520 	/* calculate the error for adjusting weights and bias of the */
1521 	/* predecessor units  */
1522 	if (IS_SPECIAL_UNIT(unitPtr))
1523 	    learn_error = 0.0;
1524 	else
1525 	    learn_error = eta * error;
1526 
1527 	/* adjust bias value  */
1528 	unitPtr->bias += learn_error - wd_gamma * unitPtr->bias;
1529 
1530 	/* initialize prevLinkPtr */
1531 	prevLinkPtr = NULL;
1532 
1533 	if UNIT_HAS_DIRECT_INPUTS (unitPtr)
1534 	    /* the unit has direct links  */
1535 	    for (linkPtr = (struct Link *) unitPtr->sites;
1536 		 linkPtr != NULL;
1537 		 linkPtr = linkPtr ? linkPtr->next : linkPtr)
1538 	    {
1539 		/* calculate errors of the predecessor units  */
1540 		linkPtr->to->Aux.flint_no += linkPtr->weight * error;
1541 
1542 		/* adjust the link weights */
1543 		linkPtr->weight +=
1544 		    learn_error * linkPtr->to->Out.output
1545 			- wd_gamma * linkPtr->weight;
1546 
1547 		/* prune link if smaller than minimum */
1548 		if (fabs (linkPtr->weight) < min_weight)
1549 		    kr_deleteLink ();
1550 
1551 		/* adjust prevLinkPtr */
1552 		prevLinkPtr = linkPtr;
1553 	    }
1554 	else
1555 	    /* the unit has sites  */
1556 	    for (sitePtr = unitPtr->sites;
1557 		 sitePtr != NULL;
1558 		 sitePtr = sitePtr ? sitePtr->next : sitePtr)
1559 		for (linkPtr = sitePtr->links;
1560 		     linkPtr != NULL;
1561 		     linkPtr = linkPtr ? linkPtr->next : linkPtr)
1562 		{
1563 		    /* calculate errors of the predecessor units  */
1564 		    linkPtr->to->Aux.flint_no += linkPtr->weight * error;
1565 
1566 		    /* adjust the link weights */
1567 		    linkPtr->weight +=
1568 			learn_error * linkPtr->to->Out.output
1569 			    - wd_gamma * linkPtr->weight;
1570 
1571 		    /* prune link if smaller than minimum */
1572 		    if (fabs (linkPtr->weight) < min_weight)
1573 			kr_deleteLink ();
1574 
1575 		    /* adjust prevLinkPtr */
1576 		    prevLinkPtr = linkPtr;
1577 		}
1578     }
1579 
1580     /* calculate hidden units only  */
1581     while ((unitPtr = *--topo_ptr) != NULL)
1582     {
1583 	/* update unitNo */
1584 	unitNo = unitPtr - unit_array;
1585 
1586 	/* calculate the error of the (hidden) unit  */
1587 	error = ((unitPtr->act_deriv_func == ACT_DERIV_Custom_Python) ?
1588 			kr_PythonActFunction(unitPtr->python_act_deriv_func,
1589 						unitPtr) :
1590 			(unitPtr->act_deriv_func) (unitPtr))  *
1591 	    unitPtr->Aux.flint_no;
1592 
1593 	/* calculate the error for adjusting weights and bias of the */
1594 	/* predecessor units  */
1595 	if (IS_SPECIAL_UNIT(unitPtr))
1596 	    learn_error = 0.0;
1597 	else
1598 	    learn_error = eta * error;
1599 
1600 	/* adjust bias value  */
1601 	unitPtr->bias += learn_error - wd_gamma * unitPtr->bias;
1602 
1603 	/* initialize prevLinkPtr */
1604 	prevLinkPtr = NULL;
1605 
1606 	if UNIT_HAS_DIRECT_INPUTS (unitPtr)
1607 	    /* the unit has direct links  */
1608 	    for (linkPtr = (struct Link *) unitPtr->sites;
1609 		 linkPtr != NULL;
1610 		 linkPtr = linkPtr ? linkPtr->next : linkPtr)
1611 	    {
1612 		/* calculate errors of the predecessor units  */
1613 		if IS_HIDDEN_UNIT (linkPtr->to)
1614 		    /* this link points to a hidden unit: sum up the */
1615 		    /* errors from previous units  */
1616 		    linkPtr->to->Aux.flint_no +=
1617 			linkPtr->weight * error;
1618 
1619 		/* adjust link weights */
1620 		linkPtr->weight +=
1621 		    learn_error * linkPtr->to->Out.output
1622 			- wd_gamma * linkPtr->weight;
1623 
1624 		/* prune link if smaller than minimum */
1625 		if (fabs (linkPtr->weight) < min_weight)
1626 		    kr_deleteLink ();
1627 
1628 		/* adjust prevLinkPtr */
1629 		prevLinkPtr = linkPtr;
1630 	    }
1631 	else
1632 	    /* the unit has sites  */
1633 	    for (sitePtr = unitPtr->sites;
1634 		 sitePtr != NULL;
1635 		 sitePtr = sitePtr ? sitePtr->next : sitePtr)
1636 		for (linkPtr = sitePtr->links;
1637 		     linkPtr != NULL;
1638 		     linkPtr = linkPtr ? linkPtr->next : linkPtr)
1639 		{
1640 		    /* calculate errors of the predecessor units  */
1641 		    if IS_HIDDEN_UNIT (linkPtr->to)
1642 			/* this link points to a hidden unit: sum up */
1643 			/* the errors from previous units  */
1644 			linkPtr->to->Aux.flint_no +=
1645 			    linkPtr->weight * error;
1646 
1647 		    /* adjust links */
1648 		    linkPtr->weight +=
1649 			learn_error * linkPtr->to->Out.output
1650 			    - wd_gamma * linkPtr->weight;
1651 
1652 		    /* prune link if smaller than minimum */
1653 		    if (fabs (linkPtr->weight) < min_weight)
1654 			kr_deleteLink ();
1655 
1656 		    /* adjust prevLinkPtr */
1657 		    prevLinkPtr = linkPtr;
1658 		}
1659     }
1660 
1661     return (sum_error);		/* return the error of the network */
1662 }
1663 
1664 
1665 /*****************************************************************************
1666   FUNCTION : LEARN_backpropWeightDecay
1667 
1668   PURPOSE  : main routine for backpropagation with weight decay
1669   RETURNS  : kernel error code
1670   NOTES    : Input Parameters:   1 : learning parameter
1671                                  2 : parameter for weight decay
1672 				 3 : minimum weight (smaller weights
1673 				     will be pruned)
1674 				 4 : delta max
1675 
1676              Output Parameters:  1 : error of the network (sum of all cycles)
1677 
1678 	     special flags are reset!
1679 
1680   UPDATE   :
1681 ******************************************************************************/
LEARN_backpropWeightDecay(int start_pattern,int end_pattern,float * parameterInArray,int NoOfInParams,float ** parameterOutArray,int * NoOfOutParams)1682 krui_err LEARN_backpropWeightDecay (int start_pattern, int end_pattern,
1683 		float *parameterInArray, int NoOfInParams,
1684 		float **parameterOutArray, int *NoOfOutParams)
1685 
1686 {
1687 
1688     static float OutParameter [1];
1689     /* OutParameter [0] stores the learning error */
1690     int pattern_no, sub_pat_no, no_of_layers;
1691 
1692     /* reset return code  */
1693     KernelErrorCode = KRERR_NO_ERROR;
1694 
1695     /* assure four input parameters */
1696     if (NoOfInParams < 4)
1697     {
1698 	KernelErrorCode = KRERR_PARAMETERS;
1699 	return (KernelErrorCode);
1700     }
1701 
1702     /* learning error is the only output parameter */
1703     *NoOfOutParams = 1;
1704     *parameterOutArray = OutParameter;
1705 
1706     if (NetModified || (TopoSortID != TOPOLOGICAL_FF))
1707 	/* Net has been modified or topologic array isn't initialized */
1708     {
1709 	/* check the topology of the network  */
1710 	no_of_layers = kr_topoCheck ();
1711 	if (KernelErrorCode != KRERR_NO_ERROR)
1712 	    return (KernelErrorCode);	/* an error has occurred  */
1713 	if (no_of_layers < 2)
1714 	    /* the network has less than 2 layers */
1715 	{
1716 	    KernelErrorCode = KRERR_FEW_LAYERS;
1717 	    return (KernelErrorCode);
1718 	}
1719 
1720 	/* count the no. of I/O units and check the patterns  */
1721 	if (kr_IOCheck () != KRERR_NO_ERROR)
1722 	    return (KernelErrorCode);
1723 
1724 	/* sort units by topology and by topologic type  */
1725 	kr_topoSort (TOPOLOGICAL_FF);
1726 	if ((KernelErrorCode != KRERR_NO_ERROR) &&
1727 	    (KernelErrorCode != KRERR_DEAD_UNITS))
1728 	    return (KernelErrorCode);
1729 
1730 	NetModified = FALSE;
1731     }
1732 
1733     /* compute the necessary sub patterns */
1734     KernelErrorCode = kr_initSubPatternOrder(start_pattern, end_pattern);
1735     if(KernelErrorCode != KRERR_NO_ERROR)
1736 	return (KernelErrorCode);
1737 
1738     /* reset network error value  */
1739     NET_ERROR (OutParameter) = 0.0;
1740 
1741     while (kr_getSubPatternByOrder (&pattern_no, &sub_pat_no))
1742     {
1743 	/* Forward propagation */
1744 	propagateNetForward(pattern_no,sub_pat_no);
1745 
1746 	/* Backward propagation with parameters according to header */
1747 	NET_ERROR(OutParameter) +=
1748 	    Backprop_weightdecay (pattern_no, sub_pat_no,
1749 				  LEARN_PARAM1 (parameterInArray),
1750 				  LEARN_PARAM2 (parameterInArray),
1751 				  LEARN_PARAM3 (parameterInArray),
1752 				  LEARN_PARAM4 (parameterInArray));
1753     }
1754 
1755     pr_checkDeadUnits();
1756 
1757     return (KernelErrorCode);
1758 }
1759 
1760 
1761 /*****************************************************************************
1762   FUNCTION : LEARN_backpropChunk
1763 
1764   PURPOSE  : main routine for the chunk-update version of the backpropagation
1765              algorithm
1766   RETURNS  : kernel error code
1767   NOTES    : Input Parameters:   1 : learning parameter
1768                                  2 : delta max
1769                                  3 : chunk size
1770 
1771              Output Parameters:  1 : error of the network (sum of all cycles)
1772 
1773   UPDATE   :
1774 ******************************************************************************/
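/* A sketch of the chunk-update scheme implemented below (pseudo code,
   parameter names as in the NOTES above): deltas are accumulated batch
   style over up to chunk_size sub patterns and then applied at once,
   with the learning rate scaled by the actual chunk length i:

       clearDeltas();
       i = 0;
       while (next sub pattern exists) {
           forward pass;
           accumulate deltas (propagateNetBackwardBatch);
           if (++i >= chunk_size) {
               updateWeights(learning parameter / i);
               clearDeltas();
               i = 0;
           }
       }
       if (i > 0)                       -- trailing partial chunk
           updateWeights(learning parameter / i);
*/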
1775 krui_err  LEARN_backpropChunk(int start_pattern, int end_pattern,
1776 			      float *parameterInArray, int NoOfInParams,
1777 			      float **parameterOutArray, int *NoOfOutParams)
1778 {
1779     static float    OutParameter[1];	/* OutParameter[0] stores the
1780 					   learning error  */
1781     int             pattern_no, sub_pat_no, no_of_layers;
1782     int             i = 0;
1783 
1784 
1785     KernelErrorCode = KRERR_NO_ERROR;	/* reset return code  */
1786 
1787     if (NoOfInParams < 3) {	/* Not enough input parameters	 */
1788 	KernelErrorCode = KRERR_PARAMETERS;
1789 	return (KernelErrorCode);
1790     }
1791 
1792     *NoOfOutParams = 1;	  /* One return value is available (learning error)  */
1793     *parameterOutArray = OutParameter;	/* set the output parameter reference */
1794     OutParameter[0] = 0.0;
1795 
1796     if (NetModified || (TopoSortID != TOPOLOGICAL_FF)) {
1797 	/* Net has been modified or topologic array isn't initialized */
1798 	/* check the topology of the network  */
1799 	no_of_layers = kr_topoCheck();
1800 	if (KernelErrorCode != KRERR_NO_ERROR)
1801 	    /* an error has occurred	 */
1802 	    return (KernelErrorCode);
1803 
1804 	if (no_of_layers < 2) {	/* the network has less than 2 layers  */
1805 	    KernelErrorCode = KRERR_FEW_LAYERS;
1806 	    return (KernelErrorCode);
1807 	}
1808 	/* count the no. of I/O units and check the patterns  */
1809 	if (kr_IOCheck() != KRERR_NO_ERROR)
1810 	    return (KernelErrorCode);
1811 
1812 	/* sort units by topology and by topologic type  */
1813 	(void) kr_topoSort(TOPOLOGICAL_FF);
1814 	if ((KernelErrorCode != KRERR_NO_ERROR) &&
1815 	    (KernelErrorCode != KRERR_DEAD_UNITS))
1816 	    return (KernelErrorCode);
1817 
1818 	NetModified = FALSE;
1819     }
1820 
1821     NET_ERROR(OutParameter) = 0.0; /* init error variable */
1822 
1823     /* compute the necessary sub patterns */
1824     KernelErrorCode = kr_initSubPatternOrder(start_pattern,end_pattern);
1825     if(KernelErrorCode != KRERR_NO_ERROR)
1826 	return (KernelErrorCode);
1827 
1828 
1829     /* prepare for beginning of one chunk */
1830     clearDeltas();
1831 
1832     i = 0;
1833     while(kr_getSubPatternByOrder(&pattern_no,&sub_pat_no)){
1834 
1835         propagateNetForward(pattern_no,sub_pat_no);
1836         NET_ERROR(OutParameter) +=
1837             propagateNetBackwardBatch(pattern_no,sub_pat_no,
1838                                       LEARN_PARAM2(parameterInArray));
1839 
1840 	if( ++i >= (int)(LEARN_PARAM3(parameterInArray))){
1841 	    updateWeights(LEARN_PARAM1(parameterInArray)/i);
1842 	    clearDeltas();
1843 	    i = 0;
1844 	}
1845     }
1846 
1847     /* update the network weights */
1848     if (i>0)
1849     {
1850 	updateWeights(LEARN_PARAM1(parameterInArray)/i);
1851     }
1852 
1853     return (KernelErrorCode);
1854 
1855 }
1856 
1857 /*****************************************************************************
1858   FUNCTION : LEARN_backpropJogChunk
1859 
1860   PURPOSE  : main routine for the chunk-update version of the backpropagation
1861              algorithm with embedded weights jogging
1862   RETURNS  : kernel error code
1863   NOTES    : Input Parameters:   1 : learning parameter
1864                                  2 : delta max
1865                                  3 : chunk size
1866                                  4 : lower jog value
1867                                  5 : upper jog value
1868 
1869              Output Parameters:  1 : error of the network (sum of all cycles)
1870 
1871 	     To be used within batchman scripts that automatically
1872 	     decrease the absolute jog values.
1873 
1874   UPDATE   :
1875 ******************************************************************************/
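/* "Jogging" here means perturbing all link weights by small random
   offsets (kr_jogWeights is called with the lower and upper jog value,
   presumably as the bounds of the random offset) at the start of each
   chunk; this can shake the network out of flat regions of the error
   surface.  Batchman scripts are expected to shrink the jog interval
   towards zero as training proceeds.  */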
1876 krui_err  LEARN_backpropJogChunk(int start_pattern, int end_pattern,
1877 				 float *parameterInArray, int NoOfInParams,
1878 				 float **parameterOutArray, int *NoOfOutParams)
1879 {
1880     static float    OutParameter[1];	/* OutParameter[0] stores the
1881 					   learning error  */
1882     int             pattern_no, sub_pat_no, no_of_layers;
1883     int             i = 0;
1884 
1885 
1886     KernelErrorCode = KRERR_NO_ERROR;	/* reset return code  */
1887 
1888     if (NoOfInParams < 3) {	/* Not enough input parameters	 */
1889 	KernelErrorCode = KRERR_PARAMETERS;
1890 	return (KernelErrorCode);
1891     }
1892 
1893     *NoOfOutParams = 1;	  /* One return value is available (learning error)  */
1894     *parameterOutArray = OutParameter;	/* set the output parameter reference */
1895     OutParameter[0] = 0.0;
1896 
1897     if (NetModified || (TopoSortID != TOPOLOGICAL_FF)) {
1898 	/* Net has been modified or topologic array isn't initialized */
1899 	/* check the topology of the network  */
1900 	no_of_layers = kr_topoCheck();
1901 	if (KernelErrorCode != KRERR_NO_ERROR)
1902 	    /* an error has occurred	 */
1903 	    return (KernelErrorCode);
1904 
1905 	if (no_of_layers < 2) {	/* the network has less than 2 layers  */
1906 	    KernelErrorCode = KRERR_FEW_LAYERS;
1907 	    return (KernelErrorCode);
1908 	}
1909 	/* count the no. of I/O units and check the patterns  */
1910 	if (kr_IOCheck() != KRERR_NO_ERROR)
1911 	    return (KernelErrorCode);
1912 
1913 	/* sort units by topology and by topologic type  */
1914 	(void) kr_topoSort(TOPOLOGICAL_FF);
1915 	if ((KernelErrorCode != KRERR_NO_ERROR) &&
1916 	    (KernelErrorCode != KRERR_DEAD_UNITS))
1917 	    return (KernelErrorCode);
1918 
1919 	NetModified = FALSE;
1920     }
1921 
1922     NET_ERROR(OutParameter) = 0.0; /* init error variable */
1923 
1924     /* compute the necessary sub patterns */
1925     KernelErrorCode = kr_initSubPatternOrder(start_pattern,end_pattern);
1926     if(KernelErrorCode != KRERR_NO_ERROR)
1927 	return (KernelErrorCode);
1928 
1929 
1930     /* prepare for beginning of one chunk */
1931     clearDeltas();
1932 
1933     i = 0;
1934     while(kr_getSubPatternByOrder(&pattern_no,&sub_pat_no)){
1935 
1936 	if (i==0 &&
1937 	    (LEARN_PARAM4(parameterInArray) != 0.0 ||
1938 	     LEARN_PARAM5(parameterInArray) != 0.0))
1939 	    kr_jogWeights(LEARN_PARAM4(parameterInArray),
1940 			  LEARN_PARAM5(parameterInArray));
1941 
1942         propagateNetForward(pattern_no,sub_pat_no);
1943         NET_ERROR(OutParameter) +=
1944             propagateNetBackwardBatch(pattern_no,sub_pat_no,
1945                                       LEARN_PARAM2(parameterInArray));
1946 
1947 	if( ++i >= (int)(LEARN_PARAM3(parameterInArray))){
1948 	    updateWeights(LEARN_PARAM1(parameterInArray)/i);
1949 	    clearDeltas();
1950 	    i = 0;
1951 	}
1952     }
1953 
1954     /* update the network weights */
1955     if (i>0)
1956     {
1957 	updateWeights(LEARN_PARAM1(parameterInArray)/i);
1958     }
1959 
1960     return (KernelErrorCode);
1961 
1962 }
1963 
1964 /*****************************************************************************
1965   FUNCTION : LEARN_backpropClassJogChunk
1966 
1967   PURPOSE  : main routine for the chunk-update version of the backpropagation
1968              algorithm with embedded weights jogging
1969 	     only neurons with matching class information are trained:
1970 
1971 	     with x in {0, 1, 2, 3, ...}
1972 	     if unit name = "class+x[+x]*"
1973 	         only train, if current pattern class index is one of the given
1974                  x values
1975 	     else if unit name = "class-x[-x]*"
1976 	         only train, if current pattern class index is different from
1977                  all of the given x values
1978 	     else (for all other unit names and if no class information at all)
1979                  train as usual
1980 
1981   RETURNS  : kernel error code
1982   NOTES    : Input Parameters:   1 : learning parameter
1983                                  2 : delta max
1984                                  3 : chunk size
1985                                  4 : lower jog value
1986                                  5 : upper jog value
1987 
1988              Output Parameters:  1 : error of the network (sum of all cycles)
1989 
1990 	     To be used within batchman scripts that automatically
1991 	     decrease the absolute jog values.
1992 
1993   UPDATE   :
1994 ******************************************************************************/
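/* Illustrative examples of the naming scheme described above (the unit
   names are made up): a unit named "class+1+3" is trained only on
   patterns whose class index is 1 or 3; a unit named "class-0" is
   trained on every class except 0; a unit named "out17", having no
   class prefix, is trained on all patterns.  */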
1995 krui_err  LEARN_backpropClassJogChunk(int start_pattern, int end_pattern,
1996 				 float *parameterInArray, int NoOfInParams,
1997 				 float **parameterOutArray, int *NoOfOutParams)
1998 {
1999     static float    OutParameter[1];	/* OutParameter[0] stores the
2000 					   learning error  */
2001     int             pattern_no, sub_pat_no, no_of_layers;
2002     int             i = 0;
2003     unsigned long int class_flags;
2004     unsigned long int class_add_flags;
2005     char            delimiter;
2006     char            *class_p;
2007     char            class_str[9];
2008     int             class_num;
2009     const int       maxclasses = 8 * sizeof(unsigned long int);
2010     struct Unit     *unit_ptr;
2011 
2012     KernelErrorCode = KRERR_NO_ERROR;	/* reset return code  */
2013 
2014     if (NoOfInParams < 3) {	/* Not enough input parameters	 */
2015 	KernelErrorCode = KRERR_PARAMETERS;
2016 	return (KernelErrorCode);
2017     }
2018 
2019     *NoOfOutParams = 1;	  /* One return value is available (learning error)  */
2020     *parameterOutArray = OutParameter;	/* set the output parameter reference */
2021     OutParameter[0] = 0.0;
2022 
2023     if (NetModified || (TopoSortID != TOPOLOGICAL_FF)) {
2024 	/* Net has been modified or topologic array isn't initialized */
2025 	/* check the topology of the network  */
2026 	no_of_layers = kr_topoCheck();
2027 	if (KernelErrorCode != KRERR_NO_ERROR)
2028 	    /* an error has occurred	 */
2029 	    return (KernelErrorCode);
2030 
2031 	if (no_of_layers < 2) {	/* the network has less than 2 layers  */
2032 	    KernelErrorCode = KRERR_FEW_LAYERS;
2033 	    return (KernelErrorCode);
2034 	}
2035 	/* count the no. of I/O units and check the patterns  */
2036 	if (kr_IOCheck() != KRERR_NO_ERROR)
2037 	    return (KernelErrorCode);
2038 
2039 	/* sort units by topology and by topologic type  */
2040 	(void) kr_topoSort(TOPOLOGICAL_FF);
2041 	if ((KernelErrorCode != KRERR_NO_ERROR) &&
2042 	    (KernelErrorCode != KRERR_DEAD_UNITS))
2043 	    return (KernelErrorCode);
2044 
2045 	/* set the usr_flags field of all units according to the class
2046 	   information given in the unit name:
2047 	   A bit set at position 2^n means to train this unit for class n */
2048 	FOR_ALL_UNITS(unit_ptr) {
2049 	    if (unit_ptr->unit_name &&
2050 		(strncmp(unit_ptr->unit_name, "class+", 6) == 0 ||
2051 		 strncmp(unit_ptr->unit_name, "class-", 6) == 0)
2052 		)
2053 	    {
2054 		delimiter = unit_ptr->unit_name[5];
2055 		if (delimiter == '+')
2056 		    class_flags = ((unsigned long int) 0); /* 00000000 */
2057 		else /* delimiter == '-' */
2058 		    class_flags = ~((unsigned long int) 0); /* 11111111 */
2059 
2060 		class_p = &(unit_ptr->unit_name[5]);
2061 		while (*class_p != '\0')
2062 		{
2063 		    class_p++;
2064 		    strncpy(class_str, class_p, 8);
2065 		    class_str[8] = '\0'; /* strncpy need not terminate */
2066 		    i = 0;
2067 		    while (i < 8 && isdigit(class_str[i]))
2068 			i++;
2069 		    if (i<8)
2070 			class_str[i] = '\0';
2071 		    class_num = atoi(class_str);
2072 		    if (class_num >= maxclasses)
2073 		    {
2074 			fprintf(stderr,
2075 				"LEARN_backpropClassJogChunk: "
2076 				"units class information not handled\n"
2077 				"%d is >= %d (maxclasses)\n",
2078 				class_num, maxclasses);
2079 			class_add_flags = ((unsigned long int) 0);
2080 		    }
2081 		    else
2082 			class_add_flags =
2083 			    ((unsigned long int) 1) << class_num;
2084 		    if (delimiter == '+')
2085 			class_flags |= class_add_flags;
2086 		    else
2087 			class_flags ^= class_add_flags;
2088 		    while (*class_p != '\0' && *class_p != delimiter)
2089 			class_p++;
2090 		}
2091 		unit_ptr->usr_flags = class_flags;
2092 	    }
2093 	    else
2094 		unit_ptr->usr_flags = ~((unsigned long int) 0); /* 11111111 */
2095 #ifdef DEBUG
2096 	    printf("%s: %lx  ",
2097 		  unit_ptr->unit_name ? unit_ptr->unit_name : "",
2098 		  unit_ptr->usr_flags);
2099 #endif
2100 	}
2101 #ifdef DEBUG
2102 	printf("\n");
2103 #endif
2104 	NetModified = FALSE;
2105     }
2106 
2107     NET_ERROR(OutParameter) = 0.0; /* init error variable */
2108 
2109     /* compute the necessary sub patterns */
2110     KernelErrorCode = kr_initSubPatternOrder(start_pattern,end_pattern);
2111     if(KernelErrorCode != KRERR_NO_ERROR)
2112 	return (KernelErrorCode);
2113 
2114 
2115     /* prepare for beginning of one chunk */
2116     clearAllDeltas();
2117 
2118     i = 0;
2119     while(kr_getSubPatternByOrder(&pattern_no,&sub_pat_no)){
2120 
2121 	if (i==0 &&
2122 	    (LEARN_PARAM4(parameterInArray) != 0.0 ||
2123 	     LEARN_PARAM5(parameterInArray) != 0.0))
2124 	{
2125 #ifdef HAVE_LRAND48
2126 	    unsigned short lastseed[3];
2127 	    unsigned short *seedptr;
2128 
2129 	    /* read out the current seed and re-install it; it is restored after jogging */
2130 	    seedptr = seed48(lastseed);
2131 	    lastseed[0] = seedptr[0];
2132 	    lastseed[1] = seedptr[1];
2133 	    lastseed[2] = seedptr[2];
2134 	    seed48(lastseed);
2135 #endif
2136 	    kr_jogWeights(LEARN_PARAM4(parameterInArray),
2137 			  LEARN_PARAM5(parameterInArray));
2138 #ifdef HAVE_LRAND48
2139 	    /* reset to previous random seed */
2140 	    seed48(lastseed);
2141 #endif
2142 	}
2143 
2144         propagateNetForward(pattern_no,sub_pat_no);
2145         NET_ERROR(OutParameter) +=
2146             propagateClassNetBackwardBatch(pattern_no,sub_pat_no,
2147 					   LEARN_PARAM2(parameterInArray));
2148 
2149 	if( ++i >= (int)(LEARN_PARAM3(parameterInArray))){
2150 	    updateNormalizedWeights(LEARN_PARAM1(parameterInArray));
2151 	    clearAllDeltas();
2152 	    i = 0;
2153 	}
2154     }
2155 
2156     /* update the network weights */
2157     if (i>0)
2158     {
2159 	updateNormalizedWeights(LEARN_PARAM1(parameterInArray));
2160     }
2161 
2162     return (KernelErrorCode);
2163 
2164 }
2165 
2166 
2167 /*****************************************************************************
2168  *****************************************************************************
2169 
2170   GROUP        : quickpropagation learning function
2171 
2172   AUTHOR       : Peter Zimmerer
2173 
2174 ******************************************************************************
2175 ******************************************************************************/
2176 
2177 /*****************************************************************************
2178   FUNCTION : initializeQuickprop
2179 
2180   PURPOSE  : initializes the quickprop learning
2181   RETURNS  : kernel error code
2182   NOTES    :
2183 
2184   UPDATE   : 05.11.1993 by Guenter Mamier
2185 ******************************************************************************/
2186 static krui_err initializeQuickprop(void)
2187 {
2188     register unsigned short flags;
2189     register struct Link *link_ptr;
2190     register struct Unit *unit_ptr;
2191     register struct Site *site_ptr;
2192 
2193 
2194     FOR_ALL_UNITS(unit_ptr) {
2195 	flags = unit_ptr->flags;
2196 
2197 	if ((flags & UFLAG_IN_USE) == UFLAG_IN_USE) {	/* unit is in use  */
2198 	    unit_ptr->value_a = unit_ptr->value_b =
2199 		unit_ptr->value_c = (FlintType) 0;
2200 
2201 	    if (flags & UFLAG_SITES) {	/* unit has sites  */
2202 		FOR_ALL_SITES_AND_LINKS(unit_ptr, site_ptr, link_ptr)
2203 		    link_ptr->value_a = link_ptr->value_b =
2204 			                link_ptr->value_c = (FlintType) 0;
2205 	    } else {		/* unit has no sites   */
2206 		if (flags & UFLAG_DLINKS) {	/* unit has direct links    */
2207 		    FOR_ALL_LINKS(unit_ptr, link_ptr)
2208 			link_ptr->value_a = link_ptr->value_b =
2209 			                    link_ptr->value_c = (FlintType) 0;
2210 		}
2211 	    }
2212 	}
2213     }
2214 
2215     return (KRERR_NO_ERROR);
2216 }
2217 
2218 
2219 
2220 /*****************************************************************************
2221   FUNCTION : propagateNetBackwardQuickprop
2222 
2223   PURPOSE  : quickprop backward error propagation
2224   RETURNS  : network error
2225   NOTES    : quickprop backward error propagation
2226              (topological) for quickprop with SIGMOID_PRIME_OFFSET
2227 	     batch-modus: without adaption of links and bias
2228 
2229   UPDATE   : 05.11.1993 by Guenter Mamier
2230 ******************************************************************************/
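/* Note: adding SIGMOID_PRIME_OFFSET to the activation derivative below
   is the usual flat spot elimination trick: it keeps the error signal
   non-zero even where the derivative of the logistic function is close
   to zero, so saturated units can still learn.  */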
2231 static float propagateNetBackwardQuickprop(int pattern_no, int sub_pat_no,
2232 					   float delta_max)
2233 {
2234     register struct Link *link_ptr;
2235     register struct Site *site_ptr;
2236     register struct Unit *unit_ptr;
2237     register Patterns out_pat;
2238     register float  error,	/* error  */
2239     sum_error,			/* sum of the error  */
2240     devit;			/* deviation  */
2241     TopoPtrArray    topo_ptr;
2242     int size;
2243 
2244 
2245     sum_error = 0.0;		/* reset network error  */
2246 
2247     /* calculate the address of the output pattern  */
2248     out_pat = kr_getSubPatData(pattern_no,sub_pat_no,OUTPUT,&size);
2249     out_pat += size;
2250 
2251     /* add 3 to no_of_topo_units because the topologic array contains 4 NULL
2252        pointers  */
2253     topo_ptr = topo_ptr_array + (no_of_topo_units + 3);
2254 
2255     /* calculate output units only  */
2256     while ((unit_ptr = *--topo_ptr) != NULL) {
2257 	devit = *(--out_pat) - unit_ptr->Out.output;
2258 	/* (the derivative used below is o * (1.0 - o), in [0.0,0.25], */
2259 	/* for the asymmetric logistic function) */
2260 
2261 	if ((float) fabs(devit) <= delta_max)
2262 	    continue;
2263 
2264 	sum_error += devit * devit; /* sum up the error of the network  */
2265 
2266 	/* calc. error for output units	 */
2267 	error = devit * (((unit_ptr->act_deriv_func == ACT_DERIV_Custom_Python) ?
2268 			kr_PythonActFunction(unit_ptr->python_act_deriv_func,
2269 						unit_ptr) :
2270 			(unit_ptr->act_deriv_func) (unit_ptr))  +
2271 			 SIGMOID_PRIME_OFFSET);
2272 
2273 	unit_ptr->value_c += -error; /* calculate the bias slopes  */
2274 	/* learn bias like a weight  */
2275 	if (UNIT_HAS_DIRECT_INPUTS(unit_ptr)) {
2276 	    /* the unit has direct links  */
2277 	    FOR_ALL_LINKS(unit_ptr, link_ptr) {	/* calculate the slopes */
2278 		link_ptr->value_c += -error * link_ptr->to->Out.output;
2279 		link_ptr->to->Aux.flint_no += link_ptr->weight * error;
2280 	    }
2281 	} else {
2282 	    /* the unit has sites  */
2283 	    FOR_ALL_SITES_AND_LINKS(unit_ptr, site_ptr, link_ptr) {
2284 		/* calculate the value_cs  */
2285 		link_ptr->value_c += -error * link_ptr->to->Out.output;
2286 		link_ptr->to->Aux.flint_no += link_ptr->weight * error;
2287 	    }
2288 	}
2289     }
2290 
2291 
2292     /* calculate hidden units only  */
2293     while ((unit_ptr = *--topo_ptr) != NULL) {
2294 	error = (((unit_ptr->act_deriv_func == ACT_DERIV_Custom_Python) ?
2295 			kr_PythonActFunction(unit_ptr->python_act_deriv_func,
2296 						unit_ptr) :
2297 			(unit_ptr->act_deriv_func) (unit_ptr))  +
2298 		 SIGMOID_PRIME_OFFSET) * unit_ptr->Aux.flint_no;
2299 
2300 	unit_ptr->value_c += -error; /* calculate the bias slopes  */
2301 	/* learn bias like a weight  */
2302 	if (UNIT_HAS_DIRECT_INPUTS(unit_ptr)) {
2303 	    /* the unit has direct links  */
2304 	    FOR_ALL_LINKS(unit_ptr, link_ptr) { /* calculate the slopes  */
2305 		if IS_HIDDEN_UNIT (link_ptr->to)
2306 		    /* this link points to a hidden unit:
2307 		       sum up the errors from previous
2308 		       units  */
2309 		    link_ptr->to->Aux.flint_no +=
2310 			    link_ptr->weight * error;
2311 
2312 		link_ptr->value_c += -error * link_ptr->to->Out.output;
2313 	    }
2314 	} else {
2315 	    /* the unit has sites  */
2316 	    FOR_ALL_SITES_AND_LINKS(unit_ptr, site_ptr, link_ptr) {
2317 		/* calculate the slopes  */
2318 		if IS_HIDDEN_UNIT (link_ptr->to)
2319 		    /* this link points to a hidden unit:
2320 		       sum up the errors from previous
2321 		       units  */
2322 		    link_ptr->to->Aux.flint_no +=
2323 			    link_ptr->weight * error;
2324 
2325 		link_ptr->value_c += -error * link_ptr->to->Out.output;
2326 	    }
2327 	}
2328     }
2329     return (sum_error);		/* return the error of the network  */
2330 }
2331 
2332 
2333 /*****************************************************************************
2334   FUNCTION : MODI_quickprop
2335 
2336   PURPOSE  : modifies the network at the end of each epoch
2337   RETURNS  :
2338   NOTES    :
2339 
2340   UPDATE   : 06.11.1993 by Guenter Mamier
2341 ******************************************************************************/
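/* Roles of the scratch fields as used below, for the bias of each unit
   as well as for each link (a reading of the code, not part of the
   original documentation):

       value_a : previous weight (bias) change   delta_w(t-1)
       value_b : previous slope                  S(t-1)
       value_c : current slope                   S(t), including the
                                                 decay term

   The quickprop step computed below is

       delta_w(t) = S(t) / (S(t-1) - S(t)) * delta_w(t-1)

   limited to at most max_factor * delta_w(t-1) (this is what the shfac
   test implements), with an ordinary gradient step
   learn_parameter * (-S(t)) added whenever the current slope still
   points in the direction of the previous step, and used alone when
   the previous step was zero.  */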
2342 static void  MODI_quickprop(float learn_parameter, float max_factor,
2343 			    float decay)
2344  /* learning parameter */
2345  /* maximal growth factor of weights */
2346  /* decay factor */
2347 
2348 {
2349     double          deltaw;	/* actual weight (bias) change */
2350     float           shfac;	/* shrink factor */
2351     register struct Link *link_ptr;
2352     register struct Site *site_ptr;
2353     register struct Unit *unit_ptr;
2354     TopoPtrArray    topo_ptr;
2355     bool            hidden_units;
2356 
2357 
2358     /* maximal growth factor of weights is max_factor  */
2359     shfac = max_factor / (1.0 + max_factor);
2360 
2361     topo_ptr = topo_ptr_array + (NoOfInputUnits + 1);
2362     hidden_units = TRUE;
2363 
2364     /* calculate hidden and output units only  */
2365     do {
2366 	if ((unit_ptr = *++topo_ptr) == NULL) {
2367 	    if (!hidden_units)
2368 		break;		/* end of topologic pointer array reached  */
2369 	    unit_ptr = *++topo_ptr;	/* skip NULL pointer  */
2370 	    hidden_units = FALSE;
2371 	}
2372 	if (IS_SPECIAL_UNIT(unit_ptr)) {
2373 	    unit_ptr->value_a =
2374 	    unit_ptr->value_b =
2375 	    unit_ptr->value_c = 0.0;
2376 	    if (UNIT_HAS_DIRECT_INPUTS(unit_ptr)) {/* unit has direct links */
2377 		FOR_ALL_LINKS(unit_ptr, link_ptr) {
2378 		    link_ptr->value_a =
2379 		    link_ptr->value_b =
2380 		    link_ptr->value_c = 0.0;
2381 		}
2382 	    } else {		/* the unit has sites  */
2383 		FOR_ALL_SITES_AND_LINKS(unit_ptr, site_ptr, link_ptr) {
2384 		    link_ptr->value_a =
2385 		    link_ptr->value_b =
2386 		    link_ptr->value_c = 0.0;
2387 		}
2388 	    }
2389 	} else {
2390 	    deltaw = 0.0;	/* adjust bias like a weight  */
2391 	    if (unit_ptr->value_a > 0.0) { /* previous step was positive  */
2392 		if (unit_ptr->value_c < 0.0)
2393 		    /* same direction, i.e. slope and previous slope have the same sign  */
2394 		    deltaw += learn_parameter * (-unit_ptr->value_c);
2395 
2396 		if (unit_ptr->value_c <= shfac * unit_ptr->value_b)
2397 		    /* maximal positive step  */
2398 		    deltaw += max_factor * unit_ptr->value_a;
2399 		else
2400 		    /* smaller positive step: quadratic approximation  */
2401 		    deltaw += unit_ptr->value_c /
2402 			      (unit_ptr->value_b - unit_ptr->value_c)
2403 			      * unit_ptr->value_a;
2404 	    } else if (unit_ptr->value_a < 0.0) {	/* previous step was
2405 							   negative  */
2406 		if (unit_ptr->value_c > 0.0)
2407 		    /* same direction, i.e. slope and previous slope have the same sign  */
2408 		    deltaw += learn_parameter * (-unit_ptr->value_c);
2409 
2410 		if (unit_ptr->value_c >= shfac * unit_ptr->value_b)
2411 		    /* maximal negative step  */
2412 		    deltaw += max_factor * unit_ptr->value_a;
2413 		else
2414 		    /* smaller negative step: quadratic approximation */
2415 		    deltaw += unit_ptr->value_c /
2416 			      (unit_ptr->value_b - unit_ptr->value_c)
2417 			      * unit_ptr->value_a;
2418 	    } else
2419 		/* previous step was 0.0  */
2420 		/* start of learning process with BP  */
2421 		deltaw += learn_parameter * (-unit_ptr->value_c);
2422 
2423 	    unit_ptr->bias += deltaw;	/* new bias */
2424 	    unit_ptr->value_a = deltaw;	/* bias change */
2425 	    unit_ptr->value_b = unit_ptr->value_c;	/* previous slope */
2426 	    unit_ptr->value_c = decay * unit_ptr->bias;	/* set new slope  */
2427 
2428 	    /* adjust links */
2429 	    if (UNIT_HAS_DIRECT_INPUTS(unit_ptr)) {/* unit has direct links */
2430 		FOR_ALL_LINKS(unit_ptr, link_ptr) {
2431 		    deltaw = 0.0;
2432 
2433 		    if (link_ptr->value_a > 0.0) { /* prev step was positive */
2434 			if (link_ptr->value_c < 0.0)
2435 			    /* same direction, i.e. slope and previous
2436 			       slope have the same sign  */
2437 			    deltaw += learn_parameter * (-link_ptr->value_c);
2438 
2439 			if (link_ptr->value_c <= shfac * link_ptr->value_b)
2440 			    /* maximal positive step  */
2441 			    deltaw += max_factor * link_ptr->value_a;
2442 			else
2443 			    deltaw += link_ptr->value_c /
2444 				      (link_ptr->value_b - link_ptr->value_c)
2445 				      * link_ptr->value_a;
2446 		    } else if (link_ptr->value_a < 0.0) {
2447 			/* previous step was negative */
2448 			if (link_ptr->value_c > 0.0)
2449 			    /* same direction, i.e. slope and previous
2450 			       slope have the same sign */
2451 			    deltaw += learn_parameter * (-link_ptr->value_c);
2452 
2453 			if (link_ptr->value_c >= shfac * link_ptr->value_b)
2454 			    /* maximal negative step  */
2455 			    deltaw += max_factor * link_ptr->value_a;
2456 			else
2457 			    deltaw += link_ptr->value_c /
2458 				      (link_ptr->value_b - link_ptr->value_c)
2459 				      * link_ptr->value_a;
2460 		    } else	/* previous step was 0.0  */
2461 			/* start of learning process with BP  */
2462 			deltaw += learn_parameter * (-link_ptr->value_c);
2463 
2464 		    link_ptr->weight += deltaw;	/* new weight */
2465 		    link_ptr->value_a = deltaw;	/* weight change */
2466 		    link_ptr->value_b = link_ptr->value_c;  /* previous slope */
2467 		    /* set new slope  */
2468 		    link_ptr->value_c = decay * link_ptr->weight;
2469 		}		/* for links  */
2470 	    } else {		/* the unit has sites  */
2471 		FOR_ALL_SITES_AND_LINKS(unit_ptr, site_ptr, link_ptr) {
2472 		    deltaw = 0.0;
2473 		    if (link_ptr->value_a > 0.0) {	/* previous step was
2474 							   positive  */
2475 			if (link_ptr->value_c < 0.0)
2476 			    /* same direction, i.e. slope and previous
2477 			       slope have the same sign  */
2478 			    deltaw += learn_parameter * (-link_ptr->value_c);
2479 
2480 			if (link_ptr->value_c <= shfac * link_ptr->value_b)
2481 			    /* maximal positive step  */
2482 			    deltaw += max_factor * link_ptr->value_a;
2483 			else
2484 			    /* smaller positive step: quadratic approximation  */
2485 			    deltaw += link_ptr->value_c /
2486 				      (link_ptr->value_b - link_ptr->value_c)
2487 				       * link_ptr->value_a;
2488 		    } else if (link_ptr->value_a < 0.0) {
2489 			/* previous step was negative  */
2490 			if (link_ptr->value_c > 0.0)
2491 			    /* same direction, i.e. slope and previous
2492 			       slope have the same sign  */
2493 			    deltaw += learn_parameter * (-link_ptr->value_c);
2494 
2495 			if (link_ptr->value_c >= shfac * link_ptr->value_b)
2496 			    /* maximal negative step  */
2497 			    deltaw += max_factor * link_ptr->value_a;
2498 			else
2499 			    deltaw += link_ptr->value_c /
2500 				      (link_ptr->value_b - link_ptr->value_c)
2501 				      * link_ptr->value_a;
2502 		    } else	/* previous step was 0.0  */
2503 			/* start of learning process with BP  */
2504 			deltaw += learn_parameter * (-link_ptr->value_c);
2505 
2506 		    link_ptr->weight += deltaw;	/* new weight */
2507 		    link_ptr->value_a = deltaw;	/* weight change */
2508 		    link_ptr->value_b = link_ptr->value_c; /* previous slope */
2509 		    /* set new slope */
2510 		    link_ptr->value_c = decay * link_ptr->weight;
2511 		}
2512 	    }
2513 	}
2514     }				/* for units  */
2515     while (TRUE);
2516 
2517 }
2518 
2519 
2520 /*****************************************************************************
2521   FUNCTION : LEARN_quickprop
2522 
2523   PURPOSE  : Quickprop learning function
2524   RETURNS  : kernel error code
2525   NOTES    : Input Parameters:   1 : learning parameter
2526                                  2 : max factor (maximal growth factor of the weights)
2527                                  3 : decay
2528                                  4 : delta max
2529 
2530              Output Parameters:  1 : error of the network (sum of all cycles)
2531 
2532 
2533   UPDATE   : 06.11.1993 by Guenter Mamier
2534 ******************************************************************************/
2535 krui_err LEARN_quickprop(int start_pattern, int end_pattern,
2536 			 float *parameterInArray, int NoOfInParams,
2537 			 float **parameterOutArray, int *NoOfOutParams)
2538 {
2539     static float OutParameter[1]; /* OutParameter[0] stores the learning error*/
2540     int          pattern_no, sub_pat_no, ret_code;
2541 
2542 
2543     if (NoOfInParams < 1)	/* ###  has to be changed  (must be 3)  #### */
2544 	return (KRERR_PARAMETERS);	/* not enough input parameters  */
2545 
2546     *NoOfOutParams = 1;		/* one return value is available (the
2547 				   learning error) */
2548 
2549     *parameterOutArray = OutParameter;	/* set output parameter reference  */
2550     ret_code = KRERR_NO_ERROR;	/* reset return code  */
2551 
2552     if (NetModified || (TopoSortID != TOPOLOGICAL_FF)) {
2553 	/* Net has been modified or topologic array isn't initialized */
2554 	/* check the topology of the network  */
2555 	ret_code = kr_topoCheck();
2556 	if (ret_code < KRERR_NO_ERROR)
2557 	    return (ret_code);	/* an error has occured  */
2558 	if (ret_code < 2)
2559 	    return (KRERR_FEW_LAYERS);	/* the network has less than 2 layers */
2560 
2561 	/* count the no. of I/O units and check the patterns  */
2562 	ret_code = kr_IOCheck();
2563 	if (ret_code < KRERR_NO_ERROR)
2564 	    return (ret_code);
2565 
2566 	/* sort units by topology and by topologic type  */
2567 	ret_code = kr_topoSort(TOPOLOGICAL_FF);
2568 	if ((ret_code != KRERR_NO_ERROR) && (ret_code != KRERR_DEAD_UNITS))
2569 	    return (ret_code);
2570 
2571 	NetModified = FALSE;
2572     }
2573     if (NetInitialize || LearnFuncHasChanged) {	/* Net has been modified or
2574 						   initialized, initialize
2575 						   quickprop now  */
2576 	ret_code = initializeQuickprop();
2577 	if (ret_code != KRERR_NO_ERROR)
2578 	    return (ret_code);
2579     }
2580 
2581 
2582     /* compute the necessary sub patterns */
2583 
2584     KernelErrorCode = kr_initSubPatternOrder(start_pattern,end_pattern);
2585     if(KernelErrorCode != KRERR_NO_ERROR)
2586 	return (KernelErrorCode);
2587 
2588 
2589     NET_ERROR(OutParameter) = 0.0;	/* reset network error value  */
2590 
2591     while(kr_getSubPatternByOrder(&pattern_no,&sub_pat_no)){
2592 
2593 	propagateNetForward(pattern_no,sub_pat_no);   /* Forward propagation */
2594 
2595 	/* backward propagation and summation of gradient  */
2596 	NET_ERROR(OutParameter) +=
2597 	    propagateNetBackwardQuickprop(pattern_no,sub_pat_no,
2598 					  LEARN_PARAM4(parameterInArray));
2599     }
2600 
2601     /* modify links and bias  */
2602     MODI_quickprop(LEARN_PARAM1(parameterInArray),
2603 		   LEARN_PARAM2(parameterInArray),
2604 		   LEARN_PARAM3(parameterInArray));
2605 
2606     return (ret_code);
2607 }
2608 
2609 
2610 
2611 /*****************************************************************************
2612  *****************************************************************************
2613 
2614   GROUP        : Counterpropagation learning function
2615 
2616 ******************************************************************************
2617 ******************************************************************************/
2618 
2619 /*****************************************************************************
2620   FUNCTION : initializeCPN
2621 
2622   PURPOSE  : Counterpropagation initialisation
2623   RETURNS  : kernel error code
2624   NOTES    :
2625 
2626   UPDATE   : 06.11.1993 by Guenter Mamier
2627 ******************************************************************************/
2628 static krui_err initializeCPN(void)
2629 {
2630     register struct Unit *unit_ptr;
2631 
2632 
2633     NoOfLearnedPatterns = 0;
2634 
2635     /* set unit's bias to zero  */
2636     FOR_ALL_UNITS(unit_ptr)
2637 	if ((unit_ptr->flags & UFLAG_IN_USE) == UFLAG_IN_USE)
2638 	/* unit is in use  */
2639 	unit_ptr->bias = (FlintType) 0;
2640 
2641     return (KRERR_NO_ERROR);
2642 }
2643 
2644 
2645 /*****************************************************************************
2646   FUNCTION : normalize_weight
2647 
2648   PURPOSE  : normalizes the weight vector of the winner unit to unit length
2649   RETURNS  :
2650   NOTES    :
2651 
2652   UPDATE   : 06.11.1993 by Guenter Mamier
2653 ******************************************************************************/
2654 static void normalize_weight(struct Unit * winner_ptr, float sum)
2655 {
2656     register struct Site *site_ptr;
2657     register struct Link *link_ptr;
2658     register float  amount;
2659 
2660 
2661     amount = 1.0 / sqrt(sum);
2662 
2663     /* not necessary to see whether this is a special unit */
2664 
2665     if (winner_ptr->flags & UFLAG_SITES)
2666 	/* the unit has sites */
2667 	FOR_ALL_SITES_AND_LINKS(winner_ptr, site_ptr, link_ptr)
2668 	    link_ptr->weight = link_ptr->weight * amount;
2669     else
2670 	/* the unit has direct links */
2671 	FOR_ALL_LINKS(winner_ptr, link_ptr)
2672 	    link_ptr->weight = link_ptr->weight * amount;
2673 }
2674 
2675 
2676 /*****************************************************************************
2677   FUNCTION : normalize_inputvector
2678 
2679   PURPOSE  : normalizes the output vector of the input units to unit length
2680   RETURNS  :
2681   NOTES    :
2682 
2683   UPDATE   : 06.11.1993 by Guenter Mamier
2684 ******************************************************************************/
2685 static void normalize_inputvector(float sum)
2686 {
2687     register struct Unit *unit_ptr;
2688     register float  amount;
2689 
2690 
2691     amount = 1.0 / sqrt(sum);
2692 
2693     FOR_ALL_UNITS(unit_ptr)
2694 	if (IS_INPUT_UNIT(unit_ptr) && UNIT_IN_USE(unit_ptr))
2695 	/* this is an input unit */
2696 	unit_ptr->Out.output = unit_ptr->Out.output * amount;
2697 }
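/* Note: since both the input vector (normalize_inputvector) and the
   weight vector of the winner (normalize_weight) are scaled to unit
   Euclidean length, the weighted sum computed in propagateNet_CPN
   below is the cosine of the angle between input and weight vector;
   the winning Kohonen unit is therefore the one whose weight vector
   points closest to the direction of the input.  */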
2698 
2699 
2700 
2701 /*****************************************************************************
2702   FUNCTION : propagateNet_CPN
2703 
2704   PURPOSE  : forward pass of counterprop
2705   RETURNS  : network error (sum of squared deviations)
2706   NOTES    :
2707 
2708   UPDATE   : 06.11.1993 by Guenter Mamier
2709 ******************************************************************************/
2710 static float propagateNet_CPN(int pattern_no, int sub_pat_no, float alpha,
2711 			      float beta, float threshold)
2712 {
2713     register struct Link *link_ptr;
2714     register struct Site *site_ptr;
2715     register struct Unit *unit_ptr;
2716     register struct Unit *winner_ptr;
2717     register Patterns in_pat, out_pat;
2718     float           maximum, sum_error, devit, learn_error, sum;
2719     float           unit_ptr_net;
2720     float           noOfPatterns_mul_NoHiddenUnits;
2721     register TopoPtrArray topo_ptr;
2722 
2723     /* calculate the activation and the output values         */
2724     /* of the input units (Input Layer)                       */
2725 
2726     noOfPatterns_mul_NoHiddenUnits = (float) NoOfLearnedPatterns *
2727 	                             (float) NoOfHiddenUnits;
2728 
2729     sum = 0.0;
2730 
2731     /* calculate start address of the input pattern array  */
2732     in_pat = kr_getSubPatData(pattern_no,sub_pat_no,INPUT,NULL);
2733 
2734 
2735     topo_ptr = topo_ptr_array;
2736 
2737     /* copy pattern into input unit's activation and calculate output of the
2738        input units */
2739     while ((unit_ptr = *++topo_ptr) != NULL) {
2740 
2741 	/* topo_ptr points to the unit structures (sorted by: input-, hidden-
2742 	   and output-units, separated with NULL pointers) */
2743 	sum += *in_pat * *in_pat;
2744 
2745 	if (unit_ptr->out_func == OUT_IDENTITY)
2746 	    /* identity output function: there is no need to call the output
2747 	       function  */
2748 	    unit_ptr->Out.output = unit_ptr->act = *in_pat++;
2749 	else if(unit_ptr->out_func == OUT_Custom_Python)
2750 		unit_ptr->Out.output =
2751 			kr_PythonOutFunction(unit_ptr->python_out_func,
2752 				unit_ptr->act = *in_pat++);
2753 	else
2754 	    /* no identity output function: calculate unit's output also  */
2755 	    unit_ptr->Out.output =
2756 		(*unit_ptr->out_func) (unit_ptr->act = *in_pat++);
2757     }
2758 
2759     if (sum != 0.0)
2760 	/* normalize the inputvector */
2761 	normalize_inputvector(sum);
2762 
2763 
2764     /* propagate Kohonen Layer   */
2765 
2766     /* calculate the activation and the output values         */
2767     /* of the hidden units (Kohonen Layer)                    */
2768 
2769 
2770     winner_ptr = NULL;
2771     maximum = -1.0e30;		/* contains the maximum of the activations */
2772 
2773     /* propagate hidden units  */
2774     while ((unit_ptr = *++topo_ptr) != NULL) {	/* topo_ptr points to a
2775 						   (topologically sorted) unit
2776 						   structure */
2777 	unit_ptr_net = 0.0;
2778 	if (UNIT_HAS_DIRECT_INPUTS(unit_ptr)) {	/* the unit has direct links */
2779 	    FOR_ALL_LINKS(unit_ptr, link_ptr)
2780 		unit_ptr_net += (link_ptr->weight * link_ptr->to->Out.output);
2781 	} else {		/* the unit has sites	 */
2782 	    FOR_ALL_SITES_AND_LINKS(unit_ptr, site_ptr, link_ptr)
2783 		unit_ptr_net += (link_ptr->weight * link_ptr->to->Out.output);
2784 	}
2785 
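	/* Presumably a conscience style handicap: units that have won
	   very often (bias counts the wins, see below) have threshold
	   subtracted from their net input so that other units get a
	   chance to win, too.  */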
2786 	if (unit_ptr->bias >= noOfPatterns_mul_NoHiddenUnits)
2787 	    unit_ptr_net -= threshold;
2788 
2789 	if (maximum < unit_ptr_net) {	/* determine winner unit  */
2790 	    winner_ptr = unit_ptr;
2791 	    maximum = unit_ptr_net;
2792 	}
2793 	/* reset output and activation of hidden units  */
2794 	unit_ptr->Out.output = unit_ptr->act = (FlintType) 0;
2795     }
2796 
2797 
2798     /* the competitive winner is chosen                */
2799 
2800     winner_ptr->Out.output = winner_ptr->act = (FlintType) 1;
2801     winner_ptr->bias++;
2802 
2803 
2804 
2805     /* Training the Kohonen Layer
2806 
2807        Only the weights of links that go to the winning unit are adjusted,
2808        the others remain the same. The incoming weights to the competitive
2809        unit are adapted as follows:
2810 
2811        weight(new) = weight(old) + eta * (output - weight(old))
2812 
2813        where eta (parameter alpha here) is the learning constant (0 < eta <= 1.0)
2814        and output is the output of the input unit
2815     */
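    /* Worked example with illustrative numbers: for alpha (eta) = 0.5,
       an incoming weight of 0.2 and a normalized input output of 0.6,
       the winner's weight becomes 0.2 + 0.5 * (0.6 - 0.2) = 0.4; the
       whole weight vector is then rescaled to unit length by
       normalize_weight().  */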
2816 
2817 
2818     if (!IS_SPECIAL_UNIT(winner_ptr)) {
2819 	sum = 0.0;
2820 	if (winner_ptr->flags & UFLAG_DLINKS) {	/* the winner unit has direct
2821 						   links  */
2822 	    FOR_ALL_LINKS(winner_ptr, link_ptr) {
2823 		devit = link_ptr->to->Out.output - link_ptr->weight;
2824 		learn_error = alpha * devit;
2825 		link_ptr->weight += learn_error;
2826 		/* this is needed for the normalization of the weight_vector */
2827 		sum += link_ptr->weight * link_ptr->weight;
2828 	    }
2829 	} else {		/* the winner unit has sites  */
2830 	    FOR_ALL_SITES_AND_LINKS(winner_ptr, site_ptr, link_ptr) {
2831 		devit = link_ptr->to->Out.output - link_ptr->weight;
2832 		learn_error = alpha * devit;
2833 		link_ptr->weight += learn_error;
2834 		/* this is needed for the normalization of the weight_vector */
2835 		sum += link_ptr->weight * link_ptr->weight;
2836 	    }
2837 	}
2838 	if (sum != 0.0)
2839 	    normalize_weight(winner_ptr, sum);
2840     }
2841 
2842 
2843     /* propagate Grossberg Layer                      */
2844     /* Training the Grossberg Layer                   */
2845     /* Adaptation of the Grossberg Layer weights is done by the    */
2846     /* Widrow-Hoff rule:                                           */
2847 
2848     /* weight(new) = weight(old) + beta * (target output - output) */
2849 
2850     /* for all weights connected with the winning unit of the      */
2851     /* Kohonen Layers                                              */
2852 
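    /* Note: only the single link coming from the winning Kohonen unit
       is adapted in the loop below; since every other hidden unit has
       output 0 and the winner has output 1, the weighted-sum part of an
       output unit's activation reduces to the weight of that one link. */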
2853 
2854     /* calculate address of the output pattern */
2855     out_pat = kr_getSubPatData(pattern_no,sub_pat_no,OUTPUT,NULL);
2856 
2857 
2858     sum_error = 0.0;
2859 
2860     /* propagate output units  */
2861     while ((unit_ptr = *++topo_ptr) != NULL) {	/* topo_ptr points to a
2862 						   (topologically sorted) unit
2863 						   structure */
2864 
2865 	/* calculate the activation and the output values         */
2866 	/* of the output units (Grossberg Layer)                  */
2867 
2868 	/* the activation function is the identity function (weighted sum)
2869 	   and identity output function */
2870 	unit_ptr->Out.output = unit_ptr->act =
2871 		((unit_ptr->act_func == ACT_Custom_Python) ?
2872 			kr_PythonActFunction(unit_ptr->python_act_func,
2873 						unit_ptr) :
2874 			(*unit_ptr->act_func) (unit_ptr)) ;
2875 
2876 	devit = *out_pat++ - unit_ptr->Out.output;    /* calculate deviation */
2877 	sum_error += devit * devit;
2878 	learn_error = beta * devit;
2879 
2880 	if (!IS_SPECIAL_UNIT(unit_ptr)) {
2881 	    if (UNIT_HAS_DIRECT_INPUTS(unit_ptr)){ /* unit has direct links */
2882 		FOR_ALL_LINKS(unit_ptr, link_ptr)
2883 		    if (link_ptr->to == winner_ptr) {
2884 			/* link to the winning unit of the Kohonen Layer */
2885 			link_ptr->weight += learn_error;
2886 			break;
2887 		    }
2888 	    } else {		/* the unit has sites */
2889 		FOR_ALL_SITES_AND_LINKS(unit_ptr, site_ptr, link_ptr)
2890 		    if (link_ptr->to == winner_ptr) {
2891 			/* link to the winning unit of the Kohonen Layer */
2892 			link_ptr->weight += learn_error;
2893 			break;
2894 		    }
2895 	    }
2896 	}
2897     }
2898 
2899     return (sum_error);
2900 }
2901 
2902 
2903 
2904 /*****************************************************************************
2905   FUNCTION : LEARN_CPN
2906 
2907   PURPOSE  : main function for counterpropagation
2908   RETURNS  : kernel error code
2909   NOTES    :
2910 
2911   UPDATE   : 06.11.1993 by Guenter Mamier
2912 ******************************************************************************/
2913 krui_err  LEARN_CPN(int start_pattern, int end_pattern,
2914 		    float *parameterInArray, int NoOfInParams,
2915 		    float **parameterOutArray, int *NoOfOutParams)
2916 {
2917     static float    OutParameter[1];	/* OutParameter[0] stores the
2918 					   learning error  */
2919     int             ret_code, pattern_no, sub_pat_no;
2920 
2921 
2922     if (NoOfInParams < 1)	/* has to be changed (must be 3) */
2923 	return (KRERR_PARAMETERS);	/* Not enough input parameters  */
2924 
2925     *NoOfOutParams = 1;		/* one return value is available (the
2926 				   learning error) */
2927     *parameterOutArray = OutParameter;	/* set output parameter reference  */
2928     ret_code = KRERR_NO_ERROR;	/* clear return code  */
2929 
2930 
2931     if (NetModified || (TopoSortID != TOPOLOGIC_TYPE)) {
2932 	/* Net has been modified  or topologic array isn't initialized */
2933 	/* check the topology of the network  */
2934 	ret_code = kr_topoCheck();
2935 	if (ret_code < KRERR_NO_ERROR)
2936 	    return (ret_code);	/* an error has occured  */
2937 	if (ret_code != 3)
2938 	    return (KRERR_FEW_LAYERS);	/* the network must have exactly 3 layers */
2939 
2940 	/* count the no. of I/O units and check the patterns  */
2941 	ret_code = kr_IOCheck();
2942 	if (ret_code < KRERR_NO_ERROR)
2943 	    return (ret_code);
2944 
2945 	/* sort units by topology and by topologic type  */
2946 	ret_code = kr_topoSort(TOPOLOGIC_TYPE);
2947 	if ((ret_code != KRERR_NO_ERROR) && (ret_code != KRERR_DEAD_UNITS))
2948 	    return (ret_code);
2949 
2950 	NetModified = FALSE;
2951     }
2952     if (NetInitialize || LearnFuncHasChanged) {	/* Net has been modified or
2953 						   initialized, initialize
2954 						   CPN now  */
2955 	ret_code = initializeCPN();
2956 	if (ret_code != KRERR_NO_ERROR)
2957 	    return (ret_code);
2958     }
2959 
2960 
2961     /* compute the necessary sub patterns */
2962 
2963     KernelErrorCode = kr_initSubPatternOrder(start_pattern,end_pattern);
2964     if(KernelErrorCode != KRERR_NO_ERROR)
2965 	return (KernelErrorCode);
2966 
2967 
2968     NET_ERROR(OutParameter) = 0.0;	/* reset network error value  */
2969 
2970     while(kr_getSubPatternByOrder(&pattern_no,&sub_pat_no)){
2971 
2972 	NoOfLearnedPatterns++;
2973 	NET_ERROR(OutParameter)
2974 	    += propagateNet_CPN(pattern_no,sub_pat_no,
2975 				LEARN_PARAM1(parameterInArray),
2976 				LEARN_PARAM2(parameterInArray),
2977 				LEARN_PARAM3(parameterInArray));
2978     }
2979 
2980     return (ret_code);
2981 }
2982 
2983 
2984 
2985 
2986 /*****************************************************************************
2987  *****************************************************************************
2988 
2989   GROUP        : Back-Percolation Learning Function
2990 
2991   AUTHOR       : Artemis Hatzigeorgiou  Algorithm by Mark Jurik
2992 
2993 ******************************************************************************
2994 ******************************************************************************/
2995 
2996 /*****************************************************************************
2997   FUNCTION : propagateNetForward_perc
2998 
2999   PURPOSE  : topological forward propagation
3000   RETURNS  :
3001   NOTES    :
3002 
3003   UPDATE   : 06.11.1993 by Guenter Mamier
3004 ******************************************************************************/
3005 static void propagateNetForward_perc(int pattern_no, int sub_pat_no)
3006 {
3007     register struct Unit *unit_ptr;
3008     register Patterns in_pat;
3009     register TopoPtrArray topo_ptr;
3010 
3011     /* calculate start address of the input pattern array  */
3012     in_pat = kr_getSubPatData(pattern_no,sub_pat_no,INPUT,NULL);
3013 
3014     topo_ptr = topo_ptr_array;
3015 
3016     /* copy pattern into input unit's activation and calculate output of the
3017        input units */
3018     while ((unit_ptr = *++topo_ptr) != NULL) {	/* topo_ptr points to a
3019 						   (topologically sorted) unit
3020 						   structure (input units
3021 						   first)  */
3022 	if (unit_ptr->out_func == OUT_IDENTITY)
3023 	    /* identity output function: there is no need to call the output
3024 	       function  */
3025 	    unit_ptr->Out.output = unit_ptr->act = *in_pat++;
3026 	else if(unit_ptr->out_func == OUT_Custom_Python)
3027 		unit_ptr->Out.output =
3028 			kr_PythonOutFunction(unit_ptr->python_out_func,
3029 				unit_ptr->act = *in_pat++);
3030 	else
3031 	    /* no identity output function: calculate unit's output also  */
3032 	    unit_ptr->Out.output =
3033 		(*unit_ptr->out_func) (unit_ptr->act = *in_pat++);
3034     }
3035 
    /* propagate hidden units  */
    while ((unit_ptr = *++topo_ptr) != NULL) {	/* topo_ptr points to a
						   (topologically sorted) unit
						   structure */
3040 	/* clear values  */
3041 	unit_ptr->Aux.flint_no = 0.0;
3042 	unit_ptr->value_a = 0.0;
3043 	unit_ptr->value_b = 0.000001;
3044 
3045 	/* calculate the activation value of the unit: call the activation
3046 	   function if needed  */
3047 	unit_ptr->act = ((unit_ptr->act_func == ACT_Custom_Python) ?
3048 			kr_PythonActFunction(unit_ptr->python_act_func,
3049 						unit_ptr) :
3050 			(*unit_ptr->act_func) (unit_ptr)) ;
3051 
3052 	if (unit_ptr->out_func == OUT_IDENTITY)
3053 	    /* identity output function: there is no need to call the output
3054 	       function  */
3055 	    unit_ptr->Out.output = unit_ptr->act;
3056 	else if(unit_ptr->out_func == OUT_Custom_Python)
3057 		unit_ptr->Out.output =
3058 			kr_PythonOutFunction(unit_ptr->python_out_func,
3059 				unit_ptr->act);
3060 	else
3061 	    /* no identity output function: calculate unit's output also  */
3062 	    unit_ptr->Out.output = (*unit_ptr->out_func) (unit_ptr->act);
3063     }
3064 
    /* propagate output units  */
    while ((unit_ptr = *++topo_ptr) != NULL) {	/* topo_ptr points to a
						   (topologically sorted) unit
						   structure */
3069 	/* clear values  */
3070 	unit_ptr->Aux.flint_no = 0.0;
3071 	unit_ptr->value_a = 0.0;
3072 	unit_ptr->value_b = 0.000001;
3073 
3074 	/* calculate the activation value of the unit: call the activation
3075 	   function if needed  */
3076 	unit_ptr->act = ((unit_ptr->act_func == ACT_Custom_Python) ?
3077 			kr_PythonActFunction(unit_ptr->python_act_func,
3078 						unit_ptr) :
3079 			(*unit_ptr->act_func) (unit_ptr)) ;
3080 
3081 	if (unit_ptr->out_func == OUT_IDENTITY)
3082 	    /* identity output function: there is no need to call the output
3083 	       function  */
3084 	    unit_ptr->Out.output = unit_ptr->act;
3085 	else if(unit_ptr->out_func == OUT_Custom_Python)
3086 		unit_ptr->Out.output =
3087 			kr_PythonOutFunction(unit_ptr->python_out_func,
3088 				unit_ptr->act);
3089 	else
3090 	    /* no identity output function: calculate unit's output also  */
3091 	    unit_ptr->Out.output = (*unit_ptr->out_func) (unit_ptr->act);
3092     }
3093 }
3094 
3095 
3096 /*****************************************************************************
3097   FUNCTION : propagateNetBackward_perc
3098 
  PURPOSE  : topological backward propagation
3100   RETURNS  : network error
3101   NOTES    :
3102 
3103   UPDATE   : 07.02.1994 by Sven Doering
3104 ******************************************************************************/
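/* Sketch of the per-unit quantities computed below (a reading of this code,
   not taken from Jurik's original description):

       devit            = target - output
       error            = -2 * devit * f'(net)              (output units)
       act_err          = devit * eta
       norm             = sum |w_hidden| + sum out_pred^2 + 1e-6
       normalised_error = act_err / norm
       scaled_error     = act_err / (sum out_pred^2 + 1e-6 + 1)

   scaled_error drives the actual weight and bias changes, while the
   weighted errors are accumulated in Aux.flint_no, value_a and value_b
   of the predecessor units for the subsequent hidden layer pass.  */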
static float propagateNetBackward_perc(int pattern_no, int sub_pat_no,
				       float learn_parameter,
				       float delta_max, float *perc_error)
3108 {
3109     register struct Link *link_ptr;
3110     register struct Unit *unit_ptr;
3111     register Patterns out_pat;
3112     register float  error, sum_error, eta, devit;
3113     register TopoPtrArray topo_ptr;
3114     register float  norm, delta_sig_normaliser, message_weight;
3115     register float  act_err, normalised_error, scaled_error,
3116                     delta_weight_normaliser;
3117     register float  der = 0.0;
3118     register float  tmp;
3119     register int    is_special;
3120     int size;
3121 
3122     sum_error = 0.0;		/* reset network error  */
3123     eta = learn_parameter;	/* store learn_parameter in CPU register  */
3124 
    /* calculate the address just behind the output pattern;
       it is traversed backwards below  */
3126     out_pat = kr_getSubPatData(pattern_no,sub_pat_no,OUTPUT,&size);
3127     out_pat += size;
3128 
3129     /* add 3 to no_of_topo_units because the topologic array contains 4 NULL
3130        pointers  */
3131     topo_ptr = topo_ptr_array + (no_of_topo_units + 3);
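    /* Layout sketch of the topological array, as implied by the traversal
       code in this file (4 NULL separators around 3 layer blocks):

	   index: 0     1..I    I+1   I+2..I+H+1  I+H+2  ...outputs...  I+H+O+3
	   entry: NULL  inputs  NULL  hidden      NULL                  NULL

       With no_of_topo_units == I+H+O the trailing NULL sits at offset
       no_of_topo_units + 3, so the first *--topo_ptr below yields the
       last output unit.  */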
3132 
3133     /* calculate output units only  */
3134     while ((unit_ptr = *--topo_ptr) != NULL) {
	devit = *(--out_pat) - unit_ptr->Out.output;	/* calc. deviation */
3136 
3137 	if (fabs(devit) > delta_max) {	/* calc. error for output units     */
3138 	    *perc_error += fabs(devit);
3139 	    error = -2.0 * devit *
3140 	    	((unit_ptr->act_deriv_func == ACT_DERIV_Custom_Python) ?
3141 			kr_PythonActFunction(unit_ptr->python_act_deriv_func,
3142 						unit_ptr) :
3143 			(unit_ptr->act_deriv_func) (unit_ptr)) ;
3144 	    act_err = devit * eta;
3145 	    sum_error += devit * devit;	/* sum up the error of the network  */
3146 	} else {		/* set error of output units to zero	 */
3147 	    error = 0.0;
3148 	    act_err = 0.000001 * eta;
3149 	    continue;
3150 	}
3151 
3152 	/* calc. the error for adjusting weights and bias of the predecessor
3153 	   units  */
3154 
3155 	norm = 0.0;
3156 	delta_sig_normaliser = 0.000001;
3157 	FOR_ALL_LINKS(unit_ptr, link_ptr) {	/* adjust link weights and
3158 						   calc. sum of errors of the
3159 						   predecessor units  */
3160 	    if (IS_HIDDEN_UNIT(link_ptr->to))
3161 		norm += fabs(link_ptr->weight);
3162 	    delta_sig_normaliser += SQR(link_ptr->to->Out.output);
3163 	}
3164 	delta_weight_normaliser = delta_sig_normaliser + 1;
3165 	norm += delta_sig_normaliser;
3166 	is_special = IS_SPECIAL_UNIT(unit_ptr);
3167 	normalised_error = act_err / norm;
3168 	scaled_error = act_err / delta_weight_normaliser;
3169 	FOR_ALL_LINKS(unit_ptr, link_ptr) {
3170 	    tmp = link_ptr->weight * error;
3171 	    link_ptr->to->Aux.flint_no += tmp;
3172 
3173 	    message_weight = tmp * tmp;
3174 	    if (!is_special) {
3175 		link_ptr->to->value_a += link_ptr->weight *
3176 		    normalised_error * message_weight;
3177 		link_ptr->to->value_b += message_weight;
3178 		link_ptr->weight += link_ptr->to->Out.output * scaled_error;
3179 	    }
3180 	}
3181 
3182 
3183 	/* adjust bias value  */
3184 	if (!is_special)
3185 	    unit_ptr->bias += scaled_error;
3186     }
3187 
3188     /* calculate hidden units only  */
3189     while ((unit_ptr = *--topo_ptr) != NULL) {
3190 	der = ((unit_ptr->act_deriv_func == ACT_DERIV_Custom_Python) ?
3191 			kr_PythonActFunction(unit_ptr->python_act_deriv_func,
3192 						unit_ptr) :
3193 			(unit_ptr->act_deriv_func) (unit_ptr)) ;
3194 	error = der * unit_ptr->Aux.flint_no;
3195 	act_err = (unit_ptr->value_a / unit_ptr->value_b) * der;
3196 
3197 	/* calc. the error for adjusting weights and bias of the predecessor
3198 	   units  */
3199 
3200 	norm = 0.0;
3201 	delta_sig_normaliser = 0.000001;
3202 	FOR_ALL_LINKS(unit_ptr, link_ptr) {
3203 	    if (IS_HIDDEN_UNIT(link_ptr->to))
3204 		norm += fabs(link_ptr->weight);
3205 
3206 	    delta_sig_normaliser += SQR(link_ptr->to->Out.output);
3207 	}
3208 	delta_weight_normaliser = delta_sig_normaliser + 1;
3209 	norm += delta_sig_normaliser;
3210 	is_special = IS_SPECIAL_UNIT(unit_ptr);
3211 	normalised_error = act_err / norm;
3212 	scaled_error = act_err / delta_weight_normaliser;
3213 	FOR_ALL_LINKS(unit_ptr, link_ptr) {
3214 	    tmp = link_ptr->weight * error;
3215 	    link_ptr->to->Aux.flint_no += tmp;
3216 
3217 	    message_weight = tmp * tmp;
3218 
3219 	    if (!is_special) {
3220 		link_ptr->to->value_a += link_ptr->weight *
3221 		    normalised_error * message_weight;
3222 		link_ptr->to->value_b += message_weight;
3223 		link_ptr->weight += link_ptr->to->Out.output * scaled_error;
3224 	    }
3225 	}
3226 
3227 
3228 	/* adjust bias value  */
3229 	if (!is_special)
3230 	    unit_ptr->bias += scaled_error;
3231     }
3232 
3233     return (sum_error);		/* return the error of the network */
3234 }
3235 
3236 
3237 
3238 /*****************************************************************************
3239   FUNCTION : LEARN_perc
3240 
3241   PURPOSE  : main function for backpercolation
3242   RETURNS  : kernel error code
  NOTES    : Input Parameters:   1 : learning parameter
                                 2 : threshold for adapting the global
                                     learning parameter (see end of function)
                                 3 : delta max
3245 
3246              Output Parameters:  1 : error of the network (sum of all cycles)
3247 
3248   UPDATE   : 06.11.1993 by Guenter Mamier
3249 ******************************************************************************/
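/* Minimal calling sketch (hypothetical application code, not part of the
   kernel): parameterInArray needs at least 5 slots here, because slot 5
   (index 4) is used internally to carry the percolation error between
   epochs.

       float params[5] = { 0.5f, 0.1f, 0.2f, 0.0f, 1.0f };
       float *out_params;
       int    n_out;
       krui_err err = LEARN_perc(first, last, params, 5,
				 &out_params, &n_out);
       // on success, out_params[0] holds the summed network error
*/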
krui_err LEARN_perc(int start_pattern, int end_pattern,
	            float *parameterInArray, int NoOfInParams,
	            float **parameterOutArray, int *NoOfOutParams)
3253 {
3254     static float    OutParameter[1];	/* OutParameter[0] stores the
3255 					   learning error  */
3256     int             ret_code, pattern_no, sub_pat_no;
3257     float           p_error, l_error;
3258     register struct Unit *unit_ptr;
3259 
    if (NoOfInParams < 1)	        /* has to be changed (must be 2)  */
3261 	return (KRERR_PARAMETERS);	/* Not enough input parameters  */
3262 
3263     *NoOfOutParams = 1;		        /* One return value is available (the
3264 				           learning error)  */
3265     *parameterOutArray = OutParameter;	/* set the output parameter reference */
3266     ret_code = KRERR_NO_ERROR;	/* reset return code  */
3267 
3268     if (NetModified || (TopoSortID != TOPOLOGICAL_FF)) {
3269 	/* Net has been modified or topologic array isn't initialized */
3270 	/* check the topology of the network  */
3271 	FOR_ALL_UNITS(unit_ptr)
3272 	    if UNIT_HAS_SITES
3273 	    (unit_ptr)
3274 		return (KRERR_SITES_NO_SUPPORT);
3275 
3276 	ret_code = kr_topoCheck();
3277 	if (ret_code < KRERR_NO_ERROR)
	    return (ret_code);	/* an error has occurred  */
	if (ret_code < 2)
	    return (KRERR_FEW_LAYERS);	/* the network has fewer than 2 layers */
3281 
3282 	/* count the no. of I/O units and check the patterns  */
3283 	ret_code = kr_IOCheck();
3284 	if (ret_code < KRERR_NO_ERROR)
3285 	    return (ret_code);
3286 
3287 	/* sort units by topology and by topologic type  */
3288 	ret_code = kr_topoSort(TOPOLOGICAL_FF);
3289 	if ((ret_code != KRERR_NO_ERROR) && (ret_code != KRERR_DEAD_UNITS))
3290 	    return (ret_code);
3291 
3292 	NetModified = FALSE;
3293     }
    if (NetInitialize || LearnFuncHasChanged) {	/* Net has been modified or
						   initialized, initialize
						   backpercolation now  */
3297 	if (ret_code != KRERR_NO_ERROR)
3298 	    return (ret_code);
3299 	parameterInArray[4] = 1.0;
3300     }
3301 
3302 
3303     /* compute the necessary sub patterns */
3304 
3305     KernelErrorCode = kr_initSubPatternOrder(start_pattern,end_pattern);
3306     if(KernelErrorCode != KRERR_NO_ERROR)
3307 	return (KernelErrorCode);
3308 
3309 
3310     NET_ERROR(OutParameter) = 0.0;	/* reset network error value  */
3311     p_error = 0.0;
3312 
3313     while(kr_getSubPatternByOrder(&pattern_no,&sub_pat_no)){
3314 
3315 	propagateNetForward_perc(pattern_no,sub_pat_no);  /* Forward pass */
3316 
3317 	/* Backward propagation  */
	/* 1st parameter is the learning parameter, 2nd parameter is the
	   max. deviation between output pattern and the output of the
	   output unit (delta max) */
3321 
3322 	NET_ERROR(OutParameter) +=
3323 	    propagateNetBackward_perc(pattern_no,sub_pat_no,
3324 				      LEARN_PARAM1(parameterInArray),
3325 				      LEARN_PARAM3(parameterInArray), &p_error);
3326     }
3327 
3328     p_error = p_error / (kr_TotalNoOfSubPatPairs()* NoOfOutputUnits);
3329 
3330     if (p_error < LEARN_PARAM2(parameterInArray)) {
3331 	p_error = (parameterInArray[4] + p_error) / 2;
3332 	l_error = exp((parameterInArray[4] - p_error) /
3333 		      (parameterInArray[4] + p_error));
3334 	if (l_error <= 0.5)
3335 	    l_error = 0.5;
3336 	else if (l_error >= 1.05)
3337 	    l_error = 1.05;
3338 	parameterInArray[0] = parameterInArray[0] * l_error;
3339     }
3340     parameterInArray[4] = p_error;
3341 
3342     return (ret_code);
3343 }
3344 
3345 
3346 
3347 /*****************************************************************************
3348  *****************************************************************************
3349 
3350   GROUP  : Radial Basis Functions Learning
3351 
3352   AUTHOR : Michael Vogt
3353   Notes  : Use of special entries in links and units with RBFs:
3354 
3355            for Units in hidden layer:
	   Unit value_a: |X - L|^2  == norm^2 == square of Euclidean distance
3357 	                  between all links and all input units to this unit.
3358 	   Unit value_b: delta_BIAS == sum of all deltas to BIAS during learning
3359 	   Unit value_c: Backpropagated weighted sum of errors in output layer
3360 
3361 	   for Units in output layer:
3362 	   Unit value_a: error (y_learn - y_net) during learning current pattern
3363 	   Unit value_b: delta_BIAS == sum of all deltas to BIAS during learning
3364 
3365 	   for links between input and hidden layer:
3366 	   Link value_b: delta for this link during learning (link treated as
3367 	                 vector)
3368 	   Link value_a: Momentum term for this link (last change)
3369 
3370 	   for links between hidden and output layer:
3371 	   Link value_b: delta for weight of this link during learning.
3372 	   Link value_a: Momentum term for this link (last change)
3373 
3374 	   for links between input and output layer:
3375 	   Link value_b: delta for weight of this link during learning.
3376 	   Link value_a: Momentum term for this link (last change)
3377 
3378 ******************************************************************************
3379 ******************************************************************************/
3380 
3381 
3382 /*****************************************************************************
3383   FUNCTION : RbfLearnClean
3384 
3385   PURPOSE  : Clean all deltas, so that learning can start.
3386   RETURNS  : kernel error code
  NOTES    : Called every time LEARN_RBF is called, to make sure that no
             stale values remain in the value_b fields of links and units
3389 
3390   UPDATE   : 06.11.1993 by Guenter Mamier
3391 ******************************************************************************/
krui_err RbfLearnClean(void)
3393 {
3394     register struct Unit *unit_ptr;
3395     register struct Link *link_ptr;
3396 
3397     FOR_ALL_UNITS(unit_ptr) {
3398 	unit_ptr->value_b = 0.0;
3399 	FOR_ALL_LINKS(unit_ptr, link_ptr) {
3400 	    link_ptr->value_b = 0.0;
3401 	}
3402     }
3403 
3404     return KRERR_NO_ERROR;
3405 }
3406 
3407 
3408 
3409 /*****************************************************************************
3410   FUNCTION : RbfLearnForward
3411 
3412   PURPOSE  : Forward propagation of current pattern. Calculation of different
3413              value_a fields. value_c of hidden units is set to 0.0
3414   RETURNS  : kernel error code
3415   NOTES    :
3416 
3417   UPDATE   : 06.11.1993 by Guenter Mamier
3418 ******************************************************************************/
krui_err  RbfLearnForward(int pattern_no, int sub_pat_no)
3420 {
3421     register struct Unit *unit_ptr;
3422     register Patterns current_in_pattern;
3423     register Patterns current_out_pattern;
3424     register TopoPtrArray topo_ptr;
3425 
    /* calculate the start address of the current input pattern:	 */
3427     current_in_pattern = kr_getSubPatData(pattern_no,sub_pat_no,INPUT,NULL);
3428 
3429     /* activate input units with current patterns and calculate	 */
3430     /* their output value:						 */
3431 
3432     topo_ptr = topo_ptr_array;
3433     while ((unit_ptr = *(++topo_ptr)) != NULL) {
3434 	/* go through all input units, set activation and calculate */
3435 	/* output:							 */
3436 
3437 	unit_ptr->act = *current_in_pattern++;
3438 	if(unit_ptr->out_func == OUT_IDENTITY)
3439 		unit_ptr->Out.output = unit_ptr->act;
3440 	else if(unit_ptr->out_func == OUT_Custom_Python)
3441 		unit_ptr->Out.output =
3442 			kr_PythonOutFunction(unit_ptr->python_out_func,
3443 				unit_ptr->act);
	else
	    unit_ptr->Out.output = (*unit_ptr->out_func) (unit_ptr->act);
3446     }
3447 
3448     /* activate hidden units, by calling the activation function	 */
3449     /* (has to be a RBF activation function which places norm ^ 2	 */
3450     /* into value_a of the unit: see trans_f.c: RbfUnitGetNormsqr). */
3451     /* The output function is supposed to be OUT_IDENTITY !		 */
3452     /* (so the output function is never called !)			 */
3453 
3454     while ((unit_ptr = *(++topo_ptr)) != NULL) {
3455 	unit_ptr->act = unit_ptr->Out.output =
3456 	    ((unit_ptr->act_func == ACT_Custom_Python) ?
3457 			kr_PythonActFunction(unit_ptr->python_act_func,
3458 						unit_ptr) :
3459 			(*unit_ptr->act_func) (unit_ptr)) ;
3460 
3461 	unit_ptr->value_c = 0.0;
3462     }
3463 
3464     /* activate output units. Again, the output function is supposed */
3465     /* to be OUT_IDENTITY. The calculated output is compared to the */
3466     /* current pattern, the error (difference) is calculated and    */
3467     /* stored in value_a of the current output unit.		 */
3468 
3469     current_out_pattern = kr_getSubPatData(pattern_no,sub_pat_no,OUTPUT,NULL);
3470 
3471     while ((unit_ptr = *(++topo_ptr)) != NULL) {
3472 	unit_ptr->act = unit_ptr->Out.output =
3473 	    ((unit_ptr->act_func == ACT_Custom_Python) ?
3474 			kr_PythonActFunction(unit_ptr->python_act_func,
3475 						unit_ptr) :
3476 			(unit_ptr->act_func) (unit_ptr)) ;
3477 	unit_ptr->value_a = *current_out_pattern++ - unit_ptr->act;
3478     }
3479 
3480     return KRERR_NO_ERROR;
3481 }
3482 
3483 
3484 #define	RBF_LEARN_CENTER	0x1
3485 #define RBF_LEARN_BIAS		0x2
3486 #define RBF_LEARN_WEIGHT	0x4
3487 #define RBF_LEARN_PAIN		0x8
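
/* Usage sketch (cf. the mask construction in LEARN_RBF below): a call with
   nonzero center and weight learning rates but a zero bias rate yields
   learn_mask == (RBF_LEARN_CENTER | RBF_LEARN_WEIGHT), so only the RBF
   centers and the weights into the output layer are adapted.  */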
3488 
3489 /*****************************************************************************
3490   FUNCTION : RbfLearnAdjustDelta
3491 
3492   PURPOSE  : Adjusting of all deltas (value_b fields) by using the current
3493              input pattern (activation of input units) and the stored error
3494 	     of the output units (value_a). value_c of hidden units is used
3495 	     too!
3496   RETURNS  :
3497   NOTES    :
3498 
3499   UPDATE   : 06.11.1993 by Guenter Mamier
3500 ******************************************************************************/
float RbfLearnAdjustDelta(float para_center, float para_bias,
			  float para_weight, float para_pain,
			  float para_momentum, float para_delta_max,
			  int learn_mask)
3505 {
3506     register struct Unit *curr_unit;	/* current unit		 */
3507     register struct Link *curr_link;	/* current link		 */
3508     register struct Unit *source_unit;	/* input unit to link	 */
3509     register TopoPtrArray topo_ptr;
3510     register float  center_delta;	/* delta of centers	 */
3511     register float  w_error;	        /* weighted error of 	 */
3512                                         /* output unit		 */
3513     register float  w2_error;           /* w_error for special u.*/
3514     register float  learn_error;
3515 
3516     /* start with last unit in output layer:			 */
3517     topo_ptr = topo_ptr_array + no_of_topo_units + 3;
3518 
3519     learn_error = 0.0;
3520 
3521     /* work on the output layer and all links leading to it:	 */
3522 
3523     while ((curr_unit = *(--topo_ptr)) != NULL) {
3524 	/* go on with next unit if |error| <= delta_max		 */
3525 	if ((float) fabs(curr_unit->value_a) <= para_delta_max)
3526 	    continue;
3527 
	/* error, weighted by the derivative of the activation:	 */
3529 	w2_error = w_error = (curr_unit->value_a) *
3530 	    ((curr_unit->act_deriv_func == ACT_DERIV_Custom_Python) ?
3531 			kr_PythonActFunction(curr_unit->python_act_deriv_func,
3532 						curr_unit) :
3533 			(curr_unit->act_deriv_func) (curr_unit)) ;
3534 
3535 	/* sum up the learning error:				 */
3536 	learn_error += (curr_unit->value_a) * (curr_unit->value_a);
3537 
3538 	if (learn_mask & RBF_LEARN_WEIGHT) {
3539 	    /* sum up all deltas for change of bias:		 */
3540 
3541 #ifdef RBF_INCR_LEARNING
	    if (IS_SPECIAL_UNIT(curr_unit))
		w_error = 0.0;
3544 	    curr_unit->bias += para_weight * w_error;
3545 #else
3546 	    curr_unit->value_b += w_error;
3547 #endif
3548 	}
3549 	if (learn_mask) {
3550 	    FOR_ALL_LINKS(curr_unit, curr_link) {
3551 		source_unit = curr_link->to;
3552 
3553 		/* sum up deltas for change of link weight:	 */
3554 
3555 #ifdef RBF_INCR_LEARNING
3556 		curr_link->weight += para_weight * w_error *
3557 		    source_unit->Out.output;
3558 #else
3559 		curr_link->value_b += w_error * source_unit->Out.output;
3560 #endif
3561 
		/* if coming from a hidden unit: sum up delta for change */
3563 		/* of bias of hidden unit:			        */
3564 		if (IS_HIDDEN_UNIT(source_unit))
3565 		    source_unit->value_c += w2_error * curr_link->weight;
3566 	    }
3567 	}
3568     }
3569 
3570     /* work on the hidden layer and all links leading to it:	 */
3571 
3572     if (learn_mask & (RBF_LEARN_CENTER | RBF_LEARN_BIAS)) {
3573 	while ((curr_unit = *(--topo_ptr)) != NULL) {
3574 	    /* now calculate delta for weights of links (centers of the */
3575 	    /* RBF function)						 */
	    curr_unit->Aux.int_no = 2;	/* derivative w.r.t. norm^2 */
3577 	    center_delta = curr_unit->value_c *
3578 		((curr_unit->act_deriv_func == ACT_DERIV_Custom_Python) ?
3579 			kr_PythonActFunction(curr_unit->python_act_deriv_func,
3580 						curr_unit) :
3581 			(curr_unit->act_deriv_func) (curr_unit)) ;
3582 
3583 	    if (learn_mask & RBF_LEARN_CENTER) {
3584 #ifdef RBF_INCR_LEARNING
3585 		if (IS_SPECIAL_UNIT(curr_unit))
3586 		    center_delta = 0.0;
3587 #endif
3588 		FOR_ALL_LINKS(curr_unit, curr_link) {
3589 
3590 #ifdef RBF_INCR_LEARNING
3591 		    curr_link->weight += para_center * center_delta *
3592 			((curr_link->to->Out.output) - (curr_link->weight));
3593 #else
3594 		    curr_link->value_b += center_delta *
3595 			((curr_link->to->Out.output) - (curr_link->weight));
3596 #endif
3597 		}
3598 	    }
3599 	    /* calculate delta for bias (parameter of RBF function):	 */
	    curr_unit->Aux.int_no = 3;	/* derivative w.r.t. bias!  */
3601 
3602 #ifdef RBF_INCR_LEARNING
3603 	    if (!IS_SPECIAL_UNIT(curr_unit))
3604 		curr_unit->bias += para_bias * curr_unit->value_c *
3605 		    ((curr_unit->act_deriv_func == ACT_DERIV_Custom_Python) ?
3606 			kr_PythonActFunction(curr_unit->python_act_deriv_func,
3607 						curr_unit) :
3608 			(*curr_unit->act_deriv_func) (curr_unit)) ;
3609 #else
3610 	    curr_unit->value_b += curr_unit->value_c *
3611 		((curr_unit->act_deriv_func == ACT_DERIV_Custom_Python) ?
3612 			kr_PythonActFunction(curr_unit->python_act_deriv_func,
3613 						curr_unit) :
3614 			(curr_unit->act_deriv_func) (curr_unit)) ;
3615 #endif
3616 	}
3617     }
3618     return learn_error;
3619 }
3620 
3621 
3622 
3623 /*****************************************************************************
3624   FUNCTION : RbfLearnAdjustWeights
3625 
  PURPOSE  : Adjusting of all learnable parameters, depending on the
             collected deltas and the current learning parameters.
3628   RETURNS  :
3629   NOTES    :
3630 
3631   UPDATE   : 06.11.1993 by Guenter Mamier
3632 ******************************************************************************/
3633 void RbfLearnAdjustWeights(float para_center, float para_bias,
3634 			   float para_weight, float para_momentum)
3635 {
3636     register struct Unit *curr_unit;	/* current unit		 */
3637     register struct Link *curr_link;	/* current link		 */
3638     register TopoPtrArray topo_ptr;
3639 
3640 #ifdef RBF_DELTA_PROT
3641     static int      step = 0;	/* current learning step */
3642     char            filename[20];	/* Name of prot file	 */
    FILE           *protfile;	/* file pointer		 */
3644 
3645 #endif
3646 
3647 #ifdef RBF_DELTA_PROT
3648     step++;
3649     sprintf(filename, "rbf_%04d.prot", step);
    protfile = fopen(filename, "w");
    if (protfile == NULL) {
	fprintf(stderr, "RbfLearnAdjustWeights: Can't open protfile\n");
	protfile = stderr;	/* fall back to stderr so that the protocol
				   fprintf calls below stay safe  */
    }
3653 #endif
3654 
3655     /* start with last unit in output layer:			 */
3656     topo_ptr = topo_ptr_array + no_of_topo_units + 3;
3657 
3658 #ifdef RBF_DELTA_PROT
3659     fprintf(protfile, "%s\t\t\n", "h -> o");
3660 #endif
3661 
3662     while ((curr_unit = *(--topo_ptr)) != NULL) {
3663 	if (!IS_SPECIAL_UNIT(curr_unit)) {
3664 	    /* adjust bias of output unit:                       */
3665 	    curr_unit->bias += para_weight * (curr_unit->value_b);
3666 
3667 #ifdef RBF_DELTA_PROT
3668 	    fprintf(protfile, "%13s:\t\n", curr_unit->unit_name);
3669 #endif
3670 
3671 	    /* adjust weights of links leading to this unit:	 */
3672 	    FOR_ALL_LINKS(curr_unit, curr_link) {
3673 
3674 #ifdef RBF_DELTA_PROT
3675 		fprintf(protfile, "%-10.2e\t\n",
3676 			para_weight * (curr_link->value_b));
3677 #endif
3678 
3679 		curr_link->weight +=
3680 		    (curr_link->value_a = para_weight * (curr_link->value_b)
3681 		     + para_momentum * curr_link->value_a);
3682 	    }
3683 	}
3684     }
3685 
3686     /* now adjust weights of hidden layer:			 */
3687 
3688 #ifdef RBF_DELTA_PROT
3689     fprintf(protfile, "%s\t\t\n", "i -> h");
3690 #endif
3691 
3692     while ((curr_unit = *(--topo_ptr)) != NULL) {
3693 	if (!IS_SPECIAL_UNIT(curr_unit)) {
3694 	    /* adjust bias of hidden unit (parameter of RBF function):	 */
3695 	    curr_unit->bias += para_bias * (curr_unit->value_b);
3696 	    if (curr_unit->bias <= 0.0)
3697 		fprintf(stderr, "Hidden unit bias %f !\n", curr_unit->bias);
3698 
3699 #ifdef RBF_DELTA_PROT
3700 	    fprintf(protfile, "%13s:\t\n", curr_unit->unit_name);
3701 #endif
3702 
3703 	    /* adjust weights of links (centers of RBF functions):	 */
3704 	    FOR_ALL_LINKS(curr_unit, curr_link) {
3705 
3706 #ifdef RBF_DELTA_PROT
3707 		fprintf(protfile, "%-10.2e\t\n",
3708 			para_center * (curr_link->value_b));
3709 #endif
3710 
3711 		curr_link->weight +=
3712 		    (curr_link->value_a = para_center * (curr_link->value_b)
3713 		     + para_momentum * curr_link->value_a);
3714 	    }
3715 	}
3716     }
3717 
#ifdef RBF_DELTA_PROT
    if (protfile != stderr)
	fclose(protfile);
#endif
3721 }
3722 
3723 
3724 
3725 /*****************************************************************************
3726   FUNCTION : RbfTopoCheck
3727 
3728   PURPOSE  : Topological Check for Radial Basis Functions.
3729              Also the number of output units is compared to the patterns.
3730   RETURNS  :
3731   NOTES    :
3732 
3733   UPDATE   : 06.11.1993 by Guenter Mamier
3734 ******************************************************************************/
3735 krui_err RbfTopoCheck(void)
3736 {
3737     krui_err        ret_code;	/* error return code		 */
3738 
3739     /* Net has been modified or topologic array isn't		 */
3740     /* initialized. check the topology of the network.		 */
3741     ret_code = kr_topoCheck();
3742     if (ret_code < KRERR_NO_ERROR)
	return (ret_code);	/* an error has occurred */
    if (ret_code < 2)
	return (KRERR_NET_DEPTH);	/* the network has fewer
					   than 2 layers	 */
3747 
3748     /* count the no. of I/O units and check the patterns	 */
3749     ret_code = kr_IOCheck();
3750     if (ret_code < KRERR_NO_ERROR)
3751 	return (ret_code);
3752 
3753     /* sort units by topology and by topologic type		 */
3754     ret_code = kr_topoSort(TOPOLOGICAL_FF);
3755 
3756     return ret_code;
3757 }
3758 
3759 
3760 /*****************************************************************************
3761   FUNCTION : LEARN_RBF
3762 
3763   PURPOSE  : Learning function for RBF (GRBF) called from kernel.
3764   RETURNS  : kernel error code
3765   NOTES    : Use of Learning Parameters:
3766              LEARN_PARAM1: learning parameter for adjusting centers (links
3767 	                   between input and hidden layer, treated as vectors)
3768              LEARN_PARAM2: learning parameter for adjusting RBF-parameter
3769 	                   (BIAS of units in hidden layer)
3770              LEARN_PARAM3: learning parameter for adjusting weights (all links
3771 	                   to output layer + bias of output units)
	     LEARN_PARAM4: maximum difference between output value and
	                   teaching input that is still treated as zero
	                   error (delta_max)
3774              LEARN_PARAM5: factor for momentum term
3775 
3776   UPDATE   : 06.11.1993 by Guenter Mamier
3777 ******************************************************************************/
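/* Minimal calling sketch (hypothetical application code, not part of the
   kernel); the five slots correspond to LEARN_PARAM1..5 above:

       float params[5] = { 0.01f,    // centers
			   0.0f,     // RBF bias (sigma parameter)
			   0.01f,    // weights into output layer
			   0.1f,     // delta max
			   0.8f };   // momentum factor
       float *out_params;
       int    n_out;
       krui_err err = LEARN_RBF(first, last, params, 5,
				&out_params, &n_out);
       // on success, out_params[0] holds the summed squared error
*/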
3778 krui_err LEARN_RBF(int start_pattern, int end_pattern,
3779 		   float *parameterInArray, int NoOfInParams,
3780 		   float **parameterOutArray, int *NoOfOutParams)
3781 {
3782     static float    OutParameter[1];	/* OutParameter[0] stores	 */
3783                                         /* the learning error   	 */
3784     int             ret_code, pattern_no, sub_pat_no, learn_mask;
3785     float           para_bias, para_center, para_weight, para_pain,
3786                     para_momentum,para_delta_max;
3787 
3788     register struct Unit *unit_ptr;
3789     register struct Link *link_ptr;
3790 
3791 #ifdef RBF_LEARN_PROT
3792     static int      schritt = 1;
3793     int             fehler_zaehler = 0;
3794     float           temp_fehler;
3795     FILE           *protfile;
3796 
3797 #endif
3798 
3799     if (NoOfUnits == 0)
3800 	return (KRERR_NO_UNITS);/* No Units defined		 */
3801     if (NoOfInParams < 1)	/* has to be changed (must be 4) */
3802 	return (KRERR_PARAMETERS);	/* Not enough input parameters  */
3803 
3804     *NoOfOutParams = 1;		/* One return value is available */
3805                                 /* (the learning error)		 */
3806     *parameterOutArray = OutParameter;	/* set the reference to */
3807     /* the output parameter */
3808 
3809     ret_code = KRERR_NO_ERROR;	/* default return code		 */
3810 
3811     if (NetModified || (TopoSortID != TOPOLOGICAL_FF)) {
3812 	ret_code = RbfTopoCheck();
3813 
3814 	if ((ret_code != KRERR_NO_ERROR) && (ret_code != KRERR_DEAD_UNITS))
3815 	    return (ret_code);
3816 
3817 	NetModified = FALSE;
3818     }
3819     if (NetInitialize || LearnFuncHasChanged) {
	fprintf(stderr, "Initialization function RBF_Weights should be called!\n");
3821 	/* initialize fields for momentum term */
3822 	FOR_ALL_UNITS(unit_ptr) {
3823 	    FOR_ALL_LINKS(unit_ptr, link_ptr) {
3824 		link_ptr->value_a = 0.0;
3825 	    }
3826 	}
3827     }
3828     NET_ERROR(OutParameter) = 0.0;
3829     para_center = -LEARN_PARAM1(parameterInArray);
3830     para_bias = LEARN_PARAM2(parameterInArray);
3831     para_weight = LEARN_PARAM3(parameterInArray);
3832     para_momentum = LEARN_PARAM5(parameterInArray);
3833     para_delta_max = LEARN_PARAM4(parameterInArray);
3834     para_pain = 0.0;		/* not used now	*/
3835 
    /* set the learn mask according to the learning parameters:	 */
3837     learn_mask = 0;
3838     if (para_center != 0.0)
3839 	learn_mask |= RBF_LEARN_CENTER;
3840     if (para_bias != 0.0)
3841 	learn_mask |= RBF_LEARN_BIAS;
3842     if (para_weight != 0.0)
3843 	learn_mask |= RBF_LEARN_WEIGHT;
3844     if (para_pain != 0.0)
3845 	learn_mask |= RBF_LEARN_PAIN;
3846 
3847 #ifndef RBF_INCR_LEARNING
3848     ret_code = RbfLearnClean();
3849     if (ret_code != KRERR_NO_ERROR)
3850 	return ret_code;
3851 #endif
3852 
3853 
3854     /* compute the necessary sub patterns */
3855 
3856     KernelErrorCode = kr_initSubPatternOrder(start_pattern,end_pattern);
3857     if(KernelErrorCode != KRERR_NO_ERROR)
3858 	return (KernelErrorCode);
3859 
3860     while(kr_getSubPatternByOrder(&pattern_no,&sub_pat_no)){
3861 
3862 	RbfLearnForward(pattern_no,sub_pat_no);	/* forward propagation	 */
3863 
3864 	/* backward propagation 					 */
3865 
3866 #ifdef RBF_LEARN_PROT
3867 	temp_fehler = RbfLearnAdjustDelta(para_center,
3868 			   para_bias, para_weight, para_pain, para_momentum,
3869 					  para_delta_max, learn_mask);
3870 	NET_ERROR(OutParameter) += temp_fehler;
3871 	if (temp_fehler > 0.0)
3872 	    fehler_zaehler++;
3873 #else
3874 	NET_ERROR(OutParameter) += RbfLearnAdjustDelta(para_center,
3875 			   para_bias, para_weight, para_pain, para_momentum,
3876 						para_delta_max, learn_mask);
3877 #endif
3878     }
3879 
3880 #ifndef RBF_INCR_LEARNING
3881     RbfLearnAdjustWeights(para_center, para_bias, para_weight,
3882 			  para_momentum);
3883 #endif
3884 
3885 #ifdef RBF_LEARN_PROT
    protfile = fopen("rbf_learn_prot_file", "a");
    if (protfile != NULL) {
	if (schritt == 1) {
	    fprintf(protfile, "# New learning log: \n");
	}
	fprintf(protfile, "%d %f %d\n", schritt, NET_ERROR(OutParameter),
		fehler_zaehler);
	fclose(protfile);
    }
    schritt++;
3894 #endif
3895 
3896     return (ret_code);
3897 }
3898 
3899 /*****************************************************************************
3900  *****************************************************************************
3901 
  GROUP   : (Radial Basis Functions) Dynamic Decay Adjustment (DDA) Learning
3903 
  AUTHORS : Michael R. Berthold (Michael.Berthold@informatik.uni-karlsruhe.de)
            and Markus J. Weihrauch (Markus.Weihrauch@informatik.uni-karlsruhe.de)
  NOTES   : Use of special entries in links and units with RBFs
3907         for units in hidden layer:
          unit.value_a == |X - L|^2  = norm^2 = summed squares of Euclidean
            distances between all links and input units of this unit.
3910         for units in output layer:
3911           unit.value_a == <desired activation> - <actual activation>
3912 	Use of the following global variables
3913 	  KernelErrorCode, NoOfInputUnits, NoOfHiddenUnits, NoOfOutputUnits,
3914 	  NetModified, TopoSortID, topo_ptr_array
3915 	Uses the following global MACROS
3916 	  MAXINT, KRERR_...
3917 	Output units *must* have "Act_Identity" and RBF (==hidden) units *must*
3918 	  have "Act_RBF_Gaussian" as activation function.
3919 	All output functions must be "Out_Identity".
3920 
3921 ******************************************************************************
3922 ******************************************************************************/
3923 
3924 /* The RBFs act. fct. is exp(-distance^2/sigma^2). Sigma == 1/sqrt(bias) */
3925 #define RBF_MIN_SIGMA 1e-10
3926 #define RBF_MAX_SIGMA 1e10
3927 
3928 /* This is the maximal number of units displayed in the graphical display */
3929 #define DEF_MAX_UNITS_DISPLAYED 20
3930 
3931 #define RBF_GET_UNIT_NO( unit_ptr ) ( (unit_ptr) - unit_array )
3932 
/* Wrapped in do { } while (0) so that the macro behaves as a single
   statement and is safe as the body of an unbraced loop or if  */
#define RBF_ERROR_CHECK( x ) \
  do { KernelErrorCode = ( x ); \
       if ( KernelErrorCode < KRERR_NO_ERROR && \
	    KernelErrorCode != KRERR_DEAD_UNITS ) return KernelErrorCode; \
  } while (0)
3937 
3938 /* Factor for the calculation of the inverse RBF activation function */
3939 #define RBF_INV ( -1.0 * log(theta_neg) )
3940 
3941 #define RBF_SQR( x ) ( (x) * (x) )
3942 #define RBF_MIN( x, y ) ( (x) < (y) ? (x) : (y) )
3943 #define RBF_MAX( x, y ) ( (x) > (y) ? (x) : (y) )
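
/* Worked sketch of the shrink rule used below, derived from the Gaussian
   activation act = exp(-d^2 * bias) with bias == 1/sigma^2: forcing the
   activation down to theta_neg at squared distance d^2 gives

       exp(-d^2 * bias) = theta_neg
       bias             = -log(theta_neg) / d^2  ==  RBF_INV / d^2

   which is exactly the assignment bias = RBF_INV / value_a (respectively
   RBF_INV / sqr_distance) in the shrink steps of LEARN_RBF_DDA.  */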
3944 
3945 /*****************************************************************************
3946   FUNCTION : LEARN_RBF_DDA
3947 
3948   PURPOSE  : Learning function for RBFs called from kernel.
  RETURNS  : A kernel error code and OutParameter[0] == Mean Square Error
3950              (MSE is not very meaningful in classification tasks)
3951   NOTES    : LEARN_PARAM1:
3952                theta_pos, minimum activation of correct RBF (default 0.4)
3953              LEARN_PARAM2:
3954 	       theta_neg, maximum activation of competing RBFs (default 0.2)
3955 	     LEARN_PARAM3:
3956 	       #units per line/column in display (default 20)
3957 
3958   UPDATE   : 8 May 1995 by mb
3959 ******************************************************************************/
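/* Minimal calling sketch (hypothetical application code): with all three
   parameters set to 0.0 the defaults theta_pos = 0.4, theta_neg = 0.2 and
   20 units per display row/column are used.

       float params[3] = { 0.0f, 0.0f, 0.0f };
       float *out_params;
       int    n_out;
       krui_err err = LEARN_RBF_DDA(first, last, params, 3,
				    &out_params, &n_out);
*/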
3960 
3961 krui_err LEARN_RBF_DDA(int start_pattern, int end_pattern,
3962 		       float  *parameterInArray,  int  NoOfInParams,
3963 		       float **parameterOutArray, int *NoOfOutParams)
3964 {
3965   static float OutParameter[1];
3966   float        theta_pos           =       LEARN_PARAM1(parameterInArray),
3967                theta_neg           =       LEARN_PARAM2(parameterInArray);
3968   int          max_units_displayed = (int )LEARN_PARAM3(parameterInArray),
3969                pattern_no, sub_pat_no,
3970                xmin_in,  ymin_in, xmin_out, ymin_out,
3971                xmax_in,  ymax_in, xmax_out, ymax_out;
3972   struct Unit  *input_unit_ptr, *hidden_unit_ptr, *output_unit_ptr;
3973   struct Link  *link_ptr;
3974   TopoPtrArray topo_ptr;
3975   Patterns     out_pat_ptr;
3976 
3977 
3978   /* Treat parameters */
3979 
3980   if  ( theta_pos == 0.0 ) theta_pos = 0.4;
3981 
3982   if  ( theta_neg == 0.0 ) theta_neg = 0.2;
3983 
3984   if ( theta_pos <= 0.0 || theta_pos > 1.0 ) return DDA_PARAM_ONE;
3985 
3986   if ( theta_neg <= 0.0 || theta_neg > 1.0 ) return DDA_PARAM_TWO;
3987 
3988   if ( max_units_displayed == 0 ) max_units_displayed = DEF_MAX_UNITS_DISPLAYED;
3989 
3990   if ( max_units_displayed <= 0 ) return DDA_PARAM_THREE;
3991 
3992 
3993   /*  If net has been modified or topologic array isn't initialized:
3994       Check the topology of the network  */
3995 
3996   if ( NetModified || (TopoSortID != TOPOLOGICAL_FF) )
3997     {
3998       int no_of_layers = kr_topoCheck();
3999       RBF_ERROR_CHECK( no_of_layers );
4000 
4001       if ( no_of_layers < 1 ) return KRERR_FEW_LAYERS;
4002       if ( no_of_layers > 3 ) return KRERR_MUCH_LAYERS;
4003 
4004       /*  count the no. of I/O units and check the patterns  */
4005       RBF_ERROR_CHECK( kr_IOCheck () );
4006 
4007       /*  sort units by topology and by topologic type  */
4008       RBF_ERROR_CHECK( kr_topoSort( TOPOLOGICAL_FF ) );
4009 
4010       NetModified = FALSE;
4011     }
4012 
4013 
4014   /* Check the topology and find min+max positions */
4015 
  /* topo_ptr starts at the NULL before the first input unit */
4017   topo_ptr = topo_ptr_array;
4018   xmin_in = ymin_in =  INT_MAX;
4019   xmax_in = ymax_in = -INT_MAX;
4020 
4021   while ( (input_unit_ptr = * ++topo_ptr ) != NULL)
4022     {
4023       if ( !IS_INPUT_UNIT(input_unit_ptr) ) return KRERR_TOPOLOGY;
4024 
4025       if ( UNIT_HAS_INPUTS(input_unit_ptr) ||
4026 	  UNIT_HAS_SITES(input_unit_ptr) )
4027 	{
4028 	  krui_setCurrentUnit( RBF_GET_UNIT_NO(input_unit_ptr) );
4029 	  return KRERR_UNEXPECTED_SITES;
4030 	}
4031 
4032 #if 0
4033       if ( strcmp( krui_getUnitActFuncName( RBF_GET_UNIT_NO(input_unit_ptr) ),
4034 		   "Act_Identity" ) )
4035 	return DDA_INPUT_ACT_FUNC;
4036 
4037       if ( input_unit_ptr->out_func != OUT_IDENTITY )
4038 	{
4039 	  krui_setCurrentUnit( RBF_GET_UNIT_NO(input_unit_ptr) );
4040 	  return KRERR_OUT_FUNC;
4041 	}
4042 #endif
4043 
4044       input_unit_ptr->bias = 0.0;
4045 
4046       xmin_in = RBF_MIN( xmin_in, input_unit_ptr->unit_pos.x );
4047       ymin_in = RBF_MIN( ymin_in, input_unit_ptr->unit_pos.y );
4048       xmax_in = RBF_MAX( xmax_in, input_unit_ptr->unit_pos.x );
4049       ymax_in = RBF_MAX( ymax_in, input_unit_ptr->unit_pos.y );
4050     }
4051 
  /* topo_ptr now points to the NULL before the first hidden (RBF) unit */
4053 
4054   while ( (hidden_unit_ptr = * ++topo_ptr ) != NULL)
4055     {
4056       int no_input_units = 0;
4057 
4058       if ( UNIT_HAS_SITES(hidden_unit_ptr) )
4059 	{
4060 	  krui_setCurrentUnit( RBF_GET_UNIT_NO(hidden_unit_ptr) );
4061 	  return KRERR_UNEXPECTED_SITES;
4062 	}
4063 
4064       if ( hidden_unit_ptr->out_func != OUT_IDENTITY )
4065 	{
4066 	  krui_setCurrentUnit( RBF_GET_UNIT_NO(hidden_unit_ptr) );
4067 	  return KRERR_OUT_FUNC;
4068 	}
4069 
4070       if ( strcmp( krui_getUnitActFuncName( RBF_GET_UNIT_NO(hidden_unit_ptr) ),
4071 		   "Act_RBF_Gaussian" ) )
4072 	return DDA_HIDDEN_ACT_FUNC;
4073 
4074       FOR_ALL_LINKS( hidden_unit_ptr, link_ptr )
4075 	{
4076 	  if ( !IS_INPUT_UNIT(link_ptr->to) ) return KRERR_TOPOLOGY;
4077 
4078 	  no_input_units++;
4079 	}
4080 
4081       if ( no_input_units != NoOfInputUnits ||
4082 	  !IS_HIDDEN_UNIT(hidden_unit_ptr) ||
4083 	  !UNIT_HAS_DIRECT_INPUTS(hidden_unit_ptr) ||
4084 	  hidden_unit_ptr->sites == NULL ) return KRERR_TOPOLOGY;
4085     }
4086 
  /* topo_ptr now points to the NULL before the first output unit */
4088 
4089   xmin_out = ymin_out =  INT_MAX;
4090   xmax_out = ymax_out = -INT_MAX;
4091 
4092   while ( (output_unit_ptr = * ++topo_ptr ) != NULL)
4093     {
4094       if ( UNIT_HAS_SITES(output_unit_ptr) )
4095 	{
4096 	  krui_setCurrentUnit( RBF_GET_UNIT_NO(output_unit_ptr) );
4097 	  return KRERR_UNEXPECTED_SITES;
4098 	}
4099 
4100 #if 0
4101       if ( output_unit_ptr->out_func != OUT_IDENTITY )
4102 	{
4103 	  krui_setCurrentUnit( RBF_GET_UNIT_NO(output_unit_ptr) );
4104 	  return KRERR_OUT_FUNC;
4105 	}
4106 
4107       if ( strcmp( krui_getUnitActFuncName( RBF_GET_UNIT_NO(output_unit_ptr) ),
4108 		   "Act_Identity" ) )
4109 	return DDA_OUTPUT_ACT_FUNC;
4110 #endif
4111       if ( strcmp( krui_getUnitActFuncName( RBF_GET_UNIT_NO(output_unit_ptr) ),
4112 		   "Act_Identity" ) )
4113         {
4114 	  RBF_ERROR_CHECK( krui_setUnitActFunc ( RBF_GET_UNIT_NO(output_unit_ptr), "Act_Identity" ) );
4115         }
4116 
4117       if ( output_unit_ptr->out_func != OUT_IDENTITY )
4118         output_unit_ptr->out_func = OUT_IDENTITY;
4119 
4120       output_unit_ptr->bias = 0.0;
4121 
4122       xmin_out = RBF_MIN( xmin_out, output_unit_ptr->unit_pos.x );
4123       ymin_out = RBF_MIN( ymin_out, output_unit_ptr->unit_pos.y );
4124       xmax_out = RBF_MAX( xmax_out, output_unit_ptr->unit_pos.x );
4125       ymax_out = RBF_MAX( ymax_out, output_unit_ptr->unit_pos.y );
4126 
4127       /* Set the weights of all links from RBFs to output to zero */
4128       FOR_ALL_LINKS( output_unit_ptr, link_ptr )
4129 	{
4130 	  if ( start_pattern != end_pattern ) link_ptr->weight = 0.0;
4131 
4132 	  if ( !IS_HIDDEN_UNIT(link_ptr->to) ) return DDA_SHORTCUTS;
4133 	}
4134 
4135       /* If it's an empty net no connections are allowed */
4136       if ( !IS_OUTPUT_UNIT(output_unit_ptr) ||
4137 	  (NoOfHiddenUnits == 0 && output_unit_ptr->sites != NULL ))
4138 	return KRERR_TOPOLOGY;
4139     }
4140 
4141 
4142   /* The big MAIN Loop:
4143      Loop through patterns from pattern no. start_pattern to end_pattern */
4144 
4145   /* compute the necessary sub patterns */
4146   RBF_ERROR_CHECK( kr_initSubPatternOrder ( start_pattern, end_pattern ) );
4147 
4148   while( kr_getSubPatternByOrder ( &pattern_no, &sub_pat_no ) )
4149     {
4150       int      correct_output_unit_no;
4151       struct   Link *max_to_out_link_ptr;
4152       struct   Unit *max_rbf_ptr, *correct_output_unit_ptr;
4153 
4154       /* forward propagation */
4155       RbfLearnForward ( pattern_no, sub_pat_no );
4156 
4157       /* Find correct output neuron */
4158 
4159       out_pat_ptr = kr_getSubPatData ( pattern_no, sub_pat_no, OUTPUT, NULL );
4160 
4161       topo_ptr = topo_ptr_array + 2 + NoOfInputUnits  + NoOfHiddenUnits;
4162       correct_output_unit_ptr = NULL;
4163 
4164       while( (output_unit_ptr = * ++topo_ptr) != NULL )
4165 	if ( *out_pat_ptr++ > 0.0 )
4166 	  if ( correct_output_unit_ptr == NULL )
4167 	    correct_output_unit_ptr = output_unit_ptr;
4168 	  else
4169 	    return DDA_DESIRED_CLASS;
4170 
4171       /* Is there a desired class ? */
4172       max_rbf_ptr = NULL;
4173 
4174       if ( correct_output_unit_ptr != NULL )
4175 	{
4176 	  correct_output_unit_no = RBF_GET_UNIT_NO ( correct_output_unit_ptr );
4177 
4178 	  /* Find nearest RBF (having highest activation) of correct class */
4179 
4180 	  FOR_ALL_LINKS( correct_output_unit_ptr, link_ptr)
4181 	    if ( link_ptr->to->act >= theta_pos )
4182 	      if ( max_rbf_ptr != NULL )
4183 		{
4184 		  if ( link_ptr->to->act > max_rbf_ptr->act )
4185 		    max_rbf_ptr = link_ptr->to;
4186 		}
4187 	      else
4188 		max_rbf_ptr = link_ptr->to;
4189 	}
4190 
4191       /* Shrink competing RBFs */
4192 
4193       topo_ptr = topo_ptr_array + 2 + NoOfInputUnits + NoOfHiddenUnits;
4194 
4195       while ( ( output_unit_ptr = * ++topo_ptr ) != NULL )
4196 	/* Only competing classes */
4197 	if ( output_unit_ptr != correct_output_unit_ptr )
4198 	  FOR_ALL_LINKS( output_unit_ptr, link_ptr )
	    /* Only competing RBFs whose activation is too high */
4200 	    if ( link_ptr->to->act > theta_neg )
4201 	      {
4202 		/* Shrink! */
4203 		if ( link_ptr->to->bias < 1.0/RBF_SQR( RBF_MIN_SIGMA ) )
4204 		  {
4205 		    if ( link_ptr->to->value_a/RBF_INV > RBF_SQR( RBF_MIN_SIGMA ) )
4206 		      link_ptr->to->bias = RBF_INV/link_ptr->to->value_a;
4207 		    else
4208 		      link_ptr->to->bias = 1.0/RBF_SQR( RBF_MIN_SIGMA );
4209 		  }
4210 #ifdef RBF_DEBUG
4211 		else
4212 		  fprintf(stderr,"\nRBF-DDA WARNING: Sigma too small (competing)!\n");
4213 #endif
4214 	      }
4215       /* End Shrink competing RBFs */
4216 
      /* If there is a desired class: is the current input pattern already
	 covered by an RBF of the correct class? */
4219 
4220       if ( correct_output_unit_ptr != NULL )
4221 	if ( max_rbf_ptr != NULL)
4222 	  {
4223 	    /* increase weight of link from nearest RBF
4224 	       to correct output neuron */
4225 
4226 	    /* Find link from RBF with max. act. to the correct output unit */
4227 	    max_to_out_link_ptr =
4228 	      (struct Link *) correct_output_unit_ptr->sites;
4229 
4230 	    FOR_ALL_LINKS ( correct_output_unit_ptr, link_ptr )
4231 	      if ( link_ptr->to == max_rbf_ptr )
4232 		max_to_out_link_ptr = link_ptr;
4233 
4234 	    /* Increase the weight of this link */
4235 	    max_to_out_link_ptr->weight += 1.0;
4236 	  } /* end if covered or no desired class */
4237 
4238 	else
4239 	  /* Not covered:
4240 	     No RBF near enough or no RBF at all yet ... commit new RBF */
4241 	  {
4242 	    struct Unit *new_rbf_ptr;
4243 	    int new_rbf_no = kr_makeDefaultUnit();
4244 
4245 	    NetModified = TRUE;
4246 	    RBF_ERROR_CHECK( new_rbf_no );
4247 	    RBF_ERROR_CHECK( kr_unitSetTType ( new_rbf_no, HIDDEN ) );
4248 	    RBF_ERROR_CHECK( krui_setUnitActFunc ( new_rbf_no, "Act_RBF_Gaussian" ) );
4249 	    new_rbf_ptr = kr_getUnitPtr ( new_rbf_no );
4250 	    RBF_ERROR_CHECK( KernelErrorCode );
4251 	    new_rbf_ptr->i_act      = 0.0;
4252 	    new_rbf_ptr->bias       = 1.0/RBF_SQR( RBF_MAX_SIGMA );
4253 	    new_rbf_ptr->out_func   = OUT_IDENTITY;
4254 
	    /* Set weights of links from inputs to the new RBF (= center of RBF) */
4256 
4257 	    RBF_ERROR_CHECK( krui_setCurrentUnit ( new_rbf_no ) );
4258 
4259 	    /* Scan input units and create links */
4260 	    topo_ptr = topo_ptr_array;
4261 
4262 	    while ( (input_unit_ptr = * ++topo_ptr ) != NULL)
4263 	      RBF_ERROR_CHECK
4264 		( krui_createLink ( RBF_GET_UNIT_NO ( input_unit_ptr ),
4265 				   input_unit_ptr->act ) );
4266 
	    /* Make a link from the new RBF to the correct output unit */
4268 
4269 	    RBF_ERROR_CHECK( kr_setCurrUnit ( correct_output_unit_no ) );
4270 	    RBF_ERROR_CHECK( krui_createLink ( new_rbf_no, 1.0 ) );
4271 
4272 	    /*  sort units by topology and by topologic type  */
4273 	    RBF_ERROR_CHECK( kr_topoSort ( TOPOLOGICAL_FF ) );
4274 
4275 	    /* Shrink new RBF */
4276 
4277 	    /* Scan all competing RBFs */
4278 
4279 	    new_rbf_ptr = kr_getUnitPtr ( new_rbf_no );
4280 	    topo_ptr = topo_ptr_array + 2 + NoOfInputUnits + NoOfHiddenUnits;
4281 
4282 	    while ( (output_unit_ptr = * ++topo_ptr ) != NULL)
4283 	      /* Only competing RBFs! */
4284 	      if ( output_unit_ptr != correct_output_unit_ptr )
4285 		FOR_ALL_LINKS( output_unit_ptr, link_ptr )
4286 		  {
4287 		    float sqr_distance = 0.0;
4288 		    struct Link *link_ptr1, *link_ptr2;
4289 
		    /* Compute the Euclidean distance from the center of the
		       new RBF to the center of a competing RBF */
4292 
4293 		    link_ptr1 = (struct Link *) new_rbf_ptr->sites;
4294 		    link_ptr2 = (struct Link *) link_ptr->to->sites;
4295 
4296 		    while ( link_ptr1 != NULL)
4297 		      {
4298 			float diff;
4299 
4300 			/* Find corresponding link so that link_ptr1 and
4301 			   link_ptr2 start at the same unit. */
4302 
			if ( link_ptr1->to != link_ptr2->to )
			  {
			    link_ptr1 = (struct Link *) new_rbf_ptr->sites;
			    while ( link_ptr1 != NULL &&
				    link_ptr1->to != link_ptr2->to )
			      link_ptr1 = link_ptr1->next;
			    if ( link_ptr1 == NULL )
			      return KRERR_NP_DOES_NOT_FIT;
			  }
4312 
4313 			/* Compute distance of the two centers */
4314 
4315 			diff = ( link_ptr1->weight - link_ptr2->weight );
4316 			sqr_distance += diff*diff;
4317 			if ( link_ptr1->to != link_ptr2->to )
4318 			  return DDA_CONN_POINTER;
4319 
4320 			link_ptr1 = link_ptr1->next;
4321 			link_ptr2 = link_ptr2->next;
4322 		      }
4323 
4324 		    /* Activation greater than theta_neg? Sigma too big ? */
4325 
4326 		    if ( RBF_INV/new_rbf_ptr->bias > sqr_distance )
4327 		      /* Shrink! */
4328 		      if ( new_rbf_ptr->bias < 1.0/RBF_SQR( RBF_MIN_SIGMA ) )
4329 	  	        {
4330 			  if ( sqr_distance/RBF_INV > RBF_SQR( RBF_MIN_SIGMA ) )
4331 			    new_rbf_ptr->bias = RBF_INV / sqr_distance;
4332 			  else
4333 			    new_rbf_ptr->bias = 1.0/RBF_SQR( RBF_MIN_SIGMA );
4334 		        }
4335 #ifdef RBF_DEBUG
4336 		      else
4337 		        fprintf( stderr, "\nRBF-DDA WARNING: Sigma too small (new)!\n" );
4338 #endif
4339 		  }
4340 
4341 	  } /* end else not covered/new rbf */
4342 
4343     } /* end while kr_getSubPatternByOrder (Main Loop) */
4344 
4345 
4346   /* Set positions of units on display */
4347   {
4348     int i = 0;
4349 
4350     /* Direction of data flow: From left to right */
4351     if ( xmax_in < xmin_out )
4352       {
4353 	topo_ptr = topo_ptr_array + 1 + NoOfInputUnits;
4354 
4355 	while ( (hidden_unit_ptr = * ++topo_ptr ) != NULL)
4356 	  {
4357 	    hidden_unit_ptr->unit_pos.x = xmax_in + 4 + i / max_units_displayed;
4358 	    hidden_unit_ptr->unit_pos.y = RBF_MIN ( ymin_in, ymin_out) +
4359 	      i % max_units_displayed;
4360 	    i++;
4361 	  }
4362 
4363 	while ( (output_unit_ptr = * ++topo_ptr ) != NULL)
4364 	  output_unit_ptr->unit_pos.x +=
4365 	    xmax_in + 8 + (NoOfHiddenUnits - 1) / max_units_displayed - xmin_out;
4366       }
4367 
4368     /* right to left */
4369     else if ( xmin_in > xmax_out &&  !( ymax_in < ymin_out ) )
4370       {
4371 	topo_ptr = topo_ptr_array;
4372 
4373 	while ( (input_unit_ptr = * ++topo_ptr ) != NULL)
4374 	  input_unit_ptr->unit_pos.x +=
4375 	    xmax_out + 8 + (NoOfHiddenUnits - 1) / max_units_displayed - xmin_in ;
4376 
4377 	while ( (hidden_unit_ptr = * ++topo_ptr ) != NULL)
4378 	  {
4379 	    hidden_unit_ptr->unit_pos.x = xmax_out + 4 +
4380 	      ( NoOfHiddenUnits - 1 - i ) / max_units_displayed;
4381 	    hidden_unit_ptr->unit_pos.y =  RBF_MIN ( ymin_in, ymin_out) +
4382 	      i % max_units_displayed;
4383 	    i++;
4384 	  }
4385       }
4386 
4387     /* downwards to upwards */
4388     else if ( ymin_in > ymax_out )
4389       {
4390 	topo_ptr = topo_ptr_array;
4391 
4392 	while ( (input_unit_ptr = * ++topo_ptr ) != NULL)
4393 	  input_unit_ptr->unit_pos.y +=
4394 	    ymax_out + 8 + (NoOfHiddenUnits - 1) / max_units_displayed - ymin_in;
4395 
4396 	while ( (hidden_unit_ptr = * ++topo_ptr ) != NULL)
4397 	  {
4398 	    hidden_unit_ptr->unit_pos.x =  RBF_MIN ( xmin_in, xmin_out) +
4399 	      i % max_units_displayed;
4400 	    hidden_unit_ptr->unit_pos.y = ymax_out + 4 +
4401 	      ( NoOfHiddenUnits - 1 - i ) / max_units_displayed;
4402 	    i++;
4403 	  }
4404       }
4405 
4406     /* Default case: upwards to downwards */
4407     else
4408       {
4409 	topo_ptr = topo_ptr_array + 1 + NoOfInputUnits;
4410 
4411 	while ( (hidden_unit_ptr = * ++topo_ptr ) != NULL)
4412 	  {
4413 	    hidden_unit_ptr->unit_pos.x =  RBF_MIN ( xmin_in, xmin_out) +
4414 	      i % max_units_displayed;
4415 	    hidden_unit_ptr->unit_pos.y = ymax_in + 4 + i / max_units_displayed;
4416 	    i++;
4417 	  }
4418 
4419 	while ( (output_unit_ptr = * ++topo_ptr ) != NULL)
4420 	  output_unit_ptr->unit_pos.y +=
4421 	    ymax_in + 8 + (NoOfHiddenUnits - 1) / max_units_displayed - ymin_out;
4422       }
4423 
4424   } /* end Set positions of units ... */
4425 
4426 
  /* Compute the error == sum of squared output deviations */
4428 
4429   *NoOfOutParams = 1; /*  One return value: OutParameter[0] == learning error */
4430   *parameterOutArray = OutParameter;  /*  set the output parameter reference  */
4431 
4432   /* reset network error value  */
4433   OutParameter[0] = 0.0;
4434 
4435   /* compute the necessary sub patterns */
4436   RBF_ERROR_CHECK( kr_initSubPatternOrder ( start_pattern, end_pattern ) );
4437 
4438   while( kr_getSubPatternByOrder ( &pattern_no, &sub_pat_no ) )
4439     {
4440       /* forward propagation */
4441       RbfLearnForward ( pattern_no, sub_pat_no );
4442 
4443       topo_ptr = topo_ptr_array + 2 + NoOfInputUnits  + NoOfHiddenUnits;
4444 
4445       while( (output_unit_ptr = * ++topo_ptr) != NULL )
4446 	OutParameter[0] +=  RBF_SQR( output_unit_ptr->value_a );
4447     }
4448 
4449   return ( KRERR_NO_ERROR );
4450 }
4451 
4452 
4453 /*****************************************************************************
4454  *****************************************************************************
4455 
4456 
4457  GROUP        : RPROP learning function, V1.1
4458 
4459   AUTHOR       : Martin Riedmiller, ILKD, University of Karlsruhe
  Notes        : RPROP parameters are the initial update value (default 0.1)
                 and the maximal update value (default 50.0). The defaults
		 are assumed if the parameters are set to 0.0. It may be
		 helpful to limit the second param to 0.01.
4464 
4465 		V1.1 supports weight decay (third parameter)
4466 
		V1.0 in SNNSv3.0 according to description in IEEE ICNN '93
4468 		V1.1 in SNNSv3.3 according to technical report (Riedmiller 1994)
4469 	       		  -no backtracking in case of jump over minimum
4470 		        new features:
4471         	          -weight decay included
4472 	       		  -'weights-fixed' removed
4473 
4474 *******************************************************************************
4475 ******************************************************************************/
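
/* Sketch of the Rprop update rule implemented by this group (a paraphrase
   of Riedmiller 1994, using the value_a/value_b/value_c conventions noted
   at initializeRprop below):

       if (dEdw(t-1) * dEdw(t) > 0)        // same sign: speed up
	   update_value = MIN(update_value * RPROP_ETAPLUS,  RPROP_MAXEPS);
       else if (dEdw(t-1) * dEdw(t) < 0)   // sign change: slow down
	   update_value = MAX(update_value * RPROP_ETAMINUS, RPROP_MINEPS);
       dw(t) = -sign(dEdw(t)) * update_value;
*/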
4476 
4477 #define RPROP_ETAPLUS 1.2
4478 #define RPROP_ETAMINUS 0.5
4479 #define RPROP_MINEPS 1e-6
4480 #define RPROP_MAXEPS 2.0
4481 #define RPROP_DEFAULT_UPDATE_VALUE 0.001
4482 #define SUM_SQUARE_ERROR          0
4483 #define CROSS_ENTROPY_ERROR       1
4484 #define MULTIPLE_CROSS_ERROR      2
4485 
4486 
4487 /*****************************************************************************
4488   FUNCTION : initializeRprop
4489 
4490   PURPOSE  : Rprop initialisation:
4491   RETURNS  : kernel error code
4492   NOTES    : ->value_c : Sum (dEdw)
4493              ->value_b : dw(t-1)
4494              ->value_a : update_value
4495 
4496   UPDATE   : 09.05.1994 by Guenter Mamier
4497 ******************************************************************************/
4498 static krui_err initializeRprop(float update_val)
4499 {
4500     register unsigned short flags;
4501     register struct Link *link_ptr;
4502     register struct Unit *unit_ptr;
4503     register struct Site *site_ptr;
4504 
    FOR_ALL_UNITS( unit_ptr ){
	flags = unit_ptr->flags;

	if ((flags & UFLAG_IN_USE) == UFLAG_IN_USE){ /*  unit is in use  */
	    unit_ptr->value_b = unit_ptr->value_c = (FlintType) 0;
	    unit_ptr->value_a = (FlintType)update_val;

	    if (flags & UFLAG_SITES){ /*  unit has sites  */
		FOR_ALL_SITES_AND_LINKS( unit_ptr, site_ptr, link_ptr ){
		    link_ptr->value_b = link_ptr->value_c = (FlintType) 0;
		    link_ptr->value_a = (FlintType)update_val;
		}
	    } else {		/*  unit has no sites   */
		if (flags & UFLAG_DLINKS) {	/* unit has direct links   */
		    FOR_ALL_LINKS( unit_ptr, link_ptr ){
			link_ptr->value_b = link_ptr->value_c = (FlintType) 0;
			link_ptr->value_a = (FlintType)update_val;
		    }
		}
	    }
	}
    }

    return( KRERR_NO_ERROR );
4530 }
4531 
4532 
4533 /*****************************************************************************
  FUNCTION : computeDevite

  PURPOSE  : computes the deviation between target and output for the
             selected error function and accumulates the pattern error
             in *sum_error
  RETURNS  : always 0 at present (reserved for an error code)
  NOTES    : errorType is one of SUM_SQUARE_ERROR, CROSS_ENTROPY_ERROR
             or MULTIPLE_CROSS_ERROR; the sign of *devit differs between
             them (see the switch below)

  UPDATE   :
4541 ******************************************************************************/
4542 static int computeDevite(float *devit, float *sum_error,
4543                          float target, float output, int errorType )
4544 {
4545     int ret = 0;
4546     float sum1,sum2,sum3,sum4;
4547 
4548 
4549     switch(errorType){
4550 
4551     case MULTIPLE_CROSS_ERROR:
4552 	(*devit) = output-target;
4553 	if (output > 0.0)
4554 	    sum1 = target * log (output);
4555 	else
4556 	    sum1 = 0.0;
4557 	if (target > 0.0)
4558 	    sum2 = target * log (target);
4559 	else
4560 	    sum2 = 0.0;
4561 	*sum_error -= (sum1 - sum2);
4562 	break;
4563 
4564     case CROSS_ENTROPY_ERROR:
4565 	(*devit) = target-output;
4566 	if (output > 0.0)
4567 	    sum1 = target * log (output);
4568 	else
4569 	    sum1 = 0.0;
4570 	if (target > 0.0)
4571 	    sum2 = target * log (target);
4572 	else
4573 	    sum2 = 0.0;
4574 	if ( (1 - output) > 0.0)
4575 	    sum3 = (1 - target) * log (1 - output);
4576 	else
4577 	    sum3 = 0.0;
4578 	if ((1 - target) > 0.0)
	    sum4 = (1 - target) * log (1 - target);
4580 	else
4581 	    sum4 = 0.0;
4582 
4583 	*sum_error -= (sum1 - sum2 + sum3 - sum4);
4584 	break;
4585 
4586     case SUM_SQUARE_ERROR:
4587     default:
4588 	(*devit) = target-output;
4589 	*sum_error += (*devit) * (*devit);
4590 	break;
4591     }
4592     return ret;
4593 }
4594 
4595 
4596 
4597 /*****************************************************************************
  FUNCTION : computeAlpha

  PURPOSE  : computes the Bayesian hyperparameter estimate
             alpha = W / sum(w*w), where W counts all weights and
             non-input biases and the sum runs over the same parameters
  RETURNS  : alpha, or 0.0 if the squared parameter sum is not positive
  NOTES    :

  UPDATE   :
4605 ******************************************************************************/
4606 static float computeAlpha(void)
4607 {
4608     int i=0,s,t;
4609     float Alpha = 0.0, sum = 0.0,weightVal;
4610 
4611     for( s = krui_getFirstUnit(); s != 0; s = krui_getNextUnit() ) {
4612 	if (krui_getUnitTType( s ) != INPUT){
	    weightVal = krui_getUnitBias(s);
4615 	    sum += weightVal * weightVal;     /* add biases */
4616 	    i++;
4617 	}
4618 
4619 	for( t=krui_getFirstSuccUnit(s,&weightVal); t != 0;
4620 	     t = krui_getNextSuccUnit(&weightVal) ){
4621 	    i++;
4622 	    sum += weightVal * weightVal;     /* add weights */
4623         }
4624 
4625 	/* getNextUnit will get the succ of the current Unit */
4626 	krui_setCurrentUnit( s );
4627     }
4628     if (sum > 0.0)
4629 	Alpha = i / sum;
4630     return Alpha;
4631 }
4632 
4633 
4634 
4635 /*****************************************************************************
  FUNCTION : propagateNetForwardMAP
4637 
4638   PURPOSE  : forward pass for Rprop with different error functions
4639   RETURNS  :
4640   NOTES    : topological forward propagation
4641 
4642   UPDATE   :
4643 ******************************************************************************/
4644 static void propagateNetForwardMAP(int pattern_no, int sub_pat_no,
4645 				   int errorType)
4646 {
4647     register struct Unit *unit_ptr;
4648     register Patterns in_pat;
4649     register TopoPtrArray topo_ptr;
4650     float sum_act = 0.0;
4651 
4652     /* calculate startaddress for input pattern array  */
4653     in_pat = kr_getSubPatData(pattern_no,sub_pat_no,INPUT,NULL);
4654     if(in_pat == NULL){
4655         KernelErrorCode = KRERR_NP_NO_SUCH_PATTERN;
4656         return;
4657     }
4658 
4659     topo_ptr = topo_ptr_array;
4660 
4661     /* copy pattern into input unit's activation and calculate output of the
4662        input units */
4663     while ((unit_ptr = *++topo_ptr) != NULL) {
4664 
        /* topo_ptr points to a (topologically sorted) unit
           structure (input unit first)  */
4667         if (unit_ptr->out_func == OUT_IDENTITY)
4668             /* identity output function: there is no need to call the output
4669                function  */
4670             unit_ptr->Out.output = unit_ptr->act = *in_pat++;
4671 	else if(unit_ptr->out_func == OUT_Custom_Python)
4672 		unit_ptr->Out.output =
4673 			kr_PythonOutFunction(unit_ptr->python_out_func,
4674 				unit_ptr->act = *in_pat++);
4675         else
4676             /* no identity output function: calculate unit's output also  */
4677             unit_ptr->Out.output
4678                 = (*unit_ptr->out_func) (unit_ptr->act = *in_pat++);
4679     }
4680 
    /* propagate hidden units  */
    while ((unit_ptr = *++topo_ptr) != NULL) {  /* topo_ptr points to a
                                                   (topologically sorted) unit
                                                   structure */
4685         /* clear error values  */
4686         unit_ptr->Aux.flint_no = 0.0;
4687 
4688         /* calculate the activation value of the unit: call the activation
4689            function if needed  */
4690         unit_ptr->act = ((unit_ptr->act_func == ACT_Custom_Python) ?
4691 			kr_PythonActFunction(unit_ptr->python_act_func,
4692 						unit_ptr) :
4693 			(unit_ptr->act_func) (unit_ptr)) ;
4694 
4695         if (unit_ptr->out_func == OUT_IDENTITY)
4696             /* identity output function: there is no need to call the output
4697                function  */
4698             unit_ptr->Out.output = unit_ptr->act;
4699 	else if(unit_ptr->out_func == OUT_Custom_Python)
4700 		unit_ptr->Out.output =
4701 			kr_PythonOutFunction(unit_ptr->python_out_func,
4702 				unit_ptr->act);
4703         else
4704             /* no identity output function: calculate unit's output also  */
4705             unit_ptr->Out.output = (*unit_ptr->out_func) (unit_ptr->act);
4706     }
4707 
    /* propagate output units  */
    while ((unit_ptr = *++topo_ptr) != NULL) {  /* topo_ptr points to a
                                                   (topologically sorted) unit
                                                   structure */
4712         /* clear error values  */
4713         unit_ptr->Aux.flint_no = 0.0;
4714 
4715         /* calculate the activation value of the unit: call the activation
4716            function if needed  */
4717         unit_ptr->act = ((unit_ptr->act_func == ACT_Custom_Python) ?
4718 			kr_PythonActFunction(unit_ptr->python_act_func,
4719 						unit_ptr) :
4720 			(unit_ptr->act_func) (unit_ptr)) ;
4721         sum_act += unit_ptr->act;
4722         if (unit_ptr->out_func == OUT_IDENTITY)
4723             /* identity output function: there is no need to call the output
4724                function  */
4725             unit_ptr->Out.output = unit_ptr->act;
4726 	else if(unit_ptr->out_func == OUT_Custom_Python)
4727 		unit_ptr->Out.output =
4728 			kr_PythonOutFunction(unit_ptr->python_out_func,
4729 				unit_ptr->act);
4730         else
4731             /* no identity output function: calculate unit's output also  */
4732             unit_ptr->Out.output = (*unit_ptr->out_func) (unit_ptr->act);
4733     }
4734     if (errorType == MULTIPLE_CROSS_ERROR){
4735       /* normalize activations of output units  */
      while ((unit_ptr = *--topo_ptr) != NULL) {        /* topo_ptr points to a
                                                   (topologically sorted) unit
                                                   structure */
4739         /* normalize activation */
4740         if (sum_act > 0.0)
4741           unit_ptr->act /= sum_act; /* / (sum_act + unit_ptr->act);*/
4742 
4743         if (unit_ptr->out_func == OUT_IDENTITY)
4744           /* identity output function: there is no need to call the output
4745              function  */
4746           unit_ptr->Out.output = unit_ptr->act;
4747 	else if(unit_ptr->out_func == OUT_Custom_Python)
4748 		unit_ptr->Out.output =
4749 			kr_PythonOutFunction(unit_ptr->python_out_func,
4750 				unit_ptr->act);
4751         else
4752           /* no identity output function: calculate unit's output also  */
4753           unit_ptr->Out.output = (*unit_ptr->out_func) (unit_ptr->act);
4754       }
4755     }
4756 }
4757 
4758 
4759 /*****************************************************************************
4760   FUNCTION : propagateNetBackwardRprop
4761 
4762   PURPOSE  : Pure Backpropagation of gradient without weight-update
4763   RETURNS  : network error
4764   NOTES    : sum(dE/dw) -> value_c.
4765 
4766   UPDATE   : 09.05.1994 by Guenter Mamier
4767 ******************************************************************************/
4768 static float propagateNetBackwardRprop(int pattern_no, int sub_pat_no)
4769 {
4770     register struct Link *link_ptr;
4771     register struct Site *site_ptr;
4772     register struct Unit *unit_ptr;
4773     register Patterns out_pat;
4774     register float  error,	/* error  */
4775                     sum_error,	/* sum of the error  */
4776                     devit;	/* deviation  */
4777     TopoPtrArray    topo_ptr;
4778     int size;
4779 
4780 
4781     sum_error = 0.0;		/* reset network error  */
4782 
4783     /* calculate address of the output pattern (with number pattern_no + 1)  */
4784     out_pat = kr_getSubPatData(pattern_no,sub_pat_no,OUTPUT,&size);
4785     out_pat += size;
4786 
4787     /*  add 3 to no_of_topo_units because the topologic array contains
4788 	4 NULL pointers  */
4789     topo_ptr = topo_ptr_array + (no_of_topo_units + 3);
4790 
4791     /* calculate output units only  */
4792     while ((unit_ptr = *--topo_ptr) != NULL){
4793 	devit = *(--out_pat) - unit_ptr->Out.output;
	/* the derivative used below is o * (1.0 - o), in [0.0,0.25],
	   for the asymmetric logistic function */
4796 
4797 	sum_error += devit * devit;	/* sum up the error of the network  */
4798 
4799 	/* calc. error for output units	 */
4800 	error = devit * (((unit_ptr->act_deriv_func == ACT_DERIV_Custom_Python) ?
4801 			kr_PythonActFunction(unit_ptr->python_act_deriv_func,
4802 						unit_ptr) :
4803 			(unit_ptr->act_deriv_func) (unit_ptr)) );
4804 
4805 	unit_ptr->value_c += - error /* *1 */; /* calculate the bias slopes */
4806 	/* learn bias like a weight  */
4807 	if (UNIT_HAS_DIRECT_INPUTS( unit_ptr )){
4808 	    /*  the unit has direct links  */
4809 	    FOR_ALL_LINKS( unit_ptr, link_ptr ){
4810 		/*	calculate the slopes  */
4811 		link_ptr->value_c += - error * link_ptr->to->Out.output;
4812 		link_ptr->to->Aux.flint_no += link_ptr->weight * error;
4813 	    }
4814 	}else{
4815 	    /*  the unit has sites  */
4816 	    FOR_ALL_SITES_AND_LINKS( unit_ptr, site_ptr, link_ptr ){
4817 		/* calculate the value_cs  */
4818 		link_ptr->value_c += - error * link_ptr->to->Out.output;
4819 		link_ptr->to->Aux.flint_no += link_ptr->weight * error;
4820 	    }
4821 	}
4822     }
4823 
4824 
4825     /* calculate hidden units only  */
4826     while ((unit_ptr = *--topo_ptr) != NULL){
4827 	error = (((unit_ptr->act_deriv_func == ACT_DERIV_Custom_Python) ?
4828 			kr_PythonActFunction(unit_ptr->python_act_deriv_func,
4829 						unit_ptr) :
4830 			(unit_ptr->act_deriv_func) (unit_ptr)) ) * unit_ptr->Aux.flint_no;
4831 
4832 	unit_ptr->value_c += - error /* * 1 */; /* calculate the bias slopes  */
4833 	/* learn bias like a weight  */
4834 	if (UNIT_HAS_DIRECT_INPUTS( unit_ptr )){ /* the unit has direct links */
4835 	    FOR_ALL_LINKS( unit_ptr, link_ptr ){ /* calculate the slopes  */
4836 		if (link_ptr->to->flags & UFLAG_TTYP_HIDD)
		    /*  this link points to a hidden unit:
			sum up the errors from previous units  */
4839 		    link_ptr->to->Aux.flint_no += link_ptr->weight * error;
4840 
4841 		link_ptr->value_c += - error * link_ptr->to->Out.output;
4842 	    }
4843 	}else {		/*  the unit has sites  */
4844 	    FOR_ALL_SITES_AND_LINKS( unit_ptr, site_ptr, link_ptr ){
4845 		/* calculate the slopes  */
4846 		if (link_ptr->to->flags & UFLAG_TTYP_HIDD)
		    /*  this link points to a hidden unit:
			sum up the errors from previous units  */
4849 		    link_ptr->to->Aux.flint_no += link_ptr->weight * error;
4850 
4851 		link_ptr->value_c += - error * link_ptr->to->Out.output;
4852 	    }
4853 	}
4854     }
4855     return( sum_error );	/*  return the error of the network  */
4856 }
4857 
4858 
4859 /*****************************************************************************
4860   FUNCTION : propagateNetBackwardMAP
4861 
4862   PURPOSE  : Backward phase for RPROP with weight decay
4863   RETURNS  : network error
4864   NOTES    : sum(dE/dw) -> value_c.
4865 
4866   UPDATE   : 09.05.1994 by Guenter Mamier
4867 ******************************************************************************/
4868 static float propagateNetBackwardMAP(int pattern_no, int sub_pat_no,
4869 				     int errorType)
4870 {
4871     register struct Link *link_ptr;
4872     register struct Site *site_ptr;
4873     register struct Unit *unit_ptr;
4874     register Patterns out_pat;
4875     float  error,       /* error  */
4876                     sum_error,  /* sum of the error  */
4877                     devit;      /* deviation  */
4878     TopoPtrArray    topo_ptr;
4879     int size;
4880 
4881 
4882     sum_error = 0.0;            /* reset network error  */
4883 
4884     /* calculate address of the output pattern (with number pattern_no + 1)  */
4885     out_pat = kr_getSubPatData(pattern_no,sub_pat_no,OUTPUT,&size);
4886     out_pat += size;
4887 
4888     /*  add 3 to no_of_topo_units because the topologic array contains
4889         4 NULL pointers  */
4890     topo_ptr = topo_ptr_array + (no_of_topo_units + 3);
4891 
4892     /* calculate output units only  */
4893     while ((unit_ptr = *--topo_ptr) != NULL){
4894         computeDevite(&devit,&sum_error,*(--out_pat), unit_ptr->Out.output,
4895 		      errorType);
4896 
4897         /* calc. error for output units  */
4898         error = devit;
4899         if (errorType == SUM_SQUARE_ERROR)
4900           error *= (((unit_ptr->act_deriv_func == ACT_DERIV_Custom_Python) ?
4901 			kr_PythonActFunction(unit_ptr->python_act_deriv_func,
4902 						unit_ptr) :
4903 			(unit_ptr->act_deriv_func) (unit_ptr)) );
4904 
4905 
4906         unit_ptr->value_c += - error /* *1 */; /* calculate the bias slopes */
4907         /* learn bias like a weight  */
4908         if (UNIT_HAS_DIRECT_INPUTS( unit_ptr )){
4909             /*  the unit has direct links  */
4910             FOR_ALL_LINKS( unit_ptr, link_ptr ){
4911                 /*      calculate the slopes  */
4912                 link_ptr->value_c += - error * link_ptr->to->Out.output;
4913                 link_ptr->to->Aux.flint_no += link_ptr->weight * error;
4914             }
4915         }else{
4916             /*  the unit has sites  */
4917             FOR_ALL_SITES_AND_LINKS( unit_ptr, site_ptr, link_ptr ){
4918                 /* calculate the value_cs  */
4919                 link_ptr->value_c += - error * link_ptr->to->Out.output;
4920                 link_ptr->to->Aux.flint_no += link_ptr->weight * error;
4921             }
4922         }
4923     }
4924 
4925 
4926     /* calculate hidden units only  */
4927     while ((unit_ptr = *--topo_ptr) != NULL){
4928         error = (((unit_ptr->act_deriv_func == ACT_DERIV_Custom_Python) ?
4929 			kr_PythonActFunction(unit_ptr->python_act_deriv_func,
4930 						unit_ptr) :
4931 			(unit_ptr->act_deriv_func) (unit_ptr)) ) * unit_ptr->Aux.flint_no;
4932 
4933         unit_ptr->value_c += - error /* * 1 */; /* calculate the bias slopes  */
4934         /* learn bias like a weight  */
4935         if (UNIT_HAS_DIRECT_INPUTS( unit_ptr )){ /* the unit has direct links */
4936             FOR_ALL_LINKS( unit_ptr, link_ptr ){ /* calculate the slopes  */
4937                 if (link_ptr->to->flags & UFLAG_TTYP_HIDD)
                    /*  this link points to a hidden unit:
                        sum up the errors from previous units  */
4940                     link_ptr->to->Aux.flint_no += link_ptr->weight * error;
4941 
4942                 link_ptr->value_c += - error * link_ptr->to->Out.output;
4943             }
4944         }else {         /*  the unit has sites  */
4945             FOR_ALL_SITES_AND_LINKS( unit_ptr, site_ptr, link_ptr ){
4946                 /* calculate the slopes  */
4947                 if (link_ptr->to->flags & UFLAG_TTYP_HIDD)
                    /*  this link points to a hidden unit:
                        sum up the errors from previous units  */
4950                     link_ptr->to->Aux.flint_no += link_ptr->weight * error;
4951 
4952                 link_ptr->value_c += - error * link_ptr->to->Out.output;
4953             }
4954         }
4955     }
4956     return( sum_error );        /*  return the error of the network  */
4957 }
4958 
4959 
4960 
4961 /*****************************************************************************
4962   FUNCTION : MODI_rprop
4963 
4964   PURPOSE  : modifies network after each epoch
4965   RETURNS  :
4966   NOTES    :
4967 
4968   UPDATE   : 09.05.1994 by Guenter Mamier
4969 ******************************************************************************/
4970 static void MODI_rprop(float maxeps, float weight_decay)
4971 {
4972     register struct Link *link_ptr;
4973     register struct Site *site_ptr;
4974     register struct Unit *unit_ptr;
4975     TopoPtrArray    topo_ptr;
4976     bool            hidden_units;
4977     float           direction;
4978 
4979 
4980     topo_ptr = topo_ptr_array + (NoOfInputUnits + 1);
4981     hidden_units = TRUE;
4982 
4983     /* calculate hidden and output units only  */
    do {
	if ((unit_ptr = *++topo_ptr) == NULL) {
	    if (!hidden_units) break; /* end of topologic pointer array reached */
	    unit_ptr = *++topo_ptr;	/* skip NULL pointer  */
	    hidden_units = FALSE;
	}

	unit_ptr->value_c += weight_decay * unit_ptr->bias;
	direction = unit_ptr->value_b * unit_ptr->value_c;
	if (direction < 0.0){	/*  same direction : dw * dEdw < 0  */
	    unit_ptr->value_a *= RPROP_ETAPLUS;	/* adapt update_value */
	    if (unit_ptr->value_a > maxeps) unit_ptr->value_a = maxeps;
	    if (unit_ptr->value_c < 0.0)
		unit_ptr->value_b = unit_ptr->value_a;
	    else
		unit_ptr->value_b = - (unit_ptr->value_a);
	} else if (direction > 0.0){	/*  direction changed  */
	    unit_ptr->value_b = 0;	/* reset for restarting adaptation
					   in next step */
	    unit_ptr->value_a *= RPROP_ETAMINUS;	/* adapt update_value */
	    if (unit_ptr->value_a < RPROP_MINEPS)
		unit_ptr->value_a = RPROP_MINEPS;
	} else {
	    /* start of RPROP learning process */
	    if (unit_ptr->value_c < 0.0)
		unit_ptr->value_b = unit_ptr->value_a;
	    else if (unit_ptr->value_c > 0.0)
		unit_ptr->value_b = - (unit_ptr->value_a);

	    /* else no action if derivative was zero */
	}

	if (!IS_SPECIAL_UNIT(unit_ptr))
	    unit_ptr->bias += unit_ptr->value_b;	/* compute new bias */
	unit_ptr->value_c = 0.0;	/* reset */
5021 
5022       /*adjust links*/
5023       if (UNIT_HAS_DIRECT_INPUTS( unit_ptr )) { /*  the unit has direct links */
5024 	  FOR_ALL_LINKS( unit_ptr, link_ptr ){
5025 	      link_ptr->value_c += weight_decay * link_ptr->weight;
5026 		direction = link_ptr->value_b * link_ptr->value_c;
5027 		if (direction < 0.0) {	/* same direction : dw * dEdw < 0  */
5028 		  link_ptr->value_a *= RPROP_ETAPLUS;	/* adapt update_value*/
5029 		  if (link_ptr->value_a > maxeps) link_ptr->value_a = maxeps;
5030 		    if (link_ptr->value_c < 0.0)
5031 			link_ptr->value_b = link_ptr->value_a;
5032 		    else
5033 		      link_ptr->value_b = - (link_ptr->value_a);
5034 	      } else
5035 		  if (direction > 0.0) { /*  direction changed  */
5036 		      link_ptr->value_b = 0; /* reset for restarting adaptation
5037 						in next step */
5038 		      link_ptr->value_a *= RPROP_ETAMINUS;/*adapt update_value*/
5039 		      if( link_ptr->value_a < RPROP_MINEPS)
5040 			link_ptr->value_a = RPROP_MINEPS;
5041 		} else {
5042 		    /* start of RPROP learning process  */
5043 		    if (link_ptr->value_c < 0.0)
5044 			link_ptr->value_b = link_ptr->value_a;
5045 		    else if (link_ptr->value_c > 0.0)
5046 			  link_ptr->value_b = - (link_ptr->value_a);
5047 
5048 		    /* else no action if  derivative was zero */
5049 
5050 		}
5051 	      if(!IS_SPECIAL_UNIT(unit_ptr))
5052 		  link_ptr->weight += link_ptr->value_b;/* compute new weight*/
5053 		link_ptr->value_c = 0.0;	        /* reset */
5054 
5055 	    }
5056 	} else {		/* the unit has sites  */
5057 	  FOR_ALL_SITES_AND_LINKS( unit_ptr, site_ptr, link_ptr ){
5058 	      link_ptr->value_c += weight_decay * link_ptr->weight;
5059 	      direction = link_ptr->value_b * link_ptr->value_c;
	      if (direction < 0.0){ /*  same direction : dw * dEdw < 0  */
5061 		    link_ptr->value_a *= RPROP_ETAPLUS;	/* adapt update_value */
5062 		  if (link_ptr->value_a > maxeps) link_ptr->value_a = maxeps;
5063 		    if (link_ptr->value_c < 0.0)
5064 			link_ptr->value_b = link_ptr->value_a;
5065 		    else
5066 		      link_ptr->value_b = - (link_ptr->value_a);
5067 	      } else
5068 		  if (direction > 0.0) { /*  direction changed  */
5069 		    link_ptr->value_b = 0;	/* reset for restarting
5070 						   adaptation in next step */
5071 		      link_ptr->value_a *= RPROP_ETAMINUS;/*adapt update_value*/
5072 		      if(link_ptr->value_a < RPROP_MINEPS)
5073 			link_ptr->value_a = RPROP_MINEPS;
5074 		  }else {
5075 		    /* start of RPROP learning process */
5076 		    if (link_ptr->value_c < 0.0)
5077 			link_ptr->value_b = link_ptr->value_a;
5078 		    else if (link_ptr->value_c > 0.0)
5079 			  link_ptr->value_b = - (link_ptr->value_a);
5080 
5081 		    /* else no action if  derivative was zero */
5082 
5083 		}
5084 
5085 	      if(!IS_SPECIAL_UNIT(unit_ptr))
5086 		  link_ptr->weight += link_ptr->value_b; /*compute new weight*/
5087 		link_ptr->value_c = 0.0;	/* reset */
5088 	    }
5089 	}
5090     }				/* for units  */
5091   while( TRUE );
5092 
5093 }
5094 
5095 
5096 /*****************************************************************************
5097   FUNCTION : LEARN_rprop
5098 
5099   PURPOSE  : RPROP learning function
5100   RETURNS  : kernel error code
5101   NOTES    : Input Parameters:   1 : initial update value
5102                                  2 : maxeps;
5103                                  3 : exponent weight decay
5104 
5105              Output Parameters:  1 : error of the network (sum of all cycles)
5106 
5107   UPDATE   : 09.05.1994 by Guenter Mamier
5108 ******************************************************************************/
5109 krui_err LEARN_rprop(int start_pattern, int end_pattern,
5110 		     float *parameterInArray, int NoOfInParams,
5111 		     float **parameterOutArray, int *NoOfOutParams)
5112 {
    static float OutParameter[1];	/* OutParameter[0] stores the
					   learning error */
5115     int    pattern_no, sub_pat_no, ret_code;
5116     float  maxeps, update_value, wd;
5117 
5118 
5119     if (NoOfUnits == 0)
5120 	return( KRERR_NO_UNITS ); /*  No Units defined  */
5121 
5122     if (NoOfInParams < 3)
5123 	return( KRERR_PARAMETERS ); /*  not enough input parameters  */
5124 
5125     /* DEFAULTS: */
5126 
5127     if (( update_value = LEARN_PARAM1( parameterInArray )) == 0.0)
5128 	update_value = RPROP_DEFAULT_UPDATE_VALUE;
5129     if ((maxeps = LEARN_PARAM2( parameterInArray )) == 0.0)
5130 	maxeps = RPROP_MAXEPS;
5131     if (update_value > maxeps) update_value = maxeps;
5132 
5133     wd = LEARN_PARAM3( parameterInArray );
5134     if (wd != 0.0)
5135 	wd = (float) pow(10.0, (double)(- wd));
5136 
5137 
5138 
5139 
5140     *NoOfOutParams = 1;		/* one return value */
5141 
5142     *parameterOutArray = OutParameter;	/* set output parameter reference  */
5143     ret_code = KRERR_NO_ERROR;	/* reset return code  */
5144 
5145     if (NetModified || (TopoSortID != TOPOLOGICAL_FF)){
5146 	/* Net has been modified  or topologic array isn't initialized */
5147 	/* check the topology of the network  */
5148 	ret_code = kr_topoCheck();
5149 	if (ret_code < KRERR_NO_ERROR)
	    return( ret_code ); /*  an error has occurred  */
5151 	if (ret_code < 2)
5152 	    return( KRERR_NET_DEPTH ); /*the network has less than 2 layers  */
5153 
5154 	/* count the no. of I/O units and check the patterns  */
5155 	ret_code = kr_IOCheck();
5156 	if (ret_code < KRERR_NO_ERROR)  return( ret_code );
5157 
5158 	/* sort units by topology and by topologic type  */
5159 	ret_code = kr_topoSort( TOPOLOGICAL_FF );
5160 	if ((ret_code != KRERR_NO_ERROR) && (ret_code != KRERR_DEAD_UNITS))
5161 	    return( ret_code );
5162 
5163     }
5164 
5165     if (NetModified || NetInitialize || LearnFuncHasChanged){
5166 	/*  Net has been modified or initialized, initialize RPROP */
5167 	ret_code = initializeRprop(update_value);
5168 	if (ret_code != KRERR_NO_ERROR)  return( ret_code );
5169     }
5170     NetModified = FALSE;
5171 
5172     /* compute the necessary sub patterns */
5173 
5174     KernelErrorCode = kr_initSubPatternOrder(start_pattern,end_pattern);
5175     if(KernelErrorCode != KRERR_NO_ERROR)
5176 	return (KernelErrorCode);
5177 
5178     NET_ERROR(OutParameter) = 0.0;	/* reset network error value  */
5179 
5180     while(kr_getSubPatternByOrder(&pattern_no,&sub_pat_no)){
5181 	propagateNetForward(pattern_no,sub_pat_no);   /* forward propagation  */
5182 
5183 	/* backward propagation and summation of gradient  */
5184 	NET_ERROR(OutParameter)
5185 	    += propagateNetBackwardRprop(pattern_no,sub_pat_no);
5186     }
5187 
5188     /*  modify links and bias  */
5189     MODI_rprop(maxeps, wd);
5190     return( ret_code );
5191 }
5192 
5193 /*****************************************************************************
5194   FUNCTION : LEARN_RpropMAP
5195 
5196   PURPOSE  : RPROP learning function with adaptive weight decay
5197   RETURNS  : kernel error code
  NOTES    : Input Parameters:   1 : initial update value
                                 2 : maxeps
                                 3 : exponent weight decay
                                 4 : #epochs between hyperparameter updates
                                 5 : error function type
5199 
5200              Output Parameters:  1 : error of the network (sum of all cycles)
5201 
5202   UPDATE   :
5203 ******************************************************************************/
5204 krui_err LEARN_RpropMAP(int start_pattern, int end_pattern,
5205                      float *parameterInArray, int NoOfInParams,
5206                      float **parameterOutArray, int *NoOfOutParams)
5207 {
    static float OutParameter[1];	/* OutParameter[0] stores the
					   learning error */
5210     static int counter=0;
5211     int    pattern_no, sub_pat_no, ret_code,errorType=0,updateEpoch=20;
5212     float  maxeps, update_value;
5213     float  alpha, beta, lambda;
5214 
5215 
5216     if (NoOfUnits == 0)
5217         return( KRERR_NO_UNITS ); /*  No Units defined  */
5218 
    if (NoOfInParams < 5)
        return( KRERR_PARAMETERS ); /* not enough input parameters:
                                       all five parameters are read below */
5221 
    /* DEFAULTS: */

    if (( update_value = LEARN_PARAM1( parameterInArray )) == 0.0)
        update_value = RPROP_DEFAULT_UPDATE_VALUE;
    if ((maxeps = LEARN_PARAM2( parameterInArray )) == 0.0)
        maxeps = RPROP_MAXEPS;
    if (update_value > maxeps) update_value = maxeps;
5235     if (!(( lambda = LEARN_PARAM3( parameterInArray )) == 0.0))
5236         lambda = (float) pow(10,(double)(- lambda));
5237 
5238     updateEpoch = (int) LEARN_PARAM4( parameterInArray );
5239     errorType = (int) LEARN_PARAM5( parameterInArray );
5240 
5241     *NoOfOutParams = 1;         /* one return value */
5242 
5243     *parameterOutArray = OutParameter;  /* set output parameter reference  */
5244     ret_code = KRERR_NO_ERROR;  /* reset return code  */
5245 
5246     if (NetModified || (TopoSortID != TOPOLOGICAL_FF)){
5247         /* Net has been modified  or topologic array isn't initialized */
5248         /* check the topology of the network  */
5249         ret_code = kr_topoCheck();
5250         if (ret_code < KRERR_NO_ERROR)
            return( ret_code ); /*  an error has occurred  */
5252         if (ret_code < 2)
5253             return( KRERR_NET_DEPTH ); /*the network has less than 2 layers  */
5254 
5255         /* count the no. of I/O units and check the patterns  */
5256         ret_code = kr_IOCheck();
5257         if (ret_code < KRERR_NO_ERROR)  return( ret_code );
5258 
5259         /* sort units by topology and by topologic type  */
5260         ret_code = kr_topoSort( TOPOLOGICAL_FF );
5261         if ((ret_code != KRERR_NO_ERROR) && (ret_code != KRERR_DEAD_UNITS))
5262             return( ret_code );
5263         counter = 0;
5264     }
5265 
5266     if (NetModified || NetInitialize || LearnFuncHasChanged){
5267         /*  Net has been modified or initialized, initialize RPROP */
5268         ret_code = initializeRprop(update_value);
5269         if (ret_code != KRERR_NO_ERROR)  return( ret_code );
5270         counter = 0;
5271     }
5272     NetModified = FALSE;
5273 
5274     /* compute the necessary sub patterns */
5275 
5276     KernelErrorCode = kr_initSubPatternOrder(start_pattern,end_pattern);
5277     if(KernelErrorCode != KRERR_NO_ERROR)
5278         return (KernelErrorCode);
5279 
5280     NET_ERROR(OutParameter) = 0.0;      /* reset network error value  */
5281 
5282     while(kr_getSubPatternByOrder(&pattern_no,&sub_pat_no)){
5283         propagateNetForwardMAP(pattern_no,sub_pat_no,errorType);
5284 
5285         /* backward propagation and summation of gradient  */
5286         NET_ERROR(OutParameter)
5287             += propagateNetBackwardMAP(pattern_no,sub_pat_no, errorType);
5288     }
5289     counter++;
5290     if (updateEpoch && (counter % updateEpoch == 0) ){
5291       /* compute bayes hyperparameter */
5292       alpha = computeAlpha();
5293       beta  = krui_getNoOfPatterns() / NET_ERROR(OutParameter);
      if (beta == 0.0)
	  lambda = 0.0;		/* should never happen */
      else
	  lambda = alpha / beta;
5298 
5299       fprintf(stderr,"Epoch %d, beta:  %.4f alpha: %.4f lambda: %.4f \n",
5300               counter, beta, alpha, lambda);
5301     }
5302     /*  modify links and bias  */
5303     MODI_rprop(maxeps, lambda);
5304     return( ret_code );
5305 }
5306 
5307 
5308 
5309 /*****************************************************************************
5310   FUNCTION : TEST_rprop
5311 
5312   PURPOSE  : RPROP testing function
5313   RETURNS  : kernel error code
5314   NOTES    : Input Parameters:   1 : initial update value
5315                                  2 : maxeps;
5316                                  3 : exponent weight decay
5317 
5318              Output Parameters:  1 : error of the network (sum of all cycles)
5319 
5320   UPDATE   : 03.03.95 Joachim Danz
5321 ******************************************************************************/
5322 krui_err TEST_rprop(int start_pattern, int end_pattern,
5323 		     float *parameterInArray, int NoOfInParams,
5324 		     float **parameterOutArray, int *NoOfOutParams)
5325 {
    static float OutParameter[1];	/* OutParameter[0] stores the
					   learning error */
5328     int    pattern_no, sub_pat_no, ret_code;
5329 
5330     if (NoOfUnits == 0)
5331 	return( KRERR_NO_UNITS ); /*  No Units defined  */
5332 
5333     if (NoOfInParams < 3)
5334 	return( KRERR_PARAMETERS ); /*  not enough input parameters  */
5335 
5336 
5337 
5338     *NoOfOutParams = 1;		/* one return value */
5339 
5340     *parameterOutArray = OutParameter;	/* set output parameter reference  */
5341     ret_code = KRERR_NO_ERROR;	/* reset return code  */
5342 
5343     if (NetModified || (TopoSortID != TOPOLOGICAL_FF)){
5344 	/* Net has been modified  or topologic array isn't initialized */
5345 	/* check the topology of the network  */
5346 	ret_code = kr_topoCheck();
5347 	if (ret_code < KRERR_NO_ERROR)
	    return( ret_code ); /*  an error has occurred  */
5349 	if (ret_code < 2)
5350 	    return( KRERR_NET_DEPTH ); /*the network has less than 2 layers  */
5351 
5352 	/* count the no. of I/O units and check the patterns  */
5353 	ret_code = kr_IOCheck();
5354 	if (ret_code < KRERR_NO_ERROR)  return( ret_code );
5355 
5356 	/* sort units by topology and by topologic type  */
5357 	ret_code = kr_topoSort( TOPOLOGICAL_FF );
5358 	if ((ret_code != KRERR_NO_ERROR) && (ret_code != KRERR_DEAD_UNITS))
5359 	    return( ret_code );
5360 
5361 	NetModified = FALSE;
5362     }
5363 
5364 
5365     /* compute the necessary sub patterns */
5366 
5367     KernelErrorCode = kr_initSubPatternOrder(start_pattern,end_pattern);
5368     if(KernelErrorCode != KRERR_NO_ERROR)
5369 	return (KernelErrorCode);
5370 
5371     NET_ERROR(OutParameter) = 0.0;	/* reset network error value  */
5372 
5373     while(kr_getSubPatternByOrder(&pattern_no,&sub_pat_no)){
5374 	propagateNetForward(pattern_no,sub_pat_no);   /* forward propagation  */
5375 
5376 	/* backward propagation and summation of gradient  */
5377 	NET_ERROR(OutParameter)
5378 	    += testNetBackwardRprop(pattern_no,sub_pat_no);
5379     }
5380 
5381     return( ret_code );
5382 }
5383 
5384 
5385 /*****************************************************************************
5386   FUNCTION : testNetBackwardRprop
5387 
5388   PURPOSE  : Calculation of Error
5389   RETURNS  : network error
5390   NOTES    : sum(dE/dw) -> value_c.
5391 
5392   UPDATE   : 03.03.1995 by Joachim Danz
5393 ******************************************************************************/
5394 static float testNetBackwardRprop(int pattern_no, int sub_pat_no)
5395 {
5396     register struct Unit *unit_ptr;
5397     register Patterns out_pat;
5398     register float  error,	/* error  */
5399                     sum_error,	/* sum of the error  */
5400                     devit;	/* deviation  */
5401     TopoPtrArray    topo_ptr;
5402     int size;
5403 
5404 
5405     sum_error = 0.0;		/* reset network error  */
5406 
5407     /* calculate address of the output pattern (with number pattern_no + 1)  */
5408     out_pat = kr_getSubPatData(pattern_no,sub_pat_no,OUTPUT,&size);
5409     out_pat += size;
5410 
5411     /*  add 3 to no_of_topo_units because the topologic array contains
5412 	4 NULL pointers  */
5413     topo_ptr = topo_ptr_array + (no_of_topo_units + 3);
5414 
5415     /* calculate output units only  */
5416     while ((unit_ptr = *--topo_ptr) != NULL){
5417 	devit = *(--out_pat) - unit_ptr->Out.output;
	/* the derivative used below is o * (1.0 - o), in [0.0,0.25],
	   for the asymmetric logistic function */
5420 
5421 	sum_error += devit * devit;	/* sum up the error of the network  */
5422 
5423 	/* calc. error for output units	 */
5424 	error = devit * (((unit_ptr->act_deriv_func == ACT_DERIV_Custom_Python) ?
5425 			kr_PythonActFunction(unit_ptr->python_act_deriv_func,
5426 						unit_ptr) :
5427 			(unit_ptr->act_deriv_func) (unit_ptr)) );
5428 
5429     }
5430 
5431     return( sum_error );	/*  return the error of the network  */
5432 }
5433 
5434 
5435 /*****************************************************************************
5436   FUNCTION : testNetBackwardMAP
5437 
5438   PURPOSE  : Calculation of Error
5439   RETURNS  : network error
5440   NOTES    : sum(dE/dw) -> value_c.
5441 
5442   UPDATE   : 03.03.1997 by Thomas Ragg
5443 ******************************************************************************/
5444 static float testNetBackwardMAP(int pattern_no, int sub_pat_no, int errorType)
5445 {
5446     register struct Unit *unit_ptr;
5447     register Patterns out_pat;
5448     float           sum_error,  /* sum of the error  */
5449                     devit;      /* deviation  */
5450     TopoPtrArray    topo_ptr;
5451     int size;
5452 
5453 
5454     sum_error = 0.0;            /* reset network error  */
5455 
5456     /* calculate address of the output pattern (with number pattern_no + 1)  */
5457     out_pat = kr_getSubPatData(pattern_no,sub_pat_no,OUTPUT,&size);
5458     out_pat += size;
5459 
5460     /*  add 3 to no_of_topo_units because the topologic array contains
5461         4 NULL pointers  */
5462     topo_ptr = topo_ptr_array + (no_of_topo_units + 3);
5463 
5464     /* calculate output units only  */
5465     while ((unit_ptr = *--topo_ptr) != NULL){
5466 	computeDevite(&devit, &sum_error, *(--out_pat), unit_ptr->Out.output,
5467 		      errorType);
5468     }
5469     return( sum_error );        /*  return the error of the network  */
5470 }
5471 
5472 
5473 
5474 
5475 /*****************************************************************************
5476   FUNCTION : TEST_MAP
5477 
5478   PURPOSE  : RPROP_MAP testing function
5479   RETURNS  : kernel error code
5480   NOTES    : Input Parameters:   1 : initial update value
5481                                  2 : maxeps;
5482                                  3 : exponent weight decay
5483                                  4 : #epochs update hyperparameter
5484                                  5 : error function type
5485 
5486              Output Parameters:  1 : error of the network (sum of all cycles)
5487 
5488   UPDATE   : 03.10.97 Thomas Ragg
5489 ******************************************************************************/
5490 krui_err TEST_MAP(int start_pattern, int end_pattern,
5491                      float *parameterInArray, int NoOfInParams,
5492                      float **parameterOutArray, int *NoOfOutParams)
5493 {
    static float OutParameter[1];	/* OutParameter[0] stores the
					   learning error */
5496     int    pattern_no, sub_pat_no, ret_code;
5497     int errorType;
5498 
5499     if (NoOfUnits == 0)
5500         return( KRERR_NO_UNITS ); /*  No Units defined  */
5501 
    if (NoOfInParams < 5)
        return( KRERR_PARAMETERS ); /* not enough input parameters:
                                       parameter 5 is read below */
5504 
5505     errorType = (int) LEARN_PARAM5( parameterInArray );
5506 
5507     *NoOfOutParams = 1;         /* one return value */
5508 
5509     *parameterOutArray = OutParameter;  /* set output parameter reference  */
5510     ret_code = KRERR_NO_ERROR;  /* reset return code  */
5511 
5512     if (NetModified || (TopoSortID != TOPOLOGICAL_FF)){
5513         /* Net has been modified  or topologic array isn't initialized */
5514         /* check the topology of the network  */
5515         ret_code = kr_topoCheck();
5516         if (ret_code < KRERR_NO_ERROR)
            return( ret_code ); /*  an error has occurred  */
5518         if (ret_code < 2)
5519             return( KRERR_NET_DEPTH ); /*the network has less than 2 layers  */
5520 
5521         /* count the no. of I/O units and check the patterns  */
5522         ret_code = kr_IOCheck();
5523         if (ret_code < KRERR_NO_ERROR)  return( ret_code );
5524 
5525         /* sort units by topology and by topologic type  */
5526         ret_code = kr_topoSort( TOPOLOGICAL_FF );
5527         if ((ret_code != KRERR_NO_ERROR) && (ret_code != KRERR_DEAD_UNITS))
5528             return( ret_code );
5529 
5530         NetModified = FALSE;
5531     }
5532 
5533 
5534     /* compute the necessary sub patterns */
5535 
5536     KernelErrorCode = kr_initSubPatternOrder(start_pattern,end_pattern);
5537     if(KernelErrorCode != KRERR_NO_ERROR)
5538         return (KernelErrorCode);
5539 
5540     NET_ERROR(OutParameter) = 0.0;      /* reset network error value  */
5541 
5542     while(kr_getSubPatternByOrder(&pattern_no,&sub_pat_no)){
5543         propagateNetForwardMAP(pattern_no,sub_pat_no,errorType);
5544 
5545         /* backward propagation and summation of gradient  */
5546         NET_ERROR(OutParameter)
5547           += testNetBackwardMAP(pattern_no,sub_pat_no,errorType);
5548     }
5549 
5550     return( ret_code );
5551 }
5552 
5553 
5554 
5555 
5556 /*****************************************************************************
5557  *****************************************************************************
5558 
5559   GROUP        : ART 1 learning function
5560 
5561   AUTHOR       : Kai-Uwe Herrmann
5562 
5563 ******************************************************************************
5564 ******************************************************************************/
5565 
5566 
5567 
5568 /*****************************************************************************
5569   FUNCTION : LEARN_ART1
5570 
5571   PURPOSE  : ART 1 learning function.
5572   RETURNS  : kernel error code
5573   NOTES    : 1 input-parameter  :  1. vigilance parameter RHO
5574 
5575              output-parameters  :  numbers of classified patterns,
5576                                    separator -1,
5577                                    numbers of not classifiable patterns
5578 
5579   UPDATE   : 06.11.1993 by Guenter Mamier
5580 ******************************************************************************/
5581 krui_err  LEARN_ART1(int start_pattern, int end_pattern,
5582 		     float parameterInArray[], int NoOfInParams,
5583 		     float **parameterOutArray, int *NoOfOutParams)
5584 {
5585     krui_err        ret_code = KRERR_NO_ERROR;
5586     int             pattern_no, sub_pat_no;	/* Contains actual */
5587 						/* pattern number */
5588     int             start, end;
5589     int             i,n;
5590     struct Unit    *winner_ptr;	/* recognition unit which is the winner of
5591 				   w.t.a  */
5592     TopoPtrArray    topo_layer[6];	/* topo_layer[0] : *first input unit
5593 					   topo_layer[1] : *first comp. unit
5594 					   topo_layer[2] : *first rec.  unit
5595 					   topo_layer[3] : *first delay unit
5596 					   topo_layer[4] : *first local reset
5597 					   unit topo_layer[5] : *first
5598 					   special unit (classified_unit) */
5599     TopoPtrArray    topo_ptr;
5600     FlintType       beta;
5601     float           rho;
5602 
5603 
5604     /* Check number of incoming parameters */
5605 
5606     if (NoOfInParams < 1) {
5607 	ret_code = KRERR_PARAMETERS;
5608 	return (ret_code);
5609     }				/* if */
5610     /* rho is the vigilance parameter   */
5611     rho = parameterInArray[0];
5612 
5613 
5614     /* Check interval for vigilance parameter and constant value L */
5615 
5616     if ((rho < 0.0) || (rho > 1.0)) {
5617 	ret_code = KRERR_PARAMETERS;
5618 	return (ret_code);
5619     }				/* if */
5620     /* Check if network has been modified or learning func has been changed */
5621     if (NetModified || LearnFuncHasChanged || (TopoSortID != ART1_TOPO_TYPE)) {
5622 	(void) kr_topoSort(ART1_TOPO_TYPE);
5623 	ret_code = KernelErrorCode;
5624 	if (ret_code != KRERR_NO_ERROR) {
5625 	    NetModified = TRUE;
5626 	    return (ret_code);
5627 	}			/* if */
5628 	NetModified = FALSE;
5629 	LearnFuncHasChanged = FALSE;
5630     }				/* if */
5631     /* set initial activation values */
5632     ret_code = kra1_init_i_act(rho);
5633 
5634     if (ret_code != KRERR_NO_ERROR) {
5635 	return (ret_code);
5636     }				/* if */
    /* beta is another learning parameter of the network, determined when the
       network is initialized. The init function writes it into the bias field
       of each unit structure; we read it back from there now. */
5640     beta = (unit_array + 1)->bias;
5641 
5642     if (beta <= 0.0) {
5643 	topo_msg.error_code = KRERR_PARAM_BETA;
5644 	topo_msg.src_error_unit = 0;
5645 	topo_msg.dest_error_unit = 1;
5646 	return (topo_msg.error_code);
5647     }				/* if */
5648     /* # of output parameters is 0  */
5649     *NoOfOutParams = 0;
5650     *parameterOutArray = NULL;
5651 
5652 
5653     /* get pointers to first elements of each layer in topo_ptr_array */
5654 
5655     topo_ptr = topo_ptr_array + 1;
5656 
5657     for (i = 0; i <= 5; i++) {
5658 	topo_layer[i] = topo_ptr;
	while (*topo_ptr++ != NULL)
	    ;			/* skip to the end of this layer */
5661 
5662     }				/* for */
5663 
5664 
5665     /* compute the necessary sub patterns */
5666 
5667     KernelErrorCode = kr_initSubPatternOrder(start_pattern,end_pattern);
5668     if(KernelErrorCode != KRERR_NO_ERROR)
5669 	return (KernelErrorCode);
5670 
5671 
5672     /* Search phase */
5673     start = kr_AbsPosOfFirstSubPat(start_pattern);
5674     end   = kr_AbsPosOfFirstSubPat(end_pattern);
5675     end  += kr_NoOfSubPatPairs(end_pattern) - 1;
5676 
5677     for(n=start; n<=end; n++){
5678 
5679 	kr_getSubPatternByNo(&pattern_no,&sub_pat_no,n);
5680 
5681 	/* initialize the unit activations of the whole net */
5682 
5683 	ret_code = krart_reset_activations();
5684 	if (ret_code != KRERR_NO_ERROR) {
5685 	    return (ret_code);
5686 	}			/* if */
5687 	/* put pattern into input units */
5688 	ret_code = put_ART1_in_pattern(pattern_no, sub_pat_no, topo_layer[0]);
5689 	if (ret_code != KRERR_NO_ERROR) {
5690 	    return (ret_code);
5691 	}			/* if */
5692 	/* repeat synchronous propagation and look for winner until pattern
5693 	   is classified or network tells us, that pattern is not
5694 	   classifiable */
5695 	do {
5696 	    /* 1 propagation step (all units push their information onto
5697 	       their output and calculate their new activation. */
5698 
5699 	    krart_prop_synch();
5700 
5701 	    /* look for the recognition unit with the highest activation
5702 	       returns a NULL pointer if all recognition units have
5703 	       activation 0.0 */
5704 	    winner_ptr = krart_get_winner(topo_layer[2], 1.0);
5705 
5706 	} while (!(ART1_CLASSIFIED) && !(ART1_NOT_CLASSIFIABLE));
5707 
5708 
5709 	/* training phase */
5710 
5711 	if (ART1_CLASSIFIED) {
5712 
5713 	    /* Train network i.e. adjust weights between comparison layer and
5714 	       winner_unit and vice versa */
5715 
5716 	    ret_code = adjust_ART1_weights(beta, topo_layer[1],
5717 					   topo_layer[3], winner_ptr);
5718 
5719 	    if (ret_code != KRERR_NO_ERROR) {
5720 		return (ret_code);
5721 	    }/* if */
5722 	}/* if */
5723     }/* for */
5724 
5725 
5726     return (ret_code);
5727 
5728 }/* LEARN_ART1 */
5729 
5730 
5731 
5732 
5733 /*****************************************************************************
5734   FUNCTION : put_ART1_in_pattern
5735 
5736   PURPOSE  : pushes a new pattern into the input units of the network
5737   RETURNS  : kernel error code
5738   NOTES    :
5739 
5740   UPDATE   : 06.11.1993 by Guenter Mamier
5741 ******************************************************************************/
5742 static krui_err put_ART1_in_pattern(int pattern_no, int sub_pat_no,
5743 				    TopoPtrArray topo_inp_ptr)
5744 {
5745     int             ret_code = KRERR_NO_ERROR;
5746     register Patterns in_pat;
5747     struct Unit    *unit_ptr;
5748     TopoPtrArray    topo_ptr = topo_inp_ptr;
5749 
    /* calculate start address of the current pattern   */
5751     in_pat = kr_getSubPatData(pattern_no,sub_pat_no,INPUT,NULL);
5752 
5753     while ((unit_ptr = *topo_ptr++) != NULL) {
5754 	if (unit_ptr->out_func == OUT_IDENTITY) {
5755 	    unit_ptr->act = unit_ptr->Out.output = *in_pat++;
5756 	} else if(unit_ptr->out_func == OUT_Custom_Python) {
5757 		unit_ptr->act = *in_pat++;
5758 		unit_ptr->Out.output =
5759 			kr_PythonOutFunction(unit_ptr->python_out_func,
5760 				unit_ptr->act);
5761 	} else {
5762 	    unit_ptr->act = *in_pat++;
5763 	    unit_ptr->Out.output = (*unit_ptr->out_func) (unit_ptr->act);
5764 	}/* if */
5765     }/* while */
5766 
5767     return (ret_code);
5768 
5769 }/* put_ART1_in_pattern */
5770 
5771 
5772 
5773 /*****************************************************************************
5774   FUNCTION : adjust_ART1_weights
5775 
5776   PURPOSE  : training function for ART1 networks
5777   RETURNS  : kernel error code
5778   NOTES    : Parameters:
5779              beta         constant value beta > 0.0
5780 
5781              comp_ptr     points to pointer to first comparison unit
5782 
5783              delay_ptr    points to pointer to first unit in the delay layer.
5784 	                  The t(j,i) links are not between recognition layer
5785 			  and comparison layer but between the respective
			  delay unit of the recognition unit and the comparison
5787 			  layer. So first we have to look for the corresponding
5788 			  delay unit of the winning unit before training these
5789 			  weights.
5790 
5791              winner_ptr   points to winning unit of the recognition layer.
5792 
5793   UPDATE   : 06.11.1993 by Guenter Mamier
5794 ******************************************************************************/
5795 static krui_err adjust_ART1_weights(double beta, TopoPtrArray comp_ptr,
5796 				    TopoPtrArray delay_ptr,
5797 				    struct Unit * winner_ptr)
5798 {
5799     krui_err        ret_code = KRERR_NO_ERROR;
5800     TopoPtrArray    topo_ptr = NULL;
5801     struct Unit    *unit_ptr_comp = NULL, *unit_ptr_delay = NULL;
5802     struct Link    *link_ptr = NULL;
5803     bool            found_delay_unit = FALSE;
5804     FlintType       sum_ck = 0.0;
5805 
5806 
5807     /* get corresponding unit of the winning unit in the delay layer */
5808 
5809     topo_ptr = delay_ptr;
5810 
5811     while ((!found_delay_unit) && (*topo_ptr != NULL)) {
5812 
5813 	unit_ptr_delay = *topo_ptr++;
5814 
5815 	if (((struct Link *) unit_ptr_delay->sites)->to == winner_ptr) {
5816 	    found_delay_unit = TRUE;
5817 	}/* if */
5818     }/* while */
5819 
5820     if (!found_delay_unit) {
5821 
5822 	/* There was no delay unit found corresponding to the winning
5823 	   recognition unit */
5824 
5825 	ret_code = KRERR_TOPOLOGY;
5826 	return (ret_code);
5827 
5828     }/* if */
5829     /* Adjust weights between winning unit (delay-layer) and comparison layer
5830        (t(j,i) link values)
5831 
5832     t(j,i) = c(i)   where j is the number of the winning neuron in the delay
       layer and i is the number of a comparison unit. */
5834     topo_ptr = comp_ptr;
5835 
5836     while ((unit_ptr_comp = *topo_ptr++) != NULL) {
5837 
	sum_ck += unit_ptr_comp->act;	/* sum up activations of comparison
					   layer. sum_ck is needed for b(i,j) */
5840 
5841 	FOR_ALL_LINKS(unit_ptr_comp, link_ptr) {
5842 
5843 	    if (link_ptr->to == unit_ptr_delay) {
5844 		link_ptr->weight = ART1_ADJUST_LINK_DEL_CMP(unit_ptr_comp);
5845 	    }/* if */
5846 	}/* FOR_ALL_LINKS */
5847 
5848     }/* while */
5849 
5850 
5851     /* Adjust weights between comparison layer and winning unit (recognition
5852        layer) (b(i,j) link values)
5853 
5854        b(i,j) = c(i) / (beta + sum(k)(c(k)))
5855 
5856        where j is the number of the winning neuron in the recognition layer, i
       is the number of a comparison unit and k runs over all comparison
5858        units. (sum(k)(c(k))) = sum_ck.
5859     */
5860 
5861 
5862     FOR_ALL_LINKS(winner_ptr, link_ptr) {
5863 
5864 	if (link_ptr->to->lln == ART1_CMP_LAY) {
5865 	    link_ptr->weight = (FlintType)ART1_ADJUST_LINK_CMP_REC(link_ptr->to,
5866 								   beta,sum_ck);
5867 	}/* if */
5868     }/* FOR_ALL_LINKS */
5869 
5870 
5871     return (ret_code);
5872 }/* adjust_ART1_weights () */
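
/* Worked example (numbers made up): with beta = 1.0 and comparison-layer
   activations c = (1, 0, 1), sum_ck = 2.0 and the winner's weights become

       t(j,i) = c(i)                 -> (1, 0, 1)
       b(i,j) = c(i) / (beta + 2.0)  -> (1/3, 0, 1/3)

   i.e. a single presentation fixes the winner's template to the current
   comparison pattern (fast learning). */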
5873 
5874 
5875 /*****************************************************************************
5876  *****************************************************************************
5877 
5878   GROUP        : ART2 learning function
5879 
5880   AUTHOR       : Kai-Uwe Herrmann
5881 
5882 ******************************************************************************
5883 ******************************************************************************/
5884 
5885 
5886 /*****************************************************************************
5887   FUNCTION : LEARN_ART2
5888 
5889   PURPOSE  : ART2 learning function.
5890   RETURNS  : kernel error code
5891   NOTES    : Parameters:
                    5 input-parameters :  1. vigilance parameter RHO
                                          2. Parameter a
                                          3. Parameter b
                                          4. Parameter c
                                          5. Parameter THETA
                                          (parameter d is read from the bias
                                          field of the units, see below)
5898 
5899                     output-parameters  :  none
5900 
5901 
5902   UPDATE   : 06.11.1993 by Guenter Mamier
5903 ******************************************************************************/
5904 krui_err LEARN_ART2(int start_pattern, int end_pattern,
5905 		    float parameterInArray[], int NoOfInParams,
5906 		    float **parameterOutArray, int *NoOfOutParams)
5907 {
5908     krui_err        ret_code = KRERR_NO_ERROR;
5909     int             pattern_no, sub_pat_no; /* Contains actual pattern number */
5910     int             i,n;
5911     int             start, end;
5912     struct Unit    *winner_ptr;	/* recognition unit which is the winner of
5913 				   w.t.a */
5914     TopoPtrArray    topo_layer[12];	/* topo_layer[0] : *first input unit
5915 					   topo_layer[1] : *first w unit
5916 					   topo_layer[2] : *first x unit
5917 					   topo_layer[3] : *first u unit
5918 					   topo_layer[4] : *first v unit
5919 					   topo_layer[5] : *first p unit
5920 					   topo_layer[6] : *first q unit
5921 					   topo_layer[7] : *first r unit
5922 					   topo_layer[8] : *first rec.  unit
5923 					   topo_layer[10] : *first local
5924 					   reset unit */
5925     TopoPtrArray    topo_ptr;
5926     FlintType       rho, param_a, param_b, param_c, param_d, theta;
5927 
5928 
5929     /* Check number of incoming parameters */
5930 
5931     if (NoOfInParams < 5) {
5932 	ret_code = KRERR_PARAMETERS;
5933 	return (ret_code);
5934     }/* if */
5935     rho = parameterInArray[0];
5936     param_a = parameterInArray[1];
5937     param_b = parameterInArray[2];
5938     param_c = parameterInArray[3];
5939     theta = parameterInArray[4];
5940 
5941 
5942     /* Check if network has been modified or learning func has been changed */
5943 
5944     if (NetModified || LearnFuncHasChanged || (TopoSortID != ART2_TOPO_TYPE)) {
5945 	(void) kr_topoSort(ART2_TOPO_TYPE);
5946 	ret_code = KernelErrorCode;
5947 	if (ret_code != KRERR_NO_ERROR) {
5948 	    NetModified = TRUE;
5949 	    return (ret_code);
5950 	}/* if */
5951 	NetModified = FALSE;
5952 	LearnFuncHasChanged = FALSE;
5953     }/* if */
5954     /* Read out value of parameter d from bias field of any unit. The value
5955        has been written into the bias field by the init-function */
5956     param_d = (*(topo_ptr_array + 1))->bias;
5957 
5958 
5959     /* Check values of the parameters */
5960 
5961     if ((rho < 0.0) || (rho > 1.0) ||
5962 	(param_a <= 0.0) || (param_b <= 0.0) ||
5963 	((param_c * param_d) / (1 - param_d) > 1.0) ||
5964 	(theta < 0.0) || (theta > 1.0)
5965 	) {
5966 	ret_code = KRERR_PARAMETERS;
5967 	return (ret_code);
5968     }/* if */
5969     ret_code = kra2_set_params(rho, param_a, param_b, param_c, param_d, theta);
5970 
5971     if (ret_code != KRERR_NO_ERROR) {
5972 	return (ret_code);
5973     }/* if */
5974     ret_code = kra2_init_propagate();
5975 
5976     if (ret_code != KRERR_NO_ERROR) {
5977 	return (ret_code);
5978     }/* if */
5979     /* get pointers to first elements of each layer in topo_ptr_array */
5980     topo_ptr = topo_ptr_array + 1;
5981 
5982     for (i = 0; i <= 9; i++) {
5983 	topo_layer[i] = topo_ptr;
	while (*topo_ptr++ != NULL)
	    ;			/* skip to the end of this layer */
5986 
5987     }/* for */
5988 
5989 
5990     /* compute the necessary sub patterns */
5991 
5992     KernelErrorCode = kr_initSubPatternOrder(start_pattern,end_pattern);
5993     if(KernelErrorCode != KRERR_NO_ERROR)
5994 	return (KernelErrorCode);
5995 
5996 
5997     /* Search phase */
5998     start = kr_AbsPosOfFirstSubPat(start_pattern);
5999     end   = kr_AbsPosOfFirstSubPat(end_pattern);
6000     end  += kr_NoOfSubPatPairs(end_pattern) - 1;
6001 
6002     for(n=start; n<=end; n++){
6003 
6004 	kr_getSubPatternByNo(&pattern_no,&sub_pat_no,n);
6005 
6006 	/* initialize the unit activations of the whole net */
6007 
6008 	ret_code = krart_reset_activations();
6009 	if (ret_code != KRERR_NO_ERROR) {
6010 	    return (ret_code);
6011 	}/* if */
6012 	/* put pattern into input units */
6013 	ret_code = put_ART2_in_pattern(pattern_no, sub_pat_no,
6014 				       topo_layer[ART2_INP_LAY-1]);
6015 	if (ret_code != KRERR_NO_ERROR) {
6016 	    return (ret_code);
6017 	}/* if */
6018 
	/* initialize the ART2 simulator for the new pattern */
6020 	kra2_init_pattern();
6021 
6022 	/* repeat synchronous propagation and look for winner until pattern
6023 	   is classified or network tells us, that pattern is not
6024 	   classifiable */
6025 
6026 	do {
6027 
6028 	    /* compute vector norms */
6029 	    kra2_compute_norms();
6030 
6031 	    /* save old activation values of f1-units */
6032 	    kra2_save_for_stability_check();
6033 
	    /* 1 propagation step (all units push their information onto
	       their outputs and calculate their new activations) */
6036 	    krart_prop_synch();
6037 
	    /* look for the recognition unit with the highest activation;
	       returns a NULL pointer if all recognition units have
	       activation 0.0 */
6041 	    winner_ptr = krart_get_winner(topo_layer[ART2_REC_LAY-1], param_d);
6042 
6043 	    /* Check if F1-Layer is stable */
6044 	    kra2_check_f1_stability();
6045 
6046 	    /* Check Reset */
6047 	    kra2_checkReset();
6048 
6049 	} while (!(ART2_CLASSIFIED) && !(ART2_NOT_CLASSIFIABLE));
6050 
6051 
6052 	/* training phase */
6053 
6054 	if (ART2_CLASSIFIED) {
6055 
	    /* Train the network, i.e. adjust the weights between the
	       comparison layer and the winner unit and vice versa */
6058 	    ret_code = adjust_ART2_weights(param_d, topo_layer[ART2_P_LAY - 1],
6059 					   winner_ptr);
6060 
6061 	    if (ret_code != KRERR_NO_ERROR) {
6062 		return (ret_code);
6063 	    }/* if */
6064 	}/* if */
6065     }/* for */
6066 
6067     return (ret_code);
6068 
6069 }/* LEARN_ART2 */
6070 
6071 
6072 
6073 /*****************************************************************************
  FUNCTION : put_ART2_in_pattern
6075 
6076   PURPOSE  : pushes a new pattern into the input units of the network
6077   RETURNS  : kernel error code
6078   NOTES    :
6079 
6080   UPDATE   : 06.11.1993 by Guenter Mamier
6081 ******************************************************************************/
6082 static krui_err put_ART2_in_pattern(int pattern_no, int sub_pat_no,
6083 				    TopoPtrArray topo_inp_ptr)
6084 {
6085     int               ret_code = KRERR_NO_ERROR;
6086     register Patterns in_pat;
6087     struct Unit       *unit_ptr;
6088     TopoPtrArray      topo_ptr = topo_inp_ptr;
6089 
    /* calculate the start address of the current pattern */
6091     in_pat = kr_getSubPatData(pattern_no,sub_pat_no,INPUT,NULL);
6092 
6093     while ((unit_ptr = *topo_ptr++) != NULL) {
6094 
6095 	if (unit_ptr->out_func == OUT_IDENTITY) {
6096 	    unit_ptr->act = unit_ptr->Out.output = *in_pat++;
6097 	} else if(unit_ptr->out_func == OUT_Custom_Python) {
6098 		unit_ptr->act = *in_pat++;
6099 		unit_ptr->Out.output =
6100 			kr_PythonOutFunction(unit_ptr->python_out_func,
6101 				unit_ptr->act);
6102 	} else {
6103 	    unit_ptr->act = *in_pat++;
6104 	    unit_ptr->Out.output = (*unit_ptr->out_func) (unit_ptr->act);
6105 	}/* if */
6106 
6107     }/* while */
6108 
6109     return (ret_code);
6110 
6111 }/* put_ART2_in_pattern */
6112 
6113 
6114 
6115 /*****************************************************************************
6116   FUNCTION : adjust_ART2_weights
6117 
6118   PURPOSE  : training function for ART2 networks
6119   RETURNS  : kernel error code
6120   NOTES    : Parameters:
6121              param_d     constant value 0 < param_d < 1
6122 
6123              p_ptr       points to pointer to first comparison unit
6124 
6125              delay_ptr   points to pointer to first unit in the delay layer.
6126 	                 The z(j,i) links are not between recognition layer
6127 			 and comparison layer but between the respective delay
			 unit of the recognition unit and the comparison layer.
6129 			 So first we have to look for the corresponding delay
6130 			 unit of the winning unit before training these weights.
6131 
6132              winner_ptr  points to winning unit of the recognition layer.
6133 
6134   UPDATE   : 06.11.1993 by Guenter Mamier
6135 ******************************************************************************/
6136 static krui_err adjust_ART2_weights(double param_d, TopoPtrArray p_ptr,
6137 				    struct Unit * winner_ptr)
6138 {
6139     krui_err        ret_code = KRERR_NO_ERROR;
6140     TopoPtrArray    topo_ptr = NULL;
6141     struct Unit    *unit_ptr_p = NULL;
6142     struct Link    *link_ptr = NULL, *link_ptr_u = NULL;
6143 
6144     /* Adjust weights between winning unit and p layer (z(J,i) link values)
6145 
       (d/dt) z(J,i) = d * (1-d) * [ u(i)/(1-d) - z(J,i) ]
6147 
6148        for (d/dt) -> 0:        z(J,i) = u(i)/(1-d)
6149     */
6150 
6151     topo_ptr = p_ptr;
6152 
6153     while ((unit_ptr_p = *topo_ptr++) != NULL) {
6154 	FOR_ALL_LINKS(unit_ptr_p, link_ptr) {
6155 	    if (link_ptr->to == winner_ptr) {
6156 
		/* look for the corresponding u unit */
6158 		FOR_ALL_LINKS(unit_ptr_p, link_ptr_u) {
6159 		    if (link_ptr_u->to->lln == ART2_U_LAY) {
6160 			link_ptr->weight =
6161 			    ART2_ADJUST_LINK_REC_P(link_ptr_u->to, param_d);
6162 			break;
6163 		    }/* if */
6164 		}/* FOR_ALL_LINKS */
6165 
6166 	    }/* if */
6167 	}/* FOR_ALL_LINKS */
6168     }/* while */
6169 
6170 
6171     /* Adjust weights between p layer and winning unit (recognition layer)
6172        (z(i,j) link values)
6173 
6174        (d/dt) z(i,J) = d * (1-d) * [ u(i)/(1-d) - z(i,J) ]
6175 
6176        where J is the number of the winning neuron in the recognition layer, i
       is the number of a p unit
6178 
6179        for (d/dt) -> 0:   z(i,J) = u(i)/(1-d)
6180 
6181     */
6182 
6183 
6184     FOR_ALL_LINKS(winner_ptr, link_ptr) {
6185 	if (link_ptr->to->lln == ART2_P_LAY) {
6186 
	    /* look for the corresponding u unit */
6188 	    FOR_ALL_LINKS(link_ptr->to, link_ptr_u) {
6189 		if (link_ptr_u->to->lln == ART2_U_LAY) {
6190 		    link_ptr->weight =
6191 			ART2_ADJUST_LINK_P_REC(link_ptr_u->to, param_d);
6192 
6193 		    break;
6194 		}/* if */
6195 	    }/* FOR_ALL_LINKS */
6196 	}/* if */
6197     }/* FOR_ALL_LINKS */
6198 
6199 
6200     return (ret_code);
6201 
6202 }/* adjust_ART2_weights () */
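
/* Illustration only, not called by the kernel: both update rules above share
   the same stable state. A minimal sketch of that fixed point, assuming
   u_act holds the output u(i) of the corresponding u unit; the macros
   ART2_ADJUST_LINK_REC_P and ART2_ADJUST_LINK_P_REC are assumed to evaluate
   to an equivalent expression. */
static FlintType ART2_weight_fixed_point(FlintType u_act, FlintType param_d)
{
    /* for (d/dt) -> 0 both z(J,i) and z(i,J) converge to u(i)/(1-d) */
    return u_act / ((FlintType) 1.0 - param_d);
}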
6203 
6204 
6205 
6206 /*****************************************************************************
6207  *****************************************************************************
6208 
6209   GROUP        : ARTMAP learning function
6210 
6211   AUTHOR       : Kai-Uwe Herrmann
6212 
6213 ******************************************************************************
6214 ******************************************************************************/
6215 
6216 
6217 
6218 /*****************************************************************************
6219   FUNCTION : LEARN_ARTMAP
6220 
6221   PURPOSE  : ARTMAP learning function.
6222   RETURNS  : kernel error code
6223   NOTES    : Parameters:
6224                     3 input-parameter  :  1. vigilance parameter RHOa
6225                                           2. vigilance parameter RHOb
6226                                           3. vigilance parameter RHO
6227 
6228                     output-parameters  :  none
6229 
6230   UPDATE   : 06.11.1993 by Guenter Mamier
6231 ******************************************************************************/
6232 krui_err LEARN_ARTMAP(int start_pattern, int end_pattern,
6233 		      float parameterInArray[], int NoOfInParams,
6234 		      float **parameterOutArray, int *NoOfOutParams)
6235 {
6236     krui_err        ret_code = KRERR_NO_ERROR;
    int             pattern_no, sub_pat_no; /* current pattern number */
6238     int             i,n;
6239     struct Unit    *winner_ptr_a;	/* recognition unit which is the
6240 					   winner of w.t.a ARTa */
6241     struct Unit    *winner_ptr_b;	/* recognition unit which is the
6242 					   winner of w.t.a ARTb */
6243     struct Unit    *unit_ptr;
    TopoPtrArray    topo_layer[14];	/* topo_layer[0] : *first input unit ARTa
					   topo_layer[1] : *first comp. unit ARTa
					   topo_layer[2] : *first rec. unit ARTa
					   topo_layer[3] : *first delay unit ARTa
					   topo_layer[4] : *first local reset unit ARTa
					   topo_layer[5] : *first special unit ARTa (classified_unit)
					   topo_layer[6] : *first input unit ARTb
					   topo_layer[7] : *first comp. unit ARTb
					   topo_layer[8] : *first rec. unit ARTb
					   topo_layer[9] : *first delay unit ARTb
					   topo_layer[10]: *first local reset unit ARTb
					   topo_layer[11]: *first special unit ARTb (classified_unit)
					   topo_layer[12]: *first map unit
					   topo_layer[13]: *first special map unit */
6264 
6265     TopoPtrArray    topo_ptr;
6266     FlintType       beta_a;
6267     FlintType       beta_b;
6268     float           rho_a;
6269     float           rho_b;
6270     float           rho;
6271     int             start, end;
6272 
6273 
6274     /* Check number of incoming parameters */
6275 
6276     if (NoOfInParams < 3) {
6277 	ret_code = KRERR_PARAMETERS;
6278 	return (ret_code);
6279     }/* if */
    /* read the vigilance parameters */
6281     rho_a = parameterInArray[0];
6282     rho_b = parameterInArray[1];
6283     rho = parameterInArray[2];
6284 
6285 
    /* Check that the vigilance parameters lie in the interval [0,1] */
6288 
6289     if ((rho_a < 0.0) || (rho_a > 1.0) || (rho_b < 0.0) ||
6290 	(rho_b > 1.0) || (rho < 0.0) || (rho > 1.0)
6291 	) {
6292 	ret_code = KRERR_PARAMETERS;
6293 	return (ret_code);
6294     }/* if */
6295     /* Check if network has been modified or learning func has been changed */
6296     if (NetModified || LearnFuncHasChanged || (TopoSortID != ARTMAP_TOPO_TYPE)){
6297 	(void) kr_topoSort(ARTMAP_TOPO_TYPE);
6298 	ret_code = KernelErrorCode;
6299 	if (ret_code != KRERR_NO_ERROR) {
6300 	    NetModified = TRUE;
6301 	    return (ret_code);
6302 	}/* if */
6303 	NetModified = FALSE;
6304 	LearnFuncHasChanged = FALSE;
6305     }/* if */
6306 
6307     /* set initial activation values */
6308     ret_code = kram_init_i_act(rho_a, rho_b, rho);
6309 
6310     if (ret_code != KRERR_NO_ERROR) {
6311 	return (ret_code);
6312     }/* if */
    /* beta_a and beta_b are further learning parameters of the network which
       are determined when the network is initialized. They were written to
       the bias field of each unit of the corresponding ART 1 network; now we
       read these values back. */
6317 
6318     /* find an ARTa unit and get ARTa beta value */
6319     for (unit_ptr=unit_array+1; unit_ptr->lln != ARTMAP_INPa_LAY; unit_ptr++);
6320     beta_a = unit_ptr->bias;
6321 
6322     /* find an ARTb unit and get ARTb beta value */
6323     for (unit_ptr=unit_array+1; unit_ptr->lln != ARTMAP_INPb_LAY; unit_ptr++);
6324     beta_b = unit_ptr->bias;
6325 
6326     if ((beta_a <= 0.0) || (beta_b <= 0.0)) {
6327 	topo_msg.error_code = KRERR_PARAM_BETA;
6328 	topo_msg.src_error_unit = 0;
6329 	topo_msg.dest_error_unit = 1;
6330 	return (topo_msg.error_code);
6331     }/* if */
6332 
6333     /* # of output parameters is 0 */
6334     *NoOfOutParams = 0;
6335     *parameterOutArray = NULL;
6336 
6337 
6338     /* get pointers to first elements of each layer in topo_ptr_array */
6339 
6340     topo_ptr = topo_ptr_array + 1;
6341 
    for (i = 0; i <= 13; i++) {
	topo_layer[i] = topo_ptr;
	/* advance past this layer's units to the terminating NULL pointer */
	while (*topo_ptr++ != NULL)
	    ;
    }/* for */
6348 
6349 
6350     /* compute the necessary sub patterns */
6351 
6352     KernelErrorCode = kr_initSubPatternOrder(start_pattern,end_pattern);
6353     if(KernelErrorCode != KRERR_NO_ERROR)
6354 	return (KernelErrorCode);
6355 
6356 
6357     /* Search phase */
6358     start = kr_AbsPosOfFirstSubPat(start_pattern);
6359     end   = kr_AbsPosOfFirstSubPat(end_pattern);
6360     end  += kr_NoOfSubPatPairs(end_pattern) - 1;
6361 
6362     for(n=start; n<=end; n++){
6363 
6364 	kr_getSubPatternByNo(&pattern_no,&sub_pat_no,n);
6365 
6366 	/* initialize the unit activations of the whole net */
6367 	ret_code = krart_reset_activations();
6368 	if (ret_code != KRERR_NO_ERROR) {
6369 	    return (ret_code);
6370 	}/* if */
6371 
6372 	/* put pattern into input units */
6373 	ret_code =
6374 	    put_ARTMAP_in_pattern(pattern_no, sub_pat_no, topo_layer[0],
6375 				  topo_layer[6]);
6376 	if (ret_code != KRERR_NO_ERROR) {
6377 	    return (ret_code);
6378 	}/* if */
	/* repeat synchronous propagation and look for a winner until the
	   pattern is classified or the network tells us that the pattern
	   is not classifiable */
6382 	do {
6383 
	    /* 1 propagation step (all units push their information onto
	       their outputs and calculate their new activations) */
6386 
6387 	    krart_prop_synch();
6388 
	    /* look for the recognition unit with the highest activation;
	       returns a NULL pointer if all recognition units have
	       activation 0.0 */
6392 	    winner_ptr_a = krart_get_winner(topo_layer[2], 1.0);
6393 	    winner_ptr_b = krart_get_winner(topo_layer[8], 1.0);
6394 
6395 	} while (!(ARTMAP_CLASSIFIED) && !(ARTMAP_NOT_CLASSIFIABLE));
6396 
6397 
6398 	/* training phase */
6399 
6400 	if (ARTMAP_CLASSIFIED) {
6401 
	    /* Train the network, i.e. adjust the weights between the
	       comparison layer and the winner unit and vice versa, for both
	       ARTa and ARTb. Further adjust the weights between the ARTa
	       delay layer and the map field layer. */
6405 
6406 	    ret_code = adjust_ARTMAP_weights(beta_a, beta_b,
6407 					     topo_layer[1], topo_layer[7],
6408 					     topo_layer[3], topo_layer[9],
6409 					     topo_layer[12],
6410 					     winner_ptr_a, winner_ptr_b);
6411 
6412 	    if (ret_code != KRERR_NO_ERROR) {
6413 		return (ret_code);
6414 	    }/* if */
	} else {

	    /* pattern was not classifiable: nothing to train */

	}/* if */
6420 
6421     }/* for */
6422 
6423     return (ret_code);
6424 
6425 }/* LEARN_ARTMAP */
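
/* Example invocation (illustration only; the pattern range and parameter
   values below are hypothetical):

       float    params[3] = { 0.9f, 0.9f, 1.0f };    rho_a, rho_b, rho
       float   *out_params;
       int      no_of_out_params;
       krui_err err = LEARN_ARTMAP(0, 9, params, 3,
                                   &out_params, &no_of_out_params);

   LEARN_ARTMAP reports no output parameters; success is indicated by
   err == KRERR_NO_ERROR. */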
6426 
6427 
6428 
6429 /*****************************************************************************
6430   FUNCTION : put_ARTMAP_in_pattern
6431 
6432   PURPOSE  : pushes a new pattern into the input units of the network
6433   RETURNS  : kernel error code
6434   NOTES    :
6435 
6436   UPDATE   : 06.11.1993 by Guenter Mamier
6437 ******************************************************************************/
6438 static krui_err put_ARTMAP_in_pattern(int pattern_no, int sub_pat_no,
6439 				      TopoPtrArray topo_inpa_ptr,
6440 				      TopoPtrArray topo_inpb_ptr)
6441 {
6442     int               ret_code = KRERR_NO_ERROR;
6443     register Patterns in_pat;
6444     struct Unit       *unit_ptr;
6445     TopoPtrArray      topo_ptr_a = topo_inpa_ptr;
6446     TopoPtrArray      topo_ptr_b = topo_inpb_ptr;
6447 
6448 
    /* calculate the start address of the current pattern */
6450     in_pat = kr_getSubPatData(pattern_no,sub_pat_no,INPUT,NULL);
6451 
6452     while ((unit_ptr = *topo_ptr_a++) != NULL) {
6453 	if (unit_ptr->out_func == OUT_IDENTITY) {
6454 	    unit_ptr->act = unit_ptr->Out.output = *in_pat++;
6455 	} else if(unit_ptr->out_func == OUT_Custom_Python) {
6456 		unit_ptr->act = *in_pat++;
6457 		unit_ptr->Out.output =
6458 			kr_PythonOutFunction(unit_ptr->python_out_func,
6459 				unit_ptr->act);
6460 	} else {
6461 	    unit_ptr->act = *in_pat++;
6462 	    unit_ptr->Out.output = (*unit_ptr->out_func) (unit_ptr->act);
6463 	}/* if */
6464     }/* while */
6465 
6466     while ((unit_ptr = *topo_ptr_b++) != NULL) {
6467 	if (unit_ptr->out_func == OUT_IDENTITY) {
6468 	    unit_ptr->act = unit_ptr->Out.output = *in_pat++;
6469 	} else if(unit_ptr->out_func == OUT_Custom_Python) {
6470 		unit_ptr->act = *in_pat++;
6471 		unit_ptr->Out.output =
6472 			kr_PythonOutFunction(unit_ptr->python_out_func,
6473 				unit_ptr->act);
6474 	} else {
6475 	    unit_ptr->act = *in_pat++;
6476 	    unit_ptr->Out.output = (*unit_ptr->out_func) (unit_ptr->act);
6477 	}/* if */
6478     }/* while */
6479 
6480 
6481     return (ret_code);
6482 
6483 }/* put_ARTMAP_in_pattern */
6484 
6485 
6486 
6487 
6488 /*****************************************************************************
6489   FUNCTION : adjust_ARTMAP_weights
6490 
6491   PURPOSE  : training function for ARTMAP networks
6492   RETURNS  : kernel error code
6493   NOTES    : Parameters:
6494              beta_a         constant value beta of ARTa > 0.0
6495              beta_b         constant value beta of ARTb > 0.0
6496 
6497              compa_ptr      points to pointer to 1st comparison unit of ARTa
6498              compb_ptr      points to pointer to 1st comparison unit of ARTb
6499 
6500              dela_ptr       points to pointer to first unit in the delay layer.
6501 	                    The t(j,i) links are not between recognition layer
6502 			    and comparison layer but between the respective
			    delay unit of the recognition unit and the
6504 			    comparison layer. So first we have to look for the
6505 			    corresponding delay unit of the winning unit before
6506 			    training these weights.
6507              delb_ptr       points to pointer to first unit in the delay layer.
6508 	                    The t(j,i) links are not between recognition layer
6509 			    and comparison layer but between the respective
			    delay unit of the recognition unit and the
6511 			    comparison layer. So first we have to look for the
6512 			    corresponding delay unit of the winning unit before
6513 			    training these weights.
6514 
6515              map_ptr        points to pointer to first unit in the map layer
6516 
6517              winner_ptr_a   points to winning unit of the recognition layer of
6518 	                    ARTa.
6519              winner_ptr_b   points to winning unit of the recognition layer of
6520 	                    ARTb.
6521 
6522 
6523   UPDATE   : 06.11.1993 by Guenter Mamier
6524 ******************************************************************************/
6525 static krui_err adjust_ARTMAP_weights(double beta_a, double beta_b,
6526 				      TopoPtrArray compa_ptr,
6527 				      TopoPtrArray compb_ptr,
6528 				      TopoPtrArray dela_ptr,
6529 				      TopoPtrArray delb_ptr,
6530 				      TopoPtrArray map_ptr,
6531 				      struct Unit * winner_ptr_a,
6532 				      struct Unit * winner_ptr_b)
6533 {
6534     krui_err        ret_code = KRERR_NO_ERROR;
6535     TopoPtrArray    topo_ptr = NULL;
6536     struct Unit    *unit_ptr_compa = NULL, *unit_ptr_compb = NULL,
6537                    *unit_ptr_dela = NULL, *unit_ptr_delb = NULL,
6538                    *unit_ptr_map = NULL;
6539 
6540     struct Link    *link_ptr = NULL;
6541 
6542     bool            found_dela_unit = FALSE;
6543     bool            found_delb_unit = FALSE;
6544 
6545     FlintType       sum_ck = 0.0;
6546 
6547 
6548     if ((winner_ptr_a == NULL) || (winner_ptr_b == NULL)) {
	/* We are using ARTMAP in a non-learning mode, so we are not allowed
	   to adjust weights now. Weights may only be adjusted if we have an
	   input in both ARTa and ARTb, each of which produces a winner in
	   the respective F2 layer */
6553 	return (ret_code);
6554     }/* if */
6555     /* get corresponding unit of the winning unit of ARTa in the delay layer */
6556     topo_ptr = dela_ptr;
6557 
6558     while ((!found_dela_unit) && (*topo_ptr != NULL)) {
6559 	unit_ptr_dela = *topo_ptr++;
	FOR_ALL_LINKS(unit_ptr_dela, link_ptr) {
	    if (link_ptr->to == winner_ptr_a) {
		found_dela_unit = TRUE;
		break;
	    }/* if */
	}/* FOR_ALL_LINKS */
6565     }/* while */
6566 
6567     /* get corresponding unit of the winning unit of ARTb in the delay layer */
6568 
6569     topo_ptr = delb_ptr;
6570     while ((!found_delb_unit) && (*topo_ptr != NULL)) {
6571 	unit_ptr_delb = *topo_ptr++;
6572 	FOR_ALL_LINKS(unit_ptr_delb, link_ptr) {
6573 	    if (link_ptr->to == winner_ptr_b) {
6574 		found_delb_unit = TRUE;
6575 		break;
6576 	    }/* if */
6577 	}/* FOR_ALL_LINKS */
6578     }/* while */
6579 
6580     if ((!found_dela_unit) || (!found_delb_unit)) {
6581 
6582 	/* There was no delay unit found corresponding to the winning
6583 	   recognition unit in ARTa or ARTb */
6584 
6585 	ret_code = KRERR_TOPOLOGY;
6586 	return (ret_code);
6587 
6588     }/* if */
6589     /********* ADJUST WEIGHTS *********/
6590     /* Adjust weights between winning unit (delay-layer) and comparison layer
6591        (t(j,i) link values) -> ARTa
6592 
6593        t(j,i) = c(i)   where j is the number of the winning neuron in the delay
       layer and i is the number of a comparison unit.
6595     */
6596 
6597     topo_ptr = compa_ptr;
6598     while ((unit_ptr_compa = *topo_ptr++) != NULL) {
	sum_ck += unit_ptr_compa->act;	/* sum up activations of comparison
6600 					   layer. sum_ck is needed for b(i,j) */
6601 	FOR_ALL_LINKS(unit_ptr_compa, link_ptr) {
6602 	    if (link_ptr->to == unit_ptr_dela) {
6603 		link_ptr->weight = ART1_ADJUST_LINK_DEL_CMP(unit_ptr_compa);
6604 	    }/* if */
6605 	}/* FOR_ALL_LINKS */
6606     }/* while */
6607 
6608     /* Adjust weights between comparison layer and winning unit (recognition
6609        layer) -> ARTa
6610 
6611        b(i,j) = c(i) / (beta + sum(k)(c(k)))
6612 
6613        where j is the number of the winning neuron in the recognition layer, i
       is the number of a comparison unit and k runs over all comparison
6615        units. (sum(k)(c(k))) = sum_ck.
6616     */
6617 
6618     FOR_ALL_LINKS(winner_ptr_a, link_ptr) {
6619 	if (link_ptr->to->lln == ARTMAP_CMPa_LAY) {
6620 	    link_ptr->weight = (FlintType)ART1_ADJUST_LINK_CMP_REC(link_ptr->to,
6621 								   beta_a,
6622 								   sum_ck);
6623 	}/* if */
6624     }/* FOR_ALL_LINKS */
6625 
6626 
6627     /* Adjust weights between winning unit (delay-layer) and comparison layer
6628        (t(j,i) link values) -> ARTb
6629 
6630        t(j,i) = c(i)   where j is the number of the winning neuron in the delay
       layer and i is the number of a comparison unit.
6632     */
6633 
6634     topo_ptr = compb_ptr;
6635     sum_ck = 0.0;
6636     while ((unit_ptr_compb = *topo_ptr++) != NULL) {
	sum_ck += unit_ptr_compb->act;	/* sum up activations of comparison
6638 					   layer. sum_ck is needed for b(i,j) */
6639 	FOR_ALL_LINKS(unit_ptr_compb, link_ptr) {
6640 	    if (link_ptr->to == unit_ptr_delb) {
6641 		link_ptr->weight = ART1_ADJUST_LINK_DEL_CMP(unit_ptr_compb);
6642 	    }/* if */
6643 	}/* FOR_ALL_LINKS */
6644     }/* while */
6645 
6646     /* Adjust weights between comparison layer and winning unit (recognition
6647        layer) (b(i,j) link values)
6648 
6649        b(i,j) = c(i) / (beta + sum(k)(c(k)))
6650 
6651        where j is the number of the winning neuron in the recognition layer, i
       is the number of a comparison unit and k runs over all comparison
6653        units. (sum(k)(c(k))) = sum_ck.
6654     */
6655 
6656     FOR_ALL_LINKS(winner_ptr_b, link_ptr) {
6657 	if (link_ptr->to->lln == ARTMAP_CMPb_LAY) {
6658 	    link_ptr->weight = (FlintType)ART1_ADJUST_LINK_CMP_REC(link_ptr->to,
6659 								   beta_b,
6660 								   sum_ck);
6661 	}/* if */
6662     }/* FOR_ALL_LINKS */
6663 
6664 
6665     /* Adjust weights between delay units of ARTa and map units
6666 
       w(i,j) = map(j)   where j is the number of a neuron in the map layer
       and i is the number of the winning neuron in the ARTa delay layer
6669     */
6670 
6671     topo_ptr = map_ptr;
6672     while ((unit_ptr_map = *topo_ptr++) != NULL) {
6673 	FOR_ALL_LINKS(unit_ptr_map, link_ptr) {
6674 	    if (link_ptr->to == unit_ptr_dela) {
6675 		/* Same as adjustment between delay and comparison layer */
6676 		link_ptr->weight = ART1_ADJUST_LINK_DEL_CMP(unit_ptr_map);
6677 	    }/* if */
6678 	}/* FOR_ALL_LINKS */
6679     }/* while */
6680 
6681     return (ret_code);
6682 
6683 }/* adjust_ARTMAP_weights () */
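
/* Illustration only, not called by the kernel: the two ART1-style update
   rules applied above, written as plain functions. A minimal sketch,
   assuming c_act holds the activation c(i) of a comparison unit and sum_ck
   the sum of all comparison activations; the ART1_ADJUST_LINK_DEL_CMP and
   ART1_ADJUST_LINK_CMP_REC macros are assumed to compute equivalent values. */
static FlintType ART1_top_down_sketch(FlintType c_act)
{
    return c_act;			/* t(j,i) = c(i) */
}

static FlintType ART1_bottom_up_sketch(FlintType c_act, FlintType beta,
				       FlintType sum_ck)
{
    return c_act / (beta + sum_ck);	/* b(i,j) = c(i) / (beta + sum(k) c(k)) */
}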
6684 
6685 
6686 
6687 /*****************************************************************************
6688  *****************************************************************************
6689 
6690   GROUP        : backpropagation through time learning functions
6691 
6692   AUTHOR       : Martin Reczko
  NOTES        : Implemented are truncated backpropagation through time with
                 online update (BPTT), truncated backpropagation through time
                 with batch update (BBPTT) and truncated quickprop through
		 time (QPTT) learning functions
6697 
6698 ******************************************************************************
6699 ******************************************************************************/
6700 
6701 
6702 /*****************************************************************************
6703   FUNCTION : BPTT_clear_deltaw
6704 
6705   PURPOSE  : BPTT weight change reset
6706   RETURNS  : kernel error code
6707   NOTES    :
6708 
6709   UPDATE   : 06.11.1993 by Guenter Mamier
6710 ******************************************************************************/
6711 static krui_err BPTT_clear_deltaw(void)
6712 {
6713     register struct Unit *unit_ptr;
6714     struct Link    *link_ptr;
6715 
6716 
6717     FOR_ALL_UNITS(unit_ptr) {
6718 
6719 	/* reset old weight changes (_a), old gradients (_b) and gradient
6720 	   accumulators (_c) */
6721 	unit_ptr->value_a = 0.0;
6722 	unit_ptr->value_b = 0.0;
6723 	unit_ptr->value_c = 0.0;
6724 	FOR_ALL_LINKS(unit_ptr, link_ptr) {
6725 	    link_ptr->value_a = 0.0;
6726 	    link_ptr->value_b = 0.0;
6727 	    link_ptr->value_c = 0.0;
6728 	}
6729     }
6730     return (KRERR_NO_ERROR);
6731 }
6732 
6733 
6734 
6735 /*****************************************************************************
6736   FUNCTION : initializeBPTT
6737 
6738   PURPOSE  : BPTT network activity reset
6739   RETURNS  : kernel error code
6740   NOTES    : BPTT data structures: unit:
6741              unit_ptr->olddelta : delta values, after finished calculation
6742 	                          for 1 time step
6743              unit_ptr->newdelta : accumulators for new delta values
6744 
6745   UPDATE   : 06.11.1993 by Guenter Mamier
6746 ******************************************************************************/
6747 static krui_err initializeBPTT(void)
6748 {
6749     register struct Unit *unit_ptr;
6750     int             i;
6751 
6752     FOR_ALL_UNITS(unit_ptr) {
6753 	/* clear netact-copies */
6754 	for (i = 0; i < MAX_BPTT_BACKSTEP; i++)
6755 	    unit_ptr->actbuf[i] = 0.0;
6756     }
6757     return (KRERR_NO_ERROR);
6758 }
6759 
6760 
6761 
6762 /*****************************************************************************
6763   FUNCTION : BPTT_propagateNetForward
6764 
6765   PURPOSE  : topological forward propagation (backprop thru time)
6766   RETURNS  :
6767   NOTES    :
6768 
6769   UPDATE   : 06.11.1993 by Guenter Mamier
6770 ******************************************************************************/
6771 static void BPTT_propagateNetForward(int pattern_no, int sub_pat_no, int nhist)
6772 {
6773     register struct Unit *unit_ptr;
6774     register Patterns in_pat;
6775     register TopoPtrArray topo_ptr;
6776     TopoPtrArray    first_hidden_ptr;
6777     int             i, done_hidden;
6778     int             all_zero_input = 1;	/* flag to reset net-copies */
6779 
    /* calculate the start address of the input pattern array */
6781     in_pat = kr_getSubPatData(pattern_no,sub_pat_no,INPUT,NULL);
6782 
6783     topo_ptr = topo_ptr_array;
6784 
6785     /* ACTIVATE INPUT LAYER */
6786     /* copy pattern into input unit's activation and calculate output of the
6787        input units */
    /* topo_ptr points to a (topologically sorted) unit structure (input
       units first) */
6790     while ((unit_ptr = *++topo_ptr) != NULL){
6791 
6792 	/* apply input pattern */
6793 	if (unit_ptr->out_func == OUT_IDENTITY)
6794 	    /* there is no need to call the output function  */
6795 	    unit_ptr->Out.output = unit_ptr->act = *in_pat++;
6796 	else if(unit_ptr->out_func == OUT_Custom_Python)
6797 		unit_ptr->Out.output =
6798 			kr_PythonOutFunction(unit_ptr->python_out_func,
6799 				unit_ptr->act = *in_pat++);
6800 	else
6801 	    /* no identity output function: calculate unit's output also  */
6802 	    unit_ptr->Out.output =
6803 		(*unit_ptr->out_func) (unit_ptr->act = *in_pat++);
6804 	if (fabs(unit_ptr->act) > 0.000001)
6805 	    all_zero_input = 0;	/* no reset-input */
6806 
6807 	/* BPTT: shift the actbuf for this input buffer one step back in time */
6808 	for (i = nhist; i > 0; i--) {
6809 	    unit_ptr->actbuf[i] = unit_ptr->actbuf[i - 1];
6810 	}
6811 
6812 	/* the new input pattern moves into the second time-layer with index 1,
6813 	   since activations for this pattern are calculated in time-layer 0 */
6814 	unit_ptr->actbuf[1] = unit_ptr->act;
6815 
6816     }
6817 
6818     /* An all-zero input pattern resets all network activities */
6819     if (all_zero_input) {
6820 	initializeBPTT();    /* reset all netact-copies at start of sequences */
6821     }
6822 
6823     /* INPUT LAYER DONE */
6824 
6825     /* store first hidden unit pointer */
6826     first_hidden_ptr = topo_ptr;
6827 
6828     /* shift all actbufs for non-input units one step back in time, make most
6829        recent activity visible in unit_ptr->Out.output for subsequent calls
6830        to act_func */
6831     while ((unit_ptr = *++topo_ptr) != NULL) {	/* hidden layer */
6832 	for (i = nhist; i > 0; i--)
6833 	    unit_ptr->actbuf[i] = unit_ptr->actbuf[i - 1];
6834 	unit_ptr->Out.output = unit_ptr->actbuf[1];
6835     }
6836 
6837     while ((unit_ptr = *++topo_ptr) != NULL) {	/* output layer */
6838 	for (i = nhist; i > 0; i--)
6839 	    unit_ptr->actbuf[i] = unit_ptr->actbuf[i - 1];
6840 	unit_ptr->Out.output = unit_ptr->actbuf[1];
6841     }
6842 
6843 
6844     /* calculate new activities for hidden and output units */
6845     /* point to first hidden unit */
6846     topo_ptr = first_hidden_ptr;
6847     done_hidden = 0;
6848     while (((unit_ptr = *++topo_ptr) != NULL) || (done_hidden == 0))
6849 	if (unit_ptr == NULL) {
6850 	    done_hidden = 1;
6851 	} else {
6852 	    /* calc actbuf[0] using actbuf[1], don't update Out.output while
6853 	       updating units, wait until all units are processed  */
6854 	    unit_ptr->act = ((unit_ptr->act_func == ACT_Custom_Python) ?
6855 			kr_PythonActFunction(unit_ptr->python_act_func,
6856 						unit_ptr) :
6857 			(unit_ptr->act_func) (unit_ptr)) ;
6858 	    unit_ptr->actbuf[0] = unit_ptr->act;
6859 	}
6860 
6861     /* set Out.output */
6862     topo_ptr = first_hidden_ptr;
6863     done_hidden = 0;
6864     while (((unit_ptr = *++topo_ptr) != NULL) || (done_hidden == 0))
6865 	if (unit_ptr == NULL) {
6866 	    done_hidden = 1;
6867 	} else {
6868 	    if (unit_ptr->out_func == OUT_IDENTITY)
6869 		/* identity output function: there is no need to call the
6870 		   output function  */
6871 		unit_ptr->Out.output = unit_ptr->act;
6872 	    else if(unit_ptr->out_func == OUT_Custom_Python)
6873 	    	unit_ptr->Out.output =
6874 			kr_PythonOutFunction(unit_ptr->python_out_func,
6875 				unit_ptr->act);
6876 	    else
6877 		/* no identity output function: calculate unit's output also  */
6878 		unit_ptr->Out.output = (*unit_ptr->out_func) (unit_ptr->act);
6879 	}
6880 }
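
/* The time shift used twice above, factored out for clarity. A sketch only
   (the kernel keeps these loops inline): every stored activation moves one
   slot back in time, freeing actbuf[0] for the current time step. */
static void BPTT_shift_actbuf_sketch(struct Unit *unit_ptr, int nhist)
{
    int i;

    for (i = nhist; i > 0; i--)
	unit_ptr->actbuf[i] = unit_ptr->actbuf[i - 1];
}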
6881 
6882 
6883 /*****************************************************************************
6884   FUNCTION : initOldDeltas
6885 
6886   PURPOSE  :
6887   RETURNS  :
6888   NOTES    : BPTT starts at the first time-layer (actbuf[0]).
6889              The deltas for this layer are calculated for the output units by
6890 	     comparison with the target values. All other deltas for hidden
6891 	     units are zero. The deltas are propagated to the second time-layer
	     (actbuf[1]) into olddelta
6893 
6894   UPDATE   : 06.11.1993 by Guenter Mamier
6895 ******************************************************************************/
6896 static float initOldDeltas(int pattern_no, int sub_pat_no)
6897 {
6898     register struct Link *link_ptr;
6899     register struct Unit *unit_ptr;
6900     register Patterns out_pat;
6901     register float  error, sum_error, devit, delta, tmp;
6902     register TopoPtrArray topo_ptr;
6903     TopoPtrArray    first_hidden_ptr;
    int             all_correct = 1;	/* flag whether all bits in the
					   pattern are correct */
6906     int size;
6907 
6908     /* Initdelta, Step 1: clear all olddeltas (accumulate delta in olddelta) */
6909 
6910     topo_ptr = topo_ptr_array;
6911     while ((unit_ptr = *++topo_ptr) != NULL) {	/* input units */
6912 	unit_ptr->olddelta = 0.0;
6913     }
6914 
6915     /* store first hidden unit pointer */
6916     first_hidden_ptr = topo_ptr;
6917     while ((unit_ptr = *++topo_ptr) != NULL) {	/* hidden units */
6918 	unit_ptr->olddelta = 0.0;
6919     }
6920 
6921     while ((unit_ptr = *++topo_ptr) != NULL) {	/* output units */
6922 	unit_ptr->olddelta = 0.0;
6923     }
6924 
6925     sum_error = 0.0;		/* reset network error  */
6926 
    /* calculate the start address of the output pattern */
6928     out_pat = kr_getSubPatData(pattern_no,sub_pat_no,OUTPUT,&size);
6929     out_pat += size;
6930 
6931 
6932     /* last output unit: add 3 to no_of_topo_units because the topologic
6933        array contains 4 NULL pointers  */
6934     topo_ptr = topo_ptr_array + (no_of_topo_units + 3);
6935 
6936     /* LOOP FOR ALL OUTPUT UNITS */
6937     /* calculate olddelta for output units  */
6938     while ((unit_ptr = *--topo_ptr) != NULL) {
6939 	tmp = unit_ptr->Out.output;
6940 	devit = *(--out_pat);
6941 
6942 	/* count correct bits using threshold of 0.5 */
6943 	if (devit > 0.5) {
6944 	    if (tmp > 0.5)
6945 		NoOfLearnedPatterns++;
6946 	    else
6947 		all_correct = 0;
6948 	} else {
6949 	    if (tmp <= 0.5)
6950 		NoOfLearnedPatterns++;
6951 	    else
6952 		all_correct = 0;
6953 	}
6954 
	devit = devit - tmp;	/* calc. deviation (target_j - output_j) */
6956 	error = devit * devit;
6957 	sum_error += error;
6958 
6959 	/* BPTT uses sum_j ( o_j - t_j )^2 as error function => -2.0 * ... */
6960 	delta = -2.0 * devit * (((unit_ptr->act_deriv_func == ACT_DERIV_Custom_Python) ?
6961 			kr_PythonActFunction(unit_ptr->python_act_deriv_func,
6962 						unit_ptr) :
6963 			(unit_ptr->act_deriv_func) (unit_ptr)) );
6964 
6965 	/* Initdelta, Step 2: upstream propagation of gradients for backprop */
6966 	FOR_ALL_LINKS(unit_ptr, link_ptr) {
6967 	    tmp = delta * link_ptr->weight;
6968 	    link_ptr->to->olddelta += tmp;	/* accumulate delta */
6969 	    /* accumulate weight gradient */
6970 	    link_ptr->value_c += link_ptr->to->actbuf[1] * delta;
6971 	}
6972 
6973 	/* accumulate bias gradient */
6974 	unit_ptr->value_c += delta;
6975     }/* output units done */
6976 
6977     /* Initdelta, Step 3:  clear newdelta */
6978     topo_ptr = topo_ptr_array;
6979     while ((unit_ptr = *++topo_ptr) != NULL) {	/* input units */
6980 	unit_ptr->newdelta = 0.0;
6981     }
6982 
6983     while ((unit_ptr = *++topo_ptr) != NULL) {	/* hidden units */
6984 	unit_ptr->newdelta = 0.0;
6985     }
6986 
6987     while ((unit_ptr = *++topo_ptr) != NULL) {	/* output units */
6988 	unit_ptr->newdelta = 0.0;
6989     }
6990 
6991     return (sum_error);
6992 }
6993 
6994 
6995 /*****************************************************************************
6996   FUNCTION : oneStepBackprop
6997 
6998   PURPOSE  : calc weight changes between consecutive time steps
6999   RETURNS  : network error
7000   NOTES    : heart of BPTT
7001 
7002   UPDATE   : 06.11.1993 by Guenter Mamier
7003 ******************************************************************************/
7004 static float oneStepBackprop(int backstep, int pattern_no, int sub_pat_no,
7005 			     int nhist)
7006 {
7007     register struct Link *link_ptr;
7008     register struct Unit *unit_ptr;
7009     double          delta, sum_error;
7010     register TopoPtrArray topo_ptr;
7011     int             done_hidden, nextlayer;
7012     float           tmp;
7013 
7014     /* CHECK FOR START OF BACKPROP AT THE LAST TIME LAYER */
7015     if (backstep == 0) {
7016 	sum_error = initOldDeltas(pattern_no,sub_pat_no);
7017 	return (sum_error);	/* start case */
7018     } else			/* at least for time layer 0, old deltas are
7019 				   known */
7020 	sum_error = 0.0;
7021 
7022     /* index of next layer (used frequently!) */
7023     nextlayer = backstep + 1;
7024 
    /* point to the separator after the last input unit */
7026     topo_ptr = topo_ptr_array;	/* + (NoOfInputUnits + 1); */
7027     while ((unit_ptr = *++topo_ptr) != NULL);
7028     done_hidden = 0;
7029 
7030     /* DO BACKPROP FOR ALL NON-INPUT-UNITS */
7031     while (((unit_ptr = *++topo_ptr) != NULL) || (done_hidden == 0))
	if (unit_ptr == NULL) {	/* skip NULL separator between hidden and
				   output units */
7034 	    done_hidden = 1;
7035 	} else {		/* delta =  f'(net[backstep]) * olddelta */
7036 	    /* copy actbuf[backstep] to act to enable call to act_deriv_func
7037 	       (overhead: better definition of activation functions required) */
7038 	    unit_ptr->act = unit_ptr->actbuf[backstep];
7039 	    delta = (((unit_ptr->act_deriv_func == ACT_DERIV_Custom_Python) ?
7040 			kr_PythonActFunction(unit_ptr->python_act_deriv_func,
7041 						unit_ptr) :
7042 			(unit_ptr->act_deriv_func) (unit_ptr)) ) * unit_ptr->olddelta;
7043 
7044 	    /* propagate gradients upstream */
7045 	    FOR_ALL_LINKS(unit_ptr, link_ptr) {
7046 		tmp = delta * link_ptr->weight;
7047 		link_ptr->to->newdelta += tmp;	/* accumulate delta */
7048 		/* accumulate weight gradient */
7049 		link_ptr->value_c += link_ptr->to->actbuf[nextlayer] * delta;
7050 	    }
7051 
7052 	    /* accumulate bias gradient */
7053 	    unit_ptr->value_c += delta;
7054 	}
7055 
7056     /* copy newdeltas to olddeltas, clear newdeltas */
7057     topo_ptr = topo_ptr_array;
7058     while ((unit_ptr = *++topo_ptr) != NULL) {	/* input units */
7059 	unit_ptr->olddelta = unit_ptr->newdelta;
7060 	unit_ptr->newdelta = 0.0;
7061     }
7062 
7063     while ((unit_ptr = *++topo_ptr) != NULL) {	/* hidden units */
7064 	unit_ptr->olddelta = unit_ptr->newdelta;
7065 	unit_ptr->newdelta = 0.0;
7066     }
7067 
7068     while ((unit_ptr = *++topo_ptr) != NULL) {	/* output units */
7069 	unit_ptr->olddelta = unit_ptr->newdelta;
7070 	unit_ptr->newdelta = 0.0;
7071     }
7072 
7073     return (sum_error);
7074 }
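
/* The recursion implemented above, written out: for every non-input unit j
   in time layer `backstep',

       delta_j = f'(act_j[backstep]) * olddelta_j

   every incoming link (i -> j) accumulates the weight gradient

       value_c(j,i) += actbuf_i[backstep+1] * delta_j

   and contributes w(j,i) * delta_j to newdelta_i, which becomes olddelta_i
   when the next-older time layer is processed. */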
7075 
7076 
7077 
7078 /*****************************************************************************
7079   FUNCTION : BPTTadapt
7080 
7081   PURPOSE  : adapt all weights after BPTT using steepest descent with momentum
7082   RETURNS  :
7083   NOTES    :
7084 
7085   UPDATE   : 06.11.1993 by Guenter Mamier
7086 ******************************************************************************/
7087 static void BPTTadapt(float step_size, float bptt_momentum)
7088 {
7089     register struct Link *link_ptr;
7090     register struct Unit *unit_ptr;
7091     register TopoPtrArray topo_ptr;
7092     int             done_hidden = 0;
7093     float           delta;
7094 
    /* point to the separator after the last input unit */
7096     topo_ptr = topo_ptr_array + (NoOfInputUnits + 1);
7097 
7098     /* for each non-input unit: add weight changes to old weights */
7099     while (((unit_ptr = *++topo_ptr) != NULL) || (done_hidden == 0)) {
7100 	if (unit_ptr == NULL) {
7101 	    done_hidden = 1;
7102 	} else {
7103 	    delta = step_size * (-unit_ptr->value_c) +
7104 		bptt_momentum * unit_ptr->value_a;
7105 	    if (!IS_SPECIAL_UNIT(unit_ptr))
7106 		unit_ptr->bias += delta;
7107 	    unit_ptr->value_a = delta;
7108 	    unit_ptr->value_c = 0.0;
7109 	    /* set act to last activity, since it was scrambled by bptt */
7110 	    unit_ptr->act = unit_ptr->Out.output;
7111 	    FOR_ALL_LINKS(unit_ptr, link_ptr) {
7112 		delta = step_size * (-link_ptr->value_c) +
7113 		    bptt_momentum * link_ptr->value_a;
7114 		link_ptr->value_a = delta;
7115 		link_ptr->value_c = 0.0;
7116 	    }
7117 	    if (!IS_SPECIAL_UNIT(unit_ptr))
7118 		FOR_ALL_LINKS(unit_ptr, link_ptr) {
7119 		    link_ptr->weight += link_ptr->value_a;
7120 		}
7121 	}
7122     }
7123 }
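
/* The update rule applied above to both biases and link weights is steepest
   descent with a momentum term:

       delta(t) = step_size * (-gradient) + bptt_momentum * delta(t-1)

   where value_c holds the gradient accumulated backward through time and
   value_a the previous change; special units keep their old weights. */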
7124 
7125 
7126 
7127 /*****************************************************************************
7128   FUNCTION : BPTT_propagateNetBackward
7129 
7130   PURPOSE  : BPTT-main: accumulate weight changes backward thru time
7131   RETURNS  : network error
7132   NOTES    :
7133 
7134   UPDATE   : 06.11.1993 by Guenter Mamier
7135 ******************************************************************************/
7136 static float BPTT_propagateNetBackward(int pattern_no, int sub_pat_no,int nhist)
7137 {
7138     float           error = 0.0;
7139     float           dummy;
7140     int             backstep;
7141 
7142     /* go nhist steps back thru time */
7143     for (backstep = 0; backstep < nhist; backstep++)
7144 	if (backstep == 0) {
7145 	    /* start at output, pattern-error is calculated first */
7146 	    error = oneStepBackprop(backstep, pattern_no, sub_pat_no, nhist);
7147 	} else {
7148 	    dummy = oneStepBackprop(backstep, pattern_no, sub_pat_no, nhist);
7149 	}
7150     return (error);
7151 }
7152 
7153 
7154 
7155 /*****************************************************************************
7156   FUNCTION : LEARN_BPTT
7157 
7158   PURPOSE  : Backpropagation through time learning function
7159   RETURNS  : kernel error code
7160   NOTES    : Input Parameters:   1 : step_size
7161                                  2 : momentum
7162 				 3 : nhist
7163              Output Parameters:  1 : error of the network (sum of all cycles)
7164 
7165 
7166   UPDATE   : 06.11.1993 by Guenter Mamier
7167 ******************************************************************************/
7168 krui_err LEARN_BPTT(int start_pattern, int end_pattern,
7169 		    float *parameterInArray, int NoOfInParams,
7170 		    float **parameterOutArray, int *NoOfOutParams)
7171 {
7172     static float    OutParameter[1];	/* OutParameter[0] stores the
7173 					   learning error  */
7174     int             ret_code, pattern_no, sub_pat_no, patterns;
7175     int             nhist;	/* number of steps back in time */
7176     register struct Unit *unit_ptr;
7177 
7178     if (NoOfUnits == 0)
7179 	return (KRERR_NO_UNITS);        /* No Units defined	 */
    if (NoOfInParams < 1)	        /* at least one learning parameter required */
7181 	return (KRERR_PARAMETERS);	/* Not enough input parameters  */
7182 
7183     *NoOfOutParams = 1;		/* One return value is available (the
7184 				   learning error)  */
7185     *parameterOutArray = OutParameter;	/* set the output parameter reference */
7186     ret_code = KRERR_NO_ERROR;	/* reset return code  */
7187 
7188     if (NetModified || (TopoSortID != TOPOLOGIC_TYPE)) {
7189 	/* Net has been modified or topologic array isn't initialized */
7190 	/* any connected topology allowed */
7191 	/* count the no. of I/O units and check the patterns  */
7192 	ret_code = kr_IOCheck();
7193 	if (ret_code < KRERR_NO_ERROR)
7194 	    return (ret_code);
7195 
7196 	/* sort units by ''topologic type'', criterion is visibility
7197 	   (input,hidden,output), not topology */
7198 	ret_code = kr_topoSort(TOPOLOGIC_TYPE);
7199 
7200 	if ((ret_code != KRERR_NO_ERROR) && (ret_code != KRERR_DEAD_UNITS))
7201 	    return (ret_code);
7202 	/* sites are not supported, check absence */
7203 	FOR_ALL_UNITS(unit_ptr)
7204 	    if UNIT_HAS_SITES
7205 	    (unit_ptr)
7206 		return (KRERR_SITES_NO_SUPPORT);
7207 	NetModified = FALSE;
7208     }
7209     if (NetInitialize || LearnFuncHasChanged) {	/* Net has been modified or
7210 						   initialized, clear weight
7211 						   changes */
7212 	ret_code = BPTT_clear_deltaw();
7213 	if (ret_code != KRERR_NO_ERROR)
7214 	    return (ret_code);
7215     }
7216     NET_ERROR(OutParameter) = 0.0;	/* reset network error value  */
7217 
7218     NoOfLearnedPatterns = 0;	/* correct bits using threshold of 0.5 */
7219     nhist = LEARN_PARAM3(parameterInArray);
7220     if (nhist > MAX_BPTT_BACKSTEP)
7221 	return (KRERR_NET_DEPTH);	/* actbuf and learning functions
7222 					   support only MAX_BPTT_BACKSTEP net
7223 					   copies */
7224 
7225     /* compute the necessary sub patterns */
7226 
7227     KernelErrorCode = kr_initSubPatternOrder(start_pattern,end_pattern);
7228     if(KernelErrorCode != KRERR_NO_ERROR)
7229 	return (KernelErrorCode);
7230 
7231     patterns = 0;
7232     while(kr_getSubPatternByOrder(&pattern_no,&sub_pat_no)){
7233 
7234 	/* FORWARD-BPTT */
7235 	/* 1st parameter is the pattern number 2nd parameter is the number of
7236 	   steps back in time */
7237 	BPTT_propagateNetForward(pattern_no,sub_pat_no,nhist); /*Forward pass */
7238 
7239 	/* Backward propagation  */
7240 	NET_ERROR(OutParameter)
7241 	    += BPTT_propagateNetBackward(pattern_no, sub_pat_no, nhist);
7242 
7243 	/* online version: adapt net after each pattern has been
7244 	   backpropagated through time and weight changes have accumulated
7245 	   through time */
7246 	BPTTadapt(LEARN_PARAM1(parameterInArray),
7247 		  LEARN_PARAM2(parameterInArray));
7248 
7249 	patterns++;
7250     }
7251     return (ret_code);
7252 }
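
/* Example invocation (illustration only; the pattern range and parameter
   values below are hypothetical):

       float    params[3] = { 0.2f, 0.5f, 10.0f };    step_size, momentum, nhist
       float   *out_params;
       int      no_of_out_params;
       krui_err err = LEARN_BPTT(0, 9, params, 3,
                                 &out_params, &no_of_out_params);

   On success, out_params[0] holds the network error summed over the cycle. */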
7253 
7254 
7255 /*****************************************************************************
7256   FUNCTION : TEST_BPTT
7257 
7258   PURPOSE  : Backpropagation through time validation function
7259   RETURNS  : kernel error code
7260   NOTES    : Input Parameters:   1 : step_size
7261                                  2 : momentum
7262 				 3 : nhist
7263              Output Parameters:  1 : error of the network (sum of all cycles)
7264 
7265  AUTHOR    : 02.06.1995, Martin Reczko
7266 ******************************************************************************/
7267 krui_err TEST_BPTT(int start_pattern, int end_pattern,
7268 		    float *parameterInArray, int NoOfInParams,
7269 		    float **parameterOutArray, int *NoOfOutParams)
7270 {
7271     static float    OutParameter[1];	/* OutParameter[0] stores the
7272 					   learning error  */
7273     int             ret_code, pattern_no, sub_pat_no, patterns;
7274     int             nhist;	/* number of steps back in time */
7275     register struct Unit *unit_ptr;
7276 
7277     if (NoOfUnits == 0)
7278 	return (KRERR_NO_UNITS);        /* No Units defined	 */
    if (NoOfInParams < 1)	        /* at least one learning parameter required */
7280 	return (KRERR_PARAMETERS);	/* Not enough input parameters  */
7281 
7282     *NoOfOutParams = 1;		/* One return value is available (the
7283 				   learning error)  */
7284     *parameterOutArray = OutParameter;	/* set the output parameter reference */
7285     ret_code = KRERR_NO_ERROR;	/* reset return code  */
7286 
7287     if (NetModified || (TopoSortID != TOPOLOGIC_TYPE)) {
7288 	/* Net has been modified or topologic array isn't initialized */
7289 	/* any connected topology allowed */
7290 	/* count the no. of I/O units and check the patterns  */
7291 	ret_code = kr_IOCheck();
7292 	if (ret_code < KRERR_NO_ERROR)
7293 	    return (ret_code);
7294 
7295 	/* sort units by ''topologic type'', criterion is visibility
7296 	   (input,hidden,output), not topology */
7297 	ret_code = kr_topoSort(TOPOLOGIC_TYPE);
7298 
7299 	if ((ret_code != KRERR_NO_ERROR) && (ret_code != KRERR_DEAD_UNITS))
7300 	    return (ret_code);
7301 	/* sites are not supported, check absence */
7302 	FOR_ALL_UNITS(unit_ptr)
7303 	    if UNIT_HAS_SITES
7304 	    (unit_ptr)
7305 		return (KRERR_SITES_NO_SUPPORT);
7306 	NetModified = FALSE;
7307     }
7308     if (NetInitialize || LearnFuncHasChanged) {	/* Net has been modified or
7309 						   initialized, clear weight
7310 						   changes */
7311 	ret_code = BPTT_clear_deltaw();
7312 	if (ret_code != KRERR_NO_ERROR)
7313 	    return (ret_code);
7314     }
7315     NET_ERROR(OutParameter) = 0.0;	/* reset network error value  */
7316 
7317     NoOfLearnedPatterns = 0;	/* correct bits using threshold of 0.5 */
7318     nhist = 1;
7319 
7320     /* compute the necessary sub patterns */
7321 
7322     KernelErrorCode = kr_initSubPatternOrder(start_pattern,end_pattern);
7323     if(KernelErrorCode != KRERR_NO_ERROR)
7324 	return (KernelErrorCode);
7325 
7326     patterns = 0;
7327 
7328     while (kr_getSubPatternByOrder(&pattern_no,&sub_pat_no)) {
7329 
7330 	/* FORWARD-BPTT */
7331 	/* 1st parameter is the pattern number 2nd parameter is the number of
7332 	   steps back in time */
7333 	BPTT_propagateNetForward(pattern_no,sub_pat_no,nhist); /*Forward pass*/
7334 
7335 	/* Backward propagation  */
7336 	NET_ERROR(OutParameter)
7337 	    += BPTT_propagateNetBackward(pattern_no, sub_pat_no, nhist);
7338 
7339     }
    printf("%d bits correct\n", NoOfLearnedPatterns);
    fflush(stdout);
7341     return (ret_code);
7342 }
7343 
7344 
7345 /*****************************************************************************
7346   FUNCTION : LEARN_BBPTT
7347 
7348   PURPOSE  : Batch backpropagation through time learning function (BBPTT)
  RETURNS  : kernel error code
7350   NOTES    : Input Parameters:   1 : step_size
7351                                  2 : momentum
7352 				 3 : nhist
7353              Output Parameters:  1 : error of the network (sum of all cycles)
7354 
7355   UPDATE   : 06.11.1993 by Guenter Mamier
7356 ******************************************************************************/
7357 krui_err LEARN_BBPTT(int start_pattern, int end_pattern,
7358 		     float *parameterInArray, int NoOfInParams,
7359 		     float **parameterOutArray, int *NoOfOutParams)
7360 {
7361     static float    OutParameter[1];	/* OutParameter[0] stores the
7362 					   learning error  */
7363     int             ret_code, pattern_no, sub_pat_no, patterns;
7364     int             nhist;	/* number of steps back in time */
7365     register struct Unit *unit_ptr;
7366 
7367     if (NoOfUnits == 0)
7368 	return (KRERR_NO_UNITS);        /* No Units defined	 */
    if (NoOfInParams < 1)	        /* at least one learning parameter required */
7370 	return (KRERR_PARAMETERS);	/* Not enough input parameters  */
7371 
7372     *NoOfOutParams = 1;		/* One return value is available (the
7373 				   learning error)  */
7374     *parameterOutArray = OutParameter;	/* set the output parameter reference */
7375     ret_code = KRERR_NO_ERROR;	/* reset return code  */
7376 
7377     if (NetModified || (TopoSortID != TOPOLOGIC_TYPE)) {
7378 	/* Net has been modified or topologic array isn't initialized */
7379 	/* any connected topology allowed */
7380 	/* count the no. of I/O units and check the patterns  */
7381 	ret_code = kr_IOCheck();
7382 	if (ret_code < KRERR_NO_ERROR)
7383 	    return (ret_code);
7384 
7385 	/* sort units by ''topologic type'', criterion is visibility
7386 	   (input,hidden,output), not topology */
7387 	ret_code = kr_topoSort(TOPOLOGIC_TYPE);
7388 
7389 	if ((ret_code != KRERR_NO_ERROR) && (ret_code != KRERR_DEAD_UNITS))
7390 	    return (ret_code);
7391 	/* sites are not supported, check absence */
7392 	FOR_ALL_UNITS(unit_ptr)
7393 	    if UNIT_HAS_SITES
7394 	    (unit_ptr)
7395 		return (KRERR_SITES_NO_SUPPORT);
7396 	NetModified = FALSE;
7397     }
7398     if (NetInitialize || LearnFuncHasChanged) {	/* Net has been modified or
7399 						   initialized, clear weight
7400 						   changes */
7401 	ret_code = BPTT_clear_deltaw();
7402 	if (ret_code != KRERR_NO_ERROR)
7403 	    return (ret_code);
7404     }
7405     NET_ERROR(OutParameter) = 0.0;	/* reset network error value  */
7406 
7407     NoOfLearnedPatterns = 0;	/* correct bits using threshold of 0.5 */
7408     nhist = LEARN_PARAM3(parameterInArray);
7409     if (nhist > MAX_BPTT_BACKSTEP)
7410 	return (KRERR_NET_DEPTH);	/* actbuf and learning functions
7411 					   support only MAX_BPTT_BACKSTEP net
7412 					   copies */
7413 
7414     /* compute the necessary sub patterns */
7415 
7416     KernelErrorCode = kr_initSubPatternOrder(start_pattern,end_pattern);
7417     if(KernelErrorCode != KRERR_NO_ERROR)
7418 	return (KernelErrorCode);
7419 
7420 
7421     patterns = 0;
7422     while(kr_getSubPatternByOrder(&pattern_no,&sub_pat_no)){
7423 
7424 	/* FORWARD-BPTT */
7425 	/* 1st parameter is the pattern number 2nd parameter is the number of
7426 	   steps back in time */
7427 	BPTT_propagateNetForward(pattern_no,sub_pat_no,nhist); /*Forward pass */
7428 
7429 	/* Backward propagation  */
7430 	NET_ERROR(OutParameter)
7431 	    += BPTT_propagateNetBackward(pattern_no, sub_pat_no, nhist);
7432 	patterns++;
7433     }
7434 
7435     /* batch version */
7436     BPTTadapt(LEARN_PARAM1(parameterInArray) / patterns,
7437 	      LEARN_PARAM2(parameterInArray));
7438 
7439     return (ret_code);
7440 }
7441 
7442 
7443 
7444 /*****************************************************************************
7445   FUNCTION : LEARN_QPTT
7446 
7447   PURPOSE  : Quickprop through time learning function
7448   RETURNS  : kernel error code
7449   NOTES    : Input Parameters:   1 : step_size
7450                                  2 : maximum step growth
7451 				 3 : decay factor
7452 				 4 : nhist
7453              Output Parameters:  1 : error of the network (sum of all cycles)
7454 
7455   UPDATE   : 06.11.1993 by Guenter Mamier
7456 ******************************************************************************/
7457 krui_err  LEARN_QPTT(int start_pattern, int end_pattern,
7458 		     float *parameterInArray, int NoOfInParams,
7459 		     float **parameterOutArray, int *NoOfOutParams)
7460 {
7461     static float    OutParameter[1];	/* OutParameter[0] stores the
7462 					   learning error  */
7463     int             ret_code, pattern_no, sub_pat_no, patterns;
7464     int             nhist;	/* number of steps back in time */
7465     register struct Unit *unit_ptr;
7466 
7467     if (NoOfUnits == 0)
7468 	return (KRERR_NO_UNITS);/* No Units defined	 */
    if (NoOfInParams < 1)	/* at least one learning parameter required */
7470 	return (KRERR_PARAMETERS);	/* Not enough input parameters  */
7471 
7472     *NoOfOutParams = 1;		/* One return value is available (the
7473 				   learning error)  */
7474     *parameterOutArray = OutParameter;	/* set the output parameter reference */
7475     ret_code = KRERR_NO_ERROR;	/* reset return code  */
7476 
7477     if (NetModified || (TopoSortID != TOPOLOGIC_TYPE)) {
7478 	/* Net has been modified or topologic array isn't initialized */
7479 	/* any connected topology allowed */
7480 	/* count the no. of I/O units and check the patterns  */
7481 	ret_code = kr_IOCheck();
7482 	if (ret_code < KRERR_NO_ERROR)
7483 	    return (ret_code);
7484 
7485 	/* sort units by ''topologic type'', criterion is visibility
7486 	   (input,hidden,output), not topology */
7487 	ret_code = kr_topoSort(TOPOLOGIC_TYPE);
7488 
7489 	if ((ret_code != KRERR_NO_ERROR) && (ret_code != KRERR_DEAD_UNITS))
7490 	    return (ret_code);
7491 	/* sites are not supported, check absence */
7492 	FOR_ALL_UNITS(unit_ptr)
7493 	    if UNIT_HAS_SITES
7494 	    (unit_ptr)
7495 		return (KRERR_SITES_NO_SUPPORT);
7496 	NetModified = FALSE;
7497     }
7498     if (NetInitialize || LearnFuncHasChanged) {	/* Net has been modified or
7499 						   initialized, clear weight
7500 						   changes */
7501 	ret_code = BPTT_clear_deltaw();
7502 	if (ret_code != KRERR_NO_ERROR)
7503 	    return (ret_code);
7504     }
7505     NET_ERROR(OutParameter) = 0.0;	/* reset network error value  */
7506 
7507     NoOfLearnedPatterns = 0;	/* correct bits using threshold of 0.5 */
7508     nhist = LEARN_PARAM4(parameterInArray);
7509     if (nhist > MAX_BPTT_BACKSTEP)
7510 	return (KRERR_NET_DEPTH);	/* actbuf and learning functions
7511 					   support only MAX_BPTT_BACKSTEP net
7512 					   copies */
7513 
7514 
7515     /* compute the necessary sub patterns */
7516 
7517     KernelErrorCode = kr_initSubPatternOrder(start_pattern,end_pattern);
7518     if(KernelErrorCode != KRERR_NO_ERROR)
7519 	return (KernelErrorCode);
7520 
7521 
7522     patterns = 0;
7523     while(kr_getSubPatternByOrder(&pattern_no,&sub_pat_no)){
7524 
7525 	/* FORWARD-BPTT */
7526 	/* 1st parameter is the pattern number 2nd parameter is the number of
7527 	   steps back in time */
7528 	BPTT_propagateNetForward(pattern_no,sub_pat_no,nhist); /*Forward pass */
7529 
7530 	/* Backward propagation  */
7531 	NET_ERROR(OutParameter)
7532 	    += BPTT_propagateNetBackward(pattern_no, sub_pat_no, nhist);
7533 
7534 	patterns++;
7535     }
7536     MODI_quickprop(LEARN_PARAM1(parameterInArray) / patterns,
7537 		   LEARN_PARAM2(parameterInArray),
7538 		   LEARN_PARAM3(parameterInArray));
7539 
7540     return (ret_code);
7541 }
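
/* Sketch of the quickprop step applied by MODI_quickprop (defined elsewhere
   in the kernel): with the current and previous gradients S(t) and S(t-1),
   the classical quickprop rule takes

       dw(t) = S(t) / (S(t-1) - S(t)) * dw(t-1)

   limited by the maximum step growth (parameter 2), with parameter 3 acting
   as a weight decay term. See the definition of MODI_quickprop for the exact
   behaviour. */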
7542 
7543 
7544 /*****************************************************************************
7545 
7546   GROUP        : kohonen_learning
7547 
7548   PURPOSE      : learning algorithm for Kohonen Feature Map
7549   AUTHOR       : Marc Seemann
7550 
7551 ******************************************************************************/
7552 
7553 
7554 /*****************************************************************************
7555   FUNCTION : propagateNet_kohonen
7556 
7557   PURPOSE  : Propagate and train a pattern
7558   NOTES    :
  UPDATE   : 07.02.1994 by Sven Doering
7560 
7561     Copyright (c) 1990-1995  SNNS Group, IPVR, Univ. Stuttgart, FRG
7562     Copyright (c) 1996-1998  SNNS Group, WSI, Univ. Tuebingen, FRG
7563 
7564 ******************************************************************************/
7565 
7566 static float propagateNet_kohonen(int pattern_no, int sub_pat_no, float height,
7567 				  float radius, int sizehor)
7568 {
7569     register struct Link *link_ptr;
7570     register struct Site *site_ptr;
7571     register struct Unit *unit_ptr;
7572     register struct Unit *winner_ptr;
7573     register Patterns in_pat;
    register int    NoOfCompounds, sizever, verwin, horwin, hor, ver, range;
    float           maximum, sum_error, deviat, learn_error, sum;
    float           helpver, helphor;	/* squared grid distances to winner */
7577     float           unit_ptr_net;
7578     register TopoPtrArray topo_ptr;
7579     float           adapt;
7580     int             winner, current_no;
7581 
7582 
7583     /* calculate the activation and the output values         */
7584     /* of the input units (Input Layer)                       */
7585 
7586     NoOfCompounds = NoOfInputUnits;
7587     sizever = NoOfHiddenUnits / sizehor;
7588 
7589     sum = 0.0;
7590 
7591     /* calculate startaddress for input pattern array  */
7592     in_pat = kr_getSubPatData(pattern_no,sub_pat_no,INPUT,NULL);
7593 
7594     topo_ptr = topo_ptr_array;
7595 
7596     /* copy pattern into input unit's activation and calculate output of the
7597        input units */
7598     while ((unit_ptr = *++topo_ptr) != NULL) { /* topo_ptr points to the
						  unit structures (sorted by:
7600 						  input-, hidden- and
7601 						  output-units, separated
7602 						  with NULL pointers) */
7603 	sum += *in_pat * *in_pat;
7604 
7605 	if (unit_ptr->out_func == OUT_IDENTITY)
7606 	    /* identity output function: there is no need to call the output
7607 	       function  */
7608 	    unit_ptr->Out.output = unit_ptr->act = *in_pat++;
7609 	else if(unit_ptr->out_func == OUT_Custom_Python)
7610 		unit_ptr->Out.output =
7611 			kr_PythonOutFunction(unit_ptr->python_out_func,
7612 				unit_ptr->act = *in_pat++);
7613 	else
7614 	    /* no identity output function: calculate unit's output also  */
7615 	    unit_ptr->Out.output =
7616 		(*unit_ptr->out_func) (unit_ptr->act = *in_pat++);
7617     }
7618 
7619     if (sum != 0.0)
	/* normalize the input vector */
7621 	normalize_inputvector(sum);
7622 
7623     /* propagate Kohonen Layer  */
7624 
7625     /* calculate the activation and the output values */
    /* of the competitive units (hidden layer) */
7627 
7628     /* winner is determined using the dot product */
7629 
7630 
7631     winner_ptr = NULL;
7632     maximum = -1.0e30;		/* contains the maximum of the activations */
7633     current_no = 0;
7634 
7635     /* propagate hidden units  */
7636     while ((unit_ptr = *++topo_ptr) != NULL) {	/* topo_ptr points to a
						   (topologically sorted) unit
						   structure */
7639 	unit_ptr_net = 0.0;
7640 	if (UNIT_HAS_DIRECT_INPUTS(unit_ptr)) {	/* the unit has direct links */
7641 	    FOR_ALL_LINKS(unit_ptr, link_ptr)
7642 		unit_ptr_net += (link_ptr->weight * link_ptr->to->Out.output);
7643 	} else {		/* the unit has sites  */
7644 	    FOR_ALL_SITES_AND_LINKS(unit_ptr, site_ptr, link_ptr)
7645 		unit_ptr_net += (link_ptr->weight * link_ptr->to->Out.output);
7646 	}
7647 
7648 	if (maximum < unit_ptr_net) {	/* determine winner unit  */
7649 	    winner_ptr = unit_ptr;
7650 	    maximum = unit_ptr_net;
7651 	    winner = current_no;
7652 	}
7653 	current_no++;
7654 	/* reset output and activation of hidden units  */
7655 	unit_ptr->Out.output = unit_ptr->act = (FlintType) 0;
7656     }
7657 
7658     /* the competitive winner is chosen */
7659 
7660     winner_ptr->Out.output = winner_ptr->act = (FlintType) 1;
7661     winner_ptr->bias++;
    /* store the number of the current pattern in the winner unit */
    winner_ptr->value_a = (FlintType) (pattern_no + 1);
7665 
7666     horwin = winner % sizehor;
7667     verwin = winner / sizehor;
7668 
7669 
7670     /***************************************************************/
7671     /* Train  the  SOM                                             */
7672 
7673     /* Only the weights of links that go to the winner and its     */
7674     /* neighbourhood are adjusted, the others remain the same.     */
7675     /* The incoming weights to the competitive units are adapted   */
7676     /* as follows:                                                 */
7677 
7678     /* weight(new) = weight(old) + adapt * (output - weight(old))  */
7679 
7680     /* where adapt is the learning rate (0 < adapt <= 1.0)         */
7681     /* and output is the value of the input unit vector            */
7682 
7683     /***************************************************************/
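    /* Worked example: with height = 0.9 and radius = 2, a winner at grid
       position (hor,ver) = (3,4) and a unit at (4,4), we get helphor = 1,
       helpver = 0 and adapt = 0.9 * exp(-1/4) ~ 0.70; a link with source
       output 0.8 and old weight 0.5 is then moved to
       0.5 + 0.70 * (0.8 - 0.5) ~ 0.71, before the weight vector is
       re-normalized.  */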
7684 
7685 
7686     for (ver = 0; ver < sizever; ver++)
7687 	for (hor = 0; hor < sizehor; hor++)
7688 	    if ((hor < radius + horwin) &&
7689 		(hor > horwin - radius) &&
7690 		(ver < radius + verwin) &&
7691 		(ver > verwin - radius)) {
7692 		helpver = (float) ((ver - verwin) * (ver - verwin));
7693 		helphor = (float) ((hor - horwin) * (hor - horwin));
7694 		adapt = height * exp(-(helpver + helphor) /
7695 				     (float) (radius * radius));
7696 
7697 		sum = 0.0;
7698 		range = ver * sizehor + hor + 1 + NoOfCompounds;
7699 
7700 		/* get unit pointer of unit in adaptation range */
7701 		unit_ptr = kr_getUnitPtr(range);
7702 
7703 		if(!IS_SPECIAL_UNIT(unit_ptr)){
7704 		    if (unit_ptr->flags & UFLAG_DLINKS) { /* the unit has  */
7705 							  /* direct links  */
7706 			FOR_ALL_LINKS(unit_ptr, link_ptr) {
7707 			    deviat=link_ptr->to->Out.output - link_ptr->weight;
7708 			    learn_error = adapt * deviat;
7709 			    link_ptr->weight += learn_error;
7710 			    /* this is needed for the normalization of the
7711 			       weight_vector */
7712 			    sum += link_ptr->weight * link_ptr->weight;
7713 			}
		    } else {	/* the unit has sites  */
			FOR_ALL_SITES_AND_LINKS(unit_ptr, site_ptr, link_ptr) {
7716 			    deviat=link_ptr->to->Out.output - link_ptr->weight;
7717 			    learn_error = adapt * deviat;
7718 			    link_ptr->weight += learn_error;
7719 			    /* this is needed for the normalization of the
7720 			       weight_vector */
7721 			    sum += link_ptr->weight * link_ptr->weight;
7722 			}
7723 		    }
7724 		    if (sum != 0.0)
7725 			normalize_weight(unit_ptr, sum);
7726 		}
7727 	    }
    sum_error = 0.0;		/* no error measure is defined for this
				   learning function; always return 0.0 */
7730     return (sum_error);
7731 }
7732 
7733 
7734 /*****************************************************************************
7735   FUNCTION : initializeKohonenLearning
7736 
7737   PURPOSE  : initialize the SOM
7738   NOTES    :
7739   UPDATE   : 19.08.1993
7740 
7741     Copyright (c) 1990-1995  SNNS Group, IPVR, Univ. Stuttgart, FRG
7742     Copyright (c) 1996-1998  SNNS Group, WSI, Univ. Tuebingen, FRG
7743 
7744 ******************************************************************************/
7745 static krui_err initializeKohonenLearning(void)
7746 {
7747     register unsigned short flags;
7748     register struct Unit *unit_ptr;
7749 
7750     FOR_ALL_UNITS(unit_ptr) {
7751 	flags = unit_ptr->flags;
7752 
7753 	if ((flags & UFLAG_IN_USE) == UFLAG_IN_USE)	/* unit is in use  */
7754 	    unit_ptr->value_a = unit_ptr->bias = (FlintType) 0.0;
7755     }
7756     return (KRERR_NO_ERROR);
7757 }
7758 
7759 
7760 
7761 /*****************************************************************************
7762   FUNCTION : LEARN_kohonen
7763 
7764   PURPOSE  :  incorporates the body of the kohonen learning algorithm
  NOTES    :  the parameterInArray must contain 5 parameters:
7766                       1) initial adaptation height
7767                       2) initial adaptation radius
7768        	              3) multiplication factor for height
7769                       4) multiplication factor for radius
7770            	      5) horizontal size of the competitive (hidden) layer
7771 
  UPDATE   : 15.07.1994
7773 ******************************************************************************/
7774 krui_err LEARN_kohonen(int start_pattern, int end_pattern,
7775 		       float parameterInArray[], int NoOfInParams,
7776 		       float **parameterOutArray, int *NoOfOutParams)
7777 {
7778     static float    OutParameter[1];	/* OutParameter[0] stores the
7779 					   learning error  */
7780     int             ret_code, pattern_no, sub_pat_no;
7781 
7782     if (NoOfUnits == 0)
7783 	return (KRERR_NO_UNITS);/* No Units defined    */
7784     if (NoOfInParams < 5)	/* see Note  */
7785 	return (KRERR_PARAMETERS);	/* Not enough input parameters  */
7786 
7787     *NoOfOutParams = 1;		/* one return value is available (the
7788 				   learning error) */
7789     *parameterOutArray = OutParameter;	/* set output parameter reference  */
7790     ret_code = KRERR_NO_ERROR;	/* clear return code  */
7791 
7792 
7793     if (NetModified || (TopoSortID != TOPOLOGIC_TYPE)) {
7794 	/* Net has been modified or topologic array isn't initialized */
7795 	/* count the no. of I/O units and check the patterns  */
7796 	ret_code = kr_IOCheck();
7797 	if (ret_code == KRERR_NO_OUTPUT_UNITS)
7798 	    ret_code = KRERR_NO_ERROR;
7799 	if (ret_code < KRERR_NO_ERROR)
7800 	    return (ret_code);
7801 
7802 	/* sort units by topology and by topologic type  */
7803 	ret_code = kr_topoSort(TOPOLOGIC_TYPE);
7804 	if (ret_code == KRERR_NO_OUTPUT_UNITS)
7805 	    ret_code = KRERR_NO_ERROR;
7806 	if ((ret_code != KRERR_NO_ERROR) && (ret_code != KRERR_DEAD_UNITS))
7807 	    return (ret_code);
7808 
7809 	NetModified = FALSE;
7810     }
7811     if (NetInitialize || LearnFuncHasChanged) {	/* Net has been modified or
7812 						   initialized, initialize
7813 						   kohonen now  */
7814 	ret_code = initializeKohonenLearning();
7815 	if (ret_code != KRERR_NO_ERROR)
7816 	    return (ret_code);
7817     }
7818     if ((int) LEARN_PARAM5(parameterInArray) == 0) {
7819 	ret_code = KRERR_PARAMETERS;
7820 	return (ret_code);
7821     }
7822     if ((LEARN_PARAM3(parameterInArray) > 1.0) ||
7823 	(LEARN_PARAM3(parameterInArray) < 0.0)) {
7824 	ret_code = KRERR_PARAMETERS;
7825 	return (ret_code);
7826     }
7827     if ((LEARN_PARAM4(parameterInArray) > 1.0) ||
7828 	(LEARN_PARAM4(parameterInArray) < 0.0)) {
7829 	ret_code = KRERR_PARAMETERS;
7830 	return (ret_code);
7831     }
7832 
7833 
7834     /* compute the necessary sub patterns */
7835 
7836     KernelErrorCode = kr_initSubPatternOrder(start_pattern,end_pattern);
7837     if(KernelErrorCode != KRERR_NO_ERROR)
7838 	return (KernelErrorCode);
7839 
7840 
7841     NET_ERROR(OutParameter) = 0.0;	/* reset network error value  */
7842     while(kr_getSubPatternByOrder(&pattern_no,&sub_pat_no)){
7843 
7844 	NoOfLearnedPatterns++;
7845 	NET_ERROR(OutParameter) +=
7846 	    propagateNet_kohonen(pattern_no,sub_pat_no,
7847 				 LEARN_PARAM1(parameterInArray),
7848 				 LEARN_PARAM2(parameterInArray),
7849 				 (int) LEARN_PARAM5(parameterInArray));
7850 
7851 	LEARN_PARAM1(parameterInArray) *= LEARN_PARAM3(parameterInArray);
7852 	LEARN_PARAM2(parameterInArray) *= LEARN_PARAM4(parameterInArray);
7853 
7854     }
7855     return (ret_code);
7856 }
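
/*  Note on LEARN_kohonen's decay schedule: the adaptation height and radius
    are multiplied by their factors once per presented sub pattern, so after
    n patterns  height = initial_height * factor^n.  With an initial height
    of 1.0 and a factor of 0.99, for example, the height has decayed to
    about 0.366 after 100 patterns.  */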
7857 
7858 
7859 /*****************************************************************************
7860   FUNCTION : spanning_tree
7861 
7862   PURPOSE  : calculate the spanning tree of the kohonen feature map
  NOTES    : the learn function is called with height and radius 0, so
             the net itself is not changed
7864 
  UPDATE   : 13.07.1993
7866 ******************************************************************************/
7867 krui_err  spanning_tree(void)
7868 {
7869     register TopoPtrArray topo_ptr;
7870     register struct Unit *unit_ptr;
7871     int             ret_code, n, pattern_no, sub_pat_no;
7872 
7873 
7874     if (NoOfUnits == 0)
7875 	return (KRERR_NO_UNITS);/* No Units defined    */
7876 
7877     ret_code = KRERR_NO_ERROR;	/* clear return code  */
7878 
7879 
7880     if (NetModified || (TopoSortID != TOPOLOGIC_TYPE)) {
7881 	/* Net has been modified or topologic array isn't initialized */
7882 	/* count the no. of I/O units and check the patterns  */
7883 	ret_code = kr_IOCheck();
7884 	if (ret_code == KRERR_NO_OUTPUT_UNITS)
7885 	    ret_code = KRERR_NO_ERROR;
7886 	if (ret_code < KRERR_NO_ERROR)
7887 	    return (ret_code);
7888 
7889 	/* sort units by topology and by topologic type  */
7890 	ret_code = kr_topoSort(TOPOLOGIC_TYPE);
7891 	if (ret_code == KRERR_NO_OUTPUT_UNITS)
7892 	    ret_code = KRERR_NO_ERROR;
7893 	if ((ret_code != KRERR_NO_ERROR) && (ret_code != KRERR_DEAD_UNITS))
7894 	    return (ret_code);
7895 
7896 	NetModified = FALSE;
7897     }
7898     if (NetInitialize || LearnFuncHasChanged) {	/* Net has been modified or
7899 						   initialized, initialize
7900 						   kohonen now  */
7901 	ret_code = initializeKohonenLearning();
7902 	if (ret_code != KRERR_NO_ERROR)
7903 	    return (ret_code);
7904     }
7905     topo_ptr = topo_ptr_array;
7906 
    /* skip the input units; the topo_ptr_array is sorted by input-, hidden-
       and output units, separated by NULL pointers */
    while ((unit_ptr = *++topo_ptr) != NULL);
7910 
7911     while ((unit_ptr = *++topo_ptr) != NULL)
7912 	/* topo_ptr points to hidden_units */
	unit_ptr->value_a = 0;	/* each hidden unit stores in value_a the
				   number of the pattern it wins; initialize
				   it to 0 first */
7916 
7917 
7918     n = 0;
7919     while(kr_getSubPatternByNo(&pattern_no,&sub_pat_no,n++)){
7920 
	/* To calculate the winning unit we call the propagateNet_kohonen
	   function and treat the map as a 1-dimensional array */
7923 	propagateNet_kohonen(pattern_no, sub_pat_no, 0.0, 0.0, 1);
7924     }
7925 
7926     return (ret_code);
7927 
7928 }/* spanning_tree */
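
/*  After spanning_tree() every hidden (Kohonen) unit holds in value_a the
    number of the last pattern it won (pattern_no + 1, set in
    propagateNet_kohonen), or 0 if it never won; presumably this is the
    data from which the spanning tree display is built.  */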
7929 
7930 
7931 
7932 
7933 /*****************************************************************************
7934 
7935   GROUP        : JORDAN / ELMAN networks
7936 
7937   PURPOSE      : learning functions for JORDAN / ELMAN networks
7938   AUTHOR       : Tobias Soyez
7939 
7940 ******************************************************************************/
7941 
7942 
7943 /*****************************************************************************
7944   FUNCTION : update_je_context_units
7945 
7946   PURPOSE  : synchronous update of context units
  NOTES    : the outputs of the output units are temporarily replaced by a
             blend of teacher output and real output (controlled by
             use_real_value_percent) while the context units are updated
7948 
7949   UPDATE   :
7950 ******************************************************************************/
7951 
7952 static void update_je_context_units (int pattern_no, int sub_pat_no,
7953 				     float use_real_value_percent)
7954 
7955 {
7956     register TopoPtrArray   topo_ptr, topo_ptr_context ;
7957     register struct Unit   *unit_ptr ;
7958     register Patterns       out_pat  ;
7959     int size;
7960 
7961 
7962     out_pat = kr_getSubPatData(pattern_no,sub_pat_no,OUTPUT,&size);
7963     out_pat += size;
7964 
    /* point to the NULL separator between output and context units */
    topo_ptr_context = topo_ptr_array + (no_of_topo_units + 3) ;
7966 
7967 
7968     /* ----  store real output ---- */
7969 
7970     if (use_real_value_percent > 1.0)
7971 	use_real_value_percent = 1.0;
7972     else
7973 	if (use_real_value_percent < 0.0)
7974 	    use_real_value_percent = 0.0;
7975 
7976     topo_ptr = topo_ptr_context ;
7977 
7978     while ((unit_ptr = *--topo_ptr) != NULL)
7979     {
7980       unit_ptr->actbuf[0]  = unit_ptr->Out.output ;
7981       unit_ptr->Out.output = (1.0 - use_real_value_percent) * *--out_pat +
7982 	                     use_real_value_percent * unit_ptr->Out.output;
7983     }
7984 
7985 
7986     /* ----  calculate new activation of context units ---- */
7987 
    topo_ptr = topo_ptr_context ;
7989 
7990     while ((unit_ptr = *++topo_ptr) != NULL)
7991     {
7992       unit_ptr->act = ((unit_ptr->act_func == ACT_Custom_Python) ?
7993 			kr_PythonActFunction(unit_ptr->python_act_func,
7994 						unit_ptr) :
7995 			(unit_ptr->act_func) (unit_ptr))  ;
7996 
7997       if (unit_ptr->out_func == OUT_IDENTITY)
7998         unit_ptr->Out.output = unit_ptr->act ;
7999       else if(unit_ptr->out_func == OUT_Custom_Python)
8000       	unit_ptr->Out.output =
8001 		kr_PythonOutFunction(unit_ptr->python_out_func,
8002 			unit_ptr->act);
8003       else
8004         unit_ptr->Out.output = (*unit_ptr->out_func) (unit_ptr->act) ;
8005     }
8006 
8007     /* ----  restore real output  ---- */
8008 
8009     topo_ptr = topo_ptr_context ;
8010 
8011     while ((unit_ptr = *--topo_ptr) != NULL)
8012     {
8013       unit_ptr->Out.output = unit_ptr->actbuf[0] ;
8014     }
8015 }
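
/*  Worked example for the blend above: with use_real_value_percent = 0.25,
    a teacher output of 1.0 and a computed output of 0.6, the context units
    are updated from 0.75 * 1.0 + 0.25 * 0.6 = 0.90.  */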
8016 
8017 
8018 /*****************************************************************************
8019   FUNCTION : reset_je_context_units
8020 
8021   PURPOSE  : resets the context units
8022   NOTES    :
8023 
8024   UPDATE   :
8025 ******************************************************************************/
8026 
8027 static void reset_je_context_units (void)
8028 
8029 {
8030   register TopoPtrArray   topo_ptr ;
8031   register struct Unit   *unit_ptr ;
8032 
8033 
8034   topo_ptr = topo_ptr_array + (no_of_topo_units + 3) ;
8035 
8036   while ((unit_ptr = *++topo_ptr) != NULL)
8037   {
8038     unit_ptr->act = unit_ptr->i_act ;
8039 
8040     if (unit_ptr->out_func == OUT_IDENTITY)
8041       unit_ptr->Out.output = unit_ptr->act ;
8042     else if(unit_ptr->out_func == OUT_Custom_Python)
8043     	unit_ptr->Out.output =
8044 		kr_PythonOutFunction(unit_ptr->python_out_func,
8045 			unit_ptr->act);
8046     else
8047       unit_ptr->Out.output = (*unit_ptr->out_func) (unit_ptr->act) ;
8048   }
8049 }
8050 
8051 
8052 /*****************************************************************************
8053   FUNCTION : check_je_network
8054 
8055   PURPOSE  : checks the topology of a partial recurrent network
8056              (i.e. JORDAN and ELMAN networks)
8057   NOTES    :
8058 
8059   UPDATE   :
8060 ******************************************************************************/
8061 static krui_err check_je_network (void)
8062 
8063 {
8064     /*  check the topology of the network  */
8065     (void) kr_topoCheckJE () ;
8066     if (KernelErrorCode != KRERR_NO_ERROR) return (KernelErrorCode) ;
8067 
8068     /*	count the no. of I/O units and check the patterns  */
8069     if (kr_IOCheck() != KRERR_NO_ERROR) return (KernelErrorCode) ;
8070 
8071     /*	sort units by topology and by topologic type  */
8072     (void) kr_topoSort (TOPOLOGICAL_JE) ;
8073 
8074     if ((KernelErrorCode != KRERR_NO_ERROR) &&
8075         (KernelErrorCode != KRERR_DEAD_UNITS))
8076       return (KernelErrorCode) ;
8077 
8078     NetModified = FALSE;
8079     return (KRERR_NO_ERROR) ;
8080 }
8081 
8082 
8083 
8084 /*****************************************************************************
8085   FUNCTION : LEARN_JE_Backprop
8086 
8087   PURPOSE  : backpropagation learning function for JORDAN / ELMAN networks
8088   NOTES    : input parameters  :   1. learning parameter
8089                                    2. delta max
8090 				   3. influence of real output
				      (= 0 -> pure teacher forcing)
8092              output parameters :   1. error of the network (sum of all cycles)
8093              return value      :   kernel error code
8094   UPDATE   :
8095 ******************************************************************************/
8096 krui_err  LEARN_JE_Backprop (int     start_pattern    , int  end_pattern ,
8097                              float  *parameterInArray , int  NoOfInParams,
8098                              float **parameterOutArray, int *NoOfOutParams)
8099 
8100 {
8101   static float  OutParameter[1] ; /* OutParameter[0] stores the */
8102                                   /* learning error             */
8103   int	        n, pattern_no,sub_pat_no ;
8104   int           start, end;
8105 
8106 
8107 
8108   KernelErrorCode = KRERR_NO_ERROR;  /*  reset return code  */
8109 
8110   if (NoOfInParams < 3) return (KRERR_PARAMETERS) ;
8111 
8112   *NoOfOutParams     = 1            ; /* one return value is available      */
8113                                       /* (the learning error)               */
8114   *parameterOutArray = OutParameter ; /* set the output parameter reference */
8115   NET_ERROR (OutParameter) = 0.0    ; /* reset network error value          */
8116 
8117   if (NetModified || (TopoSortID != TOPOLOGICAL_JE))
8118   {
8119      KernelErrorCode = check_je_network () ;
8120      if (KernelErrorCode != KRERR_NO_ERROR) return (KernelErrorCode) ;
8121   }
8122 
8123   reset_je_context_units () ;
8124 
8125 
8126     /* compute the necessary sub patterns */
8127 
8128     KernelErrorCode = kr_initSubPatternOrder(start_pattern,end_pattern);
8129     if(KernelErrorCode != KRERR_NO_ERROR)
8130 	return (KernelErrorCode);
8131 
8132     start = kr_AbsPosOfFirstSubPat(start_pattern);
8133     end   = kr_AbsPosOfFirstSubPat(end_pattern);
8134     end  += kr_NoOfSubPatPairs(end_pattern) - 1;
8135 
8136     for(n=start; n<=end; n++){
8137 
8138 	kr_getSubPatternByNo(&pattern_no,&sub_pat_no,n);
8139 
8140 	propagateNetForward (pattern_no,sub_pat_no) ;
8141 	NET_ERROR (OutParameter) +=
8142 	    propagateNetBackward2 (pattern_no,sub_pat_no,
8143 				   LEARN_PARAM1 (parameterInArray),
8144 				   LEARN_PARAM2 (parameterInArray)) ;
8145 	update_je_context_units (pattern_no,sub_pat_no,
8146 				 LEARN_PARAM3(parameterInArray)) ;
8147     }
8148 
8149   return (KernelErrorCode) ;
8150 }
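
/*  Illustrative call sketch (disabled; never compiled into the kernel): how
    a host program might drive LEARN_JE_Backprop directly.  The pattern range
    and the parameter values below are assumptions for the example only;
    normally the function is selected through the krui learning function
    interface.  */
#if 0
static krui_err train_je_epoch_sketch(void)
{
    float   params[3];
    float  *out_params;
    int     no_of_out_params;

    params[0] = 0.2f;	/* 1. learning parameter */
    params[1] = 0.1f;	/* 2. delta max */
    params[2] = 0.0f;	/* 3. influence of real output (0 = teacher forcing) */

    /* train on the first ten patterns; on return, out_params[0]
       holds the summed network error */
    return LEARN_JE_Backprop(0, 9, params, 3,
			     &out_params, &no_of_out_params);
}
#endif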
8151 
8152 
8153 /*****************************************************************************
8154   FUNCTION : TEST_JE_Backprop
8155 
  PURPOSE  : backpropagation test function for JORDAN / ELMAN networks
8157   NOTES    : input parameters  :   1. learning parameter
8158                                    2. delta max
8159 				   3. influence of real output
				      (= 0 -> pure teacher forcing)
8161 				      has no meaning for validation
8162              output parameters :   1. error of the network (sum of all cycles)
8163              return value      :   kernel error code
8164   UPDATE   :
8165 ******************************************************************************/
8166 krui_err  TEST_JE_Backprop (int     start_pattern    , int  end_pattern ,
8167                              float  *parameterInArray , int  NoOfInParams,
8168                              float **parameterOutArray, int *NoOfOutParams)
8169 
8170 {
8171   static float  OutParameter[1] ; /* OutParameter[0] stores the */
8172                                   /* learning error             */
8173   int	        n, pattern_no,sub_pat_no ;
8174   int           start, end;
8175 
8176 
8177   KernelErrorCode = KRERR_NO_ERROR;  /*  reset return code  */
8178 
8179   if (NoOfInParams < 3) return (KRERR_PARAMETERS) ;
8180 
8181   *NoOfOutParams     = 1            ; /* one return value is available      */
8182                                       /* (the learning error)               */
8183   *parameterOutArray = OutParameter ; /* set the output parameter reference */
8184   NET_ERROR (OutParameter) = 0.0    ; /* reset network error value          */
8185 
8186   if (NetModified || (TopoSortID != TOPOLOGICAL_JE))
8187   {
8188      KernelErrorCode = check_je_network () ;
8189      if (KernelErrorCode != KRERR_NO_ERROR) return (KernelErrorCode) ;
8190   }
8191 
8192   reset_je_context_units () ;
8193 
8194 
8195     /* compute the necessary sub patterns */
8196 
8197     KernelErrorCode = kr_initSubPatternOrder(start_pattern,end_pattern);
8198     if(KernelErrorCode != KRERR_NO_ERROR)
8199 	return (KernelErrorCode);
8200 
8201     start = kr_AbsPosOfFirstSubPat(start_pattern);
8202     end   = kr_AbsPosOfFirstSubPat(end_pattern);
8203     end  += kr_NoOfSubPatPairs(end_pattern) - 1;
8204 
8205     for(n=start; n<=end; n++){
8206 
8207 	kr_getSubPatternByNo(&pattern_no,&sub_pat_no,n);
8208 
8209 	propagateNetForward (pattern_no,sub_pat_no) ;
8210 	NET_ERROR (OutParameter) +=
8211 	    testNetBackward2 (pattern_no,sub_pat_no,
8212 				   LEARN_PARAM1 (parameterInArray),
8213 				   LEARN_PARAM2 (parameterInArray)) ;
8214 	test_update_je_context_units (pattern_no,sub_pat_no) ;
8215     }
8216 
8217   return (KernelErrorCode) ;
8218 }
8219 
8220 /*****************************************************************************
8221   FUNCTION : TEST_JE_BackpropMomentum
8222 
  PURPOSE  : test network with momentum term learning function
8224              for JORDAN / ELMAN networks
  NOTES    : input parameters  :   (1.-3. as in LEARN_JE_BackpropMomentum)
                                   4. delta max
                                   5. influence of real output
				      (= 0 -> pure teacher forcing)
				      has no meaning for validation
8229              output parameters :   1. error of the network (sum of all cycles)
8230              return value      :   kernel error code
8231   UPDATE   :
8232 ******************************************************************************/
8233 krui_err TEST_JE_BackpropMomentum(int start_pattern, int end_pattern,
8234 				   float *parameterInArray, int NoOfInParams,
8235 				   float **parameterOutArray,
8236 				   int *NoOfOutParams)
8237 
8238 {
8239   static float  OutParameter[1] ; /* OutParameter[0] stores the */
8240                                   /* learning error             */
8241   int	        n, pattern_no,sub_pat_no ;
8242   int           start, end;
8243 
8244   KernelErrorCode = KRERR_NO_ERROR;  /*  reset return code  */
8245 
8246   if (NoOfInParams < 5) return (KRERR_PARAMETERS) ;
8247 
8248   *NoOfOutParams     = 1            ; /* one return value is available      */
8249                                       /* (the learning error)               */
8250   *parameterOutArray = OutParameter ; /* set the output parameter reference */
8251   NET_ERROR (OutParameter) = 0.0    ; /* reset network error value          */
8252 
8253 
8254 
8255   if (NetModified || (TopoSortID != TOPOLOGICAL_JE))
8256   {
8257      KernelErrorCode = check_je_network () ;
8258      if (KernelErrorCode != KRERR_NO_ERROR) return (KernelErrorCode) ;
8259   }
8260 
8261   reset_je_context_units () ;
8262 
8263 
8264   /* compute the necessary sub patterns */
8265 
8266   KernelErrorCode = kr_initSubPatternOrder(start_pattern,end_pattern);
8267   if(KernelErrorCode != KRERR_NO_ERROR)
8268       return (KernelErrorCode);
8269 
8270   start = kr_AbsPosOfFirstSubPat(start_pattern);
8271   end   = kr_AbsPosOfFirstSubPat(end_pattern);
8272   end  += kr_NoOfSubPatPairs(end_pattern) - 1;
8273 
8274   for(n=start; n<=end; n++){
8275 
8276       kr_getSubPatternByNo(&pattern_no,&sub_pat_no,n);
8277 
8278       propagateNetForward (pattern_no,sub_pat_no) ;
8279       NET_ERROR (OutParameter) +=
8280 	  testNetBackward2  (pattern_no,sub_pat_no,
8281 				 LEARN_PARAM1( parameterInArray ),
8282 				 LEARN_PARAM4( parameterInArray )) ;
8283       test_update_je_context_units (pattern_no, sub_pat_no) ;
8284   }
8285 
8286   return (KernelErrorCode) ;
8287 }
8288 
8289 /*****************************************************************************
  FUNCTION : test_update_je_context_units
8291 
8292   PURPOSE  : synchronous update of context units
8293   NOTES    :
8294 
8295   UPDATE   : 03.03.95
8296 ******************************************************************************/
8297 
8298 static void test_update_je_context_units (int pattern_no, int sub_pat_no)
8299 
8300 {
8301     register TopoPtrArray   topo_ptr, topo_ptr_context ;
8302     register struct Unit   *unit_ptr ;
8303     register Patterns       out_pat  ;
8304     int size;
8305 
8306 
8307     out_pat = kr_getSubPatData(pattern_no,sub_pat_no,OUTPUT,&size);
8308     out_pat += size;
8309 
8310     topo_ptr_context = topo_ptr_array + (no_of_topo_units + 3) ;
8311 
8312 
8313     /* ----  store real output                        ---- */
8314 
8315     topo_ptr = topo_ptr_context ;
8316 
8317     while ((unit_ptr = *--topo_ptr) != NULL)
8318     {
8319       unit_ptr->actbuf[0]  = unit_ptr->Out.output ;
8320       unit_ptr->Out.output = *--out_pat ;
8321     }
8322 
8323 
8324     /* ----  calculate new activation of context units ---- */
8325 
    topo_ptr = topo_ptr_context ;
8327 
8328     while ((unit_ptr = *++topo_ptr) != NULL)
8329     {
8330       unit_ptr->act = ((unit_ptr->act_func == ACT_Custom_Python) ?
8331 			kr_PythonActFunction(unit_ptr->python_act_func,
8332 						unit_ptr) :
8333 			(unit_ptr->act_func) (unit_ptr))  ;
8334 
8335       if (unit_ptr->out_func == OUT_IDENTITY)
8336         unit_ptr->Out.output = unit_ptr->act ;
8337       else if(unit_ptr->out_func == OUT_Custom_Python)
8338       	unit_ptr->Out.output =
8339 		kr_PythonOutFunction(unit_ptr->python_out_func,
8340 			unit_ptr->act);
8341       else
8342         unit_ptr->Out.output = (*unit_ptr->out_func) (unit_ptr->act) ;
8343     }
8344 
8345     /* ----  restore real output  ---- */
8346 
8347     topo_ptr = topo_ptr_context ;
8348 
8349     while ((unit_ptr = *--topo_ptr) != NULL)
8350     {
8351       unit_ptr->Out.output = unit_ptr->actbuf[0] ;
8352     }
8353 
8354 }
8355 
8356 
8357 
8358 /*****************************************************************************
8359   FUNCTION : LEARN_JE_BackpropMomentum
8360 
  PURPOSE  : backpropagation with momentum term learning function
8362              for JORDAN / ELMAN networks
8363   NOTES    : input parameters  :   1. learning parameter
8364                                    2. momentum factor
8365                                    3. flat spot elimination
8366                                    4. delta max
8367 				   5. influence of real output
				      (= 0 -> pure teacher forcing)
8369              output parameters :   1. error of the network (sum of all cycles)
8370              return value      :   kernel error code
8371   UPDATE   :
8372 ******************************************************************************/
8373 krui_err LEARN_JE_BackpropMomentum(int start_pattern, int end_pattern,
8374 				   float *parameterInArray, int NoOfInParams,
8375 				   float **parameterOutArray,
8376 				   int *NoOfOutParams)
8377 
8378 {
8379   static float  OutParameter[1] ; /* OutParameter[0] stores the */
8380                                   /* learning error             */
8381   int	        n, pattern_no,sub_pat_no ;
8382   int           start, end;
8383 
8384   KernelErrorCode = KRERR_NO_ERROR;  /*  reset return code  */
8385 
8386   if (NoOfInParams < 5) return (KRERR_PARAMETERS) ;
8387 
8388   *NoOfOutParams     = 1            ; /* one return value is available      */
8389                                       /* (the learning error)               */
8390   *parameterOutArray = OutParameter ; /* set the output parameter reference */
8391   NET_ERROR (OutParameter) = 0.0    ; /* reset network error value          */
8392 
8393   if (NetModified || (TopoSortID != TOPOLOGICAL_JE))
8394   {
8395      KernelErrorCode = check_je_network () ;
8396      if (KernelErrorCode != KRERR_NO_ERROR) return (KernelErrorCode) ;
8397   }
8398 
8399   if (NetInitialize || LearnFuncHasChanged)
8400   {  /*  Net has been modified or initialized, initialize backprop now  */
8401     KernelErrorCode = initializeBackpropMomentum () ;
8402     if (KernelErrorCode != KRERR_NO_ERROR) return (KernelErrorCode) ;
8403   }
8404 
8405   reset_je_context_units () ;
8406 
8407 
8408   /* compute the necessary sub patterns */
8409 
8410   KernelErrorCode = kr_initSubPatternOrder(start_pattern,end_pattern);
8411   if(KernelErrorCode != KRERR_NO_ERROR)
8412       return (KernelErrorCode);
8413 
8414   start = kr_AbsPosOfFirstSubPat(start_pattern);
8415   end   = kr_AbsPosOfFirstSubPat(end_pattern);
8416   end  += kr_NoOfSubPatPairs(end_pattern) - 1;
8417 
8418   for(n=start; n<=end; n++){
8419 
8420       kr_getSubPatternByNo(&pattern_no,&sub_pat_no,n);
8421 
8422       propagateNetForward (pattern_no,sub_pat_no) ;
8423       NET_ERROR (OutParameter) +=
8424 	  Backprop_momentum_FSE (pattern_no,sub_pat_no,
8425 				 LEARN_PARAM1( parameterInArray ),
8426 				 LEARN_PARAM2( parameterInArray ),
8427 				 LEARN_PARAM3( parameterInArray ),
8428 				 LEARN_PARAM4( parameterInArray )) ;
8429       update_je_context_units (pattern_no, sub_pat_no,
8430 			       LEARN_PARAM5(parameterInArray)) ;
8431   }
8432 
8433   return (KernelErrorCode) ;
8434 }
8435 
8436 
8437 /*****************************************************************************
8438   FUNCTION : LEARN_JE_Quickprop
8439 
8440   PURPOSE  : quickprop learning function for JORDAN / ELMAN networks
8441   NOTES    : input parameters  :   1. learning parameter
8442                                    2. max. growth factor
8443                                    3. weight decay
8444                                    4. delta max
8445 				   5. influence of real output
				      (= 0 -> pure teacher forcing)
8447              output parameters :   1. error of the network (sum of all cycles)
8448              return value      :   kernel error code
8449   UPDATE   :
8450 ******************************************************************************/
8451 krui_err  LEARN_JE_Quickprop (int     start_pattern    , int  end_pattern ,
8452                               float  *parameterInArray , int  NoOfInParams,
8453                               float **parameterOutArray, int *NoOfOutParams)
8454 
8455 {
8456   static float  OutParameter[1] ; /* OutParameter[0] stores the */
8457                                   /* learning error             */
8458   int	        n,pattern_no,sub_pat_no ;
8459   int           start, end;
8460 
8461   KernelErrorCode = KRERR_NO_ERROR;  /*  reset return code  */
8462 
8463   if (NoOfInParams < 5) return (KRERR_PARAMETERS) ;
8464 
8465   *NoOfOutParams     = 1            ; /* one return value is available      */
8466                                       /* (the learning error)               */
8467   *parameterOutArray = OutParameter ; /* set the output parameter reference */
8468   NET_ERROR (OutParameter) = 0.0    ; /* reset network error value          */
8469 
8470   if (NetModified || (TopoSortID != TOPOLOGICAL_JE))
8471   {
8472      KernelErrorCode = check_je_network () ;
8473      if (KernelErrorCode != KRERR_NO_ERROR) return (KernelErrorCode) ;
8474   }
8475 
8476   if (NetInitialize || LearnFuncHasChanged)
8477   {
8478     /*  Net has been modified or initialized, initialize quickprop now  */
8479     KernelErrorCode = initializeQuickprop () ;
8480     if (KernelErrorCode != KRERR_NO_ERROR)  return (KernelErrorCode) ;
8481   }
8482 
8483 
8484   reset_je_context_units () ;
8485 
8486 
8487 
8488   /* compute the necessary sub patterns */
8489 
8490   KernelErrorCode = kr_initSubPatternOrder(start_pattern,end_pattern);
8491   if(KernelErrorCode != KRERR_NO_ERROR)
8492     return (KernelErrorCode);
8493 
8494   start = kr_AbsPosOfFirstSubPat(start_pattern);
8495   end   = kr_AbsPosOfFirstSubPat(end_pattern);
8496   end  += kr_NoOfSubPatPairs(end_pattern) - 1;
8497 
8498   for(n=start; n<=end; n++){
8499 
8500       kr_getSubPatternByNo(&pattern_no,&sub_pat_no,n);
8501 
8502       propagateNetForward (pattern_no,sub_pat_no) ;
8503       NET_ERROR(OutParameter) +=
8504 	  propagateNetBackwardQuickprop (pattern_no,sub_pat_no,
8505 					 LEARN_PARAM4 (parameterInArray)) ;
8506       update_je_context_units (pattern_no,sub_pat_no,
8507 			       LEARN_PARAM5(parameterInArray)) ;
8508   }
8509 
8510   MODI_quickprop (LEARN_PARAM1 (parameterInArray),
8511                   LEARN_PARAM2 (parameterInArray),
8512                   LEARN_PARAM3 (parameterInArray)) ;
8513 
8514   return (KernelErrorCode) ;
8515 }
8516 
8517 
8518 
8519 /*****************************************************************************
8520   FUNCTION : LEARN_JE_Rprop
8521 
8522   PURPOSE  : rprop learning function for JORDAN / ELMAN networks
8523   NOTES    : input parameters  :   1. delta 0
8524                                    2. delta max
				   3. weight decay (given as an exponent:
				      the decay term is 10^-parameter)
				   4. influence of real output
				      (= 0 -> pure teacher forcing)
8528              output parameters :   1. error of the network (sum of all cycles)
8529              return value      :   kernel error code
8530   UPDATE   :
8531 ******************************************************************************/
8532 krui_err  LEARN_JE_Rprop    (int     start_pattern    , int  end_pattern ,
8533                              float  *parameterInArray , int  NoOfInParams,
8534                              float **parameterOutArray, int *NoOfOutParams)
8535 
8536 {
8537   static float  OutParameter[1] ; /* OutParameter[0] stores the */
8538                                   /* learning error             */
8539   int	        pattern_no,sub_pat_no ;
8540   int           n, ret_code, blocksize ;
8541   float         maxeps, wd, update_value ;
8542   int           start, end;
8543 
8544 
8545   if (NoOfInParams < 4) return (KRERR_PARAMETERS) ;
8546 
8547   if (( update_value = LEARN_PARAM1 (parameterInArray)) == 0.0)
8548     update_value = RPROP_DEFAULT_UPDATE_VALUE;
8549   if ((maxeps = LEARN_PARAM2 (parameterInArray)) == 0.0)
8550     maxeps = RPROP_MAXEPS;
8551   if (!(( wd = LEARN_PARAM3( parameterInArray )) == 0.0))
8552       wd = (float) pow(10,(double)(- wd));
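      /* e.g. a parameter value of 4.0 yields a decay term of 10^-4 = 0.0001 */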
8553   if (update_value > maxeps) update_value = maxeps;
8554 
8555 
8556   KernelErrorCode = ret_code = KRERR_NO_ERROR;  /*  reset return code  */
8557 
8558 
8559   *NoOfOutParams     = 1            ; /* one return value is available      */
8560                                       /* (the learning error)               */
8561   *parameterOutArray = OutParameter ; /* set the output parameter reference */
8562   NET_ERROR (OutParameter) = 0.0    ; /* reset network error value          */
8563 
8564   if (NetModified || (TopoSortID != TOPOLOGICAL_JE))
8565   {
8566      KernelErrorCode = check_je_network () ;
8567      if (KernelErrorCode != KRERR_NO_ERROR) return (KernelErrorCode) ;
8568   }
8569 
8570   if (NetInitialize || LearnFuncHasChanged)
8571   {  /*  Net has been modified or initialized, initialize RPROP */
8572     ret_code = initializeRprop (update_value) ;
8573     if (ret_code != KRERR_NO_ERROR)  return (ret_code) ;
8574   }
8575 
  /* DEFAULTS: note that parameter 3 (the weight decay) doubles as the
     pattern block size here; a value of 0 selects end_pattern */
  if ((blocksize = LEARN_PARAM3 (parameterInArray)) == 0)
    blocksize = end_pattern;
8579 
8580   reset_je_context_units () ;
8581 
8582 
8583   /* compute the necessary sub patterns */
8584 
8585   KernelErrorCode = kr_initSubPatternOrder(start_pattern,blocksize);
8586   if(KernelErrorCode != KRERR_NO_ERROR)
8587       return (KernelErrorCode);
8588 
8589   start = kr_AbsPosOfFirstSubPat(start_pattern);
8590   end   = kr_AbsPosOfFirstSubPat(end_pattern);
8591   end  += kr_NoOfSubPatPairs(end_pattern) - 1;
8592 
8593   for(n=start; n<=end; n++){
8594 
8595       kr_getSubPatternByNo(&pattern_no,&sub_pat_no,n);
8596 
8597       propagateNetForward (pattern_no,sub_pat_no) ;
8598       NET_ERROR (OutParameter) +=
8599 	  propagateNetBackwardRprop (pattern_no,sub_pat_no) ;
8600       update_je_context_units (pattern_no,sub_pat_no,
8601 			       LEARN_PARAM4(parameterInArray)) ;
8602   }
8603   MODI_rprop (maxeps,wd) ;
8604   return (KernelErrorCode) ;
8605 }
8606 
8607 /*****************************************************************************
8608   FUNCTION : TEST_JE_Rprop
8609 
8610   PURPOSE  : rprop testing function for JORDAN / ELMAN networks
8611   NOTES    : input parameters  :   1. delta 0
8612                                    2. delta max
				   3. pattern block size
				      (0 = up to end_pattern)
				   4. influence of real output
				      (= 0 -> pure teacher forcing)
8616 				      has no meaning for validation
8617              output parameters :   1. error of the network (sum of all cycles)
8618              return value      :   kernel error code
8619   UPDATE   : 03.03.95 Joachim Danz
8620 ******************************************************************************/
8621 krui_err  TEST_JE_Rprop    (int     start_pattern    , int  end_pattern ,
8622 			    float  *parameterInArray , int  NoOfInParams,
8623 			    float **parameterOutArray, int *NoOfOutParams)
8624 
8625 {
8626   static float  OutParameter[1] ; /* OutParameter[0] stores the */
8627                                   /* learning error             */
8628   int	        pattern_no,sub_pat_no ;
8629   int           n, ret_code, blocksize ;
8630   int           start, end;
8631 
8632 
8633   if (NoOfInParams < 4) return (KRERR_PARAMETERS) ;
8634 
8635   KernelErrorCode = ret_code = KRERR_NO_ERROR;  /*  reset return code  */
8636 
8637 
8638   *NoOfOutParams     = 1            ; /* one return value is available      */
8639                                       /* (the learning error)               */
8640   *parameterOutArray = OutParameter ; /* set the output parameter reference */
8641   NET_ERROR (OutParameter) = 0.0    ; /* reset network error value          */
8642 
8643 
8644   /* DEFAULTS: */
8645   if ((blocksize = LEARN_PARAM3 (parameterInArray)) == 0)
8646     blocksize = end_pattern;
8647 
8648   if (NetModified || (TopoSortID != TOPOLOGICAL_JE))
8649   {
8650      KernelErrorCode = check_je_network () ;
8651      if (KernelErrorCode != KRERR_NO_ERROR) return (KernelErrorCode) ;
8652   }
8653 
8654   reset_je_context_units () ;
8655 
8656 
8657   /* compute the necessary sub patterns */
8658 
8659   KernelErrorCode = kr_initSubPatternOrder(start_pattern,blocksize);
8660   if(KernelErrorCode != KRERR_NO_ERROR)
8661       return (KernelErrorCode);
8662 
8663   start = kr_AbsPosOfFirstSubPat(start_pattern);
8664   end   = kr_AbsPosOfFirstSubPat(end_pattern);
8665   end  += kr_NoOfSubPatPairs(end_pattern) - 1;
8666 
8667   for(n=start; n<=end; n++){
8668 
8669       kr_getSubPatternByNo(&pattern_no,&sub_pat_no,n);
8670 
8671       propagateNetForward (pattern_no,sub_pat_no) ;
8672       NET_ERROR (OutParameter) +=
8673 	  testNetBackwardRprop (pattern_no,sub_pat_no) ;
8674       test_update_je_context_units (pattern_no,sub_pat_no) ;
8675   }
8676   return (KernelErrorCode) ;
8677 }
8678 
8679 
8680 
8681 /*****************************************************************************
8682 
  GROUP        : Functions for autoassociative memory networks

  PURPOSE      : Implement autoassociative memory networks, including
                 learning functions for Rumelhart & McClelland's delta rule
		 and Hebbian learning
8688   AUTHOR       : Jamie DeCoster
8689 
8690 ******************************************************************************/
8691 
8692 /*****************************************************************************
8693   FUNCTION : RM_propagate
8694 
  PURPOSE  : forward propagation for Rumelhart & McClelland's delta rule
8696   NOTES    :
8697 
8698   UPDATE   : 17.02.1994
8699 ******************************************************************************/
8700 static void RM_propagate (int pattern_no, int sub_pat_no, float prop_step)
8701 {
8702 
8703     int t;
8704     register struct Unit   *unit_ptr;
8705     register Patterns      in_pat;
8706     register TopoPtrArray  topo_ptr;
8707 
8708 
8709     /*  calculate startaddress for input pattern array  */
8710     in_pat = kr_getSubPatData(pattern_no,sub_pat_no,INPUT,NULL);
8711     if(in_pat == NULL){
8712 	KernelErrorCode = KRERR_NP_NO_SUCH_PATTERN;
8713 	return;
8714     }
8715 
8716     topo_ptr = topo_ptr_array;
8717 
8718     /*  copy pattern into input unit's activation and calculate output of the
8719 	input units */
8720     while ((unit_ptr = *++topo_ptr) != NULL){
8721 
	/*  topo_ptr points to a (topologically sorted) unit structure  */
8723 	if (unit_ptr->out_func == OUT_IDENTITY)
8724 	    /*  identity output function: don't call the output function  */
8725 	    unit_ptr->Out.output = unit_ptr->act = *in_pat++;
8726 	else if(unit_ptr->out_func == OUT_Custom_Python)
8727 		unit_ptr->Out.output =
8728 			kr_PythonOutFunction(unit_ptr->python_out_func,
8729 				unit_ptr->act = *in_pat++);
8730 	else
8731 	    /*  no identity output function: calculate unit's output also  */
8732 	    unit_ptr->Out.output =
8733 		(*unit_ptr->out_func) (unit_ptr->act = *in_pat++);
8734     }
8735 
8736     for (t=0; t < prop_step; ++t){
8737 
8738 	FOR_ALL_UNITS( unit_ptr )
8739 	    if UNIT_IN_USE( unit_ptr ){
8740 
8741 		/* update unit activations first  */
8742 		if ( !IS_INPUT_UNIT( unit_ptr))
8743 		    /*  unit isn't an input unit and is in use and enabled  */
8744 		    unit_ptr->act = ((unit_ptr->act_func == ACT_Custom_Python) ?
8745 			kr_PythonActFunction(unit_ptr->python_act_func,
8746 						unit_ptr) :
8747 			(unit_ptr->act_func) (unit_ptr)) ;
8748 
8749 		/* update unit outputs  */
8750 		if (unit_ptr->out_func == OUT_IDENTITY)
8751 		    /*  identity output function: don't call output function  */
8752 		    unit_ptr->Out.output = unit_ptr->act;
8753 		else if(unit_ptr->out_func == OUT_Custom_Python)
8754 			unit_ptr->Out.output =
8755 				kr_PythonOutFunction(unit_ptr->python_out_func,
8756 					unit_ptr->act);
8757 		else
8758 		    /*  calculate unit's output also  */
8759 		    unit_ptr->Out.output = (*unit_ptr->out_func)(unit_ptr->act);
8760 	    }
8761     }
8762 
8763 }
8764 
8765 
8766 /*****************************************************************************
8767   FUNCTION : RM_learn
8768 
  PURPOSE  : backward propagation for Rumelhart & McClelland's delta rule
8770   NOTES    :
8771 
8772   UPDATE   : 11.02.1994
8773 ******************************************************************************/
8774 static void RM_learn(float learn_parameter)
8775 {
8776     register struct Link *link_ptr;
8777     register struct Site *site_ptr;
8778     register struct Unit *unit_ptr;
8779     float ex_in, in_in, error, eta;
8780 
8781     eta = learn_parameter;
8782 
8783     FOR_ALL_UNITS (unit_ptr)
	if (!IS_INPUT_UNIT (unit_ptr)){
	    /* world (input) units don't learn, so they are skipped */

	    ex_in = in_in = 0;	/* ex_in stays 0 if the unit has no link
				   from a world unit */
8788 
8789 	    FOR_ALL_LINKS (unit_ptr, link_ptr)
8790 		if (IS_INPUT_UNIT (link_ptr->to))
8791 		    /* Determine the input from the world unit */
8792 		    ex_in = link_ptr->to->act * link_ptr->weight;
8793 		else
8794 		    /* Determine the input from the network */
8795 		    in_in += link_ptr->to->act * link_ptr->weight;
8796 
8797 	    /* Error defined as the difference between the world input and
8798 	       the input from the net */
8799 	    error = ex_in - in_in;
8800 
8801 	    /* Modify the weights */
8802 	    if (UNIT_HAS_DIRECT_INPUTS (unit_ptr)){
8803 		FOR_ALL_LINKS (unit_ptr, link_ptr)
8804 		    if (!IS_INPUT_UNIT (link_ptr->to))
8805 			/* The link between a world unit and its corresponding
8806 			   learning unit is always 1 */
8807 			link_ptr->weight += link_ptr->to->act * eta * error;
8808 	    }else{
8809 		FOR_ALL_SITES_AND_LINKS (unit_ptr, site_ptr, link_ptr)
8810 		    if (!IS_INPUT_UNIT (link_ptr->to))
8811 			link_ptr->weight += link_ptr->to->act * eta * error;
8812 	    }
8813 	}
8814 }
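
/*  Worked example for RM_learn: with eta = 0.1, a world input ex_in = 1.0
    and a net input in_in = 0.4, the error is 0.6; a learning link whose
    source unit has activation 0.5 is then strengthened by
    0.5 * 0.1 * 0.6 = 0.03.  */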
8815 
8816 
8817 
8818 /*****************************************************************************
8819   FUNCTION : LEARN_RM_delta
8820 
8821   PURPOSE  : McClelland & Rumelhart's learning rule
8822                       Input parameter:   1: learning parameter
8823 		                         2: no. of propagation steps
8824 		      Output parameter:  1:  Learning error
8825   NOTES    :
8826 
8827   UPDATE   : 11.02.1994
8828 ******************************************************************************/
8829 krui_err LEARN_RM_delta (int start_pattern, int end_pattern,
8830 			 float *parameterInArray, int NoOfInParams,
8831 			 float **parameterOutArray, int *NoOfOutParams)
8832 {
8833 
8834     static float OutParameter [1];
8835     int pattern_no,sub_pat_no;
8836     float Learn_p;
8837     float prop_step;
8838 
8839     KernelErrorCode = KRERR_NO_ERROR;
8840 
8841     /* Checking for learning parameter */
8842     if (NoOfInParams < 2){
8843 	KernelErrorCode = KRERR_PARAMETERS;
8844 	return (KernelErrorCode);
8845     }
8846 
8847     Learn_p = LEARN_PARAM1 (parameterInArray);
8848     prop_step = LEARN_PARAM2 (parameterInArray);
8849     if (prop_step == 0){
8850 	KernelErrorCode = KRERR_PARAMETERS;
8851 	return (KernelErrorCode);
8852     }
8853 
8854     *NoOfOutParams = 1; /* Out Parameter = Learning error */
8855     *parameterOutArray = OutParameter;
8856 
8857     (void) kr_topoSort (TOPOLOGIC_TYPE);
8858 
8859     /* compute the necessary sub patterns */
8860 
8861     KernelErrorCode = kr_initSubPatternOrder(start_pattern,end_pattern);
8862     if(KernelErrorCode != KRERR_NO_ERROR)
8863 	return (KernelErrorCode);
8864 
8865 
8866     /* reset network error value  */
8867     NET_ERROR (OutParameter) = 0.0;
8868 
8869     /* Determine order of pattern presentation */
8870     while(kr_getSubPatternByOrder(&pattern_no,&sub_pat_no)){
8871 
8872 	/* Propagate the pattern through the network */
8873 	RM_propagate (pattern_no,sub_pat_no,prop_step);
8874 
8875 	/* Update links */
8876 	RM_learn (Learn_p);
8877 
8878 	/* Compute network error */
8879 	NET_ERROR (OutParameter) += Hebb_error(prop_step);
8880     }
8881 
8882     return (KernelErrorCode);
8883 }
8884 
8885 
8886 /*****************************************************************************
8887   FUNCTION : Hebb_error
8888 
8889   PURPOSE  : Compute the error of the network for the Hebbian learning rule
8890   NOTES    :
8891 
8892   UPDATE   : 09.03.1994
8893 ******************************************************************************/
8894 static float Hebb_error(int NoOfTimes)
8895 {
8896 
8897     struct Unit *unit_ptr;
8898     struct Link *link_ptr;
8899     float error, sum_error, ex_in;
8900     int t;
8901 
8902 
8903     /* update unit activations first  */
8904     for(t=0; t < NoOfTimes; ++t){
8905 	FOR_ALL_UNITS( unit_ptr )
8906 	    if ( !IS_INPUT_UNIT( unit_ptr))
8907 		if UNIT_IN_USE( unit_ptr )
8908 		    /*  unit isn't an input unit and is in use and enabled  */
8909 		    unit_ptr->act = ((unit_ptr->act_func == ACT_Custom_Python) ?
8910 			kr_PythonActFunction(unit_ptr->python_act_func,
8911 						unit_ptr) :
8912 			(unit_ptr->act_func) (unit_ptr)) ;
8913 
8914 	/* update unit outputs */
8915 	FOR_ALL_UNITS( unit_ptr )
8916 	    if UNIT_IN_USE( unit_ptr )
8917 		if (unit_ptr->out_func == OUT_IDENTITY)
8918 		    /* there is no need to call the output function  */
8919 		    unit_ptr->Out.output = unit_ptr->act;
8920 		else if(unit_ptr->out_func == OUT_Custom_Python)
8921 			unit_ptr->Out.output =
8922 				kr_PythonOutFunction(unit_ptr->python_out_func,
8923 					unit_ptr->act);
8924 		else
8925 		    /* calculate unit's output also  */
8926 		    unit_ptr->Out.output = (*unit_ptr->out_func)(unit_ptr->act);
8927     }
8928 
8929 
8930     /* calculate the error defined as the difference between the internal
8931        and external inputs */
8932 
8933     sum_error = 0.0;
8934 
8935     FOR_ALL_UNITS (unit_ptr){
8936 	FOR_ALL_LINKS (unit_ptr, link_ptr)
8937 	    if (IS_INPUT_UNIT (link_ptr->to)){
8938 		ex_in = link_ptr->to->act;
8939 		error = ex_in - unit_ptr->act;
8940 		sum_error += error * error;
8941 	    }
8942     }
8943     return (sum_error);
8944 }
8945 
8946 
8947 
8948 /*****************************************************************************
  FUNCTION : LEARN_HEBB

  PURPOSE  : Hebbian learning rule
                      Input parameter:   1: learning parameter
		                         2: maximum absolute weight strength
					 3: no. of propagation steps
		      Output parameter:  1: Network error
8955   NOTES    :
8956 
8957   UPDATE   : 09.03.1994
8958 ******************************************************************************/
8959 krui_err LEARN_HEBB (int start_pattern, int end_pattern,
8960 		     float *parameterInArray, int NoOfInParams,
8961 		     float **parameterOutArray, int *NoOfOutParams)
8962 {
8963     static float OutParameter [1];
8964     int pattern_no, sub_pat_no;
8965     int NoOfTimes;
8966     float Learn_p, Weight_MAX;
8967     register struct Unit *unit_ptr;
8968     register struct Link *link_ptr;
8969     register struct Site *site_ptr;
8970     register Patterns in_pat;
8971     register TopoPtrArray topo_ptr;
8972 
8973 
8974     KernelErrorCode = KRERR_NO_ERROR;
8975 
8976     if (NoOfInParams < 3){  /* Checking for learning parameter */
8977 	KernelErrorCode = KRERR_PARAMETERS;
8978 	return (KernelErrorCode);
8979     }
8980 
8981     *NoOfOutParams = 1;  /* Out Parameter = Learning error */
8982     *parameterOutArray = OutParameter;
8983 
8984     Learn_p    = LEARN_PARAM1 (parameterInArray);
8985     Weight_MAX = LEARN_PARAM2 (parameterInArray);
8986     NoOfTimes  = (int)LEARN_PARAM3 (parameterInArray);
8987 
8988     if (NoOfTimes == 0){  /* Checking for learning parameter */
8989 	KernelErrorCode = KRERR_PARAMETERS;
8990 	return (KernelErrorCode);
8991     }
8992 
8993     kr_topoSort (TOPOLOGIC_TYPE);
8994 
8995     /* compute the necessary sub patterns */
8996 
8997     KernelErrorCode = kr_initSubPatternOrder(start_pattern,end_pattern);
8998     if(KernelErrorCode != KRERR_NO_ERROR)
8999         return (KernelErrorCode);
9000 
9001 
9002     /* reset network error value  */
9003     NET_ERROR (OutParameter) = 0.0;
9004 
9005     /* Determine order of pattern presentation */
9006     while(kr_getSubPatternByOrder(&pattern_no,&sub_pat_no)){
9007 
	/* calculate startaddress for input pattern array */
	in_pat = kr_getSubPatData(pattern_no,sub_pat_no,INPUT,NULL);
9010 
9011 	topo_ptr = topo_ptr_array;
9012 
9013 	/* copy pattern into input units  and calculate their output */
9014 	while ((unit_ptr = *++topo_ptr) != NULL){
9015 	    /* topo_ptr points to a unit structure (input units first) */
9016 	    if (unit_ptr->out_func == OUT_IDENTITY)
9017 		/* identity output function */
9018 		unit_ptr->Out.output = unit_ptr->act = *in_pat++;
9019 	    else if(unit_ptr->out_func == OUT_Custom_Python)
9020 	    	unit_ptr->Out.output =
9021 			kr_PythonOutFunction(unit_ptr->python_out_func,
9022 				unit_ptr->act = *in_pat++);
9023 	    else
9024 		/* calculate unit's output */
9025 		unit_ptr->Out.output =
9026 		    (*unit_ptr->out_func) (unit_ptr->act = *in_pat++);
9027 	}
9028 
9029 	/* copy pattern from the world units to the learning units */
9030 	FOR_ALL_UNITS (unit_ptr)
9031 	    FOR_ALL_LINKS (unit_ptr, link_ptr)
9032 		if (IS_INPUT_UNIT (link_ptr->to))
9033 		    unit_ptr->act = link_ptr->to->act;
9034 
9035 	/* Network has the same structure as the RM_delta autoassociative
9036 	   network. Here we update the learning unit links. */
9037 	FOR_ALL_UNITS (unit_ptr)
9038 	    if (!IS_INPUT_UNIT (unit_ptr)){
9039 
9040 		/* Update the links */
9041 		if (UNIT_HAS_DIRECT_INPUTS (unit_ptr)){
9042 		    FOR_ALL_LINKS (unit_ptr, link_ptr)
9043 			if (!IS_INPUT_UNIT (link_ptr->to)){
9044 			    /* Only change learning links */
9045 			    link_ptr->weight +=
9046 				Learn_p * unit_ptr->act * (link_ptr->to->act);
9047 			    if (link_ptr->weight > Weight_MAX)
9048 				link_ptr->weight = Weight_MAX;
9049 			    if (link_ptr->weight < -Weight_MAX)
9050 				link_ptr->weight = -Weight_MAX;
9051 			}
9052 		}else{
9053 		    FOR_ALL_SITES_AND_LINKS (unit_ptr, site_ptr, link_ptr)
9054 			if (!IS_INPUT_UNIT (link_ptr->to)){
9055 			    link_ptr->weight +=
9056 				Learn_p * unit_ptr->act * (link_ptr->to->act);
9057 			    if (link_ptr->weight > Weight_MAX)
9058 				link_ptr->weight = Weight_MAX;
9059 			    if (link_ptr->weight < -Weight_MAX)
9060 				link_ptr->weight = -Weight_MAX;
9061 			}
9062 		}
9063 	    }
9064 
9065 	NET_ERROR (OutParameter) += Hebb_error (NoOfTimes);
9066     }
9067     return (KernelErrorCode);
9068 }
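
/*  Worked example for the Hebbian update above: with Learn_p = 0.5 and two
    connected units both at activation 1.0, the weight between them grows by
    0.5 * 1.0 * 1.0 = 0.5 per presentation; starting from 0 and with
    Weight_MAX = 1.0 it reaches the clipping bound after two presentations
    and stays there.  */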
9069 
9070