/** @file percinit.cpp

  @brief CPerceptronNN weights initialization

  @author Jakub Adámek
  Last modified $Id: percinit.cc,v 1.2 2002/04/19 17:44:55 jakubadamek Exp $
*/

#include "perceptron.h"
#include "utils.h"

#include <stdlib.h>	/* rand, RAND_MAX */
#include <time.h>
#include <math.h>
#include <sys/timeb.h>

/** Returns a pseudo-random value drawn from an approximation of the Gauss
	normal distribution with the given average and standard deviation.
	The quantile of the standard normal distribution is looked up in a table
	for the probabilities 0.05, 0.10, ..., 0.5; for a probability > 0.5 the
	symmetric value -quantile(1-probability) is used. */

double normalDistribution (double average, double stdDev)
{
	double probability = double(rand())/RAND_MAX;

	// Table with quantiles of the standard normal distribution for the
	// probabilities 0.05, 0.1, 0.15, ..., 0.5;
	// for probability > 0.5 the result is -normalDist(1-prob)
	double tab [10] =
		{ -1.64 /*0.05*/,-1.28/*0.1*/,-1.04,-0.84,-0.67,-0.52,-0.39,-0.25,-0.13,0 /*0.5*/};

	bool big = probability > 0.5;
	if (big) probability = 1 - probability;
	// map the probability onto the nearest table entry
	int index = int (((probability + 0.0025) * 20) - 1);
	if (index == -1) index = 0;
	double retVal = big ? -tab[index] : tab[index];
	return retVal * stdDev + average;
}
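
/* A minimal usage sketch (illustration only, not compiled): drawing a few
   samples to sanity-check the table-based approximation. Seeding of rand(),
   e.g. via srand(), is assumed to happen elsewhere in the application.

	srand ((unsigned) time (NULL));
	double sum = 0;
	for (int i = 0; i < 1000; ++i)
		sum += normalDistribution (0, 1);
	// with average = 0 and stdDev = 1 the sample mean should stay near 0
*/

/** Nguyen-Widrow weight initialization: every neuron gets a random
	unit-length weight vector scaled by kappa * activeRegion * p^(1/n),
	where p is the size of the neuron's layer and n the size of the
	previous layer, adjusted to the range of the input signals; the bias
	is then drawn uniformly from an interval derived from the first
	input weight. */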
void CPerceptronNN::weightInitNguyenWidrow ()
{
	// region in which the transfer function is not saturated
	double activeRegion = 2,
		// max and min value of input signals
		xmax, xmin;
	// overlapping factor
	double kappa = 0.7;
	int inpmin, inpmax, inp, layer, il;
	TVectorFloat a;
	double norm;
	for (int neu=layerSizes[0]; neu < neurons.size(); ++neu) {
		// neurons of the first hidden layer receive the network inputs (-1..1),
		// all later layers receive sigmoid outputs (0..1)
		if (neu < layerSizes[0] + layerSizes[1])
			{ xmin = -1; xmax = 1; }
		else { xmin = 0; xmax = 1; }
		layer = neurons[neu].layer;
		// index range of the neurons in the previous layer
		inpmin = 0;
		for (il = 0; il < layer-1; ++il)
			inpmin += layerSizes[il];
		inpmax = inpmin + layerSizes[il];
		// random direction vector a, normalized below to unit length
		a.Realloc (inpmax - inpmin);
		norm = 0;
		for (inp=0; inp < inpmax - inpmin; ++inp) {
			a[inp] = 2.0 * double(rand())/RAND_MAX - 1;
			norm += sqr(a[inp]);
		}
		norm = sqrt (norm);
		// scale every existing connection by the Nguyen-Widrow factor
		// kappa * activeRegion * p^(1/n) (p = this layer's size, n = previous layer's size)
		for (int inp=inpmin; inp < inpmax; ++inp)
			if (weights->d[getConnection(inp,neu)])
				weights->d[getConnection(inp,neu)] =
					(a[inp-inpmin] / norm) * activeRegion * kappa
					* pow (layerSizes[layer],1/double(layerSizes[layer-1]))
					/ (xmax - xmin);
		// bias (connection from the constant input, index -1):
		// drawn uniformly from an interval shifted by the first input weight
		xmin = -1 - xmax * weights->d[getConnection(inpmin,neu)];
		xmax = 1 - xmin * weights->d[getConnection(inpmin,neu)];
		if (weights->d[getConnection(-1,neu)])
			weights->d[getConnection(-1,neu)] = (xmax-xmin)*double(rand()) / RAND_MAX + xmin;
	}
}
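
/* A small worked example of the scale factor above (a sketch with assumed
   layer sizes, not values taken from this code): for p = 10 neurons in the
   layer and n = 4 neurons feeding it, with inputs in -1..1,
	0.7 * 2 * pow (10.0, 1.0/4.0) / 2  =  0.7 * 2 * 1.778 / 2  ~  1.24
   so every nonzero incoming weight ends up with magnitude around 1.24
   times its component of the random unit vector. */
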
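/** Sets all weights to 1 and then applies the configured initialization
	procedure: Nguyen-Widrow, or weights drawn at random from an approximate
	normal distribution with standard deviation 1/sqrt(inputCount). */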
void CPerceptronNN::weightInit ()
{
	TPerceptron * nStart = NULL, * nEnd = NULL;
	int con;

	// default initialization: set all weights to 1
	for (con=0; con < weights->d.size(); ++con)
		weights->d[con] = 1;

	switch ((int)weightInitProcedure) {
	case WI_NGUYEN_WIDROW: weightInitNguyenWidrow (); break;
	case WI_RANDOM:
		// walk over all connections and draw each weight at random
		while (next_connection (nStart,nEnd,con,&seriesNeurons[0]))
			weights->d[con] = normalDistribution (0,1.0/sqrt(nEnd->inputCount));
		break;
	}
}