/*
 * =====================================================================================
 *
 *       Filename:  fsom.c
 *
 *    Description:  Manage a self-organizing map (SOM) as a neural network
 *
 *        Version:  0.1
 *        Created:  15/10/2010 13:53:31
 *       Revision:  none
 *       Compiler:  gcc
 *
 *         Author:  BlackLight (http://0x00.ath.cx), <blacklight@autistici.org>
 *    Contributor:  evilsocket (http://www.evilsocket.net), <evilsocket@gmail.com>
 *        Licence:  GNU GPL v.3
 *        Company:  DO WHAT YOU WANT CAUSE A PIRATE IS FREE, YOU ARE A PIRATE!
 *
 * =====================================================================================
 */

#include	"fsom.h"

#include	<float.h>
#include	<limits.h>
#include	<math.h>
#include	<memory.h>
#include	<stdio.h>
#include	<stdlib.h>
#include	<time.h>

#ifndef 	M_E
#define 	M_E		2.7182818284590452354
#endif

/* Forward declarations for the layer destructors used in error paths below */
static void som_input_layer_destroy  ( som_network_t *net );
static void som_output_layer_destroy ( som_network_t *net );

/**
 * \brief  Create a new synapsis between two neurons
 * \param  input_neuron 	Input neuron for the synapsis
 * \param  output_neuron 	Output neuron for the synapsis
 * \param  weight 		Weight of the synapsis (set it to 0 for a random value between 0 and 1)
 * \return A pointer representing the new synapsis
 */

static som_synapsis_t*
som_synapsis_new ( som_neuron_t *input_neuron, som_neuron_t *output_neuron, double weight )
{
	som_synapsis_t  *synapsis = NULL;
	som_synapsis_t **tmp      = NULL;

	if ( !( synapsis = ( som_synapsis_t* ) malloc ( sizeof ( som_synapsis_t ))))
	{
		return NULL;
	}

	synapsis->neuron_in  = input_neuron;
	synapsis->neuron_out = output_neuron;

	if ( weight == 0.0 )
	{
		/* rand() yields a value in [0, RAND_MAX], so divide by RAND_MAX
		 * (not UINT_MAX) to spread the weight uniformly over [0, 1] */
		synapsis->weight = (double) rand() / (double) RAND_MAX;
	} else {
		synapsis->weight = weight;
	}

	/* The synapse lists hold pointers, so they grow by sizeof ( som_synapsis_t* );
	 * keep the old pointer aside so that a failed realloc() does not leak it */
	if ( !( tmp = ( som_synapsis_t** ) realloc ( input_neuron->synapses, ( input_neuron->synapses_count + 1 ) * sizeof ( som_synapsis_t* ))))
	{
		free ( synapsis );
		return NULL;
	}

	input_neuron->synapses = tmp;

	if ( !( tmp = ( som_synapsis_t** ) realloc ( output_neuron->synapses, ( output_neuron->synapses_count + 1 ) * sizeof ( som_synapsis_t* ))))
	{
		free ( synapsis );
		return NULL;
	}

	output_neuron->synapses = tmp;

	input_neuron->synapses [ input_neuron->synapses_count++  ] = synapsis;
	output_neuron->synapses[ output_neuron->synapses_count++ ] = synapsis;
	return synapsis;
}		/* -----  end of function som_synapsis_new  ----- */
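
/*
 * Both endpoints keep a pointer to the same som_synapsis_t, so each
 * connection is allocated once and must be freed once.  A minimal sketch of
 * direct usage (normally som_connect_layers() does this for you; the
 * variable names are illustrative only):
 *
 *     som_neuron_t   *in  = som_neuron_new();
 *     som_neuron_t   *out = som_neuron_new();
 *     som_synapsis_t *s   = som_synapsis_new ( in, out, 0.0 );
 *
 * after which s == in->synapses[0] and s == out->synapses[0] both hold.
 */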


/**
 * \brief  Create a new neuron
 * \return The new neuron
 */
#define som_neuron_new() ( som_neuron_t *)calloc( 1, sizeof ( som_neuron_t ) )
/* -----  end of macro som_neuron_new  ----- */

/**
 * \brief  Deallocate a neuron
 * \param  neuron 	Neuron to be deallocated
 */
#define som_neuron_destroy( neuron ) do { if ( neuron ) { free ( neuron ); ( neuron ) = NULL; } } while ( 0 )
/* -----  end of macro som_neuron_destroy  ----- */

/**
 * \brief  Create a new input layer
 * \param  neurons_count 	Number of neurons in the new input layer
 * \return The new layer
 */

static som_input_layer_t*
som_input_layer_new ( size_t neurons_count )
{
	size_t i = 0,
		  j = 0;

	som_input_layer_t *layer = NULL;

	if ( !( layer = ( som_input_layer_t* ) malloc ( sizeof ( som_input_layer_t ))))
	{
		return NULL;
	}

	layer->neurons_count = neurons_count;

	if ( !( layer->neurons = ( som_neuron_t** ) malloc ( neurons_count * sizeof ( som_neuron_t* ))))
	{
		free ( layer );
		return NULL;
	}

	for ( i=0; i < neurons_count; ++i )
	{
		if ( !( layer->neurons[i] = som_neuron_new() ))
		{
			for ( j=0; j < i; ++j )
			{
				som_neuron_destroy ( layer->neurons[j] );
			}

			free ( layer->neurons );
			free ( layer );
			return NULL;
		}
	}

	return layer;
}		/* -----  end of function som_input_layer_new  ----- */

/**
 * \brief  Create a new output layer
 * \param  neurons_rows 	Number of rows in the matrix of output neurons
 * \param  neurons_cols 	Number of cols in the matrix of output neurons
 * \return The new layer
 */

static som_output_layer_t*
som_output_layer_new ( size_t neurons_rows, size_t neurons_cols )
{
	size_t i = 0,
		  j = 0,
		  k = 0,
		  l = 0;

	som_output_layer_t *layer = NULL;

	if ( !( layer = ( som_output_layer_t* ) malloc ( sizeof ( som_output_layer_t ))))
	{
		return NULL;
	}

	layer->neurons_rows = neurons_rows;
	layer->neurons_cols = neurons_cols;

	/* This is an array of neurons_rows row pointers, not one of
	 * neurons_rows * neurons_cols elements */
	if ( !( layer->neurons = ( som_neuron_t*** ) malloc ( neurons_rows * sizeof ( som_neuron_t** ))))
	{
		free ( layer );
		return NULL;
	}

	for ( i=0; i < neurons_rows; ++i )
	{
		if ( !( layer->neurons[i] = ( som_neuron_t** ) malloc ( neurons_cols * sizeof ( som_neuron_t* ))))
		{
			for ( j=0; j < i; ++j )
			{
				free ( layer->neurons[j] );
				layer->neurons[j] = NULL;
			}

			free ( layer->neurons );
			free ( layer );
			return NULL;
		}
	}

	for ( i=0; i < neurons_rows; ++i )
	{
		for ( j=0; j < neurons_cols; ++j )
		{
			if ( !( layer->neurons[i][j] = som_neuron_new() ))
			{
				/* Destroy every neuron allocated so far: all the columns of
				 * the complete rows before row i, plus the first j columns
				 * of row i itself */
				for ( k=0; k <= i; ++k )
				{
					for ( l=0; l < (( k < i ) ? neurons_cols : j ); ++l )
					{
						som_neuron_destroy ( layer->neurons[k][l] );
					}
				}

				for ( k=0; k < neurons_rows; ++k )
				{
					free ( layer->neurons[k] );
				}

				free ( layer->neurons );
				free ( layer );
				return NULL;
			}
		}
	}

	return layer;
}		/* -----  end of function som_output_layer_new  ----- */

/**
 * \brief  Connect two layers of a neural SOM
 * \param  input_layer 	Reference to the input layer
 * \param  output_layer 	Reference to the output layer
 */

static void
som_connect_layers ( som_input_layer_t **input_layer, som_output_layer_t **output_layer )
{
	size_t i = 0,
		  j = 0,
		  k = 0;

	for ( i=0; i < (*output_layer)->neurons_rows; ++i )
	{
		for ( j=0; j < (*output_layer)->neurons_cols; ++j )
		{
			for ( k=0; k < (*input_layer)->neurons_count; ++k )
			{
				if ( !( som_synapsis_new ( (*input_layer)->neurons[k], (*output_layer)->neurons[i][j], 0.0 )))
				{
					return;
				}
			}
		}
	}
}		/* -----  end of function som_connect_layers  ----- */
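
/*
 * Note that the layers end up fully connected: for an R x C output grid over
 * an N-dimensional input layer this creates R * C * N synapses, and the
 * creation order above guarantees that each output neuron's synapses[k] is
 * its connection to input neuron k.  The training and weight-initialization
 * code below relies on this alignment.
 */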

/**
 * \brief  Initialize a new SOM neural network
 * \param  input_neurons 		Number of neurons in the input layer
 * \param  output_neurons_rows 	Number of rows of neurons in the output layer
 * \param  output_neurons_cols 	Number of cols of neurons in the output layer
 * \return The new SOM neural network
 */

som_network_t*
som_network_new ( size_t input_neurons, size_t output_neurons_rows, size_t output_neurons_cols )
{
	som_network_t *net = NULL;
	srand ( time ( NULL ));

	if ( !( net = ( som_network_t* ) calloc ( 1, sizeof ( som_network_t ))))
	{
		return NULL;
	}

	if ( !( net->input_layer = som_input_layer_new ( input_neurons )))
	{
		free ( net );
		return NULL;
	}

	if ( !( net->output_layer = som_output_layer_new ( output_neurons_rows, output_neurons_cols )))
	{
		/* Release the input layer's neurons too, not only the layer struct */
		som_input_layer_destroy ( net );
		free ( net );
		return NULL;
	}

	net->T_learning_param = 0.0;
	net->serialization_time = ( time_t ) 0;
	som_connect_layers ( &( net->input_layer ), &( net->output_layer ));
	return net;
}		/* -----  end of function som_network_new  ----- */
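
/*
 * A minimal creation/teardown sketch (the dimensions are arbitrary
 * examples):
 *
 *     som_network_t *net = som_network_new ( 3, 10, 10 );
 *     if ( net )
 *     {
 *         ... use the 10x10 map over 3-dimensional inputs ...
 *         som_network_destroy ( net );
 *     }
 */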

/**
 * \brief  Deallocate an input layer
 * \param  net 	Network whose input layer should be deallocated
 */

static void
som_input_layer_destroy ( som_network_t *net )
{
	size_t i = 0,
		  j = 0,
		  k = 0;

	som_synapsis_t *synapsis = NULL;

	if ( !( net->input_layer ))
	{
		return;
	}

	for ( i=0; i < net->input_layer->neurons_count; ++i )
	{
		if ( net->input_layer->neurons[i]->synapses )
		{
			for ( j=0; j < net->input_layer->neurons[i]->synapses_count; ++j )
			{
				if ( !( synapsis = net->input_layer->neurons[i]->synapses[j] ))
				{
					continue;
				}

				/* Null out the output neuron's reference to this synapsis, so
				 * that som_output_layer_destroy() does not free it a second time */
				if ( synapsis->neuron_out )
				{
					for ( k=0; k < synapsis->neuron_out->synapses_count; ++k )
					{
						if ( synapsis->neuron_out->synapses[k] == synapsis )
						{
							synapsis->neuron_out->synapses[k] = NULL;
						}
					}
				}

				free ( synapsis );
				net->input_layer->neurons[i]->synapses[j] = NULL;
			}

			/* Free the synapse array only once, after all of its entries */
			free ( net->input_layer->neurons[i]->synapses );
			net->input_layer->neurons[i]->synapses = NULL;
		}

		som_neuron_destroy ( net->input_layer->neurons[i] );
	}

	free ( net->input_layer->neurons );
	net->input_layer->neurons = NULL;

	free ( net->input_layer );
	net->input_layer = NULL;
}		/* -----  end of function som_input_layer_destroy  ----- */

/**
 * \brief  Deallocate an output layer
 * \param  net 	Network whose output layer should be deallocated
 */

static void
som_output_layer_destroy ( som_network_t *net )
{
	size_t i = 0,
		  j = 0,
		  k = 0;

	if ( !( net->output_layer ))
	{
		return;
	}

	for ( i=0; i < net->output_layer->neurons_rows; ++i )
	{
		for ( j=0; j < net->output_layer->neurons_cols; ++j )
		{
			if ( net->output_layer->neurons[i][j]->synapses )
			{
				for ( k=0; k < net->output_layer->neurons[i][j]->synapses_count; ++k )
				{
					/* Entries already freed from the input side are NULL here */
					if ( net->output_layer->neurons[i][j]->synapses[k] )
					{
						free ( net->output_layer->neurons[i][j]->synapses[k] );
						net->output_layer->neurons[i][j]->synapses[k] = NULL;
					}
				}

				/* Free the synapse array only once, after all of its entries */
				free ( net->output_layer->neurons[i][j]->synapses );
				net->output_layer->neurons[i][j]->synapses = NULL;
			}

			som_neuron_destroy ( net->output_layer->neurons[i][j] );
		}

		free ( net->output_layer->neurons[i] );
		net->output_layer->neurons[i] = NULL;
	}

	free ( net->output_layer->neurons );
	net->output_layer->neurons = NULL;

	free ( net->output_layer );
	net->output_layer = NULL;
}		/* -----  end of function som_output_layer_destroy  ----- */

/**
 * \brief  Deallocate a SOM neural network
 * \param  net 	Network to be deallocated
 */

void
som_network_destroy ( som_network_t *net )
{
	if ( !net )
	{
		return;
	}

	som_input_layer_destroy  ( net );
	som_output_layer_destroy ( net );
	free ( net );
}		/* -----  end of function som_network_destroy  ----- */

/**
 * \brief  Set a vector as input for the network
 * \param  net 	SOM neural network
 * \param  data 	Vector to be passed as input for the network
 */

void
som_set_inputs ( som_network_t *net, double *data )
{
	size_t i = 0;

	for ( i=0; i < net->input_layer->neurons_count; ++i )
	{
		net->input_layer->neurons[i]->input = data[i];
	}
}		/* -----  end of function som_set_inputs  ----- */

/**
 * \brief  Get the coordinates of the output neuron closest to the current input data
 * \param  net 	SOM neural network
 * \param  x 		Reference to the X coordinate of the best output neuron
 * \param  y 		Reference to the Y coordinate of the best output neuron
 * \return The value of the module ||X-W|| (squared euclidean distance) for the best neuron
 */

double
som_get_best_neuron_coordinates ( som_network_t *net, size_t *x, size_t *y )
{
	size_t i = 0,
		   j = 0,
		   k = 0;

	double mod       = 0.0,
	       best_dist = DBL_MAX;

	som_neuron_t *neuron;

	for ( i=0; i < net->output_layer->neurons_rows; ++i )
	{
		for ( j=0; j < net->output_layer->neurons_cols; ++j )
		{
			mod    = 0.0;
			neuron = net->output_layer->neurons[i][j];

			for ( k=0; k < neuron->synapses_count; ++k )
			{
				mod += pow( net->input_layer->neurons[k]->input - neuron->synapses[k]->weight, 2 );
			}

			if ( mod < best_dist )
			{
				best_dist = mod;
				*x = i;
				*y = j;
			}
		}
	}

	/* Return the distance of the winning neuron, not that of the last one scanned */
	return best_dist;
}		/* -----  end of function som_get_best_neuron_coordinates  ----- */
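
/*
 * A minimal best-matching-unit (BMU) lookup sketch, assuming a network with
 * 3 input neurons (the sample values are arbitrary):
 *
 *     double sample[3] = { 0.2, 0.5, 0.9 };
 *     size_t x = 0, y = 0;
 *     som_set_inputs ( net, sample );
 *     double d = som_get_best_neuron_coordinates ( net, &x, &y );
 *
 * net->output_layer->neurons[x][y] is now the neuron whose weight vector
 * minimizes ||X - W||^2, and d is that squared distance.
 */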

/**
 * \brief  Get the n-th approximated step of the analytic continuation of the Lambert W-function of a real number x (see "Numerical Evaluation of the Lambert W Function and Application to Generation of Generalized Gaussian Noise With Exponent 1/2" by Chapeau-Blondeau and Monir, IEEE Transactions on Signal Processing, vol.50, no.9, Sep.2002)
 * \param  net 	SOM neural network (provides scratch space for the series coefficients)
 * \param  x 	Input variable of which we're going to compute W[-1](x)
 * \return W[-1](x)
 */

static double
lambert_W1_function ( som_network_t *net, double x )
{
	int j = 0,
		k = 0;

	double p = 0.0,
		   res = 0.0,
		   k_plus = 0,
		   k_less = 0;

	p = - sqrt ( 2 * ( M_E * x + 1 ));

	net->mus[0]    = -1;
	net->mus[1]    = 1;
	net->alphas[0] = 2;
	net->alphas[1] = -1;

	/* The series W[-1](x) = sum_k mu_k p^k starts with the k = 0 and
	 * k = 1 terms, mu_0 + mu_1 * p = -1 + p */
	res = net->mus[0] + net->mus[1] * p;

	for ( k=2; k < TAYLOR_LAMBERT_LAST_ELEMENT; ++k )
	{
		net->alphas[k] = 0.0;

		for ( j=2; j < k; ++j )
		{
			net->alphas[k] += net->mus[j] * net->mus[ k - j + 1 ];
		}

		k_plus = k + 1;
		k_less = k - 1;

		net->mus[k] = ( k_less / k_plus ) *
					  ( ( net->mus[k-2] / 2.0 ) + ( net->alphas[k-2] / 4.0 ) ) -
					  ( net->alphas[k] / 2.0 ) -
					  ( net->mus[k-1] / k_plus );

		res += ( net->mus[k] * pow ( p, k ) );
	}

	return res;
}		/* -----  end of function lambert_W1_function  ----- */
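
/*
 * Reference for the series above (Chapeau-Blondeau and Monir, 2002): with
 * p = -sqrt( 2 * ( e * x + 1 ) ), the real branch W[-1](x) on [-1/e, 0) is
 * approximated by the truncated expansion
 *
 *     W[-1](x) ~ sum_{k=0}^{K} mu_k * p^k,    mu_0 = -1,  mu_1 = 1,
 *
 * where the remaining mu_k are generated by the recurrence in the loop and
 * K = TAYLOR_LAMBERT_LAST_ELEMENT - 1 terms are kept.
 */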

/**
 * \brief  Get the learning rate of a step of the learning process in function of the current iteration number
 * \param  net 	SOM neural network
 * \param  t 		Iteration number
 * \param  M 		Maximum value for the learning rate (in [0:1])
 * \param  N 		Iteration number after which the function equals the "cutoff" value (0.01), i.e. the learning rate becomes almost meaningless
 * \return Learning rate
 */

static double
som_learning_rate ( som_network_t* net, size_t t, double M, size_t N )
{
	double value = 0.0,
		  T = 0.0,
		  K = 0.0,
		  W = 0.0,
		  W_arg = 0.0;

	if ( net->T_learning_param == 0.0 )
	{
		K = ( M * (double) N * M_E ) / 0.01;
		W_arg = -((double) N ) / K;
		W = lambert_W1_function ( net, W_arg );
		T = K * exp ( W );
		net->T_learning_param = T;
	} else {
		T = net->T_learning_param;
	}

	value = M * ( (double) t / T ) * exp ( 1 - ( (double) t / T ));
	return value;
}		/* -----  end of function som_learning_rate  ----- */
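
/*
 * Where T comes from: the learning rate is phi(t) = M * (t/T) * exp(1 - t/T),
 * which peaks at phi(T) = M and decays afterwards.  Requiring phi(N) = 0.01
 * gives (N/T) * exp(-N/T) = 0.01 / (M * e); with K = M * N * e / 0.01 this
 * reads w * exp(w) = -N/K for w = -N/T, i.e. w = W[-1](-N/K).  And since
 * w * exp(w) = -N/K implies K * exp(w) = -N/w = T, the code above recovers
 * T directly as T = K * exp(W).
 */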

/**
 * \brief  Get the learning rate of a step of the learning process in function of the current iteration number once T_learning_param is set
 * \param  net 	SOM neural network
 * \param  t 		Iteration number
 * \param  M 		Maximum value for the learning rate (in [0:1])
 * \param  N 		Unused here; kept for symmetry with som_learning_rate()
 * \return Learning rate
 */

INLINE double
som_learning_rate_fast ( som_network_t* net, size_t t, double M, size_t N )
{
	double t_over_T = (double) t / net->T_learning_param;
	return M * t_over_T * exp ( 1 - t_over_T );
}		/* -----  end of function som_learning_rate_fast  ----- */

/**
 * \brief  Training iteration for the network given a single input data set
 * \param  net 	SOM neural network
 * \param  data 	Input data
 * \param  iter 	Iteration number
 */

static void
som_train_iteration ( som_network_t *net, double *data, size_t iter )
{
	size_t x = 0,
		  y = 0,
		  i = 0,
		  j = 0,
		  k = 0,
		  dist = 0,
		  dist_i = 0,
		  dist_j = 0;

	double l_rate = 0.0,
		   inv_dist = 0.0;

	l_rate = som_learning_rate_fast ( net, iter, 0.8, 200 );
	som_set_inputs ( net, data );
	som_get_best_neuron_coordinates ( net, &x, &y );

	som_neuron_t *neuron;
	for ( i=0; i < net->output_layer->neurons_rows; ++i )
	{
		/* x, y, i and j are unsigned, so compute |x - i| and |y - j|
		 * branch-wise instead of feeding an underflowed size_t to abs() */
		dist_i = ( x > i ) ? x - i : i - x;
		for ( j=0; j < net->output_layer->neurons_cols; ++j )
		{
			dist_j   = ( y > j ) ? y - j : j - y;
			dist     = (size_t) pow ( (double)( dist_i + dist_j ), 4 );
			inv_dist = ( 1.0 / ((double) dist + 1 )) * l_rate;
			neuron   = net->output_layer->neurons[i][j];
			for ( k=0; k < net->input_layer->neurons_count; ++k )
			{
				neuron->synapses[k]->weight += inv_dist * ( net->input_layer->neurons[k]->input - neuron->synapses[k]->weight );
			}
		}
	}
}		/* -----  end of function som_train_iteration  ----- */
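
/*
 * The neighborhood kernel used above is h(d) = 1 / (d^4 + 1), with
 * d = |x - i| + |y - j| (a Manhattan-style grid distance from the winning
 * neuron), rather than the classic Gaussian; every weight then moves by the
 * usual Kohonen rule w += eta * h * (input - w), where eta is the learning
 * rate computed by som_learning_rate_fast().
 */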

/**
 * \brief  Initialize the synaptical weights of the network using the algorithm proposed in "Improving the Self-Organization Feature Map Algorithm Using an Efficient Initialization Scheme", by Su, Liu and Chang, on "Tamkang Journal of Science and Engineering", vol.5, no.1, pp.35-48, 2002
 * \param  net 	SOM neural network
 * \param  data 	Input data set
 * \param  n_data 	Number of vectors in the input set
 */

void
som_init_weights ( som_network_t *net, double **data, size_t n_data )
{
	size_t i = 0,
		  j = 0,
		  k = 0,
		  out_rows = 0,
		  out_cols = 0,
		  in_size  = 0,
		  max_i = 0,
		  max_j = 0,
		  medium_i = 0,
		  medium_j = 0;

	double dist = 0.0,
		  max_dist = 0.0;

	double *avg_data = NULL;

	if ( !( avg_data = (double*) malloc ( net->input_layer->neurons_count * sizeof ( double ))))
	{
		return;
	}

	/* Find the couple of data sets with the maximum distance */
	for ( i=0; i < n_data; ++i )
	{
		for ( j=0; j < n_data; ++j )
		{
			if ( i != j )
			{
				dist = 0.0;

				for ( k=0; k < net->input_layer->neurons_count; ++k )
				{
					dist += fabs ( data[i][k] - data[j][k] );
				}

				if ( dist > max_dist )
				{
					max_dist = dist;
					max_i = i;
					max_j = j;
				}
			}
		}
	}

	/* Compute the avg_data vector as the vector containing the average values of (data[max_i], data[max_j]) */
	double *max_i_row = data[max_i],
	       *max_j_row = data[max_j];
	for ( i=0; i < net->input_layer->neurons_count; ++i )
	{
		avg_data[i] = ( max_i_row[i] + max_j_row[i] ) / 2.0;
	}

	/* Initialize the upper-right and bottom-left vertex of the output matrix with these values */
	som_neuron_t *ur_neuron = net->output_layer->neurons[0][ net->output_layer->neurons_cols - 1 ],
				 *bl_neuron = net->output_layer->neurons[ net->output_layer->neurons_rows - 1 ][0];
	for ( i=0; i < net->input_layer->neurons_count; ++i )
	{
		ur_neuron->synapses[i]->weight = max_i_row[i];
		bl_neuron->synapses[i]->weight = max_j_row[i];
	}

	/* Find the vector having the maximum distance from the average of the two vectors above */
	max_dist = 0.0;

	for ( i=0; i < n_data; ++i )
	{
		if ( i != max_i && i != max_j )
		{
			dist = 0.0;

			/* Compare component k of data[i] against component k of the average vector */
			for ( k=0; k < net->input_layer->neurons_count; ++k )
			{
				dist += fabs ( data[i][k] - avg_data[k] );
			}

			/* Compare the complete distances, not the partial sums */
			if ( dist > max_dist )
			{
				max_dist = dist;
				medium_i = i;
			}
		}
	}

	double *med_i_row = data[medium_i];

	/* Initialize the upper-left corner with the values of this vector */
	som_neuron_t *ul_neuron = net->output_layer->neurons[0][0];
	for ( i=0; i < net->input_layer->neurons_count; ++i )
	{
		ul_neuron->synapses[i]->weight = med_i_row[i];
	}

	/* avg_data contains the average values of the 3 vectors computed above */
	for ( i=0; i < net->input_layer->neurons_count; ++i )
	{
		avg_data[i] = ( max_i_row[i] + max_j_row[i] + med_i_row[i] ) / 3.0;
	}

	/* Find the vector having the maximum distance from the 3 vectors above */
	max_dist = 0.0;

	for ( i=0; i < n_data; ++i )
	{
		if ( i != max_i && i != max_j && i != medium_i )
		{
			dist = 0.0;

			for ( k=0; k < net->input_layer->neurons_count; ++k )
			{
				dist += fabs ( data[i][k] - avg_data[k] );
			}

			if ( dist > max_dist )
			{
				max_dist = dist;
				medium_j = i;
			}
		}
	}

	double *med_j_row = data[medium_j];

	/* Initialize the bottom-right corner with the values of this vector */
	som_neuron_t *br_neuron = net->output_layer->neurons[ net->output_layer->neurons_rows - 1 ][ net->output_layer->neurons_cols - 1 ];
	for ( i=0; i < net->input_layer->neurons_count; ++i )
	{
		br_neuron->synapses[i]->weight = med_j_row[i];
	}

	/* Initialize the weights on the 4 edges */
	out_rows = net->output_layer->neurons_rows;
	out_cols = net->output_layer->neurons_cols;
	in_size  = net->input_layer->neurons_count;

	som_neuron_t **edges = net->output_layer->neurons[0];
	size_t		   last_out_col = out_cols - 1,
				   span_cols;
	double		   a, b;
	for ( j=1; j < out_cols - 1; ++j )
	{
		span_cols = out_cols - j;
		a		  = ( (double)(j - 1) / (double) last_out_col );
		b		  = ( (double) span_cols / (double) last_out_col );
		for ( k=0; k < in_size; ++k )
		{
			edges[j]->synapses[k]->weight =
				a * edges[ last_out_col ]->synapses[k]->weight +
				b * ul_neuron->synapses[k]->weight;
		}
	}

	size_t		 last_out_row = out_rows - 1;
	som_neuron_t *j_neuron,
				 *br_edge = net->output_layer->neurons[ last_out_row ][ last_out_col ],
				 *bl_edge = net->output_layer->neurons[ last_out_row ][0];
	for ( j=1; j < out_cols - 1; ++j )
	{
		j_neuron  = net->output_layer->neurons[last_out_row][j];
		a		  = (double)(j - 1)		   / (double) last_out_col;
		b		  = (double)(out_cols - j) / (double) last_out_col;

		for ( k=0; k < in_size; ++k )
		{
			j_neuron->synapses[k]->weight = a * br_edge->synapses[k]->weight + b * bl_edge->synapses[k]->weight;
		}
	}

	som_neuron_t *i_neuron,
				 *tl_edge = net->output_layer->neurons[0][0];
	for ( i=1; i < out_rows - 1; ++i )
	{
		i_neuron = net->output_layer->neurons[i][0];
		a		 = ( ((double) i - 1) / (double) last_out_row );
		b		 = ( (double) ( out_rows - i ) / (double) last_out_row );
		for ( k=0; k < in_size; ++k )
		{
			i_neuron->synapses[k]->weight = a * bl_edge->synapses[k]->weight + b * tl_edge->synapses[k]->weight;
		}
	}

	som_neuron_t *tr_edge = net->output_layer->neurons[0][ last_out_col ];
	for ( i=1; i < out_rows - 1; ++i )
	{
		i_neuron = net->output_layer->neurons[i][ last_out_col ];
		a		 = ( ((double) i - 1) / ((double) last_out_row ));
		b		 = ( (double) ( out_rows - i ) / ((double) last_out_row ));
		for ( k=0; k < in_size; ++k )
		{
			i_neuron->synapses[k]->weight = a * br_edge->synapses[k]->weight + b * tr_edge->synapses[k]->weight;
		}
	}

	/* Initialize the weights in the middle of the matrix */
	som_neuron_t *ij_neuron;
	double		  sqr_index = (double) last_out_row * (double) last_out_col,
				  prev_i,
				  prev_j,
				  prev_out_rows,
				  prev_out_cols,
				  c,
				  d;
	for ( i=1; i < out_rows - 1; ++i )
	{
		prev_i 		  = i - 1;
		prev_out_rows = out_rows - i;
		for ( j=1; j < out_cols - 1; ++j )
		{
			prev_j	  	  = j - 1;
			ij_neuron 	  = net->output_layer->neurons[i][j];
			prev_out_cols = out_cols - j;
			a		  	  = ( prev_j * prev_i ) / sqr_index;
			b	   	  	  = ( prev_j * prev_out_rows ) / sqr_index;
			c		  	  = ( prev_out_cols * prev_i ) / sqr_index;
			d		 	  = ( prev_out_cols * prev_out_rows ) / sqr_index;
			for ( k=0; k < in_size; ++k )
			{
				ij_neuron->synapses[k]->weight =
					a * br_edge->synapses[k]->weight +
					b * tr_edge->synapses[k]->weight +
					c * bl_edge->synapses[k]->weight +
					d * tl_edge->synapses[k]->weight;
			}
		}
	}

	free ( avg_data );
}		/* -----  end of function som_init_weights  ----- */
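
/*
 * Summary of the scheme above: the four corner neurons get the weights of
 * four mutually distant training vectors, the four border lines are filled
 * by linear interpolation between their corner neurons, and the interior is
 * filled bilinearly from the four corners, so training starts from an
 * already ordered map instead of random noise.
 */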

/**
 * \brief  Train the self-organizing map through a data set
 * \param  net 	SOM neural network
 * \param  data 	Data set (set of input vectors)
 * \param  n_data 	Number of input vectors in data
 * \param  iter 	Number of iterations
 */

void
som_train ( som_network_t *net, double **data, size_t n_data, size_t iter )
{
	size_t n = 0,
		  k = 0,
		  x = 0,
		  y = 0;

	/* Called for its side effect only: it computes and caches
	 * net->T_learning_param, so the loop below can use som_learning_rate_fast() */
	som_learning_rate ( net, iter, 0.8, 200 );

	for ( n=0; n < n_data; ++n )
	{
		for ( k=1; k <= iter; ++k )
		{
			som_train_iteration ( net, data[n], k );

			/* Stop early if the winning neuron already matches this input exactly */
			if ( som_get_best_neuron_coordinates ( net, &x, &y ) == 0.0 )
				break;
		}
	}
}		/* -----  end of function som_train  ----- */
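
/*
 * A minimal end-to-end training sketch (sizes and values are arbitrary
 * examples):
 *
 *     double a[] = { 0.0, 0.1 }, b[] = { 0.9, 1.0 };
 *     double *set[] = { a, b };
 *     size_t x = 0, y = 0;
 *     som_network_t *net = som_network_new ( 2, 8, 8 );
 *     som_init_weights ( net, set, 2 );
 *     som_train ( net, set, 2, 100 );
 *     som_set_inputs ( net, a );
 *     som_get_best_neuron_coordinates ( net, &x, &y );
 *     som_network_destroy ( net );
 */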

/**
 * \brief  Serialize a neural network on a binary file
 * \param  net 	SOM network to be serialized
 * \param  fname 	Output file name
 */

void
som_serialize ( som_network_t *net, const char *fname )
{
	FILE *fp = NULL;
	size_t i = 0,
		  j = 0,
		  k = 0;

	/* Open in binary mode, since raw struct fields are written */
	if ( !( fp = fopen ( fname, "wb" )))
	{
		return;
	}

	net->serialization_time = time ( NULL );
	fwrite ( &(net->serialization_time), sizeof ( time_t ), 1, fp );
	fwrite ( &(net->T_learning_param), sizeof ( double ), 1, fp );
	fwrite ( &(net->input_layer->neurons_count), sizeof ( size_t ), 1, fp );
	fwrite ( &(net->output_layer->neurons_rows), sizeof ( size_t ), 1, fp );
	fwrite ( &(net->output_layer->neurons_cols), sizeof ( size_t ), 1, fp );

	for ( i=0; i < net->output_layer->neurons_rows; ++i )
	{
		for ( j=0; j < net->output_layer->neurons_cols; ++j )
		{
			for ( k=0; k < net->output_layer->neurons[i][j]->synapses_count; ++k )
			{
				fwrite ( &(net->output_layer->neurons[i][j]->synapses[k]->weight), sizeof ( double ), 1, fp );
			}
		}
	}

	fclose ( fp );
}		/* -----  end of function som_serialize  ----- */
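
/*
 * On-disk layout written above, in order: serialization time (time_t),
 * T learning parameter (double), input neuron count, output rows and output
 * cols (three size_t), then rows * cols * inputs weights (double) in
 * row-major order.  The fields are written with the native sizes and byte
 * order, so a file is only guaranteed to be readable by som_deserialize()
 * on the same architecture that wrote it.
 */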

/**
 * \brief  Initialize a SOM neural network from a serialized one on a file
 * \param  fname 	Binary file containing the network
 * \return The initialized network in case of success, NULL otherwise
 */

som_network_t*
som_deserialize ( const char* fname )
{
	som_network_t *net = NULL;
	FILE *fp = NULL;
	double weight = 0.0;
	size_t i = 0,
		  j = 0,
		  k = 0,
		  input_neurons = 0,
		  output_neurons_rows = 0,
		  output_neurons_cols = 0;

	/* Open in binary mode, matching som_serialize() */
	if ( !( fp = fopen ( fname, "rb" )))
	{
		return NULL;
	}

	if ( !( net = ( som_network_t* ) calloc ( 1, sizeof ( som_network_t ))))
	{
		fclose ( fp );
		return NULL;
	}

	fread ( &(net->serialization_time), sizeof ( time_t ), 1, fp );
	fread ( &(net->T_learning_param ), sizeof ( double ), 1, fp );
	fread ( &input_neurons, sizeof ( size_t ), 1, fp );
	fread ( &output_neurons_rows, sizeof ( size_t ), 1, fp );
	fread ( &output_neurons_cols, sizeof ( size_t ), 1, fp );

	if ( !( net->input_layer = som_input_layer_new ( input_neurons )))
	{
		fclose ( fp );
		free ( net );
		return NULL;
	}

	if ( !( net->output_layer = som_output_layer_new ( output_neurons_rows, output_neurons_cols )))
	{
		som_input_layer_destroy ( net );
		fclose ( fp );
		free ( net );
		return NULL;
	}

	for ( i=0; i < output_neurons_rows; ++i )
	{
		for ( j=0; j < output_neurons_cols; ++j )
		{
			for ( k=0; k < input_neurons; ++k )
			{
				fread ( &weight, sizeof ( double ), 1, fp );

				if ( !( som_synapsis_new ( net->input_layer->neurons[k], net->output_layer->neurons[i][j], weight )))
				{
					som_input_layer_destroy ( net );
					som_output_layer_destroy ( net );
					fclose ( fp );
					free ( net );
					return NULL;
				}
			}
		}
	}

	fclose ( fp );
	return net;
}		/* -----  end of function som_deserialize  ----- */