ruin.nu Git - germs.git/blob - fann/src/fann.c
Make it possible to build statically against the included fann library.
[germs.git] / fann / src / fann.c
1 /*
2   Fast Artificial Neural Network Library (fann)
3   Copyright (C) 2003 Steffen Nissen (lukesky@diku.dk)
4
5   This library is free software; you can redistribute it and/or
6   modify it under the terms of the GNU Lesser General Public
7   License as published by the Free Software Foundation; either
8   version 2.1 of the License, or (at your option) any later version.
9
10   This library is distributed in the hope that it will be useful,
11   but WITHOUT ANY WARRANTY; without even the implied warranty of
12   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13   Lesser General Public License for more details.
14
15   You should have received a copy of the GNU Lesser General Public
16   License along with this library; if not, write to the Free Software
17   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
18 */
19
20 #include <stdio.h>
21 #include <stdlib.h>
22 #include <stdarg.h>
23 #include <string.h>
24 #include <time.h>
25 #include <math.h>
26
27 #include "config.h"
28 #include "fann.h"
29
30 FANN_EXTERNAL struct fann *FANN_API fann_create_standard(unsigned int num_layers, ...)
31 {
32         struct fann *ann;
33         va_list layer_sizes;
34         int i;
35         unsigned int *layers = (unsigned int *) calloc(num_layers, sizeof(unsigned int));
36
37         if(layers == NULL)
38         {
39                 fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
40                 return NULL;
41         }
42
43         va_start(layer_sizes, num_layers);
44         for(i = 0; i < (int) num_layers; i++)
45         {
46                 layers[i] = va_arg(layer_sizes, unsigned int);
47         }
48         va_end(layer_sizes);
49
50         ann = fann_create_standard_array(num_layers, layers);
51
52         free(layers);
53
54         return ann;
55 }
56
57 FANN_EXTERNAL struct fann *FANN_API fann_create_standard_array(unsigned int num_layers, 
58                                                                                                                            unsigned int *layers)
59 {
60         return fann_create_sparse_array(1, num_layers, layers); 
61 }
62
63 FANN_EXTERNAL struct fann *FANN_API fann_create_sparse(float connection_rate, 
64                                                                                                            unsigned int num_layers, ...)
65 {
66         struct fann *ann;
67         va_list layer_sizes;
68         int i;
69         unsigned int *layers = (unsigned int *) calloc(num_layers, sizeof(unsigned int));
70
71         if(layers == NULL)
72         {
73                 fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
74                 return NULL;
75         }
76
77         va_start(layer_sizes, num_layers);
78         for(i = 0; i < (int) num_layers; i++)
79         {
80                 layers[i] = va_arg(layer_sizes, unsigned int);
81         }
82         va_end(layer_sizes);
83
84         ann = fann_create_sparse_array(connection_rate, num_layers, layers);
85
86         free(layers);
87
88         return ann;
89 }
90
/*
 * Creates a sparsely connected backpropagation network from an array of
 * num_layers layer sizes.
 *
 * connection_rate - fraction of the possible inter-layer connections to
 *                   create; clamped to at most 1, and a rate of 1 (or
 *                   more) produces a fully connected network.
 * num_layers      - number of entries in layers.
 * layers          - number of neurons in each layer; one bias neuron is
 *                   added to every layer internally.
 *
 * Returns the newly allocated network, or NULL on allocation failure
 * (reported through fann_error; a partially built network is destroyed).
 */
FANN_EXTERNAL struct fann *FANN_API fann_create_sparse_array(float connection_rate,
															 unsigned int num_layers,
															 unsigned int *layers)
{
	struct fann_layer *layer_it, *last_layer, *prev_layer;
	struct fann *ann;
	struct fann_neuron *neuron_it, *last_neuron, *random_neuron, *bias_neuron;
#ifdef DEBUG
	unsigned int prev_layer_size;
#endif
	unsigned int num_neurons_in, num_neurons_out, i, j;
	unsigned int min_connections, max_connections, num_connections;
	unsigned int connections_per_neuron, allocated_connections;
	unsigned int random_number, found_connection;

#ifdef FIXEDFANN
	unsigned int decimal_point;
	unsigned int multiplier;
#endif
	/* a rate above 1 is meaningless; clamp to fully connected */
	if(connection_rate > 1)
	{
		connection_rate = 1;
	}

	/* seed random */
#ifndef FANN_NO_SEED
	fann_seed_rand();
#endif

	/* allocate the general structure */
	ann = fann_allocate_structure(num_layers);
	if(ann == NULL)
	{
		fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
		return NULL;
	}

	ann->connection_rate = connection_rate;
#ifdef FIXEDFANN
	decimal_point = ann->decimal_point;
	multiplier = ann->multiplier;
	fann_update_stepwise(ann);
#endif

	/* determine how many neurons there should be in each layer */
	i = 0;
	for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
	{
		/* we do not allocate room here, but we make sure that
		 * last_neuron - first_neuron is the number of neurons
		 * (first_neuron is NULL so last_neuron temporarily encodes
		 * only the count; real storage comes from
		 * fann_allocate_neurons below) */
		layer_it->first_neuron = NULL;
		layer_it->last_neuron = layer_it->first_neuron + layers[i++] + 1;	/* +1 for bias */
		ann->total_neurons += layer_it->last_neuron - layer_it->first_neuron;
	}

	/* the bias neuron is not counted as an input or output neuron */
	ann->num_output = (ann->last_layer - 1)->last_neuron - (ann->last_layer - 1)->first_neuron - 1;
	ann->num_input = ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1;

	/* allocate room for the actual neurons */
	fann_allocate_neurons(ann);
	if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
	{
		fann_destroy(ann);
		return NULL;
	}

#ifdef DEBUG
	printf("creating network with connection rate %f\n", connection_rate);
	printf("input\n");
	printf("  layer       : %d neurons, 1 bias\n",
		   ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1);
#endif

	/* first pass: decide how many connections each neuron gets and
	 * assign each neuron its [first_con, last_con) slice of the
	 * global connection arrays */
	num_neurons_in = ann->num_input;
	for(layer_it = ann->first_layer + 1; layer_it != ann->last_layer; layer_it++)
	{
		num_neurons_out = layer_it->last_neuron - layer_it->first_neuron - 1;
		/* if all neurons in each layer should be connected to at least one neuron
		 * in the previous layer, and one neuron in the next layer.
		 * and the bias node should be connected to the all neurons in the next layer.
		 * Then this is the minimum amount of neurons */
		min_connections = fann_max(num_neurons_in, num_neurons_out) + num_neurons_out;
		max_connections = num_neurons_in * num_neurons_out;	/* not calculating bias */
		num_connections = fann_max(min_connections,
								   (unsigned int) (0.5 + (connection_rate * max_connections)) +
								   num_neurons_out);

		connections_per_neuron = num_connections / num_neurons_out;
		allocated_connections = 0;
		/* Now split out the connections on the different neurons */
		for(i = 0; i != num_neurons_out; i++)
		{
			layer_it->first_neuron[i].first_con = ann->total_connections + allocated_connections;
			allocated_connections += connections_per_neuron;
			layer_it->first_neuron[i].last_con = ann->total_connections + allocated_connections;

			layer_it->first_neuron[i].activation_function = FANN_SIGMOID_STEPWISE;
#ifdef FIXEDFANN
			layer_it->first_neuron[i].activation_steepness = ann->multiplier / 2;
#else
			layer_it->first_neuron[i].activation_steepness = 0.5;
#endif

			/* spread the division remainder evenly over the neurons */
			if(allocated_connections < (num_connections * (i + 1)) / num_neurons_out)
			{
				layer_it->first_neuron[i].last_con++;
				allocated_connections++;
			}
		}

		/* bias neuron also gets stuff (an empty connection slice) */
		layer_it->first_neuron[i].first_con = ann->total_connections + allocated_connections;
		layer_it->first_neuron[i].last_con = ann->total_connections + allocated_connections;

		ann->total_connections += num_connections;

		/* used in the next run of the loop */
		num_neurons_in = num_neurons_out;
	}

	fann_allocate_connections(ann);
	if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
	{
		fann_destroy(ann);
		return NULL;
	}

	if(connection_rate >= 1)
	{
		/* fully connected: every neuron connects, in order, to all
		 * neurons (including bias) of the previous layer */
#ifdef DEBUG
		prev_layer_size = ann->num_input + 1;
#endif
		prev_layer = ann->first_layer;
		last_layer = ann->last_layer;
		for(layer_it = ann->first_layer + 1; layer_it != last_layer; layer_it++)
		{
			last_neuron = layer_it->last_neuron - 1;
			for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++)
			{
				for(i = neuron_it->first_con; i != neuron_it->last_con; i++)
				{
					ann->weights[i] = (fann_type) fann_random_weight();
					/* these connections are still initialized for fully connected networks, to allow
					 * operations to work, that are not optimized for fully connected networks.
					 */
					ann->connections[i] = prev_layer->first_neuron + (i - neuron_it->first_con);
				}
			}
#ifdef DEBUG
			prev_layer_size = layer_it->last_neuron - layer_it->first_neuron;
#endif
			prev_layer = layer_it;
#ifdef DEBUG
			printf("  layer       : %d neurons, 1 bias\n", prev_layer_size - 1);
#endif
		}
	}
	else
	{
		/* make connections for a network, that are not fully connected */

		/* generally, what we do is first to connect all the input
		 * neurons to a output neuron, respecting the number of
		 * available input neurons for each output neuron. Then
		 * we go through all the output neurons, and connect the
		 * rest of the connections to input neurons, that they are
		 * not allready connected to.
		 */

		/* All the connections are cleared by calloc, because we want to
		 * be able to see which connections are allready connected */

		for(layer_it = ann->first_layer + 1; layer_it != ann->last_layer; layer_it++)
		{

			num_neurons_out = layer_it->last_neuron - layer_it->first_neuron - 1;
			num_neurons_in = (layer_it - 1)->last_neuron - (layer_it - 1)->first_neuron - 1;

			/* first connect the bias neuron */
			bias_neuron = (layer_it - 1)->last_neuron - 1;
			last_neuron = layer_it->last_neuron - 1;
			for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++)
			{

				ann->connections[neuron_it->first_con] = bias_neuron;
				ann->weights[neuron_it->first_con] = (fann_type) fann_random_weight();
			}

			/* then connect all neurons in the input layer
			 * (guarantees every previous-layer neuron feeds at
			 * least one neuron in this layer) */
			last_neuron = (layer_it - 1)->last_neuron - 1;
			for(neuron_it = (layer_it - 1)->first_neuron; neuron_it != last_neuron; neuron_it++)
			{

				/* random neuron in the output layer that has space
				 * for more connections */
				do
				{
					random_number = (int) (0.5 + fann_rand(0, num_neurons_out - 1));
					random_neuron = layer_it->first_neuron + random_number;
					/* checks the last space in the connections array for room */
				}
				while(ann->connections[random_neuron->last_con - 1]);

				/* find an empty space in the connection array and connect */
				for(i = random_neuron->first_con; i < random_neuron->last_con; i++)
				{
					if(ann->connections[i] == NULL)
					{
						ann->connections[i] = neuron_it;
						ann->weights[i] = (fann_type) fann_random_weight();
						break;
					}
				}
			}

			/* then connect the rest of the unconnected neurons */
			last_neuron = layer_it->last_neuron - 1;
			for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++)
			{
				/* find empty space in the connection array and connect */
				for(i = neuron_it->first_con; i < neuron_it->last_con; i++)
				{
					/* continue if allready connected */
					if(ann->connections[i] != NULL)
						continue;

					/* rejection-sample a previous-layer neuron we are
					 * not already connected to */
					do
					{
						found_connection = 0;
						random_number = (int) (0.5 + fann_rand(0, num_neurons_in - 1));
						random_neuron = (layer_it - 1)->first_neuron + random_number;

						/* check to see if this connection is allready there */
						for(j = neuron_it->first_con; j < i; j++)
						{
							if(random_neuron == ann->connections[j])
							{
								found_connection = 1;
								break;
							}
						}

					}
					while(found_connection);

					/* we have found a neuron that is not allready
					 * connected to us, connect it */
					ann->connections[i] = random_neuron;
					ann->weights[i] = (fann_type) fann_random_weight();
				}
			}

#ifdef DEBUG
			printf("  layer       : %d neurons, 1 bias\n", num_neurons_out);
#endif
		}

		/* TODO it would be nice to have the randomly created
		 * connections sorted for smoother memory access.
		 */
	}

#ifdef DEBUG
	printf("output\n");
#endif

	return ann;
}
359
360
361 FANN_EXTERNAL struct fann *FANN_API fann_create_shortcut(unsigned int num_layers, ...)
362 {
363         struct fann *ann;
364         int i;
365         va_list layer_sizes;
366         unsigned int *layers = (unsigned int *) calloc(num_layers, sizeof(unsigned int));
367
368         if(layers == NULL)
369         {
370                 fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
371                 return NULL;
372         }
373
374
375         va_start(layer_sizes, num_layers);
376         for(i = 0; i < (int) num_layers; i++)
377         {
378                 layers[i] = va_arg(layer_sizes, unsigned int);
379         }
380         va_end(layer_sizes);
381
382         ann = fann_create_shortcut_array(num_layers, layers);
383
384         free(layers);
385
386         return ann;
387 }
388
/*
 * Creates a fully shortcut-connected network from an array of
 * num_layers layer sizes: every neuron is connected to all neurons in
 * ALL earlier layers, not only the immediately preceding one. Only the
 * input layer gets a bias neuron.
 *
 * Returns the newly allocated network, or NULL on allocation failure
 * (reported through fann_error; a partially built network is destroyed).
 */
FANN_EXTERNAL struct fann *FANN_API fann_create_shortcut_array(unsigned int num_layers,
															   unsigned int *layers)
{
	struct fann_layer *layer_it, *layer_it2, *last_layer;
	struct fann *ann;
	struct fann_neuron *neuron_it, *neuron_it2 = 0;
	unsigned int i;
	unsigned int num_neurons_in, num_neurons_out;

#ifdef FIXEDFANN
	unsigned int decimal_point;
	unsigned int multiplier;
#endif
	/* seed random */
#ifndef FANN_NO_SEED
	fann_seed_rand();
#endif

	/* allocate the general structure */
	ann = fann_allocate_structure(num_layers);
	if(ann == NULL)
	{
		fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
		return NULL;
	}

	ann->connection_rate = 1;
	ann->shortcut_connections = 1;
#ifdef FIXEDFANN
	decimal_point = ann->decimal_point;
	multiplier = ann->multiplier;
	fann_update_stepwise(ann);
#endif

	/* determine how many neurons there should be in each layer */
	i = 0;
	for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
	{
		/* we do not allocate room here, but we make sure that
		 * last_neuron - first_neuron is the number of neurons
		 * (first_neuron is NULL so last_neuron temporarily encodes
		 * only the count; real storage comes from
		 * fann_allocate_neurons below) */
		layer_it->first_neuron = NULL;
		layer_it->last_neuron = layer_it->first_neuron + layers[i++];
		if(layer_it == ann->first_layer)
		{
			/* there is a bias neuron in the first layer */
			layer_it->last_neuron++;
		}

		ann->total_neurons += layer_it->last_neuron - layer_it->first_neuron;
	}

	ann->num_output = (ann->last_layer - 1)->last_neuron - (ann->last_layer - 1)->first_neuron;
	ann->num_input = ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1;

	/* allocate room for the actual neurons */
	fann_allocate_neurons(ann);
	if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
	{
		fann_destroy(ann);
		return NULL;
	}

#ifdef DEBUG
	printf("creating fully shortcut connected network.\n");
	printf("input\n");
	printf("  layer       : %d neurons, 1 bias\n",
		   ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1);
#endif

	/* first pass: assign each neuron its [first_con, last_con) slice;
	 * a neuron connects to every neuron in all earlier layers, so the
	 * slice size (num_neurons_in + 1) grows layer by layer */
	num_neurons_in = ann->num_input;
	last_layer = ann->last_layer;
	for(layer_it = ann->first_layer + 1; layer_it != last_layer; layer_it++)
	{
		num_neurons_out = layer_it->last_neuron - layer_it->first_neuron;

		/* Now split out the connections on the different neurons */
		for(i = 0; i != num_neurons_out; i++)
		{
			layer_it->first_neuron[i].first_con = ann->total_connections;
			ann->total_connections += num_neurons_in + 1;
			layer_it->first_neuron[i].last_con = ann->total_connections;

			layer_it->first_neuron[i].activation_function = FANN_SIGMOID_STEPWISE;
#ifdef FIXEDFANN
			layer_it->first_neuron[i].activation_steepness = ann->multiplier / 2;
#else
			layer_it->first_neuron[i].activation_steepness = 0.5;
#endif
		}

#ifdef DEBUG
		printf("  layer       : %d neurons, 0 bias\n", num_neurons_out);
#endif
		/* used in the next run of the loop */
		num_neurons_in += num_neurons_out;
	}

	fann_allocate_connections(ann);
	if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
	{
		fann_destroy(ann);
		return NULL;
	}

	/* Connections are created from all neurons to all neurons in later layers
	 */
	num_neurons_in = ann->num_input + 1;
	/* NOTE(review): num_neurons_in is updated below but never read in
	 * this loop's body — appears redundant here; verify before removing */
	for(layer_it = ann->first_layer + 1; layer_it != last_layer; layer_it++)
	{
		for(neuron_it = layer_it->first_neuron; neuron_it != layer_it->last_neuron; neuron_it++)
		{

			i = neuron_it->first_con;
			/* connect to every neuron of every earlier layer, in order */
			for(layer_it2 = ann->first_layer; layer_it2 != layer_it; layer_it2++)
			{
				for(neuron_it2 = layer_it2->first_neuron; neuron_it2 != layer_it2->last_neuron;
					neuron_it2++)
				{

					ann->weights[i] = (fann_type) fann_random_weight();
					ann->connections[i] = neuron_it2;
					i++;
				}
			}
		}
		num_neurons_in += layer_it->last_neuron - layer_it->first_neuron;
	}

#ifdef DEBUG
	printf("output\n");
#endif

	return ann;
}
523
524 FANN_EXTERNAL fann_type *FANN_API fann_run(struct fann * ann, fann_type * input)
525 {
526         struct fann_neuron *neuron_it, *last_neuron, *neurons, **neuron_pointers;
527         unsigned int i, num_connections, num_input, num_output;
528         fann_type neuron_sum, *output;
529         fann_type *weights;
530         struct fann_layer *layer_it, *last_layer;
531         unsigned int activation_function;
532         fann_type steepness;
533
534         /* store some variabels local for fast access */
535         struct fann_neuron *first_neuron = ann->first_layer->first_neuron;
536
537 #ifdef FIXEDFANN
538         int multiplier = ann->multiplier;
539         unsigned int decimal_point = ann->decimal_point;
540
541         /* values used for the stepwise linear sigmoid function */
542         fann_type r1 = 0, r2 = 0, r3 = 0, r4 = 0, r5 = 0, r6 = 0;
543         fann_type v1 = 0, v2 = 0, v3 = 0, v4 = 0, v5 = 0, v6 = 0;
544
545         fann_type last_steepness = 0;
546         unsigned int last_activation_function = 0;
547 #else
548         fann_type max_sum;      
549 #endif
550
551         /* first set the input */
552         num_input = ann->num_input;
553         for(i = 0; i != num_input; i++)
554         {
555 #ifdef FIXEDFANN
556                 if(fann_abs(input[i]) > multiplier)
557                 {
558                         printf
559                                 ("Warning input number %d is out of range -%d - %d with value %d, integer overflow may occur.\n",
560                                  i, multiplier, multiplier, input[i]);
561                 }
562 #endif
563                 first_neuron[i].value = input[i];
564         }
565         /* Set the bias neuron in the input layer */
566 #ifdef FIXEDFANN
567         (ann->first_layer->last_neuron - 1)->value = multiplier;
568 #else
569         (ann->first_layer->last_neuron - 1)->value = 1;
570 #endif
571
572         last_layer = ann->last_layer;
573         for(layer_it = ann->first_layer + 1; layer_it != last_layer; layer_it++)
574         {
575                 last_neuron = layer_it->last_neuron;
576                 for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++)
577                 {
578                         if(neuron_it->first_con == neuron_it->last_con)
579                         {
580                                 /* bias neurons */
581 #ifdef FIXEDFANN
582                                 neuron_it->value = multiplier;
583 #else
584                                 neuron_it->value = 1;
585 #endif
586                                 continue;
587                         }
588
589                         activation_function = neuron_it->activation_function;
590                         steepness = neuron_it->activation_steepness;
591
592                         neuron_sum = 0;
593                         num_connections = neuron_it->last_con - neuron_it->first_con;
594                         weights = ann->weights + neuron_it->first_con;
595
596                         if(ann->connection_rate >= 1)
597                         {
598                                 if(ann->shortcut_connections)
599                                 {
600                                         neurons = ann->first_layer->first_neuron;
601                                 }
602                                 else
603                                 {
604                                         neurons = (layer_it - 1)->first_neuron;
605                                 }
606
607
608                                 /* unrolled loop start */
609                                 i = num_connections & 3;        /* same as modulo 4 */
610                                 switch (i)
611                                 {
612                                         case 3:
613                                                 neuron_sum += fann_mult(weights[2], neurons[2].value);
614                                         case 2:
615                                                 neuron_sum += fann_mult(weights[1], neurons[1].value);
616                                         case 1:
617                                                 neuron_sum += fann_mult(weights[0], neurons[0].value);
618                                         case 0:
619                                                 break;
620                                 }
621
622                                 for(; i != num_connections; i += 4)
623                                 {
624                                         neuron_sum +=
625                                                 fann_mult(weights[i], neurons[i].value) +
626                                                 fann_mult(weights[i + 1], neurons[i + 1].value) +
627                                                 fann_mult(weights[i + 2], neurons[i + 2].value) +
628                                                 fann_mult(weights[i + 3], neurons[i + 3].value);
629                                 }
630                                 /* unrolled loop end */
631
632                                 /*
633                                  * for(i = 0;i != num_connections; i++){
634                                  * printf("%f += %f*%f, ", neuron_sum, weights[i], neurons[i].value);
635                                  * neuron_sum += fann_mult(weights[i], neurons[i].value);
636                                  * }
637                                  */
638                         }
639                         else
640                         {
641                                 neuron_pointers = ann->connections + neuron_it->first_con;
642
643                                 i = num_connections & 3;        /* same as modulo 4 */
644                                 switch (i)
645                                 {
646                                         case 3:
647                                                 neuron_sum += fann_mult(weights[2], neuron_pointers[2]->value);
648                                         case 2:
649                                                 neuron_sum += fann_mult(weights[1], neuron_pointers[1]->value);
650                                         case 1:
651                                                 neuron_sum += fann_mult(weights[0], neuron_pointers[0]->value);
652                                         case 0:
653                                                 break;
654                                 }
655
656                                 for(; i != num_connections; i += 4)
657                                 {
658                                         neuron_sum +=
659                                                 fann_mult(weights[i], neuron_pointers[i]->value) +
660                                                 fann_mult(weights[i + 1], neuron_pointers[i + 1]->value) +
661                                                 fann_mult(weights[i + 2], neuron_pointers[i + 2]->value) +
662                                                 fann_mult(weights[i + 3], neuron_pointers[i + 3]->value);
663                                 }
664                         }
665
666 #ifdef FIXEDFANN
667                         neuron_it->sum = fann_mult(steepness, neuron_sum);
668
669                         if(activation_function != last_activation_function || steepness != last_steepness)
670                         {
671                                 switch (activation_function)
672                                 {
673                                         case FANN_SIGMOID:
674                                         case FANN_SIGMOID_STEPWISE:
675                                                 r1 = ann->sigmoid_results[0];
676                                                 r2 = ann->sigmoid_results[1];
677                                                 r3 = ann->sigmoid_results[2];
678                                                 r4 = ann->sigmoid_results[3];
679                                                 r5 = ann->sigmoid_results[4];
680                                                 r6 = ann->sigmoid_results[5];
681                                                 v1 = ann->sigmoid_values[0] / steepness;
682                                                 v2 = ann->sigmoid_values[1] / steepness;
683                                                 v3 = ann->sigmoid_values[2] / steepness;
684                                                 v4 = ann->sigmoid_values[3] / steepness;
685                                                 v5 = ann->sigmoid_values[4] / steepness;
686                                                 v6 = ann->sigmoid_values[5] / steepness;
687                                                 break;
688                                         case FANN_SIGMOID_SYMMETRIC:
689                                         case FANN_SIGMOID_SYMMETRIC_STEPWISE:
690                                                 r1 = ann->sigmoid_symmetric_results[0];
691                                                 r2 = ann->sigmoid_symmetric_results[1];
692                                                 r3 = ann->sigmoid_symmetric_results[2];
693                                                 r4 = ann->sigmoid_symmetric_results[3];
694                                                 r5 = ann->sigmoid_symmetric_results[4];
695                                                 r6 = ann->sigmoid_symmetric_results[5];
696                                                 v1 = ann->sigmoid_symmetric_values[0] / steepness;
697                                                 v2 = ann->sigmoid_symmetric_values[1] / steepness;
698                                                 v3 = ann->sigmoid_symmetric_values[2] / steepness;
699                                                 v4 = ann->sigmoid_symmetric_values[3] / steepness;
700                                                 v5 = ann->sigmoid_symmetric_values[4] / steepness;
701                                                 v6 = ann->sigmoid_symmetric_values[5] / steepness;
702                                                 break;
703                                         case FANN_THRESHOLD:
704                                                 break;
705                                 }
706                         }
707
708                         switch (activation_function)
709                         {
710                                 case FANN_SIGMOID:
711                                 case FANN_SIGMOID_STEPWISE:
712                                         neuron_it->value =
713                                                 (fann_type) fann_stepwise(v1, v2, v3, v4, v5, v6, r1, r2, r3, r4, r5, r6, 0,
714                                                                                                   multiplier, neuron_sum);
715                                         break;
716                                 case FANN_SIGMOID_SYMMETRIC:
717                                 case FANN_SIGMOID_SYMMETRIC_STEPWISE:
718                                         neuron_it->value =
719                                                 (fann_type) fann_stepwise(v1, v2, v3, v4, v5, v6, r1, r2, r3, r4, r5, r6,
720                                                                                                   -multiplier, multiplier, neuron_sum);
721                                         break;
722                                 case FANN_THRESHOLD:
723                                         neuron_it->value = (fann_type) ((neuron_sum < 0) ? 0 : 1);
724                                         break;
725                                 case FANN_THRESHOLD_SYMMETRIC:
726                                         neuron_it->value = (fann_type) ((neuron_sum < 0) ? -1 : 1);
727                                         break;
728                                 case FANN_ELLIOT:
729                                         fann_error((struct fann_error *) ann, FANN_E_CANT_USE_ACTIVATION);
730                         }
731                         last_steepness = steepness;
732                         last_activation_function = activation_function;
733 #else
734                         neuron_sum = fann_mult(steepness, neuron_sum);
735                         
736                         max_sum = 150/steepness;
737                         if(neuron_sum > max_sum)
738                                 neuron_sum = max_sum;
739                         else if(neuron_sum < -max_sum)
740                                 neuron_sum = -max_sum;
741                         
742                         neuron_it->sum = neuron_sum;
743
744                         fann_activation_switch(ann, activation_function, neuron_sum, neuron_it->value);
745 #endif
746                 }
747         }
748
749         /* set the output */
750         output = ann->output;
751         num_output = ann->num_output;
752         neurons = (ann->last_layer - 1)->first_neuron;
753         for(i = 0; i != num_output; i++)
754         {
755                 output[i] = neurons[i].value;
756         }
757         return ann->output;
758 }
759
/* Free every buffer owned by the network, then the network structure itself.
 * Passing NULL is a harmless no-op.  fann_safe_free also clears the freed
 * pointer, so a double call on the same members is safe until ann itself
 * is released at the end. */
FANN_EXTERNAL void FANN_API fann_destroy(struct fann *ann)
{
	if(ann == NULL)
		return;
	/* connection data */
	fann_safe_free(ann->weights);
	fann_safe_free(ann->connections);
	/* all neurons live in one array anchored at the first layer's first neuron */
	fann_safe_free(ann->first_layer->first_neuron);
	fann_safe_free(ann->first_layer);
	fann_safe_free(ann->output);
	/* training scratch buffers (may be NULL if never trained) */
	fann_safe_free(ann->train_errors);
	fann_safe_free(ann->train_slopes);
	fann_safe_free(ann->prev_train_slopes);
	fann_safe_free(ann->prev_steps);
	fann_safe_free(ann->prev_weights_deltas);
	/* error string and cascade-correlation configuration */
	fann_safe_free(ann->errstr);
	fann_safe_free(ann->cascade_activation_functions);
	fann_safe_free(ann->cascade_activation_steepnesses);
	fann_safe_free(ann);
}
779
780 FANN_EXTERNAL void FANN_API fann_randomize_weights(struct fann *ann, fann_type min_weight,
781                                                                                                    fann_type max_weight)
782 {
783         fann_type *last_weight;
784         fann_type *weights = ann->weights;
785
786         last_weight = weights + ann->total_connections;
787         for(; weights != last_weight; weights++)
788         {
789                 *weights = (fann_type) (fann_rand(min_weight, max_weight));
790         }
791
792 #ifndef FIXEDFANN
793         if(ann->prev_train_slopes != NULL)
794         {
795                 fann_clear_train_arrays(ann);
796         }
797 #endif
798 }
799
800 FANN_EXTERNAL void FANN_API fann_print_connections(struct fann *ann)
801 {
802         struct fann_layer *layer_it;
803         struct fann_neuron *neuron_it;
804         unsigned int i;
805         int value;
806         char *neurons;
807         unsigned int num_neurons = fann_get_total_neurons(ann) - fann_get_num_output(ann);
808
809         neurons = (char *) malloc(num_neurons + 1);
810         if(neurons == NULL)
811         {
812                 fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
813                 return;
814         }
815         neurons[num_neurons] = 0;
816
817         printf("Layer / Neuron ");
818         for(i = 0; i < num_neurons; i++)
819         {
820                 printf("%d", i % 10);
821         }
822         printf("\n");
823
824         for(layer_it = ann->first_layer + 1; layer_it != ann->last_layer; layer_it++)
825         {
826                 for(neuron_it = layer_it->first_neuron; neuron_it != layer_it->last_neuron; neuron_it++)
827                 {
828
829                         memset(neurons, (int) '.', num_neurons);
830                         for(i = neuron_it->first_con; i < neuron_it->last_con; i++)
831                         {
832                                 if(ann->weights[i] < 0)
833                                 {
834 #ifdef FIXEDFANN
835                                         value = (int) ((ann->weights[i] / (double) ann->multiplier) - 0.5);
836 #else
837                                         value = (int) ((ann->weights[i]) - 0.5);
838 #endif
839                                         if(value < -25)
840                                                 value = -25;
841                                         neurons[ann->connections[i] - ann->first_layer->first_neuron] = 'a' - value;
842                                 }
843                                 else
844                                 {
845 #ifdef FIXEDFANN
846                                         value = (int) ((ann->weights[i] / (double) ann->multiplier) + 0.5);
847 #else
848                                         value = (int) ((ann->weights[i]) + 0.5);
849 #endif
850                                         if(value > 25)
851                                                 value = 25;
852                                         neurons[ann->connections[i] - ann->first_layer->first_neuron] = 'A' + value;
853                                 }
854                         }
855                         printf("L %3d / N %4d %s\n", layer_it - ann->first_layer,
856                                    neuron_it - ann->first_layer->first_neuron, neurons);
857                 }
858         }
859
860         free(neurons);
861 }
862
/* Initialize the weights using Widrow + Nguyen's algorithm.
 *
 * Bias connections get weights in [-scale_factor, scale_factor]; all other
 * connections get weights in [0, scale_factor] (scaled by the fixed-point
 * multiplier when FIXEDFANN is defined).
 */
FANN_EXTERNAL void FANN_API fann_init_weights(struct fann *ann, struct fann_train_data *train_data)
{
	fann_type smallest_inp, largest_inp;
	unsigned int dat = 0, elem, num_connect, num_hidden_neurons;
	struct fann_layer *layer_it;
	struct fann_neuron *neuron_it, *last_neuron, *bias_neuron;

#ifdef FIXEDFANN
	unsigned int multiplier = ann->multiplier;
#endif
	float scale_factor;

	/* Scan the whole training set for the smallest and largest input value. */
	for(smallest_inp = largest_inp = train_data->input[0][0]; dat < train_data->num_data; dat++)
	{
		for(elem = 0; elem < train_data->num_input; elem++)
		{
			if(train_data->input[dat][elem] < smallest_inp)
				smallest_inp = train_data->input[dat][elem];
			if(train_data->input[dat][elem] > largest_inp)
				largest_inp = train_data->input[dat][elem];
		}
	}

	/* Hidden neurons = all neurons minus inputs, outputs and one bias per layer. */
	num_hidden_neurons =
		ann->total_neurons - (ann->num_input + ann->num_output +
							  (ann->last_layer - ann->first_layer));
	/* Widrow-Nguyen factor: 0.7 * h^(1/n), normalized by the input range.
	 * NOTE(review): this divides by (largest_inp - smallest_inp); if every
	 * input value in the training data is identical this is a division by
	 * zero — confirm callers guarantee a non-degenerate training set. */
	scale_factor =
		(float) (pow
				 ((double) (0.7f * (double) num_hidden_neurons),
				  (double) (1.0f / (double) ann->num_input)) / (double) (largest_inp -
																		 smallest_inp));

#ifdef DEBUG
	printf("Initializing weights with scale factor %f\n", scale_factor);
#endif
	/* For shortcut networks all layers share the input layer's bias neuron;
	 * otherwise each layer's bias is the last neuron of the previous layer. */
	bias_neuron = ann->first_layer->last_neuron - 1;
	for(layer_it = ann->first_layer + 1; layer_it != ann->last_layer; layer_it++)
	{
		last_neuron = layer_it->last_neuron;

		if(!ann->shortcut_connections)
		{
			bias_neuron = (layer_it - 1)->last_neuron - 1;
		}

		for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++)
		{
			for(num_connect = neuron_it->first_con; num_connect < neuron_it->last_con;
				num_connect++)
			{
				if(bias_neuron == ann->connections[num_connect])
				{
					/* bias connections may be negative */
#ifdef FIXEDFANN
					ann->weights[num_connect] =
						(fann_type) fann_rand(-scale_factor, scale_factor * multiplier);
#else
					ann->weights[num_connect] = (fann_type) fann_rand(-scale_factor, scale_factor);
#endif
				}
				else
				{
					/* regular connections are non-negative */
#ifdef FIXEDFANN
					ann->weights[num_connect] = (fann_type) fann_rand(0, scale_factor * multiplier);
#else
					ann->weights[num_connect] = (fann_type) fann_rand(0, scale_factor);
#endif
				}
			}
		}
	}

#ifndef FIXEDFANN
	/* Old training history refers to the old weights; clear it. */
	if(ann->prev_train_slopes != NULL)
	{
		fann_clear_train_arrays(ann);
	}
#endif
}
943
944 FANN_EXTERNAL void FANN_API fann_print_parameters(struct fann *ann)
945 {
946         struct fann_layer *layer_it;
947 #ifndef FIXEDFANN
948         unsigned int i;
949 #endif
950
951         printf("Input layer                          :%4d neurons, 1 bias\n", ann->num_input);
952         for(layer_it = ann->first_layer + 1; layer_it != ann->last_layer - 1; layer_it++)
953         {
954                 if(ann->shortcut_connections)
955                 {
956                         printf("  Hidden layer                       :%4d neurons, 0 bias\n",
957                                    layer_it->last_neuron - layer_it->first_neuron);
958                 }
959                 else
960                 {
961                         printf("  Hidden layer                       :%4d neurons, 1 bias\n",
962                                    layer_it->last_neuron - layer_it->first_neuron - 1);
963                 }
964         }
965         printf("Output layer                         :%4d neurons\n", ann->num_output);
966         printf("Total neurons and biases             :%4d\n", fann_get_total_neurons(ann));
967         printf("Total connections                    :%4d\n", ann->total_connections);
968         printf("Connection rate                      :%8.3f\n", ann->connection_rate);
969         printf("Shortcut connections                 :%4d\n", ann->shortcut_connections);
970 #ifdef FIXEDFANN
971         printf("Decimal point                        :%4d\n", ann->decimal_point);
972         printf("Multiplier                           :%4d\n", ann->multiplier);
973 #else
974         printf("Training algorithm                   :   %s\n", FANN_TRAIN_NAMES[ann->training_algorithm]);
975         printf("Training error function              :   %s\n", FANN_ERRORFUNC_NAMES[ann->train_error_function]);
976         printf("Training stop function               :   %s\n", FANN_STOPFUNC_NAMES[ann->train_stop_function]);
977 #endif
978 #ifdef FIXEDFANN
979         printf("Bit fail limit                       :%4d\n", ann->bit_fail_limit);
980 #else
981         printf("Learning rate                        :%8.3f\n", ann->learning_rate);
982         printf("Learning momentum                    :%8.3f\n", ann->learning_momentum);
983         printf("Quickprop decay                      :%11.6f\n", ann->quickprop_decay);
984         printf("Quickprop mu                         :%8.3f\n", ann->quickprop_mu);
985         printf("RPROP increase factor                :%8.3f\n", ann->rprop_increase_factor);
986         printf("RPROP decrease factor                :%8.3f\n", ann->rprop_decrease_factor);
987         printf("RPROP delta min                      :%8.3f\n", ann->rprop_delta_min);
988         printf("RPROP delta max                      :%8.3f\n", ann->rprop_delta_max);
989         printf("Cascade output change fraction       :%11.6f\n", ann->cascade_output_change_fraction);
990         printf("Cascade candidate change fraction    :%11.6f\n", ann->cascade_candidate_change_fraction);
991         printf("Cascade output stagnation epochs     :%4d\n", ann->cascade_output_stagnation_epochs);
992         printf("Cascade candidate stagnation epochs  :%4d\n", ann->cascade_candidate_stagnation_epochs);
993         printf("Cascade max output epochs            :%4d\n", ann->cascade_max_out_epochs);
994         printf("Cascade max candidate epochs         :%4d\n", ann->cascade_max_cand_epochs);
995         printf("Cascade weight multiplier            :%8.3f\n", ann->cascade_weight_multiplier);
996         printf("Cascade candidate limit              :%8.3f\n", ann->cascade_candidate_limit);
997         for(i = 0; i < ann->cascade_activation_functions_count; i++)
998                 printf("Cascade activation functions[%d]      :   %s\n", i,
999                         FANN_ACTIVATIONFUNC_NAMES[ann->cascade_activation_functions[i]]);
1000         for(i = 0; i < ann->cascade_activation_steepnesses_count; i++)
1001                 printf("Cascade activation steepnesses[%d]    :%8.3f\n", i,
1002                         ann->cascade_activation_steepnesses[i]);
1003                 
1004         printf("Cascade candidate groups             :%4d\n", ann->cascade_num_candidate_groups);
1005         printf("Cascade no. of candidates            :%4d\n", fann_get_cascade_num_candidates(ann));
1006 #endif
1007 }
1008
/* Macro-generated getters: fann_get_num_input() / fann_get_num_output(). */
FANN_GET(unsigned int, num_input)
FANN_GET(unsigned int, num_output)
1011
1012 FANN_EXTERNAL unsigned int FANN_API fann_get_total_neurons(struct fann *ann)
1013 {
1014         if(ann->shortcut_connections)
1015         {
1016                 return ann->total_neurons;
1017         }
1018         else
1019         {
1020                 /* -1, because there is always an unused bias neuron in the last layer */
1021                 return ann->total_neurons - 1;
1022         }
1023 }
1024
1025 FANN_GET(unsigned int, total_connections)
1026
#ifdef FIXEDFANN

/* Macro-generated getters for fixed-point builds:
 * fann_get_decimal_point() / fann_get_multiplier(). */
FANN_GET(unsigned int, decimal_point)
FANN_GET(unsigned int, multiplier)

/* INTERNAL FUNCTION
   Adjust the steepwise functions (if used)
*/
void fann_update_stepwise(struct fann *ann)
{
	unsigned int i = 0;

	/* Calculate the parameters for the stepwise linear
	 * sigmoid function fixed point.
	 * Using a rewritten sigmoid function.
	 * results 0.005, 0.05, 0.25, 0.75, 0.95, 0.995
	 */
	/* Output breakpoints for the plain sigmoid, in fixed-point units of
	 * ann->multiplier.  Each is clamped to stay strictly inside (0, 1)
	 * so the inverse-sigmoid below never takes log of 0 or infinity. */
	ann->sigmoid_results[0] = fann_max((fann_type) (ann->multiplier / 200.0 + 0.5), 1);
	ann->sigmoid_results[1] = fann_max((fann_type) (ann->multiplier / 20.0 + 0.5), 1);
	ann->sigmoid_results[2] = fann_max((fann_type) (ann->multiplier / 4.0 + 0.5), 1);
	ann->sigmoid_results[3] = fann_min(ann->multiplier - (fann_type) (ann->multiplier / 4.0 + 0.5), ann->multiplier - 1);
	ann->sigmoid_results[4] = fann_min(ann->multiplier - (fann_type) (ann->multiplier / 20.0 + 0.5), ann->multiplier - 1);
	ann->sigmoid_results[5] = fann_min(ann->multiplier - (fann_type) (ann->multiplier / 200.0 + 0.5), ann->multiplier - 1);

	/* Same breakpoints for the symmetric sigmoid, clamped inside (-1, 1). */
	ann->sigmoid_symmetric_results[0] = fann_max((fann_type) ((ann->multiplier / 100.0) - ann->multiplier - 0.5),
								 (fann_type) (1 - (fann_type) ann->multiplier));
	ann->sigmoid_symmetric_results[1] =	fann_max((fann_type) ((ann->multiplier / 10.0) - ann->multiplier - 0.5),
								 (fann_type) (1 - (fann_type) ann->multiplier));
	ann->sigmoid_symmetric_results[2] =	fann_max((fann_type) ((ann->multiplier / 2.0) - ann->multiplier - 0.5),
								 (fann_type) (1 - (fann_type) ann->multiplier));
	ann->sigmoid_symmetric_results[3] = fann_min(ann->multiplier - (fann_type) (ann->multiplier / 2.0 + 0.5),
											 ann->multiplier - 1);
	ann->sigmoid_symmetric_results[4] = fann_min(ann->multiplier - (fann_type) (ann->multiplier / 10.0 + 0.5),
											 ann->multiplier - 1);
	ann->sigmoid_symmetric_results[5] = fann_min(ann->multiplier - (fann_type) (ann->multiplier / 100.0 + 1.0),
											 ann->multiplier - 1);

	/* For every output breakpoint, compute the matching input value by
	 * inverting the (steepness-free) sigmoid; the result is pre-scaled by
	 * the multiplier so the forward pass only needs integer comparisons. */
	for(i = 0; i < 6; i++)
	{
		ann->sigmoid_values[i] =
			(fann_type) (((log(ann->multiplier / (float) ann->sigmoid_results[i] - 1) *
						   (float) ann->multiplier) / -2.0) * (float) ann->multiplier);
		ann->sigmoid_symmetric_values[i] =
			(fann_type) (((log
						   ((ann->multiplier -
							 (float) ann->sigmoid_symmetric_results[i]) /
							((float) ann->sigmoid_symmetric_results[i] +
							 ann->multiplier)) * (float) ann->multiplier) / -2.0) *
						 (float) ann->multiplier);
	}
}
#endif
1079
1080
1081 /* INTERNAL FUNCTION
1082    Allocates the main structure and sets some default values.
1083  */
1084 struct fann *fann_allocate_structure(unsigned int num_layers)
1085 {
1086         struct fann *ann;
1087
1088         if(num_layers < 2)
1089         {
1090 #ifdef DEBUG
1091                 printf("less than 2 layers - ABORTING.\n");
1092 #endif
1093                 return NULL;
1094         }
1095
1096         /* allocate and initialize the main network structure */
1097         ann = (struct fann *) malloc(sizeof(struct fann));
1098         if(ann == NULL)
1099         {
1100                 fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
1101                 return NULL;
1102         }
1103
1104         ann->errno_f = FANN_E_NO_ERROR;
1105         ann->error_log = fann_default_error_log;
1106         ann->errstr = NULL;
1107         ann->learning_rate = 0.7f;
1108         ann->learning_momentum = 0.0;
1109         ann->total_neurons = 0;
1110         ann->total_connections = 0;
1111         ann->num_input = 0;
1112         ann->num_output = 0;
1113         ann->train_errors = NULL;
1114         ann->train_slopes = NULL;
1115         ann->prev_steps = NULL;
1116         ann->prev_train_slopes = NULL;
1117         ann->prev_weights_deltas = NULL;
1118         ann->training_algorithm = FANN_TRAIN_RPROP;
1119         ann->num_MSE = 0;
1120         ann->MSE_value = 0;
1121         ann->num_bit_fail = 0;
1122         ann->bit_fail_limit = (fann_type)0.35;
1123         ann->shortcut_connections = 0;
1124         ann->train_error_function = FANN_ERRORFUNC_TANH;
1125         ann->train_stop_function = FANN_STOPFUNC_MSE;
1126         ann->callback = NULL;
1127
1128         /* variables used for cascade correlation (reasonable defaults) */
1129         ann->cascade_output_change_fraction = 0.01f;
1130         ann->cascade_candidate_change_fraction = 0.01f;
1131         ann->cascade_output_stagnation_epochs = 12;
1132         ann->cascade_candidate_stagnation_epochs = 12;
1133         ann->cascade_num_candidate_groups = 2;
1134         ann->cascade_weight_multiplier = (fann_type)0.4;
1135         ann->cascade_candidate_limit = (fann_type)1000.0;
1136         ann->cascade_max_out_epochs = 150;
1137         ann->cascade_max_cand_epochs = 150;
1138         ann->cascade_candidate_scores = NULL;
1139         ann->cascade_activation_functions_count = 6;
1140         ann->cascade_activation_functions = 
1141                 (enum fann_activationfunc_enum *)calloc(ann->cascade_activation_functions_count, 
1142                                                            sizeof(enum fann_activationfunc_enum));
1143         if(ann->cascade_activation_functions == NULL)
1144         {
1145                 fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
1146                 free(ann);
1147                 return NULL;
1148         }
1149                                                            
1150         ann->cascade_activation_functions[0] = FANN_SIGMOID;
1151         ann->cascade_activation_functions[1] = FANN_SIGMOID_SYMMETRIC;
1152         ann->cascade_activation_functions[2] = FANN_GAUSSIAN;
1153         ann->cascade_activation_functions[3] = FANN_GAUSSIAN_SYMMETRIC;
1154         ann->cascade_activation_functions[4] = FANN_ELLIOT;
1155         ann->cascade_activation_functions[5] = FANN_ELLIOT_SYMMETRIC;
1156
1157         ann->cascade_activation_steepnesses_count = 4;
1158         ann->cascade_activation_steepnesses = 
1159                 (fann_type *)calloc(ann->cascade_activation_steepnesses_count, 
1160                                                            sizeof(fann_type));
1161         if(ann->cascade_activation_functions == NULL)
1162         {
1163                 fann_safe_free(ann->cascade_activation_functions);
1164                 fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
1165                 free(ann);
1166                 return NULL;
1167         }
1168         
1169         ann->cascade_activation_steepnesses[0] = (fann_type)0.25;
1170         ann->cascade_activation_steepnesses[1] = (fann_type)0.5;
1171         ann->cascade_activation_steepnesses[2] = (fann_type)0.75;
1172         ann->cascade_activation_steepnesses[3] = (fann_type)1.0;
1173
1174         /* Variables for use with with Quickprop training (reasonable defaults) */
1175         ann->quickprop_decay = (float) -0.0001;
1176         ann->quickprop_mu = 1.75;
1177
1178         /* Variables for use with with RPROP training (reasonable defaults) */
1179         ann->rprop_increase_factor = (float) 1.2;
1180         ann->rprop_decrease_factor = 0.5;
1181         ann->rprop_delta_min = 0.0;
1182         ann->rprop_delta_max = 50.0;
1183         ann->rprop_delta_zero = 0.5;
1184         
1185         fann_init_error_data((struct fann_error *) ann);
1186
1187 #ifdef FIXEDFANN
1188         /* these values are only boring defaults, and should really
1189          * never be used, since the real values are always loaded from a file. */
1190         ann->decimal_point = 8;
1191         ann->multiplier = 256;
1192 #endif
1193
1194         /* allocate room for the layers */
1195         ann->first_layer = (struct fann_layer *) calloc(num_layers, sizeof(struct fann_layer));
1196         if(ann->first_layer == NULL)
1197         {
1198                 fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
1199                 free(ann);
1200                 return NULL;
1201         }
1202
1203         ann->last_layer = ann->first_layer + num_layers;
1204
1205         return ann;
1206 }
1207
/* INTERNAL FUNCTION
   Allocates room for the neurons.
   Precondition: each layer's first_neuron/last_neuron have been set so that
   (last_neuron - first_neuron) encodes that layer's neuron COUNT; this
   function replaces them with real pointers into one shared array.
   On failure it reports FANN_E_CANT_ALLOCATE_MEM on ann and returns with
   the network in a partially initialized state (caller must check the
   error state).
 */
void fann_allocate_neurons(struct fann *ann)
{
	struct fann_layer *layer_it;
	struct fann_neuron *neurons;
	unsigned int num_neurons_so_far = 0;
	unsigned int num_neurons = 0;

	/* all the neurons is allocated in one long array (calloc clears mem) */
	neurons = (struct fann_neuron *) calloc(ann->total_neurons, sizeof(struct fann_neuron));
	ann->total_neurons_allocated = ann->total_neurons;

	if(neurons == NULL)
	{
		fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
		return;
	}

	for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
	{
		/* read the count before overwriting first_neuron with a real pointer */
		num_neurons = layer_it->last_neuron - layer_it->first_neuron;
		layer_it->first_neuron = neurons + num_neurons_so_far;
		layer_it->last_neuron = layer_it->first_neuron + num_neurons;
		num_neurons_so_far += num_neurons;
	}

	/* num_neurons now holds the LAST layer's count, i.e. the output size */
	ann->output = (fann_type *) calloc(num_neurons, sizeof(fann_type));
	if(ann->output == NULL)
	{
		fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
		return;
	}
}
1243
1244 /* INTERNAL FUNCTION
1245    Allocate room for the connections.
1246  */
1247 void fann_allocate_connections(struct fann *ann)
1248 {
1249         ann->weights = (fann_type *) calloc(ann->total_connections, sizeof(fann_type));
1250         if(ann->weights == NULL)
1251         {
1252                 fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
1253                 return;
1254         }
1255         ann->total_connections_allocated = ann->total_connections;
1256
1257         /* TODO make special cases for all places where the connections
1258          * is used, so that it is not needed for fully connected networks.
1259          */
1260         ann->connections =
1261                 (struct fann_neuron **) calloc(ann->total_connections_allocated,
1262                                                                            sizeof(struct fann_neuron *));
1263         if(ann->connections == NULL)
1264         {
1265                 fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
1266                 return;
1267         }
1268 }
1269
1270
1271 /* INTERNAL FUNCTION
1272    Seed the random function.
1273  */
1274 void fann_seed_rand()
1275 {
1276 #ifndef _WIN32
1277         FILE *fp = fopen("/dev/urandom", "r");
1278         unsigned int foo;
1279         struct timeval t;
1280
1281         if(!fp)
1282         {
1283                 gettimeofday(&t, NULL);
1284                 foo = t.tv_usec;
1285 #ifdef DEBUG
1286                 printf("unable to open /dev/urandom\n");
1287 #endif
1288         }
1289         else
1290         {
1291                 fread(&foo, sizeof(foo), 1, fp);
1292                 fclose(fp);
1293         }
1294         srand(foo);
1295 #else
1296         /* COMPAT_TIME REPLACEMENT */
1297         srand(GetTickCount());
1298 #endif
1299 }
1300