2 Fast Artificial Neural Network Library (fann)
3 Copyright (C) 2003 Steffen Nissen (lukesky@diku.dk)
5 This library is free software; you can redistribute it and/or
6 modify it under the terms of the GNU Lesser General Public
7 License as published by the Free Software Foundation; either
8 version 2.1 of the License, or (at your option) any later version.
10 This library is distributed in the hope that it will be useful,
11 but WITHOUT ANY WARRANTY; without even the implied warranty of
12 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 Lesser General Public License for more details.
15 You should have received a copy of the GNU Lesser General Public
16 License along with this library; if not, write to the Free Software
17 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
/* fann_create_standard: create a fully connected standard backpropagation
 * network.  The variadic arguments are num_layers unsigned ints, one per
 * layer, giving that layer's neuron count; the real work is delegated to
 * fann_create_standard_array().
 * Returns the new network, or NULL on allocation failure (per the visible
 * error path).
 * NOTE(review): this excerpt elides several original lines (braces, the
 * NULL check after calloc, va_end, free(layers), return ann) — confirm
 * against the full source. */
30 FANN_EXTERNAL struct fann *FANN_API fann_create_standard(unsigned int num_layers, ...)
/* temporary array holding one size per layer, filled from the varargs */
35 unsigned int *layers = (unsigned int *) calloc(num_layers, sizeof(unsigned int));
/* calloc failed: report out-of-memory through the global error handler */
39 fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
/* pull the per-layer sizes out of the variadic argument list */
43 va_start(layer_sizes, num_layers);
44 for(i = 0; i < (int) num_layers; i++)
46 layers[i] = va_arg(layer_sizes, unsigned int);
/* delegate to the array-based constructor */
50 ann = fann_create_standard_array(num_layers, layers);
/* fann_create_standard_array: fully connected network from an array of
 * layer sizes.  A "standard" network is simply a sparse network with a
 * connection rate of 1 (every neuron connected to every neuron in the
 * previous layer). */
57 FANN_EXTERNAL struct fann *FANN_API fann_create_standard_array(unsigned int num_layers,
60 return fann_create_sparse_array(1, num_layers, layers);
/* fann_create_sparse: create a sparsely connected network.
 * connection_rate in [0,1] controls how many of the possible connections
 * are created (1 == fully connected).  The varargs are num_layers
 * unsigned ints giving each layer's neuron count; delegates to
 * fann_create_sparse_array().
 * NOTE(review): this excerpt elides the calloc NULL check body, va_end,
 * free(layers) and the return — confirm against the full source. */
63 FANN_EXTERNAL struct fann *FANN_API fann_create_sparse(float connection_rate,
64 unsigned int num_layers, ...)
/* temporary array of per-layer sizes, filled from the varargs below */
69 unsigned int *layers = (unsigned int *) calloc(num_layers, sizeof(unsigned int));
73 fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
77 va_start(layer_sizes, num_layers);
78 for(i = 0; i < (int) num_layers; i++)
80 layers[i] = va_arg(layer_sizes, unsigned int);
84 ann = fann_create_sparse_array(connection_rate, num_layers, layers);
/* fann_create_sparse_array: the workhorse constructor.  Builds the layer
 * and neuron bookkeeping, allocates neurons and connections, and then
 * wires the network either fully (connection_rate >= 1) or randomly and
 * sparsely (connection_rate < 1), guaranteeing every neuron at least one
 * inbound and one outbound connection plus a bias connection.
 * Returns the new struct fann, or NULL on allocation failure.
 * NOTE(review): many original lines are elided in this excerpt (braces,
 * #ifdef FIXEDFANN guards around decimal_point/multiplier, error-path
 * frees and returns, DEBUG printf guards) — confirm against full source. */
91 FANN_EXTERNAL struct fann *FANN_API fann_create_sparse_array(float connection_rate,
92 unsigned int num_layers,
95 struct fann_layer *layer_it, *last_layer, *prev_layer;
97 struct fann_neuron *neuron_it, *last_neuron, *random_neuron, *bias_neuron;
99 unsigned int prev_layer_size;
101 unsigned int num_neurons_in, num_neurons_out, i, j;
102 unsigned int min_connections, max_connections, num_connections;
103 unsigned int connections_per_neuron, allocated_connections;
104 unsigned int random_number, found_connection;
/* decimal_point/multiplier are presumably only meaningful in the fixed
 * point (FIXEDFANN) build — TODO confirm the elided #ifdef */
107 unsigned int decimal_point;
108 unsigned int multiplier;
/* a rate above 1 makes no sense; presumably clamped to 1 in elided code */
110 if(connection_rate > 1)
120 /* allocate the general structure */
121 ann = fann_allocate_structure(num_layers);
124 fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
128 ann->connection_rate = connection_rate;
130 decimal_point = ann->decimal_point;
131 multiplier = ann->multiplier;
/* recompute the stepwise-sigmoid lookup tables for this network */
132 fann_update_stepwise(ann);
135 /* determine how many neurons there should be in each layer */
137 for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
139 /* we do not allocate room here, but we make sure that
140 * last_neuron - first_neuron is the number of neurons */
141 layer_it->first_neuron = NULL;
142 layer_it->last_neuron = layer_it->first_neuron + layers[i++] + 1; /* +1 for bias */
143 ann->total_neurons += layer_it->last_neuron - layer_it->first_neuron;
/* the trailing bias neuron is excluded from the visible input/output counts */
146 ann->num_output = (ann->last_layer - 1)->last_neuron - (ann->last_layer - 1)->first_neuron - 1;
147 ann->num_input = ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1;
149 /* allocate room for the actual neurons */
150 fann_allocate_neurons(ann);
151 if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
158 printf("creating network with connection rate %f\n", connection_rate);
160 printf(" layer : %d neurons, 1 bias\n",
161 ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1);
/* Pass 1: decide how many connections each neuron in each layer gets and
 * set up its first_con/last_con index range into the connection arrays. */
164 num_neurons_in = ann->num_input;
165 for(layer_it = ann->first_layer + 1; layer_it != ann->last_layer; layer_it++)
167 num_neurons_out = layer_it->last_neuron - layer_it->first_neuron - 1;
168 /* if all neurons in each layer should be connected to at least one neuron
169 * in the previous layer, and one neuron in the next layer.
170 * and the bias node should be connected to the all neurons in the next layer.
171 * Then this is the minimum amount of neurons */
172 min_connections = fann_max(num_neurons_in, num_neurons_out) + num_neurons_out;
173 max_connections = num_neurons_in * num_neurons_out; /* not calculating bias */
174 num_connections = fann_max(min_connections,
175 (unsigned int) (0.5 + (connection_rate * max_connections)) +
178 connections_per_neuron = num_connections / num_neurons_out;
179 allocated_connections = 0;
180 /* Now split out the connections on the different neurons */
181 for(i = 0; i != num_neurons_out; i++)
183 layer_it->first_neuron[i].first_con = ann->total_connections + allocated_connections;
184 allocated_connections += connections_per_neuron;
185 layer_it->first_neuron[i].last_con = ann->total_connections + allocated_connections;
187 layer_it->first_neuron[i].activation_function = FANN_SIGMOID_STEPWISE;
/* fixed-point build stores steepness scaled by the multiplier;
 * floating-point build uses plain 0.5 (the #ifdef lines are elided) */
189 layer_it->first_neuron[i].activation_steepness = ann->multiplier / 2;
191 layer_it->first_neuron[i].activation_steepness = 0.5;
/* distribute the remainder of num_connections / num_neurons_out evenly */
194 if(allocated_connections < (num_connections * (i + 1)) / num_neurons_out)
196 layer_it->first_neuron[i].last_con++;
197 allocated_connections++;
201 /* bias neuron also gets stuff */
202 layer_it->first_neuron[i].first_con = ann->total_connections + allocated_connections;
203 layer_it->first_neuron[i].last_con = ann->total_connections + allocated_connections;
205 ann->total_connections += num_connections;
207 /* used in the next run of the loop */
208 num_neurons_in = num_neurons_out;
211 fann_allocate_connections(ann);
212 if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
/* Pass 2a: fully connected wiring — every neuron connects to the whole
 * previous layer (including its bias), with random initial weights. */
218 if(connection_rate >= 1)
221 prev_layer_size = ann->num_input + 1;
223 prev_layer = ann->first_layer;
224 last_layer = ann->last_layer;
225 for(layer_it = ann->first_layer + 1; layer_it != last_layer; layer_it++)
227 last_neuron = layer_it->last_neuron - 1;
228 for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++)
230 for(i = neuron_it->first_con; i != neuron_it->last_con; i++)
232 ann->weights[i] = (fann_type) fann_random_weight();
233 /* these connections are still initialized for fully connected networks, to allow
234 * operations to work, that are not optimized for fully connected networks.
236 ann->connections[i] = prev_layer->first_neuron + (i - neuron_it->first_con);
240 prev_layer_size = layer_it->last_neuron - layer_it->first_neuron;
242 prev_layer = layer_it;
244 printf(" layer : %d neurons, 1 bias\n", prev_layer_size - 1);
/* Pass 2b: sparse wiring */
250 /* make connections for a network, that are not fully connected */
252 /* generally, what we do is first to connect all the input
253 * neurons to a output neuron, respecting the number of
254 * available input neurons for each output neuron. Then
255 * we go through all the output neurons, and connect the
256 * rest of the connections to input neurons, that they are
257 * not allready connected to.
260 /* All the connections are cleared by calloc, because we want to
261 * be able to see which connections are allready connected */
263 for(layer_it = ann->first_layer + 1; layer_it != ann->last_layer; layer_it++)
266 num_neurons_out = layer_it->last_neuron - layer_it->first_neuron - 1;
267 num_neurons_in = (layer_it - 1)->last_neuron - (layer_it - 1)->first_neuron - 1;
269 /* first connect the bias neuron */
270 bias_neuron = (layer_it - 1)->last_neuron - 1;
271 last_neuron = layer_it->last_neuron - 1;
272 for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++)
275 ann->connections[neuron_it->first_con] = bias_neuron;
276 ann->weights[neuron_it->first_con] = (fann_type) fann_random_weight();
279 /* then connect all neurons in the input layer */
280 last_neuron = (layer_it - 1)->last_neuron - 1;
281 for(neuron_it = (layer_it - 1)->first_neuron; neuron_it != last_neuron; neuron_it++)
284 /* random neuron in the output layer that has space
285 * for more connections */
288 random_number = (int) (0.5 + fann_rand(0, num_neurons_out - 1));
289 random_neuron = layer_it->first_neuron + random_number;
290 /* checks the last space in the connections array for room */
/* NOTE(review): this do/while retries until a target neuron with a free
 * slot is found — the loop opening is elided from this excerpt */
292 while(ann->connections[random_neuron->last_con - 1]);
294 /* find an empty space in the connection array and connect */
295 for(i = random_neuron->first_con; i < random_neuron->last_con; i++)
297 if(ann->connections[i] == NULL)
299 ann->connections[i] = neuron_it;
300 ann->weights[i] = (fann_type) fann_random_weight();
306 /* then connect the rest of the unconnected neurons */
307 last_neuron = layer_it->last_neuron - 1;
308 for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++)
310 /* find empty space in the connection array and connect */
311 for(i = neuron_it->first_con; i < neuron_it->last_con; i++)
313 /* continue if allready connected */
314 if(ann->connections[i] != NULL)
/* rejection-sample a previous-layer neuron not already connected here */
319 found_connection = 0;
320 random_number = (int) (0.5 + fann_rand(0, num_neurons_in - 1));
321 random_neuron = (layer_it - 1)->first_neuron + random_number;
323 /* check to see if this connection is allready there */
324 for(j = neuron_it->first_con; j < i; j++)
326 if(random_neuron == ann->connections[j])
328 found_connection = 1;
334 while(found_connection);
336 /* we have found a neuron that is not allready
337 * connected to us, connect it */
338 ann->connections[i] = random_neuron;
339 ann->weights[i] = (fann_type) fann_random_weight();
344 printf(" layer : %d neurons, 1 bias\n", num_neurons_out);
348 /* TODO it would be nice to have the randomly created
349 * connections sorted for smoother memory access.
/* fann_create_shortcut: create a network where every neuron is connected
 * to all neurons in ALL earlier layers (shortcut connections), not just
 * the immediately preceding one.  The varargs are num_layers unsigned
 * ints giving each layer's neuron count; delegates to
 * fann_create_shortcut_array().
 * NOTE(review): this excerpt elides the calloc NULL check body, va_end,
 * free(layers) and the return — confirm against the full source. */
361 FANN_EXTERNAL struct fann *FANN_API fann_create_shortcut(unsigned int num_layers, ...)
/* temporary array of per-layer sizes, filled from the varargs below */
366 unsigned int *layers = (unsigned int *) calloc(num_layers, sizeof(unsigned int));
370 fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
375 va_start(layer_sizes, num_layers);
376 for(i = 0; i < (int) num_layers; i++)
378 layers[i] = va_arg(layer_sizes, unsigned int);
382 ann = fann_create_shortcut_array(num_layers, layers);
/* fann_create_shortcut_array: build a fully shortcut-connected network
 * from an array of layer sizes.  Only the input layer carries a bias
 * neuron; every neuron in layer k connects to every neuron in all layers
 * 0..k-1.  Returns the new struct fann, or NULL on allocation failure.
 * NOTE(review): braces, #ifdef FIXEDFANN guards, error-path returns and
 * DEBUG printf guards are elided in this excerpt. */
389 FANN_EXTERNAL struct fann *FANN_API fann_create_shortcut_array(unsigned int num_layers,
390 unsigned int *layers)
392 struct fann_layer *layer_it, *layer_it2, *last_layer;
394 struct fann_neuron *neuron_it, *neuron_it2 = 0;
396 unsigned int num_neurons_in, num_neurons_out;
/* presumably only used in the fixed-point build — TODO confirm */
399 unsigned int decimal_point;
400 unsigned int multiplier;
407 /* allocate the general structure */
408 ann = fann_allocate_structure(num_layers);
411 fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
/* shortcut networks are fully connected and flagged as shortcut */
415 ann->connection_rate = 1;
416 ann->shortcut_connections = 1;
418 decimal_point = ann->decimal_point;
419 multiplier = ann->multiplier;
420 fann_update_stepwise(ann);
423 /* determine how many neurons there should be in each layer */
425 for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
427 /* we do not allocate room here, but we make sure that
428 * last_neuron - first_neuron is the number of neurons */
429 layer_it->first_neuron = NULL;
430 layer_it->last_neuron = layer_it->first_neuron + layers[i++];
431 if(layer_it == ann->first_layer)
433 /* there is a bias neuron in the first layer */
434 layer_it->last_neuron++;
437 ann->total_neurons += layer_it->last_neuron - layer_it->first_neuron;
/* unlike the standard network, the output layer has no bias neuron here */
440 ann->num_output = (ann->last_layer - 1)->last_neuron - (ann->last_layer - 1)->first_neuron;
441 ann->num_input = ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1;
443 /* allocate room for the actual neurons */
444 fann_allocate_neurons(ann);
445 if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
452 printf("creating fully shortcut connected network.\n");
454 printf(" layer : %d neurons, 1 bias\n",
455 ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1);
/* Pass 1: each neuron gets one connection per neuron in ALL previous
 * layers (num_neurons_in accumulates across layers) plus the bias. */
458 num_neurons_in = ann->num_input;
459 last_layer = ann->last_layer;
460 for(layer_it = ann->first_layer + 1; layer_it != last_layer; layer_it++)
462 num_neurons_out = layer_it->last_neuron - layer_it->first_neuron;
464 /* Now split out the connections on the different neurons */
465 for(i = 0; i != num_neurons_out; i++)
467 layer_it->first_neuron[i].first_con = ann->total_connections;
468 ann->total_connections += num_neurons_in + 1;
469 layer_it->first_neuron[i].last_con = ann->total_connections;
471 layer_it->first_neuron[i].activation_function = FANN_SIGMOID_STEPWISE;
/* fixed-point vs floating-point steepness default (elided #ifdef) */
473 layer_it->first_neuron[i].activation_steepness = ann->multiplier / 2;
475 layer_it->first_neuron[i].activation_steepness = 0.5;
480 printf(" layer : %d neurons, 0 bias\n", num_neurons_out);
482 /* used in the next run of the loop */
483 num_neurons_in += num_neurons_out;
486 fann_allocate_connections(ann);
487 if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
/* Pass 2: wire every neuron to all neurons in all earlier layers with
 * random initial weights. */
493 /* Connections are created from all neurons to all neurons in later layers
495 num_neurons_in = ann->num_input + 1;
496 for(layer_it = ann->first_layer + 1; layer_it != last_layer; layer_it++)
498 for(neuron_it = layer_it->first_neuron; neuron_it != layer_it->last_neuron; neuron_it++)
501 i = neuron_it->first_con;
502 for(layer_it2 = ann->first_layer; layer_it2 != layer_it; layer_it2++)
504 for(neuron_it2 = layer_it2->first_neuron; neuron_it2 != layer_it2->last_neuron;
508 ann->weights[i] = (fann_type) fann_random_weight();
509 ann->connections[i] = neuron_it2;
514 num_neurons_in += layer_it->last_neuron - layer_it->first_neuron;
/* fann_run: forward-propagate one input vector through the network and
 * return a pointer to the network's output array (owned by ann, valid
 * until the next run/destroy).  Handles fully connected, shortcut and
 * sparse topologies, with a 4x unrolled inner product loop.
 * NOTE(review): this excerpt elides many lines — braces, #ifdef FIXEDFANN
 * guards, the remaining stepwise r*/v* table loads, several case labels
 * and break statements — confirm against the full source. */
524 FANN_EXTERNAL fann_type *FANN_API fann_run(struct fann * ann, fann_type * input)
526 struct fann_neuron *neuron_it, *last_neuron, *neurons, **neuron_pointers;
527 unsigned int i, num_connections, num_input, num_output;
528 fann_type neuron_sum, *output;
530 struct fann_layer *layer_it, *last_layer;
531 unsigned int activation_function;
534 /* store some variabels local for fast access */
535 struct fann_neuron *first_neuron = ann->first_layer->first_neuron;
/* multiplier/decimal_point: fixed-point scaling factors (FIXEDFANN build) */
538 int multiplier = ann->multiplier;
539 unsigned int decimal_point = ann->decimal_point;
541 /* values used for the stepwise linear sigmoid function */
542 fann_type r1 = 0, r2 = 0, r3 = 0, r4 = 0, r5 = 0, r6 = 0;
543 fann_type v1 = 0, v2 = 0, v3 = 0, v4 = 0, v5 = 0, v6 = 0;
/* cache so the stepwise table is only reloaded when the activation
 * function or steepness actually changes between neurons */
545 fann_type last_steepness = 0;
546 unsigned int last_activation_function = 0;
551 /* first set the input */
552 num_input = ann->num_input;
553 for(i = 0; i != num_input; i++)
/* fixed-point build: warn about inputs that could overflow */
556 if(fann_abs(input[i]) > multiplier)
559 ("Warning input number %d is out of range -%d - %d with value %d, integer overflow may occur.\n",
560 i, multiplier, multiplier, input[i]);
563 first_neuron[i].value = input[i];
565 /* Set the bias neuron in the input layer */
/* fixed-point bias is 'multiplier' (== 1.0 scaled); float bias is 1 */
567 (ann->first_layer->last_neuron - 1)->value = multiplier;
569 (ann->first_layer->last_neuron - 1)->value = 1;
/* propagate layer by layer */
572 last_layer = ann->last_layer;
573 for(layer_it = ann->first_layer + 1; layer_it != last_layer; layer_it++)
575 last_neuron = layer_it->last_neuron;
576 for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++)
/* a neuron with no connections is a bias neuron: pin its value to 1 */
578 if(neuron_it->first_con == neuron_it->last_con)
582 neuron_it->value = multiplier;
584 neuron_it->value = 1;
589 activation_function = neuron_it->activation_function;
590 steepness = neuron_it->activation_steepness;
593 num_connections = neuron_it->last_con - neuron_it->first_con;
594 weights = ann->weights + neuron_it->first_con;
/* fully connected: source neurons are contiguous, skip the pointer array */
596 if(ann->connection_rate >= 1)
598 if(ann->shortcut_connections)
/* shortcut nets feed from ALL earlier layers, starting at the input */
600 neurons = ann->first_layer->first_neuron;
604 neurons = (layer_it - 1)->first_neuron;
608 /* unrolled loop start */
609 i = num_connections & 3; /* same as modulo 4 */
/* handle the 0-3 leftover connections (Duff-style fallthrough; the
 * case labels are elided in this excerpt) */
613 neuron_sum += fann_mult(weights[2], neurons[2].value);
615 neuron_sum += fann_mult(weights[1], neurons[1].value);
617 neuron_sum += fann_mult(weights[0], neurons[0].value);
/* main body: four multiply-accumulates per iteration */
622 for(; i != num_connections; i += 4)
625 fann_mult(weights[i], neurons[i].value) +
626 fann_mult(weights[i + 1], neurons[i + 1].value) +
627 fann_mult(weights[i + 2], neurons[i + 2].value) +
628 fann_mult(weights[i + 3], neurons[i + 3].value);
630 /* unrolled loop end */
633 * for(i = 0;i != num_connections; i++){
634 * printf("%f += %f*%f, ", neuron_sum, weights[i], neurons[i].value);
635 * neuron_sum += fann_mult(weights[i], neurons[i].value);
/* sparse: indirect through the per-connection neuron pointer array */
641 neuron_pointers = ann->connections + neuron_it->first_con;
643 i = num_connections & 3; /* same as modulo 4 */
647 neuron_sum += fann_mult(weights[2], neuron_pointers[2]->value);
649 neuron_sum += fann_mult(weights[1], neuron_pointers[1]->value);
651 neuron_sum += fann_mult(weights[0], neuron_pointers[0]->value);
656 for(; i != num_connections; i += 4)
659 fann_mult(weights[i], neuron_pointers[i]->value) +
660 fann_mult(weights[i + 1], neuron_pointers[i + 1]->value) +
661 fann_mult(weights[i + 2], neuron_pointers[i + 2]->value) +
662 fann_mult(weights[i + 3], neuron_pointers[i + 3]->value);
/* fixed-point path: scale the sum by steepness, then apply the stepwise
 * approximation using cached r*/
667 neuron_it->sum = fann_mult(steepness, neuron_sum);
669 if(activation_function != last_activation_function || steepness != last_steepness)
671 switch (activation_function)
674 case FANN_SIGMOID_STEPWISE:
675 r1 = ann->sigmoid_results[0];
676 r2 = ann->sigmoid_results[1];
677 r3 = ann->sigmoid_results[2];
678 r4 = ann->sigmoid_results[3];
679 r5 = ann->sigmoid_results[4];
680 r6 = ann->sigmoid_results[5];
681 v1 = ann->sigmoid_values[0] / steepness;
682 v2 = ann->sigmoid_values[1] / steepness;
683 v3 = ann->sigmoid_values[2] / steepness;
684 v4 = ann->sigmoid_values[3] / steepness;
685 v5 = ann->sigmoid_values[4] / steepness;
686 v6 = ann->sigmoid_values[5] / steepness;
688 case FANN_SIGMOID_SYMMETRIC:
689 case FANN_SIGMOID_SYMMETRIC_STEPWISE:
690 r1 = ann->sigmoid_symmetric_results[0];
691 r2 = ann->sigmoid_symmetric_results[1];
692 r3 = ann->sigmoid_symmetric_results[2];
693 r4 = ann->sigmoid_symmetric_results[3];
694 r5 = ann->sigmoid_symmetric_results[4];
695 r6 = ann->sigmoid_symmetric_results[5];
696 v1 = ann->sigmoid_symmetric_values[0] / steepness;
697 v2 = ann->sigmoid_symmetric_values[1] / steepness;
698 v3 = ann->sigmoid_symmetric_values[2] / steepness;
699 v4 = ann->sigmoid_symmetric_values[3] / steepness;
700 v5 = ann->sigmoid_symmetric_values[4] / steepness;
701 v6 = ann->sigmoid_symmetric_values[5] / steepness;
/* apply the (possibly cached) stepwise activation */
708 switch (activation_function)
711 case FANN_SIGMOID_STEPWISE:
713 (fann_type) fann_stepwise(v1, v2, v3, v4, v5, v6, r1, r2, r3, r4, r5, r6, 0,
714 multiplier, neuron_sum);
716 case FANN_SIGMOID_SYMMETRIC:
717 case FANN_SIGMOID_SYMMETRIC_STEPWISE:
719 (fann_type) fann_stepwise(v1, v2, v3, v4, v5, v6, r1, r2, r3, r4, r5, r6,
720 -multiplier, multiplier, neuron_sum);
/* threshold activations: hard 0/1 and -1/1 outputs */
723 neuron_it->value = (fann_type) ((neuron_sum < 0) ? 0 : 1);
725 case FANN_THRESHOLD_SYMMETRIC:
726 neuron_it->value = (fann_type) ((neuron_sum < 0) ? -1 : 1);
729 fann_error((struct fann_error *) ann, FANN_E_CANT_USE_ACTIVATION);
731 last_steepness = steepness;
732 last_activation_function = activation_function;
/* floating-point path: scale by steepness, clamp to +/-150/steepness to
 * keep exp() in the activation from overflowing, then dispatch */
734 neuron_sum = fann_mult(steepness, neuron_sum);
736 max_sum = 150/steepness;
737 if(neuron_sum > max_sum)
738 neuron_sum = max_sum;
739 else if(neuron_sum < -max_sum)
740 neuron_sum = -max_sum;
742 neuron_it->sum = neuron_sum;
744 fann_activation_switch(ann, activation_function, neuron_sum, neuron_it->value);
/* copy the last layer's neuron values into the user-visible output array */
750 output = ann->output;
751 num_output = ann->num_output;
752 neurons = (ann->last_layer - 1)->first_neuron;
753 for(i = 0; i != num_output; i++)
755 output[i] = neurons[i].value;
/* fann_destroy: release every buffer owned by the network, then (in
 * elided code, presumably) free the struct itself.  fann_safe_free is
 * assumed to be a NULL-tolerant free — TODO confirm its definition.
 * Note the neuron array is freed via first_layer->first_neuron because
 * all neurons live in one contiguous allocation (see
 * fann_allocate_neurons). */
760 FANN_EXTERNAL void FANN_API fann_destroy(struct fann *ann)
764 fann_safe_free(ann->weights);
765 fann_safe_free(ann->connections);
766 fann_safe_free(ann->first_layer->first_neuron);
767 fann_safe_free(ann->first_layer);
768 fann_safe_free(ann->output);
769 fann_safe_free(ann->train_errors);
770 fann_safe_free(ann->train_slopes);
771 fann_safe_free(ann->prev_train_slopes);
772 fann_safe_free(ann->prev_steps);
773 fann_safe_free(ann->prev_weights_deltas);
774 fann_safe_free(ann->errstr);
775 fann_safe_free(ann->cascade_activation_functions);
776 fann_safe_free(ann->cascade_activation_steepnesses);
/* fann_randomize_weights: assign every connection weight a uniform
 * random value in [min_weight, max_weight].  Also clears the RPROP /
 * Quickprop training history (slopes, steps) if it has been allocated,
 * since old history is meaningless for the new weights. */
780 FANN_EXTERNAL void FANN_API fann_randomize_weights(struct fann *ann, fann_type min_weight,
781 fann_type max_weight)
783 fann_type *last_weight;
784 fann_type *weights = ann->weights;
786 last_weight = weights + ann->total_connections;
787 for(; weights != last_weight; weights++)
789 *weights = (fann_type) (fann_rand(min_weight, max_weight));
/* reset training state that depends on the previous weights */
793 if(ann->prev_train_slopes != NULL)
795 fann_clear_train_arrays(ann);
/* fann_print_connections: print an ASCII matrix of the network's
 * connections — one row per (non-input) neuron, one column per possible
 * source neuron.  '.' means no connection; letters encode weight
 * magnitude ('A'..'Z' ~ 0..25 for positive, 'a'..'z' for negative).
 * NOTE(review): the printf at the bottom passes pointer differences
 * (ptrdiff_t) to %d — relies on them fitting in int; and the free(neurons)
 * presumably lives in elided lines — confirm against full source. */
800 FANN_EXTERNAL void FANN_API fann_print_connections(struct fann *ann)
802 struct fann_layer *layer_it;
803 struct fann_neuron *neuron_it;
/* output neurons are never connection sources, so exclude them */
807 unsigned int num_neurons = fann_get_total_neurons(ann) - fann_get_num_output(ann);
/* one char per potential source neuron, plus a terminating NUL */
809 neurons = (char *) malloc(num_neurons + 1);
812 fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
815 neurons[num_neurons] = 0;
/* column header: repeat digits 0-9 so columns can be counted by eye */
817 printf("Layer / Neuron ");
818 for(i = 0; i < num_neurons; i++)
820 printf("%d", i % 10);
824 for(layer_it = ann->first_layer + 1; layer_it != ann->last_layer; layer_it++)
826 for(neuron_it = layer_it->first_neuron; neuron_it != layer_it->last_neuron; neuron_it++)
/* start each row as all-unconnected, then mark this neuron's sources */
829 memset(neurons, (int) '.', num_neurons);
830 for(i = neuron_it->first_con; i < neuron_it->last_con; i++)
832 if(ann->weights[i] < 0)
/* fixed-point build rescales by the multiplier (elided #ifdef) */
835 value = (int) ((ann->weights[i] / (double) ann->multiplier) - 0.5);
837 value = (int) ((ann->weights[i]) - 0.5);
/* negative weights: 'a' (weak) .. 'z' (strong); value is negative here */
841 neurons[ann->connections[i] - ann->first_layer->first_neuron] = 'a' - value;
846 value = (int) ((ann->weights[i] / (double) ann->multiplier) + 0.5);
848 value = (int) ((ann->weights[i]) + 0.5);
/* positive weights: 'A' (weak) .. 'Z' (strong) */
852 neurons[ann->connections[i] - ann->first_layer->first_neuron] = 'A' + value;
855 printf("L %3d / N %4d %s\n", layer_it - ann->first_layer,
856 neuron_it - ann->first_layer->first_neuron, neurons);
863 /* Initialize the weights using Widrow + Nguyen's algorithm.
/* fann_init_weights: data-driven weight initialization.  Scans the
 * training inputs for their range, derives the Widrow-Nguyen scale
 * factor from the hidden-neuron count and input range, then assigns
 * random weights scaled accordingly (bias connections get a different
 * range, and the fixed-point build multiplies by ann->multiplier).
 * Finally clears any existing RPROP/Quickprop training history.
 * NOTE(review): the scale_factor expression is split across elided lines
 * (it appears to use pow(0.7*hidden, 1/num_input)) — confirm against the
 * full source. */
865 FANN_EXTERNAL void FANN_API fann_init_weights(struct fann *ann, struct fann_train_data *train_data)
867 fann_type smallest_inp, largest_inp;
868 unsigned int dat = 0, elem, num_connect, num_hidden_neurons;
869 struct fann_layer *layer_it;
870 struct fann_neuron *neuron_it, *last_neuron, *bias_neuron;
873 unsigned int multiplier = ann->multiplier;
/* find the min and max over every input value in the training set */
877 for(smallest_inp = largest_inp = train_data->input[0][0]; dat < train_data->num_data; dat++)
879 for(elem = 0; elem < train_data->num_input; elem++)
881 if(train_data->input[dat][elem] < smallest_inp)
882 smallest_inp = train_data->input[dat][elem];
883 if(train_data->input[dat][elem] > largest_inp)
884 largest_inp = train_data->input[dat][elem];
/* hidden neurons = total minus inputs, outputs and one bias per layer */
889 ann->total_neurons - (ann->num_input + ann->num_output +
890 (ann->last_layer - ann->first_layer));
893 ((double) (0.7f * (double) num_hidden_neurons),
894 (double) (1.0f / (double) ann->num_input)) / (double) (largest_inp -
898 printf("Initializing weights with scale factor %f\n", scale_factor);
900 bias_neuron = ann->first_layer->last_neuron - 1;
901 for(layer_it = ann->first_layer + 1; layer_it != ann->last_layer; layer_it++)
903 last_neuron = layer_it->last_neuron;
/* non-shortcut nets: the bias feeding this layer is the previous
 * layer's last neuron */
905 if(!ann->shortcut_connections)
907 bias_neuron = (layer_it - 1)->last_neuron - 1;
910 for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++)
912 for(num_connect = neuron_it->first_con; num_connect < neuron_it->last_con;
/* bias connections get a symmetric range; others start at 0
 * (fixed-point vs float variants, #ifdefs elided) */
915 if(bias_neuron == ann->connections[num_connect])
918 ann->weights[num_connect] =
919 (fann_type) fann_rand(-scale_factor, scale_factor * multiplier);
921 ann->weights[num_connect] = (fann_type) fann_rand(-scale_factor, scale_factor);
927 ann->weights[num_connect] = (fann_type) fann_rand(0, scale_factor * multiplier);
929 ann->weights[num_connect] = (fann_type) fann_rand(0, scale_factor);
/* reset training state that depended on the previous weights */
937 if(ann->prev_train_slopes != NULL)
939 fann_clear_train_arrays(ann);
/* fann_print_parameters: dump a human-readable summary of the network's
 * topology, training configuration and cascade-correlation parameters to
 * stdout.  Purely diagnostic; does not modify the network.
 * NOTE(review): #ifdef FIXEDFANN guards around the decimal point /
 * multiplier lines are elided in this excerpt. */
944 FANN_EXTERNAL void FANN_API fann_print_parameters(struct fann *ann)
946 struct fann_layer *layer_it;
951 printf("Input layer :%4d neurons, 1 bias\n", ann->num_input);
952 for(layer_it = ann->first_layer + 1; layer_it != ann->last_layer - 1; layer_it++)
/* shortcut layers carry no bias neuron, regular hidden layers do */
954 if(ann->shortcut_connections)
956 printf(" Hidden layer :%4d neurons, 0 bias\n",
957 layer_it->last_neuron - layer_it->first_neuron);
961 printf(" Hidden layer :%4d neurons, 1 bias\n",
962 layer_it->last_neuron - layer_it->first_neuron - 1);
965 printf("Output layer :%4d neurons\n", ann->num_output);
966 printf("Total neurons and biases :%4d\n", fann_get_total_neurons(ann));
967 printf("Total connections :%4d\n", ann->total_connections);
968 printf("Connection rate :%8.3f\n", ann->connection_rate);
969 printf("Shortcut connections :%4d\n", ann->shortcut_connections);
/* fixed-point-only fields */
971 printf("Decimal point :%4d\n", ann->decimal_point);
972 printf("Multiplier :%4d\n", ann->multiplier);
974 printf("Training algorithm : %s\n", FANN_TRAIN_NAMES[ann->training_algorithm]);
975 printf("Training error function : %s\n", FANN_ERRORFUNC_NAMES[ann->train_error_function]);
976 printf("Training stop function : %s\n", FANN_STOPFUNC_NAMES[ann->train_stop_function]);
979 printf("Bit fail limit :%4d\n", ann->bit_fail_limit);
981 printf("Learning rate :%8.3f\n", ann->learning_rate);
982 printf("Learning momentum :%8.3f\n", ann->learning_momentum);
983 printf("Quickprop decay :%11.6f\n", ann->quickprop_decay);
984 printf("Quickprop mu :%8.3f\n", ann->quickprop_mu);
985 printf("RPROP increase factor :%8.3f\n", ann->rprop_increase_factor);
986 printf("RPROP decrease factor :%8.3f\n", ann->rprop_decrease_factor);
987 printf("RPROP delta min :%8.3f\n", ann->rprop_delta_min);
988 printf("RPROP delta max :%8.3f\n", ann->rprop_delta_max);
989 printf("Cascade output change fraction :%11.6f\n", ann->cascade_output_change_fraction);
990 printf("Cascade candidate change fraction :%11.6f\n", ann->cascade_candidate_change_fraction);
991 printf("Cascade output stagnation epochs :%4d\n", ann->cascade_output_stagnation_epochs);
992 printf("Cascade candidate stagnation epochs :%4d\n", ann->cascade_candidate_stagnation_epochs);
993 printf("Cascade max output epochs :%4d\n", ann->cascade_max_out_epochs);
994 printf("Cascade max candidate epochs :%4d\n", ann->cascade_max_cand_epochs);
995 printf("Cascade weight multiplier :%8.3f\n", ann->cascade_weight_multiplier);
996 printf("Cascade candidate limit :%8.3f\n", ann->cascade_candidate_limit);
997 for(i = 0; i < ann->cascade_activation_functions_count; i++)
998 printf("Cascade activation functions[%d] : %s\n", i,
999 FANN_ACTIVATIONFUNC_NAMES[ann->cascade_activation_functions[i]]);
1000 for(i = 0; i < ann->cascade_activation_steepnesses_count; i++)
1001 printf("Cascade activation steepnesses[%d] :%8.3f\n", i,
1002 ann->cascade_activation_steepnesses[i]);
1004 printf("Cascade candidate groups :%4d\n", ann->cascade_num_candidate_groups);
1005 printf("Cascade no. of candidates :%4d\n", fann_get_cascade_num_candidates(ann));
/* Simple field accessors generated by the FANN_GET macro (presumably
 * expands to a trivial getter — defined elsewhere, TODO confirm). */
1009 FANN_GET(unsigned int, num_input)
1010 FANN_GET(unsigned int, num_output)
/* fann_get_total_neurons: number of neurons including biases.  Shortcut
 * networks have no bias in the output layer, so the raw total is exact;
 * ordinary networks subtract the always-unused last-layer bias. */
1012 FANN_EXTERNAL unsigned int FANN_API fann_get_total_neurons(struct fann *ann)
1014 if(ann->shortcut_connections)
1016 return ann->total_neurons;
1020 /* -1, because there is always an unused bias neuron in the last layer */
1021 return ann->total_neurons - 1;
1025 FANN_GET(unsigned int, total_connections)
/* fixed-point-only accessors */
1029 FANN_GET(unsigned int, decimal_point)
1030 FANN_GET(unsigned int, multiplier)
1032 /* INTERNAL FUNCTION
1033 Adjust the steepwise functions (if used)
/* Recomputes the six (x, y) breakpoints of the piecewise-linear
 * approximations of the sigmoid and symmetric sigmoid, scaled by the
 * network's fixed-point multiplier.  The *_results arrays hold the y
 * values (clamped into the representable range), the *_values arrays the
 * corresponding x positions derived by inverting the sigmoid. */
1035 void fann_update_stepwise(struct fann *ann)
1039 /* Calculate the parameters for the stepwise linear
1040 * sigmoid function fixed point.
1041 * Using a rewritten sigmoid function.
1042 * results 0.005, 0.05, 0.25, 0.75, 0.95, 0.995
/* y breakpoints for the ordinary sigmoid, clamped to [1, multiplier-1]
 * so no breakpoint collapses to 0 or full scale */
1044 ann->sigmoid_results[0] = fann_max((fann_type) (ann->multiplier / 200.0 + 0.5), 1);
1045 ann->sigmoid_results[1] = fann_max((fann_type) (ann->multiplier / 20.0 + 0.5), 1);
1046 ann->sigmoid_results[2] = fann_max((fann_type) (ann->multiplier / 4.0 + 0.5), 1);
1047 ann->sigmoid_results[3] = fann_min(ann->multiplier - (fann_type) (ann->multiplier / 4.0 + 0.5), ann->multiplier - 1);
1048 ann->sigmoid_results[4] = fann_min(ann->multiplier - (fann_type) (ann->multiplier / 20.0 + 0.5), ann->multiplier - 1);
1049 ann->sigmoid_results[5] = fann_min(ann->multiplier - (fann_type) (ann->multiplier / 200.0 + 0.5), ann->multiplier - 1);
/* y breakpoints for the symmetric sigmoid, clamped to
 * [1-multiplier, multiplier-1] */
1051 ann->sigmoid_symmetric_results[0] = fann_max((fann_type) ((ann->multiplier / 100.0) - ann->multiplier - 0.5),
1052 (fann_type) (1 - (fann_type) ann->multiplier));
1053 ann->sigmoid_symmetric_results[1] = fann_max((fann_type) ((ann->multiplier / 10.0) - ann->multiplier - 0.5),
1054 (fann_type) (1 - (fann_type) ann->multiplier));
1055 ann->sigmoid_symmetric_results[2] = fann_max((fann_type) ((ann->multiplier / 2.0) - ann->multiplier - 0.5),
1056 (fann_type) (1 - (fann_type) ann->multiplier));
1057 ann->sigmoid_symmetric_results[3] = fann_min(ann->multiplier - (fann_type) (ann->multiplier / 2.0 + 0.5),
1058 ann->multiplier - 1);
1059 ann->sigmoid_symmetric_results[4] = fann_min(ann->multiplier - (fann_type) (ann->multiplier / 10.0 + 0.5),
1060 ann->multiplier - 1);
1061 ann->sigmoid_symmetric_results[5] = fann_min(ann->multiplier - (fann_type) (ann->multiplier / 100.0 + 1.0),
1062 ann->multiplier - 1);
/* derive the x position of each breakpoint by inverting the sigmoid:
 * x = -log(multiplier/y - 1) * multiplier / 2, scaled to fixed point */
1064 for(i = 0; i < 6; i++)
1066 ann->sigmoid_values[i] =
1067 (fann_type) (((log(ann->multiplier / (float) ann->sigmoid_results[i] - 1) *
1068 (float) ann->multiplier) / -2.0) * (float) ann->multiplier);
1069 ann->sigmoid_symmetric_values[i] =
/* symmetric inverse (atanh-style expression; middle lines elided) */
1072 (float) ann->sigmoid_symmetric_results[i]) /
1073 ((float) ann->sigmoid_symmetric_results[i] +
1074 ann->multiplier)) * (float) ann->multiplier) / -2.0) *
1075 (float) ann->multiplier);
1081 /* INTERNAL FUNCTION
1082 Allocates the main structure and sets some default values.
/* Allocates the struct fann, installs default values for every training,
 * cascade, Quickprop and RPROP parameter, allocates the cascade
 * activation tables and the layer array.  Returns NULL (with
 * FANN_E_CANT_ALLOCATE_MEM reported) on any allocation failure; earlier
 * allocations are released on the later failure paths.
 * NOTE(review): braces, the num_layers < 2 early return, free(ann) on
 * the error paths and the final return are elided in this excerpt. */
1084 struct fann *fann_allocate_structure(unsigned int num_layers)
/* a network needs at least an input and an output layer */
1091 printf("less than 2 layers - ABORTING.\n");
1096 /* allocate and initialize the main network structure */
1097 ann = (struct fann *) malloc(sizeof(struct fann));
1100 fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
1104 ann->errno_f = FANN_E_NO_ERROR;
1105 ann->error_log = fann_default_error_log;
1107 ann->learning_rate = 0.7f;
1108 ann->learning_momentum = 0.0;
1109 ann->total_neurons = 0;
1110 ann->total_connections = 0;
1112 ann->num_output = 0;
/* training work arrays are allocated lazily on first training run */
1113 ann->train_errors = NULL;
1114 ann->train_slopes = NULL;
1115 ann->prev_steps = NULL;
1116 ann->prev_train_slopes = NULL;
1117 ann->prev_weights_deltas = NULL;
1118 ann->training_algorithm = FANN_TRAIN_RPROP;
1121 ann->num_bit_fail = 0;
1122 ann->bit_fail_limit = (fann_type)0.35;
1123 ann->shortcut_connections = 0;
1124 ann->train_error_function = FANN_ERRORFUNC_TANH;
1125 ann->train_stop_function = FANN_STOPFUNC_MSE;
1126 ann->callback = NULL;
1128 /* variables used for cascade correlation (reasonable defaults) */
1129 ann->cascade_output_change_fraction = 0.01f;
1130 ann->cascade_candidate_change_fraction = 0.01f;
1131 ann->cascade_output_stagnation_epochs = 12;
1132 ann->cascade_candidate_stagnation_epochs = 12;
1133 ann->cascade_num_candidate_groups = 2;
1134 ann->cascade_weight_multiplier = (fann_type)0.4;
1135 ann->cascade_candidate_limit = (fann_type)1000.0;
1136 ann->cascade_max_out_epochs = 150;
1137 ann->cascade_max_cand_epochs = 150;
1138 ann->cascade_candidate_scores = NULL;
1139 ann->cascade_activation_functions_count = 6;
1140 ann->cascade_activation_functions =
1141 (enum fann_activationfunc_enum *)calloc(ann->cascade_activation_functions_count,
1142 sizeof(enum fann_activationfunc_enum));
1143 if(ann->cascade_activation_functions == NULL)
1145 fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
1150 ann->cascade_activation_functions[0] = FANN_SIGMOID;
1151 ann->cascade_activation_functions[1] = FANN_SIGMOID_SYMMETRIC;
1152 ann->cascade_activation_functions[2] = FANN_GAUSSIAN;
1153 ann->cascade_activation_functions[3] = FANN_GAUSSIAN_SYMMETRIC;
1154 ann->cascade_activation_functions[4] = FANN_ELLIOT;
1155 ann->cascade_activation_functions[5] = FANN_ELLIOT_SYMMETRIC;
1157 ann->cascade_activation_steepnesses_count = 4;
1158 ann->cascade_activation_steepnesses =
1159 (fann_type *)calloc(ann->cascade_activation_steepnesses_count,
/* NOTE(review): this checks cascade_activation_functions, but the
 * allocation just above was cascade_activation_steepnesses — looks like
 * a copy-paste slip; a failed steepnesses calloc would go undetected.
 * Cannot fix here without the elided surrounding lines — flag upstream. */
1161 if(ann->cascade_activation_functions == NULL)
1163 fann_safe_free(ann->cascade_activation_functions);
1164 fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
1169 ann->cascade_activation_steepnesses[0] = (fann_type)0.25;
1170 ann->cascade_activation_steepnesses[1] = (fann_type)0.5;
1171 ann->cascade_activation_steepnesses[2] = (fann_type)0.75;
1172 ann->cascade_activation_steepnesses[3] = (fann_type)1.0;
1174 /* Variables for use with with Quickprop training (reasonable defaults) */
1175 ann->quickprop_decay = (float) -0.0001;
1176 ann->quickprop_mu = 1.75;
1178 /* Variables for use with with RPROP training (reasonable defaults) */
1179 ann->rprop_increase_factor = (float) 1.2;
1180 ann->rprop_decrease_factor = 0.5;
1181 ann->rprop_delta_min = 0.0;
1182 ann->rprop_delta_max = 50.0;
1183 ann->rprop_delta_zero = 0.5;
1185 fann_init_error_data((struct fann_error *) ann);
/* fixed-point build only (elided #ifdef FIXEDFANN) */
1188 /* these values are only boring defaults, and should really
1189 * never be used, since the real values are always loaded from a file. */
1190 ann->decimal_point = 8;
1191 ann->multiplier = 256;
1194 /* allocate room for the layers */
1195 ann->first_layer = (struct fann_layer *) calloc(num_layers, sizeof(struct fann_layer));
1196 if(ann->first_layer == NULL)
1198 fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
1203 ann->last_layer = ann->first_layer + num_layers;
1208 /* INTERNAL FUNCTION
1209 Allocates room for the neurons.
/* Allocates all neurons of the network in a single contiguous, zeroed
 * array and rewrites each layer's first_neuron/last_neuron (which until
 * now encoded only counts, offset from NULL) to point into it.  Also
 * allocates the user-visible output buffer sized to the LAST layer's
 * neuron count (num_neurons holds the last loop iteration's value).
 * On failure sets FANN_E_CANT_ALLOCATE_MEM on ann and returns. */
1211 void fann_allocate_neurons(struct fann *ann)
1213 struct fann_layer *layer_it;
1214 struct fann_neuron *neurons;
1215 unsigned int num_neurons_so_far = 0;
1216 unsigned int num_neurons = 0;
1218 /* all the neurons is allocated in one long array (calloc clears mem) */
1219 neurons = (struct fann_neuron *) calloc(ann->total_neurons, sizeof(struct fann_neuron));
1220 ann->total_neurons_allocated = ann->total_neurons;
1224 fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
/* convert each layer's count-encoding pointers into real pointers */
1228 for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
1230 num_neurons = layer_it->last_neuron - layer_it->first_neuron;
1231 layer_it->first_neuron = neurons + num_neurons_so_far;
1232 layer_it->last_neuron = layer_it->first_neuron + num_neurons;
1233 num_neurons_so_far += num_neurons;
/* num_neurons is now the output layer's size (last iteration) */
1236 ann->output = (fann_type *) calloc(num_neurons, sizeof(fann_type));
1237 if(ann->output == NULL)
1239 fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
1244 /* INTERNAL FUNCTION
1245 Allocate room for the connections.
/* Allocates the weight array and the parallel connection-pointer array,
 * both zeroed (the sparse wiring code relies on NULL meaning "slot not
 * yet connected").  On failure sets FANN_E_CANT_ALLOCATE_MEM on ann and
 * returns; weights allocated before a failing connections calloc are
 * presumably freed in elided code — TODO confirm. */
1247 void fann_allocate_connections(struct fann *ann)
1249 ann->weights = (fann_type *) calloc(ann->total_connections, sizeof(fann_type));
1250 if(ann->weights == NULL)
1252 fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
1255 ann->total_connections_allocated = ann->total_connections;
1257 /* TODO make special cases for all places where the connections
1258 * is used, so that it is not needed for fully connected networks.
1261 (struct fann_neuron **) calloc(ann->total_connections_allocated,
1262 sizeof(struct fann_neuron *));
1263 if(ann->connections == NULL)
1265 fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
1271 /* INTERNAL FUNCTION
1272 Seed the random function.
1274 void fann_seed_rand()
1277 FILE *fp = fopen("/dev/urandom", "r");
1283 gettimeofday(&t, NULL);
1286 printf("unable to open /dev/urandom\n");
1291 fread(&foo, sizeof(foo), 1, fp);
1296 /* COMPAT_TIME REPLACEMENT */
1297 srand(GetTickCount());