/*
  Fast Artificial Neural Network Library (fann)
  Copyright (C) 2003 Steffen Nissen (lukesky@diku.dk)

  This library is free software; you can redistribute it and/or
  modify it under the terms of the GNU Lesser General Public
  License as published by the Free Software Foundation; either
  version 2.1 of the License, or (at your option) any later version.

  This library is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
  Lesser General Public License for more details.

  You should have received a copy of the GNU Lesser General Public
  License along with this library; if not, write to the Free Software
  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
*/

#include <stdio.h>
#include <stdlib.h>
#include <stdarg.h>
#include <string.h>

#include "config.h"
#include "fann.h"

/* Create a network from a configuration file.
 */
FANN_EXTERNAL struct fann *FANN_API fann_create_from_file(const char *configuration_file)
{
	struct fann *ann;
	FILE *conf = fopen(configuration_file, "r");

	if(!conf)
	{
		fann_error(NULL, FANN_E_CANT_OPEN_CONFIG_R, configuration_file);
		return NULL;
	}
	ann = fann_create_from_fd(conf, configuration_file);
	fclose(conf);
	return ann;
}

/* Save the network.
 */
FANN_EXTERNAL int FANN_API fann_save(struct fann *ann, const char *configuration_file)
{
	return fann_save_internal(ann, configuration_file, 0);
}

/* Save the network as fixed point data.
 * Returns the bit position of the fixed point (the decimal point), which
 * indicates how accurately the saved fixed point network can represent
 * the weights.
 */
FANN_EXTERNAL int FANN_API fann_save_to_fixed(struct fann *ann, const char *configuration_file)
{
	return fann_save_internal(ann, configuration_file, 1);
}

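/* Usage sketch (the file name is only illustrative):
 *
 *	struct fann *ann = fann_create_from_file("network.net");
 *	if(ann != NULL)
 *	{
 *		fann_save(ann, "network.net");
 *		fann_destroy(ann);
 *	}
 */
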
/* INTERNAL FUNCTION
   Used to save the network to a file.
 */
int fann_save_internal(struct fann *ann, const char *configuration_file, unsigned int save_as_fixed)
{
	int retval;
	FILE *conf = fopen(configuration_file, "w+");

	if(!conf)
	{
		fann_error((struct fann_error *) ann, FANN_E_CANT_OPEN_CONFIG_W, configuration_file);
		return -1;
	}
	retval = fann_save_internal_fd(ann, conf, configuration_file, save_as_fixed);
	fclose(conf);
	return retval;
}

/* INTERNAL FUNCTION
   Used to save the network to a file descriptor.
 */
int fann_save_internal_fd(struct fann *ann, FILE * conf, const char *configuration_file,
						  unsigned int save_as_fixed)
{
	struct fann_layer *layer_it;
	int calculated_decimal_point = 0;
	struct fann_neuron *neuron_it, *first_neuron;
	fann_type *weights;
	struct fann_neuron **connected_neurons;
	unsigned int i = 0;

#ifndef FIXEDFANN
	/* variables for use when saving floats as fixed point variables */
	unsigned int decimal_point = 0;
	unsigned int fixed_multiplier = 0;
	fann_type max_possible_value = 0;
	unsigned int bits_used_for_max = 0;
	fann_type current_max_value = 0;
#endif

#ifndef FIXEDFANN
	if(save_as_fixed)
	{
		/* save the version information */
		fprintf(conf, FANN_FIX_VERSION "\n");
	}
	else
	{
		/* save the version information */
		fprintf(conf, FANN_FLO_VERSION "\n");
	}
#else
	/* save the version information */
	fprintf(conf, FANN_FIX_VERSION "\n");
#endif
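
	/* Everything after the version line is written as plain text: a block of
	 * "name=value" parameter lines, then the layer sizes, the per-neuron data
	 * and finally the connection list, all produced below. */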

#ifndef FIXEDFANN
	if(save_as_fixed)
	{
		/* calculate the maximal possible shift value */

		for(layer_it = ann->first_layer + 1; layer_it != ann->last_layer; layer_it++)
		{
			for(neuron_it = layer_it->first_neuron; neuron_it != layer_it->last_neuron; neuron_it++)
			{
				/* look at all connections to each neuron, and see how high a value we can get */
				current_max_value = 0;
				for(i = neuron_it->first_con; i != neuron_it->last_con; i++)
				{
					current_max_value += fann_abs(ann->weights[i]);
				}

				if(current_max_value > max_possible_value)
				{
					max_possible_value = current_max_value;
				}
			}
		}

		for(bits_used_for_max = 0; max_possible_value >= 1; bits_used_for_max++)
		{
			max_possible_value /= 2.0;
		}

		/* The maximum number of bits we shift the fix point is the number
		 * of bits in an integer, minus one for the sign, one for the minus
		 * in stepwise, and minus the bits used for the maximum.
		 * This is divided by two, to allow multiplication of two fixed
		 * point numbers.
		 */
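		/* Example (assuming a 32 bit int): max_possible_value = 150.0 gives
		 * bits_used_for_max = 8, since 2^7 <= 150 < 2^8, so
		 * calculated_decimal_point = (32 - 2 - 8) / 2 = 11 and all values are
		 * saved multiplied by 2^11 = 2048. */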
		calculated_decimal_point = (sizeof(int) * 8 - 2 - bits_used_for_max) / 2;

		if(calculated_decimal_point < 0)
		{
			decimal_point = 0;
		}
		else
		{
			decimal_point = calculated_decimal_point;
		}

		fixed_multiplier = 1 << decimal_point;

#ifdef DEBUG
		printf("calculated_decimal_point=%d, decimal_point=%u, bits_used_for_max=%u\n",
			   calculated_decimal_point, decimal_point, bits_used_for_max);
#endif

		/* save the decimal_point on a separate line */
		fprintf(conf, "decimal_point=%u\n", decimal_point);
	}
#else
	/* save the decimal_point on a separate line */
	fprintf(conf, "decimal_point=%u\n", ann->decimal_point);

#endif

	/* Save network parameters */
	fprintf(conf, "num_layers=%u\n", ann->last_layer - ann->first_layer);
	fprintf(conf, "learning_rate=%f\n", ann->learning_rate);
	fprintf(conf, "connection_rate=%f\n", ann->connection_rate);
	fprintf(conf, "shortcut_connections=%u\n", ann->shortcut_connections);

	fprintf(conf, "learning_momentum=%f\n", ann->learning_momentum);
	fprintf(conf, "training_algorithm=%u\n", ann->training_algorithm);
	fprintf(conf, "train_error_function=%u\n", ann->train_error_function);
	fprintf(conf, "train_stop_function=%u\n", ann->train_stop_function);
	fprintf(conf, "cascade_output_change_fraction=%f\n", ann->cascade_output_change_fraction);
	fprintf(conf, "quickprop_decay=%f\n", ann->quickprop_decay);
	fprintf(conf, "quickprop_mu=%f\n", ann->quickprop_mu);
	fprintf(conf, "rprop_increase_factor=%f\n", ann->rprop_increase_factor);
	fprintf(conf, "rprop_decrease_factor=%f\n", ann->rprop_decrease_factor);
	fprintf(conf, "rprop_delta_min=%f\n", ann->rprop_delta_min);
	fprintf(conf, "rprop_delta_max=%f\n", ann->rprop_delta_max);
	fprintf(conf, "rprop_delta_zero=%f\n", ann->rprop_delta_zero);
	fprintf(conf, "cascade_output_stagnation_epochs=%u\n", ann->cascade_output_stagnation_epochs);
	fprintf(conf, "cascade_candidate_change_fraction=%f\n", ann->cascade_candidate_change_fraction);
	fprintf(conf, "cascade_candidate_stagnation_epochs=%u\n", ann->cascade_candidate_stagnation_epochs);
	fprintf(conf, "cascade_max_out_epochs=%u\n", ann->cascade_max_out_epochs);
	fprintf(conf, "cascade_max_cand_epochs=%u\n", ann->cascade_max_cand_epochs);
	fprintf(conf, "cascade_num_candidate_groups=%u\n", ann->cascade_num_candidate_groups);

#ifndef FIXEDFANN
	if(save_as_fixed)
	{
		fprintf(conf, "bit_fail_limit=%u\n", (int) floor((ann->bit_fail_limit * fixed_multiplier) + 0.5));
		fprintf(conf, "cascade_candidate_limit=%u\n", (int) floor((ann->cascade_candidate_limit * fixed_multiplier) + 0.5));
		fprintf(conf, "cascade_weight_multiplier=%u\n", (int) floor((ann->cascade_weight_multiplier * fixed_multiplier) + 0.5));
	}
	else
#endif
	{
		fprintf(conf, "bit_fail_limit="FANNPRINTF"\n", ann->bit_fail_limit);
		fprintf(conf, "cascade_candidate_limit="FANNPRINTF"\n", ann->cascade_candidate_limit);
		fprintf(conf, "cascade_weight_multiplier="FANNPRINTF"\n", ann->cascade_weight_multiplier);
	}

	fprintf(conf, "cascade_activation_functions_count=%u\n", ann->cascade_activation_functions_count);
	fprintf(conf, "cascade_activation_functions=");
	for(i = 0; i < ann->cascade_activation_functions_count; i++)
		fprintf(conf, "%u ", ann->cascade_activation_functions[i]);
	fprintf(conf, "\n");

	fprintf(conf, "cascade_activation_steepnesses_count=%u\n", ann->cascade_activation_steepnesses_count);
	fprintf(conf, "cascade_activation_steepnesses=");
	for(i = 0; i < ann->cascade_activation_steepnesses_count; i++)
	{
#ifndef FIXEDFANN
		if(save_as_fixed)
			fprintf(conf, "%u ", (int) floor((ann->cascade_activation_steepnesses[i] * fixed_multiplier) + 0.5));
		else
#endif
			fprintf(conf, FANNPRINTF" ", ann->cascade_activation_steepnesses[i]);
	}
	fprintf(conf, "\n");

	fprintf(conf, "layer_sizes=");
	for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
	{
		/* the number of neurons in the layer (the last layer always contains one extra, unused bias neuron) */
		fprintf(conf, "%u ", layer_it->last_neuron - layer_it->first_neuron);
	}
	fprintf(conf, "\n");


	fprintf(conf, "neurons (num_inputs, activation_function, activation_steepness)=");
	for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
	{
		/* the neurons */
		for(neuron_it = layer_it->first_neuron; neuron_it != layer_it->last_neuron; neuron_it++)
		{
#ifndef FIXEDFANN
			if(save_as_fixed)
			{
				fprintf(conf, "(%u, %u, %u) ", neuron_it->last_con - neuron_it->first_con,
						neuron_it->activation_function,
						(int) floor((neuron_it->activation_steepness * fixed_multiplier) + 0.5));
			}
			else
			{
				fprintf(conf, "(%u, %u, " FANNPRINTF ") ", neuron_it->last_con - neuron_it->first_con,
						neuron_it->activation_function, neuron_it->activation_steepness);
			}
#else
			fprintf(conf, "(%u, %u, " FANNPRINTF ") ", neuron_it->last_con - neuron_it->first_con,
					neuron_it->activation_function, neuron_it->activation_steepness);
#endif
		}
	}
	fprintf(conf, "\n");

	connected_neurons = ann->connections;
	weights = ann->weights;
	first_neuron = ann->first_layer->first_neuron;

	/* Now save all the connections.
	 * We only need to save the source and the weight,
	 * since the destination is given by the order.
	 *
	 * The weights are not saved in binary form because of differences
	 * in the binary representation of floating point numbers;
	 * an iPAQ, for example, does not use the same representation
	 * as an i386 machine.
	 */
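	/* A saved connection list therefore looks like, e.g.:
	 *   connections (connected_to_neuron, weight)=(0, 0.123) (1, -1.250) ...
	 * (illustrative values; the exact weight format is given by FANNPRINTF). */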
	fprintf(conf, "connections (connected_to_neuron, weight)=");
	for(i = 0; i < ann->total_connections; i++)
	{
#ifndef FIXEDFANN
		if(save_as_fixed)
		{
			/* save the connection "(source, weight) " */
			fprintf(conf, "(%u, %d) ",
					connected_neurons[i] - first_neuron,
					(int) floor((weights[i] * fixed_multiplier) + 0.5));
		}
		else
		{
			/* save the connection "(source, weight) " */
			fprintf(conf, "(%u, " FANNPRINTF ") ", connected_neurons[i] - first_neuron, weights[i]);
		}
#else
		/* save the connection "(source, weight) " */
		fprintf(conf, "(%u, " FANNPRINTF ") ", connected_neurons[i] - first_neuron, weights[i]);
#endif

	}
	fprintf(conf, "\n");

	return calculated_decimal_point;
}

struct fann *fann_create_from_fd_1_1(FILE * conf, const char *configuration_file);

#define fann_scanf(type, name, val) \
{ \
	if(fscanf(conf, name"="type"\n", val) != 1) \
	{ \
		fann_error(NULL, FANN_E_CANT_READ_CONFIG, name, configuration_file); \
		fann_destroy(ann); \
		return NULL; \
	} \
}
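
/* Usage sketch: fann_scanf("%u", "num_layers", &num_layers) reads a line of
 * the form "num_layers=<value>" from conf and, on failure, reports the error,
 * destroys ann and returns NULL. It relies on the local variables conf,
 * configuration_file and ann being in scope at the point of expansion. */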

/* INTERNAL FUNCTION
   Create a network from a configuration file descriptor.
 */
struct fann *fann_create_from_fd(FILE * conf, const char *configuration_file)
{
	unsigned int num_layers, layer_size, input_neuron, i, num_connections;

#ifdef FIXEDFANN
	unsigned int decimal_point, multiplier;
#endif
	struct fann_neuron *first_neuron, *neuron_it, *last_neuron, **connected_neurons;
	fann_type *weights;
	struct fann_layer *layer_it;
	struct fann *ann = NULL;

	char *read_version;

	read_version = (char *) calloc(strlen(FANN_CONF_VERSION "\n"), 1);
	if(read_version == NULL)
	{
		fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
		return NULL;
	}

	fread(read_version, 1, strlen(FANN_CONF_VERSION "\n"), conf);	/* reads version */

	/* compares the version information */
	if(strncmp(read_version, FANN_CONF_VERSION "\n", strlen(FANN_CONF_VERSION "\n")) != 0)
	{
#ifdef FIXEDFANN
		if(strncmp(read_version, "FANN_FIX_1.1\n", strlen("FANN_FIX_1.1\n")) == 0)
		{
#else
		if(strncmp(read_version, "FANN_FLO_1.1\n", strlen("FANN_FLO_1.1\n")) == 0)
		{
#endif
			free(read_version);
			return fann_create_from_fd_1_1(conf, configuration_file);
		}

		free(read_version);
		fann_error(NULL, FANN_E_WRONG_CONFIG_VERSION, configuration_file);

		return NULL;
	}

	free(read_version);

#ifdef FIXEDFANN
	fann_scanf("%u", "decimal_point", &decimal_point);
	multiplier = 1 << decimal_point;
#endif
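
	/* In a fixed point (FIXEDFANN) build, e.g. decimal_point = 11 gives
	 * multiplier = 2048, so a stored integer weight w corresponds to the real
	 * value w / 2048.0 (the inverse of the rounding done when saving above). */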

	fann_scanf("%u", "num_layers", &num_layers);

	ann = fann_allocate_structure(num_layers);
	if(ann == NULL)
	{
		return NULL;
	}

	fann_scanf("%f", "learning_rate", &ann->learning_rate);
	fann_scanf("%f", "connection_rate", &ann->connection_rate);
	fann_scanf("%u", "shortcut_connections", &ann->shortcut_connections);
	fann_scanf("%f", "learning_momentum", &ann->learning_momentum);
	fann_scanf("%u", "training_algorithm", (unsigned int *)&ann->training_algorithm);
	fann_scanf("%u", "train_error_function", (unsigned int *)&ann->train_error_function);
	fann_scanf("%u", "train_stop_function", (unsigned int *)&ann->train_stop_function);
	fann_scanf("%f", "cascade_output_change_fraction", &ann->cascade_output_change_fraction);
	fann_scanf("%f", "quickprop_decay", &ann->quickprop_decay);
	fann_scanf("%f", "quickprop_mu", &ann->quickprop_mu);
	fann_scanf("%f", "rprop_increase_factor", &ann->rprop_increase_factor);
	fann_scanf("%f", "rprop_decrease_factor", &ann->rprop_decrease_factor);
	fann_scanf("%f", "rprop_delta_min", &ann->rprop_delta_min);
	fann_scanf("%f", "rprop_delta_max", &ann->rprop_delta_max);
	fann_scanf("%f", "rprop_delta_zero", &ann->rprop_delta_zero);
	fann_scanf("%u", "cascade_output_stagnation_epochs", &ann->cascade_output_stagnation_epochs);
	fann_scanf("%f", "cascade_candidate_change_fraction", &ann->cascade_candidate_change_fraction);
	fann_scanf("%u", "cascade_candidate_stagnation_epochs", &ann->cascade_candidate_stagnation_epochs);
	fann_scanf("%u", "cascade_max_out_epochs", &ann->cascade_max_out_epochs);
	fann_scanf("%u", "cascade_max_cand_epochs", &ann->cascade_max_cand_epochs);
	fann_scanf("%u", "cascade_num_candidate_groups", &ann->cascade_num_candidate_groups);

	fann_scanf(FANNSCANF, "bit_fail_limit", &ann->bit_fail_limit);
	fann_scanf(FANNSCANF, "cascade_candidate_limit", &ann->cascade_candidate_limit);
	fann_scanf(FANNSCANF, "cascade_weight_multiplier", &ann->cascade_weight_multiplier);


	fann_scanf("%u", "cascade_activation_functions_count", &ann->cascade_activation_functions_count);

	/* reallocate mem */
	ann->cascade_activation_functions =
		(enum fann_activationfunc_enum *)realloc(ann->cascade_activation_functions,
		ann->cascade_activation_functions_count * sizeof(enum fann_activationfunc_enum));
	if(ann->cascade_activation_functions == NULL)
	{
		fann_error((struct fann_error*)ann, FANN_E_CANT_ALLOCATE_MEM);
		fann_destroy(ann);
		return NULL;
	}

	fscanf(conf, "cascade_activation_functions=");
	for(i = 0; i < ann->cascade_activation_functions_count; i++)
		fscanf(conf, "%u ", (unsigned int *)&ann->cascade_activation_functions[i]);

	fann_scanf("%u", "cascade_activation_steepnesses_count", &ann->cascade_activation_steepnesses_count);

	/* reallocate mem */
	ann->cascade_activation_steepnesses =
		(fann_type *)realloc(ann->cascade_activation_steepnesses,
		ann->cascade_activation_steepnesses_count * sizeof(fann_type));
	if(ann->cascade_activation_steepnesses == NULL)
	{
		fann_error((struct fann_error*)ann, FANN_E_CANT_ALLOCATE_MEM);
		fann_destroy(ann);
		return NULL;
	}

	fscanf(conf, "cascade_activation_steepnesses=");
	for(i = 0; i < ann->cascade_activation_steepnesses_count; i++)
		fscanf(conf, FANNSCANF" ", &ann->cascade_activation_steepnesses[i]);

#ifdef FIXEDFANN
	ann->decimal_point = decimal_point;
	ann->multiplier = multiplier;
#endif

#ifdef FIXEDFANN
	fann_update_stepwise(ann);
#endif

#ifdef DEBUG
	printf("creating network with %d layers\n", num_layers);
	printf("input\n");
#endif

	fscanf(conf, "layer_sizes=");
	/* determine how many neurons there should be in each layer */
	for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
	{
		if(fscanf(conf, "%u ", &layer_size) != 1)
		{
			fann_error((struct fann_error *) ann, FANN_E_CANT_READ_CONFIG, "layer_sizes", configuration_file);
			fann_destroy(ann);
			return NULL;
		}
		/* we do not allocate room here, but we make sure that
		 * last_neuron - first_neuron is the number of neurons */
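		/* (first_neuron is still NULL here; the real neuron array is
		 * allocated and assigned later by fann_allocate_neurons()) */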
		layer_it->first_neuron = NULL;
		layer_it->last_neuron = layer_it->first_neuron + layer_size;
		ann->total_neurons += layer_size;
#ifdef DEBUG
		if(ann->shortcut_connections && layer_it != ann->first_layer)
		{
			printf("  layer       : %d neurons, 0 bias\n", layer_size);
		}
		else
		{
			printf("  layer       : %d neurons, 1 bias\n", layer_size - 1);
		}
#endif
	}

	ann->num_input = ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1;
	ann->num_output = ((ann->last_layer - 1)->last_neuron - (ann->last_layer - 1)->first_neuron);
	if(!ann->shortcut_connections)
	{
		/* one too many (bias) in the output layer */
		ann->num_output--;
	}

	/* allocate room for the actual neurons */
	fann_allocate_neurons(ann);
	if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
	{
		fann_destroy(ann);
		return NULL;
	}

	last_neuron = (ann->last_layer - 1)->last_neuron;
	fscanf(conf, "neurons (num_inputs, activation_function, activation_steepness)=");
	for(neuron_it = ann->first_layer->first_neuron; neuron_it != last_neuron; neuron_it++)
	{
		if(fscanf
		   (conf, "(%u, %u, " FANNSCANF ") ", &num_connections, (unsigned int *)&neuron_it->activation_function,
			&neuron_it->activation_steepness) != 3)
		{
			fann_error((struct fann_error *) ann, FANN_E_CANT_READ_NEURON, configuration_file);
			fann_destroy(ann);
			return NULL;
		}
		neuron_it->first_con = ann->total_connections;
		ann->total_connections += num_connections;
		neuron_it->last_con = ann->total_connections;
	}

	fann_allocate_connections(ann);
	if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
	{
		fann_destroy(ann);
		return NULL;
	}

	connected_neurons = ann->connections;
	weights = ann->weights;
	first_neuron = ann->first_layer->first_neuron;

	fscanf(conf, "connections (connected_to_neuron, weight)=");
	for(i = 0; i < ann->total_connections; i++)
	{
		if(fscanf(conf, "(%u, " FANNSCANF ") ", &input_neuron, &weights[i]) != 2)
		{
			fann_error((struct fann_error *) ann, FANN_E_CANT_READ_CONNECTIONS, configuration_file);
			fann_destroy(ann);
			return NULL;
		}
		connected_neurons[i] = first_neuron + input_neuron;
	}

#ifdef DEBUG
	printf("output\n");
#endif
	return ann;
}


/* INTERNAL FUNCTION
   Create a network from a configuration file descriptor. (backward compatible read of version 1.1 files)
 */
struct fann *fann_create_from_fd_1_1(FILE * conf, const char *configuration_file)
{
	unsigned int num_layers, layer_size, input_neuron, i, shortcut_connections, num_connections;
	unsigned int activation_function_hidden, activation_function_output;
#ifdef FIXEDFANN
	unsigned int decimal_point, multiplier;
#endif
	fann_type activation_steepness_hidden, activation_steepness_output;
	float learning_rate, connection_rate;
	struct fann_neuron *first_neuron, *neuron_it, *last_neuron, **connected_neurons;
	fann_type *weights;
	struct fann_layer *layer_it;
	struct fann *ann;

#ifdef FIXEDFANN
	if(fscanf(conf, "%u\n", &decimal_point) != 1)
	{
		fann_error(NULL, FANN_E_CANT_READ_CONFIG, "decimal_point", configuration_file);
		return NULL;
	}
	multiplier = 1 << decimal_point;
#endif

	if(fscanf(conf, "%u %f %f %u %u %u " FANNSCANF " " FANNSCANF "\n", &num_layers, &learning_rate,
		&connection_rate, &shortcut_connections, &activation_function_hidden,
		&activation_function_output, &activation_steepness_hidden,
		&activation_steepness_output) != 8)
	{
		fann_error(NULL, FANN_E_CANT_READ_CONFIG, "parameters", configuration_file);
		return NULL;
	}

	ann = fann_allocate_structure(num_layers);
	if(ann == NULL)
	{
		return NULL;
	}
	ann->connection_rate = connection_rate;
	ann->shortcut_connections = shortcut_connections;
	ann->learning_rate = learning_rate;

#ifdef FIXEDFANN
	ann->decimal_point = decimal_point;
	ann->multiplier = multiplier;
#endif

#ifdef FIXEDFANN
	fann_update_stepwise(ann);
#endif

#ifdef DEBUG
	printf("creating network with learning rate %f\n", learning_rate);
	printf("input\n");
#endif

	/* determine how many neurons there should be in each layer */
	for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
	{
		if(fscanf(conf, "%u ", &layer_size) != 1)
		{
			fann_error((struct fann_error *) ann, FANN_E_CANT_READ_NEURON, configuration_file);
			fann_destroy(ann);
			return NULL;
		}
		/* we do not allocate room here, but we make sure that
		 * last_neuron - first_neuron is the number of neurons */
		layer_it->first_neuron = NULL;
		layer_it->last_neuron = layer_it->first_neuron + layer_size;
		ann->total_neurons += layer_size;
#ifdef DEBUG
		if(ann->shortcut_connections && layer_it != ann->first_layer)
		{
			printf("  layer       : %d neurons, 0 bias\n", layer_size);
		}
		else
		{
			printf("  layer       : %d neurons, 1 bias\n", layer_size - 1);
		}
#endif
	}

	ann->num_input = ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1;
	ann->num_output = ((ann->last_layer - 1)->last_neuron - (ann->last_layer - 1)->first_neuron);
	if(!ann->shortcut_connections)
	{
		/* one too many (bias) in the output layer */
		ann->num_output--;
	}

	/* allocate room for the actual neurons */
	fann_allocate_neurons(ann);
	if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
	{
		fann_destroy(ann);
		return NULL;
	}

	last_neuron = (ann->last_layer - 1)->last_neuron;
	for(neuron_it = ann->first_layer->first_neuron; neuron_it != last_neuron; neuron_it++)
	{
		if(fscanf(conf, "%u ", &num_connections) != 1)
		{
			fann_error((struct fann_error *) ann, FANN_E_CANT_READ_NEURON, configuration_file);
			fann_destroy(ann);
			return NULL;
		}
		neuron_it->first_con = ann->total_connections;
		ann->total_connections += num_connections;
		neuron_it->last_con = ann->total_connections;
	}

	fann_allocate_connections(ann);
	if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
	{
		fann_destroy(ann);
		return NULL;
	}

	connected_neurons = ann->connections;
	weights = ann->weights;
	first_neuron = ann->first_layer->first_neuron;

	for(i = 0; i < ann->total_connections; i++)
	{
		if(fscanf(conf, "(%u " FANNSCANF ") ", &input_neuron, &weights[i]) != 2)
		{
			fann_error((struct fann_error *) ann, FANN_E_CANT_READ_CONNECTIONS, configuration_file);
			fann_destroy(ann);
			return NULL;
		}
		connected_neurons[i] = first_neuron + input_neuron;
	}

	fann_set_activation_steepness_hidden(ann, activation_steepness_hidden);
	fann_set_activation_steepness_output(ann, activation_steepness_output);
	fann_set_activation_function_hidden(ann, (enum fann_activationfunc_enum)activation_function_hidden);
	fann_set_activation_function_output(ann, (enum fann_activationfunc_enum)activation_function_output);

#ifdef DEBUG
	printf("output\n");
#endif
	return ann;
}