1 /*****************************************************************************/
2 /* */
3 /* 3dsvm_common.c */
4 /* */
5 /* Definitions and functions used by 3dsvm */
6 /* */
7 /* Copyright (C) 2007 Stephen LaConte */
8 /* */
9 /* This file is part of 3dsvm */
10 /* */
11 /* 3dsvm is free software: you can redistribute it and/or modify */
12 /* it under the terms of the GNU General Public License as published by */
13 /* the Free Software Foundation, either version 3 of the License, or */
14 /* (at your option) any later version. */
15 /* */
16 /* 3dsvm is distributed in the hope that it will be useful, */
17 /* but WITHOUT ANY WARRANTY; without even the implied warranty of */
18 /* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the */
19 /* GNU General Public License for more details. */
20 /* */
21 /* You should have received a copy of the GNU General Public License */
22 /* along with 3dsvm. If not, see <http://www.gnu.org/licenses/>. */
23 /* */
24 /* */
25 /* The SVM-light software is copyrighted by Thorsten Joachims */
26 /* and is redistributed by permission. */
27 /* */
28 /* The SVM-light software is free only for non-commercial use. It must not */
29 /* be distributed without prior permission of the author. The author is not */
30 /* responsible for implications from the use of this software. */
31 /* */
32 /* */
33 /* For AFNI's copyright please refer to ../README.copyright. */
34 /* */
35 /*****************************************************************************/
36
37
38 #include "3dsvm_common.h"
39 #include "debugtrace.h"
40
41 /* JL Sep. 2010: Global variables used for real-time training/testing.
42 * If not declared here, unknown */
43 RT_SVM_VARS GLOBAL_svm_vars = {0};
44
45 /* from svm_classify.c - copied directly (print_help) for now since this file
46 * also has main in it */
print_help_classify(void)47 void print_help_classify(void)
48 {
49 printf("\nSVM-light %s: Support Vector Machine, classification module %s\n",
50 VERSION_SVMLIGHT, VERSION_DATE_SVMLIGHT);
51 copyright_notice();
52 printf(" usage: svm_classify [options] example_file model_file output_file\n\n");
53 printf("options: -h -> this help\n");
54 printf(" -v [0..3] -> verbosity level (default 2)\n");
55 printf(" -f [0,1] -> 0: old output format of V1.0\n");
56 printf(" -> 1: output the value of decision function (default)\n\n");
57 }
58
59 /* from svm_learn_main.c - copied directly (print_help -
60 * omitting the wait_any_key()) for now since this file also has main in it */
print_help_learn()61 void print_help_learn()
62 {
63 printf("\nSVM-light %s: Support Vector Machine, learning module %sstim\n",
64 VERSION_SVMLIGHT, VERSION_DATE_SVMLIGHT);
65 copyright_notice();
66 printf(" usage: svm_learn [options] example_file model_file\n\n");
67 printf("Arguments:\n");
68 printf(" example_file-> file with training data\n");
69 printf(" model_file -> file to store learned decision rule in\n");
70
71 printf("General options:\n");
72 printf(" -? -> this help\n");
73 printf(" -v [0..3] -> level (default 1)\n");
74 printf("Learning options:\n");
75 printf(" -z {c,r,p} -> select between classification (c), regression (r),\n");
76 printf(" and preference ranking (p) (default classification)\n");
77 printf(" -c float -> C: trade-off between training error\n");
78 printf(" and margin (default [avg. x*x]^-1)\n");
79 printf(" -w [0..] -> epsilon width of tube for regression\n");
80 printf(" (default 0.1)\n");
81 printf(" -j float -> Cost: cost-factor, by which training errors on\n");
82 printf(" positive examples outweight errors on negative\n");
83 printf(" examples (default 1) (see [4])\n");
84 printf(" -b [0,1] -> use biased hyperplane (i.e. x*w+b>0) instead\n");
85 printf(" of unbiased hyperplane (i.e. x*w>0) (default 1)\n");
86 printf(" -i [0,1] -> remove inconsistent training examples\n");
87 printf(" and retrain (default 0)\n");
88 printf("Performance estimation options:\n");
89 printf(" -x [0,1] -> compute leave-one-out estimates (default 0)\n");
90 printf(" (see [5])\n");
91 printf(" -o ]0..2] -> value of rho for XiAlpha-estimator and for pruning\n");
92 printf(" leave-one-out computation (default 1.0) (see [2])\n");
93 printf(" -k [0..100] -> search depth for extended XiAlpha-estimator \n");
94 printf(" (default 0)\n");
95 printf("Transduction options (see [3]):\n");
96 printf(" -p [0..1] -> fraction of unlabeled examples to be classified\n");
97 printf(" into the positive class (default is the ratio of\n");
98 printf(" positive and negative examples in the training data)\n");
99 printf("Kernel options:\n");
100 printf(" -t int -> type of kernel function:\n");
101 printf(" 0: linear (default)\n");
102 printf(" 1: polynomial (s a*b+c)^d\n");
103 printf(" 2: radial basis function exp(-gamma ||a-b||^2)\n");
104 printf(" 3: sigmoid tanh(s a*b + c)\n");
105 printf(" 4: user defined kernel from kernel.h\n");
106 printf(" -d int -> parameter d in polynomial kernel\n");
107 printf(" -g float -> parameter gamma in rbf kernel\n");
108 printf(" -s float -> parameter s in sigmoid/poly kernel\n");
109 printf(" -r float -> parameter c in sigmoid/poly kernel\n");
110 printf(" -u string -> parameter of user defined kernel\n");
111 printf("Optimization options (see [1]):\n");
112 printf(" -q [2..] -> maximum size of QP-subproblems (default 10)\n");
113 printf(" -n [2..q] -> number of new variables entering the working set\n");
114 printf(" in each iteration (default n = q). Set n<q to prevent\n");
115 printf(" zig-zagging.\n");
116 printf(" -m [5..] -> size of cache for kernel evaluations in MB (default 40)\n");
117 printf(" The larger the faster...\n");
118 printf(" -e float -> eps: Allow that error for termination criterion\n");
119 printf(" [y [w*x+b] - 1] >= eps (default 0.001)\n");
120 printf(" -h [5..] -> number of iterations a variable needs to be\n");
121 printf(" optimal before considered for shrinking (default 100)\n");
122 printf(" -f [0,1] -> do final optimality check for variables removed\n");
123 printf(" by shrinking. Although this test is usually \n");
124 printf(" positive, there is no guarantee that the optimum\n");
125 printf(" was found if the test is omitted. (default 1)\n");
126 printf("Output options:\n");
127 printf(" -l string -> file to write predicted labels of unlabeled\n");
128 printf(" examples into after transductive learning\n");
129 printf(" -a string -> write all alphas to this file after learning\n");
130 printf(" (in the same order as in the training set)\n");
131 printf("\nMore details in:\n");
132 printf("[1] T. Joachims, Making Large-Scale SVM Learning Practical. Advances in\n");
133 printf(" Kernel Methods - Support Vector Learning, B. Schoelkopf and C. Burges and\n");
134 printf(" A. Smola (ed.), MIT Press, 1999.\n");
135 printf("[2] T. Joachims, Estimating the Generalization performance of an SVM\n");
136 printf(" Efficiently. International Conference on Machine Learning (ICML), 2000.\n");
137 printf("[3] T. Joachims, Transductive Inference for Text Classification using Support\n");
138 printf(" Vector Machines. International Conference on Machine Learning (ICML),\n");
139 printf(" 1999.\n");
140 printf("[4] K. Morik, P. Brockhausen, and T. Joachims, Combining statistical learning\n");
141 printf(" with a knowledge-based approach - A case study in intensive care \n");
142 printf(" monitoring. International Conference on Machine Learning (ICML), 1999.\n");
143 printf("[5] T. Joachims, Learning to Classify Text Using Support Vector\n");
144 printf(" Machines: Methods, Theory, and Algorithms. Dissertation, Kluwer,\n");
145 printf(" 2002.\n\n");
146 }
147
print_version()148 void print_version()
149 {
150 printf("\n");
151 printf("*************************************************\n");
152 printf("*** 3dsvm: %s (%s), SVM-light: %s ***\n",
153 VERSION_3DSVM, VERSION_DATE_3DSVM, VERSION_SVMLIGHT);
154 printf("*************************************************\n");
155 printf("%s", change_string);
156 }
157
/* This function performs linear detrending without censored time-points, so
 * that removing censored volumes (e.g. using 3dTcat) is equivalent to using
 * 3dsvm with a censorfile and/or 9999 in the label file.
 * The classifier output is typically written to the prediction file
 * for all time-points, thus censored time-points are detrended based
 * on all data.
 *
 * TODO: Having a flag to detrend based on all data, even if
 *       data points are censored, might be good
 *
 * JL Aug. 2013: Bugfix: Checking of censored data points did not include
 * censorfile causing buffer overflow
 *
 * Returns 0 on success; 1 on error (errorString is filled in). */
int detrend_linear_cnsrs(float *data, LABELS *labels, char *errorString)
{

  int t, tc, nt, ntc = 0;    /* t: index over all time-points, tc: index
                                over uncensored time-points only */
  float *data_cnsrs = NULL;  /* holds only the uncensored values */


  ENTRY("detrend_linear_cnsrs");

  nt = labels->n;
  ntc = nt - labels->n_cnsrs;

  if( (data_cnsrs = (float *)malloc(sizeof(float)*(ntc))) == NULL ) {
    /* bugfix: message previously referred to "dist_cnsrs", which is not
     * the variable allocated here */
    snprintf(errorString, LONG_STRING, "detrend_linear_cnsrs: "
        "Memory allocation for data_cnsrs failed!");
    RETURN(1);
  }

  /* get data for uncensored time-points (cnsrs[t] == 1 means "keep") */
  for( t=0, tc=0; t<nt; t++) {
    if( labels->cnsrs[t] == 1 ) { /* not censored */
      data_cnsrs[tc] = data[t];
      tc++;
    }
  }

  DETREND_linear(nt, data);        /* detrend all */
  DETREND_linear(ntc, data_cnsrs); /* detrend without censored time-points */

  /* replace values for uncensored data points */
  for( t=0, tc=0; t<nt; t++ ) {
    if( labels->cnsrs[t] == 1 ) {
      data[t] = data_cnsrs[tc];
      tc++;
    }
  }

  IFree(data_cnsrs);

  RETURN(0);
}
213
214 /* JL June 2009: This function writes the svm-light DOC structure into
215 * a svm-light readable textfile
216 *
217 * JL Apr. 2010: Writing 1e-6 for voxels (features) equal to 0, otherwise
218 * svm-light gets the feature index wrong!
219 *
220 */
/* JL June 2009: This function writes the svm-light DOC structure into
 * a svm-light readable textfile (one "target index:value ..." line per
 * time-point).
 *
 * JL Apr. 2010: Writing 1e-6 for voxels (features) equal to 0, otherwise
 * svm-light gets the feature index wrong!
 *
 * Bugfixes: the file handle was leaked on the unknown-version early return;
 * "Timpepoint" typo corrected in the zero-voxel warning. */
void write_svmLight_doc(DOC *docs, long nt, long nvox,
    LabelType *target, char *fileName,  char *svmLight_ver)
{
  long t = 0;
  long v = 0;
  FILE *fp = NULL;


  ENTRY("write_svmLight_doc");
  INFO_message("Writing svm-light textfile...");

  if ( (fp=fopen(fileName, "w")) == NULL ) {
    WARNING_message("Can not open: %s to write svm-light (doc) textfile!",
        fileName);
    EXRETURN;
  }

  if ( !strcmp(svmLight_ver, "V5.00") ) {
    for ( t=0; t<nt; ++t ) {
      fprintf(fp, "%lf ", target[t]);
      for (v=0; v<nvox; ++v) {
        /* words are expected to cover all nvox features; wnum == 0 marks
         * an early end of the word array */
        if ( docs[t].words[v].wnum == 0 ) {
          WARNING_message("Writing svm-light textfile: "
              "Number of words shorter than expected\n");
          continue;
        }
        if ( docs[t].words[v].weight != 0 ) {
          fprintf(fp, "%ld:%lf ", v+1, docs[t].words[v].weight );
        }
        else {
          WARNING_message("Timepoint %4ld: voxel:%6ld is 0. Adding 1e-6 to fix "
              "a problem with svm-light", t, v);
          fprintf(fp, "%ld:%lf ", v+1, 1e-6);
        }
      }
      fprintf(fp, " # written by 3dsvm\n");
    }
  }
  else {
    WARNING_message("Can not write svm-light (doc) textfile"
        " svm-light version %s unknown", svmLight_ver);
    fclose(fp);   /* bugfix: fp used to be leaked on this path */
    EXRETURN;
  }

  fclose(fp);

  EXRETURN;
}
269
270 /* JL Sep. 2010 */
271 /* Mai 2011 */
/* JL Sep. 2010 / Mai 2011: Dump every ASLoptions field for debugging.
 * Bugfixes: first message typo "ASLoptipns" corrected; testFile was printed
 * with %d although it is a filename like the other *File fields (format/
 * argument mismatch is undefined behavior) -- now %s. */
void printASLoptions(ASLoptions* options)
{

  ENTRY("printASLoptions");

  INFO_message("ASLoptions: labelFile = %s\n", options->labelFile);
  INFO_message("ASLoptions: censorFile = %s\n", options->censorFile);
  INFO_message("ASLoptions: trainFile = %s\n", options->trainFile);
  INFO_message("ASLoptions: maskFile = %s\n", options->maskFile);
  INFO_message("ASLoptions: modelFile = %s\n", options->modelFile);
  INFO_message("ASLoptions: docFile = %s\n", options->docFile);
  INFO_message("ASLoptions: docFileOnly = %s\n", options->docFileOnly);
  INFO_message("ASLoptions: kernelName = %s\n", options->kernelName);
  INFO_message("ASLoptions: svmType = %s\n", options->svmType);
  INFO_message("ASLoptions: outModelNoMask = %d\n", options->outModelNoMask);
  INFO_message("ASLoptions: noPredDetrend = %d\n", options->noPredDetrend);
  INFO_message("ASLoptions: noPredCensor = %d\n", options->noPredCensor);
  INFO_message("ASLoptions: noPredScale = %d\n", options->noPredScale);
  INFO_message("ASLoptions: rtTrain = %d\n", options->rtTrain);
  INFO_message("ASLoptions: rtTest = %d\n", options->rtTest);
  INFO_message("ASLoptions: rtIP = %s\n", options->rtIP);
  INFO_message("ASLoptions: rtPort = %d\n", options->rtPort);
  INFO_message("ASLoptions: classout = %d\n", options->classout);
  INFO_message("ASLoptions: testFile = %s\n", options->testFile);
  INFO_message("ASLoptions: multiclass = %s\n", options->multiclass);
  INFO_message("ASLoptions: predFile = %s\n", options->predFile);
  INFO_message("ASLoptions: testLabelFile = %s\n", options->testLabelFile);
  INFO_message("ASLoptions: modelAlphaFile = %s\n", options->modelAlphaFile);
  INFO_message("ASLoptions: modelWeightFile = %s\n", options->modelWeightFile);

  EXRETURN;
}
304
305 /* JL Nov 2010 */
/* JL Nov 2010: Dump every AFNI_MODEL field for debugging; refuses models
 * with version < 1.00. Cleaned up: removed the unused local variable t. */
void printAfniModel( AFNI_MODEL *afniModel )
{

  int i = 0;   /* index over class-pair combinations */


  ENTRY("printAfniModel");

  if( afniModel->version < 1.00 ) {
    ERROR_message("Can not print afniModel! Version number: '%f' outdated!",
        afniModel->version);
    EXRETURN;
  }

  INFO_message("afniModel: version = %f\n", afniModel->version);
  INFO_message("afniModel: svm_type = %s\n", afniModel->svm_type);
  INFO_message("afniModel: mask_used = %d\n", afniModel->mask_used);
  INFO_message("afniModel: class_count = %d\n", afniModel->class_count);
  INFO_message("afniModel: combinations = %d\n", afniModel->combinations);
  INFO_message("afniModel: timepoints = %d\n", afniModel->timepoints);

  /* per-combination (one binary classifier per class pair) parameters */
  for( i=0; i<afniModel->combinations; ++i ) {
    INFO_message("afniModel: combName[%04d] = %s\n", i, afniModel->combName[i]);
    INFO_message("afniModel: kernel_custom[%04d] = %s\n", i, afniModel->kernel_custom[i]);
    INFO_message("afniModel: kernel_type[%04d] = %d\n", i, afniModel->kernel_type[i]);
    INFO_message("afniModel: polynomial_degree[%04d] = %f\n", i, afniModel->polynomial_degree[i]);
    INFO_message("afniModel: rbf_gamma[%04d] = %f\n", i, afniModel->rbf_gamma[i]);
    INFO_message("afniModel: linear_coefficient[%04d] = %f\n", i, afniModel->linear_coefficient[i]);
    INFO_message("afniModel: total_masked_features[%04d] = %d\n", i, afniModel->total_masked_features[i]);
    INFO_message("afniModel: total_support_vectors[%04d] = %d\n", i, afniModel->total_support_vectors[i]);
    INFO_message("afniModel: b[%04d] = %f\n", i, afniModel->b[i]);
    INFO_message("afniModel: eps[%04d] = %f\n", i, afniModel->eps[i]);
    INFO_message("afniModel: svm_c[%04d] = %f\n", i, afniModel->svm_c[i]);
    INFO_message("afniModel: biased_hyperplane[%04d] = %d\n", i, afniModel->biased_hyperplane[i]);
    INFO_message("afniModel: skip_final_opt_check[%04d] = %d\n", i, afniModel->skip_final_opt_check[i]);
    INFO_message("afniModel: svm_maxqpsize[%04d] = %d\n", i, afniModel->svm_maxqpsize[i]);
    INFO_message("afniModel: svm_newvarsinqp[%04d] = %d\n", i, afniModel->svm_newvarsinqp[i]);
    INFO_message("afniModel: svm_iter_to_shrink[%04d] = %d\n", i, afniModel->svm_iter_to_shrink[i]);
    INFO_message("afniModel: transduction_posratio[%04d] = %f\n", i, afniModel->transduction_posratio[i]);
    INFO_message("afniModel: svm_costratio[%04d] = %f\n", i, afniModel->svm_costratio[i]);
    INFO_message("afniModel: svm_costratio_unlab[%04d] = %f\n", i, afniModel->svm_costratio_unlab[i]);
    INFO_message("afniModel: svm_unlabbound[%04d] = %f\n", i, afniModel->svm_unlabbound[i]);
    INFO_message("afniModel: epsilon_a[%04d] = %f\n", i, afniModel->epsilon_a[i]);
    INFO_message("afniModel: epsilon_crit[%04d] = %f\n", i, afniModel->epsilon_crit[i]);
    INFO_message("afniModel: compute_loo[%04d] = %d\n", i, afniModel->compute_loo[i]);
    INFO_message("afniModel: rho[%04d] = %f\n", i, afniModel->rho[i]);
    INFO_message("afniModel: xa_depth[%04d] = %d\n", i, afniModel->xa_depth[i]);
  }

  EXRETURN;

}
358
359 /* JL Sep. 2010: This function was originally defined in plug_3dsvm.c. and
360 * only used by the plugin. Now it is also used by 3dsvm directly */
printArgv(char ** myargv,int * myargc)361 void printArgv(char **myargv, int *myargc)
362 {
363 int i = 0;
364
365 ENTRY("printArgv");
366
367 INFO_message("%s \\\n", myargv[0]);
368 for( i=1; i<*myargc; ++i) printf("\t_%s_\\\n", myargv[i]);
369 INFO_message("\n");
370
371 EXRETURN;
372 }
373
374 /* JL Sep. 2010: This function was originally defined in plug_3dsvm.c.
375 * and only used by the plugin. Now also used by 3dsvm directly to
376 * read command-line options from environment (e.g. .afnirc) */
argvAppend(char ** myargv,int * myargc,char * option,char * value)377 void argvAppend(char **myargv, int *myargc, char *option, char *value)
378 {
379
380 ENTRY("argvAppend");
381
382 /* --- append option --- */
383 if( (myargv[*myargc] = (char *)malloc( LONG_STRING * sizeof(char) )) ) {
384 strncpy(myargv[*myargc], option, LONG_STRING);
385 (*myargc)++;
386 }
387 else ERROR_exit("Could not allocate option string!");
388
389 /* --- append value --- */
390 if( value[0] ) {
391 if( (myargv[*myargc] = (char *)malloc( LONG_STRING * sizeof(char) )) ) {
392 strncpy(myargv[*myargc],value, LONG_STRING);
393 (*myargc)++;
394 }
395 else ERROR_exit("Could not allocate argument string!");
396 }
397
398 EXRETURN;
399 }
400
freeArgv(char ** myargv,int myargc)401 void freeArgv( char **myargv, int myargc )
402 {
403 int i = 0;
404
405 ENTRY("freeArgv");
406
407 for( i=0; i<myargc; i++ )
408 {
409 myargv[i]='\0';
410 IFree(myargv[i]);
411 }
412
413 EXRETURN;
414 }
415
416 /* JL Sep. 2010: This function goes through argv and returns 1
417 * if option (or option && value) is present */
argvCheck(char ** myargv,int * myargc,char * option,char * value)418 int argvCheck(char **myargv, int *myargc, char *option, char *value)
419 {
420 int i = 0;
421
422 ENTRY("argvCheck");
423
424 for( i=0; i<*myargc; ++i ) {
425 if( !strncmp(myargv[i], option, LONG_STRING) ) {
426 if( (value[0]) && (i<*myargc-2) ) {
427 if( !strncmp(myargv[i+1], value, LONG_STRING) ){
428 RETURN(1);
429 }
430 }
431 if( !value[0] ) RETURN(1);
432 }
433 }
434
435 RETURN(0);
436 }
437
438 /* JL Mai 2011: My poor mans way to parse the command-line into argc, argv */
getAllocateCmdlArgv(char * cmdl,char * progname,int * myargc,char *** myargv)439 void getAllocateCmdlArgv( char *cmdl, char *progname, int *myargc, char ***myargv)
440 {
441
442 long nargs = 0;
443 char ** args = NULL;
444 char * cmdl_copy = NULL;
445 char * option = NULL;
446
447
448 ENTRY("getAllocateCmdlArgv");
449
450
451 /* -- initialize for strtok -- */
452 cmdl_copy = strdup(cmdl);
453 option = strtok(cmdl_copy, " ");
454
455 while( option != NULL ) {
456 nargs++;
457 option = strtok(NULL, " ");
458 }
459 nargs++; /* +1 for program name */
460
461 /* -- allocate args -- */
462 args = Allocate2c(nargs, LONG_STRING);
463
464 /* -- go through command line and assign to args --*/
465 strncpy(args[0], progname, LONG_STRING); /* copy program name */
466 nargs = 1;
467
468 cmdl_copy = strdup(cmdl);
469 option = strtok(cmdl_copy, " ");
470
471 while( option != NULL ) {
472
473 if( strlen(option) > LONG_STRING ) {
474 ERROR_exit("Command line option %s\n"
475 " Exceeds maximum length: %d\n", option, LONG_STRING);
476 }
477
478 strncpy(args[nargs], option, LONG_STRING);
479 option = strtok(NULL, " ");
480
481 nargs++;
482 }
483
484 /* -- return pointers --*/
485 *myargv=args;
486 *myargc=nargs;
487
488
489 EXRETURN;
490 }
491
492
493 /* JL Sep. 2010: This function reads the command-line options from
494 * from the environment (e.g. .afnirc)
495 * (some of the options only can be used by the 3dsvm plugin
496 * in real-time mode) */
getEnvArgv(char ** myargv,int * myargc,char * name)497 void getEnvArgv(char **myargv, int *myargc, char *name)
498 {
499
500 char *ept = NULL;
501
502 ENTRY("getEnvArgv");
503
504 /* --- real-time flags ---*/
505
506 /* These options only makes sense for the 3dsvm plugin in real-time mode.
507 * and will cause error messages if 3dsvm is evoked with the "-getenv" option.
508 */
509 if( (!strncmp(name, "AFNI_3DSVM_RT_TRAIN", LONG_STRING)) ||
510 (!strncmp(name, "3DSVM_ALL_OPTIONS", LONG_STRING)) ) {
511 ept = my_getenv("AFNI_3DSVM_RT_TRAIN");
512 if( ept != NULL ) {
513 if( !strncmp(ept,"YES", LONG_STRING) ) {
514 argvAppend(myargv,myargc,"-rt_train","");
515 }
516 }
517 }
518
519 if( (!strncmp(name, "AFNI_3DSVM_RT_TEST", LONG_STRING)) ||
520 (!strncmp(name, "3DSVM_ALL_OPTIONS", LONG_STRING)) ) {
521 ept = my_getenv("AFNI_3DSVM_RT_TEST");
522 if( ept != NULL ) {
523 if( !strncmp(ept,"YES", LONG_STRING) ) {
524 argvAppend(myargv,myargc,"-rt_test","");
525 }
526 }
527 }
528
529 if( (!strncmp(name, "AFNI_3DSVM_RT_IP", LONG_STRING)) ||
530 (!strncmp(name, "3DSVM_ALL_OPTIONS", LONG_STRING)) ) {
531 ept = my_getenv("AFNI_3DSVM_RT_IP");
532 if( ept != NULL ) argvAppend(myargv, myargc,"-stim_ip", ept);
533 }
534
535 if( (!strncmp(name, "AFNI_3DSVM_RT_PORT", LONG_STRING)) ||
536 (!strncmp(name, "3DSVM_ALL_OPTIONS", LONG_STRING)) ) {
537 ept = my_getenv("AFNI_3DSVM_RT_PORT");
538 if( ept != NULL ) argvAppend(myargv, myargc,"-stim_port", ept);
539 }
540
541 /* --- testing flags ----*/
542 if( (!strncmp(name, "AFNI_3DSVM_NOMASK", LONG_STRING)) ||
543 (!strncmp(name, "3DSVM_ALL_OPTIONS", LONG_STRING)) ) {
544 ept = my_getenv("AFNI_3DSVM_NOMASK");
545 if( ept != NULL ) {
546 if( !strncmp(ept,"YES", LONG_STRING) ) {
547 argvAppend(myargv,myargc,"-nomodelmask","");
548 }
549 }
550 }
551
552 if( (!strncmp(name, "AFNI_3DSVM_NODETREND", LONG_STRING)) ||
553 (!strncmp(name, "3DSVM_ALL_OPTIONS", LONG_STRING)) ) {
554 ept = my_getenv("AFNI_3DSVM_NODETREND");
555 if( ept != NULL ) {
556 if( !strncmp(ept,"YES", LONG_STRING) ) {
557 argvAppend(myargv,myargc,"-nodetrend","");
558 }
559 }
560 }
561
562 /* --- training options --- */
563 if( (!strncmp(name, "AFNI_3DSVM_TRAIN_TYPE", LONG_STRING)) ||
564 (!strncmp(name, "3DSVM_ALL_OPTIONS", LONG_STRING)) ) {
565 ept = my_getenv("AFNI_3DSVM_TRAIN_TYPE");
566 if( ept != NULL ) argvAppend(myargv, myargc,"-type", ept);
567 }
568
569 if( (!strncmp(name, "AFNI_3DSVM_TRAIN_DSET", LONG_STRING)) ||
570 (!strncmp(name, "3DSVM_ALL_OPTIONS", LONG_STRING)) ) {
571 ept = my_getenv("AFNI_3DSVM_TRAIN_DSET");
572 if( ept != NULL ) argvAppend(myargv, myargc,"-trainvol", ept);
573 }
574
575 if( (!strncmp(name, "AFNI_3DSVM_TRAIN_LBLS", LONG_STRING)) ||
576 (!strncmp(name, "3DSVM_ALL_OPTIONS", LONG_STRING)) ) {
577 ept = my_getenv("AFNI_3DSVM_TRAIN_LBLS");
578 if( ept != NULL ) argvAppend(myargv,myargc,"-trainlabels", ept);
579 }
580
581 if( (!strncmp(name, "AFNI_3DSVM_MASK_DSET", LONG_STRING)) ||
582 (!strncmp(name, "3DSVM_ALL_OPTIONS", LONG_STRING)) ) {
583 ept = my_getenv("AFNI_3DSVM_MASK_DSET");
584 if( ept != NULL ) {
585 argvAppend(myargv,myargc,"-mask", ept);
586 }
587 }
588
589 if( (!strncmp(name, "AFNI_3DSVM_MODEL_DSET", LONG_STRING)) ||
590 (!strncmp(name, "3DSVM_ALL_OPTIONS", LONG_STRING)) ) {
591 ept = my_getenv("AFNI_3DSVM_MODEL_DSET");
592 if( ept != NULL ) argvAppend(myargv,myargc,"-model", ept);
593 }
594
595 if( (!strncmp(name, "AFNI_3DSVM_BUCKET_DSET", LONG_STRING)) ||
596 (!strncmp(name, "3DSVM_ALL_OPTIONS", LONG_STRING)) ) {
597 ept = my_getenv("AFNI_3DSVM_BUCKET_DSET");
598 if( ept != NULL ) argvAppend(myargv,myargc,"-bucket", ept);
599 }
600
601 if( (!strncmp(name, "AFNI_3DSVM_ALPHA_FILE", LONG_STRING)) ||
602 (!strncmp(name, "3DSVM_ALL_OPTIONS", LONG_STRING)) ) {
603 ept = my_getenv("AFNI_3DSVM_ALPHA_FILE");
604 if( ept != NULL ) argvAppend(myargv,myargc,"-alpha", ept);
605 }
606
607 if( (!strncmp(name, "AFNI_3DSVM_PARM_C", LONG_STRING)) ||
608 (!strncmp(name, "3DSVM_ALL_OPTIONS", LONG_STRING)) ) {
609 ept = my_getenv("AFNI_3DSVM_PARM_C");
610 if( ept != NULL ) argvAppend(myargv,myargc,"-c", ept);
611 }
612
613 if( (!strncmp(name, "AFNI_3DSVM_PARM_EPS", LONG_STRING)) ||
614 (!strncmp(name, "3DSVM_ALL_OPTIONS", LONG_STRING)) ) {
615 ept = my_getenv("AFNI_3DSVM_PARM_EPS");
616 if( ept != NULL ) argvAppend(myargv,myargc,"-e", ept);
617 }
618
619 if( (!strncmp(name, "AFNI_3DSVM_KERNEL_TYPE", LONG_STRING)) ||
620 (!strncmp(name, "3DSVM_ALL_OPTIONS", LONG_STRING)) ) {
621 ept = my_getenv("AFNI_3DSVM_KERNEL_TYPE");
622 if( ept != NULL ) argvAppend(myargv,myargc,"-kernel", ept);
623 }
624
625 if( (!strncmp(name, "AFNI_3DSVM_KERNEL_PARM_D", LONG_STRING)) ||
626 (!strncmp(name, "3DSVM_ALL_OPTIONS", LONG_STRING)) ) {
627 ept = my_getenv("AFNI_3DSVM_KERNEL_PARM_D");
628 if( ept != NULL ) argvAppend(myargv,myargc,"-d", ept);
629 }
630
631 if( (!strncmp(name, "AFNI_3DSVM_KERNEL_PARM_G", LONG_STRING)) ||
632 (!strncmp(name, "3DSVM_ALL_OPTIONS", LONG_STRING)) ) {
633 ept = my_getenv("AFNI_3DSVM_KERNEL_PARM_G");
634 if( ept != NULL ) argvAppend(myargv,myargc,"-g", ept);
635 }
636
637 if( (!strncmp(name, "AFNI_3DSVM_KERNEL_PARM_S", LONG_STRING)) ||
638 (!strncmp(name, "3DSVM_ALL_OPTIONS", LONG_STRING)) ) {
639 ept = my_getenv("AFNI_3DSVM_KERNEL_PARM_S");
640 if( ept != NULL ) argvAppend(myargv,myargc,"-s", ept);
641 }
642
643 if( (!strncmp(name, "AFNI_3DSVM_KERNEL_PARM_R", LONG_STRING)) ||
644 (!strncmp(name, "3DSVM_ALL_OPTIONS", LONG_STRING)) ) {
645 ept = my_getenv("AFNI_3DSVM_KERNEL_PARM_R");
646 if( ept != NULL ) argvAppend(myargv,myargc,"-r", ept);
647 }
648
649
650 /* --- testing options --- */
651 if( (!strncmp(name, "AFNI_3DSVM_TEST_DSET", LONG_STRING)) ||
652 (!strncmp(name, "3DSVM_ALL_OPTIONS", LONG_STRING)) ) {
653 ept = my_getenv("AFNI_3DSVM_TEST_DSET");
654 if( ept != NULL ) argvAppend(myargv,myargc,"-testvol", ept);
655 }
656
657 if( (!strncmp(name, "AFNI_3DSVM_TEST_LBLS", LONG_STRING)) ||
658 (!strncmp(name, "3DSVM_ALL_OPTIONS", LONG_STRING)) ) {
659 ept = my_getenv("AFNI_3DSVM_TEST_LBLS");
660 if( ept != NULL ) argvAppend(myargv,myargc,"-testlabels", ept);
661 }
662
663 if( (!strncmp(name, "AFNI_3DSVM_PRED_FILE", LONG_STRING)) ||
664 (!strncmp(name, "3DSVM_ALL_OPTIONS", LONG_STRING)) ) {
665 ept = my_getenv("AFNI_3DSVM_PRED_FILE");
666 if( ept != NULL ) argvAppend(myargv,myargc,"-predictions", ept);
667 }
668
669 if( (!strncmp(name, "AFNI_3DSVM_MCLASS_TYPE", LONG_STRING)) ||
670 (!strncmp(name, "3DSVM_ALL_OPTIONS", LONG_STRING)) ) {
671 ept = my_getenv("AFNI_3DSVM_MCLASS_TYPE");
672 if( ept != NULL ) argvAppend(myargv,myargc,"-multiclass", ept);
673 }
674
675 /* --- training or testing option --- */
676 if( (!strncmp(name, "AFNI_3DSVM_CENSOR_FILE", LONG_STRING)) ||
677 (!strncmp(name, "3DSVM_ALL_OPTIONS", LONG_STRING)) ) {
678 ept = my_getenv("AFNI_3DSVM_CENSOR_FILE");
679 if( ept != NULL ) argvAppend(myargv,myargc,"-censor", ept);
680 }
681
682
683 EXRETURN;
684 }
685
686
687 /* JL Feb. 2009: This function calculates the squared Euclidean length of
688 * a complex vector. */
/* JL Feb. 2009: Squared Euclidean length of a complex vector stored in a
 * WORD array: the first half of the entries holds the real parts, the
 * second half the imaginary parts (array terminated by wnum == 0). */
double cpxtwonorm_sq(WORD *a) {

  long n, half, k;
  double re, im, total;

  n=half=k=0;
  re=im=total=0.0;


  ENTRY("cpxtwonorm_sq");


  /* count entries up to the wnum == 0 terminator */
  while (a[n].wnum) {n++;}

  /* a complex vector must have an even number of entries */
  if (n%2 != 0) {
    ERROR_exit("something is wrong with the complex-valued data"
        "representation in the WORD structure.");
  }
  half = n/2;

  for (k = 0; k < half; k++) {
    re = a[k].weight;
    im = a[k+half].weight;
    total += re*re + im*im;
  }

  RETURN(total);
}
720
721 /* JL Sep 2010: Remove white space, tabs and \n from string */
/* JL Sep 2010: Remove spaces, tabs and newlines from string, in place.
 * Returns string (or NULL for NULL input).
 * Bugfix: the old version declared a VLA sized by strlen(string) *before*
 * the NULL check, which is undefined behavior for NULL input; it also
 * needed a temporary copy. Since removing characters only shrinks the
 * string, compact it in place instead. */
char *trimString(char *string)
{
  int i, j = 0;

  ENTRY("trimString");

  if( string == NULL ) RETURN(NULL);

  /* copy surviving characters down over the removed ones */
  for( i=0; string[i] !='\0'; i++ ) {
    if( (string[i] != ' ') && (string[i] != '\t') && (string[i] != '\n') ) {
      string[j++] = string[i];
    }
  }
  string[j]='\0';

  RETURN(string);
}
745
/* Count the lines (fgets-sized chunks) of fileName; exits if the file can
 * not be opened.
 * Bugfix: the old !feof() loop called fgets without checking its return
 * value and compensated with a trailing lineCount-- ; check fgets directly
 * instead, which is the reliable idiom. Note: lines longer than the buffer
 * are counted once per buffer-full (same as before). */
long getFileSize( char *fileName )
{
  FILE *fp = NULL;
  long lineCount = 0;
  char str[400];

  ENTRY("getFileSize");

  if( (fp = fopen(fileName, "r")) == NULL ) {
    ERROR_exit("Can not open file in getFileSize");
  }

  while( fgets(str, sizeof(str), fp) != NULL ) {
    lineCount++;
  }

  fclose(fp);

  RETURN(lineCount);
}
768
/* qsort-style comparison of two ints: returns 1, -1 or 0.
 * Bugfix: the old implementation computed *a - *b, which overflows (and is
 * undefined behavior) when the operands have opposite signs and a large
 * magnitude; compare the values directly instead. */
int compare_ints( const int *a, const int *b ) {
  if( *a > *b )
    return 1;
  else if( *a < *b )
    return -1;
  else
    return 0;
}
778
779
780 /* JL Mar. 2009 */
/* JL Mar. 2009: Allocate a 2D double array darr[index1][index2].
 * JL June 2011: Returns NULL if memory can not be allocated (does not exit).
 * Bugfix: on a partial row-allocation failure, the previously allocated
 * rows and the pointer table itself are now released before returning NULL
 * (they used to be leaked). */
double **Allocate2d(long index1, long index2)
{
  long i = 0;
  double **darr = NULL;

  ENTRY("Allocate2d");

  if( (darr = (double **)malloc(index1*sizeof(double *))) == NULL ) {
    RETURN(NULL);
  }

  for(i = 0; i < index1; i++) {
    if( (darr[i] = (double *)malloc(index2*sizeof(double))) == NULL ) {
      while( --i >= 0 ) free(darr[i]);
      free(darr);
      RETURN(NULL);
    }
  }

  RETURN(darr);
}
802
803 /* JL Mar. 2009 */
free2d(double ** x,long index1)804 void free2d(double **x, long index1)
805 {
806 long i;
807
808 ENTRY("free2d");
809
810 if( x != NULL )
811 {
812 for(i = 0; i < index1; i++)
813 {
814 if( x[i] != NULL )
815 IFree(x[i]);
816 x[i]=NULL;
817 }
818 IFree(x);
819 x=NULL;
820 }
821
822 EXRETURN;
823 }
824
825 /* JL Mar. 2009 */
Clear2d(double ** x,long index1,long index2)826 void Clear2d(double **x, long index1, long index2)
827 {
828 long i,j;
829
830 ENTRY("Clear2d");
831
832 for ( i=0; i<index1; ++i ) {
833 for ( j=0; j<index2; ++j ) {
834 x[i][j] = (double) 0.0;
835 }
836 }
837
838 EXRETURN;
839 }
840
841 /****************************************************************
842 * Allocate2f() *
843 * farr[index1][index2] *
844 ****************************************************************/
/****************************************************************
 * Allocate2f()                                                 *
 * farr[index1][index2]                                         *
 ****************************************************************/
/* JL June 2011: Returns NULL if memory can not be allocated (does not exit).
 * Bugfix: on a partial row-allocation failure, the previously allocated
 * rows and the pointer table are now released before returning NULL. */
float **Allocate2f(long index1, long index2)
{
  long i = 0;
  float ** farr = NULL;

  ENTRY("Allocate2f");

  if( (farr = (float **)malloc(index1*sizeof(float *))) == NULL ) {
    RETURN(NULL);
  }

  for(i = 0; i < index1; i++) {
    if( (farr[i] = (float *)malloc(index2*sizeof(float))) == NULL ) {
      while( --i >= 0 ) free(farr[i]);
      free(farr);
      RETURN(NULL);
    }
  }

  RETURN(farr);
}
866
free2f(float ** x,long index1)867 void free2f(float **x, long index1)
868 {
869 long i;
870
871 ENTRY("free2f");
872
873 if( x != NULL )
874 {
875 for(i = 0; i < index1; i++)
876 {
877 IFree(x[i]);
878 }
879 IFree(x);
880 }
881
882 EXRETURN;
883 }
884
885 /* JL Mar. 2009 */
Clear2f(float ** x,long index1,long index2)886 void Clear2f(float **x, long index1, long index2)
887 {
888 long i,j;
889
890 ENTRY("Clear2f");
891
892 for ( i=0; i<index1; ++i ) {
893 for ( j=0; j<index2; ++j ) {
894 x[i][j] = (float) 0.0;
895 }
896 }
897
898 EXRETURN;
899 }
900
901 /****************************************************************
902 * Allocate2DT() -- Datasetype *
903 * arr[index1][index2] *
904 ****************************************************************/
Allocate2DT(long index1,long index2)905 DatasetType **Allocate2DT(long index1, long index2)
906 {
907 long i = 0;
908 DatasetType **arr = NULL;
909
910 ENTRY("Allocate2DT");
911
912 /* JL June 2011: Modified error handling. Return NULL if memory
913 can not be allocated. Don't exit! */
914
915 if( ( arr = (DatasetType **)malloc(index1*sizeof(DatasetType *)) ) ) {
916 for(i = 0; i < index1; i++) {
917 if( ( arr[i] = (DatasetType *)malloc(index2*sizeof(DatasetType)) ) );
918 else RETURN(NULL);
919 }
920 }
921 else RETURN(NULL);
922
923
924 RETURN(arr);
925 }
926
/* Release a 2D DatasetType array created by Allocate2DT(): all index1
 * rows, then the row-pointer array. A NULL pointer is tolerated. */
void free2DT(DatasetType **x, long index1)
{
  long row;

  ENTRY("free2DT");

  if( x == NULL ) EXRETURN;

  for( row = 0; row < index1; row++ ) IFree(x[row]);
  IFree(x);

  EXRETURN;
}
943
944 /* JL Mar. 2009 */
/* Set every element of the 2D array x[index1][index2] to zero. */
void Clear2DT(DatasetType **x, long index1, long index2)
{
  long row, col;

  ENTRY("Clear2DT");

  for( row = 0; row < index1; row++ ) {
    for( col = 0; col < index2; col++ ) {
      x[row][col] = (DatasetType)0;
    }
  }

  EXRETURN;
}
959
960 /* JL Mar. 2009 */
/* JL Mar. 2009: Allocate a 2D char array carr[index1][index2].
 * Returns NULL if memory can not be allocated (caller handles the error).
 * JL/fix: on a partial failure all rows allocated so far - and the
 * row-pointer array itself - are now released, so nothing is leaked. */
char **Allocate2c(long index1, long index2)
{
  long i = 0;
  char **carr = NULL;

  ENTRY("Allocate2c");

  if( (carr = (char **)malloc(sizeof(char *) * index1)) == NULL ) RETURN(NULL);

  for( i=0; i<index1; i++ ) {
    if( (carr[i] = (char *)malloc(sizeof(char) * index2)) == NULL ) {
      /* free rows allocated so far plus the row-pointer array */
      while( --i >= 0 ) IFree(carr[i]);
      IFree(carr);
      RETURN(NULL);
    }
  }

  RETURN(carr);
}
978
979
980 /* JL Mar. 2009 */
Clear2c(char ** x,long index1)981 void Clear2c(char **x, long index1)
982 {
983 long i;
984
985 ENTRY("Clear2c");
986
987 for(i=0; i<index1; i++) {
988 strcpy(x[i], "\0");
989 }
990
991 EXRETURN;
992 }
993
994 /* JL Mar. 2009 */
free2c(char ** x,long index1)995 void free2c(char **x, long index1)
996 {
997 long i;
998
999 ENTRY("free2c");
1000
1001 if( x != NULL )
1002 {
1003 for(i = 0; i < index1; i++)
1004 {
1005 IFree(x[i]);
1006 }
1007 IFree(x);
1008 }
1009
1010 EXRETURN;
1011 }
1012
1013
allocateDOCs(long ndocsTime,long nvoxelWords)1014 DOC * allocateDOCs(long ndocsTime, long nvoxelWords)
1015 {
1016 long i = 0;
1017 DOC * docs = NULL;
1018
1019 /* JL: July 2011: Modified function to allocate the DOCs and
1020 the WORDs for the DOCs together. Return memory or NULL if
1021 memory can not be allocated */
1022
1023 ENTRY("allocateDOCs");
1024
1025 /* -- allocate DOCs (timepoints) -- */
1026 if( (docs = (DOC*)malloc(sizeof(DOC)*ndocsTime)) == NULL ) {
1027 RETURN(NULL);
1028 }
1029
1030 /* allocate WORDs (voxels) for each DOC (timepoints) */
1031 for( i=0; i < ndocsTime; ++i ) {
1032 if( (docs[i].words = (WORD*)malloc(sizeof(WORD)*(nvoxelWords+1))) == NULL ) {
1033 RETURN(NULL);
1034 }
1035 }
1036
1037 RETURN(docs);
1038 }
1039
/* Release the DOC array created by allocateDOCs(): the WORD array of
 * each of the ndocsTime DOCs, then the DOC array itself. */
void freeDOCs(DOC *docs, long ndocsTime)
{
  long t;

  ENTRY("freeDOCs");

  /* Note from svm-light:
     Warning: The model contains references to the original data 'docs'.
     If you want to free the original data, and only keep the model, you
     have to make a deep copy of 'model'. */
  /* deep_copy_of_model=copy_model(model); */

  if( docs == NULL ) EXRETURN;

  for( t = 0; t < ndocsTime; ++t ) {
    if( docs[t].words != NULL ) IFree(docs[t].words);
    docs[t].words = NULL;
  }
  IFree(docs);
  docs = NULL;

  EXRETURN;
}
1066
allocateMultiClassArrays(float *** multiclass_dist,float ** classCorrect,float ** classIncorrect,int ** classVote,int ** classList,long n_classMax,long n_classComb,long nt,char * errorString)1067 int allocateMultiClassArrays( float ***multiclass_dist, float **classCorrect,
1068 float **classIncorrect, int **classVote, int **classList, long n_classMax,
1069 long n_classComb, long nt, char *errorString )
1070 {
1071 float ** tmp_mcdist = NULL;
1072 float * tmp_classCorrect = NULL;
1073 float * tmp_classIncorrect = NULL;
1074 int * tmp_classVote = NULL;
1075 int * tmp_classList = NULL;
1076
1077 ENTRY("allocateMultiClassArrays");
1078
1079 /* JL July 2011: Added this function to simplify the flow in
1080 test_classification. All arrays necessary for multiclass
1081 (current method DAG and vote) are allocated here.
1082
1083 TODO: We are bit inefficient, since either
1084 DAG or vote is used, and we are allocating for both.
1085 */
1086
1087
1088 if( (tmp_mcdist = Allocate2f(n_classComb, nt)) == NULL ) {
1089 snprintf(errorString, LONG_STRING, "allocateMultiClassArrays: "
1090 "Memory allocation for tmp_mcdist failed!");
1091 RETURN(1);
1092 }
1093
1094 if( (tmp_classCorrect = (float *)malloc(sizeof(float)*n_classMax)) == NULL ) {
1095 snprintf(errorString, LONG_STRING, "allocateMultiClassArrays: "
1096 "Memory allocation for tmp_classCorrect failed!");
1097
1098 /* free and return */
1099 free2f(tmp_mcdist, n_classComb);
1100 RETURN(1);
1101 }
1102
1103 if( (tmp_classIncorrect = (float *)malloc(sizeof(float)*n_classMax)) == NULL ) {
1104 snprintf(errorString, LONG_STRING, "allocateMultiClassArrays: "
1105 "Memory allocation for tmp_classIncorrect failed!");
1106
1107 /* free and return */
1108 free2f(tmp_mcdist, n_classComb);
1109 if( tmp_classCorrect != NULL ) IFree(tmp_classCorrect);
1110 tmp_classCorrect = NULL;
1111 RETURN(1);
1112 }
1113
1114 if( (tmp_classVote = (int *)malloc(sizeof(int)*n_classMax)) == NULL ) {
1115 snprintf(errorString, LONG_STRING, "allocateMultiClassArrays: "
1116 "Memory allocation for tmp_classVote failed!");
1117
1118 /* free and return */
1119 free2f(tmp_mcdist, n_classComb);
1120 IFree(tmp_classCorrect);
1121 IFree(tmp_classIncorrect);
1122 RETURN(1);
1123 }
1124
1125 if( (tmp_classList = (int *)malloc(sizeof(int)*n_classMax)) == NULL ) {
1126 snprintf(errorString, LONG_STRING, "allocateMultiClassArrays: "
1127 "Memory allocation for tmp_classList failed!");
1128
1129 /* free and return */
1130 free2f(tmp_mcdist, n_classComb);
1131 IFree(tmp_classCorrect);
1132 IFree(tmp_classIncorrect);
1133 IFree(tmp_classVote);
1134 RETURN(1);
1135 }
1136
1137
1138 /* -- return pointers to allocated memory -- */
1139 *multiclass_dist = tmp_mcdist;
1140 *classCorrect = tmp_classCorrect;
1141 *classIncorrect = tmp_classIncorrect;
1142 *classVote = tmp_classVote;
1143 *classList = tmp_classList;
1144
1145 RETURN(0);
1146 }
1147
freeMultiClassArrays(float ** multiclass_dist,float * classCorrect,float * classIncorrect,int * classVote,int * classList,long n_classComb)1148 void freeMultiClassArrays( float **multiclass_dist, float *classCorrect,
1149 float *classIncorrect, int *classVote, int *classList, long n_classComb )
1150 {
1151
1152 ENTRY("freeMultiClassArryas");
1153
1154 free2f(multiclass_dist, n_classComb);
1155 IFree(classCorrect);
1156 IFree(classIncorrect);
1157 IFree(classVote);
1158 IFree(classList);
1159
1160 EXRETURN;
1161 }
1162
1163 /* JL Mar 2014: Added this function for handeling mask datasets of various
1164 * data types (byte only before that) */
getAllocateMaskArray(THD_3dim_dataset * dset,char * errorString)1165 MaskType* getAllocateMaskArray( THD_3dim_dataset *dset, char *errorString )
1166 {
1167 long v = 0; /* index over nvox */
1168 long nvox = 0; /* number of voxels */
1169 int datum = 0; /* datum type */
1170
1171 MaskType* maskArray = NULL;
1172
1173 ENTRY("getAllocateMaskArray");
1174
1175 /* --- just making sure we have a dset to work with --- */
1176 /* we should never get here */
1177 if( dset == NULL ) {
1178 snprintf(errorString, LONG_STRING, "getAllocateMaskArray: "
1179 "What happened?! Pointer to dataset is NULL!");
1180
1181 RETURN(NULL);
1182
1183 }
1184 if( !DSET_LOADED(dset) ) {
1185 snprintf(errorString, LONG_STRING, "getAllocateMaskArray: "
1186 "What happened?! Dataset is not in memory!");
1187
1188 RETURN(NULL);
1189 }
1190
1191 if ( DSET_NUM_TIMES(dset) > 1 ) {
1192 /* 3D+t as a mask dataset? */
1193 snprintf(errorString, LONG_STRING, "getAllocateMaskArray: "
1194 "Time dimension not supported!");
1195 RETURN(NULL);
1196 }
1197
1198 /* --- initialize and allocate ---*/
1199 nvox = DSET_NVOX( dset );
1200
1201 if( (maskArray = (MaskType *) malloc(sizeof(MaskType)*nvox)) == NULL) {
1202 snprintf(errorString, LONG_STRING, "getAllocateMaskArray: "
1203 "Memory allocation for dsetMask failed!");
1204
1205 RETURN(NULL);
1206 }
1207
1208 /* --- convert to internal mask representation (MaskType) --- */
1209 datum = DSET_BRICK_TYPE(dset,0);
1210
1211 switch (datum) {
1212 case MRI_float: {
1213
1214 float* tmp_dsetArray = (float *) DSET_ARRAY(dset,0);
1215
1216 /* fill mask array */
1217 for( v=0; v<nvox; ++v ) {
1218 if( abs(tmp_dsetArray[v]) > 0.0000001f ) maskArray[v] = (MaskType) 1;
1219 else maskArray[v] = (MaskType) 0;
1220 }
1221 }
1222 break;
1223
1224 case MRI_short: {
1225
1226 short* tmp_dsetArray = (short *) DSET_ARRAY(dset,0);
1227
1228 /* fill mask array */
1229 for( v=0; v<nvox; ++v ) {
1230 if( abs(tmp_dsetArray[v]) > 0 ) maskArray[v] = (MaskType) 1;
1231 else maskArray[v] = (MaskType) 0;
1232 }
1233 }
1234 break;
1235
1236 case MRI_byte: {
1237 /* That's the datum type we want, but might define it differently
1238 * in the future, so doing cast regardless. */
1239
1240 byte* tmp_dsetArray = (byte *) DSET_ARRAY(dset,0);
1241
1242 /* fill mask array */
1243 for( v=0; v<nvox; ++v ) {
1244 if( tmp_dsetArray[v] > 0 ) maskArray[v] = (MaskType) 1;
1245 else maskArray[v] = (MaskType) 0;
1246 }
1247 }
1248 break;
1249
1250 case MRI_rgb:
1251 snprintf(errorString, LONG_STRING,
1252 "Sorry, datum-type MRI_rgb (%d) is not supported!", datum);
1253
1254 /* free end return */
1255 IFree(maskArray);
1256 RETURN(NULL);
1257 break;
1258
1259 case MRI_complex:
1260 snprintf(errorString, LONG_STRING,
1261 "Sorry, datum-type MRI_complex (%d) is not supported!", datum);
1262
1263 /* free end return */
1264 IFree(maskArray);
1265 RETURN(NULL);
1266 break;
1267
1268 default:
1269 snprintf(errorString, LONG_STRING,
1270 "Unknown datum-type (%d)", datum);
1271
1272 /* free end return */
1273 IFree(maskArray);
1274 RETURN(NULL);
1275 break;
1276 }
1277
1278 RETURN(maskArray);
1279 }
1280
1281
getAllocateDsetArray(THD_3dim_dataset * dset,char * errorString)1282 DatasetType** getAllocateDsetArray( THD_3dim_dataset *dset, char *errorString )
1283 {
1284 long v = 0; /* index over nvox */
1285 long t = 0; /* index over nt */
1286 long nt = 0; /* number of observations (time-points) total */
1287 long nvox = 0; /* number of voxels */
1288 int datum = 0; /* datum type */
1289
1290 DatasetType **
1291 dsetArray = NULL;
1292
1293 ENTRY("getAllocateDsetArray");
1294
1295 /* JL June 2011: Modified error handling: Passing error string as argument
1296 * to the calling function, allocated memory is freed, RETURN(1)
1297 * instead of ERROR_exit.
1298 */
1299
1300 /* --- just making sure we have a dset to work with --- */
1301 /* we should never get here */
1302 if( dset == NULL) {
1303 snprintf(errorString, LONG_STRING, "getAllocateDsetArray: "
1304 "What happened?! Pointer to dataset is NULL!");
1305
1306 RETURN(NULL);
1307
1308 }
1309 if( !DSET_LOADED(dset) ) {
1310 snprintf(errorString, LONG_STRING, "getAllocateDsetArray: "
1311 "What happened?! Dataset is not in memory!");
1312
1313 RETURN(NULL);
1314 }
1315
1316 /* --- initialize and allocate ---*/
1317 nvox = DSET_NVOX( dset );
1318 nt = DSET_NUM_TIMES( dset );
1319 if( (dsetArray = Allocate2DT(nt, nvox)) == NULL ) {
1320 snprintf(errorString, LONG_STRING, "getAllocateDsetArray: "
1321 "Memory allocation for dsetArray failed!");
1322
1323 RETURN(NULL);
1324 }
1325
1326
1327 /* --- make sure all bricks have same datum --- */
1328 if ( !DSET_datum_constant(dset) ) {
1329 snprintf(errorString, LONG_STRING, "Creating dataset array failed! Sub-briks "
1330 "have different datum types!");
1331
1332 /* -- free end return -- */
1333 free2DT( dsetArray, nt );
1334
1335 RETURN(NULL);
1336
1337 }
1338
1339 /* --- converting data to internal representation (DatasetType) --- */
1340 datum = DSET_BRICK_TYPE(dset,0);
1341
1342 switch (datum) {
1343 case MRI_float:
1344 for( t=0; t<nt; ++t ) {
1345 /* -- create 1D array to hold one volume -- */
1346 float* tmp_dsetArray = (float *) DSET_ARRAY(dset,t);
1347
1348 /* -- create 2D array to hold [time][volume] -- */
1349 for( v=0; v<nvox; ++v ){
1350 dsetArray[t][v] = (DatasetType) tmp_dsetArray[v];
1351 }
1352 }
1353 break;
1354 case MRI_short:
1355 for( t=0; t<nt; ++t ) {
1356
1357 /* -- create 1D array to hold one volume -- */
1358 short* tmp_dsetArray = (short *) DSET_ARRAY(dset,t);
1359
1360 /* -- create 2D array to hold [time][volume] -- */
1361 for( v=0; v<nvox; ++v ){
1362 dsetArray[t][v] = (DatasetType) tmp_dsetArray[v];
1363 }
1364 }
1365 break;
1366 case MRI_byte:
1367 snprintf(errorString, LONG_STRING,
1368 "Sorry, datum-type MRI_byte (%d) is not supported!", datum);
1369
1370 /* free end return */
1371 free2DT( dsetArray, nt );
1372 RETURN(NULL);
1373 break;
1374
1375 case MRI_rgb:
1376 snprintf(errorString, LONG_STRING,
1377 "Sorry, datum-type MRI_rgb (%d) is not supported!", datum);
1378
1379 /* free end return */
1380 free2DT( dsetArray, nt );
1381 RETURN(NULL);
1382 break;
1383
1384 case MRI_complex:
1385 snprintf(errorString, LONG_STRING,
1386 "Sorry, datum-type MRI_complex (%d) is not supported!", datum);
1387
1388 /* free end return */
1389 free2DT( dsetArray, nt );
1390 RETURN(NULL);
1391
1392 break;
1393
1394 default:
1395 snprintf(errorString, LONG_STRING,
1396 "Unknown datum-type (%d)", datum);
1397
1398 /* free end return */
1399 free2DT( dsetArray, nt );
1400 RETURN(NULL);
1401
1402 break;
1403 }
1404
1405 RETURN(dsetArray);
1406 }
1407
1408 /* TODO: Check if dset needs to be in memory in order to access
1409 DSET_NUM_TIMES() */
/* Release a 2D array created by getAllocateDsetArray(); the row count
 * is taken from the dataset's time dimension. */
void freeDsetArray(THD_3dim_dataset *dset, DatasetType** dsetArray)
{
  long nt;   /* number of observations (time-points) total */

  ENTRY("freeDsetArray");

  /* without a loaded dataset the row count is unknown - bail out */
  if( dset == NULL) EXRETURN;
  if( !DSET_LOADED(dset) ) EXRETURN; /*TODO: this might not be required ! */

  nt = DSET_NUM_TIMES( dset );
  free2DT( dsetArray, nt );

  EXRETURN;
}
1424
1425
/* Allocate the svm-light MODEL members (supvec, their WORD arrays,
 * alpha, and - for linear kernels - lin_weights) sized from afni_model.
 * Returns 0 on success; on failure writes a message into errorString,
 * releases everything allocated so far and returns 1. */
int allocateModel( MODEL *model, AFNI_MODEL *afni_model, char *errorString )
{
  long nsv = 0;          /* number of support vectors */
  long sv = 0;           /* index over nsv */
  long svErr = 0;        /* index for error-path cleanup loops */
  long nvox_masked = 0;


  ENTRY("allocateModel");

  /* our approach to multiclass is to keep all training timepoints
   * with non-support vectors as alpha = 0
   * thus the model "documents" and number of support vectors is
   * always the number of timepoints in in the training data
   *
   * JL July 2011: Modified error handling: Passing error message
   * as argument (errorString) to the calling function, allocated memory
   * is freed, RETURN(1) instead of ERROR_exit()
   *
   * JL July 2011: Replaced svm-light's my_malloc by malloc (or mcw_malloc,
   * I should say) this allows us to find memory problems and check
   * for out of memory in 3dsvm rather than in svm-light.
   *
   * JL/fix: Three defects fixed in the error handling:
   *  (1) the alpha/lin_weights failure paths freed model->supvec BEFORE
   *      looping over model->supvec[sv] and then freed it again
   *      (use-after-free + double free);
   *  (2) a mid-loop supvec allocation failure leaked everything
   *      allocated up to that point;
   *  (3) lin_weights was sized sizeof(double)*nvox_masked + 1 (one extra
   *      BYTE) instead of sizeof(double)*(nvox_masked+1) (one extra
   *      double) - a likely cause of the corruption noted here before.
   */

  if( afni_model == NULL ) {
    /* we should never get here */
    snprintf(errorString, LONG_STRING, "allocateModel: "
        "What happened? Can't access afni model!");

    RETURN(1);
  }

  /* -- initialize -- */
  if( !strcmp(afni_model->svm_type, "regression") ) {
    nsv = afni_model->total_support_vectors[0];
  }
  else if( !strcmp(afni_model->svm_type, "classification") ) {
    nsv = afni_model->timepoints + 1;
    /* (timpoints + 1) is svmlights number of support vectors */
  }

  nvox_masked = afni_model->total_masked_features[0];
  /* [0] assumes that all models use the same mask */


  /* -- allocate -- */
  if( (model->supvec = (DOC **)malloc(sizeof(DOC *)*(nsv))) == NULL ) {
    snprintf(errorString, LONG_STRING, "allocateModel: "
        "Memory allocation for model->supvec failed!");

    RETURN(1);
  }

  for( sv=1; sv<nsv; ++sv ) {
    if( (model->supvec[sv] = (DOC *)calloc(sizeof(DOC), 1)) == NULL ) {
      snprintf(errorString, LONG_STRING, "allocateModel: "
          "Memory allocation for model for model->supvec[%ld] failed!", sv);

      /* free everything allocated so far (contents first, array last) */
      for( svErr=1; svErr<sv; ++svErr ) {
        IFree( (model->supvec[svErr])->words );
        IFree(model->supvec[svErr]);
      }
      IFree(model->supvec);

      RETURN(1);
    }
    if( ((model->supvec[sv])->words =
          (WORD *)calloc(sizeof(WORD), nvox_masked + 1)) == NULL ) {
      /* + 1 for end of list value */

      snprintf(errorString, LONG_STRING, "allocateModel: "
          "Memory allocation for model->supvec[%ld])->words failed!", sv);

      /* free everything allocated so far (contents first, array last) */
      IFree(model->supvec[sv]);
      for( svErr=1; svErr<sv; ++svErr ) {
        IFree( (model->supvec[svErr])->words );
        IFree(model->supvec[svErr]);
      }
      IFree(model->supvec);

      RETURN(1);
    }
  }

  if( (model->alpha = (double *)malloc(sizeof(double)*(nsv))) == NULL ) {
    snprintf(errorString, LONG_STRING, "allocateModel: "
        "Memory allocation for model->alpha failed!");

    /* free and return (supvec contents BEFORE the supvec array!) */
    for( sv=1; sv<nsv; ++sv ) {
      IFree( (model->supvec[sv])->words );
      IFree(model->supvec[sv]);
    }
    IFree(model->supvec);

    RETURN(1);
  }

  if( afni_model->kernel_type[0] == LINEAR ) {
    /* (nvox_masked + 1) doubles - see fix note (3) above */
    if( (model->lin_weights=(double *)my_malloc(sizeof(double)*(nvox_masked + 1))) == NULL ) {

      snprintf(errorString, LONG_STRING, "allocateModel: "
          "Memory allocation for model->lin_weights failed!");

      /* free and return (supvec contents BEFORE the supvec array!) */
      for( sv=1; sv<nsv; ++sv ) {
        IFree( (model->supvec[sv])->words );
        IFree(model->supvec[sv]);
      }
      IFree(model->supvec);
      IFree(model->alpha);

      RETURN(1);
    }
  }

  RETURN(0);
}
1537
/* Release the svm-light MODEL members allocated by allocateModel().
 * In TEST mode the support vectors are owned by the model and freed
 * here; in TRAIN mode they belong to the DOCs (freed in freeDOCs). */
void freeModel( MODEL *model, AFNI_MODEL *afni_model, enum modes mode )
{

  long nsv = 0;   /* number of support vectors */
  long sv = 0;    /* index over nsv */

  ENTRY("freeModel");


  /* Note from svm_learn_main.c:
     Warning: The model contains references to the original data 'docs'.
     If you want to free the original data, and only keep the model, you
     have to make a deep copy of 'model'. */
  /* deep_copy_of_model=copy_model(model); */


  if( model == NULL ) {
    ERROR_message("Can't free svm-light model!");
    EXRETURN;
  }

  /* support-vector count, mirroring allocateModel() */
  if( !strcmp(afni_model->svm_type, "regression") ) {
    nsv = afni_model->total_support_vectors[0];
  }
  else if( !strcmp(afni_model->svm_type, "classification") ) {
    /* (timepoints + 1) is svmlights number of support vectors */
    nsv = afni_model->timepoints + 1;
  }

  switch( mode ) {
    case TEST:
      /* free the model throughly, we don't have other references to the
         original training data */
      for( sv=1; sv<nsv; ++sv) {
        IFree( (model->supvec[sv])->words );
        IFree(model->supvec[sv]);
      }
      IFree(model->supvec);
      IFree(model->alpha);
      break;

    case TRAIN:
      /* model->supvec are freed by freeing the DOCs in freeDOCs */
      IFree(model->supvec);
      IFree(model->alpha);
      IFree(model->index);
      break;

    default:
      /* nothing to free for other modes */
      break;
  }

  /* if(model->kernel_parm.kernel_type == LINEAR ) IFree(model->lin_weights); */

  EXRETURN;
}
1589
/* Copy the class-combination (comb) specific parameters from the AFNI
 * model into the svm-light MODEL. get_svm_model() fills in the
 * combination-independent parts; this is called once per combination. */
void updateModel(MODEL *model, AFNI_MODEL *afni_model, int comb)
{
  long i = 0;
  long sv = 0;


  ENTRY("updateModel");

  model->kernel_parm.kernel_type = afni_model->kernel_type[comb];
  model->kernel_parm.poly_degree = afni_model->polynomial_degree[comb];
  model->kernel_parm.rbf_gamma = afni_model->rbf_gamma[comb];
  model->kernel_parm.coef_lin = (double) afni_model->linear_coefficient[comb];
  model->kernel_parm.coef_const = (double) afni_model->constant_coefficient[comb];
  model->totwords = (long) afni_model->total_masked_features[comb];

  /* JL/fix: this used kernel_custom[i] with i always 0 here; every other
   * per-combination field is indexed with comb, so do the same. Also
   * force NUL-termination (strncpy does not guarantee it). */
  strncpy(model->kernel_parm.custom, afni_model->kernel_custom[comb], 50);
  model->kernel_parm.custom[49] = '\0';

  model->b = (double) afni_model->b[comb];


  /* regression */
  if( !strcmp(afni_model->svm_type,"regression") ) {
    /* comb = 0 for regression */
    /* number of docs is doubled for regression */
    model->totdoc = (long) afni_model->timepoints*2;
    model->sv_num = (long) afni_model->total_support_vectors[0];

    /* copy only the non-zero alphas (the true support vectors) */
    sv=1;
    for( i=0; i<model->totdoc; ++i) {
      if ( fabs(afni_model->alphas[0][i]) > 0 ) {
        model->alpha[sv] = (double) afni_model->alphas[0][i];

        ++sv;
      }
    }
  }
  /* classification */
  else {

    /* our approach to multiclass is to keep all training timepoints
     * with non-support vectors as alpha = 0
     * thus the model "documents" and number of support vectors is
     * always the number of timepoints in in the training data
     *
     */
    model->totdoc = (long) afni_model->timepoints;
    model->sv_num = (long) afni_model->timepoints + 1;
    for( i=0 ; i< model->sv_num - 1 ; ++i ) {
      model->alpha[i+1] = (double)afni_model->alphas[comb][i];
    }
  }

  if( model->kernel_parm.kernel_type == LINEAR ) {
    /* essentially replacing call to add_weight_vector_to_linear_model(model)*/
    /* that function mallocs, which we don't want since we are re-using */

    /* JL July 2011: This part causes memory corruption problems. Read the comment
     * in allocateModel for more details. TODO: we should fix this soon. */

    clear_vector_n(model->lin_weights,model->totwords);
    for(i=1;i<model->sv_num;i++) {
      add_vector_ns(model->lin_weights,(model->supvec[i])->words, model->alpha[i]);
    }
  }

  if(verbosity >= 2) {
    INFO_message( "updateModel:");
    INFO_message( "sv_num = %ld", model->sv_num );
    INFO_message( "kernel_type = %ld", model->kernel_parm.kernel_type );
    INFO_message( "poly_degree = %ld", model->kernel_parm.poly_degree );
    INFO_message( "rbf_gamma = %lf", model->kernel_parm.rbf_gamma );
    INFO_message( "coef_lin = %lf", model->kernel_parm.coef_lin );
    INFO_message( "coef_const = %lf", model->kernel_parm.coef_const );
    INFO_message( "totwords = %ld", model->totwords );
    INFO_message( "totdoc = %ld", model->totdoc );
    INFO_message( "b = %lf", model->b );

    for( i=0 ; i< model->sv_num - 1 ; ++i ) {
      INFO_message(" model->alpha[%ld+1] = %e", i, model->alpha[i+1]);
    }

  }

  EXRETURN;
}
1673
/* Release the model-data array and (when present) the mask array that
 * were created by getAllocateModelArrays(). */
void freeModelArrays( DatasetType** dsetModelArray,
    MaskType* dsetMaskArray, long nt_model, int mask_used )
{

  ENTRY("freeModelArrays");

  /* CC if( mask_used == MASK_YES ) IFree(dsetMaskArray); */
  free2DT(dsetModelArray, nt_model );

  if( dsetMaskArray != NULL ) {
    IFree(dsetMaskArray);
    dsetMaskArray = NULL;
  }

  EXRETURN;

}
1689 /* JL May 2010: This functions produces and allocates the arrays holding the
1690 * model data and the model-mask data */
int getAllocateModelArrays(THD_3dim_dataset *dsetModel,
    DatasetType ***dsetModelArray, MaskType **dsetMaskArray,
    long *nt_model, long *nvox_model, int *mask_used,
    int noMaskFlag, char *errorString )
{
  long v = 0;          /* index over nvox_model */
  long t = 0;          /* index over nt_model */

  THD_3dim_dataset *
    dsetMask          = NULL;

  DatasetType **
    tmp_dsetArray     = NULL;

  MaskType *
    tmp_maskArrayPtr  = NULL;

  DatasetType **
    tmp_dsetModelArray = NULL;

  MaskType *
    tmp_dsetMaskArray = NULL;


  char *inModelFile = NULL;
  char inModelFileMask[LONG_STRING];
  char *inModelFileMaskExt = MODEL_MSK_EXT;
  char *viewExt = NULL;   /* "+orig", "+tlrc" or "+acpc" */


  ENTRY("getAllocateModelArrays");


  /* JL Oct. 2010: Allocating model and mask array in this function.
   * If no mask was used for training, then dsetMaskArray == NULL.
   *
   * JL July 2011: Modified error handling: Passing error message
   * as argument (errorString) to the calling function,
   * allocated memory is freed, RETURN(1) instead of ERROR_exit().
   *
   * JL/fix: (1) the temporary dataset array is now released with
   * freeDsetArray() (rows AND row-pointer array); it used to be freed
   * with IFree() on the row-pointer array only, leaking every row.
   * (2) The mask filename is now assembled with one bounded snprintf();
   * the old strncpy/strncat calls passed the *total* buffer size as the
   * bound (strncat expects the remaining space), risking overflow.
   */


  /* --- initial error checking --- */
  if( !DSET_LOADED(dsetModel) ) {
    snprintf(errorString, LONG_STRING, "getAllocateModelArrays: "
        "What happened?! Model file not in memory!");

    RETURN(1);
  }

  if( (*mask_used != MASK_UNKNOWN) && (*mask_used != MASK_YES) &&
      (*mask_used != MASK_NO) ) {
    snprintf(errorString, LONG_STRING, "getAllocateModelArrays: "
        "What happened?! Mask status unknown!");

    RETURN(1);
  }


  /* --- new way: mask is not unknown mask is stored in last 2 bricks
   *     (even if no mask was used!) --- */
  if (*mask_used != MASK_UNKNOWN) {

    /* -- initialize and allocate model array --*/
    *nvox_model = DSET_NVOX( dsetModel );
    *nt_model = DSET_NUM_TIMES( dsetModel )-2; /* mask is stored in last 2 bricks */

    if( (tmp_dsetArray = getAllocateDsetArray(dsetModel, errorString)) == NULL ) {

      RETURN(1);
    }

    if( (tmp_dsetModelArray = Allocate2DT( *nt_model, *nvox_model)) == NULL ) {
      snprintf(errorString, LONG_STRING, "getAllocateModelArrays: "
          "Memory allocation for tmp_dsetModelArray failed!");

      /* free and return */
      freeDsetArray(dsetModel, tmp_dsetArray);
      RETURN(1);
    }

    /* -- create model array --*/
    for (t=0; t<*nt_model; ++t) {
      for (v=0; v<*nvox_model; ++v) {
        tmp_dsetModelArray[t][v] = tmp_dsetArray[t][v];
      }
    }

    /* -- create mask array -- */
    if (*mask_used == MASK_YES) {

      /* - allocate mask array - */
      if( (tmp_dsetMaskArray = (MaskType *)malloc(*nvox_model*sizeof(MaskType))) == NULL ) {
        snprintf(errorString, LONG_STRING, "getAllocateModelArrays: "
            "Memory allocation for tmp_dsetMaskArray failed!");

        /* free and return */
        freeDsetArray(dsetModel, tmp_dsetArray);
        free2DT(tmp_dsetModelArray, *nt_model);
        RETURN(1);
      }

      /* mask is in the last brick: row (*nt_model)+1 of the full array */
      for (v=0; v<*nvox_model; ++v) {
        tmp_dsetMaskArray[v] = (MaskType) tmp_dsetArray[*nt_model+1][v];
      }
    }
  }

  /* --- old way: mask is unknown, trying to locate mask dataset on disc --- */
  else {

    /* -- initialize and allocate -- */
    *nvox_model = DSET_NVOX( dsetModel );
    *nt_model = DSET_NUM_TIMES( dsetModel );
    inModelFile = DSET_PREFIX( dsetModel );

    if( (tmp_dsetArray = getAllocateDsetArray(dsetModel, errorString)) == NULL ) {

      RETURN(1);
    }

    if( (tmp_dsetModelArray = Allocate2DT( *nt_model, *nvox_model )) == NULL ) {
      snprintf(errorString, LONG_STRING, "getAllocateModelArrays: "
          "Memory allocation for tmp_dsetModelArray failed!");

      /* free and return */
      freeDsetArray(dsetModel, tmp_dsetArray);
      RETURN(1);
    }

    /* -- create model array --*/
    for (t=0; t<*nt_model; ++t) {
      for (v=0; v<*nvox_model; ++v) {
        tmp_dsetModelArray[t][v] = tmp_dsetArray[t][v];
      }
    }

    if ( !noMaskFlag ) {
      /* -- create mask array --*/
      /* - determine view type - */
      if (dsetModel->view_type == VIEW_ORIGINAL_TYPE) {
        viewExt = "+orig";
      }
      else if (dsetModel->view_type == VIEW_TALAIRACH_TYPE) {
        viewExt = "+tlrc";
      }
      else if (dsetModel->view_type == VIEW_ACPCALIGNED_TYPE) {
        viewExt = "+acpc";
      }
      else {
        snprintf(errorString, LONG_STRING,
            "Viewtype of model: %s unknown!", inModelFile);

        /* free and return */
        freeDsetArray(dsetModel, tmp_dsetArray);
        free2DT(tmp_dsetModelArray, *nt_model);
        RETURN(1);
      }

      /* - assemble mask filename (bounded, always NUL-terminated) - */
      snprintf(inModelFileMask, LONG_STRING, "%s%s%s",
          inModelFile, inModelFileMaskExt, viewExt);

      /* - open mask dataset - */
      if( (dsetMask = THD_open_one_dataset( inModelFileMask )) != NULL ) {

        /* mask dataset found */
        *mask_used = MASK_YES;
        DSET_load( dsetMask );

        /* get pointer to mask array */
        tmp_maskArrayPtr = (MaskType *)DSET_ARRAY(dsetMask,0);

        if( (tmp_dsetMaskArray = (MaskType *)malloc(*nvox_model*sizeof(MaskType))) == NULL ) {
          snprintf(errorString, LONG_STRING, "getAllocateModelArrays: "
              "Memory allocation for tmp_dsetMaskArray failed!");

          /* free and return */
          freeDsetArray(dsetModel, tmp_dsetArray);
          free2DT(tmp_dsetModelArray, *nt_model);
          DSET_unload(dsetMask);
          RETURN(1);
        }

        for (v=0; v<*nvox_model; ++v) tmp_dsetMaskArray[v] = tmp_maskArrayPtr[v];

        /* - free memory mask dataset */
        DSET_unload(dsetMask);
      }
      else { /* mask dataset not found */
        snprintf(errorString, LONG_STRING,
            "Failed to open mask dataset: %s! \n\n"
            "   You are using an outdated model file!\n"
            "   Make sure the mask file is in your current working directory!\n"
            "   If no mask file was used for training use "
            "option -nomodelmask!", inModelFileMask );

        /* free and return */
        freeDsetArray(dsetModel, tmp_dsetArray);
        free2DT(tmp_dsetModelArray, *nt_model);
        RETURN(1);
      }
    }
    else { /* (option -nomodelmask) */
      *mask_used = MASK_NO;
    }
  }

  /* -- free temporary memory (rows AND row pointers; IFree here leaked
   *    every row before) -- */
  freeDsetArray(dsetModel, tmp_dsetArray);

  *dsetMaskArray = tmp_dsetMaskArray;
  *dsetModelArray = tmp_dsetModelArray;

  RETURN(0);
}
1905
1906
1907 /* just fills in the model data set (assumed constant accross class combinations) */
1908 /* Need to also use updateModel for class */
1909 /* The idea is to only call this once and then updateModel for combination specific aspects */
/* Fills the svm-light MODEL's support vectors (model->supvec[...]) with
 * the (masked) training data from dsetModelArray.
 *
 * For regression, only timepoints with a non-zero alpha are copied (the
 * true support vectors); for classification, every timepoint is copied
 * (non-SV timepoints keep alpha = 0 - see allocateModel).
 * Each support vector becomes a sparse WORD list (wnum starts at 1,
 * terminated by wnum == 0); voxels outside dsetMaskArray are skipped
 * unless noMaskFlag is set.
 *
 * Returns 0 on success, 1 on error (message written to errorString).
 * allocateModel() must have been called first; updateModel() fills in
 * the per-class-combination parameters afterwards. */
int get_svm_model(MODEL *model, DatasetType **dsetModelArray,
    MaskType *dsetMaskArray, AFNI_MODEL *afni_model, long model_vox,
    int noMaskFlag, char *errorString)
{
  long i = 0;
  long j = 0;
  long k = 0;

  long nt       = 0;       /* number of timepoints */
  long t        = 0;       /* index of nt */
  long v        = 0;       /* index over model_vox */
  long nvox_msk = 0;       /* number of masked voxels */
  long vmsk     = 0;       /* index over nvox_msk */
  long sv       = 0;       /* sv index */

  ENTRY("get_svm_model");

  /* JL June 2014: Changed how alphas are retrieved for regression */

  if( !strcmp(afni_model->svm_type,"regression") ) {
    nt = afni_model->timepoints;
    nvox_msk = (long) afni_model->total_masked_features[0];

    /* For regression, the array storing the alphas is twice as
     * long as for classification. Since the number of timepoints
     * in the model (number of observations in training data) is
     * not twice as long, alphas of index nt+t belong to model data
     * of index t. */

    /* sv starts at 1: svm-light's supvec/alpha arrays are 1-based */
    sv=1;
    for( k=0; k<2; k++) {              /* two alpha halves: k*nt+t */
      for( t=0; t<nt; ++t ) {
        vmsk=0;
        /* non-zero alpha -> timepoint t is a support vector */
        if( fabs(afni_model->alphas[0][k*nt+t]) > 0.0 ) {

          if( sv >= afni_model->total_support_vectors[0] ) {
            /* should never get here */
            snprintf(errorString, LONG_STRING, "Reading model failed. More SVs than expected!");
            RETURN(1);
          }

          /* pack the masked voxels of timepoint t into a sparse WORD list */
          for( v=0; v<model_vox; ++v ) {
            if( vmsk<nvox_msk ) {
              if( noMaskFlag ) { /* no mask */
                (model->supvec[sv])->words[vmsk].wnum = vmsk + 1;
                (model->supvec[sv])->words[vmsk].weight =
                  (float)dsetModelArray[t][v];

                ++vmsk;
              }
              else {
                if( dsetMaskArray[v] ) { /* mask */
                  (model->supvec[sv])->words[vmsk].wnum = vmsk + 1;
                  (model->supvec[sv])->words[vmsk].weight =
                    (float)dsetModelArray[t][v];

                  ++vmsk;
                }
              }
            }
          }
          (model->supvec[sv])->words[vmsk].wnum=0; /* end of list */
          /* cache ||x||^2 for the kernel evaluations */
          (model->supvec[sv])->twonorm_sq = sprod_ss((model->supvec[sv])->words,
              (model->supvec[sv])->words);
          (model->supvec[sv])->docnum = -1;

          ++sv;
        }
      }
    }
  }
  else { /* before sv-regression: */

    for(i = 1; i < afni_model->timepoints + 1; i++) {
      /* number of support vectors is (afni_model->timepoints + 1) */
      /* this simplifies multi-class life by allowing us to essentially */
      /* store and read the training data once, one brick, etc. */
      /* the real number is the number of non-zero alphas */

      /* k counts masked voxels written for this support vector */
      k = 0;
      for( j=0 ; j< model_vox; ++j) {
        // if( dsetMaskArray[j] && (k < (long) afni_model->total_masked_features[0]) )
        //   --- before dealing with noMaskFlag, used this
        if( k < (long) afni_model->total_masked_features[0] ) {
          /* [0] assumes mask is the same for all class combinations */
          if( noMaskFlag ) {
            (model->supvec[i])->words[k].wnum = k + 1; /* value should start at 1 */
            (model->supvec[i])->words[k].weight = (float)dsetModelArray[i-1][j];
            // printf("%ld: %f ", (model->supvec[i])->words[k].wnum, (model->supvec[i])->words[k].weight );
            ++k;
          }
          else if( dsetMaskArray[j] ) {
            (model->supvec[i])->words[k].wnum = k + 1; /* value should start at 1 */
            (model->supvec[i])->words[k].weight = (float)dsetModelArray[i-1][j];
            // printf("%ld: %f ", (model->supvec[i])->words[k].wnum, (model->supvec[i])->words[k].weight );
            ++k;
          }
        }
      }
      // printf("\n");
      ((model->supvec[i])->words[k]).wnum=0; /* end of list */
      /* cache ||x||^2 for the kernel evaluations */
      (model->supvec[i])->twonorm_sq = sprod_ss((model->supvec[i])->words,
          (model->supvec[i])->words);
      (model->supvec[i])->docnum = -1;
    }
  }

  RETURN(0);
}
2019
readAllocateAfniModel(THD_3dim_dataset * dsetModel,AFNI_MODEL * afniModel,char * errorString)2020 int readAllocateAfniModel( THD_3dim_dataset *dsetModel, AFNI_MODEL *afniModel, char *errorString )
2021 {
2022 ATR_float * atr_float = NULL;
2023 ATR_int * atr_int = NULL;
2024 ATR_string * atr_string = NULL;
2025 long i,j = 0;
2026 long nalphas = 0;
2027
2028
2029
2030 /* used for strtok magic */
2031 long p_string_size = 0; /* string size p, dependent on the
2032 number of class combinations */
2033 char *p = NULL;
2034 char *q = NULL;
2035 long max_comb = CLASS_MAX*(CLASS_MAX-1)/2;
2036
2037
2038 char headernames[LONG_STRING];
2039
2040
2041
2042 ENTRY("readAllocateAfniModel");
2043
2044
2045 /* JL Oct 2009: The naming and number of model parameters in the
2046 * model header has changed. We added "3DSVM" in front of each parameter name
2047 * to avoid collisions with header entries from other afni programs.
2048 *
2049 * JL Apr. 2010: Allocating p string (for strtok) dynamically.
2050 * Replaced all string functions by its equivalent that also takes the
2051 * string size as an argument
2052 *
2053 * JL Apr. 2010: Allocating combNames and kernel_custome dynamically
2054 * based on CLASS_MAX and CSV_STRING
2055 *
2056 * JL May 2010: Added 3DSVM_MASK_USED to determine if mask was used for
2057 * training or not. (Now, the mask is a sub-brick of the model dataset)
2058 *
2059 * JL May 2010: Trying to be backwards compatible based on version number
2060 *
2061 * JL July 2011: Modified error handling: Passing error message
2062 * as argument (errorString) to the calling function. Checking allocation
2063 * for every model parameter, if malloc fails, free memory and
2064 * RETURN(1) instead of ERROR_exit().
2065 *
2066 */
2067
2068
2069 /* --- determine version number for backward compatiblity ---*/
2070 atr_string = THD_find_string_atr( dsetModel->dblk, "3DSVM_VERSION" );
2071
2072 /* version number in model header (introduced Apr. 2010) */
2073 if( atr_string != NULL ) {
2074 afniModel->version = atof((char *)(atr_string->ch+1));
2075 }
2076 else { /* no version number in header, assign version number based on
2077 header entries */
2078
2079 /* check if 3DSVM_TYPE is present (introduced Oct. 2009)
2080 * assign version number 0.90 */
2081 atr_string = THD_find_string_atr( dsetModel->dblk, "3DSVM_SVM_TYPE" );
2082 if (atr_string != NULL) afniModel->version = 0.90;
2083 else { /* maybe even older */
2084
2085 /* check if COMBO_NAMES is present
2086 * assign version number 0.80
2087 */
2088 atr_string = THD_find_string_atr( dsetModel->dblk, "COMBO_NAMES" );
2089 if (atr_string != NULL) afniModel->version = 0.80;
2090 else { /* out of luck */
2091 snprintf(errorString, LONG_STRING, "Can not read model!");
2092 RETURN(1);
2093 }
2094 }
2095 }
2096 /* ---------------------------------------------------------------------- */
2097 /* --- read header information based on 3DSVM VERSION ---*/
2098 /* ---------------------------------------------------------------------- */
2099 if (afniModel->version >= 0.90) {
2100 /* -- initialize -- */
2101 atr_int = THD_find_int_atr( dsetModel->dblk, "3DSVM_CLASS_COMBINATIONS" );
2102 afniModel->combinations = *atr_int->in;
2103
2104 /* - allocate CSV strings - */
2105 p_string_size = afniModel->combinations*CSV_STRING;
2106
2107 if( (p = (char *)malloc(p_string_size*sizeof(char))) == NULL ) {
2108 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2109 "Memory allocation for csv-string failed!");
2110 RETURN(1);
2111 }
2112
2113 /* - allocate 2D char arrays - */
2114 if( (afniModel->combName = Allocate2c(max_comb, (long)CSV_STRING)) == NULL ) {
2115 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2116 "Memory allocation for combName failed!");
2117
2118 /* free and return */
2119 IFree(p);
2120 RETURN(1);
2121 }
2122 Clear2c(afniModel->combName, max_comb);
2123
2124 if( (afniModel->kernel_custom = Allocate2c(max_comb, (long)CSV_STRING)) == NULL ) {
2125 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2126 "Memory allocation for kernel_custom failed!");
2127
2128 /* free and return */
2129 IFree(p);
2130 free2c(afniModel->combName, max_comb);
2131 RETURN(1);
2132 }
2133 Clear2c(afniModel->kernel_custom, max_comb);
2134
2135 /* -- allocate and read header entries -- */
2136 /* JL Oct 2009 */
2137 atr_string = THD_find_string_atr( dsetModel->dblk, "3DSVM_SVM_TYPE" );
2138 strncpy(afniModel->svm_type, atr_string->ch, LONG_STRING);
2139
2140 /* JL May 2010 */
2141 if (afniModel->version >= 1.10) {
2142 atr_int = THD_find_int_atr(dsetModel->dblk, "3DSVM_MASK_USED");
2143 afniModel->mask_used = *atr_int->in;
2144 }
2145 else afniModel->mask_used = MASK_UNKNOWN;
2146
2147 atr_int = THD_find_int_atr( dsetModel->dblk, "3DSVM_CLASS_COUNT" );
2148 afniModel->class_count = *atr_int->in;
2149
2150 atr_int = THD_find_int_atr( dsetModel->dblk, "3DSVM_TIMEPOINTS" );
2151 afniModel->timepoints = *atr_int->in;
2152
2153 atr_string = THD_find_string_atr( dsetModel->dblk, "3DSVM_COMBO_NAMES" );
2154 strncpy(p, atr_string->ch, p_string_size);
2155 q = strtok(p,",");
2156 if (q != NULL) strncpy(afniModel->combName[0], q, CSV_STRING);
2157 else {
2158 snprintf(errorString, LONG_STRING, "Reading model combinations in header "
2159 "file failed");
2160
2161 /* free and return */
2162 IFree(p);
2163 free2c(afniModel->combName, max_comb);
2164 free2c(afniModel->kernel_custom, max_comb);
2165 RETURN(1);
2166 }
2167
2168 for(i = 1; i < afniModel->combinations; ++i) {
2169 q=strtok(NULL,",");
2170 if (q != NULL) strncpy(afniModel->combName[i], q, CSV_STRING);
2171 else {
2172 snprintf(errorString, LONG_STRING,
2173 "Reading model combinations in header file failed! "
2174 "Number does not match expected: '%d'", afniModel->combinations);
2175
2176 /* free and return */
2177 IFree(p);
2178 free2c(afniModel->combName, max_comb);
2179 free2c(afniModel->kernel_custom, max_comb);
2180 RETURN(1);
2181 }
2182 }
2183
2184 atr_int = THD_find_int_atr( dsetModel->dblk, "3DSVM_KERNEL_TYPE" );
2185 if( (afniModel->kernel_type = (int *)malloc( atr_int->nin * sizeof(int) )) == NULL) {
2186 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2187 "Memory allocation for kernel_type failed!");
2188
2189 /* free and return */
2190 IFree(p);
2191 free2c(afniModel->combName, max_comb);
2192 free2c(afniModel->kernel_custom, max_comb);
2193 RETURN(1);
2194 }
2195 for( i=0 ; i<atr_int->nin ; ++i ) {
2196 afniModel->kernel_type[i] = atr_int->in[i];
2197 }
2198
2199 atr_string = THD_find_string_atr( dsetModel->dblk, "3DSVM_KERNEL_CUSTOM" );
2200 strncpy(p, atr_string->ch, p_string_size);
2201 q = strtok(p,",");
2202 if (q != NULL) strncpy(afniModel->kernel_custom[0],q, CSV_STRING);
2203 else {
2204 snprintf(errorString, LONG_STRING,
2205 "Can't find 3DSVM_KERNEL_CUSTOM in model header file");
2206
2207
2208 /* free and return */
2209 IFree(p);
2210 free2c(afniModel->combName, max_comb);
2211 free2c(afniModel->kernel_custom, max_comb);
2212 IFree(afniModel->kernel_type);
2213 RETURN(1);
2214 }
2215 for( i=1; i<afniModel->combinations; ++i ) {
2216 q=strtok(NULL,",");
2217 if (q != NULL) strncpy(afniModel->kernel_custom[i], q, CSV_STRING);
2218 else {
2219 snprintf(errorString, LONG_STRING,
2220 "Reading 3DSVM_KERNEL_CUSTOM in model header file number of class"
2221 "combinations does not match expected:'%d'", afniModel->combinations);
2222
2223 /* free and return */
2224 IFree(p);
2225 free2c(afniModel->combName, max_comb);
2226 free2c(afniModel->kernel_custom, max_comb);
2227 IFree(afniModel->kernel_type);
2228 RETURN(1);
2229 }
2230 }
2231
2232 atr_float = THD_find_float_atr( dsetModel->dblk, "3DSVM_RBF_GAMMA" );
2233 if( (afniModel->rbf_gamma = (float *)malloc( atr_float->nfl * sizeof(float))) == NULL ) {
2234 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2235 "Memory allocation for rbf_gamma failed!");
2236
2237 /* free and return */
2238 IFree(p);
2239 free2c(afniModel->combName, max_comb);
2240 free2c(afniModel->kernel_custom, max_comb);
2241 IFree(afniModel->kernel_type);
2242 RETURN(1);
2243 }
2244 for( i=0 ; i<atr_float->nfl ; ++i ) {
2245 afniModel->rbf_gamma[i] = atr_float->fl[i];
2246 }
2247
2248 atr_float = THD_find_float_atr( dsetModel->dblk, "3DSVM_LINEAR_COEFFICIENT" );
2249 if( (afniModel->linear_coefficient = (float *)malloc( atr_float->nfl * sizeof(float))) == NULL ) {
2250 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2251 "Memory allocation for linear_coefficient failed!");
2252
2253 /* free and return */
2254 IFree(p);
2255 free2c(afniModel->combName, max_comb);
2256 free2c(afniModel->kernel_custom, max_comb);
2257 IFree(afniModel->kernel_type);
2258 IFree(afniModel->rbf_gamma);
2259 RETURN(1);
2260 }
2261 for( i=0 ; i<atr_float->nfl ; ++i ) {
2262 afniModel->linear_coefficient[i] = atr_float->fl[i];
2263 }
2264
2265 atr_float = THD_find_float_atr( dsetModel->dblk, "3DSVM_CONSTANT_COEFFICIENT" );
2266 if( (afniModel->constant_coefficient = (float *)malloc( atr_float->nfl * sizeof(float))) == NULL ) {
2267 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2268 "Memory allocation for constant_coefficient failed!");
2269
2270 /* free and return */
2271 IFree(p);
2272 free2c(afniModel->combName, max_comb);
2273 free2c(afniModel->kernel_custom, max_comb);
2274 IFree(afniModel->kernel_type);
2275 IFree(afniModel->rbf_gamma);
2276 IFree(afniModel->linear_coefficient);
2277 RETURN(1);
2278 }
2279 for( i=0 ; i<atr_float->nfl ; ++i ) {
2280 afniModel->constant_coefficient[i] = atr_float->fl[i];
2281 }
2282
2283 atr_int = THD_find_int_atr( dsetModel->dblk, "3DSVM_TOTAL_MASKED_FEATURES" );
2284 if( (afniModel->total_masked_features = (int *)malloc( atr_int->nin * sizeof(int))) == NULL ) {
2285 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2286 "Memory allocation for total_masked_features failed!");
2287
2288 /* free and return */
2289 IFree(p);
2290 free2c(afniModel->combName, max_comb);
2291 free2c(afniModel->kernel_custom, max_comb);
2292 IFree(afniModel->kernel_type);
2293 IFree(afniModel->rbf_gamma);
2294 IFree(afniModel->linear_coefficient);
2295 IFree(afniModel->constant_coefficient);
2296 RETURN(1);
2297 }
2298 for( i=0 ; i<atr_int->nin ; ++i ) {
2299 afniModel->total_masked_features[i] = atr_int->in[i];
2300 }
2301
2302 atr_int = THD_find_int_atr( dsetModel->dblk, "3DSVM_TOTAL_SAMPLES" );
2303 if( (afniModel->total_samples = (int *)malloc( atr_int->nin * sizeof(int))) == NULL ) {
2304 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2305 "Memory allocation for total_samples failed!");
2306
2307 /* free and return */
2308 IFree(p);
2309 free2c(afniModel->combName, max_comb);
2310 free2c(afniModel->kernel_custom, max_comb);
2311 IFree(afniModel->kernel_type);
2312 IFree(afniModel->rbf_gamma);
2313 IFree(afniModel->linear_coefficient);
2314 IFree(afniModel->constant_coefficient);
2315 IFree(afniModel->total_masked_features);
2316 RETURN(1);
2317 }
2318 for( i=0 ; i<atr_int->nin ; ++i ) {
2319 afniModel->total_samples[i] = atr_int->in[i];
2320 }
2321
2322 atr_int = THD_find_int_atr( dsetModel->dblk, "3DSVM_TOTAL_SUPPORT_VECTORS" );
2323 if( (afniModel->total_support_vectors = (int *)malloc( atr_int->nin * sizeof(int))) == NULL ) {
2324 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2325 "Memory allocation for total_support_vectors failed!");
2326
2327 /* free and return */
2328 IFree(p);
2329 free2c(afniModel->combName, max_comb);
2330 free2c(afniModel->kernel_custom, max_comb);
2331 IFree(afniModel->kernel_type);
2332 IFree(afniModel->rbf_gamma);
2333 IFree(afniModel->linear_coefficient);
2334 IFree(afniModel->constant_coefficient);
2335 IFree(afniModel->total_masked_features);
2336 IFree(afniModel->total_samples);
2337 RETURN(1);
2338 }
2339 for( i=0 ; i<atr_int->nin ; ++i ) {
2340 afniModel->total_support_vectors[i] = atr_int->in[i];
2341 }
2342
2343 atr_float = THD_find_float_atr( dsetModel->dblk, "3DSVM_B" );
2344 if( ( afniModel->b = (float *)malloc( atr_float->nfl * sizeof(float))) == NULL ) {
2345 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2346 "Memory allocation for b failed!");
2347
2348 /* free and return */
2349 IFree(p);
2350 free2c(afniModel->combName, max_comb);
2351 free2c(afniModel->kernel_custom, max_comb);
2352 IFree(afniModel->kernel_type);
2353 IFree(afniModel->rbf_gamma);
2354 IFree(afniModel->linear_coefficient);
2355 IFree(afniModel->constant_coefficient);
2356 IFree(afniModel->total_masked_features);
2357 IFree(afniModel->total_samples);
2358 IFree(afniModel->total_support_vectors);
2359 RETURN(1);
2360 }
2361 for( i=0 ; i<atr_float->nfl ; ++i ) {
2362 afniModel->b[i] = atr_float->fl[i];
2363 }
2364
2365 atr_int = THD_find_int_atr( dsetModel->dblk, "3DSVM_POLYNOMIAL_DEGREE" );
2366 if( (afniModel->polynomial_degree = (int *)malloc( atr_int->nin * sizeof(int))) == NULL ) {
2367 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2368 "Memory allocation for polynomial_degree failed!");
2369 /* free and return */
2370 IFree(p);
2371 free2c(afniModel->combName, max_comb);
2372 free2c(afniModel->kernel_custom, max_comb);
2373 IFree(afniModel->kernel_type);
2374 IFree(afniModel->rbf_gamma);
2375 IFree(afniModel->linear_coefficient);
2376 IFree(afniModel->constant_coefficient);
2377 IFree(afniModel->total_masked_features);
2378 IFree(afniModel->total_samples);
2379 IFree(afniModel->total_support_vectors);
2380 IFree(afniModel->b);
2381 RETURN(1);
2382 }
2383 for( i=0 ; i<atr_int->nin ; ++i ) {
2384 afniModel->polynomial_degree[i] = atr_int->in[i];
2385 }
2386
2387 /* For regression, the array holding the alphas needs to be twice as long as for
2388 * classification */
2389 if( !strcmp(afniModel->svm_type, "regression") ) {
2390 nalphas=afniModel->timepoints*2;
2391 }
2392 else if( !strcmp(afniModel->svm_type, "classification") ) {
2393 nalphas=afniModel->timepoints;
2394 }
2395 /* we should never get here */
2396 else {
2397 snprintf(errorString, LONG_STRING, "Can not read model! SVM type unknown!");
2398 RETURN(1);
2399 }
2400
2401 if( (afniModel->alphas = Allocate2f((long) afniModel->combinations,
2402 nalphas)) == NULL ) {
2403 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2404 "Memory allocation for alphas failed!");
2405
2406 /* free and return */
2407 IFree(p);
2408 free2c(afniModel->combName, max_comb);
2409 free2c(afniModel->kernel_custom, max_comb);
2410 IFree(afniModel->kernel_type);
2411 IFree(afniModel->rbf_gamma);
2412 IFree(afniModel->linear_coefficient);
2413 IFree(afniModel->constant_coefficient);
2414 IFree(afniModel->total_masked_features);
2415 IFree(afniModel->total_samples);
2416 IFree(afniModel->total_support_vectors);
2417 IFree(afniModel->b);
2418 IFree(afniModel->polynomial_degree);
2419 RETURN(1);
2420 }
2421
2422 for(i = 0; i < afniModel->combinations; ++i ) {
2423 snprintf(headernames, LONG_STRING, "3DSVM_ALPHAS_%s", afniModel->combName[i]);
2424 atr_float = THD_find_float_atr( dsetModel->dblk, headernames);
2425 for(j = 0; j < nalphas; ++j ) {
2426 afniModel->alphas[i][j] = (double)atr_float->fl[j];
2427 }
2428 }
2429
2430 /* JL Nov 2009: new parameters: */
2431 atr_float = THD_find_float_atr( dsetModel->dblk, "3DSVM_SVM_C" );
2432 if( (afniModel->svm_c = (float *)malloc( atr_float->nfl * sizeof(float))) == NULL ) {
2433 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2434 "Memory allocation for svm_c failed!");
2435
2436 /* free and return */
2437 IFree(p);
2438 free2c(afniModel->combName, max_comb);
2439 free2c(afniModel->kernel_custom, max_comb);
2440 IFree(afniModel->kernel_type);
2441 IFree(afniModel->rbf_gamma);
2442 IFree(afniModel->linear_coefficient);
2443 IFree(afniModel->constant_coefficient);
2444 IFree(afniModel->total_masked_features);
2445 IFree(afniModel->total_samples);
2446 IFree(afniModel->total_support_vectors);
2447 IFree(afniModel->b);
2448 IFree(afniModel->polynomial_degree);
2449 free2f(afniModel->alphas, afniModel->combinations);
2450 RETURN(1);
2451 }
2452 for (i=0; i < afniModel->combinations; ++i ) {
2453 afniModel->svm_c[i] = atr_float->fl[i];
2454 }
2455
2456 atr_float = THD_find_float_atr( dsetModel->dblk, "3DSVM_EPS" );
2457 if( (afniModel->eps = (float *)malloc( atr_float->nfl * sizeof(float))) == NULL ) {
2458 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2459 "Memory allocation for eps failed!");
2460
2461 /* free and return */
2462 IFree(p);
2463 free2c(afniModel->combName, max_comb);
2464 free2c(afniModel->kernel_custom, max_comb);
2465 IFree(afniModel->kernel_type);
2466 IFree(afniModel->rbf_gamma);
2467 IFree(afniModel->linear_coefficient);
2468 IFree(afniModel->constant_coefficient);
2469 IFree(afniModel->total_masked_features);
2470 IFree(afniModel->total_samples);
2471 IFree(afniModel->total_support_vectors);
2472 IFree(afniModel->b);
2473 IFree(afniModel->polynomial_degree);
2474 free2f(afniModel->alphas, afniModel->combinations);
2475 IFree(afniModel->svm_c);
2476 RETURN(1);
2477 }
2478 for (i=0; i < afniModel->combinations; ++i ) {
2479 afniModel->eps[i] = atr_float->fl[i];
2480 }
2481
2482 atr_int = THD_find_int_atr( dsetModel->dblk, "3DSVM_BIASED_HYPERPLANE" );
2483 if( (afniModel->biased_hyperplane = (int *)malloc( atr_int->nin * sizeof(int))) == NULL ) {
2484 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2485 "Memory allocation for biased_hyperplane failed!");
2486
2487 /* free and return */
2488 IFree(p);
2489 free2c(afniModel->combName, max_comb);
2490 free2c(afniModel->kernel_custom, max_comb);
2491 IFree(afniModel->kernel_type);
2492 IFree(afniModel->rbf_gamma);
2493 IFree(afniModel->linear_coefficient);
2494 IFree(afniModel->constant_coefficient);
2495 IFree(afniModel->total_masked_features);
2496 IFree(afniModel->total_samples);
2497 IFree(afniModel->total_support_vectors);
2498 IFree(afniModel->b);
2499 IFree(afniModel->polynomial_degree);
2500 free2f(afniModel->alphas, afniModel->combinations);
2501 IFree(afniModel->svm_c);
2502 IFree(afniModel->eps);
2503 RETURN(1);
2504 }
2505 for( i=0; i<afniModel->combinations; ++i ) {
2506 afniModel->biased_hyperplane[i] = atr_int->in[i];
2507 }
2508
2509 atr_int = THD_find_int_atr( dsetModel->dblk, "3DSVM_SKIP_FINAL_OPT_CHECK" );
2510 if( (afniModel->skip_final_opt_check = (int *)malloc( atr_int->nin * sizeof(int))) == NULL ) {
2511 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2512 "Memory allocation for skip_final_opt_check failed!");
2513
2514 /* free and return */
2515 IFree(p);
2516 free2c(afniModel->combName, max_comb);
2517 free2c(afniModel->kernel_custom, max_comb);
2518 IFree(afniModel->kernel_type);
2519 IFree(afniModel->rbf_gamma);
2520 IFree(afniModel->linear_coefficient);
2521 IFree(afniModel->constant_coefficient);
2522 IFree(afniModel->total_masked_features);
2523 IFree(afniModel->total_samples);
2524 IFree(afniModel->total_support_vectors);
2525 IFree(afniModel->b);
2526 IFree(afniModel->polynomial_degree);
2527 free2f(afniModel->alphas, afniModel->combinations);
2528 IFree(afniModel->svm_c);
2529 IFree(afniModel->eps);
2530 IFree(afniModel->biased_hyperplane);
2531 RETURN(1);
2532 }
2533 for( i=0; i<afniModel->combinations; ++i ) {
2534 afniModel->skip_final_opt_check[i] = atr_int->in[i];
2535 }
2536
2537 atr_int = THD_find_int_atr( dsetModel->dblk, "3DSVM_SVM_MAXQPSIZE" );
2538 if( (afniModel->svm_maxqpsize = (int *)malloc( atr_int->nin * sizeof(int))) == NULL ) {
2539 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2540 "Memory allocation for svm_maxqpsize failed!");
2541
2542 /* free and return */
2543 IFree(p);
2544 free2c(afniModel->combName, max_comb);
2545 free2c(afniModel->kernel_custom, max_comb);
2546 IFree(afniModel->kernel_type);
2547 IFree(afniModel->rbf_gamma);
2548 IFree(afniModel->linear_coefficient);
2549 IFree(afniModel->constant_coefficient);
2550 IFree(afniModel->total_masked_features);
2551 IFree(afniModel->total_samples);
2552 IFree(afniModel->total_support_vectors);
2553 IFree(afniModel->b);
2554 IFree(afniModel->polynomial_degree);
2555 free2f(afniModel->alphas, afniModel->combinations);
2556 IFree(afniModel->svm_c);
2557 IFree(afniModel->eps);
2558 IFree(afniModel->biased_hyperplane);
2559 IFree(afniModel->skip_final_opt_check);
2560 RETURN(1);
2561 }
2562 for( i=0; i<afniModel->combinations; ++i ) {
2563 afniModel->svm_maxqpsize[i] = atr_int->in[i];
2564 }
2565
2566 atr_int = THD_find_int_atr( dsetModel->dblk, "3DSVM_SVM_NEWVARSINQP" );
2567 if( (afniModel->svm_newvarsinqp = (int *)malloc( atr_int->nin * sizeof(int))) == NULL ) {
2568 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2569 "Memory allocation for svm_newvarsinqp failed!");
2570
2571 /* free and return */
2572 IFree(p);
2573 free2c(afniModel->combName, max_comb);
2574 free2c(afniModel->kernel_custom, max_comb);
2575 IFree(afniModel->kernel_type);
2576 IFree(afniModel->rbf_gamma);
2577 IFree(afniModel->linear_coefficient);
2578 IFree(afniModel->constant_coefficient);
2579 IFree(afniModel->total_masked_features);
2580 IFree(afniModel->total_samples);
2581 IFree(afniModel->total_support_vectors);
2582 IFree(afniModel->b);
2583 IFree(afniModel->polynomial_degree);
2584 free2f(afniModel->alphas, afniModel->combinations);
2585 IFree(afniModel->svm_c);
2586 IFree(afniModel->eps);
2587 IFree(afniModel->biased_hyperplane);
2588 IFree(afniModel->skip_final_opt_check);
2589 IFree(afniModel->svm_maxqpsize);
2590 RETURN(1);
2591 }
2592 for( i=0; i<afniModel->combinations; ++i ) {
2593 afniModel->svm_newvarsinqp[i] = atr_int->in[i];
2594 }
2595
2596 atr_int = THD_find_int_atr( dsetModel->dblk, "3DSVM_SVM_ITER_TO_SHRINK");
2597 if( (afniModel->svm_iter_to_shrink = (int *)malloc( atr_int->nin * sizeof(int))) == NULL ) {
2598 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2599 "Memory allocation for svm_iter_to_shrink failed!");
2600
2601
2602 /* free and return */
2603 IFree(p);
2604 free2c(afniModel->combName, max_comb);
2605 free2c(afniModel->kernel_custom, max_comb);
2606 IFree(afniModel->kernel_type);
2607 IFree(afniModel->rbf_gamma);
2608 IFree(afniModel->linear_coefficient);
2609 IFree(afniModel->constant_coefficient);
2610 IFree(afniModel->total_masked_features);
2611 IFree(afniModel->total_samples);
2612 IFree(afniModel->total_support_vectors);
2613 IFree(afniModel->b);
2614 IFree(afniModel->polynomial_degree);
2615 free2f(afniModel->alphas, afniModel->combinations);
2616 IFree(afniModel->svm_c);
2617 IFree(afniModel->eps);
2618 IFree(afniModel->biased_hyperplane);
2619 IFree(afniModel->skip_final_opt_check);
2620 IFree(afniModel->svm_maxqpsize);
2621 IFree(afniModel->svm_newvarsinqp);
2622 RETURN(1);
2623 }
2624 for( i=0; i<afniModel->combinations; ++i ) {
2625 afniModel->svm_iter_to_shrink[i] = atr_int->in[i];
2626 }
2627
2628 atr_float = THD_find_float_atr( dsetModel->dblk, "3DSVM_TRANSDUCTION_POSRATIO" );
2629 if( (afniModel->transduction_posratio = (float *)malloc( atr_float->nfl * sizeof(float))) == NULL ) {
2630 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2631 "Memory allocation for transduction_posratio failed!");
2632
2633 /* free and return */
2634 IFree(p);
2635 free2c(afniModel->combName, max_comb);
2636 free2c(afniModel->kernel_custom, max_comb);
2637 IFree(afniModel->kernel_type);
2638 IFree(afniModel->rbf_gamma);
2639 IFree(afniModel->linear_coefficient);
2640 IFree(afniModel->constant_coefficient);
2641 IFree(afniModel->total_masked_features);
2642 IFree(afniModel->total_samples);
2643 IFree(afniModel->total_support_vectors);
2644 IFree(afniModel->b);
2645 IFree(afniModel->polynomial_degree);
2646 free2f(afniModel->alphas, afniModel->combinations);
2647 IFree(afniModel->svm_c);
2648 IFree(afniModel->eps);
2649 IFree(afniModel->biased_hyperplane);
2650 IFree(afniModel->skip_final_opt_check);
2651 IFree(afniModel->svm_maxqpsize);
2652 IFree(afniModel->svm_newvarsinqp);
2653 IFree(afniModel->svm_iter_to_shrink);
2654 RETURN(1);
2655
2656 }
2657 for (i=0; i < afniModel->combinations; ++i ) {
2658 afniModel->transduction_posratio[i] = atr_float->fl[i];
2659 }
2660
2661 atr_float = THD_find_float_atr( dsetModel->dblk, "3DSVM_SVM_COSTRATIO" );
2662 if( (afniModel->svm_costratio = (float *)malloc( atr_float->nfl * sizeof(float))) == NULL ) {
2663 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2664 "Memory allocation for svm_costratio failed!");
2665
2666 /* free and return */
2667 IFree(p);
2668 free2c(afniModel->combName, max_comb);
2669 free2c(afniModel->kernel_custom, max_comb);
2670 IFree(afniModel->kernel_type);
2671 IFree(afniModel->rbf_gamma);
2672 IFree(afniModel->linear_coefficient);
2673 IFree(afniModel->constant_coefficient);
2674 IFree(afniModel->total_masked_features);
2675 IFree(afniModel->total_samples);
2676 IFree(afniModel->total_support_vectors);
2677 IFree(afniModel->b);
2678 IFree(afniModel->polynomial_degree);
2679 free2f(afniModel->alphas, afniModel->combinations);
2680 IFree(afniModel->svm_c);
2681 IFree(afniModel->eps);
2682 IFree(afniModel->biased_hyperplane);
2683 IFree(afniModel->skip_final_opt_check);
2684 IFree(afniModel->svm_maxqpsize);
2685 IFree(afniModel->svm_newvarsinqp);
2686 IFree(afniModel->svm_iter_to_shrink);
2687 IFree(afniModel->transduction_posratio);
2688 RETURN(1);
2689 }
2690 for (i=0; i < afniModel->combinations; ++i ) {
2691 afniModel->svm_costratio[i] = atr_float->fl[i];
2692 }
2693
2694 atr_float = THD_find_float_atr( dsetModel->dblk, "3DSVM_SVM_COSTRATIO_UNLAB" );
2695 if( (afniModel->svm_costratio_unlab = (float *)malloc( atr_float->nfl * sizeof(float))) == NULL ) {
2696 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2697 "Memory allocation for svm_costratio_unlab failed!");
2698
2699 /* free and return */
2700 IFree(p);
2701 free2c(afniModel->combName, max_comb);
2702 free2c(afniModel->kernel_custom, max_comb);
2703 IFree(afniModel->kernel_type);
2704 IFree(afniModel->rbf_gamma);
2705 IFree(afniModel->linear_coefficient);
2706 IFree(afniModel->constant_coefficient);
2707 IFree(afniModel->total_masked_features);
2708 IFree(afniModel->total_samples);
2709 IFree(afniModel->total_support_vectors);
2710 IFree(afniModel->b);
2711 IFree(afniModel->polynomial_degree);
2712 free2f(afniModel->alphas, afniModel->combinations);
2713 IFree(afniModel->svm_c);
2714 IFree(afniModel->eps);
2715 IFree(afniModel->biased_hyperplane);
2716 IFree(afniModel->skip_final_opt_check);
2717 IFree(afniModel->svm_maxqpsize);
2718 IFree(afniModel->svm_newvarsinqp);
2719 IFree(afniModel->svm_iter_to_shrink);
2720 IFree(afniModel->transduction_posratio);
2721 IFree(afniModel->svm_costratio);
2722 RETURN(1);
2723 }
2724 for (i=0; i < afniModel->combinations; ++i ) {
2725 afniModel->svm_costratio_unlab[i] = atr_float->fl[i];
2726 }
2727
2728 atr_float = THD_find_float_atr( dsetModel->dblk, "3DSVM_SVM_UNLABBOUND" );
2729 if( (afniModel->svm_unlabbound = (float *)malloc( atr_float->nfl * sizeof(float))) == NULL ) {
2730 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2731 "Memory allocation for svm_unlabbound failed!");
2732
2733 /* free and return */
2734 IFree(p);
2735 free2c(afniModel->combName, max_comb);
2736 free2c(afniModel->kernel_custom, max_comb);
2737 IFree(afniModel->kernel_type);
2738 IFree(afniModel->rbf_gamma);
2739 IFree(afniModel->linear_coefficient);
2740 IFree(afniModel->constant_coefficient);
2741 IFree(afniModel->total_masked_features);
2742 IFree(afniModel->total_samples);
2743 IFree(afniModel->total_support_vectors);
2744 IFree(afniModel->b);
2745 IFree(afniModel->polynomial_degree);
2746 free2f(afniModel->alphas, afniModel->combinations);
2747 IFree(afniModel->svm_c);
2748 IFree(afniModel->eps);
2749 IFree(afniModel->biased_hyperplane);
2750 IFree(afniModel->skip_final_opt_check);
2751 IFree(afniModel->svm_maxqpsize);
2752 IFree(afniModel->svm_newvarsinqp);
2753 IFree(afniModel->svm_iter_to_shrink);
2754 IFree(afniModel->transduction_posratio);
2755 IFree(afniModel->svm_costratio);
2756 IFree(afniModel->svm_costratio_unlab);
2757 RETURN(1);
2758 }
2759 for (i=0; i < afniModel->combinations; ++i ) {
2760 afniModel->svm_unlabbound[i] = atr_float->fl[i];
2761 }
2762
2763 atr_float = THD_find_float_atr( dsetModel->dblk, "3DSVM_EPSILON_A" );
2764 if( (afniModel->epsilon_a = (float *)malloc( atr_float->nfl * sizeof(float))) == NULL ) {
2765 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2766 "Memory allocation for epsilon_a failed!");
2767
2768 /* free and return */
2769 IFree(p);
2770 free2c(afniModel->combName, max_comb);
2771 free2c(afniModel->kernel_custom, max_comb);
2772 IFree(afniModel->kernel_type);
2773 IFree(afniModel->rbf_gamma);
2774 IFree(afniModel->linear_coefficient);
2775 IFree(afniModel->constant_coefficient);
2776 IFree(afniModel->total_masked_features);
2777 IFree(afniModel->total_samples);
2778 IFree(afniModel->total_support_vectors);
2779 IFree(afniModel->b);
2780 IFree(afniModel->polynomial_degree);
2781 free2f(afniModel->alphas, afniModel->combinations);
2782 IFree(afniModel->svm_c);
2783 IFree(afniModel->eps);
2784 IFree(afniModel->biased_hyperplane);
2785 IFree(afniModel->skip_final_opt_check);
2786 IFree(afniModel->svm_maxqpsize);
2787 IFree(afniModel->svm_newvarsinqp);
2788 IFree(afniModel->svm_iter_to_shrink);
2789 IFree(afniModel->transduction_posratio);
2790 IFree(afniModel->svm_costratio);
2791 IFree(afniModel->svm_costratio_unlab);
2792 RETURN(1);
2793 }
2794 for (i=0; i < afniModel->combinations; ++i ) {
2795 afniModel->epsilon_a[i] = atr_float->fl[i];
2796 }
2797
2798 atr_float = THD_find_float_atr( dsetModel->dblk, "3DSVM_EPSILON_CRIT" );
2799 if( (afniModel->epsilon_crit = (float *)malloc( atr_float->nfl * sizeof(float))) == NULL ) {
2800 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2801 "Memory allocation for epsilon_crit failed!");
2802
2803 /* free and return */
2804 IFree(p);
2805 free2c(afniModel->combName, max_comb);
2806 free2c(afniModel->kernel_custom, max_comb);
2807 IFree(afniModel->kernel_type);
2808 IFree(afniModel->rbf_gamma);
2809 IFree(afniModel->linear_coefficient);
2810 IFree(afniModel->constant_coefficient);
2811 IFree(afniModel->total_masked_features);
2812 IFree(afniModel->total_samples);
2813 IFree(afniModel->total_support_vectors);
2814 IFree(afniModel->b);
2815 IFree(afniModel->polynomial_degree);
2816 free2f(afniModel->alphas, afniModel->combinations);
2817 IFree(afniModel->svm_c);
2818 IFree(afniModel->eps);
2819 IFree(afniModel->biased_hyperplane);
2820 IFree(afniModel->skip_final_opt_check);
2821 IFree(afniModel->svm_maxqpsize);
2822 IFree(afniModel->svm_newvarsinqp);
2823 IFree(afniModel->svm_iter_to_shrink);
2824 IFree(afniModel->transduction_posratio);
2825 IFree(afniModel->svm_costratio);
2826 IFree(afniModel->svm_costratio_unlab);
2827 IFree(afniModel->epsilon_a);
2828 RETURN(1);
2829 }
2830 for (i=0; i < afniModel->combinations; ++i ) {
2831 afniModel->epsilon_crit[i] = atr_float->fl[i];
2832 }
2833
2834 atr_int = THD_find_int_atr( dsetModel->dblk, "3DSVM_COMPUTE_LOO" );
2835 if( (afniModel->compute_loo = (int *)malloc( atr_int->nin * sizeof(int))) == NULL ) {
2836 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2837 "Memory allocation for compute_loo failed!");
2838
2839 /* free and return */
2840 IFree(p);
2841 free2c(afniModel->combName, max_comb);
2842 free2c(afniModel->kernel_custom, max_comb);
2843 IFree(afniModel->kernel_type);
2844 IFree(afniModel->rbf_gamma);
2845 IFree(afniModel->linear_coefficient);
2846 IFree(afniModel->constant_coefficient);
2847 IFree(afniModel->total_masked_features);
2848 IFree(afniModel->total_samples);
2849 IFree(afniModel->total_support_vectors);
2850 IFree(afniModel->b);
2851 IFree(afniModel->polynomial_degree);
2852 free2f(afniModel->alphas, afniModel->combinations);
2853 IFree(afniModel->svm_c);
2854 IFree(afniModel->eps);
2855 IFree(afniModel->biased_hyperplane);
2856 IFree(afniModel->skip_final_opt_check);
2857 IFree(afniModel->svm_maxqpsize);
2858 IFree(afniModel->svm_newvarsinqp);
2859 IFree(afniModel->svm_iter_to_shrink);
2860 IFree(afniModel->transduction_posratio);
2861 IFree(afniModel->svm_costratio);
2862 IFree(afniModel->svm_costratio_unlab);
2863 IFree(afniModel->epsilon_a);
2864 IFree(afniModel->epsilon_crit);
2865 RETURN(1);
2866 }
2867 for( i=0; i<afniModel->combinations; ++i ) {
2868 afniModel->compute_loo[i] = atr_int->in[i];
2869 }
2870
2871 atr_float = THD_find_float_atr( dsetModel->dblk, "3DSVM_RHO" );
2872 if( (afniModel->rho = (float *)malloc( atr_float->nfl * sizeof(float))) == NULL ) {
2873 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2874 "Memory allocation for rho!");
2875
2876 /* free and return */
2877 IFree(p);
2878 free2c(afniModel->combName, max_comb);
2879 free2c(afniModel->kernel_custom, max_comb);
2880 IFree(afniModel->kernel_type);
2881 IFree(afniModel->rbf_gamma);
2882 IFree(afniModel->linear_coefficient);
2883 IFree(afniModel->constant_coefficient);
2884 IFree(afniModel->total_masked_features);
2885 IFree(afniModel->total_samples);
2886 IFree(afniModel->total_support_vectors);
2887 IFree(afniModel->b);
2888 IFree(afniModel->polynomial_degree);
2889 free2f(afniModel->alphas, afniModel->combinations);
2890 IFree(afniModel->svm_c);
2891 IFree(afniModel->eps);
2892 IFree(afniModel->biased_hyperplane);
2893 IFree(afniModel->skip_final_opt_check);
2894 IFree(afniModel->svm_maxqpsize);
2895 IFree(afniModel->svm_newvarsinqp);
2896 IFree(afniModel->svm_iter_to_shrink);
2897 IFree(afniModel->transduction_posratio);
2898 IFree(afniModel->svm_costratio);
2899 IFree(afniModel->svm_costratio_unlab);
2900 IFree(afniModel->epsilon_a);
2901 IFree(afniModel->epsilon_crit);
2902 IFree(afniModel->compute_loo);
2903 RETURN(1);
2904 }
2905 for (i=0; i < afniModel->combinations; ++i ) {
2906 afniModel->rho[i] = atr_float->fl[i];
2907 }
2908
2909 atr_int = THD_find_int_atr( dsetModel->dblk, "3DSVM_XA_DEPTH" );
2910 if( (afniModel->xa_depth = (int *)malloc( atr_int->nin * sizeof(int))) == NULL ) {
2911 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2912 "Memory allocation for xa_depth failed!");
2913
2914 /* free and return */
2915 IFree(p);
2916 free2c(afniModel->combName, max_comb);
2917 free2c(afniModel->kernel_custom, max_comb);
2918 IFree(afniModel->kernel_type);
2919 IFree(afniModel->rbf_gamma);
2920 IFree(afniModel->linear_coefficient);
2921 IFree(afniModel->constant_coefficient);
2922 IFree(afniModel->total_masked_features);
2923 IFree(afniModel->total_samples);
2924 IFree(afniModel->total_support_vectors);
2925 IFree(afniModel->b);
2926 IFree(afniModel->polynomial_degree);
2927 free2f(afniModel->alphas, afniModel->combinations);
2928 IFree(afniModel->svm_c);
2929 IFree(afniModel->eps);
2930 IFree(afniModel->biased_hyperplane);
2931 IFree(afniModel->skip_final_opt_check);
2932 IFree(afniModel->svm_maxqpsize);
2933 IFree(afniModel->svm_newvarsinqp);
2934 IFree(afniModel->svm_iter_to_shrink);
2935 IFree(afniModel->transduction_posratio);
2936 IFree(afniModel->svm_costratio);
2937 IFree(afniModel->svm_costratio_unlab);
2938 IFree(afniModel->epsilon_a);
2939 IFree(afniModel->epsilon_crit);
2940 IFree(afniModel->compute_loo);
2941 IFree(afniModel->rho);
2942 RETURN(1);
2943 }
2944 for( i=0; i<afniModel->combinations; ++i ) {
2945 afniModel->xa_depth[i] = atr_int->in[i];
2946 }
2947 }
2948
2949 /* -----------------------------------------------------*/
2950 /* ---- naming for model parameters before Oct. 2009 ---*/
2951 /* -----------------------------------------------------*/
2952 else if (afniModel->version >= 0.80) {
2953
2954 atr_int = THD_find_int_atr( dsetModel->dblk, "CLASS_COMBINATIONS" );
2955 afniModel->combinations = *atr_int->in;
2956
2957 /* --- allocate CSV strings --- */
2958 p_string_size = afniModel->combinations*CSV_STRING;
2959 if ( (p = (char *)malloc(p_string_size*sizeof(char))) == NULL ) {
2960 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2961 "Memory allocation for cvs-string failed!");
2962 RETURN(1);
2963 }
2964
2965 /* - allocate 2D char arrays - */
2966 if( (afniModel->combName = Allocate2c(max_comb, (long)CSV_STRING)) == NULL ) {
2967 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2968 "Memory allocation for combName failed!");
2969
2970 /* free and return */
2971 IFree(p);
2972 RETURN(1);
2973 }
2974 Clear2c(afniModel->combName, max_comb);
2975
2976 if( (afniModel->kernel_custom = Allocate2c(max_comb, (long)CSV_STRING)) == NULL ) {
2977 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
2978 "Memory allocation for kernel_custom failed!");
2979
2980 /* free and return */
2981 IFree(p);
2982 free2c(afniModel->combName, max_comb);
2983 RETURN(1);
2984 }
2985 Clear2c(afniModel->kernel_custom, max_comb);
2986
2987 /* -- write default values for non-existing header entries */
2988 strncpy(afniModel->svm_type, "classification", LONG_STRING);
2989 afniModel->mask_used = MASK_UNKNOWN;
2990
2991 /* -- allocate and read header entries -- */
2992 atr_string = THD_find_string_atr( dsetModel->dblk, "COMBO_NAMES" );
2993 strncpy(p, atr_string->ch, p_string_size);
2994 q = strtok(p,",");
2995 if (q != NULL) strncpy(afniModel->combName[0], q, CSV_STRING);
2996 else {
2997 snprintf(errorString, LONG_STRING,
2998 "Reading model combinations in header file failed");
2999
3000 /* free and return */
3001 IFree(p);
3002 free2c(afniModel->combName, max_comb);
3003 free2c(afniModel->kernel_custom, max_comb);
3004 RETURN(1);
3005 }
3006 for(i = 1; i < afniModel->combinations; ++i) {
3007 q=strtok(NULL, ",");
3008 if (q != NULL) strncpy(afniModel->combName[i], q, CSV_STRING);
3009 else {
3010 snprintf(errorString, LONG_STRING,
3011 "Reading model combinations in header file failed "
3012 "Number does not match expected(%d)", afniModel->combinations);
3013
3014 /* free and return */
3015 IFree(p);
3016 free2c(afniModel->combName, max_comb);
3017 free2c(afniModel->kernel_custom, max_comb);
3018 RETURN(1);
3019 }
3020 }
3021
3022 atr_int = THD_find_int_atr( dsetModel->dblk, "CLASS_COUNT" );
3023 afniModel->class_count = *atr_int->in;
3024
3025 atr_int = THD_find_int_atr( dsetModel->dblk, "TIMEPOINTS" );
3026 afniModel->timepoints = *atr_int->in;
3027
3028 atr_int = THD_find_int_atr( dsetModel->dblk, "KERNEL_TYPE" );
3029 if( (afniModel->kernel_type = (int *)malloc( atr_int->nin * sizeof(int))) == NULL ) {
3030 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
3031 "Memory allocation for kernel_type failed!");
3032
3033 /* free and return */
3034 IFree(p);
3035 free2c(afniModel->combName, max_comb);
3036 free2c(afniModel->kernel_custom, max_comb);
3037 RETURN(1);
3038 }
3039
3040 for( i=0 ; i<atr_int->nin ; ++i ) {
3041 afniModel->kernel_type[i] = atr_int->in[i];
3042 }
3043
3044 /* JL Feb. 2009: Added this part to support custom kernels.
3045 * To be backward compatible, read KERNEL_CUSTOM only if training was
3046 * performed with a custom kernel. */
3047
3048 if (afniModel->kernel_type[0] == CUSTOM) {
3049 atr_string = THD_find_string_atr( dsetModel->dblk, "KERNEL_CUSTOM" );
3050 strncpy(p,atr_string->ch, p_string_size);
3051 q = strtok(p,",");
3052 if (q != NULL) strncpy(afniModel->kernel_custom[0], q, CSV_STRING);
3053 else {
3054 snprintf(errorString, LONG_STRING,
3055 "Reading model file failed. Can't find KERNEL_CUSTOM");
3056
3057 /* free and return */
3058 IFree(p);
3059 free2c(afniModel->combName, max_comb);
3060 free2c(afniModel->kernel_custom, max_comb);
3061 IFree(afniModel->kernel_type);
3062 RETURN(1);
3063 }
3064
3065 for ( i=1; i<afniModel->combinations; ++i) {
3066 q=strtok(NULL,",");
3067 if (q != NULL) strncpy(afniModel->kernel_custom[i], q, p_string_size);
3068 else {
3069 snprintf(errorString, LONG_STRING, "Reading KERNEL_CUSTOM in model "
3070 "header. Number of class-combinations does not match expected(%d)\n",
3071 afniModel->combinations);
3072
3073 /* free and return */
3074 IFree(p);
3075 free2c(afniModel->combName, max_comb);
3076 free2c(afniModel->kernel_custom, max_comb);
3077 IFree(afniModel->kernel_type);
3078 RETURN(1);
3079 }
3080 }
3081 }
3082 else {
3083 for ( i=1; i<afniModel->combinations; ++i) {
3084 strncpy(afniModel->kernel_custom[i], "empty", CSV_STRING);
3085 }
3086 }
3087
3088 atr_float = THD_find_float_atr( dsetModel->dblk, "RBF_GAMMA" );
3089 if( (afniModel->rbf_gamma = (float *)malloc( atr_float->nfl * sizeof(float) )) == NULL ) {
3090 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
3091 "Memory allocation for rbf_gamma failed!");
3092
3093 /* free and return */
3094 IFree(p);
3095 free2c(afniModel->combName, max_comb);
3096 free2c(afniModel->kernel_custom, max_comb);
3097 IFree(afniModel->kernel_type);
3098 RETURN(1);
3099 }
3100 for( i=0 ; i<atr_float->nfl ; ++i ) {
3101 afniModel->rbf_gamma[i] = atr_float->fl[i];
3102 }
3103
3104 atr_float = THD_find_float_atr( dsetModel->dblk, "LINEAR_COEFFICIENT" );
3105 if( (afniModel->linear_coefficient = (float *)malloc( atr_float->nfl * sizeof(float) )) == NULL ) {
3106
3107 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
3108 "Memory allocation for linear_coefficient failed!");
3109
3110 /* free and return */
3111 IFree(p);
3112 free2c(afniModel->combName, max_comb);
3113 free2c(afniModel->kernel_custom, max_comb);
3114 IFree(afniModel->kernel_type);
3115 IFree(afniModel->rbf_gamma);
3116 RETURN(1);
3117 }
3118 for( i=0 ; i<atr_float->nfl ; ++i ) {
3119 afniModel->linear_coefficient[i] = atr_float->fl[i];
3120 }
3121
3122 atr_float = THD_find_float_atr( dsetModel->dblk, "CONSTANT_COEFFICIENT" );
3123 if( (afniModel->constant_coefficient = (float *)malloc( atr_float->nfl * sizeof(float) )) == NULL ) {
3124 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
3125 "Memory allocation for constant_coefficient failed!");
3126
3127 /* free and return */
3128 IFree(p);
3129 free2c(afniModel->combName, max_comb);
3130 free2c(afniModel->kernel_custom, max_comb);
3131 IFree(afniModel->kernel_type);
3132 IFree(afniModel->rbf_gamma);
3133 IFree(afniModel->linear_coefficient);
3134 RETURN(1);
3135 }
3136 for( i=0 ; i<atr_float->nfl ; ++i ) {
3137 afniModel->constant_coefficient[i] = atr_float->fl[i];
3138 }
3139
3140 atr_int = THD_find_int_atr( dsetModel->dblk, "TOTAL_MASKED_FEATURES" );
3141 if( (afniModel->total_masked_features = (int *)malloc( atr_int->nin * sizeof(int) )) == NULL ) {
3142 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
3143 "Memory allocation for total_masked_features failed!");
3144
3145 /* -- free and return -- */
3146 IFree(p);
3147 free2c(afniModel->combName, max_comb);
3148 free2c(afniModel->kernel_custom, max_comb);
3149 IFree(afniModel->kernel_type);
3150 IFree(afniModel->rbf_gamma);
3151 IFree(afniModel->linear_coefficient);
3152 IFree(afniModel->constant_coefficient);
3153 RETURN(1);
3154 }
3155 for( i=0 ; i<atr_int->nin ; ++i ) {
3156 afniModel->total_masked_features[i] = atr_int->in[i];
3157 }
3158
3159 atr_int = THD_find_int_atr( dsetModel->dblk, "TOTAL_SAMPLES" );
3160 if( (afniModel->total_samples = (int *)malloc( atr_int->nin * sizeof(int) )) == NULL ) {
3161 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
3162 "Memory allocation for total_samples failed!");
3163
3164 /* -- free and return -- */
3165 IFree(p);
3166 free2c(afniModel->combName, max_comb);
3167 free2c(afniModel->kernel_custom, max_comb);
3168 IFree(afniModel->kernel_type);
3169 IFree(afniModel->rbf_gamma);
3170 IFree(afniModel->linear_coefficient);
3171 IFree(afniModel->constant_coefficient);
3172 IFree(afniModel->total_masked_features);
3173 RETURN(1);
3174 }
3175 for( i=0 ; i<atr_int->nin ; ++i ) {
3176 afniModel->total_samples[i] = atr_int->in[i];
3177 }
3178
3179 atr_int = THD_find_int_atr( dsetModel->dblk, "TOTAL_SUPPORT_VECTORS" );
3180 if( (afniModel->total_support_vectors = (int *)malloc( atr_int->nin * sizeof(int) )) == NULL ) {
3181 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
3182 "Memory allocation for total_support_vectors failed!");
3183
3184 /* -- free and return -- */
3185 IFree(p);
3186 free2c(afniModel->combName, max_comb);
3187 free2c(afniModel->kernel_custom, max_comb);
3188 IFree(afniModel->kernel_type);
3189 IFree(afniModel->rbf_gamma);
3190 IFree(afniModel->linear_coefficient);
3191 IFree(afniModel->constant_coefficient);
3192 IFree(afniModel->total_masked_features);
3193 IFree(afniModel->total_samples);
3194 RETURN(1);
3195 }
3196 for( i=0 ; i<atr_int->nin ; ++i ) {
3197 afniModel->total_support_vectors[i] = atr_int->in[i];
3198 }
3199
3200 atr_float = THD_find_float_atr( dsetModel->dblk, "B" );
3201 if( (afniModel->b = (float *)malloc( atr_float->nfl * sizeof(float) )) == NULL ) {
3202 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
3203 "Memory allocation for b failed!");
3204
3205 /* -- free and return -- */
3206 IFree(p);
3207 free2c(afniModel->combName, max_comb);
3208 free2c(afniModel->kernel_custom, max_comb);
3209 IFree(afniModel->kernel_type);
3210 IFree(afniModel->rbf_gamma);
3211 IFree(afniModel->linear_coefficient);
3212 IFree(afniModel->constant_coefficient);
3213 IFree(afniModel->total_masked_features);
3214 IFree(afniModel->total_samples);
3215 IFree(afniModel->total_support_vectors);
3216 RETURN(1);
3217 }
3218 for( i=0 ; i<atr_float->nfl ; ++i ) {
3219 afniModel->b[i] = atr_float->fl[i];
3220 }
3221
3222 atr_int = THD_find_int_atr( dsetModel->dblk, "POLYNOMIAL_DEGREE" );
3223 if( (afniModel->polynomial_degree = (int *)malloc( atr_int->nin * sizeof(int) )) == NULL ) {
3224 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
3225 "Memory allocation for polynomial_degree failed!");
3226
3227 /* -- free and return -- */
3228 IFree(p);
3229 free2c(afniModel->combName, max_comb);
3230 free2c(afniModel->kernel_custom, max_comb);
3231 IFree(afniModel->kernel_type);
3232 IFree(afniModel->rbf_gamma);
3233 IFree(afniModel->linear_coefficient);
3234 IFree(afniModel->constant_coefficient);
3235 IFree(afniModel->total_masked_features);
3236 IFree(afniModel->total_samples);
3237 IFree(afniModel->total_support_vectors);
3238 IFree(afniModel->b);
3239 RETURN(1);
3240 }
3241 for( i=0 ; i<atr_int->nin ; ++i ) {
3242 afniModel->polynomial_degree[i] = atr_int->in[i];
3243 }
3244
3245 if( (afniModel->alphas = Allocate2f((long) afniModel->combinations,
3246 nalphas)) == NULL ) {
3247
3248 snprintf(errorString, LONG_STRING, "readAllocateAfniModel: "
3249 "Memory allocation for alphas failed!");
3250
3251 /* -- free and return -- */
3252 IFree(p);
3253 free2c(afniModel->combName, max_comb);
3254 free2c(afniModel->kernel_custom, max_comb);
3255 IFree(afniModel->kernel_type);
3256 IFree(afniModel->rbf_gamma);
3257 IFree(afniModel->linear_coefficient);
3258 IFree(afniModel->constant_coefficient);
3259 IFree(afniModel->total_masked_features);
3260 IFree(afniModel->total_samples);
3261 IFree(afniModel->total_support_vectors);
3262 IFree(afniModel->b);
3263 IFree(afniModel->polynomial_degree);
3264 RETURN(1);
3265 }
3266
3267 for(i = 0; i < afniModel->combinations; ++i ) {
3268 snprintf(headernames, LONG_STRING, "ALPHAS_%s", afniModel->combName[i]);
3269 atr_float = THD_find_float_atr( dsetModel->dblk, headernames);
3270 for(j = 0; j < nalphas; ++j ) {
3271 afniModel->alphas[i][j] = (double)atr_float->fl[j];
3272 }
3273 }
3274 }
3275 else {
3276 snprintf(errorString, LONG_STRING,
3277 "Could not read model header. Version V%3.2f unknown!", afniModel->version);
3278 /* -- free and return -- */
3279 IFree(p);
3280 free2c(afniModel->combName, max_comb);
3281 free2c(afniModel->kernel_custom, max_comb);
3282 IFree(afniModel->kernel_type);
3283 IFree(afniModel->rbf_gamma);
3284 IFree(afniModel->linear_coefficient);
3285 IFree(afniModel->constant_coefficient);
3286 IFree(afniModel->total_masked_features);
3287 IFree(afniModel->total_samples);
3288 IFree(afniModel->total_support_vectors);
3289 IFree(afniModel->b);
3290 IFree(afniModel->polynomial_degree);
3291 free2f(afniModel->alphas, afniModel->combinations);
3292 RETURN(1);
3293 }
3294
3295 /* --- free p string used for strtok ---*/
3296 IFree(p);
3297
3298 RETURN(0);
3299 }
3300
3301
/* Sizes and allocates the MODEL_MAPS structure for a trained model.
 *
 * maps       - out: nmaps/nvox are set and names/data arrays allocated
 * n_classes  - number of class categories in the model
 * n_vox      - number of voxels (RE+IM concatenated for "complex1")
 * kernelName - kernel option string; maps are only supported for
 *              "linear" and "complex1"
 *
 * Returns 0 on success, 1 on allocation failure.
 */
int allocateModelMaps(MODEL_MAPS *maps, long n_classes, long n_vox, char *kernelName)
{
  long nPairs = 0;   /* number of pairwise class-combinations */

  ENTRY("allocateModelMaps");

  /* Two-class combinations: n over 2 */
  nPairs = n_classes*(n_classes-1)/2;
  maps->index = 0;

  if( strcmp(kernelName, "complex1") == 0 ) {
    /* For kernel-option complex1 the real and imaginary parts are
     * concatenated, so the voxel count must be even. */
    if( n_vox % 2 != 0 ) {
      WARNING_message("Can not create model maps. "
          "Something is wrong with the complex-valued data representation");

      /* Maps can not be generated, but allocate a minimal chunk of
       * memory anyway; this keeps the cleanup path uniform and is
       * cheap. */
      maps->nmaps = 1;
      maps->nvox  = 1;
    }
    else {
      /* Six weight-vector maps per class-combination for
       * linear-complex kernels: RE, IM, MAG1, MAG2, PHA1, PHA2 */
      maps->nvox  = n_vox/2;
      maps->nmaps = nPairs*6;
    }
  }
  else if( strcmp(kernelName, "linear") == 0 ) {
    /* One weight-vector map per class-combination */
    maps->nmaps = nPairs;
    maps->nvox  = n_vox;
  }
  else {
    WARNING_message("Sorry, Can not create model maps for kernel option: %s", kernelName);
    /* Unsupported kernel: allocate a minimal chunk of memory anyway;
     * this keeps the cleanup path uniform and is cheap. */
    maps->nmaps = 1;
    maps->nvox  = 1;
  }

  /* --- allocate name table and map data --- */
  if( (maps->names = Allocate2c(maps->nmaps, (long)LONG_STRING)) == NULL ) {
    RETURN(1);
  }
  if( (maps->data = Allocate2d(maps->nmaps, maps->nvox)) == NULL ) {
    free2c(maps->names, maps->nmaps);
    RETURN(1);
  }

  /* --- zero the freshly allocated memory --- */
  Clear2c(maps->names, maps->nmaps);
  Clear2d(maps->data, maps->nmaps, maps->nvox);

  RETURN(0);
}
3362
/* Releases the memory held by a MODEL_MAPS structure (data and name
 * arrays allocated in allocateModelMaps). */
void freeModelMaps(MODEL_MAPS *maps)
{
  ENTRY("freeModelMaps");

  /* free in reverse order of allocation */
  free2d(maps->data,  maps->nmaps);
  free2c(maps->names, maps->nmaps);

  EXRETURN;
}
3373
/* Computes model-map (weight-vector) volumes for one class-combination
 * and appends them to the MODEL_MAPS structure.
 *
 * maps           - in/out: map storage; maps->index records how many maps
 *                  have been written so far and is advanced here
 * afni_model     - trained model (alphas, kernel types, combination names)
 * dsetTrainArray - training data indexed as [timepoint][voxel]
 * dsetMaskArray  - brain mask; only read when maskFile[0] is non-zero
 * maskFile       - mask filename; empty string ("") means "no mask"
 * cc             - index of the class-combination to process
 */
void addToModelMap_bucket ( MODEL_MAPS *maps, AFNI_MODEL *afni_model,
    DatasetType **dsetTrainArray, MaskType *dsetMaskArray, char *maskFile,
    long cc)
{
  long v = 0;       /* voxel index */
  long iMap = 0;    /* index of the next map to be written */
  long nvoxh = 0;   /* voxel offset of the imaginary half (complex data) */
  long t = 0;       /* timepoint index */
  long k = 0;       /* alpha-set index */
  long nk = 0;      /* number of alpha sets (2 for regression, else 1) */
  long nt = 0;      /* number of timepoints */

  ENTRY("addToModelMap_bucket");

  /* Note: This function adds one or more maps to the MODEL_MAPS structure for
   * each class-combination (cc). In order to keep track of how many maps were
   * written, maps->index is updated for each function call. For now, only
   * weight-vector maps for linear and linear-complex kernels are calculated.
   * The kernel has to be the same for each class-combination!
   *
   * JL Aug 2009: Added regression maps
   * JL Apr 2010: Incorporated map_index into the MODEL_MAPS structure
   * JL Jul 2015: Check for non-zero alphas more rigorously
   * JL JUL 2015: Got rid of cAlphas (censored alphas). Unnecessary.
   */

  /* --- initialization ---*/
  iMap=maps->index; /* TODO: prone for errors, should do something better than that */


  if( !strcmp(afni_model->svm_type, "regression") ) {
    /* For regression, the array storing the alphas is twice as long
     * as for classification. */
    nk = 2;
  }
  else nk = 1;

  nt = afni_model->timepoints;

  /* -- linear kernel -- */
  /* w = sum over support vectors of alpha * x: accumulate each
   * timepoint's data, weighted by its alpha, into a single map. */
  if(afni_model->kernel_type[cc] == LINEAR) {
    for (k=0; k<nk; ++k) {
      for (t=0; t<nt; ++t) {
        /* skip non-support vectors (alpha == 0) */
        if ( fabs(afni_model->alphas[cc][k*nt+t]) > 0.0 ) {
          for (v=0; v<maps->nvox; ++v) {
            if ( maskFile[0] ) { /* mask */
              if ( dsetMaskArray[v] ) {
                maps->data[iMap][v] += afni_model->alphas[cc][k*nt+t] *
                                        dsetTrainArray[t ][v];
              }
              else {
                /* voxel outside mask: force map value to zero */
                maps->data[iMap][v] = 0;
              }
            }
            else { /* no mask */
              maps->data[iMap][v] += afni_model->alphas[cc][k*nt+t] *
                                      dsetTrainArray[t ][v];
            }
          }
        }
      }
    }
    snprintf(maps->names[iMap], LONG_STRING, "w_%s", afni_model->combName[cc]);
    ++iMap;
  }
  /* -- complex-linear kernel -- */
  /* JL: Experimental stuff for Scott Peltier */
  /* Real part lives in voxels [0, nvoxh), imaginary part in
   * [nvoxh, 2*nvoxh) of each timepoint's row. */
  else if( (afni_model->kernel_type[cc] == CUSTOM) &&
      (!strcmp(afni_model->kernel_custom[cc],"complex1")) ) {

    nvoxh = maps->nvox;

    for (k=0; k<nk; ++k) {
      for (t=0; t<nt; ++t) {
        /* skip non-support vectors (alpha == 0) */
        if ( fabs(afni_model->alphas[cc][k*nt+t]) > 0.0 ) {
          for (v=0; v<maps->nvox; ++v) {
            if ( maskFile[0] ) { /* mask */
              if ( dsetMaskArray[v] ) {

                /* - RE - */
                maps->data[iMap  ][v] += afni_model->alphas[cc][k*nt+t] *
                                          dsetTrainArray[t ][v       ];
                /* - IM - */
                maps->data[iMap+1][v] += afni_model->alphas[cc][k*nt+t] *
                                          dsetTrainArray[t ][v+nvoxh];
                /* - MAG1 - */
                maps->data[iMap+2][v] += afni_model->alphas[cc][k*nt+t] *
                  sqrt( dsetTrainArray[t ][v       ] * dsetTrainArray[t][v       ] +
                        dsetTrainArray[t ][v+nvoxh] * dsetTrainArray[t][v+nvoxh]);

                /* - PHA1 - */
                /* NOTE(review): this masked branch scales PHA1 by 10e5,
                 * but the no-mask branch below applies no such factor.
                 * Looks inconsistent -- confirm which scaling is
                 * intended before relying on masked PHA1 values. */
                maps->data[iMap+3][v] += 10e5 *afni_model->alphas[cc][k*nt+t ] *
                  atan2(dsetTrainArray[t ][v+nvoxh], dsetTrainArray[t ][v       ]);

              }
              else {
                /* voxel outside mask: force all four maps to zero */
                maps->data[iMap  ][v] = 0;
                maps->data[iMap+1][v] = 0;
                maps->data[iMap+2][v] = 0;
                maps->data[iMap+3][v] = 0;

              }
            }
            else { /* no mask */

              /* - RE - */
              maps->data[iMap  ][v] += afni_model->alphas[cc][k*nt+t] *
                                        dsetTrainArray[t ][v       ];
              /* - IM - */
              maps->data[iMap+1][v] += afni_model->alphas[cc][k*nt+t] *
                                        dsetTrainArray[t ][v+nvoxh];
              /* - MAG1 - */
              maps->data[iMap+2][v] += afni_model->alphas[cc][k*nt+t] *
                sqrt( dsetTrainArray[t ][v       ] * dsetTrainArray[t ][v       ] +
                      dsetTrainArray[t ][v+nvoxh] * dsetTrainArray[t ][v+nvoxh]);
              /* - PHA1 - */
              maps->data[iMap+3][v] += afni_model->alphas[cc][k*nt+t] *
                atan2(dsetTrainArray[t ][v+nvoxh], dsetTrainArray[t ][v       ]);

            }
          }
        }
      }
    }
    snprintf(maps->names[iMap  ], LONG_STRING, "CpxWvMapReal_%s", afni_model->combName[cc]);
    snprintf(maps->names[iMap+1], LONG_STRING, "CpxWvMapImag_%s", afni_model->combName[cc]);
    snprintf(maps->names[iMap+2], LONG_STRING, "CpxWvMapMag1_%s", afni_model->combName[cc]);
    snprintf(maps->names[iMap+3], LONG_STRING, "CpxWvMapPha1_%s", afni_model->combName[cc]);

    /* MAG2/PHA2 are derived from the already-accumulated RE and IM maps,
     * so they are computed in a separate pass after the loops above. */
    for (v=0; v<maps->nvox; ++v) {
      if ( maskFile[0] ) { /* mask */
        if ( dsetMaskArray[v] ) {
          /* - MAG2 - */
          maps->data[iMap+4][v] = sqrt( maps->data[iMap  ][v] *
                                        maps->data[iMap  ][v] +
                                        maps->data[iMap+1][v] *
                                        maps->data[iMap+1][v] );
          /* - PHA2 - */
          maps->data[iMap+5][v] = atan2( maps->data[iMap+1][v] ,
                                         maps->data[iMap  ][v] );
        }
        else {
          /* voxel outside mask: force map values to zero */
          maps->data[iMap+4][v] = 0;
          maps->data[iMap+5][v] = 0;
        }
      }
      else { /* no mask */
        /* - MAG2 - */
        maps->data[iMap+4][v] = sqrt( maps->data[iMap  ][v] *
                                      maps->data[iMap  ][v] +
                                      maps->data[iMap+1][v] *
                                      maps->data[iMap+1][v] );
        /* - PHA2 - */
        maps->data[iMap+5][v] = atan2( maps->data[iMap+1][v] ,
                                       maps->data[iMap  ][v] );
      }
    }
    snprintf(maps->names[iMap+4], LONG_STRING, "CpxWvMapMag2_%s", afni_model->combName[cc]);
    snprintf(maps->names[iMap+5], LONG_STRING, "CpxWvMapPha2_%s", afni_model->combName[cc]);
    iMap=iMap+6;
  }

  /* record how many maps have been written in total */
  maps->index=iMap;

  EXRETURN;
}
3540
/* Scales the model maps and writes them to disk as an AFNI bucket dataset.
 *
 * maps          - maps computed by addToModelMap_bucket (data and names)
 * dsetMaskArray - brain mask; only read when maskFile[0] is non-zero
 * dsetTrain     - training dataset used as geometry template for the bucket
 * maskFile      - mask filename; empty string ("") means "no mask"
 * fileName      - prefix of the bucket dataset to be written
 * b             - array of b-values (one per class-combination), stored in
 *                 the header as 3DSVM_B
 * combinations  - number of class-combinations (length of b)
 * options       - command-line options; only kernelName is read here
 * argc, argv    - command line, recorded in the dataset history
 * errorString   - out: error message on failure (LONG_STRING bytes)
 *
 * Returns 0 on success, 1 on failure (errorString is set).
 */
int writeModelMap_bucket( MODEL_MAPS *maps, MaskType *dsetMaskArray,
    THD_3dim_dataset *dsetTrain, char *maskFile, char *fileName,
    float *b, long combinations, ASLoptions* options, int argc, char **argv,
    char *errorString )
{
  long v     = 0;   /* voxel index */
  long iMap  = 0;   /* map (sub-brick) index */
  long nx    = 0;   /* dataset x dimension */
  long ny    = 0;   /* dataset y dimension */
  long nz    = 0;   /* dataset z dimension */
  long nx_ny = 0;   /* voxels per slice (nx*ny) */

  THD_ivec3 iv_nxyz;
  int ierror = 0;

  THD_3dim_dataset* dsetModelMapBucket = NULL;
  float* scaled_map = NULL;

  char* commandline = NULL; /* for history */

  ENTRY("writeModelMap_bucket");

  /* Changes:
   * JL Apr. 2010: Writing VERSION_3DSVM and VERSION_3DSVM into the header
   * JL Apr. 2010: Writing B value into the header
   * JL Jul. 2010: Writing entire command line history into the header
   * JL July 2011: Modified error handling: Replaced ERROR_exit() by RETURN(1).
   *               Passing error message as argument (errorString) to the
   *               calling function.
   *
   */


  /* --- initialize --- */
  dsetModelMapBucket = EDIT_empty_copy( dsetTrain );

  nx = DSET_NX( dsetTrain );
  ny = DSET_NY( dsetTrain );
  nx_ny = nx*ny;

  if ( !strcmp(options->kernelName, "complex1") ) {
    /* JL: For complex kernel RE and IM is concatenated in the z-direction.
     * However, we are not concatenating data for the bucket (going back to
     * nz/2) */

    /* nvox must be a whole number of slices to recover nz */
    if ( maps->nvox%nx_ny != 0 ) {
      snprintf(errorString, LONG_STRING, "Writing bucket with model maps failed! "
          "Something is wrong with the complex-valued data representation.");
      RETURN(1);
    }
    nz = maps->nvox/(nx*ny);
  }
  else {
    nz = DSET_NZ( dsetTrain );
  }

  /* --- set up bucket geometry/type: one float sub-brick per map --- */
  LOAD_IVEC3( iv_nxyz, nx ,ny ,nz);
  ierror = EDIT_dset_items ( dsetModelMapBucket,
                            ADN_prefix,          fileName,
                            ADN_type,            HEAD_FUNC_TYPE,
                            ADN_func_type,       FUNC_BUCK_TYPE,
                            ADN_datum_all,       MRI_float,
                            ADN_ntt,             0,   /* no time axis */
                            ADN_nvals,           maps->nmaps,
                            ADN_nxyz,            iv_nxyz,
                            ADN_malloc_type,     DATABLOCK_MEM_MALLOC ,
                            ADN_none ) ;

  if( ierror > 0 ) {
    snprintf(errorString, LONG_STRING, "writeModelMap_bucket: "
        "%d errors in attempting to create bucket dataset!", ierror );
    RETURN(1);
  }

  /* -- record history -- */
  commandline = tross_commandline(PROGRAM_NAME, argc, argv);
  if (commandline == NULL) {
    WARNING_message("Can not copy command-line into bucket header!");
  }
  else tross_Append_History (dsetModelMapBucket, commandline);
  IFree(commandline);

  /* --- scale and write maps into bucket --- */
  for (iMap=0; iMap<maps->nmaps; ++iMap) {

    /* -- allocate scaled_map -- */
    /* a fresh buffer per sub-brick; EDIT_substitute_brick below attaches
     * it to the dataset */
    if( (scaled_map = (float *) malloc(sizeof(float)*maps->nvox)) == NULL ) {
      snprintf(errorString, LONG_STRING, "writeModelMap_bucket: "
          "Memory allocation failed!");

      /* free and return */
      /* NOTE(review): buffers already handed to earlier sub-bricks are
       * only unloaded here, not freed -- possible leak on this error
       * path; confirm against AFNI datablock ownership rules. */
      DSET_unload(dsetModelMapBucket);
      RETURN(1);
    }

    /* -- scaling PHA-- */
    /* phase maps are converted from radians to degrees */
    if ( !strncmp(maps->names[iMap], "CpxWvMapPha", LONG_STRING) ) {
      for (v=0; v<maps->nvox; ++v) {
        if ( maskFile[0] ) {
          if ( dsetMaskArray[v] ) {
            scaled_map[v] = (float) (180.0/M_PI*maps->data[iMap][v]);
          }
          else {
            scaled_map[v] = 0;
          }
        }
        else {
          scaled_map[v] = (float) (180.0/M_PI*maps->data[iMap][v]);
        }
      }
    }
    else {
      /* -- scaling RE, IM, MAG-- */
      for (v=0; v<maps->nvox; ++v) {
        if ( maskFile[0] ) {
          if ( dsetMaskArray[v] ) {
            scaled_map[v] = (float) (SCALE*maps->data[iMap][v]);
          }
          else {
            scaled_map[v] = 0;
          }
        }
        else {
          scaled_map[v] = (float) (SCALE*maps->data[iMap][v]);
        }
      }
    }

    /* -- add current map to bucket -- */
    EDIT_substitute_brick( dsetModelMapBucket, iMap, MRI_float, scaled_map );
    EDIT_BRICK_LABEL( dsetModelMapBucket, iMap, maps->names[iMap] );
  }

  /* --- add information to the header --- */
  THD_set_string_atr( dsetModelMapBucket->dblk, "3DSVM_VERSION", VERSION_3DSVM );
  THD_set_string_atr( dsetModelMapBucket->dblk, "3DSVM_VERSION_DATE", VERSION_DATE_3DSVM );
  THD_set_float_atr( dsetModelMapBucket->dblk, "3DSVM_B", combinations, b );

  /* --- write entire bucket data set to disc --- */
  fflush(stdout);
  INFO_message("Writing bucket dataset: %s with %ld brick(s)...", fileName, maps->nmaps);
  THD_write_3dim_dataset( "./", fileName, dsetModelMapBucket, True );

  /* --- deallocate memory --- */
  /* NOTE(review): scaled_map still points at the buffer attached to the
   * LAST sub-brick by EDIT_substitute_brick; freeing it here frees
   * brick-owned memory (after the write, so the file on disk is fine).
   * Confirm AFNI ownership semantics -- this looks like a double-free
   * hazard if the dataset is destroyed later. */
  IFree(scaled_map);

  RETURN(0);
}
3689
3690 /*-----------------------------------------------------------*/
/* Writes the model mask to disk as a separate dataset (a simple copy of
 * the mask that was used for training, with MODEL_MSK_EXT appended to
 * the given prefix). */
void writeModelMask( THD_3dim_dataset *dsetMask, MaskType* dsetMaskArray, char *fileName)
{
  char              copyPrefix[LONG_STRING]; /* output prefix incl. mask extension */
  THD_3dim_dataset *dsetCopy;                /* dataset holding the mask copy */
  int               brickType;               /* 0=byte, 1=short, 2=float, 3=complex */

  ENTRY("writeModelMask");

  /* Output prefix: training-file prefix plus the model-mask extension */
  snprintf( maskCopyName, LONG_STRING, "%s%s", fileName, MODEL_MSK_EXT );

  /* Clone the mask dataset's geometry and turn it into a single-brick
   * fim functional dataset. */
  dsetCopy = EDIT_empty_copy(dsetMask);
  EDIT_dset_items( dsetCopy,
                   ADN_prefix,    copyPrefix,
                   ADN_label1,    copyPrefix,
                   ADN_type,      1,  /* functional dataset */
                   ADN_func_type, 0,  /* fim functional type */
                   ADN_nvals,     1,
                   ADN_ntt,       0,  /* number of time points (?) */
                   ADN_none );

  /* Carry over the source mask's brick datum and attach the mask data */
  brickType = DSET_BRICK_TYPE( dsetMask, 0 );
  EDIT_substitute_brick( dsetCopy, 0, brickType, dsetMaskArray );

  /* Preserve history and note that this is a copy made by 3dsvm */
  tross_Copy_History( dsetMask, dsetCopy );
  tross_Append_History( dsetCopy, "a 3dsvm copy") ;

  fflush(stdout);
  INFO_message("Writing model dataset mask: %s...", fileName);
  THD_write_3dim_dataset( "./", copyPrefix, dsetCopy, True );

  EXRETURN;
}
3724
3725 /*-----------------------------------------------------------*/
/* Writes the trained model as an AFNI dataset: the training data in bricks
 * 0..nt-1, brick nt-1 repeated in brick nt (see TODO below), the mask in
 * brick nt+1, and all SVM parameters as header attributes.
 *
 * Returns 0 on success, 1 on failure (with a message in errorString). */
int writeModelBrik(AFNI_MODEL *afniModel, THD_3dim_dataset* dsetTrain,
    DatasetType** dsetTrainArray, MaskType* dsetMaskArray,
    ASLoptions* options, char* fileName, int argc, char **argv, char *errorString)
{

  THD_3dim_dataset *dsetModel = NULL;
  char* csv_combName       = NULL;  /* comma separated "names" of class
                                     * category combinations */
  char* csv_kernelCustom   = NULL;

  char headernames[LONG_STRING];    /* comma separated "names" for each alpha
                                     * set */
  long csv_string_size     = 0;     /* size of csv strings, dependent on number
                                       of class-combinations */
  char* commandline        = NULL;  /* for history */
  long i                   = 0;
  long nt                  = 0;
  long t                   = 0;
  long nvox                = 0;
  long v                   = 0;
  long nalphas             = 0;
  int datum                = 0;
  int maskUsed             = 0;
  int ierror               = 0;
  short* tmp_dsetArrayShort = NULL;
  float* tmp_dsetArrayFloat = NULL;

  ENTRY("writeModelBrik");

  /* JL Oct. 2009: The naming and the number of parameters written into the
   * model header has changed. Now, we are writing all svm parameters
   * (that can be specified using the command-line) into the header.
   * We also added "3DSVM" in front of each parameter name to avoid
   * collisions with header entries from other afni programs.
   * Trying to be backwards compatible.
   *
   * JL Apr. 2010: Changed allocation of strings holding comma separated values
   * to be dynamic. Replaced all string functions by its equivalent that also
   * takes the string size as an argument.
   *
   * JL May. 2010: Writing mask as a sub brick of the model. To overcome a
   * problem with EDIT_substitute_brick (or a header entry), the mask is written
   * in brick n+1 and n+2.
   *
   * JL Oct. 2010: Bugfix: .BRIK was not written for datum type float (casted
   * incorrectly)
   *
   * JL July 2011: Modified error handling: Replaced ERROR_exit() by RETURN(1).
   * Passing error message as argument (errorString) to the calling function.
   */

  dsetModel = EDIT_empty_copy (dsetTrain);

  nt = DSET_NUM_TIMES( dsetModel );
  nvox = DSET_NVOX( dsetModel );
  datum = DSET_BRICK_TYPE(dsetModel,0);

  ierror = EDIT_dset_items(dsetModel,
      ADN_prefix, fileName,
      ADN_ntt, nt+2,      /* two more timepoints to store mask */
      ADN_nvals, nt+2,
      ADN_none);

  if( ierror > 0 ) {
    snprintf(errorString, LONG_STRING, "writeModelBrik: "
        "%d errors in attempting to create model dataset!", ierror );
    RETURN(1);
  }

  /* for regression, alpha and alpha* are stored, so the alpha arrays are
   * twice as long as for classification */
  if( !strcmp(options->svmType, "regression") ) {
    nalphas = (long)afniModel->timepoints*2;
  }
  else if( !strcmp(options->svmType, "classification") ) {
    nalphas = (long)afniModel->timepoints;
  }
  else {
    /* should never get here */
    /* Bugfix: the message previously named the wrong function
     * (allocateAfniModel). */
    snprintf(errorString, LONG_STRING, "writeModelBrik: SVM type unknown!");
    RETURN(1);
  }

  /* --- write mask and training data into model dataset --- */
  switch (datum) {
    case MRI_float:
      /* -- write train data into model dataset -- */
      /* each brick gets its own buffer; EDIT_substitute_brick takes
       * ownership of the pointer */
      for (t=0; t<nt; ++t) {
        if( (tmp_dsetArrayFloat = (float *)malloc(nvox * sizeof(float))) == NULL ) {
          snprintf(errorString, LONG_STRING, "writeModelBrik: "
              "Memory allocation failed!");

          /* free and return */
          DSET_unload(dsetModel);
          RETURN(1);
        }
        for (v=0; v<nvox; ++v) tmp_dsetArrayFloat[v] = (float) dsetTrainArray[t][v];
        EDIT_substitute_brick(dsetModel, t, MRI_float, tmp_dsetArrayFloat);
      }

      /* -- write last (nt-1) brick again -- */
      /* TODO: I don't know if EDIT_substitute_brick has a bug or if I'm doing
       * something wrong, but writing the mask into brick n also writes the
       * mask in brick n-1. So I am writing brick n in n+1 and the mask in n+2,
       * to not overwrite brick n with the mask.
       */
      if( (tmp_dsetArrayFloat = (float *)malloc(nvox * sizeof(float))) == NULL ) {
        snprintf(errorString, LONG_STRING, "writeModelBrik: "
            "Memory allocation failed!");

        /* free and return */
        DSET_unload(dsetModel);
        RETURN(1);
      }
      for (v=0; v<nvox; ++v) tmp_dsetArrayFloat[v]= (float) dsetTrainArray[nt-1][v];
      EDIT_substitute_brick( dsetModel, nt, MRI_float, tmp_dsetArrayFloat);

      /* -- write mask data into last brick of model dataset */
      if( (tmp_dsetArrayFloat = (float *)malloc(nvox * sizeof(float))) == NULL ) {
        snprintf(errorString, LONG_STRING, "writeModelBrik: "
            "Memory allocation failed!");

        /* free and return */
        DSET_unload(dsetModel);
        RETURN(1);
      }
      if (options->maskFile[0]) {
        for (v=0; v<nvox; ++v) tmp_dsetArrayFloat[v] = (float) dsetMaskArray[v];
      }
      else {
        /* no mask: store an all-ones mask */
        for (v=0; v<nvox; ++v) tmp_dsetArrayFloat[v] = 1.0;
      }
      EDIT_substitute_brick( dsetModel, nt+1, MRI_float, tmp_dsetArrayFloat );

      break;

    case MRI_short:
      /* -- write train data into model dataset --*/
      for (t=0; t<nt; ++t) {
        if( (tmp_dsetArrayShort = (short *)malloc(nvox * sizeof(short))) == NULL ) {
          snprintf(errorString, LONG_STRING, "writeModelBrik: "
              "Memory allocation failed!");

          /* free and return */
          DSET_unload(dsetModel);
          RETURN(1);
        }
        for (v=0; v<nvox; ++v) tmp_dsetArrayShort[v]= (short) dsetTrainArray[t][v];
        EDIT_substitute_brick( dsetModel, t, MRI_short, tmp_dsetArrayShort);
      }

      /* -- write last brick (nt-1) again -- */
      /* TODO: I don't know if EDIT_substitute_brick has a bug or if I'm doing
       * something wrong, but writing the mask into brick n also writes the
       * mask in brick n-1. So I am writing brick n in n+1 and the mask in n+2,
       * to not overwrite brick n with the mask.
       */
      if( (tmp_dsetArrayShort = (short *)malloc(nvox * sizeof(short))) == NULL ) {
        snprintf(errorString, LONG_STRING, "writeModelBrik: "
            "Memory allocation failed!");

        /* free and return */
        DSET_unload(dsetModel);
        RETURN(1);
      }
      for (v=0; v<nvox; ++v) tmp_dsetArrayShort[v]= (short) dsetTrainArray[nt-1][v];
      EDIT_substitute_brick( dsetModel, nt, MRI_short, tmp_dsetArrayShort);

      /* -- write mask into the last brick of model dataset -- */
      /* Bugfix: a fresh buffer must be allocated here (as in the MRI_float
       * branch). The previous buffer was already handed to the dataset by
       * EDIT_substitute_brick; reusing it would overwrite brick nt with the
       * mask and register the same pointer in two bricks. */
      if( (tmp_dsetArrayShort = (short *)malloc(nvox * sizeof(short))) == NULL ) {
        snprintf(errorString, LONG_STRING, "writeModelBrik: "
            "Memory allocation failed!");

        /* free and return */
        DSET_unload(dsetModel);
        RETURN(1);
      }
      if (options->maskFile[0]) {
        for (v=0; v<nvox; ++v) tmp_dsetArrayShort[v] = (short) dsetMaskArray[v];
      }
      else {
        /* no mask: store an all-ones mask */
        for (v=0; v<nvox; ++v) tmp_dsetArrayShort[v] = 1;
      }
      EDIT_substitute_brick(dsetModel, nt+1, MRI_short, tmp_dsetArrayShort);

      break;

    default:
      snprintf(errorString, LONG_STRING, "writeModelBrik: "
          "Writing model failed! Unknown datum-type (%d)", datum);

      /* Bugfix: unload the dataset here too, consistent with the other
       * error paths. */
      DSET_unload(dsetModel);
      RETURN(1);
      break;
  }

  /* --- write header of model dataset --- */
  /* -- allocating csv strings -- */
  csv_string_size = afniModel->combinations*CSV_STRING;
  if( (csv_combName = (char *) malloc(csv_string_size * sizeof(char))) == NULL ) {
    snprintf(errorString, LONG_STRING, "writeModelBrik: "
        "Memory allocation for csv_combName failed!");

    DSET_unload(dsetModel);
    RETURN(1);
  }
  if( (csv_kernelCustom = (char *) malloc(csv_string_size * sizeof(char))) == NULL ) {
    snprintf(errorString, LONG_STRING, "writeModelBrik: "
        "Memory allocation for csv_kernelCustom failed!");

    DSET_unload(dsetModel);
    IFree(csv_combName);
    RETURN(1);
  }

  /* -- record history -- */
  commandline = tross_commandline(PROGRAM_NAME, argc, argv);
  if (commandline == NULL) {
    WARNING_message("Can not copy command-line into model header!");
  }
  else tross_Append_History (dsetModel, commandline);
  IFree(commandline);

  /* -- write model header -- */
  /* Bugfix: strncat's size argument is the maximum number of characters to
   * APPEND, not the total destination size; pass the remaining space to
   * avoid a possible buffer overrun. Also make sure the strncpy results
   * are NUL-terminated. */
  strncpy(csv_combName, afniModel->combName[0], csv_string_size);
  csv_combName[csv_string_size-1] = '\0';
  strncpy(csv_kernelCustom, afniModel->kernel_custom[0], csv_string_size);
  csv_kernelCustom[csv_string_size-1] = '\0';

  for(i = 1; i < afniModel->combinations; ++i) {
    strncat(csv_combName, ",", csv_string_size - strlen(csv_combName) - 1);
    strncat(csv_combName, afniModel->combName[i],
        csv_string_size - strlen(csv_combName) - 1);
    strncat(csv_kernelCustom, ",", csv_string_size - strlen(csv_kernelCustom) - 1);
    strncat(csv_kernelCustom, afniModel->kernel_custom[i],
        csv_string_size - strlen(csv_kernelCustom) - 1);
  }

  /* JL July 2011: */
  {
    int max_iterations = (int)afniModel->max_iterations; /* would be ideally long */
    THD_set_int_atr( dsetModel->dblk, "3DSVM_MAX_ITERATIONS", 1, &max_iterations);
  }

  /* JL May 2010: Write if mask was used into the header */
  /* Bugfix: this previously tested options->modelFile instead of
   * options->maskFile. */
  if (options->maskFile[0]) maskUsed = 1; else maskUsed=0;
  THD_set_int_atr( dsetModel->dblk, "3DSVM_MASK_USED", 1, &maskUsed);

  THD_set_string_atr( dsetModel->dblk, "3DSVM_VERSION",        /* JL Apr. 2010 */
      VERSION_3DSVM);
  THD_set_string_atr( dsetModel->dblk, "3DSVM_VERSION_DATE",   /* JL Apr. 2010 */
      VERSION_DATE_3DSVM);
  THD_set_int_atr( dsetModel->dblk, "3DSVM_CLASS_COUNT", 1,
      &afniModel->class_count);
  THD_set_int_atr( dsetModel->dblk, "3DSVM_CLASS_COMBINATIONS", 1,
      &afniModel->combinations);
  THD_set_int_atr( dsetModel->dblk, "3DSVM_TIMEPOINTS", 1,
      &afniModel->timepoints);
  THD_set_string_atr( dsetModel->dblk, "3DSVM_COMBO_NAMES",
      csv_combName);
  THD_set_string_atr( dsetModel->dblk, "3DSVM_SVM_TYPE",
      afniModel->svm_type );                                   /* JL May 2009 */
  THD_set_string_atr( dsetModel->dblk, "3DSVM_KERNEL_CUSTOM",
      csv_kernelCustom);                                       /* JL Feb 2009 */
  THD_set_int_atr( dsetModel->dblk, "3DSVM_KERNEL_TYPE",
      afniModel->combinations, afniModel->kernel_type);        /* JL May 2009 */
  THD_set_int_atr( dsetModel->dblk, "3DSVM_POLYNOMIAL_DEGREE",
      afniModel->combinations, afniModel->polynomial_degree);
  THD_set_float_atr( dsetModel->dblk, "3DSVM_RBF_GAMMA",
      afniModel->combinations, afniModel->rbf_gamma);
  THD_set_float_atr( dsetModel->dblk, "3DSVM_LINEAR_COEFFICIENT",
      afniModel->combinations, afniModel->linear_coefficient);
  THD_set_float_atr( dsetModel->dblk, "3DSVM_CONSTANT_COEFFICIENT",
      afniModel->combinations, afniModel->constant_coefficient);
  THD_set_int_atr( dsetModel->dblk, "3DSVM_TOTAL_MASKED_FEATURES",
      afniModel->combinations, afniModel->total_masked_features);
  THD_set_int_atr( dsetModel->dblk, "3DSVM_TOTAL_SAMPLES",
      afniModel->combinations, afniModel->total_samples);
  THD_set_int_atr( dsetModel->dblk, "3DSVM_TOTAL_SUPPORT_VECTORS",
      afniModel->combinations, afniModel->total_support_vectors);
  THD_set_float_atr( dsetModel->dblk, "3DSVM_B",
      afniModel->combinations, afniModel->b );
  /* JL Oct 2009: */
  THD_set_float_atr( dsetModel->dblk, "3DSVM_EPS",
      afniModel->combinations, afniModel->eps );
  THD_set_float_atr( dsetModel->dblk, "3DSVM_SVM_C",
      afniModel->combinations, afniModel->svm_c );
  THD_set_int_atr( dsetModel->dblk, "3DSVM_BIASED_HYPERPLANE",
      afniModel->combinations, afniModel->biased_hyperplane );
  THD_set_int_atr( dsetModel->dblk, "3DSVM_SKIP_FINAL_OPT_CHECK",
      afniModel->combinations, afniModel->skip_final_opt_check);
  THD_set_int_atr( dsetModel->dblk, "3DSVM_SVM_MAXQPSIZE",
      afniModel->combinations, afniModel->svm_maxqpsize );
  THD_set_int_atr( dsetModel->dblk, "3DSVM_SVM_NEWVARSINQP",
      afniModel->combinations, afniModel->svm_newvarsinqp );
  THD_set_int_atr( dsetModel->dblk, "3DSVM_SVM_ITER_TO_SHRINK",
      afniModel->combinations, afniModel->svm_iter_to_shrink );
  THD_set_float_atr( dsetModel->dblk, "3DSVM_TRANSDUCTION_POSRATIO",
      afniModel->combinations, afniModel->transduction_posratio );
  THD_set_float_atr( dsetModel->dblk, "3DSVM_SVM_COSTRATIO",
      afniModel->combinations, afniModel->svm_costratio );
  THD_set_float_atr( dsetModel->dblk, "3DSVM_SVM_COSTRATIO_UNLAB",
      afniModel->combinations, afniModel->svm_costratio_unlab );
  THD_set_float_atr( dsetModel->dblk, "3DSVM_SVM_UNLABBOUND",
      afniModel->combinations, afniModel->svm_unlabbound );
  THD_set_float_atr( dsetModel->dblk, "3DSVM_EPSILON_A",
      afniModel->combinations, afniModel->epsilon_a );
  THD_set_float_atr( dsetModel->dblk, "3DSVM_EPSILON_CRIT",
      afniModel->combinations, afniModel->epsilon_crit );
  THD_set_int_atr( dsetModel->dblk, "3DSVM_COMPUTE_LOO",
      afniModel->combinations, afniModel->compute_loo );
  THD_set_float_atr( dsetModel->dblk, "3DSVM_RHO",
      afniModel->combinations, afniModel->rho );
  THD_set_int_atr( dsetModel->dblk, "3DSVM_XA_DEPTH",
      afniModel->combinations, afniModel->xa_depth );

  /* one alpha attribute per class combination */
  for(i = 0; i < afniModel->combinations; ++i) {
    snprintf(headernames, LONG_STRING, "3DSVM_ALPHAS_%s",afniModel->combName[i]);
    THD_set_float_atr( dsetModel->dblk, headernames, nalphas, afniModel->alphas[i] );
  }

  /* --- write brick --- */
  fflush(stdout);
  INFO_message( "Writing model dataset: %s...", fileName );
  THD_write_3dim_dataset( "./", fileName, dsetModel, True );

  /* --- free memory ---*/
  IFree(csv_combName);
  IFree(csv_kernelCustom);

  RETURN(0);
}
4046
4047
4048 /* JL May 2009: Added 'ASLoptions *options' to support sv-regression */
/* Copies the results of one svm-light training run (alphas, b, kernel and
 * learn parameters) into the AFNI model structure, recovering the time
 * order of the alphas via the hijacked queryid; optionally writes the
 * alphas to a .1D file. */
void addToAfniModel(AFNI_MODEL *afniModel, MODEL *model, LEARN_PARM *learn_parm,
    LabelType *tmp_labels, ASLoptions *options, long classCount,
    long sampleCount, int comb0, int comb1)
{
  long nsv    = 0;              /* number of support vectors */
  long sv     = 0;              /* index over nsv */
  long nt     = 0;              /* number of timepoints */
  long t      = 0;              /* index over timepoints */
  long qid    = 0;              /* incrementing queryid */

  FILE *fp    = NULL;           /* alpha file output for sv-regression */
  char alphaFile[LONG_STRING];  /* naming of alphafile output */

  ENTRY("addToAfniModel");


  /* JL July 2009: Changed this function to retrieve the alphas directly from
   * the svm-light modelfile. Now, we are assigning a queryid containing the
   * time information to each doc (timepoint), which allows us to retrieve the
   * alphas in time order.
   *
   * JL Aug. 2009: Added alpha file output for sv-regression to this function,
   * since, for sv-regression, svm-light is not writing the alphas in time-
   * order
   *
   * JL Jul. 2014: Changed how alphas are stored for regression and how they
   * are written to file.
   *
   * JL Jul. 2015: Bugfix. Initialized alphas properly. Otherwise, some alphas
   * that should be zero might not be zero. This leads to problems
   * (too many SVs) when the model is read in.
   * Got rid of cAlphas (censored alphas). Unnecessary.
   * Write both sets of alphas for regression even if they are zero.
   */

  /* --- initialization ---*/
  nsv = model->sv_num;
  nt = afniModel->timepoints;

  /* - open file for writing alphas - */
  /* Bugfix: if fopen fails, fp stays NULL; all writes below are now guarded
   * on fp (previously a failed open still led to fprintf(NULL, ...)). */
  if( options->modelAlphaFile[0] ) {
    if (afniModel->class_count > 2) {
      snprintf( alphaFile, LONG_STRING, "%s_%d_%d.1D", options->modelAlphaFile,
          comb0, comb1);
    }
    else {
      snprintf( alphaFile, LONG_STRING, "%s.1D", options->modelAlphaFile);
    }

    if ( (fp=fopen(alphaFile, "w")) == NULL ) {
      ERROR_message("Can not open alphafile: %s for writing", alphaFile);
    }
    else {
      fflush(stdout);
      if ( verbosity >= 1 ) INFO_message("Writing alphafile: %s...", alphaFile);
    }
  }

  /* recover time-order of alphas using quid and write them into
   * afniModel->alphas (index over all time points)
   */

  /* - initialize alphas to zero - */
  if( !strcmp(options->svmType, "regression") ) {
    /* For regression, the arrays holding the alphas is twice as long */
    for( t=0; t<2*nt; ++t ) afniModel->alphas[classCount][t] = 0.0;
  }
  else {
    for( t=0; t<nt; ++t ) afniModel->alphas[classCount][t] = 0.0;
  }

  qid=0;
  for( t=0; t<nt; ++t ) {
    /* only look at non-censored time-points, quid index only runs
     * over non-censored time-points */
    if ( abs((int)rint(tmp_labels[t])) == 1) {

      /* - searching for alpha with (queryid == qid) - */
      for( sv=1; sv<nsv; ++sv) {
        if ( (model->supvec[sv])->queryid == qid) {
          afniModel->alphas[classCount][t] = (float)model->alpha[sv];

          /* - alpha with quid found. Exit loop over sv - */
          break;
        }
      }

      /* - write alpha to file - */
      if( fp != NULL ) {
        fprintf(fp,"%.4g", afniModel->alphas[classCount][t]);
      }

      /* For regression, the number of alphas might double, so the
       * array size for storing the alphas is twice as long (nt*2) as for
       * classification. Continue looping through the svmLight model and
       * keep searching for alphas with given qid. */
      if( !strcmp(options->svmType, "regression") ) {
        ++sv;
        for( ; sv<nsv; ++sv) {
          if( (model->supvec[sv])->queryid == qid ) {
            /* Bugfix: removed a stray duplicate expression statement
             * "(float)model->alpha[sv];" that had no effect. */
            afniModel->alphas[classCount][nt+t] = (float)model->alpha[sv];

            /* - second alpha with quid found. Exit loop over sv - */
            break;
          }
        }
        /* - write second alpha to file - */
        if( fp != NULL ) {
          fprintf(fp,"\t %.4g", afniModel->alphas[classCount][nt+t]);
        }
      }

      /* - done with writing alpha(s) for current timepoint - */
      if( fp != NULL ) fprintf(fp,"\n");

      /* increment qid (quid only runs over non-censored time-points!) */
      ++qid;
    }
    else {
      /* - censored timepoints alpha=0 - */
      if( fp != NULL ) {
        if( !strcmp(options->svmType, "regression") ) {
          fprintf(fp,"%.4g\t %.4g\n", 0.0, 0.0);
        }
        else {
          fprintf(fp,"%.4g\n", 0.0);
        }
      }
    }
  }
  if( fp != NULL ) fclose(fp);

  /* JL Feb. 2009: Added kernel_custom and kernel_type
   *    May. 2009: Added svm_type to support sv-regression
   *    Oct. 2009: Added remaining model parameters that can be specified
   *               via command-line
   *    July 2011: Added max_iterations.
   *
   * TODO: Some parameters such as max_iterations don't change across
   * class combinations, but get assigned here. Need to put them
   * in allocateAfniModel instead.
   */

  snprintf( afniModel->svm_type, LONG_STRING, "%s", options->svmType);
  snprintf( afniModel->combName[classCount], CSV_STRING, "%d_%d", comb0, comb1 );
  snprintf( afniModel->kernel_custom[classCount], CSV_STRING, "%s", model->kernel_parm.custom);
  afniModel->kernel_type[classCount] = model->kernel_parm.kernel_type;
  afniModel->polynomial_degree[classCount] = model->kernel_parm.poly_degree;
  afniModel->rbf_gamma[classCount] = model->kernel_parm.rbf_gamma;
  afniModel->linear_coefficient[classCount] = model->kernel_parm.coef_lin;
  afniModel->constant_coefficient[classCount] = model->kernel_parm.coef_const;
  afniModel->total_masked_features[classCount] = (int) model->totwords;
  afniModel->total_samples[classCount] = (int) model->totdoc;
  afniModel->total_support_vectors[classCount] = (int) model->sv_num;
  afniModel->b[classCount] = model->b;

  /* July 2011: */
  afniModel->max_iterations=learn_parm->max_iterations;

  /* Oct 2009: */
  afniModel->eps[classCount] = learn_parm->eps;
  afniModel->svm_c[classCount] = learn_parm->svm_c;
  afniModel->biased_hyperplane[classCount] = learn_parm->biased_hyperplane;
  afniModel->skip_final_opt_check[classCount] = learn_parm->skip_final_opt_check;
  afniModel->svm_maxqpsize[classCount] = learn_parm->svm_maxqpsize;
  afniModel->svm_newvarsinqp[classCount] = learn_parm->svm_newvarsinqp;
  afniModel->svm_iter_to_shrink[classCount] = learn_parm->svm_iter_to_shrink;
  afniModel->transduction_posratio[classCount] = learn_parm->transduction_posratio;
  afniModel->svm_costratio[classCount] = learn_parm->svm_costratio;
  afniModel->svm_costratio_unlab[classCount] = learn_parm->svm_costratio_unlab;
  afniModel->svm_unlabbound[classCount] = learn_parm->svm_unlabbound;
  afniModel->epsilon_a[classCount] = learn_parm->epsilon_a;
  afniModel->epsilon_crit[classCount] = learn_parm->epsilon_crit;
  afniModel->compute_loo[classCount] = learn_parm->compute_loo;
  afniModel->rho[classCount] = learn_parm->rho;
  afniModel->xa_depth[classCount] = learn_parm->xa_depth;

  EXRETURN;
}
4231
4232 /* JL Apr 2010: This function takes a dataset array and returns a dataset array
4233 * without censored time-points.
4234 *
4235 * For regression a few things can be simplified:
4236 * - we are only supporting censoring with a separate censor file (not 9999s)
4237 * - we don't have to worry about multi-class.
4238 *
4239 */
DatasetType** getAllocateCensoredRegressionArray(DatasetType **dsetArray,
    LABELS *labels, long nvox)
{
  long nTotal     = 0;     /* number of timepoints (incl. censored) */
  long nCensored  = 0;     /* number of censored timepoints */
  long src        = 0;     /* source timepoint index */
  long dst        = 0;     /* destination (non-censored) index */
  long v          = 0;     /* voxel index */

  DatasetType **result = NULL;


  ENTRY("getAllocateCensoredRegressionArray");


  /* JL July 2011: Return NULL if memory can not be allocated */

  nTotal = labels->n;
  nCensored = labels->n_cnsrs;

  /* allocate room for the uncensored timepoints only */
  if( (result = Allocate2DT(nTotal - nCensored, nvox)) == NULL ) {
    RETURN(NULL);
  }

  /* copy every timepoint whose censor entry is non-zero (i.e. kept) */
  for( src=0; src<nTotal; ++src ) {
    if( labels->cnsrs[src] == 0 ) continue;

    for( v=0; v<nvox; ++v ) {
      result[dst][v] = dsetArray[src][v];
    }
    ++dst;
  }

  RETURN(result);
}
4277
4278 /* JL July 2011: Added the corresponding free to
4279 * getAllocateCensoredRegressionArray
4280 */
void freeCensoredRegressionArray(DatasetType **dsetArray, LABELS *labels) {

  long nRows = 0;   /* rows allocated by getAllocateCensoredRegressionArray */

  ENTRY("freeCensoredRegressionArray");

  /* must match the row count used at allocation time:
   * total timepoints minus censored timepoints */
  nRows = labels->n - labels->n_cnsrs;
  free2DT(dsetArray, nRows);

  EXRETURN;
}
4289
4290
4291 /* JL Apr. 2010: This function retrieves the training array and the targets
4292 * for the current class-combination based on censoredTarget
4293 *
4294 * Note: We use 9999 as an input from the user to ignore time-points, but we
4295 * also use 9999 for internal purposes to exclude time-points, that do not belong
4296 * to the current class-combination
4297 */
void getClassTrainArrayAndTarget(DatasetType **dsetTrainArray,
    LabelType *censoredTarget, DatasetType **dsetClassTrainArray,
    LabelType *classTarget, long nt, long nvox)
{

  long v    = 0;   /* voxel index */
  long t    = 0;   /* timepoint index (all timepoints) */
  long kept = 0;   /* index over retained (non-censored) timepoints */


  ENTRY("getClassTrainArrayAndTarget");

  for( t=0; t<nt; ++t ) {
    /* 9999 marks samples to be ignored: censored by the user or not
     * belonging to the current class combination */
    if( censoredTarget[t] == 9999 ) continue;

    /* -- set target value for svm-light (+1,-1, or 0) -- */
    classTarget[kept] = censoredTarget[t];

    /* copy the corresponding timepoint's voxel data */
    for( v=0; v<nvox; ++v ) {
      dsetClassTrainArray[kept][v] = dsetTrainArray[t][v];
    }
    ++kept;
  }

  EXRETURN;
}
4327
4328 /* JL Feb. 2009: Added 'ASLoptions *options' as an argument to
4329 * support handling of complex-valued data */
void afni_dset_to_svm_doc( DOC *docs, DatasetType **dsetArray,
    MaskType* maskArray, long tpts, long nvoxels, long nmasked )
{
  long t, vox, feat;   /* timepoint, voxel and feature indices */

  /* JL and SL July 2009: The time order is stored in the queryid of the
   * DOC structure (we hijacked this for our own evil purposes), which
   * allows us to retrieve the time order of the support vectors and
   * alphas after training. Queryid only is used in svm-light's ranking,
   * so we can use this entry for classification and regression. */

  ENTRY("afni_dset_to_svm_doc");

  for( t=0; t < tpts; ++t ) {
    docs[t].docnum = t;
    /* docs[t].queryid = 0; */
    docs[t].queryid = t;                  /* we hijacked here */
    docs[t].costfactor = 1;
    docs[t].words[nmasked].wnum = 0;      /* svmLight stop signal */

    if( maskArray ) {
      /* only voxels inside the mask become features */
      feat = 0;
      for( vox=0 ; vox<nvoxels ; ++vox) {
        if( !maskArray[vox] ) continue;

        docs[t].words[feat].wnum = feat+1;
        docs[t].words[feat].weight = (FVAL) dsetArray[t][vox]; /*FVAL is svmLight defined*/
        ++feat;
      }
    }
    else {
      /* no mask: every voxel is a feature */
      for( vox=0 ; vox<nvoxels ; ++vox) {
        docs[t].words[vox].wnum = vox+1;
        docs[t].words[vox].weight = (FVAL) dsetArray[t][vox];
      }
    }
    docs[t].twonorm_sq = sprod_ss(&docs[t].words[0],&docs[t].words[0]);
  }

  EXRETURN;
}
4372
4373 /*-----------------------------------------------------------*/
4374 /* JL Apr. 2010: This function was previously named getTmpLabels. Changed name
4375 * to reflect that time-points which do no belong to the current class-combination
4376 * are censored (labeled with 9999).
4377 *
4378 */
int getCensoredClassTarget(LabelType *censoredTarget, long *sampleCount,
    LABELS *labels, long classIndex0, long classIndex1, enum modes mode,
    char *errorString)
{
  long t = 0;              /* timepoint index */
  short badLabelSeen = 0;  /* warn users if unknown class label
                              - probably from multi-class */
  int classA = 0;
  int classB = 0;
  *sampleCount = 0;


  /*
   * JL June 2011: Modified error handling:
   * Replaced ERROR_exit by RETURN(1), passing error message as errorString
   * to the calling function;
   *
   */

  ENTRY("getCensoredClassTarget");

  /* resolve the pair of class labels, depending on mode */
  if (mode == TRAIN) {
    classA = labels->class_list[classIndex0];
    classB = labels->class_list[classIndex1];
  }
  else if (mode == TEST) {
    classA = classIndex0;
    classB = classIndex1;
  }
  else { /* We should never get here */
    snprintf(errorString, LONG_STRING,
        "What happened?! getCensoredClassTarget: unknown mode!");
    RETURN(1);
  }

  if(verbosity >= 2) printf("++ ");

  /* convert timeseries input to one that can be used with svm light */
  for( t=0 ; t<labels->n ; ++t) {
    if( !(int)(labels->cnsrs[t]) ) {
      censoredTarget[t] = 9999.0;           /* censored sample - ignore */
      continue;
    }

    if(labels->lbls[t] == classA) {         /* class a */
      censoredTarget[t] = -1.0;
      (*sampleCount)++;
    }
    else if(labels->lbls[t] == classB) {    /* class b */
      censoredTarget[t] = 1.0;
      (*sampleCount)++;
    }
    else if( labels->lbls[t] == -9999 ) {   /* transductive sample */
      censoredTarget[t] = 0.0;
      (*sampleCount)++;
    }
    else if( labels->lbls[t] == 9999 ) {    /* ignore sample */
      censoredTarget[t] = 9999.0;
    }
    else {
      censoredTarget[t] = 9999.0;           /* invalid value - ignore */
      badLabelSeen = 1;
      if(verbosity >= 2) printf("%ld,", t); /* ignored time point */
    }
  }

  if( badLabelSeen && (verbosity >= 1) ) {
    INFO_message("Time points ignored. If not using multi-class, check for bad labels.");
  }


  RETURN(0);
}
4451
void freeAfniModel(AFNI_MODEL *afniModel)
{

  long maxCombinations = CLASS_MAX*(CLASS_MAX-1)/2;

  ENTRY("freeAfniModel");

  /* nothing to do for a NULL model */
  if( afniModel == NULL ) EXRETURN;

  /* members present in every model version */
  IFree( afniModel->kernel_type );
  IFree( afniModel->polynomial_degree );
  IFree( afniModel->rbf_gamma );
  IFree( afniModel->linear_coefficient );
  IFree( afniModel->constant_coefficient );
  IFree( afniModel->total_masked_features );
  IFree( afniModel->total_samples );
  IFree( afniModel->total_support_vectors );
  IFree( afniModel->b );
  free2f(afniModel->alphas, (long) afniModel->combinations);
  free2c(afniModel->combName, maxCombinations);

  /* Oct. 2008: custom kernel names exist since version 0.80 */
  if( afniModel->version >= 0.80 ) {
    free2c(afniModel->kernel_custom, maxCombinations);
  }

  /* JL Nov 2009: per-combination learn parameters exist since version 1.10 */
  if( afniModel->version >= 1.10 ) {
    IFree( afniModel->eps );
    IFree( afniModel->svm_c );
    IFree( afniModel->biased_hyperplane );
    IFree( afniModel->skip_final_opt_check );
    IFree( afniModel->svm_maxqpsize );
    IFree( afniModel->svm_newvarsinqp );
    IFree( afniModel->svm_iter_to_shrink );
    IFree( afniModel->transduction_posratio );
    IFree( afniModel->svm_costratio );
    IFree( afniModel->svm_costratio_unlab );
    IFree( afniModel->svm_unlabbound );
    IFree( afniModel->epsilon_a );
    IFree( afniModel->epsilon_crit );
    IFree( afniModel->compute_loo );
    IFree( afniModel->rho );
    IFree( afniModel->xa_depth );
  }

  EXRETURN;
}
4500
allocateAfniModel(AFNI_MODEL * afniModel,LABELS * labels,ASLoptions * options,char * errorString)4501 int allocateAfniModel(AFNI_MODEL *afniModel, LABELS *labels,
4502 ASLoptions *options, char *errorString)
4503 {
4504 long max_comb = CLASS_MAX*(CLASS_MAX-1)/2;
4505 long nalphas = 0;
4506
4507 ENTRY("allocateAfniModel");
4508
4509
4510 /* JL June 2009: Enabled sv-regression.
4511 *
4512 * JL June 2011: Modified error handling: Passing error string as argument
4513 * to the calling function, allocated memory is freed, RETURN(1)
4514 * instead of ERROR_exit. Checking each malloc individually.
4515 *
4516 * JL July 2014: Changed allocation of array holding alphas for regression
4517 *
4518 */
4519
4520 /* alpha and alpha* might be stored separately for sv-regression,
4521 * so the array being allocated to store them needs to be twice as long */
4522 if( !strcmp(options->svmType, "regression") ) {
4523 nalphas = (int) labels->n*2;
4524 }
4525 else if( !strcmp(options->svmType, "classification") ) {
4526 nalphas = (int) labels->n;
4527 }
4528 else {
4529 /* should never get here */
4530 snprintf(errorString, LONG_STRING, "allocateAfniModel: SVM type unknown!");
4531 RETURN(1);
4532 }
4533
4534 afniModel->timepoints = (int) labels->n;
4535 /* would like to be long, but no equivalent to THD_set_int_atr */
4536 afniModel->class_count = (int) labels->n_classes;
4537 /* would like to be long, but no equivalent to THD_set_int_atr */
4538 afniModel->combinations = (long) ( (labels->n_classes * (labels->n_classes - 1)) / 2 );
4539 if( (afniModel->kernel_type = (int *)malloc( afniModel->combinations * sizeof(int))) == NULL ) {
4540 snprintf(errorString, LONG_STRING, "allocateAfniModel: Memory allocation for kernel_type failed!");
4541
4542 /* return */
4543 RETURN(1);
4544 }
4545 if( (afniModel->polynomial_degree = (int *)malloc( afniModel->combinations * sizeof(int))) == NULL ) {
4546 snprintf(errorString, LONG_STRING, "allocateAfniModel: Memory allocation for polynomial_degree failed!");
4547
4548 /* free and return */
4549 IFree(afniModel->kernel_type);
4550 RETURN(1);
4551 }
4552
4553 if( (afniModel->rbf_gamma = (float *)malloc( afniModel->combinations * sizeof(float))) == NULL ) {
4554 snprintf(errorString, LONG_STRING, "allocateAfniModel: Memory allocation for rbf_gamma failed!");
4555
4556 /* free and return */
4557 IFree(afniModel->kernel_type);
4558 IFree(afniModel->polynomial_degree);
4559 RETURN(1);
4560 }
4561
4562 if( (afniModel->linear_coefficient = (float *)malloc( afniModel->combinations * sizeof(float))) == NULL ) {
4563 snprintf(errorString, LONG_STRING, "allocateAfniModel: Memory allocation for linear_coefficient failed!");
4564
4565 /* free and return */
4566 IFree(afniModel->kernel_type);
4567 IFree(afniModel->polynomial_degree);
4568 IFree(afniModel->rbf_gamma);
4569 RETURN(1);
4570 }
4571
4572
4573 if( (afniModel->constant_coefficient = (float *)malloc( afniModel->combinations * sizeof(float))) == NULL ) {
4574 snprintf(errorString, LONG_STRING, "allocateAfniModel: Memory allocation for constant_coefficient failed!");
4575
4576 /* free and return */
4577 IFree(afniModel->kernel_type);
4578 IFree(afniModel->polynomial_degree);
4579 IFree(afniModel->rbf_gamma);
4580 IFree(afniModel->linear_coefficient);
4581 RETURN(1);
4582 }
4583
4584 if( (afniModel->total_masked_features = (int *)malloc( afniModel->combinations * sizeof(int))) == NULL ) {
4585 snprintf(errorString, LONG_STRING, "allocateAfniModel: Memory allocation for total_masked_features failed!");
4586
4587 /* free and return */
4588 IFree(afniModel->kernel_type);
4589 IFree(afniModel->polynomial_degree);
4590 IFree(afniModel->rbf_gamma);
4591 IFree(afniModel->linear_coefficient);
4592 IFree(afniModel->constant_coefficient);
4593 RETURN(1);
4594 }
4595
4596 if( (afniModel->total_samples = (int *)malloc( afniModel->combinations * sizeof(int))) == NULL ) {
4597 snprintf(errorString, LONG_STRING, "allocateAfniModel: Memory allocation for total_samples failed!");
4598
4599 /* free and return */
4600 IFree(afniModel->kernel_type);
4601 IFree(afniModel->polynomial_degree);
4602 IFree(afniModel->rbf_gamma);
4603 IFree(afniModel->linear_coefficient);
4604 IFree(afniModel->constant_coefficient);
4605 IFree(afniModel->total_masked_features);
4606 RETURN(1);
4607 }
4608
4609 if( (afniModel->total_support_vectors = (int *)malloc( afniModel->combinations * sizeof(int))) == NULL ) {
4610 snprintf(errorString, LONG_STRING, "allocateAfniModel: Memory allocation for total_support_vectors failed!");
4611
4612 /* free and return */
4613 IFree(afniModel->kernel_type);
4614 IFree(afniModel->polynomial_degree);
4615 IFree(afniModel->rbf_gamma);
4616 IFree(afniModel->linear_coefficient);
4617 IFree(afniModel->constant_coefficient);
4618 IFree(afniModel->total_masked_features);
4619 IFree(afniModel->total_samples);
4620 RETURN(1);
4621 }
4622
4623 if( (afniModel->b = (float *)malloc( afniModel->combinations * sizeof(float))) == NULL ) {
4624 snprintf(errorString, LONG_STRING, "allocateAfniModel: Memory allocation for b failed!");
4625
4626 /* free and return */
4627 IFree(afniModel->kernel_type);
4628 IFree(afniModel->polynomial_degree);
4629 IFree(afniModel->rbf_gamma);
4630 IFree(afniModel->linear_coefficient);
4631 IFree(afniModel->constant_coefficient);
4632 IFree(afniModel->total_masked_features);
4633 IFree(afniModel->total_samples);
4634 IFree(afniModel->total_support_vectors);
4635 RETURN(1);
4636 }
4637
4638 if( (afniModel->alphas = Allocate2f((long) afniModel->combinations, nalphas)) == NULL ) {
4639 snprintf(errorString, LONG_STRING, "allocateAfniModel: Memory allocation for alphas failed!");
4640
4641 /* free and return */
4642 IFree(afniModel->kernel_type);
4643 IFree(afniModel->polynomial_degree);
4644 IFree(afniModel->rbf_gamma);
4645 IFree(afniModel->linear_coefficient);
4646 IFree(afniModel->constant_coefficient);
4647 IFree(afniModel->total_masked_features);
4648 IFree(afniModel->total_samples);
4649 IFree(afniModel->total_support_vectors);
4650 IFree(afniModel->b);
4651 RETURN(1);
4652 }
4653
4654
4655
4656 /* JL Nov 2009: Added model parameters */
4657 if( (afniModel->eps = (float *)malloc( afniModel->combinations * sizeof(float) )) == NULL ) {
4658 snprintf(errorString, LONG_STRING, "allocateAfniModel: Memory allocation for eps failed!");
4659
4660 /* free and return */
4661 IFree(afniModel->kernel_type);
4662 IFree(afniModel->polynomial_degree);
4663 IFree(afniModel->rbf_gamma);
4664 IFree(afniModel->linear_coefficient);
4665 IFree(afniModel->constant_coefficient);
4666 IFree(afniModel->total_masked_features);
4667 IFree(afniModel->total_samples);
4668 IFree(afniModel->total_support_vectors);
4669 IFree(afniModel->b);
4670 free2f(afniModel->alphas, (long) afniModel->combinations);
4671 RETURN(1);
4672 }
4673
4674 if( (afniModel->svm_c = (float *)malloc( afniModel->combinations * sizeof(float) )) == NULL ) {
4675 snprintf(errorString, LONG_STRING, "allocateAfniModel: Memory allocation for svm_c!");
4676
4677 /* free and return */
4678 IFree(afniModel->kernel_type);
4679 IFree(afniModel->polynomial_degree);
4680 IFree(afniModel->rbf_gamma);
4681 IFree(afniModel->linear_coefficient);
4682 IFree(afniModel->constant_coefficient);
4683 IFree(afniModel->total_masked_features);
4684 IFree(afniModel->total_samples);
4685 IFree(afniModel->total_support_vectors);
4686 IFree(afniModel->b);
4687 free2f(afniModel->alphas, (long) afniModel->combinations);
4688 IFree(afniModel->eps);
4689 RETURN(1);
4690 }
4691
4692 if( (afniModel->biased_hyperplane = (int *)malloc( afniModel->combinations * sizeof(int) )) == NULL ) {
4693 snprintf(errorString, LONG_STRING, "allocateAfniModel: Memory allocation for biased_hyperplane failed!");
4694
4695 /* free and return */
4696 IFree(afniModel->kernel_type);
4697 IFree(afniModel->polynomial_degree);
4698 IFree(afniModel->rbf_gamma);
4699 IFree(afniModel->linear_coefficient);
4700 IFree(afniModel->constant_coefficient);
4701 IFree(afniModel->total_masked_features);
4702 IFree(afniModel->total_samples);
4703 IFree(afniModel->total_support_vectors);
4704 IFree(afniModel->b);
4705 free2f(afniModel->alphas, (long) afniModel->combinations);
4706 IFree(afniModel->eps);
4707 IFree(afniModel->svm_c);
4708 RETURN(1);
4709 }
4710
4711 if( (afniModel->skip_final_opt_check = (int *)malloc( afniModel->combinations * sizeof(int) )) == NULL ) {
4712 snprintf(errorString, LONG_STRING, "allocateAfniModel: Memory allocation for skip_final_opt_check failed!");
4713
4714 /* free and return */
4715 IFree(afniModel->kernel_type);
4716 IFree(afniModel->polynomial_degree);
4717 IFree(afniModel->rbf_gamma);
4718 IFree(afniModel->linear_coefficient);
4719 IFree(afniModel->constant_coefficient);
4720 IFree(afniModel->total_masked_features);
4721 IFree(afniModel->total_samples);
4722 IFree(afniModel->total_support_vectors);
4723 IFree(afniModel->b);
4724 free2f(afniModel->alphas, (long) afniModel->combinations);
4725 IFree(afniModel->eps);
4726 IFree(afniModel->svm_c);
4727 IFree(afniModel->biased_hyperplane);
4728 RETURN(1);
4729 }
4730
4731 if( (afniModel->svm_maxqpsize = (int *)malloc( afniModel->combinations * sizeof(int))) == NULL ) {
4732 snprintf(errorString, LONG_STRING, "allocateAfniModel: Memory allocation for svm_maxqpsize failed!");
4733
4734 /* free and return */
4735 IFree(afniModel->kernel_type);
4736 IFree(afniModel->polynomial_degree);
4737 IFree(afniModel->rbf_gamma);
4738 IFree(afniModel->linear_coefficient);
4739 IFree(afniModel->constant_coefficient);
4740 IFree(afniModel->total_masked_features);
4741 IFree(afniModel->total_samples);
4742 IFree(afniModel->total_support_vectors);
4743 IFree(afniModel->b);
4744 free2f(afniModel->alphas, (long) afniModel->combinations);
4745 IFree(afniModel->eps);
4746 IFree(afniModel->svm_c);
4747 IFree(afniModel->biased_hyperplane);
4748 IFree(afniModel->skip_final_opt_check);
4749 RETURN(1);
4750 }
4751
4752 if( (afniModel->svm_newvarsinqp = (int *)malloc( afniModel->combinations * sizeof(int))) == NULL ) {
4753 snprintf(errorString, LONG_STRING, "allocateAfniModel: Memory allocation for svm_newvarsinqp failed!");
4754
4755 /* free and return */
4756 IFree(afniModel->kernel_type);
4757 IFree(afniModel->polynomial_degree);
4758 IFree(afniModel->rbf_gamma);
4759 IFree(afniModel->linear_coefficient);
4760 IFree(afniModel->constant_coefficient);
4761 IFree(afniModel->total_masked_features);
4762 IFree(afniModel->total_samples);
4763 IFree(afniModel->total_support_vectors);
4764 IFree(afniModel->b);
4765 free2f(afniModel->alphas, (long) afniModel->combinations);
4766 IFree(afniModel->eps);
4767 IFree(afniModel->svm_c);
4768 IFree(afniModel->biased_hyperplane);
4769 IFree(afniModel->skip_final_opt_check);
4770 IFree(afniModel->svm_maxqpsize);
4771 RETURN(1);
4772 }
4773
4774 if( (afniModel->svm_iter_to_shrink = (int *)malloc( afniModel->combinations * sizeof(int))) == NULL ) {
4775 snprintf(errorString, LONG_STRING, "allocateAfniModel: Memory allocation for svm_iter_to_shrink failed!");
4776
4777 /* free and return */
4778 IFree(afniModel->kernel_type);
4779 IFree(afniModel->polynomial_degree);
4780 IFree(afniModel->rbf_gamma);
4781 IFree(afniModel->linear_coefficient);
4782 IFree(afniModel->constant_coefficient);
4783 IFree(afniModel->total_masked_features);
4784 IFree(afniModel->total_samples);
4785 IFree(afniModel->total_support_vectors);
4786 IFree(afniModel->b);
4787 free2f(afniModel->alphas, (long) afniModel->combinations);
4788 IFree(afniModel->eps);
4789 IFree(afniModel->svm_c);
4790 IFree(afniModel->biased_hyperplane);
4791 IFree(afniModel->skip_final_opt_check);
4792 IFree(afniModel->svm_maxqpsize);
4793 IFree(afniModel->svm_newvarsinqp);
4794 RETURN(1);
4795 }
4796
4797 if( (afniModel->transduction_posratio = (float *)malloc( afniModel->combinations * sizeof(float))) == NULL ) {
4798 snprintf(errorString, LONG_STRING, "allocateAfniModel: Memory allocation for transduction_posratio failed!");
4799
4800 /* free and return */
4801 IFree(afniModel->kernel_type);
4802 IFree(afniModel->polynomial_degree);
4803 IFree(afniModel->rbf_gamma);
4804 IFree(afniModel->linear_coefficient);
4805 IFree(afniModel->constant_coefficient);
4806 IFree(afniModel->total_masked_features);
4807 IFree(afniModel->total_samples);
4808 IFree(afniModel->total_support_vectors);
4809 IFree(afniModel->b);
4810 free2f(afniModel->alphas, (long) afniModel->combinations);
4811 IFree(afniModel->eps);
4812 IFree(afniModel->svm_c);
4813 IFree(afniModel->biased_hyperplane);
4814 IFree(afniModel->skip_final_opt_check);
4815 IFree(afniModel->svm_maxqpsize);
4816 IFree(afniModel->svm_newvarsinqp);
4817 IFree(afniModel->svm_iter_to_shrink);
4818 RETURN(1);
4819 }
4820
4821 if( (afniModel->svm_costratio = (float *)malloc( afniModel->combinations * sizeof(float))) == NULL ) {
4822 snprintf(errorString, LONG_STRING, "allocateAfniModel: Memory allocation for svm_costratio failed!");
4823
4824 /* free and return */
4825 IFree(afniModel->kernel_type);
4826 IFree(afniModel->polynomial_degree);
4827 IFree(afniModel->rbf_gamma);
4828 IFree(afniModel->linear_coefficient);
4829 IFree(afniModel->constant_coefficient);
4830 IFree(afniModel->total_masked_features);
4831 IFree(afniModel->total_samples);
4832 IFree(afniModel->total_support_vectors);
4833 IFree(afniModel->b);
4834 free2f(afniModel->alphas, (long) afniModel->combinations);
4835 IFree(afniModel->eps);
4836 IFree(afniModel->svm_c);
4837 IFree(afniModel->biased_hyperplane);
4838 IFree(afniModel->skip_final_opt_check);
4839 IFree(afniModel->svm_maxqpsize);
4840 IFree(afniModel->svm_newvarsinqp);
4841 IFree(afniModel->svm_iter_to_shrink);
4842 IFree(afniModel->transduction_posratio);
4843 RETURN(1);
4844 }
4845
4846 if( (afniModel->svm_costratio_unlab = (float *)malloc( afniModel->combinations * sizeof(float))) == NULL ) {
4847 snprintf(errorString, LONG_STRING, "allocateAfniModel: Memory allocation for svm_costratio_unlab failed!");
4848
4849 /* free and return */
4850 IFree(afniModel->kernel_type);
4851 IFree(afniModel->polynomial_degree);
4852 IFree(afniModel->rbf_gamma);
4853 IFree(afniModel->linear_coefficient);
4854 IFree(afniModel->constant_coefficient);
4855 IFree(afniModel->total_masked_features);
4856 IFree(afniModel->total_samples);
4857 IFree(afniModel->total_support_vectors);
4858 IFree(afniModel->b);
4859 free2f(afniModel->alphas, (long) afniModel->combinations);
4860 IFree(afniModel->eps);
4861 IFree(afniModel->svm_c);
4862 IFree(afniModel->biased_hyperplane);
4863 IFree(afniModel->skip_final_opt_check);
4864 IFree(afniModel->svm_maxqpsize);
4865 IFree(afniModel->svm_newvarsinqp);
4866 IFree(afniModel->svm_iter_to_shrink);
4867 IFree(afniModel->transduction_posratio);
4868 IFree(afniModel->svm_costratio);
4869 RETURN(1);
4870 }
4871
4872 if( (afniModel->svm_unlabbound = (float *)malloc( afniModel->combinations * sizeof(float))) == NULL ) {
4873 snprintf(errorString, LONG_STRING, "allocateAfniModel: Memory allocation for svm_unlabbound failed!");
4874
4875 /* free and return */
4876 IFree(afniModel->kernel_type);
4877 IFree(afniModel->polynomial_degree);
4878 IFree(afniModel->rbf_gamma);
4879 IFree(afniModel->linear_coefficient);
4880 IFree(afniModel->constant_coefficient);
4881 IFree(afniModel->total_masked_features);
4882 IFree(afniModel->total_samples);
4883 IFree(afniModel->total_support_vectors);
4884 IFree(afniModel->b);
4885 free2f(afniModel->alphas, (long) afniModel->combinations);
4886 IFree(afniModel->eps);
4887 IFree(afniModel->svm_c);
4888 IFree(afniModel->biased_hyperplane);
4889 IFree(afniModel->skip_final_opt_check);
4890 IFree(afniModel->svm_maxqpsize);
4891 IFree(afniModel->svm_newvarsinqp);
4892 IFree(afniModel->svm_iter_to_shrink);
4893 IFree(afniModel->transduction_posratio);
4894 IFree(afniModel->svm_costratio);
4895 IFree(afniModel->svm_costratio_unlab);
4896 RETURN(1);
4897 }
4898
4899 if( (afniModel->epsilon_a = (float *)malloc( afniModel->combinations * sizeof(float))) == NULL ) {
4900 snprintf(errorString, LONG_STRING, "allocateAfniModel: Memory allocation for epsilon_a failed!");
4901
4902 /* free and return */
4903 IFree(afniModel->kernel_type);
4904 IFree(afniModel->polynomial_degree);
4905 IFree(afniModel->rbf_gamma);
4906 IFree(afniModel->linear_coefficient);
4907 IFree(afniModel->constant_coefficient);
4908 IFree(afniModel->total_masked_features);
4909 IFree(afniModel->total_samples);
4910 IFree(afniModel->total_support_vectors);
4911 IFree(afniModel->b);
4912 free2f(afniModel->alphas, (long) afniModel->combinations);
4913 IFree(afniModel->eps);
4914 IFree(afniModel->svm_c);
4915 IFree(afniModel->biased_hyperplane);
4916 IFree(afniModel->skip_final_opt_check);
4917 IFree(afniModel->svm_maxqpsize);
4918 IFree(afniModel->svm_newvarsinqp);
4919 IFree(afniModel->svm_iter_to_shrink);
4920 IFree(afniModel->transduction_posratio);
4921 IFree(afniModel->svm_costratio);
4922 IFree(afniModel->svm_costratio_unlab);
4923 IFree(afniModel->svm_unlabbound);
4924 RETURN(1);
4925 }
4926
4927 if( (afniModel->epsilon_crit = (float *)malloc( afniModel->combinations * sizeof(float))) == NULL ) {
4928 snprintf(errorString, LONG_STRING, "allocateAfniModel: Memory allocation for epsilon_crit failed!");
4929
4930 /* free and return */
4931 IFree(afniModel->kernel_type);
4932 IFree(afniModel->polynomial_degree);
4933 IFree(afniModel->rbf_gamma);
4934 IFree(afniModel->linear_coefficient);
4935 IFree(afniModel->constant_coefficient);
4936 IFree(afniModel->total_masked_features);
4937 IFree(afniModel->total_samples);
4938 IFree(afniModel->total_support_vectors);
4939 IFree(afniModel->b);
4940 free2f(afniModel->alphas, (long) afniModel->combinations);
4941 IFree(afniModel->eps);
4942 IFree(afniModel->svm_c);
4943 IFree(afniModel->biased_hyperplane);
4944 IFree(afniModel->skip_final_opt_check);
4945 IFree(afniModel->svm_maxqpsize);
4946 IFree(afniModel->svm_newvarsinqp);
4947 IFree(afniModel->svm_iter_to_shrink);
4948 IFree(afniModel->transduction_posratio);
4949 IFree(afniModel->svm_costratio);
4950 IFree(afniModel->svm_costratio_unlab);
4951 IFree(afniModel->svm_unlabbound);
4952 IFree(afniModel->epsilon_a);
4953 RETURN(1);
4954 }
4955
4956 if( (afniModel->compute_loo = (int *)malloc( afniModel->combinations * sizeof(int))) == NULL ) {
4957 snprintf(errorString, LONG_STRING, "allocateAfniModel: Memory allocation for compute_loo failed!");
4958
4959 /* free and return */
4960 IFree(afniModel->kernel_type);
4961 IFree(afniModel->polynomial_degree);
4962 IFree(afniModel->rbf_gamma);
4963 IFree(afniModel->linear_coefficient);
4964 IFree(afniModel->constant_coefficient);
4965 IFree(afniModel->total_masked_features);
4966 IFree(afniModel->total_samples);
4967 IFree(afniModel->total_support_vectors);
4968 IFree(afniModel->b);
4969 free2f(afniModel->alphas, (long) afniModel->combinations);
4970 IFree(afniModel->eps);
4971 IFree(afniModel->svm_c);
4972 IFree(afniModel->biased_hyperplane);
4973 IFree(afniModel->skip_final_opt_check);
4974 IFree(afniModel->svm_maxqpsize);
4975 IFree(afniModel->svm_newvarsinqp);
4976 IFree(afniModel->svm_iter_to_shrink);
4977 IFree(afniModel->transduction_posratio);
4978 IFree(afniModel->svm_costratio);
4979 IFree(afniModel->svm_costratio_unlab);
4980 IFree(afniModel->svm_unlabbound);
4981 IFree(afniModel->epsilon_a);
4982 IFree(afniModel->epsilon_crit);
4983 RETURN(1);
4984 }
4985
4986 if( (afniModel->rho = (float *)malloc( afniModel->combinations * sizeof(float))) == NULL ) {
4987 snprintf(errorString, LONG_STRING, "allocateAfniModel: Memory allocation for rho failed!");
4988
4989 /* free and return */
4990 IFree(afniModel->kernel_type);
4991 IFree(afniModel->polynomial_degree);
4992 IFree(afniModel->rbf_gamma);
4993 IFree(afniModel->linear_coefficient);
4994 IFree(afniModel->constant_coefficient);
4995 IFree(afniModel->total_masked_features);
4996 IFree(afniModel->total_samples);
4997 IFree(afniModel->total_support_vectors);
4998 IFree(afniModel->b);
4999 free2f(afniModel->alphas, (long) afniModel->combinations);
5000 IFree(afniModel->eps);
5001 IFree(afniModel->svm_c);
5002 IFree(afniModel->biased_hyperplane);
5003 IFree(afniModel->skip_final_opt_check);
5004 IFree(afniModel->svm_maxqpsize);
5005 IFree(afniModel->svm_newvarsinqp);
5006 IFree(afniModel->svm_iter_to_shrink);
5007 IFree(afniModel->transduction_posratio);
5008 IFree(afniModel->svm_costratio);
5009 IFree(afniModel->svm_costratio_unlab);
5010 IFree(afniModel->svm_unlabbound);
5011 IFree(afniModel->epsilon_a);
5012 IFree(afniModel->epsilon_crit);
5013 IFree(afniModel->compute_loo);
5014 RETURN(1);
5015 }
5016
5017 if( (afniModel->xa_depth = (int *)malloc( afniModel->combinations * sizeof(int))) == NULL ) {
5018 snprintf(errorString, LONG_STRING, "allocateAfniModel: Memory allocation for xa_depth failed!");
5019
5020 /* free and return */
5021 IFree(afniModel->kernel_type);
5022 IFree(afniModel->polynomial_degree);
5023 IFree(afniModel->rbf_gamma);
5024 IFree(afniModel->linear_coefficient);
5025 IFree(afniModel->constant_coefficient);
5026 IFree(afniModel->total_masked_features);
5027 IFree(afniModel->total_samples);
5028 IFree(afniModel->total_support_vectors);
5029 IFree(afniModel->b);
5030 free2f(afniModel->alphas, (long) afniModel->combinations);
5031 IFree(afniModel->eps);
5032 IFree(afniModel->svm_c);
5033 IFree(afniModel->biased_hyperplane);
5034 IFree(afniModel->skip_final_opt_check);
5035 IFree(afniModel->svm_maxqpsize);
5036 IFree(afniModel->svm_newvarsinqp);
5037 IFree(afniModel->svm_iter_to_shrink);
5038 IFree(afniModel->transduction_posratio);
5039 IFree(afniModel->svm_costratio);
5040 IFree(afniModel->svm_costratio_unlab);
5041 IFree(afniModel->svm_unlabbound);
5042 IFree(afniModel->epsilon_a);
5043 IFree(afniModel->epsilon_crit);
5044 IFree(afniModel->compute_loo);
5045 IFree(afniModel->rho);
5046 RETURN(1);
5047 }
5048
5049 /* JL Apr 2010: Changed to dynamic allocation */
5050 if( (afniModel->combName = Allocate2c(max_comb, (long)CSV_STRING)) == NULL ) {
5051 snprintf(errorString, LONG_STRING, "allocateAfniModel: Memory allocation for combName failed!");
5052
5053 /* free and return */
5054 IFree(afniModel->kernel_type);
5055 IFree(afniModel->polynomial_degree);
5056 IFree(afniModel->rbf_gamma);
5057 IFree(afniModel->linear_coefficient);
5058 IFree(afniModel->constant_coefficient);
5059 IFree(afniModel->total_masked_features);
5060 IFree(afniModel->total_samples);
5061 IFree(afniModel->total_support_vectors);
5062 IFree(afniModel->b);
5063 free2f(afniModel->alphas, (long) afniModel->combinations);
5064 IFree(afniModel->eps);
5065 IFree(afniModel->svm_c);
5066 IFree(afniModel->biased_hyperplane);
5067 IFree(afniModel->skip_final_opt_check);
5068 IFree(afniModel->svm_maxqpsize);
5069 IFree(afniModel->svm_newvarsinqp);
5070 IFree(afniModel->svm_iter_to_shrink);
5071 IFree(afniModel->transduction_posratio);
5072 IFree(afniModel->svm_costratio);
5073 IFree(afniModel->svm_costratio_unlab);
5074 IFree(afniModel->svm_unlabbound);
5075 IFree(afniModel->epsilon_a);
5076 IFree(afniModel->epsilon_crit);
5077 IFree(afniModel->compute_loo);
5078 IFree(afniModel->rho);
5079 IFree(afniModel->xa_depth);
5080 RETURN(1);
5081 }
5082 Clear2c(afniModel->combName, max_comb);
5083
5084 if( (afniModel->kernel_custom = Allocate2c(max_comb, (long)CSV_STRING)) == NULL ) {
5085 snprintf(errorString, LONG_STRING, "allocateAfniModel: Memory allocation for kernel_custom failed!");
5086
5087 /* free and return */
5088 IFree(afniModel->kernel_type);
5089 IFree(afniModel->polynomial_degree);
5090 IFree(afniModel->rbf_gamma);
5091 IFree(afniModel->linear_coefficient);
5092 IFree(afniModel->constant_coefficient);
5093 IFree(afniModel->total_masked_features);
5094 IFree(afniModel->total_samples);
5095 IFree(afniModel->total_support_vectors);
5096 IFree(afniModel->b);
5097 free2f(afniModel->alphas, (long) afniModel->combinations);
5098 IFree(afniModel->eps);
5099 IFree(afniModel->svm_c);
5100 IFree(afniModel->biased_hyperplane);
5101 IFree(afniModel->skip_final_opt_check);
5102 IFree(afniModel->svm_maxqpsize);
5103 IFree(afniModel->svm_newvarsinqp);
5104 IFree(afniModel->svm_iter_to_shrink);
5105 IFree(afniModel->transduction_posratio);
5106 IFree(afniModel->svm_costratio);
5107 IFree(afniModel->svm_costratio_unlab);
5108 IFree(afniModel->svm_unlabbound);
5109 IFree(afniModel->epsilon_a);
5110 IFree(afniModel->epsilon_crit);
5111 IFree(afniModel->compute_loo);
5112 IFree(afniModel->rho);
5113 IFree(afniModel->xa_depth);
5114 free2c(afniModel->combName, max_comb);
5115 RETURN(1);
5116 }
5117 Clear2c(afniModel->kernel_custom, max_comb);
5118
5119 RETURN(0);
5120 }
5121
void freeAfniModelAndArrays(AFNI_MODEL *afniModel,
    DatasetType **dsetModelArray, MaskType *dsetMaskArray,
    long nt_model)
{
  ENTRY("freeAfniModelAndArrays");

  /* Release the model/mask data arrays, then the model structure itself.
   * When the model is available its mask_used flag decides how the arrays
   * were allocated; without a model we assume a mask was used (1). */
  {
    int mask_used = (afniModel != NULL) ? afniModel->mask_used : 1;
    freeModelArrays(dsetModelArray, dsetMaskArray, nt_model, mask_used);
  }

  freeAfniModel(afniModel);

  EXRETURN;
}
5140
/* JL Oct 2010: This function determines the svm learn type (svm_type),
 * reads and allocates afniModel, dsetModelArray and dsetMaskArray.
 */
int readAllocateAfniModelAndArrays( ASLoptions *options, AFNI_MODEL *afniModel,
    THD_3dim_dataset *dsetModel, DatasetType ***dsetModelArray,
    MaskType **dsetMaskArray, long *nt_model, long *nvox_model,
    enum modes mode, int *svm_type, char *errorString )
{

  DatasetType ** tmp_dsetModelArray = NULL;
  MaskType *     tmp_dsetMaskArray  = NULL;


  ENTRY("readAllocateAfniModelAndArrays");


  /* JL July 2011: Modified error handling: Passing error message
   * as argument (errorString) to the calling function,
   * allocated memory is freed, RETURN(1) instead of ERROR_exit().
   *
   * NOTE(review): dsetModel is passed by value, so the dataset handle
   * opened below is not visible to the caller after this function
   * returns -- confirm callers do not expect it back.
   *
   * TODO: Should pass arrays directly to the testing functions,
   * (not read them from disc twice!), if training and testing
   * is performed at once.
   */


  /*----- VIEWTYPE POSTFIX FOR MODEL (quick fix !!!) -----*/
  /* It would be better to not read in the model from disc.
   * Determine the AFNI viewtype from the train dataset name, verify the
   * test dataset uses the same viewtype, and append the postfix to the
   * model filename. */
  if( mode == TRAIN_AND_TEST ) {
    static const char *viewtypes[] = { "+orig", "+tlrc", "+acpc" };
    int v       = 0;
    int nviews  = (int) (sizeof(viewtypes)/sizeof(viewtypes[0]));
    int matched = 0;

    for( v=0; v<nviews; ++v ) {
      if( strstr(options->trainFile, viewtypes[v]) != NULL ) {
        if( strstr(options->testFile, viewtypes[v]) == NULL ) {
          snprintf(errorString, LONG_STRING,
              "Viewtype of train dataset: %s does not match\n"
              " Viewtype of test dataset: %s!", options->trainFile,
              options->testFile);

          RETURN(1);
        }
        /* Bounded append: the third argument of strncat is the maximum
         * number of characters to append, NOT the destination size.
         * Passing LONG_STRING here could overflow modelFile; bound by
         * the space actually remaining in the buffer instead. */
        strncat(options->modelFile, viewtypes[v],
            LONG_STRING - strlen(options->modelFile) - 1);
        matched = 1;
        break;
      }
    }

    if( !matched ) {
      snprintf(errorString, LONG_STRING, "Model viewtype unknown!");
      RETURN(1);
    }
  }

  /*----- LOAD AFNI MODEL -----*/
  if( (dsetModel = THD_open_one_dataset( options->modelFile )) == NULL ) {
    snprintf(errorString, LONG_STRING,
        "Failed to open model dataset: %s", options->modelFile);

    RETURN(1);
  }
  DSET_load( dsetModel );

  /*----- READ AFNI MODEL -----*/
  if( readAllocateAfniModel(dsetModel, afniModel, errorString) ) {

    /* free and return */
    DSET_unload(dsetModel);
    RETURN(1);
  }

  /*---- GET MODEL AND MASK ARRAY ------------------*/
  if( getAllocateModelArrays(dsetModel, &tmp_dsetModelArray, &tmp_dsetMaskArray,
      nt_model, nvox_model, &afniModel->mask_used, options->outModelNoMask,
      errorString) ) {

    /* free and return */
    DSET_unload(dsetModel);
    freeAfniModel(afniModel);
    RETURN(1);
  }

  /* --- SET svm learn type ---*/
  if( !strcmp(afniModel->svm_type, "regression") ) *svm_type = REGRESSION;
  else *svm_type = CLASSIFICATION;

  /*----- FREE MEMORY ------*/
  DSET_unload( dsetModel );

  /* Hand the successfully allocated arrays back to the caller only after
   * all error paths have been cleared. */
  *dsetMaskArray = tmp_dsetMaskArray;
  *dsetModelArray = tmp_dsetModelArray;

  RETURN(0);
}
5249
/* Free all arrays allocated by getAllocateClassificationLabels().
 * Safe to call on a LABELS struct whose members were set to NULL. */
void freeClassificationLabels(LABELS *labels) {

  /* fixed trace tag: was "freeClasssificationLabels" (typo), which made
   * the debugtrace output not match the actual function name */
  ENTRY("freeClassificationLabels");

  IFree(labels->lbls);
  IFree(labels->cnsrs);
  IFree(labels->lbls_cont);
  IFree(labels->class_list);
  IFree(labels->lbls_count);

  EXRETURN;
}
5262
getAllocateClassificationLabels(LABELS * labels,char * labelFile,char * censorFile,char * errorString)5263 int getAllocateClassificationLabels( LABELS *labels, char *labelFile,
5264 char *censorFile, char *errorString )
5265 {
5266 FILE *fp = NULL;
5267 int class_exists_flag = 0;
5268 long i,j,k = 0;
5269 char labelString[LONG_STRING];
5270 int strLength = 0;
5271
5272 ENTRY("getAllocateClassificationLabels");
5273
5274 /* Changes:
5275 * JL April 2010: Added checking for empty lines in label- and censor file
5276 * JL Aug. 2010: Fixed a bug in determining number of classes: Number of
5277 * of classes was calculated incorrectly if an entire class was
5278 * censored completely using the censorfile.
5279 * JL Aug. 2010: Added lbls_cont. which holds the user-given labels
5280 * converted to continues label values (i.e. 0,1,2,...,n)
5281 * (needed for calculation of multiclass prediction accuracies).
5282 * JL Sep. 2010: Improved error checking for censor file
5283 *
5284 * JL June 2011: Modified error handling: Passing error string as argument
5285 * to the calling function, allocated memory is freed, RETURN(1)
5286 * instead of ERROR_exit.
5287 *
5288 * JL Mar. 2014: Determine occurrence of each label and store in lbls_count
5289 * (Needed for calculating of multi-class accuracies)
5290 *
5291 */
5292
5293 /*----- RETRIEVE LABEL FILE AND CENSOR FILE--------------*/
5294 if( (fp = fopen(labelFile, "r")) == NULL ) {
5295 snprintf(errorString, LONG_STRING, "Could not open .1D label file: %s",
5296 labelFile);
5297
5298 RETURN(1);
5299 }
5300
5301 /* --- get length --- */
5302 labels->n = getFileSize(labelFile);
5303
5304 /* --- allocate labels --- */
5305 if( (labels->lbls = (LabelType*)malloc(sizeof(LabelType)*labels->n)) == NULL ) {
5306 snprintf(errorString, LONG_STRING, "getAllocateClassifcationLabels: "
5307 "Could not allocate lbls!");
5308
5309 /* free and return */
5310 fclose(fp);
5311 RETURN(1);
5312 }
5313
5314 if( (labels->lbls_cont = (LabelType*)malloc(sizeof(LabelType)*labels->n)) == NULL ) {
5315 snprintf(errorString, LONG_STRING, "getAllocateClassificationLabels: "
5316 "Could not allocate lbls_cont!");
5317
5318 /* free and return */
5319 fclose(fp);
5320 IFree(labels->lbls);
5321 RETURN(1);
5322 }
5323
5324 if( (labels->class_list = (int *)malloc(sizeof(int)*CLASS_MAX)) == NULL ) {
5325 snprintf(errorString, LONG_STRING, "getAllocateClassificationLabels: "
5326 "Could not allocate labels class list!");
5327
5328 /* free and return */
5329 fclose(fp);
5330 IFree(labels->lbls);
5331 IFree(labels->lbls_cont);
5332 RETURN(1);
5333 }
5334
5335 /* JL Mar. 2014 */
5336 if( (labels->lbls_count = (int *)malloc(sizeof(int)*CLASS_MAX)) == NULL ) {
5337 snprintf(errorString, LONG_STRING, "getAllocateClassificationLabels: "
5338 "Could not allocate lbls_count!");
5339
5340 /* free and return */
5341 fclose(fp);
5342 IFree(labels->lbls);
5343 IFree(labels->lbls_cont);
5344 IFree(labels->class_list);
5345 RETURN(1);
5346 }
5347
5348 /* --- read labels from file and do some error checking --- */
5349 for( i=0; i<labels->n; i++ ) {
5350 fgets(labelString, LONG_STRING, fp);
5351
5352 /* -- check for empty lines -- */
5353 if ( (strLength = strlen(labelString)) == 1 ) {
5354 snprintf(errorString, LONG_STRING, "Labelfile: '%s' contains empty "
5355 "entry in line %ld!", labelFile, i+1);
5356
5357 /* free and return */
5358 fclose(fp);
5359
5360 IFree(labels->lbls);
5361 IFree(labels->lbls_cont);
5362 IFree(labels->class_list);
5363 IFree(labels->lbls_count);
5364 RETURN(1);
5365 }
5366 else labels->lbls[i] = (LabelType) atof(labelString);
5367
5368 /* -- check for negative entires other than -9999 */
5369 if ( (labels->lbls[i] < 0.0) && ((int)labels->lbls[i] != -9999) ) {
5370 snprintf(errorString, LONG_STRING, "Labelfile: '%s' contains a negative "
5371 "entry in line %ld! ", labelFile, i+1);
5372 /* free and return */
5373 fclose(fp);
5374 IFree(labels->lbls);
5375 IFree(labels->lbls_cont);
5376 IFree(labels->class_list);
5377 IFree(labels->lbls_count);
5378 RETURN(1);
5379 }
5380 }
5381 fclose(fp);
5382
5383 /* --- allocate censors --- */
5384 if( (labels->cnsrs = (LabelType *)malloc(sizeof(LabelType)*labels->n)) == NULL ) {
5385 snprintf(errorString, LONG_STRING, "getAllocateClassificationLabels: "
5386 "Could not allocate censors!");
5387
5388 /* free and return */
5389 IFree(labels->lbls);
5390 IFree(labels->lbls_cont);
5391 IFree(labels->class_list);
5392 IFree(labels->lbls_count);
5393 RETURN(1);
5394 }
5395
5396 /* --- initialize censors ---*/
5397 for( i=0; i<labels->n; ++i ) labels->cnsrs[i] = 1.0;
5398 labels->n_cnsrs = 0;
5399
5400 /* --- read censors from file and do some error checking --- */
5401 if( censorFile[0] ) {
5402 if( (fp = fopen(censorFile,"r")) == NULL ) {
5403 snprintf(errorString, LONG_STRING, "Could not open .1D censor file: %s",
5404 censorFile);
5405
5406 /* free and return */
5407 IFree(labels->lbls);
5408 IFree(labels->lbls_cont);
5409 IFree(labels->class_list);
5410 IFree(labels->lbls_count);
5411 IFree(labels->cnsrs);
5412 RETURN(1);
5413 }
5414 /* -- check if size of labelfile matches size of censorfile -- */
5415 if( labels->n != getFileSize(censorFile) ) {
5416 snprintf(errorString, LONG_STRING, "Lenght of labelfile: '%s' (%ld) "
5417 "does not match length of censorfile: '%s' (%ld)!",
5418 labelFile, labels->n, censorFile, getFileSize(censorFile));
5419
5420 /* free and return */
5421 IFree(labels->lbls);
5422 IFree(labels->lbls_cont);
5423 IFree(labels->class_list);
5424 IFree(labels->lbls_count);
5425 IFree(labels->cnsrs);
5426 fclose(fp);
5427 RETURN(1);
5428 }
5429
5430 /* -- read censors and do some more error checking -- */
5431 for(i=0; i<labels->n; ++i) {
5432 fgets(labelString, LONG_STRING, fp);
5433
5434 /* - check for empty lines - */
5435 if ( (strLength = strlen(labelString)) == 1 ) {
5436 snprintf(errorString, LONG_STRING, "Censorfile: '%s' line: '%ld' is "
5437 "empty!", censorFile, i+1);
5438
5439 /* free and return */
5440 IFree(labels->lbls);
5441 IFree(labels->lbls_cont);
5442 IFree(labels->class_list);
5443 IFree(labels->lbls_count);
5444 IFree(labels->cnsrs);
5445 fclose(fp);
5446 RETURN(1);
5447 }
5448 else labels->cnsrs[i] = (LabelType) atof(labelString);
5449
5450 /* - check for values other than 1 and 0 - */
5451 if ( (strcmp(trimString(labelString), "0")) &&
5452 (strcmp(trimString(labelString), "1")) ) {
5453
5454 snprintf(errorString, LONG_STRING, "Consorfile: '%s' line: '%ld' "
5455 "contains invalid entry: '%s'. Only 0 or 1 is allowed!",
5456 censorFile, i+1, labelString);
5457
5458 /* free and return */
5459 IFree(labels->lbls);
5460 IFree(labels->lbls_cont);
5461 IFree(labels->class_list);
5462 IFree(labels->lbls_count);
5463 IFree(labels->cnsrs);
5464 fclose(fp);
5465 RETURN(1);
5466 }
5467 }
5468 fclose(fp);
5469 }
5470
5471 /*----- DETERMINE NUMBER OF CLASSES --------------*/
5472 /* --- initializ class list --- */
5473 for(j=0; j<CLASS_MAX; ++j) {
5474 labels->class_list[j] = 9999;
5475 labels->lbls_count[j] = 0;
5476 }
5477
5478 /* i indexes all time points
5479 ** j indexes over total allowed classes (CLASS_MAX)
5480 ** k increments as each new class label is found
5481 */
5482 labels->n_classes = 0;
5483 k = 0;
5484 for( i=0; i < labels->n; ++i ) {
5485 if( ((int)rint(labels->lbls[i]) != 9999) && /* not censored in labelfile */
5486 ((int)rint(labels->lbls[i]) != -9999) && /* not trunsductive ) */
5487 ((int)rint(labels->cnsrs[i])) ) { /* not censored in censorfile*/
5488
5489 for( j=0; j < CLASS_MAX; ++j ) {
5490 if( (int)rint(labels->lbls[i]) == labels->class_list[j] ) {
5491 class_exists_flag = 1;
5492 break;
5493 }
5494 }
5495 if( !class_exists_flag ) {
5496 labels->class_list[k] = (int)rint(labels->lbls[i]);
5497 ++labels->n_classes;
5498 ++k;
5499 }
5500 else {
5501 class_exists_flag = 0;
5502 }
5503 }
5504 else {
5505 labels->n_cnsrs++;
5506 if ((int)rint(labels->lbls[i]) != -9999) labels->cnsrs[i] = (LabelType)0;
5507 }
5508 }
5509
5510 /* -- sort label list -- */
5511 qsort( labels->class_list, CLASS_MAX, sizeof(int), (void *)compare_ints );
5512
5513 /* -- convert user-given labels to continuous label values */
5514 for (j=0; j<labels->n_classes; ++j) {
5515 for(i=0; i<labels->n; ++i ) {
5516 if( ((int)rint(labels->lbls[i]) != 9999) && /* not censored in labelfile */
5517 ((int)rint(labels->lbls[i]) != -9999) && /* not trunsductive ) */
5518 ((int)rint(labels->cnsrs[i])) ) { /* not censored in censorfile*/
5519
5520 if ((int)rint(labels->lbls[i]) == labels->class_list[j]) {
5521 labels->lbls_cont[i] = (LabelType)j;
5522 /* JL Mar 2014: Count occurrence of each label */
5523 labels->lbls_count[j]++;
5524 }
5525 }
5526 else {
5527 labels->lbls_cont[i] = (LabelType)9999;
5528 }
5529 }
5530 }
5531
5532 if(verbosity >= 1) {
5533 INFO_message( "Number of classes = %d\n", labels->n_classes );
5534 printf("++ ");
5535 for( i = 0; i < labels->n_classes; ++i ) {
5536 printf( "class[%ld] = %d, ", i, labels->class_list[i] );
5537 }
5538 printf("\n");
5539 }
5540
5541 if (labels->n_classes >= CLASS_MAX) {
5542 snprintf(errorString, LONG_STRING, "Max numer of classes hard coded to %d! "
5543 "Complain to the authors if you need more.", CLASS_MAX-1);
5544
5545 /* free and return */
5546 IFree(labels->lbls);
5547 IFree(labels->lbls_cont);
5548 IFree(labels->class_list);
5549 IFree(labels->lbls_count);
5550 IFree(labels->cnsrs);
5551 RETURN(1);
5552 }
5553
5554 RETURN(0);
5555 }
5556
/* JL May 2009: This function may duplicate getAllocateClassificationLabels
 * a bit, but for regression a few things can be simplified:
 *
 * - we only support censoring with a separate censor file (not 9999s)
 * - we don't have to worry about multi-class
 * - since the target for svm-light does not need to be updated (no
 *   multi-class), the target is generated here as well
 */
int getAllocateRegressionLabelsAndTarget(LABELS *labels, LabelType **target,
    char *labelFile, char *censorFile, char *errorString)
{
  /* Reads regression labels from labelFile (one value per line) and an
   * optional 0/1 censor file, then builds the svm-light target array
   * (labels with censored timepoints removed).
   *
   * On success:  RETURN(0); labels->lbls, labels->cnsrs, labels->class_list
   *              and *target are allocated (caller frees via
   *              freeRegressionLabelsAndTarget).
   * On failure:  RETURN(1); errorString (size LONG_STRING) describes the
   *              problem and all memory allocated here is freed.
   */

  FILE *fp             = NULL;
  long i, j            = 0;
  long n9999           = 0;    /* number of 9999 entries among non-censored labels */
  LabelType *tmpTarget = NULL;
  int strLength        = 0;
  char labelString[LONG_STRING];


  ENTRY("getAllocateRegressionLabelsAndTarget");

  /* JL Sep. 2010: Improved error checking for censor file and fixed a bug
   * (length of censorfile was not determined correctly).
   * JL July 2011: Modified error handling: Passing error string as argument
   * to the calling function, allocated memory is freed,
   * RETURN(1) instead of ERROR_exit().
   */

  /*--- open labelfile ---*/
  if( (fp = fopen(labelFile, "r") ) == NULL ) {
    snprintf(errorString, LONG_STRING,
        "Could not open .1D label file: %s !", labelFile);

    RETURN(1);
  }

  /*--- initialize ---*/
  labels->n = getFileSize(labelFile);
  labels->n_cnsrs = 0;

  /* -- allocate lbls -- */
  if( (labels->lbls = (LabelType*)malloc(sizeof(LabelType)*labels->n)) == NULL ) {
    snprintf(errorString, LONG_STRING, "getAllocateRegressionLabelsAndTarget: "
        "Memory allocation for labels failed!");

    /* free and return */
    fclose(fp);
    RETURN(1);
  }

  /* --------------------------------------------------------------------------*/
  /* TODO: This is not great and needs to be cleaned up!
   * Wasting some memory to be able to recycle existing functions
   * written for classification.
   */
  if( (labels->class_list = (int *)malloc(sizeof(int)*CLASS_MAX)) == NULL ) {
    snprintf(errorString, LONG_STRING, "getAllocateRegressionLabelsAndTarget: "
        "Memory allocation for class_list failed!");

    /* free and return */
    fclose(fp);
    IFree(labels->lbls);
    RETURN(1);

  }
  for( j=0 ; j<CLASS_MAX ; ++j ) {
    labels->class_list[j] = 9999;
  }
  labels->n_classes = 2;
  /* -------------------------------------------------------------------------*/

  /*--- read labelfile ---*/
  /* NOTE(review): fgets return value is not checked; getFileSize() above is
   * assumed to guarantee at least labels->n readable lines — same convention
   * as getAllocateClassificationLabels. */
  for(i=0; i<labels->n; i++) {
    fgets(labelString, LONG_STRING, fp);
    if ( (strLength = strlen(labelString)) == 1 ) {
      snprintf(errorString, LONG_STRING,
          "Labelfile: '%s' contains empty entry in line %ld!", labelFile, i+1);

      /* free and return */
      fclose(fp);
      IFree(labels->lbls);
      IFree(labels->class_list);
      RETURN(1);
    }
    else labels->lbls[i] = (LabelType) atof(labelString);
  }
  fclose(fp);

  /*--- allocate censors ---*/
  if( (labels->cnsrs = (LabelType*)malloc(sizeof(LabelType)*labels->n)) == NULL ) {
    snprintf(errorString, LONG_STRING, "getAllocateRegressionLabelsAndTarget: "
        "Memory allocation for labels->cnsrs failed!");

    /* free and return */
    IFree(labels->lbls);
    IFree(labels->class_list);
    RETURN(1);
  }

  /* --- initialize censors: 1.0 means "keep this timepoint" --- */
  for(i=0; i<labels->n; ++i) labels->cnsrs[i] = 1.0;

  /*--- open censorfile ---*/
  if( censorFile[0] ) {
    if( (fp = fopen(censorFile, "r")) == NULL ) {
      snprintf(errorString, LONG_STRING,
          "Could not open .1D censor file: %s", censorFile);

      /* free and return */
      IFree(labels->lbls);
      IFree(labels->class_list);
      IFree(labels->cnsrs);
      RETURN(1);
    }

    if( labels->n != getFileSize(censorFile) ) {
      snprintf(errorString, LONG_STRING,
          "Length of labelfile: '%s' (%ld) does not match length of\n"
          "censorfile: '%s' (%ld)!", labelFile, labels->n, censorFile,
          getFileSize(censorFile));

      /* free and return */
      IFree(labels->lbls);
      IFree(labels->class_list);
      IFree(labels->cnsrs);
      fclose(fp);
      RETURN(1);
    }

    /*--- read censorfile and count censors ---*/
    labels->n_cnsrs = 0;

    for(i=0; i<labels->n; ++i) {
      fgets(labelString, LONG_STRING, fp);

      /* JL: Bugfix: empty line used to call ERROR_exit(), which terminated
       * the program and leaked lbls, class_list, cnsrs and the open file.
       * Now handled like every other error in this function: report through
       * errorString, free, and RETURN(1). */
      if ( (strLength = strlen(labelString)) == 1 ) {
        snprintf(errorString, LONG_STRING,
            "Censorfile: '%s' line: '%ld' is empty!", censorFile, i+1);

        /* free and return */
        IFree(labels->lbls);
        IFree(labels->class_list);
        IFree(labels->cnsrs);
        fclose(fp);
        RETURN(1);
      }
      else labels->cnsrs[i] = (LabelType) atof(labelString);

      /* -- check for values other than 0 and 1 and count censors-- */
      if ( (strcmp(trimString(labelString), "0")) &&
           (strcmp(trimString(labelString), "1")) ) {
        snprintf(errorString, LONG_STRING,
            "Censorfile: '%s' line: '%ld' contains invalid entry: '%s'. "
            "Only 0 or 1 is allowed!", censorFile, i+1, labelString);

        /* free and return */
        IFree(labels->lbls);
        IFree(labels->class_list);
        IFree(labels->cnsrs);
        fclose(fp);
        RETURN(1);
      }
      if( (int)labels->cnsrs[i] == 0 ) labels->n_cnsrs++;
    }
    fclose(fp);
  }

  /*--- allocate target (holds only the non-censored labels) ---*/
  if( (tmpTarget = (LabelType *)malloc((labels->n-labels->n_cnsrs)*sizeof(LabelType))) == NULL ) {
    snprintf(errorString, LONG_STRING, "getAllocateRegressionLabelsAndTarget: "
        "Memory allocation for target failed!");

    /* free and return */
    IFree(labels->lbls);
    IFree(labels->class_list);
    IFree(labels->cnsrs);
    RETURN(1);
  }

  /*--- check labels and create target ---*/
  j=0;
  for( i=0; i<labels->n; ++i ) {
    if( (int)labels->cnsrs[i] ) {

      /* -- count 9999s in labels (valid for classification, not here) -- */
      if( (int)labels->lbls[i] == 9999 ) ++n9999;

      tmpTarget[j] = labels->lbls[i];
      ++j;
    }
  }

  if ( n9999 != 0 ) {
    WARNING_message("Labelfile '%s' contains 9999 '%ld' times.\n"
        "   For classification, '9999' can be used to ignore timepoints.\n"
        "   However, in regression (-type regression, you are running it right now)\n"
        "   '9999' can not be used to ignore timepoints\n"
        "   Please use a censorfile (option: -censor)", labelFile, n9999 );
  }


  *target = tmpTarget;

  RETURN(0);
}
5754
/* Release everything allocated by getAllocateRegressionLabelsAndTarget:
 * the svm-light target array plus the label/censor bookkeeping inside
 * the LABELS struct. */
void freeRegressionLabelsAndTarget(LABELS *labels, LabelType *target)
{

  ENTRY("freeRegressionLabelsAndTarget");

  /* the four allocations are independent; free target first, then
   * the LABELS members */
  IFree(target);
  IFree(labels->class_list);
  IFree(labels->cnsrs);
  IFree(labels->lbls);

  EXRETURN;
}
5767
test_classification(ASLoptions * options,MODEL * model,AFNI_MODEL * afniModel,THD_3dim_dataset * dsetTest,DatasetType ** dsetModelArray,MaskType * dsetMaskArray,long nt_mod,long nvox_mod,int argc,char ** argv,char * errorString)5768 int test_classification (ASLoptions *options, MODEL *model, AFNI_MODEL *afniModel,
5769 THD_3dim_dataset *dsetTest, DatasetType **dsetModelArray,
5770 MaskType *dsetMaskArray, long nt_mod, long nvox_mod,
5771 int argc, char **argv, char *errorString)
5772 {
5773
5774 DOC* docsTest = NULL; /* svm-light data structure used for testing */
5775
5776 DatasetType**
5777 dsetTestArray = NULL; /* array to hold test dataset values */
5778
5779 long nt = 0; /* number of time points in TEST dataset */
5780 long nvox = 0; /* number of voxels per time point in TEST dataset */
5781 long nvox_masked = 0; /* number of voxels in mask */
5782
5783 float dist_tmp = 0;
5784 float *dist = NULL; /* holds the distance for all timepoints */
5785 float **multiclass_dist = NULL;
5786 /* doing all of the pairwise tests and storing them in principle, don't have
5787 * to do this with directed, acyclic graph (DAG) but each test does not take
5788 * that long, and we may build in more options in the future.
5789 * This was originally a 1D array, and used "truth table" type of approach
5790 * that could have been slightly more robust but relied on the assumption that
5791 * most distances would be inside of their range ([-1,1] or transformed to [0,1]),
5792 * for now, I think it is better to stick with the DAG */
5793
5794 long cc, dd = 0;
5795 long sampleCount = 0; /* number of samples used in training */
5796
5797 float correct = 0.0;
5798 float incorrect = 0.0;
5799
5800 int DAG = 0; /* abbreviation for Directed Acyclic Graph:
5801 * index variable for traversing multiclass_dist */
5802 short edgeFlag = 0; /* DAG related */
5803 short classExistFlag = 0; /* multi-class related */
5804 int classAssignment = 0; /* multi-class related */
5805 float *classCorrect = NULL;
5806 float *classIncorrect = NULL;
5807
5808 int *classVote = NULL; /* mulit-class vote */
5809 int currentComb = 0;
5810 int class0, class1 = 0;
5811 int winningCount = 0; /* mulit-class vote */
5812 int *classList = NULL; /* needed for mapping non-continuous
5813 * to continuous class labels in multiclass */
5814
5815
5816 enum mctypes { MCTYPE_DAG, MCTYPE_VOTE }; /* types for multiclass */
5817 enum mctypes mctype = MCTYPE_DAG; /* default value */
5818
5819 /* labels: */
5820 LABELS testLabels;
5821 LabelType *censoredTargets= NULL; /* contains labels in svm-light readable
5822 * format. Here it is only used to calculate prediction accuracies and is
5823 * updated for each class combination (named tmp_Labels previously)*/
5824
5825 /* used for strtok magic and csv strings: */
5826 char* p = NULL;
5827 char* q = NULL;
5828 long p_string_size = 0; /* size of p string, dependent on number of
5829 * number of class-combinations */
5830 /* etc: */
5831 FILE *fp = NULL;
5832 long i,j,c,cl = 0;
5833 char predictionsFile[LONG_STRING];
5834
5835
5836 ENTRY("test_classification");
5837
5838
5839 /* JL Apr. 2010: Initialized all variables
5840 * JL Apr. 2010: Allocating p string (for strtok) dynamically.
5841 * Replaced all string functions by its equivalent that takes
5842 * also the string size as an argument
5843 * JL May. 2010: Mask is stored in brick (n and n+1) of model file
5844 * JL Aug. 2010: Modified multiclass (DAG and vote) to enable calculation of
5845 * prediction accuracies for arbitrary (non-continuous) class labels
5846 * JL July 2011: Modified error handling: Passing error message
5847 * as argument (errorString) to the calling function,
5848 * allocated memory is freed, RETURN(1) instead of ERROR_exit().
5849 * JL Mar. 2014: Fixed -classout and multi-class accuracies for non-continuous
5850 * class labels
5851 */
5852
5853 if (verbosity >= 1) INFO_message("\n++ CLASSIFICATION (testing):\n++");
5854
5855 /*----- INITIAL ERROR CHECKING ------*/
5856 if( afniModel == NULL || dsetModelArray == NULL ) {
5857 /* we should never get here */
5858 snprintf(errorString, LONG_STRING, "test_classification: "
5859 "What happened? Model could not be loaded!");
5860
5861 RETURN(1);
5862 }
5863
5864 /*----- LOAD TEST DATA --------*/
5865 if( (dsetTest = THD_open_one_dataset(options->testFile)) == NULL ) {
5866 snprintf(errorString, LONG_STRING,
5867 "Failed to open test dataset: %s", options->testFile );
5868
5869 RETURN(1);
5870 }
5871 DSET_load( dsetTest );
5872 nt = DSET_NUM_TIMES( dsetTest );
5873 nvox = DSET_NVOX( dsetTest );
5874 nvox_masked = afniModel->total_masked_features[0];
5875 /* assuming same mask for all class combinations */
5876
5877 /*----- GET TEST LABELS ------- */
5878 if( options->testLabelFile[0] ) {
5879 if( getAllocateClassificationLabels(&testLabels, options->testLabelFile,
5880 options->censorFile, errorString) ) {
5881
5882 /* free and return */
5883 DSET_unload(dsetTest);
5884 RETURN(1);
5885 }
5886
5887 if( testLabels.n != nt ) {
5888 snprintf(errorString, LONG_STRING,
5889 "Number of labels do not match the length of the test dataset:\n"
5890 " labelfile '%s' contains %ld labels, but the \n"
5891 " testvolume '%s' contains %ld brick(s). ",
5892 options->testLabelFile, testLabels.n, options->testFile, nt);
5893
5894 /* free and return */
5895 DSET_unload(dsetTest);
5896 freeClassificationLabels(&testLabels);
5897 RETURN(1);
5898 }
5899
5900 /*----- ALLOCATE censoredTargets ---- */
5901 if( (censoredTargets = (LabelType*)malloc(sizeof(LabelType)*testLabels.n)) == NULL ) {
5902
5903 snprintf(errorString, LONG_STRING, "test_classification: "
5904 "Memory allocation for censoredTargets failed!");
5905
5906 /* free and return */
5907 DSET_unload(dsetTest);
5908 freeClassificationLabels(&testLabels);
5909 RETURN(1);
5910 }
5911 }
5912
5913 /*----- PRODUCE TEST DATA ARRAY -------*/
5914 if( (dsetTestArray = getAllocateDsetArray(dsetTest, errorString)) == NULL ) {
5915
5916 /* free and return */
5917 DSET_unload(dsetTest);
5918 if( options->testLabelFile[0] ) freeClassificationLabels(&testLabels);
5919 if( options->testLabelFile[0] ) IFree(censoredTargets);
5920 RETURN(1);
5921 }
5922
5923 /* JL May 2010: Make sure number of voxels/t in model matches
5924 * number of voxels/t in test dataset */
5925 if( nvox != nvox_mod ) {
5926 snprintf(errorString, LONG_STRING,
5927 "Number of voxels in model: %s does not match\n"
5928 " number of voxels in test dataset: %s",
5929 options->modelFile, options->testFile);
5930
5931 /* free and return */
5932 freeDsetArray(dsetTest, dsetTestArray);
5933 DSET_unload(dsetTest);
5934 if( options->testLabelFile[0] ) freeClassificationLabels(&testLabels);
5935 if( options->testLabelFile[0] ) IFree(censoredTargets);
5936 RETURN(1);
5937 }
5938
5939 /*---- SET MULTICLASS METHOD -----------------*/
5940 if( (options->multiclass[0]) && (afniModel->class_count > 2) ) {
5941 if( !strcmp(options->multiclass, "DAG") ) mctype = MCTYPE_DAG;
5942 else if( !strcmp(options->multiclass, "vote") ) mctype = MCTYPE_VOTE;
5943 else {
5944 WARNING_message("Unknown method for multiclass: %s\n"
5945 " Setting mctype = DAG [default].", options->multiclass);
5946
5947 mctype = MCTYPE_DAG;
5948 }
5949 }
5950
5951 /*----- ALLOCATE AND FILL SVM-LIGHT STRUCTURES -----*/
5952 /* -- allocate DOCs for test dataset -- */
5953 if( (docsTest = allocateDOCs(nt, nvox_masked)) == NULL ) {
5954 snprintf(errorString, LONG_STRING, "test_classification: "
5955 "Memory allocation for docsTest failed!");
5956
5957 /* free and return */
5958 freeDsetArray(dsetTest, dsetTestArray);
5959 DSET_unload(dsetTest);
5960 if( options->testLabelFile[0] ) freeClassificationLabels(&testLabels);
5961 if( options->testLabelFile[0] ) IFree(censoredTargets);
5962 RETURN(1);
5963 }
5964
5965 /* -- fill DOCs with test dataset -- */
5966 afni_dset_to_svm_doc( docsTest, dsetTestArray, dsetMaskArray, nt,
5967 nvox, nvox_masked);
5968
5969 /* -- allocate MODEL -- */
5970 //model = (MODEL *)malloc(sizeof(MODEL));
5971 if( allocateModel(model, afniModel, errorString) ) {
5972
5973 /* free and return */
5974 freeDsetArray(dsetTest, dsetTestArray);
5975 DSET_unload(dsetTest);
5976 if( options->testLabelFile[0] ) freeClassificationLabels(&testLabels);
5977 if( options->testLabelFile[0] ) IFree(censoredTargets);
5978 freeDOCs(docsTest, nt);
5979 RETURN(1);
5980 }
5981
5982 /* -- fill MODEL with data from AFNI_MODEL */
5983 if( get_svm_model( model, dsetModelArray, dsetMaskArray, afniModel, nvox_mod,
5984 options->outModelNoMask, errorString) ) {
5985
5986 /* free and return */
5987 freeDsetArray(dsetTest, dsetTestArray);
5988 DSET_unload(dsetTest);
5989 if( options->testLabelFile[0] ) freeClassificationLabels(&testLabels);
5990 if( options->testLabelFile[0] ) IFree(censoredTargets);
5991 freeDOCs(docsTest, nt);
5992 freeModel(model, afniModel, TEST);
5993 RETURN(1);
5994 }
5995
5996
5997 /*----- ALLOCATE TEST PREDICTION ARRAYS -------*/
5998 if( (dist = (float *)malloc(sizeof(float)*nt)) == NULL ) {
5999 snprintf(errorString, LONG_STRING, "test_classification: "
6000 "Memory allocation for dist failed!");
6001
6002 /* free and return */
6003 freeDsetArray(dsetTest, dsetTestArray);
6004 DSET_unload(dsetTest);
6005 if( options->testLabelFile[0] ) freeClassificationLabels(&testLabels);
6006 if( options->testLabelFile[0] ) IFree(censoredTargets);
6007 freeDOCs(docsTest, nt);
6008 freeModel(model, afniModel, TEST);
6009 RETURN(1);
6010 }
6011
6012 /* ------ ALLOCATE MULTICLASS ARRAYS ----- */
6013
6014 /* JL Apr. 2010: Allocate p string for strtok */
6015 p_string_size = afniModel->combinations*CSV_STRING;
6016 if ( (p = (char *) malloc(p_string_size * sizeof (char))) == NULL ) {
6017 snprintf(errorString, LONG_STRING, "test_classification: "
6018 "Could not allocate csv string!");
6019
6020 /* free and return */
6021 freeDsetArray(dsetTest, dsetTestArray);
6022 DSET_unload(dsetTest);
6023 if( options->testLabelFile[0] ) freeClassificationLabels(&testLabels);
6024 if( options->testLabelFile[0] ) IFree(censoredTargets);
6025 freeDOCs(docsTest, nt);
6026 freeModel(model, afniModel, TEST);
6027 IFree(dist);
6028 if( afniModel->class_count > 2 ) {
6029 freeMultiClassArrays(multiclass_dist, classCorrect,
6030 classIncorrect, classVote, classList,
6031 (long) afniModel->combinations);
6032 }
6033 RETURN(1);
6034 }
6035
6036 /* JL July 2011: Only allocate multiclass arrays when needed */
6037 if( afniModel->class_count > 2 ) { /* multiclass ! */
6038 if( allocateMultiClassArrays(&multiclass_dist, &classCorrect,
6039 &classIncorrect, &classVote,
6040 &classList, afniModel->class_count, (long) afniModel->combinations,
6041 nt, errorString) ) {
6042
6043 /* free and return */
6044 freeDsetArray(dsetTest, dsetTestArray);
6045 DSET_unload(dsetTest);
6046 if( options->testLabelFile[0] ) freeClassificationLabels(&testLabels);
6047 if( options->testLabelFile[0] ) IFree(censoredTargets);
6048 freeDOCs(docsTest, nt);
6049 freeModel(model, afniModel, TEST);
6050 IFree(dist);
6051 RETURN(1);
6052 }
6053
6054 /* recover class labels in model from class combinations */
6055 for( c = 0; c < afniModel->class_count-1; ++c ) {
6056 strncpy(p, afniModel->combName[c], p_string_size);
6057 q = strtok(p,"_");
6058 cc = atol(q);
6059 q = strtok(NULL,"_");
6060 dd = atol(q);
6061
6062 if( c == 0 ) classList[c]=cc;
6063 classList[c+1]=dd;
6064 }
6065 }
6066
6067 /* -- binary classification -- */
6068 for(i = 0; i < afniModel->combinations; ++i ) {
6069 if(verbosity >= 1) {
6070 INFO_message(" ");
6071 INFO_message("--------------------------------------------------------------"
6072 "------------------");
6073 INFO_message("Category combination = %ld (%s)", i, afniModel->combName[i]);
6074 }
6075
6076 /* recover current class combination integers */
6077 strncpy(p, afniModel->combName[i], p_string_size);
6078 q = strtok(p,"_");
6079 cc = atol(q);
6080 q = strtok(NULL,"_");
6081 dd = atol(q);
6082
6083 if( options->testLabelFile[0] ) {
6084 if( getCensoredClassTarget(censoredTargets, &sampleCount, &testLabels,
6085 cc, dd, TEST, errorString) ) {
6086
6087 /* free and return */
6088 freeDsetArray(dsetTest, dsetTestArray);
6089 DSET_unload(dsetTest);
6090 freeClassificationLabels(&testLabels);
6091 IFree(censoredTargets);
6092 freeDOCs(docsTest, nt);
6093 freeModel(model, afniModel, TEST);
6094 IFree(dist);
6095 if( afniModel->class_count > 2 ) {
6096 freeMultiClassArrays(multiclass_dist, classCorrect,
6097 classIncorrect, classVote, classList,
6098 (long) afniModel->combinations);
6099 }
6100 IFree(p);
6101 RETURN(1);
6102 }
6103
6104 correct=0.0;
6105 incorrect=0.0;
6106 }
6107
6108 /*----- GET SVM-LIGHT MODEL STRUCTURE -----*/
6109 updateModel(model, afniModel, (int) i);
6110
6111 if(afniModel->class_count == 2) {
6112 snprintf(predictionsFile, LONG_STRING, "%s.1D", options->predFile);
6113 }
6114 else {
6115 snprintf(predictionsFile, LONG_STRING, "%s_%s.1D", options->predFile,
6116 afniModel->combName[i]);
6117 }
6118 if( (fp = fopen( predictionsFile, "w" )) == NULL ) {
6119 snprintf(errorString, LONG_STRING,
6120 "Could not open file for writing predictions: %s", predictionsFile );
6121
6122 /* free and return */
6123 freeDsetArray(dsetTest, dsetTestArray);
6124 DSET_unload(dsetTest);
6125 if( options->testLabelFile[0] ) freeClassificationLabels(&testLabels);
6126 if( options->testLabelFile[0] ) IFree(censoredTargets);
6127 freeDOCs(docsTest, nt);
6128 freeModel(model, afniModel, TEST);
6129 IFree(dist);
6130 if( afniModel->class_count > 2 ) {
6131 freeMultiClassArrays(multiclass_dist, classCorrect,
6132 classIncorrect, classVote, classList,
6133 (long) afniModel->combinations);
6134 }
6135 IFree(p);
6136 RETURN(1);
6137 }
6138
6139 /* JL Feb. 2009: Changed this part to support non-linear kernels */
6140 if (afniModel->kernel_type[i] == LINEAR) { /* linear kernel */
6141 for(j = 0; j < nt; ++j) {
6142 dist_tmp=classify_example_linear(model,&docsTest[j]);
6143 /* should do something smarter than re-casting double to float */
6144 dist[j]= (float) dist_tmp;
6145 }
6146 }
6147 else { /* non-linear kernel */
6148 for(j = 0; j < nt; ++j) {
6149 dist_tmp=classify_example(model,&docsTest[j]);
6150 dist[j]= (float) dist_tmp;
6151 }
6152 }
6153
6154 /* JL Nov. 2008: Changed detrending for censored timepoints */
6155 /* JL Aug. 2013: Bugfix: detrend_linear_cnsrs */
6156 if( (options->testLabelFile[0]) && (testLabels.n_cnsrs != 0) &&
6157 (!options->noPredDetrend) ) {
6158 if( detrend_linear_cnsrs(dist, &testLabels, errorString) ) {
6159 snprintf(errorString, LONG_STRING,
6160 "Could not open file for writing predictions: %s", predictionsFile );
6161
6162 /* free and return */
6163 freeDsetArray(dsetTest, dsetTestArray);
6164 DSET_unload(dsetTest);
6165 if( options->testLabelFile[0] ) freeClassificationLabels(&testLabels);
6166 if( options->testLabelFile[0] ) IFree(censoredTargets);
6167 freeDOCs(docsTest, nt);
6168 freeModel(model, afniModel, TEST);
6169 IFree(dist);
6170 if( afniModel->class_count > 2 ) {
6171 freeMultiClassArrays(multiclass_dist, classCorrect,
6172 classIncorrect, classVote, classList,
6173 (long) afniModel->combinations);
6174 }
6175 IFree(p);
6176 RETURN(1);
6177 }
6178 }
6179 else {
6180 /* WC and SL Aug. 08 : moved this up so that detrending is done before
6181 * accuracies are calculated */
6182 /* detrend in place - assuming no intercept (bias towards one class), or slope */
6183 if(!options->noPredDetrend) {
6184 DETREND_linear( (int) nt, dist );
6185 }
6186 }
6187
6188 /* WC and SL Aug. 08 : now calculate the percent accuracy with the detrended data*/
6189 if( options->testLabelFile[0] ) {
6190 for(j = 0; j < nt; ++j){
6191 if( abs((int)rint(censoredTargets[j])) != 9999) {
6192 if(dist[j]>0) {
6193 if(censoredTargets[j]>0) correct++; else incorrect++;
6194 }
6195 else {
6196 if(censoredTargets[j]<0) correct++; else incorrect++;
6197 }
6198 }
6199 }
6200 }
6201
6202 if(options->testLabelFile[0] && (verbosity>=1)) {
6203 INFO_message(" ");
6204
6205 if (sampleCount == 0) {
6206 INFO_message("Accuracy on test set: 0.00%% "
6207 "(0 correct, 0 incorrect, 0 total)");
6208 INFO_message(" ");
6209 }
6210 else {
6211 INFO_message("Accuracy on test set: %.2f%% (%d correct, %d incorrect, %ld total)",
6212 (float)(correct)*100.0/sampleCount,(int)rint(correct),
6213 (int)rint(incorrect),sampleCount);
6214 INFO_message(" ");
6215 }
6216 }
6217
6218 /* JL Apr. 2010: Added:
6219 * option -noPredCensor: Only write predictions for current class-combination
6220 * and without censored timepoints
6221 * option -noPredScale: Do not scale predictions to {0,1}
6222 */
6223 for(j = 0; j < nt; ++j) {
6224 if( afniModel->class_count > 2 ) multiclass_dist[i][j] += dist[j];
6225
6226 /* convert output prediction to {0,1} class scale */
6227 if (!options->noPredScale) dist[j] = 0.5*( dist[j] + 1 );
6228
6229 /* output integer class memberships */
6230 if( (options->classout) && (!options->noPredScale) ){
6231 /* dist is centered around 0.5 */
6232 /* JL Mar 2014: Return correct class membership for non-
6233 * continuous class labels (integers) */
6234 if(dist[j] > 0.5) dist[j] = dd;
6235 else dist[j] = cc;
6236 }
6237
6238 /* only write non-censored predictions */
6239 if ( options->testLabelFile[0] && options->noPredCensor ) {
6240 if( abs((int)rint(censoredTargets[j])) != 9999) fprintf(fp,"%.4g\n",dist[j]);
6241 }
6242 else fprintf(fp,"%.4g\n",dist[j]);
6243 }
6244
6245 fclose(fp);
6246 if(verbosity >= 1) INFO_message("Predictions written to %s\n",predictionsFile);
6247 }
6248
6249 /* --- MULTICLASS --- */
6250 /* JL Aug. 2010: Modified (DAG and vote) to enable calculation of
6251 * prediction accuracies for (non-continuous) class labels
6252 */
6253 if(afniModel->class_count > 2) {
6254
6255 if( options->testLabelFile[0] ) {
6256 correct=0.0;
6257 incorrect=0.0;
6258 for(c = 0; c < afniModel->class_count; ++c) {
6259 classCorrect[c] = 0.0;
6260 classIncorrect[c] = 0.0;
6261 }
6262 }
6263
6264 /* --- multiclass: voting method --- */
6265 if (mctype == MCTYPE_VOTE) {
6266 snprintf(predictionsFile, LONG_STRING, "%s_overall_vote.1D", options->predFile);
6267 if( (fp = fopen( predictionsFile, "w" )) == NULL ) {
6268 ERROR_message("could not open file for writing predictions: %s",
6269 predictionsFile );
6270 }
6271
6272 if(verbosity >= 1)
6273 INFO_message(" ");
6274 INFO_message("---------------------------------- vote "
6275 "----------------------------------------\n");
6276 for(j = 0; j < nt; ++j) {
6277 /* code largely duplicated in DAG ................. */
6278 if(verbosity >=2) {
6279 for(i = 0; i < afniModel->combinations; ++i) {
6280 INFO_message("model number:%ld time point:%ld classifier output=%f",
6281 i,j,multiclass_dist[i][j]);
6282 }
6283 }
6284 for(c = 0; c < afniModel->class_count; ++c) {
6285 classVote[c] = 0;
6286 }
6287 classAssignment = 0;
6288 winningCount = 0;
6289 currentComb = 0;
6290 for(class0 = 0; class0 < afniModel->class_count-1; ++class0) {
6291 for(class1 = class0+1; class1 < afniModel->class_count; ++class1) {
6292 if(multiclass_dist[currentComb][j] < 0) {
6293 classVote[class0]++;
6294
6295 if(classVote[class0] > winningCount) {
6296 winningCount = classVote[class0];
6297 classAssignment = class0;
6298 }
6299 }
6300 else {
6301 classVote[class1]++;
6302 if(classVote[class1] > winningCount) {
6303 winningCount = classVote[class1];
6304 classAssignment = class1;
6305 }
6306 }
6307 currentComb++;
6308 }
6309 }
6310
6311 if(verbosity >=2) printf("++ point number %ld: ",j);
6312 for(i = 0; i < afniModel->class_count; ++i) {
6313 if(verbosity >=2) printf("+ class: %d, classVote[%ld] = %d; ",
6314 classList[i], i, classVote[i]);
6315 }
6316 if(verbosity >=2) printf("\n");
6317
6318 if(verbosity >=2) INFO_message("Voting result: observation number=%ld, "
6319 "classAssignment = %d\n", j, classList[classAssignment]);
6320
6321 /* write class assignment to prediction file */
6322 if ( options->testLabelFile[0] && options->noPredCensor ) {
6323 if ( (int)rint(testLabels.cnsrs[j]) ) {
6324 fprintf(fp,"%d\n", classList[classAssignment]);
6325 }
6326 }
6327 else {
6328 fprintf(fp,"%d\n", classList[classAssignment]);
6329 }
6330
6331
6332 /* compare result with label file */
6333 if( (options->testLabelFile[0]) && ((int)rint(testLabels.cnsrs[j])) ) {
6334
6335 if (classList[classAssignment] == (int)rint(testLabels.lbls[j])) {
6336 correct++;
6337 classCorrect[classAssignment]++;
6338 }
6339 else {
6340 incorrect++;
6341 classIncorrect[classAssignment]++;
6342 }
6343
6344 if(verbosity >= 2) {
6345 INFO_message("Overall: test labels=%d, current number "
6346 "correct = %d incorrect = %d", (int)(testLabels.lbls[j]),
6347 (int)rint(correct), (int)rint(incorrect));
6348
6349 }
6350 }
6351 }
6352 fclose(fp);
6353 }
6354 /* --- multiclass: Directed acyclic graph (DAG) ---*/
6355 else { /* if (mctype == MCTYPE_DAG) */
6356
6357 /* Directed acyclic graph of pairwise classifiers *************************
6358 *
6359 * Example: N = 5
6360 *
6361 * array index(DAG) 0 1 2 3 4 5 6 7 8 9
6362 * class pair 01 02 03 04 | 12 13 14 | 23 24 | 34
6363 *
6364 * set start index = N-2
6365 *
6366 *
6367 * 0 vs 4 (DAG=3)
6368 *L=1 -1/ \+N-L-1
6369 * 0 vs 3 (DAG=2) 1 vs 4 (DAG=6)
6370 *L=2 -1/ \+N-L -1/ \+N-L-1
6371 * 0 vs 2 (DAG=1) 1 vs 3 (DAG=5) 2 vs 4 (DAG=8)
6372 *L=3 -1/ \+N-L -1/ \+N-L -1/ \+N-L-1
6373 * 0 vs 1 (DAG=0) 1 vs 2 (DAG=4) 2 vs 3 (DAG=7) 3 vs 4 (DAG=9)
6374 * ------------ ------------ ------------ ------------
6375 * 0 1 2 3 4
6376 *
6377 *
6378 * Right hand edge is sequence N-2 +N-2 +N-3 + N-4 ...
6379 * And! if you leave that edge, you can't get back
6380 *
6381 * everytime you go left, take one away from classAssignment N-1
6382 ***************************************************************************/
6383
6384 snprintf(predictionsFile, LONG_STRING, "%s_overall_DAG.1D", options->predFile);
6385 if( (fp = fopen( predictionsFile, "w" )) == NULL ) {
6386 snprintf(errorString, LONG_STRING,
6387 "Could not open file for writing predictions: %s", predictionsFile );
6388
6389 /* free and return */
6390 freeDsetArray(dsetTest, dsetTestArray);
6391 DSET_unload(dsetTest);
6392 if( options->testLabelFile[0] ) freeClassificationLabels(&testLabels);
6393 if( options->testLabelFile[0] ) IFree(censoredTargets);
6394 freeDOCs(docsTest, nt);
6395 freeModel(model, afniModel, TEST);
6396 IFree(dist);
6397 freeMultiClassArrays(multiclass_dist, classCorrect,
6398 classIncorrect, classVote, classList,
6399 (long) afniModel->combinations);
6400 IFree(p);
6401 RETURN(1);
6402 }
6403
6404 if(verbosity >= 1)
6405 INFO_message(" ");
6406 INFO_message("---------------------------------- DAG "
6407 "-----------------------------------------");
6408
6409 if(verbosity >=2) INFO_message("Verbosity >= 2: multiclass details (note"
6410 "decision threshold =0):");
6411 for(j = 0; j < nt; ++j) {
6412 if(verbosity >=2) {
6413 for(i = 0; i < afniModel->combinations; ++i) {
6414 INFO_message("model number:%ld time point:%ld classifier output=%f",
6415 i, j, multiclass_dist[i][j]);
6416 }
6417 }
6418 DAG = afniModel->class_count - 2;
6419 if(verbosity >= 2) printf("++ model number=%d: ", DAG);
6420 classAssignment = afniModel->class_count - 1;
6421 /* assuming class values [0,...,N-1] */
6422 edgeFlag = 1;
6423 for(i = 1; i < afniModel->class_count; ++i) {
6424 /* note: starting index at 1, and going through class_count-1 times*/
6425 if(verbosity >= 2) printf("++ classifier output = %f ", multiclass_dist[DAG][j]);
6426 if(multiclass_dist[DAG][j]>0) {
6427 if(edgeFlag) {
6428 DAG += afniModel->class_count - i - 1;
6429 if(verbosity >=2) INFO_message("next model number=%d, current max "
6430 "possible classAssignment = %d", DAG, classAssignment);
6431 }
6432 else {
6433 DAG += afniModel->class_count - i;
6434 if(verbosity >=2) INFO_message("next model number=%d, current max "
6435 "possible classAssignment = %d", DAG, classAssignment);
6436 }
6437 }
6438 else {
6439 edgeFlag = 0;
6440 DAG--;
6441 classAssignment--;
6442 if(verbosity >=2) INFO_message("next model number=%d, current max "
6443 "possible classAssignment = %d", DAG, classAssignment);
6444 }
6445 }
6446 if(verbosity >=2) INFO_message("DAG result: observation number=%ld model "
6447 "number=%d, classAssignment = %d (%d)", j, DAG, classAssignment,
6448 classList[classAssignment]);
6449
6450 /* write result to prediction file */
6451 if ( options->testLabelFile[0] && options->noPredCensor ) {
6452 if ( (int)rint(testLabels.cnsrs[j]) ) {
6453 fprintf(fp,"%d\n", classList[classAssignment]);
6454 }
6455 }
6456 else {
6457 fprintf(fp,"%d\n", classList[classAssignment]);
6458 }
6459
6460 /* compare result with label file */
6461 if((options->testLabelFile[0]) && ((int)(testLabels.lbls_cont[j] != 9999))) {
6462
6463 if (classList[classAssignment] == (int)testLabels.lbls[j]) {
6464 correct++;
6465 classCorrect[classAssignment]++;
6466 }
6467 else {
6468 incorrect++;
6469 classIncorrect[classAssignment]++;
6470 }
6471
6472 if(verbosity >= 2) {
6473 INFO_message("Overall: test labels=%d, current number "
6474 "correct = %d incorrect = %d", (int)(testLabels.lbls[j]),
6475 (int)rint(correct), (int)rint(incorrect));
6476
6477 }
6478 }
6479 }
6480 fclose(fp);
6481 }
6482
6483
6484 /* report accuracies */
6485 if(options->testLabelFile[0] && (verbosity>=1)) {
6486 INFO_message("Overall accuracy on multiclass test set: %.2f%% "
6487 "(%d correct, %d incorrect, %d total)",
6488 (float)correct*100.0/((int)rint(correct)+(int)rint(incorrect)),
6489 (int)rint(correct),(int)rint(incorrect),
6490 (int)rint(correct)+(int)rint(incorrect) );
6491
6492
6493 INFO_message("Individual Breakdown:");
6494 for( cl = 0; cl < testLabels.n_classes; ++cl ) {
6495 classExistFlag = 0;
6496 for( c = 0; c < afniModel->class_count; ++c ) {
6497 if( classList[c] == testLabels.class_list[cl] ) {
6498 classExistFlag=1;
6499
6500 INFO_message(" "
6501 "classLabel = %3d: %.2f%% (%d correct, %d total)\n",
6502 classList[c],
6503 classCorrect[c]*100.0/testLabels.lbls_count[cl],
6504 (int)rint(classCorrect[c]),
6505 testLabels.lbls_count[cl]);
6506
6507 break;
6508 }
6509 }
6510 if(!classExistFlag) {
6511 WARNING_message(" "
6512 "classLabel = %3d: not present in model file!\n",
6513 testLabels.class_list[cl]);
6514 }
6515 }
6516 }
6517
6518 /* free arrays allocated for multiclass */
6519 freeMultiClassArrays(multiclass_dist, classCorrect, classIncorrect,
6520 classVote, classList, (long) afniModel->combinations);
6521
6522 } /* multiclass done */
6523
6524 if(verbosity >= 1) {
6525 INFO_message("\n");
6526 INFO_message("Predictions for all categories written to %s",
6527 predictionsFile);
6528 }
6529
6530 /* free */
6531 if( options->testLabelFile[0] ) freeClassificationLabels(&testLabels);
6532 if( options->testLabelFile[0] ) IFree(censoredTargets);
6533 freeDOCs(docsTest, nt);
6534 freeModel(model, afniModel, TEST);
6535 IFree(dist);
6536
6537 IFree(p);
6538 freeDsetArray(dsetTest, dsetTestArray);
6539 DSET_unload(dsetTest);
6540
6541
6542 RETURN(0);
6543 }
6544
6545 /* JL May 2009: Added this function to support sv-regression in 3dsvm.
6546 * It is very similar to test_routine() (a lot of code has been recycled).
6547 * However, major differences are:
6548 *
 * - No need for multiclass
 * - No detrending
 * - New function to read in the labelfile ( getAllocateRegressionLabelsAndTarget() )
6552 * - Using rms error as a benchmark
6553 */
/* JL May 2009: Added this function to support sv-regression in 3dsvm.
 * It is very similar to test_routine() (a lot of code has been recycled),
 * but without multiclass handling or detrending, and using rms error as a
 * benchmark.
 *
 * Loads the test dataset named by options->testFile, converts it into
 * svm-light DOC structures (masked by dsetMaskArray), rebuilds the svm-light
 * MODEL from the stored AFNI model arrays, computes a prediction for every
 * time point and writes the predictions to '<options->predFile>.1D'. If a
 * test label file was given, the RMS error over the non-censored time
 * points is reported.
 *
 * Returns 0 on success. On failure a description of the problem is written
 * into errorString, all memory allocated here is released, and 1 is
 * returned (instead of ERROR_exit(), see JL July 2011 note below).
 *
 * NOTE(review): nt_mod, argc and argv are currently unused in this
 * function; they appear to be kept for interface symmetry with
 * test_routine() -- confirm before removing.
 */
int test_regression (ASLoptions *options, MODEL *model, AFNI_MODEL *afniModel,
    THD_3dim_dataset *dsetTest, DatasetType **dsetModelArray,
    MaskType *dsetMaskArray, long nt_mod, long nvox_mod, int argc, char **argv,
    char *errorString)
{
  long nt          = 0;      /* number of time points in test dataset */
  long nvox        = 0;      /* number of voxels per time point in test dataset */
  long nvox_masked = 0;      /* number of voxels in mask */

  DOC*                       /* array to hold test dataset in svm-light data */
    docsTest       = NULL;   /* structure */
  DatasetType**
    dsetTestArray  = NULL;   /* array to hold test dataset values */

  double dist_tmp  = 0;      /* prediction for a single time point */
  double *dist     = NULL;   /* array holding the prediction for each
                                time point */

  LabelType *target = NULL;  /* labels without censored timepoints. Assuming
                                the 'truth' is known and we want to determine
                                the error. */
  LABELS testLabels;
  long j           = 0;
  FILE* fp         = NULL;

  char predictionsFile[LONG_STRING];

  double rms       = 0;      /* used to calculate rms error */
  long nUncensored = 0;      /* number of non-censored time points
                                (denominator of the rms calculation) */


  ENTRY("test_regression");

  /* JL July 2011: Modified error handling: Passing error message
   * as argument (errorString) to the calling function,
   * allocated memory is freed, RETURN(1) instead of ERROR_exit().
   */


  if (verbosity >= 1) INFO_message("\n++ REGRESSION (testing):\n++");

  /*----- INITIAL ERROR CHECKING ------*/
  if( afniModel == NULL || dsetModelArray == NULL ) {
    /* we should never get here */
    snprintf(errorString, LONG_STRING, "test_regression: "
        "What happened? Model could not be loaded!");

    RETURN(1);
  }

  /*----- LOAD TEST DATA --------*/
  if( (dsetTest = THD_open_one_dataset(options->testFile)) == NULL ) {
    snprintf(errorString, LONG_STRING,
        "Failed to open test dataset: %s", options->testFile );

    RETURN(1);
  }
  DSET_load( dsetTest );
  nt = DSET_NUM_TIMES( dsetTest );
  nvox = DSET_NVOX( dsetTest );
  nvox_masked = afniModel->total_masked_features[0];
    /* assuming same mask for all class combinations */

  /*----- GET TEST LABELS -------*/
  if( options->testLabelFile[0] ) {
    if( getAllocateRegressionLabelsAndTarget(&testLabels, &target,
        options->testLabelFile, options->censorFile, errorString) ) {

      /* free and return */
      DSET_unload(dsetTest);
      RETURN(1);
    }

    if( testLabels.n != nt ) {
      snprintf(errorString, LONG_STRING,
          "Number of labels do not match the length of the test dataset:\n"
          "   labelfile '%s' contains %ld labels, but the \n"
          "   testvolume '%s' contains %ld brick(s). ",
          options->testLabelFile, testLabels.n, options->testFile, nt);

      /* free and return */
      DSET_unload(dsetTest);
      freeRegressionLabelsAndTarget(&testLabels, target);
      RETURN(1);
    }
  }

  /*----- PRODUCE TEST DATA ARRAY -------*/
  if( (dsetTestArray = getAllocateDsetArray(dsetTest, errorString) ) == NULL ) {

    /* free and return */
    DSET_unload(dsetTest);
    if( options->testLabelFile[0] )
      freeRegressionLabelsAndTarget(&testLabels, target);
    RETURN(1);
  }

  /*----- ALLOCATE AND FILL DOC STRUCTURE -----*/
  if( (docsTest = allocateDOCs(nt, nvox_masked)) == NULL ) {
    snprintf(errorString, LONG_STRING, "test_regression: "
        "Memory allocation for docsTest failed!");

    /* free and return */
    DSET_unload(dsetTest);
    if( options->testLabelFile[0] )
      freeRegressionLabelsAndTarget(&testLabels, target);
    freeDsetArray(dsetTest, dsetTestArray);
    RETURN(1);
  }

  afni_dset_to_svm_doc( docsTest, dsetTestArray, dsetMaskArray,
      nt, nvox, nvox_masked);

  /*----- ALLOCATE AND FILL SVM MODEL -----*/
  /* JL: check the result of malloc (was previously unchecked; a failed
   * allocation would have crashed inside allocateModel) */
  if( (model = (MODEL *)malloc(sizeof(MODEL))) == NULL ) {
    snprintf(errorString, LONG_STRING, "test_regression: "
        "Memory allocation for model structure failed!");

    /* free and return */
    DSET_unload(dsetTest);
    if( options->testLabelFile[0] )
      freeRegressionLabelsAndTarget(&testLabels, target);
    freeDsetArray(dsetTest, dsetTestArray);
    freeDOCs(docsTest, nt);
    RETURN(1);
  }
  if( allocateModel(model, afniModel, errorString ) ) {

    /* free and return */
    DSET_unload(dsetTest);
    if( options->testLabelFile[0] )
      freeRegressionLabelsAndTarget(&testLabels, target);
    freeDsetArray(dsetTest, dsetTestArray);
    freeDOCs(docsTest, nt);
    RETURN(1);
  }

  if( get_svm_model(model, dsetModelArray, dsetMaskArray, afniModel, nvox_mod,
      options->outModelNoMask, errorString) ) {

    /* free and return */
    DSET_unload(dsetTest);
    if( options->testLabelFile[0] )
      freeRegressionLabelsAndTarget(&testLabels, target);
    freeDsetArray(dsetTest, dsetTestArray);
    freeDOCs(docsTest, nt);
    freeModel(model, afniModel, TEST);
    RETURN(1);
  }

  updateModel(model, afniModel, 0);

  /*----- ALLOCATE PREDICTION ARRAY --------*/
  if( (dist = (double *)malloc(sizeof(double)*nt)) == NULL ) {
    snprintf(errorString, LONG_STRING, "test_regression: "
        "Memory allocation for dist failed!");

    /* free and return */
    DSET_unload(dsetTest);
    if( options->testLabelFile[0] )
      freeRegressionLabelsAndTarget(&testLabels, target);
    freeDsetArray(dsetTest, dsetTestArray);
    freeDOCs(docsTest, nt);
    freeModel(model, afniModel, TEST);
    RETURN(1);
  }

  /*----- PREDICTION OUTPUT FILE -----*/
  snprintf(predictionsFile, LONG_STRING, "%s.1D", options->predFile);
  if( (fp = fopen(predictionsFile, "w" )) == NULL ) {
    snprintf(errorString, LONG_STRING,
        "Could not open file for writing predictions: %s", predictionsFile);

    /* free and return */
    DSET_unload(dsetTest);
    if( options->testLabelFile[0] )
      freeRegressionLabelsAndTarget(&testLabels, target);
    freeDsetArray(dsetTest, dsetTestArray);
    freeDOCs(docsTest, nt);
    freeModel(model, afniModel, TEST);
    IFree(dist);
    RETURN(1);
  }

  /*----- PERFORM TESTING -----*/
  /* JL May. 2010: Added testing for non-linear kernels */
  /* JL: removed a lossy (float) cast when storing into the double array */
  if (afniModel->kernel_type[0] == LINEAR) {
    for(j=0; j<nt; ++j) {
      dist_tmp = classify_example_linear(model,&docsTest[j]);
      dist[j] = dist_tmp;
    }
  }
  else { /* non-linear kernel */
    for(j=0; j<nt; ++j) {
      dist_tmp = classify_example(model,&docsTest[j]);
      dist[j] = dist_tmp;
    }
  }

  /*----- WRITE PREDICTIONS TO FILE -----*/
  /* with -nopredcensored, censored time points (cnsrs != 1) are skipped */
  for(j=0; j<nt; ++j) {
    if ( options->testLabelFile[0] ) {
      if (options->noPredCensor) {
        if( testLabels.cnsrs[j] == 1 ) fprintf(fp,"%.4g\n",dist[j]);
      }
      else fprintf(fp,"%.4g\n",dist[j]);
    }
    else fprintf(fp,"%.4g\n",dist[j]);
  }

  /*----- DETERMINE RMS ERROR -----*/
  if( (options->testLabelFile[0]) && (verbosity >= 1) ){
    rms=0;
    for(j=0; j<nt; ++j) {
      if ( testLabels.cnsrs[j] == 1 ) {
        rms+=(testLabels.lbls[j]-dist[j])*(testLabels.lbls[j]-dist[j]);
      }
    }

    /* JL: guard against division by zero when every time point is censored */
    nUncensored = testLabels.n - testLabels.n_cnsrs;
    if( nUncensored > 0 ) rms=sqrt(rms/nUncensored);
    else rms=0;

    INFO_message("--------------------------------------------------------------"
        "----------------\n++");

    /* NOTE(review): n_cnsrs is printed with %d -- confirm its declared type
     * is int (testLabels.n is long and uses %ld) */
    INFO_message("RMS error: %.2f (%d censored, %ld total)\n++",
        rms, testLabels.n_cnsrs, testLabels.n);

    INFO_message("--------------------------------------------------------------"
        "----------------\n++");
  }
  if(verbosity >= 1) INFO_message("Predictions written to %s\n", predictionsFile);
  fclose(fp);

  /* --- FREE MEMORY --- */
  DSET_unload(dsetTest);
  if( options->testLabelFile[0] )
    freeRegressionLabelsAndTarget(&testLabels, target);
  freeDsetArray(dsetTest, dsetTestArray);
  freeDOCs(docsTest, nt);
  freeModel(model, afniModel, TEST);
  IFree(dist);

  RETURN(0);
}
6786
6787
6788 /* SL & JL Feb. 2009: Included 'long *kernel_cache_size' as an argument to
6789 * support non-linear kernels. */
/* SL & JL Feb. 2009: Included 'long *kernel_cache_size' as an argument to
 * support non-linear kernels.
 *
 * Trains one svm-light classification model for every pairwise combination
 * of class categories found in the label file. For each combination the
 * training data and labels are extracted (censored time points removed),
 * converted to svm-light DOC structures (masked if a mask file was given),
 * and svm_learn_classification() is run. Results are accumulated in
 * afniModel and optionally in weight-vector maps, then written to disk as
 * an AFNI brick (unless -nomodelfile/-docfileonly was requested).
 *
 * Returns 0 on success. On failure a description is written into
 * errorString, all memory allocated here is released, and 1 is returned.
 */
int train_classification( MODEL *model, LEARN_PARM *learn_parm, KERNEL_PARM *kernel_parm,
    long *kernel_cache_size, ASLoptions *options, THD_3dim_dataset *dsetTrain,
    THD_3dim_dataset *dsetMask, MaskType *dsetMaskArrayPtr, int argc, char **argv,
    char * errorString )
{
  LABELS labels;               /* structure holding labels (class-categories)
                                * (input from user) */
  AFNI_MODEL afniModel;        /* holds everything required to write out
                                * model.Head */
  MODEL_MAPS maps;             /* holds the maps (e.g., weight-vector maps)
                                * for the bucket */

  LabelType*
    censoredTarget = NULL;     /* array to hold svm-light readable labels
                                * for current class-combination and 9999
                                * otherwise (named tmp_labels previously)*/
  LabelType*
    classTarget = NULL;        /* array to hold svm-light readable labels
                                * for current class-combination
                                * (named target previously)*/
  DatasetType**
    dsetTrainArray = NULL;     /* array to hold training dataset values
                                * for all time-points (JL: formerly holding
                                * what is now called dsetClassTrainArray) */
  DatasetType**
    dsetClassTrainArray = NULL; /* JL: array to hold training dataset values
                                * for specific class-combination
                                * (named dsetTrainArray previously) */
  DOC* docsClassTrain = NULL;  /* svm-light data structure used for training
                                * (JL: named docsTrain previously) */
  KERNEL_CACHE
    kernel_cache;              /* svm-light data structure holding kernel
                                * parameters */

  long nt          = 0;        /* number of time points in TRAIN dataset */
  long nvox        = 0;        /* number of voxels per time point in TRAIN
                                * dataset */
  long nvox_masked = 0;        /* number of masked voxels */

  long classCount  = 0;        /* in training loop, keeps track of
                                * current pairwise comb */
  long sampleCount = 0;        /* number of samples used in training */

  long i, cc, dd = 0;
  char docFileName[LONG_STRING]; /* naming of svm-light textfile output */


  ENTRY("train_classification");

  /* JL June 2011: Modified error handling: Passing error string as argument
   * to the calling function, allocated memory is freed, RETURN(1)
   * instead of ERROR_exit.
   *
   * JL March 2014: Removed byte data type restriction for mask dataset.
   */

  if (verbosity >= 1) INFO_message("\n++ CLASSIFICATION (training):\n++");


  /*----- LOAD TRAINING DATA --------*/
  /* if the caller did not pass an already-loaded dataset, open and load
   * the one named on the command line */
  if( dsetTrain == NULL ) {
    if( (dsetTrain = THD_open_one_dataset(options->trainFile)) == NULL ) {
      snprintf(errorString, LONG_STRING, "Failed to open training dataset: %s",
          options->trainFile ); RETURN(1);
    }
    DSET_load( dsetTrain );
  }
  else {
    if( !DSET_LOADED(dsetTrain) ) {
      if( (dsetTrain = THD_open_one_dataset(options->trainFile)) == NULL ) {
        snprintf(errorString, LONG_STRING, "Failed to open training dataset: %s",
            options->trainFile ); RETURN(1);
      }
      DSET_load( dsetTrain );
    }
  }

  nt = DSET_NUM_TIMES( dsetTrain );
  nvox = DSET_NVOX( dsetTrain );
  nvox_masked = nvox; /* this will be modified later if mask used */

  /* JL Sep. 2010: Some trivial error checking */
  if( nt < 2 ) { snprintf(errorString, LONG_STRING, "Need at least two "
      "briks in training dataset %s!", options->trainFile);

    /* free and return */
    DSET_unload(dsetTrain);
    RETURN(1);
  }
  if( nvox < 2 ) { snprintf(errorString, LONG_STRING, "Need at least two "
      "voxels in training dataset %s!", options->trainFile);

    /* free and return
     * (JL: removed a premature RETURN(1) that made this cleanup
     * unreachable and leaked the loaded dataset) */
    DSET_unload(dsetTrain);
    RETURN(1);
  }

  if(verbosity >= 1)
    INFO_message( "Number of time samples is %ld, and voxels %ld in training "
        "dataset.", nt, nvox );


  /*---- CREATE TRAINING DATASET ARRAY ----*/
  if( (dsetTrainArray = getAllocateDsetArray(dsetTrain, errorString)) == NULL ) {

    /* free and return */
    DSET_unload(dsetTrain);
    RETURN(1);
  }

  /*----- GET MASK ARRAY, IF SELECTED AND DETECT nvox_masked --------*/
  if( options->maskFile[0] ) {
    afniModel.mask_used = MASK_YES; /* JL */
    nvox_masked = 0;

    /* -- open dataset -- */
    if( (dsetMask = THD_open_one_dataset(options->maskFile)) == NULL ) {
      snprintf(errorString, LONG_STRING, "Failed to open mask file: '%s'",
          options->maskFile );

      /* free and return - */
      freeDsetArray(dsetTrain, dsetTrainArray);
      DSET_unload(dsetTrain);
      RETURN(1);
    }
    DSET_load(dsetMask);

    /* JL May 2010: Make sure mask and training dataset have the same number of
     * voxels */
    if( DSET_NVOX( dsetMask ) != nvox) {
      snprintf(errorString, LONG_STRING, "Number of voxels in mask file: '%s' "
          "and training dataset: '%s' do not match", options->maskFile,
          options->trainFile);

      /* free and return */
      freeDsetArray(dsetTrain, dsetTrainArray);
      DSET_unload(dsetTrain);
      DSET_unload(dsetMask);
      RETURN(1);
    }

    /* JL Mar 2014: Make sure we have only one brick */
    if ( DSET_NUM_TIMES(dsetMask) > 1 ) {
      snprintf(errorString, LONG_STRING, "Mask file: '%s' can only contain "
          "a single brick!", options->maskFile);
      /* free and return */
      freeDsetArray(dsetTrain, dsetTrainArray);
      DSET_unload(dsetTrain);
      DSET_unload(dsetMask);
      RETURN(1);
    }

    /* -- get pointer to mask array -- */
    if( (dsetMaskArrayPtr = getAllocateMaskArray(dsetMask, errorString)) == NULL ) {

      /* free and return
       * (JL: also unload the mask dataset here, consistent with the other
       * mask error paths above) */
      freeDsetArray(dsetTrain, dsetTrainArray);
      DSET_unload(dsetTrain);
      DSET_unload(dsetMask);
      RETURN(1);
    }
    DSET_unload(dsetMask);

    /* -- count number of voxels in mask -- */
    for( i=0 ; i<nvox ; ++i ) {
      if( dsetMaskArrayPtr[i] ) nvox_masked++;
    }
    if(verbosity >= 1) {
      INFO_message( "Number of non-zero mask voxels is: %ld\n", nvox_masked );
    }

    /* JL Sep. 2010: Some trivial error checking */
    if( nvox_masked < 2 ) {
      snprintf(errorString, LONG_STRING, "Need at least two voxels in mask "
          "dataset '%s'!", options->maskFile);

      /* free and return */
      freeDsetArray(dsetTrain, dsetTrainArray);
      DSET_unload(dsetTrain);
      IFree(dsetMaskArrayPtr);
      RETURN(1);
    }
  }
  else if( !(options->outModelNoMask) ) {
    snprintf(errorString, LONG_STRING, "No mask file specified (use -mask "
        "file). If not using a mask file must use option -nomodelmask!");

    /* free and return */
    freeDsetArray(dsetTrain, dsetTrainArray);
    DSET_unload(dsetTrain);
    RETURN(1);
  }
  else afniModel.mask_used = MASK_NO;


  /*----- RETRIEVE AND CHECK LABELS --------------*/
  labels.n = nt;

  /* --- retrieve training labels --- */
  if( getAllocateClassificationLabels(&labels,options->labelFile,
      options->censorFile, errorString) ) {

    /* free and return */
    freeDsetArray(dsetTrain, dsetTrainArray);
    DSET_unload(dsetTrain);
    if( options->maskFile[0] ) IFree(dsetMaskArrayPtr);
    RETURN(1);
  }

  /* -- JL Apr. 2010: Added some trivial error checking -- */
  if (labels.n_classes < 2) {
    snprintf(errorString, LONG_STRING, "There is only one class in labelfile: "
        "'%s'. Need at least two!", options->labelFile);

    /* free and return */
    freeDsetArray(dsetTrain, dsetTrainArray);
    DSET_unload(dsetTrain);
    if( options->maskFile[0] ) IFree(dsetMaskArrayPtr);
    freeClassificationLabels(&labels);
    RETURN(1);
  }

  if( labels.n != nt ) {
    snprintf(errorString, LONG_STRING, "Number of labels do not match the "
        "length of the train dataset:\n"
        "   labelfile: '%s' contains %ld labels, but the \n"
        "   trainvol:  '%s' contains %ld bricks.", options->labelFile,
        labels.n, options->trainFile, nt);

    /* free and return */
    freeDsetArray(dsetTrain, dsetTrainArray);
    DSET_unload(dsetTrain);
    if( options->maskFile[0] ) IFree(dsetMaskArrayPtr);
    freeClassificationLabels(&labels);
    RETURN(1);
  }

  /*----- ALLOCATE afniModel --------------*/
  if( allocateAfniModel(&afniModel, &labels, options, errorString) ) {

    /* free and return */
    freeDsetArray(dsetTrain, dsetTrainArray);
    DSET_unload(dsetTrain);
    if( options->maskFile[0] ) IFree(dsetMaskArrayPtr);
    freeClassificationLabels(&labels);
    RETURN(1);
  }

  /*----- ALLOCATE censoredTarget --------------*/
  if( (censoredTarget = (LabelType*)malloc(sizeof(LabelType)*labels.n)) == NULL ) {
    snprintf(errorString, LONG_STRING, "train_classification: "
        "Memory allocation for censoredTarget failed");

    /* free and return */
    freeDsetArray(dsetTrain, dsetTrainArray);
    DSET_unload(dsetTrain);
    if( options->maskFile[0] ) IFree(dsetMaskArrayPtr);
    freeClassificationLabels(&labels);
    freeAfniModel(&afniModel);
    RETURN(1);
  }

  /*----- ALLOCATE maps -----*/
  if( options->modelWeightFile[0] ) {
    if( allocateModelMaps(&maps, (long)labels.n_classes, nvox, options->kernelName) ) {
      snprintf(errorString, LONG_STRING, "train_classification: "
          "Memory allocation for model maps failed!");

      /* free and return */
      freeDsetArray(dsetTrain, dsetTrainArray);
      DSET_unload(dsetTrain);
      if( options->maskFile[0] ) IFree(dsetMaskArrayPtr);
      freeClassificationLabels(&labels);
      freeAfniModel(&afniModel);
      IFree(censoredTarget);
      RETURN(1);
    }
  }

  /*----- SVMLEARN FOR ALL COMBINATIONS OF PAIRWISE TRAINING --------*/
  /* cc indexes -1 class; dd indexes +1 class - over multiple classes */
  classCount = 0; /* could figure it out from cc and dd, but easier just to keep track */

  for( cc=0 ; cc<labels.n_classes-1; ++cc ) {
    for( dd=cc+1 ; dd<labels.n_classes; ++dd ) {
      if(verbosity >= 1) {
        INFO_message("\n++ Preparing classes %d and %d:",
            labels.class_list[cc], labels.class_list[dd]);

        /* NOTE: fflush() runs unconditionally (only MCHECK is guarded) */
        if (verbosity > 1) MCHECK ; fflush(stdout) ; /* ZSS */
      }

      if( getCensoredClassTarget(censoredTarget, &sampleCount, &labels,
          cc, dd, TRAIN, errorString) ) {

        /* free and return */
        freeDsetArray(dsetTrain, dsetTrainArray);
        DSET_unload(dsetTrain);
        if( options->maskFile[0] ) IFree(dsetMaskArrayPtr);
        freeClassificationLabels(&labels);
        freeAfniModel(&afniModel);
        IFree(censoredTarget);
        if( options->modelWeightFile[0] ) freeModelMaps(&maps);
        RETURN(1);
      }

      if(verbosity >= 1) INFO_message( "SampleCount = %ld\n", sampleCount );

      /*----- ALLOCATE MEMORY FOR SVM-LIGHT ARRAYS -----------*/
      if( (docsClassTrain = allocateDOCs(sampleCount, nvox_masked)) == NULL ) {
        snprintf(errorString, LONG_STRING, "train_classification: "
            "Memory allocation for docsClassTrain failed!");

        /* free and return */
        freeDsetArray(dsetTrain, dsetTrainArray);
        DSET_unload(dsetTrain);
        if( options->maskFile[0] ) IFree(dsetMaskArrayPtr);
        freeClassificationLabels(&labels);
        freeAfniModel(&afniModel);
        IFree(censoredTarget);
        if( options->modelWeightFile[0] ) freeModelMaps(&maps);
        RETURN(1);
      }

      if( (classTarget = (LabelType*)malloc(sizeof(LabelType)*sampleCount)) == NULL ) {
        snprintf(errorString, LONG_STRING, "train_classification: "
            "Memory allocation for classTarget failed!");

        /* free and return */
        freeDsetArray(dsetTrain, dsetTrainArray);
        DSET_unload(dsetTrain);
        if( options->maskFile[0] ) IFree(dsetMaskArrayPtr);
        freeClassificationLabels(&labels);
        freeAfniModel(&afniModel);
        IFree(censoredTarget);
        if( options->modelWeightFile[0] ) freeModelMaps(&maps);
        freeDOCs(docsClassTrain, sampleCount);
        RETURN(1);
      }

      if( (model=(MODEL *)malloc(sizeof(MODEL))) == NULL ) {
        snprintf(errorString, LONG_STRING, "train_classification: "
            "Memory allocation for model structure failed!");

        /* free and return */
        freeDsetArray(dsetTrain, dsetTrainArray);
        DSET_unload(dsetTrain);
        if( options->maskFile[0] ) IFree(dsetMaskArrayPtr);
        freeClassificationLabels(&labels);
        freeAfniModel(&afniModel);
        IFree(censoredTarget);
        if( options->modelWeightFile[0] ) freeModelMaps(&maps);
        freeDOCs(docsClassTrain, sampleCount);
        IFree(classTarget);
        RETURN(1);
      }

      /*----- GET TRAINING ARRAY AND CLASSTARGET FOR CURRENT CLASS COMBINATION -----*/
      if( (dsetClassTrainArray = Allocate2DT( sampleCount, nvox)) == NULL ) {
        snprintf(errorString, LONG_STRING, "train_classification: "
            "Memory allocation for dsetClassTrainArray failed!");

        /* free and return
         * (JL: also release the model structure allocated just above --
         * it leaked previously) */
        freeDsetArray(dsetTrain, dsetTrainArray);
        DSET_unload(dsetTrain);
        if( options->maskFile[0] ) IFree(dsetMaskArrayPtr);
        freeClassificationLabels(&labels);
        freeAfniModel(&afniModel);
        IFree(censoredTarget);
        if( options->modelWeightFile[0] ) freeModelMaps(&maps);
        freeDOCs(docsClassTrain, sampleCount);
        IFree(classTarget);
        IFree(model);
        RETURN(1);
      }

      getClassTrainArrayAndTarget( dsetTrainArray, censoredTarget,
          dsetClassTrainArray, classTarget, nt, nvox );

      /*----- ALPHA FILE OUTPUT -----*/
      /* JL Aug. 2010: Changed how the alphas are written to file.
       * Please read the comments in addToAfniModel for more info  */

      /* ---- MASK DATA AND CONVERT TO SVM-LIGHT DOC STRUCTURE */
      afni_dset_to_svm_doc( docsClassTrain, dsetClassTrainArray, dsetMaskArrayPtr,
          sampleCount, nvox, nvox_masked);

      /* JL Apr. 2010: No training if we want to write out the svm-light
       * formated textfile only */
      if ( !options->docFileOnly[0] ) {

        /* ---- INITIALIZE KERNEL PARAMETERS & TRAIN ----*/
        /* SL & JL Feb. 2009: Added this part to initialize the kernel parameters
         * in case of non-linear kernels. */
        if(kernel_parm->kernel_type == LINEAR) {
          /* don't need the cache if linear*/

          svm_learn_classification( docsClassTrain, classTarget, sampleCount,
              nvox_masked, learn_parm, kernel_parm, NULL, model );
        }
        else {
          /* Always get a new kernel cache. It is not possible to use the
           * same cache for two different training runs */
          kernel_cache_init(&kernel_cache,sampleCount,*kernel_cache_size);

          svm_learn_classification( docsClassTrain, classTarget, sampleCount,
              nvox_masked, learn_parm, kernel_parm, &kernel_cache, model );

          /* Free the memory used for the cache. */
          kernel_cache_cleanup(&kernel_cache);

        }
        fflush(stdout);

        /* ---- SAVE RESULTS FOR CURRENT CLASS COMBINATION ---*/
        addToAfniModel(&afniModel, model, learn_parm,  censoredTarget, options,
            classCount, sampleCount, labels.class_list[cc], labels.class_list[dd]);

        if( options->modelWeightFile[0] ) {
          addToModelMap_bucket(&maps, &afniModel, dsetTrainArray,
              dsetMaskArrayPtr, options->maskFile, classCount);
        }
      }

      /* ---- WRITE DATASET IN SVM-LIGHT FORMATED TEXTFILE  ----*/
      if (options->docFile[0]) {
        if (labels.n_classes == 2) {
          snprintf( docFileName, LONG_STRING, "%s.svml", options->docFile);
        }
        else {
          snprintf( docFileName, LONG_STRING, "%s_%d_%d.svml", options->docFile,
              labels.class_list[cc], labels.class_list[dd] );
        }
        write_svmLight_doc(docsClassTrain, sampleCount, nvox_masked, classTarget,
            docFileName, VERSION_SVMLIGHT);
      }

      ++classCount;

      /* NOTE(review): with -docfileonly the bare MODEL struct allocated
       * above is not released here; freeModel() semantics for an untrained
       * model are unclear -- confirm before changing. */
      if( !options->docFileOnly[0] ) freeModel(model, &afniModel, TRAIN);
      freeDOCs(docsClassTrain, sampleCount);
      free2DT(dsetClassTrainArray, sampleCount);
      IFree(classTarget);
    }
  }

  /* ----- WRITE MODEL AND BUCKET TO DISC ----- */
  if( !options->docFileOnly[0] && !options->noModelOut ) {
    /* JL May 2010: Modified writeModelBrick to write the model and the mask into
     * a single dataset */
    if( writeModelBrik(&afniModel, dsetTrain, dsetTrainArray, dsetMaskArrayPtr, options,
        options->modelFile, argc, argv, errorString) ) {

      /* free and return */
      freeDsetArray(dsetTrain, dsetTrainArray);
      DSET_unload(dsetTrain);
      if( options->maskFile[0] ) IFree(dsetMaskArrayPtr);
      freeClassificationLabels(&labels);
      freeAfniModel(&afniModel);
      IFree(censoredTarget);
      if( options->modelWeightFile[0] ) freeModelMaps(&maps);
      RETURN(1);
    }
  }

  if( (options->modelWeightFile[0]) && (!options->docFileOnly[0]) ) {
    if( writeModelMap_bucket(&maps, dsetMaskArrayPtr, dsetTrain, options->maskFile,
        options->modelWeightFile, afniModel.b, afniModel.combinations,
        options, argc, argv, errorString) ) {

      /* free and return */
      freeDsetArray(dsetTrain, dsetTrainArray);
      DSET_unload(dsetTrain);
      if( options->maskFile[0] ) IFree(dsetMaskArrayPtr);
      freeClassificationLabels(&labels);
      freeAfniModel(&afniModel);
      IFree(censoredTarget);
      freeModelMaps(&maps);
      RETURN(1);
    }
  }

  /* free memory */
  freeDsetArray(dsetTrain, dsetTrainArray);
  DSET_unload(dsetTrain);
  if( options->maskFile[0] ) IFree(dsetMaskArrayPtr);
  freeClassificationLabels(&labels);
  freeAfniModel(&afniModel);
  IFree(censoredTarget);
  if( options->modelWeightFile[0] ) freeModelMaps(&maps);

  RETURN(0);
}
7284
7285 /* JL May 2009: Added this function for sv-regression.
7286 * It is very similar to train_classification() (a lot of code has been recycled).
7287 * However, major differences are:
7288 *
7289 * - No need for multi-class
7290 * - New function to read in the labelfile (getAllocateRegressionLabels())
7291 * - New function to get the array with training data
7292 * (getAllocateCensoredRegressionArray())
7293 * - Using svm-light's function call: svm_learn_regression() instead of
7294 * svm_learn_classification()
7295 */
/* Train a support vector regression (SVR) model from an AFNI dataset.
 *
 * model / learn_parm / kernel_parm / kernel_cache_size:
 *     svm-light structures receiving the learned model and holding the
 *     training and kernel parameters.
 * options:        3dsvm command-line options (file names and flags).
 * dsetTrain:      training dataset; opened from options->trainFile here
 *                 if NULL or not yet loaded (real-time use hands it in).
 * dsetMask:       mask dataset (opened here when options->maskFile is set).
 * dsetMaskArrayPtr: receives the mask voxel array when a mask is used.
 * argc, argv:     passed through so the command line can be recorded in
 *                 the output brik header.
 * errorString:    on failure receives a human-readable description
 *                 (LONG_STRING bytes); the caller reports it.
 *
 * Returns 0 on success, 1 on error. Every error path frees what was
 * allocated up to that point before returning.
 */
int train_regression(MODEL *model, LEARN_PARM *learn_parm,
    KERNEL_PARM *kernel_parm, long *kernel_cache_size, ASLoptions *options,
    THD_3dim_dataset *dsetTrain, THD_3dim_dataset *dsetMask,
    MaskType *dsetMaskArrayPtr, int argc, char **argv, char *errorString)
{

  LABELS labels;
  AFNI_MODEL afniModel;            /* holds everything required to write out
                                      model.Head */
  MODEL_MAPS maps;                 /* holds the maps (e.g., weight-vector maps)
                                      for the bucket */

  LabelType *target = NULL;        /* array to hold labels for svm-light */

  DatasetType**
    dsetTrainArray = NULL;         /* array to hold training dataset values */

  DatasetType**
    dsetTrainArrayCensored = NULL; /* training dataset values with censored
                                      timepoints removed */

  DOC    *docsTrain = NULL;        /* svm-light data structure used for training */
  KERNEL_CACHE kernel_cache;       /* svm-light data structure holding kernel
                                    * parameters */


  long nt = 0;                     /* number of time points in TRAIN dataset */
  long nvox = 0;                   /* number of voxels per time point in TRAIN
                                      dataset */
  long nvox_masked = 0;            /* number of voxels in mask dataset */

  long sampleCount = 0;            /* number of samples used in training */

  long i = 0;
  char docFileName[LONG_STRING];   /* naming of svm-light textfile output */


  ENTRY("train_regression");

  /* JL June 2011: Modified error handling: Passing error string as argument
   * to the calling function, allocated memory is freed, RETURN(1)
   * instead of ERROR_exit.
   *
   * JL March 2014: Removed byte data type restriction for mask dataset.
   *
   */

  if (verbosity >= 1) INFO_message("\n++ REGRESSION (training):\n++");


  /*----- LOAD TRAINING DATA ---*/
  /* dsetTrain may already be open (real-time training); otherwise open and
   * load it from the file given on the command line */
  if( dsetTrain == NULL ) {
    if( (dsetTrain = THD_open_one_dataset(options->trainFile)) == NULL ) {
      snprintf(errorString, LONG_STRING, "Failed to open training dataset: %s",
          options->trainFile ); RETURN(1);
    }
    DSET_load( dsetTrain );
  }
  else {
    if( !DSET_LOADED(dsetTrain) ) {
      if( (dsetTrain = THD_open_one_dataset(options->trainFile)) == NULL ) {
        snprintf(errorString, LONG_STRING, "Failed to open training dataset: %s",
            options->trainFile ); RETURN(1);
      }
      DSET_load( dsetTrain );
    }
  }

  nt = DSET_NUM_TIMES( dsetTrain );
  nvox = DSET_NVOX( dsetTrain );
  nvox_masked = nvox; /* this will be modified later if mask used */

  if(verbosity >= 1) {
    INFO_message( "Number of time samples is %ld, and voxels %ld in training "
        "dataset.", nt, nvox );
  }

  /*------ GET MASK ARRAY, IF SELECTED AND DETECT nvox_masked ---*/
  if( options->maskFile[0] ) {
    afniModel.mask_used = MASK_YES; /* JL */
    nvox_masked = 0;

    if( (dsetMask = THD_open_one_dataset(options->maskFile)) == NULL ) {
      snprintf(errorString, LONG_STRING,
          "Failed to open mask file: %s", options->maskFile );

      /* free and return */
      DSET_unload(dsetTrain);
      RETURN(1);
    }
    DSET_load(dsetMask);

    /* JL April 2014: Make sure mask and training dataset have the same number of
     * voxels */
    if( DSET_NVOX( dsetMask ) != nvox) {
      snprintf(errorString, LONG_STRING, "Number of voxels in mask file: '%s' "
          "and training dataset: '%s' do not match", options->maskFile,
          options->trainFile);

      /* free and return */
      DSET_unload(dsetTrain);
      DSET_unload(dsetMask);
      RETURN(1);
    }

    /* JL Mar 2014: Make sure we have only one brick */
    if ( DSET_NUM_TIMES(dsetMask) > 1 ) {
      snprintf(errorString, LONG_STRING, "Mask file: '%s' can only contain "
          "a single brick!", options->maskFile);
      /* free and return */
      DSET_unload(dsetTrain);
      DSET_unload(dsetMask);
      RETURN(1);
    }

    if( (dsetMaskArrayPtr = getAllocateMaskArray(dsetMask, errorString)) == NULL ) {
      /* NOTE(review): dsetMask is not unloaded on this path, unlike the error
       * paths above — presumably getAllocateMaskArray cleans up on failure;
       * confirm, otherwise this leaks the loaded mask dataset. */

      /* free and return */
      DSET_unload(dsetTrain);
      RETURN(1);
    }
    DSET_unload(dsetMask);


    /* count the non-zero mask voxels to size the svm-light feature vectors */
    for( i=0 ; i<nvox ; ++i ) {
      if( dsetMaskArrayPtr[i] ) nvox_masked++;
    }
    if(verbosity >= 1)
      INFO_message( "Number of non-zero mask voxels is: %ld\n", nvox_masked );
  }
  else if( !(options->outModelNoMask) ) {
    snprintf(errorString, LONG_STRING,
        "No mask file specified (use -mask file). "
        "If not using a mask file must use option -nomodelmask");

    /* free and return */
    DSET_unload(dsetTrain);
    RETURN(1);
  }
  else afniModel.mask_used = MASK_NO;

  /*----- RETRIEVE LABELS AND GET SVM-LIGHT TARGET ----*/
  if( (getAllocateRegressionLabelsAndTarget(&labels, &target,
        options->labelFile, options->censorFile, errorString)) ) {

    /* free and return */
    DSET_unload(dsetTrain);
    if( options->maskFile[0] ) IFree(dsetMaskArrayPtr);
    RETURN(1);
  }

  if( labels.n != nt ) {
    snprintf(errorString, LONG_STRING,
        "Number of labels %ld in labelfile: %s does not match\n"
        "  number of timepoints %ld in train dataset: %s!",
        labels.n, options->labelFile, nt, options->trainFile);

    /* free and return */
    DSET_unload(dsetTrain);
    if( options->maskFile[0] ) IFree(dsetMaskArrayPtr);
    freeRegressionLabelsAndTarget(&labels, target);
    RETURN(1);
  }

  sampleCount=labels.n-labels.n_cnsrs; /* number of uncensored timepoints */

  /*---- GET TRAINING ARRAY -----*/
  if( (dsetTrainArray = getAllocateDsetArray(dsetTrain, errorString)) == NULL ) {

    /* free and return */
    DSET_unload(dsetTrain);
    if( options->maskFile[0] ) IFree(dsetMaskArrayPtr);
    freeRegressionLabelsAndTarget(&labels, target);
    RETURN(1);
  }

  /*----- GET TRAINING ARRAY WITHOUT CENSORED TIMEPOINTS -----*/
  if( (dsetTrainArrayCensored = getAllocateCensoredRegressionArray(dsetTrainArray,
        &labels, nvox)) == NULL ) {

    snprintf(errorString, LONG_STRING, "train_regression: "
        "Memory allocation for dsetTrainArrayCensored failed!");

    /* free and return */
    freeDsetArray(dsetTrain, dsetTrainArray);
    DSET_unload(dsetTrain);
    if( options->maskFile[0] ) IFree(dsetMaskArrayPtr);
    freeRegressionLabelsAndTarget(&labels, target);
    RETURN(1);
  }

  /*----- ALLOCATE afniModel -----*/
  if( allocateAfniModel(&afniModel, &labels, options, errorString) ) {

    /* free and return */
    freeDsetArray(dsetTrain, dsetTrainArray);
    DSET_unload(dsetTrain);
    if( options->maskFile[0] ) IFree(dsetMaskArrayPtr);
    freeCensoredRegressionArray(dsetTrainArrayCensored, &labels);
    freeRegressionLabelsAndTarget(&labels, target);
    RETURN(1);
  }

  /*----- ALLOCATE maps ------*/
  if( options->modelWeightFile[0] ) {
    if( allocateModelMaps(&maps, (long)labels.n_classes, nvox, options->kernelName) ) {
      snprintf(errorString, LONG_STRING, "train_regression: "
          "Memory allocation for model maps failed!");

      /* free and return */
      freeDsetArray(dsetTrain, dsetTrainArray);
      DSET_unload(dsetTrain);
      if( options->maskFile[0] ) IFree(dsetMaskArrayPtr);
      freeCensoredRegressionArray(dsetTrainArrayCensored, &labels);
      freeRegressionLabelsAndTarget(&labels, target);
      freeAfniModel(&afniModel);
      RETURN(1);
    }
  }

  /*----- ALLOCATE SVM-LIGHT DOCs ------*/
  if( (docsTrain = allocateDOCs(sampleCount, nvox_masked)) == NULL ) {
    /* JL: error message used to say "train_classification" (copy-paste);
     * corrected to name this function */
    snprintf(errorString, LONG_STRING, "train_regression: "
        "Memory allocation for docsTrain failed!");

    /* free and return */
    freeDsetArray(dsetTrain, dsetTrainArray);
    DSET_unload(dsetTrain);
    if( options->maskFile[0] ) IFree(dsetMaskArrayPtr);
    freeCensoredRegressionArray(dsetTrainArrayCensored, &labels);
    freeRegressionLabelsAndTarget(&labels, target);
    freeAfniModel(&afniModel);
    if( options->modelWeightFile[0] ) freeModelMaps(&maps);
    RETURN(1);
  }

  /*----- CONVERT TRAINING ARRAY TO SVM-LIGHT DOC STRUCTURE ------*/
  afni_dset_to_svm_doc( docsTrain, dsetTrainArrayCensored, dsetMaskArrayPtr,
      sampleCount, nvox, nvox_masked );

  /* JL Apr. 2010: No training if we want to write out the svm-light
   * formated textfile only */
  if ( !options->docFileOnly[0] ) {

    /*----- PERFORM THE SV-REGRESSION -----*/
    if ( !strcmp(options->kernelName, "linear") ) {

      /* linear kernel: no kernel cache needed */
      svm_learn_regression ( docsTrain, target, sampleCount, nvox_masked,
          learn_parm, kernel_parm, NULL, model);

    }
    else { /* non-linear kernel */
      /* svm-light duplicates the docs internally for regression,
       * hence the cache is sized for 2*sampleCount */
      kernel_cache_init(&kernel_cache, 2*sampleCount, *kernel_cache_size);

      svm_learn_regression ( docsTrain, target, sampleCount, nvox_masked,
          learn_parm, kernel_parm, &kernel_cache, model);

      /* Free the memory used for the cache. */
      kernel_cache_cleanup(&kernel_cache);
    }
  }

  /*----- UPDATE AFNI-MODEL -----*/
  if ( !options->docFileOnly[0] ) {
    addToAfniModel(&afniModel, model, learn_parm, &(labels.cnsrs[0]), options,
        0, sampleCount, 0, 0);
  }

  /*---- UPDATE MODEL-MAPS -----*/
  if( (options->modelWeightFile[0]) && (!options->docFileOnly[0]) ) {
    addToModelMap_bucket(&maps, &afniModel, dsetTrainArray, dsetMaskArrayPtr,
        options->maskFile, 0);
  }

  /*---- WRITE OUTPUT FILES TO DISC ----*/
  /* might not be necessary if testing and training are performed all at once */

  /* --- write afni model --- */
  if ( !options->docFileOnly[0] && !options->noModelOut ) {
    /* JL May 2010: Modified writeModelBrik to write the model and the mask into
     * a single dataset */
    if( writeModelBrik(&afniModel, dsetTrain, dsetTrainArray, dsetMaskArrayPtr, options,
          options->modelFile, argc, argv, errorString) ) {

      /* free and return */
      freeDsetArray(dsetTrain, dsetTrainArray);
      DSET_unload(dsetTrain);
      if( options->maskFile[0] ) IFree(dsetMaskArrayPtr);
      freeCensoredRegressionArray(dsetTrainArrayCensored, &labels);
      freeRegressionLabelsAndTarget(&labels, target);
      freeModel(model, &afniModel, TRAIN);
      freeAfniModel(&afniModel);
      if( options->modelWeightFile[0] ) freeModelMaps(&maps);
      freeDOCs(docsTrain, sampleCount);
      RETURN(1);
    }
  }

  /* --- write model maps --- */
  if( (options->modelWeightFile[0]) && (!options->docFileOnly[0]) ) {
    if( writeModelMap_bucket(&maps, dsetMaskArrayPtr, dsetTrain, options->maskFile,
          options->modelWeightFile, afniModel.b, (long)afniModel.combinations,
          options, argc, argv, errorString) ) {

      /* free and return */
      freeDsetArray(dsetTrain, dsetTrainArray);
      DSET_unload(dsetTrain);
      if( options->maskFile[0] ) IFree(dsetMaskArrayPtr);
      freeCensoredRegressionArray(dsetTrainArrayCensored, &labels);
      freeRegressionLabelsAndTarget(&labels, target);
      freeModel(model, &afniModel, TRAIN);
      freeAfniModel(&afniModel);
      freeModelMaps(&maps);
      freeDOCs(docsTrain, sampleCount);
      RETURN(1);
    }
  }

  /* --- svm-light textfile ---*/
  if (options->docFile[0]) {
    snprintf( docFileName, LONG_STRING, "%s.svml", options->docFile);
    write_svmLight_doc(docsTrain, sampleCount, nvox_masked, target,
        docFileName, VERSION_SVMLIGHT);
  }

  /*----- FREE MEMORY -----*/
  freeDsetArray(dsetTrain, dsetTrainArray);
  DSET_unload(dsetTrain);
  if( options->maskFile[0] ) IFree(dsetMaskArrayPtr);
  freeCensoredRegressionArray(dsetTrainArrayCensored, &labels);
  freeRegressionLabelsAndTarget(&labels, target);
  if (!options->docFileOnly[0]) freeModel(model, &afniModel, TRAIN);
  freeAfniModel(&afniModel);
  if( options->modelWeightFile[0] ) freeModelMaps(&maps);
  freeDOCs(docsTrain, sampleCount);


  RETURN(0);
}
7634
/* Cameron - modified this function because it wasn't correctly
   identifying missing arguments. Is this an OSX vs
   linux thing? */
7638 /* JL Sep. 2009: Error checking for options with argument.
7639 * Avoid out of bound error if last option and no argument
7640 * ppi = ++i */
ppi(int argc,int i,char ** argv)7641 int ppi (int argc, int i, char **argv)
7642 {
7643
7644 ENTRY("ppi");
7645
7646 if( i >= argc-1 )
7647 {
7648 ERROR_exit("No argument after %s!", argv[i]);
7649 }
7650
7651 /* fprintf(stderr, "finding arg for _%s_ [_%s_] %d (%d)\n",
7652 argv[i], argv[i+1],
7653 i, argc); */
7654 i++;
7655
7656 /* CC added the strtod so we can use negative numbers */
7657 if (( argv[i][0] == '-' ) && ( strtod( argv[i], (char**)NULL ) >=0 ))
7658 {
7659 ERROR_exit("Argument for %s must not start with '-'!\n", argv[i-1]);
7660 }
7661
7662 RETURN(i);
7663 }
7664
7665
7666
input_parse(int argc,char * argv[],long * main_verbosity,long * kernel_cache_size,LEARN_PARM * learn_parm,KERNEL_PARM * kernel_parm,ASLoptions * optionsData,enum modes * mode,int * svm_type,char * errorString)7667 int input_parse(int argc, char *argv[], long *main_verbosity,
7668 long *kernel_cache_size, LEARN_PARM *learn_parm, KERNEL_PARM *kernel_parm,
7669 ASLoptions* optionsData, enum modes *mode, int *svm_type, char *errorString)
7670 {
7671 long i;
7672 char type[200]; /* svm-light type (REGRESSION, CLASSIFICATION or RANKING) */
7673 int parseFlag = 0; /*ZSS: init to 0*/
7674 int aFlag = 0;
7675 int alphaFlag = 0;
7676 int tFlag = 0; /* JL: flag for svm-light option -t (kernel_type) */
7677 int zFlag = 0; /* JL: flag for svm-light option -z (type) */
7678 int typeFlag = 0; /* JL: flag for 3dsvm options -type */
7679 *mode = NOTHING;
7680
7681
7682 ENTRY("input_parse");
7683
7684
7685 /* TODO: The definitions bellow should be a header entry ...*/
7686 /* svm-light defaults */
7687 strncpy (learn_parm->predfile, "trans_predictions", 200);
7688 strncpy (learn_parm->alphafile, "", 200);
7689 (*main_verbosity)=1;
7690 verbosity=1; /*svm_light verbosity which is a little tricky as a static
7691 global and now the primary variable for functions in this file. */
7692 learn_parm->biased_hyperplane=1;
7693 learn_parm->remove_inconsistent=0;
7694 learn_parm->skip_final_opt_check=0;
7695 learn_parm->svm_maxqpsize=10;
7696 learn_parm->svm_newvarsinqp=0;
7697 learn_parm->svm_iter_to_shrink=-9999;
7698 (*kernel_cache_size)=40;
7699 learn_parm->svm_c=100.0;
7700 learn_parm->eps=0.1;
7701 learn_parm->transduction_posratio=-1.0;
7702 learn_parm->svm_costratio=1.0;
7703 learn_parm->svm_costratio_unlab=1.0;
7704 learn_parm->svm_unlabbound=1E-5;
7705 learn_parm->epsilon_crit=0.001;
7706 learn_parm->epsilon_a=1E-15;
7707 learn_parm->compute_loo=0;
7708 learn_parm->rho=1.0;
7709 learn_parm->xa_depth=0;
7710 /* JL July 2011: Added maximum number of iterations */
7711 learn_parm->max_iterations=1000000;
7712 kernel_parm->kernel_type=LINEAR;
7713 kernel_parm->poly_degree=3;
7714 kernel_parm->rbf_gamma=1.0;
7715 kernel_parm->coef_lin=1.0;
7716 kernel_parm->coef_const=1.0;
7717 strncpy(kernel_parm->custom,"empty", 50);
7718 strncpy(type,"c", 200);
7719
7720 /* 3dsvm specific */
7721 strncpy(optionsData->labelFile, "", LONG_STRING);
7722 strncpy(optionsData->censorFile, "", LONG_STRING);
7723 strncpy(optionsData->trainFile, "", LONG_STRING);
7724 strncpy(optionsData->maskFile, "", LONG_STRING);
7725 strncpy(optionsData->modelFile, "", LONG_STRING);
7726 strncpy(optionsData->docFile, "", LONG_STRING);
7727 strncpy(optionsData->docFileOnly, "", LONG_STRING);
7728 strncpy(optionsData->multiclass, "", LONG_STRING);
7729 strncpy(optionsData->kernelName, "", LONG_STRING);
7730 strncpy(optionsData->modelAlphaFile, "", LONG_STRING);
7731 strncpy(optionsData->modelWeightFile, "", LONG_STRING);
7732 strncpy(optionsData->testFile, "", LONG_STRING);
7733 strncpy(optionsData->testLabelFile, "", LONG_STRING);
7734 strncpy(optionsData->predFile, "pred", LONG_STRING);
7735 strncpy(optionsData->svmType, "", LONG_STRING);
7736 strncpy(optionsData->rtIP, "", LONG_STRING);
7737 optionsData->outModelNoMask = 0;
7738 optionsData->noModelOut = 0;
7739 optionsData->noPredDetrend = 0;
7740 optionsData->classout = 0;
7741 optionsData->noPredCensor = 0;
7742 optionsData->noPredScale = 0;
7743 optionsData->linearWmap = 0;
7744 optionsData->rtTrain = 0;
7745 optionsData->rtTest = 0;
7746 optionsData->rtPort = 0;
7747
7748 for( i=1; i<argc; ++i ) {
7749 parseFlag = 0;
7750
7751 if ( argv[i][0] != '-' ) { snprintf(errorString, LONG_STRING,
7752 "Option %s must start with '-'!", argv[i]); RETURN(1); }
7753
7754 /* svm-light options: */
7755 if( !strcmp(argv[i],"-z") ) { parseFlag=1; i=ppi(argc,i,argv);
7756 strncpy(type,argv[i], 200); zFlag=1; }
7757 if( !strcmp(argv[i],"-v") ) { parseFlag=1; i=ppi(argc,i,argv);
7758 (*main_verbosity)=atol(argv[i]); verbosity = *main_verbosity; }
7759 if( !strcmp(argv[i],"-b") ) { parseFlag=1; i=ppi(argc,i,argv);
7760 learn_parm->biased_hyperplane=atol(argv[i]); }
7761 if( !strcmp(argv[i],"-i") ) { parseFlag=1; i=ppi(argc,i,argv);
7762 learn_parm->remove_inconsistent=atol(argv[i]); }
7763 if( !strcmp(argv[i],"-f") ) { parseFlag=1; i=ppi(argc,i,argv);
7764 learn_parm->skip_final_opt_check=!atol(argv[i]); }
7765 if( !strcmp(argv[i],"-q") ) { parseFlag=1; i=ppi(argc,i,argv);
7766 learn_parm->svm_maxqpsize=atol(argv[i]); }
7767 if( !strcmp(argv[i],"-n") ) { parseFlag=1; i=ppi(argc,i,argv);
7768 learn_parm->svm_newvarsinqp=atol(argv[i]); }
7769 if( !strcmp(argv[i],"-h") ) { parseFlag=1; i=ppi(argc,i,argv);
7770 learn_parm->svm_iter_to_shrink=atol(argv[i]); }
7771 if( !strcmp(argv[i],"-m") ) { parseFlag=1; i=ppi(argc,i,argv);
7772 (*kernel_cache_size)=atol(argv[i]); }
7773 if( !strcmp(argv[i],"-c") ) { parseFlag=1; i=ppi(argc,i,argv);
7774 learn_parm->svm_c=atof(argv[i]); }
7775 if( !strcmp(argv[i],"-w") ) { parseFlag=1; i=ppi(argc,i,argv);
7776 learn_parm->eps=atof(argv[i]); }
7777 if( !strcmp(argv[i],"-p") ) { parseFlag=1; i=ppi(argc,i,argv);
7778 learn_parm->transduction_posratio=atof(argv[i]); }
7779 if( !strcmp(argv[i],"-j") ) { parseFlag=1; i=ppi(argc,i,argv);
7780 learn_parm->svm_costratio=atof(argv[i]); }
7781 if( !strcmp(argv[i],"-e") ) { parseFlag=1; i=ppi(argc,i,argv);
7782 learn_parm->epsilon_crit=atof(argv[i]); }
7783 if( !strcmp(argv[i],"-o") ) { parseFlag=1; i=ppi(argc,i,argv);
7784 learn_parm->rho=atof(argv[i]); }
7785 if( !strcmp(argv[i],"-k") ) { parseFlag=1; i=ppi(argc,i,argv);
7786 learn_parm->xa_depth=atol(argv[i]); }
7787 if( !strcmp(argv[i],"-x") ) { parseFlag=1; i=ppi(argc,i,argv);
7788 learn_parm->compute_loo=atol(argv[i]); }
7789 if( !strcmp(argv[i],"-t") ) { parseFlag=1; i=ppi(argc,i,argv);
7790 kernel_parm->kernel_type=atol(argv[i]); tFlag=1; }
7791 if( !strcmp(argv[i],"-d") ) { parseFlag=1; i=ppi(argc,i,argv);
7792 kernel_parm->poly_degree=atol(argv[i]); }
7793 if( !strcmp(argv[i],"-g") ) { parseFlag=1; i=ppi(argc,i,argv);
7794 kernel_parm->rbf_gamma=atof(argv[i]); }
7795 if( !strcmp(argv[i],"-s") ) { parseFlag=1; i=ppi(argc,i,argv);
7796 kernel_parm->coef_lin=atof(argv[i]); }
7797 if( !strcmp(argv[i],"-r") ) { parseFlag=1; i=ppi(argc,i,argv);
7798 kernel_parm->coef_const=atof(argv[i]); }
7799 if( !strcmp(argv[i],"-u") ) { parseFlag=1; i=ppi(argc,i,argv);
7800 strncpy(kernel_parm->custom,argv[i], CSV_STRING); }
7801 if( !strcmp(argv[i],"-l") ) { parseFlag=1; i=ppi(argc,i,argv);
7802 strncpy(learn_parm->predfile,argv[i], 200); }
7803
7804 /* JL July 2011: Added maximum number of iterations. Thanks CC */
7805 if( !strcmp(argv[i],"-max_iterations") ) { parseFlag=1; i=ppi(argc,i,argv);
7806 learn_parm->max_iterations=atol(argv[i]); }
7807 /* if( !strcmp(argv[i],"-a") ) { parseFlag=1; i=ppi(argc,i,argv);
7808 * strcpy(learn_parm->alphafile,argv[i]); }
7809 *
7810 * as an easy solution, we are fixing the svmLight's output file name and
7811 * letting 3dsvm write out the desired file */
7812
7813 /* 3dsvm options with arguments: */
7814 if( !strcmp(argv[i],"-type") ) { parseFlag=1; i=ppi(argc,i,argv);
7815 strncpy(optionsData->svmType,
7816 argv[i], LONG_STRING); typeFlag=1; }
7817 if( !strcmp(argv[i],"-a") ) { parseFlag=1; i=ppi(argc,i,argv);
7818 strncpy(optionsData->modelAlphaFile,
7819 argv[i], LONG_STRING); aFlag=1;}
7820 if( !strcmp(argv[i],"-alpha") ) { parseFlag=1; i=ppi(argc,i,argv);
7821 strncpy(optionsData->modelAlphaFile,
7822 argv[i], LONG_STRING); alphaFlag=1;}
7823 if( !strcmp(argv[i],"-trainvol") ) { parseFlag=1; i=ppi(argc,i,argv);
7824 strncpy(optionsData->trainFile,
7825 argv[i], LONG_STRING); }
7826 if( !strcmp(argv[i],"-testvol") ) { parseFlag=1; i=ppi(argc,i,argv);
7827 strncpy(optionsData->testFile,
7828 argv[i], LONG_STRING); }
7829 if( !strcmp(argv[i],"-multiclass") ) { parseFlag=1; i=ppi(argc,i,argv);
7830 strncpy(optionsData->multiclass,
7831 argv[i], LONG_STRING); }
7832 if( !strcmp(argv[i],"-trainlabels") ) { parseFlag=1; i=ppi(argc,i,argv);
7833 strncpy(optionsData->labelFile,
7834 argv[i], LONG_STRING); }
7835 if( !strcmp(argv[i],"-censor") ) { parseFlag=1; i=ppi(argc,i,argv);
7836 strncpy(optionsData->censorFile,
7837 argv[i], LONG_STRING); }
7838 if( !strcmp(argv[i],"-mask") ) { parseFlag=1; i=ppi(argc,i,argv);
7839 strncpy(optionsData->maskFile,
7840 argv[i], LONG_STRING); }
7841 if( !strcmp(argv[i],"-model") ) { parseFlag=1; i=ppi(argc,i,argv);
7842 strncpy(optionsData->modelFile,
7843 argv[i], LONG_STRING); }
7844 if( !strcmp(argv[i],"-bucket") ) { parseFlag=1; i=ppi(argc,i,argv);
7845 strncpy(optionsData->modelWeightFile,
7846 argv[i], LONG_STRING); }
7847 if( !strcmp(argv[i],"-testlabels") ) { parseFlag=1; i=ppi(argc,i,argv);
7848 strncpy(optionsData->testLabelFile,
7849 argv[i], LONG_STRING); }
7850 if( !strcmp(argv[i],"-predictions") ) { parseFlag=1; i=ppi(argc,i,argv);
7851 strncpy(optionsData->predFile,
7852 argv[i], LONG_STRING); }
7853 if( !strcmp(argv[i],"-pred") ) { parseFlag=1; i=ppi(argc,i,argv);
7854 strncpy(optionsData->predFile,
7855 argv[i], LONG_STRING); }
7856 if( !strcmp(argv[i],"-docout") ) { parseFlag=1; i=ppi(argc,i,argv);
7857 strncpy(optionsData->docFile,
7858 argv[i], LONG_STRING); }
7859 if( !strcmp(argv[i],"-doconly") ) { parseFlag=1; i=ppi(argc,i,argv);
7860 strncpy(optionsData->docFileOnly,
7861 argv[i], LONG_STRING); }
7862 /* for kernel below, using svm-light options for kernel parameters */
7863 if( !strcmp(argv[i],"-kernel") ) { parseFlag=1; i=ppi(argc,i,argv);
7864 strncpy(optionsData->kernelName,
7865 argv[i], LONG_STRING); }
7866
7867 if( !strcmp(argv[i],"-stim_ip") ) { parseFlag=1; i=ppi(argc,i,argv);
7868 strncpy(optionsData->rtIP,
7869 argv[i], LONG_STRING); }
7870
7871 if( !strcmp(argv[i],"-stim_port") ) { parseFlag=1; i=ppi(argc,i,argv);
7872 optionsData->rtPort=atoi(argv[i]);}
7873
7874 /* AFNI, 3dsvm options without arguments: */
7875 if( !strcmp(argv[i],"-trace")) { parseFlag = 1;
7876 #ifdef USE_TRACING
7877 DBG_trace = 1;
7878 #endif
7879 }
7880 #ifdef ALLOW_MCW_MALLOC
7881 if( !strcmp(argv[i],"-no_memcheck") ) { pause_mcw_malloc(); /* ZSS */ }
7882 #endif
7883 if( !strcmp(argv[i],"-nomodelmask") ) { parseFlag=1; optionsData->outModelNoMask = 1; }
7884 if( !strcmp(argv[i],"-nomodelfile") ) { parseFlag=1; optionsData->noModelOut = 1; } /* JL Oct. 2017 */
7885 if( !strcmp(argv[i],"-nodetrend") ) { parseFlag=1; optionsData->noPredDetrend = 1; }
7886 if( !strcmp(argv[i],"-classout") ) { parseFlag=1; optionsData->classout = 1; }
7887 if( !strcmp(argv[i],"-nopredcensored") ){ parseFlag=1; optionsData->noPredCensor = 1; }
7888 if( !strcmp(argv[i],"-nopredscale") ) { parseFlag=1; optionsData->noPredScale = 1; }
7889 if( !strcmp(argv[i],"-wout") ) { parseFlag=1; optionsData->linearWmap = 1; }
7890
7891 if( !strcmp(argv[i],"-change_summary")) { print_version(); RETURN(0); }
7892 if( !strcmp(argv[i],"-version")) { print_version(); RETURN(0); }
7893 if( !strcmp(argv[i],"-HELP") ) { printf("%s", advanced_helpstring); RETURN(0); }
7894 if( !strcmp(argv[i],"-rt_train") ) { parseFlag = 1; optionsData->rtTrain = 1; }
7895 if( !strcmp(argv[i],"-rt_test") ) { parseFlag = 1; optionsData->rtTest = 1; }
7896
7897 if( !strcmp(argv[i],"-help") )
7898 {
7899 printf("%s", cl_helpstring);
7900 printf("\n\n-------------------- SVM-light learn help -----------------------------\n");
7901 print_help_learn();
7902 printf("\n\n-------------------- SVM-light classify help -----------------------------\n");
7903 print_help_classify();
7904 printf("\n\n--------------------------------------------------------------------------\n");
7905 printf("%s", contribution_string);
7906
7907 RETURN(0);
7908 }
7909
7910 if( !parseFlag ) {
7911 snprintf(errorString, LONG_STRING, "Illegal option: %s !", argv[i]);
7912 RETURN(1);
7913 }
7914 }
7915
7916 if( argc == 1 ) {
7917 printf("%s", cl_helpstring);
7918 printf("\n\n-------------------- SVM-light learn help -----------------------------\n");
7919 print_help_learn();
7920 printf("\n\n-------------------- SVM-light classify help -----------------------------\n");
7921 print_help_classify();
7922 printf("\n\n--------------------------------------------------------------------------\n");
7923 printf("%s", contribution_string);
7924 RETURN(0);
7925 }
7926
7927 /* JL May 2009: Some error checking and initialization for svm learn type */
7928 if ( (zFlag) && (optionsData->svmType[0]) ) {
7929 WARNING_message("Both svm-light option: -z and 3dsvm option: -type "
7930 "were used.\n Using -type %s\n", optionsData->svmType);
7931 }
7932
7933 if( optionsData->svmType[0] ) {
7934 if( !strcmp(optionsData->svmType, "classification") ) {
7935 learn_parm->type=CLASSIFICATION;
7936 *svm_type=CLASSIFICATION;
7937 strncpy(type,"c", 200);
7938 }
7939 else if ( !strcmp(optionsData->svmType, "regression") ) {
7940 learn_parm->type=REGRESSION;
7941 *svm_type=REGRESSION;
7942 strncpy(type,"r", 200);
7943 }
7944 else {
7945 snprintf(errorString, LONG_STRING, "Unknown option -type %s!\n",
7946 optionsData->svmType); RETURN(1);
7947 }
7948 }
7949 else
7950 strncpy(optionsData->svmType,"classification", LONG_STRING);
7951 /* (matches default for learn_parm->type) */
7952
7953 /* the following corresponds to -t option in SVM-Light's original logic */
7954 if(strcmp(type,"c")==0) {
7955 learn_parm->type=CLASSIFICATION;
7956 *svm_type=CLASSIFICATION;
7957 strncpy(optionsData->svmType, "classification", LONG_STRING);
7958 }
7959 else if(strcmp(type,"r")==0) {
7960 learn_parm->type=REGRESSION;
7961 *svm_type=REGRESSION;
7962 strncpy(optionsData->svmType, "regression", LONG_STRING);
7963 }
7964 else if(strcmp(type,"p")==0) {
7965 snprintf(errorString, LONG_STRING, "Svm-light option -z p (preference ranking) "
7966 "is not supported yet!"); RETURN(1);
7967 /* svm_type=RANKING; */
7968 /* learn_parm->type=RANKING; */
7969 }
7970 else {
7971 snprintf(errorString, LONG_STRING, "Unknown type '%s': Valid types are 'c' "
7972 "(classification), 'r' (regession), and 'p' (preference ranking).",
7973 type); RETURN(1);
7974 }
7975
7976 /* JL Feb. 2009: Some error checking and initialization for kernel options */
7977 if ( tFlag && optionsData->kernelName[0] ) {
7978 WARNING_message("Both svm-light option: -t and 3dsvm option: -kernel "
7979 "were used.\n Using -kernel %s\n", optionsData->kernelName);
7980 }
7981
7982 if ( optionsData->kernelName[0] ) {
7983 if ( !strcmp(optionsData->kernelName, "complex1") ) {
7984 kernel_parm->kernel_type = CUSTOM;
7985 strncpy(kernel_parm->custom, "complex1", 50);
7986 }
7987 else if ( !strcmp(optionsData->kernelName, "linear") ) {
7988 kernel_parm->kernel_type = LINEAR;
7989 }
7990 else if ( !strcmp(optionsData->kernelName, "polynomial") ) {
7991 kernel_parm->kernel_type = POLY;
7992 }
7993 else if ( !strcmp(optionsData->kernelName, "rbf") ) {
7994 kernel_parm->kernel_type = RBF;
7995 }
7996 else if ( !strcmp(optionsData->kernelName, "sigmoid") ) {
7997 kernel_parm->kernel_type = SIGMOID;
7998 }
7999 else {
8000 snprintf(errorString, LONG_STRING, "Unknown kernel option -kernel %s\n",
8001 optionsData->kernelName); RETURN(1);
8002 }
8003 }
8004 else
8005 strncpy(optionsData->kernelName, "linear", LONG_STRING);
8006 /* (matches default for kernel_type) */
8007
8008
8009 /* JL Sep. 2010: Error checking an initialization of real-time options */
8010 if( (optionsData->rtTrain) && (optionsData->rtTest) ) {
8011 snprintf(errorString, LONG_STRING,
8012 "Combined training and testing is not supported in real time!");
8013 RETURN(1);
8014 }
8015 /* real-time regression is not implemented yet */
8016 /*if( (optionsData->rtTrain) || (optionsData->rtTest) ) {
8017 if( strncmp(optionsData->svmType, "classification", LONG_STRING) ) {
8018 snprintf(errorString, LONG_STRING,
8019 "Sorry, Only classification is supported in real time!");
8020 RETURN(1);
8021 }
8022 } */
8023
8024 if( optionsData->rtTrain ) {
8025 *mode = RT_TRAIN;
8026 strncpy(optionsData->trainFile, "rt_data", LONG_STRING);
8027
8028 if( !optionsData->labelFile[0] ) {
8029 snprintf(errorString, LONG_STRING,
8030 "Must specify a timeseries labelfile for training in real time!");
8031 RETURN(1);
8032 }
8033 if( (!optionsData->maskFile[0]) && (!optionsData->outModelNoMask) ) {
8034 snprintf(errorString, LONG_STRING,
8035 "Must specify a mask file for training in real time!\n"
8036 "For training without a mask :\n"
8037 " set environment variable: AFNI_3DSVM_NOMASK=YES\n");
8038 RETURN(1);
8039 }
8040 if( !optionsData->modelFile[0] && !optionsData->noModelOut ) {
8041 snprintf(errorString, LONG_STRING, "Must specify a model output file for "
8042 "training in real time or use option: -nomodelfile");
8043 RETURN (1);
8044 }
8045
8046 RETURN(0);
8047 }
8048 else if( optionsData->rtTest ) {
8049 *mode = RT_TEST;
8050 /* Cameron Craddock modified to support testing from the bucket */
8051 if( !optionsData->modelFile[0] && !optionsData->modelWeightFile[0] ) {
8052 snprintf(errorString, LONG_STRING,
8053 "Must specify a model file or a bucket file for testing in real-time!");
8054 RETURN(1);
8055 }
8056
8057 RETURN(0);
8058 }
8059
8060 /* JL Apr. 2010: Added the ability to write out svm-light textfile without
8061 * having to go through training or to testing.
8062 *
8063 * Still going through train function for classification or regression
8064 * ("-type ..." mandatory!) but ONLY the doc textfile is written.
8065 *
8066 */
8067
8068 if (optionsData->docFileOnly[0]) {
8069 /* some error checking for docout only */
8070 if ( (!typeFlag) && (!zFlag) ) {
8071 snprintf(errorString, LONG_STRING, "Must specify -type for -doconly!");
8072 RETURN(1);
8073 }
8074
8075 if ( (optionsData->trainFile[0]) && (optionsData->testFile[0]) ) {
8076 snprintf(errorString, LONG_STRING, "Please specify either -trainvol or "
8077 "-testvol for -doconly!"); RETURN(1);
8078 }
8079
8080 if ( (optionsData->labelFile[0]) && (optionsData->testLabelFile[0]) ){
8081 snprintf(errorString, LONG_STRING, "Please specify either -tainlabels or"
8082 " -testlabels for -doconly!"); RETURN(1);
8083 }
8084
8085 /* make sure this works for -testvol as well */
8086 if ( optionsData->testFile[0] ) {
8087 strncpy(optionsData->trainFile, optionsData->testFile, LONG_STRING);
8088 }
8089
8090 if ( optionsData->testLabelFile[0] ) {
8091 strncpy(optionsData->labelFile, optionsData->testLabelFile, LONG_STRING);
8092 }
8093
8094 /* set mode */
8095 *mode=TRAIN;
8096
8097 /* check for mask */
8098 if ( !optionsData->maskFile[0] ) {
8099 optionsData->outModelNoMask = 1;
8100 }
8101
8102 strncpy(optionsData->docFile, optionsData->docFileOnly, LONG_STRING);
8103
8104 RETURN(0);
8105 }
8106
8107 if( (optionsData->docFile[0]) && (optionsData->testFile[0]) ) {
8108 snprintf(errorString, LONG_STRING, "Sorry, option -testvol together with "
8109 "-docout is not supported. Please use option -doconly instead!");
8110 RETURN(1);
8111 }
8112
8113 if( (optionsData->modelFile[0]) && (optionsData->noModelOut) ) {
8114 WARNING_message("Option -model and -nomodelfile was specified. "
8115 "Option: -nomodelfile is ignored!");
8116 optionsData->noModelOut = 0;
8117 }
8118
8119 /* Set mode and do some error checking */
8120 /* JL Aug. 2009: Changed error checking for testlabels. */
8121 if( optionsData->trainFile[0] ) {
8122 if( !(optionsData->labelFile[0]) ) {
8123 snprintf(errorString, LONG_STRING, "Must specify timeseries labelfile for "
8124 "training!"); RETURN(1);
8125 }
8126 if( (optionsData->testFile[0]) ) *mode = TRAIN_AND_TEST;
8127 else *mode = TRAIN;
8128 }
8129 else if( (optionsData->testFile[0]) ) *mode = TEST;
8130 else { /* JL Oct 2009: Must specify trainvolume or testvolume */
8131 snprintf(errorString, LONG_STRING, "Must specify training or testing dataset!");
8132 RETURN(1);
8133 }
8134
8135 if( !(optionsData->modelFile[0]) ) {
8136 if( ( (*mode == TRAIN) && !optionsData->noModelOut) || (*mode == TRAIN_AND_TEST) ) {
8137 snprintf(errorString, LONG_STRING, "Must specify a model output file for "
8138 "training or use option: -nomodelfile"); RETURN (1);
8139 /* In the future it would be great to keep them model in memory for
8140 * TRAIN_AND_TEST and not write it to disc and read it back in */
8141 }
8142 else if ( *mode == TEST) {
8143 snprintf(errorString, LONG_STRING, "Must specify a model input file for "
8144 "testing!"); RETURN (1);
8145 }
8146 }
8147 /* at some point may want to check for TRAIN/TEST specific mode options */
8148 /* e.g. nodetrend only applies in test mode */
8149
8150 /* check for other errors */
8151 if( aFlag && alphaFlag ) {
8152 /* if both -a and -alpha are specified, both files need to match */
8153 WARNING_message("Both -a and -alpha were specified. "
8154 "Using filename %s", optionsData->modelAlphaFile);
8155 }
8156
8157 /* JL Mar. 2009: Enabled -bucket option for only linear and complex-linear
8158 * kernels */
8159 if( optionsData->modelWeightFile[0] ) {
8160 if ( !(kernel_parm->kernel_type == LINEAR) ) {
8161 if ( !( (kernel_parm->kernel_type == CUSTOM) &&
8162 (!strcmp(kernel_parm->custom, "complex1")) ) ) {
8163 WARNING_message("At this time, only can generate maps "
8164 "(-bucket option) for linear and linear-complex kernels!");
8165
8166 strncpy(optionsData->modelWeightFile, "", LONG_STRING);
8167 }
8168 }
8169 }
8170 if( !(optionsData->trainFile[0]) && (optionsData->modelWeightFile[0])) { /* JL */
8171 WARNING_message("Maps (-bucket option) only can be generated "
8172 "during training.");
8173 }
8174
8175 /* JL May 2011: */
8176 if( (optionsData->linearWmap) && !(optionsData->modelWeightFile[0])) {
8177 WARNING_message("Ignoring -wout. Please specify a bucket prefix (-bucket bprefix)");
8178 }
8179
8180 /* Check mask usage */
8181 if( !(optionsData->modelFile[0]) && !(optionsData->outModelNoMask)
8182 && !(optionsData->maskFile[0]) ){
8183 snprintf(errorString, LONG_STRING, "No mask file specified (use -mask file). "
8184 "If not using a mask file must use option -nomodelmask"); RETURN(1);
8185 }
8186 if( (optionsData->maskFile[0]) && (optionsData->outModelNoMask) ) { /* JL */
8187 WARNING_message("Option -mask and -nomodelmask was specified. "
8188 "Option -nomodelmask is used!");
8189 strncpy(optionsData->maskFile, "", LONG_STRING);
8190 }
8191
8192 /* This follows the original error checking of SVM-Light */
8193 if(learn_parm->svm_iter_to_shrink == -9999) {
8194 if(kernel_parm->kernel_type == LINEAR)
8195 learn_parm->svm_iter_to_shrink=2;
8196 else
8197 learn_parm->svm_iter_to_shrink=100;
8198 }
8199
8200 if((learn_parm->skip_final_opt_check)
8201 && (kernel_parm->kernel_type == LINEAR)) {
8202 INFO_message("It does not make sense to skip the final optimality check "
8203 "for linear kernels.");
8204 learn_parm->skip_final_opt_check=0;
8205 }
8206 if((learn_parm->skip_final_opt_check)
8207 && (learn_parm->remove_inconsistent)) {
8208 snprintf(errorString, LONG_STRING, "It is necessary to do the final optimality check when "
8209 "removing inconsistent examples."); RETURN(1);
8210 }
8211 if((learn_parm->svm_maxqpsize<2)) {
8212 snprintf(errorString, LONG_STRING, "Maximum size of QP-subproblems not in valid range: %ld [2..]",
8213 learn_parm->svm_maxqpsize); RETURN(1);
8214 }
8215 if((learn_parm->svm_maxqpsize<learn_parm->svm_newvarsinqp)) {
8216 snprintf(errorString, LONG_STRING, "Maximum size of QP-subproblems [%ld] must be larger than\n"
8217 " the number of new variables [%ld] entering the working set in each "
8218 "iteration.",learn_parm->svm_maxqpsize, learn_parm->svm_newvarsinqp); RETURN(1);
8219 }
8220 if(learn_parm->svm_iter_to_shrink<1) {
8221 snprintf(errorString, LONG_STRING, "Maximum number of iterations for shrinking not in valid "
8222 "range: %ld [1,..]",learn_parm->svm_iter_to_shrink); RETURN(1);
8223 }
8224 if(learn_parm->svm_c<0) {
8225 snprintf(errorString, LONG_STRING, "The C parameter must be greater than zero!"); RETURN(1);
8226 }
8227 if(learn_parm->transduction_posratio>1) {
8228 snprintf(errorString, LONG_STRING, "The fraction of unlabeled examples to classify as positives\n"
8229 " must be less than 1.0 !!!"); RETURN(1);
8230 }
8231 if(learn_parm->svm_costratio<=0) {
8232 snprintf(errorString, LONG_STRING, "The COSTRATIO parameter must be greater than zero!"); RETURN(1);
8233 }
8234 if(learn_parm->epsilon_crit<=0) {
8235 snprintf(errorString, LONG_STRING, "The epsilon parameter must be greater than zero!"); RETURN(1);
8236 }
8237 if(learn_parm->rho<0) {
8238 snprintf(errorString, LONG_STRING, "The parameter rho for xi/alpha-estimates and leave-one-out\n"
8239 " pruning mustbe greater than zero (typically 1.0 or 2.0, see\n"
8240 " T. Joachims, Estimating the Generalization Performance of an\n"
8241 " SVM Efficiently, ICML, 2000.)!"); RETURN(1);
8242 }
8243 if((learn_parm->xa_depth<0) || (learn_parm->xa_depth>100)) {
8244 snprintf(errorString, LONG_STRING, "The parameter rho for xi/alpha-estimates and leave-one-out\n"
8245 " pruning mustbe greater than zero (typically 1.0 or 2.0, see\n"
8246 " T. Joachims, Estimating the Generalization Performance of an\n"
8247 " SVM Efficiently, ICML, 2000.)!"); RETURN(1);
8248 }
8249
8250 if( (*main_verbosity) >=2 ) printASLoptions(optionsData);
8251
8252 RETURN(0);
8253 }
8254