/** @file neural.cc
    @author Jakub Adamek
    @date 11/5/2001
    @version 2.0

    @brief This file implements the structures defined in neural.h
*/

/* IMPORTANT: You must define EPOS in order to use this file - it is used in vector.cc!!! */

//This file contains markup which allows the Doxygen documentation generator to work on it

/*
 *	epos/src/neural.cc
 *	(c) 2000 jakubadamek@bigfoot.com
 *
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License in doc/COPYING for more details.
 *
 */
//Optimized for tab width: 3

#ifndef EPOS_NEURAL_CC
#define EPOS_NEURAL_CC

#include "epos.h"
#include "neural.h"
#include "xmlutils.h"
#include <errno.h>
#include <stdio.h>
#include <string.h>
#include <ctype.h>
#include <stdlib.h>
#include <iostream>
#include <time.h>

/*
REGSTRUCT (TChar2Float)
REGSTRUCT (CExpression)

REGVECTOR (TChar2Float, TChar2Floats)
REGVECTOR (CExpression, CExpressions)
REGVECTOR (CInt, TNNOutputs)
REGARRAY (CFloat, 256, TFloats256)
*/
extern unit EMPTY;	//unit.cc

const int MAX_LENGTH_STATEMENT = 250;
double const TChar2Float::CHARTOFLOAT_NULL = -100;

// name of chartofloat defining sonority in config file
static const char CHARTOFLOAT_SONORITY[] = "sonority";

int neuralparse (void *neuralnet);	//BISON

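/** Maps a level name from the config file to the corresponding unit level;
    returns U_DEFAULT when the name is unknown. */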
inline UNIT get_level (const char *level_name)
{
	return str2enum (level_name, scfg->unit_levels, U_DEFAULT);
}

CNeuralNet::~CNeuralNet()
{
}

// * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
// CNeuralNet::read
/**
	Reads the XML config file. */
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * *

void
resolve_vars(char *line, hash *vars, text *file = NULL);	//defined in block.cc

extern CString bison_row_buf;
extern const char *bison_row;
extern CExpression *bison_input_result;
extern CNeuralNet *bison_nnet;

CString CNeuralNet::read (CRox &xml)
{
	CString err, s;
	CRox *ch, *ch1, *ch2;
	CFloat f, ch2f_default;
	TChar2Float ch2f;
	char tmp[1000], *ptmp;
	int i, ichar, ich2f;

	xml.AddIncludes(compose_pathname(filename, this_lang->rules_dir, scfg->lang_base_dir));
	xml.AddDefaults();

	/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
	/* CHAR 2 FLOATS */

	ch = &xml["char2floats"];
	if (ch->Exists())
		for (ich2f = 0; ich2f < ch->NChildren(); ++ich2f)
			if (ch->Child(ich2f).Tag() == "char2float") {
				ch1 = &ch->Child(ich2f);
				err += ch1->GetAttr ("value", ch2f.name);
				ch2f_default = TChar2Float::CHARTOFLOAT_NULL;
				err += xml_read (ch1, ch2f_default, "default", false);

				for (ichar = 0; ichar < 256; ++ichar)
					ch2f.val [ichar] = ch2f_default;

				err += xml_read (ch1, ch2f.empty, "empty", false);

				for (i = 0; i < ch1->NChildren(); ++i)
					if (ch1->Child(i).Tag() == "chars") {
						ch2 = &ch1->Child(i);
						err += xml_read (ch2, s, "src");
						err += ch2->GetAttr ("value", f);
						tmp[0] = 'a';	//'a' or anything else
						if (s.length()) {
							strcpy (tmp+1, s.c_str());
							tmp[strlen(tmp)+1] = 0;
							resolve_vars (tmp, vars);
							for (ptmp = tmp+1; *ptmp; ++ptmp)
								ch2f.val [(unsigned char)*ptmp] = f;
						}
					}
				char2floats.push_back (ch2f);
			}
	if (err.length()) return err;

	/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
	/* TRAINDATA and PERCEPTRON_NN */

	err += xml_read (&xml, log, "epos_log", false);
	filename = "";
	err += xml_read (&xml, filename, "epos_nn", false);
	trDataPrepare = filename.length() == 0;

	if (!trDataPrepare) {
		XMLFile xmlFile;
		xmlFile.setfile (compose_pathname(filename, this_lang->rules_dir, scfg->lang_base_dir));
		CRox *holder = xmlFile.parse();
		err += perceptronNN.read_all (holder, compose_pathname(filename, this_lang->rules_dir, scfg->lang_base_dir), &trData);
		perceptronNN.initNeurons();
		if (err.length()) return err;
	}
	if (err.length()) return err;

	/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
	/* COLUMNS */

	int iinp;

	ch = &xml["columns"];
	if (ch->Exists()) {
		inputs.DeleteAll();
		outputs.DeleteAll();
		logUnits.DeleteAll();
		for (iinp = 0; iinp < ch->NChildren(); ++iinp)
			if (ch->Child(iinp).Tag() == "column") {
				ch1 = &ch->Child(iinp);
				CInt use;
				err += xml_read_enum (ch1, use, CUs, "use");
				if (use == CU_INPUT) {
					err += xml_read (ch1, bison_row_buf, "epos");
					bison_row = bison_row_buf.c_str();
					bison_nnet = this;

					/* * * * * * * * * * * * * * * * * * * */
					/* Running the BISON parser */
					neuralparse (this);

					inputs.push_back (*bison_input_result);
					delete bison_input_result;
				}
				else if (use == CU_OUTPUT) {
					err += xml_read (ch1, bison_row_buf, "epos");
					bison_row = bison_row_buf.c_str();
					bison_nnet = this;

					/* * * * * * * * * * * * * * * * * * * */
					/* Running the BISON parser */
					neuralparse (this);

					outputs.push_back (*bison_input_result);
					delete bison_input_result;

					CInt output;
					err += xml_read_enum (ch1, output, NOs, "epos_output");
					outputs_placement.push_back (output);
				}
				else if (use == CU_NO) {
					CString level;
					int logUnit = -1;
					err += xml_read (ch1, level, "epos_loglevel", false);
					if (level.length())
						logUnit = str2enum (level, scfg->unit_levels, -1);
					logUnits.push_back (logUnit);
				}
			}
	}

	/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
	/* REREAD */

	err += xml_read (&xml, log, "epos_log", false);
	err += xml_read (&xml, reread_find, "epos_reread_find", false);
	reread_last_time = time (NULL);

	if (trDataPrepare)
		err += trData.read (&xml);

	return err;
}

// * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
// CNeuralNet::init
/**
	Does all the work - prepares the network to process data,
	processes the config file. */
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * *

void
CNeuralNet::init ()
{
//	#ifndef EPOS
//	shriek (861, "You must define EPOS when building this file. It is used in Bang 3 vector.cc file!");
//	#endif

	if (initialized) return;

	/* Running the XML parser */
	XMLFile xmlFile;
	xmlFile.setfile (compose_pathname(filename, this_lang->rules_dir, scfg->lang_base_dir));
	CString err = read (*xmlFile.parse());
	if (err.length())
		shriek (861, CString (CString("Neuralnet rule: ")+err).c_str());

	D_PRINT(2, "Neuralnet initialized.\n");
	initialized = true;
}

// * * * * * * * * * * * * * * * * * * * * * * * * * * * *
// CNeuralNet::run
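/**
	Runs the network over all subunits of myunit at the target level:
	fills the input columns, optionally reloads a newer network file,
	computes the network outputs and applies them as prosody adjustments,
	and writes one log row per subunit. */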
// * * * * * * * * * * * * * * * * * * * * * * * * * * * *

void CNeuralNet::run (unit *myunit)
{
	if (inputs.size() != trData.getColCount(CU_INPUT))
		shriek (861, fmt ("Input count %d doesn't match the Epos neuralnet definition", (int)inputs.size()));
	if (outputs.size() != trData.getColCount(CU_OUTPUT))
		shriek (861, fmt ("Output count %d doesn't match the Epos neuralnet definition", (int)outputs.size()));

	trData.clear();

	if (reread_find.length() && reread_last_time < time (NULL)) {
		reread_last_time = time (NULL);
		CString file = findFirstFileName (compose_pathname(reread_find, this_lang->rules_dir, scfg->lang_base_dir));
		if (file.length() && file != last_reread_find) {
			last_reread_find = file;
			XMLFile xmlFile;
			xmlFile.setfile (file);
			CRox *holder = xmlFile.parse();
			CString err = perceptronNN.read_all (holder, file, &trData);
			perceptronNN.initNeurons();
			if (err.length()) shriek (862, err);
		}
	}

	double *inputVals = new double [inputs.size()];
	unit *subunit = myunit->LeftMost (target);
	int isub, iCol;
	double f, oldF;

	// are we modelling first or second derivatives?
	int nn_derivatives = 0;

	if (trDataPrepare) {
		for (isub = 0; isub < myunit->count (target); isub ++, subunit = subunit->Next (target)) {
			trData.addRow (ST_TRAIN);
			fill_input (myunit, subunit, inputVals);
			for (iCol = 0; iCol < trData.getColCount(CU_INPUT); ++iCol)
				trData.fillColumn (CU_INPUT, iCol, inputVals[iCol]);
		}
	}

	else {
		for (isub = 0; isub < myunit->count (target); isub ++, subunit = subunit->Next (target)) {
			if (isub < nn_derivatives) continue;
			trData.addRow (ST_TRAIN);
			fill_input (myunit, subunit, inputVals);
			for (iCol = 0; iCol < trData.getColCount(CU_INPUT); ++iCol)
				trData.fillColumn (CU_INPUT, iCol, inputVals[iCol]);
		}
		trData.processWindows();
		subunit = myunit->LeftMost (target);
		isub = 0;
		for (trData.moveToSetStart (ST_TRAIN); trData.getSet() == ST_TRAIN;
				subunit = subunit->Next (target), trData.moveNext (ST_TRAIN)) {

			perceptronNN.runNetwork (trData.getInputs(), isub ++);
			perceptronNN.copyOutputs (trData.modifyOutputs());

			for (iCol = 0; iCol < trData.getColCount(CU_OUTPUT); ++iCol) {
				f = outputs [iCol].calculate (myunit, subunit, &trData);
				if (nn_derivatives) {
					if (isub <= nn_derivatives) f = 100;
					else f += oldF;
					oldF = f;
				}
				switch ((int)outputs_placement[iCol]) {
				case NO_NONE: break;
				case NO_FREQUENCE:
					#define UGLY_POSITION 0.99
					subunit->prospoint(Q_FREQ, (int)(f - cfg->pros_neutral[Q_FREQ]), UGLY_POSITION);
					break;
				case NO_NOTDEF:
					shriek (861, "At this point all outputs must be defined.");
				}
			}
		}
	}

	// output to log file

	bang_ofstream logF;
	if (log.length())
		logF.open (compose_pathname(log, this_lang->rules_dir, scfg->lang_base_dir), bang_ofstream::out | bang_ofstream::app);
	if (logF) {
		subunit = myunit->LeftMost (target);
		isub = 0;
		for (trData.moveToSetStart (ST_TRAIN); trData.getSet() == ST_TRAIN;
				subunit = subunit->Next (target), trData.moveNext (ST_TRAIN)) {
			int icol[3] = {0, 0, 0};
			for (TColumns::const_iterator col = trData.getColumns().begin(); col != trData.getColumns().end(); ++col) {
				switch ((int)col->use) {
				case CU_INPUT:
					trData.getPostprocessed (CU_INPUT, icol[CU_INPUT], f);
					logF << f;
					break;
				case CU_OUTPUT:
					if (!trDataPrepare) {
						trData.getPostprocessed (CU_OUTPUT, icol[CU_OUTPUT], f);
						logF << f;
					}
					break;
				case CU_NO:
					if (logUnits[icol[CU_NO]] != -1) {
						int isub = 0;
						int logUnit = logUnits[icol[CU_NO]];
						unit *pomunit = subunit->LeftMost (logUnit);
						for (; isub < subunit->count(logUnit); pomunit = pomunit->Next(logUnit), ++isub)
							logF << (char)pomunit->getCont();
					}
					break;
				}
				logF << "\t";
				icol[col->use] ++;
			}
			logF << "\n";
		}

		if (trData.seriesSeparator)
			logF << ((const char *) trData.seriesSeparator) << "\n";
		logF.close();
	}

	delete [] inputVals;
}

// * * * * * * * * * * * * * * * * * * * * * * * * * * * *
// CNeuralNet::fill_input
/**
	Prepares the inputs for the input layer.
	Calls calculate on all the input trees. */
// * * * * * * * * * * * * * * * * * * * * * * * * * * * */

void CNeuralNet::fill_input (unit *scope, unit *myunit, double *input_val)
{
	FILE *infile = NULL;
	if (infilename.length()) {
		infile = fopen (infilename, "a");
		if (!infile) shriek (812, "CNeuralNet::fill_input:Cannot open file for output.");
	}

	if (!initialized) shriek (861, "CNeuralNet::fill_input:Neuralnet not initialized");
	if (log_level_input > (CInt) -1 && infilename.length()) {
		unit *subunit = myunit->LeftMost (log_level_input);
		for (int i_unit = 0; i_unit < myunit->count (log_level_input); ++i_unit, subunit = subunit->Next(log_level_input))
			fprintf (infile, "%c", subunit->getCont());
		fprintf (infile, "\t");
	}
	for (int i_input = 0; i_input < inputs.size(); ++i_input) {
		input_val [i_input] = inputs [i_input].calculate (scope, myunit, &trData);
		if (infile) fprintf (infile, fmt("%s%df ", "%.", format_decdigits[i_input]), input_val [i_input]);
	}
	if (infile) {
		fprintf (infile, "\n");
		fclose (infile);
	}
}

// * * * * * * * * * * * * * * * * * * * * * * * * * * * *
// CExpression::calculate
/**
	Calculates one input tree - all nodes will have values.
	Contains implementations of all the functions defined in enum_func. */
// * * * * * * * * * * * * * * * * * * * * * * * * * * * */

double
CExpression::calculate (unit *scope, unit *myunit, CTrainingData *trData)
{
	CFloat f;
	unit *pomunit, *iunit;
	int i;

#define par0	(tree->args[0].v())
#define par1	(tree->args[1].v())
#define par2	(tree->args[2].v())
#define float0	double (par0)
#define float1	double (par1)
#define bool0	(double (par0) != 0)
#define bool1	(double (par1) != 0)
#define int0	int (par0)
#define int1	int (par1)
#define int2	int (par2)
#define result	tree->v()

	if (head == NULL) make_list ();

	for (CExpression *tree = head; tree; tree = tree->next) {
		switch (tree->function) {
		case fu_value:		break;
		case fu_not:		result = ! bool0; break;
		case fu_multiply:	result = float0 * float1; break;
		case fu_divide:		if (!bool1) shriek (812, "CNeuralNet::calculate_input:division by zero.");
					result = float0 / float1; break;
		case fu_add:		result = float0 + float1; break;
		case fu_subtract:	result = float0 - float1; break;
		case fu_less:		result = float0 < float1; break;
		case fu_lessorequals:	result = float0 <= float1; break;
		case fu_greater:	result = float0 > float1; break;
		case fu_greaterorequals:result = float0 >= float1; break;
		case fu_equals:		result = float0 == float1; break;
		case fu_notequals:	result = float0 != float1; break;
		case fu_and:		result = bool0 && bool1; break;
		case fu_or:		result = bool0 || bool1; break;
		case fu_count:
			result = myunit->ancestor( par1 )->count( par0 ); break;
		case fu_index:
			result = 1 + myunit->index( par0, par1 ); break;
		case fu_this:
			result = myunit; break;
		case fu_f0:
			result = myunit->effective(Q_TIME); break;
		case fu_cont:
			result = ((unit *)par0)->getCont(); break;
		case fu_next:
		case fu_prev:
		{
			pomunit = par0;
			int level;
			if (!par1.get_value_type())
				level = static_cast<const unit *>(par0)->getDepth();
			else level = par1;
			if (tree->function == fu_next)
				for (i = 0; i < int2; ++i)
					pomunit = pomunit->Next(level);
			else
				for (i = 0; i < int2; ++i)
					pomunit = pomunit->Prev(level);
			result = pomunit;
			break;
		}
		case fu_ancestor:
			result = myunit->ancestor (par0); break;
		case fu_chartofloat:
			if (par0 == &EMPTY)
				f = tree->i_chartofloat()->empty;
			else {
				f = tree->i_chartofloat()->val[static_cast<const unit *>(par0)->getCont()];
				if (f == TChar2Float::CHARTOFLOAT_NULL)
					shriek (812, fmt ("CNeuralNet::calculate_input:char '%c' has no %s defined",
						static_cast<const unit *>(par0)->getCont(), tree->i_chartofloat()->name.c_str()));
			}
			result = f;
			break;
		case fu_maxfloat:
		{
			pomunit = myunit->ancestor (par1);
			CFloat maxf = TChar2Float::CHARTOFLOAT_NULL;

			int count = pomunit->count (par0);
			pomunit = pomunit->LeftMost (par0);
			int i;
			for (iunit = pomunit, i = 0; i < count; ++i, iunit = iunit->Next(par0))
				if (tree->i_chartofloat()->val[iunit->getCont()] > maxf) {
					maxf = tree->i_chartofloat()->val[iunit->getCont()];
					pomunit = iunit;
				}
			result = pomunit;
			break;
		}
		case fu_neural:
		{
			double f;
			trData->moveToRow (((unit *)par0)->index (((unit *)par0)->getDepth(), scope->getDepth()));
			trData->getPostprocessed (CU_OUTPUT, int1, f);
			result = f;
			break;
		}
		case fu_nothing:	shriek (861, "CNeuralNet::calculate_input:fu_nothing ?!");
		default:		shriek (861, fmt ("CNeuralNet::calculate_input:non-handled function %i - add to the source code", tree->function));
		}

	}

	return (which_value);
}	//calculate_input

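/** Constructs a neural net rule for the given config file name and variable
    hash and initializes it immediately. */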
CNeuralNet::CNeuralNet (const char *my_filename, hash *my_vars)
{
	filename = my_filename;
	vars = my_vars;
	initialized = false;
	log_level_input = -1;

	init ();
}

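/** Marks all 256 character slots as undefined (CHARTOFLOAT_NULL) and zeroes
    the value returned for the EMPTY unit. */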
void TChar2Float::Init ()
{
	for (int c = 0; c < 256; ++c) val [c] = CHARTOFLOAT_NULL;
	empty = 0;
}

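/** Writes the linearized expression list to file, one token per node -
    useful for debugging the BISON-built trees. */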
void
CExpression::write_list (FILE *file)
{
	if (head == NULL) make_list ();
	head->write_list_node (file);
	fprintf (file, "\n");
}

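/** Prints this node's token and walks on along the next pointers. */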
void
CExpression::write_list_node (FILE *file)
{
	switch (function) {
	case fu_nothing:	fprintf (file, "~ "); break;
	case fu_value:		v().print(file); fprintf (file, " "); break;
	case fu_chartofloat:	fprintf (file, "%s ", (char *)(i_chartofloat()->name)); break;
	default:		fprintf (file, " func.%u ", (int) function);
	}

	if (next) next->write_list_node (file);
}

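/** Threads this subtree into the list being built in *new_head: the node
    prepends itself and then recurses into its arguments, so in the final
    list every node is preceded by all of its arguments. */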
CExpression *
CExpression::make_list (CExpression **new_head)
{
	next = *new_head;
	*new_head = this;
	for (int i = 0; i < args.size(); ++i)
		args[i].make_list (new_head);
	head = *new_head;
	return (head);
}

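/** Builds the linked list for the whole tree so that calculate() can
    evaluate it in a single linear pass, arguments before their operators. */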
void
CExpression::make_list ()
{
	CExpression *new_head = NULL;
	head = make_list (&new_head);
}


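/** Resets the node to an empty fu_nothing node with no list links or owner. */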
void CExpression::Init ()
{
	head = NULL;
	next = NULL;
	owner = NULL;
	function = fu_nothing;
}

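/** Copy construction and assignment both delegate to init(), which deep
    copies the tagged value. */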
TTypedValue::TTypedValue (const TTypedValue &x)
{
	init (x);
}

TTypedValue & TTypedValue::operator= (const TTypedValue &x)
{
	if (this != &x) {	// guard against self-assignment; clear frees any owned string
		clear ();
		init (x);
	}
	return *this;
}

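/** Copies the payload according to the type tag; string values get their
    own strdup'd storage. */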
void TTypedValue::init (const TTypedValue &x)
{
	value_type = x.value_type;
	switch (value_type) {
	case 0:   break;
	case 'i': int_val = x.int_val; break;
	case 'f': float_val = x.float_val; break;
	case 'c': char_val = x.char_val; break;
	case 'b': bool_val = x.bool_val; break;
	case 's': string_val = strdup (x.string_val); break;
	case 'u': unit_val = x.unit_val; break;
	default:  shriek (861, "TTypedValue:Type not handled in init.");
	}
}

TTypedValue::~TTypedValue ()
{
	clear ();
}

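/** Releases the owned string storage (the only type holding resources) and
    resets the type tag to empty. */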
void TTypedValue::clear ()
{
	switch (value_type) {
	case 0:
	case 'i':
	case 'f':
	case 'c':
	case 'b':
	case 'u':
		break;
	case 's': free (string_val); break;
	default:  shriek (861, fmt ("TTypedValue:Type %c (%i) not handled in destructor.", value_type, value_type));
	}
	value_type = 0;
}

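/** Dumps the whole expression tree to file, indented by depth. */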
void
CExpression::write (FILE *file)
{
	fprintf (file, "Tree content:\n");
	write (0, file);
}

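/** Prints one node at the given indentation level, then its arguments one
    level deeper. */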
void
CExpression::write (int level, FILE *file)
{
	int i;
	for (i = 0; i < level; ++i) fprintf (file, "...");
	switch (function) {
	case fu_nothing:	fprintf (file, "~"); break;
	case fu_value:		break;
	case fu_chartofloat:	fprintf (file, "%s\t\t", (char *)(i_chartofloat()->name)); break;
	default:		fprintf (STDDBG, "Function %i ?! ", function);
	}
	switch (function) {
	case fu_nothing: break;
	default:
		v().print(file); break;
	}
	fprintf (file, "\n");

	for (i = 0; i < args.size(); ++i)
		args[i].write (level+1, file);
}


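/** Prints the value in a format chosen by the type tag. */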
void
TTypedValue::print (FILE *file)
{
	switch (value_type) {
	case 's': fprintf (file, "%s", string_val); break;
	case 'f': fprintf (file, "%.2f", float_val); break;
	case 'i': fprintf (file, "%i", int_val); break;
	case 'c': fprintf (file, "%c", char_val); break;
	case 'b': fprintf (file, "%s", bool_val ? "true" : "false"); break;
	case 'u': fprintf (file, "unit var %lu", (unsigned long)(unit_val)); break;
	default:  fprintf (file, "Cannot print value type %c", value_type);
	}
}

#endif
