1 //
2 // Copyright (C) 2014-2019 Nick Gasson
3 //
4 // This program is free software: you can redistribute it and/or modify
5 // it under the terms of the GNU General Public License as published by
6 // the Free Software Foundation, either version 3 of the License, or
7 // (at your option) any later version.
8 //
9 // This program is distributed in the hope that it will be useful,
10 // but WITHOUT ANY WARRANTY; without even the implied warranty of
11 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 // GNU General Public License for more details.
13 //
14 // You should have received a copy of the GNU General Public License
15 // along with this program. If not, see <http://www.gnu.org/licenses/>.
16 //
17
18 #include "object.h"
19 #include "common.h"
20
21 #include <string.h>
22 #include <stdlib.h>
23
DEFINE_ARRAY(tree);
DEFINE_ARRAY(netid);
DEFINE_ARRAY(type);
DEFINE_ARRAY(range);

// Printable name for each item bit position in imask_t; used only in
// diagnostic messages when an item lookup fails
static const char *item_text_map[] = {
   "I_IDENT",    "I_VALUE",    "I_SEVERITY", "I_MESSAGE",    "I_TARGET",
   "I_LITERAL",  "I_IDENT2",   "I_DECLS",    "I_STMTS",      "I_PORTS",
   "I_GENERICS", "I_PARAMS",   "I_GENMAPS",  "I_WAVES",      "I_CONDS",
   "I_TYPE",     "I_SUBKIND",  "I_DELAY",    "I_REJECT",     "I_POS",
   "I_REF",      "I_FILE_MODE", "I_ASSOCS",  "I_CONTEXT",    "I_TRIGGERS",
   "I_ELSES",    "I_CLASS",    "I_RANGE",    "I_NAME",       "I_NETS",
   "I_DVAL",     "I_SPEC",     "I_OPS",      "I_CONSTR",     "I_BASE",
   "I_ELEM",     "I_FILE",     "I_ACCESS",   "I_RESOLUTION", "I_RESULT",
   "I_UNITS",    "I_LITERALS", "I_DIMS",     "I_FIELDS",     "I_TEXT_BUF",
   "I_ATTRS",    "I_PTYPES",   "I_CHARS",    "I_CONSTR2",    "I_FLAGS",
   "I_TEXT",
};

// Registered object classes, indexed by their tag (see object_init)
static object_class_t *classes[4];
// Rolling hash over the item tables; checked when reading saved files
static uint32_t format_digest;
// Monotonic counter used to mark objects during visits and GC
static generation_t next_generation = 1;
// Every object ever allocated, for the mark-and-sweep garbage collector
static object_t **all_objects = NULL;
static size_t max_objects = 256;   // Grows at runtime
static size_t n_objects_alloc = 0;
49
// Report a fatal error when an object kind was accessed with an item it
// does not contain.  The mask has a single bit set identifying the item.
void object_lookup_failed(const char *name, const char **kind_text_map,
                          int kind, imask_t mask)
{
   // Find the position of the lowest set bit to name the offending item
   int item = 0;
   while (!(mask & (1ull << item)))
      item++;

   assert(item < ARRAY_LEN(item_text_map));
   fatal_trace("%s kind %s does not have item %s", name,
               kind_text_map[kind], item_text_map[item]);
}
61
// Fatal error raised when an item bit in the mask has no case in one of
// the ITEM_* dispatch chains below
void item_without_type(imask_t mask)
{
   // Locate the lowest set bit to identify which item is unhandled
   int item = 0;
   while (!(mask & (1ull << item)))
      item++;

   assert(item < ARRAY_LEN(item_text_map));
   fatal_trace("item %s does not have a type", item_text_map[item]);
}
71
// Convert an object in place from its current kind to `kind', preserving
// any items common to both kinds.  The change must be listed in the
// class's change_allowed table or this is a fatal error.
void object_change_kind(const object_class_t *class, object_t *object, int kind)
{
   if (kind == object->kind)
      return;

   // The (from, to) pair must appear in the class's whitelist of
   // permitted kind changes
   bool allow = false;
   for (size_t i = 0; (class->change_allowed[i][0] != -1) && !allow; i++) {
      allow = (class->change_allowed[i][0] == object->kind)
         && (class->change_allowed[i][1] == kind);
   }

   if (!allow)
      fatal_trace("cannot change %s kind %s to %s", class->name,
                  class->kind_text_map[object->kind],
                  class->kind_text_map[kind]);

   const imask_t old_has = class->has_map[object->kind];
   const imask_t new_has = class->has_map[kind];

   const int old_nitems = class->object_nitems[object->kind];
   const int new_nitems = class->object_nitems[kind];

   const int max_items = MAX(old_nitems, new_nitems);

   // Take a copy of the items first as their packed positions shift when
   // the has-mask changes.  Reading max_items is in bounds because
   // object_init inflates object_size to cover every reachable kind.
   item_t tmp[max_items];
   memcpy(tmp, object->items, sizeof(item_t) * max_items);

   // Walk both masks in parallel: keep items present in both kinds, skip
   // items only in the old kind, zero items only in the new kind
   int op = 0, np = 0;
   for (imask_t mask = 1; np < new_nitems; mask <<= 1) {
      if ((old_has & mask) && (new_has & mask))
         object->items[np++] = tmp[op++];
      else if (old_has & mask)
         ++op;
      else if (new_has & mask)
         memset(&(object->items[np++]), '\0', sizeof(item_t));
   }

   object->kind = kind;
}
111
// One-time initialisation of an object class: computes per-kind sizes
// and item counts, builds the mask-bit to packed-index lookup table,
// folds the item tables into the global format digest, and registers
// the class in the global classes array.
static void object_init(object_class_t *class)
{
   class->object_size   = xmalloc(class->last_kind * sizeof(size_t));
   class->object_nitems = xmalloc(class->last_kind * sizeof(int));
   class->item_lookup   = xmalloc(class->last_kind * sizeof(int) * 64);

   // object_t::kind is stored in a uint8_t
   assert(class->last_kind < (1 << (sizeof(uint8_t) * 8)));

   assert(class->tag < ARRAY_LEN(classes));
   classes[class->tag] = class;

   for (int i = 0; i < class->last_kind; i++) {
      const int nitems = __builtin_popcountll(class->has_map[i]);
      class->object_size[i]   = sizeof(object_t) + (nitems * sizeof(item_t));
      class->object_nitems[i] = nitems;

      // Knuth's multiplicative hash
      format_digest +=
         (uint32_t)(class->has_map[i] >> 32) * UINT32_C(2654435761);
      format_digest +=
         (uint32_t)(class->has_map[i]) * UINT32_C(2654435761);

      // Map each item bit position to its packed index in the items
      // array, or -1 if the kind does not have that item
      int n = 0;
      for (int j = 0; j < 64; j++) {
         if (class->has_map[i] & ONE_HOT(j))
            class->item_lookup[(i * 64) + j] = n++;
         else
            class->item_lookup[(i * 64) + j] = -1;
      }
   }

   // Fixed point: grow each kind's allocation size to the largest kind
   // it can be converted into so object_change_kind never overflows
   bool changed = false;
   do {
      changed = false;
      for (int i = 0; i < class->last_kind; i++) {
         size_t max_size = class->object_size[i];
         for (size_t j = 0; class->change_allowed[j][0] != -1; j++) {
            if (class->change_allowed[j][0] == i)
               max_size = MAX(max_size,
                              class->object_size[class->change_allowed[j][1]]);
         }

         if (max_size != class->object_size[i]) {
            class->object_size[i] = max_size;
            changed = true;
         }
      }
   } while (changed);

   if (getenv("NVC_TREE_SIZES") != NULL) {
      // Fixed: iterate up to this class's own kind count rather than
      // T_LAST_TREE_KIND, which over- or under-runs kind_text_map and
      // object_size for any class other than the tree class
      for (int i = 0; i < class->last_kind; i++)
         printf("%-15s %d\n", class->kind_text_map[i],
                (int)class->object_size[i]);
   }
}
167
object_one_time_init(void)168 void object_one_time_init(void)
169 {
170 extern object_class_t tree_object;
171 extern object_class_t type_object;
172
173 static bool done = false;
174
175 if (unlikely(!done)) {
176 object_init(&tree_object);
177 object_init(&type_object);
178
179 // Increment this each time a incompatible change is made to the
180 // on-disk format not expressed in the tree and type items table
181 const uint32_t format_fudge = 12;
182
183 format_digest += format_fudge * UINT32_C(2654435761);
184
185 done = true;
186 }
187 }
188
object_new(const object_class_t * class,int kind)189 object_t *object_new(const object_class_t *class, int kind)
190 {
191 if (unlikely(kind >= class->last_kind))
192 fatal_trace("invalid kind %d for %s object", kind, class->name);
193
194 object_one_time_init();
195
196 object_t *object = xcalloc(class->object_size[kind]);
197
198 object->kind = kind;
199 object->tag = class->tag;
200 object->index = UINT32_MAX;
201
202 if (unlikely(all_objects == NULL))
203 all_objects = xmalloc(sizeof(object_t *) * max_objects);
204
205 ARRAY_APPEND(all_objects, object, n_objects_alloc, max_objects);
206
207 return object;
208 }
209
// Free an object together with any heap storage owned by its items
static void object_sweep(object_t *object)
{
   const object_class_t *class = classes[object->tag];

   const imask_t has = class->has_map[object->kind];
   const int nitems = class->object_nitems[object->kind];
   imask_t mask = 1;
   // n is the packed item index: it only advances when the current mask
   // bit is present for this kind
   for (int n = 0; n < nitems; mask <<= 1) {
      if (has & mask) {
         if (ITEM_TREE_ARRAY & mask)
            free(object->items[n].tree_array.items);
         else if (ITEM_TYPE_ARRAY & mask)
            free(object->items[n].type_array.items);
         else if (ITEM_NETID_ARRAY & mask)
            free(object->items[n].netid_array.items);
         else if (ITEM_ATTRS & mask)
            free(object->items[n].attrs.table);
         else if (ITEM_RANGE_ARRAY & mask)
            free(object->items[n].range_array.items);
         else if (ITEM_TEXT_BUF & mask) {
            if (object->items[n].text_buf != NULL)
               tb_free(object->items[n].text_buf);
         }
         else if (ITEM_TEXT & mask)
            free(object->items[n].text);
         // Other item kinds (idents, integers, doubles, single tree or
         // type pointers) own no storage of their own
         n++;
      }
   }

   free(object);
}
241
// Mark-and-sweep garbage collector: every object reachable from a root
// kind is marked via the generation counter; anything left unmarked is
// freed and the global object list is compacted
void object_gc(void)
{
   // Generation will be updated by tree_visit
   const generation_t base_gen = next_generation;

   // Mark
   for (unsigned i = 0; i < n_objects_alloc; i++) {
      assert(all_objects[i] != NULL);

      const object_class_t *class = classes[all_objects[i]->tag];

      // Only kinds listed in the class's gc_roots array anchor other
      // objects for the purpose of collection
      bool top_level = false;
      for (int j = 0; (j < class->gc_num_roots) && !top_level; j++) {
         if (class->gc_roots[j] == all_objects[i]->kind)
            top_level = true;
      }

      if (top_level) {
         object_visit_ctx_t ctx = {
            .count      = 0,
            .postorder  = NULL,
            .preorder   = NULL,
            .context    = NULL,
            .kind       = T_LAST_TREE_KIND,
            .generation = next_generation++,
            .deep       = true
         };

         object_visit(all_objects[i], &ctx);
      }
   }

   // Sweep: an object with a generation older than base_gen was not
   // reached by any of the visits above
   for (unsigned i = 0; i < n_objects_alloc; i++) {
      object_t *object = all_objects[i];
      if (object->generation < base_gen) {
         object_sweep(object);
         all_objects[i] = NULL;
      }
   }

   // Compact the live objects to the front of the list
   size_t p = 0;
   for (unsigned i = 0; i < n_objects_alloc; i++) {
      if (all_objects[i] != NULL)
         all_objects[p++] = all_objects[i];
   }

   if ((getenv("NVC_GC_VERBOSE") != NULL) || is_debugger_running())
      notef("GC: freed %zu objects; %zu allocated", n_objects_alloc - p, p);

   n_objects_alloc = p;
}
295
// Recursively visit every object reachable from `object' exactly once,
// invoking the optional preorder/postorder callbacks on each tree whose
// kind matches ctx->kind (or on every tree when ctx->kind is
// T_LAST_TREE_KIND).  The generation counter prevents revisiting.
void object_visit(object_t *object, object_visit_ctx_t *ctx)
{
   // If `deep' then will follow links above the tree originally passed
   // to tree_visit - e.g. following references back to their declarations
   // Outside the garbage collector this is usually not what is required

   if ((object == NULL) || (object->generation == ctx->generation))
      return;

   object->generation = ctx->generation;

   const bool visit =
      (object->tag == OBJECT_TAG_TREE && object->kind == ctx->kind)
      || ctx->kind == T_LAST_TREE_KIND;

   if (visit && ctx->preorder != NULL)
      (*ctx->preorder)((tree_t)object, ctx->context);

   // Items that link upwards or sideways in the tree; only followed in
   // deep mode
   const imask_t deep_mask = I_TYPE | I_REF | I_ATTRS;

   const object_class_t *class = classes[object->tag];

   const imask_t has = class->has_map[object->kind];
   const int nitems = class->object_nitems[object->kind];
   imask_t mask = 1;
   for (int i = 0; i < nitems; mask <<= 1) {
      if (has & mask & ~(ctx->deep ? 0 : deep_mask)) {
         if (ITEM_IDENT & mask)
            ;   // Scalar item: nothing to follow
         else if (ITEM_TREE & mask)
            object_visit((object_t *)object->items[i].tree, ctx);
         else if (ITEM_TREE_ARRAY & mask) {
            tree_array_t *a = &(object->items[i].tree_array);
            for (unsigned j = 0; j < a->count; j++)
               object_visit((object_t *)a->items[j], ctx);
         }
         else if (ITEM_TYPE_ARRAY & mask) {
            type_array_t *a = &(object->items[i].type_array);
            for (unsigned j = 0; j < a->count; j++)
               object_visit((object_t *)a->items[j], ctx);
         }
         else if (ITEM_TYPE & mask)
            object_visit((object_t *)object->items[i].type, ctx);
         else if (ITEM_INT64 & mask)
            ;
         else if (ITEM_INT32 & mask)
            ;
         else if (ITEM_DOUBLE & mask)
            ;
         else if (ITEM_RANGE_ARRAY & mask) {
            // Each range holds two tree expressions to follow
            range_array_t *a = &(object->items[i].range_array);
            for (unsigned j = 0; j < a->count; j++) {
               object_visit((object_t *)a->items[j].left, ctx);
               object_visit((object_t *)a->items[j].right, ctx);
            }
         }
         else if (ITEM_NETID_ARRAY & mask)
            ;
         else if (ITEM_TEXT_BUF & mask)
            ;
         else if (ITEM_TEXT & mask)
            ;
         else if (ITEM_ATTRS & mask) {
            // Only tree-valued attributes reference other objects
            attr_tab_t *attrs = &(object->items[i].attrs);
            for (unsigned j = 0; j < attrs->num; j++) {
               switch (attrs->table[j].kind) {
               case A_TREE:
                  object_visit((object_t *)attrs->table[j].tval, ctx);
                  break;

               default:
                  break;
               }
            }
         }
         else
            item_without_type(mask);
      }

      // The packed item index only advances when this kind has the item,
      // even if the deep-mask filter skipped it above
      if (has & mask)
         i++;
   }

   if (visit) {
      if (ctx->postorder != NULL)
         (*ctx->postorder)((tree_t)object, ctx->context);
      ctx->count++;
   }
}
385
// Recursively rewrite every tree reachable from `object' using the
// callback in ctx->fn, returning the (possibly replaced) object.
// Results are cached by object index so shared nodes are rewritten only
// once and cycles terminate.
object_t *object_rewrite(object_t *object, object_rewrite_ctx_t *ctx)
{
   if (object == NULL)
      return NULL;

   if (object->generation == ctx->generation) {
      // Already rewritten this tree so return the cached version
      return ctx->cache[object->index];
   }

   // Items that link out of the local tree are not rewritten
   const imask_t skip_mask = (I_REF | I_ATTRS | I_NETS);

   const object_class_t *class = classes[object->tag];

   const imask_t has = class->has_map[object->kind];
   const int nitems = class->object_nitems[object->kind];
   int type_item = -1;
   imask_t mask = 1;
   for (int n = 0; n < nitems; mask <<= 1) {
      if (has & mask & ~skip_mask) {
         if (ITEM_IDENT & mask)
            ;   // Scalar item: nothing to rewrite
         else if (ITEM_TREE & mask)
            object->items[n].tree =
               (tree_t)object_rewrite((object_t *)object->items[n].tree, ctx);
         else if (ITEM_TREE_ARRAY & mask) {
            tree_array_t *a = &(object->items[n].tree_array);

            for (size_t i = 0; i < a->count; i++)
               a->items[i] =
                  (tree_t)object_rewrite((object_t *)a->items[i], ctx);

            // If an item was rewritten to NULL then delete it
            // (`wp' renamed from `n', which shadowed the outer loop index)
            size_t wp = 0;
            for (size_t i = 0; i < a->count; i++) {
               if (a->items[i] != NULL)
                  a->items[wp++] = a->items[i];
            }
            a->count = wp;
         }
         else if (ITEM_TYPE & mask)
            type_item = n;   // Deferred: may contain a circular reference
         else if (ITEM_INT64 & mask)
            ;
         else if (ITEM_INT32 & mask)
            ;
         else if (ITEM_DOUBLE & mask)
            ;
         else if (ITEM_TYPE_ARRAY & mask) {
            // Types are rewritten for their side effects only; the array
            // entries themselves are not replaced
            type_array_t *a = &(object->items[n].type_array);
            for (unsigned i = 0; i < a->count; i++)
               (void)object_rewrite((object_t *)a->items[i], ctx);
         }
         else if (ITEM_RANGE_ARRAY & mask) {
            range_array_t *a = &(object->items[n].range_array);
            for (unsigned i = 0; i < a->count; i++) {
               a->items[i].left =
                  (tree_t)object_rewrite((object_t *)a->items[i].left, ctx);
               a->items[i].right =
                  (tree_t)object_rewrite((object_t *)a->items[i].right, ctx);
               assert(a->items[i].left);
               assert(a->items[i].right);
            }
         }
         else if (ITEM_TEXT_BUF & mask)
            ;
         else if (ITEM_TEXT & mask)
            ;
         else
            item_without_type(mask);
      }

      if (has & mask)
         n++;
   }

   // Lazily allocate then grow the rewrite cache by 1.5x as needed
   if (unlikely(ctx->cache == NULL)) {
      ctx->cache_size = MIN(4096, n_objects_alloc);
      ctx->cache = xcalloc(sizeof(object_t *) * ctx->cache_size);
   }
   else if (unlikely(ctx->index >= ctx->cache_size)) {
      assert(ctx->index < n_objects_alloc);
      while (ctx->index >= ctx->cache_size) {
         const size_t newsz = (ctx->cache_size * 3) / 2;
         ctx->cache = xrealloc(ctx->cache, newsz * sizeof(object_t *));
         memset(ctx->cache + ctx->cache_size, '\0',
                (newsz - ctx->cache_size) * sizeof(object_t *));
         ctx->cache_size = newsz;
      }
   }

   if (object->generation != ctx->generation) {
      object->generation = ctx->generation;
      object->index      = ctx->index++;
   }

   if (object->tag == OBJECT_TAG_TREE) {
      // Rewrite this tree before we rewrite the type as there may
      // be a circular reference
      object_t *new = (object_t *)(*ctx->fn)((tree_t)object, ctx->context);
      if (new != NULL && object != new) {
         // The replacement inherits the cache slot of the original
         new->generation = ctx->generation;
         new->index      = object->index;
      }
      ctx->cache[object->index] = new;
   }
   else
      ctx->cache[object->index] = object;

   if (type_item != -1)
      (void)object_rewrite((object_t *)object->items[type_item].type, ctx);

   return ctx->cache[object->index];
}
500
// Serialise an object and everything reachable from it to ctx->file.
// Objects seen more than once are written as back references to their
// previously assigned index.  Item order here must match object_read.
void object_write(object_t *object, object_wr_ctx_t *ctx)
{
   if (object == NULL) {
      write_u16(UINT16_C(0xffff), ctx->file);  // Null marker
      return;
   }

   if (object->generation == ctx->generation) {
      // Already visited this tree
      write_u16(UINT16_C(0xfffe), ctx->file);  // Back reference marker
      write_u32(object->index, ctx->file);
      return;
   }

   object->generation = ctx->generation;
   object->index = (ctx->n_objects)++;

   write_u16(object->kind, ctx->file);

   // Only tree objects carry source location information
   if (object->tag == OBJECT_TAG_TREE)
      loc_write(&object->loc, ctx->file, ctx->ident_ctx);

   const object_class_t *class = classes[object->tag];

   const imask_t has = class->has_map[object->kind];
   const int nitems = class->object_nitems[object->kind];
   imask_t mask = 1;
   for (int n = 0; n < nitems; mask <<= 1) {
      if (has & mask) {
         if (ITEM_IDENT & mask)
            ident_write(object->items[n].ident, ctx->ident_ctx);
         else if (ITEM_TREE & mask)
            object_write((object_t *)object->items[n].tree, ctx);
         else if (ITEM_TYPE & mask)
            object_write((object_t *)object->items[n].type, ctx);
         else if (ITEM_TREE_ARRAY & mask) {
            const tree_array_t *a = &(object->items[n].tree_array);
            write_u32(a->count, ctx->file);
            for (unsigned i = 0; i < a->count; i++)
               object_write((object_t *)a->items[i], ctx);
         }
         else if (ITEM_TYPE_ARRAY & mask) {
            // Note: type array counts use 16 bits, tree arrays use 32
            const type_array_t *a = &(object->items[n].type_array);
            write_u16(a->count, ctx->file);
            for (unsigned i = 0; i < a->count; i++)
               object_write((object_t *)a->items[i], ctx);
         }
         else if (ITEM_INT64 & mask)
            write_u64(object->items[n].ival, ctx->file);
         else if (ITEM_INT32 & mask)
            write_u32(object->items[n].ival, ctx->file);
         else if (ITEM_NETID_ARRAY & mask) {
            const netid_array_t *a = &(object->items[n].netid_array);
            write_u32(a->count, ctx->file);
            for (unsigned i = 0; i < a->count; i++)
               write_u32(a->items[i], ctx->file);
         }
         else if (ITEM_DOUBLE & mask)
            write_double(object->items[n].dval, ctx->file);
         else if (ITEM_ATTRS & mask) {
            const attr_tab_t *attrs = &(object->items[n].attrs);
            write_u16(attrs->num, ctx->file);
            for (unsigned i = 0; i < attrs->num; i++) {
               write_u16(attrs->table[i].kind, ctx->file);
               ident_write(attrs->table[i].name, ctx->ident_ctx);

               switch (attrs->table[i].kind) {
               case A_STRING:
                  ident_write(attrs->table[i].sval, ctx->ident_ctx);
                  break;

               case A_INT:
                  write_u32(attrs->table[i].ival, ctx->file);
                  break;

               case A_TREE:
                  object_write((object_t *)attrs->table[i].tval, ctx);
                  break;

               case A_PTR:
                  // Raw pointers are meaningless outside this process
                  fatal("pointer attributes cannot be saved");
               }
            }
         }
         else if (ITEM_RANGE_ARRAY & mask) {
            range_array_t *a = &(object->items[n].range_array);
            write_u16(a->count, ctx->file);
            for (unsigned i = 0; i < a->count; i++) {
               write_u8(a->items[i].kind, ctx->file);
               object_write((object_t *)a->items[i].left, ctx);
               object_write((object_t *)a->items[i].right, ctx);
            }
         }
         else if (ITEM_TEXT & mask) {
            // Stored as 16-bit length followed by the raw bytes with no
            // NUL terminator
            size_t len = strlen(object->items[n].text);
            assert(len <= UINT16_MAX);
            write_u16(len, ctx->file);
            write_raw(object->items[n].text, len, ctx->file);
         }
         else if (ITEM_TEXT_BUF & mask)
            ;   // Transient buffers are never serialised
         else
            item_without_type(mask);
         n++;
      }
   }
}
608
object_write_begin(fbuf_t * f)609 object_wr_ctx_t *object_write_begin(fbuf_t *f)
610 {
611 write_u32(format_digest, f);
612 write_u8(standard(), f);
613
614 object_wr_ctx_t *ctx = xmalloc(sizeof(object_wr_ctx_t));
615 ctx->file = f;
616 ctx->generation = next_generation++;
617 ctx->n_objects = 0;
618 ctx->ident_ctx = ident_write_begin(f);
619
620 return ctx;
621 }
622
// Finish a serialisation started with object_write_begin, flushing the
// identifier table and releasing the context
void object_write_end(object_wr_ctx_t *ctx)
{
   ident_write_end(ctx->ident_ctx);
   free(ctx);
}
628
object_read(object_rd_ctx_t * ctx,int tag)629 object_t *object_read(object_rd_ctx_t *ctx, int tag)
630 {
631 uint16_t marker = read_u16(ctx->file);
632 if (marker == UINT16_C(0xffff))
633 return NULL; // Null marker
634 else if (marker == UINT16_C(0xfffe)) {
635 // Back reference marker
636 index_t index = read_u32(ctx->file);
637 assert(index < ctx->n_objects);
638 return ctx->store[index];
639 }
640
641 const object_class_t *class = classes[tag];
642
643 assert(marker < class->last_kind);
644
645 object_t *object = object_new(class, marker);
646
647 if (tag == OBJECT_TAG_TREE)
648 loc_read(&(object->loc), ctx->file, ctx->ident_ctx);
649
650 // Stash pointer for later back references
651 // This must be done early as a child node of this type may
652 // reference upwards
653 object->index = ctx->n_objects++;
654 if (ctx->n_objects == ctx->store_sz) {
655 ctx->store_sz *= 2;
656 ctx->store = xrealloc(ctx->store, ctx->store_sz * sizeof(tree_t));
657 }
658 ctx->store[object->index] = object;
659
660 const imask_t has = class->has_map[object->kind];
661 const int nitems = class->object_nitems[object->kind];
662 imask_t mask = 1;
663 for (int n = 0; n < nitems; mask <<= 1) {
664 if (has & mask) {
665 if (ITEM_IDENT & mask)
666 object->items[n].ident = ident_read(ctx->ident_ctx);
667 else if (ITEM_TREE & mask)
668 object->items[n].tree = (tree_t)object_read(ctx, OBJECT_TAG_TREE);
669 else if (ITEM_TYPE & mask)
670 object->items[n].tree = (tree_t)object_read(ctx, OBJECT_TAG_TYPE);
671 else if (ITEM_TREE_ARRAY & mask) {
672 tree_array_t *a = &(object->items[n].tree_array);
673 tree_array_resize(a, read_u32(ctx->file), 0);
674 for (unsigned i = 0; i < a->count; i++)
675 a->items[i] = (tree_t)object_read(ctx, OBJECT_TAG_TREE);
676 }
677 else if (ITEM_TYPE_ARRAY & mask) {
678 type_array_t *a = &(object->items[n].type_array);
679 type_array_resize(a, read_u16(ctx->file), 0);
680 for (unsigned i = 0; i < a->count; i++)
681 a->items[i] = (type_t)object_read(ctx, OBJECT_TAG_TYPE);
682 }
683 else if (ITEM_INT64 & mask)
684 object->items[n].ival = read_u64(ctx->file);
685 else if (ITEM_INT32 & mask)
686 object->items[n].ival = read_u32(ctx->file);
687 else if (ITEM_RANGE_ARRAY & mask) {
688 range_array_t *a = &(object->items[n].range_array);
689 range_array_resize(a, read_u16(ctx->file), 0);
690
691 for (unsigned i = 0; i < a->count; i++) {
692 a->items[i].kind = read_u8(ctx->file);
693 a->items[i].left =
694 (tree_t)object_read(ctx, OBJECT_TAG_TREE);
695 a->items[i].right =
696 (tree_t)object_read(ctx, OBJECT_TAG_TREE);
697 }
698 }
699 else if (ITEM_TEXT_BUF & mask)
700 ;
701 else if (ITEM_TEXT & mask) {
702 size_t len = read_u16(ctx->file);
703 object->items[n].text = xmalloc(len + 1);
704 read_raw(object->items[n].text, len, ctx->file);
705 object->items[n].text[len] = '\0';
706 }
707 else if (ITEM_NETID_ARRAY & mask) {
708 netid_array_t *a = &(object->items[n].netid_array);
709 netid_array_resize(a, read_u32(ctx->file), 0xff);
710 for (unsigned i = 0; i < a->count; i++)
711 a->items[i] = read_u32(ctx->file);
712 }
713 else if (ITEM_DOUBLE & mask)
714 object->items[n].dval = read_double(ctx->file);
715 else if (ITEM_ATTRS & mask) {
716 attr_tab_t *attrs = &(object->items[n].attrs);
717
718 attrs->num = read_u16(ctx->file);
719 if (attrs->num > 0) {
720 attrs->alloc = next_power_of_2(attrs->num);
721 attrs->table = xmalloc(sizeof(attr_t) * attrs->alloc);
722 }
723
724 for (unsigned i = 0; i < attrs->num; i++) {
725 attrs->table[i].kind = read_u16(ctx->file);
726 attrs->table[i].name = ident_read(ctx->ident_ctx);
727
728 switch (attrs->table[i].kind) {
729 case A_STRING:
730 attrs->table[i].sval = ident_read(ctx->ident_ctx);
731 break;
732
733 case A_INT:
734 attrs->table[i].ival = read_u32(ctx->file);
735 break;
736
737 case A_TREE:
738 attrs->table[i].tval =
739 (tree_t)object_read(ctx, OBJECT_TAG_TREE);
740 break;
741
742 default:
743 abort();
744 }
745 }
746 }
747 else
748 item_without_type(mask);
749 n++;
750 }
751 }
752
753 return object;
754 }
755
object_read_begin(fbuf_t * f,const char * fname)756 object_rd_ctx_t *object_read_begin(fbuf_t *f, const char *fname)
757 {
758 object_one_time_init();
759
760 const uint32_t ver = read_u32(f);
761 if (ver != format_digest)
762 fatal("%s: serialised format digest is %x expected %x. This design "
763 "unit uses a library format from an earlier version of "
764 PACKAGE_NAME " and should be reanalysed.",
765 fname, ver, format_digest);
766
767 const vhdl_standard_t std = read_u8(f);
768 if (std > standard())
769 fatal("%s: design unit was analysed using standard revision %s which "
770 "is more recent that the currently selected standard %s",
771 fname, standard_text(std), standard_text(standard()));
772
773 object_rd_ctx_t *ctx = xcalloc(sizeof(object_rd_ctx_t));
774 ctx->file = f;
775 ctx->ident_ctx = ident_read_begin(f);
776 ctx->store_sz = 256;
777 ctx->store = xmalloc(ctx->store_sz * sizeof(object_t *));
778 ctx->n_objects = 0;
779 ctx->db_fname = xstrdup(fname);
780
781 return ctx;
782 }
783
// Finish a deserialisation started with object_read_begin and release
// all resources owned by the context
void object_read_end(object_rd_ctx_t *ctx)
{
   if (ctx->ident_ctx != NULL)
      ident_read_end(ctx->ident_ctx);
   free(ctx->store);
   free(ctx->db_fname);
   free(ctx);
}
792
// Hand out a fresh generation number for callers that perform their own
// marking traversals
unsigned object_next_generation(void)
{
   return next_generation++;
}
797
// First pass of object copying: walk the graph and assign an index to
// every object that must be duplicated.  Returns true if this object or
// anything reachable from it was selected by the callback.
bool object_copy_mark(object_t *object, object_copy_ctx_t *ctx)
{
   if (object == NULL)
      return false;

   // Already visited: marked iff it was given an index
   if (object->generation == ctx->generation)
      return (object->index != UINT32_MAX);

   object->generation = ctx->generation;
   object->index = UINT32_MAX;

   const object_class_t *class = classes[object->tag];

   bool marked = false;
   if (object->tag == OBJECT_TAG_TREE) {
      // The caller's callback decides which trees are copy roots
      if ((marked = (*ctx->callback)((tree_t)object, ctx->context)))
         object->index = (ctx->index)++;
   }

   int type_item = -1;
   const imask_t has = class->has_map[object->kind];
   const int nitems = class->object_nitems[object->kind];
   imask_t mask = 1;
   for (int n = 0; n < nitems; mask <<= 1) {
      if (has & mask) {
         if (ITEM_IDENT & mask)
            ;   // Scalar item: nothing to mark
         else if (ITEM_TREE & mask)
            marked = object_copy_mark((object_t *)object->items[n].tree, ctx)
               || marked;
         else if (ITEM_DOUBLE & mask)
            ;
         else if (ITEM_TREE_ARRAY & mask) {
            tree_array_t *a = &(object->items[n].tree_array);
            for (unsigned i = 0; i < a->count; i++)
               marked = object_copy_mark((object_t *)a->items[i], ctx)
                  || marked;
         }
         else if (ITEM_TYPE_ARRAY & mask) {
            type_array_t *a = &(object->items[n].type_array);
            for (unsigned i = 0; i < a->count; i++)
               marked = object_copy_mark((object_t *)a->items[i], ctx)
                  || marked;
         }
         else if (ITEM_TYPE & mask)
            type_item = n;   // Deferred until after the other items
         else if (ITEM_INT64 & mask)
            ;
         else if (ITEM_INT32 & mask)
            ;
         else if (ITEM_RANGE_ARRAY & mask) {
            range_array_t *a = &(object->items[n].range_array);
            for (unsigned i = 0; i < a->count; i++) {
               marked = object_copy_mark((object_t *)a->items[i].left, ctx)
                  || marked;
               marked = object_copy_mark((object_t *)a->items[i].right, ctx)
                  || marked;
            }
         }
         else if (ITEM_NETID_ARRAY & mask)
            ;
         else if (ITEM_ATTRS & mask)
            ;   // Attributes are not followed when copying
         else if (ITEM_TEXT_BUF & mask)
            ;
         else
            item_without_type(mask);
         n++;
      }
   }

   // Check type last as it may contain a circular reference
   if (type_item != -1)
      marked = object_copy_mark((object_t *)object->items[type_item].type, ctx)
         || marked;

   // A parent of any marked object is itself marked
   if (marked && (object->index == UINT32_MAX))
      object->index = (ctx->index)++;

   return marked;
}
879
// Second pass of object copying: duplicate every object that was given
// an index by object_copy_mark, reusing previously made copies via
// ctx->copied.  Unmarked objects are shared, not copied.
object_t *object_copy_sweep(object_t *object, object_copy_ctx_t *ctx)
{
   if (object == NULL)
      return NULL;

   // The mark pass must have run over this object already
   assert(object->generation == ctx->generation);

   // Not marked for copying: share the original
   if (object->index == UINT32_MAX)
      return object;

   assert(object->index < ctx->index);

   if (ctx->copied[object->index] != NULL) {
      // Already copied this object
      return ctx->copied[object->index];
   }

   const object_class_t *class = classes[object->tag];

   object_t *copy = object_new(class, object->kind);
   // Record the copy before recursing so cycles terminate
   ctx->copied[object->index] = copy;

   copy->loc = object->loc;

   const imask_t has = class->has_map[object->kind];
   const int nitems = class->object_nitems[object->kind];
   imask_t mask = 1;
   for (int n = 0; n < nitems; mask <<= 1) {
      if (has & mask) {
         if (ITEM_IDENT & mask)
            copy->items[n].ident = object->items[n].ident;
         else if (ITEM_TREE & mask)
            copy->items[n].tree = (tree_t)
               object_copy_sweep((object_t *)object->items[n].tree, ctx);
         else if (ITEM_DOUBLE & mask)
            copy->items[n].dval = object->items[n].dval;
         else if (ITEM_TREE_ARRAY & mask) {
            const tree_array_t *from = &(object->items[n].tree_array);
            tree_array_t *to = &(copy->items[n].tree_array);

            tree_array_resize(to, from->count, 0);

            for (size_t i = 0; i < from->count; i++)
               to->items[i] = (tree_t)
                  object_copy_sweep((object_t *)from->items[i], ctx);
         }
         else if (ITEM_TYPE & mask)
            copy->items[n].type = (type_t)
               object_copy_sweep((object_t *)object->items[n].type, ctx);
         else if ((ITEM_INT64 & mask) || (ITEM_INT32 & mask))
            copy->items[n].ival = object->items[n].ival;
         else if (ITEM_NETID_ARRAY & mask) {
            const netid_array_t *from = &(object->items[n].netid_array);
            netid_array_t *to = &(copy->items[n].netid_array);

            netid_array_resize(to, from->count, 0xff);

            for (unsigned i = 0; i < from->count; i++)
               to->items[i] = from->items[i];
         }
         else if (ITEM_ATTRS & mask) {
            // Attribute entries are copied by value; tree-valued
            // attributes are therefore shared with the original
            if ((copy->items[n].attrs.num = object->items[n].attrs.num) > 0) {
               copy->items[n].attrs.alloc = object->items[n].attrs.alloc;
               copy->items[n].attrs.table =
                  xmalloc(sizeof(attr_t) * copy->items[n].attrs.alloc);
               for (unsigned i = 0; i < object->items[n].attrs.num; i++)
                  copy->items[n].attrs.table[i] =
                     object->items[n].attrs.table[i];
            }
         }
         else if (ITEM_RANGE_ARRAY & mask) {
            const range_array_t *from = &(object->items[n].range_array);
            range_array_t *to = &(copy->items[n].range_array);
            range_array_resize(to, from->count, 0);

            for (unsigned i = 0; i < from->count; i++) {
               to->items[i].kind = from->items[i].kind;
               to->items[i].left = (tree_t)
                  object_copy_sweep((object_t *)from->items[i].left, ctx);
               to->items[i].right = (tree_t)
                  object_copy_sweep((object_t *)from->items[i].right, ctx);
            }
         }
         else if (ITEM_TYPE_ARRAY & mask) {
            const type_array_t *from = &(object->items[n].type_array);
            type_array_t *to = &(copy->items[n].type_array);

            type_array_resize(to, from->count, 0);

            for (unsigned i = 0; i < from->count; i++)
               to->items[i] = (type_t)
                  object_copy_sweep((object_t *)from->items[i], ctx);
         }
         else if (ITEM_TEXT_BUF & mask)
            ;   // Transient buffers are not copied
         else
            item_without_type(mask);
         n++;
      }
   }

   return copy;
}
983
// Overwrite object `t' in place with the contents of `a', changing its
// kind if required.  Array items get fresh storage but the elements
// themselves are shared with `a' (shallow copy).  Item kinds not
// handled below abort via item_without_type.
void object_replace(object_t *t, object_t *a)
{
   const object_class_t *class = classes[t->tag];

   object_change_kind(class, t, a->kind);

   const imask_t has = class->has_map[t->kind];
   const int nitems = class->object_nitems[t->kind];
   imask_t mask = 1;
   for (int n = 0; n < nitems; mask <<= 1) {
      if (has & mask) {
         if (ITEM_TYPE_ARRAY & mask) {
            const type_array_t *from = &(a->items[n].type_array);
            type_array_t *to = &(t->items[n].type_array);

            type_array_resize(to, from->count, 0);

            for (unsigned i = 0; i < from->count; i++)
               to->items[i] = from->items[i];
         }
         else if (ITEM_TYPE & mask)
            t->items[n].type = a->items[n].type;
         else if (ITEM_TREE & mask)
            t->items[n].tree = a->items[n].tree;
         else if (ITEM_TREE_ARRAY & mask) {
            const tree_array_t *from = &(a->items[n].tree_array);
            tree_array_t *to = &(t->items[n].tree_array);

            tree_array_resize(to, from->count, 0);

            for (size_t i = 0; i < from->count; i++)
               to->items[i] = from->items[i];
         }
         else if (ITEM_RANGE_ARRAY & mask) {
            const range_array_t *from = &(a->items[n].range_array);
            range_array_t *to = &(t->items[n].range_array);

            range_array_resize(to, from->count, 0);

            for (unsigned i = 0; i < from->count; i++)
               to->items[i] = from->items[i];
         }
         else if (ITEM_TEXT_BUF & mask)
            ;   // Transient buffers are not transferred
         else if (ITEM_IDENT & mask)
            t->items[n].ident = a->items[n].ident;
         else
            item_without_type(mask);
         n++;
      }
   }
}
1036