1 /*********************                                                        */
2 /*! \file node_manager.cpp
3  ** \verbatim
4  ** Top contributors (to current version):
5  **   Morgan Deters, Andrew Reynolds, Tim King
6  ** This file is part of the CVC4 project.
7  ** Copyright (c) 2009-2019 by the authors listed in the file AUTHORS
8  ** in the top-level source directory) and their institutional affiliations.
9  ** All rights reserved.  See the file COPYING in the top-level source
10  ** directory for licensing information.\endverbatim
11  **
12  ** \brief Expression manager implementation.
13  **
14  ** Expression manager implementation.
15  **
16  ** Reviewed by Chris Conway, Apr 5 2010 (bug #65).
17  **/
18 #include "expr/node_manager.h"
19 
20 #include <algorithm>
21 #include <stack>
22 #include <utility>
23 
24 #include "base/cvc4_assert.h"
25 #include "base/listener.h"
26 #include "expr/attribute.h"
27 #include "expr/node_manager_attributes.h"
28 #include "expr/node_manager_listeners.h"
29 #include "expr/type_checker.h"
30 #include "options/options.h"
31 #include "options/smt_options.h"
32 #include "util/statistics_registry.h"
33 #include "util/resource_manager.h"
34 
35 using namespace std;
36 using namespace CVC4::expr;
37 
38 namespace CVC4 {
39 
40 thread_local NodeManager* NodeManager::s_current = NULL;
41 
42 namespace {
43 
/**
 * This class sets its reference argument to true and ensures that it gets set
 * to false on destruction. This can be used to make sure a flag gets toggled
 * in a function even on exceptional exit (e.g., see reclaimZombies()).
 */
49 struct ScopedBool {
50   bool& d_value;
51 
ScopedBoolCVC4::__anon9570e7da0111::ScopedBool52   ScopedBool(bool& value) :
53     d_value(value) {
54 
55     Debug("gc") << ">> setting ScopedBool\n";
56     d_value = true;
57   }
58 
~ScopedBoolCVC4::__anon9570e7da0111::ScopedBool59   ~ScopedBool() {
60     Debug("gc") << "<< clearing ScopedBool\n";
61     d_value = false;
62   }
63 };
64 
65 /**
66  * Similarly, ensure d_nodeUnderDeletion gets set to NULL even on
67  * exceptional exit from NodeManager::reclaimZombies().
68  */
69 struct NVReclaim {
70   NodeValue*& d_deletionField;
71 
NVReclaimCVC4::__anon9570e7da0111::NVReclaim72   NVReclaim(NodeValue*& deletionField) :
73     d_deletionField(deletionField) {
74 
75     Debug("gc") << ">> setting NVRECLAIM field\n";
76   }
77 
~NVReclaimCVC4::__anon9570e7da0111::NVReclaim78   ~NVReclaim() {
79     Debug("gc") << "<< clearing NVRECLAIM field\n";
80     d_deletionField = NULL;
81   }
82 };
83 
84 } // namespace
85 
namespace attr {
  // Tag type used solely to instantiate the attribute below; never constructed.
  struct LambdaBoundVarListTag { };
}/* CVC4::attr namespace */

// attribute that stores the canonical bound variable list for function types
// (see getBoundVarListForFunctionType(), which populates and reads it)
typedef expr::Attribute<attr::LambdaBoundVarListTag, Node> LambdaBoundVarListAttr;
92 
NodeManager(ExprManager * exprManager)93 NodeManager::NodeManager(ExprManager* exprManager) :
94   d_options(new Options()),
95   d_statisticsRegistry(new StatisticsRegistry()),
96   d_resourceManager(new ResourceManager()),
97   d_registrations(new ListenerRegistrationList()),
98   next_id(0),
99   d_attrManager(new expr::attr::AttributeManager()),
100   d_exprManager(exprManager),
101   d_nodeUnderDeletion(NULL),
102   d_inReclaimZombies(false),
103   d_abstractValueCount(0),
104   d_skolemCounter(0) {
105   init();
106 }
107 
NodeManager(ExprManager * exprManager,const Options & options)108 NodeManager::NodeManager(ExprManager* exprManager,
109                          const Options& options) :
110   d_options(new Options()),
111   d_statisticsRegistry(new StatisticsRegistry()),
112   d_resourceManager(new ResourceManager()),
113   d_registrations(new ListenerRegistrationList()),
114   next_id(0),
115   d_attrManager(new expr::attr::AttributeManager()),
116   d_exprManager(exprManager),
117   d_nodeUnderDeletion(NULL),
118   d_inReclaimZombies(false),
119   d_abstractValueCount(0),
120   d_skolemCounter(0)
121 {
122   d_options->copyValues(options);
123   init();
124 }
125 
init()126 void NodeManager::init() {
127   poolInsert( &expr::NodeValue::null() );
128 
129   for(unsigned i = 0; i < unsigned(kind::LAST_KIND); ++i) {
130     Kind k = Kind(i);
131 
132     if(hasOperator(k)) {
133       d_operators[i] = mkConst(Kind(k));
134     }
135   }
136   d_resourceManager->setHardLimit((*d_options)[options::hardLimit]);
137   if((*d_options)[options::perCallResourceLimit] != 0) {
138     d_resourceManager->setResourceLimit((*d_options)[options::perCallResourceLimit], false);
139   }
140   if((*d_options)[options::cumulativeResourceLimit] != 0) {
141     d_resourceManager->setResourceLimit((*d_options)[options::cumulativeResourceLimit], true);
142   }
143   if((*d_options)[options::perCallMillisecondLimit] != 0) {
144     d_resourceManager->setTimeLimit((*d_options)[options::perCallMillisecondLimit], false);
145   }
146   if((*d_options)[options::cumulativeMillisecondLimit] != 0) {
147     d_resourceManager->setTimeLimit((*d_options)[options::cumulativeMillisecondLimit], true);
148   }
149   if((*d_options)[options::cpuTime]) {
150     d_resourceManager->useCPUTime(true);
151   }
152 
153   // Do not notify() upon registration as these were handled manually above.
154   d_registrations->add(d_options->registerTlimitListener(
155       new TlimitListener(d_resourceManager), false));
156   d_registrations->add(d_options->registerTlimitPerListener(
157       new TlimitPerListener(d_resourceManager), false));
158   d_registrations->add(d_options->registerRlimitListener(
159       new RlimitListener(d_resourceManager), false));
160   d_registrations->add(d_options->registerRlimitPerListener(
161       new RlimitPerListener(d_resourceManager), false));
162 }
163 
// Tears down the manager: releases all cached nodes (operators, nullary
// operators, tuple/record type caches, owned datatypes), force-collects
// maxed-out nodes in topological order, then deletes the owned sub-objects.
// The teardown order is deliberate; do not reorder.
NodeManager::~NodeManager() {
  // have to ensure "this" is the current NodeManager during
  // destruction of operators, because they get GCed.

  NodeManagerScope nms(this);

  {
    // Suppress zombie reclamation while attributes are bulk-deleted.
    ScopedBool dontGC(d_inReclaimZombies);
    // hopefully by this point all SmtEngines have been deleted
    // already, along with all their attributes
    d_attrManager->deleteAllAttributes();
  }

  // Drop our references to the cached operator nodes for every kind.
  for(unsigned i = 0; i < unsigned(kind::LAST_KIND); ++i) {
    d_operators[i] = Node::null();
  }

  // Drop the cached nullary-operator nodes (see mkNullaryOperator()).
  d_unique_vars.clear();

  // Reset the tuple/record type caches so their cached TypeNodes release
  // their references before the collection loop below runs.
  TypeNode dummy;
  d_tt_cache.d_children.clear();
  d_tt_cache.d_data = dummy;
  d_rt_cache.d_children.clear();
  d_rt_cache.d_data = dummy;

  // Free the datatypes this manager owns (see registerDatatype()).
  for (std::vector<Datatype*>::iterator
           datatype_iter = d_ownedDatatypes.begin(),
           datatype_end = d_ownedDatatypes.end();
       datatype_iter != datatype_end; ++datatype_iter) {
    Datatype* datatype = *datatype_iter;
    delete datatype;
  }
  d_ownedDatatypes.clear();

  Assert(!d_attrManager->inGarbageCollection() );

  // Nodes whose reference count saturated can only be freed now; sort them
  // so each is zombified only after everything that points to it.
  std::vector<NodeValue*> order = TopologicalSort(d_maxedOut);
  d_maxedOut.clear();

  while (!d_zombies.empty() || !order.empty()) {
    if (d_zombies.empty()) {
      // Delete the maxed out nodes in toplogical order once we know
      // there are no additional zombies, or other nodes to worry about.
      Assert(!order.empty());
      // We process these in reverse to reverse the topological order.
      NodeValue* greatest_maxed_out = order.back();
      order.pop_back();
      Assert(greatest_maxed_out->HasMaximizedReferenceCount());
      Debug("gc") << "Force zombify " << greatest_maxed_out << std::endl;
      greatest_maxed_out->d_rc = 0;
      markForDeletion(greatest_maxed_out);
    } else {
      reclaimZombies();
    }
  }

  poolRemove( &expr::NodeValue::null() );

  // Anything still in the pool at this point leaked; report it when the
  // "gc:leaks" debug channel is enabled.
  if(Debug.isOn("gc:leaks")) {
    Debug("gc:leaks") << "still in pool:" << endl;
    for(NodeValuePool::const_iterator i = d_nodeValuePool.begin(),
          iend = d_nodeValuePool.end();
        i != iend;
        ++i) {
      Debug("gc:leaks") << "  " << *i
                        << " id=" << (*i)->d_id
                        << " rc=" << (*i)->d_rc
                        << " " << **i << endl;
    }
    Debug("gc:leaks") << ":end:" << endl;
  }

  // defensive coding, in case destruction-order issues pop up (they often do)
  delete d_statisticsRegistry;
  d_statisticsRegistry = NULL;
  delete d_registrations;
  d_registrations = NULL;
  delete d_resourceManager;
  d_resourceManager = NULL;
  delete d_attrManager;
  d_attrManager = NULL;
  delete d_options;
  d_options = NULL;
}
248 
registerDatatype(Datatype * dt)249 unsigned NodeManager::registerDatatype(Datatype* dt) {
250   unsigned sz = d_ownedDatatypes.size();
251   d_ownedDatatypes.push_back( dt );
252   return sz;
253 }
254 
getDatatypeForIndex(unsigned index) const255 const Datatype & NodeManager::getDatatypeForIndex( unsigned index ) const{
256   Assert( index<d_ownedDatatypes.size() );
257   return *d_ownedDatatypes[index];
258 }
259 
// Frees all zombie node values (reference count 0) currently queued in
// d_zombies. Not re-entrant: zombies produced while freeing (children whose
// counts drop to zero) are queued and picked up by the caller's loop.
void NodeManager::reclaimZombies() {
  // FIXME multithreading
  Assert(!d_attrManager->inGarbageCollection());

  Debug("gc") << "reclaiming " << d_zombies.size() << " zombie(s)!\n";

  // during reclamation, reclaimZombies() is never supposed to be called
  Assert(! d_inReclaimZombies, "NodeManager::reclaimZombies() not re-entrant!");

  // whether exit is normal or exceptional, the Reclaim dtor is called
  // and ensures that d_inReclaimZombies is set back to false.
  ScopedBool r(d_inReclaimZombies);

  // We copy the set away and clear the NodeManager's set of zombies.
  // This is because reclaimZombie() decrements the RC of the
  // NodeValue's children, which may (recursively) reclaim them.
  //
  // Let's say we're reclaiming zombie NodeValue "A" and its child "B"
  // then becomes a zombie (NodeManager::markForDeletion(B) is called).
  //
  // One way to handle B's zombification would be simply to put it
  // into d_zombies.  This is what we do.  However, if we were to
  // concurrently process d_zombies in the loop below, such addition
  // may be invisible to us (B is leaked) or even invalidate our
  // iterator, causing a crash.  So we need to copy the set away.

  vector<NodeValue*> zombies;
  zombies.reserve(d_zombies.size());
  // Skip zombies that were resurrected (RC became nonzero again) since
  // they were marked.
  remove_copy_if(d_zombies.begin(),
                 d_zombies.end(),
                 back_inserter(zombies),
                 NodeValueReferenceCountNonZero());
  d_zombies.clear();

#ifdef _LIBCPP_VERSION
  NodeValue* last = NULL;
#endif
  for(vector<NodeValue*>::iterator i = zombies.begin();
      i != zombies.end();
      ++i) {
    NodeValue* nv = *i;
#ifdef _LIBCPP_VERSION
    // Work around an apparent bug in libc++'s hash_set<> which can
    // (very occasionally) have an element repeated.
    if(nv == last) {
      continue;
    }
    last = nv;
#endif

    // collect ONLY IF still zero
    if(nv->d_rc == 0) {
      if(Debug.isOn("gc")) {
        Debug("gc") << "deleting node value " << nv
                    << " [" << nv->d_id << "]: ";
        nv->printAst(Debug("gc"));
        Debug("gc") << endl;
      }

      // remove from the pool
      // (variables and nullary operators are never pooled)
      kind::MetaKind mk = nv->getMetaKind();
      if(mk != kind::metakind::VARIABLE && mk != kind::metakind::NULLARY_OPERATOR) {
        poolRemove(nv);
      }

      // whether exit is normal or exceptional, the NVReclaim dtor is
      // called and ensures that d_nodeUnderDeletion is set back to
      // NULL.
      NVReclaim rc(d_nodeUnderDeletion);
      d_nodeUnderDeletion = nv;

      // remove attributes
      { // notify listeners of deleted node
        TNode n;
        n.d_nv = nv;
        nv->d_rc = 1; // so that TNode doesn't assert-fail
        for(vector<NodeManagerListener*>::iterator i = d_listeners.begin(); i != d_listeners.end(); ++i) {
          (*i)->nmNotifyDeleteNode(n);
        }
        // this would mean that one of the listeners stowed away
        // a reference to this node!
        Assert(nv->d_rc == 1);
      }
      nv->d_rc = 0;
      d_attrManager->deleteAllAttributes(nv);

      // decr ref counts of children
      nv->decrRefCounts();
      if(mk == kind::metakind::CONSTANT) {
        // Destroy (call the destructor for) the C++ type representing
        // the constant in this NodeValue.  This is needed for
        // e.g. CVC4::Rational, since it has a gmp internal
        // representation that mallocs memory and should be cleaned
        // up.  (This won't delete a pointer value if used as a
        // constant, but then, you should probably use a smart-pointer
        // type for a constant payload.)
        kind::metakind::deleteNodeValueConstant(nv);
      }
      // NodeValues are malloc'd (variable-length), so free, not delete.
      free(nv);
    }
  }
}/* NodeManager::reclaimZombies() */
362 
// Returns the given roots in topological order (a node appears after all of
// its ancestors among the roots). Iterative DFS with an explicit stack to
// avoid deep recursion; only nodes that are themselves roots are emitted.
std::vector<NodeValue*> NodeManager::TopologicalSort(
    const std::vector<NodeValue*>& roots) {
  std::vector<NodeValue*> order;
  // The stack of nodes to visit. The Boolean value is false when visiting the
  // node in preorder and true when visiting it in postorder.
  std::vector<std::pair<bool, NodeValue*> > stack;
  // Nodes that have been visited in both pre- and postorder
  NodeValueIDSet visited;
  const NodeValueIDSet root_set(roots.begin(), roots.end());

  for (size_t index = 0; index < roots.size(); index++) {
    NodeValue* root = roots[index];
    if (visited.find(root) == visited.end()) {
      stack.push_back(std::make_pair(false, root));
    }
    while (!stack.empty()) {
      NodeValue* current = stack.back().second;
      const bool visited_children = stack.back().first;
      Debug("gc") << "Topological sort " << current << " " << visited_children
                  << std::endl;
      if (visited_children) {
        // Postorder visit: children done; emit only if it is a root.
        if (root_set.find(current) != root_set.end()) {
          order.push_back(current);
        }
        stack.pop_back();
      }
      else if (visited.find(current) == visited.end())
      {
        // Preorder visit: flip this entry to postorder and push children.
        stack.back().first = true;
        visited.insert(current);
        for (unsigned i = 0; i < current->getNumChildren(); ++i) {
          expr::NodeValue* child = current->getChild(i);
          stack.push_back(std::make_pair(false, child));
        }
      }
      else
      {
        // Already fully visited via another path; nothing to do.
        stack.pop_back();
      }
    }
  }
  // Every root must appear in the output exactly once.
  Assert(order.size() == roots.size());
  return order;
} /* NodeManager::TopologicalSort() */
407 
// Computes (and caches, via TypeAttr/TypeCheckedAttr) the type of n,
// optionally running full type checking. Uses an explicit worklist when
// lazy type checking is in effect, to avoid deep recursion.
TypeNode NodeManager::getType(TNode n, bool check)
{
  // Many theories' type checkers call Node::getType() directly.  This
  // is incorrect, since "this" might not be the caller's current node
  // manager.  Rather than force the individual typecheckers not to do
  // this (by policy, which would be imperfect and lead to
  // hard-to-find bugs, which it has in the past), we just set this
  // node manager to be current for the duration of this check.
  //
  NodeManagerScope nms(this);

  TypeNode typeNode;
  // hasType: a type was already computed and cached on n.
  bool hasType = getAttribute(n, TypeAttr(), typeNode);
  // needsCheck: caller wants checking and n was never fully checked.
  bool needsCheck = check && !getAttribute(n, TypeCheckedAttr());


  Debug("getType") << this << " getting type for " << &n << " " << n << ", check=" << check << ", needsCheck = " << needsCheck << ", hasType = " << hasType << endl;

  if(needsCheck && !(*d_options)[options::earlyTypeChecking]) {
    /* Iterate and compute the children bottom up. This avoids stack
       overflows in computeType() when the Node graph is really deep,
       which should only affect us when we're type checking lazily. */
    stack<TNode> worklist;
    worklist.push(n);

    while( !worklist.empty() ) {
      TNode m = worklist.top();

      bool readyToCompute = true;

      // A node is ready once every child has a (checked, if requested) type.
      for( TNode::iterator it = m.begin(), end = m.end();
           it != end;
           ++it ) {
        if( !hasAttribute(*it, TypeAttr())
            || (check && !getAttribute(*it, TypeCheckedAttr())) ) {
          readyToCompute = false;
          worklist.push(*it);
        }
      }

      if( readyToCompute ) {
        Assert( check || m.getMetaKind()!=kind::metakind::NULLARY_OPERATOR );
        /* All the children have types, time to compute */
        typeNode = TypeChecker::computeType(this, m, check);
        worklist.pop();
      }
    } // end while

    /* Last type computed in loop should be the type of n */
    Assert( typeNode == getAttribute(n, TypeAttr()) );
  } else if( !hasType || needsCheck ) {
    /* We can compute the type top-down, without worrying about
       deep recursion. */
    Assert( check || n.getMetaKind()!=kind::metakind::NULLARY_OPERATOR );
    typeNode = TypeChecker::computeType(this, n, check);
  }

  /* The type should be have been computed and stored. */
  Assert( hasAttribute(n, TypeAttr()) );
  /* The check should have happened, if we asked for it. */
  Assert( !check || getAttribute(n, TypeCheckedAttr()) );

  Debug("getType") << "type of " << &n << " " <<  n << " is " << typeNode << endl;
  return typeNode;
}
473 
mkSkolem(const std::string & prefix,const TypeNode & type,const std::string & comment,int flags)474 Node NodeManager::mkSkolem(const std::string& prefix, const TypeNode& type, const std::string& comment, int flags) {
475   Node n = NodeBuilder<0>(this, kind::SKOLEM);
476   setAttribute(n, TypeAttr(), type);
477   setAttribute(n, TypeCheckedAttr(), true);
478   if((flags & SKOLEM_EXACT_NAME) == 0) {
479     stringstream name;
480     name << prefix << '_' << ++d_skolemCounter;
481     setAttribute(n, expr::VarNameAttr(), name.str());
482   } else {
483     setAttribute(n, expr::VarNameAttr(), prefix);
484   }
485   if((flags & SKOLEM_NO_NOTIFY) == 0) {
486     for(vector<NodeManagerListener*>::iterator i = d_listeners.begin(); i != d_listeners.end(); ++i) {
487       (*i)->nmNotifyNewSkolem(n, comment, (flags & SKOLEM_IS_GLOBAL) == SKOLEM_IS_GLOBAL);
488     }
489   }
490   return n;
491 }
492 
mkConstructorType(const DatatypeConstructor & constructor,TypeNode range)493 TypeNode NodeManager::mkConstructorType(const DatatypeConstructor& constructor,
494                                         TypeNode range) {
495   vector<TypeNode> sorts;
496   Debug("datatypes") << "ctor name: " << constructor.getName() << endl;
497   for(DatatypeConstructor::const_iterator i = constructor.begin();
498       i != constructor.end();
499       ++i) {
500     TypeNode selectorType = *(*i).getSelector().getType().d_typeNode;
501     Debug("datatypes") << selectorType << endl;
502     TypeNode sort = selectorType[1];
503 
504     // should be guaranteed here already, but just in case
505     Assert(!sort.isFunctionLike());
506 
507     Debug("datatypes") << "ctor sort: " << sort << endl;
508     sorts.push_back(sort);
509   }
510   Debug("datatypes") << "ctor range: " << range << endl;
511   PrettyCheckArgument(!range.isFunctionLike(), range,
512                       "cannot create higher-order function types");
513   sorts.push_back(range);
514   return mkTypeNode(kind::CONSTRUCTOR_TYPE, sorts);
515 }
516 
getTupleType(NodeManager * nm,std::vector<TypeNode> & types,unsigned index)517 TypeNode NodeManager::TupleTypeCache::getTupleType( NodeManager * nm, std::vector< TypeNode >& types, unsigned index ) {
518   if( index==types.size() ){
519     if( d_data.isNull() ){
520       std::stringstream sst;
521       sst << "__cvc4_tuple";
522       for (unsigned i = 0; i < types.size(); ++ i) {
523         sst << "_" << types[i];
524       }
525       Datatype dt(sst.str());
526       dt.setTuple();
527       std::stringstream ssc;
528       ssc << sst.str() << "_ctor";
529       DatatypeConstructor c(ssc.str());
530       for (unsigned i = 0; i < types.size(); ++ i) {
531         std::stringstream ss;
532         ss << sst.str() << "_stor_" << i;
533         c.addArg(ss.str().c_str(), types[i].toType());
534       }
535       dt.addConstructor(c);
536       d_data = TypeNode::fromType(nm->toExprManager()->mkDatatypeType(dt));
537       Debug("tuprec-debug") << "Return type : " << d_data << std::endl;
538     }
539     return d_data;
540   }else{
541     return d_children[types[index]].getTupleType( nm, types, index+1 );
542   }
543 }
544 
getRecordType(NodeManager * nm,const Record & rec,unsigned index)545 TypeNode NodeManager::RecTypeCache::getRecordType( NodeManager * nm, const Record& rec, unsigned index ) {
546   if( index==rec.getNumFields() ){
547     if( d_data.isNull() ){
548       const Record::FieldVector& fields = rec.getFields();
549       std::stringstream sst;
550       sst << "__cvc4_record";
551       for(Record::FieldVector::const_iterator i = fields.begin(); i != fields.end(); ++i) {
552         sst << "_" << (*i).first << "_" << (*i).second;
553       }
554       Datatype dt(sst.str());
555       dt.setRecord();
556       std::stringstream ssc;
557       ssc << sst.str() << "_ctor";
558       DatatypeConstructor c(ssc.str());
559       for(Record::FieldVector::const_iterator i = fields.begin(); i != fields.end(); ++i) {
560         c.addArg((*i).first, (*i).second);
561       }
562       dt.addConstructor(c);
563       d_data = TypeNode::fromType(nm->toExprManager()->mkDatatypeType(dt));
564       Debug("tuprec-debug") << "Return type : " << d_data << std::endl;
565     }
566     return d_data;
567   }else{
568     return d_children[TypeNode::fromType( rec[index].second )][rec[index].first].getRecordType( nm, rec, index+1 );
569   }
570 }
571 
mkTupleType(const std::vector<TypeNode> & types)572 TypeNode NodeManager::mkTupleType(const std::vector<TypeNode>& types) {
573   std::vector< TypeNode > ts;
574   Debug("tuprec-debug") << "Make tuple type : ";
575   for (unsigned i = 0; i < types.size(); ++ i) {
576     CheckArgument(!types[i].isFunctionLike(), types, "cannot put function-like types in tuples");
577     ts.push_back( types[i] );
578     Debug("tuprec-debug") << types[i] << " ";
579   }
580   Debug("tuprec-debug") << std::endl;
581   return d_tt_cache.getTupleType( this, ts );
582 }
583 
mkRecordType(const Record & rec)584 TypeNode NodeManager::mkRecordType(const Record& rec) {
585   return d_rt_cache.getRecordType( this, rec );
586 }
587 
reclaimAllZombies()588 void NodeManager::reclaimAllZombies(){
589   reclaimZombiesUntil(0u);
590 }
591 
592 /** Reclaim zombies while there are more than k nodes in the pool (if possible).*/
reclaimZombiesUntil(uint32_t k)593 void NodeManager::reclaimZombiesUntil(uint32_t k){
594   if(safeToReclaimZombies()){
595     while(poolSize() >= k && !d_zombies.empty()){
596       reclaimZombies();
597     }
598   }
599 }
600 
poolSize() const601 size_t NodeManager::poolSize() const{
602   return d_nodeValuePool.size();
603 }
604 
mkSort(uint32_t flags)605 TypeNode NodeManager::mkSort(uint32_t flags) {
606   NodeBuilder<1> nb(this, kind::SORT_TYPE);
607   Node sortTag = NodeBuilder<0>(this, kind::SORT_TAG);
608   nb << sortTag;
609   TypeNode tn = nb.constructTypeNode();
610   for(std::vector<NodeManagerListener*>::iterator i = d_listeners.begin(); i != d_listeners.end(); ++i) {
611     (*i)->nmNotifyNewSort(tn, flags);
612   }
613   return tn;
614 }
615 
mkSort(const std::string & name,uint32_t flags)616 TypeNode NodeManager::mkSort(const std::string& name, uint32_t flags) {
617   NodeBuilder<1> nb(this, kind::SORT_TYPE);
618   Node sortTag = NodeBuilder<0>(this, kind::SORT_TAG);
619   nb << sortTag;
620   TypeNode tn = nb.constructTypeNode();
621   setAttribute(tn, expr::VarNameAttr(), name);
622   for(std::vector<NodeManagerListener*>::iterator i = d_listeners.begin(); i != d_listeners.end(); ++i) {
623     (*i)->nmNotifyNewSort(tn, flags);
624   }
625   return tn;
626 }
627 
mkSort(TypeNode constructor,const std::vector<TypeNode> & children,uint32_t flags)628 TypeNode NodeManager::mkSort(TypeNode constructor,
629                                     const std::vector<TypeNode>& children,
630                                     uint32_t flags) {
631   Assert(constructor.getKind() == kind::SORT_TYPE &&
632          constructor.getNumChildren() == 0,
633          "expected a sort constructor");
634   Assert(children.size() > 0, "expected non-zero # of children");
635   Assert( hasAttribute(constructor.d_nv, expr::SortArityAttr()) &&
636           hasAttribute(constructor.d_nv, expr::VarNameAttr()),
637           "expected a sort constructor" );
638   std::string name = getAttribute(constructor.d_nv, expr::VarNameAttr());
639   Assert(getAttribute(constructor.d_nv, expr::SortArityAttr()) == children.size(),
640          "arity mismatch in application of sort constructor");
641   NodeBuilder<> nb(this, kind::SORT_TYPE);
642   Node sortTag = Node(constructor.d_nv->d_children[0]);
643   nb << sortTag;
644   nb.append(children);
645   TypeNode type = nb.constructTypeNode();
646   setAttribute(type, expr::VarNameAttr(), name);
647   for(std::vector<NodeManagerListener*>::iterator i = d_listeners.begin(); i != d_listeners.end(); ++i) {
648     (*i)->nmNotifyInstantiateSortConstructor(constructor, type, flags);
649   }
650   return type;
651 }
652 
mkSortConstructor(const std::string & name,size_t arity,uint32_t flags)653 TypeNode NodeManager::mkSortConstructor(const std::string& name,
654                                         size_t arity,
655                                         uint32_t flags)
656 {
657   Assert(arity > 0);
658   NodeBuilder<> nb(this, kind::SORT_TYPE);
659   Node sortTag = NodeBuilder<0>(this, kind::SORT_TAG);
660   nb << sortTag;
661   TypeNode type = nb.constructTypeNode();
662   setAttribute(type, expr::VarNameAttr(), name);
663   setAttribute(type, expr::SortArityAttr(), arity);
664   for(std::vector<NodeManagerListener*>::iterator i = d_listeners.begin(); i != d_listeners.end(); ++i) {
665     (*i)->nmNotifyNewSortConstructor(type, flags);
666   }
667   return type;
668 }
669 
mkVar(const std::string & name,const TypeNode & type,uint32_t flags)670 Node NodeManager::mkVar(const std::string& name, const TypeNode& type, uint32_t flags) {
671   Node n = NodeBuilder<0>(this, kind::VARIABLE);
672   setAttribute(n, TypeAttr(), type);
673   setAttribute(n, TypeCheckedAttr(), true);
674   setAttribute(n, expr::VarNameAttr(), name);
675   setAttribute(n, expr::GlobalVarAttr(), flags & ExprManager::VAR_FLAG_GLOBAL);
676   for(std::vector<NodeManagerListener*>::iterator i = d_listeners.begin(); i != d_listeners.end(); ++i) {
677     (*i)->nmNotifyNewVar(n, flags);
678   }
679   return n;
680 }
681 
mkVarPtr(const std::string & name,const TypeNode & type,uint32_t flags)682 Node* NodeManager::mkVarPtr(const std::string& name,
683                             const TypeNode& type, uint32_t flags) {
684   Node* n = NodeBuilder<0>(this, kind::VARIABLE).constructNodePtr();
685   setAttribute(*n, TypeAttr(), type);
686   setAttribute(*n, TypeCheckedAttr(), true);
687   setAttribute(*n, expr::VarNameAttr(), name);
688   setAttribute(*n, expr::GlobalVarAttr(), flags & ExprManager::VAR_FLAG_GLOBAL);
689   for(std::vector<NodeManagerListener*>::iterator i = d_listeners.begin(); i != d_listeners.end(); ++i) {
690     (*i)->nmNotifyNewVar(*n, flags);
691   }
692   return n;
693 }
694 
mkBoundVar(const std::string & name,const TypeNode & type)695 Node NodeManager::mkBoundVar(const std::string& name, const TypeNode& type) {
696   Node n = mkBoundVar(type);
697   setAttribute(n, expr::VarNameAttr(), name);
698   return n;
699 }
700 
mkBoundVarPtr(const std::string & name,const TypeNode & type)701 Node* NodeManager::mkBoundVarPtr(const std::string& name,
702                                  const TypeNode& type) {
703   Node* n = mkBoundVarPtr(type);
704   setAttribute(*n, expr::VarNameAttr(), name);
705   return n;
706 }
707 
getBoundVarListForFunctionType(TypeNode tn)708 Node NodeManager::getBoundVarListForFunctionType( TypeNode tn ) {
709   Assert( tn.isFunction() );
710   Node bvl = tn.getAttribute(LambdaBoundVarListAttr());
711   if( bvl.isNull() ){
712     std::vector< Node > vars;
713     for( unsigned i=0; i<tn.getNumChildren()-1; i++ ){
714       vars.push_back( NodeManager::currentNM()->mkBoundVar( tn[i] ) );
715     }
716     bvl = NodeManager::currentNM()->mkNode( kind::BOUND_VAR_LIST, vars );
717     Trace("functions") << "Make standard bound var list " << bvl << " for " << tn << std::endl;
718     tn.setAttribute(LambdaBoundVarListAttr(),bvl);
719   }
720   return bvl;
721 }
722 
mkVar(const TypeNode & type,uint32_t flags)723 Node NodeManager::mkVar(const TypeNode& type, uint32_t flags) {
724   Node n = NodeBuilder<0>(this, kind::VARIABLE);
725   setAttribute(n, TypeAttr(), type);
726   setAttribute(n, TypeCheckedAttr(), true);
727   setAttribute(n, expr::GlobalVarAttr(), flags & ExprManager::VAR_FLAG_GLOBAL);
728   for(std::vector<NodeManagerListener*>::iterator i = d_listeners.begin(); i != d_listeners.end(); ++i) {
729     (*i)->nmNotifyNewVar(n, flags);
730   }
731   return n;
732 }
733 
mkVarPtr(const TypeNode & type,uint32_t flags)734 Node* NodeManager::mkVarPtr(const TypeNode& type, uint32_t flags) {
735   Node* n = NodeBuilder<0>(this, kind::VARIABLE).constructNodePtr();
736   setAttribute(*n, TypeAttr(), type);
737   setAttribute(*n, TypeCheckedAttr(), true);
738   setAttribute(*n, expr::GlobalVarAttr(), flags & ExprManager::VAR_FLAG_GLOBAL);
739   for(std::vector<NodeManagerListener*>::iterator i = d_listeners.begin(); i != d_listeners.end(); ++i) {
740     (*i)->nmNotifyNewVar(*n, flags);
741   }
742   return n;
743 }
744 
mkBoundVar(const TypeNode & type)745 Node NodeManager::mkBoundVar(const TypeNode& type) {
746   Node n = NodeBuilder<0>(this, kind::BOUND_VARIABLE);
747   setAttribute(n, TypeAttr(), type);
748   setAttribute(n, TypeCheckedAttr(), true);
749   return n;
750 }
751 
mkBoundVarPtr(const TypeNode & type)752 Node* NodeManager::mkBoundVarPtr(const TypeNode& type) {
753   Node* n = NodeBuilder<0>(this, kind::BOUND_VARIABLE).constructNodePtr();
754   setAttribute(*n, TypeAttr(), type);
755   setAttribute(*n, TypeCheckedAttr(), true);
756   return n;
757 }
758 
mkInstConstant(const TypeNode & type)759 Node NodeManager::mkInstConstant(const TypeNode& type) {
760   Node n = NodeBuilder<0>(this, kind::INST_CONSTANT);
761   n.setAttribute(TypeAttr(), type);
762   n.setAttribute(TypeCheckedAttr(), true);
763   return n;
764 }
765 
mkBooleanTermVariable()766 Node NodeManager::mkBooleanTermVariable() {
767   Node n = NodeBuilder<0>(this, kind::BOOLEAN_TERM_VARIABLE);
768   n.setAttribute(TypeAttr(), booleanType());
769   n.setAttribute(TypeCheckedAttr(), true);
770   return n;
771 }
772 
mkNullaryOperator(const TypeNode & type,Kind k)773 Node NodeManager::mkNullaryOperator(const TypeNode& type, Kind k) {
774   std::map< TypeNode, Node >::iterator it = d_unique_vars[k].find( type );
775   if( it==d_unique_vars[k].end() ){
776     Node n = NodeBuilder<0>(this, k).constructNode();
777     setAttribute(n, TypeAttr(), type);
778     //setAttribute(n, TypeCheckedAttr(), true);
779     d_unique_vars[k][type] = n;
780     Assert( n.getMetaKind() == kind::metakind::NULLARY_OPERATOR );
781     return n;
782   }else{
783     return it->second;
784   }
785 }
786 
mkAbstractValue(const TypeNode & type)787 Node NodeManager::mkAbstractValue(const TypeNode& type) {
788   Node n = mkConst(AbstractValue(++d_abstractValueCount));
789   n.setAttribute(TypeAttr(), type);
790   n.setAttribute(TypeCheckedAttr(), true);
791   return n;
792 }
793 
safeToReclaimZombies() const794 bool NodeManager::safeToReclaimZombies() const{
795   // FIXME multithreading
796   return !d_inReclaimZombies && !d_attrManager->inGarbageCollection();
797 }
798 
deleteAttributes(const std::vector<const expr::attr::AttributeUniqueId * > & ids)799 void NodeManager::deleteAttributes(const std::vector<const expr::attr::AttributeUniqueId*>& ids){
800   d_attrManager->deleteAttributes(ids);
801 }
802 
debugHook(int debugFlag)803 void NodeManager::debugHook(int debugFlag){
804   // For debugging purposes only, DO NOT CHECK IN ANY CODE!
805 }
806 
807 }/* CVC4 namespace */
808