1 /*
2
3 This file is part of the Maude 2 interpreter.
4
5 Copyright 1997-2003 SRI International, Menlo Park, CA 94025, USA.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
20
21 */
22
23 //
24 // Implementation for base class MemoryCell
25 //
26
27 // utility stuff
28 #include "macros.hh"
29 #include "vector.hh"
30
31 // forward declarations
32 #include "interface.hh"
33 #include "core.hh"
34
35 #include "symbol.hh"
36 #include "dagNode.hh"
37 #include "rootContainer.hh"
38
39 #include "memoryCell.hh"
40
//
//	Size of a full dag node measured in machine words; used to
//	dimension the raw storage array inside each Arena.
//
const int
MemoryCell::dagNodeSize =
sizeof(MemoryCell::FullSizeMemoryCell) / sizeof(MachineWord);
44
//
//	An Arena is a fixed-size slab holding ARENA_SIZE full-size memory
//	cells; arenas are chained into a singly linked list via nextArena.
//
struct MemoryCell::Arena
{
  union
  {
    Arena* nextArena;	// link to the next arena in the list
    Int64 alignmentDummy;	// force 8 byte alignment for FullSizeMemoryCell objects
  };
  MachineWord storage[ARENA_SIZE * dagNodeSize];	// raw node storage
  FullSizeMemoryCell* firstNode();	// first cell carved from storage
};
55
56 inline MemoryCell::FullSizeMemoryCell*
firstNode()57 MemoryCell::Arena::firstNode()
58 {
59 return reinterpret_cast<FullSizeMemoryCell*>(storage);
60 }
61
bool MemoryCell::showGC = false;	// print statistics after each garbage collect
//
//	Arena management variables.
//
int MemoryCell::nrArenas = 0;	// total number of arenas allocated so far
int MemoryCell::nrNodesInUse = 0;	// reset at the start of each mark phase; presumably counted during marking — confirm in header
bool MemoryCell::currentArenaPastActiveArena = true;	// true once allocation has moved beyond lastActiveArena
bool MemoryCell::needToCollectGarbage = false;	// set when the last arena's reserve is being consumed
MemoryCell::Arena* MemoryCell::firstArena = 0;	// head of the arena list
MemoryCell::Arena* MemoryCell::lastArena = 0;	// tail of the arena list
MemoryCell::Arena* MemoryCell::currentArena = 0;	// arena currently being allocated from
MemoryCell::FullSizeMemoryCell* MemoryCell::nextNode = 0;	// next candidate cell for the lazy sweep
MemoryCell::FullSizeMemoryCell* MemoryCell::endPointer = 0;	// end of the allocatable region in currentArena
MemoryCell::Arena* MemoryCell::lastActiveArena = 0;	// arena holding the last surviving node from the previous GC
MemoryCell::FullSizeMemoryCell* MemoryCell::lastActiveNode = 0;	// last surviving node from the previous GC
//
//	Bucket management variables.
//
int MemoryCell::nrBuckets = 0;	// total number of buckets allocated
MemoryCell::Bucket* MemoryCell::bucketList = 0;	// buckets currently being allocated from
MemoryCell::Bucket* MemoryCell::unusedList = 0;	// empty buckets available for reuse
size_t MemoryCell::bucketStorage = 0;	// total usable bytes across all buckets
size_t MemoryCell::storageInUse = 0;	// reset before each mark phase; presumably accumulated by allocateStorage — confirm in header
size_t MemoryCell::target = INITIAL_TARGET;	// storage threshold, grown after each GC
86
87 MemoryCell::Arena*
allocateNewArena()88 MemoryCell::allocateNewArena()
89 {
90 #ifdef GC_DEBUG
91 cerr << "allocateNewArena()\n";
92 dumpMemoryVariables(cerr);
93 #endif
94 Arena* a = new Arena;
95 a->nextArena = 0;
96 if (lastArena == 0)
97 firstArena = a;
98 else
99 lastArena->nextArena = a;
100 lastArena = a;
101 FullSizeMemoryCell* d = a->firstNode();
102 for (int i = 0; i < ARENA_SIZE; i++, d++)
103 d->h.flags = 0;
104 ++nrArenas;
105 return a;
106 }
107
//
//	Slow path for node allocation: called when the fast inline path ran
//	out of free cells.  Advances through the arena list, performing a
//	lazy sweep (clearing marks, running pending destructors) until a
//	free cell is found; allocates new arenas as needed.
//
MemoryCell*
MemoryCell::slowNew()
{
#ifdef GC_DEBUG
  cerr << "slowNew()\n";
  dumpMemoryVariables(cerr);
#endif
  for(;;)
    {
      if (currentArena == 0)
	{
	  //
	  //	Allocate first arena.  Hold back RESERVE_SIZE cells so a
	  //	reserve remains before garbage collection is forced.
	  //
	  currentArena = allocateNewArena();
	  FullSizeMemoryCell* d = currentArena->firstNode();
	  endPointer = d + ARENA_SIZE - RESERVE_SIZE;
	  nextNode = d + 1;
	  Assert(d->h.flags == 0, "flags not cleared");
	  return d;
	}
      Arena* a = currentArena->nextArena;
      if (a == 0)
	{
	  //
	  //	We are in the last arena; request a GC at the next
	  //	opportunity and tide ourselves over until then.
	  //
	  needToCollectGarbage = true;
	  FullSizeMemoryCell* e = currentArena->firstNode() + ARENA_SIZE;
	  if (endPointer != e)
	    {
	      //
	      //	Use up reserve.
	      //
	      nextNode = endPointer;  // nextNode is invalid where we are called
	      endPointer = e;
	    }
	  else
	    {
	      //
	      //	Reserve already consumed: allocate a new arena.
	      //
	      if (currentArena == lastActiveArena)
		currentArenaPastActiveArena = true;
	      currentArena = allocateNewArena();
	      //
	      //	A brand new arena is all free, so hand out its first
	      //	node directly; no reserve held back here.
	      //
	      FullSizeMemoryCell* d = currentArena->firstNode();
	      endPointer = d + ARENA_SIZE;
	      nextNode = d + 1;
	      Assert(d->h.flags == 0, "flags not cleared");
	      return d;
	    }
	}
      else
	{
	  //
	  //	Use next arena.  Only the final arena in the list holds
	  //	back the RESERVE_SIZE cells.
	  //
	  if (currentArena == lastActiveArena)
	    currentArenaPastActiveArena = true;
	  currentArena = a;
	  nextNode = a->firstNode();
	  endPointer = nextNode +
	    ((a->nextArena != 0) ? ARENA_SIZE : ARENA_SIZE - RESERVE_SIZE);
	}
      //
      //	Now execute lazy sweep to actually find a free location.
      //
#ifdef GC_DEBUG
      checkInvariant();
#endif
      FullSizeMemoryCell* e = endPointer;
      for (FullSizeMemoryCell* d = nextNode; d != e; d++)
	{
	  //
	  //	Neither marked nor awaiting a destructor call: free cell.
	  //
	  if ((d->h.flags & (MARKED | CALL_DTOR)) == 0)
	    {
	      nextNode = d + 1;
	      return d;
	    }
	  //
	  //	Unmarked but needs its destructor run before reuse.
	  //
	  if ((d->h.flags & MARKED) == 0)
	    {
	      d->callDtor();
	      nextNode = d + 1;
	      return d;
	    }
	  //
	  //	Marked (live): clear the mark for the next GC cycle and
	  //	keep sweeping.
	  //
	  d->h.flags &= ~MARKED;
	}
    }
}
193
194 void*
slowAllocateStorage(size_t bytesNeeded)195 MemoryCell::slowAllocateStorage(size_t bytesNeeded)
196 {
197 Bucket* p = 0;
198 for (Bucket* b = unusedList; b; p = b, b = b->nextBucket)
199 {
200 if (b->bytesFree >= bytesNeeded)
201 {
202 //
203 // Move b from unused list to bucket list
204 //
205 if (p == 0)
206 unusedList = b->nextBucket;
207 else
208 p->nextBucket = b->nextBucket;
209 b->nextBucket = bucketList;
210 bucketList = b;
211 //
212 // Allocate storage from b
213 //
214 b->bytesFree -= bytesNeeded;
215 void* t = b->nextFree;
216 b->nextFree = static_cast<char*>(t) + bytesNeeded;
217 return t;
218 }
219 }
220 //
221 // Create new bucket
222 //
223 size_t size = BUCKET_MULTIPLIER * bytesNeeded;
224 if (size < MIN_BUCKET_SIZE)
225 size = MIN_BUCKET_SIZE;
226
227 Bucket* b = static_cast<Bucket*>(operator new[](size));
228 ++nrBuckets;
229 void* t = b + 1;
230 size_t nrBytes = size - sizeof(Bucket);
231 bucketStorage += nrBytes;
232 b->nrBytes = nrBytes;
233 b->bytesFree = nrBytes - bytesNeeded;
234 b->nextFree = static_cast<char*>(t) + bytesNeeded;
235 b->nextBucket = bucketList;
236 bucketList = b;
237 return t;
238 }
239
//
//	Finish off the interrupted lazy sweep before a garbage collect:
//	from nextNode through lastActiveNode, clear MARKED flags on live
//	cells and run pending destructors on dead ones, while recording
//	the position of the last live cell seen.
//
void
MemoryCell::tidyArenas()
{
#ifdef GC_DEBUG
  cerr << "tidyArenas()\n";
  dumpMemoryVariables(cerr);
#endif
  //
  // Tidy up lazy sweep phase - clear marked flags and call dtors
  // where necessary.
  //
  Arena* newLastActiveArena = currentArena;
  FullSizeMemoryCell* newLastActiveNode = nextNode - 1;  // nextNode never points to first node

  if (!currentArenaPastActiveArena)
    {
      //
      //	First tidy arenas from current up to lastActive.
      //	Note the comma-operator update: c advances first, then d is
      //	reset to the new arena's first node.
      //
      FullSizeMemoryCell* d = nextNode;
      Arena* c = currentArena;
      for (; c != lastActiveArena; c = c->nextArena, d = c->firstNode())
	{
	  FullSizeMemoryCell* e = c->firstNode() + ARENA_SIZE;
	  for (; d != e; d++)
	    {
	      if (d->h.flags & MARKED)
		{
		  //
		  //	Live cell: remember it and clear its mark.
		  //
		  newLastActiveArena = c;
		  newLastActiveNode = d;
		  d->h.flags &= ~MARKED;
		}
	      else
		{
		  //
		  //	Dead cell: run its destructor if one is pending.
		  //
		  if (d->h.flags & CALL_DTOR)
		    d->callDtor();
		  d->h.flags = 0;
		}
	    }
	}
      //
      //	Now tidy lastActiveArena from d upto and including lastActiveNode.
      //
      FullSizeMemoryCell* e = lastActiveNode;
      for(; d <= e; d++)
	{
	  if (d->h.flags & MARKED)
	    {
	      newLastActiveArena = c;
	      newLastActiveNode = d;
	      d->h.flags &= ~MARKED;
	    }
	  else
	    {
	      if (d->h.flags & CALL_DTOR)
		d->callDtor();
	      d->h.flags = 0;
	    }
	}
    }

  lastActiveArena = newLastActiveArena;
  lastActiveNode = newLastActiveNode;
}
304
//
//	Run a full garbage collection: tidy the arenas, mark all reachable
//	nodes from the root containers, recycle bucket storage, report
//	statistics if requested, and reset the allocation pointers.
//
void
MemoryCell::collectGarbage()
{
  if (firstArena == 0)
    return;	// nothing has ever been allocated
  tidyArenas();
#ifdef GC_DEBUG
  checkArenas();
#endif
  //
  //	Mark phase.  Swap the bucket lists so that marking allocates from
  //	the previously unused buckets; the old in-use buckets are held
  //	aside in b and recycled below.
  //
  nrNodesInUse = 0;
  size_t oldStorageInUse = storageInUse;
  Bucket* b = bucketList;
  bucketList = unusedList;
  unusedList = 0;
  storageInUse = 0;

  RootContainer::markPhase();

  //
  //	Recycle the old buckets: reset each one to empty and make them
  //	the new unused list.
  //
  unusedList = b;
  for (; b; b = b->nextBucket)
    {
      b->bytesFree = b->nrBytes;
      b->nextFree = b + 1;  // reset
    }
  //
  //	Grow the storage target so the next GC is not triggered too soon.
  //
  size_t newTarget = TARGET_MULTIPLIER * storageInUse;
  if (target < newTarget)
    target = newTarget;
  //
  // Calculate if we should allocate more arenas to avoid an early gc.
  //
  int nrNodes = nrArenas * ARENA_SIZE;
  if (showGC)
    {
      cout << "Arenas: " << nrArenas <<
	"\tNodes: " << nrNodes <<
	//	"\tIn use: " << nrNodesInUse <<
	//	"\tCollected: " << reclaimed <<
	"\tNow: " << nrNodesInUse <<
	"\nBuckets: " << nrBuckets <<
	"\tBytes: " << bucketStorage <<
	"\tIn use: " << oldStorageInUse <<
	"\tCollected: " << oldStorageInUse - storageInUse <<
	"\tNow: " << storageInUse << '\n';
    }
  //
  // Allocate new arenas so that we have at least 50% of nodes unused.
  //
  int neededArenas = ceilingDivision(2 * nrNodesInUse, ARENA_SIZE);
  while (nrArenas < neededArenas)
    (void) allocateNewArena();
  //
  //	Reset node allocation to the start of the first arena; only the
  //	final arena in the list holds back the RESERVE_SIZE cells.
  //
  currentArenaPastActiveArena = false;
  currentArena = firstArena;
  nextNode = currentArena->firstNode();
  endPointer = nextNode +
    ((firstArena->nextArena != 0) ? ARENA_SIZE : ARENA_SIZE - RESERVE_SIZE);
  needToCollectGarbage = false;
#ifdef GC_DEBUG
  // stompArenas();
  cerr << "end of GC\n";
  dumpMemoryVariables(cerr);
#endif
}
373
374 #ifdef GC_DEBUG
375 void
stompArenas()376 MemoryCell::stompArenas()
377 {
378 for (Arena* a = firstArena; a != 0; a = a->nextArena)
379 {
380 FullSizeMemoryCell* d = a->firstNode();
381 for (int i = 0; i < ARENA_SIZE; i++, d++)
382 {
383 if (!(d->h.flags & MARKED) && !(d->h.flags & CALL_DTOR))
384 d->topSymbol = reinterpret_cast<Symbol*>(0x33);
385 }
386 }
387 }
388
389 void
checkArenas()390 MemoryCell::checkArenas()
391 {
392 int n = 0;
393 for (Arena* a = firstArena; a != 0; a = a->nextArena, n++)
394 {
395 FullSizeMemoryCell* d = a->firstNode();
396 for (int i = 0; i < ARENA_SIZE; i++, d++)
397 {
398 if (d->h.flags & MARKED)
399 {
400 cerr << "checkArenas(): MARKED DagNode! arena = " <<
401 n << " node = " << i << '\n';
402 }
403 }
404 }
405 }
406
407 void
checkInvariant()408 MemoryCell::checkInvariant()
409 {
410 int n = 0;
411 for (Arena* a = firstArena;; a = a->nextArena, n++)
412 {
413 FullSizeMemoryCell* d = a->firstNode();
414 int bound = (a == currentArena) ? nextNode - d : ARENA_SIZE;
415 for (int i = 0; i < bound; i++, d++)
416 {
417 if (d->h.flags & MARKED)
418 {
419 cerr << "checkInvariant() : MARKED DagNode! arena = " <<
420 n << " node = " << i << '\n';
421 }
422 }
423 if (a == currentArena)
424 break;
425 }
426 }
427
428 void
dumpMemoryVariables(ostream & s)429 MemoryCell::dumpMemoryVariables(ostream& s)
430 {
431 s << "nrArenas = " << nrArenas <<
432 "\nnrNodesInUse = " << nrNodesInUse <<
433 "\ncurrentArenaPastActiveArena = " << currentArenaPastActiveArena <<
434 "\nneedToCollectGarbage = " << needToCollectGarbage <<
435 "\nfirstArena = " << firstArena <<
436 "\nlastArena = " << lastArena <<
437 "\ncurrentArena = " << currentArena <<
438 "\nnextNode = " << static_cast<void*>(nextNode) <<
439 "\nendPointer = " << static_cast<void*>(endPointer) <<
440 "\nlastActiveArena = " << lastActiveArena <<
441 "\nlastActiveNode = " << static_cast<void*>(lastActiveNode) <<
442 "\n\n";
443 }
444 #endif
445