1 /*
2 Title: memmgr.cpp Memory segment manager
3
4 Copyright (c) 2006-7, 2011-12, 2016-18 David C. J. Matthews
5
6 This library is free software; you can redistribute it and/or
7 modify it under the terms of the GNU Lesser General Public
8 License version 2.1 as published by the Free Software Foundation.
9
10 This library is distributed in the hope that it will be useful,
11 but WITHOUT ANY WARRANTY; without even the implied warranty of
12 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 Lesser General Public License for more details.
14
15 You should have received a copy of the GNU Lesser General Public
16 License along with this library; if not, write to the Free Software
17 Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
18
19 */
20 #ifdef HAVE_CONFIG_H
21 #include "config.h"
22 #elif defined(_WIN32)
23 #include "winconfig.h"
24 #else
25 #error "No configuration file"
26 #endif
27
28 #ifdef HAVE_ASSERT_H
29 #include <assert.h>
30 #define ASSERT(x) assert(x)
31 #else
32 #define ASSERT(x)
33 #endif
34
35 #include <stdio.h>
36
37 #include <new>
38
39 #include "globals.h"
40 #include "memmgr.h"
41 #include "osmem.h"
42 #include "scanaddrs.h"
43 #include "bitmap.h"
44 #include "mpoly.h"
45 #include "diagnostics.h"
46 #include "statistics.h"
47 #include "processes.h"
48 #include "machine_dep.h"
49
50
#ifdef POLYML32IN64
// This contains the address of the base of the heap.
// NOTE(review): code-space forwarding pointers are decoded relative to
// globalCodeBase, not globalHeapBase (see PromoteExportSpaces).
PolyWord *globalHeapBase, *globalCodeBase;
#endif

// heap resizing policy option requested on command line
unsigned heapsizingOption = 0;

// If we are building for the interpreted version we don't need or want the
// code to be executable.
static const enum OSMem::_MemUsage executableCodeWhereNecessary =
#ifdef CODEISNOTEXECUTABLE
    OSMem::UsageData;
#else
    OSMem::UsageExecutableCode;
#endif
67
MemSpace(OSMem * alloc)68 MemSpace::MemSpace(OSMem *alloc): SpaceTree(true)
69 {
70 spaceType = ST_PERMANENT;
71 isMutable = false;
72 bottom = 0;
73 top = 0;
74 isCode = false;
75 allocator = alloc;
76 shadowSpace = 0;
77 }
78
~MemSpace()79 MemSpace::~MemSpace()
80 {
81 if (allocator != 0 && bottom != 0)
82 {
83 if (isCode)
84 allocator->FreeCodeArea(bottom, shadowSpace, (char*)top - (char*)bottom);
85 else allocator->FreeDataArea(bottom, (char*)top - (char*)bottom);
86 }
87 }
88
// A markable space adds a per-space lock ("Local space") used while the
// space's contents are being modified or scanned.
MarkableSpace::MarkableSpace(OSMem *alloc): MemSpace(alloc), spaceLock("Local space")
{
}
92
LocalMemSpace(OSMem * alloc)93 LocalMemSpace::LocalMemSpace(OSMem *alloc): MarkableSpace(alloc)
94 {
95 spaceType = ST_LOCAL;
96 upperAllocPtr = lowerAllocPtr = 0;
97 for (unsigned i = 0; i < NSTARTS; i++)
98 start[i] = 0;
99 start_index = 0;
100 i_marked = m_marked = updated = 0;
101 allocationSpace = false;
102 }
103
// Set the limits and GC bookkeeping pointers for a newly allocated local
// space ("size" is in words) and create its mark bitmap.
// Returns false only if the bitmap cannot be created.
bool LocalMemSpace::InitSpace(PolyWord *heapSpace, uintptr_t size, bool mut)
{
    isMutable = mut;
    bottom = heapSpace;
    top = bottom + size;
    // Initialise all the fields. The partial GC in particular relies on this.
    // Upper-growing pointers start at the top, lower-growing ones at the bottom.
    upperAllocPtr = partialGCTop = fullGCRescanStart = fullGCLowerLimit = lowestWeak = top;
    lowerAllocPtr = partialGCScan = partialGCRootBase = partialGCRootTop =
        fullGCRescanEnd = highestWeak = bottom;
#ifdef POLYML32IN64
    // The address must be on an odd-word boundary so that after the length
    // word is put in the actual cell address is on an even-word boundary.
    lowerAllocPtr[0] = PolyWord::FromUnsigned(0);
    lowerAllocPtr = bottom + 1;
#endif
    spaceOwner = 0; // No owner initially.

    allocationSpace = false;

    // Bitmap for the space.
    return bitmap.Create(size);
}
126
MemMgr()127 MemMgr::MemMgr(): allocLock("Memmgr alloc"), codeBitmapLock("Code bitmap")
128 {
129 nextIndex = 0;
130 reservedSpace = 0;
131 nextAllocator = 0;
132 defaultSpaceSize = 0;
133 spaceBeforeMinorGC = 0;
134 spaceForHeap = 0;
135 currentAllocSpace = currentHeapSize = 0;
136 defaultSpaceSize = 1024 * 1024 / sizeof(PolyWord); // 1Mbyte segments.
137 spaceTree = new SpaceTreeTree;
138 }
139
~MemMgr()140 MemMgr::~MemMgr()
141 {
142 delete(spaceTree); // Have to do this before we delete the spaces.
143 for (std::vector<PermanentMemSpace *>::iterator i = pSpaces.begin(); i < pSpaces.end(); i++)
144 delete(*i);
145 for (std::vector<LocalMemSpace*>::iterator i = lSpaces.begin(); i < lSpaces.end(); i++)
146 delete(*i);
147 for (std::vector<PermanentMemSpace *>::iterator i = eSpaces.begin(); i < eSpaces.end(); i++)
148 delete(*i);
149 for (std::vector<StackSpace *>::iterator i = sSpaces.begin(); i < sSpaces.end(); i++)
150 delete(*i);
151 for (std::vector<CodeSpace *>::iterator i = cSpaces.begin(); i < cSpaces.end(); i++)
152 delete(*i);
153 }
154
// Initialise the per-usage allocators.  In the 32-in-64 build large
// contiguous regions are reserved up front and their bases recorded.
bool MemMgr::Initialise()
{
#ifdef POLYML32IN64
    // Allocate a single 16G area but with no access.
    void *heapBase;
    if (!osHeapAlloc.Initialise(OSMem::UsageData, (size_t)16 * 1024 * 1024 * 1024, &heapBase))
        return false;
    globalHeapBase = (PolyWord*)heapBase;

    // Allocate a 4 gbyte area for the stacks.
    // It's important that the stack and code areas have addresses with
    // non-zero top 32-bits.
    if (!osStackAlloc.Initialise(OSMem::UsageStack, (size_t)4 * 1024 * 1024 * 1024))
        return false;

    // Allocate a 2G area for the code.
    void *codeBase;
    if (!osCodeAlloc.Initialise(executableCodeWhereNecessary,
            (size_t)2 * 1024 * 1024 * 1024, &codeBase))
        return false;
    globalCodeBase = (PolyWord*)codeBase;
    return true;
#else
    // Normal build: no up-front reservation; just initialise each allocator.
    return osHeapAlloc.Initialise(OSMem::UsageData) && osStackAlloc.Initialise(OSMem::UsageStack) &&
        osCodeAlloc.Initialise(executableCodeWhereNecessary);
#endif
}
182
// Create and initialise a new local space and add it to the table.
// "size" is in words; it may be rounded up by the OS allocator.
// Returns 0 (logging under DEBUG_MEMMGR) if memory cannot be obtained.
LocalMemSpace* MemMgr::NewLocalSpace(uintptr_t size, bool mut)
{
    try {
        LocalMemSpace *space = new LocalMemSpace(&osHeapAlloc);
        // Before trying to allocate the heap temporarily allocate the
        // reserved space. This ensures that this much space will always
        // be available for C stacks and the C++ heap.
        void *reservation = 0;
        size_t rSpace = reservedSpace*sizeof(PolyWord);

        if (reservedSpace != 0) {
            reservation = osHeapAlloc.AllocateDataArea(rSpace);
            if (reservation == NULL) {
                // Insufficient space for the reservation. Can't allocate this local space.
                if (debugOptions & DEBUG_MEMMGR)
                    Log("MMGR: New local %smutable space: insufficient reservation space\n", mut ? "": "im");
                delete space;
                return 0;
            }
        }

        // Allocate the heap itself.
        size_t iSpace = size * sizeof(PolyWord);
        PolyWord* heapSpace = (PolyWord*)osHeapAlloc.AllocateDataArea(iSpace);
        // The size may have been rounded up to a block boundary.
        size = iSpace / sizeof(PolyWord);
        bool success = heapSpace != 0 && space->InitSpace(heapSpace, size, mut) && AddLocalSpace(space);

        // Release the temporary reservation now the real allocation is done.
        if (reservation != 0) osHeapAlloc.FreeDataArea(reservation, rSpace);
        if (success)
        {
            if (debugOptions & DEBUG_MEMMGR)
                Log("MMGR: New local %smutable space %p, size=%luk words, bottom=%p, top=%p\n", mut ? "": "im",
                    space, space->spaceSize()/1024, space->bottom, space->top);
            currentHeapSize += space->spaceSize();
            globalStats.setSize(PSS_TOTAL_HEAP, currentHeapSize * sizeof(PolyWord));
            return space;
        }

        // If something went wrong.
        delete space;
        if (debugOptions & DEBUG_MEMMGR)
            Log("MMGR: New local %smutable space: insufficient space\n", mut ? "": "im");
        return 0;
    }
    catch (std::bad_alloc&) {
        // "new LocalMemSpace" itself failed.
        if (debugOptions & DEBUG_MEMMGR)
            Log("MMGR: New local %smutable space: \"new\" failed\n", mut ? "": "im");
        return 0;
    }
}
235
236 // Create a local space for initial allocation.
CreateAllocationSpace(uintptr_t size)237 LocalMemSpace *MemMgr::CreateAllocationSpace(uintptr_t size)
238 {
239 LocalMemSpace *result = NewLocalSpace(size, true);
240 if (result)
241 {
242 result->allocationSpace = true;
243 currentAllocSpace += result->spaceSize();
244 globalStats.incSize(PSS_ALLOCATION, result->spaceSize()*sizeof(PolyWord));
245 globalStats.incSize(PSS_ALLOCATION_FREE, result->freeSpace()*sizeof(PolyWord));
246 }
247 return result;
248 }
249
// If an allocation space has a lot of data left in it after a GC, particularly
// a single large object we should turn it into a local area.
void MemMgr::ConvertAllocationSpaceToLocal(LocalMemSpace *space)
{
    ASSERT(space->allocationSpace);
    space->allocationSpace = false;
    // Currently it is left as a mutable area but if the contents are all
    // immutable e.g. a large vector it could be better to turn it into an
    // immutable area.
    // It no longer counts against the allocation-area budget.
    currentAllocSpace -= space->spaceSize();
}
261
262 // Add a local memory space to the table.
AddLocalSpace(LocalMemSpace * space)263 bool MemMgr::AddLocalSpace(LocalMemSpace *space)
264 {
265 // Add to the table.
266 // Update the B-tree.
267 try {
268 AddTree(space);
269 // The entries in the local table are ordered so that the copy phase of the full
270 // GC simply has to copy to an entry earlier in the table. Immutable spaces come
271 // first, followed by mutable spaces and finally allocation spaces.
272 if (space->allocationSpace)
273 lSpaces.push_back(space); // Just add at the end
274 else if (space->isMutable)
275 {
276 // Add before the allocation spaces
277 std::vector<LocalMemSpace*>::iterator i = lSpaces.begin();
278 while (i != lSpaces.end() && ! (*i)->allocationSpace) i++;
279 lSpaces.insert(i, space);
280 }
281 else
282 {
283 // Immutable space: Add before the mutable spaces
284 std::vector<LocalMemSpace*>::iterator i = lSpaces.begin();
285 while (i != lSpaces.end() && ! (*i)->isMutable) i++;
286 lSpaces.insert(i, space);
287 }
288 }
289 catch (std::bad_alloc&) {
290 RemoveTree(space);
291 return false;
292 }
293 return true;
294 }
295
296 // Create an entry for a permanent space.
NewPermanentSpace(PolyWord * base,uintptr_t words,unsigned flags,unsigned index,unsigned hierarchy)297 PermanentMemSpace* MemMgr::NewPermanentSpace(PolyWord *base, uintptr_t words,
298 unsigned flags, unsigned index, unsigned hierarchy /*= 0*/)
299 {
300 try {
301 PermanentMemSpace *space = new PermanentMemSpace(0/* Not freed */);
302 space->bottom = base;
303 space->topPointer = space->top = space->bottom + words;
304 space->spaceType = ST_PERMANENT;
305 space->isMutable = flags & MTF_WRITEABLE ? true : false;
306 space->noOverwrite = flags & MTF_NO_OVERWRITE ? true : false;
307 space->byteOnly = flags & MTF_BYTES ? true : false;
308 space->isCode = flags & MTF_EXECUTABLE ? true : false;
309 space->index = index;
310 space->hierarchy = hierarchy;
311 if (index >= nextIndex) nextIndex = index+1;
312
313 // Extend the permanent memory table and add this space to it.
314 try {
315 AddTree(space);
316 pSpaces.push_back(space);
317 }
318 catch (std::exception&) {
319 RemoveTree(space);
320 delete space;
321 return 0;
322 }
323 return space;
324 }
325 catch (std::bad_alloc&) {
326 return 0;
327 }
328 }
329
// Allocate fresh memory (data or code area depending on the flags) and
// create a permanent space over it.  Unlike NewPermanentSpace the memory
// here is owned and eventually freed by the space.  Returns 0 on failure.
PermanentMemSpace *MemMgr::AllocateNewPermanentSpace(uintptr_t byteSize, unsigned flags, unsigned index, unsigned hierarchy)
{
    try {
        // Executable spaces come from the code allocator, others from the heap allocator.
        OSMem *alloc = flags & MTF_EXECUTABLE ? &osCodeAlloc : &osHeapAlloc;
        PermanentMemSpace *space = new PermanentMemSpace(alloc);
        size_t actualSize = byteSize; // May be rounded up by the allocator.
        PolyWord* base;
        void* newShadow=0;
        if (flags & MTF_EXECUTABLE)
            base = (PolyWord*)alloc->AllocateCodeArea(actualSize, newShadow);
        else base = (PolyWord*)alloc->AllocateDataArea(actualSize);
        if (base == 0)
        {
            // Nothing allocated yet, so deleting the space frees nothing.
            delete(space);
            return 0;
        }
        space->bottom = base;
        space->shadowSpace = (PolyWord*)newShadow;
        space->topPointer = space->top = space->bottom + actualSize/sizeof(PolyWord);
        space->spaceType = ST_PERMANENT;
        space->isMutable = flags & MTF_WRITEABLE ? true : false;
        space->noOverwrite = flags & MTF_NO_OVERWRITE ? true : false;
        space->byteOnly = flags & MTF_BYTES ? true : false;
        space->isCode = flags & MTF_EXECUTABLE ? true : false;
        space->index = index;
        space->hierarchy = hierarchy;
        if (index >= nextIndex) nextIndex = index + 1;

        // Extend the permanent memory table and add this space to it.
        try {
            AddTree(space);
            pSpaces.push_back(space);
        }
        catch (std::exception&) {
            RemoveTree(space);
            delete space;
            return 0;
        }
        return space;
    }
    catch (std::bad_alloc&) {
        return 0;
    }
}
374
CompletePermanentSpaceAllocation(PermanentMemSpace * space)375 bool MemMgr::CompletePermanentSpaceAllocation(PermanentMemSpace *space)
376 {
377 // Remove write access unless it is mutable.
378 // Don't remove write access unless this is top-level. Share-data assumes only hierarchy 0 is write-protected.
379 if (!space->isMutable && space->hierarchy == 0)
380 {
381 if (space->isCode)
382 osCodeAlloc.DisableWriteForCode(space->bottom, space->shadowSpace, (char*)space->top - (char*)space->bottom);
383 else osHeapAlloc.EnableWrite(false, space->bottom, (char*)space->top - (char*)space->bottom);
384 }
385 return true;
386 }
387
388 // Delete a local space and remove it from the table.
DeleteLocalSpace(std::vector<LocalMemSpace * >::iterator & iter)389 void MemMgr::DeleteLocalSpace(std::vector<LocalMemSpace*>::iterator &iter)
390 {
391 LocalMemSpace *sp = *iter;
392 if (debugOptions & DEBUG_MEMMGR)
393 Log("MMGR: Deleted local %s space %p at %p size %zu\n", sp->spaceTypeString(), sp, sp->bottom, sp->spaceSize());
394 currentHeapSize -= sp->spaceSize();
395 globalStats.setSize(PSS_TOTAL_HEAP, currentHeapSize * sizeof(PolyWord));
396 if (sp->allocationSpace) currentAllocSpace -= sp->spaceSize();
397 RemoveTree(sp);
398 delete(sp);
399 iter = lSpaces.erase(iter);
400 }
401
402 // Remove local areas that are now empty after a GC.
403 // It isn't clear if we always want to do this.
RemoveEmptyLocals()404 void MemMgr::RemoveEmptyLocals()
405 {
406 for (std::vector<LocalMemSpace*>::iterator i = lSpaces.begin(); i < lSpaces.end(); )
407 {
408 LocalMemSpace *space = *i;
409 if (space->isEmpty())
410 DeleteLocalSpace(i);
411 else i++;
412 }
413 }
414
// Create and initialise a new export space and add it to the table.
// Export spaces hold data being written out by the exporter/saved-state
// code; "size" is in words and may be rounded up.  Returns 0 on failure.
PermanentMemSpace* MemMgr::NewExportSpace(uintptr_t size, bool mut, bool noOv, bool code)
{
    try {
        // Code goes in the code allocator, everything else in the heap allocator.
        OSMem *alloc = code ? &osCodeAlloc : &osHeapAlloc;
        PermanentMemSpace *space = new PermanentMemSpace(alloc);
        space->spaceType = ST_EXPORT;
        space->isMutable = mut;
        space->noOverwrite = noOv;
        space->isCode = code;
        space->index = nextIndex++;
        // Allocate the memory itself.
        size_t iSpace = size*sizeof(PolyWord);
        if (code)
        {
            void* shadow;
            space->bottom = (PolyWord*)alloc->AllocateCodeArea(iSpace, shadow);
            if (space->bottom != 0)
                space->shadowSpace = (PolyWord*)shadow;
        }
        else space->bottom = (PolyWord*)alloc->AllocateDataArea(iSpace);

        if (space->bottom == 0)
        {
            delete space;
            if (debugOptions & DEBUG_MEMMGR)
                Log("MMGR: New export %smutable space: insufficient space\n", mut ? "" : "im");
            return 0;
        }

        // The size may have been rounded up to a block boundary.
        size = iSpace/sizeof(PolyWord);
        space->top = space->bottom + size;
        space->topPointer = space->bottom;
#ifdef POLYML32IN64
        // The address must be on an odd-word boundary so that after the length
        // word is put in the actual cell address is on an even-word boundary.
        space->writeAble(space->topPointer)[0] = PolyWord::FromUnsigned(0);
        space->topPointer = space->bottom + 1;
#endif

        if (debugOptions & DEBUG_MEMMGR)
            Log("MMGR: New export %smutable %s%sspace %p, size=%luk words, bottom=%p, top=%p\n", mut ? "" : "im",
                noOv ? "no-overwrite " : "", code ? "code " : "", space,
                space->spaceSize() / 1024, space->bottom, space->top);

        // Add to the table.
        try {
            AddTree(space);
            eSpaces.push_back(space);
        }
        catch (std::exception&) {
            RemoveTree(space);
            delete space;
            if (debugOptions & DEBUG_MEMMGR)
                Log("MMGR: New export %smutable space: Adding to tree failed\n", mut ? "" : "im");
            return 0;
        }
        return space;
    }
    catch (std::bad_alloc&) {
        // "new PermanentMemSpace" failed.
        if (debugOptions & DEBUG_MEMMGR)
            Log("MMGR: New export %smutable space: \"new\" failed\n", mut ? "" : "im");
        return 0;
    }
}
481
DeleteExportSpaces(void)482 void MemMgr::DeleteExportSpaces(void)
483 {
484 for (std::vector<PermanentMemSpace *>::iterator i = eSpaces.begin(); i < eSpaces.end(); i++)
485 {
486 PermanentMemSpace *space = *i;
487 RemoveTree(space);
488 delete(space);
489 }
490 eSpaces.clear();
491 }
492
// If we have saved the state rather than exported a function we turn the exported
// spaces into permanent ones, removing existing permanent spaces at the same or
// lower level.
bool MemMgr::PromoteExportSpaces(unsigned hierarchy)
{
    // Save permanent spaces at a lower hierarchy. Others are converted into
    // local spaces. Most or all items will have been copied from these spaces
    // into an export space but there could be items reachable only from the stack.
    std::vector<PermanentMemSpace*>::iterator i = pSpaces.begin();
    while (i != pSpaces.end())
    {
        PermanentMemSpace *pSpace = *i;
        if (pSpace->hierarchy < hierarchy)
            i++; // Keep this one as a permanent space.
        else
        {
            try {
                // Turn this into a local space or a code space
                // Remove this from the tree - AddLocalSpace will make an entry for the local version.
                RemoveTree(pSpace);

                if (pSpace->isCode)
                {
                    // Enable write access. Permanent spaces are read-only.
//                    osCodeAlloc.SetPermissions(pSpace->bottom, (char*)pSpace->top - (char*)pSpace->bottom,
//                        PERMISSION_READ | PERMISSION_WRITE | PERMISSION_EXEC);
                    CodeSpace *space = new CodeSpace(pSpace->bottom, pSpace->shadowSpace, pSpace->spaceSize(), &osCodeAlloc);
                    if (! space->headerMap.Create(space->spaceSize()))
                    {
                        if (debugOptions & DEBUG_MEMMGR)
                            Log("MMGR: Unable to create header map for state space %p\n", pSpace);
                        return false;
                    }
                    if (!AddCodeSpace(space))
                    {
                        if (debugOptions & DEBUG_MEMMGR)
                            Log("MMGR: Unable to convert saved state space %p into code space\n", pSpace);
                        return false;
                    }
                    if (debugOptions & DEBUG_MEMMGR)
                        Log("MMGR: Converted saved state space %p into code space %p\n", pSpace, space);
                    // Set the bits in the header map.
                    // Walk the space cell by cell: each cell is a length word
                    // followed by the object itself.
                    for (PolyWord *ptr = space->bottom; ptr < space->top; )
                    {
                        PolyObject *obj = (PolyObject*)(ptr+1);
                        // We may have forwarded this if this has been
                        // copied to the exported area. Restore the original length word.
                        if (obj->ContainsForwardingPtr())
                        {
#ifdef POLYML32IN64
                            PolyObject *forwardedTo = obj;
                            // This is relative to globalCodeBase not globalHeapBase
                            while (forwardedTo->ContainsForwardingPtr())
                                forwardedTo = (PolyObject*)(globalCodeBase + ((forwardedTo->LengthWord() & ~_OBJ_TOMBSTONE_BIT) << 1));
#else
                            PolyObject *forwardedTo = obj->FollowForwardingChain();
#endif
                            obj->SetLengthWord(forwardedTo->LengthWord());
                        }
                        // Set the "start" bit if this is allocated. It will be a byte seg if not.
                        if (obj->IsCodeObject())
                            space->headerMap.SetBit(ptr-space->bottom);
                        ASSERT(!obj->IsClosureObject());
                        ptr += obj->Length() + 1;
                    }
                }
                else
                {
                    // Enable write access. Permanent spaces are read-only.
//                    osHeapAlloc.SetPermissions(pSpace->bottom, (char*)pSpace->top - (char*)pSpace->bottom,
//                        PERMISSION_READ | PERMISSION_WRITE);
                    LocalMemSpace *space = new LocalMemSpace(&osHeapAlloc);
                    space->top = pSpace->top;
                    // Space is allocated in local areas from the top down. This area is full and
                    // all data is in the old generation. The area can be recovered by a full GC.
                    space->bottom = space->upperAllocPtr = space->lowerAllocPtr =
                        space->fullGCLowerLimit = pSpace->bottom;
                    space->isMutable = pSpace->isMutable;
                    space->isCode = false;
                    if (! space->bitmap.Create(space->top-space->bottom) || ! AddLocalSpace(space))
                    {
                        if (debugOptions & DEBUG_MEMMGR)
                            Log("MMGR: Unable to convert saved state space %p into local space\n", pSpace);
                        return false;
                    }

                    if (debugOptions & DEBUG_MEMMGR)
                        Log("MMGR: Converted saved state space %p into local %smutable space %p\n",
                                pSpace, pSpace->isMutable ? "im": "", space);
                    currentHeapSize += space->spaceSize();
                    globalStats.setSize(PSS_TOTAL_HEAP, currentHeapSize * sizeof(PolyWord));
                }
                i = pSpaces.erase(i);
            }
            catch (std::bad_alloc&) {
                return false;
            }
        }
    }
    // Save newly exported spaces.
    for(std::vector<PermanentMemSpace *>::iterator j = eSpaces.begin(); j < eSpaces.end(); j++)
    {
        PermanentMemSpace *space = *j;
        space->hierarchy = hierarchy; // Set the hierarchy of the new spaces.
        space->spaceType = ST_PERMANENT;
        // Put a dummy object to fill up the unused space.
        if (space->topPointer != space->top)
            FillUnusedSpace(space->writeAble(space->topPointer), space->top - space->topPointer);
        // Put in a dummy object to fill the rest of the space.
        pSpaces.push_back(space);
    }
    eSpaces.clear();

    return true;
}
608
609
// Before we import a hierarchical saved state we need to turn any previously imported
// spaces into local spaces.
bool MemMgr::DemoteImportSpaces()
{
    // Hierarchy 1 means: only truly permanent (hierarchy 0) spaces are retained.
    return PromoteExportSpaces(1);
}
616
617 // Return the space for a given index
SpaceForIndex(unsigned index)618 PermanentMemSpace *MemMgr::SpaceForIndex(unsigned index)
619 {
620 for (std::vector<PermanentMemSpace*>::iterator i = pSpaces.begin(); i < pSpaces.end(); i++)
621 {
622 PermanentMemSpace *space = *i;
623 if (space->index == index)
624 return space;
625 }
626 return NULL;
627 }
628
// In several places we assume that segments are filled with valid
// objects. This fills unused memory with one or more "byte" objects.
// "words" counts the whole gap including the length word(s).
void MemMgr::FillUnusedSpace(PolyWord *base, uintptr_t words)
{
    PolyWord *pDummy = base+1;
    while (words > 0)
    {
#ifdef POLYML32IN64
        // Make sure that any dummy object we insert is properly aligned.
        if (((uintptr_t)pDummy) & 4)
        {
            *pDummy++ = PolyWord::FromUnsigned(0);
            words--;
            continue;
        }
#endif
        POLYUNSIGNED oSize;
        // If the space is larger than the maximum object size
        // we will need several objects.
        if (words > MAX_OBJECT_SIZE) oSize = MAX_OBJECT_SIZE;
        else oSize = (POLYUNSIGNED)(words-1); // One word is the length word.
        // Make this a byte object so it's always skipped.
        ((PolyObject*)pDummy)->SetLengthWord(oSize, F_BYTE_OBJ);
        words -= oSize+1;
        pDummy += oSize+1;
    }
}
656
// Allocate an area of the heap of at least minWords and at most maxWords.
// This is used both when allocating single objects (when minWords and maxWords
// are the same) and when allocating heap segments. If there is insufficient
// space to satisfy the minimum it will return 0.  On success maxWords is
// updated to the number of words actually allocated.
PolyWord *MemMgr::AllocHeapSpace(uintptr_t minWords, uintptr_t &maxWords, bool doAllocation)
{
    PLocker locker(&allocLock);
    // We try to distribute the allocations between the memory spaces
    // so that at the next GC we don't have all the most recent cells in
    // one space. The most recent cells will be more likely to survive a
    // GC so distibuting them improves the load balance for a multi-thread GC.
    nextAllocator++;
    if (nextAllocator > gMem.lSpaces.size()) nextAllocator = 0;

    // Start the scan at nextAllocator, wrapping round the table.
    unsigned j = nextAllocator;
    for (std::vector<LocalMemSpace*>::iterator i = lSpaces.begin(); i < lSpaces.end(); i++)
    {
        if (j >= gMem.lSpaces.size()) j = 0;
        LocalMemSpace *space = gMem.lSpaces[j++];
        if (space->allocationSpace)
        {
            uintptr_t available = space->freeSpace();
            if (available > 0 && available >= minWords)
            {
                // Reduce the maximum value if we had less than that.
                if (available < maxWords) maxWords = available;
#ifdef POLYML32IN64
                // If necessary round down to an even boundary
                if (maxWords & 1)
                {
                    maxWords--;
                    space->lowerAllocPtr[maxWords] = PolyWord::FromUnsigned(0);
                }
#endif
                PolyWord *result = space->lowerAllocPtr; // Return the address.
                if (doAllocation)
                    space->lowerAllocPtr += maxWords; // Allocate it.
#ifdef POLYML32IN64
                ASSERT((uintptr_t)result & 4); // Must be odd-word aligned
#endif
                return result;
            }
        }
    }
    // There isn't space in the existing areas - can we create a new area?
    // The reason we don't have enough space could simply be that we want to
    // allocate an object larger than the default space size. Try deleting
    // some other spaces to bring currentAllocSpace below spaceBeforeMinorGC - minWords.
    if (minWords > defaultSpaceSize && minWords < spaceBeforeMinorGC)
        RemoveExcessAllocation(spaceBeforeMinorGC - minWords);

    if (currentAllocSpace/* + minWords */ < spaceBeforeMinorGC)
    {
        // i.e. the current allocation space is less than the space allowed for the minor GC
        // but it may be that allocating this object will take us over the limit. We allow
        // that to happen so that we can successfully allocate very large objects even if
        // we have a new GC very shortly.
        uintptr_t spaceSize = defaultSpaceSize;
#ifdef POLYML32IN64
        // When we create the allocation space we take one word so that the first
        // length word is on an odd-word boundary. We need to allow for that otherwise
        // we may have available < minWords.
        if (minWords >= spaceSize) spaceSize = minWords+1; // If we really want a large space.
#else
        if (minWords > spaceSize) spaceSize = minWords; // If we really want a large space.
#endif
        LocalMemSpace *space = CreateAllocationSpace(spaceSize);
        if (space == 0) return 0; // Can't allocate it
        // Allocate our space in this new area.
        uintptr_t available = space->freeSpace();
        ASSERT(available >= minWords);
        if (available < maxWords)
        {
            maxWords = available;
#ifdef POLYML32IN64
            // If necessary round down to an even boundary
            if (maxWords & 1)
            {
                maxWords--;
                space->lowerAllocPtr[maxWords] = PolyWord::FromUnsigned(0);
            }
#endif
        }
        PolyWord *result = space->lowerAllocPtr; // Return the address.
        if (doAllocation)
            space->lowerAllocPtr += maxWords; // Allocate it.
#ifdef POLYML32IN64
        ASSERT((uintptr_t)result & 4); // Must be odd-word aligned
#endif
        return result;
    }
    return 0; // There isn't space even for the minimum.
}
750
// Construct a code space over memory already obtained from the code
// allocator.  "spaceSize" is in words.  largestFree/firstFree seed the
// first-fit search in AllocCodeSpace.
CodeSpace::CodeSpace(PolyWord *start, PolyWord *shadow, uintptr_t spaceSize, OSMem *alloc): MarkableSpace(alloc)
{
    bottom = start;
    shadowSpace = shadow;
    top = start+spaceSize;
    isMutable = true; // Make it mutable just in case. This will cause it to be scanned.
    isCode = true;
    spaceType = ST_CODE;
#ifdef POLYML32IN64
    // Dummy word so that the cell itself, after the length word, is on an 8-byte boundary.
    writeAble(start)[0] = PolyWord::FromUnsigned(0);
    largestFree = spaceSize - 2;
    firstFree = start+1;
#else
    largestFree = spaceSize - 1;
    firstFree = start;
#endif
}
769
NewCodeSpace(uintptr_t size)770 CodeSpace *MemMgr::NewCodeSpace(uintptr_t size)
771 {
772 // Allocate a new area and add it at the end of the table.
773 CodeSpace *allocSpace = 0;
774 // Allocate a new mutable, code space. N.B. This may round up "actualSize".
775 size_t actualSize = size * sizeof(PolyWord);
776 void* shadow;
777 PolyWord *mem =
778 (PolyWord*)osCodeAlloc.AllocateCodeArea(actualSize, shadow);
779 if (mem != 0)
780 {
781 try {
782 allocSpace = new CodeSpace(mem, (PolyWord*)shadow, actualSize / sizeof(PolyWord), &osCodeAlloc);
783 allocSpace->shadowSpace = (PolyWord*)shadow;
784 if (!allocSpace->headerMap.Create(allocSpace->spaceSize()))
785 {
786 delete allocSpace;
787 allocSpace = 0;
788 }
789 else if (!AddCodeSpace(allocSpace))
790 {
791 delete allocSpace;
792 allocSpace = 0;
793 }
794 else if (debugOptions & DEBUG_MEMMGR)
795 Log("MMGR: New code space %p allocated at %p size %lu\n", allocSpace, allocSpace->bottom, allocSpace->spaceSize());
796 // Put in a byte cell to mark the area as unallocated.
797 FillUnusedSpace(allocSpace->writeAble(allocSpace->firstFree), allocSpace->top- allocSpace->firstFree);
798 }
799 catch (std::bad_alloc&)
800 {
801 }
802 if (allocSpace == 0)
803 {
804 osCodeAlloc.FreeCodeArea(mem, shadow, actualSize);
805 mem = 0;
806 }
807 }
808 return allocSpace;
809 }
810
// Allocate memory for a piece of code. This needs to be both mutable and executable,
// at least for native code. The interpreted version need not (should not?) make the
// area executable. It will not be executed until the mutable bit has been cleared.
// Once code is allocated it is not GCed or moved.
// The cell is returned with its length word set (F_CODE_OBJ|F_MUTABLE_BIT);
// the caller fills in the contents.
PolyObject* MemMgr::AllocCodeSpace(POLYUNSIGNED requiredSize)
{
    PLocker locker(&codeSpaceLock);
    // Search the code spaces until we find a free area big enough;
    // first-fit within each space, creating a new space if none fits.
    size_t i = 0;
    while (true)
    {
        if (i != cSpaces.size())
        {
            CodeSpace *space = cSpaces[i];
            if (space->largestFree >= requiredSize)
            {
                POLYUNSIGNED actualLargest = 0;
                // Advance firstFree past cells that can never satisfy a request:
                // allocated code cells and free cells smaller than 8 words.
                while (space->firstFree < space->top)
                {
                    PolyObject *obj = (PolyObject*)(space->firstFree+1);
                    // Skip over allocated areas or free areas that are too small.
                    if (obj->IsCodeObject() || obj->Length() < 8)
                        space->firstFree += obj->Length()+1;
                    else break;
                }
                PolyWord *pt = space->firstFree;
                while (pt < space->top)
                {
                    PolyObject *obj = (PolyObject*)(pt+1);
                    POLYUNSIGNED length = obj->Length();
                    if (obj->IsByteObject()) // Byte objects mark free areas.
                    {
                        if (length >= requiredSize)
                        {
                            // Free and large enough
                            PolyWord *next = pt+requiredSize+1;
                            POLYUNSIGNED spare = length - requiredSize;
#ifdef POLYML32IN64
                            // Maintain alignment.
                            if (((requiredSize + 1) & 1) && spare != 0)
                            {
                                space->writeAble(next++)[0] = PolyWord::FromUnsigned(0);
                                spare--;
                            }
#endif
                            if (spare != 0)
                                FillUnusedSpace(space->writeAble(next), spare);
                            space->isMutable = true; // Set this - it ensures the area is scanned on GC.
                            space->headerMap.SetBit(pt-space->bottom); // Set the "header" bit
                            // Set the length word of the code area and copy the byte cell in.
                            // The code bit must be set before the lock is released to ensure
                            // another thread doesn't reuse this.
                            space->writeAble(obj)->SetLengthWord(requiredSize, F_CODE_OBJ|F_MUTABLE_BIT);
                            return obj;
                        }
                        else if (length >= actualLargest) actualLargest = length+1;
                    }
                    pt += length+1;
                }
                // Reached the end without finding what we wanted. Update the largest size.
                space->largestFree = actualLargest;
            }
            i++; // Next area
        }
        else
        {
            // Allocate a new area and add it at the end of the table.
            uintptr_t spaceSize = requiredSize + 1;
#ifdef POLYML32IN64
            // We need to allow for the extra alignment word otherwise we
            // may allocate less than we need.
            spaceSize += 1;
#endif
            CodeSpace *allocSpace = NewCodeSpace(spaceSize);
            if (allocSpace == 0)
                return 0; // Try a GC.
            globalStats.incSize(PSS_CODE_SPACE, allocSpace->spaceSize() * sizeof(PolyWord));
            // Loop again: the new space is now the last entry in cSpaces.
        }
    }
}
892
893 // Remove code areas that are completely empty. This is probably better than waiting to reuse them.
894 // It's particularly important if we reload a saved state because the code areas for old saved states
895 // are made into local code areas just in case they are currently in use or reachable.
RemoveEmptyCodeAreas()896 void MemMgr::RemoveEmptyCodeAreas()
897 {
898 for (std::vector<CodeSpace *>::iterator i = cSpaces.begin(); i != cSpaces.end(); )
899 {
900 CodeSpace *space = *i;
901 PolyObject *start = (PolyObject *)(space->bottom+1);
902 if (start->IsByteObject() && start->Length() == space->spaceSize()-1)
903 {
904 if (debugOptions & DEBUG_MEMMGR)
905 Log("MMGR: Deleted code space %p at %p size %zu\n", space, space->bottom, space->spaceSize());
906 globalStats.decSize(PSS_CODE_SPACE, space->spaceSize() * sizeof(PolyWord));
907 // We have an empty cell that fills the whole space.
908 RemoveTree(space);
909 delete(space);
910 i = cSpaces.erase(i);
911 }
912 else i++;
913 }
914 }
915
916 // Add a code space to the tables. Used both for newly compiled code and also demoted saved spaces.
AddCodeSpace(CodeSpace * space)917 bool MemMgr::AddCodeSpace(CodeSpace *space)
918 {
919 try {
920 AddTree(space);
921 cSpaces.push_back(space);
922 }
923 catch (std::exception&) {
924 RemoveTree(space);
925 return false;
926 }
927 return true;
928 }
929
930 // Check that we have sufficient space for an allocation to succeed.
931 // Called from the GC to ensure that we will not get into an infinite
932 // loop trying to allocate, failing and garbage-collecting again.
CheckForAllocation(uintptr_t words)933 bool MemMgr::CheckForAllocation(uintptr_t words)
934 {
935 uintptr_t allocated = 0;
936 return AllocHeapSpace(words, allocated, false) != 0;
937 }
938
939 // Adjust the allocation area by removing free areas so that the total
940 // size of the allocation area is less than the required value. This
941 // is used after the quick GC and also if we need to allocate a large
942 // object.
void MemMgr::RemoveExcessAllocation(uintptr_t words)
{
    // First remove any non-standard allocation areas.
    // These are empty allocation spaces whose size differs from the default;
    // they were created for one-off large requests and are always discarded,
    // regardless of the target size.
    for (std::vector<LocalMemSpace*>::iterator i = lSpaces.begin(); i < lSpaces.end();)
    {
        LocalMemSpace *space = *i;
        if (space->allocationSpace && space->isEmpty() &&
                space->spaceSize() != defaultSpaceSize)
            DeleteLocalSpace(i); // NOTE(review): presumably updates i in place (no increment here) - confirm in DeleteLocalSpace.
        else i++;
    }
    // Then remove empty standard-sized allocation areas until the total
    // allocation space (currentAllocSpace) no longer exceeds the target.
    for (std::vector<LocalMemSpace*>::iterator i = lSpaces.begin(); currentAllocSpace > words && i < lSpaces.end(); )
    {
        LocalMemSpace *space = *i;
        if (space->allocationSpace && space->isEmpty())
            DeleteLocalSpace(i);
        else i++;
    }
}
962
963 // Return number of words free in all allocation spaces.
GetFreeAllocSpace()964 uintptr_t MemMgr::GetFreeAllocSpace()
965 {
966 uintptr_t freeSpace = 0;
967 PLocker lock(&allocLock);
968 for (std::vector<LocalMemSpace*>::iterator i = lSpaces.begin(); i < lSpaces.end(); i++)
969 {
970 LocalMemSpace *space = *i;
971 if (space->allocationSpace)
972 freeSpace += space->freeSpace();
973 }
974 return freeSpace;
975 }
976
NewStackSpace(uintptr_t size)977 StackSpace *MemMgr::NewStackSpace(uintptr_t size)
978 {
979 PLocker lock(&stackSpaceLock);
980
981 try {
982 StackSpace *space = new StackSpace(&osStackAlloc);
983 size_t iSpace = size*sizeof(PolyWord);
984 space->bottom = (PolyWord*)osStackAlloc.AllocateDataArea(iSpace);
985 if (space->bottom == 0)
986 {
987 if (debugOptions & DEBUG_MEMMGR)
988 Log("MMGR: New stack space: insufficient space\n");
989 delete space;
990 return 0;
991 }
992
993 // The size may have been rounded up to a block boundary.
994 size = iSpace/sizeof(PolyWord);
995 space->top = space->bottom + size;
996 space->spaceType = ST_STACK;
997 space->isMutable = true;
998
999 // Add the stack space to the tree. This ensures that operations such as
1000 // LocalSpaceForAddress will work for addresses within the stack. We can
1001 // get them in the RTS with functions such as quot_rem and exception stack.
1002 // It's not clear whether they really appear in the GC.
1003 try {
1004 AddTree(space);
1005 sSpaces.push_back(space);
1006 }
1007 catch (std::exception&) {
1008 RemoveTree(space);
1009 delete space;
1010 return 0;
1011 }
1012 if (debugOptions & DEBUG_MEMMGR)
1013 Log("MMGR: New stack space %p allocated at %p size %lu\n", space, space->bottom, space->spaceSize());
1014 globalStats.incSize(PSS_STACK_SPACE, space->spaceSize() * sizeof(PolyWord));
1015 return space;
1016 }
1017 catch (std::bad_alloc&) {
1018 if (debugOptions & DEBUG_MEMMGR)
1019 Log("MMGR: New stack space: \"new\" failed\n");
1020 return 0;
1021 }
1022 }
1023
1024 // If checkmem is given write protect the immutable areas except during a GC.
ProtectImmutable(bool on)1025 void MemMgr::ProtectImmutable(bool on)
1026 {
1027 if (debugOptions & DEBUG_CHECK_OBJECTS)
1028 {
1029 for (std::vector<LocalMemSpace*>::iterator i = lSpaces.begin(); i < lSpaces.end(); i++)
1030 {
1031 LocalMemSpace *space = *i;
1032 if (!space->isMutable)
1033 {
1034 if (!space->isCode)
1035 osHeapAlloc.EnableWrite(!on, space->bottom, (char*)space->top - (char*)space->bottom);
1036 }
1037 }
1038 }
1039 }
1040
GrowOrShrinkStack(TaskData * taskData,uintptr_t newSize)1041 bool MemMgr::GrowOrShrinkStack(TaskData *taskData, uintptr_t newSize)
1042 {
1043 StackSpace *space = taskData->stack;
1044 size_t iSpace = newSize*sizeof(PolyWord);
1045
1046 PolyWord *newSpace = (PolyWord*)osStackAlloc.AllocateDataArea(iSpace);
1047 if (newSpace == 0)
1048 {
1049 if (debugOptions & DEBUG_MEMMGR)
1050 Log("MMGR: Unable to change size of stack %p from %lu to %lu: insufficient space\n",
1051 space, space->spaceSize(), newSize);
1052 return false;
1053 }
1054 // The size may have been rounded up to a block boundary.
1055 newSize = iSpace/sizeof(PolyWord);
1056 try {
1057 AddTree(space, newSpace, newSpace+newSize);
1058 }
1059 catch (std::bad_alloc&) {
1060 RemoveTree(space, newSpace, newSpace+newSize);
1061 delete space;
1062 return 0;
1063 }
1064 taskData->CopyStackFrame(space->stack(), space->spaceSize(), (StackObject*)newSpace, newSize);
1065 if (debugOptions & DEBUG_MEMMGR)
1066 Log("MMGR: Size of stack %p changed from %lu to %lu at %p\n", space, space->spaceSize(), newSize, newSpace);
1067 globalStats.incSize(PSS_STACK_SPACE, (newSize - space->spaceSize()) * sizeof(PolyWord));
1068 RemoveTree(space); // Remove it BEFORE freeing the space - another thread may allocate it
1069 PolyWord *oldBottom = space->bottom;
1070 size_t oldSize = (char*)space->top - (char*)space->bottom;
1071 space->bottom = newSpace; // Switch this before freeing - We could get a profile trap during the free
1072 space->top = newSpace+newSize;
1073 osStackAlloc.FreeDataArea(oldBottom, oldSize);
1074 return true;
1075 }
1076
1077
1078 // Delete a stack when a thread has finished.
1079 // This can be called by an ML thread so needs an interlock.
DeleteStackSpace(StackSpace * space)1080 bool MemMgr::DeleteStackSpace(StackSpace *space)
1081 {
1082 PLocker lock(&stackSpaceLock);
1083
1084 for (std::vector<StackSpace *>::iterator i = sSpaces.begin(); i < sSpaces.end(); i++)
1085 {
1086 if (*i == space)
1087 {
1088 globalStats.decSize(PSS_STACK_SPACE, space->spaceSize() * sizeof(PolyWord));
1089 RemoveTree(space);
1090 delete space;
1091 sSpaces.erase(i);
1092 if (debugOptions & DEBUG_MEMMGR)
1093 Log("MMGR: Deleted stack space %p at %p size %zu\n", space, space->bottom, space->spaceSize());
1094 return true;
1095 }
1096 }
1097 ASSERT(false); // It should always be in the table.
1098 return false;
1099 }
1100
SpaceTreeTree()1101 SpaceTreeTree::SpaceTreeTree(): SpaceTree(false)
1102 {
1103 for (unsigned i = 0; i < 256; i++)
1104 tree[i] = 0;
1105 }
1106
~SpaceTreeTree()1107 SpaceTreeTree::~SpaceTreeTree()
1108 {
1109 for (unsigned i = 0; i < 256; i++)
1110 {
1111 if (tree[i] && ! tree[i]->isSpace)
1112 delete(tree[i]);
1113 }
1114 }
1115
1116 // Add and remove entries in the space tree.
1117
AddTree(MemSpace * space,PolyWord * startS,PolyWord * endS)1118 void MemMgr::AddTree(MemSpace *space, PolyWord *startS, PolyWord *endS)
1119 {
1120 // It isn't clear we need to lock here but it's probably sensible.
1121 PLocker lock(&spaceTreeLock);
1122 AddTreeRange(&spaceTree, space, (uintptr_t)startS, (uintptr_t)endS);
1123 }
1124
RemoveTree(MemSpace * space,PolyWord * startS,PolyWord * endS)1125 void MemMgr::RemoveTree(MemSpace *space, PolyWord *startS, PolyWord *endS)
1126 {
1127 PLocker lock(&spaceTreeLock);
1128 RemoveTreeRange(&spaceTree, space, (uintptr_t)startS, (uintptr_t)endS);
1129 }
1130
1131
// Recursively enter the range [startS, endS) into the radix tree.  Each
// level of the tree indexes on successive bytes of the address, most
// significant first; the addresses are shifted left by 8 on each recursion
// so the next byte becomes the high-order byte.  endS == 0 means "to the
// end of the range covered by this node".  May throw std::bad_alloc; the
// caller recovers with RemoveTreeRange.
void MemMgr::AddTreeRange(SpaceTree **tt, MemSpace *space, uintptr_t startS, uintptr_t endS)
{
    // Create an interior node on demand.
    if (*tt == 0)
        *tt = new SpaceTreeTree;
    ASSERT(! (*tt)->isSpace);
    SpaceTreeTree *t = (SpaceTreeTree*)*tt;

    const unsigned shift = (sizeof(void*)-1) * 8; // Takes the high-order byte
    uintptr_t r = startS >> shift;
    ASSERT(r < 256);
    const uintptr_t s = endS == 0 ? 256 : endS >> shift;
    ASSERT(s >= r && s <= 256);

    if (r == s) // Wholly within this entry
        AddTreeRange(&(t->tree[r]), space, startS << 8, endS << 8);
    else
    {
        // Deal with any remainder at the start.
        if ((r << shift) != startS)
        {
            AddTreeRange(&(t->tree[r]), space, startS << 8, 0 /*End of range*/);
            r++;
        }
        // Whole entries.
        // These slots are covered completely, so point them directly at the
        // space (a leaf) instead of recursing further.
        while (r < s)
        {
            ASSERT(t->tree[r] == 0);
            t->tree[r] = space;
            r++;
        }
        // Remainder at the end.
        if ((s << shift) != endS)
            AddTreeRange(&(t->tree[r]), space, 0, endS << 8);
    }
}
1167
1168 // Remove an entry from the tree for a range. Strictly speaking we don't need the
1169 // space argument here but it's useful as a check.
1170 // This may be called to remove a partially installed structure if we have
1171 // run out of space in AddTreeRange.
// Mirror of AddTreeRange: remove the entries for [startS, endS) and free
// interior nodes that become empty.  The space argument is only used for
// the consistency check.  endS == 0 means "to the end of this node".
void MemMgr::RemoveTreeRange(SpaceTree **tt, MemSpace *space, uintptr_t startS, uintptr_t endS)
{
    SpaceTreeTree *t = (SpaceTreeTree*)*tt;
    if (t == 0)
        return; // This can only occur if we're recovering.
    ASSERT(! t->isSpace);
    const unsigned shift = (sizeof(void*)-1) * 8;
    uintptr_t r = startS >> shift;
    const uintptr_t s = endS == 0 ? 256 : endS >> shift;

    if (r == s)
        RemoveTreeRange(&(t->tree[r]), space, startS << 8, endS << 8);
    else
    {
        // Deal with any remainder at the start.
        if ((r << shift) != startS)
        {
            RemoveTreeRange(&(t->tree[r]), space, startS << 8, 0);
            r++;
        }
        // Whole entries.
        // During recovery from a failed AddTreeRange a slot may never have
        // been filled in, hence the == 0 alternative in the assertion.
        while (r < s)
        {
            ASSERT(t->tree[r] == space || t->tree[r] == 0 /* Recovery only */);
            t->tree[r] = 0;
            r++;
        }
        // Remainder at the end.
        if ((s << shift) != endS)
            RemoveTreeRange(&(t->tree[r]), space, 0, endS << 8);
    }
    // See if the whole vector is now empty.
    for (unsigned j = 0; j < 256; j++)
    {
        if (t->tree[j])
            return; // It's not empty - we're done.
    }
    // All 256 slots are empty: free this interior node.
    delete(t);
    *tt = 0;
}
1212
AllocatedInAlloc()1213 uintptr_t MemMgr::AllocatedInAlloc()
1214 {
1215 uintptr_t inAlloc = 0;
1216 for (std::vector<LocalMemSpace*>::iterator i = lSpaces.begin(); i < lSpaces.end(); i++)
1217 {
1218 LocalMemSpace *sp = *i;
1219 if (sp->allocationSpace) inAlloc += sp->allocatedSpace();
1220 }
1221 return inAlloc;
1222 }
1223
1224 // Report heap sizes and occupancy before and after GC
void MemMgr::ReportHeapSizes(const char *phase)
{
    // Word totals: "alloc" = allocation (minor) areas, the rest is the
    // major heap; "in*" is the allocated portion of each.
    uintptr_t alloc = 0, nonAlloc = 0, inAlloc = 0, inNonAlloc = 0;
    for (std::vector<LocalMemSpace*>::iterator i = lSpaces.begin(); i < lSpaces.end(); i++)
    {
        LocalMemSpace *sp = *i;
        if (sp->allocationSpace)
        {
            alloc += sp->spaceSize();
            inAlloc += sp->allocatedSpace();
        }
        else
        {
            nonAlloc += sp->spaceSize();
            inNonAlloc += sp->allocatedSpace();
        }
    }
    // NOTE(review): if nonAlloc or alloc is zero the percentages below
    // divide by zero, printing inf/nan - confirm whether that can happen.
    Log("Heap: %s Major heap used ", phase);
    LogSize(inNonAlloc); Log(" of ");
    LogSize(nonAlloc);
    Log(" (%1.0f%%). Alloc space used ", (float)inNonAlloc / (float)nonAlloc * 100.0F);
    LogSize(inAlloc); Log(" of ");
    LogSize(alloc);
    Log(" (%1.0f%%). Total space ", (float)inAlloc / (float)alloc * 100.0F);
    LogSize(spaceForHeap);
    Log(" %1.0f%% full.\n", (float)(inAlloc + inNonAlloc) / (float)spaceForHeap * 100.0F);
    Log("Heap: Local spaces %" PRI_SIZET ", permanent spaces %" PRI_SIZET ", code spaces %" PRI_SIZET ", stack spaces %" PRI_SIZET "\n",
        lSpaces.size(), pSpaces.size(), cSpaces.size(), sSpaces.size());
    // Walk every code area counting the words occupied by live code cells.
    uintptr_t cTotal = 0, cOccupied = 0;
    for (std::vector<CodeSpace*>::iterator c = cSpaces.begin(); c != cSpaces.end(); c++)
    {
        cTotal += (*c)->spaceSize();
        PolyWord *pt = (*c)->bottom;
        while (pt < (*c)->top)
        {
            pt++; // Step over the length word to the object itself.
            PolyObject *obj = (PolyObject*)pt;
            if (obj->ContainsForwardingPtr())
            {
#ifdef POLYML32IN64
                // This is relative to globalCodeBase not globalHeapBase
                while (obj->ContainsForwardingPtr())
                    obj = (PolyObject*)(globalCodeBase + ((obj->LengthWord() & ~_OBJ_TOMBSTONE_BIT) << 1));
#else
                obj = obj->FollowForwardingChain();
#endif
                // Forwarded cells are skipped without counting as occupied.
                pt += obj->Length();
            }
            else
            {
                // Count the length word as well as the body for code cells.
                if (obj->IsCodeObject())
                    cOccupied += obj->Length() + 1;
                pt += obj->Length();
            }
        }
    }
    Log("Heap: Code area: total "); LogSize(cTotal); Log(" occupied: "); LogSize(cOccupied); Log("\n");
    // Finally sum the stack areas.
    uintptr_t stackSpace = 0;
    for (std::vector<StackSpace*>::iterator s = sSpaces.begin(); s != sSpaces.end(); s++)
    {
        stackSpace += (*s)->spaceSize();
    }
    Log("Heap: Stack area: total "); LogSize(stackSpace); Log("\n");
}
1289
1290 // Profiling - Find a code object or return zero if not found.
1291 // This can be called on a "user" thread.
PolyObject *MemMgr::FindCodeObject(const byte *addr)
{
    MemSpace *space = SpaceForAddress(addr);
    if (space == 0) return 0;
    Bitmap *profMap = 0;
    if (! space->isCode) return 0;
    // Pick the header bitmap: code spaces keep one up to date permanently,
    // permanent spaces build one lazily (profileCode) for profiling.
    if (space->spaceType == ST_CODE)
    {
        CodeSpace *cSpace = (CodeSpace*)space;
        profMap = &cSpace->headerMap;
    }
    else if (space->spaceType == ST_PERMANENT)
    {
        PermanentMemSpace *pSpace = (PermanentMemSpace*)space;
        profMap = &pSpace->profileCode;
    }
    else return 0; // Must be in code or permanent code.

    // For the permanent areas the header maps are created and initialised on demand.
    // Double-checked creation under codeBitmapLock.
    if (! profMap->Created())
    {
        PLocker lock(&codeBitmapLock);
        if (! profMap->Created()) // Second check now we've got the lock.
        {
            // Create the bitmap. If it failed just say "not in this area"
            if (! profMap->Create(space->spaceSize()))
                return 0;
            // Set the first bit before releasing the lock.
            profMap->SetBit(0);
        }
    }

    // A bit is set if it is a length word.
    while ((uintptr_t)addr & (sizeof(POLYUNSIGNED)-1)) addr--; // Make it word aligned
    PolyWord *wordAddr = (PolyWord*)addr;
    // Work back to find the first set bit before this.
    // Normally we will find one but if we're looking up a value that
    // is actually an integer it might be in a piece of code that is now free.
    uintptr_t bitOffset = profMap->FindLastSet(wordAddr - space->bottom);
    if (space->spaceType == ST_CODE)
    {
        PolyWord *ptr = space->bottom+bitOffset;
        if (ptr >= space->top) return 0;
        // This will find the last non-free code cell or the first cell.
        // Return zero if the value was not actually in the cell or it wasn't code.
        PolyObject *obj = (PolyObject*)(ptr+1);
#ifdef POLYML32IN64
        PolyObject *lastObj = obj;
        // This is relative to globalCodeBase not globalHeapBase.
        while (lastObj->ContainsForwardingPtr())
            lastObj = (PolyObject*)(globalCodeBase + ((lastObj->LengthWord() & ~_OBJ_TOMBSTONE_BIT) << 1));
#else
        PolyObject *lastObj = obj->FollowForwardingChain();
#endif
        // We normally replace forwarding pointers but when scanning to update
        // addresses after a saved state we may not have yet done that.
        // Note: the un-forwarded obj is returned; only the length/kind checks
        // use the forwarded lastObj.
        if (wordAddr > ptr && wordAddr < ptr + 1 + lastObj->Length() && lastObj->IsCodeObject())
            return obj;
        else return 0;
    }
    // Permanent area - the bits are set on demand.
    // Now work forward, setting any bits if necessary. We don't need a lock
    // because this is monotonic.
    for (;;)
    {
        PolyWord *ptr = space->bottom+bitOffset;
        if (ptr >= space->top) return 0;
        PolyObject *obj = (PolyObject*)(ptr+1);
        ASSERT(obj->ContainsNormalLengthWord());
        if (wordAddr > ptr && wordAddr < ptr + obj->Length())
            return obj;
        bitOffset += obj->Length()+1;
        profMap->SetBit(bitOffset);
    }
    return 0; // Unreachable: the loop above always returns.
}
1368
1369 // Remove profiling bitmaps from permanent areas to free up memory.
RemoveProfilingBitmaps()1370 void MemMgr::RemoveProfilingBitmaps()
1371 {
1372 for (std::vector<PermanentMemSpace*>::iterator i = pSpaces.begin(); i < pSpaces.end(); i++)
1373 (*i)->profileCode.Destroy();
1374 }
1375
1376 #ifdef POLYML32IN64DEBUG
AddressToObjectPtr(void * address)1377 POLYOBJECTPTR PolyWord::AddressToObjectPtr(void *address)
1378 {
1379 ASSERT(address >= globalHeapBase);
1380 uintptr_t offset = (PolyWord*)address - globalHeapBase;
1381 ASSERT(offset <= 0x7fffffff); // Currently limited to 8Gbytes
1382 ASSERT((offset & 1) == 0);
1383 return (POLYOBJECTPTR)offset;
1384 }
1385 #endif
1386
1387 MemMgr gMem; // The one and only memory manager object
1388
1389