/*
 * Copyright (c) 2018, 2019, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/javaClasses.hpp"
#include "gc/shared/allocTracer.hpp"
#include "gc/shared/collectedHeap.hpp"
#include "gc/shared/memAllocator.hpp"
#include "gc/shared/threadLocalAllocBuffer.inline.hpp"
#include "memory/universe.hpp"
#include "oops/arrayOop.hpp"
#include "oops/oop.inline.hpp"
#include "prims/jvmtiExport.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/thread.inline.hpp"
#include "services/lowMemoryDetector.hpp"
#include "utilities/align.hpp"
#include "utilities/copy.hpp"

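// RAII helper that brackets one object allocation: the constructor runs the
// pre-allocation verification, and the destructor either raises the pending
// OutOfMemoryError or verifies the result and posts allocation notifications.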
class MemAllocator::Allocation: StackObj {
  friend class MemAllocator;

  const MemAllocator& _allocator;
  Thread*             _thread;
  oop*                _obj_ptr;
  bool                _overhead_limit_exceeded;
  bool                _allocated_outside_tlab;
  size_t              _allocated_tlab_size;
  bool                _tlab_end_reset_for_sample;

  bool check_out_of_memory();
  void verify_before();
  void verify_after();
  void notify_allocation();
  void notify_allocation_jvmti_allocation_event();
  void notify_allocation_jvmti_sampler();
  void notify_allocation_low_memory_detector();
  void notify_allocation_jfr_sampler();
  void notify_allocation_dtrace_sampler();
  void check_for_bad_heap_word_value() const;
#ifdef ASSERT
  void check_for_valid_allocation_state() const;
#endif

  class PreserveObj;

public:
  Allocation(const MemAllocator& allocator, oop* obj_ptr)
    : _allocator(allocator),
      _thread(Thread::current()),
      _obj_ptr(obj_ptr),
      _overhead_limit_exceeded(false),
      _allocated_outside_tlab(false),
      _allocated_tlab_size(0),
      _tlab_end_reset_for_sample(false)
  {
    verify_before();
  }

  ~Allocation() {
    if (!check_out_of_memory()) {
      verify_after();
      notify_allocation();
    }
  }

  oop obj() const { return *_obj_ptr; }
};

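// Stashes the newly allocated oop in a Handle so it stays valid across a
// possible safepoint; the raw oop slot is cleared on entry and restored from
// the Handle in the destructor.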
class MemAllocator::Allocation::PreserveObj: StackObj {
  HandleMark _handle_mark;
  Handle     _handle;
  oop* const _obj_ptr;

public:
  PreserveObj(Thread* thread, oop* obj_ptr)
    : _handle_mark(thread),
      _handle(thread, *obj_ptr),
      _obj_ptr(obj_ptr)
  {
    *obj_ptr = NULL;
  }

  ~PreserveObj() {
    *_obj_ptr = _handle();
  }

  oop operator()() const {
    return _handle();
  }
};

bool MemAllocator::Allocation::check_out_of_memory() {
  Thread* THREAD = _thread;
  assert(!HAS_PENDING_EXCEPTION, "Unexpected exception, will result in uninitialized storage");

  if (obj() != NULL) {
    return false;
  }

  const char* message = _overhead_limit_exceeded ? "GC overhead limit exceeded" : "Java heap space";
  if (!THREAD->in_retryable_allocation()) {
    // -XX:+HeapDumpOnOutOfMemoryError and -XX:OnOutOfMemoryError support
    report_java_out_of_memory(message);

    if (JvmtiExport::should_post_resource_exhausted()) {
      JvmtiExport::post_resource_exhausted(
        JVMTI_RESOURCE_EXHAUSTED_OOM_ERROR | JVMTI_RESOURCE_EXHAUSTED_JAVA_HEAP,
        message);
    }
    oop exception = _overhead_limit_exceeded ?
      Universe::out_of_memory_error_gc_overhead_limit() :
      Universe::out_of_memory_error_java_heap();
    THROW_OOP_(exception, true);
  } else {
    THROW_OOP_(Universe::out_of_memory_error_retry(), true);
  }
}

void MemAllocator::Allocation::verify_before() {
  // Clear unhandled oops for memory allocation. Memory allocation might
  // not take out a lock if from tlab, so clear here.
  Thread* THREAD = _thread;
  assert(!HAS_PENDING_EXCEPTION, "Should not allocate with exception pending");
  debug_only(check_for_valid_allocation_state());
  assert(!Universe::heap()->is_gc_active(), "Allocation during gc not allowed");
}

void MemAllocator::Allocation::verify_after() {
  NOT_PRODUCT(check_for_bad_heap_word_value();)
}

void MemAllocator::Allocation::check_for_bad_heap_word_value() const {
  MemRegion obj_range = _allocator.obj_memory_range(obj());
  HeapWord* addr = obj_range.start();
  size_t size = obj_range.word_size();
  if (CheckMemoryInitialization && ZapUnusedHeapArea) {
    for (size_t slot = 0; slot < size; slot += 1) {
      assert((*(intptr_t*) (addr + slot)) != ((intptr_t) badHeapWordVal),
             "Found badHeapWordValue in post-allocation check");
    }
  }
}

#ifdef ASSERT
void MemAllocator::Allocation::check_for_valid_allocation_state() const {
  // How to choose between a pending exception and a potential
  // OutOfMemoryError? Don't allow pending exceptions.
  // This is a VM policy failure, so how do we exhaustively test it?
  assert(!_thread->has_pending_exception(),
         "shouldn't be allocating with pending exception");
  // Allocation of an oop can always invoke a safepoint.
  _thread->check_for_valid_safepoint_state();
}
#endif

void MemAllocator::Allocation::notify_allocation_jvmti_sampler() {
  // support for JVMTI VMObjectAlloc event (no-op if not enabled)
  JvmtiExport::vm_object_alloc_event_collector(obj());

  if (!JvmtiExport::should_post_sampled_object_alloc()) {
    // Sampling disabled
    return;
  }

  if (!_allocated_outside_tlab && _allocated_tlab_size == 0 && !_tlab_end_reset_for_sample) {
    // Sample if it's a non-TLAB allocation, or a TLAB allocation that either refills the TLAB
    // or expands it due to taking a sampler induced slow path.
    return;
  }

  // If we want to be sampling, protect the allocated object with a Handle
  // before doing the callback. The callback is done in the destructor of
  // the JvmtiSampledObjectAllocEventCollector.
  size_t bytes_since_last = 0;

  {
    PreserveObj obj_h(_thread, _obj_ptr);
    JvmtiSampledObjectAllocEventCollector collector;
    size_t size_in_bytes = _allocator._word_size * HeapWordSize;
    ThreadLocalAllocBuffer& tlab = _thread->tlab();

    if (!_allocated_outside_tlab) {
      bytes_since_last = tlab.bytes_since_last_sample_point();
    }

    _thread->heap_sampler().check_for_sampling(obj_h(), size_in_bytes, bytes_since_last);
  }

  if (_tlab_end_reset_for_sample || _allocated_tlab_size != 0) {
    // Tell tlab to forget bytes_since_last if we passed it to the heap sampler.
    _thread->tlab().set_sample_end(bytes_since_last != 0);
  }
}

void MemAllocator::Allocation::notify_allocation_low_memory_detector() {
  // support low memory notifications (no-op if not enabled)
  LowMemoryDetector::detect_low_memory_for_collected_pools();
}

void MemAllocator::Allocation::notify_allocation_jfr_sampler() {
  HeapWord* mem = cast_from_oop<HeapWord*>(obj());
  size_t size_in_bytes = _allocator._word_size * HeapWordSize;

  if (_allocated_outside_tlab) {
    AllocTracer::send_allocation_outside_tlab(obj()->klass(), mem, size_in_bytes, _thread);
  } else if (_allocated_tlab_size != 0) {
    // TLAB was refilled
    AllocTracer::send_allocation_in_new_tlab(obj()->klass(), mem, _allocated_tlab_size * HeapWordSize,
                                             size_in_bytes, _thread);
  }
}

void MemAllocator::Allocation::notify_allocation_dtrace_sampler() {
  if (DTraceAllocProbes) {
    // support for Dtrace object alloc event (no-op most of the time)
    Klass* klass = obj()->klass();
    size_t word_size = _allocator._word_size;
    if (klass != NULL && klass->name() != NULL) {
      SharedRuntime::dtrace_object_alloc(obj(), (int)word_size);
    }
  }
}

void MemAllocator::Allocation::notify_allocation() {
  notify_allocation_low_memory_detector();
  notify_allocation_jfr_sampler();
  notify_allocation_dtrace_sampler();
  notify_allocation_jvmti_sampler();
}

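// Allocate directly from the shared heap (outside any TLAB) and account the
// allocated bytes against the current thread.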
HeapWord* MemAllocator::allocate_outside_tlab(Allocation& allocation) const {
  allocation._allocated_outside_tlab = true;
  HeapWord* mem = Universe::heap()->mem_allocate(_word_size, &allocation._overhead_limit_exceeded);
  if (mem == NULL) {
    return mem;
  }

  NOT_PRODUCT(Universe::heap()->check_for_non_bad_heap_word_value(mem, _word_size));
  size_t size_in_bytes = _word_size * HeapWordSize;
  _thread->incr_allocated_bytes(size_in_bytes);

  return mem;
}

HeapWord* MemAllocator::allocate_inside_tlab(Allocation& allocation) const {
  assert(UseTLAB, "should use UseTLAB");

  // Try allocating from an existing TLAB.
  HeapWord* mem = _thread->tlab().allocate(_word_size);
  if (mem != NULL) {
    return mem;
  }

  // Try refilling the TLAB and allocating the object in it.
  return allocate_inside_tlab_slow(allocation);
}

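// TLAB slow path: either keep the current TLAB (returning NULL so the caller
// falls back to an outside-TLAB allocation) or retire it and try to allocate
// a fresh TLAB that the object is placed at the start of.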
HeapWord* MemAllocator::allocate_inside_tlab_slow(Allocation& allocation) const {
  HeapWord* mem = NULL;
  ThreadLocalAllocBuffer& tlab = _thread->tlab();

  if (JvmtiExport::should_post_sampled_object_alloc()) {
    tlab.set_back_allocation_end();
    mem = tlab.allocate(_word_size);

    // We set back the allocation sample point to try to allocate this, reset it
    // when done.
    allocation._tlab_end_reset_for_sample = true;

    if (mem != NULL) {
      return mem;
    }
  }

  // Retain tlab and allocate object in shared space if
  // the amount free in the tlab is too large to discard.
  if (tlab.free() > tlab.refill_waste_limit()) {
    tlab.record_slow_allocation(_word_size);
    return NULL;
  }

  // Discard tlab and allocate a new one.
  // To minimize fragmentation, the last TLAB may be smaller than the rest.
  size_t new_tlab_size = tlab.compute_size(_word_size);

  tlab.retire_before_allocation();

  if (new_tlab_size == 0) {
    return NULL;
  }

  // Allocate a new TLAB requesting new_tlab_size. Any size
  // between minimal and new_tlab_size is accepted.
  size_t min_tlab_size = ThreadLocalAllocBuffer::compute_min_size(_word_size);
  mem = Universe::heap()->allocate_new_tlab(min_tlab_size, new_tlab_size, &allocation._allocated_tlab_size);
  if (mem == NULL) {
    assert(allocation._allocated_tlab_size == 0,
           "Allocation failed, but actual size was updated. min: " SIZE_FORMAT
           ", desired: " SIZE_FORMAT ", actual: " SIZE_FORMAT,
           min_tlab_size, new_tlab_size, allocation._allocated_tlab_size);
    return NULL;
  }
  assert(allocation._allocated_tlab_size != 0, "Allocation succeeded but actual size not updated. mem at: "
         PTR_FORMAT " min: " SIZE_FORMAT ", desired: " SIZE_FORMAT,
         p2i(mem), min_tlab_size, new_tlab_size);

  if (ZeroTLAB) {
    // ..and clear it.
    Copy::zero_to_words(mem, allocation._allocated_tlab_size);
  } else {
    // ...and zap just allocated object.
#ifdef ASSERT
    // Skip mangling the space corresponding to the object header to
    // ensure that the returned space is not considered parsable by
    // any concurrent GC thread.
    size_t hdr_size = oopDesc::header_size();
    Copy::fill_to_words(mem + hdr_size, allocation._allocated_tlab_size - hdr_size, badHeapWordVal);
#endif // ASSERT
  }

  tlab.fill(mem, mem + _word_size, allocation._allocated_tlab_size);
  return mem;
}

HeapWord* MemAllocator::mem_allocate(Allocation& allocation) const {
  if (UseTLAB) {
    HeapWord* result = allocate_inside_tlab(allocation);
    if (result != NULL) {
      return result;
    }
  }

  return allocate_outside_tlab(allocation);
}

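// Top-level entry point: allocate the memory and, if that succeeds, initialize
// the object. Out-of-memory reporting and allocation notifications are handled
// by the Allocation destructor when it goes out of scope.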
oop MemAllocator::allocate() const {
  oop obj = NULL;
  {
    Allocation allocation(*this, &obj);
    HeapWord* mem = mem_allocate(allocation);
    if (mem != NULL) {
      obj = initialize(mem);
    } else {
      // The unhandled oop detector will poison local variable obj,
      // so reset it to NULL if mem is NULL.
      obj = NULL;
    }
  }
  return obj;
}

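// Zero the object payload (everything past the header) and clear the klass gap.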
void MemAllocator::mem_clear(HeapWord* mem) const {
  assert(mem != NULL, "cannot initialize NULL object");
  const size_t hs = oopDesc::header_size();
  assert(_word_size >= hs, "unexpected object size");
  oopDesc::set_klass_gap(mem, 0);
  Copy::fill_to_aligned_words(mem + hs, _word_size - hs);
}

oop MemAllocator::finish(HeapWord* mem) const {
  assert(mem != NULL, "NULL object pointer");
  if (UseBiasedLocking) {
    oopDesc::set_mark(mem, _klass->prototype_header());
  } else {
    // May be bootstrapping
    oopDesc::set_mark(mem, markWord::prototype());
  }
  // Need a release store to ensure array/class length, mark word, and
  // object zeroing are visible before setting the klass non-NULL, for
  // concurrent collectors.
  oopDesc::release_set_klass(mem, _klass);
  return oop(mem);
}

oop ObjAllocator::initialize(HeapWord* mem) const {
  mem_clear(mem);
  return finish(mem);
}

MemRegion ObjArrayAllocator::obj_memory_range(oop obj) const {
  if (_do_zero) {
    return MemAllocator::obj_memory_range(obj);
  }
  ArrayKlass* array_klass = ArrayKlass::cast(_klass);
  const size_t hs = arrayOopDesc::header_size(array_klass->element_type());
  return MemRegion(cast_from_oop<HeapWord*>(obj) + hs, _word_size - hs);
}

oop ObjArrayAllocator::initialize(HeapWord* mem) const {
  // Set array length before setting the _klass field because a
  // non-NULL klass field indicates that the object is parsable by
  // concurrent GC.
  assert(_length >= 0, "length should be non-negative");
  if (_do_zero) {
    mem_clear(mem);
  }
  arrayOopDesc::set_length(mem, _length);
  return finish(mem);
}

oop ClassAllocator::initialize(HeapWord* mem) const {
  // Set oop_size field before setting the _klass field because a
  // non-NULL _klass field indicates that the object is parsable by
  // concurrent GC.
  assert(_word_size > 0, "oop_size must be positive.");
  mem_clear(mem);
  java_lang_Class::set_oop_size(mem, (int)_word_size);
  return finish(mem);
}