/*
 * Copyright (c) 2012, 2020, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

#include "precompiled.hpp"
#include "classfile/javaClasses.inline.hpp"
#include "classfile/symbolTable.hpp"
#include "compiler/compileBroker.hpp"
#include "jvmci/jniAccessMark.inline.hpp"
#include "jvmci/jvmciCompilerToVM.hpp"
#include "jvmci/jvmciRuntime.hpp"
#include "logging/log.hpp"
#include "memory/oopFactory.hpp"
#include "memory/universe.hpp"
#include "oops/constantPool.inline.hpp"
#include "oops/method.inline.hpp"
#include "oops/objArrayKlass.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/atomic.hpp"
#include "runtime/biasedLocking.hpp"
#include "runtime/deoptimization.hpp"
#include "runtime/fieldDescriptor.inline.hpp"
#include "runtime/frame.inline.hpp"
#include "runtime/sharedRuntime.hpp"
#if INCLUDE_G1GC
#include "gc/g1/g1ThreadLocalData.hpp"
#endif // INCLUDE_G1GC

// Simple helper to see if the caller of a runtime stub which
// entered the VM has been deoptimized

static bool caller_is_deopted() {
  JavaThread* thread = JavaThread::current();
  RegisterMap reg_map(thread, false);
  frame runtime_frame = thread->last_frame();
  frame caller_frame = runtime_frame.sender(&reg_map);
  assert(caller_frame.is_compiled_frame(), "must be compiled");
  return caller_frame.is_deoptimized_frame();
}

// Stress deoptimization
static void deopt_caller() {
  if (!caller_is_deopted()) {
    JavaThread* thread = JavaThread::current();
    RegisterMap reg_map(thread, false);
    frame runtime_frame = thread->last_frame();
    frame caller_frame = runtime_frame.sender(&reg_map);
    Deoptimization::deoptimize_frame(thread, caller_frame.id(), Deoptimization::Reason_constraint);
    assert(caller_is_deopted(), "Must be deoptimized");
  }
}

// Manages a scope for a JVMCI runtime call that attempts a heap allocation.
// If there is a pending exception upon closing the scope and the runtime
// call is of the variety where allocation failure returns NULL without an
// exception, the following action is taken:
//   1. The pending exception is cleared
//   2. NULL is written to JavaThread::_vm_result
//   3. It is checked that any OutOfMemoryError raised in the scope is Universe::out_of_memory_error_retry().
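//
// A minimal usage sketch (illustrative only; it mirrors new_instance_common below):
//
//   RetryableAllocationMark ram(thread, null_on_fail);
//   oop obj = h->allocate_instance(CHECK);
//   thread->set_vm_result(obj);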
class RetryableAllocationMark: public StackObj {
 private:
  JavaThread* _thread;
 public:
  RetryableAllocationMark(JavaThread* thread, bool activate) {
    if (activate) {
      assert(!thread->in_retryable_allocation(), "retryable allocation scope is non-reentrant");
      _thread = thread;
      _thread->set_in_retryable_allocation(true);
    } else {
      _thread = NULL;
    }
  }
  ~RetryableAllocationMark() {
    if (_thread != NULL) {
      _thread->set_in_retryable_allocation(false);
      JavaThread* THREAD = _thread;
      if (HAS_PENDING_EXCEPTION) {
        oop ex = PENDING_EXCEPTION;
        CLEAR_PENDING_EXCEPTION;
        oop retry_oome = Universe::out_of_memory_error_retry();
        if (ex->is_a(retry_oome->klass()) && retry_oome != ex) {
          ResourceMark rm;
          fatal("Unexpected exception in scope of retryable allocation: " INTPTR_FORMAT " of type %s", p2i(ex), ex->klass()->external_name());
        }
        _thread->set_vm_result(NULL);
      }
    }
  }
};

JRT_BLOCK_ENTRY(void, JVMCIRuntime::new_instance_common(JavaThread* thread, Klass* klass, bool null_on_fail))
  JRT_BLOCK;
  assert(klass->is_klass(), "not a class");
  Handle holder(THREAD, klass->klass_holder()); // keep the klass alive
  InstanceKlass* h = InstanceKlass::cast(klass);
  {
    RetryableAllocationMark ram(thread, null_on_fail);
    h->check_valid_for_instantiation(true, CHECK);
    oop obj;
    if (null_on_fail) {
      if (!h->is_initialized()) {
        // Cannot re-execute class initialization without side effects
        // so return without attempting the initialization
        return;
      }
    } else {
      // make sure klass is initialized
      h->initialize(CHECK);
    }
    // allocate instance and return via TLS
    obj = h->allocate_instance(CHECK);
    thread->set_vm_result(obj);
  }
  JRT_BLOCK_END;
  SharedRuntime::on_slowpath_allocation_exit(thread);
JRT_END

JRT_BLOCK_ENTRY(void, JVMCIRuntime::new_array_common(JavaThread* thread, Klass* array_klass, jint length, bool null_on_fail))
  JRT_BLOCK;
  // Note: no handle for klass needed since they are not used
  //       anymore after new_objArray() and no GC can happen before.
  //       (This may have to change if this code changes!)
  assert(array_klass->is_klass(), "not a class");
  oop obj;
  if (array_klass->is_typeArray_klass()) {
    BasicType elt_type = TypeArrayKlass::cast(array_klass)->element_type();
    RetryableAllocationMark ram(thread, null_on_fail);
    obj = oopFactory::new_typeArray(elt_type, length, CHECK);
  } else {
    Handle holder(THREAD, array_klass->klass_holder()); // keep the klass alive
    Klass* elem_klass = ObjArrayKlass::cast(array_klass)->element_klass();
    RetryableAllocationMark ram(thread, null_on_fail);
    obj = oopFactory::new_objArray(elem_klass, length, CHECK);
  }
  thread->set_vm_result(obj);
  // This is pretty rare but this runtime path is stressful to deoptimization
  // if we deoptimize here, so force a deopt to stress the path.
  if (DeoptimizeALot) {
    static int deopts = 0;
    // Alternate between deoptimizing and raising an error (which will also cause a deopt)
    if (deopts++ % 2 == 0) {
      if (null_on_fail) {
        return;
      } else {
        ResourceMark rm(THREAD);
        THROW(vmSymbols::java_lang_OutOfMemoryError());
      }
    } else {
      deopt_caller();
    }
  }
  JRT_BLOCK_END;
  SharedRuntime::on_slowpath_allocation_exit(thread);
JRT_END

JRT_ENTRY(void, JVMCIRuntime::new_multi_array_common(JavaThread* thread, Klass* klass, int rank, jint* dims, bool null_on_fail))
  assert(klass->is_klass(), "not a class");
  assert(rank >= 1, "rank must be nonzero");
  Handle holder(THREAD, klass->klass_holder()); // keep the klass alive
  RetryableAllocationMark ram(thread, null_on_fail);
  oop obj = ArrayKlass::cast(klass)->multi_allocate(rank, dims, CHECK);
  thread->set_vm_result(obj);
JRT_END

JRT_ENTRY(void, JVMCIRuntime::dynamic_new_array_common(JavaThread* thread, oopDesc* element_mirror, jint length, bool null_on_fail))
  RetryableAllocationMark ram(thread, null_on_fail);
  oop obj = Reflection::reflect_new_array(element_mirror, length, CHECK);
  thread->set_vm_result(obj);
JRT_END

JRT_ENTRY(void, JVMCIRuntime::dynamic_new_instance_common(JavaThread* thread, oopDesc* type_mirror, bool null_on_fail))
  InstanceKlass* klass = InstanceKlass::cast(java_lang_Class::as_Klass(type_mirror));

  if (klass == NULL) {
    ResourceMark rm(THREAD);
    THROW(vmSymbols::java_lang_InstantiationException());
  }
  RetryableAllocationMark ram(thread, null_on_fail);

  // Create new instance (the receiver)
  klass->check_valid_for_instantiation(false, CHECK);

  if (null_on_fail) {
    if (!klass->is_initialized()) {
      // Cannot re-execute class initialization without side effects
      // so return without attempting the initialization
      return;
    }
  } else {
    // Make sure klass gets initialized
    klass->initialize(CHECK);
  }

  oop obj = klass->allocate_instance(CHECK);
  thread->set_vm_result(obj);
JRT_END

extern void vm_exit(int code);

// Enter this method from compiled code handler below. This is where we transition
// to VM mode. This is done as a helper routine so that the method called directly
// from compiled code does not have to transition to VM. This allows the entry
// method to see if the nmethod that we have just looked up a handler for has
// been deoptimized while we were in the vm. This simplifies the assembly code
// in the cpu directories.
//
// We are entering here from the exception stub (via the entry method below).
// If there is a compiled exception handler in this method, we will continue there;
// otherwise we will unwind the stack and continue at the caller of the top frame method.
// Note: we enter in Java using a special JRT wrapper. This wrapper allows us to
// control the area where we can allow a safepoint. After we exit the safepoint area we can
// check to see if the handler we are going to return is now in an nmethod that has
// been deoptimized. If that is the case we return the deopt blob
// unpack_with_exception entry instead. This makes life for the exception blob easier
// because making that same check and diverting is painful from assembly language.
JRT_ENTRY_NO_ASYNC(static address, exception_handler_for_pc_helper(JavaThread* thread, oopDesc* ex, address pc, CompiledMethod*& cm))
  // Reset method handle flag.
  thread->set_is_method_handle_return(false);

  Handle exception(thread, ex);
  cm = CodeCache::find_compiled(pc);
  assert(cm != NULL, "this is not a compiled method");
  // Adjust the pc as needed.
  if (cm->is_deopt_pc(pc)) {
    RegisterMap map(thread, false);
    frame exception_frame = thread->last_frame().sender(&map);
    // if the frame isn't deopted then pc must not correspond to the caller of last_frame
    assert(exception_frame.is_deoptimized_frame(), "must be deopted");
    pc = exception_frame.pc();
  }
#ifdef ASSERT
  assert(exception.not_null(), "NULL exceptions should be handled by throw_exception");
  assert(oopDesc::is_oop(exception()), "just checking");
  // Check that exception is a subclass of Throwable, otherwise we have a VerifyError
  if (!(exception->is_a(SystemDictionary::Throwable_klass()))) {
    if (ExitVMOnVerifyError) vm_exit(-1);
    ShouldNotReachHere();
  }
#endif

  // Check the stack guard pages and reenable them if necessary and there is
  // enough space on the stack to do so.  Use fast exceptions only if the guard
  // pages are enabled.
  bool guard_pages_enabled = thread->stack_guards_enabled();
  if (!guard_pages_enabled) guard_pages_enabled = thread->reguard_stack();

  if (JvmtiExport::can_post_on_exceptions()) {
    // To ensure correct notification of exception catches and throws
    // we have to deoptimize here.  If we attempted to notify the
    // catches and throws during this exception lookup it's possible
    // we could deoptimize on the way out of the VM and end back in
    // the interpreter at the throw site.  This would result in double
    // notifications since the interpreter would also notify about
    // these same catches and throws as it unwound the frame.

    RegisterMap reg_map(thread);
    frame stub_frame = thread->last_frame();
    frame caller_frame = stub_frame.sender(&reg_map);

    // We don't really want to deoptimize the nmethod itself since we
    // can actually continue in the exception handler ourselves but I
    // don't see an easy way to have the desired effect.
    Deoptimization::deoptimize_frame(thread, caller_frame.id(), Deoptimization::Reason_constraint);
    assert(caller_is_deopted(), "Must be deoptimized");

    return SharedRuntime::deopt_blob()->unpack_with_exception_in_tls();
  }

  // ExceptionCache is used only for exceptions at call sites and not for implicit exceptions
  if (guard_pages_enabled) {
    address fast_continuation = cm->handler_for_exception_and_pc(exception, pc);
    if (fast_continuation != NULL) {
      // Set flag if return address is a method handle call site.
      thread->set_is_method_handle_return(cm->is_method_handle_return(pc));
      return fast_continuation;
    }
  }

  // If the stack guard pages are enabled, check whether there is a handler in
  // the current method.  Otherwise (guard pages disabled), force an unwind and
  // skip the exception cache update (i.e., just leave continuation==NULL).
  address continuation = NULL;
  if (guard_pages_enabled) {

    // New exception handling mechanism can support inlined methods
    // with exception handlers since the mappings are from PC to PC

    // debugging support
    // tracing
    if (log_is_enabled(Info, exceptions)) {
      ResourceMark rm;
      stringStream tempst;
      assert(cm->method() != NULL, "Unexpected null method()");
      tempst.print("compiled method <%s>\n"
                   " at PC" INTPTR_FORMAT " for thread " INTPTR_FORMAT,
                   cm->method()->print_value_string(), p2i(pc), p2i(thread));
      Exceptions::log_exception(exception, tempst.as_string());
    }
    // for AbortVMOnException flag
    NOT_PRODUCT(Exceptions::debug_check_abort(exception));

    // Clear out the exception oop and pc since looking up an
    // exception handler can cause class loading, which might throw an
    // exception and those fields are expected to be clear during
    // normal bytecode execution.
    thread->clear_exception_oop_and_pc();

    bool recursive_exception = false;
    continuation = SharedRuntime::compute_compiled_exc_handler(cm, pc, exception, false, false, recursive_exception);
    // If an exception was thrown during exception dispatch, the exception oop may have changed
    thread->set_exception_oop(exception());
    thread->set_exception_pc(pc);

    // The exception cache is used only for non-implicit exceptions.
    // Update the exception cache only when no other exception occurred
    // during the computation of the compiled exception handler
    // (e.g., when loading the class of the catch type).
    // Checking for exception oop equality is not
    // sufficient because some exceptions are pre-allocated and reused.
    if (continuation != NULL && !recursive_exception && !SharedRuntime::deopt_blob()->contains(continuation)) {
      cm->add_handler_for_exception_and_pc(exception, pc, continuation);
    }
  }

  // Set flag if return address is a method handle call site.
  thread->set_is_method_handle_return(cm->is_method_handle_return(pc));

  if (log_is_enabled(Info, exceptions)) {
    ResourceMark rm;
    log_info(exceptions)("Thread " PTR_FORMAT " continuing at PC " PTR_FORMAT
                         " for exception thrown at PC " PTR_FORMAT,
                         p2i(thread), p2i(continuation), p2i(pc));
  }

  return continuation;
JRT_END

// Enter this method from compiled code only if there is a Java exception handler
// in the method handling the exception.
// We are entering here from exception stub. We don't do a normal VM transition here.
// We do it in a helper. This is so we can check to see if the nmethod we have just
// searched for an exception handler has been deoptimized in the meantime.
address JVMCIRuntime::exception_handler_for_pc(JavaThread* thread) {
  oop exception = thread->exception_oop();
  address pc = thread->exception_pc();
  // Still in Java mode
  DEBUG_ONLY(ResetNoHandleMark rnhm);
  CompiledMethod* cm = NULL;
  address continuation = NULL;
  {
    // Enter VM mode by calling the helper
    ResetNoHandleMark rnhm;
    continuation = exception_handler_for_pc_helper(thread, exception, pc, cm);
  }
  // Back in JAVA, use no oops DON'T safepoint

  // Now check to see if the compiled method we were called from is now deoptimized.
  // If so we must return to the deopt blob and deoptimize the nmethod
  if (cm != NULL && caller_is_deopted()) {
    continuation = SharedRuntime::deopt_blob()->unpack_with_exception_in_tls();
  }

  assert(continuation != NULL, "no handler found");
  return continuation;
}

JRT_BLOCK_ENTRY(void, JVMCIRuntime::monitorenter(JavaThread* thread, oopDesc* obj, BasicLock* lock))
  SharedRuntime::monitor_enter_helper(obj, lock, thread);
JRT_END

JRT_LEAF(void, JVMCIRuntime::monitorexit(JavaThread* thread, oopDesc* obj, BasicLock* lock))
  assert(thread->last_Java_sp(), "last_Java_sp must be set");
  assert(oopDesc::is_oop(obj), "invalid lock object pointer detected");
  SharedRuntime::monitor_exit_helper(obj, lock, thread);
JRT_END

// Object.notify() fast path, caller does slow path
JRT_LEAF(jboolean, JVMCIRuntime::object_notify(JavaThread *thread, oopDesc* obj))

  // Very few notify/notifyAll operations find any threads on the waitset, so
  // the dominant fast-path is to simply return.
  // Relatedly, it's critical that notify/notifyAll be fast in order to
  // reduce lock hold times.
  if (!SafepointSynchronize::is_synchronizing()) {
    if (ObjectSynchronizer::quick_notify(obj, thread, false)) {
      return true;
    }
  }
  return false; // caller must perform slow path

JRT_END

// Object.notifyAll() fast path, caller does slow path
JRT_LEAF(jboolean, JVMCIRuntime::object_notifyAll(JavaThread *thread, oopDesc* obj))

  if (!SafepointSynchronize::is_synchronizing()) {
    if (ObjectSynchronizer::quick_notify(obj, thread, true)) {
      return true;
    }
  }
  return false; // caller must perform slow path

JRT_END

JRT_BLOCK_ENTRY(int, JVMCIRuntime::throw_and_post_jvmti_exception(JavaThread* thread, const char* exception, const char* message))
  JRT_BLOCK;
  TempNewSymbol symbol = SymbolTable::new_symbol(exception);
  SharedRuntime::throw_and_post_jvmti_exception(thread, symbol, message);
  JRT_BLOCK_END;
  return caller_is_deopted();
JRT_END

JRT_BLOCK_ENTRY(int, JVMCIRuntime::throw_klass_external_name_exception(JavaThread* thread, const char* exception, Klass* klass))
  JRT_BLOCK;
  ResourceMark rm(thread);
  TempNewSymbol symbol = SymbolTable::new_symbol(exception);
  SharedRuntime::throw_and_post_jvmti_exception(thread, symbol, klass->external_name());
  JRT_BLOCK_END;
  return caller_is_deopted();
JRT_END

JRT_BLOCK_ENTRY(int, JVMCIRuntime::throw_class_cast_exception(JavaThread* thread, const char* exception, Klass* caster_klass, Klass* target_klass))
  JRT_BLOCK;
  ResourceMark rm(thread);
  const char* message = SharedRuntime::generate_class_cast_message(caster_klass, target_klass);
  TempNewSymbol symbol = SymbolTable::new_symbol(exception);
  SharedRuntime::throw_and_post_jvmti_exception(thread, symbol, message);
  JRT_BLOCK_END;
  return caller_is_deopted();
JRT_END

JRT_LEAF(void, JVMCIRuntime::log_object(JavaThread* thread, oopDesc* obj, bool as_string, bool newline))
  ttyLocker ttyl;

  if (obj == NULL) {
    tty->print("NULL");
  } else if (oopDesc::is_oop_or_null(obj, true) && (!as_string || !java_lang_String::is_instance(obj))) {
    if (oopDesc::is_oop_or_null(obj, true)) {
      char buf[O_BUFLEN];
      tty->print("%s@" INTPTR_FORMAT, obj->klass()->name()->as_C_string(buf, O_BUFLEN), p2i(obj));
    } else {
      tty->print(INTPTR_FORMAT, p2i(obj));
    }
  } else {
    ResourceMark rm;
    assert(obj != NULL && java_lang_String::is_instance(obj), "must be");
    char *buf = java_lang_String::as_utf8_string(obj);
    tty->print_raw(buf);
  }
  if (newline) {
    tty->cr();
  }
JRT_END

#if INCLUDE_G1GC

JRT_LEAF(void, JVMCIRuntime::write_barrier_pre(JavaThread* thread, oopDesc* obj))
  G1ThreadLocalData::satb_mark_queue(thread).enqueue(obj);
JRT_END

JRT_LEAF(void, JVMCIRuntime::write_barrier_post(JavaThread* thread, void* card_addr))
  G1ThreadLocalData::dirty_card_queue(thread).enqueue(card_addr);
JRT_END

#endif // INCLUDE_G1GC

JRT_LEAF(jboolean, JVMCIRuntime::validate_object(JavaThread* thread, oopDesc* parent, oopDesc* child))
  bool ret = true;
  if (!Universe::heap()->is_in(parent)) {
    tty->print_cr("Parent Object " INTPTR_FORMAT " not in heap", p2i(parent));
    parent->print();
    ret = false;
  }
  if (!Universe::heap()->is_in(child)) {
    tty->print_cr("Child Object " INTPTR_FORMAT " not in heap", p2i(child));
    child->print();
    ret = false;
  }
  return (jint)ret;
JRT_END

JRT_ENTRY(void, JVMCIRuntime::vm_error(JavaThread* thread, jlong where, jlong format, jlong value))
  ResourceMark rm;
  const char *error_msg = where == 0L ? "<internal JVMCI error>" : (char*) (address) where;
  char *detail_msg = NULL;
  if (format != 0L) {
    const char* buf = (char*) (address) format;
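    // Assumption: doubling the format string length leaves enough room for the expanded value.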
    size_t detail_msg_length = strlen(buf) * 2;
    detail_msg = (char *) NEW_RESOURCE_ARRAY(u_char, detail_msg_length);
    jio_snprintf(detail_msg, detail_msg_length, buf, value);
  }
  report_vm_error(__FILE__, __LINE__, error_msg, "%s", detail_msg);
JRT_END

JRT_LEAF(oopDesc*, JVMCIRuntime::load_and_clear_exception(JavaThread* thread))
  oop exception = thread->exception_oop();
  assert(exception != NULL, "npe");
  thread->set_exception_oop(NULL);
  thread->set_exception_pc(0);
  return exception;
JRT_END

PRAGMA_DIAG_PUSH
PRAGMA_FORMAT_NONLITERAL_IGNORED
JRT_LEAF(void, JVMCIRuntime::log_printf(JavaThread* thread, const char* format, jlong v1, jlong v2, jlong v3))
  ResourceMark rm;
  tty->print(format, v1, v2, v3);
JRT_END
PRAGMA_DIAG_POP

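// Best-effort printer for a raw value passed up from compiled code: if the value
// is a code address, the containing nmethod or code blob is printed; if it points
// into the heap, the oop is printed; otherwise the raw integer, double and char
// interpretations are printed.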
static void decipher(jlong v, bool ignoreZero) {
  if (v != 0 || !ignoreZero) {
    void* p = (void *)(address) v;
    CodeBlob* cb = CodeCache::find_blob(p);
    if (cb) {
      if (cb->is_nmethod()) {
        char buf[O_BUFLEN];
        tty->print("%s [" INTPTR_FORMAT "+" JLONG_FORMAT "]", cb->as_nmethod_or_null()->method()->name_and_sig_as_C_string(buf, O_BUFLEN), p2i(cb->code_begin()), (jlong)((address)v - cb->code_begin()));
        return;
      }
      cb->print_value_on(tty);
      return;
    }
    if (Universe::heap()->is_in(p)) {
      oop obj = oop(p);
      obj->print_value_on(tty);
      return;
    }
    tty->print(INTPTR_FORMAT " [long: " JLONG_FORMAT ", double %lf, char %c]", p2i((void *)v), (jlong)v, (jdouble)v, (char)v);
  }
}

PRAGMA_DIAG_PUSH
PRAGMA_FORMAT_NONLITERAL_IGNORED
JRT_LEAF(void, JVMCIRuntime::vm_message(jboolean vmError, jlong format, jlong v1, jlong v2, jlong v3))
  ResourceMark rm;
  const char *buf = (const char*) (address) format;
  if (vmError) {
    if (buf != NULL) {
      fatal(buf, v1, v2, v3);
    } else {
      fatal("<anonymous error>");
    }
  } else if (buf != NULL) {
    tty->print(buf, v1, v2, v3);
  } else {
    assert(v2 == 0, "v2 != 0");
    assert(v3 == 0, "v3 != 0");
    decipher(v1, false);
  }
JRT_END
PRAGMA_DIAG_POP

JRT_LEAF(void, JVMCIRuntime::log_primitive(JavaThread* thread, jchar typeChar, jlong value, jboolean newline))
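  // The union reinterprets the raw bits of 'value' so that float and double
  // values are printed from their bit patterns rather than via a numeric conversion.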
  union {
      jlong l;
      jdouble d;
      jfloat f;
  } uu;
  uu.l = value;
  switch (typeChar) {
    case 'Z': tty->print(value == 0 ? "false" : "true"); break;
    case 'B': tty->print("%d", (jbyte) value); break;
    case 'C': tty->print("%c", (jchar) value); break;
    case 'S': tty->print("%d", (jshort) value); break;
    case 'I': tty->print("%d", (jint) value); break;
    case 'F': tty->print("%f", uu.f); break;
    case 'J': tty->print(JLONG_FORMAT, value); break;
    case 'D': tty->print("%lf", uu.d); break;
    default: assert(false, "unknown typeChar"); break;
  }
  if (newline) {
    tty->cr();
  }
JRT_END

JRT_ENTRY(jint, JVMCIRuntime::identity_hash_code(JavaThread* thread, oopDesc* obj))
  return (jint) obj->identity_hash();
JRT_END

JRT_ENTRY(jint, JVMCIRuntime::test_deoptimize_call_int(JavaThread* thread, int value))
  deopt_caller();
  return (jint) value;
JRT_END


// private static JVMCIRuntime JVMCI.initializeRuntime()
JVM_ENTRY_NO_ENV(jobject, JVM_GetJVMCIRuntime(JNIEnv *env, jclass c))
  JNI_JVMCIENV(thread, env);
  if (!EnableJVMCI) {
    JVMCI_THROW_MSG_NULL(InternalError, "JVMCI is not enabled");
  }
  JVMCIENV->runtime()->initialize_HotSpotJVMCIRuntime(JVMCI_CHECK_NULL);
  JVMCIObject runtime = JVMCIENV->runtime()->get_HotSpotJVMCIRuntime(JVMCI_CHECK_NULL);
  return JVMCIENV->get_jobject(runtime);
JVM_END

void JVMCIRuntime::call_getCompiler(TRAPS) {
  THREAD_JVMCIENV(JavaThread::current());
  JVMCIObject jvmciRuntime = JVMCIRuntime::get_HotSpotJVMCIRuntime(JVMCI_CHECK);
  initialize(JVMCIENV);
  JVMCIENV->call_HotSpotJVMCIRuntime_getCompiler(jvmciRuntime, JVMCI_CHECK);
}

void JVMCINMethodData::initialize(
  int nmethod_mirror_index,
  const char* name,
  FailedSpeculation** failed_speculations)
{
  _failed_speculations = failed_speculations;
  _nmethod_mirror_index = nmethod_mirror_index;
  if (name != NULL) {
    _has_name = true;
    char* dest = (char*) this->name();
    strcpy(dest, name);
  } else {
    _has_name = false;
  }
}

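// A speculation is encoded as a 64-bit value: the high 32 bits hold the offset
// (index) of the speculation data in the nmethod's speculations section and the
// low 32 bits hold the length of that data.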
void JVMCINMethodData::add_failed_speculation(nmethod* nm, jlong speculation) {
  uint index = (speculation >> 32) & 0xFFFFFFFF;
  int length = (int) speculation;
  if (index + length > (uint) nm->speculations_size()) {
    fatal(INTPTR_FORMAT "[index: %d, length: %d] out of bounds wrt encoded speculations of length %u", speculation, index, length, nm->speculations_size());
  }
  address data = nm->speculations_begin() + index;
  FailedSpeculation::add_failed_speculation(nm, _failed_speculations, data, length);
}

oop JVMCINMethodData::get_nmethod_mirror(nmethod* nm, bool phantom_ref) {
  if (_nmethod_mirror_index == -1) {
    return NULL;
  }
  if (phantom_ref) {
    return nm->oop_at_phantom(_nmethod_mirror_index);
  } else {
    return nm->oop_at(_nmethod_mirror_index);
  }
}

void JVMCINMethodData::set_nmethod_mirror(nmethod* nm, oop new_mirror) {
  assert(_nmethod_mirror_index != -1, "cannot set JVMCI mirror for nmethod");
  oop* addr = nm->oop_addr_at(_nmethod_mirror_index);
  assert(new_mirror != NULL, "use clear_nmethod_mirror to clear the mirror");
  assert(*addr == NULL, "cannot overwrite non-null mirror");

  *addr = new_mirror;

  // Since we've patched some oops in the nmethod,
  // (re)register it with the heap.
  MutexLocker ml(CodeCache_lock, Mutex::_no_safepoint_check_flag);
  Universe::heap()->register_nmethod(nm);
}

void JVMCINMethodData::clear_nmethod_mirror(nmethod* nm) {
  if (_nmethod_mirror_index != -1) {
    oop* addr = nm->oop_addr_at(_nmethod_mirror_index);
    *addr = NULL;
  }
}

void JVMCINMethodData::invalidate_nmethod_mirror(nmethod* nm) {
  oop nmethod_mirror = get_nmethod_mirror(nm, /* phantom_ref */ false);
  if (nmethod_mirror == NULL) {
    return;
  }

  // Update the values in the mirror if it still refers to nm.
  // We cannot use JVMCIObject to wrap the mirror as this is called
  // during GC, forbidding the creation of JNIHandles.
  JVMCIEnv* jvmciEnv = NULL;
  nmethod* current = (nmethod*) HotSpotJVMCI::InstalledCode::address(jvmciEnv, nmethod_mirror);
  if (nm == current) {
    if (!nm->is_alive()) {
      // Break the link from the mirror to nm such that
      // future invocations via the mirror will result in
      // an InvalidInstalledCodeException.
      HotSpotJVMCI::InstalledCode::set_address(jvmciEnv, nmethod_mirror, 0);
      HotSpotJVMCI::InstalledCode::set_entryPoint(jvmciEnv, nmethod_mirror, 0);
    } else if (nm->is_not_entrant()) {
      // Zero the entry point so any new invocation will fail but keep
      // the address link around so that existing activations can
      // be deoptimized via the mirror (i.e. JVMCIEnv::invalidate_installed_code).
      HotSpotJVMCI::InstalledCode::set_entryPoint(jvmciEnv, nmethod_mirror, 0);
    }
  }
}

void JVMCIRuntime::initialize_HotSpotJVMCIRuntime(JVMCI_TRAPS) {
  if (is_HotSpotJVMCIRuntime_initialized()) {
    if (JVMCIENV->is_hotspot() && UseJVMCINativeLibrary) {
      JVMCI_THROW_MSG(InternalError, "JVMCI has already been enabled in the JVMCI shared library");
    }
  }

  initialize(JVMCIENV);

  // This should only be called in the context of the JVMCI class being initialized
  JVMCIObject result = JVMCIENV->call_HotSpotJVMCIRuntime_runtime(JVMCI_CHECK);

  _HotSpotJVMCIRuntime_instance = JVMCIENV->make_global(result);
}

void JVMCIRuntime::initialize(JVMCIEnv* JVMCIENV) {
  assert(this != NULL, "sanity");
  // Check first without JVMCI_lock
  if (_initialized) {
    return;
  }

  MutexLocker locker(JVMCI_lock);
  // Check again under JVMCI_lock
  if (_initialized) {
    return;
  }

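  // Another thread is initializing this runtime; wait for it to finish.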
  while (_being_initialized) {
    JVMCI_lock->wait();
    if (_initialized) {
      return;
    }
  }

  _being_initialized = true;

  {
    MutexUnlocker unlock(JVMCI_lock);

    HandleMark hm;
    ResourceMark rm;
    JavaThread* THREAD = JavaThread::current();
    if (JVMCIENV->is_hotspot()) {
      HotSpotJVMCI::compute_offsets(CHECK_EXIT);
    } else {
      JNIAccessMark jni(JVMCIENV);

      JNIJVMCI::initialize_ids(jni.env());
      if (jni()->ExceptionCheck()) {
        jni()->ExceptionDescribe();
        fatal("JNI exception during init");
      }
    }
    create_jvmci_primitive_type(T_BOOLEAN, JVMCI_CHECK_EXIT_((void)0));
    create_jvmci_primitive_type(T_BYTE, JVMCI_CHECK_EXIT_((void)0));
    create_jvmci_primitive_type(T_CHAR, JVMCI_CHECK_EXIT_((void)0));
    create_jvmci_primitive_type(T_SHORT, JVMCI_CHECK_EXIT_((void)0));
    create_jvmci_primitive_type(T_INT, JVMCI_CHECK_EXIT_((void)0));
    create_jvmci_primitive_type(T_LONG, JVMCI_CHECK_EXIT_((void)0));
    create_jvmci_primitive_type(T_FLOAT, JVMCI_CHECK_EXIT_((void)0));
    create_jvmci_primitive_type(T_DOUBLE, JVMCI_CHECK_EXIT_((void)0));
    create_jvmci_primitive_type(T_VOID, JVMCI_CHECK_EXIT_((void)0));

    if (!JVMCIENV->is_hotspot()) {
      JVMCIENV->copy_saved_properties();
    }
  }

  _initialized = true;
  _being_initialized = false;
  JVMCI_lock->notify_all();
}

JVMCIObject JVMCIRuntime::create_jvmci_primitive_type(BasicType type, JVMCI_TRAPS) {
  Thread* THREAD = Thread::current();
  // These primitive types are long lived and are created before the runtime is fully set up
  // so skip registering them for scanning.
  JVMCIObject mirror = JVMCIENV->get_object_constant(java_lang_Class::primitive_mirror(type), false, true);
  if (JVMCIENV->is_hotspot()) {
    JavaValue result(T_OBJECT);
    JavaCallArguments args;
    args.push_oop(Handle(THREAD, HotSpotJVMCI::resolve(mirror)));
    args.push_int(type2char(type));
    JavaCalls::call_static(&result, HotSpotJVMCI::HotSpotResolvedPrimitiveType::klass(), vmSymbols::fromMetaspace_name(), vmSymbols::primitive_fromMetaspace_signature(), &args, CHECK_(JVMCIObject()));

    return JVMCIENV->wrap(JNIHandles::make_local((oop)result.get_jobject()));
  } else {
    JNIAccessMark jni(JVMCIENV);
    jobject result = jni()->CallStaticObjectMethod(JNIJVMCI::HotSpotResolvedPrimitiveType::clazz(),
                                           JNIJVMCI::HotSpotResolvedPrimitiveType_fromMetaspace_method(),
                                           mirror.as_jobject(), type2char(type));
    if (jni()->ExceptionCheck()) {
      return JVMCIObject();
    }
    return JVMCIENV->wrap(result);
  }
}

void JVMCIRuntime::initialize_JVMCI(JVMCI_TRAPS) {
  if (!is_HotSpotJVMCIRuntime_initialized()) {
    initialize(JVMCI_CHECK);
    JVMCIENV->call_JVMCI_getRuntime(JVMCI_CHECK);
  }
}

JVMCIObject JVMCIRuntime::get_HotSpotJVMCIRuntime(JVMCI_TRAPS) {
  initialize(JVMCIENV);
  initialize_JVMCI(JVMCI_CHECK_(JVMCIObject()));
  return _HotSpotJVMCIRuntime_instance;
}


// private void CompilerToVM.registerNatives()
JVM_ENTRY_NO_ENV(void, JVM_RegisterJVMCINatives(JNIEnv *env, jclass c2vmClass))
  JNI_JVMCIENV(thread, env);

  if (!EnableJVMCI) {
    JVMCI_THROW_MSG(InternalError, "JVMCI is not enabled");
  }

  JVMCIENV->runtime()->initialize(JVMCIENV);

  {
    ResourceMark rm;
    HandleMark hm(thread);
    ThreadToNativeFromVM trans(thread);

    // Ensure _non_oop_bits is initialized
    Universe::non_oop_word();

    if (JNI_OK != env->RegisterNatives(c2vmClass, CompilerToVM::methods, CompilerToVM::methods_count())) {
      if (!env->ExceptionCheck()) {
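        // Bulk registration failed without raising an exception; register the
        // methods one at a time to identify the offending method.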
        for (int i = 0; i < CompilerToVM::methods_count(); i++) {
          if (JNI_OK != env->RegisterNatives(c2vmClass, CompilerToVM::methods + i, 1)) {
            guarantee(false, "Error registering JNI method %s%s", CompilerToVM::methods[i].name, CompilerToVM::methods[i].signature);
            break;
          }
        }
      } else {
        env->ExceptionDescribe();
      }
      guarantee(false, "Failed registering CompilerToVM native methods");
    }
  }
JVM_END


void JVMCIRuntime::shutdown() {
  if (is_HotSpotJVMCIRuntime_initialized()) {
    _shutdown_called = true;

    THREAD_JVMCIENV(JavaThread::current());
    JVMCIENV->call_HotSpotJVMCIRuntime_shutdown(_HotSpotJVMCIRuntime_instance);
  }
}

void JVMCIRuntime::bootstrap_finished(TRAPS) {
  if (is_HotSpotJVMCIRuntime_initialized()) {
    THREAD_JVMCIENV(JavaThread::current());
    JVMCIENV->call_HotSpotJVMCIRuntime_bootstrapFinished(_HotSpotJVMCIRuntime_instance, JVMCIENV);
  }
}

void JVMCIRuntime::describe_pending_hotspot_exception(JavaThread* THREAD, bool clear) {
  if (HAS_PENDING_EXCEPTION) {
    Handle exception(THREAD, PENDING_EXCEPTION);
    const char* exception_file = THREAD->exception_file();
    int exception_line = THREAD->exception_line();
    CLEAR_PENDING_EXCEPTION;
    if (exception->is_a(SystemDictionary::ThreadDeath_klass())) {
      // Don't print anything if we are being killed.
    } else {
      java_lang_Throwable::print_stack_trace(exception, tty);

      // Clear and ignore any exceptions raised during printing
      CLEAR_PENDING_EXCEPTION;
    }
    if (!clear) {
      THREAD->set_pending_exception(exception(), exception_file, exception_line);
    }
  }
}


void JVMCIRuntime::exit_on_pending_exception(JVMCIEnv* JVMCIENV, const char* message) {
  JavaThread* THREAD = JavaThread::current();

  static volatile int report_error = 0;
  if (!report_error && Atomic::cmpxchg(&report_error, 0, 1) == 0) {
    // Only report an error once
    tty->print_raw_cr(message);
    if (JVMCIENV != NULL) {
      JVMCIENV->describe_pending_exception(true);
    } else {
      describe_pending_hotspot_exception(THREAD, true);
    }
  } else {
    // Allow error reporting thread to print the stack trace.
    THREAD->sleep(200);
  }

  before_exit(THREAD);
  vm_exit(-1);
}

// ------------------------------------------------------------------
// Note: the logic of this method should mirror the logic of
// constantPoolOopDesc::verify_constant_pool_resolve.
bool JVMCIRuntime::check_klass_accessibility(Klass* accessing_klass, Klass* resolved_klass) {
  if (accessing_klass->is_objArray_klass()) {
    accessing_klass = ObjArrayKlass::cast(accessing_klass)->bottom_klass();
  }
  if (!accessing_klass->is_instance_klass()) {
    return true;
  }

  if (resolved_klass->is_objArray_klass()) {
    // Find the element klass, if this is an array.
    resolved_klass = ObjArrayKlass::cast(resolved_klass)->bottom_klass();
  }
  if (resolved_klass->is_instance_klass()) {
    Reflection::VerifyClassAccessResults result =
      Reflection::verify_class_access(accessing_klass, InstanceKlass::cast(resolved_klass), true);
    return result == Reflection::ACCESS_OK;
  }
  return true;
}

// ------------------------------------------------------------------
Klass* JVMCIRuntime::get_klass_by_name_impl(Klass*& accessing_klass,
                                          const constantPoolHandle& cpool,
                                          Symbol* sym,
                                          bool require_local) {
  JVMCI_EXCEPTION_CONTEXT;

  // Now we need to check the SystemDictionary
  if (sym->char_at(0) == JVM_SIGNATURE_CLASS &&
      sym->char_at(sym->utf8_length()-1) == JVM_SIGNATURE_ENDCLASS) {
    // This is a name from a signature.  Strip off the trimmings.
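    // For example, "Ljava/lang/String;" becomes "java/lang/String".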
    // Call recursively so that strippedsym stays in scope for the lookup.
    TempNewSymbol strippedsym = SymbolTable::new_symbol(sym->as_utf8()+1,
                                                        sym->utf8_length()-2);
    return get_klass_by_name_impl(accessing_klass, cpool, strippedsym, require_local);
  }

  Handle loader(THREAD, (oop)NULL);
  Handle domain(THREAD, (oop)NULL);
  if (accessing_klass != NULL) {
    loader = Handle(THREAD, accessing_klass->class_loader());
    domain = Handle(THREAD, accessing_klass->protection_domain());
  }

  Klass* found_klass;
  {
    ttyUnlocker ttyul;  // release tty lock to avoid ordering problems
    MutexLocker ml(Compile_lock);
    if (!require_local) {
      found_klass = SystemDictionary::find_constrained_instance_or_array_klass(sym, loader, CHECK_NULL);
    } else {
      found_klass = SystemDictionary::find_instance_or_array_klass(sym, loader, domain, CHECK_NULL);
    }
  }

  // If we fail to find an array klass, look again for its element type.
  // The element type may be available either locally or via constraints.
  // In either case, if we can find the element type in the system dictionary,
  // we must build an array type around it.  The CI requires array klasses
  // to be loaded if their element klasses are loaded, except when memory
  // is exhausted.
  if (sym->char_at(0) == JVM_SIGNATURE_ARRAY &&
      (sym->char_at(1) == JVM_SIGNATURE_ARRAY || sym->char_at(1) == JVM_SIGNATURE_CLASS)) {
    // We have an unloaded array.
    // Build it on the fly if the element class exists.
    TempNewSymbol elem_sym = SymbolTable::new_symbol(sym->as_utf8()+1,
                                                     sym->utf8_length()-1);

    // Get element Klass recursively.
    Klass* elem_klass =
      get_klass_by_name_impl(accessing_klass,
                             cpool,
                             elem_sym,
                             require_local);
    if (elem_klass != NULL) {
      // Now make an array for it
      return elem_klass->array_klass(THREAD);
    }
  }

  if (found_klass == NULL && !cpool.is_null() && cpool->has_preresolution()) {
    // Look inside the constant pool for pre-resolved class entries.
    for (int i = cpool->length() - 1; i >= 1; i--) {
      if (cpool->tag_at(i).is_klass()) {
        Klass*  kls = cpool->resolved_klass_at(i);
        if (kls->name() == sym) {
          return kls;
        }
      }
    }
  }

  return found_klass;
}

// ------------------------------------------------------------------
Klass* JVMCIRuntime::get_klass_by_name(Klass* accessing_klass,
                                  Symbol* klass_name,
                                  bool require_local) {
  ResourceMark rm;
  constantPoolHandle cpool;
  return get_klass_by_name_impl(accessing_klass,
                                cpool,
                                klass_name,
                                require_local);
}

// ------------------------------------------------------------------
// Implementation of get_klass_by_index.
Klass* JVMCIRuntime::get_klass_by_index_impl(const constantPoolHandle& cpool,
                                        int index,
                                        bool& is_accessible,
                                        Klass* accessor) {
  JVMCI_EXCEPTION_CONTEXT;
  Klass* klass = ConstantPool::klass_at_if_loaded(cpool, index);
  Symbol* klass_name = NULL;
  if (klass == NULL) {
    klass_name = cpool->klass_name_at(index);
  }

  if (klass == NULL) {
    // Not found in constant pool.  Use the name to do the lookup.
    Klass* k = get_klass_by_name_impl(accessor,
                                        cpool,
                                        klass_name,
                                        false);
    // Calculate accessibility the hard way.
    if (k == NULL) {
      is_accessible = false;
    } else if (k->class_loader() != accessor->class_loader() &&
               get_klass_by_name_impl(accessor, cpool, k->name(), true) == NULL) {
      // Loaded only remotely.  Not linked yet.
      is_accessible = false;
    } else {
      // Linked locally, and we must also check public/private, etc.
      is_accessible = check_klass_accessibility(accessor, k);
    }
    if (!is_accessible) {
      return NULL;
    }
    return k;
  }

  // It is known to be accessible, since it was found in the constant pool.
  is_accessible = true;
  return klass;
}

// ------------------------------------------------------------------
// Get a klass from the constant pool.
Klass* JVMCIRuntime::get_klass_by_index(const constantPoolHandle& cpool,
                                   int index,
                                   bool& is_accessible,
                                   Klass* accessor) {
  ResourceMark rm;
  Klass* result = get_klass_by_index_impl(cpool, index, is_accessible, accessor);
  return result;
}

// ------------------------------------------------------------------
// Implementation of get_field_by_index.
//
// Implementation note: the results of field lookups are cached
// in the accessor klass.
void JVMCIRuntime::get_field_by_index_impl(InstanceKlass* klass, fieldDescriptor& field_desc,
                                        int index) {
  JVMCI_EXCEPTION_CONTEXT;

  assert(klass->is_linked(), "must be linked before using its constant-pool");

  constantPoolHandle cpool(thread, klass->constants());

  // Get the field's name, signature, and type.
  Symbol* name  = cpool->name_ref_at(index);

  int nt_index = cpool->name_and_type_ref_index_at(index);
  int sig_index = cpool->signature_ref_index_at(nt_index);
  Symbol* signature = cpool->symbol_at(sig_index);

  // Get the field's declared holder.
  int holder_index = cpool->klass_ref_index_at(index);
  bool holder_is_accessible;
  Klass* declared_holder = get_klass_by_index(cpool, holder_index,
                                               holder_is_accessible,
                                               klass);

  // The declared holder of this field may not have been loaded.
  // Bail out with partial field information.
  if (!holder_is_accessible) {
    return;
  }

  // Perform the field lookup.
  Klass*  canonical_holder =
    InstanceKlass::cast(declared_holder)->find_field(name, signature, &field_desc);
  if (canonical_holder == NULL) {
    return;
  }

  assert(canonical_holder == field_desc.field_holder(), "just checking");
}

// ------------------------------------------------------------------
// Get a field by index from a klass's constant pool.
void JVMCIRuntime::get_field_by_index(InstanceKlass* accessor, fieldDescriptor& fd, int index) {
  ResourceMark rm;
  return get_field_by_index_impl(accessor, fd, index);
}

// ------------------------------------------------------------------
// Perform an appropriate method lookup based on accessor, holder,
// name, signature, and bytecode.
Method* JVMCIRuntime::lookup_method(InstanceKlass* accessor,
                                    Klass*        holder,
                                    Symbol*       name,
                                    Symbol*       sig,
                                    Bytecodes::Code bc,
                                    constantTag   tag) {
  // Accessibility checks are performed in JVMCIEnv::get_method_by_index_impl().
  assert(check_klass_accessibility(accessor, holder), "holder not accessible");

  Method* dest_method;
  LinkInfo link_info(holder, name, sig, accessor, LinkInfo::needs_access_check, tag);
  switch (bc) {
  case Bytecodes::_invokestatic:
    dest_method =
      LinkResolver::resolve_static_call_or_null(link_info);
    break;
  case Bytecodes::_invokespecial:
    dest_method =
      LinkResolver::resolve_special_call_or_null(link_info);
    break;
  case Bytecodes::_invokeinterface:
    dest_method =
      LinkResolver::linktime_resolve_interface_method_or_null(link_info);
    break;
  case Bytecodes::_invokevirtual:
    dest_method =
      LinkResolver::linktime_resolve_virtual_method_or_null(link_info);
    break;
  default: ShouldNotReachHere();
  }

  return dest_method;
}


// ------------------------------------------------------------------
Method* JVMCIRuntime::get_method_by_index_impl(const constantPoolHandle& cpool,
                                               int index, Bytecodes::Code bc,
                                               InstanceKlass* accessor) {
  if (bc == Bytecodes::_invokedynamic) {
    ConstantPoolCacheEntry* cpce = cpool->invokedynamic_cp_cache_entry_at(index);
    bool is_resolved = !cpce->is_f1_null();
    if (is_resolved) {
      // Get the invoker Method* from the constant pool.
      // (The appendix argument, if any, will be noted in the method's signature.)
      Method* adapter = cpce->f1_as_method();
      return adapter;
    }

    return NULL;
  }

  int holder_index = cpool->klass_ref_index_at(index);
  bool holder_is_accessible;
  Klass* holder = get_klass_by_index_impl(cpool, holder_index, holder_is_accessible, accessor);

  // Get the method's name and signature.
  Symbol* name_sym = cpool->name_ref_at(index);
  Symbol* sig_sym  = cpool->signature_ref_at(index);

  if (cpool->has_preresolution()
      || ((holder == SystemDictionary::MethodHandle_klass() || holder == SystemDictionary::VarHandle_klass()) &&
          MethodHandles::is_signature_polymorphic_name(holder, name_sym))) {
    // Short-circuit lookups for JSR 292-related call sites.
    // That is, do not rely only on name-based lookups, because they may fail
    // if the names are not resolvable in the boot class loader (7056328).
    switch (bc) {
    case Bytecodes::_invokevirtual:
    case Bytecodes::_invokeinterface:
    case Bytecodes::_invokespecial:
    case Bytecodes::_invokestatic:
      {
        Method* m = ConstantPool::method_at_if_loaded(cpool, index);
        if (m != NULL) {
          return m;
        }
      }
      break;
    default:
      break;
    }
  }

  if (holder_is_accessible) { // Our declared holder is loaded.
    constantTag tag = cpool->tag_ref_at(index);
    Method* m = lookup_method(accessor, holder, name_sym, sig_sym, bc, tag);
    if (m != NULL) {
      // We found the method.
      return m;
    }
  }

  // Either the declared holder was not loaded, or the method could
  // not be found.

  return NULL;
}

// ------------------------------------------------------------------
InstanceKlass* JVMCIRuntime::get_instance_klass_for_declared_method_holder(Klass* method_holder) {
  // For the case of <array>.clone(), the method holder can be an ArrayKlass*
  // instead of an InstanceKlass*.  For that case simply pretend that the
  // declared holder is Object.clone since that's where the call will bottom out.
  if (method_holder->is_instance_klass()) {
    return InstanceKlass::cast(method_holder);
  } else if (method_holder->is_array_klass()) {
    return SystemDictionary::Object_klass();
  } else {
    ShouldNotReachHere();
  }
  return NULL;
}


// ------------------------------------------------------------------
Method* JVMCIRuntime::get_method_by_index(const constantPoolHandle& cpool,
                                     int index, Bytecodes::Code bc,
                                     InstanceKlass* accessor) {
  ResourceMark rm;
  return get_method_by_index_impl(cpool, index, bc, accessor);
}

// ------------------------------------------------------------------
// Check for changes to the system dictionary during compilation
// class loads, evolution, breakpoints
JVMCI::CodeInstallResult JVMCIRuntime::validate_compile_task_dependencies(Dependencies* dependencies, JVMCICompileState* compile_state, char** failure_detail) {
  // If JVMTI capabilities were enabled during compile, the compilation is invalidated.
  if (compile_state != NULL && compile_state->jvmti_state_changed()) {
    *failure_detail = (char*) "Jvmti state change during compilation invalidated dependencies";
    return JVMCI::dependencies_failed;
  }

  CompileTask* task = compile_state == NULL ? NULL : compile_state->task();
  Dependencies::DepType result = dependencies->validate_dependencies(task, failure_detail);
  if (result == Dependencies::end_marker) {
    return JVMCI::ok;
  }

  return JVMCI::dependencies_failed;
}

// Reports a pending exception and exits the VM.
static void fatal_exception_in_compile(JVMCIEnv* JVMCIENV, JavaThread* thread, const char* msg) {
  // Only report a fatal JVMCI compilation exception once
  static volatile int report_init_failure = 0;
  if (!report_init_failure && Atomic::cmpxchg(&report_init_failure, 0, 1) == 0) {
      tty->print_cr("%s:", msg);
      JVMCIENV->describe_pending_exception(true);
  }
  JVMCIENV->clear_pending_exception();
  before_exit(thread);
  vm_exit(-1);
}

void JVMCIRuntime::compile_method(JVMCIEnv* JVMCIENV, JVMCICompiler* compiler, const methodHandle& method, int entry_bci) {
  JVMCI_EXCEPTION_CONTEXT

  JVMCICompileState* compile_state = JVMCIENV->compile_state();

  bool is_osr = entry_bci != InvocationEntryBci;
  if (compiler->is_bootstrapping() && is_osr) {
    // no OSR compilations during bootstrap - the compiler is just too slow at this point,
    // and we know that there are no endless loops
    compile_state->set_failure(true, "No OSR during bootstrap");
    return;
  }
  if (JVMCI::shutdown_called()) {
    compile_state->set_failure(false, "Avoiding compilation during shutdown");
    return;
  }

  HandleMark hm;
1303   JVMCIObject receiver = get_HotSpotJVMCIRuntime(JVMCIENV);
1304   if (JVMCIENV->has_pending_exception()) {
1305     fatal_exception_in_compile(JVMCIENV, thread, "Exception during HotSpotJVMCIRuntime initialization");
1306   }
1307   JVMCIObject jvmci_method = JVMCIENV->get_jvmci_method(method, JVMCIENV);
1308   if (JVMCIENV->has_pending_exception()) {
1309     JVMCIENV->describe_pending_exception(true);
1310     compile_state->set_failure(false, "exception getting JVMCI wrapper method");
1311     return;
1312   }
1313 
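  // Call up into Java: HotSpotJVMCIRuntime.compileMethod runs the JVMCI compiler
  // for the wrapped method. The compile_state pointer is passed as an opaque
  // jlong so the Java side can refer back to this compilation when it calls into
  // the VM. The Java-side counterpart is expected to look roughly like this
  // (sketch, not the authoritative declaration):
  //
  //   HotSpotCompilationRequestResult compileMethod(HotSpotResolvedJavaMethod method,
  //                                                 int entryBCI, long compileState, int id);
  //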
  JVMCIObject result_object = JVMCIENV->call_HotSpotJVMCIRuntime_compileMethod(receiver, jvmci_method, entry_bci,
                                                                     (jlong) compile_state, compile_state->task()->compile_id());
  if (!JVMCIENV->has_pending_exception()) {
    if (result_object.is_non_null()) {
      JVMCIObject failure_message = JVMCIENV->get_HotSpotCompilationRequestResult_failureMessage(result_object);
      if (failure_message.is_non_null()) {
        // Copy failure reason into resource memory first ...
        const char* failure_reason = JVMCIENV->as_utf8_string(failure_message);
        // ... and then into the C heap.
        failure_reason = os::strdup(failure_reason, mtJVMCI);
        bool retryable = JVMCIENV->get_HotSpotCompilationRequestResult_retry(result_object) != 0;
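        // The strdup'ed reason is C-heap allocated; the final 'true' argument tells
        // the compile state that the string is on the C heap (rather than in
        // resource memory) and so must eventually be freed.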
        compile_state->set_failure(retryable, failure_reason, true);
      } else {
        if (compile_state->task()->code() == NULL) {
          compile_state->set_failure(true, "no nmethod produced");
        } else {
          compile_state->task()->set_num_inlined_bytecodes(JVMCIENV->get_HotSpotCompilationRequestResult_inlinedBytecodes(result_object));
          compiler->inc_methods_compiled();
        }
      }
    } else {
      assert(false, "JVMCICompiler.compileMethod should always return non-null");
    }
  } else {
    // An uncaught exception here implies failure during compiler initialization.
    // The only sensible thing to do here is to exit the VM.
    fatal_exception_in_compile(JVMCIENV, thread, "Exception during JVMCI compiler initialization");
  }
  if (compiler->is_bootstrapping()) {
    compiler->set_bootstrap_compilation_request_handled();
  }
}


// ------------------------------------------------------------------
JVMCI::CodeInstallResult JVMCIRuntime::register_method(JVMCIEnv* JVMCIENV,
                                const methodHandle& method,
                                nmethod*& nm,
                                int entry_bci,
                                CodeOffsets* offsets,
                                int orig_pc_offset,
                                CodeBuffer* code_buffer,
                                int frame_words,
                                OopMapSet* oop_map_set,
                                ExceptionHandlerTable* handler_table,
                                ImplicitExceptionTable* implicit_exception_table,
                                AbstractCompiler* compiler,
                                DebugInformationRecorder* debug_info,
                                Dependencies* dependencies,
                                int compile_id,
                                bool has_unsafe_access,
                                bool has_wide_vector,
                                JVMCIObject compiled_code,
                                JVMCIObject nmethod_mirror,
                                FailedSpeculation** failed_speculations,
                                char* speculations,
                                int speculations_len) {
  JVMCI_EXCEPTION_CONTEXT;
  nm = NULL;
  int comp_level = CompLevel_full_optimization;
  char* failure_detail = NULL;

  bool install_default = JVMCIENV->get_HotSpotNmethod_isDefault(nmethod_mirror) != 0;
  assert(JVMCIENV->isa_HotSpotNmethod(nmethod_mirror), "must be");
  JVMCIObject name = JVMCIENV->get_InstalledCode_name(nmethod_mirror);
  const char* nmethod_mirror_name = name.is_null() ? NULL : JVMCIENV->as_utf8_string(name);
  int nmethod_mirror_index;
  if (!install_default) {
    // Reserve or initialize mirror slot in the oops table.
    OopRecorder* oop_recorder = debug_info->oop_recorder();
    nmethod_mirror_index = oop_recorder->allocate_oop_index(nmethod_mirror.is_hotspot() ? nmethod_mirror.as_jobject() : NULL);
  } else {
    // A default HotSpotNmethod mirror is never tracked by the nmethod
    nmethod_mirror_index = -1;
  }

  JVMCI::CodeInstallResult result;
  {
    // To prevent compile queue updates.
    MutexLocker locker(THREAD, MethodCompileQueue_lock);

    // Prevent SystemDictionary::add_to_hierarchy from running
    // and invalidating our dependencies until we install this method.
    MutexLocker ml(Compile_lock);

    // Encode the dependencies now, so we can check them right away.
    dependencies->encode_content_bytes();

    // Record the dependencies for the current compile in the log
    if (LogCompilation) {
      for (Dependencies::DepStream deps(dependencies); deps.next(); ) {
        deps.log_dependency();
      }
    }

    // Check for {class loads, evolution, breakpoints} during compilation
    result = validate_compile_task_dependencies(dependencies, JVMCIENV->compile_state(), &failure_detail);
    if (result != JVMCI::ok) {
      // While not a true deoptimization, it is a preemptive decompile.
      MethodData* mdp = method()->method_data();
      if (mdp != NULL) {
        mdp->inc_decompile_count();
#ifdef ASSERT
        if (mdp->decompile_count() > (uint)PerMethodRecompilationCutoff) {
          ResourceMark m;
          tty->print_cr("WARN: endless recompilation of %s. Method was set to not compilable.", method()->name_and_sig_as_C_string());
        }
#endif
      }

      // All buffers in the CodeBuffer are allocated in the CodeCache.
      // If the code buffer is created on each compile attempt
      // as in C2, then it must be freed.
      //code_buffer->free_blob();
    } else {
      nm = nmethod::new_nmethod(method,
                                compile_id,
                                entry_bci,
                                offsets,
                                orig_pc_offset,
                                debug_info, dependencies, code_buffer,
                                frame_words, oop_map_set,
                                handler_table, implicit_exception_table,
                                compiler, comp_level,
                                speculations, speculations_len,
                                nmethod_mirror_index, nmethod_mirror_name, failed_speculations);


      // Free codeBlobs
      if (nm == NULL) {
        // The CodeCache is full.  Print out warning and disable compilation.
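        // Drop the locks taken above while reporting: handling a full code cache
        // may log the event and turn off compilation, and is not meant to run
        // under the compile-queue and Compile_lock.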
        {
          MutexUnlocker ml(Compile_lock);
          MutexUnlocker locker(MethodCompileQueue_lock);
          CompileBroker::handle_full_code_cache(CodeCache::get_code_blob_type(comp_level));
        }
      } else {
        nm->set_has_unsafe_access(has_unsafe_access);
        nm->set_has_wide_vectors(has_wide_vector);

        // Record successful registration.
        // (Put nm into the task handle *before* publishing to the Java heap.)
        if (JVMCIENV->compile_state() != NULL) {
          JVMCIENV->compile_state()->task()->set_code(nm);
        }

        JVMCINMethodData* data = nm->jvmci_nmethod_data();
        assert(data != NULL, "must be");
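        // A "default" nmethod is installed as the method's regular implementation
        // below (set_code for a normal entry, add_osr_nmethod for OSR); a
        // non-default nmethod is only reachable through its HotSpotNmethod mirror
        // and is never hooked into the Method itself.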
        if (install_default) {
          assert(!nmethod_mirror.is_hotspot() || data->get_nmethod_mirror(nm, /* phantom_ref */ false) == NULL, "must be");
          if (entry_bci == InvocationEntryBci) {
            if (TieredCompilation) {
              // If there is an old version we're done with it
              CompiledMethod* old = method->code();
              if (TraceMethodReplacement && old != NULL) {
                ResourceMark rm;
                char *method_name = method->name_and_sig_as_C_string();
                tty->print_cr("Replacing method %s", method_name);
              }
              if (old != NULL) {
                old->make_not_entrant();
              }
            }

            LogTarget(Info, nmethod, install) lt;
            if (lt.is_enabled()) {
              ResourceMark rm;
              char *method_name = method->name_and_sig_as_C_string();
              lt.print("Installing method (%d) %s [entry point: %p]",
                        comp_level, method_name, nm->entry_point());
            }
            // Allow the code to be executed
            MutexLocker ml(CompiledMethod_lock, Mutex::_no_safepoint_check_flag);
            if (nm->make_in_use()) {
              method->set_code(method, nm);
            }
          } else {
            LogTarget(Info, nmethod, install) lt;
            if (lt.is_enabled()) {
              ResourceMark rm;
              char *method_name = method->name_and_sig_as_C_string();
              lt.print("Installing osr method (%d) %s @ %d",
                        comp_level, method_name, entry_bci);
            }
            MutexLocker ml(CompiledMethod_lock, Mutex::_no_safepoint_check_flag);
            if (nm->make_in_use()) {
              InstanceKlass::cast(method->method_holder())->add_osr_nmethod(nm);
            }
          }
        } else {
          assert(!nmethod_mirror.is_hotspot() || data->get_nmethod_mirror(nm, /* phantom_ref */ false) == HotSpotJVMCI::resolve(nmethod_mirror), "must be");
        }
      }
      result = nm != NULL ? JVMCI::ok : JVMCI::cache_full;
    }
  }

  // String creation must be done outside lock
  if (failure_detail != NULL) {
    // A failure to allocate the string is silently ignored.
    JVMCIObject message = JVMCIENV->create_string(failure_detail, JVMCIENV);
    JVMCIENV->set_HotSpotCompiledNmethod_installationFailureMessage(compiled_code, message);
  }

  // JVMTI -- compiled method notification (must be done outside lock)
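  // Posting the event may invoke JVMTI agent callbacks, which can run arbitrary
  // code; doing this outside the locks above avoids lock-order problems.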
  if (nm != NULL) {
    nm->post_compiled_method_load_event();
  }

  return result;
}