1 /*
2 * Copyright (c) 1997, 2018, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "asm/macroAssembler.inline.hpp"
27 #include "code/compiledIC.hpp"
28 #include "code/icBuffer.hpp"
29 #include "code/nativeInst.hpp"
30 #include "code/nmethod.hpp"
31 #include "memory/resourceArea.hpp"
32 #include "runtime/mutexLocker.hpp"
33 #include "runtime/safepoint.hpp"
34
35 // ----------------------------------------------------------------------------
36 #if COMPILER2_OR_JVMCI
37 #define __ _masm.
38 // emit call stub, compiled java to interpreter
// Emit the static call stub (compiled java -> interpreter) into the stubs
// section of 'cbuf'.  The stub initially holds a NULL Method* literal and a
// jump to the -1 "clean" sentinel; it is fixed up by set_to_interpreted()
// when the corresponding call is converted from calling compiled code to
// calling interpreted code.
//   cbuf - code buffer to emit into
//   mark - instruction address of the call this stub belongs to, or NULL to
//          use the current mark of the main instructions section
// Returns the stub's start address, or NULL if CodeBuffer::expand failed.
//
// Stub layout:
//   set (empty), R9
//   b -1
address CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf, address mark) {
  if (mark == NULL) {
    mark = cbuf.insts_mark(); // get mark within main instrs section
  }

  MacroAssembler _masm(&cbuf);

  address base = __ start_a_stub(to_interp_stub_size());
  if (base == NULL) {
    return NULL; // CodeBuffer::expand failed
  }

  // static stub relocation stores the instruction address of the call
  __ relocate(static_stub_Relocation::spec(mark));

  InlinedMetadata object_literal(NULL);
  // Must be a single instruction: set_to_interpreted() locates the jump via
  // NativeMovConstReg::next_instruction_address() immediately after it.
  __ ldr_literal(Rmethod, object_literal);

  __ set_inst_mark(); // Who uses this?

  bool near_range = __ cache_fully_reachable();
  InlinedAddress dest((address)-1);
  address branch_site = __ pc();
  if (near_range) {
    // Branch-to-self encodes the special NativeJump -1 ("clean") destination.
    __ b(branch_site);
  } else {
    // Far form: load target from a literal and jump through Rtemp.
    // Can't trash LR, FP, or argument registers.
    __ indirect_jump(dest, Rtemp);
  }
  __ bind_literal(object_literal); // includes spec_for_immediate reloc
  if (!near_range) {
    __ bind_literal(dest); // special NativeJump -1 destination
  }

  // The emitted stub must fit in the reservation made by start_a_stub().
  assert(__ pc() - base <= to_interp_stub_size(), "wrong stub size");

  // Update current stubs pointer and restore code_end.
  __ end_a_stub();
  return base;
}
86 #undef __
87
88 // Relocation entries for call stub, compiled java to interpreter.
reloc_to_interp_stub()89 int CompiledStaticCall::reloc_to_interp_stub() {
90 return 10; // 4 in emit_to_interp_stub + 1 in Java_Static_Call
91 }
92 #endif // COMPILER2_OR_JVMCI
93
to_trampoline_stub_size()94 int CompiledStaticCall::to_trampoline_stub_size() {
95 // ARM doesn't use trampolines.
96 return 0;
97 }
98
// size of C2 call stub, compiled java to interpreter
to_interp_stub_size()100 int CompiledStaticCall::to_interp_stub_size() {
101 return 8 * NativeInstruction::instruction_size;
102 }
103
// Convert this static call to call the interpreter: patch the call's stub so
// it loads 'callee' into the method register and jumps to 'entry', then
// redirect the call instruction itself to the stub.
//   callee - the Method* the interpreter entry expects
//   entry  - interpreter entry point to jump to
// NOTE(review): the patch order (literal, then jump, then icache flush, then
// MT-safe call-site update) looks deliberate — do not reorder.
void CompiledDirectStaticCall::set_to_interpreted(const methodHandle& callee, address entry) {
  address stub = find_stub(/*is_aot*/ false);
  guarantee(stub != NULL, "stub not found");

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("CompiledDirectStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
                  p2i(instruction_address()),
                  callee->name_and_sig_as_C_string());
  }

  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  // The jump immediately follows the single-instruction literal load
  // emitted by emit_to_interp_stub().
  NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());

#ifdef ASSERT
  // read the value once
  volatile intptr_t data = method_holder->data();
  volatile address destination = jump->jump_destination();
  // The stub must either still be in its clean state (0 / -1 sentinels set
  // by set_stub_to_clean / emit_to_interp_stub) or already hold our values.
  assert(data == 0 || data == (intptr_t)callee(),
         "a) MT-unsafe modification of inline cache");
  assert(destination == (address)-1 || destination == entry,
         "b) MT-unsafe modification of inline cache");
#endif

  // Update stub.
  method_holder->set_data((intptr_t)callee());
  jump->set_jump_destination(entry);

  // Make the patched stub visible to instruction fetch before any thread
  // can be steered into it.
  ICache::invalidate_range(stub, to_interp_stub_size());

  // Update jump to call.
  set_destination_mt_safe(stub);
}
138
// Reset the call stub referenced by 'static_stub' to its clean state:
// NULL Method* literal and the -1 jump sentinel (the same values
// emit_to_interp_stub() produces, and that set_to_interpreted() asserts on).
void CompiledDirectStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
  // Patching is only safe single-threaded: under the IC lock or at a safepoint.
  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
  // Reset stub.
  address stub = static_stub->addr();
  assert(stub != NULL, "stub not found");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());
  method_holder->set_data(0);
  jump->set_jump_destination((address)-1);
}
150
151 //-----------------------------------------------------------------------------
152 // Non-product mode code
153 #ifndef PRODUCT
154
// Sanity-check this static call site and its stub (non-product builds only).
void CompiledDirectStaticCall::verify() {
  // Verify call.
  _call->verify();
  if (os::is_MP()) {
    // Alignment only matters when another CPU could observe a partial patch.
    _call->verify_alignment();
  }

  // Verify stub.
  address stub = find_stub(/*is_aot*/ false);
  assert(stub != NULL, "no stub found for static call");
  // Creation also verifies the object; the locals are intentionally unused
  // beyond that constructor-time check.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());

  // Verify state.
  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
}
172
173 #endif // !PRODUCT
174