/*
 * Copyright (c) 1997, 2019, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 2014, 2018, Red Hat Inc. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "code/compiledIC.hpp"
#include "code/icBuffer.hpp"
#include "code/nmethod.hpp"
#include "memory/resourceArea.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/safepoint.hpp"

// ----------------------------------------------------------------------------

#define __ _masm.
address CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf, address mark) {
  // Stub is fixed up when the corresponding call is converted from
  // calling compiled code to calling interpreted code.
  // mov rmethod, 0
  // jmp -4 # to self

  if (mark == NULL) {
    mark = cbuf.insts_mark();  // Get mark within main instrs section.
  }

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a stub.
  MacroAssembler _masm(&cbuf);

  address base = __ start_a_stub(to_interp_stub_size());
  int offset = __ offset();
  if (base == NULL) {
    return NULL;  // CodeBuffer::expand failed
  }
  // static stub relocation stores the instruction address of the call
  __ relocate(static_stub_Relocation::spec(mark));
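  // Recording the call's address lets find_stub() (used by
  // set_to_interpreted() below) locate this stub from the call site
  // when the call is (re)resolved.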

#if INCLUDE_AOT
  // Don't create a Metadata reloc if we're generating immutable PIC.
  if (cbuf.immutable_PIC()) {
    __ movptr(rmethod, 0);
    __ movptr(rscratch1, 0);
    __ br(rscratch1);

  } else
#endif
  {
    __ emit_static_call_stub();
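    // This emits the pattern sketched at the top of this function: an isb,
    // a Metadata-relocated "mov rmethod, 0" and a far branch to self (see
    // MacroAssembler::emit_static_call_stub for the exact layout), which
    // set_to_interpreted() below patches with the real Method* and entry.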
  }

  assert((__ offset() - offset) <= (int)to_interp_stub_size(), "stub too big");
  __ end_a_stub();
  return base;
}
#undef __

int CompiledStaticCall::to_interp_stub_size() {
  // isb; movk; movz; movz; movk; movz; movz; br
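  // That is one isb, two 3-instruction movptr() immediate loads (rmethod
  // and rscratch1) and the final br: 8 instructions of 4 bytes each.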
  return 8 * NativeInstruction::instruction_size;
}

int CompiledStaticCall::to_trampoline_stub_size() {
  // Somewhat pessimistically, we count 3 instructions here (although
  // there are only two) because we sometimes emit an alignment nop.
  // Trampoline stubs are always word aligned.
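  // With 4-byte instructions and an 8-byte destination word this is
  // 3 * 4 + 8 = 20 bytes.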
  return 3 * NativeInstruction::instruction_size + wordSize;
}

// Relocation entries for call stub, compiled java to interpreter.
int CompiledStaticCall::reloc_to_interp_stub() {
  return 4; // 3 in emit_to_interp_stub + 1 in emit_call
}

#if INCLUDE_AOT
#define __ _masm.
void CompiledStaticCall::emit_to_aot_stub(CodeBuffer &cbuf, address mark) {
  if (!UseAOT) {
    return;
  }
  // Stub is fixed up when the corresponding call is converted from
  // calling compiled code to calling aot code.
  // mov r, imm64_aot_code_address
  // jmp r

  if (mark == NULL) {
    mark = cbuf.insts_mark();  // Get mark within main instrs section.
  }

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a stub.
  MacroAssembler _masm(&cbuf);

  address base = __ start_a_stub(to_aot_stub_size());
  guarantee(base != NULL, "out of space");

  // Static stub relocation stores the instruction address of the call.
  __ relocate(static_stub_Relocation::spec(mark, true /* is_aot */));
  // Load destination AOT code address.
  __ movptr(rscratch1, 0);  // address is zapped till fixup time.
  // This is recognized as unresolved by relocs/nativeinst/ic code.
  __ br(rscratch1);

  assert(__ pc() - base <= to_aot_stub_size(), "wrong stub size");

  // Update current stubs pointer and restore insts_end.
  __ end_a_stub();
}
#undef __

int CompiledStaticCall::to_aot_stub_size() {
  if (UseAOT) {
    return 5 * 4;  // movz; movk; movk; movk; br
  } else {
    return 0;
  }
}

// Relocation entries for call stub, compiled java to aot.
int CompiledStaticCall::reloc_to_aot_stub() {
  if (UseAOT) {
    return 5 * 4;  // movz; movk; movk; movk; br
  } else {
    return 0;
  }
}
#endif // INCLUDE_AOT

void CompiledDirectStaticCall::set_to_interpreted(const methodHandle& callee, address entry) {
  address stub = find_stub(false /* is_aot */);
  guarantee(stub != NULL, "stub not found");

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("CompiledDirectStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
                  p2i(instruction_address()),
                  callee->name_and_sig_as_C_string());
  }

  // Creation also verifies the object.
  NativeMovConstReg* method_holder
    = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);
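  // The one-instruction offset skips the stub's leading isb (see the
  // layout comment in to_interp_stub_size()) to reach the Method* load.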

#ifdef ASSERT
  NativeGeneralJump* jump = nativeGeneralJump_at(method_holder->next_instruction_address());
  verify_mt_safe(callee, entry, method_holder, jump);
#endif

  // Update stub.
  method_holder->set_data((intptr_t)callee());
  NativeGeneralJump::insert_unconditional(method_holder->next_instruction_address(), entry);
  ICache::invalidate_range(stub, to_interp_stub_size());
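  // The stub is fully written and the instruction cache flushed before the
  // call is redirected to it below, so a concurrent thread never executes
  // a half-patched stub.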
  // Update jump to call.
  set_destination_mt_safe(stub);
}

void CompiledDirectStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
  // Reset stub.
  address stub = static_stub->addr();
  assert(stub != NULL, "stub not found");
  assert(CompiledICLocker::is_safe(stub), "mt unsafe call");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder
    = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);
  method_holder->set_data(0);
  if (!static_stub->is_aot()) {
    NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());
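    // (address)-1 selects the distinguished "jump to self" encoding (see
    // NativeJump::set_jump_destination), marking the stub clean/unresolved.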
    jump->set_jump_destination((address)-1);
  }
}

//-----------------------------------------------------------------------------
// Non-product mode code
#ifndef PRODUCT

void CompiledDirectStaticCall::verify() {
  // Verify call.
  _call->verify();
  _call->verify_alignment();

  // Verify stub.
  address stub = find_stub(false /* is_aot */);
  assert(stub != NULL, "no stub found for static call");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder
    = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);
  NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());

  // Verify state.
  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
}

#endif // !PRODUCT