/*
 * Copyright (c) 1997, 2019, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 2012, 2019, SAP SE. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "code/compiledIC.hpp"
#include "memory/resourceArea.hpp"
#include "nativeInst_ppc.hpp"
#include "oops/compressedOops.inline.hpp"
#include "oops/oop.hpp"
#include "runtime/handles.hpp"
#include "runtime/orderAccess.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/stubRoutines.hpp"
#include "utilities/ostream.hpp"
#ifdef COMPILER1
#include "c1/c1_Runtime1.hpp"
#endif

// We use an illtrap for marking a method as not_entrant or zombie.
// Work around a C++ compiler bug which changes 'this'.
bool NativeInstruction::is_sigill_zombie_not_entrant_at(address addr) {
  if (*(int*)addr != 0 /*illtrap*/) return false;
  CodeBlob* cb = CodeCache::find_blob_unsafe(addr);
  if (cb == NULL || !cb->is_nmethod()) return false;
  nmethod *nm = (nmethod *)cb;
  // This method is not_entrant or zombie iff the illtrap instruction is
  // located at the verified entry point.
  return nm->verified_entry_point() == addr;
}

#ifdef ASSERT
void NativeInstruction::verify() {
  // Make sure code pattern is actually an instruction address.
  address addr = addr_at(0);
  if (addr == 0 || ((intptr_t)addr & 3) != 0) {
    fatal("not an instruction address");
  }
}
#endif // ASSERT

// Extract call destination from a NativeCall. The call might use a trampoline stub.
address NativeCall::destination() const {
  address addr = (address)this;
  address destination = Assembler::bxx_destination(addr);

  // Do we use a trampoline stub for this call?
  // Trampoline stubs are located behind the main code.
  if (destination > addr) {
    // Filter out recursive method invocation (call to verified/unverified entry point).
    CodeBlob* cb = CodeCache::find_blob_unsafe(addr);   // Else we get assertion if nmethod is zombie.
    assert(cb && cb->is_nmethod(), "sanity");
    nmethod *nm = (nmethod *)cb;
    if (nm->stub_contains(destination) && is_NativeCallTrampolineStub_at(destination)) {
      // Yes we do, so get the destination from the trampoline stub.
      const address trampoline_stub_addr = destination;
      destination = NativeCallTrampolineStub_at(trampoline_stub_addr)->destination(nm);
    }
  }

  return destination;
}

// Similar to replace_mt_safe, but just changes the destination. The
// important thing is that free-running threads are able to execute this
// call instruction at all times. Thus, the displacement field must be
// instruction-word-aligned.
//
// Used in the runtime linkage of calls; see class CompiledIC.
//
// Add parameter assert_lock to switch off assertion
// during code generation, where no patching lock is needed.
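//
// A sketch of the two call-site shapes handled here (mnemonics illustrative;
// see the trampoline stub layout near the end of this file):
//
//   near call:  bl <dest>             // dest within direct-branch range
//   far call:   bl <trampoline_stub>  // stub loads dest from the TOC and
//                                     // branches via CTR
//
// Either way only the single, word-aligned 'bl' instruction is rewritten,
// which is atomic with respect to concurrently executing threads.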
void NativeCall::set_destination_mt_safe(address dest, bool assert_lock) {
  assert(!assert_lock ||
         (Patching_lock->is_locked() || SafepointSynchronize::is_at_safepoint()) ||
         CompiledICLocker::is_safe(addr_at(0)),
         "concurrent code patching");

  ResourceMark rm;
  int code_size = 1 * BytesPerInstWord;
  address addr_call = addr_at(0);
  assert(MacroAssembler::is_bl(*(int*)addr_call), "unexpected code at call-site");

  CodeBuffer cb(addr_call, code_size + 1);
  MacroAssembler* a = new MacroAssembler(&cb);

  // Patch the call.
  if (!ReoptimizeCallSequences || !a->is_within_range_of_b(dest, addr_call)) {
    address trampoline_stub_addr = get_trampoline();

    // We did not find a trampoline stub because the current codeblob
    // does not provide this information. The branch will be patched
    // later during a final fixup, when all necessary information is
    // available.
    if (trampoline_stub_addr == 0)
      return;

    // Patch the constant in the call's trampoline stub.
    NativeCallTrampolineStub_at(trampoline_stub_addr)->set_destination(dest);
    dest = trampoline_stub_addr;
  }

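  // Make sure the new trampoline destination (stored above) is visible to
  // other threads before they can observe the retargeted branch.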
  OrderAccess::release();
  a->bl(dest);

  ICache::ppc64_flush_icache_bytes(addr_call, code_size);
}

address NativeCall::get_trampoline() {
  address call_addr = addr_at(0);

  CodeBlob *code = CodeCache::find_blob(call_addr);
  assert(code != NULL, "Could not find the containing code blob");

  // There are no relocations available when the code gets relocated
  // because of CodeBuffer expansion.
  if (code->relocation_size() == 0)
    return NULL;

  address bl_destination = Assembler::bxx_destination(call_addr);
  if (code->contains(bl_destination) &&
      is_NativeCallTrampolineStub_at(bl_destination))
    return bl_destination;

  // If the codeBlob is not an nmethod, this is because we get here from the
  // CodeBlob constructor, which is called within the nmethod constructor.
  return trampoline_stub_Relocation::get_trampoline_for(call_addr, (nmethod*)code);
}

#ifdef ASSERT
void NativeCall::verify() {
  address addr = addr_at(0);

  if (!NativeCall::is_call_at(addr)) {
    tty->print_cr("not a NativeCall at " PTR_FORMAT, p2i(addr));
    // TODO: PPC port: Disassembler::decode(addr - 20, addr + 20, tty);
    fatal("not a NativeCall at " PTR_FORMAT, p2i(addr));
  }
}
#endif // ASSERT

#ifdef ASSERT
void NativeFarCall::verify() {
  address addr = addr_at(0);

  NativeInstruction::verify();
  if (!NativeFarCall::is_far_call_at(addr)) {
    tty->print_cr("not a NativeFarCall at " PTR_FORMAT, p2i(addr));
    // TODO: PPC port: Disassembler::decode(addr, 20, 20, tty);
    fatal("not a NativeFarCall at " PTR_FORMAT, p2i(addr));
  }
}
#endif // ASSERT

address NativeMovConstReg::next_instruction_address() const {
#ifdef ASSERT
  CodeBlob* nm = CodeCache::find_blob(instruction_address());
  assert(!MacroAssembler::is_set_narrow_oop(addr_at(0), nm->content_begin()), "Should not patch narrow oop here");
#endif

  if (MacroAssembler::is_load_const_from_method_toc_at(addr_at(0))) {
    return addr_at(load_const_from_method_toc_instruction_size);
  } else {
    return addr_at(load_const_instruction_size);
  }
}

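// Read back the constant currently encoded at this site. The constant may
// live in the instruction stream (load_const, set_narrow_oop) or in the
// method's TOC (load_const_from_method_toc).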
intptr_t NativeMovConstReg::data() const {
  address addr = addr_at(0);

  if (MacroAssembler::is_load_const_at(addr)) {
    return MacroAssembler::get_const(addr);
  }

  CodeBlob* cb = CodeCache::find_blob_unsafe(addr);
  if (MacroAssembler::is_set_narrow_oop(addr, cb->content_begin())) {
    narrowOop no = (narrowOop)MacroAssembler::get_narrow_oop(addr, cb->content_begin());
    return cast_from_oop<intptr_t>(CompressedOops::decode(no));
  } else {
    assert(MacroAssembler::is_load_const_from_method_toc_at(addr), "must be load_const_from_pool");

    address ctable = cb->content_begin();
    int offset = MacroAssembler::get_offset_of_load_const_from_method_toc_at(addr);
    return *(intptr_t *)(ctable + offset);
  }
}

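// Store 'data' into the instruction stream (or the method's TOC), leaving
// relocation records untouched; set_data() below additionally updates those.
// Returns the address behind the patched instruction sequence.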
address NativeMovConstReg::set_data_plain(intptr_t data, CodeBlob *cb) {
  address addr = instruction_address();
  address next_address = NULL;
  if (!cb) cb = CodeCache::find_blob(addr);

  if (cb != NULL && MacroAssembler::is_load_const_from_method_toc_at(addr)) {
    // A load from the method's TOC (ctable).
    assert(cb->is_nmethod(), "must be nmethod");
    const address ctable = cb->content_begin();
    const int toc_offset = MacroAssembler::get_offset_of_load_const_from_method_toc_at(addr);
    *(intptr_t *)(ctable + toc_offset) = data;
    next_address = addr + BytesPerInstWord;
  } else if (cb != NULL &&
             MacroAssembler::is_calculate_address_from_global_toc_at(addr, cb->content_begin())) {
    // A calculation relative to the global TOC.
    if (MacroAssembler::get_address_of_calculate_address_from_global_toc_at(addr, cb->content_begin()) !=
        (address)data) {
      const address inst2_addr = addr;
      const address inst1_addr =
        MacroAssembler::patch_calculate_address_from_global_toc_at(inst2_addr, cb->content_begin(),
                                                                   (address)data);
      assert(inst1_addr != NULL && inst1_addr < inst2_addr, "first instruction must be found");
      const int range = inst2_addr - inst1_addr + BytesPerInstWord;
      ICache::ppc64_flush_icache_bytes(inst1_addr, range);
    }
    next_address = addr + 1 * BytesPerInstWord;
  } else if (MacroAssembler::is_load_const_at(addr)) {
    // A normal 5 instruction load_const code sequence.
    if (MacroAssembler::get_const(addr) != (long)data) {
      // This is not mt safe, ok in methods like CodeBuffer::copy_code().
      MacroAssembler::patch_const(addr, (long)data);
      ICache::ppc64_flush_icache_bytes(addr, load_const_instruction_size);
    }
    next_address = addr + 5 * BytesPerInstWord;
  } else if (MacroAssembler::is_bl(*(int*)addr)) {
    // A single branch-and-link instruction.
    ResourceMark rm;
    const int code_size = 1 * BytesPerInstWord;
    CodeBuffer cb(addr, code_size + 1);
    MacroAssembler* a = new MacroAssembler(&cb);
    a->bl((address)data);
    ICache::ppc64_flush_icache_bytes(addr, code_size);
    next_address = addr + code_size;
  } else {
    ShouldNotReachHere();
  }

  return next_address;
}

void NativeMovConstReg::set_data(intptr_t data) {
  // Store the value into the instruction stream.
  CodeBlob *cb = CodeCache::find_blob(instruction_address());
  address next_address = set_data_plain(data, cb);

  // Also store the value into an oop_Relocation cell, if any.
  if (cb && cb->is_nmethod()) {
    RelocIterator iter((nmethod *) cb, instruction_address(), next_address);
    oop* oop_addr = NULL;
    Metadata** metadata_addr = NULL;
    while (iter.next()) {
      if (iter.type() == relocInfo::oop_type) {
        oop_Relocation *r = iter.oop_reloc();
        if (oop_addr == NULL) {
          oop_addr = r->oop_addr();
          *oop_addr = cast_to_oop(data);
        } else {
          assert(oop_addr == r->oop_addr(), "must be only one set-oop here");
        }
      }
      if (iter.type() == relocInfo::metadata_type) {
        metadata_Relocation *r = iter.metadata_reloc();
        if (metadata_addr == NULL) {
          metadata_addr = r->metadata_addr();
          *metadata_addr = (Metadata*)data;
        } else {
          assert(metadata_addr == r->metadata_addr(), "must be only one set-metadata here");
        }
      }
    }
  }
}

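// Patch the constant encoded by a set_narrow_oop sequence in place;
// inst1_addr/inst2_addr below denote the first and last instruction of
// that sequence.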
void NativeMovConstReg::set_narrow_oop(narrowOop data, CodeBlob *code /* = NULL */) {
  address inst2_addr = addr_at(0);
  CodeBlob* cb = (code) ? code : CodeCache::find_blob(instruction_address());
  if (MacroAssembler::get_narrow_oop(inst2_addr, cb->content_begin()) == (long)data)
    return;
  const address inst1_addr =
    MacroAssembler::patch_set_narrow_oop(inst2_addr, cb->content_begin(), (long)data);
  assert(inst1_addr != NULL && inst1_addr < inst2_addr, "first instruction must be found");
  const int range = inst2_addr - inst1_addr + BytesPerInstWord;
  ICache::ppc64_flush_icache_bytes(inst1_addr, range);
}

// Do not use an assertion here. Let clients decide whether they only
// want this when assertions are enabled.
#ifdef ASSERT
void NativeMovConstReg::verify() {
  address addr = addr_at(0);
  if (! MacroAssembler::is_load_const_at(addr) &&
      ! MacroAssembler::is_load_const_from_method_toc_at(addr)) {
    CodeBlob* cb = CodeCache::find_blob_unsafe(addr);   // find_nmethod() asserts if nmethod is zombie.
    if (! (cb != NULL && MacroAssembler::is_calculate_address_from_global_toc_at(addr, cb->content_begin())) &&
        ! (cb != NULL && MacroAssembler::is_set_narrow_oop(addr, cb->content_begin())) &&
        ! MacroAssembler::is_bl(*((int*) addr))) {
      tty->print_cr("not a NativeMovConstReg at " PTR_FORMAT, p2i(addr));
      // TODO: PPC port: Disassembler::decode(addr, 20, 20, tty);
      fatal("not a NativeMovConstReg at " PTR_FORMAT, p2i(addr));
    }
  }
}
#endif // ASSERT

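// Divert threads entering the verified entry point: either branch straight
// to 'dest', or (when out of branch range, and always in debug builds)
// plant an illtrap that the SIGILL handler maps to the same destination.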
void NativeJump::patch_verified_entry(address entry, address verified_entry, address dest) {
  ResourceMark rm;
  int code_size = 1 * BytesPerInstWord;
  CodeBuffer cb(verified_entry, code_size + 1);
  MacroAssembler* a = new MacroAssembler(&cb);
#ifdef COMPILER2
  assert(dest == SharedRuntime::get_handle_wrong_method_stub(), "expected fixed destination of patch");
#endif
  // Patch this nmethod atomically. Always use illtrap/trap in debug build.
  if (DEBUG_ONLY(false &&) a->is_within_range_of_b(dest, a->pc())) {
    a->b(dest);
  } else {
    // The signal handler will continue at dest=OptoRuntime::handle_wrong_method_stub().
    // We use an illtrap for marking a method as not_entrant or zombie.
    a->illtrap();
  }
  ICache::ppc64_flush_icache_bytes(verified_entry, code_size);
}

#ifdef ASSERT
void NativeJump::verify() {
  address addr = addr_at(0);

  NativeInstruction::verify();
  if (!NativeJump::is_jump_at(addr)) {
    tty->print_cr("not a NativeJump at " PTR_FORMAT, p2i(addr));
    // TODO: PPC port: Disassembler::decode(addr, 20, 20, tty);
    fatal("not a NativeJump at " PTR_FORMAT, p2i(addr));
  }
}
#endif // ASSERT


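// Plant a direct, unconditional branch to 'entry' at code_pos. A single 'b'
// instruction is emitted, so 'entry' is assumed to be within direct-branch
// range of code_pos.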
void NativeGeneralJump::insert_unconditional(address code_pos, address entry) {
  CodeBuffer cb(code_pos, BytesPerInstWord + 1);
  MacroAssembler a(&cb);
  a.b(entry);
  ICache::ppc64_flush_icache_bytes(code_pos, NativeGeneralJump::instruction_size);
}

// MT-safe patching of a jmp instruction.
void NativeGeneralJump::replace_mt_safe(address instr_addr, address code_buffer) {
  // Bytes beyond offset NativeGeneralJump::instruction_size are copied by caller.

  // Finally patch out the jump.
  volatile juint *jump_addr = (volatile juint*)instr_addr;
  // Release not needed because caller uses invalidate_range after copying the remaining bytes.
  //Atomic::release_store(jump_addr, *((juint*)code_buffer));
  *jump_addr = *((juint*)code_buffer); // Atomically store code over branch instruction.
  ICache::ppc64_flush_icache_bytes(instr_addr, NativeGeneralJump::instruction_size);
}


//-------------------------------------------------------------------

// Call trampoline stubs.
//
// Layout and instructions of a call trampoline stub:
//    0:  load the TOC (part 1)
//    4:  load the TOC (part 2)
//    8:  load the call target from the constant pool (part 1)
//  [12:  load the call target from the constant pool (part 2, optional)]
//   ..:  branch via CTR
//
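// A possible encoding matching this layout (register choice and exact
// mnemonics are illustrative, not normative):
//
//    0:  addis R12, R29, <toc@ha>   // load the TOC (part 1)
//    4:  addi  R12, R12, <toc@l>    // load the TOC (part 2)
//    8:  ld    R12, <off>(R12)      // load the call target from the ctable
//   12:  mtctr R12
//   16:  bctr                       // branch via CTR
//
// The 'ld' carrying the TOC offset is located by encoded_destination_addr()
// below; it is either the stub's first instruction or follows the
// two-instruction TOC load.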

address NativeCallTrampolineStub::encoded_destination_addr() const {
  address instruction_addr = addr_at(0 * BytesPerInstWord);
  if (!MacroAssembler::is_ld_largeoffset(instruction_addr)) {
    instruction_addr = addr_at(2 * BytesPerInstWord);
    assert(MacroAssembler::is_ld_largeoffset(instruction_addr),
           "must be a ld with large offset (from the constant pool)");
  }
  return instruction_addr;
}

address NativeCallTrampolineStub::destination(nmethod *nm) const {
  CodeBlob* cb = nm ? nm : CodeCache::find_blob_unsafe(addr_at(0));
  address ctable = cb->content_begin();

  return *(address*)(ctable + destination_toc_offset());
}

int NativeCallTrampolineStub::destination_toc_offset() const {
  return MacroAssembler::get_ld_largeoffset_offset(encoded_destination_addr());
}

void NativeCallTrampolineStub::set_destination(address new_destination) {
  CodeBlob* cb = CodeCache::find_blob(addr_at(0));
  address ctable = cb->content_begin();

  *(address*)(ctable + destination_toc_offset()) = new_destination;
}