/*
 * Copyright (c) 1997, 2018, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 2012, 2015 SAP SE. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "code/compiledIC.hpp"
#include "memory/resourceArea.hpp"
#include "nativeInst_ppc.hpp"
#include "oops/compressedOops.inline.hpp"
#include "oops/oop.hpp"
#include "runtime/handles.hpp"
#include "runtime/orderAccess.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/stubRoutines.hpp"
#include "utilities/ostream.hpp"
#ifdef COMPILER1
#include "c1/c1_Runtime1.hpp"
#endif

// We use an illtrap for marking a method as not_entrant or zombie iff !UseSIGTRAP.
// Work around a C++ compiler bug which changes 'this'.
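// Typically consulted by the platform signal handler to decide whether a
// SIGILL was raised by an nmethod patched as not_entrant/zombie (see
// NativeJump::patch_verified_entry() below).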
bool NativeInstruction::is_sigill_zombie_not_entrant_at(address addr) {
  assert(!UseSIGTRAP, "precondition");
  if (*(int*)addr != 0 /*illtrap*/) return false;
  CodeBlob* cb = CodeCache::find_blob_unsafe(addr);
  if (cb == NULL || !cb->is_nmethod()) return false;
  nmethod *nm = (nmethod *)cb;
  // This method is not_entrant or zombie iff the illtrap instruction is
  // located at the verified entry point.
  return nm->verified_entry_point() == addr;
}

#ifdef ASSERT
void NativeInstruction::verify() {
  // Make sure code pattern is actually an instruction address.
  address addr = addr_at(0);
  if (addr == 0 || ((intptr_t)addr & 3) != 0) {
    fatal("not an instruction address");
  }
}
#endif // ASSERT

// Extract call destination from a NativeCall. The call might use a trampoline stub.
address NativeCall::destination() const {
  address addr = (address)this;
  address destination = Assembler::bxx_destination(addr);

  // Do we use a trampoline stub for this call?
  // Trampoline stubs are located behind the main code.
  if (destination > addr) {
    // Filter out recursive method invocation (call to verified/unverified entry point).
    CodeBlob* cb = CodeCache::find_blob_unsafe(addr); // Else we get assertion if nmethod is zombie.
    assert(cb && cb->is_nmethod(), "sanity");
    nmethod *nm = (nmethod *)cb;
    if (nm->stub_contains(destination) && is_NativeCallTrampolineStub_at(destination)) {
      // Yes we do, so get the destination from the trampoline stub.
      const address trampoline_stub_addr = destination;
      destination = NativeCallTrampolineStub_at(trampoline_stub_addr)->destination(nm);
    }
  }

  return destination;
}

// Similar to replace_mt_safe, but just changes the destination. The
// important thing is that free-running threads are able to execute this
// call instruction at all times. Thus, the displacement field must be
// instruction-word-aligned.
//
// Used in the runtime linkage of calls; see class CompiledIC.
//
// Add parameter assert_lock to switch off assertion
// during code generation, where no patching lock is needed.
void NativeCall::set_destination_mt_safe(address dest, bool assert_lock) {
  assert(!assert_lock ||
         (Patching_lock->is_locked() || SafepointSynchronize::is_at_safepoint()) ||
         CompiledICLocker::is_safe(addr_at(0)),
         "concurrent code patching");

  ResourceMark rm;
  int code_size = 1 * BytesPerInstWord;
  address addr_call = addr_at(0);
  assert(MacroAssembler::is_bl(*(int*)addr_call), "unexpected code at call-site");

  CodeBuffer cb(addr_call, code_size + 1);
  MacroAssembler* a = new MacroAssembler(&cb);

  // Patch the call.
  if (!ReoptimizeCallSequences || !a->is_within_range_of_b(dest, addr_call)) {
    address trampoline_stub_addr = get_trampoline();

    // We did not find a trampoline stub because the current codeblob
    // does not provide this information. The branch will be patched
    // later during a final fixup, when all necessary information is
    // available.
    if (trampoline_stub_addr == 0)
      return;

    // Patch the constant in the call's trampoline stub.
    NativeCallTrampolineStub_at(trampoline_stub_addr)->set_destination(dest);
    dest = trampoline_stub_addr;
  }

  OrderAccess::release();
  a->bl(dest);

  ICache::ppc64_flush_icache_bytes(addr_call, code_size);
}
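
// Patching outcomes of set_destination_mt_safe() (illustrative sketch, not
// generated code; concrete operands are assumptions for the example):
//
//   near target:  bl <dest>             // dest within the 26-bit branch range
//   far target:   bl <trampoline_stub>  // stub loads the real destination from
//                                       // the TOC and branches via CTR
//
// Either way only the single 'bl' word is rewritten, which is why free-running
// threads always observe a valid instruction.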

address NativeCall::get_trampoline() {
  address call_addr = addr_at(0);

  CodeBlob *code = CodeCache::find_blob(call_addr);
  assert(code != NULL, "Could not find the containing code blob");

  // There are no relocations available when the code gets relocated
  // because of CodeBuffer expansion.
  if (code->relocation_size() == 0)
    return NULL;

  address bl_destination = Assembler::bxx_destination(call_addr);
  if (code->contains(bl_destination) &&
      is_NativeCallTrampolineStub_at(bl_destination))
    return bl_destination;

  // If the codeBlob is not an nmethod, this is because we get here from the
  // CodeBlob constructor, which is called within the nmethod constructor.
  return trampoline_stub_Relocation::get_trampoline_for(call_addr, (nmethod*)code);
}

#ifdef ASSERT
void NativeCall::verify() {
  address addr = addr_at(0);

  if (!NativeCall::is_call_at(addr)) {
    tty->print_cr("not a NativeCall at " PTR_FORMAT, p2i(addr));
    // TODO: PPC port: Disassembler::decode(addr - 20, addr + 20, tty);
    fatal("not a NativeCall at " PTR_FORMAT, p2i(addr));
  }
}
#endif // ASSERT

#ifdef ASSERT
void NativeFarCall::verify() {
  address addr = addr_at(0);

  NativeInstruction::verify();
  if (!NativeFarCall::is_far_call_at(addr)) {
    tty->print_cr("not a NativeFarCall at " PTR_FORMAT, p2i(addr));
    // TODO: PPC port: Disassembler::decode(addr, 20, 20, tty);
    fatal("not a NativeFarCall at " PTR_FORMAT, p2i(addr));
  }
}
#endif // ASSERT

address NativeMovConstReg::next_instruction_address() const {
#ifdef ASSERT
  CodeBlob* nm = CodeCache::find_blob(instruction_address());
  assert(!MacroAssembler::is_set_narrow_oop(addr_at(0), nm->content_begin()), "Should not patch narrow oop here");
#endif

  if (MacroAssembler::is_load_const_from_method_toc_at(addr_at(0))) {
    return addr_at(load_const_from_method_toc_instruction_size);
  } else {
    return addr_at(load_const_instruction_size);
  }
}

intptr_t NativeMovConstReg::data() const {
  address addr = addr_at(0);

  if (MacroAssembler::is_load_const_at(addr)) {
    return MacroAssembler::get_const(addr);
  }

  CodeBlob* cb = CodeCache::find_blob_unsafe(addr);
  if (MacroAssembler::is_set_narrow_oop(addr, cb->content_begin())) {
    narrowOop no = (narrowOop)MacroAssembler::get_narrow_oop(addr, cb->content_begin());
    return cast_from_oop<intptr_t>(CompressedOops::decode(no));
  } else {
    assert(MacroAssembler::is_load_const_from_method_toc_at(addr), "must be load_const_from_pool");

    address ctable = cb->content_begin();
    int offset = MacroAssembler::get_offset_of_load_const_from_method_toc_at(addr);
    return *(intptr_t *)(ctable + offset);
  }
}
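
// The three constant-load shapes distinguished by data() above (sketch only;
// mnemonics and register names are illustrative, see the MacroAssembler
// recognizers for the authoritative patterns):
//
//   load_const:                  lis/ori/rldicr/oris/ori  // 5 insts, 64-bit immediate
//   set_narrow_oop:              lis/ori                  // compressed oop, decoded on read
//   load_const_from_method_toc:  ld Rx, offset(Rtoc)      // value in the method's constant table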

address NativeMovConstReg::set_data_plain(intptr_t data, CodeBlob *cb) {
  address addr = instruction_address();
  address next_address = NULL;
  if (!cb) cb = CodeCache::find_blob(addr);

  if (cb != NULL && MacroAssembler::is_load_const_from_method_toc_at(addr)) {
    // A load from the method's TOC (ctable).
    assert(cb->is_nmethod(), "must be nmethod");
    const address ctable = cb->content_begin();
    const int toc_offset = MacroAssembler::get_offset_of_load_const_from_method_toc_at(addr);
    *(intptr_t *)(ctable + toc_offset) = data;
    next_address = addr + BytesPerInstWord;
  } else if (cb != NULL &&
             MacroAssembler::is_calculate_address_from_global_toc_at(addr, cb->content_begin())) {
    // A calculation relative to the global TOC.
    if (MacroAssembler::get_address_of_calculate_address_from_global_toc_at(addr, cb->content_begin()) !=
        (address)data) {
      const address inst2_addr = addr;
      const address inst1_addr =
        MacroAssembler::patch_calculate_address_from_global_toc_at(inst2_addr, cb->content_begin(),
                                                                   (address)data);
      assert(inst1_addr != NULL && inst1_addr < inst2_addr, "first instruction must be found");
      const int range = inst2_addr - inst1_addr + BytesPerInstWord;
      ICache::ppc64_flush_icache_bytes(inst1_addr, range);
    }
    next_address = addr + 1 * BytesPerInstWord;
  } else if (MacroAssembler::is_load_const_at(addr)) {
    // A normal 5-instruction load_const code sequence.
    if (MacroAssembler::get_const(addr) != (long)data) {
      // This is not mt safe, ok in methods like CodeBuffer::copy_code().
      MacroAssembler::patch_const(addr, (long)data);
      ICache::ppc64_flush_icache_bytes(addr, load_const_instruction_size);
    }
    next_address = addr + 5 * BytesPerInstWord;
  } else if (MacroAssembler::is_bl(*(int*)addr)) {
    // A single branch-and-link instruction.
    ResourceMark rm;
    const int code_size = 1 * BytesPerInstWord;
    CodeBuffer cb(addr, code_size + 1);
    MacroAssembler* a = new MacroAssembler(&cb);
    a->bl((address)data);
    ICache::ppc64_flush_icache_bytes(addr, code_size);
    next_address = addr + code_size;
  } else {
    ShouldNotReachHere();
  }

  return next_address;
}

void NativeMovConstReg::set_data(intptr_t data) {
  // Store the value into the instruction stream.
  CodeBlob *cb = CodeCache::find_blob(instruction_address());
  address next_address = set_data_plain(data, cb);

  // Also store the value into an oop_Relocation cell, if any.
  if (cb && cb->is_nmethod()) {
    RelocIterator iter((nmethod *) cb, instruction_address(), next_address);
    oop* oop_addr = NULL;
    Metadata** metadata_addr = NULL;
    while (iter.next()) {
      if (iter.type() == relocInfo::oop_type) {
        oop_Relocation *r = iter.oop_reloc();
        if (oop_addr == NULL) {
          oop_addr = r->oop_addr();
          *oop_addr = cast_to_oop(data);
        } else {
          assert(oop_addr == r->oop_addr(), "must be only one set-oop here");
        }
      }
      if (iter.type() == relocInfo::metadata_type) {
        metadata_Relocation *r = iter.metadata_reloc();
        if (metadata_addr == NULL) {
          metadata_addr = r->metadata_addr();
          *metadata_addr = (Metadata*)data;
        } else {
          assert(metadata_addr == r->metadata_addr(), "must be only one set-metadata here");
        }
      }
    }
  }
}

void NativeMovConstReg::set_narrow_oop(narrowOop data, CodeBlob *code /* = NULL */) {
  address inst2_addr = addr_at(0);
  CodeBlob* cb = (code) ? code : CodeCache::find_blob(instruction_address());
  if (MacroAssembler::get_narrow_oop(inst2_addr, cb->content_begin()) == (long)data)
    return;
  const address inst1_addr =
    MacroAssembler::patch_set_narrow_oop(inst2_addr, cb->content_begin(), (long)data);
  assert(inst1_addr != NULL && inst1_addr < inst2_addr, "first instruction must be found");
  const int range = inst2_addr - inst1_addr + BytesPerInstWord;
  ICache::ppc64_flush_icache_bytes(inst1_addr, range);
}

// Do not use an assertion here. Let clients decide whether they only
// want this when assertions are enabled.
#ifdef ASSERT
void NativeMovConstReg::verify() {
  address addr = addr_at(0);
  if (! MacroAssembler::is_load_const_at(addr) &&
      ! MacroAssembler::is_load_const_from_method_toc_at(addr)) {
    CodeBlob* cb = CodeCache::find_blob_unsafe(addr); // find_nmethod() asserts if nmethod is zombie.
    if (! (cb != NULL && MacroAssembler::is_calculate_address_from_global_toc_at(addr, cb->content_begin())) &&
        ! (cb != NULL && MacroAssembler::is_set_narrow_oop(addr, cb->content_begin())) &&
        ! MacroAssembler::is_bl(*((int*) addr))) {
      tty->print_cr("not a NativeMovConstReg at " PTR_FORMAT, p2i(addr));
      // TODO: PPC port: Disassembler::decode(addr, 20, 20, tty);
      fatal("not a NativeMovConstReg at " PTR_FORMAT, p2i(addr));
    }
  }
}
#endif // ASSERT

void NativeJump::patch_verified_entry(address entry, address verified_entry, address dest) {
  ResourceMark rm;
  int code_size = 1 * BytesPerInstWord;
  CodeBuffer cb(verified_entry, code_size + 1);
  MacroAssembler* a = new MacroAssembler(&cb);
#ifdef COMPILER2
  assert(dest == SharedRuntime::get_handle_wrong_method_stub(), "expected fixed destination of patch");
#endif
  // Patch this nmethod atomically. Always use illtrap/trap in debug build.
  if (DEBUG_ONLY(false &&) a->is_within_range_of_b(dest, a->pc())) {
    a->b(dest);
  } else {
    // The signal handler will continue at dest=OptoRuntime::handle_wrong_method_stub().
    if (TrapBasedNotEntrantChecks) {
      // We use a special trap for marking a method as not_entrant or zombie.
      a->trap_zombie_not_entrant();
    } else {
      // We use an illtrap for marking a method as not_entrant or zombie.
      a->illtrap();
    }
  }
  ICache::ppc64_flush_icache_bytes(verified_entry, code_size);
}
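
// Resulting first word at the verified entry point (sketch; mnemonics are
// illustrative, not exact encodings):
//
//   b <handle_wrong_method_stub>  // target reachable (product build only)
//   tdi ...                       // TrapBasedNotEntrantChecks: raises SIGTRAP
//   .long 0x00000000              // illtrap: raises SIGILL, recognized by
//                                 // is_sigill_zombie_not_entrant_at()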

#ifdef ASSERT
void NativeJump::verify() {
  address addr = addr_at(0);

  NativeInstruction::verify();
  if (!NativeJump::is_jump_at(addr)) {
    tty->print_cr("not a NativeJump at " PTR_FORMAT, p2i(addr));
    // TODO: PPC port: Disassembler::decode(addr, 20, 20, tty);
    fatal("not a NativeJump at " PTR_FORMAT, p2i(addr));
  }
}
#endif // ASSERT


void NativeGeneralJump::insert_unconditional(address code_pos, address entry) {
  CodeBuffer cb(code_pos, BytesPerInstWord + 1);
  MacroAssembler* a = new MacroAssembler(&cb);
  a->b(entry);
  ICache::ppc64_flush_icache_bytes(code_pos, NativeGeneralJump::instruction_size);
}

// MT-safe patching of a jmp instruction.
void NativeGeneralJump::replace_mt_safe(address instr_addr, address code_buffer) {
  // Bytes beyond offset NativeGeneralJump::instruction_size are copied by caller.

  // Finally patch out the jump.
  volatile juint *jump_addr = (volatile juint*)instr_addr;
  // Release not needed because caller uses invalidate_range after copying the remaining bytes.
  //OrderAccess::release_store(jump_addr, *((juint*)code_buffer));
  *jump_addr = *((juint*)code_buffer); // atomically store code over branch instruction
  ICache::ppc64_flush_icache_bytes(instr_addr, NativeGeneralJump::instruction_size);
}


//-------------------------------------------------------------------

// Call trampoline stubs.
//
// Layout and instructions of a call trampoline stub:
//    0:  load the TOC (part 1)
//    4:  load the TOC (part 2)
//    8:  load the call target from the constant pool (part 1)
//  [12:  load the call target from the constant pool (part 2, optional)]
//   ..:  branch via CTR
//
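// A sketch of the stub's instruction sequence (illustrative only: register
// names and mnemonics are assumptions; the real stub is emitted by the
// MacroAssembler and decoded via is_ld_largeoffset()/get_ld_largeoffset_offset()):
//
//   <materialize TOC base>      // two instructions, e.g. an addis/addi pair
//   ld    R12, off(Rtoc)        // load call target from the constant pool;
//                               // becomes two instructions for large offsets
//   mtctr R12
//   bctr
//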

address NativeCallTrampolineStub::encoded_destination_addr() const {
  address instruction_addr = addr_at(0 * BytesPerInstWord);
  if (!MacroAssembler::is_ld_largeoffset(instruction_addr)) {
    instruction_addr = addr_at(2 * BytesPerInstWord);
    assert(MacroAssembler::is_ld_largeoffset(instruction_addr),
           "must be a ld with large offset (from the constant pool)");
  }
  return instruction_addr;
}

address NativeCallTrampolineStub::destination(nmethod *nm) const {
  CodeBlob* cb = nm ? nm : CodeCache::find_blob_unsafe(addr_at(0));
  address ctable = cb->content_begin();

  return *(address*)(ctable + destination_toc_offset());
}

int NativeCallTrampolineStub::destination_toc_offset() const {
  return MacroAssembler::get_ld_largeoffset_offset(encoded_destination_addr());
}

void NativeCallTrampolineStub::set_destination(address new_destination) {
  CodeBlob* cb = CodeCache::find_blob(addr_at(0));
  address ctable = cb->content_begin();

  *(address*)(ctable + destination_toc_offset()) = new_destination;
}