1 /*
2  * Copyright (c) 2002, 2020, Oracle and/or its affiliates. All rights reserved.
3  * Copyright (c) 2012, 2018 SAP SE. All rights reserved.
4  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
5  *
6  * This code is free software; you can redistribute it and/or modify it
7  * under the terms of the GNU General Public License version 2 only, as
8  * published by the Free Software Foundation.
9  *
10  * This code is distributed in the hope that it will be useful, but WITHOUT
11  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
13  * version 2 for more details (a copy is included in the LICENSE file that
14  * accompanied this code).
15  *
16  * You should have received a copy of the GNU General Public License version
17  * 2 along with this work; if not, write to the Free Software Foundation,
18  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
19  *
20  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
21  * or visit www.oracle.com if you need additional information or have any
22  * questions.
23  *
24  */
25 
26 #ifndef CPU_PPC_NATIVEINST_PPC_HPP
27 #define CPU_PPC_NATIVEINST_PPC_HPP
28 
29 #include "asm/macroAssembler.hpp"
30 #include "runtime/icache.hpp"
31 #include "runtime/os.hpp"
32 #include "runtime/safepointMechanism.hpp"
33 
34 // We have interfaces for the following instructions:
35 //
36 // - NativeInstruction
37 //   - NativeCall
38 //   - NativeFarCall
39 //   - NativeMovConstReg
40 //   - NativeJump
41 //   - NativeIllegalInstruction
42 //   - NativeConditionalFarBranch
43 //   - NativeCallTrampolineStub
44 
45 // The base class for different kinds of native instruction abstractions.
46 // It provides the primitive operations to manipulate code relative to this.
47 class NativeInstruction {
48   friend class Relocation;
49 
50  public:
is_jump()51   bool is_jump() { return Assembler::is_b(long_at(0)); } // See NativeGeneralJump.
52 
is_sigtrap_ic_miss_check()53   bool is_sigtrap_ic_miss_check() {
54     assert(UseSIGTRAP, "precondition");
55     return MacroAssembler::is_trap_ic_miss_check(long_at(0));
56   }
57 
is_sigtrap_null_check()58   bool is_sigtrap_null_check() {
59     assert(UseSIGTRAP && TrapBasedNullChecks, "precondition");
60     return MacroAssembler::is_trap_null_check(long_at(0));
61   }
62 
get_stop_type()63   int get_stop_type() {
64     return MacroAssembler::tdi_get_si16(long_at(0), Assembler::traptoUnconditional, 0);
65   }
66 
67   // We use an illtrap for marking a method as not_entrant or zombie.
is_sigill_zombie_not_entrant()68   bool is_sigill_zombie_not_entrant() {
69     // Work around a C++ compiler bug which changes 'this'.
70     return NativeInstruction::is_sigill_zombie_not_entrant_at(addr_at(0));
71   }
72   static bool is_sigill_zombie_not_entrant_at(address addr);
73 
74 #ifdef COMPILER2
75   // SIGTRAP-based implicit range checks
is_sigtrap_range_check()76   bool is_sigtrap_range_check() {
77     assert(UseSIGTRAP && TrapBasedRangeChecks, "precondition");
78     return MacroAssembler::is_trap_range_check(long_at(0));
79   }
80 #endif
81 
is_safepoint_poll()82   bool is_safepoint_poll() {
83     // Is the current instruction a POTENTIAL read access to the polling page?
84     // The current arguments of the instruction are not checked!
85     if (USE_POLL_BIT_ONLY) {
86       int encoding = SafepointMechanism::poll_bit();
87       return MacroAssembler::is_tdi(long_at(0), Assembler::traptoGreaterThanUnsigned | Assembler::traptoEqual,
88                                     -1, encoding);
89     }
90     return MacroAssembler::is_load_from_polling_page(long_at(0), NULL);
91   }
92 
get_stack_bang_address(void * ucontext)93   address get_stack_bang_address(void *ucontext) {
94     // If long_at(0) is not a stack bang, return 0. Otherwise, return
95     // banged address.
96     return MacroAssembler::get_stack_bang_address(long_at(0), ucontext);
97   }
98 
99  protected:
addr_at(int offset) const100   address  addr_at(int offset) const    { return address(this) + offset; }
long_at(int offset) const101   int      long_at(int offset) const    { return *(int*)addr_at(offset); }
102 
103  public:
104   void verify() NOT_DEBUG_RETURN;
105 };
106 
nativeInstruction_at(address address)107 inline NativeInstruction* nativeInstruction_at(address address) {
108   NativeInstruction* inst = (NativeInstruction*)address;
109   inst->verify();
110   return inst;
111 }
112 
113 // The NativeCall is an abstraction for accessing/manipulating call
114 // instructions. It is used to manipulate inline caches, primitive &
115 // dll calls, etc.
116 //
117 // Sparc distinguishes `NativeCall' and `NativeFarCall'. On PPC64,
118 // at present, we provide a single class `NativeCall' representing the
119 // sequence `load_const, mtctr, bctrl' or the sequence 'ld_from_toc,
120 // mtctr, bctrl'.
121 class NativeCall: public NativeInstruction {
122  public:
123 
124   enum ppc_specific_constants {
125     load_const_instruction_size                 = 28,
126     load_const_from_method_toc_instruction_size = 16,
127     instruction_size                            = 16 // Used in shared code for calls with reloc_info.
128   };
129 
is_call_at(address a)130   static bool is_call_at(address a) {
131     return Assembler::is_bl(*(int*)(a));
132   }
133 
is_call_before(address return_address)134   static bool is_call_before(address return_address) {
135     return NativeCall::is_call_at(return_address - 4);
136   }
137 
instruction_address() const138   address instruction_address() const {
139     return addr_at(0);
140   }
141 
next_instruction_address() const142   address next_instruction_address() const {
143     // We have only bl.
144     assert(MacroAssembler::is_bl(*(int*)instruction_address()), "Should be bl instruction!");
145     return addr_at(4);
146   }
147 
return_address() const148   address return_address() const {
149     return next_instruction_address();
150   }
151 
152   address destination() const;
153 
154   // The parameter assert_lock disables the assertion during code generation.
155   void set_destination_mt_safe(address dest, bool assert_lock = true);
156 
157   address get_trampoline();
158 
verify_alignment()159   void verify_alignment() {} // do nothing on ppc
160   void verify() NOT_DEBUG_RETURN;
161 };
162 
nativeCall_at(address instr)163 inline NativeCall* nativeCall_at(address instr) {
164   NativeCall* call = (NativeCall*)instr;
165   call->verify();
166   return call;
167 }
168 
nativeCall_before(address return_address)169 inline NativeCall* nativeCall_before(address return_address) {
170   NativeCall* call = NULL;
171   if (MacroAssembler::is_bl(*(int*)(return_address - 4)))
172     call = (NativeCall*)(return_address - 4);
173   call->verify();
174   return call;
175 }
176 
177 // The NativeFarCall is an abstraction for accessing/manipulating native
178 // call-anywhere instructions.
179 // Used to call native methods which may be loaded anywhere in the address
180 // space, possibly out of reach of a call instruction.
181 class NativeFarCall: public NativeInstruction {
182  public:
183   // We use MacroAssembler::bl64_patchable() for implementing a
184   // call-anywhere instruction.
185 
186   // Checks whether instr points at a NativeFarCall instruction.
is_far_call_at(address instr)187   static bool is_far_call_at(address instr) {
188     return MacroAssembler::is_bl64_patchable_at(instr);
189   }
190 
191   // Does the NativeFarCall implementation use a pc-relative encoding
192   // of the call destination?
193   // Used when relocating code.
is_pcrelative()194   bool is_pcrelative() {
195     assert(MacroAssembler::is_bl64_patchable_at((address)this),
196            "unexpected call type");
197     return MacroAssembler::is_bl64_patchable_pcrelative_at((address)this);
198   }
199 
200   // Returns the NativeFarCall's destination.
destination() const201   address destination() const {
202     assert(MacroAssembler::is_bl64_patchable_at((address)this),
203            "unexpected call type");
204     return MacroAssembler::get_dest_of_bl64_patchable_at((address)this);
205   }
206 
207   // Sets the NativeCall's destination, not necessarily mt-safe.
208   // Used when relocating code.
set_destination(address dest)209   void set_destination(address dest) {
210     // Set new destination (implementation of call may change here).
211     assert(MacroAssembler::is_bl64_patchable_at((address)this),
212            "unexpected call type");
213     MacroAssembler::set_dest_of_bl64_patchable_at((address)this, dest);
214   }
215 
216   void verify() NOT_DEBUG_RETURN;
217 };
218 
219 // Instantiates a NativeFarCall object starting at the given instruction
220 // address and returns the NativeFarCall object.
nativeFarCall_at(address instr)221 inline NativeFarCall* nativeFarCall_at(address instr) {
222   NativeFarCall* call = (NativeFarCall*)instr;
223   call->verify();
224   return call;
225 }
226 
227 // An interface for accessing/manipulating native set_oop imm, reg instructions
228 // (used to manipulate inlined data references, etc.).
class NativeMovConstReg: public NativeInstruction {
 public:

  enum ppc_specific_constants {
    // Size in bytes of the full load_const sequence (5 x 4-byte instructions).
    load_const_instruction_size                 = 20,
    // Size in bytes of the shorter toc-load sequence (2 instructions).
    load_const_from_method_toc_instruction_size =  8,
    instruction_size                            =  8 // Used in shared code for calls with reloc_info.
  };

  // Start of the instruction sequence.
  address instruction_address() const {
    return addr_at(0);
  }

  // First address past this sequence; implemented out of line.
  address next_instruction_address() const;

  // (The [set_]data accessor respects oop_type relocs also.)
  intptr_t data() const;

  // Patch the code stream.
  address set_data_plain(intptr_t x, CodeBlob *code);
  // Patch the code stream and oop pool.
  void set_data(intptr_t x);

  // Patch narrow oop constants. Use this also for narrow klass.
  void set_narrow_oop(narrowOop data, CodeBlob *code = NULL);

  void verify() NOT_DEBUG_RETURN;
};
257 
nativeMovConstReg_at(address address)258 inline NativeMovConstReg* nativeMovConstReg_at(address address) {
259   NativeMovConstReg* test = (NativeMovConstReg*)address;
260   test->verify();
261   return test;
262 }
263 
264 // The NativeJump is an abstraction for accessing/manipulating native
265 // jump-anywhere instructions.
266 class NativeJump: public NativeInstruction {
267  public:
268   // We use MacroAssembler::b64_patchable() for implementing a
269   // jump-anywhere instruction.
270 
271   enum ppc_specific_constants {
272     instruction_size = MacroAssembler::b64_patchable_size
273   };
274 
275   // Checks whether instr points at a NativeJump instruction.
is_jump_at(address instr)276   static bool is_jump_at(address instr) {
277     return MacroAssembler::is_b64_patchable_at(instr)
278       || (   MacroAssembler::is_load_const_from_method_toc_at(instr)
279           && Assembler::is_mtctr(*(int*)(instr + 2 * 4))
280           && Assembler::is_bctr(*(int*)(instr + 3 * 4)));
281   }
282 
283   // Does the NativeJump implementation use a pc-relative encoding
284   // of the call destination?
285   // Used when relocating code or patching jumps.
is_pcrelative()286   bool is_pcrelative() {
287     return MacroAssembler::is_b64_patchable_pcrelative_at((address)this);
288   }
289 
290   // Returns the NativeJump's destination.
jump_destination() const291   address jump_destination() const {
292     if (MacroAssembler::is_b64_patchable_at((address)this)) {
293       return MacroAssembler::get_dest_of_b64_patchable_at((address)this);
294     } else if (MacroAssembler::is_load_const_from_method_toc_at((address)this)
295                && Assembler::is_mtctr(*(int*)((address)this + 2 * 4))
296                && Assembler::is_bctr(*(int*)((address)this + 3 * 4))) {
297       return (address)((NativeMovConstReg *)this)->data();
298     } else {
299       ShouldNotReachHere();
300       return NULL;
301     }
302   }
303 
304   // Sets the NativeJump's destination, not necessarily mt-safe.
305   // Used when relocating code or patching jumps.
set_jump_destination(address dest)306   void set_jump_destination(address dest) {
307     // Set new destination (implementation of call may change here).
308     if (MacroAssembler::is_b64_patchable_at((address)this)) {
309       MacroAssembler::set_dest_of_b64_patchable_at((address)this, dest);
310     } else if (MacroAssembler::is_load_const_from_method_toc_at((address)this)
311                && Assembler::is_mtctr(*(int*)((address)this + 2 * 4))
312                && Assembler::is_bctr(*(int*)((address)this + 3 * 4))) {
313       ((NativeMovConstReg *)this)->set_data((intptr_t)dest);
314     } else {
315       ShouldNotReachHere();
316     }
317   }
318 
319   // MT-safe insertion of native jump at verified method entry
320   static void patch_verified_entry(address entry, address verified_entry, address dest);
321 
322   void verify() NOT_DEBUG_RETURN;
323 
check_verified_entry_alignment(address entry,address verified_entry)324   static void check_verified_entry_alignment(address entry, address verified_entry) {
325     // We just patch one instruction on ppc64, so the jump doesn't have to
326     // be aligned. Nothing to do here.
327   }
328 };
329 
330 // Instantiates a NativeJump object starting at the given instruction
331 // address and returns the NativeJump object.
nativeJump_at(address instr)332 inline NativeJump* nativeJump_at(address instr) {
333   NativeJump* call = (NativeJump*)instr;
334   call->verify();
335   return call;
336 }
337 
338 // NativeConditionalFarBranch is abstraction for accessing/manipulating
339 // conditional far branches.
340 class NativeConditionalFarBranch : public NativeInstruction {
341  public:
342 
is_conditional_far_branch_at(address instr)343   static bool is_conditional_far_branch_at(address instr) {
344     return MacroAssembler::is_bc_far_at(instr);
345   }
346 
branch_destination() const347   address branch_destination() const {
348     return MacroAssembler::get_dest_of_bc_far_at((address)this);
349   }
350 
set_branch_destination(address dest)351   void set_branch_destination(address dest) {
352     MacroAssembler::set_dest_of_bc_far_at((address)this, dest);
353   }
354 };
355 
NativeConditionalFarBranch_at(address address)356 inline NativeConditionalFarBranch* NativeConditionalFarBranch_at(address address) {
357   assert(NativeConditionalFarBranch::is_conditional_far_branch_at(address),
358          "must be a conditional far branch");
359   return (NativeConditionalFarBranch*)address;
360 }
361 
362 // Call trampoline stubs.
class NativeCallTrampolineStub : public NativeInstruction {
 private:

  // Address of the location within the stub that encodes the destination.
  address encoded_destination_addr() const;

 public:

  // The address this trampoline transfers control to. The enclosing
  // nmethod may optionally be passed in 'nm' (see out-of-line definition).
  address destination(nmethod *nm = NULL) const;
  // Offset into the toc at which the destination is encoded
  // (see out-of-line definition).
  int destination_toc_offset() const;

  // Patches the stub so it transfers control to 'new_destination'.
  void set_destination(address new_destination);
};
375 
376 // Note: Other stubs must not begin with this pattern.
is_NativeCallTrampolineStub_at(address address)377 inline bool is_NativeCallTrampolineStub_at(address address) {
378   int first_instr = *(int*)address;
379   // calculate_address_from_global_toc and long form of ld_largeoffset_unchecked begin with addis with target R12
380   if (Assembler::is_addis(first_instr) &&
381       (Register)(intptr_t)Assembler::inv_rt_field(first_instr) == R12_scratch2) return true;
382 
383   // short form of ld_largeoffset_unchecked is ld which is followed by mtctr
384   int second_instr = *((int*)address + 1);
385   if (Assembler::is_ld(first_instr) &&
386       (Register)(intptr_t)Assembler::inv_rt_field(first_instr) == R12_scratch2 &&
387       Assembler::is_mtctr(second_instr) &&
388       (Register)(intptr_t)Assembler::inv_rs_field(second_instr) == R12_scratch2) return true;
389 
390   return false;
391 }
392 
NativeCallTrampolineStub_at(address address)393 inline NativeCallTrampolineStub* NativeCallTrampolineStub_at(address address) {
394   assert(is_NativeCallTrampolineStub_at(address), "no call trampoline found");
395   return (NativeCallTrampolineStub*)address;
396 }
397 
398 ///////////////////////////////////////////////////////////////////////////////////////////////////
399 
400 //-------------------------------------
401 //  N a t i v e G e n e r a l J u m p
402 //-------------------------------------
403 
404 // Despite the name, handles only simple branches.
405 class NativeGeneralJump;
406 inline NativeGeneralJump* nativeGeneralJump_at(address address);
407 
408 // Currently only implemented as single unconditional branch.
409 class NativeGeneralJump: public NativeInstruction {
410  public:
411 
412   enum PPC64_specific_constants {
413     instruction_size = 4
414   };
415 
instruction_address() const416   address instruction_address() const { return addr_at(0); }
417 
418   // Creation.
nativeGeneralJump_at(address addr)419   friend inline NativeGeneralJump* nativeGeneralJump_at(address addr) {
420     NativeGeneralJump* jump = (NativeGeneralJump*)(addr);
421     DEBUG_ONLY( jump->verify(); )
422     return jump;
423   }
424 
425   // Insertion of native general jump instruction.
426   static void insert_unconditional(address code_pos, address entry);
427 
jump_destination() const428   address jump_destination() const {
429     DEBUG_ONLY( verify(); )
430     return addr_at(0) + Assembler::inv_li_field(long_at(0));
431   }
432 
set_jump_destination(address dest)433   void set_jump_destination(address dest) {
434     DEBUG_ONLY( verify(); )
435     insert_unconditional(addr_at(0), dest);
436   }
437 
438   static void replace_mt_safe(address instr_addr, address code_buffer);
439 
verify() const440   void verify() const { guarantee(Assembler::is_b(long_at(0)), "invalid NativeGeneralJump"); }
441 };
442 
443 // An interface for accessing/manipulating native load int (load_const32).
444 class NativeMovRegMem;
445 inline NativeMovRegMem* nativeMovRegMem_at(address address);
446 class NativeMovRegMem: public NativeInstruction {
447  public:
448 
449   enum PPC64_specific_constants {
450     instruction_size = 8
451   };
452 
instruction_address() const453   address instruction_address() const { return addr_at(0); }
454 
num_bytes_to_end_of_patch() const455   int num_bytes_to_end_of_patch() const { return instruction_size; }
456 
offset() const457   intptr_t offset() const {
458 #ifdef VM_LITTLE_ENDIAN
459     short *hi_ptr = (short*)(addr_at(0));
460     short *lo_ptr = (short*)(addr_at(4));
461 #else
462     short *hi_ptr = (short*)(addr_at(0) + 2);
463     short *lo_ptr = (short*)(addr_at(4) + 2);
464 #endif
465     return ((*hi_ptr) << 16) | ((*lo_ptr) & 0xFFFF);
466   }
467 
set_offset(intptr_t x)468   void set_offset(intptr_t x) {
469 #ifdef VM_LITTLE_ENDIAN
470     short *hi_ptr = (short*)(addr_at(0));
471     short *lo_ptr = (short*)(addr_at(4));
472 #else
473     short *hi_ptr = (short*)(addr_at(0) + 2);
474     short *lo_ptr = (short*)(addr_at(4) + 2);
475 #endif
476     *hi_ptr = x >> 16;
477     *lo_ptr = x & 0xFFFF;
478     ICache::ppc64_flush_icache_bytes(addr_at(0), NativeMovRegMem::instruction_size);
479   }
480 
add_offset_in_bytes(intptr_t radd_offset)481   void add_offset_in_bytes(intptr_t radd_offset) {
482     set_offset(offset() + radd_offset);
483   }
484 
verify() const485   void verify() const {
486     guarantee(Assembler::is_lis(long_at(0)), "load_const32 1st instr");
487     guarantee(Assembler::is_ori(long_at(4)), "load_const32 2nd instr");
488   }
489 
490  private:
nativeMovRegMem_at(address address)491   friend inline NativeMovRegMem* nativeMovRegMem_at(address address) {
492     NativeMovRegMem* test = (NativeMovRegMem*)address;
493     DEBUG_ONLY( test->verify(); )
494     return test;
495   }
496 };
497 
498 #endif // CPU_PPC_NATIVEINST_PPC_HPP
499