/*
 * Copyright (c) 2002, 2018, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 2012, 2018 SAP SE. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef CPU_PPC_VM_NATIVEINST_PPC_HPP
#define CPU_PPC_VM_NATIVEINST_PPC_HPP

#include "asm/macroAssembler.hpp"
#include "runtime/icache.hpp"
#include "runtime/os.hpp"
#include "runtime/safepointMechanism.hpp"

// We have interfaces for the following instructions:
//
// - NativeInstruction
//   - NativeCall
//   - NativeFarCall
//   - NativeMovConstReg
//   - NativeJump
//   - NativeIllegalInstruction
//   - NativeConditionalFarBranch
//   - NativeCallTrampolineStub
// The base class for different kinds of native instruction abstractions.
// It provides the primitive operations to manipulate code relative to this instruction.
class NativeInstruction {
  friend class Relocation;

 public:
  bool is_jump() { return Assembler::is_b(long_at(0)); } // See NativeGeneralJump.

  bool is_sigtrap_ic_miss_check() {
    assert(UseSIGTRAP, "precondition");
    return MacroAssembler::is_trap_ic_miss_check(long_at(0));
  }

  bool is_sigtrap_null_check() {
    assert(UseSIGTRAP && TrapBasedNullChecks, "precondition");
    return MacroAssembler::is_trap_null_check(long_at(0));
  }

  // We use a special trap for marking a method as not_entrant or zombie
  // iff UseSIGTRAP.
  bool is_sigtrap_zombie_not_entrant() {
    assert(UseSIGTRAP, "precondition");
    return MacroAssembler::is_trap_zombie_not_entrant(long_at(0));
  }

  // We use an illtrap for marking a method as not_entrant or zombie
  // iff !UseSIGTRAP.
  bool is_sigill_zombie_not_entrant() {
    assert(!UseSIGTRAP, "precondition");
    // Work around a C++ compiler bug which changes 'this'.
    return NativeInstruction::is_sigill_zombie_not_entrant_at(addr_at(0));
  }
  static bool is_sigill_zombie_not_entrant_at(address addr);

#ifdef COMPILER2
  // SIGTRAP-based implicit range checks
  bool is_sigtrap_range_check() {
    assert(UseSIGTRAP && TrapBasedRangeChecks, "precondition");
    return MacroAssembler::is_trap_range_check(long_at(0));
  }
#endif

  // 'should not reach here'.
  bool is_sigtrap_should_not_reach_here() {
    return MacroAssembler::is_trap_should_not_reach_here(long_at(0));
  }

  bool is_safepoint_poll() {
    // Is the current instruction a POTENTIAL read access to the polling page?
    // The current arguments of the instruction are not checked!
    if (SafepointMechanism::uses_thread_local_poll() && USE_POLL_BIT_ONLY) {
      int encoding = SafepointMechanism::poll_bit();
      return MacroAssembler::is_tdi(long_at(0), Assembler::traptoGreaterThanUnsigned | Assembler::traptoEqual,
                                    -1, encoding);
    }
    return MacroAssembler::is_load_from_polling_page(long_at(0), NULL);
  }

  bool is_memory_serialization(JavaThread *thread, void *ucontext) {
    // Is the current instruction a write access of thread to the
    // memory serialization page?
    return MacroAssembler::is_memory_serialization(long_at(0), thread, ucontext);
  }

  address get_stack_bang_address(void *ucontext) {
    // If long_at(0) is not a stack bang, return 0. Otherwise, return
    // banged address.
    return MacroAssembler::get_stack_bang_address(long_at(0), ucontext);
  }

 protected:
  address  addr_at(int offset) const    { return address(this) + offset; }
  int      long_at(int offset) const    { return *(int*)addr_at(offset); }

 public:
  void verify() NOT_DEBUG_RETURN;
};

inline NativeInstruction* nativeInstruction_at(address address) {
  NativeInstruction* inst = (NativeInstruction*)address;
  inst->verify();
  return inst;
}
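
// Usage sketch (illustrative only, not part of this interface): classifying a
// faulting instruction, e.g. from a signal handler. 'pc' stands for a
// hypothetical code address taken from the signal context.
//
//   NativeInstruction* ni = nativeInstruction_at(pc);
//   if (UseSIGTRAP && ni->is_sigtrap_ic_miss_check()) {
//     // dispatch to the inline-cache miss handler
//   } else if (ni->is_safepoint_poll()) {
//     // treat the fault as a safepoint poll hit
//   }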

// The NativeCall is an abstraction for accessing/manipulating call
// instructions. It is used to manipulate inline caches, primitive &
// dll calls, etc.
//
// Sparc distinguishes `NativeCall' and `NativeFarCall'. On PPC64,
// at present, we provide a single class `NativeCall' representing the
// sequence `load_const, mtctr, bctrl' or the sequence 'ld_from_toc,
// mtctr, bctrl'.
class NativeCall: public NativeInstruction {
 public:

  enum ppc_specific_constants {
    load_const_instruction_size                 = 28,
    load_const_from_method_toc_instruction_size = 16,
    instruction_size                            = 16 // Used in shared code for calls with reloc_info.
  };

  static bool is_call_at(address a) {
    return Assembler::is_bl(*(int*)(a));
  }

  static bool is_call_before(address return_address) {
    return NativeCall::is_call_at(return_address - 4);
  }

  address instruction_address() const {
    return addr_at(0);
  }

  address next_instruction_address() const {
    // We have only bl.
    assert(MacroAssembler::is_bl(*(int*)instruction_address()), "Should be bl instruction!");
    return addr_at(4);
  }

  address return_address() const {
    return next_instruction_address();
  }

  address destination() const;

  // The parameter assert_lock disables the assertion during code generation.
  void set_destination_mt_safe(address dest, bool assert_lock = true);

  address get_trampoline();

  void verify_alignment() {} // do nothing on ppc
  void verify() NOT_DEBUG_RETURN;
};

inline NativeCall* nativeCall_at(address instr) {
  NativeCall* call = (NativeCall*)instr;
  call->verify();
  return call;
}

inline NativeCall* nativeCall_before(address return_address) {
  NativeCall* call = NULL;
  if (MacroAssembler::is_bl(*(int*)(return_address - 4)))
    call = (NativeCall*)(return_address - 4);
  call->verify();
  return call;
}
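
// Usage sketch (illustrative only): examining the call site that precedes a
// known return address. 'return_pc' is a hypothetical address; the caller is
// expected to have established that a bl immediately precedes it.
//
//   if (NativeCall::is_call_before(return_pc)) {
//     NativeCall* call = nativeCall_before(return_pc);
//     address callee = call->destination();
//   }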

// The NativeFarCall is an abstraction for accessing/manipulating native
// call-anywhere instructions.
// Used to call native methods which may be loaded anywhere in the address
// space, possibly out of reach of a call instruction.
class NativeFarCall: public NativeInstruction {
 public:
  // We use MacroAssembler::bl64_patchable() for implementing a
  // call-anywhere instruction.

  // Checks whether instr points at a NativeFarCall instruction.
  static bool is_far_call_at(address instr) {
    return MacroAssembler::is_bl64_patchable_at(instr);
  }

  // Does the NativeFarCall implementation use a pc-relative encoding
  // of the call destination?
  // Used when relocating code.
  bool is_pcrelative() {
    assert(MacroAssembler::is_bl64_patchable_at((address)this),
           "unexpected call type");
    return MacroAssembler::is_bl64_patchable_pcrelative_at((address)this);
  }

  // Returns the NativeFarCall's destination.
  address destination() const {
    assert(MacroAssembler::is_bl64_patchable_at((address)this),
           "unexpected call type");
    return MacroAssembler::get_dest_of_bl64_patchable_at((address)this);
  }
  // Sets the NativeFarCall's destination, not necessarily mt-safe.
  // Used when relocating code.
  void set_destination(address dest) {
    // Set new destination (implementation of call may change here).
    assert(MacroAssembler::is_bl64_patchable_at((address)this),
           "unexpected call type");
    MacroAssembler::set_dest_of_bl64_patchable_at((address)this, dest);
  }

  void verify() NOT_DEBUG_RETURN;
};

// Instantiates a NativeFarCall object starting at the given instruction
// address and returns the NativeFarCall object.
inline NativeFarCall* nativeFarCall_at(address instr) {
  NativeFarCall* call = (NativeFarCall*)instr;
  call->verify();
  return call;
}
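
// Usage sketch (illustrative only): retargeting a far call during code
// relocation, assuming 'pc' is a hypothetical address of a bl64_patchable
// sequence and 'new_dest' the relocated callee.
//
//   if (NativeFarCall::is_far_call_at(pc)) {
//     NativeFarCall* call = nativeFarCall_at(pc);
//     call->set_destination(new_dest);  // not necessarily mt-safe
//   }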

// An interface for accessing/manipulating native set_oop imm, reg instructions
// (used to manipulate inlined data references, etc.).
class NativeMovConstReg: public NativeInstruction {
 public:

  enum ppc_specific_constants {
    load_const_instruction_size                 = 20,
    load_const_from_method_toc_instruction_size =  8,
    instruction_size                            =  8 // Used in shared code for calls with reloc_info.
  };

  address instruction_address() const {
    return addr_at(0);
  }

  address next_instruction_address() const;

  // (The [set_]data accessor respects oop_type relocs also.)
  intptr_t data() const;

  // Patch the code stream.
  address set_data_plain(intptr_t x, CodeBlob *code);
  // Patch the code stream and oop pool.
  void set_data(intptr_t x);

  // Patch narrow oop constants. Use this also for narrow klass.
  void set_narrow_oop(narrowOop data, CodeBlob *code = NULL);

  void verify() NOT_DEBUG_RETURN;
};

inline NativeMovConstReg* nativeMovConstReg_at(address address) {
  NativeMovConstReg* test = (NativeMovConstReg*)address;
  test->verify();
  return test;
}
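
// Usage sketch (illustrative only): patching an inlined constant, e.g. after
// an oop has moved. 'pc' is a hypothetical address of a load_const or
// ld_from_toc site that carries a reloc.
//
//   NativeMovConstReg* mov = nativeMovConstReg_at(pc);
//   intptr_t old_value = mov->data();
//   mov->set_data((intptr_t)new_value);  // patches the code stream and oop pool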

// The NativeJump is an abstraction for accessing/manipulating native
// jump-anywhere instructions.
class NativeJump: public NativeInstruction {
 public:
  // We use MacroAssembler::b64_patchable() for implementing a
  // jump-anywhere instruction.

  enum ppc_specific_constants {
    instruction_size = MacroAssembler::b64_patchable_size
  };

  // Checks whether instr points at a NativeJump instruction.
  static bool is_jump_at(address instr) {
    return MacroAssembler::is_b64_patchable_at(instr)
      || (   MacroAssembler::is_load_const_from_method_toc_at(instr)
          && Assembler::is_mtctr(*(int*)(instr + 2 * 4))
          && Assembler::is_bctr(*(int*)(instr + 3 * 4)));
  }

  // Does the NativeJump implementation use a pc-relative encoding
  // of the jump destination?
  // Used when relocating code or patching jumps.
  bool is_pcrelative() {
    return MacroAssembler::is_b64_patchable_pcrelative_at((address)this);
  }

  // Returns the NativeJump's destination.
  address jump_destination() const {
    if (MacroAssembler::is_b64_patchable_at((address)this)) {
      return MacroAssembler::get_dest_of_b64_patchable_at((address)this);
    } else if (MacroAssembler::is_load_const_from_method_toc_at((address)this)
               && Assembler::is_mtctr(*(int*)((address)this + 2 * 4))
               && Assembler::is_bctr(*(int*)((address)this + 3 * 4))) {
      return (address)((NativeMovConstReg *)this)->data();
    } else {
      ShouldNotReachHere();
      return NULL;
    }
  }

  // Sets the NativeJump's destination, not necessarily mt-safe.
  // Used when relocating code or patching jumps.
  void set_jump_destination(address dest) {
    // Set new destination (implementation of jump may change here).
    if (MacroAssembler::is_b64_patchable_at((address)this)) {
      MacroAssembler::set_dest_of_b64_patchable_at((address)this, dest);
    } else if (MacroAssembler::is_load_const_from_method_toc_at((address)this)
               && Assembler::is_mtctr(*(int*)((address)this + 2 * 4))
               && Assembler::is_bctr(*(int*)((address)this + 3 * 4))) {
      ((NativeMovConstReg *)this)->set_data((intptr_t)dest);
    } else {
      ShouldNotReachHere();
    }
  }

  // MT-safe insertion of native jump at verified method entry
  static void patch_verified_entry(address entry, address verified_entry, address dest);

  void verify() NOT_DEBUG_RETURN;

  static void check_verified_entry_alignment(address entry, address verified_entry) {
    // We just patch one instruction on ppc64, so the jump doesn't have to
    // be aligned. Nothing to do here.
  }
};

// Instantiates a NativeJump object starting at the given instruction
// address and returns the NativeJump object.
inline NativeJump* nativeJump_at(address instr) {
  NativeJump* jump = (NativeJump*)instr;
  jump->verify();
  return jump;
}
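
// Usage sketch (illustrative only): redirecting a jump-anywhere site, assuming
// 'pc' is a hypothetical address for which NativeJump::is_jump_at(pc) holds.
//
//   NativeJump* jump = nativeJump_at(pc);
//   if (jump->is_pcrelative()) {
//     // the destination encoding depends on the code position being patched
//   }
//   jump->set_jump_destination(new_dest);  // not necessarily mt-safe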

// NativeConditionalFarBranch is an abstraction for accessing/manipulating
// conditional far branches.
class NativeConditionalFarBranch : public NativeInstruction {
 public:

  static bool is_conditional_far_branch_at(address instr) {
    return MacroAssembler::is_bc_far_at(instr);
  }

  address branch_destination() const {
    return MacroAssembler::get_dest_of_bc_far_at((address)this);
  }

  void set_branch_destination(address dest) {
    MacroAssembler::set_dest_of_bc_far_at((address)this, dest);
  }
};

inline NativeConditionalFarBranch* NativeConditionalFarBranch_at(address address) {
  assert(NativeConditionalFarBranch::is_conditional_far_branch_at(address),
         "must be a conditional far branch");
  return (NativeConditionalFarBranch*)address;
}
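
// Usage sketch (illustrative only): retargeting a conditional far branch,
// with 'pc' a hypothetical address known to hold a bc_far sequence.
//
//   if (NativeConditionalFarBranch::is_conditional_far_branch_at(pc)) {
//     NativeConditionalFarBranch* branch = NativeConditionalFarBranch_at(pc);
//     branch->set_branch_destination(new_dest);
//   }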

// Call trampoline stubs.
class NativeCallTrampolineStub : public NativeInstruction {
 private:

  address encoded_destination_addr() const;

 public:

  address destination(nmethod *nm = NULL) const;
  int destination_toc_offset() const;

  void set_destination(address new_destination);
};

// Note: Other stubs must not begin with this pattern.
inline bool is_NativeCallTrampolineStub_at(address address) {
  int first_instr = *(int*)address;
  // calculate_address_from_global_toc and the long form of
  // ld_largeoffset_unchecked begin with an addis with target R12.
  if (Assembler::is_addis(first_instr) &&
      (Register)(intptr_t)Assembler::inv_rt_field(first_instr) == R12_scratch2) return true;

  // The short form of ld_largeoffset_unchecked is an ld that is followed by an mtctr.
  int second_instr = *((int*)address + 1);
  if (Assembler::is_ld(first_instr) &&
      (Register)(intptr_t)Assembler::inv_rt_field(first_instr) == R12_scratch2 &&
      Assembler::is_mtctr(second_instr) &&
      (Register)(intptr_t)Assembler::inv_rs_field(second_instr) == R12_scratch2) return true;

  return false;
}

inline NativeCallTrampolineStub* NativeCallTrampolineStub_at(address address) {
  assert(is_NativeCallTrampolineStub_at(address), "no call trampoline found");
  return (NativeCallTrampolineStub*)address;
}
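
// Usage sketch (illustrative only): following a bl through its trampoline stub
// to the real callee. 'call_pc' is a hypothetical address of a bl whose target
// is a trampoline stub.
//
//   NativeCall* call = nativeCall_at(call_pc);
//   address stub = call->get_trampoline();
//   if (stub != NULL && is_NativeCallTrampolineStub_at(stub)) {
//     address callee = NativeCallTrampolineStub_at(stub)->destination();
//   }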

///////////////////////////////////////////////////////////////////////////////////////////////////

//-------------------------------------
//  N a t i v e G e n e r a l J u m p
//-------------------------------------

// Despite the name, handles only simple branches.
class NativeGeneralJump;
inline NativeGeneralJump* nativeGeneralJump_at(address address);

// Currently only implemented as a single unconditional branch.
class NativeGeneralJump: public NativeInstruction {
 public:

  enum PPC64_specific_constants {
    instruction_size = 4
  };

  address instruction_address() const { return addr_at(0); }

  // Creation.
  friend inline NativeGeneralJump* nativeGeneralJump_at(address addr) {
    NativeGeneralJump* jump = (NativeGeneralJump*)(addr);
    DEBUG_ONLY( jump->verify(); )
    return jump;
  }

  // Insertion of native general jump instruction.
  static void insert_unconditional(address code_pos, address entry);

  address jump_destination() const {
    DEBUG_ONLY( verify(); )
    return addr_at(0) + Assembler::inv_li_field(long_at(0));
  }

  void set_jump_destination(address dest) {
    DEBUG_ONLY( verify(); )
    insert_unconditional(addr_at(0), dest);
  }

  static void replace_mt_safe(address instr_addr, address code_buffer);

  void verify() const { guarantee(Assembler::is_b(long_at(0)), "invalid NativeGeneralJump"); }
};
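
// Worked example (illustrative only): jump_destination() adds the branch
// displacement recovered from the 'b' instruction's LI field to the address of
// the instruction itself. For a 'b' located at 0x1000 that encodes a
// displacement of -0x40, the destination is 0x1000 + (-0x40) = 0xfc0.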

// An interface for accessing/manipulating native load int (load_const32).
class NativeMovRegMem;
inline NativeMovRegMem* nativeMovRegMem_at(address address);
class NativeMovRegMem: public NativeInstruction {
 public:

  enum PPC64_specific_constants {
    instruction_size = 8
  };

  address instruction_address() const { return addr_at(0); }

  int num_bytes_to_end_of_patch() const { return instruction_size; }

  intptr_t offset() const {
#ifdef VM_LITTLE_ENDIAN
    short *hi_ptr = (short*)(addr_at(0));
    short *lo_ptr = (short*)(addr_at(4));
#else
    short *hi_ptr = (short*)(addr_at(0) + 2);
    short *lo_ptr = (short*)(addr_at(4) + 2);
#endif
    return ((*hi_ptr) << 16) | ((*lo_ptr) & 0xFFFF);
  }

  void set_offset(intptr_t x) {
#ifdef VM_LITTLE_ENDIAN
    short *hi_ptr = (short*)(addr_at(0));
    short *lo_ptr = (short*)(addr_at(4));
#else
    short *hi_ptr = (short*)(addr_at(0) + 2);
    short *lo_ptr = (short*)(addr_at(4) + 2);
#endif
    *hi_ptr = x >> 16;
    *lo_ptr = x & 0xFFFF;
    ICache::ppc64_flush_icache_bytes(addr_at(0), NativeMovRegMem::instruction_size);
  }

  void add_offset_in_bytes(intptr_t radd_offset) {
    set_offset(offset() + radd_offset);
  }

  void verify() const {
    guarantee(Assembler::is_lis(long_at(0)), "load_const32 1st instr");
    guarantee(Assembler::is_ori(long_at(4)), "load_const32 2nd instr");
  }

 private:
  friend inline NativeMovRegMem* nativeMovRegMem_at(address address) {
    NativeMovRegMem* test = (NativeMovRegMem*)address;
    DEBUG_ONLY( test->verify(); )
    return test;
  }
};
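
// Worked example (illustrative only): a load_const32 of 0x12345678 is a lis
// carrying the high halfword 0x1234 followed by an ori carrying the low
// halfword 0x5678. offset() splices the halves back together as
// (0x1234 << 16) | 0x5678 = 0x12345678, and set_offset() writes the two
// halfwords back and flushes the icache for the 8 patched bytes.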

#endif // CPU_PPC_VM_NATIVEINST_PPC_HPP