1 /*
2 * Cisco router simulation platform.
3 * Copyright (c) 2005,2006 Christophe Fillot (cf@utc.fr)
4 */
5
6 #include <stdio.h>
7 #include <stdlib.h>
8 #include <unistd.h>
9 #include <string.h>
10 #include <sys/types.h>
11 #include <sys/stat.h>
12 #include <fcntl.h>
13
14 #include "cpu.h"
15 #include "jit_op.h"
16 #include "ppc32_jit.h"
17 #include "ppc32_amd64_trans.h"
18 #include "memory.h"
19
/* Macros for CPU structure access */

/* Byte offset of GPR 'reg' inside cpu_ppc_t (cpu base is kept in %r15) */
#define REG_OFFSET(reg) (OFFSET(cpu_ppc_t,gpr[(reg)]))

/* Byte offset of the memory-operation handler 'op' inside cpu_ppc_t */
#define MEMOP_OFFSET(op) (OFFSET(cpu_ppc_t,mem_op_fn[(op)]))

/* Declare the JIT emitter for PPC instruction 'name' */
#define DECLARE_INSN(name) \
   static int ppc32_emit_##name(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b, \
                                ppc_insn_t insn)
27
/* EFLAGS to Condition Register (CR) field - signed
 *
 * Indexed by (EFLAGS >> 6) & 0x3F (see ppc32_update_cr), so bit 0 of the
 * index is ZF, bit 1 is SF and bit 5 is OF.  Values are PPC CR nibbles:
 * 0x08 = LT, 0x04 = GT, 0x02 = EQ (LT is taken when SF != OF, i.e. a
 * signed "below" result).
 */
static m_uint32_t eflags_to_cr_signed[64] = {
   0x04, 0x02, 0x08, 0x02, 0x04, 0x02, 0x08, 0x02,
   0x04, 0x02, 0x08, 0x02, 0x04, 0x02, 0x08, 0x02,
   0x04, 0x02, 0x08, 0x02, 0x04, 0x02, 0x08, 0x02,
   0x04, 0x02, 0x08, 0x02, 0x04, 0x02, 0x08, 0x02,
   0x08, 0x02, 0x04, 0x02, 0x08, 0x02, 0x04, 0x02,
   0x08, 0x02, 0x04, 0x02, 0x08, 0x02, 0x04, 0x02,
   0x08, 0x02, 0x04, 0x02, 0x08, 0x02, 0x04, 0x02,
   0x08, 0x02, 0x04, 0x02, 0x08, 0x02, 0x04, 0x02,
};
39
/* EFLAGS to Condition Register (CR) field - unsigned
 *
 * Indexed by the low byte of EFLAGS (see ppc32_update_cr), so bit 0 of
 * the index is CF and bit 6 is ZF.  Values are PPC CR nibbles:
 * 0x08 = LT (CF set, i.e. unsigned "below"), 0x04 = GT, 0x02 = EQ.
 */
static m_uint32_t eflags_to_cr_unsigned[256] = {
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
};
75
76 /* Load a 32 bit immediate value */
ppc32_load_imm(u_char ** ptr,u_int reg,m_uint32_t val)77 static inline void ppc32_load_imm(u_char **ptr,u_int reg,m_uint32_t val)
78 {
79 if (val)
80 amd64_mov_reg_imm_size(*ptr,reg,val,4);
81 else
82 amd64_alu_reg_reg_size(*ptr,X86_XOR,reg,reg,4);
83 }
84
/* Set the Instruction Address (IA) register.
 * Emits "mov dword [cpu->ia], new_ia" — %r15 holds the cpu_ppc_t pointer.
 */
void ppc32_set_ia(u_char **ptr,m_uint32_t new_ia)
{
   amd64_mov_membase_imm(*ptr,AMD64_R15,OFFSET(cpu_ppc_t,ia),new_ia,4);
}
90
/* Set the Link Register (LR).
 * Emits "mov dword [cpu->lr], new_lr" into the op's output buffer.
 */
static void ppc32_set_lr(jit_op_t *iop,m_uint32_t new_lr)
{
   amd64_mov_membase_imm(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,lr),new_lr,4);
}
96
97 /*
98 * Try to branch directly to the specified JIT block without returning to
99 * main loop.
100 */
ppc32_try_direct_far_jump(cpu_ppc_t * cpu,jit_op_t * iop,m_uint32_t new_ia)101 static void ppc32_try_direct_far_jump(cpu_ppc_t *cpu,jit_op_t *iop,
102 m_uint32_t new_ia)
103 {
104 m_uint32_t new_page,ia_hash,ia_offset;
105 u_char *test1,*test2,*test3;
106
107 /* Indicate that we throw %rbx, %rdx */
108 ppc32_op_emit_alter_host_reg(cpu,AMD64_RBX);
109 ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);
110 ppc32_op_emit_alter_host_reg(cpu,AMD64_RSI);
111
112 new_page = new_ia & PPC32_MIN_PAGE_MASK;
113 ia_offset = (new_ia & PPC32_MIN_PAGE_IMASK) >> 2;
114 ia_hash = ppc32_jit_get_ia_hash(new_ia);
115
116 /* Get JIT block info in %rdx */
117 amd64_mov_reg_membase(iop->ob_ptr,AMD64_RBX,
118 AMD64_R15,OFFSET(cpu_ppc_t,exec_blk_map),8);
119 amd64_mov_reg_membase(iop->ob_ptr,AMD64_RDX,
120 AMD64_RBX,ia_hash*sizeof(void *),8);
121
122 /* no JIT block found ? */
123 amd64_test_reg_reg(iop->ob_ptr,AMD64_RDX,AMD64_RDX);
124 test1 = iop->ob_ptr;
125 amd64_branch8(iop->ob_ptr, X86_CC_Z, 0, 1);
126
127 /* Check block IA */
128 ppc32_load_imm(&iop->ob_ptr,AMD64_RSI,new_page);
129 amd64_alu_reg_membase_size(iop->ob_ptr,X86_CMP,AMD64_RAX,AMD64_RDX,
130 OFFSET(ppc32_jit_tcb_t,start_ia),4);
131 test2 = iop->ob_ptr;
132 amd64_branch8(iop->ob_ptr, X86_CC_NE, 0, 1);
133
134 /* Jump to the code */
135 amd64_mov_reg_membase(iop->ob_ptr,AMD64_RSI,
136 AMD64_RDX,OFFSET(ppc32_jit_tcb_t,jit_insn_ptr),8);
137 amd64_mov_reg_membase(iop->ob_ptr,AMD64_RBX,
138 AMD64_RSI,ia_offset * sizeof(void *),8);
139
140 amd64_test_reg_reg(iop->ob_ptr,AMD64_RBX,AMD64_RBX);
141 test3 = iop->ob_ptr;
142 amd64_branch8(iop->ob_ptr, X86_CC_Z, 0, 1);
143 amd64_jump_reg(iop->ob_ptr,AMD64_RBX);
144
145 /* Returns to caller... */
146 amd64_patch(test1,iop->ob_ptr);
147 amd64_patch(test2,iop->ob_ptr);
148 amd64_patch(test3,iop->ob_ptr);
149
150 ppc32_set_ia(&iop->ob_ptr,new_ia);
151 ppc32_jit_tcb_push_epilog(&iop->ob_ptr);
152 }
153
/* Set Jump.
 * Emits the best available jump to new_ia:
 *  - a direct (patched later) jump when the target lies in the same
 *    translated block;
 *  - a hashed block lookup (ppc32_try_direct_far_jump) when the CPU has
 *    the direct-jump optimization enabled;
 *  - otherwise, set cpu->ia and return to the main loop.
 */
static void ppc32_set_jump(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b,jit_op_t *iop,
                           m_uint32_t new_ia,int local_jump)
{
   int return_to_caller = FALSE;
   u_char *jump_ptr;

#if 0
   if (cpu->sym_trace && !local_jump)
      return_to_caller = TRUE;
#endif

   if (!return_to_caller && ppc32_jit_tcb_local_addr(b,new_ia,&jump_ptr)) {
      /* Local jump: record a patch site, the final target address is
       * resolved once the whole block has been translated. */
      ppc32_jit_tcb_record_patch(b,iop,iop->ob_ptr,new_ia);
      amd64_jump32(iop->ob_ptr,0);
   } else {
      if (cpu->exec_blk_direct_jump) {
         /* Block lookup optimization */
         ppc32_try_direct_far_jump(cpu,iop,new_ia);
      } else {
         ppc32_set_ia(&iop->ob_ptr,new_ia);
         ppc32_jit_tcb_push_epilog(&iop->ob_ptr);
      }
   }
}
179
/* Jump to the next page.
 * Builds a throw-away op list to emit the jump to the first instruction
 * of the page following this block, then frees it.
 */
void ppc32_set_page_jump(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b)
{
   jit_op_t *iop,*op_list = NULL;

   /* temporarily redirect op emission into a private list */
   cpu->gen->jit_op_current = &op_list;

   iop = ppc32_op_emit_insn_output(cpu,4,"set_page_jump");
   ppc32_set_jump(cpu,b,iop,b->start_ia + PPC32_MIN_PAGE_SIZE,FALSE);
   ppc32_op_insn_output(b,iop);

   jit_op_free_list(cpu->gen,op_list);
   cpu->gen->jit_op_current = NULL;
}
194
/* Load a GPR into the specified host register (32-bit move from the
 * cpu_ppc_t GPR array addressed through %r15). */
static forced_inline void ppc32_load_gpr(u_char **ptr,u_int host_reg,
                                         u_int ppc_reg)
{
   amd64_mov_reg_membase(*ptr,host_reg,AMD64_R15,REG_OFFSET(ppc_reg),4);
}
201
/* Store contents for a host register into a GPR register (32-bit move
 * into the cpu_ppc_t GPR array addressed through %r15). */
static forced_inline void ppc32_store_gpr(u_char **ptr,u_int ppc_reg,
                                          u_int host_reg)
{
   amd64_mov_membase_reg(*ptr,AMD64_R15,REG_OFFSET(ppc_reg),host_reg,4);
}
208
/* Apply an ALU operation on a GPR register and a host register:
 * host_reg = host_reg <op> GPR[ppc_reg] (32-bit, sets host EFLAGS). */
static forced_inline void ppc32_alu_gpr(u_char **ptr,u_int op,
                                        u_int host_reg,u_int ppc_reg)
{
   amd64_alu_reg_membase_size(*ptr,op,host_reg,
                              AMD64_R15,REG_OFFSET(ppc_reg),4);
}
216
/*
 * Update CR from %eflags.
 * %rax and %rdx are modified (see ppc32_update_cr_set_altered_hreg).
 *
 * The host EFLAGS (from the instruction emitted just before this
 * sequence) are captured via pushf/pop, reduced to a table index, and
 * translated to a 4-bit PPC CR value through one of the lookup tables
 * above.  The result is stored into the requested CR field.
 */
static void ppc32_update_cr(ppc32_jit_tcb_t *b,int field,int is_signed)
{
   /* Get status bits from EFLAGS */
   amd64_pushfd_size(b->jit_ptr,8);
   amd64_pop_reg(b->jit_ptr,AMD64_RAX);

   if (!is_signed) {
      /* index = EFLAGS & 0xFF (CF in bit 0, ZF in bit 6) */
      amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RAX,0xFF);
      amd64_mov_reg_imm_size(b->jit_ptr,AMD64_RDX,eflags_to_cr_unsigned,8);
   } else {
      /* index = (EFLAGS >> 6) & 0x3F (ZF in bit 0, SF in bit 1, OF in bit 5) */
      amd64_shift_reg_imm(b->jit_ptr,X86_SHR,AMD64_RAX,6);
      amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RAX,0x3F);
      amd64_mov_reg_imm_size(b->jit_ptr,AMD64_RDX,eflags_to_cr_signed,8);
   }

   /* %eax = table[index] (scale 2^2 = 4-byte entries) */
   amd64_mov_reg_memindex(b->jit_ptr,AMD64_RAX,AMD64_RDX,0,AMD64_RAX,2,4);

#if 0
   /* Check XER Summary of Overflow and report it */
   amd64_mov_reg_membase(b->jit_ptr,AMD64_RCX,
                         AMD64_R15,OFFSET(cpu_ppc_t,xer),4);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RCX,PPC32_XER_SO);
   amd64_shift_reg_imm(b->jit_ptr,X86_SHR,AMD64_RCX,(field << 2) + 3);
   amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RDX,AMD64_RCX);
#endif

   /* Store modified CR field */
   amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,PPC32_CR_FIELD_OFFSET(field),
                         AMD64_RAX,4);
}
251
/*
 * Update CR0 from %eflags (signed comparison semantics).
 * %rax and %rdx are modified (see ppc32_update_cr).
 */
static void ppc32_update_cr0(ppc32_jit_tcb_t *b)
{
   ppc32_update_cr(b,0,TRUE);
}
260
/* Indicate registers modified by ppc32_update_cr() functions, so the
 * register allocator spills any PPC register cached in %rax/%rdx. */
void ppc32_update_cr_set_altered_hreg(cpu_ppc_t *cpu)
{
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RAX);
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);
}
267
/* Basic C call: load the function address into %rbx and call it
 * (indirect call, so any 64-bit target address works). */
static forced_inline void ppc32_emit_basic_c_call(u_char **ptr,void *f)
{
   amd64_mov_reg_imm(*ptr,AMD64_RBX,f);
   amd64_call_reg(*ptr,AMD64_RBX);
}
274
/* Emit a simple call to a C function without any parameter.
 * cpu->ia is synchronized to the current instruction first, so the
 * callee observes a consistent PC (e.g. for exception handling). */
static void ppc32_emit_c_call(ppc32_jit_tcb_t *b,jit_op_t *iop,void *f)
{
   ppc32_set_ia(&iop->ob_ptr,b->start_ia+(b->ppc_trans_pos << 2));
   ppc32_emit_basic_c_call(&iop->ob_ptr,f);
}
281
282 /* ======================================================================== */
283
284 /* Initialize register mapping */
ppc32_jit_init_hreg_mapping(cpu_ppc_t * cpu)285 void ppc32_jit_init_hreg_mapping(cpu_ppc_t *cpu)
286 {
287 int avail_hregs[] = { AMD64_RSI, AMD64_RAX, AMD64_RCX, AMD64_RDX,
288 AMD64_R13, AMD64_R14, AMD64_RDI, -1 };
289 struct hreg_map *map;
290 int i,hreg;
291
292 cpu->hreg_map_list = cpu->hreg_lru = NULL;
293
294 /* Add the available registers to the map list */
295 for(i=0;avail_hregs[i]!=-1;i++) {
296 hreg = avail_hregs[i];
297 map = &cpu->hreg_map[hreg];
298
299 /* Initialize mapping. At the beginning, no PPC reg is mapped */
300 map->flags = 0;
301 map->hreg = hreg;
302 map->vreg = -1;
303 ppc32_jit_insert_hreg_mru(cpu,map);
304 }
305
306 /* Clear PPC registers mapping */
307 for(i=0;i<PPC32_GPR_NR;i++)
308 cpu->ppc_reg_map[i] = -1;
309 }
310
311 /* Allocate a specific temp register */
ppc32_jit_get_tmp_hreg(cpu_ppc_t * cpu)312 static int ppc32_jit_get_tmp_hreg(cpu_ppc_t *cpu)
313 {
314 return(AMD64_RBX);
315 }
316
317 /* ======================================================================== */
318 /* JIT operations (specific to target CPU). */
319 /* ======================================================================== */
320
321 /* INSN_OUTPUT */
ppc32_op_insn_output(ppc32_jit_tcb_t * b,jit_op_t * op)322 void ppc32_op_insn_output(ppc32_jit_tcb_t *b,jit_op_t *op)
323 {
324 op->ob_final = b->jit_ptr;
325 memcpy(b->jit_ptr,op->ob_data,op->ob_ptr - op->ob_data);
326 b->jit_ptr += op->ob_ptr - op->ob_data;
327 }
328
329 /* LOAD_GPR: p[0] = %host_reg, p[1] = %ppc_reg */
ppc32_op_load_gpr(ppc32_jit_tcb_t * b,jit_op_t * op)330 void ppc32_op_load_gpr(ppc32_jit_tcb_t *b,jit_op_t *op)
331 {
332 if (op->param[0] != JIT_OP_INV_REG)
333 ppc32_load_gpr(&b->jit_ptr,op->param[0],op->param[1]);
334 }
335
336 /* STORE_GPR: p[0] = %host_reg, p[1] = %ppc_reg */
ppc32_op_store_gpr(ppc32_jit_tcb_t * b,jit_op_t * op)337 void ppc32_op_store_gpr(ppc32_jit_tcb_t *b,jit_op_t *op)
338 {
339 if (op->param[0] != JIT_OP_INV_REG)
340 ppc32_store_gpr(&b->jit_ptr,op->param[1],op->param[0]);
341 }
342
343 /* UPDATE_FLAGS: p[0] = cr_field, p[1] = is_signed */
ppc32_op_update_flags(ppc32_jit_tcb_t * b,jit_op_t * op)344 void ppc32_op_update_flags(ppc32_jit_tcb_t *b,jit_op_t *op)
345 {
346 if (op->param[0] != JIT_OP_INV_REG)
347 ppc32_update_cr(b,op->param[0],op->param[1]);
348 }
349
350 /* MOVE_HOST_REG: p[0] = %host_dst_reg, p[1] = %host_src_reg */
ppc32_op_move_host_reg(ppc32_jit_tcb_t * b,jit_op_t * op)351 void ppc32_op_move_host_reg(ppc32_jit_tcb_t *b,jit_op_t *op)
352 {
353 if ((op->param[0] != JIT_OP_INV_REG) && (op->param[1] != JIT_OP_INV_REG))
354 amd64_mov_reg_reg(b->jit_ptr,op->param[0],op->param[1],4);
355 }
356
357 /* SET_HOST_REG_IMM32: p[0] = %host_reg, p[1] = imm32 */
ppc32_op_set_host_reg_imm32(ppc32_jit_tcb_t * b,jit_op_t * op)358 void ppc32_op_set_host_reg_imm32(ppc32_jit_tcb_t *b,jit_op_t *op)
359 {
360 if (op->param[0] != JIT_OP_INV_REG)
361 ppc32_load_imm(&b->jit_ptr,op->param[0],op->param[1]);
362 }
363
364 /* ======================================================================== */
365
/* Memory operation with a 16-bit signed displacement:
 * calls cpu->mem_op_fn[op](cpu, vaddr, target) with
 * vaddr = GPR[base] + sign_extend(offset).  When 'update' is set, the
 * effective address is written back to GPR[base] (lbzu/stwu & co). */
static void ppc32_emit_memop(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b,
                             int op,int base,int offset,int target,int update)
{
   m_uint32_t val = sign_extend(offset,16);
   jit_op_t *iop;

   /*
    * Since an exception can be triggered, clear JIT state. This allows
    * to use branch target tag (we can directly branch on this instruction).
    */
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_BRANCH_TARGET);
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);

   iop = ppc32_op_emit_insn_output(cpu,5,"memop");

   /* Save PC for exception handling */
   ppc32_set_ia(&iop->ob_ptr,b->start_ia+(b->ppc_trans_pos << 2));

   /* RSI = sign-extended offset */
   ppc32_load_imm(&iop->ob_ptr,AMD64_RSI,val);

   /* RSI = GPR[base] + sign-extended offset */
   if (update || (base != 0))
      ppc32_alu_gpr(&iop->ob_ptr,X86_ADD,AMD64_RSI,base);

   /* keep the effective address for the post-call write-back */
   if (update)
      amd64_mov_reg_reg(iop->ob_ptr,AMD64_R14,AMD64_RSI,4);

   /* RDX = target register */
   amd64_mov_reg_imm(iop->ob_ptr,AMD64_RDX,target);

   /* RDI = CPU instance pointer */
   amd64_mov_reg_reg(iop->ob_ptr,AMD64_RDI,AMD64_R15,8);

   /* Call memory function */
   amd64_call_membase(iop->ob_ptr,AMD64_R15,MEMOP_OFFSET(op));

   /* write the effective address back into GPR[base] */
   if (update)
      ppc32_store_gpr(&iop->ob_ptr,base,AMD64_R14);
}
407
/* Memory operation (indexed form):
 * same as ppc32_emit_memop but the effective address is
 * GPR[ra] + GPR[rb] (lbzx/stwx & co).  When 'update' is set, the
 * effective address is written back to GPR[ra]. */
static void ppc32_emit_memop_idx(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b,
                                 int op,int ra,int rb,int target,int update)
{
   jit_op_t *iop;

   /*
    * Since an exception can be triggered, clear JIT state. This allows
    * to use branch target tag (we can directly branch on this instruction).
    */
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_BRANCH_TARGET);
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);

   iop = ppc32_op_emit_insn_output(cpu,5,"memop_idx");

   /* Save PC for exception handling */
   ppc32_set_ia(&iop->ob_ptr,b->start_ia+(b->ppc_trans_pos << 2));

   /* RSI = $rb */
   ppc32_load_gpr(&iop->ob_ptr,AMD64_RSI,rb);

   /* RSI = GPR[ra] + GPR[rb] (ra == 0 means "no base" unless updating) */
   if (update || (ra != 0))
      ppc32_alu_gpr(&iop->ob_ptr,X86_ADD,AMD64_RSI,ra);

   /* keep the effective address for the post-call write-back */
   if (update)
      amd64_mov_reg_reg(iop->ob_ptr,AMD64_R14,AMD64_RSI,4);

   /* RDX = target register */
   amd64_mov_reg_imm(iop->ob_ptr,AMD64_RDX,target);

   /* RDI = CPU instance pointer */
   amd64_mov_reg_reg(iop->ob_ptr,AMD64_RDI,AMD64_R15,8);

   /* Call memory function */
   amd64_call_membase(iop->ob_ptr,AMD64_R15,MEMOP_OFFSET(op));

   /* write the effective address back into GPR[ra] */
   if (update)
      ppc32_store_gpr(&iop->ob_ptr,ra,AMD64_R14);
}
448
/* Fast-path handler: emits the actual access once RBX holds the host
 * page address and RSI the offset in page (see ppc32_emit_memop_fast). */
typedef void (*memop_fast_access)(jit_op_t *iop,int target);

/* Fast LBZ: zero-extended byte load from [RBX+RSI] into GPR[target] */
static void ppc32_memop_fast_lbz(jit_op_t *iop,int target)
{
   amd64_clear_reg(iop->ob_ptr,AMD64_RCX);
   amd64_mov_reg_memindex(iop->ob_ptr,AMD64_RCX,AMD64_RBX,0,AMD64_RSI,0,1);
   ppc32_store_gpr(&iop->ob_ptr,target,AMD64_RCX);
}
458
/* Fast STB: byte store of GPR[target] to [RBX+RSI] */
static void ppc32_memop_fast_stb(jit_op_t *iop,int target)
{
   ppc32_load_gpr(&iop->ob_ptr,AMD64_RDX,target);
   amd64_mov_memindex_reg(iop->ob_ptr,AMD64_RBX,0,AMD64_RSI,0,AMD64_RDX,1);
}
465
/* Fast LWZ: 32-bit load from [RBX+RSI] into GPR[target].
 * bswap converts the big-endian guest value to host little-endian. */
static void ppc32_memop_fast_lwz(jit_op_t *iop,int target)
{
   amd64_mov_reg_memindex(iop->ob_ptr,AMD64_RAX,AMD64_RBX,0,AMD64_RSI,0,4);
   amd64_bswap32(iop->ob_ptr,AMD64_RAX);
   ppc32_store_gpr(&iop->ob_ptr,target,AMD64_RAX);
}
473
/* Fast STW: 32-bit store of GPR[target] to [RBX+RSI].
 * bswap converts the host little-endian value to big-endian guest order. */
static void ppc32_memop_fast_stw(jit_op_t *iop,int target)
{
   ppc32_load_gpr(&iop->ob_ptr,AMD64_RDX,target);
   amd64_bswap32(iop->ob_ptr,AMD64_RDX);
   amd64_mov_memindex_reg(iop->ob_ptr,AMD64_RBX,0,AMD64_RSI,0,AMD64_RDX,4);
}
481
/* Fast memory operation.
 * Emits an inline MTS32 data-cache probe: if the cached entry matches
 * the target virtual page (and, for writes, the page is not COW/EXEC
 * protected), the access is performed directly on the host page via
 * 'op_handler'; otherwise it falls back to the generic C memory handler
 * identified by 'opcode'.  Clobbers %rax, %rbx, %rcx, %rdx, %rsi, %rdi.
 */
static void ppc32_emit_memop_fast(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b,
                                  int write_op,int opcode,
                                  int base,int offset,int target,
                                  memop_fast_access op_handler)
{
   m_uint32_t val = sign_extend(offset,16);
   u_char *test1,*test2,*p_exit;
   jit_op_t *iop;

   /*
    * Since an exception can be triggered, clear JIT state. This allows
    * to use branch target tag (we can directly branch on this instruction).
    */
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_BRANCH_TARGET);
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);

   iop = ppc32_op_emit_insn_output(cpu,5,"memop_fast");

   test2 = NULL;

   /* RSI = GPR[base] + sign-extended offset */
   ppc32_load_imm(&iop->ob_ptr,AMD64_RSI,val);
   if (base != 0)
      ppc32_alu_gpr(&iop->ob_ptr,X86_ADD,AMD64_RSI,base);

   /* RBX = mts32_entry index (hash of the virtual address) */
   amd64_mov_reg_reg_size(iop->ob_ptr,X86_EBX,X86_ESI,4);
   amd64_shift_reg_imm_size(iop->ob_ptr,X86_SHR,X86_EBX,MTS32_HASH_SHIFT,4);
   amd64_alu_reg_imm_size(iop->ob_ptr,X86_AND,X86_EBX,MTS32_HASH_MASK,4);

   /* RCX = mts32 entry (dcache base + index * entry size) */
   amd64_mov_reg_membase(iop->ob_ptr,AMD64_RCX,
                         AMD64_R15,
                         OFFSET(cpu_ppc_t,mts_cache[PPC32_MTS_DCACHE]),8);
   /* entry size hard-coded as 32 bytes (1 << 5) */
   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,AMD64_RBX,5); /* TO FIX */
   amd64_alu_reg_reg(iop->ob_ptr,X86_ADD,AMD64_RCX,AMD64_RBX);

   /* Compare virtual page address (EAX = vpage) */
   amd64_mov_reg_reg(iop->ob_ptr,X86_EAX,X86_ESI,4);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,X86_EAX,PPC32_MIN_PAGE_MASK);

   amd64_alu_reg_membase_size(iop->ob_ptr,X86_CMP,X86_EAX,AMD64_RCX,
                              OFFSET(mts32_entry_t,gvpa),4);
   test1 = iop->ob_ptr;
   amd64_branch8(iop->ob_ptr, X86_CC_NZ, 0, 1);

   /* Test if we are writing to a COW page */
   if (write_op) {
      amd64_test_membase_imm_size(iop->ob_ptr,
                                  AMD64_RCX,OFFSET(mts32_entry_t,flags),
                                  MTS_FLAG_COW|MTS_FLAG_EXEC,4);
      test2 = iop->ob_ptr;
      amd64_branch8(iop->ob_ptr, X86_CC_NZ, 0, 1);
   }

   /* ESI = offset in page, RBX = Host Page Address */
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,X86_ESI,PPC32_MIN_PAGE_IMASK);
   amd64_mov_reg_membase(iop->ob_ptr,AMD64_RBX,
                         AMD64_RCX,OFFSET(mts32_entry_t,hpa),8);

   /* Memory access */
   op_handler(iop,target);

   p_exit = iop->ob_ptr;
   amd64_jump8(iop->ob_ptr,0);

   /* === Slow lookup === */
   amd64_patch(test1,iop->ob_ptr);
   if (test2)
      amd64_patch(test2,iop->ob_ptr);

   /* Save IA for exception handling */
   ppc32_set_ia(&iop->ob_ptr,b->start_ia+(b->ppc_trans_pos << 2));

   /* RDX = target register */
   amd64_mov_reg_imm(iop->ob_ptr,AMD64_RDX,target);

   /* RDI = CPU instance */
   amd64_mov_reg_reg(iop->ob_ptr,AMD64_RDI,AMD64_R15,8);

   /* Call memory access function (RSI still holds the full vaddr here) */
   amd64_call_membase(iop->ob_ptr,AMD64_R15,MEMOP_OFFSET(opcode));

   amd64_patch(p_exit,iop->ob_ptr);
}
568
/* Emit unhandled instruction code.
 * Falls back to the interpreter (ppc32_exec_single_insn_ext) for this
 * opcode; if the interpreter returns non-zero, the emitted code exits
 * the JIT block through the epilogue (presumably a taken branch or an
 * exception — behavior defined by the callee, not visible here).
 */
static int ppc32_emit_unknown(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b,
                              ppc_insn_t opcode)
{
   u_char *test1;
   jit_op_t *iop;

   iop = ppc32_op_emit_insn_output(cpu,3,"unknown");

   /* Update IA */
   ppc32_set_ia(&iop->ob_ptr,b->start_ia+(b->ppc_trans_pos << 2));

   /* Fallback to non-JIT mode: args are (cpu, opcode) */
   amd64_mov_reg_reg(iop->ob_ptr,AMD64_RDI,AMD64_R15,8);
   amd64_mov_reg_imm(iop->ob_ptr,AMD64_RSI,opcode);

   ppc32_emit_c_call(b,iop,ppc32_exec_single_insn_ext);
   amd64_test_reg_reg_size(iop->ob_ptr,AMD64_RAX,AMD64_RAX,4);
   test1 = iop->ob_ptr;
   amd64_branch8(iop->ob_ptr, X86_CC_Z, 0, 1);
   ppc32_jit_tcb_push_epilog(&iop->ob_ptr);

   amd64_patch(test1,iop->ob_ptr);

   /* Signal this as an EOB to reset JIT state */
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
   return(0);
}
597
/* Virtual Breakpoint: emit a call to the breakpoint handler with the
 * CPU instance as only argument. */
void ppc32_emit_breakpoint(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b)
{
   jit_op_t *iop;

   iop = ppc32_op_emit_insn_output(cpu,2,"breakpoint");

   amd64_mov_reg_reg(iop->ob_ptr,AMD64_RDI,AMD64_R15,8);
   ppc32_emit_c_call(b,iop,ppc32_run_breakpoint);

   /* Signal this as an EOB to reset JIT state */
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
}
611
/* Increment the number of executed instructions (performance debugging):
 * emits a single 32-bit "inc dword [cpu->perf_counter]". */
void ppc32_inc_perf_counter(cpu_ppc_t *cpu)
{
   jit_op_t *iop;

   iop = ppc32_op_emit_insn_output(cpu,1,"perf_cnt");
   amd64_inc_membase_size(iop->ob_ptr,
                          AMD64_R15,OFFSET(cpu_ppc_t,perf_counter),4);
}
621
622 /* ======================================================================== */
623
/* BLR - Branch to Link Register.
 * ia = lr; if LK (insn bit 0) is set (blrl), lr = next instruction
 * address — note LR is read before being rewritten. */
DECLARE_INSN(BLR)
{
   jit_op_t *iop;
   int hreg;

   ppc32_jit_start_hreg_seq(cpu,"blr");
   hreg = ppc32_jit_alloc_hreg(cpu,-1);
   ppc32_op_emit_alter_host_reg(cpu,hreg);

   iop = ppc32_op_emit_insn_output(cpu,2,"blr");

   /* ia = lr */
   amd64_mov_reg_membase(iop->ob_ptr,hreg,AMD64_R15,OFFSET(cpu_ppc_t,lr),4);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ia),hreg,4);

   /* set the return address */
   if (insn & 1)
      ppc32_set_lr(iop,b->start_ia + ((b->ppc_trans_pos+1) << 2));

   /* indirect branch: exit to the main loop and end the block */
   ppc32_jit_tcb_push_epilog(&iop->ob_ptr);
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
   ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1) << 2));

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
650
/* BCTR - Branch to Count Register.
 * ia = ctr; if LK (insn bit 0) is set (bctrl), lr = next instruction
 * address. */
DECLARE_INSN(BCTR)
{
   jit_op_t *iop;
   int hreg;

   ppc32_jit_start_hreg_seq(cpu,"bctr");
   hreg = ppc32_jit_alloc_hreg(cpu,-1);
   ppc32_op_emit_alter_host_reg(cpu,hreg);

   iop = ppc32_op_emit_insn_output(cpu,2,"bctr");

   /* ia = ctr */
   amd64_mov_reg_membase(iop->ob_ptr,hreg,AMD64_R15,OFFSET(cpu_ppc_t,ctr),4);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ia),hreg,4);

   /* set the return address */
   if (insn & 1)
      ppc32_set_lr(iop,b->start_ia + ((b->ppc_trans_pos+1) << 2));

   /* indirect branch: exit to the main loop and end the block */
   ppc32_jit_tcb_push_epilog(&iop->ob_ptr);
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
   ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1) << 2));

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
677
/* MFLR - Move From Link Register: $rd = lr */
DECLARE_INSN(MFLR)
{
   int rd = bits(insn,21,25);
   int hreg_rd;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"mflr");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
   iop = ppc32_op_emit_insn_output(cpu,1,"mflr");

   amd64_mov_reg_membase(iop->ob_ptr,hreg_rd,AMD64_R15,OFFSET(cpu_ppc_t,lr),4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
695
696 /* MTLR - Move To Link Register */
DECLARE_INSN(MTLR)697 DECLARE_INSN(MTLR)
698 {
699 int rs = bits(insn,21,25);
700 int hreg_rs;
701 jit_op_t *iop;
702
703 ppc32_jit_start_hreg_seq(cpu,"mtlr");
704 hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
705 ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
706
707 iop = ppc32_op_emit_insn_output(cpu,1,"mtlr");
708 amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,lr),hreg_rs,4);
709 return(0);
710 }
711
/* MFCTR - Move From Counter Register: $rd = ctr */
DECLARE_INSN(MFCTR)
{
   int rd = bits(insn,21,25);
   int hreg_rd;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"mfctr");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   iop = ppc32_op_emit_insn_output(cpu,1,"mfctr");

   amd64_mov_reg_membase(iop->ob_ptr,hreg_rd,
                         AMD64_R15,OFFSET(cpu_ppc_t,ctr),4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
731
/* MTCTR - Move To Counter Register: ctr = $rs */
DECLARE_INSN(MTCTR)
{
   int rs = bits(insn,21,25);
   int hreg_rs;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"mtctr");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,1,"mtctr");

   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ctr),
                         hreg_rs,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
751
/* MFTBU - Move from Time Base (Up): $rd = high 32 bits of cpu->tb.
 * The +4 offset selects the upper word — assumes a little-endian host
 * layout of the 64-bit tb field. */
DECLARE_INSN(MFTBU)
{
   int rd = bits(insn,21,25);
   int hreg_rd;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"mftbu");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   iop = ppc32_op_emit_insn_output(cpu,1,"mftbu");

   amd64_mov_reg_membase(iop->ob_ptr,hreg_rd,
                         AMD64_R15,OFFSET(cpu_ppc_t,tb)+4,4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
771
/* Amount added to the time base on each MFTBL (crude time emulation) */
#define PPC32_TB_INCREMENT  50

/* MFTBL - Move from Time Base (Lo):
 * advances the full 64-bit cpu->tb by PPC32_TB_INCREMENT, then returns
 * its low 32 bits in $rd (only the low half survives the 4-byte store). */
DECLARE_INSN(MFTBL)
{
   int rd = bits(insn,21,25);
   int hreg_rd;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"mftbl");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   iop = ppc32_op_emit_insn_output(cpu,3,"mftbl");

   /* read-modify-write the 64-bit time base through the host register */
   amd64_mov_reg_membase(iop->ob_ptr,hreg_rd,
                         AMD64_R15,OFFSET(cpu_ppc_t,tb),8);
   amd64_alu_reg_imm(iop->ob_ptr,X86_ADD,hreg_rd,PPC32_TB_INCREMENT);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,tb),
                         hreg_rd,8);

   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
797
798 /* ADD */
DECLARE_INSN(ADD)799 DECLARE_INSN(ADD)
800 {
801 int rd = bits(insn,21,25);
802 int ra = bits(insn,16,20);
803 int rb = bits(insn,11,15);
804 int hreg_rd,hreg_ra,hreg_rb;
805 jit_op_t *iop;
806
807 /* $rd = $ra + $rb */
808 ppc32_jit_start_hreg_seq(cpu,"add");
809 hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
810 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
811 hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);
812
813 ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
814 ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);
815
816 iop = ppc32_op_emit_insn_output(cpu,2,"add");
817
818 if (rd == ra)
819 amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_rd,hreg_rb,4);
820 else if (rd == rb)
821 amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_rd,hreg_ra,4);
822 else {
823 amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);
824 amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_rd,hreg_rb,4);
825 }
826
827 ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);
828
829 if (insn & 1)
830 ppc32_op_emit_update_flags(cpu,0,TRUE);
831
832 ppc32_jit_close_hreg_seq(cpu);
833 return(0);
834 }
835
/* ADDC: $rd = $ra + $rb, capturing the carry into XER[CA]; optionally
 * updates CR0 when Rc (insn bit 0) is set. */
DECLARE_INSN(ADDC)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rd,hreg_ra,hreg_rb,hreg_t0;
   jit_op_t *iop;

   /* $rd = $ra + $rb */
   ppc32_jit_start_hreg_seq(cpu,"addc");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   /* scratch register used to store the carry flag */
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);

   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,2,"addc");

   /* pick the 2-operand x86 form that leaves the result in hreg_rd */
   if (rd == ra)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_rd,hreg_rb,4);
   else if (rd == rb)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_rd,hreg_ra,4);
   else {
      amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_rd,hreg_rb,4);
   }

   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   /* store the carry flag (setc sets only the low byte, hence the mask) */
   amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t0,FALSE);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x1);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                         hreg_t0,4);

   /* with Rc=1, re-test the result so CR0 reflects "result vs 0" */
   if (insn & 1) {
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_rd,hreg_rd,4);
      ppc32_op_emit_update_flags(cpu,0,TRUE);
   }

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
884
/* ADDE - Add Extended: $rd = $ra + $rb + XER[CA], with the new carry
 * written back to XER[CA]; optionally updates CR0 when Rc is set.
 * The carry-out may come from either addition, so the two partial
 * carries are OR-ed together. */
DECLARE_INSN(ADDE)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_ra,hreg_rb,hreg_rd,hreg_t0,hreg_t1;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"adde");
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   /* t0 accumulates the sum, t1 captures partial carries */
   hreg_t0 = ppc32_jit_alloc_hreg(cpu,-1);
   hreg_t1 = ppc32_jit_get_tmp_hreg(cpu);

   ppc32_op_emit_alter_host_reg(cpu,hreg_t0);
   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,3,"adde");

   /* $t0 = $ra + carry */
   amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_t1,hreg_t1);
   amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_ra,4);

   amd64_alu_reg_membase_size(iop->ob_ptr,X86_ADD,hreg_t0,
                              AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),4);
   /* first partial carry -> xer_ca */
   amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                         hreg_t1,4);

   /* $t0 += $rb */
   amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_t0,hreg_rb,4);
   /* second partial carry is OR-ed into xer_ca */
   amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                              hreg_t1,4);

   /* update cr0 (test now, while the result is still in t0) */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_t0,hreg_t0,4);

   amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_t0,4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
938
/* ADDI - ADD Immediate: $rd = $ra + sign_extend(imm), with the PPC
 * special case ra == 0 meaning "li rd,imm" (no base register). */
DECLARE_INSN(ADDI)
{
   int rd  = bits(insn,21,25);
   int ra  = bits(insn,16,20);
   int imm = bits(insn,0,15);
   m_uint32_t tmp = sign_extend_32(imm,16);
   int hreg_rd,hreg_ra;
   jit_op_t *iop;

   /* $rd = $ra + imm */
   ppc32_jit_start_hreg_seq(cpu,"addi");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   if (ra != 0) {
      hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
      ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);

      iop = ppc32_op_emit_insn_output(cpu,2,"addi");

      if (rd != ra)
         amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);

      amd64_alu_reg_imm_size(iop->ob_ptr,X86_ADD,hreg_rd,tmp,4);
   } else {
      /* ra == 0: load the immediate directly (li) */
      iop = ppc32_op_emit_insn_output(cpu,1,"addi");
      ppc32_load_imm(&iop->ob_ptr,hreg_rd,tmp);
   }

   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
973
974 /* ADDIC - ADD Immediate with Carry */
DECLARE_INSN(ADDIC)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int imm = bits(insn,0,15);
   /* immediate is sign-extended from 16 bits (PPC SIMM) */
   m_uint32_t tmp = sign_extend_32(imm,16);
   int hreg_rd,hreg_ra;
   jit_op_t *iop;

   /* $rd = $ra + imm */
   ppc32_jit_start_hreg_seq(cpu,"addic");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);

   iop = ppc32_op_emit_insn_output(cpu,1,"addic");

   /* copy $ra into $rd first unless they map to the same host reg */
   if (rd != ra)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);

   amd64_alu_reg_imm_size(iop->ob_ptr,X86_ADD,hreg_rd,tmp,4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   /* record the host carry flag into XER[CA] (R15 = cpu_ppc_t pointer) */
   amd64_set_membase(iop->ob_ptr,X86_CC_C,
                     AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),FALSE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1005
1006 /* ADDIC. */
DECLARE_INSN(ADDIC_dot)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int imm = bits(insn,0,15);
   /* immediate is sign-extended from 16 bits (PPC SIMM) */
   m_uint32_t tmp = sign_extend_32(imm,16);
   int hreg_rd,hreg_ra;
   jit_op_t *iop;

   /* $rd = $ra + imm -- same as ADDIC but additionally records CR0 */
   ppc32_jit_start_hreg_seq(cpu,"addic.");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);

   iop = ppc32_op_emit_insn_output(cpu,1,"addic.");

   /* copy $ra into $rd first unless they map to the same host reg */
   if (rd != ra)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);

   amd64_alu_reg_imm_size(iop->ob_ptr,X86_ADD,hreg_rd,tmp,4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   /* record the host carry flag into XER[CA] */
   amd64_set_membase(iop->ob_ptr,X86_CC_C,
                     AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),FALSE);

   /* ADDIC. always updates CR0 (signed comparison of the result) */
   ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1039
1040 /* ADDIS - ADD Immediate Shifted */
DECLARE_INSN(ADDIS)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint32_t imm = bits(insn,0,15);
   /* immediate goes into the upper halfword, low 16 bits are zero */
   m_uint32_t tmp = imm << 16;
   int hreg_rd,hreg_ra;
   jit_op_t *iop;

   /* $rd = $ra + (imm << 16) */
   ppc32_jit_start_hreg_seq(cpu,"addis");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   /* PPC addis: when ra is 0 the operand is the literal 0, not GPR0 */
   if (ra != 0) {
      hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
      ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);

      /* NOTE(review): insn count is 1 here while the equivalent path in
       * ADDI reserves 2 even though both may emit a mov + add -- confirm
       * the output buffer sizing is per-PPC-insn, not per-host-insn. */
      iop = ppc32_op_emit_insn_output(cpu,1,"addis");

      /* copy $ra into $rd first unless they map to the same host reg */
      if (rd != ra)
         amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);

      amd64_alu_reg_imm_size(iop->ob_ptr,X86_ADD,hreg_rd,tmp,4);
   } else {
      iop = ppc32_op_emit_insn_output(cpu,1,"addis");
      amd64_mov_reg_imm(iop->ob_ptr,hreg_rd,tmp);
   }

   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1074
1075 /* ADDZE */
DECLARE_INSN(ADDZE)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int hreg_rd,hreg_ra,hreg_t0;
   jit_op_t *iop;

   /* $rd = $ra + xer_ca + set_carry */
   ppc32_jit_start_hreg_seq(cpu,"addze");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);

   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);

   iop = ppc32_op_emit_insn_output(cpu,2,"addze");

   /* zero $t0 so that only setcc's low byte below determines its value */
   amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_t0,hreg_t0);

   if (rd != ra)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);

   /* $rd += XER[CA] (loaded directly from the cpu structure) */
   amd64_alu_reg_membase_size(iop->ob_ptr,X86_ADD,hreg_rd,
                              AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),4);

   /* write the new carry back into XER[CA] */
   amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t0,FALSE);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                         hreg_t0,4);

   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   /* Rc bit set: update CR0.
    * NOTE(review): flags come from the setcc/mov above, not from an
    * explicit test of the result -- confirm update_flags re-tests. */
   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1113
1114 /* AND */
DECLARE_INSN(AND)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rs,hreg_ra,hreg_rb;
   jit_op_t *iop;

   /* $ra = $rs & $rb */
   ppc32_jit_start_hreg_seq(cpu,"and");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,1,"and");

   /* if $ra aliases one of the sources, AND the other source in place;
    * otherwise copy $rs into $ra and AND $rb in */
   if (ra == rs)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_ra,hreg_rb,4);
   else if (ra == rb)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_ra,hreg_rs,4);
   else {
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_ra,hreg_rb,4);
   }

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   /* Rc bit: the AND above left the result flags in EFLAGS for CR0 */
   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1151
1152 /* ANDC */
DECLARE_INSN(ANDC)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rs,hreg_ra,hreg_rb,hreg_t0;
   jit_op_t *iop;

   /* $ra = $rs & ~$rb */
   ppc32_jit_start_hreg_seq(cpu,"andc");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,1,"andc");

   /* $t0 = ~$rb (complement computed in a scratch reg so $rb survives) */
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_rb,4);
   amd64_not_reg(iop->ob_ptr,hreg_t0);

   /* $ra = $rs & $t0 */
   if (ra == rs)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_ra,hreg_t0,4);
   else {
      /* AND into the scratch first so a $ra==$rb alias is not clobbered */
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_t0,hreg_rs,4);
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_t0,4);
   }

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   /* Rc bit: record CR0.
    * NOTE(review): in the ra!=rs path the last host insn is a MOV, which
    * does not set EFLAGS -- confirm update_flags re-tests the result. */
   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1193
1194 /* AND Immediate */
DECLARE_INSN(ANDI)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t imm = bits(insn,0,15);
   /* zero-extended immediate (PPC UIMM) */
   m_uint32_t tmp = imm;
   int hreg_rs,hreg_ra;
   jit_op_t *iop;

   /* $ra = $rs & imm */
   ppc32_jit_start_hreg_seq(cpu,"andi");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,2,"andi");

   if (ra != rs)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

   amd64_alu_reg_imm_size(iop->ob_ptr,X86_AND,hreg_ra,tmp,4);
   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   /* andi. always updates CR0 (no Rc bit check: the opcode implies it) */
   ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1224
1225 /* AND Immediate Shifted */
DECLARE_INSN(ANDIS)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint32_t imm = bits(insn,0,15);
   /* immediate masks the upper halfword */
   m_uint32_t tmp = imm << 16;
   int hreg_rs,hreg_ra;
   jit_op_t *iop;

   /* $ra = $rs & (imm << 16) */
   ppc32_jit_start_hreg_seq(cpu,"andis");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,2,"andis");

   if (ra != rs)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

   amd64_alu_reg_imm_size(iop->ob_ptr,X86_AND,hreg_ra,tmp,4);
   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   /* andis. always updates CR0 (no Rc bit check: the opcode implies it) */
   ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1255
1256 /* B - Branch */
DECLARE_INSN(B)
{
   /* LI field: 24-bit branch displacement in words */
   m_uint32_t offset = bits(insn,2,25);
   m_uint32_t new_ia;
   jit_op_t *iop;

   iop = ppc32_op_emit_insn_output(cpu,4,"b");

   /* compute the new ia: current insn address + sign-extended offset */
   new_ia = b->start_ia + (b->ppc_trans_pos << 2);
   new_ia += sign_extend(offset << 2,26);
   ppc32_set_jump(cpu,b,iop,new_ia,TRUE);

   /* unconditional branch ends the block; register both the target
    * and the fall-through address as potential block entry points */
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
   ppc32_op_emit_branch_target(cpu,b,new_ia);
   ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1) << 2));
   return(0);
}
1275
1276 /* BA - Branch Absolute */
DECLARE_INSN(BA)
{
   /* LI field: 24-bit branch displacement in words */
   m_uint32_t offset = bits(insn,2,25);
   m_uint32_t new_ia;
   jit_op_t *iop;

   iop = ppc32_op_emit_insn_output(cpu,4,"ba");

   /* compute the new ia: absolute form, target is the offset itself */
   new_ia = sign_extend(offset << 2,26);
   ppc32_set_jump(cpu,b,iop,new_ia,TRUE);

   /* unconditional branch ends the block; register target and
    * fall-through as potential block entry points */
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
   ppc32_op_emit_branch_target(cpu,b,new_ia);
   ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1) << 2));
   return(0);
}
1294
1295 /* BL - Branch and Link */
DECLARE_INSN(BL)
{
   /* LI field: 24-bit branch displacement in words */
   m_uint32_t offset = bits(insn,2,25);
   m_uint32_t new_ia;
   jit_op_t *iop;

   iop = ppc32_op_emit_insn_output(cpu,4,"bl");

   /* compute the new ia: current insn address + sign-extended offset */
   new_ia = b->start_ia + (b->ppc_trans_pos << 2);
   new_ia += sign_extend(offset << 2,26);

   /* set the return address (LR = address of the next instruction) */
   ppc32_set_lr(iop,b->start_ia + ((b->ppc_trans_pos+1) << 2));
   ppc32_set_jump(cpu,b,iop,new_ia,TRUE);

   /* branch ends the block; register target and fall-through */
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
   ppc32_op_emit_branch_target(cpu,b,new_ia);
   ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1) << 2));
   return(0);
}
1317
1318 /* BLA - Branch and Link Absolute */
DECLARE_INSN(BLA)
{
   /* LI field: 24-bit branch displacement in words */
   m_uint32_t offset = bits(insn,2,25);
   m_uint32_t new_ia;
   jit_op_t *iop;

   iop = ppc32_op_emit_insn_output(cpu,4,"bla");

   /* compute the new ia: absolute form, target is the offset itself */
   new_ia = sign_extend(offset << 2,26);

   /* set the return address (LR = address of the next instruction) */
   ppc32_set_lr(iop,b->start_ia + ((b->ppc_trans_pos+1) << 2));
   ppc32_set_jump(cpu,b,iop,new_ia,TRUE);

   /* branch ends the block; register target and fall-through */
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
   ppc32_op_emit_branch_target(cpu,b,new_ia);
   ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1) << 2));
   return(0);
}
1339
1340 /* BC - Branch Conditional (Condition Check only) */
DECLARE_INSN(BCC)
{
   int bo = bits(insn,21,25);   /* branch options */
   int bi = bits(insn,16,20);   /* CR bit to test */
   int bd = bits(insn,2,15);    /* 14-bit displacement in words */
   jit_op_t *iop;
   u_int cr_field,cr_bit;
   m_uint32_t new_ia;
   u_char *jump_ptr;
   int local_jump;
   int cond;

   ppc32_op_emit_basic_opcode(cpu,JIT_OP_BRANCH_JUMP);

   iop = ppc32_op_emit_insn_output(cpu,5,"bcc");

   /* Get the wanted value for the condition bit (BO[3]: 1 = branch if
    * the CR bit is set, 0 = branch if it is clear) */
   cond = (bo >> 3) & 0x1;

   /* Set the return address (LK bit: this is bcl/bcla) */
   if (insn & 1) {
      ppc32_set_lr(iop,b->start_ia + ((b->ppc_trans_pos+1) << 2));
      ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1)<<2));
   }

   /* Compute the new ia (AA bit clear = relative to current insn) */
   new_ia = sign_extend_32(bd << 2,16);
   if (!(insn & 0x02))
      new_ia += b->start_ia + (b->ppc_trans_pos << 2);

   /* Test the condition bit */
   cr_field = ppc32_get_cr_field(bi);
   cr_bit = ppc32_get_cr_bit(bi);

   /* make sure the CR field holding the bit is materialized in memory */
   ppc32_op_emit_require_flags(cpu,cr_field);

   amd64_test_membase_imm_size(iop->ob_ptr,
                               AMD64_R15,PPC32_CR_FIELD_OFFSET(cr_field),
                               (1 << cr_bit),4);

   local_jump = ppc32_jit_tcb_local_addr(b,new_ia,&jump_ptr);

   /*
    * Optimize the jump, depending if the destination is in the same
    * page or not.
    */
   if (local_jump) {
      /* same page: emit a direct branch taken when the condition holds,
       * to be patched once the target's host address is known */
      ppc32_jit_tcb_record_patch(b,iop,iop->ob_ptr,new_ia);
      amd64_branch32(iop->ob_ptr,(cond) ? X86_CC_NZ : X86_CC_Z,0,FALSE);
   } else {
      /* other page: branch with the INVERTED condition around the
       * full jump sequence, then patch that skip branch */
      jump_ptr = iop->ob_ptr;
      amd64_branch32(iop->ob_ptr,(cond) ? X86_CC_Z : X86_CC_NZ,0,FALSE);
      ppc32_set_jump(cpu,b,iop,new_ia,TRUE);
      amd64_patch(jump_ptr,iop->ob_ptr);
   }

   ppc32_op_emit_branch_target(cpu,b,new_ia);
   return(0);
}
1400
1401 /* BC - Branch Conditional */
DECLARE_INSN(BC)
{
   int bo = bits(insn,21,25);   /* branch options */
   int bi = bits(insn,16,20);   /* CR bit to test */
   int bd = bits(insn,2,15);    /* 14-bit displacement in words */
   int hreg_t0,hreg_t1;
   jit_op_t *iop;
   u_int cr_field,cr_bit;
   m_uint32_t new_ia;
   u_char *jump_ptr;
   int local_jump;
   int cond,ctr;

   ppc32_op_emit_basic_opcode(cpu,JIT_OP_BRANCH_JUMP);

   iop = ppc32_op_emit_insn_output(cpu,5,"bc");

   ppc32_jit_start_hreg_seq(cpu,"bc");
   hreg_t0 = ppc32_jit_alloc_hreg(cpu,-1);
   hreg_t1 = ppc32_jit_get_tmp_hreg(cpu);

   ppc32_op_emit_alter_host_reg(cpu,hreg_t0);

   /* Get the wanted value for the condition bit (BO[3]) and the wanted
    * CTR state (BO[1]: 1 = branch if CTR becomes 0) */
   cond = (bo >> 3) & 0x1;
   ctr = (bo >> 1) & 0x1;

   /* Set the return address (LK bit: bcl form) */
   if (insn & 1) {
      ppc32_set_lr(iop,b->start_ia + ((b->ppc_trans_pos+1) << 2));
      ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1)<<2));
   }

   /* Compute the new ia (AA bit clear = relative to current insn) */
   new_ia = sign_extend_32(bd << 2,16);
   if (!(insn & 0x02))
      new_ia += b->start_ia + (b->ppc_trans_pos << 2);

   /* $t0 accumulates "branch taken": start at 1, AND each test in */
   amd64_mov_reg_imm(iop->ob_ptr,hreg_t0,1);

   /* Decrement the count register (unless BO[2] says to leave CTR alone) */
   if (!(bo & 0x04)) {
      amd64_dec_membase_size(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ctr),4);
      amd64_set_reg(iop->ob_ptr,(ctr) ? X86_CC_Z : X86_CC_NZ,hreg_t1,FALSE);
      amd64_alu_reg_reg(iop->ob_ptr,X86_AND,hreg_t0,hreg_t1);
   }

   /* Test the condition bit (unless BO[4] says branch unconditionally) */
   if (!((bo >> 4) & 0x01)) {
      cr_field = ppc32_get_cr_field(bi);
      cr_bit = ppc32_get_cr_bit(bi);

      ppc32_op_emit_require_flags(cpu,cr_field);

      amd64_test_membase_imm_size(iop->ob_ptr,
                                  AMD64_R15,PPC32_CR_FIELD_OFFSET(cr_field),
                                  (1 << cr_bit),4);

      amd64_set_reg(iop->ob_ptr,(cond) ? X86_CC_NZ : X86_CC_Z,hreg_t1,FALSE);
      amd64_alu_reg_reg(iop->ob_ptr,X86_AND,hreg_t0,hreg_t1);
   }

   /* keep only bit 0 of the accumulator; also sets ZF for the branch */
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   local_jump = ppc32_jit_tcb_local_addr(b,new_ia,&jump_ptr);

   /*
    * Optimize the jump, depending if the destination is in the same
    * page or not.
    */
   if (local_jump) {
      /* same page: direct patched branch when taken ($t0 != 0) */
      ppc32_jit_tcb_record_patch(b,iop,iop->ob_ptr,new_ia);
      amd64_branch32(iop->ob_ptr,X86_CC_NZ,0,FALSE);
   } else {
      /* other page: skip the full jump sequence when not taken */
      jump_ptr = iop->ob_ptr;
      amd64_branch32(iop->ob_ptr,X86_CC_Z,0,FALSE);
      ppc32_set_jump(cpu,b,iop,new_ia,TRUE);
      amd64_patch(jump_ptr,iop->ob_ptr);
   }

   ppc32_op_emit_branch_target(cpu,b,new_ia);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1487
1488 /* BCLR - Branch Conditional to Link register */
DECLARE_INSN(BCLR)
{
   int bo = bits(insn,21,25);   /* branch options */
   int bi = bits(insn,16,20);   /* CR bit to test */
   int bd = bits(insn,2,15);
   int hreg_t0,hreg_t1;
   jit_op_t *iop;
   u_int cr_field,cr_bit;
   m_uint32_t new_ia;
   u_char *jump_ptr;
   int cond,ctr;

   ppc32_jit_start_hreg_seq(cpu,"bclr");
   hreg_t0 = ppc32_jit_alloc_hreg(cpu,-1);
   hreg_t1 = ppc32_jit_get_tmp_hreg(cpu);

   ppc32_op_emit_alter_host_reg(cpu,hreg_t0);

   iop = ppc32_op_emit_insn_output(cpu,5,"bclr");

   /* Get the wanted value for the condition bit (BO[3]) and the wanted
    * CTR state (BO[1]) */
   cond = (bo >> 3) & 0x1;
   ctr = (bo >> 1) & 0x1;

   /* Compute the new ia.
    * NOTE(review): new_ia is computed here but never used below -- the
    * actual branch target is taken from LR; looks like dead code kept
    * from the BC template. */
   new_ia = sign_extend_32(bd << 2,16);
   if (!(insn & 0x02))
      new_ia += b->start_ia + (b->ppc_trans_pos << 2);

   /* $t0 accumulates "branch taken": start at 1, AND each test in */
   amd64_mov_reg_imm(iop->ob_ptr,hreg_t0,1);

   /* Decrement the count register (unless BO[2] says to leave CTR alone) */
   if (!(bo & 0x04)) {
      amd64_dec_membase_size(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ctr),4);
      amd64_set_reg(iop->ob_ptr,(ctr) ? X86_CC_Z : X86_CC_NZ,hreg_t1,FALSE);
      amd64_alu_reg_reg(iop->ob_ptr,X86_AND,hreg_t0,hreg_t1);
   }

   /* Test the condition bit (unless BO[4] says branch unconditionally) */
   if (!((bo >> 4) & 0x01)) {
      cr_field = ppc32_get_cr_field(bi);
      cr_bit = ppc32_get_cr_bit(bi);

      ppc32_op_emit_require_flags(cpu,cr_field);

      amd64_test_membase_imm_size(iop->ob_ptr,
                                  AMD64_R15,PPC32_CR_FIELD_OFFSET(cr_field),
                                  (1 << cr_bit),4);

      amd64_set_reg(iop->ob_ptr,(cond) ? X86_CC_NZ : X86_CC_Z,hreg_t1,FALSE);
      amd64_alu_reg_reg(iop->ob_ptr,X86_AND,hreg_t0,hreg_t1);
   }

   /* Load the branch target from LR *before* LK possibly rewrites it */
   amd64_mov_reg_membase(iop->ob_ptr,hreg_t1,AMD64_R15,OFFSET(cpu_ppc_t,lr),4);

   /* LK bit: bclrl form, LR gets the address of the next instruction */
   if (insn & 1) {
      ppc32_set_lr(iop,b->start_ia + ((b->ppc_trans_pos+1) << 2));
      ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1)<<2));
   }

   /* Branching: skip over the indirect jump when not taken */
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   jump_ptr = iop->ob_ptr;
   amd64_branch32(iop->ob_ptr,X86_CC_Z,0,FALSE);

   /* taken: ia = LR & ~3 (low bits of LR are ignored), leave JIT code */
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t1,0xFFFFFFFC);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ia),hreg_t1,4);
   ppc32_jit_tcb_push_epilog(&iop->ob_ptr);

   amd64_patch(jump_ptr,iop->ob_ptr);

   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1567
1568 /* CMP - Compare */
DECLARE_INSN(CMP)1569 DECLARE_INSN(CMP)
1570 {
1571 int rd = bits(insn,23,25);
1572 int ra = bits(insn,16,20);
1573 int rb = bits(insn,11,15);
1574 int hreg_ra,hreg_rb;
1575 jit_op_t *iop;
1576
1577 ppc32_jit_start_hreg_seq(cpu,"cmp");
1578 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
1579 hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);
1580
1581 ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
1582 ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);
1583
1584 iop = ppc32_op_emit_insn_output(cpu,1,"cmp");
1585
1586 amd64_alu_reg_reg_size(iop->ob_ptr,X86_CMP,hreg_ra,hreg_rb,4);
1587 ppc32_op_emit_update_flags(cpu,rd,TRUE);
1588
1589 ppc32_jit_close_hreg_seq(cpu);
1590 return(0);
1591 }
1592
1593 /* CMPI - Compare Immediate */
DECLARE_INSN(CMPI)1594 DECLARE_INSN(CMPI)
1595 {
1596 int rd = bits(insn,23,25);
1597 int ra = bits(insn,16,20);
1598 m_uint16_t imm = bits(insn,0,15);
1599 m_uint32_t tmp = sign_extend_32(imm,16);
1600 int hreg_ra;
1601 jit_op_t *iop;
1602
1603 ppc32_jit_start_hreg_seq(cpu,"cmpi");
1604 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
1605 ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
1606
1607 iop = ppc32_op_emit_insn_output(cpu,1,"cmpi");
1608
1609 amd64_alu_reg_imm_size(iop->ob_ptr,X86_CMP,hreg_ra,tmp,4);
1610 ppc32_op_emit_update_flags(cpu,rd,TRUE);
1611
1612 ppc32_jit_close_hreg_seq(cpu);
1613 return(0);
1614 }
1615
1616 /* CMPL - Compare Logical */
DECLARE_INSN(CMPL)1617 DECLARE_INSN(CMPL)
1618 {
1619 int rd = bits(insn,23,25);
1620 int ra = bits(insn,16,20);
1621 int rb = bits(insn,11,15);
1622 int hreg_ra,hreg_rb;
1623 jit_op_t *iop;
1624
1625 ppc32_jit_start_hreg_seq(cpu,"cmpl");
1626 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
1627 hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);
1628
1629 ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
1630 ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);
1631
1632 iop = ppc32_op_emit_insn_output(cpu,1,"cmpl");
1633
1634 amd64_alu_reg_reg_size(iop->ob_ptr,X86_CMP,hreg_ra,hreg_rb,4);
1635 ppc32_op_emit_update_flags(cpu,rd,FALSE);
1636
1637 ppc32_jit_close_hreg_seq(cpu);
1638 return(0);
1639 }
1640
/* CMPLI - Compare Logical Immediate */
DECLARE_INSN(CMPLI)1642 DECLARE_INSN(CMPLI)
1643 {
1644 int rd = bits(insn,23,25);
1645 int ra = bits(insn,16,20);
1646 m_uint32_t imm = bits(insn,0,15);
1647 int hreg_ra;
1648 jit_op_t *iop;
1649
1650 ppc32_jit_start_hreg_seq(cpu,"cmpli");
1651 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
1652 ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
1653
1654 iop = ppc32_op_emit_insn_output(cpu,1,"cmpli");
1655
1656 amd64_alu_reg_imm_size(iop->ob_ptr,X86_CMP,hreg_ra,imm,4);
1657 ppc32_op_emit_update_flags(cpu,rd,FALSE);
1658
1659 ppc32_jit_close_hreg_seq(cpu);
1660 return(0);
1661 }
1662
1663 /* CRAND - Condition Register AND */
DECLARE_INSN(CRAND)
{
   int bd = bits(insn,21,25);   /* destination CR bit */
   int bb = bits(insn,16,20);   /* source CR bit B */
   int ba = bits(insn,11,15);   /* source CR bit A */
   int hreg_t0;
   jit_op_t *iop;

   /* RDX is used as a second scratch register (setcc needs a byte reg) */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"crand");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* make sure the three CR fields involved are materialized in memory */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"crand");

   /* test $ba bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,hreg_t0,FALSE);

   /* result of AND between $ba and $bb */
   amd64_alu_reg_reg(iop->ob_ptr,X86_AND,hreg_t0,AMD64_RDX);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result: clear it first... */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   /* ...then OR the (0/1) result shifted into position */
   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1717
1718 /* CRANDC - Condition Register AND with Complement */
DECLARE_INSN(CRANDC)
{
   int bd = bits(insn,21,25);   /* destination CR bit */
   int bb = bits(insn,16,20);   /* source CR bit B (complemented) */
   int ba = bits(insn,11,15);   /* source CR bit A */
   int hreg_t0;
   jit_op_t *iop;

   /* RDX is used as a second scratch register (setcc needs a byte reg) */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"crandc");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* make sure the three CR fields involved are materialized in memory */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"crandc");

   /* test $ba bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit -- CC_Z yields its complement directly */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_Z,hreg_t0,FALSE);

   /* result of AND between $ba and ~$bb */
   amd64_alu_reg_reg(iop->ob_ptr,X86_AND,hreg_t0,AMD64_RDX);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result: clear, then OR in */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1772
1773 /* CREQV - Condition Register EQV */
DECLARE_INSN(CREQV)
{
   int bd = bits(insn,21,25);   /* destination CR bit */
   int bb = bits(insn,16,20);   /* source CR bit B */
   int ba = bits(insn,11,15);   /* source CR bit A */
   int hreg_t0;
   jit_op_t *iop;

   /* RDX is used as a second scratch register (setcc needs a byte reg) */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"creqv");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* make sure the three CR fields involved are materialized in memory */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"creqv");

   /* test $ba bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,hreg_t0,FALSE);

   /* EQV = NOT(XOR): XOR the two bits, invert, keep bit 0 */
   amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_t0,AMD64_RDX);
   amd64_not_reg(iop->ob_ptr,hreg_t0);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result: clear, then OR in */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1828
1829 /* CRNAND - Condition Register NAND */
DECLARE_INSN(CRNAND)
{
   int bd = bits(insn,21,25);   /* destination CR bit */
   int bb = bits(insn,16,20);   /* source CR bit B */
   int ba = bits(insn,11,15);   /* source CR bit A */
   int hreg_t0;
   jit_op_t *iop;

   /* RDX is used as a second scratch register (setcc needs a byte reg) */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"crnand");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* make sure the three CR fields involved are materialized in memory */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"crnand");

   /* test $ba bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,hreg_t0,FALSE);

   /* NAND = NOT(AND): AND the two bits, invert, keep bit 0 */
   amd64_alu_reg_reg(iop->ob_ptr,X86_AND,hreg_t0,AMD64_RDX);
   amd64_not_reg(iop->ob_ptr,hreg_t0);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result: clear, then OR in */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1884
1885 /* CRNOR - Condition Register NOR */
DECLARE_INSN(CRNOR)
{
   int bd = bits(insn,21,25);   /* destination CR bit */
   int bb = bits(insn,16,20);   /* source CR bit B */
   int ba = bits(insn,11,15);   /* source CR bit A */
   int hreg_t0;
   jit_op_t *iop;

   /* RDX is used as a second scratch register (setcc needs a byte reg) */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"crnor");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* make sure the three CR fields involved are materialized in memory */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"crnor");

   /* test $ba bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,hreg_t0,FALSE);

   /* NOR = NOT(OR): OR the two bits, invert, keep bit 0 */
   amd64_alu_reg_reg(iop->ob_ptr,X86_OR,hreg_t0,AMD64_RDX);
   amd64_not_reg(iop->ob_ptr,hreg_t0);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result: clear, then OR in */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1940
1941 /* CROR - Condition Register OR */
DECLARE_INSN(CROR)
{
   int bd = bits(insn,21,25);   /* destination CR bit */
   int bb = bits(insn,16,20);   /* source CR bit B */
   int ba = bits(insn,11,15);   /* source CR bit A */
   int hreg_t0;
   jit_op_t *iop;

   /* RDX is used as a second scratch register (setcc needs a byte reg) */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"cror");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* make sure the three CR fields involved are materialized in memory */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"cror");

   /* test $ba bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,hreg_t0,FALSE);

   /* result of OR between $ba and $bb (no inversion, unlike CRNOR) */
   amd64_alu_reg_reg(iop->ob_ptr,X86_OR,hreg_t0,AMD64_RDX);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result: clear, then OR in */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1995
1996 /* CRORC - Condition Register OR with Complement */
DECLARE_INSN(CRORC)
{
   int bd = bits(insn,21,25);   /* destination CR bit */
   int bb = bits(insn,16,20);   /* source CR bit B (complemented) */
   int ba = bits(insn,11,15);   /* source CR bit A */
   int hreg_t0;
   jit_op_t *iop;

   /* RDX is used as a second scratch register (setcc needs a byte reg) */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"crorc");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* make sure the three CR fields involved are materialized in memory */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"crorc");

   /* test $ba bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit -- CC_Z yields its complement directly */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_Z,hreg_t0,FALSE);

   /* result of ORC between $ba and $bb ($ba | ~$bb) */
   amd64_alu_reg_reg(iop->ob_ptr,X86_OR,hreg_t0,AMD64_RDX);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result: clear, then OR in */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2050
/* CRXOR - Condition Register XOR:
 * CR bit $bd = CR bit $ba ^ CR bit $bb.
 * Same mechanics as the other CR bit ops: extract both source bits into
 * host registers, combine, then rewrite the destination bit in memory.
 */
DECLARE_INSN(CRXOR)
{
   int bd = bits(insn,21,25);
   int bb = bits(insn,16,20);
   int ba = bits(insn,11,15);
   int hreg_t0;
   jit_op_t *iop;

   /* RDX is used as scratch below: invalidate any cached GPR in it */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"crxor");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* all three CR fields must be materialized before we read/write them */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"crxor");

   /* test $ba bit: RDX = ($ba bit set) ? 1 : 0 */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit: t0 = ($bb bit set) ? 1 : 0 */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,hreg_t0,FALSE);

   /* result of XOR between $ba and $bb, masked down to bit 0 */
   amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_t0,AMD64_RDX);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result: first clear it... */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   /* ...then OR the computed bit into position */
   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2105
/* DIVWU - Divide Word Unsigned: $rd = (u32)$ra / (u32)$rb.
 * Uses the x86 DIV instruction, which requires the dividend in EDX:EAX
 * and leaves quotient in EAX / remainder in EDX.
 * NOTE(review): a zero divisor would raise a host divide fault here;
 * no guard is emitted — confirm how the emulator handles that case.
 */
DECLARE_INSN(DIVWU)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rb;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"divwu");
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RAX);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   /* $rd = $ra / $rb */
   ppc32_op_emit_load_gpr(cpu,AMD64_RAX,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,2,"divwu");
   /* zero-extend dividend: EDX = 0 for the unsigned 64/32 divide */
   ppc32_load_imm(&iop->ob_ptr,AMD64_RDX,0);

   amd64_div_reg_size(iop->ob_ptr,hreg_rb,0,4);

   /* Rc bit set: compute host flags from the quotient for CR0 */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,AMD64_RAX,AMD64_RAX,4);

   ppc32_op_emit_store_gpr(cpu,rd,AMD64_RAX);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   /* edx:eax are directly modified: throw them */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RAX);
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2144
/* EQV - Equivalent: $ra = ~($rs ^ $rb) (XOR then complement). */
DECLARE_INSN(EQV)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rs,hreg_ra,hreg_rb;
   jit_op_t *iop;

   /* $ra = ~($rs ^ $rb) */
   ppc32_jit_start_hreg_seq(cpu,"eqv");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,1,"eqv");

   /* aliasing cases: presumably the allocator maps the same GPR to the
      same host register, so hreg_ra already holds the shared operand */
   if (ra == rs)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_XOR,hreg_ra,hreg_rb,4);
   else if (ra == rb)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_XOR,hreg_ra,hreg_rs,4);
   else {
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_XOR,hreg_ra,hreg_rb,4);
   }

   amd64_not_reg(iop->ob_ptr,hreg_ra);

   /* Rc bit set: NOT does not set host flags, so test the final value */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2187
/* EXTSB - Extend Sign Byte: $ra = sign_extend_8_to_32($rs). */
DECLARE_INSN(EXTSB)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int hreg_rs,hreg_ra;
   jit_op_t *iop;

   /* $ra = extsb($rs) */
   ppc32_jit_start_hreg_seq(cpu,"extsb");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,2,"extsb");

   if (rs != ra)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

   /* sign-extend via SHL 24 then arithmetic SAR 24 */
   amd64_shift_reg_imm_size(iop->ob_ptr,X86_SHL,hreg_ra,24,4);
   amd64_shift_reg_imm_size(iop->ob_ptr,X86_SAR,hreg_ra,24,4);

   /* Rc bit set: compute host flags for CR0 */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2222
/* EXTSH - Extend Sign Half-Word: $ra = sign_extend_16_to_32($rs). */
DECLARE_INSN(EXTSH)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int hreg_rs,hreg_ra;
   jit_op_t *iop;

   /* $ra = extsh($rs) */
   ppc32_jit_start_hreg_seq(cpu,"extsh");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,2,"extsh");

   if (rs != ra)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

   /* sign-extend via SHL 16 then arithmetic SAR 16 */
   amd64_shift_reg_imm_size(iop->ob_ptr,X86_SHL,hreg_ra,16,4);
   amd64_shift_reg_imm_size(iop->ob_ptr,X86_SAR,hreg_ra,16,4);

   /* Rc bit set: compute host flags for CR0 */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2257
2258 /* LBZ - Load Byte and Zero */
DECLARE_INSN(LBZ)2259 DECLARE_INSN(LBZ)
2260 {
2261 int rs = bits(insn,21,25);
2262 int ra = bits(insn,16,20);
2263 m_uint16_t offset = bits(insn,0,15);
2264
2265 //ppc32_emit_memop(b,PPC_MEMOP_LBZ,ra,offset,rs,0);
2266 ppc32_emit_memop_fast(cpu,b,0,PPC_MEMOP_LBZ,ra,offset,rs,
2267 ppc32_memop_fast_lbz);
2268 return(0);
2269 }
2270
2271 /* LBZU - Load Byte and Zero with Update */
DECLARE_INSN(LBZU)2272 DECLARE_INSN(LBZU)
2273 {
2274 int rs = bits(insn,21,25);
2275 int ra = bits(insn,16,20);
2276 m_uint16_t offset = bits(insn,0,15);
2277
2278 ppc32_emit_memop(cpu,b,PPC_MEMOP_LBZ,ra,offset,rs,1);
2279 return(0);
2280 }
2281
2282 /* LBZUX - Load Byte and Zero with Update Indexed */
DECLARE_INSN(LBZUX)2283 DECLARE_INSN(LBZUX)
2284 {
2285 int rs = bits(insn,21,25);
2286 int ra = bits(insn,16,20);
2287 int rb = bits(insn,11,15);
2288
2289 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LBZ,ra,rb,rs,1);
2290 return(0);
2291 }
2292
2293 /* LBZX - Load Byte and Zero Indexed */
DECLARE_INSN(LBZX)2294 DECLARE_INSN(LBZX)
2295 {
2296 int rs = bits(insn,21,25);
2297 int ra = bits(insn,16,20);
2298 int rb = bits(insn,11,15);
2299
2300 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LBZ,ra,rb,rs,0);
2301 return(0);
2302 }
2303
2304 /* LHA - Load Half-Word Algebraic */
DECLARE_INSN(LHA)2305 DECLARE_INSN(LHA)
2306 {
2307 int rs = bits(insn,21,25);
2308 int ra = bits(insn,16,20);
2309 m_uint16_t offset = bits(insn,0,15);
2310
2311 ppc32_emit_memop(cpu,b,PPC_MEMOP_LHA,ra,offset,rs,0);
2312 return(0);
2313 }
2314
2315 /* LHAU - Load Half-Word Algebraic with Update */
DECLARE_INSN(LHAU)2316 DECLARE_INSN(LHAU)
2317 {
2318 int rs = bits(insn,21,25);
2319 int ra = bits(insn,16,20);
2320 m_uint16_t offset = bits(insn,0,15);
2321
2322 ppc32_emit_memop(cpu,b,PPC_MEMOP_LHA,ra,offset,rs,1);
2323 return(0);
2324 }
2325
2326 /* LHAUX - Load Half-Word Algebraic with Update Indexed */
DECLARE_INSN(LHAUX)2327 DECLARE_INSN(LHAUX)
2328 {
2329 int rs = bits(insn,21,25);
2330 int ra = bits(insn,16,20);
2331 int rb = bits(insn,11,15);
2332
2333 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LHA,ra,rb,rs,1);
2334 return(0);
2335 }
2336
2337 /* LHAX - Load Half-Word Algebraic Indexed */
DECLARE_INSN(LHAX)2338 DECLARE_INSN(LHAX)
2339 {
2340 int rs = bits(insn,21,25);
2341 int ra = bits(insn,16,20);
2342 int rb = bits(insn,11,15);
2343
2344 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LHA,ra,rb,rs,0);
2345 return(0);
2346 }
2347
2348 /* LHZ - Load Half-Word and Zero */
DECLARE_INSN(LHZ)2349 DECLARE_INSN(LHZ)
2350 {
2351 int rs = bits(insn,21,25);
2352 int ra = bits(insn,16,20);
2353 m_uint16_t offset = bits(insn,0,15);
2354
2355 ppc32_emit_memop(cpu,b,PPC_MEMOP_LHZ,ra,offset,rs,0);
2356 return(0);
2357 }
2358
2359 /* LHZU - Load Half-Word and Zero with Update */
DECLARE_INSN(LHZU)2360 DECLARE_INSN(LHZU)
2361 {
2362 int rs = bits(insn,21,25);
2363 int ra = bits(insn,16,20);
2364 m_uint16_t offset = bits(insn,0,15);
2365
2366 ppc32_emit_memop(cpu,b,PPC_MEMOP_LHZ,ra,offset,rs,1);
2367 return(0);
2368 }
2369
2370 /* LHZUX - Load Half-Word and Zero with Update Indexed */
DECLARE_INSN(LHZUX)2371 DECLARE_INSN(LHZUX)
2372 {
2373 int rs = bits(insn,21,25);
2374 int ra = bits(insn,16,20);
2375 int rb = bits(insn,11,15);
2376
2377 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LHZ,ra,rb,rs,1);
2378 return(0);
2379 }
2380
2381 /* LHZX - Load Half-Word and Zero Indexed */
DECLARE_INSN(LHZX)2382 DECLARE_INSN(LHZX)
2383 {
2384 int rs = bits(insn,21,25);
2385 int ra = bits(insn,16,20);
2386 int rb = bits(insn,11,15);
2387
2388 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LHZ,ra,rb,rs,0);
2389 return(0);
2390 }
2391
2392 /* LWZ - Load Word and Zero */
DECLARE_INSN(LWZ)2393 DECLARE_INSN(LWZ)
2394 {
2395 int rs = bits(insn,21,25);
2396 int ra = bits(insn,16,20);
2397 m_uint16_t offset = bits(insn,0,15);
2398
2399 //ppc32_emit_memop(b,PPC_MEMOP_LWZ,ra,offset,rs,0);
2400 ppc32_emit_memop_fast(cpu,b,0,PPC_MEMOP_LWZ,ra,offset,rs,
2401 ppc32_memop_fast_lwz);
2402 return(0);
2403 }
2404
2405 /* LWZU - Load Word and Zero with Update */
DECLARE_INSN(LWZU)2406 DECLARE_INSN(LWZU)
2407 {
2408 int rs = bits(insn,21,25);
2409 int ra = bits(insn,16,20);
2410 m_uint16_t offset = bits(insn,0,15);
2411
2412 ppc32_emit_memop(cpu,b,PPC_MEMOP_LWZ,ra,offset,rs,1);
2413 return(0);
2414 }
2415
2416 /* LWZUX - Load Word and Zero with Update Indexed */
DECLARE_INSN(LWZUX)2417 DECLARE_INSN(LWZUX)
2418 {
2419 int rs = bits(insn,21,25);
2420 int ra = bits(insn,16,20);
2421 int rb = bits(insn,11,15);
2422
2423 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LWZ,ra,rb,rs,1);
2424 return(0);
2425 }
2426
2427 /* LWZX - Load Word and Zero Indexed */
DECLARE_INSN(LWZX)2428 DECLARE_INSN(LWZX)
2429 {
2430 int rs = bits(insn,21,25);
2431 int ra = bits(insn,16,20);
2432 int rb = bits(insn,11,15);
2433
2434 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LWZ,ra,rb,rs,0);
2435 return(0);
2436 }
2437
/* MCRF - Move Condition Register Field: copy the 4-bit CR field "rs"
 * into CR field "rd", going through a host temporary register. */
DECLARE_INSN(MCRF)
{
   int rd = bits(insn,23,25);
   int rs = bits(insn,18,20);
   int hreg_t0;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"mcrf");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   /* source field must be materialized in memory before copying */
   ppc32_op_emit_require_flags(cpu,rs);

   iop = ppc32_op_emit_insn_output(cpu,1,"mcrf");

   /* Load "rs" field in %edx */
   amd64_mov_reg_membase(iop->ob_ptr,hreg_t0,
                         AMD64_R15,PPC32_CR_FIELD_OFFSET(rs),4);

   /* Store it in "rd" field */
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,PPC32_CR_FIELD_OFFSET(rd),
                         hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2463
/* MFCR - Move from Condition Register: rebuild the full 32-bit CR value
 * in $rd from the eight per-field 4-bit values stored in the CPU struct
 * (field 0 ends up in the most significant nibble). */
DECLARE_INSN(MFCR)
{
   int rd = bits(insn,21,25);
   int hreg_rd,hreg_t0;
   jit_op_t *iop;
   int i;

   ppc32_jit_start_hreg_seq(cpu,"mfcr");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   /* every CR field must be materialized before reading them all */
   ppc32_op_emit_require_flags(cpu,JIT_OP_PPC_ALL_FLAGS);

   iop = ppc32_op_emit_insn_output(cpu,3,"mfcr");

   /* start from zero, then accumulate nibble by nibble */
   amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_rd,hreg_rd);

   for(i=0;i<8;i++) {
      /* load field in %edx */
      amd64_mov_reg_membase(iop->ob_ptr,hreg_t0,
                            AMD64_R15,PPC32_CR_FIELD_OFFSET(i),4);
      /* shift accumulated value left 4 and merge the next field */
      amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_rd,4);
      amd64_alu_reg_reg(iop->ob_ptr,X86_OR,hreg_rd,hreg_t0);
   }

   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2494
/* MFMSR - Move from Machine State Register: $rd = cpu->msr. */
DECLARE_INSN(MFMSR)
{
   int rd = bits(insn,21,25);
   int hreg_rd;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"mfmsr");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   iop = ppc32_op_emit_insn_output(cpu,1,"mfmsr");
   /* read the MSR directly from the CPU structure (pointer in R15) */
   amd64_mov_reg_membase(iop->ob_ptr,hreg_rd,
                         AMD64_R15,OFFSET(cpu_ppc_t,msr),4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2513
/* MFSR - Move From Segment Register: $rd = cpu->sr[sr]. */
DECLARE_INSN(MFSR)
{
   int rd = bits(insn,21,25);
   int sr = bits(insn,16,19);
   int hreg_rd;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"mfsr");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   iop = ppc32_op_emit_insn_output(cpu,1,"mfsr");

   /* sr << 2 indexes into the 32-bit segment register array */
   amd64_mov_reg_membase(iop->ob_ptr,hreg_rd,
                         AMD64_R15,(OFFSET(cpu_ppc_t,sr) + (sr << 2)),4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2534
/* MTCRF - Move to Condition Register Fields: for each field selected by
 * the CRM mask, extract the matching 4-bit nibble of $rs and store it
 * into that CR field in the CPU structure. */
DECLARE_INSN(MTCRF)
{
   int rs = bits(insn,21,25);
   int crm = bits(insn,12,19);
   int hreg_rs,hreg_t0;
   jit_op_t *iop;
   int i;

   ppc32_jit_start_hreg_seq(cpu,"mtcrf");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,4,"mtcrf");

   for(i=0;i<8;i++)
      /* CRM bit (7-i) selects CR field i (field 0 = most significant) */
      if (crm & (1 << (7 - i))) {
         amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_rs,4);

         /* bring field i's nibble down to bits 0-3 (field 7 is already there) */
         if (i != 7)
            amd64_shift_reg_imm(iop->ob_ptr,X86_SHR,hreg_t0,28 - (i << 2));

         amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x0F);
         amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,PPC32_CR_FIELD_OFFSET(i),
                               hreg_t0,4);
      }

   /* cached host flags no longer match the CR fields just written */
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_TRASH_FLAGS);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2569
/* MULHW - Multiply High Word: $rd = high 32 bits of (s32)$ra * (s32)$rb.
 * Uses x86 one-operand IMUL (signed, flag 1 below), which produces the
 * 64-bit product in EDX:EAX. */
DECLARE_INSN(MULHW)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rb;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"mulhw");
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RAX);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,AMD64_RAX,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   /* rd = hi(ra * rb) */
   iop = ppc32_op_emit_insn_output(cpu,2,"mulhw");
   amd64_mul_reg_size(iop->ob_ptr,hreg_rb,1,4);

   /* Rc bit set: compute host flags from the high word (the result) */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,AMD64_RDX,AMD64_RDX,4);

   ppc32_op_emit_store_gpr(cpu,rd,AMD64_RDX);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   /* edx:eax are directly modified: throw them */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RAX);
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2606
/* MULHWU - Multiply High Word Unsigned: $rd = high 32 bits of
 * (u32)$ra * (u32)$rb. Uses x86 MUL (unsigned, flag 0 below),
 * producing the 64-bit product in EDX:EAX. */
DECLARE_INSN(MULHWU)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rb;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"mulhwu");
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RAX);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,AMD64_RAX,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   /* rd = hi(ra * rb) */
   iop = ppc32_op_emit_insn_output(cpu,2,"mulhwu");
   amd64_mul_reg_size(iop->ob_ptr,hreg_rb,0,4);

   /* Rc bit set: compute host flags from the high word (the result) */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,AMD64_RDX,AMD64_RDX,4);

   ppc32_op_emit_store_gpr(cpu,rd,AMD64_RDX);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   /* edx:eax are directly modified: throw them */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RAX);
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2643
/* MULLI - Multiply Low Immediate:
 * $rd = low 32 bits of (s32)$ra * sign_extend(imm). */
DECLARE_INSN(MULLI)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint32_t imm = bits(insn,0,15);
   int hreg_t0;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"mulli");
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RAX);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);

   ppc32_op_emit_load_gpr(cpu,AMD64_RAX,ra);

   /* rd = lo(ra * imm) */
   iop = ppc32_op_emit_insn_output(cpu,2,"mulli");

   /* sign-extend the 16-bit immediate, then signed multiply (flag 1);
      only the low word in EAX is kept */
   ppc32_load_imm(&iop->ob_ptr,hreg_t0,sign_extend_32(imm,16));
   amd64_mul_reg_size(iop->ob_ptr,hreg_t0,1,4);
   ppc32_op_emit_store_gpr(cpu,rd,AMD64_RAX);

   /* edx:eax are directly modified: throw them */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RAX);
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2674
/* MULLW - Multiply Low Word: $rd = low 32 bits of $ra * $rb.
 * NOTE(review): the OE (overflow enable) bit of the instruction is not
 * examined here — confirm XER[OV] handling elsewhere if needed. */
DECLARE_INSN(MULLW)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rb;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"mullw");
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RAX);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,AMD64_RAX,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   /* rd = lo(ra * rb) : signed multiply (flag 1), low word in EAX */
   iop = ppc32_op_emit_insn_output(cpu,2,"mullw");
   amd64_mul_reg_size(iop->ob_ptr,hreg_rb,1,4);

   /* Rc bit set: compute host flags from the low word (the result) */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,AMD64_RAX,AMD64_RAX,4);

   ppc32_op_emit_store_gpr(cpu,rd,AMD64_RAX);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   /* edx:eax are directly modified: throw them */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RAX);
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2711
/* NAND: $ra = ~($rs & $rb) (AND then complement). */
DECLARE_INSN(NAND)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rs,hreg_ra,hreg_rb;
   jit_op_t *iop;

   /* $ra = ~($rs & $rb) */
   ppc32_jit_start_hreg_seq(cpu,"nand");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,2,"nand");

   /* aliasing cases: presumably the allocator maps the same GPR to the
      same host register, so hreg_ra already holds the shared operand */
   if (ra == rs)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_ra,hreg_rb,4);
   else if (ra == rb)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_ra,hreg_rs,4);
   else {
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_ra,hreg_rb,4);
   }

   amd64_not_reg(iop->ob_ptr,hreg_ra);

   /* Rc bit set: NOT does not set host flags, so test the final value */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2754
/* NEG: $rd = -$ra (two's complement negation). */
DECLARE_INSN(NEG)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int hreg_rd,hreg_ra;
   jit_op_t *iop;

   /* $rd = neg($ra) */
   ppc32_jit_start_hreg_seq(cpu,"neg");
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);

   iop = ppc32_op_emit_insn_output(cpu,1,"neg");

   if (rd != ra)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);

   amd64_neg_reg(iop->ob_ptr,hreg_rd);

   /* Rc bit set: re-test the result so CR0 comes from a plain test */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_rd,hreg_rd,4);

   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2788
/* NOR: $ra = ~($rs | $rb) (OR then complement). */
DECLARE_INSN(NOR)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rs,hreg_ra,hreg_rb;
   jit_op_t *iop;

   /* $ra = ~($rs | $rb) */
   ppc32_jit_start_hreg_seq(cpu,"nor");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,2,"nor");

   /* aliasing cases: presumably the allocator maps the same GPR to the
      same host register, so hreg_ra already holds the shared operand */
   if (ra == rs)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_rb,4);
   else if (ra == rb)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_rs,4);
   else {
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_rb,4);
   }

   amd64_not_reg(iop->ob_ptr,hreg_ra);

   /* Rc bit set: NOT does not set host flags, so test the final value */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2831
/* OR: $ra = $rs | $rb.
 * The rs == rb case is the PPC "mr" (move register) idiom and is
 * special-cased to a simple move (or nothing at all when ra == rs). */
DECLARE_INSN(OR)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rs,hreg_ra,hreg_rb;
   jit_op_t *iop;

   /* $ra = $rs | $rb */
   ppc32_jit_start_hreg_seq(cpu,"or");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   /* special optimization for move/nop operation */
   if (rs == rb) {
      ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
      iop = ppc32_op_emit_insn_output(cpu,2,"or");

      if (ra != rs)
         amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

      /* Rc bit set: compute host flags from the moved value */
      if (insn & 1)
         amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

      ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

      if (insn & 1)
         ppc32_op_emit_update_flags(cpu,0,TRUE);

      ppc32_jit_close_hreg_seq(cpu);
      return(0);
   }

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,2,"or");

   /* aliasing cases: when ra matches a source, OR into it in place;
      the OR itself sets the host flags used below when Rc is set */
   if (ra == rs) {
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_rb,4);
   } else if (ra == rb)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_rs,4);
   else {
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_rb,4);
   }

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2889
/* ORC - OR with Complement: $ra = $rs | ~$rb. */
DECLARE_INSN(ORC)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rs,hreg_ra,hreg_rb,hreg_t0;
   jit_op_t *iop;

   /* $ra = $rs | ~$rb */
   ppc32_jit_start_hreg_seq(cpu,"orc");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,1,"orc");

   /* $t0 = ~$rb */
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_rb,4);
   amd64_not_reg(iop->ob_ptr,hreg_t0);

   /* $ra = $rs | $t0 (OR into t0 first when ra might alias rb) */
   if (ra == rs)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_t0,4);
   else {
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_t0,hreg_rs,4);
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_t0,4);
   }

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2931
/* ORI - OR Immediate: $ra = $rs | uimm (zero-extended 16-bit immediate).
 * Never updates CR0 (ORI has no Rc form). */
DECLARE_INSN(ORI)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t imm = bits(insn,0,15);
   m_uint32_t tmp = imm;
   int hreg_rs,hreg_ra;
   jit_op_t *iop;

   /* $ra = $rs | imm */
   ppc32_jit_start_hreg_seq(cpu,"ori");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,1,"ori");

   if (ra != rs)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

   amd64_alu_reg_imm_size(iop->ob_ptr,X86_OR,hreg_ra,tmp,4);
   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2960
/* ORIS - OR Immediate Shifted: $ra = $rs | (uimm << 16).
 * Never updates CR0 (ORIS has no Rc form). */
DECLARE_INSN(ORIS)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t imm = bits(insn,0,15);
   m_uint32_t tmp = imm << 16;
   int hreg_rs,hreg_ra;
   jit_op_t *iop;

   /* $ra = $rs | imm */
   ppc32_jit_start_hreg_seq(cpu,"oris");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,1,"oris");

   if (ra != rs)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

   amd64_alu_reg_imm_size(iop->ob_ptr,X86_OR,hreg_ra,tmp,4);
   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2989
/* RLWIMI - Rotate Left Word Immediate then Mask Insert:
 * $ra = ($ra & ~mask) | (rotl32($rs,sh) & mask), mask = MASK(mb,me). */
DECLARE_INSN(RLWIMI)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int sh = bits(insn,11,15);
   int mb = bits(insn,6,10);
   int me = bits(insn,1,5);
   register m_uint32_t mask;
   int hreg_rs,hreg_ra,hreg_t0;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"rlwimi");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   /* both registers are read: $ra keeps the bits outside the mask */
   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);

   mask = ppc32_rotate_mask(mb,me);

   iop = ppc32_op_emit_insn_output(cpu,2,"rlwimi");

   /* Apply inverse mask to $ra (skipped when mask==0: ~mask is all ones) */
   if (mask != 0)
      amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_ra,~mask);

   /* Rotate $rs of "sh" bits and apply the mask */
   amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_rs,4);

   if (sh != 0)
      amd64_shift_reg_imm_size(iop->ob_ptr,X86_ROL,hreg_t0,sh,4);

   if (mask != 0xFFFFFFFF)
      amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,mask);

   /* Store the result (the OR also provides host flags for Rc below) */
   amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_t0,4);
   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3037
/* RLWINM - Rotate Left Word Immediate then AND with Mask:
 * $ra = rotl32($rs,sh) & MASK(mb,me). */
DECLARE_INSN(RLWINM)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int sh = bits(insn,11,15);
   int mb = bits(insn,6,10);
   int me = bits(insn,1,5);
   register m_uint32_t mask;
   int hreg_rs,hreg_ra;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"rlwinm");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,2,"rlwinm");

   /* Rotate $rs of "sh" bits and apply the mask */
   mask = ppc32_rotate_mask(mb,me);

   if (rs != ra)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

   /* no-op rotations and all-ones masks are elided at translation time */
   if (sh != 0)
      amd64_shift_reg_imm_size(iop->ob_ptr,X86_ROL,hreg_ra,sh,4);

   if (mask != 0xFFFFFFFF)
      amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_ra,mask);

   /* Rc bit set: compute host flags from the final value */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3081
/* RLWNM - Rotate Left Word then AND with Mask (rotate count from $rb):
 * $ra = rotl32($rs, $rb & 31) & MASK(mb,me).
 * The variable rotate count must live in CL, hence the forced RCX. */
DECLARE_INSN(RLWNM)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int mb = bits(insn,6,10);
   int me = bits(insn,1,5);
   register m_uint32_t mask;
   int hreg_rs,hreg_ra,hreg_t0;
   jit_op_t *iop;

   /* ecx is directly modified: throw it */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RCX);

   ppc32_jit_start_hreg_seq(cpu,"rlwnm");
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RCX);

   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   /* rotate count goes in RCX (CL is the implicit x86 shift count) */
   ppc32_op_emit_load_gpr(cpu,AMD64_RCX,rb);

   iop = ppc32_op_emit_insn_output(cpu,2,"rlwnm");

   /* Load the shift register ("sh") */
   mask = ppc32_rotate_mask(mb,me);

   /* Rotate $rs and apply the mask */
   amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_rs,4);

   amd64_shift_reg_size(iop->ob_ptr,X86_ROL,hreg_t0,4);

   if (mask != 0xFFFFFFFF)
      amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,mask);

   amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_t0,4);

   /* Rc bit set: compute host flags from the final value */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3134
3135 /* Shift Left Word */
DECLARE_INSN(SLW)3136 DECLARE_INSN(SLW)
3137 {
3138 int rs = bits(insn,21,25);
3139 int ra = bits(insn,16,20);
3140 int rb = bits(insn,11,15);
3141 int hreg_rs,hreg_ra;
3142 jit_op_t *iop;
3143
3144 /* ecx is directly modified: throw it */
3145 ppc32_op_emit_alter_host_reg(cpu,AMD64_RCX);
3146
3147 ppc32_jit_start_hreg_seq(cpu,"slw");
3148 ppc32_jit_alloc_hreg_forced(cpu,AMD64_RCX);
3149 hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
3150 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
3151
3152 /* $ra = $rs << $rb. If count >= 32, then null result */
3153 ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
3154 ppc32_op_emit_load_gpr(cpu,AMD64_RCX,rb);
3155
3156 iop = ppc32_op_emit_insn_output(cpu,3,"slw");
3157
3158 amd64_alu_reg_imm(iop->ob_ptr,X86_AND,AMD64_RCX,0x3f);
3159
3160 if (ra != rs)
3161 amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
3162
3163 amd64_shift_reg(iop->ob_ptr,X86_SHL,hreg_ra);
3164
3165 /* store the result */
3166 if (insn & 1)
3167 amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);
3168
3169 ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);
3170
3171 if (insn & 1)
3172 ppc32_op_emit_update_flags(cpu,0,TRUE);
3173
3174 ppc32_jit_close_hreg_seq(cpu);
3175 return(0);
3176 }
3177
/* SRAWI - Shift Right Algebraic Word Immediate */
DECLARE_INSN(SRAWI)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int sh = bits(insn,11,15);
   register m_uint32_t mask;
   int hreg_rs,hreg_ra,hreg_t0;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"srawi");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   /* $ra = (int32)$rs >> sh */
   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,3,"srawi");
   /* keep a copy of the unshifted source for the carry test below */
   amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_rs,4);

   if (ra != rs)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
   amd64_shift_reg_imm_size(iop->ob_ptr,X86_SAR,hreg_ra,sh,4);

   /* set XER_CA depending on the result:
      CA = (source negative) && (a 1-bit was shifted out).
      mask keeps the sh low-order bits plus the sign bit. */
   mask = ~(0xFFFFFFFFU << sh) | 0x80000000;

   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,mask);
   /* unsigned-above 0x80000000 <=> sign bit set AND some low bit set */
   amd64_alu_reg_imm_size(iop->ob_ptr,X86_CMP,hreg_t0,0x80000000,4);
   amd64_set_reg(iop->ob_ptr,X86_CC_A,hreg_t0,FALSE);
   /* set_reg only writes the low byte: keep bit 0 only */
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x1);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                         hreg_t0,4);

   /* Rc bit: derive host flags from the result */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3224
3225 /* Shift Right Word */
DECLARE_INSN(SRW)3226 DECLARE_INSN(SRW)
3227 {
3228 int rs = bits(insn,21,25);
3229 int ra = bits(insn,16,20);
3230 int rb = bits(insn,11,15);
3231 int hreg_rs,hreg_ra;
3232 jit_op_t *iop;
3233
3234 /* ecx is directly modified: throw it */
3235 ppc32_op_emit_alter_host_reg(cpu,AMD64_RCX);
3236
3237 ppc32_jit_start_hreg_seq(cpu,"srw");
3238 ppc32_jit_alloc_hreg_forced(cpu,AMD64_RCX);
3239 hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
3240 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
3241
3242 /* $ra = $rs >> $rb. If count >= 32, then null result */
3243 ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
3244 ppc32_op_emit_load_gpr(cpu,AMD64_RCX,rb);
3245
3246 iop = ppc32_op_emit_insn_output(cpu,3,"srw");
3247
3248 amd64_alu_reg_imm(iop->ob_ptr,X86_AND,AMD64_RCX,0x3f);
3249
3250 if (ra != rs)
3251 amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
3252
3253 amd64_shift_reg(iop->ob_ptr,X86_SHR,hreg_ra);
3254
3255 /* store the result */
3256 if (insn & 1)
3257 amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);
3258
3259 ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);
3260
3261 if (insn & 1)
3262 ppc32_op_emit_update_flags(cpu,0,TRUE);
3263
3264 ppc32_jit_close_hreg_seq(cpu);
3265 return(0);
3266 }
3267
3268 /* STB - Store Byte */
DECLARE_INSN(STB)3269 DECLARE_INSN(STB)
3270 {
3271 int rs = bits(insn,21,25);
3272 int ra = bits(insn,16,20);
3273 m_uint16_t offset = bits(insn,0,15);
3274
3275 //ppc32_emit_memop(b,PPC_MEMOP_STB,ra,offset,rs,0);
3276 ppc32_emit_memop_fast(cpu,b,1,PPC_MEMOP_STB,ra,offset,rs,
3277 ppc32_memop_fast_stb);
3278 return(0);
3279 }
3280
3281 /* STBU - Store Byte with Update */
DECLARE_INSN(STBU)3282 DECLARE_INSN(STBU)
3283 {
3284 int rs = bits(insn,21,25);
3285 int ra = bits(insn,16,20);
3286 m_uint16_t offset = bits(insn,0,15);
3287
3288 ppc32_emit_memop(cpu,b,PPC_MEMOP_STB,ra,offset,rs,1);
3289 return(0);
3290 }
3291
3292 /* STBUX - Store Byte with Update Indexed */
DECLARE_INSN(STBUX)3293 DECLARE_INSN(STBUX)
3294 {
3295 int rs = bits(insn,21,25);
3296 int ra = bits(insn,16,20);
3297 int rb = bits(insn,11,15);
3298
3299 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_STB,ra,rb,rs,1);
3300 return(0);
3301 }
3302
/* STBX - Store Byte Indexed */
DECLARE_INSN(STBX)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   /* store byte at $ra+$rb, no update of $ra */
   ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_STB,ra,rb,rs,0);
   return(0);
}
3313
3314 /* STH - Store Half-Word */
DECLARE_INSN(STH)3315 DECLARE_INSN(STH)
3316 {
3317 int rs = bits(insn,21,25);
3318 int ra = bits(insn,16,20);
3319 m_uint16_t offset = bits(insn,0,15);
3320
3321 ppc32_emit_memop(cpu,b,PPC_MEMOP_STH,ra,offset,rs,0);
3322 return(0);
3323 }
3324
3325 /* STHU - Store Half-Word with Update */
DECLARE_INSN(STHU)3326 DECLARE_INSN(STHU)
3327 {
3328 int rs = bits(insn,21,25);
3329 int ra = bits(insn,16,20);
3330 m_uint16_t offset = bits(insn,0,15);
3331
3332 ppc32_emit_memop(cpu,b,PPC_MEMOP_STH,ra,offset,rs,1);
3333 return(0);
3334 }
3335
3336 /* STHUX - Store Half-Word with Update Indexed */
DECLARE_INSN(STHUX)3337 DECLARE_INSN(STHUX)
3338 {
3339 int rs = bits(insn,21,25);
3340 int ra = bits(insn,16,20);
3341 int rb = bits(insn,11,15);
3342
3343 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_STH,ra,rb,rs,1);
3344 return(0);
3345 }
3346
/* STHX - Store Half-Word Indexed */
DECLARE_INSN(STHX)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   /* store half-word at $ra+$rb, no update of $ra */
   ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_STH,ra,rb,rs,0);
   return(0);
}
3357
3358 /* STW - Store Word */
DECLARE_INSN(STW)3359 DECLARE_INSN(STW)
3360 {
3361 int rs = bits(insn,21,25);
3362 int ra = bits(insn,16,20);
3363 m_uint16_t offset = bits(insn,0,15);
3364
3365 //ppc32_emit_memop(b,PPC_MEMOP_STW,ra,offset,rs,0);
3366 ppc32_emit_memop_fast(cpu,b,1,PPC_MEMOP_STW,ra,offset,rs,
3367 ppc32_memop_fast_stw);
3368 return(0);
3369 }
3370
3371 /* STWU - Store Word with Update */
DECLARE_INSN(STWU)3372 DECLARE_INSN(STWU)
3373 {
3374 int rs = bits(insn,21,25);
3375 int ra = bits(insn,16,20);
3376 m_uint16_t offset = bits(insn,0,15);
3377
3378 ppc32_emit_memop(cpu,b,PPC_MEMOP_STW,ra,offset,rs,1);
3379 return(0);
3380 }
3381
3382 /* STWUX - Store Word with Update Indexed */
DECLARE_INSN(STWUX)3383 DECLARE_INSN(STWUX)
3384 {
3385 int rs = bits(insn,21,25);
3386 int ra = bits(insn,16,20);
3387 int rb = bits(insn,11,15);
3388
3389 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_STW,ra,rb,rs,1);
3390 return(0);
3391 }
3392
/* STWX - Store Word Indexed */
DECLARE_INSN(STWX)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   /* store word at $ra+$rb, no update of $ra */
   ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_STW,ra,rb,rs,0);
   return(0);
}
3403
3404 /* SUBF - Subtract From */
DECLARE_INSN(SUBF)3405 DECLARE_INSN(SUBF)
3406 {
3407 int rd = bits(insn,21,25);
3408 int ra = bits(insn,16,20);
3409 int rb = bits(insn,11,15);
3410 int hreg_rd,hreg_ra,hreg_rb,hreg_t0;
3411 jit_op_t *iop;
3412
3413 /* $rd = $rb - $ra */
3414 ppc32_jit_start_hreg_seq(cpu,"subf");
3415 hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
3416
3417 hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
3418 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
3419 hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);
3420
3421 ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
3422 ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);
3423
3424 iop = ppc32_op_emit_insn_output(cpu,2,"subf");
3425
3426 if (rd == rb)
3427 amd64_alu_reg_reg_size(iop->ob_ptr,X86_SUB,hreg_rd,hreg_ra,4);
3428 else if (rd == ra) {
3429 amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_rb,4);
3430 amd64_alu_reg_reg_size(iop->ob_ptr,X86_SUB,hreg_t0,hreg_ra,4);
3431 amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_t0,4);
3432 } else {
3433 amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_rb,4);
3434 amd64_alu_reg_reg_size(iop->ob_ptr,X86_SUB,hreg_rd,hreg_ra,4);
3435 }
3436
3437 ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);
3438
3439 if (insn & 1)
3440 ppc32_op_emit_update_flags(cpu,0,TRUE);
3441
3442 ppc32_jit_close_hreg_seq(cpu);
3443 return(0);
3444 }
3445
3446 /* SUBFC - Subtract From Carrying */
DECLARE_INSN(SUBFC)3447 DECLARE_INSN(SUBFC)
3448 {
3449 int rd = bits(insn,21,25);
3450 int ra = bits(insn,16,20);
3451 int rb = bits(insn,11,15);
3452 int hreg_ra,hreg_rb,hreg_rd,hreg_t0,hreg_t1;
3453 jit_op_t *iop;
3454
3455 /* $rd = ~$ra + 1 + $rb */
3456 ppc32_jit_start_hreg_seq(cpu,"subfc");
3457 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
3458 hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);
3459 hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
3460
3461 hreg_t0 = ppc32_jit_alloc_hreg(cpu,-1);
3462 hreg_t1 = ppc32_jit_get_tmp_hreg(cpu);
3463
3464 ppc32_op_emit_alter_host_reg(cpu,hreg_t0);
3465 ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
3466 ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);
3467
3468 iop = ppc32_op_emit_insn_output(cpu,3,"subfc");
3469
3470 amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_t1,hreg_t1);
3471
3472 /* $t0 = ~$ra + 1 */
3473 amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_ra,4);
3474 amd64_not_reg(iop->ob_ptr,hreg_t0);
3475 amd64_alu_reg_imm_size(iop->ob_ptr,X86_ADD,hreg_t0,1,4);
3476 amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
3477 amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
3478 hreg_t1,4);
3479
3480 /* $t0 += $rb */
3481 amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_t0,hreg_rb,4);
3482 amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
3483 amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
3484 AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
3485 hreg_t1,4);
3486
3487 amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_t0,4);
3488
3489 if (insn & 1)
3490 amd64_test_reg_reg_size(iop->ob_ptr,hreg_rd,hreg_rd,4);
3491
3492 ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);
3493
3494 /* update cr0 */
3495 if (insn & 1)
3496 ppc32_update_cr0(b);
3497
3498 ppc32_jit_close_hreg_seq(cpu);
3499 return(0);
3500 }
3501
3502 /* SUBFE - Subtract From Extended */
DECLARE_INSN(SUBFE)3503 DECLARE_INSN(SUBFE)
3504 {
3505 int rd = bits(insn,21,25);
3506 int ra = bits(insn,16,20);
3507 int rb = bits(insn,11,15);
3508 int hreg_ra,hreg_rb,hreg_rd,hreg_t0,hreg_t1;
3509 jit_op_t *iop;
3510
3511 /* $rd = ~$ra + $carry (xer_ca) + $rb */
3512 ppc32_jit_start_hreg_seq(cpu,"subfe");
3513 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
3514 hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);
3515 hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
3516
3517 hreg_t0 = ppc32_jit_alloc_hreg(cpu,-1);
3518 hreg_t1 = ppc32_jit_get_tmp_hreg(cpu);
3519
3520 ppc32_op_emit_alter_host_reg(cpu,hreg_t0);
3521 ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
3522 ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);
3523
3524 iop = ppc32_op_emit_insn_output(cpu,3,"subfe");
3525
3526 amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_t1,hreg_t1);
3527
3528 /* $t0 = ~$ra + $carry */
3529 amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_ra,4);
3530 amd64_not_reg(iop->ob_ptr,hreg_t0);
3531 amd64_alu_reg_membase_size(iop->ob_ptr,X86_ADD,hreg_t0,
3532 AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),4);
3533
3534 amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
3535 amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
3536 hreg_t1,4);
3537
3538 /* $t0 += $rb */
3539 amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_t0,hreg_rb,4);
3540 amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
3541 amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
3542 AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
3543 hreg_t1,4);
3544
3545 amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_t0,4);
3546
3547 if (insn & 1)
3548 amd64_test_reg_reg_size(iop->ob_ptr,hreg_rd,hreg_rd,4);
3549
3550 ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);
3551
3552 /* update cr0 */
3553 if (insn & 1)
3554 ppc32_update_cr0(b);
3555
3556 ppc32_jit_close_hreg_seq(cpu);
3557 return(0);
3558 }
3559
/* SUBFIC - Subtract From Immediate Carrying */
DECLARE_INSN(SUBFIC)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t imm = bits(insn,0,15);
   m_uint32_t tmp = sign_extend_32(imm,16);
   int hreg_ra,hreg_rd,hreg_t0,hreg_t1;
   jit_op_t *iop;

   /* $rd = ~$ra + 1 + sign_extend(imm,16),
      XER_CA = carry out of the full sum (OR of the stage carries) */
   ppc32_jit_start_hreg_seq(cpu,"subfic");
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   hreg_t0 = ppc32_jit_alloc_hreg(cpu,-1);
   hreg_t1 = ppc32_jit_get_tmp_hreg(cpu);

   ppc32_op_emit_alter_host_reg(cpu,hreg_t0);
   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);

   iop = ppc32_op_emit_insn_output(cpu,3,"subfic");

   /* $t1 accumulates the carry bit */
   amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_t1,hreg_t1);

   /* $t0 = ~$ra + 1 */
   amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_ra,4);
   amd64_not_reg(iop->ob_ptr,hreg_t0);
   amd64_alu_reg_imm_size(iop->ob_ptr,X86_ADD,hreg_t0,1,4);

   /* record the first-stage carry in xer_ca */
   amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                         hreg_t1,4);

   /* $t0 += sign_extend(imm,16); OR in the second-stage carry */
   amd64_alu_reg_imm_size(iop->ob_ptr,X86_ADD,hreg_t0,tmp,4);
   amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                              hreg_t1,4);

   /* SUBFIC has no Rc bit: no CR0 update */
   amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_t0,4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3607
/* SYNC - Synchronize */
DECLARE_INSN(SYNC)
{
   /* Memory ordering is a no-op under this emulation: nothing to emit. */
   return(0);
}
3613
3614 /* XOR */
DECLARE_INSN(XOR)3615 DECLARE_INSN(XOR)
3616 {
3617 int rs = bits(insn,21,25);
3618 int ra = bits(insn,16,20);
3619 int rb = bits(insn,11,15);
3620 int hreg_rs,hreg_ra,hreg_rb;
3621 jit_op_t *iop;
3622
3623 /* $ra = $rs ^ $rb */
3624 ppc32_jit_start_hreg_seq(cpu,"xor");
3625 hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
3626 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
3627 hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);
3628
3629 ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
3630 ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);
3631
3632 iop = ppc32_op_emit_insn_output(cpu,1,"xor");
3633
3634 if (ra == rs)
3635 amd64_alu_reg_reg_size(iop->ob_ptr,X86_XOR,hreg_ra,hreg_rb,4);
3636 else if (ra == rb)
3637 amd64_alu_reg_reg_size(iop->ob_ptr,X86_XOR,hreg_ra,hreg_rs,4);
3638 else {
3639 amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
3640 amd64_alu_reg_reg_size(iop->ob_ptr,X86_XOR,hreg_ra,hreg_rb,4);
3641 }
3642
3643 ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);
3644
3645 if (insn & 1)
3646 ppc32_op_emit_update_flags(cpu,0,TRUE);
3647
3648 ppc32_jit_close_hreg_seq(cpu);
3649 return(0);
3650 }
3651
3652 /* XORI - XOR Immediate */
DECLARE_INSN(XORI)3653 DECLARE_INSN(XORI)
3654 {
3655 int rs = bits(insn,21,25);
3656 int ra = bits(insn,16,20);
3657 m_uint32_t imm = bits(insn,0,15);
3658 int hreg_rs,hreg_ra;
3659 jit_op_t *iop;
3660
3661 /* $ra = $rs ^ imm */
3662 ppc32_jit_start_hreg_seq(cpu,"xori");
3663 hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
3664 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
3665
3666 ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
3667
3668 iop = ppc32_op_emit_insn_output(cpu,1,"xori");
3669
3670 if (ra != rs)
3671 amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
3672
3673 amd64_alu_reg_imm(iop->ob_ptr,X86_XOR,hreg_ra,imm);
3674 ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);
3675
3676 ppc32_jit_close_hreg_seq(cpu);
3677 return(0);
3678 }
3679
3680 /* XORIS - XOR Immediate Shifted */
DECLARE_INSN(XORIS)3681 DECLARE_INSN(XORIS)
3682 {
3683 int rs = bits(insn,21,25);
3684 int ra = bits(insn,16,20);
3685 m_uint16_t imm = bits(insn,0,15);
3686 m_uint32_t tmp = imm << 16;
3687 int hreg_rs,hreg_ra;
3688 jit_op_t *iop;
3689
3690 /* $ra = $rs ^ (imm << 16) */
3691 ppc32_jit_start_hreg_seq(cpu,"xoris");
3692 hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
3693 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
3694
3695 ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
3696
3697 iop = ppc32_op_emit_insn_output(cpu,1,"xoris");
3698
3699 if (ra != rs)
3700 amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
3701
3702 amd64_alu_reg_imm(iop->ob_ptr,X86_XOR,hreg_ra,tmp);
3703 ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);
3704
3705 ppc32_jit_close_hreg_seq(cpu);
3706 return(0);
3707 }
3708
/* PPC instruction array.
 * Scanned first-to-last: an instruction word matches an entry when
 * (insn & mask) == value, so more specific encodings (e.g. BLR/BCTR,
 * BCC) must precede more general ones (BCLR, BC). The trailing
 * "unknown" entry (mask 0) catches everything else; the NULL entry
 * terminates the table. */
struct ppc32_insn_tag ppc32_insn_tags[] = {
   { ppc32_emit_BLR        , 0xfffffffe , 0x4e800020 },
   { ppc32_emit_BCTR       , 0xfffffffe , 0x4e800420 },
   { ppc32_emit_MFLR       , 0xfc1fffff , 0x7c0802a6 },
   { ppc32_emit_MTLR       , 0xfc1fffff , 0x7c0803a6 },
   { ppc32_emit_MFCTR      , 0xfc1fffff , 0x7c0902a6 },
   { ppc32_emit_MTCTR      , 0xfc1fffff , 0x7c0903a6 },
   { ppc32_emit_MFTBL      , 0xfc1ff7ff , 0x7c0c42e6 },
   { ppc32_emit_MFTBU      , 0xfc1ff7ff , 0x7c0d42e6 },
   { ppc32_emit_ADD        , 0xfc0007fe , 0x7c000214 },
   { ppc32_emit_ADDC       , 0xfc0007fe , 0x7c000014 },
   { ppc32_emit_ADDE       , 0xfc0007fe , 0x7c000114 },
   { ppc32_emit_ADDI       , 0xfc000000 , 0x38000000 },
   { ppc32_emit_ADDIC      , 0xfc000000 , 0x30000000 },
   { ppc32_emit_ADDIC_dot  , 0xfc000000 , 0x34000000 },
   { ppc32_emit_ADDIS      , 0xfc000000 , 0x3c000000 },
   { ppc32_emit_ADDZE      , 0xfc00fffe , 0x7c000194 },
   { ppc32_emit_AND        , 0xfc0007fe , 0x7c000038 },
   { ppc32_emit_ANDC       , 0xfc0007fe , 0x7c000078 },
   { ppc32_emit_ANDI       , 0xfc000000 , 0x70000000 },
   { ppc32_emit_ANDIS      , 0xfc000000 , 0x74000000 },
   { ppc32_emit_B          , 0xfc000003 , 0x48000000 },
   { ppc32_emit_BA         , 0xfc000003 , 0x48000002 },
   { ppc32_emit_BL         , 0xfc000003 , 0x48000001 },
   { ppc32_emit_BLA        , 0xfc000003 , 0x48000003 },
   { ppc32_emit_BCC        , 0xfe800000 , 0x40800000 },
   { ppc32_emit_BC         , 0xfc000000 , 0x40000000 },
   { ppc32_emit_BCLR       , 0xfc00fffe , 0x4c000020 },
   { ppc32_emit_CMP        , 0xfc6007ff , 0x7c000000 },
   { ppc32_emit_CMPI       , 0xfc600000 , 0x2c000000 },
   { ppc32_emit_CMPL       , 0xfc6007ff , 0x7c000040 },
   { ppc32_emit_CMPLI      , 0xfc600000 , 0x28000000 },
   { ppc32_emit_CRAND      , 0xfc0007ff , 0x4c000202 },
   { ppc32_emit_CRANDC     , 0xfc0007ff , 0x4c000102 },
   { ppc32_emit_CREQV      , 0xfc0007ff , 0x4c000242 },
   { ppc32_emit_CRNAND     , 0xfc0007ff , 0x4c0001c2 },
   { ppc32_emit_CRNOR      , 0xfc0007ff , 0x4c000042 },
   { ppc32_emit_CROR       , 0xfc0007ff , 0x4c000382 },
   { ppc32_emit_CRORC      , 0xfc0007ff , 0x4c000342 },
   { ppc32_emit_CRXOR      , 0xfc0007ff , 0x4c000182 },
   { ppc32_emit_DIVWU      , 0xfc0007fe , 0x7c000396 },
   { ppc32_emit_EQV        , 0xfc0007fe , 0x7c000238 },
   { ppc32_emit_EXTSB      , 0xfc00fffe , 0x7c000774 },
   { ppc32_emit_EXTSH      , 0xfc00fffe , 0x7c000734 },
   { ppc32_emit_LBZ        , 0xfc000000 , 0x88000000 },
   { ppc32_emit_LBZU       , 0xfc000000 , 0x8c000000 },
   { ppc32_emit_LBZUX      , 0xfc0007ff , 0x7c0000ee },
   { ppc32_emit_LBZX       , 0xfc0007ff , 0x7c0000ae },
   { ppc32_emit_LHA        , 0xfc000000 , 0xa8000000 },
   { ppc32_emit_LHAU       , 0xfc000000 , 0xac000000 },
   { ppc32_emit_LHAUX      , 0xfc0007ff , 0x7c0002ee },
   { ppc32_emit_LHAX       , 0xfc0007ff , 0x7c0002ae },
   { ppc32_emit_LHZ        , 0xfc000000 , 0xa0000000 },
   { ppc32_emit_LHZU       , 0xfc000000 , 0xa4000000 },
   { ppc32_emit_LHZUX      , 0xfc0007ff , 0x7c00026e },
   { ppc32_emit_LHZX       , 0xfc0007ff , 0x7c00022e },
   { ppc32_emit_LWZ        , 0xfc000000 , 0x80000000 },
   { ppc32_emit_LWZU       , 0xfc000000 , 0x84000000 },
   { ppc32_emit_LWZUX      , 0xfc0007ff , 0x7c00006e },
   { ppc32_emit_LWZX       , 0xfc0007ff , 0x7c00002e },
   { ppc32_emit_MCRF       , 0xfc63ffff , 0x4c000000 },
   { ppc32_emit_MFCR       , 0xfc1fffff , 0x7c000026 },
   { ppc32_emit_MFMSR      , 0xfc1fffff , 0x7c0000a6 },
   { ppc32_emit_MFSR       , 0xfc10ffff , 0x7c0004a6 },
   { ppc32_emit_MTCRF      , 0xfc100fff , 0x7c000120 },
   { ppc32_emit_MULHW      , 0xfc0007fe , 0x7c000096 },
   { ppc32_emit_MULHWU     , 0xfc0007fe , 0x7c000016 },
   { ppc32_emit_MULLI      , 0xfc000000 , 0x1c000000 },
   { ppc32_emit_MULLW      , 0xfc0007fe , 0x7c0001d6 },
   { ppc32_emit_NAND       , 0xfc0007fe , 0x7c0003b8 },
   { ppc32_emit_NEG        , 0xfc00fffe , 0x7c0000d0 },
   { ppc32_emit_NOR        , 0xfc0007fe , 0x7c0000f8 },
   { ppc32_emit_OR         , 0xfc0007fe , 0x7c000378 },
   { ppc32_emit_ORC        , 0xfc0007fe , 0x7c000338 },
   { ppc32_emit_ORI        , 0xfc000000 , 0x60000000 },
   { ppc32_emit_ORIS       , 0xfc000000 , 0x64000000 },
   { ppc32_emit_RLWIMI     , 0xfc000000 , 0x50000000 },
   { ppc32_emit_RLWINM     , 0xfc000000 , 0x54000000 },
   { ppc32_emit_RLWNM      , 0xfc000000 , 0x5c000000 },
   { ppc32_emit_SLW        , 0xfc0007fe , 0x7c000030 },
   { ppc32_emit_SRAWI      , 0xfc0007fe , 0x7c000670 },
   { ppc32_emit_SRW        , 0xfc0007fe , 0x7c000430 },
   { ppc32_emit_STB        , 0xfc000000 , 0x98000000 },
   { ppc32_emit_STBU       , 0xfc000000 , 0x9c000000 },
   { ppc32_emit_STBUX      , 0xfc0007ff , 0x7c0001ee },
   { ppc32_emit_STBX       , 0xfc0007ff , 0x7c0001ae },
   { ppc32_emit_STH        , 0xfc000000 , 0xb0000000 },
   { ppc32_emit_STHU       , 0xfc000000 , 0xb4000000 },
   { ppc32_emit_STHUX      , 0xfc0007ff , 0x7c00036e },
   { ppc32_emit_STHX       , 0xfc0007ff , 0x7c00032e },
   { ppc32_emit_STW        , 0xfc000000 , 0x90000000 },
   { ppc32_emit_STWU       , 0xfc000000 , 0x94000000 },
   { ppc32_emit_STWUX      , 0xfc0007ff , 0x7c00016e },
   { ppc32_emit_STWX       , 0xfc0007ff , 0x7c00012e },
   { ppc32_emit_SUBF       , 0xfc0007fe , 0x7c000050 },
   { ppc32_emit_SUBFC      , 0xfc0007fe , 0x7c000010 },
   { ppc32_emit_SUBFE      , 0xfc0007fe , 0x7c000110 },
   { ppc32_emit_SUBFIC     , 0xfc000000 , 0x20000000 },
   { ppc32_emit_SYNC       , 0xffffffff , 0x7c0004ac },
   { ppc32_emit_XOR        , 0xfc0007fe , 0x7c000278 },
   { ppc32_emit_XORI       , 0xfc000000 , 0x68000000 },
   { ppc32_emit_XORIS      , 0xfc000000 , 0x6c000000 },
   { ppc32_emit_unknown    , 0x00000000 , 0x00000000 },
   { NULL                  , 0x00000000 , 0x00000000 },
};
3815