1 /*
2 * Cisco router simulation platform.
3 * Copyright (c) 2005,2006 Christophe Fillot (cf@utc.fr)
4 */
5
6 #include <stdio.h>
7 #include <stdlib.h>
8 #include <unistd.h>
9 #include <string.h>
10 #include <sys/types.h>
11 #include <sys/stat.h>
12 #include <fcntl.h>
13
14 #include "cpu.h"
15 #include "jit_op.h"
16 #include "ppc32_jit.h"
17 #include "ppc32_amd64_trans.h"
18 #include "memory.h"
19
/* Macros for CPU structure access */

/* Byte offset of GPR "reg" inside the cpu_ppc_t structure */
#define REG_OFFSET(reg) (OFFSET(cpu_ppc_t,gpr[(reg)]))

/* Byte offset of the memory-access handler for memory operation "op" */
#define MEMOP_OFFSET(op) (OFFSET(cpu_ppc_t,mem_op_fn[(op)]))

/* Common prototype of the per-opcode instruction emitters below */
#define DECLARE_INSN(name) \
   static int ppc32_emit_##name(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b, \
                                ppc_insn_t insn)
27
/*
 * EFLAGS to Condition Register (CR) field - signed.
 *
 * Indexed by (EFLAGS >> 6) & 0x3F after a signed 32-bit compare
 * (see ppc32_update_cr): index bit 0 = ZF, bit 1 = SF, bit 5 = OF.
 * Yields the PPC CR field bits: 0x8 = LT, 0x4 = GT, 0x2 = EQ.
 * EQ whenever ZF is set; otherwise LT when SF != OF, GT when SF == OF.
 */
static m_uint32_t eflags_to_cr_signed[64] = {
   0x04, 0x02, 0x08, 0x02, 0x04, 0x02, 0x08, 0x02,
   0x04, 0x02, 0x08, 0x02, 0x04, 0x02, 0x08, 0x02,
   0x04, 0x02, 0x08, 0x02, 0x04, 0x02, 0x08, 0x02,
   0x04, 0x02, 0x08, 0x02, 0x04, 0x02, 0x08, 0x02,
   0x08, 0x02, 0x04, 0x02, 0x08, 0x02, 0x04, 0x02,
   0x08, 0x02, 0x04, 0x02, 0x08, 0x02, 0x04, 0x02,
   0x08, 0x02, 0x04, 0x02, 0x08, 0x02, 0x04, 0x02,
   0x08, 0x02, 0x04, 0x02, 0x08, 0x02, 0x04, 0x02,
};
39
/*
 * EFLAGS to Condition Register (CR) field - unsigned.
 *
 * Indexed by EFLAGS & 0xFF after an unsigned 32-bit compare
 * (see ppc32_update_cr): index bit 0 = CF, bit 6 = ZF.
 * Yields the PPC CR field bits: 0x8 = LT (CF set), 0x2 = EQ (ZF set),
 * 0x4 = GT (neither). Other EFLAGS bits in the low byte are don't-cares,
 * hence the repeating pattern.
 */
static m_uint32_t eflags_to_cr_unsigned[256] = {
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
   0x02, 0x08, 0x02, 0x08, 0x02, 0x08, 0x02, 0x08,
};
75
76 /* Load a 32 bit immediate value */
ppc32_load_imm(u_char ** ptr,u_int reg,m_uint32_t val)77 static inline void ppc32_load_imm(u_char **ptr,u_int reg,m_uint32_t val)
78 {
79 if (val)
80 amd64_mov_reg_imm_size(*ptr,reg,val,4);
81 else
82 amd64_alu_reg_reg_size(*ptr,X86_XOR,reg,reg,4);
83 }
84
/*
 * Set the Instruction Address (IA) register.
 * %r15 holds the cpu_ppc_t context pointer in generated code.
 */
void ppc32_set_ia(u_char **ptr,m_uint32_t new_ia)
{
   amd64_mov_membase_imm(*ptr,AMD64_R15,OFFSET(cpu_ppc_t,ia),new_ia,4);
}
90
/*
 * Set the Link Register (LR).
 * Stores the 32-bit return address into cpu->lr (%r15 = cpu context).
 */
static void ppc32_set_lr(jit_op_t *iop,m_uint32_t new_lr)
{
   amd64_mov_membase_imm(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,lr),new_lr,4);
}
96
97 /*
98 * Try to branch directly to the specified JIT block without returning to
99 * main loop.
100 */
ppc32_try_direct_far_jump(cpu_ppc_t * cpu,jit_op_t * iop,m_uint32_t new_ia)101 static void ppc32_try_direct_far_jump(cpu_ppc_t *cpu,jit_op_t *iop,
102 m_uint32_t new_ia)
103 {
104 m_uint32_t new_page,ia_hash,ia_offset;
105 u_char *test1,*test2,*test3,*test4;
106
107 /* Indicate that we throw %rbx, %rdx */
108 ppc32_op_emit_alter_host_reg(cpu,AMD64_RBX);
109 ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);
110 ppc32_op_emit_alter_host_reg(cpu,AMD64_RSI);
111
112 new_page = new_ia & PPC32_MIN_PAGE_MASK;
113 ia_offset = (new_ia & PPC32_MIN_PAGE_IMASK) >> 2;
114 ia_hash = ppc32_jit_get_virt_hash(new_ia);
115
116 /* Get JIT block info in %rdx */
117 amd64_mov_reg_membase(iop->ob_ptr,AMD64_RBX,
118 AMD64_R15,OFFSET(cpu_ppc_t,tcb_virt_hash),8);
119 amd64_mov_reg_membase(iop->ob_ptr,AMD64_RDX,
120 AMD64_RBX,ia_hash*sizeof(void *),8);
121
122 /* no JIT block found ? */
123 amd64_test_reg_reg(iop->ob_ptr,AMD64_RDX,AMD64_RDX);
124 test1 = iop->ob_ptr;
125 amd64_branch8(iop->ob_ptr, X86_CC_Z, 0, 1);
126
127 /* Check block IA */
128 ppc32_load_imm(&iop->ob_ptr,AMD64_RSI,new_page);
129 amd64_alu_reg_membase_size(iop->ob_ptr,X86_CMP,AMD64_RAX,AMD64_RDX,
130 OFFSET(ppc32_jit_tcb_t,start_ia),4);
131 test2 = iop->ob_ptr;
132 amd64_branch8(iop->ob_ptr, X86_CC_NE, 0, 1);
133
134 /* Jump to the code */
135 amd64_mov_reg_membase(iop->ob_ptr,AMD64_RSI,
136 AMD64_RDX,OFFSET(ppc32_jit_tcb_t,jit_insn_ptr),8);
137
138 amd64_test_reg_reg(iop->ob_ptr,AMD64_RSI,AMD64_RSI);
139 test3 = iop->ob_ptr;
140 amd64_branch8(iop->ob_ptr, X86_CC_Z, 0, 1);
141
142 amd64_mov_reg_membase(iop->ob_ptr,AMD64_RBX,
143 AMD64_RSI,ia_offset * sizeof(void *),8);
144
145 amd64_test_reg_reg(iop->ob_ptr,AMD64_RBX,AMD64_RBX);
146 test4 = iop->ob_ptr;
147 amd64_branch8(iop->ob_ptr, X86_CC_Z, 0, 1);
148 amd64_jump_reg(iop->ob_ptr,AMD64_RBX);
149
150 /* Returns to caller... */
151 amd64_patch(test1,iop->ob_ptr);
152 amd64_patch(test2,iop->ob_ptr);
153 amd64_patch(test3,iop->ob_ptr);
154 amd64_patch(test4,iop->ob_ptr);
155
156 ppc32_set_ia(&iop->ob_ptr,new_ia);
157 ppc32_jit_tcb_push_epilog(&iop->ob_ptr);
158 }
159
/*
 * Set Jump.
 *
 * Emit a jump to new_ia: a patched direct jump when the target lies in
 * the same translated block, otherwise either an inlined block lookup
 * (when exec_blk_direct_jump is enabled) or "set IA and return to the
 * main loop". local_jump is only used by the disabled sym_trace path.
 */
static void ppc32_set_jump(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b,jit_op_t *iop,
                           m_uint32_t new_ia,int local_jump)
{
   int return_to_caller = FALSE;
   u_char *jump_ptr;

#if 0
   if (cpu->sym_trace && !local_jump)
      return_to_caller = TRUE;
#endif

   if (!return_to_caller && ppc32_jit_tcb_local_addr(b,new_ia,&jump_ptr)) {
      /* Intra-block target: record a patch, real offset is fixed up later */
      ppc32_jit_tcb_record_patch(b,iop,iop->ob_ptr,new_ia);
      amd64_jump32(iop->ob_ptr,0);
   } else {
      if (cpu->exec_blk_direct_jump) {
         /* Block lookup optimization */
         ppc32_try_direct_far_jump(cpu,iop,new_ia);
      } else {
         ppc32_set_ia(&iop->ob_ptr,new_ia);
         ppc32_jit_tcb_push_epilog(&iop->ob_ptr);
      }
   }
}
185
/*
 * Jump to the next page.
 *
 * Emits the end-of-page jump through a private op list so the ops can
 * be generated, output and freed without touching the block's own list.
 */
void ppc32_set_page_jump(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b)
{
   jit_op_t *iop,*op_list = NULL;

   /* Temporarily redirect op emission into the private list */
   cpu->gen->jit_op_current = &op_list;

   iop = ppc32_op_emit_insn_output(cpu,4,"set_page_jump");
   ppc32_set_jump(cpu,b,iop,b->start_ia + PPC32_MIN_PAGE_SIZE,FALSE);
   ppc32_op_insn_output(b,iop);

   jit_op_free_list(cpu->gen,op_list);
   cpu->gen->jit_op_current = NULL;
}
200
/*
 * Load a GPR into the specified host register.
 * 32-bit load from cpu->gpr[ppc_reg] (%r15 = cpu context).
 */
static forced_inline void ppc32_load_gpr(u_char **ptr,u_int host_reg,
                                         u_int ppc_reg)
{
   amd64_mov_reg_membase(*ptr,host_reg,AMD64_R15,REG_OFFSET(ppc_reg),4);
}
207
/*
 * Store contents for a host register into a GPR register.
 * 32-bit store to cpu->gpr[ppc_reg] (%r15 = cpu context).
 */
static forced_inline void ppc32_store_gpr(u_char **ptr,u_int ppc_reg,
                                          u_int host_reg)
{
   amd64_mov_membase_reg(*ptr,AMD64_R15,REG_OFFSET(ppc_reg),host_reg,4);
}
214
/*
 * Apply an ALU operation on a GPR register and a host register.
 * host_reg = host_reg <op> cpu->gpr[ppc_reg]; sets host EFLAGS.
 */
static forced_inline void ppc32_alu_gpr(u_char **ptr,u_int op,
                                        u_int host_reg,u_int ppc_reg)
{
   amd64_alu_reg_membase_size(*ptr,op,host_reg,
                              AMD64_R15,REG_OFFSET(ppc_reg),4);
}
222
/*
 * Update CR from %eflags.
 * %rax and %rdx are modified.
 *
 * Translates the host EFLAGS produced by the preceding compare/test into
 * a 4-bit PPC CR value (LT/GT/EQ) via the eflags_to_cr_* lookup tables,
 * then stores it into CR field "field" of the CPU context.
 */
static void ppc32_update_cr(ppc32_jit_tcb_t *b,int field,int is_signed)
{
   /* Get status bits from EFLAGS */
   amd64_pushfd_size(b->jit_ptr,8);
   amd64_pop_reg(b->jit_ptr,AMD64_RAX);

   if (!is_signed) {
      /* unsigned compare: index table by low EFLAGS byte (CF/ZF) */
      amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RAX,0xFF);
      amd64_mov_reg_imm_size(b->jit_ptr,AMD64_RDX,eflags_to_cr_unsigned,8);
   } else {
      /* signed compare: index table by EFLAGS bits 6..11 (ZF/SF/OF) */
      amd64_shift_reg_imm(b->jit_ptr,X86_SHR,AMD64_RAX,6);
      amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RAX,0x3F);
      amd64_mov_reg_imm_size(b->jit_ptr,AMD64_RDX,eflags_to_cr_signed,8);
   }

   /* %eax = table[%rax] (index scale 2 => 4-byte entries) */
   amd64_mov_reg_memindex(b->jit_ptr,AMD64_RAX,AMD64_RDX,0,AMD64_RAX,2,4);

#if 0
   /* Check XER Summary of Overflow and report it */
   amd64_mov_reg_membase(b->jit_ptr,AMD64_RCX,
                         AMD64_R15,OFFSET(cpu_ppc_t,xer),4);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RCX,PPC32_XER_SO);
   amd64_shift_reg_imm(b->jit_ptr,X86_SHR,AMD64_RCX,(field << 2) + 3);
   amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RDX,AMD64_RCX);
#endif

   /* Store modified CR field */
   amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,PPC32_CR_FIELD_OFFSET(field),
                         AMD64_RAX,4);
}
257
/*
 * Update CR0 from %eflags (signed comparison semantics).
 * Same registers as ppc32_update_cr() are modified: %rax, %rdx.
 */
static void ppc32_update_cr0(ppc32_jit_tcb_t *b)
{
   ppc32_update_cr(b,0,TRUE);
}
266
/*
 * Indicate registers modified by ppc32_update_cr() functions,
 * so the register allocator spills any PPC GPR mapped to them.
 */
void ppc32_update_cr_set_altered_hreg(cpu_ppc_t *cpu)
{
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RAX);
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);
}
273
/*
 * Basic C call.
 * Loads the function address into %rbx (the reserved scratch register)
 * and calls it indirectly.
 */
static forced_inline void ppc32_emit_basic_c_call(u_char **ptr,void *f)
{
   amd64_mov_reg_imm(*ptr,AMD64_RBX,f);
   amd64_call_reg(*ptr,AMD64_RBX);
}
280
/*
 * Emit a simple call to a C function without any parameter.
 * IA is synchronized to the current instruction first so the callee
 * (and any exception path) sees a correct PC.
 */
static void ppc32_emit_c_call(ppc32_jit_tcb_t *b,jit_op_t *iop,void *f)
{
   ppc32_set_ia(&iop->ob_ptr,b->start_ia+(b->ppc_trans_pos << 2));
   ppc32_emit_basic_c_call(&iop->ob_ptr,f);
}
287
288 /* ======================================================================== */
289
290 /* Initialize register mapping */
ppc32_jit_init_hreg_mapping(cpu_ppc_t * cpu)291 void ppc32_jit_init_hreg_mapping(cpu_ppc_t *cpu)
292 {
293 int avail_hregs[] = { AMD64_RSI, AMD64_RAX, AMD64_RCX, AMD64_RDX,
294 AMD64_R13, AMD64_R14, AMD64_RDI, -1 };
295 struct hreg_map *map;
296 int i,hreg;
297
298 cpu->hreg_map_list = cpu->hreg_lru = NULL;
299
300 /* Add the available registers to the map list */
301 for(i=0;avail_hregs[i]!=-1;i++) {
302 hreg = avail_hregs[i];
303 map = &cpu->hreg_map[hreg];
304
305 /* Initialize mapping. At the beginning, no PPC reg is mapped */
306 map->flags = 0;
307 map->hreg = hreg;
308 map->vreg = -1;
309 ppc32_jit_insert_hreg_mru(cpu,map);
310 }
311
312 /* Clear PPC registers mapping */
313 for(i=0;i<PPC32_GPR_NR;i++)
314 cpu->ppc_reg_map[i] = -1;
315 }
316
/*
 * Allocate a specific temp register.
 * %rbx is permanently reserved as scratch: it is deliberately absent
 * from the allocatable set built by ppc32_jit_init_hreg_mapping().
 */
static int ppc32_jit_get_tmp_hreg(cpu_ppc_t *cpu)
{
   return(AMD64_RBX);
}
322
323 /* ======================================================================== */
324 /* JIT operations (specific to target CPU). */
325 /* ======================================================================== */
326
327 /* INSN_OUTPUT */
ppc32_op_insn_output(ppc32_jit_tcb_t * b,jit_op_t * op)328 void ppc32_op_insn_output(ppc32_jit_tcb_t *b,jit_op_t *op)
329 {
330 op->ob_final = b->jit_ptr;
331 memcpy(b->jit_ptr,op->ob_data,op->ob_ptr - op->ob_data);
332 b->jit_ptr += op->ob_ptr - op->ob_data;
333 }
334
335 /* LOAD_GPR: p[0] = %host_reg, p[1] = %ppc_reg */
ppc32_op_load_gpr(ppc32_jit_tcb_t * b,jit_op_t * op)336 void ppc32_op_load_gpr(ppc32_jit_tcb_t *b,jit_op_t *op)
337 {
338 if (op->param[0] != JIT_OP_INV_REG)
339 ppc32_load_gpr(&b->jit_ptr,op->param[0],op->param[1]);
340 }
341
342 /* STORE_GPR: p[0] = %host_reg, p[1] = %ppc_reg */
ppc32_op_store_gpr(ppc32_jit_tcb_t * b,jit_op_t * op)343 void ppc32_op_store_gpr(ppc32_jit_tcb_t *b,jit_op_t *op)
344 {
345 if (op->param[0] != JIT_OP_INV_REG)
346 ppc32_store_gpr(&b->jit_ptr,op->param[1],op->param[0]);
347 }
348
349 /* UPDATE_FLAGS: p[0] = cr_field, p[1] = is_signed */
ppc32_op_update_flags(ppc32_jit_tcb_t * b,jit_op_t * op)350 void ppc32_op_update_flags(ppc32_jit_tcb_t *b,jit_op_t *op)
351 {
352 if (op->param[0] != JIT_OP_INV_REG)
353 ppc32_update_cr(b,op->param[0],op->param[1]);
354 }
355
356 /* MOVE_HOST_REG: p[0] = %host_dst_reg, p[1] = %host_src_reg */
ppc32_op_move_host_reg(ppc32_jit_tcb_t * b,jit_op_t * op)357 void ppc32_op_move_host_reg(ppc32_jit_tcb_t *b,jit_op_t *op)
358 {
359 if ((op->param[0] != JIT_OP_INV_REG) && (op->param[1] != JIT_OP_INV_REG))
360 amd64_mov_reg_reg(b->jit_ptr,op->param[0],op->param[1],4);
361 }
362
363 /* SET_HOST_REG_IMM32: p[0] = %host_reg, p[1] = imm32 */
ppc32_op_set_host_reg_imm32(ppc32_jit_tcb_t * b,jit_op_t * op)364 void ppc32_op_set_host_reg_imm32(ppc32_jit_tcb_t *b,jit_op_t *op)
365 {
366 if (op->param[0] != JIT_OP_INV_REG)
367 ppc32_load_imm(&b->jit_ptr,op->param[0],op->param[1]);
368 }
369
370 /* ======================================================================== */
371
/*
 * Memory operation (base + 16-bit signed displacement addressing).
 *
 * Emits a call to cpu->mem_op_fn[op] with the AMD64 SysV argument
 * registers: %rdi = cpu, %rsi = effective address, %rdx = target GPR.
 * With "update" set, the effective address is written back to GPR[base]
 * after the access (lbzu/stwu style forms).
 */
static void ppc32_emit_memop(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b,
                             int op,int base,int offset,int target,int update)
{
   m_uint32_t val = sign_extend(offset,16);
   jit_op_t *iop;

   /*
    * Since an exception can be triggered, clear JIT state. This allows
    * to use branch target tag (we can directly branch on this instruction).
    */
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_BRANCH_TARGET);
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);

   iop = ppc32_op_emit_insn_output(cpu,5,"memop");

   /* Save PC for exception handling */
   ppc32_set_ia(&iop->ob_ptr,b->start_ia+(b->ppc_trans_pos << 2));

   /* RSI = sign-extended offset */
   ppc32_load_imm(&iop->ob_ptr,AMD64_RSI,val);

   /* RSI = GPR[base] + sign-extended offset */
   if (update || (base != 0))
      ppc32_alu_gpr(&iop->ob_ptr,X86_ADD,AMD64_RSI,base);

   /* Keep the effective address in %r14 for the post-access writeback */
   if (update)
      amd64_mov_reg_reg(iop->ob_ptr,AMD64_R14,AMD64_RSI,4);

   /* RDX = target register */
   amd64_mov_reg_imm(iop->ob_ptr,AMD64_RDX,target);

   /* RDI = CPU instance pointer */
   amd64_mov_reg_reg(iop->ob_ptr,AMD64_RDI,AMD64_R15,8);

   /* Call memory function (the 8-byte %rsp adjustment presumably keeps
      the stack 16-byte aligned at the call site — TODO confirm) */
   amd64_alu_reg_imm(iop->ob_ptr,X86_SUB,AMD64_RSP,8);
   amd64_call_membase(iop->ob_ptr,AMD64_R15,MEMOP_OFFSET(op));
   amd64_alu_reg_imm(iop->ob_ptr,X86_ADD,AMD64_RSP,8);

   /* Writeback: GPR[base] = effective address */
   if (update)
      ppc32_store_gpr(&iop->ob_ptr,base,AMD64_R14);
}
415
/*
 * Memory operation (indexed addressing: GPR[ra] + GPR[rb]).
 *
 * Same calling convention as ppc32_emit_memop(): %rdi = cpu,
 * %rsi = effective address, %rdx = target GPR. With "update" set,
 * the effective address is written back to GPR[ra] after the access.
 */
static void ppc32_emit_memop_idx(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b,
                                 int op,int ra,int rb,int target,int update)
{
   jit_op_t *iop;

   /*
    * Since an exception can be triggered, clear JIT state. This allows
    * to use branch target tag (we can directly branch on this instruction).
    */
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_BRANCH_TARGET);
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);

   iop = ppc32_op_emit_insn_output(cpu,5,"memop_idx");

   /* Save PC for exception handling */
   ppc32_set_ia(&iop->ob_ptr,b->start_ia+(b->ppc_trans_pos << 2));

   /* RSI = $rb */
   ppc32_load_gpr(&iop->ob_ptr,AMD64_RSI,rb);

   /* RSI = GPR[ra] + GPR[rb] */
   if (update || (ra != 0))
      ppc32_alu_gpr(&iop->ob_ptr,X86_ADD,AMD64_RSI,ra);

   /* Keep the effective address in %r14 for the post-access writeback */
   if (update)
      amd64_mov_reg_reg(iop->ob_ptr,AMD64_R14,AMD64_RSI,4);

   /* RDX = target register */
   amd64_mov_reg_imm(iop->ob_ptr,AMD64_RDX,target);

   /* RDI = CPU instance pointer */
   amd64_mov_reg_reg(iop->ob_ptr,AMD64_RDI,AMD64_R15,8);

   /* Call memory function */
   amd64_alu_reg_imm(iop->ob_ptr,X86_SUB,AMD64_RSP,8);
   amd64_call_membase(iop->ob_ptr,AMD64_R15,MEMOP_OFFSET(op));
   amd64_alu_reg_imm(iop->ob_ptr,X86_ADD,AMD64_RSP,8);

   /* Writeback: GPR[ra] = effective address */
   if (update)
      ppc32_store_gpr(&iop->ob_ptr,ra,AMD64_R14);
}
458
/*
 * Inline handler for the fast (TLB-hit) path of ppc32_emit_memop_fast.
 * On entry to the emitted code: %rbx = host page address, %rsi = offset
 * within the page.
 */
typedef void (*memop_fast_access)(jit_op_t *iop,int target);

/* Fast LBZ: zero-extended byte load into GPR[target] */
static void ppc32_memop_fast_lbz(jit_op_t *iop,int target)
{
   /* clear %rcx first so the 1-byte load is zero-extended */
   amd64_clear_reg(iop->ob_ptr,AMD64_RCX);
   amd64_mov_reg_memindex(iop->ob_ptr,AMD64_RCX,AMD64_RBX,0,AMD64_RSI,0,1);
   ppc32_store_gpr(&iop->ob_ptr,target,AMD64_RCX);
}
468
/* Fast STB: store the low byte of GPR[target] to the host page */
static void ppc32_memop_fast_stb(jit_op_t *iop,int target)
{
   ppc32_load_gpr(&iop->ob_ptr,AMD64_RDX,target);
   amd64_mov_memindex_reg(iop->ob_ptr,AMD64_RBX,0,AMD64_RSI,0,AMD64_RDX,1);
}
475
/* Fast LWZ: 32-bit load, byte-swapped (big-endian PPC on LE host) */
static void ppc32_memop_fast_lwz(jit_op_t *iop,int target)
{
   amd64_mov_reg_memindex(iop->ob_ptr,AMD64_RAX,AMD64_RBX,0,AMD64_RSI,0,4);
   amd64_bswap32(iop->ob_ptr,AMD64_RAX);
   ppc32_store_gpr(&iop->ob_ptr,target,AMD64_RAX);
}
483
/* Fast STW: 32-bit store, byte-swapped (big-endian PPC on LE host) */
static void ppc32_memop_fast_stw(jit_op_t *iop,int target)
{
   ppc32_load_gpr(&iop->ob_ptr,AMD64_RDX,target);
   amd64_bswap32(iop->ob_ptr,AMD64_RDX);
   amd64_mov_memindex_reg(iop->ob_ptr,AMD64_RBX,0,AMD64_RSI,0,AMD64_RDX,4);
}
491
/*
 * Fast memory operation.
 *
 * Emits an inline MTS (TLB) lookup: hash the effective address, probe
 * the data-cache MTS entry, and on a hit run the inline op_handler
 * directly on the host page. On a miss (or a write to a COW/exec page)
 * fall back to the generic memory function, as in ppc32_emit_memop().
 */
static void ppc32_emit_memop_fast(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b,
                                  int write_op,int opcode,
                                  int base,int offset,int target,
                                  memop_fast_access op_handler)
{
   m_uint32_t val = sign_extend(offset,16);
   u_char *test1,*test2,*p_exit;
   jit_op_t *iop;

   /*
    * Since an exception can be triggered, clear JIT state. This allows
    * to use branch target tag (we can directly branch on this instruction).
    */
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_BRANCH_TARGET);
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);

   iop = ppc32_op_emit_insn_output(cpu,5,"memop_fast");

   test2 = NULL;

   /* XXX lookup statistics counter */
   amd64_inc_membase(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,mts_lookups));

   /* RSI = GPR[base] + sign-extended offset */
   ppc32_load_imm(&iop->ob_ptr,AMD64_RSI,val);
   if (base != 0)
      ppc32_alu_gpr(&iop->ob_ptr,X86_ADD,AMD64_RSI,base);

   /* RBX = mts32_entry index: xor of two shifted copies of the vaddr
      (X86_E* constants alias the corresponding AMD64_R* encodings) */
   amd64_mov_reg_reg_size(iop->ob_ptr,X86_EBX,X86_ESI,4);
   amd64_mov_reg_reg_size(iop->ob_ptr,X86_EAX,X86_ESI,4);

   amd64_shift_reg_imm_size(iop->ob_ptr,X86_SHR,X86_EBX,MTS32_HASH_SHIFT1,4);
   amd64_shift_reg_imm_size(iop->ob_ptr,X86_SHR,X86_EAX,MTS32_HASH_SHIFT2,4);
   amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,AMD64_RBX,AMD64_RAX);

   amd64_alu_reg_imm_size(iop->ob_ptr,X86_AND,X86_EBX,MTS32_HASH_MASK,4);

   /* RCX = mts32 entry */
   amd64_mov_reg_membase(iop->ob_ptr,AMD64_RCX,
                         AMD64_R15,
                         OFFSET(cpu_ppc_t,mts_cache[PPC32_MTS_DCACHE]),8);
   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,AMD64_RBX,5); /* TO FIX */
   amd64_alu_reg_reg(iop->ob_ptr,X86_ADD,AMD64_RCX,AMD64_RBX);

   /* Compare virtual page address (EAX = vpage) */
   amd64_mov_reg_reg(iop->ob_ptr,X86_EAX,X86_ESI,4);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,X86_EAX,PPC32_MIN_PAGE_MASK);

   amd64_alu_reg_membase_size(iop->ob_ptr,X86_CMP,X86_EAX,AMD64_RCX,
                              OFFSET(mts32_entry_t,gvpa),4);
   test1 = iop->ob_ptr;
   amd64_branch8(iop->ob_ptr, X86_CC_NZ, 0, 1);

   /* Test if we are writing to a COW page */
   if (write_op) {
      amd64_test_membase_imm_size(iop->ob_ptr,
                                  AMD64_RCX,OFFSET(mts32_entry_t,flags),
                                  MTS_FLAG_COW|MTS_FLAG_EXEC,4);
      test2 = iop->ob_ptr;
      amd64_branch8(iop->ob_ptr, X86_CC_NZ, 0, 1);
   }

   /* ESI = offset in page, RBX = Host Page Address */
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,X86_ESI,PPC32_MIN_PAGE_IMASK);
   amd64_mov_reg_membase(iop->ob_ptr,AMD64_RBX,
                         AMD64_RCX,OFFSET(mts32_entry_t,hpa),8);

   /* Memory access (fast path) */
   op_handler(iop,target);

   p_exit = iop->ob_ptr;
   amd64_jump8(iop->ob_ptr,0);

   /* === Slow lookup === */
   amd64_patch(test1,iop->ob_ptr);
   if (test2)
      amd64_patch(test2,iop->ob_ptr);

   /* Save IA for exception handling */
   ppc32_set_ia(&iop->ob_ptr,b->start_ia+(b->ppc_trans_pos << 2));

   /* RDX = target register */
   amd64_mov_reg_imm(iop->ob_ptr,AMD64_RDX,target);

   /* RDI = CPU instance */
   amd64_mov_reg_reg(iop->ob_ptr,AMD64_RDI,AMD64_R15,8);

   /* Call memory access function (RSI still holds the vaddr) */
   amd64_alu_reg_imm(iop->ob_ptr,X86_SUB,AMD64_RSP,8);
   amd64_call_membase(iop->ob_ptr,AMD64_R15,MEMOP_OFFSET(opcode));
   amd64_alu_reg_imm(iop->ob_ptr,X86_ADD,AMD64_RSP,8);

   amd64_patch(p_exit,iop->ob_ptr);
}
588
/*
 * Emit unhandled instruction code.
 *
 * Falls back to the interpreter (ppc32_exec_single_insn_ext) for the
 * given opcode. If the interpreter returns non-zero, the emitted code
 * leaves JIT execution via the epilog.
 */
static int ppc32_emit_unknown(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b,
                              ppc_insn_t opcode)
{
   u_char *test1;
   jit_op_t *iop;

   iop = ppc32_op_emit_insn_output(cpu,3,"unknown");

   /* Update IA */
   ppc32_set_ia(&iop->ob_ptr,b->start_ia+(b->ppc_trans_pos << 2));

   /* Fallback to non-JIT mode: %rdi = cpu, %rsi = raw opcode */
   amd64_mov_reg_reg(iop->ob_ptr,AMD64_RDI,AMD64_R15,8);
   amd64_mov_reg_imm(iop->ob_ptr,AMD64_RSI,opcode);

   amd64_alu_reg_imm(iop->ob_ptr,X86_SUB,AMD64_RSP,8);
   ppc32_emit_c_call(b,iop,ppc32_exec_single_insn_ext);
   amd64_alu_reg_imm(iop->ob_ptr,X86_ADD,AMD64_RSP,8);

   /* Non-zero return => leave the JIT loop through the epilog */
   amd64_test_reg_reg_size(iop->ob_ptr,AMD64_RAX,AMD64_RAX,4);
   test1 = iop->ob_ptr;
   amd64_branch8(iop->ob_ptr, X86_CC_Z, 0, 1);
   ppc32_jit_tcb_push_epilog(&iop->ob_ptr);

   amd64_patch(test1,iop->ob_ptr);

   /* Signal this as an EOB to reset JIT state */
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
   return(0);
}
620
/*
 * Virtual Breakpoint.
 * Emits a call to ppc32_run_breakpoint with %rdi = cpu instance.
 */
void ppc32_emit_breakpoint(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b)
{
   jit_op_t *iop;

   iop = ppc32_op_emit_insn_output(cpu,2,"breakpoint");

   amd64_mov_reg_reg(iop->ob_ptr,AMD64_RDI,AMD64_R15,8);

   amd64_alu_reg_imm(iop->ob_ptr,X86_SUB,AMD64_RSP,8);
   ppc32_emit_c_call(b,iop,ppc32_run_breakpoint);
   amd64_alu_reg_imm(iop->ob_ptr,X86_ADD,AMD64_RSP,8);

   /* Signal this as an EOB to reset JIT state */
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
}
637
/* Increment the number of executed instructions (performance debugging) */
void ppc32_inc_perf_counter(cpu_ppc_t *cpu)
{
   jit_op_t *iop;

   iop = ppc32_op_emit_insn_output(cpu,1,"perf_cnt");
   /* 32-bit increment of cpu->perf_counter */
   amd64_inc_membase_size(iop->ob_ptr,
                          AMD64_R15,OFFSET(cpu_ppc_t,perf_counter),4);
}
647
648 /* ======================================================================== */
649
/*
 * BLR - Branch to Link Register.
 * IA = LR; with the LK bit (insn & 1) set, LR is then loaded with the
 * address of the following instruction.
 */
DECLARE_INSN(BLR)
{
   jit_op_t *iop;
   int hreg;

   ppc32_jit_start_hreg_seq(cpu,"blr");
   hreg = ppc32_jit_alloc_hreg(cpu,-1);
   ppc32_op_emit_alter_host_reg(cpu,hreg);

   iop = ppc32_op_emit_insn_output(cpu,2,"blr");

   /* IA = LR */
   amd64_mov_reg_membase(iop->ob_ptr,hreg,AMD64_R15,OFFSET(cpu_ppc_t,lr),4);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ia),hreg,4);

   /* set the return address */
   if (insn & 1)
      ppc32_set_lr(iop,b->start_ia + ((b->ppc_trans_pos+1) << 2));

   /* Indirect branch: exit to the main loop */
   ppc32_jit_tcb_push_epilog(&iop->ob_ptr);
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
   ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1) << 2));

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
676
/*
 * BCTR - Branch to Count Register.
 * IA = CTR; with the LK bit (insn & 1) set, LR is loaded with the
 * address of the following instruction.
 */
DECLARE_INSN(BCTR)
{
   jit_op_t *iop;
   int hreg;

   ppc32_jit_start_hreg_seq(cpu,"bctr");
   hreg = ppc32_jit_alloc_hreg(cpu,-1);
   ppc32_op_emit_alter_host_reg(cpu,hreg);

   iop = ppc32_op_emit_insn_output(cpu,2,"bctr");

   /* IA = CTR */
   amd64_mov_reg_membase(iop->ob_ptr,hreg,AMD64_R15,OFFSET(cpu_ppc_t,ctr),4);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ia),hreg,4);

   /* set the return address */
   if (insn & 1)
      ppc32_set_lr(iop,b->start_ia + ((b->ppc_trans_pos+1) << 2));

   /* Indirect branch: exit to the main loop */
   ppc32_jit_tcb_push_epilog(&iop->ob_ptr);
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
   ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1) << 2));

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
703
/* MFLR - Move From Link Register: GPR[rd] = LR */
DECLARE_INSN(MFLR)
{
   int rd = bits(insn,21,25);
   int hreg_rd;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"mflr");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
   iop = ppc32_op_emit_insn_output(cpu,1,"mflr");

   amd64_mov_reg_membase(iop->ob_ptr,hreg_rd,AMD64_R15,OFFSET(cpu_ppc_t,lr),4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
721
722 /* MTLR - Move To Link Register */
DECLARE_INSN(MTLR)723 DECLARE_INSN(MTLR)
724 {
725 int rs = bits(insn,21,25);
726 int hreg_rs;
727 jit_op_t *iop;
728
729 ppc32_jit_start_hreg_seq(cpu,"mtlr");
730 hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
731 ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
732
733 iop = ppc32_op_emit_insn_output(cpu,1,"mtlr");
734 amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,lr),hreg_rs,4);
735 return(0);
736 }
737
/* MFCTR - Move From Counter Register: GPR[rd] = CTR */
DECLARE_INSN(MFCTR)
{
   int rd = bits(insn,21,25);
   int hreg_rd;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"mfctr");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   iop = ppc32_op_emit_insn_output(cpu,1,"mfctr");

   amd64_mov_reg_membase(iop->ob_ptr,hreg_rd,
                         AMD64_R15,OFFSET(cpu_ppc_t,ctr),4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
757
/* MTCTR - Move To Counter Register: CTR = GPR[rs] */
DECLARE_INSN(MTCTR)
{
   int rs = bits(insn,21,25);
   int hreg_rs;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"mtctr");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,1,"mtctr");

   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ctr),
                         hreg_rs,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
777
/*
 * MFTBU - Move from Time Base (Up):
 * GPR[rd] = high 32 bits of the 64-bit time base (offset +4, so this
 * presumably assumes a little-endian host layout of cpu->tb — TODO confirm).
 */
DECLARE_INSN(MFTBU)
{
   int rd = bits(insn,21,25);
   int hreg_rd;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"mftbu");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   iop = ppc32_op_emit_insn_output(cpu,1,"mftbu");

   amd64_mov_reg_membase(iop->ob_ptr,hreg_rd,
                         AMD64_R15,OFFSET(cpu_ppc_t,tb)+4,4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
797
/* Amount added to the time base on each MFTBL read */
#define PPC32_TB_INCREMENT  50

/*
 * MFTBL - Move from Time Base (Lo).
 * Advances the 64-bit time base by PPC32_TB_INCREMENT, writes it back,
 * and stores the (low 32 bits of the) result into GPR[rd].
 */
DECLARE_INSN(MFTBL)
{
   int rd = bits(insn,21,25);
   int hreg_rd;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"mftbl");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   iop = ppc32_op_emit_insn_output(cpu,3,"mftbl");

   /* 64-bit read-modify-write of cpu->tb through hreg_rd */
   amd64_mov_reg_membase(iop->ob_ptr,hreg_rd,
                         AMD64_R15,OFFSET(cpu_ppc_t,tb),8);
   amd64_alu_reg_imm(iop->ob_ptr,X86_ADD,hreg_rd,PPC32_TB_INCREMENT);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,tb),
                         hreg_rd,8);

   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
823
/* ADD: GPR[rd] = GPR[ra] + GPR[rb]; Rc bit updates CR0 */
DECLARE_INSN(ADD)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rd,hreg_ra,hreg_rb;
   jit_op_t *iop;

   /* $rd = $ra + $rb */
   ppc32_jit_start_hreg_seq(cpu,"add");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,2,"add");

   /* rd aliasing ra or rb maps to the same host register, so the add
      can be done in place; otherwise copy ra first */
   if (rd == ra)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_rd,hreg_rb,4);
   else if (rd == rb)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_rd,hreg_ra,4);
   else {
      amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_rd,hreg_rb,4);
   }

   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   /* Rc bit: CR0 is set from the EFLAGS left by the add */
   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
861
/* ADDC: GPR[rd] = GPR[ra] + GPR[rb], carry out to XER[CA]; Rc updates CR0 */
DECLARE_INSN(ADDC)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rd,hreg_ra,hreg_rb,hreg_t0;
   jit_op_t *iop;

   /* $rd = $ra + $rb */
   ppc32_jit_start_hreg_seq(cpu,"addc");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   /* store the carry flag */
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);

   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,2,"addc");

   /* rd aliasing ra or rb maps to the same host register */
   if (rd == ra)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_rd,hreg_rb,4);
   else if (rd == rb)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_rd,hreg_ra,4);
   else {
      amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_rd,hreg_rb,4);
   }

   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   /* store the carry flag: setcc(CF) -> xer_ca */
   amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t0,FALSE);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x1);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                         hreg_t0,4);

   if (insn & 1) {
      /* the AND above clobbered EFLAGS: re-test the result for CR0 */
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_rd,hreg_rd,4);
      ppc32_op_emit_update_flags(cpu,0,TRUE);
   }

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
910
/*
 * ADDE - Add Extended: GPR[rd] = GPR[ra] + GPR[rb] + XER[CA].
 * XER[CA] is set if either of the two additions carries out.
 */
DECLARE_INSN(ADDE)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_ra,hreg_rb,hreg_rd,hreg_t0,hreg_t1;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"adde");
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   hreg_t0 = ppc32_jit_alloc_hreg(cpu,-1);
   hreg_t1 = ppc32_jit_get_tmp_hreg(cpu);

   ppc32_op_emit_alter_host_reg(cpu,hreg_t0);
   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,3,"adde");

   /* $t0 = $ra + carry; $t1 starts at 0 and collects carry-outs */
   amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_t1,hreg_t1);
   amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_ra,4);

   amd64_alu_reg_membase_size(iop->ob_ptr,X86_ADD,hreg_t0,
                              AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),4);
   amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                         hreg_t1,4);

   /* $t0 += $rb; OR any second carry into xer_ca */
   amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_t0,hreg_rb,4);
   amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                              hreg_t1,4);

   /* update cr0 (test before the mov below, which preserves EFLAGS) */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_t0,hreg_t0,4);

   amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_t0,4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
964
/* ADDI - ADD Immediate: $rd = $ra + sign_ext(imm), or $rd = imm if rA=0 */
DECLARE_INSN(ADDI)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int imm = bits(insn,0,15);
   m_uint32_t tmp = sign_extend_32(imm,16);
   int hreg_rd,hreg_ra;
   jit_op_t *iop;

   /* $rd = $ra + imm */
   ppc32_jit_start_hreg_seq(cpu,"addi");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   if (ra != 0) {
      hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
      ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);

      iop = ppc32_op_emit_insn_output(cpu,2,"addi");

      if (rd != ra)
         amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);

      amd64_alu_reg_imm_size(iop->ob_ptr,X86_ADD,hreg_rd,tmp,4);
   } else {
      /* rA=0 means the operand is the constant 0 ("li" form): load imm */
      iop = ppc32_op_emit_insn_output(cpu,1,"addi");
      ppc32_load_imm(&iop->ob_ptr,hreg_rd,tmp);
   }

   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
999
/* ADDIC - ADD Immediate with Carry: $rd = $ra + sign_ext(imm), CA updated */
DECLARE_INSN(ADDIC)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int imm = bits(insn,0,15);
   m_uint32_t tmp = sign_extend_32(imm,16);
   int hreg_rd,hreg_ra;
   jit_op_t *iop;

   /* $rd = $ra + imm */
   ppc32_jit_start_hreg_seq(cpu,"addic");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);

   iop = ppc32_op_emit_insn_output(cpu,1,"addic");

   if (rd != ra)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);

   amd64_alu_reg_imm_size(iop->ob_ptr,X86_ADD,hreg_rd,tmp,4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   /* Store the host carry flag of the ADD above directly into XER[CA].
      NOTE(review): assumes the code emitted by store_gpr preserves EFLAGS
      between the ADD and this SETcc — consistent with its use elsewhere. */
   amd64_set_membase(iop->ob_ptr,X86_CC_C,
                     AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),FALSE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1031
/* ADDIC. - ADD Immediate with Carry and Record: like ADDIC, also sets CR0 */
DECLARE_INSN(ADDIC_dot)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int imm = bits(insn,0,15);
   m_uint32_t tmp = sign_extend_32(imm,16);
   int hreg_rd,hreg_ra;
   jit_op_t *iop;

   /* $rd = $ra + imm */
   ppc32_jit_start_hreg_seq(cpu,"addic.");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);

   iop = ppc32_op_emit_insn_output(cpu,1,"addic.");

   if (rd != ra)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);

   amd64_alu_reg_imm_size(iop->ob_ptr,X86_ADD,hreg_rd,tmp,4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   /* Capture the ADD's carry into XER[CA] (SETcc leaves EFLAGS intact) */
   amd64_set_membase(iop->ob_ptr,X86_CC_C,
                     AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),FALSE);

   /* Record form: CR0 always updated from the ADD's result flags */
   ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1065
/* ADDIS - ADD Immediate Shifted: $rd = $ra + (imm << 16), or just (imm << 16)
   when rA=0 ("lis" form) */
DECLARE_INSN(ADDIS)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint32_t imm = bits(insn,0,15);
   m_uint32_t tmp = imm << 16;
   int hreg_rd,hreg_ra;
   jit_op_t *iop;

   /* $rd = $ra + (imm << 16) */
   ppc32_jit_start_hreg_seq(cpu,"addis");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   if (ra != 0) {
      hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
      ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);

      iop = ppc32_op_emit_insn_output(cpu,1,"addis");

      if (rd != ra)
         amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);

      amd64_alu_reg_imm_size(iop->ob_ptr,X86_ADD,hreg_rd,tmp,4);
   } else {
      /* rA=0: operand is the constant 0, so load the shifted immediate */
      iop = ppc32_op_emit_insn_output(cpu,1,"addis");
      amd64_mov_reg_imm(iop->ob_ptr,hreg_rd,tmp);
   }

   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1100
/* ADDZE - Add to Zero Extended: $rd = $ra + XER[CA], carry out to XER[CA] */
DECLARE_INSN(ADDZE)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int hreg_rd,hreg_ra,hreg_t0;
   jit_op_t *iop;

   /* $rd = $ra + xer_ca + set_carry */
   ppc32_jit_start_hreg_seq(cpu,"addze");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);

   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);

   iop = ppc32_op_emit_insn_output(cpu,2,"addze");

   amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_t0,hreg_t0);  /* $t0 = 0 */

   if (rd != ra)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);

   /* $rd += XER[CA] */
   amd64_alu_reg_membase_size(iop->ob_ptr,X86_ADD,hreg_rd,
                              AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),4);

   /* Write back the carry of that ADD as the new XER[CA] */
   amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t0,FALSE);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                         hreg_t0,4);

   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   /* Rc=1: CR0 from the ADD's flags (SETcc/MOV above preserve EFLAGS) */
   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1139
/* AND: $ra = $rs & $rb, optionally recording CR0 (Rc bit) */
DECLARE_INSN(AND)
{
   int rs = bits(insn,21,25);   /* source GPR (note: rS is the first field) */
   int ra = bits(insn,16,20);   /* destination GPR */
   int rb = bits(insn,11,15);   /* second source GPR */
   int hreg_rs,hreg_ra,hreg_rb;
   jit_op_t *iop;

   /* $ra = $rs & $rb */
   ppc32_jit_start_hreg_seq(cpu,"and");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,1,"and");

   /* Avoid the extra MOV when the destination aliases a source */
   if (ra == rs)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_ra,hreg_rb,4);
   else if (ra == rb)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_ra,hreg_rs,4);
   else {
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_ra,hreg_rb,4);
   }

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   /* Rc=1: CR0 from the AND's result flags */
   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1177
/* ANDC - AND with Complement: $ra = $rs & ~$rb */
DECLARE_INSN(ANDC)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rs,hreg_ra,hreg_rb,hreg_t0;
   jit_op_t *iop;

   /* $ra = $rs & ~$rb */
   ppc32_jit_start_hreg_seq(cpu,"andc");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,1,"andc");

   /* $t0 = ~$rb */
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_rb,4);
   amd64_not_reg(iop->ob_ptr,hreg_t0);

   /* $ra = $rs & $t0 */
   /* NOTE: the final ALU op (not the NOT) is last so that EFLAGS reflect
      the result when Rc=1 */
   if (ra == rs)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_ra,hreg_t0,4);
   else {
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_t0,hreg_rs,4);
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_t0,4);
   }

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1219
/* ANDI. - AND Immediate: $ra = $rs & imm (record form is implicit: CR0 is
   always updated for andi.) */
DECLARE_INSN(ANDI)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t imm = bits(insn,0,15);
   m_uint32_t tmp = imm;          /* zero-extended 16-bit immediate */
   int hreg_rs,hreg_ra;
   jit_op_t *iop;

   /* $ra = $rs & imm */
   ppc32_jit_start_hreg_seq(cpu,"andi");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,2,"andi");

   if (ra != rs)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

   amd64_alu_reg_imm_size(iop->ob_ptr,X86_AND,hreg_ra,tmp,4);
   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   /* andi. always updates CR0 (no Rc check needed) */
   ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1250
/* ANDIS. - AND Immediate Shifted: $ra = $rs & (imm << 16); CR0 always
   updated (record form is implicit for andis.) */
DECLARE_INSN(ANDIS)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint32_t imm = bits(insn,0,15);
   m_uint32_t tmp = imm << 16;
   int hreg_rs,hreg_ra;
   jit_op_t *iop;

   /* $ra = $rs & imm */
   ppc32_jit_start_hreg_seq(cpu,"andis");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,2,"andis");

   if (ra != rs)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

   amd64_alu_reg_imm_size(iop->ob_ptr,X86_AND,hreg_ra,tmp,4);
   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   /* andis. always updates CR0 */
   ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1281
1282 /* B - Branch */
DECLARE_INSN(B)1283 DECLARE_INSN(B)
1284 {
1285 m_uint32_t offset = bits(insn,2,25);
1286 m_uint32_t new_ia;
1287 jit_op_t *iop;
1288
1289 iop = ppc32_op_emit_insn_output(cpu,4,"b");
1290
1291 /* compute the new ia */
1292 new_ia = b->start_ia + (b->ppc_trans_pos << 2);
1293 new_ia += sign_extend(offset << 2,26);
1294 ppc32_set_jump(cpu,b,iop,new_ia,TRUE);
1295
1296 ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
1297 ppc32_op_emit_branch_target(cpu,b,new_ia);
1298 ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1) << 2));
1299 return(0);
1300 }
1301
1302 /* BA - Branch Absolute */
DECLARE_INSN(BA)1303 DECLARE_INSN(BA)
1304 {
1305 m_uint32_t offset = bits(insn,2,25);
1306 m_uint32_t new_ia;
1307 jit_op_t *iop;
1308
1309 iop = ppc32_op_emit_insn_output(cpu,4,"ba");
1310
1311 /* compute the new ia */
1312 new_ia = sign_extend(offset << 2,26);
1313 ppc32_set_jump(cpu,b,iop,new_ia,TRUE);
1314
1315 ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
1316 ppc32_op_emit_branch_target(cpu,b,new_ia);
1317 ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1) << 2));
1318 return(0);
1319 }
1320
1321 /* BL - Branch and Link */
DECLARE_INSN(BL)1322 DECLARE_INSN(BL)
1323 {
1324 m_uint32_t offset = bits(insn,2,25);
1325 m_uint32_t new_ia;
1326 jit_op_t *iop;
1327
1328 iop = ppc32_op_emit_insn_output(cpu,4,"bl");
1329
1330 /* compute the new ia */
1331 new_ia = b->start_ia + (b->ppc_trans_pos << 2);
1332 new_ia += sign_extend(offset << 2,26);
1333
1334 /* set the return address */
1335 ppc32_set_lr(iop,b->start_ia + ((b->ppc_trans_pos+1) << 2));
1336 ppc32_set_jump(cpu,b,iop,new_ia,TRUE);
1337
1338 ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
1339 ppc32_op_emit_branch_target(cpu,b,new_ia);
1340 ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1) << 2));
1341 return(0);
1342 }
1343
1344 /* BLA - Branch and Link Absolute */
DECLARE_INSN(BLA)1345 DECLARE_INSN(BLA)
1346 {
1347 m_uint32_t offset = bits(insn,2,25);
1348 m_uint32_t new_ia;
1349 jit_op_t *iop;
1350
1351 iop = ppc32_op_emit_insn_output(cpu,4,"bla");
1352
1353 /* compute the new ia */
1354 new_ia = sign_extend(offset << 2,26);
1355
1356 /* set the return address */
1357 ppc32_set_lr(iop,b->start_ia + ((b->ppc_trans_pos+1) << 2));
1358 ppc32_set_jump(cpu,b,iop,new_ia,TRUE);
1359
1360 ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
1361 ppc32_op_emit_branch_target(cpu,b,new_ia);
1362 ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1) << 2));
1363 return(0);
1364 }
1365
/* BC - Branch Conditional (Condition Check only): fast path for BO forms
   that test a CR bit but never touch CTR (BO[2]=1) */
DECLARE_INSN(BCC)
{
   int bo = bits(insn,21,25);   /* BO field: branch options */
   int bi = bits(insn,16,20);   /* BI field: CR bit to test */
   int bd = bits(insn,2,15);    /* BD field: 14-bit displacement */
   jit_op_t *iop;
   u_int cr_field,cr_bit;
   m_uint32_t new_ia;
   u_char *jump_ptr;
   int local_jump;
   int cond;

   ppc32_op_emit_basic_opcode(cpu,JIT_OP_BRANCH_JUMP);

   iop = ppc32_op_emit_insn_output(cpu,5,"bcc");

   /* Get the wanted value for the condition bit */
   cond = (bo >> 3) & 0x1;

   /* Set the return address (LK bit: branch-and-link form) */
   if (insn & 1) {
      ppc32_set_lr(iop,b->start_ia + ((b->ppc_trans_pos+1) << 2));
      ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1)<<2));
   }

   /* Compute the new ia (AA bit selects absolute vs relative) */
   new_ia = sign_extend_32(bd << 2,16);
   if (!(insn & 0x02))
      new_ia += b->start_ia + (b->ppc_trans_pos << 2);

   /* Test the condition bit */
   cr_field = ppc32_get_cr_field(bi);
   cr_bit = ppc32_get_cr_bit(bi);

   ppc32_op_emit_require_flags(cpu,cr_field);

   amd64_test_membase_imm_size(iop->ob_ptr,
                               AMD64_R15,PPC32_CR_FIELD_OFFSET(cr_field),
                               (1 << cr_bit),4);

   local_jump = ppc32_jit_tcb_local_addr(b,new_ia,&jump_ptr);

   /*
    * Optimize the jump, depending if the destination is in the same
    * page or not.
    */
   if (local_jump) {
      /* In-page: direct conditional jump, patched later to the target */
      ppc32_jit_tcb_record_patch(b,iop,iop->ob_ptr,new_ia);
      amd64_branch32(iop->ob_ptr,(cond) ? X86_CC_NZ : X86_CC_Z,0,FALSE);
   } else {
      /* Out-of-page: invert the condition to skip over the full jump
         sequence when the branch is NOT taken */
      jump_ptr = iop->ob_ptr;
      amd64_branch32(iop->ob_ptr,(cond) ? X86_CC_Z : X86_CC_NZ,0,FALSE);
      ppc32_set_jump(cpu,b,iop,new_ia,TRUE);
      amd64_patch(jump_ptr,iop->ob_ptr);
   }

   ppc32_op_emit_branch_target(cpu,b,new_ia);
   return(0);
}
1426
/* BC - Branch Conditional (general form: may decrement CTR and/or test a
   CR bit; the two predicates are accumulated by AND in $t0) */
DECLARE_INSN(BC)
{
   int bo = bits(insn,21,25);   /* BO field: branch options */
   int bi = bits(insn,16,20);   /* BI field: CR bit to test */
   int bd = bits(insn,2,15);    /* BD field: 14-bit displacement */
   int hreg_t0,hreg_t1;
   jit_op_t *iop;
   u_int cr_field,cr_bit;
   m_uint32_t new_ia;
   u_char *jump_ptr;
   int local_jump;
   int cond,ctr;

   ppc32_op_emit_basic_opcode(cpu,JIT_OP_BRANCH_JUMP);

   iop = ppc32_op_emit_insn_output(cpu,5,"bc");

   ppc32_jit_start_hreg_seq(cpu,"bc");
   hreg_t0 = ppc32_jit_alloc_hreg(cpu,-1);
   hreg_t1 = ppc32_jit_get_tmp_hreg(cpu);

   ppc32_op_emit_alter_host_reg(cpu,hreg_t0);

   /* Get the wanted value for the condition bit and CTR value */
   cond = (bo >> 3) & 0x1;   /* branch if CR bit == cond */
   ctr  = (bo >> 1) & 0x1;   /* branch if CTR == 0 (1) or CTR != 0 (0) */

   /* Set the return address (LK bit set: bcl form) */
   if (insn & 1) {
      ppc32_set_lr(iop,b->start_ia + ((b->ppc_trans_pos+1) << 2));
      ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1)<<2));
   }

   /* Compute the new ia (AA bit selects absolute vs relative) */
   new_ia = sign_extend_32(bd << 2,16);
   if (!(insn & 0x02))
      new_ia += b->start_ia + (b->ppc_trans_pos << 2);

   /* $t0 = 1: "take the branch" unless a predicate below clears it */
   amd64_mov_reg_imm(iop->ob_ptr,hreg_t0,1);

   /* Decrement the count register (BO[2]=0) */
   if (!(bo & 0x04)) {
      amd64_dec_membase_size(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ctr),4);
      amd64_set_reg(iop->ob_ptr,(ctr) ? X86_CC_Z : X86_CC_NZ,hreg_t1,FALSE);
      amd64_alu_reg_reg(iop->ob_ptr,X86_AND,hreg_t0,hreg_t1);
   }

   /* Test the condition bit (BO[0]=0) */
   if (!((bo >> 4) & 0x01)) {
      cr_field = ppc32_get_cr_field(bi);
      cr_bit = ppc32_get_cr_bit(bi);

      ppc32_op_emit_require_flags(cpu,cr_field);

      amd64_test_membase_imm_size(iop->ob_ptr,
                                  AMD64_R15,PPC32_CR_FIELD_OFFSET(cr_field),
                                  (1 << cr_bit),4);

      amd64_set_reg(iop->ob_ptr,(cond) ? X86_CC_NZ : X86_CC_Z,hreg_t1,FALSE);
      amd64_alu_reg_reg(iop->ob_ptr,X86_AND,hreg_t0,hreg_t1);
   }

   /* Reduce $t0 to a single bit and set ZF for the branch below */
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   local_jump = ppc32_jit_tcb_local_addr(b,new_ia,&jump_ptr);

   /*
    * Optimize the jump, depending if the destination is in the same
    * page or not.
    */
   if (local_jump) {
      /* In-page: direct conditional jump, patched later to the target */
      ppc32_jit_tcb_record_patch(b,iop,iop->ob_ptr,new_ia);
      amd64_branch32(iop->ob_ptr,X86_CC_NZ,0,FALSE);
   } else {
      /* Out-of-page: jump over the full jump sequence when not taken */
      jump_ptr = iop->ob_ptr;
      amd64_branch32(iop->ob_ptr,X86_CC_Z,0,FALSE);
      ppc32_set_jump(cpu,b,iop,new_ia,TRUE);
      amd64_patch(jump_ptr,iop->ob_ptr);
   }

   ppc32_op_emit_branch_target(cpu,b,new_ia);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1513
/* BCLR - Branch Conditional to Link Register: same predicate accumulation
   as BC, but the target comes from LR (low 2 bits masked off) */
DECLARE_INSN(BCLR)
{
   int bo = bits(insn,21,25);   /* BO field: branch options */
   int bi = bits(insn,16,20);   /* BI field: CR bit to test */
   int bd = bits(insn,2,15);
   int hreg_t0,hreg_t1;
   jit_op_t *iop;
   u_int cr_field,cr_bit;
   m_uint32_t new_ia;
   u_char *jump_ptr;
   int cond,ctr;

   ppc32_jit_start_hreg_seq(cpu,"bclr");
   hreg_t0 = ppc32_jit_alloc_hreg(cpu,-1);
   hreg_t1 = ppc32_jit_get_tmp_hreg(cpu);

   ppc32_op_emit_alter_host_reg(cpu,hreg_t0);

   iop = ppc32_op_emit_insn_output(cpu,5,"bclr");

   /* Get the wanted value for the condition bit and CTR value */
   cond = (bo >> 3) & 0x1;
   ctr = (bo >> 1) & 0x1;

   /* Compute the new ia */
   /* NOTE(review): new_ia is computed here but never used below — the
      actual target is read from LR. Looks like dead code; verify. */
   new_ia = sign_extend_32(bd << 2,16);
   if (!(insn & 0x02))
      new_ia += b->start_ia + (b->ppc_trans_pos << 2);

   /* $t0 = 1: "take the branch" unless a predicate below clears it */
   amd64_mov_reg_imm(iop->ob_ptr,hreg_t0,1);

   /* Decrement the count register (BO[2]=0) */
   if (!(bo & 0x04)) {
      amd64_dec_membase_size(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ctr),4);
      amd64_set_reg(iop->ob_ptr,(ctr) ? X86_CC_Z : X86_CC_NZ,hreg_t1,FALSE);
      amd64_alu_reg_reg(iop->ob_ptr,X86_AND,hreg_t0,hreg_t1);
   }

   /* Test the condition bit (BO[0]=0) */
   if (!((bo >> 4) & 0x01)) {
      cr_field = ppc32_get_cr_field(bi);
      cr_bit = ppc32_get_cr_bit(bi);

      ppc32_op_emit_require_flags(cpu,cr_field);

      amd64_test_membase_imm_size(iop->ob_ptr,
                                  AMD64_R15,PPC32_CR_FIELD_OFFSET(cr_field),
                                  (1 << cr_bit),4);

      amd64_set_reg(iop->ob_ptr,(cond) ? X86_CC_NZ : X86_CC_Z,hreg_t1,FALSE);
      amd64_alu_reg_reg(iop->ob_ptr,X86_AND,hreg_t0,hreg_t1);
   }

   /* Set the return address */
   /* Read LR into $t1 BEFORE a possible blrl updates LR below */
   amd64_mov_reg_membase(iop->ob_ptr,hreg_t1,AMD64_R15,OFFSET(cpu_ppc_t,lr),4);

   if (insn & 1) {
      ppc32_set_lr(iop,b->start_ia + ((b->ppc_trans_pos+1) << 2));
      ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1)<<2));
   }

   /* Branching */
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* Skip the indirect branch when the predicate is false */
   jump_ptr = iop->ob_ptr;
   amd64_branch32(iop->ob_ptr,X86_CC_Z,0,FALSE);

   /* ia = LR & ~3, then leave JIT code via the epilog */
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t1,0xFFFFFFFC);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ia),hreg_t1,4);
   ppc32_jit_tcb_push_epilog(&iop->ob_ptr);

   amd64_patch(jump_ptr,iop->ob_ptr);

   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1593
/* CMP - Compare (signed): compare $ra with $rb, result into CR field crfD */
DECLARE_INSN(CMP)
{
   int rd = bits(insn,23,25);   /* crfD: destination CR field */
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_ra,hreg_rb;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"cmp");
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,1,"cmp");

   /* TRUE = signed comparison */
   amd64_alu_reg_reg_size(iop->ob_ptr,X86_CMP,hreg_ra,hreg_rb,4);
   ppc32_op_emit_update_flags(cpu,rd,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1618
/* CMPI - Compare Immediate (signed): compare $ra with sign_ext(imm) */
DECLARE_INSN(CMPI)
{
   int rd = bits(insn,23,25);   /* crfD: destination CR field */
   int ra = bits(insn,16,20);
   m_uint16_t imm = bits(insn,0,15);
   m_uint32_t tmp = sign_extend_32(imm,16);
   int hreg_ra;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"cmpi");
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);

   iop = ppc32_op_emit_insn_output(cpu,1,"cmpi");

   /* TRUE = signed comparison */
   amd64_alu_reg_imm_size(iop->ob_ptr,X86_CMP,hreg_ra,tmp,4);
   ppc32_op_emit_update_flags(cpu,rd,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1641
/* CMPL - Compare Logical (unsigned): compare $ra with $rb */
DECLARE_INSN(CMPL)
{
   int rd = bits(insn,23,25);   /* crfD: destination CR field */
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_ra,hreg_rb;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"cmpl");
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,1,"cmpl");

   /* FALSE = unsigned comparison */
   amd64_alu_reg_reg_size(iop->ob_ptr,X86_CMP,hreg_ra,hreg_rb,4);
   ppc32_op_emit_update_flags(cpu,rd,FALSE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1666
/* CMPLI - Compare Logical Immediate (unsigned): compare $ra with imm
   (zero-extended, no sign extension) */
DECLARE_INSN(CMPLI)
{
   int rd = bits(insn,23,25);   /* crfD: destination CR field */
   int ra = bits(insn,16,20);
   m_uint32_t imm = bits(insn,0,15);
   int hreg_ra;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"cmpli");
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);

   iop = ppc32_op_emit_insn_output(cpu,1,"cmpli");

   /* FALSE = unsigned comparison */
   amd64_alu_reg_imm_size(iop->ob_ptr,X86_CMP,hreg_ra,imm,4);
   ppc32_op_emit_update_flags(cpu,rd,FALSE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1688
/* CRAND - Condition Register AND: CR[bd] = CR[ba] & CR[bb] */
DECLARE_INSN(CRAND)
{
   int bd = bits(insn,21,25);   /* destination CR bit */
   int bb = bits(insn,16,20);   /* second source CR bit */
   int ba = bits(insn,11,15);   /* first source CR bit */
   int hreg_t0;
   jit_op_t *iop;

   /* RDX is reserved as a second scratch register alongside $t0 */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"crand");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* All three CR fields must hold up-to-date values in memory */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"crand");

   /* test $ba bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,hreg_t0,FALSE);

   /* result of AND between $ba and $bb */
   amd64_alu_reg_reg(iop->ob_ptr,X86_AND,hreg_t0,AMD64_RDX);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1743
/* CRANDC - Condition Register AND with Complement: CR[bd] = CR[ba] & ~CR[bb]
   (implemented by testing $bb with an inverted SETcc: CC_Z instead of CC_NZ) */
DECLARE_INSN(CRANDC)
{
   int bd = bits(insn,21,25);   /* destination CR bit */
   int bb = bits(insn,16,20);   /* second source CR bit (complemented) */
   int ba = bits(insn,11,15);   /* first source CR bit */
   int hreg_t0;
   jit_op_t *iop;

   /* RDX is reserved as a second scratch register alongside $t0 */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"crandc");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* All three CR fields must hold up-to-date values in memory */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"crandc");

   /* test $ba bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit (CC_Z: captures the complement of $bb) */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_Z,hreg_t0,FALSE);

   /* result of AND between $ba and ~$bb */
   amd64_alu_reg_reg(iop->ob_ptr,X86_AND,hreg_t0,AMD64_RDX);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1798
/* CREQV - Condition Register Equivalent: CR[bd] = ~(CR[ba] ^ CR[bb]) */
DECLARE_INSN(CREQV)
{
   int bd = bits(insn,21,25);   /* destination CR bit */
   int bb = bits(insn,16,20);   /* second source CR bit */
   int ba = bits(insn,11,15);   /* first source CR bit */
   int hreg_t0;
   jit_op_t *iop;

   /* RDX is reserved as a second scratch register alongside $t0 */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"creqv");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* All three CR fields must hold up-to-date values in memory */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"creqv");

   /* test $ba bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,hreg_t0,FALSE);

   /* result of EQV (negated XOR) between $ba and $bb */
   amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_t0,AMD64_RDX);
   amd64_not_reg(iop->ob_ptr,hreg_t0);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1854
/* CRNAND - Condition Register NAND: CR[bd] = ~(CR[ba] & CR[bb]) */
DECLARE_INSN(CRNAND)
{
   int bd = bits(insn,21,25);   /* destination CR bit */
   int bb = bits(insn,16,20);   /* second source CR bit */
   int ba = bits(insn,11,15);   /* first source CR bit */
   int hreg_t0;
   jit_op_t *iop;

   /* RDX is reserved as a second scratch register alongside $t0 */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"crnand");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* All three CR fields must hold up-to-date values in memory */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"crnand");

   /* test $ba bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,hreg_t0,FALSE);

   /* result of NAND between $ba and $bb */
   amd64_alu_reg_reg(iop->ob_ptr,X86_AND,hreg_t0,AMD64_RDX);
   amd64_not_reg(iop->ob_ptr,hreg_t0);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1910
/* CRNOR - Condition Register NOR: CR[bd] = ~(CR[ba] | CR[bb]) */
DECLARE_INSN(CRNOR)
{
   int bd = bits(insn,21,25);   /* destination CR bit */
   int bb = bits(insn,16,20);   /* second source CR bit */
   int ba = bits(insn,11,15);   /* first source CR bit */
   int hreg_t0;
   jit_op_t *iop;

   /* RDX is reserved as a second scratch register alongside $t0 */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"crnor");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* All three CR fields must hold up-to-date values in memory */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"crnor");

   /* test $ba bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,hreg_t0,FALSE);

   /* result of NOR between $ba and $bb */
   amd64_alu_reg_reg(iop->ob_ptr,X86_OR,hreg_t0,AMD64_RDX);
   amd64_not_reg(iop->ob_ptr,hreg_t0);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1966
/* CROR - Condition Register OR: CR[bd] = CR[ba] | CR[bb] */
DECLARE_INSN(CROR)
{
   int bd = bits(insn,21,25);   /* destination CR bit */
   int bb = bits(insn,16,20);   /* second source CR bit */
   int ba = bits(insn,11,15);   /* first source CR bit */
   int hreg_t0;
   jit_op_t *iop;

   /* RDX is reserved as a second scratch register alongside $t0 */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"cror");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* All three CR fields must hold up-to-date values in memory */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"cror");

   /* test $ba bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,hreg_t0,FALSE);

   /* result of OR between $ba and $bb (comment previously said NOR) */
   amd64_alu_reg_reg(iop->ob_ptr,X86_OR,hreg_t0,AMD64_RDX);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2021
/* CRORC - Condition Register OR with Complement */
DECLARE_INSN(CRORC)
{
   int bd = bits(insn,21,25);   /* destination CR bit */
   int bb = bits(insn,16,20);   /* source CR bit, complemented */
   int ba = bits(insn,11,15);   /* source CR bit */
   int hreg_t0;
   jit_op_t *iop;

   /* RDX is clobbered below: invalidate any value cached in it */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"crorc");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* the three CR fields touched must be synced to memory first */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"crorc");

   /* test $ba bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit (CC_Z: the complement of $bb goes into $t0) */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_Z,hreg_t0,FALSE);

   /* result of ORC between $ba and $bb */
   amd64_alu_reg_reg(iop->ob_ptr,X86_OR,hreg_t0,AMD64_RDX);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2076
/* CRXOR - Condition Register XOR */
DECLARE_INSN(CRXOR)
{
   int bd = bits(insn,21,25);   /* destination CR bit */
   int bb = bits(insn,16,20);   /* second source CR bit */
   int ba = bits(insn,11,15);   /* first source CR bit */
   int hreg_t0;
   jit_op_t *iop;

   /* RDX is clobbered below: invalidate any value cached in it */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"crxor");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* the three CR fields touched must be synced to memory first */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"crxor");

   /* test $ba bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,hreg_t0,FALSE);

   /* result of XOR between $ba and $bb */
   amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_t0,AMD64_RDX);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2131
/* DIVWU - Divide Word Unsigned */
DECLARE_INSN(DIVWU)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rb;
   jit_op_t *iop;

   /* x86 DIV uses RDX:RAX implicitly, so both must be pinned */
   ppc32_jit_start_hreg_seq(cpu,"divwu");
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RAX);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   /* $rd = $ra / $rb */
   ppc32_op_emit_load_gpr(cpu,AMD64_RAX,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,2,"divwu");
   /* zero the high half of the dividend (unsigned divide) */
   ppc32_load_imm(&iop->ob_ptr,AMD64_RDX,0);

   /* NOTE(review): if $rb is 0 the host DIV faults; PPC leaves the
      result undefined here — confirm a fault handler covers this. */
   amd64_div_reg_size(iop->ob_ptr,hreg_rb,0,4);

   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,AMD64_RAX,AMD64_RAX,4);

   ppc32_op_emit_store_gpr(cpu,rd,AMD64_RAX);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   /* edx:eax are directly modified: throw them */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RAX);
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2170
/* EQV - Equivalent (bitwise XNOR) */
DECLARE_INSN(EQV)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rs,hreg_ra,hreg_rb;
   jit_op_t *iop;

   /* $ra = ~($rs ^ $rb) */
   ppc32_jit_start_hreg_seq(cpu,"eqv");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,1,"eqv");

   /* when $ra aliases a source, XOR in place to save a move */
   if (ra == rs)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_XOR,hreg_ra,hreg_rb,4);
   else if (ra == rb)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_XOR,hreg_ra,hreg_rs,4);
   else {
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_XOR,hreg_ra,hreg_rb,4);
   }

   amd64_not_reg(iop->ob_ptr,hreg_ra);

   /* Rc bit: CR0 update requested (NOT does not set host flags) */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2213
/* EXTSB - Extend Sign Byte */
DECLARE_INSN(EXTSB)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int hreg_rs,hreg_ra;
   jit_op_t *iop;

   /* $ra = extsb($rs) */
   ppc32_jit_start_hreg_seq(cpu,"extsb");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,2,"extsb");

   if (rs != ra)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

   /* sign-extend the low byte: shift left 24 then arithmetic right 24 */
   amd64_shift_reg_imm_size(iop->ob_ptr,X86_SHL,hreg_ra,24,4);
   amd64_shift_reg_imm_size(iop->ob_ptr,X86_SAR,hreg_ra,24,4);

   /* Rc bit: CR0 update requested */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2248
/* EXTSH - Extend Sign Half-Word */
DECLARE_INSN(EXTSH)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int hreg_rs,hreg_ra;
   jit_op_t *iop;

   /* $ra = extsh($rs) */
   ppc32_jit_start_hreg_seq(cpu,"extsh");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,2,"extsh");

   if (rs != ra)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

   /* sign-extend the low 16 bits: shift left 16 then arithmetic right 16 */
   amd64_shift_reg_imm_size(iop->ob_ptr,X86_SHL,hreg_ra,16,4);
   amd64_shift_reg_imm_size(iop->ob_ptr,X86_SAR,hreg_ra,16,4);

   /* Rc bit: CR0 update requested */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2283
2284 /* LBZ - Load Byte and Zero */
DECLARE_INSN(LBZ)2285 DECLARE_INSN(LBZ)
2286 {
2287 int rs = bits(insn,21,25);
2288 int ra = bits(insn,16,20);
2289 m_uint16_t offset = bits(insn,0,15);
2290
2291 //ppc32_emit_memop(b,PPC_MEMOP_LBZ,ra,offset,rs,0);
2292 ppc32_emit_memop_fast(cpu,b,0,PPC_MEMOP_LBZ,ra,offset,rs,
2293 ppc32_memop_fast_lbz);
2294 return(0);
2295 }
2296
2297 /* LBZU - Load Byte and Zero with Update */
DECLARE_INSN(LBZU)2298 DECLARE_INSN(LBZU)
2299 {
2300 int rs = bits(insn,21,25);
2301 int ra = bits(insn,16,20);
2302 m_uint16_t offset = bits(insn,0,15);
2303
2304 ppc32_emit_memop(cpu,b,PPC_MEMOP_LBZ,ra,offset,rs,1);
2305 return(0);
2306 }
2307
2308 /* LBZUX - Load Byte and Zero with Update Indexed */
DECLARE_INSN(LBZUX)2309 DECLARE_INSN(LBZUX)
2310 {
2311 int rs = bits(insn,21,25);
2312 int ra = bits(insn,16,20);
2313 int rb = bits(insn,11,15);
2314
2315 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LBZ,ra,rb,rs,1);
2316 return(0);
2317 }
2318
2319 /* LBZX - Load Byte and Zero Indexed */
DECLARE_INSN(LBZX)2320 DECLARE_INSN(LBZX)
2321 {
2322 int rs = bits(insn,21,25);
2323 int ra = bits(insn,16,20);
2324 int rb = bits(insn,11,15);
2325
2326 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LBZ,ra,rb,rs,0);
2327 return(0);
2328 }
2329
2330 /* LHA - Load Half-Word Algebraic */
DECLARE_INSN(LHA)2331 DECLARE_INSN(LHA)
2332 {
2333 int rs = bits(insn,21,25);
2334 int ra = bits(insn,16,20);
2335 m_uint16_t offset = bits(insn,0,15);
2336
2337 ppc32_emit_memop(cpu,b,PPC_MEMOP_LHA,ra,offset,rs,0);
2338 return(0);
2339 }
2340
2341 /* LHAU - Load Half-Word Algebraic with Update */
DECLARE_INSN(LHAU)2342 DECLARE_INSN(LHAU)
2343 {
2344 int rs = bits(insn,21,25);
2345 int ra = bits(insn,16,20);
2346 m_uint16_t offset = bits(insn,0,15);
2347
2348 ppc32_emit_memop(cpu,b,PPC_MEMOP_LHA,ra,offset,rs,1);
2349 return(0);
2350 }
2351
2352 /* LHAUX - Load Half-Word Algebraic with Update Indexed */
DECLARE_INSN(LHAUX)2353 DECLARE_INSN(LHAUX)
2354 {
2355 int rs = bits(insn,21,25);
2356 int ra = bits(insn,16,20);
2357 int rb = bits(insn,11,15);
2358
2359 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LHA,ra,rb,rs,1);
2360 return(0);
2361 }
2362
2363 /* LHAX - Load Half-Word Algebraic Indexed */
DECLARE_INSN(LHAX)2364 DECLARE_INSN(LHAX)
2365 {
2366 int rs = bits(insn,21,25);
2367 int ra = bits(insn,16,20);
2368 int rb = bits(insn,11,15);
2369
2370 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LHA,ra,rb,rs,0);
2371 return(0);
2372 }
2373
2374 /* LHZ - Load Half-Word and Zero */
DECLARE_INSN(LHZ)2375 DECLARE_INSN(LHZ)
2376 {
2377 int rs = bits(insn,21,25);
2378 int ra = bits(insn,16,20);
2379 m_uint16_t offset = bits(insn,0,15);
2380
2381 ppc32_emit_memop(cpu,b,PPC_MEMOP_LHZ,ra,offset,rs,0);
2382 return(0);
2383 }
2384
2385 /* LHZU - Load Half-Word and Zero with Update */
DECLARE_INSN(LHZU)2386 DECLARE_INSN(LHZU)
2387 {
2388 int rs = bits(insn,21,25);
2389 int ra = bits(insn,16,20);
2390 m_uint16_t offset = bits(insn,0,15);
2391
2392 ppc32_emit_memop(cpu,b,PPC_MEMOP_LHZ,ra,offset,rs,1);
2393 return(0);
2394 }
2395
2396 /* LHZUX - Load Half-Word and Zero with Update Indexed */
DECLARE_INSN(LHZUX)2397 DECLARE_INSN(LHZUX)
2398 {
2399 int rs = bits(insn,21,25);
2400 int ra = bits(insn,16,20);
2401 int rb = bits(insn,11,15);
2402
2403 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LHZ,ra,rb,rs,1);
2404 return(0);
2405 }
2406
2407 /* LHZX - Load Half-Word and Zero Indexed */
DECLARE_INSN(LHZX)2408 DECLARE_INSN(LHZX)
2409 {
2410 int rs = bits(insn,21,25);
2411 int ra = bits(insn,16,20);
2412 int rb = bits(insn,11,15);
2413
2414 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LHZ,ra,rb,rs,0);
2415 return(0);
2416 }
2417
2418 /* LWZ - Load Word and Zero */
DECLARE_INSN(LWZ)2419 DECLARE_INSN(LWZ)
2420 {
2421 int rs = bits(insn,21,25);
2422 int ra = bits(insn,16,20);
2423 m_uint16_t offset = bits(insn,0,15);
2424
2425 //ppc32_emit_memop(b,PPC_MEMOP_LWZ,ra,offset,rs,0);
2426 ppc32_emit_memop_fast(cpu,b,0,PPC_MEMOP_LWZ,ra,offset,rs,
2427 ppc32_memop_fast_lwz);
2428 return(0);
2429 }
2430
2431 /* LWZU - Load Word and Zero with Update */
DECLARE_INSN(LWZU)2432 DECLARE_INSN(LWZU)
2433 {
2434 int rs = bits(insn,21,25);
2435 int ra = bits(insn,16,20);
2436 m_uint16_t offset = bits(insn,0,15);
2437
2438 ppc32_emit_memop(cpu,b,PPC_MEMOP_LWZ,ra,offset,rs,1);
2439 return(0);
2440 }
2441
2442 /* LWZUX - Load Word and Zero with Update Indexed */
DECLARE_INSN(LWZUX)2443 DECLARE_INSN(LWZUX)
2444 {
2445 int rs = bits(insn,21,25);
2446 int ra = bits(insn,16,20);
2447 int rb = bits(insn,11,15);
2448
2449 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LWZ,ra,rb,rs,1);
2450 return(0);
2451 }
2452
2453 /* LWZX - Load Word and Zero Indexed */
DECLARE_INSN(LWZX)2454 DECLARE_INSN(LWZX)
2455 {
2456 int rs = bits(insn,21,25);
2457 int ra = bits(insn,16,20);
2458 int rb = bits(insn,11,15);
2459
2460 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LWZ,ra,rb,rs,0);
2461 return(0);
2462 }
2463
/* MCRF - Move Condition Register Field */
DECLARE_INSN(MCRF)
{
   int rd = bits(insn,23,25);   /* destination CR field */
   int rs = bits(insn,18,20);   /* source CR field */
   int hreg_t0;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"mcrf");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   /* the source field must be synced to memory before reading it */
   ppc32_op_emit_require_flags(cpu,rs);

   iop = ppc32_op_emit_insn_output(cpu,1,"mcrf");

   /* Load "rs" field into the temporary register */
   amd64_mov_reg_membase(iop->ob_ptr,hreg_t0,
                         AMD64_R15,PPC32_CR_FIELD_OFFSET(rs),4);

   /* Store it in "rd" field */
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,PPC32_CR_FIELD_OFFSET(rd),
                         hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2489
/* MFCR - Move from Condition Register */
DECLARE_INSN(MFCR)
{
   int rd = bits(insn,21,25);
   int hreg_rd,hreg_t0;
   jit_op_t *iop;
   int i;

   ppc32_jit_start_hreg_seq(cpu,"mfcr");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   /* all 8 CR fields are read, so every one must be synced */
   ppc32_op_emit_require_flags(cpu,JIT_OP_PPC_ALL_FLAGS);

   iop = ppc32_op_emit_insn_output(cpu,3,"mfcr");

   amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_rd,hreg_rd);

   /* rebuild the 32-bit CR: field 0 ends up in the top nibble */
   for(i=0;i<8;i++) {
      /* load field i into the temporary register */
      amd64_mov_reg_membase(iop->ob_ptr,hreg_t0,
                            AMD64_R15,PPC32_CR_FIELD_OFFSET(i),4);
      amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_rd,4);
      amd64_alu_reg_reg(iop->ob_ptr,X86_OR,hreg_rd,hreg_t0);
   }

   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2520
/* MFMSR - Move from Machine State Register */
DECLARE_INSN(MFMSR)
{
   int rd = bits(insn,21,25);
   int hreg_rd;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"mfmsr");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   /* $rd = cpu->msr (read straight from the CPU structure) */
   iop = ppc32_op_emit_insn_output(cpu,1,"mfmsr");
   amd64_mov_reg_membase(iop->ob_ptr,hreg_rd,
                         AMD64_R15,OFFSET(cpu_ppc_t,msr),4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2539
/* MFSR - Move From Segment Register */
DECLARE_INSN(MFSR)
{
   int rd = bits(insn,21,25);
   int sr = bits(insn,16,19);   /* segment register index (0-15) */
   int hreg_rd;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"mfsr");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   iop = ppc32_op_emit_insn_output(cpu,1,"mfsr");

   /* $rd = cpu->sr[sr]; (sr << 2) indexes the 32-bit entry */
   amd64_mov_reg_membase(iop->ob_ptr,hreg_rd,
                         AMD64_R15,(OFFSET(cpu_ppc_t,sr) + (sr << 2)),4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2560
/* MTCRF - Move to Condition Register Fields */
DECLARE_INSN(MTCRF)
{
   int rs = bits(insn,21,25);
   int crm = bits(insn,12,19);  /* field mask: bit (7-i) selects CR field i */
   int hreg_rs,hreg_t0;
   jit_op_t *iop;
   int i;

   ppc32_jit_start_hreg_seq(cpu,"mtcrf");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,4,"mtcrf");

   /* extract each selected 4-bit field from $rs and store it */
   for(i=0;i<8;i++)
      if (crm & (1 << (7 - i))) {
         amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_rs,4);

         /* field 7 already sits in the low nibble, no shift needed */
         if (i != 7)
            amd64_shift_reg_imm(iop->ob_ptr,X86_SHR,hreg_t0,28 - (i << 2));

         amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x0F);
         amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,PPC32_CR_FIELD_OFFSET(i),
                               hreg_t0,4);
      }

   /* cached host flags no longer match the CR fields in memory */
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_TRASH_FLAGS);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2595
/* MULHW - Multiply High Word */
DECLARE_INSN(MULHW)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rb;
   jit_op_t *iop;

   /* x86 IMUL uses RDX:RAX implicitly, so both must be pinned */
   ppc32_jit_start_hreg_seq(cpu,"mulhw");
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RAX);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,AMD64_RAX,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   /* rd = hi(ra * rb), signed multiply (is_signed=1) */
   iop = ppc32_op_emit_insn_output(cpu,2,"mulhw");
   amd64_mul_reg_size(iop->ob_ptr,hreg_rb,1,4);

   /* Rc bit: CR0 is based on the high word stored in $rd */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,AMD64_RDX,AMD64_RDX,4);

   ppc32_op_emit_store_gpr(cpu,rd,AMD64_RDX);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   /* edx:eax are directly modified: throw them */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RAX);
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2632
/* MULHWU - Multiply High Word Unsigned */
DECLARE_INSN(MULHWU)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rb;
   jit_op_t *iop;

   /* x86 MUL uses RDX:RAX implicitly, so both must be pinned */
   ppc32_jit_start_hreg_seq(cpu,"mulhwu");
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RAX);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,AMD64_RAX,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   /* rd = hi(ra * rb), unsigned multiply (is_signed=0) */
   iop = ppc32_op_emit_insn_output(cpu,2,"mulhwu");
   amd64_mul_reg_size(iop->ob_ptr,hreg_rb,0,4);

   /* Rc bit: CR0 is based on the high word stored in $rd */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,AMD64_RDX,AMD64_RDX,4);

   ppc32_op_emit_store_gpr(cpu,rd,AMD64_RDX);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   /* edx:eax are directly modified: throw them */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RAX);
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2669
/* MULLI - Multiply Low Immediate */
DECLARE_INSN(MULLI)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint32_t imm = bits(insn,0,15);
   int hreg_t0;
   jit_op_t *iop;

   /* x86 IMUL uses RDX:RAX implicitly, so both must be pinned */
   ppc32_jit_start_hreg_seq(cpu,"mulli");
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RAX);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);

   ppc32_op_emit_load_gpr(cpu,AMD64_RAX,ra);

   /* rd = lo(ra * imm) */
   iop = ppc32_op_emit_insn_output(cpu,2,"mulli");

   /* the 16-bit immediate is sign-extended; signed multiply */
   ppc32_load_imm(&iop->ob_ptr,hreg_t0,sign_extend_32(imm,16));
   amd64_mul_reg_size(iop->ob_ptr,hreg_t0,1,4);
   ppc32_op_emit_store_gpr(cpu,rd,AMD64_RAX);

   /* edx:eax are directly modified: throw them */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RAX);
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2700
/* MULLW - Multiply Low Word */
DECLARE_INSN(MULLW)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rb;
   jit_op_t *iop;

   /* x86 IMUL uses RDX:RAX implicitly, so both must be pinned */
   ppc32_jit_start_hreg_seq(cpu,"mullw");
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RAX);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,AMD64_RAX,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   /* rd = lo(ra * rb), signed multiply */
   iop = ppc32_op_emit_insn_output(cpu,2,"mullw");
   amd64_mul_reg_size(iop->ob_ptr,hreg_rb,1,4);

   /* Rc bit: CR0 is based on the low word stored in $rd */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,AMD64_RAX,AMD64_RAX,4);

   ppc32_op_emit_store_gpr(cpu,rd,AMD64_RAX);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   /* edx:eax are directly modified: throw them */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RAX);
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2737
/* NAND */
DECLARE_INSN(NAND)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rs,hreg_ra,hreg_rb;
   jit_op_t *iop;

   /* $ra = ~($rs & $rb) */
   ppc32_jit_start_hreg_seq(cpu,"nand");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,2,"nand");

   /* when $ra aliases a source, AND in place to save a move */
   if (ra == rs)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_ra,hreg_rb,4);
   else if (ra == rb)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_ra,hreg_rs,4);
   else {
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_ra,hreg_rb,4);
   }

   amd64_not_reg(iop->ob_ptr,hreg_ra);

   /* Rc bit: CR0 update requested (NOT does not set host flags) */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2780
/* NEG */
DECLARE_INSN(NEG)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int hreg_rd,hreg_ra;
   jit_op_t *iop;

   /* $rd = neg($ra) */
   ppc32_jit_start_hreg_seq(cpu,"neg");
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);

   iop = ppc32_op_emit_insn_output(cpu,1,"neg");

   if (rd != ra)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);

   amd64_neg_reg(iop->ob_ptr,hreg_rd);

   /* Rc bit: CR0 update requested */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_rd,hreg_rd,4);

   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2814
/* NOR */
DECLARE_INSN(NOR)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rs,hreg_ra,hreg_rb;
   jit_op_t *iop;

   /* $ra = ~($rs | $rb) */
   ppc32_jit_start_hreg_seq(cpu,"nor");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,2,"nor");

   /* when $ra aliases a source, OR in place to save a move */
   if (ra == rs)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_rb,4);
   else if (ra == rb)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_rs,4);
   else {
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_rb,4);
   }

   amd64_not_reg(iop->ob_ptr,hreg_ra);

   /* Rc bit: CR0 update requested (NOT does not set host flags) */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2857
/* OR */
DECLARE_INSN(OR)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rs,hreg_ra,hreg_rb;
   jit_op_t *iop;

   /* $ra = $rs | $rb */
   ppc32_jit_start_hreg_seq(cpu,"or");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   /* special optimization for move/nop operation:
      "or rA,rS,rS" is the canonical PPC register move (mr) */
   if (rs == rb) {
      ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
      iop = ppc32_op_emit_insn_output(cpu,2,"or");

      if (ra != rs)
         amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

      if (insn & 1)
         amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

      ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

      if (insn & 1)
         ppc32_op_emit_update_flags(cpu,0,TRUE);

      ppc32_jit_close_hreg_seq(cpu);
      return(0);
   }

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,2,"or");

   /* when $ra aliases a source, OR in place to save a move */
   if (ra == rs) {
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_rb,4);
   } else if (ra == rb)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_rs,4);
   else {
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_rb,4);
   }

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2915
/* OR with Complement */
DECLARE_INSN(ORC)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rs,hreg_ra,hreg_rb,hreg_t0;
   jit_op_t *iop;

   /* $ra = $rs | ~$rb */
   ppc32_jit_start_hreg_seq(cpu,"orc");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,1,"orc");

   /* $t0 = ~$rb */
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_rb,4);
   amd64_not_reg(iop->ob_ptr,hreg_t0);

   /* $ra = $rs | $t0 */
   if (ra == rs)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_t0,4);
   else {
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_t0,hreg_rs,4);
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_t0,4);
   }

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2957
/* OR Immediate */
DECLARE_INSN(ORI)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t imm = bits(insn,0,15);
   m_uint32_t tmp = imm;        /* zero-extended immediate */
   int hreg_rs,hreg_ra;
   jit_op_t *iop;

   /* $ra = $rs | imm */
   ppc32_jit_start_hreg_seq(cpu,"ori");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,1,"ori");

   if (ra != rs)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

   amd64_alu_reg_imm_size(iop->ob_ptr,X86_OR,hreg_ra,tmp,4);
   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2986
/* OR Immediate Shifted */
DECLARE_INSN(ORIS)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t imm = bits(insn,0,15);
   m_uint32_t tmp = imm << 16;  /* immediate shifted into the high half */
   int hreg_rs,hreg_ra;
   jit_op_t *iop;

   /* $ra = $rs | (imm << 16) */
   ppc32_jit_start_hreg_seq(cpu,"oris");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,1,"oris");

   if (ra != rs)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

   amd64_alu_reg_imm_size(iop->ob_ptr,X86_OR,hreg_ra,tmp,4);
   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3015
/* RLWIMI - Rotate Left Word Immediate then Mask Insert */
DECLARE_INSN(RLWIMI)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int sh = bits(insn,11,15);   /* rotate count */
   int mb = bits(insn,6,10);    /* mask begin bit */
   int me = bits(insn,1,5);     /* mask end bit */
   register m_uint32_t mask;
   int hreg_rs,hreg_ra,hreg_t0;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"rlwimi");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   /* $ra is both read (preserved bits) and written, so load it too */
   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);

   mask = ppc32_rotate_mask(mb,me);

   iop = ppc32_op_emit_insn_output(cpu,2,"rlwimi");

   /* Apply inverse mask to $ra (skip: mask==0 means ~mask is a no-op) */
   if (mask != 0)
      amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_ra,~mask);

   /* Rotate $rs of "sh" bits and apply the mask */
   amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_rs,4);

   if (sh != 0)
      amd64_shift_reg_imm_size(iop->ob_ptr,X86_ROL,hreg_t0,sh,4);

   if (mask != 0xFFFFFFFF)
      amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,mask);

   /* Store the result (the OR also sets host flags for Rc) */
   amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_t0,4);
   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3063
/* RLWINM - Rotate Left Word Immediate AND with Mask */
DECLARE_INSN(RLWINM)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int sh = bits(insn,11,15);   /* rotate count */
   int mb = bits(insn,6,10);    /* mask begin bit */
   int me = bits(insn,1,5);     /* mask end bit */
   register m_uint32_t mask;
   int hreg_rs,hreg_ra;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"rlwinm");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,2,"rlwinm");

   /* Rotate $rs of "sh" bits and apply the mask */
   mask = ppc32_rotate_mask(mb,me);

   if (rs != ra)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

   /* both steps are skipped when they would be no-ops */
   if (sh != 0)
      amd64_shift_reg_imm_size(iop->ob_ptr,X86_ROL,hreg_ra,sh,4);

   if (mask != 0xFFFFFFFF)
      amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_ra,mask);

   /* Rc bit: CR0 update requested */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3107
/* RLWNM - Rotate Left Word then AND with Mask */
DECLARE_INSN(RLWNM)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);   /* rotate count comes from $rb */
   int mb = bits(insn,6,10);    /* mask begin bit */
   int me = bits(insn,1,5);     /* mask end bit */
   register m_uint32_t mask;
   int hreg_rs,hreg_ra,hreg_t0;
   jit_op_t *iop;

   /* ecx is directly modified: throw it */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RCX);

   /* the variable rotate count must live in CL (x86 requirement) */
   ppc32_jit_start_hreg_seq(cpu,"rlwnm");
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RCX);

   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,AMD64_RCX,rb);

   iop = ppc32_op_emit_insn_output(cpu,2,"rlwnm");

   /* Compute the mask from mb/me */
   mask = ppc32_rotate_mask(mb,me);

   /* Rotate $rs (count in CL) and apply the mask */
   amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_rs,4);

   amd64_shift_reg_size(iop->ob_ptr,X86_ROL,hreg_t0,4);

   if (mask != 0xFFFFFFFF)
      amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,mask);

   amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_t0,4);

   /* Rc bit: CR0 update requested */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3160
/* SLW - Shift Left Word */
DECLARE_INSN(SLW)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);   /* shift count taken from $rb */
   int hreg_rs,hreg_ra;
   jit_op_t *iop;

   /* ecx is directly modified: throw it */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RCX);

   ppc32_jit_start_hreg_seq(cpu,"slw");
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RCX);
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   /* $ra = $rs << $rb. If count >= 32, then null result */
   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,AMD64_RCX,rb);

   iop = ppc32_op_emit_insn_output(cpu,3,"slw");

   /* keep the 6 low bits of the count (PPC slw uses rb[26:31]) */
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,AMD64_RCX,0x3f);

   if (ra != rs)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

   /* full-width host shift: counts 32-63 push all bits out of the
      low 32 bits, giving the required null result */
   amd64_shift_reg(iop->ob_ptr,X86_SHL,hreg_ra);

   /* store the result */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3203
/* SRAWI - Shift Right Algebraic Word Immediate */
DECLARE_INSN(SRAWI)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int sh = bits(insn,11,15);   /* shift count (0-31) */
   register m_uint32_t mask;
   int hreg_rs,hreg_ra,hreg_t0;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"srawi");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   /* $ra = (int32)$rs >> sh */
   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,3,"srawi");
   /* keep a copy of the original $rs for the carry computation */
   amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_rs,4);

   if (ra != rs)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
   amd64_shift_reg_imm_size(iop->ob_ptr,X86_SAR,hreg_ra,sh,4);

   /* set XER_CA depending on the result: mask selects the sign bit
      plus the "sh" bits shifted out of the source */
   mask = ~(0xFFFFFFFFU << sh) | 0x80000000;

   /* CA = 1 iff the source is negative AND at least one 1-bit was
      shifted out, i.e. (src & mask) unsigned-above 0x80000000 */
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,mask);
   amd64_alu_reg_imm_size(iop->ob_ptr,X86_CMP,hreg_t0,0x80000000,4);
   amd64_set_reg(iop->ob_ptr,X86_CC_A,hreg_t0,FALSE);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x1);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                         hreg_t0,4);

   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3250
/* SRW - Shift Right Word */
DECLARE_INSN(SRW)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);   /* shift count taken from $rb */
   int hreg_rs,hreg_ra;
   jit_op_t *iop;

   /* ecx is directly modified: throw it */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RCX);

   ppc32_jit_start_hreg_seq(cpu,"srw");
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RCX);
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   /* $ra = $rs >> $rb. If count >= 32, then null result */
   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,AMD64_RCX,rb);

   iop = ppc32_op_emit_insn_output(cpu,3,"srw");

   /* keep the 6 low bits of the count (PPC srw uses rb[26:31]) */
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,AMD64_RCX,0x3f);

   if (ra != rs)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

   /* full-width host shift of the zero-extended value: counts 32-63
      naturally produce the required null result */
   amd64_shift_reg(iop->ob_ptr,X86_SHR,hreg_ra);

   /* store the result */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3293
3294 /* STB - Store Byte */
DECLARE_INSN(STB)3295 DECLARE_INSN(STB)
3296 {
3297 int rs = bits(insn,21,25);
3298 int ra = bits(insn,16,20);
3299 m_uint16_t offset = bits(insn,0,15);
3300
3301 //ppc32_emit_memop(b,PPC_MEMOP_STB,ra,offset,rs,0);
3302 ppc32_emit_memop_fast(cpu,b,1,PPC_MEMOP_STB,ra,offset,rs,
3303 ppc32_memop_fast_stb);
3304 return(0);
3305 }
3306
3307 /* STBU - Store Byte with Update */
DECLARE_INSN(STBU)3308 DECLARE_INSN(STBU)
3309 {
3310 int rs = bits(insn,21,25);
3311 int ra = bits(insn,16,20);
3312 m_uint16_t offset = bits(insn,0,15);
3313
3314 ppc32_emit_memop(cpu,b,PPC_MEMOP_STB,ra,offset,rs,1);
3315 return(0);
3316 }
3317
3318 /* STBUX - Store Byte with Update Indexed */
DECLARE_INSN(STBUX)3319 DECLARE_INSN(STBUX)
3320 {
3321 int rs = bits(insn,21,25);
3322 int ra = bits(insn,16,20);
3323 int rb = bits(insn,11,15);
3324
3325 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_STB,ra,rb,rs,1);
3326 return(0);
3327 }
3328
3329 /* STBUX - Store Byte Indexed */
DECLARE_INSN(STBX)3330 DECLARE_INSN(STBX)
3331 {
3332 int rs = bits(insn,21,25);
3333 int ra = bits(insn,16,20);
3334 int rb = bits(insn,11,15);
3335
3336 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_STB,ra,rb,rs,0);
3337 return(0);
3338 }
3339
3340 /* STH - Store Half-Word */
DECLARE_INSN(STH)3341 DECLARE_INSN(STH)
3342 {
3343 int rs = bits(insn,21,25);
3344 int ra = bits(insn,16,20);
3345 m_uint16_t offset = bits(insn,0,15);
3346
3347 ppc32_emit_memop(cpu,b,PPC_MEMOP_STH,ra,offset,rs,0);
3348 return(0);
3349 }
3350
3351 /* STHU - Store Half-Word with Update */
DECLARE_INSN(STHU)3352 DECLARE_INSN(STHU)
3353 {
3354 int rs = bits(insn,21,25);
3355 int ra = bits(insn,16,20);
3356 m_uint16_t offset = bits(insn,0,15);
3357
3358 ppc32_emit_memop(cpu,b,PPC_MEMOP_STH,ra,offset,rs,1);
3359 return(0);
3360 }
3361
3362 /* STHUX - Store Half-Word with Update Indexed */
DECLARE_INSN(STHUX)3363 DECLARE_INSN(STHUX)
3364 {
3365 int rs = bits(insn,21,25);
3366 int ra = bits(insn,16,20);
3367 int rb = bits(insn,11,15);
3368
3369 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_STH,ra,rb,rs,1);
3370 return(0);
3371 }
3372
3373 /* STHUX - Store Half-Word Indexed */
DECLARE_INSN(STHX)3374 DECLARE_INSN(STHX)
3375 {
3376 int rs = bits(insn,21,25);
3377 int ra = bits(insn,16,20);
3378 int rb = bits(insn,11,15);
3379
3380 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_STH,ra,rb,rs,0);
3381 return(0);
3382 }
3383
3384 /* STW - Store Word */
DECLARE_INSN(STW)3385 DECLARE_INSN(STW)
3386 {
3387 int rs = bits(insn,21,25);
3388 int ra = bits(insn,16,20);
3389 m_uint16_t offset = bits(insn,0,15);
3390
3391 //ppc32_emit_memop(b,PPC_MEMOP_STW,ra,offset,rs,0);
3392 ppc32_emit_memop_fast(cpu,b,1,PPC_MEMOP_STW,ra,offset,rs,
3393 ppc32_memop_fast_stw);
3394 return(0);
3395 }
3396
3397 /* STWU - Store Word with Update */
DECLARE_INSN(STWU)3398 DECLARE_INSN(STWU)
3399 {
3400 int rs = bits(insn,21,25);
3401 int ra = bits(insn,16,20);
3402 m_uint16_t offset = bits(insn,0,15);
3403
3404 ppc32_emit_memop(cpu,b,PPC_MEMOP_STW,ra,offset,rs,1);
3405 return(0);
3406 }
3407
3408 /* STWUX - Store Word with Update Indexed */
DECLARE_INSN(STWUX)3409 DECLARE_INSN(STWUX)
3410 {
3411 int rs = bits(insn,21,25);
3412 int ra = bits(insn,16,20);
3413 int rb = bits(insn,11,15);
3414
3415 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_STW,ra,rb,rs,1);
3416 return(0);
3417 }
3418
3419 /* STWUX - Store Word Indexed */
DECLARE_INSN(STWX)3420 DECLARE_INSN(STWX)
3421 {
3422 int rs = bits(insn,21,25);
3423 int ra = bits(insn,16,20);
3424 int rb = bits(insn,11,15);
3425
3426 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_STW,ra,rb,rs,0);
3427 return(0);
3428 }
3429
/* SUBF - Subtract From */
DECLARE_INSN(SUBF)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rd,hreg_ra,hreg_rb,hreg_t0;
   jit_op_t *iop;

   /* $rd = $rb - $ra */
   ppc32_jit_start_hreg_seq(cpu,"subf");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);

   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,2,"subf");

   /* three aliasing cases: rd==rb subtracts in place; rd==ra needs the
      temporary so $ra is not clobbered before being subtracted;
      otherwise compute directly into $rd */
   if (rd == rb)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_SUB,hreg_rd,hreg_ra,4);
   else if (rd == ra) {
      amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_rb,4);
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_SUB,hreg_t0,hreg_ra,4);
      amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_t0,4);
   } else {
      amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_rb,4);
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_SUB,hreg_rd,hreg_ra,4);
   }

   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3471
3472 /* SUBFC - Subtract From Carrying */
DECLARE_INSN(SUBFC)3473 DECLARE_INSN(SUBFC)
3474 {
3475 int rd = bits(insn,21,25);
3476 int ra = bits(insn,16,20);
3477 int rb = bits(insn,11,15);
3478 int hreg_ra,hreg_rb,hreg_rd,hreg_t0,hreg_t1;
3479 jit_op_t *iop;
3480
3481 /* $rd = ~$ra + 1 + $rb */
3482 ppc32_jit_start_hreg_seq(cpu,"subfc");
3483 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
3484 hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);
3485 hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
3486
3487 hreg_t0 = ppc32_jit_alloc_hreg(cpu,-1);
3488 hreg_t1 = ppc32_jit_get_tmp_hreg(cpu);
3489
3490 ppc32_op_emit_alter_host_reg(cpu,hreg_t0);
3491 ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
3492 ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);
3493
3494 iop = ppc32_op_emit_insn_output(cpu,3,"subfc");
3495
3496 amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_t1,hreg_t1);
3497
3498 /* $t0 = ~$ra + 1 */
3499 amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_ra,4);
3500 amd64_not_reg(iop->ob_ptr,hreg_t0);
3501 amd64_alu_reg_imm_size(iop->ob_ptr,X86_ADD,hreg_t0,1,4);
3502 amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
3503 amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
3504 hreg_t1,4);
3505
3506 /* $t0 += $rb */
3507 amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_t0,hreg_rb,4);
3508 amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
3509 amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
3510 AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
3511 hreg_t1,4);
3512
3513 amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_t0,4);
3514
3515 if (insn & 1)
3516 amd64_test_reg_reg_size(iop->ob_ptr,hreg_rd,hreg_rd,4);
3517
3518 ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);
3519
3520 /* update cr0 */
3521 if (insn & 1)
3522 ppc32_update_cr0(b);
3523
3524 ppc32_jit_close_hreg_seq(cpu);
3525 return(0);
3526 }
3527
3528 /* SUBFE - Subtract From Extended */
DECLARE_INSN(SUBFE)3529 DECLARE_INSN(SUBFE)
3530 {
3531 int rd = bits(insn,21,25);
3532 int ra = bits(insn,16,20);
3533 int rb = bits(insn,11,15);
3534 int hreg_ra,hreg_rb,hreg_rd,hreg_t0,hreg_t1;
3535 jit_op_t *iop;
3536
3537 /* $rd = ~$ra + $carry (xer_ca) + $rb */
3538 ppc32_jit_start_hreg_seq(cpu,"subfe");
3539 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
3540 hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);
3541 hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
3542
3543 hreg_t0 = ppc32_jit_alloc_hreg(cpu,-1);
3544 hreg_t1 = ppc32_jit_get_tmp_hreg(cpu);
3545
3546 ppc32_op_emit_alter_host_reg(cpu,hreg_t0);
3547 ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
3548 ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);
3549
3550 iop = ppc32_op_emit_insn_output(cpu,3,"subfe");
3551
3552 amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_t1,hreg_t1);
3553
3554 /* $t0 = ~$ra + $carry */
3555 amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_ra,4);
3556 amd64_not_reg(iop->ob_ptr,hreg_t0);
3557 amd64_alu_reg_membase_size(iop->ob_ptr,X86_ADD,hreg_t0,
3558 AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),4);
3559
3560 amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
3561 amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
3562 hreg_t1,4);
3563
3564 /* $t0 += $rb */
3565 amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_t0,hreg_rb,4);
3566 amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
3567 amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
3568 AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
3569 hreg_t1,4);
3570
3571 amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_t0,4);
3572
3573 if (insn & 1)
3574 amd64_test_reg_reg_size(iop->ob_ptr,hreg_rd,hreg_rd,4);
3575
3576 ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);
3577
3578 /* update cr0 */
3579 if (insn & 1)
3580 ppc32_update_cr0(b);
3581
3582 ppc32_jit_close_hreg_seq(cpu);
3583 return(0);
3584 }
3585
/* SUBFIC - Subtract From Immediate Carrying */
DECLARE_INSN(SUBFIC)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t imm = bits(insn,0,15);
   m_uint32_t tmp = sign_extend_32(imm,16);
   int hreg_ra,hreg_rd,hreg_t0,hreg_t1;
   jit_op_t *iop;

   /* $rd = ~$ra + 1 + sign_extend(imm,16) */
   ppc32_jit_start_hreg_seq(cpu,"subfic");
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   hreg_t0 = ppc32_jit_alloc_hreg(cpu,-1);
   hreg_t1 = ppc32_jit_get_tmp_hreg(cpu);

   ppc32_op_emit_alter_host_reg(cpu,hreg_t0);
   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);

   iop = ppc32_op_emit_insn_output(cpu,3,"subfic");

   /* $t1 = 0 (carry accumulator) */
   amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_t1,hreg_t1);

   /* $t0 = ~$ra + 1, record the carry into xer_ca */
   amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_ra,4);
   amd64_not_reg(iop->ob_ptr,hreg_t0);
   amd64_alu_reg_imm_size(iop->ob_ptr,X86_ADD,hreg_t0,1,4);

   amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                         hreg_t1,4);

   /* $t0 += sign_extend(imm,16), OR the second carry into xer_ca */
   amd64_alu_reg_imm_size(iop->ob_ptr,X86_ADD,hreg_t0,tmp,4);
   amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                              hreg_t1,4);

   /* subfic has no record (Rc) form, so no CR0 update here */
   amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_t0,4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3633
/* SYNC - Synchronize: treated as a no-op by this translator */
DECLARE_INSN(SYNC)
{
   return(0);
}
3639
/* XOR */
DECLARE_INSN(XOR)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rs,hreg_ra,hreg_rb;
   jit_op_t *iop;

   /* $ra = $rs ^ $rb */
   ppc32_jit_start_hreg_seq(cpu,"xor");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,1,"xor");

   /* aliasing cases: when ra aliases a source, XOR in place
      (the aliased pair shares one host register); otherwise copy
      $rs first */
   if (ra == rs)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_XOR,hreg_ra,hreg_rb,4);
   else if (ra == rb)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_XOR,hreg_ra,hreg_rs,4);
   else {
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_XOR,hreg_ra,hreg_rb,4);
   }

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   /* Rc form: CR0 is derived from the flags left by the XOR above */
   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3677
/* XORI - XOR Immediate (never sets CR0: xori has no record form) */
DECLARE_INSN(XORI)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint32_t imm = bits(insn,0,15);   /* zero-extended immediate */
   int hreg_rs,hreg_ra;
   jit_op_t *iop;

   /* $ra = $rs ^ imm */
   ppc32_jit_start_hreg_seq(cpu,"xori");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,1,"xori");

   /* when ra == rs both allocations share one host register: no copy */
   if (ra != rs)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

   amd64_alu_reg_imm(iop->ob_ptr,X86_XOR,hreg_ra,imm);
   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3705
/* XORIS - XOR Immediate Shifted (never sets CR0: no record form) */
DECLARE_INSN(XORIS)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t imm = bits(insn,0,15);
   m_uint32_t tmp = imm << 16;   /* immediate goes in the upper half */
   int hreg_rs,hreg_ra;
   jit_op_t *iop;

   /* $ra = $rs ^ (imm << 16) */
   ppc32_jit_start_hreg_seq(cpu,"xoris");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,1,"xoris");

   /* when ra == rs both allocations share one host register: no copy */
   if (ra != rs)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

   amd64_alu_reg_imm(iop->ob_ptr,X86_XOR,hreg_ra,tmp);
   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3734
/* PPC instruction array.
 *
 * Decode table: an instruction matches an entry when
 * (insn & mask) == value.
 *
 * NOTE(review): entry order looks significant for overlapping patterns
 * (e.g. the more specific BCC entry precedes the generic BC entry),
 * which implies a first-match linear scan - confirm against the lookup
 * code. The table is terminated by the catch-all ppc32_emit_unknown
 * entry (mask 0 matches everything) followed by a NULL sentinel.
 */
struct ppc32_insn_tag ppc32_insn_tags[] = {
   { ppc32_emit_BLR        , 0xfffffffe , 0x4e800020 },
   { ppc32_emit_BCTR       , 0xfffffffe , 0x4e800420 },
   { ppc32_emit_MFLR       , 0xfc1fffff , 0x7c0802a6 },
   { ppc32_emit_MTLR       , 0xfc1fffff , 0x7c0803a6 },
   { ppc32_emit_MFCTR      , 0xfc1fffff , 0x7c0902a6 },
   { ppc32_emit_MTCTR      , 0xfc1fffff , 0x7c0903a6 },
   { ppc32_emit_MFTBL      , 0xfc1ff7ff , 0x7c0c42e6 },
   { ppc32_emit_MFTBU      , 0xfc1ff7ff , 0x7c0d42e6 },
   { ppc32_emit_ADD        , 0xfc0007fe , 0x7c000214 },
   { ppc32_emit_ADDC       , 0xfc0007fe , 0x7c000014 },
   { ppc32_emit_ADDE       , 0xfc0007fe , 0x7c000114 },
   { ppc32_emit_ADDI       , 0xfc000000 , 0x38000000 },
   { ppc32_emit_ADDIC      , 0xfc000000 , 0x30000000 },
   { ppc32_emit_ADDIC_dot  , 0xfc000000 , 0x34000000 },
   { ppc32_emit_ADDIS      , 0xfc000000 , 0x3c000000 },
   { ppc32_emit_ADDZE      , 0xfc00fffe , 0x7c000194 },
   { ppc32_emit_AND        , 0xfc0007fe , 0x7c000038 },
   { ppc32_emit_ANDC       , 0xfc0007fe , 0x7c000078 },
   { ppc32_emit_ANDI       , 0xfc000000 , 0x70000000 },
   { ppc32_emit_ANDIS      , 0xfc000000 , 0x74000000 },
   { ppc32_emit_B          , 0xfc000003 , 0x48000000 },
   { ppc32_emit_BA         , 0xfc000003 , 0x48000002 },
   { ppc32_emit_BL         , 0xfc000003 , 0x48000001 },
   { ppc32_emit_BLA        , 0xfc000003 , 0x48000003 },
   /* BCC's tighter mask must stay before the generic BC entry */
   { ppc32_emit_BCC        , 0xfe800000 , 0x40800000 },
   { ppc32_emit_BC         , 0xfc000000 , 0x40000000 },
   { ppc32_emit_BCLR       , 0xfc00fffe , 0x4c000020 },
   { ppc32_emit_CMP        , 0xfc6007ff , 0x7c000000 },
   { ppc32_emit_CMPI       , 0xfc600000 , 0x2c000000 },
   { ppc32_emit_CMPL       , 0xfc6007ff , 0x7c000040 },
   { ppc32_emit_CMPLI      , 0xfc600000 , 0x28000000 },
   { ppc32_emit_CRAND      , 0xfc0007ff , 0x4c000202 },
   { ppc32_emit_CRANDC     , 0xfc0007ff , 0x4c000102 },
   { ppc32_emit_CREQV      , 0xfc0007ff , 0x4c000242 },
   { ppc32_emit_CRNAND     , 0xfc0007ff , 0x4c0001c2 },
   { ppc32_emit_CRNOR      , 0xfc0007ff , 0x4c000042 },
   { ppc32_emit_CROR       , 0xfc0007ff , 0x4c000382 },
   { ppc32_emit_CRORC      , 0xfc0007ff , 0x4c000342 },
   { ppc32_emit_CRXOR      , 0xfc0007ff , 0x4c000182 },
   { ppc32_emit_DIVWU      , 0xfc0007fe , 0x7c000396 },
   { ppc32_emit_EQV        , 0xfc0007fe , 0x7c000238 },
   { ppc32_emit_EXTSB      , 0xfc00fffe , 0x7c000774 },
   { ppc32_emit_EXTSH      , 0xfc00fffe , 0x7c000734 },
   { ppc32_emit_LBZ        , 0xfc000000 , 0x88000000 },
   { ppc32_emit_LBZU       , 0xfc000000 , 0x8c000000 },
   { ppc32_emit_LBZUX      , 0xfc0007ff , 0x7c0000ee },
   { ppc32_emit_LBZX       , 0xfc0007ff , 0x7c0000ae },
   { ppc32_emit_LHA        , 0xfc000000 , 0xa8000000 },
   { ppc32_emit_LHAU       , 0xfc000000 , 0xac000000 },
   { ppc32_emit_LHAUX      , 0xfc0007ff , 0x7c0002ee },
   { ppc32_emit_LHAX       , 0xfc0007ff , 0x7c0002ae },
   { ppc32_emit_LHZ        , 0xfc000000 , 0xa0000000 },
   { ppc32_emit_LHZU       , 0xfc000000 , 0xa4000000 },
   { ppc32_emit_LHZUX      , 0xfc0007ff , 0x7c00026e },
   { ppc32_emit_LHZX       , 0xfc0007ff , 0x7c00022e },
   { ppc32_emit_LWZ        , 0xfc000000 , 0x80000000 },
   { ppc32_emit_LWZU       , 0xfc000000 , 0x84000000 },
   { ppc32_emit_LWZUX      , 0xfc0007ff , 0x7c00006e },
   { ppc32_emit_LWZX       , 0xfc0007ff , 0x7c00002e },
   { ppc32_emit_MCRF       , 0xfc63ffff , 0x4c000000 },
   { ppc32_emit_MFCR       , 0xfc1fffff , 0x7c000026 },
   { ppc32_emit_MFMSR      , 0xfc1fffff , 0x7c0000a6 },
   { ppc32_emit_MFSR       , 0xfc10ffff , 0x7c0004a6 },
   { ppc32_emit_MTCRF      , 0xfc100fff , 0x7c000120 },
   { ppc32_emit_MULHW      , 0xfc0007fe , 0x7c000096 },
   { ppc32_emit_MULHWU     , 0xfc0007fe , 0x7c000016 },
   { ppc32_emit_MULLI      , 0xfc000000 , 0x1c000000 },
   { ppc32_emit_MULLW      , 0xfc0007fe , 0x7c0001d6 },
   { ppc32_emit_NAND       , 0xfc0007fe , 0x7c0003b8 },
   { ppc32_emit_NEG        , 0xfc00fffe , 0x7c0000d0 },
   { ppc32_emit_NOR        , 0xfc0007fe , 0x7c0000f8 },
   { ppc32_emit_OR         , 0xfc0007fe , 0x7c000378 },
   { ppc32_emit_ORC        , 0xfc0007fe , 0x7c000338 },
   { ppc32_emit_ORI        , 0xfc000000 , 0x60000000 },
   { ppc32_emit_ORIS       , 0xfc000000 , 0x64000000 },
   { ppc32_emit_RLWIMI     , 0xfc000000 , 0x50000000 },
   { ppc32_emit_RLWINM     , 0xfc000000 , 0x54000000 },
   { ppc32_emit_RLWNM      , 0xfc000000 , 0x5c000000 },
   { ppc32_emit_SLW        , 0xfc0007fe , 0x7c000030 },
   { ppc32_emit_SRAWI      , 0xfc0007fe , 0x7c000670 },
   { ppc32_emit_SRW        , 0xfc0007fe , 0x7c000430 },
   { ppc32_emit_STB        , 0xfc000000 , 0x98000000 },
   { ppc32_emit_STBU       , 0xfc000000 , 0x9c000000 },
   { ppc32_emit_STBUX      , 0xfc0007ff , 0x7c0001ee },
   { ppc32_emit_STBX       , 0xfc0007ff , 0x7c0001ae },
   { ppc32_emit_STH        , 0xfc000000 , 0xb0000000 },
   { ppc32_emit_STHU       , 0xfc000000 , 0xb4000000 },
   { ppc32_emit_STHUX      , 0xfc0007ff , 0x7c00036e },
   { ppc32_emit_STHX       , 0xfc0007ff , 0x7c00032e },
   { ppc32_emit_STW        , 0xfc000000 , 0x90000000 },
   { ppc32_emit_STWU       , 0xfc000000 , 0x94000000 },
   { ppc32_emit_STWUX      , 0xfc0007ff , 0x7c00016e },
   { ppc32_emit_STWX       , 0xfc0007ff , 0x7c00012e },
   { ppc32_emit_SUBF       , 0xfc0007fe , 0x7c000050 },
   { ppc32_emit_SUBFC      , 0xfc0007fe , 0x7c000010 },
   { ppc32_emit_SUBFE      , 0xfc0007fe , 0x7c000110 },
   { ppc32_emit_SUBFIC     , 0xfc000000 , 0x20000000 },
   { ppc32_emit_SYNC       , 0xffffffff , 0x7c0004ac },
   { ppc32_emit_XOR        , 0xfc0007fe , 0x7c000278 },
   { ppc32_emit_XORI       , 0xfc000000 , 0x68000000 },
   { ppc32_emit_XORIS      , 0xfc000000 , 0x6c000000 },
   /* catch-all: mask 0 matches any instruction */
   { ppc32_emit_unknown    , 0x00000000 , 0x00000000 },
   { NULL                  , 0x00000000 , 0x00000000 },
};
3841