xref: /qemu/target/m68k/translate.c (revision 962a145c)
1 /*
2  *  m68k translation
3  *
4  *  Copyright (c) 2005-2007 CodeSourcery
5  *  Written by Paul Brook
6  *
7  * This library is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * This library is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "exec/exec-all.h"
24 #include "tcg/tcg-op.h"
25 #include "qemu/log.h"
26 #include "qemu/qemu-print.h"
27 #include "exec/translator.h"
28 #include "exec/helper-proto.h"
29 #include "exec/helper-gen.h"
30 #include "exec/log.h"
31 #include "fpu/softfloat.h"
32 #include "semihosting/semihost.h"
33 
34 #define HELPER_H "helper.h"
35 #include "exec/helper-info.c.inc"
36 #undef  HELPER_H
37 
38 //#define DEBUG_DISPATCH 1
39 
40 #define DEFO32(name, offset) static TCGv QREG_##name;
41 #define DEFO64(name, offset) static TCGv_i64 QREG_##name;
42 #include "qregs.h.inc"
43 #undef DEFO32
44 #undef DEFO64
45 
46 static TCGv_i32 cpu_halted;
47 static TCGv_i32 cpu_exception_index;
48 
49 static char cpu_reg_names[2 * 8 * 3 + 5 * 4];
50 static TCGv cpu_dregs[8];
51 static TCGv cpu_aregs[8];
52 static TCGv_i64 cpu_macc[4];
53 
54 #define REG(insn, pos)  (((insn) >> (pos)) & 7)
55 #define DREG(insn, pos) cpu_dregs[REG(insn, pos)]
56 #define AREG(insn, pos) get_areg(s, REG(insn, pos))
57 #define MACREG(acc)     cpu_macc[acc]
58 #define QREG_SP         get_areg(s, 7)
59 
60 static TCGv NULL_QREG;
61 #define IS_NULL_QREG(t) (t == NULL_QREG)
62 /* Used to distinguish stores from bad addressing modes.  */
63 static TCGv store_dummy;
64 
65 void m68k_tcg_init(void)
66 {
67     char *p;
68     int i;
69 
70 #define DEFO32(name, offset) \
71     QREG_##name = tcg_global_mem_new_i32(tcg_env, \
72         offsetof(CPUM68KState, offset), #name);
73 #define DEFO64(name, offset) \
74     QREG_##name = tcg_global_mem_new_i64(tcg_env, \
75         offsetof(CPUM68KState, offset), #name);
76 #include "qregs.h.inc"
77 #undef DEFO32
78 #undef DEFO64
79 
80     cpu_halted = tcg_global_mem_new_i32(tcg_env,
81                                         -offsetof(M68kCPU, env) +
82                                         offsetof(CPUState, halted), "HALTED");
83     cpu_exception_index = tcg_global_mem_new_i32(tcg_env,
84                                                  -offsetof(M68kCPU, env) +
85                                                  offsetof(CPUState, exception_index),
86                                                  "EXCEPTION");
87 
88     p = cpu_reg_names;
89     for (i = 0; i < 8; i++) {
90         sprintf(p, "D%d", i);
91         cpu_dregs[i] = tcg_global_mem_new(tcg_env,
92                                           offsetof(CPUM68KState, dregs[i]), p);
93         p += 3;
94         sprintf(p, "A%d", i);
95         cpu_aregs[i] = tcg_global_mem_new(tcg_env,
96                                           offsetof(CPUM68KState, aregs[i]), p);
97         p += 3;
98     }
99     for (i = 0; i < 4; i++) {
100         sprintf(p, "ACC%d", i);
101         cpu_macc[i] = tcg_global_mem_new_i64(tcg_env,
102                                          offsetof(CPUM68KState, macc[i]), p);
103         p += 5;
104     }
105 
106     NULL_QREG = tcg_global_mem_new(tcg_env, -4, "NULL");
107     store_dummy = tcg_global_mem_new(tcg_env, -8, "NULL");
108 }
109 
110 /* internal defines */
111 typedef struct DisasContext {
112     DisasContextBase base;
113     CPUM68KState *env;
114     target_ulong pc;
115     target_ulong pc_prev;
116     CCOp cc_op; /* Current CC operation */
117     int cc_op_synced;
118     TCGv_i64 mactmp;
119     int done_mac;
120     int writeback_mask;
121     TCGv writeback[8];
122     bool ss_active;
123 } DisasContext;
124 
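/*
 * Address register updates for the postincrement/predecrement addressing
 * modes are not written to cpu_aregs[] immediately.  delay_set_areg()
 * stages the new value, get_areg() returns the staged value for later
 * uses within the same instruction, and do_writebacks() commits the
 * staged values once the instruction has completed, so that a faulting
 * memory access does not leave the address register half-updated.
 */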
125 static TCGv get_areg(DisasContext *s, unsigned regno)
126 {
127     if (s->writeback_mask & (1 << regno)) {
128         return s->writeback[regno];
129     } else {
130         return cpu_aregs[regno];
131     }
132 }
133 
134 static void delay_set_areg(DisasContext *s, unsigned regno,
135                            TCGv val, bool give_temp)
136 {
137     if (s->writeback_mask & (1 << regno)) {
138         if (give_temp) {
139             s->writeback[regno] = val;
140         } else {
141             tcg_gen_mov_i32(s->writeback[regno], val);
142         }
143     } else {
144         s->writeback_mask |= 1 << regno;
145         if (give_temp) {
146             s->writeback[regno] = val;
147         } else {
148             TCGv tmp = tcg_temp_new();
149             s->writeback[regno] = tmp;
150             tcg_gen_mov_i32(tmp, val);
151         }
152     }
153 }
154 
155 static void do_writebacks(DisasContext *s)
156 {
157     unsigned mask = s->writeback_mask;
158     if (mask) {
159         s->writeback_mask = 0;
160         do {
161             unsigned regno = ctz32(mask);
162             tcg_gen_mov_i32(cpu_aregs[regno], s->writeback[regno]);
163             mask &= mask - 1;
164         } while (mask);
165     }
166 }
167 
168 /* is_jmp field values */
169 #define DISAS_JUMP      DISAS_TARGET_0 /* only pc was modified dynamically */
170 #define DISAS_EXIT      DISAS_TARGET_1 /* cpu state was modified dynamically */
171 
172 #if defined(CONFIG_USER_ONLY)
173 #define IS_USER(s) 1
174 #else
175 #define IS_USER(s)   (!(s->base.tb->flags & TB_FLAGS_MSR_S))
176 #define SFC_INDEX(s) ((s->base.tb->flags & TB_FLAGS_SFC_S) ? \
177                       MMU_KERNEL_IDX : MMU_USER_IDX)
178 #define DFC_INDEX(s) ((s->base.tb->flags & TB_FLAGS_DFC_S) ? \
179                       MMU_KERNEL_IDX : MMU_USER_IDX)
180 #endif
181 
182 typedef void (*disas_proc)(CPUM68KState *env, DisasContext *s, uint16_t insn);
183 
184 #ifdef DEBUG_DISPATCH
185 #define DISAS_INSN(name)                                                \
186     static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
187                                   uint16_t insn);                       \
188     static void disas_##name(CPUM68KState *env, DisasContext *s,        \
189                              uint16_t insn)                             \
190     {                                                                   \
191         qemu_log("Dispatch " #name "\n");                               \
192         real_disas_##name(env, s, insn);                                \
193     }                                                                   \
194     static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
195                                   uint16_t insn)
196 #else
197 #define DISAS_INSN(name)                                                \
198     static void disas_##name(CPUM68KState *env, DisasContext *s,        \
199                              uint16_t insn)
200 #endif
201 
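/*
 * For each CC_OP_* value, the set of flag fields (CCF_*) that hold live
 * data while that cc_op is in effect.  set_cc_op() uses this table to
 * discard TCG values the new cc_op no longer needs; X and N are never
 * discarded.
 */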
202 static const uint8_t cc_op_live[CC_OP_NB] = {
203     [CC_OP_DYNAMIC] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
204     [CC_OP_FLAGS] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
205     [CC_OP_ADDB ... CC_OP_ADDL] = CCF_X | CCF_N | CCF_V,
206     [CC_OP_SUBB ... CC_OP_SUBL] = CCF_X | CCF_N | CCF_V,
207     [CC_OP_CMPB ... CC_OP_CMPL] = CCF_X | CCF_N | CCF_V,
208     [CC_OP_LOGIC] = CCF_X | CCF_N
209 };
210 
211 static void set_cc_op(DisasContext *s, CCOp op)
212 {
213     CCOp old_op = s->cc_op;
214     int dead;
215 
216     if (old_op == op) {
217         return;
218     }
219     s->cc_op = op;
220     s->cc_op_synced = 0;
221 
222     /*
223      * Discard CC computation that will no longer be used.
224      * Note that X and N are never dead.
225      */
226     dead = cc_op_live[old_op] & ~cc_op_live[op];
227     if (dead & CCF_C) {
228         tcg_gen_discard_i32(QREG_CC_C);
229     }
230     if (dead & CCF_Z) {
231         tcg_gen_discard_i32(QREG_CC_Z);
232     }
233     if (dead & CCF_V) {
234         tcg_gen_discard_i32(QREG_CC_V);
235     }
236 }
237 
238 /* Update the CPU env CC_OP state.  */
239 static void update_cc_op(DisasContext *s)
240 {
241     if (!s->cc_op_synced) {
242         s->cc_op_synced = 1;
243         tcg_gen_movi_i32(QREG_CC_OP, s->cc_op);
244     }
245 }
246 
247 /* Generate a jump to an immediate address.  */
248 static void gen_jmp_im(DisasContext *s, uint32_t dest)
249 {
250     update_cc_op(s);
251     tcg_gen_movi_i32(QREG_PC, dest);
252     s->base.is_jmp = DISAS_JUMP;
253 }
254 
255 /* Generate a jump to the address in qreg DEST.  */
256 static void gen_jmp(DisasContext *s, TCGv dest)
257 {
258     update_cc_op(s);
259     tcg_gen_mov_i32(QREG_PC, dest);
260     s->base.is_jmp = DISAS_JUMP;
261 }
262 
263 static void gen_raise_exception(int nr)
264 {
265     gen_helper_raise_exception(tcg_env, tcg_constant_i32(nr));
266 }
267 
268 static void gen_raise_exception_format2(DisasContext *s, int nr,
269                                         target_ulong this_pc)
270 {
271     /*
272      * Pass the address of the insn to the exception handler,
273      * for recording in the Format $2 (6-word) stack frame.
274      * Re-use mmu.ar for the purpose, since that's only valid
275      * after tlb_fill.
276      */
277     tcg_gen_st_i32(tcg_constant_i32(this_pc), tcg_env,
278                    offsetof(CPUM68KState, mmu.ar));
279     gen_raise_exception(nr);
280     s->base.is_jmp = DISAS_NORETURN;
281 }
282 
283 static void gen_exception(DisasContext *s, uint32_t dest, int nr)
284 {
285     update_cc_op(s);
286     tcg_gen_movi_i32(QREG_PC, dest);
287 
288     gen_raise_exception(nr);
289 
290     s->base.is_jmp = DISAS_NORETURN;
291 }
292 
293 static inline void gen_addr_fault(DisasContext *s)
294 {
295     gen_exception(s, s->base.pc_next, EXCP_ADDRESS);
296 }
297 
298 /*
299  * Generate a load from the specified address.  Narrow values are
300  * sign or zero extended to full register width, as requested.
301  */
302 static inline TCGv gen_load(DisasContext *s, int opsize, TCGv addr,
303                             int sign, int index)
304 {
305     TCGv tmp = tcg_temp_new_i32();
306 
307     switch (opsize) {
308     case OS_BYTE:
309     case OS_WORD:
310     case OS_LONG:
311         tcg_gen_qemu_ld_tl(tmp, addr, index,
312                            opsize | (sign ? MO_SIGN : 0) | MO_TE);
313         break;
314     default:
315         g_assert_not_reached();
316     }
317     return tmp;
318 }
319 
320 /* Generate a store.  */
321 static inline void gen_store(DisasContext *s, int opsize, TCGv addr, TCGv val,
322                              int index)
323 {
324     switch (opsize) {
325     case OS_BYTE:
326     case OS_WORD:
327     case OS_LONG:
328         tcg_gen_qemu_st_tl(val, addr, index, opsize | MO_TE);
329         break;
330     default:
331         g_assert_not_reached();
332     }
333 }
334 
335 typedef enum {
336     EA_STORE,
337     EA_LOADU,
338     EA_LOADS
339 } ea_what;
340 
341 /*
342  * Generate an unsigned load if WHAT is EA_LOADU, a signed load if WHAT is
343  * EA_LOADS, otherwise (EA_STORE) generate a store of VAL.
344  */
345 static TCGv gen_ldst(DisasContext *s, int opsize, TCGv addr, TCGv val,
346                      ea_what what, int index)
347 {
348     if (what == EA_STORE) {
349         gen_store(s, opsize, addr, val, index);
350         return store_dummy;
351     } else {
352         return gen_load(s, opsize, addr, what == EA_LOADS, index);
353     }
354 }
355 
356 /* Read a 16-bit immediate constant */
357 static inline uint16_t read_im16(CPUM68KState *env, DisasContext *s)
358 {
359     uint16_t im;
360     im = translator_lduw(env, &s->base, s->pc);
361     s->pc += 2;
362     return im;
363 }
364 
365 /* Read an 8-bit immediate constant */
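/*
 * Note: a byte immediate still occupies a full 16-bit extension word on
 * m68k; only the low 8 bits are significant, hence the word-sized read.
 */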
366 static inline uint8_t read_im8(CPUM68KState *env, DisasContext *s)
367 {
368     return read_im16(env, s);
369 }
370 
371 /* Read a 32-bit immediate constant.  */
372 static inline uint32_t read_im32(CPUM68KState *env, DisasContext *s)
373 {
374     uint32_t im;
375     im = read_im16(env, s) << 16;
376     im |= 0xffff & read_im16(env, s);
377     return im;
378 }
379 
380 /* Read a 64-bit immediate constant.  */
381 static inline uint64_t read_im64(CPUM68KState *env, DisasContext *s)
382 {
383     uint64_t im;
384     im = (uint64_t)read_im32(env, s) << 32;
385     im |= (uint64_t)read_im32(env, s);
386     return im;
387 }
388 
389 /* Calculate an address index.  */
390 static TCGv gen_addr_index(DisasContext *s, uint16_t ext, TCGv tmp)
391 {
392     TCGv add;
393     int scale;
394 
395     add = (ext & 0x8000) ? AREG(ext, 12) : DREG(ext, 12);
396     if ((ext & 0x800) == 0) {
397         tcg_gen_ext16s_i32(tmp, add);
398         add = tmp;
399     }
400     scale = (ext >> 9) & 3;
401     if (scale != 0) {
402         tcg_gen_shli_i32(tmp, add, scale);
403         add = tmp;
404     }
405     return add;
406 }
407 
408 /*
409  * Handle a base + index + displacement effective address.
410  * A NULL_QREG base means pc-relative.
411  */
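/*
 * Extension word layout assumed by the code below (brief format unless
 * bit 8 is set):
 *   bit 15     D/A   - index register is a data (0) or address (1) register
 *   bits 14-12       - index register number
 *   bit 11     W/L   - index is a sign-extended word (0) or a long (1)
 *   bits 10-9  SCALE - index is shifted left by 0..3
 *   bit 8            - 0: brief format, bits 7-0 hold an 8-bit displacement
 *                      1: full format, with BS (bit 7), IS (bit 6),
 *                         BD SIZE (bits 5-4) and I/IS (bits 2-0) fields
 */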
412 static TCGv gen_lea_indexed(CPUM68KState *env, DisasContext *s, TCGv base)
413 {
414     uint32_t offset;
415     uint16_t ext;
416     TCGv add;
417     TCGv tmp;
418     uint32_t bd, od;
419 
420     offset = s->pc;
421     ext = read_im16(env, s);
422 
423     if ((ext & 0x800) == 0 && !m68k_feature(s->env, M68K_FEATURE_WORD_INDEX))
424         return NULL_QREG;
425 
426     if (m68k_feature(s->env, M68K_FEATURE_M68K) &&
427         !m68k_feature(s->env, M68K_FEATURE_SCALED_INDEX)) {
428         ext &= ~(3 << 9);
429     }
430 
431     if (ext & 0x100) {
432         /* full extension word format */
433         if (!m68k_feature(s->env, M68K_FEATURE_EXT_FULL))
434             return NULL_QREG;
435 
436         if ((ext & 0x30) > 0x10) {
437             /* base displacement */
438             if ((ext & 0x30) == 0x20) {
439                 bd = (int16_t)read_im16(env, s);
440             } else {
441                 bd = read_im32(env, s);
442             }
443         } else {
444             bd = 0;
445         }
446         tmp = tcg_temp_new();
447         if ((ext & 0x44) == 0) {
448             /* pre-index */
449             add = gen_addr_index(s, ext, tmp);
450         } else {
451             add = NULL_QREG;
452         }
453         if ((ext & 0x80) == 0) {
454             /* base not suppressed */
455             if (IS_NULL_QREG(base)) {
456                 base = tcg_constant_i32(offset + bd);
457                 bd = 0;
458             }
459             if (!IS_NULL_QREG(add)) {
460                 tcg_gen_add_i32(tmp, add, base);
461                 add = tmp;
462             } else {
463                 add = base;
464             }
465         }
466         if (!IS_NULL_QREG(add)) {
467             if (bd != 0) {
468                 tcg_gen_addi_i32(tmp, add, bd);
469                 add = tmp;
470             }
471         } else {
472             add = tcg_constant_i32(bd);
473         }
474         if ((ext & 3) != 0) {
475             /* memory indirect */
476             base = gen_load(s, OS_LONG, add, 0, IS_USER(s));
477             if ((ext & 0x44) == 4) {
478                 add = gen_addr_index(s, ext, tmp);
479                 tcg_gen_add_i32(tmp, add, base);
480                 add = tmp;
481             } else {
482                 add = base;
483             }
484             if ((ext & 3) > 1) {
485                 /* outer displacement */
486                 if ((ext & 3) == 2) {
487                     od = (int16_t)read_im16(env, s);
488                 } else {
489                     od = read_im32(env, s);
490                 }
491             } else {
492                 od = 0;
493             }
494             if (od != 0) {
495                 tcg_gen_addi_i32(tmp, add, od);
496                 add = tmp;
497             }
498         }
499     } else {
500         /* brief extension word format */
501         tmp = tcg_temp_new();
502         add = gen_addr_index(s, ext, tmp);
503         if (!IS_NULL_QREG(base)) {
504             tcg_gen_add_i32(tmp, add, base);
505             if ((int8_t)ext)
506                 tcg_gen_addi_i32(tmp, tmp, (int8_t)ext);
507         } else {
508             tcg_gen_addi_i32(tmp, add, offset + (int8_t)ext);
509         }
510         add = tmp;
511     }
512     return add;
513 }
514 
515 /* Sign or zero extend a value.  */
516 
517 static inline void gen_ext(TCGv res, TCGv val, int opsize, int sign)
518 {
519     switch (opsize) {
520     case OS_BYTE:
521     case OS_WORD:
522     case OS_LONG:
523         tcg_gen_ext_i32(res, val, opsize | (sign ? MO_SIGN : 0));
524         break;
525     default:
526         g_assert_not_reached();
527     }
528 }
529 
530 /* Evaluate all the CC flags.  */
531 
532 static void gen_flush_flags(DisasContext *s)
533 {
534     TCGv t0, t1;
535 
536     switch (s->cc_op) {
537     case CC_OP_FLAGS:
538         return;
539 
540     case CC_OP_ADDB:
541     case CC_OP_ADDW:
542     case CC_OP_ADDL:
543         tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
544         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
545         /* Compute signed overflow for addition.  */
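        /*
         * CC_N holds the result and CC_V the second operand, so the first
         * operand is recovered as N - V.  The sign bit of
         * (N ^ V) & ~(V ^ (N - V)) is set exactly when both operands had
         * the same sign and the result's sign differs, i.e. on overflow.
         */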
546         t0 = tcg_temp_new();
547         t1 = tcg_temp_new();
548         tcg_gen_sub_i32(t0, QREG_CC_N, QREG_CC_V);
549         gen_ext(t0, t0, s->cc_op - CC_OP_ADDB, 1);
550         tcg_gen_xor_i32(t1, QREG_CC_N, QREG_CC_V);
551         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
552         tcg_gen_andc_i32(QREG_CC_V, t1, QREG_CC_V);
553         break;
554 
555     case CC_OP_SUBB:
556     case CC_OP_SUBW:
557     case CC_OP_SUBL:
558         tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
559         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
560         /* Compute signed overflow for subtraction.  */
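        /*
         * As above, but with CC_N = dest - src and CC_V = src: dest is
         * recovered as N + V, and overflow is (V ^ (N + V)) & ((N + V) ^ N),
         * i.e. the operands differed in sign and the result's sign differs
         * from dest.
         */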
561         t0 = tcg_temp_new();
562         t1 = tcg_temp_new();
563         tcg_gen_add_i32(t0, QREG_CC_N, QREG_CC_V);
564         gen_ext(t0, t0, s->cc_op - CC_OP_SUBB, 1);
565         tcg_gen_xor_i32(t1, QREG_CC_N, t0);
566         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
567         tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t1);
568         break;
569 
570     case CC_OP_CMPB:
571     case CC_OP_CMPW:
572     case CC_OP_CMPL:
573         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_C, QREG_CC_N, QREG_CC_V);
574         tcg_gen_sub_i32(QREG_CC_Z, QREG_CC_N, QREG_CC_V);
575         gen_ext(QREG_CC_Z, QREG_CC_Z, s->cc_op - CC_OP_CMPB, 1);
576         /* Compute signed overflow for subtraction.  */
577         t0 = tcg_temp_new();
578         tcg_gen_xor_i32(t0, QREG_CC_Z, QREG_CC_N);
579         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, QREG_CC_N);
580         tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t0);
581         tcg_gen_mov_i32(QREG_CC_N, QREG_CC_Z);
582         break;
583 
584     case CC_OP_LOGIC:
585         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
586         tcg_gen_movi_i32(QREG_CC_C, 0);
587         tcg_gen_movi_i32(QREG_CC_V, 0);
588         break;
589 
590     case CC_OP_DYNAMIC:
591         gen_helper_flush_flags(tcg_env, QREG_CC_OP);
592         s->cc_op_synced = 1;
593         break;
594 
595     default:
596         gen_helper_flush_flags(tcg_env, tcg_constant_i32(s->cc_op));
597         s->cc_op_synced = 1;
598         break;
599     }
600 
601     /* Note that the flush_flags helper also sets env->cc_op to CC_OP_FLAGS.  */
602     s->cc_op = CC_OP_FLAGS;
603 }
604 
605 static inline TCGv gen_extend(DisasContext *s, TCGv val, int opsize, int sign)
606 {
607     TCGv tmp;
608 
609     if (opsize == OS_LONG) {
610         tmp = val;
611     } else {
612         tmp = tcg_temp_new();
613         gen_ext(tmp, val, opsize, sign);
614     }
615 
616     return tmp;
617 }
618 
619 static void gen_logic_cc(DisasContext *s, TCGv val, int opsize)
620 {
621     gen_ext(QREG_CC_N, val, opsize, 1);
622     set_cc_op(s, CC_OP_LOGIC);
623 }
624 
625 static void gen_update_cc_cmp(DisasContext *s, TCGv dest, TCGv src, int opsize)
626 {
627     tcg_gen_mov_i32(QREG_CC_N, dest);
628     tcg_gen_mov_i32(QREG_CC_V, src);
629     set_cc_op(s, CC_OP_CMPB + opsize);
630 }
631 
632 static void gen_update_cc_add(TCGv dest, TCGv src, int opsize)
633 {
634     gen_ext(QREG_CC_N, dest, opsize, 1);
635     tcg_gen_mov_i32(QREG_CC_V, src);
636 }
637 
638 static inline int opsize_bytes(int opsize)
639 {
640     switch (opsize) {
641     case OS_BYTE: return 1;
642     case OS_WORD: return 2;
643     case OS_LONG: return 4;
644     case OS_SINGLE: return 4;
645     case OS_DOUBLE: return 8;
646     case OS_EXTENDED: return 12;
647     case OS_PACKED: return 12;
648     default:
649         g_assert_not_reached();
650     }
651 }
652 
653 static inline int insn_opsize(int insn)
654 {
655     switch ((insn >> 6) & 3) {
656     case 0: return OS_BYTE;
657     case 1: return OS_WORD;
658     case 2: return OS_LONG;
659     default:
660         g_assert_not_reached();
661     }
662 }
663 
664 static inline int ext_opsize(int ext, int pos)
665 {
666     switch ((ext >> pos) & 7) {
667     case 0: return OS_LONG;
668     case 1: return OS_SINGLE;
669     case 2: return OS_EXTENDED;
670     case 3: return OS_PACKED;
671     case 4: return OS_WORD;
672     case 5: return OS_DOUBLE;
673     case 6: return OS_BYTE;
674     default:
675         g_assert_not_reached();
676     }
677 }
678 
679 /*
680  * Assign value to a register.  If the width is less than the register width
681  * only the low part of the register is set.
682  */
683 static void gen_partset_reg(int opsize, TCGv reg, TCGv val)
684 {
685     switch (opsize) {
686     case OS_BYTE:
687         tcg_gen_deposit_i32(reg, reg, val, 0, 8);
688         break;
689     case OS_WORD:
690         tcg_gen_deposit_i32(reg, reg, val, 0, 16);
691         break;
692     case OS_LONG:
693     case OS_SINGLE:
694         tcg_gen_mov_i32(reg, val);
695         break;
696     default:
697         g_assert_not_reached();
698     }
699 }
700 
701 /*
702  * Generate code for an "effective address".  Does not adjust the base
703  * register for autoincrement addressing modes.
704  */
705 static TCGv gen_lea_mode(CPUM68KState *env, DisasContext *s,
706                          int mode, int reg0, int opsize)
707 {
708     TCGv reg;
709     TCGv tmp;
710     uint16_t ext;
711     uint32_t offset;
712 
713     switch (mode) {
714     case 0: /* Data register direct.  */
715     case 1: /* Address register direct.  */
716         return NULL_QREG;
717     case 3: /* Indirect postincrement.  */
718         if (opsize == OS_UNSIZED) {
719             return NULL_QREG;
720         }
721         /* fallthru */
722     case 2: /* Indirect register */
723         return get_areg(s, reg0);
724     case 4: /* Indirect predecrement.  */
725         if (opsize == OS_UNSIZED) {
726             return NULL_QREG;
727         }
728         reg = get_areg(s, reg0);
729         tmp = tcg_temp_new();
730         if (reg0 == 7 && opsize == OS_BYTE &&
731             m68k_feature(s->env, M68K_FEATURE_M68K)) {
732             tcg_gen_subi_i32(tmp, reg, 2);
733         } else {
734             tcg_gen_subi_i32(tmp, reg, opsize_bytes(opsize));
735         }
736         return tmp;
737     case 5: /* Indirect displacement.  */
738         reg = get_areg(s, reg0);
739         tmp = tcg_temp_new();
740         ext = read_im16(env, s);
741         tcg_gen_addi_i32(tmp, reg, (int16_t)ext);
742         return tmp;
743     case 6: /* Indirect index + displacement.  */
744         reg = get_areg(s, reg0);
745         return gen_lea_indexed(env, s, reg);
746     case 7: /* Other */
747         switch (reg0) {
748         case 0: /* Absolute short.  */
749             offset = (int16_t)read_im16(env, s);
750             return tcg_constant_i32(offset);
751         case 1: /* Absolute long.  */
752             offset = read_im32(env, s);
753             return tcg_constant_i32(offset);
754         case 2: /* pc displacement  */
755             offset = s->pc;
756             offset += (int16_t)read_im16(env, s);
757             return tcg_constant_i32(offset);
758         case 3: /* pc index+displacement.  */
759             return gen_lea_indexed(env, s, NULL_QREG);
760         case 4: /* Immediate.  */
761         default:
762             return NULL_QREG;
763         }
764     }
765     /* Should never happen.  */
766     return NULL_QREG;
767 }
768 
769 static TCGv gen_lea(CPUM68KState *env, DisasContext *s, uint16_t insn,
770                     int opsize)
771 {
772     int mode = extract32(insn, 3, 3);
773     int reg0 = REG(insn, 0);
774     return gen_lea_mode(env, s, mode, reg0, opsize);
775 }
776 
777 /*
778  * Generate code to load/store a value from/into an EA.  If WHAT is EA_STORE
779  * this is a write of VAL; otherwise it is a read (EA_LOADS sign extends,
780  * EA_LOADU zero extends).  ADDRP is non-null for read-modify-write operands.
781  */
782 static TCGv gen_ea_mode(CPUM68KState *env, DisasContext *s, int mode, int reg0,
783                         int opsize, TCGv val, TCGv *addrp, ea_what what,
784                         int index)
785 {
786     TCGv reg, tmp, result;
787     int32_t offset;
788 
789     switch (mode) {
790     case 0: /* Data register direct.  */
791         reg = cpu_dregs[reg0];
792         if (what == EA_STORE) {
793             gen_partset_reg(opsize, reg, val);
794             return store_dummy;
795         } else {
796             return gen_extend(s, reg, opsize, what == EA_LOADS);
797         }
798     case 1: /* Address register direct.  */
799         reg = get_areg(s, reg0);
800         if (what == EA_STORE) {
801             tcg_gen_mov_i32(reg, val);
802             return store_dummy;
803         } else {
804             return gen_extend(s, reg, opsize, what == EA_LOADS);
805         }
806     case 2: /* Indirect register */
807         reg = get_areg(s, reg0);
808         return gen_ldst(s, opsize, reg, val, what, index);
809     case 3: /* Indirect postincrement.  */
810         reg = get_areg(s, reg0);
811         result = gen_ldst(s, opsize, reg, val, what, index);
812         if (what == EA_STORE || !addrp) {
813             tmp = tcg_temp_new();
814             if (reg0 == 7 && opsize == OS_BYTE &&
815                 m68k_feature(s->env, M68K_FEATURE_M68K)) {
816                 tcg_gen_addi_i32(tmp, reg, 2);
817             } else {
818                 tcg_gen_addi_i32(tmp, reg, opsize_bytes(opsize));
819             }
820             delay_set_areg(s, reg0, tmp, true);
821         }
822         return result;
823     case 4: /* Indirect predecrement.  */
824         if (addrp && what == EA_STORE) {
825             tmp = *addrp;
826         } else {
827             tmp = gen_lea_mode(env, s, mode, reg0, opsize);
828             if (IS_NULL_QREG(tmp)) {
829                 return tmp;
830             }
831             if (addrp) {
832                 *addrp = tmp;
833             }
834         }
835         result = gen_ldst(s, opsize, tmp, val, what, index);
836         if (what == EA_STORE || !addrp) {
837             delay_set_areg(s, reg0, tmp, false);
838         }
839         return result;
840     case 5: /* Indirect displacement.  */
841     case 6: /* Indirect index + displacement.  */
842     do_indirect:
843         if (addrp && what == EA_STORE) {
844             tmp = *addrp;
845         } else {
846             tmp = gen_lea_mode(env, s, mode, reg0, opsize);
847             if (IS_NULL_QREG(tmp)) {
848                 return tmp;
849             }
850             if (addrp) {
851                 *addrp = tmp;
852             }
853         }
854         return gen_ldst(s, opsize, tmp, val, what, index);
855     case 7: /* Other */
856         switch (reg0) {
857         case 0: /* Absolute short.  */
858         case 1: /* Absolute long.  */
859         case 2: /* pc displacement  */
860         case 3: /* pc index+displacement.  */
861             goto do_indirect;
862         case 4: /* Immediate.  */
863             /* Sign extend values for consistency.  */
864             switch (opsize) {
865             case OS_BYTE:
866                 if (what == EA_LOADS) {
867                     offset = (int8_t)read_im8(env, s);
868                 } else {
869                     offset = read_im8(env, s);
870                 }
871                 break;
872             case OS_WORD:
873                 if (what == EA_LOADS) {
874                     offset = (int16_t)read_im16(env, s);
875                 } else {
876                     offset = read_im16(env, s);
877                 }
878                 break;
879             case OS_LONG:
880                 offset = read_im32(env, s);
881                 break;
882             default:
883                 g_assert_not_reached();
884             }
885             return tcg_constant_i32(offset);
886         default:
887             return NULL_QREG;
888         }
889     }
890     /* Should never happen.  */
891     return NULL_QREG;
892 }
893 
894 static TCGv gen_ea(CPUM68KState *env, DisasContext *s, uint16_t insn,
895                    int opsize, TCGv val, TCGv *addrp, ea_what what, int index)
896 {
897     int mode = extract32(insn, 3, 3);
898     int reg0 = REG(insn, 0);
899     return gen_ea_mode(env, s, mode, reg0, opsize, val, addrp, what, index);
900 }
901 
902 static TCGv_ptr gen_fp_ptr(int freg)
903 {
904     TCGv_ptr fp = tcg_temp_new_ptr();
905     tcg_gen_addi_ptr(fp, tcg_env, offsetof(CPUM68KState, fregs[freg]));
906     return fp;
907 }
908 
909 static TCGv_ptr gen_fp_result_ptr(void)
910 {
911     TCGv_ptr fp = tcg_temp_new_ptr();
912     tcg_gen_addi_ptr(fp, tcg_env, offsetof(CPUM68KState, fp_result));
913     return fp;
914 }
915 
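/*
 * An FPReg holds an 80-bit extended-precision value: l.upper is the 16-bit
 * sign/exponent word and l.lower the 64-bit mantissa, which is why the
 * copy below is done piecewise.
 */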
916 static void gen_fp_move(TCGv_ptr dest, TCGv_ptr src)
917 {
918     TCGv t32;
919     TCGv_i64 t64;
920 
921     t32 = tcg_temp_new();
922     tcg_gen_ld16u_i32(t32, src, offsetof(FPReg, l.upper));
923     tcg_gen_st16_i32(t32, dest, offsetof(FPReg, l.upper));
924 
925     t64 = tcg_temp_new_i64();
926     tcg_gen_ld_i64(t64, src, offsetof(FPReg, l.lower));
927     tcg_gen_st_i64(t64, dest, offsetof(FPReg, l.lower));
928 }
929 
930 static void gen_load_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
931                         int index)
932 {
933     TCGv tmp;
934     TCGv_i64 t64;
935 
936     t64 = tcg_temp_new_i64();
937     tmp = tcg_temp_new();
938     switch (opsize) {
939     case OS_BYTE:
940     case OS_WORD:
941     case OS_LONG:
942         tcg_gen_qemu_ld_tl(tmp, addr, index, opsize | MO_SIGN | MO_TE);
943         gen_helper_exts32(tcg_env, fp, tmp);
944         break;
945     case OS_SINGLE:
946         tcg_gen_qemu_ld_tl(tmp, addr, index, MO_TEUL);
947         gen_helper_extf32(tcg_env, fp, tmp);
948         break;
949     case OS_DOUBLE:
950         tcg_gen_qemu_ld_i64(t64, addr, index, MO_TEUQ);
951         gen_helper_extf64(tcg_env, fp, t64);
952         break;
953     case OS_EXTENDED:
954         if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
955             gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
956             break;
957         }
958         tcg_gen_qemu_ld_i32(tmp, addr, index, MO_TEUL);
959         tcg_gen_shri_i32(tmp, tmp, 16);
960         tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
961         tcg_gen_addi_i32(tmp, addr, 4);
962         tcg_gen_qemu_ld_i64(t64, tmp, index, MO_TEUQ);
963         tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
964         break;
965     case OS_PACKED:
966         /*
967          * unimplemented data type on 68040/ColdFire
968          * FIXME if needed for another FPU
969          */
970         gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
971         break;
972     default:
973         g_assert_not_reached();
974     }
975 }
976 
977 static void gen_store_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
978                          int index)
979 {
980     TCGv tmp;
981     TCGv_i64 t64;
982 
983     t64 = tcg_temp_new_i64();
984     tmp = tcg_temp_new();
985     switch (opsize) {
986     case OS_BYTE:
987     case OS_WORD:
988     case OS_LONG:
989         gen_helper_reds32(tmp, tcg_env, fp);
990         tcg_gen_qemu_st_tl(tmp, addr, index, opsize | MO_TE);
991         break;
992     case OS_SINGLE:
993         gen_helper_redf32(tmp, tcg_env, fp);
994         tcg_gen_qemu_st_tl(tmp, addr, index, MO_TEUL);
995         break;
996     case OS_DOUBLE:
997         gen_helper_redf64(t64, tcg_env, fp);
998         tcg_gen_qemu_st_i64(t64, addr, index, MO_TEUQ);
999         break;
1000     case OS_EXTENDED:
1001         if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1002             gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1003             break;
1004         }
1005         tcg_gen_ld16u_i32(tmp, fp, offsetof(FPReg, l.upper));
1006         tcg_gen_shli_i32(tmp, tmp, 16);
1007         tcg_gen_qemu_st_i32(tmp, addr, index, MO_TEUL);
1008         tcg_gen_addi_i32(tmp, addr, 4);
1009         tcg_gen_ld_i64(t64, fp, offsetof(FPReg, l.lower));
1010         tcg_gen_qemu_st_i64(t64, tmp, index, MO_TEUQ);
1011         break;
1012     case OS_PACKED:
1013         /*
1014          * unimplemented data type on 68040/ColdFire
1015          * FIXME if needed for another FPU
1016          */
1017         gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1018         break;
1019     default:
1020         g_assert_not_reached();
1021     }
1022 }
1023 
1024 static void gen_ldst_fp(DisasContext *s, int opsize, TCGv addr,
1025                         TCGv_ptr fp, ea_what what, int index)
1026 {
1027     if (what == EA_STORE) {
1028         gen_store_fp(s, opsize, addr, fp, index);
1029     } else {
1030         gen_load_fp(s, opsize, addr, fp, index);
1031     }
1032 }
1033 
1034 static int gen_ea_mode_fp(CPUM68KState *env, DisasContext *s, int mode,
1035                           int reg0, int opsize, TCGv_ptr fp, ea_what what,
1036                           int index)
1037 {
1038     TCGv reg, addr, tmp;
1039     TCGv_i64 t64;
1040 
1041     switch (mode) {
1042     case 0: /* Data register direct.  */
1043         reg = cpu_dregs[reg0];
1044         if (what == EA_STORE) {
1045             switch (opsize) {
1046             case OS_BYTE:
1047             case OS_WORD:
1048             case OS_LONG:
1049                 gen_helper_reds32(reg, tcg_env, fp);
1050                 break;
1051             case OS_SINGLE:
1052                 gen_helper_redf32(reg, tcg_env, fp);
1053                 break;
1054             default:
1055                 g_assert_not_reached();
1056             }
1057         } else {
1058             tmp = tcg_temp_new();
1059             switch (opsize) {
1060             case OS_BYTE:
1061             case OS_WORD:
1062             case OS_LONG:
1063                 tcg_gen_ext_i32(tmp, reg, opsize | MO_SIGN);
1064                 gen_helper_exts32(tcg_env, fp, tmp);
1065                 break;
1066             case OS_SINGLE:
1067                 gen_helper_extf32(tcg_env, fp, reg);
1068                 break;
1069             default:
1070                 g_assert_not_reached();
1071             }
1072         }
1073         return 0;
1074     case 1: /* Address register direct.  */
1075         return -1;
1076     case 2: /* Indirect register */
1077         addr = get_areg(s, reg0);
1078         gen_ldst_fp(s, opsize, addr, fp, what, index);
1079         return 0;
1080     case 3: /* Indirect postincrement.  */
1081         addr = cpu_aregs[reg0];
1082         gen_ldst_fp(s, opsize, addr, fp, what, index);
1083         tcg_gen_addi_i32(addr, addr, opsize_bytes(opsize));
1084         return 0;
1085     case 4: /* Indirect predecrement.  */
1086         addr = gen_lea_mode(env, s, mode, reg0, opsize);
1087         if (IS_NULL_QREG(addr)) {
1088             return -1;
1089         }
1090         gen_ldst_fp(s, opsize, addr, fp, what, index);
1091         tcg_gen_mov_i32(cpu_aregs[reg0], addr);
1092         return 0;
1093     case 5: /* Indirect displacement.  */
1094     case 6: /* Indirect index + displacement.  */
1095     do_indirect:
1096         addr = gen_lea_mode(env, s, mode, reg0, opsize);
1097         if (IS_NULL_QREG(addr)) {
1098             return -1;
1099         }
1100         gen_ldst_fp(s, opsize, addr, fp, what, index);
1101         return 0;
1102     case 7: /* Other */
1103         switch (reg0) {
1104         case 0: /* Absolute short.  */
1105         case 1: /* Absolute long.  */
1106         case 2: /* pc displacement  */
1107         case 3: /* pc index+displacement.  */
1108             goto do_indirect;
1109         case 4: /* Immediate.  */
1110             if (what == EA_STORE) {
1111                 return -1;
1112             }
1113             switch (opsize) {
1114             case OS_BYTE:
1115                 tmp = tcg_constant_i32((int8_t)read_im8(env, s));
1116                 gen_helper_exts32(tcg_env, fp, tmp);
1117                 break;
1118             case OS_WORD:
1119                 tmp = tcg_constant_i32((int16_t)read_im16(env, s));
1120                 gen_helper_exts32(tcg_env, fp, tmp);
1121                 break;
1122             case OS_LONG:
1123                 tmp = tcg_constant_i32(read_im32(env, s));
1124                 gen_helper_exts32(tcg_env, fp, tmp);
1125                 break;
1126             case OS_SINGLE:
1127                 tmp = tcg_constant_i32(read_im32(env, s));
1128                 gen_helper_extf32(tcg_env, fp, tmp);
1129                 break;
1130             case OS_DOUBLE:
1131                 t64 = tcg_constant_i64(read_im64(env, s));
1132                 gen_helper_extf64(tcg_env, fp, t64);
1133                 break;
1134             case OS_EXTENDED:
1135                 if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1136                     gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1137                     break;
1138                 }
1139                 tmp = tcg_constant_i32(read_im32(env, s) >> 16);
1140                 tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
1141                 t64 = tcg_constant_i64(read_im64(env, s));
1142                 tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
1143                 break;
1144             case OS_PACKED:
1145                 /*
1146                  * unimplemented data type on 68040/ColdFire
1147                  * FIXME if needed for another FPU
1148                  */
1149                 gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1150                 break;
1151             default:
1152                 g_assert_not_reached();
1153             }
1154             return 0;
1155         default:
1156             return -1;
1157         }
1158     }
1159     return -1;
1160 }
1161 
1162 static int gen_ea_fp(CPUM68KState *env, DisasContext *s, uint16_t insn,
1163                        int opsize, TCGv_ptr fp, ea_what what, int index)
1164 {
1165     int mode = extract32(insn, 3, 3);
1166     int reg0 = REG(insn, 0);
1167     return gen_ea_mode_fp(env, s, mode, reg0, opsize, fp, what, index);
1168 }
1169 
1170 typedef struct {
1171     TCGCond tcond;
1172     TCGv v1;
1173     TCGv v2;
1174 } DisasCompare;
1175 
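/*
 * Translate an m68k condition code (0..15: T F HI LS CC CS NE EQ VC VS
 * PL MI GE LT GT LE) into a host comparison: the condition holds when
 * "v1 tcond v2" is true.  Each case below computes the test for the odd
 * member of a condition pair; the even member is obtained by inverting
 * tcond at the "done" label.
 */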
1176 static void gen_cc_cond(DisasCompare *c, DisasContext *s, int cond)
1177 {
1178     TCGv tmp, tmp2;
1179     TCGCond tcond;
1180     CCOp op = s->cc_op;
1181 
1182     /* The CC_OP_CMP form can handle most normal comparisons directly.  */
1183     if (op == CC_OP_CMPB || op == CC_OP_CMPW || op == CC_OP_CMPL) {
1184         c->v1 = QREG_CC_N;
1185         c->v2 = QREG_CC_V;
1186         switch (cond) {
1187         case 2: /* HI */
1188         case 3: /* LS */
1189             tcond = TCG_COND_LEU;
1190             goto done;
1191         case 4: /* CC */
1192         case 5: /* CS */
1193             tcond = TCG_COND_LTU;
1194             goto done;
1195         case 6: /* NE */
1196         case 7: /* EQ */
1197             tcond = TCG_COND_EQ;
1198             goto done;
1199         case 10: /* PL */
1200         case 11: /* MI */
1201             c->v2 = tcg_constant_i32(0);
1202             c->v1 = tmp = tcg_temp_new();
1203             tcg_gen_sub_i32(tmp, QREG_CC_N, QREG_CC_V);
1204             gen_ext(tmp, tmp, op - CC_OP_CMPB, 1);
1205             /* fallthru */
1206         case 12: /* GE */
1207         case 13: /* LT */
1208             tcond = TCG_COND_LT;
1209             goto done;
1210         case 14: /* GT */
1211         case 15: /* LE */
1212             tcond = TCG_COND_LE;
1213             goto done;
1214         }
1215     }
1216 
1217     c->v2 = tcg_constant_i32(0);
1218 
1219     switch (cond) {
1220     case 0: /* T */
1221     case 1: /* F */
1222         c->v1 = c->v2;
1223         tcond = TCG_COND_NEVER;
1224         goto done;
1225     case 14: /* GT (!(Z || (N ^ V))) */
1226     case 15: /* LE (Z || (N ^ V)) */
1227         /*
1228          * Logic operations clear V, which simplifies LE to (Z || N),
1229          * and since Z and N are co-located, this becomes a normal
1230          * comparison vs N.
1231          */
1232         if (op == CC_OP_LOGIC) {
1233             c->v1 = QREG_CC_N;
1234             tcond = TCG_COND_LE;
1235             goto done;
1236         }
1237         break;
1238     case 12: /* GE (!(N ^ V)) */
1239     case 13: /* LT (N ^ V) */
1240         /* Logic operations clear V, which simplifies this to N.  */
1241         if (op != CC_OP_LOGIC) {
1242             break;
1243         }
1244         /* fallthru */
1245     case 10: /* PL (!N) */
1246     case 11: /* MI (N) */
1247         /* Several cases represent N normally.  */
1248         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1249             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
1250             op == CC_OP_LOGIC) {
1251             c->v1 = QREG_CC_N;
1252             tcond = TCG_COND_LT;
1253             goto done;
1254         }
1255         break;
1256     case 6: /* NE (!Z) */
1257     case 7: /* EQ (Z) */
1258         /* Some cases fold Z into N.  */
1259         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1260             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
1261             op == CC_OP_LOGIC) {
1262             tcond = TCG_COND_EQ;
1263             c->v1 = QREG_CC_N;
1264             goto done;
1265         }
1266         break;
1267     case 4: /* CC (!C) */
1268     case 5: /* CS (C) */
1269         /* Some cases fold C into X.  */
1270         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1271             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL) {
1272             tcond = TCG_COND_NE;
1273             c->v1 = QREG_CC_X;
1274             goto done;
1275         }
1276         /* fallthru */
1277     case 8: /* VC (!V) */
1278     case 9: /* VS (V) */
1279         /* Logic operations clear V and C.  */
1280         if (op == CC_OP_LOGIC) {
1281             tcond = TCG_COND_NEVER;
1282             c->v1 = c->v2;
1283             goto done;
1284         }
1285         break;
1286     }
1287 
1288     /* Otherwise, flush flag state to CC_OP_FLAGS.  */
1289     gen_flush_flags(s);
1290 
1291     switch (cond) {
1292     case 0: /* T */
1293     case 1: /* F */
1294     default:
1295         /* Invalid, or handled above.  */
1296         abort();
1297     case 2: /* HI (!C && !Z) -> !(C || Z)*/
1298     case 3: /* LS (C || Z) */
1299         c->v1 = tmp = tcg_temp_new();
1300         tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
1301         tcg_gen_or_i32(tmp, tmp, QREG_CC_C);
1302         tcond = TCG_COND_NE;
1303         break;
1304     case 4: /* CC (!C) */
1305     case 5: /* CS (C) */
1306         c->v1 = QREG_CC_C;
1307         tcond = TCG_COND_NE;
1308         break;
1309     case 6: /* NE (!Z) */
1310     case 7: /* EQ (Z) */
1311         c->v1 = QREG_CC_Z;
1312         tcond = TCG_COND_EQ;
1313         break;
1314     case 8: /* VC (!V) */
1315     case 9: /* VS (V) */
1316         c->v1 = QREG_CC_V;
1317         tcond = TCG_COND_LT;
1318         break;
1319     case 10: /* PL (!N) */
1320     case 11: /* MI (N) */
1321         c->v1 = QREG_CC_N;
1322         tcond = TCG_COND_LT;
1323         break;
1324     case 12: /* GE (!(N ^ V)) */
1325     case 13: /* LT (N ^ V) */
1326         c->v1 = tmp = tcg_temp_new();
1327         tcg_gen_xor_i32(tmp, QREG_CC_N, QREG_CC_V);
1328         tcond = TCG_COND_LT;
1329         break;
1330     case 14: /* GT (!(Z || (N ^ V))) */
1331     case 15: /* LE (Z || (N ^ V)) */
1332         c->v1 = tmp = tcg_temp_new();
1333         tcg_gen_negsetcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
1334         tmp2 = tcg_temp_new();
1335         tcg_gen_xor_i32(tmp2, QREG_CC_N, QREG_CC_V);
1336         tcg_gen_or_i32(tmp, tmp, tmp2);
1337         tcond = TCG_COND_LT;
1338         break;
1339     }
1340 
1341  done:
1342     if ((cond & 1) == 0) {
1343         tcond = tcg_invert_cond(tcond);
1344     }
1345     c->tcond = tcond;
1346 }
1347 
1348 static void gen_jmpcc(DisasContext *s, int cond, TCGLabel *l1)
1349 {
1350   DisasCompare c;
1351 
1352   gen_cc_cond(&c, s, cond);
1353   update_cc_op(s);
1354   tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
1355 }
1356 
1357 /* Force a TB lookup after an instruction that changes the CPU state.  */
1358 static void gen_exit_tb(DisasContext *s)
1359 {
1360     update_cc_op(s);
1361     tcg_gen_movi_i32(QREG_PC, s->pc);
1362     s->base.is_jmp = DISAS_EXIT;
1363 }
1364 
1365 #define SRC_EA(env, result, opsize, op_sign, addrp) do {                \
1366         result = gen_ea(env, s, insn, opsize, NULL_QREG, addrp,         \
1367                         op_sign ? EA_LOADS : EA_LOADU, IS_USER(s));     \
1368         if (IS_NULL_QREG(result)) {                                     \
1369             gen_addr_fault(s);                                          \
1370             return;                                                     \
1371         }                                                               \
1372     } while (0)
1373 
1374 #define DEST_EA(env, insn, opsize, val, addrp) do {                     \
1375         TCGv ea_result = gen_ea(env, s, insn, opsize, val, addrp,       \
1376                                 EA_STORE, IS_USER(s));                  \
1377         if (IS_NULL_QREG(ea_result)) {                                  \
1378             gen_addr_fault(s);                                          \
1379             return;                                                     \
1380         }                                                               \
1381     } while (0)
1382 
1383 /* Generate a jump to an immediate address.  */
1384 static void gen_jmp_tb(DisasContext *s, int n, target_ulong dest,
1385                        target_ulong src)
1386 {
1387     if (unlikely(s->ss_active)) {
1388         update_cc_op(s);
1389         tcg_gen_movi_i32(QREG_PC, dest);
1390         gen_raise_exception_format2(s, EXCP_TRACE, src);
1391     } else if (translator_use_goto_tb(&s->base, dest)) {
1392         tcg_gen_goto_tb(n);
1393         tcg_gen_movi_i32(QREG_PC, dest);
1394         tcg_gen_exit_tb(s->base.tb, n);
1395     } else {
1396         gen_jmp_im(s, dest);
1397         tcg_gen_exit_tb(NULL, 0);
1398     }
1399     s->base.is_jmp = DISAS_NORETURN;
1400 }
1401 
1402 #ifndef CONFIG_USER_ONLY
1403 static bool semihosting_test(DisasContext *s)
1404 {
1405     uint32_t test;
1406 
1407     if (!semihosting_enabled(IS_USER(s))) {
1408         return false;
1409     }
1410 
1411     /*
1412      * "The semihosting instruction is immediately preceded by a
1413      * nop aligned to a 4-byte boundary..."
1414      * The preceding 2-byte (aligned) nop plus the 2-byte halt/bkpt
1415      * means that we have advanced 4 bytes from the required nop.
1416      */
1417     if (s->pc % 4 != 0) {
1418         return false;
1419     }
1420     test = translator_lduw(s->env, &s->base, s->pc - 4);
1421     if (test != 0x4e71) {
1422         return false;
1423     }
1424     /* "... and followed by an invalid sentinel instruction movec %sp,0." */
1425     test = translator_ldl(s->env, &s->base, s->pc);
1426     if (test != 0x4e7bf000) {
1427         return false;
1428     }
1429 
1430     /* Consume the sentinel. */
1431     s->pc += 4;
1432     return true;
1433 }
1434 #endif /* !CONFIG_USER_ONLY */
1435 
1436 DISAS_INSN(scc)
1437 {
1438     DisasCompare c;
1439     int cond;
1440     TCGv tmp;
1441 
1442     cond = (insn >> 8) & 0xf;
1443     gen_cc_cond(&c, s, cond);
1444 
1445     tmp = tcg_temp_new();
1446     tcg_gen_negsetcond_i32(c.tcond, tmp, c.v1, c.v2);
1447 
1448     DEST_EA(env, insn, OS_BYTE, tmp, NULL);
1449 }
1450 
1451 DISAS_INSN(dbcc)
1452 {
1453     TCGLabel *l1;
1454     TCGv reg;
1455     TCGv tmp;
1456     int16_t offset;
1457     uint32_t base;
1458 
1459     reg = DREG(insn, 0);
1460     base = s->pc;
1461     offset = (int16_t)read_im16(env, s);
1462     l1 = gen_new_label();
1463     gen_jmpcc(s, (insn >> 8) & 0xf, l1);
1464 
1465     tmp = tcg_temp_new();
1466     tcg_gen_ext16s_i32(tmp, reg);
1467     tcg_gen_addi_i32(tmp, tmp, -1);
1468     gen_partset_reg(OS_WORD, reg, tmp);
1469     tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, -1, l1);
1470     gen_jmp_tb(s, 1, base + offset, s->base.pc_next);
1471     gen_set_label(l1);
1472     gen_jmp_tb(s, 0, s->pc, s->base.pc_next);
1473 }
1474 
1475 DISAS_INSN(undef_mac)
1476 {
1477     gen_exception(s, s->base.pc_next, EXCP_LINEA);
1478 }
1479 
1480 DISAS_INSN(undef_fpu)
1481 {
1482     gen_exception(s, s->base.pc_next, EXCP_LINEF);
1483 }
1484 
1485 DISAS_INSN(undef)
1486 {
1487     /*
1488      * ??? This covers both instructions that are as yet unimplemented
1489      * for the 680x0 series, and those that are implemented but
1490      * actually illegal for CPU32 or pre-68020.
1491      */
1492     qemu_log_mask(LOG_UNIMP, "Illegal instruction: %04x @ %" VADDR_PRIx "\n",
1493                   insn, s->base.pc_next);
1494     gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
1495 }
1496 
1497 DISAS_INSN(mulw)
1498 {
1499     TCGv reg;
1500     TCGv tmp;
1501     TCGv src;
1502     int sign;
1503 
1504     sign = (insn & 0x100) != 0;
1505     reg = DREG(insn, 9);
1506     tmp = tcg_temp_new();
1507     if (sign)
1508         tcg_gen_ext16s_i32(tmp, reg);
1509     else
1510         tcg_gen_ext16u_i32(tmp, reg);
1511     SRC_EA(env, src, OS_WORD, sign, NULL);
1512     tcg_gen_mul_i32(tmp, tmp, src);
1513     tcg_gen_mov_i32(reg, tmp);
1514     gen_logic_cc(s, tmp, OS_LONG);
1515 }
1516 
1517 DISAS_INSN(divw)
1518 {
1519     int sign;
1520     TCGv src;
1521     TCGv destr;
1522     TCGv ilen;
1523 
1524     /* divX.w <EA>,Dn    32/16 -> 16r:16q */
1525 
1526     sign = (insn & 0x100) != 0;
1527 
1528     /* dest.l / src.w */
1529 
1530     SRC_EA(env, src, OS_WORD, sign, NULL);
1531     destr = tcg_constant_i32(REG(insn, 9));
1532     ilen = tcg_constant_i32(s->pc - s->base.pc_next);
1533     if (sign) {
1534         gen_helper_divsw(tcg_env, destr, src, ilen);
1535     } else {
1536         gen_helper_divuw(tcg_env, destr, src, ilen);
1537     }
1538 
1539     set_cc_op(s, CC_OP_FLAGS);
1540 }
1541 
1542 DISAS_INSN(divl)
1543 {
1544     TCGv num, reg, den, ilen;
1545     int sign;
1546     uint16_t ext;
1547 
1548     ext = read_im16(env, s);
1549 
1550     sign = (ext & 0x0800) != 0;
1551 
1552     if (ext & 0x400) {
1553         if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
1554             gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
1555             return;
1556         }
1557 
1558         /* divX.l <EA>, Dr:Dq    64/32 -> 32r:32q */
1559 
1560         SRC_EA(env, den, OS_LONG, 0, NULL);
1561         num = tcg_constant_i32(REG(ext, 12));
1562         reg = tcg_constant_i32(REG(ext, 0));
1563         ilen = tcg_constant_i32(s->pc - s->base.pc_next);
1564         if (sign) {
1565             gen_helper_divsll(tcg_env, num, reg, den, ilen);
1566         } else {
1567             gen_helper_divull(tcg_env, num, reg, den, ilen);
1568         }
1569         set_cc_op(s, CC_OP_FLAGS);
1570         return;
1571     }
1572 
1573     /* divX.l <EA>, Dq        32/32 -> 32q     */
1574     /* divXl.l <EA>, Dr:Dq    32/32 -> 32r:32q */
1575 
1576     SRC_EA(env, den, OS_LONG, 0, NULL);
1577     num = tcg_constant_i32(REG(ext, 12));
1578     reg = tcg_constant_i32(REG(ext, 0));
1579     ilen = tcg_constant_i32(s->pc - s->base.pc_next);
1580     if (sign) {
1581         gen_helper_divsl(tcg_env, num, reg, den, ilen);
1582     } else {
1583         gen_helper_divul(tcg_env, num, reg, den, ilen);
1584     }
1585 
1586     set_cc_op(s, CC_OP_FLAGS);
1587 }
1588 
1589 static void bcd_add(TCGv dest, TCGv src)
1590 {
1591     TCGv t0, t1;
1592 
1593     /*
1594      * dest10 = dest10 + src10 + X
1595      *
1596      *        t1 = src
1597      *        t2 = t1 + 0x066
1598      *        t3 = t2 + dest + X
1599      *        t4 = t2 ^ dest
1600      *        t5 = t3 ^ t4
1601      *        t6 = ~t5 & 0x110
1602      *        t7 = (t6 >> 2) | (t6 >> 3)
1603      *        return t3 - t7
1604      */
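    /*
     * Worked example (X = 0): dest = 0x39, src = 0x29:
     *   t2 = 0x29 + 0x66 = 0x8f,  t3 = 0x8f + 0x39 = 0xc8,
     *   t4 = 0x8f ^ 0x39 = 0xb6,  t5 = 0xc8 ^ 0xb6 = 0x7e,
     *   t6 = ~0x7e & 0x110 = 0x100,  t7 = 0x40 | 0x20 = 0x60,
     *   result = 0xc8 - 0x60 = 0x68, i.e. BCD 39 + 29 = 68.
     */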
1605 
1606     /*
1607      * t1 = (src + 0x066) + dest + X
1608      *    = result with some possible exceeding 0x6
1609      */
1610 
1611     t0 = tcg_temp_new();
1612     tcg_gen_addi_i32(t0, src, 0x066);
1613 
1614     t1 = tcg_temp_new();
1615     tcg_gen_add_i32(t1, t0, dest);
1616     tcg_gen_add_i32(t1, t1, QREG_CC_X);
1617 
1618     /* we will remove the excess 0x6 where there was no carry */
1619 
1620     /*
1621      * t0 = (src + 0x0066) ^ dest
1622      *    = t1 without carries
1623      */
1624 
1625     tcg_gen_xor_i32(t0, t0, dest);
1626 
1627     /*
1628      * extract the carries
1629      * t0 = t0 ^ t1
1630      *    = only the carries
1631      */
1632 
1633     tcg_gen_xor_i32(t0, t0, t1);
1634 
1635     /*
1636      * generate a 0x2/0x20 bit (mask 0x22) where a digit produced no
1637      * carry, then multiply by 3 to get the 0x6/0x60 to remove
1638      */
1639 
1640     tcg_gen_shri_i32(t0, t0, 3);
1641     tcg_gen_not_i32(t0, t0);
1642     tcg_gen_andi_i32(t0, t0, 0x22);
1643     tcg_gen_add_i32(dest, t0, t0);
1644     tcg_gen_add_i32(dest, dest, t0);
1645 
1646     /*
1647      * remove the excess 0x6
1648      * from digits that did not generate a carry
1649      */
1650 
1651     tcg_gen_sub_i32(dest, t1, dest);
1652 }
1653 
1654 static void bcd_sub(TCGv dest, TCGv src)
1655 {
1656     TCGv t0, t1, t2;
1657 
1658     /*
1659      *  dest10 = dest10 - src10 - X
1660      *         = bcd_add(dest + 1 - X, 0x199 - src)
1661      */
1662 
1663     /* t0 = 0x066 + (0x199 - src) */
1664 
1665     t0 = tcg_temp_new();
1666     tcg_gen_subfi_i32(t0, 0x1ff, src);
1667 
1668     /* t1 = t0 + dest + 1 - X */
1669 
1670     t1 = tcg_temp_new();
1671     tcg_gen_add_i32(t1, t0, dest);
1672     tcg_gen_addi_i32(t1, t1, 1);
1673     tcg_gen_sub_i32(t1, t1, QREG_CC_X);
1674 
1675     /* t2 = t0 ^ dest */
1676 
1677     t2 = tcg_temp_new();
1678     tcg_gen_xor_i32(t2, t0, dest);
1679 
1680     /* t0 = t1 ^ t2 */
1681 
1682     tcg_gen_xor_i32(t0, t1, t2);
1683 
1684     /*
1685      * t2 = ~t0 & 0x110
1686      * t0 = (t2 >> 2) | (t2 >> 3)
1687      *
1688      * to fit in 8-bit operands, this is rewritten as:
1689      *
1690      * t2 = ~(t0 >> 3) & 0x22
1691      * t0 = t2 + t2
1692      * t0 = t0 + t2
1693      */
1694 
1695     tcg_gen_shri_i32(t2, t0, 3);
1696     tcg_gen_not_i32(t2, t2);
1697     tcg_gen_andi_i32(t2, t2, 0x22);
1698     tcg_gen_add_i32(t0, t2, t2);
1699     tcg_gen_add_i32(t0, t0, t2);
1700 
1701     /* return t1 - t0 */
1702 
1703     tcg_gen_sub_i32(dest, t1, t0);
1704 }
1705 
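/*
 * Set the flags for a BCD result in val: bit 8 is the decimal carry and is
 * copied into both C and X, while Z is only ever cleared (it is sticky
 * across a chain of ABCD/SBCD/NBCD, so callers flush the flags first).
 */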
1706 static void bcd_flags(TCGv val)
1707 {
1708     tcg_gen_andi_i32(QREG_CC_C, val, 0x0ff);
1709     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_C);
1710 
1711     tcg_gen_extract_i32(QREG_CC_C, val, 8, 1);
1712 
1713     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
1714 }
1715 
1716 DISAS_INSN(abcd_reg)
1717 {
1718     TCGv src;
1719     TCGv dest;
1720 
1721     gen_flush_flags(s); /* !Z is sticky */
1722 
1723     src = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
1724     dest = gen_extend(s, DREG(insn, 9), OS_BYTE, 0);
1725     bcd_add(dest, src);
1726     gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);
1727 
1728     bcd_flags(dest);
1729 }
1730 
1731 DISAS_INSN(abcd_mem)
1732 {
1733     TCGv src, dest, addr;
1734 
1735     gen_flush_flags(s); /* !Z is sticky */
1736 
1737     /* Indirect pre-decrement load (mode 4) */
1738 
1739     src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
1740                       NULL_QREG, NULL, EA_LOADU, IS_USER(s));
1741     dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
1742                        NULL_QREG, &addr, EA_LOADU, IS_USER(s));
1743 
1744     bcd_add(dest, src);
1745 
1746     gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
1747                 EA_STORE, IS_USER(s));
1748 
1749     bcd_flags(dest);
1750 }
1751 
1752 DISAS_INSN(sbcd_reg)
1753 {
1754     TCGv src, dest;
1755 
1756     gen_flush_flags(s); /* !Z is sticky */
1757 
1758     src = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
1759     dest = gen_extend(s, DREG(insn, 9), OS_BYTE, 0);
1760 
1761     bcd_sub(dest, src);
1762 
1763     gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);
1764 
1765     bcd_flags(dest);
1766 }
1767 
1768 DISAS_INSN(sbcd_mem)
1769 {
1770     TCGv src, dest, addr;
1771 
1772     gen_flush_flags(s); /* !Z is sticky */
1773 
1774     /* Indirect pre-decrement load (mode 4) */
1775 
1776     src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
1777                       NULL_QREG, NULL, EA_LOADU, IS_USER(s));
1778     dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
1779                        NULL_QREG, &addr, EA_LOADU, IS_USER(s));
1780 
1781     bcd_sub(dest, src);
1782 
1783     gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
1784                 EA_STORE, IS_USER(s));
1785 
1786     bcd_flags(dest);
1787 }
1788 
1789 DISAS_INSN(nbcd)
1790 {
1791     TCGv src, dest;
1792     TCGv addr;
1793 
1794     gen_flush_flags(s); /* !Z is sticky */
1795 
1796     SRC_EA(env, src, OS_BYTE, 0, &addr);
1797 
1798     dest = tcg_temp_new();
1799     tcg_gen_movi_i32(dest, 0);
1800     bcd_sub(dest, src);
1801 
1802     DEST_EA(env, insn, OS_BYTE, dest, &addr);
1803 
1804     bcd_flags(dest);
1805 }
1806 
1807 DISAS_INSN(addsub)
1808 {
1809     TCGv reg;
1810     TCGv dest;
1811     TCGv src;
1812     TCGv tmp;
1813     TCGv addr;
1814     int add;
1815     int opsize;
1816 
1817     add = (insn & 0x4000) != 0;
1818     opsize = insn_opsize(insn);
1819     reg = gen_extend(s, DREG(insn, 9), opsize, 1);
1820     dest = tcg_temp_new();
1821     if (insn & 0x100) {
1822         SRC_EA(env, tmp, opsize, 1, &addr);
1823         src = reg;
1824     } else {
1825         tmp = reg;
1826         SRC_EA(env, src, opsize, 1, NULL);
1827     }
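    /*
     * X is computed directly here: for an add, unsigned overflow means the
     * result is below either operand; for a subtract, a borrow means the
     * minuend is below the subtrahend.
     */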
1828     if (add) {
1829         tcg_gen_add_i32(dest, tmp, src);
1830         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, src);
1831         set_cc_op(s, CC_OP_ADDB + opsize);
1832     } else {
1833         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, tmp, src);
1834         tcg_gen_sub_i32(dest, tmp, src);
1835         set_cc_op(s, CC_OP_SUBB + opsize);
1836     }
1837     gen_update_cc_add(dest, src, opsize);
1838     if (insn & 0x100) {
1839         DEST_EA(env, insn, opsize, dest, &addr);
1840     } else {
1841         gen_partset_reg(opsize, DREG(insn, 9), dest);
1842     }
1843 }
1844 
1845 /* Reverse the order of the bits in REG.  */
1846 DISAS_INSN(bitrev)
1847 {
1848     TCGv reg;
1849     reg = DREG(insn, 0);
1850     gen_helper_bitrev(reg, reg);
1851 }
1852 
1853 DISAS_INSN(bitop_reg)
1854 {
1855     int opsize;
1856     int op;
1857     TCGv src1;
1858     TCGv src2;
1859     TCGv tmp;
1860     TCGv addr;
1861     TCGv dest;
1862 
1863     if ((insn & 0x38) != 0)
1864         opsize = OS_BYTE;
1865     else
1866         opsize = OS_LONG;
1867     op = (insn >> 6) & 3;
1868     SRC_EA(env, src1, opsize, 0, op ? &addr: NULL);
1869 
1870     gen_flush_flags(s);
1871     src2 = tcg_temp_new();
1872     if (opsize == OS_BYTE)
1873         tcg_gen_andi_i32(src2, DREG(insn, 9), 7);
1874     else
1875         tcg_gen_andi_i32(src2, DREG(insn, 9), 31);
1876 
1877     tmp = tcg_temp_new();
1878     tcg_gen_shl_i32(tmp, tcg_constant_i32(1), src2);
1879 
1880     tcg_gen_and_i32(QREG_CC_Z, src1, tmp);
1881 
1882     dest = tcg_temp_new();
1883     switch (op) {
1884     case 1: /* bchg */
1885         tcg_gen_xor_i32(dest, src1, tmp);
1886         break;
1887     case 2: /* bclr */
1888         tcg_gen_andc_i32(dest, src1, tmp);
1889         break;
1890     case 3: /* bset */
1891         tcg_gen_or_i32(dest, src1, tmp);
1892         break;
1893     default: /* btst */
1894         break;
1895     }
1896     if (op) {
1897         DEST_EA(env, insn, opsize, dest, &addr);
1898     }
1899 }
1900 
1901 DISAS_INSN(sats)
1902 {
1903     TCGv reg;
1904     reg = DREG(insn, 0);
1905     gen_flush_flags(s);
1906     gen_helper_sats(reg, reg, QREG_CC_V);
1907     gen_logic_cc(s, reg, OS_LONG);
1908 }
1909 
1910 static void gen_push(DisasContext *s, TCGv val)
1911 {
1912     TCGv tmp;
1913 
1914     tmp = tcg_temp_new();
1915     tcg_gen_subi_i32(tmp, QREG_SP, 4);
1916     gen_store(s, OS_LONG, tmp, val, IS_USER(s));
1917     tcg_gen_mov_i32(QREG_SP, tmp);
1918 }
1919 
1920 static TCGv mreg(int reg)
1921 {
1922     if (reg < 8) {
1923         /* Dx */
1924         return cpu_dregs[reg];
1925     }
1926     /* Ax */
1927     return cpu_aregs[reg & 7];
1928 }
1929 
1930 DISAS_INSN(movem)
1931 {
1932     TCGv addr, incr, tmp, r[16];
1933     int is_load = (insn & 0x0400) != 0;
1934     int opsize = (insn & 0x40) != 0 ? OS_LONG : OS_WORD;
1935     uint16_t mask = read_im16(env, s);
1936     int mode = extract32(insn, 3, 3);
1937     int reg0 = REG(insn, 0);
1938     int i;
1939 
1940     tmp = cpu_aregs[reg0];
1941 
1942     switch (mode) {
1943     case 0: /* data register direct */
1944     case 1: /* addr register direct */
1945     do_addr_fault:
1946         gen_addr_fault(s);
1947         return;
1948 
1949     case 2: /* indirect */
1950         break;
1951 
1952     case 3: /* indirect post-increment */
1953         if (!is_load) {
1954             /* post-increment is not allowed */
1955             goto do_addr_fault;
1956         }
1957         break;
1958 
1959     case 4: /* indirect pre-decrement */
1960         if (is_load) {
1961             /* pre-decrement is not allowed */
1962             goto do_addr_fault;
1963         }
1964         /*
1965          * We want a bare copy of the address reg, without the pre-decrement
1966          * adjustment that gen_lea would apply.
1967          */
1968         break;
1969 
1970     default:
1971         tmp = gen_lea_mode(env, s, mode, reg0, opsize);
1972         if (IS_NULL_QREG(tmp)) {
1973             goto do_addr_fault;
1974         }
1975         break;
1976     }
1977 
1978     addr = tcg_temp_new();
1979     tcg_gen_mov_i32(addr, tmp);
1980     incr = tcg_constant_i32(opsize_bytes(opsize));
1981 
1982     if (is_load) {
1983         /* memory to register */
1984         for (i = 0; i < 16; i++) {
1985             if (mask & (1 << i)) {
1986                 r[i] = gen_load(s, opsize, addr, 1, IS_USER(s));
1987                 tcg_gen_add_i32(addr, addr, incr);
1988             }
1989         }
1990         for (i = 0; i < 16; i++) {
1991             if (mask & (1 << i)) {
1992                 tcg_gen_mov_i32(mreg(i), r[i]);
1993             }
1994         }
1995         if (mode == 3) {
1996             /* post-increment: movem (An)+,X */
1997             tcg_gen_mov_i32(cpu_aregs[reg0], addr);
1998         }
1999     } else {
2000         /* register to memory */
2001         if (mode == 4) {
2002             /* pre-decrement: movem X,-(An) */
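            /*
             * In pre-decrement mode the register mask is bit-reversed
             * (bit 15 = D0 ... bit 0 = A7), so (mask << i) & 0x8000 tests
             * register i while walking from A7 down to D0.
             */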
2003             for (i = 15; i >= 0; i--) {
2004                 if ((mask << i) & 0x8000) {
2005                     tcg_gen_sub_i32(addr, addr, incr);
2006                     if (reg0 + 8 == i &&
2007                         m68k_feature(s->env, M68K_FEATURE_EXT_FULL)) {
2008                         /*
2009                          * M68020+: if the addressing register is the
2010                          * register moved to memory, the value written
2011                          * is the initial value decremented by the size of
2012                          * the operation, regardless of how many actual
2013                          * stores have been performed until this point.
2014                          * M68000/M68010: the value is the initial value.
2015                          */
2016                         tmp = tcg_temp_new();
2017                         tcg_gen_sub_i32(tmp, cpu_aregs[reg0], incr);
2018                         gen_store(s, opsize, addr, tmp, IS_USER(s));
2019                     } else {
2020                         gen_store(s, opsize, addr, mreg(i), IS_USER(s));
2021                     }
2022                 }
2023             }
2024             tcg_gen_mov_i32(cpu_aregs[reg0], addr);
2025         } else {
2026             for (i = 0; i < 16; i++) {
2027                 if (mask & (1 << i)) {
2028                     gen_store(s, opsize, addr, mreg(i), IS_USER(s));
2029                     tcg_gen_add_i32(addr, addr, incr);
2030                 }
2031             }
2032         }
2033     }
2034 }
2035 
2036 DISAS_INSN(movep)
2037 {
2038     uint8_t i;
2039     int16_t displ;
2040     TCGv reg;
2041     TCGv addr;
2042     TCGv abuf;
2043     TCGv dbuf;
2044 
2045     displ = read_im16(env, s);
2046 
2047     addr = AREG(insn, 0);
2048     reg = DREG(insn, 9);
2049 
2050     abuf = tcg_temp_new();
2051     tcg_gen_addi_i32(abuf, addr, displ);
2052     dbuf = tcg_temp_new();
2053 
2054     if (insn & 0x40) {
2055         i = 4;
2056     } else {
2057         i = 2;
2058     }
2059 
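    /*
     * MOVEP transfers the selected bytes of Dx to or from every other
     * memory byte, so the address is advanced by 2 between accesses.
     */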
2060     if (insn & 0x80) {
2061         for ( ; i > 0 ; i--) {
2062             tcg_gen_shri_i32(dbuf, reg, (i - 1) * 8);
2063             tcg_gen_qemu_st_i32(dbuf, abuf, IS_USER(s), MO_UB);
2064             if (i > 1) {
2065                 tcg_gen_addi_i32(abuf, abuf, 2);
2066             }
2067         }
2068     } else {
2069         for ( ; i > 0 ; i--) {
2070             tcg_gen_qemu_ld_tl(dbuf, abuf, IS_USER(s), MO_UB);
2071             tcg_gen_deposit_i32(reg, reg, dbuf, (i - 1) * 8, 8);
2072             if (i > 1) {
2073                 tcg_gen_addi_i32(abuf, abuf, 2);
2074             }
2075         }
2076     }
2077 }
2078 
2079 DISAS_INSN(bitop_im)
2080 {
2081     int opsize;
2082     int op;
2083     TCGv src1;
2084     uint32_t mask;
2085     int bitnum;
2086     TCGv tmp;
2087     TCGv addr;
2088 
2089     if ((insn & 0x38) != 0)
2090         opsize = OS_BYTE;
2091     else
2092         opsize = OS_LONG;
2093     op = (insn >> 6) & 3;
2094 
2095     bitnum = read_im16(env, s);
2096     if (m68k_feature(s->env, M68K_FEATURE_M68K)) {
2097         if (bitnum & 0xfe00) {
2098             disas_undef(env, s, insn);
2099             return;
2100         }
2101     } else {
2102         if (bitnum & 0xff00) {
2103             disas_undef(env, s, insn);
2104             return;
2105         }
2106     }
2107 
2108     SRC_EA(env, src1, opsize, 0, op ? &addr: NULL);
2109 
2110     gen_flush_flags(s);
2111     if (opsize == OS_BYTE)
2112         bitnum &= 7;
2113     else
2114         bitnum &= 31;
2115     mask = 1 << bitnum;
2116 
2117     tcg_gen_andi_i32(QREG_CC_Z, src1, mask);
2118 
2119     if (op) {
2120         tmp = tcg_temp_new();
2121         switch (op) {
2122         case 1: /* bchg */
2123             tcg_gen_xori_i32(tmp, src1, mask);
2124             break;
2125         case 2: /* bclr */
2126             tcg_gen_andi_i32(tmp, src1, ~mask);
2127             break;
2128         case 3: /* bset */
2129             tcg_gen_ori_i32(tmp, src1, mask);
2130             break;
2131         default: /* btst */
2132             break;
2133         }
2134         DEST_EA(env, insn, opsize, tmp, &addr);
2135     }
2136 }
2137 
2138 static TCGv gen_get_ccr(DisasContext *s)
2139 {
2140     TCGv dest;
2141 
2142     update_cc_op(s);
2143     dest = tcg_temp_new();
2144     gen_helper_get_ccr(dest, tcg_env);
2145     return dest;
2146 }
2147 
2148 static TCGv gen_get_sr(DisasContext *s)
2149 {
2150     TCGv ccr;
2151     TCGv sr;
2152 
2153     ccr = gen_get_ccr(s);
2154     sr = tcg_temp_new();
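    /*
     * Mask out the CCR bits of QREG_SR; they are rebuilt from the lazily
     * evaluated condition-code state.
     */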
2155     tcg_gen_andi_i32(sr, QREG_SR, 0xffe0);
2156     tcg_gen_or_i32(sr, sr, ccr);
2157     return sr;
2158 }
2159 
2160 static void gen_set_sr_im(DisasContext *s, uint16_t val, int ccr_only)
2161 {
2162     if (ccr_only) {
2163         tcg_gen_movi_i32(QREG_CC_C, val & CCF_C ? 1 : 0);
2164         tcg_gen_movi_i32(QREG_CC_V, val & CCF_V ? -1 : 0);
2165         tcg_gen_movi_i32(QREG_CC_Z, val & CCF_Z ? 0 : 1);
2166         tcg_gen_movi_i32(QREG_CC_N, val & CCF_N ? -1 : 0);
2167         tcg_gen_movi_i32(QREG_CC_X, val & CCF_X ? 1 : 0);
2168     } else {
2169         /* Must perform pending writebacks before changing security state. */
2170         do_writebacks(s);
2171         gen_helper_set_sr(tcg_env, tcg_constant_i32(val));
2172     }
2173     set_cc_op(s, CC_OP_FLAGS);
2174 }
2175 
2176 static void gen_set_sr(DisasContext *s, TCGv val, int ccr_only)
2177 {
2178     if (ccr_only) {
2179         gen_helper_set_ccr(tcg_env, val);
2180     } else {
2181         /* Must perform pending writebacks before changing security state. */
2182         do_writebacks(s);
2183         gen_helper_set_sr(tcg_env, val);
2184     }
2185     set_cc_op(s, CC_OP_FLAGS);
2186 }
2187 
2188 static void gen_move_to_sr(CPUM68KState *env, DisasContext *s, uint16_t insn,
2189                            bool ccr_only)
2190 {
2191     if ((insn & 0x3f) == 0x3c) {
2192         uint16_t val;
2193         val = read_im16(env, s);
2194         gen_set_sr_im(s, val, ccr_only);
2195     } else {
2196         TCGv src;
2197         SRC_EA(env, src, OS_WORD, 0, NULL);
2198         gen_set_sr(s, src, ccr_only);
2199     }
2200 }
2201 
2202 DISAS_INSN(arith_im)
2203 {
2204     int op;
2205     TCGv im;
2206     TCGv src1;
2207     TCGv dest;
2208     TCGv addr;
2209     int opsize;
2210     bool with_SR = ((insn & 0x3f) == 0x3c);
2211 
2212     op = (insn >> 9) & 7;
2213     opsize = insn_opsize(insn);
2214     switch (opsize) {
2215     case OS_BYTE:
2216         im = tcg_constant_i32((int8_t)read_im8(env, s));
2217         break;
2218     case OS_WORD:
2219         im = tcg_constant_i32((int16_t)read_im16(env, s));
2220         break;
2221     case OS_LONG:
2222         im = tcg_constant_i32(read_im32(env, s));
2223         break;
2224     default:
2225         g_assert_not_reached();
2226     }
2227 
2228     if (with_SR) {
2229         /* SR/CCR can only be used with andi/eori/ori */
2230         if (op == 2 || op == 3 || op == 6) {
2231             disas_undef(env, s, insn);
2232             return;
2233         }
2234         switch (opsize) {
2235         case OS_BYTE:
2236             src1 = gen_get_ccr(s);
2237             break;
2238         case OS_WORD:
2239             if (IS_USER(s)) {
2240                 gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
2241                 return;
2242             }
2243             src1 = gen_get_sr(s);
2244             break;
2245         default:
2246             /* OS_LONG; others already g_assert_not_reached.  */
2247             disas_undef(env, s, insn);
2248             return;
2249         }
2250     } else {
2251         SRC_EA(env, src1, opsize, 1, (op == 6) ? NULL : &addr);
2252     }
2253     dest = tcg_temp_new();
2254     switch (op) {
2255     case 0: /* ori */
2256         tcg_gen_or_i32(dest, src1, im);
2257         if (with_SR) {
2258             gen_set_sr(s, dest, opsize == OS_BYTE);
2259             gen_exit_tb(s);
2260         } else {
2261             DEST_EA(env, insn, opsize, dest, &addr);
2262             gen_logic_cc(s, dest, opsize);
2263         }
2264         break;
2265     case 1: /* andi */
2266         tcg_gen_and_i32(dest, src1, im);
2267         if (with_SR) {
2268             gen_set_sr(s, dest, opsize == OS_BYTE);
2269             gen_exit_tb(s);
2270         } else {
2271             DEST_EA(env, insn, opsize, dest, &addr);
2272             gen_logic_cc(s, dest, opsize);
2273         }
2274         break;
2275     case 2: /* subi */
2276         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, src1, im);
2277         tcg_gen_sub_i32(dest, src1, im);
2278         gen_update_cc_add(dest, im, opsize);
2279         set_cc_op(s, CC_OP_SUBB + opsize);
2280         DEST_EA(env, insn, opsize, dest, &addr);
2281         break;
2282     case 3: /* addi */
2283         tcg_gen_add_i32(dest, src1, im);
2284         gen_update_cc_add(dest, im, opsize);
2285         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, im);
2286         set_cc_op(s, CC_OP_ADDB + opsize);
2287         DEST_EA(env, insn, opsize, dest, &addr);
2288         break;
2289     case 5: /* eori */
2290         tcg_gen_xor_i32(dest, src1, im);
2291         if (with_SR) {
2292             gen_set_sr(s, dest, opsize == OS_BYTE);
2293             gen_exit_tb(s);
2294         } else {
2295             DEST_EA(env, insn, opsize, dest, &addr);
2296             gen_logic_cc(s, dest, opsize);
2297         }
2298         break;
2299     case 6: /* cmpi */
2300         gen_update_cc_cmp(s, src1, im, opsize);
2301         break;
2302     default:
2303         abort();
2304     }
2305 }
2306 
2307 DISAS_INSN(cas)
2308 {
2309     int opsize;
2310     TCGv addr;
2311     uint16_t ext;
2312     TCGv load;
2313     TCGv cmp;
2314     MemOp opc;
2315 
2316     switch ((insn >> 9) & 3) {
2317     case 1:
2318         opsize = OS_BYTE;
2319         opc = MO_SB;
2320         break;
2321     case 2:
2322         opsize = OS_WORD;
2323         opc = MO_TESW;
2324         break;
2325     case 3:
2326         opsize = OS_LONG;
2327         opc = MO_TESL;
2328         break;
2329     default:
2330         g_assert_not_reached();
2331     }
2332 
2333     ext = read_im16(env, s);
2334 
2335     /* cas Dc,Du,<EA> */
2336 
2337     addr = gen_lea(env, s, insn, opsize);
2338     if (IS_NULL_QREG(addr)) {
2339         gen_addr_fault(s);
2340         return;
2341     }
2342 
2343     cmp = gen_extend(s, DREG(ext, 0), opsize, 1);
2344 
2345     /*
2346      * if  <EA> == Dc then
2347      *     <EA> = Du
2348      *     Dc = <EA> (because <EA> == Dc)
2349      * else
2350      *     Dc = <EA>
2351      */
2352 
2353     load = tcg_temp_new();
2354     tcg_gen_atomic_cmpxchg_i32(load, addr, cmp, DREG(ext, 6),
2355                                IS_USER(s), opc);
2356     /* update the flags before Dc is overwritten with the loaded value */
2357     gen_update_cc_cmp(s, load, cmp, opsize);
2358     gen_partset_reg(opsize, DREG(ext, 0), load);
2359 
2360     switch (extract32(insn, 3, 3)) {
2361     case 3: /* Indirect postincrement.  */
2362         tcg_gen_addi_i32(AREG(insn, 0), addr, opsize_bytes(opsize));
2363         break;
2364     case 4: /* Indirect predecrement.  */
2365         tcg_gen_mov_i32(AREG(insn, 0), addr);
2366         break;
2367     }
2368 }
2369 
2370 DISAS_INSN(cas2w)
2371 {
2372     uint16_t ext1, ext2;
2373     TCGv addr1, addr2;
2374 
2375     /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2376 
2377     ext1 = read_im16(env, s);
2378 
2379     if (ext1 & 0x8000) {
2380         /* Address Register */
2381         addr1 = AREG(ext1, 12);
2382     } else {
2383         /* Data Register */
2384         addr1 = DREG(ext1, 12);
2385     }
2386 
2387     ext2 = read_im16(env, s);
2388     if (ext2 & 0x8000) {
2389         /* Address Register */
2390         addr2 = AREG(ext2, 12);
2391     } else {
2392         /* Data Register */
2393         addr2 = DREG(ext2, 12);
2394     }
2395 
2396     /*
2397      * if (R1) == Dc1 && (R2) == Dc2 then
2398      *     (R1) = Du1
2399      *     (R2) = Du2
2400      * else
2401      *     Dc1 = (R1)
2402      *     Dc2 = (R2)
2403      */
2404 
2405     if (tb_cflags(s->base.tb) & CF_PARALLEL) {
2406         gen_helper_exit_atomic(tcg_env);
2407     } else {
2408         TCGv regs = tcg_constant_i32(REG(ext2, 6) |
2409                                      (REG(ext1, 6) << 3) |
2410                                      (REG(ext2, 0) << 6) |
2411                                      (REG(ext1, 0) << 9));
2412         gen_helper_cas2w(tcg_env, regs, addr1, addr2);
2413     }
2414 
2415     /* Note that the cas2w helper also assigns env->cc_op.  */
2416     s->cc_op = CC_OP_CMPW;
2417     s->cc_op_synced = 1;
2418 }
2419 
2420 DISAS_INSN(cas2l)
2421 {
2422     uint16_t ext1, ext2;
2423     TCGv addr1, addr2, regs;
2424 
2425     /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2426 
2427     ext1 = read_im16(env, s);
2428 
2429     if (ext1 & 0x8000) {
2430         /* Address Register */
2431         addr1 = AREG(ext1, 12);
2432     } else {
2433         /* Data Register */
2434         addr1 = DREG(ext1, 12);
2435     }
2436 
2437     ext2 = read_im16(env, s);
2438     if (ext2 & 0x8000) {
2439         /* Address Register */
2440         addr2 = AREG(ext2, 12);
2441     } else {
2442         /* Data Register */
2443         addr2 = DREG(ext2, 12);
2444     }
2445 
2446     /*
2447      * if (R1) == Dc1 && (R2) == Dc2 then
2448      *     (R1) = Du1
2449      *     (R2) = Du2
2450      * else
2451      *     Dc1 = (R1)
2452      *     Dc2 = (R2)
2453      */
2454 
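    /*
     * Pack the four register numbers (Du2, Du1, Dc2, Dc1) into a single
     * word for the helper, three bits each starting at bit 0.
     */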
2455     regs = tcg_constant_i32(REG(ext2, 6) |
2456                             (REG(ext1, 6) << 3) |
2457                             (REG(ext2, 0) << 6) |
2458                             (REG(ext1, 0) << 9));
2459     if (tb_cflags(s->base.tb) & CF_PARALLEL) {
2460         gen_helper_cas2l_parallel(tcg_env, regs, addr1, addr2);
2461     } else {
2462         gen_helper_cas2l(tcg_env, regs, addr1, addr2);
2463     }
2464 
2465     /* Note that the cas2l helpers also assign env->cc_op.  */
2466     s->cc_op = CC_OP_CMPL;
2467     s->cc_op_synced = 1;
2468 }
2469 
2470 DISAS_INSN(byterev)
2471 {
2472     TCGv reg;
2473 
2474     reg = DREG(insn, 0);
2475     tcg_gen_bswap32_i32(reg, reg);
2476 }
2477 
2478 DISAS_INSN(move)
2479 {
2480     TCGv src;
2481     TCGv dest;
2482     int op;
2483     int opsize;
2484 
2485     switch (insn >> 12) {
2486     case 1: /* move.b */
2487         opsize = OS_BYTE;
2488         break;
2489     case 2: /* move.l */
2490         opsize = OS_LONG;
2491         break;
2492     case 3: /* move.w */
2493         opsize = OS_WORD;
2494         break;
2495     default:
2496         abort();
2497     }
2498     SRC_EA(env, src, opsize, 1, NULL);
2499     op = (insn >> 6) & 7;
2500     if (op == 1) {
2501         /* movea */
2502         /* The value will already have been sign extended.  */
2503         dest = AREG(insn, 9);
2504         tcg_gen_mov_i32(dest, src);
2505     } else {
2506         /* normal move */
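        /*
         * The destination EA encodes its register and mode fields in the
         * opposite order from a source EA, so rebuild a mode|register
         * value that DEST_EA can decode.
         */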
2507         uint16_t dest_ea;
2508         dest_ea = ((insn >> 9) & 7) | (op << 3);
2509         DEST_EA(env, dest_ea, opsize, src, NULL);
2510         /* This will be correct because loads sign extend.  */
2511         gen_logic_cc(s, src, opsize);
2512     }
2513 }
2514 
2515 DISAS_INSN(negx)
2516 {
2517     TCGv z;
2518     TCGv src;
2519     TCGv addr;
2520     int opsize;
2521 
2522     opsize = insn_opsize(insn);
2523     SRC_EA(env, src, opsize, 1, &addr);
2524 
2525     gen_flush_flags(s); /* compute old Z */
2526 
2527     /*
2528      * Perform subtract with borrow.
2529      * (X, N) =  -(src + X);
2530      */
2531 
2532     z = tcg_constant_i32(0);
2533     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, z, QREG_CC_X, z);
2534     tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, z, z, QREG_CC_N, QREG_CC_X);
2535     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
2536 
2537     tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);
2538 
2539     /*
2540      * Compute signed-overflow for negation.  The normal formula for
2541      * subtraction is (res ^ src) & (src ^ dest), but with dest==0
2542      * this simplifies to res & src.
2543      */
2544 
2545     tcg_gen_and_i32(QREG_CC_V, QREG_CC_N, src);
2546 
2547     /* Copy the rest of the results into place.  */
2548     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
2549     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
2550 
2551     set_cc_op(s, CC_OP_FLAGS);
2552 
2553     /* result is in QREG_CC_N */
2554 
2555     DEST_EA(env, insn, opsize, QREG_CC_N, &addr);
2556 }
2557 
2558 DISAS_INSN(lea)
2559 {
2560     TCGv reg;
2561     TCGv tmp;
2562 
2563     reg = AREG(insn, 9);
2564     tmp = gen_lea(env, s, insn, OS_LONG);
2565     if (IS_NULL_QREG(tmp)) {
2566         gen_addr_fault(s);
2567         return;
2568     }
2569     tcg_gen_mov_i32(reg, tmp);
2570 }
2571 
2572 DISAS_INSN(clr)
2573 {
2574     int opsize;
2575     TCGv zero;
2576 
2577     zero = tcg_constant_i32(0);
2578     opsize = insn_opsize(insn);
2579     DEST_EA(env, insn, opsize, zero, NULL);
2580     gen_logic_cc(s, zero, opsize);
2581 }
2582 
2583 DISAS_INSN(move_from_ccr)
2584 {
2585     TCGv ccr;
2586 
2587     ccr = gen_get_ccr(s);
2588     DEST_EA(env, insn, OS_WORD, ccr, NULL);
2589 }
2590 
2591 DISAS_INSN(neg)
2592 {
2593     TCGv src1;
2594     TCGv dest;
2595     TCGv addr;
2596     int opsize;
2597 
2598     opsize = insn_opsize(insn);
2599     SRC_EA(env, src1, opsize, 1, &addr);
2600     dest = tcg_temp_new();
2601     tcg_gen_neg_i32(dest, src1);
2602     set_cc_op(s, CC_OP_SUBB + opsize);
2603     gen_update_cc_add(dest, src1, opsize);
2604     tcg_gen_setcondi_i32(TCG_COND_NE, QREG_CC_X, dest, 0);
2605     DEST_EA(env, insn, opsize, dest, &addr);
2606 }
2607 
2608 DISAS_INSN(move_to_ccr)
2609 {
2610     gen_move_to_sr(env, s, insn, true);
2611 }
2612 
2613 DISAS_INSN(not)
2614 {
2615     TCGv src1;
2616     TCGv dest;
2617     TCGv addr;
2618     int opsize;
2619 
2620     opsize = insn_opsize(insn);
2621     SRC_EA(env, src1, opsize, 1, &addr);
2622     dest = tcg_temp_new();
2623     tcg_gen_not_i32(dest, src1);
2624     DEST_EA(env, insn, opsize, dest, &addr);
2625     gen_logic_cc(s, dest, opsize);
2626 }
2627 
2628 DISAS_INSN(swap)
2629 {
2630     TCGv src1;
2631     TCGv src2;
2632     TCGv reg;
2633 
2634     src1 = tcg_temp_new();
2635     src2 = tcg_temp_new();
2636     reg = DREG(insn, 0);
2637     tcg_gen_shli_i32(src1, reg, 16);
2638     tcg_gen_shri_i32(src2, reg, 16);
2639     tcg_gen_or_i32(reg, src1, src2);
2640     gen_logic_cc(s, reg, OS_LONG);
2641 }
2642 
2643 DISAS_INSN(bkpt)
2644 {
2645 #if defined(CONFIG_USER_ONLY)
2646     gen_exception(s, s->base.pc_next, EXCP_DEBUG);
2647 #else
2648     /* BKPT #0 is the alternate semihosting instruction. */
2649     if ((insn & 7) == 0 && semihosting_test(s)) {
2650         gen_exception(s, s->pc, EXCP_SEMIHOSTING);
2651         return;
2652     }
2653     gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
2654 #endif
2655 }
2656 
2657 DISAS_INSN(pea)
2658 {
2659     TCGv tmp;
2660 
2661     tmp = gen_lea(env, s, insn, OS_LONG);
2662     if (IS_NULL_QREG(tmp)) {
2663         gen_addr_fault(s);
2664         return;
2665     }
2666     gen_push(s, tmp);
2667 }
2668 
2669 DISAS_INSN(ext)
2670 {
2671     int op;
2672     TCGv reg;
2673     TCGv tmp;
2674 
2675     reg = DREG(insn, 0);
2676     op = (insn >> 6) & 7;
2677     tmp = tcg_temp_new();
2678     if (op == 3)
2679         tcg_gen_ext16s_i32(tmp, reg);
2680     else
2681         tcg_gen_ext8s_i32(tmp, reg);
2682     if (op == 2)
2683         gen_partset_reg(OS_WORD, reg, tmp);
2684     else
2685         tcg_gen_mov_i32(reg, tmp);
2686     gen_logic_cc(s, tmp, OS_LONG);
2687 }
2688 
2689 DISAS_INSN(tst)
2690 {
2691     int opsize;
2692     TCGv tmp;
2693 
2694     opsize = insn_opsize(insn);
2695     SRC_EA(env, tmp, opsize, 1, NULL);
2696     gen_logic_cc(s, tmp, opsize);
2697 }
2698 
2699 DISAS_INSN(pulse)
2700 {
2701     /* Implemented as a NOP.  */
2702 }
2703 
2704 DISAS_INSN(illegal)
2705 {
2706     gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
2707 }
2708 
2709 DISAS_INSN(tas)
2710 {
2711     int mode = extract32(insn, 3, 3);
2712     int reg0 = REG(insn, 0);
2713 
2714     if (mode == 0) {
2715         /* data register direct */
2716         TCGv dest = cpu_dregs[reg0];
2717         gen_logic_cc(s, dest, OS_BYTE);
2718         tcg_gen_ori_tl(dest, dest, 0x80);
2719     } else {
2720         TCGv src1, addr;
2721 
2722         addr = gen_lea_mode(env, s, mode, reg0, OS_BYTE);
2723         if (IS_NULL_QREG(addr)) {
2724             gen_addr_fault(s);
2725             return;
2726         }
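        /*
         * Read the old byte, derive N/Z from it, and set bit 7, all as a
         * single atomic fetch-or.
         */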
2727         src1 = tcg_temp_new();
2728         tcg_gen_atomic_fetch_or_tl(src1, addr, tcg_constant_tl(0x80),
2729                                    IS_USER(s), MO_SB);
2730         gen_logic_cc(s, src1, OS_BYTE);
2731 
2732         switch (mode) {
2733         case 3: /* Indirect postincrement.  */
2734             tcg_gen_addi_i32(AREG(insn, 0), addr, 1);
2735             break;
2736         case 4: /* Indirect predecrement.  */
2737             tcg_gen_mov_i32(AREG(insn, 0), addr);
2738             break;
2739         }
2740     }
2741 }
2742 
2743 DISAS_INSN(mull)
2744 {
2745     uint16_t ext;
2746     TCGv src1;
2747     int sign;
2748 
2749     ext = read_im16(env, s);
2750 
2751     sign = ext & 0x800;
2752 
2753     if (ext & 0x400) {
2754         if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
2755             gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
2756             return;
2757         }
2758 
2759         SRC_EA(env, src1, OS_LONG, 0, NULL);
2760 
2761         if (sign) {
2762             tcg_gen_muls2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
2763         } else {
2764             tcg_gen_mulu2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
2765         }
2766         /* if Dl == Dh, 68040 returns low word */
2767         tcg_gen_mov_i32(DREG(ext, 0), QREG_CC_N);
2768         tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_Z);
2769         tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N);
2770 
2771         tcg_gen_movi_i32(QREG_CC_V, 0);
2772         tcg_gen_movi_i32(QREG_CC_C, 0);
2773 
2774         set_cc_op(s, CC_OP_FLAGS);
2775         return;
2776     }
2777     SRC_EA(env, src1, OS_LONG, 0, NULL);
2778     if (m68k_feature(s->env, M68K_FEATURE_M68K)) {
2779         tcg_gen_movi_i32(QREG_CC_C, 0);
2780         if (sign) {
2781             tcg_gen_muls2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
2782             /* QREG_CC_V is -(QREG_CC_V != (QREG_CC_N >> 31)) */
2783             tcg_gen_sari_i32(QREG_CC_Z, QREG_CC_N, 31);
2784             tcg_gen_negsetcond_i32(TCG_COND_NE, QREG_CC_V,
2785                                    QREG_CC_V, QREG_CC_Z);
2786         } else {
2787             tcg_gen_mulu2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
2788             /* QREG_CC_V is -(QREG_CC_V != 0), use QREG_CC_C as 0 */
2789             tcg_gen_negsetcond_i32(TCG_COND_NE, QREG_CC_V,
2790                                    QREG_CC_V, QREG_CC_C);
2791         }
2792         tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_N);
2793 
2794         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
2795 
2796         set_cc_op(s, CC_OP_FLAGS);
2797     } else {
2798         /*
2799          * The upper 32 bits of the product are discarded, so
2800          * muls.l and mulu.l are functionally equivalent.
2801          */
2802         tcg_gen_mul_i32(DREG(ext, 12), src1, DREG(ext, 12));
2803         gen_logic_cc(s, DREG(ext, 12), OS_LONG);
2804     }
2805 }
2806 
2807 static void gen_link(DisasContext *s, uint16_t insn, int32_t offset)
2808 {
2809     TCGv reg;
2810     TCGv tmp;
2811 
2812     reg = AREG(insn, 0);
2813     tmp = tcg_temp_new();
2814     tcg_gen_subi_i32(tmp, QREG_SP, 4);
2815     gen_store(s, OS_LONG, tmp, reg, IS_USER(s));
2816     if ((insn & 7) != 7) {
2817         tcg_gen_mov_i32(reg, tmp);
2818     }
2819     tcg_gen_addi_i32(QREG_SP, tmp, offset);
2820 }
2821 
2822 DISAS_INSN(link)
2823 {
2824     int16_t offset;
2825 
2826     offset = read_im16(env, s);
2827     gen_link(s, insn, offset);
2828 }
2829 
2830 DISAS_INSN(linkl)
2831 {
2832     int32_t offset;
2833 
2834     offset = read_im32(env, s);
2835     gen_link(s, insn, offset);
2836 }
2837 
2838 DISAS_INSN(unlk)
2839 {
2840     TCGv src;
2841     TCGv reg;
2842     TCGv tmp;
2843 
2844     src = tcg_temp_new();
2845     reg = AREG(insn, 0);
2846     tcg_gen_mov_i32(src, reg);
2847     tmp = gen_load(s, OS_LONG, src, 0, IS_USER(s));
2848     tcg_gen_mov_i32(reg, tmp);
2849     tcg_gen_addi_i32(QREG_SP, src, 4);
2850 }
2851 
2852 #if !defined(CONFIG_USER_ONLY)
2853 DISAS_INSN(reset)
2854 {
2855     if (IS_USER(s)) {
2856         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
2857         return;
2858     }
2859 
2860     gen_helper_reset(tcg_env);
2861 }
2862 #endif
2863 
2864 DISAS_INSN(nop)
2865 {
2866 }
2867 
2868 DISAS_INSN(rtd)
2869 {
2870     TCGv tmp;
2871     int16_t offset = read_im16(env, s);
2872 
2873     tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
2874     tcg_gen_addi_i32(QREG_SP, QREG_SP, offset + 4);
2875     gen_jmp(s, tmp);
2876 }
2877 
2878 DISAS_INSN(rtr)
2879 {
2880     TCGv tmp;
2881     TCGv ccr;
2882     TCGv sp;
2883 
2884     sp = tcg_temp_new();
2885     ccr = gen_load(s, OS_WORD, QREG_SP, 0, IS_USER(s));
2886     tcg_gen_addi_i32(sp, QREG_SP, 2);
2887     tmp = gen_load(s, OS_LONG, sp, 0, IS_USER(s));
2888     tcg_gen_addi_i32(QREG_SP, sp, 4);
2889 
2890     gen_set_sr(s, ccr, true);
2891 
2892     gen_jmp(s, tmp);
2893 }
2894 
2895 DISAS_INSN(rts)
2896 {
2897     TCGv tmp;
2898 
2899     tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
2900     tcg_gen_addi_i32(QREG_SP, QREG_SP, 4);
2901     gen_jmp(s, tmp);
2902 }
2903 
2904 DISAS_INSN(jump)
2905 {
2906     TCGv tmp;
2907 
2908     /*
2909      * Load the target address first to ensure correct exception
2910      * behavior.
2911      */
2912     tmp = gen_lea(env, s, insn, OS_LONG);
2913     if (IS_NULL_QREG(tmp)) {
2914         gen_addr_fault(s);
2915         return;
2916     }
2917     if ((insn & 0x40) == 0) {
2918         /* jsr */
2919         gen_push(s, tcg_constant_i32(s->pc));
2920     }
2921     gen_jmp(s, tmp);
2922 }
2923 
2924 DISAS_INSN(addsubq)
2925 {
2926     TCGv src;
2927     TCGv dest;
2928     TCGv val;
2929     int imm;
2930     TCGv addr;
2931     int opsize;
2932 
2933     if ((insn & 070) == 010) {
2934         /* Operation on address register is always long.  */
2935         opsize = OS_LONG;
2936     } else {
2937         opsize = insn_opsize(insn);
2938     }
2939     SRC_EA(env, src, opsize, 1, &addr);
2940     imm = (insn >> 9) & 7;
2941     if (imm == 0) {
2942         imm = 8;
2943     }
2944     val = tcg_constant_i32(imm);
2945     dest = tcg_temp_new();
2946     tcg_gen_mov_i32(dest, src);
2947     if ((insn & 0x38) == 0x08) {
2948         /*
2949          * Don't update condition codes if the destination is an
2950          * address register.
2951          */
2952         if (insn & 0x0100) {
2953             tcg_gen_sub_i32(dest, dest, val);
2954         } else {
2955             tcg_gen_add_i32(dest, dest, val);
2956         }
2957     } else {
2958         if (insn & 0x0100) {
2959             tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
2960             tcg_gen_sub_i32(dest, dest, val);
2961             set_cc_op(s, CC_OP_SUBB + opsize);
2962         } else {
2963             tcg_gen_add_i32(dest, dest, val);
2964             tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
2965             set_cc_op(s, CC_OP_ADDB + opsize);
2966         }
2967         gen_update_cc_add(dest, val, opsize);
2968     }
2969     DEST_EA(env, insn, opsize, dest, &addr);
2970 }
2971 
2972 DISAS_INSN(branch)
2973 {
2974     int32_t offset;
2975     uint32_t base;
2976     int op;
2977 
2978     base = s->pc;
2979     op = (insn >> 8) & 0xf;
2980     offset = (int8_t)insn;
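    /*
     * An 8-bit displacement of 0x00 selects a following 16-bit
     * displacement word; 0xff selects a following 32-bit displacement.
     */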
2981     if (offset == 0) {
2982         offset = (int16_t)read_im16(env, s);
2983     } else if (offset == -1) {
2984         offset = read_im32(env, s);
2985     }
2986     if (op == 1) {
2987         /* bsr */
2988         gen_push(s, tcg_constant_i32(s->pc));
2989     }
2990     if (op > 1) {
2991         /* Bcc */
2992         TCGLabel *l1 = gen_new_label();
2993         gen_jmpcc(s, ((insn >> 8) & 0xf) ^ 1, l1);
2994         gen_jmp_tb(s, 1, base + offset, s->base.pc_next);
2995         gen_set_label(l1);
2996         gen_jmp_tb(s, 0, s->pc, s->base.pc_next);
2997     } else {
2998         /* Unconditional branch.  */
2999         update_cc_op(s);
3000         gen_jmp_tb(s, 0, base + offset, s->base.pc_next);
3001     }
3002 }
3003 
3004 DISAS_INSN(moveq)
3005 {
3006     tcg_gen_movi_i32(DREG(insn, 9), (int8_t)insn);
3007     gen_logic_cc(s, DREG(insn, 9), OS_LONG);
3008 }
3009 
3010 DISAS_INSN(mvzs)
3011 {
3012     int opsize;
3013     TCGv src;
3014     TCGv reg;
3015 
3016     if (insn & 0x40)
3017         opsize = OS_WORD;
3018     else
3019         opsize = OS_BYTE;
3020     SRC_EA(env, src, opsize, (insn & 0x80) == 0, NULL);
3021     reg = DREG(insn, 9);
3022     tcg_gen_mov_i32(reg, src);
3023     gen_logic_cc(s, src, opsize);
3024 }
3025 
3026 DISAS_INSN(or)
3027 {
3028     TCGv reg;
3029     TCGv dest;
3030     TCGv src;
3031     TCGv addr;
3032     int opsize;
3033 
3034     opsize = insn_opsize(insn);
3035     reg = gen_extend(s, DREG(insn, 9), opsize, 0);
3036     dest = tcg_temp_new();
3037     if (insn & 0x100) {
3038         SRC_EA(env, src, opsize, 0, &addr);
3039         tcg_gen_or_i32(dest, src, reg);
3040         DEST_EA(env, insn, opsize, dest, &addr);
3041     } else {
3042         SRC_EA(env, src, opsize, 0, NULL);
3043         tcg_gen_or_i32(dest, src, reg);
3044         gen_partset_reg(opsize, DREG(insn, 9), dest);
3045     }
3046     gen_logic_cc(s, dest, opsize);
3047 }
3048 
3049 DISAS_INSN(suba)
3050 {
3051     TCGv src;
3052     TCGv reg;
3053 
3054     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3055     reg = AREG(insn, 9);
3056     tcg_gen_sub_i32(reg, reg, src);
3057 }
3058 
3059 static inline void gen_subx(DisasContext *s, TCGv src, TCGv dest, int opsize)
3060 {
3061     TCGv tmp, zero;
3062 
3063     gen_flush_flags(s); /* compute old Z */
3064 
3065     /*
3066      * Perform subtract with borrow.
3067      * (X, N) = dest - (src + X);
3068      */
3069 
3070     zero = tcg_constant_i32(0);
3071     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, zero, QREG_CC_X, zero);
3072     tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, dest, zero, QREG_CC_N, QREG_CC_X);
3073     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3074     tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);
3075 
3076     /* Compute signed-overflow for subtract.  */
3077 
3078     tmp = tcg_temp_new();
3079     tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, dest);
3080     tcg_gen_xor_i32(tmp, dest, src);
3081     tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, tmp);
3082 
3083     /* Copy the rest of the results into place.  */
3084     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
3085     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
3086 
3087     set_cc_op(s, CC_OP_FLAGS);
3088 
3089     /* result is in QREG_CC_N */
3090 }
3091 
3092 DISAS_INSN(subx_reg)
3093 {
3094     TCGv dest;
3095     TCGv src;
3096     int opsize;
3097 
3098     opsize = insn_opsize(insn);
3099 
3100     src = gen_extend(s, DREG(insn, 0), opsize, 1);
3101     dest = gen_extend(s, DREG(insn, 9), opsize, 1);
3102 
3103     gen_subx(s, src, dest, opsize);
3104 
3105     gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3106 }
3107 
3108 DISAS_INSN(subx_mem)
3109 {
3110     TCGv src;
3111     TCGv addr_src;
3112     TCGv dest;
3113     TCGv addr_dest;
3114     int opsize;
3115 
3116     opsize = insn_opsize(insn);
3117 
3118     addr_src = AREG(insn, 0);
3119     tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
3120     src = gen_load(s, opsize, addr_src, 1, IS_USER(s));
3121 
3122     addr_dest = AREG(insn, 9);
3123     tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
3124     dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));
3125 
3126     gen_subx(s, src, dest, opsize);
3127 
3128     gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));
3129 }
3130 
3131 DISAS_INSN(mov3q)
3132 {
3133     TCGv src;
3134     int val;
3135 
3136     val = (insn >> 9) & 7;
3137     if (val == 0) {
3138         val = -1;
3139     }
3140     src = tcg_constant_i32(val);
3141     gen_logic_cc(s, src, OS_LONG);
3142     DEST_EA(env, insn, OS_LONG, src, NULL);
3143 }
3144 
3145 DISAS_INSN(cmp)
3146 {
3147     TCGv src;
3148     TCGv reg;
3149     int opsize;
3150 
3151     opsize = insn_opsize(insn);
3152     SRC_EA(env, src, opsize, 1, NULL);
3153     reg = gen_extend(s, DREG(insn, 9), opsize, 1);
3154     gen_update_cc_cmp(s, reg, src, opsize);
3155 }
3156 
3157 DISAS_INSN(cmpa)
3158 {
3159     int opsize;
3160     TCGv src;
3161     TCGv reg;
3162 
3163     if (insn & 0x100) {
3164         opsize = OS_LONG;
3165     } else {
3166         opsize = OS_WORD;
3167     }
3168     SRC_EA(env, src, opsize, 1, NULL);
3169     reg = AREG(insn, 9);
3170     gen_update_cc_cmp(s, reg, src, OS_LONG);
3171 }
3172 
3173 DISAS_INSN(cmpm)
3174 {
3175     int opsize = insn_opsize(insn);
3176     TCGv src, dst;
3177 
3178     /* Post-increment load (mode 3) from Ay.  */
3179     src = gen_ea_mode(env, s, 3, REG(insn, 0), opsize,
3180                       NULL_QREG, NULL, EA_LOADS, IS_USER(s));
3181     /* Post-increment load (mode 3) from Ax.  */
3182     dst = gen_ea_mode(env, s, 3, REG(insn, 9), opsize,
3183                       NULL_QREG, NULL, EA_LOADS, IS_USER(s));
3184 
3185     gen_update_cc_cmp(s, dst, src, opsize);
3186 }
3187 
3188 DISAS_INSN(eor)
3189 {
3190     TCGv src;
3191     TCGv dest;
3192     TCGv addr;
3193     int opsize;
3194 
3195     opsize = insn_opsize(insn);
3196 
3197     SRC_EA(env, src, opsize, 0, &addr);
3198     dest = tcg_temp_new();
3199     tcg_gen_xor_i32(dest, src, DREG(insn, 9));
3200     gen_logic_cc(s, dest, opsize);
3201     DEST_EA(env, insn, opsize, dest, &addr);
3202 }
3203 
3204 static void do_exg(TCGv reg1, TCGv reg2)
3205 {
3206     TCGv temp = tcg_temp_new();
3207     tcg_gen_mov_i32(temp, reg1);
3208     tcg_gen_mov_i32(reg1, reg2);
3209     tcg_gen_mov_i32(reg2, temp);
3210 }
3211 
3212 DISAS_INSN(exg_dd)
3213 {
3214     /* exchange Dx and Dy */
3215     do_exg(DREG(insn, 9), DREG(insn, 0));
3216 }
3217 
3218 DISAS_INSN(exg_aa)
3219 {
3220     /* exchange Ax and Ay */
3221     do_exg(AREG(insn, 9), AREG(insn, 0));
3222 }
3223 
3224 DISAS_INSN(exg_da)
3225 {
3226     /* exchange Dx and Ay */
3227     do_exg(DREG(insn, 9), AREG(insn, 0));
3228 }
3229 
3230 DISAS_INSN(and)
3231 {
3232     TCGv src;
3233     TCGv reg;
3234     TCGv dest;
3235     TCGv addr;
3236     int opsize;
3237 
3238     dest = tcg_temp_new();
3239 
3240     opsize = insn_opsize(insn);
3241     reg = DREG(insn, 9);
3242     if (insn & 0x100) {
3243         SRC_EA(env, src, opsize, 0, &addr);
3244         tcg_gen_and_i32(dest, src, reg);
3245         DEST_EA(env, insn, opsize, dest, &addr);
3246     } else {
3247         SRC_EA(env, src, opsize, 0, NULL);
3248         tcg_gen_and_i32(dest, src, reg);
3249         gen_partset_reg(opsize, reg, dest);
3250     }
3251     gen_logic_cc(s, dest, opsize);
3252 }
3253 
3254 DISAS_INSN(adda)
3255 {
3256     TCGv src;
3257     TCGv reg;
3258 
3259     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3260     reg = AREG(insn, 9);
3261     tcg_gen_add_i32(reg, reg, src);
3262 }
3263 
3264 static inline void gen_addx(DisasContext *s, TCGv src, TCGv dest, int opsize)
3265 {
3266     TCGv tmp, zero;
3267 
3268     gen_flush_flags(s); /* compute old Z */
3269 
3270     /*
3271      * Perform addition with carry.
3272      * (X, N) = src + dest + X;
3273      */
3274 
3275     zero = tcg_constant_i32(0);
3276     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_X, zero, dest, zero);
3277     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_N, QREG_CC_X, src, zero);
3278     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3279 
3280     /* Compute signed-overflow for addition.  */
3281 
3282     tmp = tcg_temp_new();
3283     tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
3284     tcg_gen_xor_i32(tmp, dest, src);
3285     tcg_gen_andc_i32(QREG_CC_V, QREG_CC_V, tmp);
3286 
3287     /* Copy the rest of the results into place.  */
3288     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
3289     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
3290 
3291     set_cc_op(s, CC_OP_FLAGS);
3292 
3293     /* result is in QREG_CC_N */
3294 }
3295 
3296 DISAS_INSN(addx_reg)
3297 {
3298     TCGv dest;
3299     TCGv src;
3300     int opsize;
3301 
3302     opsize = insn_opsize(insn);
3303 
3304     dest = gen_extend(s, DREG(insn, 9), opsize, 1);
3305     src = gen_extend(s, DREG(insn, 0), opsize, 1);
3306 
3307     gen_addx(s, src, dest, opsize);
3308 
3309     gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3310 }
3311 
3312 DISAS_INSN(addx_mem)
3313 {
3314     TCGv src;
3315     TCGv addr_src;
3316     TCGv dest;
3317     TCGv addr_dest;
3318     int opsize;
3319 
3320     opsize = insn_opsize(insn);
3321 
3322     addr_src = AREG(insn, 0);
3323     tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
3324     src = gen_load(s, opsize, addr_src, 1, IS_USER(s));
3325 
3326     addr_dest = AREG(insn, 9);
3327     tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
3328     dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));
3329 
3330     gen_addx(s, src, dest, opsize);
3331 
3332     gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));
3333 }
3334 
3335 static inline void shift_im(DisasContext *s, uint16_t insn, int opsize)
3336 {
3337     int count = (insn >> 9) & 7;
3338     int logical = insn & 8;
3339     int left = insn & 0x100;
3340     int bits = opsize_bytes(opsize) * 8;
3341     TCGv reg = gen_extend(s, DREG(insn, 0), opsize, !logical);
3342 
3343     if (count == 0) {
3344         count = 8;
3345     }
3346 
3347     tcg_gen_movi_i32(QREG_CC_V, 0);
3348     if (left) {
3349         tcg_gen_shri_i32(QREG_CC_C, reg, bits - count);
3350         tcg_gen_shli_i32(QREG_CC_N, reg, count);
3351 
3352         /*
3353          * Note that ColdFire always clears V (done above),
3354          * while M68000 sets it if the most significant bit is changed at
3355          * any time during the shift operation.
3356          */
3357         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68K)) {
3358             /* if shift count >= bits, V is (reg != 0) */
3359             if (count >= bits) {
3360                 tcg_gen_negsetcond_i32(TCG_COND_NE, QREG_CC_V, reg, QREG_CC_V);
3361             } else {
3362                 TCGv t0 = tcg_temp_new();
3363                 tcg_gen_sari_i32(QREG_CC_V, reg, bits - 1);
3364                 tcg_gen_sari_i32(t0, reg, bits - count - 1);
3365                 tcg_gen_negsetcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, t0);
3366             }
3367         }
3368     } else {
3369         tcg_gen_shri_i32(QREG_CC_C, reg, count - 1);
3370         if (logical) {
3371             tcg_gen_shri_i32(QREG_CC_N, reg, count);
3372         } else {
3373             tcg_gen_sari_i32(QREG_CC_N, reg, count);
3374         }
3375     }
3376 
3377     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3378     tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3379     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3380     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
3381 
3382     gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
3383     set_cc_op(s, CC_OP_FLAGS);
3384 }
3385 
3386 static inline void shift_reg(DisasContext *s, uint16_t insn, int opsize)
3387 {
3388     int logical = insn & 8;
3389     int left = insn & 0x100;
3390     int bits = opsize_bytes(opsize) * 8;
3391     TCGv reg = gen_extend(s, DREG(insn, 0), opsize, !logical);
3392     TCGv s32;
3393     TCGv_i64 t64, s64;
3394 
3395     t64 = tcg_temp_new_i64();
3396     s64 = tcg_temp_new_i64();
3397     s32 = tcg_temp_new();
3398 
3399     /*
3400      * Note that m68k truncates the shift count modulo 64, not 32.
3401      * In addition, a 64-bit shift makes it easy to find "the last
3402      * bit shifted out", for the carry flag.
3403      */
3404     tcg_gen_andi_i32(s32, DREG(insn, 9), 63);
3405     tcg_gen_extu_i32_i64(s64, s32);
3406     tcg_gen_extu_i32_i64(t64, reg);
3407 
3408     /* Optimistically set V=0.  Also used as a zero source below.  */
3409     tcg_gen_movi_i32(QREG_CC_V, 0);
3410     if (left) {
3411         tcg_gen_shl_i64(t64, t64, s64);
3412 
3413         if (opsize == OS_LONG) {
3414             tcg_gen_extr_i64_i32(QREG_CC_N, QREG_CC_C, t64);
3415             /* Note that C=0 if shift count is 0, and we get that for free.  */
3416         } else {
3417             TCGv zero = tcg_constant_i32(0);
3418             tcg_gen_extrl_i64_i32(QREG_CC_N, t64);
3419             tcg_gen_shri_i32(QREG_CC_C, QREG_CC_N, bits);
3420             tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3421                                 s32, zero, zero, QREG_CC_C);
3422         }
3423         tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3424 
3425         /* X = C, but only if the shift count was non-zero.  */
3426         tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
3427                             QREG_CC_C, QREG_CC_X);
3428 
3429         /*
3430          * M68000 sets V if the most significant bit is changed at
3431          * any time during the shift operation.  Do this via creating
3432          * an extension of the sign bit, comparing, and discarding
3433          * the bits below the sign bit.  I.e.
3434          *     int64_t s = (intN_t)reg;
3435          *     int64_t t = (int64_t)(intN_t)reg << count;
3436          *     V = ((s ^ t) & (-1 << (bits - 1))) != 0
3437          */
3438         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68K)) {
3439             TCGv_i64 tt = tcg_constant_i64(32);
3440             /* if shift is greater than 32, use 32 */
3441             tcg_gen_movcond_i64(TCG_COND_GT, s64, s64, tt, tt, s64);
3442             /* Sign extend the input to 64 bits; re-do the shift.  */
3443             tcg_gen_ext_i32_i64(t64, reg);
3444             tcg_gen_shl_i64(s64, t64, s64);
3445             /* Clear all bits that are unchanged.  */
3446             tcg_gen_xor_i64(t64, t64, s64);
3447             /* Ignore the bits below the sign bit.  */
3448             tcg_gen_andi_i64(t64, t64, -1ULL << (bits - 1));
3449             /* If any bits remain set, we have overflow.  */
3450             tcg_gen_negsetcond_i64(TCG_COND_NE, t64, t64, tcg_constant_i64(0));
3451             tcg_gen_extrl_i64_i32(QREG_CC_V, t64);
3452         }
3453     } else {
3454         tcg_gen_shli_i64(t64, t64, 32);
3455         if (logical) {
3456             tcg_gen_shr_i64(t64, t64, s64);
3457         } else {
3458             tcg_gen_sar_i64(t64, t64, s64);
3459         }
3460         tcg_gen_extr_i64_i32(QREG_CC_C, QREG_CC_N, t64);
3461 
3462         /* Note that C=0 if shift count is 0, and we get that for free.  */
3463         tcg_gen_shri_i32(QREG_CC_C, QREG_CC_C, 31);
3464 
3465         /* X = C, but only if the shift count was non-zero.  */
3466         tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
3467                             QREG_CC_C, QREG_CC_X);
3468     }
3469     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3470     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3471 
3472     /* Write back the result.  */
3473     gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
3474     set_cc_op(s, CC_OP_FLAGS);
3475 }
3476 
3477 DISAS_INSN(shift8_im)
3478 {
3479     shift_im(s, insn, OS_BYTE);
3480 }
3481 
3482 DISAS_INSN(shift16_im)
3483 {
3484     shift_im(s, insn, OS_WORD);
3485 }
3486 
3487 DISAS_INSN(shift_im)
3488 {
3489     shift_im(s, insn, OS_LONG);
3490 }
3491 
3492 DISAS_INSN(shift8_reg)
3493 {
3494     shift_reg(s, insn, OS_BYTE);
3495 }
3496 
3497 DISAS_INSN(shift16_reg)
3498 {
3499     shift_reg(s, insn, OS_WORD);
3500 }
3501 
3502 DISAS_INSN(shift_reg)
3503 {
3504     shift_reg(s, insn, OS_LONG);
3505 }
3506 
3507 DISAS_INSN(shift_mem)
3508 {
3509     int logical = insn & 8;
3510     int left = insn & 0x100;
3511     TCGv src;
3512     TCGv addr;
3513 
3514     SRC_EA(env, src, OS_WORD, !logical, &addr);
3515     tcg_gen_movi_i32(QREG_CC_V, 0);
3516     if (left) {
3517         tcg_gen_shri_i32(QREG_CC_C, src, 15);
3518         tcg_gen_shli_i32(QREG_CC_N, src, 1);
3519 
3520         /*
3521          * Note that ColdFire always clears V,
3522          * while M68000 sets if the most significant bit is changed at
3523          * any time during the shift operation
3524          */
3525         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68K)) {
3526             src = gen_extend(s, src, OS_WORD, 1);
3527             tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
3528         }
3529     } else {
3530         tcg_gen_mov_i32(QREG_CC_C, src);
3531         if (logical) {
3532             tcg_gen_shri_i32(QREG_CC_N, src, 1);
3533         } else {
3534             tcg_gen_sari_i32(QREG_CC_N, src, 1);
3535         }
3536     }
3537 
3538     gen_ext(QREG_CC_N, QREG_CC_N, OS_WORD, 1);
3539     tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3540     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3541     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
3542 
3543     DEST_EA(env, insn, OS_WORD, QREG_CC_N, &addr);
3544     set_cc_op(s, CC_OP_FLAGS);
3545 }
3546 
3547 static void rotate(TCGv reg, TCGv shift, int left, int size)
3548 {
3549     switch (size) {
3550     case 8:
3551         /* Replicate the 8-bit input so that a 32-bit rotate works.  */
3552         tcg_gen_ext8u_i32(reg, reg);
3553         tcg_gen_muli_i32(reg, reg, 0x01010101);
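        /* e.g. 0x000000ab * 0x01010101 == 0xabababab */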
3554         goto do_long;
3555     case 16:
3556         /* Replicate the 16-bit input so that a 32-bit rotate works.  */
3557         tcg_gen_deposit_i32(reg, reg, reg, 16, 16);
3558         goto do_long;
3559     do_long:
3560     default:
3561         if (left) {
3562             tcg_gen_rotl_i32(reg, reg, shift);
3563         } else {
3564             tcg_gen_rotr_i32(reg, reg, shift);
3565         }
3566     }
3567 
3568     /* compute flags */
3569 
3570     switch (size) {
3571     case 8:
3572         tcg_gen_ext8s_i32(reg, reg);
3573         break;
3574     case 16:
3575         tcg_gen_ext16s_i32(reg, reg);
3576         break;
3577     default:
3578         break;
3579     }
3580 
3581     /* QREG_CC_X is not affected */
3582 
3583     tcg_gen_mov_i32(QREG_CC_N, reg);
3584     tcg_gen_mov_i32(QREG_CC_Z, reg);
3585 
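    /*
     * C is the last bit rotated out: after a left rotate it has wrapped
     * around into bit 0, after a right rotate into the (sign-extended)
     * most significant bit.
     */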
3586     if (left) {
3587         tcg_gen_andi_i32(QREG_CC_C, reg, 1);
3588     } else {
3589         tcg_gen_shri_i32(QREG_CC_C, reg, 31);
3590     }
3591 
3592     tcg_gen_movi_i32(QREG_CC_V, 0); /* always cleared */
3593 }
3594 
3595 static void rotate_x_flags(TCGv reg, TCGv X, int size)
3596 {
3597     switch (size) {
3598     case 8:
3599         tcg_gen_ext8s_i32(reg, reg);
3600         break;
3601     case 16:
3602         tcg_gen_ext16s_i32(reg, reg);
3603         break;
3604     default:
3605         break;
3606     }
3607     tcg_gen_mov_i32(QREG_CC_N, reg);
3608     tcg_gen_mov_i32(QREG_CC_Z, reg);
3609     tcg_gen_mov_i32(QREG_CC_X, X);
3610     tcg_gen_mov_i32(QREG_CC_C, X);
3611     tcg_gen_movi_i32(QREG_CC_V, 0);
3612 }
3613 
3614 /* Result of rotate_x() is valid if 0 <= shift <= size */
3615 static TCGv rotate_x(TCGv reg, TCGv shift, int left, int size)
3616 {
3617     TCGv X, shl, shr, shx, sz, zero;
3618 
3619     sz = tcg_constant_i32(size);
3620 
3621     shr = tcg_temp_new();
3622     shl = tcg_temp_new();
3623     shx = tcg_temp_new();
3624     if (left) {
3625         tcg_gen_mov_i32(shl, shift);      /* shl = shift */
3626         tcg_gen_movi_i32(shr, size + 1);
3627         tcg_gen_sub_i32(shr, shr, shift); /* shr = size + 1 - shift */
3628         tcg_gen_subi_i32(shx, shift, 1);  /* shx = shift - 1 */
3629         /* shx = shx < 0 ? size : shx; */
3630         zero = tcg_constant_i32(0);
3631         tcg_gen_movcond_i32(TCG_COND_LT, shx, shx, zero, sz, shx);
3632     } else {
3633         tcg_gen_mov_i32(shr, shift);      /* shr = shift */
3634         tcg_gen_movi_i32(shl, size + 1);
3635         tcg_gen_sub_i32(shl, shl, shift); /* shl = size + 1 - shift */
3636         tcg_gen_sub_i32(shx, sz, shift); /* shx = size - shift */
3637     }
3638 
3639     /* reg = (reg << shl) | (reg >> shr) | (x << shx); */
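    /*
     * Together these rotate the (size + 1)-bit quantity formed by X and
     * the low 'size' bits of reg; the new X is extracted from bit 'size'
     * of the result below.
     */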
3640 
3641     tcg_gen_shl_i32(shl, reg, shl);
3642     tcg_gen_shr_i32(shr, reg, shr);
3643     tcg_gen_or_i32(reg, shl, shr);
3644     tcg_gen_shl_i32(shx, QREG_CC_X, shx);
3645     tcg_gen_or_i32(reg, reg, shx);
3646 
3647     /* X = (reg >> size) & 1 */
3648 
3649     X = tcg_temp_new();
3650     tcg_gen_extract_i32(X, reg, size, 1);
3651 
3652     return X;
3653 }
3654 
3655 /* Result of rotate32_x() is valid if 0 <= shift < 33 */
3656 static TCGv rotate32_x(TCGv reg, TCGv shift, int left)
3657 {
3658     TCGv_i64 t0, shift64;
3659     TCGv X, lo, hi, zero;
3660 
3661     shift64 = tcg_temp_new_i64();
3662     tcg_gen_extu_i32_i64(shift64, shift);
3663 
3664     t0 = tcg_temp_new_i64();
3665 
3666     X = tcg_temp_new();
3667     lo = tcg_temp_new();
3668     hi = tcg_temp_new();
3669 
3670     if (left) {
3671         /* create [reg:X:..] */
3672 
3673         tcg_gen_shli_i32(lo, QREG_CC_X, 31);
3674         tcg_gen_concat_i32_i64(t0, lo, reg);
3675 
3676         /* rotate */
3677 
3678         tcg_gen_rotl_i64(t0, t0, shift64);
3679 
3680         /* result is [reg:..:reg:X] */
3681 
3682         tcg_gen_extr_i64_i32(lo, hi, t0);
3683         tcg_gen_andi_i32(X, lo, 1);
3684 
3685         tcg_gen_shri_i32(lo, lo, 1);
3686     } else {
3687         /* create [..:X:reg] */
3688 
3689         tcg_gen_concat_i32_i64(t0, reg, QREG_CC_X);
3690 
3691         tcg_gen_rotr_i64(t0, t0, shift64);
3692 
3693         /* result is value: [X:reg:..:reg] */
3694 
3695         tcg_gen_extr_i64_i32(lo, hi, t0);
3696 
3697         /* extract X */
3698 
3699         tcg_gen_shri_i32(X, hi, 31);
3700 
3701         /* extract result */
3702 
3703         tcg_gen_shli_i32(hi, hi, 1);
3704     }
3705     tcg_gen_or_i32(lo, lo, hi);
3706 
3707     /* if shift == 0, register and X are not affected */
3708 
3709     zero = tcg_constant_i32(0);
3710     tcg_gen_movcond_i32(TCG_COND_EQ, X, shift, zero, QREG_CC_X, X);
3711     tcg_gen_movcond_i32(TCG_COND_EQ, reg, shift, zero, reg, lo);
3712 
3713     return X;
3714 }
3715 
3716 DISAS_INSN(rotate_im)
3717 {
3718     TCGv shift;
3719     int tmp;
3720     int left = (insn & 0x100);
3721 
3722     tmp = (insn >> 9) & 7;
3723     if (tmp == 0) {
3724         tmp = 8;
3725     }
3726 
3727     shift = tcg_constant_i32(tmp);
3728     if (insn & 8) {
3729         rotate(DREG(insn, 0), shift, left, 32);
3730     } else {
3731         TCGv X = rotate32_x(DREG(insn, 0), shift, left);
3732         rotate_x_flags(DREG(insn, 0), X, 32);
3733     }
3734 
3735     set_cc_op(s, CC_OP_FLAGS);
3736 }
3737 
3738 DISAS_INSN(rotate8_im)
3739 {
3740     int left = (insn & 0x100);
3741     TCGv reg;
3742     TCGv shift;
3743     int tmp;
3744 
3745     reg = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
3746 
3747     tmp = (insn >> 9) & 7;
3748     if (tmp == 0) {
3749         tmp = 8;
3750     }
3751 
3752     shift = tcg_constant_i32(tmp);
3753     if (insn & 8) {
3754         rotate(reg, shift, left, 8);
3755     } else {
3756         TCGv X = rotate_x(reg, shift, left, 8);
3757         rotate_x_flags(reg, X, 8);
3758     }
3759     gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
3760     set_cc_op(s, CC_OP_FLAGS);
3761 }
3762 
3763 DISAS_INSN(rotate16_im)
3764 {
3765     int left = (insn & 0x100);
3766     TCGv reg;
3767     TCGv shift;
3768     int tmp;
3769 
3770     reg = gen_extend(s, DREG(insn, 0), OS_WORD, 0);
3771     tmp = (insn >> 9) & 7;
3772     if (tmp == 0) {
3773         tmp = 8;
3774     }
3775 
3776     shift = tcg_constant_i32(tmp);
3777     if (insn & 8) {
3778         rotate(reg, shift, left, 16);
3779     } else {
3780         TCGv X = rotate_x(reg, shift, left, 16);
3781         rotate_x_flags(reg, X, 16);
3782     }
3783     gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
3784     set_cc_op(s, CC_OP_FLAGS);
3785 }
3786 
3787 DISAS_INSN(rotate_reg)
3788 {
3789     TCGv reg;
3790     TCGv src;
3791     TCGv t0, t1;
3792     int left = (insn & 0x100);
3793 
3794     reg = DREG(insn, 0);
3795     src = DREG(insn, 9);
3796     /* shift in [0..63] */
3797     t0 = tcg_temp_new();
3798     tcg_gen_andi_i32(t0, src, 63);
3799     t1 = tcg_temp_new_i32();
3800     if (insn & 8) {
3801         tcg_gen_andi_i32(t1, src, 31);
3802         rotate(reg, t1, left, 32);
3803         /* if shift == 0, clear C */
3804         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3805                             t0, QREG_CC_V /* 0 */,
3806                             QREG_CC_V /* 0 */, QREG_CC_C);
3807     } else {
3808         TCGv X;
3809         /* modulo 33 */
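        /*
         * ROXL/ROXR rotate the 33-bit value {X:reg}, so the effective
         * count repeats with a period of 33 rather than 32.
         */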
3810         tcg_gen_movi_i32(t1, 33);
3811         tcg_gen_remu_i32(t1, t0, t1);
3812         X = rotate32_x(DREG(insn, 0), t1, left);
3813         rotate_x_flags(DREG(insn, 0), X, 32);
3814     }
3815     set_cc_op(s, CC_OP_FLAGS);
3816 }
3817 
3818 DISAS_INSN(rotate8_reg)
3819 {
3820     TCGv reg;
3821     TCGv src;
3822     TCGv t0, t1;
3823     int left = (insn & 0x100);
3824 
3825     reg = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
3826     src = DREG(insn, 9);
3827     /* shift in [0..63] */
3828     t0 = tcg_temp_new_i32();
3829     tcg_gen_andi_i32(t0, src, 63);
3830     t1 = tcg_temp_new_i32();
3831     if (insn & 8) {
3832         tcg_gen_andi_i32(t1, src, 7);
3833         rotate(reg, t1, left, 8);
3834         /* if shift == 0, clear C */
3835         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3836                             t0, QREG_CC_V /* 0 */,
3837                             QREG_CC_V /* 0 */, QREG_CC_C);
3838     } else {
3839         TCGv X;
3840         /* modulo 9 */
3841         tcg_gen_movi_i32(t1, 9);
3842         tcg_gen_remu_i32(t1, t0, t1);
3843         X = rotate_x(reg, t1, left, 8);
3844         rotate_x_flags(reg, X, 8);
3845     }
3846     gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
3847     set_cc_op(s, CC_OP_FLAGS);
3848 }
3849 
3850 DISAS_INSN(rotate16_reg)
3851 {
3852     TCGv reg;
3853     TCGv src;
3854     TCGv t0, t1;
3855     int left = (insn & 0x100);
3856 
3857     reg = gen_extend(s, DREG(insn, 0), OS_WORD, 0);
3858     src = DREG(insn, 9);
3859     /* shift in [0..63] */
3860     t0 = tcg_temp_new_i32();
3861     tcg_gen_andi_i32(t0, src, 63);
3862     t1 = tcg_temp_new_i32();
3863     if (insn & 8) {
3864         tcg_gen_andi_i32(t1, src, 15);
3865         rotate(reg, t1, left, 16);
3866         /* if shift == 0, clear C */
3867         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3868                             t0, QREG_CC_V /* 0 */,
3869                             QREG_CC_V /* 0 */, QREG_CC_C);
3870     } else {
3871         TCGv X;
3872         /* modulo 17 */
3873         tcg_gen_movi_i32(t1, 17);
3874         tcg_gen_remu_i32(t1, t0, t1);
3875         X = rotate_x(reg, t1, left, 16);
3876         rotate_x_flags(reg, X, 16);
3877     }
3878     gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
3879     set_cc_op(s, CC_OP_FLAGS);
3880 }
3881 
3882 DISAS_INSN(rotate_mem)
3883 {
3884     TCGv src;
3885     TCGv addr;
3886     TCGv shift;
3887     int left = (insn & 0x100);
3888 
3889     SRC_EA(env, src, OS_WORD, 0, &addr);
3890 
3891     shift = tcg_constant_i32(1);
3892     if (insn & 0x0200) {
3893         rotate(src, shift, left, 16);
3894     } else {
3895         TCGv X = rotate_x(src, shift, left, 16);
3896         rotate_x_flags(src, X, 16);
3897     }
3898     DEST_EA(env, insn, OS_WORD, src, &addr);
3899     set_cc_op(s, CC_OP_FLAGS);
3900 }
3901 
3902 DISAS_INSN(bfext_reg)
3903 {
3904     int ext = read_im16(env, s);
3905     int is_sign = insn & 0x200;
3906     TCGv src = DREG(insn, 0);
3907     TCGv dst = DREG(ext, 12);
3908     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
3909     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
3910     int pos = 32 - ofs - len;        /* little bit-endian */
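    /*
     * For example, ofs = 4 and len = 8 select big-endian bits 4..11,
     * i.e. little-endian bits 27..20, giving pos = 20.
     */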
3911     TCGv tmp = tcg_temp_new();
3912     TCGv shift;
3913 
3914     /*
3915      * In general, we're going to rotate the field so that it's at the
3916      * top of the word and then right-shift by the complement of the
3917      * width to extend the field.
3918      */
3919     if (ext & 0x20) {
3920         /* Variable width.  */
3921         if (ext & 0x800) {
3922             /* Variable offset.  */
3923             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
3924             tcg_gen_rotl_i32(tmp, src, tmp);
3925         } else {
3926             tcg_gen_rotli_i32(tmp, src, ofs);
3927         }
3928 
3929         shift = tcg_temp_new();
3930         tcg_gen_neg_i32(shift, DREG(ext, 0));
3931         tcg_gen_andi_i32(shift, shift, 31);
3932         tcg_gen_sar_i32(QREG_CC_N, tmp, shift);
3933         if (is_sign) {
3934             tcg_gen_mov_i32(dst, QREG_CC_N);
3935         } else {
3936             tcg_gen_shr_i32(dst, tmp, shift);
3937         }
3938     } else {
3939         /* Immediate width.  */
3940         if (ext & 0x800) {
3941             /* Variable offset */
3942             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
3943             tcg_gen_rotl_i32(tmp, src, tmp);
3944             src = tmp;
3945             pos = 32 - len;
3946         } else {
3947             /*
3948              * Immediate offset.  If the field doesn't wrap around the
3949              * end of the word, rely on (s)extract completely.
3950              */
3951             if (pos < 0) {
3952                 tcg_gen_rotli_i32(tmp, src, ofs);
3953                 src = tmp;
3954                 pos = 32 - len;
3955             }
3956         }
3957 
3958         tcg_gen_sextract_i32(QREG_CC_N, src, pos, len);
3959         if (is_sign) {
3960             tcg_gen_mov_i32(dst, QREG_CC_N);
3961         } else {
3962             tcg_gen_extract_i32(dst, src, pos, len);
3963         }
3964     }
3965 
3966     set_cc_op(s, CC_OP_LOGIC);
3967 }
3968 
3969 DISAS_INSN(bfext_mem)
3970 {
3971     int ext = read_im16(env, s);
3972     int is_sign = insn & 0x200;
3973     TCGv dest = DREG(ext, 12);
3974     TCGv addr, len, ofs;
3975 
3976     addr = gen_lea(env, s, insn, OS_UNSIZED);
3977     if (IS_NULL_QREG(addr)) {
3978         gen_addr_fault(s);
3979         return;
3980     }
3981 
3982     if (ext & 0x20) {
3983         len = DREG(ext, 0);
3984     } else {
3985         len = tcg_constant_i32(extract32(ext, 0, 5));
3986     }
3987     if (ext & 0x800) {
3988         ofs = DREG(ext, 6);
3989     } else {
3990         ofs = tcg_constant_i32(extract32(ext, 6, 5));
3991     }
3992 
3993     if (is_sign) {
3994         gen_helper_bfexts_mem(dest, tcg_env, addr, ofs, len);
3995         tcg_gen_mov_i32(QREG_CC_N, dest);
3996     } else {
3997         TCGv_i64 tmp = tcg_temp_new_i64();
3998         gen_helper_bfextu_mem(tmp, tcg_env, addr, ofs, len);
3999         tcg_gen_extr_i64_i32(dest, QREG_CC_N, tmp);
4000     }
4001     set_cc_op(s, CC_OP_LOGIC);
4002 }
4003 
4004 DISAS_INSN(bfop_reg)
4005 {
4006     int ext = read_im16(env, s);
4007     TCGv src = DREG(insn, 0);
4008     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4009     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4010     TCGv mask, tofs = NULL, tlen = NULL;
4011     bool is_bfffo = (insn & 0x0f00) == 0x0d00;
4012 
4013     if ((ext & 0x820) == 0) {
4014         /* Immediate width and offset.  */
4015         uint32_t maski = 0x7fffffffu >> (len - 1);
4016         if (ofs + len <= 32) {
4017             tcg_gen_shli_i32(QREG_CC_N, src, ofs);
4018         } else {
4019             tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
4020         }
4021         tcg_gen_andi_i32(QREG_CC_N, QREG_CC_N, ~maski);
4022 
4023         mask = tcg_constant_i32(ror32(maski, ofs));
4024         if (is_bfffo) {
4025             tofs = tcg_constant_i32(ofs);
4026             tlen = tcg_constant_i32(len);
4027         }
4028     } else {
4029         TCGv tmp = tcg_temp_new();
4030 
4031         mask = tcg_temp_new();
4032         if (ext & 0x20) {
4033             /* Variable width */
4034             tcg_gen_subi_i32(tmp, DREG(ext, 0), 1);
4035             tcg_gen_andi_i32(tmp, tmp, 31);
4036             tcg_gen_shr_i32(mask, tcg_constant_i32(0x7fffffffu), tmp);
4037             if (is_bfffo) {
4038                 tlen = tcg_temp_new();
4039                 tcg_gen_addi_i32(tlen, tmp, 1);
4040             }
4041         } else {
4042             /* Immediate width */
4043             tcg_gen_movi_i32(mask, 0x7fffffffu >> (len - 1));
4044             if (is_bfffo) {
4045                 tlen = tcg_constant_i32(len);
4046             }
4047         }
4048 
4049         if (ext & 0x800) {
4050             /* Variable offset */
4051             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
4052             tcg_gen_rotl_i32(QREG_CC_N, src, tmp);
4053             tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
4054             tcg_gen_rotr_i32(mask, mask, tmp);
4055             if (is_bfffo) {
4056                 tofs = tmp;
4057             }
4058         } else {
4059             /* Immediate offset (and variable width) */
4060             tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
4061             tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
4062             tcg_gen_rotri_i32(mask, mask, ofs);
4063             if (is_bfffo) {
4064                 tofs = tcg_constant_i32(ofs);
4065             }
4066         }
4067     }
4068     set_cc_op(s, CC_OP_LOGIC);
4069 
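    /*
     * Here 'mask' has a 0 in every bit of the selected field and a 1
     * everywhere else, so bfchg is eqv (invert the field), bfclr is
     * and (clear it) and bfset is orc (set it).
     */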
4070     switch (insn & 0x0f00) {
4071     case 0x0a00: /* bfchg */
4072         tcg_gen_eqv_i32(src, src, mask);
4073         break;
4074     case 0x0c00: /* bfclr */
4075         tcg_gen_and_i32(src, src, mask);
4076         break;
4077     case 0x0d00: /* bfffo */
4078         gen_helper_bfffo_reg(DREG(ext, 12), QREG_CC_N, tofs, tlen);
4079         break;
4080     case 0x0e00: /* bfset */
4081         tcg_gen_orc_i32(src, src, mask);
4082         break;
4083     case 0x0800: /* bftst */
4084         /* flags already set; no other work to do.  */
4085         break;
4086     default:
4087         g_assert_not_reached();
4088     }
4089 }
4090 
4091 DISAS_INSN(bfop_mem)
4092 {
4093     int ext = read_im16(env, s);
4094     TCGv addr, len, ofs;
4095     TCGv_i64 t64;
4096 
4097     addr = gen_lea(env, s, insn, OS_UNSIZED);
4098     if (IS_NULL_QREG(addr)) {
4099         gen_addr_fault(s);
4100         return;
4101     }
4102 
4103     if (ext & 0x20) {
4104         len = DREG(ext, 0);
4105     } else {
4106         len = tcg_constant_i32(extract32(ext, 0, 5));
4107     }
4108     if (ext & 0x800) {
4109         ofs = DREG(ext, 6);
4110     } else {
4111         ofs = tcg_constant_i32(extract32(ext, 6, 5));
4112     }
4113 
4114     switch (insn & 0x0f00) {
4115     case 0x0a00: /* bfchg */
4116         gen_helper_bfchg_mem(QREG_CC_N, tcg_env, addr, ofs, len);
4117         break;
4118     case 0x0c00: /* bfclr */
4119         gen_helper_bfclr_mem(QREG_CC_N, tcg_env, addr, ofs, len);
4120         break;
4121     case 0x0d00: /* bfffo */
4122         t64 = tcg_temp_new_i64();
4123         gen_helper_bfffo_mem(t64, tcg_env, addr, ofs, len);
4124         tcg_gen_extr_i64_i32(DREG(ext, 12), QREG_CC_N, t64);
4125         break;
4126     case 0x0e00: /* bfset */
4127         gen_helper_bfset_mem(QREG_CC_N, tcg_env, addr, ofs, len);
4128         break;
4129     case 0x0800: /* bftst */
4130         gen_helper_bfexts_mem(QREG_CC_N, tcg_env, addr, ofs, len);
4131         break;
4132     default:
4133         g_assert_not_reached();
4134     }
4135     set_cc_op(s, CC_OP_LOGIC);
4136 }
4137 
4138 DISAS_INSN(bfins_reg)
4139 {
4140     int ext = read_im16(env, s);
4141     TCGv dst = DREG(insn, 0);
4142     TCGv src = DREG(ext, 12);
4143     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4144     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4145     int pos = 32 - ofs - len;        /* little bit-endian */
4146     TCGv tmp;
4147 
4148     tmp = tcg_temp_new();
4149 
4150     if (ext & 0x20) {
4151         /* Variable width */
4152         tcg_gen_neg_i32(tmp, DREG(ext, 0));
4153         tcg_gen_andi_i32(tmp, tmp, 31);
4154         tcg_gen_shl_i32(QREG_CC_N, src, tmp);
4155     } else {
4156         /* Immediate width */
4157         tcg_gen_shli_i32(QREG_CC_N, src, 32 - len);
4158     }
4159     set_cc_op(s, CC_OP_LOGIC);
4160 
4161     /* Immediate width and offset */
4162     if ((ext & 0x820) == 0) {
4163         /* Check for suitability for deposit.  */
4164         if (pos >= 0) {
4165             tcg_gen_deposit_i32(dst, dst, src, pos, len);
4166         } else {
4167             uint32_t maski = -2U << (len - 1);
4168             uint32_t roti = (ofs + len) & 31;
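            /*
             * Keep only the low 'len' bits of src, then rotate them
             * right by ofs + len so they land at big-endian offset
             * 'ofs', wrapping around the end of the word.
             */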
4169             tcg_gen_andi_i32(tmp, src, ~maski);
4170             tcg_gen_rotri_i32(tmp, tmp, roti);
4171             tcg_gen_andi_i32(dst, dst, ror32(maski, roti));
4172             tcg_gen_or_i32(dst, dst, tmp);
4173         }
4174     } else {
4175         TCGv mask = tcg_temp_new();
4176         TCGv rot = tcg_temp_new();
4177 
4178         if (ext & 0x20) {
4179             /* Variable width */
4180             tcg_gen_subi_i32(rot, DREG(ext, 0), 1);
4181             tcg_gen_andi_i32(rot, rot, 31);
4182             tcg_gen_movi_i32(mask, -2);
4183             tcg_gen_shl_i32(mask, mask, rot);
4184             tcg_gen_mov_i32(rot, DREG(ext, 0));
4185             tcg_gen_andc_i32(tmp, src, mask);
4186         } else {
4187             /* Immediate width (variable offset) */
4188             uint32_t maski = -2U << (len - 1);
4189             tcg_gen_andi_i32(tmp, src, ~maski);
4190             tcg_gen_movi_i32(mask, maski);
4191             tcg_gen_movi_i32(rot, len & 31);
4192         }
4193         if (ext & 0x800) {
4194             /* Variable offset */
4195             tcg_gen_add_i32(rot, rot, DREG(ext, 6));
4196         } else {
4197             /* Immediate offset (variable width) */
4198             tcg_gen_addi_i32(rot, rot, ofs);
4199         }
4200         tcg_gen_andi_i32(rot, rot, 31);
4201         tcg_gen_rotr_i32(mask, mask, rot);
4202         tcg_gen_rotr_i32(tmp, tmp, rot);
4203         tcg_gen_and_i32(dst, dst, mask);
4204         tcg_gen_or_i32(dst, dst, tmp);
4205     }
4206 }
4207 
4208 DISAS_INSN(bfins_mem)
4209 {
4210     int ext = read_im16(env, s);
4211     TCGv src = DREG(ext, 12);
4212     TCGv addr, len, ofs;
4213 
4214     addr = gen_lea(env, s, insn, OS_UNSIZED);
4215     if (IS_NULL_QREG(addr)) {
4216         gen_addr_fault(s);
4217         return;
4218     }
4219 
4220     if (ext & 0x20) {
4221         len = DREG(ext, 0);
4222     } else {
4223         len = tcg_constant_i32(extract32(ext, 0, 5));
4224     }
4225     if (ext & 0x800) {
4226         ofs = DREG(ext, 6);
4227     } else {
4228         ofs = tcg_constant_i32(extract32(ext, 6, 5));
4229     }
4230 
4231     gen_helper_bfins_mem(QREG_CC_N, tcg_env, addr, src, ofs, len);
4232     set_cc_op(s, CC_OP_LOGIC);
4233 }
4234 
4235 DISAS_INSN(ff1)
4236 {
4237     TCGv reg;
4238     reg = DREG(insn, 0);
4239     gen_logic_cc(s, reg, OS_LONG);
4240     gen_helper_ff1(reg, reg);
4241 }
4242 
4243 DISAS_INSN(chk)
4244 {
4245     TCGv src, reg;
4246     int opsize;
4247 
4248     switch ((insn >> 7) & 3) {
4249     case 3:
4250         opsize = OS_WORD;
4251         break;
4252     case 2:
4253         if (m68k_feature(env, M68K_FEATURE_CHK2)) {
4254             opsize = OS_LONG;
4255             break;
4256         }
4257         /* fallthru */
4258     default:
4259         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4260         return;
4261     }
4262     SRC_EA(env, src, opsize, 1, NULL);
4263     reg = gen_extend(s, DREG(insn, 9), opsize, 1);
4264 
4265     gen_flush_flags(s);
4266     gen_helper_chk(tcg_env, reg, src);
4267 }
4268 
4269 DISAS_INSN(chk2)
4270 {
4271     uint16_t ext;
4272     TCGv addr1, addr2, bound1, bound2, reg;
4273     int opsize;
4274 
4275     switch ((insn >> 9) & 3) {
4276     case 0:
4277         opsize = OS_BYTE;
4278         break;
4279     case 1:
4280         opsize = OS_WORD;
4281         break;
4282     case 2:
4283         opsize = OS_LONG;
4284         break;
4285     default:
4286         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4287         return;
4288     }
4289 
4290     ext = read_im16(env, s);
4291     if ((ext & 0x0800) == 0) {
4292         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4293         return;
4294     }
4295 
4296     addr1 = gen_lea(env, s, insn, OS_UNSIZED);
4297     addr2 = tcg_temp_new();
4298     tcg_gen_addi_i32(addr2, addr1, opsize_bytes(opsize));
4299 
4300     bound1 = gen_load(s, opsize, addr1, 1, IS_USER(s));
4301     bound2 = gen_load(s, opsize, addr2, 1, IS_USER(s));
4302 
4303     reg = tcg_temp_new();
4304     if (ext & 0x8000) {
4305         tcg_gen_mov_i32(reg, AREG(ext, 12));
4306     } else {
4307         gen_ext(reg, DREG(ext, 12), opsize, 1);
4308     }
4309 
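    /*
     * bound1 and bound2 are the lower and upper bounds loaded from
     * consecutive memory locations; 'reg' is the value being checked,
     * sign-extended when it comes from a data register.
     */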
4310     gen_flush_flags(s);
4311     gen_helper_chk2(tcg_env, reg, bound1, bound2);
4312 }
4313 
4314 static void m68k_copy_line(TCGv dst, TCGv src, int index)
4315 {
4316     TCGv addr;
4317     TCGv_i64 t0, t1;
4318 
4319     addr = tcg_temp_new();
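    /*
     * MOVE16 copies one 16-byte line: both addresses are forced to
     * 16-byte alignment and the data is moved as two 64-bit halves.
     */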
4320 
4321     t0 = tcg_temp_new_i64();
4322     t1 = tcg_temp_new_i64();
4323 
4324     tcg_gen_andi_i32(addr, src, ~15);
4325     tcg_gen_qemu_ld_i64(t0, addr, index, MO_TEUQ);
4326     tcg_gen_addi_i32(addr, addr, 8);
4327     tcg_gen_qemu_ld_i64(t1, addr, index, MO_TEUQ);
4328 
4329     tcg_gen_andi_i32(addr, dst, ~15);
4330     tcg_gen_qemu_st_i64(t0, addr, index, MO_TEUQ);
4331     tcg_gen_addi_i32(addr, addr, 8);
4332     tcg_gen_qemu_st_i64(t1, addr, index, MO_TEUQ);
4333 }
4334 
4335 DISAS_INSN(move16_reg)
4336 {
4337     int index = IS_USER(s);
4338     TCGv tmp;
4339     uint16_t ext;
4340 
4341     ext = read_im16(env, s);
4342     if ((ext & (1 << 15)) == 0) {
4343         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4344     }
4345 
4346     m68k_copy_line(AREG(ext, 12), AREG(insn, 0), index);
4347 
4348     /* Ax can be Ay, so save Ay before incrementing Ax */
4349     tmp = tcg_temp_new();
4350     tcg_gen_mov_i32(tmp, AREG(ext, 12));
4351     tcg_gen_addi_i32(AREG(insn, 0), AREG(insn, 0), 16);
4352     tcg_gen_addi_i32(AREG(ext, 12), tmp, 16);
4353 }
4354 
4355 DISAS_INSN(move16_mem)
4356 {
4357     int index = IS_USER(s);
4358     TCGv reg, addr;
4359 
4360     reg = AREG(insn, 0);
4361     addr = tcg_constant_i32(read_im32(env, s));
4362 
4363     if ((insn >> 3) & 1) {
4364         /* MOVE16 (xxx).L, (Ay) */
4365         m68k_copy_line(reg, addr, index);
4366     } else {
4367         /* MOVE16 (Ay), (xxx).L */
4368         m68k_copy_line(addr, reg, index);
4369     }
4370 
4371     if (((insn >> 3) & 2) == 0) {
4372         /* (Ay)+ */
4373         tcg_gen_addi_i32(reg, reg, 16);
4374     }
4375 }
4376 
4377 DISAS_INSN(strldsr)
4378 {
4379     uint16_t ext;
4380     uint32_t addr;
4381 
4382     addr = s->pc - 2;
4383     ext = read_im16(env, s);
4384     if (ext != 0x46FC) {
4385         gen_exception(s, addr, EXCP_ILLEGAL);
4386         return;
4387     }
4388     ext = read_im16(env, s);
4389     if (IS_USER(s) || (ext & SR_S) == 0) {
4390         gen_exception(s, addr, EXCP_PRIVILEGE);
4391         return;
4392     }
4393     gen_push(s, gen_get_sr(s));
4394     gen_set_sr_im(s, ext, 0);
4395     gen_exit_tb(s);
4396 }
4397 
4398 DISAS_INSN(move_from_sr)
4399 {
4400     TCGv sr;
4401 
4402     if (IS_USER(s) && m68k_feature(env, M68K_FEATURE_MOVEFROMSR_PRIV)) {
4403         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4404         return;
4405     }
4406     sr = gen_get_sr(s);
4407     DEST_EA(env, insn, OS_WORD, sr, NULL);
4408 }
4409 
4410 #if !defined(CONFIG_USER_ONLY)
4411 DISAS_INSN(moves)
4412 {
4413     int opsize;
4414     uint16_t ext;
4415     TCGv reg;
4416     TCGv addr;
4417     int extend;
4418 
4419     if (IS_USER(s)) {
4420         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4421         return;
4422     }
4423 
4424     ext = read_im16(env, s);
4425 
4426     opsize = insn_opsize(insn);
4427 
4428     if (ext & 0x8000) {
4429         /* address register */
4430         reg = AREG(ext, 12);
4431         extend = 1;
4432     } else {
4433         /* data register */
4434         reg = DREG(ext, 12);
4435         extend = 0;
4436     }
4437 
4438     addr = gen_lea(env, s, insn, opsize);
4439     if (IS_NULL_QREG(addr)) {
4440         gen_addr_fault(s);
4441         return;
4442     }
4443 
4444     if (ext & 0x0800) {
4445         /* from reg to ea */
4446         gen_store(s, opsize, addr, reg, DFC_INDEX(s));
4447     } else {
4448         /* from ea to reg */
4449         TCGv tmp = gen_load(s, opsize, addr, 0, SFC_INDEX(s));
4450         if (extend) {
4451             gen_ext(reg, tmp, opsize, 1);
4452         } else {
4453             gen_partset_reg(opsize, reg, tmp);
4454         }
4455     }
4456     switch (extract32(insn, 3, 3)) {
4457     case 3: /* Indirect postincrement.  */
4458         tcg_gen_addi_i32(AREG(insn, 0), addr,
4459                          REG(insn, 0) == 7 && opsize == OS_BYTE
4460                          ? 2
4461                          : opsize_bytes(opsize));
4462         break;
4463     case 4: /* Indirect predecrement.  */
4464         tcg_gen_mov_i32(AREG(insn, 0), addr);
4465         break;
4466     }
4467 }
4468 
4469 DISAS_INSN(move_to_sr)
4470 {
4471     if (IS_USER(s)) {
4472         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4473         return;
4474     }
4475     gen_move_to_sr(env, s, insn, false);
4476     gen_exit_tb(s);
4477 }
4478 
4479 DISAS_INSN(move_from_usp)
4480 {
4481     if (IS_USER(s)) {
4482         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4483         return;
4484     }
4485     tcg_gen_ld_i32(AREG(insn, 0), tcg_env,
4486                    offsetof(CPUM68KState, sp[M68K_USP]));
4487 }
4488 
4489 DISAS_INSN(move_to_usp)
4490 {
4491     if (IS_USER(s)) {
4492         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4493         return;
4494     }
4495     tcg_gen_st_i32(AREG(insn, 0), tcg_env,
4496                    offsetof(CPUM68KState, sp[M68K_USP]));
4497 }
4498 
4499 DISAS_INSN(halt)
4500 {
4501     if (IS_USER(s)) {
4502         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4503         return;
4504     }
4505     if (semihosting_test(s)) {
4506         gen_exception(s, s->pc, EXCP_SEMIHOSTING);
4507         return;
4508     }
4509     tcg_gen_movi_i32(cpu_halted, 1);
4510     gen_exception(s, s->pc, EXCP_HLT);
4511 }
4512 
4513 DISAS_INSN(stop)
4514 {
4515     uint16_t ext;
4516 
4517     if (IS_USER(s)) {
4518         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4519         return;
4520     }
4521 
4522     ext = read_im16(env, s);
4523 
4524     gen_set_sr_im(s, ext, 0);
4525     tcg_gen_movi_i32(cpu_halted, 1);
4526     gen_exception(s, s->pc, EXCP_HLT);
4527 }
4528 
4529 DISAS_INSN(rte)
4530 {
4531     if (IS_USER(s)) {
4532         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4533         return;
4534     }
4535     gen_exception(s, s->base.pc_next, EXCP_RTE);
4536 }
4537 
4538 DISAS_INSN(cf_movec)
4539 {
4540     uint16_t ext;
4541     TCGv reg;
4542 
4543     if (IS_USER(s)) {
4544         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4545         return;
4546     }
4547 
4548     ext = read_im16(env, s);
4549 
4550     if (ext & 0x8000) {
4551         reg = AREG(ext, 12);
4552     } else {
4553         reg = DREG(ext, 12);
4554     }
4555     gen_helper_cf_movec_to(tcg_env, tcg_constant_i32(ext & 0xfff), reg);
4556     gen_exit_tb(s);
4557 }
4558 
4559 DISAS_INSN(m68k_movec)
4560 {
4561     uint16_t ext;
4562     TCGv reg, creg;
4563 
4564     if (IS_USER(s)) {
4565         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4566         return;
4567     }
4568 
4569     ext = read_im16(env, s);
4570 
4571     if (ext & 0x8000) {
4572         reg = AREG(ext, 12);
4573     } else {
4574         reg = DREG(ext, 12);
4575     }
4576     creg = tcg_constant_i32(ext & 0xfff);
4577     if (insn & 1) {
4578         gen_helper_m68k_movec_to(tcg_env, creg, reg);
4579     } else {
4580         gen_helper_m68k_movec_from(reg, tcg_env, creg);
4581     }
4582     gen_exit_tb(s);
4583 }
4584 
4585 DISAS_INSN(intouch)
4586 {
4587     if (IS_USER(s)) {
4588         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4589         return;
4590     }
4591     /* ICache fetch.  Implement as no-op.  */
4592 }
4593 
4594 DISAS_INSN(cpushl)
4595 {
4596     if (IS_USER(s)) {
4597         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4598         return;
4599     }
4600     /* Cache push/invalidate.  Implement as no-op.  */
4601 }
4602 
4603 DISAS_INSN(cpush)
4604 {
4605     if (IS_USER(s)) {
4606         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4607         return;
4608     }
4609     /* Cache push/invalidate.  Implement as no-op.  */
4610 }
4611 
4612 DISAS_INSN(cinv)
4613 {
4614     if (IS_USER(s)) {
4615         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4616         return;
4617     }
4618     /* Invalidate cache line.  Implement as no-op.  */
4619 }
4620 
4621 #if !defined(CONFIG_USER_ONLY)
4622 DISAS_INSN(pflush)
4623 {
4624     TCGv opmode;
4625 
4626     if (IS_USER(s)) {
4627         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4628         return;
4629     }
4630 
4631     opmode = tcg_constant_i32((insn >> 3) & 3);
4632     gen_helper_pflush(tcg_env, AREG(insn, 0), opmode);
4633 }
4634 
4635 DISAS_INSN(ptest)
4636 {
4637     TCGv is_read;
4638 
4639     if (IS_USER(s)) {
4640         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4641         return;
4642     }
4643     is_read = tcg_constant_i32((insn >> 5) & 1);
4644     gen_helper_ptest(tcg_env, AREG(insn, 0), is_read);
4645 }
4646 #endif
4647 
4648 DISAS_INSN(wddata)
4649 {
4650     gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4651 }
4652 
4653 DISAS_INSN(wdebug)
4654 {
4655     if (IS_USER(s)) {
4656         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4657         return;
4658     }
4659     /* TODO: Implement wdebug.  */
4660     cpu_abort(env_cpu(env), "WDEBUG not implemented");
4661 }
4662 #endif
4663 
4664 DISAS_INSN(trap)
4665 {
4666     gen_exception(s, s->pc, EXCP_TRAP0 + (insn & 0xf));
4667 }
4668 
4669 static void do_trapcc(DisasContext *s, DisasCompare *c)
4670 {
4671     if (c->tcond != TCG_COND_NEVER) {
4672         TCGLabel *over = NULL;
4673 
4674         update_cc_op(s);
4675 
4676         if (c->tcond != TCG_COND_ALWAYS) {
4677             /* Jump over if !c. */
4678             over = gen_new_label();
4679             tcg_gen_brcond_i32(tcg_invert_cond(c->tcond), c->v1, c->v2, over);
4680         }
4681 
4682         tcg_gen_movi_i32(QREG_PC, s->pc);
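        /*
         * Presumably QREG_PC must point past the insn and its operand,
         * while the format-2 frame records the address of the trapping
         * instruction itself.
         */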
4683         gen_raise_exception_format2(s, EXCP_TRAPCC, s->base.pc_next);
4684 
4685         if (over != NULL) {
4686             gen_set_label(over);
4687             s->base.is_jmp = DISAS_NEXT;
4688         }
4689     }
4690 }
4691 
4692 DISAS_INSN(trapcc)
4693 {
4694     DisasCompare c;
4695 
4696     /* Consume and discard the immediate operand. */
4697     switch (extract32(insn, 0, 3)) {
4698     case 2: /* trapcc.w */
4699         (void)read_im16(env, s);
4700         break;
4701     case 3: /* trapcc.l */
4702         (void)read_im32(env, s);
4703         break;
4704     case 4: /* trapcc (no operand) */
4705         break;
4706     default:
4707         /* trapcc registered with only valid opmodes */
4708         g_assert_not_reached();
4709     }
4710 
4711     gen_cc_cond(&c, s, extract32(insn, 8, 4));
4712     do_trapcc(s, &c);
4713 }
4714 
4715 DISAS_INSN(trapv)
4716 {
4717     DisasCompare c;
4718 
4719     gen_cc_cond(&c, s, 9); /* V set */
4720     do_trapcc(s, &c);
4721 }
4722 
4723 static void gen_load_fcr(DisasContext *s, TCGv res, int reg)
4724 {
4725     switch (reg) {
4726     case M68K_FPIAR:
4727         tcg_gen_movi_i32(res, 0);
4728         break;
4729     case M68K_FPSR:
4730         gen_helper_get_fpsr(res, tcg_env);
4731         break;
4732     case M68K_FPCR:
4733         tcg_gen_ld_i32(res, tcg_env, offsetof(CPUM68KState, fpcr));
4734         break;
4735     }
4736 }
4737 
4738 static void gen_store_fcr(DisasContext *s, TCGv val, int reg)
4739 {
4740     switch (reg) {
4741     case M68K_FPIAR:
4742         break;
4743     case M68K_FPSR:
4744         gen_helper_set_fpsr(tcg_env, val);
4745         break;
4746     case M68K_FPCR:
4747         gen_helper_set_fpcr(tcg_env, val);
4748         break;
4749     }
4750 }
4751 
4752 static void gen_qemu_store_fcr(DisasContext *s, TCGv addr, int reg)
4753 {
4754     int index = IS_USER(s);
4755     TCGv tmp;
4756 
4757     tmp = tcg_temp_new();
4758     gen_load_fcr(s, tmp, reg);
4759     tcg_gen_qemu_st_tl(tmp, addr, index, MO_TEUL);
4760 }
4761 
4762 static void gen_qemu_load_fcr(DisasContext *s, TCGv addr, int reg)
4763 {
4764     int index = IS_USER(s);
4765     TCGv tmp;
4766 
4767     tmp = tcg_temp_new();
4768     tcg_gen_qemu_ld_tl(tmp, addr, index, MO_TEUL);
4769     gen_store_fcr(s, tmp, reg);
4770 }
4771 
4772 
4773 static void gen_op_fmove_fcr(CPUM68KState *env, DisasContext *s,
4774                              uint32_t insn, uint32_t ext)
4775 {
4776     int mask = (ext >> 10) & 7;
4777     int is_write = (ext >> 13) & 1;
4778     int mode = extract32(insn, 3, 3);
4779     int i;
4780     TCGv addr, tmp;
4781 
4782     switch (mode) {
4783     case 0: /* Dn */
4784         if (mask != M68K_FPIAR && mask != M68K_FPSR && mask != M68K_FPCR) {
4785             gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4786             return;
4787         }
4788         if (is_write) {
4789             gen_load_fcr(s, DREG(insn, 0), mask);
4790         } else {
4791             gen_store_fcr(s, DREG(insn, 0), mask);
4792         }
4793         return;
4794     case 1: /* An, only with FPIAR */
4795         if (mask != M68K_FPIAR) {
4796             gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4797             return;
4798         }
4799         if (is_write) {
4800             gen_load_fcr(s, AREG(insn, 0), mask);
4801         } else {
4802             gen_store_fcr(s, AREG(insn, 0), mask);
4803         }
4804         return;
4805     case 7: /* Immediate */
4806         if (REG(insn, 0) == 4) {
4807             if (is_write ||
4808                 (mask != M68K_FPIAR && mask != M68K_FPSR &&
4809                  mask != M68K_FPCR)) {
4810                 gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4811                 return;
4812             }
4813             tmp = tcg_constant_i32(read_im32(env, s));
4814             gen_store_fcr(s, tmp, mask);
4815             return;
4816         }
4817         break;
4818     default:
4819         break;
4820     }
4821 
4822     tmp = gen_lea(env, s, insn, OS_LONG);
4823     if (IS_NULL_QREG(tmp)) {
4824         gen_addr_fault(s);
4825         return;
4826     }
4827 
4828     addr = tcg_temp_new();
4829     tcg_gen_mov_i32(addr, tmp);
4830 
4831     /*
4832      * mask:
4833      *
4834      * 0b100 Floating-Point Control Register
4835      * 0b010 Floating-Point Status Register
4836      * 0b001 Floating-Point Instruction Address Register
4837      *
4838      */
4839 
4840     if (is_write && mode == 4) {
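    /*
     * For the -(An) form, walk from FPCR down to FPIAR, storing at
     * successively lower addresses and writing the final address back
     * to An; otherwise walk the other way through increasing addresses,
     * updating An only for the (An)+ form (mode 3).
     */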
4841         for (i = 2; i >= 0; i--, mask >>= 1) {
4842             if (mask & 1) {
4843                 gen_qemu_store_fcr(s, addr, 1 << i);
4844                 if (mask != 1) {
4845                     tcg_gen_subi_i32(addr, addr, opsize_bytes(OS_LONG));
4846                 }
4847             }
4848         }
4849         tcg_gen_mov_i32(AREG(insn, 0), addr);
4850     } else {
4851         for (i = 0; i < 3; i++, mask >>= 1) {
4852             if (mask & 1) {
4853                 if (is_write) {
4854                     gen_qemu_store_fcr(s, addr, 1 << i);
4855                 } else {
4856                     gen_qemu_load_fcr(s, addr, 1 << i);
4857                 }
4858                 if (mask != 1 || mode == 3) {
4859                     tcg_gen_addi_i32(addr, addr, opsize_bytes(OS_LONG));
4860                 }
4861             }
4862         }
4863         if (mode == 3) {
4864             tcg_gen_mov_i32(AREG(insn, 0), addr);
4865         }
4866     }
4867 }
4868 
4869 static void gen_op_fmovem(CPUM68KState *env, DisasContext *s,
4870                           uint32_t insn, uint32_t ext)
4871 {
4872     int opsize;
4873     TCGv addr, tmp;
4874     int mode = (ext >> 11) & 0x3;
4875     int is_load = ((ext & 0x2000) == 0);
4876 
4877     if (m68k_feature(s->env, M68K_FEATURE_FPU)) {
4878         opsize = OS_EXTENDED;
4879     } else {
4880         opsize = OS_DOUBLE;  /* FIXME */
4881     }
4882 
4883     addr = gen_lea(env, s, insn, opsize);
4884     if (IS_NULL_QREG(addr)) {
4885         gen_addr_fault(s);
4886         return;
4887     }
4888 
4889     tmp = tcg_temp_new();
4890     if (mode & 0x1) {
4891         /* Dynamic register list */
4892         tcg_gen_ext8u_i32(tmp, DREG(ext, 4));
4893     } else {
4894         /* Static register list */
4895         tcg_gen_movi_i32(tmp, ext & 0xff);
4896     }
4897 
4898     if (!is_load && (mode & 2) == 0) {
4899         /*
4900          * predecrement addressing mode
4901          * only available to store register to memory
4902          */
4903         if (opsize == OS_EXTENDED) {
4904             gen_helper_fmovemx_st_predec(tmp, tcg_env, addr, tmp);
4905         } else {
4906             gen_helper_fmovemd_st_predec(tmp, tcg_env, addr, tmp);
4907         }
4908     } else {
4909         /* postincrement addressing mode */
4910         if (opsize == OS_EXTENDED) {
4911             if (is_load) {
4912                 gen_helper_fmovemx_ld_postinc(tmp, tcg_env, addr, tmp);
4913             } else {
4914                 gen_helper_fmovemx_st_postinc(tmp, tcg_env, addr, tmp);
4915             }
4916         } else {
4917             if (is_load) {
4918                 gen_helper_fmovemd_ld_postinc(tmp, tcg_env, addr, tmp);
4919             } else {
4920                 gen_helper_fmovemd_st_postinc(tmp, tcg_env, addr, tmp);
4921             }
4922         }
4923     }
4924     if ((insn & 070) == 030 || (insn & 070) == 040) {
4925         tcg_gen_mov_i32(AREG(insn, 0), tmp);
4926     }
4927 }
4928 
4929 /*
4930  * ??? FP exceptions are not implemented.  Most exceptions are deferred until
4931  * immediately before the next FP instruction is executed.
4932  */
4933 DISAS_INSN(fpu)
4934 {
4935     uint16_t ext;
4936     int opmode;
4937     int opsize;
4938     TCGv_ptr cpu_src, cpu_dest;
4939 
4940     ext = read_im16(env, s);
4941     opmode = ext & 0x7f;
4942     switch ((ext >> 13) & 7) {
4943     case 0:
4944         break;
4945     case 1:
4946         goto undef;
4947     case 2:
4948         if (insn == 0xf200 && (ext & 0xfc00) == 0x5c00) {
4949             /* fmovecr */
4950             TCGv rom_offset = tcg_constant_i32(opmode);
4951             cpu_dest = gen_fp_ptr(REG(ext, 7));
4952             gen_helper_fconst(tcg_env, cpu_dest, rom_offset);
4953             return;
4954         }
4955         break;
4956     case 3: /* fmove out */
4957         cpu_src = gen_fp_ptr(REG(ext, 7));
4958         opsize = ext_opsize(ext, 10);
4959         if (gen_ea_fp(env, s, insn, opsize, cpu_src,
4960                       EA_STORE, IS_USER(s)) == -1) {
4961             gen_addr_fault(s);
4962         }
4963         gen_helper_ftst(tcg_env, cpu_src);
4964         return;
4965     case 4: /* fmove to control register.  */
4966     case 5: /* fmove from control register.  */
4967         gen_op_fmove_fcr(env, s, insn, ext);
4968         return;
4969     case 6: /* fmovem */
4970     case 7:
4971         if ((ext & 0x1000) == 0 && !m68k_feature(s->env, M68K_FEATURE_FPU)) {
4972             goto undef;
4973         }
4974         gen_op_fmovem(env, s, insn, ext);
4975         return;
4976     }
4977     if (ext & (1 << 14)) {
4978         /* Source effective address.  */
4979         opsize = ext_opsize(ext, 10);
4980         cpu_src = gen_fp_result_ptr();
4981         if (gen_ea_fp(env, s, insn, opsize, cpu_src,
4982                       EA_LOADS, IS_USER(s)) == -1) {
4983             gen_addr_fault(s);
4984             return;
4985         }
4986     } else {
4987         /* Source register.  */
4988         opsize = OS_EXTENDED;
4989         cpu_src = gen_fp_ptr(REG(ext, 10));
4990     }
4991     cpu_dest = gen_fp_ptr(REG(ext, 7));
4992     switch (opmode) {
4993     case 0: /* fmove */
4994         gen_fp_move(cpu_dest, cpu_src);
4995         break;
4996     case 0x40: /* fsmove */
4997         gen_helper_fsround(tcg_env, cpu_dest, cpu_src);
4998         break;
4999     case 0x44: /* fdmove */
5000         gen_helper_fdround(tcg_env, cpu_dest, cpu_src);
5001         break;
5002     case 1: /* fint */
5003         gen_helper_firound(tcg_env, cpu_dest, cpu_src);
5004         break;
5005     case 2: /* fsinh */
5006         gen_helper_fsinh(tcg_env, cpu_dest, cpu_src);
5007         break;
5008     case 3: /* fintrz */
5009         gen_helper_fitrunc(tcg_env, cpu_dest, cpu_src);
5010         break;
5011     case 4: /* fsqrt */
5012         gen_helper_fsqrt(tcg_env, cpu_dest, cpu_src);
5013         break;
5014     case 0x41: /* fssqrt */
5015         gen_helper_fssqrt(tcg_env, cpu_dest, cpu_src);
5016         break;
5017     case 0x45: /* fdsqrt */
5018         gen_helper_fdsqrt(tcg_env, cpu_dest, cpu_src);
5019         break;
5020     case 0x06: /* flognp1 */
5021         gen_helper_flognp1(tcg_env, cpu_dest, cpu_src);
5022         break;
5023     case 0x08: /* fetoxm1 */
5024         gen_helper_fetoxm1(tcg_env, cpu_dest, cpu_src);
5025         break;
5026     case 0x09: /* ftanh */
5027         gen_helper_ftanh(tcg_env, cpu_dest, cpu_src);
5028         break;
5029     case 0x0a: /* fatan */
5030         gen_helper_fatan(tcg_env, cpu_dest, cpu_src);
5031         break;
5032     case 0x0c: /* fasin */
5033         gen_helper_fasin(tcg_env, cpu_dest, cpu_src);
5034         break;
5035     case 0x0d: /* fatanh */
5036         gen_helper_fatanh(tcg_env, cpu_dest, cpu_src);
5037         break;
5038     case 0x0e: /* fsin */
5039         gen_helper_fsin(tcg_env, cpu_dest, cpu_src);
5040         break;
5041     case 0x0f: /* ftan */
5042         gen_helper_ftan(tcg_env, cpu_dest, cpu_src);
5043         break;
5044     case 0x10: /* fetox */
5045         gen_helper_fetox(tcg_env, cpu_dest, cpu_src);
5046         break;
5047     case 0x11: /* ftwotox */
5048         gen_helper_ftwotox(tcg_env, cpu_dest, cpu_src);
5049         break;
5050     case 0x12: /* ftentox */
5051         gen_helper_ftentox(tcg_env, cpu_dest, cpu_src);
5052         break;
5053     case 0x14: /* flogn */
5054         gen_helper_flogn(tcg_env, cpu_dest, cpu_src);
5055         break;
5056     case 0x15: /* flog10 */
5057         gen_helper_flog10(tcg_env, cpu_dest, cpu_src);
5058         break;
5059     case 0x16: /* flog2 */
5060         gen_helper_flog2(tcg_env, cpu_dest, cpu_src);
5061         break;
5062     case 0x18: /* fabs */
5063         gen_helper_fabs(tcg_env, cpu_dest, cpu_src);
5064         break;
5065     case 0x58: /* fsabs */
5066         gen_helper_fsabs(tcg_env, cpu_dest, cpu_src);
5067         break;
5068     case 0x5c: /* fdabs */
5069         gen_helper_fdabs(tcg_env, cpu_dest, cpu_src);
5070         break;
5071     case 0x19: /* fcosh */
5072         gen_helper_fcosh(tcg_env, cpu_dest, cpu_src);
5073         break;
5074     case 0x1a: /* fneg */
5075         gen_helper_fneg(tcg_env, cpu_dest, cpu_src);
5076         break;
5077     case 0x5a: /* fsneg */
5078         gen_helper_fsneg(tcg_env, cpu_dest, cpu_src);
5079         break;
5080     case 0x5e: /* fdneg */
5081         gen_helper_fdneg(tcg_env, cpu_dest, cpu_src);
5082         break;
5083     case 0x1c: /* facos */
5084         gen_helper_facos(tcg_env, cpu_dest, cpu_src);
5085         break;
5086     case 0x1d: /* fcos */
5087         gen_helper_fcos(tcg_env, cpu_dest, cpu_src);
5088         break;
5089     case 0x1e: /* fgetexp */
5090         gen_helper_fgetexp(tcg_env, cpu_dest, cpu_src);
5091         break;
5092     case 0x1f: /* fgetman */
5093         gen_helper_fgetman(tcg_env, cpu_dest, cpu_src);
5094         break;
5095     case 0x20: /* fdiv */
5096         gen_helper_fdiv(tcg_env, cpu_dest, cpu_src, cpu_dest);
5097         break;
5098     case 0x60: /* fsdiv */
5099         gen_helper_fsdiv(tcg_env, cpu_dest, cpu_src, cpu_dest);
5100         break;
5101     case 0x64: /* fddiv */
5102         gen_helper_fddiv(tcg_env, cpu_dest, cpu_src, cpu_dest);
5103         break;
5104     case 0x21: /* fmod */
5105         gen_helper_fmod(tcg_env, cpu_dest, cpu_src, cpu_dest);
5106         break;
5107     case 0x22: /* fadd */
5108         gen_helper_fadd(tcg_env, cpu_dest, cpu_src, cpu_dest);
5109         break;
5110     case 0x62: /* fsadd */
5111         gen_helper_fsadd(tcg_env, cpu_dest, cpu_src, cpu_dest);
5112         break;
5113     case 0x66: /* fdadd */
5114         gen_helper_fdadd(tcg_env, cpu_dest, cpu_src, cpu_dest);
5115         break;
5116     case 0x23: /* fmul */
5117         gen_helper_fmul(tcg_env, cpu_dest, cpu_src, cpu_dest);
5118         break;
5119     case 0x63: /* fsmul */
5120         gen_helper_fsmul(tcg_env, cpu_dest, cpu_src, cpu_dest);
5121         break;
5122     case 0x67: /* fdmul */
5123         gen_helper_fdmul(tcg_env, cpu_dest, cpu_src, cpu_dest);
5124         break;
5125     case 0x24: /* fsgldiv */
5126         gen_helper_fsgldiv(tcg_env, cpu_dest, cpu_src, cpu_dest);
5127         break;
5128     case 0x25: /* frem */
5129         gen_helper_frem(tcg_env, cpu_dest, cpu_src, cpu_dest);
5130         break;
5131     case 0x26: /* fscale */
5132         gen_helper_fscale(tcg_env, cpu_dest, cpu_src, cpu_dest);
5133         break;
5134     case 0x27: /* fsglmul */
5135         gen_helper_fsglmul(tcg_env, cpu_dest, cpu_src, cpu_dest);
5136         break;
5137     case 0x28: /* fsub */
5138         gen_helper_fsub(tcg_env, cpu_dest, cpu_src, cpu_dest);
5139         break;
5140     case 0x68: /* fssub */
5141         gen_helper_fssub(tcg_env, cpu_dest, cpu_src, cpu_dest);
5142         break;
5143     case 0x6c: /* fdsub */
5144         gen_helper_fdsub(tcg_env, cpu_dest, cpu_src, cpu_dest);
5145         break;
5146     case 0x30: case 0x31: case 0x32:
5147     case 0x33: case 0x34: case 0x35:
5148     case 0x36: case 0x37: {
5149             TCGv_ptr cpu_dest2 = gen_fp_ptr(REG(ext, 0));
5150             gen_helper_fsincos(tcg_env, cpu_dest, cpu_dest2, cpu_src);
5151         }
5152         break;
5153     case 0x38: /* fcmp */
5154         gen_helper_fcmp(tcg_env, cpu_src, cpu_dest);
5155         return;
5156     case 0x3a: /* ftst */
5157         gen_helper_ftst(tcg_env, cpu_src);
5158         return;
5159     default:
5160         goto undef;
5161     }
5162     gen_helper_ftst(tcg_env, cpu_dest);
5163     return;
5164 undef:
5165     /* FIXME: Is this right for offset addressing modes?  */
5166     s->pc -= 2;
5167     disas_undef_fpu(env, s, insn);
5168 }
5169 
5170 static void gen_fcc_cond(DisasCompare *c, DisasContext *s, int cond)
5171 {
5172     TCGv fpsr;
5173     int imm = 0;
5174 
5175     /* TODO: Raise BSUN exception.  */
5176     fpsr = tcg_temp_new();
5177     gen_load_fcr(s, fpsr, M68K_FPSR);
5178     c->v1 = fpsr;
5179 
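    /*
     * Each case tests FPSR condition-code bits against an immediate
     * mask: TCG_COND_TSTNE is true when (v1 & v2) != 0 and
     * TCG_COND_TSTEQ when (v1 & v2) == 0.
     */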
5180     switch (cond) {
5181     case 0:  /* False */
5182     case 16: /* Signaling False */
5183         c->tcond = TCG_COND_NEVER;
5184         break;
5185     case 1:  /* EQual Z */
5186     case 17: /* Signaling EQual Z */
5187         imm = FPSR_CC_Z;
5188         c->tcond = TCG_COND_TSTNE;
5189         break;
5190     case 2:  /* Ordered Greater Than !(A || Z || N) */
5191     case 18: /* Greater Than !(A || Z || N) */
5192         imm = FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N;
5193         c->tcond = TCG_COND_TSTEQ;
5194         break;
5195     case 3:  /* Ordered Greater than or Equal Z || !(A || N) */
5196     case 19: /* Greater than or Equal Z || !(A || N) */
5197         c->v1 = tcg_temp_new();
5198         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5199         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
5200         tcg_gen_or_i32(c->v1, c->v1, fpsr);
5201         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5202         imm = FPSR_CC_Z | FPSR_CC_N;
5203         c->tcond = TCG_COND_TSTNE;
5204         break;
5205     case 4:  /* Ordered Less Than !(!N || A || Z); */
5206     case 20: /* Less Than !(!N || A || Z); */
5207         c->v1 = tcg_temp_new();
5208         tcg_gen_xori_i32(c->v1, fpsr, FPSR_CC_N);
5209         imm = FPSR_CC_N | FPSR_CC_A | FPSR_CC_Z;
5210         c->tcond = TCG_COND_TSTEQ;
5211         break;
5212     case 5:  /* Ordered Less than or Equal Z || (N && !A) */
5213     case 21: /* Less than or Equal Z || (N && !A) */
5214         c->v1 = tcg_temp_new();
5215         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5216         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
5217         tcg_gen_andc_i32(c->v1, fpsr, c->v1);
5218         imm = FPSR_CC_Z | FPSR_CC_N;
5219         c->tcond = TCG_COND_TSTNE;
5220         break;
5221     case 6:  /* Ordered Greater or Less than !(A || Z) */
5222     case 22: /* Greater or Less than !(A || Z) */
5223         imm = FPSR_CC_A | FPSR_CC_Z;
5224         c->tcond = TCG_COND_TSTEQ;
5225         break;
5226     case 7:  /* Ordered !A */
5227     case 23: /* Greater, Less or Equal !A */
5228         imm = FPSR_CC_A;
5229         c->tcond = TCG_COND_TSTEQ;
5230         break;
5231     case 8:  /* Unordered A */
5232     case 24: /* Not Greater, Less or Equal A */
5233         imm = FPSR_CC_A;
5234         c->tcond = TCG_COND_TSTNE;
5235         break;
5236     case 9:  /* Unordered or Equal A || Z */
5237     case 25: /* Not Greater or Less than A || Z */
5238         imm = FPSR_CC_A | FPSR_CC_Z;
5239         c->tcond = TCG_COND_TSTNE;
5240         break;
5241     case 10: /* Unordered or Greater Than A || !(N || Z) */
5242     case 26: /* Not Less or Equal A || !(N || Z) */
5243         c->v1 = tcg_temp_new();
5244         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5245         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
5246         tcg_gen_or_i32(c->v1, c->v1, fpsr);
5247         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5248         imm = FPSR_CC_A | FPSR_CC_N;
5249         c->tcond = TCG_COND_TSTNE;
5250         break;
5251     case 11: /* Unordered or Greater or Equal A || Z || !N */
5252     case 27: /* Not Less Than A || Z || !N */
5253         c->v1 = tcg_temp_new();
5254         tcg_gen_xori_i32(c->v1, fpsr, FPSR_CC_N);
5255         imm = FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N;
5256         c->tcond = TCG_COND_TSTNE;
5257         break;
5258     case 12: /* Unordered or Less Than A || (N && !Z) */
5259     case 28: /* Not Greater than or Equal A || (N && !Z) */
5260         c->v1 = tcg_temp_new();
5261         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5262         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
5263         tcg_gen_andc_i32(c->v1, fpsr, c->v1);
5264         imm = FPSR_CC_A | FPSR_CC_N;
5265         c->tcond = TCG_COND_TSTNE;
5266         break;
5267     case 13: /* Unordered or Less or Equal A || Z || N */
5268     case 29: /* Not Greater Than A || Z || N */
5269         imm = FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N;
5270         c->tcond = TCG_COND_TSTNE;
5271         break;
5272     case 14: /* Not Equal !Z */
5273     case 30: /* Signaling Not Equal !Z */
5274         imm = FPSR_CC_Z;
5275         c->tcond = TCG_COND_TSTEQ;
5276         break;
5277     case 15: /* True */
5278     case 31: /* Signaling True */
5279         c->tcond = TCG_COND_ALWAYS;
5280         break;
5281     }
5282     c->v2 = tcg_constant_i32(imm);
5283 }
5284 
5285 static void gen_fjmpcc(DisasContext *s, int cond, TCGLabel *l1)
5286 {
5287     DisasCompare c;
5288 
5289     gen_fcc_cond(&c, s, cond);
5290     update_cc_op(s);
5291     tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
5292 }
5293 
DISAS_INSN(fbcc)5294 DISAS_INSN(fbcc)
5295 {
5296     uint32_t offset;
5297     uint32_t base;
5298     TCGLabel *l1;
5299 
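    /*
     * The displacement is relative to the first extension word, i.e.
     * s->pc as captured in 'base' below.  Bit 6 of the opcode selects
     * a 32-bit displacement; otherwise the 16-bit word is sign-extended.
     */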
5300     base = s->pc;
5301     offset = (int16_t)read_im16(env, s);
5302     if (insn & (1 << 6)) {
5303         offset = (offset << 16) | read_im16(env, s);
5304     }
5305 
5306     l1 = gen_new_label();
5307     update_cc_op(s);
5308     gen_fjmpcc(s, insn & 0x3f, l1);
5309     gen_jmp_tb(s, 0, s->pc, s->base.pc_next);
5310     gen_set_label(l1);
5311     gen_jmp_tb(s, 1, base + offset, s->base.pc_next);
5312 }
5313 
DISAS_INSN(fscc)5314 DISAS_INSN(fscc)
5315 {
5316     DisasCompare c;
5317     int cond;
5318     TCGv tmp;
5319     uint16_t ext;
5320 
5321     ext = read_im16(env, s);
5322     cond = ext & 0x3f;
5323     gen_fcc_cond(&c, s, cond);
5324 
5325     tmp = tcg_temp_new();
5326     tcg_gen_negsetcond_i32(c.tcond, tmp, c.v1, c.v2);
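    /* negsetcond yields 0 or -1, the all-zeroes/all-ones byte FScc stores. */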
5327 
5328     DEST_EA(env, insn, OS_BYTE, tmp, NULL);
5329 }
5330 
DISAS_INSN(ftrapcc)5331 DISAS_INSN(ftrapcc)
5332 {
5333     DisasCompare c;
5334     uint16_t ext;
5335     int cond;
5336 
5337     ext = read_im16(env, s);
5338     cond = ext & 0x3f;
5339 
5340     /* Consume and discard the immediate operand. */
5341     switch (extract32(insn, 0, 3)) {
5342     case 2: /* ftrapcc.w */
5343         (void)read_im16(env, s);
5344         break;
5345     case 3: /* ftrapcc.l */
5346         (void)read_im32(env, s);
5347         break;
5348     case 4: /* ftrapcc (no operand) */
5349         break;
5350     default:
5351         /* ftrapcc is registered only with valid opmodes. */
5352         g_assert_not_reached();
5353     }
5354 
5355     gen_fcc_cond(&c, s, cond);
5356     do_trapcc(s, &c);
5357 }
5358 
5359 #if !defined(CONFIG_USER_ONLY)
DISAS_INSN(frestore)5360 DISAS_INSN(frestore)
5361 {
5362     TCGv addr;
5363 
5364     if (IS_USER(s)) {
5365         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
5366         return;
5367     }
5368     if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5369         SRC_EA(env, addr, OS_LONG, 0, NULL);
5370         /* FIXME: check the state frame */
5371     } else {
5372         disas_undef(env, s, insn);
5373     }
5374 }
5375 
DISAS_INSN(fsave)5376 DISAS_INSN(fsave)
5377 {
5378     if (IS_USER(s)) {
5379         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
5380         return;
5381     }
5382 
5383     if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5384         /* Always write an IDLE state frame. */
5385         TCGv idle = tcg_constant_i32(0x41000000);
5386         DEST_EA(env, insn, OS_LONG, idle, NULL);
5387     } else {
5388         disas_undef(env, s, insn);
5389     }
5390 }
5391 #endif
5392 
gen_mac_extract_word(DisasContext * s,TCGv val,int upper)5393 static inline TCGv gen_mac_extract_word(DisasContext *s, TCGv val, int upper)
5394 {
5395     TCGv tmp = tcg_temp_new();
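    /*
     * Fractional mode (MACSR_FI) keeps the selected word in the high half
     * of the operand, signed integer mode (MACSR_SU) sign-extends it, and
     * the default unsigned mode zero-extends it.
     */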
5396     if (s->env->macsr & MACSR_FI) {
5397         if (upper)
5398             tcg_gen_andi_i32(tmp, val, 0xffff0000);
5399         else
5400             tcg_gen_shli_i32(tmp, val, 16);
5401     } else if (s->env->macsr & MACSR_SU) {
5402         if (upper)
5403             tcg_gen_sari_i32(tmp, val, 16);
5404         else
5405             tcg_gen_ext16s_i32(tmp, val);
5406     } else {
5407         if (upper)
5408             tcg_gen_shri_i32(tmp, val, 16);
5409         else
5410             tcg_gen_ext16u_i32(tmp, val);
5411     }
5412     return tmp;
5413 }
5414 
gen_mac_clear_flags(void)5415 static void gen_mac_clear_flags(void)
5416 {
5417     tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR,
5418                      ~(MACSR_V | MACSR_Z | MACSR_N | MACSR_EV));
5419 }
5420 
DISAS_INSN(mac)5421 DISAS_INSN(mac)
5422 {
5423     TCGv rx;
5424     TCGv ry;
5425     uint16_t ext;
5426     int acc;
5427     TCGv tmp;
5428     TCGv addr;
5429     TCGv loadval;
5430     int dual;
5431     TCGv saved_flags;
5432 
5433     if (!s->done_mac) {
5434         s->mactmp = tcg_temp_new_i64();
5435         s->done_mac = 1;
5436     }
5437 
5438     ext = read_im16(env, s);
5439 
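    /* Accumulator select: opcode bit 7 supplies bit 0, extension word bit 4 supplies bit 1. */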
5440     acc = ((insn >> 7) & 1) | ((ext >> 3) & 2);
5441     dual = ((insn & 0x30) != 0 && (ext & 3) != 0);
5442     if (dual && !m68k_feature(s->env, M68K_FEATURE_CF_EMAC_B)) {
5443         disas_undef(env, s, insn);
5444         return;
5445     }
5446     if (insn & 0x30) {
5447         /* MAC with load.  */
5448         tmp = gen_lea(env, s, insn, OS_LONG);
5449         addr = tcg_temp_new();
5450         tcg_gen_and_i32(addr, tmp, QREG_MAC_MASK);
5451         /*
5452          * Load the value now to ensure correct exception behavior.
5453          * Perform writeback after reading the MAC inputs.
5454          */
5455         loadval = gen_load(s, OS_LONG, addr, 0, IS_USER(s));
5456 
5457         acc ^= 1;
5458         rx = (ext & 0x8000) ? AREG(ext, 12) : DREG(insn, 12);
5459         ry = (ext & 8) ? AREG(ext, 0) : DREG(ext, 0);
5460     } else {
5461         loadval = addr = NULL_QREG;
5462         rx = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
5463         ry = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5464     }
5465 
5466     gen_mac_clear_flags();
5467 #if 0
5468     l1 = -1;
5469     /* Disabled because conditional branches clobber temporary vars.  */
5470     if ((s->env->macsr & MACSR_OMC) != 0 && !dual) {
5471         /* Skip the multiply if we know we will ignore it.  */
5472         l1 = gen_new_label();
5473         tmp = tcg_temp_new();
5474         tcg_gen_andi_i32(tmp, QREG_MACSR, 1 << (acc + 8));
5475         gen_op_jmp_nz32(tmp, l1);
5476     }
5477 #endif
5478 
5479     if ((ext & 0x0800) == 0) {
5480         /* Word.  */
5481         rx = gen_mac_extract_word(s, rx, (ext & 0x80) != 0);
5482         ry = gen_mac_extract_word(s, ry, (ext & 0x40) != 0);
5483     }
5484     if (s->env->macsr & MACSR_FI) {
5485         gen_helper_macmulf(s->mactmp, tcg_env, rx, ry);
5486     } else {
5487         if (s->env->macsr & MACSR_SU)
5488             gen_helper_macmuls(s->mactmp, tcg_env, rx, ry);
5489         else
5490             gen_helper_macmulu(s->mactmp, tcg_env, rx, ry);
5491         switch ((ext >> 9) & 3) {
5492         case 1:
5493             tcg_gen_shli_i64(s->mactmp, s->mactmp, 1);
5494             break;
5495         case 3:
5496             tcg_gen_shri_i64(s->mactmp, s->mactmp, 1);
5497             break;
5498         }
5499     }
5500 
5501     if (dual) {
5502         /* Save the overflow flag from the multiply.  */
5503         saved_flags = tcg_temp_new();
5504         tcg_gen_mov_i32(saved_flags, QREG_MACSR);
5505     } else {
5506         saved_flags = NULL_QREG;
5507     }
5508 
5509 #if 0
5510     /* Disabled because conditional branches clobber temporary vars.  */
5511     if ((s->env->macsr & MACSR_OMC) != 0 && dual) {
5512         /* Skip the accumulate if the value is already saturated.  */
5513         l1 = gen_new_label();
5514         tmp = tcg_temp_new();
5515         gen_op_and32(tmp, QREG_MACSR, tcg_constant_i32(MACSR_PAV0 << acc));
5516         gen_op_jmp_nz32(tmp, l1);
5517     }
5518 #endif
5519 
5520     if (insn & 0x100)
5521         tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
5522     else
5523         tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
5524 
5525     if (s->env->macsr & MACSR_FI)
5526         gen_helper_macsatf(tcg_env, tcg_constant_i32(acc));
5527     else if (s->env->macsr & MACSR_SU)
5528         gen_helper_macsats(tcg_env, tcg_constant_i32(acc));
5529     else
5530         gen_helper_macsatu(tcg_env, tcg_constant_i32(acc));
5531 
5532 #if 0
5533     /* Disabled because conditional branches clobber temporary vars.  */
5534     if (l1 != -1)
5535         gen_set_label(l1);
5536 #endif
5537 
5538     if (dual) {
5539         /* Dual accumulate variant.  */
5540         acc = (ext >> 2) & 3;
5541         /* Restore the overflow flag from the multiplier.  */
5542         tcg_gen_mov_i32(QREG_MACSR, saved_flags);
5543 #if 0
5544         /* Disabled because conditional branches clobber temporary vars.  */
5545         if ((s->env->macsr & MACSR_OMC) != 0) {
5546             /* Skip the accumulate if the value is already saturated.  */
5547             l1 = gen_new_label();
5548             tmp = tcg_temp_new();
5549             gen_op_and32(tmp, QREG_MACSR, tcg_constant_i32(MACSR_PAV0 << acc));
5550             gen_op_jmp_nz32(tmp, l1);
5551         }
5552 #endif
5553         if (ext & 2)
5554             tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
5555         else
5556             tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
5557         if (s->env->macsr & MACSR_FI)
5558             gen_helper_macsatf(tcg_env, tcg_constant_i32(acc));
5559         else if (s->env->macsr & MACSR_SU)
5560             gen_helper_macsats(tcg_env, tcg_constant_i32(acc));
5561         else
5562             gen_helper_macsatu(tcg_env, tcg_constant_i32(acc));
5563 #if 0
5564         /* Disabled because conditional branches clobber temporary vars.  */
5565         if (l1 != -1)
5566             gen_set_label(l1);
5567 #endif
5568     }
5569     gen_helper_mac_set_flags(tcg_env, tcg_constant_i32(acc));
5570 
5571     if (insn & 0x30) {
5572         TCGv rw;
5573         rw = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
5574         tcg_gen_mov_i32(rw, loadval);
5575         /*
5576          * FIXME: Should address writeback happen with the masked or
5577          * unmasked value?
5578          */
5579         switch ((insn >> 3) & 7) {
5580         case 3: /* Post-increment.  */
5581             tcg_gen_addi_i32(AREG(insn, 0), addr, 4);
5582             break;
5583         case 4: /* Pre-decrement.  */
5584             tcg_gen_mov_i32(AREG(insn, 0), addr);
5585         }
5586     }
5587 }
5588 
DISAS_INSN(from_mac)5589 DISAS_INSN(from_mac)
5590 {
5591     TCGv rx;
5592     TCGv_i64 acc;
5593     int accnum;
5594 
5595     rx = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5596     accnum = (insn >> 9) & 3;
5597     acc = MACREG(accnum);
5598     if (s->env->macsr & MACSR_FI) {
5599         gen_helper_get_macf(rx, tcg_env, acc);
5600     } else if ((s->env->macsr & MACSR_OMC) == 0) {
5601         tcg_gen_extrl_i64_i32(rx, acc);
5602     } else if (s->env->macsr & MACSR_SU) {
5603         gen_helper_get_macs(rx, acc);
5604     } else {
5605         gen_helper_get_macu(rx, acc);
5606     }
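    /* Bit 6 set: also clear the accumulator and its per-accumulator overflow flag. */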
5607     if (insn & 0x40) {
5608         tcg_gen_movi_i64(acc, 0);
5609         tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5610     }
5611 }
5612 
DISAS_INSN(move_mac)5613 DISAS_INSN(move_mac)
5614 {
5615     /* FIXME: This can be done without a helper.  */
5616     int src;
5617     TCGv dest;
5618     src = insn & 3;
5619     dest = tcg_constant_i32((insn >> 9) & 3);
5620     gen_helper_mac_move(tcg_env, dest, tcg_constant_i32(src));
5621     gen_mac_clear_flags();
5622     gen_helper_mac_set_flags(tcg_env, dest);
5623 }
5624 
DISAS_INSN(from_macsr)5625 DISAS_INSN(from_macsr)
5626 {
5627     TCGv reg;
5628 
5629     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5630     tcg_gen_mov_i32(reg, QREG_MACSR);
5631 }
5632 
DISAS_INSN(from_mask)5633 DISAS_INSN(from_mask)
5634 {
5635     TCGv reg;
5636     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5637     tcg_gen_mov_i32(reg, QREG_MAC_MASK);
5638 }
5639 
DISAS_INSN(from_mext)5640 DISAS_INSN(from_mext)
5641 {
5642     TCGv reg;
5643     TCGv acc;
5644     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5645     acc = tcg_constant_i32((insn & 0x400) ? 2 : 0);
5646     if (s->env->macsr & MACSR_FI)
5647         gen_helper_get_mac_extf(reg, tcg_env, acc);
5648     else
5649         gen_helper_get_mac_exti(reg, tcg_env, acc);
5650 }
5651 
DISAS_INSN(macsr_to_ccr)5652 DISAS_INSN(macsr_to_ccr)
5653 {
5654     TCGv tmp = tcg_temp_new();
5655 
5656     /* Note that X and C are always cleared. */
5657     tcg_gen_andi_i32(tmp, QREG_MACSR, CCF_N | CCF_Z | CCF_V);
5658     gen_helper_set_ccr(tcg_env, tmp);
5659     set_cc_op(s, CC_OP_FLAGS);
5660 }
5661 
DISAS_INSN(to_mac)5662 DISAS_INSN(to_mac)
5663 {
5664     TCGv_i64 acc;
5665     TCGv val;
5666     int accnum;
5667     accnum = (insn >> 9) & 3;
5668     acc = MACREG(accnum);
5669     SRC_EA(env, val, OS_LONG, 0, NULL);
5670     if (s->env->macsr & MACSR_FI) {
5671         tcg_gen_ext_i32_i64(acc, val);
5672         tcg_gen_shli_i64(acc, acc, 8);
5673     } else if (s->env->macsr & MACSR_SU) {
5674         tcg_gen_ext_i32_i64(acc, val);
5675     } else {
5676         tcg_gen_extu_i32_i64(acc, val);
5677     }
5678     tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5679     gen_mac_clear_flags();
5680     gen_helper_mac_set_flags(tcg_env, tcg_constant_i32(accnum));
5681 }
5682 
DISAS_INSN(to_macsr)5683 DISAS_INSN(to_macsr)
5684 {
5685     TCGv val;
5686     SRC_EA(env, val, OS_LONG, 0, NULL);
5687     gen_helper_set_macsr(tcg_env, val);
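    /* MACSR is examined at translation time, so end the TB here. */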
5688     gen_exit_tb(s);
5689 }
5690 
DISAS_INSN(to_mask)5691 DISAS_INSN(to_mask)
5692 {
5693     TCGv val;
5694     SRC_EA(env, val, OS_LONG, 0, NULL);
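    /* The upper 16 bits of the MAC MASK register are forced to all ones. */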
5695     tcg_gen_ori_i32(QREG_MAC_MASK, val, 0xffff0000);
5696 }
5697 
DISAS_INSN(to_mext)5698 DISAS_INSN(to_mext)
5699 {
5700     TCGv val;
5701     TCGv acc;
5702     SRC_EA(env, val, OS_LONG, 0, NULL);
5703     acc = tcg_constant_i32((insn & 0x400) ? 2 : 0);
5704     if (s->env->macsr & MACSR_FI)
5705         gen_helper_set_mac_extf(tcg_env, val, acc);
5706     else if (s->env->macsr & MACSR_SU)
5707         gen_helper_set_mac_exts(tcg_env, val, acc);
5708     else
5709         gen_helper_set_mac_extu(tcg_env, val, acc);
5710 }
5711 
5712 static disas_proc opcode_table[65536];
5713 
5714 static void
register_opcode(disas_proc proc,uint16_t opcode,uint16_t mask)5715 register_opcode (disas_proc proc, uint16_t opcode, uint16_t mask)
5716 {
5717   int i;
5718   int from;
5719   int to;
5720 
5721   /* Sanity check.  All set bits must be included in the mask.  */
5722   if (opcode & ~mask) {
5723       fprintf(stderr,
5724               "qemu internal error: bogus opcode definition %04x/%04x\n",
5725               opcode, mask);
5726       abort();
5727   }
5728   /*
5729    * This could probably be cleverer.  For now just optimize the case where
5730    * the top bits are known.
5731    */
5732   /* Find the first zero bit in the mask.  */
5733   i = 0x8000;
5734   while ((i & mask) != 0)
5735       i >>= 1;
5736   /* Iterate over all combinations of this and lower bits.  */
5737   if (i == 0)
5738       i = 1;
5739   else
5740       i <<= 1;
5741   from = opcode & ~(i - 1);
5742   to = from + i;
5743   for (i = from; i < to; i++) {
5744       if ((i & mask) == opcode)
5745           opcode_table[i] = proc;
5746   }
5747 }
5748 
5749 /*
5750  * Register m68k opcode handlers.  Order is important.
5751  * Later insns override earlier ones.
5752  */
register_m68k_insns(CPUM68KState * env)5753 void register_m68k_insns (CPUM68KState *env)
5754 {
5755     /*
5756      * Build the opcode table only once to avoid
5757      * multithreading issues.
5758      */
5759     if (opcode_table[0] != NULL) {
5760         return;
5761     }
5762 
5763     /*
5764      * Use BASE() for instructions available
5765      * on both CF_ISA_A and M68000.
5766      */
5767 #define BASE(name, opcode, mask) \
5768     register_opcode(disas_##name, 0x##opcode, 0x##mask)
5769 #define INSN(name, opcode, mask, feature) do { \
5770     if (m68k_feature(env, M68K_FEATURE_##feature)) \
5771         BASE(name, opcode, mask); \
5772     } while(0)
5773     BASE(undef,     0000, 0000);
5774     INSN(arith_im,  0080, fff8, CF_ISA_A);
5775     INSN(arith_im,  0000, ff00, M68K);
5776     INSN(chk2,      00c0, f9c0, CHK2);
5777     INSN(bitrev,    00c0, fff8, CF_ISA_APLUSC);
5778     BASE(bitop_reg, 0100, f1c0);
5779     BASE(bitop_reg, 0140, f1c0);
5780     BASE(bitop_reg, 0180, f1c0);
5781     BASE(bitop_reg, 01c0, f1c0);
5782     INSN(movep,     0108, f138, MOVEP);
5783     INSN(arith_im,  0280, fff8, CF_ISA_A);
5784     INSN(arith_im,  0200, ff00, M68K);
5785     INSN(undef,     02c0, ffc0, M68K);
5786     INSN(byterev,   02c0, fff8, CF_ISA_APLUSC);
5787     INSN(arith_im,  0480, fff8, CF_ISA_A);
5788     INSN(arith_im,  0400, ff00, M68K);
5789     INSN(undef,     04c0, ffc0, M68K);
5790     INSN(arith_im,  0600, ff00, M68K);
5791     INSN(undef,     06c0, ffc0, M68K);
5792     INSN(ff1,       04c0, fff8, CF_ISA_APLUSC);
5793     INSN(arith_im,  0680, fff8, CF_ISA_A);
5794     INSN(arith_im,  0c00, ff38, CF_ISA_A);
5795     INSN(arith_im,  0c00, ff00, M68K);
5796     BASE(bitop_im,  0800, ffc0);
5797     BASE(bitop_im,  0840, ffc0);
5798     BASE(bitop_im,  0880, ffc0);
5799     BASE(bitop_im,  08c0, ffc0);
5800     INSN(arith_im,  0a80, fff8, CF_ISA_A);
5801     INSN(arith_im,  0a00, ff00, M68K);
5802 #if !defined(CONFIG_USER_ONLY)
5803     INSN(moves,     0e00, ff00, M68K);
5804 #endif
5805     INSN(cas,       0ac0, ffc0, CAS);
5806     INSN(cas,       0cc0, ffc0, CAS);
5807     INSN(cas,       0ec0, ffc0, CAS);
5808     INSN(cas2w,     0cfc, ffff, CAS);
5809     INSN(cas2l,     0efc, ffff, CAS);
5810     BASE(move,      1000, f000);
5811     BASE(move,      2000, f000);
5812     BASE(move,      3000, f000);
5813     INSN(chk,       4000, f040, M68K);
5814     INSN(strldsr,   40e7, ffff, CF_ISA_APLUSC);
5815     INSN(negx,      4080, fff8, CF_ISA_A);
5816     INSN(negx,      4000, ff00, M68K);
5817     INSN(undef,     40c0, ffc0, M68K);
5818     INSN(move_from_sr, 40c0, fff8, CF_ISA_A);
5819     INSN(move_from_sr, 40c0, ffc0, M68K);
5820     BASE(lea,       41c0, f1c0);
5821     BASE(clr,       4200, ff00);
5822     BASE(undef,     42c0, ffc0);
5823     INSN(move_from_ccr, 42c0, fff8, CF_ISA_A);
5824     INSN(move_from_ccr, 42c0, ffc0, M68K);
5825     INSN(neg,       4480, fff8, CF_ISA_A);
5826     INSN(neg,       4400, ff00, M68K);
5827     INSN(undef,     44c0, ffc0, M68K);
5828     BASE(move_to_ccr, 44c0, ffc0);
5829     INSN(not,       4680, fff8, CF_ISA_A);
5830     INSN(not,       4600, ff00, M68K);
5831 #if !defined(CONFIG_USER_ONLY)
5832     BASE(move_to_sr, 46c0, ffc0);
5833 #endif
5834     INSN(nbcd,      4800, ffc0, M68K);
5835     INSN(linkl,     4808, fff8, M68K);
5836     BASE(pea,       4840, ffc0);
5837     BASE(swap,      4840, fff8);
5838     INSN(bkpt,      4848, fff8, BKPT);
5839     INSN(movem,     48d0, fbf8, CF_ISA_A);
5840     INSN(movem,     48e8, fbf8, CF_ISA_A);
5841     INSN(movem,     4880, fb80, M68K);
5842     BASE(ext,       4880, fff8);
5843     BASE(ext,       48c0, fff8);
5844     BASE(ext,       49c0, fff8);
5845     BASE(tst,       4a00, ff00);
5846     INSN(tas,       4ac0, ffc0, CF_ISA_B);
5847     INSN(tas,       4ac0, ffc0, M68K);
5848 #if !defined(CONFIG_USER_ONLY)
5849     INSN(halt,      4ac8, ffff, CF_ISA_A);
5850     INSN(halt,      4ac8, ffff, M68K);
5851 #endif
5852     INSN(pulse,     4acc, ffff, CF_ISA_A);
5853     BASE(illegal,   4afc, ffff);
5854     INSN(mull,      4c00, ffc0, CF_ISA_A);
5855     INSN(mull,      4c00, ffc0, LONG_MULDIV);
5856     INSN(divl,      4c40, ffc0, CF_ISA_A);
5857     INSN(divl,      4c40, ffc0, LONG_MULDIV);
5858     INSN(sats,      4c80, fff8, CF_ISA_B);
5859     BASE(trap,      4e40, fff0);
5860     BASE(link,      4e50, fff8);
5861     BASE(unlk,      4e58, fff8);
5862 #if !defined(CONFIG_USER_ONLY)
5863     INSN(move_to_usp, 4e60, fff8, USP);
5864     INSN(move_from_usp, 4e68, fff8, USP);
5865     INSN(reset,     4e70, ffff, M68K);
5866     BASE(stop,      4e72, ffff);
5867     BASE(rte,       4e73, ffff);
5868     INSN(cf_movec,  4e7b, ffff, CF_ISA_A);
5869     INSN(m68k_movec, 4e7a, fffe, MOVEC);
5870 #endif
5871     BASE(nop,       4e71, ffff);
5872     INSN(rtd,       4e74, ffff, RTD);
5873     BASE(rts,       4e75, ffff);
5874     INSN(trapv,     4e76, ffff, M68K);
5875     INSN(rtr,       4e77, ffff, M68K);
5876     BASE(jump,      4e80, ffc0);
5877     BASE(jump,      4ec0, ffc0);
5878     INSN(addsubq,   5000, f080, M68K);
5879     BASE(addsubq,   5080, f0c0);
5880     INSN(scc,       50c0, f0f8, CF_ISA_A); /* Scc.B Dx   */
5881     INSN(scc,       50c0, f0c0, M68K);     /* Scc.B <EA> */
5882     INSN(dbcc,      50c8, f0f8, M68K);
5883     INSN(trapcc,    50fa, f0fe, TRAPCC);   /* opmode 010, 011 */
5884     INSN(trapcc,    50fc, f0ff, TRAPCC);   /* opmode 100 */
5885     INSN(trapcc,    51fa, fffe, CF_ISA_A); /* TPF (trapf) opmode 010, 011 */
5886     INSN(trapcc,    51fc, ffff, CF_ISA_A); /* TPF (trapf) opmode 100 */
5887 
5888     /* Branch instructions.  */
5889     BASE(branch,    6000, f000);
5890     /* Disable long branch instructions, then add back the ones we want.  */
5891     BASE(undef,     60ff, f0ff); /* All long branches.  */
5892     INSN(branch,    60ff, f0ff, CF_ISA_B);
5893     INSN(undef,     60ff, ffff, CF_ISA_B); /* bra.l */
5894     INSN(branch,    60ff, ffff, BRAL);
5895     INSN(branch,    60ff, f0ff, BCCL);
5896 
5897     BASE(moveq,     7000, f100);
5898     INSN(mvzs,      7100, f100, CF_ISA_B);
5899     BASE(or,        8000, f000);
5900     BASE(divw,      80c0, f0c0);
5901     INSN(sbcd_reg,  8100, f1f8, M68K);
5902     INSN(sbcd_mem,  8108, f1f8, M68K);
5903     BASE(addsub,    9000, f000);
5904     INSN(undef,     90c0, f0c0, CF_ISA_A);
5905     INSN(subx_reg,  9180, f1f8, CF_ISA_A);
5906     INSN(subx_reg,  9100, f138, M68K);
5907     INSN(subx_mem,  9108, f138, M68K);
5908     INSN(suba,      91c0, f1c0, CF_ISA_A);
5909     INSN(suba,      90c0, f0c0, M68K);
5910 
5911     BASE(undef_mac, a000, f000);
5912     INSN(mac,       a000, f100, CF_EMAC);
5913     INSN(from_mac,  a180, f9b0, CF_EMAC);
5914     INSN(move_mac,  a110, f9fc, CF_EMAC);
5915     INSN(from_macsr,a980, f9f0, CF_EMAC);
5916     INSN(from_mask, ad80, fff0, CF_EMAC);
5917     INSN(from_mext, ab80, fbf0, CF_EMAC);
5918     INSN(macsr_to_ccr, a9c0, ffff, CF_EMAC);
5919     INSN(to_mac,    a100, f9c0, CF_EMAC);
5920     INSN(to_macsr,  a900, ffc0, CF_EMAC);
5921     INSN(to_mext,   ab00, fbc0, CF_EMAC);
5922     INSN(to_mask,   ad00, ffc0, CF_EMAC);
5923 
5924     INSN(mov3q,     a140, f1c0, CF_ISA_B);
5925     INSN(cmp,       b000, f1c0, CF_ISA_B); /* cmp.b */
5926     INSN(cmp,       b040, f1c0, CF_ISA_B); /* cmp.w */
5927     INSN(cmpa,      b0c0, f1c0, CF_ISA_B); /* cmpa.w */
5928     INSN(cmp,       b080, f1c0, CF_ISA_A);
5929     INSN(cmpa,      b1c0, f1c0, CF_ISA_A);
5930     INSN(cmp,       b000, f100, M68K);
5931     INSN(eor,       b100, f100, M68K);
5932     INSN(cmpm,      b108, f138, M68K);
5933     INSN(cmpa,      b0c0, f0c0, M68K);
5934     INSN(eor,       b180, f1c0, CF_ISA_A);
5935     BASE(and,       c000, f000);
5936     INSN(exg_dd,    c140, f1f8, M68K);
5937     INSN(exg_aa,    c148, f1f8, M68K);
5938     INSN(exg_da,    c188, f1f8, M68K);
5939     BASE(mulw,      c0c0, f0c0);
5940     INSN(abcd_reg,  c100, f1f8, M68K);
5941     INSN(abcd_mem,  c108, f1f8, M68K);
5942     BASE(addsub,    d000, f000);
5943     INSN(undef,     d0c0, f0c0, CF_ISA_A);
5944     INSN(addx_reg,      d180, f1f8, CF_ISA_A);
5945     INSN(addx_reg,  d100, f138, M68K);
5946     INSN(addx_mem,  d108, f138, M68K);
5947     INSN(adda,      d1c0, f1c0, CF_ISA_A);
5948     INSN(adda,      d0c0, f0c0, M68K);
5949     INSN(shift_im,  e080, f0f0, CF_ISA_A);
5950     INSN(shift_reg, e0a0, f0f0, CF_ISA_A);
5951     INSN(shift8_im, e000, f0f0, M68K);
5952     INSN(shift16_im, e040, f0f0, M68K);
5953     INSN(shift_im,  e080, f0f0, M68K);
5954     INSN(shift8_reg, e020, f0f0, M68K);
5955     INSN(shift16_reg, e060, f0f0, M68K);
5956     INSN(shift_reg, e0a0, f0f0, M68K);
5957     INSN(shift_mem, e0c0, fcc0, M68K);
5958     INSN(rotate_im, e090, f0f0, M68K);
5959     INSN(rotate8_im, e010, f0f0, M68K);
5960     INSN(rotate16_im, e050, f0f0, M68K);
5961     INSN(rotate_reg, e0b0, f0f0, M68K);
5962     INSN(rotate8_reg, e030, f0f0, M68K);
5963     INSN(rotate16_reg, e070, f0f0, M68K);
5964     INSN(rotate_mem, e4c0, fcc0, M68K);
5965     INSN(bfext_mem, e9c0, fdc0, BITFIELD);  /* bfextu & bfexts */
5966     INSN(bfext_reg, e9c0, fdf8, BITFIELD);
5967     INSN(bfins_mem, efc0, ffc0, BITFIELD);
5968     INSN(bfins_reg, efc0, fff8, BITFIELD);
5969     INSN(bfop_mem, eac0, ffc0, BITFIELD);   /* bfchg */
5970     INSN(bfop_reg, eac0, fff8, BITFIELD);   /* bfchg */
5971     INSN(bfop_mem, ecc0, ffc0, BITFIELD);   /* bfclr */
5972     INSN(bfop_reg, ecc0, fff8, BITFIELD);   /* bfclr */
5973     INSN(bfop_mem, edc0, ffc0, BITFIELD);   /* bfffo */
5974     INSN(bfop_reg, edc0, fff8, BITFIELD);   /* bfffo */
5975     INSN(bfop_mem, eec0, ffc0, BITFIELD);   /* bfset */
5976     INSN(bfop_reg, eec0, fff8, BITFIELD);   /* bfset */
5977     INSN(bfop_mem, e8c0, ffc0, BITFIELD);   /* bftst */
5978     INSN(bfop_reg, e8c0, fff8, BITFIELD);   /* bftst */
5979     BASE(undef_fpu, f000, f000);
5980     INSN(fpu,       f200, ffc0, CF_FPU);
5981     INSN(fbcc,      f280, ffc0, CF_FPU);
5982     INSN(fpu,       f200, ffc0, FPU);
5983     INSN(fscc,      f240, ffc0, FPU);
5984     INSN(ftrapcc,   f27a, fffe, FPU);       /* opmode 010, 011 */
5985     INSN(ftrapcc,   f27c, ffff, FPU);       /* opmode 100 */
5986     INSN(fbcc,      f280, ff80, FPU);
5987 #if !defined(CONFIG_USER_ONLY)
5988     INSN(frestore,  f340, ffc0, CF_FPU);
5989     INSN(fsave,     f300, ffc0, CF_FPU);
5990     INSN(frestore,  f340, ffc0, FPU);
5991     INSN(fsave,     f300, ffc0, FPU);
5992     INSN(intouch,   f340, ffc0, CF_ISA_A);
5993     INSN(cpushl,    f428, ff38, CF_ISA_A);
5994     INSN(cpush,     f420, ff20, M68040);
5995     INSN(cinv,      f400, ff20, M68040);
5996     INSN(pflush,    f500, ffe0, M68040);
5997     INSN(ptest,     f548, ffd8, M68040);
5998     INSN(wddata,    fb00, ff00, CF_ISA_A);
5999     INSN(wdebug,    fbc0, ffc0, CF_ISA_A);
6000 #endif
6001     INSN(move16_mem, f600, ffe0, M68040);
6002     INSN(move16_reg, f620, fff8, M68040);
6003 #undef INSN
6004 }
6005 
m68k_tr_init_disas_context(DisasContextBase * dcbase,CPUState * cpu)6006 static void m68k_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cpu)
6007 {
6008     DisasContext *dc = container_of(dcbase, DisasContext, base);
6009     CPUM68KState *env = cpu_env(cpu);
6010 
6011     dc->env = env;
6012     dc->pc = dc->base.pc_first;
6013     /* This value will always be filled in properly before m68k_tr_tb_stop. */
6014     dc->pc_prev = 0xdeadbeef;
6015     dc->cc_op = CC_OP_DYNAMIC;
6016     dc->cc_op_synced = 1;
6017     dc->done_mac = 0;
6018     dc->writeback_mask = 0;
6019 
6020     dc->ss_active = (M68K_SR_TRACE(env->sr) == M68K_SR_TRACE_ANY_INS);
6021     /* If architectural single-step is active, limit translation to one insn. */
6022     if (dc->ss_active) {
6023         dc->base.max_insns = 1;
6024     }
6025 }
6026 
m68k_tr_tb_start(DisasContextBase * dcbase,CPUState * cpu)6027 static void m68k_tr_tb_start(DisasContextBase *dcbase, CPUState *cpu)
6028 {
6029 }
6030 
m68k_tr_insn_start(DisasContextBase * dcbase,CPUState * cpu)6031 static void m68k_tr_insn_start(DisasContextBase *dcbase, CPUState *cpu)
6032 {
6033     DisasContext *dc = container_of(dcbase, DisasContext, base);
6034     tcg_gen_insn_start(dc->base.pc_next, dc->cc_op);
6035 }
6036 
m68k_tr_translate_insn(DisasContextBase * dcbase,CPUState * cpu)6037 static void m68k_tr_translate_insn(DisasContextBase *dcbase, CPUState *cpu)
6038 {
6039     DisasContext *dc = container_of(dcbase, DisasContext, base);
6040     CPUM68KState *env = cpu_env(cpu);
6041     uint16_t insn = read_im16(env, dc);
6042 
6043     opcode_table[insn](env, dc, insn);
6044     do_writebacks(dc);
6045 
6046     dc->pc_prev = dc->base.pc_next;
6047     dc->base.pc_next = dc->pc;
6048 
6049     if (dc->base.is_jmp == DISAS_NEXT) {
6050         /*
6051          * Stop translation when the next insn might touch a new page.
6052          * This ensures that prefetch aborts at the right place.
6053          *
6054          * We cannot determine the size of the next insn without
6055          * completely decoding it.  However, the maximum insn size
6056          * is 32 bytes, so end if we do not have that much remaining.
6057          * This may produce several small TBs at the end of each page,
6058          * but they will all be linked with goto_tb.
6059          *
6060          * ??? ColdFire maximum is 4 bytes; MC68000's maximum is also
6061          * smaller than MC68020's.
6062          */
6063         target_ulong start_page_offset
6064             = dc->pc - (dc->base.pc_first & TARGET_PAGE_MASK);
6065 
6066         if (start_page_offset >= TARGET_PAGE_SIZE - 32) {
6067             dc->base.is_jmp = DISAS_TOO_MANY;
6068         }
6069     }
6070 }
6071 
m68k_tr_tb_stop(DisasContextBase * dcbase,CPUState * cpu)6072 static void m68k_tr_tb_stop(DisasContextBase *dcbase, CPUState *cpu)
6073 {
6074     DisasContext *dc = container_of(dcbase, DisasContext, base);
6075 
6076     switch (dc->base.is_jmp) {
6077     case DISAS_NORETURN:
6078         break;
6079     case DISAS_TOO_MANY:
6080         update_cc_op(dc);
6081         gen_jmp_tb(dc, 0, dc->pc, dc->pc_prev);
6082         break;
6083     case DISAS_JUMP:
6084         /* We updated CC_OP and PC in gen_jmp/gen_jmp_im.  */
6085         if (dc->ss_active) {
6086             gen_raise_exception_format2(dc, EXCP_TRACE, dc->pc_prev);
6087         } else {
6088             tcg_gen_lookup_and_goto_ptr();
6089         }
6090         break;
6091     case DISAS_EXIT:
6092         /*
6093          * We updated CC_OP and PC in gen_exit_tb, but also modified
6094          * other state that may require returning to the main loop.
6095          */
6096         if (dc->ss_active) {
6097             gen_raise_exception_format2(dc, EXCP_TRACE, dc->pc_prev);
6098         } else {
6099             tcg_gen_exit_tb(NULL, 0);
6100         }
6101         break;
6102     default:
6103         g_assert_not_reached();
6104     }
6105 }
6106 
6107 static const TranslatorOps m68k_tr_ops = {
6108     .init_disas_context = m68k_tr_init_disas_context,
6109     .tb_start           = m68k_tr_tb_start,
6110     .insn_start         = m68k_tr_insn_start,
6111     .translate_insn     = m68k_tr_translate_insn,
6112     .tb_stop            = m68k_tr_tb_stop,
6113 };
6114 
gen_intermediate_code(CPUState * cpu,TranslationBlock * tb,int * max_insns,vaddr pc,void * host_pc)6115 void gen_intermediate_code(CPUState *cpu, TranslationBlock *tb, int *max_insns,
6116                            vaddr pc, void *host_pc)
6117 {
6118     DisasContext dc;
6119     translator_loop(cpu, tb, max_insns, pc, host_pc, &m68k_tr_ops, &dc.base);
6120 }
6121 
floatx80_to_double(CPUM68KState * env,uint16_t high,uint64_t low)6122 static double floatx80_to_double(CPUM68KState *env, uint16_t high, uint64_t low)
6123 {
6124     floatx80 a = { .high = high, .low = low };
6125     union {
6126         float64 f64;
6127         double d;
6128     } u;
6129 
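    /*
     * float64 has the IEEE-754 binary64 layout, so the converted value's
     * bits can be viewed as a host double through the union.
     */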
6130     u.f64 = floatx80_to_float64(a, &env->fp_status);
6131     return u.d;
6132 }
6133 
m68k_cpu_dump_state(CPUState * cs,FILE * f,int flags)6134 void m68k_cpu_dump_state(CPUState *cs, FILE *f, int flags)
6135 {
6136     CPUM68KState *env = cpu_env(cs);
6137     int i;
6138     uint16_t sr;
6139     for (i = 0; i < 8; i++) {
6140         qemu_fprintf(f, "D%d = %08x   A%d = %08x   "
6141                      "F%d = %04x %016"PRIx64"  (%12g)\n",
6142                      i, env->dregs[i], i, env->aregs[i],
6143                      i, env->fregs[i].l.upper, env->fregs[i].l.lower,
6144                      floatx80_to_double(env, env->fregs[i].l.upper,
6145                                         env->fregs[i].l.lower));
6146     }
6147     qemu_fprintf(f, "PC = %08x   ", env->pc);
6148     sr = env->sr | cpu_m68k_get_ccr(env);
6149     qemu_fprintf(f, "SR = %04x T:%x I:%x %c%c %c%c%c%c%c\n",
6150                  sr, (sr & SR_T) >> SR_T_SHIFT, (sr & SR_I) >> SR_I_SHIFT,
6151                  (sr & SR_S) ? 'S' : 'U', (sr & SR_M) ? '%' : 'I',
6152                  (sr & CCF_X) ? 'X' : '-', (sr & CCF_N) ? 'N' : '-',
6153                  (sr & CCF_Z) ? 'Z' : '-', (sr & CCF_V) ? 'V' : '-',
6154                  (sr & CCF_C) ? 'C' : '-');
6155     qemu_fprintf(f, "FPSR = %08x %c%c%c%c ", env->fpsr,
6156                  (env->fpsr & FPSR_CC_A) ? 'A' : '-',
6157                  (env->fpsr & FPSR_CC_I) ? 'I' : '-',
6158                  (env->fpsr & FPSR_CC_Z) ? 'Z' : '-',
6159                  (env->fpsr & FPSR_CC_N) ? 'N' : '-');
6160     qemu_fprintf(f, "\n                                "
6161                  "FPCR =     %04x ", env->fpcr);
6162     switch (env->fpcr & FPCR_PREC_MASK) {
6163     case FPCR_PREC_X:
6164         qemu_fprintf(f, "X ");
6165         break;
6166     case FPCR_PREC_S:
6167         qemu_fprintf(f, "S ");
6168         break;
6169     case FPCR_PREC_D:
6170         qemu_fprintf(f, "D ");
6171         break;
6172     }
6173     switch (env->fpcr & FPCR_RND_MASK) {
6174     case FPCR_RND_N:
6175         qemu_fprintf(f, "RN ");
6176         break;
6177     case FPCR_RND_Z:
6178         qemu_fprintf(f, "RZ ");
6179         break;
6180     case FPCR_RND_M:
6181         qemu_fprintf(f, "RM ");
6182         break;
6183     case FPCR_RND_P:
6184         qemu_fprintf(f, "RP ");
6185         break;
6186     }
6187     qemu_fprintf(f, "\n");
6188 #ifndef CONFIG_USER_ONLY
6189     qemu_fprintf(f, "%sA7(MSP) = %08x %sA7(USP) = %08x %sA7(ISP) = %08x\n",
6190                  env->current_sp == M68K_SSP ? "->" : "  ", env->sp[M68K_SSP],
6191                  env->current_sp == M68K_USP ? "->" : "  ", env->sp[M68K_USP],
6192                  env->current_sp == M68K_ISP ? "->" : "  ", env->sp[M68K_ISP]);
6193     qemu_fprintf(f, "VBR = 0x%08x\n", env->vbr);
6194     qemu_fprintf(f, "SFC = %x DFC %x\n", env->sfc, env->dfc);
6195     qemu_fprintf(f, "SSW %08x TCR %08x URP %08x SRP %08x\n",
6196                  env->mmu.ssw, env->mmu.tcr, env->mmu.urp, env->mmu.srp);
6197     qemu_fprintf(f, "DTTR0/1: %08x/%08x ITTR0/1: %08x/%08x\n",
6198                  env->mmu.ttr[M68K_DTTR0], env->mmu.ttr[M68K_DTTR1],
6199                  env->mmu.ttr[M68K_ITTR0], env->mmu.ttr[M68K_ITTR1]);
6200     qemu_fprintf(f, "MMUSR %08x, fault at %08x\n",
6201                  env->mmu.mmusr, env->mmu.ar);
6202 #endif /* !CONFIG_USER_ONLY */
6203 }
6204