xref: /qemu/target/m68k/translate.c (revision 0ec8384f)
1 /*
2  *  m68k translation
3  *
4  *  Copyright (c) 2005-2007 CodeSourcery
5  *  Written by Paul Brook
6  *
7  * This library is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * This library is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "disas/disas.h"
24 #include "exec/exec-all.h"
25 #include "tcg/tcg-op.h"
26 #include "qemu/log.h"
27 #include "qemu/qemu-print.h"
28 #include "exec/cpu_ldst.h"
29 #include "exec/translator.h"
30 
31 #include "exec/helper-proto.h"
32 #include "exec/helper-gen.h"
33 
34 #include "exec/log.h"
35 #include "fpu/softfloat.h"
36 
37 
38 //#define DEBUG_DISPATCH 1
39 
40 #define DEFO32(name, offset) static TCGv QREG_##name;
41 #define DEFO64(name, offset) static TCGv_i64 QREG_##name;
42 #include "qregs.h.inc"
43 #undef DEFO32
44 #undef DEFO64
45 
46 static TCGv_i32 cpu_halted;
47 static TCGv_i32 cpu_exception_index;
48 
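/* Storage for register names: "D0".."A7" (3 bytes each), "ACC0".."ACC3" (5 each). */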
49 static char cpu_reg_names[2 * 8 * 3 + 5 * 4];
50 static TCGv cpu_dregs[8];
51 static TCGv cpu_aregs[8];
52 static TCGv_i64 cpu_macc[4];
53 
54 #define REG(insn, pos)  (((insn) >> (pos)) & 7)
55 #define DREG(insn, pos) cpu_dregs[REG(insn, pos)]
56 #define AREG(insn, pos) get_areg(s, REG(insn, pos))
57 #define MACREG(acc)     cpu_macc[acc]
58 #define QREG_SP         get_areg(s, 7)
59 
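/*
 * NULL_QREG and store_dummy are dummy globals (note the bogus negative env
 * offsets); they are used only as sentinel return values by the effective
 * address helpers below and are never actually read or written.
 */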
60 static TCGv NULL_QREG;
61 #define IS_NULL_QREG(t) (t == NULL_QREG)
62 /* Used to distinguish stores from bad addressing modes.  */
63 static TCGv store_dummy;
64 
65 #include "exec/gen-icount.h"
66 
67 void m68k_tcg_init(void)
68 {
69     char *p;
70     int i;
71 
72 #define DEFO32(name, offset) \
73     QREG_##name = tcg_global_mem_new_i32(cpu_env, \
74         offsetof(CPUM68KState, offset), #name);
75 #define DEFO64(name, offset) \
76     QREG_##name = tcg_global_mem_new_i64(cpu_env, \
77         offsetof(CPUM68KState, offset), #name);
78 #include "qregs.h.inc"
79 #undef DEFO32
80 #undef DEFO64
81 
82     cpu_halted = tcg_global_mem_new_i32(cpu_env,
83                                         -offsetof(M68kCPU, env) +
84                                         offsetof(CPUState, halted), "HALTED");
85     cpu_exception_index = tcg_global_mem_new_i32(cpu_env,
86                                                  -offsetof(M68kCPU, env) +
87                                                  offsetof(CPUState, exception_index),
88                                                  "EXCEPTION");
89 
90     p = cpu_reg_names;
91     for (i = 0; i < 8; i++) {
92         sprintf(p, "D%d", i);
93         cpu_dregs[i] = tcg_global_mem_new(cpu_env,
94                                           offsetof(CPUM68KState, dregs[i]), p);
95         p += 3;
96         sprintf(p, "A%d", i);
97         cpu_aregs[i] = tcg_global_mem_new(cpu_env,
98                                           offsetof(CPUM68KState, aregs[i]), p);
99         p += 3;
100     }
101     for (i = 0; i < 4; i++) {
102         sprintf(p, "ACC%d", i);
103         cpu_macc[i] = tcg_global_mem_new_i64(cpu_env,
104                                          offsetof(CPUM68KState, macc[i]), p);
105         p += 5;
106     }
107 
108     NULL_QREG = tcg_global_mem_new(cpu_env, -4, "NULL");
109     store_dummy = tcg_global_mem_new(cpu_env, -8, "NULL");
110 }
111 
112 /* internal defines */
113 typedef struct DisasContext {
114     DisasContextBase base;
115     CPUM68KState *env;
116     target_ulong pc;
117     target_ulong pc_prev;
118     CCOp cc_op; /* Current CC operation */
119     int cc_op_synced;
120     TCGv_i64 mactmp;
121     int done_mac;
122     int writeback_mask;
123     TCGv writeback[8];
124 #define MAX_TO_RELEASE 8
125     int release_count;
126     TCGv release[MAX_TO_RELEASE];
127     bool ss_active;
128 } DisasContext;
129 
130 static void init_release_array(DisasContext *s)
131 {
132 #ifdef CONFIG_DEBUG_TCG
133     memset(s->release, 0, sizeof(s->release));
134 #endif
135     s->release_count = 0;
136 }
137 
138 static void do_release(DisasContext *s)
139 {
140     int i;
141     for (i = 0; i < s->release_count; i++) {
142         tcg_temp_free(s->release[i]);
143     }
144     init_release_array(s);
145 }
146 
147 static TCGv mark_to_release(DisasContext *s, TCGv tmp)
148 {
149     g_assert(s->release_count < MAX_TO_RELEASE);
150     return s->release[s->release_count++] = tmp;
151 }
152 
153 static TCGv get_areg(DisasContext *s, unsigned regno)
154 {
155     if (s->writeback_mask & (1 << regno)) {
156         return s->writeback[regno];
157     } else {
158         return cpu_aregs[regno];
159     }
160 }
161 
162 static void delay_set_areg(DisasContext *s, unsigned regno,
163                            TCGv val, bool give_temp)
164 {
165     if (s->writeback_mask & (1 << regno)) {
166         if (give_temp) {
167             tcg_temp_free(s->writeback[regno]);
168             s->writeback[regno] = val;
169         } else {
170             tcg_gen_mov_i32(s->writeback[regno], val);
171         }
172     } else {
173         s->writeback_mask |= 1 << regno;
174         if (give_temp) {
175             s->writeback[regno] = val;
176         } else {
177             TCGv tmp = tcg_temp_new();
178             s->writeback[regno] = tmp;
179             tcg_gen_mov_i32(tmp, val);
180         }
181     }
182 }
183 
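/*
 * Address register updates from the (An)+ and -(An) addressing modes are
 * staged in s->writeback[] by delay_set_areg() and only committed to
 * cpu_aregs[] here, once the whole instruction has been translated, so a
 * faulting access does not leave a half-updated address register behind.
 */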
184 static void do_writebacks(DisasContext *s)
185 {
186     unsigned mask = s->writeback_mask;
187     if (mask) {
188         s->writeback_mask = 0;
189         do {
190             unsigned regno = ctz32(mask);
191             tcg_gen_mov_i32(cpu_aregs[regno], s->writeback[regno]);
192             tcg_temp_free(s->writeback[regno]);
193             mask &= mask - 1;
194         } while (mask);
195     }
196 }
197 
198 /* is_jmp field values */
199 #define DISAS_JUMP      DISAS_TARGET_0 /* only pc was modified dynamically */
200 #define DISAS_EXIT      DISAS_TARGET_1 /* cpu state was modified dynamically */
201 
202 #if defined(CONFIG_USER_ONLY)
203 #define IS_USER(s) 1
204 #else
205 #define IS_USER(s)   (!(s->base.tb->flags & TB_FLAGS_MSR_S))
206 #define SFC_INDEX(s) ((s->base.tb->flags & TB_FLAGS_SFC_S) ? \
207                       MMU_KERNEL_IDX : MMU_USER_IDX)
208 #define DFC_INDEX(s) ((s->base.tb->flags & TB_FLAGS_DFC_S) ? \
209                       MMU_KERNEL_IDX : MMU_USER_IDX)
210 #endif
211 
212 typedef void (*disas_proc)(CPUM68KState *env, DisasContext *s, uint16_t insn);
213 
214 #ifdef DEBUG_DISPATCH
215 #define DISAS_INSN(name)                                                \
216     static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
217                                   uint16_t insn);                       \
218     static void disas_##name(CPUM68KState *env, DisasContext *s,        \
219                              uint16_t insn)                             \
220     {                                                                   \
221         qemu_log("Dispatch " #name "\n");                               \
222         real_disas_##name(env, s, insn);                                \
223     }                                                                   \
224     static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
225                                   uint16_t insn)
226 #else
227 #define DISAS_INSN(name)                                                \
228     static void disas_##name(CPUM68KState *env, DisasContext *s,        \
229                              uint16_t insn)
230 #endif
231 
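/*
 * For each CC_OP, the set of CC flags whose values are live in the
 * QREG_CC_* globals; set_cc_op() uses this to discard values that the
 * new CC_OP makes dead.
 */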
232 static const uint8_t cc_op_live[CC_OP_NB] = {
233     [CC_OP_DYNAMIC] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
234     [CC_OP_FLAGS] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
235     [CC_OP_ADDB ... CC_OP_ADDL] = CCF_X | CCF_N | CCF_V,
236     [CC_OP_SUBB ... CC_OP_SUBL] = CCF_X | CCF_N | CCF_V,
237     [CC_OP_CMPB ... CC_OP_CMPL] = CCF_X | CCF_N | CCF_V,
238     [CC_OP_LOGIC] = CCF_X | CCF_N
239 };
240 
241 static void set_cc_op(DisasContext *s, CCOp op)
242 {
243     CCOp old_op = s->cc_op;
244     int dead;
245 
246     if (old_op == op) {
247         return;
248     }
249     s->cc_op = op;
250     s->cc_op_synced = 0;
251 
252     /*
253      * Discard CC computation that will no longer be used.
254      * Note that X and N are never dead.
255      */
256     dead = cc_op_live[old_op] & ~cc_op_live[op];
257     if (dead & CCF_C) {
258         tcg_gen_discard_i32(QREG_CC_C);
259     }
260     if (dead & CCF_Z) {
261         tcg_gen_discard_i32(QREG_CC_Z);
262     }
263     if (dead & CCF_V) {
264         tcg_gen_discard_i32(QREG_CC_V);
265     }
266 }
267 
268 /* Update the CPU env CC_OP state.  */
269 static void update_cc_op(DisasContext *s)
270 {
271     if (!s->cc_op_synced) {
272         s->cc_op_synced = 1;
273         tcg_gen_movi_i32(QREG_CC_OP, s->cc_op);
274     }
275 }
276 
277 /* Generate a jump to an immediate address.  */
278 static void gen_jmp_im(DisasContext *s, uint32_t dest)
279 {
280     update_cc_op(s);
281     tcg_gen_movi_i32(QREG_PC, dest);
282     s->base.is_jmp = DISAS_JUMP;
283 }
284 
285 /* Generate a jump to the address in qreg DEST.  */
286 static void gen_jmp(DisasContext *s, TCGv dest)
287 {
288     update_cc_op(s);
289     tcg_gen_mov_i32(QREG_PC, dest);
290     s->base.is_jmp = DISAS_JUMP;
291 }
292 
293 static void gen_raise_exception(int nr)
294 {
295     TCGv_i32 tmp;
296 
297     tmp = tcg_const_i32(nr);
298     gen_helper_raise_exception(cpu_env, tmp);
299     tcg_temp_free_i32(tmp);
300 }
301 
302 static void gen_raise_exception_format2(DisasContext *s, int nr,
303                                         target_ulong this_pc)
304 {
305     /*
306      * Pass the address of the insn to the exception handler,
307      * for recording in the Format $2 (6-word) stack frame.
308      * Re-use mmu.ar for the purpose, since that's only valid
309      * after tlb_fill.
310      */
311     tcg_gen_st_i32(tcg_constant_i32(this_pc), cpu_env,
312                    offsetof(CPUM68KState, mmu.ar));
313     gen_raise_exception(nr);
314     s->base.is_jmp = DISAS_NORETURN;
315 }
316 
317 static void gen_exception(DisasContext *s, uint32_t dest, int nr)
318 {
319     update_cc_op(s);
320     tcg_gen_movi_i32(QREG_PC, dest);
321 
322     gen_raise_exception(nr);
323 
324     s->base.is_jmp = DISAS_NORETURN;
325 }
326 
327 static inline void gen_addr_fault(DisasContext *s)
328 {
329     gen_exception(s, s->base.pc_next, EXCP_ADDRESS);
330 }
331 
332 /*
333  * Generate a load from the specified address.  Narrow values are sign- or
334  * zero-extended to full register width, as selected by SIGN.
335  */
336 static inline TCGv gen_load(DisasContext *s, int opsize, TCGv addr,
337                             int sign, int index)
338 {
339     TCGv tmp;
340     tmp = tcg_temp_new_i32();
341     switch(opsize) {
342     case OS_BYTE:
343         if (sign)
344             tcg_gen_qemu_ld8s(tmp, addr, index);
345         else
346             tcg_gen_qemu_ld8u(tmp, addr, index);
347         break;
348     case OS_WORD:
349         if (sign)
350             tcg_gen_qemu_ld16s(tmp, addr, index);
351         else
352             tcg_gen_qemu_ld16u(tmp, addr, index);
353         break;
354     case OS_LONG:
355         tcg_gen_qemu_ld32u(tmp, addr, index);
356         break;
357     default:
358         g_assert_not_reached();
359     }
360     return tmp;
361 }
362 
363 /* Generate a store.  */
364 static inline void gen_store(DisasContext *s, int opsize, TCGv addr, TCGv val,
365                              int index)
366 {
367     switch(opsize) {
368     case OS_BYTE:
369         tcg_gen_qemu_st8(val, addr, index);
370         break;
371     case OS_WORD:
372         tcg_gen_qemu_st16(val, addr, index);
373         break;
374     case OS_LONG:
375         tcg_gen_qemu_st32(val, addr, index);
376         break;
377     default:
378         g_assert_not_reached();
379     }
380 }
381 
382 typedef enum {
383     EA_STORE,
384     EA_LOADU,
385     EA_LOADS
386 } ea_what;
387 
388 /*
389  * Generate a zero-extending load if WHAT is EA_LOADU, a sign-extending
390  * load if WHAT is EA_LOADS; otherwise (EA_STORE) generate a store of VAL.
391  */
392 static TCGv gen_ldst(DisasContext *s, int opsize, TCGv addr, TCGv val,
393                      ea_what what, int index)
394 {
395     if (what == EA_STORE) {
396         gen_store(s, opsize, addr, val, index);
397         return store_dummy;
398     } else {
399         return mark_to_release(s, gen_load(s, opsize, addr,
400                                            what == EA_LOADS, index));
401     }
402 }
403 
404 /* Read a 16-bit immediate constant */
405 static inline uint16_t read_im16(CPUM68KState *env, DisasContext *s)
406 {
407     uint16_t im;
408     im = translator_lduw(env, &s->base, s->pc);
409     s->pc += 2;
410     return im;
411 }
412 
413 /* Read an 8-bit immediate constant */
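/* (A byte immediate still occupies a full 16-bit extension word; only the low byte is used.) */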
414 static inline uint8_t read_im8(CPUM68KState *env, DisasContext *s)
415 {
416     return read_im16(env, s);
417 }
418 
419 /* Read a 32-bit immediate constant.  */
420 static inline uint32_t read_im32(CPUM68KState *env, DisasContext *s)
421 {
422     uint32_t im;
423     im = read_im16(env, s) << 16;
424     im |= 0xffff & read_im16(env, s);
425     return im;
426 }
427 
428 /* Read a 64-bit immediate constant.  */
429 static inline uint64_t read_im64(CPUM68KState *env, DisasContext *s)
430 {
431     uint64_t im;
432     im = (uint64_t)read_im32(env, s) << 32;
433     im |= (uint64_t)read_im32(env, s);
434     return im;
435 }
436 
437 /* Calculate an address index.  */
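/*
 * In the extension word, bit 15 selects an address (1) or data (0) index
 * register, bits 14-12 give the register number, bit 11 selects a long (1)
 * or sign-extended word (0) index, and bits 10-9 hold the scale (1/2/4/8).
 */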
438 static TCGv gen_addr_index(DisasContext *s, uint16_t ext, TCGv tmp)
439 {
440     TCGv add;
441     int scale;
442 
443     add = (ext & 0x8000) ? AREG(ext, 12) : DREG(ext, 12);
444     if ((ext & 0x800) == 0) {
445         tcg_gen_ext16s_i32(tmp, add);
446         add = tmp;
447     }
448     scale = (ext >> 9) & 3;
449     if (scale != 0) {
450         tcg_gen_shli_i32(tmp, add, scale);
451         add = tmp;
452     }
453     return add;
454 }
455 
456 /*
457  * Handle a base + index + displacement effective address.
458  * A NULL_QREG base means pc-relative.
459  */
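/*
 * Full-format extension word layout, as decoded below: bit 8 = full format,
 * bit 7 = base suppress, bit 6 = index suppress, bits 5-4 = base displacement
 * size, bits 2-0 = memory indirection mode and outer displacement size.
 */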
460 static TCGv gen_lea_indexed(CPUM68KState *env, DisasContext *s, TCGv base)
461 {
462     uint32_t offset;
463     uint16_t ext;
464     TCGv add;
465     TCGv tmp;
466     uint32_t bd, od;
467 
468     offset = s->pc;
469     ext = read_im16(env, s);
470 
471     if ((ext & 0x800) == 0 && !m68k_feature(s->env, M68K_FEATURE_WORD_INDEX))
472         return NULL_QREG;
473 
474     if (m68k_feature(s->env, M68K_FEATURE_M68K) &&
475         !m68k_feature(s->env, M68K_FEATURE_SCALED_INDEX)) {
476         ext &= ~(3 << 9);
477     }
478 
479     if (ext & 0x100) {
480         /* full extension word format */
481         if (!m68k_feature(s->env, M68K_FEATURE_EXT_FULL))
482             return NULL_QREG;
483 
484         if ((ext & 0x30) > 0x10) {
485             /* base displacement */
486             if ((ext & 0x30) == 0x20) {
487                 bd = (int16_t)read_im16(env, s);
488             } else {
489                 bd = read_im32(env, s);
490             }
491         } else {
492             bd = 0;
493         }
494         tmp = mark_to_release(s, tcg_temp_new());
495         if ((ext & 0x44) == 0) {
496             /* pre-index */
497             add = gen_addr_index(s, ext, tmp);
498         } else {
499             add = NULL_QREG;
500         }
501         if ((ext & 0x80) == 0) {
502             /* base not suppressed */
503             if (IS_NULL_QREG(base)) {
504                 base = mark_to_release(s, tcg_const_i32(offset + bd));
505                 bd = 0;
506             }
507             if (!IS_NULL_QREG(add)) {
508                 tcg_gen_add_i32(tmp, add, base);
509                 add = tmp;
510             } else {
511                 add = base;
512             }
513         }
514         if (!IS_NULL_QREG(add)) {
515             if (bd != 0) {
516                 tcg_gen_addi_i32(tmp, add, bd);
517                 add = tmp;
518             }
519         } else {
520             add = mark_to_release(s, tcg_const_i32(bd));
521         }
522         if ((ext & 3) != 0) {
523             /* memory indirect */
524             base = mark_to_release(s, gen_load(s, OS_LONG, add, 0, IS_USER(s)));
525             if ((ext & 0x44) == 4) {
526                 add = gen_addr_index(s, ext, tmp);
527                 tcg_gen_add_i32(tmp, add, base);
528                 add = tmp;
529             } else {
530                 add = base;
531             }
532             if ((ext & 3) > 1) {
533                 /* outer displacement */
534                 if ((ext & 3) == 2) {
535                     od = (int16_t)read_im16(env, s);
536                 } else {
537                     od = read_im32(env, s);
538                 }
539             } else {
540                 od = 0;
541             }
542             if (od != 0) {
543                 tcg_gen_addi_i32(tmp, add, od);
544                 add = tmp;
545             }
546         }
547     } else {
548         /* brief extension word format */
549         tmp = mark_to_release(s, tcg_temp_new());
550         add = gen_addr_index(s, ext, tmp);
551         if (!IS_NULL_QREG(base)) {
552             tcg_gen_add_i32(tmp, add, base);
553             if ((int8_t)ext)
554                 tcg_gen_addi_i32(tmp, tmp, (int8_t)ext);
555         } else {
556             tcg_gen_addi_i32(tmp, add, offset + (int8_t)ext);
557         }
558         add = tmp;
559     }
560     return add;
561 }
562 
563 /* Sign or zero extend a value.  */
564 
565 static inline void gen_ext(TCGv res, TCGv val, int opsize, int sign)
566 {
567     switch (opsize) {
568     case OS_BYTE:
569         if (sign) {
570             tcg_gen_ext8s_i32(res, val);
571         } else {
572             tcg_gen_ext8u_i32(res, val);
573         }
574         break;
575     case OS_WORD:
576         if (sign) {
577             tcg_gen_ext16s_i32(res, val);
578         } else {
579             tcg_gen_ext16u_i32(res, val);
580         }
581         break;
582     case OS_LONG:
583         tcg_gen_mov_i32(res, val);
584         break;
585     default:
586         g_assert_not_reached();
587     }
588 }
589 
590 /* Evaluate all the CC flags.  */
591 
592 static void gen_flush_flags(DisasContext *s)
593 {
594     TCGv t0, t1;
595 
596     switch (s->cc_op) {
597     case CC_OP_FLAGS:
598         return;
599 
600     case CC_OP_ADDB:
601     case CC_OP_ADDW:
602     case CC_OP_ADDL:
603         tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
604         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
605         /* Compute signed overflow for addition.  */
606         t0 = tcg_temp_new();
607         t1 = tcg_temp_new();
608         tcg_gen_sub_i32(t0, QREG_CC_N, QREG_CC_V);
609         gen_ext(t0, t0, s->cc_op - CC_OP_ADDB, 1);
610         tcg_gen_xor_i32(t1, QREG_CC_N, QREG_CC_V);
611         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
612         tcg_temp_free(t0);
613         tcg_gen_andc_i32(QREG_CC_V, t1, QREG_CC_V);
614         tcg_temp_free(t1);
615         break;
616 
617     case CC_OP_SUBB:
618     case CC_OP_SUBW:
619     case CC_OP_SUBL:
620         tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
621         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
622         /* Compute signed overflow for subtraction.  */
623         t0 = tcg_temp_new();
624         t1 = tcg_temp_new();
625         tcg_gen_add_i32(t0, QREG_CC_N, QREG_CC_V);
626         gen_ext(t0, t0, s->cc_op - CC_OP_SUBB, 1);
627         tcg_gen_xor_i32(t1, QREG_CC_N, t0);
628         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
629         tcg_temp_free(t0);
630         tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t1);
631         tcg_temp_free(t1);
632         break;
633 
634     case CC_OP_CMPB:
635     case CC_OP_CMPW:
636     case CC_OP_CMPL:
637         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_C, QREG_CC_N, QREG_CC_V);
638         tcg_gen_sub_i32(QREG_CC_Z, QREG_CC_N, QREG_CC_V);
639         gen_ext(QREG_CC_Z, QREG_CC_Z, s->cc_op - CC_OP_CMPB, 1);
640         /* Compute signed overflow for subtraction.  */
641         t0 = tcg_temp_new();
642         tcg_gen_xor_i32(t0, QREG_CC_Z, QREG_CC_N);
643         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, QREG_CC_N);
644         tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t0);
645         tcg_temp_free(t0);
646         tcg_gen_mov_i32(QREG_CC_N, QREG_CC_Z);
647         break;
648 
649     case CC_OP_LOGIC:
650         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
651         tcg_gen_movi_i32(QREG_CC_C, 0);
652         tcg_gen_movi_i32(QREG_CC_V, 0);
653         break;
654 
655     case CC_OP_DYNAMIC:
656         gen_helper_flush_flags(cpu_env, QREG_CC_OP);
657         s->cc_op_synced = 1;
658         break;
659 
660     default:
661         t0 = tcg_const_i32(s->cc_op);
662         gen_helper_flush_flags(cpu_env, t0);
663         tcg_temp_free(t0);
664         s->cc_op_synced = 1;
665         break;
666     }
667 
668     /* Note that flush_flags also assigned to env->cc_op.  */
669     s->cc_op = CC_OP_FLAGS;
670 }
671 
672 static inline TCGv gen_extend(DisasContext *s, TCGv val, int opsize, int sign)
673 {
674     TCGv tmp;
675 
676     if (opsize == OS_LONG) {
677         tmp = val;
678     } else {
679         tmp = mark_to_release(s, tcg_temp_new());
680         gen_ext(tmp, val, opsize, sign);
681     }
682 
683     return tmp;
684 }
685 
686 static void gen_logic_cc(DisasContext *s, TCGv val, int opsize)
687 {
688     gen_ext(QREG_CC_N, val, opsize, 1);
689     set_cc_op(s, CC_OP_LOGIC);
690 }
691 
692 static void gen_update_cc_cmp(DisasContext *s, TCGv dest, TCGv src, int opsize)
693 {
694     tcg_gen_mov_i32(QREG_CC_N, dest);
695     tcg_gen_mov_i32(QREG_CC_V, src);
696     set_cc_op(s, CC_OP_CMPB + opsize);
697 }
698 
699 static void gen_update_cc_add(TCGv dest, TCGv src, int opsize)
700 {
701     gen_ext(QREG_CC_N, dest, opsize, 1);
702     tcg_gen_mov_i32(QREG_CC_V, src);
703 }
704 
705 static inline int opsize_bytes(int opsize)
706 {
707     switch (opsize) {
708     case OS_BYTE: return 1;
709     case OS_WORD: return 2;
710     case OS_LONG: return 4;
711     case OS_SINGLE: return 4;
712     case OS_DOUBLE: return 8;
713     case OS_EXTENDED: return 12;
714     case OS_PACKED: return 12;
715     default:
716         g_assert_not_reached();
717     }
718 }
719 
720 static inline int insn_opsize(int insn)
721 {
722     switch ((insn >> 6) & 3) {
723     case 0: return OS_BYTE;
724     case 1: return OS_WORD;
725     case 2: return OS_LONG;
726     default:
727         g_assert_not_reached();
728     }
729 }
730 
731 static inline int ext_opsize(int ext, int pos)
732 {
733     switch ((ext >> pos) & 7) {
734     case 0: return OS_LONG;
735     case 1: return OS_SINGLE;
736     case 2: return OS_EXTENDED;
737     case 3: return OS_PACKED;
738     case 4: return OS_WORD;
739     case 5: return OS_DOUBLE;
740     case 6: return OS_BYTE;
741     default:
742         g_assert_not_reached();
743     }
744 }
745 
746 /*
747  * Assign value to a register.  If the width is less than the register width,
748  * only the low part of the register is set.
749  */
750 static void gen_partset_reg(int opsize, TCGv reg, TCGv val)
751 {
752     TCGv tmp;
753     switch (opsize) {
754     case OS_BYTE:
755         tcg_gen_andi_i32(reg, reg, 0xffffff00);
756         tmp = tcg_temp_new();
757         tcg_gen_ext8u_i32(tmp, val);
758         tcg_gen_or_i32(reg, reg, tmp);
759         tcg_temp_free(tmp);
760         break;
761     case OS_WORD:
762         tcg_gen_andi_i32(reg, reg, 0xffff0000);
763         tmp = tcg_temp_new();
764         tcg_gen_ext16u_i32(tmp, val);
765         tcg_gen_or_i32(reg, reg, tmp);
766         tcg_temp_free(tmp);
767         break;
768     case OS_LONG:
769     case OS_SINGLE:
770         tcg_gen_mov_i32(reg, val);
771         break;
772     default:
773         g_assert_not_reached();
774     }
775 }
776 
777 /*
778  * Generate code for an "effective address".  Does not adjust the base
779  * register for autoincrement addressing modes.
780  */
781 static TCGv gen_lea_mode(CPUM68KState *env, DisasContext *s,
782                          int mode, int reg0, int opsize)
783 {
784     TCGv reg;
785     TCGv tmp;
786     uint16_t ext;
787     uint32_t offset;
788 
789     switch (mode) {
790     case 0: /* Data register direct.  */
791     case 1: /* Address register direct.  */
792         return NULL_QREG;
793     case 3: /* Indirect postincrement.  */
794         if (opsize == OS_UNSIZED) {
795             return NULL_QREG;
796         }
797         /* fallthru */
798     case 2: /* Indirect register */
799         return get_areg(s, reg0);
800     case 4: /* Indirect predecrement.  */
801         if (opsize == OS_UNSIZED) {
802             return NULL_QREG;
803         }
804         reg = get_areg(s, reg0);
805         tmp = mark_to_release(s, tcg_temp_new());
806         if (reg0 == 7 && opsize == OS_BYTE &&
807             m68k_feature(s->env, M68K_FEATURE_M68K)) {
808             tcg_gen_subi_i32(tmp, reg, 2);
809         } else {
810             tcg_gen_subi_i32(tmp, reg, opsize_bytes(opsize));
811         }
812         return tmp;
813     case 5: /* Indirect displacement.  */
814         reg = get_areg(s, reg0);
815         tmp = mark_to_release(s, tcg_temp_new());
816         ext = read_im16(env, s);
817         tcg_gen_addi_i32(tmp, reg, (int16_t)ext);
818         return tmp;
819     case 6: /* Indirect index + displacement.  */
820         reg = get_areg(s, reg0);
821         return gen_lea_indexed(env, s, reg);
822     case 7: /* Other */
823         switch (reg0) {
824         case 0: /* Absolute short.  */
825             offset = (int16_t)read_im16(env, s);
826             return mark_to_release(s, tcg_const_i32(offset));
827         case 1: /* Absolute long.  */
828             offset = read_im32(env, s);
829             return mark_to_release(s, tcg_const_i32(offset));
830         case 2: /* pc displacement  */
831             offset = s->pc;
832             offset += (int16_t)read_im16(env, s);
833             return mark_to_release(s, tcg_const_i32(offset));
834         case 3: /* pc index+displacement.  */
835             return gen_lea_indexed(env, s, NULL_QREG);
836         case 4: /* Immediate.  */
837         default:
838             return NULL_QREG;
839         }
840     }
841     /* Should never happen.  */
842     return NULL_QREG;
843 }
844 
845 static TCGv gen_lea(CPUM68KState *env, DisasContext *s, uint16_t insn,
846                     int opsize)
847 {
848     int mode = extract32(insn, 3, 3);
849     int reg0 = REG(insn, 0);
850     return gen_lea_mode(env, s, mode, reg0, opsize);
851 }
852 
853 /*
854  * Generate code to load/store a value from/into an EA.  EA_STORE writes VAL;
855  * EA_LOADS/EA_LOADU perform a sign-/zero-extending read.
856  * ADDRP is non-null for readwrite operands.
857  */
858 static TCGv gen_ea_mode(CPUM68KState *env, DisasContext *s, int mode, int reg0,
859                         int opsize, TCGv val, TCGv *addrp, ea_what what,
860                         int index)
861 {
862     TCGv reg, tmp, result;
863     int32_t offset;
864 
865     switch (mode) {
866     case 0: /* Data register direct.  */
867         reg = cpu_dregs[reg0];
868         if (what == EA_STORE) {
869             gen_partset_reg(opsize, reg, val);
870             return store_dummy;
871         } else {
872             return gen_extend(s, reg, opsize, what == EA_LOADS);
873         }
874     case 1: /* Address register direct.  */
875         reg = get_areg(s, reg0);
876         if (what == EA_STORE) {
877             tcg_gen_mov_i32(reg, val);
878             return store_dummy;
879         } else {
880             return gen_extend(s, reg, opsize, what == EA_LOADS);
881         }
882     case 2: /* Indirect register */
883         reg = get_areg(s, reg0);
884         return gen_ldst(s, opsize, reg, val, what, index);
885     case 3: /* Indirect postincrement.  */
886         reg = get_areg(s, reg0);
887         result = gen_ldst(s, opsize, reg, val, what, index);
888         if (what == EA_STORE || !addrp) {
889             TCGv tmp = tcg_temp_new();
890             if (reg0 == 7 && opsize == OS_BYTE &&
891                 m68k_feature(s->env, M68K_FEATURE_M68K)) {
892                 tcg_gen_addi_i32(tmp, reg, 2);
893             } else {
894                 tcg_gen_addi_i32(tmp, reg, opsize_bytes(opsize));
895             }
896             delay_set_areg(s, reg0, tmp, true);
897         }
898         return result;
899     case 4: /* Indirect predecrement.  */
900         if (addrp && what == EA_STORE) {
901             tmp = *addrp;
902         } else {
903             tmp = gen_lea_mode(env, s, mode, reg0, opsize);
904             if (IS_NULL_QREG(tmp)) {
905                 return tmp;
906             }
907             if (addrp) {
908                 *addrp = tmp;
909             }
910         }
911         result = gen_ldst(s, opsize, tmp, val, what, index);
912         if (what == EA_STORE || !addrp) {
913             delay_set_areg(s, reg0, tmp, false);
914         }
915         return result;
916     case 5: /* Indirect displacement.  */
917     case 6: /* Indirect index + displacement.  */
918     do_indirect:
919         if (addrp && what == EA_STORE) {
920             tmp = *addrp;
921         } else {
922             tmp = gen_lea_mode(env, s, mode, reg0, opsize);
923             if (IS_NULL_QREG(tmp)) {
924                 return tmp;
925             }
926             if (addrp) {
927                 *addrp = tmp;
928             }
929         }
930         return gen_ldst(s, opsize, tmp, val, what, index);
931     case 7: /* Other */
932         switch (reg0) {
933         case 0: /* Absolute short.  */
934         case 1: /* Absolute long.  */
935         case 2: /* pc displacement  */
936         case 3: /* pc index+displacement.  */
937             goto do_indirect;
938         case 4: /* Immediate.  */
939             /* Sign extend values for consistency.  */
940             switch (opsize) {
941             case OS_BYTE:
942                 if (what == EA_LOADS) {
943                     offset = (int8_t)read_im8(env, s);
944                 } else {
945                     offset = read_im8(env, s);
946                 }
947                 break;
948             case OS_WORD:
949                 if (what == EA_LOADS) {
950                     offset = (int16_t)read_im16(env, s);
951                 } else {
952                     offset = read_im16(env, s);
953                 }
954                 break;
955             case OS_LONG:
956                 offset = read_im32(env, s);
957                 break;
958             default:
959                 g_assert_not_reached();
960             }
961             return mark_to_release(s, tcg_const_i32(offset));
962         default:
963             return NULL_QREG;
964         }
965     }
966     /* Should never happen.  */
967     return NULL_QREG;
968 }
969 
970 static TCGv gen_ea(CPUM68KState *env, DisasContext *s, uint16_t insn,
971                    int opsize, TCGv val, TCGv *addrp, ea_what what, int index)
972 {
973     int mode = extract32(insn, 3, 3);
974     int reg0 = REG(insn, 0);
975     return gen_ea_mode(env, s, mode, reg0, opsize, val, addrp, what, index);
976 }
977 
978 static TCGv_ptr gen_fp_ptr(int freg)
979 {
980     TCGv_ptr fp = tcg_temp_new_ptr();
981     tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fregs[freg]));
982     return fp;
983 }
984 
985 static TCGv_ptr gen_fp_result_ptr(void)
986 {
987     TCGv_ptr fp = tcg_temp_new_ptr();
988     tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fp_result));
989     return fp;
990 }
991 
992 static void gen_fp_move(TCGv_ptr dest, TCGv_ptr src)
993 {
994     TCGv t32;
995     TCGv_i64 t64;
996 
997     t32 = tcg_temp_new();
998     tcg_gen_ld16u_i32(t32, src, offsetof(FPReg, l.upper));
999     tcg_gen_st16_i32(t32, dest, offsetof(FPReg, l.upper));
1000     tcg_temp_free(t32);
1001 
1002     t64 = tcg_temp_new_i64();
1003     tcg_gen_ld_i64(t64, src, offsetof(FPReg, l.lower));
1004     tcg_gen_st_i64(t64, dest, offsetof(FPReg, l.lower));
1005     tcg_temp_free_i64(t64);
1006 }
1007 
1008 static void gen_load_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
1009                         int index)
1010 {
1011     TCGv tmp;
1012     TCGv_i64 t64;
1013 
1014     t64 = tcg_temp_new_i64();
1015     tmp = tcg_temp_new();
1016     switch (opsize) {
1017     case OS_BYTE:
1018         tcg_gen_qemu_ld8s(tmp, addr, index);
1019         gen_helper_exts32(cpu_env, fp, tmp);
1020         break;
1021     case OS_WORD:
1022         tcg_gen_qemu_ld16s(tmp, addr, index);
1023         gen_helper_exts32(cpu_env, fp, tmp);
1024         break;
1025     case OS_LONG:
1026         tcg_gen_qemu_ld32u(tmp, addr, index);
1027         gen_helper_exts32(cpu_env, fp, tmp);
1028         break;
1029     case OS_SINGLE:
1030         tcg_gen_qemu_ld32u(tmp, addr, index);
1031         gen_helper_extf32(cpu_env, fp, tmp);
1032         break;
1033     case OS_DOUBLE:
1034         tcg_gen_qemu_ld64(t64, addr, index);
1035         gen_helper_extf64(cpu_env, fp, t64);
1036         break;
1037     case OS_EXTENDED:
1038         if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1039             gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1040             break;
1041         }
1042         tcg_gen_qemu_ld32u(tmp, addr, index);
1043         tcg_gen_shri_i32(tmp, tmp, 16);
1044         tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
1045         tcg_gen_addi_i32(tmp, addr, 4);
1046         tcg_gen_qemu_ld64(t64, tmp, index);
1047         tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
1048         break;
1049     case OS_PACKED:
1050         /*
1051          * unimplemented data type on 68040/ColdFire
1052          * FIXME if needed for another FPU
1053          */
1054         gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1055         break;
1056     default:
1057         g_assert_not_reached();
1058     }
1059     tcg_temp_free(tmp);
1060     tcg_temp_free_i64(t64);
1061 }
1062 
1063 static void gen_store_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
1064                          int index)
1065 {
1066     TCGv tmp;
1067     TCGv_i64 t64;
1068 
1069     t64 = tcg_temp_new_i64();
1070     tmp = tcg_temp_new();
1071     switch (opsize) {
1072     case OS_BYTE:
1073         gen_helper_reds32(tmp, cpu_env, fp);
1074         tcg_gen_qemu_st8(tmp, addr, index);
1075         break;
1076     case OS_WORD:
1077         gen_helper_reds32(tmp, cpu_env, fp);
1078         tcg_gen_qemu_st16(tmp, addr, index);
1079         break;
1080     case OS_LONG:
1081         gen_helper_reds32(tmp, cpu_env, fp);
1082         tcg_gen_qemu_st32(tmp, addr, index);
1083         break;
1084     case OS_SINGLE:
1085         gen_helper_redf32(tmp, cpu_env, fp);
1086         tcg_gen_qemu_st32(tmp, addr, index);
1087         break;
1088     case OS_DOUBLE:
1089         gen_helper_redf64(t64, cpu_env, fp);
1090         tcg_gen_qemu_st64(t64, addr, index);
1091         break;
1092     case OS_EXTENDED:
1093         if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1094             gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1095             break;
1096         }
1097         tcg_gen_ld16u_i32(tmp, fp, offsetof(FPReg, l.upper));
1098         tcg_gen_shli_i32(tmp, tmp, 16);
1099         tcg_gen_qemu_st32(tmp, addr, index);
1100         tcg_gen_addi_i32(tmp, addr, 4);
1101         tcg_gen_ld_i64(t64, fp, offsetof(FPReg, l.lower));
1102         tcg_gen_qemu_st64(t64, tmp, index);
1103         break;
1104     case OS_PACKED:
1105         /*
1106          * unimplemented data type on 68040/ColdFire
1107          * FIXME if needed for another FPU
1108          */
1109         gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1110         break;
1111     default:
1112         g_assert_not_reached();
1113     }
1114     tcg_temp_free(tmp);
1115     tcg_temp_free_i64(t64);
1116 }
1117 
1118 static void gen_ldst_fp(DisasContext *s, int opsize, TCGv addr,
1119                         TCGv_ptr fp, ea_what what, int index)
1120 {
1121     if (what == EA_STORE) {
1122         gen_store_fp(s, opsize, addr, fp, index);
1123     } else {
1124         gen_load_fp(s, opsize, addr, fp, index);
1125     }
1126 }
1127 
1128 static int gen_ea_mode_fp(CPUM68KState *env, DisasContext *s, int mode,
1129                           int reg0, int opsize, TCGv_ptr fp, ea_what what,
1130                           int index)
1131 {
1132     TCGv reg, addr, tmp;
1133     TCGv_i64 t64;
1134 
1135     switch (mode) {
1136     case 0: /* Data register direct.  */
1137         reg = cpu_dregs[reg0];
1138         if (what == EA_STORE) {
1139             switch (opsize) {
1140             case OS_BYTE:
1141             case OS_WORD:
1142             case OS_LONG:
1143                 gen_helper_reds32(reg, cpu_env, fp);
1144                 break;
1145             case OS_SINGLE:
1146                 gen_helper_redf32(reg, cpu_env, fp);
1147                 break;
1148             default:
1149                 g_assert_not_reached();
1150             }
1151         } else {
1152             tmp = tcg_temp_new();
1153             switch (opsize) {
1154             case OS_BYTE:
1155                 tcg_gen_ext8s_i32(tmp, reg);
1156                 gen_helper_exts32(cpu_env, fp, tmp);
1157                 break;
1158             case OS_WORD:
1159                 tcg_gen_ext16s_i32(tmp, reg);
1160                 gen_helper_exts32(cpu_env, fp, tmp);
1161                 break;
1162             case OS_LONG:
1163                 gen_helper_exts32(cpu_env, fp, reg);
1164                 break;
1165             case OS_SINGLE:
1166                 gen_helper_extf32(cpu_env, fp, reg);
1167                 break;
1168             default:
1169                 g_assert_not_reached();
1170             }
1171             tcg_temp_free(tmp);
1172         }
1173         return 0;
1174     case 1: /* Address register direct.  */
1175         return -1;
1176     case 2: /* Indirect register */
1177         addr = get_areg(s, reg0);
1178         gen_ldst_fp(s, opsize, addr, fp, what, index);
1179         return 0;
1180     case 3: /* Indirect postincrement.  */
1181         addr = cpu_aregs[reg0];
1182         gen_ldst_fp(s, opsize, addr, fp, what, index);
1183         tcg_gen_addi_i32(addr, addr, opsize_bytes(opsize));
1184         return 0;
1185     case 4: /* Indirect predecrement.  */
1186         addr = gen_lea_mode(env, s, mode, reg0, opsize);
1187         if (IS_NULL_QREG(addr)) {
1188             return -1;
1189         }
1190         gen_ldst_fp(s, opsize, addr, fp, what, index);
1191         tcg_gen_mov_i32(cpu_aregs[reg0], addr);
1192         return 0;
1193     case 5: /* Indirect displacement.  */
1194     case 6: /* Indirect index + displacement.  */
1195     do_indirect:
1196         addr = gen_lea_mode(env, s, mode, reg0, opsize);
1197         if (IS_NULL_QREG(addr)) {
1198             return -1;
1199         }
1200         gen_ldst_fp(s, opsize, addr, fp, what, index);
1201         return 0;
1202     case 7: /* Other */
1203         switch (reg0) {
1204         case 0: /* Absolute short.  */
1205         case 1: /* Absolute long.  */
1206         case 2: /* pc displacement  */
1207         case 3: /* pc index+displacement.  */
1208             goto do_indirect;
1209         case 4: /* Immediate.  */
1210             if (what == EA_STORE) {
1211                 return -1;
1212             }
1213             switch (opsize) {
1214             case OS_BYTE:
1215                 tmp = tcg_const_i32((int8_t)read_im8(env, s));
1216                 gen_helper_exts32(cpu_env, fp, tmp);
1217                 tcg_temp_free(tmp);
1218                 break;
1219             case OS_WORD:
1220                 tmp = tcg_const_i32((int16_t)read_im16(env, s));
1221                 gen_helper_exts32(cpu_env, fp, tmp);
1222                 tcg_temp_free(tmp);
1223                 break;
1224             case OS_LONG:
1225                 tmp = tcg_const_i32(read_im32(env, s));
1226                 gen_helper_exts32(cpu_env, fp, tmp);
1227                 tcg_temp_free(tmp);
1228                 break;
1229             case OS_SINGLE:
1230                 tmp = tcg_const_i32(read_im32(env, s));
1231                 gen_helper_extf32(cpu_env, fp, tmp);
1232                 tcg_temp_free(tmp);
1233                 break;
1234             case OS_DOUBLE:
1235                 t64 = tcg_const_i64(read_im64(env, s));
1236                 gen_helper_extf64(cpu_env, fp, t64);
1237                 tcg_temp_free_i64(t64);
1238                 break;
1239             case OS_EXTENDED:
1240                 if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1241                     gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1242                     break;
1243                 }
1244                 tmp = tcg_const_i32(read_im32(env, s) >> 16);
1245                 tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
1246                 tcg_temp_free(tmp);
1247                 t64 = tcg_const_i64(read_im64(env, s));
1248                 tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
1249                 tcg_temp_free_i64(t64);
1250                 break;
1251             case OS_PACKED:
1252                 /*
1253                  * unimplemented data type on 68040/ColdFire
1254                  * FIXME if needed for another FPU
1255                  */
1256                 gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1257                 break;
1258             default:
1259                 g_assert_not_reached();
1260             }
1261             return 0;
1262         default:
1263             return -1;
1264         }
1265     }
1266     return -1;
1267 }
1268 
1269 static int gen_ea_fp(CPUM68KState *env, DisasContext *s, uint16_t insn,
1270                        int opsize, TCGv_ptr fp, ea_what what, int index)
1271 {
1272     int mode = extract32(insn, 3, 3);
1273     int reg0 = REG(insn, 0);
1274     return gen_ea_mode_fp(env, s, mode, reg0, opsize, fp, what, index);
1275 }
1276 
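/*
 * A condition expressed as the TCG comparison tcond(v1, v2).  g1/g2 record
 * whether v1/v2 are TCG globals, which free_cond() must not free.
 */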
1277 typedef struct {
1278     TCGCond tcond;
1279     bool g1;
1280     bool g2;
1281     TCGv v1;
1282     TCGv v2;
1283 } DisasCompare;
1284 
1285 static void gen_cc_cond(DisasCompare *c, DisasContext *s, int cond)
1286 {
1287     TCGv tmp, tmp2;
1288     TCGCond tcond;
1289     CCOp op = s->cc_op;
1290 
1291     /* The CC_OP_CMP form can handle most normal comparisons directly.  */
1292     if (op == CC_OP_CMPB || op == CC_OP_CMPW || op == CC_OP_CMPL) {
1293         c->g1 = c->g2 = 1;
1294         c->v1 = QREG_CC_N;
1295         c->v2 = QREG_CC_V;
1296         switch (cond) {
1297         case 2: /* HI */
1298         case 3: /* LS */
1299             tcond = TCG_COND_LEU;
1300             goto done;
1301         case 4: /* CC */
1302         case 5: /* CS */
1303             tcond = TCG_COND_LTU;
1304             goto done;
1305         case 6: /* NE */
1306         case 7: /* EQ */
1307             tcond = TCG_COND_EQ;
1308             goto done;
1309         case 10: /* PL */
1310         case 11: /* MI */
1311             c->g1 = c->g2 = 0;
1312             c->v2 = tcg_const_i32(0);
1313             c->v1 = tmp = tcg_temp_new();
1314             tcg_gen_sub_i32(tmp, QREG_CC_N, QREG_CC_V);
1315             gen_ext(tmp, tmp, op - CC_OP_CMPB, 1);
1316             /* fallthru */
1317         case 12: /* GE */
1318         case 13: /* LT */
1319             tcond = TCG_COND_LT;
1320             goto done;
1321         case 14: /* GT */
1322         case 15: /* LE */
1323             tcond = TCG_COND_LE;
1324             goto done;
1325         }
1326     }
1327 
1328     c->g1 = 1;
1329     c->g2 = 0;
1330     c->v2 = tcg_const_i32(0);
1331 
1332     switch (cond) {
1333     case 0: /* T */
1334     case 1: /* F */
1335         c->v1 = c->v2;
1336         tcond = TCG_COND_NEVER;
1337         goto done;
1338     case 14: /* GT (!(Z || (N ^ V))) */
1339     case 15: /* LE (Z || (N ^ V)) */
1340         /*
1341          * Logic operations clear V, which simplifies LE to (Z || N),
1342          * and since Z and N are co-located, this becomes a normal
1343          * comparison vs N.
1344          */
1345         if (op == CC_OP_LOGIC) {
1346             c->v1 = QREG_CC_N;
1347             tcond = TCG_COND_LE;
1348             goto done;
1349         }
1350         break;
1351     case 12: /* GE (!(N ^ V)) */
1352     case 13: /* LT (N ^ V) */
1353         /* Logic operations clear V, which simplifies this to N.  */
1354         if (op != CC_OP_LOGIC) {
1355             break;
1356         }
1357         /* fallthru */
1358     case 10: /* PL (!N) */
1359     case 11: /* MI (N) */
1360         /* Several cases represent N normally.  */
1361         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1362             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
1363             op == CC_OP_LOGIC) {
1364             c->v1 = QREG_CC_N;
1365             tcond = TCG_COND_LT;
1366             goto done;
1367         }
1368         break;
1369     case 6: /* NE (!Z) */
1370     case 7: /* EQ (Z) */
1371         /* Some cases fold Z into N.  */
1372         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1373             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
1374             op == CC_OP_LOGIC) {
1375             tcond = TCG_COND_EQ;
1376             c->v1 = QREG_CC_N;
1377             goto done;
1378         }
1379         break;
1380     case 4: /* CC (!C) */
1381     case 5: /* CS (C) */
1382         /* Some cases fold C into X.  */
1383         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1384             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL) {
1385             tcond = TCG_COND_NE;
1386             c->v1 = QREG_CC_X;
1387             goto done;
1388         }
1389         /* fallthru */
1390     case 8: /* VC (!V) */
1391     case 9: /* VS (V) */
1392         /* Logic operations clear V and C.  */
1393         if (op == CC_OP_LOGIC) {
1394             tcond = TCG_COND_NEVER;
1395             c->v1 = c->v2;
1396             goto done;
1397         }
1398         break;
1399     }
1400 
1401     /* Otherwise, flush flag state to CC_OP_FLAGS.  */
1402     gen_flush_flags(s);
1403 
1404     switch (cond) {
1405     case 0: /* T */
1406     case 1: /* F */
1407     default:
1408         /* Invalid, or handled above.  */
1409         abort();
1410     case 2: /* HI (!C && !Z) -> !(C || Z)*/
1411     case 3: /* LS (C || Z) */
1412         c->v1 = tmp = tcg_temp_new();
1413         c->g1 = 0;
1414         tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
1415         tcg_gen_or_i32(tmp, tmp, QREG_CC_C);
1416         tcond = TCG_COND_NE;
1417         break;
1418     case 4: /* CC (!C) */
1419     case 5: /* CS (C) */
1420         c->v1 = QREG_CC_C;
1421         tcond = TCG_COND_NE;
1422         break;
1423     case 6: /* NE (!Z) */
1424     case 7: /* EQ (Z) */
1425         c->v1 = QREG_CC_Z;
1426         tcond = TCG_COND_EQ;
1427         break;
1428     case 8: /* VC (!V) */
1429     case 9: /* VS (V) */
1430         c->v1 = QREG_CC_V;
1431         tcond = TCG_COND_LT;
1432         break;
1433     case 10: /* PL (!N) */
1434     case 11: /* MI (N) */
1435         c->v1 = QREG_CC_N;
1436         tcond = TCG_COND_LT;
1437         break;
1438     case 12: /* GE (!(N ^ V)) */
1439     case 13: /* LT (N ^ V) */
1440         c->v1 = tmp = tcg_temp_new();
1441         c->g1 = 0;
1442         tcg_gen_xor_i32(tmp, QREG_CC_N, QREG_CC_V);
1443         tcond = TCG_COND_LT;
1444         break;
1445     case 14: /* GT (!(Z || (N ^ V))) */
1446     case 15: /* LE (Z || (N ^ V)) */
1447         c->v1 = tmp = tcg_temp_new();
1448         c->g1 = 0;
1449         tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
1450         tcg_gen_neg_i32(tmp, tmp);
1451         tmp2 = tcg_temp_new();
1452         tcg_gen_xor_i32(tmp2, QREG_CC_N, QREG_CC_V);
1453         tcg_gen_or_i32(tmp, tmp, tmp2);
1454         tcg_temp_free(tmp2);
1455         tcond = TCG_COND_LT;
1456         break;
1457     }
1458 
1459  done:
1460     if ((cond & 1) == 0) {
1461         tcond = tcg_invert_cond(tcond);
1462     }
1463     c->tcond = tcond;
1464 }
1465 
1466 static void free_cond(DisasCompare *c)
1467 {
1468     if (!c->g1) {
1469         tcg_temp_free(c->v1);
1470     }
1471     if (!c->g2) {
1472         tcg_temp_free(c->v2);
1473     }
1474 }
1475 
1476 static void gen_jmpcc(DisasContext *s, int cond, TCGLabel *l1)
1477 {
1478     DisasCompare c;
1479 
1480     gen_cc_cond(&c, s, cond);
1481     update_cc_op(s);
1482     tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
1483     free_cond(&c);
1484 }
1485 
1486 /* Force a TB lookup after an instruction that changes the CPU state.  */
1487 static void gen_exit_tb(DisasContext *s)
1488 {
1489     update_cc_op(s);
1490     tcg_gen_movi_i32(QREG_PC, s->pc);
1491     s->base.is_jmp = DISAS_EXIT;
1492 }
1493 
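/*
 * Wrappers around gen_ea() for the insn handlers below: if the effective
 * address is invalid, they raise an address error exception and return
 * from the calling handler.
 */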
1494 #define SRC_EA(env, result, opsize, op_sign, addrp) do {                \
1495         result = gen_ea(env, s, insn, opsize, NULL_QREG, addrp,         \
1496                         op_sign ? EA_LOADS : EA_LOADU, IS_USER(s));     \
1497         if (IS_NULL_QREG(result)) {                                     \
1498             gen_addr_fault(s);                                          \
1499             return;                                                     \
1500         }                                                               \
1501     } while (0)
1502 
1503 #define DEST_EA(env, insn, opsize, val, addrp) do {                     \
1504         TCGv ea_result = gen_ea(env, s, insn, opsize, val, addrp,       \
1505                                 EA_STORE, IS_USER(s));                  \
1506         if (IS_NULL_QREG(ea_result)) {                                  \
1507             gen_addr_fault(s);                                          \
1508             return;                                                     \
1509         }                                                               \
1510     } while (0)
1511 
1512 /* Generate a jump to an immediate address, chaining to the target TB when possible.  */
1513 static void gen_jmp_tb(DisasContext *s, int n, target_ulong dest,
1514                        target_ulong src)
1515 {
1516     if (unlikely(s->ss_active)) {
1517         update_cc_op(s);
1518         tcg_gen_movi_i32(QREG_PC, dest);
1519         gen_raise_exception_format2(s, EXCP_TRACE, src);
1520     } else if (translator_use_goto_tb(&s->base, dest)) {
1521         tcg_gen_goto_tb(n);
1522         tcg_gen_movi_i32(QREG_PC, dest);
1523         tcg_gen_exit_tb(s->base.tb, n);
1524     } else {
1525         gen_jmp_im(s, dest);
1526         tcg_gen_exit_tb(NULL, 0);
1527     }
1528     s->base.is_jmp = DISAS_NORETURN;
1529 }
1530 
1531 DISAS_INSN(scc)
1532 {
1533     DisasCompare c;
1534     int cond;
1535     TCGv tmp;
1536 
1537     cond = (insn >> 8) & 0xf;
1538     gen_cc_cond(&c, s, cond);
1539 
1540     tmp = tcg_temp_new();
1541     tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
1542     free_cond(&c);
1543 
1544     tcg_gen_neg_i32(tmp, tmp);
1545     DEST_EA(env, insn, OS_BYTE, tmp, NULL);
1546     tcg_temp_free(tmp);
1547 }
1548 
1549 DISAS_INSN(dbcc)
1550 {
1551     TCGLabel *l1;
1552     TCGv reg;
1553     TCGv tmp;
1554     int16_t offset;
1555     uint32_t base;
1556 
1557     reg = DREG(insn, 0);
1558     base = s->pc;
1559     offset = (int16_t)read_im16(env, s);
1560     l1 = gen_new_label();
1561     gen_jmpcc(s, (insn >> 8) & 0xf, l1);
1562 
1563     tmp = tcg_temp_new();
1564     tcg_gen_ext16s_i32(tmp, reg);
1565     tcg_gen_addi_i32(tmp, tmp, -1);
1566     gen_partset_reg(OS_WORD, reg, tmp);
1567     tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, -1, l1);
1568     gen_jmp_tb(s, 1, base + offset, s->base.pc_next);
1569     gen_set_label(l1);
1570     gen_jmp_tb(s, 0, s->pc, s->base.pc_next);
1571 }
1572 
1573 DISAS_INSN(undef_mac)
1574 {
1575     gen_exception(s, s->base.pc_next, EXCP_LINEA);
1576 }
1577 
1578 DISAS_INSN(undef_fpu)
1579 {
1580     gen_exception(s, s->base.pc_next, EXCP_LINEF);
1581 }
1582 
1583 DISAS_INSN(undef)
1584 {
1585     /*
1586      * ??? This covers both instructions that are as yet unimplemented
1587      * for the 680x0 series, and those that are implemented but
1588      * actually illegal for CPU32 or pre-68020.
1589      */
1590     qemu_log_mask(LOG_UNIMP, "Illegal instruction: %04x @ %08x\n",
1591                   insn, s->base.pc_next);
1592     gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
1593 }
1594 
1595 DISAS_INSN(mulw)
1596 {
1597     TCGv reg;
1598     TCGv tmp;
1599     TCGv src;
1600     int sign;
1601 
1602     sign = (insn & 0x100) != 0;
1603     reg = DREG(insn, 9);
1604     tmp = tcg_temp_new();
1605     if (sign)
1606         tcg_gen_ext16s_i32(tmp, reg);
1607     else
1608         tcg_gen_ext16u_i32(tmp, reg);
1609     SRC_EA(env, src, OS_WORD, sign, NULL);
1610     tcg_gen_mul_i32(tmp, tmp, src);
1611     tcg_gen_mov_i32(reg, tmp);
1612     gen_logic_cc(s, tmp, OS_LONG);
1613     tcg_temp_free(tmp);
1614 }
1615 
1616 DISAS_INSN(divw)
1617 {
1618     int sign;
1619     TCGv src;
1620     TCGv destr;
1621     TCGv ilen;
1622 
1623     /* divX.w <EA>,Dn    32/16 -> 16r:16q */
1624 
1625     sign = (insn & 0x100) != 0;
1626 
1627     /* dest.l / src.w */
1628 
1629     SRC_EA(env, src, OS_WORD, sign, NULL);
1630     destr = tcg_constant_i32(REG(insn, 9));
1631     ilen = tcg_constant_i32(s->pc - s->base.pc_next);
1632     if (sign) {
1633         gen_helper_divsw(cpu_env, destr, src, ilen);
1634     } else {
1635         gen_helper_divuw(cpu_env, destr, src, ilen);
1636     }
1637 
1638     set_cc_op(s, CC_OP_FLAGS);
1639 }
1640 
1641 DISAS_INSN(divl)
1642 {
1643     TCGv num, reg, den, ilen;
1644     int sign;
1645     uint16_t ext;
1646 
1647     ext = read_im16(env, s);
1648 
1649     sign = (ext & 0x0800) != 0;
1650 
1651     if (ext & 0x400) {
1652         if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
1653             gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
1654             return;
1655         }
1656 
1657         /* divX.l <EA>, Dr:Dq    64/32 -> 32r:32q */
1658 
1659         SRC_EA(env, den, OS_LONG, 0, NULL);
1660         num = tcg_constant_i32(REG(ext, 12));
1661         reg = tcg_constant_i32(REG(ext, 0));
1662         ilen = tcg_constant_i32(s->pc - s->base.pc_next);
1663         if (sign) {
1664             gen_helper_divsll(cpu_env, num, reg, den, ilen);
1665         } else {
1666             gen_helper_divull(cpu_env, num, reg, den, ilen);
1667         }
1668         set_cc_op(s, CC_OP_FLAGS);
1669         return;
1670     }
1671 
1672     /* divX.l <EA>, Dq        32/32 -> 32q     */
1673     /* divXl.l <EA>, Dr:Dq    32/32 -> 32r:32q */
1674 
1675     SRC_EA(env, den, OS_LONG, 0, NULL);
1676     num = tcg_constant_i32(REG(ext, 12));
1677     reg = tcg_constant_i32(REG(ext, 0));
1678     ilen = tcg_constant_i32(s->pc - s->base.pc_next);
1679     if (sign) {
1680         gen_helper_divsl(cpu_env, num, reg, den, ilen);
1681     } else {
1682         gen_helper_divul(cpu_env, num, reg, den, ilen);
1683     }
1684 
1685     set_cc_op(s, CC_OP_FLAGS);
1686 }
1687 
1688 static void bcd_add(TCGv dest, TCGv src)
1689 {
1690     TCGv t0, t1;
1691 
1692     /*
1693      * dest10 = dest10 + src10 + X
1694      *
1695      *        t1 = src
1696      *        t2 = t1 + 0x066
1697      *        t3 = t2 + dest + X
1698      *        t4 = t2 ^ dest
1699      *        t5 = t3 ^ t4
1700      *        t6 = ~t5 & 0x110
1701      *        t7 = (t6 >> 2) | (t6 >> 3)
1702      *        return t3 - t7
1703      */
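    /*
     * Worked example with dest = 0x19, src = 0x03, X = 0:
     *   t2 = 0x69, t3 = 0x82, t4 = 0x70, t5 = 0xf2,
     *   t6 = 0x100, t7 = 0x60, result = 0x82 - 0x60 = 0x22 (= BCD 19 + 3).
     */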
1704 
1705     /*
1706      * t1 = (src + 0x066) + dest + X
1707      *    = result, with a possible excess 0x6 in each digit
1708      */
1709 
1710     t0 = tcg_const_i32(0x066);
1711     tcg_gen_add_i32(t0, t0, src);
1712 
1713     t1 = tcg_temp_new();
1714     tcg_gen_add_i32(t1, t0, dest);
1715     tcg_gen_add_i32(t1, t1, QREG_CC_X);
1716 
1717     /* the excess 0x6 is removed below from digits that produced no carry */
1718 
1719     /*
1720      * t0 = (src + 0x0066) ^ dest
1721      *    = t1 without carries
1722      */
1723 
1724     tcg_gen_xor_i32(t0, t0, dest);
1725 
1726     /*
1727      * extract the carries
1728      * t0 = t0 ^ t1
1729      *    = only the carries
1730      */
1731 
1732     tcg_gen_xor_i32(t0, t0, t1);
1733 
1734     /*
1735      * shift the digit carries (0x110) down to 0x22 and invert: each
1736      * digit with no carry yields a 0x2, multiplied by 3 below to give 0x6
1737      */
1738 
1739     tcg_gen_shri_i32(t0, t0, 3);
1740     tcg_gen_not_i32(t0, t0);
1741     tcg_gen_andi_i32(t0, t0, 0x22);
1742     tcg_gen_add_i32(dest, t0, t0);
1743     tcg_gen_add_i32(dest, dest, t0);
1744     tcg_temp_free(t0);
1745 
1746     /*
1747      * remove the excess 0x6 from digits
1748      * that did not generate a carry
1749      */
1750 
1751     tcg_gen_sub_i32(dest, t1, dest);
1752     tcg_temp_free(t1);
1753 }
1754 
1755 static void bcd_sub(TCGv dest, TCGv src)
1756 {
1757     TCGv t0, t1, t2;
1758 
1759     /*
1760      *  dest10 = dest10 - src10 - X
1761      *         = bcd_add(dest + 1 - X, 0x199 - src)
1762      */
1763 
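    /*
     * Worked example (X = 0): 0x42 - 0x17 -> 0x25
     *        t0 = 0x1ff - 0x17         = 0x1e8
     *        t1 = 0x1e8 + 0x42 + 1     = 0x22b
     *        t2 = 0x1e8 ^ 0x42         = 0x1aa
     *        t0 = 0x22b ^ 0x1aa        = 0x381
     *        t2 = ~(0x381 >> 3) & 0x22 = 0x02
     *        t0 = 3 * 0x02             = 0x06
     *        result = 0x22b - 0x06     = 0x225 (low byte 0x25, no borrow)
     */
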
1764     /* t0 = 0x066 + (0x199 - src) */
1765 
1766     t0 = tcg_temp_new();
1767     tcg_gen_subfi_i32(t0, 0x1ff, src);
1768 
1769     /* t1 = t0 + dest + 1 - X */
1770 
1771     t1 = tcg_temp_new();
1772     tcg_gen_add_i32(t1, t0, dest);
1773     tcg_gen_addi_i32(t1, t1, 1);
1774     tcg_gen_sub_i32(t1, t1, QREG_CC_X);
1775 
1776     /* t2 = t0 ^ dest */
1777 
1778     t2 = tcg_temp_new();
1779     tcg_gen_xor_i32(t2, t0, dest);
1780 
1781     /* t0 = t1 ^ t2 */
1782 
1783     tcg_gen_xor_i32(t0, t1, t2);
1784 
1785     /*
1786      * t2 = ~t0 & 0x110
1787      * t0 = (t2 >> 2) | (t2 >> 3)
1788      *
1789      * to fit in 8-bit operands, this is rewritten as:
1790      *
1791      * t2 = ~(t0 >> 3) & 0x22
1792      * t0 = t2 + t2
1793      * t0 = t0 + t2
1794      */
1795 
1796     tcg_gen_shri_i32(t2, t0, 3);
1797     tcg_gen_not_i32(t2, t2);
1798     tcg_gen_andi_i32(t2, t2, 0x22);
1799     tcg_gen_add_i32(t0, t2, t2);
1800     tcg_gen_add_i32(t0, t0, t2);
1801     tcg_temp_free(t2);
1802 
1803     /* return t1 - t0 */
1804 
1805     tcg_gen_sub_i32(dest, t1, t0);
1806     tcg_temp_free(t0);
1807     tcg_temp_free(t1);
1808 }
1809 
1810 static void bcd_flags(TCGv val)
1811 {
1812     tcg_gen_andi_i32(QREG_CC_C, val, 0x0ff);
1813     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_C);
1814 
1815     tcg_gen_extract_i32(QREG_CC_C, val, 8, 1);
1816 
1817     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
1818 }
1819 
1820 DISAS_INSN(abcd_reg)
1821 {
1822     TCGv src;
1823     TCGv dest;
1824 
1825     gen_flush_flags(s); /* !Z is sticky */
1826 
1827     src = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
1828     dest = gen_extend(s, DREG(insn, 9), OS_BYTE, 0);
1829     bcd_add(dest, src);
1830     gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);
1831 
1832     bcd_flags(dest);
1833 }
1834 
1835 DISAS_INSN(abcd_mem)
1836 {
1837     TCGv src, dest, addr;
1838 
1839     gen_flush_flags(s); /* !Z is sticky */
1840 
1841     /* Indirect pre-decrement load (mode 4) */
1842 
1843     src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
1844                       NULL_QREG, NULL, EA_LOADU, IS_USER(s));
1845     dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
1846                        NULL_QREG, &addr, EA_LOADU, IS_USER(s));
1847 
1848     bcd_add(dest, src);
1849 
1850     gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
1851                 EA_STORE, IS_USER(s));
1852 
1853     bcd_flags(dest);
1854 }
1855 
1856 DISAS_INSN(sbcd_reg)
1857 {
1858     TCGv src, dest;
1859 
1860     gen_flush_flags(s); /* !Z is sticky */
1861 
1862     src = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
1863     dest = gen_extend(s, DREG(insn, 9), OS_BYTE, 0);
1864 
1865     bcd_sub(dest, src);
1866 
1867     gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);
1868 
1869     bcd_flags(dest);
1870 }
1871 
1872 DISAS_INSN(sbcd_mem)
1873 {
1874     TCGv src, dest, addr;
1875 
1876     gen_flush_flags(s); /* !Z is sticky */
1877 
1878     /* Indirect pre-decrement load (mode 4) */
1879 
1880     src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
1881                       NULL_QREG, NULL, EA_LOADU, IS_USER(s));
1882     dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
1883                        NULL_QREG, &addr, EA_LOADU, IS_USER(s));
1884 
1885     bcd_sub(dest, src);
1886 
1887     gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
1888                 EA_STORE, IS_USER(s));
1889 
1890     bcd_flags(dest);
1891 }
1892 
1893 DISAS_INSN(nbcd)
1894 {
1895     TCGv src, dest;
1896     TCGv addr;
1897 
1898     gen_flush_flags(s); /* !Z is sticky */
1899 
1900     SRC_EA(env, src, OS_BYTE, 0, &addr);
1901 
1902     dest = tcg_const_i32(0);
1903     bcd_sub(dest, src);
1904 
1905     DEST_EA(env, insn, OS_BYTE, dest, &addr);
1906 
1907     bcd_flags(dest);
1908 
1909     tcg_temp_free(dest);
1910 }
1911 
1912 DISAS_INSN(addsub)
1913 {
1914     TCGv reg;
1915     TCGv dest;
1916     TCGv src;
1917     TCGv tmp;
1918     TCGv addr;
1919     int add;
1920     int opsize;
1921 
1922     add = (insn & 0x4000) != 0;
1923     opsize = insn_opsize(insn);
1924     reg = gen_extend(s, DREG(insn, 9), opsize, 1);
1925     dest = tcg_temp_new();
1926     if (insn & 0x100) {
1927         SRC_EA(env, tmp, opsize, 1, &addr);
1928         src = reg;
1929     } else {
1930         tmp = reg;
1931         SRC_EA(env, src, opsize, 1, NULL);
1932     }
1933     if (add) {
1934         tcg_gen_add_i32(dest, tmp, src);
1935         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, src);
1936         set_cc_op(s, CC_OP_ADDB + opsize);
1937     } else {
1938         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, tmp, src);
1939         tcg_gen_sub_i32(dest, tmp, src);
1940         set_cc_op(s, CC_OP_SUBB + opsize);
1941     }
1942     gen_update_cc_add(dest, src, opsize);
1943     if (insn & 0x100) {
1944         DEST_EA(env, insn, opsize, dest, &addr);
1945     } else {
1946         gen_partset_reg(opsize, DREG(insn, 9), dest);
1947     }
1948     tcg_temp_free(dest);
1949 }
1950 
1951 /* Reverse the order of the bits in REG.  */
1952 DISAS_INSN(bitrev)
1953 {
1954     TCGv reg;
1955     reg = DREG(insn, 0);
1956     gen_helper_bitrev(reg, reg);
1957 }
1958 
1959 DISAS_INSN(bitop_reg)
1960 {
1961     int opsize;
1962     int op;
1963     TCGv src1;
1964     TCGv src2;
1965     TCGv tmp;
1966     TCGv addr;
1967     TCGv dest;
1968 
1969     if ((insn & 0x38) != 0)
1970         opsize = OS_BYTE;
1971     else
1972         opsize = OS_LONG;
1973     op = (insn >> 6) & 3;
1974     SRC_EA(env, src1, opsize, 0, op ? &addr: NULL);
1975 
1976     gen_flush_flags(s);
1977     src2 = tcg_temp_new();
1978     if (opsize == OS_BYTE)
1979         tcg_gen_andi_i32(src2, DREG(insn, 9), 7);
1980     else
1981         tcg_gen_andi_i32(src2, DREG(insn, 9), 31);
1982 
1983     tmp = tcg_const_i32(1);
1984     tcg_gen_shl_i32(tmp, tmp, src2);
1985     tcg_temp_free(src2);
1986 
1987     tcg_gen_and_i32(QREG_CC_Z, src1, tmp);
1988 
1989     dest = tcg_temp_new();
1990     switch (op) {
1991     case 1: /* bchg */
1992         tcg_gen_xor_i32(dest, src1, tmp);
1993         break;
1994     case 2: /* bclr */
1995         tcg_gen_andc_i32(dest, src1, tmp);
1996         break;
1997     case 3: /* bset */
1998         tcg_gen_or_i32(dest, src1, tmp);
1999         break;
2000     default: /* btst */
2001         break;
2002     }
2003     tcg_temp_free(tmp);
2004     if (op) {
2005         DEST_EA(env, insn, opsize, dest, &addr);
2006     }
2007     tcg_temp_free(dest);
2008 }
2009 
2010 DISAS_INSN(sats)
2011 {
2012     TCGv reg;
2013     reg = DREG(insn, 0);
2014     gen_flush_flags(s);
2015     gen_helper_sats(reg, reg, QREG_CC_V);
2016     gen_logic_cc(s, reg, OS_LONG);
2017 }
2018 
2019 static void gen_push(DisasContext *s, TCGv val)
2020 {
2021     TCGv tmp;
2022 
2023     tmp = tcg_temp_new();
2024     tcg_gen_subi_i32(tmp, QREG_SP, 4);
2025     gen_store(s, OS_LONG, tmp, val, IS_USER(s));
2026     tcg_gen_mov_i32(QREG_SP, tmp);
2027     tcg_temp_free(tmp);
2028 }
2029 
2030 static TCGv mreg(int reg)
2031 {
2032     if (reg < 8) {
2033         /* Dx */
2034         return cpu_dregs[reg];
2035     }
2036     /* Ax */
2037     return cpu_aregs[reg & 7];
2038 }
2039 
2040 DISAS_INSN(movem)
2041 {
2042     TCGv addr, incr, tmp, r[16];
2043     int is_load = (insn & 0x0400) != 0;
2044     int opsize = (insn & 0x40) != 0 ? OS_LONG : OS_WORD;
2045     uint16_t mask = read_im16(env, s);
2046     int mode = extract32(insn, 3, 3);
2047     int reg0 = REG(insn, 0);
2048     int i;
2049 
2050     tmp = cpu_aregs[reg0];
2051 
2052     switch (mode) {
2053     case 0: /* data register direct */
2054     case 1: /* addr register direct */
2055     do_addr_fault:
2056         gen_addr_fault(s);
2057         return;
2058 
2059     case 2: /* indirect */
2060         break;
2061 
2062     case 3: /* indirect post-increment */
2063         if (!is_load) {
2064             /* post-increment is not allowed */
2065             goto do_addr_fault;
2066         }
2067         break;
2068 
2069     case 4: /* indirect pre-decrement */
2070         if (is_load) {
2071             /* pre-decrement is not allowed */
2072             goto do_addr_fault;
2073         }
2074         /*
2075          * We want a bare copy of the address reg, without the pre-decrement
2076          * adjustment that gen_lea would apply.
2077          */
2078         break;
2079 
2080     default:
2081         tmp = gen_lea_mode(env, s, mode, reg0, opsize);
2082         if (IS_NULL_QREG(tmp)) {
2083             goto do_addr_fault;
2084         }
2085         break;
2086     }
2087 
2088     addr = tcg_temp_new();
2089     tcg_gen_mov_i32(addr, tmp);
2090     incr = tcg_const_i32(opsize_bytes(opsize));
2091 
2092     if (is_load) {
2093         /* memory to register */
2094         for (i = 0; i < 16; i++) {
2095             if (mask & (1 << i)) {
2096                 r[i] = gen_load(s, opsize, addr, 1, IS_USER(s));
2097                 tcg_gen_add_i32(addr, addr, incr);
2098             }
2099         }
2100         for (i = 0; i < 16; i++) {
2101             if (mask & (1 << i)) {
2102                 tcg_gen_mov_i32(mreg(i), r[i]);
2103                 tcg_temp_free(r[i]);
2104             }
2105         }
2106         if (mode == 3) {
2107             /* post-increment: movem (An)+,X */
2108             tcg_gen_mov_i32(cpu_aregs[reg0], addr);
2109         }
2110     } else {
2111         /* register to memory */
2112         if (mode == 4) {
2113             /* pre-decrement: movem X,-(An) */
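            /*
             * For the pre-decrement form the register mask is reversed
             * (bit 0 = A7 ... bit 15 = D0), hence the (mask << i) & 0x8000
             * test below instead of mask & (1 << i).
             */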
2114             for (i = 15; i >= 0; i--) {
2115                 if ((mask << i) & 0x8000) {
2116                     tcg_gen_sub_i32(addr, addr, incr);
2117                     if (reg0 + 8 == i &&
2118                         m68k_feature(s->env, M68K_FEATURE_EXT_FULL)) {
2119                         /*
2120                          * M68020+: if the addressing register is the
2121                          * register moved to memory, the value written
2122                          * is the initial value decremented by the size of
2123                          * the operation, regardless of how many actual
2124                          * stores have been performed until this point.
2125                          * M68000/M68010: the value is the initial value.
2126                          */
2127                         tmp = tcg_temp_new();
2128                         tcg_gen_sub_i32(tmp, cpu_aregs[reg0], incr);
2129                         gen_store(s, opsize, addr, tmp, IS_USER(s));
2130                         tcg_temp_free(tmp);
2131                     } else {
2132                         gen_store(s, opsize, addr, mreg(i), IS_USER(s));
2133                     }
2134                 }
2135             }
2136             tcg_gen_mov_i32(cpu_aregs[reg0], addr);
2137         } else {
2138             for (i = 0; i < 16; i++) {
2139                 if (mask & (1 << i)) {
2140                     gen_store(s, opsize, addr, mreg(i), IS_USER(s));
2141                     tcg_gen_add_i32(addr, addr, incr);
2142                 }
2143             }
2144         }
2145     }
2146 
2147     tcg_temp_free(incr);
2148     tcg_temp_free(addr);
2149 }
2150 
2151 DISAS_INSN(movep)
2152 {
2153     uint8_t i;
2154     int16_t displ;
2155     TCGv reg;
2156     TCGv addr;
2157     TCGv abuf;
2158     TCGv dbuf;
2159 
2160     displ = read_im16(env, s);
2161 
2162     addr = AREG(insn, 0);
2163     reg = DREG(insn, 9);
2164 
2165     abuf = tcg_temp_new();
2166     tcg_gen_addi_i32(abuf, addr, displ);
2167     dbuf = tcg_temp_new();
2168 
2169     if (insn & 0x40) {
2170         i = 4;
2171     } else {
2172         i = 2;
2173     }
2174 
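    /*
     * movep transfers alternate bytes, most significant byte first:
     * e.g. movep.l Dx,(d16,Ay) writes bits 31-24 at d16, bits 23-16 at
     * d16+2, bits 15-8 at d16+4 and bits 7-0 at d16+6; the memory-to-
     * register form below reassembles the register the same way.
     */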
2175     if (insn & 0x80) {
2176         for ( ; i > 0 ; i--) {
2177             tcg_gen_shri_i32(dbuf, reg, (i - 1) * 8);
2178             tcg_gen_qemu_st8(dbuf, abuf, IS_USER(s));
2179             if (i > 1) {
2180                 tcg_gen_addi_i32(abuf, abuf, 2);
2181             }
2182         }
2183     } else {
2184         for ( ; i > 0 ; i--) {
2185             tcg_gen_qemu_ld8u(dbuf, abuf, IS_USER(s));
2186             tcg_gen_deposit_i32(reg, reg, dbuf, (i - 1) * 8, 8);
2187             if (i > 1) {
2188                 tcg_gen_addi_i32(abuf, abuf, 2);
2189             }
2190         }
2191     }
2192     tcg_temp_free(abuf);
2193     tcg_temp_free(dbuf);
2194 }
2195 
2196 DISAS_INSN(bitop_im)
2197 {
2198     int opsize;
2199     int op;
2200     TCGv src1;
2201     uint32_t mask;
2202     int bitnum;
2203     TCGv tmp;
2204     TCGv addr;
2205 
2206     if ((insn & 0x38) != 0)
2207         opsize = OS_BYTE;
2208     else
2209         opsize = OS_LONG;
2210     op = (insn >> 6) & 3;
2211 
2212     bitnum = read_im16(env, s);
2213     if (m68k_feature(s->env, M68K_FEATURE_M68K)) {
2214         if (bitnum & 0xfe00) {
2215             disas_undef(env, s, insn);
2216             return;
2217         }
2218     } else {
2219         if (bitnum & 0xff00) {
2220             disas_undef(env, s, insn);
2221             return;
2222         }
2223     }
2224 
2225     SRC_EA(env, src1, opsize, 0, op ? &addr: NULL);
2226 
2227     gen_flush_flags(s);
2228     if (opsize == OS_BYTE)
2229         bitnum &= 7;
2230     else
2231         bitnum &= 31;
2232     mask = 1 << bitnum;
2233 
2234     tcg_gen_andi_i32(QREG_CC_Z, src1, mask);
2235 
2236     if (op) {
2237         tmp = tcg_temp_new();
2238         switch (op) {
2239         case 1: /* bchg */
2240             tcg_gen_xori_i32(tmp, src1, mask);
2241             break;
2242         case 2: /* bclr */
2243             tcg_gen_andi_i32(tmp, src1, ~mask);
2244             break;
2245         case 3: /* bset */
2246             tcg_gen_ori_i32(tmp, src1, mask);
2247             break;
2248         default: /* btst */
2249             break;
2250         }
2251         DEST_EA(env, insn, opsize, tmp, &addr);
2252         tcg_temp_free(tmp);
2253     }
2254 }
2255 
2256 static TCGv gen_get_ccr(DisasContext *s)
2257 {
2258     TCGv dest;
2259 
2260     update_cc_op(s);
2261     dest = tcg_temp_new();
2262     gen_helper_get_ccr(dest, cpu_env);
2263     return dest;
2264 }
2265 
2266 static TCGv gen_get_sr(DisasContext *s)
2267 {
2268     TCGv ccr;
2269     TCGv sr;
2270 
2271     ccr = gen_get_ccr(s);
2272     sr = tcg_temp_new();
2273     tcg_gen_andi_i32(sr, QREG_SR, 0xffe0);
2274     tcg_gen_or_i32(sr, sr, ccr);
2275     tcg_temp_free(ccr);
2276     return sr;
2277 }
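
/*
 * Note: the SR read by the guest is the system bits of QREG_SR (mask
 * 0xffe0) with the low five bits replaced by the freshly computed CCR,
 * laid out as X=bit 4, N=3, Z=2, V=1, C=0.
 */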
2278 
2279 static void gen_set_sr_im(DisasContext *s, uint16_t val, int ccr_only)
2280 {
2281     if (ccr_only) {
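        /*
         * Internal flag representation: C and X live in bit 0, N and V
         * are stored sign-replicated (0 or -1), and Z is stored inverted
         * (the flag is considered set iff QREG_CC_Z == 0).
         */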
2282         tcg_gen_movi_i32(QREG_CC_C, val & CCF_C ? 1 : 0);
2283         tcg_gen_movi_i32(QREG_CC_V, val & CCF_V ? -1 : 0);
2284         tcg_gen_movi_i32(QREG_CC_Z, val & CCF_Z ? 0 : 1);
2285         tcg_gen_movi_i32(QREG_CC_N, val & CCF_N ? -1 : 0);
2286         tcg_gen_movi_i32(QREG_CC_X, val & CCF_X ? 1 : 0);
2287     } else {
2288         /* Must write back before changing security state. */
2289         do_writebacks(s);
2290         gen_helper_set_sr(cpu_env, tcg_constant_i32(val));
2291     }
2292     set_cc_op(s, CC_OP_FLAGS);
2293 }
2294 
2295 static void gen_set_sr(DisasContext *s, TCGv val, int ccr_only)
2296 {
2297     if (ccr_only) {
2298         gen_helper_set_ccr(cpu_env, val);
2299     } else {
2300         /* Must write back before changing security state. */
2301         do_writebacks(s);
2302         gen_helper_set_sr(cpu_env, val);
2303     }
2304     set_cc_op(s, CC_OP_FLAGS);
2305 }
2306 
2307 static void gen_move_to_sr(CPUM68KState *env, DisasContext *s, uint16_t insn,
2308                            bool ccr_only)
2309 {
2310     if ((insn & 0x3f) == 0x3c) {
2311         uint16_t val;
2312         val = read_im16(env, s);
2313         gen_set_sr_im(s, val, ccr_only);
2314     } else {
2315         TCGv src;
2316         SRC_EA(env, src, OS_WORD, 0, NULL);
2317         gen_set_sr(s, src, ccr_only);
2318     }
2319 }
2320 
2321 DISAS_INSN(arith_im)
2322 {
2323     int op;
2324     TCGv im;
2325     TCGv src1;
2326     TCGv dest;
2327     TCGv addr;
2328     int opsize;
2329     bool with_SR = ((insn & 0x3f) == 0x3c);
2330 
2331     op = (insn >> 9) & 7;
2332     opsize = insn_opsize(insn);
2333     switch (opsize) {
2334     case OS_BYTE:
2335         im = tcg_const_i32((int8_t)read_im8(env, s));
2336         break;
2337     case OS_WORD:
2338         im = tcg_const_i32((int16_t)read_im16(env, s));
2339         break;
2340     case OS_LONG:
2341         im = tcg_const_i32(read_im32(env, s));
2342         break;
2343     default:
2344         g_assert_not_reached();
2345     }
2346 
2347     if (with_SR) {
2348         /* SR/CCR can only be used with andi/eori/ori */
2349         if (op == 2 || op == 3 || op == 6) {
2350             disas_undef(env, s, insn);
2351             return;
2352         }
2353         switch (opsize) {
2354         case OS_BYTE:
2355             src1 = gen_get_ccr(s);
2356             break;
2357         case OS_WORD:
2358             if (IS_USER(s)) {
2359                 gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
2360                 return;
2361             }
2362             src1 = gen_get_sr(s);
2363             break;
2364         default:
2365             /* OS_LONG; others already g_assert_not_reached.  */
2366             disas_undef(env, s, insn);
2367             return;
2368         }
2369     } else {
2370         SRC_EA(env, src1, opsize, 1, (op == 6) ? NULL : &addr);
2371     }
2372     dest = tcg_temp_new();
2373     switch (op) {
2374     case 0: /* ori */
2375         tcg_gen_or_i32(dest, src1, im);
2376         if (with_SR) {
2377             gen_set_sr(s, dest, opsize == OS_BYTE);
2378             gen_exit_tb(s);
2379         } else {
2380             DEST_EA(env, insn, opsize, dest, &addr);
2381             gen_logic_cc(s, dest, opsize);
2382         }
2383         break;
2384     case 1: /* andi */
2385         tcg_gen_and_i32(dest, src1, im);
2386         if (with_SR) {
2387             gen_set_sr(s, dest, opsize == OS_BYTE);
2388             gen_exit_tb(s);
2389         } else {
2390             DEST_EA(env, insn, opsize, dest, &addr);
2391             gen_logic_cc(s, dest, opsize);
2392         }
2393         break;
2394     case 2: /* subi */
2395         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, src1, im);
2396         tcg_gen_sub_i32(dest, src1, im);
2397         gen_update_cc_add(dest, im, opsize);
2398         set_cc_op(s, CC_OP_SUBB + opsize);
2399         DEST_EA(env, insn, opsize, dest, &addr);
2400         break;
2401     case 3: /* addi */
2402         tcg_gen_add_i32(dest, src1, im);
2403         gen_update_cc_add(dest, im, opsize);
2404         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, im);
2405         set_cc_op(s, CC_OP_ADDB + opsize);
2406         DEST_EA(env, insn, opsize, dest, &addr);
2407         break;
2408     case 5: /* eori */
2409         tcg_gen_xor_i32(dest, src1, im);
2410         if (with_SR) {
2411             gen_set_sr(s, dest, opsize == OS_BYTE);
2412             gen_exit_tb(s);
2413         } else {
2414             DEST_EA(env, insn, opsize, dest, &addr);
2415             gen_logic_cc(s, dest, opsize);
2416         }
2417         break;
2418     case 6: /* cmpi */
2419         gen_update_cc_cmp(s, src1, im, opsize);
2420         break;
2421     default:
2422         abort();
2423     }
2424     tcg_temp_free(im);
2425     tcg_temp_free(dest);
2426 }
2427 
2428 DISAS_INSN(cas)
2429 {
2430     int opsize;
2431     TCGv addr;
2432     uint16_t ext;
2433     TCGv load;
2434     TCGv cmp;
2435     MemOp opc;
2436 
2437     switch ((insn >> 9) & 3) {
2438     case 1:
2439         opsize = OS_BYTE;
2440         opc = MO_SB;
2441         break;
2442     case 2:
2443         opsize = OS_WORD;
2444         opc = MO_TESW;
2445         break;
2446     case 3:
2447         opsize = OS_LONG;
2448         opc = MO_TESL;
2449         break;
2450     default:
2451         g_assert_not_reached();
2452     }
2453 
2454     ext = read_im16(env, s);
2455 
2456     /* cas Dc,Du,<EA> */
2457 
2458     addr = gen_lea(env, s, insn, opsize);
2459     if (IS_NULL_QREG(addr)) {
2460         gen_addr_fault(s);
2461         return;
2462     }
2463 
2464     cmp = gen_extend(s, DREG(ext, 0), opsize, 1);
2465 
2466     /*
2467      * if  <EA> == Dc then
2468      *     <EA> = Du
2469      *     Dc = <EA> (because <EA> == Dc)
2470      * else
2471      *     Dc = <EA>
2472      */
2473 
2474     load = tcg_temp_new();
2475     tcg_gen_atomic_cmpxchg_i32(load, addr, cmp, DREG(ext, 6),
2476                                IS_USER(s), opc);
2477     /* update flags before setting cmp to load */
2478     gen_update_cc_cmp(s, load, cmp, opsize);
2479     gen_partset_reg(opsize, DREG(ext, 0), load);
2480 
2481     tcg_temp_free(load);
2482 
2483     switch (extract32(insn, 3, 3)) {
2484     case 3: /* Indirect postincrement.  */
2485         tcg_gen_addi_i32(AREG(insn, 0), addr, opsize_bytes(opsize));
2486         break;
2487     case 4: /* Indirect predecrement.  */
2488         tcg_gen_mov_i32(AREG(insn, 0), addr);
2489         break;
2490     }
2491 }
2492 
2493 DISAS_INSN(cas2w)
2494 {
2495     uint16_t ext1, ext2;
2496     TCGv addr1, addr2;
2497     TCGv regs;
2498 
2499     /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2500 
2501     ext1 = read_im16(env, s);
2502 
2503     if (ext1 & 0x8000) {
2504         /* Address Register */
2505         addr1 = AREG(ext1, 12);
2506     } else {
2507         /* Data Register */
2508         addr1 = DREG(ext1, 12);
2509     }
2510 
2511     ext2 = read_im16(env, s);
2512     if (ext2 & 0x8000) {
2513         /* Address Register */
2514         addr2 = AREG(ext2, 12);
2515     } else {
2516         /* Data Register */
2517         addr2 = DREG(ext2, 12);
2518     }
2519 
2520     /*
2521      * if (R1) == Dc1 && (R2) == Dc2 then
2522      *     (R1) = Du1
2523      *     (R2) = Du2
2524      * else
2525      *     Dc1 = (R1)
2526      *     Dc2 = (R2)
2527      */
2528 
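    /*
     * Pack the compare/update register numbers into one constant for the
     * helper: bits 2:0 = Du2, 5:3 = Du1, 8:6 = Dc2, 11:9 = Dc1 (in each
     * cas2 extension word, Dc occupies bits 2:0 and Du bits 8:6).
     */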
2529     regs = tcg_const_i32(REG(ext2, 6) |
2530                          (REG(ext1, 6) << 3) |
2531                          (REG(ext2, 0) << 6) |
2532                          (REG(ext1, 0) << 9));
2533     if (tb_cflags(s->base.tb) & CF_PARALLEL) {
2534         gen_helper_exit_atomic(cpu_env);
2535     } else {
2536         gen_helper_cas2w(cpu_env, regs, addr1, addr2);
2537     }
2538     tcg_temp_free(regs);
2539 
2540     /* Note that the cas2w helper has already assigned env->cc_op.  */
2541     s->cc_op = CC_OP_CMPW;
2542     s->cc_op_synced = 1;
2543 }
2544 
2545 DISAS_INSN(cas2l)
2546 {
2547     uint16_t ext1, ext2;
2548     TCGv addr1, addr2, regs;
2549 
2550     /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2551 
2552     ext1 = read_im16(env, s);
2553 
2554     if (ext1 & 0x8000) {
2555         /* Address Register */
2556         addr1 = AREG(ext1, 12);
2557     } else {
2558         /* Data Register */
2559         addr1 = DREG(ext1, 12);
2560     }
2561 
2562     ext2 = read_im16(env, s);
2563     if (ext2 & 0x8000) {
2564         /* Address Register */
2565         addr2 = AREG(ext2, 12);
2566     } else {
2567         /* Data Register */
2568         addr2 = DREG(ext2, 12);
2569     }
2570 
2571     /*
2572      * if (R1) == Dc1 && (R2) == Dc2 then
2573      *     (R1) = Du1
2574      *     (R2) = Du2
2575      * else
2576      *     Dc1 = (R1)
2577      *     Dc2 = (R2)
2578      */
2579 
2580     regs = tcg_const_i32(REG(ext2, 6) |
2581                          (REG(ext1, 6) << 3) |
2582                          (REG(ext2, 0) << 6) |
2583                          (REG(ext1, 0) << 9));
2584     if (tb_cflags(s->base.tb) & CF_PARALLEL) {
2585         gen_helper_cas2l_parallel(cpu_env, regs, addr1, addr2);
2586     } else {
2587         gen_helper_cas2l(cpu_env, regs, addr1, addr2);
2588     }
2589     tcg_temp_free(regs);
2590 
2591     /* Note that the cas2l helper has already assigned env->cc_op.  */
2592     s->cc_op = CC_OP_CMPL;
2593     s->cc_op_synced = 1;
2594 }
2595 
2596 DISAS_INSN(byterev)
2597 {
2598     TCGv reg;
2599 
2600     reg = DREG(insn, 0);
2601     tcg_gen_bswap32_i32(reg, reg);
2602 }
2603 
2604 DISAS_INSN(move)
2605 {
2606     TCGv src;
2607     TCGv dest;
2608     int op;
2609     int opsize;
2610 
2611     switch (insn >> 12) {
2612     case 1: /* move.b */
2613         opsize = OS_BYTE;
2614         break;
2615     case 2: /* move.l */
2616         opsize = OS_LONG;
2617         break;
2618     case 3: /* move.w */
2619         opsize = OS_WORD;
2620         break;
2621     default:
2622         abort();
2623     }
2624     SRC_EA(env, src, opsize, 1, NULL);
2625     op = (insn >> 6) & 7;
2626     if (op == 1) {
2627         /* movea */
2628         /* The value will already have been sign extended.  */
2629         dest = AREG(insn, 9);
2630         tcg_gen_mov_i32(dest, src);
2631     } else {
2632         /* normal move */
2633         uint16_t dest_ea;
2634         dest_ea = ((insn >> 9) & 7) | (op << 3);
2635         DEST_EA(env, dest_ea, opsize, src, NULL);
2636         /* This will be correct because loads sign extend.  */
2637         gen_logic_cc(s, src, opsize);
2638     }
2639 }
2640 
2641 DISAS_INSN(negx)
2642 {
2643     TCGv z;
2644     TCGv src;
2645     TCGv addr;
2646     int opsize;
2647 
2648     opsize = insn_opsize(insn);
2649     SRC_EA(env, src, opsize, 1, &addr);
2650 
2651     gen_flush_flags(s); /* compute old Z */
2652 
2653     /*
2654      * Perform subtract with borrow.
2655      * (X, N) =  -(src + X);
2656      */
2657 
2658     z = tcg_const_i32(0);
2659     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, z, QREG_CC_X, z);
2660     tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, z, z, QREG_CC_N, QREG_CC_X);
2661     tcg_temp_free(z);
2662     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
2663 
2664     tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);
2665 
2666     /*
2667      * Compute signed-overflow for negation.  The normal formula for
2668      * subtraction is (res ^ src) & (src ^ dest), but with dest==0
2669      * this simplifies to res & src.
2670      */
2671 
2672     tcg_gen_and_i32(QREG_CC_V, QREG_CC_N, src);
2673 
2674     /* Copy the rest of the results into place.  */
2675     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
2676     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
2677 
2678     set_cc_op(s, CC_OP_FLAGS);
2679 
2680     /* result is in QREG_CC_N */
2681 
2682     DEST_EA(env, insn, opsize, QREG_CC_N, &addr);
2683 }
2684 
2685 DISAS_INSN(lea)
2686 {
2687     TCGv reg;
2688     TCGv tmp;
2689 
2690     reg = AREG(insn, 9);
2691     tmp = gen_lea(env, s, insn, OS_LONG);
2692     if (IS_NULL_QREG(tmp)) {
2693         gen_addr_fault(s);
2694         return;
2695     }
2696     tcg_gen_mov_i32(reg, tmp);
2697 }
2698 
2699 DISAS_INSN(clr)
2700 {
2701     int opsize;
2702     TCGv zero;
2703 
2704     zero = tcg_const_i32(0);
2705 
2706     opsize = insn_opsize(insn);
2707     DEST_EA(env, insn, opsize, zero, NULL);
2708     gen_logic_cc(s, zero, opsize);
2709     tcg_temp_free(zero);
2710 }
2711 
2712 DISAS_INSN(move_from_ccr)
2713 {
2714     TCGv ccr;
2715 
2716     ccr = gen_get_ccr(s);
2717     DEST_EA(env, insn, OS_WORD, ccr, NULL);
2718 }
2719 
2720 DISAS_INSN(neg)
2721 {
2722     TCGv src1;
2723     TCGv dest;
2724     TCGv addr;
2725     int opsize;
2726 
2727     opsize = insn_opsize(insn);
2728     SRC_EA(env, src1, opsize, 1, &addr);
2729     dest = tcg_temp_new();
2730     tcg_gen_neg_i32(dest, src1);
2731     set_cc_op(s, CC_OP_SUBB + opsize);
2732     gen_update_cc_add(dest, src1, opsize);
2733     tcg_gen_setcondi_i32(TCG_COND_NE, QREG_CC_X, dest, 0);
2734     DEST_EA(env, insn, opsize, dest, &addr);
2735     tcg_temp_free(dest);
2736 }
2737 
2738 DISAS_INSN(move_to_ccr)
2739 {
2740     gen_move_to_sr(env, s, insn, true);
2741 }
2742 
2743 DISAS_INSN(not)
2744 {
2745     TCGv src1;
2746     TCGv dest;
2747     TCGv addr;
2748     int opsize;
2749 
2750     opsize = insn_opsize(insn);
2751     SRC_EA(env, src1, opsize, 1, &addr);
2752     dest = tcg_temp_new();
2753     tcg_gen_not_i32(dest, src1);
2754     DEST_EA(env, insn, opsize, dest, &addr);
2755     gen_logic_cc(s, dest, opsize);
2756 }
2757 
2758 DISAS_INSN(swap)
2759 {
2760     TCGv src1;
2761     TCGv src2;
2762     TCGv reg;
2763 
2764     src1 = tcg_temp_new();
2765     src2 = tcg_temp_new();
2766     reg = DREG(insn, 0);
2767     tcg_gen_shli_i32(src1, reg, 16);
2768     tcg_gen_shri_i32(src2, reg, 16);
2769     tcg_gen_or_i32(reg, src1, src2);
2770     tcg_temp_free(src2);
2771     tcg_temp_free(src1);
2772     gen_logic_cc(s, reg, OS_LONG);
2773 }
2774 
2775 DISAS_INSN(bkpt)
2776 {
2777 #if defined(CONFIG_SOFTMMU)
2778     gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
2779 #else
2780     gen_exception(s, s->base.pc_next, EXCP_DEBUG);
2781 #endif
2782 }
2783 
2784 DISAS_INSN(pea)
2785 {
2786     TCGv tmp;
2787 
2788     tmp = gen_lea(env, s, insn, OS_LONG);
2789     if (IS_NULL_QREG(tmp)) {
2790         gen_addr_fault(s);
2791         return;
2792     }
2793     gen_push(s, tmp);
2794 }
2795 
2796 DISAS_INSN(ext)
2797 {
2798     int op;
2799     TCGv reg;
2800     TCGv tmp;
2801 
2802     reg = DREG(insn, 0);
2803     op = (insn >> 6) & 7;
2804     tmp = tcg_temp_new();
2805     if (op == 3)
2806         tcg_gen_ext16s_i32(tmp, reg);
2807     else
2808         tcg_gen_ext8s_i32(tmp, reg);
2809     if (op == 2)
2810         gen_partset_reg(OS_WORD, reg, tmp);
2811     else
2812         tcg_gen_mov_i32(reg, tmp);
2813     gen_logic_cc(s, tmp, OS_LONG);
2814     tcg_temp_free(tmp);
2815 }
2816 
2817 DISAS_INSN(tst)
2818 {
2819     int opsize;
2820     TCGv tmp;
2821 
2822     opsize = insn_opsize(insn);
2823     SRC_EA(env, tmp, opsize, 1, NULL);
2824     gen_logic_cc(s, tmp, opsize);
2825 }
2826 
2827 DISAS_INSN(pulse)
2828 {
2829     /* Implemented as a NOP.  */
2830 }
2831 
2832 DISAS_INSN(illegal)
2833 {
2834     gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
2835 }
2836 
2837 DISAS_INSN(tas)
2838 {
2839     int mode = extract32(insn, 3, 3);
2840     int reg0 = REG(insn, 0);
2841 
2842     if (mode == 0) {
2843         /* data register direct */
2844         TCGv dest = cpu_dregs[reg0];
2845         gen_logic_cc(s, dest, OS_BYTE);
2846         tcg_gen_ori_tl(dest, dest, 0x80);
2847     } else {
2848         TCGv src1, addr;
2849 
2850         addr = gen_lea_mode(env, s, mode, reg0, OS_BYTE);
2851         if (IS_NULL_QREG(addr)) {
2852             gen_addr_fault(s);
2853             return;
2854         }
2855         src1 = tcg_temp_new();
2856         tcg_gen_atomic_fetch_or_tl(src1, addr, tcg_constant_tl(0x80),
2857                                    IS_USER(s), MO_SB);
2858         gen_logic_cc(s, src1, OS_BYTE);
2859         tcg_temp_free(src1);
2860 
2861         switch (mode) {
2862         case 3: /* Indirect postincrement.  */
2863             tcg_gen_addi_i32(AREG(insn, 0), addr, 1);
2864             break;
2865         case 4: /* Indirect predecrement.  */
2866             tcg_gen_mov_i32(AREG(insn, 0), addr);
2867             break;
2868         }
2869     }
2870 }
2871 
2872 DISAS_INSN(mull)
2873 {
2874     uint16_t ext;
2875     TCGv src1;
2876     int sign;
2877 
2878     ext = read_im16(env, s);
2879 
2880     sign = ext & 0x800;
2881 
2882     if (ext & 0x400) {
2883         if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
2884             gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
2885             return;
2886         }
2887 
2888         SRC_EA(env, src1, OS_LONG, 0, NULL);
2889 
2890         if (sign) {
2891             tcg_gen_muls2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
2892         } else {
2893             tcg_gen_mulu2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
2894         }
2895         /* if Dl == Dh, 68040 returns low word */
2896         tcg_gen_mov_i32(DREG(ext, 0), QREG_CC_N);
2897         tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_Z);
2898         tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N);
2899 
2900         tcg_gen_movi_i32(QREG_CC_V, 0);
2901         tcg_gen_movi_i32(QREG_CC_C, 0);
2902 
2903         set_cc_op(s, CC_OP_FLAGS);
2904         return;
2905     }
2906     SRC_EA(env, src1, OS_LONG, 0, NULL);
2907     if (m68k_feature(s->env, M68K_FEATURE_M68K)) {
2908         tcg_gen_movi_i32(QREG_CC_C, 0);
2909         if (sign) {
2910             tcg_gen_muls2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
2911             /* QREG_CC_V is -(QREG_CC_V != (QREG_CC_N >> 31)) */
2912             tcg_gen_sari_i32(QREG_CC_Z, QREG_CC_N, 31);
2913             tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_Z);
2914         } else {
2915             tcg_gen_mulu2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
2916             /* QREG_CC_V is -(QREG_CC_V != 0), use QREG_CC_C as 0 */
2917             tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_C);
2918         }
2919         tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
2920         tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_N);
2921 
2922         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
2923 
2924         set_cc_op(s, CC_OP_FLAGS);
2925     } else {
2926         /*
2927          * The upper 32 bits of the product are discarded, so
2928          * muls.l and mulu.l are functionally equivalent.
2929          */
2930         tcg_gen_mul_i32(DREG(ext, 12), src1, DREG(ext, 12));
2931         gen_logic_cc(s, DREG(ext, 12), OS_LONG);
2932     }
2933 }
2934 
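/*
 * link An,#offset: push An, load An with the decremented SP (the new
 * frame pointer), then add the offset (usually negative) to SP.  For
 * link %a7 the frame-pointer copy is skipped, so only the push and the
 * SP adjustment take effect.
 */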
2935 static void gen_link(DisasContext *s, uint16_t insn, int32_t offset)
2936 {
2937     TCGv reg;
2938     TCGv tmp;
2939 
2940     reg = AREG(insn, 0);
2941     tmp = tcg_temp_new();
2942     tcg_gen_subi_i32(tmp, QREG_SP, 4);
2943     gen_store(s, OS_LONG, tmp, reg, IS_USER(s));
2944     if ((insn & 7) != 7) {
2945         tcg_gen_mov_i32(reg, tmp);
2946     }
2947     tcg_gen_addi_i32(QREG_SP, tmp, offset);
2948     tcg_temp_free(tmp);
2949 }
2950 
2951 DISAS_INSN(link)
2952 {
2953     int16_t offset;
2954 
2955     offset = read_im16(env, s);
2956     gen_link(s, insn, offset);
2957 }
2958 
2959 DISAS_INSN(linkl)
2960 {
2961     int32_t offset;
2962 
2963     offset = read_im32(env, s);
2964     gen_link(s, insn, offset);
2965 }
2966 
2967 DISAS_INSN(unlk)
2968 {
2969     TCGv src;
2970     TCGv reg;
2971     TCGv tmp;
2972 
2973     src = tcg_temp_new();
2974     reg = AREG(insn, 0);
2975     tcg_gen_mov_i32(src, reg);
2976     tmp = gen_load(s, OS_LONG, src, 0, IS_USER(s));
2977     tcg_gen_mov_i32(reg, tmp);
2978     tcg_gen_addi_i32(QREG_SP, src, 4);
2979     tcg_temp_free(src);
2980     tcg_temp_free(tmp);
2981 }
2982 
2983 #if defined(CONFIG_SOFTMMU)
2984 DISAS_INSN(reset)
2985 {
2986     if (IS_USER(s)) {
2987         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
2988         return;
2989     }
2990 
2991     gen_helper_reset(cpu_env);
2992 }
2993 #endif
2994 
2995 DISAS_INSN(nop)
2996 {
2997 }
2998 
2999 DISAS_INSN(rtd)
3000 {
3001     TCGv tmp;
3002     int16_t offset = read_im16(env, s);
3003 
3004     tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
3005     tcg_gen_addi_i32(QREG_SP, QREG_SP, offset + 4);
3006     gen_jmp(s, tmp);
3007 }
3008 
3009 DISAS_INSN(rtr)
3010 {
3011     TCGv tmp;
3012     TCGv ccr;
3013     TCGv sp;
3014 
3015     sp = tcg_temp_new();
3016     ccr = gen_load(s, OS_WORD, QREG_SP, 0, IS_USER(s));
3017     tcg_gen_addi_i32(sp, QREG_SP, 2);
3018     tmp = gen_load(s, OS_LONG, sp, 0, IS_USER(s));
3019     tcg_gen_addi_i32(QREG_SP, sp, 4);
3020     tcg_temp_free(sp);
3021 
3022     gen_set_sr(s, ccr, true);
3023     tcg_temp_free(ccr);
3024 
3025     gen_jmp(s, tmp);
3026 }
3027 
3028 DISAS_INSN(rts)
3029 {
3030     TCGv tmp;
3031 
3032     tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
3033     tcg_gen_addi_i32(QREG_SP, QREG_SP, 4);
3034     gen_jmp(s, tmp);
3035 }
3036 
3037 DISAS_INSN(jump)
3038 {
3039     TCGv tmp;
3040 
3041     /*
3042      * Load the target address first to ensure correct exception
3043      * behavior.
3044      */
3045     tmp = gen_lea(env, s, insn, OS_LONG);
3046     if (IS_NULL_QREG(tmp)) {
3047         gen_addr_fault(s);
3048         return;
3049     }
3050     if ((insn & 0x40) == 0) {
3051         /* jsr */
3052         gen_push(s, tcg_const_i32(s->pc));
3053     }
3054     gen_jmp(s, tmp);
3055 }
3056 
3057 DISAS_INSN(addsubq)
3058 {
3059     TCGv src;
3060     TCGv dest;
3061     TCGv val;
3062     int imm;
3063     TCGv addr;
3064     int opsize;
3065 
3066     if ((insn & 070) == 010) {
3067         /* Operation on address register is always long.  */
3068         opsize = OS_LONG;
3069     } else {
3070         opsize = insn_opsize(insn);
3071     }
3072     SRC_EA(env, src, opsize, 1, &addr);
3073     imm = (insn >> 9) & 7;
3074     if (imm == 0) {
3075         imm = 8;
3076     }
3077     val = tcg_const_i32(imm);
3078     dest = tcg_temp_new();
3079     tcg_gen_mov_i32(dest, src);
3080     if ((insn & 0x38) == 0x08) {
3081         /*
3082          * Don't update condition codes if the destination is an
3083          * address register.
3084          */
3085         if (insn & 0x0100) {
3086             tcg_gen_sub_i32(dest, dest, val);
3087         } else {
3088             tcg_gen_add_i32(dest, dest, val);
3089         }
3090     } else {
3091         if (insn & 0x0100) {
3092             tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
3093             tcg_gen_sub_i32(dest, dest, val);
3094             set_cc_op(s, CC_OP_SUBB + opsize);
3095         } else {
3096             tcg_gen_add_i32(dest, dest, val);
3097             tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
3098             set_cc_op(s, CC_OP_ADDB + opsize);
3099         }
3100         gen_update_cc_add(dest, val, opsize);
3101     }
3102     tcg_temp_free(val);
3103     DEST_EA(env, insn, opsize, dest, &addr);
3104     tcg_temp_free(dest);
3105 }
3106 
3107 DISAS_INSN(branch)
3108 {
3109     int32_t offset;
3110     uint32_t base;
3111     int op;
3112 
3113     base = s->pc;
3114     op = (insn >> 8) & 0xf;
3115     offset = (int8_t)insn;
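    /*
     * An 8-bit displacement of 0x00 means a 16-bit displacement word
     * follows the opcode; 0xff means a 32-bit displacement follows.
     */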
3116     if (offset == 0) {
3117         offset = (int16_t)read_im16(env, s);
3118     } else if (offset == -1) {
3119         offset = read_im32(env, s);
3120     }
3121     if (op == 1) {
3122         /* bsr */
3123         gen_push(s, tcg_const_i32(s->pc));
3124     }
3125     if (op > 1) {
3126         /* Bcc */
3127         TCGLabel *l1 = gen_new_label();
3128         gen_jmpcc(s, ((insn >> 8) & 0xf) ^ 1, l1);
3129         gen_jmp_tb(s, 1, base + offset, s->base.pc_next);
3130         gen_set_label(l1);
3131         gen_jmp_tb(s, 0, s->pc, s->base.pc_next);
3132     } else {
3133         /* Unconditional branch.  */
3134         update_cc_op(s);
3135         gen_jmp_tb(s, 0, base + offset, s->base.pc_next);
3136     }
3137 }
3138 
3139 DISAS_INSN(moveq)
3140 {
3141     tcg_gen_movi_i32(DREG(insn, 9), (int8_t)insn);
3142     gen_logic_cc(s, DREG(insn, 9), OS_LONG);
3143 }
3144 
3145 DISAS_INSN(mvzs)
3146 {
3147     int opsize;
3148     TCGv src;
3149     TCGv reg;
3150 
3151     if (insn & 0x40)
3152         opsize = OS_WORD;
3153     else
3154         opsize = OS_BYTE;
3155     SRC_EA(env, src, opsize, (insn & 0x80) == 0, NULL);
3156     reg = DREG(insn, 9);
3157     tcg_gen_mov_i32(reg, src);
3158     gen_logic_cc(s, src, opsize);
3159 }
3160 
3161 DISAS_INSN(or)
3162 {
3163     TCGv reg;
3164     TCGv dest;
3165     TCGv src;
3166     TCGv addr;
3167     int opsize;
3168 
3169     opsize = insn_opsize(insn);
3170     reg = gen_extend(s, DREG(insn, 9), opsize, 0);
3171     dest = tcg_temp_new();
3172     if (insn & 0x100) {
3173         SRC_EA(env, src, opsize, 0, &addr);
3174         tcg_gen_or_i32(dest, src, reg);
3175         DEST_EA(env, insn, opsize, dest, &addr);
3176     } else {
3177         SRC_EA(env, src, opsize, 0, NULL);
3178         tcg_gen_or_i32(dest, src, reg);
3179         gen_partset_reg(opsize, DREG(insn, 9), dest);
3180     }
3181     gen_logic_cc(s, dest, opsize);
3182     tcg_temp_free(dest);
3183 }
3184 
3185 DISAS_INSN(suba)
3186 {
3187     TCGv src;
3188     TCGv reg;
3189 
3190     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3191     reg = AREG(insn, 9);
3192     tcg_gen_sub_i32(reg, reg, src);
3193 }
3194 
3195 static inline void gen_subx(DisasContext *s, TCGv src, TCGv dest, int opsize)
3196 {
3197     TCGv tmp;
3198 
3199     gen_flush_flags(s); /* compute old Z */
3200 
3201     /*
3202      * Perform subtract with borrow.
3203      * (X, N) = dest - (src + X);
3204      */
3205 
3206     tmp = tcg_const_i32(0);
3207     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, tmp, QREG_CC_X, tmp);
3208     tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, dest, tmp, QREG_CC_N, QREG_CC_X);
3209     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3210     tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);
3211 
3212     /* Compute signed-overflow for subtract.  */
3213 
3214     tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, dest);
3215     tcg_gen_xor_i32(tmp, dest, src);
3216     tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, tmp);
3217     tcg_temp_free(tmp);
3218 
3219     /* Copy the rest of the results into place.  */
3220     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
3221     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
3222 
3223     set_cc_op(s, CC_OP_FLAGS);
3224 
3225     /* result is in QREG_CC_N */
3226 }
3227 
3228 DISAS_INSN(subx_reg)
3229 {
3230     TCGv dest;
3231     TCGv src;
3232     int opsize;
3233 
3234     opsize = insn_opsize(insn);
3235 
3236     src = gen_extend(s, DREG(insn, 0), opsize, 1);
3237     dest = gen_extend(s, DREG(insn, 9), opsize, 1);
3238 
3239     gen_subx(s, src, dest, opsize);
3240 
3241     gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3242 }
3243 
3244 DISAS_INSN(subx_mem)
3245 {
3246     TCGv src;
3247     TCGv addr_src;
3248     TCGv dest;
3249     TCGv addr_dest;
3250     int opsize;
3251 
3252     opsize = insn_opsize(insn);
3253 
3254     addr_src = AREG(insn, 0);
3255     tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
3256     src = gen_load(s, opsize, addr_src, 1, IS_USER(s));
3257 
3258     addr_dest = AREG(insn, 9);
3259     tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
3260     dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));
3261 
3262     gen_subx(s, src, dest, opsize);
3263 
3264     gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));
3265 
3266     tcg_temp_free(dest);
3267     tcg_temp_free(src);
3268 }
3269 
3270 DISAS_INSN(mov3q)
3271 {
3272     TCGv src;
3273     int val;
3274 
3275     val = (insn >> 9) & 7;
3276     if (val == 0)
3277         val = -1;
3278     src = tcg_const_i32(val);
3279     gen_logic_cc(s, src, OS_LONG);
3280     DEST_EA(env, insn, OS_LONG, src, NULL);
3281     tcg_temp_free(src);
3282 }
3283 
3284 DISAS_INSN(cmp)
3285 {
3286     TCGv src;
3287     TCGv reg;
3288     int opsize;
3289 
3290     opsize = insn_opsize(insn);
3291     SRC_EA(env, src, opsize, 1, NULL);
3292     reg = gen_extend(s, DREG(insn, 9), opsize, 1);
3293     gen_update_cc_cmp(s, reg, src, opsize);
3294 }
3295 
3296 DISAS_INSN(cmpa)
3297 {
3298     int opsize;
3299     TCGv src;
3300     TCGv reg;
3301 
3302     if (insn & 0x100) {
3303         opsize = OS_LONG;
3304     } else {
3305         opsize = OS_WORD;
3306     }
3307     SRC_EA(env, src, opsize, 1, NULL);
3308     reg = AREG(insn, 9);
3309     gen_update_cc_cmp(s, reg, src, OS_LONG);
3310 }
3311 
3312 DISAS_INSN(cmpm)
3313 {
3314     int opsize = insn_opsize(insn);
3315     TCGv src, dst;
3316 
3317     /* Post-increment load (mode 3) from Ay.  */
3318     src = gen_ea_mode(env, s, 3, REG(insn, 0), opsize,
3319                       NULL_QREG, NULL, EA_LOADS, IS_USER(s));
3320     /* Post-increment load (mode 3) from Ax.  */
3321     dst = gen_ea_mode(env, s, 3, REG(insn, 9), opsize,
3322                       NULL_QREG, NULL, EA_LOADS, IS_USER(s));
3323 
3324     gen_update_cc_cmp(s, dst, src, opsize);
3325 }
3326 
3327 DISAS_INSN(eor)
3328 {
3329     TCGv src;
3330     TCGv dest;
3331     TCGv addr;
3332     int opsize;
3333 
3334     opsize = insn_opsize(insn);
3335 
3336     SRC_EA(env, src, opsize, 0, &addr);
3337     dest = tcg_temp_new();
3338     tcg_gen_xor_i32(dest, src, DREG(insn, 9));
3339     gen_logic_cc(s, dest, opsize);
3340     DEST_EA(env, insn, opsize, dest, &addr);
3341     tcg_temp_free(dest);
3342 }
3343 
3344 static void do_exg(TCGv reg1, TCGv reg2)
3345 {
3346     TCGv temp = tcg_temp_new();
3347     tcg_gen_mov_i32(temp, reg1);
3348     tcg_gen_mov_i32(reg1, reg2);
3349     tcg_gen_mov_i32(reg2, temp);
3350     tcg_temp_free(temp);
3351 }
3352 
3353 DISAS_INSN(exg_dd)
3354 {
3355     /* exchange Dx and Dy */
3356     do_exg(DREG(insn, 9), DREG(insn, 0));
3357 }
3358 
3359 DISAS_INSN(exg_aa)
3360 {
3361     /* exchange Ax and Ay */
3362     do_exg(AREG(insn, 9), AREG(insn, 0));
3363 }
3364 
3365 DISAS_INSN(exg_da)
3366 {
3367     /* exchange Dx and Ay */
3368     do_exg(DREG(insn, 9), AREG(insn, 0));
3369 }
3370 
3371 DISAS_INSN(and)
3372 {
3373     TCGv src;
3374     TCGv reg;
3375     TCGv dest;
3376     TCGv addr;
3377     int opsize;
3378 
3379     dest = tcg_temp_new();
3380 
3381     opsize = insn_opsize(insn);
3382     reg = DREG(insn, 9);
3383     if (insn & 0x100) {
3384         SRC_EA(env, src, opsize, 0, &addr);
3385         tcg_gen_and_i32(dest, src, reg);
3386         DEST_EA(env, insn, opsize, dest, &addr);
3387     } else {
3388         SRC_EA(env, src, opsize, 0, NULL);
3389         tcg_gen_and_i32(dest, src, reg);
3390         gen_partset_reg(opsize, reg, dest);
3391     }
3392     gen_logic_cc(s, dest, opsize);
3393     tcg_temp_free(dest);
3394 }
3395 
3396 DISAS_INSN(adda)
3397 {
3398     TCGv src;
3399     TCGv reg;
3400 
3401     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3402     reg = AREG(insn, 9);
3403     tcg_gen_add_i32(reg, reg, src);
3404 }
3405 
3406 static inline void gen_addx(DisasContext *s, TCGv src, TCGv dest, int opsize)
3407 {
3408     TCGv tmp;
3409 
3410     gen_flush_flags(s); /* compute old Z */
3411 
3412     /*
3413      * Perform addition with carry.
3414      * (X, N) = src + dest + X;
3415      */
3416 
3417     tmp = tcg_const_i32(0);
3418     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_X, tmp, dest, tmp);
3419     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_N, QREG_CC_X, src, tmp);
3420     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3421 
3422     /* Compute signed-overflow for addition.  */
3423 
3424     tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
3425     tcg_gen_xor_i32(tmp, dest, src);
3426     tcg_gen_andc_i32(QREG_CC_V, QREG_CC_V, tmp);
3427     tcg_temp_free(tmp);
3428 
3429     /* Copy the rest of the results into place.  */
3430     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
3431     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
3432 
3433     set_cc_op(s, CC_OP_FLAGS);
3434 
3435     /* result is in QREG_CC_N */
3436 }
3437 
3438 DISAS_INSN(addx_reg)
3439 {
3440     TCGv dest;
3441     TCGv src;
3442     int opsize;
3443 
3444     opsize = insn_opsize(insn);
3445 
3446     dest = gen_extend(s, DREG(insn, 9), opsize, 1);
3447     src = gen_extend(s, DREG(insn, 0), opsize, 1);
3448 
3449     gen_addx(s, src, dest, opsize);
3450 
3451     gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3452 }
3453 
3454 DISAS_INSN(addx_mem)
3455 {
3456     TCGv src;
3457     TCGv addr_src;
3458     TCGv dest;
3459     TCGv addr_dest;
3460     int opsize;
3461 
3462     opsize = insn_opsize(insn);
3463 
3464     addr_src = AREG(insn, 0);
3465     tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
3466     src = gen_load(s, opsize, addr_src, 1, IS_USER(s));
3467 
3468     addr_dest = AREG(insn, 9);
3469     tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
3470     dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));
3471 
3472     gen_addx(s, src, dest, opsize);
3473 
3474     gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));
3475 
3476     tcg_temp_free(dest);
3477     tcg_temp_free(src);
3478 }
3479 
3480 static inline void shift_im(DisasContext *s, uint16_t insn, int opsize)
3481 {
3482     int count = (insn >> 9) & 7;
3483     int logical = insn & 8;
3484     int left = insn & 0x100;
3485     int bits = opsize_bytes(opsize) * 8;
3486     TCGv reg = gen_extend(s, DREG(insn, 0), opsize, !logical);
3487 
3488     if (count == 0) {
3489         count = 8;
3490     }
3491 
3492     tcg_gen_movi_i32(QREG_CC_V, 0);
3493     if (left) {
3494         tcg_gen_shri_i32(QREG_CC_C, reg, bits - count);
3495         tcg_gen_shli_i32(QREG_CC_N, reg, count);
3496 
3497         /*
3498          * Note that ColdFire always clears V (done above),
3499          * while M68000 sets it if the most significant bit changes at
3500          * any time during the shift operation.
3501          */
3502         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68K)) {
3503             /* if shift count >= bits, V is (reg != 0) */
3504             if (count >= bits) {
3505                 tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, reg, QREG_CC_V);
3506             } else {
3507                 TCGv t0 = tcg_temp_new();
3508                 tcg_gen_sari_i32(QREG_CC_V, reg, bits - 1);
3509                 tcg_gen_sari_i32(t0, reg, bits - count - 1);
3510                 tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, t0);
3511                 tcg_temp_free(t0);
3512             }
3513             tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
3514         }
3515     } else {
3516         tcg_gen_shri_i32(QREG_CC_C, reg, count - 1);
3517         if (logical) {
3518             tcg_gen_shri_i32(QREG_CC_N, reg, count);
3519         } else {
3520             tcg_gen_sari_i32(QREG_CC_N, reg, count);
3521         }
3522     }
3523 
3524     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3525     tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3526     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3527     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
3528 
3529     gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
3530     set_cc_op(s, CC_OP_FLAGS);
3531 }
3532 
3533 static inline void shift_reg(DisasContext *s, uint16_t insn, int opsize)
3534 {
3535     int logical = insn & 8;
3536     int left = insn & 0x100;
3537     int bits = opsize_bytes(opsize) * 8;
3538     TCGv reg = gen_extend(s, DREG(insn, 0), opsize, !logical);
3539     TCGv s32;
3540     TCGv_i64 t64, s64;
3541 
3542     t64 = tcg_temp_new_i64();
3543     s64 = tcg_temp_new_i64();
3544     s32 = tcg_temp_new();
3545 
3546     /*
3547      * Note that m68k truncates the shift count modulo 64, not 32.
3548      * In addition, a 64-bit shift makes it easy to find "the last
3549      * bit shifted out", for the carry flag.
3550      */
3551     tcg_gen_andi_i32(s32, DREG(insn, 9), 63);
3552     tcg_gen_extu_i32_i64(s64, s32);
3553     tcg_gen_extu_i32_i64(t64, reg);
3554 
3555     /* Optimistically set V=0.  Also used as a zero source below.  */
3556     tcg_gen_movi_i32(QREG_CC_V, 0);
3557     if (left) {
3558         tcg_gen_shl_i64(t64, t64, s64);
3559 
3560         if (opsize == OS_LONG) {
3561             tcg_gen_extr_i64_i32(QREG_CC_N, QREG_CC_C, t64);
3562             /* Note that C=0 if shift count is 0, and we get that for free.  */
3563         } else {
3564             TCGv zero = tcg_const_i32(0);
3565             tcg_gen_extrl_i64_i32(QREG_CC_N, t64);
3566             tcg_gen_shri_i32(QREG_CC_C, QREG_CC_N, bits);
3567             tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3568                                 s32, zero, zero, QREG_CC_C);
3569             tcg_temp_free(zero);
3570         }
3571         tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3572 
3573         /* X = C, but only if the shift count was non-zero.  */
3574         tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
3575                             QREG_CC_C, QREG_CC_X);
3576 
3577         /*
3578          * M68000 sets V if the most significant bit is changed at
3579          * any time during the shift operation.  Do this via creating
3580          * an extension of the sign bit, comparing, and discarding
3581          * the bits below the sign bit.  I.e.
3582          *     int64_t s = (intN_t)reg;
3583          *     int64_t t = (int64_t)(intN_t)reg << count;
3584          *     V = ((s ^ t) & (-1 << (bits - 1))) != 0
3585          */
3586         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68K)) {
3587             TCGv_i64 tt = tcg_const_i64(32);
3588             /* if shift is greater than 32, use 32 */
3589             tcg_gen_movcond_i64(TCG_COND_GT, s64, s64, tt, tt, s64);
3590             tcg_temp_free_i64(tt);
3591             /* Sign extend the input to 64 bits; re-do the shift.  */
3592             tcg_gen_ext_i32_i64(t64, reg);
3593             tcg_gen_shl_i64(s64, t64, s64);
3594             /* Clear all bits that are unchanged.  */
3595             tcg_gen_xor_i64(t64, t64, s64);
3596             /* Ignore the bits below the sign bit.  */
3597             tcg_gen_andi_i64(t64, t64, -1ULL << (bits - 1));
3598             /* If any bits remain set, we have overflow.  */
3599             tcg_gen_setcondi_i64(TCG_COND_NE, t64, t64, 0);
3600             tcg_gen_extrl_i64_i32(QREG_CC_V, t64);
3601             tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
3602         }
3603     } else {
3604         tcg_gen_shli_i64(t64, t64, 32);
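        /*
         * The operand sits in the high half of t64, so after the 64-bit
         * right shift the bits shifted out collect in the low half; the
         * last bit shifted out ends up in bit 31 and becomes C below.
         */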
3605         if (logical) {
3606             tcg_gen_shr_i64(t64, t64, s64);
3607         } else {
3608             tcg_gen_sar_i64(t64, t64, s64);
3609         }
3610         tcg_gen_extr_i64_i32(QREG_CC_C, QREG_CC_N, t64);
3611 
3612         /* Note that C=0 if shift count is 0, and we get that for free.  */
3613         tcg_gen_shri_i32(QREG_CC_C, QREG_CC_C, 31);
3614 
3615         /* X = C, but only if the shift count was non-zero.  */
3616         tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
3617                             QREG_CC_C, QREG_CC_X);
3618     }
3619     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3620     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3621 
3622     tcg_temp_free(s32);
3623     tcg_temp_free_i64(s64);
3624     tcg_temp_free_i64(t64);
3625 
3626     /* Write back the result.  */
3627     gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
3628     set_cc_op(s, CC_OP_FLAGS);
3629 }
3630 
3631 DISAS_INSN(shift8_im)
3632 {
3633     shift_im(s, insn, OS_BYTE);
3634 }
3635 
3636 DISAS_INSN(shift16_im)
3637 {
3638     shift_im(s, insn, OS_WORD);
3639 }
3640 
3641 DISAS_INSN(shift_im)
3642 {
3643     shift_im(s, insn, OS_LONG);
3644 }
3645 
3646 DISAS_INSN(shift8_reg)
3647 {
3648     shift_reg(s, insn, OS_BYTE);
3649 }
3650 
3651 DISAS_INSN(shift16_reg)
3652 {
3653     shift_reg(s, insn, OS_WORD);
3654 }
3655 
3656 DISAS_INSN(shift_reg)
3657 {
3658     shift_reg(s, insn, OS_LONG);
3659 }
3660 
3661 DISAS_INSN(shift_mem)
3662 {
3663     int logical = insn & 8;
3664     int left = insn & 0x100;
3665     TCGv src;
3666     TCGv addr;
3667 
3668     SRC_EA(env, src, OS_WORD, !logical, &addr);
3669     tcg_gen_movi_i32(QREG_CC_V, 0);
3670     if (left) {
3671         tcg_gen_shri_i32(QREG_CC_C, src, 15);
3672         tcg_gen_shli_i32(QREG_CC_N, src, 1);
3673 
3674         /*
3675          * Note that ColdFire always clears V,
3676          * while M68000 sets it if the most significant bit changes at
3677          * any time during the shift operation.
3678          */
3679         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68K)) {
3680             src = gen_extend(s, src, OS_WORD, 1);
3681             tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
3682         }
3683     } else {
3684         tcg_gen_mov_i32(QREG_CC_C, src);
3685         if (logical) {
3686             tcg_gen_shri_i32(QREG_CC_N, src, 1);
3687         } else {
3688             tcg_gen_sari_i32(QREG_CC_N, src, 1);
3689         }
3690     }
3691 
3692     gen_ext(QREG_CC_N, QREG_CC_N, OS_WORD, 1);
3693     tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3694     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3695     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
3696 
3697     DEST_EA(env, insn, OS_WORD, QREG_CC_N, &addr);
3698     set_cc_op(s, CC_OP_FLAGS);
3699 }
3700 
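     /*
      * Rotate without extend (ROL/ROR).  Byte and word operands are
      * replicated across the 32-bit value so that a plain 32-bit TCG
      * rotate produces the correct result.  N, Z and C are set from the
      * result, V is always cleared and X is not affected.
      */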
3701 static void rotate(TCGv reg, TCGv shift, int left, int size)
3702 {
3703     switch (size) {
3704     case 8:
3705         /* Replicate the 8-bit input so that a 32-bit rotate works.  */
3706         tcg_gen_ext8u_i32(reg, reg);
3707         tcg_gen_muli_i32(reg, reg, 0x01010101);
3708         goto do_long;
3709     case 16:
3710         /* Replicate the 16-bit input so that a 32-bit rotate works.  */
3711         tcg_gen_deposit_i32(reg, reg, reg, 16, 16);
3712         goto do_long;
3713     do_long:
3714     default:
3715         if (left) {
3716             tcg_gen_rotl_i32(reg, reg, shift);
3717         } else {
3718             tcg_gen_rotr_i32(reg, reg, shift);
3719         }
3720     }
3721 
3722     /* compute flags */
3723 
3724     switch (size) {
3725     case 8:
3726         tcg_gen_ext8s_i32(reg, reg);
3727         break;
3728     case 16:
3729         tcg_gen_ext16s_i32(reg, reg);
3730         break;
3731     default:
3732         break;
3733     }
3734 
3735     /* QREG_CC_X is not affected */
3736 
3737     tcg_gen_mov_i32(QREG_CC_N, reg);
3738     tcg_gen_mov_i32(QREG_CC_Z, reg);
3739 
3740     if (left) {
3741         tcg_gen_andi_i32(QREG_CC_C, reg, 1);
3742     } else {
3743         tcg_gen_shri_i32(QREG_CC_C, reg, 31);
3744     }
3745 
3746     tcg_gen_movi_i32(QREG_CC_V, 0); /* always cleared */
3747 }
3748 
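     /*
      * Set the flags after a rotate through X: N and Z from the result
      * (sign-extended to 32 bits for byte and word sizes), C and X from
      * the new X bit, and V cleared.
      */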
3749 static void rotate_x_flags(TCGv reg, TCGv X, int size)
3750 {
3751     switch (size) {
3752     case 8:
3753         tcg_gen_ext8s_i32(reg, reg);
3754         break;
3755     case 16:
3756         tcg_gen_ext16s_i32(reg, reg);
3757         break;
3758     default:
3759         break;
3760     }
3761     tcg_gen_mov_i32(QREG_CC_N, reg);
3762     tcg_gen_mov_i32(QREG_CC_Z, reg);
3763     tcg_gen_mov_i32(QREG_CC_X, X);
3764     tcg_gen_mov_i32(QREG_CC_C, X);
3765     tcg_gen_movi_i32(QREG_CC_V, 0);
3766 }
3767 
3768 /* Result of rotate_x() is valid if 0 <= shift <= size */
3769 static TCGv rotate_x(TCGv reg, TCGv shift, int left, int size)
3770 {
3771     TCGv X, shl, shr, shx, sz, zero;
3772 
3773     sz = tcg_const_i32(size);
3774 
3775     shr = tcg_temp_new();
3776     shl = tcg_temp_new();
3777     shx = tcg_temp_new();
3778     if (left) {
3779         tcg_gen_mov_i32(shl, shift);      /* shl = shift */
3780         tcg_gen_movi_i32(shr, size + 1);
3781         tcg_gen_sub_i32(shr, shr, shift); /* shr = size + 1 - shift */
3782         tcg_gen_subi_i32(shx, shift, 1);  /* shx = shift - 1 */
3783         /* shx = shx < 0 ? size : shx; */
3784         zero = tcg_const_i32(0);
3785         tcg_gen_movcond_i32(TCG_COND_LT, shx, shx, zero, sz, shx);
3786         tcg_temp_free(zero);
3787     } else {
3788         tcg_gen_mov_i32(shr, shift);      /* shr = shift */
3789         tcg_gen_movi_i32(shl, size + 1);
3790         tcg_gen_sub_i32(shl, shl, shift); /* shl = size + 1 - shift */
3791         tcg_gen_sub_i32(shx, sz, shift); /* shx = size - shift */
3792     }
3793     tcg_temp_free_i32(sz);
3794 
3795     /* reg = (reg << shl) | (reg >> shr) | (x << shx); */
3796 
3797     tcg_gen_shl_i32(shl, reg, shl);
3798     tcg_gen_shr_i32(shr, reg, shr);
3799     tcg_gen_or_i32(reg, shl, shr);
3800     tcg_temp_free(shl);
3801     tcg_temp_free(shr);
3802     tcg_gen_shl_i32(shx, QREG_CC_X, shx);
3803     tcg_gen_or_i32(reg, reg, shx);
3804     tcg_temp_free(shx);
3805 
3806     /* X = (reg >> size) & 1 */
3807 
3808     X = tcg_temp_new();
3809     tcg_gen_extract_i32(X, reg, size, 1);
3810 
3811     return X;
3812 }
3813 
3814 /* Result of rotate32_x() is valid if 0 <= shift < 33 */
3815 static TCGv rotate32_x(TCGv reg, TCGv shift, int left)
3816 {
3817     TCGv_i64 t0, shift64;
3818     TCGv X, lo, hi, zero;
3819 
3820     shift64 = tcg_temp_new_i64();
3821     tcg_gen_extu_i32_i64(shift64, shift);
3822 
3823     t0 = tcg_temp_new_i64();
3824 
3825     X = tcg_temp_new();
3826     lo = tcg_temp_new();
3827     hi = tcg_temp_new();
3828 
3829     if (left) {
3830         /* create [reg:X:..] */
3831 
3832         tcg_gen_shli_i32(lo, QREG_CC_X, 31);
3833         tcg_gen_concat_i32_i64(t0, lo, reg);
3834 
3835         /* rotate */
3836 
3837         tcg_gen_rotl_i64(t0, t0, shift64);
3838         tcg_temp_free_i64(shift64);
3839 
3840         /* result is [reg:..:reg:X] */
3841 
3842         tcg_gen_extr_i64_i32(lo, hi, t0);
3843         tcg_gen_andi_i32(X, lo, 1);
3844 
3845         tcg_gen_shri_i32(lo, lo, 1);
3846     } else {
3847         /* create [..:X:reg] */
3848 
3849         tcg_gen_concat_i32_i64(t0, reg, QREG_CC_X);
3850 
3851         tcg_gen_rotr_i64(t0, t0, shift64);
3852         tcg_temp_free_i64(shift64);
3853 
3854         /* result is value: [X:reg:..:reg] */
3855 
3856         tcg_gen_extr_i64_i32(lo, hi, t0);
3857 
3858         /* extract X */
3859 
3860         tcg_gen_shri_i32(X, hi, 31);
3861 
3862         /* extract result */
3863 
3864         tcg_gen_shli_i32(hi, hi, 1);
3865     }
3866     tcg_temp_free_i64(t0);
3867     tcg_gen_or_i32(lo, lo, hi);
3868     tcg_temp_free(hi);
3869 
3870     /* if shift == 0, register and X are not affected */
3871 
3872     zero = tcg_const_i32(0);
3873     tcg_gen_movcond_i32(TCG_COND_EQ, X, shift, zero, QREG_CC_X, X);
3874     tcg_gen_movcond_i32(TCG_COND_EQ, reg, shift, zero, reg, lo);
3875     tcg_temp_free(zero);
3876     tcg_temp_free(lo);
3877 
3878     return X;
3879 }
3880 
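     /*
      * Immediate-count rotates.  A count field of 0 encodes a count of 8;
      * bit 3 of the opcode selects a plain rotate, otherwise the rotate
      * goes through X.
      */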
3881 DISAS_INSN(rotate_im)
3882 {
3883     TCGv shift;
3884     int tmp;
3885     int left = (insn & 0x100);
3886 
3887     tmp = (insn >> 9) & 7;
3888     if (tmp == 0) {
3889         tmp = 8;
3890     }
3891 
3892     shift = tcg_const_i32(tmp);
3893     if (insn & 8) {
3894         rotate(DREG(insn, 0), shift, left, 32);
3895     } else {
3896         TCGv X = rotate32_x(DREG(insn, 0), shift, left);
3897         rotate_x_flags(DREG(insn, 0), X, 32);
3898         tcg_temp_free(X);
3899     }
3900     tcg_temp_free(shift);
3901 
3902     set_cc_op(s, CC_OP_FLAGS);
3903 }
3904 
3905 DISAS_INSN(rotate8_im)
3906 {
3907     int left = (insn & 0x100);
3908     TCGv reg;
3909     TCGv shift;
3910     int tmp;
3911 
3912     reg = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
3913 
3914     tmp = (insn >> 9) & 7;
3915     if (tmp == 0) {
3916         tmp = 8;
3917     }
3918 
3919     shift = tcg_const_i32(tmp);
3920     if (insn & 8) {
3921         rotate(reg, shift, left, 8);
3922     } else {
3923         TCGv X = rotate_x(reg, shift, left, 8);
3924         rotate_x_flags(reg, X, 8);
3925         tcg_temp_free(X);
3926     }
3927     tcg_temp_free(shift);
3928     gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
3929     set_cc_op(s, CC_OP_FLAGS);
3930 }
3931 
3932 DISAS_INSN(rotate16_im)
3933 {
3934     int left = (insn & 0x100);
3935     TCGv reg;
3936     TCGv shift;
3937     int tmp;
3938 
3939     reg = gen_extend(s, DREG(insn, 0), OS_WORD, 0);
3940     tmp = (insn >> 9) & 7;
3941     if (tmp == 0) {
3942         tmp = 8;
3943     }
3944 
3945     shift = tcg_const_i32(tmp);
3946     if (insn & 8) {
3947         rotate(reg, shift, left, 16);
3948     } else {
3949         TCGv X = rotate_x(reg, shift, left, 16);
3950         rotate_x_flags(reg, X, 16);
3951         tcg_temp_free(X);
3952     }
3953     tcg_temp_free(shift);
3954     gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
3955     set_cc_op(s, CC_OP_FLAGS);
3956 }
3957 
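     /*
      * Register-count rotates.  The 6-bit count is reduced modulo the
      * operand size for plain rotates (with C cleared explicitly when the
      * full count is zero) and modulo size + 1 for rotates through X.
      */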
3958 DISAS_INSN(rotate_reg)
3959 {
3960     TCGv reg;
3961     TCGv src;
3962     TCGv t0, t1;
3963     int left = (insn & 0x100);
3964 
3965     reg = DREG(insn, 0);
3966     src = DREG(insn, 9);
3967     /* shift in [0..63] */
3968     t0 = tcg_temp_new();
3969     tcg_gen_andi_i32(t0, src, 63);
3970     t1 = tcg_temp_new_i32();
3971     if (insn & 8) {
3972         tcg_gen_andi_i32(t1, src, 31);
3973         rotate(reg, t1, left, 32);
3974         /* if shift == 0, clear C */
3975         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3976                             t0, QREG_CC_V /* 0 */,
3977                             QREG_CC_V /* 0 */, QREG_CC_C);
3978     } else {
3979         TCGv X;
3980         /* modulo 33 */
3981         tcg_gen_movi_i32(t1, 33);
3982         tcg_gen_remu_i32(t1, t0, t1);
3983         X = rotate32_x(DREG(insn, 0), t1, left);
3984         rotate_x_flags(DREG(insn, 0), X, 32);
3985         tcg_temp_free(X);
3986     }
3987     tcg_temp_free(t1);
3988     tcg_temp_free(t0);
3989     set_cc_op(s, CC_OP_FLAGS);
3990 }
3991 
3992 DISAS_INSN(rotate8_reg)
3993 {
3994     TCGv reg;
3995     TCGv src;
3996     TCGv t0, t1;
3997     int left = (insn & 0x100);
3998 
3999     reg = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
4000     src = DREG(insn, 9);
4001     /* shift in [0..63] */
4002     t0 = tcg_temp_new_i32();
4003     tcg_gen_andi_i32(t0, src, 63);
4004     t1 = tcg_temp_new_i32();
4005     if (insn & 8) {
4006         tcg_gen_andi_i32(t1, src, 7);
4007         rotate(reg, t1, left, 8);
4008         /* if shift == 0, clear C */
4009         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
4010                             t0, QREG_CC_V /* 0 */,
4011                             QREG_CC_V /* 0 */, QREG_CC_C);
4012     } else {
4013         TCGv X;
4014         /* modulo 9 */
4015         tcg_gen_movi_i32(t1, 9);
4016         tcg_gen_remu_i32(t1, t0, t1);
4017         X = rotate_x(reg, t1, left, 8);
4018         rotate_x_flags(reg, X, 8);
4019         tcg_temp_free(X);
4020     }
4021     tcg_temp_free(t1);
4022     tcg_temp_free(t0);
4023     gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
4024     set_cc_op(s, CC_OP_FLAGS);
4025 }
4026 
4027 DISAS_INSN(rotate16_reg)
4028 {
4029     TCGv reg;
4030     TCGv src;
4031     TCGv t0, t1;
4032     int left = (insn & 0x100);
4033 
4034     reg = gen_extend(s, DREG(insn, 0), OS_WORD, 0);
4035     src = DREG(insn, 9);
4036     /* shift in [0..63] */
4037     t0 = tcg_temp_new_i32();
4038     tcg_gen_andi_i32(t0, src, 63);
4039     t1 = tcg_temp_new_i32();
4040     if (insn & 8) {
4041         tcg_gen_andi_i32(t1, src, 15);
4042         rotate(reg, t1, left, 16);
4043         /* if shift == 0, clear C */
4044         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
4045                             t0, QREG_CC_V /* 0 */,
4046                             QREG_CC_V /* 0 */, QREG_CC_C);
4047     } else {
4048         TCGv X;
4049         /* modulo 17 */
4050         tcg_gen_movi_i32(t1, 17);
4051         tcg_gen_remu_i32(t1, t0, t1);
4052         X = rotate_x(reg, t1, left, 16);
4053         rotate_x_flags(reg, X, 16);
4054         tcg_temp_free(X);
4055     }
4056     tcg_temp_free(t1);
4057     tcg_temp_free(t0);
4058     gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
4059     set_cc_op(s, CC_OP_FLAGS);
4060 }
4061 
4062 DISAS_INSN(rotate_mem)
4063 {
4064     TCGv src;
4065     TCGv addr;
4066     TCGv shift;
4067     int left = (insn & 0x100);
4068 
4069     SRC_EA(env, src, OS_WORD, 0, &addr);
4070 
4071     shift = tcg_const_i32(1);
4072     if (insn & 0x0200) {
4073         rotate(src, shift, left, 16);
4074     } else {
4075         TCGv X = rotate_x(src, shift, left, 16);
4076         rotate_x_flags(src, X, 16);
4077         tcg_temp_free(X);
4078     }
4079     tcg_temp_free(shift);
4080     DEST_EA(env, insn, OS_WORD, src, &addr);
4081     set_cc_op(s, CC_OP_FLAGS);
4082 }
4083 
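     /*
      * Bit-field extension word, as decoded below:
      *   bits 15-12  data register operand (destination for bfext/bfffo,
      *               source for bfins)
      *   bit  11     offset is in a data register (otherwise immediate)
      *   bits 10-6   offset: register number or immediate value
      *   bit  5      width is in a data register (otherwise immediate)
      *   bits 4-0    width: register number or immediate value, where an
      *               immediate 0 means 32
      */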
4084 DISAS_INSN(bfext_reg)
4085 {
4086     int ext = read_im16(env, s);
4087     int is_sign = insn & 0x200;
4088     TCGv src = DREG(insn, 0);
4089     TCGv dst = DREG(ext, 12);
4090     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4091     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4092     int pos = 32 - ofs - len;        /* little bit-endian */
4093     TCGv tmp = tcg_temp_new();
4094     TCGv shift;
4095 
4096     /*
4097      * In general, we're going to rotate the field so that it's at the
4098      * top of the word and then right-shift by the complement of the
4099      * width to extend the field.
4100      */
4101     if (ext & 0x20) {
4102         /* Variable width.  */
4103         if (ext & 0x800) {
4104             /* Variable offset.  */
4105             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
4106             tcg_gen_rotl_i32(tmp, src, tmp);
4107         } else {
4108             tcg_gen_rotli_i32(tmp, src, ofs);
4109         }
4110 
4111         shift = tcg_temp_new();
4112         tcg_gen_neg_i32(shift, DREG(ext, 0));
4113         tcg_gen_andi_i32(shift, shift, 31);
4114         tcg_gen_sar_i32(QREG_CC_N, tmp, shift);
4115         if (is_sign) {
4116             tcg_gen_mov_i32(dst, QREG_CC_N);
4117         } else {
4118             tcg_gen_shr_i32(dst, tmp, shift);
4119         }
4120         tcg_temp_free(shift);
4121     } else {
4122         /* Immediate width.  */
4123         if (ext & 0x800) {
4124             /* Variable offset */
4125             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
4126             tcg_gen_rotl_i32(tmp, src, tmp);
4127             src = tmp;
4128             pos = 32 - len;
4129         } else {
4130             /*
4131              * Immediate offset.  If the field doesn't wrap around the
4132              * end of the word, rely on (s)extract completely.
4133              */
4134             if (pos < 0) {
4135                 tcg_gen_rotli_i32(tmp, src, ofs);
4136                 src = tmp;
4137                 pos = 32 - len;
4138             }
4139         }
4140 
4141         tcg_gen_sextract_i32(QREG_CC_N, src, pos, len);
4142         if (is_sign) {
4143             tcg_gen_mov_i32(dst, QREG_CC_N);
4144         } else {
4145             tcg_gen_extract_i32(dst, src, pos, len);
4146         }
4147     }
4148 
4149     tcg_temp_free(tmp);
4150     set_cc_op(s, CC_OP_LOGIC);
4151 }
4152 
4153 DISAS_INSN(bfext_mem)
4154 {
4155     int ext = read_im16(env, s);
4156     int is_sign = insn & 0x200;
4157     TCGv dest = DREG(ext, 12);
4158     TCGv addr, len, ofs;
4159 
4160     addr = gen_lea(env, s, insn, OS_UNSIZED);
4161     if (IS_NULL_QREG(addr)) {
4162         gen_addr_fault(s);
4163         return;
4164     }
4165 
4166     if (ext & 0x20) {
4167         len = DREG(ext, 0);
4168     } else {
4169         len = tcg_const_i32(extract32(ext, 0, 5));
4170     }
4171     if (ext & 0x800) {
4172         ofs = DREG(ext, 6);
4173     } else {
4174         ofs = tcg_const_i32(extract32(ext, 6, 5));
4175     }
4176 
4177     if (is_sign) {
4178         gen_helper_bfexts_mem(dest, cpu_env, addr, ofs, len);
4179         tcg_gen_mov_i32(QREG_CC_N, dest);
4180     } else {
4181         TCGv_i64 tmp = tcg_temp_new_i64();
4182         gen_helper_bfextu_mem(tmp, cpu_env, addr, ofs, len);
4183         tcg_gen_extr_i64_i32(dest, QREG_CC_N, tmp);
4184         tcg_temp_free_i64(tmp);
4185     }
4186     set_cc_op(s, CC_OP_LOGIC);
4187 
4188     if (!(ext & 0x20)) {
4189         tcg_temp_free(len);
4190     }
4191     if (!(ext & 0x800)) {
4192         tcg_temp_free(ofs);
4193     }
4194 }
4195 
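     /*
      * bfchg/bfclr/bfset/bftst/bfffo on a data register.  The field is
      * moved to the top of the word in QREG_CC_N for the flags, and a
      * mask with zeros in the field positions is built so that each
      * operation is a single logical op on the register.
      */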
4196 DISAS_INSN(bfop_reg)
4197 {
4198     int ext = read_im16(env, s);
4199     TCGv src = DREG(insn, 0);
4200     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4201     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4202     TCGv mask, tofs, tlen;
4203 
4204     tofs = NULL;
4205     tlen = NULL;
4206     if ((insn & 0x0f00) == 0x0d00) { /* bfffo */
4207         tofs = tcg_temp_new();
4208         tlen = tcg_temp_new();
4209     }
4210 
4211     if ((ext & 0x820) == 0) {
4212         /* Immediate width and offset.  */
4213         uint32_t maski = 0x7fffffffu >> (len - 1);
4214         if (ofs + len <= 32) {
4215             tcg_gen_shli_i32(QREG_CC_N, src, ofs);
4216         } else {
4217             tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
4218         }
4219         tcg_gen_andi_i32(QREG_CC_N, QREG_CC_N, ~maski);
4220         mask = tcg_const_i32(ror32(maski, ofs));
4221         if (tofs) {
4222             tcg_gen_movi_i32(tofs, ofs);
4223             tcg_gen_movi_i32(tlen, len);
4224         }
4225     } else {
4226         TCGv tmp = tcg_temp_new();
4227         if (ext & 0x20) {
4228             /* Variable width */
4229             tcg_gen_subi_i32(tmp, DREG(ext, 0), 1);
4230             tcg_gen_andi_i32(tmp, tmp, 31);
4231             mask = tcg_const_i32(0x7fffffffu);
4232             tcg_gen_shr_i32(mask, mask, tmp);
4233             if (tlen) {
4234                 tcg_gen_addi_i32(tlen, tmp, 1);
4235             }
4236         } else {
4237             /* Immediate width */
4238             mask = tcg_const_i32(0x7fffffffu >> (len - 1));
4239             if (tlen) {
4240                 tcg_gen_movi_i32(tlen, len);
4241             }
4242         }
4243         if (ext & 0x800) {
4244             /* Variable offset */
4245             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
4246             tcg_gen_rotl_i32(QREG_CC_N, src, tmp);
4247             tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
4248             tcg_gen_rotr_i32(mask, mask, tmp);
4249             if (tofs) {
4250                 tcg_gen_mov_i32(tofs, tmp);
4251             }
4252         } else {
4253             /* Immediate offset (and variable width) */
4254             tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
4255             tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
4256             tcg_gen_rotri_i32(mask, mask, ofs);
4257             if (tofs) {
4258                 tcg_gen_movi_i32(tofs, ofs);
4259             }
4260         }
4261         tcg_temp_free(tmp);
4262     }
4263     set_cc_op(s, CC_OP_LOGIC);
4264 
4265     switch (insn & 0x0f00) {
4266     case 0x0a00: /* bfchg */
4267         tcg_gen_eqv_i32(src, src, mask);
4268         break;
4269     case 0x0c00: /* bfclr */
4270         tcg_gen_and_i32(src, src, mask);
4271         break;
4272     case 0x0d00: /* bfffo */
4273         gen_helper_bfffo_reg(DREG(ext, 12), QREG_CC_N, tofs, tlen);
4274         tcg_temp_free(tlen);
4275         tcg_temp_free(tofs);
4276         break;
4277     case 0x0e00: /* bfset */
4278         tcg_gen_orc_i32(src, src, mask);
4279         break;
4280     case 0x0800: /* bftst */
4281         /* flags already set; no other work to do.  */
4282         break;
4283     default:
4284         g_assert_not_reached();
4285     }
4286     tcg_temp_free(mask);
4287 }
4288 
4289 DISAS_INSN(bfop_mem)
4290 {
4291     int ext = read_im16(env, s);
4292     TCGv addr, len, ofs;
4293     TCGv_i64 t64;
4294 
4295     addr = gen_lea(env, s, insn, OS_UNSIZED);
4296     if (IS_NULL_QREG(addr)) {
4297         gen_addr_fault(s);
4298         return;
4299     }
4300 
4301     if (ext & 0x20) {
4302         len = DREG(ext, 0);
4303     } else {
4304         len = tcg_const_i32(extract32(ext, 0, 5));
4305     }
4306     if (ext & 0x800) {
4307         ofs = DREG(ext, 6);
4308     } else {
4309         ofs = tcg_const_i32(extract32(ext, 6, 5));
4310     }
4311 
4312     switch (insn & 0x0f00) {
4313     case 0x0a00: /* bfchg */
4314         gen_helper_bfchg_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4315         break;
4316     case 0x0c00: /* bfclr */
4317         gen_helper_bfclr_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4318         break;
4319     case 0x0d00: /* bfffo */
4320         t64 = tcg_temp_new_i64();
4321         gen_helper_bfffo_mem(t64, cpu_env, addr, ofs, len);
4322         tcg_gen_extr_i64_i32(DREG(ext, 12), QREG_CC_N, t64);
4323         tcg_temp_free_i64(t64);
4324         break;
4325     case 0x0e00: /* bfset */
4326         gen_helper_bfset_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4327         break;
4328     case 0x0800: /* bftst */
4329         gen_helper_bfexts_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4330         break;
4331     default:
4332         g_assert_not_reached();
4333     }
4334     set_cc_op(s, CC_OP_LOGIC);
4335 
4336     if (!(ext & 0x20)) {
4337         tcg_temp_free(len);
4338     }
4339     if (!(ext & 0x800)) {
4340         tcg_temp_free(ofs);
4341     }
4342 }
4343 
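     /*
      * bfins on a data register.  N and Z are taken from the source value
      * shifted to the top of the word.  The insertion uses a deposit when
      * the field is immediate and does not wrap; otherwise a rotated mask
      * is used.
      */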
4344 DISAS_INSN(bfins_reg)
4345 {
4346     int ext = read_im16(env, s);
4347     TCGv dst = DREG(insn, 0);
4348     TCGv src = DREG(ext, 12);
4349     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4350     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4351     int pos = 32 - ofs - len;        /* little bit-endian */
4352     TCGv tmp;
4353 
4354     tmp = tcg_temp_new();
4355 
4356     if (ext & 0x20) {
4357         /* Variable width */
4358         tcg_gen_neg_i32(tmp, DREG(ext, 0));
4359         tcg_gen_andi_i32(tmp, tmp, 31);
4360         tcg_gen_shl_i32(QREG_CC_N, src, tmp);
4361     } else {
4362         /* Immediate width */
4363         tcg_gen_shli_i32(QREG_CC_N, src, 32 - len);
4364     }
4365     set_cc_op(s, CC_OP_LOGIC);
4366 
4367     /* Immediate width and offset */
4368     if ((ext & 0x820) == 0) {
4369         /* Check for suitability for deposit.  */
4370         if (pos >= 0) {
4371             tcg_gen_deposit_i32(dst, dst, src, pos, len);
4372         } else {
4373             uint32_t maski = -2U << (len - 1);
4374             uint32_t roti = (ofs + len) & 31;
4375             tcg_gen_andi_i32(tmp, src, ~maski);
4376             tcg_gen_rotri_i32(tmp, tmp, roti);
4377             tcg_gen_andi_i32(dst, dst, ror32(maski, roti));
4378             tcg_gen_or_i32(dst, dst, tmp);
4379         }
4380     } else {
4381         TCGv mask = tcg_temp_new();
4382         TCGv rot = tcg_temp_new();
4383 
4384         if (ext & 0x20) {
4385             /* Variable width */
4386             tcg_gen_subi_i32(rot, DREG(ext, 0), 1);
4387             tcg_gen_andi_i32(rot, rot, 31);
4388             tcg_gen_movi_i32(mask, -2);
4389             tcg_gen_shl_i32(mask, mask, rot);
4390             tcg_gen_mov_i32(rot, DREG(ext, 0));
4391             tcg_gen_andc_i32(tmp, src, mask);
4392         } else {
4393             /* Immediate width (variable offset) */
4394             uint32_t maski = -2U << (len - 1);
4395             tcg_gen_andi_i32(tmp, src, ~maski);
4396             tcg_gen_movi_i32(mask, maski);
4397             tcg_gen_movi_i32(rot, len & 31);
4398         }
4399         if (ext & 0x800) {
4400             /* Variable offset */
4401             tcg_gen_add_i32(rot, rot, DREG(ext, 6));
4402         } else {
4403             /* Immediate offset (variable width) */
4404             tcg_gen_addi_i32(rot, rot, ofs);
4405         }
4406         tcg_gen_andi_i32(rot, rot, 31);
4407         tcg_gen_rotr_i32(mask, mask, rot);
4408         tcg_gen_rotr_i32(tmp, tmp, rot);
4409         tcg_gen_and_i32(dst, dst, mask);
4410         tcg_gen_or_i32(dst, dst, tmp);
4411 
4412         tcg_temp_free(rot);
4413         tcg_temp_free(mask);
4414     }
4415     tcg_temp_free(tmp);
4416 }
4417 
4418 DISAS_INSN(bfins_mem)
4419 {
4420     int ext = read_im16(env, s);
4421     TCGv src = DREG(ext, 12);
4422     TCGv addr, len, ofs;
4423 
4424     addr = gen_lea(env, s, insn, OS_UNSIZED);
4425     if (IS_NULL_QREG(addr)) {
4426         gen_addr_fault(s);
4427         return;
4428     }
4429 
4430     if (ext & 0x20) {
4431         len = DREG(ext, 0);
4432     } else {
4433         len = tcg_const_i32(extract32(ext, 0, 5));
4434     }
4435     if (ext & 0x800) {
4436         ofs = DREG(ext, 6);
4437     } else {
4438         ofs = tcg_const_i32(extract32(ext, 6, 5));
4439     }
4440 
4441     gen_helper_bfins_mem(QREG_CC_N, cpu_env, addr, src, ofs, len);
4442     set_cc_op(s, CC_OP_LOGIC);
4443 
4444     if (!(ext & 0x20)) {
4445         tcg_temp_free(len);
4446     }
4447     if (!(ext & 0x800)) {
4448         tcg_temp_free(ofs);
4449     }
4450 }
4451 
4452 DISAS_INSN(ff1)
4453 {
4454     TCGv reg;
4455     reg = DREG(insn, 0);
4456     gen_logic_cc(s, reg, OS_LONG);
4457     gen_helper_ff1(reg, reg);
4458 }
4459 
4460 DISAS_INSN(chk)
4461 {
4462     TCGv src, reg;
4463     int opsize;
4464 
4465     switch ((insn >> 7) & 3) {
4466     case 3:
4467         opsize = OS_WORD;
4468         break;
4469     case 2:
4470         if (m68k_feature(env, M68K_FEATURE_CHK2)) {
4471             opsize = OS_LONG;
4472             break;
4473         }
4474         /* fallthru */
4475     default:
4476         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4477         return;
4478     }
4479     SRC_EA(env, src, opsize, 1, NULL);
4480     reg = gen_extend(s, DREG(insn, 9), opsize, 1);
4481 
4482     gen_flush_flags(s);
4483     gen_helper_chk(cpu_env, reg, src);
4484 }
4485 
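     /*
      * CHK2: the lower and upper bounds are loaded from two consecutive
      * memory operands; the value to check comes from an address or data
      * register selected by bit 15 of the extension word.  Bit 11 of the
      * extension word must be set, otherwise the encoding is rejected as
      * illegal here.
      */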
4486 DISAS_INSN(chk2)
4487 {
4488     uint16_t ext;
4489     TCGv addr1, addr2, bound1, bound2, reg;
4490     int opsize;
4491 
4492     switch ((insn >> 9) & 3) {
4493     case 0:
4494         opsize = OS_BYTE;
4495         break;
4496     case 1:
4497         opsize = OS_WORD;
4498         break;
4499     case 2:
4500         opsize = OS_LONG;
4501         break;
4502     default:
4503         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4504         return;
4505     }
4506 
4507     ext = read_im16(env, s);
4508     if ((ext & 0x0800) == 0) {
4509         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4510         return;
4511     }
4512 
4513     addr1 = gen_lea(env, s, insn, OS_UNSIZED);
4514     addr2 = tcg_temp_new();
4515     tcg_gen_addi_i32(addr2, addr1, opsize_bytes(opsize));
4516 
4517     bound1 = gen_load(s, opsize, addr1, 1, IS_USER(s));
4518     tcg_temp_free(addr1);
4519     bound2 = gen_load(s, opsize, addr2, 1, IS_USER(s));
4520     tcg_temp_free(addr2);
4521 
4522     reg = tcg_temp_new();
4523     if (ext & 0x8000) {
4524         tcg_gen_mov_i32(reg, AREG(ext, 12));
4525     } else {
4526         gen_ext(reg, DREG(ext, 12), opsize, 1);
4527     }
4528 
4529     gen_flush_flags(s);
4530     gen_helper_chk2(cpu_env, reg, bound1, bound2);
4531     tcg_temp_free(reg);
4532     tcg_temp_free(bound1);
4533     tcg_temp_free(bound2);
4534 }
4535 
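     /*
      * Copy one 16-byte line for MOVE16.  Both addresses are aligned down
      * to a 16-byte boundary and the line is transferred as two 64-bit
      * loads followed by two 64-bit stores.
      */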
4536 static void m68k_copy_line(TCGv dst, TCGv src, int index)
4537 {
4538     TCGv addr;
4539     TCGv_i64 t0, t1;
4540 
4541     addr = tcg_temp_new();
4542 
4543     t0 = tcg_temp_new_i64();
4544     t1 = tcg_temp_new_i64();
4545 
4546     tcg_gen_andi_i32(addr, src, ~15);
4547     tcg_gen_qemu_ld64(t0, addr, index);
4548     tcg_gen_addi_i32(addr, addr, 8);
4549     tcg_gen_qemu_ld64(t1, addr, index);
4550 
4551     tcg_gen_andi_i32(addr, dst, ~15);
4552     tcg_gen_qemu_st64(t0, addr, index);
4553     tcg_gen_addi_i32(addr, addr, 8);
4554     tcg_gen_qemu_st64(t1, addr, index);
4555 
4556     tcg_temp_free_i64(t0);
4557     tcg_temp_free_i64(t1);
4558     tcg_temp_free(addr);
4559 }
4560 
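     /*
      * MOVE16 (Ax)+,(Ay)+: copy a line and post-increment both address
      * registers.  Bit 15 of the extension word must be set.
      */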
4561 DISAS_INSN(move16_reg)
4562 {
4563     int index = IS_USER(s);
4564     TCGv tmp;
4565     uint16_t ext;
4566 
4567     ext = read_im16(env, s);
4568     if ((ext & (1 << 15)) == 0) {
4569         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4570     }
4571 
4572     m68k_copy_line(AREG(ext, 12), AREG(insn, 0), index);
4573 
4574     /* Ax can be Ay, so save Ay before incrementing Ax */
4575     tmp = tcg_temp_new();
4576     tcg_gen_mov_i32(tmp, AREG(ext, 12));
4577     tcg_gen_addi_i32(AREG(insn, 0), AREG(insn, 0), 16);
4578     tcg_gen_addi_i32(AREG(ext, 12), tmp, 16);
4579     tcg_temp_free(tmp);
4580 }
4581 
4582 DISAS_INSN(move16_mem)
4583 {
4584     int index = IS_USER(s);
4585     TCGv reg, addr;
4586 
4587     reg = AREG(insn, 0);
4588     addr = tcg_const_i32(read_im32(env, s));
4589 
4590     if ((insn >> 3) & 1) {
4591         /* MOVE16 (xxx).L, (Ay) */
4592         m68k_copy_line(reg, addr, index);
4593     } else {
4594         /* MOVE16 (Ay), (xxx).L */
4595         m68k_copy_line(addr, reg, index);
4596     }
4597 
4598     tcg_temp_free(addr);
4599 
4600     if (((insn >> 3) & 2) == 0) {
4601         /* (Ay)+ */
4602         tcg_gen_addi_i32(reg, reg, 16);
4603     }
4604 }
4605 
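     /*
      * strldsr: push the current SR, then load SR from an immediate.  The
      * second opcode word must be 0x46fc (move-to-SR immediate), the
      * instruction is privileged, and the new value must keep the S bit
      * set.
      */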
4606 DISAS_INSN(strldsr)
4607 {
4608     uint16_t ext;
4609     uint32_t addr;
4610 
4611     addr = s->pc - 2;
4612     ext = read_im16(env, s);
4613     if (ext != 0x46FC) {
4614         gen_exception(s, addr, EXCP_ILLEGAL);
4615         return;
4616     }
4617     ext = read_im16(env, s);
4618     if (IS_USER(s) || (ext & SR_S) == 0) {
4619         gen_exception(s, addr, EXCP_PRIVILEGE);
4620         return;
4621     }
4622     gen_push(s, gen_get_sr(s));
4623     gen_set_sr_im(s, ext, 0);
4624     gen_exit_tb(s);
4625 }
4626 
4627 DISAS_INSN(move_from_sr)
4628 {
4629     TCGv sr;
4630 
4631     if (IS_USER(s) && m68k_feature(env, M68K_FEATURE_MOVEFROMSR_PRIV)) {
4632         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4633         return;
4634     }
4635     sr = gen_get_sr(s);
4636     DEST_EA(env, insn, OS_WORD, sr, NULL);
4637 }
4638 
4639 #if defined(CONFIG_SOFTMMU)
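     /*
      * MOVES: move between a register and the address space selected by
      * SFC (loads) or DFC (stores).  Bit 11 of the extension word gives
      * the direction and bit 15 selects an address or data register.
      */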
4640 DISAS_INSN(moves)
4641 {
4642     int opsize;
4643     uint16_t ext;
4644     TCGv reg;
4645     TCGv addr;
4646     int extend;
4647 
4648     if (IS_USER(s)) {
4649         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4650         return;
4651     }
4652 
4653     ext = read_im16(env, s);
4654 
4655     opsize = insn_opsize(insn);
4656 
4657     if (ext & 0x8000) {
4658         /* address register */
4659         reg = AREG(ext, 12);
4660         extend = 1;
4661     } else {
4662         /* data register */
4663         reg = DREG(ext, 12);
4664         extend = 0;
4665     }
4666 
4667     addr = gen_lea(env, s, insn, opsize);
4668     if (IS_NULL_QREG(addr)) {
4669         gen_addr_fault(s);
4670         return;
4671     }
4672 
4673     if (ext & 0x0800) {
4674         /* from reg to ea */
4675         gen_store(s, opsize, addr, reg, DFC_INDEX(s));
4676     } else {
4677         /* from ea to reg */
4678         TCGv tmp = gen_load(s, opsize, addr, 0, SFC_INDEX(s));
4679         if (extend) {
4680             gen_ext(reg, tmp, opsize, 1);
4681         } else {
4682             gen_partset_reg(opsize, reg, tmp);
4683         }
4684         tcg_temp_free(tmp);
4685     }
4686     switch (extract32(insn, 3, 3)) {
4687     case 3: /* Indirect postincrement.  */
4688         tcg_gen_addi_i32(AREG(insn, 0), addr,
4689                          REG(insn, 0) == 7 && opsize == OS_BYTE
4690                          ? 2
4691                          : opsize_bytes(opsize));
4692         break;
4693     case 4: /* Indirect predecrement.  */
4694         tcg_gen_mov_i32(AREG(insn, 0), addr);
4695         break;
4696     }
4697 }
4698 
4699 DISAS_INSN(move_to_sr)
4700 {
4701     if (IS_USER(s)) {
4702         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4703         return;
4704     }
4705     gen_move_to_sr(env, s, insn, false);
4706     gen_exit_tb(s);
4707 }
4708 
4709 DISAS_INSN(move_from_usp)
4710 {
4711     if (IS_USER(s)) {
4712         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4713         return;
4714     }
4715     tcg_gen_ld_i32(AREG(insn, 0), cpu_env,
4716                    offsetof(CPUM68KState, sp[M68K_USP]));
4717 }
4718 
4719 DISAS_INSN(move_to_usp)
4720 {
4721     if (IS_USER(s)) {
4722         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4723         return;
4724     }
4725     tcg_gen_st_i32(AREG(insn, 0), cpu_env,
4726                    offsetof(CPUM68KState, sp[M68K_USP]));
4727 }
4728 
4729 DISAS_INSN(halt)
4730 {
4731     if (IS_USER(s)) {
4732         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4733         return;
4734     }
4735 
4736     gen_exception(s, s->pc, EXCP_HALT_INSN);
4737 }
4738 
4739 DISAS_INSN(stop)
4740 {
4741     uint16_t ext;
4742 
4743     if (IS_USER(s)) {
4744         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4745         return;
4746     }
4747 
4748     ext = read_im16(env, s);
4749 
4750     gen_set_sr_im(s, ext, 0);
4751     tcg_gen_movi_i32(cpu_halted, 1);
4752     gen_exception(s, s->pc, EXCP_HLT);
4753 }
4754 
4755 DISAS_INSN(rte)
4756 {
4757     if (IS_USER(s)) {
4758         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4759         return;
4760     }
4761     gen_exception(s, s->base.pc_next, EXCP_RTE);
4762 }
4763 
4764 DISAS_INSN(cf_movec)
4765 {
4766     uint16_t ext;
4767     TCGv reg;
4768 
4769     if (IS_USER(s)) {
4770         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4771         return;
4772     }
4773 
4774     ext = read_im16(env, s);
4775 
4776     if (ext & 0x8000) {
4777         reg = AREG(ext, 12);
4778     } else {
4779         reg = DREG(ext, 12);
4780     }
4781     gen_helper_cf_movec_to(cpu_env, tcg_const_i32(ext & 0xfff), reg);
4782     gen_exit_tb(s);
4783 }
4784 
4785 DISAS_INSN(m68k_movec)
4786 {
4787     uint16_t ext;
4788     TCGv reg;
4789 
4790     if (IS_USER(s)) {
4791         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4792         return;
4793     }
4794 
4795     ext = read_im16(env, s);
4796 
4797     if (ext & 0x8000) {
4798         reg = AREG(ext, 12);
4799     } else {
4800         reg = DREG(ext, 12);
4801     }
4802     if (insn & 1) {
4803         gen_helper_m68k_movec_to(cpu_env, tcg_const_i32(ext & 0xfff), reg);
4804     } else {
4805         gen_helper_m68k_movec_from(reg, cpu_env, tcg_const_i32(ext & 0xfff));
4806     }
4807     gen_exit_tb(s);
4808 }
4809 
4810 DISAS_INSN(intouch)
4811 {
4812     if (IS_USER(s)) {
4813         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4814         return;
4815     }
4816     /* ICache fetch.  Implement as no-op.  */
4817 }
4818 
4819 DISAS_INSN(cpushl)
4820 {
4821     if (IS_USER(s)) {
4822         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4823         return;
4824     }
4825     /* Cache push/invalidate.  Implement as no-op.  */
4826 }
4827 
4828 DISAS_INSN(cpush)
4829 {
4830     if (IS_USER(s)) {
4831         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4832         return;
4833     }
4834     /* Cache push/invalidate.  Implement as no-op.  */
4835 }
4836 
4837 DISAS_INSN(cinv)
4838 {
4839     if (IS_USER(s)) {
4840         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4841         return;
4842     }
4843     /* Invalidate cache line.  Implement as no-op.  */
4844 }
4845 
4846 #if defined(CONFIG_SOFTMMU)
4847 DISAS_INSN(pflush)
4848 {
4849     TCGv opmode;
4850 
4851     if (IS_USER(s)) {
4852         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4853         return;
4854     }
4855 
4856     opmode = tcg_const_i32((insn >> 3) & 3);
4857     gen_helper_pflush(cpu_env, AREG(insn, 0), opmode);
4858     tcg_temp_free(opmode);
4859 }
4860 
4861 DISAS_INSN(ptest)
4862 {
4863     TCGv is_read;
4864 
4865     if (IS_USER(s)) {
4866         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4867         return;
4868     }
4869     is_read = tcg_const_i32((insn >> 5) & 1);
4870     gen_helper_ptest(cpu_env, AREG(insn, 0), is_read);
4871     tcg_temp_free(is_read);
4872 }
4873 #endif
4874 
4875 DISAS_INSN(wddata)
4876 {
4877     gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4878 }
4879 
4880 DISAS_INSN(wdebug)
4881 {
4882     if (IS_USER(s)) {
4883         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4884         return;
4885     }
4886     /* TODO: Implement wdebug.  */
4887     cpu_abort(env_cpu(env), "WDEBUG not implemented");
4888 }
4889 #endif
4890 
4891 DISAS_INSN(trap)
4892 {
4893     gen_exception(s, s->pc, EXCP_TRAP0 + (insn & 0xf));
4894 }
4895 
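     /*
      * Raise a format-2 TRAPcc exception when the condition holds;
      * otherwise fall through to the next instruction.
      */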
4896 static void do_trapcc(DisasContext *s, DisasCompare *c)
4897 {
4898     if (c->tcond != TCG_COND_NEVER) {
4899         TCGLabel *over = NULL;
4900 
4901         update_cc_op(s);
4902 
4903         if (c->tcond != TCG_COND_ALWAYS) {
4904             /* Jump over if !c. */
4905             over = gen_new_label();
4906             tcg_gen_brcond_i32(tcg_invert_cond(c->tcond), c->v1, c->v2, over);
4907         }
4908 
4909         tcg_gen_movi_i32(QREG_PC, s->pc);
4910         gen_raise_exception_format2(s, EXCP_TRAPCC, s->base.pc_next);
4911 
4912         if (over != NULL) {
4913             gen_set_label(over);
4914             s->base.is_jmp = DISAS_NEXT;
4915         }
4916     }
4917     free_cond(c);
4918 }
4919 
4920 DISAS_INSN(trapcc)
4921 {
4922     DisasCompare c;
4923 
4924     /* Consume and discard the immediate operand. */
4925     switch (extract32(insn, 0, 3)) {
4926     case 2: /* trapcc.w */
4927         (void)read_im16(env, s);
4928         break;
4929     case 3: /* trapcc.l */
4930         (void)read_im32(env, s);
4931         break;
4932     case 4: /* trapcc (no operand) */
4933         break;
4934     default:
4935         /* trapcc registered with only valid opmodes */
4936         g_assert_not_reached();
4937     }
4938 
4939     gen_cc_cond(&c, s, extract32(insn, 8, 4));
4940     do_trapcc(s, &c);
4941 }
4942 
4943 DISAS_INSN(trapv)
4944 {
4945     DisasCompare c;
4946 
4947     gen_cc_cond(&c, s, 9); /* V set */
4948     do_trapcc(s, &c);
4949 }
4950 
4951 static void gen_load_fcr(DisasContext *s, TCGv res, int reg)
4952 {
4953     switch (reg) {
4954     case M68K_FPIAR:
4955         tcg_gen_movi_i32(res, 0);
4956         break;
4957     case M68K_FPSR:
4958         tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpsr));
4959         break;
4960     case M68K_FPCR:
4961         tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpcr));
4962         break;
4963     }
4964 }
4965 
4966 static void gen_store_fcr(DisasContext *s, TCGv val, int reg)
4967 {
4968     switch (reg) {
4969     case M68K_FPIAR:
4970         break;
4971     case M68K_FPSR:
4972         tcg_gen_st_i32(val, cpu_env, offsetof(CPUM68KState, fpsr));
4973         break;
4974     case M68K_FPCR:
4975         gen_helper_set_fpcr(cpu_env, val);
4976         break;
4977     }
4978 }
4979 
4980 static void gen_qemu_store_fcr(DisasContext *s, TCGv addr, int reg)
4981 {
4982     int index = IS_USER(s);
4983     TCGv tmp;
4984 
4985     tmp = tcg_temp_new();
4986     gen_load_fcr(s, tmp, reg);
4987     tcg_gen_qemu_st32(tmp, addr, index);
4988     tcg_temp_free(tmp);
4989 }
4990 
4991 static void gen_qemu_load_fcr(DisasContext *s, TCGv addr, int reg)
4992 {
4993     int index = IS_USER(s);
4994     TCGv tmp;
4995 
4996     tmp = tcg_temp_new();
4997     tcg_gen_qemu_ld32u(tmp, addr, index);
4998     gen_store_fcr(s, tmp, reg);
4999     tcg_temp_free(tmp);
5000 }
5001 
5002 
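     /*
      * FMOVE(M) to or from the floating-point control registers.  Bit 13
      * of the extension word selects the direction (set means the control
      * registers are stored to <ea>) and bits 12-10 select which of FPCR,
      * FPSR and FPIAR take part.
      */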
5003 static void gen_op_fmove_fcr(CPUM68KState *env, DisasContext *s,
5004                              uint32_t insn, uint32_t ext)
5005 {
5006     int mask = (ext >> 10) & 7;
5007     int is_write = (ext >> 13) & 1;
5008     int mode = extract32(insn, 3, 3);
5009     int i;
5010     TCGv addr, tmp;
5011 
5012     switch (mode) {
5013     case 0: /* Dn */
5014         if (mask != M68K_FPIAR && mask != M68K_FPSR && mask != M68K_FPCR) {
5015             gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
5016             return;
5017         }
5018         if (is_write) {
5019             gen_load_fcr(s, DREG(insn, 0), mask);
5020         } else {
5021             gen_store_fcr(s, DREG(insn, 0), mask);
5022         }
5023         return;
5024     case 1: /* An, only with FPIAR */
5025         if (mask != M68K_FPIAR) {
5026             gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
5027             return;
5028         }
5029         if (is_write) {
5030             gen_load_fcr(s, AREG(insn, 0), mask);
5031         } else {
5032             gen_store_fcr(s, AREG(insn, 0), mask);
5033         }
5034         return;
5035     case 7: /* Immediate */
5036         if (REG(insn, 0) == 4) {
5037             if (is_write ||
5038                 (mask != M68K_FPIAR && mask != M68K_FPSR &&
5039                  mask != M68K_FPCR)) {
5040                 gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
5041                 return;
5042             }
5043             tmp = tcg_const_i32(read_im32(env, s));
5044             gen_store_fcr(s, tmp, mask);
5045             tcg_temp_free(tmp);
5046             return;
5047         }
5048         break;
5049     default:
5050         break;
5051     }
5052 
5053     tmp = gen_lea(env, s, insn, OS_LONG);
5054     if (IS_NULL_QREG(tmp)) {
5055         gen_addr_fault(s);
5056         return;
5057     }
5058 
5059     addr = tcg_temp_new();
5060     tcg_gen_mov_i32(addr, tmp);
5061 
5062     /*
5063      * mask:
5064      *
5065      * 0b100 Floating-Point Control Register
5066      * 0b010 Floating-Point Status Register
5067      * 0b001 Floating-Point Instruction Address Register
5068      *
5069      */
5070 
5071     if (is_write && mode == 4) {
5072         for (i = 2; i >= 0; i--, mask >>= 1) {
5073             if (mask & 1) {
5074                 gen_qemu_store_fcr(s, addr, 1 << i);
5075                 if (mask != 1) {
5076                     tcg_gen_subi_i32(addr, addr, opsize_bytes(OS_LONG));
5077                 }
5078             }
5079         }
5080         tcg_gen_mov_i32(AREG(insn, 0), addr);
5081     } else {
5082         for (i = 0; i < 3; i++, mask >>= 1) {
5083             if (mask & 1) {
5084                 if (is_write) {
5085                     gen_qemu_store_fcr(s, addr, 1 << i);
5086                 } else {
5087                     gen_qemu_load_fcr(s, addr, 1 << i);
5088                 }
5089                 if (mask != 1 || mode == 3) {
5090                     tcg_gen_addi_i32(addr, addr, opsize_bytes(OS_LONG));
5091                 }
5092             }
5093         }
5094         if (mode == 3) {
5095             tcg_gen_mov_i32(AREG(insn, 0), addr);
5096         }
5097     }
5098     tcg_temp_free_i32(addr);
5099 }
5100 
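     /*
      * FMOVEM of floating-point data registers.  Bit 13 of the extension
      * word gives the direction, bits 12-11 the mode (predecrement or
      * postincrement, static or dynamic register list) and the low byte,
      * or a data register for a dynamic list, the register mask.
      */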
5101 static void gen_op_fmovem(CPUM68KState *env, DisasContext *s,
5102                           uint32_t insn, uint32_t ext)
5103 {
5104     int opsize;
5105     TCGv addr, tmp;
5106     int mode = (ext >> 11) & 0x3;
5107     int is_load = ((ext & 0x2000) == 0);
5108 
5109     if (m68k_feature(s->env, M68K_FEATURE_FPU)) {
5110         opsize = OS_EXTENDED;
5111     } else {
5112         opsize = OS_DOUBLE;  /* FIXME */
5113     }
5114 
5115     addr = gen_lea(env, s, insn, opsize);
5116     if (IS_NULL_QREG(addr)) {
5117         gen_addr_fault(s);
5118         return;
5119     }
5120 
5121     tmp = tcg_temp_new();
5122     if (mode & 0x1) {
5123         /* Dynamic register list */
5124         tcg_gen_ext8u_i32(tmp, DREG(ext, 4));
5125     } else {
5126         /* Static register list */
5127         tcg_gen_movi_i32(tmp, ext & 0xff);
5128     }
5129 
5130     if (!is_load && (mode & 2) == 0) {
5131         /*
5132          * The predecrement addressing mode is only available when
5133          * storing registers to memory.
5134          */
5135         if (opsize == OS_EXTENDED) {
5136             gen_helper_fmovemx_st_predec(tmp, cpu_env, addr, tmp);
5137         } else {
5138             gen_helper_fmovemd_st_predec(tmp, cpu_env, addr, tmp);
5139         }
5140     } else {
5141         /* postincrement addressing mode */
5142         if (opsize == OS_EXTENDED) {
5143             if (is_load) {
5144                 gen_helper_fmovemx_ld_postinc(tmp, cpu_env, addr, tmp);
5145             } else {
5146                 gen_helper_fmovemx_st_postinc(tmp, cpu_env, addr, tmp);
5147             }
5148         } else {
5149             if (is_load) {
5150                 gen_helper_fmovemd_ld_postinc(tmp, cpu_env, addr, tmp);
5151             } else {
5152                 gen_helper_fmovemd_st_postinc(tmp, cpu_env, addr, tmp);
5153             }
5154         }
5155     }
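         /* Update the address register for the (An)+ and -(An) modes.  */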
5156     if ((insn & 070) == 030 || (insn & 070) == 040) {
5157         tcg_gen_mov_i32(AREG(insn, 0), tmp);
5158     }
5159     tcg_temp_free(tmp);
5160 }
5161 
5162 /*
5163  * ??? FP exceptions are not implemented.  Most exceptions are deferred until
5164  * immediately before the next FP instruction is executed.
5165  */
5166 DISAS_INSN(fpu)
5167 {
5168     uint16_t ext;
5169     int opmode;
5170     int opsize;
5171     TCGv_ptr cpu_src, cpu_dest;
5172 
5173     ext = read_im16(env, s);
5174     opmode = ext & 0x7f;
5175     switch ((ext >> 13) & 7) {
5176     case 0:
5177         break;
5178     case 1:
5179         goto undef;
5180     case 2:
5181         if (insn == 0xf200 && (ext & 0xfc00) == 0x5c00) {
5182             /* fmovecr */
5183             TCGv rom_offset = tcg_const_i32(opmode);
5184             cpu_dest = gen_fp_ptr(REG(ext, 7));
5185             gen_helper_fconst(cpu_env, cpu_dest, rom_offset);
5186             tcg_temp_free_ptr(cpu_dest);
5187             tcg_temp_free(rom_offset);
5188             return;
5189         }
5190         break;
5191     case 3: /* fmove out */
5192         cpu_src = gen_fp_ptr(REG(ext, 7));
5193         opsize = ext_opsize(ext, 10);
5194         if (gen_ea_fp(env, s, insn, opsize, cpu_src,
5195                       EA_STORE, IS_USER(s)) == -1) {
5196             gen_addr_fault(s);
5197         }
5198         gen_helper_ftst(cpu_env, cpu_src);
5199         tcg_temp_free_ptr(cpu_src);
5200         return;
5201     case 4: /* fmove to control register.  */
5202     case 5: /* fmove from control register.  */
5203         gen_op_fmove_fcr(env, s, insn, ext);
5204         return;
5205     case 6: /* fmovem */
5206     case 7:
5207         if ((ext & 0x1000) == 0 && !m68k_feature(s->env, M68K_FEATURE_FPU)) {
5208             goto undef;
5209         }
5210         gen_op_fmovem(env, s, insn, ext);
5211         return;
5212     }
5213     if (ext & (1 << 14)) {
5214         /* Source effective address.  */
5215         opsize = ext_opsize(ext, 10);
5216         cpu_src = gen_fp_result_ptr();
5217         if (gen_ea_fp(env, s, insn, opsize, cpu_src,
5218                       EA_LOADS, IS_USER(s)) == -1) {
5219             gen_addr_fault(s);
5220             return;
5221         }
5222     } else {
5223         /* Source register.  */
5224         opsize = OS_EXTENDED;
5225         cpu_src = gen_fp_ptr(REG(ext, 10));
5226     }
5227     cpu_dest = gen_fp_ptr(REG(ext, 7));
5228     switch (opmode) {
5229     case 0: /* fmove */
5230         gen_fp_move(cpu_dest, cpu_src);
5231         break;
5232     case 0x40: /* fsmove */
5233         gen_helper_fsround(cpu_env, cpu_dest, cpu_src);
5234         break;
5235     case 0x44: /* fdmove */
5236         gen_helper_fdround(cpu_env, cpu_dest, cpu_src);
5237         break;
5238     case 1: /* fint */
5239         gen_helper_firound(cpu_env, cpu_dest, cpu_src);
5240         break;
5241     case 2: /* fsinh */
5242         gen_helper_fsinh(cpu_env, cpu_dest, cpu_src);
5243         break;
5244     case 3: /* fintrz */
5245         gen_helper_fitrunc(cpu_env, cpu_dest, cpu_src);
5246         break;
5247     case 4: /* fsqrt */
5248         gen_helper_fsqrt(cpu_env, cpu_dest, cpu_src);
5249         break;
5250     case 0x41: /* fssqrt */
5251         gen_helper_fssqrt(cpu_env, cpu_dest, cpu_src);
5252         break;
5253     case 0x45: /* fdsqrt */
5254         gen_helper_fdsqrt(cpu_env, cpu_dest, cpu_src);
5255         break;
5256     case 0x06: /* flognp1 */
5257         gen_helper_flognp1(cpu_env, cpu_dest, cpu_src);
5258         break;
5259     case 0x08: /* fetoxm1 */
5260         gen_helper_fetoxm1(cpu_env, cpu_dest, cpu_src);
5261         break;
5262     case 0x09: /* ftanh */
5263         gen_helper_ftanh(cpu_env, cpu_dest, cpu_src);
5264         break;
5265     case 0x0a: /* fatan */
5266         gen_helper_fatan(cpu_env, cpu_dest, cpu_src);
5267         break;
5268     case 0x0c: /* fasin */
5269         gen_helper_fasin(cpu_env, cpu_dest, cpu_src);
5270         break;
5271     case 0x0d: /* fatanh */
5272         gen_helper_fatanh(cpu_env, cpu_dest, cpu_src);
5273         break;
5274     case 0x0e: /* fsin */
5275         gen_helper_fsin(cpu_env, cpu_dest, cpu_src);
5276         break;
5277     case 0x0f: /* ftan */
5278         gen_helper_ftan(cpu_env, cpu_dest, cpu_src);
5279         break;
5280     case 0x10: /* fetox */
5281         gen_helper_fetox(cpu_env, cpu_dest, cpu_src);
5282         break;
5283     case 0x11: /* ftwotox */
5284         gen_helper_ftwotox(cpu_env, cpu_dest, cpu_src);
5285         break;
5286     case 0x12: /* ftentox */
5287         gen_helper_ftentox(cpu_env, cpu_dest, cpu_src);
5288         break;
5289     case 0x14: /* flogn */
5290         gen_helper_flogn(cpu_env, cpu_dest, cpu_src);
5291         break;
5292     case 0x15: /* flog10 */
5293         gen_helper_flog10(cpu_env, cpu_dest, cpu_src);
5294         break;
5295     case 0x16: /* flog2 */
5296         gen_helper_flog2(cpu_env, cpu_dest, cpu_src);
5297         break;
5298     case 0x18: /* fabs */
5299         gen_helper_fabs(cpu_env, cpu_dest, cpu_src);
5300         break;
5301     case 0x58: /* fsabs */
5302         gen_helper_fsabs(cpu_env, cpu_dest, cpu_src);
5303         break;
5304     case 0x5c: /* fdabs */
5305         gen_helper_fdabs(cpu_env, cpu_dest, cpu_src);
5306         break;
5307     case 0x19: /* fcosh */
5308         gen_helper_fcosh(cpu_env, cpu_dest, cpu_src);
5309         break;
5310     case 0x1a: /* fneg */
5311         gen_helper_fneg(cpu_env, cpu_dest, cpu_src);
5312         break;
5313     case 0x5a: /* fsneg */
5314         gen_helper_fsneg(cpu_env, cpu_dest, cpu_src);
5315         break;
5316     case 0x5e: /* fdneg */
5317         gen_helper_fdneg(cpu_env, cpu_dest, cpu_src);
5318         break;
5319     case 0x1c: /* facos */
5320         gen_helper_facos(cpu_env, cpu_dest, cpu_src);
5321         break;
5322     case 0x1d: /* fcos */
5323         gen_helper_fcos(cpu_env, cpu_dest, cpu_src);
5324         break;
5325     case 0x1e: /* fgetexp */
5326         gen_helper_fgetexp(cpu_env, cpu_dest, cpu_src);
5327         break;
5328     case 0x1f: /* fgetman */
5329         gen_helper_fgetman(cpu_env, cpu_dest, cpu_src);
5330         break;
5331     case 0x20: /* fdiv */
5332         gen_helper_fdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5333         break;
5334     case 0x60: /* fsdiv */
5335         gen_helper_fsdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5336         break;
5337     case 0x64: /* fddiv */
5338         gen_helper_fddiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5339         break;
5340     case 0x21: /* fmod */
5341         gen_helper_fmod(cpu_env, cpu_dest, cpu_src, cpu_dest);
5342         break;
5343     case 0x22: /* fadd */
5344         gen_helper_fadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5345         break;
5346     case 0x62: /* fsadd */
5347         gen_helper_fsadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5348         break;
5349     case 0x66: /* fdadd */
5350         gen_helper_fdadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5351         break;
5352     case 0x23: /* fmul */
5353         gen_helper_fmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5354         break;
5355     case 0x63: /* fsmul */
5356         gen_helper_fsmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5357         break;
5358     case 0x67: /* fdmul */
5359         gen_helper_fdmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5360         break;
5361     case 0x24: /* fsgldiv */
5362         gen_helper_fsgldiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5363         break;
5364     case 0x25: /* frem */
5365         gen_helper_frem(cpu_env, cpu_dest, cpu_src, cpu_dest);
5366         break;
5367     case 0x26: /* fscale */
5368         gen_helper_fscale(cpu_env, cpu_dest, cpu_src, cpu_dest);
5369         break;
5370     case 0x27: /* fsglmul */
5371         gen_helper_fsglmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5372         break;
5373     case 0x28: /* fsub */
5374         gen_helper_fsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5375         break;
5376     case 0x68: /* fssub */
5377         gen_helper_fssub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5378         break;
5379     case 0x6c: /* fdsub */
5380         gen_helper_fdsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5381         break;
5382     case 0x30: case 0x31: case 0x32:
5383     case 0x33: case 0x34: case 0x35:
5384     case 0x36: case 0x37: {
5385             TCGv_ptr cpu_dest2 = gen_fp_ptr(REG(ext, 0));
5386             gen_helper_fsincos(cpu_env, cpu_dest, cpu_dest2, cpu_src);
5387             tcg_temp_free_ptr(cpu_dest2);
5388         }
5389         break;
5390     case 0x38: /* fcmp */
5391         gen_helper_fcmp(cpu_env, cpu_src, cpu_dest);
5392         return;
5393     case 0x3a: /* ftst */
5394         gen_helper_ftst(cpu_env, cpu_src);
5395         return;
5396     default:
5397         goto undef;
5398     }
5399     tcg_temp_free_ptr(cpu_src);
5400     gen_helper_ftst(cpu_env, cpu_dest);
5401     tcg_temp_free_ptr(cpu_dest);
5402     return;
5403 undef:
5404     /* FIXME: Is this right for offset addressing modes?  */
5405     s->pc -= 2;
5406     disas_undef_fpu(env, s, insn);
5407 }
5408 
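     /*
      * Convert an FPU conditional predicate (0-31) into a TCG comparison
      * against zero of a value derived from the FPSR condition-code bits.
      * IEEE-aware and non-aware predicates are handled identically and
      * BSUN is never raised (see the TODO below).
      */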
5409 static void gen_fcc_cond(DisasCompare *c, DisasContext *s, int cond)
5410 {
5411     TCGv fpsr;
5412 
5413     c->g1 = 1;
5414     c->v2 = tcg_const_i32(0);
5415     c->g2 = 0;
5416     /* TODO: Raise BSUN exception.  */
5417     fpsr = tcg_temp_new();
5418     gen_load_fcr(s, fpsr, M68K_FPSR);
5419     switch (cond) {
5420     case 0:  /* False */
5421     case 16: /* Signaling False */
5422         c->v1 = c->v2;
5423         c->tcond = TCG_COND_NEVER;
5424         break;
5425     case 1:  /* EQual Z */
5426     case 17: /* Signaling EQual Z */
5427         c->v1 = tcg_temp_new();
5428         c->g1 = 0;
5429         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5430         c->tcond = TCG_COND_NE;
5431         break;
5432     case 2:  /* Ordered Greater Than !(A || Z || N) */
5433     case 18: /* Greater Than !(A || Z || N) */
5434         c->v1 = tcg_temp_new();
5435         c->g1 = 0;
5436         tcg_gen_andi_i32(c->v1, fpsr,
5437                          FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5438         c->tcond = TCG_COND_EQ;
5439         break;
5440     case 3:  /* Ordered Greater than or Equal Z || !(A || N) */
5441     case 19: /* Greater than or Equal Z || !(A || N) */
5442         c->v1 = tcg_temp_new();
5443         c->g1 = 0;
5444         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5445         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
5446         tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_Z | FPSR_CC_N);
5447         tcg_gen_or_i32(c->v1, c->v1, fpsr);
5448         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5449         c->tcond = TCG_COND_NE;
5450         break;
5451     case 4:  /* Ordered Less Than !(!N || A || Z) */
5452     case 20: /* Less Than !(!N || A || Z) */
5453         c->v1 = tcg_temp_new();
5454         c->g1 = 0;
5455         tcg_gen_xori_i32(c->v1, fpsr, FPSR_CC_N);
5456         tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_N | FPSR_CC_A | FPSR_CC_Z);
5457         c->tcond = TCG_COND_EQ;
5458         break;
5459     case 5:  /* Ordered Less than or Equal Z || (N && !A) */
5460     case 21: /* Less than or Equal Z || (N && !A) */
5461         c->v1 = tcg_temp_new();
5462         c->g1 = 0;
5463         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5464         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
5465         tcg_gen_andc_i32(c->v1, fpsr, c->v1);
5466         tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_Z | FPSR_CC_N);
5467         c->tcond = TCG_COND_NE;
5468         break;
5469     case 6:  /* Ordered Greater or Less than !(A || Z) */
5470     case 22: /* Greater or Less than !(A || Z) */
5471         c->v1 = tcg_temp_new();
5472         c->g1 = 0;
5473         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
5474         c->tcond = TCG_COND_EQ;
5475         break;
5476     case 7:  /* Ordered !A */
5477     case 23: /* Greater, Less or Equal !A */
5478         c->v1 = tcg_temp_new();
5479         c->g1 = 0;
5480         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5481         c->tcond = TCG_COND_EQ;
5482         break;
5483     case 8:  /* Unordered A */
5484     case 24: /* Not Greater, Less or Equal A */
5485         c->v1 = tcg_temp_new();
5486         c->g1 = 0;
5487         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5488         c->tcond = TCG_COND_NE;
5489         break;
5490     case 9:  /* Unordered or Equal A || Z */
5491     case 25: /* Not Greater or Less than A || Z */
5492         c->v1 = tcg_temp_new();
5493         c->g1 = 0;
5494         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
5495         c->tcond = TCG_COND_NE;
5496         break;
5497     case 10: /* Unordered or Greater Than A || !(N || Z) */
5498     case 26: /* Not Less or Equal A || !(N || Z) */
5499         c->v1 = tcg_temp_new();
5500         c->g1 = 0;
5501         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5502         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
5503         tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_A | FPSR_CC_N);
5504         tcg_gen_or_i32(c->v1, c->v1, fpsr);
5505         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5506         c->tcond = TCG_COND_NE;
5507         break;
5508     case 11: /* Unordered or Greater or Equal A || Z || !N */
5509     case 27: /* Not Less Than A || Z || !N */
5510         c->v1 = tcg_temp_new();
5511         c->g1 = 0;
5512         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5513         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5514         c->tcond = TCG_COND_NE;
5515         break;
5516     case 12: /* Unordered or Less Than A || (N && !Z) */
5517     case 28: /* Not Greater than or Equal A || (N && !Z) */
5518         c->v1 = tcg_temp_new();
5519         c->g1 = 0;
5520         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5521         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
5522         tcg_gen_andc_i32(c->v1, fpsr, c->v1);
5523         tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_A | FPSR_CC_N);
5524         c->tcond = TCG_COND_NE;
5525         break;
5526     case 13: /* Unordered or Less or Equal A || Z || N */
5527     case 29: /* Not Greater Than A || Z || N */
5528         c->v1 = tcg_temp_new();
5529         c->g1 = 0;
5530         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5531         c->tcond = TCG_COND_NE;
5532         break;
5533     case 14: /* Not Equal !Z */
5534     case 30: /* Signaling Not Equal !Z */
5535         c->v1 = tcg_temp_new();
5536         c->g1 = 0;
5537         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5538         c->tcond = TCG_COND_EQ;
5539         break;
5540     case 15: /* True */
5541     case 31: /* Signaling True */
5542         c->v1 = c->v2;
5543         c->tcond = TCG_COND_ALWAYS;
5544         break;
5545     }
5546     tcg_temp_free(fpsr);
5547 }
5548 
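/*
 * Emit a conditional branch to L1 based on an FPU condition code.
 * gen_fcc_cond() reduces the 6-bit predicate to a single TCG compare.
 */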
5549 static void gen_fjmpcc(DisasContext *s, int cond, TCGLabel *l1)
5550 {
5551     DisasCompare c;
5552 
5553     gen_fcc_cond(&c, s, cond);
5554     update_cc_op(s);
5555     tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
5556     free_cond(&c);
5557 }
5558 
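/*
 * FBcc: conditional branch on an FPU condition.  The displacement is a
 * 16-bit immediate, extended to 32 bits when bit 6 of the opcode is set,
 * and is relative to the address of the first extension word (s->pc on
 * entry, saved in 'base' below).
 */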
5559 DISAS_INSN(fbcc)
5560 {
5561     uint32_t offset;
5562     uint32_t base;
5563     TCGLabel *l1;
5564 
5565     base = s->pc;
5566     offset = (int16_t)read_im16(env, s);
5567     if (insn & (1 << 6)) {
5568         offset = (offset << 16) | read_im16(env, s);
5569     }
5570 
5571     l1 = gen_new_label();
5572     update_cc_op(s);
5573     gen_fjmpcc(s, insn & 0x3f, l1);
5574     gen_jmp_tb(s, 0, s->pc, s->base.pc_next);
5575     gen_set_label(l1);
5576     gen_jmp_tb(s, 1, base + offset, s->base.pc_next);
5577 }
5578 
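/*
 * FScc: set the destination byte to all ones when the FPU condition
 * holds, otherwise to all zeroes (the 0/1 setcond result is negated).
 */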
5579 DISAS_INSN(fscc)
5580 {
5581     DisasCompare c;
5582     int cond;
5583     TCGv tmp;
5584     uint16_t ext;
5585 
5586     ext = read_im16(env, s);
5587     cond = ext & 0x3f;
5588     gen_fcc_cond(&c, s, cond);
5589 
5590     tmp = tcg_temp_new();
5591     tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
5592     free_cond(&c);
5593 
5594     tcg_gen_neg_i32(tmp, tmp);
5595     DEST_EA(env, insn, OS_BYTE, tmp, NULL);
5596     tcg_temp_free(tmp);
5597 }
5598 
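/*
 * FTRAPcc: trap if the FPU condition holds.  The optional .w/.l immediate
 * operand is fetched only so the PC advances past it; its value is unused.
 */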
5599 DISAS_INSN(ftrapcc)
5600 {
5601     DisasCompare c;
5602     uint16_t ext;
5603     int cond;
5604 
5605     ext = read_im16(env, s);
5606     cond = ext & 0x3f;
5607 
5608     /* Consume and discard the immediate operand. */
5609     switch (extract32(insn, 0, 3)) {
5610     case 2: /* ftrapcc.w */
5611         (void)read_im16(env, s);
5612         break;
5613     case 3: /* ftrapcc.l */
5614         (void)read_im32(env, s);
5615         break;
5616     case 4: /* ftrapcc (no operand) */
5617         break;
5618     default:
5619         /* ftrapcc registered with only valid opmodes */
5620         g_assert_not_reached();
5621     }
5622 
5623     gen_fcc_cond(&c, s, cond);
5624     do_trapcc(s, &c);
5625 }
5626 
5627 #if defined(CONFIG_SOFTMMU)
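/*
 * FSAVE/FRESTORE are privileged.  Only the M68040 IDLE frame format is
 * handled here: fsave always stores an IDLE frame, and frestore just
 * reads a longword from the effective address without checking the
 * frame contents (see the FIXME below).
 */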
5628 DISAS_INSN(frestore)
5629 {
5630     TCGv addr;
5631 
5632     if (IS_USER(s)) {
5633         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
5634         return;
5635     }
5636     if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5637         SRC_EA(env, addr, OS_LONG, 0, NULL);
5638         /* FIXME: check the state frame */
5639     } else {
5640         disas_undef(env, s, insn);
5641     }
5642 }
5643 
5644 DISAS_INSN(fsave)
5645 {
5646     if (IS_USER(s)) {
5647         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
5648         return;
5649     }
5650 
5651     if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5652         /* always write IDLE */
5653         TCGv idle = tcg_const_i32(0x41000000);
5654         DEST_EA(env, insn, OS_LONG, idle, NULL);
5655         tcg_temp_free(idle);
5656     } else {
5657         disas_undef(env, s, insn);
5658     }
5659 }
5660 #endif
5661 
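/*
 * Extract one 16-bit MAC operand from a 32-bit register, honouring the
 * MACSR operand mode: fractional (FI) operands are left-aligned in the
 * upper half, signed integer (SU) operands are sign-extended, and
 * unsigned integer operands are zero-extended.
 */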
5662 static inline TCGv gen_mac_extract_word(DisasContext *s, TCGv val, int upper)
5663 {
5664     TCGv tmp = tcg_temp_new();
5665     if (s->env->macsr & MACSR_FI) {
5666         if (upper)
5667             tcg_gen_andi_i32(tmp, val, 0xffff0000);
5668         else
5669             tcg_gen_shli_i32(tmp, val, 16);
5670     } else if (s->env->macsr & MACSR_SU) {
5671         if (upper)
5672             tcg_gen_sari_i32(tmp, val, 16);
5673         else
5674             tcg_gen_ext16s_i32(tmp, val);
5675     } else {
5676         if (upper)
5677             tcg_gen_shri_i32(tmp, val, 16);
5678         else
5679             tcg_gen_ext16u_i32(tmp, val);
5680     }
5681     return tmp;
5682 }
5683 
5684 static void gen_mac_clear_flags(void)
5685 {
5686     tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR,
5687                      ~(MACSR_V | MACSR_Z | MACSR_N | MACSR_EV));
5688 }
5689 
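/*
 * ColdFire EMAC MAC/MSAC: multiply two 16- or 32-bit operands and add
 * the product to (or subtract it from) ACCx, saturating according to
 * the MACSR mode.  The "MAC with load" forms additionally move a
 * longword from memory into a register; the EMAC_B dual-accumulate
 * variant applies the same product to a second accumulator.
 */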
5690 DISAS_INSN(mac)
5691 {
5692     TCGv rx;
5693     TCGv ry;
5694     uint16_t ext;
5695     int acc;
5696     TCGv tmp;
5697     TCGv addr;
5698     TCGv loadval;
5699     int dual;
5700     TCGv saved_flags;
5701 
5702     if (!s->done_mac) {
5703         s->mactmp = tcg_temp_new_i64();
5704         s->done_mac = 1;
5705     }
5706 
5707     ext = read_im16(env, s);
5708 
5709     acc = ((insn >> 7) & 1) | ((ext >> 3) & 2);
5710     dual = ((insn & 0x30) != 0 && (ext & 3) != 0);
5711     if (dual && !m68k_feature(s->env, M68K_FEATURE_CF_EMAC_B)) {
5712         disas_undef(env, s, insn);
5713         return;
5714     }
5715     if (insn & 0x30) {
5716         /* MAC with load.  */
5717         tmp = gen_lea(env, s, insn, OS_LONG);
5718         addr = tcg_temp_new();
5719         tcg_gen_and_i32(addr, tmp, QREG_MAC_MASK);
5720         /*
5721          * Load the value now to ensure correct exception behavior.
5722          * Perform writeback after reading the MAC inputs.
5723          */
5724         loadval = gen_load(s, OS_LONG, addr, 0, IS_USER(s));
5725 
5726         acc ^= 1;
5727         rx = (ext & 0x8000) ? AREG(ext, 12) : DREG(insn, 12);
5728         ry = (ext & 8) ? AREG(ext, 0) : DREG(ext, 0);
5729     } else {
5730         loadval = addr = NULL_QREG;
5731         rx = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
5732         ry = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5733     }
5734 
5735     gen_mac_clear_flags();
5736 #if 0
5737     l1 = -1;
5738     /* Disabled because conditional branches clobber temporary vars.  */
5739     if ((s->env->macsr & MACSR_OMC) != 0 && !dual) {
5740         /* Skip the multiply if we know we will ignore it.  */
5741         l1 = gen_new_label();
5742         tmp = tcg_temp_new();
5743         tcg_gen_andi_i32(tmp, QREG_MACSR, 1 << (acc + 8));
5744         gen_op_jmp_nz32(tmp, l1);
5745     }
5746 #endif
5747 
5748     if ((ext & 0x0800) == 0) {
5749         /* Word.  */
5750         rx = gen_mac_extract_word(s, rx, (ext & 0x80) != 0);
5751         ry = gen_mac_extract_word(s, ry, (ext & 0x40) != 0);
5752     }
5753     if (s->env->macsr & MACSR_FI) {
5754         gen_helper_macmulf(s->mactmp, cpu_env, rx, ry);
5755     } else {
5756         if (s->env->macsr & MACSR_SU)
5757             gen_helper_macmuls(s->mactmp, cpu_env, rx, ry);
5758         else
5759             gen_helper_macmulu(s->mactmp, cpu_env, rx, ry);
5760         switch ((ext >> 9) & 3) {
5761         case 1:
5762             tcg_gen_shli_i64(s->mactmp, s->mactmp, 1);
5763             break;
5764         case 3:
5765             tcg_gen_shri_i64(s->mactmp, s->mactmp, 1);
5766             break;
5767         }
5768     }
5769 
5770     if (dual) {
5771         /* Save the overflow flag from the multiply.  */
5772         saved_flags = tcg_temp_new();
5773         tcg_gen_mov_i32(saved_flags, QREG_MACSR);
5774     } else {
5775         saved_flags = NULL_QREG;
5776     }
5777 
5778 #if 0
5779     /* Disabled because conditional branches clobber temporary vars.  */
5780     if ((s->env->macsr & MACSR_OMC) != 0 && dual) {
5781         /* Skip the accumulate if the value is already saturated.  */
5782         l1 = gen_new_label();
5783         tmp = tcg_temp_new();
5784         gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
5785         gen_op_jmp_nz32(tmp, l1);
5786     }
5787 #endif
5788 
5789     if (insn & 0x100)
5790         tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
5791     else
5792         tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
5793 
5794     if (s->env->macsr & MACSR_FI)
5795         gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
5796     else if (s->env->macsr & MACSR_SU)
5797         gen_helper_macsats(cpu_env, tcg_const_i32(acc));
5798     else
5799         gen_helper_macsatu(cpu_env, tcg_const_i32(acc));
5800 
5801 #if 0
5802     /* Disabled because conditional branches clobber temporary vars.  */
5803     if (l1 != -1)
5804         gen_set_label(l1);
5805 #endif
5806 
5807     if (dual) {
5808         /* Dual accumulate variant.  */
5809         acc = (ext >> 2) & 3;
5810         /* Restore the overflow flag from the multiply.  */
5811         tcg_gen_mov_i32(QREG_MACSR, saved_flags);
5812 #if 0
5813         /* Disabled because conditional branches clobber temporary vars.  */
5814         if ((s->env->macsr & MACSR_OMC) != 0) {
5815             /* Skip the accumulate if the value is already saturated.  */
5816             l1 = gen_new_label();
5817             tmp = tcg_temp_new();
5818             gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
5819             gen_op_jmp_nz32(tmp, l1);
5820         }
5821 #endif
5822         if (ext & 2)
5823             tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
5824         else
5825             tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
5826         if (s->env->macsr & MACSR_FI)
5827             gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
5828         else if (s->env->macsr & MACSR_SU)
5829             gen_helper_macsats(cpu_env, tcg_const_i32(acc));
5830         else
5831             gen_helper_macsatu(cpu_env, tcg_const_i32(acc));
5832 #if 0
5833         /* Disabled because conditional branches clobber temporary vars.  */
5834         if (l1 != -1)
5835             gen_set_label(l1);
5836 #endif
5837     }
5838     gen_helper_mac_set_flags(cpu_env, tcg_const_i32(acc));
5839 
5840     if (insn & 0x30) {
5841         TCGv rw;
5842         rw = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
5843         tcg_gen_mov_i32(rw, loadval);
5844         /*
5845          * FIXME: Should address writeback happen with the masked or
5846          * unmasked value?
5847          */
5848         switch ((insn >> 3) & 7) {
5849         case 3: /* Post-increment.  */
5850             tcg_gen_addi_i32(AREG(insn, 0), addr, 4);
5851             break;
5852         case 4: /* Pre-decrement.  */
5853             tcg_gen_mov_i32(AREG(insn, 0), addr);
5854         }
5855         tcg_temp_free(loadval);
5856     }
5857 }
5858 
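/*
 * MOVE from ACCx: read an accumulator into Rx, converting or saturating
 * as required by the MACSR mode.  Bit 6 of the opcode also clears the
 * accumulator and its per-accumulator overflow flag.
 */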
5859 DISAS_INSN(from_mac)
5860 {
5861     TCGv rx;
5862     TCGv_i64 acc;
5863     int accnum;
5864 
5865     rx = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5866     accnum = (insn >> 9) & 3;
5867     acc = MACREG(accnum);
5868     if (s->env->macsr & MACSR_FI) {
5869         gen_helper_get_macf(rx, cpu_env, acc);
5870     } else if ((s->env->macsr & MACSR_OMC) == 0) {
5871         tcg_gen_extrl_i64_i32(rx, acc);
5872     } else if (s->env->macsr & MACSR_SU) {
5873         gen_helper_get_macs(rx, acc);
5874     } else {
5875         gen_helper_get_macu(rx, acc);
5876     }
5877     if (insn & 0x40) {
5878         tcg_gen_movi_i64(acc, 0);
5879         tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5880     }
5881 }
5882 
5883 DISAS_INSN(move_mac)
5884 {
5885     /* FIXME: This can be done without a helper.  */
5886     int src;
5887     TCGv dest;
5888     src = insn & 3;
5889     dest = tcg_const_i32((insn >> 9) & 3);
5890     gen_helper_mac_move(cpu_env, dest, tcg_const_i32(src));
5891     gen_mac_clear_flags();
5892     gen_helper_mac_set_flags(cpu_env, dest);
5893 }
5894 
5895 DISAS_INSN(from_macsr)
5896 {
5897     TCGv reg;
5898 
5899     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5900     tcg_gen_mov_i32(reg, QREG_MACSR);
5901 }
5902 
5903 DISAS_INSN(from_mask)
5904 {
5905     TCGv reg;
5906     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5907     tcg_gen_mov_i32(reg, QREG_MAC_MASK);
5908 }
5909 
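/*
 * MOVE from ACCext01/ACCext23: bit 10 of the opcode selects which pair
 * of accumulator extension words is read; the helper used depends on
 * whether MACSR selects fractional or integer mode.
 */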
5910 DISAS_INSN(from_mext)
5911 {
5912     TCGv reg;
5913     TCGv acc;
5914     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5915     acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
5916     if (s->env->macsr & MACSR_FI)
5917         gen_helper_get_mac_extf(reg, cpu_env, acc);
5918     else
5919         gen_helper_get_mac_exti(reg, cpu_env, acc);
5920 }
5921 
5922 DISAS_INSN(macsr_to_ccr)
5923 {
5924     TCGv tmp = tcg_temp_new();
5925 
5926     /* Note that X and C are always cleared. */
5927     tcg_gen_andi_i32(tmp, QREG_MACSR, CCF_N | CCF_Z | CCF_V);
5928     gen_helper_set_ccr(cpu_env, tmp);
5929     tcg_temp_free(tmp);
5930     set_cc_op(s, CC_OP_FLAGS);
5931 }
5932 
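/*
 * MOVE to ACCx: load an accumulator from the source operand.  In
 * fractional mode the value is sign-extended and shifted left 8 bits to
 * match the internal accumulator format; integer modes simply sign- or
 * zero-extend.  The accumulator's overflow flag is cleared.
 */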
5933 DISAS_INSN(to_mac)
5934 {
5935     TCGv_i64 acc;
5936     TCGv val;
5937     int accnum;
5938     accnum = (insn >> 9) & 3;
5939     acc = MACREG(accnum);
5940     SRC_EA(env, val, OS_LONG, 0, NULL);
5941     if (s->env->macsr & MACSR_FI) {
5942         tcg_gen_ext_i32_i64(acc, val);
5943         tcg_gen_shli_i64(acc, acc, 8);
5944     } else if (s->env->macsr & MACSR_SU) {
5945         tcg_gen_ext_i32_i64(acc, val);
5946     } else {
5947         tcg_gen_extu_i32_i64(acc, val);
5948     }
5949     tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5950     gen_mac_clear_flags();
5951     gen_helper_mac_set_flags(cpu_env, tcg_const_i32(accnum));
5952 }
5953 
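/*
 * MOVE to MACSR: the new mode bits affect how subsequent MAC insns are
 * translated, so exit the TB after updating the register.
 */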
5954 DISAS_INSN(to_macsr)
5955 {
5956     TCGv val;
5957     SRC_EA(env, val, OS_LONG, 0, NULL);
5958     gen_helper_set_macsr(cpu_env, val);
5959     gen_exit_tb(s);
5960 }
5961 
5962 DISAS_INSN(to_mask)
5963 {
5964     TCGv val;
5965     SRC_EA(env, val, OS_LONG, 0, NULL);
5966     tcg_gen_ori_i32(QREG_MAC_MASK, val, 0xffff0000);
5967 }
5968 
5969 DISAS_INSN(to_mext)
5970 {
5971     TCGv val;
5972     TCGv acc;
5973     SRC_EA(env, val, OS_LONG, 0, NULL);
5974     acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
5975     if (s->env->macsr & MACSR_FI)
5976         gen_helper_set_mac_extf(cpu_env, val, acc);
5977     else if (s->env->macsr & MACSR_SU)
5978         gen_helper_set_mac_exts(cpu_env, val, acc);
5979     else
5980         gen_helper_set_mac_extu(cpu_env, val, acc);
5981 }
5982 
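/*
 * Dispatch table with one entry per 16-bit opcode value, filled in by
 * register_opcode() below.  Anything not overridden by a later
 * registration stays routed to the disas_undef* handlers registered
 * first.
 */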
5983 static disas_proc opcode_table[65536];
5984 
5985 static void
5986 register_opcode (disas_proc proc, uint16_t opcode, uint16_t mask)
5987 {
5988   int i;
5989   int from;
5990   int to;
5991 
5992   /* Sanity check.  All set bits must be included in the mask.  */
5993   if (opcode & ~mask) {
5994       fprintf(stderr,
5995               "qemu internal error: bogus opcode definition %04x/%04x\n",
5996               opcode, mask);
5997       abort();
5998   }
5999   /*
6000    * This could probably be cleverer.  For now just optimize the case where
6001    * the top bits are known.
6002    */
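  /*
   * Example: register_opcode(disas_scc, 0x50c0, 0xf0f8) finds the first
   * clear mask bit at 0x0800, so it walks i = 0x5000..0x5fff and installs
   * the handler at every i with (i & 0xf0f8) == 0x50c0.
   */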
6003   /* Find the first zero bit in the mask.  */
6004   i = 0x8000;
6005   while ((i & mask) != 0)
6006       i >>= 1;
6007   /* Iterate over all combinations of this and lower bits.  */
6008   if (i == 0)
6009       i = 1;
6010   else
6011       i <<= 1;
6012   from = opcode & ~(i - 1);
6013   to = from + i;
6014   for (i = from; i < to; i++) {
6015       if ((i & mask) == opcode)
6016           opcode_table[i] = proc;
6017   }
6018 }
6019 
6020 /*
6021  * Register m68k opcode handlers.  Order is important.
6022  * Later insns override earlier ones.
6023  */
6024 void register_m68k_insns (CPUM68KState *env)
6025 {
6026     /*
6027      * Build the opcode table only once to avoid
6028      * multithreading issues.
6029      */
6030     if (opcode_table[0] != NULL) {
6031         return;
6032     }
6033 
6034     /*
6035      * Use BASE() for instructions available on
6036      * both CF_ISA_A and M68000.
6037      */
6038 #define BASE(name, opcode, mask) \
6039     register_opcode(disas_##name, 0x##opcode, 0x##mask)
6040 #define INSN(name, opcode, mask, feature) do { \
6041     if (m68k_feature(env, M68K_FEATURE_##feature)) \
6042         BASE(name, opcode, mask); \
6043     } while(0)
6044     BASE(undef,     0000, 0000);
6045     INSN(arith_im,  0080, fff8, CF_ISA_A);
6046     INSN(arith_im,  0000, ff00, M68K);
6047     INSN(chk2,      00c0, f9c0, CHK2);
6048     INSN(bitrev,    00c0, fff8, CF_ISA_APLUSC);
6049     BASE(bitop_reg, 0100, f1c0);
6050     BASE(bitop_reg, 0140, f1c0);
6051     BASE(bitop_reg, 0180, f1c0);
6052     BASE(bitop_reg, 01c0, f1c0);
6053     INSN(movep,     0108, f138, MOVEP);
6054     INSN(arith_im,  0280, fff8, CF_ISA_A);
6055     INSN(arith_im,  0200, ff00, M68K);
6056     INSN(undef,     02c0, ffc0, M68K);
6057     INSN(byterev,   02c0, fff8, CF_ISA_APLUSC);
6058     INSN(arith_im,  0480, fff8, CF_ISA_A);
6059     INSN(arith_im,  0400, ff00, M68K);
6060     INSN(undef,     04c0, ffc0, M68K);
6061     INSN(arith_im,  0600, ff00, M68K);
6062     INSN(undef,     06c0, ffc0, M68K);
6063     INSN(ff1,       04c0, fff8, CF_ISA_APLUSC);
6064     INSN(arith_im,  0680, fff8, CF_ISA_A);
6065     INSN(arith_im,  0c00, ff38, CF_ISA_A);
6066     INSN(arith_im,  0c00, ff00, M68K);
6067     BASE(bitop_im,  0800, ffc0);
6068     BASE(bitop_im,  0840, ffc0);
6069     BASE(bitop_im,  0880, ffc0);
6070     BASE(bitop_im,  08c0, ffc0);
6071     INSN(arith_im,  0a80, fff8, CF_ISA_A);
6072     INSN(arith_im,  0a00, ff00, M68K);
6073 #if defined(CONFIG_SOFTMMU)
6074     INSN(moves,     0e00, ff00, M68K);
6075 #endif
6076     INSN(cas,       0ac0, ffc0, CAS);
6077     INSN(cas,       0cc0, ffc0, CAS);
6078     INSN(cas,       0ec0, ffc0, CAS);
6079     INSN(cas2w,     0cfc, ffff, CAS);
6080     INSN(cas2l,     0efc, ffff, CAS);
6081     BASE(move,      1000, f000);
6082     BASE(move,      2000, f000);
6083     BASE(move,      3000, f000);
6084     INSN(chk,       4000, f040, M68K);
6085     INSN(strldsr,   40e7, ffff, CF_ISA_APLUSC);
6086     INSN(negx,      4080, fff8, CF_ISA_A);
6087     INSN(negx,      4000, ff00, M68K);
6088     INSN(undef,     40c0, ffc0, M68K);
6089     INSN(move_from_sr, 40c0, fff8, CF_ISA_A);
6090     INSN(move_from_sr, 40c0, ffc0, M68K);
6091     BASE(lea,       41c0, f1c0);
6092     BASE(clr,       4200, ff00);
6093     BASE(undef,     42c0, ffc0);
6094     INSN(move_from_ccr, 42c0, fff8, CF_ISA_A);
6095     INSN(move_from_ccr, 42c0, ffc0, M68K);
6096     INSN(neg,       4480, fff8, CF_ISA_A);
6097     INSN(neg,       4400, ff00, M68K);
6098     INSN(undef,     44c0, ffc0, M68K);
6099     BASE(move_to_ccr, 44c0, ffc0);
6100     INSN(not,       4680, fff8, CF_ISA_A);
6101     INSN(not,       4600, ff00, M68K);
6102 #if defined(CONFIG_SOFTMMU)
6103     BASE(move_to_sr, 46c0, ffc0);
6104 #endif
6105     INSN(nbcd,      4800, ffc0, M68K);
6106     INSN(linkl,     4808, fff8, M68K);
6107     BASE(pea,       4840, ffc0);
6108     BASE(swap,      4840, fff8);
6109     INSN(bkpt,      4848, fff8, BKPT);
6110     INSN(movem,     48d0, fbf8, CF_ISA_A);
6111     INSN(movem,     48e8, fbf8, CF_ISA_A);
6112     INSN(movem,     4880, fb80, M68K);
6113     BASE(ext,       4880, fff8);
6114     BASE(ext,       48c0, fff8);
6115     BASE(ext,       49c0, fff8);
6116     BASE(tst,       4a00, ff00);
6117     INSN(tas,       4ac0, ffc0, CF_ISA_B);
6118     INSN(tas,       4ac0, ffc0, M68K);
6119 #if defined(CONFIG_SOFTMMU)
6120     INSN(halt,      4ac8, ffff, CF_ISA_A);
6121     INSN(halt,      4ac8, ffff, M68K);
6122 #endif
6123     INSN(pulse,     4acc, ffff, CF_ISA_A);
6124     BASE(illegal,   4afc, ffff);
6125     INSN(mull,      4c00, ffc0, CF_ISA_A);
6126     INSN(mull,      4c00, ffc0, LONG_MULDIV);
6127     INSN(divl,      4c40, ffc0, CF_ISA_A);
6128     INSN(divl,      4c40, ffc0, LONG_MULDIV);
6129     INSN(sats,      4c80, fff8, CF_ISA_B);
6130     BASE(trap,      4e40, fff0);
6131     BASE(link,      4e50, fff8);
6132     BASE(unlk,      4e58, fff8);
6133 #if defined(CONFIG_SOFTMMU)
6134     INSN(move_to_usp, 4e60, fff8, USP);
6135     INSN(move_from_usp, 4e68, fff8, USP);
6136     INSN(reset,     4e70, ffff, M68K);
6137     BASE(stop,      4e72, ffff);
6138     BASE(rte,       4e73, ffff);
6139     INSN(cf_movec,  4e7b, ffff, CF_ISA_A);
6140     INSN(m68k_movec, 4e7a, fffe, MOVEC);
6141 #endif
6142     BASE(nop,       4e71, ffff);
6143     INSN(rtd,       4e74, ffff, RTD);
6144     BASE(rts,       4e75, ffff);
6145     INSN(trapv,     4e76, ffff, M68K);
6146     INSN(rtr,       4e77, ffff, M68K);
6147     BASE(jump,      4e80, ffc0);
6148     BASE(jump,      4ec0, ffc0);
6149     INSN(addsubq,   5000, f080, M68K);
6150     BASE(addsubq,   5080, f0c0);
6151     INSN(scc,       50c0, f0f8, CF_ISA_A); /* Scc.B Dx   */
6152     INSN(scc,       50c0, f0c0, M68K);     /* Scc.B <EA> */
6153     INSN(dbcc,      50c8, f0f8, M68K);
6154     INSN(trapcc,    50fa, f0fe, TRAPCC);   /* opmode 010, 011 */
6155     INSN(trapcc,    50fc, f0ff, TRAPCC);   /* opmode 100 */
6156     INSN(trapcc,    51fa, fffe, CF_ISA_A); /* TPF (trapf) opmode 010, 011 */
6157     INSN(trapcc,    51fc, ffff, CF_ISA_A); /* TPF (trapf) opmode 100 */
6158 
6159     /* Branch instructions.  */
6160     BASE(branch,    6000, f000);
6161     /* Disable long branch instructions, then add back the ones we want.  */
6162     BASE(undef,     60ff, f0ff); /* All long branches.  */
6163     INSN(branch,    60ff, f0ff, CF_ISA_B);
6164     INSN(undef,     60ff, ffff, CF_ISA_B); /* bra.l */
6165     INSN(branch,    60ff, ffff, BRAL);
6166     INSN(branch,    60ff, f0ff, BCCL);
6167 
6168     BASE(moveq,     7000, f100);
6169     INSN(mvzs,      7100, f100, CF_ISA_B);
6170     BASE(or,        8000, f000);
6171     BASE(divw,      80c0, f0c0);
6172     INSN(sbcd_reg,  8100, f1f8, M68K);
6173     INSN(sbcd_mem,  8108, f1f8, M68K);
6174     BASE(addsub,    9000, f000);
6175     INSN(undef,     90c0, f0c0, CF_ISA_A);
6176     INSN(subx_reg,  9180, f1f8, CF_ISA_A);
6177     INSN(subx_reg,  9100, f138, M68K);
6178     INSN(subx_mem,  9108, f138, M68K);
6179     INSN(suba,      91c0, f1c0, CF_ISA_A);
6180     INSN(suba,      90c0, f0c0, M68K);
6181 
6182     BASE(undef_mac, a000, f000);
6183     INSN(mac,       a000, f100, CF_EMAC);
6184     INSN(from_mac,  a180, f9b0, CF_EMAC);
6185     INSN(move_mac,  a110, f9fc, CF_EMAC);
6186     INSN(from_macsr,a980, f9f0, CF_EMAC);
6187     INSN(from_mask, ad80, fff0, CF_EMAC);
6188     INSN(from_mext, ab80, fbf0, CF_EMAC);
6189     INSN(macsr_to_ccr, a9c0, ffff, CF_EMAC);
6190     INSN(to_mac,    a100, f9c0, CF_EMAC);
6191     INSN(to_macsr,  a900, ffc0, CF_EMAC);
6192     INSN(to_mext,   ab00, fbc0, CF_EMAC);
6193     INSN(to_mask,   ad00, ffc0, CF_EMAC);
6194 
6195     INSN(mov3q,     a140, f1c0, CF_ISA_B);
6196     INSN(cmp,       b000, f1c0, CF_ISA_B); /* cmp.b */
6197     INSN(cmp,       b040, f1c0, CF_ISA_B); /* cmp.w */
6198     INSN(cmpa,      b0c0, f1c0, CF_ISA_B); /* cmpa.w */
6199     INSN(cmp,       b080, f1c0, CF_ISA_A);
6200     INSN(cmpa,      b1c0, f1c0, CF_ISA_A);
6201     INSN(cmp,       b000, f100, M68K);
6202     INSN(eor,       b100, f100, M68K);
6203     INSN(cmpm,      b108, f138, M68K);
6204     INSN(cmpa,      b0c0, f0c0, M68K);
6205     INSN(eor,       b180, f1c0, CF_ISA_A);
6206     BASE(and,       c000, f000);
6207     INSN(exg_dd,    c140, f1f8, M68K);
6208     INSN(exg_aa,    c148, f1f8, M68K);
6209     INSN(exg_da,    c188, f1f8, M68K);
6210     BASE(mulw,      c0c0, f0c0);
6211     INSN(abcd_reg,  c100, f1f8, M68K);
6212     INSN(abcd_mem,  c108, f1f8, M68K);
6213     BASE(addsub,    d000, f000);
6214     INSN(undef,     d0c0, f0c0, CF_ISA_A);
6215     INSN(addx_reg,      d180, f1f8, CF_ISA_A);
6216     INSN(addx_reg,  d100, f138, M68K);
6217     INSN(addx_mem,  d108, f138, M68K);
6218     INSN(adda,      d1c0, f1c0, CF_ISA_A);
6219     INSN(adda,      d0c0, f0c0, M68K);
6220     INSN(shift_im,  e080, f0f0, CF_ISA_A);
6221     INSN(shift_reg, e0a0, f0f0, CF_ISA_A);
6222     INSN(shift8_im, e000, f0f0, M68K);
6223     INSN(shift16_im, e040, f0f0, M68K);
6224     INSN(shift_im,  e080, f0f0, M68K);
6225     INSN(shift8_reg, e020, f0f0, M68K);
6226     INSN(shift16_reg, e060, f0f0, M68K);
6227     INSN(shift_reg, e0a0, f0f0, M68K);
6228     INSN(shift_mem, e0c0, fcc0, M68K);
6229     INSN(rotate_im, e090, f0f0, M68K);
6230     INSN(rotate8_im, e010, f0f0, M68K);
6231     INSN(rotate16_im, e050, f0f0, M68K);
6232     INSN(rotate_reg, e0b0, f0f0, M68K);
6233     INSN(rotate8_reg, e030, f0f0, M68K);
6234     INSN(rotate16_reg, e070, f0f0, M68K);
6235     INSN(rotate_mem, e4c0, fcc0, M68K);
6236     INSN(bfext_mem, e9c0, fdc0, BITFIELD);  /* bfextu & bfexts */
6237     INSN(bfext_reg, e9c0, fdf8, BITFIELD);
6238     INSN(bfins_mem, efc0, ffc0, BITFIELD);
6239     INSN(bfins_reg, efc0, fff8, BITFIELD);
6240     INSN(bfop_mem, eac0, ffc0, BITFIELD);   /* bfchg */
6241     INSN(bfop_reg, eac0, fff8, BITFIELD);   /* bfchg */
6242     INSN(bfop_mem, ecc0, ffc0, BITFIELD);   /* bfclr */
6243     INSN(bfop_reg, ecc0, fff8, BITFIELD);   /* bfclr */
6244     INSN(bfop_mem, edc0, ffc0, BITFIELD);   /* bfffo */
6245     INSN(bfop_reg, edc0, fff8, BITFIELD);   /* bfffo */
6246     INSN(bfop_mem, eec0, ffc0, BITFIELD);   /* bfset */
6247     INSN(bfop_reg, eec0, fff8, BITFIELD);   /* bfset */
6248     INSN(bfop_mem, e8c0, ffc0, BITFIELD);   /* bftst */
6249     INSN(bfop_reg, e8c0, fff8, BITFIELD);   /* bftst */
6250     BASE(undef_fpu, f000, f000);
6251     INSN(fpu,       f200, ffc0, CF_FPU);
6252     INSN(fbcc,      f280, ffc0, CF_FPU);
6253     INSN(fpu,       f200, ffc0, FPU);
6254     INSN(fscc,      f240, ffc0, FPU);
6255     INSN(ftrapcc,   f27a, fffe, FPU);       /* opmode 010, 011 */
6256     INSN(ftrapcc,   f27c, ffff, FPU);       /* opmode 100 */
6257     INSN(fbcc,      f280, ff80, FPU);
6258 #if defined(CONFIG_SOFTMMU)
6259     INSN(frestore,  f340, ffc0, CF_FPU);
6260     INSN(fsave,     f300, ffc0, CF_FPU);
6261     INSN(frestore,  f340, ffc0, FPU);
6262     INSN(fsave,     f300, ffc0, FPU);
6263     INSN(intouch,   f340, ffc0, CF_ISA_A);
6264     INSN(cpushl,    f428, ff38, CF_ISA_A);
6265     INSN(cpush,     f420, ff20, M68040);
6266     INSN(cinv,      f400, ff20, M68040);
6267     INSN(pflush,    f500, ffe0, M68040);
6268     INSN(ptest,     f548, ffd8, M68040);
6269     INSN(wddata,    fb00, ff00, CF_ISA_A);
6270     INSN(wdebug,    fbc0, ffc0, CF_ISA_A);
6271 #endif
6272     INSN(move16_mem, f600, ffe0, M68040);
6273     INSN(move16_reg, f620, fff8, M68040);
6274 #undef INSN
6275 }
6276 
6277 static void m68k_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cpu)
6278 {
6279     DisasContext *dc = container_of(dcbase, DisasContext, base);
6280     CPUM68KState *env = cpu->env_ptr;
6281 
6282     dc->env = env;
6283     dc->pc = dc->base.pc_first;
6284     /* This value will always be filled in properly before m68k_tr_tb_stop. */
6285     dc->pc_prev = 0xdeadbeef;
6286     dc->cc_op = CC_OP_DYNAMIC;
6287     dc->cc_op_synced = 1;
6288     dc->done_mac = 0;
6289     dc->writeback_mask = 0;
6290     init_release_array(dc);
6291 
6292     dc->ss_active = (M68K_SR_TRACE(env->sr) == M68K_SR_TRACE_ANY_INS);
6293     /* If architectural single step is active, limit each TB to one insn. */
6294     if (dc->ss_active) {
6295         dc->base.max_insns = 1;
6296     }
6297 }
6298 
6299 static void m68k_tr_tb_start(DisasContextBase *dcbase, CPUState *cpu)
6300 {
6301 }
6302 
6303 static void m68k_tr_insn_start(DisasContextBase *dcbase, CPUState *cpu)
6304 {
6305     DisasContext *dc = container_of(dcbase, DisasContext, base);
6306     tcg_gen_insn_start(dc->base.pc_next, dc->cc_op);
6307 }
6308 
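/*
 * Translate one instruction: dispatch through opcode_table on the full
 * 16-bit opcode, then flush any deferred EA writebacks and release the
 * temporaries accumulated for this insn.
 */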
6309 static void m68k_tr_translate_insn(DisasContextBase *dcbase, CPUState *cpu)
6310 {
6311     DisasContext *dc = container_of(dcbase, DisasContext, base);
6312     CPUM68KState *env = cpu->env_ptr;
6313     uint16_t insn = read_im16(env, dc);
6314 
6315     opcode_table[insn](env, dc, insn);
6316     do_writebacks(dc);
6317     do_release(dc);
6318 
6319     dc->pc_prev = dc->base.pc_next;
6320     dc->base.pc_next = dc->pc;
6321 
6322     if (dc->base.is_jmp == DISAS_NEXT) {
6323         /*
6324          * Stop translation when the next insn might touch a new page.
6325          * This ensures that prefetch aborts at the right place.
6326          *
6327          * We cannot determine the size of the next insn without
6328          * completely decoding it.  However, the maximum insn size
6329          * is 32 bytes, so end if we do not have that much remaining.
6330          * This may produce several small TBs at the end of each page,
6331          * but they will all be linked with goto_tb.
6332          *
6333          * ??? ColdFire maximum is 4 bytes; MC68000's maximum is also
6334          * smaller than MC68020's.
6335          */
6336         target_ulong start_page_offset
6337             = dc->pc - (dc->base.pc_first & TARGET_PAGE_MASK);
6338 
6339         if (start_page_offset >= TARGET_PAGE_SIZE - 32) {
6340             dc->base.is_jmp = DISAS_TOO_MANY;
6341         }
6342     }
6343 }
6344 
6345 static void m68k_tr_tb_stop(DisasContextBase *dcbase, CPUState *cpu)
6346 {
6347     DisasContext *dc = container_of(dcbase, DisasContext, base);
6348 
6349     switch (dc->base.is_jmp) {
6350     case DISAS_NORETURN:
6351         break;
6352     case DISAS_TOO_MANY:
6353         update_cc_op(dc);
6354         gen_jmp_tb(dc, 0, dc->pc, dc->pc_prev);
6355         break;
6356     case DISAS_JUMP:
6357         /* We updated CC_OP and PC in gen_jmp/gen_jmp_im.  */
6358         if (dc->ss_active) {
6359             gen_raise_exception_format2(dc, EXCP_TRACE, dc->pc_prev);
6360         } else {
6361             tcg_gen_lookup_and_goto_ptr();
6362         }
6363         break;
6364     case DISAS_EXIT:
6365         /*
6366          * We updated CC_OP and PC in gen_exit_tb, but also modified
6367          * other state that may require returning to the main loop.
6368          */
6369         if (dc->ss_active) {
6370             gen_raise_exception_format2(dc, EXCP_TRACE, dc->pc_prev);
6371         } else {
6372             tcg_gen_exit_tb(NULL, 0);
6373         }
6374         break;
6375     default:
6376         g_assert_not_reached();
6377     }
6378 }
6379 
6380 static void m68k_tr_disas_log(const DisasContextBase *dcbase,
6381                               CPUState *cpu, FILE *logfile)
6382 {
6383     fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
6384     target_disas(logfile, cpu, dcbase->pc_first, dcbase->tb->size);
6385 }
6386 
6387 static const TranslatorOps m68k_tr_ops = {
6388     .init_disas_context = m68k_tr_init_disas_context,
6389     .tb_start           = m68k_tr_tb_start,
6390     .insn_start         = m68k_tr_insn_start,
6391     .translate_insn     = m68k_tr_translate_insn,
6392     .tb_stop            = m68k_tr_tb_stop,
6393     .disas_log          = m68k_tr_disas_log,
6394 };
6395 
6396 void gen_intermediate_code(CPUState *cpu, TranslationBlock *tb, int max_insns,
6397                            target_ulong pc, void *host_pc)
6398 {
6399     DisasContext dc;
6400     translator_loop(cpu, tb, max_insns, pc, host_pc, &m68k_tr_ops, &dc.base);
6401 }
6402 
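/*
 * Convert the 80-bit FP register image to a host double purely for the
 * register dump below; the conversion may lose precision.
 */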
6403 static double floatx80_to_double(CPUM68KState *env, uint16_t high, uint64_t low)
6404 {
6405     floatx80 a = { .high = high, .low = low };
6406     union {
6407         float64 f64;
6408         double d;
6409     } u;
6410 
6411     u.f64 = floatx80_to_float64(a, &env->fp_status);
6412     return u.d;
6413 }
6414 
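/*
 * Dump the integer, FP and (in system mode) MMU register state, e.g. for
 * the "info registers" monitor command or -d cpu logging.
 */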
6415 void m68k_cpu_dump_state(CPUState *cs, FILE *f, int flags)
6416 {
6417     M68kCPU *cpu = M68K_CPU(cs);
6418     CPUM68KState *env = &cpu->env;
6419     int i;
6420     uint16_t sr;
6421     for (i = 0; i < 8; i++) {
6422         qemu_fprintf(f, "D%d = %08x   A%d = %08x   "
6423                      "F%d = %04x %016"PRIx64"  (%12g)\n",
6424                      i, env->dregs[i], i, env->aregs[i],
6425                      i, env->fregs[i].l.upper, env->fregs[i].l.lower,
6426                      floatx80_to_double(env, env->fregs[i].l.upper,
6427                                         env->fregs[i].l.lower));
6428     }
6429     qemu_fprintf(f, "PC = %08x   ", env->pc);
6430     sr = env->sr | cpu_m68k_get_ccr(env);
6431     qemu_fprintf(f, "SR = %04x T:%x I:%x %c%c %c%c%c%c%c\n",
6432                  sr, (sr & SR_T) >> SR_T_SHIFT, (sr & SR_I) >> SR_I_SHIFT,
6433                  (sr & SR_S) ? 'S' : 'U', (sr & SR_M) ? '%' : 'I',
6434                  (sr & CCF_X) ? 'X' : '-', (sr & CCF_N) ? 'N' : '-',
6435                  (sr & CCF_Z) ? 'Z' : '-', (sr & CCF_V) ? 'V' : '-',
6436                  (sr & CCF_C) ? 'C' : '-');
6437     qemu_fprintf(f, "FPSR = %08x %c%c%c%c ", env->fpsr,
6438                  (env->fpsr & FPSR_CC_A) ? 'A' : '-',
6439                  (env->fpsr & FPSR_CC_I) ? 'I' : '-',
6440                  (env->fpsr & FPSR_CC_Z) ? 'Z' : '-',
6441                  (env->fpsr & FPSR_CC_N) ? 'N' : '-');
6442     qemu_fprintf(f, "\n                                "
6443                  "FPCR =     %04x ", env->fpcr);
6444     switch (env->fpcr & FPCR_PREC_MASK) {
6445     case FPCR_PREC_X:
6446         qemu_fprintf(f, "X ");
6447         break;
6448     case FPCR_PREC_S:
6449         qemu_fprintf(f, "S ");
6450         break;
6451     case FPCR_PREC_D:
6452         qemu_fprintf(f, "D ");
6453         break;
6454     }
6455     switch (env->fpcr & FPCR_RND_MASK) {
6456     case FPCR_RND_N:
6457         qemu_fprintf(f, "RN ");
6458         break;
6459     case FPCR_RND_Z:
6460         qemu_fprintf(f, "RZ ");
6461         break;
6462     case FPCR_RND_M:
6463         qemu_fprintf(f, "RM ");
6464         break;
6465     case FPCR_RND_P:
6466         qemu_fprintf(f, "RP ");
6467         break;
6468     }
6469     qemu_fprintf(f, "\n");
6470 #ifdef CONFIG_SOFTMMU
6471     qemu_fprintf(f, "%sA7(MSP) = %08x %sA7(USP) = %08x %sA7(ISP) = %08x\n",
6472                  env->current_sp == M68K_SSP ? "->" : "  ", env->sp[M68K_SSP],
6473                  env->current_sp == M68K_USP ? "->" : "  ", env->sp[M68K_USP],
6474                  env->current_sp == M68K_ISP ? "->" : "  ", env->sp[M68K_ISP]);
6475     qemu_fprintf(f, "VBR = 0x%08x\n", env->vbr);
6476     qemu_fprintf(f, "SFC = %x DFC %x\n", env->sfc, env->dfc);
6477     qemu_fprintf(f, "SSW %08x TCR %08x URP %08x SRP %08x\n",
6478                  env->mmu.ssw, env->mmu.tcr, env->mmu.urp, env->mmu.srp);
6479     qemu_fprintf(f, "DTTR0/1: %08x/%08x ITTR0/1: %08x/%08x\n",
6480                  env->mmu.ttr[M68K_DTTR0], env->mmu.ttr[M68K_DTTR1],
6481                  env->mmu.ttr[M68K_ITTR0], env->mmu.ttr[M68K_ITTR1]);
6482     qemu_fprintf(f, "MMUSR %08x, fault at %08x\n",
6483                  env->mmu.mmusr, env->mmu.ar);
6484 #endif
6485 }
6486