xref: /qemu/target/m68k/translate.c (revision d0fb9657)
1 /*
2  *  m68k translation
3  *
4  *  Copyright (c) 2005-2007 CodeSourcery
5  *  Written by Paul Brook
6  *
7  * This library is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * This library is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "disas/disas.h"
24 #include "exec/exec-all.h"
25 #include "tcg/tcg-op.h"
26 #include "qemu/log.h"
27 #include "qemu/qemu-print.h"
28 #include "exec/cpu_ldst.h"
29 #include "exec/translator.h"
30 
31 #include "exec/helper-proto.h"
32 #include "exec/helper-gen.h"
33 
34 #include "trace-tcg.h"
35 #include "exec/log.h"
36 #include "fpu/softfloat.h"
37 
38 
39 //#define DEBUG_DISPATCH 1
40 
41 #define DEFO32(name, offset) static TCGv QREG_##name;
42 #define DEFO64(name, offset) static TCGv_i64 QREG_##name;
43 #include "qregs.def"
44 #undef DEFO32
45 #undef DEFO64
46 
47 static TCGv_i32 cpu_halted;
48 static TCGv_i32 cpu_exception_index;
49 
50 static char cpu_reg_names[2 * 8 * 3 + 5 * 4];
51 static TCGv cpu_dregs[8];
52 static TCGv cpu_aregs[8];
53 static TCGv_i64 cpu_macc[4];
54 
55 #define REG(insn, pos)  (((insn) >> (pos)) & 7)
56 #define DREG(insn, pos) cpu_dregs[REG(insn, pos)]
57 #define AREG(insn, pos) get_areg(s, REG(insn, pos))
58 #define MACREG(acc)     cpu_macc[acc]
59 #define QREG_SP         get_areg(s, 7)
60 
61 static TCGv NULL_QREG;
62 #define IS_NULL_QREG(t) ((t) == NULL_QREG)
63 /* Used to distinguish stores from bad addressing modes.  */
64 static TCGv store_dummy;
65 
66 #include "exec/gen-icount.h"
67 
68 void m68k_tcg_init(void)
69 {
70     char *p;
71     int i;
72 
73 #define DEFO32(name, offset) \
74     QREG_##name = tcg_global_mem_new_i32(cpu_env, \
75         offsetof(CPUM68KState, offset), #name);
76 #define DEFO64(name, offset) \
77     QREG_##name = tcg_global_mem_new_i64(cpu_env, \
78         offsetof(CPUM68KState, offset), #name);
79 #include "qregs.def"
80 #undef DEFO32
81 #undef DEFO64
82 
83     cpu_halted = tcg_global_mem_new_i32(cpu_env,
84                                         -offsetof(M68kCPU, env) +
85                                         offsetof(CPUState, halted), "HALTED");
86     cpu_exception_index = tcg_global_mem_new_i32(cpu_env,
87                                                  -offsetof(M68kCPU, env) +
88                                                  offsetof(CPUState, exception_index),
89                                                  "EXCEPTION");
90 
91     p = cpu_reg_names;
92     for (i = 0; i < 8; i++) {
93         sprintf(p, "D%d", i);
94         cpu_dregs[i] = tcg_global_mem_new(cpu_env,
95                                           offsetof(CPUM68KState, dregs[i]), p);
96         p += 3;
97         sprintf(p, "A%d", i);
98         cpu_aregs[i] = tcg_global_mem_new(cpu_env,
99                                           offsetof(CPUM68KState, aregs[i]), p);
100         p += 3;
101     }
102     for (i = 0; i < 4; i++) {
103         sprintf(p, "ACC%d", i);
104         cpu_macc[i] = tcg_global_mem_new_i64(cpu_env,
105                                          offsetof(CPUM68KState, macc[i]), p);
106         p += 5;
107     }
108 
109     NULL_QREG = tcg_global_mem_new(cpu_env, -4, "NULL");
110     store_dummy = tcg_global_mem_new(cpu_env, -8, "NULL");
111 }
112 
113 /* internal defines */
114 typedef struct DisasContext {
115     DisasContextBase base;
116     CPUM68KState *env;
117     target_ulong pc;
118     CCOp cc_op; /* Current CC operation */
119     int cc_op_synced;
120     TCGv_i64 mactmp;
121     int done_mac;
122     int writeback_mask;
123     TCGv writeback[8];
124 #define MAX_TO_RELEASE 8
125     int release_count;
126     TCGv release[MAX_TO_RELEASE];
127     bool ss_active;
128 } DisasContext;
129 
130 static void init_release_array(DisasContext *s)
131 {
132 #ifdef CONFIG_DEBUG_TCG
133     memset(s->release, 0, sizeof(s->release));
134 #endif
135     s->release_count = 0;
136 }
137 
138 static void do_release(DisasContext *s)
139 {
140     int i;
141     for (i = 0; i < s->release_count; i++) {
142         tcg_temp_free(s->release[i]);
143     }
144     init_release_array(s);
145 }
146 
147 static TCGv mark_to_release(DisasContext *s, TCGv tmp)
148 {
149     g_assert(s->release_count < MAX_TO_RELEASE);
150     return s->release[s->release_count++] = tmp;
151 }
152 
153 static TCGv get_areg(DisasContext *s, unsigned regno)
154 {
155     if (s->writeback_mask & (1 << regno)) {
156         return s->writeback[regno];
157     } else {
158         return cpu_aregs[regno];
159     }
160 }
161 
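/*
 * Record a pending update of address register REGNO with VAL; the value
 * is committed to cpu_aregs[] by do_writebacks() at the end of the insn.
 * If GIVE_TEMP, ownership of the temporary VAL passes to the writeback
 * slot instead of copying its value.
 */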
162 static void delay_set_areg(DisasContext *s, unsigned regno,
163                            TCGv val, bool give_temp)
164 {
165     if (s->writeback_mask & (1 << regno)) {
166         if (give_temp) {
167             tcg_temp_free(s->writeback[regno]);
168             s->writeback[regno] = val;
169         } else {
170             tcg_gen_mov_i32(s->writeback[regno], val);
171         }
172     } else {
173         s->writeback_mask |= 1 << regno;
174         if (give_temp) {
175             s->writeback[regno] = val;
176         } else {
177             TCGv tmp = tcg_temp_new();
178             s->writeback[regno] = tmp;
179             tcg_gen_mov_i32(tmp, val);
180         }
181     }
182 }
183 
184 static void do_writebacks(DisasContext *s)
185 {
186     unsigned mask = s->writeback_mask;
187     if (mask) {
188         s->writeback_mask = 0;
189         do {
190             unsigned regno = ctz32(mask);
191             tcg_gen_mov_i32(cpu_aregs[regno], s->writeback[regno]);
192             tcg_temp_free(s->writeback[regno]);
193             mask &= mask - 1;
194         } while (mask);
195     }
196 }
197 
198 static bool is_singlestepping(DisasContext *s)
199 {
200     /*
201      * Return true if we are singlestepping either because of
202      * architectural singlestep or QEMU gdbstub singlestep. This does
203      * not include the command line '-singlestep' mode, which is rather
204      * misnamed as it only means "one instruction per TB" and doesn't
205      * affect the code we generate.
206      */
207     return s->base.singlestep_enabled || s->ss_active;
208 }
209 
210 /* is_jmp field values */
211 #define DISAS_JUMP      DISAS_TARGET_0 /* only pc was modified dynamically */
212 #define DISAS_EXIT      DISAS_TARGET_1 /* cpu state was modified dynamically */
213 
214 #if defined(CONFIG_USER_ONLY)
215 #define IS_USER(s) 1
216 #else
217 #define IS_USER(s)   (!(s->base.tb->flags & TB_FLAGS_MSR_S))
218 #define SFC_INDEX(s) ((s->base.tb->flags & TB_FLAGS_SFC_S) ? \
219                       MMU_KERNEL_IDX : MMU_USER_IDX)
220 #define DFC_INDEX(s) ((s->base.tb->flags & TB_FLAGS_DFC_S) ? \
221                       MMU_KERNEL_IDX : MMU_USER_IDX)
222 #endif
223 
224 typedef void (*disas_proc)(CPUM68KState *env, DisasContext *s, uint16_t insn);
225 
226 #ifdef DEBUG_DISPATCH
227 #define DISAS_INSN(name)                                                \
228     static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
229                                   uint16_t insn);                       \
230     static void disas_##name(CPUM68KState *env, DisasContext *s,        \
231                              uint16_t insn)                             \
232     {                                                                   \
233         qemu_log("Dispatch " #name "\n");                               \
234         real_disas_##name(env, s, insn);                                \
235     }                                                                   \
236     static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
237                                   uint16_t insn)
238 #else
239 #define DISAS_INSN(name)                                                \
240     static void disas_##name(CPUM68KState *env, DisasContext *s,        \
241                              uint16_t insn)
242 #endif
243 
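/*
 * For each CC_OP, the set of flags whose QREG_CC_* storage holds live
 * data while that op is pending.  set_cc_op() uses this to discard
 * storage that the new op no longer needs; X and N are never discarded.
 */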
244 static const uint8_t cc_op_live[CC_OP_NB] = {
245     [CC_OP_DYNAMIC] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
246     [CC_OP_FLAGS] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
247     [CC_OP_ADDB ... CC_OP_ADDL] = CCF_X | CCF_N | CCF_V,
248     [CC_OP_SUBB ... CC_OP_SUBL] = CCF_X | CCF_N | CCF_V,
249     [CC_OP_CMPB ... CC_OP_CMPL] = CCF_X | CCF_N | CCF_V,
250     [CC_OP_LOGIC] = CCF_X | CCF_N
251 };
252 
253 static void set_cc_op(DisasContext *s, CCOp op)
254 {
255     CCOp old_op = s->cc_op;
256     int dead;
257 
258     if (old_op == op) {
259         return;
260     }
261     s->cc_op = op;
262     s->cc_op_synced = 0;
263 
264     /*
265      * Discard CC computation that will no longer be used.
266      * Note that X and N are never dead.
267      */
268     dead = cc_op_live[old_op] & ~cc_op_live[op];
269     if (dead & CCF_C) {
270         tcg_gen_discard_i32(QREG_CC_C);
271     }
272     if (dead & CCF_Z) {
273         tcg_gen_discard_i32(QREG_CC_Z);
274     }
275     if (dead & CCF_V) {
276         tcg_gen_discard_i32(QREG_CC_V);
277     }
278 }
279 
280 /* Update the CPU env CC_OP state.  */
281 static void update_cc_op(DisasContext *s)
282 {
283     if (!s->cc_op_synced) {
284         s->cc_op_synced = 1;
285         tcg_gen_movi_i32(QREG_CC_OP, s->cc_op);
286     }
287 }
288 
289 /* Generate a jump to an immediate address.  */
290 static void gen_jmp_im(DisasContext *s, uint32_t dest)
291 {
292     update_cc_op(s);
293     tcg_gen_movi_i32(QREG_PC, dest);
294     s->base.is_jmp = DISAS_JUMP;
295 }
296 
297 /* Generate a jump to the address in qreg DEST.  */
298 static void gen_jmp(DisasContext *s, TCGv dest)
299 {
300     update_cc_op(s);
301     tcg_gen_mov_i32(QREG_PC, dest);
302     s->base.is_jmp = DISAS_JUMP;
303 }
304 
305 static void gen_raise_exception(int nr)
306 {
307     TCGv_i32 tmp;
308 
309     tmp = tcg_const_i32(nr);
310     gen_helper_raise_exception(cpu_env, tmp);
311     tcg_temp_free_i32(tmp);
312 }
313 
314 static void gen_exception(DisasContext *s, uint32_t dest, int nr)
315 {
316     update_cc_op(s);
317     tcg_gen_movi_i32(QREG_PC, dest);
318 
319     gen_raise_exception(nr);
320 
321     s->base.is_jmp = DISAS_NORETURN;
322 }
323 
324 static void gen_singlestep_exception(DisasContext *s)
325 {
326     /*
327      * Generate the right kind of exception for singlestep, which is
328      * either the architectural singlestep or EXCP_DEBUG for QEMU's
329      * gdb singlestepping.
330      */
331     if (s->ss_active) {
332         gen_raise_exception(EXCP_TRACE);
333     } else {
334         gen_raise_exception(EXCP_DEBUG);
335     }
336 }
337 
338 static inline void gen_addr_fault(DisasContext *s)
339 {
340     gen_exception(s, s->base.pc_next, EXCP_ADDRESS);
341 }
342 
343 /*
344  * Generate a load from the specified address.  Narrow values are
345  * sign-extended to full register width.
346  */
347 static inline TCGv gen_load(DisasContext *s, int opsize, TCGv addr,
348                             int sign, int index)
349 {
350     TCGv tmp;
351     tmp = tcg_temp_new_i32();
352     switch(opsize) {
353     case OS_BYTE:
354         if (sign)
355             tcg_gen_qemu_ld8s(tmp, addr, index);
356         else
357             tcg_gen_qemu_ld8u(tmp, addr, index);
358         break;
359     case OS_WORD:
360         if (sign)
361             tcg_gen_qemu_ld16s(tmp, addr, index);
362         else
363             tcg_gen_qemu_ld16u(tmp, addr, index);
364         break;
365     case OS_LONG:
366         tcg_gen_qemu_ld32u(tmp, addr, index);
367         break;
368     default:
369         g_assert_not_reached();
370     }
371     return tmp;
372 }
373 
374 /* Generate a store.  */
375 static inline void gen_store(DisasContext *s, int opsize, TCGv addr, TCGv val,
376                              int index)
377 {
378     switch(opsize) {
379     case OS_BYTE:
380         tcg_gen_qemu_st8(val, addr, index);
381         break;
382     case OS_WORD:
383         tcg_gen_qemu_st16(val, addr, index);
384         break;
385     case OS_LONG:
386         tcg_gen_qemu_st32(val, addr, index);
387         break;
388     default:
389         g_assert_not_reached();
390     }
391 }
392 
393 typedef enum {
394     EA_STORE,
395     EA_LOADU,
396     EA_LOADS
397 } ea_what;
398 
399 /*
400  * Generate an unsigned load if WHAT is EA_LOADU, a signed load if WHAT is
401  * EA_LOADS, otherwise generate a store of VAL.
402  */
403 static TCGv gen_ldst(DisasContext *s, int opsize, TCGv addr, TCGv val,
404                      ea_what what, int index)
405 {
406     if (what == EA_STORE) {
407         gen_store(s, opsize, addr, val, index);
408         return store_dummy;
409     } else {
410         return mark_to_release(s, gen_load(s, opsize, addr,
411                                            what == EA_LOADS, index));
412     }
413 }
414 
415 /* Read a 16-bit immediate constant */
416 static inline uint16_t read_im16(CPUM68KState *env, DisasContext *s)
417 {
418     uint16_t im;
419     im = translator_lduw(env, s->pc);
420     s->pc += 2;
421     return im;
422 }
423 
424 /* Read an 8-bit immediate constant */
425 static inline uint8_t read_im8(CPUM68KState *env, DisasContext *s)
426 {
427     return read_im16(env, s);
428 }
429 
430 /* Read a 32-bit immediate constant.  */
431 static inline uint32_t read_im32(CPUM68KState *env, DisasContext *s)
432 {
433     uint32_t im;
434     im = read_im16(env, s) << 16;
435     im |= 0xffff & read_im16(env, s);
436     return im;
437 }
438 
439 /* Read a 64-bit immediate constant.  */
440 static inline uint64_t read_im64(CPUM68KState *env, DisasContext *s)
441 {
442     uint64_t im;
443     im = (uint64_t)read_im32(env, s) << 32;
444     im |= (uint64_t)read_im32(env, s);
445     return im;
446 }
447 
448 /* Calculate an address index.  */
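/*
 * The index register is chosen by bits 12-14 of the extension word
 * (an address register if bit 15 is set, else a data register), is
 * sign-extended from 16 bits if bit 11 is clear, and is scaled by
 * 1, 2, 4 or 8 according to bits 9-10.
 */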
449 static TCGv gen_addr_index(DisasContext *s, uint16_t ext, TCGv tmp)
450 {
451     TCGv add;
452     int scale;
453 
454     add = (ext & 0x8000) ? AREG(ext, 12) : DREG(ext, 12);
455     if ((ext & 0x800) == 0) {
456         tcg_gen_ext16s_i32(tmp, add);
457         add = tmp;
458     }
459     scale = (ext >> 9) & 3;
460     if (scale != 0) {
461         tcg_gen_shli_i32(tmp, add, scale);
462         add = tmp;
463     }
464     return add;
465 }
466 
467 /*
468  * Handle a base + index + displacement effective address.
469  * A NULL_QREG base means pc-relative.
470  */
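/*
 * In the full extension word format (bit 8 set): bit 7 suppresses the
 * base register, bit 6 suppresses the index register, bits 4-5 give the
 * base displacement size, and bits 0-2 select memory indirection and
 * the outer displacement size.
 */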
471 static TCGv gen_lea_indexed(CPUM68KState *env, DisasContext *s, TCGv base)
472 {
473     uint32_t offset;
474     uint16_t ext;
475     TCGv add;
476     TCGv tmp;
477     uint32_t bd, od;
478 
479     offset = s->pc;
480     ext = read_im16(env, s);
481 
482     if ((ext & 0x800) == 0 && !m68k_feature(s->env, M68K_FEATURE_WORD_INDEX))
483         return NULL_QREG;
484 
485     if (m68k_feature(s->env, M68K_FEATURE_M68000) &&
486         !m68k_feature(s->env, M68K_FEATURE_SCALED_INDEX)) {
487         ext &= ~(3 << 9);
488     }
489 
490     if (ext & 0x100) {
491         /* full extension word format */
492         if (!m68k_feature(s->env, M68K_FEATURE_EXT_FULL))
493             return NULL_QREG;
494 
495         if ((ext & 0x30) > 0x10) {
496             /* base displacement */
497             if ((ext & 0x30) == 0x20) {
498                 bd = (int16_t)read_im16(env, s);
499             } else {
500                 bd = read_im32(env, s);
501             }
502         } else {
503             bd = 0;
504         }
505         tmp = mark_to_release(s, tcg_temp_new());
506         if ((ext & 0x44) == 0) {
507             /* pre-index */
508             add = gen_addr_index(s, ext, tmp);
509         } else {
510             add = NULL_QREG;
511         }
512         if ((ext & 0x80) == 0) {
513             /* base not suppressed */
514             if (IS_NULL_QREG(base)) {
515                 base = mark_to_release(s, tcg_const_i32(offset + bd));
516                 bd = 0;
517             }
518             if (!IS_NULL_QREG(add)) {
519                 tcg_gen_add_i32(tmp, add, base);
520                 add = tmp;
521             } else {
522                 add = base;
523             }
524         }
525         if (!IS_NULL_QREG(add)) {
526             if (bd != 0) {
527                 tcg_gen_addi_i32(tmp, add, bd);
528                 add = tmp;
529             }
530         } else {
531             add = mark_to_release(s, tcg_const_i32(bd));
532         }
533         if ((ext & 3) != 0) {
534             /* memory indirect */
535             base = mark_to_release(s, gen_load(s, OS_LONG, add, 0, IS_USER(s)));
536             if ((ext & 0x44) == 4) {
537                 add = gen_addr_index(s, ext, tmp);
538                 tcg_gen_add_i32(tmp, add, base);
539                 add = tmp;
540             } else {
541                 add = base;
542             }
543             if ((ext & 3) > 1) {
544                 /* outer displacement */
545                 if ((ext & 3) == 2) {
546                     od = (int16_t)read_im16(env, s);
547                 } else {
548                     od = read_im32(env, s);
549                 }
550             } else {
551                 od = 0;
552             }
553             if (od != 0) {
554                 tcg_gen_addi_i32(tmp, add, od);
555                 add = tmp;
556             }
557         }
558     } else {
559         /* brief extension word format */
560         tmp = mark_to_release(s, tcg_temp_new());
561         add = gen_addr_index(s, ext, tmp);
562         if (!IS_NULL_QREG(base)) {
563             tcg_gen_add_i32(tmp, add, base);
564             if ((int8_t)ext)
565                 tcg_gen_addi_i32(tmp, tmp, (int8_t)ext);
566         } else {
567             tcg_gen_addi_i32(tmp, add, offset + (int8_t)ext);
568         }
569         add = tmp;
570     }
571     return add;
572 }
573 
574 /* Sign or zero extend a value.  */
575 
576 static inline void gen_ext(TCGv res, TCGv val, int opsize, int sign)
577 {
578     switch (opsize) {
579     case OS_BYTE:
580         if (sign) {
581             tcg_gen_ext8s_i32(res, val);
582         } else {
583             tcg_gen_ext8u_i32(res, val);
584         }
585         break;
586     case OS_WORD:
587         if (sign) {
588             tcg_gen_ext16s_i32(res, val);
589         } else {
590             tcg_gen_ext16u_i32(res, val);
591         }
592         break;
593     case OS_LONG:
594         tcg_gen_mov_i32(res, val);
595         break;
596     default:
597         g_assert_not_reached();
598     }
599 }
600 
601 /* Evaluate all the CC flags.  */
602 
603 static void gen_flush_flags(DisasContext *s)
604 {
605     TCGv t0, t1;
606 
607     switch (s->cc_op) {
608     case CC_OP_FLAGS:
609         return;
610 
611     case CC_OP_ADDB:
612     case CC_OP_ADDW:
613     case CC_OP_ADDL:
614         tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
615         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
616         /* Compute signed overflow for addition.  */
617         t0 = tcg_temp_new();
618         t1 = tcg_temp_new();
619         tcg_gen_sub_i32(t0, QREG_CC_N, QREG_CC_V);
620         gen_ext(t0, t0, s->cc_op - CC_OP_ADDB, 1);
621         tcg_gen_xor_i32(t1, QREG_CC_N, QREG_CC_V);
622         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
623         tcg_temp_free(t0);
624         tcg_gen_andc_i32(QREG_CC_V, t1, QREG_CC_V);
625         tcg_temp_free(t1);
626         break;
627 
628     case CC_OP_SUBB:
629     case CC_OP_SUBW:
630     case CC_OP_SUBL:
631         tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
632         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
633         /* Compute signed overflow for subtraction.  */
634         t0 = tcg_temp_new();
635         t1 = tcg_temp_new();
636         tcg_gen_add_i32(t0, QREG_CC_N, QREG_CC_V);
637         gen_ext(t0, t0, s->cc_op - CC_OP_SUBB, 1);
638         tcg_gen_xor_i32(t1, QREG_CC_N, t0);
639         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
640         tcg_temp_free(t0);
641         tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t1);
642         tcg_temp_free(t1);
643         break;
644 
645     case CC_OP_CMPB:
646     case CC_OP_CMPW:
647     case CC_OP_CMPL:
648         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_C, QREG_CC_N, QREG_CC_V);
649         tcg_gen_sub_i32(QREG_CC_Z, QREG_CC_N, QREG_CC_V);
650         gen_ext(QREG_CC_Z, QREG_CC_Z, s->cc_op - CC_OP_CMPB, 1);
651         /* Compute signed overflow for subtraction.  */
652         t0 = tcg_temp_new();
653         tcg_gen_xor_i32(t0, QREG_CC_Z, QREG_CC_N);
654         tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, QREG_CC_N);
655         tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t0);
656         tcg_temp_free(t0);
657         tcg_gen_mov_i32(QREG_CC_N, QREG_CC_Z);
658         break;
659 
660     case CC_OP_LOGIC:
661         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
662         tcg_gen_movi_i32(QREG_CC_C, 0);
663         tcg_gen_movi_i32(QREG_CC_V, 0);
664         break;
665 
666     case CC_OP_DYNAMIC:
667         gen_helper_flush_flags(cpu_env, QREG_CC_OP);
668         s->cc_op_synced = 1;
669         break;
670 
671     default:
672         t0 = tcg_const_i32(s->cc_op);
673         gen_helper_flush_flags(cpu_env, t0);
674         tcg_temp_free(t0);
675         s->cc_op_synced = 1;
676         break;
677     }
678 
679     /* Note that flush_flags also assigns to env->cc_op.  */
680     s->cc_op = CC_OP_FLAGS;
681 }
682 
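/*
 * Return VAL extended to 32 bits according to OPSIZE.  OS_LONG values
 * are returned unchanged; narrower values are copied into a temporary
 * that is released at the end of the insn.
 */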
683 static inline TCGv gen_extend(DisasContext *s, TCGv val, int opsize, int sign)
684 {
685     TCGv tmp;
686 
687     if (opsize == OS_LONG) {
688         tmp = val;
689     } else {
690         tmp = mark_to_release(s, tcg_temp_new());
691         gen_ext(tmp, val, opsize, sign);
692     }
693 
694     return tmp;
695 }
696 
697 static void gen_logic_cc(DisasContext *s, TCGv val, int opsize)
698 {
699     gen_ext(QREG_CC_N, val, opsize, 1);
700     set_cc_op(s, CC_OP_LOGIC);
701 }
702 
703 static void gen_update_cc_cmp(DisasContext *s, TCGv dest, TCGv src, int opsize)
704 {
705     tcg_gen_mov_i32(QREG_CC_N, dest);
706     tcg_gen_mov_i32(QREG_CC_V, src);
707     set_cc_op(s, CC_OP_CMPB + opsize);
708 }
709 
710 static void gen_update_cc_add(TCGv dest, TCGv src, int opsize)
711 {
712     gen_ext(QREG_CC_N, dest, opsize, 1);
713     tcg_gen_mov_i32(QREG_CC_V, src);
714 }
715 
716 static inline int opsize_bytes(int opsize)
717 {
718     switch (opsize) {
719     case OS_BYTE: return 1;
720     case OS_WORD: return 2;
721     case OS_LONG: return 4;
722     case OS_SINGLE: return 4;
723     case OS_DOUBLE: return 8;
724     case OS_EXTENDED: return 12;
725     case OS_PACKED: return 12;
726     default:
727         g_assert_not_reached();
728     }
729 }
730 
731 static inline int insn_opsize(int insn)
732 {
733     switch ((insn >> 6) & 3) {
734     case 0: return OS_BYTE;
735     case 1: return OS_WORD;
736     case 2: return OS_LONG;
737     default:
738         g_assert_not_reached();
739     }
740 }
741 
742 static inline int ext_opsize(int ext, int pos)
743 {
744     switch ((ext >> pos) & 7) {
745     case 0: return OS_LONG;
746     case 1: return OS_SINGLE;
747     case 2: return OS_EXTENDED;
748     case 3: return OS_PACKED;
749     case 4: return OS_WORD;
750     case 5: return OS_DOUBLE;
751     case 6: return OS_BYTE;
752     default:
753         g_assert_not_reached();
754     }
755 }
756 
757 /*
758  * Assign value to a register.  If the width is less than the register width
759  * Assign value to a register.  If the width is less than the register width,
760  */
761 static void gen_partset_reg(int opsize, TCGv reg, TCGv val)
762 {
763     TCGv tmp;
764     switch (opsize) {
765     case OS_BYTE:
766         tcg_gen_andi_i32(reg, reg, 0xffffff00);
767         tmp = tcg_temp_new();
768         tcg_gen_ext8u_i32(tmp, val);
769         tcg_gen_or_i32(reg, reg, tmp);
770         tcg_temp_free(tmp);
771         break;
772     case OS_WORD:
773         tcg_gen_andi_i32(reg, reg, 0xffff0000);
774         tmp = tcg_temp_new();
775         tcg_gen_ext16u_i32(tmp, val);
776         tcg_gen_or_i32(reg, reg, tmp);
777         tcg_temp_free(tmp);
778         break;
779     case OS_LONG:
780     case OS_SINGLE:
781         tcg_gen_mov_i32(reg, val);
782         break;
783     default:
784         g_assert_not_reached();
785     }
786 }
787 
788 /*
789  * Generate code for an "effective address".  Does not adjust the base
790  * register for autoincrement addressing modes.
791  */
792 static TCGv gen_lea_mode(CPUM68KState *env, DisasContext *s,
793                          int mode, int reg0, int opsize)
794 {
795     TCGv reg;
796     TCGv tmp;
797     uint16_t ext;
798     uint32_t offset;
799 
800     switch (mode) {
801     case 0: /* Data register direct.  */
802     case 1: /* Address register direct.  */
803         return NULL_QREG;
804     case 3: /* Indirect postincrement.  */
805         if (opsize == OS_UNSIZED) {
806             return NULL_QREG;
807         }
808         /* fallthru */
809     case 2: /* Indirect register */
810         return get_areg(s, reg0);
811     case 4: /* Indirect predecrement.  */
812         if (opsize == OS_UNSIZED) {
813             return NULL_QREG;
814         }
815         reg = get_areg(s, reg0);
816         tmp = mark_to_release(s, tcg_temp_new());
817         if (reg0 == 7 && opsize == OS_BYTE &&
818             m68k_feature(s->env, M68K_FEATURE_M68000)) {
819             tcg_gen_subi_i32(tmp, reg, 2);
820         } else {
821             tcg_gen_subi_i32(tmp, reg, opsize_bytes(opsize));
822         }
823         return tmp;
824     case 5: /* Indirect displacement.  */
825         reg = get_areg(s, reg0);
826         tmp = mark_to_release(s, tcg_temp_new());
827         ext = read_im16(env, s);
828         tcg_gen_addi_i32(tmp, reg, (int16_t)ext);
829         return tmp;
830     case 6: /* Indirect index + displacement.  */
831         reg = get_areg(s, reg0);
832         return gen_lea_indexed(env, s, reg);
833     case 7: /* Other */
834         switch (reg0) {
835         case 0: /* Absolute short.  */
836             offset = (int16_t)read_im16(env, s);
837             return mark_to_release(s, tcg_const_i32(offset));
838         case 1: /* Absolute long.  */
839             offset = read_im32(env, s);
840             return mark_to_release(s, tcg_const_i32(offset));
841         case 2: /* pc displacement  */
842             offset = s->pc;
843             offset += (int16_t)read_im16(env, s);
844             return mark_to_release(s, tcg_const_i32(offset));
845         case 3: /* pc index+displacement.  */
846             return gen_lea_indexed(env, s, NULL_QREG);
847         case 4: /* Immediate.  */
848         default:
849             return NULL_QREG;
850         }
851     }
852     /* Should never happen.  */
853     return NULL_QREG;
854 }
855 
856 static TCGv gen_lea(CPUM68KState *env, DisasContext *s, uint16_t insn,
857                     int opsize)
858 {
859     int mode = extract32(insn, 3, 3);
860     int reg0 = REG(insn, 0);
861     return gen_lea_mode(env, s, mode, reg0, opsize);
862 }
863 
864 /*
865  * Generate code to load/store a value from/into an EA.  If WHAT is EA_STORE
866  * this is a write, otherwise it is a read (EA_LOADS sign extends, EA_LOADU
867  * zero extends).  ADDRP is non-null for read-write operands.
868  */
869 static TCGv gen_ea_mode(CPUM68KState *env, DisasContext *s, int mode, int reg0,
870                         int opsize, TCGv val, TCGv *addrp, ea_what what,
871                         int index)
872 {
873     TCGv reg, tmp, result;
874     int32_t offset;
875 
876     switch (mode) {
877     case 0: /* Data register direct.  */
878         reg = cpu_dregs[reg0];
879         if (what == EA_STORE) {
880             gen_partset_reg(opsize, reg, val);
881             return store_dummy;
882         } else {
883             return gen_extend(s, reg, opsize, what == EA_LOADS);
884         }
885     case 1: /* Address register direct.  */
886         reg = get_areg(s, reg0);
887         if (what == EA_STORE) {
888             tcg_gen_mov_i32(reg, val);
889             return store_dummy;
890         } else {
891             return gen_extend(s, reg, opsize, what == EA_LOADS);
892         }
893     case 2: /* Indirect register */
894         reg = get_areg(s, reg0);
895         return gen_ldst(s, opsize, reg, val, what, index);
896     case 3: /* Indirect postincrement.  */
897         reg = get_areg(s, reg0);
898         result = gen_ldst(s, opsize, reg, val, what, index);
899         if (what == EA_STORE || !addrp) {
900             TCGv tmp = tcg_temp_new();
901             if (reg0 == 7 && opsize == OS_BYTE &&
902                 m68k_feature(s->env, M68K_FEATURE_M68000)) {
903                 tcg_gen_addi_i32(tmp, reg, 2);
904             } else {
905                 tcg_gen_addi_i32(tmp, reg, opsize_bytes(opsize));
906             }
907             delay_set_areg(s, reg0, tmp, true);
908         }
909         return result;
910     case 4: /* Indirect predecrement.  */
911         if (addrp && what == EA_STORE) {
912             tmp = *addrp;
913         } else {
914             tmp = gen_lea_mode(env, s, mode, reg0, opsize);
915             if (IS_NULL_QREG(tmp)) {
916                 return tmp;
917             }
918             if (addrp) {
919                 *addrp = tmp;
920             }
921         }
922         result = gen_ldst(s, opsize, tmp, val, what, index);
923         if (what == EA_STORE || !addrp) {
924             delay_set_areg(s, reg0, tmp, false);
925         }
926         return result;
927     case 5: /* Indirect displacement.  */
928     case 6: /* Indirect index + displacement.  */
929     do_indirect:
930         if (addrp && what == EA_STORE) {
931             tmp = *addrp;
932         } else {
933             tmp = gen_lea_mode(env, s, mode, reg0, opsize);
934             if (IS_NULL_QREG(tmp)) {
935                 return tmp;
936             }
937             if (addrp) {
938                 *addrp = tmp;
939             }
940         }
941         return gen_ldst(s, opsize, tmp, val, what, index);
942     case 7: /* Other */
943         switch (reg0) {
944         case 0: /* Absolute short.  */
945         case 1: /* Absolute long.  */
946         case 2: /* pc displacement  */
947         case 3: /* pc index+displacement.  */
948             goto do_indirect;
949         case 4: /* Immediate.  */
950             /* Sign extend values for consistency.  */
951             switch (opsize) {
952             case OS_BYTE:
953                 if (what == EA_LOADS) {
954                     offset = (int8_t)read_im8(env, s);
955                 } else {
956                     offset = read_im8(env, s);
957                 }
958                 break;
959             case OS_WORD:
960                 if (what == EA_LOADS) {
961                     offset = (int16_t)read_im16(env, s);
962                 } else {
963                     offset = read_im16(env, s);
964                 }
965                 break;
966             case OS_LONG:
967                 offset = read_im32(env, s);
968                 break;
969             default:
970                 g_assert_not_reached();
971             }
972             return mark_to_release(s, tcg_const_i32(offset));
973         default:
974             return NULL_QREG;
975         }
976     }
977     /* Should never happen.  */
978     return NULL_QREG;
979 }
980 
981 static TCGv gen_ea(CPUM68KState *env, DisasContext *s, uint16_t insn,
982                    int opsize, TCGv val, TCGv *addrp, ea_what what, int index)
983 {
984     int mode = extract32(insn, 3, 3);
985     int reg0 = REG(insn, 0);
986     return gen_ea_mode(env, s, mode, reg0, opsize, val, addrp, what, index);
987 }
988 
989 static TCGv_ptr gen_fp_ptr(int freg)
990 {
991     TCGv_ptr fp = tcg_temp_new_ptr();
992     tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fregs[freg]));
993     return fp;
994 }
995 
996 static TCGv_ptr gen_fp_result_ptr(void)
997 {
998     TCGv_ptr fp = tcg_temp_new_ptr();
999     tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fp_result));
1000     return fp;
1001 }
1002 
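/*
 * Copy one 80-bit FP register to another: the 16-bit sign/exponent word
 * (l.upper) and the 64-bit mantissa (l.lower) are moved separately.
 */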
1003 static void gen_fp_move(TCGv_ptr dest, TCGv_ptr src)
1004 {
1005     TCGv t32;
1006     TCGv_i64 t64;
1007 
1008     t32 = tcg_temp_new();
1009     tcg_gen_ld16u_i32(t32, src, offsetof(FPReg, l.upper));
1010     tcg_gen_st16_i32(t32, dest, offsetof(FPReg, l.upper));
1011     tcg_temp_free(t32);
1012 
1013     t64 = tcg_temp_new_i64();
1014     tcg_gen_ld_i64(t64, src, offsetof(FPReg, l.lower));
1015     tcg_gen_st_i64(t64, dest, offsetof(FPReg, l.lower));
1016     tcg_temp_free_i64(t64);
1017 }
1018 
1019 static void gen_load_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
1020                         int index)
1021 {
1022     TCGv tmp;
1023     TCGv_i64 t64;
1024 
1025     t64 = tcg_temp_new_i64();
1026     tmp = tcg_temp_new();
1027     switch (opsize) {
1028     case OS_BYTE:
1029         tcg_gen_qemu_ld8s(tmp, addr, index);
1030         gen_helper_exts32(cpu_env, fp, tmp);
1031         break;
1032     case OS_WORD:
1033         tcg_gen_qemu_ld16s(tmp, addr, index);
1034         gen_helper_exts32(cpu_env, fp, tmp);
1035         break;
1036     case OS_LONG:
1037         tcg_gen_qemu_ld32u(tmp, addr, index);
1038         gen_helper_exts32(cpu_env, fp, tmp);
1039         break;
1040     case OS_SINGLE:
1041         tcg_gen_qemu_ld32u(tmp, addr, index);
1042         gen_helper_extf32(cpu_env, fp, tmp);
1043         break;
1044     case OS_DOUBLE:
1045         tcg_gen_qemu_ld64(t64, addr, index);
1046         gen_helper_extf64(cpu_env, fp, t64);
1047         break;
1048     case OS_EXTENDED:
1049         if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1050             gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1051             break;
1052         }
1053         tcg_gen_qemu_ld32u(tmp, addr, index);
1054         tcg_gen_shri_i32(tmp, tmp, 16);
1055         tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
1056         tcg_gen_addi_i32(tmp, addr, 4);
1057         tcg_gen_qemu_ld64(t64, tmp, index);
1058         tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
1059         break;
1060     case OS_PACKED:
1061         /*
1062          * unimplemented data type on 68040/ColdFire
1063          * FIXME if needed for another FPU
1064          */
1065         gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1066         break;
1067     default:
1068         g_assert_not_reached();
1069     }
1070     tcg_temp_free(tmp);
1071     tcg_temp_free_i64(t64);
1072 }
1073 
1074 static void gen_store_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
1075                          int index)
1076 {
1077     TCGv tmp;
1078     TCGv_i64 t64;
1079 
1080     t64 = tcg_temp_new_i64();
1081     tmp = tcg_temp_new();
1082     switch (opsize) {
1083     case OS_BYTE:
1084         gen_helper_reds32(tmp, cpu_env, fp);
1085         tcg_gen_qemu_st8(tmp, addr, index);
1086         break;
1087     case OS_WORD:
1088         gen_helper_reds32(tmp, cpu_env, fp);
1089         tcg_gen_qemu_st16(tmp, addr, index);
1090         break;
1091     case OS_LONG:
1092         gen_helper_reds32(tmp, cpu_env, fp);
1093         tcg_gen_qemu_st32(tmp, addr, index);
1094         break;
1095     case OS_SINGLE:
1096         gen_helper_redf32(tmp, cpu_env, fp);
1097         tcg_gen_qemu_st32(tmp, addr, index);
1098         break;
1099     case OS_DOUBLE:
1100         gen_helper_redf64(t64, cpu_env, fp);
1101         tcg_gen_qemu_st64(t64, addr, index);
1102         break;
1103     case OS_EXTENDED:
1104         if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1105             gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1106             break;
1107         }
1108         tcg_gen_ld16u_i32(tmp, fp, offsetof(FPReg, l.upper));
1109         tcg_gen_shli_i32(tmp, tmp, 16);
1110         tcg_gen_qemu_st32(tmp, addr, index);
1111         tcg_gen_addi_i32(tmp, addr, 4);
1112         tcg_gen_ld_i64(t64, fp, offsetof(FPReg, l.lower));
1113         tcg_gen_qemu_st64(t64, tmp, index);
1114         break;
1115     case OS_PACKED:
1116         /*
1117          * unimplemented data type on 68040/ColdFire
1118          * FIXME if needed for another FPU
1119          */
1120         gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1121         break;
1122     default:
1123         g_assert_not_reached();
1124     }
1125     tcg_temp_free(tmp);
1126     tcg_temp_free_i64(t64);
1127 }
1128 
1129 static void gen_ldst_fp(DisasContext *s, int opsize, TCGv addr,
1130                         TCGv_ptr fp, ea_what what, int index)
1131 {
1132     if (what == EA_STORE) {
1133         gen_store_fp(s, opsize, addr, fp, index);
1134     } else {
1135         gen_load_fp(s, opsize, addr, fp, index);
1136     }
1137 }
1138 
1139 static int gen_ea_mode_fp(CPUM68KState *env, DisasContext *s, int mode,
1140                           int reg0, int opsize, TCGv_ptr fp, ea_what what,
1141                           int index)
1142 {
1143     TCGv reg, addr, tmp;
1144     TCGv_i64 t64;
1145 
1146     switch (mode) {
1147     case 0: /* Data register direct.  */
1148         reg = cpu_dregs[reg0];
1149         if (what == EA_STORE) {
1150             switch (opsize) {
1151             case OS_BYTE:
1152             case OS_WORD:
1153             case OS_LONG:
1154                 gen_helper_reds32(reg, cpu_env, fp);
1155                 break;
1156             case OS_SINGLE:
1157                 gen_helper_redf32(reg, cpu_env, fp);
1158                 break;
1159             default:
1160                 g_assert_not_reached();
1161             }
1162         } else {
1163             tmp = tcg_temp_new();
1164             switch (opsize) {
1165             case OS_BYTE:
1166                 tcg_gen_ext8s_i32(tmp, reg);
1167                 gen_helper_exts32(cpu_env, fp, tmp);
1168                 break;
1169             case OS_WORD:
1170                 tcg_gen_ext16s_i32(tmp, reg);
1171                 gen_helper_exts32(cpu_env, fp, tmp);
1172                 break;
1173             case OS_LONG:
1174                 gen_helper_exts32(cpu_env, fp, reg);
1175                 break;
1176             case OS_SINGLE:
1177                 gen_helper_extf32(cpu_env, fp, reg);
1178                 break;
1179             default:
1180                 g_assert_not_reached();
1181             }
1182             tcg_temp_free(tmp);
1183         }
1184         return 0;
1185     case 1: /* Address register direct.  */
1186         return -1;
1187     case 2: /* Indirect register */
1188         addr = get_areg(s, reg0);
1189         gen_ldst_fp(s, opsize, addr, fp, what, index);
1190         return 0;
1191     case 3: /* Indirect postincrement.  */
1192         addr = cpu_aregs[reg0];
1193         gen_ldst_fp(s, opsize, addr, fp, what, index);
1194         tcg_gen_addi_i32(addr, addr, opsize_bytes(opsize));
1195         return 0;
1196     case 4: /* Indirect predecrement.  */
1197         addr = gen_lea_mode(env, s, mode, reg0, opsize);
1198         if (IS_NULL_QREG(addr)) {
1199             return -1;
1200         }
1201         gen_ldst_fp(s, opsize, addr, fp, what, index);
1202         tcg_gen_mov_i32(cpu_aregs[reg0], addr);
1203         return 0;
1204     case 5: /* Indirect displacement.  */
1205     case 6: /* Indirect index + displacement.  */
1206     do_indirect:
1207         addr = gen_lea_mode(env, s, mode, reg0, opsize);
1208         if (IS_NULL_QREG(addr)) {
1209             return -1;
1210         }
1211         gen_ldst_fp(s, opsize, addr, fp, what, index);
1212         return 0;
1213     case 7: /* Other */
1214         switch (reg0) {
1215         case 0: /* Absolute short.  */
1216         case 1: /* Absolute long.  */
1217         case 2: /* pc displacement  */
1218         case 3: /* pc index+displacement.  */
1219             goto do_indirect;
1220         case 4: /* Immediate.  */
1221             if (what == EA_STORE) {
1222                 return -1;
1223             }
1224             switch (opsize) {
1225             case OS_BYTE:
1226                 tmp = tcg_const_i32((int8_t)read_im8(env, s));
1227                 gen_helper_exts32(cpu_env, fp, tmp);
1228                 tcg_temp_free(tmp);
1229                 break;
1230             case OS_WORD:
1231                 tmp = tcg_const_i32((int16_t)read_im16(env, s));
1232                 gen_helper_exts32(cpu_env, fp, tmp);
1233                 tcg_temp_free(tmp);
1234                 break;
1235             case OS_LONG:
1236                 tmp = tcg_const_i32(read_im32(env, s));
1237                 gen_helper_exts32(cpu_env, fp, tmp);
1238                 tcg_temp_free(tmp);
1239                 break;
1240             case OS_SINGLE:
1241                 tmp = tcg_const_i32(read_im32(env, s));
1242                 gen_helper_extf32(cpu_env, fp, tmp);
1243                 tcg_temp_free(tmp);
1244                 break;
1245             case OS_DOUBLE:
1246                 t64 = tcg_const_i64(read_im64(env, s));
1247                 gen_helper_extf64(cpu_env, fp, t64);
1248                 tcg_temp_free_i64(t64);
1249                 break;
1250             case OS_EXTENDED:
1251                 if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1252                     gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1253                     break;
1254                 }
1255                 tmp = tcg_const_i32(read_im32(env, s) >> 16);
1256                 tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
1257                 tcg_temp_free(tmp);
1258                 t64 = tcg_const_i64(read_im64(env, s));
1259                 tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
1260                 tcg_temp_free_i64(t64);
1261                 break;
1262             case OS_PACKED:
1263                 /*
1264                  * unimplemented data type on 68040/ColdFire
1265                  * FIXME if needed for another FPU
1266                  */
1267                 gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1268                 break;
1269             default:
1270                 g_assert_not_reached();
1271             }
1272             return 0;
1273         default:
1274             return -1;
1275         }
1276     }
1277     return -1;
1278 }
1279 
1280 static int gen_ea_fp(CPUM68KState *env, DisasContext *s, uint16_t insn,
1281                        int opsize, TCGv_ptr fp, ea_what what, int index)
1282 {
1283     int mode = extract32(insn, 3, 3);
1284     int reg0 = REG(insn, 0);
1285     return gen_ea_mode_fp(env, s, mode, reg0, opsize, fp, what, index);
1286 }
1287 
1288 typedef struct {
1289     TCGCond tcond;
1290     bool g1;
1291     bool g2;
1292     TCGv v1;
1293     TCGv v2;
1294 } DisasCompare;
1295 
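/*
 * Fill in *C with a TCG comparison (tcond, v1, v2) that evaluates true
 * exactly when m68k condition code COND holds, using the current cc_op
 * to avoid a full flag flush where possible.
 */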
1296 static void gen_cc_cond(DisasCompare *c, DisasContext *s, int cond)
1297 {
1298     TCGv tmp, tmp2;
1299     TCGCond tcond;
1300     CCOp op = s->cc_op;
1301 
1302     /* The CC_OP_CMP form can handle most normal comparisons directly.  */
1303     if (op == CC_OP_CMPB || op == CC_OP_CMPW || op == CC_OP_CMPL) {
1304         c->g1 = c->g2 = 1;
1305         c->v1 = QREG_CC_N;
1306         c->v2 = QREG_CC_V;
1307         switch (cond) {
1308         case 2: /* HI */
1309         case 3: /* LS */
1310             tcond = TCG_COND_LEU;
1311             goto done;
1312         case 4: /* CC */
1313         case 5: /* CS */
1314             tcond = TCG_COND_LTU;
1315             goto done;
1316         case 6: /* NE */
1317         case 7: /* EQ */
1318             tcond = TCG_COND_EQ;
1319             goto done;
1320         case 10: /* PL */
1321         case 11: /* MI */
1322             c->g1 = c->g2 = 0;
1323             c->v2 = tcg_const_i32(0);
1324             c->v1 = tmp = tcg_temp_new();
1325             tcg_gen_sub_i32(tmp, QREG_CC_N, QREG_CC_V);
1326             gen_ext(tmp, tmp, op - CC_OP_CMPB, 1);
1327             /* fallthru */
1328         case 12: /* GE */
1329         case 13: /* LT */
1330             tcond = TCG_COND_LT;
1331             goto done;
1332         case 14: /* GT */
1333         case 15: /* LE */
1334             tcond = TCG_COND_LE;
1335             goto done;
1336         }
1337     }
1338 
1339     c->g1 = 1;
1340     c->g2 = 0;
1341     c->v2 = tcg_const_i32(0);
1342 
1343     switch (cond) {
1344     case 0: /* T */
1345     case 1: /* F */
1346         c->v1 = c->v2;
1347         tcond = TCG_COND_NEVER;
1348         goto done;
1349     case 14: /* GT (!(Z || (N ^ V))) */
1350     case 15: /* LE (Z || (N ^ V)) */
1351         /*
1352          * Logic operations clear V, which simplifies LE to (Z || N),
1353          * and since Z and N are co-located, this becomes a normal
1354          * comparison vs N.
1355          */
1356         if (op == CC_OP_LOGIC) {
1357             c->v1 = QREG_CC_N;
1358             tcond = TCG_COND_LE;
1359             goto done;
1360         }
1361         break;
1362     case 12: /* GE (!(N ^ V)) */
1363     case 13: /* LT (N ^ V) */
1364         /* Logic operations clear V, which simplifies this to N.  */
1365         if (op != CC_OP_LOGIC) {
1366             break;
1367         }
1368         /* fallthru */
1369     case 10: /* PL (!N) */
1370     case 11: /* MI (N) */
1371         /* Several cases represent N normally.  */
1372         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1373             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
1374             op == CC_OP_LOGIC) {
1375             c->v1 = QREG_CC_N;
1376             tcond = TCG_COND_LT;
1377             goto done;
1378         }
1379         break;
1380     case 6: /* NE (!Z) */
1381     case 7: /* EQ (Z) */
1382         /* Some cases fold Z into N.  */
1383         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1384             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
1385             op == CC_OP_LOGIC) {
1386             tcond = TCG_COND_EQ;
1387             c->v1 = QREG_CC_N;
1388             goto done;
1389         }
1390         break;
1391     case 4: /* CC (!C) */
1392     case 5: /* CS (C) */
1393         /* Some cases fold C into X.  */
1394         if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1395             op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL) {
1396             tcond = TCG_COND_NE;
1397             c->v1 = QREG_CC_X;
1398             goto done;
1399         }
1400         /* fallthru */
1401     case 8: /* VC (!V) */
1402     case 9: /* VS (V) */
1403         /* Logic operations clear V and C.  */
1404         if (op == CC_OP_LOGIC) {
1405             tcond = TCG_COND_NEVER;
1406             c->v1 = c->v2;
1407             goto done;
1408         }
1409         break;
1410     }
1411 
1412     /* Otherwise, flush flag state to CC_OP_FLAGS.  */
1413     gen_flush_flags(s);
1414 
1415     switch (cond) {
1416     case 0: /* T */
1417     case 1: /* F */
1418     default:
1419         /* Invalid, or handled above.  */
1420         abort();
1421     case 2: /* HI (!C && !Z) -> !(C || Z) */
1422     case 3: /* LS (C || Z) */
1423         c->v1 = tmp = tcg_temp_new();
1424         c->g1 = 0;
1425         tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
1426         tcg_gen_or_i32(tmp, tmp, QREG_CC_C);
1427         tcond = TCG_COND_NE;
1428         break;
1429     case 4: /* CC (!C) */
1430     case 5: /* CS (C) */
1431         c->v1 = QREG_CC_C;
1432         tcond = TCG_COND_NE;
1433         break;
1434     case 6: /* NE (!Z) */
1435     case 7: /* EQ (Z) */
1436         c->v1 = QREG_CC_Z;
1437         tcond = TCG_COND_EQ;
1438         break;
1439     case 8: /* VC (!V) */
1440     case 9: /* VS (V) */
1441         c->v1 = QREG_CC_V;
1442         tcond = TCG_COND_LT;
1443         break;
1444     case 10: /* PL (!N) */
1445     case 11: /* MI (N) */
1446         c->v1 = QREG_CC_N;
1447         tcond = TCG_COND_LT;
1448         break;
1449     case 12: /* GE (!(N ^ V)) */
1450     case 13: /* LT (N ^ V) */
1451         c->v1 = tmp = tcg_temp_new();
1452         c->g1 = 0;
1453         tcg_gen_xor_i32(tmp, QREG_CC_N, QREG_CC_V);
1454         tcond = TCG_COND_LT;
1455         break;
1456     case 14: /* GT (!(Z || (N ^ V))) */
1457     case 15: /* LE (Z || (N ^ V)) */
1458         c->v1 = tmp = tcg_temp_new();
1459         c->g1 = 0;
1460         tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
1461         tcg_gen_neg_i32(tmp, tmp);
1462         tmp2 = tcg_temp_new();
1463         tcg_gen_xor_i32(tmp2, QREG_CC_N, QREG_CC_V);
1464         tcg_gen_or_i32(tmp, tmp, tmp2);
1465         tcg_temp_free(tmp2);
1466         tcond = TCG_COND_LT;
1467         break;
1468     }
1469 
1470  done:
1471     if ((cond & 1) == 0) {
1472         tcond = tcg_invert_cond(tcond);
1473     }
1474     c->tcond = tcond;
1475 }
1476 
1477 static void free_cond(DisasCompare *c)
1478 {
1479     if (!c->g1) {
1480         tcg_temp_free(c->v1);
1481     }
1482     if (!c->g2) {
1483         tcg_temp_free(c->v2);
1484     }
1485 }
1486 
1487 static void gen_jmpcc(DisasContext *s, int cond, TCGLabel *l1)
1488 {
1489     DisasCompare c;
1490 
1491     gen_cc_cond(&c, s, cond);
1492     update_cc_op(s);
1493     tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
1494     free_cond(&c);
1495 }
1496 
1497 /* Force a TB lookup after an instruction that changes the CPU state.  */
1498 static void gen_exit_tb(DisasContext *s)
1499 {
1500     update_cc_op(s);
1501     tcg_gen_movi_i32(QREG_PC, s->pc);
1502     s->base.is_jmp = DISAS_EXIT;
1503 }
1504 
1505 #define SRC_EA(env, result, opsize, op_sign, addrp) do {                \
1506         result = gen_ea(env, s, insn, opsize, NULL_QREG, addrp,         \
1507                         op_sign ? EA_LOADS : EA_LOADU, IS_USER(s));     \
1508         if (IS_NULL_QREG(result)) {                                     \
1509             gen_addr_fault(s);                                          \
1510             return;                                                     \
1511         }                                                               \
1512     } while (0)
1513 
1514 #define DEST_EA(env, insn, opsize, val, addrp) do {                     \
1515         TCGv ea_result = gen_ea(env, s, insn, opsize, val, addrp,       \
1516                                 EA_STORE, IS_USER(s));                  \
1517         if (IS_NULL_QREG(ea_result)) {                                  \
1518             gen_addr_fault(s);                                          \
1519             return;                                                     \
1520         }                                                               \
1521     } while (0)
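/*
 * Typical use in a read-modify-write insn handler (an illustrative
 * sketch, not code taken from this file): fetch the operand, combine
 * it, then store back through the same address:
 *
 *     SRC_EA(env, src, OS_WORD, 1, &addr);
 *     tcg_gen_add_i32(dest, dest, src);
 *     DEST_EA(env, insn, OS_WORD, dest, &addr);
 */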
1522 
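/*
 * Direct TB chaining is only used when DEST lies on the same guest page
 * as the start or the end of the current TB.
 */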
1523 static inline bool use_goto_tb(DisasContext *s, uint32_t dest)
1524 {
1525 #ifndef CONFIG_USER_ONLY
1526     return (s->base.pc_first & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK)
1527         || (s->base.pc_next & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);
1528 #else
1529     return true;
1530 #endif
1531 }
1532 
1533 /* Generate a jump to an immediate address.  */
1534 static void gen_jmp_tb(DisasContext *s, int n, uint32_t dest)
1535 {
1536     if (unlikely(is_singlestepping(s))) {
1537         update_cc_op(s);
1538         tcg_gen_movi_i32(QREG_PC, dest);
1539         gen_singlestep_exception(s);
1540     } else if (use_goto_tb(s, dest)) {
1541         tcg_gen_goto_tb(n);
1542         tcg_gen_movi_i32(QREG_PC, dest);
1543         tcg_gen_exit_tb(s->base.tb, n);
1544     } else {
1545         gen_jmp_im(s, dest);
1546         tcg_gen_exit_tb(NULL, 0);
1547     }
1548     s->base.is_jmp = DISAS_NORETURN;
1549 }
1550 
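/* Scc: set a byte to 0xff if the condition holds, else to 0x00.  */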
1551 DISAS_INSN(scc)
1552 {
1553     DisasCompare c;
1554     int cond;
1555     TCGv tmp;
1556 
1557     cond = (insn >> 8) & 0xf;
1558     gen_cc_cond(&c, s, cond);
1559 
1560     tmp = tcg_temp_new();
1561     tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
1562     free_cond(&c);
1563 
1564     tcg_gen_neg_i32(tmp, tmp);
1565     DEST_EA(env, insn, OS_BYTE, tmp, NULL);
1566     tcg_temp_free(tmp);
1567 }
1568 
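/*
 * DBcc: if the condition is false, decrement the low word of Dn and
 * branch back unless the counter has become -1.
 */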
1569 DISAS_INSN(dbcc)
1570 {
1571     TCGLabel *l1;
1572     TCGv reg;
1573     TCGv tmp;
1574     int16_t offset;
1575     uint32_t base;
1576 
1577     reg = DREG(insn, 0);
1578     base = s->pc;
1579     offset = (int16_t)read_im16(env, s);
1580     l1 = gen_new_label();
1581     gen_jmpcc(s, (insn >> 8) & 0xf, l1);
1582 
1583     tmp = tcg_temp_new();
1584     tcg_gen_ext16s_i32(tmp, reg);
1585     tcg_gen_addi_i32(tmp, tmp, -1);
1586     gen_partset_reg(OS_WORD, reg, tmp);
1587     tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, -1, l1);
1588     gen_jmp_tb(s, 1, base + offset);
1589     gen_set_label(l1);
1590     gen_jmp_tb(s, 0, s->pc);
1591 }
1592 
1593 DISAS_INSN(undef_mac)
1594 {
1595     gen_exception(s, s->base.pc_next, EXCP_LINEA);
1596 }
1597 
1598 DISAS_INSN(undef_fpu)
1599 {
1600     gen_exception(s, s->base.pc_next, EXCP_LINEF);
1601 }
1602 
1603 DISAS_INSN(undef)
1604 {
1605     /*
1606      * ??? This covers both instructions that are as yet unimplemented
1607      * for the 680x0 series, and those that are implemented
1608      * but actually illegal for CPU32 or pre-68020.
1609      */
1610     qemu_log_mask(LOG_UNIMP, "Illegal instruction: %04x @ %08x\n",
1611                   insn, s->base.pc_next);
1612     gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
1613 }
1614 
1615 DISAS_INSN(mulw)
1616 {
1617     TCGv reg;
1618     TCGv tmp;
1619     TCGv src;
1620     int sign;
1621 
1622     sign = (insn & 0x100) != 0;
1623     reg = DREG(insn, 9);
1624     tmp = tcg_temp_new();
1625     if (sign)
1626         tcg_gen_ext16s_i32(tmp, reg);
1627     else
1628         tcg_gen_ext16u_i32(tmp, reg);
1629     SRC_EA(env, src, OS_WORD, sign, NULL);
1630     tcg_gen_mul_i32(tmp, tmp, src);
1631     tcg_gen_mov_i32(reg, tmp);
1632     gen_logic_cc(s, tmp, OS_LONG);
1633     tcg_temp_free(tmp);
1634 }
1635 
1636 DISAS_INSN(divw)
1637 {
1638     int sign;
1639     TCGv src;
1640     TCGv destr;
1641 
1642     /* divX.w <EA>,Dn    32/16 -> 16r:16q */
1643 
1644     sign = (insn & 0x100) != 0;
1645 
1646     /* dest.l / src.w */
1647 
1648     SRC_EA(env, src, OS_WORD, sign, NULL);
1649     destr = tcg_const_i32(REG(insn, 9));
1650     if (sign) {
1651         gen_helper_divsw(cpu_env, destr, src);
1652     } else {
1653         gen_helper_divuw(cpu_env, destr, src);
1654     }
1655     tcg_temp_free(destr);
1656 
1657     set_cc_op(s, CC_OP_FLAGS);
1658 }
1659 
1660 DISAS_INSN(divl)
1661 {
1662     TCGv num, reg, den;
1663     int sign;
1664     uint16_t ext;
1665 
1666     ext = read_im16(env, s);
1667 
1668     sign = (ext & 0x0800) != 0;
1669 
1670     if (ext & 0x400) {
1671         if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
1672             gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
1673             return;
1674         }
1675 
1676         /* divX.l <EA>, Dr:Dq    64/32 -> 32r:32q */
1677 
1678         SRC_EA(env, den, OS_LONG, 0, NULL);
1679         num = tcg_const_i32(REG(ext, 12));
1680         reg = tcg_const_i32(REG(ext, 0));
1681         if (sign) {
1682             gen_helper_divsll(cpu_env, num, reg, den);
1683         } else {
1684             gen_helper_divull(cpu_env, num, reg, den);
1685         }
1686         tcg_temp_free(reg);
1687         tcg_temp_free(num);
1688         set_cc_op(s, CC_OP_FLAGS);
1689         return;
1690     }
1691 
1692     /* divX.l <EA>, Dq        32/32 -> 32q     */
1693     /* divXl.l <EA>, Dr:Dq    32/32 -> 32r:32q */
1694 
1695     SRC_EA(env, den, OS_LONG, 0, NULL);
1696     num = tcg_const_i32(REG(ext, 12));
1697     reg = tcg_const_i32(REG(ext, 0));
1698     if (sign) {
1699         gen_helper_divsl(cpu_env, num, reg, den);
1700     } else {
1701         gen_helper_divul(cpu_env, num, reg, den);
1702     }
1703     tcg_temp_free(reg);
1704     tcg_temp_free(num);
1705 
1706     set_cc_op(s, CC_OP_FLAGS);
1707 }
1708 
1709 static void bcd_add(TCGv dest, TCGv src)
1710 {
1711     TCGv t0, t1;
1712 
1713     /*
1714      * dest10 = dest10 + src10 + X
1715      *
1716      *        t1 = src
1717      *        t2 = t1 + 0x066
1718      *        t3 = t2 + dest + X
1719      *        t4 = t2 ^ dest
1720      *        t5 = t3 ^ t4
1721      *        t6 = ~t5 & 0x110
1722      *        t7 = (t6 >> 2) | (t6 >> 3)
1723      *        return t3 - t7
1724      */
1725 
1726     /*
1727      * t1 = (src + 0x066) + dest + X
1728      *    = result, possibly with an excess 0x6 in digits that did not carry
1729      */
1730 
1731     t0 = tcg_const_i32(0x066);
1732     tcg_gen_add_i32(t0, t0, src);
1733 
1734     t1 = tcg_temp_new();
1735     tcg_gen_add_i32(t1, t0, dest);
1736     tcg_gen_add_i32(t1, t1, QREG_CC_X);
1737 
1738     /* we will remove the excess 0x6 where there is no carry */
1739 
1740     /*
1741      * t0 = (src + 0x0066) ^ dest
1742      *    = t1 without carries
1743      */
1744 
1745     tcg_gen_xor_i32(t0, t0, dest);
1746 
1747     /*
1748      * extract the carries
1749      * t0 = t0 ^ t1
1750      *    = only the carries
1751      */
1752 
1753     tcg_gen_xor_i32(t0, t0, t1);
1754 
1755     /*
1756      * for each digit that did not produce a carry, build the 0x6
1757      * that must be removed (built below as t0 + t0 + t0)
1758      */
1759 
1760     tcg_gen_shri_i32(t0, t0, 3);
1761     tcg_gen_not_i32(t0, t0);
1762     tcg_gen_andi_i32(t0, t0, 0x22);
1763     tcg_gen_add_i32(dest, t0, t0);
1764     tcg_gen_add_i32(dest, dest, t0);
1765     tcg_temp_free(t0);
1766 
1767     /*
1768      * remove the excess 0x6 from digits
1769      * that did not generate a carry
1770      */
1771 
1772     tcg_gen_sub_i32(dest, t1, dest);
1773     tcg_temp_free(t1);
1774 }
1775 
1776 static void bcd_sub(TCGv dest, TCGv src)
1777 {
1778     TCGv t0, t1, t2;
1779 
1780     /*
1781      *  dest10 = dest10 - src10 - X
1782      *         = bcd_add(dest + 1 - X, 0x199 - src)
1783      */
1784 
1785     /* t0 = 0x066 + (0x199 - src) */
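    /* 0x066 + 0x199 = 0x1ff, so both constants fold into one subtraction */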
1786 
1787     t0 = tcg_temp_new();
1788     tcg_gen_subfi_i32(t0, 0x1ff, src);
1789 
1790     /* t1 = t0 + dest + 1 - X */
1791 
1792     t1 = tcg_temp_new();
1793     tcg_gen_add_i32(t1, t0, dest);
1794     tcg_gen_addi_i32(t1, t1, 1);
1795     tcg_gen_sub_i32(t1, t1, QREG_CC_X);
1796 
1797     /* t2 = t0 ^ dest */
1798 
1799     t2 = tcg_temp_new();
1800     tcg_gen_xor_i32(t2, t0, dest);
1801 
1802     /* t0 = t1 ^ t2 */
1803 
1804     tcg_gen_xor_i32(t0, t1, t2);
1805 
1806     /*
1807      * t2 = ~t0 & 0x110
1808      * t0 = (t2 >> 2) | (t2 >> 3)
1809      *
1810      * to fit on 8bit operands, changed in:
1811      * which is rewritten to fit 8-bit operands as:
1812      * t2 = ~(t0 >> 3) & 0x22
1813      * t0 = t2 + t2
1814      * t0 = t0 + t2
1815      */
1816 
1817     tcg_gen_shri_i32(t2, t0, 3);
1818     tcg_gen_not_i32(t2, t2);
1819     tcg_gen_andi_i32(t2, t2, 0x22);
1820     tcg_gen_add_i32(t0, t2, t2);
1821     tcg_gen_add_i32(t0, t0, t2);
1822     tcg_temp_free(t2);
1823 
1824     /* return t1 - t0 */
1825 
1826     tcg_gen_sub_i32(dest, t1, t0);
1827     tcg_temp_free(t0);
1828     tcg_temp_free(t1);
1829 }
1830 
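/*
 * Set the flags for a BCD result: C and X are taken from bit 8 of the
 * result (the carry out of the low byte), and Z is only ever cleared
 * here (never set), keeping !Z sticky across a chain of ABCD/SBCD/NBCD.
 */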
1831 static void bcd_flags(TCGv val)
1832 {
1833     tcg_gen_andi_i32(QREG_CC_C, val, 0x0ff);
1834     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_C);
1835 
1836     tcg_gen_extract_i32(QREG_CC_C, val, 8, 1);
1837 
1838     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
1839 }
1840 
1841 DISAS_INSN(abcd_reg)
1842 {
1843     TCGv src;
1844     TCGv dest;
1845 
1846     gen_flush_flags(s); /* !Z is sticky */
1847 
1848     src = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
1849     dest = gen_extend(s, DREG(insn, 9), OS_BYTE, 0);
1850     bcd_add(dest, src);
1851     gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);
1852 
1853     bcd_flags(dest);
1854 }
1855 
1856 DISAS_INSN(abcd_mem)
1857 {
1858     TCGv src, dest, addr;
1859 
1860     gen_flush_flags(s); /* !Z is sticky */
1861 
1862     /* Indirect pre-decrement load (mode 4) */
1863 
1864     src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
1865                       NULL_QREG, NULL, EA_LOADU, IS_USER(s));
1866     dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
1867                        NULL_QREG, &addr, EA_LOADU, IS_USER(s));
1868 
1869     bcd_add(dest, src);
1870 
1871     gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
1872                 EA_STORE, IS_USER(s));
1873 
1874     bcd_flags(dest);
1875 }
1876 
1877 DISAS_INSN(sbcd_reg)
1878 {
1879     TCGv src, dest;
1880 
1881     gen_flush_flags(s); /* !Z is sticky */
1882 
1883     src = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
1884     dest = gen_extend(s, DREG(insn, 9), OS_BYTE, 0);
1885 
1886     bcd_sub(dest, src);
1887 
1888     gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);
1889 
1890     bcd_flags(dest);
1891 }
1892 
1893 DISAS_INSN(sbcd_mem)
1894 {
1895     TCGv src, dest, addr;
1896 
1897     gen_flush_flags(s); /* !Z is sticky */
1898 
1899     /* Indirect pre-decrement load (mode 4) */
1900 
1901     src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
1902                       NULL_QREG, NULL, EA_LOADU, IS_USER(s));
1903     dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
1904                        NULL_QREG, &addr, EA_LOADU, IS_USER(s));
1905 
1906     bcd_sub(dest, src);
1907 
1908     gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
1909                 EA_STORE, IS_USER(s));
1910 
1911     bcd_flags(dest);
1912 }
1913 
1914 DISAS_INSN(nbcd)
1915 {
1916     TCGv src, dest;
1917     TCGv addr;
1918 
1919     gen_flush_flags(s); /* !Z is sticky */
1920 
1921     SRC_EA(env, src, OS_BYTE, 0, &addr);
1922 
1923     dest = tcg_const_i32(0);
1924     bcd_sub(dest, src);
1925 
1926     DEST_EA(env, insn, OS_BYTE, dest, &addr);
1927 
1928     bcd_flags(dest);
1929 
1930     tcg_temp_free(dest);
1931 }
1932 
1933 DISAS_INSN(addsub)
1934 {
1935     TCGv reg;
1936     TCGv dest;
1937     TCGv src;
1938     TCGv tmp;
1939     TCGv addr;
1940     int add;
1941     int opsize;
1942 
1943     add = (insn & 0x4000) != 0;
1944     opsize = insn_opsize(insn);
1945     reg = gen_extend(s, DREG(insn, 9), opsize, 1);
1946     dest = tcg_temp_new();
1947     if (insn & 0x100) {
1948         SRC_EA(env, tmp, opsize, 1, &addr);
1949         src = reg;
1950     } else {
1951         tmp = reg;
1952         SRC_EA(env, src, opsize, 1, NULL);
1953     }
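    /*
     * X is computed eagerly from the unsigned carry/borrow; the other
     * flags are evaluated lazily via CC_OP_ADDB/CC_OP_SUBB + opsize
     * from the operands recorded by gen_update_cc_add().
     */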
1954     if (add) {
1955         tcg_gen_add_i32(dest, tmp, src);
1956         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, src);
1957         set_cc_op(s, CC_OP_ADDB + opsize);
1958     } else {
1959         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, tmp, src);
1960         tcg_gen_sub_i32(dest, tmp, src);
1961         set_cc_op(s, CC_OP_SUBB + opsize);
1962     }
1963     gen_update_cc_add(dest, src, opsize);
1964     if (insn & 0x100) {
1965         DEST_EA(env, insn, opsize, dest, &addr);
1966     } else {
1967         gen_partset_reg(opsize, DREG(insn, 9), dest);
1968     }
1969     tcg_temp_free(dest);
1970 }
1971 
1972 /* Reverse the order of the bits in REG.  */
1973 DISAS_INSN(bitrev)
1974 {
1975     TCGv reg;
1976     reg = DREG(insn, 0);
1977     gen_helper_bitrev(reg, reg);
1978 }
1979 
1980 DISAS_INSN(bitop_reg)
1981 {
1982     int opsize;
1983     int op;
1984     TCGv src1;
1985     TCGv src2;
1986     TCGv tmp;
1987     TCGv addr;
1988     TCGv dest;
1989 
1990     if ((insn & 0x38) != 0)
1991         opsize = OS_BYTE;
1992     else
1993         opsize = OS_LONG;
1994     op = (insn >> 6) & 3;
1995     SRC_EA(env, src1, opsize, 0, op ? &addr: NULL);
1996 
1997     gen_flush_flags(s);
1998     src2 = tcg_temp_new();
1999     if (opsize == OS_BYTE)
2000         tcg_gen_andi_i32(src2, DREG(insn, 9), 7);
2001     else
2002         tcg_gen_andi_i32(src2, DREG(insn, 9), 31);
2003 
2004     tmp = tcg_const_i32(1);
2005     tcg_gen_shl_i32(tmp, tmp, src2);
2006     tcg_temp_free(src2);
2007 
2008     tcg_gen_and_i32(QREG_CC_Z, src1, tmp);
2009 
2010     dest = tcg_temp_new();
2011     switch (op) {
2012     case 1: /* bchg */
2013         tcg_gen_xor_i32(dest, src1, tmp);
2014         break;
2015     case 2: /* bclr */
2016         tcg_gen_andc_i32(dest, src1, tmp);
2017         break;
2018     case 3: /* bset */
2019         tcg_gen_or_i32(dest, src1, tmp);
2020         break;
2021     default: /* btst */
2022         break;
2023     }
2024     tcg_temp_free(tmp);
2025     if (op) {
2026         DEST_EA(env, insn, opsize, dest, &addr);
2027     }
2028     tcg_temp_free(dest);
2029 }
2030 
2031 DISAS_INSN(sats)
2032 {
2033     TCGv reg;
2034     reg = DREG(insn, 0);
2035     gen_flush_flags(s);
2036     gen_helper_sats(reg, reg, QREG_CC_V);
2037     gen_logic_cc(s, reg, OS_LONG);
2038 }
2039 
2040 static void gen_push(DisasContext *s, TCGv val)
2041 {
2042     TCGv tmp;
2043 
2044     tmp = tcg_temp_new();
2045     tcg_gen_subi_i32(tmp, QREG_SP, 4);
2046     gen_store(s, OS_LONG, tmp, val, IS_USER(s));
2047     tcg_gen_mov_i32(QREG_SP, tmp);
2048     tcg_temp_free(tmp);
2049 }
2050 
2051 static TCGv mreg(int reg)
2052 {
2053     if (reg < 8) {
2054         /* Dx */
2055         return cpu_dregs[reg];
2056     }
2057     /* Ax */
2058     return cpu_aregs[reg & 7];
2059 }
2060 
2061 DISAS_INSN(movem)
2062 {
2063     TCGv addr, incr, tmp, r[16];
2064     int is_load = (insn & 0x0400) != 0;
2065     int opsize = (insn & 0x40) != 0 ? OS_LONG : OS_WORD;
2066     uint16_t mask = read_im16(env, s);
2067     int mode = extract32(insn, 3, 3);
2068     int reg0 = REG(insn, 0);
2069     int i;
2070 
2071     tmp = cpu_aregs[reg0];
2072 
2073     switch (mode) {
2074     case 0: /* data register direct */
2075     case 1: /* addr register direct */
2076     do_addr_fault:
2077         gen_addr_fault(s);
2078         return;
2079 
2080     case 2: /* indirect */
2081         break;
2082 
2083     case 3: /* indirect post-increment */
2084         if (!is_load) {
2085             /* post-increment is not allowed */
2086             goto do_addr_fault;
2087         }
2088         break;
2089 
2090     case 4: /* indirect pre-decrement */
2091         if (is_load) {
2092             /* pre-decrement is not allowed */
2093             goto do_addr_fault;
2094         }
2095         /*
2096          * We want a bare copy of the address reg, without any pre-decrement
2097          * adjustment, which gen_lea would apply.
2098          */
2099         break;
2100 
2101     default:
2102         tmp = gen_lea_mode(env, s, mode, reg0, opsize);
2103         if (IS_NULL_QREG(tmp)) {
2104             goto do_addr_fault;
2105         }
2106         break;
2107     }
2108 
2109     addr = tcg_temp_new();
2110     tcg_gen_mov_i32(addr, tmp);
2111     incr = tcg_const_i32(opsize_bytes(opsize));
2112 
2113     if (is_load) {
2114         /* memory to register */
2115         for (i = 0; i < 16; i++) {
2116             if (mask & (1 << i)) {
2117                 r[i] = gen_load(s, opsize, addr, 1, IS_USER(s));
2118                 tcg_gen_add_i32(addr, addr, incr);
2119             }
2120         }
2121         for (i = 0; i < 16; i++) {
2122             if (mask & (1 << i)) {
2123                 tcg_gen_mov_i32(mreg(i), r[i]);
2124                 tcg_temp_free(r[i]);
2125             }
2126         }
2127         if (mode == 3) {
2128             /* post-increment: movem (An)+,X */
2129             tcg_gen_mov_i32(cpu_aregs[reg0], addr);
2130         }
2131     } else {
2132         /* register to memory */
2133         if (mode == 4) {
2134             /* pre-decrement: movem X,-(An) */
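            /*
             * For pre-decrement mode the register list mask is
             * reversed: bit 0 selects A7 and bit 15 selects D0, hence
             * the (mask << i) & 0x8000 test below.
             */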
2135             for (i = 15; i >= 0; i--) {
2136                 if ((mask << i) & 0x8000) {
2137                     tcg_gen_sub_i32(addr, addr, incr);
2138                     if (reg0 + 8 == i &&
2139                         m68k_feature(s->env, M68K_FEATURE_EXT_FULL)) {
2140                         /*
2141                          * M68020+: if the addressing register is the
2142                          * register moved to memory, the value written
2143                          * is the initial value decremented by the size of
2144                          * the operation, regardless of how many actual
2145                          * stores have been performed until this point.
2146                          * M68000/M68010: the value is the initial value.
2147                          */
2148                         tmp = tcg_temp_new();
2149                         tcg_gen_sub_i32(tmp, cpu_aregs[reg0], incr);
2150                         gen_store(s, opsize, addr, tmp, IS_USER(s));
2151                         tcg_temp_free(tmp);
2152                     } else {
2153                         gen_store(s, opsize, addr, mreg(i), IS_USER(s));
2154                     }
2155                 }
2156             }
2157             tcg_gen_mov_i32(cpu_aregs[reg0], addr);
2158         } else {
2159             for (i = 0; i < 16; i++) {
2160                 if (mask & (1 << i)) {
2161                     gen_store(s, opsize, addr, mreg(i), IS_USER(s));
2162                     tcg_gen_add_i32(addr, addr, incr);
2163                 }
2164             }
2165         }
2166     }
2167 
2168     tcg_temp_free(incr);
2169     tcg_temp_free(addr);
2170 }
2171 
2172 DISAS_INSN(movep)
2173 {
2174     uint8_t i;
2175     int16_t displ;
2176     TCGv reg;
2177     TCGv addr;
2178     TCGv abuf;
2179     TCGv dbuf;
2180 
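    /*
     * MOVEP transfers 2 (word) or 4 (long) bytes between Dx and
     * alternate memory bytes starting at (Ay) + displacement, most
     * significant byte first.
     */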
2181     displ = read_im16(env, s);
2182 
2183     addr = AREG(insn, 0);
2184     reg = DREG(insn, 9);
2185 
2186     abuf = tcg_temp_new();
2187     tcg_gen_addi_i32(abuf, addr, displ);
2188     dbuf = tcg_temp_new();
2189 
2190     if (insn & 0x40) {
2191         i = 4;
2192     } else {
2193         i = 2;
2194     }
2195 
2196     if (insn & 0x80) {
2197         for ( ; i > 0 ; i--) {
2198             tcg_gen_shri_i32(dbuf, reg, (i - 1) * 8);
2199             tcg_gen_qemu_st8(dbuf, abuf, IS_USER(s));
2200             if (i > 1) {
2201                 tcg_gen_addi_i32(abuf, abuf, 2);
2202             }
2203         }
2204     } else {
2205         for ( ; i > 0 ; i--) {
2206             tcg_gen_qemu_ld8u(dbuf, abuf, IS_USER(s));
2207             tcg_gen_deposit_i32(reg, reg, dbuf, (i - 1) * 8, 8);
2208             if (i > 1) {
2209                 tcg_gen_addi_i32(abuf, abuf, 2);
2210             }
2211         }
2212     }
2213     tcg_temp_free(abuf);
2214     tcg_temp_free(dbuf);
2215 }
2216 
2217 DISAS_INSN(bitop_im)
2218 {
2219     int opsize;
2220     int op;
2221     TCGv src1;
2222     uint32_t mask;
2223     int bitnum;
2224     TCGv tmp;
2225     TCGv addr;
2226 
2227     if ((insn & 0x38) != 0)
2228         opsize = OS_BYTE;
2229     else
2230         opsize = OS_LONG;
2231     op = (insn >> 6) & 3;
2232 
2233     bitnum = read_im16(env, s);
2234     if (m68k_feature(s->env, M68K_FEATURE_M68000)) {
2235         if (bitnum & 0xfe00) {
2236             disas_undef(env, s, insn);
2237             return;
2238         }
2239     } else {
2240         if (bitnum & 0xff00) {
2241             disas_undef(env, s, insn);
2242             return;
2243         }
2244     }
2245 
2246     SRC_EA(env, src1, opsize, 0, op ? &addr: NULL);
2247 
2248     gen_flush_flags(s);
2249     if (opsize == OS_BYTE)
2250         bitnum &= 7;
2251     else
2252         bitnum &= 31;
2253     mask = 1 << bitnum;
2254 
2255     tcg_gen_andi_i32(QREG_CC_Z, src1, mask);
2256 
2257     if (op) {
2258         tmp = tcg_temp_new();
2259         switch (op) {
2260         case 1: /* bchg */
2261             tcg_gen_xori_i32(tmp, src1, mask);
2262             break;
2263         case 2: /* bclr */
2264             tcg_gen_andi_i32(tmp, src1, ~mask);
2265             break;
2266         case 3: /* bset */
2267             tcg_gen_ori_i32(tmp, src1, mask);
2268             break;
2269         default: /* btst */
2270             break;
2271         }
2272         DEST_EA(env, insn, opsize, tmp, &addr);
2273         tcg_temp_free(tmp);
2274     }
2275 }
2276 
2277 static TCGv gen_get_ccr(DisasContext *s)
2278 {
2279     TCGv dest;
2280 
2281     update_cc_op(s);
2282     dest = tcg_temp_new();
2283     gen_helper_get_ccr(dest, cpu_env);
2284     return dest;
2285 }
2286 
2287 static TCGv gen_get_sr(DisasContext *s)
2288 {
2289     TCGv ccr;
2290     TCGv sr;
2291 
2292     ccr = gen_get_ccr(s);
2293     sr = tcg_temp_new();
2294     tcg_gen_andi_i32(sr, QREG_SR, 0xffe0);
2295     tcg_gen_or_i32(sr, sr, ccr);
2296     tcg_temp_free(ccr);
2297     return sr;
2298 }
2299 
2300 static void gen_set_sr_im(DisasContext *s, uint16_t val, int ccr_only)
2301 {
2302     if (ccr_only) {
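        /*
         * Load the flags into the internal representation used under
         * CC_OP_FLAGS: C and X are 0 or 1, V and N are 0 or -1, and Z
         * holds a value that is zero exactly when the Z flag is set.
         */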
2303         tcg_gen_movi_i32(QREG_CC_C, val & CCF_C ? 1 : 0);
2304         tcg_gen_movi_i32(QREG_CC_V, val & CCF_V ? -1 : 0);
2305         tcg_gen_movi_i32(QREG_CC_Z, val & CCF_Z ? 0 : 1);
2306         tcg_gen_movi_i32(QREG_CC_N, val & CCF_N ? -1 : 0);
2307         tcg_gen_movi_i32(QREG_CC_X, val & CCF_X ? 1 : 0);
2308     } else {
2309         TCGv sr = tcg_const_i32(val);
2310         gen_helper_set_sr(cpu_env, sr);
2311         tcg_temp_free(sr);
2312     }
2313     set_cc_op(s, CC_OP_FLAGS);
2314 }
2315 
2316 static void gen_set_sr(DisasContext *s, TCGv val, int ccr_only)
2317 {
2318     if (ccr_only) {
2319         gen_helper_set_ccr(cpu_env, val);
2320     } else {
2321         gen_helper_set_sr(cpu_env, val);
2322     }
2323     set_cc_op(s, CC_OP_FLAGS);
2324 }
2325 
2326 static void gen_move_to_sr(CPUM68KState *env, DisasContext *s, uint16_t insn,
2327                            bool ccr_only)
2328 {
2329     if ((insn & 0x3f) == 0x3c) {
2330         uint16_t val;
2331         val = read_im16(env, s);
2332         gen_set_sr_im(s, val, ccr_only);
2333     } else {
2334         TCGv src;
2335         SRC_EA(env, src, OS_WORD, 0, NULL);
2336         gen_set_sr(s, src, ccr_only);
2337     }
2338 }
2339 
2340 DISAS_INSN(arith_im)
2341 {
2342     int op;
2343     TCGv im;
2344     TCGv src1;
2345     TCGv dest;
2346     TCGv addr;
2347     int opsize;
2348     bool with_SR = ((insn & 0x3f) == 0x3c);
2349 
2350     op = (insn >> 9) & 7;
2351     opsize = insn_opsize(insn);
2352     switch (opsize) {
2353     case OS_BYTE:
2354         im = tcg_const_i32((int8_t)read_im8(env, s));
2355         break;
2356     case OS_WORD:
2357         im = tcg_const_i32((int16_t)read_im16(env, s));
2358         break;
2359     case OS_LONG:
2360         im = tcg_const_i32(read_im32(env, s));
2361         break;
2362     default:
2363         g_assert_not_reached();
2364     }
2365 
2366     if (with_SR) {
2367         /* SR/CCR can only be used with andi/eori/ori */
2368         if (op == 2 || op == 3 || op == 6) {
2369             disas_undef(env, s, insn);
2370             return;
2371         }
2372         switch (opsize) {
2373         case OS_BYTE:
2374             src1 = gen_get_ccr(s);
2375             break;
2376         case OS_WORD:
2377             if (IS_USER(s)) {
2378                 gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
2379                 return;
2380             }
2381             src1 = gen_get_sr(s);
2382             break;
2383         default:
2384             /* OS_LONG; others already g_assert_not_reached.  */
2385             disas_undef(env, s, insn);
2386             return;
2387         }
2388     } else {
2389         SRC_EA(env, src1, opsize, 1, (op == 6) ? NULL : &addr);
2390     }
2391     dest = tcg_temp_new();
2392     switch (op) {
2393     case 0: /* ori */
2394         tcg_gen_or_i32(dest, src1, im);
2395         if (with_SR) {
2396             gen_set_sr(s, dest, opsize == OS_BYTE);
2397         } else {
2398             DEST_EA(env, insn, opsize, dest, &addr);
2399             gen_logic_cc(s, dest, opsize);
2400         }
2401         break;
2402     case 1: /* andi */
2403         tcg_gen_and_i32(dest, src1, im);
2404         if (with_SR) {
2405             gen_set_sr(s, dest, opsize == OS_BYTE);
2406         } else {
2407             DEST_EA(env, insn, opsize, dest, &addr);
2408             gen_logic_cc(s, dest, opsize);
2409         }
2410         break;
2411     case 2: /* subi */
2412         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, src1, im);
2413         tcg_gen_sub_i32(dest, src1, im);
2414         gen_update_cc_add(dest, im, opsize);
2415         set_cc_op(s, CC_OP_SUBB + opsize);
2416         DEST_EA(env, insn, opsize, dest, &addr);
2417         break;
2418     case 3: /* addi */
2419         tcg_gen_add_i32(dest, src1, im);
2420         gen_update_cc_add(dest, im, opsize);
2421         tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, im);
2422         set_cc_op(s, CC_OP_ADDB + opsize);
2423         DEST_EA(env, insn, opsize, dest, &addr);
2424         break;
2425     case 5: /* eori */
2426         tcg_gen_xor_i32(dest, src1, im);
2427         if (with_SR) {
2428             gen_set_sr(s, dest, opsize == OS_BYTE);
2429         } else {
2430             DEST_EA(env, insn, opsize, dest, &addr);
2431             gen_logic_cc(s, dest, opsize);
2432         }
2433         break;
2434     case 6: /* cmpi */
2435         gen_update_cc_cmp(s, src1, im, opsize);
2436         break;
2437     default:
2438         abort();
2439     }
2440     tcg_temp_free(im);
2441     tcg_temp_free(dest);
2442 }
2443 
2444 DISAS_INSN(cas)
2445 {
2446     int opsize;
2447     TCGv addr;
2448     uint16_t ext;
2449     TCGv load;
2450     TCGv cmp;
2451     MemOp opc;
2452 
2453     switch ((insn >> 9) & 3) {
2454     case 1:
2455         opsize = OS_BYTE;
2456         opc = MO_SB;
2457         break;
2458     case 2:
2459         opsize = OS_WORD;
2460         opc = MO_TESW;
2461         break;
2462     case 3:
2463         opsize = OS_LONG;
2464         opc = MO_TESL;
2465         break;
2466     default:
2467         g_assert_not_reached();
2468     }
2469 
2470     ext = read_im16(env, s);
2471 
2472     /* cas Dc,Du,<EA> */
2473 
2474     addr = gen_lea(env, s, insn, opsize);
2475     if (IS_NULL_QREG(addr)) {
2476         gen_addr_fault(s);
2477         return;
2478     }
2479 
2480     cmp = gen_extend(s, DREG(ext, 0), opsize, 1);
2481 
2482     /*
2483      * if  <EA> == Dc then
2484      *     <EA> = Du
2485      *     Dc = <EA> (because <EA> == Dc)
2486      * else
2487      *     Dc = <EA>
2488      */
2489 
2490     load = tcg_temp_new();
2491     tcg_gen_atomic_cmpxchg_i32(load, addr, cmp, DREG(ext, 6),
2492                                IS_USER(s), opc);
2493     /* update the flags before Dc is overwritten with the loaded value */
2494     gen_update_cc_cmp(s, load, cmp, opsize);
2495     gen_partset_reg(opsize, DREG(ext, 0), load);
2496 
2497     tcg_temp_free(load);
2498 
2499     switch (extract32(insn, 3, 3)) {
2500     case 3: /* Indirect postincrement.  */
2501         tcg_gen_addi_i32(AREG(insn, 0), addr, opsize_bytes(opsize));
2502         break;
2503     case 4: /* Indirect predecrement.  */
2504         tcg_gen_mov_i32(AREG(insn, 0), addr);
2505         break;
2506     }
2507 }
2508 
2509 DISAS_INSN(cas2w)
2510 {
2511     uint16_t ext1, ext2;
2512     TCGv addr1, addr2;
2513     TCGv regs;
2514 
2515     /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2516 
2517     ext1 = read_im16(env, s);
2518 
2519     if (ext1 & 0x8000) {
2520         /* Address Register */
2521         addr1 = AREG(ext1, 12);
2522     } else {
2523         /* Data Register */
2524         addr1 = DREG(ext1, 12);
2525     }
2526 
2527     ext2 = read_im16(env, s);
2528     if (ext2 & 0x8000) {
2529         /* Address Register */
2530         addr2 = AREG(ext2, 12);
2531     } else {
2532         /* Data Register */
2533         addr2 = DREG(ext2, 12);
2534     }
2535 
2536     /*
2537      * if (R1) == Dc1 && (R2) == Dc2 then
2538      *     (R1) = Du1
2539      *     (R2) = Du2
2540      * else
2541      *     Dc1 = (R1)
2542      *     Dc2 = (R2)
2543      */
2544 
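    /*
     * Pack the compare/update register numbers for the helper:
     * Du2 | Du1 << 3 | Dc2 << 6 | Dc1 << 9.
     */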
2545     regs = tcg_const_i32(REG(ext2, 6) |
2546                          (REG(ext1, 6) << 3) |
2547                          (REG(ext2, 0) << 6) |
2548                          (REG(ext1, 0) << 9));
2549     if (tb_cflags(s->base.tb) & CF_PARALLEL) {
2550         gen_helper_exit_atomic(cpu_env);
2551     } else {
2552         gen_helper_cas2w(cpu_env, regs, addr1, addr2);
2553     }
2554     tcg_temp_free(regs);
2555 
2556     /* Note that cas2w also assigns to env->cc_op.  */
2557     s->cc_op = CC_OP_CMPW;
2558     s->cc_op_synced = 1;
2559 }
2560 
2561 DISAS_INSN(cas2l)
2562 {
2563     uint16_t ext1, ext2;
2564     TCGv addr1, addr2, regs;
2565 
2566     /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2567 
2568     ext1 = read_im16(env, s);
2569 
2570     if (ext1 & 0x8000) {
2571         /* Address Register */
2572         addr1 = AREG(ext1, 12);
2573     } else {
2574         /* Data Register */
2575         addr1 = DREG(ext1, 12);
2576     }
2577 
2578     ext2 = read_im16(env, s);
2579     if (ext2 & 0x8000) {
2580         /* Address Register */
2581         addr2 = AREG(ext2, 12);
2582     } else {
2583         /* Data Register */
2584         addr2 = DREG(ext2, 12);
2585     }
2586 
2587     /*
2588      * if (R1) == Dc1 && (R2) == Dc2 then
2589      *     (R1) = Du1
2590      *     (R2) = Du2
2591      * else
2592      *     Dc1 = (R1)
2593      *     Dc2 = (R2)
2594      */
2595 
2596     regs = tcg_const_i32(REG(ext2, 6) |
2597                          (REG(ext1, 6) << 3) |
2598                          (REG(ext2, 0) << 6) |
2599                          (REG(ext1, 0) << 9));
2600     if (tb_cflags(s->base.tb) & CF_PARALLEL) {
2601         gen_helper_cas2l_parallel(cpu_env, regs, addr1, addr2);
2602     } else {
2603         gen_helper_cas2l(cpu_env, regs, addr1, addr2);
2604     }
2605     tcg_temp_free(regs);
2606 
2607     /* Note that cas2l also assigns to env->cc_op.  */
2608     s->cc_op = CC_OP_CMPL;
2609     s->cc_op_synced = 1;
2610 }
2611 
2612 DISAS_INSN(byterev)
2613 {
2614     TCGv reg;
2615 
2616     reg = DREG(insn, 0);
2617     tcg_gen_bswap32_i32(reg, reg);
2618 }
2619 
2620 DISAS_INSN(move)
2621 {
2622     TCGv src;
2623     TCGv dest;
2624     int op;
2625     int opsize;
2626 
2627     switch (insn >> 12) {
2628     case 1: /* move.b */
2629         opsize = OS_BYTE;
2630         break;
2631     case 2: /* move.l */
2632         opsize = OS_LONG;
2633         break;
2634     case 3: /* move.w */
2635         opsize = OS_WORD;
2636         break;
2637     default:
2638         abort();
2639     }
2640     SRC_EA(env, src, opsize, 1, NULL);
2641     op = (insn >> 6) & 7;
2642     if (op == 1) {
2643         /* movea */
2644         /* The value will already have been sign extended.  */
2645         dest = AREG(insn, 9);
2646         tcg_gen_mov_i32(dest, src);
2647     } else {
2648         /* normal move */
2649         uint16_t dest_ea;
2650         dest_ea = ((insn >> 9) & 7) | (op << 3);
2651         DEST_EA(env, dest_ea, opsize, src, NULL);
2652         /* This will be correct because loads sign extend.  */
2653         gen_logic_cc(s, src, opsize);
2654     }
2655 }
2656 
2657 DISAS_INSN(negx)
2658 {
2659     TCGv z;
2660     TCGv src;
2661     TCGv addr;
2662     int opsize;
2663 
2664     opsize = insn_opsize(insn);
2665     SRC_EA(env, src, opsize, 1, &addr);
2666 
2667     gen_flush_flags(s); /* compute old Z */
2668 
2669     /*
2670      * Perform subtract with borrow.
2671      * (X, N) =  -(src + X);
2672      */
2673 
2674     z = tcg_const_i32(0);
2675     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, z, QREG_CC_X, z);
2676     tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, z, z, QREG_CC_N, QREG_CC_X);
2677     tcg_temp_free(z);
2678     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
2679 
2680     tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);
2681 
2682     /*
2683      * Compute signed-overflow for negation.  The normal formula for
2684      * subtraction is (res ^ dest) & (src ^ dest), but with dest==0
2685      * this simplifies to res & src.
2686      */
2687 
2688     tcg_gen_and_i32(QREG_CC_V, QREG_CC_N, src);
2689 
2690     /* Copy the rest of the results into place.  */
2691     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
2692     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
2693 
2694     set_cc_op(s, CC_OP_FLAGS);
2695 
2696     /* result is in QREG_CC_N */
2697 
2698     DEST_EA(env, insn, opsize, QREG_CC_N, &addr);
2699 }
2700 
2701 DISAS_INSN(lea)
2702 {
2703     TCGv reg;
2704     TCGv tmp;
2705 
2706     reg = AREG(insn, 9);
2707     tmp = gen_lea(env, s, insn, OS_LONG);
2708     if (IS_NULL_QREG(tmp)) {
2709         gen_addr_fault(s);
2710         return;
2711     }
2712     tcg_gen_mov_i32(reg, tmp);
2713 }
2714 
2715 DISAS_INSN(clr)
2716 {
2717     int opsize;
2718     TCGv zero;
2719 
2720     zero = tcg_const_i32(0);
2721 
2722     opsize = insn_opsize(insn);
2723     DEST_EA(env, insn, opsize, zero, NULL);
2724     gen_logic_cc(s, zero, opsize);
2725     tcg_temp_free(zero);
2726 }
2727 
2728 DISAS_INSN(move_from_ccr)
2729 {
2730     TCGv ccr;
2731 
2732     ccr = gen_get_ccr(s);
2733     DEST_EA(env, insn, OS_WORD, ccr, NULL);
2734 }
2735 
2736 DISAS_INSN(neg)
2737 {
2738     TCGv src1;
2739     TCGv dest;
2740     TCGv addr;
2741     int opsize;
2742 
2743     opsize = insn_opsize(insn);
2744     SRC_EA(env, src1, opsize, 1, &addr);
2745     dest = tcg_temp_new();
2746     tcg_gen_neg_i32(dest, src1);
2747     set_cc_op(s, CC_OP_SUBB + opsize);
2748     gen_update_cc_add(dest, src1, opsize);
2749     tcg_gen_setcondi_i32(TCG_COND_NE, QREG_CC_X, dest, 0);
2750     DEST_EA(env, insn, opsize, dest, &addr);
2751     tcg_temp_free(dest);
2752 }
2753 
2754 DISAS_INSN(move_to_ccr)
2755 {
2756     gen_move_to_sr(env, s, insn, true);
2757 }
2758 
2759 DISAS_INSN(not)
2760 {
2761     TCGv src1;
2762     TCGv dest;
2763     TCGv addr;
2764     int opsize;
2765 
2766     opsize = insn_opsize(insn);
2767     SRC_EA(env, src1, opsize, 1, &addr);
2768     dest = tcg_temp_new();
2769     tcg_gen_not_i32(dest, src1);
2770     DEST_EA(env, insn, opsize, dest, &addr);
2771     gen_logic_cc(s, dest, opsize);
2772 }
2773 
2774 DISAS_INSN(swap)
2775 {
2776     TCGv src1;
2777     TCGv src2;
2778     TCGv reg;
2779 
2780     src1 = tcg_temp_new();
2781     src2 = tcg_temp_new();
2782     reg = DREG(insn, 0);
2783     tcg_gen_shli_i32(src1, reg, 16);
2784     tcg_gen_shri_i32(src2, reg, 16);
2785     tcg_gen_or_i32(reg, src1, src2);
2786     tcg_temp_free(src2);
2787     tcg_temp_free(src1);
2788     gen_logic_cc(s, reg, OS_LONG);
2789 }
2790 
2791 DISAS_INSN(bkpt)
2792 {
2793     gen_exception(s, s->base.pc_next, EXCP_DEBUG);
2794 }
2795 
2796 DISAS_INSN(pea)
2797 {
2798     TCGv tmp;
2799 
2800     tmp = gen_lea(env, s, insn, OS_LONG);
2801     if (IS_NULL_QREG(tmp)) {
2802         gen_addr_fault(s);
2803         return;
2804     }
2805     gen_push(s, tmp);
2806 }
2807 
2808 DISAS_INSN(ext)
2809 {
2810     int op;
2811     TCGv reg;
2812     TCGv tmp;
2813 
2814     reg = DREG(insn, 0);
2815     op = (insn >> 6) & 7;
2816     tmp = tcg_temp_new();
2817     if (op == 3)
2818         tcg_gen_ext16s_i32(tmp, reg);
2819     else
2820         tcg_gen_ext8s_i32(tmp, reg);
2821     if (op == 2)
2822         gen_partset_reg(OS_WORD, reg, tmp);
2823     else
2824         tcg_gen_mov_i32(reg, tmp);
2825     gen_logic_cc(s, tmp, OS_LONG);
2826     tcg_temp_free(tmp);
2827 }
2828 
2829 DISAS_INSN(tst)
2830 {
2831     int opsize;
2832     TCGv tmp;
2833 
2834     opsize = insn_opsize(insn);
2835     SRC_EA(env, tmp, opsize, 1, NULL);
2836     gen_logic_cc(s, tmp, opsize);
2837 }
2838 
2839 DISAS_INSN(pulse)
2840 {
2841     /* Implemented as a NOP.  */
2842 }
2843 
2844 DISAS_INSN(illegal)
2845 {
2846     gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
2847 }
2848 
2849 /* ??? This should be atomic.  */
2850 DISAS_INSN(tas)
2851 {
2852     TCGv dest;
2853     TCGv src1;
2854     TCGv addr;
2855 
2856     dest = tcg_temp_new();
2857     SRC_EA(env, src1, OS_BYTE, 1, &addr);
2858     gen_logic_cc(s, src1, OS_BYTE);
2859     tcg_gen_ori_i32(dest, src1, 0x80);
2860     DEST_EA(env, insn, OS_BYTE, dest, &addr);
2861     tcg_temp_free(dest);
2862 }
2863 
2864 DISAS_INSN(mull)
2865 {
2866     uint16_t ext;
2867     TCGv src1;
2868     int sign;
2869 
2870     ext = read_im16(env, s);
2871 
2872     sign = ext & 0x800;
2873 
2874     if (ext & 0x400) {
2875         if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
2876             gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
2877             return;
2878         }
2879 
2880         SRC_EA(env, src1, OS_LONG, 0, NULL);
2881 
2882         if (sign) {
2883             tcg_gen_muls2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
2884         } else {
2885             tcg_gen_mulu2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
2886         }
2887         /* if Dl == Dh, the 68040 returns the low 32 bits */
2888         tcg_gen_mov_i32(DREG(ext, 0), QREG_CC_N);
2889         tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_Z);
2890         tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N);
2891 
2892         tcg_gen_movi_i32(QREG_CC_V, 0);
2893         tcg_gen_movi_i32(QREG_CC_C, 0);
2894 
2895         set_cc_op(s, CC_OP_FLAGS);
2896         return;
2897     }
2898     SRC_EA(env, src1, OS_LONG, 0, NULL);
2899     if (m68k_feature(s->env, M68K_FEATURE_M68000)) {
2900         tcg_gen_movi_i32(QREG_CC_C, 0);
2901         if (sign) {
2902             tcg_gen_muls2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
2903             /* QREG_CC_V is -(QREG_CC_V != (QREG_CC_N >> 31)) */
2904             tcg_gen_sari_i32(QREG_CC_Z, QREG_CC_N, 31);
2905             tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_Z);
2906         } else {
2907             tcg_gen_mulu2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
2908             /* QREG_CC_V is -(QREG_CC_V != 0), use QREG_CC_C as 0 */
2909             tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_C);
2910         }
2911         tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
2912         tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_N);
2913 
2914         tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
2915 
2916         set_cc_op(s, CC_OP_FLAGS);
2917     } else {
2918         /*
2919          * The upper 32 bits of the product are discarded, so
2920          * muls.l and mulu.l are functionally equivalent.
2921          */
2922         tcg_gen_mul_i32(DREG(ext, 12), src1, DREG(ext, 12));
2923         gen_logic_cc(s, DREG(ext, 12), OS_LONG);
2924     }
2925 }
2926 
2927 static void gen_link(DisasContext *s, uint16_t insn, int32_t offset)
2928 {
2929     TCGv reg;
2930     TCGv tmp;
2931 
2932     reg = AREG(insn, 0);
2933     tmp = tcg_temp_new();
2934     tcg_gen_subi_i32(tmp, QREG_SP, 4);
2935     gen_store(s, OS_LONG, tmp, reg, IS_USER(s));
2936     if ((insn & 7) != 7) {
2937         tcg_gen_mov_i32(reg, tmp);
2938     }
2939     tcg_gen_addi_i32(QREG_SP, tmp, offset);
2940     tcg_temp_free(tmp);
2941 }
2942 
2943 DISAS_INSN(link)
2944 {
2945     int16_t offset;
2946 
2947     offset = read_im16(env, s);
2948     gen_link(s, insn, offset);
2949 }
2950 
2951 DISAS_INSN(linkl)
2952 {
2953     int32_t offset;
2954 
2955     offset = read_im32(env, s);
2956     gen_link(s, insn, offset);
2957 }
2958 
2959 DISAS_INSN(unlk)
2960 {
2961     TCGv src;
2962     TCGv reg;
2963     TCGv tmp;
2964 
2965     src = tcg_temp_new();
2966     reg = AREG(insn, 0);
2967     tcg_gen_mov_i32(src, reg);
2968     tmp = gen_load(s, OS_LONG, src, 0, IS_USER(s));
2969     tcg_gen_mov_i32(reg, tmp);
2970     tcg_gen_addi_i32(QREG_SP, src, 4);
2971     tcg_temp_free(src);
2972     tcg_temp_free(tmp);
2973 }
2974 
2975 #if defined(CONFIG_SOFTMMU)
2976 DISAS_INSN(reset)
2977 {
2978     if (IS_USER(s)) {
2979         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
2980         return;
2981     }
2982 
2983     gen_helper_reset(cpu_env);
2984 }
2985 #endif
2986 
2987 DISAS_INSN(nop)
2988 {
2989 }
2990 
2991 DISAS_INSN(rtd)
2992 {
2993     TCGv tmp;
2994     int16_t offset = read_im16(env, s);
2995 
2996     tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
2997     tcg_gen_addi_i32(QREG_SP, QREG_SP, offset + 4);
2998     gen_jmp(s, tmp);
2999 }
3000 
3001 DISAS_INSN(rtr)
3002 {
3003     TCGv tmp;
3004     TCGv ccr;
3005     TCGv sp;
3006 
3007     sp = tcg_temp_new();
3008     ccr = gen_load(s, OS_WORD, QREG_SP, 0, IS_USER(s));
3009     tcg_gen_addi_i32(sp, QREG_SP, 2);
3010     tmp = gen_load(s, OS_LONG, sp, 0, IS_USER(s));
3011     tcg_gen_addi_i32(QREG_SP, sp, 4);
3012     tcg_temp_free(sp);
3013 
3014     gen_set_sr(s, ccr, true);
3015     tcg_temp_free(ccr);
3016 
3017     gen_jmp(s, tmp);
3018 }
3019 
3020 DISAS_INSN(rts)
3021 {
3022     TCGv tmp;
3023 
3024     tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
3025     tcg_gen_addi_i32(QREG_SP, QREG_SP, 4);
3026     gen_jmp(s, tmp);
3027 }
3028 
3029 DISAS_INSN(jump)
3030 {
3031     TCGv tmp;
3032 
3033     /*
3034      * Load the target address first to ensure correct exception
3035      * behavior.
3036      */
3037     tmp = gen_lea(env, s, insn, OS_LONG);
3038     if (IS_NULL_QREG(tmp)) {
3039         gen_addr_fault(s);
3040         return;
3041     }
3042     if ((insn & 0x40) == 0) {
3043         /* jsr */
3044         gen_push(s, tcg_const_i32(s->pc));
3045     }
3046     gen_jmp(s, tmp);
3047 }
3048 
3049 DISAS_INSN(addsubq)
3050 {
3051     TCGv src;
3052     TCGv dest;
3053     TCGv val;
3054     int imm;
3055     TCGv addr;
3056     int opsize;
3057 
3058     if ((insn & 070) == 010) {
3059         /* Operations on an address register are always long.  */
3060         opsize = OS_LONG;
3061     } else {
3062         opsize = insn_opsize(insn);
3063     }
3064     SRC_EA(env, src, opsize, 1, &addr);
3065     imm = (insn >> 9) & 7;
3066     if (imm == 0) {
3067         imm = 8;
3068     }
3069     val = tcg_const_i32(imm);
3070     dest = tcg_temp_new();
3071     tcg_gen_mov_i32(dest, src);
3072     if ((insn & 0x38) == 0x08) {
3073         /*
3074          * Don't update condition codes if the destination is an
3075          * address register.
3076          */
3077         if (insn & 0x0100) {
3078             tcg_gen_sub_i32(dest, dest, val);
3079         } else {
3080             tcg_gen_add_i32(dest, dest, val);
3081         }
3082     } else {
3083         if (insn & 0x0100) {
3084             tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
3085             tcg_gen_sub_i32(dest, dest, val);
3086             set_cc_op(s, CC_OP_SUBB + opsize);
3087         } else {
3088             tcg_gen_add_i32(dest, dest, val);
3089             tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
3090             set_cc_op(s, CC_OP_ADDB + opsize);
3091         }
3092         gen_update_cc_add(dest, val, opsize);
3093     }
3094     tcg_temp_free(val);
3095     DEST_EA(env, insn, opsize, dest, &addr);
3096     tcg_temp_free(dest);
3097 }
3098 
3099 DISAS_INSN(tpf)
3100 {
3101     switch (insn & 7) {
3102     case 2: /* One extension word.  */
3103         s->pc += 2;
3104         break;
3105     case 3: /* Two extension words.  */
3106         s->pc += 4;
3107         break;
3108     case 4: /* No extension words.  */
3109         break;
3110     default:
3111         disas_undef(env, s, insn);
3112     }
3113 }
3114 
3115 DISAS_INSN(branch)
3116 {
3117     int32_t offset;
3118     uint32_t base;
3119     int op;
3120 
3121     base = s->pc;
3122     op = (insn >> 8) & 0xf;
3123     offset = (int8_t)insn;
3124     if (offset == 0) {
3125         offset = (int16_t)read_im16(env, s);
3126     } else if (offset == -1) {
3127         offset = read_im32(env, s);
3128     }
3129     if (op == 1) {
3130         /* bsr */
3131         gen_push(s, tcg_const_i32(s->pc));
3132     }
3133     if (op > 1) {
3134         /* Bcc */
3135         TCGLabel *l1 = gen_new_label();
3136         gen_jmpcc(s, ((insn >> 8) & 0xf) ^ 1, l1);
3137         gen_jmp_tb(s, 1, base + offset);
3138         gen_set_label(l1);
3139         gen_jmp_tb(s, 0, s->pc);
3140     } else {
3141         /* Unconditional branch.  */
3142         update_cc_op(s);
3143         gen_jmp_tb(s, 0, base + offset);
3144     }
3145 }
3146 
3147 DISAS_INSN(moveq)
3148 {
3149     tcg_gen_movi_i32(DREG(insn, 9), (int8_t)insn);
3150     gen_logic_cc(s, DREG(insn, 9), OS_LONG);
3151 }
3152 
3153 DISAS_INSN(mvzs)
3154 {
3155     int opsize;
3156     TCGv src;
3157     TCGv reg;
3158 
3159     if (insn & 0x40)
3160         opsize = OS_WORD;
3161     else
3162         opsize = OS_BYTE;
3163     SRC_EA(env, src, opsize, (insn & 0x80) == 0, NULL);
3164     reg = DREG(insn, 9);
3165     tcg_gen_mov_i32(reg, src);
3166     gen_logic_cc(s, src, opsize);
3167 }
3168 
3169 DISAS_INSN(or)
3170 {
3171     TCGv reg;
3172     TCGv dest;
3173     TCGv src;
3174     TCGv addr;
3175     int opsize;
3176 
3177     opsize = insn_opsize(insn);
3178     reg = gen_extend(s, DREG(insn, 9), opsize, 0);
3179     dest = tcg_temp_new();
3180     if (insn & 0x100) {
3181         SRC_EA(env, src, opsize, 0, &addr);
3182         tcg_gen_or_i32(dest, src, reg);
3183         DEST_EA(env, insn, opsize, dest, &addr);
3184     } else {
3185         SRC_EA(env, src, opsize, 0, NULL);
3186         tcg_gen_or_i32(dest, src, reg);
3187         gen_partset_reg(opsize, DREG(insn, 9), dest);
3188     }
3189     gen_logic_cc(s, dest, opsize);
3190     tcg_temp_free(dest);
3191 }
3192 
3193 DISAS_INSN(suba)
3194 {
3195     TCGv src;
3196     TCGv reg;
3197 
3198     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3199     reg = AREG(insn, 9);
3200     tcg_gen_sub_i32(reg, reg, src);
3201 }
3202 
3203 static inline void gen_subx(DisasContext *s, TCGv src, TCGv dest, int opsize)
3204 {
3205     TCGv tmp;
3206 
3207     gen_flush_flags(s); /* compute old Z */
3208 
3209     /*
3210      * Perform subtract with borrow.
3211      * (X, N) = dest - (src + X);
3212      */
3213 
3214     tmp = tcg_const_i32(0);
3215     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, tmp, QREG_CC_X, tmp);
3216     tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, dest, tmp, QREG_CC_N, QREG_CC_X);
3217     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3218     tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);
3219 
3220     /* Compute signed-overflow for subtract.  */
3221 
3222     tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, dest);
3223     tcg_gen_xor_i32(tmp, dest, src);
3224     tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, tmp);
3225     tcg_temp_free(tmp);
3226 
3227     /* Copy the rest of the results into place.  */
3228     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
3229     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
3230 
3231     set_cc_op(s, CC_OP_FLAGS);
3232 
3233     /* result is in QREG_CC_N */
3234 }
3235 
3236 DISAS_INSN(subx_reg)
3237 {
3238     TCGv dest;
3239     TCGv src;
3240     int opsize;
3241 
3242     opsize = insn_opsize(insn);
3243 
3244     src = gen_extend(s, DREG(insn, 0), opsize, 1);
3245     dest = gen_extend(s, DREG(insn, 9), opsize, 1);
3246 
3247     gen_subx(s, src, dest, opsize);
3248 
3249     gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3250 }
3251 
3252 DISAS_INSN(subx_mem)
3253 {
3254     TCGv src;
3255     TCGv addr_src;
3256     TCGv dest;
3257     TCGv addr_dest;
3258     int opsize;
3259 
3260     opsize = insn_opsize(insn);
3261 
3262     addr_src = AREG(insn, 0);
3263     tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
3264     src = gen_load(s, opsize, addr_src, 1, IS_USER(s));
3265 
3266     addr_dest = AREG(insn, 9);
3267     tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
3268     dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));
3269 
3270     gen_subx(s, src, dest, opsize);
3271 
3272     gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));
3273 
3274     tcg_temp_free(dest);
3275     tcg_temp_free(src);
3276 }
3277 
3278 DISAS_INSN(mov3q)
3279 {
3280     TCGv src;
3281     int val;
3282 
3283     val = (insn >> 9) & 7;
3284     if (val == 0)
3285         val = -1;
3286     src = tcg_const_i32(val);
3287     gen_logic_cc(s, src, OS_LONG);
3288     DEST_EA(env, insn, OS_LONG, src, NULL);
3289     tcg_temp_free(src);
3290 }
3291 
3292 DISAS_INSN(cmp)
3293 {
3294     TCGv src;
3295     TCGv reg;
3296     int opsize;
3297 
3298     opsize = insn_opsize(insn);
3299     SRC_EA(env, src, opsize, 1, NULL);
3300     reg = gen_extend(s, DREG(insn, 9), opsize, 1);
3301     gen_update_cc_cmp(s, reg, src, opsize);
3302 }
3303 
3304 DISAS_INSN(cmpa)
3305 {
3306     int opsize;
3307     TCGv src;
3308     TCGv reg;
3309 
3310     if (insn & 0x100) {
3311         opsize = OS_LONG;
3312     } else {
3313         opsize = OS_WORD;
3314     }
3315     SRC_EA(env, src, opsize, 1, NULL);
3316     reg = AREG(insn, 9);
3317     gen_update_cc_cmp(s, reg, src, OS_LONG);
3318 }
3319 
3320 DISAS_INSN(cmpm)
3321 {
3322     int opsize = insn_opsize(insn);
3323     TCGv src, dst;
3324 
3325     /* Post-increment load (mode 3) from Ay.  */
3326     src = gen_ea_mode(env, s, 3, REG(insn, 0), opsize,
3327                       NULL_QREG, NULL, EA_LOADS, IS_USER(s));
3328     /* Post-increment load (mode 3) from Ax.  */
3329     dst = gen_ea_mode(env, s, 3, REG(insn, 9), opsize,
3330                       NULL_QREG, NULL, EA_LOADS, IS_USER(s));
3331 
3332     gen_update_cc_cmp(s, dst, src, opsize);
3333 }
3334 
3335 DISAS_INSN(eor)
3336 {
3337     TCGv src;
3338     TCGv dest;
3339     TCGv addr;
3340     int opsize;
3341 
3342     opsize = insn_opsize(insn);
3343 
3344     SRC_EA(env, src, opsize, 0, &addr);
3345     dest = tcg_temp_new();
3346     tcg_gen_xor_i32(dest, src, DREG(insn, 9));
3347     gen_logic_cc(s, dest, opsize);
3348     DEST_EA(env, insn, opsize, dest, &addr);
3349     tcg_temp_free(dest);
3350 }
3351 
3352 static void do_exg(TCGv reg1, TCGv reg2)
3353 {
3354     TCGv temp = tcg_temp_new();
3355     tcg_gen_mov_i32(temp, reg1);
3356     tcg_gen_mov_i32(reg1, reg2);
3357     tcg_gen_mov_i32(reg2, temp);
3358     tcg_temp_free(temp);
3359 }
3360 
3361 DISAS_INSN(exg_dd)
3362 {
3363     /* exchange Dx and Dy */
3364     do_exg(DREG(insn, 9), DREG(insn, 0));
3365 }
3366 
3367 DISAS_INSN(exg_aa)
3368 {
3369     /* exchange Ax and Ay */
3370     do_exg(AREG(insn, 9), AREG(insn, 0));
3371 }
3372 
3373 DISAS_INSN(exg_da)
3374 {
3375     /* exchange Dx and Ay */
3376     do_exg(DREG(insn, 9), AREG(insn, 0));
3377 }
3378 
3379 DISAS_INSN(and)
3380 {
3381     TCGv src;
3382     TCGv reg;
3383     TCGv dest;
3384     TCGv addr;
3385     int opsize;
3386 
3387     dest = tcg_temp_new();
3388 
3389     opsize = insn_opsize(insn);
3390     reg = DREG(insn, 9);
3391     if (insn & 0x100) {
3392         SRC_EA(env, src, opsize, 0, &addr);
3393         tcg_gen_and_i32(dest, src, reg);
3394         DEST_EA(env, insn, opsize, dest, &addr);
3395     } else {
3396         SRC_EA(env, src, opsize, 0, NULL);
3397         tcg_gen_and_i32(dest, src, reg);
3398         gen_partset_reg(opsize, reg, dest);
3399     }
3400     gen_logic_cc(s, dest, opsize);
3401     tcg_temp_free(dest);
3402 }
3403 
3404 DISAS_INSN(adda)
3405 {
3406     TCGv src;
3407     TCGv reg;
3408 
3409     SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3410     reg = AREG(insn, 9);
3411     tcg_gen_add_i32(reg, reg, src);
3412 }
3413 
3414 static inline void gen_addx(DisasContext *s, TCGv src, TCGv dest, int opsize)
3415 {
3416     TCGv tmp;
3417 
3418     gen_flush_flags(s); /* compute old Z */
3419 
3420     /*
3421      * Perform addition with carry.
3422      * (X, N) = src + dest + X;
3423      */
3424 
3425     tmp = tcg_const_i32(0);
3426     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_X, tmp, dest, tmp);
3427     tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_N, QREG_CC_X, src, tmp);
3428     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3429 
3430     /* Compute signed-overflow for addition.  */
3431 
3432     tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
3433     tcg_gen_xor_i32(tmp, dest, src);
3434     tcg_gen_andc_i32(QREG_CC_V, QREG_CC_V, tmp);
3435     tcg_temp_free(tmp);
3436 
3437     /* Copy the rest of the results into place.  */
3438     tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
3439     tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
3440 
3441     set_cc_op(s, CC_OP_FLAGS);
3442 
3443     /* result is in QREG_CC_N */
3444 }
3445 
3446 DISAS_INSN(addx_reg)
3447 {
3448     TCGv dest;
3449     TCGv src;
3450     int opsize;
3451 
3452     opsize = insn_opsize(insn);
3453 
3454     dest = gen_extend(s, DREG(insn, 9), opsize, 1);
3455     src = gen_extend(s, DREG(insn, 0), opsize, 1);
3456 
3457     gen_addx(s, src, dest, opsize);
3458 
3459     gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3460 }
3461 
3462 DISAS_INSN(addx_mem)
3463 {
3464     TCGv src;
3465     TCGv addr_src;
3466     TCGv dest;
3467     TCGv addr_dest;
3468     int opsize;
3469 
3470     opsize = insn_opsize(insn);
3471 
3472     addr_src = AREG(insn, 0);
3473     tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
3474     src = gen_load(s, opsize, addr_src, 1, IS_USER(s));
3475 
3476     addr_dest = AREG(insn, 9);
3477     tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
3478     dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));
3479 
3480     gen_addx(s, src, dest, opsize);
3481 
3482     gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));
3483 
3484     tcg_temp_free(dest);
3485     tcg_temp_free(src);
3486 }
3487 
3488 static inline void shift_im(DisasContext *s, uint16_t insn, int opsize)
3489 {
3490     int count = (insn >> 9) & 7;
3491     int logical = insn & 8;
3492     int left = insn & 0x100;
3493     int bits = opsize_bytes(opsize) * 8;
3494     TCGv reg = gen_extend(s, DREG(insn, 0), opsize, !logical);
3495 
3496     if (count == 0) {
3497         count = 8;
3498     }
3499 
3500     tcg_gen_movi_i32(QREG_CC_V, 0);
3501     if (left) {
3502         tcg_gen_shri_i32(QREG_CC_C, reg, bits - count);
3503         tcg_gen_shli_i32(QREG_CC_N, reg, count);
3504 
3505         /*
3506          * Note that ColdFire always clears V (done above),
3507          * while M68000 sets V if the most significant bit is changed at
3508          * any time during the shift operation.
3509          */
3510         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
3511             /* if shift count >= bits, V is (reg != 0) */
3512             if (count >= bits) {
3513                 tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, reg, QREG_CC_V);
3514             } else {
3515                 TCGv t0 = tcg_temp_new();
3516                 tcg_gen_sari_i32(QREG_CC_V, reg, bits - 1);
3517                 tcg_gen_sari_i32(t0, reg, bits - count - 1);
3518                 tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, t0);
3519                 tcg_temp_free(t0);
3520             }
3521             tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
3522         }
3523     } else {
3524         tcg_gen_shri_i32(QREG_CC_C, reg, count - 1);
3525         if (logical) {
3526             tcg_gen_shri_i32(QREG_CC_N, reg, count);
3527         } else {
3528             tcg_gen_sari_i32(QREG_CC_N, reg, count);
3529         }
3530     }
3531 
3532     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3533     tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3534     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3535     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
3536 
3537     gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
3538     set_cc_op(s, CC_OP_FLAGS);
3539 }
3540 
3541 static inline void shift_reg(DisasContext *s, uint16_t insn, int opsize)
3542 {
3543     int logical = insn & 8;
3544     int left = insn & 0x100;
3545     int bits = opsize_bytes(opsize) * 8;
3546     TCGv reg = gen_extend(s, DREG(insn, 0), opsize, !logical);
3547     TCGv s32;
3548     TCGv_i64 t64, s64;
3549 
3550     t64 = tcg_temp_new_i64();
3551     s64 = tcg_temp_new_i64();
3552     s32 = tcg_temp_new();
3553 
3554     /*
3555      * Note that the m68k reduces the shift count modulo 64, not 32.
3556      * In addition, a 64-bit shift makes it easy to find "the last
3557      * bit shifted out", for the carry flag.
3558      */
3559     tcg_gen_andi_i32(s32, DREG(insn, 9), 63);
3560     tcg_gen_extu_i32_i64(s64, s32);
3561     tcg_gen_extu_i32_i64(t64, reg);
3562 
3563     /* Optimistically set V=0.  Also used as a zero source below.  */
3564     tcg_gen_movi_i32(QREG_CC_V, 0);
3565     if (left) {
3566         tcg_gen_shl_i64(t64, t64, s64);
3567 
3568         if (opsize == OS_LONG) {
3569             tcg_gen_extr_i64_i32(QREG_CC_N, QREG_CC_C, t64);
3570             /* Note that C=0 if shift count is 0, and we get that for free.  */
3571         } else {
3572             TCGv zero = tcg_const_i32(0);
3573             tcg_gen_extrl_i64_i32(QREG_CC_N, t64);
3574             tcg_gen_shri_i32(QREG_CC_C, QREG_CC_N, bits);
3575             tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3576                                 s32, zero, zero, QREG_CC_C);
3577             tcg_temp_free(zero);
3578         }
3579         tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3580 
3581         /* X = C, but only if the shift count was non-zero.  */
3582         tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
3583                             QREG_CC_C, QREG_CC_X);
3584 
3585         /*
3586          * M68000 sets V if the most significant bit is changed at
3587          * any time during the shift operation.  Do this via creating
3588          * an extension of the sign bit, comparing, and discarding
3589          * the bits below the sign bit.  I.e.
3590          *     int64_t s = (intN_t)reg;
3591          *     int64_t t = (int64_t)(intN_t)reg << count;
3592          *     V = ((s ^ t) & (-1 << (bits - 1))) != 0
3593          */
3594         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
3595             TCGv_i64 tt = tcg_const_i64(32);
3596             /* if shift is greater than 32, use 32 */
3597             tcg_gen_movcond_i64(TCG_COND_GT, s64, s64, tt, tt, s64);
3598             tcg_temp_free_i64(tt);
3599             /* Sign extend the input to 64 bits; re-do the shift.  */
3600             tcg_gen_ext_i32_i64(t64, reg);
3601             tcg_gen_shl_i64(s64, t64, s64);
3602             /* Clear all bits that are unchanged.  */
3603             tcg_gen_xor_i64(t64, t64, s64);
3604             /* Ignore the bits below the sign bit.  */
3605             tcg_gen_andi_i64(t64, t64, -1ULL << (bits - 1));
3606             /* If any bits remain set, we have overflow.  */
3607             tcg_gen_setcondi_i64(TCG_COND_NE, t64, t64, 0);
3608             tcg_gen_extrl_i64_i32(QREG_CC_V, t64);
3609             tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
3610         }
3611     } else {
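        /*
         * For right shifts, place the operand in the high half of the
         * 64-bit value so that the last bit shifted out lands in bit 31
         * of the low half, from which C is extracted below.
         */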
3612         tcg_gen_shli_i64(t64, t64, 32);
3613         if (logical) {
3614             tcg_gen_shr_i64(t64, t64, s64);
3615         } else {
3616             tcg_gen_sar_i64(t64, t64, s64);
3617         }
3618         tcg_gen_extr_i64_i32(QREG_CC_C, QREG_CC_N, t64);
3619 
3620         /* Note that C=0 if shift count is 0, and we get that for free.  */
3621         tcg_gen_shri_i32(QREG_CC_C, QREG_CC_C, 31);
3622 
3623         /* X = C, but only if the shift count was non-zero.  */
3624         tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
3625                             QREG_CC_C, QREG_CC_X);
3626     }
3627     gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3628     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3629 
3630     tcg_temp_free(s32);
3631     tcg_temp_free_i64(s64);
3632     tcg_temp_free_i64(t64);
3633 
3634     /* Write back the result.  */
3635     gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
3636     set_cc_op(s, CC_OP_FLAGS);
3637 }
3638 
3639 DISAS_INSN(shift8_im)
3640 {
3641     shift_im(s, insn, OS_BYTE);
3642 }
3643 
3644 DISAS_INSN(shift16_im)
3645 {
3646     shift_im(s, insn, OS_WORD);
3647 }
3648 
3649 DISAS_INSN(shift_im)
3650 {
3651     shift_im(s, insn, OS_LONG);
3652 }
3653 
3654 DISAS_INSN(shift8_reg)
3655 {
3656     shift_reg(s, insn, OS_BYTE);
3657 }
3658 
3659 DISAS_INSN(shift16_reg)
3660 {
3661     shift_reg(s, insn, OS_WORD);
3662 }
3663 
3664 DISAS_INSN(shift_reg)
3665 {
3666     shift_reg(s, insn, OS_LONG);
3667 }
3668 
3669 DISAS_INSN(shift_mem)
3670 {
3671     int logical = insn & 8;
3672     int left = insn & 0x100;
3673     TCGv src;
3674     TCGv addr;
3675 
3676     SRC_EA(env, src, OS_WORD, !logical, &addr);
3677     tcg_gen_movi_i32(QREG_CC_V, 0);
3678     if (left) {
3679         tcg_gen_shri_i32(QREG_CC_C, src, 15);
3680         tcg_gen_shli_i32(QREG_CC_N, src, 1);
3681 
3682         /*
3683          * Note that ColdFire always clears V,
3684          * while M68000 sets V if the most significant bit is changed at
3685          * any time during the shift operation.
3686          */
3687         if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
3688             src = gen_extend(s, src, OS_WORD, 1);
3689             tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
3690         }
3691     } else {
3692         tcg_gen_mov_i32(QREG_CC_C, src);
3693         if (logical) {
3694             tcg_gen_shri_i32(QREG_CC_N, src, 1);
3695         } else {
3696             tcg_gen_sari_i32(QREG_CC_N, src, 1);
3697         }
3698     }
3699 
3700     gen_ext(QREG_CC_N, QREG_CC_N, OS_WORD, 1);
3701     tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3702     tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3703     tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
3704 
3705     DEST_EA(env, insn, OS_WORD, QREG_CC_N, &addr);
3706     set_cc_op(s, CC_OP_FLAGS);
3707 }
3708 
3709 static void rotate(TCGv reg, TCGv shift, int left, int size)
3710 {
3711     switch (size) {
3712     case 8:
3713         /* Replicate the 8-bit input so that a 32-bit rotate works.  */
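        /*
         * Illustrative example: 0xab becomes 0xabababab; because the pattern
         * repeats every 8 bits, the low byte of the 32-bit rotation equals
         * the 8-bit rotation of the original value.
         */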
3714         tcg_gen_ext8u_i32(reg, reg);
3715         tcg_gen_muli_i32(reg, reg, 0x01010101);
3716         goto do_long;
3717     case 16:
3718         /* Replicate the 16-bit input so that a 32-bit rotate works.  */
3719         tcg_gen_deposit_i32(reg, reg, reg, 16, 16);
3720         goto do_long;
3721     do_long:
3722     default:
3723         if (left) {
3724             tcg_gen_rotl_i32(reg, reg, shift);
3725         } else {
3726             tcg_gen_rotr_i32(reg, reg, shift);
3727         }
3728     }
3729 
3730     /* compute flags */
3731 
3732     switch (size) {
3733     case 8:
3734         tcg_gen_ext8s_i32(reg, reg);
3735         break;
3736     case 16:
3737         tcg_gen_ext16s_i32(reg, reg);
3738         break;
3739     default:
3740         break;
3741     }
3742 
3743     /* QREG_CC_X is not affected */
3744 
3745     tcg_gen_mov_i32(QREG_CC_N, reg);
3746     tcg_gen_mov_i32(QREG_CC_Z, reg);
3747 
3748     if (left) {
3749         tcg_gen_andi_i32(QREG_CC_C, reg, 1);
3750     } else {
3751         tcg_gen_shri_i32(QREG_CC_C, reg, 31);
3752     }
3753 
3754     tcg_gen_movi_i32(QREG_CC_V, 0); /* always cleared */
3755 }
3756 
3757 static void rotate_x_flags(TCGv reg, TCGv X, int size)
3758 {
3759     switch (size) {
3760     case 8:
3761         tcg_gen_ext8s_i32(reg, reg);
3762         break;
3763     case 16:
3764         tcg_gen_ext16s_i32(reg, reg);
3765         break;
3766     default:
3767         break;
3768     }
3769     tcg_gen_mov_i32(QREG_CC_N, reg);
3770     tcg_gen_mov_i32(QREG_CC_Z, reg);
3771     tcg_gen_mov_i32(QREG_CC_X, X);
3772     tcg_gen_mov_i32(QREG_CC_C, X);
3773     tcg_gen_movi_i32(QREG_CC_V, 0);
3774 }
3775 
3776 /* Result of rotate_x() is valid if 0 <= shift <= size */
3777 static TCGv rotate_x(TCGv reg, TCGv shift, int left, int size)
3778 {
3779     TCGv X, shl, shr, shx, sz, zero;
3780 
3781     sz = tcg_const_i32(size);
3782 
3783     shr = tcg_temp_new();
3784     shl = tcg_temp_new();
3785     shx = tcg_temp_new();
3786     if (left) {
3787         tcg_gen_mov_i32(shl, shift);      /* shl = shift */
3788         tcg_gen_movi_i32(shr, size + 1);
3789         tcg_gen_sub_i32(shr, shr, shift); /* shr = size + 1 - shift */
3790         tcg_gen_subi_i32(shx, shift, 1);  /* shx = shift - 1 */
3791         /* shx = shx < 0 ? size : shx; */
3792         zero = tcg_const_i32(0);
3793         tcg_gen_movcond_i32(TCG_COND_LT, shx, shx, zero, sz, shx);
3794         tcg_temp_free(zero);
3795     } else {
3796         tcg_gen_mov_i32(shr, shift);      /* shr = shift */
3797         tcg_gen_movi_i32(shl, size + 1);
3798         tcg_gen_sub_i32(shl, shl, shift); /* shl = size + 1 - shift */
3799         tcg_gen_sub_i32(shx, sz, shift); /* shx = size - shift */
3800     }
3801     tcg_temp_free_i32(sz);
3802 
3803     /* reg = (reg << shl) | (reg >> shr) | (x << shx); */
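    /*
     * Worked example (left rotate-through-X, size 8, shift 3): shl = 3,
     * shr = 9 - 3 = 6, shx = 2, so the 9-bit quantity [X:b7..b0] is rotated
     * left by 3 and the new X is taken from bit 8 below.
     */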
3804 
3805     tcg_gen_shl_i32(shl, reg, shl);
3806     tcg_gen_shr_i32(shr, reg, shr);
3807     tcg_gen_or_i32(reg, shl, shr);
3808     tcg_temp_free(shl);
3809     tcg_temp_free(shr);
3810     tcg_gen_shl_i32(shx, QREG_CC_X, shx);
3811     tcg_gen_or_i32(reg, reg, shx);
3812     tcg_temp_free(shx);
3813 
3814     /* X = (reg >> size) & 1 */
3815 
3816     X = tcg_temp_new();
3817     tcg_gen_extract_i32(X, reg, size, 1);
3818 
3819     return X;
3820 }
3821 
3822 /* Result of rotate32_x() is valid if 0 <= shift < 33 */
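/*
 * The 33-bit rotate through X is built from a 64-bit rotate: reg and X are
 * packed next to each other in a 64-bit temporary, rotated as a unit, and the
 * result is reassembled from the two 32-bit halves.  A shift count of 0 is
 * special-cased at the end so that neither reg nor X is modified.
 */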
3823 static TCGv rotate32_x(TCGv reg, TCGv shift, int left)
3824 {
3825     TCGv_i64 t0, shift64;
3826     TCGv X, lo, hi, zero;
3827 
3828     shift64 = tcg_temp_new_i64();
3829     tcg_gen_extu_i32_i64(shift64, shift);
3830 
3831     t0 = tcg_temp_new_i64();
3832 
3833     X = tcg_temp_new();
3834     lo = tcg_temp_new();
3835     hi = tcg_temp_new();
3836 
3837     if (left) {
3838         /* create [reg:X:..] */
3839 
3840         tcg_gen_shli_i32(lo, QREG_CC_X, 31);
3841         tcg_gen_concat_i32_i64(t0, lo, reg);
3842 
3843         /* rotate */
3844 
3845         tcg_gen_rotl_i64(t0, t0, shift64);
3846         tcg_temp_free_i64(shift64);
3847 
3848         /* result is [reg:..:reg:X] */
3849 
3850         tcg_gen_extr_i64_i32(lo, hi, t0);
3851         tcg_gen_andi_i32(X, lo, 1);
3852 
3853         tcg_gen_shri_i32(lo, lo, 1);
3854     } else {
3855         /* create [..:X:reg] */
3856 
3857         tcg_gen_concat_i32_i64(t0, reg, QREG_CC_X);
3858 
3859         tcg_gen_rotr_i64(t0, t0, shift64);
3860         tcg_temp_free_i64(shift64);
3861 
3862         /* result is [X:reg:..:reg] */
3863 
3864         tcg_gen_extr_i64_i32(lo, hi, t0);
3865 
3866         /* extract X */
3867 
3868         tcg_gen_shri_i32(X, hi, 31);
3869 
3870         /* extract result */
3871 
3872         tcg_gen_shli_i32(hi, hi, 1);
3873     }
3874     tcg_temp_free_i64(t0);
3875     tcg_gen_or_i32(lo, lo, hi);
3876     tcg_temp_free(hi);
3877 
3878     /* if shift == 0, register and X are not affected */
3879 
3880     zero = tcg_const_i32(0);
3881     tcg_gen_movcond_i32(TCG_COND_EQ, X, shift, zero, QREG_CC_X, X);
3882     tcg_gen_movcond_i32(TCG_COND_EQ, reg, shift, zero, reg, lo);
3883     tcg_temp_free(zero);
3884     tcg_temp_free(lo);
3885 
3886     return X;
3887 }
3888 
3889 DISAS_INSN(rotate_im)
3890 {
3891     TCGv shift;
3892     int tmp;
3893     int left = (insn & 0x100);
3894 
3895     tmp = (insn >> 9) & 7;
3896     if (tmp == 0) {
3897         tmp = 8;
3898     }
3899 
3900     shift = tcg_const_i32(tmp);
3901     if (insn & 8) {
3902         rotate(DREG(insn, 0), shift, left, 32);
3903     } else {
3904         TCGv X = rotate32_x(DREG(insn, 0), shift, left);
3905         rotate_x_flags(DREG(insn, 0), X, 32);
3906         tcg_temp_free(X);
3907     }
3908     tcg_temp_free(shift);
3909 
3910     set_cc_op(s, CC_OP_FLAGS);
3911 }
3912 
3913 DISAS_INSN(rotate8_im)
3914 {
3915     int left = (insn & 0x100);
3916     TCGv reg;
3917     TCGv shift;
3918     int tmp;
3919 
3920     reg = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
3921 
3922     tmp = (insn >> 9) & 7;
3923     if (tmp == 0) {
3924         tmp = 8;
3925     }
3926 
3927     shift = tcg_const_i32(tmp);
3928     if (insn & 8) {
3929         rotate(reg, shift, left, 8);
3930     } else {
3931         TCGv X = rotate_x(reg, shift, left, 8);
3932         rotate_x_flags(reg, X, 8);
3933         tcg_temp_free(X);
3934     }
3935     tcg_temp_free(shift);
3936     gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
3937     set_cc_op(s, CC_OP_FLAGS);
3938 }
3939 
3940 DISAS_INSN(rotate16_im)
3941 {
3942     int left = (insn & 0x100);
3943     TCGv reg;
3944     TCGv shift;
3945     int tmp;
3946 
3947     reg = gen_extend(s, DREG(insn, 0), OS_WORD, 0);
3948     tmp = (insn >> 9) & 7;
3949     if (tmp == 0) {
3950         tmp = 8;
3951     }
3952 
3953     shift = tcg_const_i32(tmp);
3954     if (insn & 8) {
3955         rotate(reg, shift, left, 16);
3956     } else {
3957         TCGv X = rotate_x(reg, shift, left, 16);
3958         rotate_x_flags(reg, X, 16);
3959         tcg_temp_free(X);
3960     }
3961     tcg_temp_free(shift);
3962     gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
3963     set_cc_op(s, CC_OP_FLAGS);
3964 }
3965 
3966 DISAS_INSN(rotate_reg)
3967 {
3968     TCGv reg;
3969     TCGv src;
3970     TCGv t0, t1;
3971     int left = (insn & 0x100);
3972 
3973     reg = DREG(insn, 0);
3974     src = DREG(insn, 9);
3975     /* shift in [0..63] */
3976     t0 = tcg_temp_new();
3977     tcg_gen_andi_i32(t0, src, 63);
3978     t1 = tcg_temp_new_i32();
3979     if (insn & 8) {
3980         tcg_gen_andi_i32(t1, src, 31);
3981         rotate(reg, t1, left, 32);
3982         /* if shift == 0, clear C */
3983         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3984                             t0, QREG_CC_V /* 0 */,
3985                             QREG_CC_V /* 0 */, QREG_CC_C);
3986     } else {
3987         TCGv X;
3988         /* modulo 33 */
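        /* ROXd rotates through X, giving an effective width of 33 bits. */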
3989         tcg_gen_movi_i32(t1, 33);
3990         tcg_gen_remu_i32(t1, t0, t1);
3991         X = rotate32_x(DREG(insn, 0), t1, left);
3992         rotate_x_flags(DREG(insn, 0), X, 32);
3993         tcg_temp_free(X);
3994     }
3995     tcg_temp_free(t1);
3996     tcg_temp_free(t0);
3997     set_cc_op(s, CC_OP_FLAGS);
3998 }
3999 
4000 DISAS_INSN(rotate8_reg)
4001 {
4002     TCGv reg;
4003     TCGv src;
4004     TCGv t0, t1;
4005     int left = (insn & 0x100);
4006 
4007     reg = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
4008     src = DREG(insn, 9);
4009     /* shift in [0..63] */
4010     t0 = tcg_temp_new_i32();
4011     tcg_gen_andi_i32(t0, src, 63);
4012     t1 = tcg_temp_new_i32();
4013     if (insn & 8) {
4014         tcg_gen_andi_i32(t1, src, 7);
4015         rotate(reg, t1, left, 8);
4016         /* if shift == 0, clear C */
4017         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
4018                             t0, QREG_CC_V /* 0 */,
4019                             QREG_CC_V /* 0 */, QREG_CC_C);
4020     } else {
4021         TCGv X;
4022         /* modulo 9 */
4023         tcg_gen_movi_i32(t1, 9);
4024         tcg_gen_remu_i32(t1, t0, t1);
4025         X = rotate_x(reg, t1, left, 8);
4026         rotate_x_flags(reg, X, 8);
4027         tcg_temp_free(X);
4028     }
4029     tcg_temp_free(t1);
4030     tcg_temp_free(t0);
4031     gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
4032     set_cc_op(s, CC_OP_FLAGS);
4033 }
4034 
4035 DISAS_INSN(rotate16_reg)
4036 {
4037     TCGv reg;
4038     TCGv src;
4039     TCGv t0, t1;
4040     int left = (insn & 0x100);
4041 
4042     reg = gen_extend(s, DREG(insn, 0), OS_WORD, 0);
4043     src = DREG(insn, 9);
4044     /* shift in [0..63] */
4045     t0 = tcg_temp_new_i32();
4046     tcg_gen_andi_i32(t0, src, 63);
4047     t1 = tcg_temp_new_i32();
4048     if (insn & 8) {
4049         tcg_gen_andi_i32(t1, src, 15);
4050         rotate(reg, t1, left, 16);
4051         /* if shift == 0, clear C */
4052         tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
4053                             t0, QREG_CC_V /* 0 */,
4054                             QREG_CC_V /* 0 */, QREG_CC_C);
4055     } else {
4056         TCGv X;
4057         /* modulo 17 */
4058         tcg_gen_movi_i32(t1, 17);
4059         tcg_gen_remu_i32(t1, t0, t1);
4060         X = rotate_x(reg, t1, left, 16);
4061         rotate_x_flags(reg, X, 16);
4062         tcg_temp_free(X);
4063     }
4064     tcg_temp_free(t1);
4065     tcg_temp_free(t0);
4066     gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
4067     set_cc_op(s, CC_OP_FLAGS);
4068 }
4069 
4070 DISAS_INSN(rotate_mem)
4071 {
4072     TCGv src;
4073     TCGv addr;
4074     TCGv shift;
4075     int left = (insn & 0x100);
4076 
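    /* Memory rotates operate on a word and always rotate by exactly one bit. */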
4077     SRC_EA(env, src, OS_WORD, 0, &addr);
4078 
4079     shift = tcg_const_i32(1);
4080     if (insn & 0x0200) {
4081         rotate(src, shift, left, 16);
4082     } else {
4083         TCGv X = rotate_x(src, shift, left, 16);
4084         rotate_x_flags(src, X, 16);
4085         tcg_temp_free(X);
4086     }
4087     tcg_temp_free(shift);
4088     DEST_EA(env, insn, OS_WORD, src, &addr);
4089     set_cc_op(s, CC_OP_FLAGS);
4090 }
4091 
4092 DISAS_INSN(bfext_reg)
4093 {
4094     int ext = read_im16(env, s);
4095     int is_sign = insn & 0x200;
4096     TCGv src = DREG(insn, 0);
4097     TCGv dst = DREG(ext, 12);
4098     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4099     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4100     int pos = 32 - ofs - len;        /* little bit-endian */
4101     TCGv tmp = tcg_temp_new();
4102     TCGv shift;
4103 
4104     /*
4105      * In general, we're going to rotate the field so that it's at the
4106      * top of the word and then right-shift by the complement of the
4107      * width to extend the field.
4108      */
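    /*
     * Bitfield offsets count from the most significant bit, so a field at
     * offset ofs of width len occupies bits (31 - ofs) down to pos.
     * Illustrative example: ofs = 4, len = 8 gives pos = 20, i.e. bits 27..20.
     */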
4109     if (ext & 0x20) {
4110         /* Variable width.  */
4111         if (ext & 0x800) {
4112             /* Variable offset.  */
4113             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
4114             tcg_gen_rotl_i32(tmp, src, tmp);
4115         } else {
4116             tcg_gen_rotli_i32(tmp, src, ofs);
4117         }
4118 
4119         shift = tcg_temp_new();
4120         tcg_gen_neg_i32(shift, DREG(ext, 0));
4121         tcg_gen_andi_i32(shift, shift, 31);
4122         tcg_gen_sar_i32(QREG_CC_N, tmp, shift);
4123         if (is_sign) {
4124             tcg_gen_mov_i32(dst, QREG_CC_N);
4125         } else {
4126             tcg_gen_shr_i32(dst, tmp, shift);
4127         }
4128         tcg_temp_free(shift);
4129     } else {
4130         /* Immediate width.  */
4131         if (ext & 0x800) {
4132             /* Variable offset */
4133             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
4134             tcg_gen_rotl_i32(tmp, src, tmp);
4135             src = tmp;
4136             pos = 32 - len;
4137         } else {
4138             /*
4139              * Immediate offset.  If the field doesn't wrap around the
4140              * end of the word, rely on (s)extract completely.
4141              */
4142             if (pos < 0) {
4143                 tcg_gen_rotli_i32(tmp, src, ofs);
4144                 src = tmp;
4145                 pos = 32 - len;
4146             }
4147         }
4148 
4149         tcg_gen_sextract_i32(QREG_CC_N, src, pos, len);
4150         if (is_sign) {
4151             tcg_gen_mov_i32(dst, QREG_CC_N);
4152         } else {
4153             tcg_gen_extract_i32(dst, src, pos, len);
4154         }
4155     }
4156 
4157     tcg_temp_free(tmp);
4158     set_cc_op(s, CC_OP_LOGIC);
4159 }
4160 
4161 DISAS_INSN(bfext_mem)
4162 {
4163     int ext = read_im16(env, s);
4164     int is_sign = insn & 0x200;
4165     TCGv dest = DREG(ext, 12);
4166     TCGv addr, len, ofs;
4167 
4168     addr = gen_lea(env, s, insn, OS_UNSIZED);
4169     if (IS_NULL_QREG(addr)) {
4170         gen_addr_fault(s);
4171         return;
4172     }
4173 
4174     if (ext & 0x20) {
4175         len = DREG(ext, 0);
4176     } else {
4177         len = tcg_const_i32(extract32(ext, 0, 5));
4178     }
4179     if (ext & 0x800) {
4180         ofs = DREG(ext, 6);
4181     } else {
4182         ofs = tcg_const_i32(extract32(ext, 6, 5));
4183     }
4184 
4185     if (is_sign) {
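    /*
     * For the unsigned case the helper returns a 64-bit value: the extracted
     * field in the low half and the value used for the N/Z flags in the
     * high half.
     */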
4186         gen_helper_bfexts_mem(dest, cpu_env, addr, ofs, len);
4187         tcg_gen_mov_i32(QREG_CC_N, dest);
4188     } else {
4189         TCGv_i64 tmp = tcg_temp_new_i64();
4190         gen_helper_bfextu_mem(tmp, cpu_env, addr, ofs, len);
4191         tcg_gen_extr_i64_i32(dest, QREG_CC_N, tmp);
4192         tcg_temp_free_i64(tmp);
4193     }
4194     set_cc_op(s, CC_OP_LOGIC);
4195 
4196     if (!(ext & 0x20)) {
4197         tcg_temp_free(len);
4198     }
4199     if (!(ext & 0x800)) {
4200         tcg_temp_free(ofs);
4201     }
4202 }
4203 
4204 DISAS_INSN(bfop_reg)
4205 {
4206     int ext = read_im16(env, s);
4207     TCGv src = DREG(insn, 0);
4208     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4209     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4210     TCGv mask, tofs, tlen;
4211 
4212     tofs = NULL;
4213     tlen = NULL;
4214     if ((insn & 0x0f00) == 0x0d00) { /* bfffo */
4215         tofs = tcg_temp_new();
4216         tlen = tcg_temp_new();
4217     }
4218 
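    /*
     * Strategy: QREG_CC_N receives the field rotated to the top of the word
     * (for the N/Z flags), while mask is built with zeros in the field's bit
     * positions and ones elsewhere, so the bfchg/bfclr/bfset cases below
     * reduce to eqv/and/orc of the register with the mask.
     */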
4219     if ((ext & 0x820) == 0) {
4220         /* Immediate width and offset.  */
4221         uint32_t maski = 0x7fffffffu >> (len - 1);
4222         if (ofs + len <= 32) {
4223             tcg_gen_shli_i32(QREG_CC_N, src, ofs);
4224         } else {
4225             tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
4226         }
4227         tcg_gen_andi_i32(QREG_CC_N, QREG_CC_N, ~maski);
4228         mask = tcg_const_i32(ror32(maski, ofs));
4229         if (tofs) {
4230             tcg_gen_movi_i32(tofs, ofs);
4231             tcg_gen_movi_i32(tlen, len);
4232         }
4233     } else {
4234         TCGv tmp = tcg_temp_new();
4235         if (ext & 0x20) {
4236             /* Variable width */
4237             tcg_gen_subi_i32(tmp, DREG(ext, 0), 1);
4238             tcg_gen_andi_i32(tmp, tmp, 31);
4239             mask = tcg_const_i32(0x7fffffffu);
4240             tcg_gen_shr_i32(mask, mask, tmp);
4241             if (tlen) {
4242                 tcg_gen_addi_i32(tlen, tmp, 1);
4243             }
4244         } else {
4245             /* Immediate width */
4246             mask = tcg_const_i32(0x7fffffffu >> (len - 1));
4247             if (tlen) {
4248                 tcg_gen_movi_i32(tlen, len);
4249             }
4250         }
4251         if (ext & 0x800) {
4252             /* Variable offset */
4253             tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
4254             tcg_gen_rotl_i32(QREG_CC_N, src, tmp);
4255             tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
4256             tcg_gen_rotr_i32(mask, mask, tmp);
4257             if (tofs) {
4258                 tcg_gen_mov_i32(tofs, tmp);
4259             }
4260         } else {
4261             /* Immediate offset (and variable width) */
4262             tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
4263             tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
4264             tcg_gen_rotri_i32(mask, mask, ofs);
4265             if (tofs) {
4266                 tcg_gen_movi_i32(tofs, ofs);
4267             }
4268         }
4269         tcg_temp_free(tmp);
4270     }
4271     set_cc_op(s, CC_OP_LOGIC);
4272 
4273     switch (insn & 0x0f00) {
4274     case 0x0a00: /* bfchg */
4275         tcg_gen_eqv_i32(src, src, mask);
4276         break;
4277     case 0x0c00: /* bfclr */
4278         tcg_gen_and_i32(src, src, mask);
4279         break;
4280     case 0x0d00: /* bfffo */
4281         gen_helper_bfffo_reg(DREG(ext, 12), QREG_CC_N, tofs, tlen);
4282         tcg_temp_free(tlen);
4283         tcg_temp_free(tofs);
4284         break;
4285     case 0x0e00: /* bfset */
4286         tcg_gen_orc_i32(src, src, mask);
4287         break;
4288     case 0x0800: /* bftst */
4289         /* flags already set; no other work to do.  */
4290         break;
4291     default:
4292         g_assert_not_reached();
4293     }
4294     tcg_temp_free(mask);
4295 }
4296 
4297 DISAS_INSN(bfop_mem)
4298 {
4299     int ext = read_im16(env, s);
4300     TCGv addr, len, ofs;
4301     TCGv_i64 t64;
4302 
4303     addr = gen_lea(env, s, insn, OS_UNSIZED);
4304     if (IS_NULL_QREG(addr)) {
4305         gen_addr_fault(s);
4306         return;
4307     }
4308 
4309     if (ext & 0x20) {
4310         len = DREG(ext, 0);
4311     } else {
4312         len = tcg_const_i32(extract32(ext, 0, 5));
4313     }
4314     if (ext & 0x800) {
4315         ofs = DREG(ext, 6);
4316     } else {
4317         ofs = tcg_const_i32(extract32(ext, 6, 5));
4318     }
4319 
4320     switch (insn & 0x0f00) {
4321     case 0x0a00: /* bfchg */
4322         gen_helper_bfchg_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4323         break;
4324     case 0x0c00: /* bfclr */
4325         gen_helper_bfclr_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4326         break;
4327     case 0x0d00: /* bfffo */
4328         t64 = tcg_temp_new_i64();
4329         gen_helper_bfffo_mem(t64, cpu_env, addr, ofs, len);
4330         tcg_gen_extr_i64_i32(DREG(ext, 12), QREG_CC_N, t64);
4331         tcg_temp_free_i64(t64);
4332         break;
4333     case 0x0e00: /* bfset */
4334         gen_helper_bfset_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4335         break;
4336     case 0x0800: /* bftst */
4337         gen_helper_bfexts_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4338         break;
4339     default:
4340         g_assert_not_reached();
4341     }
4342     set_cc_op(s, CC_OP_LOGIC);
4343 
4344     if (!(ext & 0x20)) {
4345         tcg_temp_free(len);
4346     }
4347     if (!(ext & 0x800)) {
4348         tcg_temp_free(ofs);
4349     }
4350 }
4351 
4352 DISAS_INSN(bfins_reg)
4353 {
4354     int ext = read_im16(env, s);
4355     TCGv dst = DREG(insn, 0);
4356     TCGv src = DREG(ext, 12);
4357     int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4358     int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4359     int pos = 32 - ofs - len;        /* little bit-endian */
4360     TCGv tmp;
4361 
4362     tmp = tcg_temp_new();
4363 
4364     if (ext & 0x20) {
4365         /* Variable width */
4366         tcg_gen_neg_i32(tmp, DREG(ext, 0));
4367         tcg_gen_andi_i32(tmp, tmp, 31);
4368         tcg_gen_shl_i32(QREG_CC_N, src, tmp);
4369     } else {
4370         /* Immediate width */
4371         tcg_gen_shli_i32(QREG_CC_N, src, 32 - len);
4372     }
4373     set_cc_op(s, CC_OP_LOGIC);
4374 
4375     /* Immediate width and offset */
4376     if ((ext & 0x820) == 0) {
4377         /* Check for suitability for deposit.  */
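        /*
         * pos >= 0 means the field does not wrap around bit 0, so a single
         * deposit suffices.  Otherwise the source is masked to len bits and
         * rotated into place by hand; illustrative example: ofs = 28, len = 8
         * gives pos = -4 and roti = 4, placing the field in bits 3..0 and
         * 31..28.
         */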
4378         if (pos >= 0) {
4379             tcg_gen_deposit_i32(dst, dst, src, pos, len);
4380         } else {
4381             uint32_t maski = -2U << (len - 1);
4382             uint32_t roti = (ofs + len) & 31;
4383             tcg_gen_andi_i32(tmp, src, ~maski);
4384             tcg_gen_rotri_i32(tmp, tmp, roti);
4385             tcg_gen_andi_i32(dst, dst, ror32(maski, roti));
4386             tcg_gen_or_i32(dst, dst, tmp);
4387         }
4388     } else {
4389         TCGv mask = tcg_temp_new();
4390         TCGv rot = tcg_temp_new();
4391 
4392         if (ext & 0x20) {
4393             /* Variable width */
4394             tcg_gen_subi_i32(rot, DREG(ext, 0), 1);
4395             tcg_gen_andi_i32(rot, rot, 31);
4396             tcg_gen_movi_i32(mask, -2);
4397             tcg_gen_shl_i32(mask, mask, rot);
4398             tcg_gen_mov_i32(rot, DREG(ext, 0));
4399             tcg_gen_andc_i32(tmp, src, mask);
4400         } else {
4401             /* Immediate width (variable offset) */
4402             uint32_t maski = -2U << (len - 1);
4403             tcg_gen_andi_i32(tmp, src, ~maski);
4404             tcg_gen_movi_i32(mask, maski);
4405             tcg_gen_movi_i32(rot, len & 31);
4406         }
4407         if (ext & 0x800) {
4408             /* Variable offset */
4409             tcg_gen_add_i32(rot, rot, DREG(ext, 6));
4410         } else {
4411             /* Immediate offset (variable width) */
4412             tcg_gen_addi_i32(rot, rot, ofs);
4413         }
4414         tcg_gen_andi_i32(rot, rot, 31);
4415         tcg_gen_rotr_i32(mask, mask, rot);
4416         tcg_gen_rotr_i32(tmp, tmp, rot);
4417         tcg_gen_and_i32(dst, dst, mask);
4418         tcg_gen_or_i32(dst, dst, tmp);
4419 
4420         tcg_temp_free(rot);
4421         tcg_temp_free(mask);
4422     }
4423     tcg_temp_free(tmp);
4424 }
4425 
4426 DISAS_INSN(bfins_mem)
4427 {
4428     int ext = read_im16(env, s);
4429     TCGv src = DREG(ext, 12);
4430     TCGv addr, len, ofs;
4431 
4432     addr = gen_lea(env, s, insn, OS_UNSIZED);
4433     if (IS_NULL_QREG(addr)) {
4434         gen_addr_fault(s);
4435         return;
4436     }
4437 
4438     if (ext & 0x20) {
4439         len = DREG(ext, 0);
4440     } else {
4441         len = tcg_const_i32(extract32(ext, 0, 5));
4442     }
4443     if (ext & 0x800) {
4444         ofs = DREG(ext, 6);
4445     } else {
4446         ofs = tcg_const_i32(extract32(ext, 6, 5));
4447     }
4448 
4449     gen_helper_bfins_mem(QREG_CC_N, cpu_env, addr, src, ofs, len);
4450     set_cc_op(s, CC_OP_LOGIC);
4451 
4452     if (!(ext & 0x20)) {
4453         tcg_temp_free(len);
4454     }
4455     if (!(ext & 0x800)) {
4456         tcg_temp_free(ofs);
4457     }
4458 }
4459 
4460 DISAS_INSN(ff1)
4461 {
4462     TCGv reg;
4463     reg = DREG(insn, 0);
4464     gen_logic_cc(s, reg, OS_LONG);
4465     gen_helper_ff1(reg, reg);
4466 }
4467 
4468 DISAS_INSN(chk)
4469 {
4470     TCGv src, reg;
4471     int opsize;
4472 
4473     switch ((insn >> 7) & 3) {
4474     case 3:
4475         opsize = OS_WORD;
4476         break;
4477     case 2:
4478         if (m68k_feature(env, M68K_FEATURE_CHK2)) {
4479             opsize = OS_LONG;
4480             break;
4481         }
4482         /* fallthru */
4483     default:
4484         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4485         return;
4486     }
4487     SRC_EA(env, src, opsize, 1, NULL);
4488     reg = gen_extend(s, DREG(insn, 9), opsize, 1);
4489 
4490     gen_flush_flags(s);
4491     gen_helper_chk(cpu_env, reg, src);
4492 }
4493 
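/*
 * CHK2/CMP2: the effective address points at a bounds pair of the operand
 * size (lower bound first, then upper bound); the helper compares the
 * register operand against both.
 */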
4494 DISAS_INSN(chk2)
4495 {
4496     uint16_t ext;
4497     TCGv addr1, addr2, bound1, bound2, reg;
4498     int opsize;
4499 
4500     switch ((insn >> 9) & 3) {
4501     case 0:
4502         opsize = OS_BYTE;
4503         break;
4504     case 1:
4505         opsize = OS_WORD;
4506         break;
4507     case 2:
4508         opsize = OS_LONG;
4509         break;
4510     default:
4511         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4512         return;
4513     }
4514 
4515     ext = read_im16(env, s);
4516     if ((ext & 0x0800) == 0) {
4517         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4518         return;
4519     }
4520 
4521     addr1 = gen_lea(env, s, insn, OS_UNSIZED);
4522     addr2 = tcg_temp_new();
4523     tcg_gen_addi_i32(addr2, addr1, opsize_bytes(opsize));
4524 
4525     bound1 = gen_load(s, opsize, addr1, 1, IS_USER(s));
4526     tcg_temp_free(addr1);
4527     bound2 = gen_load(s, opsize, addr2, 1, IS_USER(s));
4528     tcg_temp_free(addr2);
4529 
4530     reg = tcg_temp_new();
4531     if (ext & 0x8000) {
4532         tcg_gen_mov_i32(reg, AREG(ext, 12));
4533     } else {
4534         gen_ext(reg, DREG(ext, 12), opsize, 1);
4535     }
4536 
4537     gen_flush_flags(s);
4538     gen_helper_chk2(cpu_env, reg, bound1, bound2);
4539     tcg_temp_free(reg);
4540     tcg_temp_free(bound1);
4541     tcg_temp_free(bound2);
4542 }
4543 
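/*
 * MOVE16 copies one 16-byte line: both addresses are aligned down to a
 * 16-byte boundary and the data is moved as two 64-bit accesses.
 */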
4544 static void m68k_copy_line(TCGv dst, TCGv src, int index)
4545 {
4546     TCGv addr;
4547     TCGv_i64 t0, t1;
4548 
4549     addr = tcg_temp_new();
4550 
4551     t0 = tcg_temp_new_i64();
4552     t1 = tcg_temp_new_i64();
4553 
4554     tcg_gen_andi_i32(addr, src, ~15);
4555     tcg_gen_qemu_ld64(t0, addr, index);
4556     tcg_gen_addi_i32(addr, addr, 8);
4557     tcg_gen_qemu_ld64(t1, addr, index);
4558 
4559     tcg_gen_andi_i32(addr, dst, ~15);
4560     tcg_gen_qemu_st64(t0, addr, index);
4561     tcg_gen_addi_i32(addr, addr, 8);
4562     tcg_gen_qemu_st64(t1, addr, index);
4563 
4564     tcg_temp_free_i64(t0);
4565     tcg_temp_free_i64(t1);
4566     tcg_temp_free(addr);
4567 }
4568 
4569 DISAS_INSN(move16_reg)
4570 {
4571     int index = IS_USER(s);
4572     TCGv tmp;
4573     uint16_t ext;
4574 
4575     ext = read_im16(env, s);
4576     if ((ext & (1 << 15)) == 0) {
4577         gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4578     }
4579 
4580     m68k_copy_line(AREG(ext, 12), AREG(insn, 0), index);
4581 
4582     /* Ax can be Ay, so save Ay before incrementing Ax */
4583     tmp = tcg_temp_new();
4584     tcg_gen_mov_i32(tmp, AREG(ext, 12));
4585     tcg_gen_addi_i32(AREG(insn, 0), AREG(insn, 0), 16);
4586     tcg_gen_addi_i32(AREG(ext, 12), tmp, 16);
4587     tcg_temp_free(tmp);
4588 }
4589 
4590 DISAS_INSN(move16_mem)
4591 {
4592     int index = IS_USER(s);
4593     TCGv reg, addr;
4594 
4595     reg = AREG(insn, 0);
4596     addr = tcg_const_i32(read_im32(env, s));
4597 
4598     if ((insn >> 3) & 1) {
4599         /* MOVE16 (xxx).L, (Ay) */
4600         m68k_copy_line(reg, addr, index);
4601     } else {
4602         /* MOVE16 (Ay), (xxx).L */
4603         m68k_copy_line(addr, reg, index);
4604     }
4605 
4606     tcg_temp_free(addr);
4607 
4608     if (((insn >> 3) & 2) == 0) {
4609         /* (Ay)+ */
4610         tcg_gen_addi_i32(reg, reg, 16);
4611     }
4612 }
4613 
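/*
 * STRLDSR pushes the current SR, then loads SR from the immediate operand;
 * the second opcode word must be 0x46fc (the MOVE-to-SR opcode).
 */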
4614 DISAS_INSN(strldsr)
4615 {
4616     uint16_t ext;
4617     uint32_t addr;
4618 
4619     addr = s->pc - 2;
4620     ext = read_im16(env, s);
4621     if (ext != 0x46FC) {
4622         gen_exception(s, addr, EXCP_ILLEGAL);
4623         return;
4624     }
4625     ext = read_im16(env, s);
4626     if (IS_USER(s) || (ext & SR_S) == 0) {
4627         gen_exception(s, addr, EXCP_PRIVILEGE);
4628         return;
4629     }
4630     gen_push(s, gen_get_sr(s));
4631     gen_set_sr_im(s, ext, 0);
4632 }
4633 
4634 DISAS_INSN(move_from_sr)
4635 {
4636     TCGv sr;
4637 
4638     if (IS_USER(s) && !m68k_feature(env, M68K_FEATURE_M68000)) {
4639         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4640         return;
4641     }
4642     sr = gen_get_sr(s);
4643     DEST_EA(env, insn, OS_WORD, sr, NULL);
4644 }
4645 
4646 #if defined(CONFIG_SOFTMMU)
4647 DISAS_INSN(moves)
4648 {
4649     int opsize;
4650     uint16_t ext;
4651     TCGv reg;
4652     TCGv addr;
4653     int extend;
4654 
4655     if (IS_USER(s)) {
4656         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4657         return;
4658     }
4659 
4660     ext = read_im16(env, s);
4661 
4662     opsize = insn_opsize(insn);
4663 
4664     if (ext & 0x8000) {
4665         /* address register */
4666         reg = AREG(ext, 12);
4667         extend = 1;
4668     } else {
4669         /* data register */
4670         reg = DREG(ext, 12);
4671         extend = 0;
4672     }
4673 
4674     addr = gen_lea(env, s, insn, opsize);
4675     if (IS_NULL_QREG(addr)) {
4676         gen_addr_fault(s);
4677         return;
4678     }
4679 
4680     if (ext & 0x0800) {
4681         /* from reg to ea */
4682         gen_store(s, opsize, addr, reg, DFC_INDEX(s));
4683     } else {
4684         /* from ea to reg */
4685         TCGv tmp = gen_load(s, opsize, addr, 0, SFC_INDEX(s));
4686         if (extend) {
4687             gen_ext(reg, tmp, opsize, 1);
4688         } else {
4689             gen_partset_reg(opsize, reg, tmp);
4690         }
4691         tcg_temp_free(tmp);
4692     }
4693     switch (extract32(insn, 3, 3)) {
4694     case 3: /* Indirect postincrement.  */
4695         tcg_gen_addi_i32(AREG(insn, 0), addr,
4696                          REG(insn, 0) == 7 && opsize == OS_BYTE
4697                          ? 2
4698                          : opsize_bytes(opsize));
4699         break;
4700     case 4: /* Indirect predecrement.  */
4701         tcg_gen_mov_i32(AREG(insn, 0), addr);
4702         break;
4703     }
4704 }
4705 
4706 DISAS_INSN(move_to_sr)
4707 {
4708     if (IS_USER(s)) {
4709         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4710         return;
4711     }
4712     gen_move_to_sr(env, s, insn, false);
4713     gen_exit_tb(s);
4714 }
4715 
4716 DISAS_INSN(move_from_usp)
4717 {
4718     if (IS_USER(s)) {
4719         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4720         return;
4721     }
4722     tcg_gen_ld_i32(AREG(insn, 0), cpu_env,
4723                    offsetof(CPUM68KState, sp[M68K_USP]));
4724 }
4725 
4726 DISAS_INSN(move_to_usp)
4727 {
4728     if (IS_USER(s)) {
4729         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4730         return;
4731     }
4732     tcg_gen_st_i32(AREG(insn, 0), cpu_env,
4733                    offsetof(CPUM68KState, sp[M68K_USP]));
4734 }
4735 
4736 DISAS_INSN(halt)
4737 {
4738     if (IS_USER(s)) {
4739         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4740         return;
4741     }
4742 
4743     gen_exception(s, s->pc, EXCP_HALT_INSN);
4744 }
4745 
4746 DISAS_INSN(stop)
4747 {
4748     uint16_t ext;
4749 
4750     if (IS_USER(s)) {
4751         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4752         return;
4753     }
4754 
4755     ext = read_im16(env, s);
4756 
4757     gen_set_sr_im(s, ext, 0);
4758     tcg_gen_movi_i32(cpu_halted, 1);
4759     gen_exception(s, s->pc, EXCP_HLT);
4760 }
4761 
4762 DISAS_INSN(rte)
4763 {
4764     if (IS_USER(s)) {
4765         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4766         return;
4767     }
4768     gen_exception(s, s->base.pc_next, EXCP_RTE);
4769 }
4770 
4771 DISAS_INSN(cf_movec)
4772 {
4773     uint16_t ext;
4774     TCGv reg;
4775 
4776     if (IS_USER(s)) {
4777         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4778         return;
4779     }
4780 
4781     ext = read_im16(env, s);
4782 
4783     if (ext & 0x8000) {
4784         reg = AREG(ext, 12);
4785     } else {
4786         reg = DREG(ext, 12);
4787     }
4788     gen_helper_cf_movec_to(cpu_env, tcg_const_i32(ext & 0xfff), reg);
4789     gen_exit_tb(s);
4790 }
4791 
4792 DISAS_INSN(m68k_movec)
4793 {
4794     uint16_t ext;
4795     TCGv reg;
4796 
4797     if (IS_USER(s)) {
4798         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4799         return;
4800     }
4801 
4802     ext = read_im16(env, s);
4803 
4804     if (ext & 0x8000) {
4805         reg = AREG(ext, 12);
4806     } else {
4807         reg = DREG(ext, 12);
4808     }
4809     if (insn & 1) {
4810         gen_helper_m68k_movec_to(cpu_env, tcg_const_i32(ext & 0xfff), reg);
4811     } else {
4812         gen_helper_m68k_movec_from(reg, cpu_env, tcg_const_i32(ext & 0xfff));
4813     }
4814     gen_exit_tb(s);
4815 }
4816 
4817 DISAS_INSN(intouch)
4818 {
4819     if (IS_USER(s)) {
4820         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4821         return;
4822     }
4823     /* ICache fetch.  Implement as no-op.  */
4824 }
4825 
4826 DISAS_INSN(cpushl)
4827 {
4828     if (IS_USER(s)) {
4829         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4830         return;
4831     }
4832     /* Cache push/invalidate.  Implement as no-op.  */
4833 }
4834 
4835 DISAS_INSN(cpush)
4836 {
4837     if (IS_USER(s)) {
4838         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4839         return;
4840     }
4841     /* Cache push/invalidate.  Implement as no-op.  */
4842 }
4843 
4844 DISAS_INSN(cinv)
4845 {
4846     if (IS_USER(s)) {
4847         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4848         return;
4849     }
4850     /* Invalidate cache line.  Implement as no-op.  */
4851 }
4852 
4853 #if defined(CONFIG_SOFTMMU)
4854 DISAS_INSN(pflush)
4855 {
4856     TCGv opmode;
4857 
4858     if (IS_USER(s)) {
4859         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4860         return;
4861     }
4862 
4863     opmode = tcg_const_i32((insn >> 3) & 3);
4864     gen_helper_pflush(cpu_env, AREG(insn, 0), opmode);
4865     tcg_temp_free(opmode);
4866 }
4867 
4868 DISAS_INSN(ptest)
4869 {
4870     TCGv is_read;
4871 
4872     if (IS_USER(s)) {
4873         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4874         return;
4875     }
4876     is_read = tcg_const_i32((insn >> 5) & 1);
4877     gen_helper_ptest(cpu_env, AREG(insn, 0), is_read);
4878     tcg_temp_free(is_read);
4879 }
4880 #endif
4881 
4882 DISAS_INSN(wddata)
4883 {
4884     gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4885 }
4886 
4887 DISAS_INSN(wdebug)
4888 {
4889     if (IS_USER(s)) {
4890         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4891         return;
4892     }
4893     /* TODO: Implement wdebug.  */
4894     cpu_abort(env_cpu(env), "WDEBUG not implemented");
4895 }
4896 #endif
4897 
4898 DISAS_INSN(trap)
4899 {
4900     gen_exception(s, s->base.pc_next, EXCP_TRAP0 + (insn & 0xf));
4901 }
4902 
4903 static void gen_load_fcr(DisasContext *s, TCGv res, int reg)
4904 {
4905     switch (reg) {
4906     case M68K_FPIAR:
4907         tcg_gen_movi_i32(res, 0);
4908         break;
4909     case M68K_FPSR:
4910         tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpsr));
4911         break;
4912     case M68K_FPCR:
4913         tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpcr));
4914         break;
4915     }
4916 }
4917 
4918 static void gen_store_fcr(DisasContext *s, TCGv val, int reg)
4919 {
4920     switch (reg) {
4921     case M68K_FPIAR:
4922         break;
4923     case M68K_FPSR:
4924         tcg_gen_st_i32(val, cpu_env, offsetof(CPUM68KState, fpsr));
4925         break;
4926     case M68K_FPCR:
4927         gen_helper_set_fpcr(cpu_env, val);
4928         break;
4929     }
4930 }
4931 
4932 static void gen_qemu_store_fcr(DisasContext *s, TCGv addr, int reg)
4933 {
4934     int index = IS_USER(s);
4935     TCGv tmp;
4936 
4937     tmp = tcg_temp_new();
4938     gen_load_fcr(s, tmp, reg);
4939     tcg_gen_qemu_st32(tmp, addr, index);
4940     tcg_temp_free(tmp);
4941 }
4942 
4943 static void gen_qemu_load_fcr(DisasContext *s, TCGv addr, int reg)
4944 {
4945     int index = IS_USER(s);
4946     TCGv tmp;
4947 
4948     tmp = tcg_temp_new();
4949     tcg_gen_qemu_ld32u(tmp, addr, index);
4950     gen_store_fcr(s, tmp, reg);
4951     tcg_temp_free(tmp);
4952 }
4953 
4954 
4955 static void gen_op_fmove_fcr(CPUM68KState *env, DisasContext *s,
4956                              uint32_t insn, uint32_t ext)
4957 {
4958     int mask = (ext >> 10) & 7;
4959     int is_write = (ext >> 13) & 1;
4960     int mode = extract32(insn, 3, 3);
4961     int i;
4962     TCGv addr, tmp;
4963 
4964     switch (mode) {
4965     case 0: /* Dn */
4966         if (mask != M68K_FPIAR && mask != M68K_FPSR && mask != M68K_FPCR) {
4967             gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4968             return;
4969         }
4970         if (is_write) {
4971             gen_load_fcr(s, DREG(insn, 0), mask);
4972         } else {
4973             gen_store_fcr(s, DREG(insn, 0), mask);
4974         }
4975         return;
4976     case 1: /* An, only with FPIAR */
4977         if (mask != M68K_FPIAR) {
4978             gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4979             return;
4980         }
4981         if (is_write) {
4982             gen_load_fcr(s, AREG(insn, 0), mask);
4983         } else {
4984             gen_store_fcr(s, AREG(insn, 0), mask);
4985         }
4986         return;
4987     case 7: /* Immediate */
4988         if (REG(insn, 0) == 4) {
4989             if (is_write ||
4990                 (mask != M68K_FPIAR && mask != M68K_FPSR &&
4991                  mask != M68K_FPCR)) {
4992                 gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4993                 return;
4994             }
4995             tmp = tcg_const_i32(read_im32(env, s));
4996             gen_store_fcr(s, tmp, mask);
4997             tcg_temp_free(tmp);
4998             return;
4999         }
5000         break;
5001     default:
5002         break;
5003     }
5004 
5005     tmp = gen_lea(env, s, insn, OS_LONG);
5006     if (IS_NULL_QREG(tmp)) {
5007         gen_addr_fault(s);
5008         return;
5009     }
5010 
5011     addr = tcg_temp_new();
5012     tcg_gen_mov_i32(addr, tmp);
5013 
5014     /*
5015      * mask:
5016      *
5017      * 0b100 Floating-Point Control Register
5018      * 0b010 Floating-Point Status Register
5019      * 0b001 Floating-Point Instruction Address Register
5020      *
5021      */
5022 
5023     if (is_write && mode == 4) {
5024         for (i = 2; i >= 0; i--, mask >>= 1) {
5025             if (mask & 1) {
5026                 gen_qemu_store_fcr(s, addr, 1 << i);
5027                 if (mask != 1) {
5028                     tcg_gen_subi_i32(addr, addr, opsize_bytes(OS_LONG));
5029                 }
5030             }
5031         }
5032         tcg_gen_mov_i32(AREG(insn, 0), addr);
5033     } else {
5034         for (i = 0; i < 3; i++, mask >>= 1) {
5035             if (mask & 1) {
5036                 if (is_write) {
5037                     gen_qemu_store_fcr(s, addr, 1 << i);
5038                 } else {
5039                     gen_qemu_load_fcr(s, addr, 1 << i);
5040                 }
5041                 if (mask != 1 || mode == 3) {
5042                     tcg_gen_addi_i32(addr, addr, opsize_bytes(OS_LONG));
5043                 }
5044             }
5045         }
5046         if (mode == 3) {
5047             tcg_gen_mov_i32(AREG(insn, 0), addr);
5048         }
5049     }
5050     tcg_temp_free_i32(addr);
5051 }
5052 
5053 static void gen_op_fmovem(CPUM68KState *env, DisasContext *s,
5054                           uint32_t insn, uint32_t ext)
5055 {
5056     int opsize;
5057     TCGv addr, tmp;
5058     int mode = (ext >> 11) & 0x3;
5059     int is_load = ((ext & 0x2000) == 0);
5060 
5061     if (m68k_feature(s->env, M68K_FEATURE_FPU)) {
5062         opsize = OS_EXTENDED;
5063     } else {
5064         opsize = OS_DOUBLE;  /* FIXME */
5065     }
5066 
5067     addr = gen_lea(env, s, insn, opsize);
5068     if (IS_NULL_QREG(addr)) {
5069         gen_addr_fault(s);
5070         return;
5071     }
5072 
5073     tmp = tcg_temp_new();
5074     if (mode & 0x1) {
5075         /* Dynamic register list */
5076         tcg_gen_ext8u_i32(tmp, DREG(ext, 4));
5077     } else {
5078         /* Static register list */
5079         tcg_gen_movi_i32(tmp, ext & 0xff);
5080     }
5081 
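    /*
     * The fmovem helpers take the register mask in tmp and return the
     * updated address, which is written back to An below for the (An)+
     * and -(An) forms.
     */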
5082     if (!is_load && (mode & 2) == 0) {
5083         /*
5084          * Predecrement addressing mode is only available when
5085          * storing registers to memory.
5086          */
5087         if (opsize == OS_EXTENDED) {
5088             gen_helper_fmovemx_st_predec(tmp, cpu_env, addr, tmp);
5089         } else {
5090             gen_helper_fmovemd_st_predec(tmp, cpu_env, addr, tmp);
5091         }
5092     } else {
5093         /* postincrement addressing mode */
5094         if (opsize == OS_EXTENDED) {
5095             if (is_load) {
5096                 gen_helper_fmovemx_ld_postinc(tmp, cpu_env, addr, tmp);
5097             } else {
5098                 gen_helper_fmovemx_st_postinc(tmp, cpu_env, addr, tmp);
5099             }
5100         } else {
5101             if (is_load) {
5102                 gen_helper_fmovemd_ld_postinc(tmp, cpu_env, addr, tmp);
5103             } else {
5104                 gen_helper_fmovemd_st_postinc(tmp, cpu_env, addr, tmp);
5105             }
5106         }
5107     }
5108     if ((insn & 070) == 030 || (insn & 070) == 040) {
5109         tcg_gen_mov_i32(AREG(insn, 0), tmp);
5110     }
5111     tcg_temp_free(tmp);
5112 }
5113 
5114 /*
5115  * ??? FP exceptions are not implemented.  Most exceptions are deferred until
5116  * immediately before the next FP instruction is executed.
5117  */
5118 DISAS_INSN(fpu)
5119 {
5120     uint16_t ext;
5121     int opmode;
5122     int opsize;
5123     TCGv_ptr cpu_src, cpu_dest;
5124 
5125     ext = read_im16(env, s);
5126     opmode = ext & 0x7f;
5127     switch ((ext >> 13) & 7) {
5128     case 0:
5129         break;
5130     case 1:
5131         goto undef;
5132     case 2:
5133         if (insn == 0xf200 && (ext & 0xfc00) == 0x5c00) {
5134             /* fmovecr */
5135             TCGv rom_offset = tcg_const_i32(opmode);
5136             cpu_dest = gen_fp_ptr(REG(ext, 7));
5137             gen_helper_fconst(cpu_env, cpu_dest, rom_offset);
5138             tcg_temp_free_ptr(cpu_dest);
5139             tcg_temp_free(rom_offset);
5140             return;
5141         }
5142         break;
5143     case 3: /* fmove out */
5144         cpu_src = gen_fp_ptr(REG(ext, 7));
5145         opsize = ext_opsize(ext, 10);
5146         if (gen_ea_fp(env, s, insn, opsize, cpu_src,
5147                       EA_STORE, IS_USER(s)) == -1) {
5148             gen_addr_fault(s);
5149         }
5150         gen_helper_ftst(cpu_env, cpu_src);
5151         tcg_temp_free_ptr(cpu_src);
5152         return;
5153     case 4: /* fmove to control register.  */
5154     case 5: /* fmove from control register.  */
5155         gen_op_fmove_fcr(env, s, insn, ext);
5156         return;
5157     case 6: /* fmovem */
5158     case 7:
5159         if ((ext & 0x1000) == 0 && !m68k_feature(s->env, M68K_FEATURE_FPU)) {
5160             goto undef;
5161         }
5162         gen_op_fmovem(env, s, insn, ext);
5163         return;
5164     }
5165     if (ext & (1 << 14)) {
5166         /* Source effective address.  */
5167         opsize = ext_opsize(ext, 10);
5168         cpu_src = gen_fp_result_ptr();
5169         if (gen_ea_fp(env, s, insn, opsize, cpu_src,
5170                       EA_LOADS, IS_USER(s)) == -1) {
5171             gen_addr_fault(s);
5172             return;
5173         }
5174     } else {
5175         /* Source register.  */
5176         opsize = OS_EXTENDED;
5177         cpu_src = gen_fp_ptr(REG(ext, 10));
5178     }
5179     cpu_dest = gen_fp_ptr(REG(ext, 7));
5180     switch (opmode) {
5181     case 0: /* fmove */
5182         gen_fp_move(cpu_dest, cpu_src);
5183         break;
5184     case 0x40: /* fsmove */
5185         gen_helper_fsround(cpu_env, cpu_dest, cpu_src);
5186         break;
5187     case 0x44: /* fdmove */
5188         gen_helper_fdround(cpu_env, cpu_dest, cpu_src);
5189         break;
5190     case 1: /* fint */
5191         gen_helper_firound(cpu_env, cpu_dest, cpu_src);
5192         break;
5193     case 2: /* fsinh */
5194         gen_helper_fsinh(cpu_env, cpu_dest, cpu_src);
5195         break;
5196     case 3: /* fintrz */
5197         gen_helper_fitrunc(cpu_env, cpu_dest, cpu_src);
5198         break;
5199     case 4: /* fsqrt */
5200         gen_helper_fsqrt(cpu_env, cpu_dest, cpu_src);
5201         break;
5202     case 0x41: /* fssqrt */
5203         gen_helper_fssqrt(cpu_env, cpu_dest, cpu_src);
5204         break;
5205     case 0x45: /* fdsqrt */
5206         gen_helper_fdsqrt(cpu_env, cpu_dest, cpu_src);
5207         break;
5208     case 0x06: /* flognp1 */
5209         gen_helper_flognp1(cpu_env, cpu_dest, cpu_src);
5210         break;
5211     case 0x08: /* fetoxm1 */
5212         gen_helper_fetoxm1(cpu_env, cpu_dest, cpu_src);
5213         break;
5214     case 0x09: /* ftanh */
5215         gen_helper_ftanh(cpu_env, cpu_dest, cpu_src);
5216         break;
5217     case 0x0a: /* fatan */
5218         gen_helper_fatan(cpu_env, cpu_dest, cpu_src);
5219         break;
5220     case 0x0c: /* fasin */
5221         gen_helper_fasin(cpu_env, cpu_dest, cpu_src);
5222         break;
5223     case 0x0d: /* fatanh */
5224         gen_helper_fatanh(cpu_env, cpu_dest, cpu_src);
5225         break;
5226     case 0x0e: /* fsin */
5227         gen_helper_fsin(cpu_env, cpu_dest, cpu_src);
5228         break;
5229     case 0x0f: /* ftan */
5230         gen_helper_ftan(cpu_env, cpu_dest, cpu_src);
5231         break;
5232     case 0x10: /* fetox */
5233         gen_helper_fetox(cpu_env, cpu_dest, cpu_src);
5234         break;
5235     case 0x11: /* ftwotox */
5236         gen_helper_ftwotox(cpu_env, cpu_dest, cpu_src);
5237         break;
5238     case 0x12: /* ftentox */
5239         gen_helper_ftentox(cpu_env, cpu_dest, cpu_src);
5240         break;
5241     case 0x14: /* flogn */
5242         gen_helper_flogn(cpu_env, cpu_dest, cpu_src);
5243         break;
5244     case 0x15: /* flog10 */
5245         gen_helper_flog10(cpu_env, cpu_dest, cpu_src);
5246         break;
5247     case 0x16: /* flog2 */
5248         gen_helper_flog2(cpu_env, cpu_dest, cpu_src);
5249         break;
5250     case 0x18: /* fabs */
5251         gen_helper_fabs(cpu_env, cpu_dest, cpu_src);
5252         break;
5253     case 0x58: /* fsabs */
5254         gen_helper_fsabs(cpu_env, cpu_dest, cpu_src);
5255         break;
5256     case 0x5c: /* fdabs */
5257         gen_helper_fdabs(cpu_env, cpu_dest, cpu_src);
5258         break;
5259     case 0x19: /* fcosh */
5260         gen_helper_fcosh(cpu_env, cpu_dest, cpu_src);
5261         break;
5262     case 0x1a: /* fneg */
5263         gen_helper_fneg(cpu_env, cpu_dest, cpu_src);
5264         break;
5265     case 0x5a: /* fsneg */
5266         gen_helper_fsneg(cpu_env, cpu_dest, cpu_src);
5267         break;
5268     case 0x5e: /* fdneg */
5269         gen_helper_fdneg(cpu_env, cpu_dest, cpu_src);
5270         break;
5271     case 0x1c: /* facos */
5272         gen_helper_facos(cpu_env, cpu_dest, cpu_src);
5273         break;
5274     case 0x1d: /* fcos */
5275         gen_helper_fcos(cpu_env, cpu_dest, cpu_src);
5276         break;
5277     case 0x1e: /* fgetexp */
5278         gen_helper_fgetexp(cpu_env, cpu_dest, cpu_src);
5279         break;
5280     case 0x1f: /* fgetman */
5281         gen_helper_fgetman(cpu_env, cpu_dest, cpu_src);
5282         break;
5283     case 0x20: /* fdiv */
5284         gen_helper_fdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5285         break;
5286     case 0x60: /* fsdiv */
5287         gen_helper_fsdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5288         break;
5289     case 0x64: /* fddiv */
5290         gen_helper_fddiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5291         break;
5292     case 0x21: /* fmod */
5293         gen_helper_fmod(cpu_env, cpu_dest, cpu_src, cpu_dest);
5294         break;
5295     case 0x22: /* fadd */
5296         gen_helper_fadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5297         break;
5298     case 0x62: /* fsadd */
5299         gen_helper_fsadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5300         break;
5301     case 0x66: /* fdadd */
5302         gen_helper_fdadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5303         break;
5304     case 0x23: /* fmul */
5305         gen_helper_fmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5306         break;
5307     case 0x63: /* fsmul */
5308         gen_helper_fsmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5309         break;
5310     case 0x67: /* fdmul */
5311         gen_helper_fdmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5312         break;
5313     case 0x24: /* fsgldiv */
5314         gen_helper_fsgldiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5315         break;
5316     case 0x25: /* frem */
5317         gen_helper_frem(cpu_env, cpu_dest, cpu_src, cpu_dest);
5318         break;
5319     case 0x26: /* fscale */
5320         gen_helper_fscale(cpu_env, cpu_dest, cpu_src, cpu_dest);
5321         break;
5322     case 0x27: /* fsglmul */
5323         gen_helper_fsglmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5324         break;
5325     case 0x28: /* fsub */
5326         gen_helper_fsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5327         break;
5328     case 0x68: /* fssub */
5329         gen_helper_fssub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5330         break;
5331     case 0x6c: /* fdsub */
5332         gen_helper_fdsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5333         break;
5334     case 0x30: case 0x31: case 0x32:
5335     case 0x33: case 0x34: case 0x35:
5336     case 0x36: case 0x37: {
5337             TCGv_ptr cpu_dest2 = gen_fp_ptr(REG(ext, 0));
5338             gen_helper_fsincos(cpu_env, cpu_dest, cpu_dest2, cpu_src);
5339             tcg_temp_free_ptr(cpu_dest2);
5340         }
5341         break;
5342     case 0x38: /* fcmp */
5343         gen_helper_fcmp(cpu_env, cpu_src, cpu_dest);
5344         return;
5345     case 0x3a: /* ftst */
5346         gen_helper_ftst(cpu_env, cpu_src);
5347         return;
5348     default:
5349         goto undef;
5350     }
5351     tcg_temp_free_ptr(cpu_src);
5352     gen_helper_ftst(cpu_env, cpu_dest);
5353     tcg_temp_free_ptr(cpu_dest);
5354     return;
5355 undef:
5356     /* FIXME: Is this right for offset addressing modes?  */
5357     s->pc -= 2;
5358     disas_undef_fpu(env, s, insn);
5359 }
5360 
5361 static void gen_fcc_cond(DisasCompare *c, DisasContext *s, int cond)
5362 {
5363     TCGv fpsr;
5364 
5365     c->g1 = 1;
5366     c->v2 = tcg_const_i32(0);
5367     c->g2 = 0;
5368     /* TODO: Raise BSUN exception.  */
5369     fpsr = tcg_temp_new();
5370     gen_load_fcr(s, fpsr, M68K_FPSR);
5371     switch (cond) {
5372     case 0:  /* False */
5373     case 16: /* Signaling False */
5374         c->v1 = c->v2;
5375         c->tcond = TCG_COND_NEVER;
5376         break;
5377     case 1:  /* EQual Z */
5378     case 17: /* Signaling EQual Z */
5379         c->v1 = tcg_temp_new();
5380         c->g1 = 0;
5381         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5382         c->tcond = TCG_COND_NE;
5383         break;
5384     case 2:  /* Ordered Greater Than !(A || Z || N) */
5385     case 18: /* Greater Than !(A || Z || N) */
5386         c->v1 = tcg_temp_new();
5387         c->g1 = 0;
5388         tcg_gen_andi_i32(c->v1, fpsr,
5389                          FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5390         c->tcond = TCG_COND_EQ;
5391         break;
5392     case 3:  /* Ordered Greater than or Equal Z || !(A || N) */
5393     case 19: /* Greater than or Equal Z || !(A || N) */
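        /*
         * Shift A into the N bit position, OR in Z and N, then flip the N
         * position: the result is non-zero exactly when Z || !(A || N).
         */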
5394         c->v1 = tcg_temp_new();
5395         c->g1 = 0;
5396         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5397         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
5398         tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_Z | FPSR_CC_N);
5399         tcg_gen_or_i32(c->v1, c->v1, fpsr);
5400         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5401         c->tcond = TCG_COND_NE;
5402         break;
5403     case 4:  /* Ordered Less Than !(!N || A || Z) */
5404     case 20: /* Less Than !(!N || A || Z) */
5405         c->v1 = tcg_temp_new();
5406         c->g1 = 0;
5407         tcg_gen_xori_i32(c->v1, fpsr, FPSR_CC_N);
5408         tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_N | FPSR_CC_A | FPSR_CC_Z);
5409         c->tcond = TCG_COND_EQ;
5410         break;
5411     case 5:  /* Ordered Less than or Equal Z || (N && !A) */
5412     case 21: /* Less than or Equal Z || (N && !A) */
5413         c->v1 = tcg_temp_new();
5414         c->g1 = 0;
5415         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5416         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
5417         tcg_gen_andc_i32(c->v1, fpsr, c->v1);
5418         tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_Z | FPSR_CC_N);
5419         c->tcond = TCG_COND_NE;
5420         break;
5421     case 6:  /* Ordered Greater or Less than !(A || Z) */
5422     case 22: /* Greater or Less than !(A || Z) */
5423         c->v1 = tcg_temp_new();
5424         c->g1 = 0;
5425         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
5426         c->tcond = TCG_COND_EQ;
5427         break;
5428     case 7:  /* Ordered !A */
5429     case 23: /* Greater, Less or Equal !A */
5430         c->v1 = tcg_temp_new();
5431         c->g1 = 0;
5432         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5433         c->tcond = TCG_COND_EQ;
5434         break;
5435     case 8:  /* Unordered A */
5436     case 24: /* Not Greater, Less or Equal A */
5437         c->v1 = tcg_temp_new();
5438         c->g1 = 0;
5439         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5440         c->tcond = TCG_COND_NE;
5441         break;
5442     case 9:  /* Unordered or Equal A || Z */
5443     case 25: /* Not Greater or Less than A || Z */
5444         c->v1 = tcg_temp_new();
5445         c->g1 = 0;
5446         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
5447         c->tcond = TCG_COND_NE;
5448         break;
5449     case 10: /* Unordered or Greater Than A || !(N || Z) */
5450     case 26: /* Not Less or Equal A || !(N || Z) */
5451         c->v1 = tcg_temp_new();
5452         c->g1 = 0;
5453         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5454         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
5455         tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_A | FPSR_CC_N);
5456         tcg_gen_or_i32(c->v1, c->v1, fpsr);
5457         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5458         c->tcond = TCG_COND_NE;
5459         break;
5460     case 11: /* Unordered or Greater or Equal A || Z || !N */
5461     case 27: /* Not Less Than A || Z || !N */
5462         c->v1 = tcg_temp_new();
5463         c->g1 = 0;
5464         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5465         tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5466         c->tcond = TCG_COND_NE;
5467         break;
5468     case 12: /* Unordered or Less Than A || (N && !Z) */
5469     case 28: /* Not Greater than or Equal A || (N && !Z) */
5470         c->v1 = tcg_temp_new();
5471         c->g1 = 0;
5472         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5473         tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
5474         tcg_gen_andc_i32(c->v1, fpsr, c->v1);
5475         tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_A | FPSR_CC_N);
5476         c->tcond = TCG_COND_NE;
5477         break;
5478     case 13: /* Unordered or Less or Equal A || Z || N */
5479     case 29: /* Not Greater Than A || Z || N */
5480         c->v1 = tcg_temp_new();
5481         c->g1 = 0;
5482         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5483         c->tcond = TCG_COND_NE;
5484         break;
5485     case 14: /* Not Equal !Z */
5486     case 30: /* Signaling Not Equal !Z */
5487         c->v1 = tcg_temp_new();
5488         c->g1 = 0;
5489         tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5490         c->tcond = TCG_COND_EQ;
5491         break;
5492     case 15: /* True */
5493     case 31: /* Signaling True */
5494         c->v1 = c->v2;
5495         c->tcond = TCG_COND_ALWAYS;
5496         break;
5497     }
5498     tcg_temp_free(fpsr);
5499 }
5500 
5501 static void gen_fjmpcc(DisasContext *s, int cond, TCGLabel *l1)
5502 {
5503     DisasCompare c;
5504 
5505     gen_fcc_cond(&c, s, cond);
5506     update_cc_op(s);
5507     tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
5508     free_cond(&c);
5509 }
5510 
5511 DISAS_INSN(fbcc)
5512 {
5513     uint32_t offset;
5514     uint32_t base;
5515     TCGLabel *l1;
5516 
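    /* The displacement is relative to the first extension word. */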
5517     base = s->pc;
5518     offset = (int16_t)read_im16(env, s);
5519     if (insn & (1 << 6)) {
5520         offset = (offset << 16) | read_im16(env, s);
5521     }
5522 
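         /*
          * gen_fjmpcc() branches to l1 when the FP condition holds: the
          * fall-through path continues at the next insn, the taken path
          * at base + offset, both via gen_jmp_tb().
          */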
5523     l1 = gen_new_label();
5524     update_cc_op(s);
5525     gen_fjmpcc(s, insn & 0x3f, l1);
5526     gen_jmp_tb(s, 0, s->pc);
5527     gen_set_label(l1);
5528     gen_jmp_tb(s, 1, base + offset);
5529 }
5530 
5531 DISAS_INSN(fscc)
5532 {
5533     DisasCompare c;
5534     int cond;
5535     TCGv tmp;
5536     uint16_t ext;
5537 
5538     ext = read_im16(env, s);
5539     cond = ext & 0x3f;
5540     gen_fcc_cond(&c, s, cond);
5541 
5542     tmp = tcg_temp_new();
5543     tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
5544     free_cond(&c);
5545 
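         /*
          * Scc stores all ones (0xff) when the condition is true; negating
          * the 0/1 setcond result gives 0x00 or 0xff in the low byte.
          */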
5546     tcg_gen_neg_i32(tmp, tmp);
5547     DEST_EA(env, insn, OS_BYTE, tmp, NULL);
5548     tcg_temp_free(tmp);
5549 }
5550 
5551 #if defined(CONFIG_SOFTMMU)
5552 DISAS_INSN(frestore)
5553 {
5554     TCGv addr;
5555 
5556     if (IS_USER(s)) {
5557         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
5558         return;
5559     }
5560     if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5561         SRC_EA(env, addr, OS_LONG, 0, NULL);
5562         /* FIXME: check the state frame */
5563     } else {
5564         disas_undef(env, s, insn);
5565     }
5566 }
5567 
5568 DISAS_INSN(fsave)
5569 {
5570     if (IS_USER(s)) {
5571         gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
5572         return;
5573     }
5574 
5575     if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5576         /* Always write an IDLE state frame.  */
5577         TCGv idle = tcg_const_i32(0x41000000);
5578         DEST_EA(env, insn, OS_LONG, idle, NULL);
5579         tcg_temp_free(idle);
5580     } else {
5581         disas_undef(env, s, insn);
5582     }
5583 }
5584 #endif
5585 
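     /*
      * Extract one 16-bit MAC operand from VAL: fractional mode (MACSR_FI)
      * places the selected halfword in the upper 16 bits, signed integer
      * mode (MACSR_SU) sign-extends it, and unsigned mode zero-extends it.
      */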
5586 static inline TCGv gen_mac_extract_word(DisasContext *s, TCGv val, int upper)
5587 {
5588     TCGv tmp = tcg_temp_new();
5589     if (s->env->macsr & MACSR_FI) {
5590         if (upper)
5591             tcg_gen_andi_i32(tmp, val, 0xffff0000);
5592         else
5593             tcg_gen_shli_i32(tmp, val, 16);
5594     } else if (s->env->macsr & MACSR_SU) {
5595         if (upper)
5596             tcg_gen_sari_i32(tmp, val, 16);
5597         else
5598             tcg_gen_ext16s_i32(tmp, val);
5599     } else {
5600         if (upper)
5601             tcg_gen_shri_i32(tmp, val, 16);
5602         else
5603             tcg_gen_ext16u_i32(tmp, val);
5604     }
5605     return tmp;
5606 }
5607 
5608 static void gen_mac_clear_flags(void)
5609 {
5610     tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR,
5611                      ~(MACSR_V | MACSR_Z | MACSR_N | MACSR_EV));
5612 }
5613 
5614 DISAS_INSN(mac)
5615 {
5616     TCGv rx;
5617     TCGv ry;
5618     uint16_t ext;
5619     int acc;
5620     TCGv tmp;
5621     TCGv addr;
5622     TCGv loadval;
5623     int dual;
5624     TCGv saved_flags;
5625 
5626     if (!s->done_mac) {
5627         s->mactmp = tcg_temp_new_i64();
5628         s->done_mac = 1;
5629     }
5630 
5631     ext = read_im16(env, s);
5632 
5633     acc = ((insn >> 7) & 1) | ((ext >> 3) & 2);
5634     dual = ((insn & 0x30) != 0 && (ext & 3) != 0);
5635     if (dual && !m68k_feature(s->env, M68K_FEATURE_CF_EMAC_B)) {
5636         disas_undef(env, s, insn);
5637         return;
5638     }
5639     if (insn & 0x30) {
5640         /* MAC with load.  */
5641         tmp = gen_lea(env, s, insn, OS_LONG);
5642         addr = tcg_temp_new();
5643         tcg_gen_and_i32(addr, tmp, QREG_MAC_MASK);
5644         /*
5645          * Load the value now to ensure correct exception behavior.
5646          * Perform writeback after reading the MAC inputs.
5647          */
5648         loadval = gen_load(s, OS_LONG, addr, 0, IS_USER(s));
5649 
5650         acc ^= 1;
5651         rx = (ext & 0x8000) ? AREG(ext, 12) : DREG(insn, 12);
5652         ry = (ext & 8) ? AREG(ext, 0) : DREG(ext, 0);
5653     } else {
5654         loadval = addr = NULL_QREG;
5655         rx = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
5656         ry = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5657     }
5658 
5659     gen_mac_clear_flags();
5660 #if 0
5661     l1 = -1;
5662     /* Disabled because conditional branches clobber temporary vars.  */
5663     if ((s->env->macsr & MACSR_OMC) != 0 && !dual) {
5664         /* Skip the multiply if we know we will ignore it.  */
5665         l1 = gen_new_label();
5666         tmp = tcg_temp_new();
5667         tcg_gen_andi_i32(tmp, QREG_MACSR, 1 << (acc + 8));
5668         gen_op_jmp_nz32(tmp, l1);
5669     }
5670 #endif
5671 
5672     if ((ext & 0x0800) == 0) {
5673         /* Word.  */
5674         rx = gen_mac_extract_word(s, rx, (ext & 0x80) != 0);
5675         ry = gen_mac_extract_word(s, ry, (ext & 0x40) != 0);
5676     }
5677     if (s->env->macsr & MACSR_FI) {
5678         gen_helper_macmulf(s->mactmp, cpu_env, rx, ry);
5679     } else {
5680         if (s->env->macsr & MACSR_SU)
5681             gen_helper_macmuls(s->mactmp, cpu_env, rx, ry);
5682         else
5683             gen_helper_macmulu(s->mactmp, cpu_env, rx, ry);
5684         switch ((ext >> 9) & 3) {
5685         case 1:
5686             tcg_gen_shli_i64(s->mactmp, s->mactmp, 1);
5687             break;
5688         case 3:
5689             tcg_gen_shri_i64(s->mactmp, s->mactmp, 1);
5690             break;
5691         }
5692     }
5693 
5694     if (dual) {
5695         /* Save the overflow flag from the multiply.  */
5696         saved_flags = tcg_temp_new();
5697         tcg_gen_mov_i32(saved_flags, QREG_MACSR);
5698     } else {
5699         saved_flags = NULL_QREG;
5700     }
5701 
5702 #if 0
5703     /* Disabled because conditional branches clobber temporary vars.  */
5704     if ((s->env->macsr & MACSR_OMC) != 0 && dual) {
5705         /* Skip the accumulate if the value is already saturated.  */
5706         l1 = gen_new_label();
5707         tmp = tcg_temp_new();
5708         gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
5709         gen_op_jmp_nz32(tmp, l1);
5710     }
5711 #endif
5712 
5713     if (insn & 0x100)
5714         tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
5715     else
5716         tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
5717 
5718     if (s->env->macsr & MACSR_FI)
5719         gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
5720     else if (s->env->macsr & MACSR_SU)
5721         gen_helper_macsats(cpu_env, tcg_const_i32(acc));
5722     else
5723         gen_helper_macsatu(cpu_env, tcg_const_i32(acc));
5724 
5725 #if 0
5726     /* Disabled because conditional branches clobber temporary vars.  */
5727     if (l1 != -1)
5728         gen_set_label(l1);
5729 #endif
5730 
5731     if (dual) {
5732         /* Dual accumulate variant.  */
5733         acc = (ext >> 2) & 3;
5734         /* Restore the overflow flag from the multiplier.  */
5735         tcg_gen_mov_i32(QREG_MACSR, saved_flags);
5736 #if 0
5737         /* Disabled because conditional branches clobber temporary vars.  */
5738         if ((s->env->macsr & MACSR_OMC) != 0) {
5739             /* Skip the accumulate if the value is already saturated.  */
5740             l1 = gen_new_label();
5741             tmp = tcg_temp_new();
5742             gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
5743             gen_op_jmp_nz32(tmp, l1);
5744         }
5745 #endif
5746         if (ext & 2)
5747             tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
5748         else
5749             tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
5750         if (s->env->macsr & MACSR_FI)
5751             gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
5752         else if (s->env->macsr & MACSR_SU)
5753             gen_helper_macsats(cpu_env, tcg_const_i32(acc));
5754         else
5755             gen_helper_macsatu(cpu_env, tcg_const_i32(acc));
5756 #if 0
5757         /* Disabled because conditional branches clobber temporary vars.  */
5758         if (l1 != -1)
5759             gen_set_label(l1);
5760 #endif
5761     }
5762     gen_helper_mac_set_flags(cpu_env, tcg_const_i32(acc));
5763 
5764     if (insn & 0x30) {
5765         TCGv rw;
5766         rw = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
5767         tcg_gen_mov_i32(rw, loadval);
5768         /*
5769          * FIXME: Should address writeback happen with the masked or
5770          * unmasked value?
5771          */
5772         switch ((insn >> 3) & 7) {
5773         case 3: /* Post-increment.  */
5774             tcg_gen_addi_i32(AREG(insn, 0), addr, 4);
5775             break;
5776         case 4: /* Pre-decrement.  */
5777             tcg_gen_mov_i32(AREG(insn, 0), addr);
5778         }
5779         tcg_temp_free(loadval);
5780     }
5781 }
5782 
5783 DISAS_INSN(from_mac)
5784 {
5785     TCGv rx;
5786     TCGv_i64 acc;
5787     int accnum;
5788 
5789     rx = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5790     accnum = (insn >> 9) & 3;
5791     acc = MACREG(accnum);
5792     if (s->env->macsr & MACSR_FI) {
5793         gen_helper_get_macf(rx, cpu_env, acc);
5794     } else if ((s->env->macsr & MACSR_OMC) == 0) {
5795         tcg_gen_extrl_i64_i32(rx, acc);
5796     } else if (s->env->macsr & MACSR_SU) {
5797         gen_helper_get_macs(rx, acc);
5798     } else {
5799         gen_helper_get_macu(rx, acc);
5800     }
5801     if (insn & 0x40) {
5802         tcg_gen_movi_i64(acc, 0);
5803         tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5804     }
5805 }
5806 
5807 DISAS_INSN(move_mac)
5808 {
5809     /* FIXME: This can be done without a helper.  */
5810     int src;
5811     TCGv dest;
5812     src = insn & 3;
5813     dest = tcg_const_i32((insn >> 9) & 3);
5814     gen_helper_mac_move(cpu_env, dest, tcg_const_i32(src));
5815     gen_mac_clear_flags();
5816     gen_helper_mac_set_flags(cpu_env, dest);
5817 }
5818 
5819 DISAS_INSN(from_macsr)
5820 {
5821     TCGv reg;
5822 
5823     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5824     tcg_gen_mov_i32(reg, QREG_MACSR);
5825 }
5826 
5827 DISAS_INSN(from_mask)
5828 {
5829     TCGv reg;
5830     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5831     tcg_gen_mov_i32(reg, QREG_MAC_MASK);
5832 }
5833 
5834 DISAS_INSN(from_mext)
5835 {
5836     TCGv reg;
5837     TCGv acc;
5838     reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5839     acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
5840     if (s->env->macsr & MACSR_FI)
5841         gen_helper_get_mac_extf(reg, cpu_env, acc);
5842     else
5843         gen_helper_get_mac_exti(reg, cpu_env, acc);
5844 }
5845 
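     /*
      * MACSR's low four bits use the same N/Z/V/C layout as the CCR, so
      * the masked value can be passed to the flag-setting helper as is.
      */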
5846 DISAS_INSN(macsr_to_ccr)
5847 {
5848     TCGv tmp = tcg_temp_new();
5849     tcg_gen_andi_i32(tmp, QREG_MACSR, 0xf);
5850     gen_helper_set_sr(cpu_env, tmp);
5851     tcg_temp_free(tmp);
5852     set_cc_op(s, CC_OP_FLAGS);
5853 }
5854 
5855 DISAS_INSN(to_mac)
5856 {
5857     TCGv_i64 acc;
5858     TCGv val;
5859     int accnum;
5860     accnum = (insn >> 9) & 3;
5861     acc = MACREG(accnum);
5862     SRC_EA(env, val, OS_LONG, 0, NULL);
5863     if (s->env->macsr & MACSR_FI) {
5864         tcg_gen_ext_i32_i64(acc, val);
5865         tcg_gen_shli_i64(acc, acc, 8);
5866     } else if (s->env->macsr & MACSR_SU) {
5867         tcg_gen_ext_i32_i64(acc, val);
5868     } else {
5869         tcg_gen_extu_i32_i64(acc, val);
5870     }
5871     tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5872     gen_mac_clear_flags();
5873     gen_helper_mac_set_flags(cpu_env, tcg_const_i32(accnum));
5874 }
5875 
5876 DISAS_INSN(to_macsr)
5877 {
5878     TCGv val;
5879     SRC_EA(env, val, OS_LONG, 0, NULL);
5880     gen_helper_set_macsr(cpu_env, val);
5881     gen_exit_tb(s);
5882 }
5883 
5884 DISAS_INSN(to_mask)
5885 {
5886     TCGv val;
5887     SRC_EA(env, val, OS_LONG, 0, NULL);
5888     tcg_gen_ori_i32(QREG_MAC_MASK, val, 0xffff0000);
5889 }
5890 
5891 DISAS_INSN(to_mext)
5892 {
5893     TCGv val;
5894     TCGv acc;
5895     SRC_EA(env, val, OS_LONG, 0, NULL);
5896     acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
5897     if (s->env->macsr & MACSR_FI)
5898         gen_helper_set_mac_extf(cpu_env, val, acc);
5899     else if (s->env->macsr & MACSR_SU)
5900         gen_helper_set_mac_exts(cpu_env, val, acc);
5901     else
5902         gen_helper_set_mac_extu(cpu_env, val, acc);
5903 }
5904 
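     /* One handler per 16-bit opcode word; filled by register_m68k_insns().  */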
5905 static disas_proc opcode_table[65536];
5906 
5907 static void
5908 register_opcode (disas_proc proc, uint16_t opcode, uint16_t mask)
5909 {
5910   int i;
5911   int from;
5912   int to;
5913 
5914   /* Sanity check.  All set bits must be included in the mask.  */
5915   if (opcode & ~mask) {
5916       fprintf(stderr,
5917               "qemu internal error: bogus opcode definition %04x/%04x\n",
5918               opcode, mask);
5919       abort();
5920   }
5921   /*
5922    * This could probably be cleverer.  For now just optimize the case where
5923    * the top bits are known.
5924    */
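       /*
        * Example: registering LEA via register_opcode(disas_lea, 0x41c0,
        * 0xf1c0).  The first zero mask bit from the top is bit 11, so the
        * loop below walks the 4096 entries in [0x4000, 0x5000) and fills
        * the 512 that match 0x41c0 under the mask (8 An fields times
        * 64 EA fields).
        */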
5925   /* Find the first zero bit in the mask.  */
5926   i = 0x8000;
5927   while ((i & mask) != 0)
5928       i >>= 1;
5929   /* Iterate over all combinations of this and lower bits.  */
5930   if (i == 0)
5931       i = 1;
5932   else
5933       i <<= 1;
5934   from = opcode & ~(i - 1);
5935   to = from + i;
5936   for (i = from; i < to; i++) {
5937       if ((i & mask) == opcode)
5938           opcode_table[i] = proc;
5939   }
5940 }
5941 
5942 /*
5943  * Register m68k opcode handlers.  Order is important.
5944  * Later insns override earlier ones.
5945  */
5946 void register_m68k_insns (CPUM68KState *env)
5947 {
5948     /*
5949      * Build the opcode table only once to avoid
5950      * multithreading issues.
5951      */
5952     if (opcode_table[0] != NULL) {
5953         return;
5954     }
5955 
5956     /*
5957      * Use BASE() for instructions available for
5958      * both CF_ISA_A and M68000.
5959      */
5960 #define BASE(name, opcode, mask) \
5961     register_opcode(disas_##name, 0x##opcode, 0x##mask)
5962 #define INSN(name, opcode, mask, feature) do { \
5963     if (m68k_feature(env, M68K_FEATURE_##feature)) \
5964         BASE(name, opcode, mask); \
5965     } while(0)
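         /*
          * For example, INSN(scc, 50c0, f0f8, CF_ISA_A) registers disas_scc
          * for every opcode matching 0x50c0 under mask 0xf0f8, but only
          * when the CPU model has the CF_ISA_A feature.
          */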
5966     BASE(undef,     0000, 0000);
5967     INSN(arith_im,  0080, fff8, CF_ISA_A);
5968     INSN(arith_im,  0000, ff00, M68000);
5969     INSN(chk2,      00c0, f9c0, CHK2);
5970     INSN(bitrev,    00c0, fff8, CF_ISA_APLUSC);
5971     BASE(bitop_reg, 0100, f1c0);
5972     BASE(bitop_reg, 0140, f1c0);
5973     BASE(bitop_reg, 0180, f1c0);
5974     BASE(bitop_reg, 01c0, f1c0);
5975     INSN(movep,     0108, f138, MOVEP);
5976     INSN(arith_im,  0280, fff8, CF_ISA_A);
5977     INSN(arith_im,  0200, ff00, M68000);
5978     INSN(undef,     02c0, ffc0, M68000);
5979     INSN(byterev,   02c0, fff8, CF_ISA_APLUSC);
5980     INSN(arith_im,  0480, fff8, CF_ISA_A);
5981     INSN(arith_im,  0400, ff00, M68000);
5982     INSN(undef,     04c0, ffc0, M68000);
5983     INSN(arith_im,  0600, ff00, M68000);
5984     INSN(undef,     06c0, ffc0, M68000);
5985     INSN(ff1,       04c0, fff8, CF_ISA_APLUSC);
5986     INSN(arith_im,  0680, fff8, CF_ISA_A);
5987     INSN(arith_im,  0c00, ff38, CF_ISA_A);
5988     INSN(arith_im,  0c00, ff00, M68000);
5989     BASE(bitop_im,  0800, ffc0);
5990     BASE(bitop_im,  0840, ffc0);
5991     BASE(bitop_im,  0880, ffc0);
5992     BASE(bitop_im,  08c0, ffc0);
5993     INSN(arith_im,  0a80, fff8, CF_ISA_A);
5994     INSN(arith_im,  0a00, ff00, M68000);
5995 #if defined(CONFIG_SOFTMMU)
5996     INSN(moves,     0e00, ff00, M68000);
5997 #endif
5998     INSN(cas,       0ac0, ffc0, CAS);
5999     INSN(cas,       0cc0, ffc0, CAS);
6000     INSN(cas,       0ec0, ffc0, CAS);
6001     INSN(cas2w,     0cfc, ffff, CAS);
6002     INSN(cas2l,     0efc, ffff, CAS);
6003     BASE(move,      1000, f000);
6004     BASE(move,      2000, f000);
6005     BASE(move,      3000, f000);
6006     INSN(chk,       4000, f040, M68000);
6007     INSN(strldsr,   40e7, ffff, CF_ISA_APLUSC);
6008     INSN(negx,      4080, fff8, CF_ISA_A);
6009     INSN(negx,      4000, ff00, M68000);
6010     INSN(undef,     40c0, ffc0, M68000);
6011     INSN(move_from_sr, 40c0, fff8, CF_ISA_A);
6012     INSN(move_from_sr, 40c0, ffc0, M68000);
6013     BASE(lea,       41c0, f1c0);
6014     BASE(clr,       4200, ff00);
6015     BASE(undef,     42c0, ffc0);
6016     INSN(move_from_ccr, 42c0, fff8, CF_ISA_A);
6017     INSN(move_from_ccr, 42c0, ffc0, M68000);
6018     INSN(neg,       4480, fff8, CF_ISA_A);
6019     INSN(neg,       4400, ff00, M68000);
6020     INSN(undef,     44c0, ffc0, M68000);
6021     BASE(move_to_ccr, 44c0, ffc0);
6022     INSN(not,       4680, fff8, CF_ISA_A);
6023     INSN(not,       4600, ff00, M68000);
6024 #if defined(CONFIG_SOFTMMU)
6025     BASE(move_to_sr, 46c0, ffc0);
6026 #endif
6027     INSN(nbcd,      4800, ffc0, M68000);
6028     INSN(linkl,     4808, fff8, M68000);
6029     BASE(pea,       4840, ffc0);
6030     BASE(swap,      4840, fff8);
6031     INSN(bkpt,      4848, fff8, BKPT);
6032     INSN(movem,     48d0, fbf8, CF_ISA_A);
6033     INSN(movem,     48e8, fbf8, CF_ISA_A);
6034     INSN(movem,     4880, fb80, M68000);
6035     BASE(ext,       4880, fff8);
6036     BASE(ext,       48c0, fff8);
6037     BASE(ext,       49c0, fff8);
6038     BASE(tst,       4a00, ff00);
6039     INSN(tas,       4ac0, ffc0, CF_ISA_B);
6040     INSN(tas,       4ac0, ffc0, M68000);
6041 #if defined(CONFIG_SOFTMMU)
6042     INSN(halt,      4ac8, ffff, CF_ISA_A);
6043 #endif
6044     INSN(pulse,     4acc, ffff, CF_ISA_A);
6045     BASE(illegal,   4afc, ffff);
6046     INSN(mull,      4c00, ffc0, CF_ISA_A);
6047     INSN(mull,      4c00, ffc0, LONG_MULDIV);
6048     INSN(divl,      4c40, ffc0, CF_ISA_A);
6049     INSN(divl,      4c40, ffc0, LONG_MULDIV);
6050     INSN(sats,      4c80, fff8, CF_ISA_B);
6051     BASE(trap,      4e40, fff0);
6052     BASE(link,      4e50, fff8);
6053     BASE(unlk,      4e58, fff8);
6054 #if defined(CONFIG_SOFTMMU)
6055     INSN(move_to_usp, 4e60, fff8, USP);
6056     INSN(move_from_usp, 4e68, fff8, USP);
6057     INSN(reset,     4e70, ffff, M68000);
6058     BASE(stop,      4e72, ffff);
6059     BASE(rte,       4e73, ffff);
6060     INSN(cf_movec,  4e7b, ffff, CF_ISA_A);
6061     INSN(m68k_movec, 4e7a, fffe, MOVEC);
6062 #endif
6063     BASE(nop,       4e71, ffff);
6064     INSN(rtd,       4e74, ffff, RTD);
6065     BASE(rts,       4e75, ffff);
6066     INSN(rtr,       4e77, ffff, M68000);
6067     BASE(jump,      4e80, ffc0);
6068     BASE(jump,      4ec0, ffc0);
6069     INSN(addsubq,   5000, f080, M68000);
6070     BASE(addsubq,   5080, f0c0);
6071     INSN(scc,       50c0, f0f8, CF_ISA_A); /* Scc.B Dx   */
6072     INSN(scc,       50c0, f0c0, M68000);   /* Scc.B <EA> */
6073     INSN(dbcc,      50c8, f0f8, M68000);
6074     INSN(tpf,       51f8, fff8, CF_ISA_A);
6075 
6076     /* Branch instructions.  */
6077     BASE(branch,    6000, f000);
6078     /* Disable long branch instructions, then add back the ones we want.  */
6079     BASE(undef,     60ff, f0ff); /* All long branches.  */
6080     INSN(branch,    60ff, f0ff, CF_ISA_B);
6081     INSN(undef,     60ff, ffff, CF_ISA_B); /* bra.l */
6082     INSN(branch,    60ff, ffff, BRAL);
6083     INSN(branch,    60ff, f0ff, BCCL);
6084 
6085     BASE(moveq,     7000, f100);
6086     INSN(mvzs,      7100, f100, CF_ISA_B);
6087     BASE(or,        8000, f000);
6088     BASE(divw,      80c0, f0c0);
6089     INSN(sbcd_reg,  8100, f1f8, M68000);
6090     INSN(sbcd_mem,  8108, f1f8, M68000);
6091     BASE(addsub,    9000, f000);
6092     INSN(undef,     90c0, f0c0, CF_ISA_A);
6093     INSN(subx_reg,  9180, f1f8, CF_ISA_A);
6094     INSN(subx_reg,  9100, f138, M68000);
6095     INSN(subx_mem,  9108, f138, M68000);
6096     INSN(suba,      91c0, f1c0, CF_ISA_A);
6097     INSN(suba,      90c0, f0c0, M68000);
6098 
6099     BASE(undef_mac, a000, f000);
6100     INSN(mac,       a000, f100, CF_EMAC);
6101     INSN(from_mac,  a180, f9b0, CF_EMAC);
6102     INSN(move_mac,  a110, f9fc, CF_EMAC);
6103     INSN(from_macsr,a980, f9f0, CF_EMAC);
6104     INSN(from_mask, ad80, fff0, CF_EMAC);
6105     INSN(from_mext, ab80, fbf0, CF_EMAC);
6106     INSN(macsr_to_ccr, a9c0, ffff, CF_EMAC);
6107     INSN(to_mac,    a100, f9c0, CF_EMAC);
6108     INSN(to_macsr,  a900, ffc0, CF_EMAC);
6109     INSN(to_mext,   ab00, fbc0, CF_EMAC);
6110     INSN(to_mask,   ad00, ffc0, CF_EMAC);
6111 
6112     INSN(mov3q,     a140, f1c0, CF_ISA_B);
6113     INSN(cmp,       b000, f1c0, CF_ISA_B); /* cmp.b */
6114     INSN(cmp,       b040, f1c0, CF_ISA_B); /* cmp.w */
6115     INSN(cmpa,      b0c0, f1c0, CF_ISA_B); /* cmpa.w */
6116     INSN(cmp,       b080, f1c0, CF_ISA_A);
6117     INSN(cmpa,      b1c0, f1c0, CF_ISA_A);
6118     INSN(cmp,       b000, f100, M68000);
6119     INSN(eor,       b100, f100, M68000);
6120     INSN(cmpm,      b108, f138, M68000);
6121     INSN(cmpa,      b0c0, f0c0, M68000);
6122     INSN(eor,       b180, f1c0, CF_ISA_A);
6123     BASE(and,       c000, f000);
6124     INSN(exg_dd,    c140, f1f8, M68000);
6125     INSN(exg_aa,    c148, f1f8, M68000);
6126     INSN(exg_da,    c188, f1f8, M68000);
6127     BASE(mulw,      c0c0, f0c0);
6128     INSN(abcd_reg,  c100, f1f8, M68000);
6129     INSN(abcd_mem,  c108, f1f8, M68000);
6130     BASE(addsub,    d000, f000);
6131     INSN(undef,     d0c0, f0c0, CF_ISA_A);
6132     INSN(addx_reg,      d180, f1f8, CF_ISA_A);
6133     INSN(addx_reg,  d100, f138, M68000);
6134     INSN(addx_mem,  d108, f138, M68000);
6135     INSN(adda,      d1c0, f1c0, CF_ISA_A);
6136     INSN(adda,      d0c0, f0c0, M68000);
6137     INSN(shift_im,  e080, f0f0, CF_ISA_A);
6138     INSN(shift_reg, e0a0, f0f0, CF_ISA_A);
6139     INSN(shift8_im, e000, f0f0, M68000);
6140     INSN(shift16_im, e040, f0f0, M68000);
6141     INSN(shift_im,  e080, f0f0, M68000);
6142     INSN(shift8_reg, e020, f0f0, M68000);
6143     INSN(shift16_reg, e060, f0f0, M68000);
6144     INSN(shift_reg, e0a0, f0f0, M68000);
6145     INSN(shift_mem, e0c0, fcc0, M68000);
6146     INSN(rotate_im, e090, f0f0, M68000);
6147     INSN(rotate8_im, e010, f0f0, M68000);
6148     INSN(rotate16_im, e050, f0f0, M68000);
6149     INSN(rotate_reg, e0b0, f0f0, M68000);
6150     INSN(rotate8_reg, e030, f0f0, M68000);
6151     INSN(rotate16_reg, e070, f0f0, M68000);
6152     INSN(rotate_mem, e4c0, fcc0, M68000);
6153     INSN(bfext_mem, e9c0, fdc0, BITFIELD);  /* bfextu & bfexts */
6154     INSN(bfext_reg, e9c0, fdf8, BITFIELD);
6155     INSN(bfins_mem, efc0, ffc0, BITFIELD);
6156     INSN(bfins_reg, efc0, fff8, BITFIELD);
6157     INSN(bfop_mem, eac0, ffc0, BITFIELD);   /* bfchg */
6158     INSN(bfop_reg, eac0, fff8, BITFIELD);   /* bfchg */
6159     INSN(bfop_mem, ecc0, ffc0, BITFIELD);   /* bfclr */
6160     INSN(bfop_reg, ecc0, fff8, BITFIELD);   /* bfclr */
6161     INSN(bfop_mem, edc0, ffc0, BITFIELD);   /* bfffo */
6162     INSN(bfop_reg, edc0, fff8, BITFIELD);   /* bfffo */
6163     INSN(bfop_mem, eec0, ffc0, BITFIELD);   /* bfset */
6164     INSN(bfop_reg, eec0, fff8, BITFIELD);   /* bfset */
6165     INSN(bfop_mem, e8c0, ffc0, BITFIELD);   /* bftst */
6166     INSN(bfop_reg, e8c0, fff8, BITFIELD);   /* bftst */
6167     BASE(undef_fpu, f000, f000);
6168     INSN(fpu,       f200, ffc0, CF_FPU);
6169     INSN(fbcc,      f280, ffc0, CF_FPU);
6170     INSN(fpu,       f200, ffc0, FPU);
6171     INSN(fscc,      f240, ffc0, FPU);
6172     INSN(fbcc,      f280, ff80, FPU);
6173 #if defined(CONFIG_SOFTMMU)
6174     INSN(frestore,  f340, ffc0, CF_FPU);
6175     INSN(fsave,     f300, ffc0, CF_FPU);
6176     INSN(frestore,  f340, ffc0, FPU);
6177     INSN(fsave,     f300, ffc0, FPU);
6178     INSN(intouch,   f340, ffc0, CF_ISA_A);
6179     INSN(cpushl,    f428, ff38, CF_ISA_A);
6180     INSN(cpush,     f420, ff20, M68040);
6181     INSN(cinv,      f400, ff20, M68040);
6182     INSN(pflush,    f500, ffe0, M68040);
6183     INSN(ptest,     f548, ffd8, M68040);
6184     INSN(wddata,    fb00, ff00, CF_ISA_A);
6185     INSN(wdebug,    fbc0, ffc0, CF_ISA_A);
6186 #endif
6187     INSN(move16_mem, f600, ffe0, M68040);
6188     INSN(move16_reg, f620, fff8, M68040);
6189 #undef INSN
6190 }
6191 
6192 static void m68k_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cpu)
6193 {
6194     DisasContext *dc = container_of(dcbase, DisasContext, base);
6195     CPUM68KState *env = cpu->env_ptr;
6196 
6197     dc->env = env;
6198     dc->pc = dc->base.pc_first;
6199     dc->cc_op = CC_OP_DYNAMIC;
6200     dc->cc_op_synced = 1;
6201     dc->done_mac = 0;
6202     dc->writeback_mask = 0;
6203     init_release_array(dc);
6204 
6205     dc->ss_active = (M68K_SR_TRACE(env->sr) == M68K_SR_TRACE_ANY_INS);
6206     /* If architectural single-step is active, limit the TB to one insn.  */
6207     if (is_singlestepping(dc)) {
6208         dc->base.max_insns = 1;
6209     }
6210 }
6211 
6212 static void m68k_tr_tb_start(DisasContextBase *dcbase, CPUState *cpu)
6213 {
6214 }
6215 
6216 static void m68k_tr_insn_start(DisasContextBase *dcbase, CPUState *cpu)
6217 {
6218     DisasContext *dc = container_of(dcbase, DisasContext, base);
6219     tcg_gen_insn_start(dc->base.pc_next, dc->cc_op);
6220 }
6221 
6222 static bool m68k_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cpu,
6223                                      const CPUBreakpoint *bp)
6224 {
6225     DisasContext *dc = container_of(dcbase, DisasContext, base);
6226 
6227     gen_exception(dc, dc->base.pc_next, EXCP_DEBUG);
6228     /*
6229      * The address covered by the breakpoint must be included in
6230      * [tb->pc, tb->pc + tb->size) in order for it to be
6231      * properly cleared -- thus we increment the PC here so that
6232      * the logic setting tb->size below does the right thing.
6233      */
6234     dc->base.pc_next += 2;
6235 
6236     return true;
6237 }
6238 
6239 static void m68k_tr_translate_insn(DisasContextBase *dcbase, CPUState *cpu)
6240 {
6241     DisasContext *dc = container_of(dcbase, DisasContext, base);
6242     CPUM68KState *env = cpu->env_ptr;
6243     uint16_t insn = read_im16(env, dc);
6244 
6245     opcode_table[insn](env, dc, insn);
6246     do_writebacks(dc);
6247     do_release(dc);
6248 
6249     dc->base.pc_next = dc->pc;
6250 
6251     if (dc->base.is_jmp == DISAS_NEXT) {
6252         /*
6253          * Stop translation when the next insn might touch a new page.
6254          * This ensures that prefetch aborts at the right place.
6255          *
6256          * We cannot determine the size of the next insn without
6257          * completely decoding it.  However, the maximum insn size
6258          * is 32 bytes, so end if we do not have that much remaining.
6259          * This may produce several small TBs at the end of each page,
6260          * but they will all be linked with goto_tb.
6261          *
6262          * ??? ColdFire maximum is 4 bytes; MC68000's maximum is also
6263          * smaller than MC68020's.
6264          */
6265         target_ulong start_page_offset
6266             = dc->pc - (dc->base.pc_first & TARGET_PAGE_MASK);
6267 
6268         if (start_page_offset >= TARGET_PAGE_SIZE - 32) {
6269             dc->base.is_jmp = DISAS_TOO_MANY;
6270         }
6271     }
6272 }
6273 
6274 static void m68k_tr_tb_stop(DisasContextBase *dcbase, CPUState *cpu)
6275 {
6276     DisasContext *dc = container_of(dcbase, DisasContext, base);
6277 
6278     switch (dc->base.is_jmp) {
6279     case DISAS_NORETURN:
6280         break;
6281     case DISAS_TOO_MANY:
6282         update_cc_op(dc);
6283         if (is_singlestepping(dc)) {
6284             tcg_gen_movi_i32(QREG_PC, dc->pc);
6285             gen_singlestep_exception(dc);
6286         } else {
6287             gen_jmp_tb(dc, 0, dc->pc);
6288         }
6289         break;
6290     case DISAS_JUMP:
6291         /* We updated CC_OP and PC in gen_jmp/gen_jmp_im.  */
6292         if (is_singlestepping(dc)) {
6293             gen_singlestep_exception(dc);
6294         } else {
6295             tcg_gen_lookup_and_goto_ptr();
6296         }
6297         break;
6298     case DISAS_EXIT:
6299         /*
6300          * We updated CC_OP and PC in gen_exit_tb, but also modified
6301          * other state that may require returning to the main loop.
6302          */
6303         if (is_singlestepping(dc)) {
6304             gen_singlestep_exception(dc);
6305         } else {
6306             tcg_gen_exit_tb(NULL, 0);
6307         }
6308         break;
6309     default:
6310         g_assert_not_reached();
6311     }
6312 }
6313 
6314 static void m68k_tr_disas_log(const DisasContextBase *dcbase, CPUState *cpu)
6315 {
6316     qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
6317     log_target_disas(cpu, dcbase->pc_first, dcbase->tb->size);
6318 }
6319 
6320 static const TranslatorOps m68k_tr_ops = {
6321     .init_disas_context = m68k_tr_init_disas_context,
6322     .tb_start           = m68k_tr_tb_start,
6323     .insn_start         = m68k_tr_insn_start,
6324     .breakpoint_check   = m68k_tr_breakpoint_check,
6325     .translate_insn     = m68k_tr_translate_insn,
6326     .tb_stop            = m68k_tr_tb_stop,
6327     .disas_log          = m68k_tr_disas_log,
6328 };
6329 
6330 void gen_intermediate_code(CPUState *cpu, TranslationBlock *tb, int max_insns)
6331 {
6332     DisasContext dc;
6333     translator_loop(&m68k_tr_ops, &dc.base, cpu, tb, max_insns);
6334 }
6335 
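     /*
      * For register dumps only: convert the 80-bit value to float64 and
      * reinterpret its bit pattern as a host double.
      */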
6336 static double floatx80_to_double(CPUM68KState *env, uint16_t high, uint64_t low)
6337 {
6338     floatx80 a = { .high = high, .low = low };
6339     union {
6340         float64 f64;
6341         double d;
6342     } u;
6343 
6344     u.f64 = floatx80_to_float64(a, &env->fp_status);
6345     return u.d;
6346 }
6347 
6348 void m68k_cpu_dump_state(CPUState *cs, FILE *f, int flags)
6349 {
6350     M68kCPU *cpu = M68K_CPU(cs);
6351     CPUM68KState *env = &cpu->env;
6352     int i;
6353     uint16_t sr;
6354     for (i = 0; i < 8; i++) {
6355         qemu_fprintf(f, "D%d = %08x   A%d = %08x   "
6356                      "F%d = %04x %016"PRIx64"  (%12g)\n",
6357                      i, env->dregs[i], i, env->aregs[i],
6358                      i, env->fregs[i].l.upper, env->fregs[i].l.lower,
6359                      floatx80_to_double(env, env->fregs[i].l.upper,
6360                                         env->fregs[i].l.lower));
6361     }
6362     qemu_fprintf(f, "PC = %08x   ", env->pc);
6363     sr = env->sr | cpu_m68k_get_ccr(env);
6364     qemu_fprintf(f, "SR = %04x T:%x I:%x %c%c %c%c%c%c%c\n",
6365                  sr, (sr & SR_T) >> SR_T_SHIFT, (sr & SR_I) >> SR_I_SHIFT,
6366                  (sr & SR_S) ? 'S' : 'U', (sr & SR_M) ? '%' : 'I',
6367                  (sr & CCF_X) ? 'X' : '-', (sr & CCF_N) ? 'N' : '-',
6368                  (sr & CCF_Z) ? 'Z' : '-', (sr & CCF_V) ? 'V' : '-',
6369                  (sr & CCF_C) ? 'C' : '-');
6370     qemu_fprintf(f, "FPSR = %08x %c%c%c%c ", env->fpsr,
6371                  (env->fpsr & FPSR_CC_A) ? 'A' : '-',
6372                  (env->fpsr & FPSR_CC_I) ? 'I' : '-',
6373                  (env->fpsr & FPSR_CC_Z) ? 'Z' : '-',
6374                  (env->fpsr & FPSR_CC_N) ? 'N' : '-');
6375     qemu_fprintf(f, "\n                                "
6376                  "FPCR =     %04x ", env->fpcr);
6377     switch (env->fpcr & FPCR_PREC_MASK) {
6378     case FPCR_PREC_X:
6379         qemu_fprintf(f, "X ");
6380         break;
6381     case FPCR_PREC_S:
6382         qemu_fprintf(f, "S ");
6383         break;
6384     case FPCR_PREC_D:
6385         qemu_fprintf(f, "D ");
6386         break;
6387     }
6388     switch (env->fpcr & FPCR_RND_MASK) {
6389     case FPCR_RND_N:
6390         qemu_fprintf(f, "RN ");
6391         break;
6392     case FPCR_RND_Z:
6393         qemu_fprintf(f, "RZ ");
6394         break;
6395     case FPCR_RND_M:
6396         qemu_fprintf(f, "RM ");
6397         break;
6398     case FPCR_RND_P:
6399         qemu_fprintf(f, "RP ");
6400         break;
6401     }
6402     qemu_fprintf(f, "\n");
6403 #ifdef CONFIG_SOFTMMU
6404     qemu_fprintf(f, "%sA7(MSP) = %08x %sA7(USP) = %08x %sA7(ISP) = %08x\n",
6405                  env->current_sp == M68K_SSP ? "->" : "  ", env->sp[M68K_SSP],
6406                  env->current_sp == M68K_USP ? "->" : "  ", env->sp[M68K_USP],
6407                  env->current_sp == M68K_ISP ? "->" : "  ", env->sp[M68K_ISP]);
6408     qemu_fprintf(f, "VBR = 0x%08x\n", env->vbr);
6409     qemu_fprintf(f, "SFC = %x DFC %x\n", env->sfc, env->dfc);
6410     qemu_fprintf(f, "SSW %08x TCR %08x URP %08x SRP %08x\n",
6411                  env->mmu.ssw, env->mmu.tcr, env->mmu.urp, env->mmu.srp);
6412     qemu_fprintf(f, "DTTR0/1: %08x/%08x ITTR0/1: %08x/%08x\n",
6413                  env->mmu.ttr[M68K_DTTR0], env->mmu.ttr[M68K_DTTR1],
6414                  env->mmu.ttr[M68K_ITTR0], env->mmu.ttr[M68K_ITTR1]);
6415     qemu_fprintf(f, "MMUSR %08x, fault at %08x\n",
6416                  env->mmu.mmusr, env->mmu.ar);
6417 #endif
6418 }
6419 
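     /* data[0] is the PC and data[1] the cc_op recorded by tcg_gen_insn_start().  */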
6420 void restore_state_to_opc(CPUM68KState *env, TranslationBlock *tb,
6421                           target_ulong *data)
6422 {
6423     int cc_op = data[1];
6424     env->pc = data[0];
6425     if (cc_op != CC_OP_DYNAMIC) {
6426         env->cc_op = cc_op;
6427     }
6428 }
6429