xref: /qemu/tcg/tci/tcg-target.c.inc (revision 5086c997)
/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/* TODO list:
 * - See TODO comments in code.
 */

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

/* Bitfield n...m (in 32 bit value). */
#define BITS(n, m) (((0xffffffffU << (31 - (n))) >> (31 - (n) + (m))) << (m))
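/* For example, BITS(7, 4) == 0x000000f0 (bits 7..4 set). */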

static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
{
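    /*
     * Constraint sets are named C_O<n>_I<m>(...) for <n> output and <m>
     * input operands: "r" means any register, "ri" means register or
     * immediate, and "0" ties the input to output operand 0.
     */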
    switch (op) {
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
    case INDEX_op_ext8s_i32:
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
    case INDEX_op_ext32s_i64:
    case INDEX_op_ext32u_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_add_i32:
    case INDEX_op_add_i64:
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i32:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i32:
    case INDEX_op_andc_i64:
    case INDEX_op_eqv_i32:
    case INDEX_op_eqv_i64:
    case INDEX_op_nand_i32:
    case INDEX_op_nand_i64:
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
    case INDEX_op_or_i32:
    case INDEX_op_or_i64:
    case INDEX_op_orc_i32:
    case INDEX_op_orc_i64:
    case INDEX_op_xor_i32:
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
        /* TODO: Does R, RI, RI result in faster code than R, R, RI? */
        return C_O1_I2(r, ri, ri);

    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
        return C_O1_I2(r, 0, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, ri);

    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
        return C_O1_I2(r, r, ri);

#if TCG_TARGET_REG_BITS == 32
    /* TODO: Support R, R, R, R, RI, RI? Will it be faster? */
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        return C_O2_I4(r, r, r, r, r, r);
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, ri, ri);
    case INDEX_op_mulu2_i32:
        return C_O2_I2(r, r, r, r);
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, ri, ri);
#endif

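    /*
     * qemu_ld/st need extra register operands when the guest address or
     * a 64-bit data value does not fit in one host register (64-bit
     * guest addresses and/or a 32-bit host).
     */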
    case INDEX_op_qemu_ld_i32:
        return (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS
                ? C_O1_I1(r, r)
                : C_O1_I2(r, r, r));
    case INDEX_op_qemu_ld_i64:
        return (TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r)
                : TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? C_O2_I1(r, r, r)
                : C_O2_I2(r, r, r, r));
    case INDEX_op_qemu_st_i32:
        return (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS
                ? C_O0_I2(r, r)
                : C_O0_I3(r, r, r));
    case INDEX_op_qemu_st_i64:
        return (TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r)
                : TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? C_O0_I3(r, r, r)
                : C_O0_I4(r, r, r, r));

    default:
        g_assert_not_reached();
    }
}

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
#if 0 /* used for TCG_REG_CALL_STACK */
    TCG_REG_R4,
#endif
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
#if TCG_TARGET_NB_REGS >= 16
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
#endif
};

#if MAX_OPC_PARAM_IARGS != 6
# error Fix needed, number of supported input arguments changed!
#endif

static const int tcg_target_call_iarg_regs[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
#if 0 /* used for TCG_REG_CALL_STACK */
    TCG_REG_R4,
#endif
    TCG_REG_R5,
    TCG_REG_R6,
#if TCG_TARGET_REG_BITS == 32
    /* 32 bit hosts need 2 * MAX_OPC_PARAM_IARGS registers. */
    TCG_REG_R7,
#if TCG_TARGET_NB_REGS >= 16
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
#else
# error Too few input registers available
#endif
#endif
};

static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_R0,
#if TCG_TARGET_REG_BITS == 32
    TCG_REG_R1
#endif
};

#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
#if TCG_TARGET_NB_REGS >= 16
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
#if TCG_TARGET_NB_REGS >= 32
    "r16",
    "r17",
    "r18",
    "r19",
    "r20",
    "r21",
    "r22",
    "r23",
    "r24",
    "r25",
    "r26",
    "r27",
    "r28",
    "r29",
    "r30",
    "r31"
#endif
#endif
};
#endif

static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    /* tcg_out_reloc always uses the same type, addend. */
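    /*
     * The relocation recorded by tci_out_label() is a native-sized
     * absolute address, so patching simply stores the resolved value.
     */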
    tcg_debug_assert(type == sizeof(tcg_target_long));
    tcg_debug_assert(addend == 0);
    tcg_debug_assert(value != 0);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_patch32(code_ptr, value);
    } else {
        tcg_patch64(code_ptr, value);
    }
    return true;
}

#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
/* Show current bytecode. Used by tcg interpreter. */
void tci_disas(uint8_t opc)
{
    const TCGOpDef *def = &tcg_op_defs[opc];
    fprintf(stderr, "TCG %s %u, %u, %u\n",
            def->name, def->nb_oargs, def->nb_iargs, def->nb_cargs);
}
#endif

/* Write value (native size). */
static void tcg_out_i(TCGContext *s, tcg_target_ulong v)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_out32(s, v);
    } else {
        tcg_out64(s, v);
    }
}

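/*
 * Each TCI instruction starts with a one-byte opcode and a one-byte total
 * length.  The length byte is written as 0 by tcg_out_op_t() and patched
 * afterwards via "old_code_ptr[1] = s->code_ptr - old_code_ptr".
 */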
/* Write opcode. */
static void tcg_out_op_t(TCGContext *s, TCGOpcode op)
{
    tcg_out8(s, op);
    tcg_out8(s, 0);
}

/* Write register. */
static void tcg_out_r(TCGContext *s, TCGArg t0)
{
    tcg_debug_assert(t0 < TCG_TARGET_NB_REGS);
    tcg_out8(s, t0);
}

/* Write register or constant (native size). */
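/* A register operand is encoded as a single register-number byte; a
   constant is the TCG_CONST marker byte followed by the immediate. */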
static void tcg_out_ri(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        tcg_debug_assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out_i(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}

/* Write register or constant (32 bit). */
static void tcg_out_ri32(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        tcg_debug_assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out32(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}

#if TCG_TARGET_REG_BITS == 64
/* Write register or constant (64 bit). */
static void tcg_out_ri64(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        tcg_debug_assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out64(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}
#endif

/* Write label. */
static void tci_out_label(TCGContext *s, TCGLabel *label)
{
    if (label->has_value) {
        tcg_out_i(s, label->u.value);
        tcg_debug_assert(label->u.value);
    } else {
        tcg_out_reloc(s, s->code_ptr, sizeof(tcg_target_ulong), label, 0);
        s->code_ptr += sizeof(tcg_target_ulong);
    }
}

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg1,
                       intptr_t arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_ld_i32);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        tcg_debug_assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_ld_i64);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        tcg_debug_assert(arg2 == (int32_t)arg2);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    tcg_debug_assert(ret != arg);
#if TCG_TARGET_REG_BITS == 32
    tcg_out_op_t(s, INDEX_op_mov_i32);
#else
    tcg_out_op_t(s, INDEX_op_mov_i64);
#endif
    tcg_out_r(s, ret);
    tcg_out_r(s, arg);
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
    return true;
}

static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg t0, tcg_target_long arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    uint32_t arg32 = arg;
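    /*
     * For TCG_TYPE_I32, or when a 64-bit value zero-extends from 32 bits,
     * use the shorter tci_movi_i32 encoding.
     */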
    if (type == TCG_TYPE_I32 || arg == arg32) {
        tcg_out_op_t(s, INDEX_op_tci_movi_i32);
        tcg_out_r(s, t0);
        tcg_out32(s, arg32);
    } else {
        tcg_debug_assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_tci_movi_i64);
        tcg_out_r(s, t0);
        tcg_out64(s, arg);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static inline void tcg_out_call(TCGContext *s, const tcg_insn_unit *arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    tcg_out_op_t(s, INDEX_op_call);
    tcg_out_ri(s, 1, (uintptr_t)arg);
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
                       const int *const_args)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, opc);

    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out64(s, args[0]);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_insn_offset) {
            /* Direct jump method. */
            /* Align for atomic patching and thread safety */
            s->code_ptr = QEMU_ALIGN_PTR_UP(s->code_ptr, 4);
            s->tb_jmp_insn_offset[args[0]] = tcg_current_code_size(s);
            tcg_out32(s, 0);
        } else {
            /* Indirect jump method. */
            TODO();
        }
        set_jmp_reset_offset(s, args[0]);
        break;
    case INDEX_op_br:
        tci_out_label(s, arg_label(args[0]));
        break;
    case INDEX_op_setcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_setcond2_i32:
        /* setcond2_i32 cond, t0, t1_low, t1_high, t2_low, t2_high */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out_ri32(s, const_args[4], args[4]);
        tcg_out8(s, args[5]);   /* condition */
        break;
#elif TCG_TARGET_REG_BITS == 64
    case INDEX_op_setcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#endif
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_debug_assert(args[2] == (int32_t)args[2]);
        tcg_out32(s, args[2]);
        break;
    case INDEX_op_add_i32:
    case INDEX_op_sub_i32:
    case INDEX_op_mul_i32:
    case INDEX_op_and_i32:
    case INDEX_op_andc_i32:     /* Optional (TCG_TARGET_HAS_andc_i32). */
    case INDEX_op_eqv_i32:      /* Optional (TCG_TARGET_HAS_eqv_i32). */
    case INDEX_op_nand_i32:     /* Optional (TCG_TARGET_HAS_nand_i32). */
    case INDEX_op_nor_i32:      /* Optional (TCG_TARGET_HAS_nor_i32). */
    case INDEX_op_or_i32:
    case INDEX_op_orc_i32:      /* Optional (TCG_TARGET_HAS_orc_i32). */
    case INDEX_op_xor_i32:
    case INDEX_op_shl_i32:
    case INDEX_op_shr_i32:
    case INDEX_op_sar_i32:
    case INDEX_op_rotl_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
    case INDEX_op_rotr_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
    case INDEX_op_deposit_i32:  /* Optional (TCG_TARGET_HAS_deposit_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_debug_assert(args[3] <= UINT8_MAX);
        tcg_out8(s, args[3]);
        tcg_debug_assert(args[4] <= UINT8_MAX);
        tcg_out8(s, args[4]);
        break;

#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_add_i64:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i64:     /* Optional (TCG_TARGET_HAS_andc_i64). */
    case INDEX_op_eqv_i64:      /* Optional (TCG_TARGET_HAS_eqv_i64). */
    case INDEX_op_nand_i64:     /* Optional (TCG_TARGET_HAS_nand_i64). */
    case INDEX_op_nor_i64:      /* Optional (TCG_TARGET_HAS_nor_i64). */
    case INDEX_op_or_i64:
    case INDEX_op_orc_i64:      /* Optional (TCG_TARGET_HAS_orc_i64). */
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
    case INDEX_op_rotr_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        break;
    case INDEX_op_deposit_i64:  /* Optional (TCG_TARGET_HAS_deposit_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_debug_assert(args[3] <= UINT8_MAX);
        tcg_out8(s, args[3]);
        tcg_debug_assert(args[4] <= UINT8_MAX);
        tcg_out8(s, args[4]);
        break;
    case INDEX_op_div_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_divu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_rem_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_remu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
        TODO();
        break;
    case INDEX_op_div2_i64:     /* Optional (TCG_TARGET_HAS_div2_i64). */
    case INDEX_op_divu2_i64:    /* Optional (TCG_TARGET_HAS_div2_i64). */
        TODO();
        break;
    case INDEX_op_brcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);           /* condition */
        tci_out_label(s, arg_label(args[3]));
        break;
    case INDEX_op_bswap16_i64:  /* Optional (TCG_TARGET_HAS_bswap16_i64). */
    case INDEX_op_bswap32_i64:  /* Optional (TCG_TARGET_HAS_bswap32_i64). */
    case INDEX_op_bswap64_i64:  /* Optional (TCG_TARGET_HAS_bswap64_i64). */
    case INDEX_op_not_i64:      /* Optional (TCG_TARGET_HAS_not_i64). */
    case INDEX_op_neg_i64:      /* Optional (TCG_TARGET_HAS_neg_i64). */
    case INDEX_op_ext8s_i64:    /* Optional (TCG_TARGET_HAS_ext8s_i64). */
    case INDEX_op_ext8u_i64:    /* Optional (TCG_TARGET_HAS_ext8u_i64). */
    case INDEX_op_ext16s_i64:   /* Optional (TCG_TARGET_HAS_ext16s_i64). */
    case INDEX_op_ext16u_i64:   /* Optional (TCG_TARGET_HAS_ext16u_i64). */
    case INDEX_op_ext32s_i64:   /* Optional (TCG_TARGET_HAS_ext32s_i64). */
    case INDEX_op_ext32u_i64:   /* Optional (TCG_TARGET_HAS_ext32u_i64). */
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
#endif /* TCG_TARGET_REG_BITS == 64 */
    case INDEX_op_neg_i32:      /* Optional (TCG_TARGET_HAS_neg_i32). */
    case INDEX_op_not_i32:      /* Optional (TCG_TARGET_HAS_not_i32). */
    case INDEX_op_ext8s_i32:    /* Optional (TCG_TARGET_HAS_ext8s_i32). */
    case INDEX_op_ext16s_i32:   /* Optional (TCG_TARGET_HAS_ext16s_i32). */
    case INDEX_op_ext8u_i32:    /* Optional (TCG_TARGET_HAS_ext8u_i32). */
    case INDEX_op_ext16u_i32:   /* Optional (TCG_TARGET_HAS_ext16u_i32). */
    case INDEX_op_bswap16_i32:  /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap32_i32:  /* Optional (TCG_TARGET_HAS_bswap32_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        break;
    case INDEX_op_div_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_divu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_rem_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_remu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
    case INDEX_op_div2_i32:     /* Optional (TCG_TARGET_HAS_div2_i32). */
    case INDEX_op_divu2_i32:    /* Optional (TCG_TARGET_HAS_div2_i32). */
        TODO();
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        tcg_out_r(s, args[4]);
        tcg_out_r(s, args[5]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out8(s, args[4]);           /* condition */
        tci_out_label(s, arg_label(args[5]));
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        break;
#endif
    case INDEX_op_brcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);           /* condition */
        tci_out_label(s, arg_label(args[3]));
        break;
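    /*
     * qemu_ld/st operands: value register(s), address register(s), then
     * the memory-operation index constant.  Extra registers hold the high
     * halves on 32-bit hosts and for 64-bit guest addresses.
     */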
    case INDEX_op_qemu_ld_i32:
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_qemu_ld_i64:
        tcg_out_r(s, *args++);
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_r(s, *args++);
        }
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_qemu_st_i32:
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_qemu_st_i64:
        tcg_out_r(s, *args++);
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_r(s, *args++);
        }
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_mb:
        break;
    case INDEX_op_mov_i32:  /* Always emitted via tcg_out_mov.  */
    case INDEX_op_mov_i64:
    case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
    default:
        tcg_abort();
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg, TCGReg arg1,
                       intptr_t arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_st_i32);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        tcg_debug_assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_st_i64);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
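    /* TCI has no store-of-constant: returning false makes the common code
       load the constant into a register and use tcg_out_st() instead. */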
    return false;
}

/* Test if a constant matches the constraint. */
static int tcg_target_const_match(tcg_target_long val, TCGType type,
                                  const TCGArgConstraint *arg_ct)
{
    /* Any constant is acceptable; callers only test for zero vs. non-zero,
       so returning the masked constraint bit is sufficient. */
    return arg_ct->ct & TCG_CT_CONST;
}

static void tcg_target_init(TCGContext *s)
{
#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
    const char *envval = getenv("DEBUG_TCG");
    if (envval) {
        qemu_set_log(strtol(envval, NULL, 0));
    }
#endif

    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* TODO: Which registers should be set here? */
    tcg_target_call_clobber_regs = BIT(TCG_TARGET_NB_REGS) - 1;

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* We use negative offsets from "sp" so that we can distinguish
       stores that might pretend to be call arguments.  */
    tcg_set_frame(s, TCG_REG_CALL_STACK,
                  -CPU_TEMP_BUF_NLONGS * sizeof(long),
                  CPU_TEMP_BUF_NLONGS * sizeof(long));
}

/* Generate global QEMU prologue and epilogue code. */
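/* There is nothing to generate for TCI: the bytecode emitted by this backend
   is run by the interpreter (tcg_qemu_tb_exec in tci.c), so no native
   prologue or epilogue is needed. */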
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
}