xref: /qemu/tcg/tcg-op.c (revision b21e2380)
1 /*
2  * Tiny Code Generator for QEMU
3  *
4  * Copyright (c) 2008 Fabrice Bellard
5  *
6  * Permission is hereby granted, free of charge, to any person obtaining a copy
7  * of this software and associated documentation files (the "Software"), to deal
8  * in the Software without restriction, including without limitation the rights
9  * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10  * copies of the Software, and to permit persons to whom the Software is
11  * furnished to do so, subject to the following conditions:
12  *
13  * The above copyright notice and this permission notice shall be included in
14  * all copies or substantial portions of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22  * THE SOFTWARE.
23  */
24 
25 #include "qemu/osdep.h"
26 #include "exec/exec-all.h"
27 #include "tcg/tcg.h"
28 #include "tcg/tcg-op.h"
29 #include "tcg/tcg-mo.h"
30 #include "exec/plugin-gen.h"
31 
32 /* Reduce the number of ifdefs below.  This assumes that all uses of
33    TCGV_HIGH and TCGV_LOW are properly protected by a conditional that
34    the compiler can eliminate.  */
35 #if TCG_TARGET_REG_BITS == 64
36 extern TCGv_i32 TCGV_LOW_link_error(TCGv_i64);
37 extern TCGv_i32 TCGV_HIGH_link_error(TCGv_i64);
38 #define TCGV_LOW  TCGV_LOW_link_error
39 #define TCGV_HIGH TCGV_HIGH_link_error
40 #endif
41 
42 void tcg_gen_op1(TCGOpcode opc, TCGArg a1)
43 {
44     TCGOp *op = tcg_emit_op(opc);
45     op->args[0] = a1;
46 }
47 
48 void tcg_gen_op2(TCGOpcode opc, TCGArg a1, TCGArg a2)
49 {
50     TCGOp *op = tcg_emit_op(opc);
51     op->args[0] = a1;
52     op->args[1] = a2;
53 }
54 
/* Append a three-operand op to the current instruction stream. */
void tcg_gen_op3(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
}
62 
/* Append a four-operand op to the current instruction stream. */
void tcg_gen_op4(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3, TCGArg a4)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
}
71 
/* Append a five-operand op to the current instruction stream. */
void tcg_gen_op5(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
                 TCGArg a4, TCGArg a5)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
    op->args[4] = a5;
}
82 
/* Append a six-operand op to the current instruction stream. */
void tcg_gen_op6(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
                 TCGArg a4, TCGArg a5, TCGArg a6)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
    op->args[4] = a5;
    op->args[5] = a6;
}
94 
/*
 * Emit a memory barrier of the given type.  The barrier is only
 * needed when this TB may run in parallel with other vCPUs, so it
 * is elided entirely when CF_PARALLEL is not set.
 */
void tcg_gen_mb(TCGBar mb_type)
{
    if (tcg_ctx->tb_cflags & CF_PARALLEL) {
        tcg_gen_op1(INDEX_op_mb, mb_type);
    }
}
101 
102 /* 32 bit ops */
103 
/* Load a 32-bit immediate into RET, via a pooled constant temp. */
void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
{
    tcg_gen_mov_i32(ret, tcg_constant_i32(arg));
}
108 
109 void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
110 {
111     /* some cases can be optimized here */
112     if (arg2 == 0) {
113         tcg_gen_mov_i32(ret, arg1);
114     } else {
115         tcg_gen_add_i32(ret, arg1, tcg_constant_i32(arg2));
116     }
117 }
118 
/* RET = ARG1 (immediate) - ARG2.  0 - x becomes a plain negate. */
void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
        /* Don't recurse with tcg_gen_neg_i32.  */
        tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
    } else {
        tcg_gen_sub_i32(ret, tcg_constant_i32(arg1), arg2);
    }
}
128 
129 void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
130 {
131     /* some cases can be optimized here */
132     if (arg2 == 0) {
133         tcg_gen_mov_i32(ret, arg1);
134     } else {
135         tcg_gen_sub_i32(ret, arg1, tcg_constant_i32(arg2));
136     }
137 }
138 
/*
 * RET = ARG1 & ARG2 (immediate).  Masks of 0, -1, 0xff and 0xffff are
 * strength-reduced to a move / constant-load / zero-extend op.
 */
void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i32(ret, 0);
        return;
    case -1:
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i32.  */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }

    tcg_gen_and_i32(ret, arg1, tcg_constant_i32(arg2));
}
166 
167 void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
168 {
169     /* Some cases can be optimized here.  */
170     if (arg2 == -1) {
171         tcg_gen_movi_i32(ret, -1);
172     } else if (arg2 == 0) {
173         tcg_gen_mov_i32(ret, arg1);
174     } else {
175         tcg_gen_or_i32(ret, arg1, tcg_constant_i32(arg2));
176     }
177 }
178 
/* RET = ARG1 ^ ARG2 (immediate).  XOR with -1 becomes a NOT if available. */
void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
        /* Don't recurse with tcg_gen_not_i32.  */
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
    } else {
        tcg_gen_xor_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
191 
192 void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
193 {
194     tcg_debug_assert(arg2 >= 0 && arg2 < 32);
195     if (arg2 == 0) {
196         tcg_gen_mov_i32(ret, arg1);
197     } else {
198         tcg_gen_shl_i32(ret, arg1, tcg_constant_i32(arg2));
199     }
200 }
201 
202 void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
203 {
204     tcg_debug_assert(arg2 >= 0 && arg2 < 32);
205     if (arg2 == 0) {
206         tcg_gen_mov_i32(ret, arg1);
207     } else {
208         tcg_gen_shr_i32(ret, arg1, tcg_constant_i32(arg2));
209     }
210 }
211 
212 void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
213 {
214     tcg_debug_assert(arg2 >= 0 && arg2 < 32);
215     if (arg2 == 0) {
216         tcg_gen_mov_i32(ret, arg1);
217     } else {
218         tcg_gen_sar_i32(ret, arg1, tcg_constant_i32(arg2));
219     }
220 }
221 
/*
 * Branch to L if (ARG1 cond ARG2).  ALWAYS becomes an unconditional
 * branch and NEVER emits nothing.  The label's reference count is
 * bumped for the conditional branch so dead labels can be detected.
 */
void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        l->refs++;
        tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
    }
}
231 
/* Branch to L if (ARG1 cond ARG2-immediate); trivial conds short-circuit. */
void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_brcond_i32(cond, arg1, tcg_constant_i32(arg2), l);
    }
}
240 
/* RET = (ARG1 cond ARG2) ? 1 : 0.  ALWAYS/NEVER fold to constants. */
void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
                         TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
    }
}
252 
/* RET = (ARG1 cond ARG2-immediate) ? 1 : 0. */
void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                          TCGv_i32 arg1, int32_t arg2)
{
    tcg_gen_setcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
}
258 
/*
 * RET = ARG1 * ARG2 (immediate).  Multiplication by zero folds to a
 * constant, and by a power of two is strength-reduced to a left shift.
 */
void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_movi_i32(ret, 0);
    } else if (is_power_of_2(arg2)) {
        tcg_gen_shli_i32(ret, arg1, ctz32(arg2));
    } else {
        tcg_gen_mul_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
269 
/*
 * RET = ARG1 / ARG2 (signed).  Prefer the direct div op, then the
 * two-output div2 form (high word is the sign-extension of ARG1),
 * and finally a helper call.
 */
void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        /* Sign-extend arg1 into the high input word of div2.  */
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_div_i32(ret, arg1, arg2);
    }
}
283 
/*
 * RET = ARG1 % ARG2 (signed).  Fallbacks in order: direct rem op;
 * synthesize via div (r = a - (a / b) * b); div2 (remainder is the
 * second output); helper call.
 */
void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        /* Sign-extend arg1 into the high input word of div2.  */
        tcg_gen_sari_i32(t0, arg1, 31);
        /* div2 outputs (quotient, remainder); keep the remainder.  */
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}
303 
/*
 * RET = ARG1 / ARG2 (unsigned).  Like the signed variant, but the
 * high input word of divu2 is zero rather than a sign-extension.
 */
void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}
317 
/*
 * RET = ARG1 % ARG2 (unsigned).  Fallbacks mirror tcg_gen_rem_i32,
 * using the unsigned division ops and a zero high input word.
 */
void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        /* r = a - (a / b) * b  */
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        /* divu2 outputs (quotient, remainder); keep the remainder.  */
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}
337 
338 void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
339 {
340     if (TCG_TARGET_HAS_andc_i32) {
341         tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
342     } else {
343         TCGv_i32 t0 = tcg_temp_new_i32();
344         tcg_gen_not_i32(t0, arg2);
345         tcg_gen_and_i32(ret, arg1, t0);
346         tcg_temp_free_i32(t0);
347     }
348 }
349 
350 void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
351 {
352     if (TCG_TARGET_HAS_eqv_i32) {
353         tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
354     } else {
355         tcg_gen_xor_i32(ret, arg1, arg2);
356         tcg_gen_not_i32(ret, ret);
357     }
358 }
359 
360 void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
361 {
362     if (TCG_TARGET_HAS_nand_i32) {
363         tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
364     } else {
365         tcg_gen_and_i32(ret, arg1, arg2);
366         tcg_gen_not_i32(ret, ret);
367     }
368 }
369 
370 void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
371 {
372     if (TCG_TARGET_HAS_nor_i32) {
373         tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
374     } else {
375         tcg_gen_or_i32(ret, arg1, arg2);
376         tcg_gen_not_i32(ret, ret);
377     }
378 }
379 
380 void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
381 {
382     if (TCG_TARGET_HAS_orc_i32) {
383         tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
384     } else {
385         TCGv_i32 t0 = tcg_temp_new_i32();
386         tcg_gen_not_i32(t0, arg2);
387         tcg_gen_or_i32(ret, arg1, t0);
388         tcg_temp_free_i32(t0);
389     }
390 }
391 
/*
 * RET = clz(ARG1), or ARG2 if ARG1 == 0.  When only the 64-bit clz
 * op exists, zero-extend and compute clz64: a nonzero arg1 always has
 * 32 extra leading zeros (removed by the final subtract), and a zero
 * arg1 yields the default arg2 + 32, which the subtract cancels.
 */
void tcg_gen_clz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_clz_i32) {
        tcg_gen_op3_i32(INDEX_op_clz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_clz_i64) {
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        /* Bias the default value to compensate for the width change.  */
        tcg_gen_addi_i64(t2, t2, 32);
        tcg_gen_clz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_gen_subi_i32(ret, ret, 32);
    } else {
        gen_helper_clz_i32(ret, arg1, arg2);
    }
}
411 
/* RET = clz(ARG1), or the immediate ARG2 if ARG1 == 0. */
void tcg_gen_clzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    tcg_gen_clz_i32(ret, arg1, tcg_constant_i32(arg2));
}
416 
/*
 * RET = ctz(ARG1), or ARG2 if ARG1 == 0.  Fallbacks in order:
 * 64-bit ctz on the zero-extended value (low 32 bits are unchanged,
 * and arg1 == 0 correctly yields the default); a ctpop- or clz-based
 * expansion with an explicit movcond for the zero case; helper call.
 */
void tcg_gen_ctz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_ctz_i32) {
        tcg_gen_op3_i32(INDEX_op_ctz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctz_i64) {
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_ctz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    } else if (TCG_TARGET_HAS_ctpop_i32
               || TCG_TARGET_HAS_ctpop_i64
               || TCG_TARGET_HAS_clz_i32
               || TCG_TARGET_HAS_clz_i64) {
        TCGv_i32 z, t = tcg_temp_new_i32();

        if (TCG_TARGET_HAS_ctpop_i32 || TCG_TARGET_HAS_ctpop_i64) {
            /* (arg1 - 1) & ~arg1 is a mask of the trailing zeros;
               population count of that mask is ctz.  */
            tcg_gen_subi_i32(t, arg1, 1);
            tcg_gen_andc_i32(t, t, arg1);
            tcg_gen_ctpop_i32(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 32, don't fight it.  */
            /* -arg1 & arg1 isolates the lowest set bit; 31 - clz of
               that bit (via the xor) is its index.  */
            tcg_gen_neg_i32(t, arg1);
            tcg_gen_and_i32(t, t, arg1);
            tcg_gen_clzi_i32(t, t, 32);
            tcg_gen_xori_i32(t, t, 31);
        }
        /* Select the default arg2 when arg1 == 0.  */
        z = tcg_constant_i32(0);
        tcg_gen_movcond_i32(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_ctz_i32(ret, arg1, arg2);
    }
}
454 
/*
 * RET = ctz(ARG1), or the immediate ARG2 if ARG1 == 0.  When the
 * default is exactly 32, the ctpop expansion already returns 32 for
 * arg1 == 0, so no movcond fixup is needed.
 */
void tcg_gen_ctzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    if (!TCG_TARGET_HAS_ctz_i32 && TCG_TARGET_HAS_ctpop_i32 && arg2 == 32) {
        /* This equivalence has the advantage of not requiring a fixup.  */
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_subi_i32(t, arg1, 1);
        tcg_gen_andc_i32(t, t, arg1);
        tcg_gen_ctpop_i32(ret, t);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_ctz_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
468 
/*
 * RET = count of leading redundant sign bits of ARG (sign bit itself
 * excluded).  XOR with the broadcast sign bit turns sign bits into
 * zeros, so clz - 1 gives the answer; clz's default 32 handles 0/-1.
 */
void tcg_gen_clrsb_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_clz_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_sari_i32(t, arg, 31);
        tcg_gen_xor_i32(t, t, arg);
        tcg_gen_clzi_i32(t, t, 32);
        tcg_gen_subi_i32(ret, t, 1);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_clrsb_i32(ret, arg);
    }
}
482 
/*
 * RET = population count of ARG1.  Falls back to the 64-bit ctpop on
 * the zero-extended value (same bit count), else a helper call.
 */
void tcg_gen_ctpop_i32(TCGv_i32 ret, TCGv_i32 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i32) {
        tcg_gen_op2_i32(INDEX_op_ctpop_i32, ret, arg1);
    } else if (TCG_TARGET_HAS_ctpop_i64) {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t, arg1);
        tcg_gen_ctpop_i64(t, t);
        tcg_gen_extrl_i64_i32(ret, t);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_ctpop_i32(ret, arg1);
    }
}
497 
/*
 * RET = ARG1 rotated left by ARG2.  Fallback composes the rotate from
 * (arg1 << arg2) | (arg1 >> (32 - arg2)).
 * NOTE(review): the fallback appears to rely on the backend shift
 * behavior for arg2 == 0 (shift count 32) — confirm against the shift
 * op contract before reusing this pattern elsewhere.
 */
void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
515 
/*
 * RET = ARG1 rotated left by the immediate ARG2 in [0, 31].  A zero
 * count is a move; otherwise use the rotate op or compose from two
 * immediate shifts (both counts nonzero here, so no 32-count hazard).
 */
void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_rotl_i32(ret, arg1, tcg_constant_i32(arg2));
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
535 
/*
 * RET = ARG1 rotated right by ARG2.  Fallback composes the rotate
 * from (arg1 >> arg2) | (arg1 << (32 - arg2)), mirroring rotl.
 */
void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
553 
554 void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
555 {
556     tcg_debug_assert(arg2 >= 0 && arg2 < 32);
557     /* some cases can be optimized here */
558     if (arg2 == 0) {
559         tcg_gen_mov_i32(ret, arg1);
560     } else {
561         tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
562     }
563 }
564 
/*
 * Deposit the low LEN bits of ARG2 into ARG1 at bit offset OFS,
 * writing the result to RET.  Uses the native deposit op when valid,
 * then extract2-based sequences for the edge-aligned cases, and
 * finally a generic mask-shift-or expansion.
 */
void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (len == 32) {
        /* The field covers the whole word: arg1 is irrelevant.  */
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    t1 = tcg_temp_new_i32();

    if (TCG_TARGET_HAS_extract2_i32) {
        /* Field abuts the top of the word.  */
        if (ofs + len == 32) {
            tcg_gen_shli_i32(t1, arg1, len);
            tcg_gen_extract2_i32(ret, t1, arg2, len);
            goto done;
        }
        /* Field abuts the bottom of the word.  */
        if (ofs == 0) {
            tcg_gen_extract2_i32(ret, arg1, arg2, len);
            tcg_gen_rotli_i32(ret, ret, len);
            goto done;
        }
    }

    /* Generic path: mask and position the field, clear the
       destination bits in arg1, then OR them together.  */
    mask = (1u << len) - 1;
    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        /* The shift alone discards the bits above the field.  */
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);
 done:
    tcg_temp_free_i32(t1);
}
612 
/*
 * Deposit the low LEN bits of ARG at offset OFS into a zero
 * background: RET = (ARG & ((1 << LEN) - 1)) << OFS.  Edge-aligned
 * cases reduce to a shift or a mask; otherwise prefer deposit-into-
 * zero or zero-extension sequences over an explicit AND.
 */
void tcg_gen_deposit_z_i32(TCGv_i32 ret, TCGv_i32 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs + len == 32) {
        /* Top-aligned: the shift discards the unwanted high bits.  */
        tcg_gen_shli_i32(ret, arg, ofs);
    } else if (ofs == 0) {
        /* Bottom-aligned: just mask.  */
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i32
               && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, zero, arg, ofs, len);
    } else {
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_ext16u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_ext8u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        switch (ofs + len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext16u_i32(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext8u_i32(ret, ret);
                return;
            }
            break;
        }
        /* Generic mask-then-shift.  */
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        tcg_gen_shli_i32(ret, ret, ofs);
    }
}
669 
/*
 * Extract LEN bits of ARG starting at bit OFS, zero-extended into
 * RET.  Edge-aligned cases are canonicalized to a shift or mask even
 * when a native extract op exists; the remaining fallbacks trade
 * between zero-extension ops and shift/mask sequences.
 */
void tcg_gen_extract_i32(TCGv_i32 ret, TCGv_i32 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        /* Top-aligned field: a single logical shift right.  */
        tcg_gen_shri_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        /* Bottom-aligned field: a single mask.  */
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        return;
    }

    if (TCG_TARGET_HAS_extract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_extract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_ext16u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_ext8u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special case of 16,
       so that we get ext8u, ext16u.  */
    switch (len) {
    case 1 ... 8: case 16:
        /* Shift down then mask.  */
        tcg_gen_shri_i32(ret, arg, ofs);
        tcg_gen_andi_i32(ret, ret, (1u << len) - 1);
        break;
    default:
        /* Shift the field to the top, then logical-shift back down.  */
        tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
        tcg_gen_shri_i32(ret, ret, 32 - len);
        break;
    }
}
726 
/*
 * Extract LEN bits of ARG starting at bit OFS, sign-extended into
 * RET.  Mirrors tcg_gen_extract_i32 but with arithmetic shifts and
 * the signed extension ops.
 */
void tcg_gen_sextract_i32(TCGv_i32 ret, TCGv_i32 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        /* Top-aligned field: a single arithmetic shift right.  */
        tcg_gen_sari_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        switch (len) {
        case 16:
            tcg_gen_ext16s_i32(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i32(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_HAS_sextract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_sextract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_ext16s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_ext8s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            /* Shift down (logical is fine: ext discards the top), then
               sign-extend.  */
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext16s_i32(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext8s_i32(ret, ret);
            return;
        }
        break;
    }

    /* Generic path: shift the field to the top, then arithmetic-shift
       back down to replicate the sign bit.  */
    tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
    tcg_gen_sari_i32(ret, ret, 32 - len);
}
794 
795 /*
796  * Extract 32-bits from a 64-bit input, ah:al, starting from ofs.
797  * Unlike tcg_gen_extract_i32 above, len is fixed at 32.
798  */
/*
 * Extract 32-bits from a 64-bit input, ah:al, starting from ofs.
 * Unlike tcg_gen_extract_i32 above, len is fixed at 32.
 */
void tcg_gen_extract2_i32(TCGv_i32 ret, TCGv_i32 al, TCGv_i32 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 32);
    if (ofs == 0) {
        tcg_gen_mov_i32(ret, al);
    } else if (ofs == 32) {
        tcg_gen_mov_i32(ret, ah);
    } else if (al == ah) {
        /* Both halves identical: the extract is a rotate.  */
        tcg_gen_rotri_i32(ret, al, ofs);
    } else if (TCG_TARGET_HAS_extract2_i32) {
        tcg_gen_op4i_i32(INDEX_op_extract2_i32, ret, al, ah, ofs);
    } else {
        /* Low part of al shifted down, low bits of ah deposited on top.  */
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, al, ofs);
        tcg_gen_deposit_i32(ret, t0, ah, 32 - ofs, ofs);
        tcg_temp_free_i32(t0);
    }
}
818 
/*
 * RET = (C1 cond C2) ? V1 : V2.  Without a native movcond, expand
 * via setcond: negate the 0/1 result into an all-ones/zero mask and
 * blend V1 and V2 with AND/ANDC/OR.
 */
void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i32(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i32(ret, v2);
    } else if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_setcond_i32(cond, t0, c1, c2);
        /* Turn 0/1 into a 0/-1 bit mask.  */
        tcg_gen_neg_i32(t0, t0);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
840 
/*
 * 64-bit add of ah:al + bh:bl into rh:rl using 32-bit pieces.
 * Falls back to concatenating into 64-bit temps when the target has
 * no add2 op.
 */
void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_add2_i32) {
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
857 
/*
 * 64-bit subtract of ah:al - bh:bl into rh:rl using 32-bit pieces.
 * Falls back to concatenating into 64-bit temps when the target has
 * no sub2 op.
 */
void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_sub2_i32) {
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
874 
/*
 * Unsigned 32x32->64 multiply; low half in RL, high half in RH.
 * Fallbacks: separate mul/muluh ops (low result staged in a temp so
 * RH may alias the inputs), or a full 64-bit multiply of the
 * zero-extended operands.
 */
void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
896 
/*
 * Signed 32x32->64 multiply; low half in RL, high half in RH.
 * On 32-bit hosts without muls2/mulsh, compute the unsigned product
 * and correct the high half: for each negative input, subtract the
 * other operand (the standard unsigned-to-signed high-part fixup).
 */
void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        /* 64-bit host: one sign-extended 64-bit multiply.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
936 
/*
 * Mixed signed (ARG1) x unsigned (ARG2) 32x32->64 multiply; low
 * half in RL, high half in RH.  On 32-bit hosts, compute the
 * unsigned product and subtract ARG2 from the high half when ARG1
 * is negative (only the signed operand needs the fixup).
 */
void tcg_gen_mulsu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative input for the signed arg1.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
    } else {
        /* 64-bit host: sign-extend arg1, zero-extend arg2, multiply.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
963 
/* Sign-extend the low 8 bits of arg into ret. */
void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
    } else {
        /* Fallback: shift the byte to the top, then arithmetic shift back. */
        tcg_gen_shli_i32(ret, arg, 24);
        tcg_gen_sari_i32(ret, ret, 24);
    }
}

/* Sign-extend the low 16 bits of arg into ret. */
void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 16);
        tcg_gen_sari_i32(ret, ret, 16);
    }
}

/* Zero-extend the low 8 bits of arg into ret. */
void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffu);
    }
}

/* Zero-extend the low 16 bits of arg into ret. */
void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffffu);
    }
}
1001 
/* Byte-swap the low 16 bits of arg into ret.  The flags describe how the
 * upper bits of input/output are treated: TCG_BSWAP_IZ asserts the input
 * high bits are already zero; TCG_BSWAP_OS/OZ request sign-/zero-extended
 * output (at most one of the two may be set).
 */
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap16_i32, ret, arg, flags);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();

        /* t0 = high byte moved down; mask unless input is known zero above. */
        tcg_gen_shri_i32(t0, arg, 8);
        if (!(flags & TCG_BSWAP_IZ)) {
            tcg_gen_ext8u_i32(t0, t0);
        }

        if (flags & TCG_BSWAP_OS) {
            /* t1 = low byte moved up, sign-extended into bits 16..31. */
            tcg_gen_shli_i32(t1, arg, 24);
            tcg_gen_sari_i32(t1, t1, 16);
        } else if (flags & TCG_BSWAP_OZ) {
            /* t1 = low byte moved up, with bits 16..31 zero. */
            tcg_gen_ext8u_i32(t1, arg);
            tcg_gen_shli_i32(t1, t1, 8);
        } else {
            /* Upper bits unspecified; just shift. */
            tcg_gen_shli_i32(t1, arg, 8);
        }

        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
1033 
/* Byte-swap all 32 bits of arg into ret. */
void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap32_i32, ret, arg, 0);
    } else {
        /* Fallback: swap within 16-bit halves, then swap the halves. */
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_constant_i32(0x00ff00ff);

                                        /* arg = abcd */
        tcg_gen_shri_i32(t0, arg, 8);   /*  t0 = .abc */
        tcg_gen_and_i32(t1, arg, t2);   /*  t1 = .b.d */
        tcg_gen_and_i32(t0, t0, t2);    /*  t0 = .a.c */
        tcg_gen_shli_i32(t1, t1, 8);    /*  t1 = b.d. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = badc */

        tcg_gen_shri_i32(t0, ret, 16);  /*  t0 = ..ba */
        tcg_gen_shli_i32(t1, ret, 16);  /*  t1 = dc.. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = dcba */

        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
1058 
/* ret = min(a, b), signed comparison. */
void tcg_gen_smin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, a, b);
}

/* ret = min(a, b), unsigned comparison. */
void tcg_gen_umin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, a, b);
}

/* ret = max(a, b), signed comparison. */
void tcg_gen_smax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, b, a);
}

/* ret = max(a, b), unsigned comparison. */
void tcg_gen_umax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, b, a);
}
1078 
/* ret = |a|, using the branchless xor/sub trick:
 * t = a >> 31 (all ones if negative, else zero);
 * (a ^ t) - t == a for a >= 0, == -a for a < 0.
 */
void tcg_gen_abs_i32(TCGv_i32 ret, TCGv_i32 a)
{
    TCGv_i32 t = tcg_temp_new_i32();

    tcg_gen_sari_i32(t, a, 31);
    tcg_gen_xor_i32(ret, a, t);
    tcg_gen_sub_i32(ret, ret, t);
    tcg_temp_free_i32(t);
}
1088 
1089 /* 64-bit ops */
1090 
1091 #if TCG_TARGET_REG_BITS == 32
1092 /* These are all inline for TCG_TARGET_REG_BITS == 64.  */
1093 
/* Mark both halves of a 64-bit value dead (32-bit host version). */
void tcg_gen_discard_i64(TCGv_i64 arg)
{
    tcg_gen_discard_i32(TCGV_LOW(arg));
    tcg_gen_discard_i32(TCGV_HIGH(arg));
}

/* Copy a 64-bit value (32-bit host version: two 32-bit moves). */
void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    TCGTemp *ts = tcgv_i64_temp(arg);

    /* Canonicalize TCGv_i64 TEMP_CONST into TCGv_i32 TEMP_CONST. */
    if (ts->kind == TEMP_CONST) {
        tcg_gen_movi_i64(ret, ts->val);
    } else {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    }
}

/* Load a 64-bit immediate (32-bit host version: two 32-bit immediates). */
void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}
1118 
/* The following host-memory load helpers are the 32-bit host versions:
 * load into the low half, then zero (…u) or sign (…s) extend into the
 * high half.
 */

void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
    /* Replicate the sign bit of the loaded value into the high half. */
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
1154 
/* Full 64-bit load from host memory (32-bit host version): two 32-bit
 * loads, ordered by host endianness so the halves land correctly.
 */
void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Since arg2 and ret have different types,
       they cannot be the same temporary */
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}

/* Full 64-bit store to host memory (32-bit host version). */
void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}
1178 
/* Bitwise ops distribute over the two halves on a 32-bit host. */

void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
1196 
/* Variable-count 64-bit shifts on a 32-bit host go through runtime
 * helpers; there is no cheap inline double-word variable shift.
 */

void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shl_i64(ret, arg1, arg2);
}

void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shr_i64(ret, arg1, arg2);
}

void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_sar_i64(ret, arg1, arg2);
}
1211 
/* 64-bit multiply on a 32-bit host, via schoolbook decomposition:
 * low 64 bits of (aH:aL)*(bH:bL) = mulu2(aL,bL) + ((aL*bH + aH*bL) << 32).
 */
void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    /* Accumulate into a temp so ret may alias arg1/arg2. */
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i32();

    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    /* Cross terms only affect the high half. */
    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
1232 
1233 #else
1234 
/* Load a 64-bit immediate (64-bit host version: move from const pool). */
void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_mov_i64(ret, tcg_constant_i64(arg));
}
1239 
1240 #endif /* TCG_TARGET_REG_SIZE == 32 */
1241 
/* ret = arg1 + constant arg2. */
void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_add_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        /* 32-bit host: add-with-carry across the two halves. */
        tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
    }
}
1255 
/* ret = constant arg1 - arg2 (reverse subtract from immediate). */
void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
        /* Don't recurse with tcg_gen_neg_i64.  */
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_sub_i64(ret, tcg_constant_i64(arg1), arg2);
    } else {
        /* 32-bit host: subtract-with-borrow across the two halves. */
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         tcg_constant_i32(arg1), tcg_constant_i32(arg1 >> 32),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2));
    }
}
1269 
/* ret = arg1 - constant arg2. */
void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_sub_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        /* 32-bit host: subtract-with-borrow across the two halves. */
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
    }
}
1283 
/* ret = arg1 & constant arg2, with strength reduction of common masks. */
void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: mask each half independently. */
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i64(ret, 0);
        return;
    case -1:
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i64.  */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffu:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }

    tcg_gen_and_i64(ret, arg1, tcg_constant_i64(arg2));
}
1323 
/* ret = arg1 | constant arg2. */
void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: or each half independently. */
        tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        /* OR with all-ones yields all-ones. */
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_or_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1340 
/* ret = arg1 ^ constant arg2. */
void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: xor each half independently. */
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* Don't recurse with tcg_gen_not_i64.  */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        tcg_gen_xor_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1358 
/* Constant-count 64-bit shift on a 32-bit host, built from 32-bit ops.
 * right selects right vs left shift; arith selects arithmetic vs logical
 * for right shifts.  c must be in [0, 64).
 */
static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        /* No shift: plain copy of both halves. */
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        /* Whole-word shift: one half comes entirely from the other. */
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                /* High half becomes the replicated sign bit. */
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else if (right) {
        /* 0 < c < 32: low half takes c bits funneled in from the high half. */
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_LOW(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(ret),
                                TCGV_HIGH(arg1), 32 - c, c);
        }
        if (arith) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        }
    } else {
        /* 0 < c < 32 left shift: high half takes bits funneled from low. */
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_HIGH(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), 32 - c);
        } else {
            TCGv_i32 t0 = tcg_temp_new_i32();
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            tcg_gen_deposit_i32(TCGV_HIGH(ret), t0,
                                TCGV_HIGH(arg1), c, 32 - c);
            tcg_temp_free_i32(t0);
        }
        tcg_gen_shli_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
    }
}
1408 
/* ret = arg1 << arg2 (constant count, 0 <= arg2 < 64). */
void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shl_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

/* ret = arg1 >> arg2, logical (constant count, 0 <= arg2 < 64). */
void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shr_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

/* ret = arg1 >> arg2, arithmetic (constant count, 0 <= arg2 < 64). */
void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_sar_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1444 
/* Branch to label l if (arg1 cond arg2) holds. */
void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        /* NEVER emits nothing; otherwise count the label reference. */
        l->refs++;
        if (TCG_TARGET_REG_BITS == 32) {
            /* Compare both 32-bit halves with a double-word branch op. */
            tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                              TCGV_HIGH(arg1), TCGV_LOW(arg2),
                              TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
                              label_arg(l));
        }
    }
}
1461 
/* Branch to label l if (arg1 cond constant arg2) holds. */
void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_brcond_i64(cond, arg1, tcg_constant_i64(arg2), l);
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        l->refs++;
        /* 32-bit host: double-word compare against the split constant. */
        tcg_gen_op6ii_i32(INDEX_op_brcond2_i32,
                          TCGV_LOW(arg1), TCGV_HIGH(arg1),
                          tcg_constant_i32(arg2),
                          tcg_constant_i32(arg2 >> 32),
                          cond, label_arg(l));
    }
}
1477 
/* ret = (arg1 cond arg2) ? 1 : 0. */
void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            /* setcond2 writes the 0/1 result to the low half only;
               zero the high half explicitly. */
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}
1496 
/* ret = (arg1 cond constant arg2) ? 1 : 0. */
void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
                          TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_setcond_i64(cond, ret, arg1, tcg_constant_i64(arg2));
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        /* 32-bit host: double-word compare against the split constant;
           result lands in the low half, so zero the high half. */
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2),
                         tcg_constant_i32(arg2 >> 32), cond);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}
1514 
1515 void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1516 {
1517     if (arg2 == 0) {
1518         tcg_gen_movi_i64(ret, 0);
1519     } else if (is_power_of_2(arg2)) {
1520         tcg_gen_shli_i64(ret, arg1, ctz64(arg2));
1521     } else {
1522         TCGv_i64 t0 = tcg_const_i64(arg2);
1523         tcg_gen_mul_i64(ret, arg1, t0);
1524         tcg_temp_free_i64(t0);
1525     }
1526 }
1527 
/* ret = arg1 / arg2, signed division. */
void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* div2 takes a 128-bit dividend: sign-extend arg1 into t0. */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_div_i64(ret, arg1, arg2);
    }
}
1541 
/* ret = arg1 % arg2, signed remainder. */
void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        /* rem = arg1 - (arg1 / arg2) * arg2 */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* div2 produces quotient and remainder; here the remainder
           goes to ret and the quotient is discarded into t0. */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_rem_i64(ret, arg1, arg2);
    }
}
1561 
/* ret = arg1 / arg2, unsigned division. */
void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* divu2 takes a 128-bit dividend: zero-extend arg1 via t0 = 0. */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_divu_i64(ret, arg1, arg2);
    }
}
1575 
/* ret = arg1 % arg2, unsigned remainder. */
void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        /* rem = arg1 - (arg1 / arg2) * arg2 */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* divu2 produces quotient and remainder; keep the remainder
           in ret, discard the quotient into t0. */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_remu_i64(ret, arg1, arg2);
    }
}
1595 
/* 64-bit sign/zero extensions.  On a 32-bit host, extend within the low
 * half and then fill the high half with the sign bit (signed) or zero
 * (unsigned).  Otherwise use the dedicated op if available, else a
 * shift pair (signed) or mask (unsigned).
 */

void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 56);
        tcg_gen_sari_i64(ret, ret, 56);
    }
}

void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 48);
        tcg_gen_sari_i64(ret, ret, 48);
    }
}

void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 32);
        tcg_gen_sari_i64(ret, ret, 32);
    }
}

void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext8u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext16u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffu);
    }
}

void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext32u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffffffu);
    }
}
1670 
/* Byte-swap the low 16 bits of arg into ret (64-bit value).  flags as
 * for tcg_gen_bswap16_i32: IZ asserts high input bits are zero, OS/OZ
 * select sign-/zero-extended output.
 */
void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap in the low half; fill the high half per the output flag. */
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg), flags);
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        }
    } else if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap16_i64, ret, arg, flags);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        /* t0 = high byte moved down; mask unless input is known zero above. */
        tcg_gen_shri_i64(t0, arg, 8);
        if (!(flags & TCG_BSWAP_IZ)) {
            tcg_gen_ext8u_i64(t0, t0);
        }

        if (flags & TCG_BSWAP_OS) {
            /* t1 = low byte moved up, sign-extended into bits 16..63. */
            tcg_gen_shli_i64(t1, arg, 56);
            tcg_gen_sari_i64(t1, t1, 48);
        } else if (flags & TCG_BSWAP_OZ) {
            /* t1 = low byte moved up, with bits 16..63 zero. */
            tcg_gen_ext8u_i64(t1, arg);
            tcg_gen_shli_i64(t1, t1, 8);
        } else {
            /* Upper bits unspecified; just shift. */
            tcg_gen_shli_i64(t1, arg, 8);
        }

        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1709 
/* Byte-swap the low 32 bits of arg into ret (64-bit value); the output
 * high bits are sign-extended if TCG_BSWAP_OS is set, otherwise zero.
 */
void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        }
    } else if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap32_i64, ret, arg, flags);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_constant_i64(0x00ff00ff);

                                            /* arg = xxxxabcd */
        tcg_gen_shri_i64(t0, arg, 8);       /*  t0 = .xxxxabc */
        tcg_gen_and_i64(t1, arg, t2);       /*  t1 = .....b.d */
        tcg_gen_and_i64(t0, t0, t2);        /*  t0 = .....a.c */
        tcg_gen_shli_i64(t1, t1, 8);        /*  t1 = ....b.d. */
        tcg_gen_or_i64(ret, t0, t1);        /* ret = ....badc */

        /* Swap the 16-bit halves; the shift pair through bit 63 also
           produces the requested output extension for free. */
        tcg_gen_shli_i64(t1, ret, 48);      /*  t1 = dc...... */
        tcg_gen_shri_i64(t0, ret, 16);      /*  t0 = ......ba */
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i64(t1, t1, 32);   /*  t1 = ssssdc.. */
        } else {
            tcg_gen_shri_i64(t1, t1, 32);   /*  t1 = ....dc.. */
        }
        tcg_gen_or_i64(ret, t0, t1);        /* ret = ssssdcba */

        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1749 
/* Byte-swap all 64 bits of arg into ret. */
void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap each 32-bit half, then exchange the halves.  Temps allow
           ret to alias arg. */
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
        tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
        tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap64_i64, ret, arg, 0);
    } else {
        /* Fallback: swap bytes within 16-bit groups, then 16-bit groups
           within 32-bit halves, then the two halves. */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();

                                        /* arg = abcdefgh */
        tcg_gen_movi_i64(t2, 0x00ff00ff00ff00ffull);
        tcg_gen_shri_i64(t0, arg, 8);   /*  t0 = .abcdefg */
        tcg_gen_and_i64(t1, arg, t2);   /*  t1 = .b.d.f.h */
        tcg_gen_and_i64(t0, t0, t2);    /*  t0 = .a.c.e.g */
        tcg_gen_shli_i64(t1, t1, 8);    /*  t1 = b.d.f.h. */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = badcfehg */

        tcg_gen_movi_i64(t2, 0x0000ffff0000ffffull);
        tcg_gen_shri_i64(t0, ret, 16);  /*  t0 = ..badcfe */
        tcg_gen_and_i64(t1, ret, t2);   /*  t1 = ..dc..hg */
        tcg_gen_and_i64(t0, t0, t2);    /*  t0 = ..ba..fe */
        tcg_gen_shli_i64(t1, t1, 16);   /*  t1 = dc..hg.. */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = dcbahgfe */

        tcg_gen_shri_i64(t0, ret, 32);  /*  t0 = ....dcba */
        tcg_gen_shli_i64(t1, ret, 32);  /*  t1 = hgfe.... */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = hgfedcba */

        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    }
}
1794 
/* ret = ~arg. */
void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_not_i64) {
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
    } else {
        /* not x == x ^ -1 */
        tcg_gen_xori_i64(ret, arg, -1);
    }
}

/* ret = arg1 & ~arg2. */
void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_andc_i64) {
        tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_and_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

/* ret = ~(arg1 ^ arg2), i.e. bitwise equivalence. */
void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_eqv_i64) {
        tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

/* ret = ~(arg1 & arg2). */
void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nand_i64) {
        tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
    } else {
        tcg_gen_and_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

/* ret = ~(arg1 | arg2). */
void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nor_i64) {
        tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
    } else {
        tcg_gen_or_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}
1860 
1861 void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1862 {
1863     if (TCG_TARGET_REG_BITS == 32) {
1864         tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1865         tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1866     } else if (TCG_TARGET_HAS_orc_i64) {
1867         tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
1868     } else {
1869         TCGv_i64 t0 = tcg_temp_new_i64();
1870         tcg_gen_not_i64(t0, arg2);
1871         tcg_gen_or_i64(ret, arg1, t0);
1872         tcg_temp_free_i64(t0);
1873     }
1874 }
1875 
1876 void tcg_gen_clz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1877 {
1878     if (TCG_TARGET_HAS_clz_i64) {
1879         tcg_gen_op3_i64(INDEX_op_clz_i64, ret, arg1, arg2);
1880     } else {
1881         gen_helper_clz_i64(ret, arg1, arg2);
1882     }
1883 }
1884 
/*
 * Count leading zeros of ARG1, yielding the immediate ARG2 when
 * ARG1 is zero.
 */
void tcg_gen_clzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32
        && TCG_TARGET_HAS_clz_i32
        && arg2 <= 0xffffffffu) {
        TCGv_i32 t = tcg_temp_new_i32();
        /* Count the low half first; if the low half is zero this yields
           (arg2 - 32) mod 2^32, and the addi below restores arg2. */
        tcg_gen_clzi_i32(t, TCGV_LOW(arg1), arg2 - 32);
        tcg_gen_addi_i32(t, t, 32);
        /* Count the high half, falling back to the low-half count above
           when the high half is zero. */
        tcg_gen_clz_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), t);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_clz_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1902 
1903 void tcg_gen_ctz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1904 {
1905     if (TCG_TARGET_HAS_ctz_i64) {
1906         tcg_gen_op3_i64(INDEX_op_ctz_i64, ret, arg1, arg2);
1907     } else if (TCG_TARGET_HAS_ctpop_i64 || TCG_TARGET_HAS_clz_i64) {
1908         TCGv_i64 z, t = tcg_temp_new_i64();
1909 
1910         if (TCG_TARGET_HAS_ctpop_i64) {
1911             tcg_gen_subi_i64(t, arg1, 1);
1912             tcg_gen_andc_i64(t, t, arg1);
1913             tcg_gen_ctpop_i64(t, t);
1914         } else {
1915             /* Since all non-x86 hosts have clz(0) == 64, don't fight it.  */
1916             tcg_gen_neg_i64(t, arg1);
1917             tcg_gen_and_i64(t, t, arg1);
1918             tcg_gen_clzi_i64(t, t, 64);
1919             tcg_gen_xori_i64(t, t, 63);
1920         }
1921         z = tcg_constant_i64(0);
1922         tcg_gen_movcond_i64(TCG_COND_EQ, ret, arg1, z, arg2, t);
1923         tcg_temp_free_i64(t);
1924         tcg_temp_free_i64(z);
1925     } else {
1926         gen_helper_ctz_i64(ret, arg1, arg2);
1927     }
1928 }
1929 
/*
 * Count trailing zeros of ARG1, yielding the immediate ARG2 when
 * ARG1 is zero.
 */
void tcg_gen_ctzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32
        && TCG_TARGET_HAS_ctz_i32
        && arg2 <= 0xffffffffu) {
        TCGv_i32 t32 = tcg_temp_new_i32();
        /* Count the high half first; if the high half is zero this yields
           (arg2 - 32) mod 2^32, and the addi below restores arg2. */
        tcg_gen_ctzi_i32(t32, TCGV_HIGH(arg1), arg2 - 32);
        tcg_gen_addi_i32(t32, t32, 32);
        /* Count the low half, falling back to the high-half count above
           when the low half is zero. */
        tcg_gen_ctz_i32(TCGV_LOW(ret), TCGV_LOW(arg1), t32);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t32);
    } else if (!TCG_TARGET_HAS_ctz_i64
               && TCG_TARGET_HAS_ctpop_i64
               && arg2 == 64) {
        /* This equivalence has the advantage of not requiring a fixup.  */
        /* ctz(x) == ctpop((x - 1) & ~x), including ctz(0) == 64. */
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_subi_i64(t, arg1, 1);
        tcg_gen_andc_i64(t, t, arg1);
        tcg_gen_ctpop_i64(ret, t);
        tcg_temp_free_i64(t);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_ctz_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1956 
/*
 * Count leading redundant sign bits: the number of bits following the
 * sign bit that are equal to it.
 */
void tcg_gen_clrsb_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_clz_i64 || TCG_TARGET_HAS_clz_i32) {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_sari_i64(t, arg, 63);   /* replicate the sign bit */
        tcg_gen_xor_i64(t, t, arg);     /* now counting leading zeros */
        tcg_gen_clzi_i64(t, t, 64);     /* 64 only when arg is 0 or -1 */
        tcg_gen_subi_i64(ret, t, 1);    /* exclude the sign bit itself */
        tcg_temp_free_i64(t);
    } else {
        gen_helper_clrsb_i64(ret, arg);
    }
}
1970 
/* Population count: ret = number of set bits in arg1. */
void tcg_gen_ctpop_i64(TCGv_i64 ret, TCGv_i64 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i64) {
        tcg_gen_op2_i64(INDEX_op_ctpop_i64, ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 32 && TCG_TARGET_HAS_ctpop_i32) {
        /* Sum the popcounts of the two halves; the sum fits in the
           low word, so clear the high word afterward. */
        tcg_gen_ctpop_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
        tcg_gen_ctpop_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_add_i32(TCGV_LOW(ret), TCGV_LOW(ret), TCGV_HIGH(ret));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        gen_helper_ctpop_i64(ret, arg1);
    }
}
1984 
1985 void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1986 {
1987     if (TCG_TARGET_HAS_rot_i64) {
1988         tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
1989     } else {
1990         TCGv_i64 t0, t1;
1991         t0 = tcg_temp_new_i64();
1992         t1 = tcg_temp_new_i64();
1993         tcg_gen_shl_i64(t0, arg1, arg2);
1994         tcg_gen_subfi_i64(t1, 64, arg2);
1995         tcg_gen_shr_i64(t1, arg1, t1);
1996         tcg_gen_or_i64(ret, t0, t1);
1997         tcg_temp_free_i64(t0);
1998         tcg_temp_free_i64(t1);
1999     }
2000 }
2001 
2002 void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
2003 {
2004     tcg_debug_assert(arg2 >= 0 && arg2 < 64);
2005     /* some cases can be optimized here */
2006     if (arg2 == 0) {
2007         tcg_gen_mov_i64(ret, arg1);
2008     } else if (TCG_TARGET_HAS_rot_i64) {
2009         tcg_gen_rotl_i64(ret, arg1, tcg_constant_i64(arg2));
2010     } else {
2011         TCGv_i64 t0, t1;
2012         t0 = tcg_temp_new_i64();
2013         t1 = tcg_temp_new_i64();
2014         tcg_gen_shli_i64(t0, arg1, arg2);
2015         tcg_gen_shri_i64(t1, arg1, 64 - arg2);
2016         tcg_gen_or_i64(ret, t0, t1);
2017         tcg_temp_free_i64(t0);
2018         tcg_temp_free_i64(t1);
2019     }
2020 }
2021 
2022 void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2023 {
2024     if (TCG_TARGET_HAS_rot_i64) {
2025         tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
2026     } else {
2027         TCGv_i64 t0, t1;
2028         t0 = tcg_temp_new_i64();
2029         t1 = tcg_temp_new_i64();
2030         tcg_gen_shr_i64(t0, arg1, arg2);
2031         tcg_gen_subfi_i64(t1, 64, arg2);
2032         tcg_gen_shl_i64(t1, arg1, t1);
2033         tcg_gen_or_i64(ret, t0, t1);
2034         tcg_temp_free_i64(t0);
2035         tcg_temp_free_i64(t1);
2036     }
2037 }
2038 
2039 void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
2040 {
2041     tcg_debug_assert(arg2 >= 0 && arg2 < 64);
2042     /* some cases can be optimized here */
2043     if (arg2 == 0) {
2044         tcg_gen_mov_i64(ret, arg1);
2045     } else {
2046         tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
2047     }
2048 }
2049 
/*
 * Deposit the low LEN bits of ARG2 into ARG1 at bit position OFS:
 * ret = (arg1 & ~(mask << ofs)) | ((arg2 & mask) << ofs),
 * where mask = (1 << len) - 1.
 */
void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (len == 64) {
        /* The field covers the whole word: plain move. */
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* If the field fits entirely within one 32-bit half, deposit
           into that half and copy the other through unchanged. */
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
    }

    t1 = tcg_temp_new_i64();

    if (TCG_TARGET_HAS_extract2_i64) {
        /* Fields touching either end of the word can be built with a
           single double-word (funnel) shift. */
        if (ofs + len == 64) {
            tcg_gen_shli_i64(t1, arg1, len);
            tcg_gen_extract2_i64(ret, t1, arg2, len);
            goto done;
        }
        if (ofs == 0) {
            tcg_gen_extract2_i64(ret, arg1, arg2, len);
            tcg_gen_rotli_i64(ret, ret, len);
            goto done;
        }
    }

    /* Generic mask-and-merge expansion. */
    mask = (1ull << len) - 1;
    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        /* The shift itself discards the bits above the field. */
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);
 done:
    tcg_temp_free_i64(t1);
}
2112 
/*
 * Deposit ARG into a zero background:
 * ret = (arg & ((1 << len) - 1)) << ofs.
 */
void tcg_gen_deposit_z_i64(TCGv_i64 ret, TCGv_i64 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs + len == 64) {
        /* The shift itself discards the bits above the field. */
        tcg_gen_shli_i64(ret, arg, ofs);
    } else if (ofs == 0) {
        /* Field at the bottom: a mask suffices. */
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i64
               && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        /* Deposit into a zero constant. */
        TCGv_i64 zero = tcg_constant_i64(0);
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, zero, arg, ofs, len);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            /* Field contained in a single 32-bit half: deposit there
               and zero the other half. */
            if (ofs >= 32) {
                tcg_gen_deposit_z_i32(TCGV_HIGH(ret), TCGV_LOW(arg),
                                      ofs - 32, len);
                tcg_gen_movi_i32(TCGV_LOW(ret), 0);
                return;
            }
            if (ofs + len <= 32) {
                tcg_gen_deposit_z_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
                return;
            }
        }
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_ext32u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_ext16u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_ext8u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        switch (ofs + len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext32u_i64(ret, ret);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext16u_i64(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext8u_i64(ret, ret);
                return;
            }
            break;
        }
        /* Last resort: mask then shift. */
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        tcg_gen_shli_i64(ret, ret, ofs);
    }
}
2196 
/*
 * Extract LEN bits from ARG at bit position OFS, zero-extended:
 * ret = (arg >> ofs) & ((1 << len) - 1).
 */
void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 64) {
        /* Field at the top: a single right shift suffices. */
        tcg_gen_shri_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        /* Field at the bottom: a single mask suffices. */
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words.  */
        if (ofs >= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        /* The field is split across two words.  One double-word
           shift is better than two double-word shifts.  */
        goto do_shift_and;
    }

    if (TCG_TARGET_HAS_extract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_extract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_ext32u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_ext16u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_ext8u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special cases of 16 and 32,
       so that we get ext8u, ext16u, and ext32u.  */
    switch (len) {
    case 1 ... 8: case 16: case 32:
    do_shift_and:
        tcg_gen_shri_i64(ret, arg, ofs);
        tcg_gen_andi_i64(ret, ret, (1ull << len) - 1);
        break;
    default:
        /* Shift the field up to the MSB, then back down with zero fill. */
        tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
        tcg_gen_shri_i64(ret, ret, 64 - len);
        break;
    }
}
2278 
2279 void tcg_gen_sextract_i64(TCGv_i64 ret, TCGv_i64 arg,
2280                           unsigned int ofs, unsigned int len)
2281 {
2282     tcg_debug_assert(ofs < 64);
2283     tcg_debug_assert(len > 0);
2284     tcg_debug_assert(len <= 64);
2285     tcg_debug_assert(ofs + len <= 64);
2286 
2287     /* Canonicalize certain special cases, even if sextract is supported.  */
2288     if (ofs + len == 64) {
2289         tcg_gen_sari_i64(ret, arg, 64 - len);
2290         return;
2291     }
2292     if (ofs == 0) {
2293         switch (len) {
2294         case 32:
2295             tcg_gen_ext32s_i64(ret, arg);
2296             return;
2297         case 16:
2298             tcg_gen_ext16s_i64(ret, arg);
2299             return;
2300         case 8:
2301             tcg_gen_ext8s_i64(ret, arg);
2302             return;
2303         }
2304     }
2305 
2306     if (TCG_TARGET_REG_BITS == 32) {
2307         /* Look for a 32-bit extract within one of the two words.  */
2308         if (ofs >= 32) {
2309             tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
2310         } else if (ofs + len <= 32) {
2311             tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
2312         } else if (ofs == 0) {
2313             tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2314             tcg_gen_sextract_i32(TCGV_HIGH(ret), TCGV_HIGH(arg), 0, len - 32);
2315             return;
2316         } else if (len > 32) {
2317             TCGv_i32 t = tcg_temp_new_i32();
2318             /* Extract the bits for the high word normally.  */
2319             tcg_gen_sextract_i32(t, TCGV_HIGH(arg), ofs + 32, len - 32);
2320             /* Shift the field down for the low part.  */
2321             tcg_gen_shri_i64(ret, arg, ofs);
2322             /* Overwrite the shift into the high part.  */
2323             tcg_gen_mov_i32(TCGV_HIGH(ret), t);
2324             tcg_temp_free_i32(t);
2325             return;
2326         } else {
2327             /* Shift the field down for the low part, such that the
2328                field sits at the MSB.  */
2329             tcg_gen_shri_i64(ret, arg, ofs + len - 32);
2330             /* Shift the field down from the MSB, sign extending.  */
2331             tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_LOW(ret), 32 - len);
2332         }
2333         /* Sign-extend the field from 32 bits.  */
2334         tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2335         return;
2336     }
2337 
2338     if (TCG_TARGET_HAS_sextract_i64
2339         && TCG_TARGET_extract_i64_valid(ofs, len)) {
2340         tcg_gen_op4ii_i64(INDEX_op_sextract_i64, ret, arg, ofs, len);
2341         return;
2342     }
2343 
2344     /* Assume that sign-extension, if available, is cheaper than a shift.  */
2345     switch (ofs + len) {
2346     case 32:
2347         if (TCG_TARGET_HAS_ext32s_i64) {
2348             tcg_gen_ext32s_i64(ret, arg);
2349             tcg_gen_sari_i64(ret, ret, ofs);
2350             return;
2351         }
2352         break;
2353     case 16:
2354         if (TCG_TARGET_HAS_ext16s_i64) {
2355             tcg_gen_ext16s_i64(ret, arg);
2356             tcg_gen_sari_i64(ret, ret, ofs);
2357             return;
2358         }
2359         break;
2360     case 8:
2361         if (TCG_TARGET_HAS_ext8s_i64) {
2362             tcg_gen_ext8s_i64(ret, arg);
2363             tcg_gen_sari_i64(ret, ret, ofs);
2364             return;
2365         }
2366         break;
2367     }
2368     switch (len) {
2369     case 32:
2370         if (TCG_TARGET_HAS_ext32s_i64) {
2371             tcg_gen_shri_i64(ret, arg, ofs);
2372             tcg_gen_ext32s_i64(ret, ret);
2373             return;
2374         }
2375         break;
2376     case 16:
2377         if (TCG_TARGET_HAS_ext16s_i64) {
2378             tcg_gen_shri_i64(ret, arg, ofs);
2379             tcg_gen_ext16s_i64(ret, ret);
2380             return;
2381         }
2382         break;
2383     case 8:
2384         if (TCG_TARGET_HAS_ext8s_i64) {
2385             tcg_gen_shri_i64(ret, arg, ofs);
2386             tcg_gen_ext8s_i64(ret, ret);
2387             return;
2388         }
2389         break;
2390     }
2391     tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
2392     tcg_gen_sari_i64(ret, ret, 64 - len);
2393 }
2394 
2395 /*
2396  * Extract 64 bits from a 128-bit input, ah:al, starting from ofs.
2397  * Unlike tcg_gen_extract_i64 above, len is fixed at 64.
2398  */
2399 void tcg_gen_extract2_i64(TCGv_i64 ret, TCGv_i64 al, TCGv_i64 ah,
2400                           unsigned int ofs)
2401 {
2402     tcg_debug_assert(ofs <= 64);
2403     if (ofs == 0) {
2404         tcg_gen_mov_i64(ret, al);
2405     } else if (ofs == 64) {
2406         tcg_gen_mov_i64(ret, ah);
2407     } else if (al == ah) {
2408         tcg_gen_rotri_i64(ret, al, ofs);
2409     } else if (TCG_TARGET_HAS_extract2_i64) {
2410         tcg_gen_op4i_i64(INDEX_op_extract2_i64, ret, al, ah, ofs);
2411     } else {
2412         TCGv_i64 t0 = tcg_temp_new_i64();
2413         tcg_gen_shri_i64(t0, al, ofs);
2414         tcg_gen_deposit_i64(ret, t0, ah, 64 - ofs, ofs);
2415         tcg_temp_free_i64(t0);
2416     }
2417 }
2418 
/* Conditional move: ret = (c1 cond c2) ? v1 : v2. */
void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        /* Evaluate the 64-bit comparison on the 32-bit halves. */
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        if (TCG_TARGET_HAS_movcond_i32) {
            tcg_gen_movi_i32(t1, 0);
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                                TCGV_LOW(v1), TCGV_LOW(v2));
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                                TCGV_HIGH(v1), TCGV_HIGH(v2));
        } else {
            /* Turn the 0/1 setcond result into a 0/-1 mask and select
               with (v1 & mask) | (v2 & ~mask). */
            tcg_gen_neg_i32(t0, t0);

            tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
            tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

            tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
            tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        /* Same mask-and-merge expansion as above, in 64 bits. */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cond, t0, c1, c2);
        tcg_gen_neg_i64(t0, t0);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
2466 
2467 void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
2468                       TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
2469 {
2470     if (TCG_TARGET_HAS_add2_i64) {
2471         tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
2472     } else {
2473         TCGv_i64 t0 = tcg_temp_new_i64();
2474         TCGv_i64 t1 = tcg_temp_new_i64();
2475         tcg_gen_add_i64(t0, al, bl);
2476         tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
2477         tcg_gen_add_i64(rh, ah, bh);
2478         tcg_gen_add_i64(rh, rh, t1);
2479         tcg_gen_mov_i64(rl, t0);
2480         tcg_temp_free_i64(t0);
2481         tcg_temp_free_i64(t1);
2482     }
2483 }
2484 
2485 void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
2486                       TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
2487 {
2488     if (TCG_TARGET_HAS_sub2_i64) {
2489         tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
2490     } else {
2491         TCGv_i64 t0 = tcg_temp_new_i64();
2492         TCGv_i64 t1 = tcg_temp_new_i64();
2493         tcg_gen_sub_i64(t0, al, bl);
2494         tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
2495         tcg_gen_sub_i64(rh, ah, bh);
2496         tcg_gen_sub_i64(rh, rh, t1);
2497         tcg_gen_mov_i64(rl, t0);
2498         tcg_temp_free_i64(t0);
2499         tcg_temp_free_i64(t1);
2500     }
2501 }
2502 
2503 void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
2504 {
2505     if (TCG_TARGET_HAS_mulu2_i64) {
2506         tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
2507     } else if (TCG_TARGET_HAS_muluh_i64) {
2508         TCGv_i64 t = tcg_temp_new_i64();
2509         tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
2510         tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
2511         tcg_gen_mov_i64(rl, t);
2512         tcg_temp_free_i64(t);
2513     } else {
2514         TCGv_i64 t0 = tcg_temp_new_i64();
2515         tcg_gen_mul_i64(t0, arg1, arg2);
2516         gen_helper_muluh_i64(rh, arg1, arg2);
2517         tcg_gen_mov_i64(rl, t0);
2518         tcg_temp_free_i64(t0);
2519     }
2520 }
2521 
/* Signed 64x64 -> 128 multiply: rh:rl = arg1 * arg2. */
void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        /* Stage the low half in a temp so RL may alias the inputs. */
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        /* The signed high half is the unsigned high half minus
           (arg1 < 0 ? arg2 : 0) and minus (arg2 < 0 ? arg1 : 0),
           computed here with 0/-1 sign masks. */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        /* No host support at all: use a runtime helper. */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
2558 
/* Signed (arg1) x unsigned (arg2) 64x64 -> 128 multiply: rh:rl. */
void tcg_gen_mulsu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();
    tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
    /* Adjust for negative input for the signed arg1.  */
    /* The high half is the unsigned high half minus
       (arg1 < 0 ? arg2 : 0), computed with a 0/-1 sign mask. */
    tcg_gen_sari_i64(t2, arg1, 63);
    tcg_gen_and_i64(t2, t2, arg2);
    tcg_gen_sub_i64(rh, t1, t2);
    tcg_gen_mov_i64(rl, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}
2574 
/* Signed minimum: ret = min(a, b). */
void tcg_gen_smin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, a, b);
}
2579 
/* Unsigned minimum: ret = min(a, b). */
void tcg_gen_umin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, a, b);
}
2584 
/* Signed maximum: ret = max(a, b). */
void tcg_gen_smax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, b, a);
}
2589 
/* Unsigned maximum: ret = max(a, b). */
void tcg_gen_umax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, b, a);
}
2594 
2595 void tcg_gen_abs_i64(TCGv_i64 ret, TCGv_i64 a)
2596 {
2597     TCGv_i64 t = tcg_temp_new_i64();
2598 
2599     tcg_gen_sari_i64(t, a, 63);
2600     tcg_gen_xor_i64(ret, a, t);
2601     tcg_gen_sub_i64(ret, ret, t);
2602     tcg_temp_free_i64(t);
2603 }
2604 
2605 /* Size changing operations.  */
2606 
/* Truncate a 64-bit value to its low 32 bits. */
void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_LOW(arg));
    } else if (TCG_TARGET_HAS_extrl_i64_i32) {
        tcg_gen_op2(INDEX_op_extrl_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        /* NOTE(review): the cast reinterprets the i64 temp as an i32
           temp, relying on the backend using only the low 32 bits of
           the host register for an i32 mov. */
        tcg_gen_mov_i32(ret, (TCGv_i32)arg);
    }
}
2618 
/* Extract the high 32 bits of a 64-bit value. */
void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_extrh_i64_i32) {
        tcg_gen_op2(INDEX_op_extrh_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        /* Shift the high half down, then truncate via the same
           i64-as-i32 reinterpretation used by extrl above. */
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_shri_i64(t, arg, 32);
        tcg_gen_mov_i32(ret, (TCGv_i32)t);
        tcg_temp_free_i64(t);
    }
}
2633 
/* Zero-extend a 32-bit value into a 64-bit destination. */
void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        tcg_gen_op2(INDEX_op_extu_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}
2644 
/* Sign-extend a 32-bit value into a 64-bit destination. */
void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        /* Replicate bit 31 across the high word. */
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else {
        tcg_gen_op2(INDEX_op_ext_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}
2655 
/* Build a 64-bit value from two 32-bit halves: dest = high:low. */
void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
    TCGv_i64 tmp;

    if (TCG_TARGET_REG_BITS == 32) {
        /* The halves map directly onto the two component registers. */
        tcg_gen_mov_i32(TCGV_LOW(dest), low);
        tcg_gen_mov_i32(TCGV_HIGH(dest), high);
        return;
    }

    tmp = tcg_temp_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information.  */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above.  */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
}
2681 
2682 void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
2683 {
2684     if (TCG_TARGET_REG_BITS == 32) {
2685         tcg_gen_mov_i32(lo, TCGV_LOW(arg));
2686         tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
2687     } else {
2688         tcg_gen_extrl_i64_i32(lo, arg);
2689         tcg_gen_extrh_i64_i32(hi, arg);
2690     }
2691 }
2692 
/*
 * Split a 64-bit value into two 64-bit temporaries holding the
 * zero-extended low and high 32-bit halves respectively.
 * NOTE(review): assumes lo does not alias arg — the first op would
 * clobber arg before the shift; confirm against callers.
 */
void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}
2698 
2699 /* QEMU specific operations.  */
2700 
/*
 * Emit the op that leaves generated code and returns to the main loop.
 * A NULL tb is a plain exit with no chaining (idx must then be 0);
 * otherwise idx selects a numbered goto_tb exit of tb, or is
 * TB_EXIT_REQUESTED for the exitreq-label path.
 */
void tcg_gen_exit_tb(const TranslationBlock *tb, unsigned idx)
{
    /*
     * Let the jit code return the read-only version of the
     * TranslationBlock, so that we minimize the pc-relative
     * distance of the address of the exit_tb code to TB.
     * This will improve utilization of pc-relative address loads.
     *
     * TODO: Move this to translator_loop, so that all const
     * TranslationBlock pointers refer to read-only memory.
     * This requires coordination with targets that do not use
     * the translator_loop.
     */
    uintptr_t val = (uintptr_t)tcg_splitwx_to_rx((void *)tb) + idx;

    if (tb == NULL) {
        /* Plain exit to the main loop. */
        tcg_debug_assert(idx == 0);
    } else if (idx <= TB_EXIT_IDXMAX) {
#ifdef CONFIG_DEBUG_TCG
        /* This is an exit following a goto_tb.  Verify that we have
           seen this numbered exit before, via tcg_gen_goto_tb.  */
        tcg_debug_assert(tcg_ctx->goto_tb_issue_mask & (1 << idx));
#endif
    } else {
        /* This is an exit via the exitreq label.  */
        tcg_debug_assert(idx == TB_EXIT_REQUESTED);
    }

    plugin_gen_disable_mem_helpers();
    tcg_gen_op1i(INDEX_op_exit_tb, val);
}
2732 
/* Emit the op for a direct (chainable) jump to numbered exit IDX. */
void tcg_gen_goto_tb(unsigned idx)
{
    /* We tested CF_NO_GOTO_TB in translator_use_goto_tb. */
    tcg_debug_assert(!(tcg_ctx->tb_cflags & CF_NO_GOTO_TB));
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= TB_EXIT_IDXMAX);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before.  */
    tcg_debug_assert((tcg_ctx->goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx->goto_tb_issue_mask |= 1 << idx;
#endif
    plugin_gen_disable_mem_helpers();
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}
2747 
/*
 * Emit a helper call that looks up the next TB and an indirect jump
 * to the result, falling back to a plain exit when goto_ptr is
 * disabled for this TB via CF_NO_GOTO_PTR.
 */
void tcg_gen_lookup_and_goto_ptr(void)
{
    TCGv_ptr ptr;

    if (tcg_ctx->tb_cflags & CF_NO_GOTO_PTR) {
        tcg_gen_exit_tb(NULL, 0);
        return;
    }

    plugin_gen_disable_mem_helpers();
    ptr = tcg_temp_new_ptr();
    gen_helper_lookup_tb_ptr(ptr, cpu_env);
    tcg_gen_op1i(INDEX_op_goto_ptr, tcgv_ptr_arg(ptr));
    tcg_temp_free_ptr(ptr);
}
2763 
2764 static inline MemOp tcg_canonicalize_memop(MemOp op, bool is64, bool st)
2765 {
2766     /* Trigger the asserts within as early as possible.  */
2767     unsigned a_bits = get_alignment_bits(op);
2768 
2769     /* Prefer MO_ALIGN+MO_XX over MO_ALIGN_XX+MO_XX */
2770     if (a_bits == (op & MO_SIZE)) {
2771         op = (op & ~MO_AMASK) | MO_ALIGN;
2772     }
2773 
2774     switch (op & MO_SIZE) {
2775     case MO_8:
2776         op &= ~MO_BSWAP;
2777         break;
2778     case MO_16:
2779         break;
2780     case MO_32:
2781         if (!is64) {
2782             op &= ~MO_SIGN;
2783         }
2784         break;
2785     case MO_64:
2786         if (is64) {
2787             op &= ~MO_SIGN;
2788             break;
2789         }
2790         /* fall through */
2791     default:
2792         g_assert_not_reached();
2793     }
2794     if (st) {
2795         op &= ~MO_SIGN;
2796     }
2797     return op;
2798 }
2799 
/*
 * Emit a guest load/store opcode with a 32-bit data value, choosing
 * the op arity by how many host registers the guest address needs.
 */
static void gen_ldst_i32(TCGOpcode opc, TCGv_i32 val, TCGv addr,
                         MemOp memop, TCGArg idx)
{
    MemOpIdx oi = make_memop_idx(memop, idx);
#if TARGET_LONG_BITS == 32
    tcg_gen_op3i_i32(opc, val, addr, oi);
#else
    if (TCG_TARGET_REG_BITS == 32) {
        /* 64-bit guest address occupies two 32-bit host registers. */
        tcg_gen_op4i_i32(opc, val, TCGV_LOW(addr), TCGV_HIGH(addr), oi);
    } else {
        tcg_gen_op3(opc, tcgv_i32_arg(val), tcgv_i64_arg(addr), oi);
    }
#endif
}
2814 
/*
 * Emit a guest load/store opcode with a 64-bit data value, choosing
 * the op arity by how many host registers the value and the guest
 * address each need.
 */
static void gen_ldst_i64(TCGOpcode opc, TCGv_i64 val, TCGv addr,
                         MemOp memop, TCGArg idx)
{
    MemOpIdx oi = make_memop_idx(memop, idx);
#if TARGET_LONG_BITS == 32
    if (TCG_TARGET_REG_BITS == 32) {
        /* 64-bit value as two registers, 32-bit address as one. */
        tcg_gen_op4i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val), addr, oi);
    } else {
        tcg_gen_op3(opc, tcgv_i64_arg(val), tcgv_i32_arg(addr), oi);
    }
#else
    if (TCG_TARGET_REG_BITS == 32) {
        /* Both the value and the address need register pairs. */
        tcg_gen_op5i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val),
                         TCGV_LOW(addr), TCGV_HIGH(addr), oi);
    } else {
        tcg_gen_op3i_i64(opc, val, addr, oi);
    }
#endif
}
2834 
/*
 * Emit a memory barrier for the orderings in TYPE that the guest
 * memory model requires and the host does not already provide.
 */
static void tcg_gen_req_mo(TCGBar type)
{
#ifdef TCG_GUEST_DEFAULT_MO
    /* Keep only orderings the guest architecture actually demands. */
    type &= TCG_GUEST_DEFAULT_MO;
#endif
    /* Drop orderings the host already guarantees by default. */
    type &= ~TCG_TARGET_DEFAULT_MO;
    if (type) {
        tcg_gen_mb(type | TCG_BAR_SC);
    }
}
2845 
/*
 * If a plugin is instrumenting the current insn, return a fresh copy
 * of VADDR so the address survives a load that may overwrite its
 * register; otherwise return VADDR unchanged.  The copy is freed by
 * plugin_gen_mem_callbacks().
 */
static inline TCGv plugin_prep_mem_callbacks(TCGv vaddr)
{
#ifdef CONFIG_PLUGIN
    if (tcg_ctx->plugin_insn != NULL) {
        /* Save a copy of the vaddr for use after a load.  */
        TCGv temp = tcg_temp_new();
        tcg_gen_mov_tl(temp, vaddr);
        return temp;
    }
#endif
    return vaddr;
}
2858 
/*
 * Emit the plugin memory callback for the access just generated and
 * release the address copy made by plugin_prep_mem_callbacks().
 */
static void plugin_gen_mem_callbacks(TCGv vaddr, MemOpIdx oi,
                                     enum qemu_plugin_mem_rw rw)
{
#ifdef CONFIG_PLUGIN
    if (tcg_ctx->plugin_insn != NULL) {
        qemu_plugin_meminfo_t info = make_plugin_meminfo(oi, rw);
        plugin_gen_empty_mem_callback(vaddr, info);
        tcg_temp_free(vaddr);
    }
#endif
}
2870 
/* Generate a guest load of a 32-bit value from ADDR in mmu context IDX. */
void tcg_gen_qemu_ld_i32(TCGv_i32 val, TCGv addr, TCGArg idx, MemOp memop)
{
    MemOp orig_memop;
    MemOpIdx oi;

    tcg_gen_req_mo(TCG_MO_LD_LD | TCG_MO_ST_LD);
    memop = tcg_canonicalize_memop(memop, 0, 0);
    oi = make_memop_idx(memop, idx);

    orig_memop = memop;
    if (!TCG_TARGET_HAS_MEMORY_BSWAP && (memop & MO_BSWAP)) {
        /* Host cannot swap within the memory op: load in host order
           and byte-swap the result afterwards. */
        memop &= ~MO_BSWAP;
        /* The bswap primitive benefits from zero-extended input.  */
        if ((memop & MO_SSIZE) == MO_SW) {
            memop &= ~MO_SIGN;
        }
    }

    addr = plugin_prep_mem_callbacks(addr);
    gen_ldst_i32(INDEX_op_qemu_ld_i32, val, addr, memop, idx);
    plugin_gen_mem_callbacks(addr, oi, QEMU_PLUGIN_MEM_R);

    if ((orig_memop ^ memop) & MO_BSWAP) {
        /* Deferred byte swap; reapply the originally requested sign. */
        switch (orig_memop & MO_SIZE) {
        case MO_16:
            tcg_gen_bswap16_i32(val, val, (orig_memop & MO_SIGN
                                           ? TCG_BSWAP_IZ | TCG_BSWAP_OS
                                           : TCG_BSWAP_IZ | TCG_BSWAP_OZ));
            break;
        case MO_32:
            tcg_gen_bswap32_i32(val, val);
            break;
        default:
            g_assert_not_reached();
        }
    }
}
2908 
/* Generate a guest store of a 32-bit value to ADDR in mmu context IDX. */
void tcg_gen_qemu_st_i32(TCGv_i32 val, TCGv addr, TCGArg idx, MemOp memop)
{
    TCGv_i32 swap = NULL;
    MemOpIdx oi;

    tcg_gen_req_mo(TCG_MO_LD_ST | TCG_MO_ST_ST);
    memop = tcg_canonicalize_memop(memop, 0, 1);
    oi = make_memop_idx(memop, idx);

    if (!TCG_TARGET_HAS_MEMORY_BSWAP && (memop & MO_BSWAP)) {
        /* Host cannot swap within the memory op: byte-swap into a
           temporary first and store that in host order. */
        swap = tcg_temp_new_i32();
        switch (memop & MO_SIZE) {
        case MO_16:
            tcg_gen_bswap16_i32(swap, val, 0);
            break;
        case MO_32:
            tcg_gen_bswap32_i32(swap, val);
            break;
        default:
            g_assert_not_reached();
        }
        val = swap;
        memop &= ~MO_BSWAP;
    }

    addr = plugin_prep_mem_callbacks(addr);
    /* Use the dedicated byte-store opcode when the backend has one. */
    if (TCG_TARGET_HAS_qemu_st8_i32 && (memop & MO_SIZE) == MO_8) {
        gen_ldst_i32(INDEX_op_qemu_st8_i32, val, addr, memop, idx);
    } else {
        gen_ldst_i32(INDEX_op_qemu_st_i32, val, addr, memop, idx);
    }
    plugin_gen_mem_callbacks(addr, oi, QEMU_PLUGIN_MEM_W);

    if (swap) {
        tcg_temp_free_i32(swap);
    }
}
2946 
/* Generate a guest load of a 64-bit value from ADDR in mmu context IDX. */
void tcg_gen_qemu_ld_i64(TCGv_i64 val, TCGv addr, TCGArg idx, MemOp memop)
{
    MemOp orig_memop;
    MemOpIdx oi;

    if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
        /* Sub-64-bit load on a 32-bit host: load into the low half,
           then extend into the high half per the memop's sign. */
        tcg_gen_qemu_ld_i32(TCGV_LOW(val), addr, idx, memop);
        if (memop & MO_SIGN) {
            tcg_gen_sari_i32(TCGV_HIGH(val), TCGV_LOW(val), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(val), 0);
        }
        return;
    }

    tcg_gen_req_mo(TCG_MO_LD_LD | TCG_MO_ST_LD);
    memop = tcg_canonicalize_memop(memop, 1, 0);
    oi = make_memop_idx(memop, idx);

    orig_memop = memop;
    if (!TCG_TARGET_HAS_MEMORY_BSWAP && (memop & MO_BSWAP)) {
        /* Host cannot swap within the memory op: load in host order
           and byte-swap the result afterwards. */
        memop &= ~MO_BSWAP;
        /* The bswap primitive benefits from zero-extended input.  */
        if ((memop & MO_SIGN) && (memop & MO_SIZE) < MO_64) {
            memop &= ~MO_SIGN;
        }
    }

    addr = plugin_prep_mem_callbacks(addr);
    gen_ldst_i64(INDEX_op_qemu_ld_i64, val, addr, memop, idx);
    plugin_gen_mem_callbacks(addr, oi, QEMU_PLUGIN_MEM_R);

    if ((orig_memop ^ memop) & MO_BSWAP) {
        /* Deferred byte swap; reapply the originally requested sign. */
        int flags = (orig_memop & MO_SIGN
                     ? TCG_BSWAP_IZ | TCG_BSWAP_OS
                     : TCG_BSWAP_IZ | TCG_BSWAP_OZ);
        switch (orig_memop & MO_SIZE) {
        case MO_16:
            tcg_gen_bswap16_i64(val, val, flags);
            break;
        case MO_32:
            tcg_gen_bswap32_i64(val, val, flags);
            break;
        case MO_64:
            tcg_gen_bswap64_i64(val, val);
            break;
        default:
            g_assert_not_reached();
        }
    }
}
2998 
/* Generate a guest store of a 64-bit value to ADDR in mmu context IDX. */
void tcg_gen_qemu_st_i64(TCGv_i64 val, TCGv addr, TCGArg idx, MemOp memop)
{
    TCGv_i64 swap = NULL;
    MemOpIdx oi;

    if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
        /* Sub-64-bit store on a 32-bit host: only the low half
           reaches memory. */
        tcg_gen_qemu_st_i32(TCGV_LOW(val), addr, idx, memop);
        return;
    }

    tcg_gen_req_mo(TCG_MO_LD_ST | TCG_MO_ST_ST);
    memop = tcg_canonicalize_memop(memop, 1, 1);
    oi = make_memop_idx(memop, idx);

    if (!TCG_TARGET_HAS_MEMORY_BSWAP && (memop & MO_BSWAP)) {
        /* Host cannot swap within the memory op: byte-swap into a
           temporary first and store that in host order. */
        swap = tcg_temp_new_i64();
        switch (memop & MO_SIZE) {
        case MO_16:
            tcg_gen_bswap16_i64(swap, val, 0);
            break;
        case MO_32:
            tcg_gen_bswap32_i64(swap, val, 0);
            break;
        case MO_64:
            tcg_gen_bswap64_i64(swap, val);
            break;
        default:
            g_assert_not_reached();
        }
        val = swap;
        memop &= ~MO_BSWAP;
    }

    addr = plugin_prep_mem_callbacks(addr);
    gen_ldst_i64(INDEX_op_qemu_st_i64, val, addr, memop, idx);
    plugin_gen_mem_callbacks(addr, oi, QEMU_PLUGIN_MEM_W);

    if (swap) {
        tcg_temp_free_i64(swap);
    }
}
3040 
3041 static void tcg_gen_ext_i32(TCGv_i32 ret, TCGv_i32 val, MemOp opc)
3042 {
3043     switch (opc & MO_SSIZE) {
3044     case MO_SB:
3045         tcg_gen_ext8s_i32(ret, val);
3046         break;
3047     case MO_UB:
3048         tcg_gen_ext8u_i32(ret, val);
3049         break;
3050     case MO_SW:
3051         tcg_gen_ext16s_i32(ret, val);
3052         break;
3053     case MO_UW:
3054         tcg_gen_ext16u_i32(ret, val);
3055         break;
3056     default:
3057         tcg_gen_mov_i32(ret, val);
3058         break;
3059     }
3060 }
3061 
3062 static void tcg_gen_ext_i64(TCGv_i64 ret, TCGv_i64 val, MemOp opc)
3063 {
3064     switch (opc & MO_SSIZE) {
3065     case MO_SB:
3066         tcg_gen_ext8s_i64(ret, val);
3067         break;
3068     case MO_UB:
3069         tcg_gen_ext8u_i64(ret, val);
3070         break;
3071     case MO_SW:
3072         tcg_gen_ext16s_i64(ret, val);
3073         break;
3074     case MO_UW:
3075         tcg_gen_ext16u_i64(ret, val);
3076         break;
3077     case MO_SL:
3078         tcg_gen_ext32s_i64(ret, val);
3079         break;
3080     case MO_UL:
3081         tcg_gen_ext32u_i64(ret, val);
3082         break;
3083     default:
3084         tcg_gen_mov_i64(ret, val);
3085         break;
3086     }
3087 }
3088 
/* Signatures of the out-of-line atomic helpers: compare-and-swap
   (cx) and read-modify-write (op), in 32- and 64-bit flavors. */
typedef void (*gen_atomic_cx_i32)(TCGv_i32, TCGv_env, TCGv,
                                  TCGv_i32, TCGv_i32, TCGv_i32);
typedef void (*gen_atomic_cx_i64)(TCGv_i64, TCGv_env, TCGv,
                                  TCGv_i64, TCGv_i64, TCGv_i32);
typedef void (*gen_atomic_op_i32)(TCGv_i32, TCGv_env, TCGv,
                                  TCGv_i32, TCGv_i32);
typedef void (*gen_atomic_op_i64)(TCGv_i64, TCGv_env, TCGv,
                                  TCGv_i64, TCGv_i32);
3097 
/* Omit 64-bit table entries when the host lacks 64-bit atomics. */
#ifdef CONFIG_ATOMIC64
# define WITH_ATOMIC64(X) X,
#else
# define WITH_ATOMIC64(X)
#endif
3103 
/* cmpxchg helper table, indexed by access size and byte order. */
static void * const table_cmpxchg[(MO_SIZE | MO_BSWAP) + 1] = {
    [MO_8] = gen_helper_atomic_cmpxchgb,
    [MO_16 | MO_LE] = gen_helper_atomic_cmpxchgw_le,
    [MO_16 | MO_BE] = gen_helper_atomic_cmpxchgw_be,
    [MO_32 | MO_LE] = gen_helper_atomic_cmpxchgl_le,
    [MO_32 | MO_BE] = gen_helper_atomic_cmpxchgl_be,
    WITH_ATOMIC64([MO_64 | MO_LE] = gen_helper_atomic_cmpxchgq_le)
    WITH_ATOMIC64([MO_64 | MO_BE] = gen_helper_atomic_cmpxchgq_be)
};
3113 
/*
 * Generate a 32-bit compare-and-swap on guest memory: if *addr == cmpv
 * then *addr = newv; retv receives the prior memory value, extended
 * per MEMOP.  Without CF_PARALLEL the sequence need not be atomic and
 * is emitted inline; otherwise an out-of-line helper is called.
 */
void tcg_gen_atomic_cmpxchg_i32(TCGv_i32 retv, TCGv addr, TCGv_i32 cmpv,
                                TCGv_i32 newv, TCGArg idx, MemOp memop)
{
    memop = tcg_canonicalize_memop(memop, 0, 0);

    if (!(tcg_ctx->tb_cflags & CF_PARALLEL)) {
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();

        tcg_gen_ext_i32(t2, cmpv, memop & MO_SIZE);

        tcg_gen_qemu_ld_i32(t1, addr, idx, memop & ~MO_SIGN);
        /* t2 = (old == cmp ? new : old); stored back unconditionally. */
        tcg_gen_movcond_i32(TCG_COND_EQ, t2, t1, t2, newv, t1);
        tcg_gen_qemu_st_i32(t2, addr, idx, memop);
        tcg_temp_free_i32(t2);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i32(retv, t1, memop);
        } else {
            tcg_gen_mov_i32(retv, t1);
        }
        tcg_temp_free_i32(t1);
    } else {
        gen_atomic_cx_i32 gen;
        MemOpIdx oi;

        gen = table_cmpxchg[memop & (MO_SIZE | MO_BSWAP)];
        tcg_debug_assert(gen != NULL);

        oi = make_memop_idx(memop & ~MO_SIGN, idx);
        gen(retv, cpu_env, addr, cmpv, newv, tcg_constant_i32(oi));

        if (memop & MO_SIGN) {
            tcg_gen_ext_i32(retv, retv, memop);
        }
    }
}
3151 
/*
 * Generate a 64-bit compare-and-swap on guest memory.  Three paths:
 * an inline non-atomic sequence without CF_PARALLEL; a 64-bit helper
 * (or exit_atomic trap when the host lacks 64-bit atomics) for
 * full-width operations; and delegation to the 32-bit version for
 * narrower sizes.  RETV receives the prior value, extended per MEMOP.
 */
void tcg_gen_atomic_cmpxchg_i64(TCGv_i64 retv, TCGv addr, TCGv_i64 cmpv,
                                TCGv_i64 newv, TCGArg idx, MemOp memop)
{
    memop = tcg_canonicalize_memop(memop, 1, 0);

    if (!(tcg_ctx->tb_cflags & CF_PARALLEL)) {
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();

        tcg_gen_ext_i64(t2, cmpv, memop & MO_SIZE);

        tcg_gen_qemu_ld_i64(t1, addr, idx, memop & ~MO_SIGN);
        /* t2 = (old == cmp ? new : old); stored back unconditionally. */
        tcg_gen_movcond_i64(TCG_COND_EQ, t2, t1, t2, newv, t1);
        tcg_gen_qemu_st_i64(t2, addr, idx, memop);
        tcg_temp_free_i64(t2);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i64(retv, t1, memop);
        } else {
            tcg_gen_mov_i64(retv, t1);
        }
        tcg_temp_free_i64(t1);
    } else if ((memop & MO_SIZE) == MO_64) {
#ifdef CONFIG_ATOMIC64
        gen_atomic_cx_i64 gen;
        MemOpIdx oi;

        gen = table_cmpxchg[memop & (MO_SIZE | MO_BSWAP)];
        tcg_debug_assert(gen != NULL);

        oi = make_memop_idx(memop, idx);
        gen(retv, cpu_env, addr, cmpv, newv, tcg_constant_i32(oi));
#else
        gen_helper_exit_atomic(cpu_env);
        /* Produce a result, so that we have a well-formed opcode stream
           with respect to uses of the result in the (dead) code following.  */
        tcg_gen_movi_i64(retv, 0);
#endif /* CONFIG_ATOMIC64 */
    } else {
        /* Narrow operation: go through the 32-bit implementation. */
        TCGv_i32 c32 = tcg_temp_new_i32();
        TCGv_i32 n32 = tcg_temp_new_i32();
        TCGv_i32 r32 = tcg_temp_new_i32();

        tcg_gen_extrl_i64_i32(c32, cmpv);
        tcg_gen_extrl_i64_i32(n32, newv);
        tcg_gen_atomic_cmpxchg_i32(r32, addr, c32, n32, idx, memop & ~MO_SIGN);
        tcg_temp_free_i32(c32);
        tcg_temp_free_i32(n32);

        tcg_gen_extu_i32_i64(retv, r32);
        tcg_temp_free_i32(r32);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i64(retv, retv, memop);
        }
    }
}
3209 
3210 static void do_nonatomic_op_i32(TCGv_i32 ret, TCGv addr, TCGv_i32 val,
3211                                 TCGArg idx, MemOp memop, bool new_val,
3212                                 void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
3213 {
3214     TCGv_i32 t1 = tcg_temp_new_i32();
3215     TCGv_i32 t2 = tcg_temp_new_i32();
3216 
3217     memop = tcg_canonicalize_memop(memop, 0, 0);
3218 
3219     tcg_gen_qemu_ld_i32(t1, addr, idx, memop);
3220     tcg_gen_ext_i32(t2, val, memop);
3221     gen(t2, t1, t2);
3222     tcg_gen_qemu_st_i32(t2, addr, idx, memop);
3223 
3224     tcg_gen_ext_i32(ret, (new_val ? t2 : t1), memop);
3225     tcg_temp_free_i32(t1);
3226     tcg_temp_free_i32(t2);
3227 }
3228 
/*
 * Emit an atomic 32-bit read-modify-write via the out-of-line helper
 * selected from TABLE by access size and byte order, sign-extending
 * the result when MEMOP requests it.  Used under CF_PARALLEL.
 */
static void do_atomic_op_i32(TCGv_i32 ret, TCGv addr, TCGv_i32 val,
                             TCGArg idx, MemOp memop, void * const table[])
{
    gen_atomic_op_i32 gen;
    MemOpIdx oi;

    memop = tcg_canonicalize_memop(memop, 0, 0);

    gen = table[memop & (MO_SIZE | MO_BSWAP)];
    tcg_debug_assert(gen != NULL);

    oi = make_memop_idx(memop & ~MO_SIGN, idx);
    gen(ret, cpu_env, addr, val, tcg_constant_i32(oi));

    if (memop & MO_SIGN) {
        tcg_gen_ext_i32(ret, ret, memop);
    }
}
3247 
3248 static void do_nonatomic_op_i64(TCGv_i64 ret, TCGv addr, TCGv_i64 val,
3249                                 TCGArg idx, MemOp memop, bool new_val,
3250                                 void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
3251 {
3252     TCGv_i64 t1 = tcg_temp_new_i64();
3253     TCGv_i64 t2 = tcg_temp_new_i64();
3254 
3255     memop = tcg_canonicalize_memop(memop, 1, 0);
3256 
3257     tcg_gen_qemu_ld_i64(t1, addr, idx, memop);
3258     tcg_gen_ext_i64(t2, val, memop);
3259     gen(t2, t1, t2);
3260     tcg_gen_qemu_st_i64(t2, addr, idx, memop);
3261 
3262     tcg_gen_ext_i64(ret, (new_val ? t2 : t1), memop);
3263     tcg_temp_free_i64(t1);
3264     tcg_temp_free_i64(t2);
3265 }
3266 
/*
 * Emit an atomic 64-bit read-modify-write.  Full-width operations use
 * the 64-bit helper from TABLE (or trap via exit_atomic when the host
 * lacks 64-bit atomics); narrower ones delegate to the 32-bit path.
 */
static void do_atomic_op_i64(TCGv_i64 ret, TCGv addr, TCGv_i64 val,
                             TCGArg idx, MemOp memop, void * const table[])
{
    memop = tcg_canonicalize_memop(memop, 1, 0);

    if ((memop & MO_SIZE) == MO_64) {
#ifdef CONFIG_ATOMIC64
        gen_atomic_op_i64 gen;
        MemOpIdx oi;

        gen = table[memop & (MO_SIZE | MO_BSWAP)];
        tcg_debug_assert(gen != NULL);

        oi = make_memop_idx(memop & ~MO_SIGN, idx);
        gen(ret, cpu_env, addr, val, tcg_constant_i32(oi));
#else
        gen_helper_exit_atomic(cpu_env);
        /* Produce a result, so that we have a well-formed opcode stream
           with respect to uses of the result in the (dead) code following.  */
        tcg_gen_movi_i64(ret, 0);
#endif /* CONFIG_ATOMIC64 */
    } else {
        TCGv_i32 v32 = tcg_temp_new_i32();
        TCGv_i32 r32 = tcg_temp_new_i32();

        tcg_gen_extrl_i64_i32(v32, val);
        do_atomic_op_i32(r32, addr, v32, idx, memop & ~MO_SIGN, table);
        tcg_temp_free_i32(v32);

        tcg_gen_extu_i32_i64(ret, r32);
        tcg_temp_free_i32(r32);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i64(ret, ret, memop);
        }
    }
}
3304 
/*
 * Define the helper table plus the i32 and i64 front ends for one
 * atomic read-modify-write operation NAME.  OP is the inline tcg op
 * used for the non-parallel fallback; NEW selects whether that
 * fallback returns the pre-op (0) or post-op (1) value.
 */
#define GEN_ATOMIC_HELPER(NAME, OP, NEW)                                \
static void * const table_##NAME[(MO_SIZE | MO_BSWAP) + 1] = {          \
    [MO_8] = gen_helper_atomic_##NAME##b,                               \
    [MO_16 | MO_LE] = gen_helper_atomic_##NAME##w_le,                   \
    [MO_16 | MO_BE] = gen_helper_atomic_##NAME##w_be,                   \
    [MO_32 | MO_LE] = gen_helper_atomic_##NAME##l_le,                   \
    [MO_32 | MO_BE] = gen_helper_atomic_##NAME##l_be,                   \
    WITH_ATOMIC64([MO_64 | MO_LE] = gen_helper_atomic_##NAME##q_le)     \
    WITH_ATOMIC64([MO_64 | MO_BE] = gen_helper_atomic_##NAME##q_be)     \
};                                                                      \
void tcg_gen_atomic_##NAME##_i32                                        \
    (TCGv_i32 ret, TCGv addr, TCGv_i32 val, TCGArg idx, MemOp memop)    \
{                                                                       \
    if (tcg_ctx->tb_cflags & CF_PARALLEL) {                             \
        do_atomic_op_i32(ret, addr, val, idx, memop, table_##NAME);     \
    } else {                                                            \
        do_nonatomic_op_i32(ret, addr, val, idx, memop, NEW,            \
                            tcg_gen_##OP##_i32);                        \
    }                                                                   \
}                                                                       \
void tcg_gen_atomic_##NAME##_i64                                        \
    (TCGv_i64 ret, TCGv addr, TCGv_i64 val, TCGArg idx, MemOp memop)    \
{                                                                       \
    if (tcg_ctx->tb_cflags & CF_PARALLEL) {                             \
        do_atomic_op_i64(ret, addr, val, idx, memop, table_##NAME);     \
    } else {                                                            \
        do_nonatomic_op_i64(ret, addr, val, idx, memop, NEW,            \
                            tcg_gen_##OP##_i64);                        \
    }                                                                   \
}
3335 
/* fetch-and-op: return the value from before the operation. */
GEN_ATOMIC_HELPER(fetch_add, add, 0)
GEN_ATOMIC_HELPER(fetch_and, and, 0)
GEN_ATOMIC_HELPER(fetch_or, or, 0)
GEN_ATOMIC_HELPER(fetch_xor, xor, 0)
GEN_ATOMIC_HELPER(fetch_smin, smin, 0)
GEN_ATOMIC_HELPER(fetch_umin, umin, 0)
GEN_ATOMIC_HELPER(fetch_smax, smax, 0)
GEN_ATOMIC_HELPER(fetch_umax, umax, 0)

/* op-and-fetch: return the value from after the operation. */
GEN_ATOMIC_HELPER(add_fetch, add, 1)
GEN_ATOMIC_HELPER(and_fetch, and, 1)
GEN_ATOMIC_HELPER(or_fetch, or, 1)
GEN_ATOMIC_HELPER(xor_fetch, xor, 1)
GEN_ATOMIC_HELPER(smin_fetch, smin, 1)
GEN_ATOMIC_HELPER(umin_fetch, umin, 1)
GEN_ATOMIC_HELPER(smax_fetch, smax, 1)
GEN_ATOMIC_HELPER(umax_fetch, umax, 1)
3353 
/* Binary "op" that ignores the old value A and selects B; pairing it
   with GEN_ATOMIC_HELPER below yields an exchange operation. */
static void tcg_gen_mov2_i32(TCGv_i32 r, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_mov_i32(r, b);
}
3358 
/* 64-bit counterpart of tcg_gen_mov2_i32: ignore A, select B. */
static void tcg_gen_mov2_i64(TCGv_i64 r, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_mov_i64(r, b);
}
3363 
/* xchg: store the operand, return the old value (fetch semantics). */
GEN_ATOMIC_HELPER(xchg, mov2, 0)

#undef GEN_ATOMIC_HELPER
3367