1 /*
2  *    Stack-less Just-In-Time compiler
3  *
4  *    Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
5  *
6  * Redistribution and use in source and binary forms, with or without modification, are
7  * permitted provided that the following conditions are met:
8  *
9  *   1. Redistributions of source code must retain the above copyright notice, this list of
10  *      conditions and the following disclaimer.
11  *
12  *   2. Redistributions in binary form must reproduce the above copyright notice, this list
13  *      of conditions and the following disclaimer in the documentation and/or other materials
14  *      provided with the distribution.
15  *
16  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
17  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
18  * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
19  * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
20  * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
21  * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
22  * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
23  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
24  * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
25  */
26 
27 SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void)
28 {
29 	return "ARM-64" SLJIT_CPUINFO;
30 }
31 
32 /* Length of an instruction word */
33 typedef sljit_u32 sljit_ins;
34 
35 #define TMP_ZERO	(0)
36 
37 #define TMP_REG1	(SLJIT_NUMBER_OF_REGISTERS + 2)
38 #define TMP_REG2	(SLJIT_NUMBER_OF_REGISTERS + 3)
39 #define TMP_LR		(SLJIT_NUMBER_OF_REGISTERS + 4)
40 #define TMP_FP		(SLJIT_NUMBER_OF_REGISTERS + 5)
41 
42 #define TMP_FREG1	(SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1)
43 #define TMP_FREG2	(SLJIT_NUMBER_OF_FLOAT_REGISTERS + 2)
44 
45 /* r18 - platform register, currently not used */
46 static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 8] = {
47 	31, 0, 1, 2, 3, 4, 5, 6, 7, 11, 12, 13, 14, 15, 16, 17, 8, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 31, 9, 10, 30, 29
48 };
49 
50 static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 3] = {
51 	0, 0, 1, 2, 3, 4, 5, 6, 7
52 };
53 
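/* Operand field helpers: the destination/target register (Rd/Rt) occupies
   bits [4:0], the first source register (Rn) bits [9:5], the second register
   of a pair (Rt2) bits [14:10], and the index/second source register (Rm)
   bits [20:16] of an A64 instruction word. W_OP is the sf bit (bit 31);
   XOR-ing it off a 64 bit opcode selects the 32 bit (W register) form. */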
54 #define W_OP (1u << 31)
55 #define RD(rd) (reg_map[rd])
56 #define RT(rt) (reg_map[rt])
57 #define RN(rn) (reg_map[rn] << 5)
58 #define RT2(rt2) (reg_map[rt2] << 10)
59 #define RM(rm) (reg_map[rm] << 16)
60 #define VD(vd) (freg_map[vd])
61 #define VT(vt) (freg_map[vt])
62 #define VN(vn) (freg_map[vn] << 5)
63 #define VM(vm) (freg_map[vm] << 16)
64 
65 /* --------------------------------------------------------------------- */
66 /*  Instruction forms                                                    */
67 /* --------------------------------------------------------------------- */
68 
69 #define ADC 0x9a000000
70 #define ADD 0x8b000000
71 #define ADDE 0x8b200000
72 #define ADDI 0x91000000
73 #define AND 0x8a000000
74 #define ANDI 0x92000000
75 #define ASRV 0x9ac02800
76 #define B 0x14000000
77 #define B_CC 0x54000000
78 #define BL 0x94000000
79 #define BLR 0xd63f0000
80 #define BR 0xd61f0000
81 #define BRK 0xd4200000
82 #define CBZ 0xb4000000
83 #define CLZ 0xdac01000
84 #define CSEL 0x9a800000
85 #define CSINC 0x9a800400
86 #define EOR 0xca000000
87 #define EORI 0xd2000000
88 #define FABS 0x1e60c000
89 #define FADD 0x1e602800
90 #define FCMP 0x1e602000
91 #define FCVT 0x1e224000
92 #define FCVTZS 0x9e780000
93 #define FDIV 0x1e601800
94 #define FMOV 0x1e604000
95 #define FMUL 0x1e600800
96 #define FNEG 0x1e614000
97 #define FSUB 0x1e603800
98 #define LDRI 0xf9400000
99 #define LDP 0xa9400000
100 #define LDP_PRE 0xa9c00000
101 #define LDR_PRE 0xf8400c00
102 #define LSLV 0x9ac02000
103 #define LSRV 0x9ac02400
104 #define MADD 0x9b000000
105 #define MOVK 0xf2800000
106 #define MOVN 0x92800000
107 #define MOVZ 0xd2800000
108 #define NOP 0xd503201f
109 #define ORN 0xaa200000
110 #define ORR 0xaa000000
111 #define ORRI 0xb2000000
112 #define RET 0xd65f0000
113 #define SBC 0xda000000
114 #define SBFM 0x93000000
115 #define SCVTF 0x9e620000
116 #define SDIV 0x9ac00c00
117 #define SMADDL 0x9b200000
118 #define SMULH 0x9b403c00
119 #define STP 0xa9000000
120 #define STP_PRE 0xa9800000
121 #define STRB 0x38206800
122 #define STRBI 0x39000000
123 #define STRI 0xf9000000
124 #define STR_FI 0x3d000000
125 #define STR_FR 0x3c206800
126 #define STUR_FI 0x3c000000
127 #define STURBI 0x38000000
128 #define SUB 0xcb000000
129 #define SUBI 0xd1000000
130 #define SUBS 0xeb000000
131 #define UBFM 0xd3000000
132 #define UDIV 0x9ac00800
133 #define UMULH 0x9bc03c00
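/* The values above are the base encodings of the A64 instructions used by
   this backend: register and immediate fields are ORed in by the emitters,
   and most 64 bit opcodes are turned into their 32 bit variants by XOR-ing
   W_OP (or inv_bits) into them. */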
134 
135 /* Appends a single instruction word to the instruction buffer.
136    (ARM64 has no delay slots, so no instruction reordering is needed here.) */
137 static sljit_s32 push_inst(struct sljit_compiler *compiler, sljit_ins ins)
138 {
139 	sljit_ins *ptr = (sljit_ins*)ensure_buf(compiler, sizeof(sljit_ins));
140 	FAIL_IF(!ptr);
141 	*ptr = ins;
142 	compiler->size++;
143 	return SLJIT_SUCCESS;
144 }
145 
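/* Builds an arbitrary 64 bit constant with a four instruction sequence:
   MOVZ loads bits [15:0] and clears the rest, then three MOVK instructions
   insert the remaining 16 bit chunks. The (n << 21) term is the "hw" field
   that selects the 16 * n bit position of each chunk. */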
146 static SLJIT_INLINE sljit_s32 emit_imm64_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_uw imm)
147 {
148 	FAIL_IF(push_inst(compiler, MOVZ | RD(dst) | ((imm & 0xffff) << 5)));
149 	FAIL_IF(push_inst(compiler, MOVK | RD(dst) | (((imm >> 16) & 0xffff) << 5) | (1 << 21)));
150 	FAIL_IF(push_inst(compiler, MOVK | RD(dst) | (((imm >> 32) & 0xffff) << 5) | (2 << 21)));
151 	return push_inst(compiler, MOVK | RD(dst) | ((imm >> 48) << 5) | (3 << 21));
152 }
153 
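/* Jumps are emitted as a worst case sequence: an optional conditional branch
   that skips the rest of the sequence, a four instruction MOVZ/MOVK target
   load and an indirect branch. detect_jump_type() checks how the target can
   be reached and returns the number of instruction words that can be dropped
   from that sequence: 5 when the conditional branch itself can take the short
   form, 4 when a direct B/BL suffices, 2 or 1 when the absolute address fits
   in 32 or 48 bits, and 0 when the full 64 bit constant is needed. */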
154 static SLJIT_INLINE sljit_sw detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code, sljit_sw executable_offset)
155 {
156 	sljit_sw diff;
157 	sljit_uw target_addr;
158 
159 	if (jump->flags & SLJIT_REWRITABLE_JUMP) {
160 		jump->flags |= PATCH_ABS64;
161 		return 0;
162 	}
163 
164 	if (jump->flags & JUMP_ADDR)
165 		target_addr = jump->u.target;
166 	else {
167 		SLJIT_ASSERT(jump->flags & JUMP_LABEL);
168 		target_addr = (sljit_uw)(code + jump->u.label->size) + (sljit_uw)executable_offset;
169 	}
170 
171 	diff = (sljit_sw)target_addr - (sljit_sw)(code_ptr + 4) - executable_offset;
172 
173 	if (jump->flags & IS_COND) {
174 		diff += sizeof(sljit_ins);
175 		if (diff <= 0xfffff && diff >= -0x100000) {
176 			code_ptr[-5] ^= (jump->flags & IS_CBZ) ? (0x1 << 24) : 0x1;
177 			jump->addr -= sizeof(sljit_ins);
178 			jump->flags |= PATCH_COND;
179 			return 5;
180 		}
181 		diff -= sizeof(sljit_ins);
182 	}
183 
184 	if (diff <= 0x7ffffff && diff >= -0x8000000) {
185 		jump->flags |= PATCH_B;
186 		return 4;
187 	}
188 
189 	if (target_addr < 0x100000000l) {
190 		if (jump->flags & IS_COND)
191 			code_ptr[-5] -= (2 << 5);
192 		code_ptr[-2] = code_ptr[0];
193 		return 2;
194 	}
195 
196 	if (target_addr < 0x1000000000000l) {
197 		if (jump->flags & IS_COND)
198 			code_ptr[-5] -= (1 << 5);
199 		jump->flags |= PATCH_ABS48;
200 		code_ptr[-1] = code_ptr[0];
201 		return 1;
202 	}
203 
204 	jump->flags |= PATCH_ABS64;
205 	return 0;
206 }
207 
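/* Same idea for put_label: based on the highest address a label can have,
   decide how many of the trailing MOVK instructions of the address loading
   sequence can be dropped; flags records how many of the upper 16 bit chunks
   still need to be patched later. */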
208 static SLJIT_INLINE sljit_sw put_label_get_length(struct sljit_put_label *put_label, sljit_uw max_label)
209 {
210 	if (max_label < 0x100000000l) {
211 		put_label->flags = 0;
212 		return 2;
213 	}
214 
215 	if (max_label < 0x1000000000000l) {
216 		put_label->flags = 1;
217 		return 1;
218 	}
219 
220 	put_label->flags = 2;
221 	return 0;
222 }
223 
224 SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler)
225 {
226 	struct sljit_memory_fragment *buf;
227 	sljit_ins *code;
228 	sljit_ins *code_ptr;
229 	sljit_ins *buf_ptr;
230 	sljit_ins *buf_end;
231 	sljit_uw word_count;
232 	sljit_uw next_addr;
233 	sljit_sw executable_offset;
234 	sljit_uw addr;
235 	sljit_s32 dst;
236 
237 	struct sljit_label *label;
238 	struct sljit_jump *jump;
239 	struct sljit_const *const_;
240 	struct sljit_put_label *put_label;
241 
242 	CHECK_ERROR_PTR();
243 	CHECK_PTR(check_sljit_generate_code(compiler));
244 	reverse_buf(compiler);
245 
246 	code = (sljit_ins*)SLJIT_MALLOC_EXEC(compiler->size * sizeof(sljit_ins), compiler->exec_allocator_data);
247 	PTR_FAIL_WITH_EXEC_IF(code);
248 	buf = compiler->buf;
249 
250 	code_ptr = code;
251 	word_count = 0;
252 	next_addr = 0;
253 	executable_offset = SLJIT_EXEC_OFFSET(code);
254 
255 	label = compiler->labels;
256 	jump = compiler->jumps;
257 	const_ = compiler->consts;
258 	put_label = compiler->put_labels;
259 
260 	do {
261 		buf_ptr = (sljit_ins*)buf->memory;
262 		buf_end = buf_ptr + (buf->used_size >> 2);
263 		do {
264 			*code_ptr = *buf_ptr++;
265 			if (next_addr == word_count) {
266 				SLJIT_ASSERT(!label || label->size >= word_count);
267 				SLJIT_ASSERT(!jump || jump->addr >= word_count);
268 				SLJIT_ASSERT(!const_ || const_->addr >= word_count);
269 				SLJIT_ASSERT(!put_label || put_label->addr >= word_count);
270 
271 				/* These structures are ordered by their address. */
272 				if (label && label->size == word_count) {
273 					label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
274 					label->size = code_ptr - code;
275 					label = label->next;
276 				}
277 				if (jump && jump->addr == word_count) {
278 						jump->addr = (sljit_uw)(code_ptr - 4);
279 						code_ptr -= detect_jump_type(jump, code_ptr, code, executable_offset);
280 						jump = jump->next;
281 				}
282 				if (const_ && const_->addr == word_count) {
283 					const_->addr = (sljit_uw)code_ptr;
284 					const_ = const_->next;
285 				}
286 				if (put_label && put_label->addr == word_count) {
287 					SLJIT_ASSERT(put_label->label);
288 					put_label->addr = (sljit_uw)(code_ptr - 3);
289 					code_ptr -= put_label_get_length(put_label, (sljit_uw)(SLJIT_ADD_EXEC_OFFSET(code, executable_offset) + put_label->label->size));
290 					put_label = put_label->next;
291 				}
292 				next_addr = compute_next_addr(label, jump, const_, put_label);
293 			}
294 			code_ptr ++;
295 			word_count ++;
296 		} while (buf_ptr < buf_end);
297 
298 		buf = buf->next;
299 	} while (buf);
300 
301 	if (label && label->size == word_count) {
302 		label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
303 		label->size = code_ptr - code;
304 		label = label->next;
305 	}
306 
307 	SLJIT_ASSERT(!label);
308 	SLJIT_ASSERT(!jump);
309 	SLJIT_ASSERT(!const_);
310 	SLJIT_ASSERT(!put_label);
311 	SLJIT_ASSERT(code_ptr - code <= (sljit_sw)compiler->size);
312 
313 	jump = compiler->jumps;
314 	while (jump) {
315 		do {
316 			addr = (jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target;
317 			buf_ptr = (sljit_ins *)jump->addr;
318 
319 			if (jump->flags & PATCH_B) {
320 				addr = (sljit_sw)(addr - (sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset)) >> 2;
321 				SLJIT_ASSERT((sljit_sw)addr <= 0x1ffffff && (sljit_sw)addr >= -0x2000000);
322 				buf_ptr[0] = ((jump->flags & IS_BL) ? BL : B) | (addr & 0x3ffffff);
323 				if (jump->flags & IS_COND)
324 					buf_ptr[-1] -= (4 << 5);
325 				break;
326 			}
327 			if (jump->flags & PATCH_COND) {
328 				addr = (sljit_sw)(addr - (sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset)) >> 2;
329 				SLJIT_ASSERT((sljit_sw)addr <= 0x3ffff && (sljit_sw)addr >= -0x40000);
330 				buf_ptr[0] = (buf_ptr[0] & ~0xffffe0) | ((addr & 0x7ffff) << 5);
331 				break;
332 			}
333 
334 			SLJIT_ASSERT((jump->flags & (PATCH_ABS48 | PATCH_ABS64)) || addr <= 0xffffffffl);
335 			SLJIT_ASSERT((jump->flags & PATCH_ABS64) || addr <= 0xffffffffffffl);
336 
337 			dst = buf_ptr[0] & 0x1f;
338 			buf_ptr[0] = MOVZ | dst | ((addr & 0xffff) << 5);
339 			buf_ptr[1] = MOVK | dst | (((addr >> 16) & 0xffff) << 5) | (1 << 21);
340 			if (jump->flags & (PATCH_ABS48 | PATCH_ABS64))
341 				buf_ptr[2] = MOVK | dst | (((addr >> 32) & 0xffff) << 5) | (2 << 21);
342 			if (jump->flags & PATCH_ABS64)
343 				buf_ptr[3] = MOVK | dst | (((addr >> 48) & 0xffff) << 5) | (3 << 21);
344 		} while (0);
345 		jump = jump->next;
346 	}
347 
348 	put_label = compiler->put_labels;
349 	while (put_label) {
350 		addr = put_label->label->addr;
351 		buf_ptr = (sljit_ins *)put_label->addr;
352 
353 		buf_ptr[0] |= (addr & 0xffff) << 5;
354 		buf_ptr[1] |= ((addr >> 16) & 0xffff) << 5;
355 
356 		if (put_label->flags >= 1)
357 			buf_ptr[2] |= ((addr >> 32) & 0xffff) << 5;
358 
359 		if (put_label->flags >= 2)
360 			buf_ptr[3] |= ((addr >> 48) & 0xffff) << 5;
361 
362 		put_label = put_label->next;
363 	}
364 
365 	compiler->error = SLJIT_ERR_COMPILED;
366 	compiler->executable_offset = executable_offset;
367 	compiler->executable_size = (code_ptr - code) * sizeof(sljit_ins);
368 
369 	code = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code, executable_offset);
370 	code_ptr = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
371 
372 	SLJIT_CACHE_FLUSH(code, code_ptr);
373 	SLJIT_UPDATE_WX_FLAGS(code, code_ptr, 1);
374 	return code;
375 }
376 
377 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
378 {
379 	switch (feature_type) {
380 	case SLJIT_HAS_FPU:
381 #ifdef SLJIT_IS_FPU_AVAILABLE
382 		return SLJIT_IS_FPU_AVAILABLE;
383 #else
384 		/* Available by default. */
385 		return 1;
386 #endif
387 
388 	case SLJIT_HAS_CLZ:
389 	case SLJIT_HAS_CMOV:
390 	case SLJIT_HAS_PREFETCH:
391 		return 1;
392 
393 	default:
394 		return 0;
395 	}
396 }
397 
398 /* --------------------------------------------------------------------- */
399 /*  Core code generator functions.                                       */
400 /* --------------------------------------------------------------------- */
401 
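/* Counts the trailing zero bits of a non-zero 64 bit value by testing halves
   of decreasing width; as a side effect the value is shifted right so that
   its lowest set bit ends up at bit 0. */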
402 #define COUNT_TRAILING_ZERO(value, result) \
403 	result = 0; \
404 	if (!(value & 0xffffffff)) { \
405 		result += 32; \
406 		value >>= 32; \
407 	} \
408 	if (!(value & 0xffff)) { \
409 		result += 16; \
410 		value >>= 16; \
411 	} \
412 	if (!(value & 0xff)) { \
413 		result += 8; \
414 		value >>= 8; \
415 	} \
416 	if (!(value & 0xf)) { \
417 		result += 4; \
418 		value >>= 4; \
419 	} \
420 	if (!(value & 0x3)) { \
421 		result += 2; \
422 		value >>= 2; \
423 	} \
424 	if (!(value & 0x1)) { \
425 		result += 1; \
426 		value >>= 1; \
427 	}
428 
429 #define LOGICAL_IMM_CHECK 0x100
430 
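/* Tries to encode imm as an A64 "bitmask immediate": a 2..64 bit element
   containing a single contiguous run of ones, repeated (possibly rotated)
   across the whole register. For example, 0x00ff00ff00ff00ff repeats a
   16 bit element with eight consecutive ones. Returns the combined
   N/immr/imms field bits ready to be ORed into ANDI/ORRI/EORI, or 0 if the
   value cannot be encoded; with LOGICAL_IMM_CHECK, all-zero and all-one
   values are rejected instead of triggering the assertion below. */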
431 static sljit_ins logical_imm(sljit_sw imm, sljit_s32 len)
432 {
433 	sljit_s32 negated, ones, right;
434 	sljit_uw mask, uimm;
435 	sljit_ins ins;
436 
437 	if (len & LOGICAL_IMM_CHECK) {
438 		len &= ~LOGICAL_IMM_CHECK;
439 		if (len == 32 && (imm == 0 || imm == -1))
440 			return 0;
441 		if (len == 16 && ((sljit_s32)imm == 0 || (sljit_s32)imm == -1))
442 			return 0;
443 	}
444 
445 	SLJIT_ASSERT((len == 32 && imm != 0 && imm != -1)
446 		|| (len == 16 && (sljit_s32)imm != 0 && (sljit_s32)imm != -1));
447 
448 	uimm = (sljit_uw)imm;
449 	while (1) {
450 		if (len <= 0) {
451 			SLJIT_UNREACHABLE();
452 			return 0;
453 		}
454 
455 		mask = ((sljit_uw)1 << len) - 1;
456 		if ((uimm & mask) != ((uimm >> len) & mask))
457 			break;
458 		len >>= 1;
459 	}
460 
461 	len <<= 1;
462 
463 	negated = 0;
464 	if (uimm & 0x1) {
465 		negated = 1;
466 		uimm = ~uimm;
467 	}
468 
469 	if (len < 64)
470 		uimm &= ((sljit_uw)1 << len) - 1;
471 
472 	/* Unsigned right shift. */
473 	COUNT_TRAILING_ZERO(uimm, right);
474 
475 	/* Signed shift. We also know that the highest bit is set. */
476 	imm = (sljit_sw)~uimm;
477 	SLJIT_ASSERT(imm < 0);
478 
479 	COUNT_TRAILING_ZERO(imm, ones);
480 
481 	if (~imm)
482 		return 0;
483 
484 	if (len == 64)
485 		ins = 1 << 22;
486 	else
487 		ins = (0x3f - ((len << 1) - 1)) << 10;
488 
489 	if (negated)
490 		return ins | ((len - ones - 1) << 10) | ((len - ones - right) << 16);
491 
492 	return ins | ((ones - 1) << 10) | ((len - right) << 16);
493 }
494 
495 #undef COUNT_TRAILING_ZERO
496 
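/* Loads a 64 bit immediate into dst with as few instructions as possible:
   single MOVZ/MOVN forms first, then an ORR with a bitmask immediate, and
   finally a MOVZ- or MOVN-based sequence where MOVK fills in the remaining
   16 bit chunks. The MOVN based sequence is chosen when more halfwords are
   0xffff than zero, since the inverted value then needs fewer instructions. */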
497 static sljit_s32 load_immediate(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw simm)
498 {
499 	sljit_uw imm = (sljit_uw)simm;
500 	sljit_s32 i, zeros, ones, first;
501 	sljit_ins bitmask;
502 
503 	/* Handling simple immediates first. */
504 	if (imm <= 0xffff)
505 		return push_inst(compiler, MOVZ | RD(dst) | (imm << 5));
506 
507 	if (simm < 0 && simm >= -0x10000)
508 		return push_inst(compiler, MOVN | RD(dst) | ((~imm & 0xffff) << 5));
509 
510 	if (imm <= 0xffffffffl) {
511 		if ((imm & 0xffff) == 0)
512 			return push_inst(compiler, MOVZ | RD(dst) | ((imm >> 16) << 5) | (1 << 21));
513 		if ((imm & 0xffff0000l) == 0xffff0000)
514 			return push_inst(compiler, (MOVN ^ W_OP) | RD(dst) | ((~imm & 0xffff) << 5));
515 		if ((imm & 0xffff) == 0xffff)
516 			return push_inst(compiler, (MOVN ^ W_OP) | RD(dst) | ((~imm & 0xffff0000l) >> (16 - 5)) | (1 << 21));
517 
518 		bitmask = logical_imm(simm, 16);
519 		if (bitmask != 0)
520 			return push_inst(compiler, (ORRI ^ W_OP) | RD(dst) | RN(TMP_ZERO) | bitmask);
521 
522 		FAIL_IF(push_inst(compiler, MOVZ | RD(dst) | ((imm & 0xffff) << 5)));
523 		return push_inst(compiler, MOVK | RD(dst) | ((imm & 0xffff0000l) >> (16 - 5)) | (1 << 21));
524 	}
525 
526 	bitmask = logical_imm(simm, 32);
527 	if (bitmask != 0)
528 		return push_inst(compiler, ORRI | RD(dst) | RN(TMP_ZERO) | bitmask);
529 
530 	if (simm < 0 && simm >= -0x100000000l) {
531 		if ((imm & 0xffff) == 0xffff)
532 			return push_inst(compiler, MOVN | RD(dst) | ((~imm & 0xffff0000l) >> (16 - 5)) | (1 << 21));
533 
534 		FAIL_IF(push_inst(compiler, MOVN | RD(dst) | ((~imm & 0xffff) << 5)));
535 		return push_inst(compiler, MOVK | RD(dst) | ((imm & 0xffff0000l) >> (16 - 5)) | (1 << 21));
536 	}
537 
538 	/* Many values could be constructed from ORR and MOVx, but computing such sequences is costly. */
539 
540 	zeros = 0;
541 	ones = 0;
542 	for (i = 4; i > 0; i--) {
543 		if ((simm & 0xffff) == 0)
544 			zeros++;
545 		if ((simm & 0xffff) == 0xffff)
546 			ones++;
547 		simm >>= 16;
548 	}
549 
550 	simm = (sljit_sw)imm;
551 	first = 1;
552 	if (ones > zeros) {
553 		simm = ~simm;
554 		for (i = 0; i < 4; i++) {
555 			if (!(simm & 0xffff)) {
556 				simm >>= 16;
557 				continue;
558 			}
559 			if (first) {
560 				first = 0;
561 				FAIL_IF(push_inst(compiler, MOVN | RD(dst) | ((simm & 0xffff) << 5) | (i << 21)));
562 			}
563 			else
564 				FAIL_IF(push_inst(compiler, MOVK | RD(dst) | ((~simm & 0xffff) << 5) | (i << 21)));
565 			simm >>= 16;
566 		}
567 		return SLJIT_SUCCESS;
568 	}
569 
570 	for (i = 0; i < 4; i++) {
571 		if (!(simm & 0xffff)) {
572 			simm >>= 16;
573 			continue;
574 		}
575 		if (first) {
576 			first = 0;
577 			FAIL_IF(push_inst(compiler, MOVZ | RD(dst) | ((simm & 0xffff) << 5) | (i << 21)));
578 		}
579 		else
580 			FAIL_IF(push_inst(compiler, MOVK | RD(dst) | ((simm & 0xffff) << 5) | (i << 21)));
581 		simm >>= 16;
582 	}
583 	return SLJIT_SUCCESS;
584 }
585 
586 #define ARG1_IMM	0x0010000
587 #define ARG2_IMM	0x0020000
588 #define INT_OP		0x0040000
589 #define SET_FLAGS	0x0080000
590 #define UNUSED_RETURN	0x0100000
591 
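/* CHECK_FLAGS switches the emitted instruction to its flag-setting variant
   by folding the given S/opc bits into inv_bits; when the result itself is
   not needed (UNUSED_RETURN), the destination is redirected to the zero
   register. */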
592 #define CHECK_FLAGS(flag_bits) \
593 	if (flags & SET_FLAGS) { \
594 		inv_bits |= flag_bits; \
595 		if (flags & UNUSED_RETURN) \
596 			dst = TMP_ZERO; \
597 	}
598 
599 static sljit_s32 emit_op_imm(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 dst, sljit_sw arg1, sljit_sw arg2)
600 {
601 	/* dst must be register, TMP_REG1
602 	   arg1 must be register, TMP_REG1, imm
603 	   arg2 must be register, TMP_REG2, imm */
604 	sljit_ins inv_bits = (flags & INT_OP) ? W_OP : 0;
605 	sljit_ins inst_bits;
606 	sljit_s32 op = (flags & 0xffff);
607 	sljit_s32 reg;
608 	sljit_sw imm, nimm;
609 
610 	if (SLJIT_UNLIKELY((flags & (ARG1_IMM | ARG2_IMM)) == (ARG1_IMM | ARG2_IMM))) {
611 		/* Both are immediates. */
612 		flags &= ~ARG1_IMM;
613 		if (arg1 == 0 && op != SLJIT_ADD && op != SLJIT_SUB)
614 			arg1 = TMP_ZERO;
615 		else {
616 			FAIL_IF(load_immediate(compiler, TMP_REG1, arg1));
617 			arg1 = TMP_REG1;
618 		}
619 	}
620 
621 	if (flags & (ARG1_IMM | ARG2_IMM)) {
622 		reg = (flags & ARG2_IMM) ? arg1 : arg2;
623 		imm = (flags & ARG2_IMM) ? arg2 : arg1;
624 
625 		switch (op) {
626 		case SLJIT_MUL:
627 		case SLJIT_NEG:
628 		case SLJIT_CLZ:
629 		case SLJIT_ADDC:
630 		case SLJIT_SUBC:
631 			/* No form with immediate operand (except imm 0, which
632 			is represented by a ZERO register). */
633 			break;
634 		case SLJIT_MOV:
635 			SLJIT_ASSERT(!(flags & SET_FLAGS) && (flags & ARG2_IMM) && arg1 == TMP_REG1);
636 			return load_immediate(compiler, dst, imm);
637 		case SLJIT_NOT:
638 			SLJIT_ASSERT(flags & ARG2_IMM);
639 			FAIL_IF(load_immediate(compiler, dst, (flags & INT_OP) ? (~imm & 0xffffffff) : ~imm));
640 			goto set_flags;
641 		case SLJIT_SUB:
642 			if (flags & ARG1_IMM)
643 				break;
644 			imm = -imm;
645 			/* Fall through. */
646 		case SLJIT_ADD:
647 			if (imm == 0) {
648 				CHECK_FLAGS(1 << 29);
649 				return push_inst(compiler, ((op == SLJIT_ADD ? ADDI : SUBI) ^ inv_bits) | RD(dst) | RN(reg));
650 			}
651 			if (imm > 0 && imm <= 0xfff) {
652 				CHECK_FLAGS(1 << 29);
653 				return push_inst(compiler, (ADDI ^ inv_bits) | RD(dst) | RN(reg) | (imm << 10));
654 			}
655 			nimm = -imm;
656 			if (nimm > 0 && nimm <= 0xfff) {
657 				CHECK_FLAGS(1 << 29);
658 				return push_inst(compiler, (SUBI ^ inv_bits) | RD(dst) | RN(reg) | (nimm << 10));
659 			}
660 			if (imm > 0 && imm <= 0xffffff && !(imm & 0xfff)) {
661 				CHECK_FLAGS(1 << 29);
662 				return push_inst(compiler, (ADDI ^ inv_bits) | RD(dst) | RN(reg) | ((imm >> 12) << 10) | (1 << 22));
663 			}
664 			if (nimm > 0 && nimm <= 0xffffff && !(nimm & 0xfff)) {
665 				CHECK_FLAGS(1 << 29);
666 				return push_inst(compiler, (SUBI ^ inv_bits) | RD(dst) | RN(reg) | ((nimm >> 12) << 10) | (1 << 22));
667 			}
668 			if (imm > 0 && imm <= 0xffffff && !(flags & SET_FLAGS)) {
669 				FAIL_IF(push_inst(compiler, (ADDI ^ inv_bits) | RD(dst) | RN(reg) | ((imm >> 12) << 10) | (1 << 22)));
670 				return push_inst(compiler, (ADDI ^ inv_bits) | RD(dst) | RN(dst) | ((imm & 0xfff) << 10));
671 			}
672 			if (nimm > 0 && nimm <= 0xffffff && !(flags & SET_FLAGS)) {
673 				FAIL_IF(push_inst(compiler, (SUBI ^ inv_bits) | RD(dst) | RN(reg) | ((nimm >> 12) << 10) | (1 << 22)));
674 				return push_inst(compiler, (SUBI ^ inv_bits) | RD(dst) | RN(dst) | ((nimm & 0xfff) << 10));
675 			}
676 			break;
677 		case SLJIT_AND:
678 			inst_bits = logical_imm(imm, LOGICAL_IMM_CHECK | ((flags & INT_OP) ? 16 : 32));
679 			if (!inst_bits)
680 				break;
681 			CHECK_FLAGS(3 << 29);
682 			return push_inst(compiler, (ANDI ^ inv_bits) | RD(dst) | RN(reg) | inst_bits);
683 		case SLJIT_OR:
684 		case SLJIT_XOR:
685 			inst_bits = logical_imm(imm, LOGICAL_IMM_CHECK | ((flags & INT_OP) ? 16 : 32));
686 			if (!inst_bits)
687 				break;
688 			if (op == SLJIT_OR)
689 				inst_bits |= ORRI;
690 			else
691 				inst_bits |= EORI;
692 			FAIL_IF(push_inst(compiler, (inst_bits ^ inv_bits) | RD(dst) | RN(reg)));
693 			goto set_flags;
694 		case SLJIT_SHL:
695 			if (flags & ARG1_IMM)
696 				break;
697 			if (flags & INT_OP) {
698 				imm &= 0x1f;
699 				FAIL_IF(push_inst(compiler, (UBFM ^ inv_bits) | RD(dst) | RN(arg1) | ((-imm & 0x1f) << 16) | ((31 - imm) << 10)));
700 			}
701 			else {
702 				imm &= 0x3f;
703 				FAIL_IF(push_inst(compiler, (UBFM ^ inv_bits) | RD(dst) | RN(arg1) | (1 << 22) | ((-imm & 0x3f) << 16) | ((63 - imm) << 10)));
704 			}
705 			goto set_flags;
706 		case SLJIT_LSHR:
707 		case SLJIT_ASHR:
708 			if (flags & ARG1_IMM)
709 				break;
710 			if (op == SLJIT_ASHR)
711 				inv_bits |= 1 << 30;
712 			if (flags & INT_OP) {
713 				imm &= 0x1f;
714 				FAIL_IF(push_inst(compiler, (UBFM ^ inv_bits) | RD(dst) | RN(arg1) | (imm << 16) | (31 << 10)));
715 			}
716 			else {
717 				imm &= 0x3f;
718 				FAIL_IF(push_inst(compiler, (UBFM ^ inv_bits) | RD(dst) | RN(arg1) | (1 << 22) | (imm << 16) | (63 << 10)));
719 			}
720 			goto set_flags;
721 		default:
722 			SLJIT_UNREACHABLE();
723 			break;
724 		}
725 
726 		if (flags & ARG2_IMM) {
727 			if (arg2 == 0)
728 				arg2 = TMP_ZERO;
729 			else {
730 				FAIL_IF(load_immediate(compiler, TMP_REG2, arg2));
731 				arg2 = TMP_REG2;
732 			}
733 		}
734 		else {
735 			if (arg1 == 0)
736 				arg1 = TMP_ZERO;
737 			else {
738 				FAIL_IF(load_immediate(compiler, TMP_REG1, arg1));
739 				arg1 = TMP_REG1;
740 			}
741 		}
742 	}
743 
744 	/* Both arguments are registers. */
745 	switch (op) {
746 	case SLJIT_MOV:
747 	case SLJIT_MOV_P:
748 		SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
749 		if (dst == arg2)
750 			return SLJIT_SUCCESS;
751 		return push_inst(compiler, ORR | RD(dst) | RN(TMP_ZERO) | RM(arg2));
752 	case SLJIT_MOV_U8:
753 		SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
754 		return push_inst(compiler, (UBFM ^ W_OP) | RD(dst) | RN(arg2) | (7 << 10));
755 	case SLJIT_MOV_S8:
756 		SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
757 		if (!(flags & INT_OP))
758 			inv_bits |= 1 << 22;
759 		return push_inst(compiler, (SBFM ^ inv_bits) | RD(dst) | RN(arg2) | (7 << 10));
760 	case SLJIT_MOV_U16:
761 		SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
762 		return push_inst(compiler, (UBFM ^ W_OP) | RD(dst) | RN(arg2) | (15 << 10));
763 	case SLJIT_MOV_S16:
764 		SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
765 		if (!(flags & INT_OP))
766 			inv_bits |= 1 << 22;
767 		return push_inst(compiler, (SBFM ^ inv_bits) | RD(dst) | RN(arg2) | (15 << 10));
768 	case SLJIT_MOV_U32:
769 		SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
770 		if ((flags & INT_OP) && dst == arg2)
771 			return SLJIT_SUCCESS;
772 		return push_inst(compiler, (ORR ^ W_OP) | RD(dst) | RN(TMP_ZERO) | RM(arg2));
773 	case SLJIT_MOV_S32:
774 		SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
775 		if ((flags & INT_OP) && dst == arg2)
776 			return SLJIT_SUCCESS;
777 		return push_inst(compiler, SBFM | (1 << 22) | RD(dst) | RN(arg2) | (31 << 10));
778 	case SLJIT_NOT:
779 		SLJIT_ASSERT(arg1 == TMP_REG1);
780 		FAIL_IF(push_inst(compiler, (ORN ^ inv_bits) | RD(dst) | RN(TMP_ZERO) | RM(arg2)));
781 		break; /* Set flags. */
782 	case SLJIT_NEG:
783 		SLJIT_ASSERT(arg1 == TMP_REG1);
784 		if (flags & SET_FLAGS)
785 			inv_bits |= 1 << 29;
786 		return push_inst(compiler, (SUB ^ inv_bits) | RD(dst) | RN(TMP_ZERO) | RM(arg2));
787 	case SLJIT_CLZ:
788 		SLJIT_ASSERT(arg1 == TMP_REG1);
789 		return push_inst(compiler, (CLZ ^ inv_bits) | RD(dst) | RN(arg2));
790 	case SLJIT_ADD:
791 		CHECK_FLAGS(1 << 29);
792 		return push_inst(compiler, (ADD ^ inv_bits) | RD(dst) | RN(arg1) | RM(arg2));
793 	case SLJIT_ADDC:
794 		CHECK_FLAGS(1 << 29);
795 		return push_inst(compiler, (ADC ^ inv_bits) | RD(dst) | RN(arg1) | RM(arg2));
796 	case SLJIT_SUB:
797 		CHECK_FLAGS(1 << 29);
798 		return push_inst(compiler, (SUB ^ inv_bits) | RD(dst) | RN(arg1) | RM(arg2));
799 	case SLJIT_SUBC:
800 		CHECK_FLAGS(1 << 29);
801 		return push_inst(compiler, (SBC ^ inv_bits) | RD(dst) | RN(arg1) | RM(arg2));
802 	case SLJIT_MUL:
803 		if (!(flags & SET_FLAGS))
804 			return push_inst(compiler, (MADD ^ inv_bits) | RD(dst) | RN(arg1) | RM(arg2) | RT2(TMP_ZERO));
805 		if (flags & INT_OP) {
806 			FAIL_IF(push_inst(compiler, SMADDL | RD(dst) | RN(arg1) | RM(arg2) | (31 << 10)));
807 			FAIL_IF(push_inst(compiler, ADD | RD(TMP_LR) | RN(TMP_ZERO) | RM(dst) | (2 << 22) | (31 << 10)));
808 			return push_inst(compiler, SUBS | RD(TMP_ZERO) | RN(TMP_LR) | RM(dst) | (2 << 22) | (63 << 10));
809 		}
810 		FAIL_IF(push_inst(compiler, SMULH | RD(TMP_LR) | RN(arg1) | RM(arg2)));
811 		FAIL_IF(push_inst(compiler, MADD | RD(dst) | RN(arg1) | RM(arg2) | RT2(TMP_ZERO)));
812 		return push_inst(compiler, SUBS | RD(TMP_ZERO) | RN(TMP_LR) | RM(dst) | (2 << 22) | (63 << 10));
813 	case SLJIT_AND:
814 		CHECK_FLAGS(3 << 29);
815 		return push_inst(compiler, (AND ^ inv_bits) | RD(dst) | RN(arg1) | RM(arg2));
816 	case SLJIT_OR:
817 		FAIL_IF(push_inst(compiler, (ORR ^ inv_bits) | RD(dst) | RN(arg1) | RM(arg2)));
818 		break; /* Set flags. */
819 	case SLJIT_XOR:
820 		FAIL_IF(push_inst(compiler, (EOR ^ inv_bits) | RD(dst) | RN(arg1) | RM(arg2)));
821 		break; /* Set flags. */
822 	case SLJIT_SHL:
823 		FAIL_IF(push_inst(compiler, (LSLV ^ inv_bits) | RD(dst) | RN(arg1) | RM(arg2)));
824 		break; /* Set flags. */
825 	case SLJIT_LSHR:
826 		FAIL_IF(push_inst(compiler, (LSRV ^ inv_bits) | RD(dst) | RN(arg1) | RM(arg2)));
827 		break; /* Set flags. */
828 	case SLJIT_ASHR:
829 		FAIL_IF(push_inst(compiler, (ASRV ^ inv_bits) | RD(dst) | RN(arg1) | RM(arg2)));
830 		break; /* Set flags. */
831 	default:
832 		SLJIT_UNREACHABLE();
833 		return SLJIT_SUCCESS;
834 	}
835 
836 set_flags:
837 	if (flags & SET_FLAGS)
838 		return push_inst(compiler, (SUBS ^ inv_bits) | RD(TMP_ZERO) | RN(dst) | RM(TMP_ZERO));
839 	return SLJIT_SUCCESS;
840 }
841 
842 #define STORE		0x10
843 #define SIGNED		0x20
844 
845 #define BYTE_SIZE	0x0
846 #define HALF_SIZE	0x1
847 #define INT_SIZE	0x2
848 #define WORD_SIZE	0x3
849 
850 #define MEM_SIZE_SHIFT(flags) ((flags) & 0x3)
851 
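/* Emits a load or store of 'reg'. The low two bits of 'flags' give the
   access size (log2 of the byte count), STORE selects a store and SIGNED a
   sign-extending load. Depending on the address the function uses a register
   offset form, a scaled unsigned 12 bit offset, a base adjustment followed
   by an offset, an unscaled signed 9 bit offset (STUR form), or falls back
   to loading the offset into tmp_reg. */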
852 static sljit_s32 emit_op_mem(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg,
853 	sljit_s32 arg, sljit_sw argw, sljit_s32 tmp_reg)
854 {
855 	sljit_u32 shift = MEM_SIZE_SHIFT(flags);
856 	sljit_u32 type = (shift << 30);
857 
858 	if (!(flags & STORE))
859 		type |= (flags & SIGNED) ? 0x00800000 : 0x00400000;
860 
861 	SLJIT_ASSERT(arg & SLJIT_MEM);
862 
863 	if (SLJIT_UNLIKELY(arg & OFFS_REG_MASK)) {
864 		argw &= 0x3;
865 
866 		if (argw == 0 || argw == shift)
867 			return push_inst(compiler, STRB | type | RT(reg)
868 				| RN(arg & REG_MASK) | RM(OFFS_REG(arg)) | (argw ? (1 << 12) : 0));
869 
870 		FAIL_IF(push_inst(compiler, ADD | RD(tmp_reg) | RN(arg & REG_MASK) | RM(OFFS_REG(arg)) | (argw << 10)));
871 		return push_inst(compiler, STRBI | type | RT(reg) | RN(tmp_reg));
872 	}
873 
874 	arg &= REG_MASK;
875 
876 	if (arg == SLJIT_UNUSED) {
877 		FAIL_IF(load_immediate(compiler, tmp_reg, argw & ~(0xfff << shift)));
878 
879 		argw = (argw >> shift) & 0xfff;
880 
881 		return push_inst(compiler, STRBI | type | RT(reg) | RN(tmp_reg) | (argw << 10));
882 	}
883 
884 	if (argw >= 0 && (argw & ((1 << shift) - 1)) == 0) {
885 		if ((argw >> shift) <= 0xfff) {
886 			return push_inst(compiler, STRBI | type | RT(reg) | RN(arg) | (argw << (10 - shift)));
887 		}
888 
889 		if (argw <= 0xffffff) {
890 			FAIL_IF(push_inst(compiler, ADDI | (1 << 22) | RD(tmp_reg) | RN(arg) | ((argw >> 12) << 10)));
891 
892 			argw = ((argw & 0xfff) >> shift);
893 			return push_inst(compiler, STRBI | type | RT(reg) | RN(tmp_reg) | (argw << 10));
894 		}
895 	}
896 
897 	if (argw <= 255 && argw >= -256)
898 		return push_inst(compiler, STURBI | type | RT(reg) | RN(arg) | ((argw & 0x1ff) << 12));
899 
900 	FAIL_IF(load_immediate(compiler, tmp_reg, argw));
901 
902 	return push_inst(compiler, STRB | type | RT(reg) | RN(arg) | RM(tmp_reg));
903 }
904 
905 /* --------------------------------------------------------------------- */
906 /*  Entry, exit                                                          */
907 /* --------------------------------------------------------------------- */
908 
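/* Prologue: STP_PRE pushes FP and LR while reserving space for all saved
   registers, the remaining saved registers are stored pairwise with STP
   (a trailing single one with STR), FP is set to the current SP, the
   incoming arguments are moved into the saved argument registers, and
   finally the local area is allocated. On Windows the allocation is done
   with page-by-page probing, since the guard page must be touched in
   4096 byte steps. */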
909 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compiler,
910 	sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
911 	sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
912 {
913 	sljit_s32 args, i, tmp, offs, prev, saved_regs_size;
914 
915 	CHECK_ERROR();
916 	CHECK(check_sljit_emit_enter(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size));
917 	set_emit_enter(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);
918 
919 	saved_regs_size = GET_SAVED_REGISTERS_SIZE(scratches, saveds, 2);
920 	if (saved_regs_size & 0x8)
921 		saved_regs_size += sizeof(sljit_sw);
922 
923 	local_size = (local_size + 15) & ~0xf;
924 	compiler->local_size = local_size + saved_regs_size;
925 
926 	FAIL_IF(push_inst(compiler, STP_PRE | RT(TMP_FP) | RT2(TMP_LR)
927 		| RN(SLJIT_SP) | ((-(saved_regs_size >> 3) & 0x7f) << 15)));
928 
929 #ifdef _WIN32
930 	if (local_size >= 4096)
931 		FAIL_IF(push_inst(compiler, SUBI | RD(TMP_REG1) | RN(SLJIT_SP) | (1 << 10) | (1 << 22)));
932 	else if (local_size > 256)
933 		FAIL_IF(push_inst(compiler, SUBI | RD(TMP_REG1) | RN(SLJIT_SP) | (local_size << 10)));
934 #endif
935 
936 	tmp = saveds < SLJIT_NUMBER_OF_SAVED_REGISTERS ? (SLJIT_S0 + 1 - saveds) : SLJIT_FIRST_SAVED_REG;
937 	prev = -1;
938 	offs = 2 << 15;
939 	for (i = SLJIT_S0; i >= tmp; i--) {
940 		if (prev == -1) {
941 			prev = i;
942 			continue;
943 		}
944 		FAIL_IF(push_inst(compiler, STP | RT(prev) | RT2(i) | RN(SLJIT_SP) | offs));
945 		offs += 2 << 15;
946 		prev = -1;
947 	}
948 
949 	for (i = scratches; i >= SLJIT_FIRST_SAVED_REG; i--) {
950 		if (prev == -1) {
951 			prev = i;
952 			continue;
953 		}
954 		FAIL_IF(push_inst(compiler, STP | RT(prev) | RT2(i) | RN(SLJIT_SP) | offs));
955 		offs += 2 << 15;
956 		prev = -1;
957 	}
958 
959 	if (prev != -1)
960 		FAIL_IF(push_inst(compiler, STRI | RT(prev) | RN(SLJIT_SP) | (offs >> 5)));
961 
962 
963 	FAIL_IF(push_inst(compiler, ADDI | RD(TMP_FP) | RN(SLJIT_SP) | (0 << 10)));
964 
965 	args = get_arg_count(arg_types);
966 
967 	if (args >= 1)
968 		FAIL_IF(push_inst(compiler, ORR | RD(SLJIT_S0) | RN(TMP_ZERO) | RM(SLJIT_R0)));
969 	if (args >= 2)
970 		FAIL_IF(push_inst(compiler, ORR | RD(SLJIT_S1) | RN(TMP_ZERO) | RM(SLJIT_R1)));
971 	if (args >= 3)
972 		FAIL_IF(push_inst(compiler, ORR | RD(SLJIT_S2) | RN(TMP_ZERO) | RM(SLJIT_R2)));
973 
974 #ifdef _WIN32
975 	if (local_size >= 4096) {
976 		if (local_size < 4 * 4096) {
977 			/* No need for a loop. */
978 			if (local_size >= 2 * 4096) {
979 				FAIL_IF(push_inst(compiler, LDRI | RT(TMP_ZERO) | RN(TMP_REG1)));
980 				FAIL_IF(push_inst(compiler, SUBI | RD(TMP_REG1) | RN(TMP_REG1) | (1 << 10) | (1 << 22)));
981 				local_size -= 4096;
982 			}
983 
984 			if (local_size >= 2 * 4096) {
985 				FAIL_IF(push_inst(compiler, LDRI | RT(TMP_ZERO) | RN(TMP_REG1)));
986 				FAIL_IF(push_inst(compiler, SUBI | RD(TMP_REG1) | RN(TMP_REG1) | (1 << 10) | (1 << 22)));
987 				local_size -= 4096;
988 			}
989 
990 			FAIL_IF(push_inst(compiler, LDRI | RT(TMP_ZERO) | RN(TMP_REG1)));
991 			local_size -= 4096;
992 		}
993 		else {
994 			FAIL_IF(push_inst(compiler, MOVZ | RD(TMP_REG2) | (((local_size >> 12) - 1) << 5)));
995 			FAIL_IF(push_inst(compiler, LDRI | RT(TMP_ZERO) | RN(TMP_REG1)));
996 			FAIL_IF(push_inst(compiler, SUBI | RD(TMP_REG1) | RN(TMP_REG1) | (1 << 10) | (1 << 22)));
997 			FAIL_IF(push_inst(compiler, SUBI | (1 << 29) | RD(TMP_REG2) | RN(TMP_REG2) | (1 << 10)));
998 			FAIL_IF(push_inst(compiler, B_CC | ((((sljit_ins) -3) & 0x7ffff) << 5) | 0x1 /* not-equal */));
999 			FAIL_IF(push_inst(compiler, LDRI | RT(TMP_ZERO) | RN(TMP_REG1)));
1000 
1001 			local_size &= 0xfff;
1002 		}
1003 
1004 		if (local_size > 256) {
1005 			FAIL_IF(push_inst(compiler, SUBI | RD(TMP_REG1) | RN(TMP_REG1) | (local_size << 10)));
1006 			FAIL_IF(push_inst(compiler, LDRI | RT(TMP_ZERO) | RN(TMP_REG1)));
1007 		}
1008 		else if (local_size > 0)
1009 			FAIL_IF(push_inst(compiler, LDR_PRE | RT(TMP_ZERO) | RN(TMP_REG1) | ((-local_size & 0x1ff) << 12)));
1010 
1011 		FAIL_IF(push_inst(compiler, ADDI | RD(SLJIT_SP) | RN(TMP_REG1) | (0 << 10)));
1012 	}
1013 	else if (local_size > 256) {
1014 		FAIL_IF(push_inst(compiler, LDRI | RT(TMP_ZERO) | RN(TMP_REG1)));
1015 		FAIL_IF(push_inst(compiler, ADDI | RD(SLJIT_SP) | RN(TMP_REG1) | (0 << 10)));
1016 	}
1017 	else if (local_size > 0)
1018 		FAIL_IF(push_inst(compiler, LDR_PRE | RT(TMP_ZERO) | RN(SLJIT_SP) | ((-local_size & 0x1ff) << 12)));
1019 
1020 #else /* !_WIN32 */
1021 
1022 	/* The local_size does not include the size of the saved registers. */
1023 	if (local_size > 0xfff) {
1024 		FAIL_IF(push_inst(compiler, SUBI | RD(SLJIT_SP) | RN(SLJIT_SP) | ((local_size >> 12) << 10) | (1 << 22)));
1025 		local_size &= 0xfff;
1026 	}
1027 	if (local_size != 0)
1028 		FAIL_IF(push_inst(compiler, SUBI | RD(SLJIT_SP) | RN(SLJIT_SP) | (local_size << 10)));
1029 
1030 #endif /* _WIN32 */
1031 
1032 	return SLJIT_SUCCESS;
1033 }
1034 
1035 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *compiler,
1036 	sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
1037 	sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
1038 {
1039 	sljit_s32 saved_regs_size;
1040 
1041 	CHECK_ERROR();
1042 	CHECK(check_sljit_set_context(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size));
1043 	set_set_context(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);
1044 
1045 	saved_regs_size = GET_SAVED_REGISTERS_SIZE(scratches, saveds, 2);
1046 	if (saved_regs_size & 0x8)
1047 		saved_regs_size += sizeof(sljit_sw);
1048 
1049 	compiler->local_size = saved_regs_size + ((local_size + 15) & ~0xf);
1050 	return SLJIT_SUCCESS;
1051 }
1052 
1053 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 src, sljit_sw srcw)
1054 {
1055 	sljit_s32 local_size;
1056 	sljit_s32 i, tmp, offs, prev, saved_regs_size;
1057 
1058 	CHECK_ERROR();
1059 	CHECK(check_sljit_emit_return(compiler, op, src, srcw));
1060 
1061 	FAIL_IF(emit_mov_before_return(compiler, op, src, srcw));
1062 
1063 	saved_regs_size = GET_SAVED_REGISTERS_SIZE(compiler->scratches, compiler->saveds, 2);
1064 	if (saved_regs_size & 0x8)
1065 		saved_regs_size += sizeof(sljit_sw);
1066 
1067 	local_size = compiler->local_size - saved_regs_size;
1068 
1069 	/* Load LR as early as possible. */
1070 	if (local_size == 0)
1071 		FAIL_IF(push_inst(compiler, LDP | RT(TMP_FP) | RT2(TMP_LR) | RN(SLJIT_SP)));
1072 	else if (local_size < 63 * sizeof(sljit_sw)) {
1073 		FAIL_IF(push_inst(compiler, LDP_PRE | RT(TMP_FP) | RT2(TMP_LR)
1074 			| RN(SLJIT_SP) | (local_size << (15 - 3))));
1075 	}
1076 	else {
1077 		if (local_size > 0xfff) {
1078 			FAIL_IF(push_inst(compiler, ADDI | RD(SLJIT_SP) | RN(SLJIT_SP) | ((local_size >> 12) << 10) | (1 << 22)));
1079 			local_size &= 0xfff;
1080 		}
1081 		if (local_size)
1082 			FAIL_IF(push_inst(compiler, ADDI | RD(SLJIT_SP) | RN(SLJIT_SP) | (local_size << 10)));
1083 
1084 		FAIL_IF(push_inst(compiler, LDP | RT(TMP_FP) | RT2(TMP_LR) | RN(SLJIT_SP)));
1085 	}
1086 
1087 	tmp = compiler->saveds < SLJIT_NUMBER_OF_SAVED_REGISTERS ? (SLJIT_S0 + 1 - compiler->saveds) : SLJIT_FIRST_SAVED_REG;
1088 	prev = -1;
1089 	offs = 2 << 15;
1090 	for (i = SLJIT_S0; i >= tmp; i--) {
1091 		if (prev == -1) {
1092 			prev = i;
1093 			continue;
1094 		}
1095 		FAIL_IF(push_inst(compiler, LDP | RT(prev) | RT2(i) | RN(SLJIT_SP) | offs));
1096 		offs += 2 << 15;
1097 		prev = -1;
1098 	}
1099 
1100 	for (i = compiler->scratches; i >= SLJIT_FIRST_SAVED_REG; i--) {
1101 		if (prev == -1) {
1102 			prev = i;
1103 			continue;
1104 		}
1105 		FAIL_IF(push_inst(compiler, LDP | RT(prev) | RT2(i) | RN(SLJIT_SP) | offs));
1106 		offs += 2 << 15;
1107 		prev = -1;
1108 	}
1109 
1110 	if (prev != -1)
1111 		FAIL_IF(push_inst(compiler, LDRI | RT(prev) | RN(SLJIT_SP) | (offs >> 5)));
1112 
1113 	/* These two can be executed in parallel. */
1114 	FAIL_IF(push_inst(compiler, ADDI | RD(SLJIT_SP) | RN(SLJIT_SP) | (saved_regs_size << 10)));
1115 	return push_inst(compiler, RET | RN(TMP_LR));
1116 }
1117 
1118 /* --------------------------------------------------------------------- */
1119 /*  Operators                                                            */
1120 /* --------------------------------------------------------------------- */
1121 
1122 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op)
1123 {
1124 	sljit_ins inv_bits = (op & SLJIT_I32_OP) ? W_OP : 0;
1125 
1126 	CHECK_ERROR();
1127 	CHECK(check_sljit_emit_op0(compiler, op));
1128 
1129 	op = GET_OPCODE(op);
1130 	switch (op) {
1131 	case SLJIT_BREAKPOINT:
1132 		return push_inst(compiler, BRK);
1133 	case SLJIT_NOP:
1134 		return push_inst(compiler, NOP);
1135 	case SLJIT_LMUL_UW:
1136 	case SLJIT_LMUL_SW:
1137 		FAIL_IF(push_inst(compiler, ORR | RD(TMP_REG1) | RN(TMP_ZERO) | RM(SLJIT_R0)));
1138 		FAIL_IF(push_inst(compiler, MADD | RD(SLJIT_R0) | RN(SLJIT_R0) | RM(SLJIT_R1) | RT2(TMP_ZERO)));
1139 		return push_inst(compiler, (op == SLJIT_LMUL_UW ? UMULH : SMULH) | RD(SLJIT_R1) | RN(TMP_REG1) | RM(SLJIT_R1));
1140 	case SLJIT_DIVMOD_UW:
1141 	case SLJIT_DIVMOD_SW:
1142 		FAIL_IF(push_inst(compiler, (ORR ^ inv_bits) | RD(TMP_REG1) | RN(TMP_ZERO) | RM(SLJIT_R0)));
1143 		FAIL_IF(push_inst(compiler, ((op == SLJIT_DIVMOD_UW ? UDIV : SDIV) ^ inv_bits) | RD(SLJIT_R0) | RN(SLJIT_R0) | RM(SLJIT_R1)));
1144 		FAIL_IF(push_inst(compiler, (MADD ^ inv_bits) | RD(SLJIT_R1) | RN(SLJIT_R0) | RM(SLJIT_R1) | RT2(TMP_ZERO)));
1145 		return push_inst(compiler, (SUB ^ inv_bits) | RD(SLJIT_R1) | RN(TMP_REG1) | RM(SLJIT_R1));
1146 	case SLJIT_DIV_UW:
1147 	case SLJIT_DIV_SW:
1148 		return push_inst(compiler, ((op == SLJIT_DIV_UW ? UDIV : SDIV) ^ inv_bits) | RD(SLJIT_R0) | RN(SLJIT_R0) | RM(SLJIT_R1));
1149 	case SLJIT_ENDBR:
1150 	case SLJIT_SKIP_FRAMES_BEFORE_RETURN:
1151 		return SLJIT_SUCCESS;
1152 	}
1153 
1154 	return SLJIT_SUCCESS;
1155 }
1156 
1157 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
1158 	sljit_s32 dst, sljit_sw dstw,
1159 	sljit_s32 src, sljit_sw srcw)
1160 {
1161 	sljit_s32 dst_r, flags, mem_flags;
1162 	sljit_s32 op_flags = GET_ALL_FLAGS(op);
1163 
1164 	CHECK_ERROR();
1165 	CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw));
1166 	ADJUST_LOCAL_OFFSET(dst, dstw);
1167 	ADJUST_LOCAL_OFFSET(src, srcw);
1168 
1169 	dst_r = SLOW_IS_REG(dst) ? dst : TMP_REG1;
1170 
1171 	op = GET_OPCODE(op);
1172 	if (op >= SLJIT_MOV && op <= SLJIT_MOV_P) {
1173 		/* Both operands are registers. */
1174 		if (dst_r != TMP_REG1 && FAST_IS_REG(src))
1175 			return emit_op_imm(compiler, op | ((op_flags & SLJIT_I32_OP) ? INT_OP : 0), dst_r, TMP_REG1, src);
1176 
1177 		switch (op) {
1178 		case SLJIT_MOV:
1179 		case SLJIT_MOV_P:
1180 			mem_flags = WORD_SIZE;
1181 			break;
1182 		case SLJIT_MOV_U8:
1183 			mem_flags = BYTE_SIZE;
1184 			if (src & SLJIT_IMM)
1185 				srcw = (sljit_u8)srcw;
1186 			break;
1187 		case SLJIT_MOV_S8:
1188 			mem_flags = BYTE_SIZE | SIGNED;
1189 			if (src & SLJIT_IMM)
1190 				srcw = (sljit_s8)srcw;
1191 			break;
1192 		case SLJIT_MOV_U16:
1193 			mem_flags = HALF_SIZE;
1194 			if (src & SLJIT_IMM)
1195 				srcw = (sljit_u16)srcw;
1196 			break;
1197 		case SLJIT_MOV_S16:
1198 			mem_flags = HALF_SIZE | SIGNED;
1199 			if (src & SLJIT_IMM)
1200 				srcw = (sljit_s16)srcw;
1201 			break;
1202 		case SLJIT_MOV_U32:
1203 			mem_flags = INT_SIZE;
1204 			if (src & SLJIT_IMM)
1205 				srcw = (sljit_u32)srcw;
1206 			break;
1207 		case SLJIT_MOV_S32:
1208 			mem_flags = INT_SIZE | SIGNED;
1209 			if (src & SLJIT_IMM)
1210 				srcw = (sljit_s32)srcw;
1211 			break;
1212 		default:
1213 			SLJIT_UNREACHABLE();
1214 			mem_flags = 0;
1215 			break;
1216 		}
1217 
1218 		if (src & SLJIT_IMM)
1219 			FAIL_IF(emit_op_imm(compiler, SLJIT_MOV | ARG2_IMM, dst_r, TMP_REG1, srcw));
1220 		else if (!(src & SLJIT_MEM))
1221 			dst_r = src;
1222 		else
1223 			FAIL_IF(emit_op_mem(compiler, mem_flags, dst_r, src, srcw, TMP_REG1));
1224 
1225 		if (dst & SLJIT_MEM)
1226 			return emit_op_mem(compiler, mem_flags | STORE, dst_r, dst, dstw, TMP_REG2);
1227 		return SLJIT_SUCCESS;
1228 	}
1229 
1230 	flags = HAS_FLAGS(op_flags) ? SET_FLAGS : 0;
1231 	mem_flags = WORD_SIZE;
1232 
1233 	if (op_flags & SLJIT_I32_OP) {
1234 		flags |= INT_OP;
1235 		mem_flags = INT_SIZE;
1236 	}
1237 
1238 	if (dst == SLJIT_UNUSED)
1239 		flags |= UNUSED_RETURN;
1240 
1241 	if (src & SLJIT_MEM) {
1242 		FAIL_IF(emit_op_mem(compiler, mem_flags, TMP_REG2, src, srcw, TMP_REG2));
1243 		src = TMP_REG2;
1244 	}
1245 
1246 	emit_op_imm(compiler, flags | op, dst_r, TMP_REG1, src);
1247 
1248 	if (SLJIT_UNLIKELY(dst & SLJIT_MEM))
1249 		return emit_op_mem(compiler, mem_flags | STORE, dst_r, dst, dstw, TMP_REG2);
1250 	return SLJIT_SUCCESS;
1251 }
1252 
1253 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op,
1254 	sljit_s32 dst, sljit_sw dstw,
1255 	sljit_s32 src1, sljit_sw src1w,
1256 	sljit_s32 src2, sljit_sw src2w)
1257 {
1258 	sljit_s32 dst_r, flags, mem_flags;
1259 
1260 	CHECK_ERROR();
1261 	CHECK(check_sljit_emit_op2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
1262 	ADJUST_LOCAL_OFFSET(dst, dstw);
1263 	ADJUST_LOCAL_OFFSET(src1, src1w);
1264 	ADJUST_LOCAL_OFFSET(src2, src2w);
1265 
1266 	if (dst == SLJIT_UNUSED && !HAS_FLAGS(op))
1267 		return SLJIT_SUCCESS;
1268 
1269 	dst_r = SLOW_IS_REG(dst) ? dst : TMP_REG1;
1270 	flags = HAS_FLAGS(op) ? SET_FLAGS : 0;
1271 	mem_flags = WORD_SIZE;
1272 
1273 	if (op & SLJIT_I32_OP) {
1274 		flags |= INT_OP;
1275 		mem_flags = INT_SIZE;
1276 	}
1277 
1278 	if (dst == SLJIT_UNUSED)
1279 		flags |= UNUSED_RETURN;
1280 
1281 	if (src1 & SLJIT_MEM) {
1282 		FAIL_IF(emit_op_mem(compiler, mem_flags, TMP_REG1, src1, src1w, TMP_REG1));
1283 		src1 = TMP_REG1;
1284 	}
1285 
1286 	if (src2 & SLJIT_MEM) {
1287 		FAIL_IF(emit_op_mem(compiler, mem_flags, TMP_REG2, src2, src2w, TMP_REG2));
1288 		src2 = TMP_REG2;
1289 	}
1290 
1291 	if (src1 & SLJIT_IMM)
1292 		flags |= ARG1_IMM;
1293 	else
1294 		src1w = src1;
1295 
1296 	if (src2 & SLJIT_IMM)
1297 		flags |= ARG2_IMM;
1298 	else
1299 		src2w = src2;
1300 
1301 	emit_op_imm(compiler, flags | GET_OPCODE(op), dst_r, src1w, src2w);
1302 
1303 	if (dst & SLJIT_MEM)
1304 		return emit_op_mem(compiler, mem_flags | STORE, dst_r, dst, dstw, TMP_REG2);
1305 	return SLJIT_SUCCESS;
1306 }
1307 
1308 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *compiler, sljit_s32 op,
1309 	sljit_s32 src, sljit_sw srcw)
1310 {
1311 	CHECK_ERROR();
1312 	CHECK(check_sljit_emit_op_src(compiler, op, src, srcw));
1313 	ADJUST_LOCAL_OFFSET(src, srcw);
1314 
1315 	switch (op) {
1316 	case SLJIT_FAST_RETURN:
1317 		if (FAST_IS_REG(src))
1318 			FAIL_IF(push_inst(compiler, ORR | RD(TMP_LR) | RN(TMP_ZERO) | RM(src)));
1319 		else
1320 			FAIL_IF(emit_op_mem(compiler, WORD_SIZE, TMP_LR, src, srcw, TMP_REG1));
1321 
1322 		return push_inst(compiler, RET | RN(TMP_LR));
1323 	case SLJIT_SKIP_FRAMES_BEFORE_FAST_RETURN:
1324 		return SLJIT_SUCCESS;
1325 	case SLJIT_PREFETCH_L1:
1326 	case SLJIT_PREFETCH_L2:
1327 	case SLJIT_PREFETCH_L3:
1328 	case SLJIT_PREFETCH_ONCE:
1329 		SLJIT_ASSERT(reg_map[1] == 0 && reg_map[3] == 2 && reg_map[5] == 4);
1330 
1331 		/* The reg_map[op] should provide the appropriate constant. */
1332 		if (op == SLJIT_PREFETCH_L1)
1333 			op = 1;
1334 		else if (op == SLJIT_PREFETCH_L2)
1335 			op = 3;
1336 		else if (op == SLJIT_PREFETCH_L3)
1337 			op = 5;
1338 		else
1339 			op = 2;
1340 
1341 		/* A signed word sized load with these register numbers encodes the prefetch (PRFM) instruction. */
1342 		return emit_op_mem(compiler, WORD_SIZE | SIGNED, op, src, srcw, TMP_REG1);
1343 	}
1344 
1345 	return SLJIT_SUCCESS;
1346 }
1347 
1348 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg)
1349 {
1350 	CHECK_REG_INDEX(check_sljit_get_register_index(reg));
1351 	return reg_map[reg];
1352 }
1353 
1354 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_float_register_index(sljit_s32 reg)
1355 {
1356 	CHECK_REG_INDEX(check_sljit_get_float_register_index(reg));
1357 	return freg_map[reg];
1358 }
1359 
1360 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
1361 	void *instruction, sljit_s32 size)
1362 {
1363 	CHECK_ERROR();
1364 	CHECK(check_sljit_emit_op_custom(compiler, instruction, size));
1365 
1366 	return push_inst(compiler, *(sljit_ins*)instruction);
1367 }
1368 
1369 /* --------------------------------------------------------------------- */
1370 /*  Floating point operators                                             */
1371 /* --------------------------------------------------------------------- */
1372 
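/* Floating point counterpart of emit_op_mem: the same addressing mode
   selection, but it targets the SIMD&FP registers (VT) and always uses
   TMP_REG1 as the scratch base register. */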
1373 static sljit_s32 emit_fop_mem(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw)
1374 {
1375 	sljit_u32 shift = MEM_SIZE_SHIFT(flags);
1376 	sljit_ins type = (shift << 30);
1377 
1378 	SLJIT_ASSERT(arg & SLJIT_MEM);
1379 
1380 	if (!(flags & STORE))
1381 		type |= 0x00400000;
1382 
1383 	if (arg & OFFS_REG_MASK) {
1384 		argw &= 3;
1385 		if (argw == 0 || argw == shift)
1386 			return push_inst(compiler, STR_FR | type | VT(reg)
1387 				| RN(arg & REG_MASK) | RM(OFFS_REG(arg)) | (argw ? (1 << 12) : 0));
1388 
1389 		FAIL_IF(push_inst(compiler, ADD | RD(TMP_REG1) | RN(arg & REG_MASK) | RM(OFFS_REG(arg)) | (argw << 10)));
1390 		return push_inst(compiler, STR_FI | type | VT(reg) | RN(TMP_REG1));
1391 	}
1392 
1393 	arg &= REG_MASK;
1394 
1395 	if (arg == SLJIT_UNUSED) {
1396 		FAIL_IF(load_immediate(compiler, TMP_REG1, argw & ~(0xfff << shift)));
1397 
1398 		argw = (argw >> shift) & 0xfff;
1399 
1400 		return push_inst(compiler, STR_FI | type | VT(reg) | RN(TMP_REG1) | (argw << 10));
1401 	}
1402 
1403 	if (argw >= 0 && (argw & ((1 << shift) - 1)) == 0) {
1404 		if ((argw >> shift) <= 0xfff)
1405 			return push_inst(compiler, STR_FI | type | VT(reg) | RN(arg) | (argw << (10 - shift)));
1406 
1407 		if (argw <= 0xffffff) {
1408 			FAIL_IF(push_inst(compiler, ADDI | (1 << 22) | RD(TMP_REG1) | RN(arg) | ((argw >> 12) << 10)));
1409 
1410 			argw = ((argw & 0xfff) >> shift);
1411 			return push_inst(compiler, STR_FI | type | VT(reg) | RN(TMP_REG1) | (argw << 10));
1412 		}
1413 	}
1414 
1415 	if (argw <= 255 && argw >= -256)
1416 		return push_inst(compiler, STUR_FI | type | VT(reg) | RN(arg) | ((argw & 0x1ff) << 12));
1417 
1418 	FAIL_IF(load_immediate(compiler, TMP_REG1, argw));
1419 	return push_inst(compiler, STR_FR | type | VT(reg) | RN(arg) | RM(TMP_REG1));
1420 }
1421 
1422 static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler *compiler, sljit_s32 op,
1423 	sljit_s32 dst, sljit_sw dstw,
1424 	sljit_s32 src, sljit_sw srcw)
1425 {
1426 	sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
1427 	sljit_ins inv_bits = (op & SLJIT_F32_OP) ? (1 << 22) : 0;
1428 
1429 	if (GET_OPCODE(op) == SLJIT_CONV_S32_FROM_F64)
1430 		inv_bits |= W_OP;
1431 
1432 	if (src & SLJIT_MEM) {
1433 		emit_fop_mem(compiler, (op & SLJIT_F32_OP) ? INT_SIZE : WORD_SIZE, TMP_FREG1, src, srcw);
1434 		src = TMP_FREG1;
1435 	}
1436 
1437 	FAIL_IF(push_inst(compiler, (FCVTZS ^ inv_bits) | RD(dst_r) | VN(src)));
1438 
1439 	if (dst & SLJIT_MEM)
1440 		return emit_op_mem(compiler, ((GET_OPCODE(op) == SLJIT_CONV_S32_FROM_F64) ? INT_SIZE : WORD_SIZE) | STORE, TMP_REG1, dst, dstw, TMP_REG2);
1441 	return SLJIT_SUCCESS;
1442 }
1443 
1444 static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
1445 	sljit_s32 dst, sljit_sw dstw,
1446 	sljit_s32 src, sljit_sw srcw)
1447 {
1448 	sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
1449 	sljit_ins inv_bits = (op & SLJIT_F32_OP) ? (1 << 22) : 0;
1450 
1451 	if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32)
1452 		inv_bits |= W_OP;
1453 
1454 	if (src & SLJIT_MEM) {
1455 		emit_op_mem(compiler, ((GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32) ? INT_SIZE : WORD_SIZE), TMP_REG1, src, srcw, TMP_REG1);
1456 		src = TMP_REG1;
1457 	} else if (src & SLJIT_IMM) {
1458 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1459 		if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32)
1460 			srcw = (sljit_s32)srcw;
1461 #endif
1462 		FAIL_IF(load_immediate(compiler, TMP_REG1, srcw));
1463 		src = TMP_REG1;
1464 	}
1465 
1466 	FAIL_IF(push_inst(compiler, (SCVTF ^ inv_bits) | VD(dst_r) | RN(src)));
1467 
1468 	if (dst & SLJIT_MEM)
1469 		return emit_fop_mem(compiler, ((op & SLJIT_F32_OP) ? INT_SIZE : WORD_SIZE) | STORE, TMP_FREG1, dst, dstw);
1470 	return SLJIT_SUCCESS;
1471 }
1472 
1473 static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op,
1474 	sljit_s32 src1, sljit_sw src1w,
1475 	sljit_s32 src2, sljit_sw src2w)
1476 {
1477 	sljit_s32 mem_flags = (op & SLJIT_F32_OP) ? INT_SIZE : WORD_SIZE;
1478 	sljit_ins inv_bits = (op & SLJIT_F32_OP) ? (1 << 22) : 0;
1479 
1480 	if (src1 & SLJIT_MEM) {
1481 		emit_fop_mem(compiler, mem_flags, TMP_FREG1, src1, src1w);
1482 		src1 = TMP_FREG1;
1483 	}
1484 
1485 	if (src2 & SLJIT_MEM) {
1486 		emit_fop_mem(compiler, mem_flags, TMP_FREG2, src2, src2w);
1487 		src2 = TMP_FREG2;
1488 	}
1489 
1490 	return push_inst(compiler, (FCMP ^ inv_bits) | VN(src1) | VM(src2));
1491 }
1492 
1493 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
1494 	sljit_s32 dst, sljit_sw dstw,
1495 	sljit_s32 src, sljit_sw srcw)
1496 {
1497 	sljit_s32 dst_r, mem_flags = (op & SLJIT_F32_OP) ? INT_SIZE : WORD_SIZE;
1498 	sljit_ins inv_bits;
1499 
1500 	CHECK_ERROR();
1501 
1502 	SLJIT_COMPILE_ASSERT((INT_SIZE ^ 0x1) == WORD_SIZE, must_be_one_bit_difference);
1503 	SELECT_FOP1_OPERATION_WITH_CHECKS(compiler, op, dst, dstw, src, srcw);
1504 
1505 	inv_bits = (op & SLJIT_F32_OP) ? (1 << 22) : 0;
1506 	dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
1507 
1508 	if (src & SLJIT_MEM) {
1509 		emit_fop_mem(compiler, (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_F32) ? (mem_flags ^ 0x1) : mem_flags, dst_r, src, srcw);
1510 		src = dst_r;
1511 	}
1512 
1513 	switch (GET_OPCODE(op)) {
1514 	case SLJIT_MOV_F64:
1515 		if (src != dst_r) {
1516 			if (dst_r != TMP_FREG1)
1517 				FAIL_IF(push_inst(compiler, (FMOV ^ inv_bits) | VD(dst_r) | VN(src)));
1518 			else
1519 				dst_r = src;
1520 		}
1521 		break;
1522 	case SLJIT_NEG_F64:
1523 		FAIL_IF(push_inst(compiler, (FNEG ^ inv_bits) | VD(dst_r) | VN(src)));
1524 		break;
1525 	case SLJIT_ABS_F64:
1526 		FAIL_IF(push_inst(compiler, (FABS ^ inv_bits) | VD(dst_r) | VN(src)));
1527 		break;
1528 	case SLJIT_CONV_F64_FROM_F32:
1529 		FAIL_IF(push_inst(compiler, FCVT | ((op & SLJIT_F32_OP) ? (1 << 22) : (1 << 15)) | VD(dst_r) | VN(src)));
1530 		break;
1531 	}
1532 
1533 	if (dst & SLJIT_MEM)
1534 		return emit_fop_mem(compiler, mem_flags | STORE, dst_r, dst, dstw);
1535 	return SLJIT_SUCCESS;
1536 }
1537 
1538 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op,
1539 	sljit_s32 dst, sljit_sw dstw,
1540 	sljit_s32 src1, sljit_sw src1w,
1541 	sljit_s32 src2, sljit_sw src2w)
1542 {
1543 	sljit_s32 dst_r, mem_flags = (op & SLJIT_F32_OP) ? INT_SIZE : WORD_SIZE;
1544 	sljit_ins inv_bits = (op & SLJIT_F32_OP) ? (1 << 22) : 0;
1545 
1546 	CHECK_ERROR();
1547 	CHECK(check_sljit_emit_fop2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
1548 	ADJUST_LOCAL_OFFSET(dst, dstw);
1549 	ADJUST_LOCAL_OFFSET(src1, src1w);
1550 	ADJUST_LOCAL_OFFSET(src2, src2w);
1551 
1552 	dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
1553 	if (src1 & SLJIT_MEM) {
1554 		FAIL_IF(emit_fop_mem(compiler, mem_flags, TMP_FREG1, src1, src1w));
1555 		src1 = TMP_FREG1;
1556 	}
1557 	if (src2 & SLJIT_MEM) {
1558 		FAIL_IF(emit_fop_mem(compiler, mem_flags, TMP_FREG2, src2, src2w));
1559 		src2 = TMP_FREG2;
1560 	}
1561 
1562 	switch (GET_OPCODE(op)) {
1563 	case SLJIT_ADD_F64:
1564 		FAIL_IF(push_inst(compiler, (FADD ^ inv_bits) | VD(dst_r) | VN(src1) | VM(src2)));
1565 		break;
1566 	case SLJIT_SUB_F64:
1567 		FAIL_IF(push_inst(compiler, (FSUB ^ inv_bits) | VD(dst_r) | VN(src1) | VM(src2)));
1568 		break;
1569 	case SLJIT_MUL_F64:
1570 		FAIL_IF(push_inst(compiler, (FMUL ^ inv_bits) | VD(dst_r) | VN(src1) | VM(src2)));
1571 		break;
1572 	case SLJIT_DIV_F64:
1573 		FAIL_IF(push_inst(compiler, (FDIV ^ inv_bits) | VD(dst_r) | VN(src1) | VM(src2)));
1574 		break;
1575 	}
1576 
1577 	if (!(dst & SLJIT_MEM))
1578 		return SLJIT_SUCCESS;
1579 	return emit_fop_mem(compiler, mem_flags | STORE, TMP_FREG1, dst, dstw);
1580 }
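
/* Usage sketch (same assumptions as above): FR0 = FR0 + [SP + 8] as doubles.
   With SLJIT_F32_OP the identical call would flip inv_bits and emit the single
   precision FADD instead. */
#if 0
static sljit_s32 example_fadd(struct sljit_compiler *compiler)
{
	return sljit_emit_fop2(compiler, SLJIT_ADD_F64,
		SLJIT_FR0, 0, SLJIT_FR0, 0, SLJIT_MEM1(SLJIT_SP), 8);
}
#endif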
1581 
1582 /* --------------------------------------------------------------------- */
1583 /*  Other instructions                                                   */
1584 /* --------------------------------------------------------------------- */
1585 
1586 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
1587 {
1588 	CHECK_ERROR();
1589 	CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
1590 	ADJUST_LOCAL_OFFSET(dst, dstw);
1591 
1592 	if (FAST_IS_REG(dst))
1593 		return push_inst(compiler, ORR | RD(dst) | RN(TMP_ZERO) | RM(TMP_LR));
1594 
1595 	/* Memory. */
1596 	return emit_op_mem(compiler, WORD_SIZE | STORE, TMP_LR, dst, dstw, TMP_REG1);
1597 }
1598 
1599 /* --------------------------------------------------------------------- */
1600 /*  Conditional instructions                                             */
1601 /* --------------------------------------------------------------------- */
1602 
1603 static sljit_uw get_cc(sljit_s32 type)
1604 {
1605 	switch (type) {
1606 	case SLJIT_EQUAL:
1607 	case SLJIT_MUL_NOT_OVERFLOW:
1608 	case SLJIT_EQUAL_F64:
1609 		return 0x1;
1610 
1611 	case SLJIT_NOT_EQUAL:
1612 	case SLJIT_MUL_OVERFLOW:
1613 	case SLJIT_NOT_EQUAL_F64:
1614 		return 0x0;
1615 
1616 	case SLJIT_LESS:
1617 	case SLJIT_LESS_F64:
1618 		return 0x2;
1619 
1620 	case SLJIT_GREATER_EQUAL:
1621 	case SLJIT_GREATER_EQUAL_F64:
1622 		return 0x3;
1623 
1624 	case SLJIT_GREATER:
1625 	case SLJIT_GREATER_F64:
1626 		return 0x9;
1627 
1628 	case SLJIT_LESS_EQUAL:
1629 	case SLJIT_LESS_EQUAL_F64:
1630 		return 0x8;
1631 
1632 	case SLJIT_SIG_LESS:
1633 		return 0xa;
1634 
1635 	case SLJIT_SIG_GREATER_EQUAL:
1636 		return 0xb;
1637 
1638 	case SLJIT_SIG_GREATER:
1639 		return 0xd;
1640 
1641 	case SLJIT_SIG_LESS_EQUAL:
1642 		return 0xc;
1643 
1644 	case SLJIT_OVERFLOW:
1645 	case SLJIT_UNORDERED_F64:
1646 		return 0x7;
1647 
1648 	case SLJIT_NOT_OVERFLOW:
1649 	case SLJIT_ORDERED_F64:
1650 		return 0x6;
1651 
1652 	default:
1653 		SLJIT_UNREACHABLE();
1654 		return 0xe;
1655 	}
1656 }
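
/* Note on the values above: they are AArch64 condition codes with the sense
   inverted. The callers either branch around the far-jump sequence when the
   condition does not hold (sljit_emit_jump) or use CSINC/CSEL, where the
   increment/second operand is picked on the inverted condition
   (sljit_emit_op_flags, sljit_emit_cmov). A sanity-check sketch: */
#if 0
static void example_get_cc_sense(void)
{
	SLJIT_ASSERT(get_cc(SLJIT_EQUAL) == 0x1);	/* AArch64 NE */
	SLJIT_ASSERT(get_cc(SLJIT_SIG_LESS) == 0xa);	/* AArch64 GE */
}
#endif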
1657 
1658 SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compiler *compiler)
1659 {
1660 	struct sljit_label *label;
1661 
1662 	CHECK_ERROR_PTR();
1663 	CHECK_PTR(check_sljit_emit_label(compiler));
1664 
1665 	if (compiler->last_label && compiler->last_label->size == compiler->size)
1666 		return compiler->last_label;
1667 
1668 	label = (struct sljit_label*)ensure_abuf(compiler, sizeof(struct sljit_label));
1669 	PTR_FAIL_IF(!label);
1670 	set_label(label, compiler);
1671 	return label;
1672 }
1673 
1674 SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)
1675 {
1676 	struct sljit_jump *jump;
1677 
1678 	CHECK_ERROR_PTR();
1679 	CHECK_PTR(check_sljit_emit_jump(compiler, type));
1680 
1681 	jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
1682 	PTR_FAIL_IF(!jump);
1683 	set_jump(jump, compiler, type & SLJIT_REWRITABLE_JUMP);
1684 	type &= 0xff;
1685 
1686 	if (type < SLJIT_JUMP) {
1687 		jump->flags |= IS_COND;
1688 		PTR_FAIL_IF(push_inst(compiler, B_CC | (6 << 5) | get_cc(type)));
1689 	}
1690 	else if (type >= SLJIT_FAST_CALL)
1691 		jump->flags |= IS_BL;
1692 
1693 	PTR_FAIL_IF(emit_imm64_const(compiler, TMP_REG1, 0));
1694 	jump->addr = compiler->size;
1695 	PTR_FAIL_IF(push_inst(compiler, ((type >= SLJIT_FAST_CALL) ? BLR : BR) | RN(TMP_REG1)));
1696 
1697 	return jump;
1698 }
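
/* For a conditional jump the sequence above is: B.cond +6 on the inverted
   condition, a 4-instruction MOVZ/MOVK load of the target into TMP_REG1, and a
   final BR/BLR; sljit_set_jump_addr() later patches the four move
   instructions. Usage sketch (public front end, assuming sljit_emit_cmp() and
   sljit_set_label() as usual): */
#if 0
static void example_cond_jump(struct sljit_compiler *compiler)
{
	struct sljit_jump *jump;

	/* Jump when R0 == R1; sljit_emit_cmp() ends up in sljit_emit_jump(). */
	jump = sljit_emit_cmp(compiler, SLJIT_EQUAL, SLJIT_R0, 0, SLJIT_R1, 0);
	/* ... code that is skipped when R0 == R1 ... */
	sljit_set_label(jump, sljit_emit_label(compiler));
}
#endif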
1699 
1700 SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_call(struct sljit_compiler *compiler, sljit_s32 type,
1701 	sljit_s32 arg_types)
1702 {
1703 	CHECK_ERROR_PTR();
1704 	CHECK_PTR(check_sljit_emit_call(compiler, type, arg_types));
1705 
1706 #if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
1707 		|| (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
1708 	compiler->skip_checks = 1;
1709 #endif
1710 
1711 	return sljit_emit_jump(compiler, type);
1712 }
1713 
1714 static SLJIT_INLINE struct sljit_jump* emit_cmp_to0(struct sljit_compiler *compiler, sljit_s32 type,
1715 	sljit_s32 src, sljit_sw srcw)
1716 {
1717 	struct sljit_jump *jump;
1718 	sljit_ins inv_bits = (type & SLJIT_I32_OP) ? W_OP : 0;
1719 
1720 	SLJIT_ASSERT((type & 0xff) == SLJIT_EQUAL || (type & 0xff) == SLJIT_NOT_EQUAL);
1721 	ADJUST_LOCAL_OFFSET(src, srcw);
1722 
1723 	jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
1724 	PTR_FAIL_IF(!jump);
1725 	set_jump(jump, compiler, type & SLJIT_REWRITABLE_JUMP);
1726 	jump->flags |= IS_CBZ | IS_COND;
1727 
1728 	if (src & SLJIT_MEM) {
1729 		PTR_FAIL_IF(emit_op_mem(compiler, inv_bits ? INT_SIZE : WORD_SIZE, TMP_REG1, src, srcw, TMP_REG1));
1730 		src = TMP_REG1;
1731 	}
1732 	else if (src & SLJIT_IMM) {
1733 		PTR_FAIL_IF(load_immediate(compiler, TMP_REG1, srcw));
1734 		src = TMP_REG1;
1735 	}
1736 
1737 	SLJIT_ASSERT(FAST_IS_REG(src));
1738 
1739 	if ((type & 0xff) == SLJIT_EQUAL)
1740 		inv_bits |= 1 << 24;
1741 
1742 	PTR_FAIL_IF(push_inst(compiler, (CBZ ^ inv_bits) | (6 << 5) | RT(src)));
1743 	PTR_FAIL_IF(emit_imm64_const(compiler, TMP_REG1, 0));
1744 	jump->addr = compiler->size;
1745 	PTR_FAIL_IF(push_inst(compiler, BR | RN(TMP_REG1)));
1746 	return jump;
1747 }
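
/* Bit 24 toggles CBZ into CBNZ, so for SLJIT_EQUAL ("jump when src is zero")
   the emitted compare-and-branch skips the 5-instruction absolute jump when
   the value is non-zero. A small sanity sketch of that relationship: */
#if 0
static void example_cbz_cbnz(void)
{
	SLJIT_ASSERT((CBZ ^ (1 << 24)) == 0xb5000000);	/* CBNZ */
}
#endif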
1748 
1749 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 src, sljit_sw srcw)
1750 {
1751 	struct sljit_jump *jump;
1752 
1753 	CHECK_ERROR();
1754 	CHECK(check_sljit_emit_ijump(compiler, type, src, srcw));
1755 	ADJUST_LOCAL_OFFSET(src, srcw);
1756 
1757 	if (!(src & SLJIT_IMM)) {
1758 		if (src & SLJIT_MEM) {
1759 			FAIL_IF(emit_op_mem(compiler, WORD_SIZE, TMP_REG1, src, srcw, TMP_REG1));
1760 			src = TMP_REG1;
1761 		}
1762 		return push_inst(compiler, ((type >= SLJIT_FAST_CALL) ? BLR : BR) | RN(src));
1763 	}
1764 
1765 	/* These jumps are converted to jump/call instructions when possible. */
1766 	jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
1767 	FAIL_IF(!jump);
1768 	set_jump(jump, compiler, JUMP_ADDR | ((type >= SLJIT_FAST_CALL) ? IS_BL : 0));
1769 	jump->u.target = srcw;
1770 
1771 	FAIL_IF(emit_imm64_const(compiler, TMP_REG1, 0));
1772 	jump->addr = compiler->size;
1773 	return push_inst(compiler, ((type >= SLJIT_FAST_CALL) ? BLR : BR) | RN(TMP_REG1));
1774 }
1775 
1776 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_icall(struct sljit_compiler *compiler, sljit_s32 type,
1777 	sljit_s32 arg_types,
1778 	sljit_s32 src, sljit_sw srcw)
1779 {
1780 	CHECK_ERROR();
1781 	CHECK(check_sljit_emit_icall(compiler, type, arg_types, src, srcw));
1782 
1783 #if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
1784 		|| (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
1785 	compiler->skip_checks = 1;
1786 #endif
1787 
1788 	return sljit_emit_ijump(compiler, type, src, srcw);
1789 }
1790 
1791 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
1792 	sljit_s32 dst, sljit_sw dstw,
1793 	sljit_s32 type)
1794 {
1795 	sljit_s32 dst_r, src_r, flags, mem_flags;
1796 	sljit_ins cc;
1797 
1798 	CHECK_ERROR();
1799 	CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, type));
1800 	ADJUST_LOCAL_OFFSET(dst, dstw);
1801 
1802 	cc = get_cc(type & 0xff);
1803 	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
1804 
1805 	if (GET_OPCODE(op) < SLJIT_ADD) {
1806 		FAIL_IF(push_inst(compiler, CSINC | (cc << 12) | RD(dst_r) | RN(TMP_ZERO) | RM(TMP_ZERO)));
1807 
1808 		if (dst_r == TMP_REG1) {
1809 			mem_flags = (GET_OPCODE(op) == SLJIT_MOV ? WORD_SIZE : INT_SIZE) | STORE;
1810 			return emit_op_mem(compiler, mem_flags, TMP_REG1, dst, dstw, TMP_REG2);
1811 		}
1812 
1813 		return SLJIT_SUCCESS;
1814 	}
1815 
1816 	flags = HAS_FLAGS(op) ? SET_FLAGS : 0;
1817 	mem_flags = WORD_SIZE;
1818 
1819 	if (op & SLJIT_I32_OP) {
1820 		flags |= INT_OP;
1821 		mem_flags = INT_SIZE;
1822 	}
1823 
1824 	src_r = dst;
1825 
1826 	if (dst & SLJIT_MEM) {
1827 		FAIL_IF(emit_op_mem(compiler, mem_flags, TMP_REG1, dst, dstw, TMP_REG1));
1828 		src_r = TMP_REG1;
1829 	}
1830 
1831 	FAIL_IF(push_inst(compiler, CSINC | (cc << 12) | RD(TMP_REG2) | RN(TMP_ZERO) | RM(TMP_ZERO)));
1832 	FAIL_IF(emit_op_imm(compiler, flags | GET_OPCODE(op), dst_r, src_r, TMP_REG2));
1833 
1834 	if (dst & SLJIT_MEM)
1835 		return emit_op_mem(compiler, mem_flags | STORE, TMP_REG1, dst, dstw, TMP_REG2);
1836 	return SLJIT_SUCCESS;
1837 }
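
/* CSINC Xd, XZR, XZR, cond yields cond ? 0 : 1, and because get_cc() returns
   the inverted condition the destination becomes 1 exactly when the sljit
   condition holds. Usage sketch (public front end; SLJIT_SET_Z on the SUB is
   an assumption about the caller): R0 = (R1 == R2). */
#if 0
static void example_op_flags(struct sljit_compiler *compiler)
{
	sljit_emit_op2(compiler, SLJIT_SUB | SLJIT_SET_Z, SLJIT_UNUSED, 0,
		SLJIT_R1, 0, SLJIT_R2, 0);
	sljit_emit_op_flags(compiler, SLJIT_MOV, SLJIT_R0, 0, SLJIT_EQUAL);
}
#endif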
1838 
1839 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compiler, sljit_s32 type,
1840 	sljit_s32 dst_reg,
1841 	sljit_s32 src, sljit_sw srcw)
1842 {
1843 	sljit_ins inv_bits = (dst_reg & SLJIT_I32_OP) ? W_OP : 0;
1844 	sljit_ins cc;
1845 
1846 	CHECK_ERROR();
1847 	CHECK(check_sljit_emit_cmov(compiler, type, dst_reg, src, srcw));
1848 
1849 	if (SLJIT_UNLIKELY(src & SLJIT_IMM)) {
1850 		if (dst_reg & SLJIT_I32_OP)
1851 			srcw = (sljit_s32)srcw;
1852 		FAIL_IF(load_immediate(compiler, TMP_REG1, srcw));
1853 		src = TMP_REG1;
1854 		srcw = 0;
1855 	}
1856 
1857 	cc = get_cc(type & 0xff);
1858 	dst_reg &= ~SLJIT_I32_OP;
1859 
1860 	return push_inst(compiler, (CSEL ^ inv_bits) | (cc << 12) | RD(dst_reg) | RN(dst_reg) | RM(src));
1861 }
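
/* The CSEL above keeps dst_reg when the inverted condition holds and moves in
   src otherwise, which is exactly conditional-move semantics. Usage sketch:
   R0 = R1 if the preceding flag-setting operation signalled SLJIT_LESS. */
#if 0
static sljit_s32 example_cmov(struct sljit_compiler *compiler)
{
	return sljit_emit_cmov(compiler, SLJIT_LESS, SLJIT_R0, SLJIT_R1, 0);
}
#endif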
1862 
1863 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem(struct sljit_compiler *compiler, sljit_s32 type,
1864 	sljit_s32 reg,
1865 	sljit_s32 mem, sljit_sw memw)
1866 {
1867 	sljit_u32 sign = 0, inst;
1868 
1869 	CHECK_ERROR();
1870 	CHECK(check_sljit_emit_mem(compiler, type, reg, mem, memw));
1871 
1872 	if ((mem & OFFS_REG_MASK) || (memw > 255 || memw < -256))
1873 		return SLJIT_ERR_UNSUPPORTED;
1874 
1875 	if (type & SLJIT_MEM_SUPP)
1876 		return SLJIT_SUCCESS;
1877 
1878 	switch (type & 0xff) {
1879 	case SLJIT_MOV:
1880 	case SLJIT_MOV_P:
1881 		inst = STURBI | (MEM_SIZE_SHIFT(WORD_SIZE) << 30) | 0x400;
1882 		break;
1883 	case SLJIT_MOV_S8:
1884 		sign = 1; /* fall through */
1885 	case SLJIT_MOV_U8:
1886 		inst = STURBI | (MEM_SIZE_SHIFT(BYTE_SIZE) << 30) | 0x400;
1887 		break;
1888 	case SLJIT_MOV_S16:
1889 		sign = 1; /* fall through */
1890 	case SLJIT_MOV_U16:
1891 		inst = STURBI | (MEM_SIZE_SHIFT(HALF_SIZE) << 30) | 0x400;
1892 		break;
1893 	case SLJIT_MOV_S32:
1894 		sign = 1; /* fall through */
1895 	case SLJIT_MOV_U32:
1896 		inst = STURBI | (MEM_SIZE_SHIFT(INT_SIZE) << 30) | 0x400;
1897 		break;
1898 	default:
1899 		SLJIT_UNREACHABLE();
1900 		inst = STURBI | (MEM_SIZE_SHIFT(WORD_SIZE) << 30) | 0x400;
1901 		break;
1902 	}
1903 
1904 	if (!(type & SLJIT_MEM_STORE))
1905 		inst |= sign ? 0x00800000 : 0x00400000;
1906 
1907 	if (type & SLJIT_MEM_PRE)
1908 		inst |= 0x800;
1909 
1910 	return push_inst(compiler, inst | RT(reg) | RN(mem & REG_MASK) | ((memw & 0x1ff) << 12));
1911 }
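
/* Only the pre/post-indexed register forms with a signed 9-bit offset are
   supported here; everything else returns SLJIT_ERR_UNSUPPORTED so the caller
   can fall back to ordinary loads/stores. Usage sketch (SLJIT_MEM_POST from
   the public header is an assumption): LDR W0, [X1], #4, i.e. load a word
   from [R1] and post-increment R1 by 4. */
#if 0
static sljit_s32 example_post_indexed_load(struct sljit_compiler *compiler)
{
	return sljit_emit_mem(compiler, SLJIT_MOV_U32 | SLJIT_MEM_POST,
		SLJIT_R0, SLJIT_MEM1(SLJIT_R1), 4);
}
#endif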
1912 
1913 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fmem(struct sljit_compiler *compiler, sljit_s32 type,
1914 	sljit_s32 freg,
1915 	sljit_s32 mem, sljit_sw memw)
1916 {
1917 	sljit_u32 inst;
1918 
1919 	CHECK_ERROR();
1920 	CHECK(check_sljit_emit_fmem(compiler, type, freg, mem, memw));
1921 
1922 	if ((mem & OFFS_REG_MASK) || (memw > 255 || memw < -256))
1923 		return SLJIT_ERR_UNSUPPORTED;
1924 
1925 	if (type & SLJIT_MEM_SUPP)
1926 		return SLJIT_SUCCESS;
1927 
1928 	inst = STUR_FI | 0x80000400;
1929 
1930 	if (!(type & SLJIT_F32_OP))
1931 		inst |= 0x40000000;
1932 
1933 	if (!(type & SLJIT_MEM_STORE))
1934 		inst |= 0x00400000;
1935 
1936 	if (type & SLJIT_MEM_PRE)
1937 		inst |= 0x800;
1938 
1939 	return push_inst(compiler, inst | VT(freg) | RN(mem & REG_MASK) | ((memw & 0x1ff) << 12));
1940 }
1941 
1942 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_local_base(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw offset)
1943 {
1944 	sljit_s32 dst_reg;
1945 	sljit_ins ins;
1946 
1947 	CHECK_ERROR();
1948 	CHECK(check_sljit_get_local_base(compiler, dst, dstw, offset));
1949 
1950 	SLJIT_ASSERT(SLJIT_LOCALS_OFFSET_BASE == 0);
1951 
1952 	dst_reg = FAST_IS_REG(dst) ? dst : TMP_REG1;
1953 
1954 	if (offset <= 0xffffff && offset >= -0xffffff) {
1955 		ins = ADDI;
1956 		if (offset < 0) {
1957 			offset = -offset;
1958 			ins = SUBI;
1959 		}
1960 
1961 		if (offset <= 0xfff)
1962 			FAIL_IF(push_inst(compiler, ins | RD(dst_reg) | RN(SLJIT_SP) | (offset << 10)));
1963 		else {
1964 			FAIL_IF(push_inst(compiler, ins | RD(dst_reg) | RN(SLJIT_SP) | ((offset & 0xfff000) >> (12 - 10)) | (1 << 22)));
1965 
1966 			offset &= 0xfff;
1967 			if (offset != 0)
1968 				FAIL_IF(push_inst(compiler, ins | RD(dst_reg) | RN(dst_reg) | (offset << 10)));
1969 		}
1970 	}
1971 	else {
1972 		FAIL_IF(load_immediate(compiler, dst_reg, offset));
1973 		/* Add extended register form. */
1974 		FAIL_IF(push_inst(compiler, ADDE | (0x3 << 13) | RD(dst_reg) | RN(SLJIT_SP) | RM(dst_reg)));
1975 	}
1976 
1977 	if (SLJIT_UNLIKELY(dst & SLJIT_MEM))
1978 		return emit_op_mem(compiler, WORD_SIZE | STORE, dst_reg, dst, dstw, TMP_REG1);
1979 	return SLJIT_SUCCESS;
1980 }
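
/* Offsets up to 24 bits are folded into one or two ADD/SUB immediates (the
   second form uses the shift-by-12 encoding selected by bit 22); anything
   larger goes through load_immediate plus an extended-register ADD against SP.
   Usage sketch: R0 = SP + 0x12345 becomes ADD R0, SP, #0x12, LSL #12 followed
   by ADD R0, R0, #0x345. */
#if 0
static sljit_s32 example_local_base(struct sljit_compiler *compiler)
{
	return sljit_get_local_base(compiler, SLJIT_R0, 0, 0x12345);
}
#endif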
1981 
1982 SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)
1983 {
1984 	struct sljit_const *const_;
1985 	sljit_s32 dst_r;
1986 
1987 	CHECK_ERROR_PTR();
1988 	CHECK_PTR(check_sljit_emit_const(compiler, dst, dstw, init_value));
1989 	ADJUST_LOCAL_OFFSET(dst, dstw);
1990 
1991 	const_ = (struct sljit_const*)ensure_abuf(compiler, sizeof(struct sljit_const));
1992 	PTR_FAIL_IF(!const_);
1993 	set_const(const_, compiler);
1994 
1995 	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
1996 	PTR_FAIL_IF(emit_imm64_const(compiler, dst_r, init_value));
1997 
1998 	if (dst & SLJIT_MEM)
1999 		PTR_FAIL_IF(emit_op_mem(compiler, WORD_SIZE | STORE, dst_r, dst, dstw, TMP_REG2));
2000 	return const_;
2001 }
2002 
2003 SLJIT_API_FUNC_ATTRIBUTE struct sljit_put_label* sljit_emit_put_label(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
2004 {
2005 	struct sljit_put_label *put_label;
2006 	sljit_s32 dst_r;
2007 
2008 	CHECK_ERROR_PTR();
2009 	CHECK_PTR(check_sljit_emit_put_label(compiler, dst, dstw));
2010 	ADJUST_LOCAL_OFFSET(dst, dstw);
2011 
2012 	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
2013 	PTR_FAIL_IF(emit_imm64_const(compiler, dst_r, 0));
2014 
2015 	put_label = (struct sljit_put_label*)ensure_abuf(compiler, sizeof(struct sljit_put_label));
2016 	PTR_FAIL_IF(!put_label);
2017 	set_put_label(put_label, compiler, 1);
2018 
2019 	if (dst & SLJIT_MEM)
2020 		PTR_FAIL_IF(emit_op_mem(compiler, WORD_SIZE | STORE, dst_r, dst, dstw, TMP_REG2));
2021 
2022 	return put_label;
2023 }
2024 
2025 SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_target, sljit_sw executable_offset)
2026 {
2027 	sljit_ins* inst = (sljit_ins*)addr;
2028 	sljit_s32 dst;
2029 	SLJIT_UNUSED_ARG(executable_offset);
2030 
2031 	SLJIT_UPDATE_WX_FLAGS(inst, inst + 4, 0);
2032 
2033 	dst = inst[0] & 0x1f;
2034 	SLJIT_ASSERT((inst[0] & 0xffe00000) == MOVZ && (inst[1] & 0xffe00000) == (MOVK | (1 << 21)));
2035 	inst[0] = MOVZ | dst | ((new_target & 0xffff) << 5);
2036 	inst[1] = MOVK | dst | (((new_target >> 16) & 0xffff) << 5) | (1 << 21);
2037 	inst[2] = MOVK | dst | (((new_target >> 32) & 0xffff) << 5) | (2 << 21);
2038 	inst[3] = MOVK | dst | ((new_target >> 48) << 5) | (3 << 21);
2039 
2040 	SLJIT_UPDATE_WX_FLAGS(inst, inst + 4, 1);
2041 	inst = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
2042 	SLJIT_CACHE_FLUSH(inst, inst + 4);
2043 }
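
/* The 64-bit target is split into four 16-bit chunks placed in bits 20:5 of
   one MOVZ and three MOVK instructions (hw field in bits 22:21), then the
   instruction cache is flushed. Usage sketch (assuming sljit_get_jump_addr()
   from the public header and a zero executable offset): redirect a jump that
   was created with SLJIT_REWRITABLE_JUMP after code generation. */
#if 0
static void example_patch_jump(struct sljit_jump *jump, sljit_uw new_target)
{
	sljit_set_jump_addr(sljit_get_jump_addr(jump), new_target, 0);
}
#endif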
2044 
2045 SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant, sljit_sw executable_offset)
2046 {
2047 	sljit_set_jump_addr(addr, new_constant, executable_offset);
2048 }
2049