1 /**
2 * \file
3 * gsharedvt support code for arm
4 *
5 * Authors:
6 * Zoltan Varga <vargaz@gmail.com>
7 *
8 * Copyright 2013 Xamarin, Inc (http://www.xamarin.com)
9 * Licensed under the MIT license. See LICENSE file in the project root for full license information.
10 */
11 #include <config.h>
12 #include <glib.h>
13
14 #include <mono/metadata/abi-details.h>
15 #include <mono/metadata/appdomain.h>
16 #include <mono/metadata/marshal.h>
17 #include <mono/metadata/tabledefs.h>
18 #include <mono/metadata/profiler-private.h>
19 #include <mono/arch/arm/arm-codegen.h>
20 #include <mono/arch/arm/arm-vfp-codegen.h>
21
22 #include "mini.h"
23 #include "mini-arm.h"
24
25 #define ALIGN_TO(val,align) ((((guint64)val) + ((align) - 1)) & ~((align) - 1))
26
27 #ifdef MONO_ARCH_GSHAREDVT_SUPPORTED
28
29 static inline guint8*
emit_bx(guint8 * code,int reg)30 emit_bx (guint8* code, int reg)
31 {
32 if (mono_arm_thumb_supported ())
33 ARM_BX (code, reg);
34 else
35 ARM_MOV_REG_REG (code, ARMREG_PC, reg);
36 return code;
37 }
38
/*
 * mono_arm_start_gsharedvt_call:
 *
 *   Called from the gsharedvt trampoline to transfer call arguments from the
 * CALLER register/stack area to the CALLEE area, applying the per-argument
 * marshalling encoded in INFO->map. Returns the address the trampoline
 * should branch to, or NULL when a virtual call is made on a NULL 'this'.
 *
 * CALLER/CALLEE: slot arrays where slots 0-3 mirror the argument registers
 * r0-r3 and slot 4 onwards mirror the stack arguments (see the trampoline
 * generator, which lays the register save area directly below the stack
 * area to make the slots contiguous).
 * MRGCTX_REG: the raw value of the rgctx register; for calli it holds the
 * address to call.
 * CALLER_FREGS/CALLEE_FREGS: the d0-d7 VFP argument registers saved/loaded
 * as doubles (hence the reg / 2 indexing below).
 */
gpointer
mono_arm_start_gsharedvt_call (GSharedVtCallInfo *info, gpointer *caller, gpointer *callee, gpointer mrgctx_reg,
							   double *caller_fregs, double *callee_fregs)
{
	int i;

	/*
	 * The caller/callee regs are mapped to slot 0..3, stack slot 0 is mapped to slot 4, etc.
	 */

	/* Set vtype ret arg */
	if (info->vret_slot != -1) {
		/* Point the hidden valuetype return register at the reserved callee slot */
		callee [info->vret_arg_reg] = &callee [info->vret_slot];
	}

	for (i = 0; i < info->map_count; ++i) {
		/* map entries are (src, dst) slot pairs; the marshal kind lives in src's top byte */
		int src = info->map [i * 2];
		int dst = info->map [(i * 2) + 1];
		int arg_marshal = (src >> 24) & 0xff;

		switch (arg_marshal) {
		case GSHAREDVT_ARG_NONE:
			/* Plain slot copy, no marshalling */
			callee [dst] = caller [src];
			break;
		case GSHAREDVT_ARG_BYVAL_TO_BYREF:
			/* gsharedvt argument passed by addr in reg/stack slot */
			src = src & 0xffff;
			callee [dst] = caller + src;
			break;
		case GSHAREDVT_ARG_BYREF_TO_BYVAL: {
			/* gsharedvt argument passed by value */
			/* src encodes both the slot count (bits 8-15) and the source slot (bits 0-7) */
			int nslots = (src >> 8) & 0xff;
			int src_slot = src & 0xff;
			int j;
			gpointer *addr = caller [src_slot];

			for (j = 0; j < nslots; ++j)
				callee [dst + j] = addr [j];
			break;
		}
		/* The I1/I2/U1/U2 variants dereference the byref arg and sign/zero extend to a full word */
		case GSHAREDVT_ARG_BYREF_TO_BYVAL_I1: {
			int src_slot = src & 0xff;
			gpointer *addr = caller [src_slot];

			callee [dst] = GINT_TO_POINTER ((int)*(gint8*)addr);
			break;
		}
		case GSHAREDVT_ARG_BYREF_TO_BYVAL_I2: {
			int src_slot = src & 0xff;
			gpointer *addr = caller [src_slot];

			callee [dst] = GINT_TO_POINTER ((int)*(gint16*)addr);
			break;
		}
		case GSHAREDVT_ARG_BYREF_TO_BYVAL_U1: {
			int src_slot = src & 0xff;
			gpointer *addr = caller [src_slot];

			callee [dst] = GUINT_TO_POINTER ((guint)*(guint8*)addr);
			break;
		}
		case GSHAREDVT_ARG_BYREF_TO_BYVAL_U2: {
			int src_slot = src & 0xff;
			gpointer *addr = caller [src_slot];

			callee [dst] = GUINT_TO_POINTER ((guint)*(guint16*)addr);
			break;
		}
		default:
			g_assert_not_reached ();
			break;
		}
	}

	/* The slot based approach above is very complicated, use a nested switch instead for fp regs */
	// FIXME: Use this for the other cases as well
	if (info->have_fregs) {
		CallInfo *caller_cinfo = info->caller_cinfo;
		CallInfo *callee_cinfo = info->callee_cinfo;
		int aindex;

		/* For each argument, dispatch on (caller storage, callee storage) */
		for (aindex = 0; aindex < caller_cinfo->nargs; ++aindex) {
			ArgInfo *ainfo = &caller_cinfo->args [aindex];
			ArgInfo *ainfo2 = &callee_cinfo->args [aindex];

			switch (ainfo->storage) {
			case RegTypeFP: {
				switch (ainfo2->storage) {
				case RegTypeFP:
					/* freg -> freg copy; ainfo->reg counts single-precision regs, the arrays hold doubles */
					callee_fregs [ainfo2->reg / 2] = caller_fregs [ainfo->reg / 2];
					break;
				case RegTypeGSharedVtInReg:
					/* freg -> byref in an integer reg slot */
					callee [ainfo2->reg] = &caller_fregs [ainfo->reg / 2];
					break;
				case RegTypeGSharedVtOnStack: {
					/* freg -> byref in a stack slot; stack slots start at callee slot 4 */
					int sslot = ainfo2->offset / 4;
					callee [sslot + 4] = &caller_fregs [ainfo->reg / 2];
					break;
				}
				default:
					g_assert_not_reached ();
					break;
				}
				break;
			}
			case RegTypeGSharedVtInReg: {
				switch (ainfo2->storage) {
				case RegTypeFP: {
					/* byref in reg -> freg: load the double through the pointer */
					callee_fregs [ainfo2->reg / 2] = *(double*)caller [ainfo->reg];
					break;
				}
				default:
					/* Non-fp cases were already handled by the map loop above */
					break;
				}
				break;
			}
			case RegTypeGSharedVtOnStack: {
				switch (ainfo2->storage) {
				case RegTypeFP: {
					/* byref on stack -> freg */
					int sslot = ainfo->offset / 4;
					callee_fregs [ainfo2->reg / 2] = *(double*)caller [sslot + 4];
					break;
				}
				default:
					break;
				}
				break;
			}
			default:
				break;
			}
		}
	}

	/* Resolve the address to call */
	if (info->vcall_offset != -1) {
		/* Virtual call: look the target up in the vtable of 'this' (caller slot 0 == r0) */
		MonoObject *this_obj = caller [0];

		if (G_UNLIKELY (!this_obj))
			/* NULL 'this': let the caller produce the NullReferenceException */
			return NULL;
		if (info->vcall_offset == MONO_GSHAREDVT_DEL_INVOKE_VT_OFFSET)
			/* delegate invoke */
			return ((MonoDelegate*)this_obj)->invoke_impl;
		else
			return *(gpointer*)((char*)this_obj->vtable + info->vcall_offset);
	} else if (info->calli) {
		/* The address to call is passed in the mrgctx reg */
		return mrgctx_reg;
	} else {
		/* Direct call */
		return info->addr;
	}
}
190
191 #ifndef DISABLE_JIT
192
/*
 * mono_arch_get_gsharedvt_trampoline:
 *
 *   Generate the gsharedvt marshalling trampoline. On entry, r0 holds a
 * GSharedVtCallInfo* and the arg trampoline has pushed r0-r3. The generated
 * code sets up an iOS-ABI-compatible frame (fp = r7), allocates a callee
 * argument area, calls mono_arm_start_gsharedvt_call () to fill it and
 * obtain the target address, performs the real call, then marshals the
 * return value according to info->ret_marshal for both the gsharedvt-in
 * and gsharedvt-out directions.
 *   If INFO is non-NULL, a MonoTrampInfo describing the generated code
 * (name, size, patch info, unwind ops) is returned in it. AOT selects a
 * PC-relative indirect load of the mono_arm_start_gsharedvt_call address
 * instead of embedding the pointer directly.
 */
gpointer
mono_arch_get_gsharedvt_trampoline (MonoTrampInfo **info, gboolean aot)
{
	guint8 *code, *buf;
	int buf_len, cfa_offset;
	GSList *unwind_ops = NULL;
	MonoJumpInfo *ji = NULL;
	guint8 *br_out, *br [16], *br_ret [16];
	int i, offset, arg_reg, npushed, info_offset, mrgctx_offset;
	int caller_reg_area_offset, caller_freg_area_offset, callee_reg_area_offset, callee_freg_area_offset;
	int lr_offset, fp, br_ret_index, args_size;

	buf_len = 784;
	buf = code = mono_global_codeman_reserve (buf_len);

	arg_reg = ARMREG_R0;
	/* Registers pushed by the arg trampoline */
	npushed = 4;

	// ios abi compatible frame
	fp = ARMREG_R7;
	cfa_offset = npushed * sizeof (gpointer);
	mono_add_unwind_op_def_cfa (unwind_ops, code, buf, ARMREG_SP, cfa_offset);
	ARM_PUSH (code, (1 << fp) | (1 << ARMREG_LR));
	cfa_offset += 2 * sizeof (gpointer);
	mono_add_unwind_op_def_cfa_offset (unwind_ops, code, buf, cfa_offset);
	mono_add_unwind_op_offset (unwind_ops, code, buf, fp, (- cfa_offset));
	mono_add_unwind_op_offset (unwind_ops, code, buf, ARMREG_LR, ((- cfa_offset) + 4));
	ARM_MOV_REG_REG (code, fp, ARMREG_SP);
	mono_add_unwind_op_def_cfa_reg (unwind_ops, code, buf, fp);
	/* Allocate stack frame */
	ARM_SUB_REG_IMM8 (code, ARMREG_SP, ARMREG_SP, 32 + (16 * sizeof (double)));
	if (MONO_ARCH_FRAME_ALIGNMENT > 8)
		ARM_SUB_REG_IMM8 (code, ARMREG_SP, ARMREG_SP, (MONO_ARCH_FRAME_ALIGNMENT - 8));
	/* Lay out the locals as negative offsets from fp */
	offset = 4;
	info_offset = -offset;
	offset += 4;
	mrgctx_offset = -offset;
	offset += 4 * 4;
	callee_reg_area_offset = -offset;
	offset += 8 * 8;
	caller_freg_area_offset = -offset;
	offset += 8 * 8;
	callee_freg_area_offset = -offset;

	/* The caller r0-r3 save area pushed by the arg trampoline sits above fp */
	caller_reg_area_offset = cfa_offset - (npushed * sizeof (gpointer));
	lr_offset = 4;
	/* Save info struct which is in r0 */
	ARM_STR_IMM (code, arg_reg, fp, info_offset);
	/* Save rgctx reg */
	ARM_STR_IMM (code, MONO_ARCH_RGCTX_REG, fp, mrgctx_offset);
	/* Allocate callee area */
	ARM_LDR_IMM (code, ARMREG_IP, arg_reg, MONO_STRUCT_OFFSET (GSharedVtCallInfo, stack_usage));
	ARM_SUB_REG_REG (code, ARMREG_SP, ARMREG_SP, ARMREG_IP);
	/* Allocate callee register area just below the callee area so the slots are correct */
	ARM_SUB_REG_IMM8 (code, ARMREG_SP, ARMREG_SP, 4 * sizeof (gpointer));
	if (mono_arm_is_hard_float ()) {
		/* Save caller fregs */
		ARM_SUB_REG_IMM8 (code, ARMREG_IP, fp, -caller_freg_area_offset);
		for (i = 0; i < 8; ++i)
			ARM_FSTD (code, i * 2, ARMREG_IP, (i * sizeof (double)));
	}

	/*
	 * The stack now looks like this:
	 * <caller frame>
	 * <saved r0-r3, lr>
	 * <saved fp> <- fp
	 * <our frame>
	 * <callee area> <- sp
	 */
	g_assert (mono_arm_thumb_supported ());

	/* Call start_gsharedvt_call () */
	/* 6 arguments, needs 2 stack slot, need to clean it up after the call */
	args_size = 2 * sizeof (gpointer);
	ARM_SUB_REG_IMM8 (code, ARMREG_SP, ARMREG_SP, args_size);
	/* arg1 == info */
	ARM_LDR_IMM (code, ARMREG_R0, fp, info_offset);
	/* arg2 == caller stack area */
	ARM_ADD_REG_IMM8 (code, ARMREG_R1, fp, cfa_offset - 4 * sizeof (gpointer));
	/* arg3 == callee stack area */
	ARM_ADD_REG_IMM8 (code, ARMREG_R2, ARMREG_SP, args_size);
	/* arg4 == mrgctx reg */
	ARM_LDR_IMM (code, ARMREG_R3, fp, mrgctx_offset);
	/* arg5 == caller freg area */
	ARM_SUB_REG_IMM8 (code, ARMREG_IP, fp, -caller_freg_area_offset);
	ARM_STR_IMM (code, ARMREG_IP, ARMREG_SP, 0);
	/* arg6 == callee freg area */
	ARM_SUB_REG_IMM8 (code, ARMREG_IP, fp, -callee_freg_area_offset);
	ARM_STR_IMM (code, ARMREG_IP, ARMREG_SP, 4);
	/* Make the call */
	if (aot) {
		/* AOT: load the address through a GOT-style inline slot filled at patch time */
		ji = mono_patch_info_list_prepend (ji, code - buf, MONO_PATCH_INFO_JIT_ICALL_ADDR, "mono_arm_start_gsharedvt_call");
		ARM_LDR_IMM (code, ARMREG_IP, ARMREG_PC, 0);
		ARM_B (code, 0);
		*(gpointer*)code = NULL;
		code += 4;
		ARM_LDR_REG_REG (code, ARMREG_IP, ARMREG_PC, ARMREG_IP);
	} else {
		/* JIT: embed the absolute address inline, skipped over by the branch */
		ARM_LDR_IMM (code, ARMREG_IP, ARMREG_PC, 0);
		ARM_B (code, 0);
		*(gpointer*)code = mono_arm_start_gsharedvt_call;
		code += 4;
	}
	ARM_MOV_REG_REG (code, ARMREG_LR, ARMREG_PC);
	code = emit_bx (code, ARMREG_IP);
	/* Clean up stack */
	ARM_ADD_REG_IMM8 (code, ARMREG_SP, ARMREG_SP, args_size);

	/* Make the real method call */
	/* R0 contains the addr to call */
	ARM_MOV_REG_REG (code, ARMREG_IP, ARMREG_R0);
	/* Load argument registers */
	ARM_LDM (code, ARMREG_SP, (1 << ARMREG_R0) | (1 << ARMREG_R1) | (1 << ARMREG_R2) | (1 << ARMREG_R3));
	if (mono_arm_is_hard_float ()) {
		/* Load argument fregs */
		ARM_SUB_REG_IMM8 (code, ARMREG_LR, fp, -callee_freg_area_offset);
		for (i = 0; i < 8; ++i)
			ARM_FLDD (code, i * 2, ARMREG_LR, (i * sizeof (double)));
	}
	/* Pop callee register area */
	ARM_ADD_REG_IMM8 (code, ARMREG_SP, ARMREG_SP, 4 * sizeof (gpointer));
	/* Load rgctx */
	ARM_LDR_IMM (code, MONO_ARCH_RGCTX_REG, fp, mrgctx_offset);
	/* Make the call */
#if 0
	ARM_LDR_IMM (code, ARMREG_IP, fp, info_offset);
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_IP, MONO_STRUCT_OFFSET (GSharedVtCallInfo, addr));
#endif
	/* mono_arch_find_imt_method () depends on this */
	ARM_ADD_REG_IMM8 (code, ARMREG_LR, ARMREG_PC, 4);
	ARM_BX (code, ARMREG_IP);
	*((gpointer*)code) = NULL;
	code += 4;

	br_ret_index = 0;

	/* Branch between IN/OUT cases */
	ARM_LDR_IMM (code, ARMREG_IP, fp, info_offset);
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_IP, MONO_STRUCT_OFFSET (GSharedVtCallInfo, gsharedvt_in));

	ARM_CMP_REG_IMM8 (code, ARMREG_IP, 1);
	br_out = code;
	ARM_B_COND (code, ARMCOND_NE, 0);

	/* IN CASE */
	/* Caller is gsharedvt: read the return value out of the vret buffer */

	/* LR == return marshalling type */
	ARM_LDR_IMM (code, ARMREG_IP, fp, info_offset);
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_IP, MONO_STRUCT_OFFSET (GSharedVtCallInfo, ret_marshal));

	/* Continue if no marshalling required */
	ARM_CMP_REG_IMM8 (code, ARMREG_IP, GSHAREDVT_RET_NONE);
	br_ret [br_ret_index ++] = code;
	ARM_B_COND (code, ARMCOND_EQ, 0);

	/* Compute vret area address in LR */
	ARM_LDR_IMM (code, ARMREG_LR, fp, info_offset);
	ARM_LDR_IMM (code, ARMREG_LR, ARMREG_LR, MONO_STRUCT_OFFSET (GSharedVtCallInfo, vret_slot));
	/* The slot value is off by 4 */
	ARM_SUB_REG_IMM8 (code, ARMREG_LR, ARMREG_LR, 4);
	ARM_SHL_IMM (code, ARMREG_LR, ARMREG_LR, 2);
	ARM_ADD_REG_REG (code, ARMREG_LR, ARMREG_LR, ARMREG_SP);

	/* Branch to specific marshalling code */
	ARM_CMP_REG_IMM8 (code, ARMREG_IP, GSHAREDVT_RET_IREG);
	br [0] = code;
	ARM_B_COND (code, ARMCOND_EQ, 0);
	ARM_CMP_REG_IMM8 (code, ARMREG_IP, GSHAREDVT_RET_IREGS);
	br [1] = code;
	ARM_B_COND (code, ARMCOND_EQ, 0);
	ARM_CMP_REG_IMM8 (code, ARMREG_IP, GSHAREDVT_RET_I1);
	br [2] = code;
	ARM_B_COND (code, ARMCOND_EQ, 0);
	ARM_CMP_REG_IMM8 (code, ARMREG_IP, GSHAREDVT_RET_U1);
	br [3] = code;
	ARM_B_COND (code, ARMCOND_EQ, 0);
	ARM_CMP_REG_IMM8 (code, ARMREG_IP, GSHAREDVT_RET_I2);
	br [4] = code;
	ARM_B_COND (code, ARMCOND_EQ, 0);
	ARM_CMP_REG_IMM8 (code, ARMREG_IP, GSHAREDVT_RET_U2);
	br [5] = code;
	ARM_B_COND (code, ARMCOND_EQ, 0);
	ARM_CMP_REG_IMM8 (code, ARMREG_IP, GSHAREDVT_RET_VFP_R4);
	br [6] = code;
	ARM_B_COND (code, ARMCOND_EQ, 0);
	ARM_CMP_REG_IMM8 (code, ARMREG_IP, GSHAREDVT_RET_VFP_R8);
	br [7] = code;
	ARM_B_COND (code, ARMCOND_EQ, 0);
	/* Unrecognized marshal kind: just return */
	br_ret [br_ret_index ++] = code;
	ARM_B (code, 0);

	/* IN IREG case */
	arm_patch (br [0], code);
	ARM_LDR_IMM (code, ARMREG_R0, ARMREG_LR, 0);
	br_ret [br_ret_index ++] = code;
	ARM_B (code, 0);
	/* IN IREGS case */
	arm_patch (br [1], code);
	ARM_LDR_IMM (code, ARMREG_R0, ARMREG_LR, 0);
	ARM_LDR_IMM (code, ARMREG_R1, ARMREG_LR, 4);
	br_ret [br_ret_index ++] = code;
	ARM_B (code, 0);
	/* I1 case */
	arm_patch (br [2], code);
	ARM_LDRSB_IMM (code, ARMREG_R0, ARMREG_LR, 0);
	br_ret [br_ret_index ++] = code;
	ARM_B (code, 0);
	/* U1 case */
	arm_patch (br [3], code);
	ARM_LDRB_IMM (code, ARMREG_R0, ARMREG_LR, 0);
	br_ret [br_ret_index ++] = code;
	ARM_B (code, 0);
	/* I2 case */
	arm_patch (br [4], code);
	ARM_LDRSH_IMM (code, ARMREG_R0, ARMREG_LR, 0);
	br_ret [br_ret_index ++] = code;
	ARM_B (code, 0);
	/* U2 case */
	arm_patch (br [5], code);
	ARM_LDRH_IMM (code, ARMREG_R0, ARMREG_LR, 0);
	br_ret [br_ret_index ++] = code;
	ARM_B (code, 0);
	/* R4 case */
	arm_patch (br [6], code);
	ARM_FLDS (code, ARM_VFP_D0, ARMREG_LR, 0);
	code += 4;
	br_ret [br_ret_index ++] = code;
	ARM_B (code, 0);
	/* R8 case */
	arm_patch (br [7], code);
	ARM_FLDD (code, ARM_VFP_D0, ARMREG_LR, 0);
	code += 4;
	br_ret [br_ret_index ++] = code;
	ARM_B (code, 0);

	/* OUT CASE */
	/* Callee is gsharedvt: store the raw return registers into the caller's vret buffer */
	arm_patch (br_out, code);

	/* Marshal return value */
	ARM_LDR_IMM (code, ARMREG_IP, fp, info_offset);
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_IP, MONO_STRUCT_OFFSET (GSharedVtCallInfo, ret_marshal));

	ARM_CMP_REG_IMM8 (code, ARMREG_IP, GSHAREDVT_RET_IREGS);
	br [0] = code;
	ARM_B_COND (code, ARMCOND_NE, 0);

	/* OUT IREGS case */
	/* Load vtype ret addr from the caller arg regs */
	ARM_LDR_IMM (code, ARMREG_IP, fp, info_offset);
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_IP, MONO_STRUCT_OFFSET (GSharedVtCallInfo, vret_arg_reg));
	ARM_SHL_IMM (code, ARMREG_IP, ARMREG_IP, 2);
	ARM_ADD_REG_REG (code, ARMREG_IP, ARMREG_IP, fp);
	ARM_ADD_REG_IMM8 (code, ARMREG_IP, ARMREG_IP, caller_reg_area_offset);
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_IP, 0);
	/* Save both registers for simplicity */
	ARM_STR_IMM (code, ARMREG_R0, ARMREG_IP, 0);
	ARM_STR_IMM (code, ARMREG_R1, ARMREG_IP, 4);
	br_ret [br_ret_index ++] = code;
	ARM_B (code, 0);
	arm_patch (br [0], code);

	ARM_CMP_REG_IMM8 (code, ARMREG_IP, GSHAREDVT_RET_IREG);
	br [0] = code;
	ARM_B_COND (code, ARMCOND_NE, 0);

	/* OUT IREG case */
	/* Load vtype ret addr from the caller arg regs */
	ARM_LDR_IMM (code, ARMREG_IP, fp, info_offset);
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_IP, MONO_STRUCT_OFFSET (GSharedVtCallInfo, vret_arg_reg));
	ARM_SHL_IMM (code, ARMREG_IP, ARMREG_IP, 2);
	ARM_ADD_REG_REG (code, ARMREG_IP, ARMREG_IP, fp);
	ARM_ADD_REG_IMM8 (code, ARMREG_IP, ARMREG_IP, caller_reg_area_offset);
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_IP, 0);
	/* Save the return value to the buffer pointed to by the vret addr */
	ARM_STR_IMM (code, ARMREG_R0, ARMREG_IP, 0);
	br_ret [br_ret_index ++] = code;
	ARM_B (code, 0);
	arm_patch (br [0], code);

	ARM_CMP_REG_IMM8 (code, ARMREG_IP, GSHAREDVT_RET_U1);
	br [0] = code;
	ARM_B_COND (code, ARMCOND_NE, 0);

	/* OUT U1 case */
	/* Load vtype ret addr from the caller arg regs */
	ARM_LDR_IMM (code, ARMREG_IP, fp, info_offset);
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_IP, MONO_STRUCT_OFFSET (GSharedVtCallInfo, vret_arg_reg));
	ARM_SHL_IMM (code, ARMREG_IP, ARMREG_IP, 2);
	ARM_ADD_REG_REG (code, ARMREG_IP, ARMREG_IP, fp);
	ARM_ADD_REG_IMM8 (code, ARMREG_IP, ARMREG_IP, caller_reg_area_offset);
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_IP, 0);
	/* Save the return value to the buffer pointed to by the vret addr */
	ARM_STRB_IMM (code, ARMREG_R0, ARMREG_IP, 0);
	br_ret [br_ret_index ++] = code;
	ARM_B (code, 0);
	arm_patch (br [0], code);

	ARM_CMP_REG_IMM8 (code, ARMREG_IP, GSHAREDVT_RET_VFP_R4);
	br [0] = code;
	ARM_B_COND (code, ARMCOND_NE, 0);

	/* OUT R4 case */
	/* Load vtype ret addr from the caller arg regs */
	ARM_LDR_IMM (code, ARMREG_IP, fp, info_offset);
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_IP, MONO_STRUCT_OFFSET (GSharedVtCallInfo, vret_arg_reg));
	ARM_SHL_IMM (code, ARMREG_IP, ARMREG_IP, 2);
	ARM_ADD_REG_REG (code, ARMREG_IP, ARMREG_IP, fp);
	ARM_ADD_REG_IMM8 (code, ARMREG_IP, ARMREG_IP, caller_reg_area_offset);
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_IP, 0);
	/* Save the return value to the buffer pointed to by the vret addr */
	ARM_FSTS (code, ARM_VFP_D0, ARMREG_IP, 0);
	br_ret [br_ret_index ++] = code;
	ARM_B (code, 0);
	arm_patch (br [0], code);

	ARM_CMP_REG_IMM8 (code, ARMREG_IP, GSHAREDVT_RET_VFP_R8);
	br [0] = code;
	ARM_B_COND (code, ARMCOND_NE, 0);

	/* OUT R8 case */
	/* Load vtype ret addr from the caller arg regs */
	ARM_LDR_IMM (code, ARMREG_IP, fp, info_offset);
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_IP, MONO_STRUCT_OFFSET (GSharedVtCallInfo, vret_arg_reg));
	ARM_SHL_IMM (code, ARMREG_IP, ARMREG_IP, 2);
	ARM_ADD_REG_REG (code, ARMREG_IP, ARMREG_IP, fp);
	ARM_ADD_REG_IMM8 (code, ARMREG_IP, ARMREG_IP, caller_reg_area_offset);
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_IP, 0);
	/* Save the return value to the buffer pointed to by the vret addr */
	ARM_FSTD (code, ARM_VFP_D0, ARMREG_IP, 0);
	br_ret [br_ret_index ++] = code;
	ARM_B (code, 0);
	arm_patch (br [0], code);

	/* OUT other cases */
	br_ret [br_ret_index ++] = code;
	ARM_B (code, 0);

	/* Patch all the return branches to fall through here */
	for (i = 0; i < br_ret_index; ++i)
		arm_patch (br_ret [i], code);

	/* Normal return */
	/* Restore registers + stack */
	ARM_MOV_REG_REG (code, ARMREG_SP, fp);
	ARM_LDM (code, fp, (1 << fp) | (1 << ARMREG_LR));
	ARM_ADD_REG_IMM8 (code, ARMREG_SP, ARMREG_SP, cfa_offset);
	/* Return */
	ARM_BX (code, ARMREG_LR);

	g_assert ((code - buf) < buf_len);

	if (info)
		*info = mono_tramp_info_create ("gsharedvt_trampoline", buf, code - buf, ji, unwind_ops);

	mono_arch_flush_icache (buf, code - buf);
	return buf;
}
551
552 #else
553
/*
 * DISABLE_JIT stub: the trampoline cannot be generated at runtime in a
 * JIT-less build, so reaching this is a configuration bug.
 */
gpointer
mono_arch_get_gsharedvt_trampoline (MonoTrampInfo **info, gboolean aot)
{
	g_assert_not_reached ();
	return NULL;
}
560
561 #endif
562
563
564 #else
565
566
/*
 * Stub for builds without MONO_ARCH_GSHAREDVT_SUPPORTED; must never be
 * called at runtime.
 */
gpointer
mono_arm_start_gsharedvt_call (GSharedVtCallInfo *info, gpointer *caller, gpointer *callee, gpointer mrgctx_reg)
{
	g_assert_not_reached ();
	return NULL;
}
573
574 gpointer
mono_arch_get_gsharedvt_trampoline(MonoTrampInfo ** info,gboolean aot)575 mono_arch_get_gsharedvt_trampoline (MonoTrampInfo **info, gboolean aot)
576 {
577 *info = NULL;
578 return NULL;
579 }
580
581
582 #endif
583