/* CMSE wrapper function used to save, clear and restore callee-saved registers
   around calls to functions with the cmse_nonsecure_call attribute.
3
4   Copyright (C) 2016-2018 Free Software Foundation, Inc.
5   Contributed by ARM Ltd.
6
7   This file is free software; you can redistribute it and/or modify it
8   under the terms of the GNU General Public License as published by the
9   Free Software Foundation; either version 3, or (at your option) any
10   later version.
11
12   This file is distributed in the hope that it will be useful, but
13   WITHOUT ANY WARRANTY; without even the implied warranty of
14   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15   General Public License for more details.
16
17   Under Section 7 of GPL version 3, you are granted additional
18   permissions described in the GCC Runtime Library Exception, version
19   3.1, as published by the Free Software Foundation.
20
21   You should have received a copy of the GNU General Public License and
22   a copy of the GCC Runtime Library Exception along with this program;
23   see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
24   <http://www.gnu.org/licenses/>.  */
25
/* __gnu_cmse_nonsecure_call: trampoline emitted calls from GCC for the
   Armv8-M Security Extensions (CMSE).  On entry r4 holds the address of
   the non-secure function to call; GCC-generated code has already cleared
   the unused caller-saved registers.  This wrapper saves the callee-saved
   state, scrubs registers so no secure data leaks across the security
   boundary, transfers control with BLXNS, and restores the saved state on
   return.  Registers are scrubbed by writing r4 into them: the target
   address is a value the non-secure callee necessarily already knows, so
   it discloses nothing.  */
.syntax unified
#ifdef __ARM_PCS_VFP
/* Hard-float ABI: pick an FPU directive matching the precision GCC was
   configured for (__ARM_FP bit 3 => double precision available).  */
# if __ARM_FP & 0x8
	.fpu fpv5-d16
# else
	.fpu fpv4-sp-d16
# endif
#endif

.thumb
.global __gnu_cmse_nonsecure_call
__gnu_cmse_nonsecure_call:
#if defined(__ARM_ARCH_8M_MAIN__)
/* Armv8-M Mainline: Thumb-2 is available, so high registers can be pushed
   and scrubbed directly.  */
push	    {r5-r11,lr}
/* Scrub the callee-saved core registers and ip with r4.  r5 and r6 are
   intentionally left alone here: they are needed as scratch below and are
   scrubbed last, just before the BLXNS.  */
mov	    r7, r4
mov	    r8, r4
mov	    r9, r4
mov	    r10, r4
mov	    r11, r4
mov	    ip, r4

/* Save and clear callee-saved registers only if we are dealing with hard float
   ABI.  The unused caller-saved registers have already been cleared by GCC
   generated code.  */
#ifdef __ARM_PCS_VFP
vpush.f64   {d8-d15}
mov	    r5, #0
vmov	    d8, r5, r5	  /* Zero d8; d9-d15 are cleared from it below.  */
#if __ARM_FP & 0x04
/* Single precision available: clear d9-d15 through their s-register
   aliases s18-s31.  */
vmov	    s18, s19, r5, r5
vmov	    s20, s21, r5, r5
vmov	    s22, s23, r5, r5
vmov	    s24, s25, r5, r5
vmov	    s26, s27, r5, r5
vmov	    s28, s29, r5, r5
vmov	    s30, s31, r5, r5
#elif __ARM_FP & 0x08
/* Double precision only: propagate the zeroed d8 into d9-d15.  */
vmov.f64    d9, d8
vmov.f64    d10, d8
vmov.f64    d11, d8
vmov.f64    d12, d8
vmov.f64    d13, d8
vmov.f64    d14, d8
vmov.f64    d15, d8
#else
#error "Half precision implementation not supported."
#endif
/* Clear the cumulative exception-status bits (0-4,7) and the
   condition code bits (28-31) of the FPSCR.
   Keep-mask = 0x0FFFFF60: movt #4095 (0x0FFF) : movw #65376 (0xFF60).  */
vmrs	    r5, fpscr
movw	    r6, #65376
movt	    r6, #4095
ands	    r5, r6
vmsr	    fpscr, r5

/* We are not dealing with hard float ABI, so we can safely use the vlstm and
   vlldm instructions without needing to preserve the registers used for
   argument passing.  */
#else
sub	    sp, sp, #0x88 /* Reserve stack space to save all floating point
			     registers, including FPSCR.  */
vlstm	    sp		  /* Lazy store and clearance of d0-d15 and FPSCR.  */
#endif /* __ARM_PCS_VFP */

/* Make sure to clear the 'GE' bits of the APSR register if 32-bit SIMD
   instructions are available.  */
#if defined(__ARM_FEATURE_SIMD32)
msr	    APSR_nzcvqg, r4
#else
msr	    APSR_nzcvq, r4
#endif

/* Scrub the last two scratch registers, then make the non-secure call.
   Execution resumes here when the non-secure function returns.  */
mov	    r5, r4
mov	    r6, r4
blxns	    r4

#ifdef __ARM_PCS_VFP
vpop.f64    {d8-d15}
#else
vlldm	    sp		  /* Lazy restore of d0-d15 and FPSCR.  */
add	    sp, sp, #0x88 /* Free space used to save floating point registers.  */
#endif /* __ARM_PCS_VFP */

pop	    {r5-r11, pc}

#elif defined (__ARM_ARCH_8M_BASE__)
/* Armv8-M Baseline: Thumb-1 PUSH/POP cannot encode high registers, so
   r8-r11 are shuffled through the low registers r5-r7 to be saved,
   scrubbed and restored.  Baseline has no FPU, so no FP state handling.  */
push	    {r5-r7, lr}
mov	    r5, r8
mov	    r6, r9
mov	    r7, r10
push	    {r5-r7}
mov	    r5, r11
push	    {r5}
/* Scrub all callee-saved registers and ip with r4 (the target address,
   already known to the non-secure callee), then clear the flags.  */
mov	    r5, r4
mov	    r6, r4
mov	    r7, r4
mov	    r8, r4
mov	    r9, r4
mov	    r10, r4
mov	    r11, r4
mov	    ip, r4
msr	    APSR_nzcvq, r4
blxns	    r4
/* Restore r11, then r8-r10, in the reverse order of the saves above.  */
pop	    {r5}
mov	    r11, r5
pop	    {r5-r7}
mov	    r10, r7
mov	    r9, r6
mov	    r8, r5
pop	    {r5-r7, pc}

#else
#error "This should only be used for armv8-m base- and mainline."
#endif
140