/*
 * Copyright 2018-2020 NXP
 *
 * SPDX-License-Identifier: BSD-3-Clause
 *
 */

#include <asm_macros.S>
#include <bl31_data.h>

.global el2_2_aarch32
.global prefetch_disable

#define  SPSR_EL3_M4     0x10
#define  SPSR_EL_MASK    0xC
#define  SPSR_EL2        0x8
#define  SCR_EL3_4_EL2_AARCH32  0x131
#define  SPSR32_EL2_LE          0x1DA
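/* SCR_EL3_4_EL2_AARCH32 (0x131): NS=1, RES1 bits [5:4] set, HCE=1,
 * RW=0 so the next lower EL (EL2) executes in AArch32.
 * SPSR32_EL2_LE (0x1DA): AArch32 Hyp mode (M[4:0]=0x1A), A/I/F masked,
 * ARM state, little-endian.
 */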

#define  MIDR_PARTNUM_START      4
#define  MIDR_PARTNUM_WIDTH      12
#define  MIDR_PARTNUM_A53        0xD03
#define  MIDR_PARTNUM_A57        0xD07
#define  MIDR_PARTNUM_A72        0xD08
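/* primary part numbers from MIDR_EL1[15:4] for Cortex-A53/A57/A72 */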

/*
 * uint64_t el2_2_aarch32(u_register_t smc_id,
 *                   u_register_t start_addr,
 *                   u_register_t parm1,
 *                   u_register_t parm2)
 * This function switches the execution state of EL2 from AArch64
 * to AArch32.
 * Note: MUST be called from EL2 @ AArch64
 * in:  x0 = smc function id
 *      x1 = start address for EL2 @ AArch32
 *      x2 = first parameter to pass to EL2 @ AArch32
 *      x3 = second parameter to pass to EL2 @ AArch32
 * out: x0 = 0,  on success
 *      x0 = -1, on failure
 * uses x0, x1, x2, x3
 */
func el2_2_aarch32

	/* check that the caller is EL2 @ AArch64 - error return if not */
	mrs  x0, spsr_el3
	/* see if we were called from AArch32 */
	tst  x0, #SPSR_EL3_M4
	b.ne 2f

	/* see if we were called from EL2 */
	and   x0, x0, #SPSR_EL_MASK
	cmp   x0, #SPSR_EL2
	b.ne  2f
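	/* the caller is confirmed to be EL2 @ AArch64 - set up the
	 * AArch32 re-entry of EL2
	 */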

	/* set ELR_EL3 */
	msr  elr_el3, x1

	/* set scr_el3 */
	mov  x0, #SCR_EL3_4_EL2_AARCH32
	msr  scr_el3, x0

	/* set sctlr_el2 */
	ldr  x1, =SCTLR_EL2_RES1
	msr  sctlr_el2, x1
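	/* with only the RES1 bits set in SCTLR_EL2, EL2 is re-entered
	 * with its MMU and caches disabled
	 */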

	/* set spsr_el3 */
	ldr  x0, =SPSR32_EL2_LE
	msr  spsr_el3, x0

	/* x2 = parm1
	 * x3 = parm2
	 */

	/* set the parameters to be passed through to EL2 @ AArch32 */
	mov  x1, x2
	mov  x2, x3

	/* x1 = parm1
	 * x2 = parm2
	 */

	mov  x0, xzr
	/* invalidate the icache */
	ic iallu
	dsb sy
	isb
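	/* the dsb/isb ensure the icache invalidation has completed
	 * before the eret switches EL2 to AArch32
	 */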
	b  1f
2:
	/* error return */
	mvn  x0, xzr
	ret
1:
	eret
endfunc el2_2_aarch32
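
/*
 * Caller-side sketch, for illustration only: an EL2 @ AArch64 hypervisor
 * would issue the SMC that the platform routes to el2_2_aarch32. The
 * function id and entry label below are placeholders - use the id
 * assigned by the platform's SMC service and the real AArch32 entry point.
 *
 *	mov  x0, #EL2_2_AARCH32_SMC_ID	// placeholder SMC function id
 *	adr  x1, el2_aarch32_entry	// placeholder AArch32 entry point for EL2
 *	mov  x2, #0			// first parameter for the entry point
 *	mov  x3, #0			// second parameter for the entry point
 *	smc  #0				// on success, execution resumes at x1
 *					// in AArch32 and does not return here
 */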

/*
 * int prefetch_disable(u_register_t smc_id, u_register_t mask)
 * This function marks the cores which need to have prefetch disabled.
 * Secondary cores have prefetch disabled when they are released from
 * reset; the bootcore has prefetch disabled when this call is made.
 * in:  x0 = function id
 *      x1 = core mask, where bit[0]=core0, bit[1]=core1, etc
 *           if a bit in the mask is set, then prefetch is disabled for
 *           that core
 * out: x0 = SMC_SUCCESS
 */
func prefetch_disable
	stp  x4, x30, [sp, #-16]!

	mov   x3, x1

	/* x1 = core prefetch disable mask */
	/* x3 = core prefetch disable mask */

	/* store the mask */
	mov   x0, #PREFETCH_DIS_OFFSET
	bl   _set_global_data
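	/* the mask is saved via _set_global_data so that secondary cores
	 * can check it when they are released from reset
	 */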

	/* x3 = core prefetch disable mask */

	/* see if we need to disable prefetch on THIS core */
	bl   plat_my_core_mask

	/* x0 = core mask lsb */
	/* x3 = core prefetch disable mask */

	tst   x3, x0
	b.eq  1f

	/* read midr_el1 */
	mrs   x1, midr_el1

	/* x1 = midr_el1 */

	mov   x0, xzr
	bfxil x0, x1, #MIDR_PARTNUM_START, #MIDR_PARTNUM_WIDTH
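	/* bfxil extracts MIDR_EL1[15:4] into x0[11:0] */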

	/* x0 = part number (a53, a57, a72, etc) */

	/* branch based on the cpu part number */
	cmp   x0, #MIDR_PARTNUM_A57
	b.eq  1f
	cmp   x0, #MIDR_PARTNUM_A72
	b.ne  1f
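	/* only the A72 has a load-store prefetch disable hook here;
	 * A57 and any other part number branch straight to the exit
	 */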

	bl    _disable_ldstr_pfetch_A72
	b     1f
1:
	ldp   x4, x30, [sp], #16
	mov   x0, xzr
	ret
endfunc prefetch_disable
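
/*
 * Caller-side sketch, for illustration only: a caller asking for prefetch
 * to be disabled on cores 2 and 3 would pass a mask of 0xC
 * (bit[2] | bit[3]) in x1. The function id below is a placeholder - use
 * the id assigned by the platform's SMC service.
 *
 *	mov  x0, #PREFETCH_DISABLE_SMC_ID	// placeholder SMC function id
 *	mov  x1, #0xC				// disable prefetch on cores 2 and 3
 *	smc  #0					// x0 = SMC_SUCCESS on return
 */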