/*
 * SPDX-License-Identifier: BSD-2-Clause
 *
 * Copyright (c) 2019 Western Digital Corporation or its affiliates.
 *
 * Authors:
 *   Anup Patel <anup.patel@wdc.com>
 */
9
10 #include <sbi/riscv_asm.h>
11 #include <sbi/riscv_encoding.h>
12 #include <sbi/riscv_unpriv.h>
13 #include <sbi/riscv_fp.h>
14 #include <sbi/sbi_error.h>
15 #include <sbi/sbi_misaligned_ldst.h>
16 #include <sbi/sbi_trap.h>
17
18 union reg_data {
19 u8 data_bytes[8];
20 ulong data_ulong;
21 u64 data_u64;
22 };
23
sbi_misaligned_load_handler(u32 hartid,ulong mcause,struct sbi_trap_regs * regs,struct sbi_scratch * scratch)24 int sbi_misaligned_load_handler(u32 hartid, ulong mcause,
25 struct sbi_trap_regs *regs,
26 struct sbi_scratch *scratch)
27 {
28 union reg_data val;
29 struct unpriv_trap uptrap;
30 ulong addr = csr_read(CSR_MTVAL);
31 int i, fp = 0, shift = 0, len = 0;
32 #if __riscv_xlen == 32
33 bool virt = (regs->mstatusH & MSTATUSH_MPV) ? TRUE : FALSE;
34 #else
35 bool virt = (regs->mstatus & MSTATUS_MPV) ? TRUE : FALSE;
36 #endif
37 ulong insn = get_insn(regs->mepc, virt, scratch, &uptrap);
38
39 if (uptrap.cause)
40 return sbi_trap_redirect(regs, scratch, regs->mepc,
41 uptrap.cause, uptrap.tval);
42
43 if ((insn & INSN_MASK_LW) == INSN_MATCH_LW) {
44 len = 4;
45 shift = 8 * (sizeof(ulong) - len);
46 #if __riscv_xlen == 64
47 } else if ((insn & INSN_MASK_LD) == INSN_MATCH_LD) {
48 len = 8;
49 shift = 8 * (sizeof(ulong) - len);
50 } else if ((insn & INSN_MASK_LWU) == INSN_MATCH_LWU) {
51 len = 4;
52 #endif
53 #ifdef __riscv_flen
54 } else if ((insn & INSN_MASK_FLD) == INSN_MATCH_FLD) {
55 fp = 1;
56 len = 8;
57 } else if ((insn & INSN_MASK_FLW) == INSN_MATCH_FLW) {
58 fp = 1;
59 len = 4;
60 #endif
61 } else if ((insn & INSN_MASK_LH) == INSN_MATCH_LH) {
62 len = 2;
63 shift = 8 * (sizeof(ulong) - len);
64 } else if ((insn & INSN_MASK_LHU) == INSN_MATCH_LHU) {
65 len = 2;
66 #ifdef __riscv_compressed
67 #if __riscv_xlen >= 64
68 } else if ((insn & INSN_MASK_C_LD) == INSN_MATCH_C_LD) {
69 len = 8;
70 shift = 8 * (sizeof(ulong) - len);
71 insn = RVC_RS2S(insn) << SH_RD;
72 } else if ((insn & INSN_MASK_C_LDSP) == INSN_MATCH_C_LDSP &&
73 ((insn >> SH_RD) & 0x1f)) {
74 len = 8;
75 shift = 8 * (sizeof(ulong) - len);
76 #endif
77 } else if ((insn & INSN_MASK_C_LW) == INSN_MATCH_C_LW) {
78 len = 4;
79 shift = 8 * (sizeof(ulong) - len);
80 insn = RVC_RS2S(insn) << SH_RD;
81 } else if ((insn & INSN_MASK_C_LWSP) == INSN_MATCH_C_LWSP &&
82 ((insn >> SH_RD) & 0x1f)) {
83 len = 4;
84 shift = 8 * (sizeof(ulong) - len);
85 #ifdef __riscv_flen
86 } else if ((insn & INSN_MASK_C_FLD) == INSN_MATCH_C_FLD) {
87 fp = 1;
88 len = 8;
89 insn = RVC_RS2S(insn) << SH_RD;
90 } else if ((insn & INSN_MASK_C_FLDSP) == INSN_MATCH_C_FLDSP) {
91 fp = 1;
92 len = 8;
93 #if __riscv_xlen == 32
94 } else if ((insn & INSN_MASK_C_FLW) == INSN_MATCH_C_FLW) {
95 fp = 1;
96 len = 4;
97 insn = RVC_RS2S(insn) << SH_RD;
98 } else if ((insn & INSN_MASK_C_FLWSP) == INSN_MATCH_C_FLWSP) {
99 fp = 1;
100 len = 4;
101 #endif
102 #endif
103 #endif
104 } else
105 return sbi_trap_redirect(regs, scratch, regs->mepc,
106 mcause, addr);
107
108 val.data_u64 = 0;
109 for (i = 0; i < len; i++) {
110 val.data_bytes[i] = load_u8((void *)(addr + i),
111 scratch, &uptrap);
112 if (uptrap.cause)
113 return sbi_trap_redirect(regs, scratch, regs->mepc,
114 uptrap.cause, uptrap.tval);
115 }
116
117 if (!fp)
118 SET_RD(insn, regs, val.data_ulong << shift >> shift);
119 #ifdef __riscv_flen
120 else if (len == 8)
121 SET_F64_RD(insn, regs, val.data_u64);
122 else
123 SET_F32_RD(insn, regs, val.data_ulong);
124 #endif
125
126 regs->mepc += INSN_LEN(insn);
127
128 return 0;
129 }
130
sbi_misaligned_store_handler(u32 hartid,ulong mcause,struct sbi_trap_regs * regs,struct sbi_scratch * scratch)131 int sbi_misaligned_store_handler(u32 hartid, ulong mcause,
132 struct sbi_trap_regs *regs,
133 struct sbi_scratch *scratch)
134 {
135 union reg_data val;
136 struct unpriv_trap uptrap;
137 ulong addr = csr_read(CSR_MTVAL);
138 int i, len = 0;
139 #if __riscv_xlen == 32
140 bool virt = (regs->mstatusH & MSTATUSH_MPV) ? TRUE : FALSE;
141 #else
142 bool virt = (regs->mstatus & MSTATUS_MPV) ? TRUE : FALSE;
143 #endif
144 ulong insn = get_insn(regs->mepc, virt, scratch, &uptrap);
145
146 if (uptrap.cause)
147 return sbi_trap_redirect(regs, scratch, regs->mepc,
148 uptrap.cause, uptrap.tval);
149
150 val.data_ulong = GET_RS2(insn, regs);
151
152 if ((insn & INSN_MASK_SW) == INSN_MATCH_SW) {
153 len = 4;
154 #if __riscv_xlen == 64
155 } else if ((insn & INSN_MASK_SD) == INSN_MATCH_SD) {
156 len = 8;
157 #endif
158 #ifdef __riscv_flen
159 } else if ((insn & INSN_MASK_FSD) == INSN_MATCH_FSD) {
160 len = 8;
161 val.data_u64 = GET_F64_RS2(insn, regs);
162 } else if ((insn & INSN_MASK_FSW) == INSN_MATCH_FSW) {
163 len = 4;
164 val.data_ulong = GET_F32_RS2(insn, regs);
165 #endif
166 } else if ((insn & INSN_MASK_SH) == INSN_MATCH_SH) {
167 len = 2;
168 #ifdef __riscv_compressed
169 #if __riscv_xlen >= 64
170 } else if ((insn & INSN_MASK_C_SD) == INSN_MATCH_C_SD) {
171 len = 8;
172 val.data_ulong = GET_RS2S(insn, regs);
173 } else if ((insn & INSN_MASK_C_SDSP) == INSN_MATCH_C_SDSP &&
174 ((insn >> SH_RD) & 0x1f)) {
175 len = 8;
176 val.data_ulong = GET_RS2C(insn, regs);
177 #endif
178 } else if ((insn & INSN_MASK_C_SW) == INSN_MATCH_C_SW) {
179 len = 4;
180 val.data_ulong = GET_RS2S(insn, regs);
181 } else if ((insn & INSN_MASK_C_SWSP) == INSN_MATCH_C_SWSP &&
182 ((insn >> SH_RD) & 0x1f)) {
183 len = 4;
184 val.data_ulong = GET_RS2C(insn, regs);
185 #ifdef __riscv_flen
186 } else if ((insn & INSN_MASK_C_FSD) == INSN_MATCH_C_FSD) {
187 len = 8;
188 val.data_u64 = GET_F64_RS2S(insn, regs);
189 } else if ((insn & INSN_MASK_C_FSDSP) == INSN_MATCH_C_FSDSP) {
190 len = 8;
191 val.data_u64 = GET_F64_RS2C(insn, regs);
192 #if __riscv_xlen == 32
193 } else if ((insn & INSN_MASK_C_FSW) == INSN_MATCH_C_FSW) {
194 len = 4;
195 val.data_ulong = GET_F32_RS2S(insn, regs);
196 } else if ((insn & INSN_MASK_C_FSWSP) == INSN_MATCH_C_FSWSP) {
197 len = 4;
198 val.data_ulong = GET_F32_RS2C(insn, regs);
199 #endif
200 #endif
201 #endif
202 } else
203 return sbi_trap_redirect(regs, scratch, regs->mepc,
204 mcause, addr);
205
206 for (i = 0; i < len; i++) {
207 store_u8((void *)(addr + i), val.data_bytes[i],
208 scratch, &uptrap);
209 if (uptrap.cause)
210 return sbi_trap_redirect(regs, scratch, regs->mepc,
211 uptrap.cause, uptrap.tval);
212 }
213
214 regs->mepc += INSN_LEN(insn);
215
216 return 0;
217 }
218