#------------------------------------------------------------------------------
#
# Copyright (c) 2008 - 2010, Apple Inc. All rights reserved.<BR>
# Copyright (c) 2011 - 2014, ARM Limited. All rights reserved.
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution.  The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
#------------------------------------------------------------------------------

#include <Chipset/AArch64.h>
#include <AsmMacroIoLibV8.h>

.text
.align 3

GCC_ASM_EXPORT (ArmInvalidateInstructionCache)
GCC_ASM_EXPORT (ArmInvalidateDataCacheEntryByMVA)
GCC_ASM_EXPORT (ArmCleanDataCacheEntryByMVA)
GCC_ASM_EXPORT (ArmCleanInvalidateDataCacheEntryByMVA)
GCC_ASM_EXPORT (ArmInvalidateDataCacheEntryBySetWay)
GCC_ASM_EXPORT (ArmCleanDataCacheEntryBySetWay)
GCC_ASM_EXPORT (ArmCleanInvalidateDataCacheEntryBySetWay)
GCC_ASM_EXPORT (ArmDrainWriteBuffer)
GCC_ASM_EXPORT (ArmEnableMmu)
GCC_ASM_EXPORT (ArmDisableMmu)
GCC_ASM_EXPORT (ArmDisableCachesAndMmu)
GCC_ASM_EXPORT (ArmMmuEnabled)
GCC_ASM_EXPORT (ArmEnableDataCache)
GCC_ASM_EXPORT (ArmDisableDataCache)
GCC_ASM_EXPORT (ArmEnableInstructionCache)
GCC_ASM_EXPORT (ArmDisableInstructionCache)
GCC_ASM_EXPORT (ArmDisableAlignmentCheck)
GCC_ASM_EXPORT (ArmEnableAlignmentCheck)
GCC_ASM_EXPORT (ArmEnableBranchPrediction)
GCC_ASM_EXPORT (ArmDisableBranchPrediction)
GCC_ASM_EXPORT (AArch64AllDataCachesOperation)
GCC_ASM_EXPORT (AArch64PerformPoUDataCacheOperation)
GCC_ASM_EXPORT (ArmDataMemoryBarrier)
GCC_ASM_EXPORT (ArmDataSyncronizationBarrier)
GCC_ASM_EXPORT (ArmInstructionSynchronizationBarrier)
GCC_ASM_EXPORT (ArmWriteVBar)
GCC_ASM_EXPORT (ArmReadVBar)
GCC_ASM_EXPORT (ArmEnableVFP)
GCC_ASM_EXPORT (ArmCallWFI)
GCC_ASM_EXPORT (ArmInvalidateInstructionAndDataTlb)
GCC_ASM_EXPORT (ArmReadMpidr)
GCC_ASM_EXPORT (ArmReadTpidrurw)
GCC_ASM_EXPORT (ArmWriteTpidrurw)
GCC_ASM_EXPORT (ArmIsArchTimerImplemented)
GCC_ASM_EXPORT (ArmReadIdPfr0)
GCC_ASM_EXPORT (ArmReadIdPfr1)
GCC_ASM_EXPORT (ArmWriteHcr)
GCC_ASM_EXPORT (ArmReadCurrentEL)

.set CTRL_M_BIT,      (1 << 0)
.set CTRL_A_BIT,      (1 << 1)
.set CTRL_C_BIT,      (1 << 2)
.set CTRL_I_BIT,      (1 << 12)
.set CTRL_V_BIT,      (1 << 13)   // AArch32 SCTLR.V (high vectors); unused on AArch64
.set CPACR_VFP_BITS,  (3 << 20)

ASM_PFX(ArmInvalidateDataCacheEntryByMVA):
  dc      ivac, x0    // Invalidate single data cache line
  dsb     sy
  isb
  ret


ASM_PFX(ArmCleanDataCacheEntryByMVA):
  dc      cvac, x0    // Clean single data cache line
  dsb     sy
  isb
  ret


ASM_PFX(ArmCleanInvalidateDataCacheEntryByMVA):
  dc      civac, x0   // Clean and invalidate single data cache line
  dsb     sy
  isb
  ret

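// A hedged usage sketch (C): callers typically maintain a whole buffer by
// walking it one cache line at a time; LineLength here stands in for the
// data cache line size (e.g. from ArmDataCacheLineLength ()):
//   for (Addr = Base; Addr < Base + Size; Addr += LineLength) {
//     ArmCleanInvalidateDataCacheEntryByMVA (Addr);
//   }
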
ASM_PFX(ArmInvalidateDataCacheEntryBySetWay):
  dc      isw, x0     // Invalidate this line
  dsb     sy
  isb
  ret


ASM_PFX(ArmCleanInvalidateDataCacheEntryBySetWay):
  dc      cisw, x0    // Clean and Invalidate this line
  dsb     sy
  isb
  ret


ASM_PFX(ArmCleanDataCacheEntryBySetWay):
  dc      csw, x0     // Clean this line
  dsb     sy
  isb
  ret

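// Note on the *BySetWay operand (a hedged reading of the DC xSW format):
// x0 must arrive pre-packed, roughly
//   x0 = (Way << (32 - log2(Ways))) | (Set << log2(LineLen)) | (Level << 1)
// The loops in AArch64AllDataCachesOperation below build exactly this value.
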
ASM_PFX(ArmInvalidateInstructionCache):
  ic      iallu       // Invalidate entire instruction cache
  dsb     sy
  isb
  ret


ASM_PFX(ArmEnableMmu):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1       // Read System control register EL1
   b       4f
2: mrs     x0, sctlr_el2       // Read System control register EL2
   b       4f
3: mrs     x0, sctlr_el3       // Read System control register EL3
4: orr     x0, x0, #CTRL_M_BIT // Set MMU enable bit
   EL1_OR_EL2_OR_EL3(x1)
1: tlbi    vmalle1
   isb
   msr     sctlr_el1, x0       // Write back
   b       4f
2: tlbi    alle2
   isb
   msr     sctlr_el2, x0       // Write back
   b       4f
3: tlbi    alle3
   isb
   msr     sctlr_el3, x0       // Write back
4: dsb     sy
   isb
   ret


ASM_PFX(ArmDisableMmu):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Read System Control Register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Read System Control Register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Read System Control Register EL3
4: and     x0, x0, #~CTRL_M_BIT  // Clear MMU enable bit
   EL1_OR_EL2_OR_EL3(x1)
1: msr     sctlr_el1, x0        // Write back
   tlbi    vmalle1
   b       4f
2: msr     sctlr_el2, x0        // Write back
   tlbi    alle2
   b       4f
3: msr     sctlr_el3, x0        // Write back
   tlbi    alle3
4: dsb     sy
   isb
   ret


ASM_PFX(ArmDisableCachesAndMmu):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Get control register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Get control register EL3
4: mov     x1, #~(CTRL_M_BIT | CTRL_C_BIT | CTRL_I_BIT)  // Disable MMU, D & I caches
   and     x0, x0, x1
   EL1_OR_EL2_OR_EL3(x1)
1: msr     sctlr_el1, x0        // Write back control register
   b       4f
2: msr     sctlr_el2, x0        // Write back control register
   b       4f
3: msr     sctlr_el3, x0        // Write back control register
4: dsb     sy
   isb
   ret


ASM_PFX(ArmMmuEnabled):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Get control register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Get control register EL3
4: and     x0, x0, #CTRL_M_BIT
   ret
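
// Usage sketch (C, hedged): the return value is the masked SCTLR.M bit, so
// treat it as a boolean rather than comparing against 1:
//   if (ArmMmuEnabled ()) {
//     // translation is on; MVA-based cache maintenance applies
//   }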


ASM_PFX(ArmEnableDataCache):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Get control register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Get control register EL3
4: orr     x0, x0, #CTRL_C_BIT  // Set C bit
   EL1_OR_EL2_OR_EL3(x1)
1: msr     sctlr_el1, x0        // Write back control register
   b       4f
2: msr     sctlr_el2, x0        // Write back control register
   b       4f
3: msr     sctlr_el3, x0        // Write back control register
4: dsb     sy
   isb
   ret


ASM_PFX(ArmDisableDataCache):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Get control register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Get control register EL3
4: and     x0, x0, #~CTRL_C_BIT  // Clear C bit
   EL1_OR_EL2_OR_EL3(x1)
1: msr     sctlr_el1, x0        // Write back control register
   b       4f
2: msr     sctlr_el2, x0        // Write back control register
   b       4f
3: msr     sctlr_el3, x0        // Write back control register
4: dsb     sy
   isb
   ret


ASM_PFX(ArmEnableInstructionCache):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Get control register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Get control register EL3
4: orr     x0, x0, #CTRL_I_BIT  // Set I bit
   EL1_OR_EL2_OR_EL3(x1)
1: msr     sctlr_el1, x0        // Write back control register
   b       4f
2: msr     sctlr_el2, x0        // Write back control register
   b       4f
3: msr     sctlr_el3, x0        // Write back control register
4: dsb     sy
   isb
   ret


ASM_PFX(ArmDisableInstructionCache):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Get control register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Get control register EL3
4: and     x0, x0, #~CTRL_I_BIT  // Clear I bit
   EL1_OR_EL2_OR_EL3(x1)
1: msr     sctlr_el1, x0        // Write back control register
   b       4f
2: msr     sctlr_el2, x0        // Write back control register
   b       4f
3: msr     sctlr_el3, x0        // Write back control register
4: dsb     sy
   isb
   ret


ASM_PFX(ArmEnableAlignmentCheck):
   EL1_OR_EL2(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       3f
2: mrs     x0, sctlr_el2        // Get control register EL2
3: orr     x0, x0, #CTRL_A_BIT  // Set A (alignment check) bit
   EL1_OR_EL2(x1)
1: msr     sctlr_el1, x0        // Write back control register
   b       3f
2: msr     sctlr_el2, x0        // Write back control register
3: dsb     sy
   isb
   ret


ASM_PFX(ArmDisableAlignmentCheck):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Get control register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Get control register EL3
4: and     x0, x0, #~CTRL_A_BIT  // Clear A (alignment check) bit
   EL1_OR_EL2_OR_EL3(x1)
1: msr     sctlr_el1, x0        // Write back control register
   b       4f
2: msr     sctlr_el2, x0        // Write back control register
   b       4f
3: msr     sctlr_el3, x0        // Write back control register
4: dsb     sy
   isb
   ret


// Branch prediction is always enabled on AArch64; any further control is
// implementation specific. Kept as a stub for C compatibility for now.
ASM_PFX(ArmEnableBranchPrediction):
  ret


// Branch prediction is always enabled on AArch64; any further control is
// implementation specific. Kept as a stub for C compatibility for now.
ASM_PFX(ArmDisableBranchPrediction):
  ret


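// A hedged C view of the two routines below (the authoritative typedef and
// prototypes live in the ArmLib headers; the names used here are assumptions):
//   typedef VOID (*AARCH64_CACHE_OPERATION)(UINTN SetWayFormat);
//   VOID AArch64AllDataCachesOperation (AARCH64_CACHE_OPERATION Operation);
//   VOID AArch64PerformPoUDataCacheOperation (AARCH64_CACHE_OPERATION Operation);
// The function pointer arrives in x0 and is invoked once per set/way/level
// with x0 holding the packed set/way operand.
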
ASM_PFX(AArch64AllDataCachesOperation):
// We can use regs x0-x7 and x9-x15 without having to save/restore.
// Save our link register on the stack.
  str   x30, [sp, #-0x10]!
  mov   x1, x0                  // Save the cache operation function pointer in x1
  mrs   x6, clidr_el1           // Read EL1 CLIDR
  and   x3, x6, #0x7000000      // Mask out all but Level of Coherency (LoC)
  lsr   x3, x3, #23             // Shift right by 23 so x3 holds LoC << 1, which
                                // matches the level field of CSSELR and the Set/Way operand.
  cbz   x3, L_Finished          // No need to clean if LoC is 0
  mov   x10, #0                 // Start clean at cache level 0
  b     Loop1

ASM_PFX(AArch64PerformPoUDataCacheOperation):
// We can use regs x0-x7 and x9-x15 without having to save/restore.
// Save our link register on the stack.
  str   x30, [sp, #-0x10]!
  mov   x1, x0                  // Save the cache operation function pointer in x1
  mrs   x6, clidr_el1           // Read EL1 CLIDR
  and   x3, x6, #0x38000000     // Mask out all but Level of Unification (LoUU)
  lsr   x3, x3, #26             // Shift right by 26 so x3 holds LoUU << 1, which
                                // matches the level field of CSSELR and the Set/Way operand.
  cbz   x3, L_Finished          // No need to clean if LoUU is 0
  mov   x10, #0                 // Start clean at cache level 0

Loop1:
  add   x2, x10, x10, lsr #1    // Work out 3x cache level for the CLIDR Ctype shift
  lsr   x12, x6, x2             // bottom 3 bits are the Cache type for this level
  and   x12, x12, #7            // get those 3 bits alone
  cmp   x12, #2                 // what cache at this level?
  b.lt  L_Skip                  // no cache or only instruction cache at this level
  msr   csselr_el1, x10         // write the Cache Size selection register with current level (CSSELR)
  isb                           // isb to sync the change to the CacheSizeID reg
  mrs   x12, ccsidr_el1         // read the current Cache Size ID register (CCSIDR)
  and   x2, x12, #0x7           // extract the line length field
  add   x2, x2, #4              // add 4 for the line length offset (log2 of 16 bytes)
  mov   x4, #0x400
  sub   x4, x4, #1
  and   x4, x4, x12, lsr #3     // x4 is the max number of the way size (right aligned)
  clz   w5, w4                  // w5 is the bit position of the way size increment
  mov   x7, #0x00008000
  sub   x7, x7, #1
  and   x7, x7, x12, lsr #13    // x7 is the max number of the index size (right aligned)

Loop2:
  mov   x9, x4                  // x9 working copy of the max way size (right aligned)

Loop3:
  lsl   x11, x9, x5
  orr   x0, x10, x11            // factor in the way number and cache level
  lsl   x11, x7, x2
  orr   x0, x0, x11             // factor in the index (set) number

  blr   x1                      // Jump to the requested cache operation

  subs  x9, x9, #1              // decrement the way number
  b.ge  Loop3
  subs  x7, x7, #1              // decrement the index
  b.ge  Loop2
L_Skip:
  add   x10, x10, #2            // increment the cache level (CSSELR holds level << 1)
  cmp   x3, x10
  b.gt  Loop1

L_Finished:
  dsb   sy
  isb
  ldr   x30, [sp], #0x10
  ret

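// Worked example (hedged, for a hypothetical 32 KB, 4-way, 64-byte-line
// level-0 cache):
//   CCSIDR.LineSize      = 2   -> x2 = 2 + 4 = 6 (log2 of 64 bytes)
//   CCSIDR.Associativity = 3   -> x4 = 3, w5 = clz(3) = 30
//   CCSIDR.NumSets       = 127 -> x7 = 127
// Each Loop3 pass builds x0 = (Way << 30) | (Set << 6) | (Level << 1) and
// calls the operation in x1, covering all 4 x 128 lines at this level.
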

ASM_PFX(ArmDataMemoryBarrier):
  dmb   sy
  ret


// Note: "Syncronization" is a legacy misspelling kept to match existing C
// callers; ArmDrainWriteBuffer is an alias for the same DSB.
ASM_PFX(ArmDataSyncronizationBarrier):
ASM_PFX(ArmDrainWriteBuffer):
  dsb   sy
  ret


ASM_PFX(ArmInstructionSynchronizationBarrier):
  isb
  ret


ASM_PFX(ArmWriteVBar):
   EL1_OR_EL2_OR_EL3(x1)
1: msr   vbar_el1, x0            // Set the Address of the EL1 Vector Table in the VBAR register
   b     4f
2: msr   vbar_el2, x0            // Set the Address of the EL2 Vector Table in the VBAR register
   b     4f
3: msr   vbar_el3, x0            // Set the Address of the EL3 Vector Table in the VBAR register
4: isb
   ret

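// Alignment note: VBAR_ELx[10:0] are RES0, so the vector table passed in
// must be 2 KB aligned. A hedged caller-side sketch:
//   ASSERT (((UINTN)VectorBase & 0x7FF) == 0);
//   ArmWriteVBar ((UINTN)VectorBase);
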
ASM_PFX(ArmReadVBar):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs   x0, vbar_el1            // Get the Address of the EL1 Vector Table from the VBAR register
   ret
2: mrs   x0, vbar_el2            // Get the Address of the EL2 Vector Table from the VBAR register
   ret
3: mrs   x0, vbar_el3            // Get the Address of the EL3 Vector Table from the VBAR register
   ret


ASM_PFX(ArmEnableVFP):
  // Check whether floating-point is implemented in the processor.
  mov   x1, x30                 // Save LR
  bl    ArmReadIdPfr0           // Read EL1 Processor Feature Register (PFR0)
  mov   x30, x1                 // Restore LR
  ands  x0, x0, #AARCH64_PFR0_FP // Extract bits indicating VFP implementation
  cmp   x0, #0                  // VFP is implemented if '0'.
  b.ne  4f                      // Exit if VFP not implemented.
  // VFP is implemented.
  // Make sure VFP exceptions are not trapped (to any exception level).
  mrs   x0, cpacr_el1           // Read EL1 Coprocessor Access Control Register (CPACR)
  orr   x0, x0, #CPACR_VFP_BITS // Disable VFP traps to EL1
  msr   cpacr_el1, x0           // Write back EL1 Coprocessor Access Control Register (CPACR)
  mov   x1, #AARCH64_CPTR_TFP   // TFP Bit for trapping VFP Exceptions
  EL1_OR_EL2_OR_EL3(x2)
1: ret                          // Not configurable in EL1
2: mrs   x0, cptr_el2           // Disable VFP traps to EL2
  bic   x0, x0, x1
  msr   cptr_el2, x0
  ret
3: mrs   x0, cptr_el3           // Disable VFP traps to EL3
  bic   x0, x0, x1
  msr   cptr_el3, x0
4: ret
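
// Encoding note (hedged): CPACR_VFP_BITS above is CPACR_EL1.FPEN, bits
// [21:20]; writing 0b11 stops EL0/EL1 trapping of FP/SIMD accesses, and
// AARCH64_CPTR_TFP is the CPTR_ELx.TFP trap bit cleared for EL2/EL3.
// C callers simply run:
//   ArmEnableVFP ();   // before the first VFP/SIMD instruction executes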


ASM_PFX(ArmCallWFI):
  wfi
  ret

ASM_PFX(ArmInvalidateInstructionAndDataTlb):
   EL1_OR_EL2_OR_EL3(x0)
1: tlbi  vmalle1
   b     4f
2: tlbi  alle2
   b     4f
3: tlbi  alle3
4: dsb   sy
   isb
   ret


ASM_PFX(ArmReadMpidr):
  mrs   x0, mpidr_el1           // read EL1 MPIDR
  ret
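
// The raw MPIDR_EL1 value is returned; affinity fields sit at Aff0 [7:0],
// Aff1 [15:8], Aff2 [23:16], Aff3 [39:32]. A hedged caller-side sketch:
//   CoreId    = ArmReadMpidr () & 0xFF;
//   ClusterId = (ArmReadMpidr () >> 8) & 0xFF;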


// Keep the old function names for C compatibility for now. Change later?
ASM_PFX(ArmReadTpidrurw):
  mrs   x0, tpidr_el0           // read tpidr_el0 (v7 TPIDRURW) -> (v8 TPIDR_EL0)
  ret


// Keep the old function names for C compatibility for now. Change later?
ASM_PFX(ArmWriteTpidrurw):
  msr   tpidr_el0, x0           // write tpidr_el0 (v7 TPIDRURW) -> (v8 TPIDR_EL0)
  ret


// Arch timers are mandatory on AArch64
ASM_PFX(ArmIsArchTimerImplemented):
  mov   x0, #1
  ret


ASM_PFX(ArmReadIdPfr0):
  mrs   x0, id_aa64pfr0_el1   // Read ID_AA64PFR0 Register
  ret


// Q: id_aa64pfr1_el1 is not defined yet. What does this function want to access?
// A: It is used to set up the arch timer: check whether we have the security
//    extensions and permission to configure things.
//    See: ArmPkg/Library/ArmArchTimerLib/AArch64/ArmArchTimerLib.c
//    Not defined yet, but keep it here for now; it should read as all zeros.
ASM_PFX(ArmReadIdPfr1):
  mrs   x0, id_aa64pfr1_el1   // Read ID_AA64PFR1 Register
  ret

// VOID ArmWriteHcr(UINTN Hcr)
ASM_PFX(ArmWriteHcr):
  msr   hcr_el2, x0        // Write the passed HCR value
  ret
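
// Access note: hcr_el2 is only accessible from EL2 or EL3; executing this
// at EL1 traps. A hedged caller-side guard:
//   if ((ArmReadCurrentEL () >> 2) >= 2) {
//     ArmWriteHcr (HcrValue);
//   }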

// UINTN ArmReadCurrentEL(VOID)
ASM_PFX(ArmReadCurrentEL):
  mrs   x0, CurrentEL
  ret
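
// CurrentEL holds the exception level in bits [3:2]; a hedged decode:
//   UINTN El = (ArmReadCurrentEL () >> 2) & 3;   // 0..3 for EL0..EL3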

ASM_FUNCTION_REMOVE_IF_UNREFERENCED