xref: /linux/arch/arm/include/asm/cachetype.h (revision 8690bbcf)
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_ARM_CACHETYPE_H
#define __ASM_ARM_CACHETYPE_H

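/*
 * Cache topology flags.  Each bit describes one cache type that the boot
 * code may record in the global 'cacheid' below: VIVT or (non-)aliasing
 * VIPT data caches, and ASID-tagged, aliasing-VIPT or PIPT instruction
 * caches.  (Descriptive note added for clarity; see the predicates below.)
 */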
#define CACHEID_VIVT			(1 << 0)
#define CACHEID_VIPT_NONALIASING	(1 << 1)
#define CACHEID_VIPT_ALIASING		(1 << 2)
#define CACHEID_VIPT			(CACHEID_VIPT_ALIASING|CACHEID_VIPT_NONALIASING)
#define CACHEID_ASID_TAGGED		(1 << 3)
#define CACHEID_VIPT_I_ALIASING		(1 << 4)
#define CACHEID_PIPT			(1 << 5)

extern unsigned int cacheid;

#define cache_is_vivt()			cacheid_is(CACHEID_VIVT)
#define cache_is_vipt()			cacheid_is(CACHEID_VIPT)
#define cache_is_vipt_nonaliasing()	cacheid_is(CACHEID_VIPT_NONALIASING)
#define cache_is_vipt_aliasing()	cacheid_is(CACHEID_VIPT_ALIASING)
#define icache_is_vivt_asid_tagged()	cacheid_is(CACHEID_ASID_TAGGED)
#define icache_is_vipt_aliasing()	cacheid_is(CACHEID_VIPT_I_ALIASING)
#define icache_is_pipt()		cacheid_is(CACHEID_PIPT)

#define cpu_dcache_is_aliasing()	(cache_is_vivt() || cache_is_vipt_aliasing())
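
/*
 * The predicates above read the global 'cacheid', but cacheid_is() below
 * lets them collapse to compile-time constants whenever the target
 * architecture or the cache configuration rules a type out.  A
 * hypothetical caller might use them like this:
 *
 *	if (cache_is_vipt_aliasing())
 *		flush_user_alias(page);    (flush_user_alias() is a made-up helper)
 */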

/*
 * __LINUX_ARM_ARCH__ is the minimum supported CPU architecture.
 * Mask out support which will never be present on newer CPUs:
 * - v6+ is never VIVT
 * - v7+ VIPT never aliases on the D-side
 */
#if __LINUX_ARM_ARCH__ >= 7
#define __CACHEID_ARCH_MIN	(CACHEID_VIPT_NONALIASING |\
				 CACHEID_ASID_TAGGED |\
				 CACHEID_VIPT_I_ALIASING |\
				 CACHEID_PIPT)
#elif __LINUX_ARM_ARCH__ >= 6
#define	__CACHEID_ARCH_MIN	(~CACHEID_VIVT)
#else
#define __CACHEID_ARCH_MIN	(~0)
#endif
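
/*
 * For example, a v7-only build (__LINUX_ARM_ARCH__ == 7) omits
 * CACHEID_VIVT and CACHEID_VIPT_ALIASING from __CACHEID_ARCH_MIN, so the
 * run-time term of cacheid_is() is masked to zero for those cache types
 * at compile time, whatever value 'cacheid' ends up holding.
 */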

/*
 * Mask out support which isn't configured
 */
#if defined(CONFIG_CPU_CACHE_VIVT) && !defined(CONFIG_CPU_CACHE_VIPT)
#define __CACHEID_ALWAYS	(CACHEID_VIVT)
#define __CACHEID_NEVER		(~CACHEID_VIVT)
#elif !defined(CONFIG_CPU_CACHE_VIVT) && defined(CONFIG_CPU_CACHE_VIPT)
#define __CACHEID_ALWAYS	(0)
#define __CACHEID_NEVER		(CACHEID_VIVT)
#else
#define __CACHEID_ALWAYS	(0)
#define __CACHEID_NEVER		(0)
#endif

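/*
 * Test whether any of the cache types in 'mask' are present.  Bits in
 * __CACHEID_ALWAYS are reported unconditionally, while the run-time test
 * against 'cacheid' only survives for bits that the configuration
 * (__CACHEID_NEVER) and the minimum architecture (__CACHEID_ARCH_MIN)
 * allow, so the whole expression can constant-fold either way.
 */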
static inline unsigned int __attribute__((pure)) cacheid_is(unsigned int mask)
{
	return (__CACHEID_ALWAYS & mask) |
	       (~__CACHEID_NEVER & __CACHEID_ARCH_MIN & mask & cacheid);
}

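/*
 * CSSELR (Cache Size Selection Register) encoding, as used by
 * set_csselr() below: bit 0 selects the instruction (1) or data/unified
 * (0) cache, bits [3:1] select the cache level (0 = L1).
 */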
#define CSSELR_ICACHE	1
#define CSSELR_DCACHE	0

#define CSSELR_L1	(0 << 1)
#define CSSELR_L2	(1 << 1)
#define CSSELR_L3	(2 << 1)
#define CSSELR_L4	(3 << 1)
#define CSSELR_L5	(4 << 1)
#define CSSELR_L6	(5 << 1)
#define CSSELR_L7	(6 << 1)

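/*
 * On classic A/R-profile CPUs the cache size registers are accessed
 * through CP15; on V7M they are memory-mapped in the System Control
 * Block, hence the two implementations below.
 */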
#ifndef CONFIG_CPU_V7M
static inline void set_csselr(unsigned int cache_selector)
{
	asm volatile("mcr p15, 2, %0, c0, c0, 0" : : "r" (cache_selector));
}

static inline unsigned int read_ccsidr(void)
{
	unsigned int val;

	asm volatile("mrc p15, 1, %0, c0, c0, 0" : "=r" (val));
	return val;
}
#else /* CONFIG_CPU_V7M */
#include <linux/io.h>
#include "asm/v7m.h"

static inline void set_csselr(unsigned int cache_selector)
{
	writel(cache_selector, BASEADDR_V7M_SCB + V7M_SCB_CTR);
}

static inline unsigned int read_ccsidr(void)
{
	return readl(BASEADDR_V7M_SCB + V7M_SCB_CCSIDR);
}
#endif
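
/*
 * Typical use of the two accessors above, loosely following the L1
 * I-cache geometry probe in arch/arm/kernel/setup.c (pre-CCIDX CCSIDR
 * layout assumed; 'ccsidr', 'line_size' and 'num_sets' are the caller's
 * own local variables):
 *
 *	set_csselr(CSSELR_L1 | CSSELR_ICACHE);
 *	isb();
 *	ccsidr = read_ccsidr();
 *	line_size = 4 << ((ccsidr & 0x7) + 2);     (line size in bytes)
 *	num_sets = ((ccsidr >> 13) & 0x7fff) + 1;  (number of sets)
 */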

#endif