// SPDX-License-Identifier: GPL-2.0+
/*
 * (C) Copyright 2002
 * Wolfgang Denk, DENX Software Engineering, wd@denx.de.
 */

#include <common.h>
#include <asm/immap.h>
#include <asm/cache.h>

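/*
 * Software-maintained cache status flags.  ICACHE_STATUS and DCACHE_STATUS
 * (from <asm/cache.h>) are the addresses of reserved words, typically at the
 * top of the CPU-internal init RAM; they mirror the enable state because the
 * CACR written below cannot be read back on ColdFire.
 */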
volatile int *cf_icache_status = (int *)ICACHE_STATUS;
volatile int *cf_dcache_status = (int *)DCACHE_STATUS;

void flush_cache(ulong start_addr, ulong size)
{
	/* Must be implemented for all M68k processors with copy-back data cache */
}

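/*
 * icache_status()/dcache_status() report the software-maintained flags
 * set by the corresponding enable/disable routines below.
 */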
int icache_status(void)
{
	return *cf_icache_status;
}

int dcache_status(void)
{
	return *cf_dcache_status;
}

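/*
 * Enable the instruction cache: invalidate it, record the enabled state,
 * program the instruction ACRs (ACR2/ACR3 on V4, plus ACR6/ACR7 on V4e,
 * otherwise ACR0/ACR1) with the board's CONFIG_SYS_CACHE_ACRx values, and
 * finally write CONFIG_SYS_CACHE_ICACR to the CACR to turn the cache on.
 */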
void icache_enable(void)
{
	icache_invalid();

	*cf_icache_status = 1;

#if defined(CONFIG_CF_V4) || defined(CONFIG_CF_V4E)
	__asm__ __volatile__("movec %0, %%acr2"::"r"(CONFIG_SYS_CACHE_ACR2));
	__asm__ __volatile__("movec %0, %%acr3"::"r"(CONFIG_SYS_CACHE_ACR3));
#if defined(CONFIG_CF_V4E)
	__asm__ __volatile__("movec %0, %%acr6"::"r"(CONFIG_SYS_CACHE_ACR6));
	__asm__ __volatile__("movec %0, %%acr7"::"r"(CONFIG_SYS_CACHE_ACR7));
#endif
#else
	__asm__ __volatile__("movec %0, %%acr0"::"r"(CONFIG_SYS_CACHE_ACR0));
	__asm__ __volatile__("movec %0, %%acr1"::"r"(CONFIG_SYS_CACHE_ACR1));
#endif

	__asm__ __volatile__("movec %0, %%cacr"::"r"(CONFIG_SYS_CACHE_ICACR));
}

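/*
 * Disable the instruction cache: clear the status flag, invalidate the
 * cache, and zero the instruction-side ACRs so no address range remains
 * mapped as cacheable.
 */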
void icache_disable(void)
{
	u32 temp = 0;

	*cf_icache_status = 0;
	icache_invalid();

#if defined(CONFIG_CF_V4) || defined(CONFIG_CF_V4E)
	__asm__ __volatile__("movec %0, %%acr2"::"r"(temp));
	__asm__ __volatile__("movec %0, %%acr3"::"r"(temp));
#if defined(CONFIG_CF_V4E)
	__asm__ __volatile__("movec %0, %%acr6"::"r"(temp));
	__asm__ __volatile__("movec %0, %%acr7"::"r"(temp));
#endif
#else
	__asm__ __volatile__("movec %0, %%acr0"::"r"(temp));
	__asm__ __volatile__("movec %0, %%acr1"::"r"(temp));
#endif
}

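/*
 * Invalidate the instruction cache by writing the board's
 * CONFIG_SYS_ICACHE_INV bits to the CACR.  If the cache is currently
 * enabled, the CONFIG_SYS_CACHE_ICACR bits are kept set so the write
 * does not disable it.
 */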
void icache_invalid(void)
{
	u32 temp;

	temp = CONFIG_SYS_ICACHE_INV;
	if (*cf_icache_status)
		temp |= CONFIG_SYS_CACHE_ICACR;

	__asm__ __volatile__("movec %0, %%cacr"::"r"(temp));
}

/*
 * The data cache is only present on ColdFire V4/V4e parts such as the
 * MCF547x/8x and MCF5445x; on ColdFire V2 and V3 the dcache handling
 * below is a dummy.
 */
void dcache_enable(void)
{
	dcache_invalid();
	*cf_dcache_status = 1;

#if defined(CONFIG_CF_V4) || defined(CONFIG_CF_V4E)
	__asm__ __volatile__("movec %0, %%acr0"::"r"(CONFIG_SYS_CACHE_ACR0));
	__asm__ __volatile__("movec %0, %%acr1"::"r"(CONFIG_SYS_CACHE_ACR1));
#if defined(CONFIG_CF_V4E)
	__asm__ __volatile__("movec %0, %%acr4"::"r"(CONFIG_SYS_CACHE_ACR4));
	__asm__ __volatile__("movec %0, %%acr5"::"r"(CONFIG_SYS_CACHE_ACR5));
#endif
#endif

	__asm__ __volatile__("movec %0, %%cacr"::"r"(CONFIG_SYS_CACHE_DCACR));
}

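/*
 * Disable the data cache: clear the status flag, invalidate the cache,
 * clear the CACR and zero the data-side ACRs (ACR0/ACR1, plus ACR4/ACR5
 * on V4e).
 */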
void dcache_disable(void)
{
	u32 temp = 0;

	*cf_dcache_status = 0;
	dcache_invalid();

	__asm__ __volatile__("movec %0, %%cacr"::"r"(temp));

#if defined(CONFIG_CF_V4) || defined(CONFIG_CF_V4E)
	__asm__ __volatile__("movec %0, %%acr0"::"r"(temp));
	__asm__ __volatile__("movec %0, %%acr1"::"r"(temp));
#if defined(CONFIG_CF_V4E)
	__asm__ __volatile__("movec %0, %%acr4"::"r"(temp));
	__asm__ __volatile__("movec %0, %%acr5"::"r"(temp));
#endif
#endif
}

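/*
 * Invalidate the data cache by writing CONFIG_SYS_DCACHE_INV to the CACR.
 * Because the CACR also carries the instruction-cache configuration, the
 * DCACR/ICACR bits are re-applied for whichever caches are currently
 * enabled.
 */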
void dcache_invalid(void)
{
#if defined(CONFIG_CF_V4) || defined(CONFIG_CF_V4E)
	u32 temp;

	temp = CONFIG_SYS_DCACHE_INV;
	if (*cf_dcache_status)
		temp |= CONFIG_SYS_CACHE_DCACR;
	if (*cf_icache_status)
		temp |= CONFIG_SYS_CACHE_ICACR;

	__asm__ __volatile__("movec %0, %%cacr"::"r"(temp));
#endif
}

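/*
 * Weak default implementations; platforms with a real data cache are
 * expected to override these range operations.
 */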
__weak void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
	/* An empty stub; the real implementation should be in platform code */
}

__weak void flush_dcache_range(unsigned long start, unsigned long stop)
{
	/* An empty stub; the real implementation should be in platform code */
}