xref: /openbsd/sys/arch/powerpc64/include/cpufunc.h (revision dfe2a243)
1 /*	$OpenBSD: cpufunc.h,v 1.2 2020/06/06 22:36:22 kettenis Exp $	*/
2 
3 /*
4  * Copyright (c) 2020 Mark Kettenis <kettenis@openbsd.org>
5  *
6  * Permission to use, copy, modify, and distribute this software for any
7  * purpose with or without fee is hereby granted, provided that the above
8  * copyright notice and this permission notice appear in all copies.
9  *
10  * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
11  * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
12  * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
13  * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
14  * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
15  * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
16  * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
17  */
18 
19 #ifndef _MACHINE_CPUFUNC_H_
20 #define _MACHINE_CPUFUNC_H_
21 
/*
 * eieio: Enforce In-order Execution of I/O.  Orders storage accesses,
 * primarily loads/stores to device (caching-inhibited) memory.  The
 * "memory" clobber keeps the compiler from reordering memory accesses
 * across the barrier as well.
 */
static inline void
eieio(void)
{
	__asm volatile ("eieio" ::: "memory");
}
27 
/*
 * isync: Instruction Synchronize.  Discards prefetched instructions and
 * waits for preceding instructions to complete; used as a context
 * synchronizing operation after MMU/SPR updates.
 */
static inline void
isync(void)
{
	__asm volatile ("isync" ::: "memory");
}
33 
/*
 * ptesync: Page Table Entry Synchronize.  Ensures preceding stores to
 * page table entries (and tlbie operations issued by this thread) have
 * completed before subsequent storage accesses.
 */
static inline void
ptesync(void)
{
	__asm volatile ("ptesync" ::: "memory");
}
39 
/*
 * slbia: SLB Invalidate All.  Invalidates all SLB entries except
 * entry 0.
 *
 * NOTE(review): no "memory" clobber here, unlike the barriers above —
 * presumably callers pair this with isync()/ptesync() to order the
 * invalidation; confirm against callers.
 */
static inline void
slbia(void)
{
	__asm volatile ("slbia");
}
45 
/*
 * slbie: SLB Invalidate Entry.  Invalidates the SLB entry that
 * translates the effective segment given in 'esid' (register operand
 * formatted as the instruction expects, ESID plus class bit).
 */
static inline void
slbie(uint64_t esid)
{
	__asm volatile ("slbie %0" :: "r"(esid));
}
51 
/*
 * slbmfee: SLB Move From Entry ESID.  Returns the ESID portion (SLBE)
 * of the SLB entry selected by the index in 'entry'.
 */
static inline uint64_t
slbmfee(uint64_t entry)
{
	uint64_t slbe;

	__asm volatile ("slbmfee %0, %1" : "=r"(slbe) : "r"(entry));
	return slbe;
}
59 
/*
 * slbmte: SLB Move To Entry.  Installs an SLB entry from the given
 * VSID portion ('slbv') and ESID/index portion ('slbe').
 */
static inline void
slbmte(uint64_t slbv, uint64_t slbe)
{
	__asm volatile ("slbmte %0, %1" :: "r"(slbv), "r"(slbe));
}
65 
/*
 * tlbie: TLB Invalidate Entry (global).  Invalidates the translation
 * for the abbreviated virtual address 'ava' on all processors.  The
 * second register operand (RS) is passed as zero here; NOTE(review):
 * ISA 3.0 tlbie takes additional L/RIC/PRS/R fields which this form
 * leaves at their defaults — confirm intended encoding for the target
 * CPUs.
 */
static inline void
tlbie(uint64_t ava)
{
	__asm volatile ("tlbie %0, %1" :: "r"(ava), "r"(0));
}
71 
/*
 * tlbiel: TLB Invalidate Entry Local.  Like tlbie() but affects only
 * the executing processor; no broadcast, so no tlbsync() is required
 * for the local effect.
 */
static inline void
tlbiel(uint64_t ava)
{
	__asm volatile ("tlbiel %0" :: "r"(ava));
}
77 
/*
 * tlbsync: TLB Synchronize.  Waits until preceding broadcast tlbie
 * operations have been performed by all processors; typically followed
 * by ptesync() to complete an invalidation sequence.
 */
static inline void
tlbsync(void)
{
	__asm volatile ("tlbsync" ::: "memory");
}
83 
/*
 * mfmsr: Move From Machine State Register.  Returns the current MSR.
 */
static inline uint64_t
mfmsr(void)
{
	uint64_t msr;

	__asm volatile ("mfmsr %0" : "=r"(msr));
	return msr;
}
91 
/*
 * mtmsr: Move To Machine State Register.  Writes 'value' into the MSR.
 * NOTE(review): not a context synchronizing operation by itself;
 * presumably callers issue isync() when changing translation or
 * interrupt state — confirm against callers.
 */
static inline void
mtmsr(uint64_t value)
{
	__asm volatile ("mtmsr %0" :: "r"(value));
}
97 
/*
 * mftb: Move From Time Base.  Returns the full 64-bit Time Base value;
 * on a 64-bit implementation a single read suffices (no high/low
 * wraparound loop needed).
 */
static inline uint64_t
mftb(void)
{
	uint64_t tb;

	__asm volatile ("mftb %0" : "=r"(tb));
	return tb;
}
105 
/*
 * mfdsisr: read the DSISR (Data Storage Interrupt Status Register),
 * which describes the cause of the most recent data storage exception.
 */
static inline uint32_t
mfdsisr(void)
{
	uint32_t dsisr;

	__asm volatile ("mfdsisr %0" : "=r"(dsisr));
	return dsisr;
}
113 
/*
 * mfdar: read the DAR (Data Address Register), which holds the faulting
 * effective address of the most recent data storage exception.
 */
static inline uint64_t
mfdar(void)
{
	uint64_t dar;

	__asm volatile ("mfdar %0" : "=r"(dar));
	return dar;
}
121 
/*
 * mflpcr: read the LPCR (Logical Partitioning Control Register),
 * SPR 318.  No dedicated mnemonic, hence the raw mfspr encoding.
 */
static inline uint64_t
mflpcr(void)
{
	uint64_t lpcr;

	__asm volatile ("mfspr %0, 318" : "=r"(lpcr));
	return lpcr;
}
129 
/*
 * mtlpcr: write 'value' to the LPCR (Logical Partitioning Control
 * Register), SPR 318.
 */
static inline void
mtlpcr(uint64_t value)
{
	__asm volatile ("mtspr 318, %0" :: "r"(value));
}
135 
/*
 * mtptcr: write 'value' to the PTCR (Partition Table Control Register),
 * SPR 464, which points at the partition table base.
 */
static inline void
mtptcr(uint64_t value)
{
	__asm volatile ("mtspr 464, %0" :: "r"(value));
}
141 
142 extern int cacheline_size;
143 
144 void	__syncicache(void *, size_t);
145 
146 #endif /* _MACHINE_CPUFUNC_H_ */
147