/*	$OpenBSD: crypto_internal.h,v 1.15 2025/01/19 07:51:41 jsing Exp $ */
/*
 * Copyright (c) 2023 Joel Sing <jsing@openbsd.org>
 *
 * Permission to use, copy, modify, and distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

#include <endian.h>
#include <stddef.h>
#include <stdint.h>
#include <string.h>

#include "crypto_arch.h"

#ifndef HEADER_CRYPTO_INTERNAL_H
#define HEADER_CRYPTO_INTERNAL_H

/*
 * CTASSERT() is a compile time assertion, usable at file scope: the declared
 * array gets a negative size, and compilation fails, when the condition x is
 * false.
 */
#define CTASSERT(x) \
    extern char _ctassert[(x) ? 1 : -1] __attribute__((__unused__))

/*
 * Constant time functions for size_t.
 */
#ifndef HAVE_CRYPTO_CT_NE_ZERO
static inline int
crypto_ct_ne_zero(size_t v)
{
	return (v | ~(v - 1)) >> ((sizeof(v) * 8) - 1);
}
#endif

#ifndef HAVE_CRYPTO_CT_NE_ZERO_MASK
static inline size_t
crypto_ct_ne_zero_mask(size_t v)
{
	return 0 - crypto_ct_ne_zero(v);
}
#endif

#ifndef HAVE_CRYPTO_CT_EQ_ZERO
static inline int
crypto_ct_eq_zero(size_t v)
{
	return 1 - crypto_ct_ne_zero(v);
}
#endif

#ifndef HAVE_CRYPTO_CT_EQ_ZERO_MASK
static inline size_t
crypto_ct_eq_zero_mask(size_t v)
{
	return 0 - crypto_ct_eq_zero(v);
}
#endif

#ifndef HAVE_CRYPTO_CT_LT
static inline int
crypto_ct_lt(size_t a, size_t b)
{
	return (((a - b) | (b & ~a)) & (b | ~a)) >>
	    (sizeof(size_t) * 8 - 1);
}
#endif

#ifndef HAVE_CRYPTO_CT_LT_MASK
static inline size_t
crypto_ct_lt_mask(size_t a, size_t b)
{
	return 0 - crypto_ct_lt(a, b);
}
#endif

#ifndef HAVE_CRYPTO_CT_GT
static inline int
crypto_ct_gt(size_t a, size_t b)
{
	return crypto_ct_lt(b, a);
}
#endif

#ifndef HAVE_CRYPTO_CT_GT_MASK
static inline size_t
crypto_ct_gt_mask(size_t a, size_t b)
{
	return 0 - crypto_ct_gt(a, b);
}
#endif
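
/*
 * Example (illustrative sketch, not part of this header): the mask variants
 * return all one bits when the condition holds and zero otherwise, which
 * allows branch free selection. Assuming a and b are size_t values,
 *
 *	mask = crypto_ct_lt_mask(a, b);
 *	min = (a & mask) | (b & ~mask);
 *
 * sets min to the smaller of the two without a data dependent branch.
 */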

/*
 * Constant time operations for uint8_t.
 */
#ifndef HAVE_CRYPTO_CT_NE_ZERO_U8
static inline int
crypto_ct_ne_zero_u8(uint8_t v)
{
	return (uint8_t)(v | ~(v - 1)) >> ((sizeof(v) * 8) - 1);
}
#endif

#ifndef HAVE_CRYPTO_CT_NE_ZERO_MASK_U8
static inline uint8_t
crypto_ct_ne_zero_mask_u8(uint8_t v)
{
	return 0 - crypto_ct_ne_zero_u8(v);
}
#endif

#ifndef HAVE_CRYPTO_CT_EQ_ZERO_U8
static inline int
crypto_ct_eq_zero_u8(uint8_t v)
{
	return 1 - crypto_ct_ne_zero_u8(v);
}
#endif

#ifndef HAVE_CRYPTO_CT_EQ_ZERO_MASK_U8
static inline uint8_t
crypto_ct_eq_zero_mask_u8(uint8_t v)
{
	return 0 - crypto_ct_eq_zero_u8(v);
}
#endif

#ifndef HAVE_CRYPTO_CT_NE_U8
static inline int
crypto_ct_ne_u8(uint8_t a, uint8_t b)
{
	return crypto_ct_ne_zero_u8(a - b);
}
#endif

#ifndef HAVE_CRYPTO_CT_NE_MASK_U8
static inline uint8_t
crypto_ct_ne_mask_u8(uint8_t a, uint8_t b)
{
	return 0 - crypto_ct_ne_u8(a, b);
}
#endif

#ifndef HAVE_CRYPTO_CT_EQ_U8
static inline int
crypto_ct_eq_u8(uint8_t a, uint8_t b)
{
	return crypto_ct_eq_zero_u8(a - b);
}
#endif

#ifndef HAVE_CRYPTO_CT_EQ_MASK_U8
static inline uint8_t
crypto_ct_eq_mask_u8(uint8_t a, uint8_t b)
{
	return 0 - crypto_ct_eq_u8(a, b);
}
#endif
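
/*
 * Example (illustrative sketch, not part of this header): the uint8_t
 * variants allow comparing secret data without branching on its value. A
 * constant time comparison of two n byte buffers a and b could look like
 *
 *	uint8_t diff = 0;
 *	for (i = 0; i < n; i++)
 *		diff |= a[i] ^ b[i];
 *	return crypto_ct_eq_zero_u8(diff);
 *
 * which returns 1 when the buffers are equal and 0 otherwise, touching every
 * byte regardless of where the first difference occurs.
 */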

/*
 * crypto_load_be32toh() loads a 32 bit unsigned big endian value as a 32 bit
 * unsigned host endian value, from the specified address in memory. The memory
 * address may have any alignment.
 */
#ifndef HAVE_CRYPTO_LOAD_BE32TOH
static inline uint32_t
crypto_load_be32toh(const uint8_t *src)
{
	uint32_t v;

	memcpy(&v, src, sizeof(v));

	return be32toh(v);
}
#endif

/*
 * crypto_store_htobe32() stores a 32 bit unsigned host endian value as a 32 bit
 * unsigned big endian value, at the specified address in memory. The memory
 * address may have any alignment.
 */
#ifndef HAVE_CRYPTO_STORE_HTOBE32
static inline void
crypto_store_htobe32(uint8_t *dst, uint32_t v)
{
	v = htobe32(v);
	memcpy(dst, &v, sizeof(v));
}
#endif
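
/*
 * Example (illustrative sketch, not part of this header): a 32 bit big
 * endian word can be read from and written back to an unaligned buffer with
 *
 *	v = crypto_load_be32toh(&buf[off]);
 *	crypto_store_htobe32(&buf[off], v + 1);
 *
 * where buf is a uint8_t array and off need not be a multiple of four; the
 * memcpy() based implementations avoid unaligned access faults on strict
 * alignment architectures.
 */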

/*
 * crypto_load_be64toh() loads a 64 bit unsigned big endian value as a 64 bit
 * unsigned host endian value, from the specified address in memory. The memory
 * address may have any alignment.
 */
#ifndef HAVE_CRYPTO_LOAD_BE64TOH
static inline uint64_t
crypto_load_be64toh(const uint8_t *src)
{
	uint64_t v;

	memcpy(&v, src, sizeof(v));

	return be64toh(v);
}
#endif

/*
 * crypto_store_htobe64() stores a 64 bit unsigned host endian value as a 64 bit
 * unsigned big endian value, at the specified address in memory. The memory
 * address may have any alignment.
 */
#ifndef HAVE_CRYPTO_STORE_HTOBE64
static inline void
crypto_store_htobe64(uint8_t *dst, uint64_t v)
{
	v = htobe64(v);
	memcpy(dst, &v, sizeof(v));
}
#endif
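
/*
 * Example (illustrative sketch, not part of this header): big endian hash
 * functions commonly encode the message bit count into the final padded
 * block, e.g. a SHA-256 style padding routine could write
 *
 *	crypto_store_htobe64(&block[blocksize - 8], (uint64_t)len << 3);
 *
 * where block, blocksize and len are hypothetical names for the final block,
 * its size in bytes and the message length in bytes.
 */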

/*
 * crypto_load_le32toh() loads a 32 bit unsigned little endian value as a 32 bit
 * unsigned host endian value, from the specified address in memory. The memory
 * address may have any alignment.
 */
#ifndef HAVE_CRYPTO_LOAD_LE32TOH
static inline uint32_t
crypto_load_le32toh(const uint8_t *src)
{
	uint32_t v;

	memcpy(&v, src, sizeof(v));

	return le32toh(v);
}
#endif

/*
 * crypto_store_htole32() stores a 32 bit unsigned host endian value as a 32 bit
 * unsigned little endian value, at the specified address in memory. The memory
 * address may have any alignment.
 */
#ifndef HAVE_CRYPTO_STORE_HTOLE32
static inline void
crypto_store_htole32(uint8_t *dst, uint32_t v)
{
	v = htole32(v);
	memcpy(dst, &v, sizeof(v));
}
#endif
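
/*
 * Example (illustrative sketch, not part of this header): little endian
 * algorithms such as ChaCha treat their key and nonce as 32 bit little
 * endian words, so the working state can be populated with
 *
 *	state[i] = crypto_load_le32toh(&key[i * 4]);
 *
 * where state, key and i are hypothetical names for the working state, the
 * key bytes and the word index.
 */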

/*
 * crypto_add_u32dw_u64() adds a 64 bit value to a 64 bit quantity stored as
 * two 32 bit words, with *h holding the high 32 bits and *l the low 32 bits.
 */
#ifndef HAVE_CRYPTO_ADD_U32DW_U64
static inline void
crypto_add_u32dw_u64(uint32_t *h, uint32_t *l, uint64_t v)
{
	v += ((uint64_t)*h << 32) | *l;
	*h = v >> 32;
	*l = v;
}
#endif
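
/*
 * Example (illustrative sketch, not part of this header): legacy hash
 * contexts often keep their processed bit count as a pair of 32 bit words
 * (commonly named Nh and Nl); such a counter can be advanced by len bytes
 * with
 *
 *	crypto_add_u32dw_u64(&ctx->Nh, &ctx->Nl, (uint64_t)len << 3);
 *
 * where ctx, Nh, Nl and len are hypothetical names used for illustration.
 */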

/*
 * Bit rotation of 32 and 64 bit values. In these generic C implementations
 * the shift must be greater than zero and less than the width of the value.
 */
#ifndef HAVE_CRYPTO_ROL_U32
static inline uint32_t
crypto_rol_u32(uint32_t v, size_t shift)
{
	return (v << shift) | (v >> (32 - shift));
}
#endif

#ifndef HAVE_CRYPTO_ROR_U32
static inline uint32_t
crypto_ror_u32(uint32_t v, size_t shift)
{
	return (v << (32 - shift)) | (v >> shift);
}
#endif

#ifndef HAVE_CRYPTO_ROL_U64
static inline uint64_t
crypto_rol_u64(uint64_t v, size_t shift)
{
	return (v << shift) | (v >> (64 - shift));
}
#endif

#ifndef HAVE_CRYPTO_ROR_U64
static inline uint64_t
crypto_ror_u64(uint64_t v, size_t shift)
{
	return (v << (64 - shift)) | (v >> shift);
}
#endif
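
/*
 * Example (illustrative sketch, not part of this header): rotations are the
 * core mixing operation in many primitives, e.g. part of a ChaCha style
 * quarter round can be written as
 *
 *	a += b;
 *	d = crypto_rol_u32(d ^ a, 16);
 *
 * where a, b and d are 32 bit working state words.
 */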

/*
 * Runtime CPU capability detection.
 */
void crypto_cpu_caps_init(void);

uint64_t crypto_cpu_caps_ia32(void);

#endif