1 /**
2 * Copyright (c) 2016-present, Yann Collet, Facebook, Inc.
3 * All rights reserved.
4 *
5 * This source code is licensed under the BSD-style license found in the
6 * LICENSE file in the root directory of this source tree. An additional grant
7 * of patent rights can be found in the PATENTS file in the same directory.
8 */
9
10 #ifndef MEM_H_MODULE
11 #define MEM_H_MODULE
12
13 #if defined (__cplusplus)
14 extern "C" {
15 #endif
16
17 /*-****************************************
18 * Dependencies
19 ******************************************/
20 #include <stddef.h> /* size_t, ptrdiff_t */
21 #include <string.h> /* memcpy */
22
23
24 /*-****************************************
25 * Compiler specifics
26 ******************************************/
27 #if defined(_MSC_VER) /* Visual Studio */
28 # include <stdlib.h> /* _byteswap_ulong */
29 # include <intrin.h> /* _byteswap_* */
30 #endif
31 #if defined(__GNUC__)
32 # define MEM_STATIC static __inline __attribute__((unused))
33 #elif defined (__cplusplus) || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) /* C99 */)
34 # define MEM_STATIC static inline
35 #elif defined(_MSC_VER)
36 # define MEM_STATIC static __inline
37 #else
38 # define MEM_STATIC static /* this version may generate warnings for unused static functions; disable the relevant warning */
39 #endif
40
/* code only tested on 32 and 64 bits systems */
/* MEM_STATIC_ASSERT : pre-C11 compile-time assertion usable inside a function
 * body; a false condition triggers a division by zero at compile time. */
#define MEM_STATIC_ASSERT(c) { enum { XXH_static_assert = 1/(int)(!!(c)) }; }
/* Sanity check : this header assumes size_t is either 4 or 8 bytes wide. */
MEM_STATIC void MEM_check(void) { MEM_STATIC_ASSERT((sizeof(size_t)==4) || (sizeof(size_t)==8)); }
44
45
46 /*-**************************************************************
47 * Basic Types
48 *****************************************************************/
49 #if !defined (__VMS) && (defined (__cplusplus) || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) /* C99 */) )
50 # include <stdint.h>
51 typedef uint8_t BYTE;
52 typedef uint16_t U16;
53 typedef int16_t S16;
54 typedef uint32_t U32;
55 typedef int32_t S32;
56 typedef uint64_t U64;
57 typedef int64_t S64;
58 typedef intptr_t iPtrDiff;
59 #else
60 typedef unsigned char BYTE;
61 typedef unsigned short U16;
62 typedef signed short S16;
63 typedef unsigned int U32;
64 typedef signed int S32;
65 typedef unsigned long long U64;
66 typedef signed long long S64;
67 typedef ptrdiff_t iPtrDiff;
68 #endif
69
70
71 /*-**************************************************************
72 * Memory I/O
73 *****************************************************************/
74 /* MEM_FORCE_MEMORY_ACCESS :
75 * By default, access to unaligned memory is controlled by `memcpy()`, which is safe and portable.
76 * Unfortunately, on some target/compiler combinations, the generated assembly is sub-optimal.
77 * The below switch allow to select different access method for improved performance.
78 * Method 0 (default) : use `memcpy()`. Safe and portable.
79 * Method 1 : `__packed` statement. It depends on compiler extension (ie, not portable).
80 * This method is safe if your compiler supports it, and *generally* as fast or faster than `memcpy`.
81 * Method 2 : direct access. This method is portable but violate C standard.
82 * It can generate buggy code on targets depending on alignment.
83 * In some circumstances, it's the only known way to get the most performance (ie GCC + ARMv6)
84 * See http://fastcompression.blogspot.fr/2015/08/accessing-unaligned-memory.html for details.
85 * Prefer these methods in priority order (0 > 1 > 2)
86 */
87 #ifndef MEM_FORCE_MEMORY_ACCESS /* can be defined externally, on command line for example */
88 # if defined(__GNUC__) && ( defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_6J__) || defined(__ARM_ARCH_6K__) || defined(__ARM_ARCH_6Z__) || defined(__ARM_ARCH_6ZK__) || defined(__ARM_ARCH_6T2__) )
89 # define MEM_FORCE_MEMORY_ACCESS 2
90 # elif defined(__INTEL_COMPILER) /*|| defined(_MSC_VER)*/ || \
91 (defined(__GNUC__) && ( defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7R__) || defined(__ARM_ARCH_7M__) || defined(__ARM_ARCH_7S__) ))
92 # define MEM_FORCE_MEMORY_ACCESS 1
93 # endif
94 #endif
95
MEM_32bits(void)96 MEM_STATIC unsigned MEM_32bits(void) { return sizeof(size_t)==4; }
MEM_64bits(void)97 MEM_STATIC unsigned MEM_64bits(void) { return sizeof(size_t)==8; }
98
MEM_isLittleEndian(void)99 MEM_STATIC unsigned MEM_isLittleEndian(void)
100 {
101 const union { U32 u; BYTE c[4]; } one = { 1 }; /* don't use static : performance detrimental */
102 return one.c[0];
103 }
104
105 #if defined(MEM_FORCE_MEMORY_ACCESS) && (MEM_FORCE_MEMORY_ACCESS==2)
106
107 /* violates C standard, by lying on structure alignment.
108 Only use if no other choice to achieve best performance on target platform */
MEM_read16(const void * memPtr)109 MEM_STATIC U16 MEM_read16(const void* memPtr) { return *(const U16*) memPtr; }
MEM_read32(const void * memPtr)110 MEM_STATIC U32 MEM_read32(const void* memPtr) { return *(const U32*) memPtr; }
MEM_read64(const void * memPtr)111 MEM_STATIC U64 MEM_read64(const void* memPtr) { return *(const U64*) memPtr; }
MEM_readST(const void * memPtr)112 MEM_STATIC U64 MEM_readST(const void* memPtr) { return *(const size_t*) memPtr; }
113
MEM_write16(void * memPtr,U16 value)114 MEM_STATIC void MEM_write16(void* memPtr, U16 value) { *(U16*)memPtr = value; }
MEM_write32(void * memPtr,U32 value)115 MEM_STATIC void MEM_write32(void* memPtr, U32 value) { *(U32*)memPtr = value; }
MEM_write64(void * memPtr,U64 value)116 MEM_STATIC void MEM_write64(void* memPtr, U64 value) { *(U64*)memPtr = value; }
117
118 #elif defined(MEM_FORCE_MEMORY_ACCESS) && (MEM_FORCE_MEMORY_ACCESS==1)
119
/* __pack instructions are safer, but compiler specific, hence potentially problematic for some compilers */
/* currently only defined for gcc and icc */
/* `unalign` : a packed union overlaying all supported access widths; reading
 * through it tells the compiler the pointer may be unaligned. */
#if defined(_MSC_VER) || (defined(__INTEL_COMPILER) && defined(WIN32))
__pragma( pack(push, 1) )
typedef union { U16 u16; U32 u32; U64 u64; size_t st; } unalign;
__pragma( pack(pop) )
#else
typedef union { U16 u16; U32 u32; U64 u64; size_t st; } __attribute__((packed)) unalign;
#endif
129
130 MEM_STATIC U16 MEM_read16(const void* ptr) { return ((const unalign*)ptr)->u16; }
MEM_read32(const void * ptr)131 MEM_STATIC U32 MEM_read32(const void* ptr) { return ((const unalign*)ptr)->u32; }
MEM_read64(const void * ptr)132 MEM_STATIC U64 MEM_read64(const void* ptr) { return ((const unalign*)ptr)->u64; }
MEM_readST(const void * ptr)133 MEM_STATIC U64 MEM_readST(const void* ptr) { return ((const unalign*)ptr)->st; }
134
MEM_write16(void * memPtr,U16 value)135 MEM_STATIC void MEM_write16(void* memPtr, U16 value) { ((unalign*)memPtr)->u16 = value; }
MEM_write32(void * memPtr,U32 value)136 MEM_STATIC void MEM_write32(void* memPtr, U32 value) { ((unalign*)memPtr)->u32 = value; }
MEM_write64(void * memPtr,U64 value)137 MEM_STATIC void MEM_write64(void* memPtr, U64 value) { ((unalign*)memPtr)->u64 = value; }
138
139 #else
140
141 /* default method, safe and standard.
142 can sometimes prove slower */
143
MEM_read16(const void * memPtr)144 MEM_STATIC U16 MEM_read16(const void* memPtr)
145 {
146 U16 val; memcpy(&val, memPtr, sizeof(val)); return val;
147 }
148
MEM_read32(const void * memPtr)149 MEM_STATIC U32 MEM_read32(const void* memPtr)
150 {
151 U32 val; memcpy(&val, memPtr, sizeof(val)); return val;
152 }
153
MEM_read64(const void * memPtr)154 MEM_STATIC U64 MEM_read64(const void* memPtr)
155 {
156 U64 val; memcpy(&val, memPtr, sizeof(val)); return val;
157 }
158
MEM_readST(const void * memPtr)159 MEM_STATIC size_t MEM_readST(const void* memPtr)
160 {
161 size_t val; memcpy(&val, memPtr, sizeof(val)); return val;
162 }
163
MEM_write16(void * memPtr,U16 value)164 MEM_STATIC void MEM_write16(void* memPtr, U16 value)
165 {
166 memcpy(memPtr, &value, sizeof(value));
167 }
168
MEM_write32(void * memPtr,U32 value)169 MEM_STATIC void MEM_write32(void* memPtr, U32 value)
170 {
171 memcpy(memPtr, &value, sizeof(value));
172 }
173
MEM_write64(void * memPtr,U64 value)174 MEM_STATIC void MEM_write64(void* memPtr, U64 value)
175 {
176 memcpy(memPtr, &value, sizeof(value));
177 }
178
179 #endif /* MEM_FORCE_MEMORY_ACCESS */
180
MEM_swap32(U32 in)181 MEM_STATIC U32 MEM_swap32(U32 in)
182 {
183 #if defined(_MSC_VER) /* Visual Studio */
184 return _byteswap_ulong(in);
185 #elif defined (__GNUC__) && (__GNUC__ * 100 + __GNUC_MINOR__ >= 403)
186 return __builtin_bswap32(in);
187 #else
188 return ((in << 24) & 0xff000000 ) |
189 ((in << 8) & 0x00ff0000 ) |
190 ((in >> 8) & 0x0000ff00 ) |
191 ((in >> 24) & 0x000000ff );
192 #endif
193 }
194
/* Reverses the byte order of a 64-bit value, using a compiler intrinsic
 * when one is available. */
MEM_STATIC U64 MEM_swap64(U64 in)
{
#if defined(_MSC_VER) /* Visual Studio */
    return _byteswap_uint64(in);
#elif defined (__GNUC__) && (__GNUC__ * 100 + __GNUC_MINOR__ >= 403)
    return __builtin_bswap64(in);
#else
    /* portable fallback : move each of the 8 bytes to its mirrored position */
    return ((in << 56) & 0xff00000000000000ULL) |
           ((in << 40) & 0x00ff000000000000ULL) |
           ((in << 24) & 0x0000ff0000000000ULL) |
           ((in << 8) & 0x000000ff00000000ULL) |
           ((in >> 8) & 0x00000000ff000000ULL) |
           ((in >> 24) & 0x0000000000ff0000ULL) |
           ((in >> 40) & 0x000000000000ff00ULL) |
           ((in >> 56) & 0x00000000000000ffULL);
#endif
}
212
MEM_swapST(size_t in)213 MEM_STATIC size_t MEM_swapST(size_t in)
214 {
215 if (MEM_32bits())
216 return (size_t)MEM_swap32((U32)in);
217 else
218 return (size_t)MEM_swap64((U64)in);
219 }
220
221 /*=== Little endian r/w ===*/
222
MEM_readLE16(const void * memPtr)223 MEM_STATIC U16 MEM_readLE16(const void* memPtr)
224 {
225 if (MEM_isLittleEndian())
226 return MEM_read16(memPtr);
227 else {
228 const BYTE* p = (const BYTE*)memPtr;
229 return (U16)(p[0] + (p[1]<<8));
230 }
231 }
232
MEM_writeLE16(void * memPtr,U16 val)233 MEM_STATIC void MEM_writeLE16(void* memPtr, U16 val)
234 {
235 if (MEM_isLittleEndian()) {
236 MEM_write16(memPtr, val);
237 } else {
238 BYTE* p = (BYTE*)memPtr;
239 p[0] = (BYTE)val;
240 p[1] = (BYTE)(val>>8);
241 }
242 }
243
MEM_readLE24(const void * memPtr)244 MEM_STATIC U32 MEM_readLE24(const void* memPtr)
245 {
246 return MEM_readLE16(memPtr) + (((const BYTE*)memPtr)[2] << 16);
247 }
248
MEM_writeLE24(void * memPtr,U32 val)249 MEM_STATIC void MEM_writeLE24(void* memPtr, U32 val)
250 {
251 MEM_writeLE16(memPtr, (U16)val);
252 ((BYTE*)memPtr)[2] = (BYTE)(val>>16);
253 }
254
MEM_readLE32(const void * memPtr)255 MEM_STATIC U32 MEM_readLE32(const void* memPtr)
256 {
257 if (MEM_isLittleEndian())
258 return MEM_read32(memPtr);
259 else
260 return MEM_swap32(MEM_read32(memPtr));
261 }
262
MEM_writeLE32(void * memPtr,U32 val32)263 MEM_STATIC void MEM_writeLE32(void* memPtr, U32 val32)
264 {
265 if (MEM_isLittleEndian())
266 MEM_write32(memPtr, val32);
267 else
268 MEM_write32(memPtr, MEM_swap32(val32));
269 }
270
MEM_readLE64(const void * memPtr)271 MEM_STATIC U64 MEM_readLE64(const void* memPtr)
272 {
273 if (MEM_isLittleEndian())
274 return MEM_read64(memPtr);
275 else
276 return MEM_swap64(MEM_read64(memPtr));
277 }
278
MEM_writeLE64(void * memPtr,U64 val64)279 MEM_STATIC void MEM_writeLE64(void* memPtr, U64 val64)
280 {
281 if (MEM_isLittleEndian())
282 MEM_write64(memPtr, val64);
283 else
284 MEM_write64(memPtr, MEM_swap64(val64));
285 }
286
MEM_readLEST(const void * memPtr)287 MEM_STATIC size_t MEM_readLEST(const void* memPtr)
288 {
289 if (MEM_32bits())
290 return (size_t)MEM_readLE32(memPtr);
291 else
292 return (size_t)MEM_readLE64(memPtr);
293 }
294
MEM_writeLEST(void * memPtr,size_t val)295 MEM_STATIC void MEM_writeLEST(void* memPtr, size_t val)
296 {
297 if (MEM_32bits())
298 MEM_writeLE32(memPtr, (U32)val);
299 else
300 MEM_writeLE64(memPtr, (U64)val);
301 }
302
303 /*=== Big endian r/w ===*/
304
MEM_readBE32(const void * memPtr)305 MEM_STATIC U32 MEM_readBE32(const void* memPtr)
306 {
307 if (MEM_isLittleEndian())
308 return MEM_swap32(MEM_read32(memPtr));
309 else
310 return MEM_read32(memPtr);
311 }
312
MEM_writeBE32(void * memPtr,U32 val32)313 MEM_STATIC void MEM_writeBE32(void* memPtr, U32 val32)
314 {
315 if (MEM_isLittleEndian())
316 MEM_write32(memPtr, MEM_swap32(val32));
317 else
318 MEM_write32(memPtr, val32);
319 }
320
MEM_readBE64(const void * memPtr)321 MEM_STATIC U64 MEM_readBE64(const void* memPtr)
322 {
323 if (MEM_isLittleEndian())
324 return MEM_swap64(MEM_read64(memPtr));
325 else
326 return MEM_read64(memPtr);
327 }
328
MEM_writeBE64(void * memPtr,U64 val64)329 MEM_STATIC void MEM_writeBE64(void* memPtr, U64 val64)
330 {
331 if (MEM_isLittleEndian())
332 MEM_write64(memPtr, MEM_swap64(val64));
333 else
334 MEM_write64(memPtr, val64);
335 }
336
MEM_readBEST(const void * memPtr)337 MEM_STATIC size_t MEM_readBEST(const void* memPtr)
338 {
339 if (MEM_32bits())
340 return (size_t)MEM_readBE32(memPtr);
341 else
342 return (size_t)MEM_readBE64(memPtr);
343 }
344
MEM_writeBEST(void * memPtr,size_t val)345 MEM_STATIC void MEM_writeBEST(void* memPtr, size_t val)
346 {
347 if (MEM_32bits())
348 MEM_writeBE32(memPtr, (U32)val);
349 else
350 MEM_writeBE64(memPtr, (U64)val);
351 }
352
353
354 /* function safe only for comparisons */
MEM_readMINMATCH(const void * memPtr,U32 length)355 MEM_STATIC U32 MEM_readMINMATCH(const void* memPtr, U32 length)
356 {
357 switch (length)
358 {
359 default :
360 case 4 : return MEM_read32(memPtr);
361 case 3 : if (MEM_isLittleEndian())
362 return MEM_read32(memPtr)<<8;
363 else
364 return MEM_read32(memPtr)>>8;
365 }
366 }
367
368 #if defined (__cplusplus)
369 }
370 #endif
371
372 #endif /* MEM_H_MODULE */
373