1 /*
2 Copyright 2011 Google Inc. All Rights Reserved.
3 
4 Redistribution and use in source and binary forms, with or without
5 modification, are permitted provided that the following conditions are
6 met:
7 
8     * Redistributions of source code must retain the above copyright
9 notice, this list of conditions and the following disclaimer.
10     * Redistributions in binary form must reproduce the above
11 copyright notice, this list of conditions and the following disclaimer
12 in the documentation and/or other materials provided with the
13 distribution.
14     * Neither the name of Google Inc. nor the names of its
15 contributors may be used to endorse or promote products derived from
16 this software without specific prior written permission.
17 
18 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
19 "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
20 LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
21 A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
22 OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
23 SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
24 LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
25 DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
26 THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27 (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
28 OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29 
30 Various stubs for the open-source version of Snappy.
31 
32 File modified for the Linux Kernel by
33 Zeev Tarantov <zeev.tarantov@gmail.com>
34 
35 File modified for Sereal by
36 Steffen Mueller <smueller@cpan.org>
37 */
38 
#ifndef CSNAPPY_INTERNAL_H_
#define CSNAPPY_INTERNAL_H_

#include "csnappy_compat.h"

/* Userspace builds get portable replacements for the kernel helpers
 * (get_unaligned, byte-order macros, DCHECK) from the header below;
 * kernel builds use the real kernel facilities in the #else branch. */
#ifndef __KERNEL__
#include "csnappy_internal_userspace.h"
#include <string.h>
#else

#include <linux/types.h>
#include <linux/string.h>
#include <linux/compiler.h>
#include <asm/byteorder.h>
#include <asm/unaligned.h>

/* The kernel defines exactly one of __LITTLE_ENDIAN / __BIG_ENDIAN;
 * having both or neither means the build environment is broken. */
#if (defined(__LITTLE_ENDIAN) && defined(__BIG_ENDIAN)) || \
    (!defined(__LITTLE_ENDIAN) && !defined(__BIG_ENDIAN))
#error either __LITTLE_ENDIAN or __BIG_ENDIAN must be defined
#endif
/* Synthesize the glibc-style __BYTE_ORDER macro the rest of the code
 * tests against. */
#if defined(__LITTLE_ENDIAN)
#define __BYTE_ORDER __LITTLE_ENDIAN
#else
#define __BYTE_ORDER __BIG_ENDIAN
#endif
64 
/*
 * DCHECK(cond): debug-only sanity check.  Logs (does not abort) when
 * cond is false and DEBUG is defined; compiles to nothing otherwise.
 *
 * Both variants are wrapped in do { } while (0) so that DCHECK behaves
 * like a single statement; the previous bare-if version bound a
 * following `else` to the macro's hidden `if` (dangling-else hazard)
 * when used as `if (x) DCHECK(y); else ...`.
 */
#ifdef DEBUG
#define DCHECK(cond)	do { \
				if (!(cond)) \
					printk(KERN_DEBUG "assert failed @ %s:%i\n", \
						__FILE__, __LINE__); \
			} while (0)
#else
#define DCHECK(cond)	do { } while (0)
#endif
72 
/* Unaligned memory accessors for the kernel build, expressed in terms
 * of the kernel's get_unaligned/put_unaligned helpers.  The userspace
 * header provides its own definitions of the same names. */
#define UNALIGNED_LOAD16(_p)		get_unaligned((const uint16_t *)(_p))
#define UNALIGNED_LOAD32(_p)		get_unaligned((const uint32_t *)(_p))
#define UNALIGNED_LOAD64(_p)		get_unaligned((const uint64_t *)(_p))
#define UNALIGNED_STORE16(_p, _val)	put_unaligned((_val), (uint16_t *)(_p))
#define UNALIGNED_STORE32(_p, _val)	put_unaligned((_val), (uint32_t *)(_p))
#define UNALIGNED_STORE64(_p, _val)	put_unaligned((_val), (uint64_t *)(_p))

/* Index of the lowest set bit; argument must be non-zero
 * (__builtin_ctz is undefined for 0). */
#define FindLSBSetNonZero(n)		__builtin_ctz(n)
#define FindLSBSetNonZero64(n)		__builtin_ctzll(n)

#endif /* __KERNEL__ */

/* Whichever branch above was taken, the endianness macros must now be
 * fully set up before the code below relies on them. */
#if (!defined(__LITTLE_ENDIAN) && !defined(__BIG_ENDIAN)) || ! defined(__BYTE_ORDER)
#  error either __LITTLE_ENDIAN or __BIG_ENDIAN, plus __BYTE_ORDER must be defined
#endif
88 
/*
 * ARCH_ARM_HAVE_UNALIGNED is 1 on ARM cores (ARMv6 and newer) that
 * support hardware unaligned word access, 0 otherwise.
 *
 * It is defined via #if/#else to a plain 0/1 rather than as a macro
 * expanding to a `defined(...)` chain: using `defined` that appears
 * only as the result of macro expansion inside #if is undefined
 * behavior per C11 6.10.1p4 (it happens to work on GCC, but is not
 * portable).  Both use sites (`#if ... || ARCH_ARM_HAVE_UNALIGNED` and
 * `#if ARCH_ARM_HAVE_UNALIGNED`) behave the same with a 0/1 value.
 */
#if defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_6J__) || defined(__ARM_ARCH_6K__) || \
    defined(__ARM_ARCH_6Z__) || defined(__ARM_ARCH_6ZK__) || defined(__ARM_ARCH_6T2__) || \
    defined(__ARMV6__) || \
    defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7R__) || \
    defined(__ARM_ARCH_7M__)
#define ARCH_ARM_HAVE_UNALIGNED 1
#else
#define ARCH_ARM_HAVE_UNALIGNED 0
#endif
92 
UnalignedCopy64(const void * src,void * dst)93 static INLINE void UnalignedCopy64(const void *src, void *dst) {
94 #if defined(__i386__) || defined(__x86_64__) || defined(__powerpc__) || ARCH_ARM_HAVE_UNALIGNED
95   if ((sizeof(void *) == 8) || (sizeof(long) == 8)) {
96     UNALIGNED_STORE64(dst, UNALIGNED_LOAD64(src));
97   } else {
98    /* This can be more efficient than UNALIGNED_LOAD64 + UNALIGNED_STORE64
99       on some platforms, in particular ARM. */
100     const uint8_t *src_bytep = (const uint8_t *)src;
101     uint8_t *dst_bytep = (uint8_t *)dst;
102 
103     UNALIGNED_STORE32(dst_bytep, UNALIGNED_LOAD32(src_bytep));
104     UNALIGNED_STORE32(dst_bytep + 4, UNALIGNED_LOAD32(src_bytep + 4));
105   }
106 #else
107   const uint8_t *src_bytep = (const uint8_t *)src;
108   uint8_t *dst_bytep = (uint8_t *)dst;
109   dst_bytep[0] = src_bytep[0];
110   dst_bytep[1] = src_bytep[1];
111   dst_bytep[2] = src_bytep[2];
112   dst_bytep[3] = src_bytep[3];
113   dst_bytep[4] = src_bytep[4];
114   dst_bytep[5] = src_bytep[5];
115   dst_bytep[6] = src_bytep[6];
116   dst_bytep[7] = src_bytep[7];
117 #endif
118 }
119 
120 #if defined(__arm__)
121   #if ARCH_ARM_HAVE_UNALIGNED
get_unaligned_le(const void * p,uint32_t n)122      static INLINE uint32_t get_unaligned_le(const void *p, uint32_t n)
123      {
124        uint32_t wordmask = (1U << (8 * n)) - 1;
125        return get_unaligned_le32(p) & wordmask;
126      }
127   #else
128      extern uint32_t get_unaligned_le_armv5(const void *p, uint32_t n);
129      #define get_unaligned_le get_unaligned_le_armv5
130   #endif
131 #else
get_unaligned_le(const void * p,uint32_t n)132   static INLINE uint32_t get_unaligned_le(const void *p, uint32_t n)
133   {
134     /* Mapping from i in range [0,4] to a mask to extract the bottom 8*i bits */
135     static const uint32_t wordmask[] = {
136       0u, 0xffu, 0xffffu, 0xffffffu, 0xffffffffu
137     };
138     return get_unaligned_le32(p) & wordmask[n];
139   }
140 #endif
141 
/* Comparison flavours of DCHECK; like DCHECK itself they are no-ops
 * unless DEBUG is defined.  Arguments may be evaluated twice -- do not
 * pass expressions with side effects. */
#define DCHECK_EQ(a, b)	DCHECK(((a) == (b)))
#define DCHECK_NE(a, b)	DCHECK(((a) != (b)))
#define DCHECK_GT(a, b)	DCHECK(((a) >  (b)))
#define DCHECK_GE(a, b)	DCHECK(((a) >= (b)))
#define DCHECK_LT(a, b)	DCHECK(((a) <  (b)))
#define DCHECK_LE(a, b)	DCHECK(((a) <= (b)))
148 
/* Snappy element tag types.  NOTE(review): values and meanings come
 * from the snappy format description; the names suggest the copy
 * variants are followed by 1-, 2- and 4-byte offsets respectively --
 * confirm against the format spec / the compressor code. */
enum {
	LITERAL = 0,
	COPY_1_BYTE_OFFSET = 1,  /* 3 bit length + 3 bits of offset in opcode */
	COPY_2_BYTE_OFFSET = 2,
	COPY_4_BYTE_OFFSET = 3
};
155 
156 #endif  /* CSNAPPY_INTERNAL_H_ */
157