/* SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
 * files (the "Software"), to deal in the Software without
 * restriction, including without limitation the rights to use, copy,
 * modify, merge, publish, distribute, sublicense, and/or sell copies
 * of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
 * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
 * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 *
 * Copyright:
 *   2017      Evan Nemerson <evan@nemerson.com>
 *   2020      Hidayat Khan <huk2209@gmail.com>
 */

#if !defined(SIMDE_X86_SSE4_2_H)
#define SIMDE_X86_SSE4_2_H

#include "sse4.1.h"

HEDLEY_DIAGNOSTIC_PUSH
SIMDE_DISABLE_UNWANTED_DIAGNOSTICS
SIMDE_BEGIN_DECLS_
#if defined(SIMDE_X86_SSE4_2_NATIVE)
  #define SIMDE_SIDD_UBYTE_OPS _SIDD_UBYTE_OPS
  #define SIMDE_SIDD_UWORD_OPS _SIDD_UWORD_OPS
  #define SIMDE_SIDD_SBYTE_OPS _SIDD_SBYTE_OPS
  #define SIMDE_SIDD_SWORD_OPS _SIDD_SWORD_OPS
  #define SIMDE_SIDD_CMP_EQUAL_ANY _SIDD_CMP_EQUAL_ANY
  #define SIMDE_SIDD_CMP_RANGES _SIDD_CMP_RANGES
  #define SIMDE_SIDD_CMP_EQUAL_EACH _SIDD_CMP_EQUAL_EACH
  #define SIMDE_SIDD_CMP_EQUAL_ORDERED _SIDD_CMP_EQUAL_ORDERED
  #define SIMDE_SIDD_POSITIVE_POLARITY _SIDD_POSITIVE_POLARITY
  #define SIMDE_SIDD_NEGATIVE_POLARITY _SIDD_NEGATIVE_POLARITY
  #define SIMDE_SIDD_MASKED_POSITIVE_POLARITY _SIDD_MASKED_POSITIVE_POLARITY
  #define SIMDE_SIDD_MASKED_NEGATIVE_POLARITY _SIDD_MASKED_NEGATIVE_POLARITY
  #define SIMDE_SIDD_LEAST_SIGNIFICANT _SIDD_LEAST_SIGNIFICANT
  #define SIMDE_SIDD_MOST_SIGNIFICANT _SIDD_MOST_SIGNIFICANT
  #define SIMDE_SIDD_BIT_MASK _SIDD_BIT_MASK
  #define SIMDE_SIDD_UNIT_MASK _SIDD_UNIT_MASK
#else
  #define SIMDE_SIDD_UBYTE_OPS 0x00
  #define SIMDE_SIDD_UWORD_OPS 0x01
  #define SIMDE_SIDD_SBYTE_OPS 0x02
  #define SIMDE_SIDD_SWORD_OPS 0x03
  #define SIMDE_SIDD_CMP_EQUAL_ANY 0x00
  #define SIMDE_SIDD_CMP_RANGES 0x04
  #define SIMDE_SIDD_CMP_EQUAL_EACH 0x08
  #define SIMDE_SIDD_CMP_EQUAL_ORDERED 0x0c
  #define SIMDE_SIDD_POSITIVE_POLARITY 0x00
  #define SIMDE_SIDD_NEGATIVE_POLARITY 0x10
  #define SIMDE_SIDD_MASKED_POSITIVE_POLARITY 0x20
  #define SIMDE_SIDD_MASKED_NEGATIVE_POLARITY 0x30
  #define SIMDE_SIDD_LEAST_SIGNIFICANT 0x00
  #define SIMDE_SIDD_MOST_SIGNIFICANT 0x40
  #define SIMDE_SIDD_BIT_MASK 0x00
  #define SIMDE_SIDD_UNIT_MASK 0x40
#endif
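
/* The imm8 control byte for the SSE4.2 string-compare intrinsics is built by
 * OR-ing together one value from each group above: element type, aggregation
 * operation, polarity, and output selection. For example (illustrative only,
 * not part of this header's API):
 *
 *   const int imm8 = SIMDE_SIDD_UBYTE_OPS | SIMDE_SIDD_CMP_EQUAL_EACH |
 *                    SIMDE_SIDD_MOST_SIGNIFICANT;
 */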

#if defined(SIMDE_X86_SSE4_2_ENABLE_NATIVE_ALIASES) && !defined(_SIDD_UBYTE_OPS)
  #define _SIDD_UBYTE_OPS SIMDE_SIDD_UBYTE_OPS
  #define _SIDD_UWORD_OPS SIMDE_SIDD_UWORD_OPS
  #define _SIDD_SBYTE_OPS SIMDE_SIDD_SBYTE_OPS
  #define _SIDD_SWORD_OPS SIMDE_SIDD_SWORD_OPS
  #define _SIDD_CMP_EQUAL_ANY SIMDE_SIDD_CMP_EQUAL_ANY
  #define _SIDD_CMP_RANGES SIMDE_SIDD_CMP_RANGES
  #define _SIDD_CMP_EQUAL_EACH SIMDE_SIDD_CMP_EQUAL_EACH
  #define _SIDD_CMP_EQUAL_ORDERED SIMDE_SIDD_CMP_EQUAL_ORDERED
  #define _SIDD_POSITIVE_POLARITY SIMDE_SIDD_POSITIVE_POLARITY
  #define _SIDD_NEGATIVE_POLARITY SIMDE_SIDD_NEGATIVE_POLARITY
  #define _SIDD_MASKED_POSITIVE_POLARITY SIMDE_SIDD_MASKED_POSITIVE_POLARITY
  #define _SIDD_MASKED_NEGATIVE_POLARITY SIMDE_SIDD_MASKED_NEGATIVE_POLARITY
  #define _SIDD_LEAST_SIGNIFICANT SIMDE_SIDD_LEAST_SIGNIFICANT
  #define _SIDD_MOST_SIGNIFICANT SIMDE_SIDD_MOST_SIGNIFICANT
  #define _SIDD_BIT_MASK SIMDE_SIDD_BIT_MASK
  #define _SIDD_UNIT_MASK SIMDE_SIDD_UNIT_MASK
#endif

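/* simde_mm_cmpestrs implements _mm_cmpestrs: it returns 1 when the explicit
 * length la means string a ends inside the 128-bit register, i.e. when
 * la < 16 for byte elements or la < 8 for word elements. In this fallback
 * only la and the element-size bit of imm8 affect the result; a, b and lb
 * are unused. */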
SIMDE_FUNCTION_ATTRIBUTES
int simde_mm_cmpestrs (simde__m128i a, int la, simde__m128i b, int lb, const int imm8)
    SIMDE_REQUIRE_CONSTANT_RANGE(imm8, 0, 127) {
  #if !defined(HEDLEY_PGI_VERSION)
    /* https://www.pgroup.com/userforum/viewtopic.php?f=4&p=27590&sid=cf89f8bf30be801831fe4a2ff0a2fa6c */
    (void) a;
    (void) b;
  #endif
  (void) la;
  (void) lb;
  return la <= ((128 / ((imm8 & SIMDE_SIDD_UWORD_OPS) ? 16 : 8)) - 1);
}
#if defined(SIMDE_X86_SSE4_2_NATIVE)
  #define simde_mm_cmpestrs(a, la, b, lb, imm8) _mm_cmpestrs(a, la, b, lb, imm8)
#endif
#if defined(SIMDE_X86_SSE4_2_ENABLE_NATIVE_ALIASES)
  #undef _mm_cmpestrs
  #define _mm_cmpestrs(a, la, b, lb, imm8) simde_mm_cmpestrs(a, la, b, lb, imm8)
#endif

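/* simde_mm_cmpestrz is the analogous check on the second operand: it returns
 * 1 when the explicit length lb means string b ends inside the register
 * (lb < 16 for bytes, lb < 8 for words). */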
SIMDE_FUNCTION_ATTRIBUTES
int simde_mm_cmpestrz (simde__m128i a, int la, simde__m128i b, int lb, const int imm8)
    SIMDE_REQUIRE_CONSTANT_RANGE(imm8, 0, 127) {
  #if !defined(HEDLEY_PGI_VERSION)
    /* https://www.pgroup.com/userforum/viewtopic.php?f=4&p=27590&sid=cf89f8bf30be801831fe4a2ff0a2fa6c */
    (void) a;
    (void) b;
  #endif
  (void) la;
  (void) lb;
  return lb <= ((128 / ((imm8 & SIMDE_SIDD_UWORD_OPS) ? 16 : 8)) - 1);
}
#if defined(SIMDE_X86_SSE4_2_NATIVE)
  #define simde_mm_cmpestrz(a, la, b, lb, imm8) _mm_cmpestrz(a, la, b, lb, imm8)
#endif
#if defined(SIMDE_X86_SSE4_2_ENABLE_NATIVE_ALIASES)
  #undef _mm_cmpestrz
  #define _mm_cmpestrz(a, la, b, lb, imm8) simde_mm_cmpestrz(a, la, b, lb, imm8)
#endif

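/* 64-bit signed greater-than compare. Each 64-bit lane of the result is all
 * ones where a > b and all zeros elsewhere. Usage sketch (illustrative):
 *
 *   simde__m128i a = simde_mm_set_epi64x(2, -1);
 *   simde__m128i b = simde_mm_set_epi64x(1,  0);
 *   simde__m128i m = simde_mm_cmpgt_epi64(a, b); // high lane ~0, low lane 0
 */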
SIMDE_FUNCTION_ATTRIBUTES
simde__m128i
simde_mm_cmpgt_epi64 (simde__m128i a, simde__m128i b) {
#if defined(SIMDE_X86_SSE4_2_NATIVE)
  return _mm_cmpgt_epi64(a, b);
#else
  simde__m128i_private
    r_,
    a_ = simde__m128i_to_private(a),
    b_ = simde__m128i_to_private(b);

  #if defined(SIMDE_ARM_NEON_A64V8_NATIVE)
    r_.neon_u64 = vcgtq_s64(a_.neon_i64, b_.neon_i64);
  #elif defined(SIMDE_ARM_NEON_A32V7_NATIVE)
    /* ARMv7 lacks vcgtq_s64. This is based off of Clang's SSE2 polyfill:
     * (a > b) -> ((a_hi > b_hi) || (a_lo > b_lo && a_hi == b_hi)) */

    /* Mask the sign bit out since we need a signed AND an unsigned comparison
     * and it is ugly to try and split them. */
    int32x4_t mask   = vreinterpretq_s32_s64(vdupq_n_s64(0x80000000ull));
    int32x4_t a_mask = veorq_s32(a_.neon_i32, mask);
    int32x4_t b_mask = veorq_s32(b_.neon_i32, mask);
    /* Check if a > b */
    int64x2_t greater = vreinterpretq_s64_u32(vcgtq_s32(a_mask, b_mask));
    /* Copy upper mask to lower mask: a_hi > b_hi */
    int64x2_t gt_hi = vshrq_n_s64(greater, 63);
    /* Copy lower mask to upper mask: a_lo > b_lo */
    int64x2_t gt_lo = vsliq_n_s64(greater, greater, 32);
    /* Compare for equality */
    int64x2_t equal = vreinterpretq_s64_u32(vceqq_s32(a_mask, b_mask));
    /* Copy upper mask to lower mask: a_hi == b_hi */
    int64x2_t eq_hi = vshrq_n_s64(equal, 63);
    /* a_hi > b_hi || (a_lo > b_lo && a_hi == b_hi) */
    int64x2_t ret = vorrq_s64(gt_hi, vandq_s64(gt_lo, eq_hi));
    r_.neon_i64 = ret;
  #elif defined(SIMDE_POWER_ALTIVEC_P8_NATIVE)
    r_.altivec_u64 = HEDLEY_REINTERPRET_CAST(SIMDE_POWER_ALTIVEC_VECTOR(unsigned long long), vec_cmpgt(a_.altivec_i64, b_.altivec_i64));
  #elif defined(SIMDE_VECTOR_SUBSCRIPT_OPS)
    r_.i64 = HEDLEY_STATIC_CAST(__typeof__(r_.i64), a_.i64 > b_.i64);
  #else
    SIMDE_VECTORIZE
    for (size_t i = 0 ; i < (sizeof(r_.i64) / sizeof(r_.i64[0])) ; i++) {
      r_.i64[i] = (a_.i64[i] > b_.i64[i]) ? ~INT64_C(0) : INT64_C(0);
    }
  #endif

  return simde__m128i_from_private(r_);
#endif
}
#if defined(SIMDE_X86_SSE4_2_ENABLE_NATIVE_ALIASES)
  #undef _mm_cmpgt_epi64
  #define _mm_cmpgt_epi64(a, b) simde_mm_cmpgt_epi64(a, b)
#endif

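/* The implicit-length (cmpistr*) string intrinsics take their lengths from a
 * NUL terminator instead of explicit counts. The helpers below scan every
 * element of the operand and report whether any element is zero;
 * _mm_cmpistrs mirrors the SF flag, which is set when string a ends inside
 * the register. */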
SIMDE_FUNCTION_ATTRIBUTES
int
simde_mm_cmpistrs_8_(simde__m128i a) {
  simde__m128i_private a_ = simde__m128i_to_private(a);
  const int upper_bound = (128 / 8) - 1;
  int a_invalid = 0;
  SIMDE_VECTORIZE
  for (int i = 0 ; i <= upper_bound ; i++) {
    if (!a_.i8[i])
      a_invalid = 1;
  }
  return a_invalid;
}

SIMDE_FUNCTION_ATTRIBUTES
int
simde_mm_cmpistrs_16_(simde__m128i a) {
  simde__m128i_private a_ = simde__m128i_to_private(a);
  const int upper_bound = (128 / 16) - 1;
  int a_invalid = 0;
  SIMDE_VECTORIZE
  for (int i = 0 ; i <= upper_bound ; i++) {
    if (!a_.i16[i])
      a_invalid = 1;
  }
  return a_invalid;
}

#if defined(SIMDE_X86_SSE4_2_NATIVE)
  #define simde_mm_cmpistrs(a, b, imm8) _mm_cmpistrs(a, b, imm8)
#else
  #define simde_mm_cmpistrs(a, b, imm8) \
     (((imm8) & SIMDE_SIDD_UWORD_OPS) \
       ? simde_mm_cmpistrs_16_((a)) \
       : simde_mm_cmpistrs_8_((a)))
#endif
#if defined(SIMDE_X86_SSE4_2_ENABLE_NATIVE_ALIASES)
  #undef _mm_cmpistrs
  #define _mm_cmpistrs(a, b, imm8) simde_mm_cmpistrs(a, b, imm8)
#endif

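/* simde_mm_cmpistrz performs the same scan on the second operand b, mirroring
 * the ZF flag of the native instruction (set when string b ends inside the
 * register). */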
SIMDE_FUNCTION_ATTRIBUTES
int
simde_mm_cmpistrz_8_(simde__m128i b) {
  simde__m128i_private b_ = simde__m128i_to_private(b);
  const int upper_bound = (128 / 8) - 1;
  int b_invalid = 0;
  SIMDE_VECTORIZE
  for (int i = 0 ; i <= upper_bound ; i++) {
    if (!b_.i8[i])
      b_invalid = 1;
  }
  return b_invalid;
}

SIMDE_FUNCTION_ATTRIBUTES
int
simde_mm_cmpistrz_16_(simde__m128i b) {
  simde__m128i_private b_ = simde__m128i_to_private(b);
  const int upper_bound = (128 / 16) - 1;
  int b_invalid = 0;
  SIMDE_VECTORIZE
  for (int i = 0 ; i <= upper_bound ; i++) {
    if (!b_.i16[i])
      b_invalid = 1;
  }
  return b_invalid;
}

#if defined(SIMDE_X86_SSE4_2_NATIVE)
  #define simde_mm_cmpistrz(a, b, imm8) _mm_cmpistrz(a, b, imm8)
#else
  #define simde_mm_cmpistrz(a, b, imm8) \
     (((imm8) & SIMDE_SIDD_UWORD_OPS) \
       ? simde_mm_cmpistrz_16_((b)) \
       : simde_mm_cmpistrz_8_((b)))
#endif
#if defined(SIMDE_X86_SSE4_2_ENABLE_NATIVE_ALIASES)
  #undef _mm_cmpistrz
  #define _mm_cmpistrz(a, b, imm8) simde_mm_cmpistrz(a, b, imm8)
#endif

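/* The crc32 intrinsics compute CRC-32C (Castagnoli), not the zlib/IEEE
 * CRC-32. The fallback below is the classic reflected bit-at-a-time loop:
 * 0x82f63b78 is the bit-reversed form of the CRC-32C polynomial 0x1EDC6F41. */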
SIMDE_FUNCTION_ATTRIBUTES
uint32_t
simde_mm_crc32_u8(uint32_t prevcrc, uint8_t v) {
  #if defined(SIMDE_X86_SSE4_2_NATIVE)
    return _mm_crc32_u8(prevcrc, v);
  #else
    uint32_t crc = prevcrc;
    crc ^= v;
    for (int bit = 0 ; bit < 8 ; bit++) {
      if (crc & 1)
        crc = (crc >> 1) ^ UINT32_C(0x82f63b78);
      else
        crc = (crc >> 1);
    }
    return crc;
  #endif
}
#if defined(SIMDE_X86_SSE4_2_ENABLE_NATIVE_ALIASES)
  #define _mm_crc32_u8(prevcrc, v) simde_mm_crc32_u8(prevcrc, v)
#endif

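/* The wider variants below fold the value in least-significant byte first,
 * matching the hardware instruction and the reflected (LSB-first) CRC
 * definition. A whole-buffer CRC-32C is conventionally computed with the
 * state pre- and post-inverted; an illustrative helper (an assumption, not
 * part of SIMDe):
 *
 *   uint32_t crc32c(const uint8_t *buf, size_t len) {
 *     uint32_t crc = ~UINT32_C(0);
 *     for (size_t i = 0 ; i < len ; i++)
 *       crc = simde_mm_crc32_u8(crc, buf[i]);
 *     return ~crc;
 *   }
 */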
SIMDE_FUNCTION_ATTRIBUTES
uint32_t
simde_mm_crc32_u16(uint32_t prevcrc, uint16_t v) {
  #if defined(SIMDE_X86_SSE4_2_NATIVE)
    return _mm_crc32_u16(prevcrc, v);
  #else
    uint32_t crc = prevcrc;
    crc = simde_mm_crc32_u8(crc, v & 0xff);
    crc = simde_mm_crc32_u8(crc, (v >> 8) & 0xff);
    return crc;
  #endif
}
#if defined(SIMDE_X86_SSE4_2_ENABLE_NATIVE_ALIASES)
  #define _mm_crc32_u16(prevcrc, v) simde_mm_crc32_u16(prevcrc, v)
#endif

SIMDE_FUNCTION_ATTRIBUTES
uint32_t
simde_mm_crc32_u32(uint32_t prevcrc, uint32_t v) {
  #if defined(SIMDE_X86_SSE4_2_NATIVE)
    return _mm_crc32_u32(prevcrc, v);
  #else
    uint32_t crc = prevcrc;
    crc = simde_mm_crc32_u16(crc, v & 0xffff);
    crc = simde_mm_crc32_u16(crc, (v >> 16) & 0xffff);
    return crc;
  #endif
}
#if defined(SIMDE_X86_SSE4_2_ENABLE_NATIVE_ALIASES)
  #define _mm_crc32_u32(prevcrc, v) simde_mm_crc32_u32(prevcrc, v)
#endif

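/* The 64-bit variant still produces a 32-bit CRC; the upper half of the
 * returned uint64_t is always zero, as with the native _mm_crc32_u64. */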
SIMDE_FUNCTION_ATTRIBUTES
uint64_t
simde_mm_crc32_u64(uint64_t prevcrc, uint64_t v) {
  /* _mm_crc32_u64 is only available in 64-bit mode, so fall through to the
   * portable path on 32-bit x86. */
  #if defined(SIMDE_X86_SSE4_2_NATIVE) && defined(SIMDE_ARCH_AMD64)
    return _mm_crc32_u64(prevcrc, v);
  #else
    uint64_t crc = prevcrc;
    crc = simde_mm_crc32_u32(HEDLEY_STATIC_CAST(uint32_t, crc), v & 0xffffffff);
    crc = simde_mm_crc32_u32(HEDLEY_STATIC_CAST(uint32_t, crc), (v >> 32) & 0xffffffff);
    return crc;
  #endif
}
#if defined(SIMDE_X86_SSE4_2_ENABLE_NATIVE_ALIASES)
  #define _mm_crc32_u64(prevcrc, v) simde_mm_crc32_u64(prevcrc, v)
#endif

SIMDE_END_DECLS_

HEDLEY_DIAGNOSTIC_POP

#endif /* !defined(SIMDE_X86_SSE4_2_H) */