1 /*
2  * Copyright (c) 2015 James Almer
3  *
4  * This file is part of FFmpeg.
5  *
6  * FFmpeg is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2.1 of the License, or (at your option) any later version.
10  *
11  * FFmpeg is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with FFmpeg; if not, write to the Free Software
18  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19  */
20 
21 #ifndef AVUTIL_X86_INTMATH_H
22 #define AVUTIL_X86_INTMATH_H
23 
24 #include <stdint.h>
25 #include <stdlib.h>
26 #if HAVE_FAST_CLZ
27 #if defined(_MSC_VER)
28 #include <intrin.h>
29 #elif defined(__INTEL_COMPILER)
30 #include <immintrin.h>
31 #endif
32 #endif
33 #include "config.h"
34 
35 #if HAVE_FAST_CLZ
36 #if (defined(__INTEL_COMPILER) && (__INTEL_COMPILER>=1216)) || defined(_MSC_VER)
37 #   if defined(__INTEL_COMPILER)
38 #       define ff_log2(x) (_bit_scan_reverse((x)|1))
39 #   else
40 #       define ff_log2 ff_log2_x86
/**
 * floor(log2(v)) via the MSVC _BitScanReverse intrinsic.
 *
 * _BitScanReverse writes the bit index of the highest set bit of its input
 * into n; its result is undefined for a zero input, so v is OR'd with 1
 * first. This also makes ff_log2_x86(0) return 0, matching av_log2(0).
 */
static av_always_inline av_const int ff_log2_x86(unsigned int v)
{
    unsigned long n;
    _BitScanReverse(&n, v|1);
    return n;
}
47 #   endif
48 #   define ff_log2_16bit av_log2
49 
50 #if defined(__INTEL_COMPILER) || (defined(_MSC_VER) && (_MSC_VER >= 1700) && \
51                                   (defined(__BMI__) || !defined(__clang__)))
52 #   define ff_ctz(v) _tzcnt_u32(v)
53 
54 #   if ARCH_X86_64
55 #       define ff_ctzll(v) _tzcnt_u64(v)
56 #   else
57 #       define ff_ctzll ff_ctzll_x86
ff_ctzll_x86(long long v)58 static av_always_inline av_const int ff_ctzll_x86(long long v)
59 {
60     return ((uint32_t)v == 0) ? _tzcnt_u32((uint32_t)(v >> 32)) + 32 : _tzcnt_u32((uint32_t)v);
61 }
62 #   endif
#endif /* __INTEL_COMPILER || _MSC_VER >= 1700 */
64 
65 #endif /* __INTEL_COMPILER */
66 
67 #endif /* HAVE_FAST_CLZ */
68 
69 #if defined(__GNUC__)
70 
71 /* Our generic version of av_popcount is faster than GCC's built-in on
72  * CPUs that don't support the popcnt instruction.
73  */
74 #if defined(__POPCNT__)
75     #define av_popcount   __builtin_popcount
76 #if ARCH_X86_64
77     #define av_popcount64 __builtin_popcountll
78 #endif
79 
80 #endif /* __POPCNT__ */
81 
82 #if defined(__BMI2__)
83 
84 #if AV_GCC_VERSION_AT_LEAST(5,1)
85 #define av_mod_uintp2 __builtin_ia32_bzhi_si
86 #elif HAVE_INLINE_ASM
87 /* GCC releases before 5.1.0 have a broken bzhi builtin, so for those we
88  * implement it using inline assembly
89  */
90 #define av_mod_uintp2 av_mod_uintp2_bmi2
av_mod_uintp2_bmi2(unsigned a,unsigned p)91 static av_always_inline av_const unsigned av_mod_uintp2_bmi2(unsigned a, unsigned p)
92 {
93     if (av_builtin_constant_p(p))
94         return a & ((1 << p) - 1);
95     else {
96         unsigned x;
97         __asm__ ("bzhi %2, %1, %0 \n\t" : "=r"(x) : "rm"(a), "r"(p));
98         return x;
99     }
100 }
101 #endif /* AV_GCC_VERSION_AT_LEAST */
102 
103 #endif /* __BMI2__ */
104 
105 #if defined(__SSE2__) && !defined(__INTEL_COMPILER)
106 
107 #define av_clipd av_clipd_sse2
/**
 * Clip a double to the range [amin, amax] using the SSE2 scalar
 * min/max instructions instead of branches.
 *
 * With assertions enabled at level 2, an inverted range aborts;
 * otherwise the minsd/maxsd ordering below decides the result.
 * The "+&x" early-clobber keeps a's register distinct from the
 * amin/amax operands so the two-step clamp reads fresh values.
 */
static av_always_inline av_const double av_clipd_sse2(double a, double amin, double amax)
{
#if defined(ASSERT_LEVEL) && ASSERT_LEVEL >= 2
    if (amin > amax) abort();
#endif
    __asm__ ("minsd %2, %0 \n\t"
             "maxsd %1, %0 \n\t"
             : "+&x"(a) : "xm"(amin), "xm"(amax));
    return a;
}
118 
119 #endif /* __SSE2__ */
120 
121 #if defined(__SSE__) && !defined(__INTEL_COMPILER)
122 
123 #define av_clipf av_clipf_sse
/**
 * Clip a float to the range [amin, amax] using the SSE scalar
 * min/max instructions instead of branches.
 *
 * With assertions enabled at level 2, an inverted range aborts;
 * otherwise the minss/maxss ordering below decides the result.
 * The "+&x" early-clobber keeps a's register distinct from the
 * amin/amax operands so the two-step clamp reads fresh values.
 */
static av_always_inline av_const float av_clipf_sse(float a, float amin, float amax)
{
#if defined(ASSERT_LEVEL) && ASSERT_LEVEL >= 2
    if (amin > amax) abort();
#endif
    __asm__ ("minss %2, %0 \n\t"
             "maxss %1, %0 \n\t"
             : "+&x"(a) : "xm"(amin), "xm"(amax));
    return a;
}
134 
135 #endif /* __SSE__ */
136 
137 #endif /* __GNUC__ */
138 
139 #endif /* AVUTIL_X86_INTMATH_H */
140