#ifndef _ZBUILD_H
#define _ZBUILD_H

#define _POSIX_SOURCE 1  /* fileno */
#ifndef _POSIX_C_SOURCE
#  define _POSIX_C_SOURCE 200112L /* snprintf, posix_memalign */
#endif

#include <stddef.h>
#include <string.h>
#include <stdlib.h>
#include <stdint.h>

/* Determine which version of the C Standard the compiler supports */
#ifdef __STDC_VERSION__
#  if __STDC_VERSION__ >= 199901L
#    ifndef STDC99
#      define STDC99
#    endif
#  endif
#  if __STDC_VERSION__ >= 201112L
#    ifndef STDC11
#      define STDC11
#    endif
#  endif
#endif

/* Determine compiler support for TLS */
#ifndef Z_TLS
#  if defined(STDC11) && !defined(__STDC_NO_THREADS__)
#    define Z_TLS _Thread_local
#  elif defined(__GNUC__) || defined(__SUNPRO_C)
#    define Z_TLS __thread
#  elif defined(_WIN32) && (defined(_MSC_VER) || defined(__ICL))
#    define Z_TLS __declspec(thread)
#  else
#    warning Unable to detect Thread Local Storage support.
#    define Z_TLS
#  endif
#endif
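
/* Usage sketch (illustrative; per_thread_scratch is a hypothetical name):
       static Z_TLS uint32_t per_thread_scratch;
   gives each thread its own copy on compilers with TLS support; where Z_TLS
   expands to nothing, the variable is simply shared between threads. */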

/* This has to be the first include that defines any types */
#if defined(_MSC_VER)
#  if defined(_WIN64)
    typedef __int64 ssize_t;
#  else
    typedef long ssize_t;
#  endif
#endif

/* MS Visual Studio does not allow the inline keyword in C, only in C++.
   It provides __inline instead, so use that. */
#if defined(_MSC_VER) && !defined(inline) && !defined(__cplusplus)
#  define inline __inline
#endif

#if defined(ZLIB_COMPAT)
#  define PREFIX(x) x
#  define PREFIX2(x) ZLIB_ ## x
#  define PREFIX3(x) z_ ## x
#  define PREFIX4(x) x ## 64
#  define zVersion zlibVersion
#  define z_size_t unsigned long
#else
#  define PREFIX(x) zng_ ## x
#  define PREFIX2(x) ZLIBNG_ ## x
#  define PREFIX3(x) zng_ ## x
#  define PREFIX4(x) zng_ ## x
#  define zVersion zlibng_version
#  define z_size_t size_t
#endif
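
/* Expansion sketch (illustrative; the names below are placeholders, not
   symbols declared by this header). With ZLIB_COMPAT defined:
       PREFIX(inflate_example)  -> inflate_example
       PREFIX2(VERNUM_EXAMPLE)  -> ZLIB_VERNUM_EXAMPLE
       PREFIX4(tell_example)    -> tell_example64
   and without ZLIB_COMPAT:
       PREFIX(inflate_example)  -> zng_inflate_example
       PREFIX2(VERNUM_EXAMPLE)  -> ZLIBNG_VERNUM_EXAMPLE
       PREFIX4(tell_example)    -> zng_tell_example */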

/* Minimum of a and b. */
#define MIN(a, b) ((a) > (b) ? (b) : (a))
/* Maximum of a and b. */
#define MAX(a, b) ((a) < (b) ? (b) : (a))
/* Ignore unused variable warning */
#define Z_UNUSED(var) (void)(var)

#if defined(HAVE_VISIBILITY_INTERNAL)
#  define Z_INTERNAL __attribute__((visibility ("internal")))
#elif defined(HAVE_VISIBILITY_HIDDEN)
#  define Z_INTERNAL __attribute__((visibility ("hidden")))
#else
#  define Z_INTERNAL
#endif
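
/* Usage sketch (illustrative; internal_helper is a hypothetical name):
       Z_INTERNAL void internal_helper(void);
   keeps the symbol out of the shared library's exported interface when the
   build defines HAVE_VISIBILITY_INTERNAL or HAVE_VISIBILITY_HIDDEN, and has
   no effect otherwise. */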

#ifndef __cplusplus
#  define Z_REGISTER register
#else
#  define Z_REGISTER
#endif

/* Reverse the bytes in a value. Use compiler intrinsics when
   possible to take advantage of hardware implementations. */
#if defined(_MSC_VER) && (_MSC_VER >= 1300)
#  include <stdlib.h>
#  pragma intrinsic(_byteswap_ulong)
#  define ZSWAP16(q) _byteswap_ushort(q)
#  define ZSWAP32(q) _byteswap_ulong(q)
#  define ZSWAP64(q) _byteswap_uint64(q)

#elif defined(__clang__) || (defined(__GNUC__) && \
        (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)))
#  define ZSWAP16(q) __builtin_bswap16(q)
#  define ZSWAP32(q) __builtin_bswap32(q)
#  define ZSWAP64(q) __builtin_bswap64(q)

#elif defined(__GNUC__) && (__GNUC__ >= 2) && defined(__linux__)
#  include <byteswap.h>
#  define ZSWAP16(q) bswap_16(q)
#  define ZSWAP32(q) bswap_32(q)
#  define ZSWAP64(q) bswap_64(q)

#elif defined(__FreeBSD__) || defined(__NetBSD__) || defined(__DragonFly__)
#  include <sys/endian.h>
#  define ZSWAP16(q) bswap16(q)
#  define ZSWAP32(q) bswap32(q)
#  define ZSWAP64(q) bswap64(q)
#elif defined(__OpenBSD__)
#  include <sys/endian.h>
#  define ZSWAP16(q) swap16(q)
#  define ZSWAP32(q) swap32(q)
#  define ZSWAP64(q) swap64(q)
#elif defined(__INTEL_COMPILER)
/* ICC does not provide a two-byte swap. */
#  define ZSWAP16(q) ((((q) & 0xff) << 8) | (((q) & 0xff00) >> 8))
#  define ZSWAP32(q) _bswap(q)
#  define ZSWAP64(q) _bswap64(q)

#else
#  define ZSWAP16(q) ((((q) & 0xff) << 8) | (((q) & 0xff00) >> 8))
#  define ZSWAP32(q) ((((q) >> 24) & 0xff) + (((q) >> 8) & 0xff00) + \
                     (((q) & 0xff00) << 8) + (((q) & 0xff) << 24))
#  define ZSWAP64(q)                             \
         ((((q) & 0xFF00000000000000u) >> 56u) | \
          (((q) & 0x00FF000000000000u) >> 40u) | \
          (((q) & 0x0000FF0000000000u) >> 24u) | \
          (((q) & 0x000000FF00000000u) >> 8u)  | \
          (((q) & 0x00000000FF000000u) << 8u)  | \
          (((q) & 0x0000000000FF0000u) << 24u) | \
          (((q) & 0x000000000000FF00u) << 40u) | \
          (((q) & 0x00000000000000FFu) << 56u))
#endif
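
/* Worked example (illustrative): whichever branch above is selected,
       ZSWAP16(0x1234)     == 0x3412
       ZSWAP32(0x12345678) == 0x78563412
   i.e. the byte order of the value is reversed. */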

/* Only enable likely/unlikely if the compiler is known to support it */
#if (defined(__GNUC__) && (__GNUC__ >= 3)) || defined(__INTEL_COMPILER) || defined(__clang__)
#  define LIKELY_NULL(x)        __builtin_expect((x) != 0, 0)
#  define LIKELY(x)             __builtin_expect(!!(x), 1)
#  define UNLIKELY(x)           __builtin_expect(!!(x), 0)
#  define PREFETCH_L1(addr)     __builtin_prefetch(addr, 0, 3)
#  define PREFETCH_L2(addr)     __builtin_prefetch(addr, 0, 2)
#  define PREFETCH_RW(addr)     __builtin_prefetch(addr, 1, 2)
#elif defined(__WIN__)
#  include <xmmintrin.h>
#  define LIKELY_NULL(x)        x
#  define LIKELY(x)             x
#  define UNLIKELY(x)           x
#  define PREFETCH_L1(addr)     _mm_prefetch((char *) addr, _MM_HINT_T0)
#  define PREFETCH_L2(addr)     _mm_prefetch((char *) addr, _MM_HINT_T1)
#  define PREFETCH_RW(addr)     _mm_prefetch((char *) addr, _MM_HINT_T1)
#else
#  define LIKELY_NULL(x)        x
#  define LIKELY(x)             x
#  define UNLIKELY(x)           x
#  define PREFETCH_L1(addr)     addr
#  define PREFETCH_L2(addr)     addr
#  define PREFETCH_RW(addr)     addr
#endif /* (un)likely */
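
/* Usage sketch (illustrative; buf and next are hypothetical locals):
       if (UNLIKELY(buf == NULL))
           return;
       PREFETCH_L1(next);
   UNLIKELY marks the error path as rarely taken and PREFETCH_L1 requests an
   early load of next. These are optimization hints only; where the compiler
   lacks support the macros reduce to their argument and behavior is
   unchanged. */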

#if defined(__clang__) || defined(__GNUC__)
#  define ALIGNED_(x) __attribute__ ((aligned(x)))
#elif defined(_MSC_VER)
#  define ALIGNED_(x) __declspec(align(x))
#endif

/* Diagnostic functions */
#ifdef ZLIB_DEBUG
#  include <stdio.h>
   extern int Z_INTERNAL z_verbose;
   extern void Z_INTERNAL z_error(char *m);
#  define Assert(cond, msg) {if (!(cond)) z_error(msg);}
#  define Trace(x) {if (z_verbose >= 0) fprintf x;}
#  define Tracev(x) {if (z_verbose > 0) fprintf x;}
#  define Tracevv(x) {if (z_verbose > 1) fprintf x;}
#  define Tracec(c, x) {if (z_verbose > 0 && (c)) fprintf x;}
#  define Tracecv(c, x) {if (z_verbose > 1 && (c)) fprintf x;}
#else
#  define Assert(cond, msg)
#  define Trace(x)
#  define Tracev(x)
#  define Tracevv(x)
#  define Tracec(c, x)
#  define Tracecv(c, x)
#endif
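
/* Usage sketch (illustrative; s is a hypothetical state pointer):
       Assert(s != NULL, "stream state is NULL");
       Tracev((stderr, "example: block processed\n"));
   The Trace* macros take the full fprintf argument list in an extra set of
   parentheses; in non-debug builds all of these expand to nothing. */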

#ifndef NO_UNALIGNED
#  if defined(__x86_64__) || defined(_M_X64) || defined(__amd64__) || defined(_M_AMD64)
#    define UNALIGNED_OK
#    define UNALIGNED64_OK
#  elif defined(__i386__) || defined(__i486__) || defined(__i586__) || \
        defined(__i686__) || defined(_X86_) || defined(_M_IX86)
#    define UNALIGNED_OK
#  elif defined(__aarch64__) || defined(_M_ARM64)
#    if (defined(__GNUC__) && defined(__ARM_FEATURE_UNALIGNED)) || !defined(__GNUC__)
#      define UNALIGNED_OK
#      define UNALIGNED64_OK
#    endif
#  elif defined(__arm__) || (_M_ARM >= 7)
#    if (defined(__GNUC__) && defined(__ARM_FEATURE_UNALIGNED)) || !defined(__GNUC__)
#      define UNALIGNED_OK
#    endif
#  elif defined(__powerpc64__) || defined(__ppc64__)
#    if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
#      define UNALIGNED_OK
#      define UNALIGNED64_OK
#    endif
#  endif
#endif

/* Force the compiler to emit unaligned memory accesses when the architecture
   supports them; otherwise do not assume unaligned access is safe. Older
   compilers do not turn memcpy and memcmp calls into unaligned access
   instructions even where the architecture supports them, which has a
   significant performance impact. Newer compilers may optimize memcpy this
   way, but not all of them optimize memcmp for every integer size. */
#ifdef UNALIGNED_OK
#  define zmemcpy_2(dest, src)    (*((uint16_t *)(dest)) = *((uint16_t *)(src)))
#  define zmemcmp_2(str1, str2)   (*((uint16_t *)(str1)) != *((uint16_t *)(str2)))
#  define zmemcpy_4(dest, src)    (*((uint32_t *)(dest)) = *((uint32_t *)(src)))
#  define zmemcmp_4(str1, str2)   (*((uint32_t *)(str1)) != *((uint32_t *)(str2)))
#  if defined(UNALIGNED64_OK) && (UINTPTR_MAX == UINT64_MAX)
#    define zmemcpy_8(dest, src)  (*((uint64_t *)(dest)) = *((uint64_t *)(src)))
#    define zmemcmp_8(str1, str2) (*((uint64_t *)(str1)) != *((uint64_t *)(str2)))
#  else
#    define zmemcpy_8(dest, src)  (((uint32_t *)(dest))[0] = ((uint32_t *)(src))[0], \
                                   ((uint32_t *)(dest))[1] = ((uint32_t *)(src))[1])
#    define zmemcmp_8(str1, str2) (((uint32_t *)(str1))[0] != ((uint32_t *)(str2))[0] || \
                                   ((uint32_t *)(str1))[1] != ((uint32_t *)(str2))[1])
#  endif
#else
#  define zmemcpy_2(dest, src)  memcpy(dest, src, 2)
#  define zmemcmp_2(str1, str2) memcmp(str1, str2, 2)
#  define zmemcpy_4(dest, src)  memcpy(dest, src, 4)
#  define zmemcmp_4(str1, str2) memcmp(str1, str2, 4)
#  define zmemcpy_8(dest, src)  memcpy(dest, src, 8)
#  define zmemcmp_8(str1, str2) memcmp(str1, str2, 8)
#endif
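
/* Usage sketch (illustrative; scan, match, dst and src are hypothetical
   byte pointers):
       if (zmemcmp_2(scan, match) == 0)
           zmemcpy_4(dst, src);
   compares the first two bytes and, if they match, copies four bytes.
   Like memcmp, a zero result means equal; unlike memcmp, the nonzero result
   carries no ordering information on the unaligned fast path. */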

#if defined(__has_feature)
#  if __has_feature(memory_sanitizer)
#    define Z_MEMORY_SANITIZER 1
#    include <sanitizer/msan_interface.h>
#  endif
#endif
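
/* Usage sketch (illustrative; buf and len are hypothetical): under
   MemorySanitizer, code can mark bytes it knows are initialized, e.g. after
   a store MSan cannot track:
       #ifdef Z_MEMORY_SANITIZER
           __msan_unpoison(buf, len);
       #endif
*/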

#endif