#ifndef _ZBUILD_H
#define _ZBUILD_H

#define _POSIX_SOURCE 1 /* fileno */
#ifndef _POSIX_C_SOURCE
# define _POSIX_C_SOURCE 200809L /* snprintf, posix_memalign, strdup */
#endif
#ifndef _ISOC11_SOURCE
# define _ISOC11_SOURCE 1 /* aligned_alloc */
#endif
#ifdef __OpenBSD__
# define _BSD_SOURCE 1
#endif

#include <stddef.h>
#include <string.h>
#include <stdlib.h>
#include <stdint.h>

/* Determine the C standard version supported by the compiler */
#ifdef __STDC_VERSION__
# if __STDC_VERSION__ >= 199901L
#  ifndef STDC99
#   define STDC99
#  endif
# endif
# if __STDC_VERSION__ >= 201112L
#  ifndef STDC11
#   define STDC11
#  endif
# endif
#endif

#ifndef Z_HAS_ATTRIBUTE
# if defined(__has_attribute)
#  define Z_HAS_ATTRIBUTE(a) __has_attribute(a)
# else
#  define Z_HAS_ATTRIBUTE(a) 0
# endif
#endif

#ifndef Z_FALLTHROUGH
# if Z_HAS_ATTRIBUTE(__fallthrough__) || (defined(__GNUC__) && (__GNUC__ >= 7))
#  define Z_FALLTHROUGH __attribute__((__fallthrough__))
# else
#  define Z_FALLTHROUGH do {} while(0) /* fallthrough */
# endif
#endif

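/* Illustrative use (not taken from this codebase): inside a switch,
 *     case 1: state = STATE_NEXT; Z_FALLTHROUGH;
 *     case 2: ...
 * documents the intentional fall through and silences -Wimplicit-fallthrough
 * on compilers that support the attribute. */
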
#ifndef Z_TARGET
# if Z_HAS_ATTRIBUTE(__target__)
#  define Z_TARGET(x) __attribute__((__target__(x)))
# else
#  define Z_TARGET(x)
# endif
#endif

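/* Illustrative use (function name is hypothetical):
 *     Z_TARGET("avx2") void copy_block_avx2(uint8_t *dst, const uint8_t *src, size_t len);
 * asks GCC/Clang to compile just that function with AVX2 enabled while the
 * rest of the translation unit keeps the baseline instruction set; with other
 * compilers the macro expands to nothing. */
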
/* This has to be the first include that defines any types */
#if defined(_MSC_VER)
# if defined(_WIN64)
   typedef __int64 ssize_t;
# else
   typedef long ssize_t;
# endif

# if defined(_WIN64)
#  define SSIZE_MAX _I64_MAX
# else
#  define SSIZE_MAX LONG_MAX
# endif
#endif

/* MS Visual Studio does not allow inline in C, only C++.
   But it provides __inline instead, so use that. */
#if defined(_MSC_VER) && !defined(inline) && !defined(__cplusplus)
# define inline __inline
#endif

#if defined(ZLIB_COMPAT)
# define PREFIX(x) x
# define PREFIX2(x) ZLIB_ ## x
# define PREFIX3(x) z_ ## x
# define PREFIX4(x) x ## 64
# define zVersion zlibVersion
#else
# define PREFIX(x) zng_ ## x
# define PREFIX2(x) ZLIBNG_ ## x
# define PREFIX3(x) zng_ ## x
# define PREFIX4(x) zng_ ## x
# define zVersion zlibng_version
# define z_size_t size_t
#endif

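/* For example, PREFIX(deflate) expands to deflate when ZLIB_COMPAT is defined
 * and to zng_deflate otherwise, so the same source builds either the
 * zlib-compatible or the native zlib-ng API. */
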
/* In zlib-compat mode some functions and types use unsigned long, but zlib-ng uses size_t */
#if defined(ZLIB_COMPAT)
# define z_uintmax_t unsigned long
#else
# define z_uintmax_t size_t
#endif

/* Minimum of a and b. */
#define MIN(a, b) ((a) > (b) ? (b) : (a))
/* Maximum of a and b. */
#define MAX(a, b) ((a) < (b) ? (b) : (a))
/* Ignore unused variable warning */
#define Z_UNUSED(var) (void)(var)

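/* Note (illustrative): both MIN and MAX evaluate their arguments more than
 * once, so MIN(len, left) is fine while MIN(i++, n) is not. */
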
#if defined(HAVE_VISIBILITY_INTERNAL)
# define Z_INTERNAL __attribute__((visibility ("internal")))
#elif defined(HAVE_VISIBILITY_HIDDEN)
# define Z_INTERNAL __attribute__((visibility ("hidden")))
#else
# define Z_INTERNAL
#endif

/* Symbol versioning helpers, allowing multiple versions of a function to exist.
 * Functions using this must also be added to zlib-ng.map for each version.
 * Double @@ marks the default version that newly compiled applications link against.
 * Single @ marks a version kept only for backwards compatibility.
 * This is used only for the zlib-ng native API, and only on platforms that support it.
 */
#if defined(HAVE_SYMVER)
# define ZSYMVER(func,alias,ver) __asm__(".symver " func ", " alias "@ZLIB_NG_" ver);
# define ZSYMVER_DEF(func,alias,ver) __asm__(".symver " func ", " alias "@@ZLIB_NG_" ver);
#else
# define ZSYMVER(func,alias,ver)
# define ZSYMVER_DEF(func,alias,ver)
#endif

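/* Illustrative use (symbol names and version strings are hypothetical):
 *     ZSYMVER_DEF("zng_foo_v2", "zng_foo", "2.2")
 *     ZSYMVER("zng_foo_v1", "zng_foo", "2.0")
 * makes zng_foo_v2 the default zng_foo for newly linked applications while the
 * 2.0 version stays available to already-built binaries; both versions must
 * also appear in zlib-ng.map. */
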
#ifndef __cplusplus
# define Z_REGISTER register
#else
# define Z_REGISTER
#endif

/* Reverse the bytes in a value. Use compiler intrinsics when
   possible to take advantage of hardware implementations. */
#if defined(_MSC_VER) && (_MSC_VER >= 1300)
# include <stdlib.h>
# pragma intrinsic(_byteswap_ulong)
# define ZSWAP16(q) _byteswap_ushort(q)
# define ZSWAP32(q) _byteswap_ulong(q)
# define ZSWAP64(q) _byteswap_uint64(q)

#elif defined(__clang__) || (defined(__GNUC__) && \
      (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)))
# define ZSWAP16(q) __builtin_bswap16(q)
# define ZSWAP32(q) __builtin_bswap32(q)
# define ZSWAP64(q) __builtin_bswap64(q)

#elif defined(__GNUC__) && (__GNUC__ >= 2) && defined(__linux__)
# include <byteswap.h>
# define ZSWAP16(q) bswap_16(q)
# define ZSWAP32(q) bswap_32(q)
# define ZSWAP64(q) bswap_64(q)

#elif defined(__FreeBSD__) || defined(__NetBSD__) || defined(__DragonFly__)
# include <sys/endian.h>
# define ZSWAP16(q) bswap16(q)
# define ZSWAP32(q) bswap32(q)
# define ZSWAP64(q) bswap64(q)
#elif defined(__OpenBSD__)
# include <sys/endian.h>
# define ZSWAP16(q) swap16(q)
# define ZSWAP32(q) swap32(q)
# define ZSWAP64(q) swap64(q)
#elif defined(__INTEL_COMPILER)
/* ICC does not provide a two-byte swap. */
# define ZSWAP16(q) ((((q) & 0xff) << 8) | (((q) & 0xff00) >> 8))
# define ZSWAP32(q) _bswap(q)
# define ZSWAP64(q) _bswap64(q)

#else
# define ZSWAP16(q) ((((q) & 0xff) << 8) | (((q) & 0xff00) >> 8))
# define ZSWAP32(q) ((((q) >> 24) & 0xff) + (((q) >> 8) & 0xff00) + \
                     (((q) & 0xff00) << 8) + (((q) & 0xff) << 24))
# define ZSWAP64(q)                              \
         ((((q) & 0xFF00000000000000u) >> 56u) | \
          (((q) & 0x00FF000000000000u) >> 40u) | \
          (((q) & 0x0000FF0000000000u) >> 24u) | \
          (((q) & 0x000000FF00000000u) >> 8u)  | \
          (((q) & 0x00000000FF000000u) << 8u)  | \
          (((q) & 0x0000000000FF0000u) << 24u) | \
          (((q) & 0x000000000000FF00u) << 40u) | \
          (((q) & 0x00000000000000FFu) << 56u))
#endif

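/* Whichever branch is selected above, the results are identical, e.g.
 * ZSWAP16(0xAABB) == 0xBBAA and ZSWAP32(0x11223344) == 0x44332211. */
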
/* Only enable likely/unlikely if the compiler is known to support it */
#if (defined(__GNUC__) && (__GNUC__ >= 3)) || defined(__INTEL_COMPILER) || defined(__clang__)
# define LIKELY_NULL(x) __builtin_expect((x) != 0, 0)
# define LIKELY(x) __builtin_expect(!!(x), 1)
# define UNLIKELY(x) __builtin_expect(!!(x), 0)
#else
# define LIKELY_NULL(x) x
# define LIKELY(x) x
# define UNLIKELY(x) x
#endif /* (un)likely */

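/* Illustrative use (not a quote from zlib-ng code):
 *     if (UNLIKELY(strm == NULL)) return Z_STREAM_ERROR;
 * hints compilers that support __builtin_expect to lay out code for the
 * non-error path; elsewhere the hint simply disappears. */
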
#if defined(HAVE_ATTRIBUTE_ALIGNED)
# define ALIGNED_(x) __attribute__ ((aligned(x)))
#elif defined(_MSC_VER)
# define ALIGNED_(x) __declspec(align(x))
#else
/* TODO: Define ALIGNED_ for your compiler */
# define ALIGNED_(x)
#endif

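/* Illustrative use (buffer name is hypothetical):
 *     static ALIGNED_(64) uint8_t scratch[4096];
 * requests 64-byte alignment via __attribute__ on GCC/Clang and __declspec on
 * MSVC; with an unknown compiler the declaration still compiles, just without
 * the alignment guarantee. */
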
#ifdef HAVE_BUILTIN_ASSUME_ALIGNED
# define HINT_ALIGNED(p,n) __builtin_assume_aligned((void *)(p),(n))
#else
# define HINT_ALIGNED(p,n) (p)
#endif
#define HINT_ALIGNED_16(p) HINT_ALIGNED((p),16)
#define HINT_ALIGNED_64(p) HINT_ALIGNED((p),64)
#define HINT_ALIGNED_4096(p) HINT_ALIGNED((p),4096)

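/* Illustrative use (pointer names are hypothetical):
 *     uint8_t *win = (uint8_t *)HINT_ALIGNED_64(window);
 * lets the compiler assume win is 64-byte aligned and vectorize accordingly;
 * the cast is needed because __builtin_assume_aligned returns void *. */
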
/* PADSZ returns the number of bytes needed to pad bpos up to a multiple of pad.
 * PAD_NN adds that padding to bpos and returns the padded position.
 * Both accept either an integer or a pointer as the bpos argument.
 */
#define PADSZ(bpos, pad) (((pad) - ((uintptr_t)(bpos) % (pad))) % (pad))
#define PAD_16(bpos) ((bpos) + PADSZ((bpos),16))
#define PAD_64(bpos) ((bpos) + PADSZ((bpos),64))
#define PAD_4096(bpos) ((bpos) + PADSZ((bpos),4096))

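/* Worked example: for an address of 0x1003, PADSZ(0x1003, 16) is 13 and
 * PAD_16(0x1003) is 0x1010, the next 16-byte boundary; an already aligned
 * input needs 0 bytes of padding. */
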
/* Diagnostic functions */
#ifdef ZLIB_DEBUG
# include <stdio.h>
  extern int Z_INTERNAL z_verbose;
  extern void Z_INTERNAL z_error(const char *m);
# define Assert(cond, msg) {int _cond = (cond); if (!_cond) z_error(msg);}
# define Trace(x) {if (z_verbose >= 0) fprintf x;}
# define Tracev(x) {if (z_verbose > 0) fprintf x;}
# define Tracevv(x) {if (z_verbose > 1) fprintf x;}
# define Tracec(c, x) {if (z_verbose > 0 && (c)) fprintf x;}
# define Tracecv(c, x) {if (z_verbose > 1 && (c)) fprintf x;}
#else
# define Assert(cond, msg)
# define Trace(x)
# define Tracev(x)
# define Tracevv(x)
# define Tracec(c, x)
# define Tracecv(c, x)
#endif

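/* Illustrative use: the doubled parentheses pass a complete argument list to
 * fprintf, e.g.
 *     Tracev((stderr, "inflate: codes ok\n"));
 * prints only in ZLIB_DEBUG builds with z_verbose > 0 and otherwise expands to
 * nothing. */
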
/* OPTIMAL_CMP values determine the comparison width:
 * 64: Best for 64-bit architectures with unaligned access
 * 32: Best for 32-bit architectures with unaligned access
 * 16: Safe default for unknown architectures
 * 8: Safe fallback for architectures without unaligned access
 * Note: "unaligned access" here refers to CPU support. It lets the compiler or
 * dedicated unaligned-load intrinsics perform safe unaligned accesses, instead
 * of dereferencing misaligned C pointers, which is undefined behavior.
 */
#if !defined(OPTIMAL_CMP)
# if defined(__x86_64__) || defined(_M_X64) || defined(__amd64__) || defined(_M_AMD64)
#  define OPTIMAL_CMP 64
# elif defined(__i386__) || defined(__i486__) || defined(__i586__) || \
       defined(__i686__) || defined(_X86_) || defined(_M_IX86)
#  define OPTIMAL_CMP 32
# elif defined(__aarch64__) || defined(_M_ARM64) || defined(_M_ARM64EC)
#  if defined(__ARM_FEATURE_UNALIGNED) || defined(_WIN32)
#   define OPTIMAL_CMP 64
#  else
#   define OPTIMAL_CMP 8
#  endif
# elif defined(__arm__) || defined(_M_ARM)
#  if defined(__ARM_FEATURE_UNALIGNED) || defined(_WIN32)
#   define OPTIMAL_CMP 32
#  else
#   define OPTIMAL_CMP 8
#  endif
# elif defined(__powerpc64__) || defined(__ppc64__)
#  define OPTIMAL_CMP 64
# elif defined(__powerpc__) || defined(__ppc__) || defined(__PPC__)
#  define OPTIMAL_CMP 32
# endif
#endif
#if !defined(OPTIMAL_CMP)
# define OPTIMAL_CMP 16
#endif

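/* Sketch of the kind of access this enables (helper is hypothetical, not part
 * of this header):
 *     static inline uint64_t load_u64(const uint8_t *p) {
 *         uint64_t v;
 *         memcpy(&v, p, sizeof(v));
 *         return v;
 *     }
 * Where OPTIMAL_CMP is 64, such a memcpy-based load typically compiles to a
 * single unaligned load, so match loops can compare 8 bytes at a time without
 * dereferencing misaligned pointers. */
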
#if defined(__has_feature)
# if __has_feature(address_sanitizer)
#  define Z_ADDRESS_SANITIZER 1
# endif
#elif defined(__SANITIZE_ADDRESS__)
# define Z_ADDRESS_SANITIZER 1
#endif

/*
 * __asan_loadN() and __asan_storeN() calls are inserted by the compiler to check memory accesses.
 * They can also be called manually, with the following caveats:
 * gcc says: "warning: implicit declaration of function '...'"
 * g++ says: "error: new declaration '...' ambiguates built-in declaration '...'"
 * Accommodate both.
 */
#ifdef Z_ADDRESS_SANITIZER
#ifndef __cplusplus
void __asan_loadN(void *, long);
void __asan_storeN(void *, long);
#endif
#else
# define __asan_loadN(a, size) do { Z_UNUSED(a); Z_UNUSED(size); } while (0)
# define __asan_storeN(a, size) do { Z_UNUSED(a); Z_UNUSED(size); } while (0)
#endif

#if defined(__has_feature)
# if __has_feature(memory_sanitizer)
#  define Z_MEMORY_SANITIZER 1
#  include <sanitizer/msan_interface.h>
# endif
#endif

#ifndef Z_MEMORY_SANITIZER
# define __msan_check_mem_is_initialized(a, size) do { Z_UNUSED(a); Z_UNUSED(size); } while (0)
# define __msan_unpoison(a, size) do { Z_UNUSED(a); Z_UNUSED(size); } while (0)
#endif

/* Notify sanitizer runtime about an upcoming read access. */
#define instrument_read(a, size) do {             \
    void *__a = (void *)(a);                      \
    long __size = size;                           \
    __asan_loadN(__a, __size);                    \
    __msan_check_mem_is_initialized(__a, __size); \
} while (0)

/* Notify sanitizer runtime about an upcoming write access. */
#define instrument_write(a, size) do {            \
    void *__a = (void *)(a);                      \
    long __size = size;                           \
    __asan_storeN(__a, __size);                   \
} while (0)

/* Notify sanitizer runtime about an upcoming read/write access. */
#define instrument_read_write(a, size) do {       \
    void *__a = (void *)(a);                      \
    long __size = size;                           \
    __asan_storeN(__a, __size);                   \
    __msan_check_mem_is_initialized(__a, __size); \
} while (0)

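/* Illustrative use (buffer and length names are hypothetical):
 *     instrument_read(src, len);     before reading len bytes from src
 *     instrument_write(dst, len);    before writing len bytes to dst
 * Under ASan/MSan this reports out-of-bounds or uninitialized data at the call
 * site; in regular builds the macros reduce to no-ops. */
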
#endif