platform.h (23753B)
1 /* Copyright 2016 Google Inc. All Rights Reserved. 2 3 Distributed under MIT license. 4 See file LICENSE for detail or copy at https://opensource.org/licenses/MIT 5 */ 6 7 /* Macros for compiler / platform specific features and build options. 8 9 Build options are: 10 * BROTLI_BUILD_32_BIT disables 64-bit optimizations 11 * BROTLI_BUILD_64_BIT forces to use 64-bit optimizations 12 * BROTLI_BUILD_BIG_ENDIAN forces to use big-endian optimizations 13 * BROTLI_BUILD_ENDIAN_NEUTRAL disables endian-aware optimizations 14 * BROTLI_BUILD_LITTLE_ENDIAN forces to use little-endian optimizations 15 * BROTLI_BUILD_NO_RBIT disables "rbit" optimization for ARM CPUs 16 * BROTLI_BUILD_NO_UNALIGNED_READ_FAST forces off the fast-unaligned-read 17 optimizations (mainly for testing purposes) 18 * BROTLI_DEBUG dumps file name and line number when decoder detects stream 19 or memory error 20 * BROTLI_ENABLE_LOG enables asserts and dumps various state information 21 * BROTLI_ENABLE_DUMP overrides default "dump" behaviour 22 */ 23 24 #ifndef BROTLI_COMMON_PLATFORM_H_ 25 #define BROTLI_COMMON_PLATFORM_H_ 26 27 #include <string.h> /* IWYU pragma: export memcmp, memcpy, memset */ 28 #include <stdlib.h> /* IWYU pragma: export exit, free, malloc */ 29 #include <sys/types.h> /* should include endian.h for us */ 30 31 #include <brotli/port.h> /* IWYU pragma: export */ 32 #include <brotli/types.h> /* IWYU pragma: export */ 33 34 #if BROTLI_MSVC_VERSION_CHECK(18, 0, 0) 35 #include <intrin.h> 36 #endif 37 38 #if defined(BROTLI_ENABLE_LOG) || defined(BROTLI_DEBUG) 39 #include <assert.h> 40 #include <stdio.h> 41 #endif 42 43 /* The following macros were borrowed from https://github.com/nemequ/hedley 44 * with permission of original author - Evan Nemerson <evan@nemerson.com> */ 45 46 /* >>> >>> >>> hedley macros */ 47 48 /* Define "BROTLI_PREDICT_TRUE" and "BROTLI_PREDICT_FALSE" macros for capable 49 compilers. 
   To apply compiler hint, enclose the branching condition into macros, like
   this:

     if (BROTLI_PREDICT_TRUE(zero == 0)) {
       // main execution path
     } else {
       // compiler should place this code outside of main execution path
     }

   OR:

     if (BROTLI_PREDICT_FALSE(something_rare_or_unexpected_happens)) {
       // compiler should place this code outside of main execution path
     }
*/
#if BROTLI_GNUC_HAS_BUILTIN(__builtin_expect, 3, 0, 0) || \
    BROTLI_INTEL_VERSION_CHECK(16, 0, 0) || \
    BROTLI_SUNPRO_VERSION_CHECK(5, 15, 0) || \
    BROTLI_ARM_VERSION_CHECK(4, 1, 0) || \
    BROTLI_IBM_VERSION_CHECK(10, 1, 0) || \
    BROTLI_TI_VERSION_CHECK(7, 3, 0) || \
    BROTLI_TINYC_VERSION_CHECK(0, 9, 27)
#define BROTLI_PREDICT_TRUE(x) (__builtin_expect(!!(x), 1))
#define BROTLI_PREDICT_FALSE(x) (__builtin_expect(x, 0))
#else
#define BROTLI_PREDICT_FALSE(x) (x)
#define BROTLI_PREDICT_TRUE(x) (x)
#endif

/* C99 "restrict" qualifier, or the closest compiler-specific equivalent;
   expands to nothing where unsupported. */
#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) && \
    !defined(__cplusplus)
#define BROTLI_RESTRICT restrict
#elif BROTLI_GNUC_VERSION_CHECK(3, 1, 0) || \
    BROTLI_MSVC_VERSION_CHECK(14, 0, 0) || \
    BROTLI_INTEL_VERSION_CHECK(16, 0, 0) || \
    BROTLI_ARM_VERSION_CHECK(4, 1, 0) || \
    BROTLI_IBM_VERSION_CHECK(10, 1, 0) || \
    BROTLI_PGI_VERSION_CHECK(17, 10, 0) || \
    BROTLI_TI_VERSION_CHECK(8, 0, 0) || \
    BROTLI_IAR_VERSION_CHECK(8, 0, 0) || \
    (BROTLI_SUNPRO_VERSION_CHECK(5, 14, 0) && defined(__cplusplus))
#define BROTLI_RESTRICT __restrict
#elif BROTLI_SUNPRO_VERSION_CHECK(5, 3, 0) && !defined(__cplusplus)
#define BROTLI_RESTRICT _Restrict
#else
#define BROTLI_RESTRICT
#endif

/* Plain "inline" where the dialect has it (C99 / C++98), otherwise the
   compiler-specific spelling, otherwise nothing. */
#if (defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L)) || \
    (defined(__cplusplus) && (__cplusplus >= 199711L))
#define BROTLI_MAYBE_INLINE inline
#elif defined(__GNUC_STDC_INLINE__) || defined(__GNUC_GNU_INLINE__) || \
    BROTLI_ARM_VERSION_CHECK(6, 2, 0)
#define BROTLI_MAYBE_INLINE __inline__
#elif BROTLI_MSVC_VERSION_CHECK(12, 0, 0) || \
    BROTLI_ARM_VERSION_CHECK(4, 1, 0) || BROTLI_TI_VERSION_CHECK(8, 0, 0)
#define BROTLI_MAYBE_INLINE __inline
#else
#define BROTLI_MAYBE_INLINE
#endif

/* Forced inlining for hot helpers; degrades to BROTLI_MAYBE_INLINE. */
#if BROTLI_GNUC_HAS_ATTRIBUTE(always_inline, 4, 0, 0) || \
    BROTLI_INTEL_VERSION_CHECK(16, 0, 0) || \
    BROTLI_SUNPRO_VERSION_CHECK(5, 11, 0) || \
    BROTLI_ARM_VERSION_CHECK(4, 1, 0) || \
    BROTLI_IBM_VERSION_CHECK(10, 1, 0) || \
    BROTLI_TI_VERSION_CHECK(8, 0, 0) || \
    (BROTLI_TI_VERSION_CHECK(7, 3, 0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__))
#define BROTLI_INLINE BROTLI_MAYBE_INLINE __attribute__((__always_inline__))
#elif BROTLI_MSVC_VERSION_CHECK(12, 0, 0)
#define BROTLI_INLINE BROTLI_MAYBE_INLINE __forceinline
#elif BROTLI_TI_VERSION_CHECK(7, 0, 0) && defined(__cplusplus)
#define BROTLI_INLINE BROTLI_MAYBE_INLINE _Pragma("FUNC_ALWAYS_INLINE;")
#elif BROTLI_IAR_VERSION_CHECK(8, 0, 0)
#define BROTLI_INLINE BROTLI_MAYBE_INLINE _Pragma("inline=forced")
#else
#define BROTLI_INLINE BROTLI_MAYBE_INLINE
#endif

/* Inlining suppression, e.g. to keep cold paths out of hot functions. */
#if BROTLI_GNUC_HAS_ATTRIBUTE(noinline, 4, 0, 0) || \
    BROTLI_INTEL_VERSION_CHECK(16, 0, 0) || \
    BROTLI_SUNPRO_VERSION_CHECK(5, 11, 0) || \
    BROTLI_ARM_VERSION_CHECK(4, 1, 0) || \
    BROTLI_IBM_VERSION_CHECK(10, 1, 0) || \
    BROTLI_TI_VERSION_CHECK(8, 0, 0) || \
    (BROTLI_TI_VERSION_CHECK(7, 3, 0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__))
#define BROTLI_NOINLINE __attribute__((__noinline__))
#elif BROTLI_MSVC_VERSION_CHECK(13, 10, 0)
#define BROTLI_NOINLINE __declspec(noinline)
#elif BROTLI_PGI_VERSION_CHECK(10, 2, 0)
#define BROTLI_NOINLINE _Pragma("noinline")
#elif BROTLI_TI_VERSION_CHECK(6, 0, 0) && defined(__cplusplus)
#define BROTLI_NOINLINE _Pragma("FUNC_CANNOT_INLINE;")
#elif BROTLI_IAR_VERSION_CHECK(8, 0, 0)
#define BROTLI_NOINLINE _Pragma("inline=never")
#else
#define BROTLI_NOINLINE
#endif

/* <<< <<< <<< end of hedley macros. */

#if BROTLI_GNUC_HAS_ATTRIBUTE(unused, 2, 7, 0) || \
    BROTLI_INTEL_VERSION_CHECK(16, 0, 0)
#define BROTLI_UNUSED_FUNCTION static BROTLI_INLINE __attribute__ ((unused))
#else
#define BROTLI_UNUSED_FUNCTION static BROTLI_INLINE
#endif

#if BROTLI_GNUC_HAS_ATTRIBUTE(aligned, 2, 7, 0)
#define BROTLI_ALIGNED(N) __attribute__((aligned(N)))
#else
#define BROTLI_ALIGNED(N)
#endif

/* Target architecture detection (used below to pick unaligned-access and
   bit-manipulation strategies). */
/* NOTE(review): "M_ARM" looks like it was meant to be the MSVC predefine
   "_M_ARM" — confirm against upstream before changing. */
#if (defined(__ARM_ARCH) && (__ARM_ARCH == 7)) || \
    (defined(M_ARM) && (M_ARM == 7))
#define BROTLI_TARGET_ARMV7
#endif  /* ARMv7 */

#if (defined(__ARM_ARCH) && (__ARM_ARCH == 8)) || \
    defined(__aarch64__) || defined(__ARM64_ARCH_8__)
#define BROTLI_TARGET_ARMV8_ANY

#if defined(__ARM_32BIT_STATE)
#define BROTLI_TARGET_ARMV8_32
#elif defined(__ARM_64BIT_STATE)
#define BROTLI_TARGET_ARMV8_64
#endif

#endif  /* ARMv8 */

#if defined(__ARM_NEON__) || defined(__ARM_NEON)
#define BROTLI_TARGET_NEON
#endif

#if defined(__i386) || defined(_M_IX86)
#define BROTLI_TARGET_X86
#endif

#if defined(__x86_64__) || defined(_M_X64)
#define BROTLI_TARGET_X64
#endif

#if defined(__PPC64__)
#define BROTLI_TARGET_POWERPC64
#endif

#if defined(__riscv) && defined(__riscv_xlen) && __riscv_xlen == 64
#define BROTLI_TARGET_RISCV64
#endif

#if defined(__loongarch_lp64)
#define BROTLI_TARGET_LOONGARCH64
#endif

/* This does not seem to be an indicator of z/Architecture (64-bit); neither
   that allows to use unaligned loads.
*/
#if defined(__s390x__)
#define BROTLI_TARGET_S390X
#endif

#if defined(__mips64)
#define BROTLI_TARGET_MIPS64
#endif

/* True iff the target is one of the recognized 64-bit architectures. */
#if defined(BROTLI_TARGET_X64) || defined(BROTLI_TARGET_ARMV8_64) || \
    defined(BROTLI_TARGET_POWERPC64) || defined(BROTLI_TARGET_RISCV64) || \
    defined(BROTLI_TARGET_LOONGARCH64) || defined(BROTLI_TARGET_MIPS64)
#define BROTLI_TARGET_64_BITS 1
#else
#define BROTLI_TARGET_64_BITS 0
#endif

/* Build options may force the register width; otherwise follow the target. */
#if defined(BROTLI_BUILD_64_BIT)
#define BROTLI_64_BITS 1
#elif defined(BROTLI_BUILD_32_BIT)
#define BROTLI_64_BITS 0
#else
#define BROTLI_64_BITS BROTLI_TARGET_64_BITS
#endif

/* Widest integer type expected to fit a native register. */
#if (BROTLI_64_BITS)
#define brotli_reg_t uint64_t
#else
#define brotli_reg_t uint32_t
#endif

/* Endianness detection: explicit build options win, then compiler macros,
   then platform heuristics; anything unresolved stays "neutral" (both 0). */
#if defined(BROTLI_BUILD_BIG_ENDIAN)
#define BROTLI_BIG_ENDIAN 1
#elif defined(BROTLI_BUILD_LITTLE_ENDIAN)
#define BROTLI_LITTLE_ENDIAN 1
#elif defined(BROTLI_BUILD_ENDIAN_NEUTRAL)
/* Just break elif chain. */
#elif defined(__BYTE_ORDER__) && (__BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__)
#define BROTLI_LITTLE_ENDIAN 1
#elif defined(_WIN32) || defined(BROTLI_TARGET_X64)
/* Win32 & x64 can currently always be assumed to be little endian */
#define BROTLI_LITTLE_ENDIAN 1
#elif defined(__BYTE_ORDER__) && (__BYTE_ORDER__ == __ORDER_BIG_ENDIAN__)
#define BROTLI_BIG_ENDIAN 1
/* Likely target platform is iOS / OSX. */
#elif defined(BYTE_ORDER) && (BYTE_ORDER == LITTLE_ENDIAN)
#define BROTLI_LITTLE_ENDIAN 1
#elif defined(BYTE_ORDER) && (BYTE_ORDER == BIG_ENDIAN)
#define BROTLI_BIG_ENDIAN 1
#endif

#if !defined(BROTLI_LITTLE_ENDIAN)
#define BROTLI_LITTLE_ENDIAN 0
#endif

#if !defined(BROTLI_BIG_ENDIAN)
#define BROTLI_BIG_ENDIAN 0
#endif

/* Targets where a single unaligned load is known to be cheap. */
#if defined(BROTLI_BUILD_NO_UNALIGNED_READ_FAST)
#define BROTLI_UNALIGNED_READ_FAST (!!0)
#elif defined(BROTLI_TARGET_X86) || defined(BROTLI_TARGET_X64) || \
    defined(BROTLI_TARGET_ARMV7) || defined(BROTLI_TARGET_ARMV8_ANY) || \
    defined(BROTLI_TARGET_RISCV64) || defined(BROTLI_TARGET_LOONGARCH64)
/* These targets are known to generate efficient code for unaligned reads
 * (e.g. a single instruction, not multiple 1-byte loads, shifted and or'd
 * together). */
#define BROTLI_UNALIGNED_READ_FAST (!!1)
#else
#define BROTLI_UNALIGNED_READ_FAST (!!0)
#endif

/* Portable unaligned memory access: read / write values via memcpy.
*/ 280 #if !defined(BROTLI_USE_PACKED_FOR_UNALIGNED) 281 #if defined(__mips__) && (!defined(__mips_isa_rev) || __mips_isa_rev < 6) 282 #define BROTLI_USE_PACKED_FOR_UNALIGNED 1 283 #else 284 #define BROTLI_USE_PACKED_FOR_UNALIGNED 0 285 #endif 286 #endif /* defined(BROTLI_USE_PACKED_FOR_UNALIGNED) */ 287 288 #if BROTLI_USE_PACKED_FOR_UNALIGNED 289 290 typedef union BrotliPackedValue { 291 uint16_t u16; 292 uint32_t u32; 293 uint64_t u64; 294 size_t szt; 295 } __attribute__ ((packed)) BrotliPackedValue; 296 297 static BROTLI_INLINE uint16_t BrotliUnalignedRead16(const void* p) { 298 const BrotliPackedValue* address = (const BrotliPackedValue*)p; 299 return address->u16; 300 } 301 static BROTLI_INLINE uint32_t BrotliUnalignedRead32(const void* p) { 302 const BrotliPackedValue* address = (const BrotliPackedValue*)p; 303 return address->u32; 304 } 305 static BROTLI_INLINE uint64_t BrotliUnalignedRead64(const void* p) { 306 const BrotliPackedValue* address = (const BrotliPackedValue*)p; 307 return address->u64; 308 } 309 static BROTLI_INLINE size_t BrotliUnalignedReadSizeT(const void* p) { 310 const BrotliPackedValue* address = (const BrotliPackedValue*)p; 311 return address->szt; 312 } 313 static BROTLI_INLINE void BrotliUnalignedWrite64(void* p, uint64_t v) { 314 BrotliPackedValue* address = (BrotliPackedValue*)p; 315 address->u64 = v; 316 } 317 318 #else /* not BROTLI_USE_PACKED_FOR_UNALIGNED */ 319 320 static BROTLI_INLINE uint16_t BrotliUnalignedRead16(const void* p) { 321 uint16_t t; 322 memcpy(&t, p, sizeof t); 323 return t; 324 } 325 static BROTLI_INLINE uint32_t BrotliUnalignedRead32(const void* p) { 326 uint32_t t; 327 memcpy(&t, p, sizeof t); 328 return t; 329 } 330 static BROTLI_INLINE uint64_t BrotliUnalignedRead64(const void* p) { 331 uint64_t t; 332 memcpy(&t, p, sizeof t); 333 return t; 334 } 335 static BROTLI_INLINE size_t BrotliUnalignedReadSizeT(const void* p) { 336 size_t t; 337 memcpy(&t, p, sizeof t); 338 return t; 339 } 340 static BROTLI_INLINE 
void BrotliUnalignedWrite64(void* p, uint64_t v) { 341 memcpy(p, &v, sizeof v); 342 } 343 344 #endif /* BROTLI_USE_PACKED_FOR_UNALIGNED */ 345 346 #if BROTLI_GNUC_HAS_BUILTIN(__builtin_bswap16, 4, 3, 0) 347 #define BROTLI_BSWAP16(V) ((uint16_t)__builtin_bswap16(V)) 348 #else 349 #define BROTLI_BSWAP16(V) ((uint16_t)( \ 350 (((V) & 0xFFU) << 8) | \ 351 (((V) >> 8) & 0xFFU))) 352 #endif 353 354 #if BROTLI_GNUC_HAS_BUILTIN(__builtin_bswap32, 4, 3, 0) 355 #define BROTLI_BSWAP32(V) ((uint32_t)__builtin_bswap32(V)) 356 #else 357 #define BROTLI_BSWAP32(V) ((uint32_t)( \ 358 (((V) & 0xFFU) << 24) | (((V) & 0xFF00U) << 8) | \ 359 (((V) >> 8) & 0xFF00U) | (((V) >> 24) & 0xFFU))) 360 #endif 361 362 #if BROTLI_GNUC_HAS_BUILTIN(__builtin_bswap64, 4, 3, 0) 363 #define BROTLI_BSWAP64(V) ((uint64_t)__builtin_bswap64(V)) 364 #else 365 #define BROTLI_BSWAP64(V) ((uint64_t)( \ 366 (((V) & 0xFFU) << 56) | (((V) & 0xFF00U) << 40) | \ 367 (((V) & 0xFF0000U) << 24) | (((V) & 0xFF000000U) << 8) | \ 368 (((V) >> 8) & 0xFF000000U) | (((V) >> 24) & 0xFF0000U) | \ 369 (((V) >> 40) & 0xFF00U) | (((V) >> 56) & 0xFFU))) 370 #endif 371 372 #if BROTLI_LITTLE_ENDIAN 373 /* Straight endianness. Just read / write values. 
*/ 374 #define BROTLI_UNALIGNED_LOAD16LE BrotliUnalignedRead16 375 #define BROTLI_UNALIGNED_LOAD32LE BrotliUnalignedRead32 376 #define BROTLI_UNALIGNED_LOAD64LE BrotliUnalignedRead64 377 #define BROTLI_UNALIGNED_STORE64LE BrotliUnalignedWrite64 378 #elif BROTLI_BIG_ENDIAN /* BROTLI_LITTLE_ENDIAN */ 379 static BROTLI_INLINE uint16_t BROTLI_UNALIGNED_LOAD16LE(const void* p) { 380 uint16_t value = BrotliUnalignedRead16(p); 381 return BROTLI_BSWAP16(value); 382 } 383 static BROTLI_INLINE uint32_t BROTLI_UNALIGNED_LOAD32LE(const void* p) { 384 uint32_t value = BrotliUnalignedRead32(p); 385 return BROTLI_BSWAP32(value); 386 } 387 static BROTLI_INLINE uint64_t BROTLI_UNALIGNED_LOAD64LE(const void* p) { 388 uint64_t value = BrotliUnalignedRead64(p); 389 return BROTLI_BSWAP64(value); 390 } 391 static BROTLI_INLINE void BROTLI_UNALIGNED_STORE64LE(void* p, uint64_t v) { 392 uint64_t value = BROTLI_BSWAP64(v); 393 BrotliUnalignedWrite64(p, value); 394 } 395 #else /* BROTLI_LITTLE_ENDIAN */ 396 /* Read / store values byte-wise; hopefully compiler will understand. 
*/ 397 static BROTLI_INLINE uint16_t BROTLI_UNALIGNED_LOAD16LE(const void* p) { 398 const uint8_t* in = (const uint8_t*)p; 399 return (uint16_t)(in[0] | (in[1] << 8)); 400 } 401 static BROTLI_INLINE uint32_t BROTLI_UNALIGNED_LOAD32LE(const void* p) { 402 const uint8_t* in = (const uint8_t*)p; 403 uint32_t value = (uint32_t)(in[0]); 404 value |= (uint32_t)(in[1]) << 8; 405 value |= (uint32_t)(in[2]) << 16; 406 value |= (uint32_t)(in[3]) << 24; 407 return value; 408 } 409 static BROTLI_INLINE uint64_t BROTLI_UNALIGNED_LOAD64LE(const void* p) { 410 const uint8_t* in = (const uint8_t*)p; 411 uint64_t value = (uint64_t)(in[0]); 412 value |= (uint64_t)(in[1]) << 8; 413 value |= (uint64_t)(in[2]) << 16; 414 value |= (uint64_t)(in[3]) << 24; 415 value |= (uint64_t)(in[4]) << 32; 416 value |= (uint64_t)(in[5]) << 40; 417 value |= (uint64_t)(in[6]) << 48; 418 value |= (uint64_t)(in[7]) << 56; 419 return value; 420 } 421 static BROTLI_INLINE void BROTLI_UNALIGNED_STORE64LE(void* p, uint64_t v) { 422 uint8_t* out = (uint8_t*)p; 423 out[0] = (uint8_t)v; 424 out[1] = (uint8_t)(v >> 8); 425 out[2] = (uint8_t)(v >> 16); 426 out[3] = (uint8_t)(v >> 24); 427 out[4] = (uint8_t)(v >> 32); 428 out[5] = (uint8_t)(v >> 40); 429 out[6] = (uint8_t)(v >> 48); 430 out[7] = (uint8_t)(v >> 56); 431 } 432 #endif /* BROTLI_LITTLE_ENDIAN */ 433 434 static BROTLI_INLINE void* BROTLI_UNALIGNED_LOAD_PTR(const void* p) { 435 void* v; 436 memcpy(&v, p, sizeof(void*)); 437 return v; 438 } 439 440 static BROTLI_INLINE void BROTLI_UNALIGNED_STORE_PTR(void* p, const void* v) { 441 memcpy(p, &v, sizeof(void*)); 442 } 443 444 /* BROTLI_IS_CONSTANT macros returns true for compile-time constants. 
*/
#if BROTLI_GNUC_HAS_BUILTIN(__builtin_constant_p, 3, 0, 1) || \
    BROTLI_INTEL_VERSION_CHECK(16, 0, 0)
#define BROTLI_IS_CONSTANT(x) (!!__builtin_constant_p(x))
#else
#define BROTLI_IS_CONSTANT(x) (!!0)
#endif

/* ARMv7 / ARMv8 have the UBFX bit-field extract instruction. */
#if defined(BROTLI_TARGET_ARMV7) || defined(BROTLI_TARGET_ARMV8_ANY)
#define BROTLI_HAS_UBFX (!!1)
#else
#define BROTLI_HAS_UBFX (!!0)
#endif

/* State tracing; compiles away unless BROTLI_ENABLE_LOG is defined.
   Usage: BROTLI_LOG(("fmt", args)) — note the double parentheses. */
#if defined(BROTLI_ENABLE_LOG)
#define BROTLI_LOG(x) printf x
#else
#define BROTLI_LOG(x)
#endif

/* Debug / log builds: enable asserts and turn the dump default on. */
#if defined(BROTLI_DEBUG) || defined(BROTLI_ENABLE_LOG)
#define BROTLI_ENABLE_DUMP_DEFAULT 1
#define BROTLI_DCHECK(x) assert(x)
#else
#define BROTLI_ENABLE_DUMP_DEFAULT 0
#define BROTLI_DCHECK(x)
#endif

#if !defined(BROTLI_ENABLE_DUMP)
#define BROTLI_ENABLE_DUMP BROTLI_ENABLE_DUMP_DEFAULT
#endif

#if BROTLI_ENABLE_DUMP
/* Report the BROTLI_DUMP() call site (file:line (function)) to stderr. */
static BROTLI_INLINE void BrotliDump(const char* f, int l, const char* fn) {
  fprintf(stderr, "%s:%d (%s)\n", f, l, fn);
  fflush(stderr);
}
#define BROTLI_DUMP() BrotliDump(__FILE__, __LINE__, __FUNCTION__)
#else
#define BROTLI_DUMP() (void)(0)
#endif

/* BrotliRBit assumes brotli_reg_t fits native CPU register type. */
#if (BROTLI_64_BITS == BROTLI_TARGET_64_BITS)
/* TODO(eustas): add appropriate icc/sunpro/arm/ibm/ti checks. */
#if (BROTLI_GNUC_VERSION_CHECK(3, 0, 0) || defined(__llvm__)) && \
    !defined(BROTLI_BUILD_NO_RBIT)
#if defined(BROTLI_TARGET_ARMV7) || defined(BROTLI_TARGET_ARMV8_ANY)
/* TODO(eustas): detect ARMv6T2 and enable this code for it.
*/ 493 static BROTLI_INLINE brotli_reg_t BrotliRBit(brotli_reg_t input) { 494 brotli_reg_t output; 495 __asm__("rbit %0, %1\n" : "=r"(output) : "r"(input)); 496 return output; 497 } 498 #define BROTLI_RBIT(x) BrotliRBit(x) 499 #endif /* armv7 / armv8 */ 500 #endif /* gcc || clang */ 501 #endif /* brotli_reg_t is native */ 502 #if !defined(BROTLI_RBIT) 503 static BROTLI_INLINE void BrotliRBit(void) { /* Should break build if used. */ } 504 #endif /* BROTLI_RBIT */ 505 506 #define BROTLI_REPEAT_4(X) {X; X; X; X;} 507 #define BROTLI_REPEAT_5(X) {X; X; X; X; X;} 508 #define BROTLI_REPEAT_6(X) {X; X; X; X; X; X;} 509 510 #define BROTLI_UNUSED(X) (void)(X) 511 512 #define BROTLI_MIN_MAX(T) \ 513 static BROTLI_INLINE T brotli_min_ ## T (T a, T b) { return a < b ? a : b; } \ 514 static BROTLI_INLINE T brotli_max_ ## T (T a, T b) { return a > b ? a : b; } 515 BROTLI_MIN_MAX(double) BROTLI_MIN_MAX(float) BROTLI_MIN_MAX(int) 516 BROTLI_MIN_MAX(size_t) BROTLI_MIN_MAX(uint32_t) BROTLI_MIN_MAX(uint8_t) 517 #undef BROTLI_MIN_MAX 518 #define BROTLI_MIN(T, A, B) (brotli_min_ ## T((A), (B))) 519 #define BROTLI_MAX(T, A, B) (brotli_max_ ## T((A), (B))) 520 521 #define BROTLI_SWAP(T, A, I, J) { \ 522 T __brotli_swap_tmp = (A)[(I)]; \ 523 (A)[(I)] = (A)[(J)]; \ 524 (A)[(J)] = __brotli_swap_tmp; \ 525 } 526 527 #if BROTLI_64_BITS 528 #if BROTLI_GNUC_HAS_BUILTIN(__builtin_ctzll, 3, 4, 0) || \ 529 BROTLI_INTEL_VERSION_CHECK(16, 0, 0) 530 #define BROTLI_TZCNT64 __builtin_ctzll 531 #elif BROTLI_MSVC_VERSION_CHECK(18, 0, 0) 532 #if defined(BROTLI_TARGET_X64) && !defined(_M_ARM64EC) 533 #define BROTLI_TZCNT64 _tzcnt_u64 534 #else /* BROTLI_TARGET_X64 */ 535 static BROTLI_INLINE uint32_t BrotliBsf64Msvc(uint64_t x) { 536 uint32_t lsb; 537 _BitScanForward64(&lsb, x); 538 return lsb; 539 } 540 #define BROTLI_TZCNT64 BrotliBsf64Msvc 541 #endif /* BROTLI_TARGET_X64 */ 542 #endif /* __builtin_ctzll */ 543 #endif /* BROTLI_64_BITS */ 544 545 #if BROTLI_GNUC_HAS_BUILTIN(__builtin_clz, 3, 4, 0) || \ 
    BROTLI_INTEL_VERSION_CHECK(16, 0, 0)
/* Bit-scan-reverse: index of the highest set bit (undefined for x == 0). */
#define BROTLI_BSR32(x) (31u ^ (uint32_t)__builtin_clz(x))
#elif BROTLI_MSVC_VERSION_CHECK(18, 0, 0)
static BROTLI_INLINE uint32_t BrotliBsr32Msvc(uint32_t x) {
  unsigned long msb;
  _BitScanReverse(&msb, x);
  return (uint32_t)msb;
}
#define BROTLI_BSR32 BrotliBsr32Msvc
#endif /* __builtin_clz */

/* Default brotli_alloc_func */
BROTLI_COMMON_API void* BrotliDefaultAllocFunc(void* opaque, size_t size);

/* Default brotli_free_func */
BROTLI_COMMON_API void BrotliDefaultFreeFunc(void* opaque, void* address);

/* Circular logical rotates. The mask keeps shift amounts in range (avoids
   undefined full-width shifts when count is a multiple of the width) and
   helps compilers recognize the rotate idiom. */
static BROTLI_INLINE uint16_t BrotliRotateRight16(uint16_t const value,
                                                 size_t count) {
  count &= 0x0F; /* for fickle pattern recognition */
  return (value >> count) | (uint16_t)(value << ((0U - count) & 0x0F));
}
static BROTLI_INLINE uint32_t BrotliRotateRight32(uint32_t const value,
                                                 size_t count) {
  count &= 0x1F; /* for fickle pattern recognition */
  return (value >> count) | (uint32_t)(value << ((0U - count) & 0x1F));
}
static BROTLI_INLINE uint64_t BrotliRotateRight64(uint64_t const value,
                                                 size_t count) {
  count &= 0x3F; /* for fickle pattern recognition */
  return (value >> count) | (uint64_t)(value << ((0U - count) & 0x3F));
}

/* References every header-local helper once, so "unused function" warnings
   are suppressed in translation units that use only a subset. */
BROTLI_UNUSED_FUNCTION void BrotliSuppressUnusedFunctions(void) {
  BROTLI_UNUSED(&BrotliSuppressUnusedFunctions);
  BROTLI_UNUSED(&BrotliUnalignedRead16);
  BROTLI_UNUSED(&BrotliUnalignedRead32);
  BROTLI_UNUSED(&BrotliUnalignedRead64);
  BROTLI_UNUSED(&BrotliUnalignedReadSizeT);
  BROTLI_UNUSED(&BrotliUnalignedWrite64);
  BROTLI_UNUSED(&BROTLI_UNALIGNED_LOAD16LE);
  BROTLI_UNUSED(&BROTLI_UNALIGNED_LOAD32LE);
  BROTLI_UNUSED(&BROTLI_UNALIGNED_LOAD64LE);
  BROTLI_UNUSED(&BROTLI_UNALIGNED_STORE64LE);
  BROTLI_UNUSED(&BROTLI_UNALIGNED_LOAD_PTR);
  BROTLI_UNUSED(&BROTLI_UNALIGNED_STORE_PTR);
BROTLI_UNUSED(&BrotliRBit); 594 BROTLI_UNUSED(&brotli_min_double); 595 BROTLI_UNUSED(&brotli_max_double); 596 BROTLI_UNUSED(&brotli_min_float); 597 BROTLI_UNUSED(&brotli_max_float); 598 BROTLI_UNUSED(&brotli_min_int); 599 BROTLI_UNUSED(&brotli_max_int); 600 BROTLI_UNUSED(&brotli_min_size_t); 601 BROTLI_UNUSED(&brotli_max_size_t); 602 BROTLI_UNUSED(&brotli_min_uint32_t); 603 BROTLI_UNUSED(&brotli_max_uint32_t); 604 BROTLI_UNUSED(&brotli_min_uint8_t); 605 BROTLI_UNUSED(&brotli_max_uint8_t); 606 BROTLI_UNUSED(&BrotliDefaultAllocFunc); 607 BROTLI_UNUSED(&BrotliDefaultFreeFunc); 608 BROTLI_UNUSED(&BrotliRotateRight16); 609 BROTLI_UNUSED(&BrotliRotateRight32); 610 BROTLI_UNUSED(&BrotliRotateRight64); 611 #if BROTLI_ENABLE_DUMP 612 BROTLI_UNUSED(&BrotliDump); 613 #endif 614 615 #if defined(_MSC_VER) && (defined(_M_X64) || defined(_M_I86)) && \ 616 !defined(_M_ARM64EC) 617 /* _mm_prefetch() is not defined outside of x86/x64 */ 618 /* https://msdn.microsoft.com/fr-fr/library/84szxsww(v=vs.90).aspx */ 619 #include <mmintrin.h> 620 #define PREFETCH_L1(ptr) _mm_prefetch((const char*)(ptr), _MM_HINT_T0) 621 #define PREFETCH_L2(ptr) _mm_prefetch((const char*)(ptr), _MM_HINT_T1) 622 #elif BROTLI_GNUC_HAS_BUILTIN(__builtin_prefetch, 3, 1, 0) 623 #define PREFETCH_L1(ptr) \ 624 __builtin_prefetch((ptr), 0 /* rw==read */, 3 /* locality */) 625 #define PREFETCH_L2(ptr) \ 626 __builtin_prefetch((ptr), 0 /* rw==read */, 2 /* locality */) 627 #elif defined(__aarch64__) 628 #define PREFETCH_L1(ptr) \ 629 do { \ 630 __asm__ __volatile__("prfm pldl1keep, %0" ::"Q"(*(ptr))); \ 631 } while (0) 632 #define PREFETCH_L2(ptr) \ 633 do { \ 634 __asm__ __volatile__("prfm pldl2keep, %0" ::"Q"(*(ptr))); \ 635 } while (0) 636 #else 637 #define PREFETCH_L1(ptr) \ 638 do { \ 639 (void)(ptr); \ 640 } while (0) /* disabled */ 641 #define PREFETCH_L2(ptr) \ 642 do { \ 643 (void)(ptr); \ 644 } while (0) /* disabled */ 645 #endif 646 647 /* The SIMD matchers are only faster at certain quality levels. 
*/ 648 #if defined(_M_X64) && defined(BROTLI_TZCNT64) 649 #define BROTLI_MAX_SIMD_QUALITY 7 650 #elif defined(BROTLI_TZCNT64) 651 #define BROTLI_MAX_SIMD_QUALITY 6 652 #endif 653 } 654 655 #if defined(_MSC_VER) 656 #define BROTLI_CRASH() __debugbreak(), (void)abort() 657 #elif BROTLI_GNUC_HAS_BUILTIN(__builtin_trap, 3, 0, 0) 658 #define BROTLI_CRASH() (void)__builtin_trap() 659 #else 660 #define BROTLI_CRASH() (void)abort() 661 #endif 662 663 /* Make BROTLI_TEST=0 act same as undefined. */ 664 #if defined(BROTLI_TEST) && ((1-BROTLI_TEST-1) == 0) 665 #undef BROTLI_TEST 666 #endif 667 668 #if BROTLI_GNUC_HAS_ATTRIBUTE(model, 3, 0, 3) 669 #define BROTLI_MODEL(M) __attribute__((model(M))) 670 #else 671 #define BROTLI_MODEL(M) /* M */ 672 #endif 673 674 #if BROTLI_GNUC_HAS_ATTRIBUTE(cold, 4, 3, 0) 675 #define BROTLI_COLD __attribute__((cold)) 676 #else 677 #define BROTLI_COLD /* cold */ 678 #endif 679 680 #endif /* BROTLI_COMMON_PLATFORM_H_ */