Skip to content

Commit e1353b9

Browse files
committed
Remove ARCH_* guards around Bits::FindLSBSetNonZero64().
Bits::FindLSBSetNonZero64() is now available unconditionally, and it should be easier to reason about the code included in each build configuration. This reduces the amount of conditional compiling going on, which makes it easier to reason about what stubs are used in each build configuration. The guards were added to work around the fact that MSVC has a _BitScanForward64() intrinsic, but the intrinsic is only implemented on 64-bit targets, and causes compilation errors on 32-bit targets. By contrast, Clang and GCC support __builtin_ctzll() everywhere, and implement it with varying efficiency. This CL reworks the conditional compilation directives so that Bits::FindLSBSetNonZero64() uses the _BitScanForward64() intrinsic on MSVC when available, and the portable implementation otherwise. PiperOrigin-RevId: 310007748
1 parent c98344f commit e1353b9

File tree

1 file changed

+29
-24
lines changed

1 file changed

+29
-24
lines changed

snappy-stubs-internal.h

Lines changed: 29 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -264,17 +264,15 @@ class Bits {
264264
// that it's 0-indexed.
265265
static int FindLSBSetNonZero(uint32_t n);
266266

267-
#if defined(ARCH_K8) || defined(ARCH_PPC) || defined(ARCH_ARM)
268267
static int FindLSBSetNonZero64(uint64_t n);
269-
#endif // defined(ARCH_K8) || defined(ARCH_PPC) || defined(ARCH_ARM)
270268

271269
private:
272270
// No copying
273271
Bits(const Bits&);
274272
void operator=(const Bits&);
275273
};
276274

277-
#ifdef HAVE_BUILTIN_CTZ
275+
#if defined(HAVE_BUILTIN_CTZ)
278276

279277
inline int Bits::Log2FloorNonZero(uint32_t n) {
280278
assert(n != 0);
@@ -296,23 +294,18 @@ inline int Bits::FindLSBSetNonZero(uint32_t n) {
296294
return __builtin_ctz(n);
297295
}
298296

299-
#if defined(ARCH_K8) || defined(ARCH_PPC) || defined(ARCH_ARM)
300-
inline int Bits::FindLSBSetNonZero64(uint64_t n) {
301-
assert(n != 0);
302-
return __builtin_ctzll(n);
303-
}
304-
#endif // defined(ARCH_K8) || defined(ARCH_PPC) || defined(ARCH_ARM)
305-
306297
#elif defined(_MSC_VER)
307298

308299
inline int Bits::Log2FloorNonZero(uint32_t n) {
309300
assert(n != 0);
301+
// NOLINTNEXTLINE(runtime/int): The MSVC intrinsic demands unsigned long.
310302
unsigned long where;
311303
_BitScanReverse(&where, n);
312304
return static_cast<int>(where);
313305
}
314306

315307
inline int Bits::Log2Floor(uint32_t n) {
308+
// NOLINTNEXTLINE(runtime/int): The MSVC intrinsic demands unsigned long.
316309
unsigned long where;
317310
if (_BitScanReverse(&where, n))
318311
return static_cast<int>(where);
@@ -321,22 +314,13 @@ inline int Bits::Log2Floor(uint32_t n) {
321314

322315
inline int Bits::FindLSBSetNonZero(uint32_t n) {
323316
assert(n != 0);
317+
// NOLINTNEXTLINE(runtime/int): The MSVC intrinsic demands unsigned long.
324318
unsigned long where;
325319
if (_BitScanForward(&where, n))
326320
return static_cast<int>(where);
327321
return 32;
328322
}
329323

330-
#if defined(ARCH_K8) || defined(ARCH_PPC) || defined(ARCH_ARM)
331-
inline int Bits::FindLSBSetNonZero64(uint64_t n) {
332-
assert(n != 0);
333-
unsigned long where;
334-
if (_BitScanForward64(&where, n))
335-
return static_cast<int>(where);
336-
return 64;
337-
}
338-
#endif // defined(ARCH_K8) || defined(ARCH_PPC) || defined(ARCH_ARM)
339-
340324
#else // Portable versions.
341325

342326
inline int Bits::Log2FloorNonZero(uint32_t n) {
@@ -375,22 +359,43 @@ inline int Bits::FindLSBSetNonZero(uint32_t n) {
375359
return rc;
376360
}
377361

378-
#if defined(ARCH_K8) || defined(ARCH_PPC) || defined(ARCH_ARM)
362+
#endif // End portable versions.
363+
364+
#if defined(HAVE_BUILTIN_CTZ)
365+
366+
inline int Bits::FindLSBSetNonZero64(uint64_t n) {
367+
assert(n != 0);
368+
return __builtin_ctzll(n);
369+
}
370+
371+
#elif defined(_MSC_VER) && (defined(_M_X64) || defined(_M_ARM64))
372+
// _BitScanForward64() is only available on x64 and ARM64.
373+
374+
inline int Bits::FindLSBSetNonZero64(uint64_t n) {
375+
assert(n != 0);
376+
// NOLINTNEXTLINE(runtime/int): The MSVC intrinsic demands unsigned long.
377+
unsigned long where;
378+
if (_BitScanForward64(&where, n))
379+
return static_cast<int>(where);
380+
return 64;
381+
}
382+
383+
#else // Portable version.
384+
379385
// FindLSBSetNonZero64() is defined in terms of FindLSBSetNonZero().
380386
inline int Bits::FindLSBSetNonZero64(uint64_t n) {
381387
assert(n != 0);
382388

383389
const uint32_t bottombits = static_cast<uint32_t>(n);
384390
if (bottombits == 0) {
385-
// Bottom bits are zero, so scan in top bits
391+
// Bottom bits are zero, so scan the top bits.
386392
return 32 + FindLSBSetNonZero(static_cast<uint32_t>(n >> 32));
387393
} else {
388394
return FindLSBSetNonZero(bottombits);
389395
}
390396
}
391-
#endif // defined(ARCH_K8) || defined(ARCH_PPC) || defined(ARCH_ARM)
392397

393-
#endif // End portable versions.
398+
#endif // End portable version.
394399

395400
// Variable-length integer encoding.
396401
class Varint {

0 commit comments

Comments
 (0)