#define HEAPBLK_TO_OFFSET(x) (((x) % HEAPBLOCKS_PER_BYTE) * BITS_PER_HEAPBLOCK)
/* Masks for counting subsets of bits in the visibility map. */
-#define VISIBLE_MASK64 UINT64CONST(0x5555555555555555) /* The lower bit of each
- * bit pair */
-#define FROZEN_MASK64 UINT64CONST(0xaaaaaaaaaaaaaaaa) /* The upper bit of each
- * bit pair */
+#define VISIBLE_MASK8 (0x55) /* The lower bit of each bit pair */
+#define FROZEN_MASK8 (0xaa) /* The upper bit of each bit pair */
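
The visibility map devotes a bit pair to each heap block: the lower bit records all-visible, the upper bit all-frozen, so masking a map byte with 0x55 or 0xaa and popcounting yields the per-state totals directly. A standalone sketch of that arithmetic (illustrative only, not part of the patch; __builtin_popcount is the GCC/Clang builtin):

#include <stdio.h>

int main(void)
{
	unsigned char vm_byte = 0xC7;	/* bit pairs, high to low: 11 00 01 11 */
	int nvisible = __builtin_popcount(vm_byte & 0x55);	/* lower bits -> 3 */
	int nfrozen = __builtin_popcount(vm_byte & 0xaa);	/* upper bits -> 2 */

	printf("visible=%d frozen=%d\n", nvisible, nfrozen);
	return 0;
}
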
/* prototypes for internal routines */
static Buffer vm_readbuf(Relation rel, BlockNumber blkno, bool extend);
for (mapBlock = 0;; mapBlock++)
{
Buffer mapBuffer;
uint64 *map;
- int i;
/*
 * Read till we fall off the end of the map.  We assume that any extra
 * bytes in the last page are zeroed, so we don't bother excluding them
 * from the count.
 */
map = (uint64 *) PageGetContents(BufferGetPage(mapBuffer));
- StaticAssertStmt(MAPSIZE % sizeof(uint64) == 0,
- "unsupported MAPSIZE");
- if (all_frozen == NULL)
- {
- for (i = 0; i < MAPSIZE / sizeof(uint64); i++)
- nvisible += pg_popcount64(map[i] & VISIBLE_MASK64);
- }
- else
- {
- for (i = 0; i < MAPSIZE / sizeof(uint64); i++)
- {
- nvisible += pg_popcount64(map[i] & VISIBLE_MASK64);
- nfrozen += pg_popcount64(map[i] & FROZEN_MASK64);
- }
- }
+ nvisible += pg_popcount_masked((const char *) map, MAPSIZE, VISIBLE_MASK8);
+ if (all_frozen)
+ nfrozen += pg_popcount_masked((const char *) map, MAPSIZE, FROZEN_MASK8);
ReleaseBuffer(mapBuffer);
}
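
Swapping the 64-bit masks for byte masks is behavior-preserving: masking each byte with 0x55 and summing byte popcounts gives the same total as masking the whole word with the old VISIBLE_MASK64. A quick self-check sketch (illustrative, not part of the patch):

#include <assert.h>
#include <stdint.h>
#include <string.h>

int main(void)
{
	uint64_t word = UINT64_C(0xDEADBEEFCAFEF00D);
	unsigned char b[8];
	int byte_sum = 0;

	memcpy(b, &word, sizeof(word));
	for (int i = 0; i < 8; i++)
		byte_sum += __builtin_popcount(b[i] & 0x55);

	/* byte-wise masking matches the word-wise mask, on any endianness */
	assert(byte_sum == __builtin_popcountll(word & UINT64_C(0x5555555555555555)));
	return 0;
}
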
extern PGDLLIMPORT int (*pg_popcount32) (uint32 word);
extern PGDLLIMPORT int (*pg_popcount64) (uint64 word);
extern PGDLLIMPORT uint64 (*pg_popcount_optimized) (const char *buf, int bytes);
+extern PGDLLIMPORT uint64 (*pg_popcount_masked_optimized) (const char *buf, int bytes, bits8 mask);
/*
 * We can also try to use the AVX-512 popcount instruction on some systems.
 */
#ifdef USE_AVX512_POPCNT_WITH_RUNTIME_CHECK
extern bool pg_popcount_avx512_available(void);
extern uint64 pg_popcount_avx512(const char *buf, int bytes);
+extern uint64 pg_popcount_masked_avx512(const char *buf, int bytes, bits8 mask);
#endif
#else
extern int pg_popcount32(uint32 word);
extern int pg_popcount64(uint64 word);
extern uint64 pg_popcount_optimized(const char *buf, int bytes);
+extern uint64 pg_popcount_masked_optimized(const char *buf, int bytes, bits8 mask);
#endif /* TRY_POPCNT_FAST */
return pg_popcount_optimized(buf, bytes);
}
+/*
+ * Returns the number of 1-bits in buf after applying the mask to each byte.
+ *
+ * Similar to pg_popcount(), we only take on the function pointer overhead when
+ * it's likely to be faster.
+ */
+static inline uint64
+pg_popcount_masked(const char *buf, int bytes, bits8 mask)
+{
+ /*
+ * We set the threshold to the point at which we'll first use special
+ * instructions in the optimized version.
+ */
+#if SIZEOF_VOID_P >= 8
+ int threshold = 8;
+#else
+ int threshold = 4;
+#endif
+
+ if (bytes < threshold)
+ {
+ uint64 popcnt = 0;
+
+ while (bytes--)
+ popcnt += pg_number_of_ones[(unsigned char) *buf++ & mask];
+ return popcnt;
+ }
+
+ return pg_popcount_masked_optimized(buf, bytes, mask);
+}
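
For callers, the new helper reads like pg_popcount() with an extra per-byte mask. A hypothetical call site, in the spirit of the visibilitymap_count() hunk above (count_visible and the literal page size are invented for illustration):

/* Hypothetical caller: count all-visible bits in one 8 kB map page. */
static uint64
count_visible(const char *map_page)
{
	return pg_popcount_masked(map_page, 8192, 0x55);
}
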
+
/*
* Rotate the bits of "word" to the right/left by n bits.
*/
static inline int pg_popcount32_slow(uint32 word);
static inline int pg_popcount64_slow(uint64 word);
static uint64 pg_popcount_slow(const char *buf, int bytes);
+static uint64 pg_popcount_masked_slow(const char *buf, int bytes, bits8 mask);
#ifdef TRY_POPCNT_FAST
static bool pg_popcount_available(void);
static int pg_popcount32_choose(uint32 word);
static int pg_popcount64_choose(uint64 word);
static uint64 pg_popcount_choose(const char *buf, int bytes);
+static uint64 pg_popcount_masked_choose(const char *buf, int bytes, bits8 mask);
static inline int pg_popcount32_fast(uint32 word);
static inline int pg_popcount64_fast(uint64 word);
static uint64 pg_popcount_fast(const char *buf, int bytes);
+static uint64 pg_popcount_masked_fast(const char *buf, int bytes, bits8 mask);
int (*pg_popcount32) (uint32 word) = pg_popcount32_choose;
int (*pg_popcount64) (uint64 word) = pg_popcount64_choose;
uint64 (*pg_popcount_optimized) (const char *buf, int bytes) = pg_popcount_choose;
+uint64 (*pg_popcount_masked_optimized) (const char *buf, int bytes, bits8 mask) = pg_popcount_masked_choose;
#endif /* TRY_POPCNT_FAST */
#ifdef TRY_POPCNT_FAST
pg_popcount32 = pg_popcount32_fast;
pg_popcount64 = pg_popcount64_fast;
pg_popcount_optimized = pg_popcount_fast;
+ pg_popcount_masked_optimized = pg_popcount_masked_fast;
}
else
{
pg_popcount32 = pg_popcount32_slow;
pg_popcount64 = pg_popcount64_slow;
pg_popcount_optimized = pg_popcount_slow;
+ pg_popcount_masked_optimized = pg_popcount_masked_slow;
}
#ifdef USE_AVX512_POPCNT_WITH_RUNTIME_CHECK
if (pg_popcount_avx512_available())
+ {
pg_popcount_optimized = pg_popcount_avx512;
+ pg_popcount_masked_optimized = pg_popcount_masked_avx512;
+ }
#endif
}
return pg_popcount_optimized(buf, bytes);
}
+static uint64
+pg_popcount_masked_choose(const char *buf, int bytes, bits8 mask)
+{
+ choose_popcount_functions();
+ return pg_popcount_masked(buf, bytes, mask);
+}
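
The _choose functions follow the file's existing one-shot dispatch idiom: the first call through the function pointer probes the CPU, installs the right implementation, and re-dispatches, so every later call pays only the indirect-call cost. A minimal standalone model of the idiom (all names here are invented for illustration; cpu_has_fast_popcnt stands in for pg_popcount_available()):

#include <stdbool.h>
#include <stdio.h>

static bool cpu_has_fast_popcnt(void) { return false; }	/* stand-in probe */
static int impl_slow(int x) { return x; }
static int impl_fast(int x) { return x; }
static int choose(int x);

/* Callers always go through this pointer; the first call resolves it. */
static int (*entry) (int) = choose;

static int
choose(int x)
{
	entry = cpu_has_fast_popcnt() ? impl_fast : impl_slow;
	return entry(x);	/* re-dispatch so even the first call is served */
}

int main(void)
{
	printf("%d %d\n", entry(1), entry(2));	/* first call chooses, second goes direct */
	return 0;
}
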
+
/*
* pg_popcount32_fast
* Return the number of 1 bits set in word
return popcnt;
}
+/*
+ * pg_popcount_masked_fast
+ * Returns the number of 1-bits in buf after applying the mask to each byte
+ */
+static uint64
+pg_popcount_masked_fast(const char *buf, int bytes, bits8 mask)
+{
+ uint64 popcnt = 0;
+
+#if SIZEOF_VOID_P >= 8
+ /* Process in 64-bit chunks if the buffer is aligned. */
+ uint64 maskv = ~UINT64CONST(0) / 0xFF * mask;
+
+ if (buf == (const char *) TYPEALIGN(8, buf))
+ {
+ const uint64 *words = (const uint64 *) buf;
+
+ while (bytes >= 8)
+ {
+ popcnt += pg_popcount64_fast(*words++ & maskv);
+ bytes -= 8;
+ }
+
+ buf = (const char *) words;
+ }
+#else
+ /* Process in 32-bit chunks if the buffer is aligned. */
+ uint32 maskv = ~((uint32) 0) / 0xFF * mask;
+
+ if (buf == (const char *) TYPEALIGN(4, buf))
+ {
+ const uint32 *words = (const uint32 *) buf;
+
+ while (bytes >= 4)
+ {
+ popcnt += pg_popcount32_fast(*words++ & maskv);
+ bytes -= 4;
+ }
+
+ buf = (const char *) words;
+ }
+#endif
+
+ /* Process any remaining bytes */
+ while (bytes--)
+ popcnt += pg_number_of_ones[(unsigned char) *buf++ & mask];
+
+ return popcnt;
+}
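
The expression ~UINT64CONST(0) / 0xFF evaluates to 0x0101010101010101, so multiplying it by the byte mask replicates the mask into every byte lane of the word. A quick sanity-check sketch in plain C (illustrative only):

#include <assert.h>
#include <stdint.h>

int main(void)
{
	uint8_t mask = 0x55;
	uint64_t maskv = ~UINT64_C(0) / 0xFF * mask;	/* broadcast byte to all lanes */

	assert(maskv == UINT64_C(0x5555555555555555));
	return 0;
}
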
+
#endif /* TRY_POPCNT_FAST */
return popcnt;
}
+/*
+ * pg_popcount_masked_slow
+ * Returns the number of 1-bits in buf after applying the mask to each byte
+ */
+static uint64
+pg_popcount_masked_slow(const char *buf, int bytes, bits8 mask)
+{
+ uint64 popcnt = 0;
+
+#if SIZEOF_VOID_P >= 8
+ /* Process in 64-bit chunks if the buffer is aligned. */
+ uint64 maskv = ~UINT64CONST(0) / 0xFF * mask;
+
+ if (buf == (const char *) TYPEALIGN(8, buf))
+ {
+ const uint64 *words = (const uint64 *) buf;
+
+ while (bytes >= 8)
+ {
+ popcnt += pg_popcount64_slow(*words++ & maskv);
+ bytes -= 8;
+ }
+
+ buf = (const char *) words;
+ }
+#else
+ /* Process in 32-bit chunks if the buffer is aligned. */
+ uint32 maskv = ~((uint32) 0) / 0xFF * mask;
+
+ if (buf == (const char *) TYPEALIGN(4, buf))
+ {
+ const uint32 *words = (const uint32 *) buf;
+
+ while (bytes >= 4)
+ {
+ popcnt += pg_popcount32_slow(*words++ & maskv);
+ bytes -= 4;
+ }
+
+ buf = (const char *) words;
+ }
+#endif
+
+ /* Process any remaining bytes */
+ while (bytes--)
+ popcnt += pg_number_of_ones[(unsigned char) *buf++ & mask];
+
+ return popcnt;
+}
+
#ifndef TRY_POPCNT_FAST
/*
 * pg_popcount_optimized
 *		Returns the number of 1-bits in buf
 */
return pg_popcount_slow(buf, bytes);
}
+/*
+ * pg_popcount_masked_optimized
+ * Returns the number of 1-bits in buf after applying the mask to each byte
+ */
+uint64
+pg_popcount_masked_optimized(const char *buf, int bytes, bits8 mask)
+{
+ return pg_popcount_masked_slow(buf, bytes, mask);
+}
+
#endif /* !TRY_POPCNT_FAST */
return _mm512_reduce_add_epi64(accum);
}
+/*
+ * pg_popcount_masked_avx512
+ * Returns the number of 1-bits in buf after applying the mask to each byte
+ */
+uint64
+pg_popcount_masked_avx512(const char *buf, int bytes, bits8 mask)
+{
+ __m512i val,
+ vmasked,
+ cnt;
+ __m512i accum = _mm512_setzero_si512();
+ const char *final;
+ int tail_idx;
+ __mmask64 bmask = ~UINT64CONST(0);
+ const __m512i maskv = _mm512_set1_epi8(mask);
+
+ /*
+ * Align buffer down to avoid double load overhead from unaligned access.
+ * Calculate a mask to ignore preceding bytes. Find start offset of final
+ * iteration and ensure it is not empty.
+ */
+ bmask <<= ((uintptr_t) buf) % sizeof(__m512i);
+ tail_idx = (((uintptr_t) buf + bytes - 1) % sizeof(__m512i)) + 1;
+ final = (const char *) TYPEALIGN_DOWN(sizeof(__m512i), buf + bytes - 1);
+ buf = (const char *) TYPEALIGN_DOWN(sizeof(__m512i), buf);
+
+ /*
+ * Iterate through all but the final iteration.  Starting from the second
+ * iteration, the initial load mask no longer applies.
+ */
+ if (buf < final)
+ {
+ val = _mm512_maskz_loadu_epi8(bmask, (const __m512i *) buf);
+ vmasked = _mm512_and_si512(val, maskv);
+ cnt = _mm512_popcnt_epi64(vmasked);
+ accum = _mm512_add_epi64(accum, cnt);
+
+ buf += sizeof(__m512i);
+ bmask = ~UINT64CONST(0);
+
+ for (; buf < final; buf += sizeof(__m512i))
+ {
+ val = _mm512_load_si512((const __m512i *) buf);
+ vmasked = _mm512_and_si512(val, maskv);
+ cnt = _mm512_popcnt_epi64(vmasked);
+ accum = _mm512_add_epi64(accum, cnt);
+ }
+ }
+
+ /* Final iteration needs to ignore bytes that are not within the length */
+ bmask &= (~UINT64CONST(0) >> (sizeof(__m512i) - tail_idx));
+
+ val = _mm512_maskz_loadu_epi8(bmask, (const __m512i *) buf);
+ vmasked = _mm512_and_si512(val, maskv);
+ cnt = _mm512_popcnt_epi64(vmasked);
+ accum = _mm512_add_epi64(accum, cnt);
+
+ return _mm512_reduce_add_epi64(accum);
+}
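
The head/tail handling above relies on _mm512_maskz_loadu_epi8 zeroing every byte whose mask bit is clear, so bytes before buf and past buf + bytes contribute nothing to the count. A scalar model of the mask arithmetic for hypothetical buf/bytes values (illustrative only, assuming 64-byte vectors):

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uintptr_t buf = 0x1007;		/* hypothetical unaligned start address */
	int bytes = 100;			/* hypothetical length */
	uint64_t head = ~UINT64_C(0) << (buf % 64);	/* skip the 7 bytes before buf */
	int tail_idx = (int) ((buf + bytes - 1) % 64) + 1;	/* valid bytes in last vector */
	uint64_t tail = ~UINT64_C(0) >> (64 - tail_idx);

	printf("head=%016llx tail=%016llx tail_idx=%d\n",
		   (unsigned long long) head, (unsigned long long) tail, tail_idx);
	return 0;
}
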
+
#endif /* TRY_POPCNT_FAST */