lib/crc: x86: Reorganize crc-pclmul static_call initialization
Reorganize the crc-pclmul static_call initialization to place more of the logic in the *_mod_init_arch() functions instead of in the INIT_CRC_PCLMUL macro. This provides the flexibility to do more than a single static_call update for each CPU feature check. Right away, optimize crc64_mod_init_arch() to check the CPU features just once instead of twice, doing both the crc64_msb and crc64_lsb static_call updates together. A later commit will also use this to initialize an additional static_key when crc32_lsb_vpclmul_avx512() is enabled. Acked-by: Ard Biesheuvel <ardb@kernel.org> Link: https://lore.kernel.org/r/20250719224938.126512-2-ebiggers@kernel.org Signed-off-by: Eric Biggers <ebiggers@kernel.org>
This commit is contained in:
parent
9b0236f4ef
commit
110628e55a
|
|
@ -25,23 +25,20 @@ crc_t prefix##_vpclmul_avx512(crc_t crc, const u8 *p, size_t len, \
|
|||
const void *consts_ptr); \
|
||||
DEFINE_STATIC_CALL(prefix##_pclmul, prefix##_pclmul_sse)
|
||||
|
||||
/*
 * INIT_CRC_PCLMUL() - upgrade a CRC static_call to the best [V]PCLMULQDQ
 * implementation the CPU supports.
 *
 * The prefix##_pclmul static_call defaults to the SSE (plain PCLMULQDQ)
 * implementation.  If VPCLMULQDQ, AVX2, and kernel YMM state support are all
 * present, upgrade it to a 256-bit implementation; further, if AVX512BW,
 * AVX512VL, and kernel AVX-512 state support are present and the CPU does not
 * set X86_FEATURE_PREFER_YMM (presumably set where 512-bit vectors are a net
 * loss — TODO confirm against cpufeatures definition), use the AVX-512
 * implementation instead of the AVX2 one.
 *
 * Callers invoke this only after verifying X86_FEATURE_PCLMULQDQ, which the
 * SSE baseline requires.
 */
#define INIT_CRC_PCLMUL(prefix) \
do { \
	if (boot_cpu_has(X86_FEATURE_VPCLMULQDQ) && \
	    boot_cpu_has(X86_FEATURE_AVX2) && \
	    cpu_has_xfeatures(XFEATURE_MASK_YMM, NULL)) { \
		if (boot_cpu_has(X86_FEATURE_AVX512BW) && \
		    boot_cpu_has(X86_FEATURE_AVX512VL) && \
		    !boot_cpu_has(X86_FEATURE_PREFER_YMM) && \
		    cpu_has_xfeatures(XFEATURE_MASK_AVX512, NULL)) { \
			static_call_update(prefix##_pclmul, \
					   prefix##_vpclmul_avx512); \
		} else { \
			static_call_update(prefix##_pclmul, \
					   prefix##_vpclmul_avx2); \
		} \
	} \
} while (0)
|
||||
static inline bool have_vpclmul(void)
|
||||
{
|
||||
return boot_cpu_has(X86_FEATURE_VPCLMULQDQ) &&
|
||||
boot_cpu_has(X86_FEATURE_AVX2) &&
|
||||
cpu_has_xfeatures(XFEATURE_MASK_YMM, NULL);
|
||||
}
|
||||
|
||||
static inline bool have_avx512(void)
|
||||
{
|
||||
return boot_cpu_has(X86_FEATURE_AVX512BW) &&
|
||||
boot_cpu_has(X86_FEATURE_AVX512VL) &&
|
||||
!boot_cpu_has(X86_FEATURE_PREFER_YMM) &&
|
||||
cpu_has_xfeatures(XFEATURE_MASK_AVX512, NULL);
|
||||
}
|
||||
|
||||
/*
|
||||
* Call a [V]PCLMULQDQ optimized CRC function if the data length is at least 16
|
||||
|
|
|
|||
|
|
/*
 * One-time boot initialization for the x86 CRC-T10DIF implementation.
 *
 * If the CPU has PCLMULQDQ, enable the have_pclmulqdq static key and, when
 * wider vector support is available, upgrade the crc16_msb_pclmul static_call
 * from its SSE default to the AVX-512 or AVX2 VPCLMULQDQ implementation
 * (replaces the former INIT_CRC_PCLMUL(crc16_msb) macro invocation).
 */
static inline void crc_t10dif_mod_init_arch(void)
{
	if (boot_cpu_has(X86_FEATURE_PCLMULQDQ)) {
		static_branch_enable(&have_pclmulqdq);
		if (have_vpclmul()) {
			/* Pick the widest usable vector implementation. */
			if (have_avx512())
				static_call_update(crc16_msb_pclmul,
						   crc16_msb_vpclmul_avx512);
			else
				static_call_update(crc16_msb_pclmul,
						   crc16_msb_vpclmul_avx2);
		}
	}
}
|
||||
|
|
|
|||
|
|
@ -77,7 +77,15 @@ static inline void crc32_mod_init_arch(void)
|
|||
static_branch_enable(&have_crc32);
|
||||
if (boot_cpu_has(X86_FEATURE_PCLMULQDQ)) {
|
||||
static_branch_enable(&have_pclmulqdq);
|
||||
INIT_CRC_PCLMUL(crc32_lsb);
|
||||
if (have_vpclmul()) {
|
||||
if (have_avx512()) {
|
||||
static_call_update(crc32_lsb_pclmul,
|
||||
crc32_lsb_vpclmul_avx512);
|
||||
} else {
|
||||
static_call_update(crc32_lsb_pclmul,
|
||||
crc32_lsb_vpclmul_avx2);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
/*
 * One-time boot initialization for the x86 CRC64 implementations.
 *
 * If the CPU has PCLMULQDQ, enable the have_pclmulqdq static key and, when
 * wider vector support is available, upgrade both the crc64_msb and crc64_lsb
 * static_calls together.  Doing both under a single have_vpclmul() /
 * have_avx512() check means the CPU features are tested once instead of twice
 * (previously two separate INIT_CRC_PCLMUL() invocations).
 */
static inline void crc64_mod_init_arch(void)
{
	if (boot_cpu_has(X86_FEATURE_PCLMULQDQ)) {
		static_branch_enable(&have_pclmulqdq);
		if (have_vpclmul()) {
			if (have_avx512()) {
				static_call_update(crc64_msb_pclmul,
						   crc64_msb_vpclmul_avx512);
				static_call_update(crc64_lsb_pclmul,
						   crc64_lsb_vpclmul_avx512);
			} else {
				static_call_update(crc64_msb_pclmul,
						   crc64_msb_vpclmul_avx2);
				static_call_update(crc64_lsb_pclmul,
						   crc64_lsb_vpclmul_avx2);
			}
		}
	}
}
|
||||
|
|
|
|||
Loading…
Reference in New Issue