On Thu, Oct 17, 2019 at 09:09:01PM +0200, Ard Biesheuvel wrote:
> +void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
> +{
> +     state = PTR_ALIGN(state, CHACHA_STATE_ALIGN);
> +
> +     if (!static_branch_likely(&chacha_use_simd) || !crypto_simd_usable()) {
> +             hchacha_block_generic(state, stream, nrounds);
> +     } else {
> +             kernel_fpu_begin();
> +             hchacha_block_ssse3(state, stream, nrounds);
> +             kernel_fpu_end();
> +     }
> +}
> +EXPORT_SYMBOL(hchacha_block_arch);

I generally find negative logic like the above harder to read than the
positive form:

        if (static_branch_likely(&chacha_use_simd) && crypto_simd_usable()) {
                kernel_fpu_begin();
                hchacha_block_ssse3(state, stream, nrounds);
                kernel_fpu_end();
        } else {
                hchacha_block_generic(state, stream, nrounds);
        }

(This applies to lots of places in this patch series.)  Not a big deal though.

>  static int __init chacha_simd_mod_init(void)
>  {
>       if (!boot_cpu_has(X86_FEATURE_SSSE3))
> -             return -ENODEV;
> -
> -#ifdef CONFIG_AS_AVX2
> -     chacha_use_avx2 = boot_cpu_has(X86_FEATURE_AVX) &&
> -                       boot_cpu_has(X86_FEATURE_AVX2) &&
> -                       cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL);
> -#ifdef CONFIG_AS_AVX512
> -     chacha_use_avx512vl = chacha_use_avx2 &&
> -                           boot_cpu_has(X86_FEATURE_AVX512VL) &&
> -                           boot_cpu_has(X86_FEATURE_AVX512BW); /* kmovq */
> -#endif
> -#endif
> +             return 0;
> +
> +     static_branch_enable(&chacha_use_simd);
> +
> +     if (IS_ENABLED(CONFIG_AS_AVX2) &&
> +         boot_cpu_has(X86_FEATURE_AVX) &&
> +         boot_cpu_has(X86_FEATURE_AVX2) &&
> +         cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
> +             static_branch_enable(&chacha_use_avx2);
> +
> +             if (IS_ENABLED(CONFIG_AS_AVX512) &&
> +                 boot_cpu_has(X86_FEATURE_AVX512VL) &&
> +                 boot_cpu_has(X86_FEATURE_AVX512BW)) /* kmovq */
> +                     static_branch_enable(&chacha_use_avx512vl);
> +     }
>       return crypto_register_skciphers(algs, ARRAY_SIZE(algs));
>  }
>  

This patch is missing the corresponding change to chacha_simd_mod_fini(), to
skip unregistering the skcipher algorithms if they weren't registered.
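
Something like this should do it (an untested sketch that just mirrors the
SSSE3 check from chacha_simd_mod_init()):

        static void __exit chacha_simd_mod_fini(void)
        {
                if (boot_cpu_has(X86_FEATURE_SSSE3))
                        crypto_unregister_skciphers(algs, ARRAY_SIZE(algs));
        }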

- Eric
