@@ -1228,6 +1228,11 @@ static struct aead_alg aesni_aeads[] = {

static struct simd_aead_alg *aesni_simd_aeads[ARRAY_SIZE(aesni_aeads)];

+module_param_named(using_x86_avx2, gcm_use_avx2.key.enabled.counter, int, 0444);
+module_param_named(using_x86_avx, gcm_use_avx.key.enabled.counter, int, 0444);
+MODULE_PARM_DESC(using_x86_avx2, "Using x86 instruction set extensions: AVX2 (for GCM mode)");
+MODULE_PARM_DESC(using_x86_avx, "Using x86 instruction set extensions: AVX (for CTR and GCM modes)");
+
static const struct x86_cpu_id module_cpu_ids[] = {
X86_MATCH_FEATURE(X86_FEATURE_AES, NULL),
{}
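The binding above works because DEFINE_STATIC_KEY_FALSE() ultimately wraps an atomic_t `enabled` field whose `counter` member is a plain int: 0 while the key is off, 1 once static_branch_enable() has run. module_param_named() can therefore expose that int read-only (0444) with no extra storage. A minimal self-contained sketch of the same pattern — the demo_* module and names are illustrative, not part of this patch:

#include <linux/module.h>
#include <linux/jump_label.h>
#include <asm/cpufeature.h>

static DEFINE_STATIC_KEY_FALSE(demo_use_avx);

/* Bind the key's int counter (0 = off, 1 = on) to a read-only parameter. */
module_param_named(using_x86_avx, demo_use_avx.key.enabled.counter, int, 0444);
MODULE_PARM_DESC(using_x86_avx, "Using x86 instruction set extensions: AVX");

static int __init demo_init(void)
{
	if (boot_cpu_has(X86_FEATURE_AVX))
		static_branch_enable(&demo_use_avx);
	return 0;
}

static void __exit demo_exit(void)
{
}

module_init(demo_init);
module_exit(demo_exit);
MODULE_LICENSE("GPL");

Loading such a module on AVX-capable hardware would report 1 in /sys/module/<module>/parameters/using_x86_avx.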
@@ -1241,22 +1246,14 @@ static int __init aesni_init(void)
if (!x86_match_cpu(module_cpu_ids))
return -ENODEV;
#ifdef CONFIG_X86_64
- if (boot_cpu_has(X86_FEATURE_AVX2)) {
- pr_info("AVX2 version of gcm_enc/dec engaged.\n");
- static_branch_enable(&gcm_use_avx);
+ if (boot_cpu_has(X86_FEATURE_AVX2))
static_branch_enable(&gcm_use_avx2);
- } else
+
if (boot_cpu_has(X86_FEATURE_AVX)) {
- pr_info("AVX version of gcm_enc/dec engaged.\n");
static_branch_enable(&gcm_use_avx);
- } else {
- pr_info("SSE version of gcm_enc/dec engaged.\n");
- }
- if (boot_cpu_has(X86_FEATURE_AVX)) {
- /* optimize performance of ctr mode encryption transform */
static_call_update(aesni_ctr_enc_tfm, aesni_ctr_enc_avx_tfm);
- pr_info("AES CTR mode by8 optimization enabled\n");
}
+
#endif /* CONFIG_X86_64 */

 err = crypto_register_alg(&aesni_cipher_alg);
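With the pr_info() lines gone, the engaged-feature report moves from dmesg to sysfs. A userspace sketch that reads one of the new parameters — it assumes the module is loaded under its usual name aesni_intel, so the file sits in the standard /sys/module/<module>/parameters/ location:

#include <stdio.h>

int main(void)
{
	FILE *f = fopen("/sys/module/aesni_intel/parameters/using_x86_avx2", "r");
	int enabled;

	if (!f)
		return 1;	/* module not loaded, or parameter absent */
	if (fscanf(f, "%d", &enabled) == 1)
		printf("GCM AVX2 path: %s\n", enabled ? "enabled" : "disabled");
	fclose(f);
	return 0;
}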
@@ -166,6 +166,10 @@ static struct skcipher_alg aria_algs[] = {

static struct simd_skcipher_alg *aria_simd_algs[ARRAY_SIZE(aria_algs)];

+static int using_x86_gfni;
+module_param(using_x86_gfni, int, 0444);
+MODULE_PARM_DESC(using_x86_gfni, "Using x86 instruction set extensions: GF-NI");
+
static const struct x86_cpu_id module_cpu_ids[] = {
X86_MATCH_FEATURE(X86_FEATURE_AVX, NULL),
{}
@@ -192,6 +196,7 @@ static int __init aria_avx_init(void)
}

 if (boot_cpu_has(X86_FEATURE_GFNI)) {
+ using_x86_gfni = 1;
aria_ops.aria_encrypt_16way = aria_aesni_avx_gfni_encrypt_16way;
aria_ops.aria_decrypt_16way = aria_aesni_avx_gfni_decrypt_16way;
aria_ops.aria_ctr_crypt_16way = aria_aesni_avx_gfni_ctr_crypt_16way;
@@ -210,6 +215,7 @@ static void __exit aria_avx_exit(void)
{
simd_unregister_skciphers(aria_algs, ARRAY_SIZE(aria_algs),
aria_simd_algs);
+ using_x86_gfni = 0;
}

module_init(aria_avx_init);
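aria has no static key to piggyback on — the GFNI decision just swaps function pointers in aria_ops — so the patch falls back to a plain static int flag set once in the init path. The same fallback pattern in isolation, again with hypothetical demo names:

#include <linux/module.h>
#include <asm/cpufeature.h>

static int using_x86_gfni;
module_param(using_x86_gfni, int, 0444);
MODULE_PARM_DESC(using_x86_gfni, "Using x86 instruction set extensions: GF-NI");

static int __init demo_init(void)
{
	if (boot_cpu_has(X86_FEATURE_GFNI))
		using_x86_gfni = 1;	/* visible via sysfs for the module's lifetime */
	return 0;
}

static void __exit demo_exit(void)
{
}

module_init(demo_init);
module_exit(demo_exit);
MODULE_LICENSE("GPL");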
@@ -55,6 +55,11 @@ void blake2s_compress(struct blake2s_state *state, const u8 *block,
}
EXPORT_SYMBOL(blake2s_compress);

+module_param_named(using_x86_ssse3, blake2s_use_ssse3.key.enabled.counter, int, 0444);
+module_param_named(using_x86_avx512vl, blake2s_use_avx512.key.enabled.counter, int, 0444);
+MODULE_PARM_DESC(using_x86_ssse3, "Using x86 instruction set extensions: SSSE3");
+MODULE_PARM_DESC(using_x86_avx512vl, "Using x86 instruction set extensions: AVX-512VL");
+
static const struct x86_cpu_id module_cpu_ids[] = {
X86_MATCH_FEATURE(X86_FEATURE_SSSE3, NULL),
X86_MATCH_FEATURE(X86_FEATURE_AVX512VL, NULL),
@@ -277,6 +277,11 @@ static struct skcipher_alg algs[] = {
},
};

+module_param_named(using_x86_avx512vl, chacha_use_avx512vl.key.enabled.counter, int, 0444);
+module_param_named(using_x86_avx2, chacha_use_avx2.key.enabled.counter, int, 0444);
+MODULE_PARM_DESC(using_x86_avx512vl, "Using x86 instruction set extensions: AVX-512VL");
+MODULE_PARM_DESC(using_x86_avx2, "Using x86 instruction set extensions: AVX2");
+
static const struct x86_cpu_id module_cpu_ids[] = {
X86_MATCH_FEATURE(X86_FEATURE_SSSE3, NULL),
{}
@@ -240,6 +240,10 @@ static struct shash_alg alg = {
}
};

+static int using_x86_pclmulqdq;
+module_param(using_x86_pclmulqdq, int, 0444);
+MODULE_PARM_DESC(using_x86_pclmulqdq, "Using x86 instruction set extensions: PCLMULQDQ");
+
static const struct x86_cpu_id module_cpu_ids[] = {
X86_MATCH_FEATURE(X86_FEATURE_XMM4_2, NULL),
{}
@@ -252,6 +256,7 @@ static int __init crc32c_intel_mod_init(void)
return -ENODEV;
#ifdef CONFIG_X86_64
if (boot_cpu_has(X86_FEATURE_PCLMULQDQ)) {
+ using_x86_pclmulqdq = 1;
alg.update = crc32c_pcl_intel_update;
alg.finup = crc32c_pcl_intel_finup;
alg.digest = crc32c_pcl_intel_digest;
@@ -263,6 +268,7 @@ static int __init crc32c_intel_mod_init(void)
static void __exit crc32c_intel_mod_fini(void)
{
crypto_unregister_shash(&alg);
+ using_x86_pclmulqdq = 0;
}

module_init(crc32c_intel_mod_init);
@@ -1697,6 +1697,9 @@ static struct kpp_alg curve25519_alg = {
.max_size = curve25519_max_size,
};

+module_param_named(using_x86_adx, curve25519_use_bmi2_adx.key.enabled.counter, int, 0444);
+MODULE_PARM_DESC(using_x86_adx, "Using x86 instruction set extensions: ADX");
+
static const struct x86_cpu_id module_cpu_ids[] = {
X86_MATCH_FEATURE(X86_FEATURE_ADX, NULL),
{}
@@ -269,6 +269,13 @@ static struct shash_alg alg = {
},
};

+module_param_named(using_x86_avx, poly1305_use_avx.key.enabled.counter, int, 0444);
+module_param_named(using_x86_avx2, poly1305_use_avx2.key.enabled.counter, int, 0444);
+module_param_named(using_x86_avx512f, poly1305_use_avx512.key.enabled.counter, int, 0444);
+MODULE_PARM_DESC(using_x86_avx, "Using x86 instruction set extensions: AVX");
+MODULE_PARM_DESC(using_x86_avx2, "Using x86 instruction set extensions: AVX2");
+MODULE_PARM_DESC(using_x86_avx512f, "Using x86 instruction set extensions: AVX-512F");
+
static const struct x86_cpu_id module_cpu_ids[] = {
X86_MATCH_FEATURE(X86_FEATURE_ANY, NULL),
{}
@@ -90,6 +90,17 @@ static int using_x86_avx2;
static int using_x86_shani;
#endif

+#ifdef CONFIG_AS_SHA1_NI
+module_param(using_x86_shani, int, 0444);
+MODULE_PARM_DESC(using_x86_shani, "Using x86 instruction set extensions: SHA-NI");
+#endif
+module_param(using_x86_ssse3, int, 0444);
+module_param(using_x86_avx, int, 0444);
+module_param(using_x86_avx2, int, 0444);
+MODULE_PARM_DESC(using_x86_ssse3, "Using x86 instruction set extensions: SSSE3");
+MODULE_PARM_DESC(using_x86_avx, "Using x86 instruction set extensions: AVX");
+MODULE_PARM_DESC(using_x86_avx2, "Using x86 instruction set extensions: AVX2");
+
static int sha1_update(struct shash_desc *desc, const u8 *data,
unsigned int len, unsigned int bytes_per_fpu,
sha1_block_fn *sha1_xform)
@@ -104,6 +104,17 @@ static int using_x86_avx2;
static int using_x86_shani;
#endif

+#ifdef CONFIG_AS_SHA256_NI
+module_param(using_x86_shani, int, 0444);
+MODULE_PARM_DESC(using_x86_shani, "Using x86 instruction set extensions: SHA-NI");
+#endif
+module_param(using_x86_ssse3, int, 0444);
+module_param(using_x86_avx, int, 0444);
+module_param(using_x86_avx2, int, 0444);
+MODULE_PARM_DESC(using_x86_ssse3, "Using x86 instruction set extensions: SSSE3");
+MODULE_PARM_DESC(using_x86_avx, "Using x86 instruction set extensions: AVX");
+MODULE_PARM_DESC(using_x86_avx2, "Using x86 instruction set extensions: AVX2");
+
static int _sha256_update(struct shash_desc *desc, const u8 *data,
unsigned int len, unsigned int bytes_per_fpu,
sha256_block_fn *sha256_xform)
@@ -212,9 +223,6 @@ static void unregister_sha256_ssse3(void)
}
}

-asmlinkage void sha256_transform_avx(struct sha256_state *state,
- const u8 *data, int blocks);
-
static int sha256_avx_update(struct shash_desc *desc, const u8 *data,
unsigned int len)
{
@@ -273,9 +281,6 @@ static void unregister_sha256_avx(void)
}
}

-asmlinkage void sha256_transform_rorx(struct sha256_state *state,
- const u8 *data, int blocks);
-
static int sha256_avx2_update(struct shash_desc *desc, const u8 *data,
unsigned int len)
{
@@ -335,9 +340,6 @@ static void unregister_sha256_avx2(void)
}

#ifdef CONFIG_AS_SHA256_NI
-asmlinkage void sha256_ni_transform(struct sha256_state *digest,
- const u8 *data, int rounds);
-
static int sha256_ni_update(struct shash_desc *desc, const u8 *data,
unsigned int len)
{
@@ -81,6 +81,13 @@ static int using_x86_ssse3;
static int using_x86_avx;
static int using_x86_avx2;

+module_param(using_x86_ssse3, int, 0444);
+module_param(using_x86_avx, int, 0444);
+module_param(using_x86_avx2, int, 0444);
+MODULE_PARM_DESC(using_x86_ssse3, "Using x86 instruction set extensions: SSSE3");
+MODULE_PARM_DESC(using_x86_avx, "Using x86 instruction set extensions: AVX");
+MODULE_PARM_DESC(using_x86_avx2, "Using x86 instruction set extensions: AVX2");
+
static int sha512_update(struct shash_desc *desc, const u8 *data,
unsigned int len, unsigned int bytes_per_fpu,
sha512_block_fn *sha512_xform)