@@ -770,11 +770,14 @@ config CRYPTO_DEV_ARTPEC6
select CRYPTO_AES
select CRYPTO_ALGAPI
select CRYPTO_SKCIPHER
+ select CRYPTO_CBC
select CRYPTO_CTR
+ select CRYPTO_ECB
select CRYPTO_HASH
select CRYPTO_SHA1
select CRYPTO_SHA256
select CRYPTO_SHA512
+ select CRYPTO_XTS
help
Enables the driver for the on-chip crypto accelerator
of Axis ARTPEC SoCs.
@@ -1088,7 +1088,7 @@ artpec6_crypto_common_destroy(struct artpec6_crypto_req_common *common)
/*
* Ciphering functions.
*/
-static int artpec6_crypto_encrypt(struct skcipher_request *req)
+static int __artpec6_crypto_encrypt(struct skcipher_request *req)
{
struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(cipher);
@@ -1136,7 +1136,7 @@ static int artpec6_crypto_encrypt(struct skcipher_request *req)
return artpec6_crypto_submit(&req_ctx->common);
}

-static int artpec6_crypto_decrypt(struct skcipher_request *req)
+static int __artpec6_crypto_decrypt(struct skcipher_request *req)
{
int ret;
struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
@@ -1188,6 +1188,53 @@ static int artpec6_crypto_decrypt(struct skcipher_request *req)
return artpec6_crypto_submit(&req_ctx->common);
}

+static int artpec6_crypto_crypt_fallback(struct skcipher_request *req,
+ bool encrypt)
+{
+ struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
+ struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(cipher);
+ SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback);
+ int ret;
+
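+ /* Use the same key for the fallback cipher as for the hardware path. */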
+ ret = crypto_sync_skcipher_setkey(ctx->fallback, ctx->aes_key,
+ ctx->key_length);
+ if (ret)
+ return ret;
+
+ skcipher_request_set_sync_tfm(subreq, ctx->fallback);
+ skcipher_request_set_callback(subreq, req->base.flags,
+ NULL, NULL);
+ skcipher_request_set_crypt(subreq, req->src, req->dst,
+ req->cryptlen, req->iv);
+ ret = encrypt ? crypto_skcipher_encrypt(subreq)
+ : crypto_skcipher_decrypt(subreq);
+ skcipher_request_zero(subreq);
+
+ return ret;
+}
+
+static int artpec6_crypto_encrypt(struct skcipher_request *req)
+{
+ int ret;
+
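+ /* Try the hardware first; only -ENOSPC falls through to the software fallback. */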
+ ret = __artpec6_crypto_encrypt(req);
+ if (ret != -ENOSPC)
+ return ret;
+
+ return artpec6_crypto_crypt_fallback(req, true);
+}
+
+static int artpec6_crypto_decrypt(struct skcipher_request *req)
+{
+ int ret;
+
+ ret = __artpec6_crypto_decrypt(req);
+ if (ret != -ENOSPC)
+ return ret;
+
+ return artpec6_crypto_crypt_fallback(req, false);
+}
+
static int artpec6_crypto_block_encrypt(struct skcipher_request *req)
{
if (!IS_ALIGNED(req->cryptlen, AES_BLOCK_SIZE))
@@ -1570,18 +1617,7 @@ static int artpec6_crypto_prepare_hash(struct ahash_request *areq)
return ARTPEC6_CRYPTO_PREPARE_HASH_START;
}
-
-static int artpec6_crypto_aes_ecb_init(struct crypto_skcipher *tfm)
-{
- struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(tfm);
-
- tfm->reqsize = sizeof(struct artpec6_crypto_request_context);
- ctx->crypto_type = ARTPEC6_CRYPTO_CIPHER_AES_ECB;
-
- return 0;
-}
-
-static int artpec6_crypto_aes_ctr_init(struct crypto_skcipher *tfm)
+static int artpec6_crypto_aes_init(struct crypto_skcipher *tfm, int crypto_type)
{
struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(tfm);
@@ -1592,44 +1628,39 @@ static int artpec6_crypto_aes_ctr_init(struct crypto_skcipher *tfm)
return PTR_ERR(ctx->fallback);

tfm->reqsize = sizeof(struct artpec6_crypto_request_context);
- ctx->crypto_type = ARTPEC6_CRYPTO_CIPHER_AES_CTR;
+ ctx->crypto_type = crypto_type;

return 0;
}

-static int artpec6_crypto_aes_cbc_init(struct crypto_skcipher *tfm)
+static int artpec6_crypto_aes_ecb_init(struct crypto_skcipher *tfm)
{
- struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(tfm);
+ return artpec6_crypto_aes_init(tfm, ARTPEC6_CRYPTO_CIPHER_AES_ECB);
+}

- tfm->reqsize = sizeof(struct artpec6_crypto_request_context);
- ctx->crypto_type = ARTPEC6_CRYPTO_CIPHER_AES_CBC;
+static int artpec6_crypto_aes_ctr_init(struct crypto_skcipher *tfm)
+{
+ return artpec6_crypto_aes_init(tfm, ARTPEC6_CRYPTO_CIPHER_AES_CTR);
+}

- return 0;
+static int artpec6_crypto_aes_cbc_init(struct crypto_skcipher *tfm)
+{
+ return artpec6_crypto_aes_init(tfm, ARTPEC6_CRYPTO_CIPHER_AES_CBC);
}

static int artpec6_crypto_aes_xts_init(struct crypto_skcipher *tfm)
{
- struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(tfm);
-
- tfm->reqsize = sizeof(struct artpec6_crypto_request_context);
- ctx->crypto_type = ARTPEC6_CRYPTO_CIPHER_AES_XTS;
-
- return 0;
+ return artpec6_crypto_aes_init(tfm, ARTPEC6_CRYPTO_CIPHER_AES_XTS);
}

static void artpec6_crypto_aes_exit(struct crypto_skcipher *tfm)
{
struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(tfm);

- memset(ctx, 0, sizeof(*ctx));
-}
+ if (ctx->fallback)
+ crypto_free_sync_skcipher(ctx->fallback);

-static void artpec6_crypto_aes_ctr_exit(struct crypto_skcipher *tfm)
-{
- struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(tfm);
-
- crypto_free_sync_skcipher(ctx->fallback);
- artpec6_crypto_aes_exit(tfm);
+ memset(ctx, 0, sizeof(*ctx));
}

static int
@@ -2764,7 +2795,8 @@ static struct skcipher_alg crypto_algos[] = {
.cra_driver_name = "artpec6-ecb-aes",
.cra_priority = 300,
.cra_flags = CRYPTO_ALG_ASYNC |
- CRYPTO_ALG_ALLOCATES_MEMORY,
+ CRYPTO_ALG_ALLOCATES_MEMORY |
+ CRYPTO_ALG_NEED_FALLBACK,
.cra_blocksize = AES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct artpec6_cryptotfm_context),
.cra_alignmask = 3,
@@ -2799,7 +2831,7 @@ static struct skcipher_alg crypto_algos[] = {
.encrypt = artpec6_crypto_ctr_encrypt,
.decrypt = artpec6_crypto_ctr_decrypt,
.init = artpec6_crypto_aes_ctr_init,
- .exit = artpec6_crypto_aes_ctr_exit,
+ .exit = artpec6_crypto_aes_exit,
},
/* AES - CBC */
{
@@ -2808,7 +2840,8 @@ static struct skcipher_alg crypto_algos[] = {
.cra_driver_name = "artpec6-cbc-aes",
.cra_priority = 300,
.cra_flags = CRYPTO_ALG_ASYNC |
- CRYPTO_ALG_ALLOCATES_MEMORY,
+ CRYPTO_ALG_ALLOCATES_MEMORY |
+ CRYPTO_ALG_NEED_FALLBACK,
.cra_blocksize = AES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct artpec6_cryptotfm_context),
.cra_alignmask = 3,
@@ -2830,7 +2863,8 @@ static struct skcipher_alg crypto_algos[] = {
.cra_driver_name = "artpec6-xts-aes",
.cra_priority = 300,
.cra_flags = CRYPTO_ALG_ASYNC |
- CRYPTO_ALG_ALLOCATES_MEMORY,
+ CRYPTO_ALG_ALLOCATES_MEMORY |
+ CRYPTO_ALG_NEED_FALLBACK,
.cra_blocksize = AES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct artpec6_cryptotfm_context),
.cra_alignmask = 3,