@@ -34,7 +34,7 @@ static inline void qce_write_array(struct qce_device *qce, u32 offset,
int i;
for (i = 0; i < len; i++)
- qce_write(qce, offset + i * sizeof(u32), val[i]);
+ qce_write_reg_dma(qce, offset + i * sizeof(u32), val[i], 1);
}
static inline void
@@ -43,7 +43,7 @@ qce_clear_array(struct qce_device *qce, u32 offset, unsigned int len)
int i;
for (i = 0; i < len; i++)
- qce_write(qce, offset + i * sizeof(u32), 0);
+ qce_write_reg_dma(qce, offset + i * sizeof(u32), 0, 1);
}
static u32 qce_config_reg(struct qce_device *qce, int little)
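The pattern in these hunks: every immediate MMIO write via qce_write() becomes a queued BAM command element, which qce_submit_cmd_desc() later flushes as a single DMA transaction. The helper itself is defined elsewhere in the series; below is a minimal sketch of one plausible shape, using the bam_prep_ce() helper from include/linux/dma/qcom_bam_dma.h. The cmd/cmd_cnt fields and qce->base_dma are assumptions for illustration, not taken from this patch.

#include <linux/dma/qcom_bam_dma.h>

/* Hypothetical sketch, not the series' actual implementation: record a
 * register write as a BAM command element instead of issuing it with
 * writel(). The trailing 'cnt' argument at the call sites above is
 * assumed to count command elements consumed.
 */
static void qce_write_reg_dma(struct qce_device *qce, unsigned int offset,
			      u32 val, int cnt)
{
	struct qce_bam_transaction *txn = qce->dma.qce_bam_txn;

	/* Encode "write val to the register at base + offset". */
	bam_prep_ce(&txn->cmd[txn->cmd_cnt], qce->base_dma + offset,
		    BAM_WRITE_COMMAND, val);
	txn->cmd_cnt += cnt;
}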
@@ -86,16 +86,16 @@ static void qce_setup_config(struct qce_device *qce)
config = qce_config_reg(qce, 0);
/* clear status */
- qce_write(qce, REG_STATUS, 0);
- qce_write(qce, REG_CONFIG, config);
+ qce_write_reg_dma(qce, REG_STATUS, 0, 1);
+ qce_write_reg_dma(qce, REG_CONFIG, config, 1);
}
static inline void qce_crypto_go(struct qce_device *qce, bool result_dump)
{
if (result_dump)
- qce_write(qce, REG_GOPROC, BIT(GO_SHIFT) | BIT(RESULTS_DUMP_SHIFT));
+ qce_write_reg_dma(qce, REG_GOPROC, BIT(GO_SHIFT) | BIT(RESULTS_DUMP_SHIFT), 1);
else
- qce_write(qce, REG_GOPROC, BIT(GO_SHIFT));
+ qce_write_reg_dma(qce, REG_GOPROC, BIT(GO_SHIFT), 1);
}
#if defined(CONFIG_CRYPTO_DEV_QCE_SHA) || defined(CONFIG_CRYPTO_DEV_QCE_AEAD)
@@ -308,7 +308,7 @@ static void qce_xtskey(struct qce_device *qce, const u8 *enckey,
/* Set data unit size to cryptlen. Anything else causes
* the crypto engine to return incorrect results.
*/
- qce_write(qce, REG_ENCR_XTS_DU_SIZE, cryptlen);
+ qce_write_reg_dma(qce, REG_ENCR_XTS_DU_SIZE, cryptlen, 1);
}
static int qce_setup_regs_skcipher(struct crypto_async_request *async_req)
@@ -325,7 +325,9 @@ static int qce_setup_regs_skcipher(struct crypto_async_request *async_req)
u32 encr_cfg = 0, auth_cfg = 0, config;
unsigned int ivsize = rctx->ivsize;
unsigned long flags = rctx->flags;
+ int ret;
+ qce_clear_bam_transaction(qce);
qce_setup_config(qce);
if (IS_XTS(flags))
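qce_clear_bam_transaction() resets the per-device transaction state before a new descriptor is assembled. A guess at its shape, assuming the scatterlist counters that the completion path below consumes (cmd_cnt is the hypothetical field from the sketch above):

/* Hypothetical sketch: start each request from a clean transaction. */
static void qce_clear_bam_transaction(struct qce_device *qce)
{
	struct qce_bam_transaction *txn = qce->dma.qce_bam_txn;

	txn->cmd_cnt = 0;
	txn->qce_read_sgl_cnt = 0;
	txn->qce_write_sgl_cnt = 0;
}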
@@ -336,7 +338,7 @@ static int qce_setup_regs_skcipher(struct crypto_async_request *async_req)
qce_cpu_to_be32p_array(enckey, ctx->enc_key, keylen);
enckey_words = keylen / sizeof(u32);
- qce_write(qce, REG_AUTH_SEG_CFG, auth_cfg);
+ qce_write_reg_dma(qce, REG_AUTH_SEG_CFG, auth_cfg, 1);
encr_cfg = qce_encr_cfg(flags, keylen);
@@ -369,25 +371,31 @@ static int qce_setup_regs_skcipher(struct crypto_async_request *async_req)
if (IS_ENCRYPT(flags))
encr_cfg |= BIT(ENCODE_SHIFT);
- qce_write(qce, REG_ENCR_SEG_CFG, encr_cfg);
- qce_write(qce, REG_ENCR_SEG_SIZE, rctx->cryptlen);
- qce_write(qce, REG_ENCR_SEG_START, 0);
+ qce_write_reg_dma(qce, REG_ENCR_SEG_CFG, encr_cfg, 1);
+ qce_write_reg_dma(qce, REG_ENCR_SEG_SIZE, rctx->cryptlen, 1);
+ qce_write_reg_dma(qce, REG_ENCR_SEG_START, 0, 1);
if (IS_CTR(flags)) {
- qce_write(qce, REG_CNTR_MASK, ~0);
- qce_write(qce, REG_CNTR_MASK0, ~0);
- qce_write(qce, REG_CNTR_MASK1, ~0);
- qce_write(qce, REG_CNTR_MASK2, ~0);
+ qce_write_reg_dma(qce, REG_CNTR_MASK, ~0, 1);
+ qce_write_reg_dma(qce, REG_CNTR_MASK0, ~0, 1);
+ qce_write_reg_dma(qce, REG_CNTR_MASK1, ~0, 1);
+ qce_write_reg_dma(qce, REG_CNTR_MASK2, ~0, 1);
}
- qce_write(qce, REG_SEG_SIZE, rctx->cryptlen);
+ qce_write_reg_dma(qce, REG_SEG_SIZE, rctx->cryptlen, 1);
/* get little endianness */
config = qce_config_reg(qce, 1);
- qce_write(qce, REG_CONFIG, config);
+ qce_write_reg_dma(qce, REG_CONFIG, config, 1);
qce_crypto_go(qce, true);
+ ret = qce_submit_cmd_desc(qce, 0);
+ if (ret) {
+ dev_err(qce->dev, "Error in skcipher cmd descriptor\n");
+ return ret;
+ }
+
return 0;
}
#endif
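qce_setup_regs_skcipher() now ends by flushing the queued register writes. BAM exposes command descriptors through the regular dmaengine API via the DMA_PREP_CMD flag; here is a sketch under that assumption (field names hypothetical, error unwinding and completion synchronization trimmed):

static int qce_submit_cmd_desc(struct qce_device *qce, unsigned long flags)
{
	struct qce_bam_transaction *txn = qce->dma.qce_bam_txn;
	struct dma_async_tx_descriptor *desc;
	struct scatterlist sg;

	/* Hand the accumulated command elements to the TX channel as a
	 * single-entry sgl.
	 */
	sg_init_one(&sg, txn->cmd,
		    txn->cmd_cnt * sizeof(struct bam_cmd_element));
	if (!dma_map_sg(qce->dev, &sg, 1, DMA_TO_DEVICE))
		return -ENOMEM;

	/* DMA_PREP_CMD marks this sgl as command elements rather than
	 * payload; note that dmaengine prep calls take an enum
	 * dma_transfer_direction (DMA_MEM_TO_DEV), unlike
	 * dma_map_sg()/dma_unmap_sg().
	 */
	desc = dmaengine_prep_slave_sg(qce->dma.txchan, &sg, 1,
				       DMA_MEM_TO_DEV, DMA_PREP_CMD);
	if (!desc)
		return -EINVAL;

	dmaengine_submit(desc);
	dma_async_issue_pending(qce->dma.txchan);
	return 0;
}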
@@ -31,6 +31,7 @@ static void qce_skcipher_done(void *data)
struct qce_cipher_reqctx *rctx = skcipher_request_ctx(req);
struct qce_alg_template *tmpl = to_cipher_tmpl(crypto_skcipher_reqtfm(req));
struct qce_device *qce = tmpl->qce;
+ struct qce_bam_transaction *qce_bam_txn = qce->dma.qce_bam_txn;
struct qce_result_dump *result_buf = qce->dma.result_buf;
enum dma_data_direction dir_src, dir_dst;
u32 status;
@@ -52,6 +53,17 @@ static void qce_skcipher_done(void *data)
sg_free_table(&rctx->dst_tbl);
+ if (qce_bam_txn->qce_read_sgl_cnt)
+ dma_unmap_sg(qce->dev,
+ qce_bam_txn->qce_reg_read_sgl,
+ qce_bam_txn->qce_read_sgl_cnt,
+ DMA_FROM_DEVICE);
+ if (qce_bam_txn->qce_write_sgl_cnt)
+ dma_unmap_sg(qce->dev,
+ qce_bam_txn->qce_reg_write_sgl,
+ qce_bam_txn->qce_write_sgl_cnt,
+ DMA_TO_DEVICE);
+
error = qce_check_status(qce, &status);
if (error < 0)
dev_dbg(qce->dev, "skcipher operation error (%x)\n", status);
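A note on the unmap directions above: dma_map_sg()/dma_unmap_sg() take enum dma_data_direction, and each unmap must mirror its map, so the register-read scatterlist pairs with DMA_FROM_DEVICE and the register-write one with DMA_TO_DEVICE. DMA_DEV_TO_MEM/DMA_MEM_TO_DEV belong to enum dma_transfer_direction and are only valid for dmaengine prep and slave-config calls. A minimal pairing sketch (the map side presumably lives in qce_submit_cmd_desc() or a sibling helper; names assumed):

/* Map before submission ... */
nents = dma_map_sg(qce->dev, txn->qce_reg_read_sgl, cnt, DMA_FROM_DEVICE);

/* ... and unmap in the completion path with the same direction. */
dma_unmap_sg(qce->dev, txn->qce_reg_read_sgl, nents, DMA_FROM_DEVICE);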