From: Andrei Botila <andrei.botila@nxp.com>
Subject: [PATCH v2 10/12] crypto: caam/jr - add support for XTS with 16B IV
Date: 2020-09-21

Newer CAAM versions (Era 9+) support 16B IVs. Since the HW limitation is no
longer present on these devices, they should process requests containing
16B IVs directly in hardware, without using a fallback.

Signed-off-by: Andrei Botila <andrei.botila@nxp.com>
---
 drivers/crypto/caam/caamalg.c      | 12 ++++++++----
 drivers/crypto/caam/caamalg_desc.c | 27 ++++++++++++++++-----------
 2 files changed, 24 insertions(+), 15 deletions(-)
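
Note: after this patch the xts(aes) software fallback is only exercised on
pre-Era-9 hardware when the request carries a full 16B IV, or (on any era)
when the key length is neither 2 * AES_KEYSIZE_128 nor 2 * AES_KEYSIZE_256.
A minimal sketch of the combined condition checked in skcipher_crypt(),
written in terms of the driver's existing helpers and assuming a fallback
tfm was allocated (ctx->fallback); the helper name xts_needs_fallback() is
illustrative only and not part of this patch:

static bool xts_needs_fallback(struct skcipher_request *req,
			       struct caam_ctx *ctx,
			       struct caam_drv_private *ctrlpriv)
{
	/* pre-Era-9 HW cannot process a 16B IV directly; use the fallback */
	if (ctrlpriv->era <= 8 && xts_skcipher_ivsize(req))
		return true;

	/* non-standard XTS key lengths always go through the fallback */
	return ctx->xts_key_fallback;
}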

diff --git a/drivers/crypto/caam/caamalg.c b/drivers/crypto/caam/caamalg.c
index b5f8823e4300..d7a3b13c0c52 100644
--- a/drivers/crypto/caam/caamalg.c
+++ b/drivers/crypto/caam/caamalg.c
@@ -834,6 +834,7 @@ static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
 {
 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
 	struct device *jrdev = ctx->jrdev;
+	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
 	u32 *desc;
 	int err;
 
@@ -846,9 +847,11 @@ static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
 	if (keylen != 2 * AES_KEYSIZE_128 && keylen != 2 * AES_KEYSIZE_256)
 		ctx->xts_key_fallback = true;
 
-	err = crypto_skcipher_setkey(ctx->fallback, key, keylen);
-	if (err)
-		return err;
+	if (ctrlpriv->era <= 8 || ctx->xts_key_fallback) {
+		err = crypto_skcipher_setkey(ctx->fallback, key, keylen);
+		if (err)
+			return err;
+	}
 
 	ctx->cdata.keylen = keylen;
 	ctx->cdata.key_virt = key;
@@ -1787,6 +1790,7 @@ static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt)
 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
 	struct device *jrdev = ctx->jrdev;
 	struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev);
+	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
 	u32 *desc;
 	int ret = 0;
 
@@ -1798,7 +1802,7 @@ static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt)
 	if (!req->cryptlen && !ctx->fallback)
 		return 0;
 
-	if (ctx->fallback && (xts_skcipher_ivsize(req) ||
+	if (ctx->fallback && ((ctrlpriv->era <= 8 && xts_skcipher_ivsize(req)) ||
 			      ctx->xts_key_fallback)) {
 		struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
 
diff --git a/drivers/crypto/caam/caamalg_desc.c b/drivers/crypto/caam/caamalg_desc.c
index d6c58184bb57..433d6d5cd582 100644
--- a/drivers/crypto/caam/caamalg_desc.c
+++ b/drivers/crypto/caam/caamalg_desc.c
@@ -1550,13 +1550,14 @@ void cnstr_shdsc_xts_skcipher_encap(u32 * const desc, struct alginfo *cdata)
 	set_jump_tgt_here(desc, key_jump_cmd);
 
 	/*
-	 * create sequence for loading the sector index
-	 * Upper 8B of IV - will be used as sector index
-	 * Lower 8B of IV - will be discarded
+	 * create sequence for loading the sector index / 16B tweak value
+	 * Lower 8B of IV - sector index / tweak lower half
+	 * Upper 8B of IV - upper half of 16B tweak
 	 */
 	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
 			(0x20 << LDST_OFFSET_SHIFT));
-	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
+	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
+			(0x30 << LDST_OFFSET_SHIFT));
 
 	/* Load operation */
 	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
@@ -1565,9 +1566,11 @@ void cnstr_shdsc_xts_skcipher_encap(u32 * const desc, struct alginfo *cdata)
 	/* Perform operation */
 	skcipher_append_src_dst(desc);
 
-	/* Store upper 8B of IV */
+	/* Store lower 8B and upper 8B of IV */
 	append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
 			 (0x20 << LDST_OFFSET_SHIFT));
+	append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
+			 (0x30 << LDST_OFFSET_SHIFT));
 
 	print_hex_dump_debug("xts skcipher enc shdesc@" __stringify(__LINE__)
 			     ": ", DUMP_PREFIX_ADDRESS, 16, 4,
@@ -1609,23 +1612,25 @@ void cnstr_shdsc_xts_skcipher_decap(u32 * const desc, struct alginfo *cdata)
 	set_jump_tgt_here(desc, key_jump_cmd);
 
 	/*
-	 * create sequence for loading the sector index
-	 * Upper 8B of IV - will be used as sector index
-	 * Lower 8B of IV - will be discarded
+	 * create sequence for loading the sector index / 16B tweak value
+	 * Lower 8B of IV - sector index / tweak lower half
+	 * Upper 8B of IV - upper half of 16B tweak
 	 */
 	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
 			(0x20 << LDST_OFFSET_SHIFT));
-	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
-
+	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
+			(0x30 << LDST_OFFSET_SHIFT));
 	/* Load operation */
 	append_dec_op1(desc, cdata->algtype);
 
 	/* Perform operation */
 	skcipher_append_src_dst(desc);
 
-	/* Store upper 8B of IV */
+	/* Store lower 8B and upper 8B of IV */
 	append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
 			 (0x20 << LDST_OFFSET_SHIFT));
+	append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
+			 (0x30 << LDST_OFFSET_SHIFT));
 
 	print_hex_dump_debug("xts skcipher dec shdesc@" __stringify(__LINE__)
 			     ": ", DUMP_PREFIX_ADDRESS, 16, 4, desc,
-- 
2.17.1