Subject: [PATCH 5.0 046/238] crypto: x86/aegis - fix handling chunked inputs and MAY_SLEEP

    5.0-stable review patch.  If anyone has any objections, please let me know.

    ------------------

    From: Eric Biggers <ebiggers@google.com>

    commit ba6771c0a0bc2fac9d6a8759bab8493bd1cffe3b upstream.

    The x86 AEGIS implementations all fail the improved AEAD tests because
    they produce the wrong result with some data layouts. The issue is that
    they assume that if the skcipher_walk API gives 'nbytes' not aligned to
    the walksize (a.k.a. walk.stride), then it is the end of the data. In
    fact, this can happen before the end.
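    As an illustrative sketch of the walk contract involved (here
    process_blocks(), process_tail(), and WALK_STRIDE are placeholder
    names, not kernel APIs): skcipher_walk_done() takes the number of
    bytes still *unprocessed*, so handing back the unaligned remainder
    makes the API present those bytes again, merged with the following
    chunk, and only a remainder seen after the loop is the true tail:

	while (walk->nbytes >= WALK_STRIDE) {
		unsigned int n = round_down(walk->nbytes, WALK_STRIDE);

		/* consume only whole blocks from this chunk */
		process_blocks(walk->src.virt.addr, walk->dst.virt.addr, n);
		/* hand the remainder back to be re-presented later */
		skcipher_walk_done(walk, walk->nbytes - n);
	}
	if (walk->nbytes) {
		/* a partial block here really is the end of the data */
		process_tail(walk->src.virt.addr, walk->dst.virt.addr,
			     walk->nbytes);
		skcipher_walk_done(walk, 0);
	}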

    Also, when the CRYPTO_TFM_REQ_MAY_SLEEP flag is given, they can
    incorrectly sleep in the skcipher_walk_*() functions while preemption
    has been disabled by kernel_fpu_begin().
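    A minimal sketch of the ordering that avoids this, mirroring the
    aegis128 hunk below (surrounding calls elided): the walk is
    initialized with atomic=true before kernel_fpu_begin(), so any setup
    work that may sleep happens while preemption is still enabled, and
    the later skcipher_walk_*() calls know they must not sleep:

	struct skcipher_walk walk;

	/* may sleep here; atomic=true forbids sleeping in the
	 * skcipher_walk_*() calls made after this point */
	ops->skcipher_walk_init(&walk, req, true);

	kernel_fpu_begin();	/* preemption disabled from here on */
	/* ... init state, process AD, process crypt using &walk ... */
	kernel_fpu_end();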

    Fix these bugs.

    Fixes: 1d373d4e8e15 ("crypto: x86 - Add optimized AEGIS implementations")
    Cc: <stable@vger.kernel.org> # v4.18+
    Cc: Ondrej Mosnacek <omosnace@redhat.com>
    Signed-off-by: Eric Biggers <ebiggers@google.com>
    Reviewed-by: Ondrej Mosnacek <omosnace@redhat.com>
    Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
    Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>

    ---
    arch/x86/crypto/aegis128-aesni-glue.c  | 38 +++++++++++++--------------------
    arch/x86/crypto/aegis128l-aesni-glue.c | 38 +++++++++++++--------------------
    arch/x86/crypto/aegis256-aesni-glue.c  | 38 +++++++++++++--------------------
    3 files changed, 45 insertions(+), 69 deletions(-)

--- a/arch/x86/crypto/aegis128-aesni-glue.c
+++ b/arch/x86/crypto/aegis128-aesni-glue.c
@@ -119,31 +119,20 @@ static void crypto_aegis128_aesni_proces
 }
 
 static void crypto_aegis128_aesni_process_crypt(
-		struct aegis_state *state, struct aead_request *req,
+		struct aegis_state *state, struct skcipher_walk *walk,
 		const struct aegis_crypt_ops *ops)
 {
-	struct skcipher_walk walk;
-	u8 *src, *dst;
-	unsigned int chunksize, base;
-
-	ops->skcipher_walk_init(&walk, req, false);
-
-	while (walk.nbytes) {
-		src = walk.src.virt.addr;
-		dst = walk.dst.virt.addr;
-		chunksize = walk.nbytes;
-
-		ops->crypt_blocks(state, chunksize, src, dst);
-
-		base = chunksize & ~(AEGIS128_BLOCK_SIZE - 1);
-		src += base;
-		dst += base;
-		chunksize &= AEGIS128_BLOCK_SIZE - 1;
-
-		if (chunksize > 0)
-			ops->crypt_tail(state, chunksize, src, dst);
+	while (walk->nbytes >= AEGIS128_BLOCK_SIZE) {
+		ops->crypt_blocks(state,
+				  round_down(walk->nbytes, AEGIS128_BLOCK_SIZE),
+				  walk->src.virt.addr, walk->dst.virt.addr);
+		skcipher_walk_done(walk, walk->nbytes % AEGIS128_BLOCK_SIZE);
+	}
 
-		skcipher_walk_done(&walk, 0);
+	if (walk->nbytes) {
+		ops->crypt_tail(state, walk->nbytes, walk->src.virt.addr,
+				walk->dst.virt.addr);
+		skcipher_walk_done(walk, 0);
 	}
 }
 
@@ -186,13 +175,16 @@ static void crypto_aegis128_aesni_crypt(
 {
 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
 	struct aegis_ctx *ctx = crypto_aegis128_aesni_ctx(tfm);
+	struct skcipher_walk walk;
 	struct aegis_state state;
 
+	ops->skcipher_walk_init(&walk, req, true);
+
 	kernel_fpu_begin();
 
 	crypto_aegis128_aesni_init(&state, ctx->key.bytes, req->iv);
 	crypto_aegis128_aesni_process_ad(&state, req->src, req->assoclen);
-	crypto_aegis128_aesni_process_crypt(&state, req, ops);
+	crypto_aegis128_aesni_process_crypt(&state, &walk, ops);
 	crypto_aegis128_aesni_final(&state, tag_xor, req->assoclen, cryptlen);
 
 	kernel_fpu_end();
--- a/arch/x86/crypto/aegis128l-aesni-glue.c
+++ b/arch/x86/crypto/aegis128l-aesni-glue.c
@@ -119,31 +119,20 @@ static void crypto_aegis128l_aesni_proce
 }
 
 static void crypto_aegis128l_aesni_process_crypt(
-		struct aegis_state *state, struct aead_request *req,
+		struct aegis_state *state, struct skcipher_walk *walk,
 		const struct aegis_crypt_ops *ops)
 {
-	struct skcipher_walk walk;
-	u8 *src, *dst;
-	unsigned int chunksize, base;
-
-	ops->skcipher_walk_init(&walk, req, false);
-
-	while (walk.nbytes) {
-		src = walk.src.virt.addr;
-		dst = walk.dst.virt.addr;
-		chunksize = walk.nbytes;
-
-		ops->crypt_blocks(state, chunksize, src, dst);
-
-		base = chunksize & ~(AEGIS128L_BLOCK_SIZE - 1);
-		src += base;
-		dst += base;
-		chunksize &= AEGIS128L_BLOCK_SIZE - 1;
-
-		if (chunksize > 0)
-			ops->crypt_tail(state, chunksize, src, dst);
+	while (walk->nbytes >= AEGIS128L_BLOCK_SIZE) {
+		ops->crypt_blocks(state, round_down(walk->nbytes,
+						    AEGIS128L_BLOCK_SIZE),
+				  walk->src.virt.addr, walk->dst.virt.addr);
+		skcipher_walk_done(walk, walk->nbytes % AEGIS128L_BLOCK_SIZE);
+	}
 
-		skcipher_walk_done(&walk, 0);
+	if (walk->nbytes) {
+		ops->crypt_tail(state, walk->nbytes, walk->src.virt.addr,
+				walk->dst.virt.addr);
+		skcipher_walk_done(walk, 0);
 	}
 }
 
@@ -186,13 +175,16 @@ static void crypto_aegis128l_aesni_crypt
 {
 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
 	struct aegis_ctx *ctx = crypto_aegis128l_aesni_ctx(tfm);
+	struct skcipher_walk walk;
 	struct aegis_state state;
 
+	ops->skcipher_walk_init(&walk, req, true);
+
 	kernel_fpu_begin();
 
 	crypto_aegis128l_aesni_init(&state, ctx->key.bytes, req->iv);
 	crypto_aegis128l_aesni_process_ad(&state, req->src, req->assoclen);
-	crypto_aegis128l_aesni_process_crypt(&state, req, ops);
+	crypto_aegis128l_aesni_process_crypt(&state, &walk, ops);
 	crypto_aegis128l_aesni_final(&state, tag_xor, req->assoclen, cryptlen);
 
 	kernel_fpu_end();
--- a/arch/x86/crypto/aegis256-aesni-glue.c
+++ b/arch/x86/crypto/aegis256-aesni-glue.c
@@ -119,31 +119,20 @@ static void crypto_aegis256_aesni_proces
 }
 
 static void crypto_aegis256_aesni_process_crypt(
-		struct aegis_state *state, struct aead_request *req,
+		struct aegis_state *state, struct skcipher_walk *walk,
 		const struct aegis_crypt_ops *ops)
 {
-	struct skcipher_walk walk;
-	u8 *src, *dst;
-	unsigned int chunksize, base;
-
-	ops->skcipher_walk_init(&walk, req, false);
-
-	while (walk.nbytes) {
-		src = walk.src.virt.addr;
-		dst = walk.dst.virt.addr;
-		chunksize = walk.nbytes;
-
-		ops->crypt_blocks(state, chunksize, src, dst);
-
-		base = chunksize & ~(AEGIS256_BLOCK_SIZE - 1);
-		src += base;
-		dst += base;
-		chunksize &= AEGIS256_BLOCK_SIZE - 1;
-
-		if (chunksize > 0)
-			ops->crypt_tail(state, chunksize, src, dst);
+	while (walk->nbytes >= AEGIS256_BLOCK_SIZE) {
+		ops->crypt_blocks(state,
+				  round_down(walk->nbytes, AEGIS256_BLOCK_SIZE),
+				  walk->src.virt.addr, walk->dst.virt.addr);
+		skcipher_walk_done(walk, walk->nbytes % AEGIS256_BLOCK_SIZE);
+	}
 
-		skcipher_walk_done(&walk, 0);
+	if (walk->nbytes) {
+		ops->crypt_tail(state, walk->nbytes, walk->src.virt.addr,
+				walk->dst.virt.addr);
+		skcipher_walk_done(walk, 0);
 	}
 }
 
@@ -186,13 +175,16 @@ static void crypto_aegis256_aesni_crypt(
 {
 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
 	struct aegis_ctx *ctx = crypto_aegis256_aesni_ctx(tfm);
+	struct skcipher_walk walk;
 	struct aegis_state state;
 
+	ops->skcipher_walk_init(&walk, req, true);
+
 	kernel_fpu_begin();
 
 	crypto_aegis256_aesni_init(&state, ctx->key, req->iv);
 	crypto_aegis256_aesni_process_ad(&state, req->src, req->assoclen);
-	crypto_aegis256_aesni_process_crypt(&state, req, ops);
+	crypto_aegis256_aesni_process_crypt(&state, &walk, ops);
 	crypto_aegis256_aesni_final(&state, tag_xor, req->assoclen, cryptlen);
 
 	kernel_fpu_end();
