crypto: x86/poly1305 - depend on generic library not generic shash
author     Ard Biesheuvel <ardb@kernel.org>
           Fri, 8 Nov 2019 12:22:22 +0000 (13:22 +0100)
committer  Herbert Xu <herbert@gondor.apana.org.au>
           Sun, 17 Nov 2019 01:02:41 +0000 (09:02 +0800)
Remove the dependency on the generic Poly1305 driver. Instead, depend
on the generic library so that we only reuse code without pulling in
the generic skcipher implementation as well.
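
Concretely, the reuse boils down to the poly1305_core_*() routines declared in
include/crypto/internal/poly1305.h (see the header hunk at the bottom of this
diff). A minimal sketch of a scalar fallback built only on those routines,
closely modelled on the poly1305_scalar_blocks() helper this patch adds; the
helper name here is hypothetical:

    #include <linux/kernel.h>
    #include <crypto/internal/poly1305.h>

    /*
     * Hypothetical scalar fallback built purely on the generic library:
     * absorb the one-time key on first use, then push all whole 16-byte
     * blocks through poly1305_core_blocks().  Returns the number of
     * leftover bytes so the caller can buffer a partial block.
     */
    static unsigned int poly1305_generic_blocks(struct poly1305_desc_ctx *dctx,
                                                const u8 *src, unsigned int srclen)
    {
            unsigned int datalen;

            if (unlikely(!dctx->sset)) {
                    /* the first bytes of the stream carry the one-time key */
                    datalen = crypto_poly1305_setdesckey(dctx, src, srclen);
                    src += srclen - datalen;
                    srclen = datalen;
            }
            if (srclen >= POLY1305_BLOCK_SIZE) {
                    poly1305_core_blocks(&dctx->h, dctx->r, src,
                                         srclen / POLY1305_BLOCK_SIZE, 1);
                    srclen %= POLY1305_BLOCK_SIZE;
            }
            return srclen;
    }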

While at it, remove the logic that prefers the non-SIMD path for short
inputs - this is no longer necessary after recent FPU handling changes
on x86.
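
With the 288-byte threshold gone, the choice between the SIMD and scalar paths
is made per call: whenever crypto_simd_usable() says the FPU may be used, the
SIMD blocks routine runs inside a kernel_fpu_begin()/kernel_fpu_end() section,
otherwise the generic library does the work. A simplified sketch of that
dispatch, assuming the file-local poly1305_simd_blocks() and
poly1305_scalar_blocks() helpers from the glue diff below; the wrapper name is
hypothetical:

    #include <crypto/internal/poly1305.h>
    #include <crypto/internal/simd.h>      /* crypto_simd_usable() */
    #include <asm/fpu/api.h>               /* kernel_fpu_begin()/kernel_fpu_end() */

    /*
     * Hypothetical wrapper showing the per-call dispatch: the SIMD routine
     * is only entered when the FPU is usable in the current context, and
     * every SIMD section is bracketed by kernel_fpu_begin()/kernel_fpu_end().
     */
    static unsigned int poly1305_blocks_dispatch(struct poly1305_desc_ctx *dctx,
                                                 const u8 *src, unsigned int srclen)
    {
            unsigned int rem;

            if (likely(crypto_simd_usable())) {
                    kernel_fpu_begin();
                    rem = poly1305_simd_blocks(dctx, src, srclen);
                    kernel_fpu_end();
            } else {
                    rem = poly1305_scalar_blocks(dctx, src, srclen);
            }
            return rem;     /* leftover bytes for the partial-block buffer */
    }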

Since this removes the last remaining user of the routines exported
by the generic shash driver, unexport them and make them static.
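
The x86 glue therefore carries its own static copies of the init and final
helpers (added in the first hunk below) and wires them into its shash_alg
alongside poly1305_simd_update(). A hedged sketch of what that registration
looks like; the driver name, priority and descsize here are illustrative
rather than copied from the patch:

    #include <linux/module.h>
    #include <crypto/internal/hash.h>
    #include <crypto/internal/poly1305.h>

    /*
     * Sketch of the glue driver's shash registration once the generic
     * helpers are no longer exported: the module supplies its own static
     * crypto_poly1305_init()/crypto_poly1305_final() and keeps
     * poly1305_simd_update() as the update hook.  Field values below are
     * illustrative.
     */
    static struct shash_alg poly1305_simd_alg = {
            .digestsize     = POLY1305_DIGEST_SIZE,
            .init           = crypto_poly1305_init,
            .update         = poly1305_simd_update,
            .final          = crypto_poly1305_final,
            .descsize       = sizeof(struct poly1305_desc_ctx),
            .base           = {
                    .cra_name               = "poly1305",
                    .cra_driver_name        = "poly1305-simd",
                    .cra_priority           = 300,
                    .cra_blocksize          = POLY1305_BLOCK_SIZE,
                    .cra_module             = THIS_MODULE,
            },
    };

    static int __init poly1305_simd_mod_init(void)
    {
            /* register the SIMD-accelerated shash with the crypto API */
            return crypto_register_shash(&poly1305_simd_alg);
    }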

Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
arch/x86/crypto/poly1305_glue.c
crypto/Kconfig
crypto/poly1305_generic.c
include/crypto/internal/poly1305.h

index b43b93c95e798e5dc00400a09397905ac50f7f20..a5b3a054604cf2415025ee949619307bc8488aff 100644 (file)
@@ -34,6 +34,24 @@ static void poly1305_simd_mult(u32 *a, const u32 *b)
        poly1305_block_sse2(a, m, b, 1);
 }
 
+static unsigned int poly1305_scalar_blocks(struct poly1305_desc_ctx *dctx,
+                                          const u8 *src, unsigned int srclen)
+{
+       unsigned int datalen;
+
+       if (unlikely(!dctx->sset)) {
+               datalen = crypto_poly1305_setdesckey(dctx, src, srclen);
+               src += srclen - datalen;
+               srclen = datalen;
+       }
+       if (srclen >= POLY1305_BLOCK_SIZE) {
+               poly1305_core_blocks(&dctx->h, dctx->r, src,
+                                    srclen / POLY1305_BLOCK_SIZE, 1);
+               srclen %= POLY1305_BLOCK_SIZE;
+       }
+       return srclen;
+}
+
 static unsigned int poly1305_simd_blocks(struct poly1305_desc_ctx *dctx,
                                         const u8 *src, unsigned int srclen)
 {
@@ -91,12 +109,6 @@ static int poly1305_simd_update(struct shash_desc *desc,
        struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
        unsigned int bytes;
 
-       /* kernel_fpu_begin/end is costly, use fallback for small updates */
-       if (srclen <= 288 || !crypto_simd_usable())
-               return crypto_poly1305_update(desc, src, srclen);
-
-       kernel_fpu_begin();
-
        if (unlikely(dctx->buflen)) {
                bytes = min(srclen, POLY1305_BLOCK_SIZE - dctx->buflen);
                memcpy(dctx->buf + dctx->buflen, src, bytes);
@@ -105,25 +117,59 @@ static int poly1305_simd_update(struct shash_desc *desc,
                dctx->buflen += bytes;
 
                if (dctx->buflen == POLY1305_BLOCK_SIZE) {
-                       poly1305_simd_blocks(dctx, dctx->buf,
-                                            POLY1305_BLOCK_SIZE);
+                       if (likely(crypto_simd_usable())) {
+                               kernel_fpu_begin();
+                               poly1305_simd_blocks(dctx, dctx->buf,
+                                                    POLY1305_BLOCK_SIZE);
+                               kernel_fpu_end();
+                       } else {
+                               poly1305_scalar_blocks(dctx, dctx->buf,
+                                                      POLY1305_BLOCK_SIZE);
+                       }
                        dctx->buflen = 0;
                }
        }
 
        if (likely(srclen >= POLY1305_BLOCK_SIZE)) {
-               bytes = poly1305_simd_blocks(dctx, src, srclen);
+               if (likely(crypto_simd_usable())) {
+                       kernel_fpu_begin();
+                       bytes = poly1305_simd_blocks(dctx, src, srclen);
+                       kernel_fpu_end();
+               } else {
+                       bytes = poly1305_scalar_blocks(dctx, src, srclen);
+               }
                src += srclen - bytes;
                srclen = bytes;
        }
 
-       kernel_fpu_end();
-
        if (unlikely(srclen)) {
                dctx->buflen = srclen;
                memcpy(dctx->buf, src, srclen);
        }
+
+       return 0;
+}
+
+static int crypto_poly1305_init(struct shash_desc *desc)
+{
+       struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
+
+       poly1305_core_init(&dctx->h);
+       dctx->buflen = 0;
+       dctx->rset = 0;
+       dctx->sset = false;
+
+       return 0;
+}
+
+static int crypto_poly1305_final(struct shash_desc *desc, u8 *dst)
+{
+       struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
+
+       if (unlikely(!dctx->sset))
+               return -ENOKEY;
 
+       poly1305_final_generic(dctx, dst);
        return 0;
 }
 
index 362dd6ae6aca566a6e8941887d6d28866f3bd2b3..2c7327a5b28ecd22f4e619327527537544f3aa69 100644 (file)
@@ -714,7 +714,7 @@ config CRYPTO_POLY1305
 config CRYPTO_POLY1305_X86_64
        tristate "Poly1305 authenticator algorithm (x86_64/SSE2/AVX2)"
        depends on X86 && 64BIT
-       select CRYPTO_POLY1305
+       select CRYPTO_LIB_POLY1305_GENERIC
        help
          Poly1305 authenticator algorithm, RFC7539.
 
index afe9a9e576ddabab03593bc438af2ed3ac27aced..21edbd8c99fbc1f91a54df9d0321bcce9890dfdf 100644 (file)
@@ -19,7 +19,7 @@
 #include <linux/module.h>
 #include <asm/unaligned.h>
 
-int crypto_poly1305_init(struct shash_desc *desc)
+static int crypto_poly1305_init(struct shash_desc *desc)
 {
        struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
 
@@ -30,7 +30,6 @@ int crypto_poly1305_init(struct shash_desc *desc)
 
        return 0;
 }
-EXPORT_SYMBOL_GPL(crypto_poly1305_init);
 
 static void poly1305_blocks(struct poly1305_desc_ctx *dctx, const u8 *src,
                            unsigned int srclen)
@@ -47,8 +46,8 @@ static void poly1305_blocks(struct poly1305_desc_ctx *dctx, const u8 *src,
                             srclen / POLY1305_BLOCK_SIZE, 1);
 }
 
-int crypto_poly1305_update(struct shash_desc *desc,
-                          const u8 *src, unsigned int srclen)
+static int crypto_poly1305_update(struct shash_desc *desc,
+                                 const u8 *src, unsigned int srclen)
 {
        struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
        unsigned int bytes;
@@ -80,9 +79,8 @@ int crypto_poly1305_update(struct shash_desc *desc,
 
        return 0;
 }
-EXPORT_SYMBOL_GPL(crypto_poly1305_update);
 
-int crypto_poly1305_final(struct shash_desc *desc, u8 *dst)
+static int crypto_poly1305_final(struct shash_desc *desc, u8 *dst)
 {
        struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
 
@@ -92,7 +90,6 @@ int crypto_poly1305_final(struct shash_desc *desc, u8 *dst)
        poly1305_final_generic(dctx, dst);
        return 0;
 }
-EXPORT_SYMBOL_GPL(crypto_poly1305_final);
 
 static struct shash_alg poly1305_alg = {
        .digestsize     = POLY1305_DIGEST_SIZE,
index 04fa269e5534bb79834f839448d93bedeb3c5699..479b0cab2a1ad68dc9397be3cc8de1814cc993a7 100644 (file)
@@ -10,8 +10,6 @@
 #include <linux/types.h>
 #include <crypto/poly1305.h>
 
-struct shash_desc;
-
 /*
  * Poly1305 core functions.  These implement the ε-almost-∆-universal hash
  * function underlying the Poly1305 MAC, i.e. they don't add an encrypted nonce
@@ -28,13 +26,6 @@ void poly1305_core_blocks(struct poly1305_state *state,
                          unsigned int nblocks, u32 hibit);
 void poly1305_core_emit(const struct poly1305_state *state, void *dst);
 
-/* Crypto API helper functions for the Poly1305 MAC */
-int crypto_poly1305_init(struct shash_desc *desc);
-
-int crypto_poly1305_update(struct shash_desc *desc,
-                          const u8 *src, unsigned int srclen);
-int crypto_poly1305_final(struct shash_desc *desc, u8 *dst);
-
 /*
  * Poly1305 requires a unique key for each tag, which implies that we can't set
  * it on the tfm that gets accessed by multiple users simultaneously. Instead we