
[PATCH] Perform AEAD input 24KiB splitting only when input larger than 32KiB
* cipher/chacha20.c (_gcry_chacha20_poly1305_encrypt)
(_gcry_chacha20_poly1305_decrypt): Process in 24KiB chunks if input
larger than 32KiB.
* cipher/cipher-ccm.c (_gcry_cipher_ccm_encrypt)
(_gcry_cipher_ccm_decrypt): Likewise.
* cipher/cipher-eax.c (_gcry_cipher_eax_encrypt)
(_gcry_cipher_eax_decrypt): Likewise.
* cipher/cipher-gcm.c (gcm_crypt_inner): Likewise.
* cipher/cipher-ocb.c (ocb_crypt): Likewise.
* cipher/cipher-poly1305.c (_gcry_cipher_poly1305_encrypt)
(_gcry_cipher_poly1305_decrypt): Likewise.
--

Splitting an input whose length is just above 24KiB is not beneficial;
for example, a 25KiB input would be split into a 24KiB chunk followed by
a short 1KiB tail. Instead, perform the splitting only when the input is
longer than 32KiB, so that the last chunk is also a large buffer.
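
For reference, here is a minimal standalone sketch of the chunking policy
that all six call sites now share. The process_aead/process_chunk names are
hypothetical and are not libgcrypt's actual API; the per-chunk worker merely
stands in for the mode-specific encrypt-then-checksum calls in the hunks
below.

  #include <stddef.h>
  #include <stdio.h>

  /* Hypothetical per-chunk worker (stands in for the mode-specific
   * encrypt+checksum work done while the chunk is hot in L1 cache). */
  static void
  process_chunk (const unsigned char *inbuf, size_t len)
  {
    (void)inbuf;
    printf ("chunk of %zu bytes\n", len);
  }

  static void
  process_aead (const unsigned char *inbuf, size_t inbuflen)
  {
    while (inbuflen)
      {
        size_t currlen = inbuflen;

        /* Split off a 24KiB chunk only while more than 32KiB remain,
         * so that whenever splitting happens the final chunk still
         * ends up larger than 8KiB. */
        if (currlen > 32 * 1024)
          currlen = 24 * 1024;

        process_chunk (inbuf, currlen);

        inbuf += currlen;
        inbuflen -= currlen;
      }
  }

  int
  main (void)
  {
    static unsigned char buf[100 * 1024];

    /* 100KiB input: chunks of 24, 24, 24 and a final 28KiB
     * (the old threshold would have produced 24, 24, 24, 24 and 4KiB). */
    process_aead (buf, sizeof buf);
    return 0;
  }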

Signed-off-by: Jussi Kivilinna <jussi.kivilinna@iki.fi>
---
cipher/chacha20.c | 12 ++++++++----
cipher/cipher-ccm.c | 12 ++++++++----
cipher/cipher-eax.c | 12 ++++++++----
cipher/cipher-gcm.c | 5 +++--
cipher/cipher-ocb.c | 7 ++++---
cipher/cipher-poly1305.c | 12 ++++++++----
6 files changed, 39 insertions(+), 21 deletions(-)

diff --git a/cipher/chacha20.c b/cipher/chacha20.c
index 497594a0..870cfa18 100644
--- a/cipher/chacha20.c
+++ b/cipher/chacha20.c
@@ -969,8 +969,10 @@ _gcry_chacha20_poly1305_encrypt(gcry_cipher_hd_t c, byte *outbuf,
size_t currlen = length;

/* Since checksumming is done after encryption, process input in 24KiB
- * chunks to keep data loaded in L1 cache for checksumming. */
- if (currlen > 24 * 1024)
+ * chunks to keep data loaded in L1 cache for checksumming. However,
+ * only do splitting if the input is large enough so that the last
+ * chunk does not end up being short. */
+ if (currlen > 32 * 1024)
currlen = 24 * 1024;

nburn = do_chacha20_encrypt_stream_tail (ctx, outbuf, inbuf, currlen);
@@ -1157,8 +1159,10 @@ _gcry_chacha20_poly1305_decrypt(gcry_cipher_hd_t c, byte *outbuf,
size_t currlen = length;

/* Since checksumming is done before decryption, process input in 24KiB
- * chunks to keep data loaded in L1 cache for decryption. */
- if (currlen > 24 * 1024)
+ * chunks to keep data loaded in L1 cache for decryption. However,
+ * only do splitting if the input is large enough so that the last
+ * chunk does not end up being short. */
+ if (currlen > 32 * 1024)
currlen = 24 * 1024;

nburn = _gcry_poly1305_update_burn (&c->u_mode.poly1305.ctx, inbuf,
diff --git a/cipher/cipher-ccm.c b/cipher/cipher-ccm.c
index dcb268d0..3e2a767a 100644
--- a/cipher/cipher-ccm.c
+++ b/cipher/cipher-ccm.c
@@ -345,8 +345,10 @@ _gcry_cipher_ccm_encrypt (gcry_cipher_hd_t c, unsigned char *outbuf,
size_t currlen = inbuflen;

/* Since checksumming is done before encryption, process input in 24KiB
- * chunks to keep data loaded in L1 cache for encryption. */
- if (currlen > 24 * 1024)
+ * chunks to keep data loaded in L1 cache for encryption. However,
+ * only do splitting if the input is large enough so that the last
+ * chunk does not end up being short. */
+ if (currlen > 32 * 1024)
currlen = 24 * 1024;

c->u_mode.ccm.encryptlen -= currlen;
@@ -391,8 +393,10 @@ _gcry_cipher_ccm_decrypt (gcry_cipher_hd_t c, unsigned char *outbuf,
size_t currlen = inbuflen;

/* Since checksumming is done after decryption, process input in 24KiB
- * chunks to keep data loaded in L1 cache for checksumming. */
- if (currlen > 24 * 1024)
+ * chunks to keep data loaded in L1 cache for checksumming. However,
+ * only do splitting if the input is large enough so that the last
+ * chunk does not end up being short. */
+ if (currlen > 32 * 1024)
currlen = 24 * 1024;

err = _gcry_cipher_ctr_encrypt (c, outbuf, outbuflen, inbuf, currlen);
diff --git a/cipher/cipher-eax.c b/cipher/cipher-eax.c
index 08f815a9..0c5cf84e 100644
--- a/cipher/cipher-eax.c
+++ b/cipher/cipher-eax.c
@@ -53,8 +53,10 @@ _gcry_cipher_eax_encrypt (gcry_cipher_hd_t c,
size_t currlen = inbuflen;

/* Since checksumming is done after encryption, process input in 24KiB
- * chunks to keep data loaded in L1 cache for checksumming. */
- if (currlen > 24 * 1024)
+ * chunks to keep data loaded in L1 cache for checksumming. However,
+ * only do splitting if the input is large enough so that the last
+ * chunk does not end up being short. */
+ if (currlen > 32 * 1024)
currlen = 24 * 1024;

err = _gcry_cipher_ctr_encrypt (c, outbuf, outbuflen, inbuf, currlen);
@@ -100,8 +102,10 @@ _gcry_cipher_eax_decrypt (gcry_cipher_hd_t c,
size_t currlen = inbuflen;

/* Since checksumming is done before decryption, process input in 24KiB
- * chunks to keep data loaded in L1 cache for decryption. */
- if (currlen > 24 * 1024)
+ * chunks to keep data loaded in L1 cache for decryption. However,
+ * only do splitting if the input is large enough so that the last
+ * chunk does not end up being short. */
+ if (currlen > 32 * 1024)
currlen = 24 * 1024;

err = _gcry_cmac_write (c, &c->u_mode.eax.cmac_ciphertext, inbuf,
diff --git a/cipher/cipher-gcm.c b/cipher/cipher-gcm.c
index fc79986e..69ff0de6 100644
--- a/cipher/cipher-gcm.c
+++ b/cipher/cipher-gcm.c
@@ -888,8 +888,9 @@ gcm_crypt_inner (gcry_cipher_hd_t c, byte *outbuf, size_t outbuflen,

/* Since checksumming is done after/before encryption/decryption,
* process input in 24KiB chunks to keep data loaded in L1 cache for
- * checksumming/decryption. */
- if (currlen > 24 * 1024)
+ * checksumming/decryption. However, only do splitting if the input
+ * is large enough so that the last chunk does not end up being short. */
+ if (currlen > 32 * 1024)
currlen = 24 * 1024;

if (!encrypt)
diff --git a/cipher/cipher-ocb.c b/cipher/cipher-ocb.c
index bfafa4c8..7a4cfbe1 100644
--- a/cipher/cipher-ocb.c
+++ b/cipher/cipher-ocb.c
@@ -548,9 +548,10 @@ ocb_crypt (gcry_cipher_hd_t c, int encrypt,
nblks = nblks < nmaxblks ? nblks : nmaxblks;

/* Since checksum xoring is done before/after encryption/decryption,
- process input in 24KiB chunks to keep data loaded in L1 cache for
- checksumming. */
- if (nblks > 24 * 1024 / OCB_BLOCK_LEN)
+ process input in 24KiB chunks to keep data loaded in L1 cache for
+ checksumming. However, only do splitting if the input is large
+ enough so that the last chunk does not end up being short. */
+ if (nblks > 32 * 1024 / OCB_BLOCK_LEN)
nblks = 24 * 1024 / OCB_BLOCK_LEN;

/* Use a bulk method if available. */
diff --git a/cipher/cipher-poly1305.c b/cipher/cipher-poly1305.c
index bb475236..5cd3561b 100644
--- a/cipher/cipher-poly1305.c
+++ b/cipher/cipher-poly1305.c
@@ -174,8 +174,10 @@ _gcry_cipher_poly1305_encrypt (gcry_cipher_hd_t c,
size_t currlen = inbuflen;

/* Since checksumming is done after encryption, process input in 24KiB
- * chunks to keep data loaded in L1 cache for checksumming. */
- if (currlen > 24 * 1024)
+ * chunks to keep data loaded in L1 cache for checksumming. However,
+ * only do splitting if the input is large enough so that the last
+ * chunk does not end up being short. */
+ if (currlen > 32 * 1024)
currlen = 24 * 1024;

c->spec->stencrypt(&c->context.c, outbuf, (byte*)inbuf, currlen);
@@ -232,8 +234,10 @@ _gcry_cipher_poly1305_decrypt (gcry_cipher_hd_t c,
size_t currlen = inbuflen;

/* Since checksumming is done before decryption, process input in 24KiB
- * chunks to keep data loaded in L1 cache for decryption. */
- if (currlen > 24 * 1024)
+ * chunks to keep data loaded in L1 cache for decryption. However,
+ * only do splitting if the input is large enough so that the last
+ * chunk does not end up being short. */
+ if (currlen > 32 * 1024)
currlen = 24 * 1024;

_gcry_poly1305_update (&c->u_mode.poly1305.ctx, inbuf, currlen);
--
2.32.0

