diff --git a/cbits/cryptonite_align.h b/cbits/cryptonite_align.h
index 41172a9..01e8a36 100644
--- a/cbits/cryptonite_align.h
+++ b/cbits/cryptonite_align.h
@@ -44,11 +44,21 @@ static inline void store_le32_aligned(uint8_t *dst, const uint32_t v)
     *((uint32_t *) dst) = cpu_to_le32(v);
 }

+static inline void xor_le32_aligned(uint8_t *dst, const uint32_t v)
+{
+    *((uint32_t *) dst) ^= cpu_to_le32(v);
+}
+
 static inline void store_be32_aligned(uint8_t *dst, const uint32_t v)
 {
     *((uint32_t *) dst) = cpu_to_be32(v);
 }

+static inline void xor_be32_aligned(uint8_t *dst, const uint32_t v)
+{
+    *((uint32_t *) dst) ^= cpu_to_be32(v);
+}
+
 static inline void store_le64_aligned(uint8_t *dst, const uint64_t v)
 {
     *((uint64_t *) dst) = cpu_to_le64(v);
@@ -59,6 +69,11 @@ static inline void store_be64_aligned(uint8_t *dst, const uint64_t v)
     *((uint64_t *) dst) = cpu_to_be64(v);
 }

+static inline void xor_be64_aligned(uint8_t *dst, const uint64_t v)
+{
+    *((uint64_t *) dst) ^= cpu_to_be64(v);
+}
+
 #ifdef UNALIGNED_ACCESS_OK
 #define load_le32(a) load_le32_aligned(a)
 #else
@@ -70,20 +85,30 @@ static inline uint32_t load_le32(const uint8_t *p)

 #ifdef UNALIGNED_ACCESS_OK
 #define store_le32(a, b) store_le32_aligned(a, b)
+#define xor_le32(a, b) xor_le32_aligned(a, b)
 #else
 static inline void store_le32(uint8_t *dst, const uint32_t v)
 {
     dst[0] = v; dst[1] = v >> 8; dst[2] = v >> 16; dst[3] = v >> 24;
 }
+static inline void xor_le32(uint8_t *dst, const uint32_t v)
+{
+    dst[0] ^= v; dst[1] ^= v >> 8; dst[2] ^= v >> 16; dst[3] ^= v >> 24;
+}
 #endif

 #ifdef UNALIGNED_ACCESS_OK
 #define store_be32(a, b) store_be32_aligned(a, b)
+#define xor_be32(a, b) xor_be32_aligned(a, b)
 #else
 static inline void store_be32(uint8_t *dst, const uint32_t v)
 {
     dst[3] = v; dst[2] = v >> 8; dst[1] = v >> 16; dst[0] = v >> 24;
 }
+static inline void xor_be32(uint8_t *dst, const uint32_t v)
+{
+    dst[3] ^= v; dst[2] ^= v >> 8; dst[1] ^= v >> 16; dst[0] ^= v >> 24;
+}
 #endif

 #ifdef UNALIGNED_ACCESS_OK
@@ -98,12 +123,18 @@ static inline void store_le64(uint8_t *dst, const uint64_t v)

 #ifdef UNALIGNED_ACCESS_OK
 #define store_be64(a, b) store_be64_aligned(a, b)
+#define xor_be64(a, b) xor_be64_aligned(a, b)
 #else
 static inline void store_be64(uint8_t *dst, const uint64_t v)
 {
     dst[7] = v ; dst[6] = v >> 8 ; dst[5] = v >> 16; dst[4] = v >> 24;
     dst[3] = v >> 32; dst[2] = v >> 40; dst[1] = v >> 48; dst[0] = v >> 56;
 }
+static inline void xor_be64(uint8_t *dst, const uint64_t v)
+{
+    dst[7] ^= v ; dst[6] ^= v >> 8 ; dst[5] ^= v >> 16; dst[4] ^= v >> 24;
+    dst[3] ^= v >> 32; dst[2] ^= v >> 40; dst[1] ^= v >> 48; dst[0] ^= v >> 56;
+}
 #endif

 #endif
diff --git a/cbits/cryptonite_hash_prefix.c b/cbits/cryptonite_hash_prefix.c
new file mode 100644
index 0000000..06df581
--- /dev/null
+++ b/cbits/cryptonite_hash_prefix.c
@@ -0,0 +1,90 @@
+/*
+ * Copyright (C) 2020 Olivier Chéron
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in the
+ *    documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+ * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+ * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <cryptonite_hash_prefix.h>
+
+void CRYPTONITE_HASHED(finalize_prefix)(struct HASHED_LOWER(ctx) *ctx, const uint8_t *data, uint32_t len, uint32_t n, uint8_t *out)
+{
+    uint64_t bits[HASHED(BITS_ELEMS)];
+    uint8_t *p = (uint8_t *) &bits;
+    uint32_t index, padidx, padlen, pos, out_mask;
+    static const uint32_t cut_off = HASHED(BLOCK_SIZE) - sizeof(bits);
+
+    /* Make sure n <= len */
+    n += (len - n) & constant_time_lt(len, n);
+
+    /* Initial index, based on current context state */
+    index = CRYPTONITE_HASHED(get_index)(ctx);
+
+    /* Final size after n bytes */
+    CRYPTONITE_HASHED(incr_sz)(ctx, bits, n);
+
+    /* Padding index and length */
+    padidx = CRYPTONITE_HASHED(get_index)(ctx);
+    padlen = HASHED(BLOCK_SIZE) + cut_off - padidx;
+    padlen -= HASHED(BLOCK_SIZE) & constant_time_lt(padidx, cut_off);
+
+    /* Initialize buffers because we will XOR into them */
+    memset(ctx->buf + index, 0, HASHED(BLOCK_SIZE) - index);
+    memset(out, 0, HASHED(DIGEST_SIZE));
+    pos = 0;
+
+    /* Iterate based on the full buffer length, regardless of n, and include
+     * the maximum overhead with padding and size bytes
+     */
+    while (pos < len + HASHED(BLOCK_SIZE) + sizeof(bits)) {
+        uint8_t b;
+
+        /* Take as many bytes from the input buffer as possible */
+        if (pos < len)
+            b = *(data++) & (uint8_t) constant_time_lt(pos, n);
+        else
+            b = 0;
+
+        /* First padding byte */
+        b |= 0x80 & (uint8_t) constant_time_eq(pos, n);
+
+        /* Size bytes are always at the end of a block */
+        if (index >= cut_off)
+            b |= p[index - cut_off] & (uint8_t) constant_time_ge(pos, n + padlen);
+
+        /* Store this byte into the buffer */
+        ctx->buf[index++] ^= b;
+        pos++;
+
+        /* Process a full block, at a boundary which is independent from n */
+        if (index >= HASHED(BLOCK_SIZE)) {
+            index = 0;
+            HASHED_LOWER(do_chunk)(ctx, (void *) ctx->buf);
+            memset(ctx->buf, 0, HASHED(BLOCK_SIZE));
+
+            /* Try to store the result: this is a no-op except when we reach the
+             * actual size based on n, more iterations may continue after that
+             * when len is really larger
+             */
+            out_mask = constant_time_eq(pos, n + padlen + sizeof(bits));
+            CRYPTONITE_HASHED(select_digest)(ctx, out, out_mask);
+        }
+    }
+}
diff --git a/cbits/cryptonite_hash_prefix.h b/cbits/cryptonite_hash_prefix.h
new file mode 100644
index 0000000..3eed4e0
--- /dev/null
+++ b/cbits/cryptonite_hash_prefix.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright (C) 2020 Olivier Chéron
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in the
+ *    documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+ * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+ * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef CRYPTONITE_HASH_PREFIX_H
+#define CRYPTONITE_HASH_PREFIX_H
+
+#include <stdint.h>
+
+static inline uint32_t constant_time_msb(uint32_t a)
+{
+    return 0 - (a >> 31);
+}
+
+static inline uint32_t constant_time_lt(uint32_t a, uint32_t b)
+{
+    return constant_time_msb(a ^ ((a ^ b) | ((a - b) ^ b)));
+}
+
+static inline uint32_t constant_time_ge(uint32_t a, uint32_t b)
+{
+    return ~constant_time_lt(a, b);
+}
+
+static inline uint32_t constant_time_is_zero(uint32_t a)
+{
+    return constant_time_msb(~a & (a - 1));
+}
+
+static inline uint32_t constant_time_eq(uint32_t a, uint32_t b)
+{
+    return constant_time_is_zero(a ^ b);
+}
+
+static inline uint64_t constant_time_msb_64(uint64_t a)
+{
+    return 0 - (a >> 63);
+}
+
+static inline uint64_t constant_time_lt_64(uint64_t a, uint64_t b)
+{
+    return constant_time_msb_64(a ^ ((a ^ b) | ((a - b) ^ b)));
+}
+
+#endif
diff --git a/cbits/cryptonite_md5.c b/cbits/cryptonite_md5.c
index 126bac4..48ac4c6 100644
--- a/cbits/cryptonite_md5.c
+++ b/cbits/cryptonite_md5.c
@@ -185,3 +185,30 @@ void cryptonite_md5_finalize(struct md5_ctx *ctx, uint8_t *out)
     store_le32(out+ 8, ctx->h[2]);
     store_le32(out+12, ctx->h[3]);
 }
+
+#define HASHED(m) MD5_##m
+#define HASHED_LOWER(m) md5_##m
+#define CRYPTONITE_HASHED(m) cryptonite_md5_##m
+#define MD5_BLOCK_SIZE 64
+#define MD5_BITS_ELEMS 1
+
+static inline uint32_t cryptonite_md5_get_index(const struct md5_ctx *ctx)
+{
+    return (uint32_t) (ctx->sz & 0x3f);
+}
+
+static inline void cryptonite_md5_incr_sz(struct md5_ctx *ctx, uint64_t *bits, uint32_t n)
+{
+    ctx->sz += n;
+    *bits = cpu_to_le64(ctx->sz << 3);
+}
+
+static inline void cryptonite_md5_select_digest(const struct md5_ctx *ctx, uint8_t *out, uint32_t out_mask)
+{
+    xor_le32(out   , ctx->h[0] & out_mask);
+    xor_le32(out+ 4, ctx->h[1] & out_mask);
+    xor_le32(out+ 8, ctx->h[2] & out_mask);
+    xor_le32(out+12, ctx->h[3] & out_mask);
+}
+
+#include <cryptonite_hash_prefix.c>
diff --git a/cbits/cryptonite_md5.h b/cbits/cryptonite_md5.h
index 7187f96..e6fc67a 100644
--- a/cbits/cryptonite_md5.h
+++ b/cbits/cryptonite_md5.h
@@ -39,5 +39,6 @@ struct md5_ctx
 void cryptonite_md5_init(struct md5_ctx *ctx);
 void cryptonite_md5_update(struct md5_ctx *ctx, const uint8_t *data, uint32_t len);
 void cryptonite_md5_finalize(struct md5_ctx *ctx, uint8_t *out);
+void cryptonite_md5_finalize_prefix(struct md5_ctx *ctx, const uint8_t *data, uint32_t len, uint32_t n, uint8_t *out);

 #endif
diff --git a/cbits/cryptonite_sha1.c b/cbits/cryptonite_sha1.c
index 533eded..3a94707 100644
--- a/cbits/cryptonite_sha1.c
+++ b/cbits/cryptonite_sha1.c
@@ -216,3 +216,31 @@ void cryptonite_sha1_finalize(struct sha1_ctx *ctx, uint8_t *out)
     store_be32(out+12, ctx->h[3]);
     store_be32(out+16, ctx->h[4]);
 }
+
+#define HASHED(m) SHA1_##m
+#define HASHED_LOWER(m) sha1_##m
+#define CRYPTONITE_HASHED(m) cryptonite_sha1_##m
+#define SHA1_BLOCK_SIZE 64
+#define SHA1_BITS_ELEMS 1
+
+static inline uint32_t cryptonite_sha1_get_index(const struct sha1_ctx *ctx)
+{
+    return (uint32_t) (ctx->sz & 0x3f);
+}
+
+static inline void cryptonite_sha1_incr_sz(struct sha1_ctx *ctx, uint64_t *bits, uint32_t n)
+{
+    ctx->sz += n;
+    *bits = cpu_to_be64(ctx->sz << 3);
+}
+
+static inline void cryptonite_sha1_select_digest(const struct sha1_ctx *ctx, uint8_t *out, uint32_t out_mask)
+{
+    xor_be32(out   , ctx->h[0] & out_mask);
+    xor_be32(out+ 4, ctx->h[1] & out_mask);
+    xor_be32(out+ 8, ctx->h[2] & out_mask);
+    xor_be32(out+12, ctx->h[3] & out_mask);
+    xor_be32(out+16, ctx->h[4] & out_mask);
+}
+
+#include <cryptonite_hash_prefix.c>
diff --git a/cbits/cryptonite_sha1.h b/cbits/cryptonite_sha1.h
index ee3f202..73cd306 100644
--- a/cbits/cryptonite_sha1.h
+++ b/cbits/cryptonite_sha1.h
@@ -41,5 +41,6 @@ struct sha1_ctx
 void cryptonite_sha1_init(struct sha1_ctx *ctx);
 void cryptonite_sha1_update(struct sha1_ctx *ctx, const uint8_t *data, uint32_t len);
 void cryptonite_sha1_finalize(struct sha1_ctx *ctx, uint8_t *out);
+void cryptonite_sha1_finalize_prefix(struct sha1_ctx *ctx, const uint8_t *data, uint32_t len, uint32_t n, uint8_t *out);

 #endif
diff --git a/cbits/cryptonite_sha256.c b/cbits/cryptonite_sha256.c
index d82f5df..fb783a4 100644
--- a/cbits/cryptonite_sha256.c
+++ b/cbits/cryptonite_sha256.c
@@ -161,6 +161,14 @@ void cryptonite_sha224_finalize(struct sha224_ctx *ctx, uint8_t *out)
     memcpy(out, intermediate, SHA224_DIGEST_SIZE);
 }

+void cryptonite_sha224_finalize_prefix(struct sha224_ctx *ctx, const uint8_t *data, uint32_t len, uint32_t n, uint8_t *out)
+{
+    uint8_t intermediate[SHA256_DIGEST_SIZE];
+
+    cryptonite_sha256_finalize_prefix(ctx, data, len, n, intermediate);
+    memcpy(out, intermediate, SHA224_DIGEST_SIZE);
+}
+
 void cryptonite_sha256_finalize(struct sha256_ctx *ctx, uint8_t *out)
 {
     static uint8_t padding[64] = { 0x80, };
@@ -182,3 +190,29 @@ void cryptonite_sha256_finalize(struct sha256_ctx *ctx, uint8_t *out)
     for (i = 0; i < 8; i++)
         store_be32(out+4*i, ctx->h[i]);
 }
+
+#define HASHED(m) SHA256_##m
+#define HASHED_LOWER(m) sha256_##m
+#define CRYPTONITE_HASHED(m) cryptonite_sha256_##m
+#define SHA256_BLOCK_SIZE 64
+#define SHA256_BITS_ELEMS 1
+
+static inline uint32_t cryptonite_sha256_get_index(const struct sha256_ctx *ctx)
+{
+    return (uint32_t) (ctx->sz & 0x3f);
+}
+
+static inline void cryptonite_sha256_incr_sz(struct sha256_ctx *ctx, uint64_t *bits, uint32_t n)
+{
+    ctx->sz += n;
+    *bits = cpu_to_be64(ctx->sz << 3);
+}
+
+static inline void cryptonite_sha256_select_digest(const struct sha256_ctx *ctx, uint8_t *out, uint32_t out_mask)
+{
+    uint32_t i;
+    for (i = 0; i < 8; i++)
+        xor_be32(out+4*i, ctx->h[i] & out_mask);
+}
+
+#include <cryptonite_hash_prefix.c>
diff --git a/cbits/cryptonite_sha256.h b/cbits/cryptonite_sha256.h
index 705ff9a..49e18cc 100644
--- a/cbits/cryptonite_sha256.h
+++ b/cbits/cryptonite_sha256.h
@@ -47,9 +47,11 @@ struct sha256_ctx
 void cryptonite_sha224_init(struct sha224_ctx *ctx);
 void cryptonite_sha224_update(struct sha224_ctx *ctx, const uint8_t *data, uint32_t len);
 void cryptonite_sha224_finalize(struct sha224_ctx *ctx, uint8_t *out);
+void cryptonite_sha224_finalize_prefix(struct sha224_ctx *ctx, const uint8_t *data, uint32_t len, uint32_t n, uint8_t *out);

 void cryptonite_sha256_init(struct sha256_ctx *ctx);
 void cryptonite_sha256_update(struct sha256_ctx *ctx, const uint8_t *data, uint32_t len);
 void cryptonite_sha256_finalize(struct sha256_ctx *ctx, uint8_t *out);
+void cryptonite_sha256_finalize_prefix(struct sha256_ctx *ctx, const uint8_t *data, uint32_t len, uint32_t n, uint8_t *out);

 #endif
diff --git a/cbits/cryptonite_sha512.c b/cbits/cryptonite_sha512.c
index 7d345ae..cdc1eec 100644
--- a/cbits/cryptonite_sha512.c
+++ b/cbits/cryptonite_sha512.c
@@ -180,6 +180,14 @@ void cryptonite_sha384_finalize(struct sha384_ctx *ctx, uint8_t *out)
     memcpy(out, intermediate, SHA384_DIGEST_SIZE);
 }

+void cryptonite_sha384_finalize_prefix(struct sha384_ctx *ctx, const uint8_t *data, uint32_t len, uint32_t n, uint8_t *out)
+{
+    uint8_t intermediate[SHA512_DIGEST_SIZE];
+
+    cryptonite_sha512_finalize_prefix(ctx, data, len, n, intermediate);
+    memcpy(out, intermediate, SHA384_DIGEST_SIZE);
+}
+
 void cryptonite_sha512_finalize(struct sha512_ctx *ctx, uint8_t *out)
 {
     static uint8_t padding[128] = { 0x80, };
@@ -203,6 +211,38 @@ void cryptonite_sha512_finalize(struct sha512_ctx *ctx, uint8_t *out)
         store_be64(out+8*i, ctx->h[i]);
 }

+#define HASHED(m) SHA512_##m
+#define HASHED_LOWER(m) sha512_##m
+#define CRYPTONITE_HASHED(m) cryptonite_sha512_##m
+#define SHA512_BLOCK_SIZE 128
+#define SHA512_BITS_ELEMS 2
+
+#include <cryptonite_hash_prefix.h>
+
+static inline uint32_t cryptonite_sha512_get_index(const struct sha512_ctx *ctx)
+{
+    return (uint32_t) (ctx->sz[0] & 0x7f);
+}
+
+static inline void cryptonite_sha512_incr_sz(struct sha512_ctx *ctx, uint64_t *bits, uint32_t n)
+{
+    ctx->sz[0] += n;
+    ctx->sz[1] += 1 & constant_time_lt_64(ctx->sz[0], n);
+    bits[0] = cpu_to_be64((ctx->sz[1] << 3 | ctx->sz[0] >> 61));
+    bits[1] = cpu_to_be64((ctx->sz[0] << 3));
+}
+
+static inline void cryptonite_sha512_select_digest(const struct sha512_ctx *ctx, uint8_t *out, uint32_t out_mask)
+{
+    uint32_t i;
+    uint64_t out_mask_64 = out_mask;
+    out_mask_64 |= out_mask_64 << 32;
+    for (i = 0; i < 8; i++)
+        xor_be64(out+8*i, ctx->h[i] & out_mask_64);
+}
+
+#include <cryptonite_hash_prefix.c>
+
 #include

 void cryptonite_sha512t_init(struct sha512_ctx *ctx, uint32_t hashlen)
diff --git a/cbits/cryptonite_sha512.h b/cbits/cryptonite_sha512.h
index 38fc560..53893c0 100644
--- a/cbits/cryptonite_sha512.h
+++ b/cbits/cryptonite_sha512.h
@@ -46,10 +46,12 @@ struct sha512_ctx
 void cryptonite_sha384_init(struct sha384_ctx *ctx);
 void cryptonite_sha384_update(struct sha384_ctx *ctx, const uint8_t *data, uint32_t len);
 void cryptonite_sha384_finalize(struct sha384_ctx *ctx, uint8_t *out);
+void cryptonite_sha384_finalize_prefix(struct sha384_ctx *ctx, const uint8_t *data, uint32_t len, uint32_t n, uint8_t *out);

 void cryptonite_sha512_init(struct sha512_ctx *ctx);
 void cryptonite_sha512_update(struct sha512_ctx *ctx, const uint8_t *data, uint32_t len);
 void cryptonite_sha512_finalize(struct sha512_ctx *ctx, uint8_t *out);
+void cryptonite_sha512_finalize_prefix(struct sha512_ctx *ctx, const uint8_t *data, uint32_t len, uint32_t n, uint8_t *out);

 /* only multiples of 8 are supported as valid t values */
 void cryptonite_sha512t_init(struct sha512_ctx *ctx, uint32_t hashlen);
diff --git a/cryptonite.cabal b/cryptonite.cabal
index b4bf6e5..b9af22f 100644
--- a/cryptonite.cabal
+++ b/cryptonite.cabal
@@ -57,6 +57,7 @@ extra-source-files: cbits/*.h
                      cbits/argon2/*.h
                      cbits/argon2/*.c
                      cbits/aes/x86ni_impl.c
+                     cbits/cryptonite_hash_prefix.c
                      tests/*.hs

 source-repository head
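
The comparison helpers added in cbits/cryptonite_hash_prefix.h all follow one convention: a predicate evaluates to an all-ones word when it holds and to zero otherwise, so the result can be used directly as a mask, which is how finalize_prefix and the select_digest callbacks consume it. A small standalone check of that convention; the harness below is illustrative only and not part of the patch:

#include <assert.h>
#include <stdint.h>

/* Same definitions as cbits/cryptonite_hash_prefix.h, copied so the
 * check builds on its own. */
static inline uint32_t constant_time_msb(uint32_t a)
{
    return 0 - (a >> 31);
}

static inline uint32_t constant_time_lt(uint32_t a, uint32_t b)
{
    return constant_time_msb(a ^ ((a ^ b) | ((a - b) ^ b)));
}

static inline uint32_t constant_time_is_zero(uint32_t a)
{
    return constant_time_msb(~a & (a - 1));
}

static inline uint32_t constant_time_eq(uint32_t a, uint32_t b)
{
    return constant_time_is_zero(a ^ b);
}

int main(void)
{
    uint32_t mask, r;

    /* Predicates return full-width masks, not 0/1 booleans */
    assert(constant_time_lt(3, 7) == 0xffffffffU);
    assert(constant_time_lt(7, 3) == 0);
    assert(constant_time_eq(5, 5) == 0xffffffffU);
    assert(constant_time_eq(5, 6) == 0);

    /* Branch-free selection: keep a when the mask is all-ones, b otherwise,
     * the same idiom select_digest uses via XOR into a zeroed output buffer */
    mask = constant_time_eq(5, 5);
    r = (0x1234U & mask) | (0x5678U & ~mask);
    assert(r == 0x1234U);
    return 0;
}

Returning masks rather than booleans is what lets the byte loop in finalize_prefix combine input bytes, the 0x80 padding byte, and the length bytes without any data-dependent branches.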
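
From the caller's side, each new finalize_prefix entry point takes the usual context plus a data pointer, a public length len, the actual prefix length n (clamped to at most len), and an output buffer; only the first n bytes of data are absorbed, while the amount of work depends on len alone. A minimal caller sketch against the SHA-256 instance; the buffer contents, the sizes, and the build setup (cbits/ on the include path, the corresponding C files compiled in) are assumptions made for the example:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#include <cryptonite_sha256.h>

int main(void)
{
    struct sha256_ctx ctx;
    uint8_t buf[256];            /* public upper bound on the input size   */
    uint32_t len = sizeof(buf);  /* public length, drives the running time */
    uint32_t n = 77;             /* actual prefix length, n <= len         */
    uint8_t digest[32];          /* SHA-256 digest size                    */
    unsigned i;

    memset(buf, 0x42, sizeof(buf));

    cryptonite_sha256_init(&ctx);
    /* Data of public length can still go through the regular update */
    cryptonite_sha256_update(&ctx, (const uint8_t *) "header", 6);
    /* Finalize over the first n bytes of buf; the internal loop runs over
     * len plus the block and length-field overhead, independently of n */
    cryptonite_sha256_finalize_prefix(&ctx, buf, len, n, digest);

    for (i = 0; i < sizeof(digest); i++)
        printf("%02x", digest[i]);
    printf("\n");
    return 0;
}

The MD5, SHA-1, SHA-224, SHA-384 and SHA-512 variants declared in the headers above follow the same signature, differing only in context type and digest size.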