commit 519eed9df83148955efd27c495555fe0625aef41
parent e0660d87a2e8d03a2010f3d272496a4da9b2128e
Author: Armin Preiml <apreiml@strohwolke.at>
Date: Fri, 16 Dec 2022 14:53:50 +0100
crypto::aes unify blocks
Unify the block structure between operations to make it easier to
implement runtime detection of hardware support.
Signed-off-by: Armin Preiml <apreiml@strohwolke.at>
Diffstat:
8 files changed, 80 insertions(+), 64 deletions(-)
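For context, here is a minimal sketch of the runtime selection this unification is meant to enable. It is not part of the patch: the helper name is hypothetical, and x86ni()/x86ni_available() only exist on +x86_64 builds.

	// Hypothetical helper inside crypto::aes: since both implementations
	// now return the same unified block type, a caller can prefer the
	// hardware-accelerated cipher when it is available and fall back to
	// the constant-time software implementation otherwise.
	fn new_block() block = {
		if (x86ni_available()) {
			return x86ni();
		};
		return ct64();
	};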
diff --git a/crypto/aes/+test/ni+x86_64.ha b/crypto/aes/+test/ni+x86_64.ha
@@ -1,7 +1,7 @@
use bytes;
use crypto::cipher;
-const zero_rk: [RKLEN_256]u8 = [0...];
+const zero_rk: [EXPKEYLEN256]u8 = [0...];
// taken from fips-197.pdf Section A.1
@test fn ni_enc_key_expand_128() void = {
@@ -38,12 +38,11 @@ const zero_rk: [RKLEN_256]u8 = [0...];
let block = x86ni();
x86ni_init(&block, key[..]);
- assert(block.rklen == 176);
- assert(bytes::equal(expected_rounds[..], block.enc_rk[..RKLEN_128]));
+ assert(block.rounds == 10);
+ assert(bytes::equal(expected_rounds[..], block.expkey[..EXPKEYLEN128]));
cipher::finish(&block);
- assert(bytes::equal(zero_rk[..], block.enc_rk[..]));
- assert(bytes::equal(zero_rk[..], block.dec_rk[..]));
+ assert(bytes::equal(zero_rk[..], block.expkey[..EXPKEYLEN256]));
};
// taken from fips-197.pdf Section A.2
@@ -86,8 +85,8 @@ const zero_rk: [RKLEN_256]u8 = [0...];
let block = x86ni();
x86ni_init(&block, key[..]);
- assert(block.rklen == 208);
- assert(bytes::equal(expected_rounds[..], block.enc_rk[..RKLEN_192]));
+ assert(block.rounds == 12);
+ assert(bytes::equal(expected_rounds[..], block.expkey[..EXPKEYLEN192]));
};
@@ -136,8 +135,8 @@ const zero_rk: [RKLEN_256]u8 = [0...];
let block = x86ni();
x86ni_init(&block, key[..]);
- assert(block.rklen == 240);
- assert(bytes::equal(expected_rounds[..], block.enc_rk[..RKLEN_256]));
+ assert(block.rounds == 14);
+ assert(bytes::equal(expected_rounds[..], block.expkey[..EXPKEYLEN256]));
};
@test fn ni_test_encrypt_128() void = {
diff --git a/crypto/aes/+x86_64/ni.ha b/crypto/aes/+x86_64/ni.ha
@@ -2,23 +2,18 @@ use bytes;
use crypto::cipher;
use rt;
-def RKLEN_128: size = 176;
-def RKLEN_192: size = 208;
-def RKLEN_256: size = 240;
+def EXPKEYLEN128: size = 176;
+def EXPKEYLEN192: size = 208;
+def EXPKEYLEN256: size = 240;
-export type x86ni_block = struct {
- block: cipher::block,
- enc_rk: [RKLEN_256]u8,
- dec_rk: [RKLEN_256]u8,
- rklen: u8,
-};
+def X86NI_EXPKEYSIZE: size = 480;
const x86ni_vtable: cipher::blockvtable = cipher::blockvtable {
blocksz = BLOCKSIZE,
nparallel = 1,
encrypt = &x86ni_encrypt,
decrypt = &x86ni_decrypt,
- finish = &x86ni_finish,
+ finish = &block_finish,
};
// Checks if the native AES interface is available.
@@ -33,34 +28,35 @@ export fn x86ni_available() bool = {
// the cipher, and must call [[crypto::cipher::finish]] when they are finished
// using the cipher to securely erase any secret data stored in the cipher
// state.
-export fn x86ni() x86ni_block = {
- return x86ni_block {
- block = &x86ni_vtable,
+export fn x86ni() block = {
+ return block {
+ vtable = &x86ni_vtable,
...
};
};
-export fn x86ni_init(b: *x86ni_block, key: []u8) void = {
+export fn x86ni_init(b: *block, key: []u8) void = {
assert(len(key) == 16 || len(key) == 24 || len(key) == 32,
"Invalid aes key length");
- b.rklen = x86ni_keyexp(key[..], b.enc_rk[..], b.dec_rk[..]);
-};
-
-fn x86ni_encrypt(block: *cipher::block, dest: []u8, src: []u8) void = {
- let b = block: *x86ni_block;
- x86ni_asencrypt(b.enc_rk[..b.rklen], dest, src);
+ let enc = b.expkey[..EXPKEYLEN256];
+ let dec = b.expkey[EXPKEYLEN256..];
+ const expkeylen = x86ni_keyexp(key[..], enc, dec);
+ b.rounds = (expkeylen >> 4) - 1;
};
-fn x86ni_decrypt(block: *cipher::block, dest: []u8, src: []u8) void = {
- let b = block: *x86ni_block;
- x86ni_asdecrypt(b.dec_rk[..b.rklen], dest, src);
+fn x86ni_encrypt(b: *cipher::block, dest: []u8, src: []u8) void = {
+ let b = b: *block;
+ const expkeylen = (b.rounds + 1) << 4;
+ let enc = b.expkey[..expkeylen];
+ x86ni_asencrypt(enc, dest, src);
};
-fn x86ni_finish(block: *cipher::block) void = {
- let b = block: *x86ni_block;
- bytes::zero(b.enc_rk[..]);
- bytes::zero(b.dec_rk[..]);
+fn x86ni_decrypt(b: *cipher::block, dest: []u8, src: []u8) void = {
+ let b = b: *block;
+ const expkeylen = (b.rounds + 1) << 4;
+ let dec = b.expkey[EXPKEYLEN256..];
+ x86ni_asdecrypt(dec[..expkeylen], dest, src);
};
// Expands encryption and decryption key and returns the size of the round keys.
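A note on the arithmetic above (not part of the patch): every AES round key is 16 bytes and the schedule holds one key more than the number of rounds, so the expanded key length and the round count determine one another:

	AES-128: 176 = (10 + 1) * 16   =>   rounds = 176/16 - 1 = 10
	AES-192: 208 = (12 + 1) * 16   =>   rounds = 208/16 - 1 = 12
	AES-256: 240 = (14 + 1) * 16   =>   rounds = 240/16 - 1 = 14

x86ni_init therefore stores only b.rounds = (expkeylen >> 4) - 1, and x86ni_encrypt/x86ni_decrypt recover the length with (rounds + 1) << 4, matching the rounds == 10/12/14 assertions in the updated tests. The encryption and decryption schedules sit back to back in the shared buffer, expkey[..EXPKEYLEN256] and expkey[EXPKEYLEN256..], which is why X86NI_EXPKEYSIZE is 2 * 240 = 480.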
diff --git a/crypto/aes/aes.ha b/crypto/aes/aes.ha
@@ -0,0 +1,4 @@
+// License: MPL-2.0
+// (c) 2022 Armin Preiml <apreiml@strohwolke.at>
+
+def MAXEXPKEYSIZE: size = CT64_EXPKEYSIZE;
diff --git a/crypto/aes/aes_ct64.ha b/crypto/aes/aes_ct64.ha
@@ -26,15 +26,15 @@
// ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
+use bytes;
use crypto::cipher;
use crypto::cipher::{blocksz,nparallel};
use crypto::math;
use endian;
-def CT64_NPARALLEL: size = 4;
+def CT64_EXPKEYSIZE: size = 960;
-// The block size used by the AES algorithm.
-export def BLOCKSIZE: size = 16;
+def CT64_NPARALLEL: size = 4;
// Size of the buffer used for [[crypto::cipher::ctr]].
export def CTR_BUFSIZE: size = BLOCKSIZE * (CT64_NPARALLEL + 1);
@@ -43,12 +43,6 @@ export def CTR_BUFSIZE: size = BLOCKSIZE * (CT64_NPARALLEL + 1);
// [[crypto::cipher::cbc_decryptor]].
export def CBC_BUFSIZE: size = BLOCKSIZE * 2;
-export type ct64_block = struct {
- block: cipher::block,
- rounds: uint,
- sk_exp: [120]u64,
-};
-
// Returns an AES [[crypto::cipher::block]] cipher implementation optimized for
// constant time operation on 64-bit systems.
//
@@ -56,8 +50,8 @@ export type ct64_block = struct {
// the cipher, and must call [[crypto::cipher::finish]] when they are finished
// using the cipher to securely erase any secret data stored in the cipher
// state.
-export fn ct64() ct64_block = ct64_block {
- block = &ct64_vtable,
+export fn ct64() block = block {
+ vtable = &ct64_vtable,
...
};
@@ -66,19 +60,23 @@ const ct64_vtable: cipher::blockvtable = cipher::blockvtable {
nparallel = CT64_NPARALLEL,
encrypt = &aes_ct64_encrypt,
decrypt = &aes_ct64_decrypt,
- finish = &aes_ct64_finish,
+ finish = &block_finish,
};
// Initializes the ct64 AES implementation with an encryption key.
-export fn ct64_init(cipher: *ct64_block, key: []u8) void = {
+export fn ct64_init(cipher: *block, key: []u8) void = {
let comp_skey: [30]u64 = [0...];
cipher.rounds = br_aes_ct64_keysched(comp_skey[..], key, len(key));
- br_aes_ct64_skey_expand(cipher.sk_exp, cipher.rounds, comp_skey[..]);
+ br_aes_ct64_skey_expand(ct64_expkey(cipher), cipher.rounds, comp_skey[..]);
+};
+
+fn ct64_expkey(b: *block) []u64 = {
+ return (b.expkey[..]: *[*]u64)[..len(b.expkey)/size(u64)];
};
// Combines up to 4 blocks and encrypts them in one run
-fn aes_ct64_encrypt(block: *cipher::block, dest: []u8, src: []u8) void = {
- let b = block: *ct64_block;
+fn aes_ct64_encrypt(b: *cipher::block, dest: []u8, src: []u8) void = {
+ let b = b: *block;
assert(len(src) % blocksz(b) == 0
&& (len(src) / blocksz(b)) <= nparallel(b),
@@ -95,7 +93,7 @@ fn aes_ct64_encrypt(block: *cipher::block, dest: []u8, src: []u8) void = {
};
br_aes_ct64_ortho(q);
- br_aes_ct64_bitslice_encrypt(b.rounds, b.sk_exp, q);
+ br_aes_ct64_bitslice_encrypt(b.rounds, ct64_expkey(b), q);
br_aes_ct64_ortho(q);
for (let i = 0z; i < nblocks; i += 1) {
@@ -106,8 +104,8 @@ fn aes_ct64_encrypt(block: *cipher::block, dest: []u8, src: []u8) void = {
};
// Combines up to 4 blocks and decrypts them in one run
-fn aes_ct64_decrypt(block: *cipher::block, dest: []u8, src: []u8) void = {
- let b = block: *ct64_block;
+fn aes_ct64_decrypt(b: *cipher::block, dest: []u8, src: []u8) void = {
+ let b = b: *block;
assert(len(src) % blocksz(b) == 0
&& (len(src) / blocksz(b)) <= nparallel(b),
@@ -123,7 +121,7 @@ fn aes_ct64_decrypt(block: *cipher::block, dest: []u8, src: []u8) void = {
};
br_aes_ct64_ortho(q);
- br_aes_ct64_bitslice_decrypt(b.rounds, b.sk_exp, q);
+ br_aes_ct64_bitslice_decrypt(b.rounds, ct64_expkey(b), q);
br_aes_ct64_ortho(q);
for (let i = 0z; i < nblocks; i += 1) {
@@ -133,11 +131,6 @@ fn aes_ct64_decrypt(block: *cipher::block, dest: []u8, src: []u8) void = {
br_range_enc32le(dest, w);
};
-fn aes_ct64_finish(block: *cipher::block) void = {
- let b = block: *ct64_block;
- b.sk_exp = [0...];
-};
-
// see br_aes_ct64_ortho in src/inner.h of BearSSL
fn br_aes_ct64_ortho(q: []u64) void = {
swapn(0x5555555555555555, 0xAAAAAAAAAAAAAAAA, 1, &q[0], &q[1]);
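To tie the buffer sizes together, an illustrative sketch (not part of the patch; no such test exists in this commit, and X86NI_EXPKEYSIZE is only visible on +x86_64 builds):

	@test fn expkey_sizes() void = {
		// ct64 previously kept its schedule in sk_exp: [120]u64;
		// the flat byte buffer preserves that capacity.
		assert(CT64_EXPKEYSIZE == 120 * size(u64));
		// x86ni stores two schedules (encrypt + decrypt) back to back.
		assert(X86NI_EXPKEYSIZE == 2 * EXPKEYLEN256);
		// The shared expkey buffer is sized for the larger of the two.
		assert(MAXEXPKEYSIZE == CT64_EXPKEYSIZE);
		assert(MAXEXPKEYSIZE >= X86NI_EXPKEYSIZE);
	};

ct64_expkey() then simply reinterprets that byte buffer as the []u64 view the bitsliced routines expect.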
diff --git a/crypto/aes/block.ha b/crypto/aes/block.ha
@@ -0,0 +1,18 @@
+// License: MPL-2.0
+// (c) 2022 Armin Preiml <apreiml@strohwolke.at>
+use bytes;
+use crypto::cipher;
+
+// The block size used by the AES algorithm.
+export def BLOCKSIZE: size = 16;
+
+export type block = struct {
+ vtable: cipher::block,
+ rounds: u32,
+ expkey: [MAXEXPKEYSIZE]u8,
+};
+
+fn block_finish(b: *cipher::block) void = {
+ let b = b: *block;
+ bytes::zero(b.expkey);
+};
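For reference, a minimal caller of the unified type (a hypothetical example, not part of the patch; it assumes crypto::cipher's encrypt and finish helpers and uses an all-zero key purely for illustration):

	use crypto::aes;
	use crypto::cipher;

	fn encrypt_example() void = {
		const key: [16]u8 = [0...]; // hypothetical 128-bit key
		let b = aes::ct64();
		aes::ct64_init(&b, key[..]);

		let src: [aes::BLOCKSIZE]u8 = [0...];
		let dest: [aes::BLOCKSIZE]u8 = [0...];
		cipher::encrypt(&b, dest[..], src[..]);

		// block_finish zeroes the shared expkey buffer regardless of
		// which implementation populated it.
		cipher::finish(&b);
	};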
diff --git a/crypto/aes/xts/xts.ha b/crypto/aes/xts/xts.ha
@@ -5,8 +5,8 @@ use bytes;
use crypto::cipher;
export type block = struct {
- b1: aes::ct64_block,
- b2: aes::ct64_block,
+ b1: aes::block,
+ b2: aes::block,
x: [aes::BLOCKSIZE]u8,
};
diff --git a/scripts/gen-stdlib b/scripts/gen-stdlib
@@ -230,7 +230,9 @@ crypto() {
gensrcs_crypto_aes() {
gen_srcs crypto::aes \
+ aes.ha \
aes_ct64.ha \
+ block.ha \
$*
}
diff --git a/stdlib.mk b/stdlib.mk
@@ -801,7 +801,9 @@ $(HARECACHE)/crypto/crypto-any.ssa: $(stdlib_crypto_any_srcs) $(stdlib_rt) $(std
# crypto::aes (+any)
stdlib_crypto_aes_any_srcs = \
- $(STDLIB)/crypto/aes/aes_ct64.ha
+ $(STDLIB)/crypto/aes/aes.ha \
+ $(STDLIB)/crypto/aes/aes_ct64.ha \
+ $(STDLIB)/crypto/aes/block.ha
$(HARECACHE)/crypto/aes/crypto_aes-any.ssa: $(stdlib_crypto_aes_any_srcs) $(stdlib_rt) $(stdlib_bytes_$(PLATFORM)) $(stdlib_crypto_cipher_$(PLATFORM)) $(stdlib_crypto_math_$(PLATFORM)) $(stdlib_endian_$(PLATFORM)) $(stdlib_rt_$(PLATFORM)) $(stdlib_io_$(PLATFORM))
@printf 'HAREC \t$@\n'
@@ -3002,7 +3004,9 @@ $(TESTCACHE)/crypto/crypto-any.ssa: $(testlib_crypto_any_srcs) $(testlib_rt) $(t
# crypto::aes (+any)
testlib_crypto_aes_any_srcs = \
+ $(STDLIB)/crypto/aes/aes.ha \
$(STDLIB)/crypto/aes/aes_ct64.ha \
+ $(STDLIB)/crypto/aes/block.ha \
$(STDLIB)/crypto/aes/ct64+test.ha \
$(STDLIB)/crypto/aes/cbc+test.ha \
$(STDLIB)/crypto/aes/ctr+test.ha \