From 94474e02fa217c037ece9d819a9b12025f65cdb9 Mon Sep 17 00:00:00 2001
From: Ard Biesheuvel
Date: Mon, 24 Jul 2023 16:41:17 +0200
Subject: [PATCH] riscv: Implement AES-192
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Even though the RISC-V vector instructions only support AES-128 and
AES-256 for key generation, the round instructions themselves can easily
be used to implement AES-192 too - we just need to fall back to the
generic key generation routines in this case.

Note that the vector instructions use the encryption key schedule (but in
reverse order), so we need to generate the encryption key schedule even
when doing decryption using the vector instructions.

Signed-off-by: Ard Biesheuvel
Signed-off-by: Christoph Müllner
Reviewed-by: Tomas Mraz
Reviewed-by: Paul Dale
Reviewed-by: Hugo Landau
(Merged from https://github.com/openssl/openssl/pull/21923)
---
 crypto/aes/asm/aes-riscv64-zvkned.pl          | 109 ++++++++++++++++++
 .../ciphers/cipher_aes_ccm_hw_rv64i.inc       |   4 +-
 .../ciphers/cipher_aes_gcm_hw_rv64i.inc       |   4 +-
 .../ciphers/cipher_aes_hw_rv64i.inc           |  14 +--
 .../ciphers/cipher_aes_ocb_hw.c               |   5 +-
 .../ciphers/cipher_aes_xts_hw.c               |   4 +-
 6 files changed, 124 insertions(+), 16 deletions(-)

diff --git a/crypto/aes/asm/aes-riscv64-zvkned.pl b/crypto/aes/asm/aes-riscv64-zvkned.pl
index 2586a71af1..e0f5f19ff5 100644
--- a/crypto/aes/asm/aes-riscv64-zvkned.pl
+++ b/crypto/aes/asm/aes-riscv64-zvkned.pl
@@ -263,6 +263,8 @@ rv64i_zvkned_encrypt:
     beq $rounds, $T6, L_enc_256
     li $T6, 10
     beq $rounds, $T6, L_enc_128
+    li $T6, 12
+    beq $rounds, $T6, L_enc_192
 
     j L_fail_m2
 .size rv64i_zvkned_encrypt,.-rv64i_zvkned_encrypt
@@ -315,6 +317,58 @@ L_enc_128:
 .size L_enc_128,.-L_enc_128
 ___
 
+$code .= <<___;
+.p2align 3
+L_enc_192:
+    @{[vsetivli__x0_4_e32_m1_tu_mu]}
+
+    @{[vle32_v $v10, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v11, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v12, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v13, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v14, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v15, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v16, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v17, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v18, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v19, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v20, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v21, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v22, ($KEYP)]}
+
+    @{[vle32_v $v1, ($INP)]}
+
+    @{[vaesz_vs $v1, $v10]}    # with round key w[ 0, 3]
+    @{[vaesem_vs $v1, $v11]}
+    @{[vaesem_vs $v1, $v12]}
+    @{[vaesem_vs $v1, $v13]}
+    @{[vaesem_vs $v1, $v14]}
+    @{[vaesem_vs $v1, $v15]}
+    @{[vaesem_vs $v1, $v16]}
+    @{[vaesem_vs $v1, $v17]}
+    @{[vaesem_vs $v1, $v18]}
+    @{[vaesem_vs $v1, $v19]}
+    @{[vaesem_vs $v1, $v20]}
+    @{[vaesem_vs $v1, $v21]}
+    @{[vaesef_vs $v1, $v22]}
+
+    @{[vse32_v $v1, ($OUTP)]}
+    ret
+.size L_enc_192,.-L_enc_192
+___
+
 $code .= <<___;
 .p2align 3
 L_enc_256:
@@ -399,6 +453,8 @@ rv64i_zvkned_decrypt:
     beq $rounds, $T6, L_dec_256
     li $T6, 10
     beq $rounds, $T6, L_dec_128
+    li $T6, 12
+    beq $rounds, $T6, L_dec_192
 
     j L_fail_m2
 .size rv64i_zvkned_decrypt,.-rv64i_zvkned_decrypt
@@ -451,6 +507,59 @@ L_dec_128:
 .size L_dec_128,.-L_dec_128
 ___
 
+$code .= <<___;
+.p2align 3
+L_dec_192:
+    @{[vsetivli__x0_4_e32_m1_tu_mu]}
+
+    @{[vle32_v $v10, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v11, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v12, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v13, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v14, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v15, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v16, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v17, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v18, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v19, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v20, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v21, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v22, ($KEYP)]}
+
+    @{[vle32_v $v1, ($INP)]}
+
+    @{[vaesz_vs $v1, $v22]}     # with round key w[48,51]
+    @{[vaesdm_vs $v1, $v21]}    # with round key w[44,47]
+    @{[vaesdm_vs $v1, $v20]}    # with round key w[40,43]
+    @{[vaesdm_vs $v1, $v19]}    # with round key w[36,39]
+    @{[vaesdm_vs $v1, $v18]}    # with round key w[32,35]
+    @{[vaesdm_vs $v1, $v17]}    # with round key w[28,31]
+    @{[vaesdm_vs $v1, $v16]}    # with round key w[24,27]
+    @{[vaesdm_vs $v1, $v15]}    # with round key w[20,23]
+    @{[vaesdm_vs $v1, $v14]}    # with round key w[16,19]
+    @{[vaesdm_vs $v1, $v13]}    # with round key w[12,15]
+    @{[vaesdm_vs $v1, $v12]}    # with round key w[ 8,11]
+    @{[vaesdm_vs $v1, $v11]}    # with round key w[ 4, 7]
+    @{[vaesdf_vs $v1, $v10]}    # with round key w[ 0, 3]
+
+    @{[vse32_v $v1, ($OUTP)]}
+
+    ret
+.size L_dec_192,.-L_dec_192
+___
+
 $code .= <<___;
 .p2align 3
 L_dec_256:
diff --git a/providers/implementations/ciphers/cipher_aes_ccm_hw_rv64i.inc b/providers/implementations/ciphers/cipher_aes_ccm_hw_rv64i.inc
index 28efc21853..f2353bb3b8 100644
--- a/providers/implementations/ciphers/cipher_aes_ccm_hw_rv64i.inc
+++ b/providers/implementations/ciphers/cipher_aes_ccm_hw_rv64i.inc
@@ -41,12 +41,12 @@ static int ccm_rv64i_zvkned_initkey(PROV_CCM_CTX *ctx, const unsigned char *key,
 {
     PROV_AES_CCM_CTX *actx = (PROV_AES_CCM_CTX *)ctx;
 
-    /* Zvkned only supports 128 and 256 bit keys. */
+    /* Zvkned only supports 128 and 256 bit keys for key schedule generation. */
     if (keylen * 8 == 128 || keylen * 8 == 256) {
         AES_HW_CCM_SET_KEY_FN(rv64i_zvkned_set_encrypt_key, rv64i_zvkned_encrypt,
                               NULL, NULL);
     } else {
-        AES_HW_CCM_SET_KEY_FN(AES_set_encrypt_key, AES_encrypt, NULL, NULL)
+        AES_HW_CCM_SET_KEY_FN(AES_set_encrypt_key, rv64i_zvkned_encrypt, NULL, NULL)
     }
     return 1;
 }
diff --git a/providers/implementations/ciphers/cipher_aes_gcm_hw_rv64i.inc b/providers/implementations/ciphers/cipher_aes_gcm_hw_rv64i.inc
index f4665b89e6..b4a6749d3f 100644
--- a/providers/implementations/ciphers/cipher_aes_gcm_hw_rv64i.inc
+++ b/providers/implementations/ciphers/cipher_aes_gcm_hw_rv64i.inc
@@ -41,12 +41,12 @@ static int rv64i_zvkned_gcm_initkey(PROV_GCM_CTX *ctx, const unsigned char *key,
 {
     PROV_AES_GCM_CTX *actx = (PROV_AES_GCM_CTX *)ctx;
     AES_KEY *ks = &actx->ks.ks;
-    /* Zvkned only supports 128 and 256 bit keys. */
+    /* Zvkned only supports 128 and 256 bit keys for key schedule generation. */
     if (keylen * 8 == 128 || keylen * 8 == 256) {
         GCM_HW_SET_KEY_CTR_FN(ks, rv64i_zvkned_set_encrypt_key,
                               rv64i_zvkned_encrypt, NULL);
     } else {
-        GCM_HW_SET_KEY_CTR_FN(ks, AES_set_encrypt_key, AES_encrypt, NULL);
+        GCM_HW_SET_KEY_CTR_FN(ks, AES_set_encrypt_key, rv64i_zvkned_encrypt, NULL);
     }
     return 1;
 }
diff --git a/providers/implementations/ciphers/cipher_aes_hw_rv64i.inc b/providers/implementations/ciphers/cipher_aes_hw_rv64i.inc
index 8deaf01be6..2761905ee0 100644
--- a/providers/implementations/ciphers/cipher_aes_hw_rv64i.inc
+++ b/providers/implementations/ciphers/cipher_aes_hw_rv64i.inc
@@ -71,7 +71,7 @@ static int cipher_hw_rv64i_zvkned_initkey(PROV_CIPHER_CTX *dat,
 
     dat->ks = ks;
 
-    /* Zvkned only supports 128 and 256 bit keys. */
+    /* Zvkned only supports 128 and 256 bit keys for key schedule generation. */
     if (keylen * 8 == 128 || keylen * 8 == 256) {
         if ((dat->mode == EVP_CIPH_ECB_MODE || dat->mode == EVP_CIPH_CBC_MODE)
             && !dat->enc) {
@@ -86,15 +86,13 @@ static int cipher_hw_rv64i_zvkned_initkey(PROV_CIPHER_CTX *dat,
     } else {
         if ((dat->mode == EVP_CIPH_ECB_MODE || dat->mode == EVP_CIPH_CBC_MODE)
             && !dat->enc) {
-            ret = AES_set_decrypt_key(key, keylen * 8, ks);
-            dat->block = (block128_f)AES_decrypt;
-            dat->stream.cbc = (dat->mode == EVP_CIPH_CBC_MODE)
-                              ? (cbc128_f)AES_cbc_encrypt : NULL;
+            ret = AES_set_encrypt_key(key, keylen * 8, ks);
+            dat->block = (block128_f) rv64i_zvkned_decrypt;
+            dat->stream.cbc = NULL;
         } else {
             ret = AES_set_encrypt_key(key, keylen * 8, ks);
-            dat->block = (block128_f)AES_encrypt;
-            dat->stream.cbc = (dat->mode == EVP_CIPH_CBC_MODE)
-                              ? (cbc128_f)AES_cbc_encrypt : NULL;
+            dat->block = (block128_f) rv64i_zvkned_encrypt;
+            dat->stream.cbc = NULL;
         }
     }
 
diff --git a/providers/implementations/ciphers/cipher_aes_ocb_hw.c b/providers/implementations/ciphers/cipher_aes_ocb_hw.c
index 5e90febe54..7ece4d32c9 100644
--- a/providers/implementations/ciphers/cipher_aes_ocb_hw.c
+++ b/providers/implementations/ciphers/cipher_aes_ocb_hw.c
@@ -130,8 +130,9 @@ static int cipher_hw_aes_ocb_rv64i_zvkned_initkey(PROV_CIPHER_CTX *vctx,
                        rv64i_zvkned_encrypt, rv64i_zvkned_decrypt,
                        NULL, NULL);
     } else {
-        OCB_SET_KEY_FN(AES_set_encrypt_key, AES_set_decrypt_key,
-                       AES_encrypt, AES_decrypt, NULL, NULL);
+        OCB_SET_KEY_FN(AES_set_encrypt_key, AES_set_encrypt_key,
+                       rv64i_zvkned_encrypt, rv64i_zvkned_decrypt,
+                       NULL, NULL);
     }
     return 1;
 }
diff --git a/providers/implementations/ciphers/cipher_aes_xts_hw.c b/providers/implementations/ciphers/cipher_aes_xts_hw.c
index 45e6c90090..e5ee6741ea 100644
--- a/providers/implementations/ciphers/cipher_aes_xts_hw.c
+++ b/providers/implementations/ciphers/cipher_aes_xts_hw.c
@@ -190,8 +190,8 @@ static int cipher_hw_aes_xts_rv64i_zvkned_initkey(PROV_CIPHER_CTX *ctx,
                        rv64i_zvkned_encrypt, rv64i_zvkned_decrypt,
                        stream_enc, stream_dec);
     } else {
-        XTS_SET_KEY_FN(AES_set_encrypt_key, AES_set_decrypt_key,
-                       AES_encrypt, AES_decrypt,
+        XTS_SET_KEY_FN(AES_set_encrypt_key, AES_set_encrypt_key,
+                       rv64i_zvkned_encrypt, rv64i_zvkned_decrypt,
                        stream_enc, stream_dec);
     }
     return 1;
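
Note on the fallback path (illustrative, not part of the patch): every provider hunk above follows the same pattern. For AES-192 the Zvkned key-expansion instructions are unavailable, so the key schedule is built with the generic AES_set_encrypt_key(), while the Zvkned block routines still execute the rounds. Because the vector round instructions consume the encryption key schedule (walking it in reverse for decryption), the encryption schedule is generated even on the decrypt path. A minimal C sketch of that selection logic, reusing only the rv64i_zvkned_encrypt/rv64i_zvkned_decrypt names that appear in the diff (the helper, its signature and the prototypes below are assumptions for illustration):

    #include <stddef.h>
    #include <openssl/aes.h>

    /* Prototypes assumed for the asm entry points from aes-riscv64-zvkned.pl. */
    void rv64i_zvkned_encrypt(const unsigned char *in, unsigned char *out,
                              const AES_KEY *key);
    void rv64i_zvkned_decrypt(const unsigned char *in, unsigned char *out,
                              const AES_KEY *key);

    typedef void (*aes_block_fn)(const unsigned char *in, unsigned char *out,
                                 const AES_KEY *key);

    /*
     * Hypothetical helper: set up AES-192 for use with the Zvkned round
     * instructions.  The key schedule comes from the generic C routine, and
     * the encryption schedule is generated even for decryption, because the
     * vaesdm/vaesdf instructions consume the encryption round keys in
     * reverse order.
     */
    static int aes192_zvkned_fallback_init(AES_KEY *ks, const unsigned char *key,
                                           size_t keylen, int encrypting,
                                           aes_block_fn *block)
    {
        if (AES_set_encrypt_key(key, (int)(keylen * 8), ks) < 0)
            return 0;                       /* generic key expansion failed */

        /* Blocks are still processed by the vector AES round instructions. */
        *block = encrypting ? rv64i_zvkned_encrypt : rv64i_zvkned_decrypt;
        return 1;
    }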