core: arm: crypto: fix AES-GCM counter increase
In pmull_gcm_encrypt() and pmull_gcm_decrypt() it was assumed that it is
enough to increase only the least significant 64 bits of the counter fed
to the block cipher. This holds for 96-bit IVs, but not for IVs of any
other length, since the value stored in the least significant 64 bits of
the counter cannot easily be predicted.

This patch updates pmull_gcm_encrypt() and pmull_gcm_decrypt() to
increase the entire counter; at the same time the interface is changed
to accept the counter in little-endian format instead.
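For illustration only (not part of the patch), a minimal C sketch of the
full-width increment the fix performs, assuming the counter is kept as two
host-endian 64-bit words with the least significant word first, as
get_be_block() produces:

#include <stdint.h>

/*
 * Hypothetical helper: increment a 128-bit counter held as two 64-bit
 * words, ctr[0] = least significant, ctr[1] = most significant. The
 * carry into ctr[1] is what was missing when only the low 64 bits of
 * the counter were increased.
 */
static void ctr128_inc(uint64_t ctr[2])
{
	ctr[0]++;
	if (ctr[0] == 0)        /* low word wrapped around */
		ctr[1]++;
}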

Acked-by: Jerome Forissier <jerome.forissier@linaro.org>
Tested-by: Jens Wiklander <jens.wiklander@linaro.org> (QEMU, Hikey)
Signed-off-by: Jens Wiklander <jens.wiklander@linaro.org>
jenswi-linaro committed Nov 20, 2017
1 parent 8b8352f commit 731b6e3
Showing 3 changed files with 46 additions and 35 deletions.
41 changes: 24 additions & 17 deletions core/arch/arm/crypto/aes-gcm-ce.c
@@ -13,6 +13,22 @@
#include <tomcrypt.h>
#include <types_ext.h>

static void get_be_block(void *dst, const void *src)
{
uint64_t *d = dst;

d[1] = get_be64(src);
d[0] = get_be64((const uint8_t *)src + 8);
}

static void put_be_block(void *dst, const void *src)
{
const uint64_t *s = src;

put_be64(dst, s[1]);
put_be64((uint8_t *)dst + 8, s[0]);
}

TEE_Result internal_aes_gcm_set_key(struct internal_aes_gcm_ctx *ctx,
const void *key, size_t key_len)
{
@@ -37,18 +53,6 @@ TEE_Result internal_aes_gcm_set_key(struct internal_aes_gcm_ctx *ctx,
return TEE_SUCCESS;
}

static void get_dg(uint64_t dg[2], struct internal_aes_gcm_ctx *ctx)
{
dg[1] = get_be64(ctx->hash_state);
dg[0] = get_be64(ctx->hash_state + 8);
}

static void put_dg(struct internal_aes_gcm_ctx *ctx, uint64_t dg[2])
{
put_be64(ctx->hash_state, dg[1]);
put_be64(ctx->hash_state + 8, dg[0]);
}

void internal_aes_gcm_ghash_update(struct internal_aes_gcm_ctx *ctx,
const void *head, const void *data,
size_t num_blocks)
@@ -57,7 +61,7 @@ void internal_aes_gcm_ghash_update(struct internal_aes_gcm_ctx *ctx,
uint64_t dg[2];
uint64_t *k;

get_dg(dg, ctx);
get_be_block(dg, ctx->hash_state);

k = (void *)ctx->hash_subkey;

@@ -70,7 +74,7 @@ void internal_aes_gcm_ghash_update(struct internal_aes_gcm_ctx *ctx,
#endif
thread_kernel_disable_vfp(vfp_state);

put_dg(ctx, dg);
put_be_block(ctx->hash_state, dg);
}

#ifdef ARM64
@@ -97,12 +101,14 @@ internal_aes_gcm_update_payload_block_aligned(struct internal_aes_gcm_ctx *ctx,
{
uint32_t vfp_state;
uint64_t dg[2];
uint64_t ctr[2];
uint64_t *k;
void *ctr = ctx->ctr;
void *enc_key = ctx->skey.rijndael.eK;
size_t rounds = ctx->skey.rijndael.Nr;

get_dg(dg, ctx);
get_be_block(dg, ctx->hash_state);
get_be_block(ctr, ctx->ctr);

k = (void *)ctx->hash_subkey;

vfp_state = thread_kernel_enable_vfp();
@@ -117,6 +123,7 @@ internal_aes_gcm_update_payload_block_aligned(struct internal_aes_gcm_ctx *ctx,

thread_kernel_disable_vfp(vfp_state);

put_dg(ctx, dg);
put_be_block(ctx->ctr, ctr);
put_be_block(ctx->hash_state, dg);
}
#endif /*ARM64*/
36 changes: 20 additions & 16 deletions core/arch/arm/crypto/ghash-ce-core_a64.S
@@ -315,24 +315,29 @@ ENDPROC(pmull_ghash_update_p8)
.macro pmull_gcm_do_crypt, enc
ld1 {SHASH.2d}, [x4]
ld1 {XL.2d}, [x1]
ldr x8, [x5, #8] // load lower counter
ldp x8, x9, [x5] // load counter

movi MASK.16b, #0xe1
ext SHASH2.16b, SHASH.16b, SHASH.16b, #8
CPU_LE( rev x8, x8 )

shl MASK.2d, MASK.2d, #57
eor SHASH2.16b, SHASH2.16b, SHASH.16b

.if \enc == 1
ld1 {KS.16b}, [x7]
.endif

0: ld1 {CTR.8b}, [x5] // load upper counter
ld1 {INP.16b}, [x3], #16
rev x9, x8
add x8, x8, #1
0: ins CTR.d[1], x8 // set counter
ins CTR.d[0], x9
CPU_LE( rev64 CTR.16b, CTR.16b)

add x8, x8, #1 // increase counter
cmp x8, #0
b.ne 1f
add x9, x9, #1

1: ld1 {INP.16b}, [x3], #16
sub w0, w0, #1
ins CTR.d[1], x9 // set lower counter

.if \enc == 1
eor INP.16b, INP.16b, KS.16b // encrypt input
@@ -342,9 +347,9 @@ CPU_LE( rev x8, x8 )
rev64 T1.16b, INP.16b

cmp w6, #12
b.ge 2f // AES-192/256?
b.ge 3f // AES-192/256?

1: enc_round CTR, v21
2: enc_round CTR, v21

ext T2.16b, XL.16b, XL.16b, #8
ext IN1.16b, T1.16b, T1.16b, #8
@@ -404,28 +409,27 @@

cbnz w0, 0b

CPU_LE( rev x8, x8 )
st1 {XL.2d}, [x1]
str x8, [x5, #8] // store lower counter
stp x8, x9, [x5] // store counter

.if \enc == 1
st1 {KS.16b}, [x7]
.endif

ret

2: b.eq 3f // AES-192?
3: b.eq 4f // AES-192?
enc_round CTR, v17
enc_round CTR, v18
3: enc_round CTR, v19
4: enc_round CTR, v19
enc_round CTR, v20
b 1b
b 2b
.endm

/*
* void pmull_gcm_encrypt(int blocks, u64 dg[], u8 dst[],
* const u8 src[], struct ghash_key const *k,
* u8 ctr[], int rounds, u8 ks[])
* u64 ctr[2], int rounds, u8 ks[])
*/
.section .text.pmull_gcm_encrypt
ENTRY(pmull_gcm_encrypt)
@@ -435,7 +439,7 @@
/*
* void pmull_gcm_decrypt(int blocks, u64 dg[], u8 dst[],
* const u8 src[], struct ghash_key const *k,
* u8 ctr[], int rounds)
* u64 ctr[2], int rounds)
*/
.section .text.pmull_gcm_decrypt
ENTRY(pmull_gcm_decrypt)
4 changes: 2 additions & 2 deletions core/arch/arm/include/crypto/ghash-ce-core.h
@@ -19,12 +19,12 @@ void pmull_gcm_load_round_keys(uint64_t rk[30], int rounds);

void pmull_gcm_encrypt(int blocks, uint64_t dg[2], uint8_t dst[],
const uint8_t src[], const uint64_t k[2],
const uint8_t ctr[], int rounds, uint8_t ks[]);
uint64_t ctr[], int rounds, uint8_t ks[]);


void pmull_gcm_decrypt(int blocks, uint64_t dg[2], uint8_t dst[],
const uint8_t src[], const uint64_t k[2],
const uint8_t ctr[], int rounds);
uint64_t ctr[], int rounds);

void pmull_gcm_encrypt_block(uint8_t dst[], const uint8_t src[], int rounds);

