crypto: lib/chacha - strongly type the ChaCha state
The ChaCha state matrix is 16 32-bit words. Currently it is represented
in the code as a raw u32 array, or even just a pointer to u32. This
weak typing is error-prone. Instead, introduce struct chacha_state:
	struct chacha_state {
		u32 x[16];
	};
Convert all ChaCha and HChaCha functions to use struct chacha_state.
No functional changes.
Signed-off-by: Eric Biggers <ebiggers@google.com>
Acked-by: Kent Overstreet <kent.overstreet@linux.dev>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
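
For context, a minimal sketch of what a caller looks like after the conversion. It uses only interfaces that appear in this patch (struct chacha_state, chacha_init(), chacha20_crypt(), memzero_explicit()); the key/IV setup is illustrative, not taken from any particular call site:

	struct chacha_state state;
	u32 key[CHACHA_KEY_WORDS];	/* filled in by the caller */
	u8 iv[CHACHA_IV_SIZE];		/* filled in by the caller */

	chacha_init(&state, key, iv);
	chacha20_crypt(&state, dst, src, len);
	/* The state holds key material, so wipe it when done. */
	memzero_explicit(&state, sizeof(state));

Passing a bare u32 * where a struct chacha_state * is expected now fails to compile, which is exactly the class of error the stronger typing is meant to catch.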
@@ -17,15 +17,18 @@
#include <asm/neon.h>
#include <asm/simd.h>

asmlinkage void chacha_block_xor_neon(const u32 *state, u8 *dst, const u8 *src,
int nrounds);
asmlinkage void chacha_4block_xor_neon(const u32 *state, u8 *dst, const u8 *src,
asmlinkage void chacha_block_xor_neon(const struct chacha_state *state,
u8 *dst, const u8 *src, int nrounds);
asmlinkage void chacha_4block_xor_neon(const struct chacha_state *state,
u8 *dst, const u8 *src,
int nrounds, unsigned int nbytes);
asmlinkage void hchacha_block_arm(const u32 *state, u32 *out, int nrounds);
asmlinkage void hchacha_block_neon(const u32 *state, u32 *out, int nrounds);
asmlinkage void hchacha_block_arm(const struct chacha_state *state,
u32 *out, int nrounds);
asmlinkage void hchacha_block_neon(const struct chacha_state *state,
u32 *out, int nrounds);

asmlinkage void chacha_doarm(u8 *dst, const u8 *src, unsigned int bytes,
const u32 *state, int nrounds);
const struct chacha_state *state, int nrounds);

static __ro_after_init DEFINE_STATIC_KEY_FALSE(use_neon);

@@ -34,7 +37,7 @@ static inline bool neon_usable(void)
return static_branch_likely(&use_neon) && crypto_simd_usable();
}

static void chacha_doneon(u32 *state, u8 *dst, const u8 *src,
static void chacha_doneon(struct chacha_state *state, u8 *dst, const u8 *src,
unsigned int bytes, int nrounds)
{
u8 buf[CHACHA_BLOCK_SIZE];
@@ -46,7 +49,7 @@ static void chacha_doneon(u32 *state, u8 *dst, const u8 *src,
bytes -= l;
src += l;
dst += l;
state[12] += DIV_ROUND_UP(l, CHACHA_BLOCK_SIZE);
state->x[12] += DIV_ROUND_UP(l, CHACHA_BLOCK_SIZE);
}
if (bytes) {
const u8 *s = src;
@@ -57,11 +60,12 @@ static void chacha_doneon(u32 *state, u8 *dst, const u8 *src,
chacha_block_xor_neon(state, d, s, nrounds);
if (d != dst)
memcpy(dst, buf, bytes);
state[12]++;
state->x[12]++;
}
}

void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
void hchacha_block_arch(const struct chacha_state *state, u32 *stream,
int nrounds)
{
if (!IS_ENABLED(CONFIG_KERNEL_MODE_NEON) || !neon_usable()) {
hchacha_block_arm(state, stream, nrounds);
@@ -73,13 +77,13 @@ void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
}
EXPORT_SYMBOL(hchacha_block_arch);

void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
int nrounds)
void chacha_crypt_arch(struct chacha_state *state, u8 *dst, const u8 *src,
unsigned int bytes, int nrounds)
{
if (!IS_ENABLED(CONFIG_KERNEL_MODE_NEON) || !neon_usable() ||
bytes <= CHACHA_BLOCK_SIZE) {
chacha_doarm(dst, src, bytes, state, nrounds);
state[12] += DIV_ROUND_UP(bytes, CHACHA_BLOCK_SIZE);
state->x[12] += DIV_ROUND_UP(bytes, CHACHA_BLOCK_SIZE);
return;
}

@@ -367,7 +367,7 @@

/*
* void chacha_doarm(u8 *dst, const u8 *src, unsigned int bytes,
* const u32 *state, int nrounds);
* const struct chacha_state *state, int nrounds);
*/
ENTRY(chacha_doarm)
cmp r2, #0 // len == 0?
@@ -407,7 +407,8 @@ ENTRY(chacha_doarm)
ENDPROC(chacha_doarm)

/*
* void hchacha_block_arm(const u32 state[16], u32 out[8], int nrounds);
* void hchacha_block_arm(const struct chacha_state *state,
* u32 out[8], int nrounds);
*/
ENTRY(hchacha_block_arm)
push {r1,r4-r11,lr}

@@ -28,15 +28,17 @@
#include <asm/neon.h>
#include <asm/simd.h>

asmlinkage void chacha_block_xor_neon(u32 *state, u8 *dst, const u8 *src,
int nrounds);
asmlinkage void chacha_4block_xor_neon(u32 *state, u8 *dst, const u8 *src,
asmlinkage void chacha_block_xor_neon(const struct chacha_state *state,
u8 *dst, const u8 *src, int nrounds);
asmlinkage void chacha_4block_xor_neon(const struct chacha_state *state,
u8 *dst, const u8 *src,
int nrounds, int bytes);
asmlinkage void hchacha_block_neon(const u32 *state, u32 *out, int nrounds);
asmlinkage void hchacha_block_neon(const struct chacha_state *state,
u32 *out, int nrounds);

static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_neon);

static void chacha_doneon(u32 *state, u8 *dst, const u8 *src,
static void chacha_doneon(struct chacha_state *state, u8 *dst, const u8 *src,
int bytes, int nrounds)
{
while (bytes > 0) {
@@ -48,18 +50,19 @@ static void chacha_doneon(u32 *state, u8 *dst, const u8 *src,
memcpy(buf, src, l);
chacha_block_xor_neon(state, buf, buf, nrounds);
memcpy(dst, buf, l);
state[12] += 1;
state->x[12] += 1;
break;
}
chacha_4block_xor_neon(state, dst, src, nrounds, l);
bytes -= l;
src += l;
dst += l;
state[12] += DIV_ROUND_UP(l, CHACHA_BLOCK_SIZE);
state->x[12] += DIV_ROUND_UP(l, CHACHA_BLOCK_SIZE);
}
}

void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
void hchacha_block_arch(const struct chacha_state *state, u32 *stream,
int nrounds)
{
if (!static_branch_likely(&have_neon) || !crypto_simd_usable()) {
hchacha_block_generic(state, stream, nrounds);
@@ -71,8 +74,8 @@ void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
}
EXPORT_SYMBOL(hchacha_block_arch);

void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
int nrounds)
void chacha_crypt_arch(struct chacha_state *state, u8 *dst, const u8 *src,
unsigned int bytes, int nrounds)
{
if (!static_branch_likely(&have_neon) || bytes <= CHACHA_BLOCK_SIZE ||
!crypto_simd_usable())

@@ -9,11 +9,13 @@
#include <linux/kernel.h>
#include <linux/module.h>

asmlinkage void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src,
asmlinkage void chacha_crypt_arch(struct chacha_state *state,
u8 *dst, const u8 *src,
unsigned int bytes, int nrounds);
EXPORT_SYMBOL(chacha_crypt_arch);

asmlinkage void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds);
asmlinkage void hchacha_block_arch(const struct chacha_state *state,
u32 *stream, int nrounds);
EXPORT_SYMBOL(hchacha_block_arch);

bool chacha_is_arch_optimized(void)

@@ -14,8 +14,8 @@
#include <asm/simd.h>
#include <asm/switch_to.h>

asmlinkage void chacha_p10le_8x(u32 *state, u8 *dst, const u8 *src,
unsigned int len, int nrounds);
asmlinkage void chacha_p10le_8x(const struct chacha_state *state, u8 *dst,
const u8 *src, unsigned int len, int nrounds);

static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_p10);

@@ -31,7 +31,7 @@ static void vsx_end(void)
preempt_enable();
}

static void chacha_p10_do_8x(u32 *state, u8 *dst, const u8 *src,
static void chacha_p10_do_8x(struct chacha_state *state, u8 *dst, const u8 *src,
unsigned int bytes, int nrounds)
{
unsigned int l = bytes & ~0x0FF;
@@ -41,21 +41,22 @@ static void chacha_p10_do_8x(u32 *state, u8 *dst, const u8 *src,
bytes -= l;
src += l;
dst += l;
state[12] += l / CHACHA_BLOCK_SIZE;
state->x[12] += l / CHACHA_BLOCK_SIZE;
}

if (bytes > 0)
chacha_crypt_generic(state, dst, src, bytes, nrounds);
}

void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
void hchacha_block_arch(const struct chacha_state *state,
u32 *stream, int nrounds)
{
hchacha_block_generic(state, stream, nrounds);
}
EXPORT_SYMBOL(hchacha_block_arch);

void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
int nrounds)
void chacha_crypt_arch(struct chacha_state *state, u8 *dst, const u8 *src,
unsigned int bytes, int nrounds)
{
if (!static_branch_likely(&have_p10) || bytes <= CHACHA_BLOCK_SIZE ||
!crypto_simd_usable())

@@ -7,9 +7,6 @@
#===================================================================================
# Written by Danny Tsen <dtsen@us.ibm.com>
#
# chacha_p10le_8x(u32 *state, byte *dst, const byte *src,
# size_t len, int nrounds);
#
# do rounds, 8 quarter rounds
# 1. a += b; d ^= a; d <<<= 16;
# 2. c += d; b ^= c; b <<<= 12;
@@ -575,7 +572,8 @@
.endm

#
# chacha20_p10le_8x(u32 *state, byte *dst, const byte *src, size_t len, int nrounds);
# void chacha_p10le_8x(const struct chacha_state *state, u8 *dst, const u8 *src,
# unsigned int len, int nrounds);
#
SYM_FUNC_START(chacha_p10le_8x)
.align 5

@@ -15,17 +15,17 @@

static __ro_after_init DEFINE_STATIC_KEY_FALSE(use_zvkb);

asmlinkage void chacha_zvkb(u32 state[16], const u8 *in, u8 *out,
asmlinkage void chacha_zvkb(struct chacha_state *state, const u8 *in, u8 *out,
size_t nblocks, int nrounds);

void hchacha_block_arch(const u32 *state, u32 *out, int nrounds)
void hchacha_block_arch(const struct chacha_state *state, u32 *out, int nrounds)
{
hchacha_block_generic(state, out, nrounds);
}
EXPORT_SYMBOL(hchacha_block_arch);

void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
int nrounds)
void chacha_crypt_arch(struct chacha_state *state, u8 *dst, const u8 *src,
unsigned int bytes, int nrounds)
{
u8 block_buffer[CHACHA_BLOCK_SIZE];
unsigned int full_blocks = bytes / CHACHA_BLOCK_SIZE;

@@ -132,15 +132,15 @@
vror.vi \b3, \b3, 32 - 7
.endm

// void chacha_zvkb(u32 state[16], const u8 *in, u8 *out, size_t nblocks,
// int nrounds);
// void chacha_zvkb(struct chacha_state *state, const u8 *in, u8 *out,
// size_t nblocks, int nrounds);
//
// |nblocks| is the number of 64-byte blocks to process, and must be nonzero.
//
// |state| gives the ChaCha state matrix, including the 32-bit counter in
// state[12] following the RFC7539 convention; note that this differs from the
// original Salsa20 paper which uses a 64-bit counter in state[12..13]. The
// updated 32-bit counter is written back to state[12] before returning.
// state->x[12] following the RFC7539 convention; note that this differs from
// the original Salsa20 paper which uses a 64-bit counter in state->x[12..13].
// The updated 32-bit counter is written back to state->x[12] before returning.
SYM_FUNC_START(chacha_zvkb)
addi sp, sp, -96
sd s0, 0(sp)

@@ -16,14 +16,15 @@
#include <asm/fpu.h>
#include "chacha-s390.h"

void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
void hchacha_block_arch(const struct chacha_state *state,
u32 *stream, int nrounds)
{
/* TODO: implement hchacha_block_arch() in assembly */
hchacha_block_generic(state, stream, nrounds);
}
EXPORT_SYMBOL(hchacha_block_arch);

void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src,
void chacha_crypt_arch(struct chacha_state *state, u8 *dst, const u8 *src,
unsigned int bytes, int nrounds)
{
/* s390 chacha20 implementation has 20 rounds hard-coded,
@@ -36,11 +37,11 @@ void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src,
DECLARE_KERNEL_FPU_ONSTACK32(vxstate);

kernel_fpu_begin(&vxstate, KERNEL_VXR);
chacha20_vx(dst, src, bytes, &state[4], &state[12]);
chacha20_vx(dst, src, bytes, &state->x[4], &state->x[12]);
kernel_fpu_end(&vxstate, KERNEL_VXR);

state[12] += round_up(bytes, CHACHA_BLOCK_SIZE) /
CHACHA_BLOCK_SIZE;
state->x[12] += round_up(bytes, CHACHA_BLOCK_SIZE) /
CHACHA_BLOCK_SIZE;
}
}
EXPORT_SYMBOL(chacha_crypt_arch);

@@ -12,24 +12,33 @@
#include <linux/module.h>
#include <linux/sizes.h>

asmlinkage void chacha_block_xor_ssse3(u32 *state, u8 *dst, const u8 *src,
asmlinkage void chacha_block_xor_ssse3(const struct chacha_state *state,
u8 *dst, const u8 *src,
unsigned int len, int nrounds);
asmlinkage void chacha_4block_xor_ssse3(u32 *state, u8 *dst, const u8 *src,
asmlinkage void chacha_4block_xor_ssse3(const struct chacha_state *state,
u8 *dst, const u8 *src,
unsigned int len, int nrounds);
asmlinkage void hchacha_block_ssse3(const u32 *state, u32 *out, int nrounds);
asmlinkage void hchacha_block_ssse3(const struct chacha_state *state,
u32 *out, int nrounds);

asmlinkage void chacha_2block_xor_avx2(u32 *state, u8 *dst, const u8 *src,
asmlinkage void chacha_2block_xor_avx2(const struct chacha_state *state,
u8 *dst, const u8 *src,
unsigned int len, int nrounds);
asmlinkage void chacha_4block_xor_avx2(u32 *state, u8 *dst, const u8 *src,
asmlinkage void chacha_4block_xor_avx2(const struct chacha_state *state,
u8 *dst, const u8 *src,
unsigned int len, int nrounds);
asmlinkage void chacha_8block_xor_avx2(u32 *state, u8 *dst, const u8 *src,
asmlinkage void chacha_8block_xor_avx2(const struct chacha_state *state,
u8 *dst, const u8 *src,
unsigned int len, int nrounds);

asmlinkage void chacha_2block_xor_avx512vl(u32 *state, u8 *dst, const u8 *src,
asmlinkage void chacha_2block_xor_avx512vl(const struct chacha_state *state,
u8 *dst, const u8 *src,
unsigned int len, int nrounds);
asmlinkage void chacha_4block_xor_avx512vl(u32 *state, u8 *dst, const u8 *src,
asmlinkage void chacha_4block_xor_avx512vl(const struct chacha_state *state,
u8 *dst, const u8 *src,
unsigned int len, int nrounds);
asmlinkage void chacha_8block_xor_avx512vl(u32 *state, u8 *dst, const u8 *src,
asmlinkage void chacha_8block_xor_avx512vl(const struct chacha_state *state,
u8 *dst, const u8 *src,
unsigned int len, int nrounds);

static __ro_after_init DEFINE_STATIC_KEY_FALSE(chacha_use_simd);
@@ -42,7 +51,7 @@ static unsigned int chacha_advance(unsigned int len, unsigned int maxblocks)
return round_up(len, CHACHA_BLOCK_SIZE) / CHACHA_BLOCK_SIZE;
}

static void chacha_dosimd(u32 *state, u8 *dst, const u8 *src,
static void chacha_dosimd(struct chacha_state *state, u8 *dst, const u8 *src,
unsigned int bytes, int nrounds)
{
if (static_branch_likely(&chacha_use_avx512vl)) {
@@ -52,24 +61,24 @@ static void chacha_dosimd(u32 *state, u8 *dst, const u8 *src,
bytes -= CHACHA_BLOCK_SIZE * 8;
src += CHACHA_BLOCK_SIZE * 8;
dst += CHACHA_BLOCK_SIZE * 8;
state[12] += 8;
state->x[12] += 8;
}
if (bytes > CHACHA_BLOCK_SIZE * 4) {
chacha_8block_xor_avx512vl(state, dst, src, bytes,
nrounds);
state[12] += chacha_advance(bytes, 8);
state->x[12] += chacha_advance(bytes, 8);
return;
}
if (bytes > CHACHA_BLOCK_SIZE * 2) {
chacha_4block_xor_avx512vl(state, dst, src, bytes,
nrounds);
state[12] += chacha_advance(bytes, 4);
state->x[12] += chacha_advance(bytes, 4);
return;
}
if (bytes) {
chacha_2block_xor_avx512vl(state, dst, src, bytes,
nrounds);
state[12] += chacha_advance(bytes, 2);
state->x[12] += chacha_advance(bytes, 2);
return;
}
}
@@ -80,21 +89,21 @@ static void chacha_dosimd(u32 *state, u8 *dst, const u8 *src,
bytes -= CHACHA_BLOCK_SIZE * 8;
src += CHACHA_BLOCK_SIZE * 8;
dst += CHACHA_BLOCK_SIZE * 8;
state[12] += 8;
state->x[12] += 8;
}
if (bytes > CHACHA_BLOCK_SIZE * 4) {
chacha_8block_xor_avx2(state, dst, src, bytes, nrounds);
state[12] += chacha_advance(bytes, 8);
state->x[12] += chacha_advance(bytes, 8);
return;
}
if (bytes > CHACHA_BLOCK_SIZE * 2) {
chacha_4block_xor_avx2(state, dst, src, bytes, nrounds);
state[12] += chacha_advance(bytes, 4);
state->x[12] += chacha_advance(bytes, 4);
return;
}
if (bytes > CHACHA_BLOCK_SIZE) {
chacha_2block_xor_avx2(state, dst, src, bytes, nrounds);
state[12] += chacha_advance(bytes, 2);
state->x[12] += chacha_advance(bytes, 2);
return;
}
}
@@ -104,20 +113,21 @@ static void chacha_dosimd(u32 *state, u8 *dst, const u8 *src,
bytes -= CHACHA_BLOCK_SIZE * 4;
src += CHACHA_BLOCK_SIZE * 4;
dst += CHACHA_BLOCK_SIZE * 4;
state[12] += 4;
state->x[12] += 4;
}
if (bytes > CHACHA_BLOCK_SIZE) {
chacha_4block_xor_ssse3(state, dst, src, bytes, nrounds);
state[12] += chacha_advance(bytes, 4);
state->x[12] += chacha_advance(bytes, 4);
return;
}
if (bytes) {
chacha_block_xor_ssse3(state, dst, src, bytes, nrounds);
state[12]++;
state->x[12]++;
}
}

void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
void hchacha_block_arch(const struct chacha_state *state,
u32 *stream, int nrounds)
{
if (!static_branch_likely(&chacha_use_simd)) {
hchacha_block_generic(state, stream, nrounds);
@@ -129,8 +139,8 @@ void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
}
EXPORT_SYMBOL(hchacha_block_arch);

void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
int nrounds)
void chacha_crypt_arch(struct chacha_state *state, u8 *dst, const u8 *src,
unsigned int bytes, int nrounds)
{
if (!static_branch_likely(&chacha_use_simd) ||
bytes <= CHACHA_BLOCK_SIZE)

@@ -50,12 +50,12 @@ static int chacha_stream_xor(struct skcipher_request *req,
bool arch)
{
struct skcipher_walk walk;
u32 state[16];
struct chacha_state state;
int err;

err = skcipher_walk_virt(&walk, req, false);

chacha_init(state, ctx->key, iv);
chacha_init(&state, ctx->key, iv);

while (walk.nbytes > 0) {
unsigned int nbytes = walk.nbytes;
@@ -64,10 +64,10 @@ static int chacha_stream_xor(struct skcipher_request *req,
nbytes = round_down(nbytes, CHACHA_BLOCK_SIZE);

if (arch)
chacha_crypt(state, walk.dst.virt.addr,
chacha_crypt(&state, walk.dst.virt.addr,
walk.src.virt.addr, nbytes, ctx->nrounds);
else
chacha_crypt_generic(state, walk.dst.virt.addr,
chacha_crypt_generic(&state, walk.dst.virt.addr,
walk.src.virt.addr, nbytes,
ctx->nrounds);
err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
@@ -97,15 +97,15 @@ static int crypto_xchacha_crypt(struct skcipher_request *req, bool arch)
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
const struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
struct chacha_ctx subctx;
u32 state[16];
struct chacha_state state;
u8 real_iv[16];

/* Compute the subkey given the original key and first 128 nonce bits */
chacha_init(state, ctx->key, req->iv);
chacha_init(&state, ctx->key, req->iv);
if (arch)
hchacha_block(state, subctx.key, ctx->nrounds);
hchacha_block(&state, subctx.key, ctx->nrounds);
else
hchacha_block_generic(state, subctx.key, ctx->nrounds);
hchacha_block_generic(&state, subctx.key, ctx->nrounds);
subctx.nrounds = ctx->nrounds;

/* Build the real IV */

@@ -309,11 +309,11 @@ static void crng_reseed(struct work_struct *work)
* key value, at index 4, so the state should always be zeroed out
* immediately after using in order to maintain forward secrecy.
* If the state cannot be erased in a timely manner, then it is
* safer to set the random_data parameter to &chacha_state[4] so
* that this function overwrites it before returning.
* safer to set the random_data parameter to &chacha_state->x[4]
* so that this function overwrites it before returning.
*/
static void crng_fast_key_erasure(u8 key[CHACHA_KEY_SIZE],
u32 chacha_state[CHACHA_STATE_WORDS],
struct chacha_state *chacha_state,
u8 *random_data, size_t random_data_len)
{
u8 first_block[CHACHA_BLOCK_SIZE];
@@ -321,8 +321,8 @@ static void crng_fast_key_erasure(u8 key[CHACHA_KEY_SIZE],
BUG_ON(random_data_len > 32);

chacha_init_consts(chacha_state);
memcpy(&chacha_state[4], key, CHACHA_KEY_SIZE);
memset(&chacha_state[12], 0, sizeof(u32) * 4);
memcpy(&chacha_state->x[4], key, CHACHA_KEY_SIZE);
memset(&chacha_state->x[12], 0, sizeof(u32) * 4);
chacha20_block(chacha_state, first_block);

memcpy(key, first_block, CHACHA_KEY_SIZE);
@@ -335,7 +335,7 @@ static void crng_fast_key_erasure(u8 key[CHACHA_KEY_SIZE],
* random data. It also returns up to 32 bytes on its own of random data
* that may be used; random_data_len may not be greater than 32.
*/
static void crng_make_state(u32 chacha_state[CHACHA_STATE_WORDS],
static void crng_make_state(struct chacha_state *chacha_state,
u8 *random_data, size_t random_data_len)
{
unsigned long flags;
@@ -395,7 +395,7 @@ static void crng_make_state(u32 chacha_state[CHACHA_STATE_WORDS],

static void _get_random_bytes(void *buf, size_t len)
{
u32 chacha_state[CHACHA_STATE_WORDS];
struct chacha_state chacha_state;
u8 tmp[CHACHA_BLOCK_SIZE];
size_t first_block_len;

@@ -403,26 +403,26 @@ static void _get_random_bytes(void *buf, size_t len)
return;

first_block_len = min_t(size_t, 32, len);
crng_make_state(chacha_state, buf, first_block_len);
crng_make_state(&chacha_state, buf, first_block_len);
len -= first_block_len;
buf += first_block_len;

while (len) {
if (len < CHACHA_BLOCK_SIZE) {
chacha20_block(chacha_state, tmp);
chacha20_block(&chacha_state, tmp);
memcpy(buf, tmp, len);
memzero_explicit(tmp, sizeof(tmp));
break;
}

chacha20_block(chacha_state, buf);
if (unlikely(chacha_state[12] == 0))
++chacha_state[13];
chacha20_block(&chacha_state, buf);
if (unlikely(chacha_state.x[12] == 0))
++chacha_state.x[13];
len -= CHACHA_BLOCK_SIZE;
buf += CHACHA_BLOCK_SIZE;
}

memzero_explicit(chacha_state, sizeof(chacha_state));
memzero_explicit(&chacha_state, sizeof(chacha_state));
}

/*
@@ -441,7 +441,7 @@ EXPORT_SYMBOL(get_random_bytes);

static ssize_t get_random_bytes_user(struct iov_iter *iter)
{
u32 chacha_state[CHACHA_STATE_WORDS];
struct chacha_state chacha_state;
u8 block[CHACHA_BLOCK_SIZE];
size_t ret = 0, copied;

@@ -453,21 +453,22 @@ static ssize_t get_random_bytes_user(struct iov_iter *iter)
* bytes, in case userspace causes copy_to_iter() below to sleep
* forever, so that we still retain forward secrecy in that case.
*/
crng_make_state(chacha_state, (u8 *)&chacha_state[4], CHACHA_KEY_SIZE);
crng_make_state(&chacha_state, (u8 *)&chacha_state.x[4],
CHACHA_KEY_SIZE);
/*
* However, if we're doing a read of len <= 32, we don't need to
* use chacha_state after, so we can simply return those bytes to
* the user directly.
*/
if (iov_iter_count(iter) <= CHACHA_KEY_SIZE) {
ret = copy_to_iter(&chacha_state[4], CHACHA_KEY_SIZE, iter);
ret = copy_to_iter(&chacha_state.x[4], CHACHA_KEY_SIZE, iter);
goto out_zero_chacha;
}

for (;;) {
chacha20_block(chacha_state, block);
if (unlikely(chacha_state[12] == 0))
++chacha_state[13];
chacha20_block(&chacha_state, block);
if (unlikely(chacha_state.x[12] == 0))
++chacha_state.x[13];

copied = copy_to_iter(block, sizeof(block), iter);
ret += copied;
@@ -484,7 +485,7 @@ static ssize_t get_random_bytes_user(struct iov_iter *iter)

memzero_explicit(block, sizeof(block));
out_zero_chacha:
memzero_explicit(chacha_state, sizeof(chacha_state));
memzero_explicit(&chacha_state, sizeof(chacha_state));
return ret ? ret : -EFAULT;
}

@@ -91,7 +91,7 @@ static void bch2_checksum_update(struct bch2_checksum_state *state, const void *
}
}

static void bch2_chacha20_init(u32 state[CHACHA_STATE_WORDS],
static void bch2_chacha20_init(struct chacha_state *state,
const struct bch_key *key, struct nonce nonce)
{
u32 key_words[CHACHA_KEY_SIZE / sizeof(u32)];
@@ -109,11 +109,11 @@ static void bch2_chacha20_init(u32 state[CHACHA_STATE_WORDS],
static void bch2_chacha20(const struct bch_key *key, struct nonce nonce,
void *data, size_t len)
{
u32 state[CHACHA_STATE_WORDS];
struct chacha_state state;

bch2_chacha20_init(state, key, nonce);
chacha20_crypt(state, data, data, len);
memzero_explicit(state, sizeof(state));
bch2_chacha20_init(&state, key, nonce);
chacha20_crypt(&state, data, data, len);
memzero_explicit(&state, sizeof(state));
}

static void bch2_poly1305_init(struct poly1305_desc_ctx *desc,
@@ -257,14 +257,14 @@ int __bch2_encrypt_bio(struct bch_fs *c, unsigned type,
{
struct bio_vec bv;
struct bvec_iter iter;
u32 chacha_state[CHACHA_STATE_WORDS];
struct chacha_state chacha_state;
int ret = 0;

if (bch2_fs_inconsistent_on(!c->chacha20_key_set,
c, "attempting to encrypt without encryption key"))
return -BCH_ERR_no_encryption_key;

bch2_chacha20_init(chacha_state, &c->chacha20_key, nonce);
bch2_chacha20_init(&chacha_state, &c->chacha20_key, nonce);

bio_for_each_segment(bv, bio, iter) {
void *p;
@@ -280,10 +280,10 @@ int __bch2_encrypt_bio(struct bch_fs *c, unsigned type,
}

p = bvec_kmap_local(&bv);
chacha20_crypt(chacha_state, p, p, bv.bv_len);
chacha20_crypt(&chacha_state, p, p, bv.bv_len);
kunmap_local(p);
}
memzero_explicit(chacha_state, sizeof(chacha_state));
memzero_explicit(&chacha_state, sizeof(chacha_state));
return ret;
}

@@ -30,16 +30,23 @@
/* 192-bit nonce, then 64-bit stream position */
#define XCHACHA_IV_SIZE 32

void chacha_block_generic(u32 *state, u8 *stream, int nrounds);
static inline void chacha20_block(u32 *state, u8 *stream)
struct chacha_state {
u32 x[CHACHA_STATE_WORDS];
};

void chacha_block_generic(struct chacha_state *state, u8 *stream, int nrounds);
static inline void chacha20_block(struct chacha_state *state, u8 *stream)
{
chacha_block_generic(state, stream, 20);
}

void hchacha_block_arch(const u32 *state, u32 *out, int nrounds);
void hchacha_block_generic(const u32 *state, u32 *out, int nrounds);
void hchacha_block_arch(const struct chacha_state *state, u32 *out,
int nrounds);
void hchacha_block_generic(const struct chacha_state *state, u32 *out,
int nrounds);

static inline void hchacha_block(const u32 *state, u32 *out, int nrounds)
static inline void hchacha_block(const struct chacha_state *state, u32 *out,
int nrounds)
{
if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA))
hchacha_block_arch(state, out, nrounds);
@@ -54,37 +61,39 @@ enum chacha_constants { /* expand 32-byte k */
CHACHA_CONSTANT_TE_K = 0x6b206574U
};

static inline void chacha_init_consts(u32 *state)
static inline void chacha_init_consts(struct chacha_state *state)
{
state[0] = CHACHA_CONSTANT_EXPA;
state[1] = CHACHA_CONSTANT_ND_3;
state[2] = CHACHA_CONSTANT_2_BY;
state[3] = CHACHA_CONSTANT_TE_K;
state->x[0] = CHACHA_CONSTANT_EXPA;
state->x[1] = CHACHA_CONSTANT_ND_3;
state->x[2] = CHACHA_CONSTANT_2_BY;
state->x[3] = CHACHA_CONSTANT_TE_K;
}

static inline void chacha_init(u32 *state, const u32 *key, const u8 *iv)
static inline void chacha_init(struct chacha_state *state,
const u32 *key, const u8 *iv)
{
chacha_init_consts(state);
state[4] = key[0];
state[5] = key[1];
state[6] = key[2];
state[7] = key[3];
state[8] = key[4];
state[9] = key[5];
state[10] = key[6];
state[11] = key[7];
state[12] = get_unaligned_le32(iv + 0);
state[13] = get_unaligned_le32(iv + 4);
state[14] = get_unaligned_le32(iv + 8);
state[15] = get_unaligned_le32(iv + 12);
state->x[4] = key[0];
state->x[5] = key[1];
state->x[6] = key[2];
state->x[7] = key[3];
state->x[8] = key[4];
state->x[9] = key[5];
state->x[10] = key[6];
state->x[11] = key[7];
state->x[12] = get_unaligned_le32(iv + 0);
state->x[13] = get_unaligned_le32(iv + 4);
state->x[14] = get_unaligned_le32(iv + 8);
state->x[15] = get_unaligned_le32(iv + 12);
}

void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src,
void chacha_crypt_arch(struct chacha_state *state, u8 *dst, const u8 *src,
unsigned int bytes, int nrounds);
void chacha_crypt_generic(u32 *state, u8 *dst, const u8 *src,
void chacha_crypt_generic(struct chacha_state *state, u8 *dst, const u8 *src,
unsigned int bytes, int nrounds);

static inline void chacha_crypt(u32 *state, u8 *dst, const u8 *src,
static inline void chacha_crypt(struct chacha_state *state,
u8 *dst, const u8 *src,
unsigned int bytes, int nrounds)
{
if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA))
@@ -93,8 +102,8 @@ static inline void chacha_crypt(u32 *state, u8 *dst, const u8 *src,
chacha_crypt_generic(state, dst, src, bytes, nrounds);
}

static inline void chacha20_crypt(u32 *state, u8 *dst, const u8 *src,
unsigned int bytes)
static inline void chacha20_crypt(struct chacha_state *state,
u8 *dst, const u8 *src, unsigned int bytes)
{
chacha_crypt(state, dst, src, bytes, 20);
}

@@ -13,8 +13,9 @@
#include <linux/unaligned.h>
#include <crypto/chacha.h>

static void chacha_permute(u32 *x, int nrounds)
static void chacha_permute(struct chacha_state *state, int nrounds)
{
u32 *x = state->x;
int i;

/* whitelist the allowed round counts */
@@ -65,7 +66,7 @@ static void chacha_permute(u32 *x, int nrounds)

/**
* chacha_block_generic - generate one keystream block and increment block counter
* @state: input state matrix (16 32-bit words)
* @state: input state matrix
* @stream: output keystream block (64 bytes)
* @nrounds: number of rounds (20 or 12; 20 is recommended)
*
@@ -73,25 +74,26 @@ static void chacha_permute(u32 *x, int nrounds)
* The caller has already converted the endianness of the input. This function
* also handles incrementing the block counter in the input matrix.
*/
void chacha_block_generic(u32 *state, u8 *stream, int nrounds)
void chacha_block_generic(struct chacha_state *state, u8 *stream, int nrounds)
{
u32 x[16];
struct chacha_state permuted_state;
int i;

memcpy(x, state, 64);
memcpy(permuted_state.x, state->x, 64);

chacha_permute(x, nrounds);
chacha_permute(&permuted_state, nrounds);

for (i = 0; i < ARRAY_SIZE(x); i++)
put_unaligned_le32(x[i] + state[i], &stream[i * sizeof(u32)]);
for (i = 0; i < ARRAY_SIZE(state->x); i++)
put_unaligned_le32(permuted_state.x[i] + state->x[i],
&stream[i * sizeof(u32)]);

state[12]++;
state->x[12]++;
}
EXPORT_SYMBOL(chacha_block_generic);

/**
* hchacha_block_generic - abbreviated ChaCha core, for XChaCha
* @state: input state matrix (16 32-bit words)
* @state: input state matrix
* @stream: output (8 32-bit words)
* @nrounds: number of rounds (20 or 12; 20 is recommended)
*
@@ -100,15 +102,16 @@ EXPORT_SYMBOL(chacha_block_generic);
* skips the final addition of the initial state, and outputs only certain words
* of the state. It should not be used for streaming directly.
*/
void hchacha_block_generic(const u32 *state, u32 *stream, int nrounds)
void hchacha_block_generic(const struct chacha_state *state,
u32 *stream, int nrounds)
{
u32 x[16];
struct chacha_state permuted_state;

memcpy(x, state, 64);
memcpy(permuted_state.x, state->x, 64);

chacha_permute(x, nrounds);
chacha_permute(&permuted_state, nrounds);

memcpy(&stream[0], &x[0], 16);
memcpy(&stream[4], &x[12], 16);
memcpy(&stream[0], &permuted_state.x[0], 16);
memcpy(&stream[4], &permuted_state.x[12], 16);
}
EXPORT_SYMBOL(hchacha_block_generic);

@@ -8832,7 +8832,7 @@ chacha20poly1305_encrypt_bignonce(u8 *dst, const u8 *src, const size_t src_len,
{
const u8 *pad0 = page_address(ZERO_PAGE(0));
struct poly1305_desc_ctx poly1305_state;
u32 chacha20_state[CHACHA_STATE_WORDS];
struct chacha_state chacha20_state;
union {
u8 block0[POLY1305_KEY_SIZE];
__le64 lens[2];
@@ -8844,12 +8844,12 @@ chacha20poly1305_encrypt_bignonce(u8 *dst, const u8 *src, const size_t src_len,
memcpy(&bottom_row[4], nonce, 12);
for (i = 0; i < 8; ++i)
le_key[i] = get_unaligned_le32(key + sizeof(le_key[i]) * i);
chacha_init(chacha20_state, le_key, bottom_row);
chacha20_crypt(chacha20_state, b.block0, b.block0, sizeof(b.block0));
chacha_init(&chacha20_state, le_key, bottom_row);
chacha20_crypt(&chacha20_state, b.block0, b.block0, sizeof(b.block0));
poly1305_init(&poly1305_state, b.block0);
poly1305_update(&poly1305_state, ad, ad_len);
poly1305_update(&poly1305_state, pad0, (0x10 - ad_len) & 0xf);
chacha20_crypt(chacha20_state, dst, src, src_len);
chacha20_crypt(&chacha20_state, dst, src, src_len);
poly1305_update(&poly1305_state, dst, src_len);
poly1305_update(&poly1305_state, pad0, (0x10 - src_len) & 0xf);
b.lens[0] = cpu_to_le64(ad_len);

@@ -32,7 +32,8 @@ static void chacha_load_key(u32 *k, const u8 *in)
k[7] = get_unaligned_le32(in + 28);
}

static void xchacha_init(u32 *chacha_state, const u8 *key, const u8 *nonce)
static void xchacha_init(struct chacha_state *chacha_state,
const u8 *key, const u8 *nonce)
{
u32 k[CHACHA_KEY_WORDS];
u8 iv[CHACHA_IV_SIZE];
@@ -54,7 +55,8 @@ static void xchacha_init(u32 *chacha_state, const u8 *key, const u8 *nonce)

static void
__chacha20poly1305_encrypt(u8 *dst, const u8 *src, const size_t src_len,
const u8 *ad, const size_t ad_len, u32 *chacha_state)
const u8 *ad, const size_t ad_len,
struct chacha_state *chacha_state)
{
const u8 *pad0 = page_address(ZERO_PAGE(0));
struct poly1305_desc_ctx poly1305_state;
@@ -82,7 +84,7 @@ __chacha20poly1305_encrypt(u8 *dst, const u8 *src, const size_t src_len,

poly1305_final(&poly1305_state, dst + src_len);

memzero_explicit(chacha_state, CHACHA_STATE_WORDS * sizeof(u32));
memzero_explicit(chacha_state, sizeof(*chacha_state));
memzero_explicit(&b, sizeof(b));
}

@@ -91,7 +93,7 @@ void chacha20poly1305_encrypt(u8 *dst, const u8 *src, const size_t src_len,
const u64 nonce,
const u8 key[CHACHA20POLY1305_KEY_SIZE])
{
u32 chacha_state[CHACHA_STATE_WORDS];
struct chacha_state chacha_state;
u32 k[CHACHA_KEY_WORDS];
__le64 iv[2];

@@ -100,8 +102,9 @@ void chacha20poly1305_encrypt(u8 *dst, const u8 *src, const size_t src_len,
iv[0] = 0;
iv[1] = cpu_to_le64(nonce);

chacha_init(chacha_state, k, (u8 *)iv);
__chacha20poly1305_encrypt(dst, src, src_len, ad, ad_len, chacha_state);
chacha_init(&chacha_state, k, (u8 *)iv);
__chacha20poly1305_encrypt(dst, src, src_len, ad, ad_len,
&chacha_state);

memzero_explicit(iv, sizeof(iv));
memzero_explicit(k, sizeof(k));
@@ -113,16 +116,18 @@ void xchacha20poly1305_encrypt(u8 *dst, const u8 *src, const size_t src_len,
const u8 nonce[XCHACHA20POLY1305_NONCE_SIZE],
const u8 key[CHACHA20POLY1305_KEY_SIZE])
{
u32 chacha_state[CHACHA_STATE_WORDS];
struct chacha_state chacha_state;

xchacha_init(chacha_state, key, nonce);
__chacha20poly1305_encrypt(dst, src, src_len, ad, ad_len, chacha_state);
xchacha_init(&chacha_state, key, nonce);
__chacha20poly1305_encrypt(dst, src, src_len, ad, ad_len,
&chacha_state);
}
EXPORT_SYMBOL(xchacha20poly1305_encrypt);

static bool
__chacha20poly1305_decrypt(u8 *dst, const u8 *src, const size_t src_len,
const u8 *ad, const size_t ad_len, u32 *chacha_state)
const u8 *ad, const size_t ad_len,
struct chacha_state *chacha_state)
{
const u8 *pad0 = page_address(ZERO_PAGE(0));
struct poly1305_desc_ctx poly1305_state;
@@ -169,7 +174,7 @@ bool chacha20poly1305_decrypt(u8 *dst, const u8 *src, const size_t src_len,
const u64 nonce,
const u8 key[CHACHA20POLY1305_KEY_SIZE])
{
u32 chacha_state[CHACHA_STATE_WORDS];
struct chacha_state chacha_state;
u32 k[CHACHA_KEY_WORDS];
__le64 iv[2];
bool ret;
@@ -179,11 +184,11 @@ bool chacha20poly1305_decrypt(u8 *dst, const u8 *src, const size_t src_len,
iv[0] = 0;
iv[1] = cpu_to_le64(nonce);

chacha_init(chacha_state, k, (u8 *)iv);
chacha_init(&chacha_state, k, (u8 *)iv);
ret = __chacha20poly1305_decrypt(dst, src, src_len, ad, ad_len,
chacha_state);
&chacha_state);

memzero_explicit(chacha_state, sizeof(chacha_state));
memzero_explicit(&chacha_state, sizeof(chacha_state));
memzero_explicit(iv, sizeof(iv));
memzero_explicit(k, sizeof(k));
return ret;
@@ -195,11 +200,11 @@ bool xchacha20poly1305_decrypt(u8 *dst, const u8 *src, const size_t src_len,
const u8 nonce[XCHACHA20POLY1305_NONCE_SIZE],
const u8 key[CHACHA20POLY1305_KEY_SIZE])
{
u32 chacha_state[CHACHA_STATE_WORDS];
struct chacha_state chacha_state;

xchacha_init(chacha_state, key, nonce);
xchacha_init(&chacha_state, key, nonce);
return __chacha20poly1305_decrypt(dst, src, src_len, ad, ad_len,
chacha_state);
&chacha_state);
}
EXPORT_SYMBOL(xchacha20poly1305_decrypt);

@@ -213,7 +218,7 @@ bool chacha20poly1305_crypt_sg_inplace(struct scatterlist *src,
{
const u8 *pad0 = page_address(ZERO_PAGE(0));
struct poly1305_desc_ctx poly1305_state;
u32 chacha_state[CHACHA_STATE_WORDS];
struct chacha_state chacha_state;
struct sg_mapping_iter miter;
size_t partial = 0;
unsigned int flags;
@@ -240,8 +245,8 @@ bool chacha20poly1305_crypt_sg_inplace(struct scatterlist *src,
b.iv[0] = 0;
b.iv[1] = cpu_to_le64(nonce);

chacha_init(chacha_state, b.k, (u8 *)b.iv);
chacha20_crypt(chacha_state, b.block0, pad0, sizeof(b.block0));
chacha_init(&chacha_state, b.k, (u8 *)b.iv);
chacha20_crypt(&chacha_state, b.block0, pad0, sizeof(b.block0));
poly1305_init(&poly1305_state, b.block0);

if (unlikely(ad_len)) {
@@ -276,13 +281,13 @@ bool chacha20poly1305_crypt_sg_inplace(struct scatterlist *src,

if (unlikely(length < sl))
l &= ~(CHACHA_BLOCK_SIZE - 1);
chacha20_crypt(chacha_state, addr, addr, l);
chacha20_crypt(&chacha_state, addr, addr, l);
addr += l;
length -= l;
}

if (unlikely(length > 0)) {
chacha20_crypt(chacha_state, b.chacha_stream, pad0,
chacha20_crypt(&chacha_state, b.chacha_stream, pad0,
CHACHA_BLOCK_SIZE);
crypto_xor(addr, b.chacha_stream, length);
partial = length;
@@ -323,7 +328,7 @@ bool chacha20poly1305_crypt_sg_inplace(struct scatterlist *src,
!crypto_memneq(b.mac[0], b.mac[1], POLY1305_DIGEST_SIZE);
}

memzero_explicit(chacha_state, sizeof(chacha_state));
memzero_explicit(&chacha_state, sizeof(chacha_state));
memzero_explicit(&b, sizeof(b));

return ret;

@@ -12,7 +12,7 @@
#include <crypto/algapi.h> // for crypto_xor_cpy
#include <crypto/chacha.h>

void chacha_crypt_generic(u32 *state, u8 *dst, const u8 *src,
void chacha_crypt_generic(struct chacha_state *state, u8 *dst, const u8 *src,
unsigned int bytes, int nrounds)
{
/* aligned to potentially speed up crypto_xor() */

@@ -50,7 +50,7 @@ struct skcipher_def {
/* Perform cipher operations with the chacha lib */
static int test_lib_chacha(u8 *revert, u8 *cipher, u8 *plain)
{
u32 chacha_state[CHACHA_STATE_WORDS];
struct chacha_state chacha_state;
u8 iv[16], key[32];
u64 start, end;

@@ -66,10 +66,10 @@ static int test_lib_chacha(u8 *revert, u8 *cipher, u8 *plain)
}

/* Encrypt */
chacha_init(chacha_state, (u32 *)key, iv);
chacha_init(&chacha_state, (u32 *)key, iv);

start = ktime_get_ns();
chacha_crypt_arch(chacha_state, cipher, plain, data_size, 20);
chacha_crypt_arch(&chacha_state, cipher, plain, data_size, 20);
end = ktime_get_ns();

@@ -81,10 +81,10 @@ static int test_lib_chacha(u8 *revert, u8 *cipher, u8 *plain)
pr_info("lib encryption took: %lld nsec", end - start);

/* Decrypt */
chacha_init(chacha_state, (u32 *)key, iv);
chacha_init(&chacha_state, (u32 *)key, iv);

start = ktime_get_ns();
chacha_crypt_arch(chacha_state, revert, cipher, data_size, 20);
chacha_crypt_arch(&chacha_state, revert, cipher, data_size, 20);
end = ktime_get_ns();

if (debug)