Merge remote-tracking branch 'remotes/berrange/tags/qcrypto-next-pull-request' into staging

Improve performance of XTS cipher mode impl

The XTS cipher mode performance is approximately doubled and test
coverage is improved.

# gpg: Signature made Wed 24 Oct 2018 19:05:08 BST
# gpg:                using RSA key BE86EBB415104FDF
# gpg: Good signature from "Daniel P. Berrange <dan@berrange.com>"
# gpg:                 aka "Daniel P. Berrange <berrange@redhat.com>"
# Primary key fingerprint: DAF3 A6FD B26B 6291 2D0E  8E3F BE86 EBB4 1510 4FDF

* remotes/berrange/tags/qcrypto-next-pull-request:
  crypto: add testing for unaligned buffers with XTS cipher mode
  crypto: refactor XTS cipher mode test suite
  crypto: annotate xts_tweak_encdec as inlineable
  crypto: convert xts_mult_x to use xts_uint128 type
  crypto: convert xts_tweak_encdec to use xts_uint128 type
  crypto: introduce a xts_uint128 data type
  crypto: remove code duplication in tweak encrypt/decrypt
  crypto: expand algorithm coverage for cipher benchmark

Signed-off-by: Peter Maydell <peter.maydell@linaro.org>
Commit a4d710251f by Peter Maydell, 2018-10-24 22:08:42 +01:00
3 changed files with 397 additions and 168 deletions
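
The heart of the speed-up is visible in the crypto/xts.c diff below: the tweak is kept in a 16-byte union (xts_uint128) so the XOR and the GF(2^128) multiply-by-x operate on two uint64_t words instead of sixteen bytes. As a standalone sketch of why the word-wise multiply matches the old byte-wise one (illustrative code, not the QEMU implementation; it assumes a little-endian host, whereas the real patch uses cpu_to_le64s()/le64_to_cpus() from qemu/bswap.h to stay portable):

    #include <assert.h>
    #include <stdint.h>
    #include <string.h>

    /* Byte-wise GF(2^128) multiply-by-x, in the style of the old xts_mult_x(). */
    static void mult_x_bytes(uint8_t *I)
    {
        int x;
        uint8_t t, tt = 0;

        for (x = t = 0; x < 16; x++) {
            tt = I[x] >> 7;
            I[x] = ((I[x] << 1) | t) & 0xFF;
            t = tt;
        }
        if (tt) {
            I[0] ^= 0x87;
        }
    }

    /* The same operation on two 64-bit halves (little-endian host assumed). */
    static void mult_x_words(uint8_t *I)
    {
        uint64_t lo, hi, carry;

        memcpy(&lo, I, 8);      /* bits 0..63 of the tweak */
        memcpy(&hi, I + 8, 8);  /* bits 64..127 */

        carry = lo >> 63;
        lo <<= 1;
        if (hi >> 63) {         /* bit 127 falls off the top: reduce */
            lo ^= 0x87;
        }
        hi = (hi << 1) | carry;

        memcpy(I, &lo, 8);
        memcpy(I + 8, &hi, 8);
    }

    int main(void)
    {
        uint8_t a[16], b[16];
        int i;

        for (i = 0; i < 16; i++) {
            a[i] = b[i] = (uint8_t)(i * 37 + 1);
        }
        mult_x_bytes(a);
        mult_x_words(b);
        assert(memcmp(a, b, 16) == 0);
        return 0;
    }

The 0x87 constant is the low part of the XTS reduction polynomial x^128 + x^7 + x^2 + x + 1, applied whenever bit 127 is shifted out.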

crypto/xts.c

@@ -24,52 +24,75 @@
  */
 
 #include "qemu/osdep.h"
+#include "qemu/bswap.h"
 #include "crypto/xts.h"
 
-static void xts_mult_x(uint8_t *I)
-{
-    int x;
-    uint8_t t, tt;
+typedef union {
+    uint8_t b[XTS_BLOCK_SIZE];
+    uint64_t u[2];
+} xts_uint128;
 
-    for (x = t = 0; x < 16; x++) {
-        tt = I[x] >> 7;
-        I[x] = ((I[x] << 1) | t) & 0xFF;
-        t = tt;
-    }
-    if (tt) {
-        I[0] ^= 0x87;
-    }
-}
+static inline void xts_uint128_xor(xts_uint128 *D,
+                                   const xts_uint128 *S1,
+                                   const xts_uint128 *S2)
+{
+    D->u[0] = S1->u[0] ^ S2->u[0];
+    D->u[1] = S1->u[1] ^ S2->u[1];
+}
+
+static inline void xts_uint128_cpu_to_les(xts_uint128 *v)
+{
+    cpu_to_le64s(&v->u[0]);
+    cpu_to_le64s(&v->u[1]);
+}
+
+static inline void xts_uint128_le_to_cpus(xts_uint128 *v)
+{
+    le64_to_cpus(&v->u[0]);
+    le64_to_cpus(&v->u[1]);
+}
+
+static void xts_mult_x(xts_uint128 *I)
+{
+    uint64_t tt;
+
+    xts_uint128_le_to_cpus(I);
+
+    tt = I->u[0] >> 63;
+    I->u[0] <<= 1;
+
+    if (I->u[1] >> 63) {
+        I->u[0] ^= 0x87;
+    }
+    I->u[1] <<= 1;
+    I->u[1] |= tt;
+
+    xts_uint128_cpu_to_les(I);
+}
 
 /**
- * xts_tweak_uncrypt:
+ * xts_tweak_encdec:
  * @param ctxt: the cipher context
  * @param func: the cipher function
- * @src: buffer providing the cipher text of XTS_BLOCK_SIZE bytes
- * @dst: buffer to output the plain text of XTS_BLOCK_SIZE bytes
+ * @src: buffer providing the input text of XTS_BLOCK_SIZE bytes
+ * @dst: buffer to output the output text of XTS_BLOCK_SIZE bytes
  * @iv: the initialization vector tweak of XTS_BLOCK_SIZE bytes
  *
- * Decrypt data with a tweak
+ * Encrypt/decrypt data with a tweak
  */
-static void xts_tweak_decrypt(const void *ctx,
-                              xts_cipher_func *func,
-                              const uint8_t *src,
-                              uint8_t *dst,
-                              uint8_t *iv)
+static inline void xts_tweak_encdec(const void *ctx,
+                                    xts_cipher_func *func,
+                                    const xts_uint128 *src,
+                                    xts_uint128 *dst,
+                                    xts_uint128 *iv)
 {
-    unsigned long x;
-
     /* tweak encrypt block i */
-    for (x = 0; x < XTS_BLOCK_SIZE; x++) {
-        dst[x] = src[x] ^ iv[x];
-    }
+    xts_uint128_xor(dst, src, iv);
 
-    func(ctx, XTS_BLOCK_SIZE, dst, dst);
+    func(ctx, XTS_BLOCK_SIZE, dst->b, dst->b);
 
-    for (x = 0; x < XTS_BLOCK_SIZE; x++) {
-        dst[x] = dst[x] ^ iv[x];
-    }
+    xts_uint128_xor(dst, dst, iv);
 
     /* LFSR the tweak */
     xts_mult_x(iv);
@@ -85,7 +108,7 @@ void xts_decrypt(const void *datactx,
                  uint8_t *dst,
                  const uint8_t *src)
 {
-    uint8_t PP[XTS_BLOCK_SIZE], CC[XTS_BLOCK_SIZE], T[XTS_BLOCK_SIZE];
+    xts_uint128 PP, CC, T;
     unsigned long i, m, mo, lim;
 
     /* get number of blocks */
@@ -102,72 +125,53 @@ void xts_decrypt(const void *datactx,
     }
 
     /* encrypt the iv */
-    encfunc(tweakctx, XTS_BLOCK_SIZE, T, iv);
+    encfunc(tweakctx, XTS_BLOCK_SIZE, T.b, iv);
 
-    for (i = 0; i < lim; i++) {
-        xts_tweak_decrypt(datactx, decfunc, src, dst, T);
-
-        src += XTS_BLOCK_SIZE;
-        dst += XTS_BLOCK_SIZE;
+    if (QEMU_PTR_IS_ALIGNED(src, sizeof(uint64_t)) &&
+        QEMU_PTR_IS_ALIGNED(dst, sizeof(uint64_t))) {
+        xts_uint128 *S = (xts_uint128 *)src;
+        xts_uint128 *D = (xts_uint128 *)dst;
+
+        for (i = 0; i < lim; i++, S++, D++) {
+            xts_tweak_encdec(datactx, decfunc, S, D, &T);
+        }
+    } else {
+        xts_uint128 D;
+
+        for (i = 0; i < lim; i++) {
+            memcpy(&D, src, XTS_BLOCK_SIZE);
+            xts_tweak_encdec(datactx, decfunc, &D, &D, &T);
+            memcpy(dst, &D, XTS_BLOCK_SIZE);
+
+            src += XTS_BLOCK_SIZE;
+            dst += XTS_BLOCK_SIZE;
+        }
     }
 
     /* if length is not a multiple of XTS_BLOCK_SIZE then */
     if (mo > 0) {
-        memcpy(CC, T, XTS_BLOCK_SIZE);
-        xts_mult_x(CC);
+        xts_uint128 S, D;
+
+        memcpy(&CC, &T, XTS_BLOCK_SIZE);
+        xts_mult_x(&CC);
 
         /* PP = tweak decrypt block m-1 */
-        xts_tweak_decrypt(datactx, decfunc, src, PP, CC);
+        memcpy(&S, src, XTS_BLOCK_SIZE);
+        xts_tweak_encdec(datactx, decfunc, &S, &PP, &CC);
 
         /* Pm = first length % XTS_BLOCK_SIZE bytes of PP */
         for (i = 0; i < mo; i++) {
-            CC[i] = src[XTS_BLOCK_SIZE + i];
-            dst[XTS_BLOCK_SIZE + i] = PP[i];
+            CC.b[i] = src[XTS_BLOCK_SIZE + i];
+            dst[XTS_BLOCK_SIZE + i] = PP.b[i];
         }
         for (; i < XTS_BLOCK_SIZE; i++) {
-            CC[i] = PP[i];
+            CC.b[i] = PP.b[i];
         }
 
         /* Pm-1 = Tweak uncrypt CC */
-        xts_tweak_decrypt(datactx, decfunc, CC, dst, T);
+        xts_tweak_encdec(datactx, decfunc, &CC, &D, &T);
+        memcpy(dst, &D, XTS_BLOCK_SIZE);
     }
 
     /* Decrypt the iv back */
-    decfunc(tweakctx, XTS_BLOCK_SIZE, iv, T);
-}
-
-
-/**
- * xts_tweak_crypt:
- * @param ctxt: the cipher context
- * @param func: the cipher function
- * @src: buffer providing the plain text of XTS_BLOCK_SIZE bytes
- * @dst: buffer to output the cipher text of XTS_BLOCK_SIZE bytes
- * @iv: the initialization vector tweak of XTS_BLOCK_SIZE bytes
- *
- * Encrypt data with a tweak
- */
-static void xts_tweak_encrypt(const void *ctx,
-                              xts_cipher_func *func,
-                              const uint8_t *src,
-                              uint8_t *dst,
-                              uint8_t *iv)
-{
-    unsigned long x;
-
-    /* tweak encrypt block i */
-    for (x = 0; x < XTS_BLOCK_SIZE; x++) {
-        dst[x] = src[x] ^ iv[x];
-    }
-
-    func(ctx, XTS_BLOCK_SIZE, dst, dst);
-
-    for (x = 0; x < XTS_BLOCK_SIZE; x++) {
-        dst[x] = dst[x] ^ iv[x];
-    }
-
-    /* LFSR the tweak */
-    xts_mult_x(iv);
+    decfunc(tweakctx, XTS_BLOCK_SIZE, iv, T.b);
 }
@@ -180,7 +184,7 @@ void xts_encrypt(const void *datactx,
                  uint8_t *dst,
                  const uint8_t *src)
 {
-    uint8_t PP[XTS_BLOCK_SIZE], CC[XTS_BLOCK_SIZE], T[XTS_BLOCK_SIZE];
+    xts_uint128 PP, CC, T;
     unsigned long i, m, mo, lim;
 
     /* get number of blocks */
@@ -197,34 +201,50 @@ void xts_encrypt(const void *datactx,
     }
 
     /* encrypt the iv */
-    encfunc(tweakctx, XTS_BLOCK_SIZE, T, iv);
+    encfunc(tweakctx, XTS_BLOCK_SIZE, T.b, iv);
 
-    for (i = 0; i < lim; i++) {
-        xts_tweak_encrypt(datactx, encfunc, src, dst, T);
-
-        dst += XTS_BLOCK_SIZE;
-        src += XTS_BLOCK_SIZE;
+    if (QEMU_PTR_IS_ALIGNED(src, sizeof(uint64_t)) &&
+        QEMU_PTR_IS_ALIGNED(dst, sizeof(uint64_t))) {
+        xts_uint128 *S = (xts_uint128 *)src;
+        xts_uint128 *D = (xts_uint128 *)dst;
+
+        for (i = 0; i < lim; i++, S++, D++) {
+            xts_tweak_encdec(datactx, encfunc, S, D, &T);
+        }
+    } else {
+        xts_uint128 D;
+
+        for (i = 0; i < lim; i++) {
+            memcpy(&D, src, XTS_BLOCK_SIZE);
+            xts_tweak_encdec(datactx, encfunc, &D, &D, &T);
+            memcpy(dst, &D, XTS_BLOCK_SIZE);
+
+            dst += XTS_BLOCK_SIZE;
+            src += XTS_BLOCK_SIZE;
+        }
     }
 
     /* if length is not a multiple of XTS_BLOCK_SIZE then */
     if (mo > 0) {
+        xts_uint128 S, D;
+
         /* CC = tweak encrypt block m-1 */
-        xts_tweak_encrypt(datactx, encfunc, src, CC, T);
+        memcpy(&S, src, XTS_BLOCK_SIZE);
+        xts_tweak_encdec(datactx, encfunc, &S, &CC, &T);
 
         /* Cm = first length % XTS_BLOCK_SIZE bytes of CC */
         for (i = 0; i < mo; i++) {
-            PP[i] = src[XTS_BLOCK_SIZE + i];
-            dst[XTS_BLOCK_SIZE + i] = CC[i];
+            PP.b[i] = src[XTS_BLOCK_SIZE + i];
+            dst[XTS_BLOCK_SIZE + i] = CC.b[i];
         }
 
         for (; i < XTS_BLOCK_SIZE; i++) {
-            PP[i] = CC[i];
+            PP.b[i] = CC.b[i];
         }
 
         /* Cm-1 = Tweak encrypt PP */
-        xts_tweak_encrypt(datactx, encfunc, PP, dst, T);
+        xts_tweak_encdec(datactx, encfunc, &PP, &D, &T);
+        memcpy(dst, &D, XTS_BLOCK_SIZE);
     }
 
     /* Decrypt the iv back */
-    decfunc(tweakctx, XTS_BLOCK_SIZE, iv, T);
+    decfunc(tweakctx, XTS_BLOCK_SIZE, iv, T.b);
 }
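
One point worth calling out in the hunks above: the fast path casts src/dst to xts_uint128 * only when both pointers are 8-byte aligned, and otherwise bounces each block through a local xts_uint128 with memcpy(), so the uint64_t members are never accessed through a misaligned pointer. This is also what the new "unaligned" test cases further down exercise. The same pattern in isolation, as a rough sketch; process_block() and PTR_IS_ALIGNED() here are illustrative stand-ins for xts_tweak_encdec() and QEMU_PTR_IS_ALIGNED():

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    typedef union {
        uint8_t b[16];
        uint64_t u[2];
    } xts_uint128;

    /* Illustrative per-block operation standing in for xts_tweak_encdec(). */
    static void process_block(xts_uint128 *dst, const xts_uint128 *src)
    {
        dst->u[0] = src->u[0] ^ UINT64_C(0x0123456789abcdef);
        dst->u[1] = src->u[1] ^ UINT64_C(0xfedcba9876543210);
    }

    #define PTR_IS_ALIGNED(p, n) (((uintptr_t)(p) % (n)) == 0)

    /* Walk nblocks 16-byte blocks, using direct xts_uint128 access only when
     * both pointers are 8-byte aligned; otherwise copy through a local so the
     * uint64_t members are never read through a misaligned pointer. */
    static void process_buffer(uint8_t *dst, const uint8_t *src, size_t nblocks)
    {
        size_t i;

        if (PTR_IS_ALIGNED(src, sizeof(uint64_t)) &&
            PTR_IS_ALIGNED(dst, sizeof(uint64_t))) {
            const xts_uint128 *S = (const xts_uint128 *)src;
            xts_uint128 *D = (xts_uint128 *)dst;

            for (i = 0; i < nblocks; i++, S++, D++) {
                process_block(D, S);
            }
        } else {
            xts_uint128 T;

            for (i = 0; i < nblocks; i++) {
                memcpy(&T, src + i * 16, 16);
                process_block(&T, &T);
                memcpy(dst + i * 16, &T, 16);
            }
        }
    }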

tests/benchmark-crypto-cipher.c

@@ -15,17 +15,27 @@
 #include "crypto/init.h"
 #include "crypto/cipher.h"
 
-static void test_cipher_speed(const void *opaque)
+static void test_cipher_speed(size_t chunk_size,
+                              QCryptoCipherMode mode,
+                              QCryptoCipherAlgorithm alg)
 {
     QCryptoCipher *cipher;
     Error *err = NULL;
     double total = 0.0;
-    size_t chunk_size = (size_t)opaque;
     uint8_t *key = NULL, *iv = NULL;
     uint8_t *plaintext = NULL, *ciphertext = NULL;
-    size_t nkey = qcrypto_cipher_get_key_len(QCRYPTO_CIPHER_ALG_AES_128);
-    size_t niv = qcrypto_cipher_get_iv_len(QCRYPTO_CIPHER_ALG_AES_128,
-                                           QCRYPTO_CIPHER_MODE_CBC);
+    size_t nkey;
+    size_t niv;
+
+    if (!qcrypto_cipher_supports(alg, mode)) {
+        return;
+    }
+
+    nkey = qcrypto_cipher_get_key_len(alg);
+    niv = qcrypto_cipher_get_iv_len(alg, mode);
+    if (mode == QCRYPTO_CIPHER_MODE_XTS) {
+        nkey *= 2;
+    }
 
     key = g_new0(uint8_t, nkey);
     memset(key, g_test_rand_int(), nkey);
@@ -38,11 +48,11 @@ static void test_cipher_speed(const void *opaque)
     plaintext = g_new0(uint8_t, chunk_size);
     memset(plaintext, g_test_rand_int(), chunk_size);
 
-    cipher = qcrypto_cipher_new(QCRYPTO_CIPHER_ALG_AES_128,
-                                QCRYPTO_CIPHER_MODE_CBC,
+    cipher = qcrypto_cipher_new(alg, mode,
                                 key, nkey, &err);
     g_assert(cipher != NULL);
 
+    if (mode != QCRYPTO_CIPHER_MODE_ECB)
     g_assert(qcrypto_cipher_setiv(cipher,
                                   iv, niv,
                                   &err) == 0);
@@ -55,13 +65,26 @@ static void test_cipher_speed(const void *opaque)
                                         chunk_size,
                                         &err) == 0);
         total += chunk_size;
-    } while (g_test_timer_elapsed() < 5.0);
+    } while (g_test_timer_elapsed() < 1.0);
 
     total /= MiB;
-    g_print("cbc(aes128): ");
-    g_print("Testing chunk_size %zu bytes ", chunk_size);
-    g_print("done: %.2f MB in %.2f secs: ", total, g_test_timer_last());
-    g_print("%.2f MB/sec\n", total / g_test_timer_last());
+    g_print("Enc chunk %zu bytes ", chunk_size);
+    g_print("%.2f MB/sec ", total / g_test_timer_last());
+
+    total = 0.0;
+    g_test_timer_start();
+    do {
+        g_assert(qcrypto_cipher_decrypt(cipher,
+                                        plaintext,
+                                        ciphertext,
+                                        chunk_size,
+                                        &err) == 0);
+        total += chunk_size;
+    } while (g_test_timer_elapsed() < 1.0);
+
+    total /= MiB;
+    g_print("Dec chunk %zu bytes ", chunk_size);
+    g_print("%.2f MB/sec ", total / g_test_timer_last());
 
     qcrypto_cipher_free(cipher);
     g_free(plaintext);
@@ -70,19 +93,99 @@ static void test_cipher_speed(const void *opaque)
     g_free(key);
 }
 
+static void test_cipher_speed_ecb_aes_128(const void *opaque)
+{
+    size_t chunk_size = (size_t)opaque;
+    test_cipher_speed(chunk_size,
+                      QCRYPTO_CIPHER_MODE_ECB,
+                      QCRYPTO_CIPHER_ALG_AES_128);
+}
+
+static void test_cipher_speed_ecb_aes_256(const void *opaque)
+{
+    size_t chunk_size = (size_t)opaque;
+    test_cipher_speed(chunk_size,
+                      QCRYPTO_CIPHER_MODE_ECB,
+                      QCRYPTO_CIPHER_ALG_AES_256);
+}
+
+static void test_cipher_speed_cbc_aes_128(const void *opaque)
+{
+    size_t chunk_size = (size_t)opaque;
+    test_cipher_speed(chunk_size,
+                      QCRYPTO_CIPHER_MODE_CBC,
+                      QCRYPTO_CIPHER_ALG_AES_128);
+}
+
+static void test_cipher_speed_cbc_aes_256(const void *opaque)
+{
+    size_t chunk_size = (size_t)opaque;
+    test_cipher_speed(chunk_size,
+                      QCRYPTO_CIPHER_MODE_CBC,
+                      QCRYPTO_CIPHER_ALG_AES_256);
+}
+
+static void test_cipher_speed_ctr_aes_128(const void *opaque)
+{
+    size_t chunk_size = (size_t)opaque;
+    test_cipher_speed(chunk_size,
+                      QCRYPTO_CIPHER_MODE_CTR,
+                      QCRYPTO_CIPHER_ALG_AES_128);
+}
+
+static void test_cipher_speed_ctr_aes_256(const void *opaque)
+{
+    size_t chunk_size = (size_t)opaque;
+    test_cipher_speed(chunk_size,
+                      QCRYPTO_CIPHER_MODE_CTR,
+                      QCRYPTO_CIPHER_ALG_AES_256);
+}
+
+static void test_cipher_speed_xts_aes_128(const void *opaque)
+{
+    size_t chunk_size = (size_t)opaque;
+    test_cipher_speed(chunk_size,
+                      QCRYPTO_CIPHER_MODE_XTS,
+                      QCRYPTO_CIPHER_ALG_AES_128);
+}
+
+static void test_cipher_speed_xts_aes_256(const void *opaque)
+{
+    size_t chunk_size = (size_t)opaque;
+    test_cipher_speed(chunk_size,
+                      QCRYPTO_CIPHER_MODE_XTS,
+                      QCRYPTO_CIPHER_ALG_AES_256);
+}
+
 int main(int argc, char **argv)
 {
-    size_t i;
-    char name[64];
-
     g_test_init(&argc, &argv, NULL);
     g_assert(qcrypto_init(NULL) == 0);
 
-    for (i = 512; i <= 64 * KiB; i *= 2) {
-        memset(name, 0 , sizeof(name));
-        snprintf(name, sizeof(name), "/crypto/cipher/speed-%zu", i);
-        g_test_add_data_func(name, (void *)i, test_cipher_speed);
-    }
+#define ADD_TEST(mode, cipher, keysize, chunk)                              \
+    g_test_add_data_func(                                                   \
+        "/crypto/cipher/" #mode "-" #cipher "-" #keysize "/chunk-" #chunk,  \
+        (void *)chunk,                                                      \
+        test_cipher_speed_ ## mode ## _ ## cipher ## _ ## keysize)
+
+#define ADD_TESTS(chunk)                        \
+    do {                                        \
+        ADD_TEST(ecb, aes, 128, chunk);         \
+        ADD_TEST(ecb, aes, 256, chunk);         \
+        ADD_TEST(cbc, aes, 128, chunk);         \
+        ADD_TEST(cbc, aes, 256, chunk);         \
+        ADD_TEST(ctr, aes, 128, chunk);         \
+        ADD_TEST(ctr, aes, 256, chunk);         \
+        ADD_TEST(xts, aes, 128, chunk);         \
+        ADD_TEST(xts, aes, 256, chunk);         \
+    } while (0)
+
+    ADD_TESTS(512);
+    ADD_TESTS(4096);
+    ADD_TESTS(16384);
+    ADD_TESTS(65536);
 
     return g_test_run();
 }
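
For reference, each ADD_TEST() above is plain token pasting; ADD_TEST(xts, aes, 256, 4096) inside ADD_TESTS() expands to roughly:

    g_test_add_data_func("/crypto/cipher/xts-aes-256/chunk-4096",
                         (void *)4096,
                         test_cipher_speed_xts_aes_256);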

tests/test-crypto-xts.c

@@ -1,7 +1,7 @@
 /*
  * QEMU Crypto XTS cipher mode
  *
- * Copyright (c) 2015-2016 Red Hat, Inc.
+ * Copyright (c) 2015-2018 Red Hat, Inc.
  *
  * This library is free software; you can redistribute it and/or
  * modify it under the terms of the GNU Lesser General Public
@@ -340,23 +340,11 @@ static void test_xts_aes_decrypt(const void *ctx,
 static void test_xts(const void *opaque)
 {
     const QCryptoXTSTestData *data = opaque;
-    unsigned char out[512], Torg[16], T[16];
+    uint8_t out[512], Torg[16], T[16];
     uint64_t seq;
-    int j;
-    unsigned long len;
     struct TestAES aesdata;
     struct TestAES aestweak;
 
-    for (j = 0; j < 2; j++) {
-        /* skip the cases where
-         * the length is smaller than 2*blocklen
-         * or the length is not a multiple of 32
-         */
-        if ((j == 1) && ((data->PTLEN < 32) || (data->PTLEN % 32))) {
-            continue;
-        }
-        len = data->PTLEN / 2;
-
     AES_set_encrypt_key(data->key1, data->keylen / 2 * 8, &aesdata.enc);
     AES_set_decrypt_key(data->key1, data->keylen / 2 * 8, &aesdata.dec);
     AES_set_encrypt_key(data->key2, data->keylen / 2 * 8, &aestweak.enc);
@@ -367,12 +355,42 @@
     memset(Torg + 8, 0, 8);
 
     memcpy(T, Torg, sizeof(T));
-        if (j == 0) {
     xts_encrypt(&aesdata, &aestweak,
                 test_xts_aes_encrypt,
                 test_xts_aes_decrypt,
                 T, data->PTLEN, out, data->PTX);
-        } else {
+
+    g_assert(memcmp(out, data->CTX, data->PTLEN) == 0);
+
+    memcpy(T, Torg, sizeof(T));
+    xts_decrypt(&aesdata, &aestweak,
+                test_xts_aes_encrypt,
+                test_xts_aes_decrypt,
+                T, data->PTLEN, out, data->CTX);
+
+    g_assert(memcmp(out, data->PTX, data->PTLEN) == 0);
+}
+
+static void test_xts_split(const void *opaque)
+{
+    const QCryptoXTSTestData *data = opaque;
+    uint8_t out[512], Torg[16], T[16];
+    uint64_t seq;
+    unsigned long len = data->PTLEN / 2;
+    struct TestAES aesdata;
+    struct TestAES aestweak;
+
+    AES_set_encrypt_key(data->key1, data->keylen / 2 * 8, &aesdata.enc);
+    AES_set_decrypt_key(data->key1, data->keylen / 2 * 8, &aesdata.dec);
+    AES_set_encrypt_key(data->key2, data->keylen / 2 * 8, &aestweak.enc);
+    AES_set_decrypt_key(data->key2, data->keylen / 2 * 8, &aestweak.dec);
+
+    seq = data->seqnum;
+    STORE64L(seq, Torg);
+    memset(Torg + 8, 0, 8);
+
+    memcpy(T, Torg, sizeof(T));
     xts_encrypt(&aesdata, &aestweak,
                 test_xts_aes_encrypt,
                 test_xts_aes_decrypt,
@@ -381,17 +399,10 @@
                 test_xts_aes_encrypt,
                 test_xts_aes_decrypt,
                 T, len, &out[len], &data->PTX[len]);
-        }
 
     g_assert(memcmp(out, data->CTX, data->PTLEN) == 0);
 
     memcpy(T, Torg, sizeof(T));
-        if (j == 0) {
-            xts_decrypt(&aesdata, &aestweak,
-                        test_xts_aes_encrypt,
-                        test_xts_aes_decrypt,
-                        T, data->PTLEN, out, data->CTX);
-        } else {
     xts_decrypt(&aesdata, &aestweak,
                 test_xts_aes_encrypt,
                 test_xts_aes_decrypt,
@@ -400,10 +411,90 @@
                 test_xts_aes_encrypt,
                 test_xts_aes_decrypt,
                 T, len, &out[len], &data->CTX[len]);
-        }
 
     g_assert(memcmp(out, data->PTX, data->PTLEN) == 0);
-    }
 }
+
+static void test_xts_unaligned(const void *opaque)
+{
+#define BAD_ALIGN 3
+    const QCryptoXTSTestData *data = opaque;
+    uint8_t in[512 + BAD_ALIGN], out[512 + BAD_ALIGN];
+    uint8_t Torg[16], T[16 + BAD_ALIGN];
+    uint64_t seq;
+    struct TestAES aesdata;
+    struct TestAES aestweak;
+
+    AES_set_encrypt_key(data->key1, data->keylen / 2 * 8, &aesdata.enc);
+    AES_set_decrypt_key(data->key1, data->keylen / 2 * 8, &aesdata.dec);
+    AES_set_encrypt_key(data->key2, data->keylen / 2 * 8, &aestweak.enc);
+    AES_set_decrypt_key(data->key2, data->keylen / 2 * 8, &aestweak.dec);
+
+    seq = data->seqnum;
+    STORE64L(seq, Torg);
+    memset(Torg + 8, 0, 8);
+
+    /* IV not aligned */
+    memcpy(T + BAD_ALIGN, Torg, 16);
+    memcpy(in, data->PTX, data->PTLEN);
+    xts_encrypt(&aesdata, &aestweak,
+                test_xts_aes_encrypt,
+                test_xts_aes_decrypt,
+                T + BAD_ALIGN, data->PTLEN, out, in);
+    g_assert(memcmp(out, data->CTX, data->PTLEN) == 0);
+
+    /* plain text not aligned */
+    memcpy(T, Torg, 16);
+    memcpy(in + BAD_ALIGN, data->PTX, data->PTLEN);
+    xts_encrypt(&aesdata, &aestweak,
+                test_xts_aes_encrypt,
+                test_xts_aes_decrypt,
+                T, data->PTLEN, out, in + BAD_ALIGN);
+    g_assert(memcmp(out, data->CTX, data->PTLEN) == 0);
+
+    /* cipher text not aligned */
+    memcpy(T, Torg, 16);
+    memcpy(in, data->PTX, data->PTLEN);
+    xts_encrypt(&aesdata, &aestweak,
+                test_xts_aes_encrypt,
+                test_xts_aes_decrypt,
+                T, data->PTLEN, out + BAD_ALIGN, in);
+    g_assert(memcmp(out + BAD_ALIGN, data->CTX, data->PTLEN) == 0);
+
+    /* IV not aligned */
+    memcpy(T + BAD_ALIGN, Torg, 16);
+    memcpy(in, data->CTX, data->PTLEN);
+    xts_decrypt(&aesdata, &aestweak,
+                test_xts_aes_encrypt,
+                test_xts_aes_decrypt,
+                T + BAD_ALIGN, data->PTLEN, out, in);
+    g_assert(memcmp(out, data->PTX, data->PTLEN) == 0);
+
+    /* cipher text not aligned */
+    memcpy(T, Torg, 16);
+    memcpy(in + BAD_ALIGN, data->CTX, data->PTLEN);
+    xts_decrypt(&aesdata, &aestweak,
+                test_xts_aes_encrypt,
+                test_xts_aes_decrypt,
+                T, data->PTLEN, out, in + BAD_ALIGN);
+    g_assert(memcmp(out, data->PTX, data->PTLEN) == 0);
+
+    /* plain text not aligned */
+    memcpy(T, Torg, 16);
+    memcpy(in, data->CTX, data->PTLEN);
+    xts_decrypt(&aesdata, &aestweak,
+                test_xts_aes_encrypt,
+                test_xts_aes_decrypt,
+                T, data->PTLEN, out + BAD_ALIGN, in);
+    g_assert(memcmp(out + BAD_ALIGN, data->PTX, data->PTLEN) == 0);
+}
@@ -416,7 +507,22 @@ int main(int argc, char **argv)
     g_assert(qcrypto_init(NULL) == 0);
 
     for (i = 0; i < G_N_ELEMENTS(test_data); i++) {
-        g_test_add_data_func(test_data[i].path, &test_data[i], test_xts);
+        gchar *path = g_strdup_printf("%s/basic", test_data[i].path);
+        g_test_add_data_func(path, &test_data[i], test_xts);
+        g_free(path);
+
+        /* skip the cases where the length is smaller than 2*blocklen
+         * or the length is not a multiple of 32
+         */
+        if ((test_data[i].PTLEN >= 32) && !(test_data[i].PTLEN % 32)) {
+            path = g_strdup_printf("%s/split", test_data[i].path);
+            g_test_add_data_func(path, &test_data[i], test_xts_split);
+            g_free(path);
+        }
+
+        path = g_strdup_printf("%s/unaligned", test_data[i].path);
+        g_test_add_data_func(path, &test_data[i], test_xts_unaligned);
+        g_free(path);
     }
 
     return g_test_run();