QEMU with a hack to log DMA reads & writes — jcs.org/2018/11/12/vfio

crypto: add testing for unaligned buffers with XTS cipher mode

Validate that the XTS cipher mode will correctly operate with plain
text, cipher text and IV buffers that are not 64-bit aligned.

Reviewed-by: Alberto Garcia <berto@igalia.com>
Signed-off-by: Daniel P. Berrangé <berrange@redhat.com>

+86
+86
tests/test-crypto-xts.c
··· 416 416 } 417 417 418 418 419 + static void test_xts_unaligned(const void *opaque) 420 + { 421 + #define BAD_ALIGN 3 422 + const QCryptoXTSTestData *data = opaque; 423 + uint8_t in[512 + BAD_ALIGN], out[512 + BAD_ALIGN]; 424 + uint8_t Torg[16], T[16 + BAD_ALIGN]; 425 + uint64_t seq; 426 + struct TestAES aesdata; 427 + struct TestAES aestweak; 428 + 429 + AES_set_encrypt_key(data->key1, data->keylen / 2 * 8, &aesdata.enc); 430 + AES_set_decrypt_key(data->key1, data->keylen / 2 * 8, &aesdata.dec); 431 + AES_set_encrypt_key(data->key2, data->keylen / 2 * 8, &aestweak.enc); 432 + AES_set_decrypt_key(data->key2, data->keylen / 2 * 8, &aestweak.dec); 433 + 434 + seq = data->seqnum; 435 + STORE64L(seq, Torg); 436 + memset(Torg + 8, 0, 8); 437 + 438 + /* IV not aligned */ 439 + memcpy(T + BAD_ALIGN, Torg, 16); 440 + memcpy(in, data->PTX, data->PTLEN); 441 + xts_encrypt(&aesdata, &aestweak, 442 + test_xts_aes_encrypt, 443 + test_xts_aes_decrypt, 444 + T + BAD_ALIGN, data->PTLEN, out, in); 445 + 446 + g_assert(memcmp(out, data->CTX, data->PTLEN) == 0); 447 + 448 + /* plain text not aligned */ 449 + memcpy(T, Torg, 16); 450 + memcpy(in + BAD_ALIGN, data->PTX, data->PTLEN); 451 + xts_encrypt(&aesdata, &aestweak, 452 + test_xts_aes_encrypt, 453 + test_xts_aes_decrypt, 454 + T, data->PTLEN, out, in + BAD_ALIGN); 455 + 456 + g_assert(memcmp(out, data->CTX, data->PTLEN) == 0); 457 + 458 + /* cipher text not aligned */ 459 + memcpy(T, Torg, 16); 460 + memcpy(in, data->PTX, data->PTLEN); 461 + xts_encrypt(&aesdata, &aestweak, 462 + test_xts_aes_encrypt, 463 + test_xts_aes_decrypt, 464 + T, data->PTLEN, out + BAD_ALIGN, in); 465 + 466 + g_assert(memcmp(out + BAD_ALIGN, data->CTX, data->PTLEN) == 0); 467 + 468 + 469 + /* IV not aligned */ 470 + memcpy(T + BAD_ALIGN, Torg, 16); 471 + memcpy(in, data->CTX, data->PTLEN); 472 + xts_decrypt(&aesdata, &aestweak, 473 + test_xts_aes_encrypt, 474 + test_xts_aes_decrypt, 475 + T + BAD_ALIGN, data->PTLEN, out, in); 476 + 477 + 
g_assert(memcmp(out, data->PTX, data->PTLEN) == 0); 478 + 479 + /* cipher text not aligned */ 480 + memcpy(T, Torg, 16); 481 + memcpy(in + BAD_ALIGN, data->CTX, data->PTLEN); 482 + xts_decrypt(&aesdata, &aestweak, 483 + test_xts_aes_encrypt, 484 + test_xts_aes_decrypt, 485 + T, data->PTLEN, out, in + BAD_ALIGN); 486 + 487 + g_assert(memcmp(out, data->PTX, data->PTLEN) == 0); 488 + 489 + /* plain text not aligned */ 490 + memcpy(T, Torg, 16); 491 + memcpy(in, data->CTX, data->PTLEN); 492 + xts_decrypt(&aesdata, &aestweak, 493 + test_xts_aes_encrypt, 494 + test_xts_aes_decrypt, 495 + T, data->PTLEN, out + BAD_ALIGN, in); 496 + 497 + g_assert(memcmp(out + BAD_ALIGN, data->PTX, data->PTLEN) == 0); 498 + } 499 + 500 + 419 501 int main(int argc, char **argv) 420 502 { 421 503 size_t i; ··· 437 519 g_test_add_data_func(path, &test_data[i], test_xts_split); 438 520 g_free(path); 439 521 } 522 + 523 + path = g_strdup_printf("%s/unaligned", test_data[i].path); 524 + g_test_add_data_func(path, &test_data[i], test_xts_unaligned); 525 + g_free(path); 440 526 } 441 527 442 528 return g_test_run();