crypto: tcrypt - Avoid using contiguous pages

If tcrypt is to be used as a run-time integrity test, it needs to be
more resilient in a hostile environment. For a start, allocating 32K
of physically contiguous memory is definitely out.

This patch teaches it to use separate pages instead.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Herbert Xu, 2008-07-31 12:23:53 +08:00
commit f139cfa7cd, parent a7581a01fb
2 changed files with 157 additions and 155 deletions
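The allocation strategy the patch moves to can be illustrated outside the kernel. Below is a minimal user-space sketch of the same idea: instead of one physically contiguous 32K buffer, keep an array of independent page-sized buffers and address a logical offset as page = offset >> PAGE_SHIFT plus the offset within that page. The helper name xbuf_ptr(), the use of malloc() in place of __get_free_page(), and the hard-coded 4K page size are assumptions for illustration only, not part of the patch.

/* Minimal user-space sketch: a paged buffer addressed like tcrypt's xbuf[]. */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define PAGE_SHIFT 12
#define PAGE_SIZE  (1UL << PAGE_SHIFT)   /* assume 4K pages for the demo */
#define XBUFSIZE   8                     /* 8 separate pages, not one 32K block */

static char *xbuf[XBUFSIZE];

/* Map a logical offset into the right page and the byte within that page. */
static char *xbuf_ptr(unsigned long off)
{
        return xbuf[off >> PAGE_SHIFT] + (off & (PAGE_SIZE - 1));
}

int main(void)
{
        unsigned long off = 2 * PAGE_SIZE + 500;   /* lands in the third page */
        int i;

        for (i = 0; i < XBUFSIZE; i++) {
                xbuf[i] = malloc(PAGE_SIZE);       /* stand-in for __get_free_page() */
                if (!xbuf[i])
                        return 1;
        }

        memcpy(xbuf_ptr(off), "tcrypt", 7);
        printf("logical offset %lu holds \"%s\"\n", off, xbuf_ptr(off));

        for (i = 0; i < XBUFSIZE; i++)
                free(xbuf[i]);
        return 0;
}

In the patch itself the same indexing appears as xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]), with WARN_ON() checks to ensure no test-vector chunk straddles a page boundary.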

--- a/crypto/tcrypt.c
+++ b/crypto/tcrypt.c
@@ -31,10 +31,10 @@
 #include "tcrypt.h"
 
 /*
- * Need to kmalloc() memory for testing.
+ * Need slab memory for testing (size in number of pages).
  */
-#define TVMEMSIZE	16384
-#define XBUFSIZE	32768
+#define TVMEMSIZE	4
+#define XBUFSIZE	8
 
 /*
  * Indexes into the xbuf to simulate cross-page access.
@@ -67,9 +67,9 @@ static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
 static unsigned int sec;
 
 static int mode;
-static char *xbuf;
-static char *axbuf;
-static char *tvmem;
+static char *xbuf[XBUFSIZE];
+static char *axbuf[XBUFSIZE];
+static char *tvmem[TVMEMSIZE];
 
 static char *check[] = {
 	"des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256",
@@ -133,9 +133,7 @@ static void test_hash(char *algo, struct hash_testvec *template,
 		printk("test %u:\n", i + 1);
 		memset(result, 0, 64);
 
-		hash_buff = kzalloc(template[i].psize, GFP_KERNEL);
-		if (!hash_buff)
-			continue;
+		hash_buff = xbuf[0];
 
 		memcpy(hash_buff, template[i].plaintext, template[i].psize);
 		sg_init_one(&sg[0], hash_buff, template[i].psize);
@@ -146,7 +144,6 @@ static void test_hash(char *algo, struct hash_testvec *template,
 						  template[i].ksize);
 			if (ret) {
 				printk("setkey() failed ret=%d\n", ret);
-				kfree(hash_buff);
 				goto out;
 			}
 		}
@@ -167,7 +164,6 @@ static void test_hash(char *algo, struct hash_testvec *template,
 			/* fall through */
 		default:
 			printk("digest () failed ret=%d\n", ret);
-			kfree(hash_buff);
 			goto out;
 		}
@@ -176,14 +172,10 @@ static void test_hash(char *algo, struct hash_testvec *template,
 		       memcmp(result, template[i].digest,
 			      crypto_ahash_digestsize(tfm)) ?
 		       "fail" : "pass");
-		kfree(hash_buff);
 	}
 
 	printk("testing %s across pages\n", algo);
 
-	/* setup the dummy buffer first */
-	memset(xbuf, 0, XBUFSIZE);
-
 	j = 0;
 	for (i = 0; i < tcount; i++) {
 		if (template[i].np) {
@@ -194,12 +186,13 @@ static void test_hash(char *algo, struct hash_testvec *template,
 			temp = 0;
 			sg_init_table(sg, template[i].np);
 			for (k = 0; k < template[i].np; k++) {
-				memcpy(&xbuf[IDX[k]],
-				       template[i].plaintext + temp,
-				       template[i].tap[k]);
+				sg_set_buf(&sg[k],
+					   memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
+						  offset_in_page(IDX[k]),
+						  template[i].plaintext + temp,
+						  template[i].tap[k]),
+					   template[i].tap[k]);
 				temp += template[i].tap[k];
-				sg_set_buf(&sg[k], &xbuf[IDX[k]],
-					   template[i].tap[k]);
 			}
 
 			if (template[i].ksize) {
@@ -298,15 +291,8 @@ static void test_aead(char *algo, int enc, struct aead_testvec *template,
 			/* some tepmplates have no input data but they will
 			 * touch input
 			 */
-			input = kzalloc(template[i].ilen + template[i].rlen, GFP_KERNEL);
-			if (!input)
-				continue;
-
-			assoc = kzalloc(template[i].alen, GFP_KERNEL);
-			if (!assoc) {
-				kfree(input);
-				continue;
-			}
+			input = xbuf[0];
+			assoc = axbuf[0];
 
 			memcpy(input, template[i].input, template[i].ilen);
 			memcpy(assoc, template[i].assoc, template[i].alen);
@@ -320,10 +306,7 @@ static void test_aead(char *algo, int enc, struct aead_testvec *template,
 				crypto_aead_set_flags(
 					tfm, CRYPTO_TFM_REQ_WEAK_KEY);
 
-			if (template[i].key)
-				key = template[i].key;
-			else
-				key = kzalloc(template[i].klen, GFP_KERNEL);
+			key = template[i].key;
 
 			ret = crypto_aead_setkey(tfm, key,
 						 template[i].klen);
@@ -332,7 +315,7 @@ static void test_aead(char *algo, int enc, struct aead_testvec *template,
 				       crypto_aead_get_flags(tfm));
 				if (!template[i].fail)
-					goto next_one;
+					continue;
 			}
 
 			authsize = abs(template[i].rlen - template[i].ilen);
@@ -341,7 +324,7 @@ static void test_aead(char *algo, int enc, struct aead_testvec *template,
 				printk(KERN_INFO
 				       "failed to set authsize = %u\n",
 				       authsize);
-				goto next_one;
+				continue;
 			}
 
 			sg_init_one(&sg[0], input,
@@ -373,7 +356,7 @@ static void test_aead(char *algo, int enc, struct aead_testvec *template,
 			default:
 				printk(KERN_INFO "%s () failed err=%d\n",
 				       e, -ret);
-				goto next_one;
+				continue;
 			}
 
 			q = input;
@@ -382,16 +365,10 @@ static void test_aead(char *algo, int enc, struct aead_testvec *template,
 			printk(KERN_INFO "enc/dec: %s\n",
 			       memcmp(q, template[i].result,
 				      template[i].rlen) ? "fail" : "pass");
-next_one:
-			if (!template[i].key)
-				kfree(key);
-			kfree(assoc);
-			kfree(input);
 		}
 	}
 
 	printk(KERN_INFO "\ntesting %s %s across pages (chunking)\n", algo, e);
-	memset(axbuf, 0, XBUFSIZE);
 
 	for (i = 0, j = 0; i < tcount; i++) {
 		if (template[i].np) {
@@ -418,18 +395,30 @@ next_one:
 				goto out;
 			}
 
-			memset(xbuf, 0, XBUFSIZE);
+			authsize = abs(template[i].rlen - template[i].ilen);
+
 			sg_init_table(sg, template[i].np);
 			for (k = 0, temp = 0; k < template[i].np; k++) {
-				memcpy(&xbuf[IDX[k]],
-				       template[i].input + temp,
-				       template[i].tap[k]);
+				if (WARN_ON(offset_in_page(IDX[k]) +
+					    template[i].tap[k] > PAGE_SIZE))
+					goto out;
+
+				q = xbuf[IDX[k] >> PAGE_SHIFT] +
+				    offset_in_page(IDX[k]);
+
+				memcpy(q, template[i].input + temp,
+				       template[i].tap[k]);
+
+				n = template[i].tap[k];
+				if (k == template[i].np - 1 && enc)
+					n += authsize;
+				if (offset_in_page(q) + n < PAGE_SIZE)
+					q[n] = 0;
+
+				sg_set_buf(&sg[k], q, template[i].tap[k]);
+
 				temp += template[i].tap[k];
-				sg_set_buf(&sg[k], &xbuf[IDX[k]],
-					   template[i].tap[k]);
 			}
 
-			authsize = abs(template[i].rlen - template[i].ilen);
 			ret = crypto_aead_setauthsize(tfm, authsize);
 			if (ret) {
 				printk(KERN_INFO
@@ -438,17 +427,24 @@ next_one:
 				goto out;
 			}
 
-			if (enc)
+			if (enc) {
+				if (WARN_ON(sg[k - 1].offset +
+					    sg[k - 1].length + authsize >
+					    PAGE_SIZE))
+					goto out;
+
 				sg[k - 1].length += authsize;
+			}
 
 			sg_init_table(asg, template[i].anp);
 			for (k = 0, temp = 0; k < template[i].anp; k++) {
-				memcpy(&axbuf[IDX[k]],
-				       template[i].assoc + temp,
-				       template[i].atap[k]);
-				temp += template[i].atap[k];
-				sg_set_buf(&asg[k], &axbuf[IDX[k]],
-					   template[i].atap[k]);
+				sg_set_buf(&asg[k],
+					   memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
+						  offset_in_page(IDX[k]),
+						  template[i].assoc + temp,
+						  template[i].atap[k]),
+					   template[i].atap[k]);
+				temp += template[i].atap[k];
 			}
 
 			aead_request_set_crypt(req, sg, sg,
@@ -481,7 +477,8 @@ next_one:
 			for (k = 0, temp = 0; k < template[i].np; k++) {
 				printk(KERN_INFO "page %u\n", k);
-				q = &xbuf[IDX[k]];
+				q = xbuf[IDX[k] >> PAGE_SHIFT] +
+				    offset_in_page(IDX[k]);
 
 				n = template[i].tap[k];
 				if (k == template[i].np - 1)
@@ -499,7 +496,8 @@ next_one:
 					else
 						n = 0;
 				} else {
-					for (n = 0; q[n]; n++)
+					for (n = 0; offset_in_page(q + n) &&
+						    q[n]; n++)
 						;
 				}
 				if (n) {
@@ -558,12 +556,6 @@ static void test_cipher(char *algo, int enc,
 	j = 0;
 	for (i = 0; i < tcount; i++) {
-		data = kzalloc(template[i].ilen, GFP_KERNEL);
-		if (!data)
-			continue;
-
-		memcpy(data, template[i].input, template[i].ilen);
-
 		if (template[i].iv)
 			memcpy(iv, template[i].iv, MAX_IVLEN);
 		else
@@ -574,6 +566,9 @@ static void test_cipher(char *algo, int enc,
 		printk("test %u (%d bit key):\n",
 		       j, template[i].klen * 8);
 
+		data = xbuf[0];
+		memcpy(data, template[i].input, template[i].ilen);
+
 		crypto_ablkcipher_clear_flags(tfm, ~0);
 		if (template[i].wk)
 			crypto_ablkcipher_set_flags(
@@ -585,10 +580,8 @@ static void test_cipher(char *algo, int enc,
 			printk("setkey() failed flags=%x\n",
 			       crypto_ablkcipher_get_flags(tfm));
 
-			if (!template[i].fail) {
-				kfree(data);
+			if (!template[i].fail)
 				goto out;
-			}
 		}
 
 		sg_init_one(&sg[0], data, template[i].ilen);
@@ -613,7 +606,6 @@ static void test_cipher(char *algo, int enc,
 			/* fall through */
 		default:
 			printk("%s () failed err=%d\n", e, -ret);
-			kfree(data);
 			goto out;
 		}
 
@@ -624,7 +616,6 @@ static void test_cipher(char *algo, int enc,
 		       memcmp(q, template[i].result,
 			      template[i].rlen) ? "fail" : "pass");
 		}
-		kfree(data);
 	}
 
 	printk("\ntesting %s %s across pages (chunking)\n", algo, e);
@@ -642,7 +633,6 @@ static void test_cipher(char *algo, int enc,
 		printk("test %u (%d bit key):\n",
 		       j, template[i].klen * 8);
 
-		memset(xbuf, 0, XBUFSIZE);
 		crypto_ablkcipher_clear_flags(tfm, ~0);
 		if (template[i].wk)
 			crypto_ablkcipher_set_flags(
@@ -661,12 +651,23 @@ static void test_cipher(char *algo, int enc,
 		temp = 0;
 		sg_init_table(sg, template[i].np);
 		for (k = 0; k < template[i].np; k++) {
-			memcpy(&xbuf[IDX[k]],
-			       template[i].input + temp,
-			       template[i].tap[k]);
+			if (WARN_ON(offset_in_page(IDX[k]) +
+				    template[i].tap[k] > PAGE_SIZE))
+				goto out;
+
+			q = xbuf[IDX[k] >> PAGE_SHIFT] +
+			    offset_in_page(IDX[k]);
+
+			memcpy(q, template[i].input + temp,
+			       template[i].tap[k]);
+
+			if (offset_in_page(q) + template[i].tap[k] <
+			    PAGE_SIZE)
+				q[template[i].tap[k]] = 0;
+
+			sg_set_buf(&sg[k], q, template[i].tap[k]);
 
 			temp += template[i].tap[k];
-			sg_set_buf(&sg[k], &xbuf[IDX[k]],
-				   template[i].tap[k]);
 		}
 
 		ablkcipher_request_set_crypt(req, sg, sg,
@@ -696,19 +697,21 @@ static void test_cipher(char *algo, int enc,
 		temp = 0;
 		for (k = 0; k < template[i].np; k++) {
 			printk("page %u\n", k);
-			q = &xbuf[IDX[k]];
+			q = xbuf[IDX[k] >> PAGE_SHIFT] +
+			    offset_in_page(IDX[k]);
 
 			hexdump(q, template[i].tap[k]);
 			printk("%s\n",
 			       memcmp(q, template[i].result + temp,
 				      template[i].tap[k]) ? "fail" :
 			       "pass");
 
-			for (n = 0; q[template[i].tap[k] + n]; n++)
+			q += template[i].tap[k];
+			for (n = 0; offset_in_page(q + n) && q[n]; n++)
 				;
 			if (n) {
 				printk("Result buffer corruption %u "
 				       "bytes:\n", n);
-				hexdump(&q[template[i].tap[k]], n);
+				hexdump(q, n);
 			}
 			temp += template[i].tap[k];
 		}
@@ -719,16 +722,13 @@ out:
 	ablkcipher_request_free(req);
 }
 
-static int test_cipher_jiffies(struct blkcipher_desc *desc, int enc, char *p,
-			       int blen, int sec)
+static int test_cipher_jiffies(struct blkcipher_desc *desc, int enc,
+			       struct scatterlist *sg, int blen, int sec)
 {
-	struct scatterlist sg[1];
 	unsigned long start, end;
 	int bcount;
 	int ret;
 
-	sg_init_one(sg, p, blen);
-
 	for (start = jiffies, end = start + sec * HZ, bcount = 0;
 	     time_before(jiffies, end); bcount++) {
 		if (enc)
@@ -745,16 +745,13 @@ static int test_cipher_jiffies(struct blkcipher_desc *desc, int enc, char *p,
 	return 0;
 }
 
-static int test_cipher_cycles(struct blkcipher_desc *desc, int enc, char *p,
-			      int blen)
+static int test_cipher_cycles(struct blkcipher_desc *desc, int enc,
+			      struct scatterlist *sg, int blen)
 {
-	struct scatterlist sg[1];
 	unsigned long cycles = 0;
 	int ret = 0;
 	int i;
 
-	sg_init_one(sg, p, blen);
-
 	local_bh_disable();
 	local_irq_disable();
@@ -804,7 +801,7 @@ static void test_cipher_speed(char *algo, int enc, unsigned int sec,
 			      unsigned int tcount, u8 *keysize)
 {
 	unsigned int ret, i, j, iv_len;
-	unsigned char *key, *p, iv[128];
+	unsigned char *key, iv[128];
 	struct crypto_blkcipher *tfm;
 	struct blkcipher_desc desc;
 	const char *e;
@@ -832,27 +829,28 @@ static void test_cipher_speed(char *algo, int enc, unsigned int sec,
 	b_size = block_sizes;
 	do {
+		struct scatterlist sg[TVMEMSIZE];
 
-		if ((*keysize + *b_size) > TVMEMSIZE) {
-			printk("template (%u) too big for tvmem (%u)\n",
-			       *keysize + *b_size, TVMEMSIZE);
+		if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
+			printk("template (%u) too big for "
+			       "tvmem (%lu)\n", *keysize + *b_size,
+			       TVMEMSIZE * PAGE_SIZE);
 			goto out;
 		}
 
 		printk("test %u (%d bit key, %d byte blocks): ", i,
 		       *keysize * 8, *b_size);
 
-		memset(tvmem, 0xff, *keysize + *b_size);
+		memset(tvmem[0], 0xff, PAGE_SIZE);
 
 		/* set key, plain text and IV */
-		key = (unsigned char *)tvmem;
+		key = (unsigned char *)tvmem[0];
 		for (j = 0; j < tcount; j++) {
 			if (template[j].klen == *keysize) {
 				key = template[j].key;
 				break;
 			}
 		}
-		p = (unsigned char *)tvmem + *keysize;
 
 		ret = crypto_blkcipher_setkey(tfm, key, *keysize);
 		if (ret) {
@@ -861,6 +859,14 @@ static void test_cipher_speed(char *algo, int enc, unsigned int sec,
 			goto out;
 		}
 
+		sg_init_table(sg, TVMEMSIZE);
+		sg_set_buf(sg, tvmem[0] + *keysize,
+			   PAGE_SIZE - *keysize);
+		for (j = 1; j < TVMEMSIZE; j++) {
+			sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
+			memset (tvmem[j], 0xff, PAGE_SIZE);
+		}
+
 		iv_len = crypto_blkcipher_ivsize(tfm);
 		if (iv_len) {
 			memset(&iv, 0xff, iv_len);
@@ -868,9 +874,11 @@ static void test_cipher_speed(char *algo, int enc, unsigned int sec,
 		}
 
 		if (sec)
-			ret = test_cipher_jiffies(&desc, enc, p, *b_size, sec);
+			ret = test_cipher_jiffies(&desc, enc, sg,
+						  *b_size, sec);
 		else
-			ret = test_cipher_cycles(&desc, enc, p, *b_size);
+			ret = test_cipher_cycles(&desc, enc, sg,
+						 *b_size);
 
 		if (ret) {
 			printk("%s() failed flags=%x\n", e, desc.flags);
@@ -886,19 +894,16 @@ out:
 	crypto_free_blkcipher(tfm);
 }
 
-static int test_hash_jiffies_digest(struct hash_desc *desc, char *p, int blen,
+static int test_hash_jiffies_digest(struct hash_desc *desc,
+				    struct scatterlist *sg, int blen,
 				    char *out, int sec)
 {
-	struct scatterlist sg[1];
 	unsigned long start, end;
 	int bcount;
 	int ret;
 
-	sg_init_table(sg, 1);
-
 	for (start = jiffies, end = start + sec * HZ, bcount = 0;
 	     time_before(jiffies, end); bcount++) {
-		sg_set_buf(sg, p, blen);
 		ret = crypto_hash_digest(desc, sg, blen, out);
 		if (ret)
 			return ret;
@@ -910,18 +915,15 @@ static int test_hash_jiffies_digest(struct hash_desc *desc, char *p, int blen,
 	return 0;
 }
 
-static int test_hash_jiffies(struct hash_desc *desc, char *p, int blen,
-			     int plen, char *out, int sec)
+static int test_hash_jiffies(struct hash_desc *desc, struct scatterlist *sg,
+			     int blen, int plen, char *out, int sec)
 {
-	struct scatterlist sg[1];
 	unsigned long start, end;
 	int bcount, pcount;
 	int ret;
 
 	if (plen == blen)
-		return test_hash_jiffies_digest(desc, p, blen, out, sec);
-
-	sg_init_table(sg, 1);
+		return test_hash_jiffies_digest(desc, sg, blen, out, sec);
 
 	for (start = jiffies, end = start + sec * HZ, bcount = 0;
 	     time_before(jiffies, end); bcount++) {
@@ -929,7 +931,6 @@ static int test_hash_jiffies(struct hash_desc *desc, char *p, int blen,
 		if (ret)
 			return ret;
 		for (pcount = 0; pcount < blen; pcount += plen) {
-			sg_set_buf(sg, p + pcount, plen);
 			ret = crypto_hash_update(desc, sg, plen);
 			if (ret)
 				return ret;
@@ -946,22 +947,18 @@ static int test_hash_jiffies(struct hash_desc *desc, char *p, int blen,
 	return 0;
 }
 
-static int test_hash_cycles_digest(struct hash_desc *desc, char *p, int blen,
-				   char *out)
+static int test_hash_cycles_digest(struct hash_desc *desc,
+				   struct scatterlist *sg, int blen, char *out)
 {
-	struct scatterlist sg[1];
 	unsigned long cycles = 0;
 	int i;
 	int ret;
 
-	sg_init_table(sg, 1);
-
 	local_bh_disable();
 	local_irq_disable();
 
 	/* Warm-up run. */
 	for (i = 0; i < 4; i++) {
-		sg_set_buf(sg, p, blen);
 		ret = crypto_hash_digest(desc, sg, blen, out);
 		if (ret)
 			goto out;
@@ -973,7 +970,6 @@ static int test_hash_cycles_digest(struct hash_desc *desc, char *p, int blen,
 
 		start = get_cycles();
 
-		sg_set_buf(sg, p, blen);
 		ret = crypto_hash_digest(desc, sg, blen, out);
 		if (ret)
 			goto out;
@@ -996,18 +992,15 @@ out:
 	return 0;
 }
 
-static int test_hash_cycles(struct hash_desc *desc, char *p, int blen,
-			    int plen, char *out)
+static int test_hash_cycles(struct hash_desc *desc, struct scatterlist *sg,
+			    int blen, int plen, char *out)
 {
-	struct scatterlist sg[1];
 	unsigned long cycles = 0;
 	int i, pcount;
 	int ret;
 
 	if (plen == blen)
-		return test_hash_cycles_digest(desc, p, blen, out);
-
-	sg_init_table(sg, 1);
+		return test_hash_cycles_digest(desc, sg, blen, out);
 
 	local_bh_disable();
 	local_irq_disable();
@@ -1018,7 +1011,6 @@ static int test_hash_cycles(struct hash_desc *desc, char *p, int blen,
 		if (ret)
 			goto out;
 		for (pcount = 0; pcount < blen; pcount += plen) {
-			sg_set_buf(sg, p + pcount, plen);
 			ret = crypto_hash_update(desc, sg, plen);
 			if (ret)
 				goto out;
@@ -1038,7 +1030,6 @@ static int test_hash_cycles(struct hash_desc *desc, char *p, int blen,
 		if (ret)
 			goto out;
 		for (pcount = 0; pcount < blen; pcount += plen) {
-			sg_set_buf(sg, p + pcount, plen);
 			ret = crypto_hash_update(desc, sg, plen);
 			if (ret)
 				goto out;
@@ -1068,6 +1059,7 @@ out:
 static void test_hash_speed(char *algo, unsigned int sec,
 			    struct hash_speed *speed)
 {
+	struct scatterlist sg[TVMEMSIZE];
 	struct crypto_hash *tfm;
 	struct hash_desc desc;
 	char output[1024];
@@ -1093,23 +1085,27 @@ static void test_hash_speed(char *algo, unsigned int sec,
 		goto out;
 	}
 
+	sg_init_table(sg, TVMEMSIZE);
+	for (i = 0; i < TVMEMSIZE; i++) {
+		sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
+		memset(tvmem[i], 0xff, PAGE_SIZE);
+	}
+
 	for (i = 0; speed[i].blen != 0; i++) {
-		if (speed[i].blen > TVMEMSIZE) {
-			printk("template (%u) too big for tvmem (%u)\n",
-			       speed[i].blen, TVMEMSIZE);
+		if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
+			printk("template (%u) too big for tvmem (%lu)\n",
+			       speed[i].blen, TVMEMSIZE * PAGE_SIZE);
 			goto out;
 		}
 
 		printk("test%3u (%5u byte blocks,%5u bytes per update,%4u updates): ",
 			i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);
 
-		memset(tvmem, 0xff, speed[i].blen);
-
 		if (sec)
-			ret = test_hash_jiffies(&desc, tvmem, speed[i].blen,
+			ret = test_hash_jiffies(&desc, sg, speed[i].blen,
 						speed[i].plen, output, sec);
 		else
-			ret = test_hash_cycles(&desc, tvmem, speed[i].blen,
+			ret = test_hash_cycles(&desc, sg, speed[i].blen,
 					       speed[i].plen, output);
 
 		if (ret) {
@@ -1128,7 +1124,6 @@ static void test_comp(char *algo, struct comp_testvec *ctemplate,
 	unsigned int i;
 	char result[COMP_BUF_SIZE];
 	struct crypto_comp *tfm;
-	unsigned int tsize;
 
 	printk("\ntesting %s compression\n", algo);
 
@@ -1159,14 +1154,6 @@ static void test_comp(char *algo, struct comp_testvec *ctemplate,
 
 	printk("\ntesting %s decompression\n", algo);
 
-	tsize = sizeof(struct comp_testvec);
-	tsize *= dtcount;
-	if (tsize > TVMEMSIZE) {
-		printk("template (%u) too big for tvmem (%u)\n", tsize,
-		       TVMEMSIZE);
-		goto out;
-	}
-
 	for (i = 0; i < dtcount; i++) {
 		int ilen, ret, dlen = COMP_BUF_SIZE;
 
@@ -1185,7 +1172,7 @@ static void test_comp(char *algo, struct comp_testvec *ctemplate,
 		       memcmp(result, dtemplate[i].output, dlen) ? "fail" : "pass",
 		       ilen, dlen);
 	}
-out:
+
 	crypto_free_comp(tfm);
 }
@@ -1917,18 +1904,25 @@ static void do_test(void)
 static int __init tcrypt_mod_init(void)
 {
 	int err = -ENOMEM;
+	int i;
 
-	tvmem = kmalloc(TVMEMSIZE, GFP_KERNEL);
-	if (tvmem == NULL)
-		return err;
+	for (i = 0; i < TVMEMSIZE; i++) {
+		tvmem[i] = (void *)__get_free_page(GFP_KERNEL);
+		if (!tvmem[i])
+			goto err_free_tv;
+	}
 
-	xbuf = kmalloc(XBUFSIZE, GFP_KERNEL);
-	if (xbuf == NULL)
-		goto err_free_tv;
+	for (i = 0; i < XBUFSIZE; i++) {
+		xbuf[i] = (void *)__get_free_page(GFP_KERNEL);
+		if (!xbuf[i])
+			goto err_free_xbuf;
+	}
 
-	axbuf = kmalloc(XBUFSIZE, GFP_KERNEL);
-	if (axbuf == NULL)
-		goto err_free_xbuf;
+	for (i = 0; i < XBUFSIZE; i++) {
+		axbuf[i] = (void *)__get_free_page(GFP_KERNEL);
+		if (!axbuf[i])
+			goto err_free_axbuf;
+	}
 
 	do_test();
@@ -1940,11 +1934,15 @@ static int __init tcrypt_mod_init(void)
 	 */
 	err = -EAGAIN;
 
-	kfree(axbuf);
+err_free_axbuf:
+	for (i = 0; i < XBUFSIZE && axbuf[i]; i++)
+		free_page((unsigned long)axbuf[i]);
 err_free_xbuf:
-	kfree(xbuf);
+	for (i = 0; i < XBUFSIZE && xbuf[i]; i++)
+		free_page((unsigned long)xbuf[i]);
 err_free_tv:
-	kfree(tvmem);
+	for (i = 0; i < TVMEMSIZE && tvmem[i]; i++)
+		free_page((unsigned long)tvmem[i]);
 
 	return err;
 }

--- a/crypto/tcrypt.h
+++ b/crypto/tcrypt.h
@@ -39,7 +39,7 @@ struct cipher_testvec {
 	char *iv;
 	char *input;
 	char *result;
-	unsigned char tap[MAX_TAP];
+	unsigned short tap[MAX_TAP];
 	int np;
 	unsigned char fail;
 	unsigned char wk; /* weak key flag */
@@ -5111,6 +5111,8 @@ static struct cipher_testvec aes_ctr_enc_tv_template[] = {
 			  "\x4b\xef\x31\x18\xea\xac\xb1\x84"
 			  "\x21\xed\xda\x86",
 		.rlen	= 4100,
+		.np	= 2,
+		.tap	= { 4064, 36 },
 	},
 };
@@ -8126,7 +8128,9 @@ static struct cipher_testvec salsa20_stream_enc_tv_template[] = {
 			  "\xfc\x3f\x09\x7a\x0b\xdc\xc5\x1b"
 			  "\x87\x13\xc6\x5b\x59\x8d\xf2\xc8"
 			  "\xaf\xdf\x11\x95",
 		.rlen	= 4100,
+		.np	= 2,
+		.tap	= { 4064, 36 },
 	},
 };