Commit 4e7babba authored by Eric Biggers, committed by Herbert Xu

crypto: testmgr - convert skcipher testing to use testvec_configs

Convert alg_test_skcipher() to use the new test framework, adding a list
of testvec_configs to test by default.  When the extra self-tests are
enabled, randomly generated testvec_configs are tested as well.
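
In outline, each test vector is now run against every default config, and
then against randomly generated configs when the extra tests are enabled.
The following is only a condensed sketch of test_skcipher_vec() from the
diff below, with declarations and error handling omitted:

    /* Try every vector with each of the default configs... */
    for (i = 0; i < ARRAY_SIZE(default_cipher_testvec_configs); i++)
        err = test_skcipher_vec_cfg(driver, enc, vec, vec_num,
                                    &default_cipher_testvec_configs[i],
                                    req, tsgls);

    /*
     * ...and, when CONFIG_CRYPTO_MANAGER_EXTRA_TESTS is enabled and
     * 'noextratests' is not set, with 'fuzz_iterations' random configs too.
     */
    for (i = 0; i < fuzz_iterations; i++) {
        generate_random_testvec_config(&cfg, cfgname, sizeof(cfgname));
        err = test_skcipher_vec_cfg(driver, enc, vec, vec_num,
                                    &cfg, req, tsgls);
    }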

This improves skcipher test coverage mainly because now all algorithms
have a variety of data layouts tested, whereas before each algorithm was
responsible for declaring its own chunked test cases which were often
missing or provided poor test coverage.  The new code also tests both
the MAY_SLEEP and !MAY_SLEEP cases, different IV alignments, and buffers
that cross pages.
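
Each testvec_config describes one such data layout declaratively.  For
illustration, a config in the style of the ones added below might look like
this (the values are hypothetical and not part of the patch; the proportions
sum to 10000, i.e. the whole vector length):

    static const struct testvec_config example_config = {
        .name = "example: two misaligned splits, may sleep",
        .req_flags = CRYPTO_TFM_REQ_MAY_SLEEP, /* exercise the MAY_SLEEP path */
        .src_divs = {
            /* split the data into two misaligned pieces */
            { .proportion_of_total = 6000, .offset = 3 },
            { .proportion_of_total = 4000, .offset = 13 },
        },
        .iv_offset = 5, /* misalign the IV buffer as well */
    };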

This has already found a bug in the arm64 ctr-aes-neonbs algorithm.
It would have easily found many past bugs.

I removed the skcipher chunked test vectors that were the same as
non-chunked ones, but left the ones that were unique.
Signed-off-by: Eric Biggers <ebiggers@google.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

Parent: 25f9dddb
@@ -284,6 +284,68 @@ struct testvec_config {

 #define TESTVEC_CONFIG_NAMELEN 192

+/*
+ * The following are the lists of testvec_configs to test for each algorithm
+ * type when the basic crypto self-tests are enabled, i.e. when
+ * CONFIG_CRYPTO_MANAGER_DISABLE_TESTS is unset.  They aim to provide good test
+ * coverage, while keeping the test time much shorter than the full fuzz tests
+ * so that the basic tests can be enabled in a wider range of circumstances.
+ */
+
+/* Configs for skciphers and aeads */
+static const struct testvec_config default_cipher_testvec_configs[] = {
+    {
+        .name = "in-place",
+        .inplace = true,
+        .src_divs = { { .proportion_of_total = 10000 } },
+    }, {
+        .name = "out-of-place",
+        .src_divs = { { .proportion_of_total = 10000 } },
+    }, {
+        .name = "unaligned buffer, offset=1",
+        .src_divs = { { .proportion_of_total = 10000, .offset = 1 } },
+        .iv_offset = 1,
+    }, {
+        .name = "buffer aligned only to alignmask",
+        .src_divs = {
+            {
+                .proportion_of_total = 10000,
+                .offset = 1,
+                .offset_relative_to_alignmask = true,
+            },
+        },
+        .iv_offset = 1,
+        .iv_offset_relative_to_alignmask = true,
+    }, {
+        .name = "two even aligned splits",
+        .src_divs = {
+            { .proportion_of_total = 5000 },
+            { .proportion_of_total = 5000 },
+        },
+    }, {
+        .name = "uneven misaligned splits, may sleep",
+        .req_flags = CRYPTO_TFM_REQ_MAY_SLEEP,
+        .src_divs = {
+            { .proportion_of_total = 1900, .offset = 33 },
+            { .proportion_of_total = 3300, .offset = 7 },
+            { .proportion_of_total = 4800, .offset = 18 },
+        },
+        .iv_offset = 3,
+    }, {
+        .name = "misaligned splits crossing pages, inplace",
+        .inplace = true,
+        .src_divs = {
+            {
+                .proportion_of_total = 7500,
+                .offset = PAGE_SIZE - 32
+            }, {
+                .proportion_of_total = 2500,
+                .offset = PAGE_SIZE - 7
+            },
+        },
+    }
+};
+
 static unsigned int count_test_sg_divisions(const struct test_sg_division *divs)
 {
     unsigned int remaining = TEST_SG_TOTAL;
@@ -1608,8 +1670,6 @@ static int test_cipher(struct crypto_cipher *tfm, int enc,

     j = 0;
     for (i = 0; i < tcount; i++) {
-        if (template[i].np)
-            continue;

         if (fips_enabled && template[i].fips_skip)
             continue;
@@ -1667,282 +1727,214 @@ static int test_cipher(struct crypto_cipher *tfm, int enc,
     return ret;
 }

-static int __test_skcipher(struct crypto_skcipher *tfm, int enc,
-                           const struct cipher_testvec *template,
-                           unsigned int tcount,
-                           const bool diff_dst, const int align_offset)
-{
-    const char *algo =
-        crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
-    unsigned int i, j, k, n, temp;
-    char *q;
-    struct skcipher_request *req;
-    struct scatterlist sg[8];
-    struct scatterlist sgout[8];
-    const char *e, *d;
-    struct crypto_wait wait;
-    const char *input, *result;
-    void *data;
-    char iv[MAX_IVLEN];
-    char *xbuf[XBUFSIZE];
-    char *xoutbuf[XBUFSIZE];
-    int ret = -ENOMEM;
-    unsigned int ivsize = crypto_skcipher_ivsize(tfm);
-
-    if (testmgr_alloc_buf(xbuf))
-        goto out_nobuf;
-
-    if (diff_dst && testmgr_alloc_buf(xoutbuf))
-        goto out_nooutbuf;
-
-    if (diff_dst)
-        d = "-ddst";
-    else
-        d = "";
-
-    if (enc == ENCRYPT)
-        e = "encryption";
-    else
-        e = "decryption";
-
-    crypto_init_wait(&wait);
-
-    req = skcipher_request_alloc(tfm, GFP_KERNEL);
-    if (!req) {
-        pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
-               d, algo);
-        goto out;
-    }
-
-    skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
-                                  crypto_req_done, &wait);
-
-    j = 0;
-    for (i = 0; i < tcount; i++) {
-        if (template[i].np && !template[i].also_non_np)
-            continue;
-
-        if (fips_enabled && template[i].fips_skip)
-            continue;
-
-        if (template[i].iv && !(template[i].generates_iv && enc))
-            memcpy(iv, template[i].iv, ivsize);
-        else
-            memset(iv, 0, MAX_IVLEN);
-
-        input = enc ? template[i].ptext : template[i].ctext;
-        result = enc ? template[i].ctext : template[i].ptext;
-        j++;
-        ret = -EINVAL;
-        if (WARN_ON(align_offset + template[i].len > PAGE_SIZE))
-            goto out;
-
-        data = xbuf[0];
-        data += align_offset;
-        memcpy(data, input, template[i].len);
-
-        crypto_skcipher_clear_flags(tfm, ~0);
-        if (template[i].wk)
-            crypto_skcipher_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
-
-        ret = crypto_skcipher_setkey(tfm, template[i].key,
-                                     template[i].klen);
-        if (template[i].fail == !ret) {
-            pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
-                   d, j, algo, crypto_skcipher_get_flags(tfm));
-            goto out;
-        } else if (ret)
-            continue;
-
-        sg_init_one(&sg[0], data, template[i].len);
-        if (diff_dst) {
-            data = xoutbuf[0];
-            data += align_offset;
-            sg_init_one(&sgout[0], data, template[i].len);
-        }
-
-        skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
-                                   template[i].len, iv);
-        ret = crypto_wait_req(enc ? crypto_skcipher_encrypt(req) :
-                              crypto_skcipher_decrypt(req), &wait);
-
-        if (ret) {
-            pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
-                   d, e, j, algo, -ret);
-            goto out;
-        }
-
-        q = data;
-        if (memcmp(q, result, template[i].len)) {
-            pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n",
-                   d, j, e, algo);
-            hexdump(q, template[i].len);
-            ret = -EINVAL;
-            goto out;
-        }
-
-        if (template[i].generates_iv && enc &&
-            memcmp(iv, template[i].iv, crypto_skcipher_ivsize(tfm))) {
-            pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n",
-                   d, j, e, algo);
-            hexdump(iv, crypto_skcipher_ivsize(tfm));
-            ret = -EINVAL;
-            goto out;
-        }
-    }
-
-    j = 0;
-    for (i = 0; i < tcount; i++) {
-        /* alignment tests are only done with continuous buffers */
-        if (align_offset != 0)
-            break;
-
-        if (!template[i].np)
-            continue;
-
-        if (fips_enabled && template[i].fips_skip)
-            continue;
-
-        if (template[i].iv && !(template[i].generates_iv && enc))
-            memcpy(iv, template[i].iv, ivsize);
-        else
-            memset(iv, 0, MAX_IVLEN);
-
-        input = enc ? template[i].ptext : template[i].ctext;
-        result = enc ? template[i].ctext : template[i].ptext;
-        j++;
-        crypto_skcipher_clear_flags(tfm, ~0);
-        if (template[i].wk)
-            crypto_skcipher_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
-
-        ret = crypto_skcipher_setkey(tfm, template[i].key,
-                                     template[i].klen);
-        if (template[i].fail == !ret) {
-            pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
-                   d, j, algo, crypto_skcipher_get_flags(tfm));
-            goto out;
-        } else if (ret)
-            continue;
-
-        temp = 0;
-        ret = -EINVAL;
-        sg_init_table(sg, template[i].np);
-        if (diff_dst)
-            sg_init_table(sgout, template[i].np);
-        for (k = 0; k < template[i].np; k++) {
-            if (WARN_ON(offset_in_page(IDX[k]) +
-                        template[i].tap[k] > PAGE_SIZE))
-                goto out;
-
-            q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
-
-            memcpy(q, input + temp, template[i].tap[k]);
-
-            if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE)
-                q[template[i].tap[k]] = 0;
-
-            sg_set_buf(&sg[k], q, template[i].tap[k]);
-            if (diff_dst) {
-                q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
-                    offset_in_page(IDX[k]);
-
-                sg_set_buf(&sgout[k], q, template[i].tap[k]);
-
-                memset(q, 0, template[i].tap[k]);
-                if (offset_in_page(q) +
-                    template[i].tap[k] < PAGE_SIZE)
-                    q[template[i].tap[k]] = 0;
-            }
-
-            temp += template[i].tap[k];
-        }
-
-        skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
-                                   template[i].len, iv);
-
-        ret = crypto_wait_req(enc ? crypto_skcipher_encrypt(req) :
-                              crypto_skcipher_decrypt(req), &wait);
-
-        if (ret) {
-            pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
-                   d, e, j, algo, -ret);
-            goto out;
-        }
-
-        temp = 0;
-        ret = -EINVAL;
-        for (k = 0; k < template[i].np; k++) {
-            if (diff_dst)
-                q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
-                    offset_in_page(IDX[k]);
-            else
-                q = xbuf[IDX[k] >> PAGE_SHIFT] +
-                    offset_in_page(IDX[k]);
-
-            if (memcmp(q, result + temp, template[i].tap[k])) {
-                pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
-                       d, j, e, k, algo);
-                hexdump(q, template[i].tap[k]);
-                goto out;
-            }
-
-            q += template[i].tap[k];
-            for (n = 0; offset_in_page(q + n) && q[n]; n++)
-                ;
-            if (n) {
-                pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
-                       d, j, e, k, algo, n);
-                hexdump(q, n);
-                goto out;
-            }
-            temp += template[i].tap[k];
-        }
-    }
-
-    ret = 0;
-
-out:
-    skcipher_request_free(req);
-    if (diff_dst)
-        testmgr_free_buf(xoutbuf);
-out_nooutbuf:
-    testmgr_free_buf(xbuf);
-out_nobuf:
-    return ret;
-}
-
-static int test_skcipher(struct crypto_skcipher *tfm, int enc,
-                         const struct cipher_testvec *template,
-                         unsigned int tcount)
-{
-    unsigned int alignmask;
-    int ret;
-
-    /* test 'dst == src' case */
-    ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
-    if (ret)
-        return ret;
-
-    /* test 'dst != src' case */
-    ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
-    if (ret)
-        return ret;
-
-    /* test unaligned buffers, check with one byte offset */
-    ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
-    if (ret)
-        return ret;
-
-    alignmask = crypto_tfm_alg_alignmask(&tfm->base);
-    if (alignmask) {
-        /* Check if alignment mask for tfm is correctly set. */
-        ret = __test_skcipher(tfm, enc, template, tcount, true,
-                              alignmask + 1);
-        if (ret)
-            return ret;
-    }
-
-    return 0;
-}
+static int test_skcipher_vec_cfg(const char *driver, int enc,
+                                 const struct cipher_testvec *vec,
+                                 unsigned int vec_num,
+                                 const struct testvec_config *cfg,
+                                 struct skcipher_request *req,
+                                 struct cipher_test_sglists *tsgls)
+{
+    struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+    const unsigned int alignmask = crypto_skcipher_alignmask(tfm);
+    const unsigned int ivsize = crypto_skcipher_ivsize(tfm);
+    const u32 req_flags = CRYPTO_TFM_REQ_MAY_BACKLOG | cfg->req_flags;
+    const char *op = enc ? "encryption" : "decryption";
+    DECLARE_CRYPTO_WAIT(wait);
+    u8 _iv[3 * (MAX_ALGAPI_ALIGNMASK + 1) + MAX_IVLEN];
+    u8 *iv = PTR_ALIGN(&_iv[0], 2 * (MAX_ALGAPI_ALIGNMASK + 1)) +
+             cfg->iv_offset +
+             (cfg->iv_offset_relative_to_alignmask ? alignmask : 0);
+    struct kvec input;
+    int err;
+
+    /* Set the key */
+    if (vec->wk)
+        crypto_skcipher_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
+    else
+        crypto_skcipher_clear_flags(tfm,
+                                    CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
+    err = crypto_skcipher_setkey(tfm, vec->key, vec->klen);
+    if (err) {
+        if (vec->fail) /* expectedly failed to set key? */
+            return 0;
+        pr_err("alg: skcipher: %s setkey failed with err %d on test vector %u; flags=%#x\n",
+               driver, err, vec_num, crypto_skcipher_get_flags(tfm));
+        return err;
+    }
+    if (vec->fail) {
+        pr_err("alg: skcipher: %s setkey unexpectedly succeeded on test vector %u\n",
+               driver, vec_num);
+        return -EINVAL;
+    }
+
+    /* The IV must be copied to a buffer, as the algorithm may modify it */
+    if (ivsize) {
+        if (WARN_ON(ivsize > MAX_IVLEN))
+            return -EINVAL;
+        if (vec->iv && !(vec->generates_iv && enc))
+            memcpy(iv, vec->iv, ivsize);
+        else
+            memset(iv, 0, ivsize);
+    } else {
+        if (vec->generates_iv) {
+            pr_err("alg: skcipher: %s has ivsize=0 but test vector %u generates IV!\n",
+                   driver, vec_num);
+            return -EINVAL;
+        }
+        iv = NULL;
+    }
+
+    /* Build the src/dst scatterlists */
+    input.iov_base = enc ? (void *)vec->ptext : (void *)vec->ctext;
+    input.iov_len = vec->len;
+    err = build_cipher_test_sglists(tsgls, cfg, alignmask,
+                                    vec->len, vec->len, &input, 1);
+    if (err) {
+        pr_err("alg: skcipher: %s %s: error preparing scatterlists for test vector %u, cfg=\"%s\"\n",
+               driver, op, vec_num, cfg->name);
+        return err;
+    }
+
+    /* Do the actual encryption or decryption */
+    testmgr_poison(req->__ctx, crypto_skcipher_reqsize(tfm));
+    skcipher_request_set_callback(req, req_flags, crypto_req_done, &wait);
+    skcipher_request_set_crypt(req, tsgls->src.sgl_ptr, tsgls->dst.sgl_ptr,
+                               vec->len, iv);
+    err = crypto_wait_req(enc ? crypto_skcipher_encrypt(req) :
+                          crypto_skcipher_decrypt(req), &wait);
+    if (err) {
+        pr_err("alg: skcipher: %s %s failed with err %d on test vector %u, cfg=\"%s\"\n",
+               driver, op, err, vec_num, cfg->name);
+        return err;
+    }
+
+    /* Check for the correct output (ciphertext or plaintext) */
+    err = verify_correct_output(&tsgls->dst, enc ? vec->ctext : vec->ptext,
+                                vec->len, 0, true);
+    if (err == -EOVERFLOW) {
+        pr_err("alg: skcipher: %s %s overran dst buffer on test vector %u, cfg=\"%s\"\n",
+               driver, op, vec_num, cfg->name);
+        return err;
+    }
+    if (err) {
+        pr_err("alg: skcipher: %s %s test failed (wrong result) on test vector %u, cfg=\"%s\"\n",
+               driver, op, vec_num, cfg->name);
+        return err;
+    }
+
+    /* If applicable, check that the algorithm generated the correct IV */
+    if (vec->generates_iv && enc && memcmp(iv, vec->iv, ivsize) != 0) {
+        pr_err("alg: skcipher: %s %s test failed (wrong output IV) on test vector %u, cfg=\"%s\"\n",
+               driver, op, vec_num, cfg->name);
+        hexdump(iv, ivsize);
+        return -EINVAL;
+    }
+
+    return 0;
+}
+
+static int test_skcipher_vec(const char *driver, int enc,
+                             const struct cipher_testvec *vec,
+                             unsigned int vec_num,
+                             struct skcipher_request *req,
+                             struct cipher_test_sglists *tsgls)
+{
+    unsigned int i;
+    int err;
+
+    if (fips_enabled && vec->fips_skip)
+        return 0;
+
+    for (i = 0; i < ARRAY_SIZE(default_cipher_testvec_configs); i++) {
+        err = test_skcipher_vec_cfg(driver, enc, vec, vec_num,
+                                    &default_cipher_testvec_configs[i],
+                                    req, tsgls);
+        if (err)
+            return err;
+    }
+
+#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
+    if (!noextratests) {
+        struct testvec_config cfg;
+        char cfgname[TESTVEC_CONFIG_NAMELEN];
+
+        for (i = 0; i < fuzz_iterations; i++) {
+            generate_random_testvec_config(&cfg, cfgname,
+                                           sizeof(cfgname));
+            err = test_skcipher_vec_cfg(driver, enc, vec, vec_num,
+                                        &cfg, req, tsgls);
+            if (err)
+                return err;
+        }
+    }
+#endif
+    return 0;
+}
+
+static int test_skcipher(const char *driver, int enc,
+                         const struct cipher_test_suite *suite,
+                         struct skcipher_request *req,
+                         struct cipher_test_sglists *tsgls)
+{
+    unsigned int i;
+    int err;
+
+    for (i = 0; i < suite->count; i++) {
+        err = test_skcipher_vec(driver, enc, &suite->vecs[i], i, req,
+                                tsgls);
+        if (err)
+            return err;
+    }
+    return 0;
+}
+
+static int alg_test_skcipher(const struct alg_test_desc *desc,
+                             const char *driver, u32 type, u32 mask)
+{
+    const struct cipher_test_suite *suite = &desc->suite.cipher;
+    struct crypto_skcipher *tfm;
+    struct skcipher_request *req = NULL;
+    struct cipher_test_sglists *tsgls = NULL;
+    int err;
+
+    if (suite->count <= 0) {
+        pr_err("alg: skcipher: empty test suite for %s\n", driver);
+        return -EINVAL;
+    }
+
+    tfm = crypto_alloc_skcipher(driver, type, mask);
+    if (IS_ERR(tfm)) {
+        pr_err("alg: skcipher: failed to allocate transform for %s: %ld\n",
+               driver, PTR_ERR(tfm));
+        return PTR_ERR(tfm);
+    }
+
+    req = skcipher_request_alloc(tfm, GFP_KERNEL);
+    if (!req) {
+        pr_err("alg: skcipher: failed to allocate request for %s\n",
+               driver);
+        err = -ENOMEM;
+        goto out;
+    }
+
+    tsgls = alloc_cipher_test_sglists();
+    if (!tsgls) {
+        pr_err("alg: skcipher: failed to allocate test buffers for %s\n",
+               driver);
+        err = -ENOMEM;
+        goto out;
+    }
+
+    err = test_skcipher(driver, ENCRYPT, suite, req, tsgls);
+    if (err)
+        goto out;
+
+    err = test_skcipher(driver, DECRYPT, suite, req, tsgls);
+out:
+    free_cipher_test_sglists(tsgls);
+    skcipher_request_free(req);
+    crypto_free_skcipher(tfm);
+    return err;
+}

 static int test_comp(struct crypto_comp *tfm,
@@ -2326,28 +2318,6 @@ static int alg_test_cipher(const struct alg_test_desc *desc,
     return err;
 }

-static int alg_test_skcipher(const struct alg_test_desc *desc,
-                             const char *driver, u32 type, u32 mask)
-{
-    const struct cipher_test_suite *suite = &desc->suite.cipher;
-    struct crypto_skcipher *tfm;
-    int err;
-
-    tfm = crypto_alloc_skcipher(driver, type, mask);
-    if (IS_ERR(tfm)) {
-        printk(KERN_ERR "alg: skcipher: Failed to load transform for "
-               "%s: %ld\n", driver, PTR_ERR(tfm));
-        return PTR_ERR(tfm);
-    }
-
-    err = test_skcipher(tfm, ENCRYPT, suite->vecs, suite->count);
-    if (!err)
-        err = test_skcipher(tfm, DECRYPT, suite->vecs, suite->count);
-
-    crypto_free_skcipher(tfm);
-    return err;
-}
-
 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
                          u32 type, u32 mask)
 {
@@ -4224,6 +4194,11 @@ static void alg_check_test_descs_order(void)

 static void alg_check_testvec_configs(void)
 {
+    int i;
+
+    for (i = 0; i < ARRAY_SIZE(default_cipher_testvec_configs); i++)
+        WARN_ON(!valid_testvec_config(
+                    &default_cipher_testvec_configs[i]));
 }

 static void testmgr_onetime_init(void)
...
This diff is collapsed.