/*
 * Cryptographic API.
 *
 * Cipher operations.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option) 
 * any later version.
 *
 */
#include <linux/compiler.h>
#include <linux/kernel.h>
#include <linux/crypto.h>
#include <linux/errno.h>
#include <linux/mm.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <asm/scatterlist.h>
#include "internal.h"
#include "scatterwalk.h"

static inline void xor_64(u8 *a, const u8 *b)
{
	((u32 *)a)[0] ^= ((u32 *)b)[0];
	((u32 *)a)[1] ^= ((u32 *)b)[1];
}

static inline void xor_128(u8 *a, const u8 *b)
{
	((u32 *)a)[0] ^= ((u32 *)b)[0];
	((u32 *)a)[1] ^= ((u32 *)b)[1];
	((u32 *)a)[2] ^= ((u32 *)b)[2];
	((u32 *)a)[3] ^= ((u32 *)b)[3];
}
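
/*
 * The two helpers above XOR one cipher block into the CBC chaining value:
 * xor_64 for 64-bit block ciphers, xor_128 for 128-bit ones.
 * crypto_init_cipher_ops() below selects one of them as cit_xor_block
 * based on the algorithm's block size.
 */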

static unsigned int crypt_slow(const struct cipher_desc *desc,
			       struct scatter_walk *in,
			       struct scatter_walk *out, unsigned int bsize)
{
	unsigned long alignmask = crypto_tfm_alg_alignmask(desc->tfm);
	u8 buffer[bsize * 2 + alignmask];
	u8 *src = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	u8 *dst = src + bsize;
	unsigned int n;

	n = scatterwalk_copychunks(src, in, bsize, 0);
	scatterwalk_advance(in, n);

	desc->prfn(desc, dst, src, bsize);

	n = scatterwalk_copychunks(dst, out, bsize, 1);
	scatterwalk_advance(out, n);

	return bsize;
}
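
/*
 * crypt_slow() above processes exactly one block through an
 * alignmask-aligned stack buffer.  crypt() falls back to it whenever fewer
 * than a full block is contiguously available in either walk, typically
 * when a block straddles a scatterlist element (page) boundary.
 */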

static inline unsigned int crypt_fast(const struct cipher_desc *desc,
				      struct scatter_walk *in,
				      struct scatter_walk *out,
				      unsigned int nbytes, u8 *tmp)
{
	u8 *src, *dst;

	src = in->data;
	dst = scatterwalk_samebuf(in, out) ? src : out->data;

	if (tmp) {
		memcpy(tmp, in->data, nbytes);
		src = tmp;
		dst = tmp;
	}

	nbytes = desc->prfn(desc, dst, src, nbytes);

	if (tmp)
		memcpy(out->data, tmp, nbytes);

	scatterwalk_advance(in, nbytes);
	scatterwalk_advance(out, nbytes);

	return nbytes;
}
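
/*
 * crypt_fast() above runs the mode routine directly on the currently
 * mapped scatterlist data when both walks are suitably aligned;
 * desc->prfn may consume several blocks in one call.  If tmp is non-NULL
 * (one of the walks is misaligned), the data is bounced through that
 * temporary page and copied back afterwards.
 */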

/* 
 * Generic encrypt/decrypt wrapper for ciphers, handles operations across
 * multiple page boundaries by using temporary blocks.  In user context,
 * the kernel is given a chance to schedule us once per page.
 */
static int crypt(const struct cipher_desc *desc,
		 struct scatterlist *dst,
		 struct scatterlist *src,
		 unsigned int nbytes)
{
	struct scatter_walk walk_in, walk_out;
	struct crypto_tfm *tfm = desc->tfm;
	const unsigned int bsize = crypto_tfm_alg_blocksize(tfm);
	unsigned int alignmask = crypto_tfm_alg_alignmask(tfm);
	unsigned long buffer = 0;

	if (!nbytes)
		return 0;

	if (nbytes % bsize) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
		return -EINVAL;
	}

	scatterwalk_start(&walk_in, src);
	scatterwalk_start(&walk_out, dst);

	for(;;) {
		unsigned int n = nbytes;
		u8 *tmp = NULL;

		if (!scatterwalk_aligned(&walk_in, alignmask) ||
		    !scatterwalk_aligned(&walk_out, alignmask)) {
			if (!buffer) {
				buffer = __get_free_page(GFP_ATOMIC);
				if (!buffer)
					n = 0;
			}
			tmp = (u8 *)buffer;
		}

		scatterwalk_map(&walk_in, 0);
		scatterwalk_map(&walk_out, 1);

		n = scatterwalk_clamp(&walk_in, n);
		n = scatterwalk_clamp(&walk_out, n);

		if (likely(n >= bsize))
			n = crypt_fast(desc, &walk_in, &walk_out, n, tmp);
		else
			n = crypt_slow(desc, &walk_in, &walk_out, bsize);

		nbytes -= n;

		scatterwalk_done(&walk_in, 0, nbytes);
		scatterwalk_done(&walk_out, 1, nbytes);

		if (!nbytes)
			break;

		crypto_yield(tfm);
	}

	if (buffer)
		free_page(buffer);

	return 0;
}
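
/*
 * Path selection in crypt() above, by example: with a 16-byte block cipher,
 * if the current source scatterlist element has only 10 contiguous bytes
 * left, scatterwalk_clamp() reduces n below bsize and that block goes
 * through crypt_slow().  Otherwise crypt_fast() handles every whole block
 * available in the current mapping at once.  If the GFP_ATOMIC bounce page
 * cannot be allocated, n is forced to 0 so misaligned data also drops back
 * to crypt_slow(), which needs no extra page.
 */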

static int crypt_iv_unaligned(struct cipher_desc *desc,
			      struct scatterlist *dst,
			      struct scatterlist *src,
			      unsigned int nbytes)
{
	struct crypto_tfm *tfm = desc->tfm;
	unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
	u8 *iv = desc->info;

	if (unlikely(((unsigned long)iv & alignmask))) {
		unsigned int ivsize = tfm->crt_cipher.cit_ivsize;
		u8 buffer[ivsize + alignmask];
		u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
		int err;

		desc->info = memcpy(tmp, iv, ivsize);
		err = crypt(desc, dst, src, nbytes);
		memcpy(iv, tmp, ivsize);

		return err;
	}

	return crypt(desc, dst, src, nbytes);
}
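
/*
 * crypt_iv_unaligned() above: if the caller's IV does not satisfy the
 * algorithm's alignment mask, it is copied into an aligned stack buffer
 * for the duration of the request (optimized cia_*_cbc implementations
 * presumably rely on an aligned IV), and the updated chaining value is
 * copied back so the caller can continue the stream with its own buffer.
 */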

static unsigned int cbc_process_encrypt(const struct cipher_desc *desc,
					u8 *dst, const u8 *src,
					unsigned int nbytes)
{
	struct crypto_tfm *tfm = desc->tfm;
	void (*xor)(u8 *, const u8 *) = tfm->crt_u.cipher.cit_xor_block;
	int bsize = crypto_tfm_alg_blocksize(tfm);

	void (*fn)(void *, u8 *, const u8 *) = desc->crfn;
	u8 *iv = desc->info;
	unsigned int done = 0;

	nbytes -= bsize;

	do {
		xor(iv, src);
		fn(crypto_tfm_ctx(tfm), dst, iv);
		memcpy(iv, dst, bsize);

		src += bsize;
		dst += bsize;
	} while ((done += bsize) <= nbytes);

	return done;
}
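
/*
 * cbc_process_encrypt() above is the textbook CBC recurrence:
 *
 *	C[0] = E_K(P[0] ^ IV)
 *	C[i] = E_K(P[i] ^ C[i-1])
 *
 * iv tracks the most recent ciphertext block across calls, and the return
 * value is the number of bytes (whole blocks) actually processed.
 */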

static unsigned int cbc_process_decrypt(const struct cipher_desc *desc,
					u8 *dst, const u8 *src,
					unsigned int nbytes)
{
	struct crypto_tfm *tfm = desc->tfm;
	void (*xor)(u8 *, const u8 *) = tfm->crt_u.cipher.cit_xor_block;
	int bsize = crypto_tfm_alg_blocksize(tfm);

	u8 stack[src == dst ? bsize : 0];
	u8 *buf = stack;
	u8 **dst_p = src == dst ? &buf : &dst;

	void (*fn)(void *, u8 *, const u8 *) = desc->crfn;
	u8 *iv = desc->info;
	unsigned int done = 0;

	nbytes -= bsize;

	do {
		u8 *tmp_dst = *dst_p;

		fn(crypto_tfm_ctx(tfm), tmp_dst, src);
		xor(tmp_dst, iv);
		memcpy(iv, src, bsize);
		if (tmp_dst != dst)
			memcpy(dst, tmp_dst, bsize);

		src += bsize;
		dst += bsize;
	} while ((done += bsize) <= nbytes);

	return done;
}
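
/*
 * CBC decryption above computes P[i] = D_K(C[i]) ^ C[i-1].  For in-place
 * operation (src == dst) each block is decrypted into a one-block stack
 * buffer first: the ciphertext block must be saved into iv as the next
 * block's chaining value before the plaintext is copied over it.
 */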

static unsigned int ecb_process(const struct cipher_desc *desc, u8 *dst,
				const u8 *src, unsigned int nbytes)
{
	struct crypto_tfm *tfm = desc->tfm;
	int bsize = crypto_tfm_alg_blocksize(tfm);
	void (*fn)(void *, u8 *, const u8 *) = desc->crfn;
	unsigned int done = 0;

	nbytes -= bsize;

	do {
		fn(crypto_tfm_ctx(tfm), dst, src);

		src += bsize;
		dst += bsize;
	} while ((done += bsize) <= nbytes);

	return done;
}

static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
{
	struct cipher_alg *cia = &tfm->__crt_alg->cra_cipher;
	
	if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	} else
		return cia->cia_setkey(crypto_tfm_ctx(tfm), key, keylen,
		                       &tfm->crt_flags);
}

static int ecb_encrypt(struct crypto_tfm *tfm,
		       struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
	struct cipher_desc desc;
	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

	desc.tfm = tfm;
	desc.crfn = cipher->cia_encrypt;
	desc.prfn = cipher->cia_encrypt_ecb ?: ecb_process;

	return crypt(&desc, dst, src, nbytes);
}

static int ecb_decrypt(struct crypto_tfm *tfm,
                       struct scatterlist *dst,
                       struct scatterlist *src,
		       unsigned int nbytes)
{
	struct cipher_desc desc;
	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

	desc.tfm = tfm;
	desc.crfn = cipher->cia_decrypt;
	desc.prfn = cipher->cia_decrypt_ecb ?: ecb_process;

	return crypt(&desc, dst, src, nbytes);
}

static int cbc_encrypt(struct crypto_tfm *tfm,
                       struct scatterlist *dst,
                       struct scatterlist *src,
		       unsigned int nbytes)
{
	struct cipher_desc desc;
	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

	desc.tfm = tfm;
	desc.crfn = cipher->cia_encrypt;
	desc.prfn = cipher->cia_encrypt_cbc ?: cbc_process_encrypt;
	desc.info = tfm->crt_cipher.cit_iv;

	return crypt(&desc, dst, src, nbytes);
}

static int cbc_encrypt_iv(struct crypto_tfm *tfm,
                          struct scatterlist *dst,
                          struct scatterlist *src,
                          unsigned int nbytes, u8 *iv)
{
	struct cipher_desc desc;
	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

	desc.tfm = tfm;
	desc.crfn = cipher->cia_encrypt;
	desc.prfn = cipher->cia_encrypt_cbc ?: cbc_process_encrypt;
	desc.info = iv;

	return crypt_iv_unaligned(&desc, dst, src, nbytes);
}

static int cbc_decrypt(struct crypto_tfm *tfm,
                       struct scatterlist *dst,
                       struct scatterlist *src,
		       unsigned int nbytes)
{
	struct cipher_desc desc;
	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

	desc.tfm = tfm;
	desc.crfn = cipher->cia_decrypt;
	desc.prfn = cipher->cia_decrypt_cbc ?: cbc_process_decrypt;
	desc.info = tfm->crt_cipher.cit_iv;

	return crypt(&desc, dst, src, nbytes);
}

static int cbc_decrypt_iv(struct crypto_tfm *tfm,
                          struct scatterlist *dst,
                          struct scatterlist *src,
                          unsigned int nbytes, u8 *iv)
{
	struct cipher_desc desc;
	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

	desc.tfm = tfm;
	desc.crfn = cipher->cia_decrypt;
	desc.prfn = cipher->cia_decrypt_cbc ?: cbc_process_decrypt;
	desc.info = iv;

	return crypt_iv_unaligned(&desc, dst, src, nbytes);
}

static int nocrypt(struct crypto_tfm *tfm,
                   struct scatterlist *dst,
                   struct scatterlist *src,
		   unsigned int nbytes)
{
	return -ENOSYS;
}

static int nocrypt_iv(struct crypto_tfm *tfm,
                      struct scatterlist *dst,
                      struct scatterlist *src,
                      unsigned int nbytes, u8 *iv)
{
	return -ENOSYS;
}

int crypto_init_cipher_flags(struct crypto_tfm *tfm, u32 flags)
{
	u32 mode = flags & CRYPTO_TFM_MODE_MASK;
	tfm->crt_cipher.cit_mode = mode ? mode : CRYPTO_TFM_MODE_ECB;
	return 0;
}

int crypto_init_cipher_ops(struct crypto_tfm *tfm)
{
	int ret = 0;
	struct cipher_tfm *ops = &tfm->crt_cipher;

	ops->cit_setkey = setkey;

	switch (tfm->crt_cipher.cit_mode) {
	case CRYPTO_TFM_MODE_ECB:
		ops->cit_encrypt = ecb_encrypt;
		ops->cit_decrypt = ecb_decrypt;
		break;
		
	case CRYPTO_TFM_MODE_CBC:
		ops->cit_encrypt = cbc_encrypt;
		ops->cit_decrypt = cbc_decrypt;
		ops->cit_encrypt_iv = cbc_encrypt_iv;
		ops->cit_decrypt_iv = cbc_decrypt_iv;
		break;
		
	case CRYPTO_TFM_MODE_CFB:
		ops->cit_encrypt = nocrypt;
		ops->cit_decrypt = nocrypt;
		ops->cit_encrypt_iv = nocrypt_iv;
		ops->cit_decrypt_iv = nocrypt_iv;
		break;
	
	case CRYPTO_TFM_MODE_CTR:
		ops->cit_encrypt = nocrypt;
		ops->cit_decrypt = nocrypt;
		ops->cit_encrypt_iv = nocrypt_iv;
		ops->cit_decrypt_iv = nocrypt_iv;
		break;

	default:
		BUG();
	}
	
	if (ops->cit_mode == CRYPTO_TFM_MODE_CBC) {
		unsigned long align;
		unsigned long addr;
	    	
	    	switch (crypto_tfm_alg_blocksize(tfm)) {
	    	case 8:
	    		ops->cit_xor_block = xor_64;
	    		break;
	    		
	    	case 16:
	    		ops->cit_xor_block = xor_128;
	    		break;
	    		
	    	default:
	    		printk(KERN_WARNING "%s: block size %u not supported\n",
	    		       crypto_tfm_alg_name(tfm),
	    		       crypto_tfm_alg_blocksize(tfm));
	    		ret = -EINVAL;
	    		goto out;
	    	}
	    	
		ops->cit_ivsize = crypto_tfm_alg_blocksize(tfm);
		align = crypto_tfm_alg_alignmask(tfm) + 1;
		addr = (unsigned long)crypto_tfm_ctx(tfm);
		addr = ALIGN(addr, align);
		addr += ALIGN(tfm->__crt_alg->cra_ctxsize, align);
		ops->cit_iv = (void *)addr;
	}

out:	
	return ret;
}
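
/*
 * The CBC IV buffer is carved out of the tfm allocation itself: the
 * context pointer is rounded up to the alignment and then advanced by the
 * aligned context size.  For example, with a 484-byte context and a
 * 16-byte alignment requirement, cit_iv lands at ctx + ALIGN(484, 16) =
 * ctx + 496.  The tfm allocation is presumably sized elsewhere to leave
 * room for this.
 */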

void crypto_exit_cipher_ops(struct crypto_tfm *tfm)
{
}