// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 */

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>

#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/authenc.h>
#include <crypto/ctr.h>
#include <crypto/internal/des.h>
#include <crypto/sha.h>
#include <crypto/xts.h>
#include <crypto/skcipher.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>

#include "safexcel.h"

enum safexcel_cipher_direction {
	SAFEXCEL_ENCRYPT,
	SAFEXCEL_DECRYPT,
};

enum safexcel_cipher_alg {
	SAFEXCEL_DES,
	SAFEXCEL_3DES,
	SAFEXCEL_AES,
};

struct safexcel_cipher_ctx {
	struct safexcel_context base;
	struct safexcel_crypto_priv *priv;

	u32 mode;
	enum safexcel_cipher_alg alg;
	bool aead;

	__le32 key[16];
	u32 nonce;
	unsigned int key_len, xts;

	/* All the below is AEAD specific */
	u32 hash_alg;
	u32 state_sz;
	u32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
	u32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
};

struct safexcel_cipher_req {
	enum safexcel_cipher_direction direction;
	/* Number of result descriptors associated to the request */
	unsigned int rdescs;
	bool needs_inv;
	int  nr_src, nr_dst;
};

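/*
 * Fill in the IV part of the command token: for CTR modes this is the
 * 32-bit nonce, the 64-bit IV and a counter starting at 1; for the
 * other non-ECB modes it is the raw block-sized IV.
 */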
static void safexcel_cipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				  struct safexcel_command_desc *cdesc)
{
	u32 block_sz = 0;

	if (ctx->mode != CONTEXT_CONTROL_CRYPTO_MODE_ECB) {
		switch (ctx->alg) {
		case SAFEXCEL_DES:
			block_sz = DES_BLOCK_SIZE;
			cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
			break;
		case SAFEXCEL_3DES:
			block_sz = DES3_EDE_BLOCK_SIZE;
			cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
			break;
		case SAFEXCEL_AES:
			block_sz = AES_BLOCK_SIZE;
			cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
			break;
		}

		if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
			/* 32 bit nonce */
			cdesc->control_data.token[0] = ctx->nonce;
			/* 64 bit IV part */
			memcpy(&cdesc->control_data.token[1], iv, 8);
			/* 32 bit counter, start at 1 (big endian!) */
			cdesc->control_data.token[3] = cpu_to_be32(1);
		} else {
			memcpy(cdesc->control_data.token, iv, block_sz);
		}
	}
}

static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				    struct safexcel_command_desc *cdesc,
				    u32 length)
{
	struct safexcel_token *token;

	safexcel_cipher_token(ctx, iv, cdesc);

	/* skip over worst case IV of 4 dwords, no need to be exact */
	token = (struct safexcel_token *)(cdesc->control_data.token + 4);

	token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token[0].packet_length = length;
	token[0].stat = EIP197_TOKEN_STAT_LAST_PACKET |
			EIP197_TOKEN_STAT_LAST_HASH;
	token[0].instructions = EIP197_TOKEN_INS_LAST |
				EIP197_TOKEN_INS_TYPE_CRYPTO |
				EIP197_TOKEN_INS_TYPE_OUTPUT;
}

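/*
 * Build the AEAD token: a hash-only pass over the associated data, a
 * crypto+hash pass over the payload, then either insert the computed
 * digest (encrypt) or retrieve and verify it (decrypt) as the final
 * instructions.
 */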
static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc,
				enum safexcel_cipher_direction direction,
				u32 cryptlen, u32 assoclen, u32 digestsize)
{
	struct safexcel_token *token;

	safexcel_cipher_token(ctx, iv, cdesc);

	if (direction == SAFEXCEL_DECRYPT)
		cryptlen -= digestsize;

	if (direction == SAFEXCEL_ENCRYPT) {
		/* align end of instruction sequence to end of token */
		token = (struct safexcel_token *)(cdesc->control_data.token +
			 EIP197_MAX_TOKENS - 3);

		token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
		token[2].packet_length = digestsize;
		token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
				EIP197_TOKEN_STAT_LAST_PACKET;
		token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
					EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
	} else {
		/* align end of instruction sequence to end of token */
		token = (struct safexcel_token *)(cdesc->control_data.token +
			 EIP197_MAX_TOKENS - 4);

		token[2].opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
		token[2].packet_length = digestsize;
		token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
				EIP197_TOKEN_STAT_LAST_PACKET;
		token[2].instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;

		token[3].opcode = EIP197_TOKEN_OPCODE_VERIFY;
		token[3].packet_length = digestsize |
					 EIP197_TOKEN_HASH_RESULT_VERIFY;
		token[3].stat = EIP197_TOKEN_STAT_LAST_HASH |
				EIP197_TOKEN_STAT_LAST_PACKET;
		token[3].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
	}

	if (unlikely(!cryptlen)) {
		token[1].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		token[1].packet_length = assoclen;
		token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
		token[1].instructions = EIP197_TOKEN_INS_LAST |
					EIP197_TOKEN_INS_TYPE_HASH;
	} else {
		if (likely(assoclen)) {
			token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
			token[0].packet_length = assoclen;
			token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH;
		}

		token[1].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		token[1].packet_length = cryptlen;
		token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
		token[1].instructions = EIP197_TOKEN_INS_LAST |
					EIP197_TOKEN_INS_TYPE_CRYPTO |
					EIP197_TOKEN_INS_TYPE_HASH |
					EIP197_TOKEN_INS_TYPE_OUTPUT;
	}
}

static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
					const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret) {
		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return ret;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = len;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

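/*
 * authenc() setkey: split the blob into cipher and auth keys, derive
 * the HMAC ipad/opad state for the configured hash, and mark the
 * context record for invalidation whenever either key changed.
 */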
static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
				unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_ahash_export_state istate, ostate;
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_authenc_keys keys;
	struct crypto_aes_ctx aes;
	int err = -EINVAL;

	if (crypto_authenc_extractkeys(&keys, key, len) != 0)
		goto badkey;

	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
		/* Minimum keysize is minimum AES key size + nonce size */
		if (keys.enckeylen < (AES_MIN_KEY_SIZE +
				      CTR_RFC3686_NONCE_SIZE))
			goto badkey;
		/* last 4 bytes of key are the nonce! */
		ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
				      CTR_RFC3686_NONCE_SIZE);
		/* exclude the nonce here */
		keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
	}

	/* Encryption key */
	switch (ctx->alg) {
	case SAFEXCEL_3DES:
		err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
		if (unlikely(err))
			goto badkey_expflags;
		break;
	case SAFEXCEL_AES:
		err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
		if (unlikely(err))
			goto badkey;
		break;
	default:
		dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
		goto badkey;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
	    memcmp(ctx->key, keys.enckey, keys.enckeylen))
		ctx->base.needs_inv = true;

	/* Auth key */
	switch (ctx->hash_alg) {
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
		if (safexcel_hmac_setkey("safexcel-sha1", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
		if (safexcel_hmac_setkey("safexcel-sha224", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
		if (safexcel_hmac_setkey("safexcel-sha256", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
		if (safexcel_hmac_setkey("safexcel-sha384", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
		if (safexcel_hmac_setkey("safexcel-sha512", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	default:
		dev_err(priv->dev, "aead: unsupported hash algorithm\n");
		goto badkey;
	}

	crypto_aead_set_flags(ctfm, crypto_aead_get_flags(ctfm) &
				    CRYPTO_TFM_RES_MASK);

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
	    (memcmp(ctx->ipad, istate.state, ctx->state_sz) ||
	     memcmp(ctx->opad, ostate.state, ctx->state_sz)))
		ctx->base.needs_inv = true;

	/* Now copy the keys into the context */
	memcpy(ctx->key, keys.enckey, keys.enckeylen);
	ctx->key_len = keys.enckeylen;

	memcpy(ctx->ipad, &istate.state, ctx->state_sz);
	memcpy(ctx->opad, &ostate.state, ctx->state_sz);

	memzero_explicit(&keys, sizeof(keys));
	return 0;

badkey:
	crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
badkey_expflags:
	memzero_explicit(&keys, sizeof(keys));
	return err;
}

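/*
 * Program the per-request context control words: operation type and
 * direction, cipher algorithm and key size (for XTS, ctx->key_len
 * holds both keys, so shifting by ctx->xts yields the per-key size),
 * and the total size of the context record the engine must fetch.
 */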
static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
				    struct crypto_async_request *async,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_command_desc *cdesc)
{
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ctrl_size;

	if (ctx->aead) {
		if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
		else
			cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
	} else {
		cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_CRYPTO_OUT;

		/* The decryption control type is a combination of the
		 * encryption type and CONTEXT_CONTROL_TYPE_NULL_IN, for all
		 * types.
		 */
		if (sreq->direction == SAFEXCEL_DECRYPT)
			cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_NULL_IN;
	}

	cdesc->control_data.control0 |= CONTEXT_CONTROL_KEY_EN;
	cdesc->control_data.control1 |= ctx->mode;

	if (ctx->aead)
		cdesc->control_data.control0 |= CONTEXT_CONTROL_DIGEST_HMAC |
						ctx->hash_alg;

	if (ctx->alg == SAFEXCEL_DES) {
		cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_DES;
	} else if (ctx->alg == SAFEXCEL_3DES) {
		cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_3DES;
	} else if (ctx->alg == SAFEXCEL_AES) {
		switch (ctx->key_len >> ctx->xts) {
		case AES_KEYSIZE_128:
			cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES128;
			break;
		case AES_KEYSIZE_192:
			cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES192;
			break;
		case AES_KEYSIZE_256:
			cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES256;
			break;
		default:
			dev_err(priv->dev, "aes keysize not supported: %u\n",
				ctx->key_len >> ctx->xts);
			return -EINVAL;
		}
	}

	ctrl_size = ctx->key_len / sizeof(u32);
	if (ctx->aead)
		/* Take in account the ipad+opad digests */
		ctrl_size += ctx->state_sz / sizeof(u32) * 2;
	cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(ctrl_size);

	return 0;
}

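/*
 * Completion handler for regular (non-invalidation) requests: drain and
 * error-check all result descriptors, unmap the DMA buffers and, for
 * CBC encryption, copy the last ciphertext block back as the output IV.
 */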
static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
				      struct crypto_async_request *async,
				      struct scatterlist *src,
				      struct scatterlist *dst,
				      unsigned int cryptlen,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct skcipher_request *areq = skcipher_request_cast(async);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0;

	*ret = 0;

	if (unlikely(!sreq->rdescs))
		return 0;

	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: result: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (src == dst) {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}

	/*
	 * Update IV in req from last crypto output word for CBC modes
	 */
	if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
	    (sreq->direction == SAFEXCEL_ENCRYPT)) {
		/* For encrypt take the last output word */
		sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
				   crypto_skcipher_ivsize(skcipher),
				   (cryptlen -
				    crypto_skcipher_ivsize(skcipher)));
	}

	*should_complete = true;

	return ndesc;
}

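/*
 * Build the request on the rings: map src/dst scatterlists, emit one
 * command descriptor per source segment and one result descriptor per
 * destination segment, and attach the context control words and the
 * crypto token to the first command descriptor.
 */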
static int safexcel_send_req(struct crypto_async_request *base, int ring,
			     struct safexcel_cipher_req *sreq,
			     struct scatterlist *src, struct scatterlist *dst,
			     unsigned int cryptlen, unsigned int assoclen,
			     unsigned int digestsize, u8 *iv, int *commands,
			     int *results)
{
	struct skcipher_request *areq = skcipher_request_cast(base);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct safexcel_command_desc *cdesc;
	struct safexcel_command_desc *first_cdesc = NULL;
	struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
	struct scatterlist *sg;
	unsigned int totlen;
	unsigned int totlen_src = cryptlen + assoclen;
	unsigned int totlen_dst = totlen_src;
	int n_cdesc = 0, n_rdesc = 0;
	int queued, i, ret = 0;
	bool first = true;

	sreq->nr_src = sg_nents_for_len(src, totlen_src);

	if (ctx->aead) {
		/*
		 * AEAD has auth tag appended to output for encrypt and
		 * removed from the output for decrypt!
		 */
		if (sreq->direction == SAFEXCEL_DECRYPT)
			totlen_dst -= digestsize;
		else
			totlen_dst += digestsize;

		memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
		       ctx->ipad, ctx->state_sz);
		memcpy(ctx->base.ctxr->data + (ctx->key_len + ctx->state_sz) /
		       sizeof(u32),
		       ctx->opad, ctx->state_sz);
	} else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
		   (sreq->direction == SAFEXCEL_DECRYPT)) {
		/*
		 * Save IV from last crypto input word for CBC modes in decrypt
		 * direction. Need to do this first in case of inplace operation
		 * as it will be overwritten.
		 */
		sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
				   crypto_skcipher_ivsize(skcipher),
				   (totlen_src -
				    crypto_skcipher_ivsize(skcipher)));
	}

	sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);

	/*
	 * Remember actual input length, source buffer length may be
	 * updated in case of inline operation below.
	 */
	totlen = totlen_src;
	queued = totlen_src;

	if (src == dst) {
		sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
		sreq->nr_dst = sreq->nr_src;
		if (unlikely((totlen_src || totlen_dst) &&
		    (sreq->nr_src <= 0))) {
			dev_err(priv->dev, "In-place buffer not large enough (need %d bytes)!",
				max(totlen_src, totlen_dst));
			return -EINVAL;
		}
		dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
			dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
				totlen_src);
			return -EINVAL;
		}
		dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);

		if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
			dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
				totlen_dst);
			dma_unmap_sg(priv->dev, src, sreq->nr_src,
				     DMA_TO_DEVICE);
			return -EINVAL;
		}
		dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}

	memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);

	/* The EIP cannot deal with zero length input packets! */
	if (totlen == 0)
		totlen = 1;

	/* command descriptors */
	for_each_sg(src, sg, sreq->nr_src, i) {
		int len = sg_dma_len(sg);

		/* Do not overflow the request */
		if (queued - len < 0)
			len = queued;

		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
					   !(queued - len),
					   sg_dma_address(sg), len, totlen,
					   ctx->base.ctxr_dma);
		if (IS_ERR(cdesc)) {
			/* No space left in the command descriptor ring */
			ret = PTR_ERR(cdesc);
			goto cdesc_rollback;
		}
		n_cdesc++;

		if (n_cdesc == 1) {
			first_cdesc = cdesc;
		}

		queued -= len;
		if (!queued)
			break;
	}

	if (unlikely(!n_cdesc)) {
		/*
		 * Special case: zero length input buffer.
		 * The engine always needs the 1st command descriptor, however!
		 */
		first_cdesc = safexcel_add_cdesc(priv, ring, 1, 1, 0, 0, totlen,
						 ctx->base.ctxr_dma);
		n_cdesc = 1;
	}

	/* Add context control words and token to first command descriptor */
	safexcel_context_control(ctx, base, sreq, first_cdesc);
	if (ctx->aead)
		safexcel_aead_token(ctx, iv, first_cdesc,
				    sreq->direction, cryptlen,
				    assoclen, digestsize);
	else
		safexcel_skcipher_token(ctx, iv, first_cdesc,
					cryptlen);

	/* result descriptors */
	for_each_sg(dst, sg, sreq->nr_dst, i) {
		bool last = (i == sreq->nr_dst - 1);
		u32 len = sg_dma_len(sg);

		/* only allow the part of the buffer we know we need */
		if (len > totlen_dst)
			len = totlen_dst;
		if (unlikely(!len))
			break;
		totlen_dst -= len;

		/* skip over AAD space in buffer - not written */
		if (assoclen) {
			if (assoclen >= len) {
				assoclen -= len;
				continue;
			}
			rdesc = safexcel_add_rdesc(priv, ring, first, last,
						   sg_dma_address(sg) +
						   assoclen,
						   len - assoclen);
			assoclen = 0;
		} else {
			rdesc = safexcel_add_rdesc(priv, ring, first, last,
						   sg_dma_address(sg),
						   len);
		}
		if (IS_ERR(rdesc)) {
			/* No space left in the result descriptor ring */
			ret = PTR_ERR(rdesc);
			goto rdesc_rollback;
		}
		if (first) {
			first_rdesc = rdesc;
			first = false;
		}
		n_rdesc++;
	}

	if (unlikely(first)) {
		/*
		 * Special case: AEAD decrypt with only AAD data.
		 * In this case there is NO output data from the engine,
		 * but the engine still needs a result descriptor!
		 * Create a dummy one just for catching the result token.
		 */
		rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
		if (IS_ERR(rdesc)) {
			/* No space left in the result descriptor ring */
			ret = PTR_ERR(rdesc);
			goto rdesc_rollback;
		}
		first_rdesc = rdesc;
		n_rdesc = 1;
	}

	safexcel_rdr_req_set(priv, ring, first_rdesc, base);

	*commands = n_cdesc;
	*results = n_rdesc;
	return 0;

rdesc_rollback:
	for (i = 0; i < n_rdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
cdesc_rollback:
	for (i = 0; i < n_cdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);

	if (src == dst) {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}

	return ret;
}

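/*
 * Completion handler for a context-invalidation request: drain its
 * result descriptors, then either free the context record (transform
 * exit) or re-queue the original request on a newly selected ring.
 */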
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *base,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0, enq_ret;

	*ret = 0;

	if (unlikely(!sreq->rdescs))
		return 0;

	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: invalidate: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (ctx->base.exit_inv) {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);

		*should_complete = true;

		return ndesc;
	}

	ring = safexcel_select_ring(priv);
	ctx->base.ring = ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	if (enq_ret != -EINPROGRESS)
		*ret = enq_ret;

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	*should_complete = false;

	return ndesc;
}

static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
					   int ring,
					   struct crypto_async_request *async,
					   bool *should_complete, int *ret)
{
	struct skcipher_request *req = skcipher_request_cast(async);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	int err;

	if (sreq->needs_inv) {
		sreq->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async, sreq,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async, req->src,
						 req->dst, req->cryptlen, sreq,
						 should_complete, ret);
	}

	return err;
}

static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
				       int ring,
				       struct crypto_async_request *async,
				       bool *should_complete, int *ret)
{
	struct aead_request *req = aead_request_cast(async);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	int err;

	if (sreq->needs_inv) {
		sreq->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async, sreq,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async, req->src,
						 req->dst,
						 req->cryptlen + crypto_aead_authsize(tfm),
						 sreq, should_complete, ret);
	}

	return err;
}

static int safexcel_cipher_send_inv(struct crypto_async_request *base,
				    int ring, int *commands, int *results)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
	if (unlikely(ret))
		return ret;

	*commands = 1;
	*results = 1;

	return 0;
}

static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
				  int *commands, int *results)
{
	struct skcipher_request *req = skcipher_request_cast(async);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

	if (sreq->needs_inv) {
		ret = safexcel_cipher_send_inv(async, ring, commands, results);
	} else {
		struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
		u8 input_iv[AES_BLOCK_SIZE];

		/*
		 * Save input IV in case of CBC decrypt mode
		 * Will be overwritten with output IV prior to use!
		 */
		memcpy(input_iv, req->iv, crypto_skcipher_ivsize(skcipher));

		ret = safexcel_send_req(async, ring, sreq, req->src,
					req->dst, req->cryptlen, 0, 0, input_iv,
					commands, results);
	}

	sreq->rdescs = *results;
	return ret;
}

static int safexcel_aead_send(struct crypto_async_request *async, int ring,
			      int *commands, int *results)
{
	struct aead_request *req = aead_request_cast(async);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

	if (sreq->needs_inv)
		ret = safexcel_cipher_send_inv(async, ring, commands, results);
	else
		ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
					req->cryptlen, req->assoclen,
					crypto_aead_authsize(tfm), req->iv,
					commands, results);

	sreq->rdescs = *results;
	return ret;
}

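/*
 * Synchronously invalidate a context record: queue a dummy request
 * flagged for invalidation and wait for its completion callback.
 */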
static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
				    struct crypto_async_request *base,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_inv_result *result)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ring = ctx->base.ring;

	init_completion(&result->completion);

	ctx = crypto_tfm_ctx(base->tfm);
	ctx->base.exit_inv = true;
	sreq->needs_inv = true;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	wait_for_completion(&result->completion);

	if (result->error) {
		dev_warn(priv->dev,
			"cipher: sync: invalidate: completion error %d\n",
			 result->error);
		return result->error;
	}

	return 0;
}

static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_inv_result result = {};

	memset(req, 0, sizeof(struct skcipher_request));

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      safexcel_inv_complete, &result);
	skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}

static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	struct safexcel_inv_result result = {};

	memset(req, 0, sizeof(struct aead_request));

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  safexcel_inv_complete, &result);
	aead_request_set_tfm(req, __crypto_aead_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}

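/*
 * Common encrypt/decrypt entry point: allocate the DMA context record
 * on first use, request an invalidation if the cached key changed, and
 * hand the request over to the ring worker.
 */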
static int safexcel_queue_req(struct crypto_async_request *base,
			struct safexcel_cipher_req *sreq,
			enum safexcel_cipher_direction dir)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret, ring;

	sreq->needs_inv = false;
	sreq->direction = dir;

	if (ctx->base.ctxr) {
		if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
			sreq->needs_inv = true;
			ctx->base.needs_inv = false;
		}
	} else {
		ctx->base.ring = safexcel_select_ring(priv);
		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
						 EIP197_GFP_FLAGS(*base),
						 &ctx->base.ctxr_dma);
		if (!ctx->base.ctxr)
			return -ENOMEM;
	}

	ring = ctx->base.ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	return ret;
}

static int safexcel_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT);
}

static int safexcel_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT);
}

static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.skcipher.base);

	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
				    sizeof(struct safexcel_cipher_req));

	ctx->priv = tmpl->priv;

	ctx->base.send = safexcel_skcipher_send;
	ctx->base.handle_result = safexcel_skcipher_handle_result;
	return 0;
}

static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	memzero_explicit(ctx->key, sizeof(ctx->key));

	/* context not allocated, skip invalidation */
	if (!ctx->base.ctxr)
		return -ENOMEM;

	memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
	return 0;
}

static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_skcipher_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "skcipher: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}

static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_aead_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "aead: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}

static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_AES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ecb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "safexcel-ecb-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_AES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
	return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "safexcel-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
					   const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_aes_ctx aes;
	int ret, i;
	unsigned int keylen;

	/* last 4 bytes of key are the nonce! */
	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
	/* exclude the nonce here */
	keylen = len - CTR_RFC3686_NONCE_SIZE;
	ret = aes_expandkey(&aes, key, keylen);
	if (ret) {
		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return ret;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keylen / sizeof(u32); i++) {
			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < keylen / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = keylen;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_AES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesctr_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		/* Add nonce size */
		.min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.base = {
			.cra_name = "rfc3686(ctr(aes))",
			.cra_driver_name = "safexcel-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ctr_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
			       unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
	int ret;

	ret = verify_skcipher_des_key(ctfm, key);
	if (ret)
		return ret;

	/* if context exists and key changed, need to invalidate it */
	if (ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, len);
	ctx->key_len = len;

	return 0;
}

static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_DES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
	return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des)",
			.cra_driver_name = "safexcel-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_DES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ecb_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.base = {
			.cra_name = "ecb(des)",
			.cra_driver_name = "safexcel-ecb-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
				   const u8 *key, unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
	int err;

	err = verify_skcipher_des3_key(ctfm, key);
	if (err)
		return err;

	/* if context exists and key changed, need to invalidate it */
	if (ctx->base.ctxr_dma) {
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;
	}

	memcpy(ctx->key, key, len);

	ctx->key_len = len;

	return 0;
}

static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_3DES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
	return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des3_ede)",
			.cra_driver_name = "safexcel-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des3_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_3DES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.base = {
			.cra_name = "ecb(des3_ede)",
			.cra_driver_name = "safexcel-ecb-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des3_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
}

static int safexcel_aead_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
}

static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.aead.base);

	crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
				sizeof(struct safexcel_cipher_req));

	ctx->priv = tmpl->priv;

	ctx->alg  = SAFEXCEL_AES; /* default */
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
	ctx->aead = true;
	ctx->base.send = safexcel_aead_send;
	ctx->base.handle_result = safexcel_aead_handle_result;
	return 0;
}

static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	ctx->state_sz = SHA1_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
	ctx->state_sz = SHA256_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
	ctx->state_sz = SHA256_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
	ctx->state_sz = SHA512_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
	ctx->state_sz = SHA512_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha1_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha1_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha256_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha224_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha512_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha384_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

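/*
 * xts(aes) setkey: the key blob is twice the AES key size. The first
 * half is the data key, the second half the tweak key; both are stored
 * back to back in ctx->key, hence ctx->key_len = keylen << 1.
 */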
static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
					   const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_aes_ctx aes;
	int ret, i;
	unsigned int keylen;

	/* Check for illegal XTS keys */
	ret = xts_verify_key(ctfm, key, len);
	if (ret)
		return ret;

	/* Only half of the key data is cipher key */
	keylen = (len >> 1);
	ret = aes_expandkey(&aes, key, keylen);
	if (ret) {
		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return ret;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keylen / sizeof(u32); i++) {
			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < keylen / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	/* The other half is the tweak key */
	ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
	if (ret) {
		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return ret;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keylen / sizeof(u32); i++) {
			if (ctx->key[i + keylen / sizeof(u32)] !=
			    cpu_to_le32(aes.key_enc[i])) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < keylen / sizeof(u32); i++)
		ctx->key[i + keylen / sizeof(u32)] =
			cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = keylen << 1;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_AES;
	ctx->xts  = 1;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
	return 0;
}

static int safexcel_encrypt_xts(struct skcipher_request *req)
{
	if (req->cryptlen < XTS_BLOCK_SIZE)
		return -EINVAL;
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_ENCRYPT);
}

static int safexcel_decrypt_xts(struct skcipher_request *req)
{
	if (req->cryptlen < XTS_BLOCK_SIZE)
		return -EINVAL;
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_DECRYPT);
}

struct safexcel_alg_template safexcel_alg_xts_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XTS,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesxts_setkey,
		.encrypt = safexcel_encrypt_xts,
		.decrypt = safexcel_decrypt_xts,
		/* XTS actually uses 2 AES keys glued together */
		.min_keysize = AES_MIN_KEY_SIZE * 2,
		.max_keysize = AES_MAX_KEY_SIZE * 2,
		.ivsize = XTS_BLOCK_SIZE,
		.base = {
			.cra_name = "xts(aes)",
			.cra_driver_name = "safexcel-xts-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = XTS_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_xts_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};