diff --git a/arch/x86/crypto/crc32c-intel.c b/arch/x86/crypto/crc32c-intel.c
index 070afc5b6c94b404a6664a0e9ff61c714dc498ad..b9d00261703c56f45e25cf9740c412ce26c71e71 100644
--- a/arch/x86/crypto/crc32c-intel.c
+++ b/arch/x86/crypto/crc32c-intel.c
@@ -6,13 +6,22 @@
  * Intel(R) 64 and IA-32 Architectures Software Developer's Manual
  * Volume 2A: Instruction Set Reference, A-M
  *
- * Copyright (c) 2008 Austin Zhang <austin_zhang@linux.intel.com>
- * Copyright (c) 2008 Kent Liu <kent.liu@intel.com>
+ * Copyright (C) 2008 Intel Corporation
+ * Authors: Austin Zhang <austin_zhang@linux.intel.com>
+ *          Kent Liu <kent.liu@intel.com>
  *
  * This program is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License as published by the Free
- * Software Foundation; either version 2 of the License, or (at your option)
- * any later version.
+ * under the terms and conditions of the GNU General Public License,
+ * version 2, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
+ * more details.
+ *
+ * You should have received a copy of the GNU General Public License along with
+ * this program; if not, write to the Free Software Foundation, Inc.,
+ * 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
  *
  */
 #include <linux/init.h>
@@ -75,99 +84,92 @@ static u32 __pure crc32c_intel_le_hw(u32 crc, unsigned char const *p, size_t len
  * If your algorithm starts with ~0, then XOR with ~0 before you set
  * the seed.
  */
-static int crc32c_intel_setkey(struct crypto_ahash *hash, const u8 *key,
+static int crc32c_intel_setkey(struct crypto_shash *hash, const u8 *key,
 			unsigned int keylen)
 {
-	u32 *mctx = crypto_ahash_ctx(hash);
+	u32 *mctx = crypto_shash_ctx(hash);
 
 	if (keylen != sizeof(u32)) {
-		crypto_ahash_set_flags(hash, CRYPTO_TFM_RES_BAD_KEY_LEN);
+		crypto_shash_set_flags(hash, CRYPTO_TFM_RES_BAD_KEY_LEN);
 		return -EINVAL;
 	}
 	*mctx = le32_to_cpup((__le32 *)key);
 	return 0;
 }
 
-static int crc32c_intel_init(struct ahash_request *req)
+static int crc32c_intel_init(struct shash_desc *desc)
 {
-	u32 *mctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
-	u32 *crcp = ahash_request_ctx(req);
+	u32 *mctx = crypto_shash_ctx(desc->tfm);
+	u32 *crcp = shash_desc_ctx(desc);
 
 	*crcp = *mctx;
 
 	return 0;
 }
 
-static int crc32c_intel_update(struct ahash_request *req)
+static int crc32c_intel_update(struct shash_desc *desc, const u8 *data,
+			       unsigned int len)
 {
-	struct crypto_hash_walk walk;
-	u32 *crcp = ahash_request_ctx(req);
-	u32 crc = *crcp;
-	int nbytes;
-
-	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes;
-	   nbytes = crypto_hash_walk_done(&walk, 0))
-	crc = crc32c_intel_le_hw(crc, walk.data, nbytes);
+	u32 *crcp = shash_desc_ctx(desc);
 
-	*crcp = crc;
+	*crcp = crc32c_intel_le_hw(*crcp, data, len);
 	return 0;
 }
 
-static int crc32c_intel_final(struct ahash_request *req)
+static int __crc32c_intel_finup(u32 *crcp, const u8 *data, unsigned int len,
+				u8 *out)
 {
-	u32 *crcp = ahash_request_ctx(req);
-
-	*(__le32 *)req->result = ~cpu_to_le32p(crcp);
+	*(__le32 *)out = ~cpu_to_le32(crc32c_intel_le_hw(*crcp, data, len));
 	return 0;
 }
 
-static int crc32c_intel_digest(struct ahash_request *req)
+static int crc32c_intel_finup(struct shash_desc *desc, const u8 *data,
+			      unsigned int len, u8 *out)
 {
-	struct crypto_hash_walk walk;
-	u32 *mctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
-	u32 crc = *mctx;
-	int nbytes;
+	return __crc32c_intel_finup(shash_desc_ctx(desc), data, len, out);
+}
 
-	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes;
-	   nbytes = crypto_hash_walk_done(&walk, 0))
-		crc = crc32c_intel_le_hw(crc, walk.data, nbytes);
+static int crc32c_intel_final(struct shash_desc *desc, u8 *out)
+{
+	u32 *crcp = shash_desc_ctx(desc);
 
-	*(__le32 *)req->result = ~cpu_to_le32(crc);
+	*(__le32 *)out = ~cpu_to_le32p(crcp);
 	return 0;
 }
 
+static int crc32c_intel_digest(struct shash_desc *desc, const u8 *data,
+			       unsigned int len, u8 *out)
+{
+	return __crc32c_intel_finup(crypto_shash_ctx(desc->tfm), data, len,
+				    out);
+}
+
 static int crc32c_intel_cra_init(struct crypto_tfm *tfm)
 {
 	u32 *key = crypto_tfm_ctx(tfm);
 
 	*key = ~0;
 
-	tfm->crt_ahash.reqsize = sizeof(u32);
-
 	return 0;
 }
 
-static struct crypto_alg alg = {
-	.cra_name               =       "crc32c",
-	.cra_driver_name        =       "crc32c-intel",
-	.cra_priority           =       200,
-	.cra_flags              =       CRYPTO_ALG_TYPE_AHASH,
-	.cra_blocksize          =       CHKSUM_BLOCK_SIZE,
-	.cra_alignmask          =       3,
-	.cra_ctxsize            =       sizeof(u32),
-	.cra_module             =       THIS_MODULE,
-	.cra_list               =       LIST_HEAD_INIT(alg.cra_list),
-	.cra_init               =       crc32c_intel_cra_init,
-	.cra_type               =       &crypto_ahash_type,
-	.cra_u                  =       {
-		.ahash = {
-			.digestsize    =       CHKSUM_DIGEST_SIZE,
-			.setkey        =       crc32c_intel_setkey,
-			.init          =       crc32c_intel_init,
-			.update        =       crc32c_intel_update,
-			.final         =       crc32c_intel_final,
-			.digest        =       crc32c_intel_digest,
-		}
+static struct shash_alg alg = {
+	.setkey			=	crc32c_intel_setkey,
+	.init			=	crc32c_intel_init,
+	.update			=	crc32c_intel_update,
+	.final			=	crc32c_intel_final,
+	.finup			=	crc32c_intel_finup,
+	.digest			=	crc32c_intel_digest,
+	.descsize		=	sizeof(u32),
+	.digestsize		=	CHKSUM_DIGEST_SIZE,
+	.base			=	{
+		.cra_name		=	"crc32c",
+		.cra_driver_name	=	"crc32c-intel",
+		.cra_priority		=	200,
+		.cra_blocksize		=	CHKSUM_BLOCK_SIZE,
+		.cra_ctxsize		=	sizeof(u32),
+		.cra_module		=	THIS_MODULE,
+		.cra_init		=	crc32c_intel_cra_init,
 	}
 };
 
@@ -175,14 +177,14 @@ static struct crypto_alg alg = {
 static int __init crc32c_intel_mod_init(void)
 {
 	if (cpu_has_xmm4_2)
-		return crypto_register_alg(&alg);
+		return crypto_register_shash(&alg);
 	else
 		return -ENODEV;
 }
 
 static void __exit crc32c_intel_mod_fini(void)
 {
-	crypto_unregister_alg(&alg);
+	crypto_unregister_shash(&alg);
 }
 
 module_init(crc32c_intel_mod_init);
@@ -194,4 +196,3 @@ MODULE_LICENSE("GPL");
 
 MODULE_ALIAS("crc32c");
 MODULE_ALIAS("crc32c-intel");
-
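The hunks above convert crc32c-intel from the request-based ahash interface to the new synchronous shash interface: init/update/final/finup/digest become plain functions operating on a shash_desc, and the per-request state moves into descsize. Not part of the patch, and purely illustrative: a minimal sketch of how a caller might drive the converted algorithm through the shash API (crypto_alloc_shash(), crypto_shash_setkey(), crypto_shash_digest()); the function example_crc32c() and its buffer handling are hypothetical.

#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/slab.h>

/* Hypothetical caller: CRC32c of a linear buffer via the shash API. */
static int example_crc32c(const u8 *data, unsigned int len, u8 *out /* 4 bytes */)
{
	struct crypto_shash *tfm;
	struct shash_desc *desc;
	u32 seed = ~0U;		/* default seed; all-ones, so byte order is moot */
	int err;

	tfm = crypto_alloc_shash("crc32c", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* keylen must be sizeof(u32); see the setkey comment about ~0 seeds */
	err = crypto_shash_setkey(tfm, (const u8 *)&seed, sizeof(seed));
	if (err)
		goto out_free_tfm;

	/* descsize is sizeof(u32) for this driver; allocate desc + state */
	desc = kzalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
	if (!desc) {
		err = -ENOMEM;
		goto out_free_tfm;
	}
	desc->tfm = tfm;

	/* init + update + final in one call; writes the 4-byte LE digest */
	err = crypto_shash_digest(desc, data, len, out);

	kfree(desc);
out_free_tfm:
	crypto_free_shash(tfm);
	return err;
}
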
diff --git a/crypto/Kconfig b/crypto/Kconfig
index dc20a34ba5efaf9922cf225f1ec18a962b5f1024..8dde4fcf99c947b7c292be0a775b102e43260879 100644
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -102,6 +102,7 @@ config CRYPTO_NULL
 	tristate "Null algorithms"
 	select CRYPTO_ALGAPI
 	select CRYPTO_BLKCIPHER
+	select CRYPTO_HASH
 	help
 	  These are 'Null' algorithms, used by IPsec, which do nothing.
 
@@ -256,12 +257,10 @@ comment "Digest"
 config CRYPTO_CRC32C
 	tristate "CRC32c CRC algorithm"
 	select CRYPTO_HASH
-	select LIBCRC32C
 	help
 	  Castagnoli, et al Cyclic Redundancy-Check Algorithm.  Used
 	  by iSCSI for header and data digests and by others.
-	  See Castagnoli93.  This implementation uses lib/libcrc32c.
-	  Module will be crc32c.
+	  See Castagnoli93.  Module will be crc32c.
 
 config CRYPTO_CRC32C_INTEL
 	tristate "CRC32c INTEL hardware acceleration"
@@ -277,19 +276,19 @@ config CRYPTO_CRC32C_INTEL
 
 config CRYPTO_MD4
 	tristate "MD4 digest algorithm"
-	select CRYPTO_ALGAPI
+	select CRYPTO_HASH
 	help
 	  MD4 message digest algorithm (RFC1320).
 
 config CRYPTO_MD5
 	tristate "MD5 digest algorithm"
-	select CRYPTO_ALGAPI
+	select CRYPTO_HASH
 	help
 	  MD5 message digest algorithm (RFC1321).
 
 config CRYPTO_MICHAEL_MIC
 	tristate "Michael MIC keyed digest algorithm"
-	select CRYPTO_ALGAPI
+	select CRYPTO_HASH
 	help
 	  Michael MIC is used for message integrity protection in TKIP
 	  (IEEE 802.11i). This algorithm is required for TKIP, but it
@@ -298,7 +297,7 @@ config CRYPTO_MICHAEL_MIC
 
 config CRYPTO_RMD128
 	tristate "RIPEMD-128 digest algorithm"
-	select CRYPTO_ALGAPI
+	select CRYPTO_HASH
 	help
 	  RIPEMD-128 (ISO/IEC 10118-3:2004).
 
@@ -311,7 +310,7 @@ config CRYPTO_RMD128
 
 config CRYPTO_RMD160
 	tristate "RIPEMD-160 digest algorithm"
-	select CRYPTO_ALGAPI
+	select CRYPTO_HASH
 	help
 	  RIPEMD-160 (ISO/IEC 10118-3:2004).
 
@@ -328,7 +327,7 @@ config CRYPTO_RMD160
 
 config CRYPTO_RMD256
 	tristate "RIPEMD-256 digest algorithm"
-	select CRYPTO_ALGAPI
+	select CRYPTO_HASH
 	help
 	  RIPEMD-256 is an optional extension of RIPEMD-128 with a
 	  256 bit hash. It is intended for applications that require
@@ -340,7 +339,7 @@ config CRYPTO_RMD256
 
 config CRYPTO_RMD320
 	tristate "RIPEMD-320 digest algorithm"
-	select CRYPTO_ALGAPI
+	select CRYPTO_HASH
 	help
 	  RIPEMD-320 is an optional extension of RIPEMD-160 with a
 	  320 bit hash. It is intended for applications that require
@@ -352,13 +351,13 @@ config CRYPTO_RMD320
 
 config CRYPTO_SHA1
 	tristate "SHA1 digest algorithm"
-	select CRYPTO_ALGAPI
+	select CRYPTO_HASH
 	help
 	  SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2).
 
 config CRYPTO_SHA256
 	tristate "SHA224 and SHA256 digest algorithm"
-	select CRYPTO_ALGAPI
+	select CRYPTO_HASH
 	help
 	  SHA256 secure hash standard (DFIPS 180-2).
 
@@ -370,7 +369,7 @@ config CRYPTO_SHA256
 
 config CRYPTO_SHA512
 	tristate "SHA384 and SHA512 digest algorithms"
-	select CRYPTO_ALGAPI
+	select CRYPTO_HASH
 	help
 	  SHA512 secure hash standard (DFIPS 180-2).
 
@@ -382,7 +381,7 @@ config CRYPTO_SHA512
 
 config CRYPTO_TGR192
 	tristate "Tiger digest algorithms"
-	select CRYPTO_ALGAPI
+	select CRYPTO_HASH
 	help
 	  Tiger hash algorithm 192, 160 and 128-bit hashes
 
@@ -395,7 +394,7 @@ config CRYPTO_TGR192
 
 config CRYPTO_WP512
 	tristate "Whirlpool digest algorithms"
-	select CRYPTO_ALGAPI
+	select CRYPTO_HASH
 	help
 	  Whirlpool hash algorithm 512, 384 and 256-bit hashes
 
diff --git a/crypto/Makefile b/crypto/Makefile
index cd4a4ed078ff1252642dba2fc87093fa97eedb18..46b08bf2035fdfd5e404953aa2b5f8740efa4c82 100644
--- a/crypto/Makefile
+++ b/crypto/Makefile
@@ -22,6 +22,7 @@ obj-$(CONFIG_CRYPTO_SEQIV) += seqiv.o
 
 crypto_hash-objs := hash.o
 crypto_hash-objs += ahash.o
+crypto_hash-objs += shash.o
 obj-$(CONFIG_CRYPTO_HASH2) += crypto_hash.o
 
 cryptomgr-objs := algboss.o testmgr.o
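
The aes_generic.c change below replaces the boot-time gen_tabs() generator (and its __initdata pow/log/S-box scratch tables) with fully precomputed const lookup tables. For reference, the derivation performed by the removed generator can be reproduced stand-alone; the sketch that follows is a hypothetical user-space check, not part of the patch, rebuilding the GF(2^8) log/pow tables, the S-box, and the first few crypto_ft_tab entries so they can be compared against the constants added below.

#include <stdint.h>
#include <stdio.h>

/* Rebuild of the removed gen_tabs() logic (hypothetical user-space check). */
static uint8_t pow_tab[256], log_tab[256], sbx_tab[256];

#define rol32(x, r)	(((x) << (r)) | ((x) >> (32 - (r))))

static uint8_t f_mult(uint8_t a, uint8_t b)
{
	uint8_t aa = log_tab[a], cc = aa + log_tab[b];

	return pow_tab[cc + (cc < aa ? 1 : 0)];
}

#define ff_mult(a, b)	((a) && (b) ? f_mult(a, b) : 0)

int main(void)
{
	uint32_t i, t;
	uint8_t p, q;

	/* GF(2^8) log/pow tables, modular polynomial 0x11b, generator 0x03 */
	for (i = 0, p = 1; i < 256; ++i) {
		pow_tab[i] = p;
		log_tab[p] = (uint8_t)i;
		p ^= (p << 1) ^ (p & 0x80 ? 0x01b : 0);
	}
	log_tab[1] = 0;

	/* S-box: multiplicative inverse followed by the affine transform */
	for (i = 0; i < 256; ++i) {
		p = i ? pow_tab[255 - log_tab[i]] : 0;
		q = ((p >> 7) | (p << 1)) ^ ((p >> 6) | (p << 2));
		sbx_tab[i] = p ^ 0x63 ^ q ^ ((q >> 6) | (q << 2));
	}

	/* Column 0 of crypto_ft_tab; columns 1..3 are 8-bit-step rotations */
	for (i = 0; i < 4; ++i) {
		p = sbx_tab[i];
		t = (uint32_t)ff_mult(2, p) | ((uint32_t)p << 8) |
		    ((uint32_t)p << 16) | ((uint32_t)ff_mult(3, p) << 24);
		printf("ft[0][%u]=0x%08x ft[1][%u]=0x%08x\n",
		       i, t, i, rol32(t, 8));
	}
	return 0;
}

Printing i = 0 and i = 1 gives 0xa56363c6/0x6363c6a5 and 0x847c7cf8/0x7c7cf884, matching the first entries of the tables added below; indexing by the inverse S-box and using ff_mult(14/9/13/11), as the removed code did, yields crypto_it_tab and crypto_il_tab the same way.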
diff --git a/crypto/aes_generic.c b/crypto/aes_generic.c
index 136dc98d8a03f9f7d8d1a259a123c10dc03d4f4c..b8b66ec3883b74d8ea2b7b41f1298a4b01a32ca8 100644
--- a/crypto/aes_generic.c
+++ b/crypto/aes_generic.c
@@ -60,102 +60,1068 @@ static inline u8 byte(const u32 x, const unsigned n)
 	return x >> (n << 3);
 }
 
-static u8 pow_tab[256] __initdata;
-static u8 log_tab[256] __initdata;
-static u8 sbx_tab[256] __initdata;
-static u8 isb_tab[256] __initdata;
-static u32 rco_tab[10];
-
-u32 crypto_ft_tab[4][256];
-u32 crypto_fl_tab[4][256];
-u32 crypto_it_tab[4][256];
-u32 crypto_il_tab[4][256];
-
-EXPORT_SYMBOL_GPL(crypto_ft_tab);
-EXPORT_SYMBOL_GPL(crypto_fl_tab);
-EXPORT_SYMBOL_GPL(crypto_it_tab);
-EXPORT_SYMBOL_GPL(crypto_il_tab);
-
-static inline u8 __init f_mult(u8 a, u8 b)
-{
-	u8 aa = log_tab[a], cc = aa + log_tab[b];
-
-	return pow_tab[cc + (cc < aa ? 1 : 0)];
-}
-
-#define ff_mult(a, b)	(a && b ? f_mult(a, b) : 0)
-
-static void __init gen_tabs(void)
-{
-	u32 i, t;
-	u8 p, q;
-
-	/*
-	 * log and power tables for GF(2**8) finite field with
-	 * 0x011b as modular polynomial - the simplest primitive
-	 * root is 0x03, used here to generate the tables
-	 */
-
-	for (i = 0, p = 1; i < 256; ++i) {
-		pow_tab[i] = (u8) p;
-		log_tab[p] = (u8) i;
-
-		p ^= (p << 1) ^ (p & 0x80 ? 0x01b : 0);
+static const u32 rco_tab[10] = { 1, 2, 4, 8, 16, 32, 64, 128, 27, 54 };
+
+const u32 crypto_ft_tab[4][256] = {
+	{
+		0xa56363c6, 0x847c7cf8, 0x997777ee, 0x8d7b7bf6,
+		0x0df2f2ff, 0xbd6b6bd6, 0xb16f6fde, 0x54c5c591,
+		0x50303060, 0x03010102, 0xa96767ce, 0x7d2b2b56,
+		0x19fefee7, 0x62d7d7b5, 0xe6abab4d, 0x9a7676ec,
+		0x45caca8f, 0x9d82821f, 0x40c9c989, 0x877d7dfa,
+		0x15fafaef, 0xeb5959b2, 0xc947478e, 0x0bf0f0fb,
+		0xecadad41, 0x67d4d4b3, 0xfda2a25f, 0xeaafaf45,
+		0xbf9c9c23, 0xf7a4a453, 0x967272e4, 0x5bc0c09b,
+		0xc2b7b775, 0x1cfdfde1, 0xae93933d, 0x6a26264c,
+		0x5a36366c, 0x413f3f7e, 0x02f7f7f5, 0x4fcccc83,
+		0x5c343468, 0xf4a5a551, 0x34e5e5d1, 0x08f1f1f9,
+		0x937171e2, 0x73d8d8ab, 0x53313162, 0x3f15152a,
+		0x0c040408, 0x52c7c795, 0x65232346, 0x5ec3c39d,
+		0x28181830, 0xa1969637, 0x0f05050a, 0xb59a9a2f,
+		0x0907070e, 0x36121224, 0x9b80801b, 0x3de2e2df,
+		0x26ebebcd, 0x6927274e, 0xcdb2b27f, 0x9f7575ea,
+		0x1b090912, 0x9e83831d, 0x742c2c58, 0x2e1a1a34,
+		0x2d1b1b36, 0xb26e6edc, 0xee5a5ab4, 0xfba0a05b,
+		0xf65252a4, 0x4d3b3b76, 0x61d6d6b7, 0xceb3b37d,
+		0x7b292952, 0x3ee3e3dd, 0x712f2f5e, 0x97848413,
+		0xf55353a6, 0x68d1d1b9, 0x00000000, 0x2cededc1,
+		0x60202040, 0x1ffcfce3, 0xc8b1b179, 0xed5b5bb6,
+		0xbe6a6ad4, 0x46cbcb8d, 0xd9bebe67, 0x4b393972,
+		0xde4a4a94, 0xd44c4c98, 0xe85858b0, 0x4acfcf85,
+		0x6bd0d0bb, 0x2aefefc5, 0xe5aaaa4f, 0x16fbfbed,
+		0xc5434386, 0xd74d4d9a, 0x55333366, 0x94858511,
+		0xcf45458a, 0x10f9f9e9, 0x06020204, 0x817f7ffe,
+		0xf05050a0, 0x443c3c78, 0xba9f9f25, 0xe3a8a84b,
+		0xf35151a2, 0xfea3a35d, 0xc0404080, 0x8a8f8f05,
+		0xad92923f, 0xbc9d9d21, 0x48383870, 0x04f5f5f1,
+		0xdfbcbc63, 0xc1b6b677, 0x75dadaaf, 0x63212142,
+		0x30101020, 0x1affffe5, 0x0ef3f3fd, 0x6dd2d2bf,
+		0x4ccdcd81, 0x140c0c18, 0x35131326, 0x2fececc3,
+		0xe15f5fbe, 0xa2979735, 0xcc444488, 0x3917172e,
+		0x57c4c493, 0xf2a7a755, 0x827e7efc, 0x473d3d7a,
+		0xac6464c8, 0xe75d5dba, 0x2b191932, 0x957373e6,
+		0xa06060c0, 0x98818119, 0xd14f4f9e, 0x7fdcdca3,
+		0x66222244, 0x7e2a2a54, 0xab90903b, 0x8388880b,
+		0xca46468c, 0x29eeeec7, 0xd3b8b86b, 0x3c141428,
+		0x79dedea7, 0xe25e5ebc, 0x1d0b0b16, 0x76dbdbad,
+		0x3be0e0db, 0x56323264, 0x4e3a3a74, 0x1e0a0a14,
+		0xdb494992, 0x0a06060c, 0x6c242448, 0xe45c5cb8,
+		0x5dc2c29f, 0x6ed3d3bd, 0xefacac43, 0xa66262c4,
+		0xa8919139, 0xa4959531, 0x37e4e4d3, 0x8b7979f2,
+		0x32e7e7d5, 0x43c8c88b, 0x5937376e, 0xb76d6dda,
+		0x8c8d8d01, 0x64d5d5b1, 0xd24e4e9c, 0xe0a9a949,
+		0xb46c6cd8, 0xfa5656ac, 0x07f4f4f3, 0x25eaeacf,
+		0xaf6565ca, 0x8e7a7af4, 0xe9aeae47, 0x18080810,
+		0xd5baba6f, 0x887878f0, 0x6f25254a, 0x722e2e5c,
+		0x241c1c38, 0xf1a6a657, 0xc7b4b473, 0x51c6c697,
+		0x23e8e8cb, 0x7cdddda1, 0x9c7474e8, 0x211f1f3e,
+		0xdd4b4b96, 0xdcbdbd61, 0x868b8b0d, 0x858a8a0f,
+		0x907070e0, 0x423e3e7c, 0xc4b5b571, 0xaa6666cc,
+		0xd8484890, 0x05030306, 0x01f6f6f7, 0x120e0e1c,
+		0xa36161c2, 0x5f35356a, 0xf95757ae, 0xd0b9b969,
+		0x91868617, 0x58c1c199, 0x271d1d3a, 0xb99e9e27,
+		0x38e1e1d9, 0x13f8f8eb, 0xb398982b, 0x33111122,
+		0xbb6969d2, 0x70d9d9a9, 0x898e8e07, 0xa7949433,
+		0xb69b9b2d, 0x221e1e3c, 0x92878715, 0x20e9e9c9,
+		0x49cece87, 0xff5555aa, 0x78282850, 0x7adfdfa5,
+		0x8f8c8c03, 0xf8a1a159, 0x80898909, 0x170d0d1a,
+		0xdabfbf65, 0x31e6e6d7, 0xc6424284, 0xb86868d0,
+		0xc3414182, 0xb0999929, 0x772d2d5a, 0x110f0f1e,
+		0xcbb0b07b, 0xfc5454a8, 0xd6bbbb6d, 0x3a16162c,
+	}, {
+		0x6363c6a5, 0x7c7cf884, 0x7777ee99, 0x7b7bf68d,
+		0xf2f2ff0d, 0x6b6bd6bd, 0x6f6fdeb1, 0xc5c59154,
+		0x30306050, 0x01010203, 0x6767cea9, 0x2b2b567d,
+		0xfefee719, 0xd7d7b562, 0xabab4de6, 0x7676ec9a,
+		0xcaca8f45, 0x82821f9d, 0xc9c98940, 0x7d7dfa87,
+		0xfafaef15, 0x5959b2eb, 0x47478ec9, 0xf0f0fb0b,
+		0xadad41ec, 0xd4d4b367, 0xa2a25ffd, 0xafaf45ea,
+		0x9c9c23bf, 0xa4a453f7, 0x7272e496, 0xc0c09b5b,
+		0xb7b775c2, 0xfdfde11c, 0x93933dae, 0x26264c6a,
+		0x36366c5a, 0x3f3f7e41, 0xf7f7f502, 0xcccc834f,
+		0x3434685c, 0xa5a551f4, 0xe5e5d134, 0xf1f1f908,
+		0x7171e293, 0xd8d8ab73, 0x31316253, 0x15152a3f,
+		0x0404080c, 0xc7c79552, 0x23234665, 0xc3c39d5e,
+		0x18183028, 0x969637a1, 0x05050a0f, 0x9a9a2fb5,
+		0x07070e09, 0x12122436, 0x80801b9b, 0xe2e2df3d,
+		0xebebcd26, 0x27274e69, 0xb2b27fcd, 0x7575ea9f,
+		0x0909121b, 0x83831d9e, 0x2c2c5874, 0x1a1a342e,
+		0x1b1b362d, 0x6e6edcb2, 0x5a5ab4ee, 0xa0a05bfb,
+		0x5252a4f6, 0x3b3b764d, 0xd6d6b761, 0xb3b37dce,
+		0x2929527b, 0xe3e3dd3e, 0x2f2f5e71, 0x84841397,
+		0x5353a6f5, 0xd1d1b968, 0x00000000, 0xededc12c,
+		0x20204060, 0xfcfce31f, 0xb1b179c8, 0x5b5bb6ed,
+		0x6a6ad4be, 0xcbcb8d46, 0xbebe67d9, 0x3939724b,
+		0x4a4a94de, 0x4c4c98d4, 0x5858b0e8, 0xcfcf854a,
+		0xd0d0bb6b, 0xefefc52a, 0xaaaa4fe5, 0xfbfbed16,
+		0x434386c5, 0x4d4d9ad7, 0x33336655, 0x85851194,
+		0x45458acf, 0xf9f9e910, 0x02020406, 0x7f7ffe81,
+		0x5050a0f0, 0x3c3c7844, 0x9f9f25ba, 0xa8a84be3,
+		0x5151a2f3, 0xa3a35dfe, 0x404080c0, 0x8f8f058a,
+		0x92923fad, 0x9d9d21bc, 0x38387048, 0xf5f5f104,
+		0xbcbc63df, 0xb6b677c1, 0xdadaaf75, 0x21214263,
+		0x10102030, 0xffffe51a, 0xf3f3fd0e, 0xd2d2bf6d,
+		0xcdcd814c, 0x0c0c1814, 0x13132635, 0xececc32f,
+		0x5f5fbee1, 0x979735a2, 0x444488cc, 0x17172e39,
+		0xc4c49357, 0xa7a755f2, 0x7e7efc82, 0x3d3d7a47,
+		0x6464c8ac, 0x5d5dbae7, 0x1919322b, 0x7373e695,
+		0x6060c0a0, 0x81811998, 0x4f4f9ed1, 0xdcdca37f,
+		0x22224466, 0x2a2a547e, 0x90903bab, 0x88880b83,
+		0x46468cca, 0xeeeec729, 0xb8b86bd3, 0x1414283c,
+		0xdedea779, 0x5e5ebce2, 0x0b0b161d, 0xdbdbad76,
+		0xe0e0db3b, 0x32326456, 0x3a3a744e, 0x0a0a141e,
+		0x494992db, 0x06060c0a, 0x2424486c, 0x5c5cb8e4,
+		0xc2c29f5d, 0xd3d3bd6e, 0xacac43ef, 0x6262c4a6,
+		0x919139a8, 0x959531a4, 0xe4e4d337, 0x7979f28b,
+		0xe7e7d532, 0xc8c88b43, 0x37376e59, 0x6d6ddab7,
+		0x8d8d018c, 0xd5d5b164, 0x4e4e9cd2, 0xa9a949e0,
+		0x6c6cd8b4, 0x5656acfa, 0xf4f4f307, 0xeaeacf25,
+		0x6565caaf, 0x7a7af48e, 0xaeae47e9, 0x08081018,
+		0xbaba6fd5, 0x7878f088, 0x25254a6f, 0x2e2e5c72,
+		0x1c1c3824, 0xa6a657f1, 0xb4b473c7, 0xc6c69751,
+		0xe8e8cb23, 0xdddda17c, 0x7474e89c, 0x1f1f3e21,
+		0x4b4b96dd, 0xbdbd61dc, 0x8b8b0d86, 0x8a8a0f85,
+		0x7070e090, 0x3e3e7c42, 0xb5b571c4, 0x6666ccaa,
+		0x484890d8, 0x03030605, 0xf6f6f701, 0x0e0e1c12,
+		0x6161c2a3, 0x35356a5f, 0x5757aef9, 0xb9b969d0,
+		0x86861791, 0xc1c19958, 0x1d1d3a27, 0x9e9e27b9,
+		0xe1e1d938, 0xf8f8eb13, 0x98982bb3, 0x11112233,
+		0x6969d2bb, 0xd9d9a970, 0x8e8e0789, 0x949433a7,
+		0x9b9b2db6, 0x1e1e3c22, 0x87871592, 0xe9e9c920,
+		0xcece8749, 0x5555aaff, 0x28285078, 0xdfdfa57a,
+		0x8c8c038f, 0xa1a159f8, 0x89890980, 0x0d0d1a17,
+		0xbfbf65da, 0xe6e6d731, 0x424284c6, 0x6868d0b8,
+		0x414182c3, 0x999929b0, 0x2d2d5a77, 0x0f0f1e11,
+		0xb0b07bcb, 0x5454a8fc, 0xbbbb6dd6, 0x16162c3a,
+	}, {
+		0x63c6a563, 0x7cf8847c, 0x77ee9977, 0x7bf68d7b,
+		0xf2ff0df2, 0x6bd6bd6b, 0x6fdeb16f, 0xc59154c5,
+		0x30605030, 0x01020301, 0x67cea967, 0x2b567d2b,
+		0xfee719fe, 0xd7b562d7, 0xab4de6ab, 0x76ec9a76,
+		0xca8f45ca, 0x821f9d82, 0xc98940c9, 0x7dfa877d,
+		0xfaef15fa, 0x59b2eb59, 0x478ec947, 0xf0fb0bf0,
+		0xad41ecad, 0xd4b367d4, 0xa25ffda2, 0xaf45eaaf,
+		0x9c23bf9c, 0xa453f7a4, 0x72e49672, 0xc09b5bc0,
+		0xb775c2b7, 0xfde11cfd, 0x933dae93, 0x264c6a26,
+		0x366c5a36, 0x3f7e413f, 0xf7f502f7, 0xcc834fcc,
+		0x34685c34, 0xa551f4a5, 0xe5d134e5, 0xf1f908f1,
+		0x71e29371, 0xd8ab73d8, 0x31625331, 0x152a3f15,
+		0x04080c04, 0xc79552c7, 0x23466523, 0xc39d5ec3,
+		0x18302818, 0x9637a196, 0x050a0f05, 0x9a2fb59a,
+		0x070e0907, 0x12243612, 0x801b9b80, 0xe2df3de2,
+		0xebcd26eb, 0x274e6927, 0xb27fcdb2, 0x75ea9f75,
+		0x09121b09, 0x831d9e83, 0x2c58742c, 0x1a342e1a,
+		0x1b362d1b, 0x6edcb26e, 0x5ab4ee5a, 0xa05bfba0,
+		0x52a4f652, 0x3b764d3b, 0xd6b761d6, 0xb37dceb3,
+		0x29527b29, 0xe3dd3ee3, 0x2f5e712f, 0x84139784,
+		0x53a6f553, 0xd1b968d1, 0x00000000, 0xedc12ced,
+		0x20406020, 0xfce31ffc, 0xb179c8b1, 0x5bb6ed5b,
+		0x6ad4be6a, 0xcb8d46cb, 0xbe67d9be, 0x39724b39,
+		0x4a94de4a, 0x4c98d44c, 0x58b0e858, 0xcf854acf,
+		0xd0bb6bd0, 0xefc52aef, 0xaa4fe5aa, 0xfbed16fb,
+		0x4386c543, 0x4d9ad74d, 0x33665533, 0x85119485,
+		0x458acf45, 0xf9e910f9, 0x02040602, 0x7ffe817f,
+		0x50a0f050, 0x3c78443c, 0x9f25ba9f, 0xa84be3a8,
+		0x51a2f351, 0xa35dfea3, 0x4080c040, 0x8f058a8f,
+		0x923fad92, 0x9d21bc9d, 0x38704838, 0xf5f104f5,
+		0xbc63dfbc, 0xb677c1b6, 0xdaaf75da, 0x21426321,
+		0x10203010, 0xffe51aff, 0xf3fd0ef3, 0xd2bf6dd2,
+		0xcd814ccd, 0x0c18140c, 0x13263513, 0xecc32fec,
+		0x5fbee15f, 0x9735a297, 0x4488cc44, 0x172e3917,
+		0xc49357c4, 0xa755f2a7, 0x7efc827e, 0x3d7a473d,
+		0x64c8ac64, 0x5dbae75d, 0x19322b19, 0x73e69573,
+		0x60c0a060, 0x81199881, 0x4f9ed14f, 0xdca37fdc,
+		0x22446622, 0x2a547e2a, 0x903bab90, 0x880b8388,
+		0x468cca46, 0xeec729ee, 0xb86bd3b8, 0x14283c14,
+		0xdea779de, 0x5ebce25e, 0x0b161d0b, 0xdbad76db,
+		0xe0db3be0, 0x32645632, 0x3a744e3a, 0x0a141e0a,
+		0x4992db49, 0x060c0a06, 0x24486c24, 0x5cb8e45c,
+		0xc29f5dc2, 0xd3bd6ed3, 0xac43efac, 0x62c4a662,
+		0x9139a891, 0x9531a495, 0xe4d337e4, 0x79f28b79,
+		0xe7d532e7, 0xc88b43c8, 0x376e5937, 0x6ddab76d,
+		0x8d018c8d, 0xd5b164d5, 0x4e9cd24e, 0xa949e0a9,
+		0x6cd8b46c, 0x56acfa56, 0xf4f307f4, 0xeacf25ea,
+		0x65caaf65, 0x7af48e7a, 0xae47e9ae, 0x08101808,
+		0xba6fd5ba, 0x78f08878, 0x254a6f25, 0x2e5c722e,
+		0x1c38241c, 0xa657f1a6, 0xb473c7b4, 0xc69751c6,
+		0xe8cb23e8, 0xdda17cdd, 0x74e89c74, 0x1f3e211f,
+		0x4b96dd4b, 0xbd61dcbd, 0x8b0d868b, 0x8a0f858a,
+		0x70e09070, 0x3e7c423e, 0xb571c4b5, 0x66ccaa66,
+		0x4890d848, 0x03060503, 0xf6f701f6, 0x0e1c120e,
+		0x61c2a361, 0x356a5f35, 0x57aef957, 0xb969d0b9,
+		0x86179186, 0xc19958c1, 0x1d3a271d, 0x9e27b99e,
+		0xe1d938e1, 0xf8eb13f8, 0x982bb398, 0x11223311,
+		0x69d2bb69, 0xd9a970d9, 0x8e07898e, 0x9433a794,
+		0x9b2db69b, 0x1e3c221e, 0x87159287, 0xe9c920e9,
+		0xce8749ce, 0x55aaff55, 0x28507828, 0xdfa57adf,
+		0x8c038f8c, 0xa159f8a1, 0x89098089, 0x0d1a170d,
+		0xbf65dabf, 0xe6d731e6, 0x4284c642, 0x68d0b868,
+		0x4182c341, 0x9929b099, 0x2d5a772d, 0x0f1e110f,
+		0xb07bcbb0, 0x54a8fc54, 0xbb6dd6bb, 0x162c3a16,
+	}, {
+		0xc6a56363, 0xf8847c7c, 0xee997777, 0xf68d7b7b,
+		0xff0df2f2, 0xd6bd6b6b, 0xdeb16f6f, 0x9154c5c5,
+		0x60503030, 0x02030101, 0xcea96767, 0x567d2b2b,
+		0xe719fefe, 0xb562d7d7, 0x4de6abab, 0xec9a7676,
+		0x8f45caca, 0x1f9d8282, 0x8940c9c9, 0xfa877d7d,
+		0xef15fafa, 0xb2eb5959, 0x8ec94747, 0xfb0bf0f0,
+		0x41ecadad, 0xb367d4d4, 0x5ffda2a2, 0x45eaafaf,
+		0x23bf9c9c, 0x53f7a4a4, 0xe4967272, 0x9b5bc0c0,
+		0x75c2b7b7, 0xe11cfdfd, 0x3dae9393, 0x4c6a2626,
+		0x6c5a3636, 0x7e413f3f, 0xf502f7f7, 0x834fcccc,
+		0x685c3434, 0x51f4a5a5, 0xd134e5e5, 0xf908f1f1,
+		0xe2937171, 0xab73d8d8, 0x62533131, 0x2a3f1515,
+		0x080c0404, 0x9552c7c7, 0x46652323, 0x9d5ec3c3,
+		0x30281818, 0x37a19696, 0x0a0f0505, 0x2fb59a9a,
+		0x0e090707, 0x24361212, 0x1b9b8080, 0xdf3de2e2,
+		0xcd26ebeb, 0x4e692727, 0x7fcdb2b2, 0xea9f7575,
+		0x121b0909, 0x1d9e8383, 0x58742c2c, 0x342e1a1a,
+		0x362d1b1b, 0xdcb26e6e, 0xb4ee5a5a, 0x5bfba0a0,
+		0xa4f65252, 0x764d3b3b, 0xb761d6d6, 0x7dceb3b3,
+		0x527b2929, 0xdd3ee3e3, 0x5e712f2f, 0x13978484,
+		0xa6f55353, 0xb968d1d1, 0x00000000, 0xc12ceded,
+		0x40602020, 0xe31ffcfc, 0x79c8b1b1, 0xb6ed5b5b,
+		0xd4be6a6a, 0x8d46cbcb, 0x67d9bebe, 0x724b3939,
+		0x94de4a4a, 0x98d44c4c, 0xb0e85858, 0x854acfcf,
+		0xbb6bd0d0, 0xc52aefef, 0x4fe5aaaa, 0xed16fbfb,
+		0x86c54343, 0x9ad74d4d, 0x66553333, 0x11948585,
+		0x8acf4545, 0xe910f9f9, 0x04060202, 0xfe817f7f,
+		0xa0f05050, 0x78443c3c, 0x25ba9f9f, 0x4be3a8a8,
+		0xa2f35151, 0x5dfea3a3, 0x80c04040, 0x058a8f8f,
+		0x3fad9292, 0x21bc9d9d, 0x70483838, 0xf104f5f5,
+		0x63dfbcbc, 0x77c1b6b6, 0xaf75dada, 0x42632121,
+		0x20301010, 0xe51affff, 0xfd0ef3f3, 0xbf6dd2d2,
+		0x814ccdcd, 0x18140c0c, 0x26351313, 0xc32fecec,
+		0xbee15f5f, 0x35a29797, 0x88cc4444, 0x2e391717,
+		0x9357c4c4, 0x55f2a7a7, 0xfc827e7e, 0x7a473d3d,
+		0xc8ac6464, 0xbae75d5d, 0x322b1919, 0xe6957373,
+		0xc0a06060, 0x19988181, 0x9ed14f4f, 0xa37fdcdc,
+		0x44662222, 0x547e2a2a, 0x3bab9090, 0x0b838888,
+		0x8cca4646, 0xc729eeee, 0x6bd3b8b8, 0x283c1414,
+		0xa779dede, 0xbce25e5e, 0x161d0b0b, 0xad76dbdb,
+		0xdb3be0e0, 0x64563232, 0x744e3a3a, 0x141e0a0a,
+		0x92db4949, 0x0c0a0606, 0x486c2424, 0xb8e45c5c,
+		0x9f5dc2c2, 0xbd6ed3d3, 0x43efacac, 0xc4a66262,
+		0x39a89191, 0x31a49595, 0xd337e4e4, 0xf28b7979,
+		0xd532e7e7, 0x8b43c8c8, 0x6e593737, 0xdab76d6d,
+		0x018c8d8d, 0xb164d5d5, 0x9cd24e4e, 0x49e0a9a9,
+		0xd8b46c6c, 0xacfa5656, 0xf307f4f4, 0xcf25eaea,
+		0xcaaf6565, 0xf48e7a7a, 0x47e9aeae, 0x10180808,
+		0x6fd5baba, 0xf0887878, 0x4a6f2525, 0x5c722e2e,
+		0x38241c1c, 0x57f1a6a6, 0x73c7b4b4, 0x9751c6c6,
+		0xcb23e8e8, 0xa17cdddd, 0xe89c7474, 0x3e211f1f,
+		0x96dd4b4b, 0x61dcbdbd, 0x0d868b8b, 0x0f858a8a,
+		0xe0907070, 0x7c423e3e, 0x71c4b5b5, 0xccaa6666,
+		0x90d84848, 0x06050303, 0xf701f6f6, 0x1c120e0e,
+		0xc2a36161, 0x6a5f3535, 0xaef95757, 0x69d0b9b9,
+		0x17918686, 0x9958c1c1, 0x3a271d1d, 0x27b99e9e,
+		0xd938e1e1, 0xeb13f8f8, 0x2bb39898, 0x22331111,
+		0xd2bb6969, 0xa970d9d9, 0x07898e8e, 0x33a79494,
+		0x2db69b9b, 0x3c221e1e, 0x15928787, 0xc920e9e9,
+		0x8749cece, 0xaaff5555, 0x50782828, 0xa57adfdf,
+		0x038f8c8c, 0x59f8a1a1, 0x09808989, 0x1a170d0d,
+		0x65dabfbf, 0xd731e6e6, 0x84c64242, 0xd0b86868,
+		0x82c34141, 0x29b09999, 0x5a772d2d, 0x1e110f0f,
+		0x7bcbb0b0, 0xa8fc5454, 0x6dd6bbbb, 0x2c3a1616,
 	}
+};
 
-	log_tab[1] = 0;
-
-	for (i = 0, p = 1; i < 10; ++i) {
-		rco_tab[i] = p;
-
-		p = (p << 1) ^ (p & 0x80 ? 0x01b : 0);
+const u32 crypto_fl_tab[4][256] = {
+	{
+		0x00000063, 0x0000007c, 0x00000077, 0x0000007b,
+		0x000000f2, 0x0000006b, 0x0000006f, 0x000000c5,
+		0x00000030, 0x00000001, 0x00000067, 0x0000002b,
+		0x000000fe, 0x000000d7, 0x000000ab, 0x00000076,
+		0x000000ca, 0x00000082, 0x000000c9, 0x0000007d,
+		0x000000fa, 0x00000059, 0x00000047, 0x000000f0,
+		0x000000ad, 0x000000d4, 0x000000a2, 0x000000af,
+		0x0000009c, 0x000000a4, 0x00000072, 0x000000c0,
+		0x000000b7, 0x000000fd, 0x00000093, 0x00000026,
+		0x00000036, 0x0000003f, 0x000000f7, 0x000000cc,
+		0x00000034, 0x000000a5, 0x000000e5, 0x000000f1,
+		0x00000071, 0x000000d8, 0x00000031, 0x00000015,
+		0x00000004, 0x000000c7, 0x00000023, 0x000000c3,
+		0x00000018, 0x00000096, 0x00000005, 0x0000009a,
+		0x00000007, 0x00000012, 0x00000080, 0x000000e2,
+		0x000000eb, 0x00000027, 0x000000b2, 0x00000075,
+		0x00000009, 0x00000083, 0x0000002c, 0x0000001a,
+		0x0000001b, 0x0000006e, 0x0000005a, 0x000000a0,
+		0x00000052, 0x0000003b, 0x000000d6, 0x000000b3,
+		0x00000029, 0x000000e3, 0x0000002f, 0x00000084,
+		0x00000053, 0x000000d1, 0x00000000, 0x000000ed,
+		0x00000020, 0x000000fc, 0x000000b1, 0x0000005b,
+		0x0000006a, 0x000000cb, 0x000000be, 0x00000039,
+		0x0000004a, 0x0000004c, 0x00000058, 0x000000cf,
+		0x000000d0, 0x000000ef, 0x000000aa, 0x000000fb,
+		0x00000043, 0x0000004d, 0x00000033, 0x00000085,
+		0x00000045, 0x000000f9, 0x00000002, 0x0000007f,
+		0x00000050, 0x0000003c, 0x0000009f, 0x000000a8,
+		0x00000051, 0x000000a3, 0x00000040, 0x0000008f,
+		0x00000092, 0x0000009d, 0x00000038, 0x000000f5,
+		0x000000bc, 0x000000b6, 0x000000da, 0x00000021,
+		0x00000010, 0x000000ff, 0x000000f3, 0x000000d2,
+		0x000000cd, 0x0000000c, 0x00000013, 0x000000ec,
+		0x0000005f, 0x00000097, 0x00000044, 0x00000017,
+		0x000000c4, 0x000000a7, 0x0000007e, 0x0000003d,
+		0x00000064, 0x0000005d, 0x00000019, 0x00000073,
+		0x00000060, 0x00000081, 0x0000004f, 0x000000dc,
+		0x00000022, 0x0000002a, 0x00000090, 0x00000088,
+		0x00000046, 0x000000ee, 0x000000b8, 0x00000014,
+		0x000000de, 0x0000005e, 0x0000000b, 0x000000db,
+		0x000000e0, 0x00000032, 0x0000003a, 0x0000000a,
+		0x00000049, 0x00000006, 0x00000024, 0x0000005c,
+		0x000000c2, 0x000000d3, 0x000000ac, 0x00000062,
+		0x00000091, 0x00000095, 0x000000e4, 0x00000079,
+		0x000000e7, 0x000000c8, 0x00000037, 0x0000006d,
+		0x0000008d, 0x000000d5, 0x0000004e, 0x000000a9,
+		0x0000006c, 0x00000056, 0x000000f4, 0x000000ea,
+		0x00000065, 0x0000007a, 0x000000ae, 0x00000008,
+		0x000000ba, 0x00000078, 0x00000025, 0x0000002e,
+		0x0000001c, 0x000000a6, 0x000000b4, 0x000000c6,
+		0x000000e8, 0x000000dd, 0x00000074, 0x0000001f,
+		0x0000004b, 0x000000bd, 0x0000008b, 0x0000008a,
+		0x00000070, 0x0000003e, 0x000000b5, 0x00000066,
+		0x00000048, 0x00000003, 0x000000f6, 0x0000000e,
+		0x00000061, 0x00000035, 0x00000057, 0x000000b9,
+		0x00000086, 0x000000c1, 0x0000001d, 0x0000009e,
+		0x000000e1, 0x000000f8, 0x00000098, 0x00000011,
+		0x00000069, 0x000000d9, 0x0000008e, 0x00000094,
+		0x0000009b, 0x0000001e, 0x00000087, 0x000000e9,
+		0x000000ce, 0x00000055, 0x00000028, 0x000000df,
+		0x0000008c, 0x000000a1, 0x00000089, 0x0000000d,
+		0x000000bf, 0x000000e6, 0x00000042, 0x00000068,
+		0x00000041, 0x00000099, 0x0000002d, 0x0000000f,
+		0x000000b0, 0x00000054, 0x000000bb, 0x00000016,
+	}, {
+		0x00006300, 0x00007c00, 0x00007700, 0x00007b00,
+		0x0000f200, 0x00006b00, 0x00006f00, 0x0000c500,
+		0x00003000, 0x00000100, 0x00006700, 0x00002b00,
+		0x0000fe00, 0x0000d700, 0x0000ab00, 0x00007600,
+		0x0000ca00, 0x00008200, 0x0000c900, 0x00007d00,
+		0x0000fa00, 0x00005900, 0x00004700, 0x0000f000,
+		0x0000ad00, 0x0000d400, 0x0000a200, 0x0000af00,
+		0x00009c00, 0x0000a400, 0x00007200, 0x0000c000,
+		0x0000b700, 0x0000fd00, 0x00009300, 0x00002600,
+		0x00003600, 0x00003f00, 0x0000f700, 0x0000cc00,
+		0x00003400, 0x0000a500, 0x0000e500, 0x0000f100,
+		0x00007100, 0x0000d800, 0x00003100, 0x00001500,
+		0x00000400, 0x0000c700, 0x00002300, 0x0000c300,
+		0x00001800, 0x00009600, 0x00000500, 0x00009a00,
+		0x00000700, 0x00001200, 0x00008000, 0x0000e200,
+		0x0000eb00, 0x00002700, 0x0000b200, 0x00007500,
+		0x00000900, 0x00008300, 0x00002c00, 0x00001a00,
+		0x00001b00, 0x00006e00, 0x00005a00, 0x0000a000,
+		0x00005200, 0x00003b00, 0x0000d600, 0x0000b300,
+		0x00002900, 0x0000e300, 0x00002f00, 0x00008400,
+		0x00005300, 0x0000d100, 0x00000000, 0x0000ed00,
+		0x00002000, 0x0000fc00, 0x0000b100, 0x00005b00,
+		0x00006a00, 0x0000cb00, 0x0000be00, 0x00003900,
+		0x00004a00, 0x00004c00, 0x00005800, 0x0000cf00,
+		0x0000d000, 0x0000ef00, 0x0000aa00, 0x0000fb00,
+		0x00004300, 0x00004d00, 0x00003300, 0x00008500,
+		0x00004500, 0x0000f900, 0x00000200, 0x00007f00,
+		0x00005000, 0x00003c00, 0x00009f00, 0x0000a800,
+		0x00005100, 0x0000a300, 0x00004000, 0x00008f00,
+		0x00009200, 0x00009d00, 0x00003800, 0x0000f500,
+		0x0000bc00, 0x0000b600, 0x0000da00, 0x00002100,
+		0x00001000, 0x0000ff00, 0x0000f300, 0x0000d200,
+		0x0000cd00, 0x00000c00, 0x00001300, 0x0000ec00,
+		0x00005f00, 0x00009700, 0x00004400, 0x00001700,
+		0x0000c400, 0x0000a700, 0x00007e00, 0x00003d00,
+		0x00006400, 0x00005d00, 0x00001900, 0x00007300,
+		0x00006000, 0x00008100, 0x00004f00, 0x0000dc00,
+		0x00002200, 0x00002a00, 0x00009000, 0x00008800,
+		0x00004600, 0x0000ee00, 0x0000b800, 0x00001400,
+		0x0000de00, 0x00005e00, 0x00000b00, 0x0000db00,
+		0x0000e000, 0x00003200, 0x00003a00, 0x00000a00,
+		0x00004900, 0x00000600, 0x00002400, 0x00005c00,
+		0x0000c200, 0x0000d300, 0x0000ac00, 0x00006200,
+		0x00009100, 0x00009500, 0x0000e400, 0x00007900,
+		0x0000e700, 0x0000c800, 0x00003700, 0x00006d00,
+		0x00008d00, 0x0000d500, 0x00004e00, 0x0000a900,
+		0x00006c00, 0x00005600, 0x0000f400, 0x0000ea00,
+		0x00006500, 0x00007a00, 0x0000ae00, 0x00000800,
+		0x0000ba00, 0x00007800, 0x00002500, 0x00002e00,
+		0x00001c00, 0x0000a600, 0x0000b400, 0x0000c600,
+		0x0000e800, 0x0000dd00, 0x00007400, 0x00001f00,
+		0x00004b00, 0x0000bd00, 0x00008b00, 0x00008a00,
+		0x00007000, 0x00003e00, 0x0000b500, 0x00006600,
+		0x00004800, 0x00000300, 0x0000f600, 0x00000e00,
+		0x00006100, 0x00003500, 0x00005700, 0x0000b900,
+		0x00008600, 0x0000c100, 0x00001d00, 0x00009e00,
+		0x0000e100, 0x0000f800, 0x00009800, 0x00001100,
+		0x00006900, 0x0000d900, 0x00008e00, 0x00009400,
+		0x00009b00, 0x00001e00, 0x00008700, 0x0000e900,
+		0x0000ce00, 0x00005500, 0x00002800, 0x0000df00,
+		0x00008c00, 0x0000a100, 0x00008900, 0x00000d00,
+		0x0000bf00, 0x0000e600, 0x00004200, 0x00006800,
+		0x00004100, 0x00009900, 0x00002d00, 0x00000f00,
+		0x0000b000, 0x00005400, 0x0000bb00, 0x00001600,
+	}, {
+		0x00630000, 0x007c0000, 0x00770000, 0x007b0000,
+		0x00f20000, 0x006b0000, 0x006f0000, 0x00c50000,
+		0x00300000, 0x00010000, 0x00670000, 0x002b0000,
+		0x00fe0000, 0x00d70000, 0x00ab0000, 0x00760000,
+		0x00ca0000, 0x00820000, 0x00c90000, 0x007d0000,
+		0x00fa0000, 0x00590000, 0x00470000, 0x00f00000,
+		0x00ad0000, 0x00d40000, 0x00a20000, 0x00af0000,
+		0x009c0000, 0x00a40000, 0x00720000, 0x00c00000,
+		0x00b70000, 0x00fd0000, 0x00930000, 0x00260000,
+		0x00360000, 0x003f0000, 0x00f70000, 0x00cc0000,
+		0x00340000, 0x00a50000, 0x00e50000, 0x00f10000,
+		0x00710000, 0x00d80000, 0x00310000, 0x00150000,
+		0x00040000, 0x00c70000, 0x00230000, 0x00c30000,
+		0x00180000, 0x00960000, 0x00050000, 0x009a0000,
+		0x00070000, 0x00120000, 0x00800000, 0x00e20000,
+		0x00eb0000, 0x00270000, 0x00b20000, 0x00750000,
+		0x00090000, 0x00830000, 0x002c0000, 0x001a0000,
+		0x001b0000, 0x006e0000, 0x005a0000, 0x00a00000,
+		0x00520000, 0x003b0000, 0x00d60000, 0x00b30000,
+		0x00290000, 0x00e30000, 0x002f0000, 0x00840000,
+		0x00530000, 0x00d10000, 0x00000000, 0x00ed0000,
+		0x00200000, 0x00fc0000, 0x00b10000, 0x005b0000,
+		0x006a0000, 0x00cb0000, 0x00be0000, 0x00390000,
+		0x004a0000, 0x004c0000, 0x00580000, 0x00cf0000,
+		0x00d00000, 0x00ef0000, 0x00aa0000, 0x00fb0000,
+		0x00430000, 0x004d0000, 0x00330000, 0x00850000,
+		0x00450000, 0x00f90000, 0x00020000, 0x007f0000,
+		0x00500000, 0x003c0000, 0x009f0000, 0x00a80000,
+		0x00510000, 0x00a30000, 0x00400000, 0x008f0000,
+		0x00920000, 0x009d0000, 0x00380000, 0x00f50000,
+		0x00bc0000, 0x00b60000, 0x00da0000, 0x00210000,
+		0x00100000, 0x00ff0000, 0x00f30000, 0x00d20000,
+		0x00cd0000, 0x000c0000, 0x00130000, 0x00ec0000,
+		0x005f0000, 0x00970000, 0x00440000, 0x00170000,
+		0x00c40000, 0x00a70000, 0x007e0000, 0x003d0000,
+		0x00640000, 0x005d0000, 0x00190000, 0x00730000,
+		0x00600000, 0x00810000, 0x004f0000, 0x00dc0000,
+		0x00220000, 0x002a0000, 0x00900000, 0x00880000,
+		0x00460000, 0x00ee0000, 0x00b80000, 0x00140000,
+		0x00de0000, 0x005e0000, 0x000b0000, 0x00db0000,
+		0x00e00000, 0x00320000, 0x003a0000, 0x000a0000,
+		0x00490000, 0x00060000, 0x00240000, 0x005c0000,
+		0x00c20000, 0x00d30000, 0x00ac0000, 0x00620000,
+		0x00910000, 0x00950000, 0x00e40000, 0x00790000,
+		0x00e70000, 0x00c80000, 0x00370000, 0x006d0000,
+		0x008d0000, 0x00d50000, 0x004e0000, 0x00a90000,
+		0x006c0000, 0x00560000, 0x00f40000, 0x00ea0000,
+		0x00650000, 0x007a0000, 0x00ae0000, 0x00080000,
+		0x00ba0000, 0x00780000, 0x00250000, 0x002e0000,
+		0x001c0000, 0x00a60000, 0x00b40000, 0x00c60000,
+		0x00e80000, 0x00dd0000, 0x00740000, 0x001f0000,
+		0x004b0000, 0x00bd0000, 0x008b0000, 0x008a0000,
+		0x00700000, 0x003e0000, 0x00b50000, 0x00660000,
+		0x00480000, 0x00030000, 0x00f60000, 0x000e0000,
+		0x00610000, 0x00350000, 0x00570000, 0x00b90000,
+		0x00860000, 0x00c10000, 0x001d0000, 0x009e0000,
+		0x00e10000, 0x00f80000, 0x00980000, 0x00110000,
+		0x00690000, 0x00d90000, 0x008e0000, 0x00940000,
+		0x009b0000, 0x001e0000, 0x00870000, 0x00e90000,
+		0x00ce0000, 0x00550000, 0x00280000, 0x00df0000,
+		0x008c0000, 0x00a10000, 0x00890000, 0x000d0000,
+		0x00bf0000, 0x00e60000, 0x00420000, 0x00680000,
+		0x00410000, 0x00990000, 0x002d0000, 0x000f0000,
+		0x00b00000, 0x00540000, 0x00bb0000, 0x00160000,
+	}, {
+		0x63000000, 0x7c000000, 0x77000000, 0x7b000000,
+		0xf2000000, 0x6b000000, 0x6f000000, 0xc5000000,
+		0x30000000, 0x01000000, 0x67000000, 0x2b000000,
+		0xfe000000, 0xd7000000, 0xab000000, 0x76000000,
+		0xca000000, 0x82000000, 0xc9000000, 0x7d000000,
+		0xfa000000, 0x59000000, 0x47000000, 0xf0000000,
+		0xad000000, 0xd4000000, 0xa2000000, 0xaf000000,
+		0x9c000000, 0xa4000000, 0x72000000, 0xc0000000,
+		0xb7000000, 0xfd000000, 0x93000000, 0x26000000,
+		0x36000000, 0x3f000000, 0xf7000000, 0xcc000000,
+		0x34000000, 0xa5000000, 0xe5000000, 0xf1000000,
+		0x71000000, 0xd8000000, 0x31000000, 0x15000000,
+		0x04000000, 0xc7000000, 0x23000000, 0xc3000000,
+		0x18000000, 0x96000000, 0x05000000, 0x9a000000,
+		0x07000000, 0x12000000, 0x80000000, 0xe2000000,
+		0xeb000000, 0x27000000, 0xb2000000, 0x75000000,
+		0x09000000, 0x83000000, 0x2c000000, 0x1a000000,
+		0x1b000000, 0x6e000000, 0x5a000000, 0xa0000000,
+		0x52000000, 0x3b000000, 0xd6000000, 0xb3000000,
+		0x29000000, 0xe3000000, 0x2f000000, 0x84000000,
+		0x53000000, 0xd1000000, 0x00000000, 0xed000000,
+		0x20000000, 0xfc000000, 0xb1000000, 0x5b000000,
+		0x6a000000, 0xcb000000, 0xbe000000, 0x39000000,
+		0x4a000000, 0x4c000000, 0x58000000, 0xcf000000,
+		0xd0000000, 0xef000000, 0xaa000000, 0xfb000000,
+		0x43000000, 0x4d000000, 0x33000000, 0x85000000,
+		0x45000000, 0xf9000000, 0x02000000, 0x7f000000,
+		0x50000000, 0x3c000000, 0x9f000000, 0xa8000000,
+		0x51000000, 0xa3000000, 0x40000000, 0x8f000000,
+		0x92000000, 0x9d000000, 0x38000000, 0xf5000000,
+		0xbc000000, 0xb6000000, 0xda000000, 0x21000000,
+		0x10000000, 0xff000000, 0xf3000000, 0xd2000000,
+		0xcd000000, 0x0c000000, 0x13000000, 0xec000000,
+		0x5f000000, 0x97000000, 0x44000000, 0x17000000,
+		0xc4000000, 0xa7000000, 0x7e000000, 0x3d000000,
+		0x64000000, 0x5d000000, 0x19000000, 0x73000000,
+		0x60000000, 0x81000000, 0x4f000000, 0xdc000000,
+		0x22000000, 0x2a000000, 0x90000000, 0x88000000,
+		0x46000000, 0xee000000, 0xb8000000, 0x14000000,
+		0xde000000, 0x5e000000, 0x0b000000, 0xdb000000,
+		0xe0000000, 0x32000000, 0x3a000000, 0x0a000000,
+		0x49000000, 0x06000000, 0x24000000, 0x5c000000,
+		0xc2000000, 0xd3000000, 0xac000000, 0x62000000,
+		0x91000000, 0x95000000, 0xe4000000, 0x79000000,
+		0xe7000000, 0xc8000000, 0x37000000, 0x6d000000,
+		0x8d000000, 0xd5000000, 0x4e000000, 0xa9000000,
+		0x6c000000, 0x56000000, 0xf4000000, 0xea000000,
+		0x65000000, 0x7a000000, 0xae000000, 0x08000000,
+		0xba000000, 0x78000000, 0x25000000, 0x2e000000,
+		0x1c000000, 0xa6000000, 0xb4000000, 0xc6000000,
+		0xe8000000, 0xdd000000, 0x74000000, 0x1f000000,
+		0x4b000000, 0xbd000000, 0x8b000000, 0x8a000000,
+		0x70000000, 0x3e000000, 0xb5000000, 0x66000000,
+		0x48000000, 0x03000000, 0xf6000000, 0x0e000000,
+		0x61000000, 0x35000000, 0x57000000, 0xb9000000,
+		0x86000000, 0xc1000000, 0x1d000000, 0x9e000000,
+		0xe1000000, 0xf8000000, 0x98000000, 0x11000000,
+		0x69000000, 0xd9000000, 0x8e000000, 0x94000000,
+		0x9b000000, 0x1e000000, 0x87000000, 0xe9000000,
+		0xce000000, 0x55000000, 0x28000000, 0xdf000000,
+		0x8c000000, 0xa1000000, 0x89000000, 0x0d000000,
+		0xbf000000, 0xe6000000, 0x42000000, 0x68000000,
+		0x41000000, 0x99000000, 0x2d000000, 0x0f000000,
+		0xb0000000, 0x54000000, 0xbb000000, 0x16000000,
 	}
+};
 
-	for (i = 0; i < 256; ++i) {
-		p = (i ? pow_tab[255 - log_tab[i]] : 0);
-		q = ((p >> 7) | (p << 1)) ^ ((p >> 6) | (p << 2));
-		p ^= 0x63 ^ q ^ ((q >> 6) | (q << 2));
-		sbx_tab[i] = p;
-		isb_tab[p] = (u8) i;
+const u32 crypto_it_tab[4][256] = {
+	{
+		0x50a7f451, 0x5365417e, 0xc3a4171a, 0x965e273a,
+		0xcb6bab3b, 0xf1459d1f, 0xab58faac, 0x9303e34b,
+		0x55fa3020, 0xf66d76ad, 0x9176cc88, 0x254c02f5,
+		0xfcd7e54f, 0xd7cb2ac5, 0x80443526, 0x8fa362b5,
+		0x495ab1de, 0x671bba25, 0x980eea45, 0xe1c0fe5d,
+		0x02752fc3, 0x12f04c81, 0xa397468d, 0xc6f9d36b,
+		0xe75f8f03, 0x959c9215, 0xeb7a6dbf, 0xda595295,
+		0x2d83bed4, 0xd3217458, 0x2969e049, 0x44c8c98e,
+		0x6a89c275, 0x78798ef4, 0x6b3e5899, 0xdd71b927,
+		0xb64fe1be, 0x17ad88f0, 0x66ac20c9, 0xb43ace7d,
+		0x184adf63, 0x82311ae5, 0x60335197, 0x457f5362,
+		0xe07764b1, 0x84ae6bbb, 0x1ca081fe, 0x942b08f9,
+		0x58684870, 0x19fd458f, 0x876cde94, 0xb7f87b52,
+		0x23d373ab, 0xe2024b72, 0x578f1fe3, 0x2aab5566,
+		0x0728ebb2, 0x03c2b52f, 0x9a7bc586, 0xa50837d3,
+		0xf2872830, 0xb2a5bf23, 0xba6a0302, 0x5c8216ed,
+		0x2b1ccf8a, 0x92b479a7, 0xf0f207f3, 0xa1e2694e,
+		0xcdf4da65, 0xd5be0506, 0x1f6234d1, 0x8afea6c4,
+		0x9d532e34, 0xa055f3a2, 0x32e18a05, 0x75ebf6a4,
+		0x39ec830b, 0xaaef6040, 0x069f715e, 0x51106ebd,
+		0xf98a213e, 0x3d06dd96, 0xae053edd, 0x46bde64d,
+		0xb58d5491, 0x055dc471, 0x6fd40604, 0xff155060,
+		0x24fb9819, 0x97e9bdd6, 0xcc434089, 0x779ed967,
+		0xbd42e8b0, 0x888b8907, 0x385b19e7, 0xdbeec879,
+		0x470a7ca1, 0xe90f427c, 0xc91e84f8, 0x00000000,
+		0x83868009, 0x48ed2b32, 0xac70111e, 0x4e725a6c,
+		0xfbff0efd, 0x5638850f, 0x1ed5ae3d, 0x27392d36,
+		0x64d90f0a, 0x21a65c68, 0xd1545b9b, 0x3a2e3624,
+		0xb1670a0c, 0x0fe75793, 0xd296eeb4, 0x9e919b1b,
+		0x4fc5c080, 0xa220dc61, 0x694b775a, 0x161a121c,
+		0x0aba93e2, 0xe52aa0c0, 0x43e0223c, 0x1d171b12,
+		0x0b0d090e, 0xadc78bf2, 0xb9a8b62d, 0xc8a91e14,
+		0x8519f157, 0x4c0775af, 0xbbdd99ee, 0xfd607fa3,
+		0x9f2601f7, 0xbcf5725c, 0xc53b6644, 0x347efb5b,
+		0x7629438b, 0xdcc623cb, 0x68fcedb6, 0x63f1e4b8,
+		0xcadc31d7, 0x10856342, 0x40229713, 0x2011c684,
+		0x7d244a85, 0xf83dbbd2, 0x1132f9ae, 0x6da129c7,
+		0x4b2f9e1d, 0xf330b2dc, 0xec52860d, 0xd0e3c177,
+		0x6c16b32b, 0x99b970a9, 0xfa489411, 0x2264e947,
+		0xc48cfca8, 0x1a3ff0a0, 0xd82c7d56, 0xef903322,
+		0xc74e4987, 0xc1d138d9, 0xfea2ca8c, 0x360bd498,
+		0xcf81f5a6, 0x28de7aa5, 0x268eb7da, 0xa4bfad3f,
+		0xe49d3a2c, 0x0d927850, 0x9bcc5f6a, 0x62467e54,
+		0xc2138df6, 0xe8b8d890, 0x5ef7392e, 0xf5afc382,
+		0xbe805d9f, 0x7c93d069, 0xa92dd56f, 0xb31225cf,
+		0x3b99acc8, 0xa77d1810, 0x6e639ce8, 0x7bbb3bdb,
+		0x097826cd, 0xf418596e, 0x01b79aec, 0xa89a4f83,
+		0x656e95e6, 0x7ee6ffaa, 0x08cfbc21, 0xe6e815ef,
+		0xd99be7ba, 0xce366f4a, 0xd4099fea, 0xd67cb029,
+		0xafb2a431, 0x31233f2a, 0x3094a5c6, 0xc066a235,
+		0x37bc4e74, 0xa6ca82fc, 0xb0d090e0, 0x15d8a733,
+		0x4a9804f1, 0xf7daec41, 0x0e50cd7f, 0x2ff69117,
+		0x8dd64d76, 0x4db0ef43, 0x544daacc, 0xdf0496e4,
+		0xe3b5d19e, 0x1b886a4c, 0xb81f2cc1, 0x7f516546,
+		0x04ea5e9d, 0x5d358c01, 0x737487fa, 0x2e410bfb,
+		0x5a1d67b3, 0x52d2db92, 0x335610e9, 0x1347d66d,
+		0x8c61d79a, 0x7a0ca137, 0x8e14f859, 0x893c13eb,
+		0xee27a9ce, 0x35c961b7, 0xede51ce1, 0x3cb1477a,
+		0x59dfd29c, 0x3f73f255, 0x79ce1418, 0xbf37c773,
+		0xeacdf753, 0x5baafd5f, 0x146f3ddf, 0x86db4478,
+		0x81f3afca, 0x3ec468b9, 0x2c342438, 0x5f40a3c2,
+		0x72c31d16, 0x0c25e2bc, 0x8b493c28, 0x41950dff,
+		0x7101a839, 0xdeb30c08, 0x9ce4b4d8, 0x90c15664,
+		0x6184cb7b, 0x70b632d5, 0x745c6c48, 0x4257b8d0,
+	}, {
+		0xa7f45150, 0x65417e53, 0xa4171ac3, 0x5e273a96,
+		0x6bab3bcb, 0x459d1ff1, 0x58faacab, 0x03e34b93,
+		0xfa302055, 0x6d76adf6, 0x76cc8891, 0x4c02f525,
+		0xd7e54ffc, 0xcb2ac5d7, 0x44352680, 0xa362b58f,
+		0x5ab1de49, 0x1bba2567, 0x0eea4598, 0xc0fe5de1,
+		0x752fc302, 0xf04c8112, 0x97468da3, 0xf9d36bc6,
+		0x5f8f03e7, 0x9c921595, 0x7a6dbfeb, 0x595295da,
+		0x83bed42d, 0x217458d3, 0x69e04929, 0xc8c98e44,
+		0x89c2756a, 0x798ef478, 0x3e58996b, 0x71b927dd,
+		0x4fe1beb6, 0xad88f017, 0xac20c966, 0x3ace7db4,
+		0x4adf6318, 0x311ae582, 0x33519760, 0x7f536245,
+		0x7764b1e0, 0xae6bbb84, 0xa081fe1c, 0x2b08f994,
+		0x68487058, 0xfd458f19, 0x6cde9487, 0xf87b52b7,
+		0xd373ab23, 0x024b72e2, 0x8f1fe357, 0xab55662a,
+		0x28ebb207, 0xc2b52f03, 0x7bc5869a, 0x0837d3a5,
+		0x872830f2, 0xa5bf23b2, 0x6a0302ba, 0x8216ed5c,
+		0x1ccf8a2b, 0xb479a792, 0xf207f3f0, 0xe2694ea1,
+		0xf4da65cd, 0xbe0506d5, 0x6234d11f, 0xfea6c48a,
+		0x532e349d, 0x55f3a2a0, 0xe18a0532, 0xebf6a475,
+		0xec830b39, 0xef6040aa, 0x9f715e06, 0x106ebd51,
+		0x8a213ef9, 0x06dd963d, 0x053eddae, 0xbde64d46,
+		0x8d5491b5, 0x5dc47105, 0xd406046f, 0x155060ff,
+		0xfb981924, 0xe9bdd697, 0x434089cc, 0x9ed96777,
+		0x42e8b0bd, 0x8b890788, 0x5b19e738, 0xeec879db,
+		0x0a7ca147, 0x0f427ce9, 0x1e84f8c9, 0x00000000,
+		0x86800983, 0xed2b3248, 0x70111eac, 0x725a6c4e,
+		0xff0efdfb, 0x38850f56, 0xd5ae3d1e, 0x392d3627,
+		0xd90f0a64, 0xa65c6821, 0x545b9bd1, 0x2e36243a,
+		0x670a0cb1, 0xe757930f, 0x96eeb4d2, 0x919b1b9e,
+		0xc5c0804f, 0x20dc61a2, 0x4b775a69, 0x1a121c16,
+		0xba93e20a, 0x2aa0c0e5, 0xe0223c43, 0x171b121d,
+		0x0d090e0b, 0xc78bf2ad, 0xa8b62db9, 0xa91e14c8,
+		0x19f15785, 0x0775af4c, 0xdd99eebb, 0x607fa3fd,
+		0x2601f79f, 0xf5725cbc, 0x3b6644c5, 0x7efb5b34,
+		0x29438b76, 0xc623cbdc, 0xfcedb668, 0xf1e4b863,
+		0xdc31d7ca, 0x85634210, 0x22971340, 0x11c68420,
+		0x244a857d, 0x3dbbd2f8, 0x32f9ae11, 0xa129c76d,
+		0x2f9e1d4b, 0x30b2dcf3, 0x52860dec, 0xe3c177d0,
+		0x16b32b6c, 0xb970a999, 0x489411fa, 0x64e94722,
+		0x8cfca8c4, 0x3ff0a01a, 0x2c7d56d8, 0x903322ef,
+		0x4e4987c7, 0xd138d9c1, 0xa2ca8cfe, 0x0bd49836,
+		0x81f5a6cf, 0xde7aa528, 0x8eb7da26, 0xbfad3fa4,
+		0x9d3a2ce4, 0x9278500d, 0xcc5f6a9b, 0x467e5462,
+		0x138df6c2, 0xb8d890e8, 0xf7392e5e, 0xafc382f5,
+		0x805d9fbe, 0x93d0697c, 0x2dd56fa9, 0x1225cfb3,
+		0x99acc83b, 0x7d1810a7, 0x639ce86e, 0xbb3bdb7b,
+		0x7826cd09, 0x18596ef4, 0xb79aec01, 0x9a4f83a8,
+		0x6e95e665, 0xe6ffaa7e, 0xcfbc2108, 0xe815efe6,
+		0x9be7bad9, 0x366f4ace, 0x099fead4, 0x7cb029d6,
+		0xb2a431af, 0x233f2a31, 0x94a5c630, 0x66a235c0,
+		0xbc4e7437, 0xca82fca6, 0xd090e0b0, 0xd8a73315,
+		0x9804f14a, 0xdaec41f7, 0x50cd7f0e, 0xf691172f,
+		0xd64d768d, 0xb0ef434d, 0x4daacc54, 0x0496e4df,
+		0xb5d19ee3, 0x886a4c1b, 0x1f2cc1b8, 0x5165467f,
+		0xea5e9d04, 0x358c015d, 0x7487fa73, 0x410bfb2e,
+		0x1d67b35a, 0xd2db9252, 0x5610e933, 0x47d66d13,
+		0x61d79a8c, 0x0ca1377a, 0x14f8598e, 0x3c13eb89,
+		0x27a9ceee, 0xc961b735, 0xe51ce1ed, 0xb1477a3c,
+		0xdfd29c59, 0x73f2553f, 0xce141879, 0x37c773bf,
+		0xcdf753ea, 0xaafd5f5b, 0x6f3ddf14, 0xdb447886,
+		0xf3afca81, 0xc468b93e, 0x3424382c, 0x40a3c25f,
+		0xc31d1672, 0x25e2bc0c, 0x493c288b, 0x950dff41,
+		0x01a83971, 0xb30c08de, 0xe4b4d89c, 0xc1566490,
+		0x84cb7b61, 0xb632d570, 0x5c6c4874, 0x57b8d042,
+	}, {
+		0xf45150a7, 0x417e5365, 0x171ac3a4, 0x273a965e,
+		0xab3bcb6b, 0x9d1ff145, 0xfaacab58, 0xe34b9303,
+		0x302055fa, 0x76adf66d, 0xcc889176, 0x02f5254c,
+		0xe54ffcd7, 0x2ac5d7cb, 0x35268044, 0x62b58fa3,
+		0xb1de495a, 0xba25671b, 0xea45980e, 0xfe5de1c0,
+		0x2fc30275, 0x4c8112f0, 0x468da397, 0xd36bc6f9,
+		0x8f03e75f, 0x9215959c, 0x6dbfeb7a, 0x5295da59,
+		0xbed42d83, 0x7458d321, 0xe0492969, 0xc98e44c8,
+		0xc2756a89, 0x8ef47879, 0x58996b3e, 0xb927dd71,
+		0xe1beb64f, 0x88f017ad, 0x20c966ac, 0xce7db43a,
+		0xdf63184a, 0x1ae58231, 0x51976033, 0x5362457f,
+		0x64b1e077, 0x6bbb84ae, 0x81fe1ca0, 0x08f9942b,
+		0x48705868, 0x458f19fd, 0xde94876c, 0x7b52b7f8,
+		0x73ab23d3, 0x4b72e202, 0x1fe3578f, 0x55662aab,
+		0xebb20728, 0xb52f03c2, 0xc5869a7b, 0x37d3a508,
+		0x2830f287, 0xbf23b2a5, 0x0302ba6a, 0x16ed5c82,
+		0xcf8a2b1c, 0x79a792b4, 0x07f3f0f2, 0x694ea1e2,
+		0xda65cdf4, 0x0506d5be, 0x34d11f62, 0xa6c48afe,
+		0x2e349d53, 0xf3a2a055, 0x8a0532e1, 0xf6a475eb,
+		0x830b39ec, 0x6040aaef, 0x715e069f, 0x6ebd5110,
+		0x213ef98a, 0xdd963d06, 0x3eddae05, 0xe64d46bd,
+		0x5491b58d, 0xc471055d, 0x06046fd4, 0x5060ff15,
+		0x981924fb, 0xbdd697e9, 0x4089cc43, 0xd967779e,
+		0xe8b0bd42, 0x8907888b, 0x19e7385b, 0xc879dbee,
+		0x7ca1470a, 0x427ce90f, 0x84f8c91e, 0x00000000,
+		0x80098386, 0x2b3248ed, 0x111eac70, 0x5a6c4e72,
+		0x0efdfbff, 0x850f5638, 0xae3d1ed5, 0x2d362739,
+		0x0f0a64d9, 0x5c6821a6, 0x5b9bd154, 0x36243a2e,
+		0x0a0cb167, 0x57930fe7, 0xeeb4d296, 0x9b1b9e91,
+		0xc0804fc5, 0xdc61a220, 0x775a694b, 0x121c161a,
+		0x93e20aba, 0xa0c0e52a, 0x223c43e0, 0x1b121d17,
+		0x090e0b0d, 0x8bf2adc7, 0xb62db9a8, 0x1e14c8a9,
+		0xf1578519, 0x75af4c07, 0x99eebbdd, 0x7fa3fd60,
+		0x01f79f26, 0x725cbcf5, 0x6644c53b, 0xfb5b347e,
+		0x438b7629, 0x23cbdcc6, 0xedb668fc, 0xe4b863f1,
+		0x31d7cadc, 0x63421085, 0x97134022, 0xc6842011,
+		0x4a857d24, 0xbbd2f83d, 0xf9ae1132, 0x29c76da1,
+		0x9e1d4b2f, 0xb2dcf330, 0x860dec52, 0xc177d0e3,
+		0xb32b6c16, 0x70a999b9, 0x9411fa48, 0xe9472264,
+		0xfca8c48c, 0xf0a01a3f, 0x7d56d82c, 0x3322ef90,
+		0x4987c74e, 0x38d9c1d1, 0xca8cfea2, 0xd498360b,
+		0xf5a6cf81, 0x7aa528de, 0xb7da268e, 0xad3fa4bf,
+		0x3a2ce49d, 0x78500d92, 0x5f6a9bcc, 0x7e546246,
+		0x8df6c213, 0xd890e8b8, 0x392e5ef7, 0xc382f5af,
+		0x5d9fbe80, 0xd0697c93, 0xd56fa92d, 0x25cfb312,
+		0xacc83b99, 0x1810a77d, 0x9ce86e63, 0x3bdb7bbb,
+		0x26cd0978, 0x596ef418, 0x9aec01b7, 0x4f83a89a,
+		0x95e6656e, 0xffaa7ee6, 0xbc2108cf, 0x15efe6e8,
+		0xe7bad99b, 0x6f4ace36, 0x9fead409, 0xb029d67c,
+		0xa431afb2, 0x3f2a3123, 0xa5c63094, 0xa235c066,
+		0x4e7437bc, 0x82fca6ca, 0x90e0b0d0, 0xa73315d8,
+		0x04f14a98, 0xec41f7da, 0xcd7f0e50, 0x91172ff6,
+		0x4d768dd6, 0xef434db0, 0xaacc544d, 0x96e4df04,
+		0xd19ee3b5, 0x6a4c1b88, 0x2cc1b81f, 0x65467f51,
+		0x5e9d04ea, 0x8c015d35, 0x87fa7374, 0x0bfb2e41,
+		0x67b35a1d, 0xdb9252d2, 0x10e93356, 0xd66d1347,
+		0xd79a8c61, 0xa1377a0c, 0xf8598e14, 0x13eb893c,
+		0xa9ceee27, 0x61b735c9, 0x1ce1ede5, 0x477a3cb1,
+		0xd29c59df, 0xf2553f73, 0x141879ce, 0xc773bf37,
+		0xf753eacd, 0xfd5f5baa, 0x3ddf146f, 0x447886db,
+		0xafca81f3, 0x68b93ec4, 0x24382c34, 0xa3c25f40,
+		0x1d1672c3, 0xe2bc0c25, 0x3c288b49, 0x0dff4195,
+		0xa8397101, 0x0c08deb3, 0xb4d89ce4, 0x566490c1,
+		0xcb7b6184, 0x32d570b6, 0x6c48745c, 0xb8d04257,
+	}, {
+		0x5150a7f4, 0x7e536541, 0x1ac3a417, 0x3a965e27,
+		0x3bcb6bab, 0x1ff1459d, 0xacab58fa, 0x4b9303e3,
+		0x2055fa30, 0xadf66d76, 0x889176cc, 0xf5254c02,
+		0x4ffcd7e5, 0xc5d7cb2a, 0x26804435, 0xb58fa362,
+		0xde495ab1, 0x25671bba, 0x45980eea, 0x5de1c0fe,
+		0xc302752f, 0x8112f04c, 0x8da39746, 0x6bc6f9d3,
+		0x03e75f8f, 0x15959c92, 0xbfeb7a6d, 0x95da5952,
+		0xd42d83be, 0x58d32174, 0x492969e0, 0x8e44c8c9,
+		0x756a89c2, 0xf478798e, 0x996b3e58, 0x27dd71b9,
+		0xbeb64fe1, 0xf017ad88, 0xc966ac20, 0x7db43ace,
+		0x63184adf, 0xe582311a, 0x97603351, 0x62457f53,
+		0xb1e07764, 0xbb84ae6b, 0xfe1ca081, 0xf9942b08,
+		0x70586848, 0x8f19fd45, 0x94876cde, 0x52b7f87b,
+		0xab23d373, 0x72e2024b, 0xe3578f1f, 0x662aab55,
+		0xb20728eb, 0x2f03c2b5, 0x869a7bc5, 0xd3a50837,
+		0x30f28728, 0x23b2a5bf, 0x02ba6a03, 0xed5c8216,
+		0x8a2b1ccf, 0xa792b479, 0xf3f0f207, 0x4ea1e269,
+		0x65cdf4da, 0x06d5be05, 0xd11f6234, 0xc48afea6,
+		0x349d532e, 0xa2a055f3, 0x0532e18a, 0xa475ebf6,
+		0x0b39ec83, 0x40aaef60, 0x5e069f71, 0xbd51106e,
+		0x3ef98a21, 0x963d06dd, 0xddae053e, 0x4d46bde6,
+		0x91b58d54, 0x71055dc4, 0x046fd406, 0x60ff1550,
+		0x1924fb98, 0xd697e9bd, 0x89cc4340, 0x67779ed9,
+		0xb0bd42e8, 0x07888b89, 0xe7385b19, 0x79dbeec8,
+		0xa1470a7c, 0x7ce90f42, 0xf8c91e84, 0x00000000,
+		0x09838680, 0x3248ed2b, 0x1eac7011, 0x6c4e725a,
+		0xfdfbff0e, 0x0f563885, 0x3d1ed5ae, 0x3627392d,
+		0x0a64d90f, 0x6821a65c, 0x9bd1545b, 0x243a2e36,
+		0x0cb1670a, 0x930fe757, 0xb4d296ee, 0x1b9e919b,
+		0x804fc5c0, 0x61a220dc, 0x5a694b77, 0x1c161a12,
+		0xe20aba93, 0xc0e52aa0, 0x3c43e022, 0x121d171b,
+		0x0e0b0d09, 0xf2adc78b, 0x2db9a8b6, 0x14c8a91e,
+		0x578519f1, 0xaf4c0775, 0xeebbdd99, 0xa3fd607f,
+		0xf79f2601, 0x5cbcf572, 0x44c53b66, 0x5b347efb,
+		0x8b762943, 0xcbdcc623, 0xb668fced, 0xb863f1e4,
+		0xd7cadc31, 0x42108563, 0x13402297, 0x842011c6,
+		0x857d244a, 0xd2f83dbb, 0xae1132f9, 0xc76da129,
+		0x1d4b2f9e, 0xdcf330b2, 0x0dec5286, 0x77d0e3c1,
+		0x2b6c16b3, 0xa999b970, 0x11fa4894, 0x472264e9,
+		0xa8c48cfc, 0xa01a3ff0, 0x56d82c7d, 0x22ef9033,
+		0x87c74e49, 0xd9c1d138, 0x8cfea2ca, 0x98360bd4,
+		0xa6cf81f5, 0xa528de7a, 0xda268eb7, 0x3fa4bfad,
+		0x2ce49d3a, 0x500d9278, 0x6a9bcc5f, 0x5462467e,
+		0xf6c2138d, 0x90e8b8d8, 0x2e5ef739, 0x82f5afc3,
+		0x9fbe805d, 0x697c93d0, 0x6fa92dd5, 0xcfb31225,
+		0xc83b99ac, 0x10a77d18, 0xe86e639c, 0xdb7bbb3b,
+		0xcd097826, 0x6ef41859, 0xec01b79a, 0x83a89a4f,
+		0xe6656e95, 0xaa7ee6ff, 0x2108cfbc, 0xefe6e815,
+		0xbad99be7, 0x4ace366f, 0xead4099f, 0x29d67cb0,
+		0x31afb2a4, 0x2a31233f, 0xc63094a5, 0x35c066a2,
+		0x7437bc4e, 0xfca6ca82, 0xe0b0d090, 0x3315d8a7,
+		0xf14a9804, 0x41f7daec, 0x7f0e50cd, 0x172ff691,
+		0x768dd64d, 0x434db0ef, 0xcc544daa, 0xe4df0496,
+		0x9ee3b5d1, 0x4c1b886a, 0xc1b81f2c, 0x467f5165,
+		0x9d04ea5e, 0x015d358c, 0xfa737487, 0xfb2e410b,
+		0xb35a1d67, 0x9252d2db, 0xe9335610, 0x6d1347d6,
+		0x9a8c61d7, 0x377a0ca1, 0x598e14f8, 0xeb893c13,
+		0xceee27a9, 0xb735c961, 0xe1ede51c, 0x7a3cb147,
+		0x9c59dfd2, 0x553f73f2, 0x1879ce14, 0x73bf37c7,
+		0x53eacdf7, 0x5f5baafd, 0xdf146f3d, 0x7886db44,
+		0xca81f3af, 0xb93ec468, 0x382c3424, 0xc25f40a3,
+		0x1672c31d, 0xbc0c25e2, 0x288b493c, 0xff41950d,
+		0x397101a8, 0x08deb30c, 0xd89ce4b4, 0x6490c156,
+		0x7b6184cb, 0xd570b632, 0x48745c6c, 0xd04257b8,
 	}
+};
 
-	for (i = 0; i < 256; ++i) {
-		p = sbx_tab[i];
-
-		t = p;
-		crypto_fl_tab[0][i] = t;
-		crypto_fl_tab[1][i] = rol32(t, 8);
-		crypto_fl_tab[2][i] = rol32(t, 16);
-		crypto_fl_tab[3][i] = rol32(t, 24);
-
-		t = ((u32) ff_mult(2, p)) |
-		    ((u32) p << 8) |
-		    ((u32) p << 16) | ((u32) ff_mult(3, p) << 24);
-
-		crypto_ft_tab[0][i] = t;
-		crypto_ft_tab[1][i] = rol32(t, 8);
-		crypto_ft_tab[2][i] = rol32(t, 16);
-		crypto_ft_tab[3][i] = rol32(t, 24);
-
-		p = isb_tab[i];
-
-		t = p;
-		crypto_il_tab[0][i] = t;
-		crypto_il_tab[1][i] = rol32(t, 8);
-		crypto_il_tab[2][i] = rol32(t, 16);
-		crypto_il_tab[3][i] = rol32(t, 24);
-
-		t = ((u32) ff_mult(14, p)) |
-		    ((u32) ff_mult(9, p) << 8) |
-		    ((u32) ff_mult(13, p) << 16) |
-		    ((u32) ff_mult(11, p) << 24);
-
-		crypto_it_tab[0][i] = t;
-		crypto_it_tab[1][i] = rol32(t, 8);
-		crypto_it_tab[2][i] = rol32(t, 16);
-		crypto_it_tab[3][i] = rol32(t, 24);
+const u32 crypto_il_tab[4][256] = {
+	{
+		0x00000052, 0x00000009, 0x0000006a, 0x000000d5,
+		0x00000030, 0x00000036, 0x000000a5, 0x00000038,
+		0x000000bf, 0x00000040, 0x000000a3, 0x0000009e,
+		0x00000081, 0x000000f3, 0x000000d7, 0x000000fb,
+		0x0000007c, 0x000000e3, 0x00000039, 0x00000082,
+		0x0000009b, 0x0000002f, 0x000000ff, 0x00000087,
+		0x00000034, 0x0000008e, 0x00000043, 0x00000044,
+		0x000000c4, 0x000000de, 0x000000e9, 0x000000cb,
+		0x00000054, 0x0000007b, 0x00000094, 0x00000032,
+		0x000000a6, 0x000000c2, 0x00000023, 0x0000003d,
+		0x000000ee, 0x0000004c, 0x00000095, 0x0000000b,
+		0x00000042, 0x000000fa, 0x000000c3, 0x0000004e,
+		0x00000008, 0x0000002e, 0x000000a1, 0x00000066,
+		0x00000028, 0x000000d9, 0x00000024, 0x000000b2,
+		0x00000076, 0x0000005b, 0x000000a2, 0x00000049,
+		0x0000006d, 0x0000008b, 0x000000d1, 0x00000025,
+		0x00000072, 0x000000f8, 0x000000f6, 0x00000064,
+		0x00000086, 0x00000068, 0x00000098, 0x00000016,
+		0x000000d4, 0x000000a4, 0x0000005c, 0x000000cc,
+		0x0000005d, 0x00000065, 0x000000b6, 0x00000092,
+		0x0000006c, 0x00000070, 0x00000048, 0x00000050,
+		0x000000fd, 0x000000ed, 0x000000b9, 0x000000da,
+		0x0000005e, 0x00000015, 0x00000046, 0x00000057,
+		0x000000a7, 0x0000008d, 0x0000009d, 0x00000084,
+		0x00000090, 0x000000d8, 0x000000ab, 0x00000000,
+		0x0000008c, 0x000000bc, 0x000000d3, 0x0000000a,
+		0x000000f7, 0x000000e4, 0x00000058, 0x00000005,
+		0x000000b8, 0x000000b3, 0x00000045, 0x00000006,
+		0x000000d0, 0x0000002c, 0x0000001e, 0x0000008f,
+		0x000000ca, 0x0000003f, 0x0000000f, 0x00000002,
+		0x000000c1, 0x000000af, 0x000000bd, 0x00000003,
+		0x00000001, 0x00000013, 0x0000008a, 0x0000006b,
+		0x0000003a, 0x00000091, 0x00000011, 0x00000041,
+		0x0000004f, 0x00000067, 0x000000dc, 0x000000ea,
+		0x00000097, 0x000000f2, 0x000000cf, 0x000000ce,
+		0x000000f0, 0x000000b4, 0x000000e6, 0x00000073,
+		0x00000096, 0x000000ac, 0x00000074, 0x00000022,
+		0x000000e7, 0x000000ad, 0x00000035, 0x00000085,
+		0x000000e2, 0x000000f9, 0x00000037, 0x000000e8,
+		0x0000001c, 0x00000075, 0x000000df, 0x0000006e,
+		0x00000047, 0x000000f1, 0x0000001a, 0x00000071,
+		0x0000001d, 0x00000029, 0x000000c5, 0x00000089,
+		0x0000006f, 0x000000b7, 0x00000062, 0x0000000e,
+		0x000000aa, 0x00000018, 0x000000be, 0x0000001b,
+		0x000000fc, 0x00000056, 0x0000003e, 0x0000004b,
+		0x000000c6, 0x000000d2, 0x00000079, 0x00000020,
+		0x0000009a, 0x000000db, 0x000000c0, 0x000000fe,
+		0x00000078, 0x000000cd, 0x0000005a, 0x000000f4,
+		0x0000001f, 0x000000dd, 0x000000a8, 0x00000033,
+		0x00000088, 0x00000007, 0x000000c7, 0x00000031,
+		0x000000b1, 0x00000012, 0x00000010, 0x00000059,
+		0x00000027, 0x00000080, 0x000000ec, 0x0000005f,
+		0x00000060, 0x00000051, 0x0000007f, 0x000000a9,
+		0x00000019, 0x000000b5, 0x0000004a, 0x0000000d,
+		0x0000002d, 0x000000e5, 0x0000007a, 0x0000009f,
+		0x00000093, 0x000000c9, 0x0000009c, 0x000000ef,
+		0x000000a0, 0x000000e0, 0x0000003b, 0x0000004d,
+		0x000000ae, 0x0000002a, 0x000000f5, 0x000000b0,
+		0x000000c8, 0x000000eb, 0x000000bb, 0x0000003c,
+		0x00000083, 0x00000053, 0x00000099, 0x00000061,
+		0x00000017, 0x0000002b, 0x00000004, 0x0000007e,
+		0x000000ba, 0x00000077, 0x000000d6, 0x00000026,
+		0x000000e1, 0x00000069, 0x00000014, 0x00000063,
+		0x00000055, 0x00000021, 0x0000000c, 0x0000007d,
+	}, {
+		0x00005200, 0x00000900, 0x00006a00, 0x0000d500,
+		0x00003000, 0x00003600, 0x0000a500, 0x00003800,
+		0x0000bf00, 0x00004000, 0x0000a300, 0x00009e00,
+		0x00008100, 0x0000f300, 0x0000d700, 0x0000fb00,
+		0x00007c00, 0x0000e300, 0x00003900, 0x00008200,
+		0x00009b00, 0x00002f00, 0x0000ff00, 0x00008700,
+		0x00003400, 0x00008e00, 0x00004300, 0x00004400,
+		0x0000c400, 0x0000de00, 0x0000e900, 0x0000cb00,
+		0x00005400, 0x00007b00, 0x00009400, 0x00003200,
+		0x0000a600, 0x0000c200, 0x00002300, 0x00003d00,
+		0x0000ee00, 0x00004c00, 0x00009500, 0x00000b00,
+		0x00004200, 0x0000fa00, 0x0000c300, 0x00004e00,
+		0x00000800, 0x00002e00, 0x0000a100, 0x00006600,
+		0x00002800, 0x0000d900, 0x00002400, 0x0000b200,
+		0x00007600, 0x00005b00, 0x0000a200, 0x00004900,
+		0x00006d00, 0x00008b00, 0x0000d100, 0x00002500,
+		0x00007200, 0x0000f800, 0x0000f600, 0x00006400,
+		0x00008600, 0x00006800, 0x00009800, 0x00001600,
+		0x0000d400, 0x0000a400, 0x00005c00, 0x0000cc00,
+		0x00005d00, 0x00006500, 0x0000b600, 0x00009200,
+		0x00006c00, 0x00007000, 0x00004800, 0x00005000,
+		0x0000fd00, 0x0000ed00, 0x0000b900, 0x0000da00,
+		0x00005e00, 0x00001500, 0x00004600, 0x00005700,
+		0x0000a700, 0x00008d00, 0x00009d00, 0x00008400,
+		0x00009000, 0x0000d800, 0x0000ab00, 0x00000000,
+		0x00008c00, 0x0000bc00, 0x0000d300, 0x00000a00,
+		0x0000f700, 0x0000e400, 0x00005800, 0x00000500,
+		0x0000b800, 0x0000b300, 0x00004500, 0x00000600,
+		0x0000d000, 0x00002c00, 0x00001e00, 0x00008f00,
+		0x0000ca00, 0x00003f00, 0x00000f00, 0x00000200,
+		0x0000c100, 0x0000af00, 0x0000bd00, 0x00000300,
+		0x00000100, 0x00001300, 0x00008a00, 0x00006b00,
+		0x00003a00, 0x00009100, 0x00001100, 0x00004100,
+		0x00004f00, 0x00006700, 0x0000dc00, 0x0000ea00,
+		0x00009700, 0x0000f200, 0x0000cf00, 0x0000ce00,
+		0x0000f000, 0x0000b400, 0x0000e600, 0x00007300,
+		0x00009600, 0x0000ac00, 0x00007400, 0x00002200,
+		0x0000e700, 0x0000ad00, 0x00003500, 0x00008500,
+		0x0000e200, 0x0000f900, 0x00003700, 0x0000e800,
+		0x00001c00, 0x00007500, 0x0000df00, 0x00006e00,
+		0x00004700, 0x0000f100, 0x00001a00, 0x00007100,
+		0x00001d00, 0x00002900, 0x0000c500, 0x00008900,
+		0x00006f00, 0x0000b700, 0x00006200, 0x00000e00,
+		0x0000aa00, 0x00001800, 0x0000be00, 0x00001b00,
+		0x0000fc00, 0x00005600, 0x00003e00, 0x00004b00,
+		0x0000c600, 0x0000d200, 0x00007900, 0x00002000,
+		0x00009a00, 0x0000db00, 0x0000c000, 0x0000fe00,
+		0x00007800, 0x0000cd00, 0x00005a00, 0x0000f400,
+		0x00001f00, 0x0000dd00, 0x0000a800, 0x00003300,
+		0x00008800, 0x00000700, 0x0000c700, 0x00003100,
+		0x0000b100, 0x00001200, 0x00001000, 0x00005900,
+		0x00002700, 0x00008000, 0x0000ec00, 0x00005f00,
+		0x00006000, 0x00005100, 0x00007f00, 0x0000a900,
+		0x00001900, 0x0000b500, 0x00004a00, 0x00000d00,
+		0x00002d00, 0x0000e500, 0x00007a00, 0x00009f00,
+		0x00009300, 0x0000c900, 0x00009c00, 0x0000ef00,
+		0x0000a000, 0x0000e000, 0x00003b00, 0x00004d00,
+		0x0000ae00, 0x00002a00, 0x0000f500, 0x0000b000,
+		0x0000c800, 0x0000eb00, 0x0000bb00, 0x00003c00,
+		0x00008300, 0x00005300, 0x00009900, 0x00006100,
+		0x00001700, 0x00002b00, 0x00000400, 0x00007e00,
+		0x0000ba00, 0x00007700, 0x0000d600, 0x00002600,
+		0x0000e100, 0x00006900, 0x00001400, 0x00006300,
+		0x00005500, 0x00002100, 0x00000c00, 0x00007d00,
+	}, {
+		0x00520000, 0x00090000, 0x006a0000, 0x00d50000,
+		0x00300000, 0x00360000, 0x00a50000, 0x00380000,
+		0x00bf0000, 0x00400000, 0x00a30000, 0x009e0000,
+		0x00810000, 0x00f30000, 0x00d70000, 0x00fb0000,
+		0x007c0000, 0x00e30000, 0x00390000, 0x00820000,
+		0x009b0000, 0x002f0000, 0x00ff0000, 0x00870000,
+		0x00340000, 0x008e0000, 0x00430000, 0x00440000,
+		0x00c40000, 0x00de0000, 0x00e90000, 0x00cb0000,
+		0x00540000, 0x007b0000, 0x00940000, 0x00320000,
+		0x00a60000, 0x00c20000, 0x00230000, 0x003d0000,
+		0x00ee0000, 0x004c0000, 0x00950000, 0x000b0000,
+		0x00420000, 0x00fa0000, 0x00c30000, 0x004e0000,
+		0x00080000, 0x002e0000, 0x00a10000, 0x00660000,
+		0x00280000, 0x00d90000, 0x00240000, 0x00b20000,
+		0x00760000, 0x005b0000, 0x00a20000, 0x00490000,
+		0x006d0000, 0x008b0000, 0x00d10000, 0x00250000,
+		0x00720000, 0x00f80000, 0x00f60000, 0x00640000,
+		0x00860000, 0x00680000, 0x00980000, 0x00160000,
+		0x00d40000, 0x00a40000, 0x005c0000, 0x00cc0000,
+		0x005d0000, 0x00650000, 0x00b60000, 0x00920000,
+		0x006c0000, 0x00700000, 0x00480000, 0x00500000,
+		0x00fd0000, 0x00ed0000, 0x00b90000, 0x00da0000,
+		0x005e0000, 0x00150000, 0x00460000, 0x00570000,
+		0x00a70000, 0x008d0000, 0x009d0000, 0x00840000,
+		0x00900000, 0x00d80000, 0x00ab0000, 0x00000000,
+		0x008c0000, 0x00bc0000, 0x00d30000, 0x000a0000,
+		0x00f70000, 0x00e40000, 0x00580000, 0x00050000,
+		0x00b80000, 0x00b30000, 0x00450000, 0x00060000,
+		0x00d00000, 0x002c0000, 0x001e0000, 0x008f0000,
+		0x00ca0000, 0x003f0000, 0x000f0000, 0x00020000,
+		0x00c10000, 0x00af0000, 0x00bd0000, 0x00030000,
+		0x00010000, 0x00130000, 0x008a0000, 0x006b0000,
+		0x003a0000, 0x00910000, 0x00110000, 0x00410000,
+		0x004f0000, 0x00670000, 0x00dc0000, 0x00ea0000,
+		0x00970000, 0x00f20000, 0x00cf0000, 0x00ce0000,
+		0x00f00000, 0x00b40000, 0x00e60000, 0x00730000,
+		0x00960000, 0x00ac0000, 0x00740000, 0x00220000,
+		0x00e70000, 0x00ad0000, 0x00350000, 0x00850000,
+		0x00e20000, 0x00f90000, 0x00370000, 0x00e80000,
+		0x001c0000, 0x00750000, 0x00df0000, 0x006e0000,
+		0x00470000, 0x00f10000, 0x001a0000, 0x00710000,
+		0x001d0000, 0x00290000, 0x00c50000, 0x00890000,
+		0x006f0000, 0x00b70000, 0x00620000, 0x000e0000,
+		0x00aa0000, 0x00180000, 0x00be0000, 0x001b0000,
+		0x00fc0000, 0x00560000, 0x003e0000, 0x004b0000,
+		0x00c60000, 0x00d20000, 0x00790000, 0x00200000,
+		0x009a0000, 0x00db0000, 0x00c00000, 0x00fe0000,
+		0x00780000, 0x00cd0000, 0x005a0000, 0x00f40000,
+		0x001f0000, 0x00dd0000, 0x00a80000, 0x00330000,
+		0x00880000, 0x00070000, 0x00c70000, 0x00310000,
+		0x00b10000, 0x00120000, 0x00100000, 0x00590000,
+		0x00270000, 0x00800000, 0x00ec0000, 0x005f0000,
+		0x00600000, 0x00510000, 0x007f0000, 0x00a90000,
+		0x00190000, 0x00b50000, 0x004a0000, 0x000d0000,
+		0x002d0000, 0x00e50000, 0x007a0000, 0x009f0000,
+		0x00930000, 0x00c90000, 0x009c0000, 0x00ef0000,
+		0x00a00000, 0x00e00000, 0x003b0000, 0x004d0000,
+		0x00ae0000, 0x002a0000, 0x00f50000, 0x00b00000,
+		0x00c80000, 0x00eb0000, 0x00bb0000, 0x003c0000,
+		0x00830000, 0x00530000, 0x00990000, 0x00610000,
+		0x00170000, 0x002b0000, 0x00040000, 0x007e0000,
+		0x00ba0000, 0x00770000, 0x00d60000, 0x00260000,
+		0x00e10000, 0x00690000, 0x00140000, 0x00630000,
+		0x00550000, 0x00210000, 0x000c0000, 0x007d0000,
+	}, {
+		0x52000000, 0x09000000, 0x6a000000, 0xd5000000,
+		0x30000000, 0x36000000, 0xa5000000, 0x38000000,
+		0xbf000000, 0x40000000, 0xa3000000, 0x9e000000,
+		0x81000000, 0xf3000000, 0xd7000000, 0xfb000000,
+		0x7c000000, 0xe3000000, 0x39000000, 0x82000000,
+		0x9b000000, 0x2f000000, 0xff000000, 0x87000000,
+		0x34000000, 0x8e000000, 0x43000000, 0x44000000,
+		0xc4000000, 0xde000000, 0xe9000000, 0xcb000000,
+		0x54000000, 0x7b000000, 0x94000000, 0x32000000,
+		0xa6000000, 0xc2000000, 0x23000000, 0x3d000000,
+		0xee000000, 0x4c000000, 0x95000000, 0x0b000000,
+		0x42000000, 0xfa000000, 0xc3000000, 0x4e000000,
+		0x08000000, 0x2e000000, 0xa1000000, 0x66000000,
+		0x28000000, 0xd9000000, 0x24000000, 0xb2000000,
+		0x76000000, 0x5b000000, 0xa2000000, 0x49000000,
+		0x6d000000, 0x8b000000, 0xd1000000, 0x25000000,
+		0x72000000, 0xf8000000, 0xf6000000, 0x64000000,
+		0x86000000, 0x68000000, 0x98000000, 0x16000000,
+		0xd4000000, 0xa4000000, 0x5c000000, 0xcc000000,
+		0x5d000000, 0x65000000, 0xb6000000, 0x92000000,
+		0x6c000000, 0x70000000, 0x48000000, 0x50000000,
+		0xfd000000, 0xed000000, 0xb9000000, 0xda000000,
+		0x5e000000, 0x15000000, 0x46000000, 0x57000000,
+		0xa7000000, 0x8d000000, 0x9d000000, 0x84000000,
+		0x90000000, 0xd8000000, 0xab000000, 0x00000000,
+		0x8c000000, 0xbc000000, 0xd3000000, 0x0a000000,
+		0xf7000000, 0xe4000000, 0x58000000, 0x05000000,
+		0xb8000000, 0xb3000000, 0x45000000, 0x06000000,
+		0xd0000000, 0x2c000000, 0x1e000000, 0x8f000000,
+		0xca000000, 0x3f000000, 0x0f000000, 0x02000000,
+		0xc1000000, 0xaf000000, 0xbd000000, 0x03000000,
+		0x01000000, 0x13000000, 0x8a000000, 0x6b000000,
+		0x3a000000, 0x91000000, 0x11000000, 0x41000000,
+		0x4f000000, 0x67000000, 0xdc000000, 0xea000000,
+		0x97000000, 0xf2000000, 0xcf000000, 0xce000000,
+		0xf0000000, 0xb4000000, 0xe6000000, 0x73000000,
+		0x96000000, 0xac000000, 0x74000000, 0x22000000,
+		0xe7000000, 0xad000000, 0x35000000, 0x85000000,
+		0xe2000000, 0xf9000000, 0x37000000, 0xe8000000,
+		0x1c000000, 0x75000000, 0xdf000000, 0x6e000000,
+		0x47000000, 0xf1000000, 0x1a000000, 0x71000000,
+		0x1d000000, 0x29000000, 0xc5000000, 0x89000000,
+		0x6f000000, 0xb7000000, 0x62000000, 0x0e000000,
+		0xaa000000, 0x18000000, 0xbe000000, 0x1b000000,
+		0xfc000000, 0x56000000, 0x3e000000, 0x4b000000,
+		0xc6000000, 0xd2000000, 0x79000000, 0x20000000,
+		0x9a000000, 0xdb000000, 0xc0000000, 0xfe000000,
+		0x78000000, 0xcd000000, 0x5a000000, 0xf4000000,
+		0x1f000000, 0xdd000000, 0xa8000000, 0x33000000,
+		0x88000000, 0x07000000, 0xc7000000, 0x31000000,
+		0xb1000000, 0x12000000, 0x10000000, 0x59000000,
+		0x27000000, 0x80000000, 0xec000000, 0x5f000000,
+		0x60000000, 0x51000000, 0x7f000000, 0xa9000000,
+		0x19000000, 0xb5000000, 0x4a000000, 0x0d000000,
+		0x2d000000, 0xe5000000, 0x7a000000, 0x9f000000,
+		0x93000000, 0xc9000000, 0x9c000000, 0xef000000,
+		0xa0000000, 0xe0000000, 0x3b000000, 0x4d000000,
+		0xae000000, 0x2a000000, 0xf5000000, 0xb0000000,
+		0xc8000000, 0xeb000000, 0xbb000000, 0x3c000000,
+		0x83000000, 0x53000000, 0x99000000, 0x61000000,
+		0x17000000, 0x2b000000, 0x04000000, 0x7e000000,
+		0xba000000, 0x77000000, 0xd6000000, 0x26000000,
+		0xe1000000, 0x69000000, 0x14000000, 0x63000000,
+		0x55000000, 0x21000000, 0x0c000000, 0x7d000000,
 	}
-}
+};
+
+EXPORT_SYMBOL_GPL(crypto_ft_tab);
+EXPORT_SYMBOL_GPL(crypto_fl_tab);
+EXPORT_SYMBOL_GPL(crypto_it_tab);
+EXPORT_SYMBOL_GPL(crypto_il_tab);
 
 /* initialise the key schedule from the user supplied key */
 
@@ -491,7 +1457,6 @@ static struct crypto_alg aes_alg = {
 
 static int __init aes_init(void)
 {
-	gen_tabs();
 	return crypto_register_alg(&aes_alg);
 }
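For illustration only, not part of the patch: the four 256-entry sub-tables above carry the same substitution byte pre-placed in each of the four byte lanes (the second table is the first shifted left by 8 bits, and so on), so a round output word can be assembled from four lookups with no runtime shifting. That is also why the gen_tabs() call is dropped from aes_init() above; the tables are now compile-time constants. A minimal sketch, with a made-up function name and kernel u32/u8 types assumed:

	static u32 example_output_word(const u32 tab[4][256],
				       u8 b0, u8 b1, u8 b2, u8 b3)
	{
		/* tab[n][x] holds tab[0][x] placed in byte lane n */
		return tab[0][b0] ^ tab[1][b1] ^ tab[2][b2] ^ tab[3][b3];
	}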
 
diff --git a/crypto/ahash.c b/crypto/ahash.c
index 27128f2c687a62494b8d20b60191c916376aca4a..ba5292d69ebd206cdbf9961e130cf089dc584dd2 100644
--- a/crypto/ahash.c
+++ b/crypto/ahash.c
@@ -112,6 +112,22 @@ int crypto_hash_walk_first(struct ahash_request *req,
 }
 EXPORT_SYMBOL_GPL(crypto_hash_walk_first);
 
+int crypto_hash_walk_first_compat(struct hash_desc *hdesc,
+				  struct crypto_hash_walk *walk,
+				  struct scatterlist *sg, unsigned int len)
+{
+	walk->total = len;
+
+	if (!walk->total)
+		return 0;
+
+	walk->alignmask = crypto_hash_alignmask(hdesc->tfm);
+	walk->sg = sg;
+	walk->flags = hdesc->flags;
+
+	return hash_walk_new_entry(walk);
+}
+
 static int ahash_setkey_unaligned(struct crypto_ahash *tfm, const u8 *key,
 				unsigned int keylen)
 {
@@ -146,6 +162,26 @@ static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
 	return ahash->setkey(tfm, key, keylen);
 }
 
+static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
+			  unsigned int keylen)
+{
+	return -ENOSYS;
+}
+
+int crypto_ahash_import(struct ahash_request *req, const u8 *in)
+{
+	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+	struct ahash_alg *alg = crypto_ahash_alg(tfm);
+
+	memcpy(ahash_request_ctx(req), in, crypto_ahash_reqsize(tfm));
+
+	if (alg->reinit)
+		alg->reinit(req);
+
+	return 0;
+}
+EXPORT_SYMBOL_GPL(crypto_ahash_import);
+
 static unsigned int crypto_ahash_ctxsize(struct crypto_alg *alg, u32 type,
 					u32 mask)
 {
@@ -164,7 +200,7 @@ static int crypto_init_ahash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
 	crt->update = alg->update;
 	crt->final  = alg->final;
 	crt->digest = alg->digest;
-	crt->setkey = ahash_setkey;
+	crt->setkey = alg->setkey ? ahash_setkey : ahash_nosetkey;
 	crt->digestsize = alg->digestsize;
 
 	return 0;
diff --git a/crypto/ansi_cprng.c b/crypto/ansi_cprng.c
index 72db0fd763cca01ed6ed17f35a494fa7744a0b47..0fac8ffc2fb7ef5dc6d39cca05d0292b584bb472 100644
--- a/crypto/ansi_cprng.c
+++ b/crypto/ansi_cprng.c
@@ -161,7 +161,7 @@ static int _get_more_prng_bytes(struct prng_context *ctx)
 	/*
 	 * Now update our DT value
 	 */
-	for (i = 0; i < DEFAULT_BLK_SZ; i++) {
+	for (i = DEFAULT_BLK_SZ - 1; i >= 0; i--) {
 		ctx->DT[i] += 1;
 		if (ctx->DT[i] != 0)
 			break;
@@ -223,9 +223,10 @@ static int get_prng_bytes(char *buf, size_t nbytes, struct prng_context *ctx)
 	}
 
 	/*
-	 * Copy up to the next whole block size
+	 * Copy any data less than an entire block
 	 */
 	if (byte_count < DEFAULT_BLK_SZ) {
+empty_rbuf:
 		for (; ctx->rand_data_valid < DEFAULT_BLK_SZ;
 			ctx->rand_data_valid++) {
 			*ptr = ctx->rand_data[ctx->rand_data_valid];
@@ -240,18 +241,22 @@ static int get_prng_bytes(char *buf, size_t nbytes, struct prng_context *ctx)
 	 * Now copy whole blocks
 	 */
 	for (; byte_count >= DEFAULT_BLK_SZ; byte_count -= DEFAULT_BLK_SZ) {
-		if (_get_more_prng_bytes(ctx) < 0) {
-			memset(buf, 0, nbytes);
-			err = -EINVAL;
-			goto done;
+		if (ctx->rand_data_valid == DEFAULT_BLK_SZ) {
+			if (_get_more_prng_bytes(ctx) < 0) {
+				memset(buf, 0, nbytes);
+				err = -EINVAL;
+				goto done;
+			}
 		}
+		if (ctx->rand_data_valid > 0)
+			goto empty_rbuf;
 		memcpy(ptr, ctx->rand_data, DEFAULT_BLK_SZ);
 		ctx->rand_data_valid += DEFAULT_BLK_SZ;
 		ptr += DEFAULT_BLK_SZ;
 	}
 
 	/*
-	 * Now copy any extra partial data
+	 * Now go back and get any remaining partial block
 	 */
 	if (byte_count)
 		goto remainder;
@@ -349,15 +354,25 @@ static int cprng_get_random(struct crypto_rng *tfm, u8 *rdata,
 	return get_prng_bytes(rdata, dlen, prng);
 }
 
+/*
+ *  This is the cprng_registered reset method; the seed value is
+ *  interpreted as the tuple { V KEY DT }.
+ *  V and KEY are required during reset, and DT is optional, detected
+ *  as being present by testing the length of the seed.
+ */
 static int cprng_reset(struct crypto_rng *tfm, u8 *seed, unsigned int slen)
 {
 	struct prng_context *prng = crypto_rng_ctx(tfm);
-	u8 *key = seed + DEFAULT_PRNG_KSZ;
+	u8 *key = seed + DEFAULT_BLK_SZ;
+	u8 *dt = NULL;
 
 	if (slen < DEFAULT_PRNG_KSZ + DEFAULT_BLK_SZ)
 		return -EINVAL;
 
-	reset_prng_context(prng, key, DEFAULT_PRNG_KSZ, seed, NULL);
+	if (slen >= (2 * DEFAULT_BLK_SZ + DEFAULT_PRNG_KSZ))
+		dt = key + DEFAULT_PRNG_KSZ;
+
+	reset_prng_context(prng, key, DEFAULT_PRNG_KSZ, seed, dt);
 
 	if (prng->flags & PRNG_NEED_RESET)
 		return -EINVAL;
@@ -379,7 +394,7 @@ static struct crypto_alg rng_alg = {
 		.rng = {
 			.rng_make_random	= cprng_get_random,
 			.rng_reset		= cprng_reset,
-			.seedsize = DEFAULT_PRNG_KSZ + DEFAULT_BLK_SZ,
+			.seedsize = DEFAULT_PRNG_KSZ + 2*DEFAULT_BLK_SZ,
 		}
 	}
 };
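For illustration only, not part of the patch: with the layout cprng_reset() now expects, the seed buffer is V || KEY || DT, where V and KEY (each 16 bytes in this driver) are mandatory and the trailing 16-byte DT counter is optional. The reworked increment loop above also treats DT as a big-endian counter, bumping the last byte first and carrying leftwards. A caller-side sketch with placeholder buffers V, K and DT:

	u8 seed[48];		/* V (16) || KEY (16) || DT (16) */
	int err;

	memcpy(seed,      V,  16);	/* seed vector */
	memcpy(seed + 16, K,  16);	/* AES key */
	memcpy(seed + 32, DT, 16);	/* optional date/time counter */

	err = crypto_rng_reset(rng, seed, sizeof(seed));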
diff --git a/crypto/api.c b/crypto/api.c
index 0444d242e9854dbc1a63a61a37ecc37b94c52df7..9975a7bd246c21d4d7e3152249ffefdb38812ee5 100644
--- a/crypto/api.c
+++ b/crypto/api.c
@@ -300,8 +300,8 @@ static void crypto_exit_ops(struct crypto_tfm *tfm)
 	const struct crypto_type *type = tfm->__crt_alg->cra_type;
 
 	if (type) {
-		if (type->exit)
-			type->exit(tfm);
+		if (tfm->exit)
+			tfm->exit(tfm);
 		return;
 	}
 
@@ -379,17 +379,16 @@ struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
 	if (err)
 		goto out_free_tfm;
 
-	if (alg->cra_init && (err = alg->cra_init(tfm))) {
-		if (err == -EAGAIN)
-			crypto_shoot_alg(alg);
+	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
 		goto cra_init_failed;
-	}
 
 	goto out;
 
 cra_init_failed:
 	crypto_exit_ops(tfm);
 out_free_tfm:
+	if (err == -EAGAIN)
+		crypto_shoot_alg(alg);
 	kfree(tfm);
 out_err:
 	tfm = ERR_PTR(err);
@@ -404,6 +403,9 @@ EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);
  *	@type: Type of algorithm
  *	@mask: Mask for type comparison
  *
+ *	This function should not be used by new algorithm types.
+ *	Please use crypto_alloc_tfm instead.
+ *
  *	crypto_alloc_base() will first attempt to locate an already loaded
  *	algorithm.  If that fails and the kernel supports dynamically loadable
  *	modules, it will then attempt to load a module of the same name or
@@ -450,6 +452,111 @@ struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
 	return ERR_PTR(err);
 }
 EXPORT_SYMBOL_GPL(crypto_alloc_base);
+
+struct crypto_tfm *crypto_create_tfm(struct crypto_alg *alg,
+				     const struct crypto_type *frontend)
+{
+	char *mem;
+	struct crypto_tfm *tfm = NULL;
+	unsigned int tfmsize;
+	unsigned int total;
+	int err = -ENOMEM;
+
+	tfmsize = frontend->tfmsize;
+	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg, frontend);
+
+	mem = kzalloc(total, GFP_KERNEL);
+	if (mem == NULL)
+		goto out_err;
+
+	tfm = (struct crypto_tfm *)(mem + tfmsize);
+	tfm->__crt_alg = alg;
+
+	err = frontend->init_tfm(tfm, frontend);
+	if (err)
+		goto out_free_tfm;
+
+	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
+		goto cra_init_failed;
+
+	goto out;
+
+cra_init_failed:
+	crypto_exit_ops(tfm);
+out_free_tfm:
+	if (err == -EAGAIN)
+		crypto_shoot_alg(alg);
+	kfree(mem);
+out_err:
+	tfm = ERR_PTR(err);
+out:
+	return tfm;
+}
+EXPORT_SYMBOL_GPL(crypto_create_tfm);
+
+/*
+ *	crypto_alloc_tfm - Locate algorithm and allocate transform
+ *	@alg_name: Name of algorithm
+ *	@frontend: Frontend algorithm type
+ *	@type: Type of algorithm
+ *	@mask: Mask for type comparison
+ *
+ *	crypto_alloc_tfm() will first attempt to locate an already loaded
+ *	algorithm.  If that fails and the kernel supports dynamically loadable
+ *	modules, it will then attempt to load a module of the same name or
+ *	alias.  If that fails it will send a query to any loaded crypto manager
+ *	to construct an algorithm on the fly.  A refcount is grabbed on the
+ *	algorithm which is then associated with the new transform.
+ *
+ *	The returned transform is of a non-determinate type.  Most people
+ *	should use one of the more specific allocation functions such as
+ *	crypto_alloc_blkcipher.
+ *
+ *	In case of error the return value is an error pointer.
+ */
+struct crypto_tfm *crypto_alloc_tfm(const char *alg_name,
+				    const struct crypto_type *frontend,
+				    u32 type, u32 mask)
+{
+	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);
+	struct crypto_tfm *tfm;
+	int err;
+
+	type &= frontend->maskclear;
+	mask &= frontend->maskclear;
+	type |= frontend->type;
+	mask |= frontend->maskset;
+
+	lookup = frontend->lookup ?: crypto_alg_mod_lookup;
+
+	for (;;) {
+		struct crypto_alg *alg;
+
+		alg = lookup(alg_name, type, mask);
+		if (IS_ERR(alg)) {
+			err = PTR_ERR(alg);
+			goto err;
+		}
+
+		tfm = crypto_create_tfm(alg, frontend);
+		if (!IS_ERR(tfm))
+			return tfm;
+
+		crypto_mod_put(alg);
+		err = PTR_ERR(tfm);
+
+err:
+		if (err != -EAGAIN)
+			break;
+		if (signal_pending(current)) {
+			err = -EINTR;
+			break;
+		}
+	}
+
+	return ERR_PTR(err);
+}
+EXPORT_SYMBOL_GPL(crypto_alloc_tfm);
  
 /*
  *	crypto_free_tfm - Free crypto transform
@@ -469,7 +576,7 @@ void crypto_free_tfm(struct crypto_tfm *tfm)
 	alg = tfm->__crt_alg;
 	size = sizeof(*tfm) + alg->cra_ctxsize;
 
-	if (alg->cra_exit)
+	if (!tfm->exit && alg->cra_exit)
 		alg->cra_exit(tfm);
 	crypto_exit_ops(tfm);
 	crypto_mod_put(alg);
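For illustration only, not part of the patch: the buffer that crypto_create_tfm() carves up is laid out as sketched below. The frontend-specific handle (for example a shash transform object) sits in front of the generic struct crypto_tfm, followed by the per-algorithm context whose size the frontend reports via extsize():

	/*
	 *   mem                    mem + tfmsize
	 *   |                      |
	 *   v                      v
	 *   +----------------------+--------------------+---------------------+
	 *   | frontend object      | struct crypto_tfm  | algorithm context   |
	 *   | (frontend->tfmsize)  |                    | (frontend->extsize) |
	 *   +----------------------+--------------------+---------------------+
	 *
	 *   tfm = (struct crypto_tfm *)(mem + tfmsize);
	 */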
diff --git a/crypto/authenc.c b/crypto/authenc.c
index fd9f06c63d76efc223a6f025dcab10b19ceba84c..40b6e9ec9e3a0a2069b5f972226918dc33625294 100644
--- a/crypto/authenc.c
+++ b/crypto/authenc.c
@@ -11,6 +11,7 @@
  */
 
 #include <crypto/aead.h>
+#include <crypto/internal/hash.h>
 #include <crypto/internal/skcipher.h>
 #include <crypto/authenc.h>
 #include <crypto/scatterwalk.h>
@@ -431,6 +432,8 @@ static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb)
 	inst->alg.cra_aead.ivsize = enc->cra_ablkcipher.ivsize;
 	inst->alg.cra_aead.maxauthsize = auth->cra_type == &crypto_hash_type ?
 					 auth->cra_hash.digestsize :
+					 auth->cra_type ?
+					 __crypto_shash_alg(auth)->digestsize :
 					 auth->cra_digest.dia_digestsize;
 
 	inst->alg.cra_ctxsize = sizeof(struct crypto_authenc_ctx);
diff --git a/crypto/camellia.c b/crypto/camellia.c
index 493fee7e0a8b46bbd9933d92681e0a7c16d0ab81..964635d163f4767a9ff7a47c271fb1f5bd3a6c59 100644
--- a/crypto/camellia.c
+++ b/crypto/camellia.c
@@ -35,6 +35,8 @@
 #include <linux/init.h>
 #include <linux/kernel.h>
 #include <linux/module.h>
+#include <linux/bitops.h>
+#include <asm/unaligned.h>
 
 static const u32 camellia_sp1110[256] = {
 	0x70707000,0x82828200,0x2c2c2c00,0xececec00,
@@ -335,20 +337,6 @@ static const u32 camellia_sp4404[256] = {
 /*
  *  macros
  */
-#define GETU32(v, pt) \
-    do { \
-	/* latest breed of gcc is clever enough to use move */ \
-	memcpy(&(v), (pt), 4); \
-	(v) = be32_to_cpu(v); \
-    } while(0)
-
-/* rotation right shift 1byte */
-#define ROR8(x) (((x) >> 8) + ((x) << 24))
-/* rotation left shift 1bit */
-#define ROL1(x) (((x) << 1) + ((x) >> 31))
-/* rotation left shift 1byte */
-#define ROL8(x) (((x) << 8) + ((x) >> 24))
-
 #define ROLDQ(ll, lr, rl, rr, w0, w1, bits)		\
     do {						\
 	w0 = ll;					\
@@ -383,7 +371,7 @@ static const u32 camellia_sp4404[256] = {
 	   ^ camellia_sp3033[(u8)(il >> 8)]			\
 	   ^ camellia_sp4404[(u8)(il     )];			\
 	yl ^= yr;						\
-	yr = ROR8(yr);						\
+	yr = ror32(yr, 8);					\
 	yr ^= yl;						\
     } while(0)
 
@@ -405,7 +393,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
 	subL[7] ^= subL[1]; subR[7] ^= subR[1];
 	subL[1] ^= subR[1] & ~subR[9];
 	dw = subL[1] & subL[9],
-		subR[1] ^= ROL1(dw); /* modified for FLinv(kl2) */
+		subR[1] ^= rol32(dw, 1); /* modified for FLinv(kl2) */
 	/* round 8 */
 	subL[11] ^= subL[1]; subR[11] ^= subR[1];
 	/* round 10 */
@@ -414,7 +402,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
 	subL[15] ^= subL[1]; subR[15] ^= subR[1];
 	subL[1] ^= subR[1] & ~subR[17];
 	dw = subL[1] & subL[17],
-		subR[1] ^= ROL1(dw); /* modified for FLinv(kl4) */
+		subR[1] ^= rol32(dw, 1); /* modified for FLinv(kl4) */
 	/* round 14 */
 	subL[19] ^= subL[1]; subR[19] ^= subR[1];
 	/* round 16 */
@@ -430,7 +418,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
 	} else {
 		subL[1] ^= subR[1] & ~subR[25];
 		dw = subL[1] & subL[25],
-			subR[1] ^= ROL1(dw); /* modified for FLinv(kl6) */
+			subR[1] ^= rol32(dw, 1); /* modified for FLinv(kl6) */
 		/* round 20 */
 		subL[27] ^= subL[1]; subR[27] ^= subR[1];
 		/* round 22 */
@@ -450,7 +438,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
 		subL[26] ^= kw4l; subR[26] ^= kw4r;
 		kw4l ^= kw4r & ~subR[24];
 		dw = kw4l & subL[24],
-			kw4r ^= ROL1(dw); /* modified for FL(kl5) */
+			kw4r ^= rol32(dw, 1); /* modified for FL(kl5) */
 	}
 	/* round 17 */
 	subL[22] ^= kw4l; subR[22] ^= kw4r;
@@ -460,7 +448,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
 	subL[18] ^= kw4l; subR[18] ^= kw4r;
 	kw4l ^= kw4r & ~subR[16];
 	dw = kw4l & subL[16],
-		kw4r ^= ROL1(dw); /* modified for FL(kl3) */
+		kw4r ^= rol32(dw, 1); /* modified for FL(kl3) */
 	/* round 11 */
 	subL[14] ^= kw4l; subR[14] ^= kw4r;
 	/* round 9 */
@@ -469,7 +457,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
 	subL[10] ^= kw4l; subR[10] ^= kw4r;
 	kw4l ^= kw4r & ~subR[8];
 	dw = kw4l & subL[8],
-		kw4r ^= ROL1(dw); /* modified for FL(kl1) */
+		kw4r ^= rol32(dw, 1); /* modified for FL(kl1) */
 	/* round 5 */
 	subL[6] ^= kw4l; subR[6] ^= kw4r;
 	/* round 3 */
@@ -494,7 +482,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
 	SUBKEY_R(6) = subR[5] ^ subR[7];
 	tl = subL[10] ^ (subR[10] & ~subR[8]);
 	dw = tl & subL[8],  /* FL(kl1) */
-		tr = subR[10] ^ ROL1(dw);
+		tr = subR[10] ^ rol32(dw, 1);
 	SUBKEY_L(7) = subL[6] ^ tl; /* round 6 */
 	SUBKEY_R(7) = subR[6] ^ tr;
 	SUBKEY_L(8) = subL[8];       /* FL(kl1) */
@@ -503,7 +491,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
 	SUBKEY_R(9) = subR[9];
 	tl = subL[7] ^ (subR[7] & ~subR[9]);
 	dw = tl & subL[9],  /* FLinv(kl2) */
-		tr = subR[7] ^ ROL1(dw);
+		tr = subR[7] ^ rol32(dw, 1);
 	SUBKEY_L(10) = tl ^ subL[11]; /* round 7 */
 	SUBKEY_R(10) = tr ^ subR[11];
 	SUBKEY_L(11) = subL[10] ^ subL[12]; /* round 8 */
@@ -516,7 +504,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
 	SUBKEY_R(14) = subR[13] ^ subR[15];
 	tl = subL[18] ^ (subR[18] & ~subR[16]);
 	dw = tl & subL[16], /* FL(kl3) */
-		tr = subR[18] ^ ROL1(dw);
+		tr = subR[18] ^ rol32(dw, 1);
 	SUBKEY_L(15) = subL[14] ^ tl; /* round 12 */
 	SUBKEY_R(15) = subR[14] ^ tr;
 	SUBKEY_L(16) = subL[16];     /* FL(kl3) */
@@ -525,7 +513,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
 	SUBKEY_R(17) = subR[17];
 	tl = subL[15] ^ (subR[15] & ~subR[17]);
 	dw = tl & subL[17], /* FLinv(kl4) */
-		tr = subR[15] ^ ROL1(dw);
+		tr = subR[15] ^ rol32(dw, 1);
 	SUBKEY_L(18) = tl ^ subL[19]; /* round 13 */
 	SUBKEY_R(18) = tr ^ subR[19];
 	SUBKEY_L(19) = subL[18] ^ subL[20]; /* round 14 */
@@ -544,7 +532,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
 	} else {
 		tl = subL[26] ^ (subR[26] & ~subR[24]);
 		dw = tl & subL[24], /* FL(kl5) */
-			tr = subR[26] ^ ROL1(dw);
+			tr = subR[26] ^ rol32(dw, 1);
 		SUBKEY_L(23) = subL[22] ^ tl; /* round 18 */
 		SUBKEY_R(23) = subR[22] ^ tr;
 		SUBKEY_L(24) = subL[24];     /* FL(kl5) */
@@ -553,7 +541,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
 		SUBKEY_R(25) = subR[25];
 		tl = subL[23] ^ (subR[23] & ~subR[25]);
 		dw = tl & subL[25], /* FLinv(kl6) */
-			tr = subR[23] ^ ROL1(dw);
+			tr = subR[23] ^ rol32(dw, 1);
 		SUBKEY_L(26) = tl ^ subL[27]; /* round 19 */
 		SUBKEY_R(26) = tr ^ subR[27];
 		SUBKEY_L(27) = subL[26] ^ subL[28]; /* round 20 */
@@ -573,17 +561,17 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
 	/* apply the inverse of the last half of P-function */
 	i = 2;
 	do {
-		dw = SUBKEY_L(i + 0) ^ SUBKEY_R(i + 0); dw = ROL8(dw);/* round 1 */
+		dw = SUBKEY_L(i + 0) ^ SUBKEY_R(i + 0); dw = rol32(dw, 8);/* round 1 */
 		SUBKEY_R(i + 0) = SUBKEY_L(i + 0) ^ dw; SUBKEY_L(i + 0) = dw;
-		dw = SUBKEY_L(i + 1) ^ SUBKEY_R(i + 1); dw = ROL8(dw);/* round 2 */
+		dw = SUBKEY_L(i + 1) ^ SUBKEY_R(i + 1); dw = rol32(dw, 8);/* round 2 */
 		SUBKEY_R(i + 1) = SUBKEY_L(i + 1) ^ dw; SUBKEY_L(i + 1) = dw;
-		dw = SUBKEY_L(i + 2) ^ SUBKEY_R(i + 2); dw = ROL8(dw);/* round 3 */
+		dw = SUBKEY_L(i + 2) ^ SUBKEY_R(i + 2); dw = rol32(dw, 8);/* round 3 */
 		SUBKEY_R(i + 2) = SUBKEY_L(i + 2) ^ dw; SUBKEY_L(i + 2) = dw;
-		dw = SUBKEY_L(i + 3) ^ SUBKEY_R(i + 3); dw = ROL8(dw);/* round 4 */
+		dw = SUBKEY_L(i + 3) ^ SUBKEY_R(i + 3); dw = rol32(dw, 8);/* round 4 */
 		SUBKEY_R(i + 3) = SUBKEY_L(i + 3) ^ dw; SUBKEY_L(i + 3) = dw;
-		dw = SUBKEY_L(i + 4) ^ SUBKEY_R(i + 4); dw = ROL8(dw);/* round 5 */
+		dw = SUBKEY_L(i + 4) ^ SUBKEY_R(i + 4); dw = rol32(dw, 8);/* round 5 */
 		SUBKEY_R(i + 4) = SUBKEY_L(i + 4) ^ dw; SUBKEY_L(i + 4) = dw;
-		dw = SUBKEY_L(i + 5) ^ SUBKEY_R(i + 5); dw = ROL8(dw);/* round 6 */
+		dw = SUBKEY_L(i + 5) ^ SUBKEY_R(i + 5); dw = rol32(dw, 8);/* round 6 */
 		SUBKEY_R(i + 5) = SUBKEY_L(i + 5) ^ dw; SUBKEY_L(i + 5) = dw;
 		i += 8;
 	} while (i < max);
@@ -599,10 +587,10 @@ static void camellia_setup128(const unsigned char *key, u32 *subkey)
 	/**
 	 *  k == kll || klr || krl || krr (|| is concatenation)
 	 */
-	GETU32(kll, key     );
-	GETU32(klr, key +  4);
-	GETU32(krl, key +  8);
-	GETU32(krr, key + 12);
+	kll = get_unaligned_be32(key);
+	klr = get_unaligned_be32(key + 4);
+	krl = get_unaligned_be32(key + 8);
+	krr = get_unaligned_be32(key + 12);
 
 	/* generate KL dependent subkeys */
 	/* kw1 */
@@ -707,14 +695,14 @@ static void camellia_setup256(const unsigned char *key, u32 *subkey)
 	 *  key = (kll || klr || krl || krr || krll || krlr || krrl || krrr)
 	 *  (|| is concatenation)
 	 */
-	GETU32(kll,  key     );
-	GETU32(klr,  key +  4);
-	GETU32(krl,  key +  8);
-	GETU32(krr,  key + 12);
-	GETU32(krll, key + 16);
-	GETU32(krlr, key + 20);
-	GETU32(krrl, key + 24);
-	GETU32(krrr, key + 28);
+	kll = get_unaligned_be32(key);
+	klr = get_unaligned_be32(key + 4);
+	krl = get_unaligned_be32(key + 8);
+	krr = get_unaligned_be32(key + 12);
+	krll = get_unaligned_be32(key + 16);
+	krlr = get_unaligned_be32(key + 20);
+	krrl = get_unaligned_be32(key + 24);
+	krrr = get_unaligned_be32(key + 28);
 
 	/* generate KL dependent subkeys */
 	/* kw1 */
@@ -870,13 +858,13 @@ static void camellia_setup192(const unsigned char *key, u32 *subkey)
 	t0 &= ll;							\
 	t2 |= rr;							\
 	rl ^= t2;							\
-	lr ^= ROL1(t0);							\
+	lr ^= rol32(t0, 1);						\
 	t3 = krl;							\
 	t1 = klr;							\
 	t3 &= rl;							\
 	t1 |= lr;							\
 	ll ^= t1;							\
-	rr ^= ROL1(t3);							\
+	rr ^= rol32(t3, 1);						\
     } while(0)
 
 #define CAMELLIA_ROUNDSM(xl, xr, kl, kr, yl, yr, il, ir)		\
@@ -892,7 +880,7 @@ static void camellia_setup192(const unsigned char *key, u32 *subkey)
 	il ^= kl;							\
 	ir ^= il ^ kr;							\
 	yl ^= ir;							\
-	yr ^= ROR8(il) ^ ir;						\
+	yr ^= ror32(il, 8) ^ ir;						\
     } while(0)
 
 /* max = 24: 128bit encrypt, max = 32: 256bit encrypt */
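For reference, not part of the patch: the open-coded helpers removed above map one-to-one onto the generic routines now used; '+' and '|' are interchangeable in the originals because the shifted halves never overlap:

	/*
	 *   ROR8(x)  == ((x) >> 8) + ((x) << 24)  ==  ror32(x, 8)
	 *   ROL1(x)  == ((x) << 1) + ((x) >> 31)  ==  rol32(x, 1)
	 *   ROL8(x)  == ((x) << 8) + ((x) >> 24)  ==  rol32(x, 8)
	 *   GETU32(v, p)                          ==  v = get_unaligned_be32(p)
	 */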
diff --git a/crypto/crc32c.c b/crypto/crc32c.c
index a882d9e4e63e741fd90ec2d6183cee337c841c54..973bc2cfab2ec00b8571894bc7ceb35541670cad 100644
--- a/crypto/crc32c.c
+++ b/crypto/crc32c.c
@@ -3,8 +3,29 @@
  *
  * CRC32C chksum
  *
- * This module file is a wrapper to invoke the lib/crc32c routines.
+ *@Article{castagnoli-crc,
+ * author =       { Guy Castagnoli and Stefan Braeuer and Martin Herrman},
+ * title =        {{Optimization of Cyclic Redundancy-Check Codes with 24
+ *                 and 32 Parity Bits}},
+ * journal =      IEEE Transactions on Communication,
+ * year =         {1993},
+ * volume =       {41},
+ * number =       {6},
+ * pages =        {},
+ * month =        {June},
+ *}
+ * Used by the iSCSI driver, possibly others, and derived from
+ * the iscsi-crc.c module of the linux-iscsi driver at
+ * http://linux-iscsi.sourceforge.net.
  *
+ * Following the example of lib/crc32, this function is intended to be
+ * flexible and useful for all users.  Modules that currently have their
+ * own crc32c, but hopefully may be able to use this one are:
+ *  net/sctp (please add all your doco to here if you change to
+ *            use this one!)
+ *  <endoflist>
+ *
+ * Copyright (c) 2004 Cisco Systems, Inc.
  * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
  *
  * This program is free software; you can redistribute it and/or modify it
@@ -18,208 +39,217 @@
 #include <linux/init.h>
 #include <linux/module.h>
 #include <linux/string.h>
-#include <linux/crc32c.h>
 #include <linux/kernel.h>
 
 #define CHKSUM_BLOCK_SIZE	1
 #define CHKSUM_DIGEST_SIZE	4
 
 struct chksum_ctx {
-	u32 crc;
 	u32 key;
 };
 
+struct chksum_desc_ctx {
+	u32 crc;
+};
+
 /*
- * Steps through buffer one byte at at time, calculates reflected 
- * crc using table.
+ * This is the CRC-32C table
+ * Generated with:
+ * width = 32 bits
+ * poly = 0x1EDC6F41
+ * reflect input bytes = true
+ * reflect output bytes = true
  */
 
-static void chksum_init(struct crypto_tfm *tfm)
-{
-	struct chksum_ctx *mctx = crypto_tfm_ctx(tfm);
-
-	mctx->crc = mctx->key;
-}
+static const u32 crc32c_table[256] = {
+	0x00000000L, 0xF26B8303L, 0xE13B70F7L, 0x1350F3F4L,
+	0xC79A971FL, 0x35F1141CL, 0x26A1E7E8L, 0xD4CA64EBL,
+	0x8AD958CFL, 0x78B2DBCCL, 0x6BE22838L, 0x9989AB3BL,
+	0x4D43CFD0L, 0xBF284CD3L, 0xAC78BF27L, 0x5E133C24L,
+	0x105EC76FL, 0xE235446CL, 0xF165B798L, 0x030E349BL,
+	0xD7C45070L, 0x25AFD373L, 0x36FF2087L, 0xC494A384L,
+	0x9A879FA0L, 0x68EC1CA3L, 0x7BBCEF57L, 0x89D76C54L,
+	0x5D1D08BFL, 0xAF768BBCL, 0xBC267848L, 0x4E4DFB4BL,
+	0x20BD8EDEL, 0xD2D60DDDL, 0xC186FE29L, 0x33ED7D2AL,
+	0xE72719C1L, 0x154C9AC2L, 0x061C6936L, 0xF477EA35L,
+	0xAA64D611L, 0x580F5512L, 0x4B5FA6E6L, 0xB93425E5L,
+	0x6DFE410EL, 0x9F95C20DL, 0x8CC531F9L, 0x7EAEB2FAL,
+	0x30E349B1L, 0xC288CAB2L, 0xD1D83946L, 0x23B3BA45L,
+	0xF779DEAEL, 0x05125DADL, 0x1642AE59L, 0xE4292D5AL,
+	0xBA3A117EL, 0x4851927DL, 0x5B016189L, 0xA96AE28AL,
+	0x7DA08661L, 0x8FCB0562L, 0x9C9BF696L, 0x6EF07595L,
+	0x417B1DBCL, 0xB3109EBFL, 0xA0406D4BL, 0x522BEE48L,
+	0x86E18AA3L, 0x748A09A0L, 0x67DAFA54L, 0x95B17957L,
+	0xCBA24573L, 0x39C9C670L, 0x2A993584L, 0xD8F2B687L,
+	0x0C38D26CL, 0xFE53516FL, 0xED03A29BL, 0x1F682198L,
+	0x5125DAD3L, 0xA34E59D0L, 0xB01EAA24L, 0x42752927L,
+	0x96BF4DCCL, 0x64D4CECFL, 0x77843D3BL, 0x85EFBE38L,
+	0xDBFC821CL, 0x2997011FL, 0x3AC7F2EBL, 0xC8AC71E8L,
+	0x1C661503L, 0xEE0D9600L, 0xFD5D65F4L, 0x0F36E6F7L,
+	0x61C69362L, 0x93AD1061L, 0x80FDE395L, 0x72966096L,
+	0xA65C047DL, 0x5437877EL, 0x4767748AL, 0xB50CF789L,
+	0xEB1FCBADL, 0x197448AEL, 0x0A24BB5AL, 0xF84F3859L,
+	0x2C855CB2L, 0xDEEEDFB1L, 0xCDBE2C45L, 0x3FD5AF46L,
+	0x7198540DL, 0x83F3D70EL, 0x90A324FAL, 0x62C8A7F9L,
+	0xB602C312L, 0x44694011L, 0x5739B3E5L, 0xA55230E6L,
+	0xFB410CC2L, 0x092A8FC1L, 0x1A7A7C35L, 0xE811FF36L,
+	0x3CDB9BDDL, 0xCEB018DEL, 0xDDE0EB2AL, 0x2F8B6829L,
+	0x82F63B78L, 0x709DB87BL, 0x63CD4B8FL, 0x91A6C88CL,
+	0x456CAC67L, 0xB7072F64L, 0xA457DC90L, 0x563C5F93L,
+	0x082F63B7L, 0xFA44E0B4L, 0xE9141340L, 0x1B7F9043L,
+	0xCFB5F4A8L, 0x3DDE77ABL, 0x2E8E845FL, 0xDCE5075CL,
+	0x92A8FC17L, 0x60C37F14L, 0x73938CE0L, 0x81F80FE3L,
+	0x55326B08L, 0xA759E80BL, 0xB4091BFFL, 0x466298FCL,
+	0x1871A4D8L, 0xEA1A27DBL, 0xF94AD42FL, 0x0B21572CL,
+	0xDFEB33C7L, 0x2D80B0C4L, 0x3ED04330L, 0xCCBBC033L,
+	0xA24BB5A6L, 0x502036A5L, 0x4370C551L, 0xB11B4652L,
+	0x65D122B9L, 0x97BAA1BAL, 0x84EA524EL, 0x7681D14DL,
+	0x2892ED69L, 0xDAF96E6AL, 0xC9A99D9EL, 0x3BC21E9DL,
+	0xEF087A76L, 0x1D63F975L, 0x0E330A81L, 0xFC588982L,
+	0xB21572C9L, 0x407EF1CAL, 0x532E023EL, 0xA145813DL,
+	0x758FE5D6L, 0x87E466D5L, 0x94B49521L, 0x66DF1622L,
+	0x38CC2A06L, 0xCAA7A905L, 0xD9F75AF1L, 0x2B9CD9F2L,
+	0xFF56BD19L, 0x0D3D3E1AL, 0x1E6DCDEEL, 0xEC064EEDL,
+	0xC38D26C4L, 0x31E6A5C7L, 0x22B65633L, 0xD0DDD530L,
+	0x0417B1DBL, 0xF67C32D8L, 0xE52CC12CL, 0x1747422FL,
+	0x49547E0BL, 0xBB3FFD08L, 0xA86F0EFCL, 0x5A048DFFL,
+	0x8ECEE914L, 0x7CA56A17L, 0x6FF599E3L, 0x9D9E1AE0L,
+	0xD3D3E1ABL, 0x21B862A8L, 0x32E8915CL, 0xC083125FL,
+	0x144976B4L, 0xE622F5B7L, 0xF5720643L, 0x07198540L,
+	0x590AB964L, 0xAB613A67L, 0xB831C993L, 0x4A5A4A90L,
+	0x9E902E7BL, 0x6CFBAD78L, 0x7FAB5E8CL, 0x8DC0DD8FL,
+	0xE330A81AL, 0x115B2B19L, 0x020BD8EDL, 0xF0605BEEL,
+	0x24AA3F05L, 0xD6C1BC06L, 0xC5914FF2L, 0x37FACCF1L,
+	0x69E9F0D5L, 0x9B8273D6L, 0x88D28022L, 0x7AB90321L,
+	0xAE7367CAL, 0x5C18E4C9L, 0x4F48173DL, 0xBD23943EL,
+	0xF36E6F75L, 0x0105EC76L, 0x12551F82L, 0xE03E9C81L,
+	0x34F4F86AL, 0xC69F7B69L, 0xD5CF889DL, 0x27A40B9EL,
+	0x79B737BAL, 0x8BDCB4B9L, 0x988C474DL, 0x6AE7C44EL,
+	0xBE2DA0A5L, 0x4C4623A6L, 0x5F16D052L, 0xAD7D5351L
+};
 
 /*
- * Setting the seed allows arbitrary accumulators and flexible XOR policy
- * If your algorithm starts with ~0, then XOR with ~0 before you set
- * the seed.
+ * Steps through buffer one byte at a time, calculates reflected
+ * crc using table.
  */
-static int chksum_setkey(struct crypto_tfm *tfm, const u8 *key,
-			 unsigned int keylen)
-{
-	struct chksum_ctx *mctx = crypto_tfm_ctx(tfm);
 
-	if (keylen != sizeof(mctx->crc)) {
-		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
-		return -EINVAL;
-	}
-	mctx->key = le32_to_cpu(*(__le32 *)key);
-	return 0;
-}
-
-static void chksum_update(struct crypto_tfm *tfm, const u8 *data,
-			  unsigned int length)
+static u32 crc32c(u32 crc, const u8 *data, unsigned int length)
 {
-	struct chksum_ctx *mctx = crypto_tfm_ctx(tfm);
+	while (length--)
+		crc = crc32c_table[(crc ^ *data++) & 0xFFL] ^ (crc >> 8);
 
-	mctx->crc = crc32c(mctx->crc, data, length);
+	return crc;
 }
 
-static void chksum_final(struct crypto_tfm *tfm, u8 *out)
-{
-	struct chksum_ctx *mctx = crypto_tfm_ctx(tfm);
-	
-	*(__le32 *)out = ~cpu_to_le32(mctx->crc);
-}
+/*
+ * Steps through buffer one byte at a time, calculates reflected
+ * crc using table.
+ */
 
-static int crc32c_cra_init_old(struct crypto_tfm *tfm)
+static int chksum_init(struct shash_desc *desc)
 {
-	struct chksum_ctx *mctx = crypto_tfm_ctx(tfm);
+	struct chksum_ctx *mctx = crypto_shash_ctx(desc->tfm);
+	struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
+
+	ctx->crc = mctx->key;
 
-	mctx->key = ~0;
 	return 0;
 }
 
-static struct crypto_alg old_alg = {
-	.cra_name	=	"crc32c",
-	.cra_flags	=	CRYPTO_ALG_TYPE_DIGEST,
-	.cra_blocksize	=	CHKSUM_BLOCK_SIZE,
-	.cra_ctxsize	=	sizeof(struct chksum_ctx),
-	.cra_module	=	THIS_MODULE,
-	.cra_list	=	LIST_HEAD_INIT(old_alg.cra_list),
-	.cra_init	=	crc32c_cra_init_old,
-	.cra_u		=	{
-		.digest = {
-			 .dia_digestsize=	CHKSUM_DIGEST_SIZE,
-			 .dia_setkey	=	chksum_setkey,
-			 .dia_init   	= 	chksum_init,
-			 .dia_update 	=	chksum_update,
-			 .dia_final  	=	chksum_final
-		 }
-	}
-};
-
 /*
  * Setting the seed allows arbitrary accumulators and flexible XOR policy
  * If your algorithm starts with ~0, then XOR with ~0 before you set
  * the seed.
  */
-static int crc32c_setkey(struct crypto_ahash *hash, const u8 *key,
+static int chksum_setkey(struct crypto_shash *tfm, const u8 *key,
 			 unsigned int keylen)
 {
-	u32 *mctx = crypto_ahash_ctx(hash);
+	struct chksum_ctx *mctx = crypto_shash_ctx(tfm);
 
-	if (keylen != sizeof(u32)) {
-		crypto_ahash_set_flags(hash, CRYPTO_TFM_RES_BAD_KEY_LEN);
+	if (keylen != sizeof(mctx->key)) {
+		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
 		return -EINVAL;
 	}
-	*mctx = le32_to_cpup((__le32 *)key);
+	mctx->key = le32_to_cpu(*(__le32 *)key);
 	return 0;
 }
 
-static int crc32c_init(struct ahash_request *req)
+static int chksum_update(struct shash_desc *desc, const u8 *data,
+			 unsigned int length)
 {
-	u32 *mctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
-	u32 *crcp = ahash_request_ctx(req);
+	struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
 
-	*crcp = *mctx;
+	ctx->crc = crc32c(ctx->crc, data, length);
 	return 0;
 }
 
-static int crc32c_update(struct ahash_request *req)
+static int chksum_final(struct shash_desc *desc, u8 *out)
 {
-	struct crypto_hash_walk walk;
-	u32 *crcp = ahash_request_ctx(req);
-	u32 crc = *crcp;
-	int nbytes;
-
-	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes;
-	     nbytes = crypto_hash_walk_done(&walk, 0))
-		crc = crc32c(crc, walk.data, nbytes);
+	struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
 
-	*crcp = crc;
+	*(__le32 *)out = ~cpu_to_le32p(&ctx->crc);
 	return 0;
 }
 
-static int crc32c_final(struct ahash_request *req)
+static int __chksum_finup(u32 *crcp, const u8 *data, unsigned int len, u8 *out)
 {
-	u32 *crcp = ahash_request_ctx(req);
-	
-	*(__le32 *)req->result = ~cpu_to_le32p(crcp);
+	*(__le32 *)out = ~cpu_to_le32(crc32c(*crcp, data, len));
 	return 0;
 }
 
-static int crc32c_digest(struct ahash_request *req)
+static int chksum_finup(struct shash_desc *desc, const u8 *data,
+			unsigned int len, u8 *out)
 {
-	struct crypto_hash_walk walk;
-	u32 *mctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
-	u32 crc = *mctx;
-	int nbytes;
+	struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
 
-	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes;
-	     nbytes = crypto_hash_walk_done(&walk, 0))
-		crc = crc32c(crc, walk.data, nbytes);
-
-	*(__le32 *)req->result = ~cpu_to_le32(crc);
-	return 0;
+	return __chksum_finup(&ctx->crc, data, len, out);
 }
 
-static int crc32c_cra_init(struct crypto_tfm *tfm)
+static int chksum_digest(struct shash_desc *desc, const u8 *data,
+			 unsigned int length, u8 *out)
 {
-	u32 *key = crypto_tfm_ctx(tfm);
+	struct chksum_ctx *mctx = crypto_shash_ctx(desc->tfm);
 
-	*key = ~0;
+	return __chksum_finup(&mctx->key, data, length, out);
+}
 
-	tfm->crt_ahash.reqsize = sizeof(u32);
+static int crc32c_cra_init(struct crypto_tfm *tfm)
+{
+	struct chksum_ctx *mctx = crypto_tfm_ctx(tfm);
 
+	mctx->key = ~0;
 	return 0;
 }
 
-static struct crypto_alg alg = {
-	.cra_name		=	"crc32c",
-	.cra_driver_name	=	"crc32c-generic",
-	.cra_priority		=	100,
-	.cra_flags		=	CRYPTO_ALG_TYPE_AHASH,
-	.cra_blocksize		=	CHKSUM_BLOCK_SIZE,
-	.cra_alignmask		=	3,
-	.cra_ctxsize		=	sizeof(u32),
-	.cra_module		=	THIS_MODULE,
-	.cra_list		=	LIST_HEAD_INIT(alg.cra_list),
-	.cra_init		=	crc32c_cra_init,
-	.cra_type		=	&crypto_ahash_type,
-	.cra_u			=	{
-		.ahash = {
-			 .digestsize	=	CHKSUM_DIGEST_SIZE,
-			 .setkey	=	crc32c_setkey,
-			 .init   	= 	crc32c_init,
-			 .update 	=	crc32c_update,
-			 .final  	=	crc32c_final,
-			 .digest  	=	crc32c_digest,
-		 }
+static struct shash_alg alg = {
+	.digestsize		=	CHKSUM_DIGEST_SIZE,
+	.setkey			=	chksum_setkey,
+	.init   		= 	chksum_init,
+	.update 		=	chksum_update,
+	.final  		=	chksum_final,
+	.finup  		=	chksum_finup,
+	.digest  		=	chksum_digest,
+	.descsize		=	sizeof(struct chksum_desc_ctx),
+	.base			=	{
+		.cra_name		=	"crc32c",
+		.cra_driver_name	=	"crc32c-generic",
+		.cra_priority		=	100,
+		.cra_blocksize		=	CHKSUM_BLOCK_SIZE,
+		.cra_alignmask		=	3,
+		.cra_ctxsize		=	sizeof(struct chksum_ctx),
+		.cra_module		=	THIS_MODULE,
+		.cra_init		=	crc32c_cra_init,
 	}
 };
 
 static int __init crc32c_mod_init(void)
 {
-	int err;
-
-	err = crypto_register_alg(&old_alg);
-	if (err)
-		return err;
-
-	err = crypto_register_alg(&alg);
-	if (err)
-		crypto_unregister_alg(&old_alg);
-
-	return err;
+	return crypto_register_shash(&alg);
 }
 
 static void __exit crc32c_mod_fini(void)
 {
-	crypto_unregister_alg(&alg);
-	crypto_unregister_alg(&old_alg);
+	crypto_unregister_shash(&alg);
 }
 
 module_init(crc32c_mod_init);
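For illustration only, not part of the patch: the table above can be regenerated from the parameters quoted in its comment; 0x82F63B78 is the bit-reflected form of the Castagnoli polynomial 0x1EDC6F41. The default seed installed by crc32c_cra_init() remains ~0 and chksum_final()/__chksum_finup() invert the result on output, so the usual CRC-32C convention is unchanged. A standalone sketch:

	#include <stdint.h>

	static void gen_crc32c_table(uint32_t table[256])
	{
		uint32_t crc;
		int i, j;

		for (i = 0; i < 256; i++) {
			crc = i;
			for (j = 0; j < 8; j++)
				crc = (crc & 1) ? (crc >> 1) ^ 0x82F63B78
						: crc >> 1;
			table[i] = crc;	/* e.g. table[1] == 0xF26B8303 */
		}
	}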
diff --git a/crypto/crypto_null.c b/crypto/crypto_null.c
index 1f7d53013a228b4a459470d86ebfc0bb17998239..cb71c9122bc0d794f5a531765fbda14a1187bcda 100644
--- a/crypto/crypto_null.c
+++ b/crypto/crypto_null.c
@@ -17,6 +17,7 @@
  *
  */
 
+#include <crypto/internal/hash.h>
 #include <crypto/internal/skcipher.h>
 #include <linux/init.h>
 #include <linux/module.h>
@@ -38,15 +39,31 @@ static int null_compress(struct crypto_tfm *tfm, const u8 *src,
 	return 0;
 }
 
-static void null_init(struct crypto_tfm *tfm)
-{ }
+static int null_init(struct shash_desc *desc)
+{
+	return 0;
+}
 
-static void null_update(struct crypto_tfm *tfm, const u8 *data,
-			unsigned int len)
-{ }
+static int null_update(struct shash_desc *desc, const u8 *data,
+		       unsigned int len)
+{
+	return 0;
+}
 
-static void null_final(struct crypto_tfm *tfm, u8 *out)
-{ }
+static int null_final(struct shash_desc *desc, u8 *out)
+{
+	return 0;
+}
+
+static int null_digest(struct shash_desc *desc, const u8 *data,
+		       unsigned int len, u8 *out)
+{
+	return 0;
+}
+
+static int null_hash_setkey(struct crypto_shash *tfm, const u8 *key,
+			    unsigned int keylen)
+{ return 0; }
 
 static int null_setkey(struct crypto_tfm *tfm, const u8 *key,
 		       unsigned int keylen)
@@ -89,19 +106,20 @@ static struct crypto_alg compress_null = {
 	.coa_decompress		=	null_compress } }
 };
 
-static struct crypto_alg digest_null = {
-	.cra_name		=	"digest_null",
-	.cra_flags		=	CRYPTO_ALG_TYPE_DIGEST,
-	.cra_blocksize		=	NULL_BLOCK_SIZE,
-	.cra_ctxsize		=	0,
-	.cra_module		=	THIS_MODULE,
-	.cra_list		=       LIST_HEAD_INIT(digest_null.cra_list),	
-	.cra_u			=	{ .digest = {
-	.dia_digestsize		=	NULL_DIGEST_SIZE,
-	.dia_setkey   		=	null_setkey,
-	.dia_init   		=	null_init,
-	.dia_update 		=	null_update,
-	.dia_final  		=	null_final } }
+static struct shash_alg digest_null = {
+	.digestsize		=	NULL_DIGEST_SIZE,
+	.setkey   		=	null_hash_setkey,
+	.init   		=	null_init,
+	.update 		=	null_update,
+	.finup 			=	null_digest,
+	.digest 		=	null_digest,
+	.final  		=	null_final,
+	.base			=	{
+		.cra_name		=	"digest_null",
+		.cra_flags		=	CRYPTO_ALG_TYPE_SHASH,
+		.cra_blocksize		=	NULL_BLOCK_SIZE,
+		.cra_module		=	THIS_MODULE,
+	}
 };
 
 static struct crypto_alg cipher_null = {
@@ -154,7 +172,7 @@ static int __init crypto_null_mod_init(void)
 	if (ret < 0)
 		goto out_unregister_cipher;
 
-	ret = crypto_register_alg(&digest_null);
+	ret = crypto_register_shash(&digest_null);
 	if (ret < 0)
 		goto out_unregister_skcipher;
 
@@ -166,7 +184,7 @@ static int __init crypto_null_mod_init(void)
 	return ret;
 
 out_unregister_digest:
-	crypto_unregister_alg(&digest_null);
+	crypto_unregister_shash(&digest_null);
 out_unregister_skcipher:
 	crypto_unregister_alg(&skcipher_null);
 out_unregister_cipher:
@@ -177,7 +195,7 @@ static int __init crypto_null_mod_init(void)
 static void __exit crypto_null_mod_fini(void)
 {
 	crypto_unregister_alg(&compress_null);
-	crypto_unregister_alg(&digest_null);
+	crypto_unregister_shash(&digest_null);
 	crypto_unregister_alg(&skcipher_null);
 	crypto_unregister_alg(&cipher_null);
 }
diff --git a/crypto/des_generic.c b/crypto/des_generic.c
index 5d0e4580f998c23650eb24ba73135cd5af68a01e..5bd3ee345a64d8e60604a32097818a3a3d369a46 100644
--- a/crypto/des_generic.c
+++ b/crypto/des_generic.c
@@ -868,9 +868,10 @@ static int des3_ede_setkey(struct crypto_tfm *tfm, const u8 *key,
 	u32 *flags = &tfm->crt_flags;
 
 	if (unlikely(!((K[0] ^ K[2]) | (K[1] ^ K[3])) ||
-		     !((K[2] ^ K[4]) | (K[3] ^ K[5]))))
+		     !((K[2] ^ K[4]) | (K[3] ^ K[5]))) &&
+		     (*flags & CRYPTO_TFM_REQ_WEAK_KEY))
 	{
-		*flags |= CRYPTO_TFM_RES_BAD_KEY_SCHED;
+		*flags |= CRYPTO_TFM_RES_WEAK_KEY;
 		return -EINVAL;
 	}
 
diff --git a/crypto/fcrypt.c b/crypto/fcrypt.c
index 1302f4cae337489f5a7aafaa39a36e1fab56bdca..b82d61f4e26c487c8b9f1a8838621040e25fdd5b 100644
--- a/crypto/fcrypt.c
+++ b/crypto/fcrypt.c
@@ -73,7 +73,7 @@ do {								\
  * /afs/transarc.com/public/afsps/afs.rel31b.export-src/rxkad/sboxes.h
  */
 #undef Z
-#define Z(x) __constant_cpu_to_be32(x << 3)
+#define Z(x) cpu_to_be32(x << 3)
 static const __be32 sbox0[256] = {
 	Z(0xea), Z(0x7f), Z(0xb2), Z(0x64), Z(0x9d), Z(0xb0), Z(0xd9), Z(0x11),
 	Z(0xcd), Z(0x86), Z(0x86), Z(0x91), Z(0x0a), Z(0xb2), Z(0x93), Z(0x06),
@@ -110,7 +110,7 @@ static const __be32 sbox0[256] = {
 };
 
 #undef Z
-#define Z(x) __constant_cpu_to_be32((x << 27) | (x >> 5))
+#define Z(x) cpu_to_be32((x << 27) | (x >> 5))
 static const __be32 sbox1[256] = {
 	Z(0x77), Z(0x14), Z(0xa6), Z(0xfe), Z(0xb2), Z(0x5e), Z(0x8c), Z(0x3e),
 	Z(0x67), Z(0x6c), Z(0xa1), Z(0x0d), Z(0xc2), Z(0xa2), Z(0xc1), Z(0x85),
@@ -147,7 +147,7 @@ static const __be32 sbox1[256] = {
 };
 
 #undef Z
-#define Z(x) __constant_cpu_to_be32(x << 11)
+#define Z(x) cpu_to_be32(x << 11)
 static const __be32 sbox2[256] = {
 	Z(0xf0), Z(0x37), Z(0x24), Z(0x53), Z(0x2a), Z(0x03), Z(0x83), Z(0x86),
 	Z(0xd1), Z(0xec), Z(0x50), Z(0xf0), Z(0x42), Z(0x78), Z(0x2f), Z(0x6d),
@@ -184,7 +184,7 @@ static const __be32 sbox2[256] = {
 };
 
 #undef Z
-#define Z(x) __constant_cpu_to_be32(x << 19)
+#define Z(x) cpu_to_be32(x << 19)
 static const __be32 sbox3[256] = {
 	Z(0xa9), Z(0x2a), Z(0x48), Z(0x51), Z(0x84), Z(0x7e), Z(0x49), Z(0xe2),
 	Z(0xb5), Z(0xb7), Z(0x42), Z(0x33), Z(0x7d), Z(0x5d), Z(0xa6), Z(0x12),
diff --git a/crypto/hmac.c b/crypto/hmac.c
index 7ff2d6a8c7d0b5c58b753140cd579e332e385819..0ad39c3749639e227bef876664ce3ff7ed147c2c 100644
--- a/crypto/hmac.c
+++ b/crypto/hmac.c
@@ -16,7 +16,7 @@
  *
  */
 
-#include <crypto/algapi.h>
+#include <crypto/internal/hash.h>
 #include <crypto/scatterwalk.h>
 #include <linux/err.h>
 #include <linux/init.h>
@@ -238,9 +238,11 @@ static struct crypto_instance *hmac_alloc(struct rtattr **tb)
 		return ERR_CAST(alg);
 
 	inst = ERR_PTR(-EINVAL);
-	ds = (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
-	     CRYPTO_ALG_TYPE_HASH ? alg->cra_hash.digestsize :
-				    alg->cra_digest.dia_digestsize;
+	ds = alg->cra_type == &crypto_hash_type ?
+	     alg->cra_hash.digestsize :
+	     alg->cra_type ?
+	     __crypto_shash_alg(alg)->digestsize :
+	     alg->cra_digest.dia_digestsize;
 	if (ds > alg->cra_blocksize)
 		goto out_put_alg;
 
diff --git a/crypto/internal.h b/crypto/internal.h
index 8ef72d76092e40227000c3126688f254ffed6978..3c19a27a7563cd628513a777a59121a24caadf85 100644
--- a/crypto/internal.h
+++ b/crypto/internal.h
@@ -109,6 +109,8 @@ void crypto_alg_tested(const char *name, int err);
 void crypto_shoot_alg(struct crypto_alg *alg);
 struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
 				      u32 mask);
+struct crypto_tfm *crypto_create_tfm(struct crypto_alg *alg,
+				     const struct crypto_type *frontend);
 
 int crypto_register_instance(struct crypto_template *tmpl,
 			     struct crypto_instance *inst);
diff --git a/crypto/md4.c b/crypto/md4.c
index 3c19aa0750fdd2f19f135aec5f958a8b55c18d5a..7fca1f59a4f5253baff7a13db7d1fcab9cc9371e 100644
--- a/crypto/md4.c
+++ b/crypto/md4.c
@@ -20,8 +20,8 @@
  * (at your option) any later version.
  *
  */
+#include <crypto/internal/hash.h>
 #include <linux/init.h>
-#include <linux/crypto.h>
 #include <linux/kernel.h>
 #include <linux/string.h>
 #include <linux/types.h>
@@ -58,7 +58,7 @@ static inline u32 H(u32 x, u32 y, u32 z)
 {
 	return x ^ y ^ z;
 }
-                        
+
 #define ROUND1(a,b,c,d,k,s) (a = lshift(a + F(b,c,d) + k, s))
 #define ROUND2(a,b,c,d,k,s) (a = lshift(a + G(b,c,d) + k + (u32)0x5A827999,s))
 #define ROUND3(a,b,c,d,k,s) (a = lshift(a + H(b,c,d) + k + (u32)0x6ED9EBA1,s))
@@ -148,24 +148,26 @@ static void md4_transform(u32 *hash, u32 const *in)
 
 static inline void md4_transform_helper(struct md4_ctx *ctx)
 {
-	le32_to_cpu_array(ctx->block, sizeof(ctx->block) / sizeof(u32));
+	le32_to_cpu_array(ctx->block, ARRAY_SIZE(ctx->block));
 	md4_transform(ctx->hash, ctx->block);
 }
 
-static void md4_init(struct crypto_tfm *tfm)
+static int md4_init(struct shash_desc *desc)
 {
-	struct md4_ctx *mctx = crypto_tfm_ctx(tfm);
+	struct md4_ctx *mctx = shash_desc_ctx(desc);
 
 	mctx->hash[0] = 0x67452301;
 	mctx->hash[1] = 0xefcdab89;
 	mctx->hash[2] = 0x98badcfe;
 	mctx->hash[3] = 0x10325476;
 	mctx->byte_count = 0;
+
+	return 0;
 }
 
-static void md4_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len)
+static int md4_update(struct shash_desc *desc, const u8 *data, unsigned int len)
 {
-	struct md4_ctx *mctx = crypto_tfm_ctx(tfm);
+	struct md4_ctx *mctx = shash_desc_ctx(desc);
 	const u32 avail = sizeof(mctx->block) - (mctx->byte_count & 0x3f);
 
 	mctx->byte_count += len;
@@ -173,7 +175,7 @@ static void md4_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len)
 	if (avail > len) {
 		memcpy((char *)mctx->block + (sizeof(mctx->block) - avail),
 		       data, len);
-		return;
+		return 0;
 	}
 
 	memcpy((char *)mctx->block + (sizeof(mctx->block) - avail),
@@ -191,11 +193,13 @@ static void md4_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len)
 	}
 
 	memcpy(mctx->block, data, len);
+
+	return 0;
 }
 
-static void md4_final(struct crypto_tfm *tfm, u8 *out)
+static int md4_final(struct shash_desc *desc, u8 *out)
 {
-	struct md4_ctx *mctx = crypto_tfm_ctx(tfm);
+	struct md4_ctx *mctx = shash_desc_ctx(desc);
 	const unsigned int offset = mctx->byte_count & 0x3f;
 	char *p = (char *)mctx->block + offset;
 	int padding = 56 - (offset + 1);
@@ -214,33 +218,35 @@ static void md4_final(struct crypto_tfm *tfm, u8 *out)
 	le32_to_cpu_array(mctx->block, (sizeof(mctx->block) -
 	                  sizeof(u64)) / sizeof(u32));
 	md4_transform(mctx->hash, mctx->block);
-	cpu_to_le32_array(mctx->hash, sizeof(mctx->hash) / sizeof(u32));
+	cpu_to_le32_array(mctx->hash, ARRAY_SIZE(mctx->hash));
 	memcpy(out, mctx->hash, sizeof(mctx->hash));
 	memset(mctx, 0, sizeof(*mctx));
+
+	return 0;
 }
 
-static struct crypto_alg alg = {
-	.cra_name	=	"md4",
-	.cra_flags	=	CRYPTO_ALG_TYPE_DIGEST,
-	.cra_blocksize	=	MD4_HMAC_BLOCK_SIZE,
-	.cra_ctxsize	=	sizeof(struct md4_ctx),
-	.cra_module	=	THIS_MODULE,
-	.cra_list       =       LIST_HEAD_INIT(alg.cra_list),	
-	.cra_u		=	{ .digest = {
-	.dia_digestsize	=	MD4_DIGEST_SIZE,
-	.dia_init   	= 	md4_init,
-	.dia_update 	=	md4_update,
-	.dia_final  	=	md4_final } }
+static struct shash_alg alg = {
+	.digestsize	=	MD4_DIGEST_SIZE,
+	.init		=	md4_init,
+	.update		=	md4_update,
+	.final		=	md4_final,
+	.descsize	=	sizeof(struct md4_ctx),
+	.base		=	{
+		.cra_name	=	"md4",
+		.cra_flags	=	CRYPTO_ALG_TYPE_SHASH,
+		.cra_blocksize	=	MD4_HMAC_BLOCK_SIZE,
+		.cra_module	=	THIS_MODULE,
+	}
 };
 
 static int __init md4_mod_init(void)
 {
-	return crypto_register_alg(&alg);
+	return crypto_register_shash(&alg);
 }
 
 static void __exit md4_mod_fini(void)
 {
-	crypto_unregister_alg(&alg);
+	crypto_unregister_shash(&alg);
 }
 
 module_init(md4_mod_init);
diff --git a/crypto/md5.c b/crypto/md5.c
index 39268f3d2f1d249cdcabec3d8fe58315bee10725..83eb52961750115b6332869a4e19fa7b852d39cb 100644
--- a/crypto/md5.c
+++ b/crypto/md5.c
@@ -15,10 +15,10 @@
  * any later version.
  *
  */
+#include <crypto/internal/hash.h>
 #include <linux/init.h>
 #include <linux/module.h>
 #include <linux/string.h>
-#include <linux/crypto.h>
 #include <linux/types.h>
 #include <asm/byteorder.h>
 
@@ -147,20 +147,22 @@ static inline void md5_transform_helper(struct md5_ctx *ctx)
 	md5_transform(ctx->hash, ctx->block);
 }
 
-static void md5_init(struct crypto_tfm *tfm)
+static int md5_init(struct shash_desc *desc)
 {
-	struct md5_ctx *mctx = crypto_tfm_ctx(tfm);
+	struct md5_ctx *mctx = shash_desc_ctx(desc);
 
 	mctx->hash[0] = 0x67452301;
 	mctx->hash[1] = 0xefcdab89;
 	mctx->hash[2] = 0x98badcfe;
 	mctx->hash[3] = 0x10325476;
 	mctx->byte_count = 0;
+
+	return 0;
 }
 
-static void md5_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len)
+static int md5_update(struct shash_desc *desc, const u8 *data, unsigned int len)
 {
-	struct md5_ctx *mctx = crypto_tfm_ctx(tfm);
+	struct md5_ctx *mctx = shash_desc_ctx(desc);
 	const u32 avail = sizeof(mctx->block) - (mctx->byte_count & 0x3f);
 
 	mctx->byte_count += len;
@@ -168,7 +170,7 @@ static void md5_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len)
 	if (avail > len) {
 		memcpy((char *)mctx->block + (sizeof(mctx->block) - avail),
 		       data, len);
-		return;
+		return 0;
 	}
 
 	memcpy((char *)mctx->block + (sizeof(mctx->block) - avail),
@@ -186,11 +188,13 @@ static void md5_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len)
 	}
 
 	memcpy(mctx->block, data, len);
+
+	return 0;
 }
 
-static void md5_final(struct crypto_tfm *tfm, u8 *out)
+static int md5_final(struct shash_desc *desc, u8 *out)
 {
-	struct md5_ctx *mctx = crypto_tfm_ctx(tfm);
+	struct md5_ctx *mctx = shash_desc_ctx(desc);
 	const unsigned int offset = mctx->byte_count & 0x3f;
 	char *p = (char *)mctx->block + offset;
 	int padding = 56 - (offset + 1);
@@ -212,30 +216,32 @@ static void md5_final(struct crypto_tfm *tfm, u8 *out)
 	cpu_to_le32_array(mctx->hash, sizeof(mctx->hash) / sizeof(u32));
 	memcpy(out, mctx->hash, sizeof(mctx->hash));
 	memset(mctx, 0, sizeof(*mctx));
+
+	return 0;
 }
 
-static struct crypto_alg alg = {
-	.cra_name	=	"md5",
-	.cra_flags	=	CRYPTO_ALG_TYPE_DIGEST,
-	.cra_blocksize	=	MD5_HMAC_BLOCK_SIZE,
-	.cra_ctxsize	=	sizeof(struct md5_ctx),
-	.cra_module	=	THIS_MODULE,
-	.cra_list	=	LIST_HEAD_INIT(alg.cra_list),
-	.cra_u		=	{ .digest = {
-	.dia_digestsize	=	MD5_DIGEST_SIZE,
-	.dia_init   	= 	md5_init,
-	.dia_update 	=	md5_update,
-	.dia_final  	=	md5_final } }
+static struct shash_alg alg = {
+	.digestsize	=	MD5_DIGEST_SIZE,
+	.init		=	md5_init,
+	.update		=	md5_update,
+	.final		=	md5_final,
+	.descsize	=	sizeof(struct md5_ctx),
+	.base		=	{
+		.cra_name	=	"md5",
+		.cra_flags	=	CRYPTO_ALG_TYPE_SHASH,
+		.cra_blocksize	=	MD5_HMAC_BLOCK_SIZE,
+		.cra_module	=	THIS_MODULE,
+	}
 };
 
 static int __init md5_mod_init(void)
 {
-	return crypto_register_alg(&alg);
+	return crypto_register_shash(&alg);
 }
 
 static void __exit md5_mod_fini(void)
 {
-	crypto_unregister_alg(&alg);
+	crypto_unregister_shash(&alg);
 }
 
 module_init(md5_mod_init);
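For illustration only, not part of the patch: once converted, md4 and md5 are driven through the new synchronous shash interface instead of the old digest ops. A minimal caller-side sketch, error handling abbreviated:

	struct crypto_shash *tfm;
	struct shash_desc *desc;
	u8 out[16];	/* MD5 digest size */

	tfm = crypto_alloc_shash("md5", 0, 0);
	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
	desc->tfm = tfm;
	desc->flags = 0;

	crypto_shash_digest(desc, data, len, out);

	kfree(desc);
	crypto_free_shash(tfm);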
diff --git a/crypto/michael_mic.c b/crypto/michael_mic.c
index 9e917b8011b11e1b712bf769bf8c7bdab4a79a6e..079b761bc70d125b241da7d13d30d7cef8b43846 100644
--- a/crypto/michael_mic.c
+++ b/crypto/michael_mic.c
@@ -9,23 +9,25 @@
  * it under the terms of the GNU General Public License version 2 as
  * published by the Free Software Foundation.
  */
-
+#include <crypto/internal/hash.h>
 #include <asm/byteorder.h>
 #include <linux/init.h>
 #include <linux/module.h>
 #include <linux/string.h>
-#include <linux/crypto.h>
 #include <linux/types.h>
 
 
 struct michael_mic_ctx {
+	u32 l, r;
+};
+
+struct michael_mic_desc_ctx {
 	u8 pending[4];
 	size_t pending_len;
 
 	u32 l, r;
 };
 
-
 static inline u32 xswap(u32 val)
 {
 	return ((val & 0x00ff00ff) << 8) | ((val & 0xff00ff00) >> 8);
@@ -45,17 +47,22 @@ do {				\
 } while (0)
 
 
-static void michael_init(struct crypto_tfm *tfm)
+static int michael_init(struct shash_desc *desc)
 {
-	struct michael_mic_ctx *mctx = crypto_tfm_ctx(tfm);
+	struct michael_mic_desc_ctx *mctx = shash_desc_ctx(desc);
+	struct michael_mic_ctx *ctx = crypto_shash_ctx(desc->tfm);
 	mctx->pending_len = 0;
+	mctx->l = ctx->l;
+	mctx->r = ctx->r;
+
+	return 0;
 }
 
 
-static void michael_update(struct crypto_tfm *tfm, const u8 *data,
+static int michael_update(struct shash_desc *desc, const u8 *data,
 			   unsigned int len)
 {
-	struct michael_mic_ctx *mctx = crypto_tfm_ctx(tfm);
+	struct michael_mic_desc_ctx *mctx = shash_desc_ctx(desc);
 	const __le32 *src;
 
 	if (mctx->pending_len) {
@@ -68,7 +75,7 @@ static void michael_update(struct crypto_tfm *tfm, const u8 *data,
 		len -= flen;
 
 		if (mctx->pending_len < 4)
-			return;
+			return 0;
 
 		src = (const __le32 *)mctx->pending;
 		mctx->l ^= le32_to_cpup(src);
@@ -88,12 +95,14 @@ static void michael_update(struct crypto_tfm *tfm, const u8 *data,
 		mctx->pending_len = len;
 		memcpy(mctx->pending, src, len);
 	}
+
+	return 0;
 }
 
 
-static void michael_final(struct crypto_tfm *tfm, u8 *out)
+static int michael_final(struct shash_desc *desc, u8 *out)
 {
-	struct michael_mic_ctx *mctx = crypto_tfm_ctx(tfm);
+	struct michael_mic_desc_ctx *mctx = shash_desc_ctx(desc);
 	u8 *data = mctx->pending;
 	__le32 *dst = (__le32 *)out;
 
@@ -119,17 +128,20 @@ static void michael_final(struct crypto_tfm *tfm, u8 *out)
 
 	dst[0] = cpu_to_le32(mctx->l);
 	dst[1] = cpu_to_le32(mctx->r);
+
+	return 0;
 }
 
 
-static int michael_setkey(struct crypto_tfm *tfm, const u8 *key,
+static int michael_setkey(struct crypto_shash *tfm, const u8 *key,
 			  unsigned int keylen)
 {
-	struct michael_mic_ctx *mctx = crypto_tfm_ctx(tfm);
+	struct michael_mic_ctx *mctx = crypto_shash_ctx(tfm);
+
 	const __le32 *data = (const __le32 *)key;
 
 	if (keylen != 8) {
-		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
+		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
 		return -EINVAL;
 	}
 
@@ -138,33 +150,31 @@ static int michael_setkey(struct crypto_tfm *tfm, const u8 *key,
 	return 0;
 }
 
-
-static struct crypto_alg michael_mic_alg = {
-	.cra_name	= "michael_mic",
-	.cra_flags	= CRYPTO_ALG_TYPE_DIGEST,
-	.cra_blocksize	= 8,
-	.cra_ctxsize	= sizeof(struct michael_mic_ctx),
-	.cra_module	= THIS_MODULE,
-	.cra_alignmask	= 3,
-	.cra_list	= LIST_HEAD_INIT(michael_mic_alg.cra_list),
-	.cra_u		= { .digest = {
-	.dia_digestsize	= 8,
-	.dia_init	= michael_init,
-	.dia_update	= michael_update,
-	.dia_final	= michael_final,
-	.dia_setkey	= michael_setkey } }
+static struct shash_alg alg = {
+	.digestsize		=	8,
+	.setkey			=	michael_setkey,
+	.init			=	michael_init,
+	.update			=	michael_update,
+	.final			=	michael_final,
+	.descsize		=	sizeof(struct michael_mic_desc_ctx),
+	.base			=	{
+		.cra_name		=	"michael_mic",
+		.cra_blocksize		=	8,
+		.cra_alignmask		=	3,
+		.cra_ctxsize		=	sizeof(struct michael_mic_ctx),
+		.cra_module		=	THIS_MODULE,
+	}
 };
 
-
 static int __init michael_mic_init(void)
 {
-	return crypto_register_alg(&michael_mic_alg);
+	return crypto_register_shash(&alg);
 }
 
 
 static void __exit michael_mic_exit(void)
 {
-	crypto_unregister_alg(&michael_mic_alg);
+	crypto_unregister_shash(&alg);
 }
 
 
diff --git a/crypto/proc.c b/crypto/proc.c
index 37a13d05636d4beaaea521d4538aa711e8bbb627..5dc07e442fca00f73359b53cb44669d4250bafc2 100644
--- a/crypto/proc.c
+++ b/crypto/proc.c
@@ -94,6 +94,17 @@ static int c_show(struct seq_file *m, void *p)
 	seq_printf(m, "selftest     : %s\n",
 		   (alg->cra_flags & CRYPTO_ALG_TESTED) ?
 		   "passed" : "unknown");
+
+	if (alg->cra_flags & CRYPTO_ALG_LARVAL) {
+		seq_printf(m, "type         : larval\n");
+		seq_printf(m, "flags        : 0x%x\n", alg->cra_flags);
+		goto out;
+	}
+
+	if (alg->cra_type && alg->cra_type->show) {
+		alg->cra_type->show(m, alg);
+		goto out;
+	}
 	
 	switch (alg->cra_flags & (CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_LARVAL)) {
 	case CRYPTO_ALG_TYPE_CIPHER:
@@ -115,16 +126,11 @@ static int c_show(struct seq_file *m, void *p)
 		seq_printf(m, "type         : compression\n");
 		break;
 	default:
-		if (alg->cra_flags & CRYPTO_ALG_LARVAL) {
-			seq_printf(m, "type         : larval\n");
-			seq_printf(m, "flags        : 0x%x\n", alg->cra_flags);
-		} else if (alg->cra_type && alg->cra_type->show)
-			alg->cra_type->show(m, alg);
-		else
-			seq_printf(m, "type         : unknown\n");
+		seq_printf(m, "type         : unknown\n");
 		break;
 	}
 
+out:
 	seq_putc(m, '\n');
 	return 0;
 }
diff --git a/crypto/rmd128.c b/crypto/rmd128.c
index 5de6fa2a76fbe489f4868341ce595971733e3a91..1ceb6735aa5386f5507fbd0c9e5cc19a66feb7b5 100644
--- a/crypto/rmd128.c
+++ b/crypto/rmd128.c
@@ -13,11 +13,10 @@
  * any later version.
  *
  */
+#include <crypto/internal/hash.h>
 #include <linux/init.h>
 #include <linux/module.h>
 #include <linux/mm.h>
-#include <linux/crypto.h>
-#include <linux/cryptohash.h>
 #include <linux/types.h>
 #include <asm/byteorder.h>
 
@@ -218,9 +217,9 @@ static void rmd128_transform(u32 *state, const __le32 *in)
 	return;
 }
 
-static void rmd128_init(struct crypto_tfm *tfm)
+static int rmd128_init(struct shash_desc *desc)
 {
-	struct rmd128_ctx *rctx = crypto_tfm_ctx(tfm);
+	struct rmd128_ctx *rctx = shash_desc_ctx(desc);
 
 	rctx->byte_count = 0;
 
@@ -230,12 +229,14 @@ static void rmd128_init(struct crypto_tfm *tfm)
 	rctx->state[3] = RMD_H3;
 
 	memset(rctx->buffer, 0, sizeof(rctx->buffer));
+
+	return 0;
 }
 
-static void rmd128_update(struct crypto_tfm *tfm, const u8 *data,
-			  unsigned int len)
+static int rmd128_update(struct shash_desc *desc, const u8 *data,
+			 unsigned int len)
 {
-	struct rmd128_ctx *rctx = crypto_tfm_ctx(tfm);
+	struct rmd128_ctx *rctx = shash_desc_ctx(desc);
 	const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f);
 
 	rctx->byte_count += len;
@@ -244,7 +245,7 @@ static void rmd128_update(struct crypto_tfm *tfm, const u8 *data,
 	if (avail > len) {
 		memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
 		       data, len);
-		return;
+		goto out;
 	}
 
 	memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
@@ -262,12 +263,15 @@ static void rmd128_update(struct crypto_tfm *tfm, const u8 *data,
 	}
 
 	memcpy(rctx->buffer, data, len);
+
+out:
+	return 0;
 }
 
 /* Add padding and return the message digest. */
-static void rmd128_final(struct crypto_tfm *tfm, u8 *out)
+static int rmd128_final(struct shash_desc *desc, u8 *out)
 {
-	struct rmd128_ctx *rctx = crypto_tfm_ctx(tfm);
+	struct rmd128_ctx *rctx = shash_desc_ctx(desc);
 	u32 i, index, padlen;
 	__le64 bits;
 	__le32 *dst = (__le32 *)out;
@@ -278,10 +282,10 @@ static void rmd128_final(struct crypto_tfm *tfm, u8 *out)
 	/* Pad out to 56 mod 64 */
 	index = rctx->byte_count & 0x3f;
 	padlen = (index < 56) ? (56 - index) : ((64+56) - index);
-	rmd128_update(tfm, padding, padlen);
+	rmd128_update(desc, padding, padlen);
 
 	/* Append length */
-	rmd128_update(tfm, (const u8 *)&bits, sizeof(bits));
+	rmd128_update(desc, (const u8 *)&bits, sizeof(bits));
 
 	/* Store state in digest */
 	for (i = 0; i < 4; i++)
@@ -289,31 +293,32 @@ static void rmd128_final(struct crypto_tfm *tfm, u8 *out)
 
 	/* Wipe context */
 	memset(rctx, 0, sizeof(*rctx));
+
+	return 0;
 }
 
-static struct crypto_alg alg = {
-	.cra_name	 =	"rmd128",
-	.cra_driver_name =	"rmd128",
-	.cra_flags	 =	CRYPTO_ALG_TYPE_DIGEST,
-	.cra_blocksize	 =	RMD128_BLOCK_SIZE,
-	.cra_ctxsize	 =	sizeof(struct rmd128_ctx),
-	.cra_module	 =	THIS_MODULE,
-	.cra_list	 =	LIST_HEAD_INIT(alg.cra_list),
-	.cra_u		 =	{ .digest = {
-	.dia_digestsize	 =	RMD128_DIGEST_SIZE,
-	.dia_init	 =	rmd128_init,
-	.dia_update	 =	rmd128_update,
-	.dia_final	 =	rmd128_final } }
+static struct shash_alg alg = {
+	.digestsize	=	RMD128_DIGEST_SIZE,
+	.init		=	rmd128_init,
+	.update		=	rmd128_update,
+	.final		=	rmd128_final,
+	.descsize	=	sizeof(struct rmd128_ctx),
+	.base		=	{
+		.cra_name	 =	"rmd128",
+		.cra_flags	 =	CRYPTO_ALG_TYPE_SHASH,
+		.cra_blocksize	 =	RMD128_BLOCK_SIZE,
+		.cra_module	 =	THIS_MODULE,
+	}
 };
 
 static int __init rmd128_mod_init(void)
 {
-	return crypto_register_alg(&alg);
+	return crypto_register_shash(&alg);
 }
 
 static void __exit rmd128_mod_fini(void)
 {
-	crypto_unregister_alg(&alg);
+	crypto_unregister_shash(&alg);
 }
 
 module_init(rmd128_mod_init);
@@ -321,5 +326,3 @@ module_exit(rmd128_mod_fini);
 
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("RIPEMD-128 Message Digest");
-
-MODULE_ALIAS("rmd128");
diff --git a/crypto/rmd160.c b/crypto/rmd160.c
index f001ec775e1fcd6d22a39fc453b953cccc851ec6..472261fc913fb43918c0074fc72023190e31acff 100644
--- a/crypto/rmd160.c
+++ b/crypto/rmd160.c
@@ -13,11 +13,10 @@
  * any later version.
  *
  */
+#include <crypto/internal/hash.h>
 #include <linux/init.h>
 #include <linux/module.h>
 #include <linux/mm.h>
-#include <linux/crypto.h>
-#include <linux/cryptohash.h>
 #include <linux/types.h>
 #include <asm/byteorder.h>
 
@@ -261,9 +260,9 @@ static void rmd160_transform(u32 *state, const __le32 *in)
 	return;
 }
 
-static void rmd160_init(struct crypto_tfm *tfm)
+static int rmd160_init(struct shash_desc *desc)
 {
-	struct rmd160_ctx *rctx = crypto_tfm_ctx(tfm);
+	struct rmd160_ctx *rctx = shash_desc_ctx(desc);
 
 	rctx->byte_count = 0;
 
@@ -274,12 +273,14 @@ static void rmd160_init(struct crypto_tfm *tfm)
 	rctx->state[4] = RMD_H4;
 
 	memset(rctx->buffer, 0, sizeof(rctx->buffer));
+
+	return 0;
 }
 
-static void rmd160_update(struct crypto_tfm *tfm, const u8 *data,
-			  unsigned int len)
+static int rmd160_update(struct shash_desc *desc, const u8 *data,
+			 unsigned int len)
 {
-	struct rmd160_ctx *rctx = crypto_tfm_ctx(tfm);
+	struct rmd160_ctx *rctx = shash_desc_ctx(desc);
 	const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f);
 
 	rctx->byte_count += len;
@@ -288,7 +289,7 @@ static void rmd160_update(struct crypto_tfm *tfm, const u8 *data,
 	if (avail > len) {
 		memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
 		       data, len);
-		return;
+		goto out;
 	}
 
 	memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
@@ -306,12 +307,15 @@ static void rmd160_update(struct crypto_tfm *tfm, const u8 *data,
 	}
 
 	memcpy(rctx->buffer, data, len);
+
+out:
+	return 0;
 }
 
 /* Add padding and return the message digest. */
-static void rmd160_final(struct crypto_tfm *tfm, u8 *out)
+static int rmd160_final(struct shash_desc *desc, u8 *out)
 {
-	struct rmd160_ctx *rctx = crypto_tfm_ctx(tfm);
+	struct rmd160_ctx *rctx = shash_desc_ctx(desc);
 	u32 i, index, padlen;
 	__le64 bits;
 	__le32 *dst = (__le32 *)out;
@@ -322,10 +326,10 @@ static void rmd160_final(struct crypto_tfm *tfm, u8 *out)
 	/* Pad out to 56 mod 64 */
 	index = rctx->byte_count & 0x3f;
 	padlen = (index < 56) ? (56 - index) : ((64+56) - index);
-	rmd160_update(tfm, padding, padlen);
+	rmd160_update(desc, padding, padlen);
 
 	/* Append length */
-	rmd160_update(tfm, (const u8 *)&bits, sizeof(bits));
+	rmd160_update(desc, (const u8 *)&bits, sizeof(bits));
 
 	/* Store state in digest */
 	for (i = 0; i < 5; i++)
@@ -333,31 +337,32 @@ static void rmd160_final(struct crypto_tfm *tfm, u8 *out)
 
 	/* Wipe context */
 	memset(rctx, 0, sizeof(*rctx));
+
+	return 0;
 }
 
-static struct crypto_alg alg = {
-	.cra_name	 =	"rmd160",
-	.cra_driver_name =	"rmd160",
-	.cra_flags	 =	CRYPTO_ALG_TYPE_DIGEST,
-	.cra_blocksize	 =	RMD160_BLOCK_SIZE,
-	.cra_ctxsize	 =	sizeof(struct rmd160_ctx),
-	.cra_module	 =	THIS_MODULE,
-	.cra_list	 =	LIST_HEAD_INIT(alg.cra_list),
-	.cra_u		 =	{ .digest = {
-	.dia_digestsize	 =	RMD160_DIGEST_SIZE,
-	.dia_init	 =	rmd160_init,
-	.dia_update	 =	rmd160_update,
-	.dia_final	 =	rmd160_final } }
+static struct shash_alg alg = {
+	.digestsize	=	RMD160_DIGEST_SIZE,
+	.init		=	rmd160_init,
+	.update		=	rmd160_update,
+	.final		=	rmd160_final,
+	.descsize	=	sizeof(struct rmd160_ctx),
+	.base		=	{
+		.cra_name	 =	"rmd160",
+		.cra_flags	 =	CRYPTO_ALG_TYPE_SHASH,
+		.cra_blocksize	 =	RMD160_BLOCK_SIZE,
+		.cra_module	 =	THIS_MODULE,
+	}
 };
 
 static int __init rmd160_mod_init(void)
 {
-	return crypto_register_alg(&alg);
+	return crypto_register_shash(&alg);
 }
 
 static void __exit rmd160_mod_fini(void)
 {
-	crypto_unregister_alg(&alg);
+	crypto_unregister_shash(&alg);
 }
 
 module_init(rmd160_mod_init);
@@ -365,5 +370,3 @@ module_exit(rmd160_mod_fini);
 
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("RIPEMD-160 Message Digest");
-
-MODULE_ALIAS("rmd160");
diff --git a/crypto/rmd256.c b/crypto/rmd256.c
index e3de5b4cb47f99bb46fea596426f869b770b68b0..72eafa8d2e7bdcb3e403eadeca3a58f6cc298ad0 100644
--- a/crypto/rmd256.c
+++ b/crypto/rmd256.c
@@ -13,11 +13,10 @@
  * any later version.
  *
  */
+#include <crypto/internal/hash.h>
 #include <linux/init.h>
 #include <linux/module.h>
 #include <linux/mm.h>
-#include <linux/crypto.h>
-#include <linux/cryptohash.h>
 #include <linux/types.h>
 #include <asm/byteorder.h>
 
@@ -233,9 +232,9 @@ static void rmd256_transform(u32 *state, const __le32 *in)
 	return;
 }
 
-static void rmd256_init(struct crypto_tfm *tfm)
+static int rmd256_init(struct shash_desc *desc)
 {
-	struct rmd256_ctx *rctx = crypto_tfm_ctx(tfm);
+	struct rmd256_ctx *rctx = shash_desc_ctx(desc);
 
 	rctx->byte_count = 0;
 
@@ -249,12 +248,14 @@ static void rmd256_init(struct crypto_tfm *tfm)
 	rctx->state[7] = RMD_H8;
 
 	memset(rctx->buffer, 0, sizeof(rctx->buffer));
+
+	return 0;
 }
 
-static void rmd256_update(struct crypto_tfm *tfm, const u8 *data,
-			  unsigned int len)
+static int rmd256_update(struct shash_desc *desc, const u8 *data,
+			 unsigned int len)
 {
-	struct rmd256_ctx *rctx = crypto_tfm_ctx(tfm);
+	struct rmd256_ctx *rctx = shash_desc_ctx(desc);
 	const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f);
 
 	rctx->byte_count += len;
@@ -263,7 +264,7 @@ static void rmd256_update(struct crypto_tfm *tfm, const u8 *data,
 	if (avail > len) {
 		memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
 		       data, len);
-		return;
+		goto out;
 	}
 
 	memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
@@ -281,12 +282,15 @@ static void rmd256_update(struct crypto_tfm *tfm, const u8 *data,
 	}
 
 	memcpy(rctx->buffer, data, len);
+
+out:
+	return 0;
 }
 
 /* Add padding and return the message digest. */
-static void rmd256_final(struct crypto_tfm *tfm, u8 *out)
+static int rmd256_final(struct shash_desc *desc, u8 *out)
 {
-	struct rmd256_ctx *rctx = crypto_tfm_ctx(tfm);
+	struct rmd256_ctx *rctx = shash_desc_ctx(desc);
 	u32 i, index, padlen;
 	__le64 bits;
 	__le32 *dst = (__le32 *)out;
@@ -297,10 +301,10 @@ static void rmd256_final(struct crypto_tfm *tfm, u8 *out)
 	/* Pad out to 56 mod 64 */
 	index = rctx->byte_count & 0x3f;
 	padlen = (index < 56) ? (56 - index) : ((64+56) - index);
-	rmd256_update(tfm, padding, padlen);
+	rmd256_update(desc, padding, padlen);
 
 	/* Append length */
-	rmd256_update(tfm, (const u8 *)&bits, sizeof(bits));
+	rmd256_update(desc, (const u8 *)&bits, sizeof(bits));
 
 	/* Store state in digest */
 	for (i = 0; i < 8; i++)
@@ -308,31 +312,32 @@ static void rmd256_final(struct crypto_tfm *tfm, u8 *out)
 
 	/* Wipe context */
 	memset(rctx, 0, sizeof(*rctx));
+
+	return 0;
 }
 
-static struct crypto_alg alg = {
-	.cra_name	 =	"rmd256",
-	.cra_driver_name =	"rmd256",
-	.cra_flags	 =	CRYPTO_ALG_TYPE_DIGEST,
-	.cra_blocksize	 =	RMD256_BLOCK_SIZE,
-	.cra_ctxsize	 =	sizeof(struct rmd256_ctx),
-	.cra_module	 =	THIS_MODULE,
-	.cra_list	 =	LIST_HEAD_INIT(alg.cra_list),
-	.cra_u		 =	{ .digest = {
-	.dia_digestsize	 =	RMD256_DIGEST_SIZE,
-	.dia_init	 =	rmd256_init,
-	.dia_update	 =	rmd256_update,
-	.dia_final	 =	rmd256_final } }
+static struct shash_alg alg = {
+	.digestsize	=	RMD256_DIGEST_SIZE,
+	.init		=	rmd256_init,
+	.update		=	rmd256_update,
+	.final		=	rmd256_final,
+	.descsize	=	sizeof(struct rmd256_ctx),
+	.base		=	{
+		.cra_name	 =	"rmd256",
+		.cra_flags	 =	CRYPTO_ALG_TYPE_SHASH,
+		.cra_blocksize	 =	RMD256_BLOCK_SIZE,
+		.cra_module	 =	THIS_MODULE,
+	}
 };
 
 static int __init rmd256_mod_init(void)
 {
-	return crypto_register_alg(&alg);
+	return crypto_register_shash(&alg);
 }
 
 static void __exit rmd256_mod_fini(void)
 {
-	crypto_unregister_alg(&alg);
+	crypto_unregister_shash(&alg);
 }
 
 module_init(rmd256_mod_init);
@@ -340,5 +345,3 @@ module_exit(rmd256_mod_fini);
 
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("RIPEMD-256 Message Digest");
-
-MODULE_ALIAS("rmd256");
diff --git a/crypto/rmd320.c b/crypto/rmd320.c
index b143d66e42c8095c3742589f06106dfe8b65b793..86becaba2f05bbd714121242110b28338c5da3f2 100644
--- a/crypto/rmd320.c
+++ b/crypto/rmd320.c
@@ -13,11 +13,10 @@
  * any later version.
  *
  */
+#include <crypto/internal/hash.h>
 #include <linux/init.h>
 #include <linux/module.h>
 #include <linux/mm.h>
-#include <linux/crypto.h>
-#include <linux/cryptohash.h>
 #include <linux/types.h>
 #include <asm/byteorder.h>
 
@@ -280,9 +279,9 @@ static void rmd320_transform(u32 *state, const __le32 *in)
 	return;
 }
 
-static void rmd320_init(struct crypto_tfm *tfm)
+static int rmd320_init(struct shash_desc *desc)
 {
-	struct rmd320_ctx *rctx = crypto_tfm_ctx(tfm);
+	struct rmd320_ctx *rctx = shash_desc_ctx(desc);
 
 	rctx->byte_count = 0;
 
@@ -298,12 +297,14 @@ static void rmd320_init(struct crypto_tfm *tfm)
 	rctx->state[9] = RMD_H9;
 
 	memset(rctx->buffer, 0, sizeof(rctx->buffer));
+
+	return 0;
 }
 
-static void rmd320_update(struct crypto_tfm *tfm, const u8 *data,
-			  unsigned int len)
+static int rmd320_update(struct shash_desc *desc, const u8 *data,
+			 unsigned int len)
 {
-	struct rmd320_ctx *rctx = crypto_tfm_ctx(tfm);
+	struct rmd320_ctx *rctx = shash_desc_ctx(desc);
 	const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f);
 
 	rctx->byte_count += len;
@@ -312,7 +313,7 @@ static void rmd320_update(struct crypto_tfm *tfm, const u8 *data,
 	if (avail > len) {
 		memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
 		       data, len);
-		return;
+		goto out;
 	}
 
 	memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
@@ -330,12 +331,15 @@ static void rmd320_update(struct crypto_tfm *tfm, const u8 *data,
 	}
 
 	memcpy(rctx->buffer, data, len);
+
+out:
+	return 0;
 }
 
 /* Add padding and return the message digest. */
-static void rmd320_final(struct crypto_tfm *tfm, u8 *out)
+static int rmd320_final(struct shash_desc *desc, u8 *out)
 {
-	struct rmd320_ctx *rctx = crypto_tfm_ctx(tfm);
+	struct rmd320_ctx *rctx = shash_desc_ctx(desc);
 	u32 i, index, padlen;
 	__le64 bits;
 	__le32 *dst = (__le32 *)out;
@@ -346,10 +350,10 @@ static void rmd320_final(struct crypto_tfm *tfm, u8 *out)
 	/* Pad out to 56 mod 64 */
 	index = rctx->byte_count & 0x3f;
 	padlen = (index < 56) ? (56 - index) : ((64+56) - index);
-	rmd320_update(tfm, padding, padlen);
+	rmd320_update(desc, padding, padlen);
 
 	/* Append length */
-	rmd320_update(tfm, (const u8 *)&bits, sizeof(bits));
+	rmd320_update(desc, (const u8 *)&bits, sizeof(bits));
 
 	/* Store state in digest */
 	for (i = 0; i < 10; i++)
@@ -357,31 +361,32 @@ static void rmd320_final(struct crypto_tfm *tfm, u8 *out)
 
 	/* Wipe context */
 	memset(rctx, 0, sizeof(*rctx));
+
+	return 0;
 }
 
-static struct crypto_alg alg = {
-	.cra_name	 =	"rmd320",
-	.cra_driver_name =	"rmd320",
-	.cra_flags	 =	CRYPTO_ALG_TYPE_DIGEST,
-	.cra_blocksize	 =	RMD320_BLOCK_SIZE,
-	.cra_ctxsize	 =	sizeof(struct rmd320_ctx),
-	.cra_module	 =	THIS_MODULE,
-	.cra_list	 =	LIST_HEAD_INIT(alg.cra_list),
-	.cra_u		 =	{ .digest = {
-	.dia_digestsize	 =	RMD320_DIGEST_SIZE,
-	.dia_init	 =	rmd320_init,
-	.dia_update	 =	rmd320_update,
-	.dia_final	 =	rmd320_final } }
+static struct shash_alg alg = {
+	.digestsize	=	RMD320_DIGEST_SIZE,
+	.init		=	rmd320_init,
+	.update		=	rmd320_update,
+	.final		=	rmd320_final,
+	.descsize	=	sizeof(struct rmd320_ctx),
+	.base		=	{
+		.cra_name	 =	"rmd320",
+		.cra_flags	 =	CRYPTO_ALG_TYPE_SHASH,
+		.cra_blocksize	 =	RMD320_BLOCK_SIZE,
+		.cra_module	 =	THIS_MODULE,
+	}
 };
 
 static int __init rmd320_mod_init(void)
 {
-	return crypto_register_alg(&alg);
+	return crypto_register_shash(&alg);
 }
 
 static void __exit rmd320_mod_fini(void)
 {
-	crypto_unregister_alg(&alg);
+	crypto_unregister_shash(&alg);
 }
 
 module_init(rmd320_mod_init);
@@ -389,5 +394,3 @@ module_exit(rmd320_mod_fini);
 
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("RIPEMD-320 Message Digest");
-
-MODULE_ALIAS("rmd320");
diff --git a/crypto/salsa20_generic.c b/crypto/salsa20_generic.c
index b07d55981741578b951b8accbe6cf2b47c771f95..eac10c11685cd436472d9262347922b88165b50d 100644
--- a/crypto/salsa20_generic.c
+++ b/crypto/salsa20_generic.c
@@ -24,6 +24,7 @@
 #include <linux/errno.h>
 #include <linux/crypto.h>
 #include <linux/types.h>
+#include <linux/bitops.h>
 #include <crypto/algapi.h>
 #include <asm/byteorder.h>
 
@@ -42,10 +43,6 @@ D. J. Bernstein
 Public domain.
 */
 
-#define ROTATE(v,n) (((v) << (n)) | ((v) >> (32 - (n))))
-#define XOR(v,w) ((v) ^ (w))
-#define PLUS(v,w) (((v) + (w)))
-#define PLUSONE(v) (PLUS((v),1))
 #define U32TO8_LITTLE(p, v) \
 	{ (p)[0] = (v >>  0) & 0xff; (p)[1] = (v >>  8) & 0xff; \
 	  (p)[2] = (v >> 16) & 0xff; (p)[3] = (v >> 24) & 0xff; }
@@ -65,41 +62,41 @@ static void salsa20_wordtobyte(u8 output[64], const u32 input[16])
 
 	memcpy(x, input, sizeof(x));
 	for (i = 20; i > 0; i -= 2) {
-		x[ 4] = XOR(x[ 4],ROTATE(PLUS(x[ 0],x[12]), 7));
-		x[ 8] = XOR(x[ 8],ROTATE(PLUS(x[ 4],x[ 0]), 9));
-		x[12] = XOR(x[12],ROTATE(PLUS(x[ 8],x[ 4]),13));
-		x[ 0] = XOR(x[ 0],ROTATE(PLUS(x[12],x[ 8]),18));
-		x[ 9] = XOR(x[ 9],ROTATE(PLUS(x[ 5],x[ 1]), 7));
-		x[13] = XOR(x[13],ROTATE(PLUS(x[ 9],x[ 5]), 9));
-		x[ 1] = XOR(x[ 1],ROTATE(PLUS(x[13],x[ 9]),13));
-		x[ 5] = XOR(x[ 5],ROTATE(PLUS(x[ 1],x[13]),18));
-		x[14] = XOR(x[14],ROTATE(PLUS(x[10],x[ 6]), 7));
-		x[ 2] = XOR(x[ 2],ROTATE(PLUS(x[14],x[10]), 9));
-		x[ 6] = XOR(x[ 6],ROTATE(PLUS(x[ 2],x[14]),13));
-		x[10] = XOR(x[10],ROTATE(PLUS(x[ 6],x[ 2]),18));
-		x[ 3] = XOR(x[ 3],ROTATE(PLUS(x[15],x[11]), 7));
-		x[ 7] = XOR(x[ 7],ROTATE(PLUS(x[ 3],x[15]), 9));
-		x[11] = XOR(x[11],ROTATE(PLUS(x[ 7],x[ 3]),13));
-		x[15] = XOR(x[15],ROTATE(PLUS(x[11],x[ 7]),18));
-		x[ 1] = XOR(x[ 1],ROTATE(PLUS(x[ 0],x[ 3]), 7));
-		x[ 2] = XOR(x[ 2],ROTATE(PLUS(x[ 1],x[ 0]), 9));
-		x[ 3] = XOR(x[ 3],ROTATE(PLUS(x[ 2],x[ 1]),13));
-		x[ 0] = XOR(x[ 0],ROTATE(PLUS(x[ 3],x[ 2]),18));
-		x[ 6] = XOR(x[ 6],ROTATE(PLUS(x[ 5],x[ 4]), 7));
-		x[ 7] = XOR(x[ 7],ROTATE(PLUS(x[ 6],x[ 5]), 9));
-		x[ 4] = XOR(x[ 4],ROTATE(PLUS(x[ 7],x[ 6]),13));
-		x[ 5] = XOR(x[ 5],ROTATE(PLUS(x[ 4],x[ 7]),18));
-		x[11] = XOR(x[11],ROTATE(PLUS(x[10],x[ 9]), 7));
-		x[ 8] = XOR(x[ 8],ROTATE(PLUS(x[11],x[10]), 9));
-		x[ 9] = XOR(x[ 9],ROTATE(PLUS(x[ 8],x[11]),13));
-		x[10] = XOR(x[10],ROTATE(PLUS(x[ 9],x[ 8]),18));
-		x[12] = XOR(x[12],ROTATE(PLUS(x[15],x[14]), 7));
-		x[13] = XOR(x[13],ROTATE(PLUS(x[12],x[15]), 9));
-		x[14] = XOR(x[14],ROTATE(PLUS(x[13],x[12]),13));
-		x[15] = XOR(x[15],ROTATE(PLUS(x[14],x[13]),18));
+		x[ 4] ^= rol32((x[ 0] + x[12]),  7);
+		x[ 8] ^= rol32((x[ 4] + x[ 0]),  9);
+		x[12] ^= rol32((x[ 8] + x[ 4]), 13);
+		x[ 0] ^= rol32((x[12] + x[ 8]), 18);
+		x[ 9] ^= rol32((x[ 5] + x[ 1]),  7);
+		x[13] ^= rol32((x[ 9] + x[ 5]),  9);
+		x[ 1] ^= rol32((x[13] + x[ 9]), 13);
+		x[ 5] ^= rol32((x[ 1] + x[13]), 18);
+		x[14] ^= rol32((x[10] + x[ 6]),  7);
+		x[ 2] ^= rol32((x[14] + x[10]),  9);
+		x[ 6] ^= rol32((x[ 2] + x[14]), 13);
+		x[10] ^= rol32((x[ 6] + x[ 2]), 18);
+		x[ 3] ^= rol32((x[15] + x[11]),  7);
+		x[ 7] ^= rol32((x[ 3] + x[15]),  9);
+		x[11] ^= rol32((x[ 7] + x[ 3]), 13);
+		x[15] ^= rol32((x[11] + x[ 7]), 18);
+		x[ 1] ^= rol32((x[ 0] + x[ 3]),  7);
+		x[ 2] ^= rol32((x[ 1] + x[ 0]),  9);
+		x[ 3] ^= rol32((x[ 2] + x[ 1]), 13);
+		x[ 0] ^= rol32((x[ 3] + x[ 2]), 18);
+		x[ 6] ^= rol32((x[ 5] + x[ 4]),  7);
+		x[ 7] ^= rol32((x[ 6] + x[ 5]),  9);
+		x[ 4] ^= rol32((x[ 7] + x[ 6]), 13);
+		x[ 5] ^= rol32((x[ 4] + x[ 7]), 18);
+		x[11] ^= rol32((x[10] + x[ 9]),  7);
+		x[ 8] ^= rol32((x[11] + x[10]),  9);
+		x[ 9] ^= rol32((x[ 8] + x[11]), 13);
+		x[10] ^= rol32((x[ 9] + x[ 8]), 18);
+		x[12] ^= rol32((x[15] + x[14]),  7);
+		x[13] ^= rol32((x[12] + x[15]),  9);
+		x[14] ^= rol32((x[13] + x[12]), 13);
+		x[15] ^= rol32((x[14] + x[13]), 18);
 	}
 	for (i = 0; i < 16; ++i)
-		x[i] = PLUS(x[i],input[i]);
+		x[i] += input[i];
 	for (i = 0; i < 16; ++i)
 		U32TO8_LITTLE(output + 4 * i,x[i]);
 }
@@ -150,9 +147,9 @@ static void salsa20_encrypt_bytes(struct salsa20_ctx *ctx, u8 *dst,
 	while (bytes) {
 		salsa20_wordtobyte(buf, ctx->input);
 
-		ctx->input[8] = PLUSONE(ctx->input[8]);
+		ctx->input[8]++;
 		if (!ctx->input[8])
-			ctx->input[9] = PLUSONE(ctx->input[9]);
+			ctx->input[9]++;
 
 		if (bytes <= 64) {
 			crypto_xor(dst, buf, bytes);
diff --git a/crypto/sha1_generic.c b/crypto/sha1_generic.c
index c7c6899e1fca8efb0ce9af81c42747f180203ed7..9efef20454cba0281c9e08c9ec02da4eae0e3665 100644
--- a/crypto/sha1_generic.c
+++ b/crypto/sha1_generic.c
@@ -16,10 +16,10 @@
  * any later version.
  *
  */
+#include <crypto/internal/hash.h>
 #include <linux/init.h>
 #include <linux/module.h>
 #include <linux/mm.h>
-#include <linux/crypto.h>
 #include <linux/cryptohash.h>
 #include <linux/types.h>
 #include <crypto/sha.h>
@@ -31,9 +31,10 @@ struct sha1_ctx {
         u8 buffer[64];
 };
 
-static void sha1_init(struct crypto_tfm *tfm)
+static int sha1_init(struct shash_desc *desc)
 {
-	struct sha1_ctx *sctx = crypto_tfm_ctx(tfm);
+	struct sha1_ctx *sctx = shash_desc_ctx(desc);
+
 	static const struct sha1_ctx initstate = {
 	  0,
 	  { SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4 },
@@ -41,12 +42,14 @@ static void sha1_init(struct crypto_tfm *tfm)
 	};
 
 	*sctx = initstate;
+
+	return 0;
 }
 
-static void sha1_update(struct crypto_tfm *tfm, const u8 *data,
+static int sha1_update(struct shash_desc *desc, const u8 *data,
 			unsigned int len)
 {
-	struct sha1_ctx *sctx = crypto_tfm_ctx(tfm);
+	struct sha1_ctx *sctx = shash_desc_ctx(desc);
 	unsigned int partial, done;
 	const u8 *src;
 
@@ -74,13 +77,15 @@ static void sha1_update(struct crypto_tfm *tfm, const u8 *data,
 		partial = 0;
 	}
 	memcpy(sctx->buffer + partial, src, len - done);
+
+	return 0;
 }
 
 
 /* Add padding and return the message digest. */
-static void sha1_final(struct crypto_tfm *tfm, u8 *out)
+static int sha1_final(struct shash_desc *desc, u8 *out)
 {
-	struct sha1_ctx *sctx = crypto_tfm_ctx(tfm);
+	struct sha1_ctx *sctx = shash_desc_ctx(desc);
 	__be32 *dst = (__be32 *)out;
 	u32 i, index, padlen;
 	__be64 bits;
@@ -91,10 +96,10 @@ static void sha1_final(struct crypto_tfm *tfm, u8 *out)
 	/* Pad out to 56 mod 64 */
 	index = sctx->count & 0x3f;
 	padlen = (index < 56) ? (56 - index) : ((64+56) - index);
-	sha1_update(tfm, padding, padlen);
+	sha1_update(desc, padding, padlen);
 
 	/* Append length */
-	sha1_update(tfm, (const u8 *)&bits, sizeof(bits));
+	sha1_update(desc, (const u8 *)&bits, sizeof(bits));
 
 	/* Store state in digest */
 	for (i = 0; i < 5; i++)
@@ -102,32 +107,33 @@ static void sha1_final(struct crypto_tfm *tfm, u8 *out)
 
 	/* Wipe context */
 	memset(sctx, 0, sizeof *sctx);
+
+	return 0;
 }
 
-static struct crypto_alg alg = {
-	.cra_name	=	"sha1",
-	.cra_driver_name=	"sha1-generic",
-	.cra_flags	=	CRYPTO_ALG_TYPE_DIGEST,
-	.cra_blocksize	=	SHA1_BLOCK_SIZE,
-	.cra_ctxsize	=	sizeof(struct sha1_ctx),
-	.cra_module	=	THIS_MODULE,
-	.cra_alignmask	=	3,
-	.cra_list       =       LIST_HEAD_INIT(alg.cra_list),
-	.cra_u		=	{ .digest = {
-	.dia_digestsize	=	SHA1_DIGEST_SIZE,
-	.dia_init   	= 	sha1_init,
-	.dia_update 	=	sha1_update,
-	.dia_final  	=	sha1_final } }
+static struct shash_alg alg = {
+	.digestsize	=	SHA1_DIGEST_SIZE,
+	.init		=	sha1_init,
+	.update		=	sha1_update,
+	.final		=	sha1_final,
+	.descsize	=	sizeof(struct sha1_ctx),
+	.base		=	{
+		.cra_name	=	"sha1",
+		.cra_driver_name=	"sha1-generic",
+		.cra_flags	=	CRYPTO_ALG_TYPE_SHASH,
+		.cra_blocksize	=	SHA1_BLOCK_SIZE,
+		.cra_module	=	THIS_MODULE,
+	}
 };
 
 static int __init sha1_generic_mod_init(void)
 {
-	return crypto_register_alg(&alg);
+	return crypto_register_shash(&alg);
 }
 
 static void __exit sha1_generic_mod_fini(void)
 {
-	crypto_unregister_alg(&alg);
+	crypto_unregister_shash(&alg);
 }
 
 module_init(sha1_generic_mod_init);
diff --git a/crypto/sha256_generic.c b/crypto/sha256_generic.c
index 5a8dd47558e566752d9d3cbd02231fd981be0a4e..caa3542e6ce8943a2b5ed5f9a0f0aaded9a486cc 100644
--- a/crypto/sha256_generic.c
+++ b/crypto/sha256_generic.c
@@ -17,10 +17,10 @@
  * any later version.
  *
  */
+#include <crypto/internal/hash.h>
 #include <linux/init.h>
 #include <linux/module.h>
 #include <linux/mm.h>
-#include <linux/crypto.h>
 #include <linux/types.h>
 #include <crypto/sha.h>
 #include <asm/byteorder.h>
@@ -69,7 +69,7 @@ static void sha256_transform(u32 *state, const u8 *input)
 	/* now blend */
 	for (i = 16; i < 64; i++)
 		BLEND_OP(i, W);
-    
+
 	/* load the state into our registers */
 	a=state[0];  b=state[1];  c=state[2];  d=state[3];
 	e=state[4];  f=state[5];  g=state[6];  h=state[7];
@@ -220,9 +220,9 @@ static void sha256_transform(u32 *state, const u8 *input)
 }
 
 
-static void sha224_init(struct crypto_tfm *tfm)
+static int sha224_init(struct shash_desc *desc)
 {
-	struct sha256_ctx *sctx = crypto_tfm_ctx(tfm);
+	struct sha256_ctx *sctx = shash_desc_ctx(desc);
 	sctx->state[0] = SHA224_H0;
 	sctx->state[1] = SHA224_H1;
 	sctx->state[2] = SHA224_H2;
@@ -233,11 +233,13 @@ static void sha224_init(struct crypto_tfm *tfm)
 	sctx->state[7] = SHA224_H7;
 	sctx->count[0] = 0;
 	sctx->count[1] = 0;
+
+	return 0;
 }
 
-static void sha256_init(struct crypto_tfm *tfm)
+static int sha256_init(struct shash_desc *desc)
 {
-	struct sha256_ctx *sctx = crypto_tfm_ctx(tfm);
+	struct sha256_ctx *sctx = shash_desc_ctx(desc);
 	sctx->state[0] = SHA256_H0;
 	sctx->state[1] = SHA256_H1;
 	sctx->state[2] = SHA256_H2;
@@ -247,12 +249,14 @@ static void sha256_init(struct crypto_tfm *tfm)
 	sctx->state[6] = SHA256_H6;
 	sctx->state[7] = SHA256_H7;
 	sctx->count[0] = sctx->count[1] = 0;
+
+	return 0;
 }
 
-static void sha256_update(struct crypto_tfm *tfm, const u8 *data,
+static int sha256_update(struct shash_desc *desc, const u8 *data,
 			  unsigned int len)
 {
-	struct sha256_ctx *sctx = crypto_tfm_ctx(tfm);
+	struct sha256_ctx *sctx = shash_desc_ctx(desc);
 	unsigned int i, index, part_len;
 
 	/* Compute number of bytes mod 128 */
@@ -277,14 +281,16 @@ static void sha256_update(struct crypto_tfm *tfm, const u8 *data,
 	} else {
 		i = 0;
 	}
-	
+
 	/* Buffer remaining input */
 	memcpy(&sctx->buf[index], &data[i], len-i);
+
+	return 0;
 }
 
-static void sha256_final(struct crypto_tfm *tfm, u8 *out)
+static int sha256_final(struct shash_desc *desc, u8 *out)
 {
-	struct sha256_ctx *sctx = crypto_tfm_ctx(tfm);
+	struct sha256_ctx *sctx = shash_desc_ctx(desc);
 	__be32 *dst = (__be32 *)out;
 	__be32 bits[2];
 	unsigned int index, pad_len;
@@ -298,10 +304,10 @@ static void sha256_final(struct crypto_tfm *tfm, u8 *out)
 	/* Pad out to 56 mod 64. */
 	index = (sctx->count[0] >> 3) & 0x3f;
 	pad_len = (index < 56) ? (56 - index) : ((64+56) - index);
-	sha256_update(tfm, padding, pad_len);
+	sha256_update(desc, padding, pad_len);
 
 	/* Append length (before padding) */
-	sha256_update(tfm, (const u8 *)bits, sizeof(bits));
+	sha256_update(desc, (const u8 *)bits, sizeof(bits));
 
 	/* Store state in digest */
 	for (i = 0; i < 8; i++)
@@ -309,71 +315,73 @@ static void sha256_final(struct crypto_tfm *tfm, u8 *out)
 
 	/* Zeroize sensitive information. */
 	memset(sctx, 0, sizeof(*sctx));
+
+	return 0;
 }
 
-static void sha224_final(struct crypto_tfm *tfm, u8 *hash)
+static int sha224_final(struct shash_desc *desc, u8 *hash)
 {
 	u8 D[SHA256_DIGEST_SIZE];
 
-	sha256_final(tfm, D);
+	sha256_final(desc, D);
 
 	memcpy(hash, D, SHA224_DIGEST_SIZE);
 	memset(D, 0, SHA256_DIGEST_SIZE);
+
+	return 0;
 }
 
-static struct crypto_alg sha256 = {
-	.cra_name	=	"sha256",
-	.cra_driver_name=	"sha256-generic",
-	.cra_flags	=	CRYPTO_ALG_TYPE_DIGEST,
-	.cra_blocksize	=	SHA256_BLOCK_SIZE,
-	.cra_ctxsize	=	sizeof(struct sha256_ctx),
-	.cra_module	=	THIS_MODULE,
-	.cra_alignmask	=	3,
-	.cra_list	=	LIST_HEAD_INIT(sha256.cra_list),
-	.cra_u		=	{ .digest = {
-	.dia_digestsize	=	SHA256_DIGEST_SIZE,
-	.dia_init	=	sha256_init,
-	.dia_update	=	sha256_update,
-	.dia_final	=	sha256_final } }
+static struct shash_alg sha256 = {
+	.digestsize	=	SHA256_DIGEST_SIZE,
+	.init		=	sha256_init,
+	.update		=	sha256_update,
+	.final		=	sha256_final,
+	.descsize	=	sizeof(struct sha256_ctx),
+	.base		=	{
+		.cra_name	=	"sha256",
+		.cra_driver_name=	"sha256-generic",
+		.cra_flags	=	CRYPTO_ALG_TYPE_SHASH,
+		.cra_blocksize	=	SHA256_BLOCK_SIZE,
+		.cra_module	=	THIS_MODULE,
+	}
 };
 
-static struct crypto_alg sha224 = {
-	.cra_name	= "sha224",
-	.cra_driver_name = "sha224-generic",
-	.cra_flags	= CRYPTO_ALG_TYPE_DIGEST,
-	.cra_blocksize	= SHA224_BLOCK_SIZE,
-	.cra_ctxsize	= sizeof(struct sha256_ctx),
-	.cra_module	= THIS_MODULE,
-	.cra_alignmask	= 3,
-	.cra_list	= LIST_HEAD_INIT(sha224.cra_list),
-	.cra_u		= { .digest = {
-	.dia_digestsize = SHA224_DIGEST_SIZE,
-	.dia_init	= sha224_init,
-	.dia_update	= sha256_update,
-	.dia_final	= sha224_final } }
+static struct shash_alg sha224 = {
+	.digestsize	=	SHA224_DIGEST_SIZE,
+	.init		=	sha224_init,
+	.update		=	sha256_update,
+	.final		=	sha224_final,
+	.descsize	=	sizeof(struct sha256_ctx),
+	.base		=	{
+		.cra_name	=	"sha224",
+		.cra_driver_name=	"sha224-generic",
+		.cra_flags	=	CRYPTO_ALG_TYPE_SHASH,
+		.cra_blocksize	=	SHA224_BLOCK_SIZE,
+		.cra_module	=	THIS_MODULE,
+	}
 };
 
 static int __init sha256_generic_mod_init(void)
 {
 	int ret = 0;
 
-	ret = crypto_register_alg(&sha224);
+	ret = crypto_register_shash(&sha224);
 
 	if (ret < 0)
 		return ret;
 
-	ret = crypto_register_alg(&sha256);
+	ret = crypto_register_shash(&sha256);
 
 	if (ret < 0)
-		crypto_unregister_alg(&sha224);
+		crypto_unregister_shash(&sha224);
 
 	return ret;
 }
 
 static void __exit sha256_generic_mod_fini(void)
 {
-	crypto_unregister_alg(&sha224);
-	crypto_unregister_alg(&sha256);
+	crypto_unregister_shash(&sha224);
+	crypto_unregister_shash(&sha256);
 }
 
 module_init(sha256_generic_mod_init);
diff --git a/crypto/sha512_generic.c b/crypto/sha512_generic.c
index bc3686138aebb2a5bd58a373e3aa7f24a71f6510..3bea38d12242d03e97d0159e3b6a6c5527559094 100644
--- a/crypto/sha512_generic.c
+++ b/crypto/sha512_generic.c
@@ -10,7 +10,7 @@
  * later version.
  *
  */
-
+#include <crypto/internal/hash.h>
 #include <linux/kernel.h>
 #include <linux/module.h>
 #include <linux/mm.h>
@@ -18,16 +18,17 @@
 #include <linux/crypto.h>
 #include <linux/types.h>
 #include <crypto/sha.h>
-
+#include <linux/percpu.h>
 #include <asm/byteorder.h>
 
 struct sha512_ctx {
 	u64 state[8];
 	u32 count[4];
 	u8 buf[128];
-	u64 W[80];
 };
 
+static DEFINE_PER_CPU(u64[80], msg_schedule);
+
 static inline u64 Ch(u64 x, u64 y, u64 z)
 {
         return z ^ (x & (y ^ z));
@@ -89,11 +90,12 @@ static inline void BLEND_OP(int I, u64 *W)
 }
 
 static void
-sha512_transform(u64 *state, u64 *W, const u8 *input)
+sha512_transform(u64 *state, const u8 *input)
 {
 	u64 a, b, c, d, e, f, g, h, t1, t2;
 
 	int i;
+	u64 *W = get_cpu_var(msg_schedule);
 
 	/* load the input */
         for (i = 0; i < 16; i++)
@@ -132,12 +134,14 @@ sha512_transform(u64 *state, u64 *W, const u8 *input)
 
 	/* erase our data */
 	a = b = c = d = e = f = g = h = t1 = t2 = 0;
+	memset(W, 0, sizeof(__get_cpu_var(msg_schedule)));
+	put_cpu_var(msg_schedule);
 }
 
-static void
-sha512_init(struct crypto_tfm *tfm)
+static int
+sha512_init(struct shash_desc *desc)
 {
-	struct sha512_ctx *sctx = crypto_tfm_ctx(tfm);
+	struct sha512_ctx *sctx = shash_desc_ctx(desc);
 	sctx->state[0] = SHA512_H0;
 	sctx->state[1] = SHA512_H1;
 	sctx->state[2] = SHA512_H2;
@@ -147,12 +151,14 @@ sha512_init(struct crypto_tfm *tfm)
 	sctx->state[6] = SHA512_H6;
 	sctx->state[7] = SHA512_H7;
 	sctx->count[0] = sctx->count[1] = sctx->count[2] = sctx->count[3] = 0;
+
+	return 0;
 }
 
-static void
-sha384_init(struct crypto_tfm *tfm)
+static int
+sha384_init(struct shash_desc *desc)
 {
-	struct sha512_ctx *sctx = crypto_tfm_ctx(tfm);
+	struct sha512_ctx *sctx = shash_desc_ctx(desc);
 	sctx->state[0] = SHA384_H0;
 	sctx->state[1] = SHA384_H1;
 	sctx->state[2] = SHA384_H2;
@@ -162,12 +168,14 @@ sha384_init(struct crypto_tfm *tfm)
 	sctx->state[6] = SHA384_H6;
 	sctx->state[7] = SHA384_H7;
         sctx->count[0] = sctx->count[1] = sctx->count[2] = sctx->count[3] = 0;
+
+	return 0;
 }
 
-static void
-sha512_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len)
+static int
+sha512_update(struct shash_desc *desc, const u8 *data, unsigned int len)
 {
-	struct sha512_ctx *sctx = crypto_tfm_ctx(tfm);
+	struct sha512_ctx *sctx = shash_desc_ctx(desc);
 
 	unsigned int i, index, part_len;
 
@@ -187,10 +195,10 @@ sha512_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len)
 	/* Transform as many times as possible. */
 	if (len >= part_len) {
 		memcpy(&sctx->buf[index], data, part_len);
-		sha512_transform(sctx->state, sctx->W, sctx->buf);
+		sha512_transform(sctx->state, sctx->buf);
 
 		for (i = part_len; i + 127 < len; i+=128)
-			sha512_transform(sctx->state, sctx->W, &data[i]);
+			sha512_transform(sctx->state, &data[i]);
 
 		index = 0;
 	} else {
@@ -200,14 +208,13 @@ sha512_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len)
 	/* Buffer remaining input */
 	memcpy(&sctx->buf[index], &data[i], len - i);
 
-	/* erase our data */
-	memset(sctx->W, 0, sizeof(sctx->W));
+	return 0;
 }
 
-static void
-sha512_final(struct crypto_tfm *tfm, u8 *hash)
+static int
+sha512_final(struct shash_desc *desc, u8 *hash)
 {
-	struct sha512_ctx *sctx = crypto_tfm_ctx(tfm);
+	struct sha512_ctx *sctx = shash_desc_ctx(desc);
         static u8 padding[128] = { 0x80, };
 	__be64 *dst = (__be64 *)hash;
 	__be32 bits[4];
@@ -223,10 +230,10 @@ sha512_final(struct crypto_tfm *tfm, u8 *hash)
 	/* Pad out to 112 mod 128. */
 	index = (sctx->count[0] >> 3) & 0x7f;
 	pad_len = (index < 112) ? (112 - index) : ((128+112) - index);
-	sha512_update(tfm, padding, pad_len);
+	sha512_update(desc, padding, pad_len);
 
 	/* Append length (before padding) */
-	sha512_update(tfm, (const u8 *)bits, sizeof(bits));
+	sha512_update(desc, (const u8 *)bits, sizeof(bits));
 
 	/* Store state in digest */
 	for (i = 0; i < 8; i++)
@@ -234,66 +241,66 @@ sha512_final(struct crypto_tfm *tfm, u8 *hash)
 
 	/* Zeroize sensitive information. */
 	memset(sctx, 0, sizeof(struct sha512_ctx));
+
+	return 0;
 }
 
-static void sha384_final(struct crypto_tfm *tfm, u8 *hash)
+static int sha384_final(struct shash_desc *desc, u8 *hash)
 {
-        u8 D[64];
+	u8 D[64];
+
+	sha512_final(desc, D);
 
-	sha512_final(tfm, D);
+	memcpy(hash, D, 48);
+	memset(D, 0, 64);
 
-        memcpy(hash, D, 48);
-        memset(D, 0, 64);
+	return 0;
 }
 
-static struct crypto_alg sha512 = {
-        .cra_name       = "sha512",
-        .cra_flags      = CRYPTO_ALG_TYPE_DIGEST,
-	.cra_blocksize  = SHA512_BLOCK_SIZE,
-        .cra_ctxsize    = sizeof(struct sha512_ctx),
-        .cra_module     = THIS_MODULE,
-	.cra_alignmask	= 3,
-        .cra_list       = LIST_HEAD_INIT(sha512.cra_list),
-        .cra_u          = { .digest = {
-                                .dia_digestsize = SHA512_DIGEST_SIZE,
-                                .dia_init       = sha512_init,
-                                .dia_update     = sha512_update,
-                                .dia_final      = sha512_final }
-        }
+static struct shash_alg sha512 = {
+	.digestsize	=	SHA512_DIGEST_SIZE,
+	.init		=	sha512_init,
+	.update		=	sha512_update,
+	.final		=	sha512_final,
+	.descsize	=	sizeof(struct sha512_ctx),
+	.base		=	{
+		.cra_name	=	"sha512",
+		.cra_flags	=	CRYPTO_ALG_TYPE_SHASH,
+		.cra_blocksize	=	SHA512_BLOCK_SIZE,
+		.cra_module	=	THIS_MODULE,
+	}
 };
 
-static struct crypto_alg sha384 = {
-        .cra_name       = "sha384",
-        .cra_flags      = CRYPTO_ALG_TYPE_DIGEST,
-	.cra_blocksize  = SHA384_BLOCK_SIZE,
-        .cra_ctxsize    = sizeof(struct sha512_ctx),
-	.cra_alignmask	= 3,
-        .cra_module     = THIS_MODULE,
-        .cra_list       = LIST_HEAD_INIT(sha384.cra_list),
-        .cra_u          = { .digest = {
-                                .dia_digestsize = SHA384_DIGEST_SIZE,
-                                .dia_init       = sha384_init,
-                                .dia_update     = sha512_update,
-                                .dia_final      = sha384_final }
-        }
+static struct shash_alg sha384 = {
+	.digestsize	=	SHA384_DIGEST_SIZE,
+	.init		=	sha384_init,
+	.update		=	sha512_update,
+	.final		=	sha384_final,
+	.descsize	=	sizeof(struct sha512_ctx),
+	.base		=	{
+		.cra_name	=	"sha384",
+		.cra_flags	=	CRYPTO_ALG_TYPE_SHASH,
+		.cra_blocksize	=	SHA384_BLOCK_SIZE,
+		.cra_module	=	THIS_MODULE,
+	}
 };
 
 static int __init sha512_generic_mod_init(void)
 {
         int ret = 0;
 
-        if ((ret = crypto_register_alg(&sha384)) < 0)
+        if ((ret = crypto_register_shash(&sha384)) < 0)
                 goto out;
-        if ((ret = crypto_register_alg(&sha512)) < 0)
-                crypto_unregister_alg(&sha384);
+        if ((ret = crypto_register_shash(&sha512)) < 0)
+                crypto_unregister_shash(&sha384);
 out:
         return ret;
 }
 
 static void __exit sha512_generic_mod_fini(void)
 {
-        crypto_unregister_alg(&sha384);
-        crypto_unregister_alg(&sha512);
+        crypto_unregister_shash(&sha384);
+        crypto_unregister_shash(&sha512);
 }
 
 module_init(sha512_generic_mod_init);
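
The sha512/sha384 change above also replaces the per-context message schedule W[80] with a single per-CPU scratch buffer, borrowed around each transform with preemption disabled and wiped before release. A minimal sketch of that pattern using the same per-CPU APIs; the names scratch and use_scratch are illustrative only:

	#include <linux/percpu.h>
	#include <linux/string.h>

	/* one 640-byte schedule buffer per CPU instead of one per context */
	static DEFINE_PER_CPU(u64[80], scratch);

	static void use_scratch(void)
	{
		u64 *W = get_cpu_var(scratch);	/* borrow this CPU's buffer; preemption off */

		/* ... expand the message schedule into W[0..79] and consume it ... */

		memset(W, 0, 80 * sizeof(u64));	/* wipe message-dependent data */
		put_cpu_var(scratch);		/* release buffer; preemption back on */
	}
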
diff --git a/crypto/shash.c b/crypto/shash.c
new file mode 100644
index 0000000000000000000000000000000000000000..c9df367332ffd1f122b1d675bcb434b936848bc4
--- /dev/null
+++ b/crypto/shash.c
@@ -0,0 +1,508 @@
+/*
+ * Synchronous Cryptographic Hash operations.
+ *
+ * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#include <crypto/scatterwalk.h>
+#include <crypto/internal/hash.h>
+#include <linux/err.h>
+#include <linux/kernel.h>
+#include <linux/module.h>
+#include <linux/slab.h>
+#include <linux/seq_file.h>
+
+#include "internal.h"
+
+static const struct crypto_type crypto_shash_type;
+
+static inline struct crypto_shash *__crypto_shash_cast(struct crypto_tfm *tfm)
+{
+	return container_of(tfm, struct crypto_shash, base);
+}
+
+static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
+				  unsigned int keylen)
+{
+	struct shash_alg *shash = crypto_shash_alg(tfm);
+	unsigned long alignmask = crypto_shash_alignmask(tfm);
+	unsigned long absize;
+	u8 *buffer, *alignbuffer;
+	int err;
+
+	absize = keylen + (alignmask & ~(CRYPTO_MINALIGN - 1));
+	buffer = kmalloc(absize, GFP_KERNEL);
+	if (!buffer)
+		return -ENOMEM;
+
+	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
+	memcpy(alignbuffer, key, keylen);
+	err = shash->setkey(tfm, alignbuffer, keylen);
+	memset(alignbuffer, 0, keylen);
+	kfree(buffer);
+	return err;
+}
+
+int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
+			unsigned int keylen)
+{
+	struct shash_alg *shash = crypto_shash_alg(tfm);
+	unsigned long alignmask = crypto_shash_alignmask(tfm);
+
+	if (!shash->setkey)
+		return -ENOSYS;
+
+	if ((unsigned long)key & alignmask)
+		return shash_setkey_unaligned(tfm, key, keylen);
+
+	return shash->setkey(tfm, key, keylen);
+}
+EXPORT_SYMBOL_GPL(crypto_shash_setkey);
+
+static inline unsigned int shash_align_buffer_size(unsigned len,
+						   unsigned long mask)
+{
+	return len + (mask & ~(__alignof__(u8 __attribute__ ((aligned))) - 1));
+}
+
+static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
+				  unsigned int len)
+{
+	struct crypto_shash *tfm = desc->tfm;
+	struct shash_alg *shash = crypto_shash_alg(tfm);
+	unsigned long alignmask = crypto_shash_alignmask(tfm);
+	unsigned int unaligned_len = alignmask + 1 -
+				     ((unsigned long)data & alignmask);
+	u8 buf[shash_align_buffer_size(unaligned_len, alignmask)]
+		__attribute__ ((aligned));
+
+	memcpy(buf, data, unaligned_len);
+
+	return shash->update(desc, buf, unaligned_len) ?:
+	       shash->update(desc, data + unaligned_len, len - unaligned_len);
+}
+
+int crypto_shash_update(struct shash_desc *desc, const u8 *data,
+			unsigned int len)
+{
+	struct crypto_shash *tfm = desc->tfm;
+	struct shash_alg *shash = crypto_shash_alg(tfm);
+	unsigned long alignmask = crypto_shash_alignmask(tfm);
+
+	if ((unsigned long)data & alignmask)
+		return shash_update_unaligned(desc, data, len);
+
+	return shash->update(desc, data, len);
+}
+EXPORT_SYMBOL_GPL(crypto_shash_update);
+
+static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
+{
+	struct crypto_shash *tfm = desc->tfm;
+	unsigned long alignmask = crypto_shash_alignmask(tfm);
+	struct shash_alg *shash = crypto_shash_alg(tfm);
+	unsigned int ds = crypto_shash_digestsize(tfm);
+	u8 buf[shash_align_buffer_size(ds, alignmask)]
+		__attribute__ ((aligned));
+	int err;
+
+	err = shash->final(desc, buf);
+	memcpy(out, buf, ds);
+	return err;
+}
+
+int crypto_shash_final(struct shash_desc *desc, u8 *out)
+{
+	struct crypto_shash *tfm = desc->tfm;
+	struct shash_alg *shash = crypto_shash_alg(tfm);
+	unsigned long alignmask = crypto_shash_alignmask(tfm);
+
+	if ((unsigned long)out & alignmask)
+		return shash_final_unaligned(desc, out);
+
+	return shash->final(desc, out);
+}
+EXPORT_SYMBOL_GPL(crypto_shash_final);
+
+static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
+				 unsigned int len, u8 *out)
+{
+	return crypto_shash_update(desc, data, len) ?:
+	       crypto_shash_final(desc, out);
+}
+
+int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
+		       unsigned int len, u8 *out)
+{
+	struct crypto_shash *tfm = desc->tfm;
+	struct shash_alg *shash = crypto_shash_alg(tfm);
+	unsigned long alignmask = crypto_shash_alignmask(tfm);
+
+	if (((unsigned long)data | (unsigned long)out) & alignmask ||
+	    !shash->finup)
+		return shash_finup_unaligned(desc, data, len, out);
+
+	return shash->finup(desc, data, len, out);
+}
+EXPORT_SYMBOL_GPL(crypto_shash_finup);
+
+static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
+				  unsigned int len, u8 *out)
+{
+	return crypto_shash_init(desc) ?:
+	       crypto_shash_update(desc, data, len) ?:
+	       crypto_shash_final(desc, out);
+}
+
+int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
+			unsigned int len, u8 *out)
+{
+	struct crypto_shash *tfm = desc->tfm;
+	struct shash_alg *shash = crypto_shash_alg(tfm);
+	unsigned long alignmask = crypto_shash_alignmask(tfm);
+
+	if (((unsigned long)data | (unsigned long)out) & alignmask ||
+	    !shash->digest)
+		return shash_digest_unaligned(desc, data, len, out);
+
+	return shash->digest(desc, data, len, out);
+}
+EXPORT_SYMBOL_GPL(crypto_shash_digest);
+
+int crypto_shash_import(struct shash_desc *desc, const u8 *in)
+{
+	struct crypto_shash *tfm = desc->tfm;
+	struct shash_alg *alg = crypto_shash_alg(tfm);
+
+	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(tfm));
+
+	if (alg->reinit)
+		alg->reinit(desc);
+
+	return 0;
+}
+EXPORT_SYMBOL_GPL(crypto_shash_import);
+
+static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
+			      unsigned int keylen)
+{
+	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);
+
+	return crypto_shash_setkey(*ctx, key, keylen);
+}
+
+static int shash_async_init(struct ahash_request *req)
+{
+	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
+	struct shash_desc *desc = ahash_request_ctx(req);
+
+	desc->tfm = *ctx;
+	desc->flags = req->base.flags;
+
+	return crypto_shash_init(desc);
+}
+
+static int shash_async_update(struct ahash_request *req)
+{
+	struct shash_desc *desc = ahash_request_ctx(req);
+	struct crypto_hash_walk walk;
+	int nbytes;
+
+	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
+	     nbytes = crypto_hash_walk_done(&walk, nbytes))
+		nbytes = crypto_shash_update(desc, walk.data, nbytes);
+
+	return nbytes;
+}
+
+static int shash_async_final(struct ahash_request *req)
+{
+	return crypto_shash_final(ahash_request_ctx(req), req->result);
+}
+
+static int shash_async_digest(struct ahash_request *req)
+{
+	struct scatterlist *sg = req->src;
+	unsigned int offset = sg->offset;
+	unsigned int nbytes = req->nbytes;
+	int err;
+
+	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
+		struct crypto_shash **ctx =
+			crypto_ahash_ctx(crypto_ahash_reqtfm(req));
+		struct shash_desc *desc = ahash_request_ctx(req);
+		void *data;
+
+		desc->tfm = *ctx;
+		desc->flags = req->base.flags;
+
+		data = crypto_kmap(sg_page(sg), 0);
+		err = crypto_shash_digest(desc, data + offset, nbytes,
+					  req->result);
+		crypto_kunmap(data, 0);
+		crypto_yield(desc->flags);
+		goto out;
+	}
+
+	err = shash_async_init(req);
+	if (err)
+		goto out;
+
+	err = shash_async_update(req);
+	if (err)
+		goto out;
+
+	err = shash_async_final(req);
+
+out:
+	return err;
+}
+
+static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
+{
+	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
+
+	crypto_free_shash(*ctx);
+}
+
+static int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
+{
+	struct crypto_alg *calg = tfm->__crt_alg;
+	struct shash_alg *alg = __crypto_shash_alg(calg);
+	struct ahash_tfm *crt = &tfm->crt_ahash;
+	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
+	struct crypto_shash *shash;
+
+	if (!crypto_mod_get(calg))
+		return -EAGAIN;
+
+	shash = __crypto_shash_cast(crypto_create_tfm(
+		calg, &crypto_shash_type));
+	if (IS_ERR(shash)) {
+		crypto_mod_put(calg);
+		return PTR_ERR(shash);
+	}
+
+	*ctx = shash;
+	tfm->exit = crypto_exit_shash_ops_async;
+
+	crt->init = shash_async_init;
+	crt->update = shash_async_update;
+	crt->final  = shash_async_final;
+	crt->digest = shash_async_digest;
+	crt->setkey = shash_async_setkey;
+
+	crt->digestsize = alg->digestsize;
+	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);
+
+	return 0;
+}
+
+static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
+			       unsigned int keylen)
+{
+	struct shash_desc *desc = crypto_hash_ctx(tfm);
+
+	return crypto_shash_setkey(desc->tfm, key, keylen);
+}
+
+static int shash_compat_init(struct hash_desc *hdesc)
+{
+	struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
+
+	desc->flags = hdesc->flags;
+
+	return crypto_shash_init(desc);
+}
+
+static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
+			       unsigned int len)
+{
+	struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
+	struct crypto_hash_walk walk;
+	int nbytes;
+
+	for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len);
+	     nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes))
+		nbytes = crypto_shash_update(desc, walk.data, nbytes);
+
+	return nbytes;
+}
+
+static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
+{
+	return crypto_shash_final(crypto_hash_ctx(hdesc->tfm), out);
+}
+
+static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
+			       unsigned int nbytes, u8 *out)
+{
+	unsigned int offset = sg->offset;
+	int err;
+
+	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
+		struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
+		void *data;
+
+		desc->flags = hdesc->flags;
+
+		data = crypto_kmap(sg_page(sg), 0);
+		err = crypto_shash_digest(desc, data + offset, nbytes, out);
+		crypto_kunmap(data, 0);
+		crypto_yield(desc->flags);
+		goto out;
+	}
+
+	err = shash_compat_init(hdesc);
+	if (err)
+		goto out;
+
+	err = shash_compat_update(hdesc, sg, nbytes);
+	if (err)
+		goto out;
+
+	err = shash_compat_final(hdesc, out);
+
+out:
+	return err;
+}
+
+static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
+{
+	struct shash_desc *desc = crypto_tfm_ctx(tfm);
+
+	crypto_free_shash(desc->tfm);
+}
+
+static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
+{
+	struct hash_tfm *crt = &tfm->crt_hash;
+	struct crypto_alg *calg = tfm->__crt_alg;
+	struct shash_alg *alg = __crypto_shash_alg(calg);
+	struct shash_desc *desc = crypto_tfm_ctx(tfm);
+	struct crypto_shash *shash;
+
+	shash = __crypto_shash_cast(crypto_create_tfm(
+		calg, &crypto_shash_type));
+	if (IS_ERR(shash))
+		return PTR_ERR(shash);
+
+	desc->tfm = shash;
+	tfm->exit = crypto_exit_shash_ops_compat;
+
+	crt->init = shash_compat_init;
+	crt->update = shash_compat_update;
+	crt->final  = shash_compat_final;
+	crt->digest = shash_compat_digest;
+	crt->setkey = shash_compat_setkey;
+
+	crt->digestsize = alg->digestsize;
+
+	return 0;
+}
+
+static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
+{
+	switch (mask & CRYPTO_ALG_TYPE_MASK) {
+	case CRYPTO_ALG_TYPE_HASH_MASK:
+		return crypto_init_shash_ops_compat(tfm);
+	case CRYPTO_ALG_TYPE_AHASH_MASK:
+		return crypto_init_shash_ops_async(tfm);
+	}
+
+	return -EINVAL;
+}
+
+static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
+					 u32 mask)
+{
+	struct shash_alg *salg = __crypto_shash_alg(alg);
+
+	switch (mask & CRYPTO_ALG_TYPE_MASK) {
+	case CRYPTO_ALG_TYPE_HASH_MASK:
+		return sizeof(struct shash_desc) + salg->descsize;
+	case CRYPTO_ALG_TYPE_AHASH_MASK:
+		return sizeof(struct crypto_shash *);
+	}
+
+	return 0;
+}
+
+static int crypto_shash_init_tfm(struct crypto_tfm *tfm,
+				 const struct crypto_type *frontend)
+{
+	if (frontend->type != CRYPTO_ALG_TYPE_SHASH)
+		return -EINVAL;
+	return 0;
+}
+
+static unsigned int crypto_shash_extsize(struct crypto_alg *alg,
+					 const struct crypto_type *frontend)
+{
+	return alg->cra_ctxsize;
+}
+
+static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
+	__attribute__ ((unused));
+static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
+{
+	struct shash_alg *salg = __crypto_shash_alg(alg);
+
+	seq_printf(m, "type         : shash\n");
+	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
+	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
+	seq_printf(m, "descsize     : %u\n", salg->descsize);
+}
+
+static const struct crypto_type crypto_shash_type = {
+	.ctxsize = crypto_shash_ctxsize,
+	.extsize = crypto_shash_extsize,
+	.init = crypto_init_shash_ops,
+	.init_tfm = crypto_shash_init_tfm,
+#ifdef CONFIG_PROC_FS
+	.show = crypto_shash_show,
+#endif
+	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
+	.maskset = CRYPTO_ALG_TYPE_MASK,
+	.type = CRYPTO_ALG_TYPE_SHASH,
+	.tfmsize = offsetof(struct crypto_shash, base),
+};
+
+struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
+					u32 mask)
+{
+	return __crypto_shash_cast(
+		crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask));
+}
+EXPORT_SYMBOL_GPL(crypto_alloc_shash);
+
+int crypto_register_shash(struct shash_alg *alg)
+{
+	struct crypto_alg *base = &alg->base;
+
+	if (alg->digestsize > PAGE_SIZE / 8 ||
+	    alg->descsize > PAGE_SIZE / 8)
+		return -EINVAL;
+
+	base->cra_type = &crypto_shash_type;
+	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
+	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;
+
+	return crypto_register_alg(base);
+}
+EXPORT_SYMBOL_GPL(crypto_register_shash);
+
+int crypto_unregister_shash(struct shash_alg *alg)
+{
+	return crypto_unregister_alg(&alg->base);
+}
+EXPORT_SYMBOL_GPL(crypto_unregister_shash);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("Synchronous cryptographic hash type");
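
For reference, a minimal sketch of driving the new shash interface from a caller, using the same stack-descriptor layout the testmgr change below relies on; "md5" and example_digest are illustrative only, and error handling is trimmed:

	#include <crypto/hash.h>
	#include <linux/err.h>

	static int example_digest(const u8 *data, unsigned int len, u8 *out)
	{
		struct crypto_shash *tfm;
		int err;

		tfm = crypto_alloc_shash("md5", 0, 0);
		if (IS_ERR(tfm))
			return PTR_ERR(tfm);

		{
			/* descriptor plus per-request state, sized at runtime */
			struct {
				struct shash_desc shash;
				char ctx[crypto_shash_descsize(tfm)];
			} sdesc;

			sdesc.shash.tfm = tfm;
			sdesc.shash.flags = 0;

			/* falls back to init/update/final if the alg has no digest hook */
			err = crypto_shash_digest(&sdesc.shash, data, len, out);
		}

		crypto_free_shash(tfm);
		return err;
	}
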
diff --git a/crypto/testmgr.c b/crypto/testmgr.c
index b828c6cf1b1d2d7179cfaaac3f0a3aa2e79f8749..a75f11ffb957257d15d67b7eecf55c666466b0f5 100644
--- a/crypto/testmgr.c
+++ b/crypto/testmgr.c
@@ -843,6 +843,14 @@ static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
 			goto out;
 		}
 
+		if (dlen != ctemplate[i].outlen) {
+			printk(KERN_ERR "alg: comp: Compression test %d "
+			       "failed for %s: output len = %d\n", i + 1, algo,
+			       dlen);
+			ret = -EINVAL;
+			goto out;
+		}
+
 		if (memcmp(result, ctemplate[i].output, dlen)) {
 			printk(KERN_ERR "alg: comp: Compression test %d "
 			       "failed for %s\n", i + 1, algo);
@@ -853,7 +861,7 @@ static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
 	}
 
 	for (i = 0; i < dtcount; i++) {
-		int ilen, ret, dlen = COMP_BUF_SIZE;
+		int ilen, dlen = COMP_BUF_SIZE;
 
 		memset(result, 0, sizeof (result));
 
@@ -867,6 +875,14 @@ static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
 			goto out;
 		}
 
+		if (dlen != dtemplate[i].outlen) {
+			printk(KERN_ERR "alg: comp: Decompression test %d "
+			       "failed for %s: output len = %d\n", i + 1, algo,
+			       dlen);
+			ret = -EINVAL;
+			goto out;
+		}
+
 		if (memcmp(result, dtemplate[i].output, dlen)) {
 			printk(KERN_ERR "alg: comp: Decompression test %d "
 			       "failed for %s\n", i + 1, algo);
@@ -1010,6 +1026,55 @@ static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
 	return err;
 }
 
+static int alg_test_crc32c(const struct alg_test_desc *desc,
+			   const char *driver, u32 type, u32 mask)
+{
+	struct crypto_shash *tfm;
+	u32 val;
+	int err;
+
+	err = alg_test_hash(desc, driver, type, mask);
+	if (err)
+		goto out;
+
+	tfm = crypto_alloc_shash(driver, type, mask);
+	if (IS_ERR(tfm)) {
+		printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
+		       "%ld\n", driver, PTR_ERR(tfm));
+		err = PTR_ERR(tfm);
+		goto out;
+	}
+
+	do {
+		struct {
+			struct shash_desc shash;
+			char ctx[crypto_shash_descsize(tfm)];
+		} sdesc;
+
+		sdesc.shash.tfm = tfm;
+		sdesc.shash.flags = 0;
+
+		*(u32 *)sdesc.ctx = le32_to_cpu(420553207);
+		err = crypto_shash_final(&sdesc.shash, (u8 *)&val);
+		if (err) {
+			printk(KERN_ERR "alg: crc32c: Operation failed for "
+			       "%s: %d\n", driver, err);
+			break;
+		}
+
+		if (val != ~420553207) {
+			printk(KERN_ERR "alg: crc32c: Test failed for %s: "
+			       "%d\n", driver, val);
+			err = -EINVAL;
+		}
+	} while (0);
+
+	crypto_free_shash(tfm);
+
+out:
+	return err;
+}
+
 /* Please keep this list sorted by algorithm name. */
 static const struct alg_test_desc alg_test_descs[] = {
 	{
@@ -1134,7 +1199,7 @@ static const struct alg_test_desc alg_test_descs[] = {
 		}
 	}, {
 		.alg = "crc32c",
-		.test = alg_test_hash,
+		.test = alg_test_crc32c,
 		.suite = {
 			.hash = {
 				.vecs = crc32c_tv_template,
@@ -1801,6 +1866,7 @@ static int alg_find_test(const char *alg)
 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
 {
 	int i;
+	int rc;
 
 	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
 		char nalg[CRYPTO_MAX_ALG_NAME];
@@ -1820,8 +1886,12 @@ int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
 	if (i < 0)
 		goto notest;
 
-	return alg_test_descs[i].test(alg_test_descs + i, driver,
+	rc = alg_test_descs[i].test(alg_test_descs + i, driver,
 				      type, mask);
+	if (fips_enabled && rc)
+		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
+
+	return rc;
 
 notest:
 	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
diff --git a/crypto/testmgr.h b/crypto/testmgr.h
index dee94d9ecfba5e3724d37ebc844bd1219aba587d..132953e144d34bcb0766f4eebfce32107c7a489b 100644
--- a/crypto/testmgr.h
+++ b/crypto/testmgr.h
@@ -8349,7 +8349,7 @@ struct comp_testvec {
 
 /*
  * Deflate test vectors (null-terminated strings).
- * Params: winbits=11, Z_DEFAULT_COMPRESSION, MAX_MEM_LEVEL.
+ * Params: winbits=-11, Z_DEFAULT_COMPRESSION, MAX_MEM_LEVEL.
  */
 #define DEFLATE_COMP_TEST_VECTORS 2
 #define DEFLATE_DECOMP_TEST_VECTORS 2
diff --git a/crypto/tgr192.c b/crypto/tgr192.c
index a92414f24bebd2e5a65a40491d98501757971f83..cbca4f208c9f40017c8ff69c27c52f34862904fd 100644
--- a/crypto/tgr192.c
+++ b/crypto/tgr192.c
@@ -21,11 +21,11 @@
  * (at your option) any later version.
  *
  */
+#include <crypto/internal/hash.h>
 #include <linux/init.h>
 #include <linux/module.h>
 #include <linux/mm.h>
 #include <asm/byteorder.h>
-#include <linux/crypto.h>
 #include <linux/types.h>
 
 #define TGR192_DIGEST_SIZE 24
@@ -495,24 +495,26 @@ static void tgr192_transform(struct tgr192_ctx *tctx, const u8 * data)
 	tctx->c = c;
 }
 
-static void tgr192_init(struct crypto_tfm *tfm)
+static int tgr192_init(struct shash_desc *desc)
 {
-	struct tgr192_ctx *tctx = crypto_tfm_ctx(tfm);
+	struct tgr192_ctx *tctx = shash_desc_ctx(desc);
 
 	tctx->a = 0x0123456789abcdefULL;
 	tctx->b = 0xfedcba9876543210ULL;
 	tctx->c = 0xf096a5b4c3b2e187ULL;
 	tctx->nblocks = 0;
 	tctx->count = 0;
+
+	return 0;
 }
 
 
 /* Update the message digest with the contents
  * of INBUF with length INLEN. */
-static void tgr192_update(struct crypto_tfm *tfm, const u8 *inbuf,
+static int tgr192_update(struct shash_desc *desc, const u8 *inbuf,
 			  unsigned int len)
 {
-	struct tgr192_ctx *tctx = crypto_tfm_ctx(tfm);
+	struct tgr192_ctx *tctx = shash_desc_ctx(desc);
 
 	if (tctx->count == 64) {	/* flush the buffer */
 		tgr192_transform(tctx, tctx->hash);
@@ -520,15 +522,15 @@ static void tgr192_update(struct crypto_tfm *tfm, const u8 *inbuf,
 		tctx->nblocks++;
 	}
 	if (!inbuf) {
-		return;
+		return 0;
 	}
 	if (tctx->count) {
 		for (; len && tctx->count < 64; len--) {
 			tctx->hash[tctx->count++] = *inbuf++;
 		}
-		tgr192_update(tfm, NULL, 0);
+		tgr192_update(desc, NULL, 0);
 		if (!len) {
-			return;
+			return 0;
 		}
 
 	}
@@ -543,20 +545,22 @@ static void tgr192_update(struct crypto_tfm *tfm, const u8 *inbuf,
 	for (; len && tctx->count < 64; len--) {
 		tctx->hash[tctx->count++] = *inbuf++;
 	}
+
+	return 0;
 }
 
 
 
 /* The routine terminates the computation */
-static void tgr192_final(struct crypto_tfm *tfm, u8 * out)
+static int tgr192_final(struct shash_desc *desc, u8 * out)
 {
-	struct tgr192_ctx *tctx = crypto_tfm_ctx(tfm);
+	struct tgr192_ctx *tctx = shash_desc_ctx(desc);
 	__be64 *dst = (__be64 *)out;
 	__be64 *be64p;
 	__le32 *le32p;
 	u32 t, msb, lsb;
 
-	tgr192_update(tfm, NULL, 0); /* flush */ ;
+	tgr192_update(desc, NULL, 0); /* flush */
 
 	msb = 0;
 	t = tctx->nblocks;
@@ -584,7 +588,7 @@ static void tgr192_final(struct crypto_tfm *tfm, u8 * out)
 		while (tctx->count < 64) {
 			tctx->hash[tctx->count++] = 0;
 		}
-		tgr192_update(tfm, NULL, 0); /* flush */ ;
+		tgr192_update(desc, NULL, 0); /* flush */
 		memset(tctx->hash, 0, 56);    /* fill next block with zeroes */
 	}
 	/* append the 64 bit count */
@@ -598,91 +602,94 @@ static void tgr192_final(struct crypto_tfm *tfm, u8 * out)
 	dst[0] = be64p[0] = cpu_to_be64(tctx->a);
 	dst[1] = be64p[1] = cpu_to_be64(tctx->b);
 	dst[2] = be64p[2] = cpu_to_be64(tctx->c);
+
+	return 0;
 }
 
-static void tgr160_final(struct crypto_tfm *tfm, u8 * out)
+static int tgr160_final(struct shash_desc *desc, u8 * out)
 {
 	u8 D[64];
 
-	tgr192_final(tfm, D);
+	tgr192_final(desc, D);
 	memcpy(out, D, TGR160_DIGEST_SIZE);
 	memset(D, 0, TGR192_DIGEST_SIZE);
+
+	return 0;
 }
 
-static void tgr128_final(struct crypto_tfm *tfm, u8 * out)
+static int tgr128_final(struct shash_desc *desc, u8 * out)
 {
 	u8 D[64];
 
-	tgr192_final(tfm, D);
+	tgr192_final(desc, D);
 	memcpy(out, D, TGR128_DIGEST_SIZE);
 	memset(D, 0, TGR192_DIGEST_SIZE);
+
+	return 0;
 }
 
-static struct crypto_alg tgr192 = {
-	.cra_name = "tgr192",
-	.cra_flags = CRYPTO_ALG_TYPE_DIGEST,
-	.cra_blocksize = TGR192_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct tgr192_ctx),
-	.cra_module = THIS_MODULE,
-	.cra_alignmask = 7,
-	.cra_list = LIST_HEAD_INIT(tgr192.cra_list),
-	.cra_u = {.digest = {
-			     .dia_digestsize = TGR192_DIGEST_SIZE,
-			     .dia_init = tgr192_init,
-			     .dia_update = tgr192_update,
-			     .dia_final = tgr192_final}}
+static struct shash_alg tgr192 = {
+	.digestsize	=	TGR192_DIGEST_SIZE,
+	.init		=	tgr192_init,
+	.update		=	tgr192_update,
+	.final		=	tgr192_final,
+	.descsize	=	sizeof(struct tgr192_ctx),
+	.base		=	{
+		.cra_name	=	"tgr192",
+		.cra_flags	=	CRYPTO_ALG_TYPE_SHASH,
+		.cra_blocksize	=	TGR192_BLOCK_SIZE,
+		.cra_module	=	THIS_MODULE,
+	}
 };
 
-static struct crypto_alg tgr160 = {
-	.cra_name = "tgr160",
-	.cra_flags = CRYPTO_ALG_TYPE_DIGEST,
-	.cra_blocksize = TGR192_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct tgr192_ctx),
-	.cra_module = THIS_MODULE,
-	.cra_alignmask = 7,
-	.cra_list = LIST_HEAD_INIT(tgr160.cra_list),
-	.cra_u = {.digest = {
-			     .dia_digestsize = TGR160_DIGEST_SIZE,
-			     .dia_init = tgr192_init,
-			     .dia_update = tgr192_update,
-			     .dia_final = tgr160_final}}
+static struct shash_alg tgr160 = {
+	.digestsize	=	TGR160_DIGEST_SIZE,
+	.init		=	tgr192_init,
+	.update		=	tgr192_update,
+	.final		=	tgr160_final,
+	.descsize	=	sizeof(struct tgr192_ctx),
+	.base		=	{
+		.cra_name	=	"tgr160",
+		.cra_flags	=	CRYPTO_ALG_TYPE_SHASH,
+		.cra_blocksize	=	TGR192_BLOCK_SIZE,
+		.cra_module	=	THIS_MODULE,
+	}
 };
 
-static struct crypto_alg tgr128 = {
-	.cra_name = "tgr128",
-	.cra_flags = CRYPTO_ALG_TYPE_DIGEST,
-	.cra_blocksize = TGR192_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct tgr192_ctx),
-	.cra_module = THIS_MODULE,
-	.cra_alignmask = 7,
-	.cra_list = LIST_HEAD_INIT(tgr128.cra_list),
-	.cra_u = {.digest = {
-			     .dia_digestsize = TGR128_DIGEST_SIZE,
-			     .dia_init = tgr192_init,
-			     .dia_update = tgr192_update,
-			     .dia_final = tgr128_final}}
+static struct shash_alg tgr128 = {
+	.digestsize	=	TGR128_DIGEST_SIZE,
+	.init		=	tgr192_init,
+	.update		=	tgr192_update,
+	.final		=	tgr128_final,
+	.descsize	=	sizeof(struct tgr192_ctx),
+	.base		=	{
+		.cra_name	=	"tgr128",
+		.cra_flags	=	CRYPTO_ALG_TYPE_SHASH,
+		.cra_blocksize	=	TGR192_BLOCK_SIZE,
+		.cra_module	=	THIS_MODULE,
+	}
 };
 
 static int __init tgr192_mod_init(void)
 {
 	int ret = 0;
 
-	ret = crypto_register_alg(&tgr192);
+	ret = crypto_register_shash(&tgr192);
 
 	if (ret < 0) {
 		goto out;
 	}
 
-	ret = crypto_register_alg(&tgr160);
+	ret = crypto_register_shash(&tgr160);
 	if (ret < 0) {
-		crypto_unregister_alg(&tgr192);
+		crypto_unregister_shash(&tgr192);
 		goto out;
 	}
 
-	ret = crypto_register_alg(&tgr128);
+	ret = crypto_register_shash(&tgr128);
 	if (ret < 0) {
-		crypto_unregister_alg(&tgr192);
-		crypto_unregister_alg(&tgr160);
+		crypto_unregister_shash(&tgr192);
+		crypto_unregister_shash(&tgr160);
 	}
       out:
 	return ret;
@@ -690,9 +697,9 @@ static int __init tgr192_mod_init(void)
 
 static void __exit tgr192_mod_fini(void)
 {
-	crypto_unregister_alg(&tgr192);
-	crypto_unregister_alg(&tgr160);
-	crypto_unregister_alg(&tgr128);
+	crypto_unregister_shash(&tgr192);
+	crypto_unregister_shash(&tgr160);
+	crypto_unregister_shash(&tgr128);
 }
 
 MODULE_ALIAS("tgr160");
diff --git a/crypto/wp512.c b/crypto/wp512.c
index bff28560d66d8de4ce54c7c40f9fba604ecee29d..723427273687011b3c4e8fd995e1ec8d1f6546d8 100644
--- a/crypto/wp512.c
+++ b/crypto/wp512.c
@@ -19,11 +19,11 @@
  * (at your option) any later version.
  *
  */
+#include <crypto/internal/hash.h>
 #include <linux/init.h>
 #include <linux/module.h>
 #include <linux/mm.h>
 #include <asm/byteorder.h>
-#include <linux/crypto.h>
 #include <linux/types.h>
 
 #define WP512_DIGEST_SIZE 64
@@ -980,8 +980,8 @@ static void wp512_process_buffer(struct wp512_ctx *wctx) {
 
 }
 
-static void wp512_init(struct crypto_tfm *tfm) {
-	struct wp512_ctx *wctx = crypto_tfm_ctx(tfm);
+static int wp512_init(struct shash_desc *desc) {
+	struct wp512_ctx *wctx = shash_desc_ctx(desc);
 	int i;
 
 	memset(wctx->bitLength, 0, 32);
@@ -990,12 +990,14 @@ static void wp512_init(struct crypto_tfm *tfm) {
 	for (i = 0; i < 8; i++) {
 		wctx->hash[i] = 0L;
 	}
+
+	return 0;
 }
 
-static void wp512_update(struct crypto_tfm *tfm, const u8 *source,
+static int wp512_update(struct shash_desc *desc, const u8 *source,
 			 unsigned int len)
 {
-	struct wp512_ctx *wctx = crypto_tfm_ctx(tfm);
+	struct wp512_ctx *wctx = shash_desc_ctx(desc);
 	int sourcePos    = 0;
 	unsigned int bits_len = len * 8; // convert to number of bits
 	int sourceGap    = (8 - ((int)bits_len & 7)) & 7;
@@ -1051,11 +1053,12 @@ static void wp512_update(struct crypto_tfm *tfm, const u8 *source,
 	wctx->bufferBits   = bufferBits;
 	wctx->bufferPos    = bufferPos;
 
+	return 0;
 }
 
-static void wp512_final(struct crypto_tfm *tfm, u8 *out)
+static int wp512_final(struct shash_desc *desc, u8 *out)
 {
-	struct wp512_ctx *wctx = crypto_tfm_ctx(tfm);
+	struct wp512_ctx *wctx = shash_desc_ctx(desc);
 	int i;
    	u8 *buffer      = wctx->buffer;
    	u8 *bitLength   = wctx->bitLength;
@@ -1084,89 +1087,95 @@ static void wp512_final(struct crypto_tfm *tfm, u8 *out)
 		digest[i] = cpu_to_be64(wctx->hash[i]);
    	wctx->bufferBits   = bufferBits;
    	wctx->bufferPos    = bufferPos;
+
+	return 0;
 }
 
-static void wp384_final(struct crypto_tfm *tfm, u8 *out)
+static int wp384_final(struct shash_desc *desc, u8 *out)
 {
 	u8 D[64];
 
-	wp512_final(tfm, D);
+	wp512_final(desc, D);
 	memcpy (out, D, WP384_DIGEST_SIZE);
 	memset (D, 0, WP512_DIGEST_SIZE);
+
+	return 0;
 }
 
-static void wp256_final(struct crypto_tfm *tfm, u8 *out)
+static int wp256_final(struct shash_desc *desc, u8 *out)
 {
 	u8 D[64];
 
-	wp512_final(tfm, D);
+	wp512_final(desc, D);
 	memcpy (out, D, WP256_DIGEST_SIZE);
 	memset (D, 0, WP512_DIGEST_SIZE);
+
+	return 0;
 }
 
-static struct crypto_alg wp512 = {
-	.cra_name	=	"wp512",
-	.cra_flags	=	CRYPTO_ALG_TYPE_DIGEST,
-	.cra_blocksize	=	WP512_BLOCK_SIZE,
-	.cra_ctxsize	=	sizeof(struct wp512_ctx),
-	.cra_module	=	THIS_MODULE,
-	.cra_list       =       LIST_HEAD_INIT(wp512.cra_list),	
-	.cra_u		=	{ .digest = {
-	.dia_digestsize	=	WP512_DIGEST_SIZE,
-	.dia_init   	= 	wp512_init,
-	.dia_update 	=	wp512_update,
-	.dia_final  	=	wp512_final } }
+static struct shash_alg wp512 = {
+	.digestsize	=	WP512_DIGEST_SIZE,
+	.init		=	wp512_init,
+	.update		=	wp512_update,
+	.final		=	wp512_final,
+	.descsize	=	sizeof(struct wp512_ctx),
+	.base		=	{
+		.cra_name	=	"wp512",
+		.cra_flags	=	CRYPTO_ALG_TYPE_SHASH,
+		.cra_blocksize	=	WP512_BLOCK_SIZE,
+		.cra_module	=	THIS_MODULE,
+	}
 };
 
-static struct crypto_alg wp384 = {
-	.cra_name	=	"wp384",
-	.cra_flags	=	CRYPTO_ALG_TYPE_DIGEST,
-	.cra_blocksize	=	WP512_BLOCK_SIZE,
-	.cra_ctxsize	=	sizeof(struct wp512_ctx),
-	.cra_module	=	THIS_MODULE,
-	.cra_list       =       LIST_HEAD_INIT(wp384.cra_list),	
-	.cra_u		=	{ .digest = {
-	.dia_digestsize	=	WP384_DIGEST_SIZE,
-	.dia_init   	= 	wp512_init,
-	.dia_update 	=	wp512_update,
-	.dia_final  	=	wp384_final } }
+static struct shash_alg wp384 = {
+	.digestsize	=	WP384_DIGEST_SIZE,
+	.init		=	wp512_init,
+	.update		=	wp512_update,
+	.final		=	wp384_final,
+	.descsize	=	sizeof(struct wp512_ctx),
+	.base		=	{
+		.cra_name	=	"wp384",
+		.cra_flags	=	CRYPTO_ALG_TYPE_SHASH,
+		.cra_blocksize	=	WP512_BLOCK_SIZE,
+		.cra_module	=	THIS_MODULE,
+	}
 };
 
-static struct crypto_alg wp256 = {
-	.cra_name	=	"wp256",
-	.cra_flags	=	CRYPTO_ALG_TYPE_DIGEST,
-	.cra_blocksize	=	WP512_BLOCK_SIZE,
-	.cra_ctxsize	=	sizeof(struct wp512_ctx),
-	.cra_module	=	THIS_MODULE,
-	.cra_list       =       LIST_HEAD_INIT(wp256.cra_list),	
-	.cra_u		=	{ .digest = {
-	.dia_digestsize	=	WP256_DIGEST_SIZE,
-	.dia_init   	= 	wp512_init,
-	.dia_update 	=	wp512_update,
-	.dia_final  	=	wp256_final } }
+static struct shash_alg wp256 = {
+	.digestsize	=	WP256_DIGEST_SIZE,
+	.init		=	wp512_init,
+	.update		=	wp512_update,
+	.final		=	wp256_final,
+	.descsize	=	sizeof(struct wp512_ctx),
+	.base		=	{
+		.cra_name	=	"wp256",
+		.cra_flags	=	CRYPTO_ALG_TYPE_SHASH,
+		.cra_blocksize	=	WP512_BLOCK_SIZE,
+		.cra_module	=	THIS_MODULE,
+	}
 };
 
 static int __init wp512_mod_init(void)
 {
 	int ret = 0;
 
-	ret = crypto_register_alg(&wp512);
+	ret = crypto_register_shash(&wp512);
 
 	if (ret < 0)
 		goto out;
 
-	ret = crypto_register_alg(&wp384);
+	ret = crypto_register_shash(&wp384);
 	if (ret < 0)
 	{
-		crypto_unregister_alg(&wp512);
+		crypto_unregister_shash(&wp512);
 		goto out;
 	}
 
-	ret = crypto_register_alg(&wp256);
+	ret = crypto_register_shash(&wp256);
 	if (ret < 0)
 	{
-		crypto_unregister_alg(&wp512);
-		crypto_unregister_alg(&wp384);
+		crypto_unregister_shash(&wp512);
+		crypto_unregister_shash(&wp384);
 	}
 out:
 	return ret;
@@ -1174,9 +1183,9 @@ static int __init wp512_mod_init(void)
 
 static void __exit wp512_mod_fini(void)
 {
-	crypto_unregister_alg(&wp512);
-	crypto_unregister_alg(&wp384);
-	crypto_unregister_alg(&wp256);
+	crypto_unregister_shash(&wp512);
+	crypto_unregister_shash(&wp384);
+	crypto_unregister_shash(&wp256);
 }
 
 MODULE_ALIAS("wp384");
diff --git a/drivers/crypto/hifn_795x.c b/drivers/crypto/hifn_795x.c
index 4d22b21bd3e30ad872adaef9b87cd003904832bd..0c79fe7f15673563257fe6a3a01a0c451f6dd997 100644
--- a/drivers/crypto/hifn_795x.c
+++ b/drivers/crypto/hifn_795x.c
@@ -38,9 +38,6 @@
 
 #include <asm/kmap_types.h>
 
-#undef dprintk
-
-#define HIFN_TEST
 //#define HIFN_DEBUG
 
 #ifdef HIFN_DEBUG
@@ -363,14 +360,14 @@ static atomic_t hifn_dev_number;
 #define HIFN_NAMESIZE			32
 #define HIFN_MAX_RESULT_ORDER		5
 
-#define	HIFN_D_CMD_RSIZE		24*4
-#define	HIFN_D_SRC_RSIZE		80*4
-#define	HIFN_D_DST_RSIZE		80*4
-#define	HIFN_D_RES_RSIZE		24*4
+#define	HIFN_D_CMD_RSIZE		24*1
+#define	HIFN_D_SRC_RSIZE		80*1
+#define	HIFN_D_DST_RSIZE		80*1
+#define	HIFN_D_RES_RSIZE		24*1
 
 #define HIFN_D_DST_DALIGN		4
 
-#define HIFN_QUEUE_LENGTH		HIFN_D_CMD_RSIZE-1
+#define HIFN_QUEUE_LENGTH		(HIFN_D_CMD_RSIZE - 1)
 
 #define AES_MIN_KEY_SIZE		16
 #define AES_MAX_KEY_SIZE		32
@@ -406,8 +403,6 @@ struct hifn_dma {
 	u8			command_bufs[HIFN_D_CMD_RSIZE][HIFN_MAX_COMMAND];
 	u8			result_bufs[HIFN_D_CMD_RSIZE][HIFN_MAX_RESULT];
 
-	u64			test_src, test_dst;
-
 	/*
 	 *  Our current positions for insertion and removal from the descriptor
 	 *  rings.
@@ -434,9 +429,6 @@ struct hifn_device
 	struct pci_dev		*pdev;
 	void __iomem		*bar[3];
 
-	unsigned long		result_mem;
-	dma_addr_t		dst;
-
 	void			*desc_virt;
 	dma_addr_t		desc_dma;
 
@@ -446,8 +438,6 @@ struct hifn_device
 
 	spinlock_t		lock;
 
-	void 			*priv;
-
 	u32			flags;
 	int			active, started;
 	struct delayed_work	work;
@@ -657,12 +647,17 @@ struct ablkcipher_walk
 
 struct hifn_context
 {
-	u8			key[HIFN_MAX_CRYPT_KEY_LENGTH], *iv;
+	u8			key[HIFN_MAX_CRYPT_KEY_LENGTH];
 	struct hifn_device	*dev;
-	unsigned int		keysize, ivsize;
+	unsigned int		keysize;
+};
+
+struct hifn_request_context
+{
+	u8			*iv;
+	unsigned int		ivsize;
 	u8			op, type, mode, unused;
 	struct ablkcipher_walk	walk;
-	atomic_t		sg_num;
 };
 
 #define crypto_alg_to_hifn(a)	container_of(a, struct hifn_crypto_alg, alg)
@@ -1168,7 +1163,8 @@ static int hifn_setup_crypto_command(struct hifn_device *dev,
 }
 
 static int hifn_setup_cmd_desc(struct hifn_device *dev,
-		struct hifn_context *ctx, void *priv, unsigned int nbytes)
+		struct hifn_context *ctx, struct hifn_request_context *rctx,
+		void *priv, unsigned int nbytes)
 {
 	struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
 	int cmd_len, sa_idx;
@@ -1179,7 +1175,7 @@ static int hifn_setup_cmd_desc(struct hifn_device *dev,
 	buf_pos = buf = dma->command_bufs[dma->cmdi];
 
 	mask = 0;
-	switch (ctx->op) {
+	switch (rctx->op) {
 		case ACRYPTO_OP_DECRYPT:
 			mask = HIFN_BASE_CMD_CRYPT | HIFN_BASE_CMD_DECODE;
 			break;
@@ -1196,15 +1192,15 @@ static int hifn_setup_cmd_desc(struct hifn_device *dev,
 	buf_pos += hifn_setup_base_command(dev, buf_pos, nbytes,
 			nbytes, mask, dev->snum);
 
-	if (ctx->op == ACRYPTO_OP_ENCRYPT || ctx->op == ACRYPTO_OP_DECRYPT) {
+	if (rctx->op == ACRYPTO_OP_ENCRYPT || rctx->op == ACRYPTO_OP_DECRYPT) {
 		u16 md = 0;
 
 		if (ctx->keysize)
 			md |= HIFN_CRYPT_CMD_NEW_KEY;
-		if (ctx->iv && ctx->mode != ACRYPTO_MODE_ECB)
+		if (rctx->iv && rctx->mode != ACRYPTO_MODE_ECB)
 			md |= HIFN_CRYPT_CMD_NEW_IV;
 
-		switch (ctx->mode) {
+		switch (rctx->mode) {
 			case ACRYPTO_MODE_ECB:
 				md |= HIFN_CRYPT_CMD_MODE_ECB;
 				break;
@@ -1221,7 +1217,7 @@ static int hifn_setup_cmd_desc(struct hifn_device *dev,
 				goto err_out;
 		}
 
-		switch (ctx->type) {
+		switch (rctx->type) {
 			case ACRYPTO_TYPE_AES_128:
 				if (ctx->keysize != 16)
 					goto err_out;
@@ -1256,17 +1252,18 @@ static int hifn_setup_cmd_desc(struct hifn_device *dev,
 
 		buf_pos += hifn_setup_crypto_command(dev, buf_pos,
 				nbytes, nbytes, ctx->key, ctx->keysize,
-				ctx->iv, ctx->ivsize, md);
+				rctx->iv, rctx->ivsize, md);
 	}
 
 	dev->sa[sa_idx] = priv;
+	dev->started++;
 
 	cmd_len = buf_pos - buf;
 	dma->cmdr[dma->cmdi].l = __cpu_to_le32(cmd_len | HIFN_D_VALID |
 			HIFN_D_LAST | HIFN_D_MASKDONEIRQ);
 
 	if (++dma->cmdi == HIFN_D_CMD_RSIZE) {
-		dma->cmdr[dma->cmdi].l = __cpu_to_le32(HIFN_MAX_COMMAND |
+		dma->cmdr[dma->cmdi].l = __cpu_to_le32(
 			HIFN_D_VALID | HIFN_D_LAST |
 			HIFN_D_MASKDONEIRQ | HIFN_D_JUMP);
 		dma->cmdi = 0;
@@ -1284,7 +1281,7 @@ static int hifn_setup_cmd_desc(struct hifn_device *dev,
 }
 
 static int hifn_setup_src_desc(struct hifn_device *dev, struct page *page,
-		unsigned int offset, unsigned int size)
+		unsigned int offset, unsigned int size, int last)
 {
 	struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
 	int idx;
@@ -1296,12 +1293,12 @@ static int hifn_setup_src_desc(struct hifn_device *dev, struct page *page,
 
 	dma->srcr[idx].p = __cpu_to_le32(addr);
 	dma->srcr[idx].l = __cpu_to_le32(size | HIFN_D_VALID |
-			HIFN_D_MASKDONEIRQ | HIFN_D_LAST);
+			HIFN_D_MASKDONEIRQ | (last ? HIFN_D_LAST : 0));
 
 	if (++idx == HIFN_D_SRC_RSIZE) {
 		dma->srcr[idx].l = __cpu_to_le32(HIFN_D_VALID |
-				HIFN_D_JUMP |
-				HIFN_D_MASKDONEIRQ | HIFN_D_LAST);
+				HIFN_D_JUMP | HIFN_D_MASKDONEIRQ |
+				(last ? HIFN_D_LAST : 0));
 		idx = 0;
 	}
 
@@ -1342,7 +1339,7 @@ static void hifn_setup_res_desc(struct hifn_device *dev)
 }
 
 static void hifn_setup_dst_desc(struct hifn_device *dev, struct page *page,
-		unsigned offset, unsigned size)
+		unsigned offset, unsigned size, int last)
 {
 	struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
 	int idx;
@@ -1353,12 +1350,12 @@ static void hifn_setup_dst_desc(struct hifn_device *dev, struct page *page,
 	idx = dma->dsti;
 	dma->dstr[idx].p = __cpu_to_le32(addr);
 	dma->dstr[idx].l = __cpu_to_le32(size |	HIFN_D_VALID |
-			HIFN_D_MASKDONEIRQ | HIFN_D_LAST);
+			HIFN_D_MASKDONEIRQ | (last ? HIFN_D_LAST : 0));
 
 	if (++idx == HIFN_D_DST_RSIZE) {
 		dma->dstr[idx].l = __cpu_to_le32(HIFN_D_VALID |
 				HIFN_D_JUMP | HIFN_D_MASKDONEIRQ |
-				HIFN_D_LAST);
+				(last ? HIFN_D_LAST : 0));
 		idx = 0;
 	}
 	dma->dsti = idx;
@@ -1370,16 +1367,52 @@ static void hifn_setup_dst_desc(struct hifn_device *dev, struct page *page,
 	}
 }
 
-static int hifn_setup_dma(struct hifn_device *dev, struct page *spage, unsigned int soff,
-		struct page *dpage, unsigned int doff, unsigned int nbytes, void *priv,
-		struct hifn_context *ctx)
+static int hifn_setup_dma(struct hifn_device *dev,
+		struct hifn_context *ctx, struct hifn_request_context *rctx,
+		struct scatterlist *src, struct scatterlist *dst,
+		unsigned int nbytes, void *priv)
 {
-	dprintk("%s: spage: %p, soffset: %u, dpage: %p, doffset: %u, nbytes: %u, priv: %p, ctx: %p.\n",
-			dev->name, spage, soff, dpage, doff, nbytes, priv, ctx);
+	struct scatterlist *t;
+	struct page *spage, *dpage;
+	unsigned int soff, doff;
+	unsigned int n, len;
 
-	hifn_setup_src_desc(dev, spage, soff, nbytes);
-	hifn_setup_cmd_desc(dev, ctx, priv, nbytes);
-	hifn_setup_dst_desc(dev, dpage, doff, nbytes);
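+	/* Queue one source descriptor per src segment, flagging the final one as last. */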
+	n = nbytes;
+	while (n) {
+		spage = sg_page(src);
+		soff = src->offset;
+		len = min(src->length, n);
+
+		hifn_setup_src_desc(dev, spage, soff, len, n - len == 0);
+
+		src++;
+		n -= len;
+	}
+
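+	/*
+	 * Destination segments that were found misaligned during the walk use
+	 * the bounce cache pages instead of the caller's scatterlist.
+	 */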
+	t = &rctx->walk.cache[0];
+	n = nbytes;
+	while (n) {
+		if (t->length && rctx->walk.flags & ASYNC_FLAGS_MISALIGNED) {
+			BUG_ON(!sg_page(t));
+			dpage = sg_page(t);
+			doff = 0;
+			len = t->length;
+		} else {
+			BUG_ON(!sg_page(dst));
+			dpage = sg_page(dst);
+			doff = dst->offset;
+			len = dst->length;
+		}
+		len = min(len, n);
+
+		hifn_setup_dst_desc(dev, dpage, doff, len, n - len == 0);
+
+		dst++;
+		t++;
+		n -= len;
+	}
+
+	hifn_setup_cmd_desc(dev, ctx, rctx, priv, nbytes);
 	hifn_setup_res_desc(dev);
 	return 0;
 }
@@ -1424,32 +1457,26 @@ static void ablkcipher_walk_exit(struct ablkcipher_walk *w)
 	w->num = 0;
 }
 
-static int ablkcipher_add(void *daddr, unsigned int *drestp, struct scatterlist *src,
+static int ablkcipher_add(unsigned int *drestp, struct scatterlist *dst,
 		unsigned int size, unsigned int *nbytesp)
 {
 	unsigned int copy, drest = *drestp, nbytes = *nbytesp;
 	int idx = 0;
-	void *saddr;
 
 	if (drest < size || size > nbytes)
 		return -EINVAL;
 
 	while (size) {
-		copy = min(drest, min(size, src->length));
-
-		saddr = kmap_atomic(sg_page(src), KM_SOFTIRQ1);
-		memcpy(daddr, saddr + src->offset, copy);
-		kunmap_atomic(saddr, KM_SOFTIRQ1);
+		copy = min(drest, min(size, dst->length));
 
 		size -= copy;
 		drest -= copy;
 		nbytes -= copy;
-		daddr += copy;
 
 		dprintk("%s: copy: %u, size: %u, drest: %u, nbytes: %u.\n",
 				__func__, copy, size, drest, nbytes);
 
-		src++;
+		dst++;
 		idx++;
 	}
 
@@ -1462,8 +1489,7 @@ static int ablkcipher_add(void *daddr, unsigned int *drestp, struct scatterlist
 static int ablkcipher_walk(struct ablkcipher_request *req,
 		struct ablkcipher_walk *w)
 {
-	struct scatterlist *src, *dst, *t;
-	void *daddr;
+	struct scatterlist *dst, *t;
 	unsigned int nbytes = req->nbytes, offset, copy, diff;
 	int idx, tidx, err;
 
@@ -1473,26 +1499,22 @@ static int ablkcipher_walk(struct ablkcipher_request *req,
 		if (idx >= w->num && (w->flags & ASYNC_FLAGS_MISALIGNED))
 			return -EINVAL;
 
-		src = &req->src[idx];
 		dst = &req->dst[idx];
 
-		dprintk("\n%s: slen: %u, dlen: %u, soff: %u, doff: %u, offset: %u, "
-				"nbytes: %u.\n",
-				__func__, src->length, dst->length, src->offset,
-				dst->offset, offset, nbytes);
+		dprintk("\n%s: dlen: %u, doff: %u, offset: %u, nbytes: %u.\n",
+			__func__, dst->length, dst->offset, offset, nbytes);
 
 		if (!IS_ALIGNED(dst->offset, HIFN_D_DST_DALIGN) ||
 		    !IS_ALIGNED(dst->length, HIFN_D_DST_DALIGN) ||
 		    offset) {
-			unsigned slen = min(src->length - offset, nbytes);
+			unsigned slen = min(dst->length - offset, nbytes);
 			unsigned dlen = PAGE_SIZE;
 
 			t = &w->cache[idx];
 
-			daddr = kmap_atomic(sg_page(t), KM_SOFTIRQ0);
-			err = ablkcipher_add(daddr, &dlen, src, slen, &nbytes);
+			err = ablkcipher_add(&dlen, dst, slen, &nbytes);
 			if (err < 0)
-				goto err_out_unmap;
+				return err;
 
 			idx += err;
 
@@ -1528,21 +1550,19 @@ static int ablkcipher_walk(struct ablkcipher_request *req,
 			} else {
 				copy += diff + nbytes;
 
-				src = &req->src[idx];
+				dst = &req->dst[idx];
 
-				err = ablkcipher_add(daddr + slen, &dlen, src, nbytes, &nbytes);
+				err = ablkcipher_add(&dlen, dst, nbytes, &nbytes);
 				if (err < 0)
-					goto err_out_unmap;
+					return err;
 
 				idx += err;
 			}
 
 			t->length = copy;
 			t->offset = offset;
-
-			kunmap_atomic(daddr, KM_SOFTIRQ0);
 		} else {
-			nbytes -= min(src->length, nbytes);
+			nbytes -= min(dst->length, nbytes);
 			idx++;
 		}
 
@@ -1550,26 +1570,22 @@ static int ablkcipher_walk(struct ablkcipher_request *req,
 	}
 
 	return tidx;
-
-err_out_unmap:
-	kunmap_atomic(daddr, KM_SOFTIRQ0);
-	return err;
 }
 
 static int hifn_setup_session(struct ablkcipher_request *req)
 {
 	struct hifn_context *ctx = crypto_tfm_ctx(req->base.tfm);
+	struct hifn_request_context *rctx = ablkcipher_request_ctx(req);
 	struct hifn_device *dev = ctx->dev;
-	struct page *spage, *dpage;
-	unsigned long soff, doff, dlen, flags;
-	unsigned int nbytes = req->nbytes, idx = 0, len;
+	unsigned long dlen, flags;
+	unsigned int nbytes = req->nbytes, idx = 0;
 	int err = -EINVAL, sg_num;
-	struct scatterlist *src, *dst, *t;
+	struct scatterlist *dst;
 
-	if (ctx->iv && !ctx->ivsize && ctx->mode != ACRYPTO_MODE_ECB)
+	if (rctx->iv && !rctx->ivsize && rctx->mode != ACRYPTO_MODE_ECB)
 		goto err_out_exit;
 
-	ctx->walk.flags = 0;
+	rctx->walk.flags = 0;
 
 	while (nbytes) {
 		dst = &req->dst[idx];
@@ -1577,27 +1593,23 @@ static int hifn_setup_session(struct ablkcipher_request *req)
 
 		if (!IS_ALIGNED(dst->offset, HIFN_D_DST_DALIGN) ||
 		    !IS_ALIGNED(dlen, HIFN_D_DST_DALIGN))
-			ctx->walk.flags |= ASYNC_FLAGS_MISALIGNED;
+			rctx->walk.flags |= ASYNC_FLAGS_MISALIGNED;
 
 		nbytes -= dlen;
 		idx++;
 	}
 
-	if (ctx->walk.flags & ASYNC_FLAGS_MISALIGNED) {
-		err = ablkcipher_walk_init(&ctx->walk, idx, GFP_ATOMIC);
+	if (rctx->walk.flags & ASYNC_FLAGS_MISALIGNED) {
+		err = ablkcipher_walk_init(&rctx->walk, idx, GFP_ATOMIC);
 		if (err < 0)
 			return err;
 	}
 
-	nbytes = req->nbytes;
-	idx = 0;
-
-	sg_num = ablkcipher_walk(req, &ctx->walk);
+	sg_num = ablkcipher_walk(req, &rctx->walk);
 	if (sg_num < 0) {
 		err = sg_num;
 		goto err_out_exit;
 	}
-	atomic_set(&ctx->sg_num, sg_num);
 
 	spin_lock_irqsave(&dev->lock, flags);
 	if (dev->started + sg_num > HIFN_QUEUE_LENGTH) {
@@ -1605,37 +1617,11 @@ static int hifn_setup_session(struct ablkcipher_request *req)
 		goto err_out;
 	}
 
-	dev->snum++;
-	dev->started += sg_num;
-
-	while (nbytes) {
-		src = &req->src[idx];
-		dst = &req->dst[idx];
-		t = &ctx->walk.cache[idx];
-
-		if (t->length) {
-			spage = dpage = sg_page(t);
-			soff = doff = 0;
-			len = t->length;
-		} else {
-			spage = sg_page(src);
-			soff = src->offset;
-
-			dpage = sg_page(dst);
-			doff = dst->offset;
-
-			len = dst->length;
-		}
-
-		idx++;
-
-		err = hifn_setup_dma(dev, spage, soff, dpage, doff, nbytes,
-				req, ctx);
-		if (err)
-			goto err_out;
+	err = hifn_setup_dma(dev, ctx, rctx, req->src, req->dst, req->nbytes, req);
+	if (err)
+		goto err_out;
 
-		nbytes -= min(len, nbytes);
-	}
+	dev->snum++;
 
 	dev->active = HIFN_DEFAULT_ACTIVE_NUM;
 	spin_unlock_irqrestore(&dev->lock, flags);
@@ -1645,12 +1631,13 @@ static int hifn_setup_session(struct ablkcipher_request *req)
 err_out:
 	spin_unlock_irqrestore(&dev->lock, flags);
 err_out_exit:
-	if (err)
-		dprintk("%s: iv: %p [%d], key: %p [%d], mode: %u, op: %u, "
+	if (err) {
+		printk("%s: iv: %p [%d], key: %p [%d], mode: %u, op: %u, "
 				"type: %u, err: %d.\n",
-			dev->name, ctx->iv, ctx->ivsize,
+			dev->name, rctx->iv, rctx->ivsize,
 			ctx->key, ctx->keysize,
-			ctx->mode, ctx->op, ctx->type, err);
+			rctx->mode, rctx->op, rctx->type, err);
+	}
 
 	return err;
 }
@@ -1660,31 +1647,33 @@ static int hifn_test(struct hifn_device *dev, int encdec, u8 snum)
 	int n, err;
 	u8 src[16];
 	struct hifn_context ctx;
+	struct hifn_request_context rctx;
 	u8 fips_aes_ecb_from_zero[16] = {
 		0x66, 0xE9, 0x4B, 0xD4,
 		0xEF, 0x8A, 0x2C, 0x3B,
 		0x88, 0x4C, 0xFA, 0x59,
 		0xCA, 0x34, 0x2B, 0x2E};
+	struct scatterlist sg;
 
 	memset(src, 0, sizeof(src));
 	memset(ctx.key, 0, sizeof(ctx.key));
 
 	ctx.dev = dev;
 	ctx.keysize = 16;
-	ctx.ivsize = 0;
-	ctx.iv = NULL;
-	ctx.op = (encdec)?ACRYPTO_OP_ENCRYPT:ACRYPTO_OP_DECRYPT;
-	ctx.mode = ACRYPTO_MODE_ECB;
-	ctx.type = ACRYPTO_TYPE_AES_128;
-	atomic_set(&ctx.sg_num, 1);
-
-	err = hifn_setup_dma(dev,
-			virt_to_page(src), offset_in_page(src),
-			virt_to_page(src), offset_in_page(src),
-			sizeof(src), NULL, &ctx);
+	rctx.ivsize = 0;
+	rctx.iv = NULL;
+	rctx.op = (encdec)?ACRYPTO_OP_ENCRYPT:ACRYPTO_OP_DECRYPT;
+	rctx.mode = ACRYPTO_MODE_ECB;
+	rctx.type = ACRYPTO_TYPE_AES_128;
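+	/* Empty bounce cache: hifn_setup_dma() must use the real scatterlist. */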
+	rctx.walk.cache[0].length = 0;
+
+	sg_init_one(&sg, &src, sizeof(src));
+
+	err = hifn_setup_dma(dev, &ctx, &rctx, &sg, &sg, sizeof(src), NULL);
 	if (err)
 		goto err_out;
 
+	dev->started = 0;
 	msleep(200);
 
 	dprintk("%s: decoded: ", dev->name);
@@ -1711,6 +1700,7 @@ static int hifn_start_device(struct hifn_device *dev)
 {
 	int err;
 
+	dev->started = dev->active = 0;
 	hifn_reset_dma(dev, 1);
 
 	err = hifn_enable_crypto(dev);
@@ -1764,90 +1754,65 @@ static int ablkcipher_get(void *saddr, unsigned int *srestp, unsigned int offset
 	return idx;
 }
 
-static void hifn_process_ready(struct ablkcipher_request *req, int error)
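+/* Release a completed request slot and drop the device's in-flight count. */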
+static inline void hifn_complete_sa(struct hifn_device *dev, int i)
 {
-	struct hifn_context *ctx = crypto_tfm_ctx(req->base.tfm);
-	struct hifn_device *dev;
-
-	dprintk("%s: req: %p, ctx: %p.\n", __func__, req, ctx);
+	unsigned long flags;
 
-	dev = ctx->dev;
-	dprintk("%s: req: %p, started: %d, sg_num: %d.\n",
-		__func__, req, dev->started, atomic_read(&ctx->sg_num));
+	spin_lock_irqsave(&dev->lock, flags);
+	dev->sa[i] = NULL;
+	dev->started--;
+	if (dev->started < 0)
+		printk("%s: started: %d.\n", __func__, dev->started);
+	spin_unlock_irqrestore(&dev->lock, flags);
+	BUG_ON(dev->started < 0);
+}
 
-	if (--dev->started < 0)
-		BUG();
+static void hifn_process_ready(struct ablkcipher_request *req, int error)
+{
+	struct hifn_request_context *rctx = ablkcipher_request_ctx(req);
 
-	if (atomic_dec_and_test(&ctx->sg_num)) {
+	if (rctx->walk.flags & ASYNC_FLAGS_MISALIGNED) {
 		unsigned int nbytes = req->nbytes;
 		int idx = 0, err;
 		struct scatterlist *dst, *t;
 		void *saddr;
 
-		if (ctx->walk.flags & ASYNC_FLAGS_MISALIGNED) {
-			while (nbytes) {
-				t = &ctx->walk.cache[idx];
-				dst = &req->dst[idx];
-
-				dprintk("\n%s: sg_page(t): %p, t->length: %u, "
-					"sg_page(dst): %p, dst->length: %u, "
-					"nbytes: %u.\n",
-					__func__, sg_page(t), t->length,
-					sg_page(dst), dst->length, nbytes);
+		while (nbytes) {
+			t = &rctx->walk.cache[idx];
+			dst = &req->dst[idx];
 
-				if (!t->length) {
-					nbytes -= min(dst->length, nbytes);
-					idx++;
-					continue;
-				}
+			dprintk("\n%s: sg_page(t): %p, t->length: %u, "
+				"sg_page(dst): %p, dst->length: %u, "
+				"nbytes: %u.\n",
+				__func__, sg_page(t), t->length,
+				sg_page(dst), dst->length, nbytes);
 
-				saddr = kmap_atomic(sg_page(t), KM_IRQ1);
+			if (!t->length) {
+				nbytes -= min(dst->length, nbytes);
+				idx++;
+				continue;
+			}
 
-				err = ablkcipher_get(saddr, &t->length, t->offset,
-						dst, nbytes, &nbytes);
-				if (err < 0) {
-					kunmap_atomic(saddr, KM_IRQ1);
-					break;
-				}
+			saddr = kmap_atomic(sg_page(t), KM_SOFTIRQ0);
 
-				idx += err;
-				kunmap_atomic(saddr, KM_IRQ1);
+			err = ablkcipher_get(saddr, &t->length, t->offset,
+					dst, nbytes, &nbytes);
+			if (err < 0) {
+				kunmap_atomic(saddr, KM_SOFTIRQ0);
+				break;
 			}
 
-			ablkcipher_walk_exit(&ctx->walk);
+			idx += err;
+			kunmap_atomic(saddr, KM_SOFTIRQ0);
 		}
 
-		req->base.complete(&req->base, error);
+		ablkcipher_walk_exit(&rctx->walk);
 	}
-}
 
-static void hifn_check_for_completion(struct hifn_device *dev, int error)
-{
-	int i;
-	struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
-
-	for (i=0; i<HIFN_D_RES_RSIZE; ++i) {
-		struct hifn_desc *d = &dma->resr[i];
-
-		if (!(d->l & __cpu_to_le32(HIFN_D_VALID)) && dev->sa[i]) {
-			dev->success++;
-			dev->reset = 0;
-			hifn_process_ready(dev->sa[i], error);
-			dev->sa[i] = NULL;
-		}
-
-		if (d->l & __cpu_to_le32(HIFN_D_DESTOVER | HIFN_D_OVER))
-			if (printk_ratelimit())
-				printk("%s: overflow detected [d: %u, o: %u] "
-						"at %d resr: l: %08x, p: %08x.\n",
-					dev->name,
-					!!(d->l & __cpu_to_le32(HIFN_D_DESTOVER)),
-					!!(d->l & __cpu_to_le32(HIFN_D_OVER)),
-					i, d->l, d->p);
-	}
+	req->base.complete(&req->base, error);
 }
 
-static void hifn_clear_rings(struct hifn_device *dev)
+static void hifn_clear_rings(struct hifn_device *dev, int error)
 {
 	struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
 	int i, u;
@@ -1864,21 +1829,26 @@ static void hifn_clear_rings(struct hifn_device *dev)
 		if (dma->resr[i].l & __cpu_to_le32(HIFN_D_VALID))
 			break;
 
-		if (i != HIFN_D_RES_RSIZE)
-			u--;
+		if (dev->sa[i]) {
+			dev->success++;
+			dev->reset = 0;
+			hifn_process_ready(dev->sa[i], error);
+			hifn_complete_sa(dev, i);
+		}
 
-		if (++i == (HIFN_D_RES_RSIZE + 1))
+		if (++i == HIFN_D_RES_RSIZE)
 			i = 0;
+		u--;
 	}
 	dma->resk = i; dma->resu = u;
 
 	i = dma->srck; u = dma->srcu;
 	while (u != 0) {
-		if (i == HIFN_D_SRC_RSIZE)
-			i = 0;
 		if (dma->srcr[i].l & __cpu_to_le32(HIFN_D_VALID))
 			break;
-		i++, u--;
+		if (++i == HIFN_D_SRC_RSIZE)
+			i = 0;
+		u--;
 	}
 	dma->srck = i; dma->srcu = u;
 
@@ -1886,20 +1856,19 @@ static void hifn_clear_rings(struct hifn_device *dev)
 	while (u != 0) {
 		if (dma->cmdr[i].l & __cpu_to_le32(HIFN_D_VALID))
 			break;
-		if (i != HIFN_D_CMD_RSIZE)
-			u--;
-		if (++i == (HIFN_D_CMD_RSIZE + 1))
+		if (++i == HIFN_D_CMD_RSIZE)
 			i = 0;
+		u--;
 	}
 	dma->cmdk = i; dma->cmdu = u;
 
 	i = dma->dstk; u = dma->dstu;
 	while (u != 0) {
-		if (i == HIFN_D_DST_RSIZE)
-			i = 0;
 		if (dma->dstr[i].l & __cpu_to_le32(HIFN_D_VALID))
 			break;
-		i++, u--;
+		if (++i == HIFN_D_DST_RSIZE)
+			i = 0;
+		u--;
 	}
 	dma->dstk = i; dma->dstu = u;
 
@@ -1944,30 +1913,39 @@ static void hifn_work(struct work_struct *work)
 	} else
 		dev->active--;
 
-	if (dev->prev_success == dev->success && dev->started)
+	if ((dev->prev_success == dev->success) && dev->started)
 		reset = 1;
 	dev->prev_success = dev->success;
 	spin_unlock_irqrestore(&dev->lock, flags);
 
 	if (reset) {
-		dprintk("%s: r: %08x, active: %d, started: %d, "
-				"success: %lu: reset: %d.\n",
-			dev->name, r, dev->active, dev->started,
-			dev->success, reset);
-
 		if (++dev->reset >= 5) {
-			dprintk("%s: really hard reset.\n", dev->name);
+			int i;
+			struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
+
+			printk("%s: r: %08x, active: %d, started: %d, "
+				"success: %lu: qlen: %u/%u, reset: %d.\n",
+				dev->name, r, dev->active, dev->started,
+				dev->success, dev->queue.qlen, dev->queue.max_qlen,
+				reset);
+
+			printk("%s: res: ", __func__);
+			for (i=0; i<HIFN_D_RES_RSIZE; ++i) {
+				printk("%x.%p ", dma->resr[i].l, dev->sa[i]);
+				if (dev->sa[i]) {
+					hifn_process_ready(dev->sa[i], -ENODEV);
+					hifn_complete_sa(dev, i);
+				}
+			}
+			printk("\n");
+
 			hifn_reset_dma(dev, 1);
 			hifn_stop_device(dev);
 			hifn_start_device(dev);
 			dev->reset = 0;
 		}
 
-		spin_lock_irqsave(&dev->lock, flags);
-		hifn_check_for_completion(dev, -EBUSY);
-		hifn_clear_rings(dev);
-		dev->started = 0;
-		spin_unlock_irqrestore(&dev->lock, flags);
+		tasklet_schedule(&dev->tasklet);
 	}
 
 	schedule_delayed_work(&dev->work, HZ);
@@ -1984,8 +1962,8 @@ static irqreturn_t hifn_interrupt(int irq, void *data)
 	dprintk("%s: 1 dmacsr: %08x, dmareg: %08x, res: %08x [%d], "
 			"i: %d.%d.%d.%d, u: %d.%d.%d.%d.\n",
 		dev->name, dmacsr, dev->dmareg, dmacsr & dev->dmareg, dma->cmdi,
-		dma->cmdu, dma->srcu, dma->dstu, dma->resu,
-		dma->cmdi, dma->srci, dma->dsti, dma->resi);
+		dma->cmdi, dma->srci, dma->dsti, dma->resi,
+		dma->cmdu, dma->srcu, dma->dstu, dma->resu);
 
 	if ((dmacsr & dev->dmareg) == 0)
 		return IRQ_NONE;
@@ -2002,11 +1980,10 @@ static irqreturn_t hifn_interrupt(int irq, void *data)
 	if (restart) {
 		u32 puisr = hifn_read_0(dev, HIFN_0_PUISR);
 
-		if (printk_ratelimit())
-			printk("%s: overflow: r: %d, d: %d, puisr: %08x, d: %u.\n",
-				dev->name, !!(dmacsr & HIFN_DMACSR_R_OVER),
-				!!(dmacsr & HIFN_DMACSR_D_OVER),
-				puisr, !!(puisr & HIFN_PUISR_DSTOVER));
+		printk(KERN_WARNING "%s: overflow: r: %d, d: %d, puisr: %08x, d: %u.\n",
+			dev->name, !!(dmacsr & HIFN_DMACSR_R_OVER),
+			!!(dmacsr & HIFN_DMACSR_D_OVER),
+			puisr, !!(puisr & HIFN_PUISR_DSTOVER));
 		if (!!(puisr & HIFN_PUISR_DSTOVER))
 			hifn_write_0(dev, HIFN_0_PUISR, HIFN_PUISR_DSTOVER);
 		hifn_write_1(dev, HIFN_1_DMA_CSR, dmacsr & (HIFN_DMACSR_R_OVER |
@@ -2016,12 +1993,11 @@ static irqreturn_t hifn_interrupt(int irq, void *data)
 	restart = dmacsr & (HIFN_DMACSR_C_ABORT | HIFN_DMACSR_S_ABORT |
 			HIFN_DMACSR_D_ABORT | HIFN_DMACSR_R_ABORT);
 	if (restart) {
-		if (printk_ratelimit())
-			printk("%s: abort: c: %d, s: %d, d: %d, r: %d.\n",
-				dev->name, !!(dmacsr & HIFN_DMACSR_C_ABORT),
-				!!(dmacsr & HIFN_DMACSR_S_ABORT),
-				!!(dmacsr & HIFN_DMACSR_D_ABORT),
-				!!(dmacsr & HIFN_DMACSR_R_ABORT));
+		printk(KERN_WARNING "%s: abort: c: %d, s: %d, d: %d, r: %d.\n",
+			dev->name, !!(dmacsr & HIFN_DMACSR_C_ABORT),
+			!!(dmacsr & HIFN_DMACSR_S_ABORT),
+			!!(dmacsr & HIFN_DMACSR_D_ABORT),
+			!!(dmacsr & HIFN_DMACSR_R_ABORT));
 		hifn_reset_dma(dev, 1);
 		hifn_init_dma(dev);
 		hifn_init_registers(dev);
@@ -2034,7 +2010,6 @@ static irqreturn_t hifn_interrupt(int irq, void *data)
 	}
 
 	tasklet_schedule(&dev->tasklet);
-	hifn_clear_rings(dev);
 
 	return IRQ_HANDLED;
 }
@@ -2048,21 +2023,25 @@ static void hifn_flush(struct hifn_device *dev)
 	struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
 	int i;
 
-	spin_lock_irqsave(&dev->lock, flags);
 	for (i=0; i<HIFN_D_RES_RSIZE; ++i) {
 		struct hifn_desc *d = &dma->resr[i];
 
 		if (dev->sa[i]) {
 			hifn_process_ready(dev->sa[i],
 				(d->l & __cpu_to_le32(HIFN_D_VALID))?-ENODEV:0);
+			hifn_complete_sa(dev, i);
 		}
 	}
 
+	spin_lock_irqsave(&dev->lock, flags);
 	while ((async_req = crypto_dequeue_request(&dev->queue))) {
 		ctx = crypto_tfm_ctx(async_req->tfm);
 		req = container_of(async_req, struct ablkcipher_request, base);
+		spin_unlock_irqrestore(&dev->lock, flags);
 
 		hifn_process_ready(req, -ENODEV);
+
+		spin_lock_irqsave(&dev->lock, flags);
 	}
 	spin_unlock_irqrestore(&dev->lock, flags);
 }
@@ -2121,6 +2100,7 @@ static int hifn_setup_crypto_req(struct ablkcipher_request *req, u8 op,
 		u8 type, u8 mode)
 {
 	struct hifn_context *ctx = crypto_tfm_ctx(req->base.tfm);
+	struct hifn_request_context *rctx = ablkcipher_request_ctx(req);
 	unsigned ivsize;
 
 	ivsize = crypto_ablkcipher_ivsize(crypto_ablkcipher_reqtfm(req));
@@ -2141,11 +2121,11 @@ static int hifn_setup_crypto_req(struct ablkcipher_request *req, u8 op,
 			type = ACRYPTO_TYPE_AES_256;
 	}
 
-	ctx->op = op;
-	ctx->mode = mode;
-	ctx->type = type;
-	ctx->iv = req->info;
-	ctx->ivsize = ivsize;
+	rctx->op = op;
+	rctx->mode = mode;
+	rctx->type = type;
+	rctx->iv = req->info;
+	rctx->ivsize = ivsize;
 
 	/*
 	 * HEAVY TODO: needs to kick Herbert XU to write documentation.
@@ -2158,7 +2138,7 @@ static int hifn_setup_crypto_req(struct ablkcipher_request *req, u8 op,
 
 static int hifn_process_queue(struct hifn_device *dev)
 {
-	struct crypto_async_request *async_req;
+	struct crypto_async_request *async_req, *backlog;
 	struct hifn_context *ctx;
 	struct ablkcipher_request *req;
 	unsigned long flags;
@@ -2166,12 +2146,16 @@ static int hifn_process_queue(struct hifn_device *dev)
 
 	while (dev->started < HIFN_QUEUE_LENGTH) {
 		spin_lock_irqsave(&dev->lock, flags);
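+		/* Peek at the backlog before dequeueing so it can be notified below. */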
+		backlog = crypto_get_backlog(&dev->queue);
 		async_req = crypto_dequeue_request(&dev->queue);
 		spin_unlock_irqrestore(&dev->lock, flags);
 
 		if (!async_req)
 			break;
 
+		if (backlog)
+			backlog->complete(backlog, -EINPROGRESS);
+
 		ctx = crypto_tfm_ctx(async_req->tfm);
 		req = container_of(async_req, struct ablkcipher_request, base);
 
@@ -2496,7 +2480,7 @@ static int hifn_cra_init(struct crypto_tfm *tfm)
 	struct hifn_context *ctx = crypto_tfm_ctx(tfm);
 
 	ctx->dev = ha->dev;
-
+	tfm->crt_ablkcipher.reqsize = sizeof(struct hifn_request_context);
 	return 0;
 }
 
@@ -2574,7 +2558,10 @@ static void hifn_tasklet_callback(unsigned long data)
 	 * (like dev->success), but they are used in process
 	 * context or update is atomic (like setting dev->sa[i] to NULL).
 	 */
-	hifn_check_for_completion(dev, 0);
+	hifn_clear_rings(dev, 0);
+
+	if (dev->started < HIFN_QUEUE_LENGTH && dev->queue.qlen)
+		hifn_process_queue(dev);
 }
 
 static int hifn_probe(struct pci_dev *pdev, const struct pci_device_id *id)
@@ -2631,22 +2618,11 @@ static int hifn_probe(struct pci_dev *pdev, const struct pci_device_id *id)
 			goto err_out_unmap_bars;
 	}
 
-	dev->result_mem = __get_free_pages(GFP_KERNEL, HIFN_MAX_RESULT_ORDER);
-	if (!dev->result_mem) {
-		dprintk("Failed to allocate %d pages for result_mem.\n",
-				HIFN_MAX_RESULT_ORDER);
-		goto err_out_unmap_bars;
-	}
-	memset((void *)dev->result_mem, 0, PAGE_SIZE*(1<<HIFN_MAX_RESULT_ORDER));
-
-	dev->dst = pci_map_single(pdev, (void *)dev->result_mem,
-			PAGE_SIZE << HIFN_MAX_RESULT_ORDER, PCI_DMA_FROMDEVICE);
-
 	dev->desc_virt = pci_alloc_consistent(pdev, sizeof(struct hifn_dma),
 			&dev->desc_dma);
 	if (!dev->desc_virt) {
 		dprintk("Failed to allocate descriptor rings.\n");
-		goto err_out_free_result_pages;
+		goto err_out_unmap_bars;
 	}
 	memset(dev->desc_virt, 0, sizeof(struct hifn_dma));
 
@@ -2706,11 +2682,6 @@ static int hifn_probe(struct pci_dev *pdev, const struct pci_device_id *id)
 	pci_free_consistent(pdev, sizeof(struct hifn_dma),
 			dev->desc_virt, dev->desc_dma);
 
-err_out_free_result_pages:
-	pci_unmap_single(pdev, dev->dst, PAGE_SIZE << HIFN_MAX_RESULT_ORDER,
-			PCI_DMA_FROMDEVICE);
-	free_pages(dev->result_mem, HIFN_MAX_RESULT_ORDER);
-
 err_out_unmap_bars:
 	for (i=0; i<3; ++i)
 		if (dev->bar[i])
@@ -2748,10 +2719,6 @@ static void hifn_remove(struct pci_dev *pdev)
 
 		pci_free_consistent(pdev, sizeof(struct hifn_dma),
 				dev->desc_virt, dev->desc_dma);
-		pci_unmap_single(pdev, dev->dst,
-				PAGE_SIZE << HIFN_MAX_RESULT_ORDER,
-				PCI_DMA_FROMDEVICE);
-		free_pages(dev->result_mem, HIFN_MAX_RESULT_ORDER);
 		for (i=0; i<3; ++i)
 			if (dev->bar[i])
 				iounmap(dev->bar[i]);
@@ -2782,6 +2749,11 @@ static int __devinit hifn_init(void)
 	unsigned int freq;
 	int err;
 
+	if (sizeof(dma_addr_t) > 4) {
+		printk(KERN_INFO "HIFN supports only 32-bit addresses.\n");
+		return -EINVAL;
+	}
+
 	if (strncmp(hifn_pll_ref, "ext", 3) &&
 	    strncmp(hifn_pll_ref, "pci", 3)) {
 		printk(KERN_ERR "hifn795x: invalid hifn_pll_ref clock, "
diff --git a/drivers/crypto/padlock-aes.c b/drivers/crypto/padlock-aes.c
index bf2917d197a018d13f1bc48ad4c93c0b0a93ccde..856b3cc2558387b7b239923c37c54f9d47b250ff 100644
--- a/drivers/crypto/padlock-aes.c
+++ b/drivers/crypto/padlock-aes.c
@@ -15,6 +15,8 @@
 #include <linux/errno.h>
 #include <linux/interrupt.h>
 #include <linux/kernel.h>
+#include <linux/percpu.h>
+#include <linux/smp.h>
 #include <asm/byteorder.h>
 #include <asm/i387.h>
 #include "padlock.h"
@@ -49,6 +51,8 @@ struct aes_ctx {
 	u32 *D;
 };
 
+static DEFINE_PER_CPU(struct cword *, last_cword);
+
 /* Tells whether the ACE is capable to generate
    the extended key for a given key_len. */
 static inline int
@@ -89,6 +93,7 @@ static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
 	const __le32 *key = (const __le32 *)in_key;
 	u32 *flags = &tfm->crt_flags;
 	struct crypto_aes_ctx gen_aes;
+	int cpu;
 
 	if (key_len % 8) {
 		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
@@ -118,7 +123,7 @@ static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
 
 	/* Don't generate extended keys if the hardware can do it. */
 	if (aes_hw_extkey_available(key_len))
-		return 0;
+		goto ok;
 
 	ctx->D = ctx->d_data;
 	ctx->cword.encrypt.keygen = 1;
@@ -131,15 +136,30 @@ static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
 
 	memcpy(ctx->E, gen_aes.key_enc, AES_MAX_KEYLENGTH);
 	memcpy(ctx->D, gen_aes.key_dec, AES_MAX_KEYLENGTH);
+
+ok:
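+	/* The control words may have changed; forget them on any CPU that cached them. */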
+	for_each_online_cpu(cpu)
+		if (&ctx->cword.encrypt == per_cpu(last_cword, cpu) ||
+		    &ctx->cword.decrypt == per_cpu(last_cword, cpu))
+			per_cpu(last_cword, cpu) = NULL;
+
 	return 0;
 }
 
 /* ====== Encryption/decryption routines ====== */
 
 /* These are the real call to PadLock. */
-static inline void padlock_reset_key(void)
+static inline void padlock_reset_key(struct cword *cword)
+{
+	int cpu = raw_smp_processor_id();
+
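+	/* Skip the costly key reset when this cword was the last one used on this CPU. */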
+	if (cword != per_cpu(last_cword, cpu))
+		asm volatile ("pushfl; popfl");
+}
+
+static inline void padlock_store_cword(struct cword *cword)
 {
-	asm volatile ("pushfl; popfl");
+	per_cpu(last_cword, raw_smp_processor_id()) = cword;
 }
 
 /*
@@ -149,7 +169,7 @@ static inline void padlock_reset_key(void)
  */
 
 static inline void padlock_xcrypt(const u8 *input, u8 *output, void *key,
-				  void *control_word)
+				  struct cword *control_word)
 {
 	asm volatile (".byte 0xf3,0x0f,0xa7,0xc8"	/* rep xcryptecb */
 		      : "+S"(input), "+D"(output)
@@ -213,22 +233,24 @@ static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
 	struct aes_ctx *ctx = aes_ctx(tfm);
 	int ts_state;
-	padlock_reset_key();
 
+	padlock_reset_key(&ctx->cword.encrypt);
 	ts_state = irq_ts_save();
 	aes_crypt(in, out, ctx->E, &ctx->cword.encrypt);
 	irq_ts_restore(ts_state);
+	padlock_store_cword(&ctx->cword.encrypt);
 }
 
 static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
 	struct aes_ctx *ctx = aes_ctx(tfm);
 	int ts_state;
-	padlock_reset_key();
 
+	padlock_reset_key(&ctx->cword.encrypt);
 	ts_state = irq_ts_save();
 	aes_crypt(in, out, ctx->D, &ctx->cword.decrypt);
 	irq_ts_restore(ts_state);
+	padlock_store_cword(&ctx->cword.encrypt);
 }
 
 static struct crypto_alg aes_alg = {
@@ -261,7 +283,7 @@ static int ecb_aes_encrypt(struct blkcipher_desc *desc,
 	int err;
 	int ts_state;
 
-	padlock_reset_key();
+	padlock_reset_key(&ctx->cword.encrypt);
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
 	err = blkcipher_walk_virt(desc, &walk);
@@ -276,6 +298,8 @@ static int ecb_aes_encrypt(struct blkcipher_desc *desc,
 	}
 	irq_ts_restore(ts_state);
 
+	padlock_store_cword(&ctx->cword.encrypt);
+
 	return err;
 }
 
@@ -288,7 +312,7 @@ static int ecb_aes_decrypt(struct blkcipher_desc *desc,
 	int err;
 	int ts_state;
 
-	padlock_reset_key();
+	padlock_reset_key(&ctx->cword.decrypt);
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
 	err = blkcipher_walk_virt(desc, &walk);
@@ -302,6 +326,9 @@ static int ecb_aes_decrypt(struct blkcipher_desc *desc,
 		err = blkcipher_walk_done(desc, &walk, nbytes);
 	}
 	irq_ts_restore(ts_state);
+
+	padlock_store_cword(&ctx->cword.encrypt);
+
 	return err;
 }
 
@@ -336,7 +363,7 @@ static int cbc_aes_encrypt(struct blkcipher_desc *desc,
 	int err;
 	int ts_state;
 
-	padlock_reset_key();
+	padlock_reset_key(&ctx->cword.encrypt);
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
 	err = blkcipher_walk_virt(desc, &walk);
@@ -353,6 +380,8 @@ static int cbc_aes_encrypt(struct blkcipher_desc *desc,
 	}
 	irq_ts_restore(ts_state);
 
+	padlock_store_cword(&ctx->cword.decrypt);
+
 	return err;
 }
 
@@ -365,7 +394,7 @@ static int cbc_aes_decrypt(struct blkcipher_desc *desc,
 	int err;
 	int ts_state;
 
-	padlock_reset_key();
+	padlock_reset_key(&ctx->cword.encrypt);
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
 	err = blkcipher_walk_virt(desc, &walk);
@@ -380,6 +409,9 @@ static int cbc_aes_decrypt(struct blkcipher_desc *desc,
 	}
 
 	irq_ts_restore(ts_state);
+
+	padlock_store_cword(&ctx->cword.encrypt);
+
 	return err;
 }
 
diff --git a/drivers/crypto/talitos.c b/drivers/crypto/talitos.c
index 24607669a52b1a0d41d6f51319c279284e1927f5..a3918c16b3dbb8650cef798fdfc173e4b66d5088 100644
--- a/drivers/crypto/talitos.c
+++ b/drivers/crypto/talitos.c
@@ -127,7 +127,6 @@ struct talitos_private {
 
 	/* request callback tasklet */
 	struct tasklet_struct done_task;
-	struct tasklet_struct error_task;
 
 	/* list of registered algorithms */
 	struct list_head alg_list;
@@ -138,6 +137,7 @@ struct talitos_private {
 
 /* .features flag */
 #define TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT 0x00000001
+#define TALITOS_FTR_HW_AUTH_CHECK 0x00000002
 
 /*
  * map virtual single (contiguous) pointer to h/w descriptor pointer
@@ -184,6 +184,11 @@ static int reset_channel(struct device *dev, int ch)
 	setbits32(priv->reg + TALITOS_CCCR_LO(ch), TALITOS_CCCR_LO_CDWE |
 		  TALITOS_CCCR_LO_CDIE);
 
+	/* and ICCR writeback, if available */
+	if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
+		setbits32(priv->reg + TALITOS_CCCR_LO(ch),
+		          TALITOS_CCCR_LO_IWSE);
+
 	return 0;
 }
 
@@ -239,6 +244,11 @@ static int init_device(struct device *dev)
 	setbits32(priv->reg + TALITOS_IMR, TALITOS_IMR_INIT);
 	setbits32(priv->reg + TALITOS_IMR_LO, TALITOS_IMR_LO_INIT);
 
+	/* disable integrity check error interrupts (use writeback instead) */
+	if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
+		setbits32(priv->reg + TALITOS_MDEUICR_LO,
+		          TALITOS_MDEUICR_LO_ICE);
+
 	return 0;
 }
 
@@ -370,6 +380,12 @@ static void talitos_done(unsigned long data)
 
 	for (ch = 0; ch < priv->num_channels; ch++)
 		flush_channel(dev, ch, 0, 0);
+
+	/* At this point, all completed channels have been processed.
+	 * Unmask the done interrupts so that channels which complete
+	 * later still raise them.
+	 */
+	setbits32(priv->reg + TALITOS_IMR, TALITOS_IMR_INIT);
+	setbits32(priv->reg + TALITOS_IMR_LO, TALITOS_IMR_LO_INIT);
 }
 
 /*
@@ -469,16 +485,13 @@ static void report_eu_error(struct device *dev, int ch, struct talitos_desc *des
 /*
  * recover from error interrupts
  */
-static void talitos_error(unsigned long data)
+static void talitos_error(unsigned long data, u32 isr, u32 isr_lo)
 {
 	struct device *dev = (struct device *)data;
 	struct talitos_private *priv = dev_get_drvdata(dev);
 	unsigned int timeout = TALITOS_TIMEOUT;
 	int ch, error, reset_dev = 0, reset_ch = 0;
-	u32 isr, isr_lo, v, v_lo;
-
-	isr = in_be32(priv->reg + TALITOS_ISR);
-	isr_lo = in_be32(priv->reg + TALITOS_ISR_LO);
+	u32 v, v_lo;
 
 	for (ch = 0; ch < priv->num_channels; ch++) {
 		/* skip channels without errors */
@@ -560,16 +573,19 @@ static irqreturn_t talitos_interrupt(int irq, void *data)
 
 	isr = in_be32(priv->reg + TALITOS_ISR);
 	isr_lo = in_be32(priv->reg + TALITOS_ISR_LO);
-
-	/* ack */
+	/* Acknowledge interrupt */
 	out_be32(priv->reg + TALITOS_ICR, isr);
 	out_be32(priv->reg + TALITOS_ICR_LO, isr_lo);
 
 	if (unlikely((isr & ~TALITOS_ISR_CHDONE) || isr_lo))
-		talitos_error((unsigned long)data);
+		talitos_error((unsigned long)data, isr, isr_lo);
 	else
-		if (likely(isr & TALITOS_ISR_CHDONE))
+		if (likely(isr & TALITOS_ISR_CHDONE)) {
+			/* mask further done interrupts. */
+			clrbits32(priv->reg + TALITOS_IMR, TALITOS_IMR_DONE);
+			/* done_task will unmask done interrupts at exit */
 			tasklet_schedule(&priv->done_task);
+		}
 
 	return (isr || isr_lo) ? IRQ_HANDLED : IRQ_NONE;
 }
@@ -802,7 +818,7 @@ static void ipsec_esp_encrypt_done(struct device *dev,
 	aead_request_complete(areq, err);
 }
 
-static void ipsec_esp_decrypt_done(struct device *dev,
+static void ipsec_esp_decrypt_swauth_done(struct device *dev,
 				   struct talitos_desc *desc, void *context,
 				   int err)
 {
@@ -834,6 +850,27 @@ static void ipsec_esp_decrypt_done(struct device *dev,
 	aead_request_complete(req, err);
 }
 
+static void ipsec_esp_decrypt_hwauth_done(struct device *dev,
+				   struct talitos_desc *desc, void *context,
+				   int err)
+{
+	struct aead_request *req = context;
+	struct ipsec_esp_edesc *edesc =
+		 container_of(desc, struct ipsec_esp_edesc, desc);
+
+	ipsec_esp_unmap(dev, edesc, req);
+
+	/* check ICV auth status */
+	if (!err)
+		if ((desc->hdr_lo & DESC_HDR_LO_ICCR1_MASK) !=
+		    DESC_HDR_LO_ICCR1_PASS)
+			err = -EBADMSG;
+
+	kfree(edesc);
+
+	aead_request_complete(req, err);
+}
+
 /*
  * convert scatterlist to SEC h/w link table format
  * stop at cryptlen bytes
@@ -887,6 +924,7 @@ static int ipsec_esp(struct ipsec_esp_edesc *edesc, struct aead_request *areq,
 	unsigned int authsize = ctx->authsize;
 	unsigned int ivsize;
 	int sg_count, ret;
+	int sg_link_tbl_len;
 
 	/* hmac key */
 	map_single_talitos_ptr(dev, &desc->ptr[0], ctx->authkeylen, &ctx->key,
@@ -924,33 +962,19 @@ static int ipsec_esp(struct ipsec_esp_edesc *edesc, struct aead_request *areq,
 	if (sg_count == 1) {
 		desc->ptr[4].ptr = cpu_to_be32(sg_dma_address(areq->src));
 	} else {
-		sg_count = sg_to_link_tbl(areq->src, sg_count, cryptlen,
+		sg_link_tbl_len = cryptlen;
+
+		if ((edesc->desc.hdr & DESC_HDR_MODE1_MDEU_CICV) &&
+			(edesc->desc.hdr & DESC_HDR_MODE0_ENCRYPT) == 0) {
+			sg_link_tbl_len = cryptlen + authsize;
+		}
+		sg_count = sg_to_link_tbl(areq->src, sg_count, sg_link_tbl_len,
 					  &edesc->link_tbl[0]);
 		if (sg_count > 1) {
-			struct talitos_ptr *link_tbl_ptr =
-				&edesc->link_tbl[sg_count-1];
-			struct scatterlist *sg;
-			struct talitos_private *priv = dev_get_drvdata(dev);
-
 			desc->ptr[4].j_extent |= DESC_PTR_LNKTBL_JUMP;
 			desc->ptr[4].ptr = cpu_to_be32(edesc->dma_link_tbl);
 			dma_sync_single_for_device(ctx->dev, edesc->dma_link_tbl,
 						   edesc->dma_len, DMA_BIDIRECTIONAL);
-			/* If necessary for this SEC revision,
-			 * add a link table entry for ICV.
-			 */
-			if ((priv->features &
-			     TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT) &&
-			    (edesc->desc.hdr & DESC_HDR_MODE0_ENCRYPT) == 0) {
-				link_tbl_ptr->j_extent = 0;
-				link_tbl_ptr++;
-				link_tbl_ptr->j_extent = DESC_PTR_LNKTBL_RETURN;
-				link_tbl_ptr->len = cpu_to_be16(authsize);
-				sg = sg_last(areq->src, edesc->src_nents ? : 1);
-				link_tbl_ptr->ptr = cpu_to_be32(
-						(char *)sg_dma_address(sg)
-						+ sg->length - authsize);
-			}
 		} else {
 			/* Only one segment now, so no link tbl needed */
 			desc->ptr[4].ptr = cpu_to_be32(sg_dma_address(areq->src));
@@ -975,13 +999,9 @@ static int ipsec_esp(struct ipsec_esp_edesc *edesc, struct aead_request *areq,
 		desc->ptr[5].ptr = cpu_to_be32((struct talitos_ptr *)
 					       edesc->dma_link_tbl +
 					       edesc->src_nents + 1);
-		if (areq->src == areq->dst) {
-			memcpy(link_tbl_ptr, &edesc->link_tbl[0],
-			       edesc->src_nents * sizeof(struct talitos_ptr));
-		} else {
-			sg_count = sg_to_link_tbl(areq->dst, sg_count, cryptlen,
-						  link_tbl_ptr);
-		}
+		sg_count = sg_to_link_tbl(areq->dst, sg_count, cryptlen,
+					  link_tbl_ptr);
+
 		/* Add an entry to the link table for ICV data */
 		link_tbl_ptr += sg_count - 1;
 		link_tbl_ptr->j_extent = 0;
@@ -1106,11 +1126,14 @@ static int aead_authenc_encrypt(struct aead_request *req)
 	return ipsec_esp(edesc, req, NULL, 0, ipsec_esp_encrypt_done);
 }
 
 static int aead_authenc_decrypt(struct aead_request *req)
 {
 	struct crypto_aead *authenc = crypto_aead_reqtfm(req);
 	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
 	unsigned int authsize = ctx->authsize;
+	struct talitos_private *priv = dev_get_drvdata(ctx->dev);
 	struct ipsec_esp_edesc *edesc;
 	struct scatterlist *sg;
 	void *icvdata;
@@ -1122,22 +1145,39 @@ static int aead_authenc_decrypt(struct aead_request *req)
 	if (IS_ERR(edesc))
 		return PTR_ERR(edesc);
 
-	/* stash incoming ICV for later cmp with ICV generated by the h/w */
-	if (edesc->dma_len)
-		icvdata = &edesc->link_tbl[edesc->src_nents +
-					   edesc->dst_nents + 2];
-	else
-		icvdata = &edesc->link_tbl[0];
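+	/* Check the ICV in hardware when the SEC revision and buffer layout allow it. */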
+	if ((priv->features & TALITOS_FTR_HW_AUTH_CHECK) &&
+	    (((!edesc->src_nents && !edesc->dst_nents) ||
+		priv->features & TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT))) {
+
+		/* decrypt and check the ICV */
+		edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND |
+				  DESC_HDR_MODE1_MDEU_CICV;
+
+		/* reset integrity check result bits */
+		edesc->desc.hdr_lo = 0;
+
+		return ipsec_esp(edesc, req, NULL, 0, ipsec_esp_decrypt_hwauth_done);
+
+	} else {
+
+		/* Have to check the ICV with software */
 
-	sg = sg_last(req->src, edesc->src_nents ? : 1);
+		edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;
+
+		/* stash incoming ICV for later cmp with ICV generated by the h/w */
+		if (edesc->dma_len)
+			icvdata = &edesc->link_tbl[edesc->src_nents +
+						   edesc->dst_nents + 2];
+		else
+			icvdata = &edesc->link_tbl[0];
 
-	memcpy(icvdata, (char *)sg_virt(sg) + sg->length - ctx->authsize,
-	       ctx->authsize);
+		sg = sg_last(req->src, edesc->src_nents ? : 1);
 
-	/* decrypt */
-	edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;
+		memcpy(icvdata, (char *)sg_virt(sg) + sg->length - ctx->authsize,
+		       ctx->authsize);
 
-	return ipsec_esp(edesc, req, NULL, 0, ipsec_esp_decrypt_done);
+		return ipsec_esp(edesc, req, NULL, 0, ipsec_esp_decrypt_swauth_done);
+	}
 }
 
 static int aead_authenc_givencrypt(
@@ -1391,7 +1431,6 @@ static int talitos_remove(struct of_device *ofdev)
 	}
 
 	tasklet_kill(&priv->done_task);
-	tasklet_kill(&priv->error_task);
 
 	iounmap(priv->reg);
 
@@ -1451,10 +1490,9 @@ static int talitos_probe(struct of_device *ofdev,
 
 	priv->ofdev = ofdev;
 
-	INIT_LIST_HEAD(&priv->alg_list);
-
 	tasklet_init(&priv->done_task, talitos_done, (unsigned long)dev);
-	tasklet_init(&priv->error_task, talitos_error, (unsigned long)dev);
+
+	INIT_LIST_HEAD(&priv->alg_list);
 
 	priv->irq = irq_of_parse_and_map(np, 0);
 
@@ -1508,6 +1546,9 @@ static int talitos_probe(struct of_device *ofdev,
 	if (of_device_is_compatible(np, "fsl,sec3.0"))
 		priv->features |= TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT;
 
+	if (of_device_is_compatible(np, "fsl,sec2.1"))
+		priv->features |= TALITOS_FTR_HW_AUTH_CHECK;
+
 	priv->head_lock = kmalloc(sizeof(spinlock_t) * priv->num_channels,
 				  GFP_KERNEL);
 	priv->tail_lock = kmalloc(sizeof(spinlock_t) * priv->num_channels,
@@ -1551,7 +1592,7 @@ static int talitos_probe(struct of_device *ofdev,
 		goto err_out;
 	}
 	for (i = 0; i < priv->num_channels; i++)
-		atomic_set(&priv->submit_count[i], -priv->chfifo_len);
+		atomic_set(&priv->submit_count[i], -(priv->chfifo_len - 1));
 
 	priv->head = kzalloc(sizeof(int) * priv->num_channels, GFP_KERNEL);
 	priv->tail = kzalloc(sizeof(int) * priv->num_channels, GFP_KERNEL);
diff --git a/drivers/crypto/talitos.h b/drivers/crypto/talitos.h
index c48a405abf70c90a1f3cd177d8cde363abd36bb4..575981f0cfda76d943859a8ce1a5c74e5d2eafac 100644
--- a/drivers/crypto/talitos.h
+++ b/drivers/crypto/talitos.h
@@ -37,7 +37,8 @@
 #define TALITOS_MCR_LO			0x1038
 #define   TALITOS_MCR_SWR		0x1     /* s/w reset */
 #define TALITOS_IMR			0x1008  /* interrupt mask register */
-#define   TALITOS_IMR_INIT		0x10fff /* enable channel IRQs */
+#define   TALITOS_IMR_INIT		0x100ff /* enable channel IRQs */
+#define   TALITOS_IMR_DONE		0x00055 /* done IRQs */
 #define TALITOS_IMR_LO			0x100C
 #define   TALITOS_IMR_LO_INIT		0x20000 /* allow RNGU error IRQs */
 #define TALITOS_ISR			0x1010  /* interrupt status register */
@@ -55,6 +56,7 @@
 #define   TALITOS_CCCR_CONT		0x2    /* channel continue */
 #define   TALITOS_CCCR_RESET		0x1    /* channel reset */
 #define TALITOS_CCCR_LO(ch)		(ch * TALITOS_CH_STRIDE + 0x110c)
+#define   TALITOS_CCCR_LO_IWSE		0x80   /* chan. ICCR writeback enab. */
 #define   TALITOS_CCCR_LO_CDWE		0x10   /* chan. done writeback enab. */
 #define   TALITOS_CCCR_LO_NT		0x4    /* notification type */
 #define   TALITOS_CCCR_LO_CDIE		0x2    /* channel done IRQ enable */
@@ -102,6 +104,9 @@
 #define TALITOS_AESUISR_LO		0x4034
 #define TALITOS_MDEUISR			0x6030 /* message digest unit */
 #define TALITOS_MDEUISR_LO		0x6034
+#define TALITOS_MDEUICR			0x6038 /* interrupt control */
+#define TALITOS_MDEUICR_LO		0x603c
+#define   TALITOS_MDEUICR_LO_ICE	0x4000 /* integrity check IRQ enable */
 #define TALITOS_AFEUISR			0x8030 /* arc4 unit */
 #define TALITOS_AFEUISR_LO		0x8034
 #define TALITOS_RNGUISR			0xa030 /* random number unit */
@@ -129,31 +134,34 @@
  */
 
 /* written back when done */
-#define DESC_HDR_DONE			__constant_cpu_to_be32(0xff000000)
+#define DESC_HDR_DONE			cpu_to_be32(0xff000000)
+#define DESC_HDR_LO_ICCR1_MASK		cpu_to_be32(0x00180000)
+#define DESC_HDR_LO_ICCR1_PASS		cpu_to_be32(0x00080000)
+#define DESC_HDR_LO_ICCR1_FAIL		cpu_to_be32(0x00100000)
 
 /* primary execution unit select */
-#define	DESC_HDR_SEL0_MASK		__constant_cpu_to_be32(0xf0000000)
-#define	DESC_HDR_SEL0_AFEU		__constant_cpu_to_be32(0x10000000)
-#define	DESC_HDR_SEL0_DEU		__constant_cpu_to_be32(0x20000000)
-#define	DESC_HDR_SEL0_MDEUA		__constant_cpu_to_be32(0x30000000)
-#define	DESC_HDR_SEL0_MDEUB		__constant_cpu_to_be32(0xb0000000)
-#define	DESC_HDR_SEL0_RNG		__constant_cpu_to_be32(0x40000000)
-#define	DESC_HDR_SEL0_PKEU		__constant_cpu_to_be32(0x50000000)
-#define	DESC_HDR_SEL0_AESU		__constant_cpu_to_be32(0x60000000)
-#define	DESC_HDR_SEL0_KEU		__constant_cpu_to_be32(0x70000000)
-#define	DESC_HDR_SEL0_CRCU		__constant_cpu_to_be32(0x80000000)
+#define	DESC_HDR_SEL0_MASK		cpu_to_be32(0xf0000000)
+#define	DESC_HDR_SEL0_AFEU		cpu_to_be32(0x10000000)
+#define	DESC_HDR_SEL0_DEU		cpu_to_be32(0x20000000)
+#define	DESC_HDR_SEL0_MDEUA		cpu_to_be32(0x30000000)
+#define	DESC_HDR_SEL0_MDEUB		cpu_to_be32(0xb0000000)
+#define	DESC_HDR_SEL0_RNG		cpu_to_be32(0x40000000)
+#define	DESC_HDR_SEL0_PKEU		cpu_to_be32(0x50000000)
+#define	DESC_HDR_SEL0_AESU		cpu_to_be32(0x60000000)
+#define	DESC_HDR_SEL0_KEU		cpu_to_be32(0x70000000)
+#define	DESC_HDR_SEL0_CRCU		cpu_to_be32(0x80000000)
 
 /* primary execution unit mode (MODE0) and derivatives */
-#define	DESC_HDR_MODE0_ENCRYPT		__constant_cpu_to_be32(0x00100000)
-#define	DESC_HDR_MODE0_AESU_CBC		__constant_cpu_to_be32(0x00200000)
-#define	DESC_HDR_MODE0_DEU_CBC		__constant_cpu_to_be32(0x00400000)
-#define	DESC_HDR_MODE0_DEU_3DES		__constant_cpu_to_be32(0x00200000)
-#define	DESC_HDR_MODE0_MDEU_INIT	__constant_cpu_to_be32(0x01000000)
-#define	DESC_HDR_MODE0_MDEU_HMAC	__constant_cpu_to_be32(0x00800000)
-#define	DESC_HDR_MODE0_MDEU_PAD		__constant_cpu_to_be32(0x00400000)
-#define	DESC_HDR_MODE0_MDEU_MD5		__constant_cpu_to_be32(0x00200000)
-#define	DESC_HDR_MODE0_MDEU_SHA256	__constant_cpu_to_be32(0x00100000)
-#define	DESC_HDR_MODE0_MDEU_SHA1	__constant_cpu_to_be32(0x00000000)
+#define	DESC_HDR_MODE0_ENCRYPT		cpu_to_be32(0x00100000)
+#define	DESC_HDR_MODE0_AESU_CBC		cpu_to_be32(0x00200000)
+#define	DESC_HDR_MODE0_DEU_CBC		cpu_to_be32(0x00400000)
+#define	DESC_HDR_MODE0_DEU_3DES		cpu_to_be32(0x00200000)
+#define	DESC_HDR_MODE0_MDEU_INIT	cpu_to_be32(0x01000000)
+#define	DESC_HDR_MODE0_MDEU_HMAC	cpu_to_be32(0x00800000)
+#define	DESC_HDR_MODE0_MDEU_PAD		cpu_to_be32(0x00400000)
+#define	DESC_HDR_MODE0_MDEU_MD5		cpu_to_be32(0x00200000)
+#define	DESC_HDR_MODE0_MDEU_SHA256	cpu_to_be32(0x00100000)
+#define	DESC_HDR_MODE0_MDEU_SHA1	cpu_to_be32(0x00000000)
 #define	DESC_HDR_MODE0_MDEU_MD5_HMAC	(DESC_HDR_MODE0_MDEU_MD5 | \
 					 DESC_HDR_MODE0_MDEU_HMAC)
 #define	DESC_HDR_MODE0_MDEU_SHA256_HMAC	(DESC_HDR_MODE0_MDEU_SHA256 | \
@@ -162,18 +170,19 @@
 					 DESC_HDR_MODE0_MDEU_HMAC)
 
 /* secondary execution unit select (SEL1) */
-#define	DESC_HDR_SEL1_MASK		__constant_cpu_to_be32(0x000f0000)
-#define	DESC_HDR_SEL1_MDEUA		__constant_cpu_to_be32(0x00030000)
-#define	DESC_HDR_SEL1_MDEUB		__constant_cpu_to_be32(0x000b0000)
-#define	DESC_HDR_SEL1_CRCU		__constant_cpu_to_be32(0x00080000)
+#define	DESC_HDR_SEL1_MASK		cpu_to_be32(0x000f0000)
+#define	DESC_HDR_SEL1_MDEUA		cpu_to_be32(0x00030000)
+#define	DESC_HDR_SEL1_MDEUB		cpu_to_be32(0x000b0000)
+#define	DESC_HDR_SEL1_CRCU		cpu_to_be32(0x00080000)
 
 /* secondary execution unit mode (MODE1) and derivatives */
-#define	DESC_HDR_MODE1_MDEU_INIT	__constant_cpu_to_be32(0x00001000)
-#define	DESC_HDR_MODE1_MDEU_HMAC	__constant_cpu_to_be32(0x00000800)
-#define	DESC_HDR_MODE1_MDEU_PAD		__constant_cpu_to_be32(0x00000400)
-#define	DESC_HDR_MODE1_MDEU_MD5		__constant_cpu_to_be32(0x00000200)
-#define	DESC_HDR_MODE1_MDEU_SHA256	__constant_cpu_to_be32(0x00000100)
-#define	DESC_HDR_MODE1_MDEU_SHA1	__constant_cpu_to_be32(0x00000000)
+#define	DESC_HDR_MODE1_MDEU_CICV	cpu_to_be32(0x00004000)
+#define	DESC_HDR_MODE1_MDEU_INIT	cpu_to_be32(0x00001000)
+#define	DESC_HDR_MODE1_MDEU_HMAC	cpu_to_be32(0x00000800)
+#define	DESC_HDR_MODE1_MDEU_PAD		cpu_to_be32(0x00000400)
+#define	DESC_HDR_MODE1_MDEU_MD5		cpu_to_be32(0x00000200)
+#define	DESC_HDR_MODE1_MDEU_SHA256	cpu_to_be32(0x00000100)
+#define	DESC_HDR_MODE1_MDEU_SHA1	cpu_to_be32(0x00000000)
 #define	DESC_HDR_MODE1_MDEU_MD5_HMAC	(DESC_HDR_MODE1_MDEU_MD5 | \
 					 DESC_HDR_MODE1_MDEU_HMAC)
 #define	DESC_HDR_MODE1_MDEU_SHA256_HMAC	(DESC_HDR_MODE1_MDEU_SHA256 | \
@@ -182,16 +191,16 @@
 					 DESC_HDR_MODE1_MDEU_HMAC)
 
 /* direction of overall data flow (DIR) */
-#define	DESC_HDR_DIR_INBOUND		__constant_cpu_to_be32(0x00000002)
+#define	DESC_HDR_DIR_INBOUND		cpu_to_be32(0x00000002)
 
 /* request done notification (DN) */
-#define	DESC_HDR_DONE_NOTIFY		__constant_cpu_to_be32(0x00000001)
+#define	DESC_HDR_DONE_NOTIFY		cpu_to_be32(0x00000001)
 
 /* descriptor types */
-#define DESC_HDR_TYPE_AESU_CTR_NONSNOOP		__constant_cpu_to_be32(0 << 3)
-#define DESC_HDR_TYPE_IPSEC_ESP			__constant_cpu_to_be32(1 << 3)
-#define DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU	__constant_cpu_to_be32(2 << 3)
-#define DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU	__constant_cpu_to_be32(4 << 3)
+#define DESC_HDR_TYPE_AESU_CTR_NONSNOOP		cpu_to_be32(0 << 3)
+#define DESC_HDR_TYPE_IPSEC_ESP			cpu_to_be32(1 << 3)
+#define DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU	cpu_to_be32(2 << 3)
+#define DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU	cpu_to_be32(4 << 3)
 
 /* link table extent field bits */
 #define DESC_PTR_LNKTBL_JUMP			0x80
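The ICV-check plumbing added above (DESC_HDR_MODE1_MDEU_CICV plus the DESC_HDR_LO_ICCR1_* writeback masks) is what the new ipsec_esp_decrypt_hwauth_done path on the talitos.c side is meant to consume. That callback is not shown in these hunks, so the following is only a sketch of the status decode these definitions imply; talitos_icv_status() is a hypothetical helper, and this header plus <linux/errno.h> are assumed to be included.

static int talitos_icv_status(__be32 hdr_lo)
{
	/* ICCR1 carries the MDEU integrity-check result written back
	 * into the descriptor header low word by the SEC.
	 */
	if ((hdr_lo & DESC_HDR_LO_ICCR1_MASK) == DESC_HDR_LO_ICCR1_PASS)
		return 0;

	/* anything other than an explicit pass is treated as a bad ICV */
	return -EBADMSG;
}
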
diff --git a/include/crypto/aes.h b/include/crypto/aes.h
index 40008d67ee3daf26573ad8648b491365134aa4d6..656a4c66a568f39cf769210bbfaab6d7dc6c7a07 100644
--- a/include/crypto/aes.h
+++ b/include/crypto/aes.h
@@ -23,10 +23,10 @@ struct crypto_aes_ctx {
 	u32 key_dec[AES_MAX_KEYLENGTH_U32];
 };
 
-extern u32 crypto_ft_tab[4][256];
-extern u32 crypto_fl_tab[4][256];
-extern u32 crypto_it_tab[4][256];
-extern u32 crypto_il_tab[4][256];
+extern const u32 crypto_ft_tab[4][256];
+extern const u32 crypto_fl_tab[4][256];
+extern const u32 crypto_it_tab[4][256];
+extern const u32 crypto_il_tab[4][256];
 
 int crypto_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
 		unsigned int key_len);
diff --git a/include/crypto/algapi.h b/include/crypto/algapi.h
index 60d06e784be3aa54464c0263e4611c087cc0a17c..010545436efa22593f4a124c9a68849f78a85872 100644
--- a/include/crypto/algapi.h
+++ b/include/crypto/algapi.h
@@ -22,9 +22,18 @@ struct seq_file;
 
 struct crypto_type {
 	unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
+	unsigned int (*extsize)(struct crypto_alg *alg,
+				const struct crypto_type *frontend);
 	int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask);
-	void (*exit)(struct crypto_tfm *tfm);
+	int (*init_tfm)(struct crypto_tfm *tfm,
+		        const struct crypto_type *frontend);
 	void (*show)(struct seq_file *m, struct crypto_alg *alg);
+	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);
+
+	unsigned int type;
+	unsigned int maskclear;
+	unsigned int maskset;
+	unsigned int tfmsize;
 };
 
 struct crypto_instance {
@@ -239,6 +248,11 @@ static inline struct crypto_hash *crypto_spawn_hash(struct crypto_spawn *spawn)
 	return __crypto_hash_cast(crypto_spawn_tfm(spawn, type, mask));
 }
 
+static inline void *crypto_hash_ctx(struct crypto_hash *tfm)
+{
+	return crypto_tfm_ctx(&tfm->base);
+}
+
 static inline void *crypto_hash_ctx_aligned(struct crypto_hash *tfm)
 {
 	return crypto_tfm_ctx_aligned(&tfm->base);
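The fields added to struct crypto_type above (extsize, init_tfm, lookup, and the type/maskclear/maskset/tfmsize group) are how a transform frontend describes itself to the extended crypto_alloc_tfm(). A hedged sketch of such a descriptor follows; every example_* name and every field value is an illustrative assumption, not code from this patch.

#include <crypto/algapi.h>
#include <crypto/hash.h>
#include <linux/stddef.h>

static unsigned int example_extsize(struct crypto_alg *alg,
				    const struct crypto_type *frontend)
{
	/* extra context bytes this frontend wants appended to the tfm */
	return alg->cra_ctxsize;
}

static int example_init_tfm(struct crypto_tfm *tfm,
			    const struct crypto_type *frontend)
{
	/* wire up frontend-specific ops for the freshly allocated tfm */
	return 0;
}

static const struct crypto_type example_frontend = {
	.extsize	= example_extsize,
	.init_tfm	= example_init_tfm,
	.maskclear	= ~CRYPTO_ALG_TYPE_MASK,
	.maskset	= CRYPTO_ALG_TYPE_MASK,
	.type		= CRYPTO_ALG_TYPE_SHASH,
	.tfmsize	= offsetof(struct crypto_shash, base),
};
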
diff --git a/include/crypto/hash.h b/include/crypto/hash.h
index ee48ef8fb2eafff29c8aefe67024709d7696e69d..cd16d6e668ce78a523a7865f747d5078ad0ecd76 100644
--- a/include/crypto/hash.h
+++ b/include/crypto/hash.h
@@ -15,10 +15,40 @@
 
 #include <linux/crypto.h>
 
+struct shash_desc {
+	struct crypto_shash *tfm;
+	u32 flags;
+
+	void *__ctx[] CRYPTO_MINALIGN_ATTR;
+};
+
+struct shash_alg {
+	int (*init)(struct shash_desc *desc);
+	int (*reinit)(struct shash_desc *desc);
+	int (*update)(struct shash_desc *desc, const u8 *data,
+		      unsigned int len);
+	int (*final)(struct shash_desc *desc, u8 *out);
+	int (*finup)(struct shash_desc *desc, const u8 *data,
+		     unsigned int len, u8 *out);
+	int (*digest)(struct shash_desc *desc, const u8 *data,
+		      unsigned int len, u8 *out);
+	int (*setkey)(struct crypto_shash *tfm, const u8 *key,
+		      unsigned int keylen);
+
+	unsigned int descsize;
+	unsigned int digestsize;
+
+	struct crypto_alg base;
+};
+
 struct crypto_ahash {
 	struct crypto_tfm base;
 };
 
+struct crypto_shash {
+	struct crypto_tfm base;
+};
+
 static inline struct crypto_ahash *__crypto_ahash_cast(struct crypto_tfm *tfm)
 {
 	return (struct crypto_ahash *)tfm;
@@ -87,6 +117,11 @@ static inline unsigned int crypto_ahash_reqsize(struct crypto_ahash *tfm)
 	return crypto_ahash_crt(tfm)->reqsize;
 }
 
+static inline void *ahash_request_ctx(struct ahash_request *req)
+{
+	return req->__ctx;
+}
+
 static inline int crypto_ahash_setkey(struct crypto_ahash *tfm,
 				      const u8 *key, unsigned int keylen)
 {
@@ -101,6 +136,14 @@ static inline int crypto_ahash_digest(struct ahash_request *req)
 	return crt->digest(req);
 }
 
+static inline void crypto_ahash_export(struct ahash_request *req, u8 *out)
+{
+	memcpy(out, ahash_request_ctx(req),
+	       crypto_ahash_reqsize(crypto_ahash_reqtfm(req)));
+}
+
+int crypto_ahash_import(struct ahash_request *req, const u8 *in);
+
 static inline int crypto_ahash_init(struct ahash_request *req)
 {
 	struct ahash_tfm *crt = crypto_ahash_crt(crypto_ahash_reqtfm(req));
@@ -169,4 +212,86 @@ static inline void ahash_request_set_crypt(struct ahash_request *req,
 	req->result = result;
 }
 
+struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
+					u32 mask);
+
+static inline struct crypto_tfm *crypto_shash_tfm(struct crypto_shash *tfm)
+{
+	return &tfm->base;
+}
+
+static inline void crypto_free_shash(struct crypto_shash *tfm)
+{
+	crypto_free_tfm(crypto_shash_tfm(tfm));
+}
+
+static inline unsigned int crypto_shash_alignmask(
+	struct crypto_shash *tfm)
+{
+	return crypto_tfm_alg_alignmask(crypto_shash_tfm(tfm));
+}
+
+static inline struct shash_alg *__crypto_shash_alg(struct crypto_alg *alg)
+{
+	return container_of(alg, struct shash_alg, base);
+}
+
+static inline struct shash_alg *crypto_shash_alg(struct crypto_shash *tfm)
+{
+	return __crypto_shash_alg(crypto_shash_tfm(tfm)->__crt_alg);
+}
+
+static inline unsigned int crypto_shash_digestsize(struct crypto_shash *tfm)
+{
+	return crypto_shash_alg(tfm)->digestsize;
+}
+
+static inline u32 crypto_shash_get_flags(struct crypto_shash *tfm)
+{
+	return crypto_tfm_get_flags(crypto_shash_tfm(tfm));
+}
+
+static inline void crypto_shash_set_flags(struct crypto_shash *tfm, u32 flags)
+{
+	crypto_tfm_set_flags(crypto_shash_tfm(tfm), flags);
+}
+
+static inline void crypto_shash_clear_flags(struct crypto_shash *tfm, u32 flags)
+{
+	crypto_tfm_clear_flags(crypto_shash_tfm(tfm), flags);
+}
+
+static inline unsigned int crypto_shash_descsize(struct crypto_shash *tfm)
+{
+	return crypto_shash_alg(tfm)->descsize;
+}
+
+static inline void *shash_desc_ctx(struct shash_desc *desc)
+{
+	return desc->__ctx;
+}
+
+int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
+			unsigned int keylen);
+int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
+			unsigned int len, u8 *out);
+
+static inline void crypto_shash_export(struct shash_desc *desc, u8 *out)
+{
+	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
+}
+
+int crypto_shash_import(struct shash_desc *desc, const u8 *in);
+
+static inline int crypto_shash_init(struct shash_desc *desc)
+{
+	return crypto_shash_alg(desc->tfm)->init(desc);
+}
+
+int crypto_shash_update(struct shash_desc *desc, const u8 *data,
+			unsigned int len);
+int crypto_shash_final(struct shash_desc *desc, u8 *out);
+int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
+		       unsigned int len, u8 *out);
+
 #endif	/* _CRYPTO_HASH_H */
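Taken together, the declarations above give callers a synchronous hash flow of allocate, init, update, final. A minimal usage sketch, assuming some shash provider registered as "md5" exists; the function below is illustrative and not part of the patch.

#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/slab.h>

static int example_shash_digest(const u8 *data, unsigned int len, u8 *out)
{
	struct crypto_shash *tfm;
	struct shash_desc *desc;
	int err;

	tfm = crypto_alloc_shash("md5", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* descsize tells us how much per-request state to reserve */
	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
	if (!desc) {
		crypto_free_shash(tfm);
		return -ENOMEM;
	}
	desc->tfm = tfm;
	desc->flags = 0;

	err = crypto_shash_init(desc);
	if (!err)
		err = crypto_shash_update(desc, data, len);
	if (!err)
		err = crypto_shash_final(desc, out);

	kfree(desc);
	crypto_free_shash(tfm);
	return err;
}
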
diff --git a/include/crypto/internal/hash.h b/include/crypto/internal/hash.h
index 917ae57bad4ac2ec19415221f78c24e596fd908e..82b70564bcabe53cc9b109eee004fe419a8e5e44 100644
--- a/include/crypto/internal/hash.h
+++ b/include/crypto/internal/hash.h
@@ -39,6 +39,12 @@ extern const struct crypto_type crypto_ahash_type;
 int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err);
 int crypto_hash_walk_first(struct ahash_request *req,
 			   struct crypto_hash_walk *walk);
+int crypto_hash_walk_first_compat(struct hash_desc *hdesc,
+				  struct crypto_hash_walk *walk,
+				  struct scatterlist *sg, unsigned int len);
+
+int crypto_register_shash(struct shash_alg *alg);
+int crypto_unregister_shash(struct shash_alg *alg);
 
 static inline void *crypto_ahash_ctx(struct crypto_ahash *tfm)
 {
@@ -63,16 +69,16 @@ static inline struct ahash_request *ahash_dequeue_request(
 	return ahash_request_cast(crypto_dequeue_request(queue));
 }
 
-static inline void *ahash_request_ctx(struct ahash_request *req)
-{
-	return req->__ctx;
-}
-
 static inline int ahash_tfm_in_queue(struct crypto_queue *queue,
 					  struct crypto_ahash *tfm)
 {
 	return crypto_tfm_in_queue(queue, crypto_ahash_tfm(tfm));
 }
 
+static inline void *crypto_shash_ctx(struct crypto_shash *tfm)
+{
+	return crypto_tfm_ctx(&tfm->base);
+}
+
 #endif	/* _CRYPTO_INTERNAL_HASH_H */
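On the provider side, crypto_register_shash() takes a filled-in struct shash_alg from <crypto/hash.h>. The sketch below registers a deliberately trivial 32-bit placeholder hash just to show the shape of a registration; all example_* identifiers and the "example-zero32" name are invented for illustration.

#include <crypto/hash.h>
#include <crypto/internal/hash.h>
#include <linux/module.h>
#include <linux/string.h>

static int example_init(struct shash_desc *desc)
{
	u32 *state = shash_desc_ctx(desc);

	*state = 0;
	return 0;
}

static int example_update(struct shash_desc *desc, const u8 *data,
			  unsigned int len)
{
	/* a real provider would fold data into the per-desc state here */
	return 0;
}

static int example_final(struct shash_desc *desc, u8 *out)
{
	memcpy(out, shash_desc_ctx(desc), sizeof(u32));
	return 0;
}

static struct shash_alg example_alg = {
	.init		= example_init,
	.update		= example_update,
	.final		= example_final,
	.descsize	= sizeof(u32),
	.digestsize	= sizeof(u32),
	.base		= {
		.cra_name	= "example-zero32",
		.cra_flags	= CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize	= 1,
		.cra_module	= THIS_MODULE,
	},
};

static int __init example_mod_init(void)
{
	return crypto_register_shash(&example_alg);
}

static void __exit example_mod_exit(void)
{
	crypto_unregister_shash(&example_alg);
}

module_init(example_mod_init);
module_exit(example_mod_exit);
MODULE_LICENSE("GPL");
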
 
diff --git a/include/linux/crc32c.h b/include/linux/crc32c.h
index 508f512e5a2fcebbaf1f243e7a4fd42a7ace96db..bd8b44d96bdcbd38e764ddfa7b8d6cbed43ebe5d 100644
--- a/include/linux/crc32c.h
+++ b/include/linux/crc32c.h
@@ -3,9 +3,9 @@
 
 #include <linux/types.h>
 
-extern u32 crc32c_le(u32 crc, unsigned char const *address, size_t length);
-extern u32 crc32c_be(u32 crc, unsigned char const *address, size_t length);
+extern u32 crc32c(u32 crc, const void *address, unsigned int length);
 
-#define crc32c(seed, data, length)  crc32c_le(seed, (unsigned char const *)data, length)
+/* This macro exists for backwards-compatibility. */
+#define crc32c_le crc32c
 
 #endif	/* _LINUX_CRC32C_H */
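Existing crc32c_le() users keep compiling through the compatibility #define, and new callers can use crc32c() directly. A minimal caller follows; the seed-and-invert convention shown is a common caller-side choice, assumed here rather than imposed by this header, and the wrapper name is illustrative.

#include <linux/crc32c.h>

static u32 example_buf_crc32c(const void *buf, unsigned int len)
{
	/* start from an all-ones seed and invert the final remainder */
	return ~crc32c(~0, buf, len);
}
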
diff --git a/include/linux/crypto.h b/include/linux/crypto.h
index 3d2317e4af2e4b4475c2fc18ff1db3b475ae1d52..3bacd71509fb1ce7197ac675044807a2d76f2fd1 100644
--- a/include/linux/crypto.h
+++ b/include/linux/crypto.h
@@ -36,7 +36,8 @@
 #define CRYPTO_ALG_TYPE_ABLKCIPHER	0x00000005
 #define CRYPTO_ALG_TYPE_GIVCIPHER	0x00000006
 #define CRYPTO_ALG_TYPE_DIGEST		0x00000008
-#define CRYPTO_ALG_TYPE_HASH		0x00000009
+#define CRYPTO_ALG_TYPE_HASH		0x00000008
+#define CRYPTO_ALG_TYPE_SHASH		0x00000009
 #define CRYPTO_ALG_TYPE_AHASH		0x0000000a
 #define CRYPTO_ALG_TYPE_RNG		0x0000000c
 
@@ -220,6 +221,7 @@ struct ablkcipher_alg {
 
 struct ahash_alg {
 	int (*init)(struct ahash_request *req);
+	int (*reinit)(struct ahash_request *req);
 	int (*update)(struct ahash_request *req);
 	int (*final)(struct ahash_request *req);
 	int (*digest)(struct ahash_request *req);
@@ -480,6 +482,8 @@ struct crypto_tfm {
 		struct compress_tfm compress;
 		struct rng_tfm rng;
 	} crt_u;
+
+	void (*exit)(struct crypto_tfm *tfm);
 	
 	struct crypto_alg *__crt_alg;
 
@@ -544,7 +548,9 @@ struct crypto_attr_u32 {
  * Transform user interface.
  */
  
-struct crypto_tfm *crypto_alloc_tfm(const char *alg_name, u32 tfm_flags);
+struct crypto_tfm *crypto_alloc_tfm(const char *alg_name,
+				    const struct crypto_type *frontend,
+				    u32 type, u32 mask);
 struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask);
 void crypto_free_tfm(struct crypto_tfm *tfm);
 
diff --git a/lib/Kconfig b/lib/Kconfig
index 85cf7ea978aafe6606e6e44cc634a82e6cf29477..fd4118e097f0711c9c08cf6e08575678fcc5ec16 100644
--- a/lib/Kconfig
+++ b/lib/Kconfig
@@ -64,6 +64,8 @@ config CRC7
 
 config LIBCRC32C
 	tristate "CRC32c (Castagnoli, et al) Cyclic Redundancy-Check"
+	select CRYPTO
+	select CRYPTO_CRC32C
 	help
 	  This option is provided for the case where no in-kernel-tree
 	  modules require CRC32c functions, but a module built outside the
diff --git a/lib/libcrc32c.c b/lib/libcrc32c.c
index b5c3287d8ea47567220cbafd2211dd12b5239356..244f5480c898c783059873deefd91764086d41b6 100644
--- a/lib/libcrc32c.c
+++ b/lib/libcrc32c.c
@@ -30,168 +30,52 @@
  * any later version.
  *
  */
-#include <linux/crc32c.h>
-#include <linux/compiler.h>
-#include <linux/module.h>
-
-MODULE_AUTHOR("Clay Haapala <chaapala@cisco.com>");
-MODULE_DESCRIPTION("CRC32c (Castagnoli) calculations");
-MODULE_LICENSE("GPL");
 
-#define CRC32C_POLY_BE 0x1EDC6F41
-#define CRC32C_POLY_LE 0x82F63B78
+#include <crypto/hash.h>
+#include <linux/err.h>
+#include <linux/init.h>
+#include <linux/kernel.h>
+#include <linux/module.h>
 
-#ifndef CRC_LE_BITS 
-# define CRC_LE_BITS 8
-#endif
+static struct crypto_shash *tfm;
 
+u32 crc32c(u32 crc, const void *address, unsigned int length)
+{
+	struct {
+		struct shash_desc shash;
+		char ctx[crypto_shash_descsize(tfm)];
+	} desc;
+	int err;
 
-/*
- * Haven't generated a big-endian table yet, but the bit-wise version
- * should at least work.
- */
-#if defined CRC_BE_BITS && CRC_BE_BITS != 1
-#undef CRC_BE_BITS
-#endif
-#ifndef CRC_BE_BITS
-# define CRC_BE_BITS 1
-#endif
+	desc.shash.tfm = tfm;
+	desc.shash.flags = 0;
+	*(u32 *)desc.ctx = crc;
 
-EXPORT_SYMBOL(crc32c_le);
+	err = crypto_shash_update(&desc.shash, address, length);
+	BUG_ON(err);
 
-#if CRC_LE_BITS == 1
-/*
- * Compute things bit-wise, as done in crc32.c.  We could share the tight 
- * loop below with crc32 and vary the POLY if we don't find value in terms
- * of space and maintainability in keeping the two modules separate.
- */
-u32 __pure
-crc32c_le(u32 crc, unsigned char const *p, size_t len)
-{
-	int i;
-	while (len--) {
-		crc ^= *p++;
-		for (i = 0; i < 8; i++)
-			crc = (crc >> 1) ^ ((crc & 1) ? CRC32C_POLY_LE : 0);
-	}
-	return crc;
+	return *(u32 *)desc.ctx;
 }
-#else
-
-/*
- * This is the CRC-32C table
- * Generated with:
- * width = 32 bits
- * poly = 0x1EDC6F41
- * reflect input bytes = true
- * reflect output bytes = true
- */
-
-static const u32 crc32c_table[256] = {
-	0x00000000L, 0xF26B8303L, 0xE13B70F7L, 0x1350F3F4L,
-	0xC79A971FL, 0x35F1141CL, 0x26A1E7E8L, 0xD4CA64EBL,
-	0x8AD958CFL, 0x78B2DBCCL, 0x6BE22838L, 0x9989AB3BL,
-	0x4D43CFD0L, 0xBF284CD3L, 0xAC78BF27L, 0x5E133C24L,
-	0x105EC76FL, 0xE235446CL, 0xF165B798L, 0x030E349BL,
-	0xD7C45070L, 0x25AFD373L, 0x36FF2087L, 0xC494A384L,
-	0x9A879FA0L, 0x68EC1CA3L, 0x7BBCEF57L, 0x89D76C54L,
-	0x5D1D08BFL, 0xAF768BBCL, 0xBC267848L, 0x4E4DFB4BL,
-	0x20BD8EDEL, 0xD2D60DDDL, 0xC186FE29L, 0x33ED7D2AL,
-	0xE72719C1L, 0x154C9AC2L, 0x061C6936L, 0xF477EA35L,
-	0xAA64D611L, 0x580F5512L, 0x4B5FA6E6L, 0xB93425E5L,
-	0x6DFE410EL, 0x9F95C20DL, 0x8CC531F9L, 0x7EAEB2FAL,
-	0x30E349B1L, 0xC288CAB2L, 0xD1D83946L, 0x23B3BA45L,
-	0xF779DEAEL, 0x05125DADL, 0x1642AE59L, 0xE4292D5AL,
-	0xBA3A117EL, 0x4851927DL, 0x5B016189L, 0xA96AE28AL,
-	0x7DA08661L, 0x8FCB0562L, 0x9C9BF696L, 0x6EF07595L,
-	0x417B1DBCL, 0xB3109EBFL, 0xA0406D4BL, 0x522BEE48L,
-	0x86E18AA3L, 0x748A09A0L, 0x67DAFA54L, 0x95B17957L,
-	0xCBA24573L, 0x39C9C670L, 0x2A993584L, 0xD8F2B687L,
-	0x0C38D26CL, 0xFE53516FL, 0xED03A29BL, 0x1F682198L,
-	0x5125DAD3L, 0xA34E59D0L, 0xB01EAA24L, 0x42752927L,
-	0x96BF4DCCL, 0x64D4CECFL, 0x77843D3BL, 0x85EFBE38L,
-	0xDBFC821CL, 0x2997011FL, 0x3AC7F2EBL, 0xC8AC71E8L,
-	0x1C661503L, 0xEE0D9600L, 0xFD5D65F4L, 0x0F36E6F7L,
-	0x61C69362L, 0x93AD1061L, 0x80FDE395L, 0x72966096L,
-	0xA65C047DL, 0x5437877EL, 0x4767748AL, 0xB50CF789L,
-	0xEB1FCBADL, 0x197448AEL, 0x0A24BB5AL, 0xF84F3859L,
-	0x2C855CB2L, 0xDEEEDFB1L, 0xCDBE2C45L, 0x3FD5AF46L,
-	0x7198540DL, 0x83F3D70EL, 0x90A324FAL, 0x62C8A7F9L,
-	0xB602C312L, 0x44694011L, 0x5739B3E5L, 0xA55230E6L,
-	0xFB410CC2L, 0x092A8FC1L, 0x1A7A7C35L, 0xE811FF36L,
-	0x3CDB9BDDL, 0xCEB018DEL, 0xDDE0EB2AL, 0x2F8B6829L,
-	0x82F63B78L, 0x709DB87BL, 0x63CD4B8FL, 0x91A6C88CL,
-	0x456CAC67L, 0xB7072F64L, 0xA457DC90L, 0x563C5F93L,
-	0x082F63B7L, 0xFA44E0B4L, 0xE9141340L, 0x1B7F9043L,
-	0xCFB5F4A8L, 0x3DDE77ABL, 0x2E8E845FL, 0xDCE5075CL,
-	0x92A8FC17L, 0x60C37F14L, 0x73938CE0L, 0x81F80FE3L,
-	0x55326B08L, 0xA759E80BL, 0xB4091BFFL, 0x466298FCL,
-	0x1871A4D8L, 0xEA1A27DBL, 0xF94AD42FL, 0x0B21572CL,
-	0xDFEB33C7L, 0x2D80B0C4L, 0x3ED04330L, 0xCCBBC033L,
-	0xA24BB5A6L, 0x502036A5L, 0x4370C551L, 0xB11B4652L,
-	0x65D122B9L, 0x97BAA1BAL, 0x84EA524EL, 0x7681D14DL,
-	0x2892ED69L, 0xDAF96E6AL, 0xC9A99D9EL, 0x3BC21E9DL,
-	0xEF087A76L, 0x1D63F975L, 0x0E330A81L, 0xFC588982L,
-	0xB21572C9L, 0x407EF1CAL, 0x532E023EL, 0xA145813DL,
-	0x758FE5D6L, 0x87E466D5L, 0x94B49521L, 0x66DF1622L,
-	0x38CC2A06L, 0xCAA7A905L, 0xD9F75AF1L, 0x2B9CD9F2L,
-	0xFF56BD19L, 0x0D3D3E1AL, 0x1E6DCDEEL, 0xEC064EEDL,
-	0xC38D26C4L, 0x31E6A5C7L, 0x22B65633L, 0xD0DDD530L,
-	0x0417B1DBL, 0xF67C32D8L, 0xE52CC12CL, 0x1747422FL,
-	0x49547E0BL, 0xBB3FFD08L, 0xA86F0EFCL, 0x5A048DFFL,
-	0x8ECEE914L, 0x7CA56A17L, 0x6FF599E3L, 0x9D9E1AE0L,
-	0xD3D3E1ABL, 0x21B862A8L, 0x32E8915CL, 0xC083125FL,
-	0x144976B4L, 0xE622F5B7L, 0xF5720643L, 0x07198540L,
-	0x590AB964L, 0xAB613A67L, 0xB831C993L, 0x4A5A4A90L,
-	0x9E902E7BL, 0x6CFBAD78L, 0x7FAB5E8CL, 0x8DC0DD8FL,
-	0xE330A81AL, 0x115B2B19L, 0x020BD8EDL, 0xF0605BEEL,
-	0x24AA3F05L, 0xD6C1BC06L, 0xC5914FF2L, 0x37FACCF1L,
-	0x69E9F0D5L, 0x9B8273D6L, 0x88D28022L, 0x7AB90321L,
-	0xAE7367CAL, 0x5C18E4C9L, 0x4F48173DL, 0xBD23943EL,
-	0xF36E6F75L, 0x0105EC76L, 0x12551F82L, 0xE03E9C81L,
-	0x34F4F86AL, 0xC69F7B69L, 0xD5CF889DL, 0x27A40B9EL,
-	0x79B737BAL, 0x8BDCB4B9L, 0x988C474DL, 0x6AE7C44EL,
-	0xBE2DA0A5L, 0x4C4623A6L, 0x5F16D052L, 0xAD7D5351L
-};
 
-/*
- * Steps through buffer one byte at at time, calculates reflected 
- * crc using table.
- */
+EXPORT_SYMBOL(crc32c);
 
-u32 __pure
-crc32c_le(u32 crc, unsigned char const *data, size_t length)
+static int __init libcrc32c_mod_init(void)
 {
-	while (length--)
-		crc =
-		    crc32c_table[(crc ^ *data++) & 0xFFL] ^ (crc >> 8);
+	tfm = crypto_alloc_shash("crc32c", 0, 0);
+	if (IS_ERR(tfm))
+		return PTR_ERR(tfm);
 
-	return crc;
+	return 0;
 }
 
-#endif	/* CRC_LE_BITS == 8 */
-
-EXPORT_SYMBOL(crc32c_be);
-
-#if CRC_BE_BITS == 1
-u32 __pure
-crc32c_be(u32 crc, unsigned char const *p, size_t len)
+static void __exit libcrc32c_mod_fini(void)
 {
-	int i;
-	while (len--) {
-		crc ^= *p++ << 24;
-		for (i = 0; i < 8; i++)
-			crc =
-			    (crc << 1) ^ ((crc & 0x80000000) ? CRC32C_POLY_BE :
-					  0);
-	}
-	return crc;
+	crypto_free_shash(tfm);
 }
-#endif
 
-/*
- * Unit test
- *
- * A small unit test suite is implemented as part of the crypto suite.
- * Select CRYPTO_CRC32C and use the tcrypt module to run the tests.
- */
+module_init(libcrc32c_mod_init);
+module_exit(libcrc32c_mod_fini);
+
+MODULE_AUTHOR("Clay Haapala <chaapala@cisco.com>");
+MODULE_DESCRIPTION("CRC32c (Castagnoli) calculations");
+MODULE_LICENSE("GPL");