| author | Linus Torvalds <torvalds@linux-foundation.org> | 2018-04-04 17:11:08 -0700 |
|---|---|---|
| committer | Linus Torvalds <torvalds@linux-foundation.org> | 2018-04-04 17:11:08 -0700 |
| commit | 9eb31227cbccd3a37da0f42604f1ab5fc556bc53 | |
| tree | 9aa467e620e002bf01cecdd98e3908e0cc3e7221 | /crypto |
| parent | 527cd20771888443b5d8707debe98f62c7a1f596 | |
| parent | f444ec106407d600f17fa1a4bd14f84577401dec | |
Merge branch 'linus' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
Pull crypto updates from Herbert Xu:
"API:
- add AEAD support to crypto engine
- allow batch registration in simd
Algorithms:
- add CFB mode
- add speck block cipher
- add sm4 block cipher
- new test case for crct10dif
- improve scheduling latency on ARM
- scatter/gather support to gcm in aesni
- convert x86 crypto algorithms to skcipher
Drivers:
- hmac(sha224/sha256) support in inside-secure
- aes gcm/ccm support in stm32
- stm32mp1 support in stm32
- ccree driver from staging tree
- gcm support over QI in caam
- add ks-sa hwrng driver"
* 'linus' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (212 commits)
crypto: ccree - remove unused enums
crypto: ahash - Fix early termination in hash walk
crypto: brcm - explicitly cast cipher to hash type
crypto: talitos - don't leak pointers to authenc keys
crypto: qat - don't leak pointers to authenc keys
crypto: picoxcell - don't leak pointers to authenc keys
crypto: ixp4xx - don't leak pointers to authenc keys
crypto: chelsio - don't leak pointers to authenc keys
crypto: caam/qi - don't leak pointers to authenc keys
crypto: caam - don't leak pointers to authenc keys
crypto: lrw - Free rctx->ext with kzfree
crypto: talitos - fix IPsec cipher in length
crypto: Deduplicate le32_to_cpu_array() and cpu_to_le32_array()
crypto: doc - clarify hash callbacks state machine
crypto: api - Keep failed instances alive
crypto: api - Make crypto_alg_lookup static
crypto: api - Remove unused crypto_type lookup function
crypto: chelsio - Remove declaration of static function from header
crypto: inside-secure - hmac(sha224) support
crypto: inside-secure - hmac(sha256) support
...
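Among the user-visible additions above, the new CFB template ("add CFB mode") is reached through the standard template syntax, so in-kernel users would request it as "cfb(aes)" via the skcipher API. The sketch below is only an illustration of that usage, not code from this merge: the function name, key/IV handling, and buffer management are hypothetical, and it assumes a kernel with CONFIG_CRYPTO_CFB enabled and the crypto_wait_req() helpers available.

```c
#include <crypto/skcipher.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Hypothetical helper: encrypt one buffer in place with cfb(aes).
 * buf must be kmalloc'd (not stack) memory so it can be mapped into
 * a scatterlist; iv must hold one AES block (16 bytes).
 */
static int cfb_aes_encrypt_example(u8 *buf, unsigned int len,
				   const u8 *key, unsigned int keylen, u8 *iv)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	/* "cfb(aes)" resolves to the new CFB template wrapped around AES. */
	tfm = crypto_alloc_skcipher("cfb(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	sg_init_one(&sg, buf, len);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
					   CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, len, iv);

	/* Wait synchronously even if the implementation is asynchronous. */
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}
```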
Diffstat (limited to 'crypto')
| mode | path | lines changed |
|---|---|---|
| -rw-r--r-- | crypto/Kconfig | 129 |
| -rw-r--r-- | crypto/Makefile | 4 |
| -rw-r--r-- | crypto/ablk_helper.c | 150 |
| -rw-r--r-- | crypto/ahash.c | 25 |
| -rw-r--r-- | crypto/algapi.c | 8 |
| -rw-r--r-- | crypto/api.c | 34 |
| -rw-r--r-- | crypto/cfb.c | 353 |
| -rw-r--r-- | crypto/crypto_engine.c | 301 |
| -rw-r--r-- | crypto/crypto_user.c | 2 |
| -rw-r--r-- | crypto/ecc.c | 23 |
| -rw-r--r-- | crypto/ecdh.c | 23 |
| -rw-r--r-- | crypto/internal.h | 1 |
| -rw-r--r-- | crypto/lrw.c | 154 |
| -rw-r--r-- | crypto/mcryptd.c | 34 |
| -rw-r--r-- | crypto/md4.c | 17 |
| -rw-r--r-- | crypto/md5.c | 17 |
| -rw-r--r-- | crypto/rsa-pkcs1pad.c | 2 |
| -rw-r--r-- | crypto/simd.c | 50 |
| -rw-r--r-- | crypto/sm4_generic.c | 244 |
| -rw-r--r-- | crypto/speck.c | 307 |
| -rw-r--r-- | crypto/tcrypt.c | 3 |
| -rw-r--r-- | crypto/testmgr.c | 45 |
| -rw-r--r-- | crypto/testmgr.h | 1882 |
| -rw-r--r-- | crypto/xts.c | 72 |
24 files changed, 3219 insertions, 661 deletions
diff --git a/crypto/Kconfig b/crypto/Kconfig
index b75264b09a46..c0dabed5122e 100644
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -245,10 +245,6 @@ config CRYPTO_TEST
 	help
 	  Quick & dirty crypto test module.
 
-config CRYPTO_ABLK_HELPER
-	tristate
-	select CRYPTO_CRYPTD
-
 config CRYPTO_SIMD
 	tristate
 	select CRYPTO_CRYPTD
@@ -324,6 +320,14 @@ config CRYPTO_CBC
 	  CBC: Cipher Block Chaining mode
 	  This block cipher algorithm is required for IPSec.
 
+config CRYPTO_CFB
+	tristate "CFB support"
+	select CRYPTO_BLKCIPHER
+	select CRYPTO_MANAGER
+	help
+	  CFB: Cipher FeedBack mode
+	  This block cipher algorithm is required for TPM2 Cryptography.
+
 config CRYPTO_CTR
 	tristate "CTR support"
 	select CRYPTO_BLKCIPHER
@@ -1114,7 +1118,7 @@ config CRYPTO_BLOWFISH_COMMON
 config CRYPTO_BLOWFISH_X86_64
 	tristate "Blowfish cipher algorithm (x86_64)"
 	depends on X86 && 64BIT
-	select CRYPTO_ALGAPI
+	select CRYPTO_BLKCIPHER
 	select CRYPTO_BLOWFISH_COMMON
 	help
 	  Blowfish cipher algorithm (x86_64), by Bruce Schneier.
@@ -1145,10 +1149,8 @@ config CRYPTO_CAMELLIA_X86_64
 	tristate "Camellia cipher algorithm (x86_64)"
 	depends on X86 && 64BIT
 	depends on CRYPTO
-	select CRYPTO_ALGAPI
+	select CRYPTO_BLKCIPHER
 	select CRYPTO_GLUE_HELPER_X86
-	select CRYPTO_LRW
-	select CRYPTO_XTS
 	help
 	  Camellia cipher algorithm module (x86_64).
@@ -1164,12 +1166,10 @@ config CRYPTO_CAMELLIA_AESNI_AVX_X86_64
 	tristate "Camellia cipher algorithm (x86_64/AES-NI/AVX)"
 	depends on X86 && 64BIT
 	depends on CRYPTO
-	select CRYPTO_ALGAPI
-	select CRYPTO_CRYPTD
-	select CRYPTO_ABLK_HELPER
-	select CRYPTO_GLUE_HELPER_X86
+	select CRYPTO_BLKCIPHER
 	select CRYPTO_CAMELLIA_X86_64
-	select CRYPTO_LRW
+	select CRYPTO_GLUE_HELPER_X86
+	select CRYPTO_SIMD
 	select CRYPTO_XTS
 	help
 	  Camellia cipher algorithm module (x86_64/AES-NI/AVX).
@@ -1186,14 +1186,7 @@ config CRYPTO_CAMELLIA_AESNI_AVX2_X86_64
 	tristate "Camellia cipher algorithm (x86_64/AES-NI/AVX2)"
 	depends on X86 && 64BIT
 	depends on CRYPTO
-	select CRYPTO_ALGAPI
-	select CRYPTO_CRYPTD
-	select CRYPTO_ABLK_HELPER
-	select CRYPTO_GLUE_HELPER_X86
-	select CRYPTO_CAMELLIA_X86_64
 	select CRYPTO_CAMELLIA_AESNI_AVX_X86_64
-	select CRYPTO_LRW
-	select CRYPTO_XTS
 	help
 	  Camellia cipher algorithm module (x86_64/AES-NI/AVX2).
@@ -1238,11 +1231,10 @@ config CRYPTO_CAST5
 config CRYPTO_CAST5_AVX_X86_64
 	tristate "CAST5 (CAST-128) cipher algorithm (x86_64/AVX)"
 	depends on X86 && 64BIT
-	select CRYPTO_ALGAPI
-	select CRYPTO_CRYPTD
-	select CRYPTO_ABLK_HELPER
-	select CRYPTO_CAST_COMMON
+	select CRYPTO_BLKCIPHER
 	select CRYPTO_CAST5
+	select CRYPTO_CAST_COMMON
+	select CRYPTO_SIMD
 	help
 	  The CAST5 encryption algorithm (synonymous with CAST-128) is
 	  described in RFC2144.
@@ -1261,13 +1253,11 @@ config CRYPTO_CAST6
 config CRYPTO_CAST6_AVX_X86_64
 	tristate "CAST6 (CAST-256) cipher algorithm (x86_64/AVX)"
 	depends on X86 && 64BIT
-	select CRYPTO_ALGAPI
-	select CRYPTO_CRYPTD
-	select CRYPTO_ABLK_HELPER
-	select CRYPTO_GLUE_HELPER_X86
-	select CRYPTO_CAST_COMMON
+	select CRYPTO_BLKCIPHER
 	select CRYPTO_CAST6
-	select CRYPTO_LRW
+	select CRYPTO_CAST_COMMON
+	select CRYPTO_GLUE_HELPER_X86
+	select CRYPTO_SIMD
 	select CRYPTO_XTS
 	help
 	  The CAST6 encryption algorithm (synonymous with CAST-256) is
@@ -1294,7 +1284,7 @@ config CRYPTO_DES_SPARC64
 config CRYPTO_DES3_EDE_X86_64
 	tristate "Triple DES EDE cipher algorithm (x86-64)"
 	depends on X86 && 64BIT
-	select CRYPTO_ALGAPI
+	select CRYPTO_BLKCIPHER
 	select CRYPTO_DES
 	help
 	  Triple DES EDE (FIPS 46-3) algorithm.
@@ -1422,13 +1412,10 @@ config CRYPTO_SERPENT
 config CRYPTO_SERPENT_SSE2_X86_64
 	tristate "Serpent cipher algorithm (x86_64/SSE2)"
 	depends on X86 && 64BIT
-	select CRYPTO_ALGAPI
-	select CRYPTO_CRYPTD
-	select CRYPTO_ABLK_HELPER
+	select CRYPTO_BLKCIPHER
 	select CRYPTO_GLUE_HELPER_X86
 	select CRYPTO_SERPENT
-	select CRYPTO_LRW
-	select CRYPTO_XTS
+	select CRYPTO_SIMD
 	help
 	  Serpent cipher algorithm, by Anderson, Biham & Knudsen.
@@ -1444,13 +1431,10 @@ config CRYPTO_SERPENT_SSE2_X86_64
 config CRYPTO_SERPENT_SSE2_586
 	tristate "Serpent cipher algorithm (i586/SSE2)"
 	depends on X86 && !64BIT
-	select CRYPTO_ALGAPI
-	select CRYPTO_CRYPTD
-	select CRYPTO_ABLK_HELPER
+	select CRYPTO_BLKCIPHER
 	select CRYPTO_GLUE_HELPER_X86
 	select CRYPTO_SERPENT
-	select CRYPTO_LRW
-	select CRYPTO_XTS
+	select CRYPTO_SIMD
 	help
 	  Serpent cipher algorithm, by Anderson, Biham & Knudsen.
@@ -1466,12 +1450,10 @@ config CRYPTO_SERPENT_SSE2_586
 config CRYPTO_SERPENT_AVX_X86_64
 	tristate "Serpent cipher algorithm (x86_64/AVX)"
 	depends on X86 && 64BIT
-	select CRYPTO_ALGAPI
-	select CRYPTO_CRYPTD
-	select CRYPTO_ABLK_HELPER
+	select CRYPTO_BLKCIPHER
 	select CRYPTO_GLUE_HELPER_X86
 	select CRYPTO_SERPENT
-	select CRYPTO_LRW
+	select CRYPTO_SIMD
 	select CRYPTO_XTS
 	help
 	  Serpent cipher algorithm, by Anderson, Biham & Knudsen.
@@ -1488,14 +1470,7 @@ config CRYPTO_SERPENT_AVX_X86_64
 config CRYPTO_SERPENT_AVX2_X86_64
 	tristate "Serpent cipher algorithm (x86_64/AVX2)"
 	depends on X86 && 64BIT
-	select CRYPTO_ALGAPI
-	select CRYPTO_CRYPTD
-	select CRYPTO_ABLK_HELPER
-	select CRYPTO_GLUE_HELPER_X86
-	select CRYPTO_SERPENT
 	select CRYPTO_SERPENT_AVX_X86_64
-	select CRYPTO_LRW
-	select CRYPTO_XTS
 	help
 	  Serpent cipher algorithm, by Anderson, Biham & Knudsen.
@@ -1508,6 +1483,45 @@ config CRYPTO_SERPENT_AVX2_X86_64
 	  See also:
 	  <http://www.cl.cam.ac.uk/~rja14/serpent.html>
 
+config CRYPTO_SM4
+	tristate "SM4 cipher algorithm"
+	select CRYPTO_ALGAPI
+	help
+	  SM4 cipher algorithms (OSCCA GB/T 32907-2016).
+
+	  SM4 (GBT.32907-2016) is a cryptographic standard issued by the
+	  Organization of State Commercial Administration of China (OSCCA)
+	  as an authorized cryptographic algorithms for the use within China.
+
+	  SMS4 was originally created for use in protecting wireless
+	  networks, and is mandated in the Chinese National Standard for
+	  Wireless LAN WAPI (Wired Authentication and Privacy Infrastructure)
+	  (GB.15629.11-2003).
+
+	  The latest SM4 standard (GBT.32907-2016) was proposed by OSCCA and
+	  standardized through TC 260 of the Standardization Administration
+	  of the People's Republic of China (SAC).
+
+	  The input, output, and key of SMS4 are each 128 bits.
+
+	  See also: <https://eprint.iacr.org/2008/329.pdf>
+
+	  If unsure, say N.
+
+config CRYPTO_SPECK
+	tristate "Speck cipher algorithm"
+	select CRYPTO_ALGAPI
+	help
+	  Speck is a lightweight block cipher that is tuned for optimal
+	  performance in software (rather than hardware).
+
+	  Speck may not be as secure as AES, and should only be used on
+	  systems where AES is not fast enough.
+
+	  See also: <https://eprint.iacr.org/2013/404.pdf>
+
+	  If unsure, say N.
+
 config CRYPTO_TEA
 	tristate "TEA, XTEA and XETA cipher algorithms"
 	select CRYPTO_ALGAPI
@@ -1581,12 +1595,10 @@ config CRYPTO_TWOFISH_X86_64
 config CRYPTO_TWOFISH_X86_64_3WAY
 	tristate "Twofish cipher algorithm (x86_64, 3-way parallel)"
 	depends on X86 && 64BIT
-	select CRYPTO_ALGAPI
+	select CRYPTO_BLKCIPHER
 	select CRYPTO_TWOFISH_COMMON
 	select CRYPTO_TWOFISH_X86_64
 	select CRYPTO_GLUE_HELPER_X86
-	select CRYPTO_LRW
-	select CRYPTO_XTS
 	help
 	  Twofish cipher algorithm (x86_64, 3-way parallel).
@@ -1604,15 +1616,12 @@ config CRYPTO_TWOFISH_X86_64_3WAY
 config CRYPTO_TWOFISH_AVX_X86_64
 	tristate "Twofish cipher algorithm (x86_64/AVX)"
 	depends on X86 && 64BIT
-	select CRYPTO_ALGAPI
-	select CRYPTO_CRYPTD
-	select CRYPTO_ABLK_HELPER
+	select CRYPTO_BLKCIPHER
 	select CRYPTO_GLUE_HELPER_X86
+	select CRYPTO_SIMD
 	select CRYPTO_TWOFISH_COMMON
 	select CRYPTO_TWOFISH_X86_64
 	select CRYPTO_TWOFISH_X86_64_3WAY
-	select CRYPTO_LRW
-	select CRYPTO_XTS
 	help
 	  Twofish cipher algorithm (x86_64/AVX).
diff --git a/crypto/Makefile b/crypto/Makefile
index cdbc03b35510..4fc69fe94e6a 100644
--- a/crypto/Makefile
+++ b/crypto/Makefile
@@ -78,6 +78,7 @@ obj-$(CONFIG_CRYPTO_TGR192) += tgr192.o
 obj-$(CONFIG_CRYPTO_GF128MUL) += gf128mul.o
 obj-$(CONFIG_CRYPTO_ECB) += ecb.o
 obj-$(CONFIG_CRYPTO_CBC) += cbc.o
+obj-$(CONFIG_CRYPTO_CFB) += cfb.o
 obj-$(CONFIG_CRYPTO_PCBC) += pcbc.o
 obj-$(CONFIG_CRYPTO_CTS) += cts.o
 obj-$(CONFIG_CRYPTO_LRW) += lrw.o
@@ -100,6 +101,7 @@ obj-$(CONFIG_CRYPTO_SERPENT) += serpent_generic.o
 CFLAGS_serpent_generic.o := $(call cc-option,-fsched-pressure)  # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=79149
 obj-$(CONFIG_CRYPTO_AES) += aes_generic.o
 CFLAGS_aes_generic.o := $(call cc-option,-fno-code-hoisting) # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=83356
+obj-$(CONFIG_CRYPTO_SM4) += sm4_generic.o
 obj-$(CONFIG_CRYPTO_AES_TI) += aes_ti.o
 obj-$(CONFIG_CRYPTO_CAMELLIA) += camellia_generic.o
 obj-$(CONFIG_CRYPTO_CAST_COMMON) += cast_common.o
@@ -110,6 +112,7 @@ obj-$(CONFIG_CRYPTO_TEA) += tea.o
 obj-$(CONFIG_CRYPTO_KHAZAD) += khazad.o
 obj-$(CONFIG_CRYPTO_ANUBIS) += anubis.o
 obj-$(CONFIG_CRYPTO_SEED) += seed.o
+obj-$(CONFIG_CRYPTO_SPECK) += speck.o
 obj-$(CONFIG_CRYPTO_SALSA20) += salsa20_generic.o
 obj-$(CONFIG_CRYPTO_CHACHA20) += chacha20_generic.o
 obj-$(CONFIG_CRYPTO_POLY1305) += poly1305_generic.o
@@ -149,6 +152,5 @@ obj-$(CONFIG_XOR_BLOCKS) += xor.o
 obj-$(CONFIG_ASYNC_CORE) += async_tx/
 obj-$(CONFIG_ASYMMETRIC_KEY_TYPE) += asymmetric_keys/
 obj-$(CONFIG_CRYPTO_HASH_INFO) += hash_info.o
-obj-$(CONFIG_CRYPTO_ABLK_HELPER) += ablk_helper.o
 crypto_simd-y := simd.o
 obj-$(CONFIG_CRYPTO_SIMD) += crypto_simd.o
diff --git a/crypto/ablk_helper.c b/crypto/ablk_helper.c
deleted file mode 100644
index 09776bb1360e..000000000000
--- a/crypto/ablk_helper.c
+++ /dev/null
@@ -1,150 +0,0 @@
-/*
- * Shared async block cipher helpers
- *
- * Copyright (c) 2012 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
- *
- * Based on aesni-intel_glue.c by:
- *  Copyright (C) 2008, Intel Corp.
- *    Author: Huang Ying <ying.huang@intel.com>
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; either version 2 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- *
- */
-
-#include <linux/kernel.h>
-#include <linux/crypto.h>
-#include <linux/init.h>
-#include <linux/module.h>
-#include <crypto/algapi.h>
-#include <crypto/cryptd.h>
-#include <crypto/ablk_helper.h>
-#include <asm/simd.h>
-
-int ablk_set_key(struct crypto_ablkcipher *tfm, const u8 *key,
-		 unsigned int key_len)
-{
-	struct async_helper_ctx *ctx = crypto_ablkcipher_ctx(tfm);
-	struct crypto_ablkcipher *child = &ctx->cryptd_tfm->base;
-	int err;
-
-	crypto_ablkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
-	crypto_ablkcipher_set_flags(child, crypto_ablkcipher_get_flags(tfm)
-				    & CRYPTO_TFM_REQ_MASK);
-	err = crypto_ablkcipher_setkey(child, key, key_len);
-	crypto_ablkcipher_set_flags(tfm, crypto_ablkcipher_get_flags(child)
-				    & CRYPTO_TFM_RES_MASK);
-	return err;
-}
-EXPORT_SYMBOL_GPL(ablk_set_key);
-
-int __ablk_encrypt(struct ablkcipher_request *req)
-{
-	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
-	struct async_helper_ctx *ctx = crypto_ablkcipher_ctx(tfm);
-	struct blkcipher_desc desc;
-
-	desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
-	desc.info = req->info;
-	desc.flags = 0;
-
-	return crypto_blkcipher_crt(desc.tfm)->encrypt(
-		&desc, req->dst, req->src, req->nbytes);
-}
-EXPORT_SYMBOL_GPL(__ablk_encrypt);
-
-int ablk_encrypt(struct ablkcipher_request *req)
-{
-	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
-	struct async_helper_ctx *ctx = crypto_ablkcipher_ctx(tfm);
-
-	if (!may_use_simd() ||
-	    (in_atomic() && cryptd_ablkcipher_queued(ctx->cryptd_tfm))) {
-		struct ablkcipher_request *cryptd_req =
-			ablkcipher_request_ctx(req);
-
-		*cryptd_req = *req;
-		ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
-
-		return crypto_ablkcipher_encrypt(cryptd_req);
-	} else {
-		return __ablk_encrypt(req);
-	}
-}
-EXPORT_SYMBOL_GPL(ablk_encrypt);
-
-int ablk_decrypt(struct ablkcipher_request *req)
-{
-	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
-	struct async_helper_ctx *ctx = crypto_ablkcipher_ctx(tfm);
-
-	if (!may_use_simd() ||
-	    (in_atomic() && cryptd_ablkcipher_queued(ctx->cryptd_tfm))) {
-		struct ablkcipher_request *cryptd_req =
-			ablkcipher_request_ctx(req);
-
-		*cryptd_req = *req;
-		ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
-
-		return crypto_ablkcipher_decrypt(cryptd_req);
-	} else {
-		struct blkcipher_desc desc;
-
-		desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
-		desc.info = req->info;
-		desc.flags = 0;
-
-		return crypto_blkcipher_crt(desc.tfm)->decrypt(
-			&desc, req->dst, req->src, req->nbytes);
-	}
-}
-EXPORT_SYMBOL_GPL(ablk_decrypt);
-
-void ablk_exit(struct crypto_tfm *tfm)
-{
-	struct async_helper_ctx *ctx = crypto_tfm_ctx(tfm);
-
-	cryptd_free_ablkcipher(ctx->cryptd_tfm);
-}
-EXPORT_SYMBOL_GPL(ablk_exit);
-
-int ablk_init_common(struct crypto_tfm *tfm, const char *drv_name)
-{
-	struct async_helper_ctx *ctx = crypto_tfm_ctx(tfm);
-	struct cryptd_ablkcipher *cryptd_tfm;
-
-	cryptd_tfm = cryptd_alloc_ablkcipher(drv_name, CRYPTO_ALG_INTERNAL,
-					     CRYPTO_ALG_INTERNAL);
-	if (IS_ERR(cryptd_tfm))
-		return PTR_ERR(cryptd_tfm);
-
-	ctx->cryptd_tfm = cryptd_tfm;
-	tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request) +
-		crypto_ablkcipher_reqsize(&cryptd_tfm->base);
-
-	return 0;
-}
-EXPORT_SYMBOL_GPL(ablk_init_common);
-
-int ablk_init(struct crypto_tfm *tfm)
-{
-	char drv_name[CRYPTO_MAX_ALG_NAME];
-
-	snprintf(drv_name, sizeof(drv_name), "__driver-%s",
-		 crypto_tfm_alg_driver_name(tfm));
-
-	return ablk_init_common(tfm, drv_name);
-}
-EXPORT_SYMBOL_GPL(ablk_init);
-
-MODULE_LICENSE("GPL");
diff --git a/crypto/ahash.c b/crypto/ahash.c
index 266fc1d64f61..a64c143165b1 100644
--- a/crypto/ahash.c
+++ b/crypto/ahash.c
@@ -92,13 +92,14 @@ int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err)
 
 	if (nbytes && walk->offset & alignmask && !err) {
 		walk->offset = ALIGN(walk->offset, alignmask + 1);
-		walk->data += walk->offset;
-
 		nbytes = min(nbytes,
 			     ((unsigned int)(PAGE_SIZE)) - walk->offset);
 		walk->entrylen -= nbytes;
 
-		return nbytes;
+		if (nbytes) {
+			walk->data += walk->offset;
+			return nbytes;
+		}
 	}
 
 	if (walk->flags & CRYPTO_ALG_ASYNC)
@@ -446,24 +447,12 @@ static int ahash_def_finup(struct ahash_request *req)
 	return ahash_def_finup_finish1(req, err);
 }
 
-static int ahash_no_export(struct ahash_request *req, void *out)
-{
-	return -ENOSYS;
-}
-
-static int ahash_no_import(struct ahash_request *req, const void *in)
-{
-	return -ENOSYS;
-}
-
 static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
 {
 	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
 	struct ahash_alg *alg = crypto_ahash_alg(hash);
 
 	hash->setkey = ahash_nosetkey;
-	hash->export = ahash_no_export;
-	hash->import = ahash_no_import;
 
 	if (tfm->__crt_alg->cra_type != &crypto_ahash_type)
 		return crypto_init_shash_ops_async(tfm);
@@ -473,16 +462,14 @@ static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
 	hash->final = alg->final;
 	hash->finup = alg->finup ?: ahash_def_finup;
 	hash->digest = alg->digest;
+	hash->export = alg->export;
+	hash->import = alg->import;
 
 	if (alg->setkey) {
 		hash->setkey = alg->setkey;
 		if (!(alg->halg.base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
 			crypto_ahash_set_flags(hash, CRYPTO_TFM_NEED_KEY);
 	}
-	if (alg->export)
-		hash->export = alg->export;
-	if (alg->import)
-		hash->import = alg->import;
 
 	return 0;
 }
diff --git a/crypto/algapi.c b/crypto/algapi.c
index 395b082d03a9..2a0271b5f62a 100644
--- a/crypto/algapi.c
+++ b/crypto/algapi.c
@@ -543,9 +543,6 @@ int crypto_register_instance(struct crypto_template *tmpl,
 	inst->alg.cra_module = tmpl->module;
 	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;
 
-	if (unlikely(!crypto_mod_get(&inst->alg)))
-		return -EAGAIN;
-
 	down_write(&crypto_alg_sem);
 
 	larval = __crypto_register_alg(&inst->alg);
@@ -563,14 +560,9 @@ unlock:
 		goto err;
 
 	crypto_wait_for_test(larval);
-
-	/* Remove instance if test failed */
-	if (!(inst->alg.cra_flags & CRYPTO_ALG_TESTED))
-		crypto_unregister_instance(inst);
 	err = 0;
 
 err:
-	crypto_mod_put(&inst->alg);
 	return err;
 }
 EXPORT_SYMBOL_GPL(crypto_register_instance);
diff --git a/crypto/api.c b/crypto/api.c
index 70a894e52ff3..1d5290c67108 100644
--- a/crypto/api.c
+++ b/crypto/api.c
@@ -193,17 +193,24 @@ static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
 	return alg;
 }
 
-struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask)
+static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
+					    u32 mask)
 {
 	struct crypto_alg *alg;
+	u32 test = 0;
+
+	if (!((type | mask) & CRYPTO_ALG_TESTED))
+		test |= CRYPTO_ALG_TESTED;
 
 	down_read(&crypto_alg_sem);
-	alg = __crypto_alg_lookup(name, type, mask);
+	alg = __crypto_alg_lookup(name, type | test, mask | test);
+	if (!alg && test)
+		alg = __crypto_alg_lookup(name, type, mask) ?
+		      ERR_PTR(-ELIBBAD) : NULL;
 	up_read(&crypto_alg_sem);
 
 	return alg;
 }
-EXPORT_SYMBOL_GPL(crypto_alg_lookup);
 
 static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
 					       u32 mask)
@@ -227,10 +234,12 @@ static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
 		alg = crypto_alg_lookup(name, type, mask);
 	}
 
-	if (alg)
-		return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg;
+	if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
+		alg = crypto_larval_wait(alg);
+	else if (!alg)
+		alg = crypto_larval_add(name, type, mask);
 
-	return crypto_larval_add(name, type, mask);
+	return alg;
 }
 
 int crypto_probing_notify(unsigned long val, void *v)
@@ -253,11 +262,6 @@ struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
 	struct crypto_alg *larval;
 	int ok;
 
-	if (!((type | mask) & CRYPTO_ALG_TESTED)) {
-		type |= CRYPTO_ALG_TESTED;
-		mask |= CRYPTO_ALG_TESTED;
-	}
-
 	/*
 	 * If the internal flag is set for a cipher, require a caller to
 	 * to invoke the cipher with the internal flag to use that cipher.
@@ -485,20 +489,14 @@ struct crypto_alg *crypto_find_alg(const char *alg_name,
 				   const struct crypto_type *frontend,
 				   u32 type, u32 mask)
 {
-	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask) =
-		crypto_alg_mod_lookup;
-
 	if (frontend) {
 		type &= frontend->maskclear;
 		mask &= frontend->maskclear;
 		type |= frontend->type;
 		mask |= frontend->maskset;
-
-		if (frontend->lookup)
-			lookup = frontend->lookup;
 	}
 
-	return lookup(alg_name, type, mask);
+	return crypto_alg_mod_lookup(alg_name, type, mask);
 }
 EXPORT_SYMBOL_GPL(crypto_find_alg);
diff --git a/crypto/cfb.c b/crypto/cfb.c
new file mode 100644
index 000000000000..94ee39bed758
--- /dev/null
+++ b/crypto/cfb.c
@@ -0,0 +1,353 @@
+//SPDX-License-Identifier: GPL-2.0
+/*
+ * CFB: Cipher FeedBack mode
+ *
+ * Copyright (c) 2018 James.Bottomley@HansenPartnership.com
+ *
+ * CFB is a stream cipher mode which is layered on to a block
+ * encryption scheme.  It works very much like a one time pad where
+ * the pad is generated initially from the encrypted IV and then
+ * subsequently from the encrypted previous block of ciphertext.  The
+ * pad is XOR'd into the plain text to get the final ciphertext.
+ *
+ * The scheme of CFB is best described by wikipedia:
+ *
+ * https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#CFB
+ *
+ * Note that since the pad for both encryption and decryption is
+ * generated by an encryption operation, CFB never uses the block
+ * decryption function.
+ */
+
+#include <crypto/algapi.h>
+#include <crypto/internal/skcipher.h>
+#include <linux/err.h>
+#include <linux/init.h>
+#include <linux/kernel.h>
+#include <linux/module.h>
+#include <linux/slab.h>
+#include <linux/string.h>
+#include <linux/types.h>
+
+struct crypto_cfb_ctx {
+	struct crypto_cipher *child;
+};
+
+static unsigned int crypto_cfb_bsize(struct crypto_skcipher *tfm)
+{
+	struct crypto_cfb_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct crypto_cipher *child = ctx->child;
+
+	return crypto_cipher_blocksize(child);
+}
+
+static void crypto_cfb_encrypt_one(struct crypto_skcipher *tfm,
+				   const u8 *src, u8 *dst)
+{
+	struct crypto_cfb_ctx *ctx = crypto_skcipher_ctx(tfm);
+
+	crypto_cipher_encrypt_one(ctx->child, dst, src);
+}
+
+/* final encrypt and decrypt is the same */
+static void crypto_cfb_final(struct skcipher_walk *walk,
+			     struct crypto_skcipher *tfm)
+{
+	const unsigned int bsize = crypto_cfb_bsize(tfm);
+	const unsigned long alignmask = crypto_skcipher_alignmask(tfm);
+	u8 tmp[bsize + alignmask];
+	u8 *stream = PTR_ALIGN(tmp + 0, alignmask + 1);
+	u8 *src = walk->src.virt.addr;
+	u8 *dst = walk->dst.virt.addr;
+	u8 *iv = walk->iv;
+	unsigned int nbytes = walk->nbytes;
+
+	crypto_cfb_encrypt_one(tfm, iv, stream);
+	crypto_xor_cpy(dst, stream, src, nbytes);
+}
+
+static int crypto_cfb_encrypt_segment(struct skcipher_walk *walk,
+				      struct crypto_skcipher *tfm)
+{
+	const unsigned int bsize = crypto_cfb_bsize(tfm);
+	unsigned int nbytes = walk->nbytes;
+	u8 *src = walk->src.virt.addr;
+	u8 *dst = walk->dst.virt.addr;
+	u8 *iv = walk->iv;
+
+	do {
+		crypto_cfb_encrypt_one(tfm, iv, dst);
+		crypto_xor(dst, src, bsize);
+		memcpy(iv, dst, bsize);
+
+		src += bsize;
+		dst += bsize;
+	} while ((nbytes -= bsize) >= bsize);
+
+	return nbytes;
+}
+
+static int crypto_cfb_encrypt_inplace(struct skcipher_walk *walk,
+				      struct crypto_skcipher *tfm)
+{
+	const unsigned int bsize = crypto_cfb_bsize(tfm);
+	unsigned int nbytes = walk->nbytes;
+	u8 *src = walk->src.virt.addr;
+	u8 *iv = walk->iv;
+	u8 tmp[bsize];
+
+	do {
+		crypto_cfb_encrypt_one(tfm, iv, tmp);
+		crypto_xor(src, tmp, bsize);
+		iv = src;
+
+		src += bsize;
+	} while ((nbytes -= bsize) >= bsize);
+
+	memcpy(walk->iv, iv, bsize);
+
+	return nbytes;
+}
+
+static int crypto_cfb_encrypt(struct skcipher_request *req)
+{
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct skcipher_walk walk;
+	unsigned int bsize = crypto_cfb_bsize(tfm);
+	int err;
+
+	err = skcipher_walk_virt(&walk, req, false);
+
+	while (walk.nbytes >= bsize) {
+		if (walk.src.virt.addr == walk.dst.virt.addr)
+			err = crypto_cfb_encrypt_inplace(&walk, tfm);
+		else
+			err = crypto_cfb_encrypt_segment(&walk, tfm);
+		err = skcipher_walk_done(&walk, err);
+	}
+
+	if (walk.nbytes) {
+		crypto_cfb_final(&walk, tfm);
+		err = skcipher_walk_done(&walk, 0);
+	}
+
+	return err;
+}
+
+static int crypto_cfb_decrypt_segment(struct skcipher_walk *walk,
+				      struct crypto_skcipher *tfm)
+{
+	const unsigned int bsize = crypto_cfb_bsize(tfm);
+	unsigned int nbytes = walk->nbytes;
+	u8 *src = walk->src.virt.addr;
+	u8 *dst = walk->dst.virt.addr;
+	u8 *iv = walk->iv;
+
+	do {
+		crypto_cfb_encrypt_one(tfm, iv, dst);
+		crypto_xor(dst, iv, bsize);
+		iv = src;
+
+		src += bsize;
+		dst += bsize;
+	} while ((nbytes -= bsize) >= bsize);
+
+	memcpy(walk->iv, iv, bsize);
+
+	return nbytes;
+}
+
+static int crypto_cfb_decrypt_inplace(struct skcipher_walk *walk,
+				      struct crypto_skcipher *tfm)
+{
+	const unsigned int bsize = crypto_cfb_bsize(tfm);
+	unsigned int nbytes = walk->nbytes;
+	u8 *src = walk->src.virt.addr;
+	u8 *iv = walk->iv;
+	u8 tmp[bsize];
+
+	do {
+		crypto_cfb_encrypt_one(tfm, iv, tmp);
+		memcpy(iv, src, bsize);
+		crypto_xor(src, tmp, bsize);
+		src += bsize;
+	} while ((nbytes -= bsize) >= bsize);
+
+	memcpy(walk->iv, iv, bsize);
+
+	return nbytes;
+}
+
+static int crypto_cfb_decrypt_blocks(struct skcipher_walk *walk,
+				     struct crypto_skcipher *tfm)
+{
+	if (walk->src.virt.addr == walk->dst.virt.addr)
+		return crypto_cfb_decrypt_inplace(walk, tfm);
+	else
+		return crypto_cfb_decrypt_segment(walk, tfm);
+}
+
+static int crypto_cfb_setkey(struct crypto_skcipher *parent, const u8 *key,
+			     unsigned int keylen)
+{
+	struct crypto_cfb_ctx *ctx = crypto_skcipher_ctx(parent);
+	struct crypto_cipher *child = ctx->child;
+	int err;
+
+	crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
+	crypto_cipher_set_flags(child, crypto_skcipher_get_flags(parent) &
+				       CRYPTO_TFM_REQ_MASK);
+	err = crypto_cipher_setkey(child, key, keylen);
+	crypto_skcipher_set_flags(parent, crypto_cipher_get_flags(child) &
+					  CRYPTO_TFM_RES_MASK);
+	return err;
+}
+
+static int crypto_cfb_decrypt(struct skcipher_request *req)
+{
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct skcipher_walk walk;
+	const unsigned int bsize = crypto_cfb_bsize(tfm);
+	int err;
+
+	err = skcipher_walk_virt(&walk, req, false);
+
+	while (walk.nbytes >= bsize) {
+		err = crypto_cfb_decrypt_blocks(&walk, tfm);
+		err = skcipher_walk_done(&walk, err);
+	}
+
+	if (walk.nbytes) {
+		crypto_cfb_final(&walk, tfm);
+		err = skcipher_walk_done(&walk, 0);
+	}
+
+	return err;
+}
+
+static int crypto_cfb_init_tfm(struct crypto_skcipher *tfm)
+{
+	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
+	struct crypto_spawn *spawn = skcipher_instance_ctx(inst);
+	struct crypto_cfb_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct crypto_cipher *cipher;
+
+	cipher = crypto_spawn_cipher(spawn);
+	if (IS_ERR(cipher))
+		return PTR_ERR(cipher);
+
+	ctx->child = cipher;
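The header comment of the new crypto/cfb.c (shown, truncated, above) describes the mode in prose: the pad is the encryption of the previous ciphertext block (initially the IV), and even decryption only ever uses the cipher's encrypt direction. The standalone C sketch below illustrates the same idea outside the kernel; the `block_fn` type and the 16-byte block size are assumptions made for the example, not part of any kernel API.

```c
#include <stddef.h>
#include <stdint.h>
#include <string.h>

#define BS 16 /* assumed block size, e.g. AES */

/* Hypothetical single-block encryption primitive supplied by the caller. */
typedef void (*block_fn)(const void *key, const uint8_t in[BS], uint8_t out[BS]);

/* CFB encryption of whole blocks: pad = E_k(previous ciphertext block). */
static void cfb_encrypt(block_fn enc, const void *key, const uint8_t iv[BS],
			const uint8_t *pt, uint8_t *ct, size_t nblocks)
{
	uint8_t pad[BS], prev[BS];

	memcpy(prev, iv, BS);
	for (size_t i = 0; i < nblocks; i++) {
		enc(key, prev, pad);            /* never the decrypt direction */
		for (int j = 0; j < BS; j++)
			ct[j] = pt[j] ^ pad[j]; /* pad XOR'd into plaintext */
		memcpy(prev, ct, BS);           /* ciphertext feeds back */
		pt += BS;
		ct += BS;
	}
}

/* CFB decryption: identical pad generation, XOR'd into the ciphertext. */
static void cfb_decrypt(block_fn enc, const void *key, const uint8_t iv[BS],
			const uint8_t *ct, uint8_t *pt, size_t nblocks)
{
	uint8_t pad[BS], prev[BS];

	memcpy(prev, iv, BS);
	for (size_t i = 0; i < nblocks; i++) {
		enc(key, prev, pad);
		memcpy(prev, ct, BS);           /* save before writing pt (in-place safe) */
		for (int j = 0; j < BS; j++)
			pt[j] = ct[j] ^ pad[j];
		pt += BS;
		ct += BS;
	}
}
```

This also makes visible why the kernel implementation splits in-place from out-of-place paths: when `src == dst`, the previous ciphertext block must be saved before it is overwritten, which is exactly what `crypto_cfb_decrypt_inplace()` does with its temporary buffer.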
