#include "internal.h"
static LIST_HEAD(crypto_alg_list);
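+/* Serialises access to crypto_alg_list. */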
-static struct rw_semaphore crypto_alg_sem;
+static DECLARE_RWSEM(crypto_alg_sem);
static inline int crypto_alg_get(struct crypto_alg *alg)
{
struct crypto_alg *crypto_alg_lookup(char *name)
{
- struct list_head *p;
- struct crypto_alg *alg = NULL;
+ struct crypto_alg *q, *alg = NULL;
down_read(&crypto_alg_sem);
- list_for_each(p, &crypto_alg_list) {
- struct crypto_alg *q =
- list_entry(p, struct crypto_alg, cra_list);
-
+ list_for_each_entry(q, &crypto_alg_list, cra_list) {
if (!(strcmp(q->cra_name, name))) {
-
if (crypto_alg_get(q))
alg = q;
break;
}
}
up_read(&crypto_alg_sem);
return alg;
}
{
tfm->crt_flags = 0;
- if (flags & CRYPTO_TFM_REQ_ATOMIC)
- tfm->crt_flags |= CRYPTO_TFM_REQ_ATOMIC;
-
switch (crypto_tfm_alg_type(tfm)) {
case CRYPTO_ALG_TYPE_CIPHER:
return crypto_init_cipher_flags(tfm, flags);
kfree(tfm);
}
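+/*
+ * Partial blocks are bounced through fixed-size on-stack buffers,
+ * so reject any cipher whose block size exceeds
+ * CRYPTO_MAX_CIPHER_BLOCK_SIZE at registration time.
+ */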
+static inline int crypto_alg_blocksize_check(struct crypto_alg *alg)
+{
+ return ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK)
+ == CRYPTO_ALG_TYPE_CIPHER &&
+ alg->cra_blocksize > CRYPTO_MAX_CIPHER_BLOCK_SIZE);
+}
+
int crypto_register_alg(struct crypto_alg *alg)
{
int ret = 0;
- struct list_head *p;
+ struct crypto_alg *q;
down_write(&crypto_alg_sem);
- list_for_each(p, &crypto_alg_list) {
- struct crypto_alg *q =
- list_entry(p, struct crypto_alg, cra_list);
-
+ list_for_each_entry(q, &crypto_alg_list, cra_list) {
if (!(strcmp(q->cra_name, alg->cra_name))) {
ret = -EEXIST;
goto out;
}
}
- list_add_tail(&alg->cra_list, &crypto_alg_list);
+
+ if (crypto_alg_blocksize_check(alg)) {
+		printk(KERN_WARNING "%s: blocksize %Zd exceeds max. "
+		       "size %d\n", __FUNCTION__, alg->cra_blocksize,
+		       CRYPTO_MAX_CIPHER_BLOCK_SIZE);
+ ret = -EINVAL;
+	} else
+		list_add_tail(&alg->cra_list, &crypto_alg_list);
out:
up_write(&crypto_alg_sem);
return ret;
int crypto_unregister_alg(struct crypto_alg *alg)
{
int ret = -ENOENT;
- struct list_head *p;
+ struct crypto_alg *q;
BUG_ON(!alg->cra_module);
down_write(&crypto_alg_sem);
- list_for_each(p, &crypto_alg_list) {
- if (alg == (void *)p) {
- list_del(p);
+ list_for_each_entry(q, &crypto_alg_list, cra_list) {
+ if (alg == q) {
+ list_del(&alg->cra_list);
ret = 0;
goto out;
}
struct proc_dir_entry *proc;
printk(KERN_INFO "Initializing Cryptographic API\n");
- init_rwsem(&crypto_alg_sem);
proc = create_proc_entry("crypto", 0, NULL);
if (proc)
proc->proc_fops = &proc_crypto_ops;
clen = rlen;
}
- p = crypto_kmap(tfm, sg[i].page) + sg[i].offset + coff;
+ p = crypto_kmap(sg[i].page) + sg[i].offset + coff;
if (in)
memcpy(&buf[copied], p, clen);
else
memcpy(p, &buf[copied], clen);
- crypto_kunmap(tfm, p);
+ crypto_kunmap(p);
*last = aligned ? 0 : clen;
copied += clen;
}
{
int i, coff;
size_t bsize = crypto_tfm_alg_blocksize(tfm);
- u8 tmp[CRYPTO_MAX_BLOCK_SIZE];
+ u8 tmp[CRYPTO_MAX_CIPHER_BLOCK_SIZE];
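+	/* Bounce buffer for blocks straddling scatterlist entries;
+	 * cra_blocksize is bounded by crypto_register_alg(). */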
if (sglen(sg, nsg) % bsize) {
tfm->crt_flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
for (i = 0, coff = 0; i < nsg; i++) {
int n = 0, boff = 0;
int len = sg[i].length - coff;
- char *p = crypto_kmap(tfm, sg[i].page) + sg[i].offset + coff;
+ char *p = crypto_kmap(sg[i].page) + sg[i].offset + coff;
while (len) {
if (len < bsize) {
- crypto_kunmap(tfm, p);
+ crypto_kunmap(p);
n = gather_chunks(tfm, tmp, sg, i, len, &coff);
prfn(tfm, tmp, crfn, enc);
scatter_chunks(tfm, tmp, sg, i, len, &coff);
goto unmapped;
} else {
prfn(tfm, p, crfn, enc);
- crypto_kunmap(tfm, p);
+ crypto_kunmap(p);
crypto_yield(tfm);
/* remap and point to recalculated offset */
boff += bsize;
- p = crypto_kmap(tfm, sg[i].page)
- + sg[i].offset + coff + boff;
+ p = crypto_kmap(sg[i].page)
+ + sg[i].offset + coff + boff;
len -= bsize;
coff = 0;
}
}
- crypto_kunmap(tfm, p);
+ crypto_kunmap(p);
unmapped:
i += n;
memcpy(tfm->crt_cipher.cit_iv, block,
crypto_tfm_alg_blocksize(tfm));
} else {
- u8 buf[CRYPTO_MAX_BLOCK_SIZE];
+ u8 buf[CRYPTO_MAX_CIPHER_BLOCK_SIZE];
fn(tfm->crt_ctx, buf, block);
xor_64(buf, tfm->crt_cipher.cit_iv);
.cra_blocksize = DES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct des_ctx),
.cra_module = THIS_MODULE,
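+	/* Statically initialised so no INIT_LIST_HEAD() is needed. */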
+ .cra_list = LIST_HEAD_INIT(des_alg.cra_list),
.cra_u = { .cipher = {
.cia_keysize = DES_KEY_SIZE,
.cia_ivsize = DES_BLOCK_SIZE,
.cra_blocksize = DES3_EDE_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct des3_ede_ctx),
.cra_module = THIS_MODULE,
+ .cra_list = LIST_HEAD_INIT(des3_ede_alg.cra_list),
.cra_u = { .cipher = {
.cia_keysize = DES3_EDE_KEY_SIZE,
.cia_ivsize = DES3_EDE_BLOCK_SIZE,
{
int ret = 0;
- INIT_LIST_HEAD(&des_alg.cra_list);
- INIT_LIST_HEAD(&des3_ede_alg.cra_list);
-
ret = crypto_register_alg(&des_alg);
if (ret < 0)
goto out;
int i;
for (i = 0; i < nsg; i++) {
- char *p = crypto_kmap(tfm, sg[i].page) + sg[i].offset;
+ char *p = crypto_kmap(sg[i].page) + sg[i].offset;
tfm->__crt_alg->cra_digest.dia_update(tfm->crt_ctx,
p, sg[i].length);
- crypto_kunmap(tfm, p);
+ crypto_kunmap(p);
crypto_yield(tfm);
}
return;
tfm->crt_digest.dit_init(tfm);
for (i = 0; i < nsg; i++) {
- char *p = crypto_kmap(tfm, sg[i].page) + sg[i].offset;
+ char *p = crypto_kmap(sg[i].page) + sg[i].offset;
tfm->__crt_alg->cra_digest.dia_update(tfm->crt_ctx,
p, sg[i].length);
- crypto_kunmap(tfm, p);
+ crypto_kunmap(p);
crypto_yield(tfm);
}
crypto_digest_final(tfm, out);
#include <asm/hardirq.h>
#include <asm/softirq.h>
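+/*
+ * Atomic kmap slots are chosen from the execution context via
+ * in_softirq(), so callers need not thread per-tfm atomicity
+ * flags through to the mapping helpers.
+ */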
-static inline void *crypto_kmap(struct crypto_tfm *tfm, struct page *page)
+static inline void *crypto_kmap(struct page *page)
{
- if (tfm->crt_flags & CRYPTO_TFM_REQ_ATOMIC) {
-#ifdef CONFIG_HIGHMEM /* XXX: remove this after the api change */
- local_bh_disable();
-#endif
- return kmap_atomic(page, KM_CRYPTO_SOFTIRQ);
- } else
- return kmap_atomic(page, KM_CRYPTO_USER);
+ return kmap_atomic(page, in_softirq() ?
+ KM_CRYPTO_SOFTIRQ : KM_CRYPTO_USER);
}
-static inline void crypto_kunmap(struct crypto_tfm *tfm, void *vaddr)
+static inline void crypto_kunmap(void *vaddr)
{
- if (tfm->crt_flags & CRYPTO_TFM_REQ_ATOMIC) {
- kunmap_atomic(vaddr, KM_CRYPTO_SOFTIRQ);
-#ifdef CONFIG_HIGHMEM /* XXX: remove this after the api change */
- local_bh_enable();
-#endif
- } else
- kunmap_atomic(vaddr, KM_CRYPTO_USER);
+	kunmap_atomic(vaddr, in_softirq() ?
+		      KM_CRYPTO_SOFTIRQ : KM_CRYPTO_USER);
}
static inline void crypto_yield(struct crypto_tfm *tfm)
{
- if (!(tfm->crt_flags & CRYPTO_TFM_REQ_ATOMIC))
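+	/* Voluntary preemption is only safe in process context. */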
+ if (!in_softirq())
cond_resched();
}
.cra_blocksize = MD4_HMAC_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct md4_ctx),
.cra_module = THIS_MODULE,
+ .cra_list = LIST_HEAD_INIT(alg.cra_list),
.cra_u = { .digest = {
.dia_digestsize = MD4_DIGEST_SIZE,
.dia_init = md4_init,
static int __init init(void)
{
- INIT_LIST_HEAD(&alg.cra_list);
return crypto_register_alg(&alg);
}
.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct md5_ctx),
.cra_module = THIS_MODULE,
+ .cra_list = LIST_HEAD_INIT(alg.cra_list),
.cra_u = { .digest = {
.dia_digestsize = MD5_DIGEST_SIZE,
.dia_init = md5_init,
static int __init init(void)
{
- INIT_LIST_HEAD(&alg.cra_list);
return crypto_register_alg(&alg);
}
.cra_blocksize = SHA1_HMAC_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct sha1_ctx),
.cra_module = THIS_MODULE,
+ .cra_list = LIST_HEAD_INIT(alg.cra_list),
.cra_u = { .digest = {
.dia_digestsize = SHA1_DIGEST_SIZE,
.dia_init = sha1_init,
static int __init init(void)
{
- INIT_LIST_HEAD(&alg.cra_list);
return crypto_register_alg(&alg);
}
*/
i = 7;
key = des_tv[i].key;
- tfm->crt_flags = CRYPTO_TFM_REQ_ATOMIC;
+ tfm->crt_flags = 0;
ret = crypto_cipher_setkey(tfm, key, 8);
if (ret) {
for (i = 0; i < DES_CBC_ENC_TEST_VECTORS; i++) {
printk("test %d:\n", i + 1);
- tfm->crt_flags |= CRYPTO_TFM_REQ_ATOMIC;
key = des_tv[i].key;
ret = crypto_cipher_setkey(tfm, key, 8);
#define CRYPTO_TFM_MODE_CFB 0x00000004
#define CRYPTO_TFM_MODE_CTR 0x00000008
-#define CRYPTO_TFM_REQ_ATOMIC 0x00000100
-#define CRYPTO_TFM_REQ_WEAK_KEY 0x00000200
+#define CRYPTO_TFM_REQ_WEAK_KEY 0x00000100
#define CRYPTO_TFM_RES_WEAK_KEY 0x00100000
#define CRYPTO_TFM_RES_BAD_KEY_LEN 0x00200000
*/
#define CRYPTO_UNSPEC 0
#define CRYPTO_MAX_ALG_NAME 64
-#define CRYPTO_MAX_BLOCK_SIZE 16
+#define CRYPTO_MAX_CIPHER_BLOCK_SIZE 16
struct scatterlist;