Commit 53a82ae
---
r: 67028
b: refs/heads/master
c: f19f511
h: refs/heads/master
v: v3
Rik Snel authored and David S. Miller committed Oct 10, 2007
1 parent 991d2f8 commit 53a82ae
Showing 8 changed files with 747 additions and 1 deletion.
2 changes: 1 addition & 1 deletion [refs]
@@ -1,2 +1,2 @@
---
refs/heads/master: 5aaff0c8f7dd3515c9f1ca57f86463f30779acc7
refs/heads/master: f19f5111c94053ba4931892f5c01c806de33942e
11 changes: 11 additions & 0 deletions trunk/crypto/Kconfig
@@ -184,6 +184,17 @@ config CRYPTO_LRW
          The first 128, 192 or 256 bits in the key are used for AES and the
          rest is used to tie each cipher block to its logical position.

config CRYPTO_XTS
        tristate "XTS support (EXPERIMENTAL)"
        depends on EXPERIMENTAL
        select CRYPTO_BLKCIPHER
        select CRYPTO_MANAGER
        select CRYPTO_GF128MUL
        help
          XTS: IEEE1619/D16 narrow block cipher use with aes-xts-plain,
          key size 256, 384 or 512 bits. This implementation currently
          can't handle a sectorsize which is not a multiple of 16 bytes.

config CRYPTO_CRYPTD
        tristate "Software async crypto daemon"
        select CRYPTO_ABLKCIPHER
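For orientation, here is a minimal sketch (not part of the patch) of how a kernel-side caller of this era might drive the new xts(aes) template through the synchronous blkcipher API. The function name, the all-0x41 key bytes and the zero sector number are illustrative placeholders; a 64-byte key gives AES-256 for the data cipher plus AES-256 for the tweak cipher, matching the 512-bit case in the help text. The dm-crypt spelling "aes-xts-plain" mentioned above selects the same template, with the sector number supplied as the plain IV.

#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/string.h>

static int xts_aes_demo(void)
{
        struct crypto_blkcipher *tfm;
        struct blkcipher_desc desc;
        struct scatterlist sg;
        u8 key[64];             /* 512 bits: Key1 (data) || Key2 (tweak) */
        u8 iv[16];              /* sector number as the 'plain' IV */
        u8 buf[512];            /* one sector, a multiple of 16 bytes */
        int err;

        tfm = crypto_alloc_blkcipher("xts(aes)", 0, CRYPTO_ALG_ASYNC);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        memset(key, 0x41, sizeof(key));         /* dummy key material */
        memset(buf, 0, sizeof(buf));
        memset(iv, 0, sizeof(iv));              /* sector 0 */

        err = crypto_blkcipher_setkey(tfm, key, sizeof(key));
        if (err)
                goto out;

        sg_init_one(&sg, buf, sizeof(buf));
        crypto_blkcipher_set_iv(tfm, iv, crypto_blkcipher_ivsize(tfm));

        desc.tfm = tfm;
        desc.flags = 0;
        err = crypto_blkcipher_encrypt(&desc, &sg, &sg, sizeof(buf));
out:
        crypto_free_blkcipher(tfm);
        return err;
}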
1 change: 1 addition & 0 deletions trunk/crypto/Makefile
@@ -31,6 +31,7 @@ obj-$(CONFIG_CRYPTO_ECB) += ecb.o
obj-$(CONFIG_CRYPTO_CBC) += cbc.o
obj-$(CONFIG_CRYPTO_PCBC) += pcbc.o
obj-$(CONFIG_CRYPTO_LRW) += lrw.o
obj-$(CONFIG_CRYPTO_XTS) += xts.o
obj-$(CONFIG_CRYPTO_CRYPTD) += cryptd.o
obj-$(CONFIG_CRYPTO_DES) += des.o
obj-$(CONFIG_CRYPTO_FCRYPT) += fcrypt.o
11 changes: 11 additions & 0 deletions trunk/crypto/gf128mul.c
@@ -142,6 +142,17 @@ static void gf128mul_x_bbe(be128 *r, const be128 *x)
        r->b = cpu_to_be64((b << 1) ^ _tt);
}

void gf128mul_x_ble(be128 *r, const be128 *x)
{
        u64 a = le64_to_cpu(x->a);
        u64 b = le64_to_cpu(x->b);
        u64 _tt = gf128mul_table_bbe[b >> 63];

        r->a = cpu_to_le64((a << 1) ^ _tt);
        r->b = cpu_to_le64((b << 1) | (a >> 63));
}
EXPORT_SYMBOL(gf128mul_x_ble);

static void gf128mul_x8_lle(be128 *x)
{
        u64 a = be64_to_cpu(x->a);
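The doubling above is the XTS tweak-update step. As a freestanding illustration (the helper name and the stdint types are ours, not the kernel's), the same multiplication by x on a raw 16-byte tweak in the IEEE 1619/D16 byte order looks like this:

#include <stdint.h>

/* Multiply a 16-byte XTS tweak by x in GF(2^128): shift the whole value
 * left by one bit (byte 0 holds the least significant bits) and, if a bit
 * falls off the top, reduce by x^128 + x^7 + x^2 + x + 1 by XORing 0x87
 * into byte 0; this is the same constant the gf128mul_table_bbe lookup
 * above yields when the top bit is set. */
static void xts_tweak_double(uint8_t t[16])
{
        unsigned int i, carry = 0;

        for (i = 0; i < 16; i++) {
                unsigned int msb = t[i] >> 7;

                t[i] = (uint8_t)((t[i] << 1) | carry);
                carry = msb;
        }
        if (carry)
                t[0] ^= 0x87;
}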
12 changes: 12 additions & 0 deletions trunk/crypto/tcrypt.c
@@ -955,6 +955,10 @@ static void do_test(void)
                            AES_LRW_ENC_TEST_VECTORS);
                test_cipher("lrw(aes)", DECRYPT, aes_lrw_dec_tv_template,
                            AES_LRW_DEC_TEST_VECTORS);
                test_cipher("xts(aes)", ENCRYPT, aes_xts_enc_tv_template,
                            AES_XTS_ENC_TEST_VECTORS);
                test_cipher("xts(aes)", DECRYPT, aes_xts_dec_tv_template,
                            AES_XTS_DEC_TEST_VECTORS);

                //CAST5
                test_cipher("ecb(cast5)", ENCRYPT, cast5_enc_tv_template,
@@ -1138,6 +1142,10 @@ static void do_test(void)
                            AES_LRW_ENC_TEST_VECTORS);
                test_cipher("lrw(aes)", DECRYPT, aes_lrw_dec_tv_template,
                            AES_LRW_DEC_TEST_VECTORS);
                test_cipher("xts(aes)", ENCRYPT, aes_xts_enc_tv_template,
                            AES_XTS_ENC_TEST_VECTORS);
                test_cipher("xts(aes)", DECRYPT, aes_xts_dec_tv_template,
                            AES_XTS_DEC_TEST_VECTORS);
                break;

        case 11:
@@ -1313,6 +1321,10 @@ static void do_test(void)
                                  aes_lrw_speed_template);
                test_cipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
                                  aes_lrw_speed_template);
                test_cipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
                                  aes_xts_speed_template);
                test_cipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
                                  aes_xts_speed_template);
                break;

        case 201:
417 changes: 417 additions & 0 deletions trunk/crypto/tcrypt.h

(Large diff not rendered; this file adds the aes_xts_enc_tv_template, aes_xts_dec_tv_template and aes_xts_speed_template vectors referenced from tcrypt.c.)

292 changes: 292 additions & 0 deletions trunk/crypto/xts.c
@@ -0,0 +1,292 @@
/* XTS: as defined in IEEE1619/D16
 * http://grouper.ieee.org/groups/1619/email/pdf00086.pdf
 * (sector sizes which are not a multiple of 16 bytes are,
 * however, currently unsupported)
 *
 * Copyright (c) 2007 Rik Snel <rsnel@cube.dyndns.org>
 *
 * Based on ecb.c
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

#include <crypto/b128ops.h>
#include <crypto/gf128mul.h>

struct priv {
        struct crypto_cipher *child;
        struct crypto_cipher *tweak;
};

static int setkey(struct crypto_tfm *parent, const u8 *key,
                  unsigned int keylen)
{
        struct priv *ctx = crypto_tfm_ctx(parent);
        struct crypto_cipher *child = ctx->tweak;
        u32 *flags = &parent->crt_flags;
        int err;

        /* key consists of keys of equal size concatenated, therefore
         * the length must be even */
        if (keylen % 2) {
                /* tell the user why there was an error */
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }

        /* we need two cipher instances: one to compute the initial 'tweak'
         * by encrypting the IV (usually the 'plain' iv) and the other
         * one to encrypt and decrypt the data */

        /* tweak cipher, uses Key2 i.e. the second half of *key */
        crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
        crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
                                       CRYPTO_TFM_REQ_MASK);
        err = crypto_cipher_setkey(child, key + keylen/2, keylen/2);
        if (err)
                return err;

        crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) &
                                     CRYPTO_TFM_RES_MASK);

        child = ctx->child;

        /* data cipher, uses Key1 i.e. the first half of *key */
        crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
        crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
                                       CRYPTO_TFM_REQ_MASK);
        err = crypto_cipher_setkey(child, key, keylen/2);
        if (err)
                return err;

        crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) &
                                     CRYPTO_TFM_RES_MASK);

        return 0;
}

struct sinfo {
        be128 t;
        struct crypto_tfm *tfm;
        void (*fn)(struct crypto_tfm *, u8 *, const u8 *);
};

static inline void xts_round(struct sinfo *s, void *dst, const void *src)
{
        be128_xor(dst, &s->t, src);             /* PP <- T xor P */
        s->fn(s->tfm, dst, dst);                /* CC <- E(Key1,PP) */
        be128_xor(dst, dst, &s->t);             /* C <- T xor CC */
}

static int crypt(struct blkcipher_desc *d,
                 struct blkcipher_walk *w, struct priv *ctx,
                 void (*tw)(struct crypto_tfm *, u8 *, const u8 *),
                 void (*fn)(struct crypto_tfm *, u8 *, const u8 *))
{
        int err;
        unsigned int avail;
        const int bs = crypto_cipher_blocksize(ctx->child);
        struct sinfo s = {
                .tfm = crypto_cipher_tfm(ctx->child),
                .fn = fn
        };
        be128 *iv;
        u8 *wsrc;
        u8 *wdst;

        err = blkcipher_walk_virt(d, w);
        if (!w->nbytes)
                return err;

        avail = w->nbytes;

        wsrc = w->src.virt.addr;
        wdst = w->dst.virt.addr;

        /* calculate first value of T */
        iv = (be128 *)w->iv;
        tw(crypto_cipher_tfm(ctx->tweak), (void *)&s.t, w->iv);

        goto first;

        for (;;) {
                do {
                        gf128mul_x_ble(&s.t, &s.t);

first:
                        xts_round(&s, wdst, wsrc);

                        wsrc += bs;
                        wdst += bs;
                } while ((avail -= bs) >= bs);

                err = blkcipher_walk_done(d, w, avail);
                if (!w->nbytes)
                        break;

                avail = w->nbytes;

                wsrc = w->src.virt.addr;
                wdst = w->dst.virt.addr;
        }

        return err;
}

static int encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                   struct scatterlist *src, unsigned int nbytes)
{
        struct priv *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk w;

        blkcipher_walk_init(&w, dst, src, nbytes);
        return crypt(desc, &w, ctx, crypto_cipher_alg(ctx->tweak)->cia_encrypt,
                     crypto_cipher_alg(ctx->child)->cia_encrypt);
}

static int decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                   struct scatterlist *src, unsigned int nbytes)
{
        struct priv *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk w;

        blkcipher_walk_init(&w, dst, src, nbytes);
        return crypt(desc, &w, ctx, crypto_cipher_alg(ctx->tweak)->cia_encrypt,
                     crypto_cipher_alg(ctx->child)->cia_decrypt);
}

static int init_tfm(struct crypto_tfm *tfm)
{
        struct crypto_cipher *cipher;
        struct crypto_instance *inst = (void *)tfm->__crt_alg;
        struct crypto_spawn *spawn = crypto_instance_ctx(inst);
        struct priv *ctx = crypto_tfm_ctx(tfm);
        u32 *flags = &tfm->crt_flags;

        cipher = crypto_spawn_cipher(spawn);
        if (IS_ERR(cipher))
                return PTR_ERR(cipher);

        if (crypto_cipher_blocksize(cipher) != 16) {
                *flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
                crypto_free_cipher(cipher);
                return -EINVAL;
        }

        ctx->child = cipher;

        cipher = crypto_spawn_cipher(spawn);
        if (IS_ERR(cipher)) {
                crypto_free_cipher(ctx->child);
                return PTR_ERR(cipher);
        }

        /* this check isn't really needed, leave it here just in case */
        if (crypto_cipher_blocksize(cipher) != 16) {
                crypto_free_cipher(cipher);
                crypto_free_cipher(ctx->child);
                *flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
                return -EINVAL;
        }

        ctx->tweak = cipher;

        return 0;
}

static void exit_tfm(struct crypto_tfm *tfm)
{
        struct priv *ctx = crypto_tfm_ctx(tfm);
        crypto_free_cipher(ctx->child);
        crypto_free_cipher(ctx->tweak);
}

static struct crypto_instance *alloc(struct rtattr **tb)
{
        struct crypto_instance *inst;
        struct crypto_alg *alg;
        int err;

        err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
        if (err)
                return ERR_PTR(err);

        alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
                                  CRYPTO_ALG_TYPE_MASK);
        if (IS_ERR(alg))
                return ERR_PTR(PTR_ERR(alg));

        inst = crypto_alloc_instance("xts", alg);
        if (IS_ERR(inst))
                goto out_put_alg;

        inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
        inst->alg.cra_priority = alg->cra_priority;
        inst->alg.cra_blocksize = alg->cra_blocksize;

        if (alg->cra_alignmask < 7)
                inst->alg.cra_alignmask = 7;
        else
                inst->alg.cra_alignmask = alg->cra_alignmask;

        inst->alg.cra_type = &crypto_blkcipher_type;

        inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize;
        inst->alg.cra_blkcipher.min_keysize =
                2 * alg->cra_cipher.cia_min_keysize;
        inst->alg.cra_blkcipher.max_keysize =
                2 * alg->cra_cipher.cia_max_keysize;

        inst->alg.cra_ctxsize = sizeof(struct priv);

        inst->alg.cra_init = init_tfm;
        inst->alg.cra_exit = exit_tfm;

        inst->alg.cra_blkcipher.setkey = setkey;
        inst->alg.cra_blkcipher.encrypt = encrypt;
        inst->alg.cra_blkcipher.decrypt = decrypt;

out_put_alg:
        crypto_mod_put(alg);
        return inst;
}

static void free(struct crypto_instance *inst)
{
        crypto_drop_spawn(crypto_instance_ctx(inst));
        kfree(inst);
}

static struct crypto_template crypto_tmpl = {
        .name = "xts",
        .alloc = alloc,
        .free = free,
        .module = THIS_MODULE,
};

static int __init crypto_module_init(void)
{
        return crypto_register_template(&crypto_tmpl);
}

static void __exit crypto_module_exit(void)
{
        crypto_unregister_template(&crypto_tmpl);
}

module_init(crypto_module_init);
module_exit(crypto_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("XTS block cipher mode");
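For reference, the relations the template implements, matching xts_round() and the gf128mul_x_ble() doubling above: the initial tweak is T_1 = E_Key2(IV); each following block uses T_j+1 = T_j * x in GF(2^128) with reduction polynomial x^128 + x^7 + x^2 + x + 1; and every block is handled as C_j = E_Key1(P_j xor T_j) xor T_j. Decryption swaps only the data cipher direction; the tweak is always produced with encryption, which is why decrypt() above still passes cia_encrypt for the tweak.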
2 changes: 2 additions & 0 deletions trunk/include/crypto/gf128mul.h
@@ -161,6 +161,8 @@ void gf128mul_lle(be128 *a, const be128 *b);

void gf128mul_bbe(be128 *a, const be128 *b);

/* multiply by x in ble format, needed by XTS */
void gf128mul_x_ble(be128 *a, const be128 *b);

/* 4k table optimization */

