[CRYPTO] Add alignmask for low-level cipher implementations
[linux-2.6.git] / crypto / cipher.c
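Low-level cipher implementations that use wider-than-byte loads and stores can now declare an alignment requirement via cra_alignmask. The generic crypt() wrapper below checks both scatterlist walks against this mask and bounces misaligned data through a suitably aligned buffer (a spare page on the fast path, an aligned stack buffer on the slow path) before invoking the processing function.

For illustration, this is roughly how an implementation would declare the mask; the algorithm and all example_* identifiers here are hypothetical, only cra_alignmask is the point:

        static struct crypto_alg example_alg = {
                .cra_name       = "example",
                .cra_flags      = CRYPTO_ALG_TYPE_CIPHER,
                .cra_blocksize  = 16,
                .cra_ctxsize    = sizeof(struct example_ctx),
                .cra_alignmask  = 3,    /* crypt() guarantees 4-byte alignment */
                .cra_module     = THIS_MODULE,
                .cra_list       = LIST_HEAD_INIT(example_alg.cra_list),
                .cra_u          = { .cipher = {
                        .cia_min_keysize = 16,
                        .cia_max_keysize = 32,
                        .cia_setkey      = example_setkey,
                        .cia_encrypt     = example_encrypt,
                        .cia_decrypt     = example_decrypt } }
        };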
/*
 * Cryptographic API.
 *
 * Cipher operations.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#include <linux/compiler.h>
#include <linux/kernel.h>
#include <linux/crypto.h>
#include <linux/errno.h>
#include <linux/mm.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <asm/scatterlist.h>
#include "internal.h"
#include "scatterwalk.h"

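/*
 * Block XOR helpers used for CBC chaining; they work a machine word at
 * a time, so callers are expected to hand them suitably aligned buffers.
 */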
static inline void xor_64(u8 *a, const u8 *b)
{
        ((u32 *)a)[0] ^= ((u32 *)b)[0];
        ((u32 *)a)[1] ^= ((u32 *)b)[1];
}

static inline void xor_128(u8 *a, const u8 *b)
{
        ((u32 *)a)[0] ^= ((u32 *)b)[0];
        ((u32 *)a)[1] ^= ((u32 *)b)[1];
        ((u32 *)a)[2] ^= ((u32 *)b)[2];
        ((u32 *)a)[3] ^= ((u32 *)b)[3];
}

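/*
 * Slow path: the current block straddles a scatterlist boundary, so it
 * is assembled in (and written back out of) a stack buffer that is
 * aligned according to the algorithm's alignmask.
 */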
static unsigned int crypt_slow(const struct cipher_desc *desc,
                               struct scatter_walk *in,
                               struct scatter_walk *out, unsigned int bsize)
{
        unsigned int alignmask = desc->tfm->__crt_alg->cra_alignmask;
        u8 buffer[bsize * 2 + alignmask];
        u8 *src = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
        u8 *dst = src + bsize;
        unsigned int n;

        n = scatterwalk_copychunks(src, in, bsize, 0);
        scatterwalk_advance(in, n);

        desc->prfn(desc, dst, src, bsize);

        n = scatterwalk_copychunks(dst, out, bsize, 1);
        scatterwalk_advance(out, n);

        return bsize;
}

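/*
 * Fast path: operate directly on the mapped scatterlist data.  When a
 * bounce page was supplied because one of the walks is misaligned, copy
 * through it so the processing function only ever sees aligned buffers.
 */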
static inline unsigned int crypt_fast(const struct cipher_desc *desc,
                                      struct scatter_walk *in,
                                      struct scatter_walk *out,
                                      unsigned int nbytes, u8 *tmp)
{
        u8 *src, *dst;

        src = in->data;
        dst = scatterwalk_samebuf(in, out) ? src : out->data;

        if (tmp) {
                memcpy(tmp, in->data, nbytes);
                src = tmp;
                dst = tmp;
        }

        nbytes = desc->prfn(desc, dst, src, nbytes);

        if (tmp)
                memcpy(out->data, tmp, nbytes);

        scatterwalk_advance(in, nbytes);
        scatterwalk_advance(out, nbytes);

        return nbytes;
}

/*
 * Generic encrypt/decrypt wrapper for ciphers, handles operations across
 * multiple page boundaries by using temporary blocks.  In user context,
 * the kernel is given a chance to schedule us once per page.
 */
static int crypt(const struct cipher_desc *desc,
                 struct scatterlist *dst,
                 struct scatterlist *src,
                 unsigned int nbytes)
{
        struct scatter_walk walk_in, walk_out;
        struct crypto_tfm *tfm = desc->tfm;
        const unsigned int bsize = crypto_tfm_alg_blocksize(tfm);
        unsigned int alignmask = tfm->__crt_alg->cra_alignmask;
        unsigned long buffer = 0;

        if (!nbytes)
                return 0;

        if (nbytes % bsize) {
                tfm->crt_flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
                return -EINVAL;
        }

        scatterwalk_start(&walk_in, src);
        scatterwalk_start(&walk_out, dst);

        for (;;) {
                unsigned int n = nbytes;
                u8 *tmp = NULL;

                if (!scatterwalk_aligned(&walk_in, alignmask) ||
                    !scatterwalk_aligned(&walk_out, alignmask)) {
                        if (!buffer) {
                                buffer = __get_free_page(GFP_ATOMIC);
                                if (!buffer)
                                        n = 0;
                        }
                        tmp = (u8 *)buffer;
                }

                scatterwalk_map(&walk_in, 0);
                scatterwalk_map(&walk_out, 1);

                n = scatterwalk_clamp(&walk_in, n);
                n = scatterwalk_clamp(&walk_out, n);

                if (likely(n >= bsize))
                        n = crypt_fast(desc, &walk_in, &walk_out, n, tmp);
                else
                        n = crypt_slow(desc, &walk_in, &walk_out, bsize);

                nbytes -= n;

                scatterwalk_done(&walk_in, 0, nbytes);
                scatterwalk_done(&walk_out, 1, nbytes);

                if (!nbytes)
                        break;

                crypto_yield(tfm);
        }

        if (buffer)
                free_page(buffer);

        return 0;
}

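/*
 * CBC encryption: each plaintext block is XORed with the previous
 * ciphertext block (the IV for the first block) before being encrypted,
 * i.e. C[i] = E(P[i] ^ C[i-1]).
 */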
static unsigned int cbc_process_encrypt(const struct cipher_desc *desc,
                                        u8 *dst, const u8 *src,
                                        unsigned int nbytes)
{
        struct crypto_tfm *tfm = desc->tfm;
        void (*xor)(u8 *, const u8 *) = tfm->crt_u.cipher.cit_xor_block;
        int bsize = crypto_tfm_alg_blocksize(tfm);

        void (*fn)(void *, u8 *, const u8 *) = desc->crfn;
        u8 *iv = desc->info;
        unsigned int done = 0;

        do {
                xor(iv, src);
                fn(crypto_tfm_ctx(tfm), dst, iv);
                memcpy(iv, dst, bsize);

                src += bsize;
                dst += bsize;
        } while ((done += bsize) < nbytes);

        return done;
}

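/*
 * CBC decryption: P[i] = D(C[i]) ^ C[i-1].  For in-place operation
 * (src == dst) each block is decrypted into a stack buffer first so
 * that the ciphertext is still available when the IV is updated.
 */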
static unsigned int cbc_process_decrypt(const struct cipher_desc *desc,
                                        u8 *dst, const u8 *src,
                                        unsigned int nbytes)
{
        struct crypto_tfm *tfm = desc->tfm;
        void (*xor)(u8 *, const u8 *) = tfm->crt_u.cipher.cit_xor_block;
        int bsize = crypto_tfm_alg_blocksize(tfm);

        u8 stack[src == dst ? bsize : 0];
        u8 *buf = stack;
        u8 **dst_p = src == dst ? &buf : &dst;

        void (*fn)(void *, u8 *, const u8 *) = desc->crfn;
        u8 *iv = desc->info;
        unsigned int done = 0;

        do {
                u8 *tmp_dst = *dst_p;

                fn(crypto_tfm_ctx(tfm), tmp_dst, src);
                xor(tmp_dst, iv);
                memcpy(iv, src, bsize);
                if (tmp_dst != dst)
                        memcpy(dst, tmp_dst, bsize);

                src += bsize;
                dst += bsize;
        } while ((done += bsize) < nbytes);

        return done;
}

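/*
 * ECB: process each block independently with the raw block function;
 * there is no chaining and no IV.
 */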
static unsigned int ecb_process(const struct cipher_desc *desc, u8 *dst,
                                const u8 *src, unsigned int nbytes)
{
        struct crypto_tfm *tfm = desc->tfm;
        int bsize = crypto_tfm_alg_blocksize(tfm);
        void (*fn)(void *, u8 *, const u8 *) = desc->crfn;
        unsigned int done = 0;

        do {
                fn(crypto_tfm_ctx(tfm), dst, src);

                src += bsize;
                dst += bsize;
        } while ((done += bsize) < nbytes);

        return done;
}

static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
{
        struct cipher_alg *cia = &tfm->__crt_alg->cra_cipher;

        if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize) {
                tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        } else
                return cia->cia_setkey(crypto_tfm_ctx(tfm), key, keylen,
                                       &tfm->crt_flags);
}

static int ecb_encrypt(struct crypto_tfm *tfm,
                       struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct cipher_desc desc;
        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

        desc.tfm = tfm;
        desc.crfn = cipher->cia_encrypt;
        desc.prfn = cipher->cia_encrypt_ecb ?: ecb_process;

        return crypt(&desc, dst, src, nbytes);
}

static int ecb_decrypt(struct crypto_tfm *tfm,
                       struct scatterlist *dst,
                       struct scatterlist *src,
                       unsigned int nbytes)
{
        struct cipher_desc desc;
        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

        desc.tfm = tfm;
        desc.crfn = cipher->cia_decrypt;
        desc.prfn = cipher->cia_decrypt_ecb ?: ecb_process;

        return crypt(&desc, dst, src, nbytes);
}

static int cbc_encrypt(struct crypto_tfm *tfm,
                       struct scatterlist *dst,
                       struct scatterlist *src,
                       unsigned int nbytes)
{
        struct cipher_desc desc;
        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

        desc.tfm = tfm;
        desc.crfn = cipher->cia_encrypt;
        desc.prfn = cipher->cia_encrypt_cbc ?: cbc_process_encrypt;
        desc.info = tfm->crt_cipher.cit_iv;

        return crypt(&desc, dst, src, nbytes);
}

static int cbc_encrypt_iv(struct crypto_tfm *tfm,
                          struct scatterlist *dst,
                          struct scatterlist *src,
                          unsigned int nbytes, u8 *iv)
{
        struct cipher_desc desc;
        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

        desc.tfm = tfm;
        desc.crfn = cipher->cia_encrypt;
        desc.prfn = cipher->cia_encrypt_cbc ?: cbc_process_encrypt;
        desc.info = iv;

        return crypt(&desc, dst, src, nbytes);
}

static int cbc_decrypt(struct crypto_tfm *tfm,
                       struct scatterlist *dst,
                       struct scatterlist *src,
                       unsigned int nbytes)
{
        struct cipher_desc desc;
        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

        desc.tfm = tfm;
        desc.crfn = cipher->cia_decrypt;
        desc.prfn = cipher->cia_decrypt_cbc ?: cbc_process_decrypt;
        desc.info = tfm->crt_cipher.cit_iv;

        return crypt(&desc, dst, src, nbytes);
}

static int cbc_decrypt_iv(struct crypto_tfm *tfm,
                          struct scatterlist *dst,
                          struct scatterlist *src,
                          unsigned int nbytes, u8 *iv)
{
        struct cipher_desc desc;
        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

        desc.tfm = tfm;
        desc.crfn = cipher->cia_decrypt;
        desc.prfn = cipher->cia_decrypt_cbc ?: cbc_process_decrypt;
        desc.info = iv;

        return crypt(&desc, dst, src, nbytes);
}

static int nocrypt(struct crypto_tfm *tfm,
                   struct scatterlist *dst,
                   struct scatterlist *src,
                   unsigned int nbytes)
{
        return -ENOSYS;
}

static int nocrypt_iv(struct crypto_tfm *tfm,
                      struct scatterlist *dst,
                      struct scatterlist *src,
                      unsigned int nbytes, u8 *iv)
{
        return -ENOSYS;
}

int crypto_init_cipher_flags(struct crypto_tfm *tfm, u32 flags)
{
        u32 mode = flags & CRYPTO_TFM_MODE_MASK;

        tfm->crt_cipher.cit_mode = mode ? mode : CRYPTO_TFM_MODE_ECB;
        if (flags & CRYPTO_TFM_REQ_WEAK_KEY)
                tfm->crt_flags = CRYPTO_TFM_REQ_WEAK_KEY;

        return 0;
}

int crypto_init_cipher_ops(struct crypto_tfm *tfm)
{
        int ret = 0;
        struct cipher_tfm *ops = &tfm->crt_cipher;

        ops->cit_setkey = setkey;

        switch (tfm->crt_cipher.cit_mode) {
        case CRYPTO_TFM_MODE_ECB:
                ops->cit_encrypt = ecb_encrypt;
                ops->cit_decrypt = ecb_decrypt;
                break;

        case CRYPTO_TFM_MODE_CBC:
                ops->cit_encrypt = cbc_encrypt;
                ops->cit_decrypt = cbc_decrypt;
                ops->cit_encrypt_iv = cbc_encrypt_iv;
                ops->cit_decrypt_iv = cbc_decrypt_iv;
                break;

        case CRYPTO_TFM_MODE_CFB:
                ops->cit_encrypt = nocrypt;
                ops->cit_decrypt = nocrypt;
                ops->cit_encrypt_iv = nocrypt_iv;
                ops->cit_decrypt_iv = nocrypt_iv;
                break;

        case CRYPTO_TFM_MODE_CTR:
                ops->cit_encrypt = nocrypt;
                ops->cit_decrypt = nocrypt;
                ops->cit_encrypt_iv = nocrypt_iv;
                ops->cit_decrypt_iv = nocrypt_iv;
                break;

        default:
                BUG();
        }

        if (ops->cit_mode == CRYPTO_TFM_MODE_CBC) {

                switch (crypto_tfm_alg_blocksize(tfm)) {
                case 8:
                        ops->cit_xor_block = xor_64;
                        break;

                case 16:
                        ops->cit_xor_block = xor_128;
                        break;

                default:
                        printk(KERN_WARNING "%s: block size %u not supported\n",
                               crypto_tfm_alg_name(tfm),
                               crypto_tfm_alg_blocksize(tfm));
                        ret = -EINVAL;
                        goto out;
                }

                ops->cit_ivsize = crypto_tfm_alg_blocksize(tfm);
                ops->cit_iv = kmalloc(ops->cit_ivsize, GFP_KERNEL);
                if (ops->cit_iv == NULL)
                        ret = -ENOMEM;
        }

out:
        return ret;
}

void crypto_exit_cipher_ops(struct crypto_tfm *tfm)
{
        kfree(tfm->crt_cipher.cit_iv);
}
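Callers never deal with the alignmask themselves; they drive the cipher through the scatterlist interface and crypt() takes care of any bouncing. A minimal sketch against the crypto API of this era (error handling trimmed; the choice of "aes" and the buffer sizes are just an example):

        struct crypto_tfm *tfm;
        struct scatterlist sg[1];
        u8 key[16], iv[16], buf[32];    /* two 16-byte blocks, lowmem */

        tfm = crypto_alloc_tfm("aes", CRYPTO_TFM_MODE_CBC);
        if (tfm == NULL)
                return -ENOMEM;

        crypto_cipher_setkey(tfm, key, sizeof(key));
        crypto_cipher_set_iv(tfm, iv, crypto_tfm_alg_ivsize(tfm));

        sg[0].page = virt_to_page(buf);
        sg[0].offset = offset_in_page(buf);
        sg[0].length = sizeof(buf);

        crypto_cipher_encrypt(tfm, sg, sg, sizeof(buf));        /* in place */

        crypto_free_tfm(tfm);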