ccm.c 23 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942
  1. /*
  2. * CCM: Counter with CBC-MAC
  3. *
  4. * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
  5. *
  6. * This program is free software; you can redistribute it and/or modify it
  7. * under the terms of the GNU General Public License as published by the Free
  8. * Software Foundation; either version 2 of the License, or (at your option)
  9. * any later version.
  10. *
  11. */
  12. #include <crypto/internal/aead.h>
  13. #include <crypto/internal/skcipher.h>
  14. #include <crypto/scatterwalk.h>
  15. #include <linux/err.h>
  16. #include <linux/init.h>
  17. #include <linux/kernel.h>
  18. #include <linux/module.h>
  19. #include <linux/slab.h>
  20. #include "internal.h"
/* Per-instance context: spawns for the two underlying algorithms —
 * a CTR-mode skcipher (payload encryption) and a raw block cipher
 * (drives the CBC-MAC).
 */
struct ccm_instance_ctx {
	struct crypto_skcipher_spawn ctr;
	struct crypto_spawn cipher;
};
/* Per-tfm context: instantiated child transforms — the raw block
 * cipher for the CBC-MAC and the CTR skcipher for encryption.
 */
struct crypto_ccm_ctx {
	struct crypto_cipher *cipher;
	struct crypto_skcipher *ctr;
};
/* rfc4309 tfm context: the wrapped ccm child plus the 3-byte implicit
 * salt carved off the end of the key in crypto_rfc4309_setkey().
 */
struct crypto_rfc4309_ctx {
	struct crypto_aead *child;
	u8 nonce[3];	/* leading bytes of the assembled CCM nonce */
};
/* rfc4309 per-request context: scratch scatterlists that splice the
 * flattened AAD in front of src/dst, plus the inner request forwarded
 * to the ccm child.  subreq must stay last: the child's request
 * context and the IV/AAD buffer are laid out directly behind it
 * (see crypto_rfc4309_init_tfm() / crypto_rfc4309_crypt()).
 */
struct crypto_rfc4309_req_ctx {
	struct scatterlist src[3];
	struct scatterlist dst[3];
	struct aead_request subreq;
};
/* ccm per-request context.
 * odata holds B_0 and then the running CBC-MAC state; idata buffers a
 * partial MAC block between compute_mac() calls; auth_tag receives the
 * transmitted tag on decryption.  src/dst splice a 16-byte tag block
 * in front of the text for the single CTR pass.  skreq must stay last:
 * the CTR request context (crypto_skcipher_reqsize()) is allocated
 * directly behind it (see crypto_ccm_init_tfm()).
 */
struct crypto_ccm_req_priv_ctx {
	u8 odata[16];
	u8 idata[16];
	u8 auth_tag[16];
	u32 ilen;	/* bytes currently buffered in idata */
	u32 flags;	/* request flags cached for crypto_yield() */
	struct scatterlist src[3];
	struct scatterlist dst[3];
	struct skcipher_request skreq;
};
  48. static inline struct crypto_ccm_req_priv_ctx *crypto_ccm_reqctx(
  49. struct aead_request *req)
  50. {
  51. unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));
  52. return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
  53. }
  54. static int set_msg_len(u8 *block, unsigned int msglen, int csize)
  55. {
  56. __be32 data;
  57. memset(block, 0, csize);
  58. block += csize;
  59. if (csize >= 4)
  60. csize = 4;
  61. else if (msglen > (1 << (8 * csize)))
  62. return -EOVERFLOW;
  63. data = cpu_to_be32(msglen);
  64. memcpy(block - csize, (u8 *)&data + 4 - csize, csize);
  65. return 0;
  66. }
  67. static int crypto_ccm_setkey(struct crypto_aead *aead, const u8 *key,
  68. unsigned int keylen)
  69. {
  70. struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
  71. struct crypto_skcipher *ctr = ctx->ctr;
  72. struct crypto_cipher *tfm = ctx->cipher;
  73. int err = 0;
  74. crypto_skcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
  75. crypto_skcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
  76. CRYPTO_TFM_REQ_MASK);
  77. err = crypto_skcipher_setkey(ctr, key, keylen);
  78. crypto_aead_set_flags(aead, crypto_skcipher_get_flags(ctr) &
  79. CRYPTO_TFM_RES_MASK);
  80. if (err)
  81. goto out;
  82. crypto_cipher_clear_flags(tfm, CRYPTO_TFM_REQ_MASK);
  83. crypto_cipher_set_flags(tfm, crypto_aead_get_flags(aead) &
  84. CRYPTO_TFM_REQ_MASK);
  85. err = crypto_cipher_setkey(tfm, key, keylen);
  86. crypto_aead_set_flags(aead, crypto_cipher_get_flags(tfm) &
  87. CRYPTO_TFM_RES_MASK);
  88. out:
  89. return err;
  90. }
  91. static int crypto_ccm_setauthsize(struct crypto_aead *tfm,
  92. unsigned int authsize)
  93. {
  94. switch (authsize) {
  95. case 4:
  96. case 6:
  97. case 8:
  98. case 10:
  99. case 12:
  100. case 14:
  101. case 16:
  102. break;
  103. default:
  104. return -EINVAL;
  105. }
  106. return 0;
  107. }
/*
 * Build the B_0 block in @info from the IV, the tag length and the
 * message length, per RFC 3610 / NIST SP 800-38C.
 *
 * iv[0] carries L' = L - 1, so the length field occupies the final
 * L = iv[0] + 1 octets of the 16-byte block.
 *
 * Returns 0, or -EOVERFLOW when @cryptlen does not fit in L octets.
 */
static int format_input(u8 *info, struct aead_request *req,
			unsigned int cryptlen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int lp = req->iv[0];	/* L' */
	unsigned int l = lp + 1;	/* L: width of the length field */
	unsigned int m;

	m = crypto_aead_authsize(aead);

	memcpy(info, req->iv, 16);

	/* format control info per RFC 3610 and
	 * NIST Special Publication 800-38C
	 */
	*info |= (8 * ((m - 2) / 2));	/* encode tag length M in bits 3..5 */
	if (req->assoclen)
		*info |= 64;		/* Adata flag (bit 6) */

	return set_msg_len(info + 16 - l, cryptlen, l);
}
/*
 * Write the associated-data length header into @adata and return its
 * size: 2 bytes for lengths below 0xff00, otherwise the 0xfffe marker
 * followed by a 32-bit big-endian length.  (The 10-byte form for
 * lengths >= 2^32 cannot occur since @a is a 32-bit value.)
 */
static int format_adata(u8 *adata, unsigned int a)
{
	int len = 0;

	/* add control info for associated data
	 * RFC 3610 and NIST Special Publication 800-38C
	 */
	if (a < 65280) {
		*(__be16 *)adata = cpu_to_be16(a);
		len = 2;
	} else {
		*(__be16 *)adata = cpu_to_be16(0xfffe);
		*(__be32 *)&adata[2] = cpu_to_be32(a);
		len = 6;
	}

	return len;
}
/*
 * Fold @n bytes at @data into the running CBC-MAC.
 *
 * pctx->odata holds the MAC state; pctx->idata/ilen buffer an
 * incomplete block between calls so input may arrive in arbitrary
 * pieces.  Only whole 16-byte blocks are encrypted here; a trailing
 * partial block stays buffered for the next call (or for final
 * zero-padding in get_data_to_compute()).
 */
static void compute_mac(struct crypto_cipher *tfm, u8 *data, int n,
			struct crypto_ccm_req_priv_ctx *pctx)
{
	unsigned int bs = 16;
	u8 *odata = pctx->odata;
	u8 *idata = pctx->idata;
	int datalen, getlen;

	datalen = n;

	/* first time in here, block may be partially filled. */
	getlen = bs - pctx->ilen;
	if (datalen >= getlen) {
		/* complete the buffered block, then CBC-MAC it */
		memcpy(idata + pctx->ilen, data, getlen);
		crypto_xor(odata, idata, bs);
		crypto_cipher_encrypt_one(tfm, odata, odata);
		datalen -= getlen;
		data += getlen;
		pctx->ilen = 0;
	}

	/* now encrypt rest of data */
	while (datalen >= bs) {
		/* whole blocks can be XORed straight from the source */
		crypto_xor(odata, data, bs);
		crypto_cipher_encrypt_one(tfm, odata, odata);
		datalen -= bs;
		data += bs;
	}

	/* check and see if there's leftover data that wasn't
	 * enough to fill a block.
	 */
	if (datalen) {
		memcpy(idata + pctx->ilen, data, datalen);
		pctx->ilen += datalen;
	}
}
/*
 * Walk @len bytes of @sg, feeding each mapped piece to compute_mac(),
 * then zero-pad and encrypt any leftover partial block so the MAC is
 * complete for this segment of input (padding per NIST SP 800-38C).
 */
static void get_data_to_compute(struct crypto_cipher *tfm,
				struct crypto_ccm_req_priv_ctx *pctx,
				struct scatterlist *sg, unsigned int len)
{
	struct scatter_walk walk;
	u8 *data_src;
	int n;

	scatterwalk_start(&walk, sg);

	while (len) {
		n = scatterwalk_clamp(&walk, len);
		if (!n) {
			/* current sg entry exhausted; step to the next */
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}
		data_src = scatterwalk_map(&walk);

		compute_mac(tfm, data_src, n, pctx);
		len -= n;

		scatterwalk_unmap(data_src);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
		/* only yields if the cached request flags allow sleeping */
		if (len)
			crypto_yield(pctx->flags);
	}

	/* any leftover needs padding and then encrypted */
	if (pctx->ilen) {
		int padlen;
		u8 *odata = pctx->odata;
		u8 *idata = pctx->idata;

		padlen = 16 - pctx->ilen;
		memset(idata + pctx->ilen, 0, padlen);
		crypto_xor(odata, idata, 16);
		crypto_cipher_encrypt_one(tfm, odata, odata);
		pctx->ilen = 0;
	}
}
/*
 * Compute the CBC-MAC over B_0, the formatted associated data and the
 * @cryptlen bytes of plaintext in @plain; the result lands in
 * pctx->odata.
 *
 * Returns 0 or a negative errno from format_input() (bad length).
 */
static int crypto_ccm_auth(struct aead_request *req, struct scatterlist *plain,
			   unsigned int cryptlen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct crypto_cipher *cipher = ctx->cipher;
	unsigned int assoclen = req->assoclen;
	u8 *odata = pctx->odata;
	u8 *idata = pctx->idata;
	int err;

	/* format control data for input */
	err = format_input(odata, req, cryptlen);
	if (err)
		goto out;

	/* encrypt first block to use as start in computing mac */
	crypto_cipher_encrypt_one(cipher, odata, odata);

	/* format associated data and compute into mac */
	if (assoclen) {
		/* the length header is left buffered in idata so the AAD
		 * bytes get appended to it inside compute_mac()
		 */
		pctx->ilen = format_adata(idata, assoclen);
		get_data_to_compute(cipher, pctx, req->src, req->assoclen);
	} else {
		pctx->ilen = 0;
	}

	/* compute plaintext into mac */
	if (cryptlen)
		get_data_to_compute(cipher, pctx, plain, cryptlen);

out:
	return err;
}
  239. static void crypto_ccm_encrypt_done(struct crypto_async_request *areq, int err)
  240. {
  241. struct aead_request *req = areq->data;
  242. struct crypto_aead *aead = crypto_aead_reqtfm(req);
  243. struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
  244. u8 *odata = pctx->odata;
  245. if (!err)
  246. scatterwalk_map_and_copy(odata, req->dst,
  247. req->assoclen + req->cryptlen,
  248. crypto_aead_authsize(aead), 1);
  249. aead_request_complete(req, err);
  250. }
  251. static inline int crypto_ccm_check_iv(const u8 *iv)
  252. {
  253. /* 2 <= L <= 8, so 1 <= L' <= 7. */
  254. if (1 > iv[0] || iv[0] > 7)
  255. return -EINVAL;
  256. return 0;
  257. }
/*
 * Common encrypt/decrypt setup: validate the IV, zero its counter
 * field, and build pctx->src (and pctx->dst when src != dst) as
 * [16-byte @tag block][payload located just past the AAD].
 *
 * Splicing the tag block in front of the payload lets one CTR pass
 * process the MAC with counter 0 and the text with counters 1..n.
 */
static int crypto_ccm_init_crypt(struct aead_request *req, u8 *tag)
{
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct scatterlist *sg;
	u8 *iv = req->iv;
	int err;

	err = crypto_ccm_check_iv(iv);
	if (err)
		return err;

	pctx->flags = aead_request_flags(req);

	/* Note: rfc 3610 and NIST 800-38C require counter of
	 * zero to encrypt auth tag.
	 */
	memset(iv + 15 - iv[0], 0, iv[0] + 1);

	sg_init_table(pctx->src, 3);
	sg_set_buf(pctx->src, tag, 16);
	sg = scatterwalk_ffwd(pctx->src + 1, req->src, req->assoclen);
	/* chain only when ffwd could not reuse our scratch entry */
	if (sg != pctx->src + 1)
		sg_chain(pctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(pctx->dst, 3);
		sg_set_buf(pctx->dst, tag, 16);
		sg = scatterwalk_ffwd(pctx->dst + 1, req->dst, req->assoclen);
		if (sg != pctx->dst + 1)
			sg_chain(pctx->dst, 2, sg);
	}

	return 0;
}
/*
 * CCM encryption: CBC-MAC the plaintext into pctx->odata, run one CTR
 * pass over [MAC block | plaintext] (counter zero turns the MAC into
 * the auth tag), then append the tag to the end of dst.
 */
static int crypto_ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct skcipher_request *skreq = &pctx->skreq;
	struct scatterlist *dst;
	unsigned int cryptlen = req->cryptlen;
	u8 *odata = pctx->odata;
	u8 *iv = req->iv;
	int err;

	err = crypto_ccm_init_crypt(req, odata);
	if (err)
		return err;

	/* MAC the plaintext; sg_next(pctx->src) points just past the AAD */
	err = crypto_ccm_auth(req, sg_next(pctx->src), cryptlen);
	if (err)
		return err;

	dst = pctx->src;	/* in-place unless a separate dst was given */
	if (req->src != req->dst)
		dst = pctx->dst;

	skcipher_request_set_tfm(skreq, ctx->ctr);
	skcipher_request_set_callback(skreq, pctx->flags,
				      crypto_ccm_encrypt_done, req);
	skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);
	/* nonzero includes -EINPROGRESS/-EBUSY for async requests, in
	 * which case crypto_ccm_encrypt_done() copies the tag instead
	 */
	err = crypto_skcipher_encrypt(skreq);
	if (err)
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(odata, sg_next(dst), cryptlen,
				 crypto_aead_authsize(aead), 1);

	return err;
}
/*
 * Async completion for the CTR pass of decryption: recompute the
 * CBC-MAC over the recovered plaintext and compare it with the tag
 * saved in pctx->auth_tag before completing the request.
 */
static void crypto_ccm_decrypt_done(struct crypto_async_request *areq,
				    int err)
{
	struct aead_request *req = areq->data;
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen - authsize;
	struct scatterlist *dst;

	/* completion context: drop all request flags so crypto_yield()
	 * in the MAC walk cannot attempt to sleep
	 */
	pctx->flags = 0;

	dst = sg_next(req->src == req->dst ? pctx->src : pctx->dst);

	if (!err) {
		err = crypto_ccm_auth(req, dst, cryptlen);
		/* constant-time compare of received vs. computed tag */
		if (!err && crypto_memneq(pctx->auth_tag, pctx->odata, authsize))
			err = -EBADMSG;
	}
	aead_request_complete(req, err);
}
  336. static int crypto_ccm_decrypt(struct aead_request *req)
  337. {
  338. struct crypto_aead *aead = crypto_aead_reqtfm(req);
  339. struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
  340. struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
  341. struct skcipher_request *skreq = &pctx->skreq;
  342. struct scatterlist *dst;
  343. unsigned int authsize = crypto_aead_authsize(aead);
  344. unsigned int cryptlen = req->cryptlen;
  345. u8 *authtag = pctx->auth_tag;
  346. u8 *odata = pctx->odata;
  347. u8 *iv = req->iv;
  348. int err;
  349. cryptlen -= authsize;
  350. err = crypto_ccm_init_crypt(req, authtag);
  351. if (err)
  352. return err;
  353. scatterwalk_map_and_copy(authtag, sg_next(pctx->src), cryptlen,
  354. authsize, 0);
  355. dst = pctx->src;
  356. if (req->src != req->dst)
  357. dst = pctx->dst;
  358. skcipher_request_set_tfm(skreq, ctx->ctr);
  359. skcipher_request_set_callback(skreq, pctx->flags,
  360. crypto_ccm_decrypt_done, req);
  361. skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);
  362. err = crypto_skcipher_decrypt(skreq);
  363. if (err)
  364. return err;
  365. err = crypto_ccm_auth(req, sg_next(dst), cryptlen);
  366. if (err)
  367. return err;
  368. /* verify */
  369. if (crypto_memneq(authtag, odata, authsize))
  370. return -EBADMSG;
  371. return err;
  372. }
/*
 * Instantiate the raw cipher and CTR skcipher children and size the
 * request context: an alignmask-aligned crypto_ccm_req_priv_ctx
 * followed by the CTR transform's own request context.
 */
static int crypto_ccm_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct ccm_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_cipher *cipher;
	struct crypto_skcipher *ctr;
	unsigned long align;
	int err;

	cipher = crypto_spawn_cipher(&ictx->cipher);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctr = crypto_spawn_skcipher2(&ictx->ctr);
	err = PTR_ERR(ctr);
	if (IS_ERR(ctr))
		goto err_free_cipher;

	ctx->cipher = cipher;
	ctx->ctr = ctr;

	align = crypto_aead_alignmask(tfm);
	/* the ctx base already carries crypto_tfm_ctx_alignment() */
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(
		tfm,
		align + sizeof(struct crypto_ccm_req_priv_ctx) +
		crypto_skcipher_reqsize(ctr));

	return 0;

err_free_cipher:
	crypto_free_cipher(cipher);
	return err;
}
  402. static void crypto_ccm_exit_tfm(struct crypto_aead *tfm)
  403. {
  404. struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);
  405. crypto_free_cipher(ctx->cipher);
  406. crypto_free_skcipher(ctx->ctr);
  407. }
  408. static void crypto_ccm_free(struct aead_instance *inst)
  409. {
  410. struct ccm_instance_ctx *ctx = aead_instance_ctx(inst);
  411. crypto_drop_spawn(&ctx->cipher);
  412. crypto_drop_skcipher(&ctx->ctr);
  413. kfree(inst);
  414. }
/*
 * Shared instance constructor for the "ccm" and "ccm_base" templates.
 *
 * Looks up the raw block cipher and grabs the CTR skcipher, validates
 * them (16-byte cipher block; stream-like CTR with a 16-byte IV), then
 * fills in and registers the AEAD instance.  The reference taken by
 * crypto_alg_mod_lookup() is dropped on every exit path — the spawn
 * holds its own reference once initialised.
 */
static int crypto_ccm_create_common(struct crypto_template *tmpl,
				    struct rtattr **tb,
				    const char *full_name,
				    const char *ctr_name,
				    const char *cipher_name)
{
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct skcipher_alg *ctr;
	struct crypto_alg *cipher;
	struct ccm_instance_ctx *ictx;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	cipher = crypto_alg_mod_lookup(cipher_name, CRYPTO_ALG_TYPE_CIPHER,
				       CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	/* the CBC-MAC needs a 16-byte block */
	err = -EINVAL;
	if (cipher->cra_blocksize != 16)
		goto out_put_cipher;

	inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
	err = -ENOMEM;
	if (!inst)
		goto out_put_cipher;

	ictx = aead_instance_ctx(inst);
	err = crypto_init_spawn(&ictx->cipher, cipher,
				aead_crypto_instance(inst),
				CRYPTO_ALG_TYPE_MASK);
	if (err)
		goto err_free_inst;

	crypto_set_skcipher_spawn(&ictx->ctr, aead_crypto_instance(inst));
	err = crypto_grab_skcipher2(&ictx->ctr, ctr_name, 0,
				    crypto_requires_sync(algt->type,
							 algt->mask));
	if (err)
		goto err_drop_cipher;

	ctr = crypto_spawn_skcipher_alg(&ictx->ctr);

	/* Not a stream cipher? */
	err = -EINVAL;
	if (ctr->base.cra_blocksize != 1)
		goto err_drop_ctr;

	/* We want the real thing! */
	if (crypto_skcipher_alg_ivsize(ctr) != 16)
		goto err_drop_ctr;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "ccm_base(%s,%s)", ctr->base.cra_driver_name,
		     cipher->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_drop_ctr;

	/* caller guarantees full_name fits (it built it with snprintf) */
	memcpy(inst->alg.base.cra_name, full_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.base.cra_flags = ctr->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = (cipher->cra_priority +
				       ctr->base.cra_priority) / 2;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = cipher->cra_alignmask |
				       ctr->base.cra_alignmask |
				       (__alignof__(u32) - 1);
	inst->alg.ivsize = 16;
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(ctr);
	inst->alg.maxauthsize = 16;
	inst->alg.base.cra_ctxsize = sizeof(struct crypto_ccm_ctx);
	inst->alg.init = crypto_ccm_init_tfm;
	inst->alg.exit = crypto_ccm_exit_tfm;
	inst->alg.setkey = crypto_ccm_setkey;
	inst->alg.setauthsize = crypto_ccm_setauthsize;
	inst->alg.encrypt = crypto_ccm_encrypt;
	inst->alg.decrypt = crypto_ccm_decrypt;
	inst->free = crypto_ccm_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto err_drop_ctr;

out_put_cipher:
	crypto_mod_put(cipher);
	return err;

	/* error unwind: each label falls through to undo earlier steps */
err_drop_ctr:
	crypto_drop_skcipher(&ictx->ctr);
err_drop_cipher:
	crypto_drop_spawn(&ictx->cipher);
err_free_inst:
	kfree(inst);
	goto out_put_cipher;
}
  501. static int crypto_ccm_create(struct crypto_template *tmpl, struct rtattr **tb)
  502. {
  503. const char *cipher_name;
  504. char ctr_name[CRYPTO_MAX_ALG_NAME];
  505. char full_name[CRYPTO_MAX_ALG_NAME];
  506. cipher_name = crypto_attr_alg_name(tb[1]);
  507. if (IS_ERR(cipher_name))
  508. return PTR_ERR(cipher_name);
  509. if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)",
  510. cipher_name) >= CRYPTO_MAX_ALG_NAME)
  511. return -ENAMETOOLONG;
  512. if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm(%s)", cipher_name) >=
  513. CRYPTO_MAX_ALG_NAME)
  514. return -ENAMETOOLONG;
  515. return crypto_ccm_create_common(tmpl, tb, full_name, ctr_name,
  516. cipher_name);
  517. }
/* "ccm" template: ccm(cipher), e.g. ccm(aes) */
static struct crypto_template crypto_ccm_tmpl = {
	.name = "ccm",
	.create = crypto_ccm_create,
	.module = THIS_MODULE,
};
  523. static int crypto_ccm_base_create(struct crypto_template *tmpl,
  524. struct rtattr **tb)
  525. {
  526. const char *ctr_name;
  527. const char *cipher_name;
  528. char full_name[CRYPTO_MAX_ALG_NAME];
  529. ctr_name = crypto_attr_alg_name(tb[1]);
  530. if (IS_ERR(ctr_name))
  531. return PTR_ERR(ctr_name);
  532. cipher_name = crypto_attr_alg_name(tb[2]);
  533. if (IS_ERR(cipher_name))
  534. return PTR_ERR(cipher_name);
  535. if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm_base(%s,%s)",
  536. ctr_name, cipher_name) >= CRYPTO_MAX_ALG_NAME)
  537. return -ENAMETOOLONG;
  538. return crypto_ccm_create_common(tmpl, tb, full_name, ctr_name,
  539. cipher_name);
  540. }
/* "ccm_base" template: ccm_base(ctr_driver,cipher_driver) */
static struct crypto_template crypto_ccm_base_tmpl = {
	.name = "ccm_base",
	.create = crypto_ccm_base_create,
	.module = THIS_MODULE,
};
  546. static int crypto_rfc4309_setkey(struct crypto_aead *parent, const u8 *key,
  547. unsigned int keylen)
  548. {
  549. struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
  550. struct crypto_aead *child = ctx->child;
  551. int err;
  552. if (keylen < 3)
  553. return -EINVAL;
  554. keylen -= 3;
  555. memcpy(ctx->nonce, key + keylen, 3);
  556. crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
  557. crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
  558. CRYPTO_TFM_REQ_MASK);
  559. err = crypto_aead_setkey(child, key, keylen);
  560. crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
  561. CRYPTO_TFM_RES_MASK);
  562. return err;
  563. }
  564. static int crypto_rfc4309_setauthsize(struct crypto_aead *parent,
  565. unsigned int authsize)
  566. {
  567. struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
  568. switch (authsize) {
  569. case 8:
  570. case 12:
  571. case 16:
  572. break;
  573. default:
  574. return -EINVAL;
  575. }
  576. return crypto_aead_setauthsize(ctx->child, authsize);
  577. }
/*
 * Rewrite @req as a request on the inner ccm child.
 *
 * The CCM IV is assembled in the per-request buffer that trails the
 * child request (reserved by crypto_rfc4309_init_tfm): L' = 3, the
 * 3-byte salt from setkey, then the 8 explicit IV bytes.  The first
 * assoclen - 8 bytes of AAD are copied flat to iv + 16 and spliced in
 * front of the payload; the trailing 8 AAD bytes are excluded from the
 * inner request's AAD (the explicit IV is carried in the nonce
 * instead).  Caller guarantees assoclen is 16 or 20.
 */
static struct aead_request *crypto_rfc4309_crypt(struct aead_request *req)
{
	struct crypto_rfc4309_req_ctx *rctx = aead_request_ctx(req);
	struct aead_request *subreq = &rctx->subreq;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_aead *child = ctx->child;
	struct scatterlist *sg;
	u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
			   crypto_aead_alignmask(child) + 1);

	/* L' */
	iv[0] = 3;
	memcpy(iv + 1, ctx->nonce, 3);
	memcpy(iv + 4, req->iv, 8);

	/* flatten the real AAD (everything but the trailing 8 bytes) */
	scatterwalk_map_and_copy(iv + 16, req->src, 0, req->assoclen - 8, 0);

	sg_init_table(rctx->src, 3);
	sg_set_buf(rctx->src, iv + 16, req->assoclen - 8);
	sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen);
	if (sg != rctx->src + 1)
		sg_chain(rctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(rctx->dst, 3);
		sg_set_buf(rctx->dst, iv + 16, req->assoclen - 8);
		sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen);
		if (sg != rctx->dst + 1)
			sg_chain(rctx->dst, 2, sg);
	}

	aead_request_set_tfm(subreq, child);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, rctx->src,
			       req->src == req->dst ? rctx->src : rctx->dst,
			       req->cryptlen, iv);
	aead_request_set_ad(subreq, req->assoclen - 8);

	return subreq;
}
  614. static int crypto_rfc4309_encrypt(struct aead_request *req)
  615. {
  616. if (req->assoclen != 16 && req->assoclen != 20)
  617. return -EINVAL;
  618. req = crypto_rfc4309_crypt(req);
  619. return crypto_aead_encrypt(req);
  620. }
  621. static int crypto_rfc4309_decrypt(struct aead_request *req)
  622. {
  623. if (req->assoclen != 16 && req->assoclen != 20)
  624. return -EINVAL;
  625. req = crypto_rfc4309_crypt(req);
  626. return crypto_aead_decrypt(req);
  627. }
/*
 * Instantiate the ccm child and size the request context: the fixed
 * rfc4309 context, the child's request context rounded up to ctx
 * alignment, then alignment slack plus 32 bytes — 16 for the
 * assembled CCM IV and room for the flattened AAD (at most 12 bytes,
 * since assoclen is capped at 20) used by crypto_rfc4309_crypt().
 */
static int crypto_rfc4309_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct crypto_aead_spawn *spawn = aead_instance_ctx(inst);
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	unsigned long align;

	aead = crypto_spawn_aead(spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	ctx->child = aead;

	align = crypto_aead_alignmask(aead);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(
		tfm,
		sizeof(struct crypto_rfc4309_req_ctx) +
		ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) +
		align + 32);

	return 0;
}
  648. static void crypto_rfc4309_exit_tfm(struct crypto_aead *tfm)
  649. {
  650. struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);
  651. crypto_free_aead(ctx->child);
  652. }
  653. static void crypto_rfc4309_free(struct aead_instance *inst)
  654. {
  655. crypto_drop_aead(aead_instance_ctx(inst));
  656. kfree(inst);
  657. }
/*
 * "rfc4309(...)" template constructor: wrap an existing CCM AEAD,
 * checking it looks like CCM (16-byte IV, stream-like blocksize of 1),
 * and register the wrapper with an 8-byte (explicit IV) ivsize.
 */
static int crypto_rfc4309_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct crypto_aead_spawn *spawn;
	struct aead_alg *alg;
	const char *ccm_name;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	ccm_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(ccm_name))
		return PTR_ERR(ccm_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	spawn = aead_instance_ctx(inst);
	crypto_set_aead_spawn(spawn, aead_crypto_instance(inst));
	err = crypto_grab_aead(spawn, ccm_name, 0,
			       crypto_requires_sync(algt->type, algt->mask));
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(spawn);

	err = -EINVAL;

	/* We only support 16-byte blocks. */
	if (crypto_aead_alg_ivsize(alg) != 16)
		goto out_drop_alg;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto out_drop_alg;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4309(%s)", alg->base.cra_name) >=
	    CRYPTO_MAX_ALG_NAME ||
	    snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4309(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_drop_alg;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	/* only the 8 explicit IV bytes come from the caller */
	inst->alg.ivsize = 8;
	inst->alg.chunksize = crypto_aead_alg_chunksize(alg);
	inst->alg.maxauthsize = 16;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc4309_ctx);

	inst->alg.init = crypto_rfc4309_init_tfm;
	inst->alg.exit = crypto_rfc4309_exit_tfm;

	inst->alg.setkey = crypto_rfc4309_setkey;
	inst->alg.setauthsize = crypto_rfc4309_setauthsize;
	inst->alg.encrypt = crypto_rfc4309_encrypt;
	inst->alg.decrypt = crypto_rfc4309_decrypt;

	inst->free = crypto_rfc4309_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto out_drop_alg;

out:
	return err;

out_drop_alg:
	crypto_drop_aead(spawn);
out_free_inst:
	kfree(inst);
	goto out;
}
/* "rfc4309" template: CCM with implicit nonce (RFC 4309, IPsec ESP) */
static struct crypto_template crypto_rfc4309_tmpl = {
	.name = "rfc4309",
	.create = crypto_rfc4309_create,
	.module = THIS_MODULE,
};
  731. static int __init crypto_ccm_module_init(void)
  732. {
  733. int err;
  734. err = crypto_register_template(&crypto_ccm_base_tmpl);
  735. if (err)
  736. goto out;
  737. err = crypto_register_template(&crypto_ccm_tmpl);
  738. if (err)
  739. goto out_undo_base;
  740. err = crypto_register_template(&crypto_rfc4309_tmpl);
  741. if (err)
  742. goto out_undo_ccm;
  743. out:
  744. return err;
  745. out_undo_ccm:
  746. crypto_unregister_template(&crypto_ccm_tmpl);
  747. out_undo_base:
  748. crypto_unregister_template(&crypto_ccm_base_tmpl);
  749. goto out;
  750. }
/* Unregister the templates in reverse order of registration. */
static void __exit crypto_ccm_module_exit(void)
{
	crypto_unregister_template(&crypto_rfc4309_tmpl);
	crypto_unregister_template(&crypto_ccm_tmpl);
	crypto_unregister_template(&crypto_ccm_base_tmpl);
}
module_init(crypto_ccm_module_init);
module_exit(crypto_ccm_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Counter with CBC MAC");
/* template names this module can satisfy on demand */
MODULE_ALIAS_CRYPTO("ccm_base");
MODULE_ALIAS_CRYPTO("rfc4309");
MODULE_ALIAS_CRYPTO("ccm");