// SPDX-License-Identifier: GPL-2.0-only
// SPDX-FileCopyrightText: Copyright (c) 2023 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
/*
 * Crypto driver to handle HASH algorithms using NVIDIA Security Engine.
 */

#include <linux/clk.h>
#include <linux/dma-mapping.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>

#include <crypto/aes.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <crypto/sha3.h>
#include <crypto/internal/des.h>
#include <crypto/engine.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>

#include "tegra-se.h"

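/*
 * Per-transform context: the owning SE instance, the selected algorithm,
 * the hardware keyslot ID used for HMAC keys, and an optional software
 * fallback transform used when the key cannot be programmed into hardware.
 */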
struct tegra_sha_ctx {
	struct tegra_se *se;
	unsigned int alg;
	bool fallback;
	u32 key_id;
	struct crypto_ahash *fallback_tfm;
};

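/*
 * Per-request context: DMA buffers for the data currently being hashed,
 * the partial-block residue carried between update() calls, the final
 * digest and the intermediate hash result, plus bookkeeping for the total
 * message length and the pending SHA_INIT/SHA_UPDATE/SHA_FINAL tasks.
 */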
struct tegra_sha_reqctx {
	struct scatterlist *src_sg;
	struct tegra_se_datbuf datbuf;
	struct tegra_se_datbuf residue;
	struct tegra_se_datbuf digest;
	struct tegra_se_datbuf intr_res;
	unsigned int alg;
	unsigned int config;
	unsigned int total_len;
	unsigned int blk_size;
	unsigned int task;
	u32 key_id;
	u32 result[HASH_RESULT_REG_COUNT];
	struct ahash_request fallback_req;
};

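/* Translate the SE algorithm ID into the SE_SHA_CFG algorithm/mode bits */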
static int tegra_sha_get_config(u32 alg)
{
	int cfg = 0;

	switch (alg) {
	case SE_ALG_SHA1:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA1;
		break;

	case SE_ALG_HMAC_SHA224:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA224:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA224;
		break;

	case SE_ALG_HMAC_SHA256:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA256:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA256;
		break;

	case SE_ALG_HMAC_SHA384:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA384:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA384;
		break;

	case SE_ALG_HMAC_SHA512:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA512:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA512;
		break;

	case SE_ALG_SHA3_224:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_224;
		break;
	case SE_ALG_SHA3_256:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_256;
		break;
	case SE_ALG_SHA3_384:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_384;
		break;
	case SE_ALG_SHA3_512:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_512;
		break;
	default:
		return -EINVAL;
	}

	return cfg;
}

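/*
 * The helpers below forward the individual ahash operations to the
 * software fallback transform, which is used whenever an HMAC key could
 * not be handled by the hardware keyslots.
 */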
static int tegra_sha_fallback_init(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_init(&rctx->fallback_req);
}

static int tegra_sha_fallback_update(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;
	rctx->fallback_req.nbytes = req->nbytes;
	rctx->fallback_req.src = req->src;

	return crypto_ahash_update(&rctx->fallback_req);
}

static int tegra_sha_fallback_final(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;
	rctx->fallback_req.result = req->result;

	return crypto_ahash_final(&rctx->fallback_req);
}

static int tegra_sha_fallback_finup(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	rctx->fallback_req.nbytes = req->nbytes;
	rctx->fallback_req.src = req->src;
	rctx->fallback_req.result = req->result;

	return crypto_ahash_finup(&rctx->fallback_req);
}

static int tegra_sha_fallback_digest(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	rctx->fallback_req.nbytes = req->nbytes;
	rctx->fallback_req.src = req->src;
	rctx->fallback_req.result = req->result;

	return crypto_ahash_digest(&rctx->fallback_req);
}

static int tegra_sha_fallback_import(struct ahash_request *req, const void *in)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_import(&rctx->fallback_req, in);
}

static int tegra_sha_fallback_export(struct ahash_request *req, void *out)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_export(&rctx->fallback_req, out);
}

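/*
 * Emit the opcodes that reload the SE_SHA_HASH_RESULT registers with the
 * intermediate hash result produced by the previous task, taking care of
 * the word and byte ordering quirks described below. Returns the number
 * of command words written.
 */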
static int tegra_se_insert_hash_result(struct tegra_sha_ctx *ctx, u32 *cpuvaddr,
				       struct tegra_sha_reqctx *rctx)
{
	__be32 *res_be = (__be32 *)rctx->intr_res.buf;
	u32 *res = (u32 *)rctx->intr_res.buf;
	int i = 0, j;

	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = host1x_opcode_setpayload(HASH_RESULT_REG_COUNT);
	cpuvaddr[i++] = se_host1x_opcode_incr_w(SE_SHA_HASH_RESULT);

	for (j = 0; j < HASH_RESULT_REG_COUNT; j++) {
		int idx = j;

		/*
		 * The initial, intermediate and final hash values of SHA-384 and
		 * SHA-512 in the SHA_HASH_RESULT registers follow the byte layout
		 * below.
		 *
		 * +---------------+------------+
		 * | HASH_RESULT_0 | B4...B7    |
		 * +---------------+------------+
		 * | HASH_RESULT_1 | B0...B3    |
		 * +---------------+------------+
		 * | HASH_RESULT_2 | B12...B15  |
		 * +---------------+------------+
		 * | HASH_RESULT_3 | B8...B11   |
		 * +---------------+------------+
		 * |            ......          |
		 * +---------------+------------+
		 * | HASH_RESULT_14| B60...B63  |
		 * +---------------+------------+
		 * | HASH_RESULT_15| B56...B59  |
		 * +---------------+------------+
		 */
		if (ctx->alg == SE_ALG_SHA384 || ctx->alg == SE_ALG_SHA512)
			idx = (j % 2) ? j - 1 : j + 1;
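		/* e.g. j = 0 loads HASH_RESULT_0 with saved word 1 (B4...B7) */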

		/*
		 * For SHA-1, SHA-224, SHA-256, SHA-384 and SHA-512 the initial,
		 * intermediate and final hash values are stored in the
		 * SHA_HASH_RESULT registers in big-endian, not little-endian,
		 * byte order.
		 */
		if (ctx->alg <= SE_ALG_SHA512)
			cpuvaddr[i++] = be32_to_cpu(res_be[idx]);
		else
			cpuvaddr[i++] = res[idx];
	}

	return i;
}

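/*
 * Build the host1x command stream for one SHA operation: message length
 * and bytes-left, engine configuration, input/output addresses, optional
 * key slot and, finally, the operation start plus a sync point increment.
 * Returns the number of command words written.
 */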
static int tegra_sha_prep_cmd(struct tegra_sha_ctx *ctx, u32 *cpuvaddr,
			      struct tegra_sha_reqctx *rctx)
{
	struct tegra_se *se = ctx->se;
	u64 msg_len, msg_left;
	int i = 0;

	msg_len = rctx->total_len * 8;
	msg_left = rctx->datbuf.size * 8;

	/*
	 * If IN_ADDR_HI_0.SZ > SHA_MSG_LEFT_[0-3] to the HASH engine,
	 * HW treats it as the last buffer and processes the data.
	 * Therefore, add an extra byte to msg_left if it is not the
	 * last buffer.
	 */
	if (rctx->task & SHA_UPDATE) {
		msg_left += 8;
		msg_len += 8;
	}

	cpuvaddr[i++] = host1x_opcode_setpayload(8);
	cpuvaddr[i++] = se_host1x_opcode_incr_w(SE_SHA_MSG_LENGTH);
	cpuvaddr[i++] = lower_32_bits(msg_len);
	cpuvaddr[i++] = upper_32_bits(msg_len);
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = lower_32_bits(msg_left);
	cpuvaddr[i++] = upper_32_bits(msg_left);
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = host1x_opcode_setpayload(2);
	cpuvaddr[i++] = se_host1x_opcode_incr_w(SE_SHA_CFG);
	cpuvaddr[i++] = rctx->config;

	if (rctx->task & SHA_FIRST) {
		cpuvaddr[i++] = SE_SHA_TASK_HASH_INIT;
		rctx->task &= ~SHA_FIRST;
	} else {
		/*
		 * If it isn't the first task, program the HASH_RESULT registers
		 * with the intermediate result from the previous task.
		 */
		i += tegra_se_insert_hash_result(ctx, cpuvaddr + i, rctx);
	}

	cpuvaddr[i++] = host1x_opcode_setpayload(4);
	cpuvaddr[i++] = se_host1x_opcode_incr_w(SE_SHA_IN_ADDR);
	cpuvaddr[i++] = rctx->datbuf.addr;
	cpuvaddr[i++] = (u32)(SE_ADDR_HI_MSB(upper_32_bits(rctx->datbuf.addr)) |
				SE_ADDR_HI_SZ(rctx->datbuf.size));

	if (rctx->task & SHA_UPDATE) {
		cpuvaddr[i++] = rctx->intr_res.addr;
		cpuvaddr[i++] = (u32)(SE_ADDR_HI_MSB(upper_32_bits(rctx->intr_res.addr)) |
					SE_ADDR_HI_SZ(rctx->intr_res.size));
	} else {
		cpuvaddr[i++] = rctx->digest.addr;
		cpuvaddr[i++] = (u32)(SE_ADDR_HI_MSB(upper_32_bits(rctx->digest.addr)) |
					SE_ADDR_HI_SZ(rctx->digest.size));
	}

	if (rctx->key_id) {
		cpuvaddr[i++] = host1x_opcode_setpayload(1);
		cpuvaddr[i++] = se_host1x_opcode_nonincr_w(SE_SHA_CRYPTO_CFG);
		cpuvaddr[i++] = SE_AES_KEY_INDEX(rctx->key_id);
	}

	cpuvaddr[i++] = host1x_opcode_setpayload(1);
	cpuvaddr[i++] = se_host1x_opcode_nonincr_w(SE_SHA_OPERATION);
	cpuvaddr[i++] = SE_SHA_OP_WRSTALL | SE_SHA_OP_START |
			SE_SHA_OP_LASTBUF;
	cpuvaddr[i++] = se_host1x_opcode_nonincr(host1x_uclass_incr_syncpt_r(), 1);
	cpuvaddr[i++] = host1x_uclass_incr_syncpt_cond_f(1) |
			host1x_uclass_incr_syncpt_indx_f(se->syncpt_id);

	dev_dbg(se->dev, "msg len %llu msg left %llu sz %zd cfg %#x",
		msg_len, msg_left, rctx->datbuf.size, rctx->config);

	return i;
}

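/* Allocate the DMA buffers and reset the request state for a new hash */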
static int tegra_sha_do_init(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);
	struct tegra_se *se = ctx->se;

	if (ctx->fallback)
		return tegra_sha_fallback_init(req);

	rctx->total_len = 0;
	rctx->datbuf.size = 0;
	rctx->residue.size = 0;
	rctx->key_id = ctx->key_id;
	rctx->task |= SHA_FIRST;
	rctx->alg = ctx->alg;
	rctx->blk_size = crypto_ahash_blocksize(tfm);
	rctx->digest.size = crypto_ahash_digestsize(tfm);

	rctx->digest.buf = dma_alloc_coherent(se->dev, rctx->digest.size,
					      &rctx->digest.addr, GFP_KERNEL);
	if (!rctx->digest.buf)
		goto digbuf_fail;

	rctx->residue.buf = dma_alloc_coherent(se->dev, rctx->blk_size,
					       &rctx->residue.addr, GFP_KERNEL);
	if (!rctx->residue.buf)
		goto resbuf_fail;

	rctx->intr_res.size = HASH_RESULT_REG_COUNT * 4;
	rctx->intr_res.buf = dma_alloc_coherent(se->dev, rctx->intr_res.size,
						&rctx->intr_res.addr, GFP_KERNEL);
	if (!rctx->intr_res.buf)
		goto intr_res_fail;

	return 0;

intr_res_fail:
	/* Free with the size it was allocated with (blk_size), not residue.size */
	dma_free_coherent(se->dev, rctx->blk_size, rctx->residue.buf,
			  rctx->residue.addr);
resbuf_fail:
	dma_free_coherent(se->dev, rctx->digest.size, rctx->digest.buf,
			  rctx->digest.addr);
digbuf_fail:
	return -ENOMEM;
}

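/*
 * Hash the full blocks of this update. The data is copied into a
 * contiguous DMA buffer together with any residue from the previous
 * update; the trailing partial (or reserved last) block is kept back as
 * residue to be processed by final().
 */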
static int tegra_sha_do_update(struct ahash_request *req)
{
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct tegra_se *se = ctx->se;
	unsigned int nblks, nresidue, size, ret;
	u32 *cpuvaddr = se->cmdbuf->addr;

	nresidue = (req->nbytes + rctx->residue.size) % rctx->blk_size;
	nblks = (req->nbytes + rctx->residue.size) / rctx->blk_size;

	/*
	 * If nbytes is a multiple of the block size and there is no residue,
	 * reserve the last block as residue, to be processed in final().
	 */
	if (!nresidue && nblks) {
		nresidue = rctx->blk_size;
		nblks--;
	}

	rctx->src_sg = req->src;
	rctx->datbuf.size = (req->nbytes + rctx->residue.size) - nresidue;

	/*
	 * If there is less than a full block of data, copy it into the
	 * residue buffer and return. The bytes will be processed in final().
	 */
	if (nblks < 1) {
		scatterwalk_map_and_copy(rctx->residue.buf + rctx->residue.size,
					 rctx->src_sg, 0, req->nbytes, 0);
		rctx->residue.size += req->nbytes;

		return 0;
	}

	rctx->datbuf.buf = dma_alloc_coherent(se->dev, rctx->datbuf.size,
					      &rctx->datbuf.addr, GFP_KERNEL);
	if (!rctx->datbuf.buf)
		return -ENOMEM;

	/* Copy the previous residue first */
	if (rctx->residue.size)
		memcpy(rctx->datbuf.buf, rctx->residue.buf, rctx->residue.size);

	scatterwalk_map_and_copy(rctx->datbuf.buf + rctx->residue.size,
				 rctx->src_sg, 0, req->nbytes - nresidue, 0);

	scatterwalk_map_and_copy(rctx->residue.buf, rctx->src_sg,
				 req->nbytes - nresidue, nresidue, 0);

	/* Update the residue with the bytes left over after the current blocks */
	rctx->residue.size = nresidue;
	rctx->total_len += rctx->datbuf.size;

	rctx->config = tegra_sha_get_config(rctx->alg) |
			SE_SHA_DST_MEMORY;

	size = tegra_sha_prep_cmd(ctx, cpuvaddr, rctx);
	ret = tegra_se_host1x_submit(se, se->cmdbuf, size);

	dma_free_coherent(se->dev, rctx->datbuf.size,
			  rctx->datbuf.buf, rctx->datbuf.addr);

	return ret;
}

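/* Hash the remaining residue, copy out the digest and free the DMA buffers */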
static int tegra_sha_do_final(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);
	struct tegra_se *se = ctx->se;
	u32 *cpuvaddr = se->cmdbuf->addr;
	int size, ret = 0;

	if (rctx->residue.size) {
		rctx->datbuf.buf = dma_alloc_coherent(se->dev, rctx->residue.size,
						      &rctx->datbuf.addr, GFP_KERNEL);
		if (!rctx->datbuf.buf) {
			ret = -ENOMEM;
			goto out_free;
		}

		memcpy(rctx->datbuf.buf, rctx->residue.buf, rctx->residue.size);
	}

	rctx->datbuf.size = rctx->residue.size;
	rctx->total_len += rctx->residue.size;

	rctx->config = tegra_sha_get_config(rctx->alg) |
		       SE_SHA_DST_MEMORY;

	size = tegra_sha_prep_cmd(ctx, cpuvaddr, rctx);
	ret = tegra_se_host1x_submit(se, se->cmdbuf, size);
	if (ret)
		goto out;

	/* Copy result */
	memcpy(req->result, rctx->digest.buf, rctx->digest.size);

out:
	if (rctx->residue.size)
		dma_free_coherent(se->dev, rctx->datbuf.size,
				  rctx->datbuf.buf, rctx->datbuf.addr);
out_free:
	dma_free_coherent(se->dev, crypto_ahash_blocksize(tfm),
			  rctx->residue.buf, rctx->residue.addr);
	dma_free_coherent(se->dev, rctx->digest.size, rctx->digest.buf,
			  rctx->digest.addr);

	dma_free_coherent(se->dev, rctx->intr_res.size, rctx->intr_res.buf,
			  rctx->intr_res.addr);

	return ret;
}

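/* crypto_engine callback: run the INIT/UPDATE/FINAL tasks pending on the request */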
static int tegra_sha_do_one_req(struct crypto_engine *engine, void *areq)
{
	struct ahash_request *req = ahash_request_cast(areq);
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);
	struct tegra_se *se = ctx->se;
	int ret = 0;

	if (rctx->task & SHA_INIT) {
		ret = tegra_sha_do_init(req);
		if (ret)
			goto out;

		rctx->task &= ~SHA_INIT;
	}

	if (rctx->task & SHA_UPDATE) {
		ret = tegra_sha_do_update(req);
		if (ret)
			goto out;

		rctx->task &= ~SHA_UPDATE;
	}

	if (rctx->task & SHA_FINAL) {
		ret = tegra_sha_do_final(req);
		if (ret)
			goto out;

		rctx->task &= ~SHA_FINAL;
	}

out:
	crypto_finalize_hash_request(se->engine, req, ret);

	return 0;
}

static void tegra_sha_init_fallback(struct crypto_ahash *tfm, struct tegra_sha_ctx *ctx,
				    const char *algname)
{
	unsigned int statesize;

	ctx->fallback_tfm = crypto_alloc_ahash(algname, 0, CRYPTO_ALG_ASYNC |
						CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(ctx->fallback_tfm)) {
		dev_warn(ctx->se->dev,
			 "failed to allocate fallback for %s\n", algname);
		ctx->fallback_tfm = NULL;
		return;
	}

	statesize = crypto_ahash_statesize(ctx->fallback_tfm);

	if (statesize > sizeof(struct tegra_sha_reqctx))
		crypto_ahash_set_statesize(tfm, statesize);

	/* Update reqsize if fallback is added */
	crypto_ahash_set_reqsize(tfm,
				 sizeof(struct tegra_sha_reqctx) +
				 crypto_ahash_reqsize(ctx->fallback_tfm));
}

static int tegra_sha_cra_init(struct crypto_tfm *tfm)
{
	struct tegra_sha_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_ahash *ahash_tfm = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = __crypto_ahash_alg(tfm->__crt_alg);
	struct tegra_se_alg *se_alg;
	const char *algname;
	int ret;

	algname = crypto_tfm_alg_name(tfm);
	se_alg = container_of(alg, struct tegra_se_alg, alg.ahash.base);

	crypto_ahash_set_reqsize(ahash_tfm, sizeof(struct tegra_sha_reqctx));

	ctx->se = se_alg->se_dev;
	ctx->fallback = false;
	ctx->key_id = 0;

	ret = se_algname_to_algid(algname);
	if (ret < 0) {
		dev_err(ctx->se->dev, "invalid algorithm\n");
		return ret;
	}

	if (se_alg->alg_base)
		tegra_sha_init_fallback(ahash_tfm, ctx, algname);

	ctx->alg = ret;

	return 0;
}

static void tegra_sha_cra_exit(struct crypto_tfm *tfm)
{
	struct tegra_sha_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->fallback_tfm)
		crypto_free_ahash(ctx->fallback_tfm);

	tegra_key_invalidate(ctx->se, ctx->key_id, ctx->alg);
}

static int tegra_hmac_fallback_setkey(struct tegra_sha_ctx *ctx, const u8 *key,
				      unsigned int keylen)
{
	if (!ctx->fallback_tfm) {
		dev_dbg(ctx->se->dev, "invalid key length (%d)\n", keylen);
		return -EINVAL;
	}

	ctx->fallback = true;
	return crypto_ahash_setkey(ctx->fallback_tfm, key, keylen);
}

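/*
 * Program the HMAC key into a hardware keyslot. Keys whose length is not
 * a supported AES key size, or which cannot be committed to a keyslot,
 * are handed to the software fallback instead.
 */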
static int tegra_hmac_setkey(struct crypto_ahash *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);
	int ret;

	if (aes_check_keylen(keylen))
		return tegra_hmac_fallback_setkey(ctx, key, keylen);

	ret = tegra_key_submit(ctx->se, key, keylen, ctx->alg, &ctx->key_id);
	if (ret)
		return tegra_hmac_fallback_setkey(ctx, key, keylen);

	ctx->fallback = false;

	return 0;
}

static int tegra_sha_init(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	rctx->task = SHA_INIT;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_update(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_update(req);

	rctx->task |= SHA_UPDATE;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_final(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_final(req);

	rctx->task |= SHA_FINAL;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_finup(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_finup(req);

	rctx->task |= SHA_UPDATE | SHA_FINAL;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_digest(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_digest(req);

	rctx->task |= SHA_INIT | SHA_UPDATE | SHA_FINAL;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

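/* export()/import() simply snapshot or restore the request context */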
static int tegra_sha_export(struct ahash_request *req, void *out)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_export(req, out);

	memcpy(out, rctx, sizeof(*rctx));

	return 0;
}

static int tegra_sha_import(struct ahash_request *req, const void *in)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_import(req, in);

	memcpy(rctx, in, sizeof(*rctx));

	return 0;
}

static struct tegra_se_alg tegra_hash_algs[] = {
	{
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA1_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha1",
				.cra_driver_name = "tegra-se-sha1",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA224_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha224",
				.cra_driver_name = "tegra-se-sha224",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA256_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha256",
				.cra_driver_name = "tegra-se-sha256",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA384_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha384",
				.cra_driver_name = "tegra-se-sha384",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA512_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha512",
				.cra_driver_name = "tegra-se-sha512",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_224_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-224",
				.cra_driver_name = "tegra-se-sha3-224",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA3_224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_256_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-256",
				.cra_driver_name = "tegra-se-sha3-256",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA3_256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_384_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-384",
				.cra_driver_name = "tegra-se-sha3-384",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA3_384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_512_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-512",
				.cra_driver_name = "tegra-se-sha3-512",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA3_512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg_base = "sha224",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA224_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha224)",
				.cra_driver_name = "tegra-se-hmac-sha224",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg_base = "sha256",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA256_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha256)",
				.cra_driver_name = "tegra-se-hmac-sha256",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg_base = "sha384",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA384_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha384)",
				.cra_driver_name = "tegra-se-hmac-sha384",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg_base = "sha512",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA512_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha512)",
				.cra_driver_name = "tegra-se-hmac-sha512",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}
};

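/* Build the key access control (KAC) manifest word for an HMAC key of the given length */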
static int tegra_hash_kac_manifest(u32 user, u32 alg, u32 keylen)
{
	int manifest;

	manifest = SE_KAC_USER_NS;

	switch (alg) {
	case SE_ALG_HMAC_SHA224:
	case SE_ALG_HMAC_SHA256:
	case SE_ALG_HMAC_SHA384:
	case SE_ALG_HMAC_SHA512:
		manifest |= SE_KAC_HMAC;
		break;
	default:
		return -EINVAL;
	}

	switch (keylen) {
	case AES_KEYSIZE_128:
		manifest |= SE_KAC_SIZE_128;
		break;
	case AES_KEYSIZE_192:
		manifest |= SE_KAC_SIZE_192;
		break;
	case AES_KEYSIZE_256:
	default:
		manifest |= SE_KAC_SIZE_256;
		break;
	}

	return manifest;
}

int tegra_init_hash(struct tegra_se *se)
{
	struct ahash_engine_alg *alg;
	int i, ret;

	se->manifest = tegra_hash_kac_manifest;

	for (i = 0; i < ARRAY_SIZE(tegra_hash_algs); i++) {
		tegra_hash_algs[i].se_dev = se;
		alg = &tegra_hash_algs[i].alg.ahash;

		ret = crypto_engine_register_ahash(alg);
		if (ret) {
			dev_err(se->dev, "failed to register %s\n",
				alg->base.halg.base.cra_name);
			goto sha_err;
		}
	}

	return 0;

sha_err:
	while (i--)
		crypto_engine_unregister_ahash(&tegra_hash_algs[i].alg.ahash);

	return ret;
}

void tegra_deinit_hash(struct tegra_se *se)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(tegra_hash_algs); i++)
		crypto_engine_unregister_ahash(&tegra_hash_algs[i].alg.ahash);
}