1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3 * Cryptographic API.
4 *
 * TEA, XTEA, and XETA crypto algorithms
6 *
7 * The TEA and Xtended TEA algorithms were developed by David Wheeler
8 * and Roger Needham at the Computer Laboratory of Cambridge University.
9 *
10 * Due to the order of evaluation in XTEA many people have incorrectly
11 * implemented it. XETA (XTEA in the wrong order), exists for
12 * compatibility with these implementations.
13 *
14 * Copyright (c) 2004 Aaron Grothe [email protected]
15 */
16
17 #include <crypto/algapi.h>
18 #include <linux/init.h>
19 #include <linux/module.h>
20 #include <linux/mm.h>
21 #include <linux/unaligned.h>
22 #include <linux/types.h>
23
24 #define TEA_KEY_SIZE 16
25 #define TEA_BLOCK_SIZE 8
26 #define TEA_ROUNDS 32
27 #define TEA_DELTA 0x9e3779b9
28
29 #define XTEA_KEY_SIZE 16
30 #define XTEA_BLOCK_SIZE 8
31 #define XTEA_ROUNDS 32
32 #define XTEA_DELTA 0x9e3779b9
33
34 struct tea_ctx {
35 u32 KEY[4];
36 };
37
38 struct xtea_ctx {
39 u32 KEY[4];
40 };
41
tea_setkey(struct crypto_tfm * tfm,const u8 * in_key,unsigned int key_len)42 static int tea_setkey(struct crypto_tfm *tfm, const u8 *in_key,
43 unsigned int key_len)
44 {
45 struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
46
47 ctx->KEY[0] = get_unaligned_le32(&in_key[0]);
48 ctx->KEY[1] = get_unaligned_le32(&in_key[4]);
49 ctx->KEY[2] = get_unaligned_le32(&in_key[8]);
50 ctx->KEY[3] = get_unaligned_le32(&in_key[12]);
51
52 return 0;
53
54 }
55
/*
 * tea_encrypt - encrypt one 8-byte block with classic TEA.
 *
 * Runs TEA_ROUNDS (32) rounds of the Feistel-style mixing over the two
 * little-endian 32-bit halves of the block, using the four key words.
 */
static void tea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 a = get_unaligned_le32(&src[0]);
	u32 b = get_unaligned_le32(&src[4]);
	u32 sum = 0;
	int round;

	for (round = 0; round < TEA_ROUNDS; round++) {
		sum += TEA_DELTA;
		a += ((b << 4) + ctx->KEY[0]) ^ (b + sum) ^
		     ((b >> 5) + ctx->KEY[1]);
		b += ((a << 4) + ctx->KEY[2]) ^ (a + sum) ^
		     ((a >> 5) + ctx->KEY[3]);
	}

	put_unaligned_le32(a, &dst[0]);
	put_unaligned_le32(b, &dst[4]);
}
81
/*
 * tea_decrypt - decrypt one 8-byte block with classic TEA.
 *
 * Exact inverse of tea_encrypt: the rounds are applied in reverse
 * order, starting from the final sum value (TEA_DELTA << 5, i.e.
 * TEA_DELTA * TEA_ROUNDS mod 2^32).
 */
static void tea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 a = get_unaligned_le32(&src[0]);
	u32 b = get_unaligned_le32(&src[4]);
	u32 sum = TEA_DELTA << 5;
	int round;

	for (round = 0; round < TEA_ROUNDS; round++) {
		b -= ((a << 4) + ctx->KEY[2]) ^ (a + sum) ^
		     ((a >> 5) + ctx->KEY[3]);
		a -= ((b << 4) + ctx->KEY[0]) ^ (b + sum) ^
		     ((b >> 5) + ctx->KEY[1]);
		sum -= TEA_DELTA;
	}

	put_unaligned_le32(a, &dst[0]);
	put_unaligned_le32(b, &dst[4]);
}
109
xtea_setkey(struct crypto_tfm * tfm,const u8 * in_key,unsigned int key_len)110 static int xtea_setkey(struct crypto_tfm *tfm, const u8 *in_key,
111 unsigned int key_len)
112 {
113 struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
114
115 ctx->KEY[0] = get_unaligned_le32(&in_key[0]);
116 ctx->KEY[1] = get_unaligned_le32(&in_key[4]);
117 ctx->KEY[2] = get_unaligned_le32(&in_key[8]);
118 ctx->KEY[3] = get_unaligned_le32(&in_key[12]);
119
120 return 0;
121
122 }
123
/*
 * xtea_encrypt - encrypt one 8-byte block with XTEA.
 *
 * 32 rounds; the key word used in each half-round is selected by bits
 * of the running sum (sum & 3 before the delta add, sum >> 11 & 3
 * after). The loop terminates when sum reaches DELTA * ROUNDS.
 */
static void xtea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
	const u32 limit = XTEA_DELTA * XTEA_ROUNDS;
	u32 a = get_unaligned_le32(&src[0]);
	u32 b = get_unaligned_le32(&src[4]);
	u32 sum = 0;

	/* limit is non-zero, so the do-while runs the same 32 rounds. */
	do {
		a += ((b << 4 ^ b >> 5) + b) ^ (sum + ctx->KEY[sum & 3]);
		sum += XTEA_DELTA;
		b += ((a << 4 ^ a >> 5) + a) ^ (sum + ctx->KEY[sum >> 11 & 3]);
	} while (sum != limit);

	put_unaligned_le32(a, &dst[0]);
	put_unaligned_le32(b, &dst[4]);
}
142
/*
 * xtea_decrypt - decrypt one 8-byte block with XTEA.
 *
 * Inverse of xtea_encrypt: sum starts at DELTA * ROUNDS and counts
 * back down to zero, undoing the rounds in reverse order.
 */
static void xtea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	/*
	 * Use the XTEA context type: this transform's cra_ctxsize is
	 * sizeof(struct xtea_ctx). The previous "struct tea_ctx" only
	 * worked because the two structs happen to share a layout.
	 */
	struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 y, z, sum;

	y = get_unaligned_le32(&src[0]);
	z = get_unaligned_le32(&src[4]);

	sum = XTEA_DELTA * XTEA_ROUNDS;

	while (sum) {
		z -= ((y << 4 ^ y >> 5) + y) ^ (sum + ctx->KEY[sum >> 11 & 3]);
		sum -= XTEA_DELTA;
		y -= ((z << 4 ^ z >> 5) + z) ^ (sum + ctx->KEY[sum & 3]);
	}

	put_unaligned_le32(y, &dst[0]);
	put_unaligned_le32(z, &dst[4]);
}
162
163
/*
 * xeta_encrypt - encrypt one 8-byte block with XETA.
 *
 * XETA is XTEA with the wrong operator precedence ((y ^ sum) added
 * rather than XORed into the mix), kept for compatibility with buggy
 * implementations in the wild. Same round count and key selection
 * as XTEA.
 */
static void xeta_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
	const u32 limit = XTEA_DELTA * XTEA_ROUNDS;
	u32 a = get_unaligned_le32(&src[0]);
	u32 b = get_unaligned_le32(&src[4]);
	u32 sum = 0;

	/* limit is non-zero, so the do-while runs the same 32 rounds. */
	do {
		a += (b << 4 ^ b >> 5) + (b ^ sum) + ctx->KEY[sum & 3];
		sum += XTEA_DELTA;
		b += (a << 4 ^ a >> 5) + (a ^ sum) + ctx->KEY[sum >> 11 & 3];
	} while (sum != limit);

	put_unaligned_le32(a, &dst[0]);
	put_unaligned_le32(b, &dst[4]);
}
182
/*
 * xeta_decrypt - decrypt one 8-byte block with XETA.
 *
 * Inverse of xeta_encrypt: sum starts at DELTA * ROUNDS and counts
 * back down to zero, undoing the rounds in reverse order.
 */
static void xeta_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	/*
	 * Use the XTEA context type: the "xeta" alg's cra_ctxsize is
	 * sizeof(struct xtea_ctx). The previous "struct tea_ctx" only
	 * worked because the two structs happen to share a layout.
	 */
	struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 y, z, sum;

	y = get_unaligned_le32(&src[0]);
	z = get_unaligned_le32(&src[4]);

	sum = XTEA_DELTA * XTEA_ROUNDS;

	while (sum) {
		z -= (y << 4 ^ y >> 5) + (y ^ sum) + ctx->KEY[sum >> 11 & 3];
		sum -= XTEA_DELTA;
		y -= (z << 4 ^ z >> 5) + (z ^ sum) + ctx->KEY[sum & 3];
	}

	put_unaligned_le32(y, &dst[0]);
	put_unaligned_le32(z, &dst[4]);
}
202
/*
 * Registration table for the three single-block ("cipher") algorithms.
 * All share an 8-byte block and a fixed 16-byte key; "xtea" and "xeta"
 * share the setkey routine and context type, differing only in the
 * round function ordering.
 */
static struct crypto_alg tea_algs[3] = { {
	.cra_name		=	"tea",
	.cra_driver_name	=	"tea-generic",
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		=	TEA_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof (struct tea_ctx),
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{ .cipher = {
	.cia_min_keysize	=	TEA_KEY_SIZE,
	.cia_max_keysize	=	TEA_KEY_SIZE,
	.cia_setkey		= 	tea_setkey,
	.cia_encrypt		=	tea_encrypt,
	.cia_decrypt		=	tea_decrypt } }
}, {
	.cra_name		=	"xtea",
	.cra_driver_name	=	"xtea-generic",
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		=	XTEA_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof (struct xtea_ctx),
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{ .cipher = {
	.cia_min_keysize	=	XTEA_KEY_SIZE,
	.cia_max_keysize	=	XTEA_KEY_SIZE,
	.cia_setkey		= 	xtea_setkey,
	.cia_encrypt		=	xtea_encrypt,
	.cia_decrypt		=	xtea_decrypt } }
}, {
	.cra_name		=	"xeta",
	.cra_driver_name	=	"xeta-generic",
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		=	XTEA_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof (struct xtea_ctx),
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{ .cipher = {
	.cia_min_keysize	=	XTEA_KEY_SIZE,
	.cia_max_keysize	=	XTEA_KEY_SIZE,
	/* xeta reuses xtea_setkey: identical key schedule and context. */
	.cia_setkey		= 	xtea_setkey,
	.cia_encrypt		=	xeta_encrypt,
	.cia_decrypt		=	xeta_decrypt } }
} };
243
/* Register all three algorithms with the crypto API on module load. */
static int __init tea_mod_init(void)
{
	return crypto_register_algs(tea_algs, ARRAY_SIZE(tea_algs));
}
248
/* Unregister the algorithms on module unload. */
static void __exit tea_mod_fini(void)
{
	crypto_unregister_algs(tea_algs, ARRAY_SIZE(tea_algs));
}
253
254 MODULE_ALIAS_CRYPTO("tea");
255 MODULE_ALIAS_CRYPTO("xtea");
256 MODULE_ALIAS_CRYPTO("xeta");
257
258 subsys_initcall(tea_mod_init);
259 module_exit(tea_mod_fini);
260
261 MODULE_LICENSE("GPL");
262 MODULE_DESCRIPTION("TEA, XTEA & XETA Cryptographic Algorithms");
263