]>
Commit | Line | Data |
---|---|---|
a672a02a SL |
1 | /* |
2 | * Copyright 2001-2019 The OpenSSL Project Authors. All Rights Reserved. | |
3 | * | |
4 | * Licensed under the Apache License 2.0 (the "License"). You may not use | |
5 | * this file except in compliance with the License. You can obtain a copy | |
6 | * in the file LICENSE in the source distribution or at | |
7 | * https://www.openssl.org/source/license.html | |
8 | */ | |
9 | ||
10 | #include "ciphers_locl.h" | |
11 | #include "internal/aes_platform.h" | |
12 | ||
13 | static const PROV_GCM_HW aes_gcm; | |
14 | ||
15 | static int gcm_setiv(PROV_GCM_CTX *ctx, const unsigned char *iv, size_t ivlen); | |
16 | static int gcm_aad_update(PROV_GCM_CTX *ctx, const unsigned char *aad, | |
17 | size_t aad_len); | |
18 | static int gcm_cipher_final(PROV_GCM_CTX *ctx, unsigned char *tag); | |
19 | static int gcm_one_shot(PROV_GCM_CTX *ctx, unsigned char *aad, size_t aad_len, | |
20 | const unsigned char *in, size_t in_len, | |
21 | unsigned char *out, unsigned char *tag, size_t tag_len); | |
22 | static int gcm_cipher_update(PROV_GCM_CTX *ctx, const unsigned char *in, | |
23 | size_t len, unsigned char *out); | |
24 | ||
/*
 * Install a key schedule and its primitive function pointers into the GCM
 * context: expands |key| (|keylen| bytes, hence keylen * 8 bits) into |ks|,
 * points the GCM128 layer at the raw block cipher, and records an optional
 * CTR32 bulk routine (NULL when none is available).
 *
 * Wrapped in do { } while (0) so the expansion is a single statement and
 * remains safe inside unbraced if/else bodies (CERT PRE10-C).
 */
#define SET_KEY_CTR_FN(ks, fn_set_enc_key, fn_block, fn_ctr)        \
    do {                                                            \
        ctx->ks = ks;                                               \
        fn_set_enc_key(key, keylen * 8, ks);                        \
        CRYPTO_gcm128_init(&ctx->gcm, ks, (block128_f)fn_block);    \
        ctx->ctr = (ctr128_f)fn_ctr;                                \
        ctx->key_set = 1;                                           \
    } while (0)
31 | ||
32 | #if defined(AESNI_CAPABLE) | |
33 | ||
34 | /* AES-NI section */ | |
35 | static int aesni_gcm_init_key(PROV_GCM_CTX *ctx, const unsigned char *key, | |
36 | size_t keylen) | |
37 | { | |
38 | PROV_AES_GCM_CTX *actx = (PROV_AES_GCM_CTX *)ctx; | |
39 | AES_KEY *ks = &actx->ks.ks; | |
40 | ||
41 | SET_KEY_CTR_FN(ks, aesni_set_encrypt_key, aesni_encrypt, | |
42 | aesni_ctr32_encrypt_blocks); | |
43 | return 1; | |
44 | } | |
45 | ||
/*
 * AES-NI GCM method table: AES-NI-specific key setup, with all other
 * operations delegated to the generic GCM helpers in this file.
 */
static const PROV_GCM_HW aesni_gcm = {
    aesni_gcm_init_key,
    gcm_setiv,
    gcm_aad_update,
    gcm_cipher_update,
    gcm_cipher_final,
    gcm_one_shot
};
54 | ||
55 | const PROV_GCM_HW *PROV_AES_HW_gcm(size_t keybits) | |
56 | { | |
57 | return AESNI_CAPABLE ? &aesni_gcm : &aes_gcm; | |
58 | } | |
59 | ||
60 | #elif defined(AES_ASM) && (defined(__sparc) || defined(__sparc__)) | |
61 | ||
62 | /* Fujitsu SPARC64 X support */ | |
63 | ||
64 | static int t4_aes_gcm_init_key(PROV_GCM_CTX *ctx, const unsigned char *key, | |
65 | size_t keylen) | |
66 | { | |
67 | ctr128_f ctr; | |
68 | PROV_AES_GCM_CTX *actx = (PROV_AES_GCM_CTX *)ctx; | |
69 | AES_KEY *ks = &actx->ks.ks; | |
70 | ||
71 | ||
72 | switch (keylen) { | |
73 | case 16: | |
74 | ctr = (ctr128_f)aes128_t4_ctr32_encrypt; | |
75 | break; | |
76 | case 24: | |
77 | ctr = (ctr128_f)aes192_t4_ctr32_encrypt; | |
78 | break; | |
79 | case 32: | |
80 | ctr = (ctr128_f)aes256_t4_ctr32_encrypt; | |
81 | break; | |
82 | default: | |
83 | return 0; | |
84 | } | |
85 | ||
86 | SET_KEY_CTR_FN(ks, aes_t4_set_encrypt_key, aes_t4_encrypt, ctr); | |
87 | return 1; | |
88 | } | |
89 | ||
/*
 * SPARC T4 GCM method table: T4-specific key setup, generic GCM helpers
 * for everything else.
 */
static const PROV_GCM_HW t4_aes_gcm = {
    t4_aes_gcm_init_key,
    gcm_setiv,
    gcm_aad_update,
    gcm_cipher_update,
    gcm_cipher_final,
    gcm_one_shot
};
98 | const PROV_GCM_HW *PROV_AES_HW_gcm(size_t keybits) | |
99 | { | |
100 | return SPARC_AES_CAPABLE ? &t4_aes_gcm : &aes_gcm; | |
101 | } | |
102 | ||
103 | #elif defined(OPENSSL_CPUID_OBJ) && defined(__s390__) | |
104 | # include "gcm_s390x.c" | |
105 | #else | |
/* Generic fallback: no platform acceleration available; |keybits| unused. */
const PROV_GCM_HW *PROV_AES_HW_gcm(size_t keybits)
{
    return &aes_gcm;
}
110 | #endif | |
111 | ||
/*
 * Generic AES-GCM key initialisation.  Probes the available AES
 * implementations in priority order -- hardware AES (HWAES), bit-sliced
 * (BSAES), vector-permute (VPAES), then the plain C/asm fallback -- and
 * installs the first capable one via SET_KEY_CTR_FN.  Note the if/else
 * chain is deliberately spliced across the preprocessor blocks: each
 * "} else" dangles into the next enabled section.  Always returns 1.
 */
static int generic_aes_gcm_init_key(PROV_GCM_CTX *ctx, const unsigned char *key,
                                    size_t keylen)
{
    PROV_AES_GCM_CTX *actx = (PROV_AES_GCM_CTX *)ctx;
    AES_KEY *ks = &actx->ks.ks;

# ifdef HWAES_CAPABLE
    if (HWAES_CAPABLE) {
        /* Hardware AES; a bulk CTR32 routine is used when the port has one */
# ifdef HWAES_ctr32_encrypt_blocks
        SET_KEY_CTR_FN(ks, HWAES_set_encrypt_key, HWAES_encrypt,
                       HWAES_ctr32_encrypt_blocks);
# else
        SET_KEY_CTR_FN(ks, HWAES_set_encrypt_key, HWAES_encrypt, NULL);
# endif /* HWAES_ctr32_encrypt_blocks */
    } else
# endif /* HWAES_CAPABLE */

# ifdef BSAES_CAPABLE
    if (BSAES_CAPABLE) {
        /* Bit-sliced AES: CTR32 bulk path, standard key schedule */
        SET_KEY_CTR_FN(ks, AES_set_encrypt_key, AES_encrypt,
                       bsaes_ctr32_encrypt_blocks);
    } else
# endif /* BSAES_CAPABLE */

# ifdef VPAES_CAPABLE
    if (VPAES_CAPABLE) {
        /* Vector-permute AES: block function only, no CTR32 helper */
        SET_KEY_CTR_FN(ks, vpaes_set_encrypt_key, vpaes_encrypt, NULL);
    } else
# endif /* VPAES_CAPABLE */

    {
        /* Plain C/asm fallback */
# ifdef AES_CTR_ASM
        SET_KEY_CTR_FN(ks, AES_set_encrypt_key, AES_encrypt, AES_ctr32_encrypt);
# else
        SET_KEY_CTR_FN(ks, AES_set_encrypt_key, AES_encrypt, NULL);
# endif /* AES_CTR_ASM */
    }
    /* NOTE(review): redundant -- every SET_KEY_CTR_FN path already sets it */
    ctx->key_set = 1;
    return 1;
}
152 | ||
153 | static int gcm_setiv(PROV_GCM_CTX *ctx, const unsigned char *iv, size_t ivlen) | |
154 | { | |
155 | CRYPTO_gcm128_setiv(&ctx->gcm, iv, ivlen); | |
156 | return 1; | |
157 | } | |
158 | ||
159 | static int gcm_aad_update(PROV_GCM_CTX *ctx, | |
160 | const unsigned char *aad, size_t aad_len) | |
161 | { | |
162 | return CRYPTO_gcm128_aad(&ctx->gcm, aad, aad_len) == 0; | |
163 | } | |
164 | ||
165 | static int gcm_cipher_update(PROV_GCM_CTX *ctx, const unsigned char *in, | |
166 | size_t len, unsigned char *out) | |
167 | { | |
168 | if (ctx->enc) { | |
169 | if (ctx->ctr != NULL) { | |
170 | #if defined(AES_GCM_ASM) | |
171 | size_t bulk = 0; | |
172 | ||
173 | if (len >= 32 && AES_GCM_ASM(ctx)) { | |
174 | size_t res = (16 - ctx->gcm.mres) % 16; | |
175 | ||
176 | if (CRYPTO_gcm128_encrypt(&ctx->gcm, in, out, res)) | |
177 | return 0; | |
178 | bulk = aesni_gcm_encrypt(in + res, out + res, len - res, | |
179 | ctx->gcm.key, | |
180 | ctx->gcm.Yi.c, ctx->gcm.Xi.u); | |
181 | ctx->gcm.len.u[1] += bulk; | |
182 | bulk += res; | |
183 | } | |
184 | if (CRYPTO_gcm128_encrypt_ctr32(&ctx->gcm, in + bulk, out + bulk, | |
185 | len - bulk, ctx->ctr)) | |
186 | return 0; | |
187 | #else | |
188 | if (CRYPTO_gcm128_encrypt_ctr32(&ctx->gcm, in, out, len, ctx->ctr)) | |
189 | return 0; | |
190 | #endif /* AES_GCM_ASM */ | |
191 | } else { | |
192 | if (CRYPTO_gcm128_encrypt(&ctx->gcm, in, out, len)) | |
193 | return 0; | |
194 | } | |
195 | } else { | |
196 | if (ctx->ctr != NULL) { | |
197 | #if defined(AES_GCM_ASM) | |
198 | size_t bulk = 0; | |
199 | ||
200 | if (len >= 16 && AES_GCM_ASM(ctx)) { | |
201 | size_t res = (16 - ctx->gcm.mres) % 16; | |
202 | ||
203 | if (CRYPTO_gcm128_decrypt(&ctx->gcm, in, out, res)) | |
204 | return -1; | |
205 | ||
206 | bulk = aesni_gcm_decrypt(in + res, out + res, len - res, | |
207 | ctx->gcm.key, | |
208 | ctx->gcm.Yi.c, ctx->gcm.Xi.u); | |
209 | ctx->gcm.len.u[1] += bulk; | |
210 | bulk += res; | |
211 | } | |
212 | if (CRYPTO_gcm128_decrypt_ctr32(&ctx->gcm, in + bulk, out + bulk, | |
213 | len - bulk, ctx->ctr)) | |
214 | return 0; | |
215 | #else | |
216 | if (CRYPTO_gcm128_decrypt_ctr32(&ctx->gcm, in, out, len, ctx->ctr)) | |
217 | return 0; | |
218 | #endif /* AES_GCM_ASM */ | |
219 | } else { | |
220 | if (CRYPTO_gcm128_decrypt(&ctx->gcm, in, out, len)) | |
221 | return 0; | |
222 | } | |
223 | } | |
224 | return 1; | |
225 | } | |
226 | ||
227 | static int gcm_cipher_final(PROV_GCM_CTX *ctx, unsigned char *tag) | |
228 | { | |
229 | if (ctx->enc) { | |
230 | CRYPTO_gcm128_tag(&ctx->gcm, tag, GCM_TAG_MAX_SIZE); | |
231 | ctx->taglen = GCM_TAG_MAX_SIZE; | |
232 | } else { | |
233 | if (ctx->taglen < 0 | |
234 | || CRYPTO_gcm128_finish(&ctx->gcm, tag, ctx->taglen) != 0) | |
235 | return 0; | |
236 | } | |
237 | return 1; | |
238 | } | |
239 | ||
240 | static int gcm_one_shot(PROV_GCM_CTX *ctx, unsigned char *aad, size_t aad_len, | |
241 | const unsigned char *in, size_t in_len, | |
242 | unsigned char *out, unsigned char *tag, size_t tag_len) | |
243 | { | |
244 | int ret = 0; | |
245 | ||
246 | /* Use saved AAD */ | |
247 | if (!ctx->hw->aadupdate(ctx, aad, aad_len)) | |
248 | goto err; | |
249 | if (!ctx->hw->cipherupdate(ctx, in, in_len, out)) | |
250 | goto err; | |
251 | ctx->taglen = GCM_TAG_MAX_SIZE; | |
252 | if (!ctx->hw->cipherfinal(ctx, tag)) | |
253 | goto err; | |
254 | ret = 1; | |
255 | ||
256 | err: | |
257 | return ret; | |
258 | } | |
259 | ||
/*
 * Generic (software) AES-GCM method table; also the fallback returned by
 * PROV_AES_HW_gcm() when no platform acceleration is available.
 */
static const PROV_GCM_HW aes_gcm = {
    generic_aes_gcm_init_key,
    gcm_setiv,
    gcm_aad_update,
    gcm_cipher_update,
    gcm_cipher_final,
    gcm_one_shot
};
268 | ||
269 | #if !defined(OPENSSL_NO_ARIA) && !defined(FIPS_MODE) | |
270 | ||
271 | static int aria_gcm_init_key(PROV_GCM_CTX *ctx, const unsigned char *key, | |
272 | size_t keylen) | |
273 | { | |
274 | PROV_ARIA_GCM_CTX *actx = (PROV_ARIA_GCM_CTX *)ctx; | |
275 | ARIA_KEY *ks = &actx->ks.ks; | |
276 | ||
277 | SET_KEY_CTR_FN(ks, aria_set_encrypt_key, aria_encrypt, NULL); | |
278 | return 1; | |
279 | } | |
280 | ||
281 | static int aria_cipher_update(PROV_GCM_CTX *ctx, const unsigned char *in, | |
282 | size_t len, unsigned char *out) | |
283 | { | |
284 | if (ctx->enc) { | |
285 | if (CRYPTO_gcm128_encrypt(&ctx->gcm, in, out, len)) | |
286 | return 0; | |
287 | } else { | |
288 | if (CRYPTO_gcm128_decrypt(&ctx->gcm, in, out, len)) | |
289 | return 0; | |
290 | } | |
291 | return 1; | |
292 | } | |
293 | ||
/*
 * ARIA-GCM method table: ARIA key setup and block-wise update; tag
 * handling and one-shot path shared with the AES implementation.
 */
static const PROV_GCM_HW aria_gcm = {
    aria_gcm_init_key,
    gcm_setiv,
    gcm_aad_update,
    aria_cipher_update,
    gcm_cipher_final,
    gcm_one_shot
};
/* ARIA-GCM has a single software implementation; |keybits| is unused. */
const PROV_GCM_HW *PROV_ARIA_HW_gcm(size_t keybits)
{
    return &aria_gcm;
}
306 | ||
307 | #endif /* !defined(OPENSSL_NO_ARIA) && !defined(FIPS_MODE) */ |