/*
 * Copyright 2020 The OpenSSL Project Authors. All Rights Reserved.
 *
 * Licensed under the Apache License 2.0 (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

/*
 * Helper functions for AES CBC CTS ciphers.
 *
 * The function dispatch tables are embedded into cipher_aes.c
 * using cipher_aes_cts.inc
 */

/*
 * Refer to SP800-38A-Addendum
 *
 * Ciphertext stealing encrypts plaintext using a block cipher, without padding
 * the message to a multiple of the block size, so the ciphertext is the same
 * size as the plaintext.
 * It does this by altering the processing of the last two blocks of the
 * message. The processing of all but the last two blocks is unchanged, but a
 * portion of the second-last block's ciphertext is "stolen" to pad the last
 * plaintext block. The padded final block is then encrypted as usual.
 * The final ciphertext for the last two blocks consists of the partial block
 * (with the "stolen" portion omitted) plus the full final block, which
 * together are the same size as the original plaintext.
 * Decryption requires decrypting the final block first, then restoring the
 * stolen ciphertext to the partial block, which can then be decrypted as
 * usual.
 *
 * AES_CBC_CTS has 3 variants:
 *  (1) CS1 The NIST variant.
 *      If the length is a multiple of the block size it is the same as CBC
 *      mode; otherwise it produces C1||C2||(C(n-1))*||Cn,
 *      where C(n-1)* is a partial block.
 *  (2) CS2
 *      If the length is a multiple of the block size it is the same as CBC
 *      mode; otherwise it produces C1||C2||Cn||(C(n-1))*,
 *      where C(n-1)* is a partial block.
 *  (3) CS3 The Kerberos5 variant.
 *      Produces C1||C2||Cn||(C(n-1))* regardless of the length.
 *      If the length is a multiple of the block size it looks like CBC mode
 *      with the last 2 blocks swapped; otherwise it is the same as CS2.
 */
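
/*
 * Illustrative example (not part of the original source comments), derived
 * from the description above: assume a 56-byte message and the 16-byte AES
 * block size, i.e. plaintext blocks P1, P2, P3, P4* where P4* holds the
 * final 8 bytes.
 *
 *   CS1: C1 || C2 || C3* || C4   (C3* = first 8 bytes of C3, C4 = full block)
 *   CS2: C1 || C2 || C4 || C3*   (partial final block, so same layout as CS3)
 *   CS3: C1 || C2 || C4 || C3*
 *
 * For a 64-byte message (an exact multiple of the block size) CS1 and CS2
 * reduce to plain CBC (C1 || C2 || C3 || C4), while CS3 still swaps the last
 * two blocks (C1 || C2 || C4 || C3).
 */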

#include "e_os.h" /* strcasecmp */
#include <openssl/core_names.h>
#include <openssl/aes.h>
#include "prov/ciphercommon.h"
#include "internal/nelem.h"
#include "cipher_aes_cts.h"

/* The mode with value 0 (CS1) is the default */
#define CTS_CS1 0
#define CTS_CS2 1
#define CTS_CS3 2

typedef union {
    size_t align;
    unsigned char c[AES_BLOCK_SIZE];
} aligned_16bytes;

typedef struct cts_mode_name2id_st {
    unsigned int id;
    const char *name;
} CTS_MODE_NAME2ID;

static CTS_MODE_NAME2ID cts_modes[] =
{
    { CTS_CS1, OSSL_CIPHER_CTS_MODE_CS1 },
    { CTS_CS2, OSSL_CIPHER_CTS_MODE_CS2 },
    { CTS_CS3, OSSL_CIPHER_CTS_MODE_CS3 },
};

const char *ossl_aes_cbc_cts_mode_id2name(unsigned int id)
{
    size_t i;

    for (i = 0; i < OSSL_NELEM(cts_modes); ++i) {
        if (cts_modes[i].id == id)
            return cts_modes[i].name;
    }
    return NULL;
}

int ossl_aes_cbc_cts_mode_name2id(const char *name)
{
    size_t i;

    for (i = 0; i < OSSL_NELEM(cts_modes); ++i) {
        if (strcasecmp(name, cts_modes[i].name) == 0)
            return (int)cts_modes[i].id;
    }
    return -1;
}
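
/*
 * Illustrative note (not in the original source): the lookup above uses
 * strcasecmp(), so for example ossl_aes_cbc_cts_mode_name2id("cs3") and
 * ossl_aes_cbc_cts_mode_name2id("CS3") both return CTS_CS3, while an
 * unrecognised name returns -1.
 */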

static size_t cts128_cs1_encrypt(PROV_CIPHER_CTX *ctx, const unsigned char *in,
                                 unsigned char *out, size_t len)
{
    aligned_16bytes tmp_in;
    size_t residue;

    residue = len % AES_BLOCK_SIZE;
    len -= residue;
    /* Encrypt all the full blocks in normal CBC mode */
    if (!ctx->hw->cipher(ctx, out, in, len))
        return 0;

    if (residue == 0)
        return len;

    in += len;
    out += len;

    /*
     * Zero pad the final partial block and encrypt it. The result (Cn) is
     * written over the "stolen" tail of the previous ciphertext block and
     * extends into the final partial output position.
     */
    memset(tmp_in.c, 0, sizeof(tmp_in));
    memcpy(tmp_in.c, in, residue);
    if (!ctx->hw->cipher(ctx, out - AES_BLOCK_SIZE + residue, tmp_in.c,
                         AES_BLOCK_SIZE))
        return 0;
    return len + residue;
}

static void do_xor(const unsigned char *in1, const unsigned char *in2,
                   size_t len, unsigned char *out)
{
    size_t i;

    for (i = 0; i < len; ++i)
        out[i] = in1[i] ^ in2[i];
}

static size_t cts128_cs1_decrypt(PROV_CIPHER_CTX *ctx, const unsigned char *in,
                                 unsigned char *out, size_t len)
{
    aligned_16bytes mid_iv, ct_mid, pt_last;
    size_t residue;

    residue = len % AES_BLOCK_SIZE;
    if (residue == 0) {
        /* If there are no partial blocks then it is the same as CBC mode */
        if (!ctx->hw->cipher(ctx, out, in, len))
            return 0;
        return len;
    }
    /* Process blocks at the start - but leave the last 2 blocks */
    len -= AES_BLOCK_SIZE + residue;
    if (len > 0) {
        if (!ctx->hw->cipher(ctx, out, in, len))
            return 0;
        in += len;
        out += len;
    }
    /* Save the iv that will be used by the second last block */
    memcpy(mid_iv.c, ctx->iv, AES_BLOCK_SIZE);

    /* Decrypt the last block first using an iv of zero */
    memset(ctx->iv, 0, AES_BLOCK_SIZE);
    if (!ctx->hw->cipher(ctx, pt_last.c, in + residue, AES_BLOCK_SIZE))
        return 0;

    /*
     * Rebuild the ciphertext of the second-last block: the tail of the
     * decrypted last block is the "stolen" ciphertext, so keep that and
     * replace the start with the ciphertext bytes of the partial
     * second-last block.
     */
    memcpy(ct_mid.c, in, residue);
    memcpy(ct_mid.c + residue, pt_last.c + residue, AES_BLOCK_SIZE - residue);
    /*
     * Recover the last partial plaintext block.
     * Now that we have the ciphertext of the second-last block, apply it to
     * the last block. We have already decrypted that block using an IV of
     * zero, and for CBC decryption the IV is simply XORed in after the block
     * decryption - so just XOR in the ciphertext.
     */
    do_xor(ct_mid.c, pt_last.c, residue, out + AES_BLOCK_SIZE);

    /* Restore the iv needed by the second last block */
    memcpy(ctx->iv, mid_iv.c, AES_BLOCK_SIZE);
    /*
     * Decrypt the second-last block now that its ciphertext has been
     * rebuilt.
     */
    if (!ctx->hw->cipher(ctx, out, ct_mid.c, AES_BLOCK_SIZE))
        return 0;

    return len + AES_BLOCK_SIZE + residue;
}

static size_t cts128_cs3_encrypt(PROV_CIPHER_CTX *ctx, const unsigned char *in,
                                 unsigned char *out, size_t len)
{
    aligned_16bytes tmp_in;
    size_t residue;

    if (len <= AES_BLOCK_SIZE) /* CS3 requires 2 blocks */
        return 0;

    residue = len % AES_BLOCK_SIZE;
    if (residue == 0)
        residue = AES_BLOCK_SIZE;
    len -= residue;

    if (!ctx->hw->cipher(ctx, out, in, len))
        return 0;

    in += len;
    out += len;

    /*
     * Save the kept part of the last full ciphertext block (C(n-1)*) at the
     * end of the output, then overwrite that block's position with the
     * encryption of the zero padded final block (Cn), so the last two output
     * blocks end up as Cn || C(n-1)*.
     */
    memset(tmp_in.c, 0, sizeof(tmp_in));
    memcpy(tmp_in.c, in, residue);
    memcpy(out, out - AES_BLOCK_SIZE, residue);
    if (!ctx->hw->cipher(ctx, out - AES_BLOCK_SIZE, tmp_in.c, AES_BLOCK_SIZE))
        return 0;
    return len + residue;
}

/*
 * Note:
 * The ciphertext (in) is of the form C(0), C(1), ..., C(n), C(n-1)* where
 * C(n) is a full block and C(n-1)* may be either a partial or a full block.
 * This means that the output plaintext (out) needs to swap the plaintext of
 * the last two decoded ciphertext blocks.
 */
static size_t cts128_cs3_decrypt(PROV_CIPHER_CTX *ctx, const unsigned char *in,
                                 unsigned char *out, size_t len)
{
    aligned_16bytes mid_iv, ct_mid, pt_last;
    size_t residue;

    if (len <= AES_BLOCK_SIZE) /* CS3 requires 2 blocks */
        return 0;

    /* Process blocks at the start - but leave the last 2 blocks */
    residue = len % AES_BLOCK_SIZE;
    if (residue == 0)
        residue = AES_BLOCK_SIZE;
    len -= AES_BLOCK_SIZE + residue;

    if (len > 0) {
        if (!ctx->hw->cipher(ctx, out, in, len))
            return 0;
        in += len;
        out += len;
    }
    /* Save the iv that will be used by the second last block */
    memcpy(mid_iv.c, ctx->iv, AES_BLOCK_SIZE);

    /* Decrypt the Cn block first using an iv of zero */
    memset(ctx->iv, 0, AES_BLOCK_SIZE);
    if (!ctx->hw->cipher(ctx, pt_last.c, in, AES_BLOCK_SIZE))
        return 0;

    /*
     * Rebuild the ciphertext of C(n-1): the tail of the decrypted C(n) block
     * is the "stolen" ciphertext, so keep that and replace the start with the
     * C(n-1)* ciphertext bytes (which come last in the input).
     */
    memcpy(ct_mid.c, in + AES_BLOCK_SIZE, residue);
    if (residue != AES_BLOCK_SIZE)
        memcpy(ct_mid.c + residue, pt_last.c + residue, AES_BLOCK_SIZE - residue);
    /*
     * Recover the final (possibly partial) plaintext block.
     * Now that we have the ciphertext of the second-last block, apply it to
     * the last block. We have already decrypted that block using an IV of
     * zero, and for CBC decryption the IV is simply XORed in after the block
     * decryption - so just XOR in the ciphertext.
     */
    do_xor(ct_mid.c, pt_last.c, residue, out + AES_BLOCK_SIZE);

    /* Restore the iv needed by the second last block */
    memcpy(ctx->iv, mid_iv.c, AES_BLOCK_SIZE);
    /*
     * Decrypt the second-last block now that its ciphertext has been
     * rebuilt.
     */
    if (!ctx->hw->cipher(ctx, out, ct_mid.c, AES_BLOCK_SIZE))
        return 0;

    return len + AES_BLOCK_SIZE + residue;
}

static size_t cts128_cs2_encrypt(PROV_CIPHER_CTX *ctx, const unsigned char *in,
                                 unsigned char *out, size_t len)
{
    if (len % AES_BLOCK_SIZE == 0) {
        /* If there are no partial blocks then it is the same as CBC mode */
        if (!ctx->hw->cipher(ctx, out, in, len))
            return 0;
        return len;
    }
    /* For partial blocks CS2 is equivalent to CS3 */
    return cts128_cs3_encrypt(ctx, in, out, len);
}

static size_t cts128_cs2_decrypt(PROV_CIPHER_CTX *ctx, const unsigned char *in,
                                 unsigned char *out, size_t len)
{
    if (len % AES_BLOCK_SIZE == 0) {
        /* If there are no partial blocks then it is the same as CBC mode */
        if (!ctx->hw->cipher(ctx, out, in, len))
            return 0;
        return len;
    }
    /* For partial blocks CS2 is equivalent to CS3 */
    return cts128_cs3_decrypt(ctx, in, out, len);
}

int ossl_aes_cbc_cts_block_update(void *vctx, unsigned char *out, size_t *outl,
                                  size_t outsize, const unsigned char *in,
                                  size_t inl)
{
    PROV_CIPHER_CTX *ctx = (PROV_CIPHER_CTX *)vctx;
    size_t sz = 0;

    if (inl < AES_BLOCK_SIZE) /* There must be at least one block for CTS mode */
        return 0;
    if (outsize < inl)
        return 0;
    if (out == NULL) {
        *outl = inl;
        return 1;
    }

    /*
     * Return an error if update is called more than once; only one-shot
     * operation is supported.
     */
    if (ctx->updated == 1)
        return 0;

    if (ctx->enc) {
        if (ctx->cts_mode == CTS_CS1)
            sz = cts128_cs1_encrypt(ctx, in, out, inl);
        else if (ctx->cts_mode == CTS_CS2)
            sz = cts128_cs2_encrypt(ctx, in, out, inl);
        else if (ctx->cts_mode == CTS_CS3)
            sz = cts128_cs3_encrypt(ctx, in, out, inl);
    } else {
        if (ctx->cts_mode == CTS_CS1)
            sz = cts128_cs1_decrypt(ctx, in, out, inl);
        else if (ctx->cts_mode == CTS_CS2)
            sz = cts128_cs2_decrypt(ctx, in, out, inl);
        else if (ctx->cts_mode == CTS_CS3)
            sz = cts128_cs3_decrypt(ctx, in, out, inl);
    }
    if (sz == 0)
        return 0;
    ctx->updated = 1; /* Stop multiple updates being allowed */
    *outl = sz;
    return 1;
}

int ossl_aes_cbc_cts_block_final(void *vctx, unsigned char *out, size_t *outl,
                                 size_t outsize)
{
    *outl = 0;
    return 1;
}
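
/*
 * Illustrative usage sketch (not part of this file): how an application might
 * select one of the CTS modes above through the public EVP API, assuming an
 * OpenSSL 3.x build where the "AES-128-CBC-CTS" cipher and the
 * OSSL_CIPHER_PARAM_CTS_MODE ("cts_mode") parameter are available, and where
 * key, iv, msg and out are suitably sized caller-supplied buffers. Note that
 * ossl_aes_cbc_cts_block_update() only supports a single update call, so the
 * whole message must be passed in one go.
 *
 *     #include <openssl/evp.h>
 *     #include <openssl/core_names.h>
 *     #include <openssl/params.h>
 *
 *     EVP_CIPHER *cipher = EVP_CIPHER_fetch(NULL, "AES-128-CBC-CTS", NULL);
 *     EVP_CIPHER_CTX *cctx = EVP_CIPHER_CTX_new();
 *     char mode[] = "CS3";              // or "CS1" (the default) / "CS2"
 *     OSSL_PARAM params[2];
 *     int outl = 0, finl = 0;
 *
 *     params[0] = OSSL_PARAM_construct_utf8_string(OSSL_CIPHER_PARAM_CTS_MODE,
 *                                                  mode, 0);
 *     params[1] = OSSL_PARAM_construct_end();
 *
 *     EVP_EncryptInit_ex2(cctx, cipher, key, iv, params);
 *     EVP_EncryptUpdate(cctx, out, &outl, msg, msg_len);  // one-shot only
 *     EVP_EncryptFinal_ex(cctx, out + outl, &finl);       // produces no data
 */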