/*
 * Copyright 2001-2019 The OpenSSL Project Authors. All Rights Reserved.
 *
 * Licensed under the Apache License 2.0 (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

/*
 * IBM S390X AES GCM support
 * Note this file is included by aes_gcm_hw.c
 */
/*
 * Total scratch-buffer length needed for an IV of i bytes when the IV is
 * not the 12-byte default: the IV zero-padded up to a 16-byte multiple,
 * plus one final 16-byte block (8 zero bytes + 8-byte bit length).
 */
#define S390X_gcm_ivpadlen(i) (((((i) + 15) / 16) * 16) + 16)
/*
 * s390x_aes_gcm_init_key: load the raw AES key into the KMA parameter
 * block and select the KMA function code for this key length.
 * NOTE(review): this chunk is a garbled extraction -- the function's
 * braces, any ctx bookkeeping and the return statement are missing from
 * view; only the visible statements are documented here.
 */
18 static int s390x_aes_gcm_init_key(PROV_GCM_CTX
*ctx
,
19 const unsigned char *key
, size_t keylen
)
/* Downcast to the AES-specific context to reach the s390x state. */
21 PROV_AES_GCM_CTX
*actx
= (PROV_AES_GCM_CTX
*)ctx
;
/* Copy the key into the KMA parameter block's key field (k). */
24 memcpy(&actx
->plat
.s390x
.param
.kma
.k
, key
, keylen
);
/* Select the KMA function code matching the key length (128/192/256). */
25 actx
->plat
.s390x
.fc
= S390X_AES_FC(keylen
);
/*
 * Set the decrypt modifier bit on the function code.
 * NOTE(review): in the full source this is presumably guarded by a
 * decrypt-mode check (e.g. "if (!ctx->enc)") -- the guard line is
 * missing from this extraction; confirm against upstream.
 */
27 actx
->plat
.s390x
.fc
|= S390X_DECRYPT
;
/*
 * s390x_aes_gcm_setiv: derive J0 (the pre-counter block) and the initial
 * 32-bit counter from the IV, per GCM: a 12-byte IV is used directly,
 * any other length is GHASHed via the KMA instruction.
 * NOTE(review): garbled extraction -- the ivlen parameter line, braces
 * and the else/return structure are missing from view.
 */
31 static int s390x_aes_gcm_setiv(PROV_GCM_CTX
*ctx
, const unsigned char *iv
,
/* AES-specific view of the context and its KMA parameter block. */
34 PROV_AES_GCM_CTX
*actx
= (PROV_AES_GCM_CTX
*)ctx
;
35 S390X_KMA_PARAMS
*kma
= &actx
->plat
.s390x
.param
.kma
;
/* Reset all partial-block residue counters (msg, aad, keystream). */
41 actx
->plat
.s390x
.mreslen
= 0;
42 actx
->plat
.s390x
.areslen
= 0;
43 actx
->plat
.s390x
.kreslen
= 0;
/* Default 12-byte IV: J0 is simply the IV (counter appended later). */
45 if (ivlen
== AES_GCM_IV_DEFAULT_SIZE
) {
46 memcpy(&kma
->j0
, iv
, ivlen
);
/* Non-default IV length: J0 = GHASH(IV padded || 0^64 || len(IV) bits). */
50 unsigned long long ivbits
= ivlen
<< 3;
51 size_t len
= S390X_gcm_ivpadlen(ivlen
);
52 unsigned char iv_zero_pad
[S390X_gcm_ivpadlen(AES_GCM_IV_MAX_SIZE
)];
54 * The IV length needs to be zero padded to be a multiple of 16 bytes
55 * followed by 8 bytes of zeros and 8 bytes for the IV length.
56 * The GHASH of this value can then be calculated.
58 memcpy(iv_zero_pad
, iv
, ivlen
);
59 memset(iv_zero_pad
+ ivlen
, 0, len
- ivlen
);
/* Store the big-endian IV bit length in the final 8 bytes. */
60 memcpy(iv_zero_pad
+ len
- sizeof(ivbits
), &ivbits
, sizeof(ivbits
));
62 * Calculate the ghash of the iv - the result is stored into the tag
/* Feed the padded IV through KMA as AAD-only input to compute its GHASH. */
65 s390x_kma(iv_zero_pad
, len
, NULL
, 0, NULL
, actx
->plat
.s390x
.fc
, kma
);
66 actx
->plat
.s390x
.fc
|= S390X_KMA_HS
; /* The hash subkey is set */
68 /* Copy the 128 bit GHASH result into J0 and clear the tag */
69 kma
->j0
.g
[0] = kma
->t
.g
[0];
70 kma
->j0
.g
[1] = kma
->t
.g
[1];
73 /* Set the 32 bit counter */
74 kma
->cv
.w
= kma
->j0
.w
[3];
/*
 * s390x_aes_gcm_cipher_final: flush any buffered AAD/message residue
 * through KMA with the LAAD|LPC (last-block) flags, producing the final
 * authentication tag, then either emit it (encrypt) or compare it in
 * constant time (decrypt).
 * NOTE(review): garbled extraction -- the enc/dec branch lines, the rc
 * declaration and the returns are missing from view.
 */
79 static int s390x_aes_gcm_cipher_final(PROV_GCM_CTX
*ctx
, unsigned char *tag
)
81 PROV_AES_GCM_CTX
*actx
= (PROV_AES_GCM_CTX
*)ctx
;
82 S390X_KMA_PARAMS
*kma
= &actx
->plat
.s390x
.param
.kma
;
/* Scratch output for the final partial-block cipher result. */
83 unsigned char out
[AES_BLOCK_SIZE
];
/* Process remaining aad + message residue; LAAD|LPC mark last blocks. */
88 s390x_kma(actx
->plat
.s390x
.ares
, actx
->plat
.s390x
.areslen
,
89 actx
->plat
.s390x
.mres
, actx
->plat
.s390x
.mreslen
, out
,
90 actx
->plat
.s390x
.fc
| S390X_KMA_LAAD
| S390X_KMA_LPC
, kma
);
92 /* gctx->mres already returned to the caller */
/* Scrub the plaintext/ciphertext residue from the stack buffer. */
93 OPENSSL_cleanse(out
, actx
->plat
.s390x
.mreslen
);
/* Encrypt path (presumably): report and copy out the full-size tag. */
96 ctx
->taglen
= AES_GCM_TAG_MAX_SIZE
;
97 memcpy(tag
, kma
->t
.b
, ctx
->taglen
);
/* Decrypt path (presumably): constant-time compare against caller's tag. */
103 rc
= (CRYPTO_memcmp(tag
, kma
->t
.b
, ctx
->taglen
) == 0);
/*
 * s390x_aes_gcm_one_shot: process all AAD and the whole message in a
 * single KMA invocation (lengths are known up front, so total bit
 * lengths go straight into the parameter block), then emit or verify
 * the tag.
 * NOTE(review): garbled extraction -- the out parameter line, enc/dec
 * branches, rc declaration and returns are missing from view.
 */
108 static int s390x_aes_gcm_one_shot(PROV_GCM_CTX
*ctx
,
109 unsigned char *aad
, size_t aad_len
,
110 const unsigned char *in
, size_t in_len
,
112 unsigned char *tag
, size_t taglen
)
114 PROV_AES_GCM_CTX
*actx
= (PROV_AES_GCM_CTX
*)ctx
;
115 S390X_KMA_PARAMS
*kma
= &actx
->plat
.s390x
.param
.kma
;
/* Total AAD and message lengths in bits for the final GHASH block. */
118 kma
->taadl
= aad_len
<< 3;
119 kma
->tpcl
= in_len
<< 3;
/* One-shot KMA over all AAD and message data; LAAD|LPC = last blocks. */
120 s390x_kma(aad
, aad_len
, in
, in_len
, out
,
121 actx
->plat
.s390x
.fc
| S390X_KMA_LAAD
| S390X_KMA_LPC
, kma
);
/* Encrypt path (presumably): copy out the computed tag. */
124 memcpy(tag
, kma
->t
.b
, taglen
);
/* Decrypt path (presumably): constant-time tag verification. */
127 rc
= (CRYPTO_memcmp(tag
, kma
->t
.b
, taglen
) == 0);
/*
 * s390x_aes_gcm_aad_update: stream AAD into the GHASH state. Partial
 * blocks are accumulated in the 16-byte ares buffer; only complete
 * 16-byte blocks are fed to KMA. Leftover bytes are stashed for the
 * next call.
 * NOTE(review): garbled extraction -- braces, loop headers, the n/rem
 * declarations, overflow-error return and final return are missing
 * from view.
 */
133 * Process additional authenticated data. Returns 1 on success. Code is
136 static int s390x_aes_gcm_aad_update(PROV_GCM_CTX
*ctx
,
137 const unsigned char *aad
, size_t len
)
139 PROV_AES_GCM_CTX
*actx
= (PROV_AES_GCM_CTX
*)ctx
;
140 S390X_KMA_PARAMS
*kma
= &actx
->plat
.s390x
.param
.kma
;
141 unsigned long long alen
;
144 /* If already processed pt/ct then error */
148 /* update the total aad length */
149 alen
= kma
->taadl
+ len
;
/* Reject AAD beyond GCM's 2^61-byte limit, and catch size_t wraparound. */
150 if (alen
> (U64(1) << 61) || (sizeof(len
) == 8 && alen
< len
))
154 /* check if there is any existing aad data from a previous add */
155 n
= actx
->plat
.s390x
.areslen
;
157 /* add additional data to a buffer until it has 16 bytes */
159 actx
->plat
.s390x
.ares
[n
] = *aad
;
164 /* ctx->ares contains a complete block if offset has wrapped around */
/* Hash the now-complete 16-byte buffered block. */
166 s390x_kma(actx
->plat
.s390x
.ares
, 16, NULL
, 0, NULL
,
167 actx
->plat
.s390x
.fc
, kma
);
/* Hash subkey has been computed by KMA after the first call. */
168 actx
->plat
.s390x
.fc
|= S390X_KMA_HS
;
170 actx
->plat
.s390x
.areslen
= n
;
173 /* If there are leftover bytes (< 128 bits) save them for next time */
175 /* Add any remaining 16 byte blocks (128 bit each) */
178 s390x_kma(aad
, len
, NULL
, 0, NULL
, actx
->plat
.s390x
.fc
, kma
);
179 actx
->plat
.s390x
.fc
|= S390X_KMA_HS
;
/* Stash the trailing partial block into ares for the next call. */
184 actx
->plat
.s390x
.areslen
= rem
;
188 actx
->plat
.s390x
.ares
[rem
] = aad
[rem
];
/*
 * s390x_aes_gcm_cipher_update: stream plaintext/ciphertext through KMA.
 * Buffered AAD residue is flushed first (LAAD), full 16-byte message
 * blocks go through KMA, and a trailing partial block is en/decrypted
 * by hand with a KM-generated keystream block and saved in mres so the
 * authentication can be completed later.
 * NOTE(review): garbled extraction -- braces, loop headers, the out
 * parameter line, buf/rem/i declarations, error return and final
 * return are missing from view.
 */
195 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 1 for
196 * success. Code is big-endian.
198 static int s390x_aes_gcm_cipher_update(PROV_GCM_CTX
*ctx
,
199 const unsigned char *in
, size_t len
,
202 PROV_AES_GCM_CTX
*actx
= (PROV_AES_GCM_CTX
*)ctx
;
203 S390X_KMA_PARAMS
*kma
= &actx
->plat
.s390x
.param
.kma
;
204 const unsigned char *inptr
;
205 unsigned long long mlen
;
/* Enforce GCM's message length limit (2^36 - 32 bytes) and catch wrap. */
213 mlen
= kma
->tpcl
+ len
;
214 if (mlen
> ((U64(1) << 36) - 32) || (sizeof(len
) == 8 && mlen
< len
))
/* Existing partial message block from a previous call, if any. */
218 n
= actx
->plat
.s390x
.mreslen
;
/* Top up the mres buffer one byte at a time until it holds 16 bytes. */
223 actx
->plat
.s390x
.mres
[n
] = *inptr
;
228 /* ctx->mres contains a complete block if offset has wrapped around */
/* Flush buffered AAD (LAAD) and process the completed message block. */
230 s390x_kma(actx
->plat
.s390x
.ares
, actx
->plat
.s390x
.areslen
,
231 actx
->plat
.s390x
.mres
, 16, buf
.b
,
232 actx
->plat
.s390x
.fc
| S390X_KMA_LAAD
, kma
);
233 actx
->plat
.s390x
.fc
|= S390X_KMA_HS
;
234 actx
->plat
.s390x
.areslen
= 0;
236 /* previous call already encrypted/decrypted its remainder,
237 * see comment below */
238 n
= actx
->plat
.s390x
.mreslen
;
246 actx
->plat
.s390x
.mreslen
= 0;
/* Bulk path: AAD residue + all full message blocks in one KMA call. */
254 s390x_kma(actx
->plat
.s390x
.ares
, actx
->plat
.s390x
.areslen
, in
, len
, out
,
255 actx
->plat
.s390x
.fc
| S390X_KMA_LAAD
, kma
);
258 actx
->plat
.s390x
.fc
|= S390X_KMA_HS
;
259 actx
->plat
.s390x
.areslen
= 0;
263 * If there is a remainder, it has to be saved such that it can be
264 * processed by kma later. However, we also have to do the for-now
265 * unauthenticated encryption/decryption part here and now...
/* Only generate a fresh keystream block when no residue exists yet. */
268 if (!actx
->plat
.s390x
.mreslen
) {
/* Build the next counter block: J0 words with the incremented counter. */
269 buf
.w
[0] = kma
->j0
.w
[0];
270 buf
.w
[1] = kma
->j0
.w
[1];
271 buf
.w
[2] = kma
->j0
.w
[2];
272 buf
.w
[3] = kma
->cv
.w
+ 1;
/* ECB-encrypt the counter block via KM to get keystream into kres.
 * (fc & 0x1f strips the KMA-specific modifier bits for KM.) */
273 s390x_km(buf
.b
, 16, actx
->plat
.s390x
.kres
,
274 actx
->plat
.s390x
.fc
& 0x1f, &kma
->k
);
/* XOR the trailing bytes with keystream; stash input bytes in mres. */
277 n
= actx
->plat
.s390x
.mreslen
;
278 for (i
= 0; i
< rem
; i
++) {
279 actx
->plat
.s390x
.mres
[n
+ i
] = in
[i
];
280 out
[i
] = in
[i
] ^ actx
->plat
.s390x
.kres
[n
+ i
];
282 actx
->plat
.s390x
.mreslen
+= rem
;
/*
 * Hardware dispatch table binding the s390x KMA implementations to the
 * generic provider GCM interface.
 * NOTE(review): garbled extraction -- the original numbering jumps from
 * 288 to 290, so the setiv entry (presumably s390x_aes_gcm_setiv) is
 * missing from view, as are the closing brace and semicolon.
 */
287 static const PROV_GCM_HW s390x_aes_gcm
= {
288 s390x_aes_gcm_init_key
,
290 s390x_aes_gcm_aad_update
,
291 s390x_aes_gcm_cipher_update
,
292 s390x_aes_gcm_cipher_final
,
293 s390x_aes_gcm_one_shot
/*
 * PROV_AES_HW_gcm: return the s390x KMA-accelerated GCM vtable when the
 * CPU advertises KMA support for the requested key size.
 * NOTE(review): garbled extraction -- the braces and the fallback path
 * (presumably returning the generic software implementation when KMA is
 * unavailable) are missing from view.
 */
296 const PROV_GCM_HW
*PROV_AES_HW_gcm(size_t keybits
)
/* Per-key-size CPU capability check (MSA facility bits). */
298 if ((keybits
== 128 && S390X_aes_128_gcm_CAPABLE
)
299 || (keybits
== 192 && S390X_aes_192_gcm_CAPABLE
)
300 || (keybits
== 256 && S390X_aes_256_gcm_CAPABLE
))
301 return &s390x_aes_gcm
;