#define PAES_256_PROTKEY_SIZE (32 + 32) /* key + verification pattern */
#define PXTS_256_PROTKEY_SIZE (32 + 32 + 32) /* k1 + k2 + verification pattern */
+static bool pkey_clrkey_allowed;
+module_param_named(clrkey, pkey_clrkey_allowed, bool, 0444);
+MODULE_PARM_DESC(clrkey, "Allow clear key material (default N)");
+
static u8 *ctrblk;
static DEFINE_MUTEX(ctrblk_lock);
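Note on the new parameter (editorial, not part of the patch): clrkey is registered with 0444 permissions, so it can only be set at load time, e.g. on the kernel command line (paes_s390.clrkey=1) or via modprobe paes_s390 clrkey=1, and is read-only afterwards under /sys/module/paes_s390/parameters/clrkey; the module name paes_s390 is an assumption based on the file name. With the default of N, clear (non-secure) key material is refused once a tfm has passed its self-test, as wired up in convert_key() below.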
* This function may sleep - don't call in non-sleeping context.
*/
static inline int convert_key(const u8 *key, unsigned int keylen,
- struct paes_protkey *pk)
+ struct paes_protkey *pk, bool tested)
{
+ u32 xflags = PKEY_XFLAG_NOMEMALLOC;
int rc, i;
+ if (tested && !pkey_clrkey_allowed)
+ xflags |= PKEY_XFLAG_NOCLEARKEY;
+
pk->len = sizeof(pk->protkey);
/*
}
rc = pkey_key2protkey(key, keylen,
pk->protkey, &pk->len, &pk->type,
- PKEY_XFLAG_NOMEMALLOC);
+ xflags);
}
out:
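To make the intent of the two xflags explicit, here is a small illustrative sketch; it is not part of the patch. The flag names, the pkey_clrkey_allowed variable and the pkey_key2protkey() usage are taken from the hunks above, while the helper name paes_conversion_xflags() is made up purely for illustration.

/*
 * Illustrative sketch only, not part of the patch: how the xflags
 * handed to pkey_key2protkey() are derived in convert_key().
 */
static u32 paes_conversion_xflags(bool tested)
{
	/* the conversion must not trigger memory allocations;
	 * PKEY_XFLAG_NOMEMALLOC is passed unconditionally above */
	u32 xflags = PKEY_XFLAG_NOMEMALLOC;

	/*
	 * While the self-test is still running (tested == false), clear
	 * key material has to be accepted, since the test vectors are
	 * plain clear keys. Once the tfm is tested, clear keys are
	 * refused unless clrkey=1 was given at module load time.
	 */
	if (tested && !pkey_clrkey_allowed)
		xflags |= PKEY_XFLAG_NOCLEARKEY;

	return xflags;
}

The same tested flag, obtained from crypto_skcipher_tested(), is then threaded through the setkey and do_crypt paths below, so that deferred key conversions triggered by -EKEYEXPIRED apply the same policy as the initial setkey.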
 * unnecessary additional conversion but never to invalid data on
 * encrypt or decrypt operations.
*/
-static int paes_convert_key(struct s390_paes_ctx *ctx)
+static int paes_convert_key(struct s390_paes_ctx *ctx, bool tested)
{
struct paes_protkey pk;
int rc;
ctx->pk_state = PK_STATE_CONVERT_IN_PROGRESS;
spin_unlock_bh(&ctx->pk_lock);
- rc = convert_key(ctx->keybuf, ctx->keylen, &pk);
+ rc = convert_key(ctx->keybuf, ctx->keylen, &pk, tested);
/* update context */
spin_lock_bh(&ctx->pk_lock);
* pk_type, pk_len and the protected key in the tfm context.
* See also comments on function paes_convert_key.
*/
-static int pxts_convert_key(struct s390_pxts_ctx *ctx)
+static int pxts_convert_key(struct s390_pxts_ctx *ctx, bool tested)
{
struct paes_protkey pk0, pk1;
size_t split_keylen;
ctx->pk_state = PK_STATE_CONVERT_IN_PROGRESS;
spin_unlock_bh(&ctx->pk_lock);
- rc = convert_key(ctx->keybuf, ctx->keylen, &pk0);
+ rc = convert_key(ctx->keybuf, ctx->keylen, &pk0, tested);
if (rc)
goto out;
}
split_keylen = ctx->keylen / 2;
rc = convert_key(ctx->keybuf + split_keylen,
- split_keylen, &pk1);
+ split_keylen, &pk1, tested);
if (rc)
goto out;
if (pk0.type != pk1.type) {
unsigned int key_len)
{
struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
+ bool tested = crypto_skcipher_tested(tfm);
long fc;
int rc;
goto out;
/* convert key into protected key */
- rc = paes_convert_key(ctx);
+ rc = paes_convert_key(ctx, tested);
if (rc)
goto out;
static int ecb_paes_do_crypt(struct s390_paes_ctx *ctx,
struct s390_pecb_req_ctx *req_ctx,
- bool maysleep)
+ bool tested, bool maysleep)
{
struct ecb_param *param = &req_ctx->param;
struct skcipher_walk *walk = &req_ctx->walk;
rc = -EKEYEXPIRED;
goto out;
}
- rc = paes_convert_key(ctx);
+ rc = paes_convert_key(ctx, tested);
if (rc)
goto out;
spin_lock_bh(&ctx->pk_lock);
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
struct skcipher_walk *walk = &req_ctx->walk;
+ bool tested = crypto_skcipher_tested(tfm);
int rc;
/*
/* Try synchronous operation if no active engine usage */
if (!atomic_read(&ctx->via_engine_ctr)) {
- rc = ecb_paes_do_crypt(ctx, req_ctx, false);
+ rc = ecb_paes_do_crypt(ctx, req_ctx, tested, false);
if (rc == 0)
goto out;
}
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
struct skcipher_walk *walk = &req_ctx->walk;
+ bool tested = crypto_skcipher_tested(tfm);
int rc;
/* walk has already been prepared */
- rc = ecb_paes_do_crypt(ctx, req_ctx, true);
+ rc = ecb_paes_do_crypt(ctx, req_ctx, tested, true);
if (rc == -EKEYEXPIRED) {
/*
* Protected key expired, conversion is in process.
unsigned int key_len)
{
struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
+ bool tested = crypto_skcipher_tested(tfm);
long fc;
int rc;
goto out;
/* convert raw key into protected key */
- rc = paes_convert_key(ctx);
+ rc = paes_convert_key(ctx, tested);
if (rc)
goto out;
static int cbc_paes_do_crypt(struct s390_paes_ctx *ctx,
struct s390_pcbc_req_ctx *req_ctx,
- bool maysleep)
+ bool tested, bool maysleep)
{
struct cbc_param *param = &req_ctx->param;
struct skcipher_walk *walk = &req_ctx->walk;
rc = -EKEYEXPIRED;
goto out;
}
- rc = paes_convert_key(ctx);
+ rc = paes_convert_key(ctx, tested);
if (rc)
goto out;
spin_lock_bh(&ctx->pk_lock);
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
struct skcipher_walk *walk = &req_ctx->walk;
+ bool tested = crypto_skcipher_tested(tfm);
int rc;
/*
/* Try synchronous operation if no active engine usage */
if (!atomic_read(&ctx->via_engine_ctr)) {
- rc = cbc_paes_do_crypt(ctx, req_ctx, false);
+ rc = cbc_paes_do_crypt(ctx, req_ctx, tested, false);
if (rc == 0)
goto out;
}
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
struct skcipher_walk *walk = &req_ctx->walk;
+ bool tested = crypto_skcipher_tested(tfm);
int rc;
/* walk has already been prepared */
- rc = cbc_paes_do_crypt(ctx, req_ctx, true);
+ rc = cbc_paes_do_crypt(ctx, req_ctx, tested, true);
if (rc == -EKEYEXPIRED) {
/*
* Protected key expired, conversion is in process.
unsigned int key_len)
{
struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
+ bool tested = crypto_skcipher_tested(tfm);
long fc;
int rc;
goto out;
/* convert raw key into protected key */
- rc = paes_convert_key(ctx);
+ rc = paes_convert_key(ctx, tested);
if (rc)
goto out;
static int ctr_paes_do_crypt(struct s390_paes_ctx *ctx,
struct s390_pctr_req_ctx *req_ctx,
- bool maysleep)
+ bool tested, bool maysleep)
{
struct ctr_param *param = &req_ctx->param;
struct skcipher_walk *walk = &req_ctx->walk;
rc = -EKEYEXPIRED;
goto out;
}
- rc = paes_convert_key(ctx);
+ rc = paes_convert_key(ctx, tested);
if (rc) {
if (locked)
mutex_unlock(&ctrblk_lock);
rc = -EKEYEXPIRED;
goto out;
}
- rc = paes_convert_key(ctx);
+ rc = paes_convert_key(ctx, tested);
if (rc)
goto out;
spin_lock_bh(&ctx->pk_lock);
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
struct skcipher_walk *walk = &req_ctx->walk;
+ bool tested = crypto_skcipher_tested(tfm);
int rc;
/*
/* Try synchronous operation if no active engine usage */
if (!atomic_read(&ctx->via_engine_ctr)) {
- rc = ctr_paes_do_crypt(ctx, req_ctx, false);
+ rc = ctr_paes_do_crypt(ctx, req_ctx, tested, false);
if (rc == 0)
goto out;
}
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
struct skcipher_walk *walk = &req_ctx->walk;
+ bool tested = crypto_skcipher_tested(tfm);
int rc;
/* walk has already been prepared */
- rc = ctr_paes_do_crypt(ctx, req_ctx, true);
+ rc = ctr_paes_do_crypt(ctx, req_ctx, tested, true);
if (rc == -EKEYEXPIRED) {
/*
* Protected key expired, conversion is in process.
unsigned int in_keylen)
{
struct s390_pxts_ctx *ctx = crypto_skcipher_ctx(tfm);
+ bool tested = crypto_skcipher_tested(tfm);
u8 ckey[2 * AES_MAX_KEY_SIZE];
unsigned int ckey_len;
long fc;
goto out;
/* convert raw key(s) into protected key(s) */
- rc = pxts_convert_key(ctx);
+ rc = pxts_convert_key(ctx, tested);
if (rc)
goto out;
static int xts_paes_do_crypt_fullkey(struct s390_pxts_ctx *ctx,
struct s390_pxts_req_ctx *req_ctx,
- bool maysleep)
+ bool tested, bool maysleep)
{
struct xts_full_km_param *param = &req_ctx->param.full_km_param;
struct skcipher_walk *walk = &req_ctx->walk;
rc = -EKEYEXPIRED;
goto out;
}
- rc = pxts_convert_key(ctx);
+ rc = pxts_convert_key(ctx, tested);
if (rc)
goto out;
spin_lock_bh(&ctx->pk_lock);
struct xts_km_param *param,
struct skcipher_walk *walk,
unsigned int keylen,
- unsigned int offset, bool maysleep)
+ unsigned int offset,
+ bool tested, bool maysleep)
{
struct xts_pcc_param pcc_param;
unsigned long cc = 1;
rc = -EKEYEXPIRED;
break;
}
- rc = pxts_convert_key(ctx);
+ rc = pxts_convert_key(ctx, tested);
if (rc)
break;
continue;
static int xts_paes_do_crypt_2keys(struct s390_pxts_ctx *ctx,
struct s390_pxts_req_ctx *req_ctx,
- bool maysleep)
+ bool tested, bool maysleep)
{
struct xts_km_param *param = &req_ctx->param.km_param;
struct skcipher_walk *walk = &req_ctx->walk;
if (!req_ctx->param_init_done) {
rc = __xts_2keys_prep_param(ctx, param, walk,
- keylen, offset, maysleep);
+ keylen, offset, tested, maysleep);
if (rc)
goto out;
req_ctx->param_init_done = true;
rc = -EKEYEXPIRED;
goto out;
}
- rc = pxts_convert_key(ctx);
+ rc = pxts_convert_key(ctx, tested);
if (rc)
goto out;
spin_lock_bh(&ctx->pk_lock);
static int xts_paes_do_crypt(struct s390_pxts_ctx *ctx,
struct s390_pxts_req_ctx *req_ctx,
- bool maysleep)
+ bool tested, bool maysleep)
{
int pk_state, rc = 0;
switch (ctx->fc) {
case CPACF_KM_PXTS_128:
case CPACF_KM_PXTS_256:
- rc = xts_paes_do_crypt_2keys(ctx, req_ctx, maysleep);
+ rc = xts_paes_do_crypt_2keys(ctx, req_ctx, tested, maysleep);
break;
case CPACF_KM_PXTS_128_FULL:
case CPACF_KM_PXTS_256_FULL:
- rc = xts_paes_do_crypt_fullkey(ctx, req_ctx, maysleep);
+ rc = xts_paes_do_crypt_fullkey(ctx, req_ctx, tested, maysleep);
break;
default:
rc = -EINVAL;
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
struct s390_pxts_ctx *ctx = crypto_skcipher_ctx(tfm);
struct skcipher_walk *walk = &req_ctx->walk;
+ bool tested = crypto_skcipher_tested(tfm);
int rc;
/*
/* Try synchronous operation if no active engine usage */
if (!atomic_read(&ctx->via_engine_ctr)) {
- rc = xts_paes_do_crypt(ctx, req_ctx, false);
+ rc = xts_paes_do_crypt(ctx, req_ctx, tested, false);
if (rc == 0)
goto out;
}
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
struct s390_pxts_ctx *ctx = crypto_skcipher_ctx(tfm);
struct skcipher_walk *walk = &req_ctx->walk;
+ bool tested = crypto_skcipher_tested(tfm);
int rc;
/* walk has already been prepared */
- rc = xts_paes_do_crypt(ctx, req_ctx, true);
+ rc = xts_paes_do_crypt(ctx, req_ctx, tested, true);
if (rc == -EKEYEXPIRED) {
/*
* Protected key expired, conversion is in process.