char *g; /* m */
dma_addr_t dma_g;
+ struct crypto_kpp *soft_tfm;
};
struct hpre_ecdh_ctx {
/* low address: x->y */
unsigned char *g;
dma_addr_t dma_g;
+ struct crypto_kpp *soft_tfm;
};
struct hpre_ctx {
unsigned int curve_id;
/* for high performance core */
u8 enable_hpcore;
+ bool fallback;
};
struct hpre_asym_request {
struct hpre *hpre;
qp = hpre_create_qp(type);
- if (!qp)
+ if (!qp) {
+ ctx->qp = NULL;
return -ENODEV;
+ }
qp->req_cb = hpre_alg_cb;
ctx->qp = qp;
return ret;
}
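+/*
+ * Build the fallback request in the request context area (sized for it at
+ * init time) and mirror the caller's callback, source and destination.
+ */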
+static struct kpp_request *hpre_dh_prepare_fb_req(struct kpp_request *req)
+{
+ struct kpp_request *fb_req = kpp_request_ctx(req);
+ struct crypto_kpp *tfm = crypto_kpp_reqtfm(req);
+ struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
+
+ kpp_request_set_tfm(fb_req, ctx->dh.soft_tfm);
+ kpp_request_set_callback(fb_req, req->base.flags, req->base.complete, req->base.data);
+ kpp_request_set_input(fb_req, req->src, req->src_len);
+ kpp_request_set_output(fb_req, req->dst, req->dst_len);
+
+ return fb_req;
+}
+
+static int hpre_dh_generate_public_key(struct kpp_request *req)
+{
+ struct crypto_kpp *tfm = crypto_kpp_reqtfm(req);
+ struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
+ struct kpp_request *fb_req;
+
+ if (ctx->fallback) {
+ fb_req = hpre_dh_prepare_fb_req(req);
+ return crypto_kpp_generate_public_key(fb_req);
+ }
+
+ return hpre_dh_compute_value(req);
+}
+
+static int hpre_dh_compute_shared_secret(struct kpp_request *req)
+{
+ struct crypto_kpp *tfm = crypto_kpp_reqtfm(req);
+ struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
+ struct kpp_request *fb_req;
+
+ if (ctx->fallback) {
+ fb_req = hpre_dh_prepare_fb_req(req);
+ return crypto_kpp_compute_shared_secret(fb_req);
+ }
+
+ return hpre_dh_compute_value(req);
+}
+
static int hpre_is_dh_params_length_valid(unsigned int key_sz)
{
#define _HPRE_DH_GRP1 768
struct device *dev = ctx->dev;
unsigned int sz;
- if (params->p_size > HPRE_DH_MAX_P_SZ)
- return -EINVAL;
-
- if (hpre_is_dh_params_length_valid(params->p_size <<
- HPRE_BITS_2_BYTES_SHIFT))
- return -EINVAL;
-
sz = ctx->key_sz = params->p_size;
ctx->dh.xa_p = dma_alloc_coherent(dev, sz << 1,
&ctx->dh.dma_xa_p, GFP_KERNEL);
struct device *dev = ctx->dev;
unsigned int sz = ctx->key_sz;
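+ /* Without a queue pair no DMA buffers were ever allocated */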
+ if (!ctx->qp)
+ return;
+
if (ctx->dh.g) {
dma_free_coherent(dev, sz, ctx->dh.g, ctx->dh.dma_g);
ctx->dh.g = NULL;
if (crypto_dh_decode_key(buf, len, &params) < 0)
return -EINVAL;
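+ /* No device, or a group size the HPRE cannot handle: use the soft tfm */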
+ if (!ctx->qp)
+ goto set_soft_secret;
+
+ if (hpre_is_dh_params_length_valid(params.p_size <<
+ HPRE_BITS_2_BYTES_SHIFT))
+ goto set_soft_secret;
+
/* Free old secret if any */
hpre_dh_clear_ctx(ctx, false);
memcpy(ctx->dh.xa_p + (ctx->key_sz - params.key_size), params.key,
params.key_size);
+ ctx->fallback = false;
return 0;
err_clear_ctx:
hpre_dh_clear_ctx(ctx, false);
return ret;
+set_soft_secret:
+ ctx->fallback = true;
+ return crypto_kpp_set_secret(ctx->dh.soft_tfm, buf, len);
}
static unsigned int hpre_dh_max_size(struct crypto_kpp *tfm)
{
struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
+ if (ctx->fallback)
+ return crypto_kpp_maxsize(ctx->dh.soft_tfm);
+
return ctx->key_sz;
}
static int hpre_dh_init_tfm(struct crypto_kpp *tfm)
{
struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
+ const char *alg = kpp_alg_name(tfm);
+ unsigned int reqsize;
+ int ret;
+
+ ctx->dh.soft_tfm = crypto_alloc_kpp(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
+ if (IS_ERR(ctx->dh.soft_tfm)) {
+ pr_err("Failed to alloc %s tfm!\n", alg);
+ return PTR_ERR(ctx->dh.soft_tfm);
+ }
+
+ crypto_kpp_set_flags(ctx->dh.soft_tfm, crypto_kpp_get_flags(tfm));
+
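+ /* The request ctx must fit either the HW request or a nested fallback kpp_request */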
+ reqsize = max(sizeof(struct hpre_asym_request) + hpre_align_pd(),
+ sizeof(struct kpp_request) + crypto_kpp_reqsize(ctx->dh.soft_tfm));
+ kpp_set_reqsize(tfm, reqsize);
- kpp_set_reqsize(tfm, sizeof(struct hpre_asym_request) + hpre_align_pd());
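+ /* -ENODEV only means no accelerator is present; continue with the soft tfm */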
+ ret = hpre_ctx_init(ctx, HPRE_V2_ALG_TYPE);
+ if (ret && ret != -ENODEV) {
+ crypto_free_kpp(ctx->dh.soft_tfm);
+ return ret;
+ } else if (ret == -ENODEV) {
+ ctx->fallback = true;
+ }
- return hpre_ctx_init(ctx, HPRE_V2_ALG_TYPE);
+ return 0;
}
static void hpre_dh_exit_tfm(struct crypto_kpp *tfm)
struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
hpre_dh_clear_ctx(ctx, true);
+ crypto_free_kpp(ctx->dh.soft_tfm);
}
static void hpre_rsa_drop_leading_zeros(const char **ptr, size_t *len)
struct hpre_sqe *msg = &hpre_req->req;
int ret;
- /* For 512 and 1536 bits key size, use soft tfm instead */
- if (ctx->key_sz == HPRE_RSA_512BITS_KSZ ||
- ctx->key_sz == HPRE_RSA_1536BITS_KSZ) {
+ /* For unsupported key sizes or an unavailable device, use the soft tfm instead */
+ if (ctx->fallback) {
akcipher_request_set_tfm(req, ctx->rsa.soft_tfm);
ret = crypto_akcipher_encrypt(req);
akcipher_request_set_tfm(req, tfm);
struct hpre_sqe *msg = &hpre_req->req;
int ret;
- /* For 512 and 1536 bits key size, use soft tfm instead */
- if (ctx->key_sz == HPRE_RSA_512BITS_KSZ ||
- ctx->key_sz == HPRE_RSA_1536BITS_KSZ) {
+ /* For unsupported key sizes or an unavailable device, use the soft tfm instead */
+ if (ctx->fallback) {
akcipher_request_set_tfm(req, ctx->rsa.soft_tfm);
ret = crypto_akcipher_decrypt(req);
akcipher_request_set_tfm(req, tfm);
ctx->key_sz = vlen;
/* if invalid key size provided, we use software tfm */
- if (!hpre_rsa_key_size_is_support(ctx->key_sz))
+ if (!hpre_rsa_key_size_is_support(ctx->key_sz)) {
+ ctx->fallback = true;
return 0;
+ }
ctx->rsa.pubkey = dma_alloc_coherent(ctx->dev, vlen << 1,
&ctx->rsa.dma_pubkey,
unsigned int half_key_sz = ctx->key_sz >> 1;
struct device *dev = ctx->dev;
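+ /* Without a queue pair the setkey path never allocated any DMA memory */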
+ if (!ctx->qp)
+ return;
+
if (ctx->rsa.pubkey) {
dma_free_coherent(dev, ctx->key_sz << 1,
ctx->rsa.pubkey, ctx->rsa.dma_pubkey);
goto free;
}
+ ctx->fallback = false;
return 0;
free:
if (ret)
return ret;
+ if (!ctx->qp)
+ return 0;
+
return hpre_rsa_setkey(ctx, key, keylen, false);
}
if (ret)
return ret;
+ if (!ctx->qp)
+ return 0;
+
return hpre_rsa_setkey(ctx, key, keylen, true);
}
{
struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);
- /* For 512 and 1536 bits key size, use soft tfm instead */
- if (ctx->key_sz == HPRE_RSA_512BITS_KSZ ||
- ctx->key_sz == HPRE_RSA_1536BITS_KSZ)
+ /* For unsupported key sizes or an unavailable device, use the soft tfm instead */
+ if (ctx->fallback)
return crypto_akcipher_maxsize(ctx->rsa.soft_tfm);
return ctx->key_sz;
hpre_align_pd());
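+ /* A missing device is handled by switching to the soft tfm, not by failing */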
ret = hpre_ctx_init(ctx, HPRE_V2_ALG_TYPE);
- if (ret)
+ if (ret && ret != -ENODEV) {
crypto_free_akcipher(ctx->rsa.soft_tfm);
+ return ret;
+ } else if (ret == -ENODEV) {
+ ctx->fallback = true;
+ }
- return ret;
+ return 0;
}
static void hpre_rsa_exit_tfm(struct crypto_akcipher *tfm)
struct ecdh params;
int ret;
+ if (ctx->fallback)
+ return crypto_kpp_set_secret(ctx->ecdh.soft_tfm, buf, len);
+
if (crypto_ecdh_decode_key(buf, len, &params) < 0) {
dev_err(dev, "failed to decode ecdh key!\n");
return -EINVAL;
return ret;
}
+static int hpre_ecdh_generate_public_key(struct kpp_request *req)
+{
+ struct crypto_kpp *tfm = crypto_kpp_reqtfm(req);
+ struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
+ int ret;
+
+ if (ctx->fallback) {
+ kpp_request_set_tfm(req, ctx->ecdh.soft_tfm);
+ ret = crypto_kpp_generate_public_key(req);
+ kpp_request_set_tfm(req, tfm);
+ return ret;
+ }
+
+ return hpre_ecdh_compute_value(req);
+}
+
+static int hpre_ecdh_compute_shared_secret(struct kpp_request *req)
+{
+ struct crypto_kpp *tfm = crypto_kpp_reqtfm(req);
+ struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
+ int ret;
+
+ if (ctx->fallback) {
+ kpp_request_set_tfm(req, ctx->ecdh.soft_tfm);
+ ret = crypto_kpp_compute_shared_secret(req);
+ kpp_request_set_tfm(req, tfm);
+ return ret;
+ }
+
+ return hpre_ecdh_compute_value(req);
+}
+
static unsigned int hpre_ecdh_max_size(struct crypto_kpp *tfm)
{
struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
+ if (ctx->fallback)
+ return crypto_kpp_maxsize(ctx->ecdh.soft_tfm);
+
/* max size is the pub_key_size, include x and y */
return ctx->key_sz << 1;
}
+static int hpre_ecdh_init_tfm(struct crypto_kpp *tfm)
+{
+ struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
+ const char *alg = kpp_alg_name(tfm);
+ int ret;
+
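+ /* Try the accelerator first; fall back to a soft tfm only when none exists */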
+ ret = hpre_ctx_init(ctx, HPRE_V3_ECC_ALG_TYPE);
+ if (!ret) {
+ kpp_set_reqsize(tfm, sizeof(struct hpre_asym_request) + hpre_align_pd());
+ return 0;
+ } else if (ret != -ENODEV) {
+ return ret;
+ }
+
+ ctx->ecdh.soft_tfm = crypto_alloc_kpp(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
+ if (IS_ERR(ctx->ecdh.soft_tfm)) {
+ pr_err("Failed to alloc %s tfm!\n", alg);
+ return PTR_ERR(ctx->ecdh.soft_tfm);
+ }
+
+ crypto_kpp_set_flags(ctx->ecdh.soft_tfm, crypto_kpp_get_flags(tfm));
+ ctx->fallback = true;
+
+ return 0;
+}
+
static int hpre_ecdh_nist_p192_init_tfm(struct crypto_kpp *tfm)
{
struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
ctx->curve_id = ECC_CURVE_NIST_P192;
- kpp_set_reqsize(tfm, sizeof(struct hpre_asym_request) + hpre_align_pd());
-
- return hpre_ctx_init(ctx, HPRE_V3_ECC_ALG_TYPE);
+ return hpre_ecdh_init_tfm(tfm);
}
static int hpre_ecdh_nist_p256_init_tfm(struct crypto_kpp *tfm)
ctx->curve_id = ECC_CURVE_NIST_P256;
ctx->enable_hpcore = 1;
- kpp_set_reqsize(tfm, sizeof(struct hpre_asym_request) + hpre_align_pd());
-
- return hpre_ctx_init(ctx, HPRE_V3_ECC_ALG_TYPE);
+ return hpre_ecdh_init_tfm(tfm);
}
static int hpre_ecdh_nist_p384_init_tfm(struct crypto_kpp *tfm)
ctx->curve_id = ECC_CURVE_NIST_P384;
- kpp_set_reqsize(tfm, sizeof(struct hpre_asym_request) + hpre_align_pd());
-
- return hpre_ctx_init(ctx, HPRE_V3_ECC_ALG_TYPE);
+ return hpre_ecdh_init_tfm(tfm);
}
static void hpre_ecdh_exit_tfm(struct crypto_kpp *tfm)
{
struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
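+ /* In fallback mode only the soft tfm was allocated */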
+ if (ctx->fallback) {
+ crypto_free_kpp(ctx->ecdh.soft_tfm);
+ return;
+ }
+
hpre_ecc_clear_ctx(ctx, true);
}
.cra_name = "rsa",
.cra_driver_name = "hpre-rsa",
.cra_module = THIS_MODULE,
+ .cra_flags = CRYPTO_ALG_NEED_FALLBACK,
},
};
static struct kpp_alg dh = {
.set_secret = hpre_dh_set_secret,
- .generate_public_key = hpre_dh_compute_value,
- .compute_shared_secret = hpre_dh_compute_value,
+ .generate_public_key = hpre_dh_generate_public_key,
+ .compute_shared_secret = hpre_dh_compute_shared_secret,
.max_size = hpre_dh_max_size,
.init = hpre_dh_init_tfm,
.exit = hpre_dh_exit_tfm,
.cra_name = "dh",
.cra_driver_name = "hpre-dh",
.cra_module = THIS_MODULE,
+ .cra_flags = CRYPTO_ALG_NEED_FALLBACK,
},
};
static struct kpp_alg ecdh_curves[] = {
{
.set_secret = hpre_ecdh_set_secret,
- .generate_public_key = hpre_ecdh_compute_value,
- .compute_shared_secret = hpre_ecdh_compute_value,
+ .generate_public_key = hpre_ecdh_generate_public_key,
+ .compute_shared_secret = hpre_ecdh_compute_shared_secret,
.max_size = hpre_ecdh_max_size,
.init = hpre_ecdh_nist_p192_init_tfm,
.exit = hpre_ecdh_exit_tfm,
.cra_name = "ecdh-nist-p192",
.cra_driver_name = "hpre-ecdh-nist-p192",
.cra_module = THIS_MODULE,
+ .cra_flags = CRYPTO_ALG_NEED_FALLBACK,
},
}, {
.set_secret = hpre_ecdh_set_secret,
- .generate_public_key = hpre_ecdh_compute_value,
- .compute_shared_secret = hpre_ecdh_compute_value,
+ .generate_public_key = hpre_ecdh_generate_public_key,
+ .compute_shared_secret = hpre_ecdh_compute_shared_secret,
.max_size = hpre_ecdh_max_size,
.init = hpre_ecdh_nist_p256_init_tfm,
.exit = hpre_ecdh_exit_tfm,
.cra_name = "ecdh-nist-p256",
.cra_driver_name = "hpre-ecdh-nist-p256",
.cra_module = THIS_MODULE,
+ .cra_flags = CRYPTO_ALG_NEED_FALLBACK,
},
}, {
.set_secret = hpre_ecdh_set_secret,
- .generate_public_key = hpre_ecdh_compute_value,
- .compute_shared_secret = hpre_ecdh_compute_value,
+ .generate_public_key = hpre_ecdh_generate_public_key,
+ .compute_shared_secret = hpre_ecdh_compute_shared_secret,
.max_size = hpre_ecdh_max_size,
.init = hpre_ecdh_nist_p384_init_tfm,
.exit = hpre_ecdh_exit_tfm,
.cra_name = "ecdh-nist-p384",
.cra_driver_name = "hpre-ecdh-nist-p384",
.cra_module = THIS_MODULE,
+ .cra_flags = CRYPTO_ALG_NEED_FALLBACK,
},
}
};