 	if (acomp_req_on_stack(req) && acomp_is_async(tfm))
 		return -EAGAIN;
-	if (crypto_acomp_req_chain(tfm) || acomp_request_issg(req))
+	if (crypto_acomp_req_virt(tfm) || acomp_request_issg(req))
 		return crypto_acomp_reqtfm(req)->compress(req);
 	return acomp_do_req_chain(req, true);
 }
 	if (acomp_req_on_stack(req) && acomp_is_async(tfm))
 		return -EAGAIN;
-	if (crypto_acomp_req_chain(tfm) || acomp_request_issg(req))
+	if (crypto_acomp_req_virt(tfm) || acomp_request_issg(req))
 		return crypto_acomp_reqtfm(req)->decompress(req);
 	return acomp_do_req_chain(req, false);
 }
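
Both acomp entry points now dispatch identically: an on-stack request on an asynchronous transform is refused with -EAGAIN, a transform that advertises CRYPTO_ALG_REQ_VIRT (or a request already described by scatterlists, per acomp_request_issg()) goes straight to the driver callback, and everything else takes the acomp_do_req_chain() fallback. A minimal userspace sketch of that dispatch shape, using stand-in types and a stand-in flag bit rather than the kernel API:

#include <errno.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

#define ALG_REQ_VIRT 0x1 /* stand-in for CRYPTO_ALG_REQ_VIRT */

struct alg {
	unsigned int flags;
	int (*compress)(void *req); /* driver callback */
};

/* Stands in for the generic acomp_do_req_chain() fallback path. */
static int fallback(void *req)
{
	(void)req;
	return 0;
}

static int do_compress(const struct alg *alg, void *req, bool req_is_sg,
		       bool req_on_stack, bool alg_is_async)
{
	if (req_on_stack && alg_is_async)
		return -EAGAIN;
	if ((alg->flags & ALG_REQ_VIRT) || req_is_sg)
		return alg->compress(req); /* driver handles it natively */
	return fallback(req); /* bounce through the generic path */
}

static int noop_compress(void *req)
{
	(void)req;
	return 0;
}

int main(void)
{
	const struct alg a = { .flags = ALG_REQ_VIRT, .compress = noop_compress };

	printf("%d\n", do_compress(&a, NULL, false, false, false));
	return 0;
}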
 	u8 *page = NULL;
 	int err;
 
-	if (crypto_ahash_req_chain(tfm) ||
+	if (crypto_ahash_req_virt(tfm) ||
 	    !update || !ahash_request_isvirt(req))
 		return op(req);
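
The early return here is the corresponding fast path on the hash side: when the transform advertises virtual-address support, or the request is not a virtual-address update in the first place, op(req) runs directly and the bounce buffer declared above is never needed. Only the remaining case, a virtual-address update on a driver without CRYPTO_ALG_REQ_VIRT, falls through to the rest of the helper, which (as the u8 *page local suggests) feeds the data to the driver in page-sized pieces.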
 	if (ahash_req_on_stack(req) && ahash_is_async(tfm))
 		return -EAGAIN;
 	if (!crypto_ahash_alg(tfm)->finup ||
-	    (!crypto_ahash_req_chain(tfm) && ahash_request_isvirt(req)))
+	    (!crypto_ahash_req_virt(tfm) && ahash_request_isvirt(req)))
 		return ahash_def_finup(req);
 	return ahash_do_req_chain(req, crypto_ahash_alg(tfm)->finup);
 }
 		return shash_ahash_digest(req, prepare_shash_desc(req, tfm));
 	if (ahash_req_on_stack(req) && ahash_is_async(tfm))
 		return -EAGAIN;
-	if (!crypto_ahash_req_chain(tfm) && ahash_request_isvirt(req))
+	if (!crypto_ahash_req_virt(tfm) && ahash_request_isvirt(req))
 		return ahash_def_digest(req);
 	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
 		return -ENOKEY;
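
crypto_ahash_finup() and crypto_ahash_digest() apply the renamed predicate consistently: a virtual-address request on a transform without CRYPTO_ALG_REQ_VIRT is diverted to the generic ahash_def_finup()/ahash_def_digest() fallbacks rather than reaching the driver's callbacks, and finup additionally diverts whenever the driver supplies no ->finup of its own.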
 	.init			= deflate_init,
 	.base.cra_name		= "deflate",
 	.base.cra_driver_name	= "deflate-generic",
-	.base.cra_flags		= CRYPTO_ALG_REQ_CHAIN,
+	.base.cra_flags		= CRYPTO_ALG_REQ_VIRT,
 	.base.cra_module	= THIS_MODULE,
 };
 	comp_prepare_alg(&alg->calg);
 
-	base->cra_flags |= CRYPTO_ALG_REQ_CHAIN;
+	base->cra_flags |= CRYPTO_ALG_REQ_VIRT;
 }
 
 int crypto_register_scomp(struct scomp_alg *alg)
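
Registration keeps this consistent by construction: the preparation helper ending here ORs CRYPTO_ALG_REQ_VIRT into base->cra_flags for every scomp algorithm, which is sound because synchronous compression always operates on linear, virtually addressed buffers. deflate-generic's explicit cra_flags value above therefore matches what the scomp registration path sets anyway.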
 	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
 }
 
-static inline bool crypto_tfm_req_chain(struct crypto_tfm *tfm)
+static inline bool crypto_tfm_req_virt(struct crypto_tfm *tfm)
 {
-	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_REQ_CHAIN;
+	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_REQ_VIRT;
 }
 
 static inline u32 crypto_request_flags(struct crypto_async_request *req)
 				  CRYPTO_ACOMP_REQ_DST_NONDMA);
 }
 
-static inline bool crypto_acomp_req_chain(struct crypto_acomp *tfm)
+static inline bool crypto_acomp_req_virt(struct crypto_acomp *tfm)
 {
-	return crypto_tfm_req_chain(&tfm->base);
+	return crypto_tfm_req_virt(&tfm->base);
 }
 
 void crypto_acomp_free_streams(struct crypto_acomp_streams *s);
 	return req->base.flags & CRYPTO_AHASH_REQ_VIRT;
 }
 
-static inline bool crypto_ahash_req_chain(struct crypto_ahash *tfm)
+static inline bool crypto_ahash_req_virt(struct crypto_ahash *tfm)
 {
-	return crypto_tfm_req_chain(&tfm->base);
+	return crypto_tfm_req_virt(&tfm->base);
 }
 
 static inline struct crypto_ahash *crypto_ahash_fb(struct crypto_ahash *tfm)
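
All three predicates resolve to the same test: crypto_tfm_req_virt() reads CRYPTO_ALG_REQ_VIRT out of the algorithm's cra_flags, and the acomp and ahash variants simply pass the embedded struct crypto_tfm down to it. Keeping the thin per-type wrappers lets call sites such as the dispatch code above stay type-safe without ever dereferencing __crt_alg themselves.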
  */
 #define CRYPTO_ALG_FIPS_INTERNAL 0x00020000
 
-/* Set if the algorithm supports request chains and virtual addresses. */
-#define CRYPTO_ALG_REQ_CHAIN 0x00040000
+/* Set if the algorithm supports virtual addresses. */
+#define CRYPTO_ALG_REQ_VIRT 0x00040000
 
 /* The high bits 0xff000000 are reserved for type-specific flags. */
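
The bit value is untouched; only the name and comment narrow to what the flag still guarantees now that it no longer promises request chaining. As a quick sanity check, a self-contained sketch with the constants copied from this header (alg_handles_virt() is illustrative, not a kernel helper):

#include <assert.h>
#include <stdbool.h>

#define CRYPTO_ALG_FIPS_INTERNAL 0x00020000
#define CRYPTO_ALG_REQ_VIRT      0x00040000

/* Illustrative only; mirrors crypto_tfm_req_virt() above. */
static bool alg_handles_virt(unsigned int cra_flags)
{
	return cra_flags & CRYPTO_ALG_REQ_VIRT;
}

int main(void)
{
	/* REQ_VIRT occupies its own bit: clear of FIPS_INTERNAL below it
	 * and of the 0xff000000 range reserved for type-specific flags. */
	assert(!(CRYPTO_ALG_REQ_VIRT & CRYPTO_ALG_FIPS_INTERNAL));
	assert(!(CRYPTO_ALG_REQ_VIRT & 0xff000000));
	assert(alg_handles_virt(CRYPTO_ALG_REQ_VIRT));
	return 0;
}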