void *wmem; /* working memory for compress */
};
-static void *crypto842_alloc_ctx(struct crypto_scomp *tfm)
+static void *crypto842_alloc_ctx(void)
{
void *ctx;
{
struct crypto842_ctx *ctx = crypto_tfm_ctx(tfm);
- ctx->wmem = crypto842_alloc_ctx(NULL);
+ ctx->wmem = crypto842_alloc_ctx();
if (IS_ERR(ctx->wmem))
return -ENOMEM;
return 0;
}
-static void crypto842_free_ctx(struct crypto_scomp *tfm, void *ctx)
+static void crypto842_free_ctx(void *ctx)
{
kfree(ctx);
}
{
struct crypto842_ctx *ctx = crypto_tfm_ctx(tfm);
- crypto842_free_ctx(NULL, ctx->wmem);
+ crypto842_free_ctx(ctx->wmem);
}
static int crypto842_compress(struct crypto_tfm *tfm,
return ret;
}
-static void *deflate_alloc_ctx(struct crypto_scomp *tfm)
+static void *deflate_alloc_ctx(void)
{
struct deflate_ctx *ctx;
int ret;
deflate_decomp_exit(ctx);
}
-static void deflate_free_ctx(struct crypto_scomp *tfm, void *ctx)
+static void deflate_free_ctx(void *ctx)
{
__deflate_exit(ctx);
kfree_sensitive(ctx);
void *lz4_comp_mem;
};
-static void *lz4_alloc_ctx(struct crypto_scomp *tfm)
+static void *lz4_alloc_ctx(void)
{
void *ctx;
{
struct lz4_ctx *ctx = crypto_tfm_ctx(tfm);
- ctx->lz4_comp_mem = lz4_alloc_ctx(NULL);
+ ctx->lz4_comp_mem = lz4_alloc_ctx();
if (IS_ERR(ctx->lz4_comp_mem))
return -ENOMEM;
return 0;
}
-static void lz4_free_ctx(struct crypto_scomp *tfm, void *ctx)
+static void lz4_free_ctx(void *ctx)
{
vfree(ctx);
}
{
struct lz4_ctx *ctx = crypto_tfm_ctx(tfm);
- lz4_free_ctx(NULL, ctx->lz4_comp_mem);
+ lz4_free_ctx(ctx->lz4_comp_mem);
}
static int __lz4_compress_crypto(const u8 *src, unsigned int slen,
void *lz4hc_comp_mem;
};
-static void *lz4hc_alloc_ctx(struct crypto_scomp *tfm)
+static void *lz4hc_alloc_ctx(void)
{
void *ctx;
{
struct lz4hc_ctx *ctx = crypto_tfm_ctx(tfm);
- ctx->lz4hc_comp_mem = lz4hc_alloc_ctx(NULL);
+ ctx->lz4hc_comp_mem = lz4hc_alloc_ctx();
if (IS_ERR(ctx->lz4hc_comp_mem))
return -ENOMEM;
return 0;
}
-static void lz4hc_free_ctx(struct crypto_scomp *tfm, void *ctx)
+static void lz4hc_free_ctx(void *ctx)
{
vfree(ctx);
}
{
struct lz4hc_ctx *ctx = crypto_tfm_ctx(tfm);
- lz4hc_free_ctx(NULL, ctx->lz4hc_comp_mem);
+ lz4hc_free_ctx(ctx->lz4hc_comp_mem);
}
static int __lz4hc_compress_crypto(const u8 *src, unsigned int slen,
void *lzorle_comp_mem;
};
-static void *lzorle_alloc_ctx(struct crypto_scomp *tfm)
+static void *lzorle_alloc_ctx(void)
{
void *ctx;
{
struct lzorle_ctx *ctx = crypto_tfm_ctx(tfm);
- ctx->lzorle_comp_mem = lzorle_alloc_ctx(NULL);
+ ctx->lzorle_comp_mem = lzorle_alloc_ctx();
if (IS_ERR(ctx->lzorle_comp_mem))
return -ENOMEM;
return 0;
}
-static void lzorle_free_ctx(struct crypto_scomp *tfm, void *ctx)
+static void lzorle_free_ctx(void *ctx)
{
kvfree(ctx);
}
{
struct lzorle_ctx *ctx = crypto_tfm_ctx(tfm);
- lzorle_free_ctx(NULL, ctx->lzorle_comp_mem);
+ lzorle_free_ctx(ctx->lzorle_comp_mem);
}
static int __lzorle_compress(const u8 *src, unsigned int slen,
void *lzo_comp_mem;
};
-static void *lzo_alloc_ctx(struct crypto_scomp *tfm)
+static void *lzo_alloc_ctx(void)
{
void *ctx;
{
struct lzo_ctx *ctx = crypto_tfm_ctx(tfm);
- ctx->lzo_comp_mem = lzo_alloc_ctx(NULL);
+ ctx->lzo_comp_mem = lzo_alloc_ctx();
if (IS_ERR(ctx->lzo_comp_mem))
return -ENOMEM;
return 0;
}
-static void lzo_free_ctx(struct crypto_scomp *tfm, void *ctx)
+static void lzo_free_ctx(void *ctx)
{
kvfree(ctx);
}
{
struct lzo_ctx *ctx = crypto_tfm_ctx(tfm);
- lzo_free_ctx(NULL, ctx->lzo_comp_mem);
+ lzo_free_ctx(ctx->lzo_comp_mem);
}
static int __lzo_compress(const u8 *src, unsigned int slen,
return ret;
}
-static void *zstd_alloc_ctx(struct crypto_scomp *tfm)
+static void *zstd_alloc_ctx(void)
{
int ret;
struct zstd_ctx *ctx;
zstd_decomp_exit(ctx);
}
-static void zstd_free_ctx(struct crypto_scomp *tfm, void *ctx)
+static void zstd_free_ctx(void *ctx)
{
__zstd_exit(ctx);
kfree_sensitive(ctx);
} /* Legacy compress framework end */
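/*
 * Illustrative sketch only, not part of this patch: with the tfm argument
 * dropped, a software scomp backend wires the new callback signatures into
 * its struct scomp_alg roughly as below. The instance name and the
 * compress/decompress handlers are hypothetical; alloc_ctx/free_ctx reuse
 * the lzo helpers converted above.
 */
static struct scomp_alg example_scomp_alg = {
	.alloc_ctx	= lzo_alloc_ctx,	/* now void *(*)(void) */
	.free_ctx	= lzo_free_ctx,		/* now void (*)(void *ctx) */
	.compress	= example_scompress,	/* hypothetical handler */
	.decompress	= example_sdecompress,	/* hypothetical handler */
};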
/* SCOMP framework start */
-void *zip_alloc_scomp_ctx_deflate(struct crypto_scomp *tfm)
+void *zip_alloc_scomp_ctx_deflate(void)
{
int ret;
struct zip_kernel_ctx *zip_ctx;
return zip_ctx;
}
-void *zip_alloc_scomp_ctx_lzs(struct crypto_scomp *tfm)
+void *zip_alloc_scomp_ctx_lzs(void)
{
int ret;
struct zip_kernel_ctx *zip_ctx;
return zip_ctx;
}
-void zip_free_scomp_ctx(struct crypto_scomp *tfm, void *ctx)
+void zip_free_scomp_ctx(void *ctx)
{
struct zip_kernel_ctx *zip_ctx = ctx;
const u8 *src, unsigned int slen,
u8 *dst, unsigned int *dlen);
-void *zip_alloc_scomp_ctx_deflate(struct crypto_scomp *tfm);
-void *zip_alloc_scomp_ctx_lzs(struct crypto_scomp *tfm);
-void zip_free_scomp_ctx(struct crypto_scomp *tfm, void *zip_ctx);
+void *zip_alloc_scomp_ctx_deflate(void);
+void *zip_alloc_scomp_ctx_lzs(void);
+void zip_free_scomp_ctx(void *zip_ctx);
int zip_scomp_compress(struct crypto_scomp *tfm,
const u8 *src, unsigned int slen,
u8 *dst, unsigned int *dlen, void *ctx);
 * @calg: Common algorithm data structure shared with acomp
*/
struct scomp_alg {
- void *(*alloc_ctx)(struct crypto_scomp *tfm);
- void (*free_ctx)(struct crypto_scomp *tfm, void *ctx);
+ void *(*alloc_ctx)(void);
+ void (*free_ctx)(void *ctx);
int (*compress)(struct crypto_scomp *tfm, const u8 *src,
unsigned int slen, u8 *dst, unsigned int *dlen,
void *ctx);
static inline void *crypto_scomp_alloc_ctx(struct crypto_scomp *tfm)
{
- return crypto_scomp_alg(tfm)->alloc_ctx(tfm);
+ return crypto_scomp_alg(tfm)->alloc_ctx();
}
static inline void crypto_scomp_free_ctx(struct crypto_scomp *tfm,
void *ctx)
{
- return crypto_scomp_alg(tfm)->free_ctx(tfm, ctx);
+ return crypto_scomp_alg(tfm)->free_ctx(ctx);
}
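/*
 * Sketch of a caller, not taken from this patch: the crypto_scomp_* inline
 * wrappers keep their tfm parameter, so scomp users are unaffected by the
 * callback signature change. The helper below is hypothetical and only
 * shows the call sequence through the wrappers.
 */
static int example_scomp_one_shot(struct crypto_scomp *tfm,
				  const u8 *src, unsigned int slen,
				  u8 *dst, unsigned int *dlen)
{
	void *ctx = crypto_scomp_alloc_ctx(tfm);	/* backend alloc_ctx(void) */
	int ret;

	if (IS_ERR(ctx))
		return PTR_ERR(ctx);

	ret = crypto_scomp_compress(tfm, src, slen, dst, dlen, ctx);
	crypto_scomp_free_ctx(tfm, ctx);		/* backend free_ctx(ctx) */
	return ret;
}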
static inline int crypto_scomp_compress(struct crypto_scomp *tfm,