static struct expr *implicit_set_declaration(struct eval_ctx *ctx,
                                             const char *name,
                                             struct expr *key,
+                                            struct expr *data,
                                             struct expr *expr)
{
        struct cmd *cmd;
        set->flags = NFT_SET_ANONYMOUS | expr->set_flags;
        set->handle.set.name = xstrdup(name);
        set->key = key;
+       set->data = data;
        set->init = expr;
        set->automerge = set->flags & NFT_SET_INTERVAL;
        struct expr_ctx ectx = ctx->ectx;
        struct expr *map = *expr, *mappings;
        const struct datatype *dtype;
-       struct expr *key;
+       struct expr *key, *data;
        expr_set_context(&ctx->ectx, NULL, 0);
        if (expr_evaluate(ctx, &map->map) < 0)
                                          ctx->ectx.byteorder,
                                          ctx->ectx.len, NULL);
+               dtype = set_datatype_alloc(ectx.dtype, ectx.byteorder);
+               data = constant_expr_alloc(&netlink_location, dtype,
+                                          dtype->byteorder, ectx.len, NULL);
+
                mappings = implicit_set_declaration(ctx, "__map%d",
-                                                   key,
+                                                   key, data,
                                                    mappings);
-               dtype = set_datatype_alloc(ectx.dtype, ectx.byteorder);
-
-               mappings->set->data = constant_expr_alloc(&netlink_location,
-                                                         dtype, dtype->byteorder,
-                                                         ectx.len, NULL);
                if (ectx.len && mappings->set->data->len != ectx.len)
                        BUG("%d vs %d\n", mappings->set->data->len, ectx.len);
        case EXPR_SET:
                right = rel->right =
                        implicit_set_declaration(ctx, "__set%d",
-                                                expr_get(left), right);
+                                                expr_get(left), NULL,
+                                                right);
                /* fall through */
        case EXPR_SET_REF:
                /* Data for range lookups needs to be in big endian order */
        set->set_flags |= NFT_SET_TIMEOUT;
        setref = implicit_set_declaration(ctx, stmt->meter.name,
-                                         expr_get(key), set);
+                                         expr_get(key), NULL, set);
        setref->set->desc.size = stmt->meter.size;
        stmt->meter.set = setref;
                                          ctx->ectx.len, NULL);
                mappings = implicit_set_declaration(ctx, "__objmap%d",
-                                                   key, mappings);
+                                                   key, NULL, mappings);
                mappings->set->objtype = stmt->objref.type;
                map->mappings = mappings;
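
Recap (not part of the patch): a condensed sketch of the calling convention after this change, assembled only from the hunks above. These are fragments of nftables evaluate.c internals, not a compilable unit. implicit_set_declaration() now receives the data expression up front, so only the map path builds one and every other caller passes NULL:

        /* map lookups ("__map%d"): build a data template from the mapping context */
        dtype = set_datatype_alloc(ectx.dtype, ectx.byteorder);
        data = constant_expr_alloc(&netlink_location, dtype,
                                   dtype->byteorder, ectx.len, NULL);
        mappings = implicit_set_declaration(ctx, "__map%d", key, data, mappings);

        /* plain set lookups, meters and object maps carry no data part: pass NULL */
        right = implicit_set_declaration(ctx, "__set%d", expr_get(left), NULL, right);
        setref = implicit_set_declaration(ctx, stmt->meter.name, expr_get(key), NULL, set);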