BCH_DEBUG_PARAM(debug_check_btree_locking, \
"Enable additional asserts for btree locking") \
BCH_DEBUG_PARAM(debug_check_iterators, \
- "Enables extra verification for btree iterators")
+ "Enables extra verification for btree iterators") \
+ BCH_DEBUG_PARAM(debug_check_bset_lookups, \
+ "Enables extra verification for bset lookups") \
+ BCH_DEBUG_PARAM(debug_check_btree_accounting, \
+ "Verify btree accounting for keys within a node") \
+ BCH_DEBUG_PARAM(debug_check_bkey_unpack, \
+ "Enables extra verification for bkey unpack")
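+/*
+ * The three checks above were previously bundled under expensive_debug_checks;
+ * splitting them out lets each one be toggled independently via its own
+ * static key.
+ */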
/* Parameters that should only be compiled in debug mode: */
#define BCH_DEBUG_PARAMS_DEBUG() \
- BCH_DEBUG_PARAM(expensive_debug_checks, \
- "Enables various runtime debugging checks that " \
- "significantly affect performance") \
- BCH_DEBUG_PARAM(debug_check_btree_accounting, \
- "Verify btree accounting for keys within a node") \
BCH_DEBUG_PARAM(journal_seq_verify, \
"Store the journal sequence number in the version " \
"number of every btree key, and verify that btree " \
return nr;
}
-#ifdef CONFIG_BCACHEFS_DEBUG
-
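+/* Recount the keys in @b from scratch and assert the cached counts match: */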
void __bch2_verify_btree_nr_keys(struct btree *b)
{
struct btree_nr_keys nr = bch2_btree_node_count_keys(b);
BUG_ON(memcmp(&nr, &b->nr, sizeof(nr)));
}
-static void bch2_btree_node_iter_next_check(struct btree_node_iter *_iter,
-					    struct btree *b)
+static void __bch2_btree_node_iter_next_check(struct btree_node_iter *_iter,
+					      struct btree *b)
{
struct btree_node_iter iter = *_iter;
}
}
-void bch2_btree_node_iter_verify(struct btree_node_iter *iter,
- struct btree *b)
+void __bch2_btree_node_iter_verify(struct btree_node_iter *iter,
+ struct btree *b)
{
struct btree_node_iter_set *set, *s2;
struct bkey_packed *k, *p;
}
}
-void bch2_verify_insert_pos(struct btree *b, struct bkey_packed *where,
- struct bkey_packed *insert, unsigned clobber_u64s)
+static void __bch2_verify_insert_pos(struct btree *b, struct bkey_packed *where,
+ struct bkey_packed *insert, unsigned clobber_u64s)
{
struct bset_tree *t = bch2_bkey_to_bset(b, where);
struct bkey_packed *prev = bch2_bkey_prev_all(b, t, where);
#endif
}
-#else
-
-static inline void bch2_btree_node_iter_next_check(struct btree_node_iter *iter,
- struct btree *b) {}
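+/*
+ * With the debug param off, the static branch below compiles to a nop at the
+ * call site, so the (now unconditionally built) check above costs nothing on
+ * the fast path:
+ */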
+static inline void bch2_verify_insert_pos(struct btree *b,
+ struct bkey_packed *where,
+ struct bkey_packed *insert,
+ unsigned clobber_u64s)
+{
+ if (static_branch_unlikely(&bch2_debug_check_bset_lookups))
+ __bch2_verify_insert_pos(b, where, insert, clobber_u64s);
+}
-#endif
/* Auxiliary search trees */
return ro_aux_tree_base(b, t)->f + idx;
}
-static void bset_aux_tree_verify(struct btree *b)
+static void __bset_aux_tree_verify(struct btree *b)
{
-#ifdef CONFIG_BCACHEFS_DEBUG
for_each_bset(b, t) {
if (t->aux_data_offset == U16_MAX)
continue;
BUG_ON(t->aux_data_offset > btree_aux_data_u64s(b));
BUG_ON(bset_aux_tree_buf_end(t) > btree_aux_data_u64s(b));
}
-#endif
+}
+
+static inline void bset_aux_tree_verify(struct btree *b)
+{
+ if (static_branch_unlikely(&bch2_debug_check_bset_lookups))
+ __bset_aux_tree_verify(b);
}
void bch2_btree_keys_init(struct btree *b)
};
}
-static void bch2_bset_verify_rw_aux_tree(struct btree *b,
- struct bset_tree *t)
+static void __bch2_bset_verify_rw_aux_tree(struct btree *b, struct bset_tree *t)
{
struct bkey_packed *k = btree_bkey_first(b, t);
unsigned j = 0;
- if (!static_branch_unlikely(&bch2_expensive_debug_checks))
- return;
-
BUG_ON(bset_has_ro_aux_tree(t));
if (!bset_has_rw_aux_tree(t))
}
}
+static inline void bch2_bset_verify_rw_aux_tree(struct btree *b,
+ struct bset_tree *t)
+{
+ if (static_branch_unlikely(&bch2_debug_check_bset_lookups))
+ __bch2_bset_verify_rw_aux_tree(b, t);
+}
+
/* returns idx of first entry >= offset: */
static unsigned rw_aux_tree_bsearch(struct btree *b,
struct bset_tree *t,
k = p;
}
- if (static_branch_unlikely(&bch2_expensive_debug_checks)) {
+ if (static_branch_unlikely(&bch2_debug_check_bset_lookups)) {
BUG_ON(ret >= orig_k);
for (i = ret
bkey_iter_pos_cmp(b, m, search) < 0)
m = bkey_p_next(m);
- if (static_branch_unlikely(&bch2_expensive_debug_checks)) {
+ if (static_branch_unlikely(&bch2_debug_check_bset_lookups)) {
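+		/* verify that m really is the first key >= search: */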
struct bkey_packed *prev = bch2_bkey_prev_all(b, t, m);
BUG_ON(prev &&
void bch2_btree_node_iter_advance(struct btree_node_iter *iter,
struct btree *b)
{
- if (static_branch_unlikely(&bch2_expensive_debug_checks)) {
- bch2_btree_node_iter_verify(iter, b);
- bch2_btree_node_iter_next_check(iter, b);
+ if (static_branch_unlikely(&bch2_debug_check_bset_lookups)) {
+ __bch2_btree_node_iter_verify(iter, b);
+ __bch2_btree_node_iter_next_check(iter, b);
}
__bch2_btree_node_iter_advance(iter, b);
struct btree_node_iter_set *set;
unsigned end = 0;
- if (static_branch_unlikely(&bch2_expensive_debug_checks))
- bch2_btree_node_iter_verify(iter, b);
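+	/* the wrapper now tests bch2_debug_check_bset_lookups internally */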
+ bch2_btree_node_iter_verify(iter, b);
for_each_bset(b, t) {
k = bch2_bkey_prev_all(b, t,
iter->data[0].k = __btree_node_key_to_offset(b, prev);
iter->data[0].end = end;
- if (static_branch_unlikely(&bch2_expensive_debug_checks))
- bch2_btree_node_iter_verify(iter, b);
+ bch2_btree_node_iter_verify(iter, b);
return prev;
}
void bch2_dump_btree_node(struct bch_fs *, struct btree *);
void bch2_dump_btree_node_iter(struct btree *, struct btree_node_iter *);
-#ifdef CONFIG_BCACHEFS_DEBUG
-
void __bch2_verify_btree_nr_keys(struct btree *);
-void bch2_btree_node_iter_verify(struct btree_node_iter *, struct btree *);
-void bch2_verify_insert_pos(struct btree *, struct bkey_packed *,
- struct bkey_packed *, unsigned);
-
-#else
+void __bch2_btree_node_iter_verify(struct btree_node_iter *, struct btree *);
-static inline void __bch2_verify_btree_nr_keys(struct btree *b) {}
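+/*
+ * Callers that already test the static branch (e.g.
+ * bch2_btree_node_iter_advance()) call __bch2_btree_node_iter_verify()
+ * directly; everyone else goes through this wrapper:
+ */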
static inline void bch2_btree_node_iter_verify(struct btree_node_iter *iter,
- struct btree *b) {}
-static inline void bch2_verify_insert_pos(struct btree *b,
- struct bkey_packed *where,
- struct bkey_packed *insert,
- unsigned clobber_u64s) {}
-#endif
+ struct btree *b)
+{
+ if (static_branch_unlikely(&bch2_debug_check_bset_lookups))
+ __bch2_btree_node_iter_verify(iter, b);
+}
static inline void bch2_verify_btree_nr_keys(struct btree *b)
{