git.ipfire.org Git - thirdparty/linux.git/commitdiff
bcachefs: debug_check_bset_lookups
author: Kent Overstreet <kent.overstreet@linux.dev>
Sat, 10 May 2025 19:49:38 +0000 (15:49 -0400)
committer: Kent Overstreet <kent.overstreet@linux.dev>
Thu, 22 May 2025 00:14:55 +0000 (20:14 -0400)
Signed-off-by: Kent Overstreet <kent.overstreet@linux.dev>
fs/bcachefs/bcachefs.h
fs/bcachefs/bkey.h
fs/bcachefs/bset.c
fs/bcachefs/bset.h

index a3900fb08f92a63a31aa927cbb338050db3a4c5b..076520beacd6a2648b1079c1f5c7cda0a8ecd420 100644 (file)
@@ -406,15 +406,16 @@ do {                                                                      \
        BCH_DEBUG_PARAM(debug_check_btree_locking,                      \
                "Enable additional asserts for btree locking")          \
        BCH_DEBUG_PARAM(debug_check_iterators,                          \
-               "Enables extra verification for btree iterators")
+               "Enables extra verification for btree iterators")       \
+       BCH_DEBUG_PARAM(debug_check_bset_lookups,                       \
+               "Enables extra verification for bset lookups")          \
+       BCH_DEBUG_PARAM(debug_check_btree_accounting,                   \
+               "Verify btree accounting for keys within a node")       \
+       BCH_DEBUG_PARAM(debug_check_bkey_unpack,                        \
+               "Enables extra verification for bkey unpack")
 
 /* Parameters that should only be compiled in debug mode: */
 #define BCH_DEBUG_PARAMS_DEBUG()                                       \
-       BCH_DEBUG_PARAM(expensive_debug_checks,                         \
-               "Enables various runtime debugging checks that "        \
-               "significantly affect performance")                     \
-       BCH_DEBUG_PARAM(debug_check_btree_accounting,                   \
-               "Verify btree accounting for keys within a node")       \
        BCH_DEBUG_PARAM(journal_seq_verify,                             \
                "Store the journal sequence number in the version "     \
                "number of every btree key, and verify that btree "     \
index b33356982460ce20eecddf828b4f988892bc8b23..3ccd521c190ac73751b6ef7f6fddc9a509d15b8b 100644 (file)
@@ -398,8 +398,7 @@ __bkey_unpack_key_format_checked(const struct btree *b,
                compiled_unpack_fn unpack_fn = b->aux_data;
                unpack_fn(dst, src);
 
-               if (IS_ENABLED(CONFIG_BCACHEFS_DEBUG) &&
-                   static_branch_unlikely(&bch2_expensive_debug_checks)) {
+               if (static_branch_unlikely(&bch2_debug_check_bkey_unpack)) {
                        struct bkey dst2 = __bch2_bkey_unpack_key(&b->format, src);
 
                        BUG_ON(memcmp(dst, &dst2, sizeof(*dst)));
index 7d2004a47fe6776fa249698518761b356c8038be..32841f762eb2e036eac695d8fd5b00e59db7b2a8 100644 (file)
@@ -144,8 +144,6 @@ struct btree_nr_keys bch2_btree_node_count_keys(struct btree *b)
        return nr;
 }
 
-#ifdef CONFIG_BCACHEFS_DEBUG
-
 void __bch2_verify_btree_nr_keys(struct btree *b)
 {
        struct btree_nr_keys nr = bch2_btree_node_count_keys(b);
@@ -153,7 +151,7 @@ void __bch2_verify_btree_nr_keys(struct btree *b)
        BUG_ON(memcmp(&nr, &b->nr, sizeof(nr)));
 }
 
-static void bch2_btree_node_iter_next_check(struct btree_node_iter *_iter,
+static void __bch2_btree_node_iter_next_check(struct btree_node_iter *_iter,
                                            struct btree *b)
 {
        struct btree_node_iter iter = *_iter;
@@ -190,8 +188,8 @@ static void bch2_btree_node_iter_next_check(struct btree_node_iter *_iter,
        }
 }
 
-void bch2_btree_node_iter_verify(struct btree_node_iter *iter,
-                                struct btree *b)
+void __bch2_btree_node_iter_verify(struct btree_node_iter *iter,
+                                  struct btree *b)
 {
        struct btree_node_iter_set *set, *s2;
        struct bkey_packed *k, *p;
@@ -237,8 +235,8 @@ found:
        }
 }
 
-void bch2_verify_insert_pos(struct btree *b, struct bkey_packed *where,
-                           struct bkey_packed *insert, unsigned clobber_u64s)
+static void __bch2_verify_insert_pos(struct btree *b, struct bkey_packed *where,
+                                    struct bkey_packed *insert, unsigned clobber_u64s)
 {
        struct bset_tree *t = bch2_bkey_to_bset(b, where);
        struct bkey_packed *prev = bch2_bkey_prev_all(b, t, where);
@@ -285,12 +283,15 @@ void bch2_verify_insert_pos(struct btree *b, struct bkey_packed *where,
 #endif
 }
 
-#else
-
-static inline void bch2_btree_node_iter_next_check(struct btree_node_iter *iter,
-                                                  struct btree *b) {}
+static inline void bch2_verify_insert_pos(struct btree *b,
+                                         struct bkey_packed *where,
+                                         struct bkey_packed *insert,
+                                         unsigned clobber_u64s)
+{
+       if (static_branch_unlikely(&bch2_debug_check_bset_lookups))
+               __bch2_verify_insert_pos(b, where, insert, clobber_u64s);
+}
 
-#endif
 
 /* Auxiliary search trees */
 
@@ -361,9 +362,8 @@ static struct bkey_float *bkey_float(const struct btree *b,
        return ro_aux_tree_base(b, t)->f + idx;
 }
 
-static void bset_aux_tree_verify(struct btree *b)
+static void __bset_aux_tree_verify(struct btree *b)
 {
-#ifdef CONFIG_BCACHEFS_DEBUG
        for_each_bset(b, t) {
                if (t->aux_data_offset == U16_MAX)
                        continue;
@@ -375,7 +375,12 @@ static void bset_aux_tree_verify(struct btree *b)
                BUG_ON(t->aux_data_offset > btree_aux_data_u64s(b));
                BUG_ON(bset_aux_tree_buf_end(t) > btree_aux_data_u64s(b));
        }
-#endif
+}
+
+static inline void bset_aux_tree_verify(struct btree *b)
+{
+       if (static_branch_unlikely(&bch2_debug_check_bset_lookups))
+               __bset_aux_tree_verify(b);
 }
 
 void bch2_btree_keys_init(struct btree *b)
@@ -495,15 +500,11 @@ static void rw_aux_tree_set(const struct btree *b, struct bset_tree *t,
        };
 }
 
-static void bch2_bset_verify_rw_aux_tree(struct btree *b,
-                                       struct bset_tree *t)
+static void __bch2_bset_verify_rw_aux_tree(struct btree *b, struct bset_tree *t)
 {
        struct bkey_packed *k = btree_bkey_first(b, t);
        unsigned j = 0;
 
-       if (!static_branch_unlikely(&bch2_expensive_debug_checks))
-               return;
-
        BUG_ON(bset_has_ro_aux_tree(t));
 
        if (!bset_has_rw_aux_tree(t))
@@ -530,6 +531,13 @@ start:
        }
 }
 
+static inline void bch2_bset_verify_rw_aux_tree(struct btree *b,
+                                               struct bset_tree *t)
+{
+       if (static_branch_unlikely(&bch2_debug_check_bset_lookups))
+               __bch2_bset_verify_rw_aux_tree(b, t);
+}
+
 /* returns idx of first entry >= offset: */
 static unsigned rw_aux_tree_bsearch(struct btree *b,
                                    struct bset_tree *t,
@@ -869,7 +877,7 @@ struct bkey_packed *bch2_bkey_prev_filter(struct btree *b,
                k = p;
        }
 
-       if (static_branch_unlikely(&bch2_expensive_debug_checks)) {
+       if (static_branch_unlikely(&bch2_debug_check_bset_lookups)) {
                BUG_ON(ret >= orig_k);
 
                for (i = ret
@@ -1195,7 +1203,7 @@ struct bkey_packed *bch2_bset_search_linear(struct btree *b,
                       bkey_iter_pos_cmp(b, m, search) < 0)
                        m = bkey_p_next(m);
 
-       if (static_branch_unlikely(&bch2_expensive_debug_checks)) {
+       if (static_branch_unlikely(&bch2_debug_check_bset_lookups)) {
                struct bkey_packed *prev = bch2_bkey_prev_all(b, t, m);
 
                BUG_ON(prev &&
@@ -1435,9 +1443,9 @@ static inline void __bch2_btree_node_iter_advance(struct btree_node_iter *iter,
 void bch2_btree_node_iter_advance(struct btree_node_iter *iter,
                                  struct btree *b)
 {
-       if (static_branch_unlikely(&bch2_expensive_debug_checks)) {
-               bch2_btree_node_iter_verify(iter, b);
-               bch2_btree_node_iter_next_check(iter, b);
+       if (static_branch_unlikely(&bch2_debug_check_bset_lookups)) {
+               __bch2_btree_node_iter_verify(iter, b);
+               __bch2_btree_node_iter_next_check(iter, b);
        }
 
        __bch2_btree_node_iter_advance(iter, b);
@@ -1453,8 +1461,7 @@ struct bkey_packed *bch2_btree_node_iter_prev_all(struct btree_node_iter *iter,
        struct btree_node_iter_set *set;
        unsigned end = 0;
 
-       if (static_branch_unlikely(&bch2_expensive_debug_checks))
-               bch2_btree_node_iter_verify(iter, b);
+       bch2_btree_node_iter_verify(iter, b);
 
        for_each_bset(b, t) {
                k = bch2_bkey_prev_all(b, t,
@@ -1489,8 +1496,7 @@ found:
        iter->data[0].k = __btree_node_key_to_offset(b, prev);
        iter->data[0].end = end;
 
-       if (static_branch_unlikely(&bch2_expensive_debug_checks))
-               bch2_btree_node_iter_verify(iter, b);
+       bch2_btree_node_iter_verify(iter, b);
        return prev;
 }
 
index f5b7fda537ea5751b818071ca7032b4c5d28dca9..a15ecf9d006e058b67a49dec0084877759a8980c 100644 (file)
@@ -517,23 +517,15 @@ void bch2_dump_bset(struct bch_fs *, struct btree *, struct bset *, unsigned);
 void bch2_dump_btree_node(struct bch_fs *, struct btree *);
 void bch2_dump_btree_node_iter(struct btree *, struct btree_node_iter *);
 
-#ifdef CONFIG_BCACHEFS_DEBUG
-
 void __bch2_verify_btree_nr_keys(struct btree *);
-void bch2_btree_node_iter_verify(struct btree_node_iter *, struct btree *);
-void bch2_verify_insert_pos(struct btree *, struct bkey_packed *,
-                           struct bkey_packed *, unsigned);
-
-#else
+void __bch2_btree_node_iter_verify(struct btree_node_iter *, struct btree *);
 
-static inline void __bch2_verify_btree_nr_keys(struct btree *b) {}
 static inline void bch2_btree_node_iter_verify(struct btree_node_iter *iter,
-                                             struct btree *b) {}
-static inline void bch2_verify_insert_pos(struct btree *b,
-                                         struct bkey_packed *where,
-                                         struct bkey_packed *insert,
-                                         unsigned clobber_u64s) {}
-#endif
+                                              struct btree *b)
+{
+       if (static_branch_unlikely(&bch2_debug_check_bset_lookups))
+               __bch2_btree_node_iter_verify(iter, b);
+}
 
 static inline void bch2_verify_btree_nr_keys(struct btree *b)
 {