[kallsyms,bpf,1/3] rbtree_latch: Introduce latch_tree_first() and latch_tree_next()
diff mbox series

Message ID 20190117231730.2413466-2-songliubraving@fb.com
State Changes Requested
Delegated to: BPF Maintainers
Headers show
Series
  • kallsym_tree for dynamic ksymbols
Related show

Commit Message

Song Liu Jan. 17, 2019, 11:17 p.m. UTC
From: Peter Zijlstra <peterz@infradead.org>

These two functions will be used by kallsym_tree for dynamic symbols.

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Song Liu <songliubraving@fb.com>
---
 include/linux/rbtree_latch.h | 54 ++++++++++++++++++++++++++++++++++++
 1 file changed, 54 insertions(+)

Patch
diff mbox series

diff --git a/include/linux/rbtree_latch.h b/include/linux/rbtree_latch.h
index 7d012faa509a..d0001a136d3e 100644
--- a/include/linux/rbtree_latch.h
+++ b/include/linux/rbtree_latch.h
@@ -211,4 +211,58 @@  latch_tree_find(void *key, struct latch_tree_root *root,
 	return node;
 }
 
+/**
+ * latch_tree_first() - return the first node in @root per sort order
+ * @root: trees to search
+ *
+ * Does a lockless lookup in the trees @root for the first node.
+ */
+static __always_inline struct latch_tree_node *
+latch_tree_first(struct latch_tree_root *root)
+{
+	struct latch_tree_node *ltn = NULL;
+	struct rb_node *node;
+	unsigned int seq;
+
+	do {
+		struct rb_root *rbr;
+
+		seq = raw_read_seqcount_latch(&root->seq);
+		rbr = &root->tree[seq & 1];
+		node = rb_first(rbr);
+	} while (read_seqcount_retry(&root->seq, seq));
+
+	if (node)
+		ltn = __lt_from_rb(node, seq & 1);
+
+	return ltn;
+}
+
+/**
+ * latch_tree_next() - find the next @ltn in @root per sort order
+ * @root: trees to which @ltn belongs
+ * @ltn: node to start from
+ *
+ * Does a lockless lookup in the trees @root for the next node starting at
+ * @ltn.
+ *
+ * Using this function outside of the write side lock is rather dodgy but given
+ * latch_tree_erase() doesn't re-init the nodes and the whole iteration is done
+ * under a single RCU critical section, it should be non-fatal and generate some
+ * semblance of order - albeit possibly missing chunks of the tree.
+ */
+static __always_inline struct latch_tree_node *
+latch_tree_next(struct latch_tree_root *root, struct latch_tree_node *ltn)
+{
+	struct rb_node *node;
+	unsigned int seq;
+
+	do {
+		seq = raw_read_seqcount_latch(&root->seq);
+		node = rb_next(&ltn->node[seq & 1]);
+	} while (read_seqcount_retry(&root->seq, seq));
+
+	return __lt_from_rb(node, seq & 1);
+}
+
 #endif /* RB_TREE_LATCH_H */