Commit e9a28dc5 authored by Qu Wenruo's avatar Qu Wenruo Committed by David Sterba
Browse files

btrfs: rename tree_entry to rb_simple_node and export it



Structure tree_entry provides a very simple rb_tree which only uses
bytenr as search index.

That tree_entry is used in 3 structures: backref_node, mapping_node and
tree_block.

Since we're going to make backref_node independent of relocation, it's
a good time to extract the tree_entry into rb_simple_node, and export it
into misc.h.

Signed-off-by: default avatarQu Wenruo <wqu@suse.com>
Reviewed-by: default avatarDavid Sterba <dsterba@suse.com>
Signed-off-by: default avatarDavid Sterba <dsterba@suse.com>
parent 70535441
Loading
Loading
Loading
Loading
+4 −2
Original line number Diff line number Diff line
@@ -161,8 +161,10 @@ static inline void btrfs_backref_iter_release(struct btrfs_backref_iter *iter)
 * Represent a tree block in the backref cache
 */
struct btrfs_backref_node {
	struct {
		struct rb_node rb_node;
		u64 bytenr;
	}; /* Use rb_simple_node for search/insert */

	u64 new_bytenr;
	/* Objectid of tree block owner, can be not uptodate */
+54 −0
Original line number Diff line number Diff line
@@ -6,6 +6,7 @@
#include <linux/sched.h>
#include <linux/wait.h>
#include <asm/div64.h>
#include <linux/rbtree.h>

#define in_range(b, first, len) ((b) >= (first) && (b) < (first) + (len))

@@ -58,4 +59,57 @@ static inline bool has_single_bit_set(u64 n)
	return is_power_of_two_u64(n);
}

/*
 * Simple bytenr-based rb_tree related structures
 *
 * Any structure that wants to use bytenr as its single search index should
 * have these members at the start of the structure (same layout), so the
 * rb_simple_search()/rb_simple_insert() helpers below can operate on it.
 */
struct rb_simple_node {
	struct rb_node rb_node;	/* Link in the bytenr-indexed rb_tree */
	u64 bytenr;		/* Search index */
};

/*
 * Search @root for a node whose bytenr matches @bytenr.
 *
 * Return the matching rb_node, or NULL if no entry with that bytenr exists.
 */
static inline struct rb_node *rb_simple_search(struct rb_root *root, u64 bytenr)
{
	struct rb_node *cur = root->rb_node;

	while (cur) {
		struct rb_simple_node *cur_entry;

		cur_entry = rb_entry(cur, struct rb_simple_node, rb_node);
		if (bytenr == cur_entry->bytenr)
			return cur;
		cur = (bytenr < cur_entry->bytenr) ? cur->rb_left
						   : cur->rb_right;
	}
	return NULL;
}

/*
 * Insert @node into @root, keyed by @bytenr.
 *
 * Return NULL on success, or the conflicting rb_node if an entry with the
 * same bytenr is already present (in which case @node is not inserted).
 */
static inline struct rb_node *rb_simple_insert(struct rb_root *root, u64 bytenr,
					       struct rb_node *node)
{
	struct rb_node **link = &root->rb_node;
	struct rb_node *parent = NULL;

	while (*link) {
		struct rb_simple_node *cur;

		parent = *link;
		cur = rb_entry(parent, struct rb_simple_node, rb_node);
		if (bytenr == cur->bytenr)
			return parent;	/* conflict, caller handles it */
		link = (bytenr < cur->bytenr) ? &parent->rb_left
					      : &parent->rb_right;
	}

	rb_link_node(node, parent, link);
	rb_insert_color(node, root);
	return NULL;
}

#endif
+32 −77
Original line number Diff line number Diff line
@@ -24,6 +24,7 @@
#include "delalloc-space.h"
#include "block-group.h"
#include "backref.h"
#include "misc.h"

/*
 * Relocation overview
@@ -72,21 +73,15 @@
 * The entry point of relocation is relocate_block_group() function.
 */

/*
 * Common header shared by btrfs_backref_node, mapping_node and tree_block.
 *
 * Those structures start with these two members (same layout) so they can
 * all be indexed in an rb_tree by bytenr with the same search/insert code.
 */
struct tree_entry {
	struct rb_node rb_node;	/* Link in the bytenr-indexed rb_tree */
	u64 bytenr;		/* Search index */
};

#define RELOCATION_RESERVED_NODES	256
/*
 * Map the address (bytenr) of a tree root to the tree itself.
 */
struct mapping_node {
	struct {
		struct rb_node rb_node;
		u64 bytenr;
	}; /* Use rb_simple_node for search/insert */
	void *data;	/* The struct btrfs_root this node maps to */
};

@@ -99,8 +94,10 @@ struct mapping_tree {
 * present a tree block to process
 */
struct tree_block {
	struct {
		struct rb_node rb_node;
		u64 bytenr;
	}; /* Use rb_simple_node for search/insert */
	struct btrfs_key key;
	unsigned int level:8;
	unsigned int key_ready:1;
@@ -294,48 +291,6 @@ static void free_backref_edge(struct btrfs_backref_cache *cache,
	}
}

/*
 * Insert @node into @root, keyed by @bytenr.
 *
 * Return NULL on success, or the conflicting rb_node if an entry with the
 * same bytenr already exists (in which case @node is not inserted).
 */
static struct rb_node *tree_insert(struct rb_root *root, u64 bytenr,
				   struct rb_node *node)
{
	struct rb_node **link = &root->rb_node;
	struct rb_node *parent = NULL;

	while (*link) {
		struct tree_entry *cur;

		parent = *link;
		cur = rb_entry(parent, struct tree_entry, rb_node);
		if (bytenr == cur->bytenr)
			return parent;	/* conflict, caller handles it */
		link = (bytenr < cur->bytenr) ? &parent->rb_left
					      : &parent->rb_right;
	}

	rb_link_node(node, parent, link);
	rb_insert_color(node, root);
	return NULL;
}

/*
 * Search @root for a node whose bytenr matches @bytenr.
 *
 * Return the matching rb_node, or NULL if no entry with that bytenr exists.
 */
static struct rb_node *tree_search(struct rb_root *root, u64 bytenr)
{
	struct rb_node *cur = root->rb_node;

	while (cur) {
		struct tree_entry *entry;

		entry = rb_entry(cur, struct tree_entry, rb_node);
		if (bytenr == entry->bytenr)
			return cur;
		cur = (bytenr < entry->bytenr) ? cur->rb_left : cur->rb_right;
	}
	return NULL;
}

static void backref_tree_panic(struct rb_node *rb_node, int errno, u64 bytenr)
{

@@ -474,7 +429,7 @@ static void update_backref_node(struct btrfs_backref_cache *cache,
	struct rb_node *rb_node;
	rb_erase(&node->rb_node, &cache->rb_root);
	node->bytenr = bytenr;
	rb_node = tree_insert(&cache->rb_root, node->bytenr, &node->rb_node);
	rb_node = rb_simple_insert(&cache->rb_root, node->bytenr, &node->rb_node);
	if (rb_node)
		backref_tree_panic(rb_node, -EEXIST, bytenr);
}
@@ -599,7 +554,7 @@ struct btrfs_root *find_reloc_root(struct btrfs_fs_info *fs_info, u64 bytenr)

	ASSERT(rc);
	spin_lock(&rc->reloc_root_tree.lock);
	rb_node = tree_search(&rc->reloc_root_tree.rb_root, bytenr);
	rb_node = rb_simple_search(&rc->reloc_root_tree.rb_root, bytenr);
	if (rb_node) {
		node = rb_entry(rb_node, struct mapping_node, rb_node);
		root = (struct btrfs_root *)node->data;
@@ -667,7 +622,7 @@ static int handle_direct_tree_backref(struct btrfs_backref_cache *cache,
	if (!edge)
		return -ENOMEM;

	rb_node = tree_search(&cache->rb_root, ref_key->offset);
	rb_node = rb_simple_search(&cache->rb_root, ref_key->offset);
	if (!rb_node) {
		/* Parent node not yet cached */
		upper = alloc_backref_node(cache, ref_key->offset,
@@ -788,7 +743,7 @@ static int handle_indirect_tree_backref(struct btrfs_backref_cache *cache,
		}

		eb = path->nodes[level];
		rb_node = tree_search(&cache->rb_root, eb->start);
		rb_node = rb_simple_search(&cache->rb_root, eb->start);
		if (!rb_node) {
			upper = alloc_backref_node(cache, eb->start,
						   lower->level + 1);
@@ -994,7 +949,7 @@ static int finish_upper_links(struct btrfs_backref_cache *cache,

	/* Insert this node to cache if it's not COW-only */
	if (!start->cowonly) {
		rb_node = tree_insert(&cache->rb_root, start->bytenr,
		rb_node = rb_simple_insert(&cache->rb_root, start->bytenr,
					   &start->rb_node);
		if (rb_node)
			backref_tree_panic(rb_node, -EEXIST, start->bytenr);
@@ -1062,7 +1017,7 @@ static int finish_upper_links(struct btrfs_backref_cache *cache,

		/* Only cache non-COW-only (subvolume trees) tree blocks */
		if (!upper->cowonly) {
			rb_node = tree_insert(&cache->rb_root, upper->bytenr,
			rb_node = rb_simple_insert(&cache->rb_root, upper->bytenr,
						   &upper->rb_node);
			if (rb_node) {
				backref_tree_panic(rb_node, -EEXIST,
@@ -1311,7 +1266,7 @@ static int clone_backref_node(struct btrfs_trans_handle *trans,
	if (cache->last_trans > 0)
		update_backref_cache(trans, cache);

	rb_node = tree_search(&cache->rb_root, src->commit_root->start);
	rb_node = rb_simple_search(&cache->rb_root, src->commit_root->start);
	if (rb_node) {
		node = rb_entry(rb_node, struct btrfs_backref_node, rb_node);
		if (node->detached)
@@ -1321,7 +1276,7 @@ static int clone_backref_node(struct btrfs_trans_handle *trans,
	}

	if (!node) {
		rb_node = tree_search(&cache->rb_root,
		rb_node = rb_simple_search(&cache->rb_root,
					   reloc_root->commit_root->start);
		if (rb_node) {
			node = rb_entry(rb_node, struct btrfs_backref_node,
@@ -1355,7 +1310,7 @@ static int clone_backref_node(struct btrfs_trans_handle *trans,
		list_add_tail(&new_node->lower, &cache->leaves);
	}

	rb_node = tree_insert(&cache->rb_root, new_node->bytenr,
	rb_node = rb_simple_insert(&cache->rb_root, new_node->bytenr,
				   &new_node->rb_node);
	if (rb_node)
		backref_tree_panic(rb_node, -EEXIST, new_node->bytenr);
@@ -1396,7 +1351,7 @@ static int __must_check __add_reloc_root(struct btrfs_root *root)
	node->data = root;

	spin_lock(&rc->reloc_root_tree.lock);
	rb_node = tree_insert(&rc->reloc_root_tree.rb_root,
	rb_node = rb_simple_insert(&rc->reloc_root_tree.rb_root,
				   node->bytenr, &node->rb_node);
	spin_unlock(&rc->reloc_root_tree.lock);
	if (rb_node) {
@@ -1423,7 +1378,7 @@ static void __del_reloc_root(struct btrfs_root *root)

	if (rc && root->node) {
		spin_lock(&rc->reloc_root_tree.lock);
		rb_node = tree_search(&rc->reloc_root_tree.rb_root,
		rb_node = rb_simple_search(&rc->reloc_root_tree.rb_root,
					   root->commit_root->start);
		if (rb_node) {
			node = rb_entry(rb_node, struct mapping_node, rb_node);
@@ -1467,7 +1422,7 @@ static int __update_reloc_root(struct btrfs_root *root)
	struct reloc_control *rc = fs_info->reloc_ctl;

	spin_lock(&rc->reloc_root_tree.lock);
	rb_node = tree_search(&rc->reloc_root_tree.rb_root,
	rb_node = rb_simple_search(&rc->reloc_root_tree.rb_root,
				   root->commit_root->start);
	if (rb_node) {
		node = rb_entry(rb_node, struct mapping_node, rb_node);
@@ -1481,7 +1436,7 @@ static int __update_reloc_root(struct btrfs_root *root)

	spin_lock(&rc->reloc_root_tree.lock);
	node->bytenr = root->node->start;
	rb_node = tree_insert(&rc->reloc_root_tree.rb_root,
	rb_node = rb_simple_insert(&rc->reloc_root_tree.rb_root,
				   node->bytenr, &node->rb_node);
	spin_unlock(&rc->reloc_root_tree.lock);
	if (rb_node)
@@ -3640,7 +3595,7 @@ static int add_tree_block(struct reloc_control *rc,
	block->level = level;
	block->key_ready = 0;

	rb_node = tree_insert(blocks, block->bytenr, &block->rb_node);
	rb_node = rb_simple_insert(blocks, block->bytenr, &block->rb_node);
	if (rb_node)
		backref_tree_panic(rb_node, -EEXIST, block->bytenr);

@@ -3663,7 +3618,7 @@ static int __add_tree_block(struct reloc_control *rc,
	if (tree_block_processed(bytenr, rc))
		return 0;

	if (tree_search(blocks, bytenr))
	if (rb_simple_search(blocks, bytenr))
		return 0;

	path = btrfs_alloc_path();