Lines matching "in", "-", "tree" in fs/btrfs/extent-io-tree.c
// SPDX-License-Identifier: GPL-2.0

#include "extent-io-tree.h"
/* extent_state_in_tree() */
	return !RB_EMPTY_NODE(&state->rb_node);
/* btrfs_leak_debug_add_state() */
	list_add(&state->leak_list, &states);
/* btrfs_leak_debug_del_state() */
	list_del(&state->leak_list);
/* btrfs_extent_state_leak_debug_check() */
	pr_err("BTRFS: state leak: start %llu end %llu state %u in tree %d refs %d\n",
	       state->start, state->end, state->state,
	       extent_state_in_tree(state),
	       refcount_read(&state->refs));
	list_del(&state->leak_list);
#define btrfs_debug_check_extent_io_range(tree, start, end)	\
	__btrfs_debug_check_extent_io_range(__func__, (tree), (start), (end))
static void __btrfs_debug_check_extent_io_range(const char *caller,
						struct extent_io_tree *tree,
						u64 start, u64 end)
{
	if (tree->owner != IO_TREE_INODE_IO)
		return;

	inode = extent_io_tree_to_inode_const(tree);
	isize = i_size_read(&inode->vfs_inode);
	if (end >= PAGE_SIZE && (end % 2) == 0 && end != isize - 1) {
		btrfs_debug_rl(inode->root->fs_info,
/* The only tree allowed to set the inode is IO_TREE_INODE_IO. */
static bool is_inode_io_tree(const struct extent_io_tree *tree)
{
	return tree->owner == IO_TREE_INODE_IO;
}
/* Return the inode if it's valid for the given tree, otherwise NULL. */
struct btrfs_inode *extent_io_tree_to_inode(struct extent_io_tree *tree)
{
	if (tree->owner == IO_TREE_INODE_IO)
		return tree->inode;
	return NULL;
}
/* Read-only access to the inode. */
const struct btrfs_inode *extent_io_tree_to_inode_const(const struct extent_io_tree *tree)
{
	if (tree->owner == IO_TREE_INODE_IO)
		return tree->inode;
	return NULL;
}
/* For read-only access to fs_info. */
const struct btrfs_fs_info *extent_io_tree_to_fs_info(const struct extent_io_tree *tree)
{
	if (tree->owner == IO_TREE_INODE_IO)
		return tree->inode->root->fs_info;
	return tree->fs_info;
}
void extent_io_tree_init(struct btrfs_fs_info *fs_info,
			 struct extent_io_tree *tree, unsigned int owner)
{
	tree->state = RB_ROOT;
	spin_lock_init(&tree->lock);
	tree->fs_info = fs_info;
	tree->owner = owner;
}
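
/*
 * Illustrative sketch, not part of the original file: how a caller might
 * initialize one of these trees. IO_TREE_INODE_IO is the only owner named in
 * these excerpts; for that owner the inode back-pointer is assumed (here) to
 * be wired up separately by the inode initialization code.
 */
static void example_io_tree_setup(struct btrfs_fs_info *fs_info,
				  struct extent_io_tree *tree)
{
	/* The tree starts empty; bits are added later with set_extent_bit(). */
	extent_io_tree_init(fs_info, tree, IO_TREE_INODE_IO);
}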
/*
 * Empty an io tree, removing and freeing every extent state record from the
 * tree. This should be called once we are sure no other task can access the
 * tree anymore, so no tree updates happen after we empty the tree and there
 * aren't any waiters on any extent state record.
 */
void extent_io_tree_release(struct extent_io_tree *tree)
{
	spin_lock(&tree->lock);
	root = tree->state;
	tree->state = RB_ROOT;
		RB_CLEAR_NODE(&state->rb_node);
		ASSERT(!(state->state & EXTENT_LOCK_BITS));
		/*
		 * No need for a memory barrier here, as we are holding the tree
		 * lock.
		 */
		ASSERT(!waitqueue_active(&state->wq));
		cond_resched_lock(&tree->lock);
	/* No other task should be accessing the tree anymore. */
	ASSERT(RB_EMPTY_ROOT(&tree->state));
	spin_unlock(&tree->lock);
}
/* alloc_extent_state() */
	state->state = 0;
	RB_CLEAR_NODE(&state->rb_node);
	refcount_set(&state->refs, 1);
	init_waitqueue_head(&state->wq);
/* free_extent_state() */
	if (refcount_dec_and_test(&state->refs)) {
/* add_extent_changeset() */
	if (set && (state->state & bits) == bits)
		return;
	if (!set && (state->state & bits) == 0)
		return;
	changeset->bytes_changed += state->end - state->start + 1;
	ret = ulist_add(&changeset->range_changed, state->start, state->end,
/* next_state() */
	struct rb_node *next = rb_next(&state->rb_node);

/* prev_state() */
	struct rb_node *next = rb_prev(&state->rb_node);
/*
 * Search @tree for an entry that contains @offset. Such entry would have
 * entry->start <= offset && entry->end >= offset.
 *
 * @tree:     the tree to search
 * @offset:   offset that should fall within an entry in @tree
 * @node_ret: pointer where a new node should be anchored (used when inserting
 *            an entry in the tree)
 */
static inline struct extent_state *tree_search_for_insert(struct extent_io_tree *tree,
	struct rb_root *root = &tree->state;
	struct rb_node **node = &root->rb_node;
		if (offset < entry->start)
			node = &(*node)->rb_left;
		else if (offset > entry->end)
			node = &(*node)->rb_right;
	while (entry && offset > entry->end)
/*
 * Search offset in the tree or fill neighbor rbtree node pointers.
 *
 * @tree:   the tree to search
 * @offset: offset that should fall within an entry in @tree
 */
static struct extent_state *tree_search_prev_next(struct extent_io_tree *tree,
	struct rb_root *root = &tree->state;
	struct rb_node **node = &root->rb_node;
		if (offset < entry->start)
			node = &(*node)->rb_left;
		else if (offset > entry->end)
			node = &(*node)->rb_right;
	while (entry && offset > entry->end)
	while (entry && offset < entry->start)
/* Inexact rb-tree search, return the next entry if @offset is not found. */
static inline struct extent_state *tree_search(struct extent_io_tree *tree, u64 offset)
{
	return tree_search_for_insert(tree, offset, NULL, NULL);
}
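
/*
 * Illustrative example, not in the original file: with states [0,4095] and
 * [8192,12287] in the tree, tree_search(tree, 5000) finds no containing entry
 * and returns the next one, [8192,12287]; tree_search(tree, 9000) returns
 * [8192,12287] directly. Callers must therefore compare state->start against
 * their end offset, as __clear_extent_bit() does below.
 */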
static void extent_io_tree_panic(const struct extent_io_tree *tree,
	btrfs_panic(extent_io_tree_to_fs_info(tree), err,
		    "extent io tree error on %s state start %llu end %llu",
		    opname, state->start, state->end);
static void merge_prev_state(struct extent_io_tree *tree, struct extent_state *state)
	if (prev && prev->end == state->start - 1 && prev->state == state->state) {
		if (is_inode_io_tree(tree))
			btrfs_merge_delalloc_extent(extent_io_tree_to_inode(tree),
						    state, prev);
		state->start = prev->start;
		rb_erase(&prev->rb_node, &tree->state);
		RB_CLEAR_NODE(&prev->rb_node);
static void merge_next_state(struct extent_io_tree *tree, struct extent_state *state)
	if (next && next->start == state->end + 1 && next->state == state->state) {
		if (is_inode_io_tree(tree))
			btrfs_merge_delalloc_extent(extent_io_tree_to_inode(tree),
						    state, next);
		state->end = next->end;
		rb_erase(&next->rb_node, &tree->state);
		RB_CLEAR_NODE(&next->rb_node);
/*
 * Any extents with matching state are merged together into a single extent in
 * the tree. Extents with EXTENT_IO in their state field are not merged because
 * the end_io handlers need to be able to do operations on them without
 * sleeping.
 *
 * This should be called with the tree lock held.
 */
static void merge_state(struct extent_io_tree *tree, struct extent_state *state)
{
	if (state->state & (EXTENT_LOCK_BITS | EXTENT_BOUNDARY))
		return;

	merge_prev_state(tree, state);
	merge_next_state(tree, state);
}
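
/*
 * Illustrative example, not in the original file: if the tree holds [0,4095]
 * and [4096,8191] with identical bits, and neither record has EXTENT_LOCK_BITS
 * or EXTENT_BOUNDARY set, merge_state() on either record collapses the pair
 * into a single [0,8191] record and erases the other from the rbtree.
 */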
static void set_state_bits(struct extent_io_tree *tree,
	if (is_inode_io_tree(tree))
		btrfs_set_delalloc_extent(extent_io_tree_to_inode(tree), state, bits);
	state->state |= bits_to_set;
/*
 * Insert an extent_state struct into the tree. 'bits' are set on the
 * struct before it is inserted.
 *
 * The returned record may be an existing record in the tree that was expanded
 * to accommodate the requested range. In case of an extent_state different
 * from the one that was passed in, the latter one is freed.
 *
 * The tree lock is not taken internally. This is a utility function and
 * probably isn't what you want to call (see set/clear_extent_bit).
 */
static struct extent_state *insert_state(struct extent_io_tree *tree,
	const u64 start = state->start - 1;
	const u64 end = state->end + 1;

	set_state_bits(tree, state, bits, changeset);

	node = &tree->state.rb_node;
		if (state->end < entry->start) {
			if (try_merge && end == entry->start &&
			    state->state == entry->state) {
				if (is_inode_io_tree(tree))
					btrfs_merge_delalloc_extent(
						extent_io_tree_to_inode(tree),
				entry->start = state->start;
				merge_prev_state(tree, entry);
				state->state = 0;
			node = &(*node)->rb_left;
		} else if (state->end > entry->end) {
			if (try_merge && entry->end == start &&
			    state->state == entry->state) {
				if (is_inode_io_tree(tree))
					btrfs_merge_delalloc_extent(
						extent_io_tree_to_inode(tree),
				entry->end = state->end;
				merge_next_state(tree, entry);
				state->state = 0;
			node = &(*node)->rb_right;
			return ERR_PTR(-EEXIST);

	rb_link_node(&state->rb_node, parent, node);
	rb_insert_color(&state->rb_node, &tree->state);
/* Insert @state into @tree at the location given by @node and @parent. */
static void insert_state_fast(struct extent_io_tree *tree,
	set_state_bits(tree, state, bits, changeset);
	rb_link_node(&state->rb_node, parent, node);
	rb_insert_color(&state->rb_node, &tree->state);
	merge_state(tree, state);
/*
 * Split a given extent state struct in two, inserting the preallocated
 * struct 'prealloc' as the new first half. Before calling,
 * the tree has 'orig' at [orig->start, orig->end]. After calling, there
 * are two extent state structs in the tree:
 *   prealloc: [orig->start, split - 1]
 *   orig:     [split, orig->end]
 *
 * The tree locks are not taken by this function. They need to be held
 * by the caller.
 */
static int split_state(struct extent_io_tree *tree, struct extent_state *orig,
	if (is_inode_io_tree(tree))
		btrfs_split_delalloc_extent(extent_io_tree_to_inode(tree), orig,
					    split);

	prealloc->start = orig->start;
	prealloc->end = split - 1;
	prealloc->state = orig->state;
	orig->start = split;

	parent = &orig->rb_node;
		if (prealloc->end < entry->start) {
			node = &(*node)->rb_left;
		} else if (prealloc->end > entry->end) {
			node = &(*node)->rb_right;
			return -EEXIST;

	rb_link_node(&prealloc->rb_node, parent, node);
	rb_insert_color(&prealloc->rb_node, &tree->state);
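
/*
 * Illustrative example, not in the original file: split_state(tree, orig,
 * prealloc, 4096) with orig = [0,8191] leaves prealloc = [0,4095] and
 * orig = [4096,8191], both carrying the original state bits; a later clear on
 * only one half then affects just that record.
 */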
/*
 * Utility function to clear some bits in an extent state struct. It will
 * optionally wake up anyone waiting on this state (wake == 1).
 *
 * If no bits are set on the state struct after clearing things, the
 * struct is freed and removed from the tree.
 */
static struct extent_state *clear_state_bit(struct extent_io_tree *tree,
	if (is_inode_io_tree(tree))
		btrfs_clear_delalloc_extent(extent_io_tree_to_inode(tree), state,
					    bits);
	state->state &= ~bits_to_clear;
	if (wake)
		wake_up(&state->wq);
	if (state->state == 0) {
		rb_erase(&state->rb_node, &tree->state);
		RB_CLEAR_NODE(&state->rb_node);
	merge_state(tree, state);
/* set_gfp_mask_from_bits() */
	*bits &= EXTENT_NOWAIT - 1;
/*
 * Clear some bits on a range in the tree. This may require splitting or
 * inserting elements in the tree, so the gfp mask is used to indicate which
 * allocations or sleeping are allowed.
 *
 * This takes the tree lock, and returns 0 on success and < 0 on error.
 */
int __clear_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
	btrfs_debug_check_extent_io_range(tree, start, end);
	trace_btrfs_clear_extent_bit(tree, start, end - start + 1, bits);
	/*
	 * Don't care for allocation failure here because we might end
	 * up not needing the pre-allocated extent state at all, which
	 * is the case if we only have in the tree extent states that
	 * cover our input range and don't cover any other range.
	 */
	spin_lock(&tree->lock);
		    cached->start <= start && cached->end > start) {
			refcount_dec(&cached->refs);
	state = tree_search(tree, start);
	if (state->start > end)
	WARN_ON(state->end < start);
	last_end = state->end;
	if (!(state->state & bits)) {
	/*
	 *     | ---- desired range ---- |
	 *  | state | or
	 *  | ------------- state -------------- |
	 */
	if (state->start < start) {
		err = split_state(tree, state, prealloc, start);
		if (err)
			extent_io_tree_panic(tree, state, "split", err);
		if (state->end <= end) {
			state = clear_state_bit(tree, state, bits, wake, changeset);
	/*
	 * | ---- desired range ---- |
	 *                         | state |
	 */
	if (state->start <= end && state->end > end) {
		err = split_state(tree, state, prealloc, end + 1);
		if (err)
			extent_io_tree_panic(tree, state, "split", err);
		wake_up(&state->wq);
		clear_state_bit(tree, prealloc, bits, wake, changeset);
	state = clear_state_bit(tree, state, bits, wake, changeset);
	if (last_end == (u64)-1)
	spin_unlock(&tree->lock);
	spin_unlock(&tree->lock);
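
/*
 * Illustrative sketch, not in the original file: clear_extent_bit() is the
 * header wrapper around __clear_extent_bit() with no changeset (it is used by
 * __try_lock_extent() below). A hypothetical caller dropping EXTENT_DELALLOC:
 */
static void example_clear_delalloc(struct extent_io_tree *tree, u64 start, u64 end)
{
	/* [start, end] is inclusive; NULL means no cached state to reuse. */
	clear_extent_bit(tree, start, end, EXTENT_DELALLOC, NULL);
}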
/*
 * Wait for one or more bits to clear on a range in the state tree.
 *
 * The tree lock is taken by this function.
 */
static void wait_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
	btrfs_debug_check_extent_io_range(tree, start, end);

	spin_lock(&tree->lock);
	/*
	 * Maintain cached_state, as we may not remove it from the tree if there
	 * are more bits than the bits we're waiting on set on this state.
	 */
		    state->start <= start && start < state->end)
	state = tree_search(tree, start);
		if (state->start > end)
			break;
		if (state->state & bits) {
			start = state->start;
			refcount_inc(&state->refs);
			prepare_to_wait(&state->wq, &wait, TASK_UNINTERRUPTIBLE);
			spin_unlock(&tree->lock);
			schedule();
			spin_lock(&tree->lock);
			finish_wait(&state->wq, &wait);
		start = state->end + 1;
		if (!cond_resched_lock(&tree->lock)) {
	spin_unlock(&tree->lock);
/* cache_state_if_flags() */
	if (!flags || (state->state & flags)) {
		refcount_inc(&state->refs);
/*
 * tree->lock must be held. NULL will be returned if nothing was found after
 * searching the tree for @start.
 */
static struct extent_state *find_first_extent_bit_state(struct extent_io_tree *tree,
	state = tree_search(tree, start);
		if (state->end >= start && (state->state & bits))
/*
 * Find the first offset in the io tree with one or more @bits set.
 *
 * Note: If there are multiple bits set in @bits, any of them will match.
 *
 * Return true if we find something, and update @start_ret and @end_ret.
 */
bool find_first_extent_bit(struct extent_io_tree *tree, u64 start,
	spin_lock(&tree->lock);
		if (state->end == start - 1 && extent_state_in_tree(state)) {
				if (state->state & bits)
	state = find_first_extent_bit_state(tree, start, bits);
		*start_ret = state->start;
		*end_ret = state->end;
	spin_unlock(&tree->lock);
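
/*
 * Illustrative sketch, not in the original file: iterating every range with a
 * bit set. Parameter order is an assumption based on the fragments above; the
 * cached state is only an optimization and may be NULL.
 */
static void example_walk_ranges(struct extent_io_tree *tree, u32 bits)
{
	struct extent_state *cached = NULL;
	u64 found_start;
	u64 found_end;
	u64 cur = 0;

	while (find_first_extent_bit(tree, cur, &found_start, &found_end,
				     bits, &cached)) {
		/* [found_start, found_end] is an inclusive range. */
		cur = found_end + 1;
	}
	free_extent_state(cached);	/* NULL-safe */
}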
/*
 * Find a contiguous area of bits.
 *
 * @tree: io tree to check
 *
 * set/clear_extent_bit() may temporarily split contiguous ranges while working
 * and will drop the tree->lock, so use this helper if you want to find the
 * actual contiguous area: find the first matching state,
 * then walk down the tree until we find a non-contiguous area. The area
 * returned is the full contiguous range with the bits set.
 */
int find_contiguous_extent_bit(struct extent_io_tree *tree, u64 start,
	ASSERT(!btrfs_fs_incompat(extent_io_tree_to_fs_info(tree), NO_HOLES));

	spin_lock(&tree->lock);
	state = find_first_extent_bit_state(tree, start, bits);
		*start_ret = state->start;
		*end_ret = state->end;
			if (state->start > (*end_ret + 1))
				break;
			*end_ret = state->end;
	spin_unlock(&tree->lock);
/*
 * Find a contiguous range of bytes in the file marked as delalloc, not more
 * than 'max_bytes'. start and end are used to return the range.
 *
 * True is returned if we find something, false if nothing was in the tree.
 */
bool btrfs_find_delalloc_range(struct extent_io_tree *tree, u64 *start,
	spin_lock(&tree->lock);
	state = tree_search(tree, cur_start);
		*end = (u64)-1;
		if (found && (state->start != cur_start ||
			      (state->state & EXTENT_BOUNDARY))) {
		if (!(state->state & EXTENT_DELALLOC)) {
				*end = state->end;
			*start = state->start;
			refcount_inc(&state->refs);
		*end = state->end;
		cur_start = state->end + 1;
		total_bytes += state->end - state->start + 1;
	spin_unlock(&tree->lock);
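
/*
 * Illustrative note, not in the original file: per the fragments above, the
 * search stops as soon as it leaves the first contiguous delalloc run
 * (state->start != cur_start), hits an EXTENT_BOUNDARY record, or accumulates
 * max_bytes, so on success [*start, *end] describes one bounded contiguous
 * delalloc range.
 */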
/*
 * Set some bits on a range in the tree. This may require allocations or
 * sleeping, so the gfp mask is used to indicate what is allowed.
 *
 * If any of the exclusive bits are set, this will fail with -EEXIST if some
 * part of the range already has the given bits set. The extent_state of the
 * existing range is returned in failed_state in this case, and the start of the
 * existing range is returned in failed_start. failed_state is used as an
 * optimization for wait_extent_bit(); failed_start must be used as the source
 * of truth as failed_state may have changed since we returned.
 *
 * [start, end] is inclusive. This takes the tree lock.
 */
static int __set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
	btrfs_debug_check_extent_io_range(tree, start, end);
	trace_btrfs_set_extent_bit(tree, start, end - start + 1, bits);
	/*
	 * Don't care for allocation failure here because we might end
	 * up not needing the pre-allocated extent state at all, which
	 * is the case if we only have in the tree extent states that
	 * cover our input range and don't cover any other range.
	 */
	spin_lock(&tree->lock);
		if (state->start <= start && state->end > start &&
	state = tree_search_for_insert(tree, start, &p, &parent);
		prealloc->start = start;
		prealloc->end = end;
		insert_state_fast(tree, prealloc, p, parent, bits, changeset);
	last_start = state->start;
	last_end = state->end;

	/*
	 * | ---- desired range ---- |
	 * | state |
	 */
	if (state->start == start && state->end <= end) {
		if (state->state & exclusive_bits) {
			*failed_start = state->start;
			ret = -EEXIST;
		set_state_bits(tree, state, bits, changeset);
		merge_state(tree, state);
		if (last_end == (u64)-1)
		if (start < end && state && state->start == start &&

	/*
	 *     | ---- desired range ---- |
	 *  | state | or
	 *  | ------------- state -------------- |
	 */
	if (state->start < start) {
		if (state->state & exclusive_bits) {
			ret = -EEXIST;
		if ((state->state & bits) == bits) {
			start = state->end + 1;
		ret = split_state(tree, state, prealloc, start);
		if (ret)
			extent_io_tree_panic(tree, state, "split", ret);
		if (state->end <= end) {
			set_state_bits(tree, state, bits, changeset);
			merge_state(tree, state);
			if (last_end == (u64)-1)
			if (start < end && state && state->start == start &&

	/*
	 * | ---- desired range ---- |
	 *                     | state |
	 *
	 * There's a hole, we need to insert something in it and ignore the
	 * extent we found.
	 */
	if (state->start > start) {
		this_end = last_start - 1;
		prealloc->start = start;
		prealloc->end = this_end;
		inserted_state = insert_state(tree, prealloc, bits, changeset);
			extent_io_tree_panic(tree, prealloc, "insert", ret);

	/*
	 * | ---- desired range ---- |
	 *                        | state |
	 */
	if (state->start <= end && state->end > end) {
		if (state->state & exclusive_bits) {
			ret = -EEXIST;
		ret = split_state(tree, state, prealloc, end + 1);
		if (ret)
			extent_io_tree_panic(tree, state, "split", ret);
		set_state_bits(tree, prealloc, bits, changeset);
		merge_state(tree, prealloc);
	spin_unlock(&tree->lock);
	spin_unlock(&tree->lock);
int set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
		   u32 bits, struct extent_state **cached_state)
{
	return __set_extent_bit(tree, start, end, bits, NULL, NULL,
				cached_state, NULL);
}
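
/*
 * Illustrative sketch, not in the original file: the usual set/clear pairing.
 * The cached state filled in by the set is handed back to the clear so it can
 * skip the tree search; the clear path drops the reference.
 */
static void example_mark_and_clear_delalloc(struct extent_io_tree *tree,
					    u64 start, u64 end)
{
	struct extent_state *cached = NULL;

	set_extent_bit(tree, start, end, EXTENT_DELALLOC, &cached);
	clear_extent_bit(tree, start, end, EXTENT_DELALLOC, &cached);
	free_extent_state(cached);	/* NULL-safe if already consumed */
}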
/*
 * Convert all bits in a given range from one bit to another.
 *
 * @tree:       the io tree to search
 * @start:      the start offset in bytes
 * @end:        the end offset in bytes (inclusive)
 * @bits:       the bits to set in this range
 * @clear_bits: the bits to clear in this range
 *
 * This will go through and set bits for the given range. If any states exist
 * already in this range they are set with the given bit and cleared of the
 * clear_bits. This is only meant to be used by things that are mergeable, ie.
 * converting from say DELALLOC to DIRTY.
 */
int convert_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
	btrfs_debug_check_extent_io_range(tree, start, end);
	trace_btrfs_convert_extent_bit(tree, start, end - start + 1, bits,
	/*
	 * Best effort, don't worry if extent state allocation fails here for
	 * the first iteration. We might have a cached state
	 * that matches exactly the target range, in which case no
	 * extent state allocations are needed. We'll only know this
	 * after locking the tree.
	 */
		return -ENOMEM;

	spin_lock(&tree->lock);
		if (state->start <= start && state->end > start &&
	state = tree_search_for_insert(tree, start, &p, &parent);
			ret = -ENOMEM;
		prealloc->start = start;
		prealloc->end = end;
		insert_state_fast(tree, prealloc, p, parent, bits, NULL);
	last_start = state->start;
	last_end = state->end;

	/*
	 * | ---- desired range ---- |
	 * | state |
	 */
	if (state->start == start && state->end <= end) {
		set_state_bits(tree, state, bits, NULL);
		state = clear_state_bit(tree, state, clear_bits, 0, NULL);
		if (last_end == (u64)-1)
		if (start < end && state && state->start == start &&

	/*
	 *     | ---- desired range ---- |
	 *  | state | or
	 *  | ------------- state -------------- |
	 */
	if (state->start < start) {
			ret = -ENOMEM;
		ret = split_state(tree, state, prealloc, start);
		if (ret)
			extent_io_tree_panic(tree, state, "split", ret);
		if (state->end <= end) {
			set_state_bits(tree, state, bits, NULL);
			state = clear_state_bit(tree, state, clear_bits, 0, NULL);
			if (last_end == (u64)-1)
			if (start < end && state && state->start == start &&

	/*
	 * | ---- desired range ---- |
	 *                     | state |
	 *
	 * There's a hole, we need to insert something in it and ignore the
	 * extent we found.
	 */
	if (state->start > start) {
		this_end = last_start - 1;
			ret = -ENOMEM;
		prealloc->start = start;
		prealloc->end = this_end;
		inserted_state = insert_state(tree, prealloc, bits, NULL);
			extent_io_tree_panic(tree, prealloc, "insert", ret);

	/*
	 * | ---- desired range ---- |
	 *                        | state |
	 */
	if (state->start <= end && state->end > end) {
			ret = -ENOMEM;
		ret = split_state(tree, state, prealloc, end + 1);
		if (ret)
			extent_io_tree_panic(tree, state, "split", ret);
		set_state_bits(tree, prealloc, bits, NULL);
		clear_state_bit(tree, prealloc, clear_bits, 0, NULL);
	spin_unlock(&tree->lock);
	spin_unlock(&tree->lock);
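
/*
 * Illustrative sketch, not in the original file: a one-call conversion in the
 * style of the transaction code, which moves dirty extent ranges to
 * EXTENT_NEED_WAIT while flushing. The bit names used here are assumptions.
 */
static int example_convert_dirty(struct extent_io_tree *tree, u64 start, u64 end)
{
	struct extent_state *cached = NULL;
	int ret;

	ret = convert_extent_bit(tree, start, end, EXTENT_NEED_WAIT,
				 EXTENT_DIRTY, &cached);
	free_extent_state(cached);
	return ret;
}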
/*
 * Find the first range that has @bits not set. This range could start before
 * @start.
 *
 * @tree: the tree to search
 *
 * Since an unallocated range is also considered one which doesn't have the bits
 * set, it's possible that @end_ret contains -1; this happens in case the range
 * spans (last_range_end, end of device]. In this case it's up to the caller to
 * trim @end_ret to the appropriate size.
 */
void find_first_clear_extent_bit(struct extent_io_tree *tree, u64 start,
	spin_lock(&tree->lock);
	state = tree_search_prev_next(tree, start, &prev, &next);
	if (!state && !next && !prev) {
		/*
		 * Tree is completely empty, send full range and let
		 * caller deal with it.
		 */
		*end_ret = -1;
	} else if (!state && !next) {
		*start_ret = prev->end + 1;
		*end_ret = -1;
	}
	if (in_range(start, state->start, state->end - state->start + 1)) {
		if (state->state & bits) {
			/* |--range with bits set--| */
			start = state->end + 1;
		} else {
			/* |--range with bits cleared----| */
			*start_ret = state->start;
	/*
	 * |---prev range---|---hole/unset---|---node range---|
	 *
	 *                or
	 *
	 * |---hole/unset--||--first node--|
	 */
	*start_ret = prev->end + 1;
		if (state->end >= start && !(state->state & bits)) {
			*end_ret = state->end;
			*end_ret = state->start - 1;
	spin_unlock(&tree->lock);
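
/*
 * Illustrative example, not in the original file: with bits set only on
 * [64K, 128K-1] and a search start of 200K, the result is
 * [*start_ret, *end_ret] = [128K, -1], the hole after the last range; with a
 * completely empty tree it is the full range. Either way the caller must trim
 * *end_ret itself, as noted above.
 */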
/*
 * Count the number of bytes in the tree that have a given bit(s) set for a
 * given range.
 *
 * @tree:         The io tree to search.
 * @bits:         The bits the range must have in order to be accounted for.
 * @contig:       Indicate if we should ignore holes in the range or not. If
 *                this is true, then stop once we find a hole.
 * @cached_state: A cached state to be used across multiple calls to this
 *                function in order to speedup searches. Use NULL if this is
 *                called only once or if each call does not start where the
 *                previous one ended.
 */
u64 count_range_bits(struct extent_io_tree *tree,
	spin_lock(&tree->lock);
		if (cached->start <= cur_start && cur_start <= cached->end) {
		} else if (cached->start > cur_start) {
			/*
			 * Check if the previous state record starts at or before
			 * the range we
			 * are looking for, and if so, use it - this is a common case
			 * when there are holes between records in the tree. If there is
			 * no previous record, start from the cached state.
			 */
			else if (prev->start <= cur_start && cur_start <= prev->end)
	state = tree_search(tree, cur_start);
		if (state->start > search_end)
			break;
		if (contig && found && state->start > last + 1)
			break;
		if (state->end >= cur_start && (state->state & bits) == bits) {
			total_bytes += min(search_end, state->end) + 1 -
				       max(cur_start, state->start);
			*start = max(cur_start, state->start);
		last = state->end;
		refcount_inc(&state->refs);
	spin_unlock(&tree->lock);
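
/*
 * Illustrative sketch, not in the original file: counting delalloc bytes in an
 * inclusive range, ignoring holes (contig == 0). Parameter order is an
 * assumption based on the doc comment above.
 */
static u64 example_count_delalloc(struct extent_io_tree *tree, u64 start, u64 end)
{
	u64 cur = start;	/* updated to the first matching offset */

	return count_range_bits(tree, &cur, end, end - start + 1,
				EXTENT_DELALLOC, 0, NULL);
}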
/* Check if the single @bit exists in the given range. */
bool test_range_bit_exists(struct extent_io_tree *tree, u64 start, u64 end, u32 bit)
	spin_lock(&tree->lock);
	state = tree_search(tree, start);
		if (state->start > end)
			break;
		if (state->state & bit) {
			bitset = true;
			break;
		}
		/* If state->end is (u64)-1, start will overflow to 0. */
		start = state->end + 1;
	spin_unlock(&tree->lock);
bool test_range_bit(struct extent_io_tree *tree, u64 start, u64 end, u32 bit,
	spin_lock(&tree->lock);
	if (cached && extent_state_in_tree(cached) && cached->start <= start &&
	    cached->end > start)
		state = cached;
	else
		state = tree_search(tree, start);
		if (state->start > start) {
			bitset = false;
			break;
		}
		if (state->start > end)
			break;
		if ((state->state & bit) == 0) {
			bitset = false;
			break;
		}
		if (state->end == (u64)-1)
			break;
		/*
		 * Last entry (if state->end is (u64)-1 and overflow happens),
		 * or next entry starts after the range.
		 */
		start = state->end + 1;
	spin_unlock(&tree->lock);
int set_record_extent_bits(struct extent_io_tree *tree, u64 start, u64 end,
	/*
	 * For EXTENT_LOCK_BITS this would either
	 * fail with -EEXIST or changeset will record the whole range.
	 */
	return __set_extent_bit(tree, start, end, bits, NULL, NULL, NULL, changeset);

int clear_record_extent_bits(struct extent_io_tree *tree, u64 start, u64 end,
	return __clear_extent_bit(tree, start, end, bits, NULL, changeset);
bool __try_lock_extent(struct extent_io_tree *tree, u64 start, u64 end, u32 bits,
	err = __set_extent_bit(tree, start, end, bits, &failed_start,
	if (err == -EEXIST) {
		if (failed_start > start)
			clear_extent_bit(tree, start, failed_start - 1, bits, cached);
int __lock_extent(struct extent_io_tree *tree, u64 start, u64 end, u32 bits,
	err = __set_extent_bit(tree, start, end, bits, &failed_start,
	while (err == -EEXIST) {
		if (start != failed_start)
			clear_extent_bit(tree, start, failed_start - 1,
					 bits, cached_state);
		wait_extent_bit(tree, failed_start, end, bits, &failed_state);
		err = __set_extent_bit(tree, start, end, bits,
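
/*
 * Illustrative sketch, not in the original file: the common locking pattern
 * built on the helpers above, assuming the lock_extent()/unlock_extent()
 * wrappers from extent-io-tree.h that pass EXTENT_LOCKED for @bits.
 */
static void example_locked_section(struct extent_io_tree *tree, u64 start, u64 end)
{
	struct extent_state *cached = NULL;

	lock_extent(tree, start, end, &cached);
	/* ... no other task can hold EXTENT_LOCKED on this range here ... */
	unlock_extent(tree, start, end, &cached);
}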
/* extent_state_init_cachep() */
		return -ENOMEM;