 	spin_lock(&io_tree->lock);
 	while (!RB_EMPTY_ROOT(&io_tree->state)) {
-		struct extent_state *state;
 		struct extent_state *cached_state = NULL;
-		node = rb_first(&io_tree->state);
-		state = rb_entry(node, struct extent_state, rb_node);
-		atomic_inc(&state->refs);
 		spin_unlock(&io_tree->lock);
-
-		lock_extent_bits(io_tree, state->start, state->end,
-				 0, &cached_state);
-		clear_extent_bit(io_tree, state->start, state->end,
+		lock_extent_bits(io_tree, 0, (u64)-1, 0, &cached_state);
+		clear_extent_bit(io_tree, 0, (u64)-1,
 				 EXTENT_LOCKED | EXTENT_DIRTY |
 				 EXTENT_DELALLOC | EXTENT_DO_ACCOUNTING |
 				 EXTENT_DEFRAG, 1, 1,
 				 &cached_state, GFP_NOFS);
-		free_extent_state(state);
 		cond_resched();
 		spin_lock(&io_tree->lock);
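
For reference, a minimal sketch of how the loop reads once the hunk above is
applied. The enclosing function, the io_tree variable and the closing brace
plus final unlock after the loop are assumed surrounding context, not part of
this hunk:

	spin_lock(&io_tree->lock);
	while (!RB_EMPTY_ROOT(&io_tree->state)) {
		struct extent_state *cached_state = NULL;

		spin_unlock(&io_tree->lock);
		/*
		 * Lock and clear the whole range [0, (u64)-1] in one go,
		 * instead of walking the tree one extent state at a time
		 * as the removed lines did.
		 */
		lock_extent_bits(io_tree, 0, (u64)-1, 0, &cached_state);
		clear_extent_bit(io_tree, 0, (u64)-1,
				 EXTENT_LOCKED | EXTENT_DIRTY |
				 EXTENT_DELALLOC | EXTENT_DO_ACCOUNTING |
				 EXTENT_DEFRAG, 1, 1,
				 &cached_state, GFP_NOFS);
		cond_resched();
		spin_lock(&io_tree->lock);
	}
	spin_unlock(&io_tree->lock);	/* assumed context after the hunk */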