Lines matching refs:size (references to the size member and to size parameters in the extent cache helpers)

19 u64 size; struct member
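
For context, a minimal sketch of the structure this member belongs to. The listing looks like the btrfs-progs style extent cache, so the sketch follows that layout; only the size member is actually visible above, and the other fields (rb_node, objectid, start) and their comments are assumptions:

	/* Assumed record layout; a cached range is half-open, i.e. it covers
	 * [start, start + size), which is what the comparisons below rely on.
	 * struct rb_node would come from the project's red-black tree code. */
	struct cache_extent {
		struct rb_node rb_node;	/* linkage into the cache_tree rb-tree */
		u64 objectid;		/* only used by the *2 lookup variants */
		u64 start;		/* first byte of the cached range */
		u64 size;		/* length of the range (the member at line 19) */
	};
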
30 if (entry->start + entry->size <= range->start) in cache_tree_comp_range()
32 else if (range->start + range->size <= entry->start) in cache_tree_comp_range()
45 range.size = entry->size; in cache_tree_comp_nodes()
62 else if (entry->start + entry->size <= range->start) in cache_tree_comp_range2()
64 else if (range->start + range->size <= entry->start) in cache_tree_comp_range2()
78 range.size = entry->size; in cache_tree_comp_nodes2()
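
Both comparator families (lines 30/32 and 62/64) implement the same half-open interval test: two ranges are disjoint exactly when one ends at or before the other starts. A self-contained sketch of that test; the function name and the exact -1/0/1 sign convention are illustrative, not taken from the source:

	/* Compare a tree entry [entry_start, entry_start + entry_size) against a
	 * queried range [query_start, query_start + query_size).  A non-zero
	 * result tells a tree walk which way to descend; zero means the entry
	 * overlaps the queried range. */
	static int range_compare(u64 entry_start, u64 entry_size,
				 u64 query_start, u64 query_size)
	{
		if (entry_start + entry_size <= query_start)
			return 1;	/* entry ends before the query begins */
		if (query_start + query_size <= entry_start)
			return -1;	/* query ends before the entry begins */
		return 0;		/* the two ranges overlap */
	}

The *2 variants presumably compare objectid before falling back to this range test, which would explain the extra objectid argument to lookup_cache_extent2() at line 144.
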
88 static struct cache_extent *alloc_cache_extent(u64 start, u64 size) in alloc_cache_extent() argument
97 pe->size = size; in alloc_cache_extent()
101 int add_cache_extent(struct cache_tree *tree, u64 start, u64 size) in add_cache_extent() argument
103 struct cache_extent *pe = alloc_cache_extent(start, size); in add_cache_extent()
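
Lines 88-103 are the allocate-then-insert pair: alloc_cache_extent() records start and size, and add_cache_extent() allocates and inserts into the tree. A hedged sketch of that pattern; the NULL/-ENOMEM handling and the insert_cache_extent() helper are assumptions, and only the two size assignments (lines 97 and 103) come from the listing. Needs <stdlib.h> and <errno.h>:

	static struct cache_extent *alloc_cache_extent(u64 start, u64 size)
	{
		struct cache_extent *pe = malloc(sizeof(*pe));

		if (!pe)
			return NULL;
		pe->objectid = 0;	/* unused by the plain (non-*2) interface */
		pe->start = start;
		pe->size = size;	/* line 97 */
		return pe;
	}

	int add_cache_extent(struct cache_tree *tree, u64 start, u64 size)
	{
		struct cache_extent *pe = alloc_cache_extent(start, size);	/* line 103 */

		if (!pe)
			return -ENOMEM;
		return insert_cache_extent(tree, pe);	/* assumed insertion helper */
	}
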
127 u64 start, u64 size) in lookup_cache_extent() argument
134 range.size = size; in lookup_cache_extent()
144 u64 objectid, u64 start, u64 size) in lookup_cache_extent2() argument
152 range.size = size; in lookup_cache_extent2()
169 range.size = 1; in search_cache_extent()
190 range.size = 1; in search_cache_extent2()
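
Note the difference between the two query styles above: the lookup helpers (lines 127-152) pass the caller's size through and therefore ask for an entry overlapping the whole range, while the search helpers (lines 169 and 190) force range.size = 1, turning the query into a point lookup at a single offset. A hypothetical usage sketch; tree, start and num_bytes are placeholder names, and the "return the next extent if nothing covers start" behaviour often associated with the search variant is an assumption:

	struct cache_extent *ce;

	/* any cached extent overlapping [start, start + num_bytes)? */
	ce = lookup_cache_extent(tree, start, num_bytes);

	/* cached extent covering the single offset start; with range.size = 1
	 * the overlap test degenerates to "which entry contains this point" */
	ce = search_cache_extent(tree, start);
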
263 int add_merge_cache_extent(struct cache_tree *tree, u64 start, u64 size) in add_merge_cache_extent() argument
295 BUG_ON(next && start + size > next->start); in add_merge_cache_extent()
296 BUG_ON(prev && prev->start + prev->size > start); in add_merge_cache_extent()
298 if (next && start + size == next->start) { in add_merge_cache_extent()
300 next->size = next->start + next->size - start; in add_merge_cache_extent()
303 if (prev && prev->start + prev->size == start) { in add_merge_cache_extent()
306 next->size = next->start + next->size - prev->start; in add_merge_cache_extent()
311 prev->size = start + size - prev->start; in add_merge_cache_extent()
316 ret = add_cache_extent(tree, start, size); in add_merge_cache_extent()
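
The arithmetic in add_merge_cache_extent() (lines 263-316) assumes half-open, non-overlapping ranges: the BUG_ON()s at lines 295-296 reject overlap, line 300 grows next backwards over the new range, line 306 then collapses prev and next into one span, line 311 handles the prev-only case, and line 316 falls back to a plain insert when nothing is adjacent. A standalone worked example of that arithmetic (hypothetical test code, not part of the source):

	#include <assert.h>
	#include <stdint.h>

	typedef uint64_t u64;	/* stand-in for the project's u64 */

	int main(void)
	{
		/* prev = [0,4), new range = [4,8), next = [8,16): all adjacent,
		 * so the merge should end up as a single extent [0,16). */
		u64 start = 4, size = 4;
		u64 prev_start = 0, prev_size = 4;
		u64 next_start = 8, next_size = 8;

		/* the non-overlap invariants checked by the BUG_ON()s (295-296) */
		assert(start + size <= next_start);
		assert(prev_start + prev_size <= start);

		/* lines 298/300: the new range touches next, so next grows backwards */
		if (start + size == next_start) {
			next_size = next_start + next_size - start;	/* 8 + 8 - 4 = 12 */
			next_start = start;				/* next is now [4,16) */
		}
		/* lines 303/306: prev now touches next, so the three collapse into one */
		if (prev_start + prev_size == next_start) {
			next_size = next_start + next_size - prev_start; /* 4 + 12 - 0 = 16 */
			next_start = prev_start;			 /* next is now [0,16) */
		}
		assert(next_start == 0 && next_size == 16);
		return 0;
	}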