Lines matching full:span (all hits are in the iommufd PFN-tracking code, drivers/iommu/iommufd/pages.c)

1069 struct interval_tree_double_span_iter span; member
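
For reference, the iterator type embedded here comes from drivers/iommu/iommufd/double_span.h: it walks the union of two interval trees and reports maximal spans that are either a hole in both trees or used by exactly one of them. A condensed sketch of its definition, paraphrased from that header (treat the exact layout as approximate, it can shift between kernel versions):

struct interval_tree_double_span_iter {
    struct rb_root_cached *itrees[2];
    struct interval_tree_span_iter spans[2];
    union {
        unsigned long start_hole;
        unsigned long start_used;
    };
    union {
        unsigned long last_hole;
        unsigned long last_used;
    };
    /* 0 = hole, 1 = used in itrees[0], 2 = used in itrees[1], -1 = done */
    int is_used;
};
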
1091 struct interval_tree_double_span_iter span; in pfn_reader_unpin() local
1096 interval_tree_for_each_double_span(&span, &pages->access_itree, in pfn_reader_unpin()
1098 if (span.is_used) in pfn_reader_unpin()
1101 batch_unpin(&pfns->batch, pages, span.start_hole - start, in pfn_reader_unpin()
1102 span.last_hole - span.start_hole + 1); in pfn_reader_unpin()
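
Stitched together, lines 1091-1102 form the unpin loop: walk the union of access_itree and domains_itree across the batch window and release only the hole spans, i.e. ranges that neither tree still accounts for. A plausible reconstruction around the matched lines (the locals are assumptions filled in from context):

static void pfn_reader_unpin(struct pfn_reader *pfns)
{
    unsigned long last = pfns->batch_end_index - 1;
    unsigned long start = pfns->batch_start_index;
    struct interval_tree_double_span_iter span;
    struct iopt_pages *pages = pfns->pages;

    interval_tree_for_each_double_span(&span, &pages->access_itree,
                                       &pages->domains_itree, start, last) {
        /* Spans still tracked by an access or a domain keep their pins */
        if (span.is_used)
            continue;
        batch_unpin(&pfns->batch, pages, span.start_hole - start,
                    span.last_hole - span.start_hole + 1);
    }
}
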
1106 /* Process a single span to load it from the proper storage */
1109 struct interval_tree_double_span_iter *span = &pfns->span; in pfn_reader_fill_span() local
1117 WARN_ON(span->last_used < start_index)) in pfn_reader_fill_span()
1120 if (span->is_used == 1) { in pfn_reader_fill_span()
1122 start_index, span->last_used); in pfn_reader_fill_span()
1126 if (span->is_used == 2) { in pfn_reader_fill_span()
1129 * target span. If it is too small then we will be called again in pfn_reader_fill_span()
1139 min(iopt_area_last_index(area), span->last_used)); in pfn_reader_fill_span()
1145 span->last_hole); in pfn_reader_fill_span()
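
Lines 1109-1145 are a three-way dispatch on span->is_used: 1 means the PFNs are already pinned for an access and sit in pages->pinned_pfns, 2 means they are stored in an iommu_domain, and 0 (a hole) means they must be pinned fresh from the mm. A skeleton of that dispatch; batch_from_xarray(), iopt_pages_find_domain_area(), batch_from_domain() and pfn_reader_user_pin() are recollections of pages.c rather than part of the matches, and the hole branch is simplified:

static int pfn_reader_fill_span(struct pfn_reader *pfns)
{
    struct interval_tree_double_span_iter *span = &pfns->span;
    unsigned long start_index = pfns->batch_end_index;
    struct iopt_area *area;

    if (WARN_ON(span->last_used < start_index))
        return -EINVAL;

    if (span->is_used == 1) {
        /* Already pinned for an access: copy PFNs out of the xarray */
        batch_from_xarray(&pfns->batch, &pfns->pages->pinned_pfns,
                          start_index, span->last_used);
        return 0;
    }

    if (span->is_used == 2) {
        /*
         * Pull as many pages as possible from the first domain found in
         * the span; if it is too small this is simply called again.
         */
        area = iopt_pages_find_domain_area(pfns->pages, start_index);
        if (WARN_ON(!area))
            return -EINVAL;
        batch_from_domain(&pfns->batch, area->storage_domain, area,
                          start_index,
                          min(iopt_area_last_index(area), span->last_used));
        return 0;
    }

    /* A hole: pin fresh pages from the mm, up to span->last_hole */
    return pfn_reader_user_pin(&pfns->user, pfns->pages, start_index,
                               span->last_hole);
}
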
1179 WARN_ON(interval_tree_double_span_iter_done(&pfns->span))) in pfn_reader_next()
1191 if (pfns->batch_end_index == pfns->span.last_used + 1) in pfn_reader_next()
1192 interval_tree_double_span_iter_next(&pfns->span); in pfn_reader_next()
1216 interval_tree_double_span_iter_first(&pfns->span, &pages->access_itree, in pfn_reader_init()
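
Lines 1179-1216 show the reader driving the iterator by hand instead of through the for-each macro: pfn_reader_init() primes it with _first(), pfn_reader_next() advances it with _next() once the batch has consumed the current span, and _done() marks exhaustion. Expanded, the protocol amounts to the following minimal usage sketch, assuming the double_span.h API; the pr_debug() bodies are placeholders:

struct interval_tree_double_span_iter span;

interval_tree_double_span_iter_first(&span, &pages->access_itree,
                                     &pages->domains_itree, start_index,
                                     last_index);
while (!interval_tree_double_span_iter_done(&span)) {
    if (span.is_used)
        pr_debug("used [%lu, %lu] in tree %d\n", span.start_used,
                 span.last_used, span.is_used);
    else
        pr_debug("hole [%lu, %lu]\n", span.start_hole, span.last_hole);
    interval_tree_double_span_iter_next(&span);
}
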
1447 struct interval_tree_double_span_iter span; in __iopt_area_unfill_domain() local
1463 * end of any contiguous span, unmap that whole span, and then only in __iopt_area_unfill_domain()
1470 interval_tree_for_each_double_span(&span, &pages->domains_itree, in __iopt_area_unfill_domain()
1473 if (span.is_used) { in __iopt_area_unfill_domain()
1475 span.last_used - span.start_used + 1); in __iopt_area_unfill_domain()
1479 span.start_hole, span.last_hole, in __iopt_area_unfill_domain()
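
Here the trees are walked in the opposite order (domains_itree first), so is_used spans are ranges still accounted for by another domain or an access: they are skipped and stay pinned, while holes are unmapped and unpinned in line with the comment at 1463. The shape of the loop around lines 1470-1479; batch_skip_carry() and iopt_area_unpin_domain() are recollected helper names, not part of the matches:

interval_tree_for_each_double_span(&span, &pages->domains_itree,
                                   &pages->access_itree, start_index,
                                   last_index) {
    if (span.is_used) {
        /* Still referenced elsewhere: keep the pages pinned */
        batch_skip_carry(&batch, span.last_used - span.start_used + 1);
        continue;
    }
    /* Nothing else holds these pages: unmap the span, then unpin it */
    iopt_area_unpin_domain(&batch, area, pages, domain, span.start_hole,
                           span.last_hole, &unmapped_end_index,
                           &real_last_index);
}
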
1731 struct interval_tree_double_span_iter span; in iopt_pages_unfill_xarray() local
1738 interval_tree_for_each_double_span(&span, &pages->access_itree, in iopt_pages_unfill_xarray()
1741 if (!span.is_used) { in iopt_pages_unfill_xarray()
1748 iopt_pages_unpin_xarray(&batch, pages, span.start_hole, in iopt_pages_unfill_xarray()
1749 span.last_hole); in iopt_pages_unfill_xarray()
1750 } else if (span.is_used == 2) { in iopt_pages_unfill_xarray()
1752 clear_xarray(&pages->pinned_pfns, span.start_used, in iopt_pages_unfill_xarray()
1753 span.last_used); in iopt_pages_unfill_xarray()
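
The unfill side of the xarray: holes (PFNs that were pinned only for this access) are unpinned and dropped, while ranges also stored in a domain (is_used == 2, the second tree) only have their xarray copies cleared because the domain keeps the pin; is_used == 1 ranges are left alone since another access still needs them. The loop shape around lines 1738-1753, with the batch setup elided:

interval_tree_for_each_double_span(&span, &pages->access_itree,
                                   &pages->domains_itree, start_index,
                                   last_index) {
    if (!span.is_used) {
        /* Pinned only for this access: unpin and forget the PFNs */
        iopt_pages_unpin_xarray(&batch, pages, span.start_hole,
                                span.last_hole);
    } else if (span.is_used == 2) {
        /* A domain still holds the pin: just drop the xarray copy */
        clear_xarray(&pages->pinned_pfns, span.start_used, span.last_used);
    }
}
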
1859 struct interval_tree_double_span_iter span; in iopt_pages_fill_xarray() local
1868 interval_tree_for_each_double_span(&span, &pages->access_itree, in iopt_pages_fill_xarray()
1873 if (span.is_used == 1) { in iopt_pages_fill_xarray()
1874 cur_pages = out_pages + (span.start_used - start_index); in iopt_pages_fill_xarray()
1875 iopt_pages_fill_from_xarray(pages, span.start_used, in iopt_pages_fill_xarray()
1876 span.last_used, cur_pages); in iopt_pages_fill_xarray()
1880 if (span.is_used == 2) { in iopt_pages_fill_xarray()
1881 cur_pages = out_pages + (span.start_used - start_index); in iopt_pages_fill_xarray()
1882 iopt_pages_fill_from_domain(pages, span.start_used, in iopt_pages_fill_xarray()
1883 span.last_used, cur_pages); in iopt_pages_fill_xarray()
1885 span.start_used, span.last_used, in iopt_pages_fill_xarray()
1889 xa_end = span.last_used + 1; in iopt_pages_fill_xarray()
1894 cur_pages = out_pages + (span.start_hole - start_index); in iopt_pages_fill_xarray()
1895 rc = iopt_pages_fill(pages, &user, span.start_hole, in iopt_pages_fill_xarray()
1896 span.last_hole, cur_pages); in iopt_pages_fill_xarray()
1899 rc = pages_to_xarray(&pages->pinned_pfns, span.start_hole, in iopt_pages_fill_xarray()
1900 span.last_hole, cur_pages); in iopt_pages_fill_xarray()
1902 iopt_pages_err_unpin(pages, span.start_hole, in iopt_pages_fill_xarray()
1903 span.last_hole, cur_pages); in iopt_pages_fill_xarray()
1906 xa_end = span.last_hole + 1; in iopt_pages_fill_xarray()
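
Lines 1859-1906 are the fill counterpart, again a three-way dispatch: copy from the xarray when another access already pinned the range (is_used == 1), copy from a domain and then mirror the PFNs into the xarray (is_used == 2), or pin from userspace and record the result (a hole). All of the calls below appear in the matches; the control flow, the rc/xa_end bookkeeping, and the out_clean_xa unwind label are reconstructed and should be read as a sketch:

interval_tree_for_each_double_span(&span, &pages->access_itree,
                                   &pages->domains_itree, start_index,
                                   last_index) {
    struct page **cur_pages;

    if (span.is_used == 1) {
        /* Already pinned for an access: copy PFNs from the xarray */
        cur_pages = out_pages + (span.start_used - start_index);
        iopt_pages_fill_from_xarray(pages, span.start_used,
                                    span.last_used, cur_pages);
        continue;
    }

    if (span.is_used == 2) {
        /* Stored in a domain: copy out, then mirror into the xarray */
        cur_pages = out_pages + (span.start_used - start_index);
        iopt_pages_fill_from_domain(pages, span.start_used,
                                    span.last_used, cur_pages);
        rc = pages_to_xarray(&pages->pinned_pfns, span.start_used,
                             span.last_used, cur_pages);
        if (rc)
            goto out_clean_xa;
        xa_end = span.last_used + 1;
        continue;
    }

    /* A hole: pin from userspace, then record the new PFNs */
    cur_pages = out_pages + (span.start_hole - start_index);
    rc = iopt_pages_fill(pages, &user, span.start_hole, span.last_hole,
                         cur_pages);
    if (rc)
        goto out_clean_xa;
    rc = pages_to_xarray(&pages->pinned_pfns, span.start_hole,
                         span.last_hole, cur_pages);
    if (rc) {
        iopt_pages_err_unpin(pages, span.start_hole, span.last_hole,
                             cur_pages);
        goto out_clean_xa;
    }
    xa_end = span.last_hole + 1;
}
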