Lines matching "range" in mm/memremap.c
66 static void pgmap_array_delete(struct range *range) in pgmap_array_delete() argument
68 xa_store_range(&pgmap_array, PHYS_PFN(range->start), PHYS_PFN(range->end), in pgmap_array_delete()
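pgmap_array_delete() is shown truncated: only lines containing "range" survive the search. A minimal sketch of the idea, assuming the elided arguments are a NULL entry and GFP_KERNEL (storing NULL across a multi-index span erases it from the XArray):

	#include <linux/mm.h>
	#include <linux/pfn.h>
	#include <linux/range.h>
	#include <linux/xarray.h>

	static DEFINE_XARRAY(example_pgmap_array);	/* stand-in for pgmap_array */

	static void example_array_delete(struct range *range)
	{
		/* Cover every PFN index in the inclusive [start, end] span;
		 * storing NULL removes whatever entry was published there. */
		xa_store_range(&example_pgmap_array, PHYS_PFN(range->start),
			       PHYS_PFN(range->end), NULL, GFP_KERNEL);
	}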
75 struct range *range = &pgmap->ranges[range_id]; in pfn_first() local
76 unsigned long pfn = PHYS_PFN(range->start); in pfn_first()
88 struct range *range = &pgmap->ranges[i]; in pgmap_pfn_valid() local
90 if (pfn >= PHYS_PFN(range->start) && in pgmap_pfn_valid()
91 pfn <= PHYS_PFN(range->end)) in pgmap_pfn_valid()
100 const struct range *range = &pgmap->ranges[range_id]; in pfn_end() local
102 return (range->start + range_len(range)) >> PAGE_SHIFT; in pfn_end()
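Taken together, these helpers describe one PFN window in two conventions: pgmap_pfn_valid() tests inclusively on both ends, while pfn_end() returns one past the last page. A sketch restating the arithmetic, assuming struct range keeps an inclusive end so range_len() is end - start + 1:

	#include <linux/mm.h>
	#include <linux/pfn.h>
	#include <linux/range.h>

	/* Hypothetical helpers; the kernel versions take a dev_pagemap. */
	static unsigned long example_pfn_first(const struct range *range)
	{
		return PHYS_PFN(range->start);		/* start >> PAGE_SHIFT */
	}

	static unsigned long example_pfn_end(const struct range *range)
	{
		/* range_len() == end - start + 1: one past the last PFN */
		return (range->start + range_len(range)) >> PAGE_SHIFT;
	}

	static bool example_pfn_valid(const struct range *range, unsigned long pfn)
	{
		return pfn >= PHYS_PFN(range->start) &&
		       pfn <= PHYS_PFN(range->end);	/* both ends inclusive */
	}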
113 struct range *range = &pgmap->ranges[range_id]; in pageunmap_range() local
121 remove_pfn_range_from_zone(page_zone(first_page), PHYS_PFN(range->start), in pageunmap_range()
122 PHYS_PFN(range_len(range))); in pageunmap_range()
124 __remove_pages(PHYS_PFN(range->start), in pageunmap_range()
125 PHYS_PFN(range_len(range)), NULL); in pageunmap_range()
127 arch_remove_memory(range->start, range_len(range), in pageunmap_range()
129 kasan_remove_zero_shadow(__va(range->start), range_len(range)); in pageunmap_range()
133 untrack_pfn(NULL, PHYS_PFN(range->start), range_len(range), true); in pageunmap_range()
134 pgmap_array_delete(range); in pageunmap_range()
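Read in order, the pageunmap_range() matches trace the whole teardown. An annotated outline (the branch between __remove_pages() and arch_remove_memory() is inferred, since the search elides the conditions around them):

	/*
	 * remove_pfn_range_from_zone()  - detach the PFN span from its zone
	 * __remove_pages()              - one branch: drop the memory sections
	 * arch_remove_memory() +        - other branch: undo the arch mapping
	 * kasan_remove_zero_shadow()      and its KASAN zero shadow
	 * untrack_pfn()                 - release PFN tracking for the span
	 * pgmap_array_delete()          - finally forget the XArray entry
	 */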
174 struct range *range = &pgmap->ranges[range_id]; in pagemap_range() local
182 conflict_pgmap = get_dev_pagemap(PHYS_PFN(range->start), NULL); in pagemap_range()
189 conflict_pgmap = get_dev_pagemap(PHYS_PFN(range->end), NULL); in pagemap_range()
196 is_ram = region_intersects(range->start, range_len(range), in pagemap_range()
202 range->start, range->end); in pagemap_range()
206 error = xa_err(xa_store_range(&pgmap_array, PHYS_PFN(range->start), in pagemap_range()
207 PHYS_PFN(range->end), pgmap, GFP_KERNEL)); in pagemap_range()
214 error = track_pfn_remap(NULL, &params->pgprot, PHYS_PFN(range->start), 0, in pagemap_range()
215 range_len(range)); in pagemap_range()
219 if (!mhp_range_allowed(range->start, range_len(range), !is_private)) { in pagemap_range()
238 error = add_pages(nid, PHYS_PFN(range->start), in pagemap_range()
239 PHYS_PFN(range_len(range)), params); in pagemap_range()
241 error = kasan_add_zero_shadow(__va(range->start), range_len(range)); in pagemap_range()
247 error = arch_add_memory(nid, range->start, range_len(range), in pagemap_range()
255 move_pfn_range_to_zone(zone, PHYS_PFN(range->start), in pagemap_range()
256 PHYS_PFN(range_len(range)), params->altmap, in pagemap_range()
269 PHYS_PFN(range->start), in pagemap_range()
270 PHYS_PFN(range_len(range)), pgmap); in pagemap_range()
278 kasan_remove_zero_shadow(__va(range->start), range_len(range)); in pagemap_range()
280 untrack_pfn(NULL, PHYS_PFN(range->start), range_len(range), true); in pagemap_range()
282 pgmap_array_delete(range); in pagemap_range()
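The pagemap_range() matches show both the setup sequence and its error unwind. An annotated outline (grouping and branch structure inferred; labels and conditions between the calls are elided by the search):

	/*
	 * Setup, in order:
	 *   get_dev_pagemap(start), get_dev_pagemap(end)
	 *                              - refuse to overlap an existing pgmap
	 *   region_intersects()        - refuse ranges that are already RAM
	 *   xa_store_range()           - publish pgmap for every PFN index
	 *   track_pfn_remap()          - reserve PFN tracking for the span
	 *   mhp_range_allowed()        - arch check that the range is mappable
	 *   add_pages(), or
	 *   kasan_add_zero_shadow() + arch_add_memory()
	 *                              - create the memory sections / mapping
	 *   move_pfn_range_to_zone()   - place the PFNs in their zone
	 *
	 * The trailing matches (kasan_remove_zero_shadow(), untrack_pfn(),
	 * pgmap_array_delete()) are the error unwind, releasing the same
	 * state in reverse.
	 */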
361 * successfully processed range. This communicates how many in memremap_pages()
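This fragment sits mid-comment in memremap_pages(). Paraphrasing the surrounding logic rather than quoting it: ranges are brought up one at a time, and on failure pgmap->nr_range is left at the count that fully succeeded, so the cleanup pass tears down exactly those. A sketch of that loop shape (reconstructed, not verbatim):

	int i, error = 0;

	pgmap->nr_range = 0;
	for (i = 0; i < nr_range; i++) {
		error = pagemap_range(pgmap, &params, i, nid);
		if (error)
			break;
		pgmap->nr_range++;	/* count only fully set-up ranges */
	}
	if (error) {
		memunmap_pages(pgmap);	/* unwinds pgmap->nr_range ranges */
		pgmap->nr_range = 0;
		return ERR_PTR(error);
	}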
389 * 1/ At a minimum the range and type members of @pgmap must be initialized
399 * 4/ range is expected to be a host memory range that could feasibly be
400 * treated as a "System RAM" range, i.e. not a device mmio range, but
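A minimal caller sketch for these notes (hypothetical driver code, not from the tree): only range and type are mandatory per note 1/, and nr_range = 1 is set on the assumption the multi-range interface requires it.

	#include <linux/err.h>
	#include <linux/memremap.h>
	#include <linux/numa.h>
	#include <linux/range.h>

	static struct dev_pagemap example_pgmap;	/* must outlive the mapping */

	static int example_map_device_memory(phys_addr_t base, size_t size)
	{
		void *addr;

		example_pgmap.type = MEMORY_DEVICE_GENERIC;
		example_pgmap.range = (struct range) {
			.start = base,
			.end = base + size - 1,	/* inclusive end */
		};
		example_pgmap.nr_range = 1;

		addr = memremap_pages(&example_pgmap, NUMA_NO_NODE);
		if (IS_ERR(addr))
			return PTR_ERR(addr);
		/* On success, addr maps the start of the range. */
		return 0;
	}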
443 if (phys >= pgmap->range.start && phys <= pgmap->range.end) in get_dev_pagemap()
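The inclusive start/end test above is the fast path: a caller walking PFNs can hand its previous pgmap back in, and get_dev_pagemap() only falls back to the XArray lookup when the PFN leaves that range. A usage sketch (hypothetical caller, names assumed):

	#include <linux/memremap.h>
	#include <linux/pfn.h>

	static void example_walk(unsigned long first_pfn, unsigned long last_pfn)
	{
		struct dev_pagemap *pgmap = NULL;
		unsigned long pfn;

		for (pfn = first_pfn; pfn <= last_pfn; pfn++) {
			/* Reuses the held reference while pfn stays in range;
			 * returns NULL (dropping it) for non-device memory. */
			pgmap = get_dev_pagemap(pfn, pgmap);
			if (!pgmap)
				continue;
			/* ... operate on pfn_to_page(pfn) ... */
		}
		if (pgmap)
			put_dev_pagemap(pgmap);
	}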