Lines matching identifier "mem" (query full:mem) in drivers/base/memory.c
107 struct memory_block *mem = to_memory_block(dev); in memory_block_release() local
109 WARN_ON(mem->altmap); in memory_block_release()
110 kfree(mem); in memory_block_release()
123 struct memory_block *mem = to_memory_block(dev); in phys_index_show() local
125 return sysfs_emit(buf, "%08lx\n", memory_block_id(mem->start_section_nr)); in phys_index_show()
144 struct memory_block *mem = to_memory_block(dev); in state_show() local
151 switch (mem->state) { in state_show()
163 return sysfs_emit(buf, "ERROR-UNKNOWN-%ld\n", mem->state); in state_show()
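The phys_index_show() and state_show() hits above back the per-block sysfs attributes under /sys/devices/system/memory/memoryN/: phys_index is the block id in hex, and the block's physical start address is that id times the size reported by /sys/devices/system/memory/block_size_bytes. A minimal userspace sketch that reads both attributes; block 32 and the buffer sizes are arbitrary, assumed values:

    #include <stdio.h>
    #include <string.h>

    /* Read one sysfs attribute of memory block 'block' into buf. */
    static int read_memblock_attr(int block, const char *attr,
                                  char *buf, size_t len)
    {
        char path[128];
        FILE *f;

        snprintf(path, sizeof(path),
                 "/sys/devices/system/memory/memory%d/%s", block, attr);
        f = fopen(path, "r");
        if (!f)
            return -1;
        if (!fgets(buf, (int)len, f)) {
            fclose(f);
            return -1;
        }
        fclose(f);
        buf[strcspn(buf, "\n")] = '\0';   /* strip the trailing newline */
        return 0;
    }

    int main(void)
    {
        char state[32], index[32];

        /* Block 32 is an arbitrary example; use any block present on the system. */
        if (!read_memblock_attr(32, "state", state, sizeof(state)) &&
            !read_memblock_attr(32, "phys_index", index, sizeof(index)))
            printf("memory32: phys_index=%s state=%s\n", index, state);
        return 0;
    }
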
175 static unsigned long memblk_nr_poison(struct memory_block *mem);
177 static inline unsigned long memblk_nr_poison(struct memory_block *mem) in memblk_nr_poison() argument
186 static int memory_block_online(struct memory_block *mem) in memory_block_online() argument
188 unsigned long start_pfn = section_nr_to_pfn(mem->start_section_nr); in memory_block_online()
195 if (memblk_nr_poison(mem)) in memory_block_online()
198 zone = zone_for_pfn_range(mem->online_type, mem->nid, mem->group, in memory_block_online()
208 if (mem->altmap) in memory_block_online()
209 nr_vmemmap_pages = mem->altmap->free; in memory_block_online()
223 zone, mem->altmap->inaccessible); in memory_block_online()
229 nr_pages - nr_vmemmap_pages, zone, mem->group); in memory_block_online()
241 adjust_present_page_count(pfn_to_page(start_pfn), mem->group, in memory_block_online()
244 mem->zone = zone; in memory_block_online()
257 static int memory_block_offline(struct memory_block *mem) in memory_block_offline() argument
259 unsigned long start_pfn = section_nr_to_pfn(mem->start_section_nr); in memory_block_offline()
265 if (!mem->zone) in memory_block_offline()
272 if (mem->altmap) in memory_block_offline()
273 nr_vmemmap_pages = mem->altmap->free; in memory_block_offline()
277 adjust_present_page_count(pfn_to_page(start_pfn), mem->group, in memory_block_offline()
281 nr_pages - nr_vmemmap_pages, mem->zone, mem->group); in memory_block_offline()
286 mem->group, nr_vmemmap_pages); in memory_block_offline()
293 mem->zone = NULL; in memory_block_offline()
309 memory_block_action(struct memory_block *mem, unsigned long action) in memory_block_action() argument
315 ret = memory_block_online(mem); in memory_block_action()
318 ret = memory_block_offline(mem); in memory_block_action()
322 "%ld\n", __func__, mem->start_section_nr, action, action); in memory_block_action()
329 static int memory_block_change_state(struct memory_block *mem, in memory_block_change_state() argument
334 if (mem->state != from_state_req) in memory_block_change_state()
338 mem->state = MEM_GOING_OFFLINE; in memory_block_change_state()
340 ret = memory_block_action(mem, to_state); in memory_block_change_state()
341 mem->state = ret ? from_state_req : to_state; in memory_block_change_state()
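memory_block_change_state() moves mem->state through MEM_GOING_OFFLINE before memory_block_action() runs, and rolls back to the requested from-state on failure. Other kernel code does not poke mem->state directly; it observes these transitions through the memory notifier chain. A hedged, illustrative module sketch of such a listener (the names and messages are not taken from the file above):

    #include <linux/memory.h>
    #include <linux/module.h>
    #include <linux/notifier.h>

    /* Illustrative listener: log offline-related memory block transitions. */
    static int example_mem_cb(struct notifier_block *nb,
                              unsigned long action, void *data)
    {
        struct memory_notify *arg = data;

        switch (action) {
        case MEM_GOING_OFFLINE:
            pr_info("going offline: start_pfn=%lx nr_pages=%lx\n",
                    arg->start_pfn, arg->nr_pages);
            break;
        case MEM_CANCEL_OFFLINE:
        case MEM_OFFLINE:
            pr_info("offline %s\n",
                    action == MEM_OFFLINE ? "completed" : "cancelled");
            break;
        }
        return NOTIFY_OK;
    }

    static struct notifier_block example_mem_nb = {
        .notifier_call = example_mem_cb,
    };

    static int __init example_init(void)
    {
        return register_memory_notifier(&example_mem_nb);
    }

    static void __exit example_exit(void)
    {
        unregister_memory_notifier(&example_mem_nb);
    }

    module_init(example_init);
    module_exit(example_exit);
    MODULE_LICENSE("GPL");
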
349 struct memory_block *mem = to_memory_block(dev); in memory_subsys_online() local
352 if (mem->state == MEM_ONLINE) in memory_subsys_online()
359 if (mem->online_type == MMOP_OFFLINE) in memory_subsys_online()
360 mem->online_type = MMOP_ONLINE; in memory_subsys_online()
362 ret = memory_block_change_state(mem, MEM_ONLINE, MEM_OFFLINE); in memory_subsys_online()
363 mem->online_type = MMOP_OFFLINE; in memory_subsys_online()
370 struct memory_block *mem = to_memory_block(dev); in memory_subsys_offline() local
372 if (mem->state == MEM_OFFLINE) in memory_subsys_offline()
375 return memory_block_change_state(mem, MEM_OFFLINE, MEM_ONLINE); in memory_subsys_offline()
382 struct memory_block *mem = to_memory_block(dev); in state_store() local
396 /* mem->online_type is protected by device_hotplug_lock */ in state_store()
397 mem->online_type = online_type; in state_store()
398 ret = device_online(&mem->dev); in state_store()
401 ret = device_offline(&mem->dev); in state_store()
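state_store() translates a written string into mem->online_type and then calls device_online()/device_offline(), all under device_hotplug_lock. From userspace the same path is exercised by writing to the state attribute; a small sketch, assuming block 32 exists and the caller has the required privileges:

    #include <stdio.h>

    /*
     * Request a state change for memory block 'block' by writing to its
     * state attribute. Accepted strings include "online", "offline",
     * "online_kernel" and "online_movable" (subject to kernel config and
     * the block's current state).
     */
    static int set_memblock_state(int block, const char *state)
    {
        char path[128];
        FILE *f;
        int ret;

        snprintf(path, sizeof(path),
                 "/sys/devices/system/memory/memory%d/state", block);
        f = fopen(path, "w");
        if (!f)
            return -1;
        ret = fputs(state, f) < 0 ? -1 : 0;
        /* The kernel may refuse the transition; the error surfaces on flush/close. */
        if (fclose(f) != 0)
            ret = -1;
        return ret;
    }

    int main(void)
    {
        /* Try to offline block 32; needs root and an offlinable block. */
        return set_memblock_state(32, "offline") ? 1 : 0;
    }
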
427 struct memory_block *mem = to_memory_block(dev); in phys_device_show() local
428 unsigned long start_pfn = section_nr_to_pfn(mem->start_section_nr); in phys_device_show()
452 struct memory_block *mem = to_memory_block(dev); in valid_zones_show() local
453 unsigned long start_pfn = section_nr_to_pfn(mem->start_section_nr); in valid_zones_show()
455 struct memory_group *group = mem->group; in valid_zones_show()
457 int nid = mem->nid; in valid_zones_show()
464 if (mem->state == MEM_ONLINE) { in valid_zones_show()
466 * If !mem->zone, the memory block spans multiple zones and in valid_zones_show()
469 default_zone = mem->zone; in valid_zones_show()
641 struct memory_block *mem; in find_memory_block_by_id() local
643 mem = xa_load(&memory_blocks, block_id); in find_memory_block_by_id()
644 if (mem) in find_memory_block_by_id()
645 get_device(&mem->dev); in find_memory_block_by_id()
646 return mem; in find_memory_block_by_id()
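find_memory_block_by_id() looks the block up in the memory_blocks xarray and takes a reference on the embedded struct device, so callers inside this file pair it with put_device() (see the add_memory_block() and walk_memory_blocks() hits below). A sketch of that pattern; do_something_with_block() is a hypothetical placeholder:

    /*
     * Sketch of the lookup/reference pattern used inside this file;
     * do_something_with_block() is a hypothetical placeholder.
     */
    static int example_use_block(unsigned long block_id)
    {
        struct memory_block *mem;
        int ret;

        mem = find_memory_block_by_id(block_id);    /* takes a device reference */
        if (!mem)
            return -ENODEV;

        ret = do_something_with_block(mem);         /* hypothetical work */

        put_device(&mem->dev);                      /* drop the reference */
        return ret;
    }
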
702 static struct zone *early_node_zone_for_memory_block(struct memory_block *mem, in early_node_zone_for_memory_block() argument
705 const unsigned long start_pfn = section_nr_to_pfn(mem->start_section_nr); in early_node_zone_for_memory_block()
740 * @mem: The memory block device.
747 * set/adjust mem->zone based on the zone ranges of the given node.
749 void memory_block_add_nid(struct memory_block *mem, int nid, in memory_block_add_nid() argument
752 if (context == MEMINIT_EARLY && mem->nid != nid) { in memory_block_add_nid()
762 if (mem->nid == NUMA_NO_NODE) in memory_block_add_nid()
763 mem->zone = early_node_zone_for_memory_block(mem, nid); in memory_block_add_nid()
765 mem->zone = NULL; in memory_block_add_nid()
774 mem->nid = nid; in memory_block_add_nid()
782 struct memory_block *mem; in add_memory_block() local
785 mem = find_memory_block_by_id(block_id); in add_memory_block()
786 if (mem) { in add_memory_block()
787 put_device(&mem->dev); in add_memory_block()
790 mem = kzalloc(sizeof(*mem), GFP_KERNEL); in add_memory_block()
791 if (!mem) in add_memory_block()
794 mem->start_section_nr = block_id * sections_per_block; in add_memory_block()
795 mem->state = state; in add_memory_block()
796 mem->nid = NUMA_NO_NODE; in add_memory_block()
797 mem->altmap = altmap; in add_memory_block()
798 INIT_LIST_HEAD(&mem->group_next); in add_memory_block()
808 mem->zone = early_node_zone_for_memory_block(mem, NUMA_NO_NODE); in add_memory_block()
811 ret = __add_memory_block(mem); in add_memory_block()
816 mem->group = group; in add_memory_block()
817 list_add(&mem->group_next, &group->memory_blocks); in add_memory_block()
876 struct memory_block *mem; in create_memory_block_devices() local
893 mem = find_memory_block_by_id(block_id); in create_memory_block_devices()
894 if (WARN_ON_ONCE(!mem)) in create_memory_block_devices()
896 remove_memory_block(mem); in create_memory_block_devices()
913 struct memory_block *mem; in remove_memory_block_devices() local
921 mem = find_memory_block_by_id(block_id); in remove_memory_block_devices()
922 if (WARN_ON_ONCE(!mem)) in remove_memory_block_devices()
924 num_poisoned_pages_sub(-1UL, memblk_nr_poison(mem)); in remove_memory_block_devices()
925 unregister_memory_block_under_nodes(mem); in remove_memory_block_devices()
926 remove_memory_block(mem); in remove_memory_block_devices()
1012 struct memory_block *mem; in walk_memory_blocks() local
1020 mem = find_memory_block_by_id(block_id); in walk_memory_blocks()
1021 if (!mem) in walk_memory_blocks()
1024 ret = func(mem, arg); in walk_memory_blocks()
1025 put_device(&mem->dev); in walk_memory_blocks()
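walk_memory_blocks() resolves each block id in the walked range via find_memory_block_by_id(), calls the supplied callback, and drops the device reference itself; per its contract it is called under device_hotplug_lock. A hedged example of a callback that counts online blocks, with placeholder range values supplied by the caller:

    #include <linux/memory.h>

    /* Callback invoked once per memory block in the walked range. */
    static int count_online_block(struct memory_block *mem, void *arg)
    {
        unsigned long *online = arg;

        if (mem->state == MEM_ONLINE)
            (*online)++;
        return 0;               /* a non-zero return aborts the walk */
    }

    /* Count online blocks in [start, start + size); caller holds device_hotplug_lock. */
    static unsigned long count_online_blocks(unsigned long start, unsigned long size)
    {
        unsigned long online = 0;

        walk_memory_blocks(start, size, &online, count_online_block);
        return online;
    }
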
1039 struct memory_block *mem = to_memory_block(dev); in for_each_memory_block_cb() local
1042 return cb_data->func(mem, cb_data->arg); in for_each_memory_block_cb()
1233 struct memory_block *mem = find_memory_block_by_id(block_id); in memblk_nr_poison_inc() local
1235 if (mem) in memblk_nr_poison_inc()
1236 atomic_long_inc(&mem->nr_hwpoison); in memblk_nr_poison_inc()
1242 struct memory_block *mem = find_memory_block_by_id(block_id); in memblk_nr_poison_sub() local
1244 if (mem) in memblk_nr_poison_sub()
1245 atomic_long_sub(i, &mem->nr_hwpoison); in memblk_nr_poison_sub()
1248 static unsigned long memblk_nr_poison(struct memory_block *mem) in memblk_nr_poison() argument
1250 return atomic_long_read(&mem->nr_hwpoison); in memblk_nr_poison()