Lines Matching full:order
72 unsigned order, void *entry, gfp_t gfp) in xa_store_order() argument
74 XA_STATE_ORDER(xas, xa, index, order); in xa_store_order()
177 unsigned int order; in check_xa_mark_1() local
207 for (order = 2; order < max_order; order++) { in check_xa_mark_1()
208 unsigned long base = round_down(index, 1UL << order); in check_xa_mark_1()
209 unsigned long next = base + (1UL << order); in check_xa_mark_1()
217 xa_store_order(xa, index, order, xa_mk_index(index), in check_xa_mark_1()
328 unsigned int order; in check_xa_shrink() local
353 for (order = 0; order < max_order; order++) { in check_xa_shrink()
354 unsigned long max = (1UL << order) - 1; in check_xa_shrink()
355 xa_store_order(xa, 0, order, xa_mk_value(0), GFP_KERNEL); in check_xa_shrink()
430 unsigned int i, order = 3; in check_cmpxchg_order() local
432 XA_BUG_ON(xa, xa_store_order(xa, 0, order, FIVE, GFP_KERNEL)); in check_cmpxchg_order()
434 /* Check entry FIVE has the order saved */ in check_cmpxchg_order()
435 XA_BUG_ON(xa, xa_get_order(xa, xa_to_value(FIVE)) != order); in check_cmpxchg_order()
437 /* Check all the tied indexes have the same entry and order */ in check_cmpxchg_order()
438 for (i = 0; i < (1 << order); i++) { in check_cmpxchg_order()
440 XA_BUG_ON(xa, xa_get_order(xa, i) != order); in check_cmpxchg_order()
443 /* Ensure that nothing is stored at index '1 << order' */ in check_cmpxchg_order()
444 XA_BUG_ON(xa, xa_load(xa, 1 << order) != NULL); in check_cmpxchg_order()
447 * Additionally, keep the node information and the order at in check_cmpxchg_order()
448 * '1 << order' in check_cmpxchg_order()
450 XA_BUG_ON(xa, xa_store_order(xa, 1 << order, order, FIVE, GFP_KERNEL)); in check_cmpxchg_order()
451 for (i = (1 << order); i < (1 << order) + (1 << order) - 1; i++) { in check_cmpxchg_order()
453 XA_BUG_ON(xa, xa_get_order(xa, i) != order); in check_cmpxchg_order()
459 /* Verify the order is lost at FIVE (and old) entries */ in check_cmpxchg_order()
462 /* Verify the order and entries are lost in all the tied indexes */ in check_cmpxchg_order()
463 for (i = 0; i < (1 << order); i++) { in check_cmpxchg_order()
468 /* Verify node and order are kept at '1 << order' */ in check_cmpxchg_order()
469 for (i = (1 << order); i < (1 << order) + (1 << order) - 1; i++) { in check_cmpxchg_order()
471 XA_BUG_ON(xa, xa_get_order(xa, i) != order); in check_cmpxchg_order()
585 unsigned int order) in check_multi_store_1() argument
588 unsigned long min = index & ~((1UL << order) - 1); in check_multi_store_1()
589 unsigned long max = min + (1UL << order); in check_multi_store_1()
591 xa_store_order(xa, index, order, xa_mk_index(index), GFP_KERNEL); in check_multi_store_1()
610 unsigned int order) in check_multi_store_2() argument
613 xa_store_order(xa, index, order, xa_mk_value(0), GFP_KERNEL); in check_multi_store_2()
624 unsigned int order) in check_multi_store_3() argument
630 xa_store_order(xa, index, order, xa_mk_index(index), GFP_KERNEL); in check_multi_store_3()
734 unsigned int order, in check_xa_multi_store_adv_add() argument
738 unsigned int nrpages = 1UL << order; in check_xa_multi_store_adv_add()
740 /* users are responsible for index alignment to the order when adding */ in check_xa_multi_store_adv_add()
743 xas_set_order(&xas, index, order); in check_xa_multi_store_adv_add()
766 unsigned int order) in check_xa_multi_store_adv_del_entry() argument
770 xas_set_order(&xas, index, order); in check_xa_multi_store_adv_del_entry()
777 unsigned int order) in check_xa_multi_store_adv_delete() argument
780 check_xa_multi_store_adv_del_entry(xa, index, order); in check_xa_multi_store_adv_delete()
802 * test it, and for order 20 (4 GiB block size) we can loop over in test_get_entry()
805 * order so when using a larger order we skip shared entries. in test_get_entry()
819 unsigned int order) in check_xa_multi_store_adv() argument
821 unsigned int nrpages = 1UL << order; in check_xa_multi_store_adv()
830 check_xa_multi_store_adv_add(xa, base, order, &some_val); in check_xa_multi_store_adv()
837 /* Use order 0 for the next item */ in check_xa_multi_store_adv()
844 /* Now use order for a new pointer */ in check_xa_multi_store_adv()
845 check_xa_multi_store_adv_add(xa, next_index, order, &some_val_2); in check_xa_multi_store_adv()
850 check_xa_multi_store_adv_delete(xa, next_index, order); in check_xa_multi_store_adv()
851 check_xa_multi_store_adv_delete(xa, base, order); in check_xa_multi_store_adv()
859 check_xa_multi_store_adv_add(xa, next_index, order, &some_val_2); in check_xa_multi_store_adv()
870 check_xa_multi_store_adv_delete(xa, next_index, order); in check_xa_multi_store_adv()
875 check_xa_multi_store_adv_add(xa, next_next_index, order, &some_val_2); in check_xa_multi_store_adv()
886 check_xa_multi_store_adv_delete(xa, next_next_index, order); in check_xa_multi_store_adv()
1101 unsigned int order, unsigned int present) in __check_store_iter() argument
1103 XA_STATE_ORDER(xas, xa, start, order); in __check_store_iter()
1112 XA_BUG_ON(xa, entry > xa_mk_index(start + (1UL << order) - 1)); in __check_store_iter()
1124 XA_BUG_ON(xa, xa_load(xa, start + (1UL << order) - 1) != in __check_store_iter()
1167 static noinline void check_multi_find_1(struct xarray *xa, unsigned order) in check_multi_find_1() argument
1170 unsigned long multi = 3 << order; in check_multi_find_1()
1171 unsigned long next = 4 << order; in check_multi_find_1()
1174 xa_store_order(xa, multi, order, xa_mk_value(multi), GFP_KERNEL); in check_multi_find_1()
1225 unsigned int order; in check_multi_find_3() local
1227 for (order = 5; order < order_limit; order++) { in check_multi_find_3()
1228 unsigned long index = 1UL << (order - 5); in check_multi_find_3()
1231 xa_store_order(xa, 0, order - 4, xa_mk_index(0), GFP_KERNEL); in check_multi_find_3()
1388 unsigned int order; in check_find_entry() local
1391 for (order = 0; order < 20; order++) { in check_find_entry()
1392 for (offset = 0; offset < (1UL << (order + 3)); in check_find_entry()
1393 offset += (1UL << order)) { in check_find_entry()
1394 for (index = 0; index < (1UL << (order + 5)); in check_find_entry()
1395 index += (1UL << order)) { in check_find_entry()
1396 xa_store_order(xa, index, order, in check_find_entry()
1421 int order; in check_pause() local
1425 for (order = 0; order < order_limit; order++) { in check_pause()
1426 XA_BUG_ON(xa, xa_store_order(xa, index, order, in check_pause()
1428 index += 1UL << order; in check_pause()
1453 for (order = order_limit - 1; order >= 0; order--) { in check_pause()
1454 XA_BUG_ON(xa, xa_store_order(xa, index, order, in check_pause()
1456 index += 1UL << order; in check_pause()
1650 unsigned long index, unsigned order) in xa_store_many_order() argument
1652 XA_STATE_ORDER(xas, xa, index, order); in xa_store_many_order()
1661 for (i = 0; i < (1U << order); i++) { in xa_store_many_order()
1673 unsigned long index, unsigned order) in check_create_range_1() argument
1677 xa_store_many_order(xa, index, order); in check_create_range_1()
1678 for (i = index; i < index + (1UL << order); i++) in check_create_range_1()
1683 static noinline void check_create_range_2(struct xarray *xa, unsigned order) in check_create_range_2() argument
1686 unsigned long nr = 1UL << order; in check_create_range_2()
1689 xa_store_many_order(xa, i, order); in check_create_range_2()
1704 unsigned long index, unsigned order) in check_create_range_4() argument
1706 XA_STATE_ORDER(xas, xa, index, order); in check_create_range_4()
1716 for (i = 0; i < (1UL << order); i++) { in check_create_range_4()
1730 for (i = base; i < base + (1UL << order); i++) in check_create_range_4()
1736 unsigned long index, unsigned int order) in check_create_range_5() argument
1738 XA_STATE_ORDER(xas, xa, index, order); in check_create_range_5()
1741 xa_store_order(xa, index, order, xa_mk_index(index), GFP_KERNEL); in check_create_range_5()
1743 for (i = 0; i < order + 10; i++) { in check_create_range_5()
1756 unsigned int order; in check_create_range() local
1759 for (order = 0; order < max_order; order++) { in check_create_range()
1760 check_create_range_1(xa, 0, order); in check_create_range()
1761 check_create_range_1(xa, 1U << order, order); in check_create_range()
1762 check_create_range_1(xa, 2U << order, order); in check_create_range()
1763 check_create_range_1(xa, 3U << order, order); in check_create_range()
1764 check_create_range_1(xa, 1U << 24, order); in check_create_range()
1765 if (order < 10) in check_create_range()
1766 check_create_range_2(xa, order); in check_create_range()
1768 check_create_range_4(xa, 0, order); in check_create_range()
1769 check_create_range_4(xa, 1U << order, order); in check_create_range()
1770 check_create_range_4(xa, 2U << order, order); in check_create_range()
1771 check_create_range_4(xa, 3U << order, order); in check_create_range()
1772 check_create_range_4(xa, 1U << 24, order); in check_create_range()
1774 check_create_range_4(xa, 1, order); in check_create_range()
1775 check_create_range_4(xa, (1U << order) + 1, order); in check_create_range()
1776 check_create_range_4(xa, (2U << order) + 1, order); in check_create_range()
1777 check_create_range_4(xa, (2U << order) - 1, order); in check_create_range()
1778 check_create_range_4(xa, (3U << order) + 1, order); in check_create_range()
1779 check_create_range_4(xa, (3U << order) - 1, order); in check_create_range()
1780 check_create_range_4(xa, (1U << 24) + 1, order); in check_create_range()
1782 check_create_range_5(xa, 0, order); in check_create_range()
1783 check_create_range_5(xa, (1U << order), order); in check_create_range()
1824 unsigned int order, unsigned int new_order) in check_split_1() argument
1830 xa_store_order(xa, index, order, xa, GFP_KERNEL); in check_split_1()
1833 xas_split_alloc(&xas, xa, order, GFP_KERNEL); in check_split_1()
1835 xas_split(&xas, xa, order); in check_split_1()
1836 for (i = 0; i < (1 << order); i += (1 << new_order)) in check_split_1()
1840 for (i = 0; i < (1 << order); i++) { in check_split_1()
1856 XA_BUG_ON(xa, found != 1 << (order - new_order)); in check_split_1()
1863 unsigned int order, new_order; in check_split() local
1867 for (order = 1; order < 2 * XA_CHUNK_SHIFT; order++) { in check_split()
1868 for (new_order = 0; new_order < order; new_order++) { in check_split()
1869 check_split_1(xa, 0, order, new_order); in check_split()
1870 check_split_1(xa, 1UL << order, order, new_order); in check_split()
1871 check_split_1(xa, 3UL << order, order, new_order); in check_split()
1994 unsigned int order; in check_account() local
1996 for (order = 1; order < 12; order++) { in check_account()
1997 XA_STATE(xas, xa, 1 << order); in check_account()
1999 xa_store_order(xa, 0, order, xa, GFP_KERNEL); in check_account()
2003 XA_BUG_ON(xa, xas.xa_node->count > (1 << order)); in check_account()
2007 xa_store_order(xa, 1 << order, order, xa_mk_index(1UL << order), in check_account()
2011 xa_erase(xa, 1 << order); in check_account()
2023 unsigned int order; in check_get_order() local
2029 for (order = 0; order < max_order; order++) { in check_get_order()
2031 xa_store_order(xa, i << order, order, in check_get_order()
2032 xa_mk_index(i << order), GFP_KERNEL); in check_get_order()
2033 for (j = i << order; j < (i + 1) << order; j++) in check_get_order()
2034 XA_BUG_ON(xa, xa_get_order(xa, j) != order); in check_get_order()
2035 xa_erase(xa, i << order); in check_get_order()
2045 unsigned int order; in check_xas_get_order() local
2048 for (order = 0; order < max_order; order++) { in check_xas_get_order()
2050 xas_set_order(&xas, i << order, order); in check_xas_get_order()
2057 for (j = i << order; j < (i + 1) << order; j++) { in check_xas_get_order()
2061 XA_BUG_ON(xa, xas_get_order(&xas) != order); in check_xas_get_order()
2066 xas_set_order(&xas, i << order, order); in check_xas_get_order()
2080 unsigned int order; in check_xas_conflict_get_order() local
2083 for (order = 0; order < max_order; order++) { in check_xas_conflict_get_order()
2085 xas_set_order(&xas, i << order, order); in check_xas_conflict_get_order()
2095 j = i << order; in check_xas_conflict_get_order()
2096 for (k = 0; k < order; k++) { in check_xas_conflict_get_order()
2102 XA_BUG_ON(xa, xas_get_order(&xas) != order); in check_xas_conflict_get_order()
2109 if (order < max_order - 1) { in check_xas_conflict_get_order()
2111 xas_set_order(&xas, (i & ~1UL) << order, order + 1); in check_xas_conflict_get_order()
2115 XA_BUG_ON(xa, xas_get_order(&xas) != order); in check_xas_conflict_get_order()
2122 xas_set_order(&xas, i << order, order); in check_xas_conflict_get_order()