/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_ACCOUNTING_ATOMIC_STACK_H_
#define ART_RUNTIME_GC_ACCOUNTING_ATOMIC_STACK_H_

#include <sys/mman.h>  // For the PROT_* and MAP_* constants.

#include <algorithm>
#include <memory>
#include <string>

#include <android-base/logging.h>

#include "base/atomic.h"
#include "base/macros.h"
#include "base/mem_map.h"
#include "stack_reference.h"

// This implements a double-ended queue (deque) with various flavors of PushBack operations,
// as well as PopBack and PopFront operations. We expect that all calls are performed
// by a single thread (normally the GC). There is one exception, which accounts for the
// name:
// - Multiple calls to AtomicPushBack*() and AtomicBumpBack() may be made concurrently,
//   provided no other calls are made at the same time.

namespace art HIDDEN {
namespace gc {
namespace accounting {
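
// A minimal usage sketch of the class defined below (illustrative only: the stack name,
// the sizes, and `obj` are hypothetical and not taken from the collector's actual
// configuration):
//
//   AtomicStack<mirror::Object>* stack =
//       AtomicStack<mirror::Object>::Create("mark stack",
//                                           /*growth_limit=*/ 4096,
//                                           /*capacity=*/ 8192);
//   if (!stack->AtomicPushBack(obj)) {
//     // The growth limit was hit; the caller must handle the overflow itself.
//   }
//   while (!stack->IsEmpty()) {
//     mirror::Object* to_mark = stack->PopBack();
//     // ... process `to_mark` ...
//   }
//   stack->Reset();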

// Internal representation is StackReference<T>, so this only works with mirror::Object or its
// subclasses.
template <typename T>
class AtomicStack {
 public:
  class ObjectComparator {
   public:
    // These two comparators are for std::binary_search.
    bool operator()(const T* a, const StackReference<T>& b) const NO_THREAD_SAFETY_ANALYSIS {
      return a < b.AsMirrorPtr();
    }
    bool operator()(const StackReference<T>& a, const T* b) const NO_THREAD_SAFETY_ANALYSIS {
      return a.AsMirrorPtr() < b;
    }
    // This comparator is for std::sort.
    bool operator()(const StackReference<T>& a, const StackReference<T>& b) const
        NO_THREAD_SAFETY_ANALYSIS {
      return a.AsMirrorPtr() < b.AsMirrorPtr();
    }
  };

  // Capacity is how many elements we can store in the stack.
  static AtomicStack* Create(const std::string& name, size_t growth_limit, size_t capacity) {
    std::unique_ptr<AtomicStack> mark_stack(new AtomicStack(name, growth_limit, capacity));
    mark_stack->Init();
    return mark_stack.release();
  }

  ~AtomicStack() {}

  void Reset() {
    DCHECK(mem_map_.IsValid());
    DCHECK(begin_ != nullptr);
    front_index_.store(0, std::memory_order_relaxed);
    back_index_.store(0, std::memory_order_relaxed);
    debug_is_sorted_ = true;
    mem_map_.MadviseDontNeedAndZero();
  }

  // Beware: Mixing atomic pushes and atomic pops will cause an ABA problem.

  // Returns false if we overflowed the stack.
  bool AtomicPushBackIgnoreGrowthLimit(T* value) REQUIRES_SHARED(Locks::mutator_lock_) {
    return AtomicPushBackInternal(value, capacity_);
  }

  // Returns false if we overflowed the stack.
  bool AtomicPushBack(T* value) REQUIRES_SHARED(Locks::mutator_lock_) {
    return AtomicPushBackInternal(value, growth_limit_);
  }

  // Atomically bump the back index by the given number of
  // slots. Returns false if we overflowed the stack.
  bool AtomicBumpBack(size_t num_slots, StackReference<T>** start_address,
                      StackReference<T>** end_address)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (kIsDebugBuild) {
      debug_is_sorted_ = false;
    }
    int32_t index;
    int32_t new_index;
    do {
      index = back_index_.load(std::memory_order_relaxed);
      new_index = index + num_slots;
      if (UNLIKELY(static_cast<size_t>(new_index) >= growth_limit_)) {
        // Stack overflow.
        return false;
      }
    } while (!back_index_.CompareAndSetWeakRelaxed(index, new_index));
    *start_address = begin_ + index;
    *end_address = begin_ + new_index;
    if (kIsDebugBuild) {
      // Check that the memory is zero.
      for (int32_t i = index; i < new_index; ++i) {
        DCHECK_EQ(begin_[i].AsMirrorPtr(), static_cast<T*>(nullptr))
            << "i=" << i << " index=" << index << " new_index=" << new_index;
      }
    }
    return true;
  }

  void AssertAllZero() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (kIsDebugBuild) {
      for (size_t i = 0; i < capacity_; ++i) {
        DCHECK_EQ(begin_[i].AsMirrorPtr(), static_cast<T*>(nullptr)) << "i=" << i;
      }
    }
  }
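
  // A hedged sketch of the bulk-reservation pattern provided by AtomicBumpBack() and
  // BumpBack() below (the slot count, `stack`, and NextReferenceToPush() are hypothetical):
  //
  //   StackReference<mirror::Object>* start;
  //   StackReference<mirror::Object>* end;
  //   if (stack->AtomicBumpBack(/*num_slots=*/ 256, &start, &end)) {
  //     // [start, end) now belongs exclusively to the reserving thread; it can be
  //     // filled without further synchronization.
  //     for (StackReference<mirror::Object>* it = start; it != end; ++it) {
  //       it->Assign(NextReferenceToPush());
  //     }
  //   } else {
  //     // Reservation failed because the stack reached its growth limit.
  //   }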

  // Bump the back index by the given number of slots. Returns false if this
  // operation will overflow the stack. New elements should be written
  // to [*start_address, *end_address).
  bool BumpBack(size_t num_slots,
                StackReference<T>** start_address,
                StackReference<T>** end_address)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (kIsDebugBuild) {
      debug_is_sorted_ = false;
    }
    const int32_t index = back_index_.load(std::memory_order_relaxed);
    const int32_t new_index = index + num_slots;
    if (UNLIKELY(static_cast<size_t>(new_index) >= growth_limit_)) {
      // Stack overflow.
      return false;
    }
    back_index_.store(new_index, std::memory_order_relaxed);
    *start_address = begin_ + index;
    *end_address = begin_ + new_index;
    if (kIsDebugBuild) {
      // Check that the memory is zero.
      for (int32_t i = index; i < new_index; i++) {
        DCHECK_EQ(begin_[i].AsMirrorPtr(), static_cast<T*>(nullptr))
            << "i=" << i << " index=" << index << " new_index=" << new_index;
      }
    }
    return true;
  }

  void PushBack(T* value) REQUIRES_SHARED(Locks::mutator_lock_) {
    if (kIsDebugBuild) {
      debug_is_sorted_ = false;
    }
    const int32_t index = back_index_.load(std::memory_order_relaxed);
    DCHECK_LT(static_cast<size_t>(index), growth_limit_);
    back_index_.store(index + 1, std::memory_order_relaxed);
    begin_[index].Assign(value);
  }

  T* PopBack() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_GT(back_index_.load(std::memory_order_relaxed),
              front_index_.load(std::memory_order_relaxed));
    // Decrement the back index non-atomically.
    const int32_t index = back_index_.load(std::memory_order_relaxed) - 1;
    back_index_.store(index, std::memory_order_relaxed);
    T* ret = begin_[index].AsMirrorPtr();
    // In debug builds we expect the stack elements to be null, which may not
    // always be the case if the stack is being reused without resetting it
    // in-between.
    if (kIsDebugBuild) {
      begin_[index].Clear();
    }
    return ret;
  }

  // Take an item from the front of the stack.
  T PopFront() {
    int32_t index = front_index_.load(std::memory_order_relaxed);
    DCHECK_LT(index, back_index_.load(std::memory_order_relaxed));
    front_index_.store(index + 1, std::memory_order_relaxed);
    return begin_[index];
  }

  // Pop a number of elements.
  void PopBackCount(int32_t n) {
    DCHECK_GE(Size(), static_cast<size_t>(n));
    back_index_.store(back_index_.load(std::memory_order_relaxed) - n, std::memory_order_relaxed);
  }

  bool IsEmpty() const {
    return Size() == 0;
  }

  bool IsFull() const {
    return Size() == growth_limit_;
  }

  size_t Size() const {
    DCHECK_LE(front_index_.load(std::memory_order_relaxed),
              back_index_.load(std::memory_order_relaxed));
    return
        back_index_.load(std::memory_order_relaxed) - front_index_.load(std::memory_order_relaxed);
  }

  StackReference<T>* Begin() const {
    return begin_ + front_index_.load(std::memory_order_relaxed);
  }
  StackReference<T>* End() const {
    return begin_ + back_index_.load(std::memory_order_relaxed);
  }
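
  // A hedged example of scanning the live range with Begin()/End() from the single
  // owning thread (VisitObject() is a hypothetical callback):
  //
  //   for (StackReference<mirror::Object>* it = stack->Begin(); it != stack->End(); ++it) {
  //     VisitObject(it->AsMirrorPtr());
  //   }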

  size_t Capacity() const {
    return capacity_;
  }

  // Will clear the stack.
  void Resize(size_t new_capacity) {
    capacity_ = new_capacity;
    growth_limit_ = new_capacity;
    Init();
  }

  void Sort() {
    int32_t start_back_index = back_index_.load(std::memory_order_relaxed);
    int32_t start_front_index = front_index_.load(std::memory_order_relaxed);
    std::sort(Begin(), End(), ObjectComparator());
    CHECK_EQ(start_back_index, back_index_.load(std::memory_order_relaxed));
    CHECK_EQ(start_front_index, front_index_.load(std::memory_order_relaxed));
    if (kIsDebugBuild) {
      debug_is_sorted_ = true;
    }
  }

  bool ContainsSorted(const T* value) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(debug_is_sorted_);
    return std::binary_search(Begin(), End(), value, ObjectComparator());
  }

  bool Contains(const T* value) const REQUIRES_SHARED(Locks::mutator_lock_) {
    for (auto cur = Begin(), end = End(); cur != end; ++cur) {
      if (cur->AsMirrorPtr() == value) {
        return true;
      }
    }
    return false;
  }

 private:
  AtomicStack(const std::string& name, size_t growth_limit, size_t capacity)
      : name_(name),
        back_index_(0),
        front_index_(0),
        begin_(nullptr),
        growth_limit_(growth_limit),
        capacity_(capacity),
        debug_is_sorted_(true) {
  }

  // Returns false if we overflowed the stack.
  bool AtomicPushBackInternal(T* value, size_t limit) ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (kIsDebugBuild) {
      debug_is_sorted_ = false;
    }
    int32_t index;
    do {
      index = back_index_.load(std::memory_order_relaxed);
      if (UNLIKELY(static_cast<size_t>(index) >= limit)) {
        // Stack overflow.
        return false;
      }
    } while (!back_index_.CompareAndSetWeakRelaxed(index, index + 1));
    begin_[index].Assign(value);
    return true;
  }

  // Allocates the backing memory (capacity_ elements) and resets the stack.
  void Init() {
    std::string error_msg;
    mem_map_ = MemMap::MapAnonymous(name_.c_str(),
                                    capacity_ * sizeof(begin_[0]),
                                    PROT_READ | PROT_WRITE,
                                    /*low_4gb=*/ false,
                                    &error_msg);
    CHECK(mem_map_.IsValid()) << "couldn't allocate mark stack.\n" << error_msg;
    uint8_t* addr = mem_map_.Begin();
    CHECK(addr != nullptr);
    debug_is_sorted_ = true;
    begin_ = reinterpret_cast<StackReference<T>*>(addr);
    Reset();
  }

  // Name of the mark stack.
  std::string name_;
  // Memory mapping of the atomic stack.
  MemMap mem_map_;
  // Back index (index after the last element pushed).
  AtomicInteger back_index_;
  // Front index, used for implementing PopFront.
  AtomicInteger front_index_;
  // Base of the atomic stack.
  StackReference<T>* begin_;
  // Current maximum which we can push back to, must be <= capacity_.
  size_t growth_limit_;
  // Maximum number of elements.
  size_t capacity_;
  // Whether or not the stack is sorted, only updated in debug mode to avoid performance overhead.
  bool debug_is_sorted_;

  DISALLOW_COPY_AND_ASSIGN(AtomicStack);
};

using ObjectStack = AtomicStack<mirror::Object>;

}  // namespace accounting
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_ACCOUNTING_ATOMIC_STACK_H_