/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_BASE_QUASI_ATOMIC_H_
#define ART_RUNTIME_BASE_QUASI_ATOMIC_H_

#include <stdint.h>
#include <atomic>
#include <limits>
#include <vector>

#include <android-base/logging.h>

#include "arch/instruction_set.h"
#include "base/macros.h"

namespace art HIDDEN {

class Mutex;

// QuasiAtomic encapsulates two separate facilities that we are
// trying to move away from: "quasiatomic" 64-bit operations
// and custom memory fences. For the time being, they remain
// exposed. Clients should be converted to use class Atomic
// whenever possible, and should eventually use C++11 atomics.
// The two facilities that do not have a good C++11 analog are
// ThreadFenceForConstructor and Atomic::*JavaData.
//
// NOTE: Two "quasiatomic" operations on the exact same memory address
// are guaranteed to operate atomically with respect to each other,
// but no guarantees are made about quasiatomic operations mixed with
// non-quasiatomic operations on the same address, nor about
// quasiatomic operations that are performed on partially-overlapping
// memory.
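//
// For example (illustrative only, not additional API):
//
//   Thread A: QuasiAtomic::Write64(&x, v);   // quasiatomic store
//   Thread B: int64_t y = x;                 // plain load: may tear; not covered
//
// Both sides must go through QuasiAtomic for the tear-freedom guarantee to apply.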
class QuasiAtomic {
  static constexpr bool NeedSwapMutexes([[maybe_unused]] InstructionSet isa) {
    // TODO: Remove this function now that mips support has been removed.
    return false;
  }

 public:
  static void Startup();

  static void Shutdown();

  // Reads the 64-bit value at "addr" without tearing.
  static int64_t Read64(volatile const int64_t* addr) {
    if (!NeedSwapMutexes(kRuntimeISA)) {
      int64_t value;
#if defined(__LP64__)
      value = *addr;
#else
#if defined(__arm__)
#if defined(__ARM_FEATURE_LPAE)
      // With LPAE support (such as Cortex-A15), ldrd is defined not to tear.
      __asm__ __volatile__("@ QuasiAtomic::Read64\n"
        "ldrd     %0, %H0, %1"
        : "=r" (value)
        : "m" (*addr));
#else
      // Exclusive loads are defined not to tear; clearing the exclusive state isn't necessary.
      __asm__ __volatile__("@ QuasiAtomic::Read64\n"
        "ldrexd     %0, %H0, %1"
        : "=r" (value)
        : "Q" (*addr));
#endif
#elif defined(__i386__)
      __asm__ __volatile__(
        "movq     %1, %0\n"
        : "=x" (value)
        : "m" (*addr));
#else
      LOG(FATAL) << "Unsupported architecture";
#endif
#endif  // defined(__LP64__)
      return value;
    } else {
      return SwapMutexRead64(addr);
    }
  }

  // Writes to the 64-bit value at "addr" without tearing.
  static void Write64(volatile int64_t* addr, int64_t value) {
    if (!NeedSwapMutexes(kRuntimeISA)) {
#if defined(__LP64__)
      *addr = value;
#else
#if defined(__arm__)
#if defined(__ARM_FEATURE_LPAE)
      // If we know the ARM architecture has LPAE (such as Cortex-A15), strd is defined not to tear.
      __asm__ __volatile__("@ QuasiAtomic::Write64\n"
        "strd     %1, %H1, %0"
        : "=m"(*addr)
        : "r" (value));
#else
      // The write is done as a swap so that the cache-line is in the exclusive state for the store.
      int64_t prev;
      int status;
      do {
        __asm__ __volatile__("@ QuasiAtomic::Write64\n"
          "ldrexd     %0, %H0, %2\n"
          "strexd     %1, %3, %H3, %2"
          : "=&r" (prev), "=&r" (status), "+Q"(*addr)
          : "r" (value)
          : "cc");
      } while (UNLIKELY(status != 0));
#endif
#elif defined(__i386__)
      __asm__ __volatile__(
        "movq     %1, %0"
        : "=m" (*addr)
        : "x" (value));
#else
      LOG(FATAL) << "Unsupported architecture";
#endif
#endif  // defined(__LP64__)
    } else {
      SwapMutexWrite64(addr, value);
    }
  }
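
  // Example (an illustrative caller, not part of this class): on a 32-bit
  // target a plain 64-bit load or store may tear, so a shared 64-bit field is
  // accessed through the helpers above:
  //
  //   volatile int64_t field = 0;
  //   QuasiAtomic::Write64(&field, INT64_C(0x0102030405060708));  // never half-written
  //   int64_t snapshot = QuasiAtomic::Read64(&field);             // never a torn value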

  // Atomically compare the value at "addr" to "old_value"; if they are equal,
  // replace it with "new_value" and return true. Otherwise, don't swap, and
  // return false.
  // This is fully ordered, i.e. it has C++11 memory_order_seq_cst
  // semantics (assuming all other accesses use a mutex if this one does).
  // This has "strong" semantics; if it fails then it is guaranteed that
  // at some point during the execution of Cas64, *addr was not equal to
  // old_value.
  static bool Cas64(int64_t old_value, int64_t new_value, volatile int64_t* addr) {
    if (!NeedSwapMutexes(kRuntimeISA)) {
      return __sync_bool_compare_and_swap(addr, old_value, new_value);
    } else {
      return SwapMutexCas64(old_value, new_value, addr);
    }
  }
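
  // Example (an illustrative caller, not part of this class): a 64-bit
  // fetch-and-add built on the strong compare-and-swap above, where "addr"
  // and "delta" are hypothetical names used only for this sketch:
  //
  //   int64_t old;
  //   do {
  //     old = QuasiAtomic::Read64(addr);
  //   } while (!QuasiAtomic::Cas64(old, old + delta, addr));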

  // Does the architecture provide reasonable atomic long operations or do we fall back on mutexes?
  static bool LongAtomicsUseMutexes(InstructionSet isa) {
    return NeedSwapMutexes(isa);
  }

  static void ThreadFenceForConstructor() {
    #if defined(__aarch64__)
      __asm__ __volatile__("dmb ishst" : : : "memory");
    #else
      std::atomic_thread_fence(std::memory_order_release);
    #endif
  }
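
  // Example (illustrative only, not part of this class): publishing an object
  // so that a thread which observes the published pointer also observes the
  // stores made by its constructor:
  //
  //   obj->value_ = 42;                          // constructor's store
  //   QuasiAtomic::ThreadFenceForConstructor();  // order stores before the publish
  //   shared_slot_ = obj;                        // publish the pointer
  //
  // (obj, value_ and shared_slot_ are hypothetical names used only for this sketch.)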

 private:
  static Mutex* GetSwapMutex(const volatile int64_t* addr);
  static int64_t SwapMutexRead64(volatile const int64_t* addr);
  static void SwapMutexWrite64(volatile int64_t* addr, int64_t val);
  static bool SwapMutexCas64(int64_t old_value, int64_t new_value, volatile int64_t* addr);

  // We stripe across a bunch of different mutexes to reduce contention.
  static constexpr size_t kSwapMutexCount = 32;
  static std::vector<Mutex*>* gSwapMutexes;
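  //
  // (A sketch of the striping idea, not necessarily the exact implementation:
  //  GetSwapMutex(addr) could pick the stripe from the address, e.g.
  //  (*gSwapMutexes)[reinterpret_cast<uintptr_t>(addr) % kSwapMutexCount].)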

  DISALLOW_COPY_AND_ASSIGN(QuasiAtomic);
};

}  // namespace art

#endif  // ART_RUNTIME_BASE_QUASI_ATOMIC_H_