/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "reference_processor.h"

#include "art_field-inl.h"
#include "base/mutex.h"
#include "base/systrace.h"
#include "base/time_utils.h"
#include "base/utils.h"
#include "class_root-inl.h"
#include "collector/garbage_collector.h"
#include "jni/java_vm_ext.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "mirror/reference-inl.h"
#include "nativehelper/scoped_local_ref.h"
#include "object_callbacks.h"
#include "reflection.h"
#include "scoped_thread_state_change-inl.h"
#include "task_processor.h"
#include "thread-inl.h"
#include "thread_pool.h"
#include "well_known_classes.h"

namespace art HIDDEN {
namespace gc {

static constexpr bool kAsyncReferenceQueueAdd = false;

ReferenceProcessor::ReferenceProcessor()
    : collector_(nullptr),
      condition_("reference processor condition", *Locks::reference_processor_lock_),
      soft_reference_queue_(Locks::reference_queue_soft_references_lock_),
      weak_reference_queue_(Locks::reference_queue_weak_references_lock_),
      finalizer_reference_queue_(Locks::reference_queue_finalizer_references_lock_),
      phantom_reference_queue_(Locks::reference_queue_phantom_references_lock_),
      cleared_references_(Locks::reference_queue_cleared_references_lock_) {
}

static inline MemberOffset GetSlowPathFlagOffset(ObjPtr<mirror::Class> reference_class)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(reference_class == GetClassRoot<mirror::Reference>());
  // Second static field.
  ArtField* field = reference_class->GetStaticField(1);
  DCHECK_STREQ(field->GetName(), "slowPathEnabled");
  return field->GetOffset();
}

static inline void SetSlowPathFlag(bool enabled) REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::Class> reference_class = GetClassRoot<mirror::Reference>();
  MemberOffset slow_path_offset = GetSlowPathFlagOffset(reference_class);
  reference_class->SetFieldBoolean</* kTransactionActive= */ false, /* kCheckTransaction= */ false>(
      slow_path_offset, enabled ? 1 : 0);
}
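
// Note on where the flag lives: it is a managed static field, not runtime state. libcore's
// java.lang.ref.Reference is assumed to declare a static boolean "slowPathEnabled" as its
// second static field; the DCHECKs in GetSlowPathFlagOffset verify both the position and the
// name. The point of keeping it in managed code is that the Reference.get() fast path can test
// it directly and only call into the runtime (GetReferent() below) while the flag is set.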

void ReferenceProcessor::EnableSlowPath() {
  SetSlowPathFlag(/* enabled= */ true);
}

void ReferenceProcessor::DisableSlowPath(Thread* self) {
  SetSlowPathFlag(/* enabled= */ false);
  condition_.Broadcast(self);
}

bool ReferenceProcessor::SlowPathEnabled() {
  ObjPtr<mirror::Class> reference_class = GetClassRoot<mirror::Reference>();
  MemberOffset slow_path_offset = GetSlowPathFlagOffset(reference_class);
  return reference_class->GetFieldBoolean(slow_path_offset);
}

void ReferenceProcessor::BroadcastForSlowPath(Thread* self) {
  MutexLock mu(self, *Locks::reference_processor_lock_);
  condition_.Broadcast(self);
}

ObjPtr<mirror::Object> ReferenceProcessor::GetReferent(Thread* self,
                                                       ObjPtr<mirror::Reference> reference) {
  auto slow_path_required = [this, self]() REQUIRES_SHARED(Locks::mutator_lock_) {
    return gUseReadBarrier ? !self->GetWeakRefAccessEnabled() : SlowPathEnabled();
  };
  if (!slow_path_required()) {
    return reference->GetReferent();
  }
  // If the referent is null then it is already cleared; we can just return null since there is no
  // scenario where it becomes non-null during the reference processing phase.
  // A read barrier may be unsafe here, and we use the result only when it's null or marked.
  ObjPtr<mirror::Object> referent = reference->template GetReferent<kWithoutReadBarrier>();
  if (referent.IsNull()) {
    return referent;
  }

  bool started_trace = false;
  uint64_t start_millis;
  auto finish_trace = [](uint64_t start_millis) {
    ATraceEnd();
    uint64_t millis = MilliTime() - start_millis;
    static constexpr uint64_t kReportMillis = 10;  // Long enough to risk dropped frames.
    if (millis > kReportMillis) {
      LOG(WARNING) << "Weak pointer dereference blocked for " << millis << " milliseconds.";
    }
  };

  MutexLock mu(self, *Locks::reference_processor_lock_);
  // Keeping reference_processor_lock_ blocks the broadcast when we try to reenable the fast path.
  while (slow_path_required()) {
    DCHECK(collector_ != nullptr);
    const bool other_read_barrier = !kUseBakerReadBarrier && gUseReadBarrier;
    if (UNLIKELY(reference->IsFinalizerReferenceInstance()
                 || rp_state_ == RpState::kStarting /* too early to determine mark state */
                 || (other_read_barrier && reference->IsPhantomReferenceInstance()))) {
      // Odd cases in which it doesn't hurt to just wait, or the wait is likely to be very brief.

      // Check and run the empty checkpoint before blocking so the empty checkpoint will work in
      // the presence of threads blocking for weak ref access.
      self->CheckEmptyCheckpointFromWeakRefAccess(Locks::reference_processor_lock_);
      if (!started_trace) {
        ATraceBegin("GetReferent blocked");
        started_trace = true;
        start_millis = MilliTime();
      }
      condition_.WaitHoldingLocks(self);
      continue;
    }
    DCHECK(!reference->IsPhantomReferenceInstance());

    if (rp_state_ == RpState::kInitClearingDone) {
      // Reachable references have their final referent values.
      break;
    }
    // Although reference processing is not done, we can always predict the correct return value
    // based on the current mark state. No additional marking from finalizers has been done, since
    // we hold reference_processor_lock_, which is required to advance to kInitClearingDone.
    DCHECK(rp_state_ == RpState::kInitMarkingDone);
    // Re-load and re-check the referent, since the current one may have been read before we
    // acquired reference_processor_lock_. In particular a Reference.clear() call may have
    // intervened. (b/33569625)
    referent = reference->GetReferent<kWithoutReadBarrier>();
    ObjPtr<mirror::Object> forwarded_ref =
        referent.IsNull() ? nullptr : collector_->IsMarked(referent.Ptr());
    // Either the referent was marked, and forwarded_ref is the correct return value, or it
    // was not, and forwarded_ref == null, which is again the correct return value.
    if (started_trace) {
      finish_trace(start_millis);
    }
    return forwarded_ref;
  }
  if (started_trace) {
    finish_trace(start_millis);
  }
  return reference->GetReferent();
}
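
// Illustrative call path (a sketch, not code from this file): the Reference.get() slow path is
// assumed to reach this method roughly as
//   Runtime::Current()->GetHeap()->GetReferenceProcessor()->GetReferent(self, ref);
// with ref being the Reference object being dereferenced. The routing shown here is an
// assumption; only GetReferent() itself is defined in this file.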

// Forward SoftReferences. Can be done before we disable Reference access. Only
// invoked if we are not clearing SoftReferences.
uint32_t ReferenceProcessor::ForwardSoftReferences(TimingLogger* timings) {
  TimingLogger::ScopedTiming split(
      concurrent_ ? "ForwardSoftReferences" : "(Paused)ForwardSoftReferences", timings);
  // We used to argue that we should be smarter about doing this conditionally, but it's unclear
  // that's actually better than the more predictable strategy of basically only clearing
  // SoftReferences just before we would otherwise run out of memory.
  uint32_t non_null_refs = soft_reference_queue_.ForwardSoftReferences(collector_);
  if (ATraceEnabled()) {
    static constexpr size_t kBufSize = 80;
    char buf[kBufSize];
    snprintf(buf, kBufSize, "Marking for %" PRIu32 " SoftReferences", non_null_refs);
    ATraceBegin(buf);
    collector_->ProcessMarkStack();
    ATraceEnd();
  } else {
    collector_->ProcessMarkStack();
  }
  return non_null_refs;
}

void ReferenceProcessor::Setup(Thread* self,
                               collector::GarbageCollector* collector,
                               bool concurrent,
                               bool clear_soft_references) {
  DCHECK(collector != nullptr);
  MutexLock mu(self, *Locks::reference_processor_lock_);
  collector_ = collector;
  rp_state_ = RpState::kStarting;
  concurrent_ = concurrent;
  clear_soft_references_ = clear_soft_references;
}
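
// Inferred per-GC cycle, pieced together from the state transitions in this file (the actual
// driver lives in the collector/heap code, not here):
//   Setup()                     // record collector and flags; rp_state_ = kStarting
//   ForwardSoftReferences()     // optional; may run before Reference access is disabled
//   ProcessReferences()         // advances kStarting -> kInitMarkingDone -> kInitClearingDone
//   CollectClearedReferences()  // hands the cleared list back to managed code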

// Process reference class instances and schedule finalizations.
// We advance rp_state_ to signal partial completion for the benefit of GetReferent.
void ReferenceProcessor::ProcessReferences(Thread* self, TimingLogger* timings) {
  TimingLogger::ScopedTiming t(concurrent_ ? __FUNCTION__ : "(Paused)ProcessReferences", timings);
  if (!clear_soft_references_) {
    // Forward any additional SoftReferences we discovered late, now that reference access has
    // been inhibited.
    while (!soft_reference_queue_.IsEmpty()) {
      ForwardSoftReferences(timings);
    }
  }
  {
    MutexLock mu(self, *Locks::reference_processor_lock_);
    if (!gUseReadBarrier) {
      CHECK_EQ(SlowPathEnabled(), concurrent_) << "Slow path must be enabled iff concurrent";
    } else {
      // Weak ref access is enabled at Zygote compaction by SemiSpace (concurrent_ == false).
      CHECK_EQ(!self->GetWeakRefAccessEnabled(), concurrent_);
    }
    DCHECK(rp_state_ == RpState::kStarting);
    rp_state_ = RpState::kInitMarkingDone;
    condition_.Broadcast(self);
  }
  if (kIsDebugBuild && collector_->IsTransactionActive()) {
    // In transaction mode, we shouldn't enqueue any Reference to the queues.
    // See DelayReferenceReferent().
    DCHECK(soft_reference_queue_.IsEmpty());
    DCHECK(weak_reference_queue_.IsEmpty());
    DCHECK(finalizer_reference_queue_.IsEmpty());
    DCHECK(phantom_reference_queue_.IsEmpty());
  }
  // Clear all remaining soft and weak references with white referents.
  // This misses references only reachable through finalizers.
  soft_reference_queue_.ClearWhiteReferences(&cleared_references_, collector_);
  weak_reference_queue_.ClearWhiteReferences(&cleared_references_, collector_);
  // Defer PhantomReference processing until we've finished marking through finalizers.
  {
    // TODO: Capture mark state of some system weaks here. If the referent was marked here,
    // then it is now safe to return, since it can only refer to marked objects. If it becomes
    // marked below, that is no longer guaranteed.
    MutexLock mu(self, *Locks::reference_processor_lock_);
    rp_state_ = RpState::kInitClearingDone;
    // At this point, all mutator-accessible data is marked (black). Objects enqueued for
    // finalization will only be made available to the mutator via CollectClearedReferences after
    // we're fully done marking. Soft and WeakReferences accessible to the mutator have been
    // processed and refer only to black objects. Thus there is no danger of the mutator getting
    // access to non-black objects. Weak reference processing is still nominally suspended,
    // but many kinds of references, including all java.lang.ref ones, are handled normally from
    // here on. See GetReferent().
  }
  {
    TimingLogger::ScopedTiming t2(
        concurrent_ ? "EnqueueFinalizerReferences" : "(Paused)EnqueueFinalizerReferences", timings);
    // Preserve all white objects with finalize methods and schedule them for finalization.
    FinalizerStats finalizer_stats =
        finalizer_reference_queue_.EnqueueFinalizerReferences(&cleared_references_, collector_);
    if (ATraceEnabled()) {
      static constexpr size_t kBufSize = 80;
      char buf[kBufSize];
      snprintf(buf, kBufSize, "Marking from %" PRIu32 " / %" PRIu32 " finalizers",
               finalizer_stats.num_enqueued_, finalizer_stats.num_refs_);
      ATraceBegin(buf);
      collector_->ProcessMarkStack();
      ATraceEnd();
    } else {
      collector_->ProcessMarkStack();
    }
  }

  // Process all soft and weak references with white referents, where the references are reachable
  // only from finalizers. It is unclear that there is any way to do this without slightly
  // violating some language spec. We choose to apply normal Reference processing rules for these.
  // This exposes the following issues:
  // 1) In the case of an unmarked referent, we may end up enqueuing an "unreachable" reference.
  //    This appears unavoidable, since we need to clear the reference for safety, unless we
  //    mark the referent and undo finalization decisions for objects we encounter during marking.
  //    (Some versions of the RI seem to do something along these lines.)
  //    Or we could clear the reference without enqueuing it, which also seems strange and
  //    unhelpful.
  // 2) In the case of a marked referent, we will preserve a reference to objects that may have
  //    been enqueued for finalization. Again fixing this would seem to involve at least undoing
  //    previous finalization / reference clearing decisions. (This would also mean that an object
  //    containing both a strong and a WeakReference to the same referent could see the
  //    WeakReference cleared.)
  // The treatment in (2) is potentially quite dangerous, since Reference.get() can e.g. return a
  // finalized object containing pointers to native objects that have already been deallocated.
  // But it can be argued that this is just an instance of the broader rule that it is not safe
  // for finalizers to access otherwise inaccessible finalizable objects.
  soft_reference_queue_.ClearWhiteReferences(&cleared_references_, collector_,
                                             /*report_cleared=*/ true);
  weak_reference_queue_.ClearWhiteReferences(&cleared_references_, collector_,
                                             /*report_cleared=*/ true);

  // Clear all phantom references with white referents. It's fine to do this just once here.
  phantom_reference_queue_.ClearWhiteReferences(&cleared_references_, collector_);

  // At this point all reference queues other than the cleared references should be empty.
  DCHECK(soft_reference_queue_.IsEmpty());
  DCHECK(weak_reference_queue_.IsEmpty());
  DCHECK(finalizer_reference_queue_.IsEmpty());
  DCHECK(phantom_reference_queue_.IsEmpty());

  {
    MutexLock mu(self, *Locks::reference_processor_lock_);
    // Need to always do this since the next GC may be concurrent: disabling the slow path only
    // for concurrent collections could result in a stale is_marked_callback_ being called before
    // reference processing starts, since there is a small window of time where
    // slow_path_enabled_ is set but the callback isn't yet.
    if (!gUseReadBarrier && concurrent_) {
      // Done processing, disable the slow path and broadcast to the waiters.
      DisableSlowPath(self);
    }
  }
}

// Process the "referent" field in a java.lang.ref.Reference. If the referent has not yet been
// marked, put it on the appropriate list in the heap for later processing.
void ReferenceProcessor::DelayReferenceReferent(ObjPtr<mirror::Class> klass,
                                                ObjPtr<mirror::Reference> ref,
                                                collector::GarbageCollector* collector) {
  // klass can be the class of the old object if the visitor already updated the class of ref.
  DCHECK(klass != nullptr);
  DCHECK(klass->IsTypeOfReferenceClass());
  mirror::HeapReference<mirror::Object>* referent = ref->GetReferentReferenceAddr();
  // do_atomic_update needs to be true because this happens outside of the reference processing
  // phase.
  if (!collector->IsNullOrMarkedHeapReference(referent, /*do_atomic_update=*/true)) {
    if (UNLIKELY(collector->IsTransactionActive())) {
      // In transaction mode, keep the referent alive and skip reference processing entirely, so
      // that there is nothing to roll back. do_atomic_update needs to be true because this
      // happens outside of the reference processing phase.
      if (!referent->IsNull()) {
        collector->MarkHeapReference(referent, /*do_atomic_update=*/ true);
      }
      return;
    }
    Thread* self = Thread::Current();
    // TODO: Remove these locks, and use atomic stacks for storing references?
    // We need to check that the references haven't already been enqueued since we can end up
    // scanning the same reference multiple times due to dirty cards.
    if (klass->IsSoftReferenceClass()) {
      soft_reference_queue_.AtomicEnqueueIfNotEnqueued(self, ref);
    } else if (klass->IsWeakReferenceClass()) {
      weak_reference_queue_.AtomicEnqueueIfNotEnqueued(self, ref);
    } else if (klass->IsFinalizerReferenceClass()) {
      finalizer_reference_queue_.AtomicEnqueueIfNotEnqueued(self, ref);
    } else if (klass->IsPhantomReferenceClass()) {
      phantom_reference_queue_.AtomicEnqueueIfNotEnqueued(self, ref);
    } else {
      LOG(FATAL) << "Invalid reference type " << klass->PrettyClass() << " " << std::hex
                 << klass->GetAccessFlags();
    }
  }
}
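
// Illustrative caller (a sketch): a tracing collector is expected to invoke this when it scans
// a Reference-typed object, along the lines of
//   heap_->GetReferenceProcessor()->DelayReferenceReferent(klass, ref, this);
// so that an unmarked referent is parked on the matching queue above instead of being traced
// through like an ordinary field. The exact call sites live in the collectors, not here.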

void ReferenceProcessor::UpdateRoots(IsMarkedVisitor* visitor) {
  cleared_references_.UpdateRoots(visitor);
}

class ClearedReferenceTask : public HeapTask {
 public:
  explicit ClearedReferenceTask(jobject cleared_references)
      : HeapTask(NanoTime()), cleared_references_(cleared_references) {
  }
  void Run(Thread* thread) override {
    ScopedObjectAccess soa(thread);
    WellKnownClasses::java_lang_ref_ReferenceQueue_add->InvokeStatic<'V', 'L'>(
        thread, soa.Decode<mirror::Object>(cleared_references_));
    soa.Env()->DeleteGlobalRef(cleared_references_);
  }

 private:
  const jobject cleared_references_;
};
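
// The task above invokes the static managed helper java.lang.ref.ReferenceQueue.add() on the
// cleared list. What add() does with that list (enqueuing each Reference on the queue it was
// registered with) is libcore behavior assumed here, not defined in this file.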

SelfDeletingTask* ReferenceProcessor::CollectClearedReferences(Thread* self) {
  Locks::mutator_lock_->AssertNotHeld(self);
  // By default we don't actually need to do anything. Just return this no-op task to avoid having
  // to put in ifs.
  std::unique_ptr<SelfDeletingTask> result(new FunctionTask([](Thread*) {}));
  // When a runtime isn't started there are no reference queues to care about, so ignore.
  if (!cleared_references_.IsEmpty()) {
    if (LIKELY(Runtime::Current()->IsStarted())) {
      jobject cleared_references;
      {
        ReaderMutexLock mu(self, *Locks::mutator_lock_);
        cleared_references = self->GetJniEnv()->GetVm()->AddGlobalRef(
            self, cleared_references_.GetList());
      }
      if (kAsyncReferenceQueueAdd) {
        // TODO: This can cause RunFinalization to terminate before newly freed objects are
        // finalized since they may not be enqueued by the time RunFinalization starts.
        Runtime::Current()->GetHeap()->GetTaskProcessor()->AddTask(
            self, new ClearedReferenceTask(cleared_references));
      } else {
        result.reset(new ClearedReferenceTask(cleared_references));
      }
    }
    cleared_references_.Clear();
  }
  return result.release();
}
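
// Expected usage (a sketch; the real driver lives in the heap's collection code, and rp is a
// hypothetical ReferenceProcessor*):
//   SelfDeletingTask* clear_task = rp->CollectClearedReferences(self);
//   clear_task->Run(self);       // runs managed ReferenceQueue code, so no GC locks held
//   clear_task->Finalize();      // a SelfDeletingTask is assumed to delete itself here
// Returning a no-op task when nothing was cleared keeps that call sequence unconditional.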

void ReferenceProcessor::ClearReferent(ObjPtr<mirror::Reference> ref) {
  Thread* self = Thread::Current();
  MutexLock mu(self, *Locks::reference_processor_lock_);
  // Need to wait until reference processing is done since IsMarkedHeapReference does not have a
  // CAS. If we do not wait, it can result in the GC un-clearing references due to race conditions.
  // This also handles the race where the referent gets cleared after a null check but before
  // IsMarkedHeapReference is called.
  WaitUntilDoneProcessingReferences(self);
  if (Runtime::Current()->IsActiveTransaction()) {
    ref->ClearReferent<true>();
  } else {
    ref->ClearReferent<false>();
  }
}

void ReferenceProcessor::WaitUntilDoneProcessingReferences(Thread* self) {
  // Wait until we are done processing references.
  while ((!gUseReadBarrier && SlowPathEnabled()) ||
         (gUseReadBarrier && !self->GetWeakRefAccessEnabled())) {
    // Check and run the empty checkpoint before blocking so the empty checkpoint will work in the
    // presence of threads blocking for weak ref access.
    self->CheckEmptyCheckpointFromWeakRefAccess(Locks::reference_processor_lock_);
    condition_.WaitHoldingLocks(self);
  }
}
428*795d594fSAndroid Build Coastguard Worker 
MakeCircularListIfUnenqueued(ObjPtr<mirror::FinalizerReference> reference)429*795d594fSAndroid Build Coastguard Worker bool ReferenceProcessor::MakeCircularListIfUnenqueued(
430*795d594fSAndroid Build Coastguard Worker     ObjPtr<mirror::FinalizerReference> reference) {
431*795d594fSAndroid Build Coastguard Worker   Thread* self = Thread::Current();
432*795d594fSAndroid Build Coastguard Worker   MutexLock mu(self, *Locks::reference_processor_lock_);
433*795d594fSAndroid Build Coastguard Worker   WaitUntilDoneProcessingReferences(self);
434*795d594fSAndroid Build Coastguard Worker   // At this point, since the sentinel of the reference is live, it is guaranteed to not be
435*795d594fSAndroid Build Coastguard Worker   // enqueued if we just finished processing references. Otherwise, we may be doing the main GC
436*795d594fSAndroid Build Coastguard Worker   // phase. Since we are holding the reference processor lock, it guarantees that reference
437*795d594fSAndroid Build Coastguard Worker   // processing can't begin. The GC could have just enqueued the reference one one of the internal
438*795d594fSAndroid Build Coastguard Worker   // GC queues, but since we hold the lock finalizer_reference_queue_ lock it also prevents this
439*795d594fSAndroid Build Coastguard Worker   // race.
440*795d594fSAndroid Build Coastguard Worker   MutexLock mu2(self, *Locks::reference_queue_finalizer_references_lock_);
441*795d594fSAndroid Build Coastguard Worker   if (reference->IsUnprocessed()) {
442*795d594fSAndroid Build Coastguard Worker     CHECK(reference->IsFinalizerReferenceInstance());
443*795d594fSAndroid Build Coastguard Worker     reference->SetPendingNext(reference);
444*795d594fSAndroid Build Coastguard Worker     return true;
445*795d594fSAndroid Build Coastguard Worker   }
446*795d594fSAndroid Build Coastguard Worker   return false;
447*795d594fSAndroid Build Coastguard Worker }
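
// Queue convention relied on above: a Reference with a null pendingNext is "unprocessed";
// pointing pendingNext at the reference itself forms a one-element circular list, marking it
// as already enqueued so the GC will not enqueue it again. The return value tells the caller
// whether this call was the one that claimed the reference.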

}  // namespace gc
}  // namespace art