/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_GARBAGE_COLLECTOR_H_
#define ART_RUNTIME_GC_COLLECTOR_GARBAGE_COLLECTOR_H_

#include <stdint.h>
#include <list>

#include "base/histogram.h"
#include "base/macros.h"
#include "base/metrics/metrics.h"
#include "base/mutex.h"
#include "base/timing_logger.h"
#include "gc/collector_type.h"
#include "gc/gc_cause.h"
#include "gc_root.h"
#include "gc_type.h"
#include "iteration.h"
#include "object_byte_pair.h"
#include "object_callbacks.h"

namespace art HIDDEN {

namespace mirror {
class Class;
class Object;
class Reference;
}  // namespace mirror

namespace gc {

class Heap;

namespace accounting {
template <typename T>
class AtomicStack;
using ObjectStack = AtomicStack<mirror::Object>;
}  // namespace accounting

namespace space {
class ContinuousSpace;
}  // namespace space

namespace collector {

class GarbageCollector : public RootVisitor, public IsMarkedVisitor, public MarkObjectVisitor {
 public:
  class SCOPED_LOCKABLE ScopedPause {
   public:
    explicit ScopedPause(GarbageCollector* collector, bool with_reporting = true)
        EXCLUSIVE_LOCK_FUNCTION(Locks::mutator_lock_);
    ~ScopedPause() UNLOCK_FUNCTION();

   private:
    const uint64_t start_time_;
    GarbageCollector* const collector_;
    bool with_reporting_;
  };
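
  // Illustrative usage sketch, not taken from the implementation: ScopedPause is a
  // stack-local guard around a stop-the-world phase. Per the annotations above, its
  // constructor acquires the mutator lock exclusively and records the start time, and
  // its destructor releases the lock so the pause length can be registered. The names
  // SomeCollector and PausePhase below are hypothetical and used only for illustration.
  //
  //   void SomeCollector::PausePhase() {
  //     ScopedPause pause(this);  // Mutator threads are stopped from here on.
  //     // ... work that must not run concurrently with mutator threads ...
  //   }  // Pause ends (and can be reported) when 'pause' goes out of scope.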

  GarbageCollector(Heap* heap, const std::string& name);
  virtual ~GarbageCollector() { }
  const char* GetName() const {
    return name_.c_str();
  }
  virtual GcType GetGcType() const = 0;
  virtual CollectorType GetCollectorType() const = 0;
  // Run the garbage collector.
  void Run(GcCause gc_cause, bool clear_soft_references) REQUIRES(!pause_histogram_lock_);
  Heap* GetHeap() const {
    return heap_;
  }
  void RegisterPause(uint64_t nano_length);
  const CumulativeLogger& GetCumulativeTimings() const {
    return cumulative_timings_;
  }
  // Swap the live and mark bitmaps of spaces that are active for the collector. For a partial GC
  // this is the allocation space; for a full GC the zygote bitmaps are swapped as well.
  void SwapBitmaps()
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);
  uint64_t GetTotalCpuTime() const {
    return total_thread_cpu_time_ns_;
  }
  uint64_t GetTotalPausedTimeNs() REQUIRES(!pause_histogram_lock_);
  int64_t GetTotalFreedBytes() const {
    return total_freed_bytes_;
  }
  uint64_t GetTotalFreedObjects() const {
    return total_freed_objects_;
  }
  uint64_t GetTotalScannedBytes() const {
    return total_scanned_bytes_;
  }
  // Reset the cumulative timings and pause histogram.
  void ResetMeasurements() REQUIRES(!pause_histogram_lock_);
  // Returns the estimated throughput in bytes / second.
  uint64_t GetEstimatedMeanThroughput() const;
  // Returns how many GC iterations have been run.
  size_t NumberOfIterations() const {
    return GetCumulativeTimings().GetIterations();
  }
  // Returns the current GC iteration and associated info.
  Iteration* GetCurrentIteration();
  const Iteration* GetCurrentIteration() const;
  TimingLogger* GetTimings() {
    return &GetCurrentIteration()->timings_;
  }
  // Record a free of normal objects.
  void RecordFree(const ObjectBytePair& freed);
  // Record a free of large objects.
  void RecordFreeLOS(const ObjectBytePair& freed);
  virtual void DumpPerformanceInfo(std::ostream& os) REQUIRES(!pause_histogram_lock_);

  // Extract RSS for GC-specific memory ranges using mincore().
  uint64_t ExtractRssFromMincore(std::list<std::pair<void*, void*>>* gc_ranges);
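
  // A minimal sketch of mincore()-based residency accounting, to illustrate the idea the
  // declaration above relies on; this is an assumption for illustration, not the actual
  // implementation. mincore() fills one byte per page of the queried range, and on Linux
  // the least significant bit of each byte reports whether that page is resident, so
  // summing those bits and multiplying by the page size yields an RSS estimate for a
  // page-aligned [begin, end) range. CountResidentBytes is a hypothetical helper name.
  //
  //   #include <sys/mman.h>   // mincore()
  //   #include <unistd.h>     // sysconf()
  //   #include <vector>
  //
  //   uint64_t CountResidentBytes(void* begin, void* end) {
  //     const size_t page_size = sysconf(_SC_PAGESIZE);
  //     const size_t length = static_cast<uint8_t*>(end) - static_cast<uint8_t*>(begin);
  //     std::vector<unsigned char> vec((length + page_size - 1) / page_size);
  //     // 'begin' must be page-aligned, as mincore() requires.
  //     if (mincore(begin, length, vec.data()) != 0) {
  //       return 0;  // Query failed; report nothing rather than guessing.
  //     }
  //     uint64_t resident_pages = 0;
  //     for (unsigned char byte : vec) {
  //       resident_pages += byte & 1;  // Low bit set => page is resident.
  //     }
  //     return resident_pages * page_size;
  //   }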

  // Helper functions for querying if objects are marked. These are used for processing references,
  // and will be used for reading system weaks while the GC is running.
  virtual mirror::Object* IsMarked(mirror::Object* obj)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;
  // Returns true if the given heap reference is null or is already marked. If it's already marked,
  // update the reference (uses a CAS if do_atomic_update is true). Otherwise, returns false.
  virtual bool IsNullOrMarkedHeapReference(mirror::HeapReference<mirror::Object>* obj,
                                           bool do_atomic_update)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;
  // Used by reference processor.
  virtual void ProcessMarkStack() REQUIRES_SHARED(Locks::mutator_lock_) = 0;
  // Force mark an object.
  virtual mirror::Object* MarkObject(mirror::Object* obj)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;
  virtual void MarkHeapReference(mirror::HeapReference<mirror::Object>* obj,
                                 bool do_atomic_update)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;
  virtual void DelayReferenceReferent(ObjPtr<mirror::Class> klass,
                                      ObjPtr<mirror::Reference> reference)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  bool IsTransactionActive() const {
    return is_transaction_active_;
  }

  bool ShouldEagerlyReleaseMemoryToOS() const;

 protected:
  // Run all of the GC phases.
  virtual void RunPhases() REQUIRES(!Locks::mutator_lock_) = 0;
  // Revoke all the thread-local buffers.
  virtual void RevokeAllThreadLocalBuffers() = 0;
  // Deallocates unmarked objects referenced by 'obj_arr' that reside either in the
  // given continuous-spaces or in large-object space. WARNING: Trashes objects.
  void SweepArray(accounting::ObjectStack* obj_arr,
                  bool swap_bitmaps,
                  std::vector<space::ContinuousSpace*>* sweep_spaces)
      REQUIRES(Locks::heap_bitmap_lock_) REQUIRES_SHARED(Locks::mutator_lock_);

  static constexpr size_t kPauseBucketSize = 500;
  static constexpr size_t kPauseBucketCount = 32;
  static constexpr size_t kMemBucketSize = 10;
  static constexpr size_t kMemBucketCount = 16;

  Heap* const heap_;
  std::string name_;
  // Cumulative statistics.
  Histogram<uint64_t> pause_histogram_ GUARDED_BY(pause_histogram_lock_);
  Histogram<uint64_t> rss_histogram_;
  Histogram<size_t> freed_bytes_histogram_;
  metrics::MetricsBase<int64_t>* gc_time_histogram_;
  metrics::MetricsBase<uint64_t>* metrics_gc_count_;
  metrics::MetricsBase<uint64_t>* metrics_gc_count_delta_;
  metrics::MetricsBase<int64_t>* gc_throughput_histogram_;
  metrics::MetricsBase<int64_t>* gc_tracing_throughput_hist_;
  metrics::MetricsBase<uint64_t>* gc_throughput_avg_;
  metrics::MetricsBase<uint64_t>* gc_tracing_throughput_avg_;
  metrics::MetricsBase<uint64_t>* gc_scanned_bytes_;
  metrics::MetricsBase<uint64_t>* gc_scanned_bytes_delta_;
  metrics::MetricsBase<uint64_t>* gc_freed_bytes_;
  metrics::MetricsBase<uint64_t>* gc_freed_bytes_delta_;
  metrics::MetricsBase<uint64_t>* gc_duration_;
  metrics::MetricsBase<uint64_t>* gc_duration_delta_;
  uint64_t total_thread_cpu_time_ns_;
  uint64_t total_time_ns_;
  uint64_t total_freed_objects_;
  int64_t total_freed_bytes_;
  uint64_t total_scanned_bytes_;
  CumulativeLogger cumulative_timings_;
  mutable Mutex pause_histogram_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  bool is_transaction_active_;
  // The garbage collector algorithms will either have all the metrics pointers
  // (above) initialized, or none of them. So instead of checking each time, we
  // use this flag.
  bool are_metrics_initialized_;

 private:
  DISALLOW_IMPLICIT_CONSTRUCTORS(GarbageCollector);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_GARBAGE_COLLECTOR_H_