/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_
#define ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_

#include <memory>

#include "base/atomic.h"
#include "barrier.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "garbage_collector.h"
#include "gc/accounting/heap_bitmap.h"
#include "gc_root.h"
#include "immune_spaces.h"
#include "offsets.h"

namespace art {

namespace mirror {
class Class;
class Object;
class Reference;
}  // namespace mirror

class Thread;
enum VisitRootFlags : uint8_t;

namespace gc {

class Heap;

namespace accounting {
template<typename T> class AtomicStack;
typedef AtomicStack<mirror::Object> ObjectStack;
}  // namespace accounting

namespace collector {

class MarkSweep : public GarbageCollector {
 public:
  MarkSweep(Heap* heap, bool is_concurrent, const std::string& name_prefix = "");

  ~MarkSweep() {}

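  // Runs the initialize, marking, pause, reclaim, and finish phases of the collection.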
  void RunPhases() override REQUIRES(!mark_stack_lock_);
  void InitializePhase();
  void MarkingPhase() REQUIRES(!mark_stack_lock_) REQUIRES_SHARED(Locks::mutator_lock_);
  void PausePhase() REQUIRES(Locks::mutator_lock_) REQUIRES(!mark_stack_lock_);
  void ReclaimPhase() REQUIRES(!mark_stack_lock_) REQUIRES_SHARED(Locks::mutator_lock_);
  void FinishPhase();
  virtual void MarkReachableObjects()
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsConcurrent() const {
    return is_concurrent_;
  }

  GcType GetGcType() const override {
    return kGcTypeFull;
  }

  CollectorType GetCollectorType() const override {
    return is_concurrent_ ? kCollectorTypeCMS : kCollectorTypeMS;
  }

  // Initializes internal structures.
  void Init();

  // Find the default mark bitmap.
  void FindDefaultSpaceBitmap() REQUIRES_SHARED(Locks::mutator_lock_);

  // Marks all objects in the root set at the start of a garbage collection.
  void MarkRoots(Thread* self)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void MarkNonThreadRoots()
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  virtual void MarkConcurrentRoots(VisitRootFlags flags)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void MarkRootsCheckpoint(Thread* self, bool revoke_ros_alloc_thread_local_buffers_at_checkpoint)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Builds a mark stack and recursively marks until it empties.
  void RecursiveMark()
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Bind the live bits to the mark bits of bitmaps for spaces that are never collected, i.e.
  // the image. Mark that portion of the heap as immune.
  virtual void BindBitmaps() REQUIRES_SHARED(Locks::mutator_lock_);

  // Builds a mark stack with objects on dirty cards and recursively marks until it empties.
  void RecursiveMarkDirtyObjects(bool paused, uint8_t minimum_age)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Remarks the root set after completing the concurrent mark.
  void ReMarkRoots()
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void ProcessReferences(Thread* self)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Update and mark references from immune spaces.
  void UpdateAndMarkModUnion()
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Pre-clean cards to reduce how much work is needed in the pause.
  void PreCleanCards()
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection. Virtual because, by default, it
  // sweeps all allocation spaces; partial and sticky GCs only want to sweep a subset of the heap.
  virtual void Sweep(bool swap_bitmaps)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection.
  void SweepLargeObjects(bool swap_bitmaps) REQUIRES(Locks::heap_bitmap_lock_);

  // Sweep only pointers within an array. WARNING: Trashes objects.
  void SweepArray(accounting::ObjectStack* allocation_stack_, bool swap_bitmaps)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Blackens an object.
  void ScanObject(mirror::Object* obj)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // No thread safety analysis due to lambdas.
  template<typename MarkVisitor, typename ReferenceVisitor>
  void ScanObjectVisit(mirror::Object* obj,
                       const MarkVisitor& visitor,
                       const ReferenceVisitor& ref_visitor)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void SweepSystemWeaks(Thread* self)
      REQUIRES(!Locks::heap_bitmap_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static mirror::Object* VerifySystemWeakIsLiveCallback(mirror::Object* obj, void* arg)
      REQUIRES_SHARED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  void VerifySystemWeaks()
      REQUIRES_SHARED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  // Verify that an object is live, either in a live bitmap or in the allocation stack.
  void VerifyIsLive(const mirror::Object* obj)
      REQUIRES_SHARED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  bool IsNullOrMarkedHeapReference(mirror::HeapReference<mirror::Object>* ref,
                                   bool do_atomic_update) override
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info) override
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void VisitRoots(mirror::CompressedReference<mirror::Object>** roots,
                  size_t count,
                  const RootInfo& info) override
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Marks an object.
  mirror::Object* MarkObject(mirror::Object* obj) override
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void MarkObject(mirror::Object* obj, mirror::Object* holder, MemberOffset offset)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void MarkHeapReference(mirror::HeapReference<mirror::Object>* ref,
                         bool do_atomic_update) override
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  Barrier& GetBarrier() {
    return *gc_barrier_;
  }

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> reference)
      override REQUIRES_SHARED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

 protected:
  // Returns object if the object is marked in the heap bitmap, otherwise null.
  mirror::Object* IsMarked(mirror::Object* object) override
      REQUIRES_SHARED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  void MarkObjectNonNull(mirror::Object* obj,
                         mirror::Object* holder = nullptr,
                         MemberOffset offset = MemberOffset(0))
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Marks an object atomically, safe to use from multiple threads.
  void MarkObjectNonNullParallel(mirror::Object* obj)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if we need to add obj to a mark stack.
  bool MarkObjectParallel(mirror::Object* obj) NO_THREAD_SAFETY_ANALYSIS;

  // Verify the roots of the heap and print out information related to any invalid roots.
  // Called in MarkObject, so we may not hold the mutator lock.
  void VerifySuspendedThreadRoots(std::ostream& os)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Expand mark stack to 2x its current size.
  void ExpandMarkStack()
      REQUIRES(mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void ResizeMarkStack(size_t new_size)
      REQUIRES(mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns how many threads we should use for the current GC phase, based on whether we are
  // paused and whether or not we care about pauses.
  size_t GetThreadCount(bool paused) const;

  // Pushes a single reference onto the mark stack.
  void PushOnMarkStack(mirror::Object* obj)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Blackens objects grayed during a garbage collection.
  void ScanGrayObjects(bool paused, uint8_t minimum_age)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void ProcessMarkStack() override
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    ProcessMarkStack(false);
  }

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack(bool paused)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void ProcessMarkStackParallel(size_t thread_count)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Used to get around thread safety annotations. The call is from MarkingPhase and is guarded by
  // IsExclusiveHeld.
  void RevokeAllThreadLocalAllocationStacks(Thread* self) NO_THREAD_SAFETY_ANALYSIS;

  // Revoke all the thread-local buffers.
  void RevokeAllThreadLocalBuffers() override;

  // Whether or not we count how many of each type of object were scanned.
  static constexpr bool kCountScannedTypes = false;

  // Current space; we check this space first to avoid searching for the appropriate space for an
  // object.
  accounting::ContinuousSpaceBitmap* current_space_bitmap_;
  // Cache the heap's mark bitmap to prevent having to do 2 loads during slow path marking.
  accounting::HeapBitmap* mark_bitmap_;

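  // Stack of marked objects whose fields have not yet been scanned.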
  accounting::ObjectStack* mark_stack_;

  // Every object inside the immune spaces is assumed to be marked. Immune spaces that aren't in the
  // immune region are handled by the normal marking logic.
  ImmuneSpaces immune_spaces_;

  // Parallel finger.
  AtomicInteger atomic_finger_;

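  // Number of objects whose class has no reference fields, if kCountScannedTypes.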
  AtomicInteger no_reference_class_count_;
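  // Number of 'normal' objects (instances with reference fields) scanned, if kCountScannedTypes.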
  AtomicInteger normal_count_;
  // Number of classes scanned, if kCountScannedTypes.
  AtomicInteger class_count_;
  // Number of object arrays scanned, if kCountScannedTypes.
  AtomicInteger object_array_count_;
  // Number of non-class/arrays scanned, if kCountScannedTypes.
  AtomicInteger other_count_;
  // Number of java.lang.ref.Reference instances.
  AtomicInteger reference_count_;

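  // Profiling counters: large object marking, parallel marking overhead and work chunks, and
  // MarkObject path counts (null, immune, fast path, slow path).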
  AtomicInteger large_object_test_;
  AtomicInteger large_object_mark_;
  AtomicInteger overhead_time_;
  AtomicInteger work_chunks_created_;
  AtomicInteger work_chunks_deleted_;
  AtomicInteger mark_null_count_;
  AtomicInteger mark_immune_count_;
  AtomicInteger mark_fastpath_count_;
  AtomicInteger mark_slowpath_count_;

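  // Barrier used to synchronize with threads running GC checkpoints (see MarkRootsCheckpoint).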
  std::unique_ptr<Barrier> gc_barrier_;
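  // Guards concurrent access to the mark stack; acquired after Locks::classlinker_classes_lock_.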
  Mutex mark_stack_lock_ ACQUIRED_AFTER(Locks::classlinker_classes_lock_);

  const bool is_concurrent_;

  // Verification.
  size_t live_stack_freeze_size_;

  // Sweep array free buffer, used to sweep the spaces based on an array more
  // efficiently, by recording dead objects to be freed in batches (see
  // MarkSweep::SweepArray).
  MemMap sweep_array_free_buffer_mem_map_;

 private:
  class CardScanTask;
  class CheckpointMarkThreadRoots;
  class DelayReferenceReferentVisitor;
  template<bool kUseFinger> class MarkStackTask;
  class MarkObjectSlowPath;
  class RecursiveMarkTask;
  class ScanObjectParallelVisitor;
  class ScanObjectVisitor;
  class VerifyRootMarkedVisitor;
  class VerifyRootVisitor;
  class VerifySystemWeakVisitor;

  DISALLOW_IMPLICIT_CONSTRUCTORS(MarkSweep);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_