/art/runtime/gc/space/
D | space_create_test.cc
    25: namespace space {
    62: std::unique_ptr<Space> space(CreateSpace("test", 16 * MB, 32 * MB, 32 * MB));  [in TEST_P()]
    63: EXPECT_TRUE(space != nullptr);
    65: space.reset(CreateSpace("test", 16 * MB, 16 * MB, 16 * MB));
    66: EXPECT_TRUE(space != nullptr);
    68: space.reset(CreateSpace("test", 32 * MB, 16 * MB, 16 * MB));
    69: EXPECT_TRUE(space == nullptr);
    71: space.reset(CreateSpace("test", 16 * MB, 16 * MB, 32 * MB));
    72: EXPECT_TRUE(space != nullptr);
    74: space.reset(CreateSpace("test", 16 * MB, 8 * MB, 32 * MB));
    [all …]
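The TEST_P() hits above call CreateSpace() with different (initial size, growth limit, capacity) triples and only expect a null result when the first argument exceeds the second (line 68). A standalone sketch of the ordering those expectations imply; the parameter names and the check are read off the test, not taken from the CreateSpace() implementation:

    #include <cstddef>

    // Hypothetical precondition suggested by the expectations above: a space is only
    // created when initial_size <= growth_limit <= capacity, so 32/16/16 MB fails
    // while 16/32/32, 16/16/16 and 16/16/32 MB succeed.
    bool SpaceSizesLookValid(size_t initial_size, size_t growth_limit, size_t capacity) {
      return initial_size <= growth_limit && growth_limit <= capacity;
    }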
D | space_test.h
    36: namespace space {
    43: void AddSpace(ContinuousSpace* space, bool revoke = true) {
    51: heap->AddSpace(space);
    53: heap->SetSpaceAsDefault(space);
    67: mirror::Object* Alloc(space::MallocSpace* alloc_space,  [in Alloc()]
    87: mirror::Object* AllocWithGrowth(space::MallocSpace* alloc_space,  [in AllocWithGrowth()]
    131: void SizeFootPrintGrowthLimitAndTrimBody(MallocSpace* space, intptr_t object_size,
    142: void SpaceTest<Super>::SizeFootPrintGrowthLimitAndTrimBody(MallocSpace* space,  [in SizeFootPrintGrowthLimitAndTrimBody()]
    153: size_t footprint = space->GetFootprint();
    162: EXPECT_LE(space->Size(), growth_limit);
    [all …]
D | space.cc
    30: namespace space {
    39: std::ostream& operator<<(std::ostream& os, const Space& space) {  [in operator<<()]
    40: space.Dump(os);
    136: AllocSpace::SweepCallbackContext::SweepCallbackContext(bool swap_bitmaps_in, space::Space* space_in)  [in SweepCallbackContext()]
    137:     : swap_bitmaps(swap_bitmaps_in), space(space_in), self(Thread::Current()) {
/art/runtime/gc/collector/
D | immune_spaces.cc
    45: for (space::ContinuousSpace* space : GetSpaces()) {  [in CreateLargestImmuneRegion()]
    46: uintptr_t space_begin = reinterpret_cast<uintptr_t>(space->Begin());
    47: uintptr_t space_end = reinterpret_cast<uintptr_t>(space->Limit());
    48: if (space->IsImageSpace()) {
    51: space::ImageSpace* image_space = space->AsImageSpace();
    107: void ImmuneSpaces::AddSpace(space::ContinuousSpace* space) {  [in AddSpace()]
    108: DCHECK(spaces_.find(space) == spaces_.end()) << *space;
    110: if (space->GetLiveBitmap() != nullptr && !space->HasBoundBitmaps()) {
    111: CHECK(space->IsContinuousMemMapAllocSpace());
    112: space->AsContinuousMemMapAllocSpace()->BindLiveToMarkBitmap();
    [all …]
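The CreateLargestImmuneRegion() hits above walk the immune spaces (kept in a std::set ordered by begin address, per immune_spaces.h below) and read each one's [Begin(), Limit()) bounds. A standalone sketch of the underlying idea, merging begin-ordered address ranges into the largest contiguous run; the Range alias and function are illustrative stand-ins, not the ART implementation:

    #include <cstdint>
    #include <utility>
    #include <vector>

    // Each pair stands in for one space's [Begin(), Limit()) address range.
    using Range = std::pair<uintptr_t, uintptr_t>;

    // Assumes the ranges are sorted by begin address and non-overlapping.
    Range LargestContiguousRegion(const std::vector<Range>& ranges) {
      Range best{0, 0};
      Range current{0, 0};
      for (const Range& r : ranges) {
        if (current.second == r.first) {
          current.second = r.second;  // Next space starts exactly where the run ends.
        } else {
          current = r;                // Gap between spaces: start a new run.
        }
        if (current.second - current.first > best.second - best.first) {
          best = current;
        }
      }
      return best;
    }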
D | immune_spaces.h
    29: namespace space {
    40: bool operator()(space::ContinuousSpace* a, space::ContinuousSpace* b) const;
    48: void AddSpace(space::ContinuousSpace* space) REQUIRES(Locks::heap_bitmap_lock_);
    57: bool ContainsSpace(space::ContinuousSpace* space) const;
    60: const std::set<space::ContinuousSpace*, CompareByBegin>& GetSpaces() {  [in GetSpaces()]
    74: for (space::ContinuousSpace* space : spaces_) {  [in ContainsObject()]
    75: if (space->HasAddress(obj)) {
    87: std::set<space::ContinuousSpace*, CompareByBegin> spaces_;
D | sticky_mark_sweep.cc
    42: for (const auto& space : GetHeap()->GetContinuousSpaces()) {  [in BindBitmaps()]
    43: if (space->IsContinuousMemMapAllocSpace() &&
    44:     space->GetGcRetentionPolicy() == space::kGcRetentionPolicyAlwaysCollect) {
    45: DCHECK(space->IsContinuousMemMapAllocSpace());
    46: space->AsContinuousMemMapAllocSpace()->BindLiveToMarkBitmap();
    49: for (const auto& space : GetHeap()->GetDiscontinuousSpaces()) {
    50: CHECK(space->IsLargeObjectSpace());
    51: space->AsLargeObjectSpace()->CopyLiveToMarked();
D | partial_mark_sweep.cc
    39: for (const auto& space : GetHeap()->GetContinuousSpaces()) {  [in BindBitmaps()]
    40: if (space->GetGcRetentionPolicy() == space::kGcRetentionPolicyFullCollect) {
    41: CHECK(space->IsZygoteSpace());
    42: immune_spaces_.AddSpace(space);
D | semi_space.cc
    66: for (const auto& space : GetHeap()->GetContinuousSpaces()) {  [in BindBitmaps()]
    67: if (space->GetGcRetentionPolicy() == space::kGcRetentionPolicyNeverCollect ||
    68:     space->GetGcRetentionPolicy() == space::kGcRetentionPolicyFullCollect) {
    69: immune_spaces_.AddSpace(space);
    70: } else if (space->GetLiveBitmap() != nullptr) {
    72: if (space == to_space_) {
    73: CHECK(space->IsContinuousMemMapAllocSpace());
    74: space->AsContinuousMemMapAllocSpace()->BindLiveToMarkBitmap();
    241: explicit VerifyNoFromSpaceReferencesVisitor(space::ContinuousMemMapAllocSpace* from_space)  [in VerifyNoFromSpaceReferencesVisitor()]
    270: space::ContinuousMemMapAllocSpace* const from_space_;
    [all …]
D | mark_sweep.cc
    87: for (const auto& space : GetHeap()->GetContinuousSpaces()) {  [in BindBitmaps()]
    88: if (space->GetGcRetentionPolicy() == space::kGcRetentionPolicyNeverCollect) {
    89: immune_spaces_.AddSpace(space);
    316: for (const auto& space : immune_spaces_.GetSpaces()) {  [in UpdateAndMarkModUnion()]
    317: const char* name = space->IsZygoteSpace()
    320: DCHECK(space->IsZygoteSpace() || space->IsImageSpace()) << *space;
    322: accounting::ModUnionTable* mod_union_table = heap_->FindModUnionTableFromSpace(space);
    327: space->GetLiveBitmap()->VisitMarkedRange(reinterpret_cast<uintptr_t>(space->Begin()),
    328:                                          reinterpret_cast<uintptr_t>(space->End()),
    367: for (const auto& space : GetHeap()->GetContinuousSpaces()) {  [in FindDefaultSpaceBitmap()]
    [all …]
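UpdateAndMarkModUnion() above visits every marked object of an immune space by handing its live bitmap the [Begin(), End()) range. A standalone sketch of that kind of range visit over a mark bitmap; the one-flag-per-8-byte-slot layout, the class name, and the std::function callback are illustration-only assumptions, not ART's accounting::SpaceBitmap:

    #include <cstddef>
    #include <cstdint>
    #include <functional>
    #include <vector>

    class MarkBitmapSketch {
     public:
      static constexpr size_t kAlignment = 8;  // Assumed object alignment for this sketch.

      MarkBitmapSketch(uintptr_t heap_begin, size_t heap_size)
          : heap_begin_(heap_begin), marked_(heap_size / kAlignment, false) {}

      void Set(uintptr_t addr) { marked_[Index(addr)] = true; }

      // Calls the visitor for every marked slot whose address lies in [begin, end).
      void VisitMarkedRange(uintptr_t begin, uintptr_t end,
                            const std::function<void(uintptr_t)>& visitor) const {
        for (size_t i = Index(begin); i < Index(end); ++i) {
          if (marked_[i]) {
            visitor(heap_begin_ + i * kAlignment);
          }
        }
      }

     private:
      size_t Index(uintptr_t addr) const { return (addr - heap_begin_) / kAlignment; }

      const uintptr_t heap_begin_;
      std::vector<bool> marked_;
    };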
D | concurrent_copying.cc
    121: static_assert(space::RegionSpace::kRegionSize == accounting::ReadBarrierTable::kRegionSize,  [in ConcurrentCopying()]
    320: for (const auto& space : heap_->GetContinuousSpaces()) {  [in BindBitmaps()]
    321: if (space->GetGcRetentionPolicy() == space::kGcRetentionPolicyNeverCollect ||
    322:     space->GetGcRetentionPolicy() == space::kGcRetentionPolicyFullCollect) {
    323: CHECK(space->IsZygoteSpace() || space->IsImageSpace());
    324: immune_spaces_.AddSpace(space);
    326: CHECK(!space->IsZygoteSpace());
    327: CHECK(!space->IsImageSpace());
    328: CHECK(space == region_space_ || space == heap_->non_moving_space_);
    330: if (space == region_space_) {
    [all …]
D | semi_space.h
    50: namespace space {
    83: void SetToSpace(space::ContinuousMemMapAllocSpace* to_space);
    86: void SetFromSpace(space::ContinuousMemMapAllocSpace* from_space);
    185: virtual bool ShouldSweepSpace(space::ContinuousSpace* space) const;
    213: space::ContinuousMemMapAllocSpace* to_space_;
    216: space::ContinuousMemMapAllocSpace* from_space_;
    223: space::ContinuousMemMapAllocSpace* fallback_space_;
/art/runtime/gc/accounting/
D | mod_union_table_test.cc
    43: TableType type, space::ContinuousSpace* space, space::ContinuousSpace* target_space);
    51: Thread* self, space::ContinuousMemMapAllocSpace* space, size_t component_count)  [in AllocObjectArray()]
    53: auto* klass = GetObjectArrayClass(self, space);
    57: space->Alloc(self, size, &bytes_allocated, nullptr, &bytes_tl_bulk_allocated));
    61: space->GetLiveBitmap()->Set(obj);
    72: mirror::Class* GetObjectArrayClass(Thread* self, space::ContinuousMemMapAllocSpace* space)  [in GetObjectArrayClass()]
    84: auto* klass = down_cast<mirror::Class*>(space->Alloc(self, class_size, &bytes_allocated,
    122: const std::string& name, Heap* heap, space::ContinuousSpace* space,  [in ModUnionTableRefCacheToSpace()]
    123:     space::ContinuousSpace* target_space)
    124:     : ModUnionTableReferenceCache(name, heap, space), target_space_(target_space) {}
    [all …]
D | remembered_set.h
    35: namespace space {
    50: explicit RememberedSet(const std::string& name, Heap* heap, space::ContinuousSpace* space)  [in RememberedSet()]
    51:     : name_(name), heap_(heap), space_(space) {}
    57: void UpdateAndMarkReferences(space::ContinuousSpace* target_space,
    64: space::ContinuousSpace* GetSpace() {  [in GetSpace()]
    78: space::ContinuousSpace* const space_;
D | mod_union_table.h
    40: namespace space {
    59: explicit ModUnionTable(const std::string& name, Heap* heap, space::ContinuousSpace* space)  [in ModUnionTable()]
    62:     space_(space) {}
    100: space::ContinuousSpace* GetSpace() {  [in GetSpace()]
    115: space::ContinuousSpace* const space_;
    122:     space::ContinuousSpace* space)  [in ModUnionTableReferenceCache()]
    123:     : ModUnionTable(name, heap, space) {}
    170: space::ContinuousSpace* space);
D | mod_union_table-inl.h
    33:     space::ContinuousSpace* space)  [in ModUnionTableToZygoteAllocspace()]
    34:     : ModUnionTableReferenceCache(name, heap, space) {}
D | mod_union_table.cc
    94:     space::ContinuousSpace* from_space,  [in ModUnionUpdateObjectReferencesVisitor()]
    95:     space::ContinuousSpace* immune_space,
    135: space::ContinuousSpace* const from_space_;
    136: space::ContinuousSpace* const immune_space_;
    146:     space::ContinuousSpace* from_space,  [in ModUnionScanImageRootVisitor()]
    147:     space::ContinuousSpace* immune_space,
    168: space::ContinuousSpace* const from_space_;
    169: space::ContinuousSpace* const immune_space_;
    280: space::ContinuousSpace* from_space = heap->FindContinuousSpaceFromObject(obj, false);  [in operator()()]
    281: space::ContinuousSpace* to_space = heap->FindContinuousSpaceFromObject(ref, false);
    [all …]
D | read_barrier_table.h
    50: void ClearForSpace(space::ContinuousSpace* space) {  [in ClearForSpace()]
    51: uint8_t* entry_start = EntryFromAddr(space->Begin());
    52: uint8_t* entry_end = EntryFromAddr(space->Limit());
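ClearForSpace() above converts a space's Begin() and Limit() into a pair of table entries via EntryFromAddr() and clears everything in between. A standalone sketch of one plausible shape for that kind of table: one byte of state per fixed-size region of the heap. The class, its fields, and the 256 KiB region size are assumptions for illustration, not the actual ReadBarrierTable:

    #include <cstddef>
    #include <cstdint>
    #include <cstring>
    #include <vector>

    class RegionByteTableSketch {
     public:
      static constexpr size_t kRegionSize = 256 * 1024;  // Assumed region granularity.

      RegionByteTableSketch(uintptr_t heap_begin, size_t heap_capacity)
          : heap_begin_(heap_begin), entries_(heap_capacity / kRegionSize, 0u) {}

      // Clears every entry covering the address range [begin, limit).
      void ClearForRange(uintptr_t begin, uintptr_t limit) {
        uint8_t* entry_start = EntryFromAddr(begin);
        uint8_t* entry_end = EntryFromAddr(limit);
        std::memset(entry_start, 0, entry_end - entry_start);
      }

     private:
      uint8_t* EntryFromAddr(uintptr_t addr) {
        return entries_.data() + (addr - heap_begin_) / kRegionSize;
      }

      const uintptr_t heap_begin_;
      std::vector<uint8_t> entries_;
    };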
/art/test/1001-app-image-regions/
D | app_image_regions.cc
    33: return gc::space::RegionSpace::kRegionSize;  [in Java_Main_getRegionSize()]
    39: for (auto* space : Runtime::Current()->GetHeap()->GetContinuousSpaces()) {  [in Java_Main_checkAppImageSectionSize()]
    40: if (space->IsImageSpace()) {
    41: auto* image_space = space->AsImageSpace();
/art/test/596-app-images/
D | app_images.cc
    60: for (auto* space : Runtime::Current()->GetHeap()->GetContinuousSpaces()) {  [in Java_Main_checkAppImageLoaded()]
    61: if (space->IsImageSpace()) {
    62: auto* image_space = space->AsImageSpace();
    79: for (auto* space : Runtime::Current()->GetHeap()->GetContinuousSpaces()) {  [in Java_Main_checkAppImageContains()]
    80: if (space->IsImageSpace()) {
    81: auto* image_space = space->AsImageSpace();
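Both test entries above repeat the same lookup: iterate the heap's continuous spaces and act on the ones that are image spaces. A small sketch of that loop factored into a helper, using only calls visible in the listings (GetContinuousSpaces(), IsImageSpace(), AsImageSpace()); the include paths and the helper name are assumptions:

    #include "gc/heap.h"               // Assumed ART include paths.
    #include "gc/space/image_space.h"

    // Returns the first image space mapped into the heap, or nullptr if none is loaded.
    art::gc::space::ImageSpace* FindFirstImageSpace(art::gc::Heap* heap) {
      for (art::gc::space::ContinuousSpace* space : heap->GetContinuousSpaces()) {
        if (space->IsImageSpace()) {
          return space->AsImageSpace();
        }
      }
      return nullptr;
    }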
/art/runtime/gc/
D | heap.cc
    172: static void VerifyBootImagesContiguity(const std::vector<gc::space::ImageSpace*>& image_spaces) {  [in VerifyBootImagesContiguity()]
    225:     space::LargeObjectSpaceType large_object_space_type,  [in Heap()]
    248:     space::ImageSpaceLoadingOrder image_space_loading_order)
    416: std::vector<std::unique_ptr<space::ImageSpace>> boot_image_spaces;
    418: if (space::ImageSpace::LoadBootImage(boot_class_path,
    435: for (std::unique_ptr<space::ImageSpace>& space : boot_image_spaces) {
    436: boot_image_spaces_.push_back(space.get());
    437: AddSpace(space.release());
    540: non_moving_space_ = space::DlMallocSpace::CreateFromMemMap(std::move(non_moving_space_mem_map),
    557: space::RegionSpace::CreateMemMap(kRegionSpaceName, capacity_ * 2, request_begin);
    [all …]
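Lines 435-437 above show the ownership handoff for boot image spaces: the heap records a raw pointer with get() and then releases the unique_ptr into AddSpace(). A standalone sketch of that get()-then-release() pattern with simplified stand-in types; HeapSketch and its owning container are illustrative, not the ART Heap:

    #include <memory>
    #include <vector>

    struct ImageSpace {};  // Stand-in for space::ImageSpace.

    struct HeapSketch {
      std::vector<ImageSpace*> boot_image_spaces_;             // Non-owning list for fast lookup.
      std::vector<std::unique_ptr<ImageSpace>> owned_spaces_;  // Hypothetical owning container.

      void AddSpace(ImageSpace* space) { owned_spaces_.emplace_back(space); }

      void AdoptBootImages(std::vector<std::unique_ptr<ImageSpace>>& loaded) {
        for (std::unique_ptr<ImageSpace>& space : loaded) {
          boot_image_spaces_.push_back(space.get());  // Remember the raw pointer first...
          AddSpace(space.release());                  // ...then hand ownership to the heap.
        }
      }
    };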
D | heap.h
    98: namespace space {
    156: static constexpr space::LargeObjectSpaceType kDefaultLargeObjectSpaceType =
    158:     space::LargeObjectSpaceType::kFreeList
    159:     : space::LargeObjectSpaceType::kMap;
    206:     space::LargeObjectSpaceType large_object_space_type,
    229:     space::ImageSpaceLoadingOrder image_space_loading_order);
    435: void SetSpaceAsDefault(space::ContinuousSpace* continuous_space)
    437: void AddSpace(space::Space* space)
    440: void RemoveSpace(space::Space* space)
    485: const std::vector<space::ContinuousSpace*>& GetContinuousSpaces() const  [in GetContinuousSpaces()]
    [all …]
D | verification.cc
    68: space::Space* const space = heap_->FindSpaceFromAddress(addr);  [in DumpObjectInfo()]
    69: if (space != nullptr) {
    70: oss << " space=" << *space;
    120: bool Verification::IsAddressInHeapSpace(const void* addr, space::Space** out_space) const {  [in IsAddressInHeapSpace()]
    121: space::Space* const space = heap_->FindSpaceFromAddress(addr);
    122: if (space != nullptr) {
    124: *out_space = space;
    131: bool Verification::IsValidHeapObjectAddress(const void* addr, space::Space** out_space) const {  [in IsValidHeapObjectAddress()]
D | verification.h
    32: namespace space {
    56: bool IsValidHeapObjectAddress(const void* addr, space::Space** out_space = nullptr) const
    65: bool IsAddressInHeapSpace(const void* addr, space::Space** out_space = nullptr) const
/art/test/1000-non-moving-space-stress/
D | info.txt
    3: newly allocated object in the non-moving space with a dangling
    5: of the region space.
/art/dex2oat/linker/
D | image_write_read_test.cc
    44: gc::space::ContinuousSpace* space = heap->GetNonMovingSpace();  [in TestWriteRead()]
    45: ASSERT_FALSE(space->IsImageSpace());
    46: ASSERT_TRUE(space != nullptr);
    47: ASSERT_TRUE(space->IsMallocSpace());
    100: gc::space::ImageSpace* image_space = heap->GetBootImageSpaces()[i];