Lines Matching refs:kRegionSize
(each hit reads: <source line> <code> in <enclosing function>())

53 CHECK_ALIGNED(capacity, kRegionSize); in CreateMemMap()
61 capacity + kRegionSize, in CreateMemMap()
80 CHECK_EQ(mem_map.Size(), capacity + kRegionSize); in CreateMemMap()
83 if (IsAlignedParam(mem_map.Begin(), kRegionSize)) { in CreateMemMap()
89 mem_map.AlignBy(kRegionSize); in CreateMemMap()
91 CHECK_ALIGNED(mem_map.Begin(), kRegionSize); in CreateMemMap()
92 CHECK_ALIGNED(mem_map.End(), kRegionSize); in CreateMemMap()
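
The CreateMemMap() hits above show the usual over-allocate-and-align trick: reserve kRegionSize extra bytes (lines 61, 80) so the mapping can be trimmed to a region boundary no matter where the kernel places it (lines 83, 89). A minimal standalone sketch of that pattern, using raw mmap instead of ART's MemMap and assuming the 256 KiB kRegionSize of current ART:

    #include <sys/mman.h>
    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    constexpr size_t kRegionSize = 256 * 1024;  // assumption (256 KiB in current ART)

    // Reserve capacity + kRegionSize, then trim so [begin, begin + capacity)
    // starts and ends on a region boundary, as the CHECKs at lines 53-92 demand.
    uint8_t* CreateAlignedReservation(size_t capacity) {
      assert(capacity % kRegionSize == 0);      // CHECK_ALIGNED(capacity, ...)
      const size_t reserve = capacity + kRegionSize;
      void* raw = mmap(nullptr, reserve, PROT_NONE,
                       MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
      if (raw == MAP_FAILED) return nullptr;
      const uintptr_t begin = reinterpret_cast<uintptr_t>(raw);
      const uintptr_t aligned = (begin + kRegionSize - 1) & ~(kRegionSize - 1);
      if (aligned != begin) {                   // unmap the unaligned head
        munmap(raw, aligned - begin);
      }
      const size_t tail = (begin + reserve) - (aligned + capacity);
      if (tail != 0) {                          // unmap the unused tail
        munmap(reinterpret_cast<void*>(aligned + capacity), tail);
      }
      assert(aligned % kRegionSize == 0);       // CHECK_ALIGNED(mem_map.Begin(), ...)
      return reinterpret_cast<uint8_t*>(aligned);
    }

Trimming both ends leaves exactly capacity bytes with region-aligned begin and end, which is what the checks at lines 91-92 assert.
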
112 num_regions_(mem_map_.Size() / kRegionSize), in RegionSpace()
121 CHECK_ALIGNED(mem_map_.Size(), kRegionSize); in RegionSpace()
122 CHECK_ALIGNED(mem_map_.Begin(), kRegionSize); in RegionSpace()
126 for (size_t i = 0; i < num_regions_; ++i, region_addr += kRegionSize) { in RegionSpace()
127 regions_[i].Init(i, region_addr, region_addr + kRegionSize); in RegionSpace()
135 CHECK_EQ(static_cast<size_t>(regions_[i].End() - regions_[i].Begin()), kRegionSize); in RegionSpace()
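
The constructor hits carve that aligned mapping into num_regions_ fixed-size entries (lines 112-135). A sketch of the bookkeeping, with Region reduced to a hypothetical three-field struct:

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    constexpr size_t kRegionSize = 256 * 1024;  // assumption

    // Hypothetical, heavily reduced stand-in for RegionSpace::Region.
    struct Region {
      size_t idx = 0;
      uint8_t* begin = nullptr;
      uint8_t* end = nullptr;
      void Init(size_t i, uint8_t* b, uint8_t* e) { idx = i; begin = b; end = e; }
    };

    // Mirrors lines 112-135: num_regions = size / kRegionSize, one Init()
    // per region, each exactly kRegionSize bytes wide.
    std::vector<Region> BuildRegionTable(uint8_t* map_begin, size_t map_size) {
      const size_t num_regions = map_size / kRegionSize;
      std::vector<Region> regions(num_regions);
      uint8_t* addr = map_begin;
      for (size_t i = 0; i < num_regions; ++i, addr += kRegionSize) {
        regions[i].Init(i, addr, addr + kRegionSize);
      }
      return regions;
    }

Because the map size and base are both region-aligned (lines 121-122), the division is exact and the size-accounting helpers at lines 159, 171, and 183 can simply multiply a region count by kRegionSize.
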
159 return num_regions * kRegionSize; in FromSpaceSize()
171 return num_regions * kRegionSize; in UnevacFromSpaceSize()
183 return num_regions * kRegionSize; in ToSpaceSize()
275 const size_t bytes_allocated = RoundUp(BytesAllocated(), kRegionSize); in ShouldBeEvacuated()
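
Line 275 rounds the region's allocated bytes up to whole regions before computing a live-byte ratio; the same ratio is printed at line 960. A sketch of the evacuation test this feeds, with the 75% cutoff borrowed from ART's kEvacuateLivePercentThreshold (treat the exact constant as an assumption):

    #include <cstddef>
    #include <cstdint>

    constexpr size_t kRegionSize = 256 * 1024;              // assumption
    constexpr uint64_t kEvacuateLivePercentThreshold = 75;  // assumption

    // Evacuate a region whose live bytes are a small fraction of its
    // region-rounded footprint (cf. lines 275 and 960).
    bool ShouldBeEvacuated(uint64_t live_bytes, size_t bytes_allocated) {
      const uint64_t footprint =
          (bytes_allocated + kRegionSize - 1) / kRegionSize * kRegionSize;
      return live_bytes * 100 < kEvacuateLivePercentThreshold * footprint;
    }
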
301 DCHECK_ALIGNED(obj, kRegionSize); in ZeroLiveBytesForLargeObject()
303 DCHECK_GT(obj_size, space::RegionSpace::kRegionSize); in ZeroLiveBytesForLargeObject()
305 size_t obj_alloc_size = RoundUp(obj_size, space::RegionSpace::kRegionSize); in ZeroLiveBytesForLargeObject()
308 DCHECK_ALIGNED(end_addr, kRegionSize); in ZeroLiveBytesForLargeObject()
312 for (uint8_t* addr = begin_addr; addr < end_addr; addr += kRegionSize) { in ZeroLiveBytesForLargeObject()
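
ZeroLiveBytesForLargeObject() walks every region covered by a large object: the object begins on a region boundary (line 301), its footprint is rounded up to whole regions (line 305), and the loop at line 312 steps by kRegionSize. A self-contained sketch of that walk:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    constexpr size_t kRegionSize = 256 * 1024;  // assumption

    // Visits every region spanned by a large object. The object starts on a
    // region boundary (line 301) and its footprint rounds up to whole regions
    // (line 305), so the loop also covers the trailing, partially used region.
    void ForEachCoveredRegion(uint8_t* obj, size_t obj_size,
                              void (*visit)(uint8_t* region_begin)) {
      assert(reinterpret_cast<uintptr_t>(obj) % kRegionSize == 0);
      const size_t alloc_size =
          (obj_size + kRegionSize - 1) / kRegionSize * kRegionSize;
      for (uint8_t* addr = obj; addr < obj + alloc_size; addr += kRegionSize) {
        visit(addr);  // e.g. zero that region's live-bytes counter
      }
    }

The same rounding at line 385 yields the expected number of "large tail" regions: total covered regions minus the head region, hence the trailing "- 1".
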
385 num_expected_large_tails = RoundUp(r->BytesAllocated(), kRegionSize) / kRegionSize - 1; in SetFromSpace()
553 reinterpret_cast<mirror::Object*>(r->Begin() + free_regions * kRegionSize)); in ClearFromSpace()
604 + regions_to_clear_bitmap * kRegionSize)); in ClearFromSpace()
758 max_contiguous_free_regions * kRegionSize); in LogFragmentationAllocFailure()
795 size_t new_num_regions = new_capacity / kRegionSize; in ClampGrowthLimit()
855 if (tlab_size < kRegionSize) { in AllocNewTlab()
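
Line 855 branches on whether the requested TLAB is smaller than a full region; the DCHECK at line 905 below confirms a thread never holds more than kRegionSize of TLAB space. A hedged sketch of that dispatch; both helpers are hypothetical placeholders, not ART's allocator:

    #include <cstddef>
    #include <cstdint>
    #include <cstdlib>

    constexpr size_t kRegionSize = 256 * 1024;  // assumption

    // Hypothetical placeholders; ART pulls these from its region free list.
    uint8_t* AllocFromPartialRegion(size_t bytes) {
      return static_cast<uint8_t*>(std::malloc(bytes));
    }
    uint8_t* AllocWholeFreeRegion() {
      return static_cast<uint8_t*>(std::malloc(kRegionSize));
    }

    // Sketch of the branch at line 855: a TLAB never spans regions, so
    // sub-region requests and whole-region requests take different paths.
    uint8_t* AllocNewTlabSketch(size_t tlab_size) {
      if (tlab_size < kRegionSize) {
        return AllocFromPartialRegion(tlab_size);
      }
      return AllocWholeFreeRegion();
    }
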
905 DCHECK_LE(thread->GetThreadLocalBytesAllocated(), kRegionSize); in RevokeThreadLocalBuffersLocked()
960 << (static_cast<float>(live_bytes_) / RoundUp(BytesAllocated(), kRegionSize)); in Dump()
973 return kRegionSize; in GetLongestConsecutiveFreeBytes()
998 if (LIKELY(num_bytes <= kRegionSize)) { in AllocationSizeNonvirtual()
1003 *usable_size = RoundUp(num_bytes, kRegionSize); in AllocationSizeNonvirtual()
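
Lines 998-1003 split allocation-size accounting in two: an object that fits in one region is charged at object granularity, while a larger one is charged whole regions. A sketch, with the small-object alignment (kAlignment = 8) as an assumption:

    #include <cstddef>

    constexpr size_t kRegionSize = 256 * 1024;  // assumption
    constexpr size_t kAlignment = 8;            // assumed object alignment

    // Sketch of lines 998-1003: small objects are charged their aligned size;
    // anything larger consumes whole regions, so its usable size rounds up
    // to a multiple of kRegionSize.
    size_t UsableSize(size_t num_bytes) {
      if (num_bytes <= kRegionSize) {
        return (num_bytes + kAlignment - 1) & ~(kAlignment - 1);
      }
      return (num_bytes + kRegionSize - 1) / kRegionSize * kRegionSize;
    }
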
1073 CheckedCall(mprotect, __FUNCTION__, Begin(), kRegionSize, PROT_READ | PROT_WRITE); in MarkAsAllocated()
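
MarkAsAllocated() re-enables access to a region's pages (line 1073), which implies freed regions are protected in the meantime so stray accesses fault. A sketch using plain mprotect in place of ART's CheckedCall:

    #include <sys/mman.h>
    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    constexpr size_t kRegionSize = 256 * 1024;  // assumption

    // Sketch of line 1073: make a previously protected region usable again
    // when it is handed back out. ART's CheckedCall aborts on failure with
    // __FUNCTION__ in the message; a bare assert stands in for that here.
    void MarkAsAllocated(uint8_t* region_begin) {
      int rc = mprotect(region_begin, kRegionSize, PROT_READ | PROT_WRITE);
      assert(rc == 0);
      (void)rc;
    }
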