Home
last modified time | relevance | path

Searched refs:RoundUp (Results 1 – 25 of 97) sorted by relevance

1234

/art/runtime/utils/
dex_cache_arrays_layout-inl.h:38 RoundUp(types_offset_ + TypesSize(header.type_ids_size_), MethodsAlignment())), in DexCacheArraysLayout()
40 RoundUp(methods_offset_ + MethodsSize(header.method_ids_size_), StringsAlignment())), in DexCacheArraysLayout()
42 RoundUp(strings_offset_ + StringsSize(header.string_ids_size_), FieldsAlignment())), in DexCacheArraysLayout()
44 RoundUp(fields_offset_ + FieldsSize(header.field_ids_size_), MethodTypesAlignment())), in DexCacheArraysLayout()
46 RoundUp(method_types_offset_ + MethodTypesSize(header.proto_ids_size_), in DexCacheArraysLayout()
48 size_(RoundUp(call_sites_offset_ + CallSitesSize(num_call_sites), Alignment())) { in DexCacheArraysLayout()
/art/runtime/
image.cc:66 CHECK_EQ(image_begin, RoundUp(image_begin, kPageSize)); in ImageHeader()
67 CHECK_EQ(oat_file_begin, RoundUp(oat_file_begin, kPageSize)); in ImageHeader()
68 CHECK_EQ(oat_data_begin, RoundUp(oat_data_begin, kPageSize)); in ImageHeader()
104 return image_reservation_size_ == RoundUp(image_size_, kPageSize); in IsAppImage()
160 static const size_t kStartPos = RoundUp(sizeof(ImageHeader), kObjectAlignment); in VisitObjects()
164 pos += RoundUp(object->SizeOf(), kObjectAlignment); in VisitObjects()
nterp_helpers.cc:114 return RoundUp(frame_size, kStackAlignment); in NterpGetFrameSize()
138 RoundUp(out_regs * kVRegSize, kPointerSize) + // out arguments and pointer alignment in NterpGetReferenceArray()
148 RoundUp(out_regs * kVRegSize, kPointerSize); // out arguments and pointer alignment in NterpGetDexPC()
backtrace_helper.cc:108 if (RoundUp(it->pc, align) == reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc())) { in CollectImpl()
/art/runtime/arch/arm/
jni_frame_arm.h:71 return RoundUp(size, kAapcsStackAlignment); in GetCriticalNativeStubFrameSize()
81 return RoundUp(size, kAapcsStackAlignment); in GetCriticalNativeDirectCallFrameSize()
quick_entrypoints_cc_arm.cc:50 fpr_double_index = std::max(fpr_double_index, RoundUp(fpr_index, 2)); in quick_invoke_reg_setup()
/art/runtime/arch/x86/
jni_frame_x86.h:67 return RoundUp(size, kNativeStackAlignment); in GetCriticalNativeStubFrameSize()
77 return RoundUp(size, kNativeStackAlignment); in GetCriticalNativeDirectCallFrameSize()
/art/runtime/arch/arm64/
jni_frame_arm64.h:78 return RoundUp(size, kAapcs64StackAlignment); in GetCriticalNativeStubFrameSize()
88 return RoundUp(size, kAapcs64StackAlignment); in GetCriticalNativeDirectCallFrameSize()
/art/runtime/arch/x86_64/
jni_frame_x86_64.h:84 return RoundUp(size, kNativeStackAlignment); in GetCriticalNativeStubFrameSize()
94 return RoundUp(size, kNativeStackAlignment); in GetCriticalNativeDirectCallFrameSize()
/art/runtime/gc/space/
malloc_space.cc:105 *growth_limit = RoundUp(*growth_limit, kPageSize); in CreateMemMap()
106 *capacity = RoundUp(*capacity, kPageSize); in CreateMemMap()
143 growth_limit = RoundUp(growth_limit, kPageSize); in SetGrowthLimit()
186 SetEnd(reinterpret_cast<uint8_t*>(RoundUp(reinterpret_cast<uintptr_t>(End()), kPageSize))); in CreateZygoteSpace()
191 size_t size = RoundUp(Size(), kPageSize); in CreateZygoteSpace()
203 SetGrowthLimit(RoundUp(size, kPageSize)); in CreateZygoteSpace()
bump_pointer_space-inl.h:31 num_bytes = RoundUp(num_bytes, kAlignment); in Alloc()
48 num_bytes = RoundUp(num_bytes, kAlignment); in AllocThreadUnsafe()
bump_pointer_space.cc:28 capacity = RoundUp(capacity, kPageSize); in Create()
102 return reinterpret_cast<mirror::Object*>(RoundUp(position, kAlignment)); in GetNextObject()
150 bytes = RoundUp(bytes, kAlignment); in AllocBlock()
234 *usable_size = RoundUp(num_bytes, kAlignment); in AllocationSizeNonvirtual()
region_space.cc:275 const size_t bytes_allocated = RoundUp(BytesAllocated(), kRegionSize); in ShouldBeEvacuated()
305 size_t obj_alloc_size = RoundUp(obj_size, space::RegionSpace::kRegionSize); in ZeroLiveBytesForLargeObject()
385 num_expected_large_tails = RoundUp(r->BytesAllocated(), kRegionSize) / kRegionSize - 1; in SetFromSpace()
663 size_t alloc_size = RoundUp(obj_size, space::RegionSpace::kAlignment); in CheckLiveBytesAgainstRegionBitmap()
960 << (static_cast<float>(live_bytes_) / RoundUp(BytesAllocated(), kRegionSize)); in Dump()
987 prev_object_end = RoundUp(object_end, kAlignment); in GetLongestConsecutiveFreeBytes()
1000 *usable_size = RoundUp(num_bytes, kAlignment); in AllocationSizeNonvirtual()
1003 *usable_size = RoundUp(num_bytes, kRegionSize); in AllocationSizeNonvirtual()
/art/compiler/utils/
swap_space.cc:110 size = RoundUp(size, 8U); in Alloc()
149 size_t next_part = std::max(RoundUp(min_size, kPageSize), RoundUp(kMininumMapSize, kPageSize)); in NewFileChunk()
177 size = RoundUp(size, 8U); in Free()
/art/runtime/gc/accounting/
bitmap.cc:48 const size_t bitmap_size = RoundUp( in AllocateMemMap()
49 RoundUp(num_bits, kBitsPerBitmapWord) / kBitsPerBitmapWord * sizeof(uintptr_t), kPageSize); in AllocateMemMap()
/art/compiler/jni/quick/arm/
calling_convention_arm.cc:232 gpr_index_ = RoundUp(gpr_index_, 2u) + 2u; in Next()
250 return RoundUp(gpr_index_, 2u) + 1u < kHFCoreArgumentRegistersCount; in IsCurrentParamInRegister()
273 CHECK_EQ(RoundUp(gpr_index_, 2u), 2u); in CurrentParamRegister()
420 return RoundUp(total_size, kStackAlignment); in FrameSize()
441 size_t out_args_size = RoundUp(size, kAapcsStackAlignment); in OutFrameSize()
/art/dex2oat/linker/arm/
relative_patcher_thumb2_test.cc:728 uint32_t thunk_offset = GetMethodOffset(method_idx) + RoundUp(kMethodCodeSize, kArmAlignment); in TestBakerFieldWide()
794 thunk_offset += RoundUp(expected_thunk.size(), kArmAlignment); in TestBakerFieldWide()
826 uint32_t thunk_offset = GetMethodOffset(method_idx) + RoundUp(kMethodCodeSize, kArmAlignment); in TestBakerFieldNarrow()
895 thunk_offset += RoundUp(expected_thunk.size(), kArmAlignment); in TestBakerFieldNarrow()
950 RoundUp(raw_code1.size() + sizeof(OatQuickMethodHeader), kArmAlignment); in TEST_F()
971 - RoundUp(thunk_size + sizeof(OatQuickMethodHeader), kArmAlignment) in TEST_F()
972 - RoundUp(kNopCode.size() + sizeof(OatQuickMethodHeader), kArmAlignment) in TEST_F()
1018 RoundUp(raw_code1.size() + sizeof(OatQuickMethodHeader), kArmAlignment); in TEST_F()
1025 const uint32_t bne = BneWWithOffset(kLiteralOffset1, RoundUp(raw_code1.size(), kArmAlignment)); in TEST_F()
1048 RoundUp(raw_code1.size() + sizeof(OatQuickMethodHeader), kArmAlignment); in TEST_F()
[all …]
/art/runtime/mirror/
array.h:81 size_t data_offset = RoundUp(OFFSETOF_MEMBER(Array, first_element_), component_size); in DataOffset()
82 DCHECK_EQ(RoundUp(data_offset, component_size), data_offset) in DataOffset()
89 constexpr size_t data_offset = RoundUp(kFirstElementOffset, kComponentSize); in DataOffset()
90 static_assert(RoundUp(data_offset, kComponentSize) == data_offset, "RoundUp fail"); in DataOffset()
/art/dex2oat/linker/arm64/
relative_patcher_arm64_test.cc:1068 uint32_t thunk_offset = GetMethodOffset(method_idx) + RoundUp(kMethodCodeSize, kArm64Alignment); in TestBakerField()
1121 thunk_offset += RoundUp(expected_thunk.size(), kArm64Alignment); in TestBakerField()
1158 1 * MB - RoundUp(raw_code1.size() + sizeof(OatQuickMethodHeader), kArm64Alignment); in TEST_F()
1173 1 * MB - RoundUp(thunk_size + sizeof(OatQuickMethodHeader), kArm64Alignment) in TEST_F()
1174 - RoundUp(kNopCode.size() + sizeof(OatQuickMethodHeader), kArm64Alignment) in TEST_F()
1218 1 * MB - RoundUp(raw_code1.size() + sizeof(OatQuickMethodHeader), kArm64Alignment); in TEST_F()
1225 const uint32_t cbnz_offset = RoundUp(raw_code1.size(), kArm64Alignment) - kLiteralOffset1; in TEST_F()
1247 1 * MB - RoundUp(raw_code1.size() + sizeof(OatQuickMethodHeader), kArm64Alignment); in TEST_F()
1262 1 * MB - RoundUp(thunk_size + sizeof(OatQuickMethodHeader), kArm64Alignment) in TEST_F()
1263 - RoundUp(kNopCode.size() + sizeof(OatQuickMethodHeader), kArm64Alignment) in TEST_F()
[all …]
/art/runtime/gc/allocator/
rosalloc.h:550 return RoundUp(size, kThreadLocalBracketQuantumSize); in RoundToBracketSize()
552 return RoundUp(size, kBracketQuantumSize); in RoundToBracketSize()
564 return RoundUp(size, kThreadLocalBracketQuantumSize) / kThreadLocalBracketQuantumSize - 1; in SizeToIndex()
566 return (RoundUp(size, kBracketQuantumSize) - kMaxThreadLocalBracketSize) / kBracketQuantumSize in SizeToIndex()
581 bracket_size = RoundUp(size, kThreadLocalBracketQuantumSize); in SizeToIndexAndBracketSize()
584 bracket_size = RoundUp(size, kBracketQuantumSize); in SizeToIndexAndBracketSize()
875 return RoundUp(bytes, kPageSize); in UsableSize()
dlmalloc.cc:72 start = reinterpret_cast<void*>(art::RoundUp(reinterpret_cast<uintptr_t>(start), art::kPageSize)); in DlmallocMadviseCallback()
/art/libartbase/base/
scoped_arena_allocator.cc:96 size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8); in AllocWithMemoryTool()
150 arena_stack_->top_ptr_ = mark_ptr_ + RoundUp(sizeof(ScopedArenaAllocator), 8); in Reset()
arena_allocator.h:271 bytes = RoundUp(bytes, kAlignment);
311 const size_t aligned_ptr_size = RoundUp(ptr_size, kAlignment);
317 const size_t aligned_new_size = RoundUp(new_size, kAlignment);
/art/compiler/jni/quick/x86/
calling_convention_x86.cc:220 return RoundUp(total_size, kStackAlignment); in FrameSize()
253 size_t out_args_size = RoundUp(size, kNativeStackAlignment); in OutFrameSize()
/art/runtime/gc/
heap-inl.h:108 byte_count = RoundUp(byte_count, kStackAlignment); in AllocObjectWithAllocator()
300 alloc_size = RoundUp(alloc_size, space::BumpPointerSpace::kAlignment); in TryToAllocate()
380 alloc_size = RoundUp(alloc_size, space::RegionSpace::kAlignment); in TryToAllocate()

1234