/art/libartbase/base/ |
D | arena_allocator_test.cc |
    143  EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(allocation))  in TEST_F()
    248  ASSERT_TRUE(IsAligned<ArenaAllocator::kAlignment>(original_allocation));  in TEST_F()
    252  EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(realloc_allocation));  in TEST_F()
    255  EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(after_alloc));  in TEST_F()
    265  ASSERT_TRUE(IsAligned<ArenaAllocator::kAlignment>(original_allocation));  in TEST_F()
    269  EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(realloc_allocation));  in TEST_F()
    272  EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(after_alloc));  in TEST_F()
    282  ASSERT_TRUE(IsAligned<ArenaAllocator::kAlignment>(original_allocation));  in TEST_F()
    286  EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(realloc_allocation));  in TEST_F()
    289  EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(after_alloc));  in TEST_F()
    [all …]
|
D | bit_utils.h |
    196  constexpr bool IsAligned(T x) {  in IsAligned() function
    202  inline bool IsAligned(T* x) {  in IsAligned() function
    203  return IsAligned<n>(reinterpret_cast<const uintptr_t>(x));  in IsAligned()
    217  CHECK(::art::IsAligned<alignment>(value)) << reinterpret_cast<const void*>(value)
    220  DCHECK(::art::IsAligned<alignment>(value)) << reinterpret_cast<const void*>(value)
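The first three lines above are the definition site of IsAligned(): a constexpr value overload plus a pointer overload that forwards through uintptr_t; the CHECK/DCHECK lines appear to be assertion macros built on it. As a rough, self-contained sketch of that pattern (not the actual bit_utils.h code; names ending in Sketch are placeholders for this example):

    #include <cassert>
    #include <cstdint>

    // Illustrative power-of-two alignment test in the style of the snippets above.
    template <uint64_t kAlign, typename T>
    constexpr bool IsAlignedSketch(T x) {
      static_assert(kAlign != 0 && (kAlign & (kAlign - 1)) == 0, "alignment must be a power of two");
      return (static_cast<uint64_t>(x) & (kAlign - 1)) == 0;
    }

    // Pointer overload: forward through uintptr_t, as line 203 above does.
    template <uint64_t kAlign, typename T>
    inline bool IsAlignedSketch(T* x) {
      return IsAlignedSketch<kAlign>(reinterpret_cast<uintptr_t>(x));
    }

    int main() {
      static_assert(IsAlignedSketch<8>(64u), "usable at compile time");
      int buf[4] = {};
      assert(IsAlignedSketch<alignof(int)>(&buf[1]));  // runtime check on a pointer
      return 0;
    }

Because the value overload is constexpr, callers can use it both in static_asserts and in runtime CHECK/DCHECK-style assertions, which is why it shows up in both contexts throughout this listing.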
|
D | memory_region.h | 154 return IsAligned<kRuntimePointerSize>(address); in IsWordAligned()
|
D | bit_memory_region.h | 112 DCHECK(IsAligned<sizeof(Result)>(data_)); in LoadBits()
|
D | mem_map.cc |
    1213  DCHECK(IsAligned<kPageSize>(begin));  in TryReadable()
    1214  DCHECK(IsAligned<kPageSize>(end));  in TryReadable()
|
/art/libartbase/arch/ |
D | instruction_set.cc |
    96  static_assert(IsAligned<kPageSize>(kArmStackOverflowReservedBytes), "ARM gap not page aligned");
    97  static_assert(IsAligned<kPageSize>(kArm64StackOverflowReservedBytes), "ARM64 gap not page aligned");
    98  static_assert(IsAligned<kPageSize>(kX86StackOverflowReservedBytes), "X86 gap not page aligned");
    99  static_assert(IsAligned<kPageSize>(kX86_64StackOverflowReservedBytes),
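These call sites use the constexpr form to validate per-architecture constants at build time. A minimal sketch of the same idea, with hypothetical constants standing in for the real values from ART's headers:

    #include <cstddef>

    // Hypothetical constants for illustration only.
    constexpr size_t kPageSizeSketch = 4096;
    constexpr size_t kArmReservedGapSketch = 8 * 1024;

    constexpr bool IsPageAlignedSketch(size_t x) {
      return (x & (kPageSizeSketch - 1)) == 0;
    }

    // Because the helper is constexpr, a misconfigured constant breaks the build,
    // which is the point of the static_asserts in instruction_set.cc above.
    static_assert(IsPageAlignedSketch(kArmReservedGapSketch),
                  "reserved stack gap must be a whole number of pages");

    int main() { return 0; }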
|
/art/runtime/ |
D | verify_object.cc |
    35  bool failed = !IsAligned<kObjectAlignment>(obj.Ptr());  in VerifyObjectImpl()
    38  failed = failed || !IsAligned<kObjectAlignment>(c);  in VerifyObjectImpl()
|
D | nterp_helpers.cc |
    104  static_assert(IsAligned<kPointerSize>(kStackAlignment));  in NterpGetFrameSize()
    105  static_assert(IsAligned<kPointerSize>(NterpGetFrameEntrySize()));  in NterpGetFrameSize()
    106  static_assert(IsAligned<kPointerSize>(kVRegSize * 2));  in NterpGetFrameSize()
|
D | fault_handler.cc |
    305  if (method_obj == nullptr || !IsAligned<sizeof(void*)>(method_obj)) {  in IsInGeneratedCode()
    320  if (!IsAligned<kObjectAlignment>(cls)) {  in IsInGeneratedCode()
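Here the alignment check is used as a plausibility filter on values pulled out of a fault context before they are trusted. A hedged, self-contained sketch of that filter (not the actual IsInGeneratedCode() logic):

    #include <cstdint>
    #include <iostream>

    // Illustrative filter: a candidate pointer is only dereferenced if it is
    // non-null and at least pointer-size aligned, since a misaligned value
    // cannot be a valid object or method pointer.
    bool LooksPlausiblyAligned(const void* candidate) {
      uintptr_t value = reinterpret_cast<uintptr_t>(candidate);
      return candidate != nullptr && (value & (sizeof(void*) - 1)) == 0;
    }

    int main() {
      long x = 0;
      std::cout << LooksPlausiblyAligned(&x) << '\n';       // 1: properly aligned
      std::cout << LooksPlausiblyAligned(nullptr) << '\n';  // 0: rejected
      return 0;
    }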
|
D | oat.cc |
    104  if (!IsAligned<kPageSize>(executable_offset_)) {  in IsValid()
    126  if (!IsAligned<kPageSize>(executable_offset_)) {  in GetValidationErrorMessage()
|
D | image.cc | 122 if (!IsAligned<kPageSize>(image_reservation_size_)) { in IsValid()
|
D | class_linker_test.cc |
    389   CHECK(!IsAligned<4>(field->GetOffset().Uint32Value()));  in AssertClass()
    1088  EXPECT_TRUE(IsAligned<4>(data_offset));  // Check 4 byte alignment.  in TEST_F()
    1090  EXPECT_TRUE(IsAligned<8>(data_offset));  // Check 8 byte alignment.  in TEST_F()
    1100  EXPECT_TRUE(IsAligned<8>(data_offset));  // Longs require 8 byte alignment  in TEST_F()
    1105  EXPECT_TRUE(IsAligned<8>(data_offset));  // Doubles require 8 byte alignment  in TEST_F()
    1110  EXPECT_TRUE(IsAligned<4>(data_offset));  // Ints require 4 byte alignment  in TEST_F()
    1115  EXPECT_TRUE(IsAligned<2>(data_offset));  // Chars require 2 byte alignment  in TEST_F()
    1120  EXPECT_TRUE(IsAligned<2>(data_offset));  // Shorts require 2 byte alignment  in TEST_F()
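These tests assert that field and array-data offsets respect each primitive type's required alignment. A minimal restatement of that expectation as standalone code (illustrative only, not the test itself):

    #include <cassert>
    #include <cstdint>

    // A field's byte offset should be a multiple of the type's required
    // alignment: 8 for longs/doubles, 4 for ints/floats, 2 for chars/shorts.
    bool OffsetIsAligned(uint32_t byte_offset, uint32_t required_alignment) {
      return byte_offset % required_alignment == 0;
    }

    int main() {
      assert(OffsetIsAligned(16, 8));   // a long/double at offset 16 is fine
      assert(OffsetIsAligned(12, 4));   // an int at offset 12 is fine
      assert(OffsetIsAligned(10, 2));   // a short/char at offset 10 is fine
      assert(!OffsetIsAligned(12, 8));  // a long at offset 12 would be rejected
      return 0;
    }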
|
D | oat_file.cc |
    442  IsAligned<alignof(IndexBssMapping)>(index_bss_mapping_offset) &&  in ReadIndexBssMapping()
    524  if (!IsAligned<sizeof(uint32_t)>(data_bimg_rel_ro_begin_) ||  in Setup()
    525  !IsAligned<sizeof(uint32_t)>(data_bimg_rel_ro_end_) ||  in Setup()
    536  if (!IsAligned<kPageSize>(bss_begin_) ||  in Setup()
    539  !IsAligned<alignof(GcRoot<mirror::Object>)>(bss_end_)) {  in Setup()
    832  if (UNLIKELY(!IsAligned<alignof(uint32_t)>(class_offsets_offset))) {  in Setup()
|
D | stack_map.h | 235 DCHECK(IsAligned<kFrameSlotSize>(packed_value)); in PackValue()
|
/art/runtime/gc/accounting/ |
D | card_table-inl.h |
    71   while (!IsAligned<sizeof(intptr_t)>(card_cur) && card_cur < card_end) {  in Scan()
    142  while (!IsAligned<sizeof(intptr_t)>(card_cur) && card_cur < card_end) {  in ModifyCardsAtomic()
    155  while (!IsAligned<sizeof(intptr_t)>(card_end) && card_end > card_cur) {  in ModifyCardsAtomic()
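The loops above advance byte by byte only until the cursor reaches a word boundary, so the main loop can then process one machine word of cards at a time. A self-contained sketch of that prologue/word-loop/epilogue structure, assuming one byte per card (illustrative, not the ART card table code):

    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    // Advance one byte at a time until the cursor is word aligned, then consume
    // whole words so the hot loop can skip runs of clean cards quickly.
    size_t CountDirtyCards(const uint8_t* begin, const uint8_t* end) {
      size_t dirty = 0;
      const uint8_t* cur = begin;
      // Byte-wise prologue until `cur` is word aligned (or the range ends).
      while ((reinterpret_cast<uintptr_t>(cur) & (sizeof(intptr_t) - 1)) != 0 && cur < end) {
        dirty += (*cur != 0) ? 1u : 0u;
        ++cur;
      }
      // Word-wise main loop: a zero word means sizeof(intptr_t) clean cards.
      while (static_cast<size_t>(end - cur) >= sizeof(intptr_t)) {
        intptr_t word;
        std::memcpy(&word, cur, sizeof(word));
        if (word != 0) {
          for (size_t i = 0; i < sizeof(intptr_t); ++i) {
            dirty += (cur[i] != 0) ? 1u : 0u;
          }
        }
        cur += sizeof(intptr_t);
      }
      // Byte-wise epilogue for the unaligned tail.
      while (cur < end) {
        dirty += (*cur != 0) ? 1u : 0u;
        ++cur;
      }
      return dirty;
    }

    int main() { return 0; }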
|
/art/compiler/utils/arm/ |
D | assembler_arm_vixl.cc |
    214  return IsAbsoluteUint<10>(offset) && IsAligned<4>(offset);  // VFP addressing mode.  in CanHoldLoadOffsetThumb()
    216  return IsAbsoluteUint<10>(offset) && IsAligned<4>(offset);  in CanHoldLoadOffsetThumb()
    232  return IsAbsoluteUint<10>(offset) && IsAligned<4>(offset);  // VFP addressing mode.  in CanHoldStoreOffsetThumb()
    234  return IsAbsoluteUint<10>(offset) && IsAligned<4>(offset);  in CanHoldStoreOffsetThumb()
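Here the alignment check is half of an encodability test: the offset must both fit the immediate field and be a multiple of the access size. A hedged restatement, on the assumption that the VFP load/store form encodes an 8-bit word offset plus an add/subtract bit (so the byte offset needs magnitude below 2^10 and 4-byte alignment):

    #include <cstdint>

    // Illustrative encodability check in the spirit of CanHoldLoadOffsetThumb().
    bool CanEncodeVfpOffsetSketch(int32_t offset) {
      int64_t magnitude = offset < 0 ? -static_cast<int64_t>(offset) : offset;
      bool fits_ten_bits = magnitude < (int64_t{1} << 10);
      bool word_aligned = (magnitude & 3) == 0;
      return fits_ten_bits && word_aligned;
    }

    int main() {
      // 1020 (= 255 * 4) encodes; 1021 is misaligned; 1024 no longer fits.
      bool ok = CanEncodeVfpOffsetSketch(1020) &&
                !CanEncodeVfpOffsetSketch(1021) &&
                !CanEncodeVfpOffsetSketch(1024);
      return ok ? 0 : 1;
    }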
|
D | jni_macro_assembler_arm_vixl.cc |
    484  IsAligned<2u>(static_cast<size_t>(srcs[start].GetRegister().AsArm().AsSRegister()));  in UseVstrForChunk()
    585  if (IsAligned<2u>(start_sreg) &&  in MoveArguments()
    586  IsAligned<kDRegSizeInBytes>(frame_offset) &&  in MoveArguments()
    587  IsAligned<kDRegSizeInBytes>(total_size)) {  in MoveArguments()
    657  if (IsAligned<2u>(first_sreg) &&  in MoveArguments()
|
/art/dex2oat/linker/arm/ |
D | relative_patcher_thumb2_test.cc |
    122  if (!IsAligned<2u>(bne_offset)) {  in BneWWithOffset()
    126  if (!IsAligned<2u>(target_offset)) {  in BneWWithOffset()
    490  ASSERT_TRUE(IsAligned<2u>(diff));  in TEST_F()
    575  ASSERT_TRUE(IsAligned<kArmAlignment>(method_after_thunk_offset));  in TEST_F()
    582  ASSERT_TRUE(IsAligned<kArmAlignment>(thunk_offset));  in TEST_F()
    584  ASSERT_TRUE(IsAligned<2u>(diff));  in TEST_F()
    618  ASSERT_TRUE(IsAligned<2u>(diff));  in TEST_F()
    678  ASSERT_TRUE(IsAligned<2u>(diff));  in TEST_F()
    948  static_assert(IsAligned<kArmAlignment>(expected_thunk_offset), "Target offset must be aligned.");  in TEST_F()
    959  static_assert(IsAligned<kArmAlignment>(kLiteralOffset2 + kPcAdjustment),  in TEST_F()
    [all …]
|
/art/tools/hiddenapi/ |
D | hiddenapi.cc |
    621  CHECK(IsAligned<sizeof(uint32_t)>(data_.data()));  in GetSizeField()
    674  CHECK(IsAligned<kMapListAlignment>(old_dex_.Size()))  in Encode()
    762  CHECK(IsAligned<kMapListAlignment>(data_.data()));  in AllocateMemory()
    763  CHECK(IsAligned<kHiddenapiClassDataAlignment>(data_.data()));  in AllocateMemory()
    839  CHECK(IsAligned<kHiddenapiClassDataAlignment>(payload_offset))  in AppendHiddenapiClassData()
|
/art/dex2oat/linker/arm64/ |
D | relative_patcher_arm64_test.cc |
    657   ASSERT_TRUE(IsAligned<4u>(diff));  in TEST_F()
    746   ASSERT_TRUE(IsAligned<kArm64Alignment>(method_after_thunk_offset));  in TEST_F()
    753   ASSERT_TRUE(IsAligned<kArm64Alignment>(thunk_offset));  in TEST_F()
    755   ASSERT_TRUE(IsAligned<4u>(diff));  in TEST_F()
    785   ASSERT_TRUE(IsAligned<4u>(diff));  in TEST_F()
    856   bool unaligned = !IsAligned<8u>((adrp_offset) + 4u + static_cast<uint32_t>(pcrel_disp));  in TEST_F()
    971   bool unaligned = !IsAligned<8u>((adrp_offset) + 4u + static_cast<uint32_t>(pcrel_disp));  in TEST_F()
    1021  ASSERT_TRUE(IsAligned<4u>(diff));  in TEST_F()
|
/art/runtime/gc/ |
D | verification.cc | 132 return IsAligned<kObjectAlignment>(addr) && IsAddressInHeapSpace(addr, out_space); in IsValidHeapObjectAddress()
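This call site combines the alignment test with a range test to decide whether an arbitrary address can be treated as a heap object. A hedged sketch of that combination (the alignment constant and the range below are placeholders, not ART's real values):

    #include <cstdint>

    // An address only counts as a heap object if it meets the heap's object
    // alignment and lies inside a known space.
    constexpr uintptr_t kObjectAlignmentSketch = 8;

    bool IsValidHeapObjectAddressSketch(uintptr_t addr,
                                        uintptr_t space_begin,
                                        uintptr_t space_end) {
      bool aligned = (addr & (kObjectAlignmentSketch - 1)) == 0;
      bool in_space = addr >= space_begin && addr < space_end;
      return aligned && in_space;
    }

    int main() {
      bool ok = IsValidHeapObjectAddressSketch(0x1010, 0x1000, 0x2000) &&
                !IsValidHeapObjectAddressSketch(0x1013, 0x1000, 0x2000);
      return ok ? 0 : 1;
    }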
|
/art/runtime/mirror/ |
D | string-alloc-inl.h | 182 static_assert(IsAligned<sizeof(uint16_t)>(kObjectAlignment), in Alloc()
|
/art/dex2oat/ |
D | dex2oat_image_test.cc |
    117  static_assert(IsAligned<kPageSize>(ART_BASE_ADDRESS_MIN_DELTA));  in ReserveCoreImageAddressSpace()
    118  static_assert(IsAligned<kPageSize>(ART_BASE_ADDRESS_MAX_DELTA));  in ReserveCoreImageAddressSpace()
|
/art/compiler/debug/ |
D | elf_debug_writer.cc | 241 auto is_pc_aligned = [](const void* pc) { return IsAligned<kPcAlign>(pc); }; in PackElfFileForJIT()
|
/art/runtime/gc/collector/ |
D | concurrent_copying.cc |
    1168  && (!IsAligned<kPageSize>(ref) || heap_->GetLargeObjectsSpace() == nullptr)) {  in TestAndSetMarkBitForRef()
    1197  && (!IsAligned<kPageSize>(ref) || heap_->GetLargeObjectsSpace() == nullptr)) {  in TestMarkBitmapForRef()
    2215  if (!IsAligned<kPageSize>(to_ref)) {  in ProcessMarkStackRef()
    2569  DCHECK(IsAligned<kPageSize>(region_space_->Limit()));  in CaptureRssAtPeak()
    2576  DCHECK(IsAligned<kPageSize>(heap_->non_moving_space_->Limit()));  in CaptureRssAtPeak()
    2595  DCHECK(IsAligned<kPageSize>(map.BaseSize()));  in CaptureRssAtPeak()
    3577  if (!IsAligned<kPageSize>(ref)) {  in MarkNonMoving()
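The first three lines above use page alignment as a cheap pre-filter: a reference that is not page aligned cannot have come from the page-aligned large-object space, so the more expensive lookup can be skipped. A hedged sketch of that pre-filter (kPageSizeSketch is a placeholder constant):

    #include <cstdint>

    // Only page-aligned references are worth checking against the large
    // object space; anything else is rejected immediately.
    constexpr uintptr_t kPageSizeSketch = 4096;

    bool MightBeLargeObject(uintptr_t ref) {
      return (ref & (kPageSizeSketch - 1)) == 0;
    }

    int main() {
      return (MightBeLargeObject(0x20000) && !MightBeLargeObject(0x20008)) ? 0 : 1;
    }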
|