/art/compiler/utils/arm64/ |
D | jni_macro_assembler_arm64.cc
    167  Register scratch = temps.AcquireW();  in StoreImmediateToFrame() local
    168  ___ Mov(scratch, imm);  in StoreImmediateToFrame()
    169  ___ Str(scratch, MEM_OP(reg_x(SP), offs.Int32Value()));  in StoreImmediateToFrame()
    174  Register scratch = temps.AcquireX();  in StoreStackOffsetToThread() local
    175  ___ Add(scratch, reg_x(SP), fr_offs.Int32Value());  in StoreStackOffsetToThread()
    176  ___ Str(scratch, MEM_OP(reg_x(TR), tr_offs.Int32Value()));  in StoreStackOffsetToThread()
    181  Register scratch = temps.AcquireX();  in StoreStackPointerToThread() local
    182  ___ Mov(scratch, reg_x(SP));  in StoreStackPointerToThread()
    183  ___ Str(scratch, MEM_OP(reg_x(TR), tr_offs.Int32Value()));  in StoreStackPointerToThread()
    455  Register scratch = temps.AcquireX();  in CopyRawPtrFromThread() local
    [all …]
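All of the arm64 matches above follow the same VIXL scratch-register discipline: open a UseScratchRegisterScope, acquire a W or X temporary, emit the load/store through it, and let the scope hand the register back. A minimal stand-alone sketch of that pattern written against plain VIXL rather than the ART assembler (the function name is made up, x19 stands in for ART's thread register TR, and the offset is arbitrary):

    #include "aarch64/macro-assembler-aarch64.h"  // VIXL

    using namespace vixl::aarch64;

    // Store SP into a thread-local slot through a borrowed temporary, as in
    // lines 181-183 above. A64 STR cannot use SP as its source register, so
    // the value is moved into a scratch register first.
    void StoreSpToThreadSlot(MacroAssembler* masm, int64_t offset) {
      UseScratchRegisterScope temps(masm);
      Register scratch = temps.AcquireX();          // borrow a 64-bit temporary
      masm->Mov(scratch, sp);                       // copy the stack pointer
      masm->Str(scratch, MemOperand(x19, offset));  // x19 plays the role of TR here
    }                                               // scope destructor releases scratch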
|
D | jni_macro_assembler_arm64.h
     91  void CopyRawPtrToThread(ThreadOffset64 thr_offs, FrameOffset fr_offs, ManagedRegister scratch)
    102  ManagedRegister scratch,
    107  ManagedRegister scratch,
    112  ManagedRegister scratch,
    118  ManagedRegister scratch,
    124  ManagedRegister scratch,
    126  void MemoryBarrier(ManagedRegister scratch) override;
    185  Arm64Exception(vixl::aarch64::Register scratch, size_t stack_adjust)  in Arm64Exception() argument
    186  : scratch_(scratch), stack_adjust_(stack_adjust) {}  in Arm64Exception()
|
D | assembler_arm64.cc
     97  Arm64ManagedRegister scratch = m_scratch.AsArm64();  in JumpTo() local
     99  CHECK(scratch.IsXRegister()) << scratch;  in JumpTo()
    102  temps.Exclude(reg_x(base.AsXRegister()), reg_x(scratch.AsXRegister()));  in JumpTo()
    103  ___ Ldr(reg_x(scratch.AsXRegister()), MEM_OP(reg_x(base.AsXRegister()), offs.Int32Value()));  in JumpTo()
    104  ___ Br(reg_x(scratch.AsXRegister()));  in JumpTo()
|
/art/compiler/utils/x86/ |
D | jni_macro_assembler_x86.cc
    178  Register scratch = GetScratchRegister();  in StoreStackOffsetToThread() local
    179  __ leal(scratch, Address(ESP, fr_offs));  in StoreStackOffsetToThread()
    180  __ fs()->movl(Address::Absolute(thr_offs), scratch);  in StoreStackOffsetToThread()
    355  Register scratch = GetScratchRegister();  in CopyRef() local
    356  __ movl(scratch, Address(ESP, src));  in CopyRef()
    357  __ movl(Address(ESP, dest), scratch);  in CopyRef()
    364  Register scratch = GetScratchRegister();  in CopyRef() local
    365  __ movl(scratch, Address(base.AsX86().AsCpuRegister(), offs));  in CopyRef()
    367  __ MaybeUnpoisonHeapReference(scratch);  in CopyRef()
    369  __ movl(Address(ESP, dest), scratch);  in CopyRef()
    [all …]
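The x86 matches show the other style used by the JNI assemblers: instead of a scope-managed pool, copies are routed through a scratch register returned by GetScratchRegister(). A hedged sketch of the CopyRef variant at lines 364-369 (the signature and the unpoison_reference flag are assumptions; only the statements mirror the matches above):

    // Copy a heap reference from [base + offs] into the stack slot `dest`,
    // unpoisoning it on the way if the runtime poisons heap references.
    void X86JNIMacroAssembler::CopyRef(FrameOffset dest,
                                       ManagedRegister base,
                                       MemberOffset offs,
                                       bool unpoison_reference) {
      Register scratch = GetScratchRegister();
      __ movl(scratch, Address(base.AsX86().AsCpuRegister(), offs));
      if (unpoison_reference) {
        __ MaybeUnpoisonHeapReference(scratch);
      }
      __ movl(Address(ESP, dest), scratch);
    }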
|
D | jni_macro_assembler_x86.h
     91  void CopyRawPtrToThread(ThreadOffset32 thr_offs, FrameOffset fr_offs, ManagedRegister scratch)
    102  void Copy(FrameOffset dest, ManagedRegister src_base, Offset src_offset, ManagedRegister scratch,
    105  void Copy(ManagedRegister dest_base, Offset dest_offset, FrameOffset src, ManagedRegister scratch,
    108  void Copy(FrameOffset dest, FrameOffset src_base, Offset src_offset, ManagedRegister scratch,
    112  ManagedRegister scratch, size_t size) override;
    115  ManagedRegister scratch, size_t size) override;
|
/art/compiler/utils/x86_64/ |
D | jni_macro_assembler_x86_64.cc
    205  CpuRegister scratch = GetScratchRegister();  in StoreStackOffsetToThread() local
    206  __ leaq(scratch, Address(CpuRegister(RSP), fr_offs));  in StoreStackOffsetToThread()
    207  __ gs()->movq(Address::Absolute(thr_offs, true), scratch);  in StoreStackOffsetToThread()
    439  CpuRegister scratch = GetScratchRegister();  in CopyRef() local
    440  __ movl(scratch, Address(CpuRegister(RSP), src));  in CopyRef()
    441  __ movl(Address(CpuRegister(RSP), dest), scratch);  in CopyRef()
    448  CpuRegister scratch = GetScratchRegister();  in CopyRef() local
    449  __ movl(scratch, Address(base.AsX86_64().AsCpuRegister(), offs));  in CopyRef()
    451  __ MaybeUnpoisonHeapReference(scratch);  in CopyRef()
    453  __ movl(Address(CpuRegister(RSP), dest), scratch);  in CopyRef()
    [all …]
|
D | jni_macro_assembler_x86_64.h
     94  void CopyRawPtrToThread(ThreadOffset64 thr_offs, FrameOffset fr_offs, ManagedRegister scratch)
    108  ManagedRegister scratch,
    114  ManagedRegister scratch,
    120  ManagedRegister scratch,
    127  ManagedRegister scratch,
    134  ManagedRegister scratch,
|
/art/compiler/utils/ |
D | swap_space_test.cc
    37  ScratchFile scratch;  in SwapTest() local
    38  int fd = scratch.GetFd();  in SwapTest()
    39  unlink(scratch.GetFilename().c_str());  in SwapTest()
    72  scratch.Close();  in SwapTest()
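The SwapTest lines use a common POSIX idiom: keep only the file descriptor and unlink the name right away, so the backing storage disappears automatically once the descriptor is closed. A stand-alone sketch of that idiom using mkstemp instead of ART's ScratchFile:

    #include <cstdlib>
    #include <unistd.h>

    // Create an anonymous scratch file: open it, drop its directory entry,
    // and let the kernel reclaim the space when the descriptor is closed.
    int OpenAnonymousScratchFile() {
      char name[] = "/tmp/swap-test-XXXXXX";
      int fd = mkstemp(name);  // creates and opens a uniquely named file
      if (fd >= 0) {
        unlink(name);          // the open descriptor keeps the file alive
      }
      return fd;               // caller close()s it when finished
    }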
|
D | jni_macro_assembler.h
    157  ManagedRegister scratch) = 0;
    170  ManagedRegister scratch,
    176  ManagedRegister scratch,
    182  ManagedRegister scratch,
    189  ManagedRegister scratch,
    196  ManagedRegister scratch,
    199  virtual void MemoryBarrier(ManagedRegister scratch) = 0;
|
/art/compiler/utils/arm/ |
D | jni_macro_assembler_arm_vixl.h
    104  ManagedRegister scratch) override;
    117  ManagedRegister scratch,
    123  ManagedRegister scratch,
    129  ManagedRegister scratch,
    136  ManagedRegister scratch,
    143  ManagedRegister scratch,
    201  void MemoryBarrier(ManagedRegister scratch) override;
    209  ArmException(vixl32::Register scratch, size_t stack_adjust)  in ArmException() argument
    210  : scratch_(scratch), stack_adjust_(stack_adjust) {}  in ArmException()
|
D | jni_macro_assembler_arm_vixl.cc
    262  vixl32::Register scratch = temps.Acquire();  in StoreSpanning() local
    263  asm_.LoadFromOffset(kLoadWord, scratch, sp, in_off.Int32Value());  in StoreSpanning()
    264  asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value() + 4);  in StoreSpanning()
    269  vixl32::Register scratch = temps.Acquire();  in CopyRef() local
    270  asm_.LoadFromOffset(kLoadWord, scratch, sp, src.Int32Value());  in CopyRef()
    271  asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value());  in CopyRef()
    279  vixl32::Register scratch = temps.Acquire();  in CopyRef() local
    280  asm_.LoadFromOffset(kLoadWord, scratch, AsVIXLRegister(base.AsArm()), offs.Int32Value());  in CopyRef()
    282  asm_.MaybeUnpoisonHeapReference(scratch);  in CopyRef()
    284  asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value());  in CopyRef()
    [all …]
|
/art/compiler/optimizing/ |
D | parallel_move_resolver.cc
    264  int scratch = -1;  in AllocateScratchRegister() local
    267  scratch = reg;  in AllocateScratchRegister()
    272  if (scratch == -1) {  in AllocateScratchRegister()
    274  scratch = if_scratch;  in AllocateScratchRegister()
    279  return scratch;  in AllocateScratchRegister()
    381  for (Location scratch : scratches_) {  in AddScratchLocation() local
    382  CHECK(!loc.Equals(scratch));  in AddScratchLocation()
    458  Location scratch = AllocateScratchLocationFor(kind);  in PerformMove() local
    462  move->SetDestination(scratch);  in PerformMove()
    465  UpdateMoveSource(source, scratch);  in PerformMove()
    [all …]
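The AllocateScratchRegister fragments (lines 264-279) show a two-step policy: prefer a register that is currently free, and only fall back to the caller-supplied if_scratch register, which then has to be preserved around the move. A hedged, self-contained reconstruction of that control flow (parameter names and the availability test are assumptions; the real resolver inspects its pending moves):

    #include <vector>

    // Returns a register usable as a temporary for breaking move cycles.
    // `blocked` must not be chosen, `if_scratch` is the last-resort register,
    // and `*spilled` tells the caller whether it must save/restore it.
    int AllocateScratchRegister(int blocked, int register_count, int if_scratch,
                                const std::vector<bool>& in_use, bool* spilled) {
      int scratch = -1;
      for (int reg = 0; reg < register_count; ++reg) {
        if (reg != blocked && !in_use[reg]) {
          scratch = reg;       // found a register nobody is using
          break;
        }
      }
      *spilled = (scratch == -1);
      if (scratch == -1) {
        scratch = if_scratch;  // reuse a live register; caller spills it
      }
      return scratch;
    }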
|
D | code_generator_vector_arm_vixl.cc
    903  /*out*/ vixl32::Register* scratch) {  in VecAddress() argument
    919  *scratch = temps_scope->Acquire();  in VecAddress()
    920  __ Add(*scratch, base, Operand(RegisterFrom(index), ShiftType::LSL, shift));  in VecAddress()
    922  return MemOperand(*scratch, offset);  in VecAddress()
    929  /*out*/ vixl32::Register* scratch) {  in VecAddressUnaligned() argument
    943  __ Add(*scratch, base, offset);  in VecAddressUnaligned()
    945  *scratch = temps_scope->Acquire();  in VecAddressUnaligned()
    946  __ Add(*scratch, base, offset);  in VecAddressUnaligned()
    947  __ Add(*scratch, *scratch, Operand(RegisterFrom(index), ShiftType::LSL, shift));  in VecAddressUnaligned()
    949  return AlignedMemOperand(*scratch, kNoAlignment);  in VecAddressUnaligned()
    [all …]
|
D | parallel_move_test.cc
    116  Location scratch = GetScratchLocation(kind);  in AllocateScratchLocationFor() local
    117  if (scratch.Equals(Location::NoLocation())) {  in AllocateScratchLocationFor()
    121  scratch = (kind == Location::kRegister) ? Location::RegisterLocation(scratch_index_)  in AllocateScratchLocationFor()
    125  return scratch;  in AllocateScratchLocationFor()
|
D | code_generator_vector_arm64_sve.cc
    1402  Register scratch;  in VisitVecLoad() local
    1421  VecNeonAddress(instruction, &temps, 1, /*is_string_char_at*/ true, &scratch));  in VisitVecLoad()
    1424  if (scratch.IsValid()) {  in VisitVecLoad()
    1425  temps.Release(scratch);  // if used, no longer needed  in VisitVecLoad()
    1430  VecNeonAddress(instruction, &temps, size, /*is_string_char_at*/ true, &scratch));  in VisitVecLoad()
    1445  VecNeonAddress(instruction, &temps, size, instruction->IsStringCharAt(), &scratch));  in VisitVecLoad()
    1462  Register scratch;  in VisitVecStore() local
    1477  VecNeonAddress(instruction, &temps, size, /*is_string_char_at*/ false, &scratch));  in VisitVecStore()
|
D | code_generator_vector_arm64_neon.cc
    1402  Register scratch;  in VisitVecLoad() local
    1421  VecNeonAddress(instruction, &temps, 1, /*is_string_char_at*/ true, &scratch));  in VisitVecLoad()
    1424  if (scratch.IsValid()) {  in VisitVecLoad()
    1425  temps.Release(scratch);  // if used, no longer needed  in VisitVecLoad()
    1430  VecNeonAddress(instruction, &temps, size, /*is_string_char_at*/ true, &scratch));  in VisitVecLoad()
    1445  VecNeonAddress(instruction, &temps, size, instruction->IsStringCharAt(), &scratch));  in VisitVecLoad()
    1462  Register scratch;  in VisitVecStore() local
    1477  VecNeonAddress(instruction, &temps, size, /*is_string_char_at*/ false, &scratch));  in VisitVecStore()
|
D | code_generator_arm_vixl.h
    437  /*out*/ vixl32::Register* scratch);
    442  /*out*/ vixl32::Register* scratch);
|
D | code_generator_arm64.cc
    1091  Location scratch = GetScratchLocation(kind);  in AllocateScratchLocationFor() local
    1092  if (!scratch.Equals(Location::NoLocation())) {  in AllocateScratchLocationFor()
    1093  return scratch;  in AllocateScratchLocationFor()
    1097  scratch = LocationFrom(vixl_temps_.AcquireX());  in AllocateScratchLocationFor()
    1100  scratch = codegen_->GetGraph()->HasSIMD()  in AllocateScratchLocationFor()
    1104  AddScratchLocation(scratch);  in AllocateScratchLocationFor()
    1105  return scratch;  in AllocateScratchLocationFor()
    6650  /*out*/ Register* scratch) {  in VecNeonAddress() argument
    6672  *scratch = temps_scope->AcquireSameSizeAs(base);  in VecNeonAddress()
    6673  __ Add(*scratch, base, Operand(WRegisterFrom(index), LSL, shift));  in VecNeonAddress()
    [all …]
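Lines 1091-1105 show how the arm64 parallel-move resolver hands out scratch locations: reuse one already allocated for the current cycle if possible, otherwise acquire a fresh VIXL temporary and record it. A hedged sketch of the core-register path (the FP/SIMD branch behind line 1100 is elided, and names not present in the matches above are assumptions):

    // Return a scratch Location of the requested kind for breaking move cycles.
    Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
      Location scratch = GetScratchLocation(kind);
      if (!scratch.Equals(Location::NoLocation())) {
        return scratch;                                  // reuse the cached scratch
      }
      if (kind == Location::kRegister) {
        scratch = LocationFrom(vixl_temps_.AcquireX());  // fresh X-register temporary
      } else {
        // FP/SIMD path (line 1100 above) elided in this sketch; it depends on
        // whether the graph uses SIMD.
      }
      AddScratchLocation(scratch);                       // remember it so it can be released
      return scratch;
    }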
|
/art/test/626-checker-arm64-scratch-register/ |
D | info.txt | 1 Regression test checking that the ARM64 scratch register pool is not
|
/art/test/635-checker-arm64-volatile-load-cc/ |
D | info.txt | 1 Regression test checking that the VIXL ARM64 scratch register pool is
|
/art/test/572-checker-array-get-regression/ |
D | info.txt | 3 used to require too many scratch (temporary) registers.
|
/art/test/646-checker-arraycopy-large-cst-pos/ |
D | info.txt | 1 Regression test for an issue with a depleted VIXL scratch register
|
/art/runtime/ |
D | dexopt_test.cc
    117  std::optional<ScratchDir> scratch;  in GenerateOatForTest() local
    119  scratch.emplace();  // Create the scratch directory for the generated boot image.  in GenerateOatForTest()
    120  std::string alternate_image_location = GenerateAlternateImage(scratch->GetPath());  in GenerateOatForTest()
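Here the scratch directory is wrapped in std::optional so it is created only on the path that actually generates an alternate boot image, and destroyed automatically when the optional goes out of scope. A generic, self-contained sketch of that lazy-construction idiom (FakeScratchDir is a hypothetical stand-in for ART's ScratchDir):

    #include <optional>
    #include <string>

    // Stand-in for a RAII scratch-directory type: created in the constructor,
    // removed in the destructor.
    struct FakeScratchDir {
      std::string path_ = "/tmp/scratch";
      const std::string& GetPath() const { return path_; }
    };

    std::string MaybeBuildAlternateImage(bool need_alternate_image) {
      std::optional<FakeScratchDir> scratch;  // no directory created yet
      if (need_alternate_image) {
        scratch.emplace();                    // create it only when needed
        return scratch->GetPath() + "/boot.art";
      }
      return "";
    }                                         // directory cleaned up here if it was created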
|
/art/dex2oat/ |
D | dex2oat_image_test.cc
     89  ScratchDir scratch;  in CompileImageAndGetSizes() local
     90  std::string filename_prefix = scratch.GetPath() + "boot";  in CompileImageAndGetSizes()
    257  ScratchDir scratch;  in TEST_F() local
    258  const std::string& scratch_dir = scratch.GetPath();  in TEST_F()
|
/art/runtime/gc/space/ |
D | image_space_test.cc
    57  ScratchDir scratch;  in TEST_F() local
    58  const std::string& scratch_dir = scratch.GetPath();  in TEST_F()
|