/art/compiler/utils/arm/

D | jni_macro_assembler_arm_vixl.cc |
    37   #define ___ asm_.GetVIXLAssembler()->
    184  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());  in RemoveFrame()
    224  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());  in Store()
    244  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());  in StoreRef()
    251  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());  in StoreRawPtr()
    261  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());  in StoreSpanning()
    268  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());  in CopyRef()
    278  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());  in CopyRef()
    293  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());  in LoadRef()
    314  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());  in StoreImmediateToFrame()
    [all …]

D | assembler_arm_vixl.cc |
    388  UseScratchRegisterScope temps(GetVIXLAssembler());  in StoreRegisterList()
    409  UseScratchRegisterScope temps(GetVIXLAssembler());  in LoadRegisterList()

D | assembler_arm_vixl.h |
    172  ArmVIXLMacroAssembler* GetVIXLAssembler() { return &vixl_masm_; }  in GetVIXLAssembler() function

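The hits above all follow one idiom: a thin assembler wrapper owns a vixl_masm_ member, exposes it through GetVIXLAssembler(), and each call site opens a UseScratchRegisterScope on it before borrowing a temporary register. Below is a minimal sketch of that idiom, assuming a hypothetical ArmAssemblerWrapper class and LoadFromFrame helper; the VIXL aarch32 types and calls are real, but this is not the ART implementation.

    #include "aarch32/macro-assembler-aarch32.h"

    using namespace vixl::aarch32;

    class ArmAssemblerWrapper {  // hypothetical stand-in for ART's ArmVIXLAssembler
     public:
      MacroAssembler* GetVIXLAssembler() { return &vixl_masm_; }
     private:
      MacroAssembler vixl_masm_;
    };

    // Shorthand in the style of the `___` define seen in the hits above.
    #define ___ asm_.GetVIXLAssembler()->

    void LoadFromFrame(ArmAssemblerWrapper& asm_, Register dest, int32_t offset) {
      // Borrow a caller-invisible temporary; it returns to the pool at scope exit.
      UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
      Register scratch = temps.Acquire();
      ___ Add(scratch, sp, offset);        // scratch = sp + offset (handles large offsets)
      ___ Ldr(dest, MemOperand(scratch));  // dest = *(sp + offset)
    }
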
/art/compiler/optimizing/

D | intrinsics_arm64.cc |
    69   MacroAssembler* IntrinsicCodeGeneratorARM64::GetVIXLAssembler() {  in GetVIXLAssembler() function in art::arm64::IntrinsicCodeGeneratorARM64
    70   return codegen_->GetVIXLAssembler();  in GetVIXLAssembler()
    81   #define __ codegen->GetVIXLAssembler()->
    201  MoveFPToInt(invoke->GetLocations(), /* is64bit= */ true, GetVIXLAssembler());  in VisitDoubleDoubleToRawLongBits()
    204  MoveIntToFP(invoke->GetLocations(), /* is64bit= */ true, GetVIXLAssembler());  in VisitDoubleLongBitsToDouble()
    215  MoveFPToInt(invoke->GetLocations(), /* is64bit= */ false, GetVIXLAssembler());  in VisitFloatFloatToRawIntBits()
    218  MoveIntToFP(invoke->GetLocations(), /* is64bit= */ false, GetVIXLAssembler());  in VisitFloatIntBitsToFloat()
    262  GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt32, GetVIXLAssembler());  in VisitIntegerReverseBytes()
    270  GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt64, GetVIXLAssembler());  in VisitLongReverseBytes()
    278  GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt16, GetVIXLAssembler());  in VisitShortReverseBytes()
    [all …]

D | code_generator_arm_vixl.cc |
    102   #define __ down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler()->  // NOLINT
    201   UseScratchRegisterScope temps(down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler());  in SaveContiguousSRegisterList()
    249   UseScratchRegisterScope temps(down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler());  in RestoreContiguousSRegisterList()
    1173  UseScratchRegisterScope temps(codegen->GetVIXLAssembler());  in GenerateLongDataProc()
    1327  UseScratchRegisterScope temps(codegen->GetVIXLAssembler());  in GenerateLongTestConstant()
    1379  ExactAssemblyScope guard(codegen->GetVIXLAssembler(),  in GenerateLongTestConstant()
    1455  ExactAssemblyScope guard(codegen->GetVIXLAssembler(),  in GenerateLongTest()
    1481  UseScratchRegisterScope temps(codegen->GetVIXLAssembler());  in GenerateLongTest()
    1534  ExactAssemblyScope guard(codegen->GetVIXLAssembler(),  in GenerateConditionGeneric()
    1564  UseScratchRegisterScope temps(codegen->GetVIXLAssembler());  in GenerateEqualLong()
    [all …]

D | intrinsics_arm64.h |
    80  vixl::aarch64::MacroAssembler* GetVIXLAssembler();

D | code_generator_arm64.cc |
    163   #define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->  // NOLINT
    183   MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();  in SaveRestoreLiveRegistersHelper()
    587   EmissionCheckScope scope(codegen->GetVIXLAssembler(),  in EmitTable()
    973   #define __ GetVIXLAssembler()->
    1077  vixl_temps_.Open(GetVIXLAssembler());  in PrepareForEmitNativeCode()
    1128  MacroAssembler* masm = GetVIXLAssembler();  in MaybeIncrementHotness()
    1187  MacroAssembler* masm = GetVIXLAssembler();  in GenerateFrameEntry()
    1199  ExactAssemblyScope eas(GetVIXLAssembler(),  in GenerateFrameEntry()
    1320  UseScratchRegisterScope temps(GetVIXLAssembler());  in MarkGCCard()
    1549  UseScratchRegisterScope temps(GetVIXLAssembler());  in MoveLocation()
    [all …]

D | intrinsics_arm_vixl.cc |
    39    #define __ assembler->GetVIXLAssembler()->
    441   ExactAssemblyScope it_scope(assembler->GetVIXLAssembler(),  in VisitMathRoundFloat()
    620   UseScratchRegisterScope temps(assembler->GetVIXLAssembler());  in GenUnsafeGet()
    788   UseScratchRegisterScope temps(assembler->GetVIXLAssembler());  in GenUnsafePut()
    951   UseScratchRegisterScope temps(assembler->GetVIXLAssembler());  in EmitNativeCode()
    970   ExactAssemblyScope aas(assembler->GetVIXLAssembler(), 2 * k16BitT32InstructionSizeInBytes);  in EmitNativeCode()
    1045  static_cast<vixl32::MacroAssembler*>(assembler->GetVIXLAssembler())->  in GenCas()
    1179  ExactAssemblyScope aas(assembler->GetVIXLAssembler(),  in VisitStringCompareTo()
    1201  ExactAssemblyScope aas(assembler->GetVIXLAssembler(),  in VisitStringCompareTo()
    1252  UseScratchRegisterScope temps(assembler->GetVIXLAssembler());  in GenerateStringCompareToLoop()
    [all …]

D | code_generator_arm64.h |
    305  vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); }  in GetVIXLAssembler() function
    520  vixl::aarch64::MacroAssembler* GetVIXLAssembler() const {  in GetVIXLAssembler() function
    521  return GetAssembler()->GetVIXLAssembler();  in GetVIXLAssembler()
    583  vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); }  in GetVIXLAssembler() function
    909  CHECK_EQ(GetVIXLAssembler()->ArePoolsBlocked(), true)  in MaybeRecordImplicitNullCheck()

D | optimizing_cfi_test.cc |
    194  ->GetAssembler())->GetVIXLAssembler()->  in TEST_ISA()

D | code_generator_arm_vixl.h |
    343  ArmVIXLMacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); }  in GetVIXLAssembler() function
    492  ArmVIXLMacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); }  in GetVIXLAssembler() function
    788  CHECK_EQ(GetVIXLAssembler()->ArePoolsBlocked(), true)  in MaybeRecordImplicitNullCheck()

D | code_generator_vector_arm_vixl.cc |
    34    #define __ GetVIXLAssembler()->
    839   UseScratchRegisterScope temps(GetVIXLAssembler());  in VisitVecSADAccumulate()
    958   UseScratchRegisterScope temps(GetVIXLAssembler());  in VisitVecLoad()
    1009  UseScratchRegisterScope temps(GetVIXLAssembler());  in VisitVecStore()

D | code_generator_vector_arm64_sve.cc |
    41    #define __ GetVIXLAssembler()->
    1401  UseScratchRegisterScope temps(GetVIXLAssembler());  in VisitVecLoad()
    1461  UseScratchRegisterScope temps(GetVIXLAssembler());  in VisitVecStore()
    1548  UseScratchRegisterScope temps(GetVIXLAssembler());  in MoveToSIMDStackSlot()
    1549  if (GetVIXLAssembler()->GetScratchVRegisterList()->IsEmpty()) {  in MoveToSIMDStackSlot()

D | code_generator_vector_arm64_neon.cc |
    41    #define __ GetVIXLAssembler()->
    1401  UseScratchRegisterScope temps(GetVIXLAssembler());  in VisitVecLoad()
    1461  UseScratchRegisterScope temps(GetVIXLAssembler());  in VisitVecStore()
    1548  UseScratchRegisterScope temps(GetVIXLAssembler());  in MoveToSIMDStackSlot()
    1549  if (GetVIXLAssembler()->GetScratchVRegisterList()->IsEmpty()) {  in MoveToSIMDStackSlot()

D | codegen_test.cc |
    834  vixl::CPUFeatures* features = codegen.GetVIXLAssembler()->GetCPUFeatures();  in TEST_F()
    849  vixl::CPUFeatures* features = codegen.GetVIXLAssembler()->GetCPUFeatures();  in TEST_F()

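Besides scratch-register scopes, the optimizing-backend hits lean on VIXL's emission guards: EmissionCheckScope and ExactAssemblyScope bound how many bytes the enclosed code may emit and block literal/veneer pool emission, which is what the CHECK_EQ(GetVIXLAssembler()->ArePoolsBlocked(), true) assertions in MaybeRecordImplicitNullCheck() rely on. Below is a minimal sketch of the ExactAssemblyScope idiom; the EmitLoadWithNullCheck helper is hypothetical, while the VIXL aarch64 types and constants are real.

    #include "aarch64/macro-assembler-aarch64.h"

    using namespace vixl::aarch64;

    void EmitLoadWithNullCheck(MacroAssembler* masm, const Register& dst,
                               const Register& obj) {
      // Exactly one 4-byte instruction may be emitted inside this scope; VIXL
      // aborts if the enclosed code is larger or smaller, and pools are blocked
      // for its duration (cf. the ArePoolsBlocked() CHECKs above).
      ExactAssemblyScope scope(masm, kInstructionSize,
                               CodeBufferCheckScope::kExactSize);
      // Only raw, fixed-size mnemonics are allowed here, since macro
      // instructions may expand to a variable number of instructions.
      masm->ldr(dst, MemOperand(obj));  // the PC recorded for this load is stable
    }

Because the load's offset from the code start cannot shift afterwards, a codegen that records this PC (e.g. for an implicit null check, where a fault at this address is turned into a NullPointerException) can trust the recorded value.
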
/art/compiler/utils/arm64/

D | jni_macro_assembler_arm64.cc |
    32   #define ___ asm_.GetVIXLAssembler()->
    94   UseScratchRegisterScope temps(asm_.GetVIXLAssembler());  in AddConstant()
    166  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());  in StoreImmediateToFrame()
    173  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());  in StoreStackOffsetToThread()
    180  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());  in StoreStackPointerToThread()
    200  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());  in LoadImmediate()
    317  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());  in LoadRawPtr()
    415  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());  in Move()
    454  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());  in CopyRawPtrFromThread()
    470  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());  in CopyRef()
    [all …]

D | assembler_arm64.h |
    84  vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return &vixl_masm_; }  in GetVIXLAssembler() function

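The arm64 JNI macro assembler repeats the same borrow-a-temporary shape as its arm counterpart, but the aarch64 UseScratchRegisterScope hands out sized registers via AcquireX()/AcquireW(). A minimal sketch, assuming a hypothetical StoreConstantToFrame helper; the VIXL aarch64 calls are real.

    #include "aarch64/macro-assembler-aarch64.h"

    using namespace vixl::aarch64;

    void StoreConstantToFrame(MacroAssembler* masm, uint32_t imm, int32_t offset) {
      UseScratchRegisterScope temps(masm);
      Register scratch = temps.AcquireW();         // borrow a 32-bit temporary
      masm->Mov(scratch, imm);                     // materialize the constant
      masm->Str(scratch, MemOperand(sp, offset));  // spill it to the frame slot
      // scratch returns to the scratch pool when temps goes out of scope
    }
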
/art/compiler/trampolines/

D | trampoline_compiler.cc |
    49  #define ___ assembler.GetVIXLAssembler()->
    64  vixl::aarch32::UseScratchRegisterScope temps(assembler.GetVIXLAssembler());  in CreateTrampoline()

/art/compiler/utils/

D | assembler_thumb_test.cc |
    224  vixl::aarch32::UseScratchRegisterScope temps(assembler.asm_.GetVIXLAssembler());  in TEST_F()
    257  vixl::aarch32::UseScratchRegisterScope temps(assembler.asm_.GetVIXLAssembler());  in TEST_F()