Searched refs:GetVIXLAssembler (Results 1 – 19 of 19) sorted by relevance

/art/compiler/utils/arm/
jni_macro_assembler_arm_vixl.cc
37 #define ___ asm_.GetVIXLAssembler()->
184 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in RemoveFrame()
224 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in Store()
244 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in StoreRef()
251 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in StoreRawPtr()
261 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in StoreSpanning()
268 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in CopyRef()
278 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in CopyRef()
293 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in LoadRef()
314 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in StoreImmediateToFrame()
[all …]
assembler_arm_vixl.cc
388 UseScratchRegisterScope temps(GetVIXLAssembler()); in StoreRegisterList()
409 UseScratchRegisterScope temps(GetVIXLAssembler()); in LoadRegisterList()
assembler_arm_vixl.h
172 ArmVIXLMacroAssembler* GetVIXLAssembler() { return &vixl_masm_; } in GetVIXLAssembler() function
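
The accessor above is the hinge for every other hit in this list: the ART assembler wrapper owns a VIXL macro assembler (vixl_masm_) and hands out a pointer to it, which call sites either forward through the short ___ / __ macros or pass to a UseScratchRegisterScope. A minimal sketch of that recurring pattern against VIXL's aarch32 API; the class and method names are illustrative, not ART's:

    #include <cstdint>
    #include "aarch32/macro-assembler-aarch32.h"

    namespace vixl32 = vixl::aarch32;

    class AssemblerSketch {
     public:
      vixl32::MacroAssembler* GetVIXLAssembler() { return &vixl_masm_; }

      // Mirrors the Store()/StoreRef() hits above: borrow a scratch
      // register for the scope's lifetime; it is released automatically
      // when `temps` goes out of scope.
      void StoreZeroToFrame(int32_t offset) {
        vixl32::UseScratchRegisterScope temps(GetVIXLAssembler());
        vixl32::Register temp = temps.Acquire();
        GetVIXLAssembler()->Mov(temp, 0);
        GetVIXLAssembler()->Str(temp, vixl32::MemOperand(vixl32::sp, offset));
      }

     private:
      vixl32::MacroAssembler vixl_masm_;
    };
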
/art/compiler/optimizing/
intrinsics_arm64.cc
69 MacroAssembler* IntrinsicCodeGeneratorARM64::GetVIXLAssembler() { in GetVIXLAssembler() function in art::arm64::IntrinsicCodeGeneratorARM64
70 return codegen_->GetVIXLAssembler(); in GetVIXLAssembler()
81 #define __ codegen->GetVIXLAssembler()->
201 MoveFPToInt(invoke->GetLocations(), /* is64bit= */ true, GetVIXLAssembler()); in VisitDoubleDoubleToRawLongBits()
204 MoveIntToFP(invoke->GetLocations(), /* is64bit= */ true, GetVIXLAssembler()); in VisitDoubleLongBitsToDouble()
215 MoveFPToInt(invoke->GetLocations(), /* is64bit= */ false, GetVIXLAssembler()); in VisitFloatFloatToRawIntBits()
218 MoveIntToFP(invoke->GetLocations(), /* is64bit= */ false, GetVIXLAssembler()); in VisitFloatIntBitsToFloat()
262 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt32, GetVIXLAssembler()); in VisitIntegerReverseBytes()
270 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt64, GetVIXLAssembler()); in VisitLongReverseBytes()
278 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt16, GetVIXLAssembler()); in VisitShortReverseBytes()
[all …]
code_generator_arm_vixl.cc
102 #define __ down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler()-> // NOLINT
201 UseScratchRegisterScope temps(down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler()); in SaveContiguousSRegisterList()
249 UseScratchRegisterScope temps(down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler()); in RestoreContiguousSRegisterList()
1173 UseScratchRegisterScope temps(codegen->GetVIXLAssembler()); in GenerateLongDataProc()
1327 UseScratchRegisterScope temps(codegen->GetVIXLAssembler()); in GenerateLongTestConstant()
1379 ExactAssemblyScope guard(codegen->GetVIXLAssembler(), in GenerateLongTestConstant()
1455 ExactAssemblyScope guard(codegen->GetVIXLAssembler(), in GenerateLongTest()
1481 UseScratchRegisterScope temps(codegen->GetVIXLAssembler()); in GenerateLongTest()
1534 ExactAssemblyScope guard(codegen->GetVIXLAssembler(), in GenerateConditionGeneric()
1564 UseScratchRegisterScope temps(codegen->GetVIXLAssembler()); in GenerateEqualLong()
[all …]
intrinsics_arm64.h
80 vixl::aarch64::MacroAssembler* GetVIXLAssembler();
code_generator_arm64.cc
163 #define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()-> // NOLINT
183 MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler(); in SaveRestoreLiveRegistersHelper()
587 EmissionCheckScope scope(codegen->GetVIXLAssembler(), in EmitTable()
973 #define __ GetVIXLAssembler()->
1077 vixl_temps_.Open(GetVIXLAssembler()); in PrepareForEmitNativeCode()
1128 MacroAssembler* masm = GetVIXLAssembler(); in MaybeIncrementHotness()
1187 MacroAssembler* masm = GetVIXLAssembler(); in GenerateFrameEntry()
1199 ExactAssemblyScope eas(GetVIXLAssembler(), in GenerateFrameEntry()
1320 UseScratchRegisterScope temps(GetVIXLAssembler()); in MarkGCCard()
1549 UseScratchRegisterScope temps(GetVIXLAssembler()); in MoveLocation()
[all …]
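
Several hits above (GenerateFrameEntry, and the GenerateLongTestConstant/GenerateConditionGeneric hits in code_generator_arm_vixl.cc) wrap short fixed sequences in an ExactAssemblyScope, which asserts that exactly the stated number of bytes is emitted and keeps VIXL from interleaving literal pools or veneers. A hedged sketch against VIXL's aarch64 API; the function name is illustrative:

    #include "aarch64/macro-assembler-aarch64.h"

    // Emit exactly one A64 instruction; VIXL aborts if the scope sees
    // more or fewer bytes, and pools stay blocked while it is open.
    void EmitOneInstruction(vixl::aarch64::MacroAssembler* masm) {
      vixl::ExactAssemblyScope scope(masm,
                                     vixl::aarch64::kInstructionSize,
                                     vixl::CodeBufferCheckScope::kExactSize);
      // Only the low-level, one-to-one mnemonics (lower case) are legal
      // inside an ExactAssemblyScope; macro instructions are disallowed.
      masm->nop();
    }
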
intrinsics_arm_vixl.cc
39 #define __ assembler->GetVIXLAssembler()->
441 ExactAssemblyScope it_scope(assembler->GetVIXLAssembler(), in VisitMathRoundFloat()
620 UseScratchRegisterScope temps(assembler->GetVIXLAssembler()); in GenUnsafeGet()
788 UseScratchRegisterScope temps(assembler->GetVIXLAssembler()); in GenUnsafePut()
951 UseScratchRegisterScope temps(assembler->GetVIXLAssembler()); in EmitNativeCode()
970 ExactAssemblyScope aas(assembler->GetVIXLAssembler(), 2 * k16BitT32InstructionSizeInBytes); in EmitNativeCode()
1045 static_cast<vixl32::MacroAssembler*>(assembler->GetVIXLAssembler())-> in GenCas()
1179 ExactAssemblyScope aas(assembler->GetVIXLAssembler(), in VisitStringCompareTo()
1201 ExactAssemblyScope aas(assembler->GetVIXLAssembler(), in VisitStringCompareTo()
1252 UseScratchRegisterScope temps(assembler->GetVIXLAssembler()); in GenerateStringCompareToLoop()
[all …]
code_generator_arm64.h
305 vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); } in GetVIXLAssembler() function
520 vixl::aarch64::MacroAssembler* GetVIXLAssembler() const { in GetVIXLAssembler() function
521 return GetAssembler()->GetVIXLAssembler(); in GetVIXLAssembler()
583 vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); } in GetVIXLAssembler() function
909 CHECK_EQ(GetVIXLAssembler()->ArePoolsBlocked(), true) in MaybeRecordImplicitNullCheck()
optimizing_cfi_test.cc
194 ->GetAssembler())->GetVIXLAssembler()-> in TEST_ISA()
code_generator_arm_vixl.h
343 ArmVIXLMacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); } in GetVIXLAssembler() function
492 ArmVIXLMacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); } in GetVIXLAssembler() function
788 CHECK_EQ(GetVIXLAssembler()->ArePoolsBlocked(), true) in MaybeRecordImplicitNullCheck()
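The two MaybeRecordImplicitNullCheck hits (here and in code_generator_arm64.h above) assert ArePoolsBlocked() before recording the PC of a possibly-faulting instruction: if VIXL were free to emit a constant pool at that point, the recorded offset could land on pool data rather than on the load. A sketch of the invariant, with illustrative names; EmissionCheckScope is the VIXL scope that blocks pools while still allowing macro instructions:

    #include "aarch64/macro-assembler-aarch64.h"

    void LoadMaybeNull(vixl::aarch64::MacroAssembler* masm) {
      // Pools are blocked for the scope's lifetime, so the load below is
      // guaranteed to sit at the current buffer offset.
      vixl::EmissionCheckScope guard(masm, vixl::aarch64::kInstructionSize);
      masm->Ldr(vixl::aarch64::x0,
                vixl::aarch64::MemOperand(vixl::aarch64::x1));
      VIXL_ASSERT(masm->ArePoolsBlocked());  // the CHECK_EQ above, in VIXL terms
    }
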
code_generator_vector_arm_vixl.cc
34 #define __ GetVIXLAssembler()->
839 UseScratchRegisterScope temps(GetVIXLAssembler()); in VisitVecSADAccumulate()
958 UseScratchRegisterScope temps(GetVIXLAssembler()); in VisitVecLoad()
1009 UseScratchRegisterScope temps(GetVIXLAssembler()); in VisitVecStore()
code_generator_vector_arm64_sve.cc
41 #define __ GetVIXLAssembler()->
1401 UseScratchRegisterScope temps(GetVIXLAssembler()); in VisitVecLoad()
1461 UseScratchRegisterScope temps(GetVIXLAssembler()); in VisitVecStore()
1548 UseScratchRegisterScope temps(GetVIXLAssembler()); in MoveToSIMDStackSlot()
1549 if (GetVIXLAssembler()->GetScratchVRegisterList()->IsEmpty()) { in MoveToSIMDStackSlot()
code_generator_vector_arm64_neon.cc
41 #define __ GetVIXLAssembler()->
1401 UseScratchRegisterScope temps(GetVIXLAssembler()); in VisitVecLoad()
1461 UseScratchRegisterScope temps(GetVIXLAssembler()); in VisitVecStore()
1548 UseScratchRegisterScope temps(GetVIXLAssembler()); in MoveToSIMDStackSlot()
1549 if (GetVIXLAssembler()->GetScratchVRegisterList()->IsEmpty()) { in MoveToSIMDStackSlot()
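The MoveToSIMDStackSlot hits in both the SVE and NEON variants first ask whether any scratch V register is left before acquiring one, and fall back to core registers otherwise. A hedged sketch of that fallback, assuming 128-bit stack slots addressed off sp; the function name and offsets are illustrative:

    #include "aarch64/macro-assembler-aarch64.h"

    namespace a64 = vixl::aarch64;

    void MoveSIMDSlot(a64::MacroAssembler* masm, int src_off, int dst_off) {
      a64::UseScratchRegisterScope temps(masm);
      if (masm->GetScratchVRegisterList()->IsEmpty()) {
        // No FP scratch register available: copy the 128 bits through a
        // core scratch register in two 64-bit halves.
        a64::Register tmp = temps.AcquireX();
        masm->Ldr(tmp, a64::MemOperand(a64::sp, src_off));
        masm->Str(tmp, a64::MemOperand(a64::sp, dst_off));
        masm->Ldr(tmp, a64::MemOperand(a64::sp, src_off + 8));
        masm->Str(tmp, a64::MemOperand(a64::sp, dst_off + 8));
      } else {
        // One Q-sized load/store pair moves the whole slot.
        a64::VRegister tmp = temps.AcquireVRegisterOfSize(a64::kQRegSize);
        masm->Ldr(tmp, a64::MemOperand(a64::sp, src_off));
        masm->Str(tmp, a64::MemOperand(a64::sp, dst_off));
      }
    }
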
codegen_test.cc
834 vixl::CPUFeatures* features = codegen.GetVIXLAssembler()->GetCPUFeatures(); in TEST_F()
849 vixl::CPUFeatures* features = codegen.GetVIXLAssembler()->GetCPUFeatures(); in TEST_F()
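
The codegen_test.cc hits read the CPU feature set off the assembler so a test can check or widen what it is allowed to emit. A short sketch using VIXL's CPUFeatures API; the helper name is made up:

    #include "aarch64/macro-assembler-aarch64.h"

    // Turn on SVE in the assembler's feature set if it is missing.
    void EnsureSVE(vixl::aarch64::MacroAssembler* masm) {
      vixl::CPUFeatures* features = masm->GetCPUFeatures();
      if (!features->Has(vixl::CPUFeatures::kSVE)) {
        features->Combine(vixl::CPUFeatures::kSVE);
      }
    }
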
/art/compiler/utils/arm64/
jni_macro_assembler_arm64.cc
32 #define ___ asm_.GetVIXLAssembler()->
94 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in AddConstant()
166 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in StoreImmediateToFrame()
173 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in StoreStackOffsetToThread()
180 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in StoreStackPointerToThread()
200 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in LoadImmediate()
317 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in LoadRawPtr()
415 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in Move()
454 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in CopyRawPtrFromThread()
470 UseScratchRegisterScope temps(asm_.GetVIXLAssembler()); in CopyRef()
[all …]
assembler_arm64.h
84 vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return &vixl_masm_; } in GetVIXLAssembler() function
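
The arm64 wrapper repeats the aarch32 shape: assembler_arm64.h exposes the owned vixl_masm_, and the jni_macro_assembler_arm64.cc helpers above acquire typed (X/W) scratch registers from it. A one-function sketch, names illustrative:

    #include <cstdint>
    #include "aarch64/macro-assembler-aarch64.h"

    void StoreImmediateToFrame(vixl::aarch64::MacroAssembler* masm,
                               uint64_t value, int offset) {
      vixl::aarch64::UseScratchRegisterScope temps(masm);
      vixl::aarch64::Register scratch = temps.AcquireX();  // 64-bit temp
      masm->Mov(scratch, value);
      masm->Str(scratch,
                vixl::aarch64::MemOperand(vixl::aarch64::sp, offset));
    }
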
/art/compiler/trampolines/
trampoline_compiler.cc
49 #define ___ assembler.GetVIXLAssembler()->
64 vixl::aarch32::UseScratchRegisterScope temps(assembler.GetVIXLAssembler()); in CreateTrampoline()
/art/compiler/utils/
assembler_thumb_test.cc
224 vixl::aarch32::UseScratchRegisterScope temps(assembler.asm_.GetVIXLAssembler()); in TEST_F()
257 vixl::aarch32::UseScratchRegisterScope temps(assembler.asm_.GetVIXLAssembler()); in TEST_F()