Searched refs:GetAssembler (Results 1 – 25 of 28) sorted by relevance

/art/compiler/utils/x86/
assembler_x86_test.cc
279 GetAssembler()->LoadLongConstant(x86::XMM0, 51); in TEST_F()
315 GetAssembler()->repne_scasb(); in TEST_F()
321 GetAssembler()->repne_scasw(); in TEST_F()
327 GetAssembler()->repe_cmpsb(); in TEST_F()
333 GetAssembler()->repe_cmpsw(); in TEST_F()
339 GetAssembler()->repe_cmpsl(); in TEST_F()
345 GetAssembler()->rep_movsb(); in TEST_F()
351 GetAssembler()->rep_movsw(); in TEST_F()
467 GetAssembler()->cmovl(x86::kEqual, x86::Register(x86::EAX), x86::Address( in TEST_F()
469 GetAssembler()->cmovl(x86::kNotEqual, x86::Register(x86::EDI), x86::Address( in TEST_F()
[all …]
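
The hits above all follow ART's AssemblerTest fixture pattern: the test owns an assembler, emits instructions through GetAssembler(), and DriverStr() checks the encoded bytes against reference assembly text. A minimal sketch of that shape (the fixture and test names are illustrative; treat the exact expected-string format as an assumption):

    TEST_F(AssemblerX86Test, RepneScasb) {
      GetAssembler()->repne_scasb();            // emit the instruction under test
      const char* expected = "repne scasb\n";   // reference text for the checker
      DriverStr(expected, "repne_scasb");       // assemble, encode, and compare
    }
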
/art/compiler/optimizing/
intrinsics_arm_vixl.cc
61 ArmVIXLAssembler* IntrinsicCodeGeneratorARMVIXL::GetAssembler() { in GetAssembler() function in art::arm::IntrinsicCodeGeneratorARMVIXL
62 return codegen_->GetAssembler(); in GetAssembler()
128 ArmVIXLAssembler* assembler = arm_codegen->GetAssembler(); in EmitNativeCode()
197 assembler_(codegen->GetAssembler()), in IntrinsicLocationsBuilderARMVIXL()
251 MoveFPToInt(invoke->GetLocations(), /* is64bit= */ true, GetAssembler()); in VisitDoubleDoubleToRawLongBits()
254 MoveIntToFP(invoke->GetLocations(), /* is64bit= */ true, GetAssembler()); in VisitDoubleLongBitsToDouble()
265 MoveFPToInt(invoke->GetLocations(), /* is64bit= */ false, GetAssembler()); in VisitFloatFloatToRawIntBits()
268 MoveIntToFP(invoke->GetLocations(), /* is64bit= */ false, GetAssembler()); in VisitFloatIntBitsToFloat()
295 ArmVIXLAssembler* assembler = codegen->GetAssembler(); in GenNumberOfLeadingZeros()
340 ArmVIXLAssembler* assembler = codegen->GetAssembler(); in GenNumberOfTrailingZeros()
[all …]
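
Pieced together, these hits show the standard wiring for intrinsic code generators: the generator exposes its codegen's assembler, and each visitor is a one-line wrapper forwarding the invoke's locations plus that assembler into a shared static helper. A reconstruction of the glue from the lines above (helper bodies omitted):

    ArmVIXLAssembler* IntrinsicCodeGeneratorARMVIXL::GetAssembler() {
      return codegen_->GetAssembler();  // the codegen owns the real assembler
    }

    void IntrinsicCodeGeneratorARMVIXL::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
      MoveFPToInt(invoke->GetLocations(), /* is64bit= */ true, GetAssembler());
    }
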
intrinsics_x86_64.cc
47 X86_64Assembler* IntrinsicCodeGeneratorX86_64::GetAssembler() { in GetAssembler() function in art::x86_64::IntrinsicCodeGeneratorX86_64
48 return down_cast<X86_64Assembler*>(codegen_->GetAssembler()); in GetAssembler()
67 #define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())-> // NOLINT
160 MoveFPToInt(invoke->GetLocations(), /* is64bit= */ true, GetAssembler()); in VisitDoubleDoubleToRawLongBits()
163 MoveIntToFP(invoke->GetLocations(), /* is64bit= */ true, GetAssembler()); in VisitDoubleLongBitsToDouble()
174 MoveFPToInt(invoke->GetLocations(), /* is64bit= */ false, GetAssembler()); in VisitFloatFloatToRawIntBits()
177 MoveIntToFP(invoke->GetLocations(), /* is64bit= */ false, GetAssembler()); in VisitFloatIntBitsToFloat()
215 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler()); in VisitIntegerReverseBytes()
223 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler()); in VisitLongReverseBytes()
231 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt16, GetAssembler()); in VisitShortReverseBytes()
[all …]
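
The "#define __" at line 67 is ART's conventional shorthand that makes emission code read like assembly while hiding the cast from the architecture-neutral Assembler* down to the concrete backend type; the down_cast is safe because CodeGeneratorX86_64 always owns an X86_64Assembler. A usage sketch (EmitExample is a hypothetical helper, not a function in this file):

    #define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())->  // NOLINT

    void EmitExample(CodeGeneratorX86_64* codegen) {  // hypothetical
      // expands to: down_cast<X86_64Assembler*>(codegen->GetAssembler())->movl(...)
      __ movl(CpuRegister(RAX), Immediate(0));
    }
    #undef __
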
optimizing_cfi_test.cc
60 code_gen_->GetAssembler()->cfi().SetEnabled(true); in SetUpFrame()
100 Assembler* opt_asm = code_gen_->GetAssembler(); in Check()
194 ->GetAssembler())->GetVIXLAssembler()-> in TEST_ISA()
code_generator_arm_vixl.cc
337 arm_codegen->GetAssembler()->StoreRegisterList(core_spills, orig_offset); in SaveLiveRegisters()
371 arm_codegen->GetAssembler()->LoadRegisterList(core_spills, orig_offset); in RestoreLiveRegisters()
1929 #define __ reinterpret_cast<ArmVIXLAssembler*>(GetAssembler())->GetVIXLAssembler()-> // NOLINT
1940 CompileBakerReadBarrierThunk(*GetAssembler(), encoded_data, /* debug_name= */ nullptr); in Finalize()
1943 GetAssembler()->FinalizeCode(); in Finalize()
2061 assembler_(codegen->GetAssembler()), in InstructionCodeGeneratorARMVIXL()
2091 GetAssembler()->cfi().AdjustCFAOffset(kArmWordSize); in MaybeIncrementHotness()
2092 GetAssembler()->LoadFromOffset(kLoadWord, kMethodRegister, sp, kArmWordSize); in MaybeIncrementHotness()
2102 GetAssembler()->cfi().AdjustCFAOffset(-static_cast<int>(kArmWordSize)); in MaybeIncrementHotness()
2116 GetAssembler()->cfi().AdjustCFAOffset(kArmWordSize); in MaybeIncrementHotness()
[all …]
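
Lines 2091-2116 illustrate the invariant that every stack-pointer adjustment the backend emits is mirrored into the assembler's CFI stream, so unwind information stays correct even inside helper sequences like the hotness counter. In outline (a reconstruction from the hits; the pushes, pops, and counter update are elided):

    // a scratch register was pushed: tell CFI the CFA moved by one word
    GetAssembler()->cfi().AdjustCFAOffset(kArmWordSize);
    GetAssembler()->LoadFromOffset(kLoadWord, kMethodRegister, sp, kArmWordSize);
    // ... bump the hotness counter ...
    // the register was popped: move the CFA back
    GetAssembler()->cfi().AdjustCFAOffset(-static_cast<int>(kArmWordSize));
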
intrinsics_x86.cc
49 X86Assembler* IntrinsicCodeGeneratorX86::GetAssembler() { in GetAssembler() function in art::x86::IntrinsicCodeGeneratorX86
50 return down_cast<X86Assembler*>(codegen_->GetAssembler()); in GetAssembler()
69 #define __ down_cast<X86Assembler*>(codegen->GetAssembler())-> // NOLINT
228 MoveFPToInt(invoke->GetLocations(), /* is64bit= */ true, GetAssembler()); in VisitDoubleDoubleToRawLongBits()
231 MoveIntToFP(invoke->GetLocations(), /* is64bit= */ true, GetAssembler()); in VisitDoubleLongBitsToDouble()
242 MoveFPToInt(invoke->GetLocations(), /* is64bit= */ false, GetAssembler()); in VisitFloatFloatToRawIntBits()
245 MoveIntToFP(invoke->GetLocations(), /* is64bit= */ false, GetAssembler()); in VisitFloatIntBitsToFloat()
294 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler()); in VisitIntegerReverseBytes()
310 X86Assembler* assembler = GetAssembler(); in VisitLongReverseBytes()
323 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt16, GetAssembler()); in VisitShortReverseBytes()
[all …]
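
GenReverseBytes, invoked above with a DataType::Type selector, implements one byte-reversal intrinsic per integral width. A plausible x86 shape consistent with those call sites (a sketch, not a copy of the real body):

    static void GenReverseBytes(LocationSummary* locations,
                                DataType::Type type,
                                X86Assembler* assembler) {
      Register out = locations->Out().AsRegister<Register>();
      switch (type) {
        case DataType::Type::kInt16:
          assembler->bswapl(out);               // swap all four bytes...
          assembler->sarl(out, Immediate(16));  // ...then sign-extend the top two down
          break;
        case DataType::Type::kInt32:
          assembler->bswapl(out);
          break;
        default:
          LOG(FATAL) << "Unexpected type " << type;
      }
    }
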
intrinsics_arm_vixl.h
72 ArmVIXLAssembler* GetAssembler();
intrinsics_x86_64.h
72 X86_64Assembler* GetAssembler();
intrinsics_x86.h
72 X86Assembler* GetAssembler();
intrinsics_utils.h
54 TAssembler* assembler = down_cast<TAssembler*>(codegen->GetAssembler()); in EmitNativeCode()
code_generator_arm64.h
304 Arm64Assembler* GetAssembler() const { return assembler_; } in GetAssembler() function
305 vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); } in GetVIXLAssembler()
519 Arm64Assembler* GetAssembler() const;
521 return GetAssembler()->GetVIXLAssembler(); in GetVIXLAssembler()
581 Arm64Assembler* GetAssembler() override { return &assembler_; } in GetAssembler() function
582 const Arm64Assembler& GetAssembler() const override { return assembler_; } in GetAssembler() function
583 vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); } in GetVIXLAssembler()
1088 inline Arm64Assembler* ParallelMoveResolverARM64::GetAssembler() const { in GetAssembler() function
1089 return codegen_->GetAssembler(); in GetAssembler()
code_generator.cc
334 start_offset_ = codegen_.GetAssembler().CodeSize(); in DisassemblyScope()
342 instruction_, start_offset_, codegen_.GetAssembler().CodeSize()); in ~DisassemblyScope()
360 code_start = GetAssembler()->CodeSize(); in GenerateSlowPaths()
366 disasm_info_->AddSlowPathInterval(slow_path, code_start, GetAssembler()->CodeSize()); in GenerateSlowPaths()
394 size_t frame_start = GetAssembler()->CodeSize(); in Compile()
396 DCHECK_EQ(GetAssembler()->cfi().GetCurrentCFAOffset(), static_cast<int>(frame_size_)); in Compile()
398 disasm_info_->SetFrameEntryInterval(frame_start, GetAssembler()->CodeSize()); in Compile()
441 size_t code_size = GetAssembler()->CodeSize(); in Finalize()
445 GetAssembler()->FinalizeInstructions(code); in Finalize()
1165 RecordPcInfo(instruction, dex_pc, GetAssembler()->CodePosition(), slow_path, native_debug_info); in RecordPcInfo()
[all …]
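
Lines 334 and 342 are the two ends of an RAII scope that brackets each instruction's emission with CodeSize() samples, giving the disassembler a byte interval per HInstruction. Reconstructed shape (the interval-recording call is cut off in the hit, so its name here is an assumption):

    class DisassemblyScope {
     public:
      DisassemblyScope(HInstruction* instruction, CodeGenerator& codegen)
          : codegen_(codegen),
            instruction_(instruction),
            start_offset_(codegen.GetAssembler().CodeSize()) {}  // offset at entry

      ~DisassemblyScope() {
        // record [start, current) for this instruction (call name assumed)
        codegen_.GetDisassemblyInformation()->AddInstructionInterval(
            instruction_, start_offset_, codegen_.GetAssembler().CodeSize());
      }

     private:
      CodeGenerator& codegen_;
      HInstruction* instruction_;
      size_t start_offset_;
    };
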
code_generator_x86.h
161 X86Assembler* GetAssembler() const;
228 X86Assembler* GetAssembler() const { return assembler_; } in GetAssembler() function
405 X86Assembler* GetAssembler() override { in GetAssembler() function
409 const X86Assembler& GetAssembler() const override { in GetAssembler() function
code_generator_x86_64.h
162 X86_64Assembler* GetAssembler() const;
232 X86_64Assembler* GetAssembler() const { return assembler_; } in GetAssembler() function
388 X86_64Assembler* GetAssembler() override { in GetAssembler() function
392 const X86_64Assembler& GetAssembler() const override { in GetAssembler() function
code_generator_arm_vixl.h
274 ArmVIXLAssembler* GetAssembler() const;
342 ArmVIXLAssembler* GetAssembler() const { return assembler_; } in GetAssembler() function
343 ArmVIXLMacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); } in GetVIXLAssembler()
488 ArmVIXLAssembler* GetAssembler() override { return &assembler_; } in GetAssembler() function
490 const ArmVIXLAssembler& GetAssembler() const override { return assembler_; } in GetAssembler() function
492 ArmVIXLMacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); } in GetVIXLAssembler()
code_generator_arm64.cc
990 CompileBakerReadBarrierThunk(*GetAssembler(), encoded_data, /* debug_name= */ nullptr); in Finalize()
1237 GetAssembler()->cfi().AdjustCFAOffset(frame_size); in GenerateFrameEntry()
1239 GetAssembler()->cfi().RelOffset(DWARFReg(lowest_spill), core_spills_offset); in GenerateFrameEntry()
1242 GetAssembler()->SpillRegisters(preserved_core_registers, core_spills_offset); in GenerateFrameEntry()
1243 GetAssembler()->SpillRegisters(preserved_fp_registers, fp_spills_offset); in GenerateFrameEntry()
1256 GetAssembler()->cfi().RememberState(); in GenerateFrameExit()
1274 GetAssembler()->UnspillRegisters(preserved_fp_registers, fp_spills_offset); in GenerateFrameExit()
1275 GetAssembler()->UnspillRegisters(preserved_core_registers, core_spills_offset); in GenerateFrameExit()
1278 GetAssembler()->cfi().Restore(DWARFReg(lowest_spill)); in GenerateFrameExit()
1282 GetAssembler()->cfi().AdjustCFAOffset(-frame_size); in GenerateFrameExit()
[all …]
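
The arm64 frame hits pair up into the usual prologue/epilogue symmetry: SpillRegisters plus cfi().RelOffset/AdjustCFAOffset on entry, UnspillRegisters plus cfi().Restore on exit, with RememberState() snapshotting the unwind state before the epilogue rewrites it. A skeleton assembled from the lines above (register and offset computation elided):

    void CodeGeneratorARM64::GenerateFrameExit() {
      GetAssembler()->cfi().RememberState();  // snapshot CFI before unwinding
      GetAssembler()->UnspillRegisters(preserved_fp_registers, fp_spills_offset);
      GetAssembler()->UnspillRegisters(preserved_core_registers, core_spills_offset);
      GetAssembler()->cfi().Restore(DWARFReg(lowest_spill));
      GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
      // ... ret, then the remembered CFI state is restored for what follows ...
    }
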
intrinsics_arm64.cc
113 codegen->GetAssembler()->MaybeUnpoisonHeapReference(tmp_reg); in EmitNativeCode()
138 codegen->GetAssembler()->MaybePoisonHeapReference(tmp_reg); in EmitNativeCode()
828 codegen->GetAssembler()->PoisonHeapReference(temp.W()); in GenUnsafePut()
947 Arm64Assembler* assembler = arm64_codegen->GetAssembler(); in EmitNativeCode()
1003 Arm64Assembler* assembler = codegen->GetAssembler(); in GenCas()
2526 codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp1); in VisitSystemArrayCopy()
2527 codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp2); in VisitSystemArrayCopy()
2536 codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp3); in VisitSystemArrayCopy()
2548 codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp3); in VisitSystemArrayCopy()
2561 codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp1); in VisitSystemArrayCopy()
[all …]
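
The Maybe(Un)PoisonHeapReference calls exist because, when heap-reference poisoning is compiled in, the generated code stores references in a scrambled form; raw values loaded into temps must be unpoisoned before they are compared or dereferenced, which is why SystemArrayCopy unpoisons the class pointers above. The "maybe" wrapper shape, sketched (kPoisonHeapReferences is ART's compile-time switch; exact signatures are an assumption):

    void Arm64Assembler::MaybeUnpoisonHeapReference(vixl::aarch64::Register reg) {
      if (kPoisonHeapReferences) {
        UnpoisonHeapReference(reg);  // undo the scrambling on the 32-bit reference
      }
    }
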
graph_visualizer.cc
200 codegen_.GetAssembler().CodeBufferBaseAddress(), in HGraphVisualizerPrinter()
201 codegen_.GetAssembler().CodeBufferBaseAddress() in HGraphVisualizerPrinter()
202 + codegen_.GetAssembler().CodeSize()) in HGraphVisualizerPrinter()
optimizing_compiler.cc
715 ArrayRef<const uint8_t>(*codegen->GetAssembler()->cfi().data()), in Emit()
818 codegen->GetAssembler()->cfi().SetEnabled(compiler_options.GenerateAnyDebugInfo()); in TryCompile()
943 codegen->GetAssembler()->cfi().SetEnabled(compiler_options.GenerateAnyDebugInfo()); in TryCompileIntrinsic()
1390 info.cfi = ArrayRef<const uint8_t>(*codegen->GetAssembler()->cfi().data()); in JitCompile()
scheduler_arm.cc
333 !codegen_->GetAssembler()->ShifterOperandCanHold( in CanGenerateTest()
337 } else if (!codegen_->GetAssembler()->ShifterOperandCanHold( in CanGenerateTest()
code_generator.h
222 virtual Assembler* GetAssembler() = 0;
223 virtual const Assembler& GetAssembler() const = 0;
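
code_generator.h declares the accessor as a pure virtual returning the architecture-neutral type, and every backend header above overrides it with a covariant return, so code holding a concrete codegen gets the concrete assembler without a cast. A minimal standalone model of that scheme:

    struct Assembler {
      virtual ~Assembler() = default;
    };
    struct Arm64Assembler : Assembler {};

    struct CodeGenerator {
      virtual Assembler* GetAssembler() = 0;
      virtual ~CodeGenerator() = default;
    };

    struct CodeGeneratorARM64 : CodeGenerator {
      // covariant override: narrows Assembler* to Arm64Assembler*
      Arm64Assembler* GetAssembler() override { return &assembler_; }
      Arm64Assembler assembler_;
    };
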
code_generator_x86.cc
71 #define __ down_cast<X86Assembler*>(codegen->GetAssembler())-> // NOLINT
926 #define __ down_cast<X86Assembler*>(GetAssembler())-> // NOLINT
1069 assembler_(codegen->GetAssembler()), in InstructionCodeGeneratorX86()
6431 X86Assembler* ParallelMoveResolverX86::GetAssembler() const { in GetAssembler() function in art::x86::ParallelMoveResolverX86
6432 return codegen_->GetAssembler(); in GetAssembler()
8351 codegen_->AddMethodAddressOffset(insn, GetAssembler()->CodeSize()); in VisitX86ComputeBaseMethodAddress()
8466 X86Assembler* assembler = codegen_->GetAssembler(); in CreateJumpTable()
8496 X86Assembler* assembler = GetAssembler(); in Finalize()
code_generator_x86_64.cc
70 #define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())-> // NOLINT
948 #define __ down_cast<X86_64Assembler*>(GetAssembler())-> // NOLINT
1382 assembler_(codegen->GetAssembler()), in InstructionCodeGeneratorX86_64()
5803 X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const { in GetAssembler() function in art::x86_64::ParallelMoveResolverX86_64
5804 return codegen_->GetAssembler(); in GetAssembler()
7742 X86_64Assembler* assembler = codegen_->GetAssembler(); in CreateJumpTable()
7771 X86_64Assembler* assembler = GetAssembler(); in Finalize()
/art/compiler/utils/x86_64/
assembler_x86_64_test.cc
1028 GetAssembler()->cvtsi2ss(x86_64::XmmRegister(x86_64::XMM0), in TEST_F()
1031 GetAssembler()->cvtsi2ss(x86_64::XmmRegister(x86_64::XMM0), in TEST_F()
1040 GetAssembler()->cvtsi2sd(x86_64::XmmRegister(x86_64::XMM0), in TEST_F()
1043 GetAssembler()->cvtsi2sd(x86_64::XmmRegister(x86_64::XMM0), in TEST_F()
1108 GetAssembler()->repne_scasb(); in TEST_F()
1114 GetAssembler()->repne_scasw(); in TEST_F()
1120 GetAssembler()->rep_movsw(); in TEST_F()
1943 GetAssembler()->psllw(x86_64::XmmRegister(x86_64::XMM0), x86_64::Immediate(1)); in TEST_F()
1944 GetAssembler()->psllw(x86_64::XmmRegister(x86_64::XMM15), x86_64::Immediate(2)); in TEST_F()
1950 GetAssembler()->pslld(x86_64::XmmRegister(x86_64::XMM0), x86_64::Immediate(1)); in TEST_F()
[all …]
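
Note the register choices in the hits at 1943-1950: each SSE operation is exercised on a low register (XMM0, no REX prefix needed) and a high one (XMM15, REX.B required), so both encodings are covered. Sketched as a complete test (the test name and expected strings are assumptions):

    TEST_F(AssemblerX86_64Test, Psllw) {
      GetAssembler()->psllw(x86_64::XmmRegister(x86_64::XMM0), x86_64::Immediate(1));
      GetAssembler()->psllw(x86_64::XmmRegister(x86_64::XMM15), x86_64::Immediate(2));
      DriverStr("psllw $1, %xmm0\npsllw $2, %xmm15\n", "psllw");
    }
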
/art/compiler/utils/
jni_macro_assembler_test.h
38 Ass* GetAssembler() { in GetAssembler() function
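
This last hit is the fixture-side definition the assembler tests lean on: the JNI macro-assembler test base is templated on the assembler type, and GetAssembler() exposes the instance under test. A sketch of that fixture shape (the member layout is an assumption, modeled on the sibling assembler_test.h):

    template <typename Ass>
    class JNIMacroAssemblerTest : public testing::Test {
     public:
      Ass* GetAssembler() { return assembler_.get(); }

     protected:
      std::unique_ptr<Ass> assembler_;  // created per test by the fixture
    };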
