/art/compiler/utils/x86/

D | assembler_x86_test.cc |
    279  GetAssembler()->LoadLongConstant(x86::XMM0, 51);  in TEST_F()
    315  GetAssembler()->repne_scasb();  in TEST_F()
    321  GetAssembler()->repne_scasw();  in TEST_F()
    327  GetAssembler()->repe_cmpsb();  in TEST_F()
    333  GetAssembler()->repe_cmpsw();  in TEST_F()
    339  GetAssembler()->repe_cmpsl();  in TEST_F()
    345  GetAssembler()->rep_movsb();  in TEST_F()
    351  GetAssembler()->rep_movsw();  in TEST_F()
    467  GetAssembler()->cmovl(x86::kEqual, x86::Register(x86::EAX), x86::Address(  in TEST_F()
    469  GetAssembler()->cmovl(x86::kNotEqual, x86::Register(x86::EDI), x86::Address(  in TEST_F()
    [all …]

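The entries above come from the x86 assembler unit tests: each TEST_F() drives the fixture's
assembler directly through GetAssembler() and then checks the emitted encoding. A minimal
sketch of that pattern follows; it assumes an AssemblerTest-style DriverStr() helper, which is
not part of the listing above and is shown only for illustration.

    // Sketch only: emit one instruction via the fixture's assembler, then compare the
    // resulting bytes against the encoding of the given reference assembly text.
    TEST_F(AssemblerX86Test, RepneScasb) {
      GetAssembler()->repne_scasb();              // emit 'repne scasb' through the test assembler
      DriverStr("repne scasb\n", "repne_scasb");  // assumed helper: assemble text, diff encodings
    }
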
/art/compiler/optimizing/

D | intrinsics_arm_vixl.cc |
    61  ArmVIXLAssembler* IntrinsicCodeGeneratorARMVIXL::GetAssembler() {  in GetAssembler() function in art::arm::IntrinsicCodeGeneratorARMVIXL
    62  return codegen_->GetAssembler();  in GetAssembler()
    128  ArmVIXLAssembler* assembler = arm_codegen->GetAssembler();  in EmitNativeCode()
    197  assembler_(codegen->GetAssembler()),  in IntrinsicLocationsBuilderARMVIXL()
    251  MoveFPToInt(invoke->GetLocations(), /* is64bit= */ true, GetAssembler());  in VisitDoubleDoubleToRawLongBits()
    254  MoveIntToFP(invoke->GetLocations(), /* is64bit= */ true, GetAssembler());  in VisitDoubleLongBitsToDouble()
    265  MoveFPToInt(invoke->GetLocations(), /* is64bit= */ false, GetAssembler());  in VisitFloatFloatToRawIntBits()
    268  MoveIntToFP(invoke->GetLocations(), /* is64bit= */ false, GetAssembler());  in VisitFloatIntBitsToFloat()
    295  ArmVIXLAssembler* assembler = codegen->GetAssembler();  in GenNumberOfLeadingZeros()
    340  ArmVIXLAssembler* assembler = codegen->GetAssembler();  in GenNumberOfTrailingZeros()
    [all …]

D | intrinsics_x86_64.cc |
    47  X86_64Assembler* IntrinsicCodeGeneratorX86_64::GetAssembler() {  in GetAssembler() function in art::x86_64::IntrinsicCodeGeneratorX86_64
    48  return down_cast<X86_64Assembler*>(codegen_->GetAssembler());  in GetAssembler()
    67  #define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())->  // NOLINT
    160  MoveFPToInt(invoke->GetLocations(), /* is64bit= */ true, GetAssembler());  in VisitDoubleDoubleToRawLongBits()
    163  MoveIntToFP(invoke->GetLocations(), /* is64bit= */ true, GetAssembler());  in VisitDoubleLongBitsToDouble()
    174  MoveFPToInt(invoke->GetLocations(), /* is64bit= */ false, GetAssembler());  in VisitFloatFloatToRawIntBits()
    177  MoveIntToFP(invoke->GetLocations(), /* is64bit= */ false, GetAssembler());  in VisitFloatIntBitsToFloat()
    215  GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());  in VisitIntegerReverseBytes()
    223  GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());  in VisitLongReverseBytes()
    231  GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt16, GetAssembler());  in VisitShortReverseBytes()
    [all …]

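Lines 47-48 and 67 above show the two ways intrinsic code reaches the backend assembler: a
GetAssembler() accessor that down_casts the generic codegen assembler, and a local '__' macro
that hides the cast at every emission site. A minimal sketch of how such a '__' shorthand is
used follows; the helper function and the instructions it emits are illustrative, not taken
from the file.

    #define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())->  // NOLINT

    // Hypothetical helper: each '__' expands to a call on the x86-64 assembler.
    static void GenReturnZero(CodeGeneratorX86_64* codegen) {
      __ xorl(CpuRegister(RAX), CpuRegister(RAX));  // clear the 32-bit return register
      __ ret();                                     // return to the caller
    }

    #undef __
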
D | optimizing_cfi_test.cc |
    60  code_gen_->GetAssembler()->cfi().SetEnabled(true);  in SetUpFrame()
    100  Assembler* opt_asm = code_gen_->GetAssembler();  in Check()
    194  ->GetAssembler())->GetVIXLAssembler()->  in TEST_ISA()

D | code_generator_arm_vixl.cc |
    337  arm_codegen->GetAssembler()->StoreRegisterList(core_spills, orig_offset);  in SaveLiveRegisters()
    371  arm_codegen->GetAssembler()->LoadRegisterList(core_spills, orig_offset);  in RestoreLiveRegisters()
    1929  #define __ reinterpret_cast<ArmVIXLAssembler*>(GetAssembler())->GetVIXLAssembler()->  // NOLINT
    1940  CompileBakerReadBarrierThunk(*GetAssembler(), encoded_data, /* debug_name= */ nullptr);  in Finalize()
    1943  GetAssembler()->FinalizeCode();  in Finalize()
    2061  assembler_(codegen->GetAssembler()),  in InstructionCodeGeneratorARMVIXL()
    2091  GetAssembler()->cfi().AdjustCFAOffset(kArmWordSize);  in MaybeIncrementHotness()
    2092  GetAssembler()->LoadFromOffset(kLoadWord, kMethodRegister, sp, kArmWordSize);  in MaybeIncrementHotness()
    2102  GetAssembler()->cfi().AdjustCFAOffset(-static_cast<int>(kArmWordSize));  in MaybeIncrementHotness()
    2116  GetAssembler()->cfi().AdjustCFAOffset(kArmWordSize);  in MaybeIncrementHotness()
    [all …]

D | intrinsics_x86.cc |
    49  X86Assembler* IntrinsicCodeGeneratorX86::GetAssembler() {  in GetAssembler() function in art::x86::IntrinsicCodeGeneratorX86
    50  return down_cast<X86Assembler*>(codegen_->GetAssembler());  in GetAssembler()
    69  #define __ down_cast<X86Assembler*>(codegen->GetAssembler())->  // NOLINT
    228  MoveFPToInt(invoke->GetLocations(), /* is64bit= */ true, GetAssembler());  in VisitDoubleDoubleToRawLongBits()
    231  MoveIntToFP(invoke->GetLocations(), /* is64bit= */ true, GetAssembler());  in VisitDoubleLongBitsToDouble()
    242  MoveFPToInt(invoke->GetLocations(), /* is64bit= */ false, GetAssembler());  in VisitFloatFloatToRawIntBits()
    245  MoveIntToFP(invoke->GetLocations(), /* is64bit= */ false, GetAssembler());  in VisitFloatIntBitsToFloat()
    294  GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());  in VisitIntegerReverseBytes()
    310  X86Assembler* assembler = GetAssembler();  in VisitLongReverseBytes()
    323  GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt16, GetAssembler());  in VisitShortReverseBytes()
    [all …]

D | intrinsics_arm_vixl.h |
    72  ArmVIXLAssembler* GetAssembler();

D | intrinsics_x86_64.h |
    72  X86_64Assembler* GetAssembler();

D | intrinsics_x86.h |
    72  X86Assembler* GetAssembler();

D | intrinsics_utils.h |
    54  TAssembler* assembler = down_cast<TAssembler*>(codegen->GetAssembler());  in EmitNativeCode()

D | code_generator_arm64.h |
    304  Arm64Assembler* GetAssembler() const { return assembler_; }  in GetAssembler() function
    305  vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); }  in GetVIXLAssembler()
    519  Arm64Assembler* GetAssembler() const;
    521  return GetAssembler()->GetVIXLAssembler();  in GetVIXLAssembler()
    581  Arm64Assembler* GetAssembler() override { return &assembler_; }  in GetAssembler() function
    582  const Arm64Assembler& GetAssembler() const override { return assembler_; }  in GetAssembler() function
    583  vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); }  in GetVIXLAssembler()
    1088  inline Arm64Assembler* ParallelMoveResolverARM64::GetAssembler() const {  in GetAssembler() function
    1089  return codegen_->GetAssembler();  in GetAssembler()

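The arm64 header shows the layering: the code generator and its helpers expose an
art::arm64::Arm64Assembler via GetAssembler(), and that assembler in turn wraps a
vixl::aarch64::MacroAssembler reachable through GetVIXLAssembler() (lines 304-305 and
581-583). A short illustrative fragment of how a call site picks a layer; 'codegen' stands in
for a CodeGeneratorARM64* and the emitted instruction is only an example.

    // Sketch only: use the ART wrapper for high-level helpers, or drop down to VIXL
    // for raw instruction emission.
    Arm64Assembler* art_asm = codegen->GetAssembler();                   // ART-level API (spills, cfi(), ...)
    vixl::aarch64::MacroAssembler* masm = art_asm->GetVIXLAssembler();   // underlying VIXL macro assembler
    masm->Mov(vixl::aarch64::x0, 0);                                     // e.g. emit 'mov x0, #0' directly
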
D | code_generator.cc |
    334  start_offset_ = codegen_.GetAssembler().CodeSize();  in DisassemblyScope()
    342  instruction_, start_offset_, codegen_.GetAssembler().CodeSize());  in ~DisassemblyScope()
    360  code_start = GetAssembler()->CodeSize();  in GenerateSlowPaths()
    366  disasm_info_->AddSlowPathInterval(slow_path, code_start, GetAssembler()->CodeSize());  in GenerateSlowPaths()
    394  size_t frame_start = GetAssembler()->CodeSize();  in Compile()
    396  DCHECK_EQ(GetAssembler()->cfi().GetCurrentCFAOffset(), static_cast<int>(frame_size_));  in Compile()
    398  disasm_info_->SetFrameEntryInterval(frame_start, GetAssembler()->CodeSize());  in Compile()
    441  size_t code_size = GetAssembler()->CodeSize();  in Finalize()
    445  GetAssembler()->FinalizeInstructions(code);  in Finalize()
    1165  RecordPcInfo(instruction, dex_pc, GetAssembler()->CodePosition(), slow_path, native_debug_info);  in RecordPcInfo()
    [all …]

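The code_generator.cc hits show CodeSize() being used as a cursor into the code buffer: the
offset is sampled before and after emitting a region and the resulting byte interval is
recorded for disassembly output. A minimal sketch of that idiom, reusing the names from the
snippets above; the EmitNativeCode() call stands in schematically for whatever emits the
measured region.

    size_t code_start = GetAssembler()->CodeSize();    // offset before the slow path is emitted
    slow_path->EmitNativeCode(this);                   // emit the region being measured
    disasm_info_->AddSlowPathInterval(slow_path, code_start, GetAssembler()->CodeSize());
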
D | code_generator_x86.h |
    161  X86Assembler* GetAssembler() const;
    228  X86Assembler* GetAssembler() const { return assembler_; }  in GetAssembler() function
    405  X86Assembler* GetAssembler() override {  in GetAssembler() function
    409  const X86Assembler& GetAssembler() const override {  in GetAssembler() function

D | code_generator_x86_64.h |
    162  X86_64Assembler* GetAssembler() const;
    232  X86_64Assembler* GetAssembler() const { return assembler_; }  in GetAssembler() function
    388  X86_64Assembler* GetAssembler() override {  in GetAssembler() function
    392  const X86_64Assembler& GetAssembler() const override {  in GetAssembler() function

D | code_generator_arm_vixl.h |
    274  ArmVIXLAssembler* GetAssembler() const;
    342  ArmVIXLAssembler* GetAssembler() const { return assembler_; }  in GetAssembler() function
    343  ArmVIXLMacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); }  in GetVIXLAssembler()
    488  ArmVIXLAssembler* GetAssembler() override { return &assembler_; }  in GetAssembler() function
    490  const ArmVIXLAssembler& GetAssembler() const override { return assembler_; }  in GetAssembler() function
    492  ArmVIXLMacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); }  in GetVIXLAssembler()

D | code_generator_arm64.cc |
    990  CompileBakerReadBarrierThunk(*GetAssembler(), encoded_data, /* debug_name= */ nullptr);  in Finalize()
    1237  GetAssembler()->cfi().AdjustCFAOffset(frame_size);  in GenerateFrameEntry()
    1239  GetAssembler()->cfi().RelOffset(DWARFReg(lowest_spill), core_spills_offset);  in GenerateFrameEntry()
    1242  GetAssembler()->SpillRegisters(preserved_core_registers, core_spills_offset);  in GenerateFrameEntry()
    1243  GetAssembler()->SpillRegisters(preserved_fp_registers, fp_spills_offset);  in GenerateFrameEntry()
    1256  GetAssembler()->cfi().RememberState();  in GenerateFrameExit()
    1274  GetAssembler()->UnspillRegisters(preserved_fp_registers, fp_spills_offset);  in GenerateFrameExit()
    1275  GetAssembler()->UnspillRegisters(preserved_core_registers, core_spills_offset);  in GenerateFrameExit()
    1278  GetAssembler()->cfi().Restore(DWARFReg(lowest_spill));  in GenerateFrameExit()
    1282  GetAssembler()->cfi().AdjustCFAOffset(-frame_size);  in GenerateFrameExit()
    [all …]

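GenerateFrameEntry() and GenerateFrameExit() above pair every stack operation with a CFI
directive so that unwind information stays in sync with the emitted code. A condensed sketch
of the entry side, in the same order as the hits at lines 1237-1243; register sets and offsets
are the variables named there, and surrounding code is elided.

    GetAssembler()->cfi().AdjustCFAOffset(frame_size);                            // account for the 'sub sp, sp, #frame_size'
    GetAssembler()->cfi().RelOffset(DWARFReg(lowest_spill), core_spills_offset);  // record where a callee-save lands
    GetAssembler()->SpillRegisters(preserved_core_registers, core_spills_offset); // store the core callee-saves
    GetAssembler()->SpillRegisters(preserved_fp_registers, fp_spills_offset);     // store the FP callee-saves
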
D | intrinsics_arm64.cc |
    113  codegen->GetAssembler()->MaybeUnpoisonHeapReference(tmp_reg);  in EmitNativeCode()
    138  codegen->GetAssembler()->MaybePoisonHeapReference(tmp_reg);  in EmitNativeCode()
    828  codegen->GetAssembler()->PoisonHeapReference(temp.W());  in GenUnsafePut()
    947  Arm64Assembler* assembler = arm64_codegen->GetAssembler();  in EmitNativeCode()
    1003  Arm64Assembler* assembler = codegen->GetAssembler();  in GenCas()
    2526  codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp1);  in VisitSystemArrayCopy()
    2527  codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp2);  in VisitSystemArrayCopy()
    2536  codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp3);  in VisitSystemArrayCopy()
    2548  codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp3);  in VisitSystemArrayCopy()
    2561  codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp1);  in VisitSystemArrayCopy()
    [all …]

D | graph_visualizer.cc |
    200  codegen_.GetAssembler().CodeBufferBaseAddress(),  in HGraphVisualizerPrinter()
    201  codegen_.GetAssembler().CodeBufferBaseAddress()  in HGraphVisualizerPrinter()
    202  + codegen_.GetAssembler().CodeSize())  in HGraphVisualizerPrinter()

D | optimizing_compiler.cc |
    715  ArrayRef<const uint8_t>(*codegen->GetAssembler()->cfi().data()),  in Emit()
    818  codegen->GetAssembler()->cfi().SetEnabled(compiler_options.GenerateAnyDebugInfo());  in TryCompile()
    943  codegen->GetAssembler()->cfi().SetEnabled(compiler_options.GenerateAnyDebugInfo());  in TryCompileIntrinsic()
    1390  info.cfi = ArrayRef<const uint8_t>(*codegen->GetAssembler()->cfi().data());  in JitCompile()

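optimizing_compiler.cc shows both ends of the CFI plumbing: unwind-info recording is enabled
on the assembler before code generation, and the accumulated opcodes are later wrapped in an
ArrayRef when the compiled method is emitted. A sketch of that round trip, using the calls
from the hits above; 'compiler_options' and 'codegen' are as named in the snippets.

    codegen->GetAssembler()->cfi().SetEnabled(compiler_options.GenerateAnyDebugInfo());
    // ... code generation runs, recording CFI alongside each emitted instruction ...
    ArrayRef<const uint8_t> cfi_data(*codegen->GetAssembler()->cfi().data());
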
D | scheduler_arm.cc |
    333  !codegen_->GetAssembler()->ShifterOperandCanHold(  in CanGenerateTest()
    337  } else if (!codegen_->GetAssembler()->ShifterOperandCanHold(  in CanGenerateTest()

D | code_generator.h |
    222  virtual Assembler* GetAssembler() = 0;
    223  virtual const Assembler& GetAssembler() const = 0;

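These two pure-virtual declarations in code_generator.h are the root of everything else in
this listing: each backend overrides them with a covariant, architecture-specific assembler
type, as the per-architecture headers above show (e.g. code_generator_arm64.h lines 581-582).
A simplified sketch of that override pattern; members, constructors, and the rest of the class
are elided.

    class CodeGeneratorARM64 : public CodeGenerator {
     public:
      Arm64Assembler* GetAssembler() override { return &assembler_; }
      const Arm64Assembler& GetAssembler() const override { return assembler_; }
     private:
      Arm64Assembler assembler_;   // the backend owns its assembler and hands out references to it
    };
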
D | code_generator_x86.cc |
    71  #define __ down_cast<X86Assembler*>(codegen->GetAssembler())->  // NOLINT
    926  #define __ down_cast<X86Assembler*>(GetAssembler())->  // NOLINT
    1069  assembler_(codegen->GetAssembler()),  in InstructionCodeGeneratorX86()
    6431  X86Assembler* ParallelMoveResolverX86::GetAssembler() const {  in GetAssembler() function in art::x86::ParallelMoveResolverX86
    6432  return codegen_->GetAssembler();  in GetAssembler()
    8351  codegen_->AddMethodAddressOffset(insn, GetAssembler()->CodeSize());  in VisitX86ComputeBaseMethodAddress()
    8466  X86Assembler* assembler = codegen_->GetAssembler();  in CreateJumpTable()
    8496  X86Assembler* assembler = GetAssembler();  in Finalize()

D | code_generator_x86_64.cc |
    70  #define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())->  // NOLINT
    948  #define __ down_cast<X86_64Assembler*>(GetAssembler())->  // NOLINT
    1382  assembler_(codegen->GetAssembler()),  in InstructionCodeGeneratorX86_64()
    5803  X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {  in GetAssembler() function in art::x86_64::ParallelMoveResolverX86_64
    5804  return codegen_->GetAssembler();  in GetAssembler()
    7742  X86_64Assembler* assembler = codegen_->GetAssembler();  in CreateJumpTable()
    7771  X86_64Assembler* assembler = GetAssembler();  in Finalize()

/art/compiler/utils/x86_64/

D | assembler_x86_64_test.cc |
    1028  GetAssembler()->cvtsi2ss(x86_64::XmmRegister(x86_64::XMM0),  in TEST_F()
    1031  GetAssembler()->cvtsi2ss(x86_64::XmmRegister(x86_64::XMM0),  in TEST_F()
    1040  GetAssembler()->cvtsi2sd(x86_64::XmmRegister(x86_64::XMM0),  in TEST_F()
    1043  GetAssembler()->cvtsi2sd(x86_64::XmmRegister(x86_64::XMM0),  in TEST_F()
    1108  GetAssembler()->repne_scasb();  in TEST_F()
    1114  GetAssembler()->repne_scasw();  in TEST_F()
    1120  GetAssembler()->rep_movsw();  in TEST_F()
    1943  GetAssembler()->psllw(x86_64::XmmRegister(x86_64::XMM0), x86_64::Immediate(1));  in TEST_F()
    1944  GetAssembler()->psllw(x86_64::XmmRegister(x86_64::XMM15), x86_64::Immediate(2));  in TEST_F()
    1950  GetAssembler()->pslld(x86_64::XmmRegister(x86_64::XMM0), x86_64::Immediate(1));  in TEST_F()
    [all …]

/art/compiler/utils/

D | jni_macro_assembler_test.h |
    38  Ass* GetAssembler() {  in GetAssembler() function