/art/compiler/utils/x86_64/
D | jni_macro_assembler_x86_64.cc |
     90  __ movq(Address(CpuRegister(RSP), 0), method_reg.AsX86_64().AsCpuRegister());  in BuildFrame() local
    164  __ movq(Address(CpuRegister(RSP), offs), src.AsCpuRegister());  in Store() local
    168  __ movq(Address(CpuRegister(RSP), offs), src.AsRegisterPairLow());  in Store() local
    169  __ movq(Address(CpuRegister(RSP), FrameOffset(offs.Int32Value()+4)),  in Store() local
    196  __ movq(Address(CpuRegister(RSP), dest), src.AsCpuRegister());  in StoreRawPtr() local
    230  __ movq(dest.AsCpuRegister(), Address(CpuRegister(RSP), src));  in Load() local
    234  __ movq(dest.AsRegisterPairLow(), Address(CpuRegister(RSP), src));  in Load() local
    235  __ movq(dest.AsRegisterPairHigh(), Address(CpuRegister(RSP), FrameOffset(src.Int32Value()+4)));  in Load() local
    286  __ movq(dest.AsCpuRegister(), Address(CpuRegister(RSP), src));  in LoadRef() local
    310  __ movq(dest.AsCpuRegister(), Address(base.AsCpuRegister(), offs));  in LoadRawPtr() local
    [all …]
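
The Store() and Load() hits above show the JNI frame convention: a single register occupies one RSP-relative slot, while a register pair is split across two adjacent 4-byte slots at offs and offs + 4. A standalone sketch of that slot layout (hypothetical names; it models the memory layout only, not the emitted movq instructions):

    #include <cstdint>
    #include <cstring>

    // Sketch of the frame-store pattern above: a value is written to the
    // frame at an RSP-relative offset; a register pair lands in two
    // adjacent 4-byte slots (offs and offs + 4), as in Store() lines
    // 168-169 and Load() lines 234-235.
    void StoreToFrame(uint8_t* frame, uint32_t offs, uint64_t value, bool is_pair) {
      if (!is_pair) {
        std::memcpy(frame + offs, &value, sizeof(value));  // one 8-byte slot
      } else {
        uint32_t lo = static_cast<uint32_t>(value);
        uint32_t hi = static_cast<uint32_t>(value >> 32);
        std::memcpy(frame + offs, &lo, sizeof(lo));        // low half at offs
        std::memcpy(frame + offs + 4, &hi, sizeof(hi));    // high half at offs + 4
      }
    }
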
D | assembler_x86_64.cc |
    142  void X86_64Assembler::movq(CpuRegister dst, const Immediate& imm) {  in movq() function in art::x86_64::X86_64Assembler
    167  void X86_64Assembler::movq(const Address& dst, const Immediate& imm) {  in movq() function in art::x86_64::X86_64Assembler
    177  void X86_64Assembler::movq(CpuRegister dst, CpuRegister src) {  in movq() function in art::x86_64::X86_64Assembler
    194  void X86_64Assembler::movq(CpuRegister dst, const Address& src) {  in movq() function in art::x86_64::X86_64Assembler
    210  void X86_64Assembler::movq(const Address& dst, CpuRegister src) {  in movq() function in art::x86_64::X86_64Assembler
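
These five overloads cover the 64-bit MOV forms: register from immediate, memory from immediate (sign-extended 32-bit in x86-64), and the three register/memory moves, each emitted with a REX.W prefix to select 64-bit operand size. A minimal sketch of how the register-to-register form encodes, assuming a plain byte buffer rather than ART's real AssemblerBuffer:

    #include <cstdint>
    #include <vector>

    // Simplified emitter for MOV r/m64, r64 (REX.W + 0x89 /r), just to
    // show the byte layout; register numbers 0-15 follow the hardware
    // encoding (RAX = 0, ..., R15 = 15).
    void EmitMovqRegReg(std::vector<uint8_t>& out, int dst, int src) {
      uint8_t rex = 0x48 | ((src >> 3) << 2) | (dst >> 3);  // REX.W plus R/B extension bits
      out.push_back(rex);
      out.push_back(0x89);                                  // opcode: MOV r/m64, r64
      out.push_back(0xC0 | ((src & 7) << 3) | (dst & 7));   // ModRM: mod=11, reg=src, rm=dst
    }

For example, EmitMovqRegReg(buf, 0, 3) produces the bytes 48 89 D8, i.e. mov rax, rbx.
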
/art/compiler/optimizing/
D | code_generator_x86_64.cc |
    650  __ movq(temp2_, CpuRegister(RAX));  in EmitNativeCode() local
    699  __ movq(CpuRegister(RAX), temp2_);  in EmitNativeCode() local
   1028  __ movq(temp.AsRegister<CpuRegister>(),  in GenerateStaticOrDirectCall() local
   1127  __ movq(temp, Address(temp, method_offset));  in GenerateVirtualCall() local
   1294  __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));  in SaveCoreRegister() local
   1299  __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));  in RestoreCoreRegister() local
   1408  __ movq(CpuRegister(method), Address(CpuRegister(RSP), kCurrentMethodStackOffset));  in MaybeIncrementHotness() local
   1424  __ movq(CpuRegister(TMP), Immediate(address));  in MaybeIncrementHotness() local
   1436  __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset), CpuRegister(RDI));  in MaybeIncrementHotness() local
   1492  __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),  in GenerateFrameEntry() local
   [all …]
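
The MaybeIncrementHotness() hits reload the current method from its reserved stack slot (kCurrentMethodStackOffset) and update a hotness counter that eventually triggers JIT compilation. A rough standalone sketch of that idea; the struct, field, and threshold here are assumptions for illustration, not ART's actual layout or values:

    #include <cstdint>

    // In generated code, the movq at 1408 above loads the method pointer
    // from [RSP + kCurrentMethodStackOffset] before the counter update.
    struct Method {
      uint16_t hotness_count = 0;  // hypothetical counter field
    };

    constexpr uint16_t kHotnessThreshold = 0xFFFF;  // assumed saturation point

    // Bump the counter and report whether the method just became "hot"
    // (i.e. whether JIT compilation should be requested).
    bool BumpHotnessAndCheck(Method* method) {
      if (method->hotness_count < kHotnessThreshold) {
        ++method->hotness_count;
      }
      return method->hotness_count == kHotnessThreshold;
    }
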
D | intrinsics_x86_64.cc |
   1651  __ movq(out, Address(address, 0));  in GenPeek() local
   1733  __ movq(Address(address, 0), Immediate(v_32));  in GenPoke() local
   1735  __ movq(Address(address, 0), value.AsRegister<CpuRegister>());  in GenPoke() local
   1825  __ movq(output, Address(base, offset, ScaleFactor::TIMES_1, 0));  in GenUnsafeGet() local
   1948  __ movq(Address(base, offset, ScaleFactor::TIMES_1, 0), value);  in GenUnsafePut() local
   2231  __ movq(temp_mask, Immediate(mask));  in SwapBits64() local
   2232  __ movq(temp, reg);  in SwapBits64() local
   2403  __ movq(tmp, src.AsRegister<CpuRegister>());  in GenOneBit() local
   2409  __ movq(tmp, Address(CpuRegister(RSP), src.GetStackIndex()));  in GenOneBit() local
   2416  __ movq(out, tmp);  in GenOneBit() local
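
The SwapBits64() hits load a 64-bit immediate mask with movq and swap bit groups, the classic mask-and-shift step behind the Long.reverse intrinsic. A standalone C++ equivalent of the full 64-bit reversal, as a sketch of the technique rather than ART's generated code:

    #include <cstdint>

    // Reverse all 64 bits by swapping progressively wider groups:
    // single bits, then pairs, then nibbles; byte order is finished
    // off with a byte swap.
    uint64_t ReverseBits64(uint64_t x) {
      x = ((x >> 1) & 0x5555555555555555ULL) | ((x & 0x5555555555555555ULL) << 1);
      x = ((x >> 2) & 0x3333333333333333ULL) | ((x & 0x3333333333333333ULL) << 2);
      x = ((x >> 4) & 0x0F0F0F0F0F0F0F0FULL) | ((x & 0x0F0F0F0F0F0F0F0FULL) << 4);
      return __builtin_bswap64(x);  // GCC/Clang builtin; swaps the 8 bytes
    }

Each mask (0x5555…, 0x3333…, 0x0F0F…) is too wide for an imm32, which is why the generated code stages it in a register via movq(temp_mask, Immediate(mask)) before the and/shift/or sequence.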