
Searched refs: AsRegister (Results 1 – 14 of 14) sorted by relevance

/art/compiler/optimizing/
intrinsics_x86_64.cc
90 CpuRegister src_curr_addr = locations->GetTemp(0).AsRegister<CpuRegister>(); in EmitNativeCode()
91 CpuRegister dst_curr_addr = locations->GetTemp(1).AsRegister<CpuRegister>(); in EmitNativeCode()
92 CpuRegister src_stop_addr = locations->GetTemp(2).AsRegister<CpuRegister>(); in EmitNativeCode()
143 __ movd(output.AsRegister<CpuRegister>(), input.AsFpuRegister<XmmRegister>(), is64bit); in MoveFPToInt()
149 __ movd(output.AsFpuRegister<XmmRegister>(), input.AsRegister<CpuRegister>(), is64bit); in MoveIntToFP()
190 CpuRegister out = locations->Out().AsRegister<CpuRegister>(); in GenReverseBytes()
321 CpuRegister out = locations->Out().AsRegister<CpuRegister>(); in VisitMathRoundFloat()
362 CpuRegister out = locations->Out().AsRegister<CpuRegister>(); in VisitMathRoundDouble()
625 __ cmpl(Address(input, length_offset), length.AsRegister<CpuRegister>()); in CheckPosition()
639 __ cmpl(temp, length.AsRegister<CpuRegister>()); in CheckPosition()
[all …]
code_generator_x86_64.cc
212 Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset); in EmitNativeCode()
219 __ movl(length_loc.AsRegister<CpuRegister>(), array_len); in EmitNativeCode()
221 __ shrl(length_loc.AsRegister<CpuRegister>(), Immediate(1)); in EmitNativeCode()
356 __ UnpoisonHeapReference(locations->InAt(1).AsRegister<CpuRegister>()); in EmitNativeCode()
492 CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>(); in EmitNativeCode()
493 Register ref_reg = ref_cpu_reg.AsRegister(); in EmitNativeCode()
585 CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>(); in EmitNativeCode()
586 Register ref_reg = ref_cpu_reg.AsRegister(); in EmitNativeCode()
658 bool base_equals_value = (base.AsRegister() == value.AsRegister()); in EmitNativeCode()
665 value_reg = temp1_.AsRegister(); in EmitNativeCode()
[all …]
intrinsics_x86.cc
93 Register src = locations->InAt(0).AsRegister<Register>(); in EmitNativeCode()
95 Register dest = locations->InAt(2).AsRegister<Register>(); in EmitNativeCode()
99 Register temp1 = temp1_loc.AsRegister<Register>(); in EmitNativeCode()
100 Register temp2 = locations->GetTemp(1).AsRegister<Register>(); in EmitNativeCode()
101 Register temp3 = locations->GetTemp(2).AsRegister<Register>(); in EmitNativeCode()
124 __ leal(temp2, Address(src_pos.AsRegister<Register>(), temp1, ScaleFactor::TIMES_1, 0)); in EmitNativeCode()
147 __ leal(temp3, Address(dest_pos.AsRegister<Register>(), temp1, ScaleFactor::TIMES_1, 0)); in EmitNativeCode()
200 __ movd(output.AsRegister<Register>(), input.AsFpuRegister<XmmRegister>()); in MoveFPToInt()
216 __ movd(output.AsFpuRegister<XmmRegister>(), input.AsRegister<Register>()); in MoveIntToFP()
272 Register out = locations->Out().AsRegister<Register>(); in GenReverseBytes()
[all …]
code_generator_x86.cc
166 Address array_len(array_loc.AsRegister<Register>(), len_offset); in EmitNativeCode()
173 __ movl(length_loc.AsRegister<Register>(), array_len); in EmitNativeCode()
175 __ shrl(length_loc.AsRegister<Register>(), Immediate(1)); in EmitNativeCode()
340 __ UnpoisonHeapReference(locations->InAt(1).AsRegister<Register>()); in EmitNativeCode()
481 Register ref_reg = ref_.AsRegister<Register>(); in EmitNativeCode()
568 Register ref_reg = ref_.AsRegister<Register>(); in EmitNativeCode()
733 Register reg_out = out_.AsRegister<Register>(); in EmitNativeCode()
756 Register index_reg = index_.AsRegister<Register>(); in EmitNativeCode()
853 size_t ref = static_cast<int>(ref_.AsRegister<Register>()); in FindAvailableCallerSaveRegister()
854 size_t obj = static_cast<int>(obj_.AsRegister<Register>()); in FindAvailableCallerSaveRegister()
[all …]
code_generator_vector_x86_64.cc
73 __ movd(dst, locations->InAt(0).AsRegister<CpuRegister>(), /*64-bit*/ false); in VisitVecReplicateScalar()
81 __ movd(dst, locations->InAt(0).AsRegister<CpuRegister>(), /*64-bit*/ false); in VisitVecReplicateScalar()
87 __ movd(dst, locations->InAt(0).AsRegister<CpuRegister>(), /*64-bit*/ false); in VisitVecReplicateScalar()
92 __ movd(dst, locations->InAt(0).AsRegister<CpuRegister>(), /*64-bit*/ true); in VisitVecReplicateScalar()
148 __ movd(locations->Out().AsRegister<CpuRegister>(), src, /*64-bit*/ false); in VisitVecExtractScalar()
152 __ movd(locations->Out().AsRegister<CpuRegister>(), src, /*64-bit*/ true); in VisitVecExtractScalar()
1117 __ movd(dst, locations->InAt(0).AsRegister<CpuRegister>()); in VisitVecSetScalars()
1121 __ movd(dst, locations->InAt(0).AsRegister<CpuRegister>()); // is 64-bit in VisitVecSetScalars()
1255 return CodeGeneratorX86_64::ArrayAddress(base.AsRegister<CpuRegister>(), index, scale, offset); in VecAddress()
1284 __ testb(Address(locations->InAt(0).AsRegister<CpuRegister>(), count_offset), Immediate(1)); in VisitVecLoad()
code_generator_vector_x86.cc
78 __ movd(dst, locations->InAt(0).AsRegister<Register>()); in VisitVecReplicateScalar()
86 __ movd(dst, locations->InAt(0).AsRegister<Register>()); in VisitVecReplicateScalar()
92 __ movd(dst, locations->InAt(0).AsRegister<Register>()); in VisitVecReplicateScalar()
161 __ movd(locations->Out().AsRegister<Register>(), src); in VisitVecExtractScalar()
1139 __ movd(dst, locations->InAt(0).AsRegister<Register>()); in VisitVecSetScalars()
1282 return CodeGeneratorX86::ArrayAddress(base.AsRegister<Register>(), index, scale, offset); in VecAddress()
1311 __ testb(Address(locations->InAt(0).AsRegister<Register>(), count_offset), Immediate(1)); in VisitVecLoad()
locations.h
181 T AsRegister() const { in AsRegister() function
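
The call sites above all go through the templated accessor declared at locations.h:181: a register allocator Location is resolved to a backend register type (Register on x86, CpuRegister on x86-64) chosen by the caller via the template parameter. The following is a minimal sketch of that pattern using simplified stand-in types, not the real ART Location/LocationSummary classes:

#include <cassert>

// Simplified stand-ins for illustration only.
enum Register { EAX = 0, ECX = 1, EDX = 2, EBX = 3 };

class Location {
 public:
  static Location RegisterLocation(int encoding) {
    return Location(kRegister, encoding);
  }

  // Mirrors the hit at locations.h:181: the caller picks the backend
  // register type via the template parameter.
  template <typename T>
  T AsRegister() const {
    assert(kind_ == kRegister);  // only meaningful for register locations
    return static_cast<T>(payload_);
  }

 private:
  enum Kind { kInvalid, kRegister };
  Location(Kind kind, int payload) : kind_(kind), payload_(payload) {}
  Kind kind_;
  int payload_;
};

int main() {
  // Mirrors call sites such as locations->Out().AsRegister<Register>().
  Location out = Location::RegisterLocation(ECX);
  Register reg = out.AsRegister<Register>();
  return reg == ECX ? 0 : 1;
}
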
/art/compiler/utils/x86_64/
managed_register_x86_64.cc
63 Register low = AsRegisterPairLow().AsRegister(); in Overlaps()
64 Register high = AsRegisterPairHigh().AsRegister(); in Overlaps()
101 os << "CPU: " << static_cast<int>(AsCpuRegister().AsRegister()); in Print()
constants_x86_64.h
35 constexpr Register AsRegister() const { in AsRegister() function
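
The hit at constants_x86_64.h:35 is the second, non-templated flavor: the CpuRegister wrapper unwraps to the raw hardware Register enum, which is what two-step call sites like `ref_.AsRegister<CpuRegister>()` followed by `.AsRegister()` in code_generator_x86_64.cc rely on. A rough sketch of that wrapper, with the constructor shape assumed rather than quoted from the real header:

#include <ostream>

// Hardware encodings (subset) as used by the x86-64 backend.
enum Register { RAX = 0, RCX = 1, RDX = 2, RBX = 3, RSP = 4, RBP = 5 };

// Sketch of the wrapper behind constants_x86_64.h:35: a CpuRegister stores
// its hardware encoding and unwraps it via a constexpr AsRegister().
class CpuRegister {
 public:
  explicit constexpr CpuRegister(Register reg) : reg_(reg) {}  // assumed shape
  constexpr Register AsRegister() const { return reg_; }

 private:
  const Register reg_;
};

// As in assembler_x86_64.cc:28, streaming a register prints its encoding.
inline std::ostream& operator<<(std::ostream& os, const CpuRegister& reg) {
  return os << reg.AsRegister();
}

int main() {
  CpuRegister rbx(RBX);
  return (rbx.AsRegister() == RBX) ? 0 : 1;
}
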
assembler_x86_64.cc
28 return os << reg.AsRegister(); in operator <<()
42 if (addr.rm() != RSP || addr.cpu_index().AsRegister() == RSP) { in operator <<()
51 if (addr.rm() != RSP || addr.cpu_index().AsRegister() == RSP) { in operator <<()
57 if (addr.rm() != RSP || addr.cpu_index().AsRegister() == RSP) { in operator <<()
2867 X86_64ManagedRegister::FromCpuRegister(src1.AsRegister()), in andn()
3820 const bool src_rax = src.AsRegister() == RAX; in xchgl()
3821 const bool dst_rax = dst.AsRegister() == RAX; in xchgl()
3840 const bool src_rax = src.AsRegister() == RAX; in xchgq()
3841 const bool dst_rax = dst.AsRegister() == RAX; in xchgq()
3995 if (immediate.is_uint8() && reg.AsRegister() < 4) { in testl()
[all …]
jni_macro_assembler_x86_64.cc
63 cfi().RelOffset(DWARFReg(spill.AsCpuRegister().AsRegister()), 0); in BuildFrame()
125 cfi().Restore(DWARFReg(spill.AsCpuRegister().AsRegister())); in RemoveFrame()
347 size_t cpu_reg_number = static_cast<size_t>(x86_64_reg.AsCpuRegister().AsRegister()); in MoveArguments()
412 DCHECK(!mdest.Equals(X86_64ManagedRegister::FromCpuRegister(GetScratchRegister().AsRegister()))); in Move()
assembler_x86_64.h
198 CHECK_EQ(base_in.AsRegister(), RSP); in Address()
229 CHECK_NE(index_in.AsRegister(), RSP); // Illegal addressing mode. in Address()
236 CHECK_NE(index_in.AsRegister(), RSP); // Illegal addressing mode. in Address()
assembler_x86_64_test.cc
126 return a.AsRegister() < b.AsRegister(); in operator ()()
524 if (index->AsRegister() == x86_64::RSP) { in TEST_F()
527 } else if (base->AsRegister() == index->AsRegister()) { in TEST_F()
/art/compiler/jni/quick/x86_64/
calling_convention_x86_64.cc
57 result |= (1u << r.AsX86_64().AsCpuRegister().AsRegister()); in CalculateCoreCalleeSpillMask()
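
The calling-convention hit builds the core callee-save spill mask by OR-ing in one bit per saved register, indexed by the encoding that AsRegister() returns. A hedged sketch of that bitmask idea, using a plain array of encodings instead of the real ManagedRegister plumbing (the callee-save list below is illustrative, not quoted from the source):

#include <cstddef>
#include <cstdint>

enum Register { RAX = 0, RCX, RDX, RBX, RSP, RBP, RSI, RDI,
                R8, R9, R10, R11, R12, R13, R14, R15 };

// Mirrors the pattern in CalculateCoreCalleeSpillMask(): set one bit per
// callee-save register, at the position given by its hardware encoding.
uint32_t CalculateCoreSpillMask(const Register* regs, size_t count) {
  uint32_t result = 0u;
  for (size_t i = 0; i < count; ++i) {
    result |= (1u << static_cast<uint32_t>(regs[i]));
  }
  return result;
}

int main() {
  // Illustrative x86-64 callee-save set: RBX, RBP, R12-R15.
  const Register callee_saves[] = { RBX, RBP, R12, R13, R14, R15 };
  uint32_t mask = CalculateCoreSpillMask(callee_saves, 6);
  return mask == 0xF028u ? 0 : 1;  // bits 3, 5, 12, 13, 14, 15
}
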