/art/runtime/arch/arm64/ |
D | asm_support_arm64.S |
    137 .macro SAVE_TWO_REGS_BASE base, reg1, reg2, offset
    138     stp \reg1, \reg2, [\base, #(\offset)]
    140     .cfi_rel_offset \reg2, (\offset) + 8
    143 .macro SAVE_TWO_REGS reg1, reg2, offset
    144     SAVE_TWO_REGS_BASE sp, \reg1, \reg2, \offset
    147 .macro RESTORE_TWO_REGS_BASE base, reg1, reg2, offset
    148     ldp \reg1, \reg2, [\base, #(\offset)]
    150     .cfi_restore \reg2
    153 .macro RESTORE_TWO_REGS reg1, reg2, offset
    154     RESTORE_TWO_REGS_BASE sp, \reg1, \reg2, \offset
|
D | quick_entrypoints_arm64.S |
    34 .macro SAVE_TWO_REGS_INCREASE_FRAME reg1, reg2, frame_adjustment
    35     stp \reg1, \reg2, [sp, #-(\frame_adjustment)]!
    38     .cfi_rel_offset \reg2, 8
    41 .macro RESTORE_TWO_REGS_DECREASE_FRAME reg1, reg2, frame_adjustment
    42     ldp \reg1, \reg2, [sp], #(\frame_adjustment)
    44     .cfi_restore \reg2
|
/art/runtime/interpreter/mterp/arm64/ |
D | main.S |
    316 .macro SAVE_TWO_REGS reg1, reg2, offset
    317     stp \reg1, \reg2, [sp, #(\offset)]
    319     .cfi_rel_offset \reg2, (\offset) + 8
    325 .macro RESTORE_TWO_REGS reg1, reg2, offset
    326     ldp \reg1, \reg2, [sp, #(\offset)]
    328     .cfi_restore \reg2
    334 .macro SAVE_TWO_REGS_INCREASE_FRAME reg1, reg2, frame_adjustment
    335     stp \reg1, \reg2, [sp, #-(\frame_adjustment)]!
    338     .cfi_rel_offset \reg2, 8
    344 .macro RESTORE_TWO_REGS_DECREASE_FRAME reg1, reg2, frame_adjustment
    [all …]
|
/art/compiler/utils/ |
D | assembler_test.h |
    195  for (auto reg2 : reg2_registers) {  variable
    199  (assembler_.get()->*f)(*reg1, *reg2, new_imm * multiplier + bias);
    209  std::string reg2_string = (this->*GetName2)(*reg2);
    250  for (auto reg2 : reg2_registers) {  in RepeatTemplatedRegistersImmBits() local
    255  (assembler_.get()->*f)(*reg1, *reg2, *reg3, new_imm + bias);  in RepeatTemplatedRegistersImmBits()
    265  std::string reg2_string = (this->*GetName2)(*reg2);  in RepeatTemplatedRegistersImmBits()
    312  for (auto reg2 : reg2_registers) {  in RepeatTemplatedImmBitsRegisters() local
    316  (assembler_.get()->*f)(new_imm, *reg1, *reg2);  in RepeatTemplatedImmBitsRegisters()
    326  std::string reg2_string = (this->*GetName2)(*reg2);  in RepeatTemplatedImmBitsRegisters()
    1274 for (auto reg2 : reg2_registers) {  in RepeatTemplatedRegisters() local
    [all …]
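The assembler_test.h hits above are all instances of one driver pattern: nested loops over every candidate reg1, reg2 (and, where present, an immediate), with each combination fed to the assembler through a pointer-to-member and paired with an expected disassembly string. A minimal sketch of that pattern follows; FakeAssembler, AddImm, and RepeatRegistersImm are made-up names for illustration, not ART's actual templates.

    #include <cstdint>
    #include <sstream>
    #include <string>
    #include <vector>

    struct FakeAssembler {
      std::vector<std::string> emitted;
      void AddImm(int reg1, int reg2, int64_t imm) {
        std::ostringstream oss;
        oss << "add r" << reg1 << ", r" << reg2 << ", #" << imm;
        emitted.push_back(oss.str());
      }
    };

    // Drive every (reg1, reg2, imm) combination through an assembler member
    // function, the way the RepeatTemplated* helpers above do, and collect the
    // text we expect the disassembler to produce for later comparison.
    template <typename Fn>
    std::vector<std::string> RepeatRegistersImm(FakeAssembler* assembler, Fn f,
                                                const std::vector<int>& reg1_registers,
                                                const std::vector<int>& reg2_registers,
                                                const std::vector<int64_t>& imms) {
      std::vector<std::string> expected;
      for (int reg1 : reg1_registers) {
        for (int reg2 : reg2_registers) {        // the reg2 loop visible in the hits above
          for (int64_t imm : imms) {
            (assembler->*f)(reg1, reg2, imm);    // call through a pointer-to-member
            std::ostringstream oss;
            oss << "add r" << reg1 << ", r" << reg2 << ", #" << imm;
            expected.push_back(oss.str());
          }
        }
      }
      return expected;
    }

    int main() {
      FakeAssembler assembler;
      RepeatRegistersImm(&assembler, &FakeAssembler::AddImm, {0, 1}, {2, 3}, {0, 42});
      return 0;
    }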
|
/art/compiler/optimizing/ |
D | code_generator_x86_64.h | 167 void Exchange64(CpuRegister reg1, CpuRegister reg2);
|
D | code_generator_x86_64.cc |
    5926 void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg1, CpuRegister reg2) {  in Exchange64() argument
    5928   __ movq(reg1, reg2);  in Exchange64()
    5929   __ movq(reg2, CpuRegister(TMP));  in Exchange64()
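Read together with the elided line 5927, which presumably stages reg1 in the scratch register TMP, the sequence is a plain swap through TMP (ART's movq(dst, src) takes the destination first). A sketch of the same exchange on ordinary integers, for illustration only:

    #include <cstdint>

    // Hedged sketch of the swap Exchange64() emits: TMP = reg1; reg1 = reg2; reg2 = TMP.
    void ExchangeLikeExchange64(int64_t& reg1, int64_t& reg2) {
      int64_t tmp = reg1;  // movq TMP, reg1   (the line elided in the hits above)
      reg1 = reg2;         // movq reg1, reg2
      reg2 = tmp;          // movq reg2, TMP
    }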
|
D | code_generator_arm_vixl.cc |
    4617 vixl32::Register reg2 = RegisterFrom(second);  in VisitRem() local
    4622 __ Sdiv(temp, reg1, reg2);  in VisitRem()
    4623 __ Mls(out_reg, temp, reg2, reg1);  in VisitRem()
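The Sdiv/Mls pair above is the usual way to compute an integer remainder on ARM, which has a divide instruction but no remainder instruction: Sdiv produces the quotient, and Mls(out, a, b, c) computes c - a * b, reconstructing reg1 - (reg1 / reg2) * reg2. A reference computation in C++, assuming a non-zero divisor:

    #include <cstdint>

    // Matches the Sdiv + Mls sequence emitted in VisitRem():
    //   Sdiv temp, reg1, reg2      -> temp = reg1 / reg2 (truncated signed division)
    //   Mls  out,  temp, reg2, reg1 -> out = reg1 - temp * reg2
    int32_t RemLikeVisitRem(int32_t reg1, int32_t reg2) {
      int32_t temp = reg1 / reg2;  // quotient, as Sdiv computes it
      return reg1 - temp * reg2;   // remainder, as Mls reconstructs it
    }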
|
/art/compiler/utils/x86_64/ |
D | assembler_x86_64.h |
    730 void testl(CpuRegister reg1, CpuRegister reg2);
    734 void testq(CpuRegister reg1, CpuRegister reg2);
|
D | assembler_x86_64.cc |
    3975 void X86_64Assembler::testl(CpuRegister reg1, CpuRegister reg2) {  in testl() argument
    3977   EmitOptionalRex32(reg1, reg2);  in testl()
    3979   EmitRegisterOperand(reg1.LowBits(), reg2.LowBits());  in testl()
    4017 void X86_64Assembler::testq(CpuRegister reg1, CpuRegister reg2) {  in testq() argument
    4019   EmitRex64(reg1, reg2);  in testq()
    4021   EmitRegisterOperand(reg1.LowBits(), reg2.LowBits());  in testq()
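The only difference visible between the two overloads above is the prefix helper: testq goes through EmitRex64 (a mandatory REX.W prefix selecting 64-bit operand size), while testl uses EmitOptionalRex32 (REX emitted only when an extended register requires it). Semantically, TEST ANDs the two registers and updates the flags without writing a result; a small sketch of the zero-flag outcome, modelling only ZF:

    #include <cstdint>

    // ZF after test{l,q} reg1, reg2: set when the ANDed (and, for testl,
    // 32-bit-truncated) operands are zero. Other flags are not modelled.
    bool TestSetsZF(uint64_t reg1, uint64_t reg2, bool is_testq) {
      uint64_t mask = is_testq ? ~UINT64_C(0) : UINT64_C(0xffffffff);
      return ((reg1 & reg2) & mask) == 0;
    }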
|
/art/compiler/utils/x86/ |
D | assembler_x86.h | 687 void testl(Register reg1, Register reg2);
|
D | assembler_x86.cc |
    2966 void X86Assembler::testl(Register reg1, Register reg2) {  in testl() argument
    2969   EmitRegisterOperand(reg1, reg2);  in testl()
|