/art/compiler/trampolines/

D | trampoline_compiler.cc
      65  const vixl::aarch32::Register temp_reg = temps.Acquire();  in CreateTrampoline() local
      69  ___ Ldr(temp_reg, MemOperand(r0, JNIEnvExt::SelfOffset(4).Int32Value()));  in CreateTrampoline()
      70  ___ Ldr(pc, MemOperand(temp_reg, offset.Int32Value()));  in CreateTrampoline()
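The three hits above are essentially the whole ARM32 JNI trampoline: a scratch register is taken from the assembler's scratch-register pool, the owning Thread is loaded out of the JNIEnv passed in r0, and the branch is performed by loading the entrypoint straight into pc. A minimal sketch of that shape with VIXL's aarch32 MacroAssembler follows; the function name and the two offset parameters are illustrative stand-ins, not ART's API (the real code uses JNIEnvExt::SelfOffset(4) and a thread-local entrypoint offset).

    // Sketch only: the VIXL calls mirror the hits above, but EmitJniDispatchTrampoline,
    // self_offset and entrypoint_offset are made-up names for illustration.
    #include "aarch32/macro-assembler-aarch32.h"

    using namespace vixl::aarch32;

    void EmitJniDispatchTrampoline(MacroAssembler* masm,
                                   int32_t self_offset,          // e.g. JNIEnvExt::SelfOffset(4).Int32Value()
                                   int32_t entrypoint_offset) {  // offset of the target code pointer in Thread
      UseScratchRegisterScope temps(masm);
      Register temp = temps.Acquire();                      // released when 'temps' goes out of scope
      masm->Ldr(temp, MemOperand(r0, self_offset));         // r0 holds the JNIEnv*; fetch its Thread*
      masm->Ldr(pc, MemOperand(temp, entrypoint_offset));   // tail-branch by loading into pc
    }

Going through UseScratchRegisterScope instead of hard-coding a register lets VIXL track that the scratch register is in use across the macro-assembler calls.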
/art/runtime/arch/x86/

D | quick_entrypoints_x86.S
      28  MACRO2(SETUP_SAVE_ALL_CALLEE_SAVES_FRAME, got_reg, temp_reg)
      34  LOAD_RUNTIME_INSTANCE \temp_reg, \got_reg
      36  pushl RUNTIME_SAVE_ALL_CALLEE_SAVES_METHOD_OFFSET(REG_VAR(temp_reg))
      51  MACRO2(SETUP_SAVE_REFS_ONLY_FRAME, got_reg, temp_reg)
      57  LOAD_RUNTIME_INSTANCE \temp_reg, \got_reg
      59  pushl RUNTIME_SAVE_REFS_ONLY_METHOD_OFFSET(REG_VAR(temp_reg))
      76  MACRO2(SETUP_SAVE_REFS_ONLY_FRAME_PRESERVE_GOT_REG, got_reg, temp_reg)
      84  LOAD_RUNTIME_INSTANCE \temp_reg, \got_reg
      86  pushl RUNTIME_SAVE_REFS_ONLY_METHOD_OFFSET(REG_VAR(temp_reg))
     113  MACRO2(SETUP_SAVE_REFS_AND_ARGS_FRAME, got_reg, temp_reg)
    [all …]
/art/compiler/optimizing/

D | intrinsics_arm_vixl.cc
     621  const vixl32::Register temp_reg = temps.Acquire();  in GenUnsafeGet() local
     622  __ Add(temp_reg, base, offset);  in GenUnsafeGet()
     623  __ Ldrexd(trg_lo, trg_hi, MemOperand(temp_reg));  in GenUnsafeGet()
     789  const vixl32::Register temp_reg = temps.Acquire();  in GenUnsafePut() local
     791  __ Add(temp_reg, base, offset);  in GenUnsafePut()
     794  __ Ldrexd(temp_lo, temp_hi, MemOperand(temp_reg));  in GenUnsafePut()
     795  __ Strexd(temp_lo, value_lo, value_hi, MemOperand(temp_reg));  in GenUnsafePut()
    1257  vixl32::Register temp_reg = temps.Acquire();  in GenerateStringCompareToLoop() local
    1258  __ Ldr(temp_reg, MemOperand(str, temp1));  in GenerateStringCompareToLoop()
    1260  __ Cmp(temp_reg, temp2);  in GenerateStringCompareToLoop()
    [all …]
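The GenUnsafePut() hits show the ARM32 pattern for an atomic 64-bit store: the effective address is formed in a scratch register, then an ldrexd/strexd pair is used, and the store is retried until the exclusive status register reports success (the retry branch falls outside the truncated excerpt; the lone Ldrexd in GenUnsafeGet() needs no loop because a doubleword exclusive load is already atomic). A hedged sketch of the retry loop, with an arbitrary register assignment:

    // Sketch only: the register roles (r0 = base, r1 = offset, r2/r3 = new value,
    // r4/r5 = previous value, r6 = store-exclusive status) are illustrative and the
    // surrounding intrinsic plumbing is omitted.
    #include "aarch32/macro-assembler-aarch32.h"

    using namespace vixl::aarch32;

    void EmitAtomicStore64(MacroAssembler* masm) {
      UseScratchRegisterScope temps(masm);
      Register addr = temps.Acquire();
      Label retry;

      masm->Add(addr, r0, r1);                     // addr = base + offset
      masm->Bind(&retry);
      masm->Ldrexd(r4, r5, MemOperand(addr));      // claim the exclusive monitor on [addr]
      masm->Strexd(r6, r2, r3, MemOperand(addr));  // try to store the new 64-bit value
      masm->Cmp(r6, 0);                            // 0 = success, 1 = reservation lost
      masm->B(ne, &retry);                         // loop until the exclusive store succeeds
    }

The r2/r3 and r4/r5 pairs are kept even/odd to match the A32 encoding constraints of ldrexd/strexd.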
D | code_generator_x86.cc
    4632  Register temp_reg = locations->GetTemp(0).AsRegister<Register>();  in VisitRor() local
    4636  __ movl(temp_reg, first_reg_hi);  in VisitRor()
    4638  __ shrd(first_reg_lo, temp_reg, second_reg);  in VisitRor()
    4639  __ movl(temp_reg, first_reg_hi);  in VisitRor()
    4642  __ cmovl(kNotEqual, first_reg_lo, temp_reg);  in VisitRor()
    4651  __ movl(temp_reg, first_reg_lo);  in VisitRor()
    4653  __ movl(first_reg_hi, temp_reg);  in VisitRor()
    4659  __ movl(temp_reg, first_reg_lo);  in VisitRor()
    4665  __ shrd(first_reg_hi, temp_reg, imm);  in VisitRor()
    4669  __ movl(temp_reg, first_reg_lo);  in VisitRor()
    [all …]
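VisitRor() here lowers a 64-bit rotate on 32-bit x86, where the value lives in a lo/hi register pair: shrd shifts one half while pulling the vacated bits in from the other, and cmov swaps the halves when the rotate amount is 32 or more. The excerpt is too truncated to reproduce the exact emitted sequence, but the value it computes is an ordinary rotate right; a plain C++ rendering of that computation (reference semantics only, not the code generator):

    #include <cstdint>

    // Rotate the 64-bit value hi:lo right by 'shift' using nothing wider than
    // 32-bit operations, the way the emitted shrd/cmov sequence has to.
    uint64_t RotateRight64(uint32_t lo, uint32_t hi, uint32_t shift) {
      shift &= 63;
      if (shift >= 32) {   // corresponds to the cmov-based swap of the two halves
        uint32_t tmp = lo;
        lo = hi;
        hi = tmp;
        shift -= 32;
      }
      if (shift != 0) {    // shrd dst, src, n: shift dst right, filling from src
        uint32_t new_lo = (lo >> shift) | (hi << (32 - shift));
        uint32_t new_hi = (hi >> shift) | (lo << (32 - shift));
        lo = new_lo;
        hi = new_hi;
      }
      return (static_cast<uint64_t>(hi) << 32) | lo;
    }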
D | code_generator_arm_vixl.cc
    9077  vixl32::Register temp_reg = RegisterFrom(temp);  in GenerateStaticOrDirectCall() local
    9078  EmitMovwMovtPlaceholder(labels, temp_reg);  in GenerateStaticOrDirectCall()
    9084  vixl32::Register temp_reg = RegisterFrom(temp);  in GenerateStaticOrDirectCall() local
    9085  EmitMovwMovtPlaceholder(labels, temp_reg);  in GenerateStaticOrDirectCall()
    9086  GetAssembler()->LoadFromOffset(kLoadWord, temp_reg, temp_reg, /* offset*/ 0);  in GenerateStaticOrDirectCall()
    9092  vixl32::Register temp_reg = RegisterFrom(temp);  in GenerateStaticOrDirectCall() local
    9093  EmitMovwMovtPlaceholder(labels, temp_reg);  in GenerateStaticOrDirectCall()
    9095  GetAssembler()->LoadFromOffset(kLoadWord, temp_reg, temp_reg, /* offset*/ 0);  in GenerateStaticOrDirectCall()
    9565  vixl32::Register temp_reg = temps.Acquire();  in VisitPackedSwitch() local
    9570  __ Adds(temp_reg, value_reg, -lower_bound);  in VisitPackedSwitch()
    [all …]
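Every GenerateStaticOrDirectCall() hit is the same idiom: EmitMovwMovtPlaceholder() materializes an address into temp_reg with a movw/movt pair that gets patched at link time (in ART the pair is PC-relative rather than absolute), and LoadFromOffset then reads the word that address points at, for example a .bss or boot-image slot holding the resolved method. The sketch below shows the movw/movt-then-load shape with a plain 32-bit address so it stands on its own; the placeholder labels and PC-relative fixups of the real code are deliberately left out, and the function name is made up.

    // Sketch only: EmitLoadThroughMovwMovt is an illustrative name, and a known
    // absolute address replaces the linker-patched placeholder of the real code.
    #include "aarch32/macro-assembler-aarch32.h"

    using namespace vixl::aarch32;

    void EmitLoadThroughMovwMovt(MacroAssembler* masm, Register temp, uint32_t address) {
      masm->Movw(temp, address & 0xffffu);     // low 16 bits of the address
      masm->Movt(temp, address >> 16);         // high 16 bits of the address
      masm->Ldr(temp, MemOperand(temp, 0));    // load the word the address points at
    }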
D | code_generator_x86_64.cc
    7504  CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();  in VisitPackedSwitch() local
    7557  __ leal(temp_reg, Address(value_reg_in, -lower_bound));  in VisitPackedSwitch()
    7558  value_reg_out = temp_reg.AsRegister();  in VisitPackedSwitch()
    7571  __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));  in VisitPackedSwitch()
    7574  __ addq(temp_reg, base_reg);  in VisitPackedSwitch()
    7577  __ jmp(temp_reg);  in VisitPackedSwitch()
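On x86-64, VisitPackedSwitch() dispatches through a table of 32-bit offsets stored relative to the table's own address: leal biases the switch value to a zero-based index, movsxd loads and sign-extends the selected entry, addq adds the table base back in, and jmp lands on the case block (the bounds check that falls through to the default case sits outside the excerpt). The same arithmetic written out in plain C++, with hypothetical names, purely as a reference:

    #include <cstdint>

    // Reference arithmetic only; the range check that routes out-of-range values
    // to the default case is assumed to have happened already.
    uintptr_t ResolvePackedSwitchTarget(int32_t value,
                                        int32_t lower_bound,
                                        const int32_t* table,     // jump table emitted with the code
                                        uintptr_t table_base) {   // runtime address of that table
      uint32_t index = static_cast<uint32_t>(value - lower_bound);    // leal temp, [value - lower_bound]
      int64_t relative = table[index];                                // movsxd temp, [base + index*4]
      return table_base + static_cast<uintptr_t>(relative);           // addq temp, base ; jmp temp
    }

Storing offsets rather than absolute addresses keeps the table position-independent and only 4 bytes per case.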
D | code_generator_arm64.cc
    6200  Register temp_reg = RegisterFrom(maybe_temp, type);  in GenerateReferenceLoadOneRegister() local
    6201  __ Mov(temp_reg, out_reg);  in GenerateReferenceLoadOneRegister()