/art/runtime/arch/x86/ |
D | jni_entrypoints_x86.S | 30 andl LITERAL(0xfffffffe), %eax // ArtMethod** sp
|
D | quick_entrypoints_x86.S | 542 andl LITERAL(0xFFFFFFF0), %ebx 639 andl LITERAL(0xFFFFFFF0), %ebx 1076 andl LITERAL(OBJECT_ALIGNMENT_MASK_TOGGLED), %edx 1218 andl LITERAL(LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED), %ecx // zero the gc bits. 1234 andl LITERAL(LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED), %ecx // zero the read barrier bits. 1290 andl LITERAL(LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED), %edx // zero the gc bits. 1296 andl LITERAL(LOCK_WORD_GC_STATE_MASK_SHIFTED), %ecx // ecx: new lock word zero except original rb bits. 1737 andl LITERAL(METHOD_DEX_CACHE_SIZE_MINUS_ONE), %eax // Calculate DexCache method slot index. 2337 andl LITERAL(0xFFFFFFF0), %esp // Align stack
|
/art/runtime/interpreter/mterp/x86_64ng/ |
D | invoke.S | 93 andl LITERAL(0xffff), %eax
|
D | other.S | 19 andl MACRO_LITERAL(0xf), rINST # rINST <- A
|
D | main.S | 863 andl MACRO_LITERAL(0xf), %eax 872 andl MACRO_LITERAL(0xf), %eax
|
/art/runtime/arch/x86_64/ |
D | quick_entrypoints_x86_64.S | 450 andl LITERAL(0xFFFFFFF0), %edx // Align frame size to 16 bytes. 544 andl LITERAL(0xFFFFFFF0), %edx // Align frame size to 16 bytes. 1079 andl LITERAL(LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED), %ecx // zero the gc bits. 1094 andl LITERAL(LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED), %ecx // zero the gc bits. 1131 andl LITERAL(LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED), %edx // zero the gc bits. 1136 andl LITERAL(LOCK_WORD_GC_STATE_MASK_SHIFTED), %ecx // ecx: new lock word zero except original gc bits. 1434 andl LITERAL(METHOD_DEX_CACHE_SIZE_MINUS_ONE), %eax // Calculate DexCache method slot index.
|
/art/runtime/interpreter/mterp/x86_64/ |
D | floating_point.S | 74 andl $$0xf, %ecx # ecx <- A
|
D | other.S | 39 andl %eax, rINST # rINST <- A
|
/art/runtime/interpreter/mterp/x86/ |
D | floating_point.S | 74 andl $$0xf, %ecx # ecx <- A
|
D | other.S | 43 andl %eax, rINST # rINST <- A
|
D | arithmetic.S | 42 andl $$0x000000FF, %eax
|
/art/compiler/utils/x86/ |
D | assembler_x86.h | 694 void andl(Register dst, const Immediate& imm); 695 void andl(Register dst, Register src); 696 void andl(Register dst, const Address& address);
|
D | assembler_x86.cc | 3022 void X86Assembler::andl(Register dst, Register src) { in andl() function in art::x86::X86Assembler 3029 void X86Assembler::andl(Register reg, const Address& address) { in andl() function in art::x86::X86Assembler 3036 void X86Assembler::andl(Register dst, const Immediate& imm) { in andl() function in art::x86::X86Assembler
|
/art/compiler/utils/x86_64/ |
D | assembler_x86_64.h | 740 void andl(CpuRegister dst, const Immediate& imm); 741 void andl(CpuRegister dst, CpuRegister src); 742 void andl(CpuRegister reg, const Address& address);
|
D | assembler_x86_64_test.cc | 909 DriverStr(Repeatrr(&x86_64::X86_64Assembler::andl, "andl %{reg2}, %{reg1}"), "andl"); in TEST_F() 913 DriverStr(RepeatrI(&x86_64::X86_64Assembler::andl, in TEST_F()
|
D | assembler_x86_64.cc | 4052 void X86_64Assembler::andl(CpuRegister dst, CpuRegister src) { in andl() function in art::x86_64::X86_64Assembler 4060 void X86_64Assembler::andl(CpuRegister reg, const Address& address) { in andl() function in art::x86_64::X86_64Assembler 4068 void X86_64Assembler::andl(CpuRegister dst, const Immediate& imm) { in andl() function in art::x86_64::X86_64Assembler
|
/art/compiler/optimizing/ |
D | intrinsics_x86.cc | 567 __ andl(out_lo, src_lo); in GenLowestOneBit() local 568 __ andl(out_hi, src_hi); in GenLowestOneBit() local 585 __ andl(out, src.AsRegister<Register>()); in GenLowestOneBit() local 587 __ andl(out, Address(ESP, src.GetStackIndex())); in GenLowestOneBit() local 2112 __ andl(temp, imm_mask); in SwapBits() local 2113 __ andl(reg, imm_mask); in SwapBits() local
|
D | code_generator_x86.cc | 3621 __ andl(EAX, Immediate(kC2ConditionMask)); in GenerateRemFP() local 3685 __ andl(out, Immediate(abs_imm-1)); in RemByPowerOfTwo() local 7799 __ andl(first.AsRegister<Register>(), second.AsRegister<Register>()); in HandleBitwiseOperation() local 7808 __ andl(first.AsRegister<Register>(), in HandleBitwiseOperation() local 7820 __ andl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex())); in HandleBitwiseOperation() local 7832 __ andl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>()); in HandleBitwiseOperation() local 7833 __ andl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>()); in HandleBitwiseOperation() local 7844 __ andl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex())); in HandleBitwiseOperation() local 7845 __ andl(first.AsRegisterPairHigh<Register>(), in HandleBitwiseOperation() local 7870 __ andl(first_low, low); in HandleBitwiseOperation() local [all …]
|
D | intrinsics_x86_64.cc | 2191 __ andl(temp, imm_mask); in SwapBits() local 2192 __ andl(reg, imm_mask); in SwapBits() local 2422 __ andl(out, tmp); in GenOneBit() local
|
D | code_generator_x86_64.cc | 3707 __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask)); in GenerateRemFP() local 3787 __ andl(out, Immediate(abs_imm-1)); in RemByPowerOfTwo() local 6538 __ andl(out, Immediate(1)); in VisitInstanceOf() local 6725 __ andl(out, Immediate(1)); in VisitInstanceOf() local 7094 __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>()); in HandleBitwiseOperation() local 7104 __ andl(first.AsRegister<CpuRegister>(), imm); in HandleBitwiseOperation() local 7114 __ andl(first.AsRegister<CpuRegister>(), address); in HandleBitwiseOperation() local
|