/art/compiler/utils/arm/

constants_arm.h
    57  TIMES_4 = 2,    (enumerator)
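
The definition hit above shows only the TIMES_4 = 2 line. For orientation, here is a minimal sketch of the ScaleFactor encoding these constants headers follow, where each enumerator holds the log2 of the byte multiplier; every value other than TIMES_4 is an assumption by analogy, not a quote from the hits:

    // Sketch of the scale-factor encoding; only TIMES_4 = 2 is confirmed by
    // the search results, the remaining enumerators are assumed by analogy.
    enum ScaleFactor {
      TIMES_1 = 0,  // index * 1  (shift by 0)
      TIMES_2 = 1,  // index * 2  (shift by 1)
      TIMES_4 = 2,  // index * 4  (shift by 2)
      TIMES_8 = 3,  // index * 8  (shift by 3)
    };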

/art/compiler/utils/x86/

constants_x86.h
    60  TIMES_4 = 2,    (enumerator)

assembler_x86_test.cc
    68  addresses_.push_back(x86::Address(x86::EDI, x86::ECX, x86::TIMES_4, 17));    in SetUpHelpers()
    77  addresses_.push_back(x86::Address(x86::ESP, x86::ECX, x86::TIMES_4, 17));    in SetUpHelpers()
   245  all_addresses.push_back(x86::Address(*index, x86::TIMES_4, 1));    in TEST_F()
   251  all_addresses.push_back(x86::Address(*base, *index, x86::TIMES_4, 1));    in TEST_F()
   468  x86::Register(x86::EDI), x86::Register(x86::EBX), x86::TIMES_4, 12));    in TEST_F()
   470  x86::Register(x86::ESI), x86::Register(x86::EBX), x86::TIMES_4, 12));    in TEST_F()
   472  x86::Register(x86::EDI), x86::Register(x86::EAX), x86::TIMES_4, 12));    in TEST_F()
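
The test hits above build scaled operands in two shapes, x86::Address(base, index, scale, disp) and x86::Address(index, scale, disp). Below is a standalone sketch of the arithmetic a TIMES_4 operand stands for; EffectiveAddress and kTimes4 are illustrative names, not ART helpers:

    #include <cstdint>

    // Illustrative only: the arithmetic an x86 SIB operand with scale TIMES_4 encodes.
    // x86::Address(base, index, x86::TIMES_4, disp)  ->  [base + index * 4 + disp]
    constexpr unsigned kTimes4 = 2;  // same value as x86::TIMES_4

    constexpr uintptr_t EffectiveAddress(uintptr_t base, uintptr_t index, int32_t disp) {
      return base + (index << kTimes4) + disp;
    }

    static_assert(EffectiveAddress(0x1000, 3, 17) == 0x1000 + 3 * 4 + 17,
                  "a scale of TIMES_4 multiplies the index by four");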

/art/compiler/utils/x86_64/

constants_x86_64.h
    87  TIMES_4 = 2,    (enumerator)

assembler_x86_64_test.cc
   169  x86_64::CpuRegister(x86_64::RCX), x86_64::TIMES_4, 17));    in SetUpHelpers()
   186  x86_64::CpuRegister(x86_64::RCX), x86_64::TIMES_4, 17));    in SetUpHelpers()
   531  all_addresses.push_back(x86_64::Address(*index, x86_64::TIMES_4, 1));    in TEST_F()
   537  all_addresses.push_back(x86_64::Address(*base, *index, x86_64::TIMES_4, 1));    in TEST_F()
  2159  … x86_64::CpuRegister(x86_64::RDI), x86_64::CpuRegister(x86_64::RBX), x86_64::TIMES_4, 12), false);    in TEST_F()
  2161  … x86_64::CpuRegister(x86_64::R10), x86_64::CpuRegister(x86_64::RBX), x86_64::TIMES_4, 12), false);    in TEST_F()
  2163  … x86_64::CpuRegister(x86_64::RDI), x86_64::CpuRegister(x86_64::R9), x86_64::TIMES_4, 12), false);    in TEST_F()
  2173  … x86_64::CpuRegister(x86_64::RDI), x86_64::CpuRegister(x86_64::RBX), x86_64::TIMES_4, 12), true);    in TEST_F()
  2175  … x86_64::CpuRegister(x86_64::R10), x86_64::CpuRegister(x86_64::RBX), x86_64::TIMES_4, 12), true);    in TEST_F()
  2177  … x86_64::CpuRegister(x86_64::RDI), x86_64::CpuRegister(x86_64::R9), x86_64::TIMES_4, 12), true);    in TEST_F()

/art/compiler/optimizing/

intrinsics_x86.cc
   122  __ movl(temp2, Address(src, temp1, ScaleFactor::TIMES_4, adjusted_offset));    in EmitNativeCode()
   125  __ movl(temp2, Address(src, temp2, ScaleFactor::TIMES_4, offset));    in EmitNativeCode()
   145  __ movl(Address(dest, temp1, ScaleFactor::TIMES_4, adjusted_offset), temp2);    in EmitNativeCode()
   148  __ movl(Address(dest, temp3, ScaleFactor::TIMES_4, offset), temp2);    in EmitNativeCode()
  2970  static_assert((1u << TIMES_4) == sizeof(mirror::HeapReference<mirror::Object>),    in VisitIntegerValueOf()
  2981  Address(method_address_reg, out, TIMES_4, CodeGeneratorX86::kPlaceholder32BitOffset));    in VisitIntegerValueOf()
  2991  __ movl(out, Address(out, in, TIMES_4, 0));    in VisitIntegerValueOf()
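
The VisitIntegerValueOf() hits tie TIMES_4 to the size of a compressed heap reference: the static_assert at line 2970 requires 1 << TIMES_4 to equal sizeof(mirror::HeapReference<mirror::Object>), so indexing the boxed-Integer cache with scale TIMES_4 advances by one 32-bit reference per step. A detached restatement of that check, with CompressedRef as a stand-in type:

    #include <cstdint>

    // Stand-in for mirror::HeapReference<mirror::Object>: a 32-bit compressed
    // reference, which is what the static_assert at line 2970 checks against.
    using CompressedRef = uint32_t;

    constexpr unsigned kTimes4 = 2;  // same value as TIMES_4
    static_assert((1u << kTimes4) == sizeof(CompressedRef),
                  "scale TIMES_4 advances by one compressed reference per index");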

code_generator_x86_64.cc
   816  __ shll(CpuRegister(index_reg), Immediate(TIMES_4));    in EmitNativeCode()
  5296  __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));    in VisitArrayGet()
  5313  __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));    in VisitArrayGet()
  5320  (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;    in VisitArrayGet()
  5338  __ movss(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));    in VisitArrayGet()
  5431  Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);    in VisitArraySet()
  5534  Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);    in VisitArraySet()
  5563  Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);    in VisitArraySet()
  6970  TIMES_4,    in VisitCheckCast()
  7330  Address src = CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset);    in GenerateArrayLoadWithBakerReadBarrier()
  [all …]
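
Two patterns recur in this file: for a register index, shll(index_reg, Immediate(TIMES_4)) turns an element index into a byte offset, and for a constant index the offset is folded at compile time as (constant << TIMES_4) + data_offset (line 5320). A minimal restatement of that fold follows; kInt32ArrayDataOffset and ConstantIndexOffset are placeholder names for illustration, not ART constants:

    #include <cstdint>

    constexpr unsigned kTimes4 = 2;                 // same value as TIMES_4
    constexpr uint32_t kInt32ArrayDataOffset = 12;  // hypothetical header size, illustration only

    // Mirrors the constant-index fold at line 5320:
    // byte offset of element `index` = (index << TIMES_4) + data_offset.
    constexpr uint32_t ConstantIndexOffset(uint32_t index) {
      return (index << kTimes4) + kInt32ArrayDataOffset;
    }

    static_assert(ConstantIndexOffset(5) == 5 * sizeof(int32_t) + kInt32ArrayDataOffset,
                  "each int32_t element is four bytes past the previous one");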

code_generator_x86.cc
   795  __ shll(index_reg, Immediate(TIMES_4));    in EmitNativeCode()
  5940  __ movl(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_4, data_offset));    in VisitArrayGet()
  5957  __ movl(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_4, data_offset));    in VisitArrayGet()
  5964  (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;    in VisitArrayGet()
  5986  __ movss(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_4, data_offset));    in VisitArrayGet()
  6085  Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_4, offset);    in VisitArraySet()
  6188  Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_4, offset);    in VisitArraySet()
  6222  Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_4, offset);    in VisitArraySet()
  7664  TIMES_4,    in VisitCheckCast()
  8053  Address src = CodeGeneratorX86::ArrayAddress(obj, index, TIMES_4, data_offset);    in GenerateArrayLoadWithBakerReadBarrier()
  [all …]

code_generator_arm_vixl.cc
   819  __ Lsl(index_reg, index_reg, TIMES_4);    in EmitNativeCode()
  6433  size_t offset = (Int32ConstantFrom(index) << TIMES_4) + data_offset;    in VisitArrayGet()
  6501  size_t offset = (Int32ConstantFrom(index) << TIMES_4) + data_offset;    in VisitArrayGet()
  6506  __ Add(temp, obj, Operand(RegisterFrom(index), vixl32::LSL, TIMES_4));    in VisitArrayGet()
  6629  size_t offset = (Int32ConstantFrom(index) << TIMES_4) + data_offset;    in VisitArraySet()
  6733  size_t offset = (Int32ConstantFrom(index) << TIMES_4) + data_offset;    in VisitArraySet()
  6785  size_t offset = (Int32ConstantFrom(index) << TIMES_4) + data_offset;    in VisitArraySet()
  6790  __ Add(temp, array, Operand(RegisterFrom(index), vixl32::LSL, TIMES_4));    in VisitArraySet()
  8893  ScaleFactor scale_factor = TIMES_4;    in GenerateArrayLoadWithBakerReadBarrier()
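
On ARM the enumerator value doubles as a shift amount: Lsl(index_reg, index_reg, TIMES_4) and Operand(RegisterFrom(index), vixl32::LSL, TIMES_4) both scale the index by four because TIMES_4 == 2. Below is a plain C++ restatement of the address the Add at lines 6506 and 6790 computes; ScaledElementBase is an illustrative name, not an ART helper:

    #include <cstdint>

    constexpr unsigned kTimes4 = 2;  // same value as TIMES_4

    // temp = obj + (index << TIMES_4): the address of element `index` relative
    // to the object, before the data_offset from the surrounding code is added.
    constexpr uintptr_t ScaledElementBase(uintptr_t obj, uint32_t index) {
      return obj + (static_cast<uintptr_t>(index) << kTimes4);
    }

    static_assert(ScaledElementBase(0x1000, 6) == 0x1000 + 6 * 4,
                  "LSL #2 scales the index by four");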

intrinsics_x86_64.cc
  2645  static_assert((1u << TIMES_4) == sizeof(mirror::HeapReference<mirror::Object>),    in VisitIntegerValueOf()
  2647  __ movl(out, Address(argument, out, TIMES_4, 0));    in VisitIntegerValueOf()

code_generator_vector_x86_64.cc
  1247  case 4: scale = TIMES_4; break;    in VecAddress()

code_generator_vector_x86.cc
  1274  case 4: scale = TIMES_4; break;    in VecAddress()
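
Both VecAddress() hits choose the scale from the vector element size, and only the 4-byte case appears in the results. The sketch below fills in the other cases by analogy with the power-of-two encoding and is not a quote of the ART switch:

    enum ScaleFactor { TIMES_1 = 0, TIMES_2 = 1, TIMES_4 = 2, TIMES_8 = 3 };

    // Hypothetical helper restating the `case 4: scale = TIMES_4; break;` hits:
    // pick the addressing scale from the vector element size in bytes.
    inline ScaleFactor ScaleForElementSize(unsigned size_in_bytes) {
      switch (size_in_bytes) {
        case 1: return TIMES_1;   // assumed by analogy
        case 2: return TIMES_2;   // assumed by analogy
        case 4: return TIMES_4;   // confirmed by lines 1247 and 1274 above
        case 8: return TIMES_8;   // assumed by analogy
        default: return TIMES_1;  // unreachable for the element sizes used here (assumption)
      }
    }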