/art/compiler/utils/x86/ |
D | jni_macro_assembler_x86.cc |
    161  void X86JNIMacroAssembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {
    164    __ movl(Address(ESP, dest), src.AsCpuRegister());
    167  void X86JNIMacroAssembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
    170    __ movl(Address(ESP, dest), src.AsCpuRegister());
    173  void X86JNIMacroAssembler::StoreImmediateToFrame(FrameOffset dest, uint32_t imm) {
    174    __ movl(Address(ESP, dest), Immediate(imm));
    194    X86ManagedRegister dest = mdest.AsX86();
    195    if (dest.IsNoRegister()) {
    197    } else if (dest.IsCpuRegister()) {
    199      __ movl(dest.AsCpuRegister(), Address(ESP, src));
    [all …]
|
D | jni_macro_assembler_x86.h |
    59   void StoreRef(FrameOffset dest, ManagedRegister src) override;
    60   void StoreRawPtr(FrameOffset dest, ManagedRegister src) override;
    62   void StoreImmediateToFrame(FrameOffset dest, uint32_t imm) override;
    68   void StoreSpanning(FrameOffset dest, ManagedRegister src, FrameOffset in_off) override;
    71   void Load(ManagedRegister dest, FrameOffset src, size_t size) override;
    73   void LoadFromThread(ManagedRegister dest, ThreadOffset32 src, size_t size) override;
    75   void LoadRef(ManagedRegister dest, FrameOffset src) override;
    77   void LoadRef(ManagedRegister dest, ManagedRegister base, MemberOffset offs,
    80   void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) override;
    82   void LoadRawPtrFromThread(ManagedRegister dest, ThreadOffset32 offs) override;
    [all …]
|
/art/compiler/utils/x86_64/ |
D | jni_macro_assembler_x86_64.h |
    60   void StoreRef(FrameOffset dest, ManagedRegister src) override;
    61   void StoreRawPtr(FrameOffset dest, ManagedRegister src) override;
    63   void StoreImmediateToFrame(FrameOffset dest, uint32_t imm) override;
    69   void StoreSpanning(FrameOffset dest, ManagedRegister src, FrameOffset in_off) override;
    72   void Load(ManagedRegister dest, FrameOffset src, size_t size) override;
    74   void LoadFromThread(ManagedRegister dest, ThreadOffset64 src, size_t size) override;
    76   void LoadRef(ManagedRegister dest, FrameOffset src) override;
    78   void LoadRef(ManagedRegister dest,
    83   void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) override;
    85   void LoadRawPtrFromThread(ManagedRegister dest, ThreadOffset64 offs) override;
    [all …]
|
D | jni_macro_assembler_x86_64.cc |
    187  void X86_64JNIMacroAssembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {
    190    __ movl(Address(CpuRegister(RSP), dest), src.AsCpuRegister());
    193  void X86_64JNIMacroAssembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
    196    __ movq(Address(CpuRegister(RSP), dest), src.AsCpuRegister());
    199  void X86_64JNIMacroAssembler::StoreImmediateToFrame(FrameOffset dest, uint32_t imm) {
    200    __ movl(Address(CpuRegister(RSP), dest), Immediate(imm)); // TODO(64) movq?
    221    X86_64ManagedRegister dest = mdest.AsX86_64();
    222    if (dest.IsNoRegister()) {
    224    } else if (dest.IsCpuRegister()) {
    227      __ movl(dest.AsCpuRegister(), Address(CpuRegister(RSP), src));
    [all …]
|
/art/compiler/utils/arm64/ |
D | jni_macro_assembler_arm64.h |
    69   void StoreRef(FrameOffset dest, ManagedRegister src) override;
    70   void StoreRawPtr(FrameOffset dest, ManagedRegister src) override;
    71   void StoreImmediateToFrame(FrameOffset dest, uint32_t imm) override;
    74   void StoreSpanning(FrameOffset dest, ManagedRegister src, FrameOffset in_off) override;
    77   void Load(ManagedRegister dest, FrameOffset src, size_t size) override;
    78   void LoadFromThread(ManagedRegister dest, ThreadOffset64 src, size_t size) override;
    79   void LoadRef(ManagedRegister dest, FrameOffset src) override;
    80   void LoadRef(ManagedRegister dest,
    84   void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) override;
    85   void LoadRawPtrFromThread(ManagedRegister dest, ThreadOffset64 offs) override;
    [all …]
|
D | jni_macro_assembler_arm64.cc |
    54   void Arm64JNIMacroAssembler::GetCurrentThread(ManagedRegister dest) {
    55     ___ Mov(reg_x(dest.AsArm64().AsXRegister()), reg_x(TR));
    193  void Arm64JNIMacroAssembler::LoadImmediate(XRegister dest, int32_t value, Condition cond) {
    195    ___ Mov(reg_x(dest), value);
    201    temps.Exclude(reg_x(dest));
    204    ___ Csel(reg_x(dest), temp, reg_x(dest), cond);
    206    ___ Csel(reg_x(dest), reg_x(XZR), reg_x(dest), cond);
    212    WRegister dest,
    217    ___ Ldrsb(reg_w(dest), MEM_OP(reg_x(base), offset));
    220    ___ Ldrsh(reg_w(dest), MEM_OP(reg_x(base), offset));
    [all …]
|
/art/compiler/utils/arm/ |
D | jni_macro_assembler_arm_vixl.h |
    66   void StoreRef(FrameOffset dest, ManagedRegister src) override;
    67   void StoreRawPtr(FrameOffset dest, ManagedRegister src) override;
    69   void StoreImmediateToFrame(FrameOffset dest, uint32_t imm) override;
    75   void StoreSpanning(FrameOffset dest, ManagedRegister src, FrameOffset in_off) override;
    78   void Load(ManagedRegister dest, FrameOffset src, size_t size) override;
    80   void LoadFromThread(ManagedRegister dest,
    84   void LoadRef(ManagedRegister dest, FrameOffset src) override;
    86   void LoadRef(ManagedRegister dest,
    91   void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) override;
    93   void LoadRawPtrFromThread(ManagedRegister dest, ThreadOffset32 offs) override;
    [all …]
|
D | jni_macro_assembler_arm_vixl.cc |
    218  void ArmVIXLJNIMacroAssembler::Store(FrameOffset dest, ManagedRegister m_src, size_t size) {
    226    asm_.StoreToOffset(kStoreWord, AsVIXLRegister(src), sp, dest.Int32Value());
    231    MemOperand(sp, dest.Int32Value()));
    234    asm_.StoreSToOffset(AsVIXLSRegister(src), sp, dest.Int32Value());
    238    asm_.StoreDToOffset(AsVIXLDRegister(src), sp, dest.Int32Value());
    242  void ArmVIXLJNIMacroAssembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {
    246    asm_.StoreToOffset(kStoreWord, src, sp, dest.Int32Value());
    249  void ArmVIXLJNIMacroAssembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
    253    asm_.StoreToOffset(kStoreWord, src, sp, dest.Int32Value());
    256  void ArmVIXLJNIMacroAssembler::StoreSpanning(FrameOffset dest,
    [all …]
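The Store() excerpt above dispatches on the source register's kind (and the value size) before choosing a store instruction. Below is a minimal sketch of that dispatch shape, assuming hypothetical Reg/RegKind stand-ins for ART's ManagedRegister and using printf in place of the VIXL emitter; none of these names come from the listing.

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    // Hypothetical stand-ins for the register abstraction in the listing.
    enum class RegKind { kNone, kCore, kFpSingle, kFpDouble };
    struct Reg { RegKind kind; int code; };

    // Pick a store form per register class; printf stands in for the emitter.
    void Store(uint32_t frame_offset, Reg src, size_t size) {
      unsigned off = static_cast<unsigned>(frame_offset);
      switch (src.kind) {
        case RegKind::kCore:      // word store of a core register to [sp + off]
          std::printf("str r%d, [sp, #%u]  ; size=%zu\n", src.code, off, size);
          break;
        case RegKind::kFpSingle:  // single-precision FP store
          std::printf("vstr s%d, [sp, #%u]\n", src.code, off);
          break;
        case RegKind::kFpDouble:  // double-precision FP store
          std::printf("vstr d%d, [sp, #%u]\n", src.code, off);
          break;
        case RegKind::kNone:      // nothing to store
          break;
      }
    }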
|
D | assembler_arm_vixl.cc |
    303    vixl32::Register dest,
    314    AddConstant(dest, base, add_to_base);
    315    base = dest;
    319    vixl32::Register temp = (dest.Is(base)) ? temps.Acquire() : dest;
    323    ___ Add(dest, dest, (dest.Is(base)) ? temp : base);
    324    base = dest;
    332    ___ Ldrsb(dest, MemOperand(base, offset));
    335    ___ Ldrb(dest, MemOperand(base, offset));
    338    ___ Ldrsh(dest, MemOperand(base, offset));
    341    ___ Ldrh(dest, MemOperand(base, offset));
    [all …]
|
/art/compiler/utils/ |
D | jni_macro_assembler.h |
    116  virtual void StoreRef(FrameOffset dest, ManagedRegister src) = 0;
    117  virtual void StoreRawPtr(FrameOffset dest, ManagedRegister src) = 0;
    119  virtual void StoreImmediateToFrame(FrameOffset dest, uint32_t imm) = 0;
    126  virtual void StoreSpanning(FrameOffset dest,
    131  virtual void Load(ManagedRegister dest, FrameOffset src, size_t size) = 0;
    133  virtual void LoadFromThread(ManagedRegister dest,
    137  virtual void LoadRef(ManagedRegister dest, FrameOffset src) = 0;
    139  virtual void LoadRef(ManagedRegister dest,
    144  virtual void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) = 0;
    146  virtual void LoadRawPtrFromThread(ManagedRegister dest, ThreadOffset<kPointerSize> offs) = 0;
    [all …]
|
/art/ |
D | test.py |
    31   parser.add_argument('-j', default='', dest='n_threads', help='specify number of concurrent tests')
    32   parser.add_argument('--run-test', '-r', action='store_true', dest='run_test', help='execute run tes…
    33   parser.add_argument('--gtest', '-g', action='store_true', dest='gtest', help='execute gtest tests')
    34   parser.add_argument('--target', action='store_true', dest='target', help='test on target system')
    35   parser.add_argument('--host', action='store_true', dest='host', help='test on build host system')
    36   parser.add_argument('--help-runner', action='store_true', dest='help_runner', help='show help for o…
|
/art/libartbase/base/ |
D | leb128.h |
    246  static inline uint8_t* EncodeUnsignedLeb128(uint8_t* dest, uint32_t value) {
    250    *dest++ = out | 0x80;
    254    *dest++ = out;
    255    return dest;
    259  static inline void EncodeUnsignedLeb128(Vector* dest, uint32_t value) {
    264    dest->push_back(out | 0x80);
    268    dest->push_back(out);
    273  static inline void UpdateUnsignedLeb128(uint8_t* dest, uint32_t value) {
    274    const uint8_t* old_end = dest;
    277    for (uint8_t* end = EncodeUnsignedLeb128(dest, value); end < old_end; end++) {
    [all …]
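The EncodeUnsignedLeb128 lines above are the standard ULEB128 scheme: seven payload bits per byte, with bit 0x80 set on every byte except the last. A minimal self-contained sketch of that scheme follows for reference; the name EncodeUleb128 and the worked example are illustrative, not taken from the listing.

    #include <cstdint>
    #include <vector>

    // ULEB128 encoder sketch: emit 7 payload bits per byte, setting the
    // continuation bit 0x80 while higher-order bits remain.
    inline void EncodeUleb128(std::vector<uint8_t>* dest, uint32_t value) {
      uint8_t out = value & 0x7f;
      value >>= 7;
      while (value != 0) {
        dest->push_back(out | 0x80);  // more bytes follow
        out = value & 0x7f;
        value >>= 7;
      }
      dest->push_back(out);  // final byte, high bit clear
    }

    // Example: 300 encodes as 0xAC 0x02 (low 7 bits 0b0101100 with 0x80 set,
    // then the remaining bits 0b0000010).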
|
D | mem_map_test.cc |
    165    MemMap dest = MemMap::MapAnonymous("MapAnonymousEmpty-atomic-replace-dest",
    170    ASSERT_TRUE(dest.IsValid());
    178    void* dest_addr = dest.Begin();
    185    ASSERT_TRUE(dest.ReplaceWith(&source, &error_msg)) << error_msg;
    191    ASSERT_EQ(dest.Size(), static_cast<size_t>(kPageSize));
    193    ASSERT_EQ(memcmp(dest.Begin(), data.data(), dest.Size()), 0);
    198    MemMap dest = MemMap::MapAnonymous("MapAnonymousEmpty-atomic-replace-dest",
    207    ASSERT_TRUE(dest.IsValid());
    215    uint8_t* dest_addr = dest.Begin();
    223    dest.SetSize(kPageSize);
    [all …]
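The mem_map_test.cc lines exercise ART's MemMap wrapper around anonymous mappings. As a rough, non-ART analogue, the sketch below reserves one anonymous page with plain POSIX mmap, writes a pattern, and checks it; the function name and the 0xAB pattern are illustrative assumptions.

    #include <sys/mman.h>
    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    // Reserve an anonymous read/write page, fill it, and verify the fill.
    bool FillAnonymousPage(size_t page_size) {
      void* addr = mmap(nullptr, page_size, PROT_READ | PROT_WRITE,
                        MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
      if (addr == MAP_FAILED) return false;
      std::memset(addr, 0xAB, page_size);
      uint8_t* bytes = static_cast<uint8_t*>(addr);
      bool ok = (bytes[0] == 0xAB) && (bytes[page_size - 1] == 0xAB);
      munmap(addr, page_size);
      return ok;
    }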
|
D | casts.h |
    93     Dest dest;
    94     memcpy(&dest, &source, sizeof(dest));
    95     return dest;
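The casts.h excerpt is the memcpy idiom for reinterpreting a value's bit pattern without undefined behavior. A hedged, self-contained sketch of that idiom follows; the static_asserts and the float-to-integer usage note are illustrative additions, not part of the quoted lines.

    #include <cstdint>
    #include <cstring>
    #include <type_traits>

    // memcpy-based bit_cast sketch: copy the object representation of
    // `source` into a value of type Dest of the same size.
    template <typename Dest, typename Source>
    inline Dest bit_cast(const Source& source) {
      static_assert(sizeof(Dest) == sizeof(Source), "sizes must match");
      static_assert(std::is_trivially_copyable<Dest>::value &&
                    std::is_trivially_copyable<Source>::value,
                    "types must be trivially copyable");
      Dest dest;
      std::memcpy(&dest, &source, sizeof(dest));
      return dest;
    }

    // Usage: inspect a float's bits without type-punning UB.
    // uint32_t bits = bit_cast<uint32_t>(1.0f);  // 0x3f800000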
|
/art/libdexfile/dex/ |
D | compact_dex_utils.h |
    28   static inline void AlignmentPadVector(std::vector<T, Allocator<T>>* dest,
    30     while (!IsAlignedParam(dest->size(), alignment)) {
    31       dest->push_back(T());
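The AlignmentPadVector excerpt appends default-constructed elements until the vector's size reaches an alignment boundary. A self-contained sketch of the same idiom; a plain modulo test stands in for ART's IsAlignedParam() helper, and the usage comment is illustrative.

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Pad `dest` with value-initialized elements until its element count is a
    // multiple of `alignment`.
    template <typename T>
    void AlignmentPadVector(std::vector<T>* dest, size_t alignment) {
      while (dest->size() % alignment != 0) {
        dest->push_back(T());
      }
    }

    // Usage: a 5-element byte vector padded to a 4-byte boundary grows to 8.
    // std::vector<uint8_t> v(5); AlignmentPadVector(&v, 4);  // v.size() == 8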
|
/art/tools/checker/ |
D | checker.py |
    32   parser.add_argument("--check-prefix", dest="check_prefix", default="CHECK", metavar="PREFIX",
    34   parser.add_argument("--list-passes", dest="list_passes", action="store_true",
    36   parser.add_argument("--dump-pass", dest="dump_pass", metavar="PASS",
    38   parser.add_argument("--arch", dest="arch", choices=archs_list,
|
/art/test/testrunner/ |
D | testrunner.py |
    1031  parser.add_argument('-t', '--test', action='append', dest='tests', help='name(s) of the test(s)')
    1034  global_group.add_argument('-j', type=int, dest='n_thread')
    1035  global_group.add_argument('--timeout', default=timeout, type=int, dest='timeout')
    1036  global_group.add_argument('--verbose', '-v', action='store_true', dest='verbose')
    1037  global_group.add_argument('--dry-run', action='store_true', dest='dry_run')
    1038  global_group.add_argument("--skip", action='append', dest="skips", default=[],
    1040  global_group.add_argument("--no-skips", dest="ignore_skips", action='store_true', default=False,
    1044  action='store_false', dest='build',
    1048  action='store_true', dest='build',
    1052  action='store_true', dest='dist',
    [all …]
|
/art/libartbase/base/unix_file/ |
D | fd_file_test.cc |
    159    FdFile dest(src_tmp.GetFilename(), O_RDWR, false);
    160    ASSERT_GE(dest.Fd(), 0);
    161    ASSERT_TRUE(dest.IsOpened());
    163    ASSERT_TRUE(dest.Copy(&src, 0, sizeof(src_data)));
    164    ASSERT_EQ(0, dest.Flush());
    165    ASSERT_EQ(static_cast<int64_t>(sizeof(src_data)), dest.GetLength());
    168    ASSERT_TRUE(dest.PreadFully(check_data, sizeof(src_data), 0u));
    171    ASSERT_EQ(0, dest.Close());
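The fd_file_test.cc lines copy one file into another and read the bytes back to verify them. Below is an illustrative analogue with plain POSIX calls instead of ART's FdFile; the function name and signature are assumptions for this sketch.

    #include <unistd.h>
    #include <cstddef>
    #include <cstring>
    #include <vector>

    // Copy `len` bytes from src_fd to dest_fd (both starting at offset 0),
    // flush, then read them back and compare against `expected`.
    bool CopyAndVerify(int src_fd, int dest_fd, const void* expected, size_t len) {
      std::vector<char> buf(len);
      if (pread(src_fd, buf.data(), len, 0) != static_cast<ssize_t>(len)) return false;
      if (pwrite(dest_fd, buf.data(), len, 0) != static_cast<ssize_t>(len)) return false;
      if (fsync(dest_fd) != 0) return false;
      std::vector<char> check(len);
      if (pread(dest_fd, check.data(), len, 0) != static_cast<ssize_t>(len)) return false;
      return std::memcmp(check.data(), expected, len) == 0;
    }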
|
/art/runtime/mirror/ |
D | object.cc |
    77   ObjPtr<Object> Object::CopyObject(ObjPtr<mirror::Object> dest,
    84     uint8_t* dst_bytes = reinterpret_cast<uint8_t*>(dest.Ptr()) + offset;
    121    CopyReferenceFieldsWithReadBarrierVisitor visitor(dest);
    128    ObjPtr<ObjectArray<Object>> array = dest->AsObjectArray<Object>();
    129    WriteBarrier::ForArrayWrite(dest, 0, array->GetLength());
    132    WriteBarrier::ForEveryFieldWrite(dest);
    134    return dest;
|
D | dex_cache-inl.h |
    396  inline void DexCache::FixupStrings(StringDexCacheType* dest, const Visitor& visitor) {
    403    dest[i].store(source, std::memory_order_relaxed);
    408  inline void DexCache::FixupResolvedTypes(TypeDexCacheType* dest, const Visitor& visitor) {
    415    dest[i].store(source, std::memory_order_relaxed);
    420  inline void DexCache::FixupResolvedMethodTypes(MethodTypeDexCacheType* dest,
    428    dest[i].store(source, std::memory_order_relaxed);
    433  inline void DexCache::FixupResolvedCallSites(GcRoot<mirror::CallSite>* dest,
    439    dest[i] = GcRoot<mirror::CallSite>(new_source);
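The DexCache::Fixup* lines all follow one pattern: walk a destination array and publish visitor-remapped entries with relaxed atomic stores. An illustrative analogue over plain std::atomic<uint32_t> slots, since the dex-cache entry types and the visitor are not shown in the listing; the names here are assumptions.

    #include <atomic>
    #include <cstddef>
    #include <cstdint>

    // For each slot: read the old entry, let `visitor` remap it, and publish
    // the result into `dest` with a relaxed store (no ordering guarantees).
    template <typename Visitor>
    void FixupEntries(std::atomic<uint32_t>* dest,
                      const std::atomic<uint32_t>* source,
                      size_t count,
                      const Visitor& visitor) {
      for (size_t i = 0; i < count; ++i) {
        uint32_t remapped = visitor(source[i].load(std::memory_order_relaxed));
        dest[i].store(remapped, std::memory_order_relaxed);
      }
    }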
|
/art/tools/runtime_memusage/ |
D | prune_sanitizer_output.py |
    106  default="", dest="out_dir_name", type=extant_directory,
    109  default=False, dest="check_exact",
    113  default=4, dest="stack_min_size", type=int,
|
D | symbol_trace_info.py |
    142  default="", dest="out_dir_name", type=is_directory,
    145  default=None, dest="dex_file",
    150  dest="offsets",
    158  dest="times",
|
/art/test/986-native-method-bind/src/art/ |
D | Test986.java |
    37   private static void setNativeTransform(Method method, String dest) {
    38     SymbolMap.put(method, dest);
|
/art/tools/ |
D | generate_cmake_lists.py |
    58   parser.add_argument('--project-name', dest="project_name", required=True,
    60   parser.add_argument('--arch', dest="arch", required=True, help='arch')
|
/art/compiler/optimizing/ |
D | code_generator_x86_64.h |
    580  void Load32BitValue(CpuRegister dest, int32_t value);
    581  void Load64BitValue(CpuRegister dest, int64_t value);
    582  void Load32BitValue(XmmRegister dest, int32_t value);
    583  void Load64BitValue(XmmRegister dest, int64_t value);
    584  void Load32BitValue(XmmRegister dest, float value);
    585  void Load64BitValue(XmmRegister dest, double value);
    588  void Compare32BitValue(CpuRegister dest, int32_t value);
    589  void Compare64BitValue(CpuRegister dest, int64_t value);
    607  void Store64BitValueToStack(Location dest, int64_t value);
|