/art/runtime/arch/x86/

D | thread_x86.cc |
    62  descriptor_table_entry_t entry;  in InitCpu() local
    63  memset(&entry, 0, sizeof(entry));  in InitCpu()
    64  entry.limit0 = (limit & 0x0ffff);  in InitCpu()
    65  entry.limit = (limit & 0xf0000) >> 16;  in InitCpu()
    66  entry.base0 = (base & 0x0000ffff);  in InitCpu()
    67  entry.base1 = (base & 0x00ff0000) >> 16;  in InitCpu()
    68  entry.base2 = (base & 0xff000000) >> 24;  in InitCpu()
    69  entry.type = ((read_exec_only ^ 1) << 1) | (contents << 2);  in InitCpu()
    70  entry.s = 1;  in InitCpu()
    71  entry.dpl = 0x3;  in InitCpu()
    [all …]

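The thread_x86.cc excerpt scatters a flat base/limit pair across the split fields of an 8-byte x86 segment descriptor. A minimal sketch of the same packing, assuming the conventional descriptor bit layout, with the struct and helper names invented here rather than taken from ART's exact descriptor_table_entry_t:

    #include <cstdint>
    #include <cstring>

    // Hypothetical mirror of the classic 8-byte x86 segment descriptor.
    // Field names follow the excerpt; the real ART/bionic definition may differ.
    struct DescriptorTableEntry {
      uint16_t limit0;       // limit bits 0..15
      uint16_t base0;        // base bits 0..15
      uint8_t  base1;        // base bits 16..23
      uint8_t  type  : 4;    // segment type (read/write/execute bits)
      uint8_t  s     : 1;    // 1 = code/data segment, 0 = system segment
      uint8_t  dpl   : 2;    // descriptor privilege level (3 = user mode)
      uint8_t  p     : 1;    // present
      uint8_t  limit : 4;    // limit bits 16..19
      uint8_t  avl   : 1;    // available for OS use
      uint8_t  l     : 1;    // 64-bit code segment
      uint8_t  d     : 1;    // default operand size (1 = 32-bit)
      uint8_t  g     : 1;    // granularity (1 = limit counted in 4 KiB pages)
      uint8_t  base2;        // base bits 24..31
    };

    // Pack (base, limit) the same way the InitCpu() excerpt does.
    DescriptorTableEntry MakeUserDataSegment(uint32_t base, uint32_t limit) {
      DescriptorTableEntry entry;
      std::memset(&entry, 0, sizeof(entry));
      entry.limit0 = limit & 0x0ffff;
      entry.limit  = (limit & 0xf0000) >> 16;
      entry.base0  = base & 0x0000ffff;
      entry.base1  = (base & 0x00ff0000) >> 16;
      entry.base2  = (base & 0xff000000) >> 24;
      entry.type   = 0x2;  // writable data segment (the excerpt derives this from flags)
      entry.s      = 1;
      entry.dpl    = 0x3;
      entry.p      = 1;
      return entry;
    }

The CPU reassembles the 32-bit base and 20-bit limit from these scattered pieces, which is why the code masks and shifts rather than storing the values whole.
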
/art/compiler/optimizing/

D | nodes_test.cc |
    34  HBasicBlock* entry = new (GetAllocator()) HBasicBlock(graph);  in TEST_F() local
    35  graph->AddBlock(entry);  in TEST_F()
    36  graph->SetEntryBlock(entry);  in TEST_F()
    39  entry->AddInstruction(parameter);  in TEST_F()
    40  entry->AddInstruction(new (GetAllocator()) HGoto());  in TEST_F()
    44  entry->AddSuccessor(first_block);  in TEST_F()
    74  HBasicBlock* entry = new (GetAllocator()) HBasicBlock(graph);  in TEST_F() local
    75  graph->AddBlock(entry);  in TEST_F()
    76  graph->SetEntryBlock(entry);  in TEST_F()
    81  entry->AddInstruction(parameter1);  in TEST_F()
    [all …]

D | load_store_analysis_test.cc |
    33  HBasicBlock* entry = new (GetAllocator()) HBasicBlock(graph_);  in TEST_F() local
    34  graph_->AddBlock(entry);  in TEST_F()
    35  graph_->SetEntryBlock(entry);  in TEST_F()
    60  entry->AddInstruction(array);  in TEST_F()
    61  entry->AddInstruction(index);  in TEST_F()
    62  entry->AddInstruction(array_get1);  in TEST_F()
    63  entry->AddInstruction(array_get2);  in TEST_F()
    64  entry->AddInstruction(array_set1);  in TEST_F()
    65  entry->AddInstruction(array_set2);  in TEST_F()
    76  heap_location_collector.VisitBasicBlock(entry);  in TEST_F()
    [all …]

D | gvn_test.cc |
    31  HBasicBlock* entry = new (GetAllocator()) HBasicBlock(graph);  in TEST_F() local
    32  graph->AddBlock(entry);  in TEST_F()
    33  graph->SetEntryBlock(entry);  in TEST_F()
    38  entry->AddInstruction(parameter);  in TEST_F()
    42  entry->AddSuccessor(block);  in TEST_F()
    112  HBasicBlock* entry = new (GetAllocator()) HBasicBlock(graph);  in TEST_F() local
    113  graph->AddBlock(entry);  in TEST_F()
    114  graph->SetEntryBlock(entry);  in TEST_F()
    119  entry->AddInstruction(parameter);  in TEST_F()
    123  entry->AddSuccessor(block);  in TEST_F()
    [all …]

D | stack_map_stream.cc |
    109  BitTableBuilder<RegisterMask>::Entry entry;  in BeginStackMapEntry() local
    110  entry[RegisterMask::kValue] = register_mask >> shift;  in BeginStackMapEntry()
    111  entry[RegisterMask::kShift] = shift;  in BeginStackMapEntry()
    112  current_stack_map_[StackMap::kRegisterMaskIndex] = register_masks_.Dedup(&entry);  in BeginStackMapEntry()
    192  BitTableBuilder<InlineInfo>::Entry entry;  in BeginInlineInfoEntry() local
    193  entry[InlineInfo::kIsLast] = InlineInfo::kMore;  in BeginInlineInfoEntry()
    194  entry[InlineInfo::kDexPc] = dex_pc;  in BeginInlineInfoEntry()
    195  entry[InlineInfo::kNumberOfDexRegisters] = static_cast<uint32_t>(expected_num_dex_registers_);  in BeginInlineInfoEntry()
    197  entry[InlineInfo::kArtMethodHi] = High32Bits(reinterpret_cast<uintptr_t>(method));  in BeginInlineInfoEntry()
    198  entry[InlineInfo::kArtMethodLo] = Low32Bits(reinterpret_cast<uintptr_t>(method));  in BeginInlineInfoEntry()
    [all …]

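In stack_map_stream.cc each entry is a row of named columns, and only the index returned by Dedup() is stored in the stack map, so identical rows are shared across many maps. A small sketch of that dedup-on-insert pattern, with invented Row/RowTable types rather than ART's BitTableBuilder:

    #include <array>
    #include <cstddef>
    #include <cstdint>
    #include <unordered_map>
    #include <vector>

    // Hypothetical fixed-width row of columns, comparable and hashable so
    // that identical rows collapse to one stored copy.
    struct Row {
      std::array<uint32_t, 2> cols{};
      bool operator==(const Row& other) const { return cols == other.cols; }
    };

    struct RowHash {
      size_t operator()(const Row& row) const {
        size_t hash = 0;
        for (uint32_t c : row.cols) hash = hash * 31u + c;  // simple polynomial mix
        return hash;
      }
    };

    // Dedup() returns the index of an existing identical row, or appends the
    // row and returns the new index; callers store only that small index.
    class RowTable {
     public:
      uint32_t Dedup(const Row& row) {
        auto it = index_.find(row);
        if (it != index_.end()) {
          return it->second;  // reuse the existing row
        }
        uint32_t idx = static_cast<uint32_t>(rows_.size());
        rows_.push_back(row);
        index_.emplace(row, idx);
        return idx;
      }

     private:
      std::vector<Row> rows_;
      std::unordered_map<Row, uint32_t, RowHash> index_;
    };

Storing a per-map index into a deduplicated table keeps repeated register masks and inline-info rows from being encoded more than once.
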
D | bounds_check_elimination_test.cc |
    67  HBasicBlock* entry = new (GetAllocator()) HBasicBlock(graph_);  in TEST_F() local
    68  graph_->AddBlock(entry);  in TEST_F()
    69  graph_->SetEntryBlock(entry);  in TEST_F()
    74  entry->AddInstruction(parameter1);  in TEST_F()
    75  entry->AddInstruction(parameter2);  in TEST_F()
    86  entry->AddSuccessor(block1);  in TEST_F()
    164  HBasicBlock* entry = new (GetAllocator()) HBasicBlock(graph_);  in TEST_F() local
    165  graph_->AddBlock(entry);  in TEST_F()
    166  graph_->SetEntryBlock(entry);  in TEST_F()
    171  entry->AddInstruction(parameter1);  in TEST_F()
    [all …]

D | register_allocator_test.cc |
    492  HBasicBlock* entry = new (GetAllocator()) HBasicBlock(graph);  in BuildIfElseWithPhi() local
    493  graph->AddBlock(entry);  in BuildIfElseWithPhi()
    494  graph->SetEntryBlock(entry);  in BuildIfElseWithPhi()
    497  entry->AddInstruction(parameter);  in BuildIfElseWithPhi()
    501  entry->AddSuccessor(block);  in BuildIfElseWithPhi()
    642  HBasicBlock* entry = new (GetAllocator()) HBasicBlock(graph);  in BuildFieldReturn() local
    643  graph->AddBlock(entry);  in BuildFieldReturn()
    644  graph->SetEntryBlock(entry);  in BuildFieldReturn()
    647  entry->AddInstruction(parameter);  in BuildFieldReturn()
    651  entry->AddSuccessor(block);  in BuildFieldReturn()
    [all …]

/art/libdexfile/dex/

D | test_dex_file_builder.h |
    106  for (auto& entry : strings_) {  in Build()
    107  entry.second.idx = string_idx;  in Build()
    109  entry.second.data_offset = data_section_size;  in Build()
    110  data_section_size += entry.first.length() + 1u /* length */ + 1u /* null-terminator */;  in Build()
    117  for (auto& entry : types_) {  in Build()
    118  entry.second = type_idx;  in Build()
    126  for (auto& entry : protos_) {  in Build()
    127  entry.second.idx = proto_idx;  in Build()
    129  size_t num_args = entry.first.args.size();  in Build()
    131  entry.second.data_offset = RoundUp(data_section_size, 4u);  in Build()
    [all …]

D | type_lookup_table.cc |
    105  const Entry* entry = &entries_[pos];  in Lookup() local
    106  if (entry->IsEmpty()) {  in Lookup()
    111  while (compared_hash_bits != entry->GetHashBits(mask_bits_)) {  in Lookup()
    112  if (entry->IsLast(mask_bits_)) {  in Lookup()
    115  pos = (pos + entry->GetNextPosDelta(mask_bits_)) & mask;  in Lookup()
    116  entry = &entries_[pos];  in Lookup()
    117  DCHECK(!entry->IsEmpty());  in Lookup()
    120  const char* first_checked_str = GetStringData(*entry);  in Lookup()
    122  return entry->GetClassDefIdx(mask_bits_);  in Lookup()
    125  if (entry->IsLast(mask_bits_)) {  in Lookup()
    [all …]

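type_lookup_table.cc probes an open-addressed table in which each entry carries a few bits of the hash plus a delta to the next slot of its collision chain, so most candidates are rejected before the expensive string comparison. A simplified, hypothetical version of that lookup, storing full hashes and std::string keys instead of packed hash bits and dex string data:

    #include <cstdint>
    #include <optional>
    #include <string>
    #include <vector>

    // Hypothetical entry: a cached hash next to the payload rejects most
    // mismatches cheaply, and next_pos_delta links the probe chain (0 = last).
    struct Entry {
      uint32_t hash_bits;
      uint32_t next_pos_delta;
      std::string key;   // stands in for the dex string data
      uint32_t value;    // stands in for the class_def index
      bool IsEmpty() const { return key.empty(); }
    };

    std::optional<uint32_t> Lookup(const std::vector<Entry>& entries,
                                   const std::string& key,
                                   uint32_t hash) {
      const uint32_t mask = static_cast<uint32_t>(entries.size()) - 1;  // size is a power of two
      uint32_t pos = hash & mask;
      const Entry* entry = &entries[pos];
      if (entry->IsEmpty()) {
        return std::nullopt;  // nothing ever hashed to this bucket
      }
      while (true) {
        // Cheap filter first: only fall through to the string compare when
        // the stored hash agrees.
        if (entry->hash_bits == hash && entry->key == key) {
          return entry->value;
        }
        if (entry->next_pos_delta == 0) {  // last entry of this chain
          return std::nullopt;
        }
        pos = (pos + entry->next_pos_delta) & mask;
        entry = &entries[pos];
      }
    }

Because the chain links are deltas stored inside the entries themselves, a lookup touches only small fixed-size records until the final string compare.
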
D | dex_file-inl.h |
    380  PositionInfo entry;  in DecodeDebugPositionInfo() local
    381  entry.line_ = DecodeDebugInfoParameterNames(&stream, VoidFunctor());  in DecodeDebugPositionInfo()
    389  entry.address_ += DecodeUnsignedLeb128(&stream);  in DecodeDebugPositionInfo()
    392  entry.line_ += DecodeSignedLeb128(&stream);  in DecodeDebugPositionInfo()
    410  entry.prologue_end_ = true;  in DecodeDebugPositionInfo()
    413  entry.epilogue_begin_ = true;  in DecodeDebugPositionInfo()
    417  entry.source_file_ = index_to_string_data(name_idx);  in DecodeDebugPositionInfo()
    422  entry.address_ += adjopcode / DBG_LINE_RANGE;  in DecodeDebugPositionInfo()
    423  entry.line_ += DBG_LINE_BASE + (adjopcode % DBG_LINE_RANGE);  in DecodeDebugPositionInfo()
    424  if (position_functor(entry)) {  in DecodeDebugPositionInfo()
    [all …]

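dex_file-inl.h walks the dex debug-info line-number program, where a "special" opcode advances the address and the line together. The constants below (first special opcode 0x0a, DBG_LINE_BASE = -4, DBG_LINE_RANGE = 15) come from the published dex format; the PositionEntry type and the small driver are illustrative only:

    #include <cstdint>
    #include <cstdio>

    // Dex debug-info bytecode constants: opcodes >= 0x0a encode a combined
    // line/address advance.
    constexpr uint8_t DBG_FIRST_SPECIAL = 0x0a;
    constexpr int32_t DBG_LINE_BASE = -4;
    constexpr int32_t DBG_LINE_RANGE = 15;

    struct PositionEntry {
      uint32_t address = 0;  // code offset, in 16-bit code units
      int32_t line = 1;      // the real decoder seeds this from the header's line_start
    };

    // Apply one special opcode to the running state machine, mirroring the
    // arithmetic in the DecodeDebugPositionInfo() excerpt above.
    void ApplySpecialOpcode(PositionEntry& entry, uint8_t opcode) {
      const int32_t adjopcode = opcode - DBG_FIRST_SPECIAL;
      entry.address += adjopcode / DBG_LINE_RANGE;
      entry.line += DBG_LINE_BASE + (adjopcode % DBG_LINE_RANGE);
    }

    int main() {
      PositionEntry entry;
      entry.line = 42;
      ApplySpecialOpcode(entry, 0x1f);  // advances address by 1 and line by 2
      std::printf("address=%u line=%d\n",
                  static_cast<unsigned>(entry.address), entry.line);  // address=1 line=44
    }
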
/art/dex2oat/

D | verifier_deps_test.cc |
    282  for (auto& entry : storage) {  in HasAssignable() local
    284  verifier_deps_->GetStringFromId(dex_file, entry.GetDestination());  in HasAssignable()
    285  std::string actual_source = verifier_deps_->GetStringFromId(dex_file, entry.GetSource());  in HasAssignable()
    300  for (auto& entry : dex_dep.second->classes_) {  in HasClass() local
    301  if (expected_resolved != entry.IsResolved()) {  in HasClass()
    305  std::string actual_klass = dex_dep.first->StringByTypeIdx(entry.GetDexTypeIndex());  in HasClass()
    313  std::string actual_access_flags = PrettyJavaAccessFlags(entry.GetAccessFlags());  in HasClass()
    334  for (auto& entry : dex_dep.second->fields_) {  in HasField() local
    335  if (expected_resolved != entry.IsResolved()) {  in HasField()
    339  const dex::FieldId& field_id = dex_dep.first->GetFieldId(entry.GetDexFieldIndex());  in HasField()
    [all …]

/art/runtime/verifier/

D | reg_type_cache.cc |
    48  ALWAYS_INLINE static inline bool MatchingPrecisionForClass(const RegType* entry, bool precise)  in MatchingPrecisionForClass() argument
    50  if (entry->IsPreciseReference() == precise) {  in MatchingPrecisionForClass()
    54  if (!precise && entry->GetClass()->CannotBeAssignedFromOtherTypes()) {  in MatchingPrecisionForClass()
    145  const RegType* entry = entries_[idx];  in MatchDescriptor() local
    146  if (descriptor != entry->descriptor_) {  in MatchDescriptor()
    149  if (entry->HasClass()) {  in MatchDescriptor()
    150  return MatchingPrecisionForClass(entry, precise);  in MatchDescriptor()
    154  DCHECK(entry->IsUnresolvedReference());  in MatchDescriptor()
    207  RegType* entry;  in From() local
    212  entry =  in From()
    [all …]

D | verifier_deps.cc |
    650  for (const T& entry : set) {  in EncodeSet() local
    651  EncodeTuple(out, entry);  in EncodeSet()
    912  for (const TypeAssignability& entry : dep.second->assignable_types_) {  in Dump() local
    914  << GetStringFromId(dex_file, entry.GetSource())  in Dump()
    916  << GetStringFromId(dex_file, entry.GetDestination())  in Dump()
    920  for (const TypeAssignability& entry : dep.second->unassignable_types_) {  in Dump() local
    922  << GetStringFromId(dex_file, entry.GetSource())  in Dump()
    924  << GetStringFromId(dex_file, entry.GetDestination())  in Dump()
    928  for (const ClassResolution& entry : dep.second->classes_) {  in Dump() local
    930  << dex_file.StringByTypeIdx(entry.GetDexTypeIndex())  in Dump()
    [all …]

/art/runtime/jit/

D | debugger_interface.cc |
    249  ArrayRef<const uint8_t> GetJITCodeEntrySymFile(const JITCodeEntry* entry) {  in GetJITCodeEntrySymFile() argument
    250  return ArrayRef<const uint8_t>(entry->symfile_addr_, entry->symfile_size_);  in GetJITCodeEntrySymFile()
    278  static void InsertNewEntry(const JITCodeEntry* entry, const JITCodeEntry* next) {  in InsertNewEntry() argument
    279  CHECK_EQ(entry->seqlock_.load(kNonRacingRelaxed) & 1, 1u) << "Expected invalid entry";  in InsertNewEntry()
    282  JITCodeEntry* writable = NativeInfo::Writable(entry);  in InsertNewEntry()
    288  NativeInfo::Writable(next)->prev_ = entry;  in InsertNewEntry()
    290  descriptor.tail_ = entry;  in InsertNewEntry()
    294  NativeInfo::Writable(prev)->next_.store(entry, std::memory_order_release);  in InsertNewEntry()
    296  descriptor.head_.store(entry, std::memory_order_release);  in InsertNewEntry()
    340  const JITCodeEntry* entry = descriptor.free_entries_;  in CreateJITCodeEntryInternal() local
    [all …]

/art/runtime/

D | index_bss_mapping.cc |
    62  [=](const struct IndexBssMappingEntry& entry) {  in GetBssOffset() argument
    63  return (entry.index_and_mask & index_mask) < index;  in GetBssOffset()
    68  const IndexBssMappingEntry& entry = *it;  in GetBssOffset() local
    69  return entry.GetBssOffset(index_bits, index, slot_size);  in GetBssOffset()

/art/tools/

D | boot-image-profile-generate.sh |
    90  for entry in "$ART_JARS"/*
    92  jar_args+=("--apk=$entry")
    94  for entry in "$BOOT_JARS"/*
    96  jar_args+=("--apk=$entry")

/art/compiler/utils/

D | swap_space.cc |
    39  for (const auto& entry : free_by_size) {  in DumpFreeMap() local
    40  if (last_size != entry.size) {  in DumpFreeMap()
    41  last_size = entry.size;  in DumpFreeMap()
    44  LOG(INFO) << " 0x" << std::hex << entry.free_by_start_entry->Start()  in DumpFreeMap()
    45  << " size=" << std::dec << entry.free_by_start_entry->size;  in DumpFreeMap()
    92  for (const auto& entry : free_by_size) {  in CollectFree() local
    93  sum1 += entry.free_by_start_entry->size;  in CollectFree()
    98  for (const auto& entry : free_by_start) {  in CollectFree() local
    99  sum2 += entry.size;  in CollectFree()

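CollectFree() in swap_space.cc totals the same free chunks through two different indices, one keyed by size and one by start address. A sketch of such a dual-index free list and its consistency check, using standard containers rather than ART's actual types:

    #include <cstddef>
    #include <cstdint>
    #include <map>
    #include <set>
    #include <utility>

    // Hypothetical dual-index free list: the same chunks are visible ordered
    // by start address (for coalescing) and by size (for best-fit allocation),
    // mirroring the free_by_start / free_by_size pair in the excerpt.
    struct FreeList {
      std::map<uintptr_t, size_t> free_by_start;                  // start -> size
      std::multiset<std::pair<size_t, uintptr_t>> free_by_size;   // (size, start)

      void AddChunk(uintptr_t start, size_t size) {
        free_by_start.emplace(start, size);
        free_by_size.emplace(size, start);
      }

      // Consistency check in the spirit of CollectFree(): both indices must
      // account for exactly the same number of bytes.
      bool Consistent() const {
        size_t sum1 = 0;
        for (const auto& entry : free_by_size) sum1 += entry.first;
        size_t sum2 = 0;
        for (const auto& entry : free_by_start) sum2 += entry.second;
        return sum1 == sum2;
      }
    };

Ordering by start address makes neighbouring chunks easy to coalesce, the by-size view serves best-fit allocation, and comparing the two byte totals catches the views drifting apart.
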
/art/imgdiag/

D | imgdiag.cc |
    212  template <typename T> size_t EntrySize(T* entry);
    252  T* entry = dirty_entries_[i];  in DumpSamplesAndOffsetCount() local
    253  os_ << reinterpret_cast<void*>(entry) << ", ";  in DumpSamplesAndOffsetCount()
    274  bool IsEntryOnDirtyPage(T* entry, const std::set<size_t>& dirty_pages) const  in IsEntryOnDirtyPage()
    276  size_t size = EntrySize(entry);  in IsEntryOnDirtyPage()
    279  uintptr_t entry_address = reinterpret_cast<uintptr_t>(entry);  in IsEntryOnDirtyPage()
    292  void AddZygoteDirtyEntry(T* entry) REQUIRES_SHARED(Locks::mutator_lock_) {  in AddZygoteDirtyEntry()
    293  zygote_dirty_entries_.insert(entry);  in AddZygoteDirtyEntry()
    296  void AddImageDirtyEntry(T* entry) REQUIRES_SHARED(Locks::mutator_lock_) {  in AddImageDirtyEntry()
    297  image_dirty_entries_.insert(entry);  in AddImageDirtyEntry()
    [all …]

/art/runtime/interpreter/

D | interpreter_cache.h |
    67  Entry& entry = data_[IndexOf(key)];  in Get() local
    68  if (LIKELY(entry.first == key)) {  in Get()
    69  *value = entry.second;  in Get()

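interpreter_cache.h's Get() is one array index and one comparison: a direct-mapped cache with no chaining, where a colliding store simply evicts the previous occupant. A hedged sketch of that shape; the key type, table size, and hash below are illustrative, not ART's actual InterpreterCache parameters:

    #include <array>
    #include <cstddef>
    #include <cstdint>
    #include <utility>

    // Direct-mapped cache: exactly one slot per bucket, so lookups never loop.
    // A colliding Set() silently evicts the previous occupant of the slot.
    // Key 0 is treated as "empty" here (slots are zero-initialized), so
    // callers must not use it as a real key.
    template <typename Key, typename Value, size_t kSize = 256>
    class DirectMappedCache {
      static_assert((kSize & (kSize - 1)) == 0, "kSize must be a power of two");

     public:
      bool Get(Key key, Value* value) const {
        const std::pair<Key, Value>& entry = data_[IndexOf(key)];
        if (entry.first == key) {   // hit only if the slot still holds this key
          *value = entry.second;
          return true;
        }
        return false;               // empty slot or evicted by a collision
      }

      void Set(Key key, Value value) {
        data_[IndexOf(key)] = {key, value};
      }

     private:
      static size_t IndexOf(Key key) {
        return static_cast<size_t>(key) & (kSize - 1);  // trivial hash: low bits
      }

      std::array<std::pair<Key, Value>, kSize> data_{};
    };

Trading hit rate for a lookup that is only a handful of instructions is the usual choice for an interpreter's hottest path.
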
/art/libnativeloader/

D | public_libraries.cpp |
    154  config_file_path, [&company_name](const struct ConfigEntry& entry) -> Result<bool> {  in ReadExtensionLibraries() argument
    155  if (android::base::StartsWith(entry.soname, "lib") &&  in ReadExtensionLibraries()
    156  android::base::EndsWith(entry.soname, "." + company_name + ".so")) {  in ReadExtensionLibraries()
    160  entry.soname, company_name);  in ReadExtensionLibraries()
    177  ReadConfig(config_file, [&for_preload](const struct ConfigEntry& entry) -> Result<bool> {  in InitDefaultPublicLibraries() argument
    179  return !entry.nopreload;  in InitDefaultPublicLibraries()
    451  struct ConfigEntry entry = {.soname = "", .nopreload = false, .bitness = ALL};  in ParseConfig() local
    455  entry.nopreload = true;  in ParseConfig()
    457  if (entry.bitness != ALL) {  in ParseConfig()
    460  entry.bitness = tokens[i] == "32" ? ONLY_32 : ONLY_64;  in ParseConfig()
    [all …]

/art/tools/ahat/src/main/com/android/ahat/

D | Query.java |
    96  for (Map.Entry<String, String> entry : params.entrySet()) {  in with()
    97  if (entry.getValue() != null) {  in with()
    99  newQuery.append(entry.getKey());  in with()
    101  newQuery.append(entry.getValue());  in with()

/art/tools/checker/file_format/c1visualizer/

D | struct.py |
    33  for entry in self.passes:
    34  if entry.name == name:
    35  return entry

/art/test/063-process-manager/src/

D | Main.java |
    44  for (Map.Entry<Thread, StackTraceElement[]> entry :  in checkManager()
    46  Thread t = entry.getKey();  in checkManager()
    51  for (StackTraceElement e : entry.getValue()) {  in checkManager()

/art/test/098-ddmc/

D | expected.txt |
    2  empty=Allocations[message header len: 15 entry header len: 9 stack frame len: 8 number of entries: …
    14  reset=Allocations[message header len: 15 entry header len: 9 stack frame len: 8 number of entries: …
    23  goodbye=Allocations[message header len: 15 entry header len: 9 stack frame len: 8 number of entries…

/art/test/543-checker-dce-trycatch/smali/

D | TestCase.smali |
    33  ## CHECK: TryBoundary kind:entry
    34  ## CHECK: TryBoundary kind:entry
    35  ## CHECK-NOT: TryBoundary kind:entry
    41  ## CHECK: TryBoundary kind:entry
    42  ## CHECK-NOT: TryBoundary kind:entry
    121  ## CHECK: TryBoundary kind:entry
    122  ## CHECK: TryBoundary kind:entry
    123  ## CHECK-NOT: TryBoundary kind:entry
    131  ## CHECK: TryBoundary kind:entry
    132  ## CHECK-NOT: TryBoundary kind:entry
    [all …]