
Searched refs:next_ (Results 1 – 17 of 17) sorted by relevance

/art/runtime/base/
mem_map_arena_pool.cc
94 free_arenas_ = free_arenas_->next_; in ReclaimMemory()
110 free_arenas_ = free_arenas_->next_; in AllocArena()
123 for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) { in TrimMaps()
131 for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) { in GetBytesAllocated()
139 for (Arena* arena = first; arena != nullptr; arena = arena->next_) { in FreeArenaChain()
147 Arena* next = first->next_; in FreeArenaChain()
156 while (last->next_ != nullptr) { in FreeArenaChain()
157 last = last->next_; in FreeArenaChain()
160 last->next_ = free_arenas_; in FreeArenaChain()
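
The hits above are the arena pool's singly linked free list: arenas come back through FreeArenaChain(), which walks the returned chain via next_ and splices the whole chain onto free_arenas_ in one step. A minimal sketch of that splice, using a stripped-down Arena that keeps only the fields needed here (everything beyond the next_ handling is omitted):

    #include <cstddef>

    struct Arena {
      size_t bytes_allocated_ = 0;
      Arena* next_ = nullptr;
    };

    struct ArenaPoolSketch {
      Arena* free_arenas_ = nullptr;

      // Splice an entire chain of returned arenas onto the free list,
      // mirroring the FreeArenaChain() hits above.
      void FreeArenaChain(Arena* first) {
        if (first == nullptr) {
          return;
        }
        Arena* last = first;
        while (last->next_ != nullptr) {   // find the tail of the returned chain
          last = last->next_;
        }
        last->next_ = free_arenas_;        // hook the old free list behind the tail
        free_arenas_ = first;              // the returned chain becomes the new head
      }
    };
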
/art/libartbase/base/
malloc_arena_pool.cc
95 free_arenas_ = free_arenas_->next_; in ReclaimMemory()
111 free_arenas_ = free_arenas_->next_; in AllocArena()
128 for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) { in GetBytesAllocated()
136 for (Arena* arena = first; arena != nullptr; arena = arena->next_) { in FreeArenaChain()
144 Arena* next = first->next_; in FreeArenaChain()
153 while (last->next_ != nullptr) { in FreeArenaChain()
154 last = last->next_; in FreeArenaChain()
157 last->next_ = free_arenas_; in FreeArenaChain()
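
malloc_arena_pool.cc mirrors the same free-list handling; its AllocArena() and ReclaimMemory() hits pop arenas off the head of free_arenas_. A hedged sketch of those two operations (the size check and the use of plain new/delete are simplifications, not the real allocation path):

    #include <cstddef>

    struct Arena {
      size_t size_ = 0;
      Arena* next_ = nullptr;
    };

    struct MallocArenaPoolSketch {
      Arena* free_arenas_ = nullptr;

      // Reuse a cached arena when possible, otherwise allocate a new one.
      Arena* AllocArena(size_t size) {
        if (free_arenas_ != nullptr && free_arenas_->size_ >= size) {
          Arena* ret = free_arenas_;
          free_arenas_ = free_arenas_->next_;   // pop the head of the free list
          ret->next_ = nullptr;
          return ret;
        }
        Arena* fresh = new Arena();             // simplified: real code constructs a MallocArena
        fresh->size_ = size;
        return fresh;
      }

      // Free every cached arena, walking the list head-first.
      void ReclaimMemory() {
        while (free_arenas_ != nullptr) {
          Arena* arena = free_arenas_;
          free_arenas_ = free_arenas_->next_;
          delete arena;
        }
      }
    };
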
arena_allocator.cc
135 for (const Arena* arena = first; arena != nullptr; arena = arena->next_) { in Dump()
188 Arena::Arena() : bytes_allocated_(0), memory_(nullptr), size_(0), next_(nullptr) { in Arena()
198 for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr; in BytesUsed()
199 cur_arena = cur_arena->next_) { in BytesUsed()
280 new_arena->next_ = arena_head_->next_; in AllocFromNewArena()
281 arena_head_->next_ = new_arena; in AllocFromNewArena()
284 new_arena->next_ = arena_head_; in AllocFromNewArena()
304 DCHECK(arena_head_->next_ != nullptr); in AllocFromNewArenaWithMemoryTool()
305 DCHECK(ret == arena_head_->next_->Begin()); in AllocFromNewArenaWithMemoryTool()
306 DCHECK_EQ(bytes, arena_head_->next_->GetBytesAllocated()); in AllocFromNewArenaWithMemoryTool()
[all …]
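
The arena_allocator.cc hits around AllocFromNewArena() show how a freshly allocated arena is linked into the chain: either spliced in directly behind arena_head_ or made the new head. A simplified sketch of that branch plus the BytesUsed()-style walk over the rest of the chain (the real allocator tracks cursor pointers and byte counts that are omitted here):

    #include <cstddef>

    struct Arena {
      size_t size_ = 0;
      Arena* next_ = nullptr;
    };

    struct ArenaAllocatorSketch {
      Arena* arena_head_ = nullptr;

      // Link a new arena into the chain. If the current head should keep
      // serving allocations, the new arena goes in right behind it;
      // otherwise the new arena becomes the head.
      void LinkNewArena(Arena* new_arena, bool keep_current_head) {
        if (keep_current_head && arena_head_ != nullptr) {
          new_arena->next_ = arena_head_->next_;
          arena_head_->next_ = new_arena;
        } else {
          new_arena->next_ = arena_head_;
          arena_head_ = new_arena;
        }
      }

      // BytesUsed()-style walk over every arena chained behind the head
      // (here it just counts them).
      size_t ArenasBehindHead() const {
        size_t n = 0;
        for (Arena* cur = (arena_head_ != nullptr) ? arena_head_->next_ : nullptr;
             cur != nullptr; cur = cur->next_) {
          ++n;
        }
        return n;
      }
    };
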
scoped_arena_allocator.cc
59 top_arena_->next_ = nullptr; in AllocateFromNextArena()
60 } else if (top_arena_->next_ != nullptr && top_arena_->next_->Size() >= allocation_size) { in AllocateFromNextArena()
61 top_arena_ = top_arena_->next_; in AllocateFromNextArena()
63 Arena* tail = top_arena_->next_; in AllocateFromNextArena()
64 top_arena_->next_ = stats_and_pool_.pool->AllocArena(allocation_size); in AllocateFromNextArena()
65 top_arena_ = top_arena_->next_; in AllocateFromNextArena()
66 top_arena_->next_ = tail; in AllocateFromNextArena()
112 for (Arena* arena = bottom_arena_; arena != nullptr; arena = arena->next_) { in ApproximatePeakBytes()
184 for (Arena* arena = start; arena != nullptr; arena = arena->next_) { in ApproximatePeakBytes()
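
scoped_arena_allocator.cc keeps previously allocated arenas chained after top_arena_ so they can be reused when the scoped allocator grows again; if the cached next arena is too small, a new one is inserted between top_arena_ and the remaining tail. A sketch under those assumptions, with the pool call replaced by plain new:

    #include <cstddef>

    struct Arena {
      size_t size_ = 0;
      Arena* next_ = nullptr;
      size_t Size() const { return size_; }
    };

    struct ScopedArenaSketch {
      Arena* top_arena_ = nullptr;

      Arena* AllocateFromNextArena(size_t allocation_size) {
        if (top_arena_ != nullptr &&
            top_arena_->next_ != nullptr &&
            top_arena_->next_->Size() >= allocation_size) {
          // The next cached arena is big enough: just step onto it.
          top_arena_ = top_arena_->next_;
        } else {
          // Insert a freshly allocated arena, keeping the old tail reachable.
          Arena* tail = (top_arena_ != nullptr) ? top_arena_->next_ : nullptr;
          Arena* fresh = new Arena();        // simplified: real code asks the pool
          fresh->size_ = allocation_size;
          if (top_arena_ != nullptr) {
            top_arena_->next_ = fresh;
          }
          top_arena_ = fresh;
          top_arena_->next_ = tail;
        }
        return top_arena_;
      }
    };
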
arena_allocator.h
214 Arena* next_; variable
arena_allocator_test.cc
30 for (Arena* a = allocator->arena_head_; a != nullptr; a = a->next_) { in NumberOfArenas()
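
The header hit is the Arena::next_ member itself, and the test hit counts arenas by walking the chain. A minimal counting helper in the same spirit:

    #include <cstddef>

    struct Arena {
      Arena* next_ = nullptr;
    };

    // arena_allocator_test.cc-style helper: walk the chain from the head and count nodes.
    size_t NumberOfArenas(const Arena* head) {
      size_t number_of_arenas = 0;
      for (const Arena* a = head; a != nullptr; a = a->next_) {
        ++number_of_arenas;
      }
      return number_of_arenas;
    }
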
/art/runtime/jit/
debugger_interface.cc
122 std::atomic<const JITCodeEntry*> next_; // Atomic to guarantee consistency after crash. member
283 writable->next_ = next; in InsertNewEntry()
294 NativeInfo::Writable(prev)->next_.store(entry, std::memory_order_release); in InsertNewEntry()
341 descriptor.free_entries_ = descriptor.free_entries_->next_.load(kNonRacingRelaxed); in CreateJITCodeEntryInternal()
372 const JITCodeEntry* next = entry->next_.load(kNonRacingRelaxed); in DeleteJITCodeEntryInternal()
380 NativeInfo::Writable(prev)->next_.store(next, std::memory_order_relaxed); in DeleteJITCodeEntryInternal()
405 writable_entry->next_.store(descriptor.free_entries_, kNonRacingRelaxed); in DeleteJITCodeEntryInternal()
424 const JITCodeEntry* next = entry->next_; // Save next pointer before we free the memory. in RemoveNativeDebugInfoForDex()
498 for (const JITCodeEntry* it = descriptor.head_; it != nullptr; it = it->next_) { in RepackEntries()
622 const JITCodeEntry* next = it->next_; in RepackNativeDebugInfoForJitLocked()
[all …]
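
debugger_interface.cc chains JITCodeEntry records through an atomic next_ so that a debugger or crash handler always sees a consistent list: the new entry's next_ is written first, and only then is the entry published through the predecessor's next_ with a release store. A hedged sketch of just that ordering (the entry payload, the free list, and the GDB JIT descriptor are all omitted; InsertAfter is an assumed name):

    #include <atomic>

    struct JITCodeEntry {
      std::atomic<const JITCodeEntry*> next_{nullptr};
    };

    // Insert 'entry' after 'prev', or at the head if prev is null. The release
    // store makes the fully written entry visible before it becomes reachable
    // from the list, mirroring the InsertNewEntry() hits above.
    void InsertAfter(std::atomic<const JITCodeEntry*>& head,
                     JITCodeEntry* prev,
                     JITCodeEntry* entry,
                     const JITCodeEntry* next) {
      entry->next_.store(next, std::memory_order_relaxed);
      if (prev != nullptr) {
        prev->next_.store(entry, std::memory_order_release);
      } else {
        head.store(entry, std::memory_order_release);
      }
    }
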
/art/compiler/utils/
assembler.h
69 SlowPath() : next_(nullptr) {} in SlowPath()
83 SlowPath *next_; variable
142 for ( ; cur->next_ != nullptr ; cur = cur->next_) {} in EnqueueSlowPath()
143 cur->next_ = slowpath; in EnqueueSlowPath()
153 next = cur->next_; in EmitSlowPaths()
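
In assembler.h, slow paths form a singly linked queue: EnqueueSlowPath() walks to the tail via next_ and appends. A small self-contained sketch (Emit() is a placeholder; the real class emits fix-up code into the assembler):

    class SlowPath {
     public:
      SlowPath() : next_(nullptr) {}
      virtual ~SlowPath() {}
      virtual void Emit() {}   // placeholder for the real emission hook
      SlowPath* next_;
    };

    // Append to the tail of the queue, as in EnqueueSlowPath().
    void EnqueueSlowPath(SlowPath*& head, SlowPath* slowpath) {
      if (head == nullptr) {
        head = slowpath;
        return;
      }
      SlowPath* cur = head;
      for (; cur->next_ != nullptr; cur = cur->next_) {}
      cur->next_ = slowpath;
    }
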
/art/compiler/optimizing/
ssa_liveness_analysis.h
65 LiveRange(size_t start, size_t end, LiveRange* next) : start_(start), end_(end), next_(next) { in LiveRange()
67 DCHECK(next_ == nullptr || next_->GetStart() > GetEnd()); in LiveRange()
72 LiveRange* GetNext() const { return next_; } in GetNext()
89 start_, end_, next_ == nullptr ? nullptr : next_->Dup(allocator)); in Dup()
93 return next_ == nullptr ? this : next_->GetLastRange(); in GetLastRange()
99 LiveRange* next_; variable
231 next_(nullptr) {} in SafepointPosition()
247 next_ = next; in SetNext()
255 return next_; in GetNext()
268 SafepointPosition* next_; variable
[all …]
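
ssa_liveness_analysis.h stores an interval's live ranges as a sorted, non-overlapping singly linked list; the constructor hit even checks that the next range starts strictly after the current one ends. A sketch of that invariant and the recursive GetLastRange() walk, with assert standing in for DCHECK and the Dup()/allocator details left out:

    #include <cassert>
    #include <cstddef>

    class LiveRange {
     public:
      LiveRange(size_t start, size_t end, LiveRange* next)
          : start_(start), end_(end), next_(next) {
        assert(start_ < end_);
        // Ranges on the list stay sorted and disjoint.
        assert(next_ == nullptr || next_->GetStart() > GetEnd());
      }
      size_t GetStart() const { return start_; }
      size_t GetEnd() const { return end_; }
      LiveRange* GetNext() const { return next_; }
      LiveRange* GetLastRange() { return next_ == nullptr ? this : next_->GetLastRange(); }

     private:
      size_t start_;
      size_t end_;
      LiveRange* next_;
    };
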
nodes.cc
1165 last_instruction_->next_ = instruction; in AddInstruction()
1175 instruction->next_ = cursor; in InsertInstructionBefore()
1179 instruction->next_ = cursor; in InsertInstructionBefore()
1181 instruction->previous_->next_ = instruction; in InsertInstructionBefore()
1188 cursor->next_ = instruction; in InsertInstructionAfter()
1192 instruction->next_ = cursor->next_; in InsertInstructionAfter()
1194 cursor->next_ = instruction; in InsertInstructionAfter()
1195 instruction->next_->previous_ = instruction; in InsertInstructionAfter()
1201 instruction->previous_->next_ = instruction->next_; in RemoveInstruction()
1203 if (instruction->next_ != nullptr) { in RemoveInstruction()
[all …]
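
nodes.cc maintains each block's instruction stream as an intrusive doubly linked list: the insertion hits patch both next_ and previous_ around a cursor, and RemoveInstruction() unlinks a node. A reduced sketch of insert-before and unlink (the real code also updates the block's first/last instruction pointers, which is omitted here):

    struct Node {
      Node* previous_ = nullptr;
      Node* next_ = nullptr;
    };

    // Link 'node' immediately before 'cursor', as InsertInstructionBefore() does.
    void InsertBefore(Node* node, Node* cursor) {
      node->next_ = cursor;
      node->previous_ = cursor->previous_;
      cursor->previous_ = node;
      if (node->previous_ != nullptr) {
        node->previous_->next_ = node;
      }
    }

    // Unlink 'node' from its neighbours, as RemoveInstruction() does.
    void Remove(Node* node) {
      if (node->previous_ != nullptr) {
        node->previous_->next_ = node->next_;
      }
      if (node->next_ != nullptr) {
        node->next_->previous_ = node->previous_;
      }
      node->previous_ = nullptr;
      node->next_ = nullptr;
    }
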
gvn.cc
189 : instruction_(instruction), hash_code_(hash_code), next_(next) {} in Node()
193 Node* GetNext() const { return next_; } in GetNext()
194 void SetNext(Node* node) { next_ = node; } in SetNext()
218 Node* next_; member in art::ValueSet::Node
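
gvn.cc's value set is a hash table whose buckets are chains of Node objects, each holding an instruction, its hash code, and next_. A sketch of a bucket walk under that layout (HInstruction is replaced by an opaque pointer and equality by pointer identity, which is a simplification):

    #include <cstddef>

    struct Instruction {};  // stand-in for HInstruction

    class Node {
     public:
      Node(Instruction* instruction, size_t hash_code, Node* next)
          : instruction_(instruction), hash_code_(hash_code), next_(next) {}
      Instruction* GetInstruction() const { return instruction_; }
      size_t GetHashCode() const { return hash_code_; }
      Node* GetNext() const { return next_; }
      void SetNext(Node* node) { next_ = node; }

     private:
      Instruction* instruction_;
      size_t hash_code_;
      Node* next_;
    };

    // Walk one bucket's chain looking for a matching entry.
    Instruction* Lookup(Node* bucket, Instruction* instruction, size_t hash_code) {
      for (Node* node = bucket; node != nullptr; node = node->GetNext()) {
        if (node->GetHashCode() == hash_code && node->GetInstruction() == instruction) {
          return node->GetInstruction();
        }
      }
      return nullptr;
    }
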
bounds_check_elimination.cc
531 next_(nullptr) {} in BCEVisitor()
541 next_ = instruction->GetNext(); in VisitBasicBlock()
543 instruction = next_; in VisitBasicBlock()
547 next_ = instruction->GetNext(); in VisitBasicBlock()
549 instruction = next_; in VisitBasicBlock()
1999 if (instruction == next_) { in ReplaceInstruction()
2000 next_ = next_->GetNext(); in ReplaceInstruction()
2039 HInstruction* next_; member in art::BCEVisitor
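
bounds_check_elimination.cc caches next_ before visiting each instruction so the visitor can remove or replace the current instruction without breaking its own iteration; the ReplaceInstruction() hit bumps the cached pointer forward if the instruction being removed is exactly next_. A self-contained sketch of that pattern (the names VisitAll and SkipIfCurrent are assumptions):

    struct Instr {
      Instr* next_ = nullptr;
      Instr* GetNext() const { return next_; }
    };

    struct VisitorSketch {
      Instr* next_ = nullptr;

      // Visit every instruction while allowing the visit to remove instructions:
      // cache the successor before visiting, mirroring VisitBasicBlock().
      template <typename Visit>
      void VisitAll(Instr* first, Visit visit) {
        for (Instr* instruction = first; instruction != nullptr; instruction = next_) {
          next_ = instruction->GetNext();
          visit(instruction);  // may call SkipIfCurrent() for anything it removes
        }
      }

      // If the instruction being removed is the cached next_, advance past it
      // so the iteration stays valid, as in ReplaceInstruction().
      void SkipIfCurrent(Instr* removed) {
        if (removed == next_) {
          next_ = next_->GetNext();
        }
      }
    };
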
nodes.h
2102 next_(nullptr), in HInstruction()
2122 HInstruction* GetNext() const { return next_; } in GetNext()
2542 next_(nullptr), in HInstruction()
2601 HInstruction* next_; variable
2658 next_ = Done() ? nullptr : instruction_->GetNext(); in HInstructionIterator()
2664 instruction_ = next_; in Advance()
2665 next_ = Done() ? nullptr : instruction_->GetNext(); in Advance()
2670 HInstruction* next_; variable
2701 next_ = Done() ? nullptr : instruction_->GetPrevious(); in HBackwardInstructionIterator()
2707 instruction_ = next_; in Advance()
[all …]
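
nodes.h builds its forward and backward instruction iterators the same way: cache next_ up front and refresh it in Advance(), so deleting the current instruction mid-walk stays safe. A sketch of the forward variant over a reduced node type (Done() here just means there is no current node):

    struct Instr {
      Instr* next_ = nullptr;
      Instr* GetNext() const { return next_; }
    };

    class InstructionIteratorSketch {
     public:
      explicit InstructionIteratorSketch(Instr* first) : instruction_(first) {
        next_ = Done() ? nullptr : instruction_->GetNext();
      }
      bool Done() const { return instruction_ == nullptr; }
      Instr* Current() const { return instruction_; }
      void Advance() {
        instruction_ = next_;
        next_ = Done() ? nullptr : instruction_->GetNext();
      }

     private:
      Instr* instruction_;
      Instr* next_;
    };
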
/art/runtime/mirror/
reference.h
132 HeapReference<FinalizerReference> next_;
/art/runtime/gc/allocator/
rosalloc.h
119 return next_; in Next()
122 next_ = next; in SetNext()
129 next_ = nullptr; in Clear()
133 Slot* next_; // Next slot in the list.
844 return OFFSETOF_MEMBER(Slot, next_); in RunSlotNextOffset()
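
rosalloc.h threads the free slots of a run through next_ and exposes the member's offset so generated code can walk the same list. A sketch with offsetof standing in for ART's OFFSETOF_MEMBER macro:

    #include <cstddef>

    class Slot {
     public:
      Slot* Next() const { return next_; }
      void SetNext(Slot* next) { next_ = next; }
      void Clear() { next_ = nullptr; }

      static size_t RunSlotNextOffset() {
        return offsetof(Slot, next_);  // offsetof in place of ART's OFFSETOF_MEMBER
      }

     private:
      Slot* next_ = nullptr;  // next slot in the free list
    };
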
/art/runtime/
thread.cc
457 FrameIdToShadowFrame* GetNext() const { return next_; } in GetNext()
458 void SetNext(FrameIdToShadowFrame* next) { next_ = next; } in SetNext()
469 next_(next) {} in FrameIdToShadowFrame()
473 FrameIdToShadowFrame* next_; member in art::FrameIdToShadowFrame
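
thread.cc keeps a per-thread frame-id-to-shadow-frame mapping as a small singly linked list of FrameIdToShadowFrame records. A sketch of the node plus the linear find such a list supports (the shadow-frame payload and the Find helper are assumptions; only GetNext/SetNext/next_ come from the hits):

    #include <cstddef>

    class FrameIdToShadowFrame {
     public:
      FrameIdToShadowFrame(size_t frame_id, FrameIdToShadowFrame* next)
          : frame_id_(frame_id), next_(next) {}
      size_t GetFrameId() const { return frame_id_; }
      FrameIdToShadowFrame* GetNext() const { return next_; }
      void SetNext(FrameIdToShadowFrame* next) { next_ = next; }

     private:
      size_t frame_id_;
      FrameIdToShadowFrame* next_;  // next record in the thread-local list
    };

    // Linear search by frame id, the natural lookup for such a short list.
    FrameIdToShadowFrame* Find(FrameIdToShadowFrame* head, size_t frame_id) {
      for (FrameIdToShadowFrame* record = head; record != nullptr; record = record->GetNext()) {
        if (record->GetFrameId() == frame_id) {
          return record;
        }
      }
      return nullptr;
    }
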
class_linker_test.cc
707 addOffset(OFFSETOF_MEMBER(mirror::FinalizerReference, next_), "next"); in FinalizerReferenceOffsets()
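
The last two hits belong together: mirror/reference.h declares the native view of FinalizerReference's next_ field, and class_linker_test.cc asserts that its C++ offset matches the managed field named "next". A hedged sketch of that kind of check, with offsetof and assert in place of OFFSETOF_MEMBER/addOffset and an invented stand-in layout:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    // Stand-in layout: 32-bit compressed heap references, not the real mirror class.
    struct FinalizerReferenceSketch {
      uint32_t zombie_;  // illustrative sibling field, not taken from the hits above
      uint32_t next_;    // the field the search found
    };

    // class_linker_test.cc-style check: the native offset of next_ must match the
    // offset of the managed "next" field (here a hypothetical value from the caller).
    void CheckNextOffset(size_t managed_field_offset) {
      assert(offsetof(FinalizerReferenceSketch, next_) == managed_field_offset);
      (void)managed_field_offset;  // avoid an unused-parameter warning under NDEBUG
    }
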