
Searched refs:to_ref (Results 1 – 4 of 4) sorted by relevance

/art/runtime/gc/collector/
concurrent_copying.cc
165 mirror::Object* to_ref = Mark(self, from_ref); in MarkHeapReference() local
166 if (from_ref != to_ref) { in MarkHeapReference()
172 } while (!field->CasWeakRelaxed(from_ref, to_ref)); in MarkHeapReference()
479 mirror::Object* to_ref = concurrent_copying_->Mark(self, ref); in VisitRoots() local
480 if (to_ref != ref) { in VisitRoots()
481 *root = to_ref; in VisitRoots()
496 mirror::Object* to_ref = concurrent_copying_->Mark(self, ref); in VisitRoots() local
497 if (to_ref != ref) { in VisitRoots()
498 root->Assign(to_ref); in VisitRoots()
1758 void ConcurrentCopying::PushOntoMarkStack(Thread* const self, mirror::Object* to_ref) { in PushOntoMarkStack() argument
[all …]
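
The MarkHeapReference() hits above (file lines 165–172) show the collector marking a referent and installing the to-space pointer with a retrying weak CAS, while the VisitRoots hits update root slots in place. Below is a minimal sketch of the CAS-retry shape only, under assumed stand-in types: Object, MarkSketch, and the std::atomic field are illustrations, not ART's mirror::Object or CasWeakRelaxed.

#include <atomic>

struct Object {
  Object* forwarded = nullptr;  // hypothetical forwarding-pointer slot
};

// Hypothetical stand-in for ConcurrentCopying::Mark(): return the to-space
// copy if one exists, otherwise the object itself.
static Object* MarkSketch(Object* from_ref) {
  if (from_ref != nullptr && from_ref->forwarded != nullptr) {
    return from_ref->forwarded;
  }
  return from_ref;
}

// Mirrors the do/while shape of the excerpt: a weak CAS can fail spuriously
// or because another thread stored a new value, so reload the field and retry.
void MarkHeapReferenceSketch(std::atomic<Object*>* field) {
  Object* from_ref;
  Object* to_ref;
  do {
    from_ref = field->load(std::memory_order_relaxed);
    to_ref = MarkSketch(from_ref);
    if (from_ref == to_ref) {
      return;  // the field already holds the marked (to-space) reference
    }
  } while (!field->compare_exchange_weak(from_ref, to_ref,
                                         std::memory_order_relaxed));
}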
concurrent_copying-inl.h
162 mirror::Object* to_ref = GetFwdPtr(from_ref); in Mark() local
163 if (to_ref == nullptr) { in Mark()
165 to_ref = Copy(self, from_ref, holder, offset); in Mark()
169 DCHECK(region_space_->IsInToSpace(to_ref) || heap_->non_moving_space_->HasAddress(to_ref)) in Mark()
170 << "from_ref=" << from_ref << " to_ref=" << to_ref; in Mark()
171 return to_ref; in Mark()
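
The Mark() excerpt above checks for an existing forwarding pointer before copying the object into to-space. A simplified sketch of that fast-path-first order follows; Obj, CopyToToSpaceSketch, and the non-atomic forwarding store are assumptions for illustration, not ART's Copy()/GetFwdPtr() implementation.

#include <cassert>

struct Obj {
  Obj* fwd = nullptr;  // hypothetical forwarding-pointer slot
};

// Stand-in for Copy(): allocate a to-space copy and publish it through the
// forwarding pointer (the real code installs it atomically and may race).
static Obj* CopyToToSpaceSketch(Obj* from_ref) {
  Obj* copy = new Obj(*from_ref);
  from_ref->fwd = copy;
  return copy;
}

// Fast path first: reuse an existing forwarding pointer; otherwise copy.
Obj* MarkFastPathSketch(Obj* from_ref) {
  Obj* to_ref = from_ref->fwd;               // GetFwdPtr(from_ref) in the excerpt
  if (to_ref == nullptr) {
    to_ref = CopyToToSpaceSketch(from_ref);  // Copy(self, from_ref, holder, offset)
  }
  // The real code DCHECKs that to_ref lives in to-space or the non-moving space.
  assert(to_ref != nullptr);
  return to_ref;
}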
concurrent_copying.h
172 void Scan(mirror::Object* to_ref) REQUIRES_SHARED(Locks::mutator_lock_)
204 void ProcessMarkStackRef(mirror::Object* to_ref) REQUIRES_SHARED(Locks::mutator_lock_)
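
The header declarations above (Scan, ProcessMarkStackRef), together with PushOntoMarkStack from the .cc hits, are the usual ingredients of a tracing collector's drain loop. The sketch below shows only that generic shape; the Node type, the std::vector-based stack, and the function names are assumptions for illustration, not ART's implementation.

#include <vector>

struct Node {
  std::vector<Node*> fields;  // hypothetical reference fields of an object
  bool marked = false;
};

// Scan one already-marked object: mark its referents and push any newly
// marked object so its own fields get scanned later (PushOntoMarkStack analogue).
static void ScanSketch(Node* to_ref, std::vector<Node*>* mark_stack) {
  for (Node* referent : to_ref->fields) {
    if (referent != nullptr && !referent->marked) {
      referent->marked = true;
      mark_stack->push_back(referent);
    }
  }
}

// Drain until no pending objects remain (ProcessMarkStackRef()/Scan() analogue).
static void DrainMarkStackSketch(std::vector<Node*>* mark_stack) {
  while (!mark_stack->empty()) {
    Node* to_ref = mark_stack->back();
    mark_stack->pop_back();
    ScanSketch(to_ref, mark_stack);
  }
}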
/art/runtime/entrypoints/quick/
quick_jni_entrypoints.cc
46 mirror::Object* to_ref = ReadBarrier::BarrierForRoot(handle_on_stack); in ReadBarrierJni() local
47 handle_on_stack->Assign(to_ref); in ReadBarrierJni()
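
Both the VisitRoots hits (file lines 479–498) and ReadBarrierJni() above follow the same root fix-up idiom: run the reference through the barrier or mark step, then write the result back so the root slot points at the marked reference. A minimal sketch of that idiom, with a hypothetical Handle type and BarrierForRootSketch standing in for ART's stack reference and ReadBarrier::BarrierForRoot:

struct Ref {};

// Hypothetical stack handle; the real entrypoint operates on a stack reference
// to a mirror::Object.
struct Handle {
  Ref* ref = nullptr;
  Ref* Get() const { return ref; }
  void Assign(Ref* r) { ref = r; }
};

// Assumed behaviour: return the up-to-date (possibly relocated) pointer for a root.
static Ref* BarrierForRootSketch(const Handle* handle_on_stack) {
  return handle_on_stack->Get();  // placeholder; a concurrent copying GC may return a to-space copy
}

// Same shape as VisitRoots()/ReadBarrierJni(): barrier the root, then store it
// back if the pointer changed.
void FixUpRootSketch(Handle* handle_on_stack) {
  Ref* ref = handle_on_stack->Get();
  Ref* to_ref = BarrierForRootSketch(handle_on_stack);
  if (to_ref != ref) {
    handle_on_stack->Assign(to_ref);  // *root = to_ref / root->Assign(to_ref) in the excerpts
  }
}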