Lines Matching refs:self
Each listed line uses the Thread* self parameter in the inline Heap allocation path (Heap::AllocObjectWithAllocator, PushOnAllocationStack, AllocLargeObject, TryToAllocate, CheckConcurrentGCForJava); the leading number is the line's position in the source file.

45 inline mirror::Object* Heap::AllocObjectWithAllocator(Thread* self,
60 CHECK_EQ(self->GetState(), kRunnable);
61 self->AssertThreadSuspensionIsAllowable();
62 self->AssertNoPendingException();
64 StackHandleScope<1> hs(self);
66 self->PoisonObjectPointers();
73 StackHandleScope<1> hs(self);
75 l->PreObjectAllocated(self, h_klass, &byte_count);
94 obj = AllocLargeObject<kInstrumented, PreFenceVisitor>(self, &klass, byte_count,
100 self->ClearException();
111 if (IsTLABAllocator(allocator) && byte_count <= self->TlabSize()) {
112 obj = self->AllocTlab(byte_count);
124 (obj = rosalloc_space_->AllocThreadLocal(self, byte_count, &bytes_allocated)) != nullptr &&
138 obj = TryToAllocate<kInstrumented, false>(self, allocator, byte_count, &bytes_allocated,
144 obj = AllocateInternalWithGc(self,
155 if (!self->IsExceptionPending()) {
160 return AllocObject</*kInstrumented=*/true>(self,
207 RuntimeStats* thread_stats = self->GetStats();
222 allocation_records_->RecordAllocation(self, &obj, bytes_allocated);
228 l->ObjectAllocated(self, &obj, bytes_allocated);
234 PushOnAllocationStack(self, &obj);
238 CheckGcStressMode(self, &obj);
250 CheckConcurrentGCForJava(self, new_num_bytes_allocated, &obj);
253 self->VerifyStack();
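
The fast path here (file lines 111-112) is thread-local bump-pointer allocation: if the request fits the thread's current TLAB, the thread advances a private cursor with no locking; otherwise control falls through to TryToAllocate (line 138) and, if that also fails, to AllocateInternalWithGc (line 144). A minimal sketch of the bump-pointer idea, using an illustrative ThreadLocalBuffer type rather than ART's Thread::TlabSize()/AllocTlab() API:

    #include <cstddef>
    #include <cstdint>

    // Illustrative thread-local allocation buffer (TLAB); not ART's implementation.
    struct ThreadLocalBuffer {
      uint8_t* pos = nullptr;  // Next free byte.
      uint8_t* end = nullptr;  // One past the last usable byte.

      size_t Remaining() const { return static_cast<size_t>(end - pos); }

      // Fast path: bump the cursor if the request fits; otherwise return nullptr
      // so the caller takes the shared-allocator / GC slow path.
      void* Alloc(size_t bytes) {
        if (bytes > Remaining()) {
          return nullptr;
        }
        uint8_t* result = pos;
        pos += bytes;
        return result;
      }
    };

Because the buffer belongs to a single thread, this fast path needs no atomics; synchronization is only required when a new TLAB is carved out of the shared heap.
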
260 inline void Heap::PushOnAllocationStack(Thread* self, ObjPtr<mirror::Object>* obj) {
262 if (UNLIKELY(!self->PushOnThreadLocalAllocationStack(obj->Ptr()))) {
263 PushOnThreadLocalAllocationStackWithInternalGC(self, obj);
266 PushOnAllocationStackWithInternalGC(self, obj);
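
PushOnAllocationStack (file lines 260-266) records the freshly allocated object for the next collection, trying the thread-local allocation stack first and taking the slower, internally synchronized path only when that push fails. A sketch of the same "optimistic push, explicit fallback" shape, with an illustrative fixed-capacity stack instead of ART's atomic stack:

    #include <cstddef>

    // Illustrative bounded per-thread stack; Push() reports failure instead of
    // growing so the caller can fall back to a slower shared path.
    template <typename T, size_t kCapacity>
    class BoundedStack {
     public:
      bool Push(const T& value) {
        if (size_ == kCapacity) {
          return false;  // Full: flush or use the shared stack.
        }
        data_[size_++] = value;
        return true;
      }

     private:
      T data_[kCapacity];
      size_t size_ = 0;
    };

    // Hypothetical stand-in for PushOnThreadLocalAllocationStackWithInternalGC();
    // a real version would drain the stack (possibly running a GC) and retry.
    inline void PushSlow(void* /*obj*/) {}

    inline void RecordAllocation(BoundedStack<void*, 1024>& tls_stack, void* obj) {
      if (!tls_stack.Push(obj)) {  // Mirrors the UNLIKELY() branch above.
        PushSlow(obj);
      }
    }
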
271 inline mirror::Object* Heap::AllocLargeObject(Thread* self,
276 StackHandleScope<1> hs(self);
278 return AllocObjectWithAllocator<kInstrumented, false, PreFenceVisitor>(self, *klass, byte_count,

284 inline mirror::Object* Heap::TryToAllocate(Thread* self,
318 ret = rosalloc_space_->Alloc(self, alloc_size, bytes_allocated, usable_size,
330 DCHECK(!rosalloc_space_->CanAllocThreadLocal(self, alloc_size));
332 ret = rosalloc_space_->AllocNonvirtual(self,
343 ret = dlmalloc_space_->Alloc(self,
350 ret = dlmalloc_space_->AllocNonvirtual(self,
359 ret = non_moving_space_->Alloc(self,
367 ret = large_object_space_->Alloc(self,
395 if (UNLIKELY(self->TlabSize() < alloc_size)) {
396 return AllocWithNewTLAB(self,
405 ret = self->AllocTlab(alloc_size);
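
TryToAllocate (file lines 284-405) is essentially a dispatch on the allocator type: the RosAlloc and dlmalloc spaces (each with virtual and non-virtual entry points), the non-moving space, the large object space, and the TLAB path, which requests a fresh buffer via AllocWithNewTLAB when the current one is too small (lines 395-405). A hedged sketch of that routing, with stand-in space and TLAB types rather than ART's classes:

    #include <cstddef>
    #include <cstdint>

    // Stand-ins for the backing spaces; names and shapes are illustrative only.
    enum class AllocatorType { kRosAlloc, kDlMalloc, kNonMoving, kLargeObject, kTLAB };

    struct Space {
      virtual ~Space() = default;
      virtual void* Alloc(size_t bytes, size_t* bytes_allocated) = 0;
    };

    struct Tlab {
      uint8_t* pos = nullptr;
      uint8_t* end = nullptr;
      size_t Remaining() const { return static_cast<size_t>(end - pos); }
      void* Alloc(size_t bytes) { uint8_t* r = pos; pos += bytes; return r; }
      // Stub: a real refill would carve a new buffer out of the shared heap.
      bool Refill(size_t /*min_bytes*/) { return false; }
    };

    // Routing only: each allocator type maps to one allocation path, and the
    // TLAB path refills the buffer first when the request does not fit.
    inline void* TryToAllocateSketch(AllocatorType type, size_t bytes, size_t* bytes_allocated,
                                     Space& rosalloc, Space& dlmalloc, Space& non_moving,
                                     Space& los, Tlab& tlab) {
      switch (type) {
        case AllocatorType::kRosAlloc:    return rosalloc.Alloc(bytes, bytes_allocated);
        case AllocatorType::kDlMalloc:    return dlmalloc.Alloc(bytes, bytes_allocated);
        case AllocatorType::kNonMoving:   return non_moving.Alloc(bytes, bytes_allocated);
        case AllocatorType::kLargeObject: return los.Alloc(bytes, bytes_allocated);
        case AllocatorType::kTLAB:
          if (tlab.Remaining() < bytes && !tlab.Refill(bytes)) {
            return nullptr;  // Out of memory on this path; the caller may GC and retry.
          }
          *bytes_allocated = bytes;
          return tlab.Alloc(bytes);
      }
      return nullptr;
    }

The real function also reports usable size back to the caller (visible in the truncated argument lists above); the sketch keeps only the routing logic.
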
466 inline void Heap::CheckConcurrentGCForJava(Thread* self,
470 RequestConcurrentGCAndSaveObject(self, false /* force_full */, obj);
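
CheckConcurrentGCForJava (file lines 466-470) is where a successful allocation may kick off background collection: when the new running total of allocated bytes crosses a start watermark (the check itself sits on the elided lines), the thread requests a concurrent GC via RequestConcurrentGCAndSaveObject rather than collecting inline. A minimal sketch of such a trigger, with an illustrative watermark field rather than ART's bookkeeping:

    #include <atomic>
    #include <cstddef>

    // Illustrative trigger: request (rather than run) a background GC once the
    // allocated-bytes counter crosses a precomputed watermark. Names are not ART's.
    class GcTrigger {
     public:
      explicit GcTrigger(size_t watermark) : watermark_(watermark) {}

      // Called after each allocation with the new running total.
      void MaybeRequestConcurrentGc(size_t new_num_bytes_allocated) {
        if (new_num_bytes_allocated >= watermark_ &&
            !requested_.exchange(true, std::memory_order_relaxed)) {
          NotifyGcThread();  // Hand the work to a background collector thread.
        }
      }

     private:
      // Stub: a real implementation would wake the collector thread here and
      // clear requested_ once that collection finishes.
      void NotifyGcThread() {}

      const size_t watermark_;
      std::atomic<bool> requested_{false};
    };

Requesting rather than running the collection keeps the allocation path short, which fits its placement near the end of AllocObjectWithAllocator (file line 250).
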