
Searched refs:self (Results 126 – 150 of 397) sorted by relevance


/art/runtime/interpreter/
interpreter_switch_impl-inl.h
59 if (PerformNonStandardReturn<kMonitorState>(self, in CheckForceReturn()
72 DCHECK(self->IsExceptionPending()); in HandlePendingException()
73 self->AllowThreadSuspension(); in HandlePendingException()
79 if (!MoveToExceptionHandler(self, shadow_frame, skip_event ? nullptr : instrumentation)) { in HandlePendingException()
81 DoMonitorCheckOnExit<do_assignability_check>(self, &shadow_frame); in HandlePendingException()
102 DCHECK(self->IsExceptionPending()); in PossiblyHandlePendingExceptionOnInvoke()
105 << self->GetException()->Dump(); in PossiblyHandlePendingExceptionOnInvoke()
107 self->ClearException(); in PossiblyHandlePendingExceptionOnInvoke()
119 if (!DoMonitorCheckOnExit<do_assignability_check>(self, &shadow_frame)) { in HandleMonitorChecks()
136 if (UNLIKELY(!DoDexPcMoveEvent(self, in Preamble()
[all …]
unstarted_runtime.h
51 static void Invoke(Thread* self,
58 static void Jni(Thread* self,
68 static void Unstarted ## ShortName(Thread* self, \
81 static void UnstartedJNI ## ShortName(Thread* self, \
93 static void UnstartedClassForNameCommon(Thread* self,
interpreter_switch_impl.h
39 Thread* self; member
59 ALWAYS_INLINE JValue ExecuteSwitchImpl(Thread* self, const CodeItemDataAccessor& accessor, in ExecuteSwitchImpl() argument
64 .self = self, in ExecuteSwitchImpl()
/art/test/common/
runtime_state.cc
170 Thread* self = Thread::Current(); in Java_Main_isAotCompiled() local
171 ScopedObjectAccess soa(self); in Java_Main_isAotCompiled()
208 Thread* self = Thread::Current(); in Java_Main_hasJitCompiledEntrypoint() local
209 ScopedObjectAccess soa(self); in Java_Main_hasJitCompiledEntrypoint()
225 Thread* self = Thread::Current(); in Java_Main_hasJitCompiledCode() local
226 ScopedObjectAccess soa(self); in Java_Main_hasJitCompiledCode()
232 static void ForceJitCompiled(Thread* self, ArtMethod* method) REQUIRES(!Locks::mutator_lock_) { in ForceJitCompiled() argument
235 ScopedObjectAccess soa(self); in ForceJitCompiled()
246 StackHandleScope<1> hs(self); in ForceJitCompiled()
249 if (!class_linker->EnsureInitialized(self, h_klass, true, true)) { in ForceJitCompiled()
[all …]
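The runtime_state.cc snippets above follow an idiom that recurs throughout these results: take the current thread, enter the managed runtime with ScopedObjectAccess, root objects in a StackHandleScope, then call into the ClassLinker. A minimal sketch of that idiom, reconstructed only from the calls visible in the snippets (the helper name and include paths are assumptions, not the actual ForceJitCompiled implementation):

```cpp
// Sketch only: mirrors the Thread/ScopedObjectAccess/StackHandleScope pattern
// from runtime_state.cc above. Include paths are approximate.
#include "art_method-inl.h"
#include "class_linker.h"
#include "handle_scope-inl.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "thread.h"

namespace art {

// Hypothetical helper: initialize a method's declaring class before using it.
static bool EnsureDeclaringClassInitialized(Thread* self, ArtMethod* method)
    REQUIRES(!Locks::mutator_lock_) {
  ScopedObjectAccess soa(self);   // become runnable / hold the shared mutator lock
  StackHandleScope<1> hs(self);   // keep the class rooted across suspension points
  Handle<mirror::Class> h_klass(hs.NewHandle(method->GetDeclaringClass()));
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  // The two booleans match the EnsureInitialized(self, h_klass, true, true) call above.
  return class_linker->EnsureInitialized(self, h_klass, true, true);
}

}  // namespace art
```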
/art/runtime/gc/
scoped_gc_critical_section.h
33 GCCriticalSection(Thread* self, const char* name) in GCCriticalSection() argument
34 : self_(self), section_name_(name) {} in GCCriticalSection()
53 ScopedGCCriticalSection(Thread* self, GcCause cause, CollectorType collector_type)
67 ScopedInterruptibleGCCriticalSection(Thread* self, GcCause cause, CollectorType type);
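ScopedGCCriticalSection above is the RAII way to keep the garbage collector out of a region of runtime code. A hedged sketch of its use; the cause and collector-type values are borrowed from the instrumentation code elsewhere in the tree and should be treated as illustrative:

```cpp
// Sketch: holding off GC for a short critical region, per scoped_gc_critical_section.h above.
#include "gc/scoped_gc_critical_section.h"
#include "thread.h"

namespace art {

void UpdateRuntimeStateWithoutGc() {  // hypothetical caller
  Thread* self = Thread::Current();
  // No collection can run while `gcs` is alive; the section is released on scope exit.
  gc::ScopedGCCriticalSection gcs(self,
                                  gc::kGcCauseInstrumentation,        // illustrative cause
                                  gc::kCollectorTypeInstrumentation); // illustrative collector type
  // ... mutate state the collector must not observe mid-update ...
}

}  // namespace art
```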
allocation_record.cc
141 Thread* self = Thread::Current(); in SetAllocTrackingEnabled() local
145 MutexLock mu(self, *Locks::alloc_tracker_lock_); in SetAllocTrackingEnabled()
164 MutexLock mu(self, *Locks::alloc_tracker_lock_); in SetAllocTrackingEnabled()
171 MutexLock mu(self, *Locks::alloc_tracker_lock_); in SetAllocTrackingEnabled()
185 void AllocRecordObjectMap::RecordAllocation(Thread* self, in RecordAllocation() argument
192 StackHandleScope<1> hs(self); in RecordAllocation()
208 self, in RecordAllocation()
213 MutexLock mu(self, *Locks::alloc_tracker_lock_); in RecordAllocation()
226 (kUseReadBarrier && !self->GetWeakRefAccessEnabled()))) { in RecordAllocation()
229 self->CheckEmptyCheckpointFromWeakRefAccess(Locks::alloc_tracker_lock_); in RecordAllocation()
[all …]
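Nearly every allocation_record.cc snippet passes `self` into MutexLock so the lock knows its owner (for contention logging and held-lock assertions). A small sketch of that RAII idiom, reusing the alloc_tracker_lock_ named above; the surrounding function is hypothetical:

```cpp
// Sketch: the MutexLock(self, *Locks::alloc_tracker_lock_) idiom from allocation_record.cc above.
#include "base/mutex.h"
#include "thread.h"

namespace art {

void TouchAllocTrackerState() {  // hypothetical helper
  Thread* self = Thread::Current();
  // Acquired here, released automatically when `mu` goes out of scope.
  MutexLock mu(self, *Locks::alloc_tracker_lock_);
  // ... read or update allocation-tracking state while the lock is held ...
}

}  // namespace art
```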
/art/runtime/jni/
java_vm_ext.h
149 jobject AddGlobalRef(Thread* self, ObjPtr<mirror::Object> obj)
153 jweak AddWeakGlobalRef(Thread* self, ObjPtr<mirror::Object> obj)
157 void DeleteGlobalRef(Thread* self, jobject obj) REQUIRES(!Locks::jni_globals_lock_);
159 void DeleteWeakGlobalRef(Thread* self, jweak obj) REQUIRES(!Locks::jni_weak_globals_lock_);
168 void UpdateGlobal(Thread* self, IndirectRef ref, ObjPtr<mirror::Object> result)
172 ObjPtr<mirror::Object> DecodeWeakGlobal(Thread* self, IndirectRef ref)
176 ObjPtr<mirror::Object> DecodeWeakGlobalLocked(Thread* self, IndirectRef ref)
182 ObjPtr<mirror::Object> DecodeWeakGlobalDuringShutdown(Thread* self, IndirectRef ref)
187 bool IsWeakGlobalCleared(Thread* self, IndirectRef ref)
191 void UpdateWeakGlobal(Thread* self, IndirectRef ref, ObjPtr<mirror::Object> result)
[all …]
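The java_vm_ext.h declarations above are the VM-side API for JNI global and weak-global references. A hedged sketch of pairing AddGlobalRef with DeleteGlobalRef; the caller and its lock annotation are assumptions, and the VM is reached through Runtime::Current()->GetJavaVM():

```cpp
// Sketch: creating and releasing a JNI global reference via JavaVMExt, per java_vm_ext.h above.
#include "jni/java_vm_ext.h"
#include "mirror/object.h"
#include "runtime.h"
#include "thread.h"

namespace art {

// Hypothetical caller; assumes the shared mutator lock is already held.
void PinAcrossNativeCall(ObjPtr<mirror::Object> obj) REQUIRES_SHARED(Locks::mutator_lock_) {
  Thread* self = Thread::Current();
  JavaVMExt* vm = Runtime::Current()->GetJavaVM();
  jobject global = vm->AddGlobalRef(self, obj);  // survives local frames and GC moves
  // ... hand `global` to code that outlives the current stack frame ...
  vm->DeleteGlobalRef(self, global);             // global refs must be released explicitly
}

}  // namespace art
```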
jni_id_manager.cc
116 bool EnsureIdsArray(Thread* self, ObjPtr<mirror::Class> k, ArtType* t)
120 bool EnsureIdsArray(Thread* self, ObjPtr<mirror::Class> k, ArtField* field) { in EnsureIdsArray() argument
121 ScopedExceptionStorage ses(self); in EnsureIdsArray()
122 StackHandleScope<1> hs(self); in EnsureIdsArray()
124 if (Locks::mutator_lock_->IsExclusiveHeld(self)) { in EnsureIdsArray()
131 if (self->IsExceptionPending()) { in EnsureIdsArray()
132 self->AssertPendingOOMException(); in EnsureIdsArray()
140 bool EnsureIdsArray(Thread* self, ObjPtr<mirror::Class> k, ArtMethod* method) { in EnsureIdsArray() argument
148 StackHandleScope<1> hs(self); in EnsureIdsArray()
150 if (Locks::mutator_lock_->IsExclusiveHeld(self) || !Locks::mutator_lock_->IsSharedHeld(self)) { in EnsureIdsArray()
[all …]
/art/runtime/verifier/
class_verifier.cc
67 FailureKind ClassVerifier::ReverifyClass(Thread* self, in ReverifyClass() argument
73 StackHandleScope<1> hs(self); in ReverifyClass()
115 FailureKind res = CommonVerifyClass(self, in ReverifyClass()
130 FailureKind ClassVerifier::VerifyClass(Thread* self, in VerifyClass() argument
141 return CommonVerifyClass(self, in VerifyClass()
151 FailureKind ClassVerifier::CommonVerifyClass(Thread* self, in CommonVerifyClass() argument
183 StackHandleScope<2> hs(self); in CommonVerifyClass()
186 return VerifyClass(self, in CommonVerifyClass()
200 FailureKind ClassVerifier::VerifyClass(Thread* self, in VerifyClass() argument
211 return VerifyClass(self, in VerifyClass()
[all …]
/art/runtime/mirror/
class.cc
136 Thread* self = Thread::Current(); in GetPrimitiveClass() local
139 self->ThrowNewException("Ljava/lang/NullPointerException;", /* msg= */ nullptr); in GetPrimitiveClass()
141 self->ThrowNewException("Ljava/lang/ClassNotFoundException;", name->ToModifiedUtf8().c_str()); in GetPrimitiveClass()
147 ObjPtr<ClassExt> Class::EnsureExtDataPresent(Handle<Class> h_this, Thread* self) { in EnsureExtDataPresent() argument
152 StackHandleScope<2> hs(self); in EnsureExtDataPresent()
154 Handle<Throwable> throwable(hs.NewHandle(self->GetException())); in EnsureExtDataPresent()
155 self->ClearException(); in EnsureExtDataPresent()
157 Handle<ClassExt> new_ext(hs.NewHandle(ClassExt::Alloc(self))); in EnsureExtDataPresent()
161 self->AssertPendingOOMException(); in EnsureExtDataPresent()
185 self->SetException(throwable.Get()); in EnsureExtDataPresent()
[all …]
emulated_stack_frame.cc
147 Thread* self, in CreateFromShadowFrameAndArgs() argument
152 StackHandleScope<6> hs(self); in CreateFromShadowFrameAndArgs()
176 mirror::ObjectArray<mirror::Object>::Alloc(self, array_class, refs_size))); in CreateFromShadowFrameAndArgs()
178 DCHECK(self->IsExceptionPending()); in CreateFromShadowFrameAndArgs()
182 Handle<ByteArray> stack_frame(hs.NewHandle(ByteArray::Alloc(self, frame_size))); in CreateFromShadowFrameAndArgs()
184 DCHECK(self->IsExceptionPending()); in CreateFromShadowFrameAndArgs()
192 self, caller_type, callee_type, &getter, &setter, num_method_params)) { in CreateFromShadowFrameAndArgs()
198 ObjPtr<EmulatedStackFrame>::DownCast(GetClassRoot<EmulatedStackFrame>()->AllocObject(self)))); in CreateFromShadowFrameAndArgs()
207 bool EmulatedStackFrame::WriteToShadowFrame(Thread* self, in WriteToShadowFrame() argument
220 StackHandleScope<3> hs(self); in WriteToShadowFrame()
[all …]
/art/runtime/gc/space/
space_test.h
56 ObjPtr<mirror::Class> GetByteArrayClass(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_) { in GetByteArrayClass() argument
59 Runtime::Current()->GetClassLinker()->FindSystemClass(self, "[B"); in GetByteArrayClass()
61 byte_array_class_ = self->GetJniEnv()->NewLocalRef(byte_array_class.Ptr()); in GetByteArrayClass()
64 return self->DecodeJObject(byte_array_class_)->AsClass(); in GetByteArrayClass()
68 Thread* self, in Alloc() argument
74 StackHandleScope<1> hs(self); in Alloc()
75 Handle<mirror::Class> byte_array_class(hs.NewHandle(GetByteArrayClass(self))); in Alloc()
76 mirror::Object* obj = alloc_space->Alloc(self, in Alloc()
88 Thread* self, in AllocWithGrowth() argument
94 StackHandleScope<1> hs(self); in AllocWithGrowth()
[all …]
memory_tool_malloc_space.h
34 mirror::Object* AllocWithGrowth(Thread* self, size_t num_bytes, size_t* bytes_allocated,
37 mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated,
39 mirror::Object* AllocThreadUnsafe(Thread* self, size_t num_bytes, size_t* bytes_allocated,
45 size_t Free(Thread* self, mirror::Object* ptr) override
48 size_t FreeList(Thread* self, size_t num_ptrs, mirror::Object** ptrs) override
large_object_space.cc
54 mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated, in Alloc() argument
58 LargeObjectMapSpace::Alloc(self, num_bytes + kMemoryToolRedZoneBytes * 2, bytes_allocated, in Alloc()
76 bool IsZygoteLargeObject(Thread* self, mirror::Object* obj) const override { in IsZygoteLargeObject() argument
77 return LargeObjectMapSpace::IsZygoteLargeObject(self, ObjectWithRedzone(obj)); in IsZygoteLargeObject()
80 size_t Free(Thread* self, mirror::Object* obj) override { in Free() argument
83 return LargeObjectMapSpace::Free(self, object_with_rdz); in Free()
136 mirror::Object* LargeObjectMapSpace::Alloc(Thread* self, size_t num_bytes, in Alloc() argument
151 MutexLock mu(self, lock_); in Alloc()
173 bool LargeObjectMapSpace::IsZygoteLargeObject(Thread* self, mirror::Object* obj) const { in IsZygoteLargeObject() argument
174 MutexLock mu(self, lock_); in IsZygoteLargeObject()
[all …]
rosalloc_space.cc
202 mirror::Object* RosAllocSpace::AllocWithGrowth(Thread* self, size_t num_bytes, in AllocWithGrowth() argument
207 MutexLock mu(self, lock_); in AllocWithGrowth()
212 result = AllocCommon(self, num_bytes, bytes_allocated, usable_size, in AllocWithGrowth()
260 size_t RosAllocSpace::Free(Thread* self, mirror::Object* ptr) { in Free() argument
266 MutexLock mu(self, lock_); in Free()
269 return rosalloc_->Free(self, ptr); in Free()
272 size_t RosAllocSpace::FreeList(Thread* self, size_t num_ptrs, mirror::Object** ptrs) { in FreeList() argument
286 MutexLock mu(self, lock_); in FreeList()
306 const size_t bytes_freed = rosalloc_->BulkFree(self, reinterpret_cast<void**>(ptrs), num_ptrs); in FreeList()
316 Thread* const self = Thread::Current(); in Trim() local
[all …]
/art/runtime/jit/
jit_code_cache.cc
371 bool JitCodeCache::WaitForPotentialCollectionToComplete(Thread* self) { in WaitForPotentialCollectionToComplete() argument
375 lock_cond_.Wait(self); in WaitForPotentialCollectionToComplete()
527 void JitCodeCache::RemoveMethodsIn(Thread* self, const LinearAlloc& alloc) { in RemoveMethodsIn() argument
535 MutexLock mu(self, *Locks::jit_lock_); in RemoveMethodsIn()
583 bool JitCodeCache::IsWeakAccessEnabled(Thread* self) const { in IsWeakAccessEnabled()
585 ? self->GetWeakRefAccessEnabled() in IsWeakAccessEnabled()
589 void JitCodeCache::WaitUntilInlineCacheAccessible(Thread* self) { in WaitUntilInlineCacheAccessible() argument
590 if (IsWeakAccessEnabled(self)) { in WaitUntilInlineCacheAccessible()
593 ScopedThreadSuspension sts(self, kWaitingWeakGcRootRead); in WaitUntilInlineCacheAccessible()
594 MutexLock mu(self, *Locks::jit_lock_); in WaitUntilInlineCacheAccessible()
[all …]
jit.h
196 Thread* self, JitMemoryRegion* region, ArtMethod* method, CompilationKind compilation_kind)
247 bool CompileMethod(ArtMethod* method, Thread* self, CompilationKind compilation_kind, bool prejit)
303 void WaitForCompilationToFinish(Thread* self);
309 ALWAYS_INLINE void AddSamples(Thread* self,
321 void NotifyInterpreterToCompiledCodeTransition(Thread* self, ArtMethod* caller) in NotifyInterpreterToCompiledCodeTransition() argument
324 AddSamples(self, caller, options_->GetInvokeTransitionWeight(), false); in NotifyInterpreterToCompiledCodeTransition()
328 void NotifyCompiledCodeToInterpreterTransition(Thread* self, ArtMethod* callee) in NotifyCompiledCodeToInterpreterTransition() argument
331 AddSamples(self, callee, options_->GetInvokeTransitionWeight(), false); in NotifyCompiledCodeToInterpreterTransition()
358 static bool ShouldUsePriorityThreadWeight(Thread* self);
405 uint32_t CompileMethodsFromProfile(Thread* self,
[all …]
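jit.h above exposes the hooks the rest of the runtime uses to feed hotness data to the JIT. A hedged sketch of notifying it about an interpreter-to-compiled-code transition; the call site is hypothetical, and the null check reflects that the runtime may be running without a JIT:

```cpp
// Sketch: reporting a transition to the JIT, per the jit.h declarations above.
#include "art_method.h"
#include "jit/jit.h"
#include "runtime.h"
#include "thread.h"

namespace art {

void NoteInterpreterToCompiledTransition(ArtMethod* caller)
    REQUIRES_SHARED(Locks::mutator_lock_) {  // hypothetical call site
  Thread* self = Thread::Current();
  jit::Jit* jit = Runtime::Current()->GetJit();
  if (jit != nullptr) {  // no JIT is configured in some run modes
    jit->NotifyInterpreterToCompiledCodeTransition(self, caller);
  }
}

}  // namespace art
```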
/art/runtime/gc/collector/
mark_sweep.cc
148 Thread* self = Thread::Current(); in RunPhases() local
150 Locks::mutator_lock_->AssertNotHeld(self); in RunPhases()
154 ReaderMutexLock mu(self, *Locks::mutator_lock_); in RunPhases()
171 ReaderMutexLock mu(self, *Locks::mutator_lock_); in RunPhases()
178 void MarkSweep::ProcessReferences(Thread* self) { in ProcessReferences() argument
179 WriterMutexLock mu(self, *Locks::heap_bitmap_lock_); in ProcessReferences()
189 Thread* self = Thread::Current(); in PausePhase() local
190 Locks::mutator_lock_->AssertExclusiveHeld(self); in PausePhase()
193 WriterMutexLock mu(self, *Locks::heap_bitmap_lock_); in PausePhase()
201 WriterMutexLock mu(self, *Locks::heap_bitmap_lock_); in PausePhase()
[all …]
/art/runtime/
runtime_intrinsics.cc
33 ArtMethod* FindIntrinsicMethod(Thread* self, in FindIntrinsicMethod() argument
40 ObjPtr<mirror::Class> cls = class_linker->FindSystemClass(self, class_name); in FindIntrinsicMethod()
55 bool InitializeIntrinsic(Thread* self, in InitializeIntrinsic() argument
62 ArtMethod* method = FindIntrinsicMethod(self, class_name, method_name, signature); in InitializeIntrinsic()
75 bool IsIntrinsicInitialized(Thread* self, in IsIntrinsicInitialized() argument
82 ArtMethod* method = FindIntrinsicMethod(self, class_name, method_name, signature); in IsIntrinsicInitialized()
class_linker.h
167 void AddExtraBootDexFiles(Thread* self,
191 ObjPtr<mirror::Class> FindClass(Thread* self,
199 ObjPtr<mirror::Class> FindSystemClass(Thread* self, const char* descriptor) in FindSystemClass() argument
202 return FindClass(self, descriptor, ScopedNullHandle<mirror::ClassLoader>()); in FindSystemClass()
206 ObjPtr<mirror::Class> FindArrayClass(Thread* self, ObjPtr<mirror::Class> element_class)
216 ObjPtr<mirror::Class> DefineClass(Thread* self,
227 ObjPtr<mirror::Class> LookupClass(Thread* self,
357 ArtMethod* ResolveMethod(Thread* self, uint32_t method_idx, ArtMethod* referrer, InvokeType type)
416 ObjPtr<mirror::MethodType> ResolveMethodType(Thread* self,
423 ObjPtr<mirror::MethodType> ResolveMethodType(Thread* self,
[all …]
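As the FindSystemClass() body shown above makes explicit, it is simply FindClass() with a null class loader, i.e. a boot-classpath lookup. A minimal hedged sketch, reusing the "[B" (byte[]) descriptor that space_test.h used earlier in these results; the helper name is illustrative:

```cpp
// Sketch: resolving a boot-classpath class through ClassLinker, per class_linker.h above.
#include "base/logging.h"
#include "class_linker.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "thread.h"

namespace art {

void WarmUpByteArrayClass() {  // hypothetical helper
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);  // class lookup needs the shared mutator lock
  ClassLinker* linker = Runtime::Current()->GetClassLinker();
  // "[B" is the descriptor for byte[]; FindSystemClass resolves against the boot class loader.
  ObjPtr<mirror::Class> byte_array_class = linker->FindSystemClass(self, "[B");
  CHECK(byte_array_class != nullptr);  // expected to succeed for core classes
}

}  // namespace art
```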
instrumentation.cc
107 InstrumentationStackPopper::InstrumentationStackPopper(Thread* self) in InstrumentationStackPopper() argument
108 : self_(self), in InstrumentationStackPopper()
548 Thread* self = Thread::Current(); in DeoptimizeAllThreadFrames() local
549 MutexLock mu(self, *Locks::thread_list_lock_); in DeoptimizeAllThreadFrames()
552 Locks::mutator_lock_->AssertExclusiveHeld(self); in DeoptimizeAllThreadFrames()
788 Thread* const self = Thread::Current(); in UpdateStubs() local
790 Locks::mutator_lock_->AssertExclusiveHeld(self); in UpdateStubs()
791 Locks::thread_list_lock_->AssertNotHeld(self); in UpdateStubs()
804 MutexLock mu(self, *Locks::thread_list_lock_); in UpdateStubs()
814 ReaderMutexLock mu(self, *GetDeoptimizedMethodsLock()); in UpdateStubs()
[all …]
object_lock.cc
26 ObjectLock<T>::ObjectLock(Thread* self, Handle<T> object) : self_(self), obj_(object) { in ObjectLock() argument
52 ObjectTryLock<T>::ObjectTryLock(Thread* self, Handle<T> object) : self_(self), obj_(object) { in ObjectTryLock() argument
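ObjectLock and ObjectTryLock above wrap a Java object's monitor in RAII form, keyed on a handle so the object stays rooted while locked. A hedged sketch of the usage their constructors imply; the function and its lock annotation are illustrative:

```cpp
// Sketch: scoped monitor locking with ObjectLock, per object_lock.cc above.
#include "handle.h"
#include "mirror/object.h"
#include "object_lock.h"
#include "thread.h"

namespace art {

// Hypothetical caller; a live Handle<> already implies the shared mutator lock is held.
void TouchUnderMonitor(Handle<mirror::Object> obj) REQUIRES_SHARED(Locks::mutator_lock_) {
  Thread* self = Thread::Current();
  ObjectLock<mirror::Object> lock(self, obj);  // enters obj's monitor; exits in the destructor
  // ... changes that must appear atomic to other threads synchronizing on obj ...
}

}  // namespace art
```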
runtime_callbacks_test.cc
56 Thread* self = Thread::Current(); in SetUp() local
57 ScopedObjectAccess soa(self); in SetUp()
58 ScopedThreadSuspension sts(self, kWaitingForDebuggerToAttach); in SetUp()
65 Thread* self = Thread::Current(); in TearDown() local
66 ScopedObjectAccess soa(self); in TearDown()
67 ScopedThreadSuspension sts(self, kWaitingForDebuggerToAttach); in TearDown()
120 void ThreadStart(Thread* self) override { in ThreadStart()
123 stored_self = self; in ThreadStart()
129 void ThreadDeath(Thread* self) override { in ThreadDeath()
130 if (state == CallbackState::kStarted && self == stored_self) { in ThreadDeath()
[all …]
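runtime_callbacks_test.cc above exercises a callback that records which thread started and later died. A hedged sketch of such a callback; the base-class name ThreadLifecycleCallback is assumed from runtime_callbacks.h, and registration with the runtime is omitted:

```cpp
// Sketch: a thread lifecycle callback like the one in runtime_callbacks_test.cc above.
#include "runtime_callbacks.h"
#include "thread.h"

namespace art {

class RecordingThreadCallback : public ThreadLifecycleCallback {  // hypothetical name
 public:
  void ThreadStart(Thread* self) override { stored_self_ = self; }
  void ThreadDeath(Thread* self) override {
    if (self == stored_self_) {  // mirrors the stored_self comparison in the snippet
      stored_self_ = nullptr;
    }
  }

 private:
  Thread* stored_self_ = nullptr;
};

}  // namespace art
```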
monitor_pool.cc
108 Monitor* MonitorPool::CreateMonitorInPool(Thread* self, in CreateMonitorInPool() argument
114 MutexLock mu(self, *Locks::allocated_monitor_ids_lock_); in CreateMonitorInPool()
129 Monitor* monitor = new(mon_uninitialized) Monitor(self, owner, obj, hash_code, id); in CreateMonitorInPool()
134 void MonitorPool::ReleaseMonitorToPool(Thread* self, Monitor* monitor) { in ReleaseMonitorToPool() argument
136 MutexLock mu(self, *Locks::allocated_monitor_ids_lock_); in ReleaseMonitorToPool()
153 void MonitorPool::ReleaseMonitorsToPool(Thread* self, MonitorList::Monitors* monitors) { in ReleaseMonitorsToPool() argument
155 ReleaseMonitorToPool(self, mon); in ReleaseMonitorsToPool()
/art/compiler/optimizing/
intrinsics.cc
75 static ObjPtr<mirror::Class> LookupInitializedClass(Thread* self, in LookupInitializedClass() argument
80 class_linker->LookupClass(self, descriptor, /* class_loader= */ nullptr); in LookupInitializedClass()
100 static bool CheckIntegerCache(Thread* self, in CheckIntegerCache() argument
110 LookupInitializedClass(self, class_linker, kIntegerCacheDescriptor); in CheckIntegerCache()
112 LookupInitializedClass(self, class_linker, kIntegerDescriptor); in CheckIntegerCache()
167 Thread* self = Thread::Current(); in ComputeIntegerValueOfLocations() local
168 ScopedObjectAccess soa(self); in ComputeIntegerValueOfLocations()
170 self, kIntegerCacheDescriptor, /* class_loader= */ nullptr); in ComputeIntegerValueOfLocations()
177 class_linker->LookupClass(self, kIntegerDescriptor, /* class_loader= */ nullptr); in ComputeIntegerValueOfLocations()
210 Thread* self = Thread::Current(); in ComputeIntegerValueOfLocations() local
[all …]
