
Searched refs:lock_ (Results 1 – 25 of 41) sorted by relevance

/art/runtime/gc/
task_processor.h
57 void AddTask(Thread* self, HeapTask* task) REQUIRES(!lock_);
58 HeapTask* GetTask(Thread* self) REQUIRES(!lock_);
59 void Start(Thread* self) REQUIRES(!lock_);
62 void Stop(Thread* self) REQUIRES(!lock_);
63 void RunAllTasks(Thread* self) REQUIRES(!lock_);
64 bool IsRunning() const REQUIRES(!lock_);
66 REQUIRES(!lock_);
67 Thread* GetRunningThread() const REQUIRES(!lock_);
77 mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
78 ConditionVariable cond_ GUARDED_BY(lock_);
[all …]
task_processor.cc
26 : lock_("Task processor lock", kReferenceProcessorLock), in TaskProcessor()
27 cond_("Task processor condition", lock_), in TaskProcessor()
43 MutexLock mu(self, lock_); in AddTask()
50 MutexLock mu(self, lock_); in GetTask()
80 MutexLock mu(self, lock_); in UpdateTargetRunTime()
102 MutexLock mu(Thread::Current(), lock_); in IsRunning()
107 MutexLock mu(Thread::Current(), lock_); in GetRunningThread()
112 MutexLock mu(self, lock_); in Stop()
119 MutexLock mu(self, lock_); in Start()
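
The task_processor entries show the locking pattern that recurs through most of these results: a mutable Mutex lock_, state such as cond_ declared GUARDED_BY(lock_), and public methods annotated REQUIRES(!lock_) because they acquire the lock themselves through a scoped MutexLock. Below is a minimal, self-contained sketch of that pattern written directly against Clang's thread-safety attributes (checked with clang -Wthread-safety); Mutex, MutexLock and TaskQueue here are illustrative stand-ins, not the ART classes.

#include <mutex>
#include <queue>

// Standard Clang thread-safety attribute spellings, defined inline so the
// sketch stands alone (real codebases pull these in from a shared header).
#define CAPABILITY(x) __attribute__((capability(x)))
#define SCOPED_CAPABILITY __attribute__((scoped_lockable))
#define GUARDED_BY(x) __attribute__((guarded_by(x)))
#define REQUIRES(...) __attribute__((requires_capability(__VA_ARGS__)))
#define ACQUIRE(...) __attribute__((acquire_capability(__VA_ARGS__)))
#define RELEASE(...) __attribute__((release_capability(__VA_ARGS__)))

class CAPABILITY("mutex") Mutex {
 public:
  void Lock() ACQUIRE() { mu_.lock(); }
  void Unlock() RELEASE() { mu_.unlock(); }
 private:
  std::mutex mu_;
};

// Scoped guard, analogous in spirit to the MutexLock seen in the .cc matches.
class SCOPED_CAPABILITY MutexLock {
 public:
  explicit MutexLock(Mutex& mu) ACQUIRE(mu) : mu_(mu) { mu_.Lock(); }
  ~MutexLock() RELEASE() { mu_.Unlock(); }
 private:
  Mutex& mu_;
};

class TaskQueue {
 public:
  // REQUIRES(!lock_): callers must not already hold lock_; the method takes it
  // itself for the lifetime of the scoped guard.
  void AddTask(int task) REQUIRES(!lock_) {
    MutexLock mu(lock_);
    tasks_.push(task);
  }
  bool IsEmpty() const REQUIRES(!lock_) {
    MutexLock mu(lock_);
    return tasks_.empty();
  }
 private:
  mutable Mutex lock_;                       // mutable so const readers can lock
  std::queue<int> tasks_ GUARDED_BY(lock_);  // only touched with lock_ held
};
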
reference_queue.h
64 REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!*lock_);
118 Mutex* const lock_;
/art/runtime/
linear_alloc.h
32 void* Alloc(Thread* self, size_t size) REQUIRES(!lock_);
33 void* AllocAlign16(Thread* self, size_t size) REQUIRES(!lock_);
36 void* Realloc(Thread* self, void* ptr, size_t old_size, size_t new_size) REQUIRES(!lock_);
40 T* AllocArray(Thread* self, size_t elements) REQUIRES(!lock_) { in AllocArray()
45 size_t GetUsedMemory() const REQUIRES(!lock_);
47 ArenaPool* GetArenaPool() REQUIRES(!lock_);
50 bool Contains(void* ptr) const REQUIRES(!lock_);
57 mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
58 ArenaAllocator allocator_ GUARDED_BY(lock_);
class_table.h
143 REQUIRES(!lock_)
149 REQUIRES(!lock_)
154 REQUIRES(!lock_)
159 REQUIRES(!lock_)
164 REQUIRES(!lock_)
169 REQUIRES(!lock_)
176 REQUIRES(!lock_)
183 REQUIRES(!lock_)
189 REQUIRES(!lock_)
195 REQUIRES(!lock_)
[all …]
signal_catcher.h
49 void SetHaltFlag(bool new_value) REQUIRES(!lock_);
50 bool ShouldHalt() REQUIRES(!lock_);
51 int WaitForSignal(Thread* self, SignalSet& signals) REQUIRES(!lock_);
53 mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
54 ConditionVariable cond_ GUARDED_BY(lock_);
55 bool halt_ GUARDED_BY(lock_);
56 pthread_t pthread_ GUARDED_BY(lock_);
57 Thread* thread_ GUARDED_BY(lock_);
linear_alloc.cc
23 LinearAlloc::LinearAlloc(ArenaPool* pool) : lock_("linear alloc"), allocator_(pool) { in LinearAlloc()
27 MutexLock mu(self, lock_); in Realloc()
32 MutexLock mu(self, lock_); in Alloc()
37 MutexLock mu(self, lock_); in AllocAlign16()
42 MutexLock mu(Thread::Current(), lock_); in GetUsedMemory()
47 MutexLock mu(Thread::Current(), lock_); in GetArenaPool()
52 MutexLock mu(Thread::Current(), lock_); in Contains()
class_table.cc
25 ClassTable::ClassTable() : lock_("Class loader classes", kClassLoaderClassesLock) { in ClassTable()
32 WriterMutexLock mu(Thread::Current(), lock_); in FreezeSnapshot()
41 ReaderMutexLock mu(Thread::Current(), lock_); in LookupByDescriptor()
55 WriterMutexLock mu(Thread::Current(), lock_); in UpdateClass()
91 ReaderMutexLock mu(Thread::Current(), lock_); in NumZygoteClasses()
100 ReaderMutexLock mu(Thread::Current(), lock_); in NumNonZygoteClasses()
105 ReaderMutexLock mu(Thread::Current(), lock_); in NumReferencedZygoteClasses()
114 ReaderMutexLock mu(Thread::Current(), lock_); in NumReferencedNonZygoteClasses()
120 ReaderMutexLock mu(Thread::Current(), lock_); in Lookup()
132 WriterMutexLock mu(Thread::Current(), lock_); in TryInsert()
[all …]
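
class_table.cc splits its matches between ReaderMutexLock (lookups and counting) and WriterMutexLock (FreezeSnapshot, UpdateClass, TryInsert), so concurrent readers never block one another. A rough standard-library analogue, assuming std::shared_mutex (C++17) in place of ART's reader-writer mutex; ClassMap is a hypothetical class used only for illustration.

#include <shared_mutex>
#include <string>
#include <unordered_map>

class ClassMap {
 public:
  // Lookup path: shared (reader) lock, held concurrently by many threads.
  const std::string* Lookup(const std::string& descriptor) const {
    std::shared_lock<std::shared_mutex> lock(lock_);
    auto it = classes_.find(descriptor);
    return it != classes_.end() ? &it->second : nullptr;
  }
  // Update path: exclusive (writer) lock, excludes readers and other writers.
  void Insert(const std::string& descriptor, std::string klass) {
    std::unique_lock<std::shared_mutex> lock(lock_);
    classes_.emplace(descriptor, std::move(klass));
  }
 private:
  mutable std::shared_mutex lock_;
  std::unordered_map<std::string, std::string> classes_;  // guarded by lock_
};
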
signal_catcher.cc
73 : lock_("SignalCatcher lock"), in SignalCatcher()
74 cond_("SignalCatcher::cond_", lock_), in SignalCatcher()
82 MutexLock mu(self, lock_); in SignalCatcher()
97 MutexLock mu(Thread::Current(), lock_); in SetHaltFlag()
102 MutexLock mu(Thread::Current(), lock_); in ShouldHalt()
182 MutexLock mu(self, signal_catcher->lock_); in Run()
class_table-inl.h
32 ReaderMutexLock mu(Thread::Current(), lock_); in VisitRoots()
50 ReaderMutexLock mu(Thread::Current(), lock_); in VisitRoots()
68 ReaderMutexLock mu(Thread::Current(), lock_); in Visit()
81 ReaderMutexLock mu(Thread::Current(), lock_); in Visit()
140 WriterMutexLock mu(Thread::Current(), lock_); in RemoveStrongRoots()
barrier.h
83 return lock_.get(); in GetLock()
89 std::unique_ptr<Mutex> lock_ ACQUIRED_AFTER(Locks::abort_lock_);
barrier.cc
30 lock_(new Mutex("GC barrier lock", kThreadSuspendCountLock)), in Barrier()
31 condition_(new ConditionVariable("GC barrier condition", *lock_)), in Barrier()
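
barrier.h/.cc pair lock_ with a condition variable: waiters sleep on the condition while the guarded count is updated under the lock. A minimal count-down barrier in the same spirit, using standard types rather than ART's Mutex and ConditionVariable; this is not the actual Barrier implementation.

#include <condition_variable>
#include <mutex>

class CountBarrier {
 public:
  explicit CountBarrier(int count) : count_(count) {}
  // Called by worker threads; the last one to pass wakes every waiter.
  void Pass() {
    std::lock_guard<std::mutex> lock(lock_);
    if (--count_ == 0) {
      cond_.notify_all();
    }
  }
  // Blocks until the count has dropped to zero.
  void Wait() {
    std::unique_lock<std::mutex> lock(lock_);
    cond_.wait(lock, [this] { return count_ == 0; });
  }
 private:
  std::mutex lock_;
  std::condition_variable cond_;
  int count_;  // guarded by lock_
};
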
/art/runtime/gc/space/
large_object_space.h
54 MutexLock mu(Thread::Current(), lock_); in GetBytesAllocated()
58 MutexLock mu(Thread::Current(), lock_); in GetObjectsAllocated()
62 MutexLock mu(Thread::Current(), lock_); in GetTotalBytesAllocated()
66 MutexLock mu(Thread::Current(), lock_); in GetTotalObjectsAllocated()
126 mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
132 uint64_t num_bytes_allocated_ GUARDED_BY(lock_);
133 uint64_t num_objects_allocated_ GUARDED_BY(lock_);
137 uint64_t total_bytes_allocated_ GUARDED_BY(lock_);
138 uint64_t total_objects_allocated_ GUARDED_BY(lock_);
157 size_t AllocationSize(mirror::Object* obj, size_t* usable_size) override REQUIRES(!lock_);
[all …]
dlmalloc_space.h
60 size_t* bytes_tl_bulk_allocated) override REQUIRES(!lock_);
66 size_t* bytes_tl_bulk_allocated) override REQUIRES(!lock_) { in Alloc()
76 REQUIRES(!lock_)
80 REQUIRES(!lock_)
98 REQUIRES(!lock_);
116 void Walk(WalkCallback callback, void* arg) override REQUIRES(!lock_);
171 REQUIRES(lock_);
dlmalloc_space.cc
172 MutexLock mu(self, lock_); in AllocWithGrowth()
227 MutexLock mu(self, lock_); in Free()
256 MutexLock mu(self, lock_); in FreeList()
277 MutexLock mu(self, lock_); in FreeList()
284 MutexLock mu(Thread::Current(), lock_); in Trim()
295 MutexLock mu(Thread::Current(), lock_); in Walk()
301 MutexLock mu(Thread::Current(), lock_); in GetFootprint()
306 MutexLock mu(Thread::Current(), lock_); in GetFootprintLimit()
311 MutexLock mu(Thread::Current(), lock_); in SetFootprintLimit()
324 MutexLock mu(Thread::Current(), lock_); in GetBytesAllocated()
[all …]
large_object_space.cc
115 lock_(lock_name, kAllocSpaceLock), in LargeObjectSpace()
151 MutexLock mu(self, lock_); in Alloc()
174 MutexLock mu(self, lock_); in IsZygoteLargeObject()
181 MutexLock mu(self, lock_); in SetAllLargeObjectsAsZygoteObjects()
192 MutexLock mu(self, lock_); in Free()
209 MutexLock mu(Thread::Current(), lock_); in AllocationSize()
231 MutexLock mu(Thread::Current(), lock_); in Walk()
240 MutexLock mu(Thread::Current(), lock_); in ForEachMemMap()
248 if (lock_.IsExclusiveHeld(self)) { in Contains()
252 MutexLock mu(self, lock_); in Contains()
[all …]
rosalloc_space.cc
207 MutexLock mu(self, lock_); in AllocWithGrowth()
266 MutexLock mu(self, lock_); in Free()
286 MutexLock mu(self, lock_); in FreeList()
319 MutexLock mu(self, lock_); in Trim()
336 MutexLock mu(Thread::Current(), lock_); in GetFootprint()
341 MutexLock mu(Thread::Current(), lock_); in GetFootprintLimit()
346 MutexLock mu(Thread::Current(), lock_); in SetFootprintLimit()
/art/runtime/gc/allocator/
rosalloc.h
54 size_t ByteSize(RosAlloc* rosalloc) const REQUIRES(rosalloc->lock_) { in ByteSize()
63 REQUIRES(rosalloc->lock_) { in SetByteSize()
72 void* End(RosAlloc* rosalloc) REQUIRES(rosalloc->lock_) { in End()
78 REQUIRES(rosalloc->lock_) { in IsLargerThanPageReleaseThreshold()
82 REQUIRES(rosalloc->lock_) { in IsAtEndOfSpace()
85 bool ShouldReleasePages(RosAlloc* rosalloc) REQUIRES(rosalloc->lock_) { in ShouldReleasePages()
102 void ReleasePages(RosAlloc* rosalloc) REQUIRES(rosalloc->lock_) { in ReleasePages()
722 AllocationTrackingSet<FreePageRun*, kAllocatorTagRosAlloc> free_page_runs_ GUARDED_BY(lock_);
755 GUARDED_BY(lock_);
758 Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
[all …]
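
rosalloc.h shows the annotation without the negation: REQUIRES(rosalloc->lock_) means the caller must already hold that object's lock when calling the method, which takes no lock itself; the analysis then checks call sites rather than the body. A sketch of that convention, reusing the illustrative Mutex, MutexLock and annotation macros from the sketch further up (Pool and Run are hypothetical types):

#include <cstddef>
#include <vector>

class Pool {
 public:
  class Run {
   public:
    // No locking here: the annotation obliges the caller to hold pool->lock_.
    bool IsFull(const Pool* pool) const REQUIRES(pool->lock_) {
      return used_ == pool->run_capacity_;
    }
    size_t used_ = 0;
  };

  size_t FullRuns() REQUIRES(!lock_) {
    MutexLock mu(lock_);            // taken once at the public entry point...
    size_t n = 0;
    for (Run* run : runs_) {
      if (run->IsFull(this)) {      // ...inner helpers just assert the requirement.
        ++n;
      }
    }
    return n;
  }

 private:
  Mutex lock_;
  size_t run_capacity_ GUARDED_BY(lock_) = 64;
  std::vector<Run*> runs_ GUARDED_BY(lock_);
};
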
/art/dex2oat/dex/
dex_to_dex_compiler.h
98 BitVector* GetOrAddBitVectorForDex(const DexFile* dex_file) REQUIRES(lock_);
107 mutable Mutex lock_;
115 GUARDED_BY(lock_);
117 size_t num_code_items_ GUARDED_BY(lock_) = 0u;
/art/openjdkjvmti/
object_tagging.h
51 : lock_("Object tag table lock", art::LockLevel::kGenericBottomLock), in ObjectTagTable()
97 REQUIRES(!allow_disallow_lock_, !lock_);
99 art::Mutex lock_ BOTTOM_MUTEX_ACQUIRED_AFTER;
100 std::vector<jlong> null_tags_ GUARDED_BY(lock_);
object_tagging.cc
61 art::MutexLock mu(art::Thread::Current(), lock_); in SendDelayedFreeEvents()
95 art::MutexLock mu(art::Thread::Current(), lock_); in HandleNullSweep()
/art/compiler/utils/
swap_space.h
39 void* Alloc(size_t size) REQUIRES(!lock_);
40 void Free(void* ptr, size_t size) REQUIRES(!lock_);
92 SpaceChunk NewFileChunk(size_t min_size) REQUIRES(lock_);
94 void RemoveChunk(FreeBySizeSet::const_iterator free_by_size_pos) REQUIRES(lock_);
95 void InsertChunk(const SpaceChunk& chunk) REQUIRES(lock_);
103 FreeByStartSet free_by_start_ GUARDED_BY(lock_);
105 FreeBySizeSet free_by_size_ GUARDED_BY(lock_);
107 mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
dedupe_set-inl.h
60 lock_(lock_name_.c_str()),
71 const StoreKey* Add(Thread* self, size_t hash, const InKey& in_key) REQUIRES(!lock_) {
72 MutexLock lock(self, lock_);
84 void UpdateStats(Thread* self, Stats* global_stats) REQUIRES(!lock_) {
89 MutexLock lock(self, lock_);
179 Mutex lock_;
180 HashSet<HashedKey<StoreKey>, ShardEmptyFn, ShardHashFn, ShardPred> keys_ GUARDED_BY(lock_);
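
dedupe_set-inl.h keeps a lock_ per shard rather than one global lock: the key's hash selects a shard and only that shard's mutex is taken, so unrelated insertions proceed in parallel. A rough sketch of the sharding idea with standard containers; DedupeSet below is illustrative, not the ART template.

#include <array>
#include <mutex>
#include <string>
#include <unordered_set>

class DedupeSet {
 public:
  // Returns a pointer to the canonical copy, inserting it if not yet present.
  const std::string* Add(const std::string& key) {
    size_t hash = std::hash<std::string>()(key);
    Shard& shard = shards_[hash % kShardCount];     // hash picks the shard...
    std::lock_guard<std::mutex> lock(shard.lock_);  // ...and only its lock is taken.
    return &*shard.keys_.insert(key).first;
  }

 private:
  static constexpr size_t kShardCount = 8;
  struct Shard {
    std::mutex lock_;
    std::unordered_set<std::string> keys_;  // guarded by this shard's lock_
  };
  std::array<Shard, kShardCount> shards_;
};

Pointers into an unordered_set stay valid across rehashing, so the returned canonical pointer remains usable after later insertions.
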
/art/runtime/base/
mem_map_arena_pool.cc
100 std::lock_guard<std::mutex> lock(lock_); in LockReclaimMemory()
107 std::lock_guard<std::mutex> lock(lock_); in AllocArena()
122 std::lock_guard<std::mutex> lock(lock_); in TrimMaps()
130 std::lock_guard<std::mutex> lock(lock_); in GetBytesAllocated()
159 std::lock_guard<std::mutex> lock(lock_); in FreeArenaChain()
/art/libartbase/base/
malloc_arena_pool.cc
101 std::lock_guard<std::mutex> lock(lock_); in LockReclaimMemory()
108 std::lock_guard<std::mutex> lock(lock_); in AllocArena()
127 std::lock_guard<std::mutex> lock(lock_); in GetBytesAllocated()
156 std::lock_guard<std::mutex> lock(lock_); in FreeArenaChain()
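
Both arena pools guard their bookkeeping with a plain std::mutex and std::lock_guard instead of the annotated ART Mutex. A simplified sketch of one way an AllocArena/FreeArenaChain pair can be built around such a lock (a hypothetical free-list pool, not the ART implementation):

#include <cstddef>
#include <mutex>

struct Arena {
  explicit Arena(size_t s) : size(s) {}
  size_t size;
  Arena* next = nullptr;
};

class ArenaPool {
 public:
  // Reuse a free arena that is large enough, otherwise allocate a fresh one.
  Arena* AllocArena(size_t size) {
    {
      std::lock_guard<std::mutex> lock(lock_);
      if (free_arenas_ != nullptr && free_arenas_->size >= size) {
        Arena* arena = free_arenas_;
        free_arenas_ = arena->next;
        arena->next = nullptr;
        return arena;
      }
    }
    return new Arena(size);  // slow path: allocate outside the lock
  }

  // Push a whole chain of arenas back onto the free list.
  void FreeArenaChain(Arena* first) {
    std::lock_guard<std::mutex> lock(lock_);
    while (first != nullptr) {
      Arena* next = first->next;
      first->next = free_arenas_;
      free_arenas_ = first;
      first = next;
    }
  }

 private:
  std::mutex lock_;
  Arena* free_arenas_ = nullptr;  // singly linked free list, guarded by lock_
};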
