/bionic/libc/bionic/

pthread_key.cpp
    85  uintptr_t seq = atomic_load_explicit(&key_map[i].seq, memory_order_relaxed);  in pthread_key_clean_all()
    95  atomic_load_explicit(&key_map[i].key_destructor, memory_order_relaxed));  in pthread_key_clean_all()
   100  if (atomic_load_explicit(&key_map[i].seq, memory_order_relaxed) != seq) {  in pthread_key_clean_all()
   126  uintptr_t seq = atomic_load_explicit(&key_map[i].seq, memory_order_relaxed);  in pthread_key_create()
   149  uintptr_t seq = atomic_load_explicit(&key_map[key].seq, memory_order_relaxed);  in pthread_key_delete()
   164  uintptr_t seq = atomic_load_explicit(&key_map[key].seq, memory_order_relaxed);  in pthread_getspecific()
   183  uintptr_t seq = atomic_load_explicit(&key_map[key].seq, memory_order_relaxed);  in pthread_setspecific()

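Every pthread_key hit above uses memory_order_relaxed plus a seq recheck instead of a lock: the slot's seq counter is read before and after the interesting field, and a changed seq means the slot was recycled mid-read. A minimal sketch of that shape, using an illustrative slot type rather than bionic's real definitions:

```cpp
#include <atomic>
#include <cstdint>

using key_destructor_t = void (*)(void*);

// Hypothetical slot mirroring the key_map entries listed above: seq is bumped
// on every create/delete, so readers can detect concurrent reuse of the slot.
struct KeySlot {
  std::atomic<uintptr_t> seq;
  std::atomic<key_destructor_t> key_destructor;
};

// The recheck pattern from pthread_key_clean_all(): snapshot seq, read the
// destructor, then verify seq is unchanged before trusting the value.
key_destructor_t load_destructor_if_stable(KeySlot* slot) {
  uintptr_t seq = std::atomic_load_explicit(&slot->seq, std::memory_order_relaxed);
  key_destructor_t dtor =
      std::atomic_load_explicit(&slot->key_destructor, std::memory_order_relaxed);
  if (std::atomic_load_explicit(&slot->seq, std::memory_order_relaxed) != seq) {
    return nullptr;  // slot was deleted or recreated mid-read; skip it
  }
  return dtor;
}
```
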
pthread_barrier.cpp
   120  while(atomic_load_explicit(&barrier->state, memory_order_acquire) == RELEASE) {  in pthread_barrier_wait()
   124  uint32_t prev_wait_count = atomic_load_explicit(&barrier->wait_count, memory_order_relaxed);  in pthread_barrier_wait()
   154  while (atomic_load_explicit(&barrier->state, memory_order_acquire) == WAIT) {  in pthread_barrier_wait()
   175  while (atomic_load_explicit(&barrier->state, memory_order_acquire) == RELEASE) {  in pthread_barrier_destroy()
   178  if (atomic_load_explicit(&barrier->wait_count, memory_order_relaxed) != 0) {  in pthread_barrier_destroy()

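The barrier hits split into acquire loads on state (phase transitions, which must also publish the other threads' writes) and a relaxed load of wait_count (a plain counter snapshot). A minimal sketch of the acquire spin, with an assumed WAIT/RELEASE encoding:

```cpp
#include <atomic>
#include <cstdint>

// Assumed two-phase encoding for illustration; bionic's values may differ.
enum BarrierState : uint32_t { WAIT, RELEASE };

struct Barrier {
  std::atomic<uint32_t> state;
};

// Latecomers to pthread_barrier_wait() wait until the previous RELEASE phase
// drains. The acquire load guarantees that once the state change is observed,
// the releasing threads' writes are visible too.
void wait_for_previous_cycle(Barrier* barrier) {
  while (std::atomic_load_explicit(&barrier->state, std::memory_order_acquire) == RELEASE) {
    // production code parks on a futex here instead of busy-waiting
  }
}
```
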
pthread_rwlock.cpp
   257  if (atomic_load_explicit(&rwlock->state, memory_order_relaxed) != 0) {  in pthread_rwlock_destroy()
   273  int old_state = atomic_load_explicit(&rwlock->state, memory_order_relaxed);  in __pthread_rwlock_tryrdlock()
   291  if (atomic_load_explicit(&rwlock->writer_tid, memory_order_relaxed) == __get_thread()->tid) {  in __pthread_rwlock_timedrdlock()
   305  int old_state = atomic_load_explicit(&rwlock->state, memory_order_relaxed);  in __pthread_rwlock_timedrdlock()
   348  int old_state = atomic_load_explicit(&rwlock->state, memory_order_relaxed);  in __pthread_rwlock_trywrlock()
   363  if (atomic_load_explicit(&rwlock->writer_tid, memory_order_relaxed) == __get_thread()->tid) {  in __pthread_rwlock_timedwrlock()
   376  int old_state = atomic_load_explicit(&rwlock->state, memory_order_relaxed);  in __pthread_rwlock_timedwrlock()
   489  int old_state = atomic_load_explicit(&rwlock->state, memory_order_relaxed);  in pthread_rwlock_unlock()
   491  if (atomic_load_explicit(&rwlock->writer_tid, memory_order_relaxed) != __get_thread()->tid) {  in pthread_rwlock_unlock()

pthread_mutex.cpp
   203  old_owner = atomic_load_explicit(&mutex.owner_tid, memory_order_relaxed);  in PIMutexUnlock()
   704  uint32_t owner_tid = atomic_load_explicit(&mutex->owner_tid, memory_order_relaxed);  in RecursiveOrErrorcheckMutexWait()
   713  uint16_t old_state = atomic_load_explicit(&mutex->state, memory_order_relaxed);  in MutexLockWithTimeout()
   724  if (tid == atomic_load_explicit(&mutex->owner_tid, memory_order_relaxed)) {  in MutexLockWithTimeout()
   788  old_state = atomic_load_explicit(&mutex->state, memory_order_relaxed);  in MutexLockWithTimeout()
   819  uint16_t old_state = atomic_load_explicit(&mutex->state, memory_order_relaxed);  in pthread_mutex_lock()
   853  uint16_t old_state = atomic_load_explicit(&mutex->state, memory_order_relaxed);  in pthread_mutex_unlock()
   871  if ( tid != atomic_load_explicit(&mutex->owner_tid, memory_order_relaxed) ) {  in pthread_mutex_unlock()
   903  uint16_t old_state = atomic_load_explicit(&mutex->state, memory_order_relaxed);  in pthread_mutex_trylock()
   920  if (tid == atomic_load_explicit(&mutex->owner_tid, memory_order_relaxed)) {  in pthread_mutex_trylock()
   [all …]

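The mutex hits (list truncated above) share one shape: a relaxed load seeds a CAS that carries acquire ordering only on success, so a contended probe costs no fence. A sketch of that fast path, with an assumed 0/1 state encoding; bionic actually packs type and counter bits into state:

```cpp
#include <atomic>
#include <cstdint>

// Minimal trylock fast path: probe cheaply, then CAS with acquire on success
// so the lock acquisition synchronizes with the previous holder's release.
bool mutex_trylock_sketch(std::atomic<uint16_t>* state) {
  uint16_t old_state = std::atomic_load_explicit(state, std::memory_order_relaxed);
  if (old_state != 0) {
    return false;  // contended; the relaxed probe avoided a wasted CAS
  }
  return std::atomic_compare_exchange_weak_explicit(
      state, &old_state, static_cast<uint16_t>(1),
      std::memory_order_acquire, std::memory_order_relaxed);
}
```
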
semaphore.cpp
   100  return (atomic_load_explicit(sem_count_ptr, memory_order_relaxed) & SEMCOUNT_SHARED_MASK);  in SEM_GET_SHARED()
   144  unsigned int old_value = atomic_load_explicit(sem_count_ptr, memory_order_relaxed);  in __sem_dec()
   164  unsigned int old_value = atomic_load_explicit(sem_count_ptr, memory_order_relaxed);  in __sem_trydec()
   188  unsigned int old_value = atomic_load_explicit(sem_count_ptr, memory_order_relaxed);  in __sem_inc()

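The semaphore hits are the same probe-then-CAS idea in loop form: the relaxed initial load only seeds old_value, and every failed compare_exchange refreshes it, so a stronger initial ordering would buy nothing. A sketch, ignoring SEMCOUNT_SHARED_MASK and the real count encoding:

```cpp
#include <atomic>

// Try to take one unit of the semaphore without blocking. The plain unsigned
// count (no shared bit) is a simplification of bionic's encoding.
bool sem_trydec_sketch(std::atomic<unsigned int>* count) {
  unsigned int old_value = std::atomic_load_explicit(count, std::memory_order_relaxed);
  while (old_value > 0) {
    if (std::atomic_compare_exchange_weak_explicit(
            count, &old_value, old_value - 1,
            std::memory_order_acquire, std::memory_order_relaxed)) {
      return true;   // took one unit; acquire pairs with sem_post's release
    }
    // the failed CAS already refreshed old_value; just retry
  }
  return false;      // count is zero; sem_wait would futex-wait here
}
```
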
pthread_once.cpp
    54  int old_value = atomic_load_explicit(once_control_ptr, memory_order_acquire);  in pthread_once()
    83  old_value = atomic_load_explicit(once_control_ptr, memory_order_acquire);  in pthread_once()

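pthread_once() is the canonical double-checked pattern: an acquire load on the fast path guarantees that a thread observing the "done" state also observes everything the init routine wrote before that state was published with release. A sketch, with an assumed state encoding:

```cpp
#include <atomic>

// Assumed encoding: 0 = not started, 1 = in progress, 2 = done.
constexpr int ONCE_DONE = 2;

// Fast path: a caller that sees ONCE_DONE can skip the lock/futex slow path
// while still seeing every side effect of the init routine.
bool once_already_done(std::atomic<int>* once_control) {
  return std::atomic_load_explicit(once_control, std::memory_order_acquire) == ONCE_DONE;
}
```
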
malloc_common.h
    80  return atomic_load_explicit(&__libc_globals->current_dispatch_table, memory_order_acquire);  in GetDispatchTable()
    84  return atomic_load_explicit(&__libc_globals->default_dispatch_table, memory_order_acquire);  in GetDefaultDispatchTable()

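Both hits are the read side of pointer publication: a hooked allocator installs a fully built dispatch table with a release store, and every allocation path reads it back with an acquire load. A sketch with a stand-in dispatch struct (not bionic's real one):

```cpp
#include <atomic>
#include <cstddef>

// Stand-in for bionic's dispatch struct; the real one has many more entries.
struct MallocDispatch {
  void* (*malloc)(size_t);
  void (*free)(void*);
};

std::atomic<const MallocDispatch*> g_dispatch_table{nullptr};

// Reader: acquire pairs with the writer's release, so a non-null table is
// guaranteed to be fully initialized before any of its pointers are called.
const MallocDispatch* GetDispatchTableSketch() {
  return std::atomic_load_explicit(&g_dispatch_table, std::memory_order_acquire);
}

// Writer: build the table completely, then publish it in one release store.
void SetDispatchTableSketch(const MallocDispatch* table) {
  std::atomic_store_explicit(&g_dispatch_table, table, std::memory_order_release);
}
```
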
__cxa_guard.cpp
    80  int old_value = atomic_load_explicit(&gv->state, memory_order_acquire);  in __cxa_guard_acquire()
   106  old_value = atomic_load_explicit(&gv->state, memory_order_acquire);  in __cxa_guard_acquire()

pthread_cond.cpp
   111  return COND_IS_SHARED(atomic_load_explicit(&state, memory_order_relaxed));  in process_shared()
   115  return COND_GET_CLOCK(atomic_load_explicit(&state, memory_order_relaxed)) == CLOCK_REALTIME;  in use_realtime_clock()
   180  unsigned int old_state = atomic_load_explicit(&cond->state, memory_order_relaxed);  in __pthread_cond_timedwait()

system_property_api.cpp
   109  return atomic_load_explicit(&pi->serial, memory_order_acquire);  in __system_property_serial()

malloc_limit.cpp
    99  atomic_load_explicit(&gAllocated, memory_order_relaxed), bytes, &total) ||  in CheckLimit()

malloc_common_dynamic.cpp
   497  !gZygoteChild || atomic_load_explicit(&gZygoteChildProfileable, memory_order_acquire);  in android_mallopt()

/bionic/libc/system_properties/

prop_area.cpp
   204  uint_least32_t off = atomic_load_explicit(off_p, memory_order_consume);  in to_prop_bt()
   209  uint_least32_t off = atomic_load_explicit(off_p, memory_order_consume);  in to_prop_info()
   240  uint_least32_t left_offset = atomic_load_explicit(&current->left, memory_order_relaxed);  in find_prop_bt()
   256  uint_least32_t right_offset = atomic_load_explicit(&current->right, memory_order_relaxed);  in find_prop_bt()
   292  uint_least32_t children_offset = atomic_load_explicit(&current->children, memory_order_relaxed);  in find_property()
   317  uint_least32_t prop_offset = atomic_load_explicit(&current->prop, memory_order_relaxed);  in find_property()
   337  uint_least32_t left_offset = atomic_load_explicit(&trie->left, memory_order_relaxed);  in foreach_property()
   342  uint_least32_t prop_offset = atomic_load_explicit(&trie->prop, memory_order_relaxed);  in foreach_property()
   348  uint_least32_t children_offset = atomic_load_explicit(&trie->children, memory_order_relaxed);  in foreach_property()
   353  uint_least32_t right_offset = atomic_load_explicit(&trie->right, memory_order_relaxed);  in foreach_property()

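prop_area is the only consumer of memory_order_consume in this list: trie nodes store child links as byte offsets into the shared mapping, and the pointer arithmetic that follows is data-dependent on the loaded offset, which is exactly the dependency consume is meant to cover. A sketch of the offset-to-pointer step, with assumed layout details:

```cpp
#include <atomic>
#include <cstdint>

struct prop_bt;  // trie node, defined elsewhere in bionic

// The to_prop_bt()/to_prop_info() shape: the dereference downstream depends
// on off, so consume suffices (implementations typically strengthen it to
// acquire anyway).
prop_bt* to_prop_bt_sketch(std::atomic<uint_least32_t>* off_p, char* area_base) {
  uint_least32_t off = std::atomic_load_explicit(off_p, std::memory_order_consume);
  return off == 0 ? nullptr : reinterpret_cast<prop_bt*>(area_base + off);
}
```
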
system_properties.cpp
   122  return atomic_load_explicit(pa->serial(), memory_order_acquire);  in AreaSerial()
   248  uint32_t serial = atomic_load_explicit(&pi->serial, memory_order_relaxed);  in Update()
   266  atomic_load_explicit(serial_pa->serial(), memory_order_relaxed) + 1,  in Update()
   306  atomic_load_explicit(serial_pa->serial(), memory_order_relaxed) + 1,  in Add()

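These serial loads are halves of a seqlock-style protocol: a writer makes the serial odd while mutating a value and publishes a new even serial afterwards, and readers retry if the serial changed across their copy. A sketch of the reader side, with assumed field shapes (a fully race-free version would copy the bytes via relaxed atomic loads):

```cpp
#include <atomic>
#include <cstdint>
#include <cstring>

// Returns true if 'out' holds a consistent snapshot of 'value'.
bool read_property_value(const std::atomic<uint32_t>* serial_p,
                         const char* value, char* out, size_t len) {
  uint32_t before = std::atomic_load_explicit(serial_p, std::memory_order_acquire);
  if (before & 1) return false;                      // writer active: retry later
  memcpy(out, value, len);                           // copy while (hopefully) stable
  std::atomic_thread_fence(std::memory_order_acquire);  // keep the copy before the recheck
  uint32_t after = std::atomic_load_explicit(serial_p, std::memory_order_relaxed);
  return before == after;                            // changed serial => torn copy
}
```
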
/bionic/tests/

stdatomic_test.cpp
    94  ASSERT_EQ(123, atomic_load_explicit(&i, memory_order_relaxed));  in TEST()
   215  yval = atomic_load_explicit(&a->y, memory_order_acquire);  in reader()
   216  zval = atomic_load_explicit(&a->z, memory_order_relaxed);  in reader()
   217  xval = atomic_load_explicit(&a->x, memory_order_relaxed);  in reader()
   255  EXPECT_EQ(atomic_load_explicit(&a.x, memory_order_consume), BIG + 1);  in TEST()
   256  EXPECT_EQ(atomic_load_explicit(&a.y, memory_order_seq_cst), BIG + 1);  in TEST()

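The reader() hits exercise message passing: one acquire load of y orders the two relaxed loads after it. Assuming (as a reading of the test's intent, not a claim about its exact code) that a single writer bumps x, then z, then y with release, a reader that observes y == n must also observe z >= n and x >= n. A sketch of both sides:

```cpp
#include <atomic>

struct Triple {
  std::atomic<int> x{0}, z{0}, y{0};
};

// Single writer: bump x, then z, then publish with a release increment of y.
void writer_step(Triple* a) {
  std::atomic_fetch_add_explicit(&a->x, 1, std::memory_order_relaxed);
  std::atomic_fetch_add_explicit(&a->z, 1, std::memory_order_relaxed);
  std::atomic_fetch_add_explicit(&a->y, 1, std::memory_order_release);
}

// Reader: the acquire load of y synchronizes with the release increment that
// produced yval, so the relaxed loads that follow cannot see older values.
bool reader_invariant_holds(Triple* a) {
  int yval = std::atomic_load_explicit(&a->y, std::memory_order_acquire);
  int zval = std::atomic_load_explicit(&a->z, std::memory_order_relaxed);
  int xval = std::atomic_load_explicit(&a->x, std::memory_order_relaxed);
  return zval >= yval && xval >= yval;
}
```
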
/bionic/libc/system_properties/include/system_properties/

prop_info.h
    41  return atomic_load_explicit(non_const_s, mo);  in load_const_atomic()

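prop_info's serial lives in a const view of a mapped page, and C11's atomic_load_explicit was not reliably usable on a const-qualified atomic, so bionic casts constness away before loading; this is benign because a load never writes. A C++ rendering of the shape:

```cpp
#include <atomic>
#include <cstdint>

// Cast away const, then load. Loads never modify the atomic, so the cast is
// safe; it only exists to satisfy the (non-const) C11 generic function.
uint32_t load_const_atomic_sketch(const std::atomic<uint32_t>* s,
                                  std::memory_order mo) {
  auto* non_const_s = const_cast<std::atomic<uint32_t>*>(s);
  return std::atomic_load_explicit(non_const_s, mo);
}
```
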
/bionic/libc/include/bits/

stdatomic.h
   227  #define atomic_load_explicit(object, order) \  (macro)
   255  atomic_load_explicit(object, memory_order_seq_cst)

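Line 227 is the macro definition itself (hence the trailing backslash), and line 255 shows the plain atomic_load delegating to it with the default seq_cst order. Roughly:

```cpp
// Rough shape of the two bits/stdatomic.h lines above; __c11_atomic_load is
// an assumption about the clang builtin used on the right-hand side.
#define atomic_load_explicit(object, order) __c11_atomic_load(object, order)
#define atomic_load(object) atomic_load_explicit(object, memory_order_seq_cst)
```
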
/bionic/libc/include/

stdatomic.h
    62  using std::atomic_load_explicit;
