
Searched refs:ALWAYS_INLINE (Results 1 – 25 of 130) sorted by relevance


/art/runtime/gc/accounting/
bitmap.h
47 static ALWAYS_INLINE constexpr size_t BitIndexToWordIndex(uintptr_t offset) { in BitIndexToWordIndex()
52 static ALWAYS_INLINE constexpr T WordIndexToBitIndex(T word_index) { in WordIndexToBitIndex()
56 static ALWAYS_INLINE constexpr uintptr_t BitIndexToMask(uintptr_t bit_index) { in BitIndexToMask()
60 ALWAYS_INLINE bool SetBit(size_t bit_index) { in SetBit()
64 ALWAYS_INLINE bool ClearBit(size_t bit_index) { in ClearBit()
68 ALWAYS_INLINE bool TestBit(size_t bit_index) const;
71 ALWAYS_INLINE bool AtomicTestAndSetBit(size_t bit_index);
94 ALWAYS_INLINE void CheckValidBitIndex(size_t bit_index) const { in CheckValidBitIndex()
110 ALWAYS_INLINE bool ModifyBit(uintptr_t bit_index);
135 ALWAYS_INLINE uintptr_t CoverBegin() const { in CoverBegin()
[all …]
/art/runtime/
handle_scope.h
52 ALWAYS_INLINE uint32_t NumberOfReferences() const;
54 ALWAYS_INLINE bool Contains(StackReference<mirror::Object>* handle_scope_entry) const;
57 ALWAYS_INLINE void VisitRoots(Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_);
64 ALWAYS_INLINE VariableSizedHandleScope* AsVariableSized();
65 ALWAYS_INLINE HandleScope* AsHandleScope();
66 ALWAYS_INLINE const VariableSizedHandleScope* AsVariableSized() const;
67 ALWAYS_INLINE const HandleScope* AsHandleScope() const;
108 ALWAYS_INLINE ObjPtr<mirror::Object> GetReference(size_t i) const
111 ALWAYS_INLINE Handle<mirror::Object> GetHandle(size_t i);
113 ALWAYS_INLINE MutableHandle<mirror::Object> GetMutableHandle(size_t i)
[all …]
scoped_thread_state_change.h
44 ALWAYS_INLINE ScopedThreadStateChange(Thread* self, ThreadState new_thread_state)
47 ALWAYS_INLINE ~ScopedThreadStateChange() REQUIRES(!Locks::thread_suspend_count_lock_);
49 ALWAYS_INLINE Thread* Self() const { in Self()
103 ALWAYS_INLINE bool IsRunnable() const;
106 ALWAYS_INLINE explicit ScopedObjectAccessAlreadyRunnable(JNIEnv* env)
109 ALWAYS_INLINE explicit ScopedObjectAccessAlreadyRunnable(Thread* self)
120 ALWAYS_INLINE ~ScopedObjectAccessAlreadyRunnable() {} in ~ScopedObjectAccessAlreadyRunnable()
149 ALWAYS_INLINE explicit ScopedObjectAccessUnchecked(JNIEnv* env)
152 ALWAYS_INLINE explicit ScopedObjectAccessUnchecked(Thread* self)
155 ALWAYS_INLINE ~ScopedObjectAccessUnchecked() REQUIRES(!Locks::thread_suspend_count_lock_) {} in ~ScopedObjectAccessUnchecked()
[all …]
reflective_handle.h
38 ALWAYS_INLINE ReflectiveHandle(const ReflectiveHandle<T>& handle) = default;
39 ALWAYS_INLINE ReflectiveHandle<T>& operator=(const ReflectiveHandle<T>& handle) = default;
41 ALWAYS_INLINE explicit ReflectiveHandle(ReflectiveReference<T>* reference) in ReflectiveHandle()
44 ALWAYS_INLINE T& operator*() const REQUIRES_SHARED(Locks::mutator_lock_) { in REQUIRES_SHARED()
48 ALWAYS_INLINE T* operator->() const REQUIRES_SHARED(Locks::mutator_lock_) {
52 ALWAYS_INLINE T* Get() const REQUIRES_SHARED(Locks::mutator_lock_) { in Get()
56 ALWAYS_INLINE bool IsNull() const { in IsNull()
61 ALWAYS_INLINE bool operator!=(std::nullptr_t) const REQUIRES_SHARED(Locks::mutator_lock_) {
65 ALWAYS_INLINE bool operator==(std::nullptr_t) const REQUIRES_SHARED(Locks::mutator_lock_) {
84 ALWAYS_INLINE MutableReflectiveHandle(const MutableReflectiveHandle<T>& handle)
[all …]
reflective_handle_scope.h
54 ALWAYS_INLINE void VisitTargets(Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_) { in VisitTargets()
59 ALWAYS_INLINE virtual ~BaseReflectiveHandleScope() { in ~BaseReflectiveHandleScope()
77 ALWAYS_INLINE BaseReflectiveHandleScope() : self_(nullptr), link_(nullptr) {} in BaseReflectiveHandleScope()
79 ALWAYS_INLINE inline void PushScope(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);
80 ALWAYS_INLINE inline void PopScope() REQUIRES_SHARED(Locks::mutator_lock_);
99 ALWAYS_INLINE explicit StackReflectiveHandleScope(Thread* self)
101 ALWAYS_INLINE ~StackReflectiveHandleScope() REQUIRES_SHARED(Locks::mutator_lock_);
108 ALWAYS_INLINE MutableReflectiveHandle<T> NewHandle(T* t) REQUIRES_SHARED(Locks::mutator_lock_) { in NewHandle()
117 ALWAYS_INLINE ReflectiveHandleWrapper<T> NewReflectiveHandleWrapper(T** t) in NewReflectiveHandleWrapper()
122 ALWAYS_INLINE MutableReflectiveHandle<ArtField> NewFieldHandle(ArtField* f) in NewFieldHandle()
[all …]
handle.h
54 ALWAYS_INLINE Handle(const Handle<T>& handle) = default;
56 ALWAYS_INLINE Handle<T>& operator=(const Handle<T>& handle) = default;
60 ALWAYS_INLINE Handle(const Handle<Type>& other) : reference_(other.reference_) { in Handle()
63 ALWAYS_INLINE explicit Handle(StackReference<T>* reference) : reference_(reference) { in Handle()
66 ALWAYS_INLINE T& operator*() const REQUIRES_SHARED(Locks::mutator_lock_) { in REQUIRES_SHARED()
70 ALWAYS_INLINE T* operator->() const REQUIRES_SHARED(Locks::mutator_lock_) {
74 ALWAYS_INLINE T* Get() const REQUIRES_SHARED(Locks::mutator_lock_) { in Get()
80 ALWAYS_INLINE IterationRange<mirror::ConstHandleArrayIter<Type>> ConstIterate() const in ConstIterate()
86 ALWAYS_INLINE IterationRange<mirror::HandleArrayIter<Type>> Iterate() in Iterate()
91 ALWAYS_INLINE bool IsNull() const { in IsNull()
[all …]
write_barrier.h
40 ALWAYS_INLINE static void ForFieldWrite(ObjPtr<mirror::Object> dst,
47 ALWAYS_INLINE static void ForArrayWrite(ObjPtr<mirror::Object> dst,
53 ALWAYS_INLINE static void ForEveryFieldWrite(ObjPtr<mirror::Object> obj)
57 ALWAYS_INLINE static gc::accounting::CardTable* GetCardTable();
art_method.h
93 ALWAYS_INLINE ObjPtr<mirror::Class> GetDeclaringClass() REQUIRES_SHARED(Locks::mutator_lock_);
96 ALWAYS_INLINE ObjPtr<mirror::Class> GetDeclaringClassUnchecked()
180 ALWAYS_INLINE void SetIntrinsic(uint32_t intrinsic) REQUIRES_SHARED(Locks::mutator_lock_);
434 ALWAYS_INLINE uint32_t GetDexMethodIndex() const { in GetDexMethodIndex()
471 ALWAYS_INLINE
482 ALWAYS_INLINE void SetEntryPointFromQuickCompiledCodePtrSize( in SetEntryPointFromQuickCompiledCodePtrSize()
512 ALWAYS_INLINE void SetImtConflictTable(ImtConflictTable* table, PointerSize pointer_size) in SetImtConflictTable()
525 ALWAYS_INLINE void SetProfilingInfo(ProfilingInfo* info) REQUIRES_SHARED(Locks::mutator_lock_) { in SetProfilingInfo()
529 ALWAYS_INLINE void SetProfilingInfoPtrSize(ProfilingInfo* info, PointerSize pointer_size) in SetProfilingInfoPtrSize()
543 ALWAYS_INLINE bool HasSingleImplementation() REQUIRES_SHARED(Locks::mutator_lock_);
[all …]
read_barrier.h
56 ALWAYS_INLINE static MirrorType* Barrier(
63 ALWAYS_INLINE static MirrorType* BarrierForRoot(MirrorType** root,
70 ALWAYS_INLINE static MirrorType* BarrierForRoot(mirror::CompressedReference<MirrorType>* root,
76 ALWAYS_INLINE static MirrorType* IsMarked(MirrorType* ref)
116 ALWAYS_INLINE static bool IsGray(mirror::Object* obj, uintptr_t* fake_address_dependency)
121 ALWAYS_INLINE static bool IsGray(mirror::Object* obj)
gc_root.h
112 ALWAYS_INLINE void VisitRoot(mirror::Object** root, const RootInfo& info) in VisitRoot()
118 ALWAYS_INLINE void VisitRootIfNonNull(mirror::Object** root, const RootInfo& info) in VisitRootIfNonNull()
190 ALWAYS_INLINE MirrorType* Read(GcRootSource* gc_root_source = nullptr) const
208 ALWAYS_INLINE mirror::CompressedReference<mirror::Object>* AddressWithoutBarrier() { in AddressWithoutBarrier()
212 ALWAYS_INLINE bool IsNull() const { in IsNull()
217 ALWAYS_INLINE GcRoot() {} in GcRoot()
218 explicit ALWAYS_INLINE GcRoot(MirrorType* ref)
220 explicit ALWAYS_INLINE GcRoot(ObjPtr<MirrorType> ref)
247 ALWAYS_INLINE void VisitRootIfNonNull(GcRoot<MirrorType>& root) in VisitRootIfNonNull()
255 ALWAYS_INLINE void VisitRootIfNonNull(mirror::CompressedReference<MirrorType>* root) in VisitRootIfNonNull()
[all …]
stack_map.h
141 ALWAYS_INLINE uint32_t GetNativePcOffset(InstructionSet instruction_set) const { in GetNativePcOffset()
145 ALWAYS_INLINE bool HasInlineInfo() const { in HasInlineInfo()
149 ALWAYS_INLINE bool HasDexRegisterMap() const { in HasDexRegisterMap()
227 ALWAYS_INLINE DexRegisterLocation GetLocation() const { in BIT_TABLE_HEADER()
258 ALWAYS_INLINE uint32_t GetMask() const { in BIT_TABLE_HEADER()
294 ALWAYS_INLINE CodeInfo() {} in CodeInfo()
295 ALWAYS_INLINE explicit CodeInfo(const uint8_t* data, size_t* num_read_bits = nullptr);
296 ALWAYS_INLINE explicit CodeInfo(const OatQuickMethodHeader* header);
303 ALWAYS_INLINE const BitTable<StackMap>& GetStackMaps() const { in GetStackMaps()
307 ALWAYS_INLINE StackMap GetStackMapAt(size_t index) const { in GetStackMapAt()
[all …]
/art/libdexfile/dex/
code_item_accessors.h
43 ALWAYS_INLINE CodeItemInstructionAccessor(const DexFile& dex_file,
46 ALWAYS_INLINE explicit CodeItemInstructionAccessor(ArtMethod* method);
48 ALWAYS_INLINE DexInstructionIterator begin() const;
50 ALWAYS_INLINE DexInstructionIterator end() const;
81 ALWAYS_INLINE void Init(uint32_t insns_size_in_code_units, const uint16_t* insns);
82 ALWAYS_INLINE void Init(const DexFile& dex_file, const dex::CodeItem* code_item);
85 ALWAYS_INLINE void Init(const DexFileCodeItemType& code_item);
99 ALWAYS_INLINE CodeItemDataAccessor(const DexFile& dex_file, const dex::CodeItem* code_item);
128 ALWAYS_INLINE void Init(const DexFile& dex_file, const dex::CodeItem* code_item);
131 ALWAYS_INLINE void Init(const DexFileCodeItemType& code_item);
[all …]
dex_instruction_iterator.h
31 ALWAYS_INLINE const Instruction& Inst() const { in Inst()
35 ALWAYS_INLINE const Instruction* operator->() const {
39 ALWAYS_INLINE uint32_t DexPc() const { in DexPc()
43 ALWAYS_INLINE const uint16_t* Instructions() const { in Instructions()
74 ALWAYS_INLINE uint32_t DexPc() const { in DexPc()
79 ALWAYS_INLINE const uint16_t* Instructions() const { in Instructions()
87 static ALWAYS_INLINE inline bool operator==(const DexInstructionIteratorBase& lhs,
152 ALWAYS_INLINE uint32_t DexPc() const { in DexPc()
206 ALWAYS_INLINE const Instruction& Inst() const { in Inst()
221 ALWAYS_INLINE void AssertValid() const { in AssertValid()
[all …]
/art/libelffile/dwarf/
debug_frame_opcode_writer.h
47 void ALWAYS_INLINE AdvancePC(int absolute_pc) { in AdvancePC()
73 void ALWAYS_INLINE RelOffset(Reg reg, int offset) { in RelOffset()
78 void ALWAYS_INLINE AdjustCFAOffset(int delta) { in AdjustCFAOffset()
83 void ALWAYS_INLINE RelOffsetForMany(Reg reg_base, in RelOffsetForMany()
101 void ALWAYS_INLINE RestoreMany(Reg reg_base, uint32_t reg_mask) { in RestoreMany()
113 void ALWAYS_INLINE Nop() { in Nop()
119 void ALWAYS_INLINE Offset(Reg reg, int offset) { in Offset()
141 void ALWAYS_INLINE Restore(Reg reg) { in Restore()
153 void ALWAYS_INLINE Undefined(Reg reg) { in Undefined()
161 void ALWAYS_INLINE SameValue(Reg reg) { in SameValue()
[all …]
/art/runtime/jni/
jni_internal.h
71 ALWAYS_INLINE
77 ALWAYS_INLINE
83 ALWAYS_INLINE
93 ALWAYS_INLINE static inline jfieldID EncodeArtField(ReflectiveHandle<ArtField> field) in EncodeArtField()
103 ALWAYS_INLINE
113 ALWAYS_INLINE
124 ALWAYS_INLINE
135 ALWAYS_INLINE
/art/libartbase/base/
bit_memory_region.h
40 ALWAYS_INLINE BitMemoryRegion(uint8_t* data, ssize_t bit_start, size_t bit_size) { in BitMemoryRegion()
46 ALWAYS_INLINE explicit BitMemoryRegion(MemoryRegion region) in BitMemoryRegion()
49 ALWAYS_INLINE BitMemoryRegion(MemoryRegion region, size_t bit_offset, size_t bit_length) in BitMemoryRegion()
54 ALWAYS_INLINE bool IsValid() const { return data_ != nullptr; } in IsValid()
69 ALWAYS_INLINE BitMemoryRegion Subregion(size_t bit_offset, size_t bit_length) const { in Subregion()
78 ALWAYS_INLINE BitMemoryRegion Subregion(size_t bit_offset) const { in Subregion()
88 ALWAYS_INLINE bool LoadBit(size_t bit_offset) const { in LoadBit()
95 ALWAYS_INLINE void StoreBit(size_t bit_offset, bool value) { in StoreBit()
109 ALWAYS_INLINE Result LoadBits(size_t bit_offset, size_t bit_length) const { in LoadBits()
139 ALWAYS_INLINE void StoreBits(size_t bit_offset, uint32_t value, size_t bit_length) { in StoreBits()
[all …]
memory_region.h
62 ALWAYS_INLINE T Load(uintptr_t offset) const { in Load()
72 ALWAYS_INLINE void Store(uintptr_t offset, T value) const { in Store()
81 ALWAYS_INLINE T LoadUnaligned(uintptr_t offset) const { in LoadUnaligned()
96 ALWAYS_INLINE void StoreUnaligned(uintptr_t offset, T value) const { in StoreUnaligned()
108 ALWAYS_INLINE T* PointerTo(uintptr_t offset) const { in PointerTo()
122 ALWAYS_INLINE MemoryRegion Subregion(uintptr_t offset, uintptr_t size_in) const { in Subregion()
129 ALWAYS_INLINE void Extend(const MemoryRegion& region, uintptr_t extra) { in Extend()
136 ALWAYS_INLINE T* ComputeInternalPointer(size_t offset) const { in ComputeInternalPointer()
144 ALWAYS_INLINE uint8_t* ComputeBitPointer(uintptr_t bit_offset, uint8_t* bit_mask) const { in ComputeBitPointer()
macros.h
39 NO_RETURN ALWAYS_INLINE void operator delete(void*, size_t) { UNREACHABLE(); } \
40 ALWAYS_INLINE void* operator new(size_t, void* ptr) noexcept { return ptr; } \
41 ALWAYS_INLINE void operator delete(void*, void*) noexcept { } \
71 #define ALWAYS_INLINE macro
74 #define ALWAYS_INLINE __attribute__ ((always_inline)) macro
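The two macros.h hits above are the definition itself: ALWAYS_INLINE expands to nothing in one build configuration (line 71) and to the GCC/Clang always_inline attribute in the other (line 74). A minimal sketch of that pattern follows; the #if guard is illustrative rather than ART's exact preprocessor logic, and BitIndexToMask is a hypothetical helper modeled on the bitmap.h hits above.

  // Sketch of the ALWAYS_INLINE pattern seen in macros.h.
  // The guard condition is an assumption, not ART's actual one.
  #if defined(__GNUC__) || defined(__clang__)
  #define ALWAYS_INLINE __attribute__((always_inline))
  #else
  #define ALWAYS_INLINE  // No-op fallback so annotated declarations still compile.
  #endif

  #include <cstdint>

  // Typical use: force-inline a tiny hot-path helper so it costs no call overhead.
  ALWAYS_INLINE inline constexpr uintptr_t BitIndexToMask(uintptr_t bit_index) {
    return static_cast<uintptr_t>(1) << (bit_index % (sizeof(uintptr_t) * 8));
  }

Note that GCC and Clang can only honor always_inline when the function's definition is visible at the call site, which is consistent with every match in these results living in a header file.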
/art/runtime/mirror/
object.h
96 ALWAYS_INLINE Class* GetClass() REQUIRES_SHARED(Locks::mutator_lock_);
103 ALWAYS_INLINE uint32_t GetReadBarrierState(uintptr_t* fake_address_dependency)
106 ALWAYS_INLINE uint32_t GetReadBarrierState() REQUIRES_SHARED(Locks::mutator_lock_);
108 ALWAYS_INLINE uint32_t GetReadBarrierStateAcquire() REQUIRES_SHARED(Locks::mutator_lock_);
110 ALWAYS_INLINE void SetReadBarrierState(uint32_t rb_state) REQUIRES_SHARED(Locks::mutator_lock_);
113 ALWAYS_INLINE bool AtomicSetReadBarrierState(uint32_t expected_rb_state, uint32_t rb_state)
116 ALWAYS_INLINE uint32_t GetMarkBit() REQUIRES_SHARED(Locks::mutator_lock_);
118 ALWAYS_INLINE bool AtomicSetMarkBit(uint32_t expected_mark_bit, uint32_t mark_bit)
122 ALWAYS_INLINE void AssertReadBarrierState() const REQUIRES_SHARED(Locks::mutator_lock_);
129 ALWAYS_INLINE bool InstanceOf(ObjPtr<Class> klass) REQUIRES_SHARED(Locks::mutator_lock_);
[all …]
dex_cache.h
276 String* GetResolvedString(dex::StringIndex string_idx) ALWAYS_INLINE
279 void SetResolvedString(dex::StringIndex string_idx, ObjPtr<mirror::String> resolved) ALWAYS_INLINE
284 ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_);
288 ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_);
301 ALWAYS_INLINE ArtMethod* GetResolvedMethod(uint32_t method_idx, PointerSize ptr_size)
304 ALWAYS_INLINE void SetResolvedMethod(uint32_t method_idx,
308 ALWAYS_INLINE void ClearResolvedMethod(uint32_t method_idx, PointerSize ptr_size)
312 ALWAYS_INLINE ArtField* GetResolvedField(uint32_t idx, PointerSize ptr_size)
316 ALWAYS_INLINE void SetResolvedField(uint32_t idx, ArtField* field, PointerSize ptr_size)
318 ALWAYS_INLINE void ClearResolvedField(uint32_t idx, PointerSize ptr_size)
[all …]
class.h
213 ALWAYS_INLINE uint32_t GetAccessFlags() REQUIRES_SHARED(Locks::mutator_lock_) { in GetAccessFlags()
225 ALWAYS_INLINE uint32_t GetClassFlags() REQUIRES_SHARED(Locks::mutator_lock_) { in GetClassFlags()
237 ALWAYS_INLINE bool IsEnum() REQUIRES_SHARED(Locks::mutator_lock_) { in IsEnum()
243 ALWAYS_INLINE bool IsInterface() REQUIRES_SHARED(Locks::mutator_lock_) { in IsInterface()
248 ALWAYS_INLINE bool IsPublic() REQUIRES_SHARED(Locks::mutator_lock_) { in IsPublic()
253 ALWAYS_INLINE bool IsFinal() REQUIRES_SHARED(Locks::mutator_lock_) { in IsFinal()
257 ALWAYS_INLINE bool IsFinalizable() REQUIRES_SHARED(Locks::mutator_lock_) { in IsFinalizable()
261 ALWAYS_INLINE bool ShouldSkipHiddenApiChecks() REQUIRES_SHARED(Locks::mutator_lock_) { in ShouldSkipHiddenApiChecks()
265 ALWAYS_INLINE void SetSkipHiddenApiChecks() REQUIRES_SHARED(Locks::mutator_lock_) { in SetSkipHiddenApiChecks()
270 ALWAYS_INLINE void SetRecursivelyInitialized() REQUIRES_SHARED(Locks::mutator_lock_);
[all …]
object_reference-inl.h
33 ALWAYS_INLINE
39 ALWAYS_INLINE
47 ALWAYS_INLINE
/art/openjdkjvmti/
jvmti_weak_table.h
63 ALWAYS_INLINE bool Remove(art::ObjPtr<art::mirror::Object> obj, /* out */ T* tag)
66 ALWAYS_INLINE bool RemoveLocked(art::ObjPtr<art::mirror::Object> obj, /* out */ T* tag)
72 ALWAYS_INLINE virtual bool Set(art::ObjPtr<art::mirror::Object> obj, T tag)
75 ALWAYS_INLINE virtual bool SetLocked(art::ObjPtr<art::mirror::Object> obj, T tag)
101 ALWAYS_INLINE void Sweep(art::IsMarkedVisitor* visitor)
106 ALWAYS_INLINE
117 ALWAYS_INLINE void Lock() ACQUIRE(allow_disallow_lock_);
118 ALWAYS_INLINE void Unlock() RELEASE(allow_disallow_lock_);
119 ALWAYS_INLINE void AssertLocked() ASSERT_CAPABILITY(allow_disallow_lock_);
121 ALWAYS_INLINE art::ObjPtr<art::mirror::Object> Find(T tag)
[all …]
events.h
155 ALWAYS_INLINE static inline ArtJvmtiEvent GetArtJvmtiEvent(ArtJvmTiEnv* env, jvmtiEvent e);
266 ALWAYS_INLINE
277 ALWAYS_INLINE
284 ALWAYS_INLINE
292 ALWAYS_INLINE
301 ALWAYS_INLINE
328 ALWAYS_INLINE
334 ALWAYS_INLINE
338 ALWAYS_INLINE
345 ALWAYS_INLINE
[all …]
/art/runtime/base/
mutex.h
194 ALWAYS_INLINE bool IsExclusiveHeld(const Thread* self) const;
197 ALWAYS_INLINE void AssertExclusiveHeld(const Thread* self) const ASSERT_CAPABILITY(this);
198 ALWAYS_INLINE void AssertHeld(const Thread* self) const ASSERT_CAPABILITY(this);
322 void SharedLock(Thread* self) ACQUIRE_SHARED() ALWAYS_INLINE;
329 void SharedUnlock(Thread* self) RELEASE_SHARED() ALWAYS_INLINE;
333 ALWAYS_INLINE bool IsExclusiveHeld(const Thread* self) const;
336 ALWAYS_INLINE void AssertExclusiveHeld(const Thread* self) const ASSERT_CAPABILITY(this);
337 ALWAYS_INLINE void AssertWriterHeld(const Thread* self) const ASSERT_CAPABILITY(this);
353 ALWAYS_INLINE void AssertSharedHeld(const Thread* self) ASSERT_SHARED_CAPABILITY(this) { in AssertSharedHeld()
359 ALWAYS_INLINE void AssertReaderHeld(const Thread* self) ASSERT_SHARED_CAPABILITY(this) { in AssertReaderHeld()
[all …]
