/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_MIRROR_OBJECT_INL_H_
#define ART_RUNTIME_MIRROR_OBJECT_INL_H_

#include "object.h"

#include "array-inl.h"
#include "art_field.h"
#include "art_method.h"
#include "base/atomic.h"
#include "class-inl.h"
#include "class_flags.h"
#include "class_linker.h"
#include "dex_cache.h"
#include "heap_poisoning.h"
#include "lock_word-inl.h"
#include "monitor.h"
#include "obj_ptr-inl.h"
#include "object-readbarrier-inl.h"
#include "object_array-inl.h"
#include "object_reference-inl.h"
#include "read_barrier-inl.h"
#include "reference.h"
#include "runtime.h"
#include "string.h"
#include "throwable.h"
#include "write_barrier-inl.h"

namespace art {
namespace mirror {

inline uint32_t Object::ClassSize(PointerSize pointer_size) {
  uint32_t vtable_entries = kVTableLength;
  return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0, pointer_size);
}

template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline Class* Object::GetClass() {
  return GetFieldObject<Class, kVerifyFlags, kReadBarrierOption>(ClassOffset());
}

template<VerifyObjectFlags kVerifyFlags>
inline void Object::SetClass(ObjPtr<Class> new_klass) {
  // new_klass may be null prior to class linker initialization.
  // We don't mark the card as this occurs as part of object allocation. Not all objects have
  // backing cards, such as large objects.
  // We use non transactional version since we can't undo this write. We also disable checking as
  // we may run in transaction mode here.
  SetFieldObjectWithoutWriteBarrier<false, false, kVerifyFlags>(ClassOffset(), new_klass);
}

template<VerifyObjectFlags kVerifyFlags>
inline void Object::SetLockWord(LockWord new_val, bool as_volatile) {
  // Force use of non-transactional mode and do not check.
  if (as_volatile) {
    SetField32Volatile<false, false, kVerifyFlags>(MonitorOffset(), new_val.GetValue());
  } else {
    SetField32<false, false, kVerifyFlags>(MonitorOffset(), new_val.GetValue());
  }
}
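// Illustrative sketch (not part of the original file): how a caller might use the lock word
// accessors above. LockWord::GetState() and LockWord::kUnlocked are assumed to be available as
// in lock_word.h; the snippet is a hedged example only, kept as a comment so the header is
// unchanged functionally.
//
//   LockWord lw = obj->GetLockWord(/*as_volatile=*/ false);
//   if (lw.GetState() == LockWord::kUnlocked) {
//     // Publish an updated lock word; SetLockWord() never records transaction state.
//     obj->SetLockWord(lw, /*as_volatile=*/ true);
//   }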
inline uint32_t Object::GetLockOwnerThreadId() {
  return Monitor::GetLockOwnerThreadId(this);
}

inline ObjPtr<mirror::Object> Object::MonitorEnter(Thread* self) {
  return Monitor::MonitorEnter(self, this, /*trylock=*/false);
}

inline ObjPtr<mirror::Object> Object::MonitorTryEnter(Thread* self) {
  return Monitor::MonitorEnter(self, this, /*trylock=*/true);
}

inline bool Object::MonitorExit(Thread* self) {
  return Monitor::MonitorExit(self, this);
}

inline void Object::Notify(Thread* self) {
  Monitor::Notify(self, this);
}

inline void Object::NotifyAll(Thread* self) {
  Monitor::NotifyAll(self, this);
}

inline void Object::Wait(Thread* self, int64_t ms, int32_t ns) {
  Monitor::Wait(self, this, ms, ns, true, kTimedWaiting);
}

inline uint32_t Object::GetMarkBit() {
  CHECK(kUseReadBarrier);
  return GetLockWord(false).MarkBitState();
}

inline void Object::SetReadBarrierState(uint32_t rb_state) {
  CHECK(kUseBakerReadBarrier);
  DCHECK(ReadBarrier::IsValidReadBarrierState(rb_state)) << rb_state;
  LockWord lw = GetLockWord(false);
  lw.SetReadBarrierState(rb_state);
  SetLockWord(lw, false);
}

inline void Object::AssertReadBarrierState() const {
  CHECK(kUseBakerReadBarrier);
  Object* obj = const_cast<Object*>(this);
  DCHECK_EQ(obj->GetReadBarrierState(), ReadBarrier::NonGrayState())
      << "Bad Baker pointer: obj=" << obj << " rb_state=" << obj->GetReadBarrierState();
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::VerifierInstanceOf(ObjPtr<Class> klass) {
  DCHECK(klass != nullptr);
  DCHECK(GetClass<kVerifyFlags>() != nullptr);
  return klass->IsInterface() || InstanceOf(klass);
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::InstanceOf(ObjPtr<Class> klass) {
  DCHECK(klass != nullptr);
  DCHECK(GetClass<kVerifyFlags>() != nullptr) << "this=" << this;
  return klass->IsAssignableFrom(GetClass<kVerifyFlags>());
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsClass() {
  // OK to look at from-space copies since java.lang.Class.class is non-moveable
  // (even when running without boot image, see ClassLinker::InitWithoutImage())
  // and we're reading constant references for comparison only. See ReadBarrierOption.
  ObjPtr<Class> klass = GetClass<kVerifyFlags, kWithoutReadBarrier>();
  ObjPtr<Class> java_lang_Class = klass->GetClass<kVerifyFlags, kWithoutReadBarrier>();
  return klass == java_lang_Class;
}

template<VerifyObjectFlags kVerifyFlags>
inline ObjPtr<Class> Object::AsClass() {
  DCHECK((IsClass<kVerifyFlags>()));
  return ObjPtr<Class>::DownCast(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsObjectArray() {
  // We do not need a read barrier here as the primitive type is constant,
  // both from-space and to-space component type classes shall yield the same result.
  constexpr VerifyObjectFlags kNewFlags = RemoveThisFlags(kVerifyFlags);
  return IsArrayInstance<kVerifyFlags>() &&
      !GetClass<kNewFlags, kWithoutReadBarrier>()->
          template GetComponentType<kNewFlags, kWithoutReadBarrier>()->IsPrimitive();
}

template<class T, VerifyObjectFlags kVerifyFlags>
inline ObjPtr<ObjectArray<T>> Object::AsObjectArray() {
  DCHECK((IsObjectArray<kVerifyFlags>()));
  return ObjPtr<ObjectArray<T>>::DownCast(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsArrayInstance() {
  // We do not need a read barrier here, both from-space and to-space version of the class
  // shall return the same result from IsArrayClass().
  return GetClass<kVerifyFlags, kWithoutReadBarrier>()->template IsArrayClass<kVerifyFlags>();
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsReferenceInstance() {
  return GetClass<kVerifyFlags>()->IsTypeOfReferenceClass();
}

template<VerifyObjectFlags kVerifyFlags>
inline ObjPtr<Reference> Object::AsReference() {
  DCHECK((IsReferenceInstance<kVerifyFlags>()));
  return ObjPtr<Reference>::DownCast(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline ObjPtr<Array> Object::AsArray() {
  DCHECK((IsArrayInstance<kVerifyFlags>()));
  return ObjPtr<Array>::DownCast(this);
}
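// Illustrative sketch (not part of the original file): the check-then-downcast pattern the Is*/As*
// accessors above are designed for. "visitee" is a hypothetical ObjPtr<mirror::Object>; in debug
// builds the As*() helpers DCHECK the corresponding Is*() predicate.
//
//   if (visitee->IsArrayInstance()) {
//     ObjPtr<Array> array = visitee->AsArray();
//     // ... use array ...
//   }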
template<Primitive::Type kType, VerifyObjectFlags kVerifyFlags>
ALWAYS_INLINE bool Object::IsSpecificPrimitiveArray() {
  // We do not need a read barrier here as the primitive type is constant, both from-space
  // and to-space component type classes shall yield the same result. See ReadBarrierOption.
  const ObjPtr<Class> klass = GetClass<kVerifyFlags, kWithoutReadBarrier>();
  constexpr VerifyObjectFlags kNewFlags = RemoveThisFlags(kVerifyFlags);
  const ObjPtr<Class> component_type =
      klass->GetComponentType<kNewFlags, kWithoutReadBarrier>();
  return component_type != nullptr && component_type->GetPrimitiveType() == kType;
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsBooleanArray() {
  return IsSpecificPrimitiveArray<Primitive::kPrimBoolean, kVerifyFlags>();
}

template<VerifyObjectFlags kVerifyFlags>
inline ObjPtr<BooleanArray> Object::AsBooleanArray() {
  DCHECK(IsBooleanArray<kVerifyFlags>());
  return ObjPtr<BooleanArray>::DownCast(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsByteArray() {
  return IsSpecificPrimitiveArray<Primitive::kPrimByte, kVerifyFlags>();
}

template<VerifyObjectFlags kVerifyFlags>
inline ObjPtr<ByteArray> Object::AsByteArray() {
  DCHECK(IsByteArray<kVerifyFlags>());
  return ObjPtr<ByteArray>::DownCast(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsCharArray() {
  return IsSpecificPrimitiveArray<Primitive::kPrimChar, kVerifyFlags>();
}

template<VerifyObjectFlags kVerifyFlags>
inline ObjPtr<CharArray> Object::AsCharArray() {
  DCHECK(IsCharArray<kVerifyFlags>());
  return ObjPtr<CharArray>::DownCast(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsShortArray() {
  return IsSpecificPrimitiveArray<Primitive::kPrimShort, kVerifyFlags>();
}

template<VerifyObjectFlags kVerifyFlags>
inline ObjPtr<ShortArray> Object::AsShortArray() {
  DCHECK(IsShortArray<kVerifyFlags>());
  return ObjPtr<ShortArray>::DownCast(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsIntArray() {
  return IsSpecificPrimitiveArray<Primitive::kPrimInt, kVerifyFlags>();
}

template<VerifyObjectFlags kVerifyFlags>
inline ObjPtr<IntArray> Object::AsIntArrayUnchecked() {
  return ObjPtr<IntArray>::DownCast(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline ObjPtr<IntArray> Object::AsIntArray() {
  DCHECK((IsIntArray<kVerifyFlags>()));
  return AsIntArrayUnchecked<kVerifyFlags>();
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsLongArray() {
  return IsSpecificPrimitiveArray<Primitive::kPrimLong, kVerifyFlags>();
}

template<VerifyObjectFlags kVerifyFlags>
inline ObjPtr<LongArray> Object::AsLongArrayUnchecked() {
  return ObjPtr<LongArray>::DownCast(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline ObjPtr<LongArray> Object::AsLongArray() {
  DCHECK((IsLongArray<kVerifyFlags>()));
  return AsLongArrayUnchecked<kVerifyFlags>();
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsFloatArray() {
  return IsSpecificPrimitiveArray<Primitive::kPrimFloat, kVerifyFlags>();
}

template<VerifyObjectFlags kVerifyFlags>
inline ObjPtr<FloatArray> Object::AsFloatArray() {
  DCHECK(IsFloatArray<kVerifyFlags>());
  return ObjPtr<FloatArray>::DownCast(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsDoubleArray() {
  return IsSpecificPrimitiveArray<Primitive::kPrimDouble, kVerifyFlags>();
}

template<VerifyObjectFlags kVerifyFlags>
inline ObjPtr<DoubleArray> Object::AsDoubleArray() {
  DCHECK(IsDoubleArray<kVerifyFlags>());
  return ObjPtr<DoubleArray>::DownCast(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsString() {
  // No read barrier is needed for reading a constant primitive field through
  // constant reference field. See ReadBarrierOption.
  return GetClass<kVerifyFlags, kWithoutReadBarrier>()->IsStringClass();
}

template<VerifyObjectFlags kVerifyFlags>
inline ObjPtr<String> Object::AsString() {
  DCHECK((IsString<kVerifyFlags>()));
  return ObjPtr<String>::DownCast(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline ObjPtr<Throwable> Object::AsThrowable() {
  DCHECK(GetClass<kVerifyFlags>()->IsThrowableClass());
  return ObjPtr<Throwable>::DownCast(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsWeakReferenceInstance() {
  return GetClass<kVerifyFlags>()->IsWeakReferenceClass();
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsSoftReferenceInstance() {
  return GetClass<kVerifyFlags>()->IsSoftReferenceClass();
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsFinalizerReferenceInstance() {
  return GetClass<kVerifyFlags>()->IsFinalizerReferenceClass();
}

template<VerifyObjectFlags kVerifyFlags>
inline ObjPtr<FinalizerReference> Object::AsFinalizerReference() {
  DCHECK(IsFinalizerReferenceInstance<kVerifyFlags>());
  return ObjPtr<FinalizerReference>::DownCast(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsPhantomReferenceInstance() {
  return GetClass<kVerifyFlags>()->IsPhantomReferenceClass();
}
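// Illustrative sketch (not part of the original file): how the java.lang.ref.* predicates above
// are typically combined by a caller. The surrounding reference-processing code is only an
// assumed example, not taken from this file.
//
//   if (obj->IsReferenceInstance()) {
//     if (obj->IsFinalizerReferenceInstance()) {
//       ObjPtr<FinalizerReference> ref = obj->AsFinalizerReference();
//       // ... enqueue for finalization ...
//     }
//   }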
template<VerifyObjectFlags kVerifyFlags>
inline size_t Object::SizeOf() {
  // Read barrier is never required for SizeOf since objects sizes are constant. Reading from-space
  // values is OK because of that.
  size_t result;
  constexpr VerifyObjectFlags kNewFlags = RemoveThisFlags(kVerifyFlags);
  if (IsArrayInstance<kVerifyFlags>()) {
    result = AsArray<kNewFlags>()->template SizeOf<kNewFlags>();
  } else if (IsClass<kNewFlags>()) {
    result = AsClass<kNewFlags>()->template SizeOf<kNewFlags>();
  } else if (IsString<kNewFlags>()) {
    result = AsString<kNewFlags>()->template SizeOf<kNewFlags>();
  } else {
    result = GetClass<kNewFlags, kWithoutReadBarrier>()->template GetObjectSize<kNewFlags>();
  }
  DCHECK_GE(result, sizeof(Object)) << " class="
      // Note: Class::PrettyClass() is reading constant reference fields to get to constant
      // primitive fields and safely avoids read barriers, so it is safe to call on a Class
      // reference read without read barrier from a constant reference field.
      // See ReadBarrierOption. And, for correctness, we actually have to avoid the read
      // barrier here if Object::SizeOf() is called on a from-space reference.
      << GetClass<kNewFlags, kWithoutReadBarrier>()->PrettyClass();
  return result;
}

template<VerifyObjectFlags kVerifyFlags, bool kIsVolatile>
inline int8_t Object::GetFieldByte(MemberOffset field_offset) {
  Verify<kVerifyFlags>();
  return GetFieldPrimitive<int8_t, kIsVolatile>(field_offset);
}

template<VerifyObjectFlags kVerifyFlags>
inline uint8_t Object::GetFieldBooleanVolatile(MemberOffset field_offset) {
  return GetFieldBoolean<kVerifyFlags, /*kIsVolatile=*/true>(field_offset);
}

template<VerifyObjectFlags kVerifyFlags>
inline int8_t Object::GetFieldByteVolatile(MemberOffset field_offset) {
  return GetFieldByte<kVerifyFlags, /*kIsVolatile=*/true>(field_offset);
}

template<bool kTransactionActive,
         bool kCheckTransaction,
         VerifyObjectFlags kVerifyFlags,
         bool kIsVolatile>
inline void Object::SetFieldBoolean(MemberOffset field_offset, uint8_t new_value) {
  VerifyTransaction<kTransactionActive, kCheckTransaction>();
  if (kTransactionActive) {
    Runtime::Current()->RecordWriteFieldBoolean(
        this, field_offset, GetFieldBoolean<kVerifyFlags, kIsVolatile>(field_offset), kIsVolatile);
  }
  Verify<kVerifyFlags>();
  SetFieldPrimitive<uint8_t, kIsVolatile>(field_offset, new_value);
}

template<bool kTransactionActive,
         bool kCheckTransaction,
         VerifyObjectFlags kVerifyFlags,
         bool kIsVolatile>
inline void Object::SetFieldByte(MemberOffset field_offset, int8_t new_value) {
  VerifyTransaction<kTransactionActive, kCheckTransaction>();
  if (kTransactionActive) {
    Runtime::Current()->RecordWriteFieldByte(
        this, field_offset, GetFieldByte<kVerifyFlags, kIsVolatile>(field_offset), kIsVolatile);
  }
  Verify<kVerifyFlags>();
  SetFieldPrimitive<int8_t, kIsVolatile>(field_offset, new_value);
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline void Object::SetFieldBooleanVolatile(MemberOffset field_offset, uint8_t new_value) {
  return SetFieldBoolean<kTransactionActive, kCheckTransaction, kVerifyFlags, /*kIsVolatile=*/true>(
      field_offset, new_value);
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline void Object::SetFieldByteVolatile(MemberOffset field_offset, int8_t new_value) {
  return SetFieldByte<kTransactionActive, kCheckTransaction, kVerifyFlags, /*kIsVolatile=*/true>(
      field_offset, new_value);
}

template<VerifyObjectFlags kVerifyFlags, bool kIsVolatile>
inline uint16_t Object::GetFieldChar(MemberOffset field_offset) {
  Verify<kVerifyFlags>();
  return GetFieldPrimitive<uint16_t, kIsVolatile>(field_offset);
}

template<VerifyObjectFlags kVerifyFlags, bool kIsVolatile>
inline int16_t Object::GetFieldShort(MemberOffset field_offset) {
  Verify<kVerifyFlags>();
  return GetFieldPrimitive<int16_t, kIsVolatile>(field_offset);
}

template<VerifyObjectFlags kVerifyFlags>
inline uint16_t Object::GetFieldCharVolatile(MemberOffset field_offset) {
  return GetFieldChar<kVerifyFlags, /*kIsVolatile=*/true>(field_offset);
}

template<VerifyObjectFlags kVerifyFlags>
inline int16_t Object::GetFieldShortVolatile(MemberOffset field_offset) {
  return GetFieldShort<kVerifyFlags, /*kIsVolatile=*/true>(field_offset);
}

template<bool kTransactionActive,
         bool kCheckTransaction,
         VerifyObjectFlags kVerifyFlags,
         bool kIsVolatile>
inline void Object::SetFieldChar(MemberOffset field_offset, uint16_t new_value) {
  VerifyTransaction<kTransactionActive, kCheckTransaction>();
  if (kTransactionActive) {
    Runtime::Current()->RecordWriteFieldChar(
        this, field_offset, GetFieldChar<kVerifyFlags, kIsVolatile>(field_offset), kIsVolatile);
  }
  Verify<kVerifyFlags>();
  SetFieldPrimitive<uint16_t, kIsVolatile>(field_offset, new_value);
}

template<bool kTransactionActive,
         bool kCheckTransaction,
         VerifyObjectFlags kVerifyFlags,
         bool kIsVolatile>
inline void Object::SetFieldShort(MemberOffset field_offset, int16_t new_value) {
  VerifyTransaction<kTransactionActive, kCheckTransaction>();
  if (kTransactionActive) {
    Runtime::Current()->RecordWriteFieldChar(
        this, field_offset, GetFieldShort<kVerifyFlags, kIsVolatile>(field_offset), kIsVolatile);
  }
  Verify<kVerifyFlags>();
  SetFieldPrimitive<int16_t, kIsVolatile>(field_offset, new_value);
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline void Object::SetFieldCharVolatile(MemberOffset field_offset, uint16_t new_value) {
  return SetFieldChar<kTransactionActive, kCheckTransaction, kVerifyFlags, /*kIsVolatile=*/true>(
      field_offset, new_value);
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline void Object::SetFieldShortVolatile(MemberOffset field_offset, int16_t new_value) {
  return SetFieldShort<kTransactionActive, kCheckTransaction, kVerifyFlags, /*kIsVolatile=*/true>(
      field_offset, new_value);
}
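// Illustrative sketch (not part of the original file): a 32-bit field write in the two common
// transaction configurations. MyValueOffset() is a hypothetical MemberOffset accessor, and the
// snippet assumes the declarations in object.h supply defaults for the remaining template
// parameters.
//
//   // Outside a transaction (the usual runtime case):
//   obj->SetField32<false>(MyValueOffset(), 42);
//   // Inside an active transaction, so the old value is recorded for rollback:
//   obj->SetField32<true>(MyValueOffset(), 42);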
template<bool kTransactionActive,
         bool kCheckTransaction,
         VerifyObjectFlags kVerifyFlags,
         bool kIsVolatile>
inline void Object::SetField32(MemberOffset field_offset, int32_t new_value) {
  VerifyTransaction<kTransactionActive, kCheckTransaction>();
  if (kTransactionActive) {
    Runtime::Current()->RecordWriteField32(
        this, field_offset, GetField32<kVerifyFlags, kIsVolatile>(field_offset), kIsVolatile);
  }
  Verify<kVerifyFlags>();
  SetFieldPrimitive<int32_t, kIsVolatile>(field_offset, new_value);
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline void Object::SetField32Volatile(MemberOffset field_offset, int32_t new_value) {
  SetField32<kTransactionActive, kCheckTransaction, kVerifyFlags, /*kIsVolatile=*/true>(
      field_offset, new_value);
}

template<VerifyObjectFlags kVerifyFlags, bool kIsVolatile>
inline void Object::SetField32Transaction(MemberOffset field_offset, int32_t new_value) {
  if (Runtime::Current()->IsActiveTransaction()) {
    SetField32<true, true, kVerifyFlags, kIsVolatile>(field_offset, new_value);
  } else {
    SetField32<false, true, kVerifyFlags, kIsVolatile>(field_offset, new_value);
  }
}

template<bool kTransactionActive,
         bool kCheckTransaction,
         VerifyObjectFlags kVerifyFlags,
         bool kIsVolatile>
inline void Object::SetField64(MemberOffset field_offset, int64_t new_value) {
  VerifyTransaction<kTransactionActive, kCheckTransaction>();
  if (kTransactionActive) {
    Runtime::Current()->RecordWriteField64(
        this, field_offset, GetField64<kVerifyFlags, kIsVolatile>(field_offset), kIsVolatile);
  }
  Verify<kVerifyFlags>();
  SetFieldPrimitive<int64_t, kIsVolatile>(field_offset, new_value);
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline void Object::SetField64Volatile(MemberOffset field_offset, int64_t new_value) {
  return SetField64<kTransactionActive, kCheckTransaction, kVerifyFlags, /*kIsVolatile=*/true>(
      field_offset, new_value);
}

template<VerifyObjectFlags kVerifyFlags, bool kIsVolatile>
inline void Object::SetField64Transaction(MemberOffset field_offset, int64_t new_value) {
  if (Runtime::Current()->IsActiveTransaction()) {
    SetField64<true, true, kVerifyFlags, kIsVolatile>(field_offset, new_value);
  } else {
    SetField64<false, true, kVerifyFlags, kIsVolatile>(field_offset, new_value);
  }
}

template<typename kSize>
inline kSize Object::GetFieldAcquire(MemberOffset field_offset) {
  const uint8_t* raw_addr = reinterpret_cast<const uint8_t*>(this) + field_offset.Int32Value();
  const kSize* addr = reinterpret_cast<const kSize*>(raw_addr);
  return reinterpret_cast<const Atomic<kSize>*>(addr)->load(std::memory_order_acquire);
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline bool Object::CasFieldWeakSequentiallyConsistent64(MemberOffset field_offset,
                                                         int64_t old_value,
                                                         int64_t new_value) {
  VerifyTransaction<kTransactionActive, kCheckTransaction>();
  if (kTransactionActive) {
    Runtime::Current()->RecordWriteField64(this, field_offset, old_value, true);
  }
  Verify<kVerifyFlags>();
  uint8_t* raw_addr = reinterpret_cast<uint8_t*>(this) + field_offset.Int32Value();
  Atomic<int64_t>* atomic_addr = reinterpret_cast<Atomic<int64_t>*>(raw_addr);
  return atomic_addr->CompareAndSetWeakSequentiallyConsistent(old_value, new_value);
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline bool Object::CasFieldStrongSequentiallyConsistent64(MemberOffset field_offset,
                                                           int64_t old_value,
                                                           int64_t new_value) {
  VerifyTransaction<kTransactionActive, kCheckTransaction>();
  if (kTransactionActive) {
    Runtime::Current()->RecordWriteField64(this, field_offset, old_value, true);
  }
  Verify<kVerifyFlags>();
  uint8_t* raw_addr = reinterpret_cast<uint8_t*>(this) + field_offset.Int32Value();
  Atomic<int64_t>* atomic_addr = reinterpret_cast<Atomic<int64_t>*>(raw_addr);
  return atomic_addr->CompareAndSetStrongSequentiallyConsistent(old_value, new_value);
}
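// Illustrative sketch (not part of the original file): a retry loop built on the weak 64-bit CAS
// above. kCounterOffset is a hypothetical MemberOffset, and default template arguments from
// object.h are assumed for the unspecified parameters.
//
//   int64_t old_value;
//   do {
//     old_value = obj->GetField64Volatile(kCounterOffset);
//   } while (!obj->CasFieldWeakSequentiallyConsistent64<false>(kCounterOffset,
//                                                              old_value,
//                                                              old_value + 1));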
/*
 * Returns a pointer to an object representing what the field points to, not an
 * object representing the field.
 */
template<class T, VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption,
         bool kIsVolatile>
inline T* Object::GetFieldObject(MemberOffset field_offset) {
  Verify<kVerifyFlags>();
  uint8_t* raw_addr = reinterpret_cast<uint8_t*>(this) + field_offset.Int32Value();
  HeapReference<T>* objref_addr = reinterpret_cast<HeapReference<T>*>(raw_addr);
  T* result = ReadBarrier::Barrier<T, kIsVolatile, kReadBarrierOption>(
      this, field_offset, objref_addr);
  VerifyRead<kVerifyFlags>(result);
  return result;
}

template<class T, VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline T* Object::GetFieldObjectVolatile(MemberOffset field_offset) {
  return GetFieldObject<T, kVerifyFlags, kReadBarrierOption, /*kIsVolatile=*/true>(field_offset);
}

template<bool kTransactionActive,
         bool kCheckTransaction,
         VerifyObjectFlags kVerifyFlags,
         bool kIsVolatile>
inline void Object::SetFieldObjectWithoutWriteBarrier(MemberOffset field_offset,
                                                      ObjPtr<Object> new_value) {
  VerifyTransaction<kTransactionActive, kCheckTransaction>();
  if (kTransactionActive) {
    ObjPtr<Object> obj;
    if (kIsVolatile) {
      obj = GetFieldObjectVolatile<Object, kVerifyFlags>(field_offset);
    } else {
      obj = GetFieldObject<Object, kVerifyFlags>(field_offset);
    }
    Runtime::Current()->RecordWriteFieldReference(this, field_offset, obj, true);
  }
  Verify<kVerifyFlags>();
  VerifyWrite<kVerifyFlags>(new_value);
  uint8_t* raw_addr = reinterpret_cast<uint8_t*>(this) + field_offset.Int32Value();
  HeapReference<Object>* objref_addr = reinterpret_cast<HeapReference<Object>*>(raw_addr);
  objref_addr->Assign<kIsVolatile>(new_value.Ptr());
}

template<bool kTransactionActive,
         bool kCheckTransaction,
         VerifyObjectFlags kVerifyFlags,
         bool kIsVolatile>
inline void Object::SetFieldObject(MemberOffset field_offset, ObjPtr<Object> new_value) {
  SetFieldObjectWithoutWriteBarrier<kTransactionActive, kCheckTransaction, kVerifyFlags,
      kIsVolatile>(field_offset, new_value);
  if (new_value != nullptr) {
    WriteBarrier::ForFieldWrite(this, field_offset, new_value);
    // TODO: Check field assignment could theoretically cause thread suspension, TODO: fix this.
    CheckFieldAssignment(field_offset, new_value);
  }
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline void Object::SetFieldObjectVolatile(MemberOffset field_offset, ObjPtr<Object> new_value) {
  SetFieldObject<kTransactionActive, kCheckTransaction, kVerifyFlags, /*kIsVolatile=*/true>(
      field_offset, new_value);
}

template<VerifyObjectFlags kVerifyFlags, bool kIsVolatile>
inline void Object::SetFieldObjectTransaction(MemberOffset field_offset, ObjPtr<Object> new_value) {
  if (Runtime::Current()->IsActiveTransaction()) {
    SetFieldObject<true, true, kVerifyFlags, kIsVolatile>(field_offset, new_value);
  } else {
    SetFieldObject<false, true, kVerifyFlags, kIsVolatile>(field_offset, new_value);
  }
}

template<VerifyObjectFlags kVerifyFlags>
inline HeapReference<Object>* Object::GetFieldObjectReferenceAddr(MemberOffset field_offset) {
  Verify<kVerifyFlags>();
  return reinterpret_cast<HeapReference<Object>*>(
      reinterpret_cast<uint8_t*>(this) + field_offset.Int32Value());
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline bool Object::CasFieldObjectWithoutWriteBarrier(MemberOffset field_offset,
                                                      ObjPtr<Object> old_value,
                                                      ObjPtr<Object> new_value,
                                                      CASMode mode,
                                                      std::memory_order memory_order) {
  VerifyTransaction<kTransactionActive, kCheckTransaction>();
  VerifyCAS<kVerifyFlags>(new_value, old_value);
  if (kTransactionActive) {
    Runtime::Current()->RecordWriteFieldReference(this, field_offset, old_value, true);
  }
  uint32_t old_ref(PtrCompression<kPoisonHeapReferences, Object>::Compress(old_value));
  uint32_t new_ref(PtrCompression<kPoisonHeapReferences, Object>::Compress(new_value));
  uint8_t* raw_addr = reinterpret_cast<uint8_t*>(this) + field_offset.Int32Value();
  Atomic<uint32_t>* atomic_addr = reinterpret_cast<Atomic<uint32_t>*>(raw_addr);
  return atomic_addr->CompareAndSet(old_ref, new_ref, mode, memory_order);
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline bool Object::CasFieldObject(MemberOffset field_offset,
                                   ObjPtr<Object> old_value,
                                   ObjPtr<Object> new_value,
                                   CASMode mode,
                                   std::memory_order memory_order) {
  bool success = CasFieldObjectWithoutWriteBarrier<
      kTransactionActive, kCheckTransaction, kVerifyFlags>(field_offset,
                                                           old_value,
                                                           new_value,
                                                           mode,
                                                           memory_order);
  if (success) {
    WriteBarrier::ForFieldWrite(this, field_offset, new_value);
  }
  return success;
}
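// Illustrative sketch (not part of the original file): a lock-free publish of a reference field
// through CasFieldObject(). kNextOffset and new_node are hypothetical, and default template
// arguments from object.h are assumed for the unspecified parameters.
//
//   bool installed = obj->CasFieldObject<false>(kNextOffset,
//                                               /*old_value=*/ nullptr,
//                                               new_node,
//                                               CASMode::kStrong,
//                                               std::memory_order_seq_cst);
//   // On success, the write barrier for new_node has already been applied.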
template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline ObjPtr<Object> Object::CompareAndExchangeFieldObject(MemberOffset field_offset,
                                                            ObjPtr<Object> old_value,
                                                            ObjPtr<Object> new_value) {
  VerifyTransaction<kTransactionActive, kCheckTransaction>();
  VerifyCAS<kVerifyFlags>(new_value, old_value);
  uint32_t old_ref(PtrCompression<kPoisonHeapReferences, Object>::Compress(old_value));
  uint32_t new_ref(PtrCompression<kPoisonHeapReferences, Object>::Compress(new_value));
  uint8_t* raw_addr = reinterpret_cast<uint8_t*>(this) + field_offset.Int32Value();
  Atomic<uint32_t>* atomic_addr = reinterpret_cast<Atomic<uint32_t>*>(raw_addr);
  bool success = atomic_addr->compare_exchange_strong(old_ref, new_ref, std::memory_order_seq_cst);
  ObjPtr<Object> witness_value(PtrCompression<kPoisonHeapReferences, Object>::Decompress(old_ref));
  if (kIsDebugBuild) {
    // Ensure caller has done read barrier on the reference field so it's in the to-space.
    ReadBarrier::AssertToSpaceInvariant(witness_value.Ptr());
  }
  if (success) {
    if (kTransactionActive) {
      Runtime::Current()->RecordWriteFieldReference(this, field_offset, witness_value, true);
    }
    WriteBarrier::ForFieldWrite(this, field_offset, new_value);
  }
  VerifyRead<kVerifyFlags>(witness_value);
  return witness_value;
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline ObjPtr<Object> Object::ExchangeFieldObject(MemberOffset field_offset,
                                                  ObjPtr<Object> new_value) {
  VerifyTransaction<kTransactionActive, kCheckTransaction>();
  VerifyCAS<kVerifyFlags>(new_value, /*old_value=*/ nullptr);
  uint32_t new_ref(PtrCompression<kPoisonHeapReferences, Object>::Compress(new_value));
  uint8_t* raw_addr = reinterpret_cast<uint8_t*>(this) + field_offset.Int32Value();
  Atomic<uint32_t>* atomic_addr = reinterpret_cast<Atomic<uint32_t>*>(raw_addr);
  uint32_t old_ref = atomic_addr->exchange(new_ref, std::memory_order_seq_cst);
  ObjPtr<Object> old_value(PtrCompression<kPoisonHeapReferences, Object>::Decompress(old_ref));
  if (kIsDebugBuild) {
    // Ensure caller has done read barrier on the reference field so it's in the to-space.
    ReadBarrier::AssertToSpaceInvariant(old_value.Ptr());
  }
  if (kTransactionActive) {
    Runtime::Current()->RecordWriteFieldReference(this, field_offset, old_value, true);
  }
  WriteBarrier::ForFieldWrite(this, field_offset, new_value);
  VerifyRead<kVerifyFlags>(old_value);
  return old_value;
}

template<typename T, VerifyObjectFlags kVerifyFlags>
inline void Object::GetPrimitiveFieldViaAccessor(MemberOffset field_offset,
                                                 Accessor<T>* accessor) {
  Verify<kVerifyFlags>();
  uint8_t* raw_addr = reinterpret_cast<uint8_t*>(this) + field_offset.Int32Value();
  T* addr = reinterpret_cast<T*>(raw_addr);
  accessor->Access(addr);
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline void Object::UpdateFieldBooleanViaAccessor(MemberOffset field_offset,
                                                  Accessor<uint8_t>* accessor) {
  VerifyTransaction<kTransactionActive, kCheckTransaction>();
  if (kTransactionActive) {
    static const bool kIsVolatile = true;
    uint8_t old_value = GetFieldBoolean<kVerifyFlags, kIsVolatile>(field_offset);
    Runtime::Current()->RecordWriteFieldBoolean(this, field_offset, old_value, kIsVolatile);
  }
  Verify<kVerifyFlags>();
  uint8_t* raw_addr = reinterpret_cast<uint8_t*>(this) + field_offset.Int32Value();
  uint8_t* addr = raw_addr;
  accessor->Access(addr);
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline void Object::UpdateFieldByteViaAccessor(MemberOffset field_offset,
                                               Accessor<int8_t>* accessor) {
  VerifyTransaction<kTransactionActive, kCheckTransaction>();
  if (kTransactionActive) {
    static const bool kIsVolatile = true;
    int8_t old_value = GetFieldByte<kVerifyFlags, kIsVolatile>(field_offset);
    Runtime::Current()->RecordWriteFieldByte(this, field_offset, old_value, kIsVolatile);
  }
  Verify<kVerifyFlags>();
  uint8_t* raw_addr = reinterpret_cast<uint8_t*>(this) + field_offset.Int32Value();
  int8_t* addr = reinterpret_cast<int8_t*>(raw_addr);
  accessor->Access(addr);
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline void Object::UpdateFieldCharViaAccessor(MemberOffset field_offset,
                                               Accessor<uint16_t>* accessor) {
  VerifyTransaction<kTransactionActive, kCheckTransaction>();
  if (kTransactionActive) {
    static const bool kIsVolatile = true;
    uint16_t old_value = GetFieldChar<kVerifyFlags, kIsVolatile>(field_offset);
    Runtime::Current()->RecordWriteFieldChar(this, field_offset, old_value, kIsVolatile);
  }
  Verify<kVerifyFlags>();
  uint8_t* raw_addr = reinterpret_cast<uint8_t*>(this) + field_offset.Int32Value();
  uint16_t* addr = reinterpret_cast<uint16_t*>(raw_addr);
  accessor->Access(addr);
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline void Object::UpdateFieldShortViaAccessor(MemberOffset field_offset,
                                                Accessor<int16_t>* accessor) {
  VerifyTransaction<kTransactionActive, kCheckTransaction>();
  if (kTransactionActive) {
    static const bool kIsVolatile = true;
    int16_t old_value = GetFieldShort<kVerifyFlags, kIsVolatile>(field_offset);
    Runtime::Current()->RecordWriteFieldShort(this, field_offset, old_value, kIsVolatile);
  }
  Verify<kVerifyFlags>();
  uint8_t* raw_addr = reinterpret_cast<uint8_t*>(this) + field_offset.Int32Value();
  int16_t* addr = reinterpret_cast<int16_t*>(raw_addr);
  accessor->Access(addr);
}
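// Illustrative sketch (not part of the original file): a minimal accessor for the
// Update*ViaAccessor() helpers above, assuming Accessor<T> exposes a virtual Access(T*) that
// receives the raw field address. IncrementAccessor and kCountOffset are hypothetical names.
//
//   class IncrementAccessor : public Accessor<int16_t> {
//    public:
//     void Access(int16_t* addr) override { *addr = static_cast<int16_t>(*addr + 1); }
//   };
//   IncrementAccessor acc;
//   obj->UpdateFieldShortViaAccessor<false>(kCountOffset, &acc);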
template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline void Object::UpdateField32ViaAccessor(MemberOffset field_offset,
                                             Accessor<int32_t>* accessor) {
  VerifyTransaction<kTransactionActive, kCheckTransaction>();
  if (kTransactionActive) {
    static const bool kIsVolatile = true;
    int32_t old_value = GetField32<kVerifyFlags, kIsVolatile>(field_offset);
    Runtime::Current()->RecordWriteField32(this, field_offset, old_value, kIsVolatile);
  }
  Verify<kVerifyFlags>();
  uint8_t* raw_addr = reinterpret_cast<uint8_t*>(this) + field_offset.Int32Value();
  int32_t* addr = reinterpret_cast<int32_t*>(raw_addr);
  accessor->Access(addr);
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline void Object::UpdateField64ViaAccessor(MemberOffset field_offset,
                                             Accessor<int64_t>* accessor) {
  VerifyTransaction<kTransactionActive, kCheckTransaction>();
  if (kTransactionActive) {
    static const bool kIsVolatile = true;
    int64_t old_value = GetField64<kVerifyFlags, kIsVolatile>(field_offset);
    Runtime::Current()->RecordWriteField64(this, field_offset, old_value, kIsVolatile);
  }
  Verify<kVerifyFlags>();
  uint8_t* raw_addr = reinterpret_cast<uint8_t*>(this) + field_offset.Int32Value();
  int64_t* addr = reinterpret_cast<int64_t*>(raw_addr);
  accessor->Access(addr);
}

template<bool kIsStatic,
         VerifyObjectFlags kVerifyFlags,
         ReadBarrierOption kReadBarrierOption,
         typename Visitor>
inline void Object::VisitFieldsReferences(uint32_t ref_offsets, const Visitor& visitor) {
  if (!kIsStatic && (ref_offsets != mirror::Class::kClassWalkSuper)) {
    // Instance fields and not the slow-path.
    uint32_t field_offset = mirror::kObjectHeaderSize;
    while (ref_offsets != 0) {
      if ((ref_offsets & 1) != 0) {
        visitor(this, MemberOffset(field_offset), kIsStatic);
      }
      ref_offsets >>= 1;
      field_offset += sizeof(mirror::HeapReference<mirror::Object>);
    }
  } else {
    // There is no reference offset bitmap. In the non-static case, walk up the class
    // inheritance hierarchy and find reference offsets the hard way. In the static case, just
    // consider this class.
    for (ObjPtr<Class> klass = kIsStatic
             ? AsClass<kVerifyFlags>()
             : GetClass<kVerifyFlags, kReadBarrierOption>();
         klass != nullptr;
         klass = kIsStatic ? nullptr : klass->GetSuperClass<kVerifyFlags, kReadBarrierOption>()) {
      const size_t num_reference_fields =
          kIsStatic ? klass->NumReferenceStaticFields() : klass->NumReferenceInstanceFields();
      if (num_reference_fields == 0u) {
        continue;
      }
      // Presumably GC can happen when we are cross compiling, it should not cause performance
      // problems to do pointer size logic.
      MemberOffset field_offset = kIsStatic
          ? klass->GetFirstReferenceStaticFieldOffset<kVerifyFlags>(
                Runtime::Current()->GetClassLinker()->GetImagePointerSize())
          : klass->GetFirstReferenceInstanceFieldOffset<kVerifyFlags, kReadBarrierOption>();
      for (size_t i = 0u; i < num_reference_fields; ++i) {
        // TODO: Do a simpler check?
        if (field_offset.Uint32Value() != ClassOffset().Uint32Value()) {
          visitor(this, field_offset, kIsStatic);
        }
        field_offset = MemberOffset(field_offset.Uint32Value() +
                                    sizeof(mirror::HeapReference<mirror::Object>));
      }
    }
  }
}

template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption, typename Visitor>
inline void Object::VisitInstanceFieldsReferences(ObjPtr<Class> klass, const Visitor& visitor) {
  VisitFieldsReferences<false, kVerifyFlags, kReadBarrierOption>(
      klass->GetReferenceInstanceOffsets<kVerifyFlags>(), visitor);
}

template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption, typename Visitor>
inline void Object::VisitStaticFieldsReferences(ObjPtr<Class> klass, const Visitor& visitor) {
  DCHECK(!klass->IsTemp());
  klass->VisitFieldsReferences<true, kVerifyFlags, kReadBarrierOption>(0, visitor);
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsClassLoader() {
  return GetClass<kVerifyFlags, kWithoutReadBarrier>()->template IsClassLoaderClass<kVerifyFlags>();
}

template<VerifyObjectFlags kVerifyFlags>
inline ObjPtr<ClassLoader> Object::AsClassLoader() {
  DCHECK((IsClassLoader<kVerifyFlags>()));
  return ObjPtr<ClassLoader>::DownCast(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsDexCache() {
  return GetClass<kVerifyFlags, kWithoutReadBarrier>()->template IsDexCacheClass<kVerifyFlags>();
}

template<VerifyObjectFlags kVerifyFlags>
inline ObjPtr<DexCache> Object::AsDexCache() {
  DCHECK((IsDexCache<kVerifyFlags>()));
  return ObjPtr<DexCache>::DownCast(this);
}

template<bool kTransactionActive, bool kCheckTransaction>
inline void Object::VerifyTransaction() {
  if (kCheckTransaction) {
    DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
  }
}

}  // namespace mirror
}  // namespace art

#endif  // ART_RUNTIME_MIRROR_OBJECT_INL_H_