/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_MIRROR_DEX_CACHE_H_
#define ART_RUNTIME_MIRROR_DEX_CACHE_H_

#include "array.h"
#include "base/bit_utils.h"
#include "base/locks.h"
#include "dex/dex_file_types.h"
#include "gc_root.h"  // Note: must not use -inl here to avoid circular dependency.
#include "object.h"
#include "object_array.h"

namespace art {

namespace linker {
class ImageWriter;
}  // namespace linker

class ArtField;
class ArtMethod;
struct DexCacheOffsets;
class DexFile;
union JValue;
class LinearAlloc;
class ReflectiveValueVisitor;
class Thread;

namespace mirror {

class CallSite;
class Class;
class ClassLoader;
class MethodType;
class String;

template <typename T> struct PACKED(8) DexCachePair {
  GcRoot<T> object;
  uint32_t index;
  // The array is initially [ {0,0}, {0,0}, {0,0} ... ]
  // We maintain the invariant that once a dex cache entry is populated,
  // the pointer is always non-0.
  // Any given entry would thus be:
  // {non-0, non-0} OR {0,0}
  //
  // It's generally sufficient then to check if the
  // lookup index matches the stored index (for a >0 lookup index)
  // because if it's true the pointer is also non-null.
  //
  // For the 0th entry, which is a special case, the value is either
  // {0,0} (initial state) or {non-0, 0}, which indicates
  // that a valid object is stored at that index for a dex section id of 0.
  //
  // As an optimization, we want to avoid branching on the object pointer since
  // it's always non-null if the id branch succeeds (except for the 0th id).
  // Set the initial state for the 0th entry to be {0,1}, which is guaranteed
  // to fail the lookup id == stored id branch.
  DexCachePair(ObjPtr<T> object, uint32_t index);
  DexCachePair() : index(0) {}
  DexCachePair(const DexCachePair<T>&) = default;
  DexCachePair& operator=(const DexCachePair<T>&) = default;

  static void Initialize(std::atomic<DexCachePair<T>>* dex_cache);

  static uint32_t InvalidIndexForSlot(uint32_t slot) {
    // Since the cache size is a power of two, 0 will always map to slot 0.
    // Use 1 for slot 0 and 0 for all other slots.
    return (slot == 0) ? 1u : 0u;
  }

  T* GetObjectForIndex(uint32_t idx) REQUIRES_SHARED(Locks::mutator_lock_);
};
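
// A minimal usage sketch of the invariant above (illustrative only; the real
// lookups live in dex_cache-inl.h and in compiled entrypoint code, and the
// local names here are invented). A single atomic load keeps the
// {object, index} pair consistent, and GetObjectForIndex() reduces
// "does this slot hold my entry?" to one index comparison:
//
//   std::atomic<DexCachePair<String>>* slot = &strings[string_idx % size];
//   DexCachePair<String> pair = slot->load(std::memory_order_relaxed);
//   String* resolved = pair.GetObjectForIndex(string_idx);  // nullptr on miss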

template <typename T> struct PACKED(2 * __SIZEOF_POINTER__) NativeDexCachePair {
  T* object;
  size_t index;
  // This is similar to DexCachePair except that we're storing a native pointer
  // instead of a GC root. See DexCachePair for the details.
  NativeDexCachePair(T* object, uint32_t index)
      : object(object),
        index(index) {}
  NativeDexCachePair() : object(nullptr), index(0u) { }
  NativeDexCachePair(const NativeDexCachePair<T>&) = default;
  NativeDexCachePair& operator=(const NativeDexCachePair<T>&) = default;

  static void Initialize(std::atomic<NativeDexCachePair<T>>* dex_cache, PointerSize pointer_size);

  static uint32_t InvalidIndexForSlot(uint32_t slot) {
    // Since the cache size is a power of two, 0 will always map to slot 0.
    // Use 1 for slot 0 and 0 for all other slots.
    return (slot == 0) ? 1u : 0u;
  }

  T* GetObjectForIndex(uint32_t idx) REQUIRES_SHARED(Locks::mutator_lock_) {
    if (idx != index) {
      return nullptr;
    }
    DCHECK(object != nullptr);
    return object;
  }
};

using TypeDexCachePair = DexCachePair<Class>;
using TypeDexCacheType = std::atomic<TypeDexCachePair>;

using StringDexCachePair = DexCachePair<String>;
using StringDexCacheType = std::atomic<StringDexCachePair>;

using FieldDexCachePair = NativeDexCachePair<ArtField>;
using FieldDexCacheType = std::atomic<FieldDexCachePair>;

using MethodDexCachePair = NativeDexCachePair<ArtMethod>;
using MethodDexCacheType = std::atomic<MethodDexCachePair>;

using MethodTypeDexCachePair = DexCachePair<MethodType>;
using MethodTypeDexCacheType = std::atomic<MethodTypeDexCachePair>;
// C++ mirror of java.lang.DexCache.
class MANAGED DexCache final : public Object {
 public:
  // Size of java.lang.DexCache.class.
  static uint32_t ClassSize(PointerSize pointer_size);

  // Size of type dex cache. Needs to be a power of 2 for entrypoint assumptions to hold.
  static constexpr size_t kDexCacheTypeCacheSize = 1024;
  static_assert(IsPowerOfTwo(kDexCacheTypeCacheSize),
                "Type dex cache size is not a power of 2.");

  // Size of string dex cache. Needs to be a power of 2 for entrypoint assumptions to hold.
  static constexpr size_t kDexCacheStringCacheSize = 1024;
  static_assert(IsPowerOfTwo(kDexCacheStringCacheSize),
                "String dex cache size is not a power of 2.");

  // Size of field dex cache. Needs to be a power of 2 for entrypoint assumptions to hold.
  static constexpr size_t kDexCacheFieldCacheSize = 1024;
  static_assert(IsPowerOfTwo(kDexCacheFieldCacheSize),
                "Field dex cache size is not a power of 2.");

  // Size of method dex cache. Needs to be a power of 2 for entrypoint assumptions to hold.
  static constexpr size_t kDexCacheMethodCacheSize = 1024;
  static_assert(IsPowerOfTwo(kDexCacheMethodCacheSize),
                "Method dex cache size is not a power of 2.");

  // Size of method type dex cache. Needs to be a power of 2 for entrypoint assumptions
  // to hold.
  static constexpr size_t kDexCacheMethodTypeCacheSize = 1024;
  static_assert(IsPowerOfTwo(kDexCacheMethodTypeCacheSize),
                "MethodType dex cache size is not a power of 2.");
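
  // Illustrative note (a sketch, not part of the API): because each cache
  // size above is a power of two, mapping a dex index to its slot can use a
  // mask instead of a modulo, e.g. for strings:
  //
  //   uint32_t slot = string_idx.index_ & (kDexCacheStringCacheSize - 1u);
  //   // same result as: string_idx.index_ % kDexCacheStringCacheSize
  //
  // The actual mappings are the *SlotIndex() helpers declared further below.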

  static constexpr size_t StaticTypeSize() {
    return kDexCacheTypeCacheSize;
  }

  static constexpr size_t StaticStringSize() {
    return kDexCacheStringCacheSize;
  }

  static constexpr size_t StaticArtFieldSize() {
    return kDexCacheFieldCacheSize;
  }

  static constexpr size_t StaticMethodSize() {
    return kDexCacheMethodCacheSize;
  }

  static constexpr size_t StaticMethodTypeSize() {
    return kDexCacheMethodTypeCacheSize;
  }

  // Size of an instance of java.lang.DexCache not including referenced values.
  static constexpr uint32_t InstanceSize() {
    return sizeof(DexCache);
  }

  static void InitializeDexCache(Thread* self,
                                 ObjPtr<mirror::DexCache> dex_cache,
                                 ObjPtr<mirror::String> location,
                                 const DexFile* dex_file,
                                 LinearAlloc* linear_alloc,
                                 PointerSize image_pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(Locks::dex_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
  void FixupStrings(StringDexCacheType* dest, const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
  void FixupResolvedTypes(TypeDexCacheType* dest, const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
  void FixupResolvedMethodTypes(MethodTypeDexCacheType* dest, const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
  void FixupResolvedCallSites(GcRoot<mirror::CallSite>* dest, const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ObjPtr<String> GetLocation() REQUIRES_SHARED(Locks::mutator_lock_);

  static constexpr MemberOffset StringsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, strings_);
  }

  static constexpr MemberOffset PreResolvedStringsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, preresolved_strings_);
  }

  static constexpr MemberOffset ResolvedTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_types_);
  }

  static constexpr MemberOffset ResolvedFieldsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_fields_);
  }

  static constexpr MemberOffset ResolvedMethodsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_methods_);
  }

  static constexpr MemberOffset ResolvedMethodTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_method_types_);
  }

  static constexpr MemberOffset ResolvedCallSitesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_call_sites_);
  }

  static constexpr MemberOffset NumStringsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_strings_);
  }

  static constexpr MemberOffset NumPreResolvedStringsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_preresolved_strings_);
  }

  static constexpr MemberOffset NumResolvedTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_types_);
  }

  static constexpr MemberOffset NumResolvedFieldsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_fields_);
  }

  static constexpr MemberOffset NumResolvedMethodsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_methods_);
  }

  static constexpr MemberOffset NumResolvedMethodTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_method_types_);
  }

  static constexpr MemberOffset NumResolvedCallSitesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_call_sites_);
  }

  static constexpr size_t PreResolvedStringsAlignment() {
    return alignof(GcRoot<mirror::String>);
  }

  String* GetResolvedString(dex::StringIndex string_idx) ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_);

  void SetResolvedString(dex::StringIndex string_idx, ObjPtr<mirror::String> resolved) ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_);

  void SetPreResolvedString(dex::StringIndex string_idx,
                            ObjPtr<mirror::String> resolved)
      ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_);

  // Clear the preresolved string cache to prevent further usage.
  void ClearPreResolvedStrings()
      ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_);

  // Clear a string for a string_idx, used to undo string intern transactions to make sure
  // the string isn't kept live.
  void ClearString(dex::StringIndex string_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  Class* GetResolvedType(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  void SetResolvedType(dex::TypeIndex type_idx, ObjPtr<Class> resolved)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void ClearResolvedType(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE ArtMethod* GetResolvedMethod(uint32_t method_idx, PointerSize ptr_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetResolvedMethod(uint32_t method_idx,
                                       ArtMethod* resolved,
                                       PointerSize ptr_size)
      REQUIRES_SHARED(Locks::mutator_lock_);
  ALWAYS_INLINE void ClearResolvedMethod(uint32_t method_idx, PointerSize ptr_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Pointer sized variant, used for patching.
  ALWAYS_INLINE ArtField* GetResolvedField(uint32_t idx, PointerSize ptr_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Pointer sized variant, used for patching.
  ALWAYS_INLINE void SetResolvedField(uint32_t idx, ArtField* field, PointerSize ptr_size)
      REQUIRES_SHARED(Locks::mutator_lock_);
  ALWAYS_INLINE void ClearResolvedField(uint32_t idx, PointerSize ptr_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  MethodType* GetResolvedMethodType(dex::ProtoIndex proto_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void SetResolvedMethodType(dex::ProtoIndex proto_idx, MethodType* resolved)
      REQUIRES_SHARED(Locks::mutator_lock_);

  CallSite* GetResolvedCallSite(uint32_t call_site_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  // Attempts to bind |call_site_idx| to the call site |resolved|. The
  // caller must use the return value in place of |resolved|. This is
  // because multiple threads can invoke the bootstrap method, each
  // producing a call site, but the method handle invocation on the
  // call site must use a single agreed-upon value.
  ObjPtr<CallSite> SetResolvedCallSite(uint32_t call_site_idx, ObjPtr<CallSite> resolved)
      REQUIRES_SHARED(Locks::mutator_lock_) WARN_UNUSED;
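
  // Hedged caller-side sketch of the contract above (local names invented for
  // illustration): adopt whichever call site won the race, not necessarily
  // the one this thread just produced:
  //
  //   ObjPtr<CallSite> site = /* run the bootstrap method */;
  //   site = dex_cache->SetResolvedCallSite(call_site_idx, site);
  //   // `site` is now the commonly agreed call site, possibly the one
  //   // installed by another thread.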

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  StringDexCacheType* GetStrings() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr64<StringDexCacheType*, kVerifyFlags>(StringsOffset());
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  GcRoot<mirror::String>* GetPreResolvedStrings() ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr64<GcRoot<mirror::String>*, kVerifyFlags>(PreResolvedStringsOffset());
  }

  void SetStrings(StringDexCacheType* strings) ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(StringsOffset(), strings);
  }

  void SetPreResolvedStrings(GcRoot<mirror::String>* strings)
      ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(PreResolvedStringsOffset(), strings);
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  TypeDexCacheType* GetResolvedTypes() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr<TypeDexCacheType*, kVerifyFlags>(ResolvedTypesOffset());
  }

  void SetResolvedTypes(TypeDexCacheType* resolved_types)
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(ResolvedTypesOffset(), resolved_types);
  }

  MethodDexCacheType* GetResolvedMethods() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr<MethodDexCacheType*>(ResolvedMethodsOffset());
  }

  void SetResolvedMethods(MethodDexCacheType* resolved_methods)
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(ResolvedMethodsOffset(), resolved_methods);
  }

  FieldDexCacheType* GetResolvedFields() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr<FieldDexCacheType*>(ResolvedFieldsOffset());
  }

  void SetResolvedFields(FieldDexCacheType* resolved_fields)
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(ResolvedFieldsOffset(), resolved_fields);
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  MethodTypeDexCacheType* GetResolvedMethodTypes()
      ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr64<MethodTypeDexCacheType*, kVerifyFlags>(ResolvedMethodTypesOffset());
  }

  void SetResolvedMethodTypes(MethodTypeDexCacheType* resolved_method_types)
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(ResolvedMethodTypesOffset(), resolved_method_types);
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  GcRoot<CallSite>* GetResolvedCallSites()
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr<GcRoot<CallSite>*, kVerifyFlags>(ResolvedCallSitesOffset());
  }

  void SetResolvedCallSites(GcRoot<CallSite>* resolved_call_sites)
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(ResolvedCallSitesOffset(), resolved_call_sites);
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  size_t NumStrings() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32<kVerifyFlags>(NumStringsOffset());
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  size_t NumPreResolvedStrings() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32<kVerifyFlags>(NumPreResolvedStringsOffset());
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  size_t NumResolvedTypes() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32<kVerifyFlags>(NumResolvedTypesOffset());
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  size_t NumResolvedMethods() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32<kVerifyFlags>(NumResolvedMethodsOffset());
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  size_t NumResolvedFields() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32<kVerifyFlags>(NumResolvedFieldsOffset());
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  size_t NumResolvedMethodTypes() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32<kVerifyFlags>(NumResolvedMethodTypesOffset());
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  size_t NumResolvedCallSites() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32<kVerifyFlags>(NumResolvedCallSitesOffset());
  }

  const DexFile* GetDexFile() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr<const DexFile*>(OFFSET_OF_OBJECT_MEMBER(DexCache, dex_file_));
  }

  void SetDexFile(const DexFile* dex_file) REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(OFFSET_OF_OBJECT_MEMBER(DexCache, dex_file_), dex_file);
  }

  void SetLocation(ObjPtr<String> location) REQUIRES_SHARED(Locks::mutator_lock_);

  template <typename T>
  static NativeDexCachePair<T> GetNativePairPtrSize(std::atomic<NativeDexCachePair<T>>* pair_array,
                                                    size_t idx,
                                                    PointerSize ptr_size);

  template <typename T>
  static void SetNativePairPtrSize(std::atomic<NativeDexCachePair<T>>* pair_array,
                                   size_t idx,
                                   NativeDexCachePair<T> pair,
                                   PointerSize ptr_size);
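
  // Explanatory sketch of why these helpers take a PointerSize (a note, not
  // normative): a NativeDexCachePair<T> is two pointer-sized words, so with
  // 4-byte pointers the pair can be moved as a single 64-bit atomic, while
  // with 8-byte pointers it needs the 16-byte atomic machinery in the
  // private section below. Assumed call shape:
  //
  //   FieldDexCachePair pair =
  //       GetNativePairPtrSize(GetResolvedFields(), slot, kRuntimePointerSize);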

  static size_t PreResolvedStringsSize(size_t num_strings) {
    return sizeof(GcRoot<mirror::String>) * num_strings;
  }

  uint32_t StringSlotIndex(dex::StringIndex string_idx) REQUIRES_SHARED(Locks::mutator_lock_);
  uint32_t TypeSlotIndex(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);
  uint32_t FieldSlotIndex(uint32_t field_idx) REQUIRES_SHARED(Locks::mutator_lock_);
  uint32_t MethodSlotIndex(uint32_t method_idx) REQUIRES_SHARED(Locks::mutator_lock_);
  uint32_t MethodTypeSlotIndex(dex::ProtoIndex proto_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if we succeeded in adding the pre-resolved string array.
  bool AddPreResolvedStringsArray() REQUIRES_SHARED(Locks::mutator_lock_);

  void VisitReflectiveTargets(ReflectiveValueVisitor* visitor) REQUIRES(Locks::mutator_lock_);

  void SetClassLoader(ObjPtr<ClassLoader> class_loader) REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  void Init(const DexFile* dex_file,
            ObjPtr<String> location,
            StringDexCacheType* strings,
            uint32_t num_strings,
            TypeDexCacheType* resolved_types,
            uint32_t num_resolved_types,
            MethodDexCacheType* resolved_methods,
            uint32_t num_resolved_methods,
            FieldDexCacheType* resolved_fields,
            uint32_t num_resolved_fields,
            MethodTypeDexCacheType* resolved_method_types,
            uint32_t num_resolved_method_types,
            GcRoot<CallSite>* resolved_call_sites,
            uint32_t num_resolved_call_sites)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // std::pair<> is not trivially copyable and as such it is unsuitable for atomic operations,
  // so we use a custom pair class for loading and storing the NativeDexCachePair<>.
  template <typename IntType>
  struct PACKED(2 * sizeof(IntType)) ConversionPair {
    ConversionPair(IntType f, IntType s) : first(f), second(s) { }
    ConversionPair(const ConversionPair&) = default;
    ConversionPair& operator=(const ConversionPair&) = default;
    IntType first;
    IntType second;
  };
  using ConversionPair32 = ConversionPair<uint32_t>;
  using ConversionPair64 = ConversionPair<uint64_t>;
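
  // Illustrative mapping (a sketch assuming 8-byte pointers; the definitive
  // code lives in the -inl.h file): a NativeDexCachePair<T> {object, index}
  // is reinterpreted as a ConversionPair64 {first, second} so it can pass
  // through the 16-byte atomic helpers below, roughly:
  //
  //   ConversionPair64 v(reinterpret_cast<uint64_t>(pair.object), pair.index);
  //   AtomicStoreRelease16B(&array64[idx], v);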

  // Visit instance fields of the dex cache as well as its associated arrays.
  template <bool kVisitNativeRoots,
            VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
            ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
            typename Visitor>
  void VisitReferences(ObjPtr<Class> klass, const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_);

  // Where the toolchain lacks lock-free 16-byte atomics, we use hand-crafted routines.
#if defined(__aarch64__)
  // 16-byte atomics are supported on aarch64.
  ALWAYS_INLINE static ConversionPair64 AtomicLoadRelaxed16B(
      std::atomic<ConversionPair64>* target) {
    return target->load(std::memory_order_relaxed);
  }

  ALWAYS_INLINE static void AtomicStoreRelease16B(
      std::atomic<ConversionPair64>* target, ConversionPair64 value) {
    target->store(value, std::memory_order_release);
  }
#elif defined(__x86_64__)
  ALWAYS_INLINE static ConversionPair64 AtomicLoadRelaxed16B(
      std::atomic<ConversionPair64>* target) {
    uint64_t first, second;
    // `lock cmpxchg16b` with expected == desired == {0,0} either leaves a
    // zero pair unchanged or fails the compare; in both cases it deposits
    // the current 16-byte value in rdx:rax, which makes it usable as an
    // atomic 16-byte load.
    __asm__ __volatile__(
        "lock cmpxchg16b (%2)"
        : "=&a"(first), "=&d"(second)
        : "r"(target), "a"(0), "d"(0), "b"(0), "c"(0)
        : "cc");
    return ConversionPair64(first, second);
  }

  ALWAYS_INLINE static void AtomicStoreRelease16B(
      std::atomic<ConversionPair64>* target, ConversionPair64 value) {
    uint64_t first, second;
    // Read the current value, then loop on `lock cmpxchg16b` until the swap
    // succeeds; the locked instruction also provides the release ordering.
    __asm__ __volatile__ (
        "movq (%2), %%rax\n\t"
        "movq 8(%2), %%rdx\n\t"
        "1:\n\t"
        "lock cmpxchg16b (%2)\n\t"
        "jnz 1b"
        : "=&a"(first), "=&d"(second)
        : "r"(target), "b"(value.first), "c"(value.second)
        : "cc");
  }
#else
  static ConversionPair64 AtomicLoadRelaxed16B(std::atomic<ConversionPair64>* target);
  static void AtomicStoreRelease16B(std::atomic<ConversionPair64>* target, ConversionPair64 value);
#endif

  HeapReference<ClassLoader> class_loader_;
  HeapReference<String> location_;

  uint64_t dex_file_;                // const DexFile*
  uint64_t preresolved_strings_;     // GcRoot<mirror::String>* array with
                                     // num_preresolved_strings_ elements.
  uint64_t resolved_call_sites_;     // GcRoot<CallSite>* array with num_resolved_call_sites_
                                     // elements.
  uint64_t resolved_fields_;         // std::atomic<FieldDexCachePair>*, array with
                                     // num_resolved_fields_ elements.
  uint64_t resolved_method_types_;   // std::atomic<MethodTypeDexCachePair>* array with
                                     // num_resolved_method_types_ elements.
  uint64_t resolved_methods_;        // std::atomic<MethodDexCachePair>*, array with
                                     // num_resolved_methods_ elements.
  uint64_t resolved_types_;          // TypeDexCacheType*, array with num_resolved_types_ elements.
  uint64_t strings_;                 // std::atomic<StringDexCachePair>*, array with num_strings_
                                     // elements.

  uint32_t num_preresolved_strings_;    // Number of elements in the preresolved_strings_ array.
  uint32_t num_resolved_call_sites_;    // Number of elements in the resolved_call_sites_ array.
  uint32_t num_resolved_fields_;        // Number of elements in the resolved_fields_ array.
  uint32_t num_resolved_method_types_;  // Number of elements in the resolved_method_types_ array.
  uint32_t num_resolved_methods_;       // Number of elements in the resolved_methods_ array.
  uint32_t num_resolved_types_;         // Number of elements in the resolved_types_ array.
  uint32_t num_strings_;                // Number of elements in the strings_ array.

  friend struct art::DexCacheOffsets;  // for verifying offset information
  friend class linker::ImageWriter;
  friend class Object;  // For VisitReferences
  DISALLOW_IMPLICIT_CONSTRUCTORS(DexCache);
};

}  // namespace mirror
}  // namespace art

#endif  // ART_RUNTIME_MIRROR_DEX_CACHE_H_