/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "entrypoints/quick/quick_alloc_entrypoints.h"

#include "art_method-inl.h"
#include "base/enums.h"
#include "base/quasi_atomic.h"
#include "callee_save_frame.h"
#include "dex/dex_file_types.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/string-alloc-inl.h"

namespace art {

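// When enabled, uninstrumented allocations with the TLAB allocator take the inlined
// bump-pointer fast path in artAllocObjectFromCode() below instead of calling into the
// generic allocator path.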
static constexpr bool kUseTlabFastPath = true;

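// Shared implementation behind the object-allocation entrypoints generated below.
// kInitialized: the class is known to be visibly initialized.
// kFinalize: take the fully checked AllocObjectFromCode path, which also handles
//            finalizable classes (used by the "WithChecks" entrypoints).
// kInstrumented: use the instrumented allocation path.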
template <bool kInitialized,
          bool kFinalize,
          bool kInstrumented,
          gc::AllocatorType allocator_type>
static ALWAYS_INLINE inline mirror::Object* artAllocObjectFromCode(
    mirror::Class* klass,
    Thread* self) REQUIRES_SHARED(Locks::mutator_lock_) {
  ScopedQuickEntrypointChecks sqec(self);
  DCHECK(klass != nullptr);
  if (kUseTlabFastPath && !kInstrumented && allocator_type == gc::kAllocatorTypeTLAB) {
    // The "object size alloc fast path" is set when the class is
    // visibly initialized, objects are fixed size and non-finalizable.
    // Otherwise, the value is too large for the size check to succeed.
    size_t byte_count = klass->GetObjectSizeAllocFastPath();
    if (LIKELY(byte_count < self->TlabSize())) {
      static_assert(kObjectAlignment == gc::space::BumpPointerSpace::kAlignment, "Alignment check");
      DCHECK_ALIGNED(byte_count, gc::space::BumpPointerSpace::kAlignment);
      mirror::Object* obj = self->AllocTlab(byte_count);
      DCHECK(obj != nullptr) << "AllocTlab can't fail";
      obj->SetClass(klass);
      if (kUseBakerReadBarrier) {
        obj->AssertReadBarrierState();
      }
      QuasiAtomic::ThreadFenceForConstructor();
      return obj;
    }
  }
  if (kInitialized) {
    return AllocObjectFromCodeInitialized<kInstrumented>(klass, self, allocator_type).Ptr();
  } else if (!kFinalize) {
    return AllocObjectFromCodeResolved<kInstrumented>(klass, self, allocator_type).Ptr();
  } else {
    return AllocObjectFromCode<kInstrumented>(klass, self, allocator_type).Ptr();
  }
}

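// Generates the C-linkage object, string, and array allocation entrypoints for one allocator
// type and one instrumentation setting; `suffix` and `suffix2` are appended to the symbol names.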
#define GENERATE_ENTRYPOINTS_FOR_ALLOCATOR_INST(suffix, suffix2, instrumented_bool, allocator_type) \
extern "C" mirror::Object* artAllocObjectFromCodeWithChecks##suffix##suffix2( \
    mirror::Class* klass, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  return artAllocObjectFromCode<false, true, instrumented_bool, allocator_type>(klass, self); \
} \
extern "C" mirror::Object* artAllocObjectFromCodeResolved##suffix##suffix2( \
    mirror::Class* klass, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  return artAllocObjectFromCode<false, false, instrumented_bool, allocator_type>(klass, self); \
} \
extern "C" mirror::Object* artAllocObjectFromCodeInitialized##suffix##suffix2( \
    mirror::Class* klass, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  return artAllocObjectFromCode<true, false, instrumented_bool, allocator_type>(klass, self); \
} \
extern "C" mirror::String* artAllocStringObject##suffix##suffix2( \
    mirror::Class* klass, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  /* The klass arg is so it matches the ABI of the other object alloc callbacks. */ \
  DCHECK(klass->IsStringClass()) << klass->PrettyClass(); \
  return mirror::String::AllocEmptyString<instrumented_bool>(self, allocator_type).Ptr(); \
} \
extern "C" mirror::Array* artAllocArrayFromCodeResolved##suffix##suffix2( \
    mirror::Class* klass, int32_t component_count, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  return AllocArrayFromCodeResolved<instrumented_bool>( \
      klass, component_count, self, allocator_type).Ptr(); \
} \
extern "C" mirror::String* artAllocStringFromBytesFromCode##suffix##suffix2( \
    mirror::ByteArray* byte_array, int32_t high, int32_t offset, int32_t byte_count, \
    Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  StackHandleScope<1> hs(self); \
  Handle<mirror::ByteArray> handle_array(hs.NewHandle(byte_array)); \
  return mirror::String::AllocFromByteArray<instrumented_bool>( \
      self, byte_count, handle_array, offset, high, allocator_type).Ptr(); \
} \
extern "C" mirror::String* artAllocStringFromCharsFromCode##suffix##suffix2( \
    int32_t offset, int32_t char_count, mirror::CharArray* char_array, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  StackHandleScope<1> hs(self); \
  Handle<mirror::CharArray> handle_array(hs.NewHandle(char_array)); \
  return mirror::String::AllocFromCharArray<instrumented_bool>( \
      self, char_count, handle_array, offset, allocator_type).Ptr(); \
} \
extern "C" mirror::String* artAllocStringFromStringFromCode##suffix##suffix2( /* NOLINT */ \
    mirror::String* string, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  StackHandleScope<1> hs(self); \
  Handle<mirror::String> handle_string(hs.NewHandle(string)); \
  return mirror::String::AllocFromString<instrumented_bool>( \
    self, handle_string->GetLength(), handle_string, 0, allocator_type).Ptr(); \
}

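// Instantiates both the instrumented and non-instrumented entrypoints for a given allocator.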
#define GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(suffix, allocator_type) \
    GENERATE_ENTRYPOINTS_FOR_ALLOCATOR_INST(suffix, Instrumented, true, allocator_type) \
    GENERATE_ENTRYPOINTS_FOR_ALLOCATOR_INST(suffix, , false, allocator_type)

GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(DlMalloc, gc::kAllocatorTypeDlMalloc)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(RosAlloc, gc::kAllocatorTypeRosAlloc)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(BumpPointer, gc::kAllocatorTypeBumpPointer)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(TLAB, gc::kAllocatorTypeTLAB)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(Region, gc::kAllocatorTypeRegion)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(RegionTLAB, gc::kAllocatorTypeRegionTLAB)

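// Declares the art_quick_* allocation entrypoints (implemented elsewhere) for one allocator
// suffix and defines SetQuickAllocEntryPoints##suffix, which installs either the instrumented
// or non-instrumented set into a QuickEntryPoints table.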
#define GENERATE_ENTRYPOINTS(suffix) \
extern "C" void* art_quick_alloc_array_resolved##suffix(mirror::Class* klass, int32_t); \
extern "C" void* art_quick_alloc_array_resolved8##suffix(mirror::Class* klass, int32_t); \
extern "C" void* art_quick_alloc_array_resolved16##suffix(mirror::Class* klass, int32_t); \
extern "C" void* art_quick_alloc_array_resolved32##suffix(mirror::Class* klass, int32_t); \
extern "C" void* art_quick_alloc_array_resolved64##suffix(mirror::Class* klass, int32_t); \
extern "C" void* art_quick_alloc_object_resolved##suffix(mirror::Class* klass); \
extern "C" void* art_quick_alloc_object_initialized##suffix(mirror::Class* klass); \
extern "C" void* art_quick_alloc_object_with_checks##suffix(mirror::Class* klass); \
extern "C" void* art_quick_alloc_string_object##suffix(mirror::Class* klass); \
extern "C" void* art_quick_alloc_string_from_bytes##suffix(void*, int32_t, int32_t, int32_t); \
extern "C" void* art_quick_alloc_string_from_chars##suffix(int32_t, int32_t, void*); \
extern "C" void* art_quick_alloc_string_from_string##suffix(void*); \
extern "C" void* art_quick_alloc_array_resolved##suffix##_instrumented(mirror::Class* klass, int32_t); \
extern "C" void* art_quick_alloc_array_resolved8##suffix##_instrumented(mirror::Class* klass, int32_t); \
extern "C" void* art_quick_alloc_array_resolved16##suffix##_instrumented(mirror::Class* klass, int32_t); \
extern "C" void* art_quick_alloc_array_resolved32##suffix##_instrumented(mirror::Class* klass, int32_t); \
extern "C" void* art_quick_alloc_array_resolved64##suffix##_instrumented(mirror::Class* klass, int32_t); \
extern "C" void* art_quick_alloc_object_resolved##suffix##_instrumented(mirror::Class* klass); \
extern "C" void* art_quick_alloc_object_initialized##suffix##_instrumented(mirror::Class* klass); \
extern "C" void* art_quick_alloc_object_with_checks##suffix##_instrumented(mirror::Class* klass); \
extern "C" void* art_quick_alloc_string_object##suffix##_instrumented(mirror::Class* klass); \
extern "C" void* art_quick_alloc_string_from_bytes##suffix##_instrumented(void*, int32_t, int32_t, int32_t); \
extern "C" void* art_quick_alloc_string_from_chars##suffix##_instrumented(int32_t, int32_t, void*); \
extern "C" void* art_quick_alloc_string_from_string##suffix##_instrumented(void*); \
void SetQuickAllocEntryPoints##suffix(QuickEntryPoints* qpoints, bool instrumented) { \
  if (instrumented) { \
    qpoints->pAllocArrayResolved = art_quick_alloc_array_resolved##suffix##_instrumented; \
    qpoints->pAllocArrayResolved8 = art_quick_alloc_array_resolved8##suffix##_instrumented; \
    qpoints->pAllocArrayResolved16 = art_quick_alloc_array_resolved16##suffix##_instrumented; \
    qpoints->pAllocArrayResolved32 = art_quick_alloc_array_resolved32##suffix##_instrumented; \
    qpoints->pAllocArrayResolved64 = art_quick_alloc_array_resolved64##suffix##_instrumented; \
    qpoints->pAllocObjectResolved = art_quick_alloc_object_resolved##suffix##_instrumented; \
    qpoints->pAllocObjectInitialized = art_quick_alloc_object_initialized##suffix##_instrumented; \
    qpoints->pAllocObjectWithChecks = art_quick_alloc_object_with_checks##suffix##_instrumented; \
    qpoints->pAllocStringObject = art_quick_alloc_string_object##suffix##_instrumented; \
    qpoints->pAllocStringFromBytes = art_quick_alloc_string_from_bytes##suffix##_instrumented; \
    qpoints->pAllocStringFromChars = art_quick_alloc_string_from_chars##suffix##_instrumented; \
    qpoints->pAllocStringFromString = art_quick_alloc_string_from_string##suffix##_instrumented; \
  } else { \
    qpoints->pAllocArrayResolved = art_quick_alloc_array_resolved##suffix; \
    qpoints->pAllocArrayResolved8 = art_quick_alloc_array_resolved8##suffix; \
    qpoints->pAllocArrayResolved16 = art_quick_alloc_array_resolved16##suffix; \
    qpoints->pAllocArrayResolved32 = art_quick_alloc_array_resolved32##suffix; \
    qpoints->pAllocArrayResolved64 = art_quick_alloc_array_resolved64##suffix; \
    qpoints->pAllocObjectResolved = art_quick_alloc_object_resolved##suffix; \
    qpoints->pAllocObjectInitialized = art_quick_alloc_object_initialized##suffix; \
    qpoints->pAllocObjectWithChecks = art_quick_alloc_object_with_checks##suffix; \
    qpoints->pAllocStringObject = art_quick_alloc_string_object##suffix; \
    qpoints->pAllocStringFromBytes = art_quick_alloc_string_from_bytes##suffix; \
    qpoints->pAllocStringFromChars = art_quick_alloc_string_from_chars##suffix; \
    qpoints->pAllocStringFromString = art_quick_alloc_string_from_string##suffix; \
  } \
}

// Generate the entrypoint functions.
#if !defined(__APPLE__) || !defined(__LP64__)
GENERATE_ENTRYPOINTS(_dlmalloc)
GENERATE_ENTRYPOINTS(_rosalloc)
GENERATE_ENTRYPOINTS(_bump_pointer)
GENERATE_ENTRYPOINTS(_tlab)
GENERATE_ENTRYPOINTS(_region)
GENERATE_ENTRYPOINTS(_region_tlab)
#endif

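// Allocator type and instrumentation state applied by the next call to
// ResetQuickAllocEntryPoints().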
static bool entry_points_instrumented = false;
static gc::AllocatorType entry_points_allocator = gc::kAllocatorTypeDlMalloc;

void SetQuickAllocEntryPointsAllocator(gc::AllocatorType allocator) {
  entry_points_allocator = allocator;
}

void SetQuickAllocEntryPointsInstrumented(bool instrumented) {
  entry_points_instrumented = instrumented;
}

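// Installs the allocation entrypoints that match the currently selected allocator and
// instrumentation state.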
void ResetQuickAllocEntryPoints(QuickEntryPoints* qpoints) {
#if !defined(__APPLE__) || !defined(__LP64__)
  switch (entry_points_allocator) {
    case gc::kAllocatorTypeDlMalloc: {
      SetQuickAllocEntryPoints_dlmalloc(qpoints, entry_points_instrumented);
      return;
    }
    case gc::kAllocatorTypeRosAlloc: {
      SetQuickAllocEntryPoints_rosalloc(qpoints, entry_points_instrumented);
      return;
    }
    case gc::kAllocatorTypeBumpPointer: {
      CHECK(kMovingCollector);
      SetQuickAllocEntryPoints_bump_pointer(qpoints, entry_points_instrumented);
      return;
    }
    case gc::kAllocatorTypeTLAB: {
      CHECK(kMovingCollector);
      SetQuickAllocEntryPoints_tlab(qpoints, entry_points_instrumented);
      return;
    }
    case gc::kAllocatorTypeRegion: {
      CHECK(kMovingCollector);
      SetQuickAllocEntryPoints_region(qpoints, entry_points_instrumented);
      return;
    }
    case gc::kAllocatorTypeRegionTLAB: {
      CHECK(kMovingCollector);
      SetQuickAllocEntryPoints_region_tlab(qpoints, entry_points_instrumented);
      return;
    }
    default:
      break;
  }
#else
  UNUSED(qpoints);
#endif
  UNIMPLEMENTED(FATAL);
  UNREACHABLE();
}

}  // namespace art