/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "arena_allocator-inl.h"
#include "arena_bit_vector.h"
#include "base/common_art_test.h"
#include "gtest/gtest.h"
#include "malloc_arena_pool.h"
#include "memory_tool.h"

namespace art {

class ArenaAllocatorTest : public testing::Test {
 protected:
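  // Counts the arenas currently owned by the allocator by walking its
  // singly-linked list of arenas (arena_head_ -> next_).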
  size_t NumberOfArenas(ArenaAllocator* allocator) {
    size_t result = 0u;
    for (Arena* a = allocator->arena_head_; a != nullptr; a = a->next_) {
      ++result;
    }
    return result;
  }
};

TEST_F(ArenaAllocatorTest, Test) {
  MallocArenaPool pool;
  ArenaAllocator allocator(&pool);
  ArenaBitVector bv(&allocator, 10, true);
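  // Bit vector storage is counted in 32-bit words: bit 5 fits in the first word,
  // while setting bit 35 expands the storage to a second word.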
  bv.SetBit(5);
  EXPECT_EQ(1U, bv.GetStorageSize());
  bv.SetBit(35);
  EXPECT_EQ(2U, bv.GetStorageSize());
}

TEST_F(ArenaAllocatorTest, MakeDefined) {
  // Regression test to make sure we mark the allocated area defined.
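  // Under a memory tool the allocator poisons unused arena memory; a newly
  // allocated chunk must be marked defined again, otherwise reading from
  // large_array below would be reported as an error.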
  MallocArenaPool pool;
  static constexpr size_t kSmallArraySize = 10;
  static constexpr size_t kLargeArraySize = 50;
  uint32_t* small_array;
  {
    // Allocate a small array from an arena and release it.
    ArenaAllocator allocator(&pool);
    small_array = allocator.AllocArray<uint32_t>(kSmallArraySize);
    ASSERT_EQ(0u, small_array[kSmallArraySize - 1u]);
  }
  {
    // Reuse the previous arena and make an allocation larger than the previous one,
    // including its red zone.
    ArenaAllocator allocator(&pool);
    uint32_t* large_array = allocator.AllocArray<uint32_t>(kLargeArraySize);
    ASSERT_EQ(0u, large_array[kLargeArraySize - 1u]);
    // Verify that the allocation was made on the same arena.
    ASSERT_EQ(small_array, large_array);
  }
}

TEST_F(ArenaAllocatorTest, LargeAllocations) {
  if (arena_allocator::kArenaAllocatorPreciseTracking) {
    printf("WARNING: TEST DISABLED FOR precise arena tracking\n");
    return;
  }

  {
    MallocArenaPool pool;
    ArenaAllocator allocator(&pool);
    // Note: Leaving some space for memory tool red zones.
    void* alloc1 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 5 / 8);
    void* alloc2 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 2 / 8);
    ASSERT_NE(alloc1, alloc2);
    ASSERT_EQ(1u, NumberOfArenas(&allocator));
  }
  {
    MallocArenaPool pool;
    ArenaAllocator allocator(&pool);
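    // These allocations are sized so that none of them fits in the space left by
    // the previous ones, so each of them gets an arena of its own.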
    void* alloc1 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 13 / 16);
    void* alloc2 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 11 / 16);
    ASSERT_NE(alloc1, alloc2);
    ASSERT_EQ(2u, NumberOfArenas(&allocator));
    void* alloc3 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 7 / 16);
    ASSERT_NE(alloc1, alloc3);
    ASSERT_NE(alloc2, alloc3);
    ASSERT_EQ(3u, NumberOfArenas(&allocator));
  }
  {
    MallocArenaPool pool;
    ArenaAllocator allocator(&pool);
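    // The third allocation is small enough to fit in the space left over from the
    // second one, so no third arena is needed.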
    void* alloc1 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 13 / 16);
    void* alloc2 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 9 / 16);
    ASSERT_NE(alloc1, alloc2);
    ASSERT_EQ(2u, NumberOfArenas(&allocator));
    // Note: Leaving some space for memory tool red zones.
    void* alloc3 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 5 / 16);
    ASSERT_NE(alloc1, alloc3);
    ASSERT_NE(alloc2, alloc3);
    ASSERT_EQ(2u, NumberOfArenas(&allocator));
  }
  {
    MallocArenaPool pool;
    ArenaAllocator allocator(&pool);
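    // Same as above, but with the two large allocations in the opposite order; the
    // small third allocation again reuses leftover arena space.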
    void* alloc1 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 9 / 16);
    void* alloc2 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 13 / 16);
    ASSERT_NE(alloc1, alloc2);
    ASSERT_EQ(2u, NumberOfArenas(&allocator));
    // Note: Leaving some space for memory tool red zones.
    void* alloc3 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 5 / 16);
    ASSERT_NE(alloc1, alloc3);
    ASSERT_NE(alloc2, alloc3);
    ASSERT_EQ(2u, NumberOfArenas(&allocator));
  }
  {
    MallocArenaPool pool;
    ArenaAllocator allocator(&pool);
    // Note: Leaving some space for memory tool red zones.
    for (size_t i = 0; i != 15; ++i) {
      // Allocate a small block; all fifteen of these fit in the first arena.
      allocator.Alloc(arena_allocator::kArenaDefaultSize * 1 / 16);
      ASSERT_EQ(i + 1u, NumberOfArenas(&allocator));
      // Allocate a block larger than the default arena size; it gets a separate arena.
      allocator.Alloc(arena_allocator::kArenaDefaultSize * 17 / 16);
      ASSERT_EQ(i + 2u, NumberOfArenas(&allocator));
    }
  }
}

TEST_F(ArenaAllocatorTest, AllocAlignment) {
  MallocArenaPool pool;
  ArenaAllocator allocator(&pool);
  for (size_t iterations = 0; iterations <= 10; ++iterations) {
    for (size_t size = 1; size <= ArenaAllocator::kAlignment + 1; ++size) {
      void* allocation = allocator.Alloc(size);
      EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(allocation))
          << reinterpret_cast<uintptr_t>(allocation);
    }
  }
}

TEST_F(ArenaAllocatorTest, ReallocReuse) {
  // Realloc does not reuse arenas when running under sanitization.
  TEST_DISABLED_FOR_MEMORY_TOOL();
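  // (With a memory tool active, each allocation is followed by a red zone, so the
  // allocator cannot extend a block in place and Realloc has to move it.)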

  {
    // Case 1: small aligned allocation, aligned extend inside arena.
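    // Growing the most recent allocation within the current arena is expected to
    // happen in place, so Realloc should return the original pointer.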
    MallocArenaPool pool;
    ArenaAllocator allocator(&pool);

    const size_t original_size = ArenaAllocator::kAlignment * 2;
    void* original_allocation = allocator.Alloc(original_size);

    const size_t new_size = ArenaAllocator::kAlignment * 3;
    void* realloc_allocation = allocator.Realloc(original_allocation, original_size, new_size);
    EXPECT_EQ(original_allocation, realloc_allocation);
  }

  {
    // Case 2: small aligned allocation, non-aligned extend inside arena.
    MallocArenaPool pool;
    ArenaAllocator allocator(&pool);

    const size_t original_size = ArenaAllocator::kAlignment * 2;
    void* original_allocation = allocator.Alloc(original_size);

    const size_t new_size = ArenaAllocator::kAlignment * 2 + (ArenaAllocator::kAlignment / 2);
    void* realloc_allocation = allocator.Realloc(original_allocation, original_size, new_size);
    EXPECT_EQ(original_allocation, realloc_allocation);
  }

  {
    // Case 3: small non-aligned allocation, aligned extend inside arena.
    MallocArenaPool pool;
    ArenaAllocator allocator(&pool);

    const size_t original_size = ArenaAllocator::kAlignment * 2 + (ArenaAllocator::kAlignment / 2);
    void* original_allocation = allocator.Alloc(original_size);

    const size_t new_size = ArenaAllocator::kAlignment * 4;
    void* realloc_allocation = allocator.Realloc(original_allocation, original_size, new_size);
    EXPECT_EQ(original_allocation, realloc_allocation);
  }

  {
    // Case 4: small non-aligned allocation, aligned non-extend inside arena.
    MallocArenaPool pool;
    ArenaAllocator allocator(&pool);

    const size_t original_size = ArenaAllocator::kAlignment * 2 + (ArenaAllocator::kAlignment / 2);
    void* original_allocation = allocator.Alloc(original_size);

    const size_t new_size = ArenaAllocator::kAlignment * 3;
    void* realloc_allocation = allocator.Realloc(original_allocation, original_size, new_size);
    EXPECT_EQ(original_allocation, realloc_allocation);
  }

  // The next part is brittle, as the default arena size is variable and we do not
  // know whether we are running under a memory tool (which adds red zones).

  {
    // Case 5: large allocation, aligned extend into next arena.
    MallocArenaPool pool;
    ArenaAllocator allocator(&pool);

    const size_t original_size = arena_allocator::kArenaDefaultSize -
        ArenaAllocator::kAlignment * 5;
    void* original_allocation = allocator.Alloc(original_size);

    const size_t new_size = arena_allocator::kArenaDefaultSize + ArenaAllocator::kAlignment * 2;
    void* realloc_allocation = allocator.Realloc(original_allocation, original_size, new_size);
    EXPECT_NE(original_allocation, realloc_allocation);
  }

  {
    // Case 6: large allocation, non-aligned extend into next arena.
    MallocArenaPool pool;
    ArenaAllocator allocator(&pool);

    const size_t original_size = arena_allocator::kArenaDefaultSize -
        ArenaAllocator::kAlignment * 4 -
        ArenaAllocator::kAlignment / 2;
    void* original_allocation = allocator.Alloc(original_size);

    const size_t new_size = arena_allocator::kArenaDefaultSize +
        ArenaAllocator::kAlignment * 2 +
        ArenaAllocator::kAlignment / 2;
    void* realloc_allocation = allocator.Realloc(original_allocation, original_size, new_size);
    EXPECT_NE(original_allocation, realloc_allocation);
  }
}

TEST_F(ArenaAllocatorTest, ReallocAlignment) {
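  // Same cases as ReallocReuse, but here we only check that the reallocated block
  // and a subsequent small allocation are both properly aligned.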
  {
    // Case 1: small aligned allocation, aligned extend inside arena.
    MallocArenaPool pool;
    ArenaAllocator allocator(&pool);

    const size_t original_size = ArenaAllocator::kAlignment * 2;
    void* original_allocation = allocator.Alloc(original_size);
    ASSERT_TRUE(IsAligned<ArenaAllocator::kAlignment>(original_allocation));

    const size_t new_size = ArenaAllocator::kAlignment * 3;
    void* realloc_allocation = allocator.Realloc(original_allocation, original_size, new_size);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(realloc_allocation));

    void* after_alloc = allocator.Alloc(1);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(after_alloc));
  }

  {
    // Case 2: small aligned allocation, non-aligned extend inside arena.
    MallocArenaPool pool;
    ArenaAllocator allocator(&pool);

    const size_t original_size = ArenaAllocator::kAlignment * 2;
    void* original_allocation = allocator.Alloc(original_size);
    ASSERT_TRUE(IsAligned<ArenaAllocator::kAlignment>(original_allocation));

    const size_t new_size = ArenaAllocator::kAlignment * 2 + (ArenaAllocator::kAlignment / 2);
    void* realloc_allocation = allocator.Realloc(original_allocation, original_size, new_size);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(realloc_allocation));

    void* after_alloc = allocator.Alloc(1);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(after_alloc));
  }

  {
    // Case 3: small non-aligned allocation, aligned extend inside arena.
    MallocArenaPool pool;
    ArenaAllocator allocator(&pool);

    const size_t original_size = ArenaAllocator::kAlignment * 2 + (ArenaAllocator::kAlignment / 2);
    void* original_allocation = allocator.Alloc(original_size);
    ASSERT_TRUE(IsAligned<ArenaAllocator::kAlignment>(original_allocation));

    const size_t new_size = ArenaAllocator::kAlignment * 4;
    void* realloc_allocation = allocator.Realloc(original_allocation, original_size, new_size);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(realloc_allocation));

    void* after_alloc = allocator.Alloc(1);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(after_alloc));
  }

  {
    // Case 4: small non-aligned allocation, aligned non-extend inside arena.
    MallocArenaPool pool;
    ArenaAllocator allocator(&pool);

    const size_t original_size = ArenaAllocator::kAlignment * 2 + (ArenaAllocator::kAlignment / 2);
    void* original_allocation = allocator.Alloc(original_size);
    ASSERT_TRUE(IsAligned<ArenaAllocator::kAlignment>(original_allocation));

    const size_t new_size = ArenaAllocator::kAlignment * 3;
    void* realloc_allocation = allocator.Realloc(original_allocation, original_size, new_size);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(realloc_allocation));

    void* after_alloc = allocator.Alloc(1);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(after_alloc));
  }

  // The next part is brittle, as the default arena size is variable and we do not
  // know whether we are running under a memory tool (which adds red zones).

  {
    // Case 5: large allocation, aligned extend into next arena.
    MallocArenaPool pool;
    ArenaAllocator allocator(&pool);

    const size_t original_size = arena_allocator::kArenaDefaultSize -
        ArenaAllocator::kAlignment * 5;
    void* original_allocation = allocator.Alloc(original_size);
    ASSERT_TRUE(IsAligned<ArenaAllocator::kAlignment>(original_allocation));

    const size_t new_size = arena_allocator::kArenaDefaultSize + ArenaAllocator::kAlignment * 2;
    void* realloc_allocation = allocator.Realloc(original_allocation, original_size, new_size);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(realloc_allocation));

    void* after_alloc = allocator.Alloc(1);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(after_alloc));
  }

  {
    // Case 6: large allocation, non-aligned extend into next arena.
    MallocArenaPool pool;
    ArenaAllocator allocator(&pool);

    const size_t original_size = arena_allocator::kArenaDefaultSize -
        ArenaAllocator::kAlignment * 4 -
        ArenaAllocator::kAlignment / 2;
    void* original_allocation = allocator.Alloc(original_size);
    ASSERT_TRUE(IsAligned<ArenaAllocator::kAlignment>(original_allocation));

    const size_t new_size = arena_allocator::kArenaDefaultSize +
        ArenaAllocator::kAlignment * 2 +
        ArenaAllocator::kAlignment / 2;
    void* realloc_allocation = allocator.Realloc(original_allocation, original_size, new_size);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(realloc_allocation));

    void* after_alloc = allocator.Alloc(1);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(after_alloc));
  }
}

}  // namespace art