
Searched refs:allocator (Results 1 – 25 of 170) sorted by relevance


/art/libartbase/base/
arena_allocator_test.cc
28 size_t NumberOfArenas(ArenaAllocator* allocator) { in NumberOfArenas() argument
30 for (Arena* a = allocator->arena_head_; a != nullptr; a = a->next_) { in NumberOfArenas()
39 ArenaAllocator allocator(&pool); in TEST_F() local
40 ArenaBitVector bv(&allocator, 10, true); in TEST_F()
55 ArenaAllocator allocator(&pool); in TEST_F() local
56 small_array = allocator.AllocArray<uint32_t>(kSmallArraySize); in TEST_F()
61 ArenaAllocator allocator(&pool); in TEST_F() local
62 uint32_t* large_array = allocator.AllocArray<uint32_t>(kLargeArraySize); in TEST_F()
77 ArenaAllocator allocator(&pool); in TEST_F() local
79 void* alloc1 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 5 / 8); in TEST_F()
[all …]
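
The hits above show the basic arena pattern used throughout libartbase: an allocator bound to a pool, typed array allocations, and arena-backed containers. A minimal sketch of that pattern follows, assuming ART's arena headers; the concrete pool type and the array size are not shown in the hits, so they are taken as given here.

```cpp
// Sketch of the usage pattern visible in arena_allocator_test.cc above.
// Assumes ART's libartbase headers; the pool is passed in because the
// concrete pool type is not part of these hits.
#include <cstdint>
#include "base/arena_allocator.h"
#include "base/arena_bit_vector.h"

void ArenaUsageSketch(ArenaPool* pool) {
  ArenaAllocator allocator(pool);  // draws arenas from the pool on demand

  // Typed array allocation, as in the kSmallArraySize/kLargeArraySize tests.
  uint32_t* array = allocator.AllocArray<uint32_t>(16);
  array[0] = 42u;

  // Arena-backed bit vector bound to the same allocator (see the
  // ArenaBitVector hits below).
  ArenaBitVector bv(&allocator, /*start_bits=*/10, /*expandable=*/true);
  bv.SetBit(5);

  // No explicit frees: memory is reclaimed when the allocator returns its
  // arenas to the pool at destruction.
}
```
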
arena_object.h
33 void* operator new(size_t size, ArenaAllocator* allocator) {
34 return allocator->Alloc(size, kAllocKind);
37 static void* operator new(size_t size, ScopedArenaAllocator* allocator) {
38 return allocator->Alloc(size, kAllocKind);
57 void* operator new(size_t size, ArenaAllocator* allocator) {
58 return allocator->Alloc(size, kAllocKind);
61 static void* operator new(size_t size, ScopedArenaAllocator* allocator) {
62 return allocator->Alloc(size, kAllocKind);
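
The arena_object.h overloads above let a class be constructed directly into arena memory with `new (allocator)`. Below is a hedged sketch of a class wired up that way; the class name and the kArenaAllocMisc kind are illustrative, not taken from the hits.

```cpp
// Illustrative only: a node type carrying the same placement operator new
// as the arena_object.h hits, constructed via `new (allocator)`.
#include <stddef.h>
#include "base/arena_allocator.h"

class ExampleNode {
 public:
  explicit ExampleNode(int value) : value_(value) {}

  // Same shape as the overload shown above; kArenaAllocMisc is assumed to
  // be an available ArenaAllocKind.
  void* operator new(size_t size, ArenaAllocator* allocator) {
    return allocator->Alloc(size, kArenaAllocMisc);
  }

  int value() const { return value_; }

 private:
  const int value_;
};

ExampleNode* MakeNode(ArenaAllocator* allocator) {
  // Same idiom as `new (allocator) HGoto()` in the optimizing-compiler hits below.
  return new (allocator) ExampleNode(/*value=*/7);
}
```
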
arena_bit_vector.cc
55 static ArenaBitVectorAllocator* Create(ArenaAlloc* allocator, ArenaAllocKind kind) { in Create() argument
56 void* storage = allocator->template Alloc<ArenaBitVectorAllocator>(kind); in Create()
57 return new (storage) ArenaBitVectorAllocator(allocator, kind); in Create()
72 ArenaBitVectorAllocator(ArenaAlloc* allocator, ArenaAllocKind kind) in ArenaBitVectorAllocator() argument
73 : ArenaBitVectorAllocatorKind(kind), allocator_(allocator) { } in ArenaBitVectorAllocator()
80 ArenaBitVector::ArenaBitVector(ArenaAllocator* allocator, in ArenaBitVector() argument
86 ArenaBitVectorAllocator<ArenaAllocator>::Create(allocator, kind)) { in ArenaBitVector()
89 ArenaBitVector::ArenaBitVector(ScopedArenaAllocator* allocator, in ArenaBitVector() argument
95 ArenaBitVectorAllocator<ScopedArenaAllocator>::Create(allocator, kind)) { in ArenaBitVector()
arena_bit_vector.h
34 static ArenaBitVector* Create(Allocator* allocator,
38 void* storage = allocator->template Alloc<ArenaBitVector>(kind);
39 return new (storage) ArenaBitVector(allocator, start_bits, expandable, kind);
42 ArenaBitVector(ArenaAllocator* allocator,
46 ArenaBitVector(ScopedArenaAllocator* allocator,
bit_table_test.cc
32 ScopedArenaAllocator allocator(&arena_stack); in TEST() local
36 BitTableBuilderBase<1> builder(&allocator); in TEST()
48 ScopedArenaAllocator allocator(&arena_stack); in TEST() local
53 BitTableBuilderBase<1> builder(&allocator); in TEST()
74 ScopedArenaAllocator allocator(&arena_stack); in TEST() local
79 BitTableBuilderBase<1> builder(&allocator); in TEST()
94 ScopedArenaAllocator allocator(&arena_stack); in TEST() local
99 BitTableBuilderBase<4> builder(&allocator); in TEST()
125 ScopedArenaAllocator allocator(&arena_stack); in TEST() local
127 BitTableBuilderBase<2> builder(&allocator); in TEST()
[all …]
allocator.h
106 class TrackingAllocatorImpl : public std::allocator<T> {
108 typedef typename std::allocator<T>::value_type value_type;
109 typedef typename std::allocator<T>::size_type size_type;
110 typedef typename std::allocator<T>::difference_type difference_type;
111 typedef typename std::allocator<T>::pointer pointer;
112 typedef typename std::allocator<T>::const_pointer const_pointer;
113 typedef typename std::allocator<T>::reference reference;
114 typedef typename std::allocator<T>::const_reference const_reference;
156 std::allocator<T>>::type;
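
TrackingAllocatorImpl layers allocation accounting on top of std::allocator<T> while re-exporting its standard typedefs. As a standalone illustration of that technique (not ART's implementation), a minimal counting allocator usable with standard containers might look like this:

```cpp
// Standalone illustration of a tracking allocator in the spirit of
// TrackingAllocatorImpl above; this is not ART's code.
#include <cstddef>
#include <memory>
#include <vector>

template <typename T>
class CountingAllocator : public std::allocator<T> {
 public:
  using value_type = T;

  CountingAllocator() = default;
  template <typename U>
  CountingAllocator(const CountingAllocator<U>&) {}  // allow container rebinding

  T* allocate(std::size_t n) {
    bytes_allocated_ += n * sizeof(T);
    return std::allocator<T>::allocate(n);
  }

  void deallocate(T* p, std::size_t n) {
    bytes_allocated_ -= n * sizeof(T);
    std::allocator<T>::deallocate(p, n);
  }

  static std::size_t bytes_allocated() { return bytes_allocated_; }

 private:
  static inline std::size_t bytes_allocated_ = 0;  // per-T counter, sketch only
};

std::size_t TrackVectorGrowth() {
  std::vector<int, CountingAllocator<int>> v;
  v.resize(1024);  // goes through CountingAllocator::allocate()
  return CountingAllocator<int>::bytes_allocated();
}
```
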
/art/compiler/utils/
jni_macro_assembler.cc
44 ArenaAllocator* allocator, in Create() argument
54 return MacroAsm32UniquePtr(new (allocator) arm::ArmVIXLJNIMacroAssembler(allocator)); in Create()
58 return MacroAsm32UniquePtr(new (allocator) x86::X86JNIMacroAssembler(allocator)); in Create()
70 ArenaAllocator* allocator, in Create() argument
79 return MacroAsm64UniquePtr(new (allocator) arm64::Arm64JNIMacroAssembler(allocator)); in Create()
83 return MacroAsm64UniquePtr(new (allocator) x86_64::X86_64JNIMacroAssembler(allocator)); in Create()
86 UNUSED(allocator); in Create()
/art/compiler/optimizing/
parallel_move_test.cc
56 explicit TestParallelMoveResolverWithSwap(ArenaAllocator* allocator) in TestParallelMoveResolverWithSwap() argument
57 : ParallelMoveResolverWithSwap(allocator) {} in TestParallelMoveResolverWithSwap()
99 explicit TestParallelMoveResolverNoSwap(ArenaAllocator* allocator) in TestParallelMoveResolverNoSwap() argument
100 : ParallelMoveResolverNoSwap(allocator), scratch_index_(kScratchRegisterStartIndexForTest) {} in TestParallelMoveResolverNoSwap()
154 static HParallelMove* BuildParallelMove(ArenaAllocator* allocator, in BuildParallelMove() argument
157 HParallelMove* moves = new (allocator) HParallelMove(allocator); in BuildParallelMove()
185 ArenaAllocator allocator(&pool); in TYPED_TEST() local
188 TypeParam resolver(&allocator); in TYPED_TEST()
190 resolver.EmitNativeCode(BuildParallelMove(&allocator, moves, arraysize(moves))); in TYPED_TEST()
199 TypeParam resolver(&allocator); in TYPED_TEST()
[all …]
stack_map_stream.h
39 explicit StackMapStream(ScopedArenaAllocator* allocator, InstructionSet instruction_set) in StackMapStream() argument
40 : allocator_(allocator), in StackMapStream()
42 stack_maps_(allocator), in StackMapStream()
43 register_masks_(allocator), in StackMapStream()
44 stack_masks_(allocator), in StackMapStream()
45 inline_infos_(allocator), in StackMapStream()
46 method_infos_(allocator), in StackMapStream()
47 dex_register_masks_(allocator), in StackMapStream()
48 dex_register_maps_(allocator), in StackMapStream()
49 dex_register_catalog_(allocator), in StackMapStream()
[all …]
live_interval_test.cc
27 ScopedArenaAllocator* allocator = pool.GetScopedAllocator(); in TEST() local
31 LiveInterval* interval = BuildInterval(ranges, arraysize(ranges), allocator); in TEST()
37 LiveInterval* interval = BuildInterval(ranges, arraysize(ranges), allocator); in TEST()
44 ScopedArenaAllocator* allocator = pool.GetScopedAllocator(); in TEST() local
48 LiveInterval* interval = BuildInterval(ranges, arraysize(ranges), allocator); in TEST()
58 LiveInterval* interval = BuildInterval(ranges, arraysize(ranges), allocator); in TEST()
72 ScopedArenaAllocator* allocator = pool.GetScopedAllocator(); in TEST() local
76 LiveInterval* interval = BuildInterval(ranges, arraysize(ranges), allocator); in TEST()
86 LiveInterval* interval = BuildInterval(ranges, arraysize(ranges), allocator); in TEST()
100 ScopedArenaAllocator* allocator = pool.GetScopedAllocator(); in TEST() local
[all …]
optimization.cc
163 ArenaAllocator* allocator, in ConstructOptimizations() argument
168 ArenaVector<HOptimization*> optimizations(allocator->Adapter()); in ConstructOptimizations()
190 opt = most_recent_side_effects = new (allocator) SideEffectsAnalysis(graph, pass_name); in ConstructOptimizations()
193 opt = most_recent_induction = new (allocator) HInductionVarAnalysis(graph, pass_name); in ConstructOptimizations()
200 opt = new (allocator) GVNOptimization(graph, *most_recent_side_effects, pass_name); in ConstructOptimizations()
204 opt = new (allocator) LICM(graph, *most_recent_side_effects, stats, pass_name); in ConstructOptimizations()
208 opt = new (allocator) HLoopOptimization( in ConstructOptimizations()
213 opt = new (allocator) BoundsCheckElimination( in ConstructOptimizations()
218 opt = new (allocator) LoadStoreElimination( in ConstructOptimizations()
225 opt = new (allocator) HConstantFolding(graph, pass_name); in ConstructOptimizations()
[all …]
nodes_vector.h
75 ArenaAllocator* allocator, in HVecOperation() argument
85 allocator, in HVecOperation()
260 ArenaAllocator* allocator, in HVecUnaryOperation() argument
266 allocator, in HVecUnaryOperation()
287 ArenaAllocator* allocator, in HVecBinaryOperation() argument
294 allocator, in HVecBinaryOperation()
318 ArenaAllocator* allocator, in HVecMemoryOperation() argument
325 allocator, in HVecMemoryOperation()
379 HVecReplicateScalar(ArenaAllocator* allocator, in HVecReplicateScalar() argument
385 kVecReplicateScalar, allocator, scalar, packed_type, vector_length, dex_pc) { in HVecReplicateScalar()
[all …]
parallel_move_resolver.h
34 explicit ParallelMoveResolver(ArenaAllocator* allocator) in ParallelMoveResolver() argument
35 : moves_(allocator->Adapter(kArenaAllocParallelMoveResolver)) { in ParallelMoveResolver()
56 explicit ParallelMoveResolverWithSwap(ArenaAllocator* allocator) in ParallelMoveResolverWithSwap() argument
57 : ParallelMoveResolver(allocator) {} in ParallelMoveResolverWithSwap()
125 explicit ParallelMoveResolverNoSwap(ArenaAllocator* allocator) in ParallelMoveResolverNoSwap() argument
126 : ParallelMoveResolver(allocator), in ParallelMoveResolverNoSwap()
127 scratches_(allocator->Adapter(kArenaAllocParallelMoveResolver)), in ParallelMoveResolverNoSwap()
128 pending_moves_(allocator->Adapter(kArenaAllocParallelMoveResolver)), in ParallelMoveResolverNoSwap()
129 allocator_(allocator) { in ParallelMoveResolverNoSwap()
bounds_check_elimination_test.cc
359 ArenaAllocator* allocator, in BuildSSAGraph1() argument
363 HBasicBlock* entry = new (allocator) HBasicBlock(graph); in BuildSSAGraph1()
366 HInstruction* parameter = new (allocator) HParameterValue( in BuildSSAGraph1()
374 HBasicBlock* block = new (allocator) HBasicBlock(graph); in BuildSSAGraph1()
377 block->AddInstruction(new (allocator) HGoto()); in BuildSSAGraph1()
379 HBasicBlock* loop_header = new (allocator) HBasicBlock(graph); in BuildSSAGraph1()
380 HBasicBlock* loop_body = new (allocator) HBasicBlock(graph); in BuildSSAGraph1()
381 HBasicBlock* exit = new (allocator) HBasicBlock(graph); in BuildSSAGraph1()
391 HPhi* phi = new (allocator) HPhi(allocator, 0, 0, DataType::Type::kInt32); in BuildSSAGraph1()
392 HInstruction* null_check = new (allocator) HNullCheck(parameter, 0); in BuildSSAGraph1()
[all …]
register_allocator.cc
32 RegisterAllocator::RegisterAllocator(ScopedArenaAllocator* allocator, in RegisterAllocator() argument
35 : allocator_(allocator), in RegisterAllocator()
39 std::unique_ptr<RegisterAllocator> RegisterAllocator::Create(ScopedArenaAllocator* allocator, in Create() argument
46 new (allocator) RegisterAllocatorLinearScan(allocator, codegen, analysis)); in Create()
49 new (allocator) RegisterAllocatorGraphColor(allocator, codegen, analysis)); in Create()
107 ScopedArenaAllocator allocator(codegen.GetGraph()->GetArenaStack()); in ValidateIntervals() local
109 allocator.Adapter(kArenaAllocRegisterAllocatorValidate)); in ValidateIntervals()
123 ArenaBitVector::Create(&allocator, max_end, false, kArenaAllocRegisterAllocatorValidate)); in ValidateIntervals()
instruction_simplifier_shared.cc
78 ArenaAllocator* allocator = mul->GetBlock()->GetGraph()->GetAllocator(); in TrySimpleMultiplyAccumulatePatterns() local
79 HMultiplyAccumulate* mulacc = new (allocator) HMultiplyAccumulate( in TrySimpleMultiplyAccumulatePatterns()
108 ArenaAllocator* allocator = mul->GetBlock()->GetGraph()->GetAllocator(); in TryCombineMultiplyAccumulate() local
140 new (allocator) HMultiplyAccumulate(type, in TryCombineMultiplyAccumulate()
153 new (allocator) HMultiplyAccumulate(type, in TryCombineMultiplyAccumulate()
258 ArenaAllocator* allocator = graph->GetAllocator(); in TryExtractArrayAccessAddress() local
261 HIntermediateAddress* address = new (allocator) HIntermediateAddress(array, offset, kNoDexPc); in TryExtractArrayAccessAddress()
292 ArenaAllocator* allocator = graph->GetAllocator(); in TryExtractVecArrayAccessAddress() local
331 new (allocator) HIntermediateAddressIndex(index, offset, shift, kNoDexPc); in TryExtractVecArrayAccessAddress()
342 ArenaAllocator* allocator = basic_block->GetGraph()->GetAllocator(); in TryReplaceSubSubWithSubAdd() local
[all …]
ssa_phi_elimination.cc
34 ScopedArenaAllocator allocator(graph_->GetArenaStack()); in MarkDeadPhis() local
37 ScopedArenaVector<HPhi*> worklist(allocator.Adapter(kArenaAllocSsaPhiElimination)); in MarkDeadPhis()
43 ScopedArenaSet<HPhi*> initially_live(allocator.Adapter(kArenaAllocSsaPhiElimination)); in MarkDeadPhis()
128 ScopedArenaAllocator allocator(graph_->GetArenaStack()); in Run() local
131 ScopedArenaVector<HPhi*> worklist(allocator.Adapter(kArenaAllocSsaPhiElimination)); in Run()
142 ArenaBitVector visited_phis_in_cycle(&allocator, in Run()
147 ScopedArenaVector<HPhi*> cycle_worklist(allocator.Adapter(kArenaAllocSsaPhiElimination)); in Run()
locations.cc
32 ArenaAllocator* allocator) in LocationSummary() argument
33 : inputs_(instruction->InputCount(), allocator->Adapter(kArenaAllocLocationSummary)), in LocationSummary()
34 temps_(allocator->Adapter(kArenaAllocLocationSummary)), in LocationSummary()
46 stack_mask_ = ArenaBitVector::Create(allocator, 0, true, kArenaAllocLocationSummary); in LocationSummary()
stack_map_test.cc
53 ScopedArenaAllocator allocator(&arena_stack); in TEST() local
54 StackMapStream stream(&allocator, kRuntimeISA); in TEST()
57 ArenaBitVector sp_mask(&allocator, 0, false); in TEST()
107 ScopedArenaAllocator allocator(&arena_stack); in TEST() local
108 StackMapStream stream(&allocator, kRuntimeISA); in TEST()
112 ArenaBitVector sp_mask1(&allocator, 0, true); in TEST()
126 ArenaBitVector sp_mask2(&allocator, 0, true); in TEST()
134 ArenaBitVector sp_mask3(&allocator, 0, true); in TEST()
142 ArenaBitVector sp_mask4(&allocator, 0, true); in TEST()
301 ScopedArenaAllocator allocator(&arena_stack); in TEST() local
[all …]
/art/tools/cpp-define-generator/
rosalloc.def
18 #include "gc/allocator/rosalloc.h"
22 art::gc::allocator::RosAlloc::kThreadLocalBracketQuantumSize - 1)
24 ~static_cast<uint32_t>(art::gc::allocator::RosAlloc::kThreadLocalBracketQuantumSize - 1))
26 ~static_cast<uint64_t>(art::gc::allocator::RosAlloc::kThreadLocalBracketQuantumSize - 1))
28 art::gc::allocator::RosAlloc::kThreadLocalBracketQuantumSizeShift)
30 art::gc::allocator::RosAlloc::kMaxThreadLocalBracketSize)
32 art::gc::allocator::RosAlloc::RunFreeListHeadOffset())
34 art::gc::allocator::RosAlloc::RunFreeListOffset())
36 art::gc::allocator::RosAlloc::RunFreeListSizeOffset())
38 art::gc::allocator::RosAlloc::RunSlotNextOffset())
/art/compiler/trampolines/
trampoline_compiler.cc
53 ArenaAllocator* allocator, EntryPointCallingConvention abi, ThreadOffset32 offset) { in CreateTrampoline() argument
57 ArmVIXLAssembler assembler(allocator); in CreateTrampoline()
94 ArenaAllocator* allocator, EntryPointCallingConvention abi, ThreadOffset64 offset) { in CreateTrampoline() argument
95 Arm64Assembler assembler(allocator); in CreateTrampoline()
132 static std::unique_ptr<const std::vector<uint8_t>> CreateTrampoline(ArenaAllocator* allocator, in CreateTrampoline() argument
134 X86Assembler assembler(allocator); in CreateTrampoline()
153 static std::unique_ptr<const std::vector<uint8_t>> CreateTrampoline(ArenaAllocator* allocator, in CreateTrampoline() argument
155 x86_64::X86_64Assembler assembler(allocator); in CreateTrampoline()
176 ArenaAllocator allocator(&pool); in CreateTrampoline64() local
180 return arm64::CreateTrampoline(&allocator, abi, offset); in CreateTrampoline64()
[all …]
/art/test/130-hprof/src/
Main.java
48 Class<?> allocator = loader.loadClass("Allocator"); in allocInDifferentLoader() local
49 return allocator.getDeclaredMethod("allocObject", null).invoke(null); in allocInDifferentLoader()
135 Allocator allocator = new Allocator(); in testGcAndDump() local
136 Dumper dumper = new Dumper(allocator); in testGcAndDump()
137 allocator.start(); in testGcAndDump()
140 allocator.join(); in testGcAndDump()
164 Dumper(Allocator allocator) { in Dumper() argument
165 this.allocator = allocator; in Dumper()
167 Allocator allocator; field in Main.Dumper
173 allocator.running = false; in run()
/art/runtime/gc/
allocator_type.h
42 inline constexpr bool IsTLABAllocator(AllocatorType allocator) { in IsTLABAllocator() argument
43 return allocator == kAllocatorTypeTLAB || allocator == kAllocatorTypeRegionTLAB; in IsTLABAllocator()
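
IsTLABAllocator in allocator_type.h is a constexpr predicate over the GC allocator enum, so classification can be checked at compile time. A self-contained sketch of the same shape follows; every enumerator other than the two TLAB kinds shown above is an assumption.

```cpp
// Self-contained sketch mirroring IsTLABAllocator above; the non-TLAB
// enumerators are assumptions for illustration only.
enum AllocatorType {
  kAllocatorTypeBumpPointer,  // assumed
  kAllocatorTypeTLAB,         // thread-local allocation buffer (from the hit above)
  kAllocatorTypeRegion,       // assumed
  kAllocatorTypeRegionTLAB,   // region-backed TLAB (from the hit above)
};

inline constexpr bool IsTLABAllocator(AllocatorType allocator) {
  return allocator == kAllocatorTypeTLAB || allocator == kAllocatorTypeRegionTLAB;
}

// Because the predicate is constexpr, the classification can be verified statically.
static_assert(IsTLABAllocator(kAllocatorTypeRegionTLAB), "region TLAB is a TLAB allocator");
static_assert(!IsTLABAllocator(kAllocatorTypeBumpPointer), "bump pointer is not TLAB");
```
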
/art/compiler/jni/quick/
calling_convention.cc
44 ArenaAllocator* allocator, in Create() argument
54 new (allocator) arm::ArmManagedRuntimeCallingConvention( in Create()
60 new (allocator) arm64::Arm64ManagedRuntimeCallingConvention( in Create()
66 new (allocator) x86::X86ManagedRuntimeCallingConvention( in Create()
72 new (allocator) x86_64::X86_64ManagedRuntimeCallingConvention( in Create()
133 std::unique_ptr<JniCallingConvention> JniCallingConvention::Create(ArenaAllocator* allocator, in Create() argument
144 new (allocator) arm::ArmJniCallingConvention( in Create()
150 new (allocator) arm64::Arm64JniCallingConvention( in Create()
156 new (allocator) x86::X86JniCallingConvention( in Create()
162 new (allocator) x86_64::X86_64JniCallingConvention( in Create()
/art/runtime/gc/space/
rosalloc_space.cc
50 art::gc::allocator::RosAlloc* rosalloc, in RosAllocSpace()
83 allocator::RosAlloc* rosalloc = CreateRosAlloc(mem_map.Begin(), in CreateFromMemMap()
179 allocator::RosAlloc* RosAllocSpace::CreateRosAlloc(void* begin, size_t morecore_start, in CreateRosAlloc()
188 allocator::RosAlloc* rosalloc = new art::gc::allocator::RosAlloc( in CreateRosAlloc()
191 art::gc::allocator::RosAlloc::kPageReleaseModeAll : in CreateRosAlloc()
192 art::gc::allocator::RosAlloc::kPageReleaseModeSizeAndEnd, in CreateRosAlloc()
226 void* allocator, in CreateInstance() argument
237 reinterpret_cast<allocator::RosAlloc*>(allocator), in CreateInstance()
249 reinterpret_cast<allocator::RosAlloc*>(allocator), in CreateInstance()
360 InspectAllRosAlloc(art::gc::allocator::RosAlloc::BytesAllocatedCallback, &bytes_allocated, false); in GetBytesAllocated()
[all …]
