/art/runtime/gc/space/ |
D | bump_pointer_space-inl.h |
    28  inline mirror::Object* BumpPointerSpace::Alloc(Thread*, size_t num_bytes, size_t* bytes_allocated,  in Alloc() argument
    31  num_bytes = RoundUp(num_bytes, kAlignment);  in Alloc()
    32  mirror::Object* ret = AllocNonvirtual(num_bytes);  in Alloc()
    34  *bytes_allocated = num_bytes;  in Alloc()
    36  *usable_size = num_bytes;  in Alloc()
    38  *bytes_tl_bulk_allocated = num_bytes;  in Alloc()
    43  inline mirror::Object* BumpPointerSpace::AllocThreadUnsafe(Thread* self, size_t num_bytes,  in AllocThreadUnsafe() argument
    48  num_bytes = RoundUp(num_bytes, kAlignment);  in AllocThreadUnsafe()
    50  if (end + num_bytes > growth_end_) {  in AllocThreadUnsafe()
    54  end_.store(end + num_bytes, std::memory_order_relaxed);  in AllocThreadUnsafe()
    [all …]
|
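
The bump_pointer_space-inl.h hits above show the whole fast path: round the request up to kAlignment, check it against growth_end_, then advance the end pointer. A minimal standalone sketch of that round-up-and-bump pattern follows; the BumpArena class, the 8-byte alignment, and the capacity check are simplified assumptions, not ART's real BumpPointerSpace.

    #include <atomic>
    #include <cstddef>
    #include <cstdint>

    // Simplified bump-pointer arena; mirrors the AllocThreadUnsafe lines above.
    class BumpArena {
     public:
      static constexpr size_t kAlignment = 8;  // Assumed object alignment.

      BumpArena(uint8_t* begin, size_t capacity)
          : end_(begin), growth_end_(begin + capacity) {}

      void* Alloc(size_t num_bytes, size_t* bytes_allocated) {
        num_bytes = (num_bytes + kAlignment - 1) & ~(kAlignment - 1);  // RoundUp.
        uint8_t* end = end_.load(std::memory_order_relaxed);
        if (num_bytes > static_cast<size_t>(growth_end_ - end)) {
          return nullptr;  // Equivalent of the "end + num_bytes > growth_end_" check.
        }
        end_.store(end + num_bytes, std::memory_order_relaxed);  // Bump the pointer.
        *bytes_allocated = num_bytes;  // The rounded size is what was consumed.
        return end;
      }

     private:
      std::atomic<uint8_t*> end_;  // First free byte.
      uint8_t* const growth_end_;  // Current allocation limit.
    };
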
D | rosalloc_space-inl.h |
    32  inline mirror::Object* RosAllocSpace::AllocCommon(Thread* self, size_t num_bytes,  in AllocCommon() argument
    42  rosalloc_->Alloc<kThreadSafe>(self, num_bytes, &rosalloc_bytes_allocated,  in AllocCommon()
    62  inline bool RosAllocSpace::CanAllocThreadLocal(Thread* self, size_t num_bytes) {  in CanAllocThreadLocal() argument
    63  return rosalloc_->CanAllocFromThreadLocalRun(self, num_bytes);  in CanAllocThreadLocal()
    66  inline mirror::Object* RosAllocSpace::AllocThreadLocal(Thread* self, size_t num_bytes,  in AllocThreadLocal() argument
    70  rosalloc_->AllocFromThreadLocalRun(self, num_bytes, bytes_allocated));  in AllocThreadLocal()
    73  inline size_t RosAllocSpace::MaxBytesBulkAllocatedForNonvirtual(size_t num_bytes) {  in MaxBytesBulkAllocatedForNonvirtual() argument
    74  return rosalloc_->MaxBytesBulkAllocatedFor(num_bytes);  in MaxBytesBulkAllocatedForNonvirtual()
|
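
The RosAllocSpace inlines above are thin forwarders into rosalloc_: CanAllocFromThreadLocalRun is a cheap predicate that gates the lock-free AllocFromThreadLocalRun fast path. The sketch below shows only that check-then-allocate calling pattern with a made-up ThreadLocalRun bucket; ART's real runs, size brackets, and accounting are much richer.

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Hypothetical stand-in for a thread-local "run": a bucket of equal-sized
    // slots owned by one thread, so allocation needs no locking.
    class ThreadLocalRun {
     public:
      ThreadLocalRun(size_t slot_size, size_t num_slots)
          : slot_size_(slot_size), storage_(slot_size * num_slots) {
        for (size_t i = 0; i < num_slots; ++i) {
          free_slots_.push_back(storage_.data() + i * slot_size);
        }
      }

      // Cheap predicate, as in CanAllocFromThreadLocalRun.
      bool CanAlloc(size_t num_bytes) const {
        return num_bytes <= slot_size_ && !free_slots_.empty();
      }

      // Fast path, as in AllocFromThreadLocalRun: pop a free slot, no locks.
      void* Alloc(size_t num_bytes, size_t* bytes_allocated) {
        if (!CanAlloc(num_bytes)) {
          return nullptr;  // Caller falls back to the shared, locked path.
        }
        void* slot = free_slots_.back();
        free_slots_.pop_back();
        *bytes_allocated = slot_size_;  // The whole slot is accounted, not num_bytes.
        return slot;
      }

     private:
      size_t slot_size_;
      std::vector<uint8_t> storage_;
      std::vector<uint8_t*> free_slots_;
    };
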
D | rosalloc_space.h |
    56  mirror::Object* AllocWithGrowth(Thread* self, size_t num_bytes, size_t* bytes_allocated,
    59  mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated,  in Alloc() argument
    61  return AllocNonvirtual(self, num_bytes, bytes_allocated, usable_size,  in Alloc()
    64  mirror::Object* AllocThreadUnsafe(Thread* self, size_t num_bytes, size_t* bytes_allocated,  in AllocThreadUnsafe() argument
    67  return AllocNonvirtualThreadUnsafe(self, num_bytes, bytes_allocated, usable_size,  in AllocThreadUnsafe()
    78  mirror::Object* AllocNonvirtual(Thread* self, size_t num_bytes, size_t* bytes_allocated,  in AllocNonvirtual() argument
    81  return AllocCommon(self, num_bytes, bytes_allocated, usable_size,  in AllocNonvirtual()
    84  mirror::Object* AllocNonvirtualThreadUnsafe(Thread* self, size_t num_bytes,  in AllocNonvirtualThreadUnsafe() argument
    88  return AllocCommon<false>(self, num_bytes, bytes_allocated, usable_size,  in AllocNonvirtualThreadUnsafe()
    94  ALWAYS_INLINE bool CanAllocThreadLocal(Thread* self, size_t num_bytes);
    [all …]
|
D | memory_tool_malloc_space-inl.h |
    34  size_t num_bytes,  in AdjustForMemoryTool() argument
    52  *usable_size_out = num_bytes;  in AdjustForMemoryTool()
    66  MEMORY_TOOL_MAKE_DEFINED(result, num_bytes);  in AdjustForMemoryTool()
    72  MEMORY_TOOL_MAKE_NOACCESS(reinterpret_cast<uint8_t*>(result) + num_bytes,  in AdjustForMemoryTool()
    73  usable_size - (num_bytes + kMemoryToolRedZoneBytes));  in AdjustForMemoryTool()
    94  size_t num_bytes,  in AllocWithGrowth() argument
    102  num_bytes + 2 * kMemoryToolRedZoneBytes,  in AllocWithGrowth()
    112  num_bytes,  in AllocWithGrowth()
    130  size_t num_bytes,  in Alloc() argument
    138  num_bytes + 2 * kMemoryToolRedZoneBytes,  in Alloc()
    [all …]
|
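
memory_tool_malloc_space-inl.h shows the red-zone scheme used when running under a memory tool: the underlying space is asked for num_bytes + 2 * kMemoryToolRedZoneBytes, the payload is returned offset past the leading red zone, and the trailing slack is poisoned. A sketch of that address arithmetic, with no-op macros standing in for MEMORY_TOOL_MAKE_DEFINED / MEMORY_TOOL_MAKE_NOACCESS and plain malloc standing in for the wrapped space:

    #include <cstddef>
    #include <cstdint>
    #include <cstdlib>

    // Stand-ins for ART's memory-tool macros; real builds poison/unpoison the
    // ranges through ASan or Valgrind client requests.
    #define MEMORY_TOOL_MAKE_DEFINED(p, n)  ((void)(p), (void)(n))
    #define MEMORY_TOOL_MAKE_NOACCESS(p, n) ((void)(p), (void)(n))

    static constexpr size_t kMemoryToolRedZoneBytes = 8;  // Example value.

    // Allocate num_bytes with a red zone on each side, as AdjustForMemoryTool does.
    void* AllocWithRedZones(size_t num_bytes, size_t* usable_size_out) {
      const size_t total = num_bytes + 2 * kMemoryToolRedZoneBytes;
      uint8_t* raw = static_cast<uint8_t*>(malloc(total));  // Underlying space.
      if (raw == nullptr) {
        return nullptr;
      }
      uint8_t* result = raw + kMemoryToolRedZoneBytes;  // Skip leading red zone.
      MEMORY_TOOL_MAKE_DEFINED(result, num_bytes);      // Payload is usable.
      // Everything past the payload, i.e. the trailing red zone, is inaccessible.
      MEMORY_TOOL_MAKE_NOACCESS(result + num_bytes,
                                total - (num_bytes + kMemoryToolRedZoneBytes));
      *usable_size_out = num_bytes;  // Red zones shrink the reported usable size.
      // Note: a matching free must pass result - kMemoryToolRedZoneBytes.
      return result;
    }
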
D | dlmalloc_space-inl.h |
    28  inline mirror::Object* DlMallocSpace::AllocNonvirtual(Thread* self, size_t num_bytes,  in AllocNonvirtual() argument
    35  obj = AllocWithoutGrowthLocked(self, num_bytes, bytes_allocated, usable_size,  in AllocNonvirtual()
    40  memset(obj, 0, num_bytes);  in AllocNonvirtual()
    55  Thread* /*self*/, size_t num_bytes,  in AllocWithoutGrowthLocked() argument
    59  mirror::Object* result = reinterpret_cast<mirror::Object*>(mspace_malloc(mspace_, num_bytes));  in AllocWithoutGrowthLocked()
|
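
dlmalloc_space-inl.h delegates to dlmalloc's mspace API (mspace_malloc) and then zero-fills the new object. A rough standalone analogue using plain malloc, since the mspace handle, the locking, and the usable-size reporting are ART-specific:

    #include <cstddef>
    #include <cstdlib>
    #include <cstring>

    // Rough analogue of DlMallocSpace::AllocNonvirtual: ask the underlying
    // allocator for num_bytes, then zero the storage so the new object starts
    // with default field values. malloc stands in for mspace_malloc; ART also
    // reports the allocator's usable size, which is omitted here.
    void* AllocZeroed(size_t num_bytes, size_t* bytes_allocated) {
      void* obj = malloc(num_bytes);
      if (obj != nullptr) {
        *bytes_allocated = num_bytes;
        memset(obj, 0, num_bytes);  // Matches the memset(obj, 0, num_bytes) above.
      }
      return obj;
    }
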
D | region_space-inl.h |
    32  size_t num_bytes,  in Alloc() argument
    36  num_bytes = RoundUp(num_bytes, kAlignment);  in Alloc()
    37  return AllocNonvirtual<false>(num_bytes, bytes_allocated, usable_size,  in Alloc()
    42  size_t num_bytes,  in AllocThreadUnsafe() argument
    47  return Alloc(self, num_bytes, bytes_allocated, usable_size, bytes_tl_bulk_allocated);  in AllocThreadUnsafe()
    51  inline mirror::Object* RegionSpace::AllocNonvirtual(size_t num_bytes,  in AllocNonvirtual() argument
    55  DCHECK_ALIGNED(num_bytes, kAlignment);  in AllocNonvirtual()
    57  if (LIKELY(num_bytes <= kRegionSize)) {  in AllocNonvirtual()
    59  obj = (kForEvac ? evac_region_ : current_region_)->Alloc(num_bytes,  in AllocNonvirtual()
    69  obj = (kForEvac ? evac_region_ : current_region_)->Alloc(num_bytes,  in AllocNonvirtual()
    [all …]
|
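
region_space-inl.h shows the size dispatch: requests of at most kRegionSize are bump-allocated in the current region (or the evacuation region during collection), anything larger goes to AllocLarge. A compact sketch of that dispatch with a toy Region; the 256 KiB region size and the bookkeeping are assumptions:

    #include <cstddef>
    #include <cstdint>

    static constexpr size_t kRegionSize = 256 * 1024;  // Assumed region size.

    // Toy region with a bump pointer; ART's Region also tracks state, live
    // bytes, and whether it belongs to the evacuation set.
    struct Region {
      uint8_t* top;
      uint8_t* end;

      void* Alloc(size_t num_bytes) {
        if (num_bytes > static_cast<size_t>(end - top)) {
          return nullptr;  // Region full; the caller switches to a fresh region.
        }
        void* result = top;
        top += num_bytes;
        return result;
      }
    };

    // Mirrors the size-based dispatch in RegionSpace::AllocNonvirtual.
    void* RegionAlloc(Region* current_region, size_t num_bytes) {
      if (num_bytes <= kRegionSize) {
        // Small request: bump-allocate inside the current region.
        return current_region->Alloc(num_bytes);
      }
      // Large request: ART hands this to AllocLarge(), which claims a run of
      // contiguous free regions. Not modeled here.
      return nullptr;
    }
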
D | memory_tool_malloc_space.h |
    34  mirror::Object* AllocWithGrowth(Thread* self, size_t num_bytes, size_t* bytes_allocated,
    37  mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated,
    39  mirror::Object* AllocThreadUnsafe(Thread* self, size_t num_bytes, size_t* bytes_allocated,
    53  size_t MaxBytesBulkAllocatedFor(size_t num_bytes) override;
|
D | dlmalloc_space.h |
    57  size_t num_bytes,
    63  size_t num_bytes,  in Alloc() argument
    67  return AllocNonvirtual(self, num_bytes, bytes_allocated, usable_size,  in Alloc()
    83  size_t MaxBytesBulkAllocatedFor(size_t num_bytes) override {  in MaxBytesBulkAllocatedFor() argument
    84  return num_bytes;  in MaxBytesBulkAllocatedFor()
    96  mirror::Object* AllocNonvirtual(Thread* self, size_t num_bytes, size_t* bytes_allocated,
    168  mirror::Object* AllocWithoutGrowthLocked(Thread* self, size_t num_bytes, size_t* bytes_allocated,
|
D | bump_pointer_space.h |
    42  typedef void(*WalkCallback)(void *start, void *end, size_t num_bytes, void* callback_arg);
    55  mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated,
    58  mirror::Object* AllocThreadUnsafe(Thread* self, size_t num_bytes, size_t* bytes_allocated,
    62  mirror::Object* AllocNonvirtual(size_t num_bytes);
    63  mirror::Object* AllocNonvirtualWithoutAccounting(size_t num_bytes);
|
D | malloc_space.h |
    41  typedef void(*WalkCallback)(void *start, void *end, size_t num_bytes, void* callback_arg);
    48  virtual mirror::Object* AllocWithGrowth(Thread* self, size_t num_bytes,
    52  mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated,
    65  virtual size_t MaxBytesBulkAllocatedFor(size_t num_bytes) = 0;
|
D | region_space.h |
    49  typedef void(*WalkCallback)(void *start, void *end, size_t num_bytes, void* callback_arg);
    69  size_t num_bytes,
    76  size_t num_bytes,
    83  ALWAYS_INLINE mirror::Object* AllocNonvirtual(size_t num_bytes,
    90  mirror::Object* AllocLarge(size_t num_bytes,
    433  ALWAYS_INLINE mirror::Object* Alloc(size_t num_bytes,
    603  void RecordThreadLocalAllocations(size_t num_objects, size_t num_bytes) {  in RecordThreadLocalAllocations() argument
    607  top_.store(begin_ + num_bytes, std::memory_order_relaxed);  in RecordThreadLocalAllocations()
|
D | large_object_space.cc |
    54  mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated,  in Alloc() argument
    58  LargeObjectMapSpace::Alloc(self, num_bytes + kMemoryToolRedZoneBytes * 2, bytes_allocated,  in Alloc()
    64  reinterpret_cast<uint8_t*>(object_without_rdz) + num_bytes,  in Alloc()
    67  *usable_size = num_bytes;  // Since we have redzones, shrink the usable size.  in Alloc()
    136  mirror::Object* LargeObjectMapSpace::Alloc(Thread* self, size_t num_bytes,  in Alloc() argument
    141  num_bytes,  in Alloc()
    500  mirror::Object* FreeListSpace::Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated,  in Alloc() argument
    503  const size_t allocation_size = RoundUp(num_bytes, kAlignment);  in Alloc()
|
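
FreeListSpace::Alloc (line 503 above) rounds the request to kAlignment before carving it from the free list, the same RoundUp used by the bump-pointer and region spaces. A standalone version of that helper for power-of-two alignments, with a small self-check; the 16-byte alignment value is only an example:

    #include <cassert>
    #include <cstddef>

    // RoundUp for power-of-two alignments, as used throughout these allocators.
    constexpr size_t RoundUp(size_t num_bytes, size_t alignment) {
      return (num_bytes + alignment - 1) & ~(alignment - 1);
    }

    int main() {
      constexpr size_t kAlignment = 16;       // Example alignment, not ART's constant.
      assert(RoundUp(0, kAlignment) == 0);
      assert(RoundUp(1, kAlignment) == 16);
      assert(RoundUp(16, kAlignment) == 16);
      assert(RoundUp(17, kAlignment) == 32);  // 17 bytes occupy two 16-byte units.
      return 0;
    }
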
D | rosalloc_space.cc |
    202  mirror::Object* RosAllocSpace::AllocWithGrowth(Thread* self, size_t num_bytes,  in AllocWithGrowth() argument
    212  result = AllocCommon(self, num_bytes, bytes_allocated, usable_size,  in AllocWithGrowth()
    330  void RosAllocSpace::Walk(void(*callback)(void *start, void *end, size_t num_bytes, void* callback_a…  in Walk() argument
    371  void (*callback)(void *start, void *end, size_t num_bytes, void* callback_arg),  in InspectAllRosAllocWithSuspendAll() argument
    384  void RosAllocSpace::InspectAllRosAlloc(void (*callback)(void *start, void *end, size_t num_bytes, v…  in InspectAllRosAlloc() argument
|
D | bump_pointer_space.cc |
    232  size_t num_bytes = obj->SizeOf();  in AllocationSizeNonvirtual() local
    234  *usable_size = RoundUp(num_bytes, kAlignment);  in AllocationSizeNonvirtual()
    236  return num_bytes;  in AllocationSizeNonvirtual()
|
D | dlmalloc_space.cc |
    167  mirror::Object* DlMallocSpace::AllocWithGrowth(Thread* self, size_t num_bytes,  in AllocWithGrowth() argument
    177  result = AllocWithoutGrowthLocked(self, num_bytes, bytes_allocated, usable_size,  in AllocWithGrowth()
    185  memset(result, 0, num_bytes);  in AllocWithGrowth()
    293  void DlMallocSpace::Walk(void(*callback)(void *start, void *end, size_t num_bytes, void* callback_a…  in Walk() argument
|
D | space.h |
    214  virtual mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated,
    218  virtual mirror::Object* AllocThreadUnsafe(Thread* self, size_t num_bytes, size_t* bytes_allocated,  in AllocThreadUnsafe() argument
    222  return Alloc(self, num_bytes, bytes_allocated, usable_size, bytes_tl_bulk_allocated);  in AllocThreadUnsafe()
|
D | zygote_space.h |
    51  mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated,
|
D | large_object_space.h |
    158  mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated,
    194  mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated,
|
D | region_space.cc |
    996  size_t num_bytes = obj->SizeOf();  in AllocationSizeNonvirtual() local
    998  if (LIKELY(num_bytes <= kRegionSize)) {  in AllocationSizeNonvirtual()
    1000  *usable_size = RoundUp(num_bytes, kAlignment);  in AllocationSizeNonvirtual()
    1003  *usable_size = RoundUp(num_bytes, kRegionSize);  in AllocationSizeNonvirtual()
    1006  return num_bytes;  in AllocationSizeNonvirtual()
|
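
region_space.cc (like the bump-pointer space above) recomputes an object's allocation size from the object itself via obj->SizeOf(): the usable size is rounded to kAlignment for ordinary objects and to whole regions for large ones. A sketch under the assumption of a toy object type and example constants:

    #include <cstddef>

    constexpr size_t kAlignment = 8;            // Assumed in-region alignment.
    constexpr size_t kRegionSize = 256 * 1024;  // Assumed region size.

    constexpr size_t RoundUp(size_t n, size_t a) { return (n + a - 1) & ~(a - 1); }

    // Toy stand-in for mirror::Object: ART derives the size from the object's
    // class (and length, for arrays), not from an allocator header.
    struct FakeObject {
      size_t size;
      size_t SizeOf() const { return size; }
    };

    size_t AllocationSizeNonvirtual(const FakeObject& obj, size_t* usable_size) {
      size_t num_bytes = obj.SizeOf();
      if (num_bytes <= kRegionSize) {
        // Normal object: usable size is its alignment-rounded footprint.
        *usable_size = RoundUp(num_bytes, kAlignment);
      } else {
        // Large object: it occupies a whole run of regions.
        *usable_size = RoundUp(num_bytes, kRegionSize);
      }
      return num_bytes;
    }
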
/art/runtime/mirror/ |
D | object.cc |
    79  size_t num_bytes) {  in CopyObject() argument
    85  num_bytes -= offset;  in CopyObject()
    89  while (num_bytes >= sizeof(uintptr_t)) {  in CopyObject()
    95  num_bytes -= sizeof(uintptr_t);  in CopyObject()
    98  if (sizeof(uintptr_t) != sizeof(uint32_t) && num_bytes >= sizeof(uint32_t)) {  in CopyObject()
    104  num_bytes -= sizeof(uint32_t);  in CopyObject()
    108  while (num_bytes > 0) {  in CopyObject()
    114  num_bytes -= sizeof(uint8_t);  in CopyObject()
    140  CopyObjectVisitor(Handle<Object>* orig, size_t num_bytes)  in CopyObjectVisitor() argument
    141  : orig_(orig), num_bytes_(num_bytes) {}  in CopyObjectVisitor()
    [all …]
|
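
The object.cc hits outline CopyObject's width-descending copy loop: machine words while possible, then one 32-bit chunk on 64-bit builds, then trailing bytes. A standalone version of just that loop structure; the offset handling, barriers, and the CopyObjectVisitor around it are omitted, and per-chunk memcpy is used here to stay strict-aliasing safe:

    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    // Copy num_bytes in descending chunk widths, as in the CopyObject loop above.
    void CopyBytes(uint8_t* dst, const uint8_t* src, size_t num_bytes) {
      // Word-sized chunks first.
      while (num_bytes >= sizeof(uintptr_t)) {
        std::memcpy(dst, src, sizeof(uintptr_t));
        dst += sizeof(uintptr_t);
        src += sizeof(uintptr_t);
        num_bytes -= sizeof(uintptr_t);
      }
      // On 64-bit targets, at most one 32-bit chunk can remain.
      if (sizeof(uintptr_t) != sizeof(uint32_t) && num_bytes >= sizeof(uint32_t)) {
        std::memcpy(dst, src, sizeof(uint32_t));
        dst += sizeof(uint32_t);
        src += sizeof(uint32_t);
        num_bytes -= sizeof(uint32_t);
      }
      // Trailing bytes.
      while (num_bytes > 0) {
        *dst++ = *src++;
        num_bytes -= sizeof(uint8_t);
      }
    }
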
/art/libelffile/dwarf/ |
D | expression.h |
    75  void WriteOpPiece(uint32_t num_bytes) {  in WriteOpPiece() argument
    77  PushUleb128(num_bytes);  in WriteOpPiece()
    84  void WriteOpDerefSize(uint8_t num_bytes) {  in WriteOpDerefSize() argument
    86  PushUint8(num_bytes);  in WriteOpDerefSize()
|
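
expression.h's WriteOpPiece and WriteOpDerefSize follow the standard DWARF expression encoding: a one-byte opcode, then its operand (a ULEB128 byte count for DW_OP_piece, a single byte for DW_OP_deref_size). A minimal encoder sketch; the opcode values are the standard DWARF ones, but the buffer class below is a stand-in for ART's Expression/Writer:

    #include <cstdint>
    #include <vector>

    // Minimal DWARF expression encoder: one-byte opcode, then the operand.
    class ExpressionSketch {
     public:
      // DW_OP_piece (0x93): ULEB128 size operand.
      void WriteOpPiece(uint32_t num_bytes) {
        PushUint8(0x93);
        PushUleb128(num_bytes);
      }

      // DW_OP_deref_size (0x94): single-byte size operand.
      void WriteOpDerefSize(uint8_t num_bytes) {
        PushUint8(0x94);
        PushUint8(num_bytes);
      }

      const std::vector<uint8_t>& data() const { return data_; }

     private:
      void PushUint8(uint8_t value) { data_.push_back(value); }

      // Unsigned LEB128: 7 bits per byte, high bit set on all but the last byte.
      void PushUleb128(uint32_t value) {
        while (value >= 0x80) {
          data_.push_back(static_cast<uint8_t>((value & 0x7f) | 0x80));
          value >>= 7;
        }
        data_.push_back(static_cast<uint8_t>(value));
      }

      std::vector<uint8_t> data_;
    };
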
D | writer.h |
    119  void PushData(const uint8_t* ptr, size_t num_bytes) {  in PushData() argument
    120  data_->insert(data_->end(), ptr, ptr + num_bytes);  in PushData()
    123  void PushData(const char* ptr, size_t num_bytes) {  in PushData() argument
    124  data_->insert(data_->end(), ptr, ptr + num_bytes);  in PushData()
|
D | debug_info_entry_writer.h |
    95  void WriteBlock(Attribute attrib, const uint8_t* ptr, size_t num_bytes) {  in WriteBlock() argument
    97  this->PushUleb128(num_bytes);  in WriteBlock()
    98  this->PushData(ptr, num_bytes);  in WriteBlock()
|
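
WriteBlock in debug_info_entry_writer.h emits a DWARF block-form attribute value: a ULEB128 length followed by num_bytes of raw data (PushUleb128, then PushData). A free-function sketch of that length-prefixed encoding; ART's writer additionally records the attribute and form in the abbreviation table, which is not modeled here:

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Length-prefixed DWARF block: a ULEB128 byte count, then the raw bytes.
    void WriteBlock(std::vector<uint8_t>* out, const uint8_t* ptr, size_t num_bytes) {
      // ULEB128-encode num_bytes (as PushUleb128 does).
      size_t value = num_bytes;
      while (value >= 0x80) {
        out->push_back(static_cast<uint8_t>((value & 0x7f) | 0x80));
        value >>= 7;
      }
      out->push_back(static_cast<uint8_t>(value));
      // Then the block payload itself (as PushData does).
      out->insert(out->end(), ptr, ptr + num_bytes);
    }
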
/art/runtime/ |
D | art_field.cc |
    34  void ArtField::SetOffset(MemberOffset num_bytes) {  in SetOffset() argument
    36  DCHECK_ALIGNED_PARAM(num_bytes.Uint32Value(),  in SetOffset()
    39  offset_ = num_bytes.Uint32Value();  in SetOffset()
|
/art/runtime/gc/collector/ |
D | object_byte_pair.h |
    27  explicit ObjectBytePair(uint64_t num_objects = 0, int64_t num_bytes = 0)
    28  : objects(num_objects), bytes(num_bytes) {}  in objects()
|