Lines matching refs:begin in HeapWalker

In Allocation():
  45  bool HeapWalker::Allocation(uintptr_t begin, uintptr_t end) {
  46    if (end == begin) {
  47      end = begin + 1;
  49    begin = UntagAddress(begin);
  51    Range range{begin, end};
  53        (begin < valid_mappings_range_.begin || end > valid_mappings_range_.end)) {
  55                reinterpret_cast<void*>(begin), reinterpret_cast<void*>(end),
  56                reinterpret_cast<void*>(valid_mappings_range_.begin),
  61      valid_allocations_range_.begin = std::min(valid_allocations_range_.begin, begin);
  68      MEM_ALOGE("range %p-%p overlaps with existing range %p-%p", reinterpret_cast<void*>(begin),
  69                reinterpret_cast<void*>(end), reinterpret_cast<void*>(overlap.begin),
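
The Allocation() lines above show three recurring moves: zero-length allocations are widened to one byte, each allocation is stored as a half-open [begin, end) Range, and a running valid_allocations_range_ is kept with std::min/std::max. A minimal, hypothetical C++ sketch of that pattern follows; Range, Tracker, and the map comparator are illustrative stand-ins, not the library's actual types.

  #include <algorithm>
  #include <cstdint>
  #include <map>

  struct Range {
    uintptr_t begin;
    uintptr_t end;  // half-open: [begin, end)
    // Two ranges compare "equal" (neither less than the other) iff they overlap,
    // so inserting an overlapping range into a std::map fails.
    bool operator<(const Range& other) const { return end <= other.begin; }
  };

  struct Tracker {
    Range valid_allocations{UINTPTR_MAX, 0};
    std::map<Range, int> allocations;

    bool Allocation(uintptr_t begin, uintptr_t end) {
      if (end == begin) {
        end = begin + 1;  // give zero-length allocations a one-byte footprint
      }
      Range range{begin, end};
      auto inserted = allocations.emplace(range, 0);
      if (inserted.second) {
        // Track the overall bounds of all registered allocations.
        valid_allocations.begin = std::min(valid_allocations.begin, begin);
        valid_allocations.end = std::max(valid_allocations.end, end);
        return true;
      }
      return false;  // overlaps an existing range
    }
  };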
In WordContainsAllocationPtr():
  94    if (value >= valid_allocations_range_.begin && value < valid_allocations_range_.end) {
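
Line 94 is the conservative fast filter: a word can only point into a tracked allocation if it lies inside the overall half-open bounds recorded so far. A hypothetical standalone version of that check (Range and MaybeAllocationPtr are illustrative names):

  #include <cstdint>

  struct Range {
    uintptr_t begin;
    uintptr_t end;  // half-open: [begin, end)
  };

  // Cheap pre-check before any per-allocation lookup.
  inline bool MaybeAllocationPtr(uintptr_t value, const Range& valid_allocations) {
    return value >= valid_allocations.begin && value < valid_allocations.end;
  }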
In Mapping():
  122  void HeapWalker::Mapping(uintptr_t begin, uintptr_t end) {
  123    valid_mappings_range_.begin = std::min(valid_mappings_range_.begin, begin);
In Root():
  127  void HeapWalker::Root(uintptr_t begin, uintptr_t end) {
  128    roots_.push_back(Range{begin, end});
  132    root_vals_.insert(root_vals_.end(), vals.begin(), vals.end());
In DetectLeaks():
  145    for (auto it = roots_.begin(); it != roots_.end(); it++) {
  150    vals.begin = reinterpret_cast<uintptr_t>(root_vals_.data());
  151    vals.end = vals.begin + root_vals_.size() * sizeof(uintptr_t);
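
Lines 150-151 expose the storage of the saved root values vector as an address range, so it can be scanned by the same range-walking path as ordinary memory regions. A hedged illustration of that trick, with hypothetical names (Range, AsRange):

  #include <cstdint>
  #include <vector>

  struct Range {
    uintptr_t begin;
    uintptr_t end;  // half-open: [begin, end)
  };

  // Treat the vector's own backing storage as a memory range so a generic
  // "scan this range for pointer-like words" routine can consume it.
  Range AsRange(const std::vector<uintptr_t>& vals) {
    Range r;
    r.begin = reinterpret_cast<uintptr_t>(vals.data());
    r.end = r.begin + vals.size() * sizeof(uintptr_t);
    return r;
  }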
In Leaked():
  168    for (auto it = allocations_.begin(); it != allocations_.end(); it++) {
  171      leak_bytes += it->first.end - it->first.begin;
  176    for (auto it = allocations_.begin(); it != allocations_.end(); it++) {
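
The Leaked() lines accumulate leak statistics by walking the allocation map and summing the byte size of each range as end - begin. A minimal sketch of that accounting, assuming a hypothetical AllocationInfo with a referenced flag marking reachable entries:

  #include <cstddef>
  #include <cstdint>
  #include <map>

  struct Range {
    uintptr_t begin;
    uintptr_t end;  // half-open: [begin, end)
    bool operator<(const Range& other) const { return end <= other.begin; }
  };

  struct AllocationInfo {
    bool referenced = false;  // hypothetical reachability flag
  };

  // Count unreferenced allocations and the total bytes they cover.
  void CountLeaks(const std::map<Range, AllocationInfo>& allocations,
                  size_t* num_leaks, size_t* leak_bytes) {
    *num_leaks = 0;
    *leak_bytes = 0;
    for (auto it = allocations.begin(); it != allocations.end(); ++it) {
      if (!it->second.referenced) {
        ++*num_leaks;
        *leak_bytes += it->first.end - it->first.begin;
      }
    }
  }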
In HandleSegFault():
  216    if (walking_range_.begin != 0U) {
  217      MEM_ALOGW("while walking range %p-%p", reinterpret_cast<void*>(walking_range_.begin),