Lines Matching refs:descriptor

255 static uint64_t GetNextTimestamp(JITDescriptor& descriptor) { in GetNextTimestamp() argument
256 return std::max(descriptor.timestamp_ + 1, NanoTime()); in GetNextTimestamp()
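The GetNextTimestamp() matches compute a stamp that is both current and strictly greater than the previous one, so descriptor updates stay ordered even if the clock does not advance between two writes. A minimal sketch of that idea, using a steady-clock stand-in for NanoTime(); the names are illustrative, not ART's:

    // Minimal strictly-increasing timestamp sketch (illustrative names).
    #include <algorithm>
    #include <chrono>
    #include <cstdint>

    struct Descriptor {
      uint64_t timestamp = 0;  // Timestamp of the last published update.
    };

    static uint64_t NowNanos() {
      return static_cast<uint64_t>(
          std::chrono::duration_cast<std::chrono::nanoseconds>(
              std::chrono::steady_clock::now().time_since_epoch()).count());
    }

    // Newer than the clock *and* strictly greater than the last value, so two
    // updates within the same clock tick still get distinct, ordered stamps.
    static uint64_t NextTimestamp(const Descriptor& d) {
      return std::max(d.timestamp + 1, NowNanos());
    }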
260 static void Seqlock(JITDescriptor& descriptor) { in Seqlock() argument
261 DCHECK_EQ(descriptor.seqlock_.load(kNonRacingRelaxed) & 1, 0u) << "Already locked"; in Seqlock()
262 descriptor.seqlock_.fetch_add(1, std::memory_order_relaxed); in Seqlock()
268 static void Sequnlock(JITDescriptor& descriptor) { in Sequnlock() argument
269 DCHECK_EQ(descriptor.seqlock_.load(kNonRacingRelaxed) & 1, 1u) << "Already unlocked"; in Sequnlock()
272 descriptor.seqlock_.fetch_add(1, std::memory_order_relaxed); in Sequnlock()
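The Seqlock()/Sequnlock() matches show the writer side of a seqlock: the counter is odd while the descriptor is being modified and even when it is consistent, so a lock-free reader that observes an odd or changed counter retries. A minimal writer-side sketch follows; the fence placement is an assumption about code not visible in these matches, and a fully portable seqlock needs more care than shown here:

    // Writer-side seqlock sketch; fence placement is assumed, names illustrative.
    #include <atomic>
    #include <cassert>
    #include <cstdint>

    struct Descriptor {
      std::atomic<uint32_t> seqlock{0};
      uint64_t timestamp = 0;  // Stands in for the fields guarded by the seqlock.
    };

    static void SeqlockBegin(Descriptor& d) {
      assert((d.seqlock.load(std::memory_order_relaxed) & 1u) == 0u && "already locked");
      d.seqlock.fetch_add(1, std::memory_order_relaxed);    // Counter becomes odd.
      std::atomic_thread_fence(std::memory_order_release);  // Assumed placement.
    }

    static void SeqlockEnd(Descriptor& d) {
      assert((d.seqlock.load(std::memory_order_relaxed) & 1u) == 1u && "not locked");
      std::atomic_thread_fence(std::memory_order_release);  // Assumed placement.
      d.seqlock.fetch_add(1, std::memory_order_relaxed);    // Counter becomes even again.
    }

    static void PublishUpdate(Descriptor& d, uint64_t new_timestamp) {
      SeqlockBegin(d);
      d.timestamp = new_timestamp;  // Mutate only while the counter is odd.
      SeqlockEnd(d);
    }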
280 JITDescriptor& descriptor = NativeInfo::Descriptor(); in InsertNewEntry() local
281 const JITCodeEntry* prev = (next != nullptr ? next->prev_ : descriptor.tail_); in InsertNewEntry()
290 descriptor.tail_ = entry; in InsertNewEntry()
296 descriptor.head_.store(entry, std::memory_order_release); in InsertNewEntry()
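The InsertNewEntry() matches publish a fully initialized entry by storing it into head_ with release ordering, so a reader that acquire-loads head_ sees consistent links. A minimal single-writer sketch of insertion at the head (the real helper can also insert before an arbitrary entry); names are illustrative:

    // Single-writer publication of a new list head via a release store.
    #include <atomic>

    struct Node {
      std::atomic<Node*> next{nullptr};
      Node* prev = nullptr;
    };

    struct List {
      std::atomic<Node*> head{nullptr};
      Node* tail = nullptr;  // Only the writer thread touches the tail.
    };

    static void InsertAtHead(List& list, Node* node) {
      Node* old_head = list.head.load(std::memory_order_relaxed);
      node->next.store(old_head, std::memory_order_relaxed);
      node->prev = nullptr;
      if (old_head != nullptr) {
        old_head->prev = node;
      } else {
        list.tail = node;  // First node also becomes the tail.
      }
      list.head.store(node, std::memory_order_release);  // Publish to lock-free readers.
    }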
307 JITDescriptor& descriptor = NativeInfo::Descriptor(); in CreateJITCodeEntryInternal() local
310 if (descriptor.free_entries_ == nullptr) { in CreateJITCodeEntryInternal()
317 descriptor.free_entries_ = reinterpret_cast<const JITCodeEntry*>(memory); in CreateJITCodeEntryInternal()
331 uint64_t timestamp = GetNextTimestamp(descriptor); in CreateJITCodeEntryInternal()
334 const JITCodeEntry* next = descriptor.head_.load(kNonRacingRelaxed); // Insert at the head. in CreateJITCodeEntryInternal()
335 if (descriptor.zygote_head_entry_ != nullptr && Runtime::Current()->IsZygote()) { in CreateJITCodeEntryInternal()
340 const JITCodeEntry* entry = descriptor.free_entries_; in CreateJITCodeEntryInternal()
341 descriptor.free_entries_ = descriptor.free_entries_->next_.load(kNonRacingRelaxed); in CreateJITCodeEntryInternal()
353 Seqlock(descriptor); in CreateJITCodeEntryInternal()
355 descriptor.relevant_entry_ = entry; in CreateJITCodeEntryInternal()
356 descriptor.action_flag_ = JIT_REGISTER_FN; in CreateJITCodeEntryInternal()
357 descriptor.timestamp_ = timestamp; in CreateJITCodeEntryInternal()
358 Sequnlock(descriptor); in CreateJITCodeEntryInternal()
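The relevant_entry_/action_flag_/timestamp_ updates in CreateJITCodeEntryInternal() extend the standard GDB JIT compilation interface, in which a JIT links an in-memory symbol file into a list rooted at __jit_debug_descriptor and calls __jit_debug_register_code() so an attached debugger can react. Below is a minimal sketch of that plain interface: the structs and the hook follow GDB's documented protocol, while RegisterSymfile() and its arguments are illustrative only. ART's descriptor additionally carries the seqlock_, timestamp_, and free_entries_ fields seen in these matches.

    // Plain GDB JIT interface sketch; the symfile is expected to be an in-memory ELF.
    #include <cstdint>

    extern "C" {
    typedef enum { JIT_NOACTION = 0, JIT_REGISTER_FN, JIT_UNREGISTER_FN } jit_actions_t;

    struct jit_code_entry {
      jit_code_entry* next_entry;
      jit_code_entry* prev_entry;
      const char* symfile_addr;
      uint64_t symfile_size;
    };

    struct jit_descriptor {
      uint32_t version;      // Always 1 in the documented protocol.
      uint32_t action_flag;  // JIT_REGISTER_FN / JIT_UNREGISTER_FN.
      jit_code_entry* relevant_entry;
      jit_code_entry* first_entry;
    };

    // The debugger sets a breakpoint on this function; the body stays empty.
    void __attribute__((noinline)) __jit_debug_register_code() {
      asm volatile("" ::: "memory");
    }

    jit_descriptor __jit_debug_descriptor = {1, JIT_NOACTION, nullptr, nullptr};
    }  // extern "C"

    // Links one in-memory symbol file at the head of the list and notifies the
    // debugger, mirroring the action_flag_/relevant_entry_ updates above.
    static void RegisterSymfile(jit_code_entry* entry, const char* addr, uint64_t size) {
      entry->symfile_addr = addr;
      entry->symfile_size = size;
      entry->prev_entry = nullptr;
      entry->next_entry = __jit_debug_descriptor.first_entry;
      if (entry->next_entry != nullptr) {
        entry->next_entry->prev_entry = entry;
      }
      __jit_debug_descriptor.first_entry = entry;
      __jit_debug_descriptor.relevant_entry = entry;
      __jit_debug_descriptor.action_flag = JIT_REGISTER_FN;
      __jit_debug_register_code();
    }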
368 JITDescriptor& descriptor = NativeInfo::Descriptor(); in DeleteJITCodeEntryInternal() local
371 Seqlock(descriptor); in DeleteJITCodeEntryInternal()
377 descriptor.tail_ = prev; in DeleteJITCodeEntryInternal()
382 descriptor.head_.store(next, std::memory_order_relaxed); in DeleteJITCodeEntryInternal()
384 descriptor.relevant_entry_ = entry; in DeleteJITCodeEntryInternal()
385 descriptor.action_flag_ = JIT_UNREGISTER_FN; in DeleteJITCodeEntryInternal()
386 descriptor.timestamp_ = GetNextTimestamp(descriptor); in DeleteJITCodeEntryInternal()
387 Sequnlock(descriptor); in DeleteJITCodeEntryInternal()
405 writable_entry->next_.store(descriptor.free_entries_, kNonRacingRelaxed); in DeleteJITCodeEntryInternal()
407 descriptor.free_entries_ = entry; in DeleteJITCodeEntryInternal()
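Both paths recycle entries through a singly linked free list threaded through next_: the creation matches pop from free_entries_ before carving entries out of a fresh memory block, and the deletion matches push the freed entry back. A minimal sketch of that recycling, assuming a single writer; names are illustrative:

    // Free-list recycling sketch for debug-info entries (single writer assumed).
    #include <atomic>

    struct Entry {
      std::atomic<Entry*> next{nullptr};
      // ... debug-info payload would live here ...
    };

    struct FreeList {
      Entry* head = nullptr;  // Touched only by the writer thread.
    };

    static void Release(FreeList& fl, Entry* entry) {
      entry->next.store(fl.head, std::memory_order_relaxed);
      fl.head = entry;
    }

    static Entry* Acquire(FreeList& fl) {
      Entry* entry = fl.head;
      if (entry != nullptr) {
        fl.head = entry->next.load(std::memory_order_relaxed);
      }
      return entry;  // nullptr means the caller must allocate a new block.
    }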
456 JITDescriptor& descriptor = JitNativeInfo::Descriptor(); in NativeDebugInfoPreFork() local
457 if (descriptor.zygote_head_entry_ != nullptr) { in NativeDebugInfoPreFork()
468 InsertNewEntry<JitNativeInfo>(zygote_head, descriptor.head_); in NativeDebugInfoPreFork()
469 descriptor.zygote_head_entry_ = zygote_head; in NativeDebugInfoPreFork()
473 InsertNewEntry<JitNativeInfo>(&descriptor.application_tail_entry_, descriptor.head_); in NativeDebugInfoPreFork()
478 JITDescriptor& descriptor = JitNativeInfo::Descriptor(); in NativeDebugInfoPostFork() local
479 descriptor.free_entries_ = nullptr; // Don't reuse zygote's entries. in NativeDebugInfoPostFork()
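NativeDebugInfoPostFork() clears free_entries_ so a forked child never hands out entries backed by memory the zygote still owns. ART drives this from its own pre/post-fork hooks; purely for illustration, the same "forget the inherited free list in the child" idea can be sketched with pthread_atfork (the names here are hypothetical, not ART's):

    // Illustrative fork handling only; ART uses its own zygote fork hooks.
    #include <pthread.h>

    struct Entry;
    static Entry* g_free_entries = nullptr;  // Free list living in parent-owned memory.

    static void OnForkChild() {
      // The child must not hand out entries backed by memory the parent still
      // manages; drop the inherited free list and allocate fresh blocks lazily.
      g_free_entries = nullptr;
    }

    static void InstallForkHooks() {
      pthread_atfork(/*prepare=*/nullptr, /*parent=*/nullptr, /*child=*/OnForkChild);
    }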
492 JITDescriptor& descriptor = __jit_debug_descriptor; in RepackEntries() local
498 for (const JITCodeEntry* it = descriptor.head_; it != nullptr; it = it->next_) { in RepackEntries()
499 if (it == descriptor.zygote_head_entry_ && !is_zygote) { in RepackEntries()
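RepackEntries() walks the published list but treats zygote_head_entry_ as a boundary in a non-zygote process, since only entries this process created are candidates for repacking. A minimal sketch of that traversal, assuming the check stops the walk at the boundary (what actually happens there is not visible in these matches); names are illustrative:

    // Walk the entry list up to a sentinel owned by another process.
    #include <vector>

    struct Entry {
      const Entry* next = nullptr;
    };

    static std::vector<const Entry*> CollectOwnedEntries(const Entry* head,
                                                         const Entry* foreign_sentinel) {
      std::vector<const Entry*> owned;
      for (const Entry* it = head; it != nullptr; it = it->next) {
        if (it == foreign_sentinel) {
          break;  // Entries past this point belong to the other process.
        }
        owned.push_back(it);
      }
      return owned;
    }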