
Searched refs:GetGraph (Results 26 – 50 of 55) sorted by relevance


/art/compiler/optimizing/
graph_checker.cc
64 size_t current_size = GetGraph()->GetReversePostOrder().size(); in Run()
90 ScopedArenaAllocator allocator(GetGraph()->GetArenaStack()); in VisitBasicBlock()
142 if (GetGraph()->IsInSsaForm() && !block->EndsWithControlFlowInstruction()) { in VisitBasicBlock()
294 if (!GetGraph()->HasBoundsChecks()) { in VisitBoundsCheck()
306 if (GetGraph()->IsCompilingOsr()) { in VisitDeoptimize()
758 HBasicBlock* loop_block = GetGraph()->GetBlocks()[i]; in HandleLoop()
930 ScopedArenaAllocator allocator(GetGraph()->GetArenaStack()); in VisitPhi()
934 GetGraph()->GetCurrentInstructionId(), in VisitPhi()
locations.cc
56 instruction->GetBlock()->GetGraph()->GetAllocator()) {} in LocationSummary()
register_allocator.cc
60 for (HBasicBlock* block : codegen_->GetGraph()->GetLinearOrder()) { in ~RegisterAllocator()
107 ScopedArenaAllocator allocator(codegen.GetGraph()->GetArenaStack()); in ValidateIntervals()
reference_type_propagation.cc
103 return GetGraph()->GetHandleCache(); in GetHandleCache()
241 bound_type = new (receiver->GetBlock()->GetGraph()->GetAllocator()) HBoundType(receiver); in BoundTypeIn()
541 instr->SetReferenceTypeInfo(GetGraph()->GetInexactObjectRti()); in SetClassAsTypeInfo()
611 instr->SetReferenceTypeInfo(GetGraph()->GetInexactObjectRti()); in VisitUnresolvedInstanceFieldGet()
619 instr->SetReferenceTypeInfo(GetGraph()->GetInexactObjectRti()); in VisitUnresolvedStaticFieldGet()
840 instr->SetReferenceTypeInfo(GetGraph()->GetInexactObjectRti()); in UpdateArrayGet()
927 instr->SetReferenceTypeInfo(instr->GetBlock()->GetGraph()->GetInexactObjectRti()); in UpdatePhi()
code_generator_arm_vixl.cc
529 DCHECK(IsSameDexFile(cls_->GetDexFile(), arm_codegen->GetGraph()->GetDexFile())); in EmitNativeCode()
689 HParallelMove parallel_move(codegen->GetGraph()->GetAllocator()); in EmitNativeCode()
845 HParallelMove parallel_move(codegen->GetGraph()->GetAllocator()); in EmitNativeCode()
2046 if (GetGraph()->IsDebuggable()) { in SetupBlockedRegisters()
2106 if (GetGraph()->IsCompilingBaseline() && !Runtime::Current()->IsAotCompiler()) { in MaybeIncrementHotness()
2108 ProfilingInfo* info = GetGraph()->GetArtMethod()->GetProfilingInfo(kRuntimePointerSize); in MaybeIncrementHotness()
2257 if (GetGraph()->HasShouldDeoptimizeFlag()) { in GenerateFrameEntry()
2542 HParallelMove move(GetGraph()->GetAllocator()); in MoveLocation()
2782 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(if_instr); in VisitIf()
2799 LocationSummary* locations = new (GetGraph()->GetAllocator()) in VisitDeoptimize()
[all …]
code_generator.h
195 HGraph* GetGraph() const { return graph_; } in GetGraph() function
334 DCHECK(GetGraph()->HasShouldDeoptimizeFlag()); in GetStackOffsetOfShouldDeoptimizeFlag()
580 HParallelMove parallel_move(GetGraph()->GetAllocator()); in PrepareCriticalNativeCall()
761 size_t size = GetGraph()->GetBlocks().size(); in CommonInitializeLabels()
763 GetGraph()->GetAllocator()->AllocArray<LabelType>(size, kArenaAllocCodeGenerator); in CommonInitializeLabels()
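A minimal, self-contained sketch (not ART source; every class name below is an illustrative stand-in) of the accessor pattern these matches share: a pass or code generator keeps a graph_ pointer, exposes it through a trivial const GetGraph() accessor like the one defined at code_generator.h:195 above, and callers chain through it, most commonly GetGraph()->GetAllocator(), to obtain arena memory for new IR objects.

#include <cstddef>
#include <iostream>
#include <memory>
#include <new>
#include <vector>

// Stand-in for an arena-style allocator: hands out chunks owned by the arena.
class ArenaLikeAllocator {
 public:
  void* Alloc(size_t bytes) {
    chunks_.push_back(std::make_unique<unsigned char[]>(bytes));
    return chunks_.back().get();
  }
 private:
  std::vector<std::unique_ptr<unsigned char[]>> chunks_;
};

// Stand-in for the graph: owns shared state that passes reach through it.
class GraphLike {
 public:
  explicit GraphLike(ArenaLikeAllocator* allocator) : allocator_(allocator) {}
  ArenaLikeAllocator* GetAllocator() const { return allocator_; }
 private:
  ArenaLikeAllocator* allocator_;
};

// Stand-in node constructed in arena memory, in the spirit of the
// "new (GetGraph()->GetAllocator()) ..." snippets listed above.
struct NodeLike {
  explicit NodeLike(int id_in) : id(id_in) {}
  int id;
};

// Stand-in for a pass/visitor/codegen that holds the graph.
class PassLike {
 public:
  explicit PassLike(GraphLike* graph) : graph_(graph) {}
  // A trivial const accessor, mirroring code_generator.h:195 above.
  GraphLike* GetGraph() const { return graph_; }

  void Run() {
    // Chain through the accessor to the allocator and construct in place.
    void* memory = GetGraph()->GetAllocator()->Alloc(sizeof(NodeLike));
    NodeLike* node = new (memory) NodeLike(42);
    std::cout << "created node " << node->id << "\n";
  }
 private:
  GraphLike* graph_;
};

int main() {
  ArenaLikeAllocator allocator;
  GraphLike graph(&allocator);
  PassLike pass(&graph);
  pass.Run();
  return 0;
}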
dead_code_elimination.cc
127 return condition->GetBlock()->GetGraph()->GetIntConstant( in Evaluate()
194 bound = new (obj->GetBlock()->GetGraph()->GetAllocator()) HBoundType(obj); in RemoveNonNullControlDependences()
gvn.cc
320 if (!pure || instruction->GetBlock()->GetGraph()->HasIrreducibleLoops()) { in HashCode()
556 HBasicBlock* current_block = block->GetGraph()->GetBlocks()[block_id]; in FindVisitedBlockWithRecyclableSet()
graph_visualizer.cc
467 ? GetGraph()->GetDexFile().PrettyMethod(invoke->GetDexMethodIndex(), kWithSignature) in VisitInvoke()
778 GetGraph()->GetEntryBlock()->GetBlockId()); in DumpDisassemblyBlockForFrameEntry()
798 GetGraph()->HasExitBlock() ? GetGraph()->GetExitBlock()->GetBlockId() : -1, in DumpDisassemblyBlockForSlowPaths()
instruction_simplifier_arm.cc
140 new (GetGraph()->GetAllocator()) HDataProcWithShifterOp(use, in TryMergeIntoShifterOperand()
superblock_cloner.cc
1018 HGraph* graph = orig_block->GetGraph(); in CloneBasicBlock()
1149 HGraph* graph = loop_header->GetGraph(); in DoLoopTransformationImpl()
1201 info->GetHeader()->GetGraph()->GetAllocator()->Adapter(kArenaAllocSuperblockCloner)), in LoopClonerSimpleHelper()
1203 info->GetHeader()->GetGraph()->GetAllocator()->Adapter(kArenaAllocSuperblockCloner)), in LoopClonerSimpleHelper()
ssa_liveness_analysis_test.cc
46 HGraph* graph = block->GetGraph(); in CreateSuccessor()
intrinsics.h
88 HParallelMove parallel_move(codegen->GetGraph()->GetAllocator()); in INTRINSICS_LIST()
superblock_cloner.h
389 cloner_(info->GetHeader()->GetGraph(), &info->GetBlocks(), bb_map, hir_map, induction_range) { in LoopClonerHelper()
scheduler.cc
578 ScopedArenaAllocator allocator(block->GetGraph()->GetArenaStack()); in Schedule()
627 HGraph* graph = block->GetGraph(); in Schedule()
code_sinking.cc
48 if (instruction->GetBlock() == instruction->GetBlock()->GetGraph()->GetEntryBlock()) { in IsInterestingInstruction()
register_allocator_linear_scan.cc
81 reserved_out_slots_ = ptr_size / kVRegSize + codegen->GetGraph()->GetMaximumNumberOfOutVRegs(); in RegisterAllocatorLinearScan()
114 for (HBasicBlock* block : codegen_->GetGraph()->GetLinearOrder()) { in AllocateRegisters()
168 for (HBasicBlock* block : codegen_->GetGraph()->GetLinearPostOrder()) { in AllocateRegistersInternal()
code_generator_arm64.h
557 return GetGraph()->HasSIMD() in GetSlowPathFPWidth()
621 block_labels_.resize(GetGraph()->GetBlocks().size()); in Initialize()
629 jump_tables_.emplace_back(new (GetGraph()->GetAllocator()) JumpTableARM64(switch_instr)); in CreateJumpTable()
optimizing_compiler.cc
690 ArenaVector<linker::LinkerPatch> linker_patches(codegen->GetGraph()->GetAllocator()->Adapter()); in EmitAndSortLinkerPatches()
1405 codegen->GetGraph()->HasShouldDeoptimizeFlag(), in JitCompile()
1406 codegen->GetGraph()->GetCHASingleImplementationList())) { in JitCompile()
intrinsics.cc
245 ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator(); in ComputeIntegerValueOfLocations()
code_generator_arm_vixl.h
554 block_labels_.resize(GetGraph()->GetBlocks().size()); in Initialize()
768 jump_tables_.emplace_back(new (GetGraph()->GetAllocator()) JumpTableARMVIXL(switch_instr)); in CreateJumpTable()
ssa_liveness_analysis.h
1050 if (block_at_use.GetGraph()->HasIrreducibleLoops()) { in AddBackEdgeUses()
1286 HGraph* graph = instruction->GetBlock()->GetGraph(); in ShouldBeLiveForEnvironment()
ssa_builder.cc
299 HArrayGet* equivalent = new (aget->GetBlock()->GetGraph()->GetAllocator()) HArrayGet( in CreateFloatOrDoubleEquivalentOfArrayGet()
register_allocator_graph_color.cc
83 if (block->Dominates(block->GetGraph()->GetExitBlock())) { in CostForMoveAt()
593 reserved_out_slots_(codegen->GetGraph()->GetMaximumNumberOfOutVRegs()) { in RegisterAllocatorGraphColor()
784 for (HBasicBlock* block : codegen_->GetGraph()->GetLinearPostOrder()) { in ProcessInstructions()
code_generator_x86.h
384 return GetGraph()->HasSIMD() in GetSlowPathFPWidth()