/art/compiler/optimizing/ |
D | code_generator_vector_x86.cc |
    64   XmmRegister dst = locations->Out().AsFpuRegister<XmmRegister>();  in VisitVecReplicateScalar()
    106  DCHECK(locations->InAt(0).Equals(locations->Out()));  in VisitVecReplicateScalar()
    111  DCHECK(locations->InAt(0).Equals(locations->Out()));  in VisitVecReplicateScalar()
    161  __ movd(locations->Out().AsRegister<Register>(), src);  in VisitVecExtractScalar()
    166  __ movd(locations->Out().AsRegisterPairLow<Register>(), src);  in VisitVecExtractScalar()
    168  __ movd(locations->Out().AsRegisterPairHigh<Register>(), tmp);  in VisitVecExtractScalar()
    175  DCHECK(locations->InAt(0).Equals(locations->Out()));  // no code required  in VisitVecExtractScalar()
    218  XmmRegister dst = locations->Out().AsFpuRegister<XmmRegister>();  in VisitVecReduce()
    264  XmmRegister dst = locations->Out().AsFpuRegister<XmmRegister>();  in VisitVecCnv()
    282  XmmRegister dst = locations->Out().AsFpuRegister<XmmRegister>();  in VisitVecNeg()
    [all …]
|
D | code_generator_vector_x86_64.cc |
    59   XmmRegister dst = locations->Out().AsFpuRegister<XmmRegister>();  in VisitVecReplicateScalar()
    97   DCHECK(locations->InAt(0).Equals(locations->Out()));  in VisitVecReplicateScalar()
    102  DCHECK(locations->InAt(0).Equals(locations->Out()));  in VisitVecReplicateScalar()
    148  __ movd(locations->Out().AsRegister<CpuRegister>(), src, /*64-bit*/ false);  in VisitVecExtractScalar()
    152  __ movd(locations->Out().AsRegister<CpuRegister>(), src, /*64-bit*/ true);  in VisitVecExtractScalar()
    158  DCHECK(locations->InAt(0).Equals(locations->Out()));  // no code required  in VisitVecExtractScalar()
    201  XmmRegister dst = locations->Out().AsFpuRegister<XmmRegister>();  in VisitVecReduce()
    247  XmmRegister dst = locations->Out().AsFpuRegister<XmmRegister>();  in VisitVecCnv()
    265  XmmRegister dst = locations->Out().AsFpuRegister<XmmRegister>();  in VisitVecNeg()
    316  XmmRegister dst = locations->Out().AsFpuRegister<XmmRegister>();  in VisitVecAbs()
    [all …]
|
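Several of the x86 and x86-64 vector visitors above assert that the first input location equals the output location (the DCHECK(locations->InAt(0).Equals(locations->Out())) hits). The usual reason is that two-operand SSE instructions overwrite their first source operand, so the output is constrained to reuse the first input's register and the visitor only has to emit the destructive instruction. The sketch below is a minimal, self-contained model of that pattern; Location, LocationSummary, AllocateSameAsFirstInput and VisitVecAdd are illustrative stand-ins, not ART's real classes.

    // Toy model: why a visitor may assert InAt(0).Equals(Out()) before emitting
    // a destructive two-operand SSE instruction such as paddd.
    #include <cassert>
    #include <cstdio>

    struct Location {
      int reg = -1;
      bool Equals(const Location& other) const { return reg == other.reg; }
    };

    struct LocationSummary {
      Location in[2];
      Location out;
      Location InAt(int i) const { return in[i]; }
      Location Out() const { return out; }
    };

    // Stand-in for a "same as first input" output policy: the allocator simply
    // hands the first input's register to the output.
    void AllocateSameAsFirstInput(LocationSummary* locations) {
      locations->out = locations->in[0];
    }

    // Stand-in for a vector visitor: dst doubles as the first source operand.
    void VisitVecAdd(const LocationSummary* locations) {
      assert(locations->InAt(0).Equals(locations->Out()));  // mirrors the DCHECK above
      std::printf("paddd xmm%d, xmm%d\n",
                  locations->Out().reg, locations->InAt(1).reg);
    }

    int main() {
      LocationSummary locations;
      locations.in[0].reg = 0;  // first vector input in xmm0
      locations.in[1].reg = 1;  // second vector input in xmm1
      AllocateSameAsFirstInput(&locations);
      VisitVecAdd(&locations);  // prints: paddd xmm0, xmm1
    }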
D | code_generator_vector_arm_vixl.cc |
    56   vixl32::DRegister dst = DRegisterFrom(locations->Out());  in VisitVecReplicateScalar()
    137  vixl32::DRegister dst = DRegisterFrom(locations->Out());  in VisitVecReduce()
    174  vixl32::DRegister dst = DRegisterFrom(locations->Out());  in VisitVecNeg()
    203  vixl32::DRegister dst = DRegisterFrom(locations->Out());  in VisitVecAbs()
    230  vixl32::DRegister dst = DRegisterFrom(locations->Out());  in VisitVecNot()
    278  vixl32::DRegister dst = DRegisterFrom(locations->Out());  in VisitVecAdd()
    308  vixl32::DRegister dst = DRegisterFrom(locations->Out());  in VisitVecSaturationAdd()
    340  vixl32::DRegister dst = DRegisterFrom(locations->Out());  in VisitVecHalvingAdd()
    380  vixl32::DRegister dst = DRegisterFrom(locations->Out());  in VisitVecSub()
    410  vixl32::DRegister dst = DRegisterFrom(locations->Out());  in VisitVecSaturationSub()
    [all …]
|
D | code_generator_vector_arm64_sve.cc |
    82   VRegister dst = VRegisterFrom(locations->Out());  in VisitVecReplicateScalar()
    181  DCHECK(locations->InAt(0).Equals(locations->Out()));  // no code required  in VisitVecExtractScalar()
    223  VRegister dst = DRegisterFrom(locations->Out());  in VisitVecReduce()
    263  VRegister dst = VRegisterFrom(locations->Out());  in VisitVecCnv()
    281  VRegister dst = VRegisterFrom(locations->Out());  in VisitVecNeg()
    322  VRegister dst = VRegisterFrom(locations->Out());  in VisitVecAbs()
    361  VRegister dst = VRegisterFrom(locations->Out());  in VisitVecNot()
    413  VRegister dst = VRegisterFrom(locations->Out());  in VisitVecAdd()
    455  VRegister dst = VRegisterFrom(locations->Out());  in VisitVecSaturationAdd()
    487  VRegister dst = VRegisterFrom(locations->Out());  in VisitVecHalvingAdd()
    [all …]
|
D | code_generator_vector_arm64_neon.cc |
    82   VRegister dst = VRegisterFrom(locations->Out());  in VisitVecReplicateScalar()
    181  DCHECK(locations->InAt(0).Equals(locations->Out()));  // no code required  in VisitVecExtractScalar()
    223  VRegister dst = DRegisterFrom(locations->Out());  in VisitVecReduce()
    263  VRegister dst = VRegisterFrom(locations->Out());  in VisitVecCnv()
    281  VRegister dst = VRegisterFrom(locations->Out());  in VisitVecNeg()
    322  VRegister dst = VRegisterFrom(locations->Out());  in VisitVecAbs()
    361  VRegister dst = VRegisterFrom(locations->Out());  in VisitVecNot()
    413  VRegister dst = VRegisterFrom(locations->Out());  in VisitVecAdd()
    455  VRegister dst = VRegisterFrom(locations->Out());  in VisitVecSaturationAdd()
    487  VRegister dst = VRegisterFrom(locations->Out());  in VisitVecHalvingAdd()
    [all …]
|
D | ssa_liveness_analysis.cc |
    55   if (locations != nullptr && locations->Out().IsValid()) {  in NumberInstructions()
    73   if (locations != nullptr && locations->Out().IsValid()) {  in NumberInstructions()
    113  bool has_out_location = input->GetLocations()->Out().IsValid();  in RecursivelyProcessInputs()
    221  DCHECK(!current->GetLocations()->Out().IsValid());  in ComputeLiveRanges()
    456  Location out = locations->Out();  in FindHintAtDefinition()
    524  return defined_by->GetLocations()->Out();  in ToLocation()
|
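The ssa_liveness_analysis.cc hits suggest that the liveness pass uses locations->Out().IsValid() as the test for whether an instruction defines a value at all (NumberInstructions, RecursivelyProcessInputs). A rough, self-contained model of that check, with toy Instruction and Location types rather than ART's:

    #include <cstdio>
    #include <vector>

    struct Location {
      bool valid = false;
      bool IsValid() const { return valid; }
    };

    struct Instruction {
      const char* name;
      Location out;
      const Location& Out() const { return out; }
    };

    int main() {
      // A tiny "basic block": only instructions with a valid output location
      // define a value that needs a number / live range.
      std::vector<Instruction> block = {
          {"add",   {true}},
          {"store", {false}},
          {"mul",   {true}},
      };
      int next_value_number = 0;
      for (const Instruction& instr : block) {
        if (instr.Out().IsValid()) {  // mirrors locations->Out().IsValid()
          std::printf("%s defines v%d\n", instr.name, next_value_number++);
        } else {
          std::printf("%s defines no value\n", instr.name);
        }
      }
    }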
D | common_arm.h |
    90   return SRegisterFrom(instr->GetLocations()->Out());  in OutputSRegister()
    96   return DRegisterFrom(instr->GetLocations()->Out());  in OutputDRegister()
    136  return RegisterFrom(instr->GetLocations()->Out(), instr->GetType());  in OutputRegister()
|
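common_arm.h wraps the same lookup in small helpers (OutputSRegister, OutputDRegister, OutputRegister) so that ARM visitors do not repeat the GetLocations()->Out() conversion at every use. A sketch of that helper shape, using stand-in register and instruction types rather than ART's or VIXL's:

    #include <cassert>
    #include <cstdio>

    struct Location { int reg = -1; bool is_fpu = false; };
    struct SRegister { int code; };  // stand-in for a 32-bit FP register
    struct DRegister { int code; };  // stand-in for a 64-bit FP register

    struct LocationSummary {
      Location out;
      Location Out() const { return out; }
    };

    struct Instruction {
      LocationSummary locations;
      const LocationSummary* GetLocations() const { return &locations; }
    };

    SRegister SRegisterFrom(Location loc) { assert(loc.is_fpu); return {loc.reg}; }
    DRegister DRegisterFrom(Location loc) { assert(loc.is_fpu); return {loc.reg}; }

    // The helper shape the listing points at: one-liners over GetLocations()->Out().
    SRegister OutputSRegister(const Instruction* instr) {
      return SRegisterFrom(instr->GetLocations()->Out());
    }
    DRegister OutputDRegister(const Instruction* instr) {
      return DRegisterFrom(instr->GetLocations()->Out());
    }

    int main() {
      Instruction instr;
      instr.locations.out = {3, true};  // output lives in FP register 3
      std::printf("d%d\n", OutputDRegister(&instr).code);  // prints: d3
    }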
D | intrinsics_utils.h |
    73   Location out = invoke_->GetLocations()->Out();  in EmitNativeCode()
|
D | intrinsics_arm64.cc |
    181  Location output = locations->Out();  in MoveFPToInt()
    188  Location output = locations->Out();  in MoveIntToFP()
    240  Location out = locations->Out();  in GenReverseBytes()
    287  Location out = locations->Out();  in GenNumberOfLeadingZeros()
    314  Location out = locations->Out();  in GenNumberOfTrailingZeros()
    342  Location out = locations->Out();  in GenReverse()
    371  Register dst = RegisterFrom(instr->GetLocations()->Out(), type);  in GenBitCount()
    402  Register dst = RegisterFrom(invoke->GetLocations()->Out(), type);  in GenHighestOneBit()
    435  Register dst = RegisterFrom(invoke->GetLocations()->Out(), type);  in GenLowestOneBit()
    472  __ Fsqrt(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));  in VisitMathSqrt()
    [all …]
|
D | intrinsics_x86_64.cc |
    142   Location output = locations->Out();  in MoveFPToInt()
    148   Location output = locations->Out();  in MoveIntToFP()
    190   CpuRegister out = locations->Out().AsRegister<CpuRegister>();  in GenReverseBytes()
    248   XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();  in VisitMathSqrt()
    268   XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();  in GenSSE41FPToFPIntrinsic()
    321   CpuRegister out = locations->Out().AsRegister<CpuRegister>();  in VisitMathRoundFloat()
    362   CpuRegister out = locations->Out().AsRegister<CpuRegister>();  in VisitMathRoundDouble()
    1196  CpuRegister rsi = locations->Out().AsRegister<CpuRegister>();  in VisitStringEquals()
    1329  CpuRegister out = locations->Out().AsRegister<CpuRegister>();  in GenerateStringIndexOf()
    1637  CpuRegister out = locations->Out().AsRegister<CpuRegister>();  // == address, here for clarity.  in GenPeek()
    [all …]
|
D | code_generator_x86_64.cc |
    260   Location out = locations->Out();  in EmitNativeCode()
    312   DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));  in EmitNativeCode()
    326   x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));  in EmitNativeCode()
    347   || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));  in EmitNativeCode()
    383   x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));  in EmitNativeCode()
    1932  __ movl(flag->GetLocations()->Out().AsRegister<CpuRegister>(),  in VisitShouldDeoptimizeFlag()
    1983  DCHECK(locations->InAt(0).Equals(locations->Out()));  in VisitSelect()
    2028  codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());  in VisitSelect()
    2087  CpuRegister reg = locations->Out().AsRegister<CpuRegister>();  in HandleCondition()
    2259  CpuRegister out = locations->Out().AsRegister<CpuRegister>();  in VisitCompare()
    [all …]
|
D | code_generator_x86.cc |
    245   DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));  in EmitNativeCode()
    256   x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));  in EmitNativeCode()
    278   Location out = locations->Out();  in EmitNativeCode()
    331   || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));  in EmitNativeCode()
    373   x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));  in EmitNativeCode()
    1843  __ movl(flag->GetLocations()->Out().AsRegister<Register>(),  in VisitShouldDeoptimizeFlag()
    1894  DCHECK(locations->InAt(0).Equals(locations->Out()));  in VisitSelect()
    1957  codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());  in VisitSelect()
    2031  Register reg = locations->Out().AsRegister<Register>();  in HandleCondition()
    2538  Location out = locations->Out();  in VisitNeg()
    [all …]
|
D | intrinsics_x86.cc |
    191   Location output = locations->Out();  in MoveFPToInt()
    206   Location output = locations->Out();  in MoveIntToFP()
    272   Register out = locations->Out().AsRegister<Register>();  in GenReverseBytes()
    306   Location output = locations->Out();  in VisitLongReverseBytes()
    340   XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();  in VisitMathSqrt()
    360   XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();  in GenSSE41FPToFPIntrinsic()
    416   Register out = locations->Out().AsRegister<Register>();  in VisitMathRoundFloat()
    520   Location out_loc = locations->Out();  in GenLowestOneBit()
    1006  Register esi = locations->Out().AsRegister<Register>();  in VisitStringEquals()
    1143  Register out = locations->Out().AsRegister<Register>();  in GenerateStringIndexOf()
    [all …]
|
D | common_arm64.h |
    82   return RegisterFrom(instr->GetLocations()->Out(), instr->GetType());  in OutputRegister()
    121  return FPRegisterFrom(instr->GetLocations()->Out(), instr->GetType());  in OutputFPRegister()
|
D | intrinsics_arm_vixl.cc |
    225   Location output = locations->Out();  in MoveFPToInt()
    235   Location output = locations->Out();  in MoveIntToFP()
    298   vixl32::Register out = RegisterFrom(locations->Out());  in GenNumberOfLeadingZeros()
    342   vixl32::Register out = RegisterFrom(locations->Out());  in GenNumberOfTrailingZeros()
    483   vixl32::Register lo = LowRegisterFrom(invoke->GetLocations()->Out());  in VisitMemoryPeekLongNative()
    484   vixl32::Register hi = HighRegisterFrom(invoke->GetLocations()->Out());  in VisitMemoryPeekLongNative()
    574   Location trg_loc = locations->Out();  in GenUnsafeGet()
    2508  vixl32::Register out_reg_lo = LowRegisterFrom(locations->Out());  in VisitLongReverse()
    2509  vixl32::Register out_reg_hi = HighRegisterFrom(locations->Out());  in VisitLongReverse()
    2534  vixl32::Register out_reg_lo = LowRegisterFrom(locations->Out());  in VisitLongReverseBytes()
    [all …]
|
D | code_generator_arm_vixl.cc |
    518   Location out = locations->Out();  in EmitNativeCode()
    548   arm_codegen->Move32(locations->Out(), LocationFrom(r0));  in EmitNativeCode()
    572   DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));  in EmitNativeCode()
    584   arm_codegen->Move32(locations->Out(), LocationFrom(r0));  in EmitNativeCode()
    604   || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));  in EmitNativeCode()
    629   arm_codegen->Move32(locations->Out(), LocationFrom(r0));  in EmitNativeCode()
    1136  const Location out = locations->Out();  in GenerateLongDataProc()
    2864  const Location out = locations->Out();  in VisitSelect()
    3587  Location out = locations->Out();  in VisitNeg()
    3772  Location out = locations->Out();  in VisitTypeConversion()
    [all …]
|
D | code_generator.cc |
    109   if (locations->Out().IsUnallocated()  in CheckTypeConsistency()
    110   && (locations->Out().GetPolicy() == Location::kSameAsFirstInput)) {  in CheckTypeConsistency()
    115   DCHECK(CheckType(instruction->GetType(), locations->Out()))  in CheckTypeConsistency()
    117   << " " << locations->Out();  in CheckTypeConsistency()
    819   MoveLocation(locations->Out(), calling_convention.GetReturnLocation(field_type), field_type);  in GenerateUnresolvedFieldAccess()
    1331  Location location = current_phi->GetLocations()->Out();  in RecordCatchBlockInfo()
|
D | register_allocation_resolver.cc |
    62   Location location = locations->Out();  in Resolve()
    466  location_source = defined_by->GetLocations()->Out();  in ConnectSplitSiblings()
|
D | code_generator_arm64.cc |
    315   Location out = locations->Out();  in EmitNativeCode()
    370   DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));  in EmitNativeCode()
    382   arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);  in EmitNativeCode()
    471   || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));  in EmitNativeCode()
    495   arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);  in EmitNativeCode()
    2018  Location out = locations->Out();  in HandleFieldGet()
    2497  Location out = locations->Out();  in VisitArrayGet()
    3022  Register res = RegisterFrom(locations->Out(), instruction->GetType());  in HandleCondition()
    3798  Location out_loc = locations->Out();  in VisitInstanceOf()
    5058  Location out_loc = cls->GetLocations()->Out();  in VisitLoadClass()
    [all …]
|
D | locations.h |
    580  Location Out() const { return output_; }  in Out() function
|
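locations.h:580 is the accessor itself: Out() just returns the summary's output_ slot. Elsewhere in this listing the same location is sometimes still unallocated and only carries a policy (the Location::kSameAsFirstInput checks in code_generator.cc and register_allocator_test.cc) and sometimes already names a concrete register that the code generators and intrinsics consume. A small self-contained model of that life cycle, with made-up Kind and Policy enums rather than ART's real encoding:

    #include <cstdio>

    enum class Kind { kUnallocated, kRegister, kFpuRegister, kStackSlot };
    enum class Policy { kAny, kRequiresRegister, kSameAsFirstInput };

    struct Location {
      Kind kind = Kind::kUnallocated;
      Policy policy = Policy::kAny;
      int index = -1;  // register code or stack slot in this toy model
      bool IsUnallocated() const { return kind == Kind::kUnallocated; }
      Policy GetPolicy() const { return policy; }
    };

    struct LocationSummary {
      Location output_;
      Location Out() const { return output_; }  // mirrors locations.h:580
      void SetOut(Location loc) { output_ = loc; }
    };

    int main() {
      LocationSummary locations;
      // Builder phase: only a constraint on the output is recorded.
      locations.SetOut({Kind::kUnallocated, Policy::kSameAsFirstInput});
      std::printf("unallocated: %d, same-as-first-input: %d\n",
                  locations.Out().IsUnallocated(),
                  locations.Out().GetPolicy() == Policy::kSameAsFirstInput);
      // After register allocation: a concrete register the backend can use.
      locations.SetOut({Kind::kRegister, Policy::kAny, /*index=*/2});
      std::printf("register index: %d\n", locations.Out().index);
    }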
D | register_allocator_test.cc |
    770  ASSERT_EQ(first_sub->GetLocations()->Out().GetPolicy(), Location::kSameAsFirstInput);  in SameAsFirstInputHint()
    771  ASSERT_EQ(second_sub->GetLocations()->Out().GetPolicy(), Location::kSameAsFirstInput);  in SameAsFirstInputHint()
|
D | register_allocator_linear_scan.cc |
    344  Location output = locations->Out();  in ProcessInstruction()
    637  if (!locations->OutputCanOverlapWithInputs() && locations->Out().IsUnallocated()) {  in TryAllocateFreeReg()
|
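The TryAllocateFreeReg() hit tests !locations->OutputCanOverlapWithInputs() && locations->Out().IsUnallocated(), i.e. the output has no register yet and is allowed to share one with an input. Assuming that reading of the flag, here is a toy register picker that falls back to reusing the first input's register when sharing is allowed; this is only an illustration of the idea, not ART's allocation logic.

    #include <cstdio>
    #include <vector>

    struct Candidate { int reg; bool free; };

    // Returns a register for the output, or -1 if the caller must spill.
    int PickOutputRegister(const std::vector<Candidate>& regs,
                           int first_input_reg,
                           bool output_can_overlap_inputs) {
      // Prefer a register that is genuinely free.
      for (const Candidate& c : regs) {
        if (c.free) return c.reg;
      }
      // Otherwise, if the output may not overlap the inputs, the first input is
      // dead by the time the output is written, so its register can be reused.
      if (!output_can_overlap_inputs) return first_input_reg;
      return -1;
    }

    int main() {
      std::vector<Candidate> regs = {{0, false}, {1, false}};
      std::printf("%d\n", PickOutputRegister(regs, 0, false));  // 0: reuse the input's register
      std::printf("%d\n", PickOutputRegister(regs, 0, true));   // -1: must spill
    }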
D | ssa_liveness_analysis.h |
    596  DCHECK(defined_by_->GetLocations()->Out().IsValid());  in FirstUseAfter()
    957  Location location = locations->Out();  in DefinitionRequiresRegister()
|
D | register_allocator_graph_color.cc |
    838   DCHECK(!locations->Out().IsValid());  in ProcessInstruction()
    899   Location out = interval->GetDefinedBy()->GetLocations()->Out();  in CheckForFixedOutput()
    1377  Location out = defined_by->GetLocations()->Out();  in FindCoalesceOpportunities()
|
/art/tools/dexfuzz/ |
D | README |
    85  |Iterations|VerifyFail|MutateFail|Timed Out |Successful|Divergence|
    96  Timed Out - mutated files that timed out for one or more backends.
|