/art/compiler/optimizing/ |
D | code_generator_vector_x86.cc |
    29 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction); in VisitVecReplicateScalar() local
    36 locations->AddTemp(Location::RequiresFpuRegister()); in VisitVecReplicateScalar()
    45 locations->SetInAt(0, is_zero ? Location::ConstantLocation(input->AsConstant()) in VisitVecReplicateScalar()
    47 locations->SetOut(Location::RequiresFpuRegister()); in VisitVecReplicateScalar()
    51 locations->SetInAt(0, is_zero ? Location::ConstantLocation(input->AsConstant()) in VisitVecReplicateScalar()
    53 locations->SetOut(is_zero ? Location::RequiresFpuRegister() in VisitVecReplicateScalar()
    63 LocationSummary* locations = instruction->GetLocations(); in VisitVecReplicateScalar() local
    64 XmmRegister dst = locations->Out().AsFpuRegister<XmmRegister>(); in VisitVecReplicateScalar()
    78 __ movd(dst, locations->InAt(0).AsRegister<Register>()); in VisitVecReplicateScalar()
    86 __ movd(dst, locations->InAt(0).AsRegister<Register>()); in VisitVecReplicateScalar()
    [all …]
|
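The hits above show the optimizing compiler's two-phase pattern: a LocationsBuilder visitor allocates the LocationSummary and records register constraints, and the matching InstructionCodeGenerator visitor later reads the allocated locations when emitting code. Below is a minimal sketch of that shape, reduced to a single case; the visitor class names follow ART's naming, but the body is simplified and relies on ART-internal types (HVecReplicateScalar, LocationSummary, XmmRegister), so treat it as illustrative rather than the full upstream code.

// Sketch only; mirrors the calls visible in the hits above.
void LocationsBuilderX86::VisitVecReplicateScalar(HVecReplicateScalar* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());   // scalar input in a core register
  locations->SetOut(Location::RequiresFpuRegister());    // packed result in an XMM register
}

void InstructionCodeGeneratorX86::VisitVecReplicateScalar(HVecReplicateScalar* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  XmmRegister dst = locations->Out().AsFpuRegister<XmmRegister>();
  // "__" is ART's assembler shorthand macro in these files.
  __ movd(dst, locations->InAt(0).AsRegister<Register>());  // move the scalar into lane 0
  // ... a shuffle/broadcast into the remaining lanes would follow here ...
}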
D | code_generator_vector_x86_64.cc |
    29 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction); in VisitVecReplicateScalar() local
    40 locations->SetInAt(0, is_zero ? Location::ConstantLocation(input->AsConstant()) in VisitVecReplicateScalar()
    42 locations->SetOut(Location::RequiresFpuRegister()); in VisitVecReplicateScalar()
    46 locations->SetInAt(0, is_zero ? Location::ConstantLocation(input->AsConstant()) in VisitVecReplicateScalar()
    48 locations->SetOut(is_zero ? Location::RequiresFpuRegister() in VisitVecReplicateScalar()
    58 LocationSummary* locations = instruction->GetLocations(); in VisitVecReplicateScalar() local
    59 XmmRegister dst = locations->Out().AsFpuRegister<XmmRegister>(); in VisitVecReplicateScalar()
    73 __ movd(dst, locations->InAt(0).AsRegister<CpuRegister>(), /*64-bit*/ false); in VisitVecReplicateScalar()
    81 __ movd(dst, locations->InAt(0).AsRegister<CpuRegister>(), /*64-bit*/ false); in VisitVecReplicateScalar()
    87 __ movd(dst, locations->InAt(0).AsRegister<CpuRegister>(), /*64-bit*/ false); in VisitVecReplicateScalar()
    [all …]
|
D | code_generator_vector_arm_vixl.cc |
    37 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction); in VisitVecReplicateScalar() local
    45 locations->SetInAt(0, Location::RequiresRegister()); in VisitVecReplicateScalar()
    46 locations->SetOut(Location::RequiresFpuRegister()); in VisitVecReplicateScalar()
    55 LocationSummary* locations = instruction->GetLocations(); in VisitVecReplicateScalar() local
    56 vixl32::DRegister dst = DRegisterFrom(locations->Out()); in VisitVecReplicateScalar()
    80 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction); in VisitVecExtractScalar() local
    83 locations->SetInAt(0, Location::RequiresFpuRegister()); in VisitVecExtractScalar()
    84 locations->SetOut(Location::RequiresRegister()); in VisitVecExtractScalar()
    93 LocationSummary* locations = instruction->GetLocations(); in VisitVecExtractScalar() local
    94 vixl32::DRegister src = DRegisterFrom(locations->InAt(0)); in VisitVecExtractScalar()
    [all …]
|
D | intrinsics_x86_64.cc |
    80 LocationSummary* locations = instruction_->GetLocations(); in EmitNativeCode() local
    81 DCHECK(locations->CanCall()); in EmitNativeCode()
    90 CpuRegister src_curr_addr = locations->GetTemp(0).AsRegister<CpuRegister>(); in EmitNativeCode()
    91 CpuRegister dst_curr_addr = locations->GetTemp(1).AsRegister<CpuRegister>(); in EmitNativeCode()
    92 CpuRegister src_stop_addr = locations->GetTemp(2).AsRegister<CpuRegister>(); in EmitNativeCode()
    127 LocationSummary* locations = in CreateFPToIntLocations() local
    129 locations->SetInAt(0, Location::RequiresFpuRegister()); in CreateFPToIntLocations()
    130 locations->SetOut(Location::RequiresRegister()); in CreateFPToIntLocations()
    134 LocationSummary* locations = in CreateIntToFPLocations() local
    136 locations->SetInAt(0, Location::RequiresRegister()); in CreateIntToFPLocations()
    [all …]
|
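CreateFPToIntLocations / CreateIntToFPLocations in the hits above are small helpers shared by several intrinsics: all they do is declare where the operand and the result must live. A hedged sketch of that helper shape follows; the extra LocationSummary constructor arguments (kNoCall, kIntrinsified) are an assumption based on how these files commonly construct summaries, not something visible in the hits.

// Sketch only: declare register constraints for FP<->integer intrinsics.
static void CreateFPToIntLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());  // FP source
  locations->SetOut(Location::RequiresRegister());         // integer destination
}

static void CreateIntToFPLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());     // integer source
  locations->SetOut(Location::RequiresFpuRegister());      // FP destination
}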
D | intrinsics_x86.cc |
    82 LocationSummary* locations = instruction_->GetLocations(); in EmitNativeCode() local
    83 DCHECK(locations->CanCall()); in EmitNativeCode()
    93 Register src = locations->InAt(0).AsRegister<Register>(); in EmitNativeCode()
    94 Location src_pos = locations->InAt(1); in EmitNativeCode()
    95 Register dest = locations->InAt(2).AsRegister<Register>(); in EmitNativeCode()
    96 Location dest_pos = locations->InAt(3); in EmitNativeCode()
    97 Location length = locations->InAt(4); in EmitNativeCode()
    98 Location temp1_loc = locations->GetTemp(0); in EmitNativeCode()
    100 Register temp2 = locations->GetTemp(1).AsRegister<Register>(); in EmitNativeCode()
    101 Register temp3 = locations->GetTemp(2).AsRegister<Register>(); in EmitNativeCode()
    [all …]
|
D | code_generator_vector_arm64_sve.cc |
    49 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction); in VisitVecReplicateScalar() local
    59 locations->SetInAt(0, ARM64EncodableConstantOrRegister(input, instruction)); in VisitVecReplicateScalar()
    60 locations->SetOut(Location::RequiresFpuRegister()); in VisitVecReplicateScalar()
    66 locations->SetInAt(0, Location::ConstantLocation(input->AsConstant())); in VisitVecReplicateScalar()
    67 locations->SetOut(Location::RequiresFpuRegister()); in VisitVecReplicateScalar()
    69 locations->SetInAt(0, Location::RequiresFpuRegister()); in VisitVecReplicateScalar()
    70 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap); in VisitVecReplicateScalar()
    80 LocationSummary* locations = instruction->GetLocations(); in VisitVecReplicateScalar() local
    81 Location src_loc = locations->InAt(0); in VisitVecReplicateScalar()
    82 VRegister dst = VRegisterFrom(locations->Out()); in VisitVecReplicateScalar()
    [all …]
|
D | code_generator_vector_arm64_neon.cc |
    49 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction); in VisitVecReplicateScalar() local
    59 locations->SetInAt(0, ARM64EncodableConstantOrRegister(input, instruction)); in VisitVecReplicateScalar()
    60 locations->SetOut(Location::RequiresFpuRegister()); in VisitVecReplicateScalar()
    66 locations->SetInAt(0, Location::ConstantLocation(input->AsConstant())); in VisitVecReplicateScalar()
    67 locations->SetOut(Location::RequiresFpuRegister()); in VisitVecReplicateScalar()
    69 locations->SetInAt(0, Location::RequiresFpuRegister()); in VisitVecReplicateScalar()
    70 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap); in VisitVecReplicateScalar()
    80 LocationSummary* locations = instruction->GetLocations(); in VisitVecReplicateScalar() local
    81 Location src_loc = locations->InAt(0); in VisitVecReplicateScalar()
    82 VRegister dst = VRegisterFrom(locations->Out()); in VisitVecReplicateScalar()
    [all …]
|
D | intrinsics_arm64.cc |
    94 LocationSummary* locations = instruction_->GetLocations(); in EmitNativeCode() local
    95 DCHECK(locations->CanCall()); in EmitNativeCode()
    104 Register src_curr_addr = XRegisterFrom(locations->GetTemp(0)); in EmitNativeCode()
    105 Register dst_curr_addr = XRegisterFrom(locations->GetTemp(1)); in EmitNativeCode()
    106 Register src_stop_addr = XRegisterFrom(locations->GetTemp(2)); in EmitNativeCode()
    166 LocationSummary* locations = in CreateFPToIntLocations() local
    168 locations->SetInAt(0, Location::RequiresFpuRegister()); in CreateFPToIntLocations()
    169 locations->SetOut(Location::RequiresRegister()); in CreateFPToIntLocations()
    173 LocationSummary* locations = in CreateIntToFPLocations() local
    175 locations->SetInAt(0, Location::RequiresRegister()); in CreateIntToFPLocations()
    [all …]
|
D | code_generator_x86.cc |
    147 LocationSummary* locations = instruction_->GetLocations(); in EmitNativeCode() local
    159 Location length_loc = locations->InAt(1); in EmitNativeCode()
    169 if (length_loc.Equals(locations->InAt(0))) { in EmitNativeCode()
    179 locations->InAt(0), in EmitNativeCode()
    207 LocationSummary* locations = instruction_->GetLocations(); in EmitNativeCode() local
    210 SaveLiveRegisters(codegen, locations); // Only saves full width XMM for SIMD. in EmitNativeCode()
    213 RestoreLiveRegisters(codegen, locations); // Only restores full width XMM for SIMD. in EmitNativeCode()
    244 LocationSummary* locations = instruction_->GetLocations(); in EmitNativeCode() local
    245 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg())); in EmitNativeCode()
    249 SaveLiveRegisters(codegen, locations); in EmitNativeCode()
    [all …]
|
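The code_generator_x86.cc hits come from slow paths: out-of-line code bound at a label, which spills the registers that are live at the safepoint before calling into the runtime and reloads them afterwards. A sketch of that shape is shown below under assumed names (SuspendCheckSlowPathX86, kQuickTestSuspend); the real slow paths add successor handling, DCHECKs, and other bookkeeping omitted here.

// Sketch only: save live registers, call the runtime, restore, and resume.
void SuspendCheckSlowPathX86::EmitNativeCode(CodeGenerator* codegen) {
  LocationSummary* locations = instruction_->GetLocations();
  CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
  __ Bind(GetEntryLabel());
  SaveLiveRegisters(codegen, locations);  // only saves full-width XMM for SIMD
  x86_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
  RestoreLiveRegisters(codegen, locations);
  __ jmp(GetReturnLabel());  // resume in the fast path
}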
D | intrinsics_arm_vixl.cc |
    129 LocationSummary* locations = instruction_->GetLocations(); in EmitNativeCode() local
    130 DCHECK(locations->CanCall()); in EmitNativeCode()
    141 Location dest_pos = locations->InAt(3); in EmitNativeCode()
    142 vixl32::Register src_curr_addr = RegisterFrom(locations->GetTemp(0)); in EmitNativeCode()
    143 vixl32::Register dst_curr_addr = RegisterFrom(locations->GetTemp(1)); in EmitNativeCode()
    144 vixl32::Register src_stop_addr = RegisterFrom(locations->GetTemp(2)); in EmitNativeCode()
    145 vixl32::Register tmp = RegisterFrom(locations->GetTemp(3)); in EmitNativeCode()
    210 LocationSummary* locations = in CreateFPToIntLocations() local
    212 locations->SetInAt(0, Location::RequiresFpuRegister()); in CreateFPToIntLocations()
    213 locations->SetOut(Location::RequiresRegister()); in CreateFPToIntLocations()
    [all …]
|
D | code_generator_x86_64.cc |
    158 LocationSummary* locations = instruction_->GetLocations(); in EmitNativeCode() local
    161 SaveLiveRegisters(codegen, locations); // Only saves full width XMM for SIMD. in EmitNativeCode()
    164 RestoreLiveRegisters(codegen, locations); // Only restores full width XMM for SIMD. in EmitNativeCode()
    196 LocationSummary* locations = instruction_->GetLocations(); in EmitNativeCode() local
    205 Location length_loc = locations->InAt(1); in EmitNativeCode()
    215 if (length_loc.Equals(locations->InAt(0))) { in EmitNativeCode()
    228 locations->InAt(0), in EmitNativeCode()
    259 LocationSummary* locations = instruction_->GetLocations(); in EmitNativeCode() local
    260 Location out = locations->Out(); in EmitNativeCode()
    267 SaveLiveRegisters(codegen, locations); in EmitNativeCode()
    [all …]
|
D | code_generator_arm_vixl.cc |
    320 void SlowPathCodeARMVIXL::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) { in SaveLiveRegisters() argument
    324 const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers= */ true); in SaveLiveRegisters()
    327 if (locations->RegisterContainsObject(i)) { in SaveLiveRegisters()
    328 locations->SetStackBit(stack_offset / kVRegSize); in SaveLiveRegisters()
    339 uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers= */ false); in SaveLiveRegisters()
    358 void SlowPathCodeARMVIXL::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) { in RestoreLiveRegisters() argument
    362 const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers= */ true); in RestoreLiveRegisters()
    373 uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers= */ false); in RestoreLiveRegisters()
    475 LocationSummary* locations = instruction_->GetLocations(); in EmitNativeCode() local
    486 locations->InAt(0), in EmitNativeCode()
    [all …]
|
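SaveLiveRegisters / RestoreLiveRegisters themselves are driven by the spill masks returned by CodeGenerator::GetSlowPathSpills (declared in code_generator.h further down). Here is a simplified sketch of the save side, following the calls visible in the hits; the real code also records per-register stack offsets and DCHECKs the frame layout, which this sketch omits.

// Sketch of the save-side shape: spill each live register and mark slots that hold
// references so the safepoint's stack map stays accurate.
void SlowPathCodeARMVIXL::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers= */ true);
  for (uint32_t i : LowToHighBits(core_spills)) {
    if (locations->RegisterContainsObject(i)) {
      locations->SetStackBit(stack_offset / kVRegSize);  // this slot holds a reference
    }
    stack_offset += codegen->SaveCoreRegister(stack_offset, i);
  }
  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers= */ false);
  for (uint32_t i : LowToHighBits(fp_spills)) {
    stack_offset += codegen->SaveFloatingPointRegister(stack_offset, i);
  }
}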
D | code_generator.cc |
    104 LocationSummary* locations = instruction->GetLocations(); in CheckTypeConsistency() local
    105 if (locations == nullptr) { in CheckTypeConsistency()
    109 if (locations->Out().IsUnallocated() in CheckTypeConsistency()
    110 && (locations->Out().GetPolicy() == Location::kSameAsFirstInput)) { in CheckTypeConsistency()
    111 DCHECK(CheckType(instruction->GetType(), locations->InAt(0))) in CheckTypeConsistency()
    113 << " " << locations->InAt(0); in CheckTypeConsistency()
    115 DCHECK(CheckType(instruction->GetType(), locations->Out())) in CheckTypeConsistency()
    117 << " " << locations->Out(); in CheckTypeConsistency()
    122 DCHECK(CheckType(inputs[i]->GetType(), locations->InAt(i))) in CheckTypeConsistency()
    123 << inputs[i]->GetType() << " " << locations->InAt(i); in CheckTypeConsistency()
    [all …]
|
D | code_generator_arm64.cc |
    168 LocationSummary* locations, in SaveRestoreLiveRegistersHelper() argument
    171 const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers= */ true); in SaveRestoreLiveRegistersHelper()
    172 const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers= */ false); in SaveRestoreLiveRegistersHelper()
    214 void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) { in SaveLiveRegisters() argument
    216 const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers= */ true); in SaveLiveRegisters()
    219 if (locations->RegisterContainsObject(i)) { in SaveLiveRegisters()
    220 locations->SetStackBit(stack_offset / kVRegSize); in SaveLiveRegisters()
    229 const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers= */ false); in SaveLiveRegisters()
    238 locations, in SaveLiveRegisters()
    242 void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) { in RestoreLiveRegisters() argument
    [all …]
|
D | register_allocation_resolver.cc |
    61 LocationSummary* locations = instruction->GetLocations(); in Resolve() local
    62 Location location = locations->Out(); in Resolve()
    68 locations->UpdateOut(location); in Resolve()
    72 locations->UpdateOut(location); in Resolve()
    133 if (locations->InAt(0).IsUnallocated()) { in Resolve()
    134 locations->SetInAt(0, source); in Resolve()
    136 DCHECK(locations->InAt(0).Equals(source)); in Resolve()
    139 locations->UpdateOut(source); in Resolve()
    209 LocationSummary* locations = at->GetLocations(); in Resolve() local
    212 locations->SetTempAt(temp_index, Location::RegisterLocation(temp->GetRegister())); in Resolve()
    [all …]
|
D | ssa_liveness_analysis.cc |
    54 LocationSummary* locations = current->GetLocations(); in NumberInstructions() local
    55 if (locations != nullptr && locations->Out().IsValid()) { in NumberInstructions()
    72 LocationSummary* locations = current->GetLocations(); in NumberInstructions() local
    73 if (locations != nullptr && locations->Out().IsValid()) { in NumberInstructions()
    417 LocationSummary* locations = user->GetLocations(); in FindFirstRegisterHint() local
    418 Location expected = locations->InAt(use.GetInputIndex()); in FindFirstRegisterHint()
    455 LocationSummary* locations = GetDefinedBy()->GetLocations(); in FindHintAtDefinition() local
    456 Location out = locations->Out(); in FindHintAtDefinition()
|
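FindFirstRegisterHint above illustrates how liveness analysis mines the users' location summaries for allocation hints: if a use expects the value in a specific register, that register becomes a preferred candidate. The helper below is a hypothetical, heavily simplified version with a made-up signature (the real code works on UsePosition and LiveInterval objects); only the accessor calls come from the hits.

// Hypothetical simplification: return the fixed register a user expects for this
// input, or -1 (kNoRegister in the real code) when there is no such constraint.
static int FindRegisterHint(HInstruction* user, size_t input_index) {
  LocationSummary* locations = user->GetLocations();
  Location expected = locations->InAt(input_index);
  if (expected.IsRegister()) {
    return expected.reg();  // a fixed-register use: prefer allocating the same register
  }
  return -1;
}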
D | code_generator.h |
    98 virtual void SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations);
    100 virtual void RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations);
    220 virtual void AddLocationAsTemp(Location location, LocationSummary* locations) = 0;
    309 uint32_t GetSlowPathSpills(LocationSummary* locations, bool core_registers) const { in GetSlowPathSpills() argument
    310 DCHECK(locations->OnlyCallsOnSlowPath() || in GetSlowPathSpills()
    311 (locations->Intrinsified() && locations->CallsOnMainAndSlowPath() && in GetSlowPathSpills()
    312 !locations->HasCustomSlowPathCallingConvention())); in GetSlowPathSpills()
    314 ? locations->GetLiveRegisters()->GetCoreRegisters() in GetSlowPathSpills()
    315 : locations->GetLiveRegisters()->GetFloatingPointRegisters() in GetSlowPathSpills()
    316 if (locations->HasCustomSlowPathCallingConvention()) { in GetSlowPathSpills()
    [all …]
|
D | register_allocator_linear_scan.cc |
    225 LocationSummary* locations = instruction->GetLocations(); in ProcessInstruction() local
    228 if (locations == nullptr) return; in ProcessInstruction()
    231 for (size_t i = 0; i < locations->GetTempCount(); ++i) { in ProcessInstruction()
    232 Location temp = locations->GetTemp(i); in ProcessInstruction()
    274 if (locations->NeedsSafepoint()) { in ProcessInstruction()
    286 if (locations->WillCall()) { in ProcessInstruction()
    290 for (size_t i = 0; i < locations->GetInputCount(); ++i) { in ProcessInstruction()
    291 Location input = locations->InAt(i); in ProcessInstruction()
    344 Location output = locations->Out(); in ProcessInstruction()
    346 Location first = locations->InAt(0); in ProcessInstruction()
    [all …]
|
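ProcessInstruction above walks a LocationSummary in a fixed order: temps first, then inputs, then safepoint/call effects, then the output. The hypothetical helper below touches the same accessors and merely counts them; the comments note what the real linear-scan allocator does at each of these points.

// Hypothetical helper using only the LocationSummary accessors that appear in the
// hits above; the counting stands in for the allocator's interval bookkeeping.
static size_t CountLocations(HInstruction* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (locations == nullptr) return 0u;        // instruction needs no register assignment
  size_t count = locations->GetTempCount();   // temps: block any fixed temp registers
  count += locations->GetInputCount();        // inputs: pin fixed inputs, extend intervals
  if (locations->Out().IsValid()) {           // output: allocate, or reuse InAt(0) for
    ++count;                                  // the kSameAsFirstInput policy
  }
  return count;
}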
D | ssa_liveness_analysis.h |
    318 LocationSummary* locations = instruction->GetLocations(); variable
    326 if (locations->IsFixedInput(input_index) || locations->OutputUsesSameAs(input_index)) {
    333 } else if (!locations->InAt(input_index).IsValid()) {
    907 LocationSummary* locations = defined_by_->GetLocations(); in CanUseInputRegister() local
    908 if (locations->OutputCanOverlapWithInputs()) { in CanUseInputRegister()
    956 LocationSummary* locations = defined_by_->GetLocations(); in DefinitionRequiresRegister() local
    957 Location location = locations->Out(); in DefinitionRequiresRegister()
    964 && (locations->InAt(0).IsRegister() in DefinitionRequiresRegister()
    965 || locations->InAt(0).IsRegisterPair() in DefinitionRequiresRegister()
    966 || locations->InAt(0).GetPolicy() == Location::kRequiresRegister))) { in DefinitionRequiresRegister()
    [all …]
|
/art/test/510-checker-try-catch/smali/ |
D | RegisterAllocator.smali |
    23 ## CHECK-DAG: Phi reg:0 is_catch_phi:true locations:{{\[.*\]}}-><<SlotA1:\d+>>(sp)
    24 ## CHECK-DAG: Phi reg:0 is_catch_phi:true locations:{{\[.*\]}}-><<SlotA2:\d+>>(sp)
    25 ## CHECK-DAG: Phi reg:1 is_catch_phi:true locations:{{\[.*\]}}-><<SlotB:\d+>>(sp)
    61 ## CHECK-DAG: Phi reg:0 is_catch_phi:true locations:{{\[.*\]}}->2x<<SlotB1:\d+>>(sp)
    62 ## CHECK-DAG: Phi reg:0 is_catch_phi:true locations:{{\[.*\]}}->2x<<SlotB2:\d+>>(sp)
    63 ## CHECK-DAG: Phi reg:2 is_catch_phi:true locations:{{\[.*\]}}-><<SlotA:\d+>>(sp)
|
/art/tools/ |
D | host_bcp.sh |
    21 Extracts boot class path locations from <image> and outputs the appropriate
    23 --runtime-arg -Xbootclasspath-locations:...
    92 --runtime-arg -Xbootclasspath-locations:${BCPL}
|
D | dex2oat_wrapper |
    20 # boot classpath and bootclasspath locations.
    45 elif [[ $1 == "-Xbootclasspath-locations:*" ]]; then
    47 # Remove '-Xbootclasspath-locations:' from the argument.
    48 DEX2OAT_BCP_LOCS=${DEX2OAT_BCP_LOCS##-Xbootclasspath-locations:}
    120 --runtime-arg -Xbootclasspath-locations:$DEX2OAT_BCP_LOCS \
|
/art/runtime/jit/ |
D | profile_saver.cc |
    453 const std::set<std::string>& locations = it.second; in FetchAndCacheResolvedClassesAndMethods() local
    454 VLOG(profiler) << "Locations for " << it.first << " " << android::base::Join(locations, ':'); in FetchAndCacheResolvedClassesAndMethods()
    462 << " found=" << (locations.find(base_location) != locations.end()) in FetchAndCacheResolvedClassesAndMethods()
    464 if (locations.find(base_location) != locations.end()) { in FetchAndCacheResolvedClassesAndMethods()
    480 << " found=" << (locations.find(base_location) != locations.end()) in FetchAndCacheResolvedClassesAndMethods()
    482 if (locations.find(base_location) != locations.end()) { in FetchAndCacheResolvedClassesAndMethods()
    494 if (locations.find(base_location) != locations.end()) { in FetchAndCacheResolvedClassesAndMethods()
    545 const std::set<std::string>& locations = it.second; in ProcessProfilingInfo() local
    547 << android::base::Join(locations, ":"); in ProcessProfilingInfo()
    552 jit_code_cache_->GetProfiledMethods(locations, profile_methods); in ProcessProfilingInfo()
    [all …]
|
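The profile_saver.cc hits all hinge on one membership test: a dex file's methods are cached or written out only when its base location appears in the tracked set for that profile. A trivial, self-contained illustration of that test (the helper name and values are made up):

#include <set>
#include <string>

// Illustration only: mirrors the locations.find(base_location) != locations.end()
// pattern in the hits above.
static bool IsTrackedLocation(const std::set<std::string>& locations,
                              const std::string& base_location) {
  return locations.find(base_location) != locations.end();
}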
/art/test/555-UnsafeGetLong-regression/ |
D | info.txt | 1 Regression test for sun.misc.Unsafe.getLong's intrinsic's locations
|
/art/test/614-checker-dump-constant-location/ |
D | info.txt | 2 locations in parallel moves.
|