/art/compiler/trampolines/ |
D | trampoline_compiler.cc |
  39 #define __ assembler.
  49 #define ___ assembler.GetVIXLAssembler()->
  57 ArmVIXLAssembler assembler(allocator); in CreateTrampoline() local
  64 vixl::aarch32::UseScratchRegisterScope temps(assembler.GetVIXLAssembler()); in CreateTrampoline()
  95 Arm64Assembler assembler(allocator); in CreateTrampoline() local
  134 X86Assembler assembler(allocator); in CreateTrampoline() local
  155 x86_64::X86_64Assembler assembler(allocator); in CreateTrampoline() local
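The `#define __ assembler.` and `#define ___ assembler.GetVIXLAssembler()->` hits are the shorthand these files use so that code-emission sequences read almost like an assembly listing. Below is a minimal, self-contained sketch of that idiom; `MiniAssembler`, its `ldr`/`br` methods, and `CreateTrampolineSketch` are invented for illustration and are not the real ART trampoline-compiler API.

    #include <cstdio>
    #include <string>
    #include <vector>

    // Toy stand-in for an assembler; the real ArmVIXLAssembler/Arm64Assembler
    // classes expose architecture-specific emitters instead.
    class MiniAssembler {
     public:
      void ldr(const std::string& rd, const std::string& addr) {
        code_.push_back("ldr " + rd + ", " + addr);
      }
      void br(const std::string& rn) { code_.push_back("br " + rn); }
      const std::vector<std::string>& code() const { return code_; }
     private:
      std::vector<std::string> code_;
    };

    // The "#define __ assembler." idiom: emission code reads like assembly.
    #define __ assembler.

    std::vector<std::string> CreateTrampolineSketch(int entrypoint_offset) {
      MiniAssembler assembler;
      // Load an entrypoint from a base register plus offset and branch to it,
      // which is roughly the shape of these trampolines.
      __ ldr("ip0", "[r0, #" + std::to_string(entrypoint_offset) + "]");
      __ br("ip0");
      return assembler.code();
    }

    #undef __

    int main() {
      for (const std::string& line : CreateTrampolineSketch(0x5e0)) {
        std::printf("%s\n", line.c_str());
      }
      return 0;
    }

The macro is always undefined again at the end of the file so the shorthand does not leak into other translation units.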
|
/art/compiler/optimizing/ |
D | intrinsics_utils.h |
  54 TAssembler* assembler = down_cast<TAssembler*>(codegen->GetAssembler()); in EmitNativeCode() local
  55 assembler->Bind(this->GetEntryLabel()); in EmitNativeCode()
  81 assembler->Jump(this->GetExitLabel()); in EmitNativeCode()
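These hits outline the common slow-path shape: EmitNativeCode() fetches the codegen's assembler, binds the slow path's entry label, emits the out-of-line fallback, and jumps back to the exit label. The sketch below shows only that bind/emit/jump skeleton with toy `Label`, `Assembler`, and `SlowPath` types; ART's real slow-path classes additionally record stack maps and save/restore live registers.

    #include <cstdio>
    #include <string>
    #include <vector>

    struct Label { std::string name; };

    class Assembler {
     public:
      void Bind(Label* label) { Emit(label->name + ":"); }
      void Jump(Label* label) { Emit("  b " + label->name); }
      void Emit(const std::string& s) { listing_.push_back(s); }
      void Dump() const {
        for (const std::string& s : listing_) std::printf("%s\n", s.c_str());
      }
     private:
      std::vector<std::string> listing_;
    };

    // Shape of a slow path: out-of-line code reached from the fast path,
    // ending with a jump back to the instruction after the fast path.
    class SlowPath {
     public:
      explicit SlowPath(const std::string& name)
          : entry_{name + "_entry"}, exit_{name + "_exit"} {}
      Label* GetEntryLabel() { return &entry_; }
      Label* GetExitLabel() { return &exit_; }

      void EmitNativeCode(Assembler* assembler) {
        assembler->Bind(GetEntryLabel());          // 1. bind the entry label
        assembler->Emit("  bl runtime_fallback");  // 2. emit the fallback call
        assembler->Jump(GetExitLabel());           // 3. resume the fast path
      }

     private:
      Label entry_;
      Label exit_;
    };

    int main() {
      Assembler assembler;
      SlowPath slow_path("intrinsic");
      slow_path.EmitNativeCode(&assembler);
      assembler.Dump();
      return 0;
    }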
|
D | intrinsics_arm_vixl.cc |
  39 #define __ assembler->GetVIXLAssembler()->
  74 static void GenSystemArrayCopyBaseAddress(ArmVIXLAssembler* assembler, in GenSystemArrayCopyBaseAddress() argument
  97 static void GenSystemArrayCopyEndAddress(ArmVIXLAssembler* assembler, in GenSystemArrayCopyEndAddress() argument
  128 ArmVIXLAssembler* assembler = arm_codegen->GetAssembler(); in EmitNativeCode() local
  149 GenSystemArrayCopyBaseAddress(assembler, type, dest, dest_pos, dst_curr_addr); in EmitNativeCode()
  154 assembler->MaybeUnpoisonHeapReference(tmp); in EmitNativeCode()
  179 assembler->MaybePoisonHeapReference(tmp); in EmitNativeCode()
  223 static void MoveFPToInt(LocationSummary* locations, bool is64bit, ArmVIXLAssembler* assembler) { in MoveFPToInt() argument
  233 static void MoveIntToFP(LocationSummary* locations, bool is64bit, ArmVIXLAssembler* assembler) { in MoveIntToFP() argument
  295 ArmVIXLAssembler* assembler = codegen->GetAssembler(); in GenNumberOfLeadingZeros() local
  [all …]
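GenSystemArrayCopyBaseAddress and GenSystemArrayCopyEndAddress compute the first and one-past-last element addresses for the SystemArrayCopy fast path: base = array + data_offset + pos * element_size, and end = base + length * element_size. A plain C++ sketch of those two address computations follows; the `kDataOffset` value is a made-up placeholder, not the real art::mirror::Array layout constant.

    #include <cstdint>
    #include <cstdio>

    // Placeholder header size before the first element; the real value depends
    // on the array object layout.
    constexpr std::uintptr_t kDataOffset = 16;

    // base = array + data_offset + pos * element_size
    static std::uintptr_t CopyBaseAddress(std::uintptr_t array, std::uintptr_t pos,
                                          std::uintptr_t element_size) {
      return array + kDataOffset + pos * element_size;
    }

    // end = base + length * element_size (one past the last copied element)
    static std::uintptr_t CopyEndAddress(std::uintptr_t base, std::uintptr_t length,
                                         std::uintptr_t element_size) {
      return base + length * element_size;
    }

    int main() {
      const std::uintptr_t src = 0x1000;
      const std::uintptr_t base = CopyBaseAddress(src, /*pos=*/4, /*element_size=*/2);
      const std::uintptr_t end = CopyEndAddress(base, /*length=*/8, /*element_size=*/2);
      std::printf("copy chars [0x%llx, 0x%llx)\n",
                  static_cast<unsigned long long>(base),
                  static_cast<unsigned long long>(end));
      return 0;
    }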
|
D | intrinsics_x86_64.cc |
  124 #define __ assembler->
  140 static void MoveFPToInt(LocationSummary* locations, bool is64bit, X86_64Assembler* assembler) { in MoveFPToInt() argument
  146 static void MoveIntToFP(LocationSummary* locations, bool is64bit, X86_64Assembler* assembler) { in MoveIntToFP() argument
  189 X86_64Assembler* assembler) { in GenReverseBytes() argument
  264 static void GenSSE41FPToFPIntrinsic(HInvoke* invoke, X86_64Assembler* assembler, int round_mode) { in GenSSE41FPToFPIntrinsic() argument
  325 X86_64Assembler* assembler = GetAssembler(); in VisitMathRoundFloat() local
  366 X86_64Assembler* assembler = GetAssembler(); in VisitMathRoundDouble() local
  606 static void CheckPosition(X86_64Assembler* assembler, in CheckPosition() argument
  671 X86_64Assembler* assembler = GetAssembler(); in VisitSystemArrayCopyChar() local
  711 CheckPosition(assembler, src_pos, src, length, slow_path, src_base); in VisitSystemArrayCopyChar()
  [all …]
|
D | intrinsics_x86.cc |
  166 #define __ assembler->
  189 static void MoveFPToInt(LocationSummary* locations, bool is64bit, X86Assembler* assembler) { in MoveFPToInt() argument
  204 static void MoveIntToFP(LocationSummary* locations, bool is64bit, X86Assembler* assembler) { in MoveIntToFP() argument
  271 X86Assembler* assembler) { in GenReverseBytes() argument
  310 X86Assembler* assembler = GetAssembler(); in VisitLongReverseBytes() local
  356 static void GenSSE41FPToFPIntrinsic(HInvoke* invoke, X86Assembler* assembler, int round_mode) { in GenSSE41FPToFPIntrinsic() argument
  418 X86Assembler* assembler = GetAssembler(); in VisitMathRoundFloat() local
  477 X86Assembler* assembler = codegen->GetAssembler(); in GenFPToFPCall() local
  514 static void GenLowestOneBit(X86Assembler* assembler, in GenLowestOneBit() argument
  811 static void CheckPosition(X86Assembler* assembler, in CheckPosition() argument
  [all …]
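The MoveFPToInt / MoveIntToFP helpers back intrinsics such as Float.floatToRawIntBits and Float.intBitsToFloat: the generated code simply moves the raw bit pattern between a floating-point register and a general-purpose register (a movd/fmov-style register move). A portable C++ sketch of the same bit-preserving conversion, using std::memcpy rather than emitting any instructions, is shown below.

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    // Bit-preserving float<->int conversion: reinterpret the bits, do not
    // perform a numeric conversion.
    static std::uint32_t FloatToRawIntBits(float value) {
      std::uint32_t bits;
      std::memcpy(&bits, &value, sizeof(bits));
      return bits;
    }

    static float IntBitsToFloat(std::uint32_t bits) {
      float value;
      std::memcpy(&value, &bits, sizeof(value));
      return value;
    }

    int main() {
      const float pi = 3.14159265f;
      const std::uint32_t bits = FloatToRawIntBits(pi);
      std::printf("bits = 0x%08x\n", bits);  // 0x40490fdb for IEEE-754 floats
      std::printf("back = %.7f\n", IntBitsToFloat(bits));
      return 0;
    }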
|
D | code_generator_arm_vixl.cc |
  114 EmitAdrCode(ArmVIXLMacroAssembler* assembler, vixl32::Register rd, vixl32::Label* label) in EmitAdrCode() argument
  115 : assembler_(assembler), rd_(rd), label_(label) { in EmitAdrCode()
  116 DCHECK(!assembler->AllowMacroInstructions()); // In ExactAssemblyScope. in EmitAdrCode()
  117 adr_location_ = assembler->GetCursorOffset(); in EmitAdrCode()
  118 assembler->adr(EncodingSize(Wide), rd, label); in EmitAdrCode()
  5882 ArmVIXLAssembler* assembler = codegen->GetAssembler(); in CanEncode32BitConstantAsImmediate() local
  5883 if (assembler->ShifterOperandCanHold(opcode, value, flags_update)) { in CanEncode32BitConstantAsImmediate()
  5900 if (assembler->ShifterOperandCanHold(neg_opcode, neg_value, flags_update)) { in CanEncode32BitConstantAsImmediate()
  9455 arm::ArmVIXLAssembler assembler(GetGraph()->GetAllocator()); in EmitThunkCode() local
  9461 assembler.LoadFromOffset(arm::kLoadWord, vixl32::pc, vixl32::r0, offset.Int32Value()); in EmitThunkCode()
  [all …]
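CanEncode32BitConstantAsImmediate first asks the assembler whether the constant fits the instruction's immediate encoding (ShifterOperandCanHold), and if not retries with the negated constant and the complementary opcode (for example, adding -c can be encoded as subtracting c). The sketch below shows that two-step check with a deliberately simplified "fits in 12 bits" rule; the real Thumb-2 modified-immediate rules are more involved, and all names here are illustrative.

    #include <cstdint>
    #include <cstdio>

    enum class Op { kAdd, kSub };

    // Deliberately simplified encodability test: real ARM/Thumb-2 immediates
    // also allow rotated 8-bit patterns and other forms, not just 0..4095.
    static bool ImmediateCanHold(std::uint32_t value) {
      return value <= 0xFFFu;
    }

    // Try the constant directly; if that fails, try the negated constant with
    // the complementary opcode (ADD r, -c  ==  SUB r, c).
    static bool CanEncodeAsImmediate(Op opcode, std::int32_t value, Op* out_op,
                                     std::uint32_t* out_imm) {
      if (ImmediateCanHold(static_cast<std::uint32_t>(value))) {
        *out_op = opcode;
        *out_imm = static_cast<std::uint32_t>(value);
        return true;
      }
      const std::int32_t neg = -value;
      if (ImmediateCanHold(static_cast<std::uint32_t>(neg))) {
        *out_op = (opcode == Op::kAdd) ? Op::kSub : Op::kAdd;
        *out_imm = static_cast<std::uint32_t>(neg);
        return true;
      }
      return false;  // fall back to materializing the constant in a register
    }

    int main() {
      Op op;
      std::uint32_t imm;
      if (CanEncodeAsImmediate(Op::kAdd, -8, &op, &imm)) {
        std::printf("encoded as %s #%u\n", op == Op::kSub ? "sub" : "add", imm);
      }
      return 0;
    }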
|
D | code_generator_arm64.cc |
  4904 Arm64Assembler assembler(GetGraph()->GetAllocator()); in EmitThunkCode() local
  4911 assembler.JumpTo(ManagedRegister(arm64::X0), offset, ManagedRegister(arm64::IP0)); in EmitThunkCode()
  4919 assembler.JumpTo(ManagedRegister(arm64::TR), offset, ManagedRegister(arm64::IP0)); in EmitThunkCode()
  4927 CompileBakerReadBarrierThunk(assembler, patch.GetBakerCustomValue1(), debug_name); in EmitThunkCode()
  4936 assembler.FinalizeCode(); in EmitThunkCode()
  4937 code->resize(assembler.CodeSize()); in EmitThunkCode()
  4939 assembler.FinalizeInstructions(code_region); in EmitThunkCode()
  6681 #define __ assembler.GetVIXLAssembler()->
  6683 static void EmitGrayCheckAndFastPath(arm64::Arm64Assembler& assembler, in EmitGrayCheckAndFastPath() argument
  6715 static void LoadReadBarrierMarkIntrospectionEntrypoint(arm64::Arm64Assembler& assembler, in LoadReadBarrierMarkIntrospectionEntrypoint() argument
  [all …]
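The EmitThunkCode hits show the usual end-of-assembly sequence: emit the thunk body, FinalizeCode(), size the output buffer from CodeSize(), and copy the encoded instructions out with FinalizeInstructions(). A self-contained sketch of that assemble-then-copy-out flow is below; `ByteAssembler`, `EmitUInt32`, and `EmitThunkSketch` are stand-ins, not the VIXL/ART interfaces.

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // Stand-in assembler that just accumulates encoded bytes in memory.
    class ByteAssembler {
     public:
      void EmitUInt32(std::uint32_t insn) {
        for (int i = 0; i < 4; ++i) {
          buffer_.push_back(static_cast<std::uint8_t>((insn >> (8 * i)) & 0xFFu));
        }
      }
      void FinalizeCode() { finalized_ = true; }  // e.g. resolve fixups/literals
      std::size_t CodeSize() const { return buffer_.size(); }
      void FinalizeInstructions(std::uint8_t* region) const {
        for (std::size_t i = 0; i < buffer_.size(); ++i) region[i] = buffer_[i];
      }
     private:
      std::vector<std::uint8_t> buffer_;
      bool finalized_ = false;
    };

    // Shape of EmitThunkCode: assemble into the assembler's own buffer, then
    // resize the caller-provided vector and copy the finished code into it.
    static void EmitThunkSketch(std::vector<std::uint8_t>* code) {
      ByteAssembler assembler;
      assembler.EmitUInt32(0xD61F0200u);  // AArch64 "br x16"
      assembler.FinalizeCode();
      code->resize(assembler.CodeSize());
      assembler.FinalizeInstructions(code->data());
    }

    int main() {
      std::vector<std::uint8_t> code;
      EmitThunkSketch(&code);
      std::printf("thunk is %zu bytes\n", code.size());
      return 0;
    }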
|
D | intrinsics_arm64.cc |
  947 Arm64Assembler* assembler = arm64_codegen->GetAssembler(); in EmitNativeCode() local
  948 MacroAssembler* masm = assembler->GetVIXLAssembler(); in EmitNativeCode()
  988 assembler->MaybeUnpoisonHeapReference(tmp); in EmitNativeCode()
  992 assembler->MaybePoisonHeapReference(value); in EmitNativeCode()
  994 assembler->MaybeUnpoisonHeapReference(value); in EmitNativeCode()
  1003 Arm64Assembler* assembler = codegen->GetAssembler(); in GenCas() local
  1004 MacroAssembler* masm = assembler->GetVIXLAssembler(); in GenCas()
  1056 assembler->MaybeUnpoisonHeapReference(old_value); in GenCas()
  1061 assembler->MaybePoisonHeapReference(value); in GenCas()
  1065 assembler->MaybeUnpoisonHeapReference(value); in GenCas()
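GenCas emits the compare-and-swap sequence behind the Unsafe/VarHandle CAS intrinsics, with MaybePoisonHeapReference / MaybeUnpoisonHeapReference applied around the stored and loaded reference values when heap-reference poisoning is enabled. The sketch below shows only the high-level CAS semantics using std::atomic; it does not model reference poisoning or the exclusive-load/store loop the ARM64 code actually emits.

    #include <atomic>
    #include <cstdio>

    // What the CAS intrinsic promises at the Java level: store `desired` only
    // if the field still holds `expected`, and report whether the swap happened.
    static bool CompareAndSet(std::atomic<int>* field, int expected, int desired) {
      return field->compare_exchange_strong(expected, desired,
                                            std::memory_order_seq_cst);
    }

    int main() {
      std::atomic<int> field{41};
      std::printf("first cas:  %s\n", CompareAndSet(&field, 41, 42) ? "ok" : "failed");
      std::printf("second cas: %s\n", CompareAndSet(&field, 41, 43) ? "ok" : "failed");
      std::printf("value: %d\n", field.load());
      return 0;
    }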
|
D | optimizing_cfi_test_expected.inc | 142 // as with the old assembler.
|
D | code_generator_x86_64.cc |
  7742 X86_64Assembler* assembler = codegen_->GetAssembler(); in CreateJumpTable() local
  7745 const int32_t offset_in_constant_table = assembler->ConstantAreaSize(); in CreateJumpTable()
  7749 const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table; in CreateJumpTable()
  7761 assembler->AppendInt32(offset_to_block); in CreateJumpTable()
  7771 X86_64Assembler* assembler = GetAssembler(); in Finalize() local
  7772 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) { in Finalize()
  7774 assembler->Align(4, 0); in Finalize()
  7775 constant_area_start_ = assembler->CodeSize(); in Finalize()
  7783 assembler->AddConstantArea(); in Finalize()
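CreateJumpTable and Finalize cooperate: the constant area is appended after the method's code (aligned, with constant_area_start_ recorded), and each jump-table entry is a 32-bit offset relating the table's own position to the target basic block, so the switch dispatch never needs absolute addresses. A rough, self-contained sketch of one plausible offset computation is below; `BuildJumpTable` and its parameters are illustrative, not ART's actual helpers.

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // Illustrative offsets (bytes from the start of the method's code): where
    // each switch target block starts, and where the jump table will live once
    // the constant area is appended after the code.
    static std::vector<std::int32_t> BuildJumpTable(
        const std::vector<std::int32_t>& block_offsets,
        std::int32_t code_size,
        std::int32_t table_offset_in_constant_area) {
      // The constant area is emitted right after the code, so the table's
      // position is code_size + its offset within the constant area.
      const std::int32_t table_position = code_size + table_offset_in_constant_area;
      std::vector<std::int32_t> entries;
      entries.reserve(block_offsets.size());
      for (std::int32_t block_offset : block_offsets) {
        // Each entry is relative to the table, so dispatch can compute
        // "target = &table + table[index]".
        entries.push_back(block_offset - table_position);
      }
      return entries;
    }

    int main() {
      const std::vector<std::int32_t> entries =
          BuildJumpTable(/*block_offsets=*/{0x40, 0x70, 0xA4},
                         /*code_size=*/0x100,
                         /*table_offset_in_constant_area=*/0x10);
      for (std::int32_t e : entries) std::printf("%d\n", static_cast<int>(e));
      return 0;
    }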
|
D | code_generator_arm_vixl.h | 879 void CompileBakerReadBarrierThunk(ArmVIXLAssembler& assembler,
|
D | code_generator_arm64.h | 980 void CompileBakerReadBarrierThunk(Arm64Assembler& assembler,
|
D | code_generator_x86.cc |
  8466 X86Assembler* assembler = codegen_->GetAssembler(); in CreateJumpTable() local
  8469 const int32_t offset_in_constant_table = assembler->ConstantAreaSize(); in CreateJumpTable()
  8486 assembler->AppendInt32(offset_to_block); in CreateJumpTable()
  8496 X86Assembler* assembler = GetAssembler(); in Finalize() local
  8498 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) { in Finalize()
  8501 assembler->Align(4, 0); in Finalize()
  8502 constant_area_start_ = assembler->CodeSize(); in Finalize()
  8510 assembler->AddConstantArea(); in Finalize()
|
/art/compiler/utils/ |
D | assembler_thumb_test.cc |
  44 ArmVIXLAssemblerTest() : pool(), allocator(&pool), assembler(&allocator) { } in ArmVIXLAssemblerTest()
  74 #define __ assembler.
  88 #define __ assembler.
  92 ArmVIXLJNIMacroAssembler assembler; member in art::arm::ArmVIXLAssemblerTest
  202 #define __ assembler.asm_.
  224 vixl::aarch32::UseScratchRegisterScope temps(assembler.asm_.GetVIXLAssembler()); in TEST_F()
  257 vixl::aarch32::UseScratchRegisterScope temps(assembler.asm_.GetVIXLAssembler()); in TEST_F()
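The test fixture hits show VIXL's UseScratchRegisterScope: an RAII object that temporarily hands out scratch registers and returns them to the pool when the scope ends. The sketch below mimics that acquire-in-scope/release-on-destruction pattern with a toy register pool; `ScratchPool` and `ScratchRegisterScope` are invented names, not the VIXL classes.

    #include <cassert>
    #include <cstdio>
    #include <utility>
    #include <vector>

    // Toy pool of scratch register numbers (think ip0/ip1 or r12).
    class ScratchPool {
     public:
      explicit ScratchPool(std::vector<int> regs) : free_(std::move(regs)) {}
      int Acquire() {
        assert(!free_.empty() && "out of scratch registers");
        int reg = free_.back();
        free_.pop_back();
        return reg;
      }
      void Release(int reg) { free_.push_back(reg); }
     private:
      std::vector<int> free_;
    };

    // RAII scope in the spirit of vixl::UseScratchRegisterScope: every register
    // acquired through the scope is given back automatically when it dies.
    class ScratchRegisterScope {
     public:
      explicit ScratchRegisterScope(ScratchPool* pool) : pool_(pool) {}
      ~ScratchRegisterScope() {
        for (int reg : acquired_) pool_->Release(reg);
      }
      int Acquire() {
        int reg = pool_->Acquire();
        acquired_.push_back(reg);
        return reg;
      }
     private:
      ScratchPool* pool_;
      std::vector<int> acquired_;
    };

    int main() {
      ScratchPool pool({12, 14});
      {
        ScratchRegisterScope temps(&pool);
        std::printf("using r%d as a temporary\n", temps.Acquire());
      }  // the temporary is handed back here
      ScratchRegisterScope temps2(&pool);
      std::printf("reacquired r%d\n", temps2.Acquire());
      return 0;
    }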
|
D | jni_macro_assembler_test.h | 42 typedef std::string (*TestFn)(JNIMacroAssemblerTest* assembler_test, Ass* assembler);
|
D | assembler_test.h | 62 typedef std::string (*TestFn)(AssemblerTest* assembler_test, Ass* assembler);
|
/art/test/538-checker-embed-constants/ |
D | info.txt | 1 Test embedding of constants in assembler instructions.
|
/art/compiler/utils/x86_64/ |
D | assembler_x86_64_test.cc |
  641 std::string shll_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) { in shll_fn() argument
  646 assembler->shll(*reg, shifter); in shll_fn()
  662 std::string shlq_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) { in shlq_fn() argument
  667 assembler->shlq(*reg, shifter); in shlq_fn()
  683 std::string shrl_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) { in shrl_fn() argument
  688 assembler->shrl(*reg, shifter); in shrl_fn()
  703 std::string shrq_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) { in shrq_fn() argument
  708 assembler->shrq(*reg, shifter); in shrq_fn()
  723 std::string sarl_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) { in sarl_fn() argument
  728 assembler->sarl(*reg, shifter); in sarl_fn()
  [all …]
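These shift-instruction tests follow the assembler-test pattern visible in the TestFn typedefs above: a free function receives the test fixture and the assembler, emits the instruction for each operand combination, and returns the exact assembly text the reference assembler is expected to produce, which the harness then compares against the generated bytes. A condensed sketch of that emit-and-build-expected-string loop is below; `FakeAssembler`, the immediate operand, and the register list are invented for illustration.

    #include <cstdio>
    #include <string>
    #include <vector>

    // Stand-in for the device assembler: records a textual form of each
    // emitted instruction so the example stays self-contained.
    class FakeAssembler {
     public:
      void shll(const std::string& reg, int imm) {
        emitted_.push_back("shll $" + std::to_string(imm) + ", " + reg);
      }
      const std::vector<std::string>& emitted() const { return emitted_; }
     private:
      std::vector<std::string> emitted_;
    };

    // Mirrors the test pattern: emit the instruction for every register and
    // build the expected-output string the harness will compare against.
    static std::string shll_fn(FakeAssembler* assembler,
                               const std::vector<std::string>& registers) {
      std::string expected;
      for (const std::string& reg : registers) {
        assembler->shll(reg, /*imm=*/2);
        expected += "shll $2, " + reg + "\n";
      }
      return expected;
    }

    int main() {
      FakeAssembler assembler;
      const std::string expected = shll_fn(&assembler, {"%eax", "%ecx", "%edx"});
      std::printf("%s", expected.c_str());
      std::printf("emitted %zu instructions\n", assembler.emitted().size());
      return 0;
    }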
|
D | jni_macro_assembler_x86_64.cc |
  142 static void DecreaseFrameSizeImpl(size_t adjust, X86_64Assembler* assembler) { in DecreaseFrameSizeImpl() argument
  145 assembler->addq(CpuRegister(RSP), Immediate(adjust)); in DecreaseFrameSizeImpl()
  146 assembler->cfi().AdjustCFAOffset(-adjust); in DecreaseFrameSizeImpl()
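DecreaseFrameSizeImpl (here and in the x86 variant below) pairs the actual stack-pointer adjustment (addq %rsp / addl %esp) with a matching CFI update, so the unwind information keeps describing where the canonical frame address sits. The sketch below models only that bookkeeping invariant, with a trivial `CfiState` in place of the real DWARF CFI writer.

    #include <cassert>
    #include <cstdio>

    // Minimal model of the unwinder's view: the CFA is `cfa_offset` bytes
    // above the current stack pointer.
    struct CfiState {
      int cfa_offset = 8;  // e.g. just the return address on x86-64
      void AdjustCFAOffset(int delta) { cfa_offset += delta; }
    };

    struct Frame {
      int rsp = 0;  // grows downward; a lower value means a bigger frame
      CfiState cfi;
    };

    // Analogue of DecreaseFrameSizeImpl: pop `adjust` bytes off the frame and
    // tell the CFI writer the CFA is now that much closer to the stack pointer.
    static void DecreaseFrameSize(Frame* frame, int adjust) {
      frame->rsp += adjust;                 // addq $adjust, %rsp
      frame->cfi.AdjustCFAOffset(-adjust);  // CFA offset shrinks by the same amount
    }

    static void IncreaseFrameSize(Frame* frame, int adjust) {
      frame->rsp -= adjust;                 // subq $adjust, %rsp
      frame->cfi.AdjustCFAOffset(adjust);
    }

    int main() {
      Frame frame;
      IncreaseFrameSize(&frame, 64);
      DecreaseFrameSize(&frame, 64);
      // Invariant preserved by the paired adjustments: the CFA offset is back
      // where it started once the frame is fully torn down.
      assert(frame.cfi.cfa_offset == 8);
      std::printf("cfa offset after prologue+epilogue: %d\n", frame.cfi.cfa_offset);
      return 0;
    }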
|
/art/compiler/utils/x86/ |
D | jni_macro_assembler_x86.cc |
  122 static void DecreaseFrameSizeImpl(X86Assembler* assembler, size_t adjust) { in DecreaseFrameSizeImpl() argument
  125 assembler->addl(ESP, Immediate(adjust)); in DecreaseFrameSizeImpl()
  126 assembler->cfi().AdjustCFAOffset(-adjust); in DecreaseFrameSizeImpl()
|
D | assembler_x86_test.cc |
  393 std::string rorl_fn(AssemblerX86Test::Base* assembler_test, x86::X86Assembler* assembler) { in rorl_fn() argument
  398 assembler->rorl(*reg, shifter); in rorl_fn()
  413 std::string roll_fn(AssemblerX86Test::Base* assembler_test, x86::X86Assembler* assembler) { in roll_fn() argument
  418 assembler->roll(*reg, shifter); in roll_fn()
|
/art/test/ |
D | README.md | 14 [Jasmin](http://jasmin.sourceforge.net/) assembler or the
|
/art/compiler/ |
D | Android.bp | 94 "utils/assembler.cc",
|