1 /*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include <dirent.h>
18 #include <errno.h>
19 #include <string.h>
20 #include <sys/types.h>
21
22 #include <fstream>
23 #include <map>
24 #include <regex>
25
26 #include "gtest/gtest.h"
27
28 #include "jni/quick/calling_convention.h"
29 #include "utils/arm/jni_macro_assembler_arm_vixl.h"
30 #include "utils/assembler_test_base.h"
31
32 #include "base/hex_dump.h"
33 #include "base/malloc_arena_pool.h"
34 #include "common_runtime_test.h"
35
36 namespace art {
37 namespace arm {
38
39 // Include results file (generated manually)
40 #include "assembler_thumb_test_expected.cc.inc"
41
// Test fixture providing an ARM (Thumb-2) VIXL JNI macro assembler plus a
// helper that disassembles the emitted code and compares it against the
// manually generated expected output in assembler_thumb_test_expected.cc.inc.
class ArmVIXLAssemblerTest : public AssemblerTestBase {
 public:
  ArmVIXLAssemblerTest() : pool(), allocator(&pool), assembler(&allocator) { }

 protected:
  // All tests in this file target the Thumb-2 instruction set.
  InstructionSet GetIsa() override { return InstructionSet::kThumb2; }

  // Writes `code` to an ELF32 object file, disassembles it, and compares the
  // disassembly against `expected`. On mismatch, additionally prints the
  // actual disassembly as a C++ string literal so the expected-results file
  // can be regenerated by copy/paste. Compiled out on the Android target,
  // where the disassembler tooling is not available.
  void DumpAndCheck(std::vector<uint8_t>& code, const char* testname, const std::string& expected) {
#ifndef ART_TARGET_ANDROID
    std::string obj_file = scratch_dir_->GetPath() + testname + ".o";
    WriteElf</*IsElf64=*/false>(obj_file, InstructionSet::kThumb2, code);
    std::string disassembly;
    ASSERT_TRUE(Disassemble(obj_file, &disassembly));

    // objdump on buildbot seems to sometimes add annotation like in "bne #226 <.text+0x1e8>".
    // It is unclear why it does not reproduce locally. As work-around, remove the annotation.
    std::regex annotation_re(" <\\.text\\+\\w+>");
    disassembly = std::regex_replace(disassembly, annotation_re, "");

    // Prepend the disassembler's file/section banner so `expected` only has
    // to contain the instruction lines themselves.
    std::string expected2 = "\n" +
        obj_file + ": file format ELF32-arm-little\n\n\n"
        "Disassembly of section .text:\n\n"
        "00000000 .text:\n" +
        expected;
    EXPECT_EQ(expected2, disassembly);
    if (expected2 != disassembly) {
      // Emit the actual output formatted as C++ string literals to make
      // updating the expected-results .inc file easy.
      std::string out = " \"" + Replace(disassembly, "\n", "\\n\"\n \"") + "\"";
      printf("C++ formatted disassembler output for %s:\n%s\n", testname, out.c_str());
    }
#endif  // ART_TARGET_ANDROID
  }

#define __ assembler.

  // Finalizes the assembled code into a buffer and runs DumpAndCheck on it.
  void EmitAndCheck(const char* testname, const char* expected) {
    __ FinalizeCode();
    size_t cs = __ CodeSize();
    std::vector<uint8_t> managed_code(cs);
    MemoryRegion code(&managed_code[0], managed_code.size());
    __ FinalizeInstructions(code);

    DumpAndCheck(managed_code, testname, expected);
  }

#undef __

  // Re-defined after the #undef above so the TEST_F bodies that follow can
  // use the same `__` shorthand for the JNI macro assembler.
#define __ assembler.

  MallocArenaPool pool;
  ArenaAllocator allocator;
  ArmVIXLJNIMacroAssembler assembler;
};
94
// Exercises the JNI macro assembler's high-level helpers (frame setup,
// loads/stores at various offsets, handle-scope entries, exception poll)
// and checks the emitted Thumb-2 code against VixlJniHelpersResults.
// NOTE: the call sequence below must not be reordered — the expected
// disassembly depends on the exact emission order.
TEST_F(ArmVIXLAssemblerTest, VixlJniHelpers) {
  // Run the test only with Baker read barriers, as the expected
  // generated code contains a Marking Register refresh instruction.
  TEST_DISABLED_WITHOUT_BAKER_READ_BARRIERS();

  const bool is_static = true;
  const bool is_synchronized = false;
  const bool is_critical_native = false;
  const char* shorty = "IIFII";

  std::unique_ptr<JniCallingConvention> jni_conv(
      JniCallingConvention::Create(&allocator,
                                   is_static,
                                   is_synchronized,
                                   is_critical_native,
                                   shorty,
                                   InstructionSet::kThumb2));
  std::unique_ptr<ManagedRuntimeCallingConvention> mr_conv(
      ManagedRuntimeCallingConvention::Create(
          &allocator, is_static, is_synchronized, shorty, InstructionSet::kThumb2));
  const int frame_size(jni_conv->FrameSize());
  ArrayRef<const ManagedRegister> callee_save_regs = jni_conv->CalleeSaveRegisters();

  const ManagedRegister method_register = ArmManagedRegister::FromCoreRegister(R0);
  const ManagedRegister hidden_arg_register = ArmManagedRegister::FromCoreRegister(R4);
  const ManagedRegister scratch_register = ArmManagedRegister::FromCoreRegister(R12);

  __ BuildFrame(frame_size, mr_conv->MethodRegister(), callee_save_regs);

  // Spill arguments.
  mr_conv->ResetIterator(FrameOffset(frame_size));
  for (; mr_conv->HasNext(); mr_conv->Next()) {
    if (mr_conv->IsCurrentParamInRegister()) {
      size_t size = mr_conv->IsCurrentParamALongOrDouble() ? 8u : 4u;
      __ Store(mr_conv->CurrentParamStackOffset(), mr_conv->CurrentParamRegister(), size);
    }
  }
  __ IncreaseFrameSize(32);

  // Loads
  // Offsets straddle values such as 1020/1024 and 4092/4096 — presumably
  // chosen around Thumb-2 immediate-encoding limits (TODO: confirm against
  // the expected results).
  __ IncreaseFrameSize(4096);
  __ Load(method_register, FrameOffset(32), 4);
  __ Load(method_register, FrameOffset(124), 4);
  __ Load(method_register, FrameOffset(132), 4);
  __ Load(method_register, FrameOffset(1020), 4);
  __ Load(method_register, FrameOffset(1024), 4);
  __ Load(scratch_register, FrameOffset(4092), 4);
  __ Load(scratch_register, FrameOffset(4096), 4);
  __ LoadRawPtrFromThread(scratch_register, ThreadOffset32(512));
  __ LoadRef(method_register, scratch_register, MemberOffset(128), /* unpoison_reference= */ false);

  // Stores
  __ Store(FrameOffset(32), method_register, 4);
  __ Store(FrameOffset(124), method_register, 4);
  __ Store(FrameOffset(132), method_register, 4);
  __ Store(FrameOffset(1020), method_register, 4);
  __ Store(FrameOffset(1024), method_register, 4);
  __ Store(FrameOffset(4092), scratch_register, 4);
  __ Store(FrameOffset(4096), scratch_register, 4);
  __ StoreImmediateToFrame(FrameOffset(48), 0xFF);
  __ StoreImmediateToFrame(FrameOffset(48), 0xFFFFFF);
  __ StoreRawPtr(FrameOffset(48), scratch_register);
  __ StoreRef(FrameOffset(48), scratch_register);
  __ StoreSpanning(FrameOffset(48), method_register, FrameOffset(48));
  __ StoreStackOffsetToThread(ThreadOffset32(512), FrameOffset(4096));
  __ StoreStackPointerToThread(ThreadOffset32(512));

  // Other
  __ Call(method_register, FrameOffset(48));
  __ Copy(FrameOffset(48), FrameOffset(44), 4);
  __ CopyRawPtrFromThread(FrameOffset(44), ThreadOffset32(512));
  __ CopyRef(FrameOffset(48), FrameOffset(44));
  __ GetCurrentThread(method_register);
  __ GetCurrentThread(FrameOffset(48));
  __ Move(hidden_arg_register, method_register, 4);
  __ VerifyObject(scratch_register, false);

  __ CreateHandleScopeEntry(scratch_register, FrameOffset(48), scratch_register, true);
  __ CreateHandleScopeEntry(scratch_register, FrameOffset(48), scratch_register, false);
  __ CreateHandleScopeEntry(method_register, FrameOffset(48), scratch_register, true);
  __ CreateHandleScopeEntry(FrameOffset(48), FrameOffset(64), true);
  __ CreateHandleScopeEntry(method_register, FrameOffset(0), scratch_register, true);
  __ CreateHandleScopeEntry(method_register, FrameOffset(1025), scratch_register, true);
  __ CreateHandleScopeEntry(scratch_register, FrameOffset(1025), scratch_register, true);

  __ ExceptionPoll(0);

  // Push the target out of range of branch emitted by ExceptionPoll.
  for (int i = 0; i < 64; i++) {
    __ Store(FrameOffset(2047), scratch_register, 4);
  }

  __ DecreaseFrameSize(4096);
  __ DecreaseFrameSize(32);
  __ RemoveFrame(frame_size, callee_save_regs, /* may_suspend= */ true);

  EmitAndCheck("VixlJniHelpers", VixlJniHelpersResults);
}
193
#undef __

// TODO: Avoid these macros.
// Aliases mapping the register names used by the tests below to the
// corresponding VIXL aarch32 registers. Note these shadow the ART `Register`
// enumerators (R0, R4, R12) used by the JNI-helper test above.
#define R0 vixl::aarch32::r0
#define R2 vixl::aarch32::r2
#define R4 vixl::aarch32::r4
#define R12 vixl::aarch32::r12

// The remaining tests drive the wrapped ARM VIXL assembler directly.
#define __ assembler.asm_.
203
// Exercises ArmVIXLAssembler::LoadFromOffset for each load kind at offsets
// around encoding boundaries (e.g. 0xfff vs 0x1000 for words, 0x3fc vs 0x400
// for word pairs — presumably the immediate-offset limits; the expected
// results pin the exact sequences). Also covers the base == destination case
// and a negative offset. Do not reorder the calls: the expected disassembly
// depends on the exact emission order.
TEST_F(ArmVIXLAssemblerTest, VixlLoadFromOffset) {
  __ LoadFromOffset(kLoadWord, R2, R4, 12);
  __ LoadFromOffset(kLoadWord, R2, R4, 0xfff);
  __ LoadFromOffset(kLoadWord, R2, R4, 0x1000);
  __ LoadFromOffset(kLoadWord, R2, R4, 0x1000a4);
  __ LoadFromOffset(kLoadWord, R2, R4, 0x101000);
  __ LoadFromOffset(kLoadWord, R4, R4, 0x101000);
  __ LoadFromOffset(kLoadUnsignedHalfword, R2, R4, 12);
  __ LoadFromOffset(kLoadUnsignedHalfword, R2, R4, 0xfff);
  __ LoadFromOffset(kLoadUnsignedHalfword, R2, R4, 0x1000);
  __ LoadFromOffset(kLoadUnsignedHalfword, R2, R4, 0x1000a4);
  __ LoadFromOffset(kLoadUnsignedHalfword, R2, R4, 0x101000);
  __ LoadFromOffset(kLoadUnsignedHalfword, R4, R4, 0x101000);
  __ LoadFromOffset(kLoadWordPair, R2, R4, 12);
  __ LoadFromOffset(kLoadWordPair, R2, R4, 0x3fc);
  __ LoadFromOffset(kLoadWordPair, R2, R4, 0x400);
  __ LoadFromOffset(kLoadWordPair, R2, R4, 0x400a4);
  __ LoadFromOffset(kLoadWordPair, R2, R4, 0x40400);
  __ LoadFromOffset(kLoadWordPair, R4, R4, 0x40400);

  // Excluding R12 from the scratch pool forces the assembler to encode the
  // load with R12 as an operand rather than a temp.
  vixl::aarch32::UseScratchRegisterScope temps(assembler.asm_.GetVIXLAssembler());
  temps.Exclude(R12);
  __ LoadFromOffset(kLoadWord, R0, R12, 12);  // 32-bit because of R12.
  temps.Include(R12);
  __ LoadFromOffset(kLoadWord, R2, R4, 0xa4 - 0x100000);

  __ LoadFromOffset(kLoadSignedByte, R2, R4, 12);
  __ LoadFromOffset(kLoadUnsignedByte, R2, R4, 12);
  __ LoadFromOffset(kLoadSignedHalfword, R2, R4, 12);

  EmitAndCheck("VixlLoadFromOffset", VixlLoadFromOffsetResults);
}
236
// Mirror of VixlLoadFromOffset for ArmVIXLAssembler::StoreToOffset: each
// store kind at offsets around encoding boundaries, the base == source case,
// and a negative offset. Do not reorder the calls: the expected disassembly
// depends on the exact emission order.
TEST_F(ArmVIXLAssemblerTest, VixlStoreToOffset) {
  __ StoreToOffset(kStoreWord, R2, R4, 12);
  __ StoreToOffset(kStoreWord, R2, R4, 0xfff);
  __ StoreToOffset(kStoreWord, R2, R4, 0x1000);
  __ StoreToOffset(kStoreWord, R2, R4, 0x1000a4);
  __ StoreToOffset(kStoreWord, R2, R4, 0x101000);
  __ StoreToOffset(kStoreWord, R4, R4, 0x101000);
  __ StoreToOffset(kStoreHalfword, R2, R4, 12);
  __ StoreToOffset(kStoreHalfword, R2, R4, 0xfff);
  __ StoreToOffset(kStoreHalfword, R2, R4, 0x1000);
  __ StoreToOffset(kStoreHalfword, R2, R4, 0x1000a4);
  __ StoreToOffset(kStoreHalfword, R2, R4, 0x101000);
  __ StoreToOffset(kStoreHalfword, R4, R4, 0x101000);
  __ StoreToOffset(kStoreWordPair, R2, R4, 12);
  __ StoreToOffset(kStoreWordPair, R2, R4, 0x3fc);
  __ StoreToOffset(kStoreWordPair, R2, R4, 0x400);
  __ StoreToOffset(kStoreWordPair, R2, R4, 0x400a4);
  __ StoreToOffset(kStoreWordPair, R2, R4, 0x40400);
  __ StoreToOffset(kStoreWordPair, R4, R4, 0x40400);

  // Excluding R12 from the scratch pool forces the assembler to encode the
  // store with R12 as an operand rather than a temp.
  vixl::aarch32::UseScratchRegisterScope temps(assembler.asm_.GetVIXLAssembler());
  temps.Exclude(R12);
  __ StoreToOffset(kStoreWord, R0, R12, 12);  // 32-bit because of R12.
  temps.Include(R12);
  __ StoreToOffset(kStoreWord, R2, R4, 0xa4 - 0x100000);

  __ StoreToOffset(kStoreByte, R2, R4, 12);

  EmitAndCheck("VixlStoreToOffset", VixlStoreToOffsetResults);
}
267
268 #undef __
269 } // namespace arm
270 } // namespace art
271