/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "calling_convention_arm64.h"

#include <android-base/logging.h>

#include "arch/arm64/jni_frame_arm64.h"
#include "arch/instruction_set.h"
#include "handle_scope-inl.h"
#include "utils/arm64/managed_register_arm64.h"

namespace art {
namespace arm64 {

static const XRegister kXArgumentRegisters[] = {
    X0, X1, X2, X3, X4, X5, X6, X7
};
static_assert(kMaxIntLikeRegisterArguments == arraysize(kXArgumentRegisters));

static const WRegister kWArgumentRegisters[] = {
    W0, W1, W2, W3, W4, W5, W6, W7
};
static_assert(kMaxIntLikeRegisterArguments == arraysize(kWArgumentRegisters));

static const DRegister kDArgumentRegisters[] = {
    D0, D1, D2, D3, D4, D5, D6, D7
};
static_assert(kMaxFloatOrDoubleRegisterArguments == arraysize(kDArgumentRegisters));

static const SRegister kSArgumentRegisters[] = {
    S0, S1, S2, S3, S4, S5, S6, S7
};
static_assert(kMaxFloatOrDoubleRegisterArguments == arraysize(kSArgumentRegisters));
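
// Note: per AAPCS64, W<n> is the 32-bit view of X<n>, and S<n>/D<n> are the
// 32-/64-bit views of the SIMD&FP register V<n>, so the four tables above
// name the same eight GP and eight FP argument registers at different widths.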

static constexpr ManagedRegister kCalleeSaveRegisters[] = {
    // Core registers.
    // Note: The native JNI function may call VM runtime functions that suspend
    // the thread or trigger GC, and the JNI method frame then becomes the top
    // quick frame. To satisfy the GC we therefore save LR and the callee-save
    // registers, similar to a CalleeSaveMethod(RefsOnly) frame.
    // The "JNI function" is the native function that the Java code calls;
    // the "JNI method" is the stub compiled by the JNI compiler.
    // Call chain: managed code (Java) --> JNI method --> JNI function.
    // None of this applies to @CriticalNative methods.

    // The thread register (X19) is saved on the stack.
    Arm64ManagedRegister::FromXRegister(X19),
    Arm64ManagedRegister::FromXRegister(X20),
    Arm64ManagedRegister::FromXRegister(X21),
    Arm64ManagedRegister::FromXRegister(X22),
    Arm64ManagedRegister::FromXRegister(X23),
    Arm64ManagedRegister::FromXRegister(X24),
    Arm64ManagedRegister::FromXRegister(X25),
    Arm64ManagedRegister::FromXRegister(X26),
    Arm64ManagedRegister::FromXRegister(X27),
    Arm64ManagedRegister::FromXRegister(X28),
    Arm64ManagedRegister::FromXRegister(X29),
    Arm64ManagedRegister::FromXRegister(LR),
    // Hard float registers.
    // Consider the case java_method_1 --> JNI method --> JNI function -->
    // java_method_2: we may stop in java_method_2 and still need the values of
    // DEX registers in java_method_1, so all callee-saves used by managed code
    // need to be saved.
    Arm64ManagedRegister::FromDRegister(D8),
    Arm64ManagedRegister::FromDRegister(D9),
    Arm64ManagedRegister::FromDRegister(D10),
    Arm64ManagedRegister::FromDRegister(D11),
    Arm64ManagedRegister::FromDRegister(D12),
    Arm64ManagedRegister::FromDRegister(D13),
    Arm64ManagedRegister::FromDRegister(D14),
    Arm64ManagedRegister::FromDRegister(D15),
};

template <size_t size>
static constexpr uint32_t CalculateCoreCalleeSpillMask(
    const ManagedRegister (&callee_saves)[size]) {
  uint32_t result = 0u;
  for (auto&& r : callee_saves) {
    if (r.AsArm64().IsXRegister()) {
      result |= (1u << r.AsArm64().AsXRegister());
    }
  }
  return result;
}

template <size_t size>
static constexpr uint32_t CalculateFpCalleeSpillMask(const ManagedRegister (&callee_saves)[size]) {
  uint32_t result = 0u;
  for (auto&& r : callee_saves) {
    if (r.AsArm64().IsDRegister()) {
      result |= (1u << r.AsArm64().AsDRegister());
    }
  }
  return result;
}

static constexpr uint32_t kCoreCalleeSpillMask = CalculateCoreCalleeSpillMask(kCalleeSaveRegisters);
static constexpr uint32_t kFpCalleeSpillMask = CalculateFpCalleeSpillMask(kCalleeSaveRegisters);
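// Illustration: with X19..X29 numbered 19..29 and LR being X30 (which the
// static_assert in CalleeSaveRegisters() below relies on), these masks
// evaluate to 0x7ff80000 (X19..X29, LR) and 0x0000ff00 (D8..D15).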

static constexpr ManagedRegister kAapcs64CalleeSaveRegisters[] = {
    // Core registers.
    Arm64ManagedRegister::FromXRegister(X19),
    Arm64ManagedRegister::FromXRegister(X20),
    Arm64ManagedRegister::FromXRegister(X21),
    Arm64ManagedRegister::FromXRegister(X22),
    Arm64ManagedRegister::FromXRegister(X23),
    Arm64ManagedRegister::FromXRegister(X24),
    Arm64ManagedRegister::FromXRegister(X25),
    Arm64ManagedRegister::FromXRegister(X26),
    Arm64ManagedRegister::FromXRegister(X27),
    Arm64ManagedRegister::FromXRegister(X28),
    Arm64ManagedRegister::FromXRegister(X29),
    Arm64ManagedRegister::FromXRegister(LR),
    // Hard float registers.
    Arm64ManagedRegister::FromDRegister(D8),
    Arm64ManagedRegister::FromDRegister(D9),
    Arm64ManagedRegister::FromDRegister(D10),
    Arm64ManagedRegister::FromDRegister(D11),
    Arm64ManagedRegister::FromDRegister(D12),
    Arm64ManagedRegister::FromDRegister(D13),
    Arm64ManagedRegister::FromDRegister(D14),
    Arm64ManagedRegister::FromDRegister(D15),
};

static constexpr uint32_t kAapcs64CoreCalleeSpillMask =
    CalculateCoreCalleeSpillMask(kAapcs64CalleeSaveRegisters);
static constexpr uint32_t kAapcs64FpCalleeSpillMask =
    CalculateFpCalleeSpillMask(kAapcs64CalleeSaveRegisters);

// Calling convention
static ManagedRegister ReturnRegisterForShorty(const char* shorty) {
  if (shorty[0] == 'F') {
    return Arm64ManagedRegister::FromSRegister(S0);
  } else if (shorty[0] == 'D') {
    return Arm64ManagedRegister::FromDRegister(D0);
  } else if (shorty[0] == 'J') {
    return Arm64ManagedRegister::FromXRegister(X0);
  } else if (shorty[0] == 'V') {
    return Arm64ManagedRegister::NoRegister();
  } else {
    return Arm64ManagedRegister::FromWRegister(W0);
  }
}
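
// For example, a method `double f(int, long)` has shorty "DIJ": the leading
// 'D' selects D0 as the return register, while all 32-bit integer-like return
// types (the trailing `else`) come back in W0.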

ManagedRegister Arm64ManagedRuntimeCallingConvention::ReturnRegister() {
  return ReturnRegisterForShorty(GetShorty());
}

ManagedRegister Arm64JniCallingConvention::ReturnRegister() {
  return ReturnRegisterForShorty(GetShorty());
}

ManagedRegister Arm64JniCallingConvention::IntReturnRegister() {
  return Arm64ManagedRegister::FromWRegister(W0);
}

// Managed runtime calling convention

ManagedRegister Arm64ManagedRuntimeCallingConvention::MethodRegister() {
  return Arm64ManagedRegister::FromXRegister(X0);
}

bool Arm64ManagedRuntimeCallingConvention::IsCurrentParamInRegister() {
  if (IsCurrentParamAFloatOrDouble()) {
    return itr_float_and_doubles_ < kMaxFloatOrDoubleRegisterArguments;
  } else {
    size_t non_fp_arg_number = itr_args_ - itr_float_and_doubles_;
    return /* method */ 1u + non_fp_arg_number < kMaxIntLikeRegisterArguments;
  }
}

bool Arm64ManagedRuntimeCallingConvention::IsCurrentParamOnStack() {
  return !IsCurrentParamInRegister();
}

ManagedRegister Arm64ManagedRuntimeCallingConvention::CurrentParamRegister() {
  DCHECK(IsCurrentParamInRegister());
  if (IsCurrentParamAFloatOrDouble()) {
    if (IsCurrentParamADouble()) {
      return Arm64ManagedRegister::FromDRegister(kDArgumentRegisters[itr_float_and_doubles_]);
    } else {
      return Arm64ManagedRegister::FromSRegister(kSArgumentRegisters[itr_float_and_doubles_]);
    }
  } else {
    size_t non_fp_arg_number = itr_args_ - itr_float_and_doubles_;
    if (IsCurrentParamALong()) {
      XRegister x_reg = kXArgumentRegisters[/* method */ 1u + non_fp_arg_number];
      return Arm64ManagedRegister::FromXRegister(x_reg);
    } else {
      WRegister w_reg = kWArgumentRegisters[/* method */ 1u + non_fp_arg_number];
      return Arm64ManagedRegister::FromWRegister(w_reg);
    }
  }
}

FrameOffset Arm64ManagedRuntimeCallingConvention::CurrentParamStackOffset() {
  return FrameOffset(displacement_.Int32Value() +  // displacement
                     kFramePointerSize +  // Method ref
                     (itr_slots_ * sizeof(uint32_t)));  // offset into in args
}
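
// That is, the argument in vreg slot N (itr_slots_ == N) lives at
// displacement_ + kFramePointerSize (8 on arm64, skipping the ArtMethod*
// reference) + 4 * N in the managed caller's argument area.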

// JNI calling convention

Arm64JniCallingConvention::Arm64JniCallingConvention(bool is_static,
                                                     bool is_synchronized,
                                                     bool is_critical_native,
                                                     const char* shorty)
    : JniCallingConvention(is_static,
                           is_synchronized,
                           is_critical_native,
                           shorty,
                           kArm64PointerSize) {
}

uint32_t Arm64JniCallingConvention::CoreSpillMask() const {
  return is_critical_native_ ? 0u : kCoreCalleeSpillMask;
}

uint32_t Arm64JniCallingConvention::FpSpillMask() const {
  return is_critical_native_ ? 0u : kFpCalleeSpillMask;
}

ManagedRegister Arm64JniCallingConvention::ReturnScratchRegister() const {
  return ManagedRegister::NoRegister();
}

size_t Arm64JniCallingConvention::FrameSize() const {
  if (is_critical_native_) {
    CHECK(!SpillsMethod());
    CHECK(!HasLocalReferenceSegmentState());
    CHECK(!HasHandleScope());
    CHECK(!SpillsReturnValue());
    return 0u;  // There is no managed frame for @CriticalNative.
  }

  // Method*, callee save area size, local reference segment state.
  CHECK(SpillsMethod());
  size_t method_ptr_size = static_cast<size_t>(kFramePointerSize);
  size_t callee_save_area_size = CalleeSaveRegisters().size() * kFramePointerSize;
  size_t total_size = method_ptr_size + callee_save_area_size;

  CHECK(HasLocalReferenceSegmentState());
  total_size += sizeof(uint32_t);

  CHECK(HasHandleScope());
  total_size += HandleScope::SizeOf(kArm64PointerSize, ReferenceCount());

  // Plus return value spill area size.
  CHECK(SpillsReturnValue());
  total_size += SizeOfReturnValue();

  return RoundUp(total_size, kStackAlignment);
}
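
// The managed JNI frame therefore comprises the ArtMethod* slot, the
// callee-save spill area, a uint32_t of local reference segment state, the
// handle scope, and the return value spill area, rounded up to
// kStackAlignment.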

size_t Arm64JniCallingConvention::OutFrameSize() const {
  // Count param args, including JNIEnv* and jclass*.
  size_t all_args = NumberOfExtraArgumentsForJni() + NumArgs();
  size_t num_fp_args = NumFloatOrDoubleArgs();
  DCHECK_GE(all_args, num_fp_args);
  size_t num_non_fp_args = all_args - num_fp_args;
  // The size of outgoing arguments.
  size_t size = GetNativeOutArgsSize(num_fp_args, num_non_fp_args);

  // @CriticalNative can use tail call as all managed callee saves are preserved by AAPCS64.
  static_assert((kCoreCalleeSpillMask & ~kAapcs64CoreCalleeSpillMask) == 0u);
  static_assert((kFpCalleeSpillMask & ~kAapcs64FpCalleeSpillMask) == 0u);

  // For @CriticalNative, we can make a tail call if there are no stack args and
  // we do not need to extend the result. Otherwise, add space for return PC.
  if (is_critical_native_ && (size != 0u || RequiresSmallResultTypeExtension())) {
    size += kFramePointerSize;  // We need to spill LR with the args.
  }
  size_t out_args_size = RoundUp(size, kAapcs64StackAlignment);
  if (UNLIKELY(IsCriticalNative())) {
    DCHECK_EQ(out_args_size, GetCriticalNativeStubFrameSize(GetShorty(), NumArgs() + 1u));
  }
  return out_args_size;
}
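
// Illustration (hypothetical methods): a @CriticalNative
// `static native long f(long, long)` passes everything in registers and needs
// no result extension, so OutFrameSize() is 0 and the stub can tail-call; a
// `static native byte g(byte)` requires a small result type extension after
// the call, so LR is spilled and the out-args area becomes one aligned slot.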

ArrayRef<const ManagedRegister> Arm64JniCallingConvention::CalleeSaveRegisters() const {
  if (UNLIKELY(IsCriticalNative())) {
    if (UseTailCall()) {
      return ArrayRef<const ManagedRegister>();  // Do not spill anything.
    } else {
      // Spill LR with out args.
      static_assert((kCoreCalleeSpillMask >> LR) == 1u);  // Contains LR as the highest bit.
      constexpr size_t lr_index = POPCOUNT(kCoreCalleeSpillMask) - 1u;
      static_assert(kCalleeSaveRegisters[lr_index].Equals(
                        Arm64ManagedRegister::FromXRegister(LR)));
      return ArrayRef<const ManagedRegister>(kCalleeSaveRegisters).SubArray(
          /*pos=*/ lr_index, /*length=*/ 1u);
    }
  } else {
    return ArrayRef<const ManagedRegister>(kCalleeSaveRegisters);
  }
}

bool Arm64JniCallingConvention::IsCurrentParamInRegister() {
  if (IsCurrentParamAFloatOrDouble()) {
    return (itr_float_and_doubles_ < kMaxFloatOrDoubleRegisterArguments);
  } else {
    return ((itr_args_ - itr_float_and_doubles_) < kMaxIntLikeRegisterArguments);
  }
  // TODO: Can we just call CurrentParamRegister to figure this out?
}

bool Arm64JniCallingConvention::IsCurrentParamOnStack() {
  // Is this ever not the same for all the architectures?
  return !IsCurrentParamInRegister();
}

ManagedRegister Arm64JniCallingConvention::CurrentParamRegister() {
  CHECK(IsCurrentParamInRegister());
  if (IsCurrentParamAFloatOrDouble()) {
    CHECK_LT(itr_float_and_doubles_, kMaxFloatOrDoubleRegisterArguments);
    if (IsCurrentParamADouble()) {
      return Arm64ManagedRegister::FromDRegister(kDArgumentRegisters[itr_float_and_doubles_]);
    } else {
      return Arm64ManagedRegister::FromSRegister(kSArgumentRegisters[itr_float_and_doubles_]);
    }
  } else {
    int gp_reg = itr_args_ - itr_float_and_doubles_;
    CHECK_LT(static_cast<unsigned int>(gp_reg), kMaxIntLikeRegisterArguments);
    if (IsCurrentParamALong() || IsCurrentParamAReference() || IsCurrentParamJniEnv()) {
      return Arm64ManagedRegister::FromXRegister(kXArgumentRegisters[gp_reg]);
    } else {
      return Arm64ManagedRegister::FromWRegister(kWArgumentRegisters[gp_reg]);
    }
  }
}

FrameOffset Arm64JniCallingConvention::CurrentParamStackOffset() {
  CHECK(IsCurrentParamOnStack());
  size_t args_on_stack = itr_args_
      - std::min(kMaxFloatOrDoubleRegisterArguments,
                 static_cast<size_t>(itr_float_and_doubles_))
      - std::min(kMaxIntLikeRegisterArguments,
                 static_cast<size_t>(itr_args_ - itr_float_and_doubles_));
  size_t offset = displacement_.Int32Value() - OutFrameSize() + (args_on_stack * kFramePointerSize);
  CHECK_LT(offset, OutFrameSize());
  return FrameOffset(offset);
}
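
// Illustration: args_on_stack subtracts the register-allocated FP and
// integer-like arguments from itr_args_. For a hypothetical @CriticalNative
// method whose first 8 arguments are doubles and the rest longs, the 17th
// argument sees itr_args_ == 16 and itr_float_and_doubles_ == 8, so
// args_on_stack == 0 and it occupies the first 8-byte stack slot at
// displacement_ - OutFrameSize().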

ManagedRegister Arm64JniCallingConvention::HiddenArgumentRegister() const {
  CHECK(IsCriticalNative());
  // X15 is neither managed callee-save, nor argument register, nor scratch register.
  // TODO: Change to static_assert; std::none_of should be constexpr since C++20.
  DCHECK(std::none_of(kCalleeSaveRegisters,
                      kCalleeSaveRegisters + std::size(kCalleeSaveRegisters),
                      [](ManagedRegister callee_save) constexpr {
                        return callee_save.Equals(Arm64ManagedRegister::FromXRegister(X15));
                      }));
  DCHECK(std::none_of(kXArgumentRegisters,
                      kXArgumentRegisters + std::size(kXArgumentRegisters),
                      [](XRegister reg) { return reg == X15; }));
  return Arm64ManagedRegister::FromXRegister(X15);
}

// Whether to use tail call (used only for @CriticalNative).
bool Arm64JniCallingConvention::UseTailCall() const {
  CHECK(IsCriticalNative());
  return OutFrameSize() == 0u;
}

}  // namespace arm64
}  // namespace art