/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "asm_support_arm64.S"

    /*
     * Jni dlsym lookup stub.
     *
     * Saves all AAPCS64 argument registers (x0-x7, d0-d7) of the pending
     * native call, asks the runtime to resolve the native implementation,
     * restores the arguments unchanged and tail-calls the resolved code.
     * If resolution fails (null result), returns to the caller so it can
     * handle the pending exception.
     */
    .extern artFindNativeMethod
    .extern artFindNativeMethodRunnable

ENTRY art_jni_dlsym_lookup_stub
    // Spill regs: the frame record (x29/x30) plus every register that may
    // carry an argument of the native call. Each `stp` keeps SP 16-byte
    // aligned as AAPCS64 requires.
    stp   x29, x30, [sp, #-16]!
    .cfi_adjust_cfa_offset 16
    .cfi_rel_offset x29, 0
    .cfi_rel_offset x30, 8
    mov   x29, sp
    stp   d6, d7,   [sp, #-16]!
    .cfi_adjust_cfa_offset 16
    stp   d4, d5,   [sp, #-16]!
    .cfi_adjust_cfa_offset 16
    stp   d2, d3,   [sp, #-16]!
    .cfi_adjust_cfa_offset 16
    stp   d0, d1,   [sp, #-16]!
    .cfi_adjust_cfa_offset 16
    stp   x6, x7,   [sp, #-16]!
    .cfi_adjust_cfa_offset 16
    stp   x4, x5,   [sp, #-16]!
    .cfi_adjust_cfa_offset 16
    stp   x2, x3,   [sp, #-16]!
    .cfi_adjust_cfa_offset 16
    stp   x0, x1,   [sp, #-16]!
    .cfi_adjust_cfa_offset 16

    mov   x0, xSELF   // pass Thread::Current()
    // Call artFindNativeMethod() for normal native and artFindNativeMethodRunnable()
    // for @FastNative or @CriticalNative. Dig the ArtMethod* out of the
    // thread's top quick frame to inspect its access flags.
    ldr   xIP0, [x0, #THREAD_TOP_QUICK_FRAME_OFFSET]      // uintptr_t tagged_quick_frame
    bic   xIP0, xIP0, #1                                  // ArtMethod** sp (clear GenericJNI tag bit)
    ldr   xIP0, [xIP0]                                    // ArtMethod* method
    ldr   xIP0, [xIP0, #ART_METHOD_ACCESS_FLAGS_OFFSET]   // uint32_t access_flags
    mov   xIP1, #(ACCESS_FLAGS_METHOD_IS_FAST_NATIVE | ACCESS_FLAGS_METHOD_IS_CRITICAL_NATIVE)
    tst   xIP0, xIP1
    b.ne  .Llookup_stub_fast_or_critical_native
    bl    artFindNativeMethod
    b     .Llookup_stub_continue
.Llookup_stub_fast_or_critical_native:
    bl    artFindNativeMethodRunnable
.Llookup_stub_continue:
    mov   x17, x0   // store result in scratch reg. (x17/IP1 is caller-clobbered, safe for a tail call)

    // load spill regs.
    ldp   x0, x1,   [sp], #16
    .cfi_adjust_cfa_offset -16
    ldp   x2, x3,   [sp], #16
    .cfi_adjust_cfa_offset -16
    ldp   x4, x5,   [sp], #16
    .cfi_adjust_cfa_offset -16
    ldp   x6, x7,   [sp], #16
    .cfi_adjust_cfa_offset -16
    ldp   d0, d1,   [sp], #16
    .cfi_adjust_cfa_offset -16
    ldp   d2, d3,   [sp], #16
    .cfi_adjust_cfa_offset -16
    ldp   d4, d5,   [sp], #16
    .cfi_adjust_cfa_offset -16
    ldp   d6, d7,   [sp], #16
    .cfi_adjust_cfa_offset -16
    ldp   x29, x30, [sp], #16
    .cfi_adjust_cfa_offset -16
    .cfi_restore x29
    .cfi_restore x30

    cbz   x17, 1f   // is method code null?
    br    x17       // if non-null, tail call to method's code.

1:
    ret             // restore regs and return to caller to handle exception.
END art_jni_dlsym_lookup_stub

    /*
     * Jni dlsym lookup stub for @CriticalNative.
     *
     * On entry there is no managed frame (a @CriticalNative call from
     * compiled code or a JNI stub has no runtime bookkeeping), so this stub
     * builds a SaveRefsAndArgs managed frame on the fly — moving any stack
     * args out of the way first — before calling the runtime lookup, then
     * tears it all down and tail-calls the resolved code.
     */
ENTRY art_jni_dlsym_lookup_critical_stub
    // The hidden arg holding the tagged method (bit 0 set means GenericJNI) is x15.
    // For Generic JNI we already have a managed frame, so we reuse the art_jni_dlsym_lookup_stub.
    tbnz  x15, #0, art_jni_dlsym_lookup_stub

    // Save args, the hidden arg and caller PC. No CFI needed for args and the hidden arg.
    // Frame layout: x0-x7 (8*8), d0-d7 (8*8), x15+lr (2*8) = 144 bytes.
    stp   x0, x1, [sp, #-(8 * 8 + 8 * 8 + 2 * 8)]!
    .cfi_adjust_cfa_offset (8 * 8 + 8 * 8 + 2 * 8)
    stp   x2, x3, [sp, #16]
    stp   x4, x5, [sp, #32]
    stp   x6, x7, [sp, #48]
    stp   d0, d1, [sp, #64]
    stp   d2, d3, [sp, #80]
    stp   d4, d5, [sp, #96]
    stp   d6, d7, [sp, #112]
    stp   x15, lr, [sp, #128]
    .cfi_rel_offset lr, 136

    // Call artCriticalNativeFrameSize(method, caller_pc)
    mov   x0, x15   // x0 := method (from hidden arg)
    mov   x1, lr    // x1 := caller_pc
    bl    artCriticalNativeFrameSize

    // Move frame size to x14 (i.e. the size of the caller's outgoing stack args).
    mov   x14, x0

    // Restore args, the hidden arg and caller PC.
    ldp   x2, x3, [sp, #16]
    ldp   x4, x5, [sp, #32]
    ldp   x6, x7, [sp, #48]
    ldp   d0, d1, [sp, #64]
    ldp   d2, d3, [sp, #80]
    ldp   d4, d5, [sp, #96]
    ldp   d6, d7, [sp, #112]
    ldp   x15, lr, [sp, #128]
    .cfi_restore lr
    ldp   x0, x1, [sp], #(8 * 8 + 8 * 8 + 2 * 8)
    .cfi_adjust_cfa_offset -(8 * 8 + 8 * 8 + 2 * 8)

    // Reserve space for a SaveRefsAndArgs managed frame, either for the actual runtime
    // method or for a GenericJNI frame which is similar but has a native method and a tag.
    INCREASE_FRAME FRAME_SIZE_SAVE_REFS_AND_ARGS

    // Calculate the base address of the managed frame (above the stack args, if any).
    add   x13, sp, x14

    // Prepare the return address for managed stack walk of the SaveRefsAndArgs frame.
    // If we're coming from JNI stub with tail call, it is LR. If we're coming from
    // JNI stub that saved the return address, it will be the last value we copy below.
    // If we're coming directly from compiled code, it is LR, set further down.
    mov   xIP1, lr

    // Move the stack args if any, from their old position above the new frame
    // down to the bottom of the stack, 16 bytes at a time. Note: this
    // intentionally overwrites xIP1; the last pair copied holds the saved
    // return address in the JNI-stub-with-saved-RA case (see comment above).
    cbz   x14, .Lcritical_skip_copy_args
    mov   x12, sp
.Lcritical_copy_args_loop:
    ldp   xIP0, xIP1, [x12, #FRAME_SIZE_SAVE_REFS_AND_ARGS]
    subs  x14, x14, #16
    stp   xIP0, xIP1, [x12], #16
    bne   .Lcritical_copy_args_loop
.Lcritical_skip_copy_args:

    // Spill registers for the SaveRefsAndArgs frame above the stack args.
    // Note that the runtime shall not examine the args here, otherwise we would have to
    // move them in registers and stack to account for the difference between managed and
    // native ABIs. Do not update CFI while we hold the frame address in x13 and the values
    // in registers are unchanged.
    stp   d0, d1, [x13, #16]
    stp   d2, d3, [x13, #32]
    stp   d4, d5, [x13, #48]
    stp   d6, d7, [x13, #64]
    stp   x1, x2, [x13, #80]
    stp   x3, x4, [x13, #96]
    stp   x5, x6, [x13, #112]
    stp   x7, x20, [x13, #128]
    stp   x21, x22, [x13, #144]
    stp   x23, x24, [x13, #160]
    stp   x25, x26, [x13, #176]
    stp   x27, x28, [x13, #192]
    stp   x29, xIP1, [x13, #208]   // xIP1: Save return address for tail call from JNI stub.
    // (If there were any stack args, we're storing the value that's already there.
    // For direct calls from compiled managed code, we shall overwrite this below.)

    // Move the managed frame address to native callee-save register x29 and update CFI.
    // The CFI expressions describe the callee-saves as stored at x29-relative offsets.
    mov   x29, x13
    // Skip args d0-d7, x1-x7
    CFI_EXPRESSION_BREG 20, 29, 136
    CFI_EXPRESSION_BREG 21, 29, 144
    CFI_EXPRESSION_BREG 22, 29, 152
    CFI_EXPRESSION_BREG 23, 29, 160
    CFI_EXPRESSION_BREG 24, 29, 168
    CFI_EXPRESSION_BREG 25, 29, 176
    CFI_EXPRESSION_BREG 26, 29, 184
    CFI_EXPRESSION_BREG 27, 29, 192
    CFI_EXPRESSION_BREG 28, 29, 200
    CFI_EXPRESSION_BREG 29, 29, 208
    // The saved return PC for managed stack walk is not necessarily our LR.

    // Save our return PC in the padding.
    str   lr, [x29, #__SIZEOF_POINTER__]
    CFI_EXPRESSION_BREG 30, 29, __SIZEOF_POINTER__

    ldr   wIP0, [x15, #ART_METHOD_ACCESS_FLAGS_OFFSET]   // Load access flags.
    add   x14, x29, #1   // Prepare managed SP tagged for a GenericJNI frame.
    tbnz  wIP0, #ACCESS_FLAGS_METHOD_IS_NATIVE_BIT, .Lcritical_skip_prepare_runtime_method

    // When coming from a compiled method, the return PC for managed stack walk is LR.
    // (When coming from a compiled stub, the correct return PC is already stored above.)
    str   lr, [x29, #(FRAME_SIZE_SAVE_REFS_AND_ARGS - __SIZEOF_POINTER__)]

    // Replace the target method with the SaveRefsAndArgs runtime method.
    LOAD_RUNTIME_INSTANCE x15
    ldr   x15, [x15, #RUNTIME_SAVE_REFS_AND_ARGS_METHOD_OFFSET]

    mov   x14, x29   // Prepare untagged managed SP for the runtime method.

.Lcritical_skip_prepare_runtime_method:
    // Store the method on the bottom of the managed frame.
    str   x15, [x29]

    // Place (maybe tagged) managed SP in Thread::Current()->top_quick_frame.
    str   x14, [xSELF, #THREAD_TOP_QUICK_FRAME_OFFSET]

    // Preserve the native arg register x0 in callee-save register x28 which was saved above.
    mov   x28, x0

    // Call artFindNativeMethodRunnable()
    mov   x0, xSELF   // pass Thread::Current()
    bl    artFindNativeMethodRunnable

    // Store result in scratch reg.
    mov   x13, x0

    // Restore the native arg register x0.
    mov   x0, x28

    // Restore our return PC.
    RESTORE_REG_BASE x29, lr, __SIZEOF_POINTER__

    // Remember the stack args size, negated because SP cannot be on the right-hand side in SUB.
    sub   x14, sp, x29

    // Restore the frame. We shall not need the method anymore.
    ldp   d0, d1, [x29, #16]
    ldp   d2, d3, [x29, #32]
    ldp   d4, d5, [x29, #48]
    ldp   d6, d7, [x29, #64]
    ldp   x1, x2, [x29, #80]
    ldp   x3, x4, [x29, #96]
    ldp   x5, x6, [x29, #112]
    ldp   x7, x20, [x29, #128]
    .cfi_restore x20
    RESTORE_TWO_REGS_BASE x29, x21, x22, 144
    RESTORE_TWO_REGS_BASE x29, x23, x24, 160
    RESTORE_TWO_REGS_BASE x29, x25, x26, 176
    RESTORE_TWO_REGS_BASE x29, x27, x28, 192
    RESTORE_REG_BASE x29, x29, 208

    REFRESH_MARKING_REGISTER

    // Check for exception before moving args back to keep the return PC for managed stack walk.
    cbz   x13, .Lcritical_deliver_exception

    .cfi_remember_state

    // Move stack args to their original place (x14 holds their negated size).
    cbz   x14, .Lcritical_skip_copy_args_back
    sub   x12, sp, x14
.Lcritical_copy_args_back_loop:
    ldp   xIP0, xIP1, [x12, #-16]!
    adds  x14, x14, #16
    stp   xIP0, xIP1, [x12, #FRAME_SIZE_SAVE_REFS_AND_ARGS]
    bne   .Lcritical_copy_args_back_loop
.Lcritical_skip_copy_args_back:

    // Remove the frame reservation.
    DECREASE_FRAME FRAME_SIZE_SAVE_REFS_AND_ARGS

    // Do the tail call.
    br    x13
    .cfi_restore_state
    .cfi_def_cfa sp, FRAME_SIZE_SAVE_REFS_AND_ARGS

.Lcritical_deliver_exception:
    // The exception delivery checks that xSELF was saved but the SaveRefsAndArgs
    // frame does not save it, so we cannot use the existing SaveRefsAndArgs frame.
    // That's why we checked for exception after restoring registers from it.
    // We need to build a SaveAllCalleeSaves frame instead. Args are irrelevant at this
    // point but keep the area allocated for stack args to keep CFA definition simple.
    DECREASE_FRAME FRAME_SIZE_SAVE_REFS_AND_ARGS - FRAME_SIZE_SAVE_ALL_CALLEE_SAVES

    // Calculate the base address of the managed frame (x14 is negative, so SUB adds).
    sub   x13, sp, x14

    // Spill registers for the SaveAllCalleeSaves frame above the stack args area. Do not update
    // CFI while we hold the frame address in x13 and the values in registers are unchanged.
    stp   d8, d9, [x13, #16]
    stp   d10, d11, [x13, #32]
    stp   d12, d13, [x13, #48]
    stp   d14, d15, [x13, #64]
    stp   x19, x20, [x13, #80]
    stp   x21, x22, [x13, #96]
    stp   x23, x24, [x13, #112]
    stp   x25, x26, [x13, #128]
    stp   x27, x28, [x13, #144]
    str   x29, [x13, #160]
    // Keep the caller PC for managed stack walk.

    // Move the managed frame address to native callee-save register x29 and update CFI.
    mov   x29, x13
    CFI_EXPRESSION_BREG 19, 29, 80
    CFI_EXPRESSION_BREG 20, 29, 88
    CFI_EXPRESSION_BREG 21, 29, 96
    CFI_EXPRESSION_BREG 22, 29, 104
    CFI_EXPRESSION_BREG 23, 29, 112
    CFI_EXPRESSION_BREG 24, 29, 120
    CFI_EXPRESSION_BREG 25, 29, 128
    CFI_EXPRESSION_BREG 26, 29, 136
    CFI_EXPRESSION_BREG 27, 29, 144
    CFI_EXPRESSION_BREG 28, 29, 152
    CFI_EXPRESSION_BREG 29, 29, 160
    // The saved return PC for managed stack walk is not necessarily our LR.

    // Save our return PC in the padding.
    str   lr, [x29, #__SIZEOF_POINTER__]
    CFI_EXPRESSION_BREG 30, 29, __SIZEOF_POINTER__

    // Store ArtMethod* Runtime::callee_save_methods_[kSaveAllCalleeSaves] to the managed frame.
    LOAD_RUNTIME_INSTANCE xIP0
    ldr   xIP0, [xIP0, #RUNTIME_SAVE_ALL_CALLEE_SAVES_METHOD_OFFSET]
    str   xIP0, [x29]

    // Place the managed frame SP in Thread::Current()->top_quick_frame.
    str   x29, [xSELF, #THREAD_TOP_QUICK_FRAME_OFFSET]

    DELIVER_PENDING_EXCEPTION_FRAME_READY
END art_jni_dlsym_lookup_critical_stub