/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "asm_support_arm.S"

    /*
     * Jni dlsym lookup stub.
     * Looks up the native implementation for the current method and tail-calls
     * it, preserving the JNI argument registers r0-r3 across the lookup.
     * On lookup failure (null result) it returns to the caller, which is
     * expected to handle the pending exception.
     */
    .extern artFindNativeMethod
    .extern artFindNativeMethodRunnable
ENTRY art_jni_dlsym_lookup_stub
    push   {r0, r1, r2, r3, lr}           @ spill regs
    .cfi_adjust_cfa_offset 20
    .cfi_rel_offset lr, 16
    sub    sp, #12                        @ pad stack pointer to align frame
    .cfi_adjust_cfa_offset 12

    mov    r0, rSELF                      @ pass Thread::Current()
    // Call artFindNativeMethod() for normal native and artFindNativeMethodRunnable()
    // for @FastNative or @CriticalNative.
    // The method is located through Thread::Current()->top_quick_frame; bit 0 of
    // the stored value is a tag that must be cleared to get the ArtMethod** sp.
    ldr    ip, [r0, #THREAD_TOP_QUICK_FRAME_OFFSET]   // uintptr_t tagged_quick_frame
    bic    ip, #1                                     // ArtMethod** sp
    ldr    ip, [ip]                                   // ArtMethod* method
    ldr    ip, [ip, #ART_METHOD_ACCESS_FLAGS_OFFSET]  // uint32_t access_flags
    tst    ip, #(ACCESS_FLAGS_METHOD_IS_FAST_NATIVE | ACCESS_FLAGS_METHOD_IS_CRITICAL_NATIVE)
    bne    .Llookup_stub_fast_or_critical_native
    blx    artFindNativeMethod
    b      .Llookup_stub_continue
.Llookup_stub_fast_or_critical_native:
    blx    artFindNativeMethodRunnable
.Llookup_stub_continue:
    mov    r12, r0                        @ save result in r12 (ip survives the pops below)

    add    sp, #12                        @ restore stack pointer
    .cfi_adjust_cfa_offset -12
    cbz    r0, 1f                         @ is method code null?
    pop    {r0, r1, r2, r3, lr}           @ restore regs
    .cfi_adjust_cfa_offset -20
    .cfi_restore lr
    bx     r12                            @ if non-null, tail call to method's code
1:
    pop    {r0, r1, r2, r3, pc}           @ restore regs and return to caller to handle exception
END art_jni_dlsym_lookup_stub

    /*
     * Jni dlsym lookup stub for @CriticalNative methods.
     * The hidden argument (r4) holds the tagged target method; bit 0 set means
     * a GenericJNI frame already exists and the plain stub above is reused.
     * Otherwise this builds a temporary SaveRefsAndArgs managed frame (moving
     * any native stack args out of the way), performs the lookup, restores the
     * frame and tail-calls the resolved code, or delivers a pending exception
     * from a SaveAllCalleeSaves frame on failure.
     */
ENTRY art_jni_dlsym_lookup_critical_stub
    // The hidden arg holding the tagged method (bit 0 set means GenericJNI) is r4.
    // For Generic JNI we already have a managed frame, so we reuse the art_jni_dlsym_lookup_stub.
    tst    r4, #1
    bne    art_jni_dlsym_lookup_stub

    // Reserve space for a SaveRefsAndArgs managed frame, either for the actual runtime
    // method or for a GenericJNI frame which is similar but has a native method and a tag.
    // Do this eagerly, so that we can use these registers as temps without the need to
    // save and restore them multiple times.
    INCREASE_FRAME FRAME_SIZE_SAVE_REFS_AND_ARGS

    // Save args, the hidden arg and caller PC. No CFI needed for args and the hidden arg.
    push   {r0, r1, r2, r3, r4, lr}
    .cfi_adjust_cfa_offset 24
    .cfi_rel_offset lr, 20

    // Call artCriticalNativeFrameSize(method, caller_pc).
    // Returns the size of the stack args area in r0 (used by the copy loop below).
    mov    r0, r4                         // r0 := method (from hidden arg)
    mov    r1, lr                         // r1 := caller_pc
    bl     artCriticalNativeFrameSize

    // Prepare the return address for managed stack walk of the SaveRefsAndArgs frame.
    // If we're coming from JNI stub with tail call, it is LR. If we're coming from
    // JNI stub that saved the return address, it will be the last value we copy below.
    // If we're coming directly from compiled code, it is LR, set further down.
    ldr    lr, [sp, #20]

    // Move the stack args if any. r0 holds the remaining byte count; each
    // iteration moves 8 bytes down by FRAME_SIZE_SAVE_REFS_AND_ARGS.
    add    r4, sp, #24
    cbz    r0, .Lcritical_skip_copy_args
.Lcritical_copy_args_loop:
    ldrd   ip, lr, [r4, #FRAME_SIZE_SAVE_REFS_AND_ARGS]
    subs   r0, r0, #8
    strd   ip, lr, [r4], #8
    bne    .Lcritical_copy_args_loop
.Lcritical_skip_copy_args:
    // The managed frame address is now in R4. This is conveniently a callee-save in native ABI.

    // Restore args.
    pop    {r0, r1, r2, r3}
    .cfi_adjust_cfa_offset -16

    // Spill registers for the SaveRefsAndArgs frame above the stack args.
    // Note that the runtime shall not examine the args here, otherwise we would have to
    // move them in registers and stack to account for the difference between managed and
    // native ABIs.
    add    ip, r4, #FRAME_SIZE_SAVE_REFS_AND_ARGS - 40
    stmia  ip, {r1-r3, r5-r8, r10-r11, lr}  // LR: Save return address for tail call from JNI stub.
    // (If there were any stack args, we're storing the value that's already there.
    // For direct calls from compiled managed code, we shall overwrite this below.)
    // Skip args r1-r3.
    CFI_EXPRESSION_BREG 5, 4, FRAME_SIZE_SAVE_REFS_AND_ARGS - 28
    CFI_EXPRESSION_BREG 6, 4, FRAME_SIZE_SAVE_REFS_AND_ARGS - 24
    CFI_EXPRESSION_BREG 7, 4, FRAME_SIZE_SAVE_REFS_AND_ARGS - 20
    CFI_EXPRESSION_BREG 8, 4, FRAME_SIZE_SAVE_REFS_AND_ARGS - 16
    CFI_EXPRESSION_BREG 10, 4, FRAME_SIZE_SAVE_REFS_AND_ARGS - 12
    CFI_EXPRESSION_BREG 11, 4, FRAME_SIZE_SAVE_REFS_AND_ARGS - 8
    // The saved return PC for managed stack walk is not necessarily our LR.
    // Skip managed FP args as these are native ABI caller-saves and not args.

    // Restore the hidden arg to r1 and caller PC.
    pop    {r1, lr}
    .cfi_adjust_cfa_offset -8
    .cfi_restore lr

    // Save our return PC in the padding.
    str    lr, [r4, #__SIZEOF_POINTER__]
    CFI_EXPRESSION_BREG 14, 4, __SIZEOF_POINTER__

    ldr    ip, [r1, #ART_METHOD_ACCESS_FLAGS_OFFSET]  // Load access flags.
    add    r2, r4, #1            // Prepare managed SP tagged for a GenericJNI frame.
    tst    ip, #ACCESS_FLAGS_METHOD_IS_NATIVE
    bne    .Lcritical_skip_prepare_runtime_method

    // When coming from a compiled method, the return PC for managed stack walk is LR.
    // (When coming from a compiled stub, the correct return PC is already stored above.)
    str    lr, [r4, #(FRAME_SIZE_SAVE_REFS_AND_ARGS - __SIZEOF_POINTER__)]

    // Replace the target method with the SaveRefsAndArgs runtime method.
    RUNTIME_CURRENT1 r1
    ldr    r1, [r1, #RUNTIME_SAVE_REFS_AND_ARGS_METHOD_OFFSET]

    mov    r2, r4                // Prepare untagged managed SP for the runtime method.

.Lcritical_skip_prepare_runtime_method:
    // Store the method on the bottom of the managed frame.
    str    r1, [r4]

    // Place (maybe tagged) managed SP in Thread::Current()->top_quick_frame.
    str    r2, [rSELF, #THREAD_TOP_QUICK_FRAME_OFFSET]

    // Preserve the native arg register r0 in callee-save register r10 which was saved above.
    mov    r10, r0

    // Call artFindNativeMethodRunnable()
    mov    r0, rSELF             // pass Thread::Current()
    bl     artFindNativeMethodRunnable

    // Store result in scratch reg.
    mov    ip, r0

    // Restore the native arg register r0.
    mov    r0, r10

    // Restore the frame. We shall not need the method anymore.
    add    r1, r4, #FRAME_SIZE_SAVE_REFS_AND_ARGS - 40
    ldmia  r1, {r1-r3, r5-r8, r10-r11}
    .cfi_restore r5
    .cfi_restore r6
    .cfi_restore r7
    .cfi_restore r8
    .cfi_restore r10
    .cfi_restore r11

    REFRESH_MARKING_REGISTER

    // Check for exception before moving args back to keep the return PC for managed stack walk.
    cmp    ip, #0
    beq    .Lcritical_deliver_exception

    .cfi_remember_state

    // Restore our return PC.
    ldr    lr, [r4, #__SIZEOF_POINTER__]
    .cfi_restore lr

    // Move stack args to their original place.
    // (Inverse of the copy-down loop above; walks r4 back down to SP.)
    cmp    sp, r4
    beq    .Lcritical_skip_copy_args_back
    push   {r0, r1, r2, r3}
    .cfi_adjust_cfa_offset 16
    add    r0, sp, #16
    sub    r0, r4, r0
.Lcritical_copy_args_loop_back:
    ldrd   r2, r3, [r4, #-8]!
    subs   r0, r0, #8
    strd   r2, r3, [r4, #FRAME_SIZE_SAVE_REFS_AND_ARGS]
    bne    .Lcritical_copy_args_loop_back
    pop    {r0, r1, r2, r3}
    .cfi_adjust_cfa_offset -16
.Lcritical_skip_copy_args_back:

    // Remove the frame reservation.
    DECREASE_FRAME FRAME_SIZE_SAVE_REFS_AND_ARGS

    // Do the tail call.
    bx     ip
    .cfi_restore_state
    .cfi_def_cfa sp, FRAME_SIZE_SAVE_REFS_AND_ARGS

.Lcritical_deliver_exception:
    // The exception delivery checks that rSELF was saved but the SaveRefsAndArgs
    // frame does not save it, so we cannot use the existing SaveRefsAndArgs frame.
    // That's why we checked for exception after restoring registers from it.
    // We need to build a SaveAllCalleeSaves frame instead. Args are irrelevant at this
    // point but keep the area allocated for stack args to keep CFA definition simple.
#if FRAME_SIZE_SAVE_REFS_AND_ARGS != FRAME_SIZE_SAVE_ALL_CALLEE_SAVES
#  error "Expected FRAME_SIZE_SAVE_REFS_AND_ARGS == FRAME_SIZE_SAVE_ALL_CALLEE_SAVES"
   // Otherwise we would need to adjust SP and R4 and move our return PC which is at [R4, #4].
   // (Luckily, both SaveRefsAndArgs and SaveAllCalleeSaves frames have padding there.)
#endif

    // Spill registers for the SaveAllCalleeSaves frame above the stack args area.
    add    ip, r4, #FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 32
    stmia  ip, {r5-r11}          // Keep the caller PC for managed stack walk.
    CFI_EXPRESSION_BREG 5, 4, FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 32
    CFI_EXPRESSION_BREG 6, 4, FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 28
    CFI_EXPRESSION_BREG 7, 4, FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 24
    CFI_EXPRESSION_BREG 8, 4, FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 20
    CFI_EXPRESSION_BREG 9, 4, FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 16
    CFI_EXPRESSION_BREG 10, 4, FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 12
    CFI_EXPRESSION_BREG 11, 4, FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 8
    // Skip R4, it is callee-save in managed ABI.
    // Spill the FP callee-saves s16-s31 (callee-saved per AAPCS).
    add    ip, r4, #12
    vstmia ip, {s16-s31}

    // Store ArtMethod* Runtime::callee_save_methods_[kSaveAllCalleeSaves] to the managed frame.
    RUNTIME_CURRENT2 ip
    ldr    ip, [ip, #RUNTIME_SAVE_ALL_CALLEE_SAVES_METHOD_OFFSET]
    str    ip, [r4]

    // Place the managed frame SP in Thread::Current()->top_quick_frame.
    str    r4, [rSELF, #THREAD_TOP_QUICK_FRAME_OFFSET]

    DELIVER_PENDING_EXCEPTION_FRAME_READY
END art_jni_dlsym_lookup_critical_stub