/*
 * Copyright (C) 2013 The Android Open Source Project
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *  * Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *  * Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
 * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#include <private/bionic_asm.h>
#include <private/bionic_constants.h>

// According to the AARCH64 PCS document we need to save the following
// registers:
//
// Core     x19 - x30, sp (see section 5.1.1)
// VFP      d8 - d15 (see section 5.1.2)
//
// NOTE: All the registers saved here will have 64-bit values.
//       AAPCS mandates that the higher parts of the q registers do not need
//       to be saved by the callee.
//
// The internal structure of a jmp_buf is totally private.
// Current layout (changes from release to release):
//
// word   name            description
// 0      sigflag/cookie  setjmp cookie in top 31 bits, signal mask flag in low bit
// 1      sigmask         signal mask (not used with _setjmp / _longjmp)
// 2      core_base       base of core registers (x18-x30, sp)
//                        (We only store the low bits of x18 to avoid leaking the
//                        shadow call stack address into memory.)
// 16     float_base      base of float registers (d8-d15)
// 24     checksum        checksum of core registers
// 25     reserved        reserved entries (room to grow)
// 32
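//
// For illustration only, the same layout viewed as a hypothetical C struct
// (the buffer is really just an opaque array of 64-bit words; the type and
// field names below are made up and not part of any API):
//
//   struct jmp_buf_layout_sketch {
//     uint64_t sigflag_cookie; // word 0: setjmp cookie | signal-mask flag
//     uint64_t sigmask;        // word 1
//     uint64_t core[14];       // words 2-15: mangled core registers and sp
//                              //             (see the _JB_* offsets below)
//     uint64_t fp[8];          // words 16-23: d14..d8
//     uint64_t checksum;       // word 24
//     uint64_t reserved[7];    // words 25-31: room to grow
//   };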

#define _JB_SIGFLAG   0
#define _JB_SIGMASK  (_JB_SIGFLAG + 1)
#define _JB_X30_SP   (_JB_SIGMASK + 1)
#define _JB_X28_X29  (_JB_X30_SP  + 2)
#define _JB_X26_X27  (_JB_X28_X29 + 2)
#define _JB_X24_X25  (_JB_X26_X27 + 2)
#define _JB_X22_X23  (_JB_X24_X25 + 2)
#define _JB_X20_X21  (_JB_X22_X23 + 2)
#define _JB_SCS_X19  (_JB_X20_X21 + 2)
#define _JB_D14_D15  (_JB_SCS_X19 + 2)
#define _JB_D12_D13  (_JB_D14_D15 + 2)
#define _JB_D10_D11  (_JB_D12_D13 + 2)
#define _JB_D8_D9    (_JB_D10_D11 + 2)
#define _JB_CHECKSUM (_JB_D8_D9 + 2)

#define SCS_MASK (SCS_SIZE - 1)
#define MANGLE_REGISTERS 1
#define USE_CHECKSUM 1

// XOR x3, the callee-saved core registers, and the saved sp with \reg
// (the setjmp cookie with its low bit cleared).
.macro m_mangle_registers reg, sp_reg
#if MANGLE_REGISTERS
  eor x3, x3, \reg
  eor x19, x19, \reg
  eor x20, x20, \reg
  eor x21, x21, \reg
  eor x22, x22, \reg
  eor x23, x23, \reg
  eor x24, x24, \reg
  eor x25, x25, \reg
  eor x26, x26, \reg
  eor x27, x27, \reg
  eor x28, x28, \reg
  eor x29, x29, \reg
  eor x30, x30, \reg
  eor \sp_reg, \sp_reg, \reg
#endif
.endm

// XOR together the first 24 words of the jmp_buf (everything before the
// checksum slot) into \dst.
.macro m_calculate_checksum dst, src, scratch
  mov \dst, #0
  .irp i,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23
    ldr \scratch, [\src, #(\i * 8)]
    eor \dst, \dst, \scratch
  .endr
.endm

// Unmangling is the same XOR; applying it twice restores the original values.
.macro m_unmangle_registers reg, sp_reg
  m_mangle_registers \reg, sp_reg=\sp_reg
.endm
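
// A minimal sketch of how the mangling round-trips, assuming K is the cookie
// returned by __bionic_setjmp_cookie_get with its low (signal-flag) bit
// cleared; x30 is just an example register:
//
//   sigsetjmp:   saved_x30 = x30 ^ K    // m_mangle_registers, then stp
//   siglongjmp:  x30 = saved_x30 ^ K    // ldp, then m_unmangle_registers
//
// XOR is its own inverse, so the original value only comes back with the same
// per-process cookie; a jmp_buf forged without it does not give an attacker
// direct control of the restored pc/sp.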

ENTRY(setjmp)
__BIONIC_WEAK_ASM_FOR_NATIVE_BRIDGE(setjmp)
  mov w1, #1
  b sigsetjmp
END(setjmp)

ENTRY(_setjmp)
__BIONIC_WEAK_ASM_FOR_NATIVE_BRIDGE(_setjmp)
  mov w1, #0
  b sigsetjmp
END(_setjmp)

// int sigsetjmp(sigjmp_buf env, int save_signal_mask);
ENTRY(sigsetjmp)
__BIONIC_WEAK_ASM_FOR_NATIVE_BRIDGE(sigsetjmp)
  stp x0, x30, [sp, #-16]!
  .cfi_def_cfa_offset 16
  .cfi_rel_offset x0, 0
  .cfi_rel_offset x30, 8

  // Get the cookie and store it along with the signal flag.
  mov x0, x1
  bl __bionic_setjmp_cookie_get
  mov x1, x0
  ldr x0, [sp, #0]
  str x1, [x0, #(_JB_SIGFLAG * 8)]

  // Do we need to save the signal mask?
  tbz w1, #0, 1f

  // Save the cookie for later.
  stp x1, xzr, [sp, #-16]!
  .cfi_adjust_cfa_offset 16

  // Save current signal mask.
  // The 'how' argument is ignored if new_mask is NULL.
  mov x1, #0 // NULL.
  add x2, x0, #(_JB_SIGMASK * 8) // old_mask.
  bl sigprocmask

  ldp x1, xzr, [sp], #16
  .cfi_adjust_cfa_offset -16

1:
  // Restore original x0 and lr.
  ldp x0, x30, [sp], #16
  .cfi_adjust_cfa_offset -16
  .cfi_restore x0
  .cfi_restore x30

  // Mask off the signal flag bit.
  bic x1, x1, #1

  // Mask off the high bits of the shadow call stack pointer.
  and x3, x18, #SCS_MASK

  // Save core registers.
  mov x10, sp
  m_mangle_registers x1, sp_reg=x10
  stp x30, x10, [x0, #(_JB_X30_SP * 8)]
  stp x28, x29, [x0, #(_JB_X28_X29 * 8)]
  stp x26, x27, [x0, #(_JB_X26_X27 * 8)]
  stp x24, x25, [x0, #(_JB_X24_X25 * 8)]
  stp x22, x23, [x0, #(_JB_X22_X23 * 8)]
  stp x20, x21, [x0, #(_JB_X20_X21 * 8)]
  stp x3, x19, [x0, #(_JB_SCS_X19 * 8)]
  m_unmangle_registers x1, sp_reg=x10

  // Save floating point registers.
  stp d14, d15, [x0, #(_JB_D14_D15 * 8)]
  stp d12, d13, [x0, #(_JB_D12_D13 * 8)]
  stp d10, d11, [x0, #(_JB_D10_D11 * 8)]
  stp d8, d9, [x0, #(_JB_D8_D9 * 8)]

#if USE_CHECKSUM
  // Calculate the checksum.
  m_calculate_checksum x12, x0, x2
  str x12, [x0, #(_JB_CHECKSUM * 8)]
#endif

  mov w0, #0
  ret
END(sigsetjmp)

// void siglongjmp(sigjmp_buf env, int value);
ENTRY(siglongjmp)
__BIONIC_WEAK_ASM_FOR_NATIVE_BRIDGE(siglongjmp)
#if USE_CHECKSUM
  // Check the checksum before doing anything.
  m_calculate_checksum x12, x0, x2
  ldr x2, [x0, #(_JB_CHECKSUM * 8)]

  cmp x2, x12
  bne __bionic_setjmp_checksum_mismatch
#endif

#if __has_feature(hwaddress_sanitizer)
  stp x0, x30, [sp, #-16]!
  .cfi_adjust_cfa_offset 16
  .cfi_rel_offset x0, 0
  .cfi_rel_offset x30, 8
  mov x19, x1 // Save 'value'.

  // Load and unmangle the destination sp.
  ldr x2, [x0, #(_JB_SIGFLAG * 8)]
  bic x2, x2, #1
  ldr x0, [x0, #(_JB_X30_SP * 8 + 8)]
  eor x0, x0, x2
  bl __hwasan_handle_longjmp

  mov x1, x19 // Restore 'value'.
  // Restore original x0 and lr.
  ldp x0, x30, [sp], #16
  .cfi_adjust_cfa_offset -16
  .cfi_restore x0
  .cfi_restore x30
#endif

  // Do we need to restore the signal mask?
  ldr x2, [x0, #(_JB_SIGFLAG * 8)]
  tbz w2, #0, 1f

  stp x0, x30, [sp, #-16]!
  .cfi_adjust_cfa_offset 16
  .cfi_rel_offset x0, 0
  .cfi_rel_offset x30, 8

  // Restore signal mask.
  mov x19, x1 // Save 'value'.

  mov x2, x0
  mov x0, #2 // SIG_SETMASK
  add x1, x2, #(_JB_SIGMASK * 8) // new_mask.
  mov x2, #0 // NULL.
  bl sigprocmask
  mov x1, x19 // Restore 'value'.

  // Restore original x0 and lr.
  ldp x0, x30, [sp], #16
  .cfi_adjust_cfa_offset -16
  .cfi_restore x0
  .cfi_restore x30

  ldr x2, [x0, #(_JB_SIGFLAG * 8)]

1:
  // Restore core registers.
  bic x2, x2, #1
  ldp x30, x10, [x0, #(_JB_X30_SP * 8)]
  ldp x28, x29, [x0, #(_JB_X28_X29 * 8)]
  ldp x26, x27, [x0, #(_JB_X26_X27 * 8)]
  ldp x24, x25, [x0, #(_JB_X24_X25 * 8)]
  ldp x22, x23, [x0, #(_JB_X22_X23 * 8)]
  ldp x20, x21, [x0, #(_JB_X20_X21 * 8)]
  ldp x3, x19, [x0, #(_JB_SCS_X19 * 8)]
  m_unmangle_registers x2, sp_reg=x10
  mov sp, x10

  // Restore the low bits of the shadow call stack pointer.
  and x18, x18, #~SCS_MASK
  orr x18, x3, x18

  stp x0, x1, [sp, #-16]!
  .cfi_adjust_cfa_offset 16
  .cfi_rel_offset x0, 0
  .cfi_rel_offset x1, 8
  stp x30, xzr, [sp, #-16]!
  .cfi_adjust_cfa_offset 16
  .cfi_rel_offset x30, 0
  ldr x0, [x0, #(_JB_SIGFLAG * 8)]
  bl __bionic_setjmp_cookie_check
  ldp x30, xzr, [sp], #16
  .cfi_adjust_cfa_offset -16
  .cfi_restore x30
  ldp x0, x1, [sp], #16
  .cfi_adjust_cfa_offset -16
  .cfi_restore x0
  .cfi_restore x1

  // Restore floating point registers.
  ldp d14, d15, [x0, #(_JB_D14_D15 * 8)]
  ldp d12, d13, [x0, #(_JB_D12_D13 * 8)]
  ldp d10, d11, [x0, #(_JB_D10_D11 * 8)]
  ldp d8, d9, [x0, #(_JB_D8_D9 * 8)]

  // Set the return value: longjmp must never make setjmp appear to return 0,
  // so a 'value' of 0 becomes 1.
  cmp w1, wzr
  csinc w0, w1, wzr, ne
  ret
END(siglongjmp)

// longjmp and _longjmp can share siglongjmp's code because the flag bit saved
// in word 0 already records whether there is a signal mask to restore.
ALIAS_SYMBOL(longjmp, siglongjmp)
__BIONIC_WEAK_ASM_FOR_NATIVE_BRIDGE(longjmp)
ALIAS_SYMBOL(_longjmp, siglongjmp)
__BIONIC_WEAK_ASM_FOR_NATIVE_BRIDGE(_longjmp)