%def const(helper="UndefinedConstHandler"):
    /*
     * Generic constant-pool load: resolve index BBBB via the C++ $helper and
     * store the result into vAA.  Used for:
     */
    /* const/class vAA, type@BBBB */
    /* const/method-handle vAA, method_handle@BBBB */
    /* const/method-type vAA, proto@BBBB */
    /* const/string vAA, string@BBBB */
    .extern $helper
    EXPORT_PC
    FETCH r0, 1                         @ r0<- BBBB
    mov     r1, rINST, lsr #8           @ r1<- AA
    add     r2, rFP, #OFF_FP_SHADOWFRAME
    mov     r3, rSELF
    bl      $helper                     @ (index, tgt_reg, shadow_frame, self)
    PREFETCH_INST 2                     @ load rINST
    cmp     r0, #0                      @ fail?
    bne     MterpPossibleException      @ let reference interpreter deal with it.
    ADVANCE 2                           @ advance rPC
    GET_INST_OPCODE ip                  @ extract opcode from rINST
    GOTO_OPCODE ip                      @ jump to next instruction

%def unused():
/*
 * Bail to reference interpreter to throw.
 */
    b       MterpFallback

%def op_const():
    /* const vAA, #+BBBBbbbb */
    mov     r3, rINST, lsr #8           @ r3<- AA
    FETCH r0, 1                         @ r0<- bbbb (low)
    FETCH r1, 2                         @ r1<- BBBB (high)
    FETCH_ADVANCE_INST 3                @ advance rPC, load rINST
    orr     r0, r0, r1, lsl #16         @ r0<- BBBBbbbb
    GET_INST_OPCODE ip                  @ extract opcode from rINST
    SET_VREG r0, r3                     @ vAA<- r0
    GOTO_OPCODE ip                      @ jump to next instruction

%def op_const_16():
    /* const/16 vAA, #+BBBB */
    FETCH_S r0, 1                       @ r0<- ssssBBBB (sign-extended)
    mov     r3, rINST, lsr #8           @ r3<- AA
    FETCH_ADVANCE_INST 2                @ advance rPC, load rINST
    SET_VREG r0, r3                     @ vAA<- r0
    GET_INST_OPCODE ip                  @ extract opcode from rINST
    GOTO_OPCODE ip                      @ jump to next instruction

%def op_const_4():
    /* const/4 vA, #+B */
    sbfx    r1, rINST, #12, #4          @ r1<- sssssssB (sign-extended)
    ubfx    r0, rINST, #8, #4           @ r0<- A
    FETCH_ADVANCE_INST 1                @ advance rPC, load rINST
    GET_INST_OPCODE ip                  @ ip<- opcode from rINST
    SET_VREG r1, r0                     @ fp[A]<- r1
    GOTO_OPCODE ip                      @ execute next instruction

%def op_const_class():
%  const(helper="MterpConstClass")

%def op_const_high16():
    /* const/high16 vAA, #+BBBB0000 */
    FETCH r0, 1                         @ r0<- 0000BBBB (zero-extended)
    mov     r3, rINST, lsr #8           @ r3<- AA
    mov     r0, r0, lsl #16             @ r0<- BBBB0000
    FETCH_ADVANCE_INST 2                @ advance rPC, load rINST
    SET_VREG r0, r3                     @ vAA<- r0
    GET_INST_OPCODE ip                  @ extract opcode from rINST
    GOTO_OPCODE ip                      @ jump to next instruction

%def op_const_method_handle():
%  const(helper="MterpConstMethodHandle")

%def op_const_method_type():
%  const(helper="MterpConstMethodType")

%def op_const_string():
%  const(helper="MterpConstString")

%def op_const_string_jumbo():
    /* const/string vAA, String@BBBBBBBB */
    EXPORT_PC
    FETCH r0, 1                         @ r0<- bbbb (low)
    FETCH r2, 2                         @ r2<- BBBB (high)
    mov     r1, rINST, lsr #8           @ r1<- AA
    orr     r0, r0, r2, lsl #16         @ r0<- BBBBbbbb
    add     r2, rFP, #OFF_FP_SHADOWFRAME
    mov     r3, rSELF
    bl      MterpConstString            @ (index, tgt_reg, shadow_frame, self)
    PREFETCH_INST 3                     @ advance rPC
    cmp     r0, #0                      @ fail?
    bne     MterpPossibleException      @ let reference interpreter deal with it.
    ADVANCE 3                           @ advance rPC
    GET_INST_OPCODE ip                  @ extract opcode from rINST
    GOTO_OPCODE ip                      @ jump to next instruction

%def op_const_wide():
    /* const-wide vAA, #+HHHHhhhhBBBBbbbb */
    FETCH r0, 1                         @ r0<- bbbb (low)
    FETCH r1, 2                         @ r1<- BBBB (low middle)
    FETCH r2, 3                         @ r2<- hhhh (high middle)
    orr     r0, r0, r1, lsl #16         @ r0<- BBBBbbbb (low word)
    FETCH r3, 4                         @ r3<- HHHH (high)
    mov     r9, rINST, lsr #8           @ r9<- AA
    orr     r1, r2, r3, lsl #16         @ r1<- HHHHhhhh (high word)
    CLEAR_SHADOW_PAIR r9, r2, r3        @ Zero out the shadow regs
    FETCH_ADVANCE_INST 5                @ advance rPC, load rINST
    VREG_INDEX_TO_ADDR r9, r9           @ r9<- &fp[AA]
    GET_INST_OPCODE ip                  @ extract opcode from rINST
    SET_VREG_WIDE_BY_ADDR r0, r1, r9    @ vAA<- r0/r1
    GOTO_OPCODE ip                      @ jump to next instruction

%def op_const_wide_16():
    /* const-wide/16 vAA, #+BBBB */
    FETCH_S r0, 1                       @ r0<- ssssBBBB (sign-extended)
    mov     r3, rINST, lsr #8           @ r3<- AA
    mov     r1, r0, asr #31             @ r1<- ssssssss (sign-extend into high word)
    FETCH_ADVANCE_INST 2                @ advance rPC, load rINST
    CLEAR_SHADOW_PAIR r3, r2, lr        @ Zero out the shadow regs
    VREG_INDEX_TO_ADDR r3, r3           @ r3<- &fp[AA]
    GET_INST_OPCODE ip                  @ extract opcode from rINST
    SET_VREG_WIDE_BY_ADDR r0, r1, r3    @ vAA<- r0/r1
    GOTO_OPCODE ip                      @ jump to next instruction

%def op_const_wide_32():
    /* const-wide/32 vAA, #+BBBBbbbb */
    FETCH r0, 1                         @ r0<- 0000bbbb (low)
    mov     r3, rINST, lsr #8           @ r3<- AA
    FETCH_S r2, 2                       @ r2<- ssssBBBB (high)
    FETCH_ADVANCE_INST 3                @ advance rPC, load rINST
    orr     r0, r0, r2, lsl #16         @ r0<- BBBBbbbb
    CLEAR_SHADOW_PAIR r3, r2, lr        @ Zero out the shadow regs
    VREG_INDEX_TO_ADDR r3, r3           @ r3<- &fp[AA]
    mov     r1, r0, asr #31             @ r1<- ssssssss (sign-extend into high word)
    GET_INST_OPCODE ip                  @ extract opcode from rINST
    SET_VREG_WIDE_BY_ADDR r0, r1, r3    @ vAA<- r0/r1
    GOTO_OPCODE ip                      @ jump to next instruction

%def op_const_wide_high16():
    /* const-wide/high16 vAA, #+BBBB000000000000 */
    FETCH r1, 1                         @ r1<- 0000BBBB (zero-extended)
    mov     r3, rINST, lsr #8           @ r3<- AA
    mov     r0, #0                      @ r0<- 00000000
    mov     r1, r1, lsl #16             @ r1<- BBBB0000
    FETCH_ADVANCE_INST 2                @ advance rPC, load rINST
    CLEAR_SHADOW_PAIR r3, r0, r2        @ Zero shadow regs
    VREG_INDEX_TO_ADDR r3, r3           @ r3<- &fp[AA]
    GET_INST_OPCODE ip                  @ extract opcode from rINST
    SET_VREG_WIDE_BY_ADDR r0, r1, r3    @ vAA<- r0/r1
    GOTO_OPCODE ip                      @ jump to next instruction

%def op_monitor_enter():
    /*
     * Synchronize on an object.
     */
    /* monitor-enter vAA */
    EXPORT_PC
    mov      r2, rINST, lsr #8          @ r2<- AA
    GET_VREG r0, r2                     @ r0<- vAA (object)
    mov      r1, rSELF                  @ r1<- self
    bl       artLockObjectFromCode
    cmp      r0, #0
    bne      MterpException
    FETCH_ADVANCE_INST 1
    @ Stay in mterp only while the use-mterp flag is still set.
    ldr      r0, [rSELF, #THREAD_USE_MTERP_OFFSET]
    cmp      r0, #0
    beq      MterpFallback
    GET_INST_OPCODE ip                  @ extract opcode from rINST
    GOTO_OPCODE ip                      @ jump to next instruction

%def op_monitor_exit():
    /*
     * Unlock an object.
     *
     * Exceptions that occur when unlocking a monitor need to appear as
     * if they happened at the following instruction.  See the Dalvik
     * instruction spec.
     */
    /* monitor-exit vAA */
    EXPORT_PC
    mov      r2, rINST, lsr #8          @ r2<- AA
    GET_VREG r0, r2                     @ r0<- vAA (object)
    mov      r1, rSELF                  @ r1<- self
    bl       artUnlockObjectFromCode    @ r0<- success for unlock(self, obj)
    cmp     r0, #0                      @ failed?
    bne     MterpException
    FETCH_ADVANCE_INST 1                @ before throw: advance rPC, load rINST
    @ Stay in mterp only while the use-mterp flag is still set.
    ldr     r0, [rSELF, #THREAD_USE_MTERP_OFFSET]
    cmp     r0, #0
    beq     MterpFallback
    GET_INST_OPCODE ip                  @ extract opcode from rINST
    GOTO_OPCODE ip                      @ jump to next instruction

%def op_move(is_object="0"):
    /* for move, move-object, long-to-int */
    /* op vA, vB */
    mov     r1, rINST, lsr #12          @ r1<- B from 15:12
    ubfx    r0, rINST, #8, #4           @ r0<- A from 11:8
    FETCH_ADVANCE_INST 1                @ advance rPC, load rINST
    GET_VREG r2, r1                     @ r2<- fp[B]
    GET_INST_OPCODE ip                  @ ip<- opcode from rINST
    .if $is_object
    SET_VREG_OBJECT r2, r0              @ fp[A]<- r2
    .else
    SET_VREG r2, r0                     @ fp[A]<- r2
    .endif
    GOTO_OPCODE ip                      @ execute next instruction

%def op_move_16(is_object="0"):
    /* for: move/16, move-object/16 */
    /* op vAAAA, vBBBB */
    FETCH r1, 2                         @ r1<- BBBB
    FETCH r0, 1                         @ r0<- AAAA
    FETCH_ADVANCE_INST 3                @ advance rPC, load rINST
    GET_VREG r2, r1                     @ r2<- fp[BBBB]
    GET_INST_OPCODE ip                  @ extract opcode from rINST
    .if $is_object
    SET_VREG_OBJECT r2, r0              @ fp[AAAA]<- r2
    .else
    SET_VREG r2, r0                     @ fp[AAAA]<- r2
    .endif
    GOTO_OPCODE ip                      @ jump to next instruction

%def op_move_exception():
    /* move-exception vAA */
    mov     r2, rINST, lsr #8           @ r2<- AA
    ldr     r3, [rSELF, #THREAD_EXCEPTION_OFFSET]
    mov     r1, #0                      @ r1<- 0
    FETCH_ADVANCE_INST 1                @ advance rPC, load rINST
    SET_VREG_OBJECT r3, r2              @ fp[AA]<- exception obj
    GET_INST_OPCODE ip                  @ extract opcode from rINST
    str     r1, [rSELF, #THREAD_EXCEPTION_OFFSET]  @ clear exception
    GOTO_OPCODE ip                      @ jump to next instruction

%def op_move_from16(is_object="0"):
    /* for: move/from16, move-object/from16 */
    /* op vAA, vBBBB */
    FETCH r1, 1                         @ r1<- BBBB
    mov     r0, rINST, lsr #8           @ r0<- AA
    FETCH_ADVANCE_INST 2                @ advance rPC, load rINST
    GET_VREG r2, r1                     @ r2<- fp[BBBB]
    GET_INST_OPCODE ip                  @ extract opcode from rINST
    .if $is_object
    SET_VREG_OBJECT r2, r0              @ fp[AA]<- r2
    .else
    SET_VREG r2, r0                     @ fp[AA]<- r2
    .endif
    GOTO_OPCODE ip                      @ jump to next instruction

%def op_move_object():
%  op_move(is_object="1")

%def op_move_object_16():
%  op_move_16(is_object="1")

%def op_move_object_from16():
%  op_move_from16(is_object="1")

%def op_move_result(is_object="0"):
    /* for: move-result, move-result-object */
    /* op vAA */
    mov     r2, rINST, lsr #8           @ r2<- AA
    FETCH_ADVANCE_INST 1                @ advance rPC, load rINST
    ldr     r0, [rFP, #OFF_FP_RESULT_REGISTER]  @ get pointer to result JType.
    ldr     r0, [r0]                    @ r0 <- result.i.
    GET_INST_OPCODE ip                  @ extract opcode from rINST
    .if $is_object
    SET_VREG_OBJECT r0, r2, r1          @ fp[AA]<- r0
    .else
    SET_VREG r0, r2                     @ fp[AA]<- r0
    .endif
    GOTO_OPCODE ip                      @ jump to next instruction

%def op_move_result_object():
%  op_move_result(is_object="1")

%def op_move_result_wide():
    /* move-result-wide vAA */
    mov     rINST, rINST, lsr #8        @ rINST<- AA
    ldr     r3, [rFP, #OFF_FP_RESULT_REGISTER]
    VREG_INDEX_TO_ADDR r2, rINST        @ r2<- &fp[AA]
    ldmia   r3, {r0-r1}                 @ r0/r1<- retval.j
    CLEAR_SHADOW_PAIR rINST, ip, lr     @ Zero out the shadow regs
    FETCH_ADVANCE_INST 1                @ advance rPC, load rINST
    SET_VREG_WIDE_BY_ADDR r0, r1, r2    @ fp[AA]<- r0/r1
    GET_INST_OPCODE ip                  @ extract opcode from rINST
    GOTO_OPCODE ip                      @ jump to next instruction

%def op_move_wide():
    /* move-wide vA, vB */
    /* NOTE: regs can overlap, e.g. "move v6,v7" or "move v7,v6" */
    mov     r3, rINST, lsr #12          @ r3<- B
    ubfx    rINST, rINST, #8, #4        @ rINST<- A
    VREG_INDEX_TO_ADDR r3, r3           @ r3<- &fp[B]
    VREG_INDEX_TO_ADDR r2, rINST        @ r2<- &fp[A]
    GET_VREG_WIDE_BY_ADDR r0, r1, r3    @ r0/r1<- fp[B]
    CLEAR_SHADOW_PAIR rINST, ip, lr     @ Zero out the shadow regs
    FETCH_ADVANCE_INST 1                @ advance rPC, load rINST
    GET_INST_OPCODE ip                  @ extract opcode from rINST
    SET_VREG_WIDE_BY_ADDR r0, r1, r2    @ fp[A]<- r0/r1
    GOTO_OPCODE ip                      @ jump to next instruction

%def op_move_wide_16():
    /* move-wide/16 vAAAA, vBBBB */
    /* NOTE: regs can overlap, e.g. "move v6,v7" or "move v7,v6" */
    FETCH r3, 2                         @ r3<- BBBB
    FETCH r2, 1                         @ r2<- AAAA
    VREG_INDEX_TO_ADDR r3, r3           @ r3<- &fp[BBBB]
    VREG_INDEX_TO_ADDR lr, r2           @ lr<- &fp[AAAA]
    GET_VREG_WIDE_BY_ADDR r0, r1, r3    @ r0/r1<- fp[BBBB]
    FETCH_ADVANCE_INST 3                @ advance rPC, load rINST
    CLEAR_SHADOW_PAIR r2, r3, ip        @ Zero out the shadow regs
    SET_VREG_WIDE_BY_ADDR r0, r1, lr    @ fp[AAAA]<- r0/r1
    GET_INST_OPCODE ip                  @ extract opcode from rINST
    GOTO_OPCODE ip                      @ jump to next instruction

%def op_move_wide_from16():
    /* move-wide/from16 vAA, vBBBB */
    /* NOTE: regs can overlap, e.g. "move v6,v7" or "move v7,v6" */
    FETCH r3, 1                         @ r3<- BBBB
    mov     rINST, rINST, lsr #8        @ rINST<- AA
    VREG_INDEX_TO_ADDR r3, r3           @ r3<- &fp[BBBB]
    VREG_INDEX_TO_ADDR r2, rINST        @ r2<- &fp[AA]
    GET_VREG_WIDE_BY_ADDR r0, r1, r3    @ r0/r1<- fp[BBBB]
    CLEAR_SHADOW_PAIR rINST, ip, lr     @ Zero out the shadow regs
    FETCH_ADVANCE_INST 2                @ advance rPC, load rINST
    GET_INST_OPCODE ip                  @ extract opcode from rINST
    SET_VREG_WIDE_BY_ADDR r0, r1, r2    @ fp[AA]<- r0/r1
    GOTO_OPCODE ip                      @ jump to next instruction

%def op_nop():
    FETCH_ADVANCE_INST 1                @ advance to next instr, load rINST
    GET_INST_OPCODE ip                  @ ip<- opcode from rINST
    GOTO_OPCODE ip                      @ execute it

%def op_unused_3e():
%  unused()

%def op_unused_3f():
%  unused()

%def op_unused_40():
%  unused()

%def op_unused_41():
%  unused()

%def op_unused_42():
%  unused()

%def op_unused_43():
%  unused()

%def op_unused_73():
%  unused()

%def op_unused_79():
%  unused()

%def op_unused_7a():
%  unused()

%def op_unused_f3():
%  unused()

%def op_unused_f4():
%  unused()

%def op_unused_f5():
%  unused()

%def op_unused_f6():
%  unused()

%def op_unused_f7():
%  unused()

%def op_unused_f8():
%  unused()

%def op_unused_f9():
%  unused()

%def op_unused_fc():
%  unused()

%def op_unused_fd():
%  unused()