%def const(helper="UndefinedConstHandler"):
    /* Shared body for const ops that resolve a dex-file entity via $helper: */
    /* const/class vAA, type@BBBB */
    /* const/method-handle vAA, method_handle@BBBB */
    /* const/method-type vAA, proto@BBBB */
    /* const/string vAA, string@BBBB */
    /* $helper(index, tgt_reg, shadow_frame, self) returns non-zero in %al on failure. */
    .extern $helper
    EXPORT_PC                               # helper calls into runtime; export dex PC first
    movzwq  2(rPC), OUT_ARG0                # OUT_ARG0 <- BBBB (zero-extended pool index)
    movq    rINSTq, OUT_ARG1                # OUT_ARG1 <- AA (destination vreg number)
    leaq    OFF_FP_SHADOWFRAME(rFP), OUT_ARG2
    movq    rSELF, OUT_ARG3
    call    SYMBOL($helper)                 # (index, tgt_reg, shadow_frame, self)
    testb   %al, %al                        # non-zero => resolution failed
    jnz     MterpPossibleException
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
16
%def unused():
/*
 * Unused/unimplemented opcode: bail to the reference interpreter,
 * which will throw appropriately.
 */
    jmp     MterpFallback
22
%def op_const():
    /* const vAA, #+BBBBbbbb */
    movl    2(rPC), %eax                    # eax <- BBBBbbbb (grab all 32 bits at once)
    SET_VREG %eax, rINSTq                   # vAA <- eax
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 3        # instruction is 3 code units wide
28
%def op_const_16():
    /* const/16 vAA, #+BBBB */
    movswl  2(rPC), %ecx                    # ecx <- ssssBBBB (sign-extended literal)
    SET_VREG %ecx, rINSTq                   # vAA <- ssssBBBB
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
34
%def op_const_4():
    /* const/4 vA, #+B */
    movsbl  rINSTbl, %eax                   # eax <- ssssssBx (sign-extended low inst byte)
    movl    $$0xf, rINST                    # nibble mask
    andl    %eax, rINST                     # rINST <- A (low nibble = dest vreg)
    sarl    $$4, %eax                       # eax <- sssssssB (signed 4-bit literal)
    SET_VREG %eax, rINSTq                   # vA <- B
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 1
43
%def op_const_class():
    /* const/class vAA, type@BBBB */
%  const(helper="MterpConstClass")
46
%def op_const_high16():
    /* const/high16 vAA, #+BBBB0000 */
    movzwl  2(rPC), %eax                    # eax <- 0000BBBB
    sall    $$16, %eax                      # eax <- BBBB0000 (literal in high half)
    SET_VREG %eax, rINSTq                   # vAA <- eax
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
53
%def op_const_method_handle():
    /* const/method-handle vAA, method_handle@BBBB */
%  const(helper="MterpConstMethodHandle")
56
%def op_const_method_type():
    /* const/method-type vAA, proto@BBBB */
%  const(helper="MterpConstMethodType")
59
%def op_const_string():
    /* const/string vAA, string@BBBB */
%  const(helper="MterpConstString")
62
%def op_const_string_jumbo():
    /* const/string/jumbo vAA, string@BBBBBBBB */
    EXPORT_PC                               # helper calls into runtime; export dex PC first
    movl    2(rPC), OUT_32_ARG0             # OUT_32_ARG0 <- BBBBBBBB (full 32-bit index)
    movq    rINSTq, OUT_ARG1                # OUT_ARG1 <- AA (destination vreg number)
    leaq    OFF_FP_SHADOWFRAME(rFP), OUT_ARG2
    movq    rSELF, OUT_ARG3
    call    SYMBOL(MterpConstString)        # (index, tgt_reg, shadow_frame, self)
    testb   %al, %al                        # non-zero => resolution failed
    jnz     MterpPossibleException
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 3        # instruction is 3 code units wide
74
%def op_const_wide():
    /* const-wide vAA, #+HHHHhhhhBBBBbbbb */
    movq    2(rPC), %rax                    # rax <- HHHHhhhhBBBBbbbb (all 64 bits at once)
    SET_WIDE_VREG %rax, rINSTq              # vAA/vAA+1 <- rax
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 5        # instruction is 5 code units wide
80
%def op_const_wide_16():
    /* const-wide/16 vAA, #+BBBB */
    movswq  2(rPC), %rax                    # rax <- ssss...ssssBBBB (sign-extended to 64)
    SET_WIDE_VREG %rax, rINSTq              # vAA/vAA+1 <- rax
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
86
%def op_const_wide_32():
    /* const-wide/32 vAA, #+BBBBbbbb */
    movslq   2(rPC), %rax                   # rax <- ssssssssBBBBbbbb (sign-extended to 64)
    SET_WIDE_VREG %rax, rINSTq              # vAA/vAA+1 <- rax
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 3
92
%def op_const_wide_high16():
    /* const-wide/high16 vAA, #+BBBB000000000000 */
    movzwq  2(rPC), %rax                    # rax <- 000000000000BBBB
    salq    $$48, %rax                      # rax <- BBBB000000000000 (literal in high 16)
    SET_WIDE_VREG %rax, rINSTq              # vAA/vAA+1 <- rax
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
99
%def op_monitor_enter():
/*
 * Synchronize on an object.
 */
    /* monitor-enter vAA */
    EXPORT_PC                               # lock helper may throw; export dex PC first
    GET_VREG OUT_32_ARG0, rINSTq            # OUT_32_ARG0 <- vAA (object to lock)
    movq    rSELF, OUT_ARG1
    call    SYMBOL(artLockObjectFromCode)   # (object, self)
    testq   %rax, %rax                      # non-zero => lock attempt failed
    jnz     MterpException
    ADVANCE_PC 1
    movq    rSELF, %rax
    cmpb    LITERAL(0), THREAD_USE_MTERP_OFFSET(%rax)
    jz      MterpFallback                   # bail if thread can no longer use mterp
    FETCH_INST                              # advance/fetch split from dispatch so the
    GOTO_NEXT                               # use_mterp flag is checked in between
117
%def op_monitor_exit():
/*
 * Unlock an object.
 *
 * Exceptions that occur when unlocking a monitor need to appear as
 * if they happened at the following instruction.  See the Dalvik
 * instruction spec.
 */
    /* monitor-exit vAA */
    EXPORT_PC                               # unlock helper may throw; export dex PC first
    GET_VREG OUT_32_ARG0, rINSTq            # OUT_32_ARG0 <- vAA (object to unlock)
    movq    rSELF, OUT_ARG1
    call    SYMBOL(artUnlockObjectFromCode) # (object, self)
    testq   %rax, %rax                      # non-zero => unlock attempt failed
    jnz     MterpException
    ADVANCE_PC 1
    movq    rSELF, %rax
    cmpb    LITERAL(0), THREAD_USE_MTERP_OFFSET(%rax)
    jz      MterpFallback                   # bail if thread can no longer use mterp
    FETCH_INST                              # advance/fetch split from dispatch so the
    GOTO_NEXT                               # use_mterp flag is checked in between
139
%def op_move(is_object="0"):
    /* for move, move-object, long-to-int */
    /* op vA, vB */
    movl    rINST, %eax                     # eax <- BA
    andb    $$0xf, %al                      # eax <- A (low nibble = dest vreg)
    shrl    $$4, rINST                      # rINST <- B (high nibble = src vreg)
    GET_VREG %edx, rINSTq                   # edx <- vB
    .if $is_object
    SET_VREG_OBJECT %edx, %rax              # vA <- vB (object variant)
    .else
    SET_VREG %edx, %rax                     # vA <- vB
    .endif
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 1
153
%def op_move_16(is_object="0"):
    /* for: move/16, move-object/16 */
    /* op vAAAA, vBBBB */
    movzwq  4(rPC), %rcx                    # rcx <- BBBB (src vreg number)
    movzwq  2(rPC), %rax                    # rax <- AAAA (dest vreg number)
    GET_VREG %edx, %rcx                     # edx <- vBBBB
    .if $is_object
    SET_VREG_OBJECT %edx, %rax              # vAAAA <- vBBBB (object variant)
    .else
    SET_VREG %edx, %rax                     # vAAAA <- vBBBB
    .endif
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 3
166
%def op_move_exception():
    /* move-exception vAA */
    movq    rSELF, %rcx
    movl    THREAD_EXCEPTION_OFFSET(%rcx), %eax  # eax <- self's pending exception
    SET_VREG_OBJECT %eax, rINSTq            # vAA <- exception object
    movl    $$0, THREAD_EXCEPTION_OFFSET(%rcx)   # clear the pending-exception slot
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 1
174
%def op_move_from16(is_object="0"):
    /* for: move/from16, move-object/from16 */
    /* op vAA, vBBBB */
    movzwq  2(rPC), %rax                    # rax <- BBBB (src vreg number)
    GET_VREG %edx, %rax                     # edx <- vBBBB
    .if $is_object
    SET_VREG_OBJECT %edx, rINSTq            # vAA <- vBBBB (object variant)
    .else
    SET_VREG %edx, rINSTq                   # vAA <- vBBBB
    .endif
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
186
%def op_move_object():
    /* move-object vA, vB */
%  op_move(is_object="1")
189
%def op_move_object_16():
    /* move-object/16 vAAAA, vBBBB */
%  op_move_16(is_object="1")
192
%def op_move_object_from16():
    /* move-object/from16 vAA, vBBBB */
%  op_move_from16(is_object="1")
195
%def op_move_result(is_object="0"):
    /* for: move-result, move-result-object */
    /* op vAA */
    movq    OFF_FP_RESULT_REGISTER(rFP), %rax    # rax <- pointer to result JType
    movl    (%rax), %eax                    # eax <- result.i (low 32 bits)
    .if $is_object
    SET_VREG_OBJECT %eax, rINSTq            # vAA <- result (object variant)
    .else
    SET_VREG %eax, rINSTq                   # vAA <- result
    .endif
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 1
207
%def op_move_result_object():
    /* move-result-object vAA */
%  op_move_result(is_object="1")
210
%def op_move_result_wide():
    /* move-result-wide vAA */
    movq    OFF_FP_RESULT_REGISTER(rFP), %rax    # rax <- pointer to result JType
    movq    (%rax), %rdx                         # rdx <- full 64-bit result
    SET_WIDE_VREG %rdx, rINSTq                   # vAA/vAA+1 <- rdx
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 1
217
%def op_move_wide():
    /* move-wide vA, vB */
    /* NOTE: regs can overlap, e.g. "move v6,v7" or "move v7,v6" */
    movl    rINST, %ecx                     # ecx <- BA
    sarl    $$4, rINST                      # rINST <- B (high nibble = src vreg)
    andb    $$0xf, %cl                      # ecx <- A (low nibble = dest vreg)
    GET_WIDE_VREG %rdx, rINSTq              # rdx <- v[B]
    SET_WIDE_VREG %rdx, %rcx                # v[A] <- rdx
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 1
227
%def op_move_wide_16():
    /* move-wide/16 vAAAA, vBBBB */
    /* NOTE: regs can overlap, e.g. "move v6,v7" or "move v7,v6" */
    movzwq  4(rPC), %rcx                    # rcx <- BBBB (src vreg number)
    movzwq  2(rPC), %rax                    # rax <- AAAA (dest vreg number)
    GET_WIDE_VREG %rdx, %rcx                # rdx <- v[B]
    SET_WIDE_VREG %rdx, %rax                # v[A] <- rdx
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 3
236
%def op_move_wide_from16():
    /* move-wide/from16 vAA, vBBBB */
    /* NOTE: regs can overlap, e.g. "move v6,v7" or "move v7,v6" */
    movzwl  2(rPC), %ecx                    # ecx <- BBBB (src vreg number)
    GET_WIDE_VREG %rdx, %rcx                # rdx <- v[B]
    SET_WIDE_VREG %rdx, rINSTq              # v[AA] <- rdx
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
244
%def op_nop():
    /* nop: no operation; just advance to the next instruction */
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 1
247
%def op_unused_3e():
    /* unused opcode 0x3e */
%  unused()

%def op_unused_3f():
    /* unused opcode 0x3f */
%  unused()

%def op_unused_40():
    /* unused opcode 0x40 */
%  unused()

%def op_unused_41():
    /* unused opcode 0x41 */
%  unused()

%def op_unused_42():
    /* unused opcode 0x42 */
%  unused()

%def op_unused_43():
    /* unused opcode 0x43 */
%  unused()

%def op_unused_79():
    /* unused opcode 0x79 */
%  unused()

%def op_unused_7a():
    /* unused opcode 0x7a */
%  unused()

%def op_unused_f3():
    /* unused opcode 0xf3 */
%  unused()

%def op_unused_f4():
    /* unused opcode 0xf4 */
%  unused()

%def op_unused_f5():
    /* unused opcode 0xf5 */
%  unused()

%def op_unused_f6():
    /* unused opcode 0xf6 */
%  unused()

%def op_unused_f7():
    /* unused opcode 0xf7 */
%  unused()

%def op_unused_f8():
    /* unused opcode 0xf8 */
%  unused()

%def op_unused_f9():
    /* unused opcode 0xf9 */
%  unused()

%def op_unused_fc():
    /* unused opcode 0xfc */
%  unused()

%def op_unused_fd():
    /* unused opcode 0xfd */
%  unused()
298