%def unused():
/*
 * Shared handler for opcode slots with no assigned instruction: trap with
 * int3 (breakpoint) so a stray dispatch fails loudly instead of silently.
 */
    int3
3
%def op_const():
    /* const vAA, #+BBBBbbbb */
    /* Load the 32-bit literal embedded at rPC+2 into register vAA. */
    movl    2(rPC), %eax                    # grab all 32 bits at once
    SET_VREG %eax, rINSTq                   # vAA<- eax
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 3        # 3 code units consumed
9
%def op_const_16():
    /* const/16 vAA, #+BBBB */
    /* Sign-extend the 16-bit literal at rPC+2 to 32 bits, store in vAA. */
    movswl  2(rPC), %ecx                    # ecx <- ssssBBBB
    SET_VREG %ecx, rINSTq                   # vAA <- ssssBBBB
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
15
%def op_const_4():
    /* const/4 vA, #+B */
    /* rINST's low byte is BA: low nibble = dest reg A, high nibble = literal B. */
    movsbl  rINSTbl, %eax                   # eax <- ssssssBA (sign-extended opcode byte)
    andl    MACRO_LITERAL(0xf), rINST       # rINST <- A
    sarl    MACRO_LITERAL(4), %eax          # eax <- sssssssB (arithmetic shift keeps B's sign)
    SET_VREG %eax, rINSTq                   # vA <- sign-extended B
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 1
23
%def op_const_high16():
    /* const/high16 vAA, #+BBBB0000 */
    /* Place the 16-bit literal in the upper half of a 32-bit value; low half zero. */
    movzwl  2(rPC), %eax                    # eax <- 0000BBBB
    sall    MACRO_LITERAL(16), %eax         # eax <- BBBB0000
    SET_VREG %eax, rINSTq                   # vAA <- eax
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
30
%def op_const_object(jumbo="0", helper="nterp_load_object"):
   /*
    * Shared template for const-class / const-method-handle / const-method-type /
    * const-string: load the object for the dex index at rPC+2 into vAA.
    * `jumbo` selects a 3-code-unit encoding (32-bit index) over the 2-unit one.
    */
   // Fast-path which gets the object from thread-local cache.
   FETCH_FROM_THREAD_CACHE %rax, 2f
   // A non-zero mark entrypoint means the GC is marking: run the cached
   // reference through the read barrier (label 3) before storing it.
   cmpq MACRO_LITERAL(0), rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
   jne 3f
1:
   SET_VREG_OBJECT %eax, rINSTq            # vAA <- value
   .if $jumbo
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 3
   .else
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
   .endif
2:
   // Slow path: cache miss; call the runtime helper to resolve the object.
   EXPORT_PC
   movq rSELF:THREAD_SELF_OFFSET, %rdi     # arg0 = Thread* self
   movq 0(%rsp), %rsi                      # arg1 = value at frame bottom — presumably the caller's ArtMethod*; confirm against nterp frame layout
   movq rPC, %rdx                          # arg2 = dex PC
   call SYMBOL($helper)                    # resolved object returned in %rax
   jmp 1b
3:
   // 00 is %rax
   call art_quick_read_barrier_mark_reg00
   jmp 1b
54
%def op_const_class():
   /* const-class vAA, Class@BBBB */
%  op_const_object(jumbo="0", helper="nterp_get_class_or_allocate_object")

%def op_const_method_handle():
   /* const-method-handle vAA, MethodHandle@BBBB */
%  op_const_object(jumbo="0")

%def op_const_method_type():
   /* const-method-type vAA, MethodType@BBBB */
%  op_const_object(jumbo="0")

%def op_const_string():
   /* const/string vAA, String@BBBB */
%  op_const_object(jumbo="0")

%def op_const_string_jumbo():
   /* const/string vAA, String@BBBBBBBB */
%  op_const_object(jumbo="1")
71
%def op_const_wide():
    /* const-wide vAA, #+HHHHhhhhBBBBbbbb */
    /* Load a full 64-bit literal (4 payload code units) into the pair vAA/vAA+1. */
    movq    2(rPC), %rax                    # rax <- HHHHhhhhBBBBbbbb
    SET_WIDE_VREG %rax, rINSTq              # vAA/vAA+1 <- rax
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 5
77
%def op_const_wide_16():
    /* const-wide/16 vAA, #+BBBB */
    /* Sign-extend the 16-bit literal to 64 bits. */
    movswq  2(rPC), %rax                    # rax <- ssssssssssssBBBB
    SET_WIDE_VREG %rax, rINSTq              # store
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
83
%def op_const_wide_32():
    /* const-wide/32 vAA, #+BBBBbbbb */
    /* Sign-extend the 32-bit literal to 64 bits. */
    movslq   2(rPC), %rax                   # rax <- ssssssssBBBBbbbb
    SET_WIDE_VREG %rax, rINSTq              # store
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 3
89
%def op_const_wide_high16():
    /* const-wide/high16 vAA, #+BBBB000000000000 */
    movzwq  2(rPC), %rax                    # rax <- 000000000000BBBB
    salq    $$48, %rax                      # rax <- BBBB000000000000 (literal into top 16 bits)
    SET_WIDE_VREG %rax, rINSTq              # vAA/vAA+1 <- rax
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
96
%def op_monitor_enter():
/*
 * Synchronize on an object.
 */
    /* monitor-enter vAA */
    EXPORT_PC                               # publish the dex PC before calling into the runtime
    GET_VREG %edi, rINSTq                   # edi <- vAA (object to lock); arg0 of the call
    call art_quick_lock_object
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 1
106
%def op_monitor_exit():
/*
 * Unlock an object.
 *
 * Exceptions that occur when unlocking a monitor need to appear as
 * if they happened at the following instruction.  See the Dalvik
 * instruction spec.
 */
    /* monitor-exit vAA */
    EXPORT_PC                               # publish the dex PC before calling into the runtime
    GET_VREG %edi, rINSTq                   # edi <- vAA (object to unlock); arg0 of the call
    call art_quick_unlock_object
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 1
120
%def op_move(is_object="0"):
    /* for move, move-object, long-to-int */
    /* op vA, vB */
    /* rINST's low byte is BA: low nibble = dest A, high nibble = source B. */
    movl    rINST, %eax                     # eax <- BA
    andb    $$0xf, %al                      # eax <- A (low nibble)
    shrl    $$4, rINST                      # rINST <- B (high nibble)
    GET_VREG %edx, rINSTq                   # edx <- vB
    .if $is_object
    SET_VREG_OBJECT %edx, %rax              # fp[A] <- fp[B] (reference-aware store)
    .else
    SET_VREG %edx, %rax                     # fp[A] <- fp[B]
    .endif
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 1
134
%def op_move_16(is_object="0"):
    /* for: move/16, move-object/16 */
    /* op vAAAA, vBBBB */
    movzwq  4(rPC), %rcx                    # rcx <- BBBB (source register number)
    movzwq  2(rPC), %rax                    # rax <- AAAA (destination register number)
    GET_VREG %edx, %rcx                     # edx <- vBBBB
    .if $is_object
    SET_VREG_OBJECT %edx, %rax              # fp[AAAA] <- fp[BBBB] (reference-aware store)
    .else
    SET_VREG %edx, %rax                     # fp[AAAA] <- fp[BBBB]
    .endif
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 3
147
%def op_move_exception():
    /* move-exception vAA */
    /* Store the pending exception into vAA and clear it on the thread. */
    movl    rSELF:THREAD_EXCEPTION_OFFSET, %eax   # eax <- pending exception object
    SET_VREG_OBJECT %eax, rINSTq            # fp[AA] <- exception object
    movl    $$0, rSELF:THREAD_EXCEPTION_OFFSET    # clear the pending exception
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 1
154
%def op_move_from16(is_object="0"):
    /* for: move/from16, move-object/from16 */
    /* op vAA, vBBBB */
    movzwq  2(rPC), %rax                    # rax <- BBBB (source register number)
    GET_VREG %edx, %rax                     # edx <- fp[BBBB]
    .if $is_object
    SET_VREG_OBJECT %edx, rINSTq            # fp[AA] <- fp[BBBB] (reference-aware store)
    .else
    SET_VREG %edx, rINSTq                   # fp[AA] <- fp[BBBB]
    .endif
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
166
%def op_move_object():
    /* move-object vA, vB */
%  op_move(is_object="1")

%def op_move_object_16():
    /* move-object/16 vAAAA, vBBBB */
%  op_move_16(is_object="1")

%def op_move_object_from16():
    /* move-object/from16 vAA, vBBBB */
%  op_move_from16(is_object="1")
175
%def op_move_result(is_object="0"):
    /* for: move-result, move-result-object */
    /* op vAA */
    /* %eax is expected to still hold the 32-bit result of the preceding invoke. */
    .if $is_object
    SET_VREG_OBJECT %eax, rINSTq            # fp[AA] <- result (reference-aware store)
    .else
    SET_VREG %eax, rINSTq                   # fp[AA] <- result
    .endif
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 1

%def op_move_result_object():
    /* move-result-object vAA */
%  op_move_result(is_object="1")
188
%def op_move_result_wide():
    /* move-result-wide vAA */
    /* %rax is expected to still hold the 64-bit result of the preceding invoke. */
    SET_WIDE_VREG %rax, rINSTq                   # v[AA]/v[AA+1] <- rax
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 1
193
%def op_move_wide():
    /* move-wide vA, vB */
    /* NOTE: regs can overlap, e.g. "move v6,v7" or "move v7,v6" */
    /* Reading v[B] fully before writing v[A] makes the overlap safe. */
    movl    rINST, %ecx                     # ecx <- BA
    sarl    $$4, rINST                      # rINST <- B
    andb    $$0xf, %cl                      # ecx <- A
    GET_WIDE_VREG %rdx, rINSTq              # rdx <- v[B]
    SET_WIDE_VREG %rdx, %rcx                # v[A] <- rdx
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 1
203
%def op_move_wide_16():
    /* move-wide/16 vAAAA, vBBBB */
    /* NOTE: regs can overlap, e.g. "move v6,v7" or "move v7,v6" */
    movzwq  4(rPC), %rcx                    # rcx <- BBBB (source register number)
    movzwq  2(rPC), %rax                    # rax <- AAAA (destination register number)
    GET_WIDE_VREG %rdx, %rcx                # rdx <- v[B]
    SET_WIDE_VREG %rdx, %rax                # v[A] <- rdx
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 3
212
%def op_move_wide_from16():
    /* move-wide/from16 vAA, vBBBB */
    /* NOTE: regs can overlap, e.g. "move v6,v7" or "move v7,v6" */
    movzwl  2(rPC), %ecx                    # ecx <- BBBB (source register number)
    GET_WIDE_VREG %rdx, %rcx                # rdx <- v[B]
    SET_WIDE_VREG %rdx, rINSTq              # v[A] <- rdx
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
220
%def op_nop():
    /* nop — no effect; just advance past the single code unit. */
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 1
223
%def op_unused_3e():
   // Opcodes 3e-43, 79-7a, f3-f9 and fc-fd are unallocated in the dex
   // instruction set; each handler traps via the shared `unused` template.
%  unused()

%def op_unused_3f():
%  unused()

%def op_unused_40():
%  unused()

%def op_unused_41():
%  unused()

%def op_unused_42():
%  unused()

%def op_unused_43():
%  unused()

%def op_unused_79():
%  unused()

%def op_unused_7a():
%  unused()

%def op_unused_f3():
%  unused()

%def op_unused_f4():
%  unused()

%def op_unused_f5():
%  unused()

%def op_unused_f6():
%  unused()

%def op_unused_f7():
%  unused()

%def op_unused_f8():
%  unused()

%def op_unused_f9():
%  unused()

%def op_unused_fc():
%  unused()

%def op_unused_fd():
%  unused()
274