%def bincmp(condition=""):
    /*
     * Generic two-operand compare-and-branch operation.  Provide a "condition"
     * fragment that specifies the comparison to perform.
     *
     * For: if-eq, if-ne, if-lt, if-ge, if-gt, if-le
     */
    /* if-cmp vA, vB, +CCCC */
    mov     r1, rINST, lsr #12          @ r1<- B
    ubfx    r0, rINST, #8, #4           @ r0<- A
    GET_VREG r3, r1                     @ r3<- vB
    GET_VREG r0, r0                     @ r0<- vA
    FETCH_S rINST, 1                    @ rINST<- branch offset, in code units
    cmp     r0, r3                      @ compare (vA, vB)
    b${condition} MterpCommonTakenBranchNoFlags @ taken: rINST holds the offset
    cmp     rPROFILE, #JIT_CHECK_OSR    @ possible OSR re-entry?
    beq     .L_check_not_taken_osr
    FETCH_ADVANCE_INST 2                @ not taken: step past the 2-unit insn
    GET_INST_OPCODE ip                  @ extract opcode from rINST
    GOTO_OPCODE ip                      @ jump to next instruction
21
%def zcmp(condition=""):
    /*
     * Generic one-operand compare-and-branch operation.  Provide a "condition"
     * fragment that specifies the comparison to perform.
     *
     * for: if-eqz, if-nez, if-ltz, if-gez, if-gtz, if-lez
     */
    /* if-cmp vAA, +BBBB */
    mov     r0, rINST, lsr #8           @ r0<- AA
    GET_VREG r0, r0                     @ r0<- vAA
    FETCH_S rINST, 1                    @ rINST<- branch offset, in code units
    cmp     r0, #0                      @ compare (vAA, 0)
    b${condition} MterpCommonTakenBranchNoFlags @ taken: rINST holds the offset
    cmp     rPROFILE, #JIT_CHECK_OSR    @ possible OSR re-entry?
    beq     .L_check_not_taken_osr
    FETCH_ADVANCE_INST 2                @ not taken: step past the 2-unit insn
    GET_INST_OPCODE ip                  @ extract opcode from rINST
    GOTO_OPCODE ip                      @ jump to next instruction
40
%def op_goto():
    /*
     * Unconditional branch, 8-bit offset.
     *
     * The branch distance is a signed code-unit offset, which we need to
     * double to get a byte offset.
     */
    /* goto +AA */
    sbfx    rINST, rINST, #8, #8           @ rINST<- ssssssAA (sign-extended)
    b       MterpCommonTakenBranchNoFlags  @ rINST carries the code-unit offset
51
%def op_goto_16():
    /*
     * Unconditional branch, 16-bit offset.
     *
     * The branch distance is a signed code-unit offset, which we need to
     * double to get a byte offset.
     */
    /* goto/16 +AAAA */
    FETCH_S rINST, 1                    @ rINST<- ssssAAAA (sign-extended)
    b       MterpCommonTakenBranchNoFlags @ rINST carries the code-unit offset
62
%def op_goto_32():
    /*
     * Unconditional branch, 32-bit offset.
     *
     * The branch distance is a signed code-unit offset, which we need to
     * double to get a byte offset.
     *
     * Unlike most opcodes, this one is allowed to branch to itself, so
     * our "backward branch" test must be "<=0" instead of "<0".  Because
     * we need the V bit set, we'll use an adds to convert from Dalvik
     * offset to byte offset.
     */
    /* goto/32 +AAAAAAAA */
    FETCH r0, 1                         @ r0<- aaaa (lo)
    FETCH r3, 2                         @ r3<- AAAA (hi)
    orrs    rINST, r0, r3, lsl #16      @ rINST<- AAAAaaaa; S sets flags for helper
    b       MterpCommonTakenBranch      @ flags-consuming variant (see note above)
80
%def op_if_eq():
    /* if-eq vA, vB, +CCCC: branch when vA == vB */
%  bincmp(condition="eq")
83
%def op_if_eqz():
    /* if-eqz vAA, +BBBB: branch when vAA == 0 */
%  zcmp(condition="eq")
86
%def op_if_ge():
    /* if-ge vA, vB, +CCCC: branch when vA >= vB (signed) */
%  bincmp(condition="ge")
89
%def op_if_gez():
    /* if-gez vAA, +BBBB: branch when vAA >= 0 (signed) */
%  zcmp(condition="ge")
92
%def op_if_gt():
    /* if-gt vA, vB, +CCCC: branch when vA > vB (signed) */
%  bincmp(condition="gt")
95
%def op_if_gtz():
    /* if-gtz vAA, +BBBB: branch when vAA > 0 (signed) */
%  zcmp(condition="gt")
98
%def op_if_le():
    /* if-le vA, vB, +CCCC: branch when vA <= vB (signed) */
%  bincmp(condition="le")
101
%def op_if_lez():
    /* if-lez vAA, +BBBB: branch when vAA <= 0 (signed) */
%  zcmp(condition="le")
104
%def op_if_lt():
    /* if-lt vA, vB, +CCCC: branch when vA < vB (signed) */
%  bincmp(condition="lt")
107
%def op_if_ltz():
    /* if-ltz vAA, +BBBB: branch when vAA < 0 (signed) */
%  zcmp(condition="lt")
110
%def op_if_ne():
    /* if-ne vA, vB, +CCCC: branch when vA != vB */
%  bincmp(condition="ne")
113
%def op_if_nez():
    /* if-nez vAA, +BBBB: branch when vAA != 0 */
%  zcmp(condition="ne")
116
%def op_packed_switch(func="MterpDoPackedSwitch"):
    /*
     * Handle a packed-switch or sparse-switch instruction.  In both cases
     * we decode it and hand it off to a helper function.
     *
     * We don't really expect backward branches in a switch statement, but
     * they're perfectly legal, so we check for them here.
     *
     * for: packed-switch, sparse-switch
     */
    /* op vAA, +BBBB */
    FETCH r0, 1                         @ r0<- bbbb (lo)
    FETCH r1, 2                         @ r1<- BBBB (hi)
    mov     r3, rINST, lsr #8           @ r3<- AA
    orr     r0, r0, r1, lsl #16         @ r0<- BBBBbbbb
    GET_VREG r1, r3                     @ r1<- vAA
    add     r0, rPC, r0, lsl #1         @ r0<- PC + BBBBbbbb*2 (switch data addr)
    bl      $func                       @ r0<- code-unit branch offset
    movs    rINST, r0                   @ rINST<- offset; S sets flags for helper
    b       MterpCommonTakenBranch      @ flags-consuming variant (see goto/32)
137
%def op_return():
    /*
     * Return a 32-bit value.
     *
     * Issues a constructor fence, runs a suspend check if the thread has a
     * suspend-or-checkpoint request pending, then hands the value in r0/r1
     * to the common return path.
     *
     * for: return, return-object
     */
    /* op vAA */
    .extern MterpThreadFenceForConstructor
    bl      MterpThreadFenceForConstructor
    ldr     lr, [rSELF, #THREAD_FLAGS_OFFSET]
    mov     r0, rSELF
    ands    lr, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
    blne    MterpSuspendCheck                       @ (self)
    mov     r2, rINST, lsr #8           @ r2<- AA
    GET_VREG r0, r2                     @ r0<- vAA
    mov     r1, #0                      @ high word of result is zero
    b       MterpReturn
155
%def op_return_object():
    /* return-object vAA: identical handling to return (32-bit value) */
%  op_return()
158
%def op_return_void():
    /*
     * Return with no value.  Issues a constructor fence, runs a suspend
     * check if requested, then returns 0 in r0/r1 via the common path.
     */
    .extern MterpThreadFenceForConstructor
    bl      MterpThreadFenceForConstructor
    ldr     lr, [rSELF, #THREAD_FLAGS_OFFSET]
    mov     r0, rSELF
    ands    lr, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
    blne    MterpSuspendCheck                       @ (self)
    mov    r0, #0
    mov    r1, #0
    b      MterpReturn
169
%def op_return_void_no_barrier():
    /*
     * Return with no value, without the constructor fence that
     * op_return_void issues.  Still performs the suspend check.
     */
    ldr     lr, [rSELF, #THREAD_FLAGS_OFFSET]
    mov     r0, rSELF
    ands    lr, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
    blne    MterpSuspendCheck                       @ (self)
    mov    r0, #0
    mov    r1, #0
    b      MterpReturn
178
%def op_return_wide():
    /*
     * Return a 64-bit value.
     *
     * Issues a constructor fence, runs a suspend check if requested, then
     * loads the register pair vAA/vAA+1 into r0/r1 for the common path.
     */
    /* return-wide vAA */
    .extern MterpThreadFenceForConstructor
    bl      MterpThreadFenceForConstructor
    ldr     lr, [rSELF, #THREAD_FLAGS_OFFSET]
    mov     r0, rSELF
    ands    lr, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
    blne    MterpSuspendCheck                       @ (self)
    mov     r2, rINST, lsr #8           @ r2<- AA
    VREG_INDEX_TO_ADDR r2, r2           @ r2<- &fp[AA]
    GET_VREG_WIDE_BY_ADDR r0, r1, r2    @ r0/r1 <- vAA/vAA+1
    b       MterpReturn
194
%def op_sparse_switch():
    /* sparse-switch: same decode as packed-switch, different lookup helper */
%  op_packed_switch(func="MterpDoSparseSwitch")
197
%def op_throw():
    /*
     * Throw an exception object in the current thread.
     *
     * A null vAA is converted into a NullPointerException instead.
     */
    /* throw vAA */
    EXPORT_PC
    mov      r2, rINST, lsr #8           @ r2<- AA
    GET_VREG r1, r2                      @ r1<- vAA (exception object)
    cmp      r1, #0                      @ null object?
    beq      common_errNullObject        @ yes, throw an NPE instead
    str      r1, [rSELF, #THREAD_EXCEPTION_OFFSET]  @ thread->exception<- obj
    b        MterpException
210