%def bincmp(condition=""):
    /*
     * Generic two-operand compare-and-branch operation.  Provide a "condition"
     * fragment that specifies the comparison to perform: an AArch64 condition
     * code (e.g. "eq", "lt") used to form the b.<cond> after the cmp.
     *
     * Taken branches jump to the common handler, which applies the (signed,
     * code-unit) offset already loaded into wINST.  On the not-taken path we
     * still check wPROFILE against JIT_CHECK_OSR so a pending on-stack
     * replacement request is noticed before advancing.
     *
     * For: if-eq, if-ne, if-lt, if-ge, if-gt, if-le
     */
    /* if-cmp vA, vB, +CCCC */
    lsr     w1, wINST, #12              // w1<- B
    ubfx    w0, wINST, #8, #4           // w0<- A
    GET_VREG w3, w1                     // w3<- vB
    GET_VREG w2, w0                     // w2<- vA
    FETCH_S wINST, 1                    // wINST<- branch offset, in code units
    cmp     w2, w3                      // compare (vA, vB)
    b.${condition} MterpCommonTakenBranchNoFlags
    cmp     wPROFILE, #JIT_CHECK_OSR    // possible OSR re-entry?
    b.eq    .L_check_not_taken_osr
    FETCH_ADVANCE_INST 2                // not taken: skip the 2-unit instruction
    GET_INST_OPCODE ip                  // extract opcode from wINST
    GOTO_OPCODE ip                      // jump to next instruction
21
%def zcmp(compare="1", branch=""):
    /*
     * Generic one-operand compare-against-zero-and-branch operation.
     * "branch" supplies the complete branch fragment applied to the tested
     * value in w2 (e.g. "b.gt", "cbz     w2," or "tbnz    w2, #31,").
     * "compare" is "1" when an explicit cmp is needed to set flags for a
     * b.<cond> branch, and "0" when the branch instruction (cbz/cbnz/
     * tbz/tbnz) tests w2 directly and the cmp can be elided.
     *
     * for: if-eqz, if-nez, if-ltz, if-gez, if-gtz, if-lez
     */
    /* if-cmp vAA, +BBBB */
    lsr     w0, wINST, #8               // w0<- AA
    GET_VREG w2, w0                     // w2<- vAA
    FETCH_S wINST, 1                    // wINST<- branch offset, in code units
    .if ${compare}
    cmp     w2, #0                      // compare (vAA, 0)
    .endif
    ${branch} MterpCommonTakenBranchNoFlags
    cmp     wPROFILE, #JIT_CHECK_OSR    // possible OSR re-entry?
    b.eq    .L_check_not_taken_osr
    FETCH_ADVANCE_INST 2                // not taken: skip the 2-unit instruction
    GET_INST_OPCODE ip                  // extract opcode from wINST
    GOTO_OPCODE ip                      // jump to next instruction
42
%def op_goto():
    /*
     * Unconditional branch, 8-bit offset.
     *
     * The branch distance is a signed code-unit offset, which we need to
     * double to get a byte offset (the common handler performs the doubling
     * and the actual PC update).
     */
    /* goto +AA */
    sbfx    wINST, wINST, #8, #8           // wINST<- ssssssAA (sign-extended)
    b       MterpCommonTakenBranchNoFlags
53
%def op_goto_16():
    /*
     * Unconditional branch, 16-bit offset.
     *
     * The branch distance is a signed code-unit offset, which we need to
     * double to get a byte offset (the common handler performs the doubling
     * and the actual PC update).
     */
    /* goto/16 +AAAA */
    FETCH_S wINST, 1                    // wINST<- ssssAAAA (sign-extended)
    b       MterpCommonTakenBranchNoFlags
64
%def op_goto_32():
    /*
     * Unconditional branch, 32-bit offset.
     *
     * The branch distance is a signed code-unit offset, which we need to
     * double to get a byte offset.
     *
     * Unlike most opcodes, this one is allowed to branch to itself, so the
     * backward-branch test must treat an offset of zero as backward ("<= 0"
     * rather than "< 0").  That handling lives in the common taken-branch
     * handler; here we only assemble the full 32-bit offset into wINST.
     */
    /* goto/32 +AAAAAAAA */
    FETCH w0, 1                         // w0<- aaaa (lo)
    FETCH w1, 2                         // w1<- AAAA (hi)
    orr     wINST, w0, w1, lsl #16      // wINST<- AAAAaaaa
    b       MterpCommonTakenBranchNoFlags
82
%def op_if_eq():
%  # if-eq vA, vB, +CCCC: branch when vA == vB.
%  bincmp(condition="eq")
85
%def op_if_eqz():
%  # if-eqz vAA, +BBBB: cbz tests w2 directly, so the cmp is elided (compare="0").
%  zcmp(compare="0", branch="cbz     w2,")
88
%def op_if_ge():
%  # if-ge vA, vB, +CCCC: signed compare, branch when vA >= vB.
%  bincmp(condition="ge")
91
%def op_if_gez():
%  # if-gez vAA, +BBBB: vAA >= 0 iff sign bit (bit 31) is clear; tbz needs no flags.
%  zcmp(compare="0", branch="tbz     w2, #31,")
94
%def op_if_gt():
%  # if-gt vA, vB, +CCCC: signed compare, branch when vA > vB.
%  bincmp(condition="gt")
97
%def op_if_gtz():
%  # if-gtz vAA, +BBBB: "> 0" cannot be tested with a single-bit check, so keep
%  # the default compare="1" (explicit cmp) and use a flag-based b.gt.
%  zcmp(branch="b.gt")
100
%def op_if_le():
%  # if-le vA, vB, +CCCC: signed compare, branch when vA <= vB.
%  bincmp(condition="le")
103
%def op_if_lez():
%  # if-lez vAA, +BBBB: "<= 0" cannot be tested with a single-bit check, so keep
%  # the default compare="1" (explicit cmp) and use a flag-based b.le.
%  zcmp(branch="b.le")
106
%def op_if_lt():
%  # if-lt vA, vB, +CCCC: signed compare, branch when vA < vB.
%  bincmp(condition="lt")
109
%def op_if_ltz():
%  # if-ltz vAA, +BBBB: vAA < 0 iff sign bit (bit 31) is set; tbnz needs no flags.
%  zcmp(compare="0", branch="tbnz    w2, #31,")
112
%def op_if_ne():
%  # if-ne vA, vB, +CCCC: branch when vA != vB.
%  bincmp(condition="ne")
115
%def op_if_nez():
%  # if-nez vAA, +BBBB: cbnz tests w2 directly, so the cmp is elided (compare="0").
%  zcmp(compare="0", branch="cbnz    w2,")
118
%def op_packed_switch(func="MterpDoPackedSwitch"):
    /*
     * Handle a packed-switch or sparse-switch instruction.  In both cases
     * we decode it and hand it off to a helper function ("func"), passing
     * the switch-table address and the test value.
     *
     * We don't really expect backward branches in a switch statement, but
     * they're perfectly legal, so we check for them here (via the common
     * taken-branch handler).
     *
     * for: packed-switch, sparse-switch
     */
    /* op vAA, +BBBB */
    FETCH   w0, 1                       // x0<- 000000000000bbbb (lo)
    FETCH_S x1, 2                       // x1<- ssssssssssssBBBB (hi)
    lsr     w3, wINST, #8               // w3<- AA
    orr     x0, x0, x1, lsl #16         // x0<- ssssssssBBBBbbbb
    GET_VREG w1, w3                     // w1<- vAA (switched-on value)
    add     x0, xPC, x0, lsl #1         // x0<- PC + ssssssssBBBBbbbb*2 (table addr)
    bl      $func                       // w0<- code-unit branch offset
    sxtw    xINST, w0                   // sign-extend offset into xINST for handler
    b       MterpCommonTakenBranchNoFlags
139
%def op_return():
    /*
     * Return a 32-bit value.  Issues the constructor memory fence, then
     * runs a suspend/checkpoint check before handing the value in w0 to
     * the common return path.
     *
     * for: return, return-object
     */
    /* op vAA */
    .extern MterpThreadFenceForConstructor
    bl      MterpThreadFenceForConstructor
    ldr     w7, [xSELF, #THREAD_FLAGS_OFFSET]
    mov     x0, xSELF
    ands    w7, w7, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
    b.ne    .L${opcode}_check           // suspend/checkpoint requested?
.L${opcode}_return:
    lsr     w2, wINST, #8               // w2<- AA
    GET_VREG w0, w2                     // w0<- vAA (return value)
    b       MterpReturn
.L${opcode}_check:
    bl      MterpSuspendCheck           // (self)
    b       .L${opcode}_return
160
%def op_return_object():
%  # return-object vAA: identical handling to return (32-bit reference in vAA).
%  op_return()
163
%def op_return_void():
    /*
     * Return with no value.  Issues the constructor memory fence, runs a
     * suspend/checkpoint check, then returns zero in x0 via MterpReturn.
     */
    .extern MterpThreadFenceForConstructor
    bl      MterpThreadFenceForConstructor
    ldr     w7, [xSELF, #THREAD_FLAGS_OFFSET]
    mov     x0, xSELF
    ands    w7, w7, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
    b.ne    .L${opcode}_check           // suspend/checkpoint requested?
.L${opcode}_return:
    mov     x0, #0                      // no return value
    b       MterpReturn
.L${opcode}_check:
    bl      MterpSuspendCheck           // (self)
    b       .L${opcode}_return
177
%def op_return_void_no_barrier():
    /*
     * Return with no value, without the constructor memory fence that
     * op_return_void issues.  Still runs the suspend/checkpoint check.
     */
    ldr     w7, [xSELF, #THREAD_FLAGS_OFFSET]
    mov     x0, xSELF
    ands    w7, w7, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
    b.ne    .L${opcode}_check           // suspend/checkpoint requested?
.L${opcode}_return:
    mov     x0, #0                      // no return value
    b       MterpReturn
.L${opcode}_check:
    bl      MterpSuspendCheck           // (self)
    b       .L${opcode}_return
189
%def op_return_wide():
    /*
     * Return a 64-bit value.  Issues the constructor memory fence, runs a
     * suspend/checkpoint check, then hands the wide value in x0 to the
     * common return path.
     */
    /* return-wide vAA */
    /* op vAA */
    .extern MterpThreadFenceForConstructor
    bl      MterpThreadFenceForConstructor
    ldr     w7, [xSELF, #THREAD_FLAGS_OFFSET]
    mov     x0, xSELF
    ands    w7, w7, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
    b.ne    .L${opcode}_check           // suspend/checkpoint requested?
.L${opcode}_return:
    lsr     w2, wINST, #8               // w2<- AA
    GET_VREG_WIDE x0, w2                // x0<- vAA (64-bit return value)
    b       MterpReturn
.L${opcode}_check:
    bl      MterpSuspendCheck           // (self)
    b       .L${opcode}_return
209
%def op_sparse_switch():
%  # sparse-switch vAA, +BBBB: same decode as packed-switch, different lookup helper.
%  op_packed_switch(func="MterpDoSparseSwitch")
212
%def op_throw():
    /*
     * Throw an exception object in the current thread.  A null vAA routes
     * to the common null-object error path instead.
     */
    /* throw vAA */
    EXPORT_PC
    lsr      w2, wINST, #8               // w2<- AA
    GET_VREG w1, w2                      // w1<- vAA (exception object)
    cbz      w1, common_errNullObject    // throwing null -> NullPointerException path
    // 64-bit store is safe: the 32-bit GET_VREG write to w1 zeroed x1's upper half.
    str      x1, [xSELF, #THREAD_EXCEPTION_OFFSET]  // thread->exception<- obj
    b        MterpException
224