%def bincmp(revcmp=""):
/*
 * Generic two-operand compare-and-branch operation.  Provide a "revcmp"
 * fragment that specifies the *reverse* comparison to perform, e.g.
 * for "if-le" you would use "gt".
 *
 * For: if-eq, if-ne, if-lt, if-ge, if-gt, if-le
 */
    /* if-cmp vA, vB, +CCCC */
    movl    rINST, %ecx                     # ecx <- BA (both operand nibbles)
    sarl    $$4, rINST                      # rINST <- B (high nibble)
    andb    $$0xf, %cl                      # ecx <- A (low nibble only)
    GET_VREG %eax, %rcx                     # eax <- vA
    cmpl    VREG_ADDRESS(rINSTq), %eax      # compare (vA, vB)
    j${revcmp}   1f                         # reverse test: taken jump = branch NOT taken
    movswq  2(rPC), rINSTq                  # rINSTq <- signed branch offset, in code units
    testq   rINSTq, rINSTq                  # set SF/ZF on offset for MterpCommonTakenBranch
    jmp     MterpCommonTakenBranch
1:
    cmpl    $$JIT_CHECK_OSR, rPROFILE       # not-taken path still reports to JIT when profiling
    je      .L_check_not_taken_osr
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
23
%def zcmp(revcmp=""):
/*
 * Generic one-operand compare-and-branch operation.  Provide a "revcmp"
 * fragment that specifies the *reverse* comparison to perform, e.g.
 * for "if-le" you would use "gt".
 *
 * for: if-eqz, if-nez, if-ltz, if-gez, if-gtz, if-lez
 */
    /* if-cmp vAA, +BBBB */
    cmpl    $$0, VREG_ADDRESS(rINSTq)       # compare (vAA, 0)
    j${revcmp}   1f                         # reverse test: taken jump = branch NOT taken
    movswq  2(rPC), rINSTq                  # rINSTq <- signed displacement, in code units
    testq   rINSTq, rINSTq                  # set SF/ZF on offset for MterpCommonTakenBranch
    jmp     MterpCommonTakenBranch
1:
    cmpl    $$JIT_CHECK_OSR, rPROFILE       # not-taken path still reports to JIT when profiling
    je      .L_check_not_taken_osr
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
42
%def op_goto():
/*
 * Unconditional branch, 8-bit offset.
 *
 * The branch distance is a signed code-unit offset, which we need to
 * double to get a byte offset.
 */
    /* goto +AA */
    movsbq  rINSTbl, rINSTq                 # rINSTq <- ssssssAA (sign-extend 8-bit offset)
    testq   rINSTq, rINSTq                  # set SF/ZF on offset for MterpCommonTakenBranch
    jmp     MterpCommonTakenBranch
54
%def op_goto_16():
/*
 * Unconditional branch, 16-bit offset.
 *
 * The branch distance is a signed code-unit offset, which we need to
 * double to get a byte offset.
 */
    /* goto/16 +AAAA */
    movswq  2(rPC), rINSTq                  # rINSTq <- ssssAAAA (sign-extend 16-bit offset)
    testq   rINSTq, rINSTq                  # set SF/ZF on offset for MterpCommonTakenBranch
    jmp     MterpCommonTakenBranch
66
%def op_goto_32():
/*
 * Unconditional branch, 32-bit offset.
 *
 * The branch distance is a signed code-unit offset, which we need to
 * double to get a byte offset.
 *
 * NOTE: MterpCommonTakenBranch inspects the condition flags, so the
 * testq below is required even though the jump is unconditional.
 * (The old comment about using an "adds" was an ARM leftover.)
 */
    /* goto/32 +AAAAAAAA */
    movslq  2(rPC), rINSTq                  # rINSTq <- ssssssssAAAAAAAA (sign-extend)
    testq   rINSTq, rINSTq                  # set SF/ZF on offset for MterpCommonTakenBranch
    jmp     MterpCommonTakenBranch
81
%def op_if_eq():
/* if-eq vA, vB, +CCCC: branch if vA == vB ("ne" is the reverse, fall-through test) */
%  bincmp(revcmp="ne")
84
%def op_if_eqz():
/* if-eqz vAA, +BBBB: branch if vAA == 0 ("ne" is the reverse, fall-through test) */
%  zcmp(revcmp="ne")
87
%def op_if_ge():
/* if-ge vA, vB, +CCCC: branch if vA >= vB, signed ("l" is the reverse test) */
%  bincmp(revcmp="l")
90
%def op_if_gez():
/* if-gez vAA, +BBBB: branch if vAA >= 0, signed ("l" is the reverse test) */
%  zcmp(revcmp="l")
93
%def op_if_gt():
/* if-gt vA, vB, +CCCC: branch if vA > vB, signed ("le" is the reverse test) */
%  bincmp(revcmp="le")
96
%def op_if_gtz():
/* if-gtz vAA, +BBBB: branch if vAA > 0, signed ("le" is the reverse test) */
%  zcmp(revcmp="le")
99
%def op_if_le():
/* if-le vA, vB, +CCCC: branch if vA <= vB, signed ("g" is the reverse test) */
%  bincmp(revcmp="g")
102
%def op_if_lez():
/* if-lez vAA, +BBBB: branch if vAA <= 0, signed ("g" is the reverse test) */
%  zcmp(revcmp="g")
105
%def op_if_lt():
/* if-lt vA, vB, +CCCC: branch if vA < vB, signed ("ge" is the reverse test) */
%  bincmp(revcmp="ge")
108
%def op_if_ltz():
/* if-ltz vAA, +BBBB: branch if vAA < 0, signed ("ge" is the reverse test) */
%  zcmp(revcmp="ge")
111
%def op_if_ne():
/* if-ne vA, vB, +CCCC: branch if vA != vB ("e" is the reverse, fall-through test) */
%  bincmp(revcmp="e")
114
%def op_if_nez():
/* if-nez vAA, +BBBB: branch if vAA != 0 ("e" is the reverse, fall-through test) */
%  zcmp(revcmp="e")
117
%def op_packed_switch(func="MterpDoPackedSwitch"):
/*
 * Handle a packed-switch or sparse-switch instruction.  In both cases
 * we decode it and hand it off to a helper function.
 *
 * We don't really expect backward branches in a switch statement, but
 * they're perfectly legal, so we check for them here.
 *
 * for: packed-switch, sparse-switch
 */
    /* op vAA, +BBBB */
    movslq  2(rPC), OUT_ARG0                # OUT_ARG0 <- ssssssssBBBBbbbb (code-unit offset)
    leaq    (rPC,OUT_ARG0,2), OUT_ARG0      # OUT_ARG0 <- &switch data (PC + offset*2)
    GET_VREG OUT_32_ARG1, rINSTq            # OUT_32_ARG1 <- vAA (value to switch on)
    call    SYMBOL($func)                   # eax <- branch offset in code units
    testl   %eax, %eax                      # set SF/ZF on offset for MterpCommonTakenBranch
    movslq  %eax, rINSTq                    # rINSTq <- offset; movslq leaves flags intact
    jmp     MterpCommonTakenBranch
136
%def op_return():
/*
 * Return a 32-bit value.
 *
 * for: return, return-object
 */
    /* op vAA */
    .extern MterpThreadFenceForConstructor
    call    SYMBOL(MterpThreadFenceForConstructor)  # ensure constructor stores are visible
    movq    rSELF, OUT_ARG0                 # OUT_ARG0 <- self
    testl   $$(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(OUT_ARG0)
    jz      1f                              # fast path: no suspend/checkpoint pending
    call    SYMBOL(MterpSuspendCheck)
1:
    GET_VREG %eax, rINSTq                   # eax <- vAA (32-bit return value)
    jmp     MterpReturn
153
%def op_return_object():
/* return-object vAA: reference returns share the 32-bit return path */
%  op_return()
156
%def op_return_void():
/* return-void: no result; still emits the constructor fence and suspend check */
    .extern MterpThreadFenceForConstructor
    call    SYMBOL(MterpThreadFenceForConstructor)  # ensure constructor stores are visible
    movq    rSELF, OUT_ARG0                 # OUT_ARG0 <- self
    testl   $$(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(OUT_ARG0)
    jz      1f                              # fast path: no suspend/checkpoint pending
    call    SYMBOL(MterpSuspendCheck)
1:
    xorq    %rax, %rax                      # return value <- 0
    jmp     MterpReturn
167
%def op_return_void_no_barrier():
/* return-void, but without the MterpThreadFenceForConstructor call emitted by
 * op_return_void (presumably used when the fence is known unnecessary — confirm
 * against the instruction rewriter that selects this handler). */
    movq    rSELF, OUT_ARG0                 # OUT_ARG0 <- self
    testl   $$(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(OUT_ARG0)
    jz      1f                              # fast path: no suspend/checkpoint pending
    call    SYMBOL(MterpSuspendCheck)
1:
    xorq    %rax, %rax                      # return value <- 0
    jmp     MterpReturn
176
%def op_return_wide():
/*
 * Return a 64-bit value.
 */
    /* return-wide vAA */
    .extern MterpThreadFenceForConstructor
    call    SYMBOL(MterpThreadFenceForConstructor)  # ensure constructor stores are visible
    movq    rSELF, OUT_ARG0                 # OUT_ARG0 <- self
    testl   $$(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(OUT_ARG0)
    jz      1f                              # fast path: no suspend/checkpoint pending
    call    SYMBOL(MterpSuspendCheck)
1:
    GET_WIDE_VREG %rax, rINSTq              # rax <- v[AA:AA+1] (64-bit return value)
    jmp     MterpReturn
191
%def op_sparse_switch():
/* sparse-switch vAA, +BBBB: same flow as packed-switch, different lookup helper */
%  op_packed_switch(func="MterpDoSparseSwitch")
194
%def op_throw():
/*
 * Throw an exception object in the current thread.
 */
    /* throw vAA */
    EXPORT_PC
    GET_VREG %eax, rINSTq                   # eax <- vAA (exception object reference)
    testl   %eax, %eax                      # null check the FULL 32-bit reference;
                                            # testb %al,%al would false-trigger on any
                                            # non-null ref whose low address byte is 0
    jz      common_errNullObject
    movq    rSELF, %rcx                     # rcx <- self
    movq    %rax, THREAD_EXCEPTION_OFFSET(%rcx)  # self->exception <- vAA
    jmp     MterpException
207