%def bincmp(revcmp=""):
/*
 * Generic two-operand compare-and-branch operation.  Provide a "revcmp"
 * fragment that specifies the *reverse* comparison to perform, e.g.
 * for "if-le" you would use "gt".
 *
 * For: if-eq, if-ne, if-lt, if-ge, if-gt, if-le
 */
    /* if-cmp vA, vB, +CCCC */
    movl    rINST, %ecx                     # rcx <- A+
    sarl    $$4, rINST                      # rINST <- B
    andb    $$0xf, %cl                      # rcx <- A
    GET_VREG %eax, %rcx                     # eax <- vA
    cmpl    VREG_ADDRESS(rINSTq), %eax      # compare (vA, vB)
    j${revcmp}   1f
    movswq  2(rPC), rINSTq                  # Get signed branch offset
    BRANCH
1:
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
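/*
 * To make the reverse-comparison trick concrete, here is an illustrative
 * C sketch of what the generated handler does for if-eq (revcmp="ne").
 * The names (vregs, insns, pc) are hypothetical and only for exposition;
 * pc counts 16-bit code units, which BRANCH later scales to bytes.
 *
 *   // if-eq vA, vB, +CCCC  (revcmp="ne")
 *   if (vregs[A] != vregs[B]) {           // jne 1f: comparison fails
 *       pc += 2;                          // not taken: skip to next insn
 *   } else {
 *       pc += (int16_t)insns[pc + 1];     // taken: signed offset CCCC
 *   }
 */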

%def zcmp(revcmp=""):
/*
 * Generic one-operand compare-and-branch operation.  Provide a "revcmp"
 * fragment that specifies the *reverse* comparison to perform, e.g.
 * for "if-lez" you would use "gt".
 *
 * For: if-eqz, if-nez, if-ltz, if-gez, if-gtz, if-lez
 */
    /* if-cmp vAA, +BBBB */
    cmpl    $$0, VREG_ADDRESS(rINSTq)       # compare (vAA, 0)
    j${revcmp}   1f
    movswq  2(rPC), rINSTq                  # fetch signed displacement
    BRANCH
1:
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
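/*
 * Same idea, but against a constant zero; an illustrative C sketch for
 * if-eqz (revcmp="ne"), using the same hypothetical names as above.
 * Note the cmpl reads vAA straight from its memory slot, so no register
 * load is needed.
 *
 *   // if-eqz vAA, +BBBB  (revcmp="ne")
 *   if (vregs[AA] != 0) {                 // jne 1f: branch not taken
 *       pc += 2;
 *   } else {
 *       pc += (int16_t)insns[pc + 1];     // taken: signed offset BBBB
 *   }
 */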

%def op_goto():
/*
 * Unconditional branch, 8-bit offset.
 *
 * The branch distance is a signed code-unit offset, which we need to
 * double to get a byte offset.
 */
    /* goto +AA */
    movsbq  rINSTbl, rINSTq                 # rINSTq <- ssssssssAA
    BRANCH
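/*
 * The doubling itself happens in the shared BRANCH macro (defined
 * elsewhere).  Conceptually, for all three goto widths, its contract is:
 *
 *   // rINSTq holds the sign-extended offset in 16-bit code units
 *   pc_bytes += 2 * offset_code_units;    // code units -> bytes
 *   // ...then fetch and dispatch the instruction at the new pc
 *
 * This is a sketch of the macro's contract, not its exact body, which
 * in nterp typically also handles hotness counting and suspend checks
 * on backward (negative) branches.
 */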

%def op_goto_16():
/*
 * Unconditional branch, 16-bit offset.
 *
 * The branch distance is a signed code-unit offset, which we need to
 * double to get a byte offset.
 */
    /* goto/16 +AAAA */
    movswq  2(rPC), rINSTq                  # rINSTq <- ssssAAAA
    BRANCH

%def op_goto_32():
/*
 * Unconditional branch, 32-bit offset.
 *
 * The branch distance is a signed code-unit offset, which we need to
 * double to get a byte offset.
 */
    /* goto/32 +AAAAAAAA */
    movslq  2(rPC), rINSTq                  # rINSTq <- ssssssssAAAAAAAA
    BRANCH

%def op_if_eq():
%  bincmp(revcmp="ne")

%def op_if_eqz():
%  zcmp(revcmp="ne")

%def op_if_ge():
%  bincmp(revcmp="l")

%def op_if_gez():
%  zcmp(revcmp="l")

%def op_if_gt():
%  bincmp(revcmp="le")

%def op_if_gtz():
%  zcmp(revcmp="le")

%def op_if_le():
%  bincmp(revcmp="g")

%def op_if_lez():
%  zcmp(revcmp="g")

%def op_if_lt():
%  bincmp(revcmp="ge")

%def op_if_ltz():
%  zcmp(revcmp="ge")

%def op_if_ne():
%  bincmp(revcmp="e")

%def op_if_nez():
%  zcmp(revcmp="e")
%def op_packed_switch(func="NterpDoPackedSwitch"):
/*
 * Handle a packed-switch or sparse-switch instruction.  In both cases
 * we decode it and hand it off to a helper function.
 *
 * We don't really expect backward branches in a switch statement, but
 * they're perfectly legal, so we check for them here.
 *
 * For: packed-switch, sparse-switch
 */
    /* op vAA, +BBBBBBBB */
    movslq  2(rPC), OUT_ARG0                # OUT_ARG0 <- ssssssssBBBBbbbb
    leaq    (rPC,OUT_ARG0,2), OUT_ARG0      # OUT_ARG0 <- PC + ssssssssBBBBbbbb*2
    GET_VREG OUT_32_ARG1, rINSTq            # OUT_32_ARG1 <- vAA
    call    SYMBOL($func)
    movslq  %eax, rINSTq                    # rINSTq <- branch offset in code units
    BRANCH
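/*
 * Conceptual C sketch of the packed-switch lookup the helper performs
 * (simplified and hypothetical; the authoritative logic lives in
 * NterpDoPackedSwitch).  The payload layout comes from the Dalvik
 * bytecode format: ident 0x0100, entry count, first key, then a table
 * of relative branch targets.
 *
 *   int32_t DoPackedSwitchSketch(const uint16_t* payload, int32_t val) {
 *       uint16_t size = payload[1];               // number of targets
 *       int32_t first_key;
 *       memcpy(&first_key, payload + 2, sizeof(first_key));
 *       const uint16_t* targets = payload + 4;    // int32_t entries
 *       uint32_t index = (uint32_t)(val - first_key);
 *       if (index >= size) return 3;              // no match: fall through
 *       int32_t off;                              // (31t insn is 3 code units)
 *       memcpy(&off, targets + 2 * index, sizeof(off));
 *       return off;                               // offset in code units
 *   }
 */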

%def op_return(is_object="0"):
/*
 * Return a 32-bit value.
 */
    GET_VREG %eax, rINSTq                   # eax <- vAA
    .if !$is_object
    // In case we're going back to compiled code, put the
    // result also in an xmm register.
    movd %eax, %xmm0
    .endif
    CFI_REMEMBER_STATE
    movq -8(rREFS), %rsp
    CFI_DEF_CFA(rsp, CALLEE_SAVES_SIZE)
    RESTORE_ALL_CALLEE_SAVES
    ret
    CFI_RESTORE_STATE
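/*
 * Why both registers: under the standard x86-64 convention a caller reads
 * an integral result from eax/rax but a floating-point result from xmm0,
 * and the interpreter cannot cheaply tell which one a compiled caller
 * expects.  Mirroring eax into xmm0 covers both cases, e.g.:
 *
 *   int   i = o.getInt();    // compiled caller reads %eax
 *   float f = o.getFloat();  // compiled caller reads %xmm0
 *
 * References never come back in a floating-point register, which is why
 * the copy is skipped for is_object.
 */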

%def op_return_object():
%  op_return(is_object="1")

%def op_return_void():
    // Thread fence for constructor is a no-op on x86_64.
    CFI_REMEMBER_STATE
    movq -8(rREFS), %rsp
    CFI_DEF_CFA(rsp, CALLEE_SAVES_SIZE)
    RESTORE_ALL_CALLEE_SAVES
    ret
    CFI_RESTORE_STATE
%def op_return_void_no_barrier():
%  op_return_void()

%def op_return_wide():
    GET_WIDE_VREG %rax, rINSTq              # rax <- vAA
    // In case we're going back to compiled code, put the
    // result also in an xmm register.
    movq    %rax, %xmm0
    CFI_REMEMBER_STATE
    movq    -8(rREFS), %rsp
    CFI_DEF_CFA(rsp, CALLEE_SAVES_SIZE)
    RESTORE_ALL_CALLEE_SAVES
    ret
    CFI_RESTORE_STATE

%def op_sparse_switch():
%  op_packed_switch(func="NterpDoSparseSwitch")
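/*
 * The sparse variant differs only in payload layout and lookup strategy
 * (again a simplified, hypothetical sketch; NterpDoSparseSwitch is the
 * real implementation): ident 0x0200, entry count, then sorted keys
 * followed by targets, searched by key rather than indexed.
 *
 *   int32_t DoSparseSwitchSketch(const uint16_t* payload, int32_t val) {
 *       uint16_t size = payload[1];
 *       const uint16_t* keys    = payload + 2;    // int32_t entries
 *       const uint16_t* targets = keys + 2 * size;
 *       for (uint16_t i = 0; i < size; i++) {     // real code: binary search
 *           int32_t key;
 *           memcpy(&key, keys + 2 * i, sizeof(key));
 *           if (key == val) {
 *               int32_t off;
 *               memcpy(&off, targets + 2 * i, sizeof(off));
 *               return off;                       // offset in code units
 *           }
 *       }
 *       return 3;  // no match: fall through past the 3-code-unit insn
 *   }
 */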

%def op_throw():
    EXPORT_PC                               # exception delivery needs the dex pc
    GET_VREG %edi, rINSTq                   # edi <- vAA (exception object)
    movq rSELF:THREAD_SELF_OFFSET, %rsi
    call SYMBOL(art_quick_deliver_exception)
    int3                                    # unreachable: the call does not return