/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_UTILS_ARM64_JNI_MACRO_ASSEMBLER_ARM64_H_
#define ART_COMPILER_UTILS_ARM64_JNI_MACRO_ASSEMBLER_ARM64_H_

#include <stdint.h>
#include <memory>
#include <vector>

#include <android-base/logging.h>

#include "assembler_arm64.h"
#include "base/arena_containers.h"
#include "base/enums.h"
#include "base/macros.h"
#include "offsets.h"
#include "utils/assembler.h"
#include "utils/jni_macro_assembler.h"

// TODO(VIXL): Make VIXL compile with -Wshadow.
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wshadow"
#include "aarch64/macro-assembler-aarch64.h"
#pragma GCC diagnostic pop

namespace art {
namespace arm64 {

43 class Arm64JNIMacroAssembler final : public JNIMacroAssemblerFwd<Arm64Assembler, PointerSize::k64> {
44  public:
Arm64JNIMacroAssembler(ArenaAllocator * allocator)45   explicit Arm64JNIMacroAssembler(ArenaAllocator* allocator)
46       : JNIMacroAssemblerFwd(allocator),
47         exception_blocks_(allocator->Adapter(kArenaAllocAssembler)) {}
48 
49   ~Arm64JNIMacroAssembler();
50 
51   // Finalize the code.
52   void FinalizeCode() override;
53 
54   // Emit code that will create an activation on the stack.
55   void BuildFrame(size_t frame_size,
56                   ManagedRegister method_reg,
57                   ArrayRef<const ManagedRegister> callee_save_regs) override;
58 
59   // Emit code that will remove an activation from the stack.
60   void RemoveFrame(size_t frame_size,
61                    ArrayRef<const ManagedRegister> callee_save_regs,
62                    bool may_suspend) override;
63 
64   void IncreaseFrameSize(size_t adjust) override;
65   void DecreaseFrameSize(size_t adjust) override;
66 
67   // Store routines.
68   void Store(FrameOffset offs, ManagedRegister src, size_t size) override;
69   void StoreRef(FrameOffset dest, ManagedRegister src) override;
70   void StoreRawPtr(FrameOffset dest, ManagedRegister src) override;
71   void StoreImmediateToFrame(FrameOffset dest, uint32_t imm) override;
72   void StoreStackOffsetToThread(ThreadOffset64 thr_offs, FrameOffset fr_offs) override;
73   void StoreStackPointerToThread(ThreadOffset64 thr_offs) override;
74   void StoreSpanning(FrameOffset dest, ManagedRegister src, FrameOffset in_off) override;
75 
76   // Load routines.
77   void Load(ManagedRegister dest, FrameOffset src, size_t size) override;
78   void LoadFromThread(ManagedRegister dest, ThreadOffset64 src, size_t size) override;
79   void LoadRef(ManagedRegister dest, FrameOffset src) override;
80   void LoadRef(ManagedRegister dest,
81                ManagedRegister base,
82                MemberOffset offs,
83                bool unpoison_reference) override;
84   void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) override;
85   void LoadRawPtrFromThread(ManagedRegister dest, ThreadOffset64 offs) override;
86 
87   // Copying routines.
88   void MoveArguments(ArrayRef<ArgumentLocation> dests, ArrayRef<ArgumentLocation> srcs) override;
89   void Move(ManagedRegister dest, ManagedRegister src, size_t size) override;
90   void CopyRawPtrFromThread(FrameOffset fr_offs, ThreadOffset64 thr_offs) override;
91   void CopyRawPtrToThread(ThreadOffset64 thr_offs, FrameOffset fr_offs, ManagedRegister scratch)
92       override;
93   void CopyRef(FrameOffset dest, FrameOffset src) override;
94   void CopyRef(FrameOffset dest,
95                ManagedRegister base,
96                MemberOffset offs,
97                bool unpoison_reference) override;
98   void Copy(FrameOffset dest, FrameOffset src, size_t size) override;
99   void Copy(FrameOffset dest,
100             ManagedRegister src_base,
101             Offset src_offset,
102             ManagedRegister scratch,
103             size_t size) override;
104   void Copy(ManagedRegister dest_base,
105             Offset dest_offset,
106             FrameOffset src,
107             ManagedRegister scratch,
108             size_t size) override;
109   void Copy(FrameOffset dest,
110             FrameOffset src_base,
111             Offset src_offset,
112             ManagedRegister scratch,
113             size_t size) override;
114   void Copy(ManagedRegister dest,
115             Offset dest_offset,
116             ManagedRegister src,
117             Offset src_offset,
118             ManagedRegister scratch,
119             size_t size) override;
120   void Copy(FrameOffset dest,
121             Offset dest_offset,
122             FrameOffset src,
123             Offset src_offset,
124             ManagedRegister scratch,
125             size_t size) override;
126   void MemoryBarrier(ManagedRegister scratch) override;
127 
128   // Sign extension.
129   void SignExtend(ManagedRegister mreg, size_t size) override;
130 
131   // Zero extension.
132   void ZeroExtend(ManagedRegister mreg, size_t size) override;
133 
134   // Exploit fast access in managed code to Thread::Current().
135   void GetCurrentThread(ManagedRegister dest) override;
136   void GetCurrentThread(FrameOffset dest_offset) override;
137 
138   // Set up out_reg to hold a Object** into the handle scope, or to be null if the
139   // value is null and null_allowed. in_reg holds a possibly stale reference
140   // that can be used to avoid loading the handle scope entry to see if the value is
141   // null.
142   void CreateHandleScopeEntry(ManagedRegister out_reg,
143                               FrameOffset handlescope_offset,
144                               ManagedRegister in_reg,
145                               bool null_allowed) override;
146 
147   // Set up out_off to hold a Object** into the handle scope, or to be null if the
148   // value is null and null_allowed.
149   void CreateHandleScopeEntry(FrameOffset out_off,
150                               FrameOffset handlescope_offset,
151                               bool null_allowed) override;
152 
153   // src holds a handle scope entry (Object**) load this into dst.
154   void LoadReferenceFromHandleScope(ManagedRegister dst, ManagedRegister src) override;
155 
156   // Heap::VerifyObject on src. In some cases (such as a reference to this) we
157   // know that src may not be null.
158   void VerifyObject(ManagedRegister src, bool could_be_null) override;
159   void VerifyObject(FrameOffset src, bool could_be_null) override;
160 
161   // Jump to address held at [base+offset] (used for tail calls).
162   void Jump(ManagedRegister base, Offset offset) override;
163 
164   // Call to address held at [base+offset].
165   void Call(ManagedRegister base, Offset offset) override;
166   void Call(FrameOffset base, Offset offset) override;
167   void CallFromThread(ThreadOffset64 offset) override;
168 
169   // Generate code to check if Thread::Current()->exception_ is non-null
170   // and branch to a ExceptionSlowPath if it is.
171   void ExceptionPoll(size_t stack_adjust) override;
172 
173   // Create a new label that can be used with Jump/Bind calls.
174   std::unique_ptr<JNIMacroLabel> CreateLabel() override;
175   // Emit an unconditional jump to the label.
176   void Jump(JNIMacroLabel* label) override;
177   // Emit a conditional jump to the label by applying a unary condition test to the GC marking flag.
178   void TestGcMarking(JNIMacroLabel* label, JNIMacroUnaryCondition cond) override;
179   // Code at this offset will serve as the target for the Jump call.
180   void Bind(JNIMacroLabel* label) override;
181 
182  private:
183   class Arm64Exception {
184    public:
Arm64Exception(vixl::aarch64::Register scratch,size_t stack_adjust)185     Arm64Exception(vixl::aarch64::Register scratch, size_t stack_adjust)
186         : scratch_(scratch), stack_adjust_(stack_adjust) {}
187 
Entry()188     vixl::aarch64::Label* Entry() { return &exception_entry_; }
189 
190     // Register used for passing Thread::Current()->exception_ .
191     const vixl::aarch64::Register scratch_;
192 
193     // Stack adjust for ExceptionPool.
194     const size_t stack_adjust_;
195 
196     vixl::aarch64::Label exception_entry_;
197 
198    private:
199     DISALLOW_COPY_AND_ASSIGN(Arm64Exception);
200   };
201 
202   // Emits Exception block.
203   void EmitExceptionPoll(Arm64Exception *exception);
204 
205   void StoreWToOffset(StoreOperandType type,
206                       WRegister source,
207                       XRegister base,
208                       int32_t offset);
209   void StoreToOffset(XRegister source, XRegister base, int32_t offset);
210   void StoreSToOffset(SRegister source, XRegister base, int32_t offset);
211   void StoreDToOffset(DRegister source, XRegister base, int32_t offset);
212 
213   void LoadImmediate(XRegister dest,
214                      int32_t value,
215                      vixl::aarch64::Condition cond = vixl::aarch64::al);
216   void Load(Arm64ManagedRegister dst, XRegister src, int32_t src_offset, size_t size);
217   void LoadWFromOffset(LoadOperandType type,
218                        WRegister dest,
219                        XRegister base,
220                        int32_t offset);
221   void LoadFromOffset(XRegister dest, XRegister base, int32_t offset);
222   void LoadSFromOffset(SRegister dest, XRegister base, int32_t offset);
223   void LoadDFromOffset(DRegister dest, XRegister base, int32_t offset);
224   void AddConstant(XRegister rd,
225                    int32_t value,
226                    vixl::aarch64::Condition cond = vixl::aarch64::al);
227   void AddConstant(XRegister rd,
228                    XRegister rn,
229                    int32_t value,
230                    vixl::aarch64::Condition cond = vixl::aarch64::al);
231 
232   // List of exception blocks to generate at the end of the code cache.
233   ArenaVector<std::unique_ptr<Arm64Exception>> exception_blocks_;
234 };
236 class Arm64JNIMacroLabel final
237     : public JNIMacroLabelCommon<Arm64JNIMacroLabel,
238                                  vixl::aarch64::Label,
239                                  InstructionSet::kArm64> {
240  public:
AsArm64()241   vixl::aarch64::Label* AsArm64() {
242     return AsPlatformLabel();
243   }
244 };

}  // namespace arm64
}  // namespace art

#endif  // ART_COMPILER_UTILS_ARM64_JNI_MACRO_ASSEMBLER_ARM64_H_