/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_NODES_H_
#define ART_COMPILER_OPTIMIZING_NODES_H_

#include <algorithm>
#include <array>
#include <type_traits>

#include "base/arena_bit_vector.h"
#include "base/arena_containers.h"
#include "base/arena_object.h"
#include "base/array_ref.h"
#include "base/intrusive_forward_list.h"
#include "base/iteration_range.h"
#include "base/mutex.h"
#include "base/quasi_atomic.h"
#include "base/stl_util.h"
#include "base/transform_array_ref.h"
#include "art_method.h"
#include "class_root.h"
#include "compilation_kind.h"
#include "data_type.h"
#include "deoptimization_kind.h"
#include "dex/dex_file.h"
#include "dex/dex_file_types.h"
#include "dex/invoke_type.h"
#include "dex/method_reference.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "handle.h"
#include "handle_scope.h"
#include "intrinsics_enum.h"
#include "locations.h"
#include "mirror/class.h"
#include "mirror/method_type.h"
#include "offsets.h"

namespace art {

class ArenaStack;
class GraphChecker;
class HBasicBlock;
class HConstructorFence;
class HCurrentMethod;
class HDoubleConstant;
class HEnvironment;
class HFloatConstant;
class HGraphBuilder;
class HGraphVisitor;
class HInstruction;
class HIntConstant;
class HInvoke;
class HLongConstant;
class HNullConstant;
class HParameterValue;
class HPhi;
class HSuspendCheck;
class HTryBoundary;
class LiveInterval;
class LocationSummary;
class SlowPathCode;
class SsaBuilder;

namespace mirror {
class DexCache;
}  // namespace mirror

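// Default initial capacities used to reserve storage in block- and edge-related
// arena containers.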
static const int kDefaultNumberOfBlocks = 8;
static const int kDefaultNumberOfSuccessors = 2;
static const int kDefaultNumberOfPredecessors = 2;
static const int kDefaultNumberOfExceptionalPredecessors = 0;
static const int kDefaultNumberOfDominatedBlocks = 1;
static const int kDefaultNumberOfBackEdges = 1;

// The maximum (meaningful) distance (31) that can be used in an integer shift/rotate operation.
static constexpr int32_t kMaxIntShiftDistance = 0x1f;
// The maximum (meaningful) distance (63) that can be used in a long shift/rotate operation.
static constexpr int32_t kMaxLongShiftDistance = 0x3f;

static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1);
static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1);

static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1);

static constexpr uint32_t kNoDexPc = -1;

inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) {
  // For the purposes of the compiler, the dex files must actually be the same object
  // if we want to safely treat them as the same. This is especially important for JIT
  // as custom class loaders can open the same underlying file (or memory) multiple
  // times and provide different class resolution but no two class loaders should ever
  // use the same DexFile object - doing so is an unsupported hack that can lead to
  // all sorts of weird failures.
  return &lhs == &rhs;
}

enum IfCondition {
  // All types.
  kCondEQ,  // ==
  kCondNE,  // !=
  // Signed integers and floating-point numbers.
  kCondLT,  // <
  kCondLE,  // <=
  kCondGT,  // >
  kCondGE,  // >=
  // Unsigned integers.
  kCondB,   // <
  kCondBE,  // <=
  kCondA,   // >
  kCondAE,  // >=
  // First and last aliases.
  kCondFirst = kCondEQ,
  kCondLast = kCondAE,
};

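// Status returned by whole-graph analyses such as HGraph::BuildDominatorTree()
// and HGraph::AnalyzeLoops().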
enum GraphAnalysisResult {
  kAnalysisSkipped,
  kAnalysisInvalidBytecode,
  kAnalysisFailThrowCatchLoop,
  kAnalysisFailAmbiguousArrayOp,
  kAnalysisFailIrreducibleLoopAndStringInit,
  kAnalysisFailPhiEquivalentInOsr,
  kAnalysisSuccess,
};

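// Casts `x` to the corresponding unsigned type of T, preserving the bit pattern.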
template <typename T>
static inline typename std::make_unsigned<T>::type MakeUnsigned(T x) {
  return static_cast<typename std::make_unsigned<T>::type>(x);
}

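// Intrusive doubly-linked list of HInstruction nodes, used for the instruction
// and phi lists of a basic block.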
class HInstructionList : public ValueObject {
 public:
  HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {}

  void AddInstruction(HInstruction* instruction);
  void RemoveInstruction(HInstruction* instruction);

  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);

  // Return true if this list contains `instruction`.
  bool Contains(HInstruction* instruction) const;

  // Return true if `instruction1` is found before `instruction2` in
  // this instruction list and false otherwise. Abort if neither of
  // these instructions is found.
  bool FoundBefore(const HInstruction* instruction1,
                   const HInstruction* instruction2) const;

  bool IsEmpty() const { return first_instruction_ == nullptr; }
  void Clear() { first_instruction_ = last_instruction_ = nullptr; }

  // Update the block of all instructions to be `block`.
  void SetBlockOfInstructions(HBasicBlock* block) const;

  void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list);
  void AddBefore(HInstruction* cursor, const HInstructionList& instruction_list);
  void Add(const HInstructionList& instruction_list);

  // Return the number of instructions in the list. This is an expensive operation.
  size_t CountSize() const;

 private:
  HInstruction* first_instruction_;
  HInstruction* last_instruction_;

  friend class HBasicBlock;
  friend class HGraph;
  friend class HInstruction;
  friend class HInstructionIterator;
  friend class HInstructionIteratorHandleChanges;
  friend class HBackwardInstructionIterator;

  DISALLOW_COPY_AND_ASSIGN(HInstructionList);
};

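// Static type information for a reference value: a handle to the best known
// mirror::Class of the reference plus whether that type is exact.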
class ReferenceTypeInfo : ValueObject {
 public:
  typedef Handle<mirror::Class> TypeHandle;

  static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact);

  static ReferenceTypeInfo Create(TypeHandle type_handle) REQUIRES_SHARED(Locks::mutator_lock_) {
    return Create(type_handle, type_handle->CannotBeAssignedFromOtherTypes());
  }

  static ReferenceTypeInfo CreateUnchecked(TypeHandle type_handle, bool is_exact) {
    return ReferenceTypeInfo(type_handle, is_exact);
  }

  static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); }

  static bool IsValidHandle(TypeHandle handle) {
    return handle.GetReference() != nullptr;
  }

  bool IsValid() const {
    return IsValidHandle(type_handle_);
  }

  bool IsExact() const { return is_exact_; }

  bool IsObjectClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsObjectClass();
  }

  bool IsStringClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsStringClass();
  }

  bool IsObjectArray() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
  }

  bool IsInterface() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsInterface();
  }

  bool IsArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass();
  }

  bool IsPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsPrimitiveArray();
  }

  bool IsNonPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray();
  }

  bool CanArrayHold(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  bool CanArrayHoldValuesOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    if (!rti.IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(
        rti.GetTypeHandle()->GetComponentType());
  }

  Handle<mirror::Class> GetTypeHandle() const { return type_handle_; }

  bool IsSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Returns true if the type information provides the same amount of detail.
  // Note that it does not mean that the instructions have the same actual type
  // (because the type can be the result of a merge).
  bool IsEqual(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!IsValid() && !rti.IsValid()) {
      // Invalid types are equal.
      return true;
    }
    if (!IsValid() || !rti.IsValid()) {
      // One is valid, the other not.
      return false;
    }
    return IsExact() == rti.IsExact()
        && GetTypeHandle().Get() == rti.GetTypeHandle().Get();
  }

 private:
  ReferenceTypeInfo() : type_handle_(TypeHandle()), is_exact_(false) {}
  ReferenceTypeInfo(TypeHandle type_handle, bool is_exact)
      : type_handle_(type_handle), is_exact_(is_exact) { }

  // The class of the object.
  TypeHandle type_handle_;
  // Whether or not the type is exact or a superclass of the actual type.
  // Whether or not we have any information about this type.
  bool is_exact_;
};

std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs);

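// Lazily creates and caches handles to a few commonly used class roots
// (Object, Class, String, ...) in a VariableSizedHandleScope.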
class HandleCache {
 public:
  explicit HandleCache(VariableSizedHandleScope* handles) : handles_(handles) { }

  VariableSizedHandleScope* GetHandles() { return handles_; }

  template <typename T>
  MutableHandle<T> NewHandle(T* object) REQUIRES_SHARED(Locks::mutator_lock_) {
    return handles_->NewHandle(object);
  }

  template <typename T>
  MutableHandle<T> NewHandle(ObjPtr<T> object) REQUIRES_SHARED(Locks::mutator_lock_) {
    return handles_->NewHandle(object);
  }

  ReferenceTypeInfo::TypeHandle GetObjectClassHandle() {
    return GetRootHandle(ClassRoot::kJavaLangObject, &object_class_handle_);
  }

  ReferenceTypeInfo::TypeHandle GetClassClassHandle() {
    return GetRootHandle(ClassRoot::kJavaLangClass, &class_class_handle_);
  }

  ReferenceTypeInfo::TypeHandle GetMethodHandleClassHandle() {
    return GetRootHandle(ClassRoot::kJavaLangInvokeMethodHandleImpl, &method_handle_class_handle_);
  }

  ReferenceTypeInfo::TypeHandle GetMethodTypeClassHandle() {
    return GetRootHandle(ClassRoot::kJavaLangInvokeMethodType, &method_type_class_handle_);
  }

  ReferenceTypeInfo::TypeHandle GetStringClassHandle() {
    return GetRootHandle(ClassRoot::kJavaLangString, &string_class_handle_);
  }

  ReferenceTypeInfo::TypeHandle GetThrowableClassHandle() {
    return GetRootHandle(ClassRoot::kJavaLangThrowable, &throwable_class_handle_);
  }


 private:
  inline ReferenceTypeInfo::TypeHandle GetRootHandle(ClassRoot class_root,
                                                     ReferenceTypeInfo::TypeHandle* cache) {
    if (UNLIKELY(!ReferenceTypeInfo::IsValidHandle(*cache))) {
      *cache = CreateRootHandle(handles_, class_root);
    }
    return *cache;
  }

  static ReferenceTypeInfo::TypeHandle CreateRootHandle(VariableSizedHandleScope* handles,
                                                        ClassRoot class_root);

  VariableSizedHandleScope* handles_;

  ReferenceTypeInfo::TypeHandle object_class_handle_;
  ReferenceTypeInfo::TypeHandle class_class_handle_;
  ReferenceTypeInfo::TypeHandle method_handle_class_handle_;
  ReferenceTypeInfo::TypeHandle method_type_class_handle_;
  ReferenceTypeInfo::TypeHandle string_class_handle_;
  ReferenceTypeInfo::TypeHandle throwable_class_handle_;
};

// Control-flow graph of a method. Contains a list of basic blocks.
class HGraph : public ArenaObject<kArenaAllocGraph> {
 public:
  HGraph(ArenaAllocator* allocator,
         ArenaStack* arena_stack,
         VariableSizedHandleScope* handles,
         const DexFile& dex_file,
         uint32_t method_idx,
         InstructionSet instruction_set,
         InvokeType invoke_type = kInvalidInvokeType,
         bool dead_reference_safe = false,
         bool debuggable = false,
         CompilationKind compilation_kind = CompilationKind::kOptimized,
         int start_instruction_id = 0)
      : allocator_(allocator),
        arena_stack_(arena_stack),
        handle_cache_(handles),
        blocks_(allocator->Adapter(kArenaAllocBlockList)),
        reverse_post_order_(allocator->Adapter(kArenaAllocReversePostOrder)),
        linear_order_(allocator->Adapter(kArenaAllocLinearOrder)),
        entry_block_(nullptr),
        exit_block_(nullptr),
        maximum_number_of_out_vregs_(0),
        number_of_vregs_(0),
        number_of_in_vregs_(0),
        temporaries_vreg_slots_(0),
        has_bounds_checks_(false),
        has_try_catch_(false),
        has_monitor_operations_(false),
        has_simd_(false),
        has_loops_(false),
        has_irreducible_loops_(false),
        dead_reference_safe_(dead_reference_safe),
        debuggable_(debuggable),
        current_instruction_id_(start_instruction_id),
        dex_file_(dex_file),
        method_idx_(method_idx),
        invoke_type_(invoke_type),
        in_ssa_form_(false),
        number_of_cha_guards_(0),
        instruction_set_(instruction_set),
        cached_null_constant_(nullptr),
        cached_int_constants_(std::less<int32_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
        cached_float_constants_(std::less<int32_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
        cached_long_constants_(std::less<int64_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
        cached_double_constants_(std::less<int64_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
        cached_current_method_(nullptr),
        art_method_(nullptr),
        compilation_kind_(compilation_kind),
        cha_single_implementation_list_(allocator->Adapter(kArenaAllocCHA)) {
    blocks_.reserve(kDefaultNumberOfBlocks);
  }

  ArenaAllocator* GetAllocator() const { return allocator_; }
  ArenaStack* GetArenaStack() const { return arena_stack_; }

  HandleCache* GetHandleCache() { return &handle_cache_; }

  const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; }

  bool IsInSsaForm() const { return in_ssa_form_; }
  void SetInSsaForm() { in_ssa_form_ = true; }

  HBasicBlock* GetEntryBlock() const { return entry_block_; }
  HBasicBlock* GetExitBlock() const { return exit_block_; }
  bool HasExitBlock() const { return exit_block_ != nullptr; }

  void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; }
  void SetExitBlock(HBasicBlock* block) { exit_block_ = block; }

  void AddBlock(HBasicBlock* block);

  void ComputeDominanceInformation();
  void ClearDominanceInformation();
  void ClearLoopInformation();
  void FindBackEdges(ArenaBitVector* visited);
  GraphAnalysisResult BuildDominatorTree();
  void SimplifyCFG();
  void SimplifyCatchBlocks();

  // Analyze all natural loops in this graph. Returns a code specifying that it
  // was successful or the reason for failure. The method will fail if a loop
  // is a throw-catch loop, i.e. the header is a catch block.
  GraphAnalysisResult AnalyzeLoops() const;

  // Iterate over blocks to compute try block membership. Needs reverse post
  // order and loop information.
  void ComputeTryBlockInformation();

  // Inline this graph in `outer_graph`, replacing the given `invoke` instruction.
  // Returns the instruction to replace the invoke expression or null if the
  // invoke is for a void method. Note that the caller is responsible for replacing
  // and removing the invoke instruction.
  HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke);

  // Update the loop and try membership of `block`, which was spawned from `reference`.
  // In case `reference` is a back edge, `replace_if_back_edge` notifies whether `block`
  // should be the new back edge.
  void UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
                                             HBasicBlock* reference,
                                             bool replace_if_back_edge);

  // Adds a couple of blocks to test whether the loop body is entered and to
  // hold deoptimization instructions, etc.
  void TransformLoopHeaderForBCE(HBasicBlock* header);

  // Adds a new loop directly after the loop with the given header and exit.
  // Returns the new preheader.
  HBasicBlock* TransformLoopForVectorization(HBasicBlock* header,
                                             HBasicBlock* body,
                                             HBasicBlock* exit);

  // Removes `block` from the graph. Assumes `block` has been disconnected from
  // other blocks and has no instructions or phis.
  void DeleteDeadEmptyBlock(HBasicBlock* block);

  // Splits the edge between `block` and `successor` while preserving the
  // indices in the predecessor/successor lists. If there are multiple edges
  // between the blocks, the lowest indices are used.
  // Returns the new block which is empty and has the same dex pc as `successor`.
  HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor);

  void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor);
  void OrderLoopHeaderPredecessors(HBasicBlock* header);

  // Transform a loop into a format with a single preheader.
  //
  // Each phi in the header should be split: the original one in the header should only hold
  // inputs reachable from the back edges and a single input from the preheader. The newly created
  // phi in the preheader should collate the inputs from the original multiple incoming blocks.
  //
  // Loops in the graph typically have a single preheader, so this method is used to "repair" loops
  // that no longer have this property.
  void TransformLoopToSinglePreheaderFormat(HBasicBlock* header);

  void SimplifyLoop(HBasicBlock* header);

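  // Allocates an id for a newly added instruction; CHECK-fails if the 32-bit id
  // space is exhausted.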
  int32_t GetNextInstructionId() {
    CHECK_NE(current_instruction_id_, INT32_MAX);
    return current_instruction_id_++;
  }

  int32_t GetCurrentInstructionId() const {
    return current_instruction_id_;
  }

  void SetCurrentInstructionId(int32_t id) {
    CHECK_GE(id, current_instruction_id_);
    current_instruction_id_ = id;
  }

  uint16_t GetMaximumNumberOfOutVRegs() const {
    return maximum_number_of_out_vregs_;
  }

  void SetMaximumNumberOfOutVRegs(uint16_t new_value) {
    maximum_number_of_out_vregs_ = new_value;
  }

  void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) {
    maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value);
  }

  void UpdateTemporariesVRegSlots(size_t slots) {
    temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_);
  }

  size_t GetTemporariesVRegSlots() const {
    DCHECK(!in_ssa_form_);
    return temporaries_vreg_slots_;
  }

  void SetNumberOfVRegs(uint16_t number_of_vregs) {
    number_of_vregs_ = number_of_vregs;
  }

  uint16_t GetNumberOfVRegs() const {
    return number_of_vregs_;
  }

  void SetNumberOfInVRegs(uint16_t value) {
    number_of_in_vregs_ = value;
  }

  uint16_t GetNumberOfInVRegs() const {
    return number_of_in_vregs_;
  }

  uint16_t GetNumberOfLocalVRegs() const {
    DCHECK(!in_ssa_form_);
    return number_of_vregs_ - number_of_in_vregs_;
  }

  const ArenaVector<HBasicBlock*>& GetReversePostOrder() const {
    return reverse_post_order_;
  }

  ArrayRef<HBasicBlock* const> GetReversePostOrderSkipEntryBlock() const {
    DCHECK(GetReversePostOrder()[0] == entry_block_);
    return ArrayRef<HBasicBlock* const>(GetReversePostOrder()).SubArray(1);
  }

  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetPostOrder() const {
    return ReverseRange(GetReversePostOrder());
  }

  const ArenaVector<HBasicBlock*>& GetLinearOrder() const {
    return linear_order_;
  }

  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetLinearPostOrder() const {
    return ReverseRange(GetLinearOrder());
  }

  bool HasBoundsChecks() const {
    return has_bounds_checks_;
  }

  void SetHasBoundsChecks(bool value) {
    has_bounds_checks_ = value;
  }

  // Is the code known to be robust against eliminating dead references
  // and the effects of early finalization?
  bool IsDeadReferenceSafe() const { return dead_reference_safe_; }

  void MarkDeadReferenceUnsafe() { dead_reference_safe_ = false; }

  bool IsDebuggable() const { return debuggable_; }

  // Returns a constant of the given type and value. If it does not exist
  // already, it is created and inserted into the graph. This method is only for
  // integral types.
  HConstant* GetConstant(DataType::Type type, int64_t value, uint32_t dex_pc = kNoDexPc);

  // TODO: This is problematic for the consistency of reference type propagation
  // because it can be created anytime after the pass and thus it will be left
  // with an invalid type.
  HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc);

  HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(value, &cached_int_constants_, dex_pc);
  }
  HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(value, &cached_long_constants_, dex_pc);
  }
  HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc);
  }
  HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc);
  }

  HCurrentMethod* GetCurrentMethod();

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  uint32_t GetMethodIdx() const {
    return method_idx_;
  }

  // Get the method name (without the signature), e.g. "<init>"
  const char* GetMethodName() const;

  // Get the pretty method name (class + name + optionally signature).
  std::string PrettyMethod(bool with_signature = true) const;

  InvokeType GetInvokeType() const {
    return invoke_type_;
  }

  InstructionSet GetInstructionSet() const {
    return instruction_set_;
  }

  bool IsCompilingOsr() const { return compilation_kind_ == CompilationKind::kOsr; }

  bool IsCompilingBaseline() const { return compilation_kind_ == CompilationKind::kBaseline; }

  CompilationKind GetCompilationKind() const { return compilation_kind_; }

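  // Set of methods that class hierarchy analysis (CHA) assumes to have a single
  // implementation; the compiled code registers a dependency on each entry.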
  ArenaSet<ArtMethod*>& GetCHASingleImplementationList() {
    return cha_single_implementation_list_;
  }

  void AddCHASingleImplementationDependency(ArtMethod* method) {
    cha_single_implementation_list_.insert(method);
  }

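  // The "should deoptimize" flag is needed exactly when the graph contains CHA guards.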
  bool HasShouldDeoptimizeFlag() const {
    return number_of_cha_guards_ != 0;
  }

  bool HasTryCatch() const { return has_try_catch_; }
  void SetHasTryCatch(bool value) { has_try_catch_ = value; }

  bool HasMonitorOperations() const { return has_monitor_operations_; }
  void SetHasMonitorOperations(bool value) { has_monitor_operations_ = value; }

  bool HasSIMD() const { return has_simd_; }
  void SetHasSIMD(bool value) { has_simd_ = value; }

  bool HasLoops() const { return has_loops_; }
  void SetHasLoops(bool value) { has_loops_ = value; }

  bool HasIrreducibleLoops() const { return has_irreducible_loops_; }
  void SetHasIrreducibleLoops(bool value) { has_irreducible_loops_ = value; }

  ArtMethod* GetArtMethod() const { return art_method_; }
  void SetArtMethod(ArtMethod* method) { art_method_ = method; }

  // Returns an instruction with the opposite Boolean value from 'cond'.
  // The instruction has been inserted into the graph, either as a constant, or
  // before cursor.
  HInstruction* InsertOppositeCondition(HInstruction* cond, HInstruction* cursor);

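  // Reference type info representing an inexact java.lang.Object, the most
  // general reference type.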
  ReferenceTypeInfo GetInexactObjectRti() {
    return ReferenceTypeInfo::Create(handle_cache_.GetObjectClassHandle(), /* is_exact= */ false);
  }

  uint32_t GetNumberOfCHAGuards() { return number_of_cha_guards_; }
  void SetNumberOfCHAGuards(uint32_t num) { number_of_cha_guards_ = num; }
  void IncrementNumberOfCHAGuards() { number_of_cha_guards_++; }

 private:
  void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const;
  void RemoveDeadBlocks(const ArenaBitVector& visited);

  template <class InstructionType, typename ValueType>
  InstructionType* CreateConstant(ValueType value,
                                  ArenaSafeMap<ValueType, InstructionType*>* cache,
                                  uint32_t dex_pc = kNoDexPc) {
    // Try to find an existing constant of the given value.
    InstructionType* constant = nullptr;
    auto cached_constant = cache->find(value);
    if (cached_constant != cache->end()) {
      constant = cached_constant->second;
    }

    // If not found or previously deleted, create and cache a new instruction.
    // Don't bother reviving a previously deleted instruction, for simplicity.
    if (constant == nullptr || constant->GetBlock() == nullptr) {
      constant = new (allocator_) InstructionType(value, dex_pc);
      cache->Overwrite(value, constant);
      InsertConstant(constant);
    }
    return constant;
  }

  void InsertConstant(HConstant* instruction);

  // Cache a float constant into the graph. This method should only be
  // called by the SsaBuilder when creating "equivalent" instructions.
  void CacheFloatConstant(HFloatConstant* constant);

  // See CacheFloatConstant comment.
  void CacheDoubleConstant(HDoubleConstant* constant);

  ArenaAllocator* const allocator_;
  ArenaStack* const arena_stack_;

  HandleCache handle_cache_;

  // List of blocks in insertion order.
  ArenaVector<HBasicBlock*> blocks_;

  // List of blocks to perform a reverse post order tree traversal.
  ArenaVector<HBasicBlock*> reverse_post_order_;

  // List of blocks to perform a linear order tree traversal. Unlike the reverse
  // post order, this order is not incrementally kept up-to-date.
  ArenaVector<HBasicBlock*> linear_order_;

  HBasicBlock* entry_block_;
  HBasicBlock* exit_block_;

  // The maximum number of virtual register arguments passed to a HInvoke in this graph.
  uint16_t maximum_number_of_out_vregs_;

  // The number of virtual registers in this method. Includes the parameters.
  uint16_t number_of_vregs_;

  // The number of virtual registers used by parameters of this method.
  uint16_t number_of_in_vregs_;

  // Number of vreg size slots that the temporaries use (used in baseline compiler).
  size_t temporaries_vreg_slots_;

  // Flag whether there are bounds checks in the graph. We can skip
  // BCE if it's false. It's only best effort to keep it up to date in
  // the presence of code elimination so there might be false positives.
  bool has_bounds_checks_;

  // Flag whether there are try/catch blocks in the graph. We will skip
  // try/catch-related passes if it's false. It's only best effort to keep
  // it up to date in the presence of code elimination so there might be
  // false positives.
  bool has_try_catch_;

  // Flag whether there are any HMonitorOperation instructions in the graph. If so, this
  // mandates a DexRegisterMap to be present to allow deadlock analysis for non-debuggable code.
  bool has_monitor_operations_;

  // Flag whether SIMD instructions appear in the graph. If true, the
  // code generators may have to be more careful spilling the wider
  // contents of SIMD registers.
  bool has_simd_;

  // Flag whether there are any loops in the graph. We can skip loop
  // optimization if it's false. It's only best effort to keep it up
  // to date in the presence of code elimination so there might be false
  // positives.
  bool has_loops_;

  // Flag whether there are any irreducible loops in the graph. It's only
  // best effort to keep it up to date in the presence of code elimination
  // so there might be false positives.
  bool has_irreducible_loops_;

  // Is the code known to be robust against eliminating dead references
  // and the effects of early finalization? If false, dead reference variables
  // are kept if they might be visible to the garbage collector.
  // Currently this means that the class was declared to be dead-reference-safe,
  // the method accesses no reachability-sensitive fields or data, and the same
  // is true for any methods that were inlined into the current one.
  bool dead_reference_safe_;

  // Indicates whether the graph should be compiled in a way that
  // ensures full debuggability. If false, we can apply more
  // aggressive optimizations that may limit the level of debugging.
  const bool debuggable_;

  // The current id to assign to a newly added instruction. See HInstruction.id_.
  int32_t current_instruction_id_;

  // The dex file this method comes from.
  const DexFile& dex_file_;

  // The method index in the dex file.
  const uint32_t method_idx_;

  // If inlined, this encodes how the callee is being invoked.
  const InvokeType invoke_type_;

  // Whether the graph has been transformed to SSA form. Only used
  // in debug mode to ensure we are not using properties only valid
  // for non-SSA form (like the number of temporaries).
  bool in_ssa_form_;

  // Number of CHA guards in the graph. Used to short-circuit the
  // CHA guard optimization pass when there is no CHA guard left.
  uint32_t number_of_cha_guards_;

  const InstructionSet instruction_set_;

  // Cached constants.
  HNullConstant* cached_null_constant_;
  ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_;
  ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_;
  ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_;
  ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_;

  HCurrentMethod* cached_current_method_;

  // The ArtMethod this graph is for. Note that for AOT, it may be null,
  // for example for methods whose declaring class could not be resolved
  // (such as when the superclass could not be found).
  ArtMethod* art_method_;

  // How we are compiling the graph: either optimized, osr, or baseline.
  // For osr, we will make all loops seen as irreducible and emit special
  // stack maps to mark compiled code entries which the interpreter can
  // directly jump to.
  const CompilationKind compilation_kind_;

  // List of methods that are assumed to have single implementation.
  ArenaSet<ArtMethod*> cha_single_implementation_list_;

  friend class SsaBuilder;           // For caching constants.
  friend class SsaLivenessAnalysis;  // For the linear order.
  friend class HInliner;             // For the reverse post order.
  ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1);
  DISALLOW_COPY_AND_ASSIGN(HGraph);
};

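// Describes a natural loop: its header block, back edges, suspend check and the
// set of blocks that belong to the loop.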
class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> {
 public:
  HLoopInformation(HBasicBlock* header, HGraph* graph)
      : header_(header),
        suspend_check_(nullptr),
        irreducible_(false),
        contains_irreducible_loop_(false),
        back_edges_(graph->GetAllocator()->Adapter(kArenaAllocLoopInfoBackEdges)),
        // Make bit vector growable, as the number of blocks may change.
        blocks_(graph->GetAllocator(),
                graph->GetBlocks().size(),
                true,
                kArenaAllocLoopInfoBackEdges) {
    back_edges_.reserve(kDefaultNumberOfBackEdges);
  }

  bool IsIrreducible() const { return irreducible_; }
  bool ContainsIrreducibleLoop() const { return contains_irreducible_loop_; }

  void Dump(std::ostream& os);

  HBasicBlock* GetHeader() const {
    return header_;
  }

  void SetHeader(HBasicBlock* block) {
    header_ = block;
  }

  HSuspendCheck* GetSuspendCheck() const { return suspend_check_; }
  void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; }
  bool HasSuspendCheck() const { return suspend_check_ != nullptr; }

  void AddBackEdge(HBasicBlock* back_edge) {
    back_edges_.push_back(back_edge);
  }

  void RemoveBackEdge(HBasicBlock* back_edge) {
    RemoveElement(back_edges_, back_edge);
  }

  bool IsBackEdge(const HBasicBlock& block) const {
    return ContainsElement(back_edges_, &block);
  }

  size_t NumberOfBackEdges() const {
    return back_edges_.size();
  }

  HBasicBlock* GetPreHeader() const;

  const ArenaVector<HBasicBlock*>& GetBackEdges() const {
    return back_edges_;
  }

  // Returns the lifetime position of the back edge that has the
  // greatest lifetime position.
  size_t GetLifetimeEnd() const;

  void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) {
    ReplaceElement(back_edges_, existing, new_back_edge);
  }

  // Finds blocks that are part of this loop.
  void Populate();

  // Updates the block population of the loop and all of its outer loops recursively
  // after the population of the inner loop is updated.
  void PopulateInnerLoopUpwards(HLoopInformation* inner_loop);

  // Returns whether this loop information contains `block`.
  // Note that this loop information *must* be populated before entering this function.
  bool Contains(const HBasicBlock& block) const;

  // Returns whether this loop information is an inner loop of `other`.
  // Note that `other` *must* be populated before entering this function.
  bool IsIn(const HLoopInformation& other) const;

  // Returns true if instruction is not defined within this loop.
  bool IsDefinedOutOfTheLoop(HInstruction* instruction) const;

  const ArenaBitVector& GetBlocks() const { return blocks_; }

  void Add(HBasicBlock* block);
  void Remove(HBasicBlock* block);

  void ClearAllBlocks() {
    blocks_.ClearAllBits();
  }

  bool HasBackEdgeNotDominatedByHeader() const;

  bool IsPopulated() const {
    return blocks_.GetHighestBitSet() != -1;
  }

  bool DominatesAllBackEdges(HBasicBlock* block);

  bool HasExitEdge() const;

  // Resets back edge and blocks-in-loop data.
  void ResetBasicBlockData() {
    back_edges_.clear();
    ClearAllBlocks();
  }

 private:
  // Internal recursive implementation of `Populate`.
  void PopulateRecursive(HBasicBlock* block);
  void PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized);

  HBasicBlock* header_;
  HSuspendCheck* suspend_check_;
  bool irreducible_;
  bool contains_irreducible_loop_;
  ArenaVector<HBasicBlock*> back_edges_;
  ArenaBitVector blocks_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformation);
};

// Stores try/catch information for basic blocks.
// Note that HGraph is constructed so that catch blocks cannot simultaneously
// be try blocks.
class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> {
 public:
  // Try block information constructor.
  explicit TryCatchInformation(const HTryBoundary& try_entry)
      : try_entry_(&try_entry),
        catch_dex_file_(nullptr),
        catch_type_index_(dex::TypeIndex::Invalid()) {
    DCHECK(try_entry_ != nullptr);
  }

  // Catch block information constructor.
  TryCatchInformation(dex::TypeIndex catch_type_index, const DexFile& dex_file)
      : try_entry_(nullptr),
        catch_dex_file_(&dex_file),
        catch_type_index_(catch_type_index) {}

  bool IsTryBlock() const { return try_entry_ != nullptr; }

  const HTryBoundary& GetTryEntry() const {
    DCHECK(IsTryBlock());
    return *try_entry_;
  }

  bool IsCatchBlock() const { return catch_dex_file_ != nullptr; }

  bool IsValidTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_.IsValid();
  }

  dex::TypeIndex GetCatchTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_;
  }

  const DexFile& GetCatchDexFile() const {
    DCHECK(IsCatchBlock());
    return *catch_dex_file_;
  }

  void SetInvalidTypeIndex() {
    catch_type_index_ = dex::TypeIndex::Invalid();
  }

 private:
  // One of possibly several TryBoundary instructions entering the block's try.
  // Only set for try blocks.
  const HTryBoundary* try_entry_;

  // Exception type information. Only set for catch blocks.
  const DexFile* catch_dex_file_;
  dex::TypeIndex catch_type_index_;
};

static constexpr size_t kNoLifetime = -1;
static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1);

// A block in a method. Contains the list of instructions represented
// as a double linked list. Each block knows its predecessors and
// successors.

class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
 public:
  explicit HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
      : graph_(graph),
        predecessors_(graph->GetAllocator()->Adapter(kArenaAllocPredecessors)),
        successors_(graph->GetAllocator()->Adapter(kArenaAllocSuccessors)),
        loop_information_(nullptr),
        dominator_(nullptr),
        dominated_blocks_(graph->GetAllocator()->Adapter(kArenaAllocDominated)),
        block_id_(kInvalidBlockId),
        dex_pc_(dex_pc),
        lifetime_start_(kNoLifetime),
        lifetime_end_(kNoLifetime),
        try_catch_information_(nullptr) {
    predecessors_.reserve(kDefaultNumberOfPredecessors);
    successors_.reserve(kDefaultNumberOfSuccessors);
    dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks);
  }

  const ArenaVector<HBasicBlock*>& GetPredecessors() const {
    return predecessors_;
  }

  const ArenaVector<HBasicBlock*>& GetSuccessors() const {
    return successors_;
  }

  ArrayRef<HBasicBlock* const> GetNormalSuccessors() const;
  ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const;

  bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) {
    return ContainsElement(successors_, block, start_from);
  }

  const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const {
    return dominated_blocks_;
  }

  bool IsEntryBlock() const {
    return graph_->GetEntryBlock() == this;
  }

  bool IsExitBlock() const {
    return graph_->GetExitBlock() == this;
  }

  bool IsSingleGoto() const;
  bool IsSingleReturn() const;
  bool IsSingleReturnOrReturnVoidAllowingPhis() const;
  bool IsSingleTryBoundary() const;

  // Returns true if this block emits nothing but a jump.
  bool IsSingleJump() const {
    HLoopInformation* loop_info = GetLoopInformation();
    return (IsSingleGoto() || IsSingleTryBoundary())
           // Back edges generate a suspend check.
           && (loop_info == nullptr || !loop_info->IsBackEdge(*this));
  }

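  // Registers `back_edge` as a back edge of the loop headed by this block,
  // creating the loop information if it does not exist yet.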
  void AddBackEdge(HBasicBlock* back_edge) {
    if (loop_information_ == nullptr) {
      loop_information_ = new (graph_->GetAllocator()) HLoopInformation(this, graph_);
    }
    DCHECK_EQ(loop_information_->GetHeader(), this);
    loop_information_->AddBackEdge(back_edge);
  }

  // Registers a back edge; if the block was not a loop header before the call,
  // associates a newly created loop info with it.
  //
  // Used in SuperblockCloner to preserve the LoopInformation object instead of
  // resetting loop info for all blocks during back edge recalculation.
  void AddBackEdgeWhileUpdating(HBasicBlock* back_edge) {
    if (loop_information_ == nullptr || loop_information_->GetHeader() != this) {
      loop_information_ = new (graph_->GetAllocator()) HLoopInformation(this, graph_);
    }
    loop_information_->AddBackEdge(back_edge);
  }

  HGraph* GetGraph() const { return graph_; }
  void SetGraph(HGraph* graph) { graph_ = graph; }

  uint32_t GetBlockId() const { return block_id_; }
  void SetBlockId(int id) { block_id_ = id; }
  uint32_t GetDexPc() const { return dex_pc_; }

  HBasicBlock* GetDominator() const { return dominator_; }
  void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; }
  void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); }

  void RemoveDominatedBlock(HBasicBlock* block) {
    RemoveElement(dominated_blocks_, block);
  }

  void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) {
    ReplaceElement(dominated_blocks_, existing, new_block);
  }

  void ClearDominanceInformation();

  int NumberOfBackEdges() const {
    return IsLoopHeader() ? loop_information_->NumberOfBackEdges() : 0;
  }

  HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; }
  HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; }
  const HInstructionList& GetInstructions() const { return instructions_; }
  HInstruction* GetFirstPhi() const { return phis_.first_instruction_; }
  HInstruction* GetLastPhi() const { return phis_.last_instruction_; }
  const HInstructionList& GetPhis() const { return phis_; }

  HInstruction* GetFirstInstructionDisregardMoves() const;

  void AddSuccessor(HBasicBlock* block) {
    successors_.push_back(block);
    block->predecessors_.push_back(this);
  }

  void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t successor_index = GetSuccessorIndexOf(existing);
    existing->RemovePredecessor(this);
    new_block->predecessors_.push_back(this);
    successors_[successor_index] = new_block;
  }

  void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t predecessor_index = GetPredecessorIndexOf(existing);
    existing->RemoveSuccessor(this);
    new_block->successors_.push_back(this);
    predecessors_[predecessor_index] = new_block;
  }

  // Insert `this` between `predecessor` and `successor`. This method
  // preserves the indices, and will update the first edge found between
  // `predecessor` and `successor`.
  void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) {
    size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor);
    size_t successor_index = predecessor->GetSuccessorIndexOf(successor);
    successor->predecessors_[predecessor_index] = this;
    predecessor->successors_[successor_index] = this;
    successors_.push_back(successor);
    predecessors_.push_back(predecessor);
  }

  void RemovePredecessor(HBasicBlock* block) {
    predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block));
  }

  void RemoveSuccessor(HBasicBlock* block) {
    successors_.erase(successors_.begin() + GetSuccessorIndexOf(block));
  }

  void ClearAllPredecessors() {
    predecessors_.clear();
  }

  void AddPredecessor(HBasicBlock* block) {
    predecessors_.push_back(block);
    block->successors_.push_back(this);
  }

  void SwapPredecessors() {
    DCHECK_EQ(predecessors_.size(), 2u);
    std::swap(predecessors_[0], predecessors_[1]);
  }

  void SwapSuccessors() {
    DCHECK_EQ(successors_.size(), 2u);
    std::swap(successors_[0], successors_[1]);
  }

  size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const {
    return IndexOfElement(predecessors_, predecessor);
  }

  size_t GetSuccessorIndexOf(HBasicBlock* successor) const {
    return IndexOfElement(successors_, successor);
  }

  HBasicBlock* GetSinglePredecessor() const {
    DCHECK_EQ(GetPredecessors().size(), 1u);
    return GetPredecessors()[0];
  }

  HBasicBlock* GetSingleSuccessor() const {
    DCHECK_EQ(GetSuccessors().size(), 1u);
    return GetSuccessors()[0];
  }

  // Returns whether the first occurrence of `predecessor` in the list of
  // predecessors is at index `idx`.
  bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
    DCHECK_EQ(GetPredecessors()[idx], predecessor);
    return GetPredecessorIndexOf(predecessor) == idx;
  }

  // Create a new block between this block and its predecessors. The new block
  // is added to the graph, all predecessor edges are relinked to it and an edge
  // is created to `this`. Returns the new empty block. Reverse post order or
  // loop and try/catch information are not updated.
  HBasicBlock* CreateImmediateDominator();

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created, latter block. Note that this method will add the block to the
  // graph, create a Goto at the end of the former block and will create an edge
  // between the blocks. It will not, however, update the reverse post order or
  // loop and try/catch information.
  HBasicBlock* SplitBefore(HInstruction* cursor);

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created block. Note that this method just updates raw block information,
  // like predecessors, successors, dominators, and instruction list. It does not
  // update the graph, reverse post order, loop information, nor make sure the
  // blocks are consistent (for example ending with a control flow instruction).
  HBasicBlock* SplitBeforeForInlining(HInstruction* cursor);

  // Similar to `SplitBeforeForInlining` but does it after `cursor`.
  HBasicBlock* SplitAfterForInlining(HInstruction* cursor);

  // Merge `other` at the end of `this`. Successors and dominated blocks of
  // `other` are changed to be successors and dominated blocks of `this`. Note
  // that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void MergeWithInlined(HBasicBlock* other);

  // Replace `this` with `other`. Predecessors, successors, and dominated blocks
  // of `this` are moved to `other`.
  // Note that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void ReplaceWith(HBasicBlock* other);

  // Merges the instructions of `other` at the end of `this`.
  void MergeInstructionsWith(HBasicBlock* other);

  // Merge `other` at the end of `this`. This method updates loops, reverse post
  // order, links to predecessors, successors, dominators and deletes the block
  // from the graph. The two blocks must be successive, i.e. `this` is the only
  // predecessor of `other` and vice versa.
  void MergeWith(HBasicBlock* other);

  // Disconnects `this` from all its predecessors, successors and dominator,
  // removes it from all loops it is included in and eventually from the graph.
  // The block must not dominate any other block. Predecessors and successors
  // are safely updated.
  void DisconnectAndDelete();

  void AddInstruction(HInstruction* instruction);
  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
  // Replace phi `initial` with `replacement` within this block.
  void ReplaceAndRemovePhiWith(HPhi* initial, HPhi* replacement);
  // Replace instruction `initial` with `replacement` within this block.
  void ReplaceAndRemoveInstructionWith(HInstruction* initial,
                                       HInstruction* replacement);
  void AddPhi(HPhi* phi);
  void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
  // RemoveInstruction and RemovePhi delete a given instruction from the respective
  // instruction list. With 'ensure_safety' set to true, it verifies that the
  // instruction is not in use and removes it from the use lists of its inputs.
  void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
  void RemovePhi(HPhi* phi, bool ensure_safety = true);
  void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);

  bool IsLoopHeader() const {
    return IsInLoop() && (loop_information_->GetHeader() == this);
  }

  bool IsLoopPreHeaderFirstPredecessor() const {
    DCHECK(IsLoopHeader());
    return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
  }

  bool IsFirstPredecessorBackEdge() const {
    DCHECK(IsLoopHeader());
    return GetLoopInformation()->IsBackEdge(*GetPredecessors()[0]);
  }

  HLoopInformation* GetLoopInformation() const {
    return loop_information_;
  }

  // Set the loop_information_ on this block. Overrides the current
  // loop_information if it is an outer loop of the passed loop information.
  // Note that this method is called while creating the loop information.
  void SetInLoop(HLoopInformation* info) {
    if (IsLoopHeader()) {
      // Nothing to do. This just means `info` is an outer loop.
    } else if (!IsInLoop()) {
      loop_information_ = info;
    } else if (loop_information_->Contains(*info->GetHeader())) {
      // Block is currently part of an outer loop. Make it part of this inner loop.
1336       // Note that a non-loop-header block having loop information means that loop
1337       // information has already been populated.
1338       loop_information_ = info;
1339     } else {
1340       // Block is part of an inner loop. Do not update the loop information.
1341       // Note that we cannot do the check `info->Contains(*loop_information_->GetHeader())`
1342       // at this point, because this method is being called while populating `info`.
1343     }
1344   }
1345 
1346   // Raw update of the loop information.
SetLoopInformation(HLoopInformation * info)1347   void SetLoopInformation(HLoopInformation* info) {
1348     loop_information_ = info;
1349   }
1350 
IsInLoop()1351   bool IsInLoop() const { return loop_information_ != nullptr; }
1352 
GetTryCatchInformation()1353   TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }
1354 
SetTryCatchInformation(TryCatchInformation * try_catch_information)1355   void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
1356     try_catch_information_ = try_catch_information;
1357   }
1358 
IsTryBlock()1359   bool IsTryBlock() const {
1360     return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
1361   }
1362 
IsCatchBlock()1363   bool IsCatchBlock() const {
1364     return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
1365   }
1366 
1367   // Returns the try entry that this block's successors should have. They will
1368   // be in the same try, unless the block ends in a try boundary. In that case,
1369   // the appropriate try entry will be returned.
1370   const HTryBoundary* ComputeTryEntryOfSuccessors() const;
1371 
1372   bool HasThrowingInstructions() const;
1373 
1374   // Returns whether this block dominates the block passed as parameter.
1375   bool Dominates(HBasicBlock* block) const;
1376 
GetLifetimeStart()1377   size_t GetLifetimeStart() const { return lifetime_start_; }
GetLifetimeEnd()1378   size_t GetLifetimeEnd() const { return lifetime_end_; }
1379 
SetLifetimeStart(size_t start)1380   void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
SetLifetimeEnd(size_t end)1381   void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }
1382 
1383   bool EndsWithControlFlowInstruction() const;
1384   bool EndsWithReturn() const;
1385   bool EndsWithIf() const;
1386   bool EndsWithTryBoundary() const;
1387   bool HasSinglePhi() const;
1388 
1389  private:
1390   HGraph* graph_;
1391   ArenaVector<HBasicBlock*> predecessors_;
1392   ArenaVector<HBasicBlock*> successors_;
1393   HInstructionList instructions_;
1394   HInstructionList phis_;
1395   HLoopInformation* loop_information_;
1396   HBasicBlock* dominator_;
1397   ArenaVector<HBasicBlock*> dominated_blocks_;
1398   uint32_t block_id_;
1399   // The dex program counter of the first instruction of this block.
1400   const uint32_t dex_pc_;
1401   size_t lifetime_start_;
1402   size_t lifetime_end_;
1403   TryCatchInformation* try_catch_information_;
1404 
1405   friend class HGraph;
1406   friend class HInstruction;
1407 
1408   DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
1409 };
1410 
1411 // Iterates over the LoopInformation of all loops which contain 'block'
1412 // from the innermost to the outermost.
1413 class HLoopInformationOutwardIterator : public ValueObject {
1414  public:
HLoopInformationOutwardIterator(const HBasicBlock & block)1415   explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
1416       : current_(block.GetLoopInformation()) {}
1417 
Done()1418   bool Done() const { return current_ == nullptr; }
1419 
Advance()1420   void Advance() {
1421     DCHECK(!Done());
1422     current_ = current_->GetPreHeader()->GetLoopInformation();
1423   }
1424 
Current()1425   HLoopInformation* Current() const {
1426     DCHECK(!Done());
1427     return current_;
1428   }
1429 
1430  private:
1431   HLoopInformation* current_;
1432 
1433   DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
1434 };
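// Illustrative sketch of using the iterator (`block` is assumed to be an
// HBasicBlock* inside at least one loop):
//
//   for (HLoopInformationOutwardIterator it(*block); !it.Done(); it.Advance()) {
//     HLoopInformation* loop = it.Current();  // innermost loop first
//     ...
//   }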
1435 
1436 #define FOR_EACH_CONCRETE_INSTRUCTION_SCALAR_COMMON(M)                  \
1437   M(Above, Condition)                                                   \
1438   M(AboveOrEqual, Condition)                                            \
1439   M(Abs, UnaryOperation)                                                \
1440   M(Add, BinaryOperation)                                               \
1441   M(And, BinaryOperation)                                               \
1442   M(ArrayGet, Instruction)                                              \
1443   M(ArrayLength, Instruction)                                           \
1444   M(ArraySet, Instruction)                                              \
1445   M(Below, Condition)                                                   \
1446   M(BelowOrEqual, Condition)                                            \
1447   M(BooleanNot, UnaryOperation)                                         \
1448   M(BoundsCheck, Instruction)                                           \
1449   M(BoundType, Instruction)                                             \
1450   M(CheckCast, Instruction)                                             \
1451   M(ClassTableGet, Instruction)                                         \
1452   M(ClearException, Instruction)                                        \
1453   M(ClinitCheck, Instruction)                                           \
1454   M(Compare, BinaryOperation)                                           \
1455   M(ConstructorFence, Instruction)                                      \
1456   M(CurrentMethod, Instruction)                                         \
1457   M(ShouldDeoptimizeFlag, Instruction)                                  \
1458   M(Deoptimize, Instruction)                                            \
1459   M(Div, BinaryOperation)                                               \
1460   M(DivZeroCheck, Instruction)                                          \
1461   M(DoubleConstant, Constant)                                           \
1462   M(Equal, Condition)                                                   \
1463   M(Exit, Instruction)                                                  \
1464   M(FloatConstant, Constant)                                            \
1465   M(Goto, Instruction)                                                  \
1466   M(GreaterThan, Condition)                                             \
1467   M(GreaterThanOrEqual, Condition)                                      \
1468   M(If, Instruction)                                                    \
1469   M(InstanceFieldGet, Instruction)                                      \
1470   M(InstanceFieldSet, Instruction)                                      \
1471   M(InstanceOf, Instruction)                                            \
1472   M(IntConstant, Constant)                                              \
1473   M(IntermediateAddress, Instruction)                                   \
1474   M(InvokeUnresolved, Invoke)                                           \
1475   M(InvokeInterface, Invoke)                                            \
1476   M(InvokeStaticOrDirect, Invoke)                                       \
1477   M(InvokeVirtual, Invoke)                                              \
1478   M(InvokePolymorphic, Invoke)                                          \
1479   M(InvokeCustom, Invoke)                                               \
1480   M(LessThan, Condition)                                                \
1481   M(LessThanOrEqual, Condition)                                         \
1482   M(LoadClass, Instruction)                                             \
1483   M(LoadException, Instruction)                                         \
1484   M(LoadMethodHandle, Instruction)                                      \
1485   M(LoadMethodType, Instruction)                                        \
1486   M(LoadString, Instruction)                                            \
1487   M(LongConstant, Constant)                                             \
1488   M(Max, Instruction)                                                   \
1489   M(MemoryBarrier, Instruction)                                         \
1490   M(Min, BinaryOperation)                                               \
1491   M(MonitorOperation, Instruction)                                      \
1492   M(Mul, BinaryOperation)                                               \
1493   M(NativeDebugInfo, Instruction)                                       \
1494   M(Neg, UnaryOperation)                                                \
1495   M(NewArray, Instruction)                                              \
1496   M(NewInstance, Instruction)                                           \
1497   M(Not, UnaryOperation)                                                \
1498   M(NotEqual, Condition)                                                \
1499   M(NullConstant, Instruction)                                          \
1500   M(NullCheck, Instruction)                                             \
1501   M(Or, BinaryOperation)                                                \
1502   M(PackedSwitch, Instruction)                                          \
1503   M(ParallelMove, Instruction)                                          \
1504   M(ParameterValue, Instruction)                                        \
1505   M(Phi, Instruction)                                                   \
1506   M(Rem, BinaryOperation)                                               \
1507   M(Return, Instruction)                                                \
1508   M(ReturnVoid, Instruction)                                            \
1509   M(Ror, BinaryOperation)                                               \
1510   M(Shl, BinaryOperation)                                               \
1511   M(Shr, BinaryOperation)                                               \
1512   M(StaticFieldGet, Instruction)                                        \
1513   M(StaticFieldSet, Instruction)                                        \
1514   M(StringBuilderAppend, Instruction)                                   \
1515   M(UnresolvedInstanceFieldGet, Instruction)                            \
1516   M(UnresolvedInstanceFieldSet, Instruction)                            \
1517   M(UnresolvedStaticFieldGet, Instruction)                              \
1518   M(UnresolvedStaticFieldSet, Instruction)                              \
1519   M(Select, Instruction)                                                \
1520   M(Sub, BinaryOperation)                                               \
1521   M(SuspendCheck, Instruction)                                          \
1522   M(Throw, Instruction)                                                 \
1523   M(TryBoundary, Instruction)                                           \
1524   M(TypeConversion, Instruction)                                        \
1525   M(UShr, BinaryOperation)                                              \
1526   M(Xor, BinaryOperation)
1527 
1528 #define FOR_EACH_CONCRETE_INSTRUCTION_VECTOR_COMMON(M)                  \
1529   M(VecReplicateScalar, VecUnaryOperation)                              \
1530   M(VecExtractScalar, VecUnaryOperation)                                \
1531   M(VecReduce, VecUnaryOperation)                                       \
1532   M(VecCnv, VecUnaryOperation)                                          \
1533   M(VecNeg, VecUnaryOperation)                                          \
1534   M(VecAbs, VecUnaryOperation)                                          \
1535   M(VecNot, VecUnaryOperation)                                          \
1536   M(VecAdd, VecBinaryOperation)                                         \
1537   M(VecHalvingAdd, VecBinaryOperation)                                  \
1538   M(VecSub, VecBinaryOperation)                                         \
1539   M(VecMul, VecBinaryOperation)                                         \
1540   M(VecDiv, VecBinaryOperation)                                         \
1541   M(VecMin, VecBinaryOperation)                                         \
1542   M(VecMax, VecBinaryOperation)                                         \
1543   M(VecAnd, VecBinaryOperation)                                         \
1544   M(VecAndNot, VecBinaryOperation)                                      \
1545   M(VecOr, VecBinaryOperation)                                          \
1546   M(VecXor, VecBinaryOperation)                                         \
1547   M(VecSaturationAdd, VecBinaryOperation)                               \
1548   M(VecSaturationSub, VecBinaryOperation)                               \
1549   M(VecShl, VecBinaryOperation)                                         \
1550   M(VecShr, VecBinaryOperation)                                         \
1551   M(VecUShr, VecBinaryOperation)                                        \
1552   M(VecSetScalars, VecOperation)                                        \
1553   M(VecMultiplyAccumulate, VecOperation)                                \
1554   M(VecSADAccumulate, VecOperation)                                     \
1555   M(VecDotProd, VecOperation)                                           \
1556   M(VecLoad, VecMemoryOperation)                                        \
1557   M(VecStore, VecMemoryOperation)                                       \
1558   M(VecPredSetAll, VecPredSetOperation)                                 \
1559   M(VecPredWhile, VecPredSetOperation)                                  \
1560   M(VecPredCondition, VecOperation)
1561 
1562 #define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                         \
1563   FOR_EACH_CONCRETE_INSTRUCTION_SCALAR_COMMON(M)                        \
1564   FOR_EACH_CONCRETE_INSTRUCTION_VECTOR_COMMON(M)
1565 
1566 /*
1567  * Instructions, shared across several (not all) architectures.
1568  */
1569 #if !defined(ART_ENABLE_CODEGEN_arm) && !defined(ART_ENABLE_CODEGEN_arm64)
1570 #define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)
1571 #else
1572 #define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                         \
1573   M(BitwiseNegatedRight, Instruction)                                   \
1574   M(DataProcWithShifterOp, Instruction)                                 \
1575   M(MultiplyAccumulate, Instruction)                                    \
1576   M(IntermediateAddressIndex, Instruction)
1577 #endif
1578 
1579 #define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)
1580 
1581 #define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)
1582 
1583 #ifndef ART_ENABLE_CODEGEN_x86
1584 #define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
1585 #else
1586 #define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                            \
1587   M(X86ComputeBaseMethodAddress, Instruction)                           \
1588   M(X86LoadFromConstantTable, Instruction)                              \
1589   M(X86FPNeg, Instruction)                                              \
1590   M(X86PackedSwitch, Instruction)
1591 #endif
1592 
1593 #if defined(ART_ENABLE_CODEGEN_x86) || defined(ART_ENABLE_CODEGEN_x86_64)
1594 #define FOR_EACH_CONCRETE_INSTRUCTION_X86_COMMON(M)                     \
1595   M(X86AndNot, Instruction)                                             \
1596   M(X86MaskOrResetLeastSetBit, Instruction)
1597 #else
1598 #define FOR_EACH_CONCRETE_INSTRUCTION_X86_COMMON(M)
1599 #endif
1600 
1601 #define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)
1602 
1603 #define FOR_EACH_CONCRETE_INSTRUCTION(M)                                \
1604   FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M)                               \
1605   FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)                               \
1606   FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)                                  \
1607   FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)                                \
1608   FOR_EACH_CONCRETE_INSTRUCTION_X86(M)                                  \
1609   FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)                               \
1610   FOR_EACH_CONCRETE_INSTRUCTION_X86_COMMON(M)
1611 
1612 #define FOR_EACH_ABSTRACT_INSTRUCTION(M)                                \
1613   M(Condition, BinaryOperation)                                         \
1614   M(Constant, Instruction)                                              \
1615   M(UnaryOperation, Instruction)                                        \
1616   M(BinaryOperation, Instruction)                                       \
1617   M(Invoke, Instruction)                                                \
1618   M(VecOperation, Instruction)                                          \
1619   M(VecUnaryOperation, VecOperation)                                    \
1620   M(VecBinaryOperation, VecOperation)                                   \
1621   M(VecMemoryOperation, VecOperation)                                   \
1622   M(VecPredSetOperation, VecOperation)
1623 
1624 #define FOR_EACH_INSTRUCTION(M)                                         \
1625   FOR_EACH_CONCRETE_INSTRUCTION(M)                                      \
1626   FOR_EACH_ABSTRACT_INSTRUCTION(M)
1627 
1628 #define FORWARD_DECLARATION(type, super) class H##type;
FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)1629 FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
1630 #undef FORWARD_DECLARATION
1631 
1632 #define DECLARE_INSTRUCTION(type)                                         \
1633   private:                                                                \
1634   H##type& operator=(const H##type&) = delete;                            \
1635   public:                                                                 \
1636   const char* DebugName() const override { return #type; }                \
1637   HInstruction* Clone(ArenaAllocator* arena) const override {             \
1638     DCHECK(IsClonable());                                                 \
1639     return new (arena) H##type(*this->As##type());                        \
1640   }                                                                       \
1641   void Accept(HGraphVisitor* visitor) override
1642 
1643 #define DECLARE_ABSTRACT_INSTRUCTION(type)                              \
1644   private:                                                              \
1645   H##type& operator=(const H##type&) = delete;                          \
1646   public:
1647 
1648 #define DEFAULT_COPY_CONSTRUCTOR(type)                                  \
1649   explicit H##type(const H##type& other) = default;
1650 
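// Illustrative sketch of how a concrete instruction class typically combines
// the macros above (simplified, not a full definition from this file):
//
//   class HAdd final : public HBinaryOperation {
//     ...
//     DECLARE_INSTRUCTION(Add);
//
//    protected:
//     DEFAULT_COPY_CONSTRUCTOR(Add);
//   };
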
1651 template <typename T>
1652 class HUseListNode : public ArenaObject<kArenaAllocUseListNode>,
1653                      public IntrusiveForwardListNode<HUseListNode<T>> {
1654  public:
1655   // Get the instruction which has this use as one of the inputs.
1656   T GetUser() const { return user_; }
1657   // Get the position of the input record that this use corresponds to.
1658   size_t GetIndex() const { return index_; }
1659   // Set the position of the input record that this use corresponds to.
1660   void SetIndex(size_t index) { index_ = index; }
1661 
1662  private:
1663   HUseListNode(T user, size_t index)
1664       : user_(user), index_(index) {}
1665 
1666   T const user_;
1667   size_t index_;
1668 
1669   friend class HInstruction;
1670 
1671   DISALLOW_COPY_AND_ASSIGN(HUseListNode);
1672 };
1673 
1674 template <typename T>
1675 using HUseList = IntrusiveForwardList<HUseListNode<T>>;
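// Illustrative sketch (`instruction` is assumed to be an HInstruction* with
// users): walking the use list to see each user and the input index at which
// it uses `instruction`.
//
//   for (const HUseListNode<HInstruction*>& use : instruction->GetUses()) {
//     HInstruction* user = use.GetUser();
//     size_t input_index = use.GetIndex();
//     ...
//   }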
1676 
1677 // This class is used by the HEnvironment and HInstruction classes to record the
1678 // instructions they use, together with iterators into the corresponding use
1679 // lists kept by the used instructions.
1680 template <typename T>
1681 class HUserRecord : public ValueObject {
1682  public:
HUserRecord()1683   HUserRecord() : instruction_(nullptr), before_use_node_() {}
HUserRecord(HInstruction * instruction)1684   explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), before_use_node_() {}
1685 
HUserRecord(const HUserRecord<T> & old_record,typename HUseList<T>::iterator before_use_node)1686   HUserRecord(const HUserRecord<T>& old_record, typename HUseList<T>::iterator before_use_node)
1687       : HUserRecord(old_record.instruction_, before_use_node) {}
HUserRecord(HInstruction * instruction,typename HUseList<T>::iterator before_use_node)1688   HUserRecord(HInstruction* instruction, typename HUseList<T>::iterator before_use_node)
1689       : instruction_(instruction), before_use_node_(before_use_node) {
1690     DCHECK(instruction_ != nullptr);
1691   }
1692 
GetInstruction()1693   HInstruction* GetInstruction() const { return instruction_; }
GetBeforeUseNode()1694   typename HUseList<T>::iterator GetBeforeUseNode() const { return before_use_node_; }
GetUseNode()1695   typename HUseList<T>::iterator GetUseNode() const { return ++GetBeforeUseNode(); }
1696 
1697  private:
1698   // Instruction used by the user.
1699   HInstruction* instruction_;
1700 
1701   // Iterator before the corresponding entry in the use list kept by 'instruction_'.
1702   typename HUseList<T>::iterator before_use_node_;
1703 };
1704 
1705 // Helper class that extracts the input instruction from HUserRecord<HInstruction*>.
1706 // This is used for HInstruction::GetInputs() to return a container wrapper providing
1707 // HInstruction* values even though the underlying container has HUserRecord<>s.
1708 struct HInputExtractor {
operatorHInputExtractor1709   HInstruction* operator()(HUserRecord<HInstruction*>& record) const {
1710     return record.GetInstruction();
1711   }
operatorHInputExtractor1712   const HInstruction* operator()(const HUserRecord<HInstruction*>& record) const {
1713     return record.GetInstruction();
1714   }
1715 };
1716 
1717 using HInputsRef = TransformArrayRef<HUserRecord<HInstruction*>, HInputExtractor>;
1718 using HConstInputsRef = TransformArrayRef<const HUserRecord<HInstruction*>, HInputExtractor>;
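// Illustrative sketch: GetInputs() hides the HUserRecord<> wrapper, so the
// inputs of an instruction can be iterated directly as HInstruction* values.
//
//   for (HInstruction* input : instruction->GetInputs()) {
//     ...
//   }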
1719 
1720 /**
1721  * Side-effects representation.
1722  *
1723  * For write/read dependences on fields/arrays, the dependence analysis uses
1724  * type disambiguation (e.g. a float field write cannot modify the value of an
1725  * integer field read) and the access type (e.g.  a reference array write cannot
1726  * modify the value of a reference field read [although it may modify the
1727  * reference fetch prior to reading the field, which is represented by its own
1728  * write/read dependence]). The analysis makes conservative points-to
1729  * assumptions on reference types (e.g. two same typed arrays are assumed to be
1730  * the same, and any reference read depends on any reference read without
1731  * further regard of its type).
1732  *
1733  * kDependsOnGCBit is defined in the following way: instructions with kDependsOnGCBit must not be
1734  * alive across the point where garbage collection might happen.
1735  *
1736  * Note: Instructions with kCanTriggerGCBit do not depend on each other.
1737  *
1738  * kCanTriggerGCBit must be used for instructions for which GC might happen on the path across
1739  * those instructions from the compiler perspective (between this instruction and the next one
1740  * in the IR).
1741  *
1742  * Note: Instructions which can cause GC only on a fatal slow path do not need
1743  *       kCanTriggerGCBit as the execution never returns to the instruction next to the exceptional
1744  *       one. However the execution may return to compiled code if there is a catch block in the
1745  *       current method; for this purpose the TryBoundary exit instruction has kCanTriggerGCBit
1746  *       set.
1747  *
1748  * The internal representation uses 38 bits and is described in the table below.
1749  * The first line indicates the side effect, and for field/array accesses the
1750  * second line indicates the type of the access (in the order of the
1751  * DataType::Type enum).
1752  * The two numbered lines below indicate the bit position in the bitfield (read
1753  * vertically).
1754  *
1755  *   |Depends on GC|ARRAY-R  |FIELD-R  |Can trigger GC|ARRAY-W  |FIELD-W  |
1756  *   +-------------+---------+---------+--------------+---------+---------+
1757  *   |             |DFJISCBZL|DFJISCBZL|              |DFJISCBZL|DFJISCBZL|
1758  *   |      3      |333333322|222222221|       1      |111111110|000000000|
1759  *   |      7      |654321098|765432109|       8      |765432109|876543210|
1760  *
1761  * Note that, to ease the implementation, 'changes' bits are least significant
1762  * bits, while 'dependency' bits are most significant bits.
1763  */
1764 class SideEffects : public ValueObject {
1765  public:
SideEffects()1766   SideEffects() : flags_(0) {}
1767 
None()1768   static SideEffects None() {
1769     return SideEffects(0);
1770   }
1771 
All()1772   static SideEffects All() {
1773     return SideEffects(kAllChangeBits | kAllDependOnBits);
1774   }
1775 
AllChanges()1776   static SideEffects AllChanges() {
1777     return SideEffects(kAllChangeBits);
1778   }
1779 
AllDependencies()1780   static SideEffects AllDependencies() {
1781     return SideEffects(kAllDependOnBits);
1782   }
1783 
AllExceptGCDependency()1784   static SideEffects AllExceptGCDependency() {
1785     return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
1786   }
1787 
AllWritesAndReads()1788   static SideEffects AllWritesAndReads() {
1789     return SideEffects(kAllWrites | kAllReads);
1790   }
1791 
AllWrites()1792   static SideEffects AllWrites() {
1793     return SideEffects(kAllWrites);
1794   }
1795 
AllReads()1796   static SideEffects AllReads() {
1797     return SideEffects(kAllReads);
1798   }
1799 
FieldWriteOfType(DataType::Type type,bool is_volatile)1800   static SideEffects FieldWriteOfType(DataType::Type type, bool is_volatile) {
1801     return is_volatile
1802         ? AllWritesAndReads()
1803         : SideEffects(TypeFlag(type, kFieldWriteOffset));
1804   }
1805 
ArrayWriteOfType(DataType::Type type)1806   static SideEffects ArrayWriteOfType(DataType::Type type) {
1807     return SideEffects(TypeFlag(type, kArrayWriteOffset));
1808   }
1809 
FieldReadOfType(DataType::Type type,bool is_volatile)1810   static SideEffects FieldReadOfType(DataType::Type type, bool is_volatile) {
1811     return is_volatile
1812         ? AllWritesAndReads()
1813         : SideEffects(TypeFlag(type, kFieldReadOffset));
1814   }
1815 
ArrayReadOfType(DataType::Type type)1816   static SideEffects ArrayReadOfType(DataType::Type type) {
1817     return SideEffects(TypeFlag(type, kArrayReadOffset));
1818   }
1819 
1820   // Returns the side effect representing that GC might happen across this instruction
1821   // from the compiler's perspective, so that the next instruction in the IR observes it.
1822   //
1823   // See the SideEffects class comments.
CanTriggerGC()1824   static SideEffects CanTriggerGC() {
1825     return SideEffects(1ULL << kCanTriggerGCBit);
1826   }
1827 
1828   // Returns the side effect representing that the instruction must not be live across a GC point.
1829   //
1830   // See the SideEffects class comments.
DependsOnGC()1831   static SideEffects DependsOnGC() {
1832     return SideEffects(1ULL << kDependsOnGCBit);
1833   }
1834 
1835   // Combines the side-effects of this and the other.
Union(SideEffects other)1836   SideEffects Union(SideEffects other) const {
1837     return SideEffects(flags_ | other.flags_);
1838   }
1839 
Exclusion(SideEffects other)1840   SideEffects Exclusion(SideEffects other) const {
1841     return SideEffects(flags_ & ~other.flags_);
1842   }
1843 
Add(SideEffects other)1844   void Add(SideEffects other) {
1845     flags_ |= other.flags_;
1846   }
1847 
Includes(SideEffects other)1848   bool Includes(SideEffects other) const {
1849     return (other.flags_ & flags_) == other.flags_;
1850   }
1851 
HasSideEffects()1852   bool HasSideEffects() const {
1853     return (flags_ & kAllChangeBits);
1854   }
1855 
HasDependencies()1856   bool HasDependencies() const {
1857     return (flags_ & kAllDependOnBits);
1858   }
1859 
1860   // Returns true if there are no side effects or dependencies.
DoesNothing()1861   bool DoesNothing() const {
1862     return flags_ == 0;
1863   }
1864 
1865   // Returns true if something is written.
DoesAnyWrite()1866   bool DoesAnyWrite() const {
1867     return (flags_ & kAllWrites);
1868   }
1869 
1870   // Returns true if something is read.
DoesAnyRead()1871   bool DoesAnyRead() const {
1872     return (flags_ & kAllReads);
1873   }
1874 
1875   // Returns true if potentially everything is written and read
1876   // (every type and every kind of access).
DoesAllReadWrite()1877   bool DoesAllReadWrite() const {
1878     return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
1879   }
1880 
DoesAll()1881   bool DoesAll() const {
1882     return flags_ == (kAllChangeBits | kAllDependOnBits);
1883   }
1884 
1885   // Returns true if `this` may read something written by `other`.
MayDependOn(SideEffects other)1886   bool MayDependOn(SideEffects other) const {
1887     const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
1888     return (other.flags_ & depends_on_flags);
1889   }
1890 
1891   // Returns string representation of flags (for debugging only).
1892   // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
ToString()1893   std::string ToString() const {
1894     std::string flags = "|";
1895     for (int s = kLastBit; s >= 0; s--) {
1896       bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
1897       if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
1898         // This is a bit for the GC side effect.
1899         if (current_bit_is_set) {
1900           flags += "GC";
1901         }
1902         flags += "|";
1903       } else {
1904         // This is a bit for the array/field analysis.
1905         // The underscore character stands for the 'can trigger GC' bit.
1906         static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
1907         if (current_bit_is_set) {
1908           flags += kDebug[s];
1909         }
1910         if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
1911             (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
1912           flags += "|";
1913         }
1914       }
1915     }
1916     return flags;
1917   }
1918 
Equals(const SideEffects & other)1919   bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }
1920 
1921  private:
1922   static constexpr int kFieldArrayAnalysisBits = 9;
1923 
1924   static constexpr int kFieldWriteOffset = 0;
1925   static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
1926   static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
1927   static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;
1928 
1929   static constexpr int kChangeBits = kCanTriggerGCBit + 1;
1930 
1931   static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
1932   static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
1933   static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
1934   static constexpr int kDependsOnGCBit = kLastBitForReads + 1;
1935 
1936   static constexpr int kLastBit = kDependsOnGCBit;
1937   static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;
1938 
1939   // Aliases.
1940 
1941   static_assert(kChangeBits == kDependOnBits,
1942                 "the 'change' bits should match the 'depend on' bits.");
1943 
1944   static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
1945   static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
1946   static constexpr uint64_t kAllWrites =
1947       ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
1948   static constexpr uint64_t kAllReads =
1949       ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;
1950 
1951   // Translates type to bit flag. The type must correspond to a Java type.
TypeFlag(DataType::Type type,int offset)1952   static uint64_t TypeFlag(DataType::Type type, int offset) {
1953     int shift;
1954     switch (type) {
1955       case DataType::Type::kReference: shift = 0; break;
1956       case DataType::Type::kBool:      shift = 1; break;
1957       case DataType::Type::kInt8:      shift = 2; break;
1958       case DataType::Type::kUint16:    shift = 3; break;
1959       case DataType::Type::kInt16:     shift = 4; break;
1960       case DataType::Type::kInt32:     shift = 5; break;
1961       case DataType::Type::kInt64:     shift = 6; break;
1962       case DataType::Type::kFloat32:   shift = 7; break;
1963       case DataType::Type::kFloat64:   shift = 8; break;
1964       default:
1965         LOG(FATAL) << "Unexpected data type " << type;
1966         UNREACHABLE();
1967     }
1968     DCHECK_LE(kFieldWriteOffset, shift);
1969     DCHECK_LT(shift, kArrayWriteOffset);
1970     return UINT64_C(1) << (shift + offset);
1971   }
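  // For example, TypeFlag(DataType::Type::kInt32, kFieldWriteOffset) yields bit 5,
  // and TypeFlag(DataType::Type::kFloat64, kArrayReadOffset) yields bit
  // kArrayReadOffset + 8, i.e. bit 36 in the layout described above.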
1972 
1973   // Private constructor on direct flags value.
SideEffects(uint64_t flags)1974   explicit SideEffects(uint64_t flags) : flags_(flags) {}
1975 
1976   uint64_t flags_;
1977 };
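// Illustrative sketch of combining and querying side effects: a non-volatile
// int32 field write and a float32 array read differ in both access kind and
// type, so the read is not considered dependent on the write.
//
//   SideEffects write = SideEffects::FieldWriteOfType(DataType::Type::kInt32,
//                                                     /* is_volatile= */ false);
//   SideEffects read = SideEffects::ArrayReadOfType(DataType::Type::kFloat32);
//   bool dependent = read.MayDependOn(write);  // false
//   SideEffects both = write.Union(read);      // carries both effects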
1978 
1979 // A HEnvironment object contains the values of virtual registers at a given location.
1980 class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
1981  public:
HEnvironment(ArenaAllocator * allocator,size_t number_of_vregs,ArtMethod * method,uint32_t dex_pc,HInstruction * holder)1982   ALWAYS_INLINE HEnvironment(ArenaAllocator* allocator,
1983                              size_t number_of_vregs,
1984                              ArtMethod* method,
1985                              uint32_t dex_pc,
1986                              HInstruction* holder)
1987      : vregs_(number_of_vregs, allocator->Adapter(kArenaAllocEnvironmentVRegs)),
1988        locations_(allocator->Adapter(kArenaAllocEnvironmentLocations)),
1989        parent_(nullptr),
1990        method_(method),
1991        dex_pc_(dex_pc),
1992        holder_(holder) {
1993   }
1994 
HEnvironment(ArenaAllocator * allocator,const HEnvironment & to_copy,HInstruction * holder)1995   ALWAYS_INLINE HEnvironment(ArenaAllocator* allocator,
1996                              const HEnvironment& to_copy,
1997                              HInstruction* holder)
1998       : HEnvironment(allocator,
1999                      to_copy.Size(),
2000                      to_copy.GetMethod(),
2001                      to_copy.GetDexPc(),
2002                      holder) {}
2003 
AllocateLocations()2004   void AllocateLocations() {
2005     DCHECK(locations_.empty());
2006     locations_.resize(vregs_.size());
2007   }
2008 
SetAndCopyParentChain(ArenaAllocator * allocator,HEnvironment * parent)2009   void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
2010     if (parent_ != nullptr) {
2011       parent_->SetAndCopyParentChain(allocator, parent);
2012     } else {
2013       parent_ = new (allocator) HEnvironment(allocator, *parent, holder_);
2014       parent_->CopyFrom(parent);
2015       if (parent->GetParent() != nullptr) {
2016         parent_->SetAndCopyParentChain(allocator, parent->GetParent());
2017       }
2018     }
2019   }
2020 
2021   void CopyFrom(ArrayRef<HInstruction* const> locals);
2022   void CopyFrom(HEnvironment* environment);
2023 
2024   // Copy from `env`. If a value is a loop phi of `loop_header`, copy the phi's
2025   // first input instead. This is for inserting instructions that require an
2026   // environment (like HDeoptimization) in the loop pre-header.
2027   void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header);
2028 
SetRawEnvAt(size_t index,HInstruction * instruction)2029   void SetRawEnvAt(size_t index, HInstruction* instruction) {
2030     vregs_[index] = HUserRecord<HEnvironment*>(instruction);
2031   }
2032 
GetInstructionAt(size_t index)2033   HInstruction* GetInstructionAt(size_t index) const {
2034     return vregs_[index].GetInstruction();
2035   }
2036 
2037   void RemoveAsUserOfInput(size_t index) const;
2038 
2039   // Replaces the input at position `index` with the replacement; the replacement and old
2040   // input instructions' env_uses_ lists are adjusted. The function works similarly to
2041   // HInstruction::ReplaceInput.
2042   void ReplaceInput(HInstruction* replacement, size_t index);
2043 
Size()2044   size_t Size() const { return vregs_.size(); }
2045 
GetParent()2046   HEnvironment* GetParent() const { return parent_; }
2047 
SetLocationAt(size_t index,Location location)2048   void SetLocationAt(size_t index, Location location) {
2049     locations_[index] = location;
2050   }
2051 
GetLocationAt(size_t index)2052   Location GetLocationAt(size_t index) const {
2053     return locations_[index];
2054   }
2055 
GetDexPc()2056   uint32_t GetDexPc() const {
2057     return dex_pc_;
2058   }
2059 
GetMethod()2060   ArtMethod* GetMethod() const {
2061     return method_;
2062   }
2063 
GetHolder()2064   HInstruction* GetHolder() const {
2065     return holder_;
2066   }
2067 
2068 
IsFromInlinedInvoke()2069   bool IsFromInlinedInvoke() const {
2070     return GetParent() != nullptr;
2071   }
2072 
2073  private:
2074   ArenaVector<HUserRecord<HEnvironment*>> vregs_;
2075   ArenaVector<Location> locations_;
2076   HEnvironment* parent_;
2077   ArtMethod* method_;
2078   const uint32_t dex_pc_;
2079 
2080   // The instruction that holds this environment.
2081   HInstruction* const holder_;
2082 
2083   friend class HInstruction;
2084 
2085   DISALLOW_COPY_AND_ASSIGN(HEnvironment);
2086 };
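// Illustrative sketch (`allocator`, `method`, `dex_pc`, `instruction` and
// `value` are assumed to exist): attaching an environment that records a
// single live dex register to an instruction that needs one.
//
//   HEnvironment* env = new (allocator) HEnvironment(
//       allocator, /* number_of_vregs= */ 1u, method, dex_pc, instruction);
//   env->SetRawEnvAt(0, value);
//   value->AddEnvUseAt(env, 0);
//   instruction->SetRawEnvironment(env);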
2087 
2088 class HInstruction : public ArenaObject<kArenaAllocInstruction> {
2089  public:
2090 #define DECLARE_KIND(type, super) k##type,
2091   enum InstructionKind {  // private marker to avoid generate-operator-out.py from processing.
2092     FOR_EACH_CONCRETE_INSTRUCTION(DECLARE_KIND)
2093     kLastInstructionKind
2094   };
2095 #undef DECLARE_KIND
2096 
HInstruction(InstructionKind kind,SideEffects side_effects,uint32_t dex_pc)2097   HInstruction(InstructionKind kind, SideEffects side_effects, uint32_t dex_pc)
2098       : HInstruction(kind, DataType::Type::kVoid, side_effects, dex_pc) {}
2099 
HInstruction(InstructionKind kind,DataType::Type type,SideEffects side_effects,uint32_t dex_pc)2100   HInstruction(InstructionKind kind, DataType::Type type, SideEffects side_effects, uint32_t dex_pc)
2101       : previous_(nullptr),
2102         next_(nullptr),
2103         block_(nullptr),
2104         dex_pc_(dex_pc),
2105         id_(-1),
2106         ssa_index_(-1),
2107         packed_fields_(0u),
2108         environment_(nullptr),
2109         locations_(nullptr),
2110         live_interval_(nullptr),
2111         lifetime_position_(kNoLifetime),
2112         side_effects_(side_effects),
2113         reference_type_handle_(ReferenceTypeInfo::CreateInvalid().GetTypeHandle()) {
2114     SetPackedField<InstructionKindField>(kind);
2115     SetPackedField<TypeField>(type);
2116     SetPackedFlag<kFlagReferenceTypeIsExact>(ReferenceTypeInfo::CreateInvalid().IsExact());
2117   }
2118 
~HInstruction()2119   virtual ~HInstruction() {}
2120 
2121 
GetNext()2122   HInstruction* GetNext() const { return next_; }
GetPrevious()2123   HInstruction* GetPrevious() const { return previous_; }
2124 
2125   HInstruction* GetNextDisregardingMoves() const;
2126   HInstruction* GetPreviousDisregardingMoves() const;
2127 
GetBlock()2128   HBasicBlock* GetBlock() const { return block_; }
GetAllocator()2129   ArenaAllocator* GetAllocator() const { return block_->GetGraph()->GetAllocator(); }
SetBlock(HBasicBlock * block)2130   void SetBlock(HBasicBlock* block) { block_ = block; }
IsInBlock()2131   bool IsInBlock() const { return block_ != nullptr; }
IsInLoop()2132   bool IsInLoop() const { return block_->IsInLoop(); }
IsLoopHeaderPhi()2133   bool IsLoopHeaderPhi() const { return IsPhi() && block_->IsLoopHeader(); }
IsIrreducibleLoopHeaderPhi()2134   bool IsIrreducibleLoopHeaderPhi() const {
2135     return IsLoopHeaderPhi() && GetBlock()->GetLoopInformation()->IsIrreducible();
2136   }
2137 
2138   virtual ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() = 0;
2139 
GetInputRecords()2140   ArrayRef<const HUserRecord<HInstruction*>> GetInputRecords() const {
2141     // One virtual method is enough, just const_cast<> and then re-add the const.
2142     return ArrayRef<const HUserRecord<HInstruction*>>(
2143         const_cast<HInstruction*>(this)->GetInputRecords());
2144   }
2145 
GetInputs()2146   HInputsRef GetInputs() {
2147     return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
2148   }
2149 
GetInputs()2150   HConstInputsRef GetInputs() const {
2151     return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
2152   }
2153 
InputCount()2154   size_t InputCount() const { return GetInputRecords().size(); }
InputAt(size_t i)2155   HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); }
2156 
HasInput(HInstruction * input)2157   bool HasInput(HInstruction* input) const {
2158     for (const HInstruction* i : GetInputs()) {
2159       if (i == input) {
2160         return true;
2161       }
2162     }
2163     return false;
2164   }
2165 
SetRawInputAt(size_t index,HInstruction * input)2166   void SetRawInputAt(size_t index, HInstruction* input) {
2167     SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input));
2168   }
2169 
2170   virtual void Accept(HGraphVisitor* visitor) = 0;
2171   virtual const char* DebugName() const = 0;
2172 
GetType()2173   DataType::Type GetType() const {
2174     return TypeField::Decode(GetPackedFields());
2175   }
2176 
NeedsEnvironment()2177   virtual bool NeedsEnvironment() const { return false; }
2178 
GetDexPc()2179   uint32_t GetDexPc() const { return dex_pc_; }
2180 
IsControlFlow()2181   virtual bool IsControlFlow() const { return false; }
2182 
2183   // Can the instruction throw?
2184   // TODO: We should rename to CanVisiblyThrow, as some instructions (like HNewInstance)
2185   // could throw OOME, but it is still OK to remove them if they are unused.
CanThrow()2186   virtual bool CanThrow() const { return false; }
2187 
2188   // Does the instruction always throw an exception unconditionally?
AlwaysThrows()2189   virtual bool AlwaysThrows() const { return false; }
2190 
CanThrowIntoCatchBlock()2191   bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }
2192 
HasSideEffects()2193   bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
DoesAnyWrite()2194   bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }
2195 
2196   // Does not apply to all instructions, but having this at the top level greatly
2197   // simplifies null check elimination.
2198   // TODO: Consider merging can_be_null into ReferenceTypeInfo.
CanBeNull()2199   virtual bool CanBeNull() const {
2200     DCHECK_EQ(GetType(), DataType::Type::kReference) << "CanBeNull only applies to reference types";
2201     return true;
2202   }
2203 
CanDoImplicitNullCheckOn(HInstruction * obj ATTRIBUTE_UNUSED)2204   virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
2205     return false;
2206   }
2207 
2208   // If this instruction will do an implicit null check, return the `HNullCheck` associated
2209   // with it. Otherwise return null.
GetImplicitNullCheck()2210   HNullCheck* GetImplicitNullCheck() const {
2211     // Go over previous non-move instructions that are emitted at use site.
2212     HInstruction* prev_not_move = GetPreviousDisregardingMoves();
2213     while (prev_not_move != nullptr && prev_not_move->IsEmittedAtUseSite()) {
2214       if (prev_not_move->IsNullCheck()) {
2215         return prev_not_move->AsNullCheck();
2216       }
2217       prev_not_move = prev_not_move->GetPreviousDisregardingMoves();
2218     }
2219     return nullptr;
2220   }
2221 
IsActualObject()2222   virtual bool IsActualObject() const {
2223     return GetType() == DataType::Type::kReference;
2224   }
2225 
2226   void SetReferenceTypeInfo(ReferenceTypeInfo rti);
2227 
GetReferenceTypeInfo()2228   ReferenceTypeInfo GetReferenceTypeInfo() const {
2229     DCHECK_EQ(GetType(), DataType::Type::kReference);
2230     return ReferenceTypeInfo::CreateUnchecked(reference_type_handle_,
2231                                               GetPackedFlag<kFlagReferenceTypeIsExact>());
2232   }
2233 
AddUseAt(HInstruction * user,size_t index)2234   void AddUseAt(HInstruction* user, size_t index) {
2235     DCHECK(user != nullptr);
2236     // Note: fixup_end remains valid across push_front().
2237     auto fixup_end = uses_.empty() ? uses_.begin() : ++uses_.begin();
2238     HUseListNode<HInstruction*>* new_node =
2239         new (GetBlock()->GetGraph()->GetAllocator()) HUseListNode<HInstruction*>(user, index);
2240     uses_.push_front(*new_node);
2241     FixUpUserRecordsAfterUseInsertion(fixup_end);
2242   }
2243 
AddEnvUseAt(HEnvironment * user,size_t index)2244   void AddEnvUseAt(HEnvironment* user, size_t index) {
2245     DCHECK(user != nullptr);
2246     // Note: env_fixup_end remains valid across push_front().
2247     auto env_fixup_end = env_uses_.empty() ? env_uses_.begin() : ++env_uses_.begin();
2248     HUseListNode<HEnvironment*>* new_node =
2249         new (GetBlock()->GetGraph()->GetAllocator()) HUseListNode<HEnvironment*>(user, index);
2250     env_uses_.push_front(*new_node);
2251     FixUpUserRecordsAfterEnvUseInsertion(env_fixup_end);
2252   }
2253 
RemoveAsUserOfInput(size_t input)2254   void RemoveAsUserOfInput(size_t input) {
2255     HUserRecord<HInstruction*> input_use = InputRecordAt(input);
2256     HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
2257     input_use.GetInstruction()->uses_.erase_after(before_use_node);
2258     input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
2259   }
2260 
RemoveAsUserOfAllInputs()2261   void RemoveAsUserOfAllInputs() {
2262     for (const HUserRecord<HInstruction*>& input_use : GetInputRecords()) {
2263       HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
2264       input_use.GetInstruction()->uses_.erase_after(before_use_node);
2265       input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
2266     }
2267   }
2268 
GetUses()2269   const HUseList<HInstruction*>& GetUses() const { return uses_; }
GetEnvUses()2270   const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }
2271 
HasUses()2272   bool HasUses() const { return !uses_.empty() || !env_uses_.empty(); }
HasEnvironmentUses()2273   bool HasEnvironmentUses() const { return !env_uses_.empty(); }
HasNonEnvironmentUses()2274   bool HasNonEnvironmentUses() const { return !uses_.empty(); }
HasOnlyOneNonEnvironmentUse()2275   bool HasOnlyOneNonEnvironmentUse() const {
2276     return !HasEnvironmentUses() && GetUses().HasExactlyOneElement();
2277   }
2278 
IsRemovable()2279   bool IsRemovable() const {
2280     return
2281         !DoesAnyWrite() &&
2282         !CanThrow() &&
2283         !IsSuspendCheck() &&
2284         !IsControlFlow() &&
2285         !IsNativeDebugInfo() &&
2286         !IsParameterValue() &&
2287         // If we added an explicit barrier then we should keep it.
2288         !IsMemoryBarrier() &&
2289         !IsConstructorFence();
2290   }
2291 
IsDeadAndRemovable()2292   bool IsDeadAndRemovable() const {
2293     return IsRemovable() && !HasUses();
2294   }
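  // Illustrative sketch: the kind of check a dead-code-elimination style pass
  // might perform before dropping an instruction.
  //
  //   if (instruction->IsDeadAndRemovable()) {
  //     instruction->GetBlock()->RemoveInstruction(instruction);
  //   }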
2295 
2296   // Does this instruction strictly dominate `other_instruction`?
2297   // Returns false if this instruction and `other_instruction` are the same.
2298   // Aborts if this instruction and `other_instruction` are both phis.
2299   bool StrictlyDominates(HInstruction* other_instruction) const;
2300 
GetId()2301   int GetId() const { return id_; }
SetId(int id)2302   void SetId(int id) { id_ = id; }
2303 
GetSsaIndex()2304   int GetSsaIndex() const { return ssa_index_; }
SetSsaIndex(int ssa_index)2305   void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
HasSsaIndex()2306   bool HasSsaIndex() const { return ssa_index_ != -1; }
2307 
HasEnvironment()2308   bool HasEnvironment() const { return environment_ != nullptr; }
GetEnvironment()2309   HEnvironment* GetEnvironment() const { return environment_; }
2310   // Set the `environment_` field. Raw because this method does not
2311   // update the uses lists.
SetRawEnvironment(HEnvironment * environment)2312   void SetRawEnvironment(HEnvironment* environment) {
2313     DCHECK(environment_ == nullptr);
2314     DCHECK_EQ(environment->GetHolder(), this);
2315     environment_ = environment;
2316   }
2317 
InsertRawEnvironment(HEnvironment * environment)2318   void InsertRawEnvironment(HEnvironment* environment) {
2319     DCHECK(environment_ != nullptr);
2320     DCHECK_EQ(environment->GetHolder(), this);
2321     DCHECK(environment->GetParent() == nullptr);
2322     environment->parent_ = environment_;
2323     environment_ = environment;
2324   }
2325 
2326   void RemoveEnvironment();
2327 
2328   // Set the environment of this instruction, copying it from `environment`. While
2329   // copying, the uses lists are being updated.
CopyEnvironmentFrom(HEnvironment * environment)2330   void CopyEnvironmentFrom(HEnvironment* environment) {
2331     DCHECK(environment_ == nullptr);
2332     ArenaAllocator* allocator = GetBlock()->GetGraph()->GetAllocator();
2333     environment_ = new (allocator) HEnvironment(allocator, *environment, this);
2334     environment_->CopyFrom(environment);
2335     if (environment->GetParent() != nullptr) {
2336       environment_->SetAndCopyParentChain(allocator, environment->GetParent());
2337     }
2338   }
2339 
CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment * environment,HBasicBlock * block)2340   void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
2341                                                 HBasicBlock* block) {
2342     DCHECK(environment_ == nullptr);
2343     ArenaAllocator* allocator = GetBlock()->GetGraph()->GetAllocator();
2344     environment_ = new (allocator) HEnvironment(allocator, *environment, this);
2345     environment_->CopyFromWithLoopPhiAdjustment(environment, block);
2346     if (environment->GetParent() != nullptr) {
2347       environment_->SetAndCopyParentChain(allocator, environment->GetParent());
2348     }
2349   }
2350 
2351   // Returns the number of entries in the environment. Typically, that is the
2352   // number of dex registers in a method. It could be more in case of inlining.
2353   size_t EnvironmentSize() const;
2354 
GetLocations()2355   LocationSummary* GetLocations() const { return locations_; }
SetLocations(LocationSummary * locations)2356   void SetLocations(LocationSummary* locations) { locations_ = locations; }
2357 
2358   void ReplaceWith(HInstruction* instruction);
2359   void ReplaceUsesDominatedBy(HInstruction* dominator, HInstruction* replacement);
2360   void ReplaceEnvUsesDominatedBy(HInstruction* dominator, HInstruction* replacement);
2361   void ReplaceInput(HInstruction* replacement, size_t index);
2362 
2363   // This is almost the same as doing `ReplaceWith()`. But in this helper, the
2364   // uses of this instruction by `other` are *not* updated.
ReplaceWithExceptInReplacementAtIndex(HInstruction * other,size_t use_index)2365   void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) {
2366     ReplaceWith(other);
2367     other->ReplaceInput(this, use_index);
2368   }
2369 
2370   // Move `this` instruction before `cursor`.
2371   void MoveBefore(HInstruction* cursor, bool do_checks = true);
2372 
2373   // Move `this` before its first user and out of any loops. If there is no
2374   // out-of-loop user that dominates all other users, move the instruction
2375   // to the end of the out-of-loop common dominator of the users' blocks.
2376   //
2377   // This can be used only on non-throwing instructions with no side effects that
2378   // have at least one use but no environment uses.
2379   void MoveBeforeFirstUserAndOutOfLoops();
2380 
2381 #define INSTRUCTION_TYPE_CHECK(type, super)                                    \
2382   bool Is##type() const;
2383 
2384   FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
2385 #undef INSTRUCTION_TYPE_CHECK
2386 
2387 #define INSTRUCTION_TYPE_CAST(type, super)                                     \
2388   const H##type* As##type() const;                                             \
2389   H##type* As##type();
2390 
FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CAST)2391   FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CAST)
2392 #undef INSTRUCTION_TYPE_CAST
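  // Illustrative sketch: the generated Is##type()/As##type() helpers are the
  // usual way to test for and downcast to a concrete instruction kind.
  //
  //   if (instruction->IsNullCheck()) {
  //     HNullCheck* null_check = instruction->AsNullCheck();
  //     ...
  //   }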
2393 
2394   // Return a clone of the instruction if it is clonable (shallow copy by default, custom copy
2395   // if a custom copy-constructor is provided for a particular type). If IsClonable() is false for
2396   // the instruction then the behaviour of this function is undefined.
2397   //
2398   // Note: It is semantically valid to create a clone of the instruction only until the
2399   // prepare_for_register_allocator phase, as lifetime positions, intervals and codegen info
2400   // are not copied.
2401   //
2402   // Note: HEnvironment and some other fields are not copied and are set to default values, see
2403   // 'explicit HInstruction(const HInstruction& other)' for details.
2404   virtual HInstruction* Clone(ArenaAllocator* arena ATTRIBUTE_UNUSED) const {
2405     LOG(FATAL) << "Cloning is not implemented for the instruction " <<
2406                   DebugName() << " " << GetId();
2407     UNREACHABLE();
2408   }
2409 
2410   // Returns whether the instruction can be cloned (copied).
IsClonable()2411   virtual bool IsClonable() const { return false; }
2412 
2413   // Returns whether the instruction can be moved within the graph.
2414   // TODO: this method is used by LICM and GVN with possibly different
2415   //       meanings? split and rename?
CanBeMoved()2416   virtual bool CanBeMoved() const { return false; }
2417 
2418   // Returns whether any data encoded in the two instructions is equal.
2419   // This method does not look at the inputs. Both instructions must be
2420   // of the same type, otherwise the method has undefined behavior.
InstructionDataEquals(const HInstruction * other ATTRIBUTE_UNUSED)2421   virtual bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
2422     return false;
2423   }
2424 
2425   // Returns whether two instructions are equal, that is:
2426   // 1) They have the same type and contain the same data (InstructionDataEquals).
2427   // 2) Their inputs are identical.
2428   bool Equals(const HInstruction* other) const;
2429 
GetKind()2430   InstructionKind GetKind() const { return GetPackedField<InstructionKindField>(); }
2431 
ComputeHashCode()2432   virtual size_t ComputeHashCode() const {
2433     size_t result = GetKind();
2434     for (const HInstruction* input : GetInputs()) {
2435       result = (result * 31) + input->GetId();
2436     }
2437     return result;
2438   }
2439 
GetSideEffects()2440   SideEffects GetSideEffects() const { return side_effects_; }
SetSideEffects(SideEffects other)2441   void SetSideEffects(SideEffects other) { side_effects_ = other; }
AddSideEffects(SideEffects other)2442   void AddSideEffects(SideEffects other) { side_effects_.Add(other); }
2443 
GetLifetimePosition()2444   size_t GetLifetimePosition() const { return lifetime_position_; }
SetLifetimePosition(size_t position)2445   void SetLifetimePosition(size_t position) { lifetime_position_ = position; }
GetLiveInterval()2446   LiveInterval* GetLiveInterval() const { return live_interval_; }
SetLiveInterval(LiveInterval * interval)2447   void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; }
HasLiveInterval()2448   bool HasLiveInterval() const { return live_interval_ != nullptr; }
2449 
IsSuspendCheckEntry()2450   bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); }
2451 
2452   // Returns whether the code generation of the instruction will require to have access
2453   // to the current method. Such instructions are:
2454   // (1): Instructions that require an environment, as calling the runtime requires
2455   //      to walk the stack and have the current method stored at a specific stack address.
2456   // (2): HCurrentMethod, potentially used by HInvokeStaticOrDirect, HLoadString, or HLoadClass
2457   //      to access the dex cache.
NeedsCurrentMethod()2458   bool NeedsCurrentMethod() const {
2459     return NeedsEnvironment() || IsCurrentMethod();
2460   }
2461 
2462   // Returns whether the code generation of the instruction will require to have access
2463   // to the dex cache of the current method's declaring class via the current method.
NeedsDexCacheOfDeclaringClass()2464   virtual bool NeedsDexCacheOfDeclaringClass() const { return false; }
2465 
2466   // Does this instruction have any use in an environment before
2467   // control flow hits 'other'?
2468   bool HasAnyEnvironmentUseBefore(HInstruction* other);
2469 
2470   // Remove all references to environment uses of this instruction.
2471   // The caller must ensure that this is safe to do.
2472   void RemoveEnvironmentUsers();
2473 
IsEmittedAtUseSite()2474   bool IsEmittedAtUseSite() const { return GetPackedFlag<kFlagEmittedAtUseSite>(); }
MarkEmittedAtUseSite()2475   void MarkEmittedAtUseSite() { SetPackedFlag<kFlagEmittedAtUseSite>(true); }
2476 
2477  protected:
2478   // If set, the machine code for this instruction is assumed to be generated by
2479   // its users. Used by liveness analysis to compute use positions accordingly.
2480   static constexpr size_t kFlagEmittedAtUseSite = 0u;
2481   static constexpr size_t kFlagReferenceTypeIsExact = kFlagEmittedAtUseSite + 1;
2482   static constexpr size_t kFieldInstructionKind = kFlagReferenceTypeIsExact + 1;
2483   static constexpr size_t kFieldInstructionKindSize =
2484       MinimumBitsToStore(static_cast<size_t>(InstructionKind::kLastInstructionKind - 1));
2485   static constexpr size_t kFieldType =
2486       kFieldInstructionKind + kFieldInstructionKindSize;
2487   static constexpr size_t kFieldTypeSize =
2488       MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
2489   static constexpr size_t kNumberOfGenericPackedBits = kFieldType + kFieldTypeSize;
2490   static constexpr size_t kMaxNumberOfPackedBits = sizeof(uint32_t) * kBitsPerByte;
2491 
2492   static_assert(kNumberOfGenericPackedBits <= kMaxNumberOfPackedBits,
2493                 "Too many generic packed fields");
2494 
2495   using TypeField = BitField<DataType::Type, kFieldType, kFieldTypeSize>;
2496 
InputRecordAt(size_t i)2497   const HUserRecord<HInstruction*> InputRecordAt(size_t i) const {
2498     return GetInputRecords()[i];
2499   }
2500 
SetRawInputRecordAt(size_t index,const HUserRecord<HInstruction * > & input)2501   void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) {
2502     ArrayRef<HUserRecord<HInstruction*>> input_records = GetInputRecords();
2503     input_records[index] = input;
2504   }
2505 
GetPackedFields()2506   uint32_t GetPackedFields() const {
2507     return packed_fields_;
2508   }
2509 
2510   template <size_t flag>
GetPackedFlag()2511   bool GetPackedFlag() const {
2512     return (packed_fields_ & (1u << flag)) != 0u;
2513   }
2514 
2515   template <size_t flag>
2516   void SetPackedFlag(bool value = true) {
2517     packed_fields_ = (packed_fields_ & ~(1u << flag)) | ((value ? 1u : 0u) << flag);
2518   }
2519 
2520   template <typename BitFieldType>
GetPackedField()2521   typename BitFieldType::value_type GetPackedField() const {
2522     return BitFieldType::Decode(packed_fields_);
2523   }
2524 
2525   template <typename BitFieldType>
SetPackedField(typename BitFieldType::value_type value)2526   void SetPackedField(typename BitFieldType::value_type value) {
2527     DCHECK(IsUint<BitFieldType::size>(static_cast<uintptr_t>(value)));
2528     packed_fields_ = BitFieldType::Update(value, packed_fields_);
2529   }
2530 
2531   // Copy construction for the instruction (used for Clone function).
2532   //
2533   // Fields (e.g. lifetime, intervals and codegen info) associated with phases starting from
2534   // prepare_for_register_allocator are not copied (set to default values).
2535   //
2536   // Copy constructors must be provided for every HInstruction type; default copy constructor is
2537   // fine for most of them. However for some of the instructions a custom copy constructor must be
2538   // specified (when instruction has non-trivially copyable fields and must have a special behaviour
2539   // for copying them).
HInstruction(const HInstruction & other)2540   explicit HInstruction(const HInstruction& other)
2541       : previous_(nullptr),
2542         next_(nullptr),
2543         block_(nullptr),
2544         dex_pc_(other.dex_pc_),
2545         id_(-1),
2546         ssa_index_(-1),
2547         packed_fields_(other.packed_fields_),
2548         environment_(nullptr),
2549         locations_(nullptr),
2550         live_interval_(nullptr),
2551         lifetime_position_(kNoLifetime),
2552         side_effects_(other.side_effects_),
2553         reference_type_handle_(other.reference_type_handle_) {
2554   }
2555 
2556  private:
2557   using InstructionKindField =
2558      BitField<InstructionKind, kFieldInstructionKind, kFieldInstructionKindSize>;
2559 
FixUpUserRecordsAfterUseInsertion(HUseList<HInstruction * >::iterator fixup_end)2560   void FixUpUserRecordsAfterUseInsertion(HUseList<HInstruction*>::iterator fixup_end) {
2561     auto before_use_node = uses_.before_begin();
2562     for (auto use_node = uses_.begin(); use_node != fixup_end; ++use_node) {
2563       HInstruction* user = use_node->GetUser();
2564       size_t input_index = use_node->GetIndex();
2565       user->SetRawInputRecordAt(input_index, HUserRecord<HInstruction*>(this, before_use_node));
2566       before_use_node = use_node;
2567     }
2568   }
2569 
FixUpUserRecordsAfterUseRemoval(HUseList<HInstruction * >::iterator before_use_node)2570   void FixUpUserRecordsAfterUseRemoval(HUseList<HInstruction*>::iterator before_use_node) {
2571     auto next = ++HUseList<HInstruction*>::iterator(before_use_node);
2572     if (next != uses_.end()) {
2573       HInstruction* next_user = next->GetUser();
2574       size_t next_index = next->GetIndex();
2575       DCHECK(next_user->InputRecordAt(next_index).GetInstruction() == this);
2576       next_user->SetRawInputRecordAt(next_index, HUserRecord<HInstruction*>(this, before_use_node));
2577     }
2578   }
2579 
FixUpUserRecordsAfterEnvUseInsertion(HUseList<HEnvironment * >::iterator env_fixup_end)2580   void FixUpUserRecordsAfterEnvUseInsertion(HUseList<HEnvironment*>::iterator env_fixup_end) {
2581     auto before_env_use_node = env_uses_.before_begin();
2582     for (auto env_use_node = env_uses_.begin(); env_use_node != env_fixup_end; ++env_use_node) {
2583       HEnvironment* user = env_use_node->GetUser();
2584       size_t input_index = env_use_node->GetIndex();
2585       user->vregs_[input_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
2586       before_env_use_node = env_use_node;
2587     }
2588   }
2589 
FixUpUserRecordsAfterEnvUseRemoval(HUseList<HEnvironment * >::iterator before_env_use_node)2590   void FixUpUserRecordsAfterEnvUseRemoval(HUseList<HEnvironment*>::iterator before_env_use_node) {
2591     auto next = ++HUseList<HEnvironment*>::iterator(before_env_use_node);
2592     if (next != env_uses_.end()) {
2593       HEnvironment* next_user = next->GetUser();
2594       size_t next_index = next->GetIndex();
2595       DCHECK(next_user->vregs_[next_index].GetInstruction() == this);
2596       next_user->vregs_[next_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
2597     }
2598   }
2599 
2600   HInstruction* previous_;
2601   HInstruction* next_;
2602   HBasicBlock* block_;
2603   const uint32_t dex_pc_;
2604 
2605   // An instruction gets an id when it is added to the graph.
2606   // It reflects creation order. A negative id means the instruction
2607   // has not been added to the graph.
2608   int id_;
2609 
2610   // When doing liveness analysis, instructions that have uses get an SSA index.
2611   int ssa_index_;
2612 
2613   // Packed fields.
2614   uint32_t packed_fields_;
2615 
2616   // List of instructions that have this instruction as input.
2617   HUseList<HInstruction*> uses_;
2618 
2619   // List of environments that contain this instruction.
2620   HUseList<HEnvironment*> env_uses_;
2621 
2622   // The environment associated with this instruction. Not null if the instruction
2623   // might jump out of the method.
2624   HEnvironment* environment_;
2625 
2626   // Set by the code generator.
2627   LocationSummary* locations_;
2628 
2629   // Set by the liveness analysis.
2630   LiveInterval* live_interval_;
2631 
2632   // Set by the liveness analysis, this is the position in a linear
2633   // order of blocks where this instruction's live interval start.
2634   size_t lifetime_position_;
2635 
2636   SideEffects side_effects_;
2637 
2638   // The reference handle part of the reference type info.
2639   // The IsExact() flag is stored in packed fields.
2640   // TODO: for primitive types this should be marked as invalid.
2641   ReferenceTypeInfo::TypeHandle reference_type_handle_;
2642 
2643   friend class GraphChecker;
2644   friend class HBasicBlock;
2645   friend class HEnvironment;
2646   friend class HGraph;
2647   friend class HInstructionList;
2648 };
2649 std::ostream& operator<<(std::ostream& os, HInstruction::InstructionKind rhs);
2650 
2651 // Iterates over the instructions, while preserving the next instruction
2652 // in case the current instruction gets removed from the list by the user
2653 // of this iterator.
2654 class HInstructionIterator : public ValueObject {
2655  public:
HInstructionIterator(const HInstructionList & instructions)2656   explicit HInstructionIterator(const HInstructionList& instructions)
2657       : instruction_(instructions.first_instruction_) {
2658     next_ = Done() ? nullptr : instruction_->GetNext();
2659   }
2660 
Done()2661   bool Done() const { return instruction_ == nullptr; }
Current()2662   HInstruction* Current() const { return instruction_; }
Advance()2663   void Advance() {
2664     instruction_ = next_;
2665     next_ = Done() ? nullptr : instruction_->GetNext();
2666   }
2667 
2668  private:
2669   HInstruction* instruction_;
2670   HInstruction* next_;
2671 
2672   DISALLOW_COPY_AND_ASSIGN(HInstructionIterator);
2673 };
2674 
2675 // Iterates over the instructions without saving the next instruction,
2676 // therefore handling changes in the graph potentially made by the user
2677 // of this iterator.
2678 class HInstructionIteratorHandleChanges : public ValueObject {
2679  public:
HInstructionIteratorHandleChanges(const HInstructionList & instructions)2680   explicit HInstructionIteratorHandleChanges(const HInstructionList& instructions)
2681       : instruction_(instructions.first_instruction_) {
2682   }
2683 
Done()2684   bool Done() const { return instruction_ == nullptr; }
Current()2685   HInstruction* Current() const { return instruction_; }
Advance()2686   void Advance() {
2687     instruction_ = instruction_->GetNext();
2688   }
2689 
2690  private:
2691   HInstruction* instruction_;
2692 
2693   DISALLOW_COPY_AND_ASSIGN(HInstructionIteratorHandleChanges);
2694 };
2695 
2696 
2697 class HBackwardInstructionIterator : public ValueObject {
2698  public:
HBackwardInstructionIterator(const HInstructionList & instructions)2699   explicit HBackwardInstructionIterator(const HInstructionList& instructions)
2700       : instruction_(instructions.last_instruction_) {
2701     next_ = Done() ? nullptr : instruction_->GetPrevious();
2702   }
2703 
Done()2704   bool Done() const { return instruction_ == nullptr; }
Current()2705   HInstruction* Current() const { return instruction_; }
Advance()2706   void Advance() {
2707     instruction_ = next_;
2708     next_ = Done() ? nullptr : instruction_->GetPrevious();
2709   }
2710 
2711  private:
2712   HInstruction* instruction_;
2713   HInstruction* next_;
2714 
2715   DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator);
2716 };
2717 
2718 class HVariableInputSizeInstruction : public HInstruction {
2719  public:
2720   using HInstruction::GetInputRecords;  // Keep the const version visible.
GetInputRecords()2721   ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() override {
2722     return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
2723   }
2724 
2725   void AddInput(HInstruction* input);
2726   void InsertInputAt(size_t index, HInstruction* input);
2727   void RemoveInputAt(size_t index);
2728 
2729   // Removes all the inputs.
2730   // Also removes this instructions from each input's use list
2731   // (for non-environment uses only).
2732   void RemoveAllInputs();
2733 
2734  protected:
HVariableInputSizeInstruction(InstructionKind inst_kind,SideEffects side_effects,uint32_t dex_pc,ArenaAllocator * allocator,size_t number_of_inputs,ArenaAllocKind kind)2735   HVariableInputSizeInstruction(InstructionKind inst_kind,
2736                                 SideEffects side_effects,
2737                                 uint32_t dex_pc,
2738                                 ArenaAllocator* allocator,
2739                                 size_t number_of_inputs,
2740                                 ArenaAllocKind kind)
2741       : HInstruction(inst_kind, side_effects, dex_pc),
2742         inputs_(number_of_inputs, allocator->Adapter(kind)) {}
HVariableInputSizeInstruction(InstructionKind inst_kind,DataType::Type type,SideEffects side_effects,uint32_t dex_pc,ArenaAllocator * allocator,size_t number_of_inputs,ArenaAllocKind kind)2743   HVariableInputSizeInstruction(InstructionKind inst_kind,
2744                                 DataType::Type type,
2745                                 SideEffects side_effects,
2746                                 uint32_t dex_pc,
2747                                 ArenaAllocator* allocator,
2748                                 size_t number_of_inputs,
2749                                 ArenaAllocKind kind)
2750       : HInstruction(inst_kind, type, side_effects, dex_pc),
2751         inputs_(number_of_inputs, allocator->Adapter(kind)) {}
2752 
2753   DEFAULT_COPY_CONSTRUCTOR(VariableInputSizeInstruction);
2754 
2755   ArenaVector<HUserRecord<HInstruction*>> inputs_;
2756 };
2757 
2758 template<size_t N>
2759 class HExpression : public HInstruction {
2760  public:
2761   HExpression<N>(InstructionKind kind, SideEffects side_effects, uint32_t dex_pc)
HInstruction(kind,side_effects,dex_pc)2762       : HInstruction(kind, side_effects, dex_pc), inputs_() {}
2763   HExpression<N>(InstructionKind kind,
2764                  DataType::Type type,
2765                  SideEffects side_effects,
2766                  uint32_t dex_pc)
HInstruction(kind,type,side_effects,dex_pc)2767       : HInstruction(kind, type, side_effects, dex_pc), inputs_() {}
~HExpression()2768   virtual ~HExpression() {}
2769 
2770   using HInstruction::GetInputRecords;  // Keep the const version visible.
GetInputRecords()2771   ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
2772     return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
2773   }
2774 
2775  protected:
2776   DEFAULT_COPY_CONSTRUCTOR(Expression<N>);
2777 
2778  private:
2779   std::array<HUserRecord<HInstruction*>, N> inputs_;
2780 
2781   friend class SsaBuilder;
2782 };
2783 
2784 // HExpression specialization for N=0.
2785 template<>
2786 class HExpression<0> : public HInstruction {
2787  public:
2788   using HInstruction::HInstruction;
2789 
~HExpression()2790   virtual ~HExpression() {}
2791 
2792   using HInstruction::GetInputRecords;  // Keep the const version visible.
GetInputRecords()2793   ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
2794     return ArrayRef<HUserRecord<HInstruction*>>();
2795   }
2796 
2797  protected:
2798   DEFAULT_COPY_CONSTRUCTOR(Expression<0>);
2799 
2800  private:
2801   friend class SsaBuilder;
2802 };
2803 
2804 // Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow
2805 // instruction that branches to the exit block.
2806 class HReturnVoid final : public HExpression<0> {
2807  public:
2808   explicit HReturnVoid(uint32_t dex_pc = kNoDexPc)
HExpression(kReturnVoid,SideEffects::None (),dex_pc)2809       : HExpression(kReturnVoid, SideEffects::None(), dex_pc) {
2810   }
2811 
IsControlFlow()2812   bool IsControlFlow() const override { return true; }
2813 
2814   DECLARE_INSTRUCTION(ReturnVoid);
2815 
2816  protected:
2817   DEFAULT_COPY_CONSTRUCTOR(ReturnVoid);
2818 };
2819 
2820 // Represents dex's RETURN opcodes. A HReturn is a control flow
2821 // instruction that branches to the exit block.
2822 class HReturn final : public HExpression<1> {
2823  public:
2824   explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc)
HExpression(kReturn,SideEffects::None (),dex_pc)2825       : HExpression(kReturn, SideEffects::None(), dex_pc) {
2826     SetRawInputAt(0, value);
2827   }
2828 
IsControlFlow()2829   bool IsControlFlow() const override { return true; }
2830 
2831   DECLARE_INSTRUCTION(Return);
2832 
2833  protected:
2834   DEFAULT_COPY_CONSTRUCTOR(Return);
2835 };
2836 
2837 class HPhi final : public HVariableInputSizeInstruction {
2838  public:
2839   HPhi(ArenaAllocator* allocator,
2840        uint32_t reg_number,
2841        size_t number_of_inputs,
2842        DataType::Type type,
2843        uint32_t dex_pc = kNoDexPc)
HVariableInputSizeInstruction(kPhi,ToPhiType (type),SideEffects::None (),dex_pc,allocator,number_of_inputs,kArenaAllocPhiInputs)2844       : HVariableInputSizeInstruction(
2845             kPhi,
2846             ToPhiType(type),
2847             SideEffects::None(),
2848             dex_pc,
2849             allocator,
2850             number_of_inputs,
2851             kArenaAllocPhiInputs),
2852         reg_number_(reg_number) {
2853     DCHECK_NE(GetType(), DataType::Type::kVoid);
2854     // Phis are constructed live and marked dead if conflicting or unused.
2855     // Individual steps of SsaBuilder should assume that if a phi has been
2856     // marked dead, it can be ignored and will be removed by SsaPhiElimination.
2857     SetPackedFlag<kFlagIsLive>(true);
2858     SetPackedFlag<kFlagCanBeNull>(true);
2859   }
2860 
IsClonable()2861   bool IsClonable() const override { return true; }
2862 
2863   // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
ToPhiType(DataType::Type type)2864   static DataType::Type ToPhiType(DataType::Type type) {
2865     return DataType::Kind(type);
2866   }
2867 
IsCatchPhi()2868   bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }
2869 
SetType(DataType::Type new_type)2870   void SetType(DataType::Type new_type) {
2871     // Make sure that only valid type changes occur. The following are allowed:
2872     //  (1) int  -> float/ref (primitive type propagation),
2873     //  (2) long -> double (primitive type propagation).
2874     DCHECK(GetType() == new_type ||
2875            (GetType() == DataType::Type::kInt32 && new_type == DataType::Type::kFloat32) ||
2876            (GetType() == DataType::Type::kInt32 && new_type == DataType::Type::kReference) ||
2877            (GetType() == DataType::Type::kInt64 && new_type == DataType::Type::kFloat64));
2878     SetPackedField<TypeField>(new_type);
2879   }
2880 
CanBeNull()2881   bool CanBeNull() const override { return GetPackedFlag<kFlagCanBeNull>(); }
SetCanBeNull(bool can_be_null)2882   void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }
2883 
GetRegNumber()2884   uint32_t GetRegNumber() const { return reg_number_; }
2885 
SetDead()2886   void SetDead() { SetPackedFlag<kFlagIsLive>(false); }
SetLive()2887   void SetLive() { SetPackedFlag<kFlagIsLive>(true); }
IsDead()2888   bool IsDead() const { return !IsLive(); }
IsLive()2889   bool IsLive() const { return GetPackedFlag<kFlagIsLive>(); }
2890 
IsVRegEquivalentOf(const HInstruction * other)2891   bool IsVRegEquivalentOf(const HInstruction* other) const {
2892     return other != nullptr
2893         && other->IsPhi()
2894         && other->AsPhi()->GetBlock() == GetBlock()
2895         && other->AsPhi()->GetRegNumber() == GetRegNumber();
2896   }
2897 
HasEquivalentPhi()2898   bool HasEquivalentPhi() const {
2899     if (GetPrevious() != nullptr && GetPrevious()->AsPhi()->GetRegNumber() == GetRegNumber()) {
2900       return true;
2901     }
2902     if (GetNext() != nullptr && GetNext()->AsPhi()->GetRegNumber() == GetRegNumber()) {
2903       return true;
2904     }
2905     return false;
2906   }
2907 
2908   // Returns the next equivalent phi (starting from the current one) or null if there is none.
2909   // An equivalent phi is a phi having the same dex register and type.
2910   // It assumes that phis with the same dex register are adjacent.
GetNextEquivalentPhiWithSameType()2911   HPhi* GetNextEquivalentPhiWithSameType() {
2912     HInstruction* next = GetNext();
2913     while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) {
2914       if (next->GetType() == GetType()) {
2915         return next->AsPhi();
2916       }
2917       next = next->GetNext();
2918     }
2919     return nullptr;
2920   }
2921 
2922   DECLARE_INSTRUCTION(Phi);
2923 
2924  protected:
2925   DEFAULT_COPY_CONSTRUCTOR(Phi);
2926 
2927  private:
2928   static constexpr size_t kFlagIsLive = HInstruction::kNumberOfGenericPackedBits;
2929   static constexpr size_t kFlagCanBeNull = kFlagIsLive + 1;
2930   static constexpr size_t kNumberOfPhiPackedBits = kFlagCanBeNull + 1;
2931   static_assert(kNumberOfPhiPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
2932 
2933   const uint32_t reg_number_;
2934 };
2935 
2936 // The exit instruction is the only instruction of the exit block.
2937 // Instructions aborting the method (HThrow and HReturn) must branch to the
2938 // exit block.
2939 class HExit final : public HExpression<0> {
2940  public:
2941   explicit HExit(uint32_t dex_pc = kNoDexPc)
HExpression(kExit,SideEffects::None (),dex_pc)2942       : HExpression(kExit, SideEffects::None(), dex_pc) {
2943   }
2944 
IsControlFlow()2945   bool IsControlFlow() const override { return true; }
2946 
2947   DECLARE_INSTRUCTION(Exit);
2948 
2949  protected:
2950   DEFAULT_COPY_CONSTRUCTOR(Exit);
2951 };
2952 
2953 // Jumps from one block to another.
2954 class HGoto final : public HExpression<0> {
2955  public:
2956   explicit HGoto(uint32_t dex_pc = kNoDexPc)
HExpression(kGoto,SideEffects::None (),dex_pc)2957       : HExpression(kGoto, SideEffects::None(), dex_pc) {
2958   }
2959 
IsClonable()2960   bool IsClonable() const override { return true; }
IsControlFlow()2961   bool IsControlFlow() const override { return true; }
2962 
GetSuccessor()2963   HBasicBlock* GetSuccessor() const {
2964     return GetBlock()->GetSingleSuccessor();
2965   }
2966 
2967   DECLARE_INSTRUCTION(Goto);
2968 
2969  protected:
2970   DEFAULT_COPY_CONSTRUCTOR(Goto);
2971 };
2972 
2973 class HConstant : public HExpression<0> {
2974  public:
2975   explicit HConstant(InstructionKind kind, DataType::Type type, uint32_t dex_pc = kNoDexPc)
HExpression(kind,type,SideEffects::None (),dex_pc)2976       : HExpression(kind, type, SideEffects::None(), dex_pc) {
2977   }
2978 
CanBeMoved()2979   bool CanBeMoved() const override { return true; }
2980 
2981   // Is this constant -1 in the arithmetic sense?
IsMinusOne()2982   virtual bool IsMinusOne() const { return false; }
2983   // Is this constant 0 in the arithmetic sense?
IsArithmeticZero()2984   virtual bool IsArithmeticZero() const { return false; }
2985   // Is this constant a 0-bit pattern?
IsZeroBitPattern()2986   virtual bool IsZeroBitPattern() const { return false; }
2987   // Is this constant 1 in the arithmetic sense?
IsOne()2988   virtual bool IsOne() const { return false; }
2989 
2990   virtual uint64_t GetValueAsUint64() const = 0;
2991 
2992   DECLARE_ABSTRACT_INSTRUCTION(Constant);
2993 
2994  protected:
2995   DEFAULT_COPY_CONSTRUCTOR(Constant);
2996 };
2997 
2998 class HNullConstant final : public HConstant {
2999  public:
InstructionDataEquals(const HInstruction * other ATTRIBUTE_UNUSED)3000   bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
3001     return true;
3002   }
3003 
GetValueAsUint64()3004   uint64_t GetValueAsUint64() const override { return 0; }
3005 
ComputeHashCode()3006   size_t ComputeHashCode() const override { return 0; }
3007 
3008   // The null constant representation is a 0-bit pattern.
IsZeroBitPattern()3009   bool IsZeroBitPattern() const override { return true; }
3010 
3011   DECLARE_INSTRUCTION(NullConstant);
3012 
3013  protected:
3014   DEFAULT_COPY_CONSTRUCTOR(NullConstant);
3015 
3016  private:
3017   explicit HNullConstant(uint32_t dex_pc = kNoDexPc)
HConstant(kNullConstant,DataType::Type::kReference,dex_pc)3018       : HConstant(kNullConstant, DataType::Type::kReference, dex_pc) {
3019   }
3020 
3021   friend class HGraph;
3022 };
3023 
3024 // Constants of the type int. Those can be from Dex instructions, or
3025 // synthesized (for example with the if-eqz instruction).
3026 class HIntConstant final : public HConstant {
3027  public:
GetValue()3028   int32_t GetValue() const { return value_; }
3029 
GetValueAsUint64()3030   uint64_t GetValueAsUint64() const override {
3031     return static_cast<uint64_t>(static_cast<uint32_t>(value_));
3032   }
3033 
InstructionDataEquals(const HInstruction * other)3034   bool InstructionDataEquals(const HInstruction* other) const override {
3035     DCHECK(other->IsIntConstant()) << other->DebugName();
3036     return other->AsIntConstant()->value_ == value_;
3037   }
3038 
ComputeHashCode()3039   size_t ComputeHashCode() const override { return GetValue(); }
3040 
IsMinusOne()3041   bool IsMinusOne() const override { return GetValue() == -1; }
IsArithmeticZero()3042   bool IsArithmeticZero() const override { return GetValue() == 0; }
IsZeroBitPattern()3043   bool IsZeroBitPattern() const override { return GetValue() == 0; }
IsOne()3044   bool IsOne() const override { return GetValue() == 1; }
3045 
3046   // Integer constants are used to encode Boolean values as well,
3047   // where 1 means true and 0 means false.
IsTrue()3048   bool IsTrue() const { return GetValue() == 1; }
IsFalse()3049   bool IsFalse() const { return GetValue() == 0; }
3050 
3051   DECLARE_INSTRUCTION(IntConstant);
3052 
3053  protected:
3054   DEFAULT_COPY_CONSTRUCTOR(IntConstant);
3055 
3056  private:
3057   explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
HConstant(kIntConstant,DataType::Type::kInt32,dex_pc)3058       : HConstant(kIntConstant, DataType::Type::kInt32, dex_pc), value_(value) {
3059   }
3060   explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc)
HConstant(kIntConstant,DataType::Type::kInt32,dex_pc)3061       : HConstant(kIntConstant, DataType::Type::kInt32, dex_pc),
3062         value_(value ? 1 : 0) {
3063   }
3064 
3065   const int32_t value_;
3066 
3067   friend class HGraph;
3068   ART_FRIEND_TEST(GraphTest, InsertInstructionBefore);
3069   ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast);
3070 };
3071 
3072 class HLongConstant final : public HConstant {
3073  public:
GetValue()3074   int64_t GetValue() const { return value_; }
3075 
GetValueAsUint64()3076   uint64_t GetValueAsUint64() const override { return value_; }
3077 
InstructionDataEquals(const HInstruction * other)3078   bool InstructionDataEquals(const HInstruction* other) const override {
3079     DCHECK(other->IsLongConstant()) << other->DebugName();
3080     return other->AsLongConstant()->value_ == value_;
3081   }
3082 
ComputeHashCode()3083   size_t ComputeHashCode() const override { return static_cast<size_t>(GetValue()); }
3084 
IsMinusOne()3085   bool IsMinusOne() const override { return GetValue() == -1; }
IsArithmeticZero()3086   bool IsArithmeticZero() const override { return GetValue() == 0; }
IsZeroBitPattern()3087   bool IsZeroBitPattern() const override { return GetValue() == 0; }
IsOne()3088   bool IsOne() const override { return GetValue() == 1; }
3089 
3090   DECLARE_INSTRUCTION(LongConstant);
3091 
3092  protected:
3093   DEFAULT_COPY_CONSTRUCTOR(LongConstant);
3094 
3095  private:
3096   explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
HConstant(kLongConstant,DataType::Type::kInt64,dex_pc)3097       : HConstant(kLongConstant, DataType::Type::kInt64, dex_pc),
3098         value_(value) {
3099   }
3100 
3101   const int64_t value_;
3102 
3103   friend class HGraph;
3104 };
3105 
3106 class HFloatConstant final : public HConstant {
3107  public:
GetValue()3108   float GetValue() const { return value_; }
3109 
GetValueAsUint64()3110   uint64_t GetValueAsUint64() const override {
3111     return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_));
3112   }
3113 
InstructionDataEquals(const HInstruction * other)3114   bool InstructionDataEquals(const HInstruction* other) const override {
3115     DCHECK(other->IsFloatConstant()) << other->DebugName();
3116     return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64();
3117   }
3118 
ComputeHashCode()3119   size_t ComputeHashCode() const override { return static_cast<size_t>(GetValue()); }
3120 
IsMinusOne()3121   bool IsMinusOne() const override {
3122     return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f));
3123   }
IsArithmeticZero()3124   bool IsArithmeticZero() const override {
3125     return std::fpclassify(value_) == FP_ZERO;
3126   }
IsArithmeticPositiveZero()3127   bool IsArithmeticPositiveZero() const {
3128     return IsArithmeticZero() && !std::signbit(value_);
3129   }
IsArithmeticNegativeZero()3130   bool IsArithmeticNegativeZero() const {
3131     return IsArithmeticZero() && std::signbit(value_);
3132   }
IsZeroBitPattern()3133   bool IsZeroBitPattern() const override {
3134     return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(0.0f);
3135   }
IsOne()3136   bool IsOne() const override {
3137     return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f);
3138   }
IsNaN()3139   bool IsNaN() const {
3140     return std::isnan(value_);
3141   }
3142 
3143   DECLARE_INSTRUCTION(FloatConstant);
3144 
3145  protected:
3146   DEFAULT_COPY_CONSTRUCTOR(FloatConstant);
3147 
3148  private:
3149   explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc)
HConstant(kFloatConstant,DataType::Type::kFloat32,dex_pc)3150       : HConstant(kFloatConstant, DataType::Type::kFloat32, dex_pc),
3151         value_(value) {
3152   }
3153   explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
HConstant(kFloatConstant,DataType::Type::kFloat32,dex_pc)3154       : HConstant(kFloatConstant, DataType::Type::kFloat32, dex_pc),
3155         value_(bit_cast<float, int32_t>(value)) {
3156   }
3157 
3158   const float value_;
3159 
3160   // Only the SsaBuilder and HGraph can create floating-point constants.
3161   friend class SsaBuilder;
3162   friend class HGraph;
3163 };
3164 
3165 class HDoubleConstant final : public HConstant {
3166  public:
GetValue()3167   double GetValue() const { return value_; }
3168 
GetValueAsUint64()3169   uint64_t GetValueAsUint64() const override { return bit_cast<uint64_t, double>(value_); }
3170 
InstructionDataEquals(const HInstruction * other)3171   bool InstructionDataEquals(const HInstruction* other) const override {
3172     DCHECK(other->IsDoubleConstant()) << other->DebugName();
3173     return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64();
3174   }
3175 
ComputeHashCode()3176   size_t ComputeHashCode() const override { return static_cast<size_t>(GetValue()); }
3177 
IsMinusOne()3178   bool IsMinusOne() const override {
3179     return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0));
3180   }
IsArithmeticZero()3181   bool IsArithmeticZero() const override {
3182     return std::fpclassify(value_) == FP_ZERO;
3183   }
IsArithmeticPositiveZero()3184   bool IsArithmeticPositiveZero() const {
3185     return IsArithmeticZero() && !std::signbit(value_);
3186   }
IsArithmeticNegativeZero()3187   bool IsArithmeticNegativeZero() const {
3188     return IsArithmeticZero() && std::signbit(value_);
3189   }
IsZeroBitPattern()3190   bool IsZeroBitPattern() const override {
3191     return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((0.0));
3192   }
IsOne()3193   bool IsOne() const override {
3194     return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0);
3195   }
IsNaN()3196   bool IsNaN() const {
3197     return std::isnan(value_);
3198   }
3199 
3200   DECLARE_INSTRUCTION(DoubleConstant);
3201 
3202  protected:
3203   DEFAULT_COPY_CONSTRUCTOR(DoubleConstant);
3204 
3205  private:
3206   explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc)
HConstant(kDoubleConstant,DataType::Type::kFloat64,dex_pc)3207       : HConstant(kDoubleConstant, DataType::Type::kFloat64, dex_pc),
3208         value_(value) {
3209   }
3210   explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
HConstant(kDoubleConstant,DataType::Type::kFloat64,dex_pc)3211       : HConstant(kDoubleConstant, DataType::Type::kFloat64, dex_pc),
3212         value_(bit_cast<double, int64_t>(value)) {
3213   }
3214 
3215   const double value_;
3216 
3217   // Only the SsaBuilder and HGraph can create floating-point constants.
3218   friend class SsaBuilder;
3219   friend class HGraph;
3220 };
3221 
3222 // Conditional branch. A block ending with an HIf instruction must have
3223 // two successors.
3224 class HIf final : public HExpression<1> {
3225  public:
3226   explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
HExpression(kIf,SideEffects::None (),dex_pc)3227       : HExpression(kIf, SideEffects::None(), dex_pc) {
3228     SetRawInputAt(0, input);
3229   }
3230 
IsClonable()3231   bool IsClonable() const override { return true; }
IsControlFlow()3232   bool IsControlFlow() const override { return true; }
3233 
IfTrueSuccessor()3234   HBasicBlock* IfTrueSuccessor() const {
3235     return GetBlock()->GetSuccessors()[0];
3236   }
3237 
IfFalseSuccessor()3238   HBasicBlock* IfFalseSuccessor() const {
3239     return GetBlock()->GetSuccessors()[1];
3240   }
3241 
3242   DECLARE_INSTRUCTION(If);
3243 
3244  protected:
3245   DEFAULT_COPY_CONSTRUCTOR(If);
3246 };
3247 
3248 
3249 // Abstract instruction which marks the beginning and/or end of a try block and
3250 // links it to the respective exception handlers. Behaves the same as a Goto in
3251 // non-exceptional control flow.
3252 // Normal-flow successor is stored at index zero, exception handlers under
3253 // higher indices in no particular order.
3254 class HTryBoundary final : public HExpression<0> {
3255  public:
3256   enum class BoundaryKind {
3257     kEntry,
3258     kExit,
3259     kLast = kExit
3260   };
3261 
3262   // SideEffects::CanTriggerGC prevents instructions with SideEffects::DependOnGC to be alive
3263   // across the catch block entering edges as GC might happen during throwing an exception.
3264   // TryBoundary with BoundaryKind::kExit is conservatively used for that as there is no
3265   // HInstruction which a catch block must start from.
3266   explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
3267       : HExpression(kTryBoundary,
3268                     (kind == BoundaryKind::kExit) ? SideEffects::CanTriggerGC()
3269                                                   : SideEffects::None(),
3270                     dex_pc) {
3271     SetPackedField<BoundaryKindField>(kind);
3272   }
3273 
IsControlFlow()3274   bool IsControlFlow() const override { return true; }
3275 
3276   // Returns the block's non-exceptional successor (index zero).
GetNormalFlowSuccessor()3277   HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }
3278 
GetExceptionHandlers()3279   ArrayRef<HBasicBlock* const> GetExceptionHandlers() const {
3280     return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u);
3281   }
3282 
3283   // Returns whether `handler` is among its exception handlers (non-zero index
3284   // successors).
HasExceptionHandler(const HBasicBlock & handler)3285   bool HasExceptionHandler(const HBasicBlock& handler) const {
3286     DCHECK(handler.IsCatchBlock());
3287     return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */);
3288   }
3289 
3290   // If not present already, adds `handler` to its block's list of exception
3291   // handlers.
AddExceptionHandler(HBasicBlock * handler)3292   void AddExceptionHandler(HBasicBlock* handler) {
3293     if (!HasExceptionHandler(*handler)) {
3294       GetBlock()->AddSuccessor(handler);
3295     }
3296   }
3297 
GetBoundaryKind()3298   BoundaryKind GetBoundaryKind() const { return GetPackedField<BoundaryKindField>(); }
IsEntry()3299   bool IsEntry() const { return GetBoundaryKind() == BoundaryKind::kEntry; }
3300 
3301   bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;
3302 
3303   DECLARE_INSTRUCTION(TryBoundary);
3304 
3305  protected:
3306   DEFAULT_COPY_CONSTRUCTOR(TryBoundary);
3307 
3308  private:
3309   static constexpr size_t kFieldBoundaryKind = kNumberOfGenericPackedBits;
3310   static constexpr size_t kFieldBoundaryKindSize =
3311       MinimumBitsToStore(static_cast<size_t>(BoundaryKind::kLast));
3312   static constexpr size_t kNumberOfTryBoundaryPackedBits =
3313       kFieldBoundaryKind + kFieldBoundaryKindSize;
3314   static_assert(kNumberOfTryBoundaryPackedBits <= kMaxNumberOfPackedBits,
3315                 "Too many packed fields.");
3316   using BoundaryKindField = BitField<BoundaryKind, kFieldBoundaryKind, kFieldBoundaryKindSize>;
3317 };
3318 
3319 // Deoptimize to interpreter, upon checking a condition.
3320 class HDeoptimize final : public HVariableInputSizeInstruction {
3321  public:
3322   // Use this constructor when the `HDeoptimize` acts as a barrier, where no code can move
3323   // across.
HDeoptimize(ArenaAllocator * allocator,HInstruction * cond,DeoptimizationKind kind,uint32_t dex_pc)3324   HDeoptimize(ArenaAllocator* allocator,
3325               HInstruction* cond,
3326               DeoptimizationKind kind,
3327               uint32_t dex_pc)
3328       : HVariableInputSizeInstruction(
3329             kDeoptimize,
3330             SideEffects::All(),
3331             dex_pc,
3332             allocator,
3333             /* number_of_inputs= */ 1,
3334             kArenaAllocMisc) {
3335     SetPackedFlag<kFieldCanBeMoved>(false);
3336     SetPackedField<DeoptimizeKindField>(kind);
3337     SetRawInputAt(0, cond);
3338   }
3339 
IsClonable()3340   bool IsClonable() const override { return true; }
3341 
3342   // Use this constructor when the `HDeoptimize` guards an instruction, and any user
3343   // that relies on the deoptimization to pass should have its input be the `HDeoptimize`
3344   // instead of `guard`.
3345   // We set CanTriggerGC to prevent any intermediate address to be live
3346   // at the point of the `HDeoptimize`.
HDeoptimize(ArenaAllocator * allocator,HInstruction * cond,HInstruction * guard,DeoptimizationKind kind,uint32_t dex_pc)3347   HDeoptimize(ArenaAllocator* allocator,
3348               HInstruction* cond,
3349               HInstruction* guard,
3350               DeoptimizationKind kind,
3351               uint32_t dex_pc)
3352       : HVariableInputSizeInstruction(
3353             kDeoptimize,
3354             guard->GetType(),
3355             SideEffects::CanTriggerGC(),
3356             dex_pc,
3357             allocator,
3358             /* number_of_inputs= */ 2,
3359             kArenaAllocMisc) {
3360     SetPackedFlag<kFieldCanBeMoved>(true);
3361     SetPackedField<DeoptimizeKindField>(kind);
3362     SetRawInputAt(0, cond);
3363     SetRawInputAt(1, guard);
3364   }
3365 
CanBeMoved()3366   bool CanBeMoved() const override { return GetPackedFlag<kFieldCanBeMoved>(); }
3367 
InstructionDataEquals(const HInstruction * other)3368   bool InstructionDataEquals(const HInstruction* other) const override {
3369     return (other->CanBeMoved() == CanBeMoved()) && (other->AsDeoptimize()->GetKind() == GetKind());
3370   }
3371 
NeedsEnvironment()3372   bool NeedsEnvironment() const override { return true; }
3373 
CanThrow()3374   bool CanThrow() const override { return true; }
3375 
GetDeoptimizationKind()3376   DeoptimizationKind GetDeoptimizationKind() const { return GetPackedField<DeoptimizeKindField>(); }
3377 
GuardsAnInput()3378   bool GuardsAnInput() const {
3379     return InputCount() == 2;
3380   }
3381 
GuardedInput()3382   HInstruction* GuardedInput() const {
3383     DCHECK(GuardsAnInput());
3384     return InputAt(1);
3385   }
3386 
RemoveGuard()3387   void RemoveGuard() {
3388     RemoveInputAt(1);
3389   }
3390 
3391   DECLARE_INSTRUCTION(Deoptimize);
3392 
3393  protected:
3394   DEFAULT_COPY_CONSTRUCTOR(Deoptimize);
3395 
3396  private:
3397   static constexpr size_t kFieldCanBeMoved = kNumberOfGenericPackedBits;
3398   static constexpr size_t kFieldDeoptimizeKind = kNumberOfGenericPackedBits + 1;
3399   static constexpr size_t kFieldDeoptimizeKindSize =
3400       MinimumBitsToStore(static_cast<size_t>(DeoptimizationKind::kLast));
3401   static constexpr size_t kNumberOfDeoptimizePackedBits =
3402       kFieldDeoptimizeKind + kFieldDeoptimizeKindSize;
3403   static_assert(kNumberOfDeoptimizePackedBits <= kMaxNumberOfPackedBits,
3404                 "Too many packed fields.");
3405   using DeoptimizeKindField =
3406       BitField<DeoptimizationKind, kFieldDeoptimizeKind, kFieldDeoptimizeKindSize>;
3407 };
3408 
3409 // Represents a should_deoptimize flag. Currently used for CHA-based devirtualization.
3410 // The compiled code checks this flag value in a guard before devirtualized call and
3411 // if it's true, starts to do deoptimization.
3412 // It has a 4-byte slot on stack.
3413 // TODO: allocate a register for this flag.
3414 class HShouldDeoptimizeFlag final : public HVariableInputSizeInstruction {
3415  public:
3416   // CHA guards are only optimized in a separate pass and it has no side effects
3417   // with regard to other passes.
HShouldDeoptimizeFlag(ArenaAllocator * allocator,uint32_t dex_pc)3418   HShouldDeoptimizeFlag(ArenaAllocator* allocator, uint32_t dex_pc)
3419       : HVariableInputSizeInstruction(kShouldDeoptimizeFlag,
3420                                       DataType::Type::kInt32,
3421                                       SideEffects::None(),
3422                                       dex_pc,
3423                                       allocator,
3424                                       0,
3425                                       kArenaAllocCHA) {
3426   }
3427 
3428   // We do all CHA guard elimination/motion in a single pass, after which there is no
3429   // further guard elimination/motion since a guard might have been used for justification
3430   // of the elimination of another guard. Therefore, we pretend this guard cannot be moved
3431   // to avoid other optimizations trying to move it.
CanBeMoved()3432   bool CanBeMoved() const override { return false; }
3433 
3434   DECLARE_INSTRUCTION(ShouldDeoptimizeFlag);
3435 
3436  protected:
3437   DEFAULT_COPY_CONSTRUCTOR(ShouldDeoptimizeFlag);
3438 };
3439 
3440 // Represents the ArtMethod that was passed as a first argument to
3441 // the method. It is used by instructions that depend on it, like
3442 // instructions that work with the dex cache.
3443 class HCurrentMethod final : public HExpression<0> {
3444  public:
3445   explicit HCurrentMethod(DataType::Type type, uint32_t dex_pc = kNoDexPc)
HExpression(kCurrentMethod,type,SideEffects::None (),dex_pc)3446       : HExpression(kCurrentMethod, type, SideEffects::None(), dex_pc) {
3447   }
3448 
3449   DECLARE_INSTRUCTION(CurrentMethod);
3450 
3451  protected:
3452   DEFAULT_COPY_CONSTRUCTOR(CurrentMethod);
3453 };
3454 
3455 // Fetches an ArtMethod from the virtual table or the interface method table
3456 // of a class.
3457 class HClassTableGet final : public HExpression<1> {
3458  public:
3459   enum class TableKind {
3460     kVTable,
3461     kIMTable,
3462     kLast = kIMTable
3463   };
HClassTableGet(HInstruction * cls,DataType::Type type,TableKind kind,size_t index,uint32_t dex_pc)3464   HClassTableGet(HInstruction* cls,
3465                  DataType::Type type,
3466                  TableKind kind,
3467                  size_t index,
3468                  uint32_t dex_pc)
3469       : HExpression(kClassTableGet, type, SideEffects::None(), dex_pc),
3470         index_(index) {
3471     SetPackedField<TableKindField>(kind);
3472     SetRawInputAt(0, cls);
3473   }
3474 
IsClonable()3475   bool IsClonable() const override { return true; }
CanBeMoved()3476   bool CanBeMoved() const override { return true; }
InstructionDataEquals(const HInstruction * other)3477   bool InstructionDataEquals(const HInstruction* other) const override {
3478     return other->AsClassTableGet()->GetIndex() == index_ &&
3479         other->AsClassTableGet()->GetPackedFields() == GetPackedFields();
3480   }
3481 
GetTableKind()3482   TableKind GetTableKind() const { return GetPackedField<TableKindField>(); }
GetIndex()3483   size_t GetIndex() const { return index_; }
3484 
3485   DECLARE_INSTRUCTION(ClassTableGet);
3486 
3487  protected:
3488   DEFAULT_COPY_CONSTRUCTOR(ClassTableGet);
3489 
3490  private:
3491   static constexpr size_t kFieldTableKind = kNumberOfGenericPackedBits;
3492   static constexpr size_t kFieldTableKindSize =
3493       MinimumBitsToStore(static_cast<size_t>(TableKind::kLast));
3494   static constexpr size_t kNumberOfClassTableGetPackedBits = kFieldTableKind + kFieldTableKindSize;
3495   static_assert(kNumberOfClassTableGetPackedBits <= kMaxNumberOfPackedBits,
3496                 "Too many packed fields.");
3497   using TableKindField = BitField<TableKind, kFieldTableKind, kFieldTableKind>;
3498 
3499   // The index of the ArtMethod in the table.
3500   const size_t index_;
3501 };
3502 
3503 // PackedSwitch (jump table). A block ending with a PackedSwitch instruction will
3504 // have one successor for each entry in the switch table, and the final successor
3505 // will be the block containing the next Dex opcode.
3506 class HPackedSwitch final : public HExpression<1> {
3507  public:
3508   HPackedSwitch(int32_t start_value,
3509                 uint32_t num_entries,
3510                 HInstruction* input,
3511                 uint32_t dex_pc = kNoDexPc)
HExpression(kPackedSwitch,SideEffects::None (),dex_pc)3512     : HExpression(kPackedSwitch, SideEffects::None(), dex_pc),
3513       start_value_(start_value),
3514       num_entries_(num_entries) {
3515     SetRawInputAt(0, input);
3516   }
3517 
IsClonable()3518   bool IsClonable() const override { return true; }
3519 
IsControlFlow()3520   bool IsControlFlow() const override { return true; }
3521 
GetStartValue()3522   int32_t GetStartValue() const { return start_value_; }
3523 
GetNumEntries()3524   uint32_t GetNumEntries() const { return num_entries_; }
3525 
GetDefaultBlock()3526   HBasicBlock* GetDefaultBlock() const {
3527     // Last entry is the default block.
3528     return GetBlock()->GetSuccessors()[num_entries_];
3529   }
3530   DECLARE_INSTRUCTION(PackedSwitch);
3531 
3532  protected:
3533   DEFAULT_COPY_CONSTRUCTOR(PackedSwitch);
3534 
3535  private:
3536   const int32_t start_value_;
3537   const uint32_t num_entries_;
3538 };
3539 
3540 class HUnaryOperation : public HExpression<1> {
3541  public:
3542   HUnaryOperation(InstructionKind kind,
3543                   DataType::Type result_type,
3544                   HInstruction* input,
3545                   uint32_t dex_pc = kNoDexPc)
HExpression(kind,result_type,SideEffects::None (),dex_pc)3546       : HExpression(kind, result_type, SideEffects::None(), dex_pc) {
3547     SetRawInputAt(0, input);
3548   }
3549 
3550   // All of the UnaryOperation instructions are clonable.
IsClonable()3551   bool IsClonable() const override { return true; }
3552 
GetInput()3553   HInstruction* GetInput() const { return InputAt(0); }
GetResultType()3554   DataType::Type GetResultType() const { return GetType(); }
3555 
CanBeMoved()3556   bool CanBeMoved() const override { return true; }
InstructionDataEquals(const HInstruction * other ATTRIBUTE_UNUSED)3557   bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
3558     return true;
3559   }
3560 
3561   // Try to statically evaluate `this` and return a HConstant
3562   // containing the result of this evaluation.  If `this` cannot
3563   // be evaluated as a constant, return null.
3564   HConstant* TryStaticEvaluation() const;
3565 
3566   // Apply this operation to `x`.
3567   virtual HConstant* Evaluate(HIntConstant* x) const = 0;
3568   virtual HConstant* Evaluate(HLongConstant* x) const = 0;
3569   virtual HConstant* Evaluate(HFloatConstant* x) const = 0;
3570   virtual HConstant* Evaluate(HDoubleConstant* x) const = 0;
3571 
3572   DECLARE_ABSTRACT_INSTRUCTION(UnaryOperation);
3573 
3574  protected:
3575   DEFAULT_COPY_CONSTRUCTOR(UnaryOperation);
3576 };
3577 
3578 class HBinaryOperation : public HExpression<2> {
3579  public:
3580   HBinaryOperation(InstructionKind kind,
3581                    DataType::Type result_type,
3582                    HInstruction* left,
3583                    HInstruction* right,
3584                    SideEffects side_effects = SideEffects::None(),
3585                    uint32_t dex_pc = kNoDexPc)
HExpression(kind,result_type,side_effects,dex_pc)3586       : HExpression(kind, result_type, side_effects, dex_pc) {
3587     SetRawInputAt(0, left);
3588     SetRawInputAt(1, right);
3589   }
3590 
3591   // All of the BinaryOperation instructions are clonable.
IsClonable()3592   bool IsClonable() const override { return true; }
3593 
GetLeft()3594   HInstruction* GetLeft() const { return InputAt(0); }
GetRight()3595   HInstruction* GetRight() const { return InputAt(1); }
GetResultType()3596   DataType::Type GetResultType() const { return GetType(); }
3597 
IsCommutative()3598   virtual bool IsCommutative() const { return false; }
3599 
3600   // Put constant on the right.
3601   // Returns whether order is changed.
OrderInputsWithConstantOnTheRight()3602   bool OrderInputsWithConstantOnTheRight() {
3603     HInstruction* left = InputAt(0);
3604     HInstruction* right = InputAt(1);
3605     if (left->IsConstant() && !right->IsConstant()) {
3606       ReplaceInput(right, 0);
3607       ReplaceInput(left, 1);
3608       return true;
3609     }
3610     return false;
3611   }
3612 
3613   // Order inputs by instruction id, but favor constant on the right side.
3614   // This helps GVN for commutative ops.
OrderInputs()3615   void OrderInputs() {
3616     DCHECK(IsCommutative());
3617     HInstruction* left = InputAt(0);
3618     HInstruction* right = InputAt(1);
3619     if (left == right || (!left->IsConstant() && right->IsConstant())) {
3620       return;
3621     }
3622     if (OrderInputsWithConstantOnTheRight()) {
3623       return;
3624     }
3625     // Order according to instruction id.
3626     if (left->GetId() > right->GetId()) {
3627       ReplaceInput(right, 0);
3628       ReplaceInput(left, 1);
3629     }
3630   }
3631 
CanBeMoved()3632   bool CanBeMoved() const override { return true; }
InstructionDataEquals(const HInstruction * other ATTRIBUTE_UNUSED)3633   bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
3634     return true;
3635   }
3636 
3637   // Try to statically evaluate `this` and return a HConstant
3638   // containing the result of this evaluation.  If `this` cannot
3639   // be evaluated as a constant, return null.
3640   HConstant* TryStaticEvaluation() const;
3641 
3642   // Apply this operation to `x` and `y`.
Evaluate(HNullConstant * x ATTRIBUTE_UNUSED,HNullConstant * y ATTRIBUTE_UNUSED)3643   virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3644                               HNullConstant* y ATTRIBUTE_UNUSED) const {
3645     LOG(FATAL) << DebugName() << " is not defined for the (null, null) case.";
3646     UNREACHABLE();
3647   }
3648   virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0;
3649   virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0;
Evaluate(HLongConstant * x ATTRIBUTE_UNUSED,HIntConstant * y ATTRIBUTE_UNUSED)3650   virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED,
3651                               HIntConstant* y ATTRIBUTE_UNUSED) const {
3652     LOG(FATAL) << DebugName() << " is not defined for the (long, int) case.";
3653     UNREACHABLE();
3654   }
3655   virtual HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const = 0;
3656   virtual HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const = 0;
3657 
3658   // Returns an input that can legally be used as the right input and is
3659   // constant, or null.
3660   HConstant* GetConstantRight() const;
3661 
3662   // If `GetConstantRight()` returns one of the input, this returns the other
3663   // one. Otherwise it returns null.
3664   HInstruction* GetLeastConstantLeft() const;
3665 
3666   DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation);
3667 
3668  protected:
3669   DEFAULT_COPY_CONSTRUCTOR(BinaryOperation);
3670 };
3671 
3672 // The comparison bias applies for floating point operations and indicates how NaN
3673 // comparisons are treated:
3674 enum class ComparisonBias {  // private marker to avoid generate-operator-out.py from processing.
3675   kNoBias,  // bias is not applicable (i.e. for long operation)
3676   kGtBias,  // return 1 for NaN comparisons
3677   kLtBias,  // return -1 for NaN comparisons
3678   kLast = kLtBias
3679 };
3680 
3681 std::ostream& operator<<(std::ostream& os, ComparisonBias rhs);
3682 
3683 class HCondition : public HBinaryOperation {
3684  public:
3685   HCondition(InstructionKind kind,
3686              HInstruction* first,
3687              HInstruction* second,
3688              uint32_t dex_pc = kNoDexPc)
HBinaryOperation(kind,DataType::Type::kBool,first,second,SideEffects::None (),dex_pc)3689       : HBinaryOperation(kind,
3690                          DataType::Type::kBool,
3691                          first,
3692                          second,
3693                          SideEffects::None(),
3694                          dex_pc) {
3695     SetPackedField<ComparisonBiasField>(ComparisonBias::kNoBias);
3696   }
3697 
3698   // For code generation purposes, returns whether this instruction is just before
3699   // `instruction`, and disregard moves in between.
3700   bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;
3701 
3702   DECLARE_ABSTRACT_INSTRUCTION(Condition);
3703 
3704   virtual IfCondition GetCondition() const = 0;
3705 
3706   virtual IfCondition GetOppositeCondition() const = 0;
3707 
3708   bool IsGtBias() const { return GetBias() == ComparisonBias::kGtBias; }
3709   bool IsLtBias() const { return GetBias() == ComparisonBias::kLtBias; }
3710
3711   ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
3712   void SetBias(ComparisonBias bias) { SetPackedField<ComparisonBiasField>(bias); }
3713
3714   bool InstructionDataEquals(const HInstruction* other) const override {
3715     return GetPackedFields() == other->AsCondition()->GetPackedFields();
3716   }
3717
3718   bool IsFPConditionTrueIfNaN() const {
3719     DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3720     IfCondition if_cond = GetCondition();
3721     if (if_cond == kCondNE) {
3722       return true;
3723     } else if (if_cond == kCondEQ) {
3724       return false;
3725     }
3726     return ((if_cond == kCondGT) || (if_cond == kCondGE)) && IsGtBias();
3727   }
3728 
3729   bool IsFPConditionFalseIfNaN() const {
3730     DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3731     IfCondition if_cond = GetCondition();
3732     if (if_cond == kCondEQ) {
3733       return true;
3734     } else if (if_cond == kCondNE) {
3735       return false;
3736     }
3737     return ((if_cond == kCondLT) || (if_cond == kCondLE)) && IsGtBias();
3738   }
3739 
3740  protected:
3741   // Needed if we merge a HCompare into a HCondition.
3742   static constexpr size_t kFieldComparisonBias = kNumberOfGenericPackedBits;
3743   static constexpr size_t kFieldComparisonBiasSize =
3744       MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
3745   static constexpr size_t kNumberOfConditionPackedBits =
3746       kFieldComparisonBias + kFieldComparisonBiasSize;
3747   static_assert(kNumberOfConditionPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
3748   using ComparisonBiasField =
3749       BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;
3750 
3751   template <typename T>
3752   int32_t Compare(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }
3753
3754   template <typename T>
3755   int32_t CompareFP(T x, T y) const {
3756     DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3757     DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
3758     // Handle the bias.
3759     return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compare(x, y);
3760   }
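  // Illustrative values: under kGtBias, CompareFP(NaN, 0.0f) and CompareFP(0.0f, NaN)
  // both return 1, because std::isunordered() is true whenever either operand is NaN;
  // ordered inputs fall through to the plain Compare() above.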
3761 
3762   // Return an integer constant containing the result of a condition evaluated at compile time.
3763   HIntConstant* MakeConstantCondition(bool value, uint32_t dex_pc) const {
3764     return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
3765   }
3766 
3767   DEFAULT_COPY_CONSTRUCTOR(Condition);
3768 };
3769 
3770 // Instruction to check if two inputs are equal to each other.
3771 class HEqual final : public HCondition {
3772  public:
3773   HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3774       : HCondition(kEqual, first, second, dex_pc) {
3775   }
3776
3777   bool IsCommutative() const override { return true; }
3778
3779   HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3780                       HNullConstant* y ATTRIBUTE_UNUSED) const override {
3781     return MakeConstantCondition(true, GetDexPc());
3782   }
3783   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
3784     return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3785   }
3786   // In the following Evaluate methods, a HCompare instruction has
3787   // been merged into this HEqual instruction; evaluate it as
3788   // `Compare(x, y) == 0`.
3789   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
3790     return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0),
3791                                  GetDexPc());
3792   }
3793   HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
3794     return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3795   }
3796   HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
3797     return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3798   }
3799 
3800   DECLARE_INSTRUCTION(Equal);
3801 
3802   IfCondition GetCondition() const override {
3803     return kCondEQ;
3804   }
3805
3806   IfCondition GetOppositeCondition() const override {
3807     return kCondNE;
3808   }
3809 
3810  protected:
3811   DEFAULT_COPY_CONSTRUCTOR(Equal);
3812 
3813  private:
3814   template <typename T> static bool Compute(T x, T y) { return x == y; }
3815 };
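// As a concrete folding example: an HEqual whose two inputs are the null constant is
// evaluated by the HNullConstant overload above and produces the graph's IntConstant(1),
// which constant folding can then substitute for the condition.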
3816 
3817 class HNotEqual final : public HCondition {
3818  public:
3819   HNotEqual(HInstruction* first, HInstruction* second,
3820             uint32_t dex_pc = kNoDexPc)
3821       : HCondition(kNotEqual, first, second, dex_pc) {
3822   }
3823
3824   bool IsCommutative() const override { return true; }
3825
3826   HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3827                       HNullConstant* y ATTRIBUTE_UNUSED) const override {
3828     return MakeConstantCondition(false, GetDexPc());
3829   }
3830   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
3831     return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3832   }
3833   // In the following Evaluate methods, a HCompare instruction has
3834   // been merged into this HNotEqual instruction; evaluate it as
3835   // `Compare(x, y) != 0`.
3836   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
3837     return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3838   }
3839   HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
3840     return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3841   }
3842   HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
3843     return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3844   }
3845 
3846   DECLARE_INSTRUCTION(NotEqual);
3847 
3848   IfCondition GetCondition() const override {
3849     return kCondNE;
3850   }
3851
3852   IfCondition GetOppositeCondition() const override {
3853     return kCondEQ;
3854   }
3855 
3856  protected:
3857   DEFAULT_COPY_CONSTRUCTOR(NotEqual);
3858 
3859  private:
3860   template <typename T> static bool Compute(T x, T y) { return x != y; }
3861 };
3862 
3863 class HLessThan final : public HCondition {
3864  public:
3865   HLessThan(HInstruction* first, HInstruction* second,
3866             uint32_t dex_pc = kNoDexPc)
3867       : HCondition(kLessThan, first, second, dex_pc) {
3868   }
3869
3870   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
3871     return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3872   }
3873   // In the following Evaluate methods, a HCompare instruction has
3874   // been merged into this HLessThan instruction; evaluate it as
3875   // `Compare(x, y) < 0`.
3876   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
3877     return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3878   }
3879   HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
3880     return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3881   }
3882   HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
3883     return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3884   }
3885 
3886   DECLARE_INSTRUCTION(LessThan);
3887 
3888   IfCondition GetCondition() const override {
3889     return kCondLT;
3890   }
3891
3892   IfCondition GetOppositeCondition() const override {
3893     return kCondGE;
3894   }
3895 
3896  protected:
3897   DEFAULT_COPY_CONSTRUCTOR(LessThan);
3898 
3899  private:
3900   template <typename T> static bool Compute(T x, T y) { return x < y; }
3901 };
3902 
3903 class HLessThanOrEqual final : public HCondition {
3904  public:
3905   HLessThanOrEqual(HInstruction* first, HInstruction* second,
3906                    uint32_t dex_pc = kNoDexPc)
3907       : HCondition(kLessThanOrEqual, first, second, dex_pc) {
3908   }
3909
3910   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
3911     return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3912   }
3913   // In the following Evaluate methods, a HCompare instruction has
3914   // been merged into this HLessThanOrEqual instruction; evaluate it as
3915   // `Compare(x, y) <= 0`.
3916   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
3917     return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3918   }
3919   HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
3920     return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3921   }
3922   HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
3923     return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3924   }
3925 
3926   DECLARE_INSTRUCTION(LessThanOrEqual);
3927 
3928   IfCondition GetCondition() const override {
3929     return kCondLE;
3930   }
3931
3932   IfCondition GetOppositeCondition() const override {
3933     return kCondGT;
3934   }
3935 
3936  protected:
3937   DEFAULT_COPY_CONSTRUCTOR(LessThanOrEqual);
3938 
3939  private:
3940   template <typename T> static bool Compute(T x, T y) { return x <= y; }
3941 };
3942 
3943 class HGreaterThan final : public HCondition {
3944  public:
3945   HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3946       : HCondition(kGreaterThan, first, second, dex_pc) {
3947   }
3948
3949   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
3950     return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3951   }
3952   // In the following Evaluate methods, a HCompare instruction has
3953   // been merged into this HGreaterThan instruction; evaluate it as
3954   // `Compare(x, y) > 0`.
3955   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
3956     return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3957   }
3958   HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
3959     return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3960   }
3961   HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
3962     return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3963   }
3964 
3965   DECLARE_INSTRUCTION(GreaterThan);
3966 
3967   IfCondition GetCondition() const override {
3968     return kCondGT;
3969   }
3970
3971   IfCondition GetOppositeCondition() const override {
3972     return kCondLE;
3973   }
3974 
3975  protected:
3976   DEFAULT_COPY_CONSTRUCTOR(GreaterThan);
3977 
3978  private:
3979   template <typename T> static bool Compute(T x, T y) { return x > y; }
3980 };
3981 
3982 class HGreaterThanOrEqual final : public HCondition {
3983  public:
3984   HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3985       : HCondition(kGreaterThanOrEqual, first, second, dex_pc) {
3986   }
3987
3988   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
3989     return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3990   }
3991   // In the following Evaluate methods, a HCompare instruction has
3992   // been merged into this HGreaterThanOrEqual instruction; evaluate it as
3993   // `Compare(x, y) >= 0`.
3994   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
3995     return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3996   }
3997   HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
3998     return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3999   }
4000   HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
4001     return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
4002   }
4003 
4004   DECLARE_INSTRUCTION(GreaterThanOrEqual);
4005 
4006   IfCondition GetCondition() const override {
4007     return kCondGE;
4008   }
4009
4010   IfCondition GetOppositeCondition() const override {
4011     return kCondLT;
4012   }
4013 
4014  protected:
4015   DEFAULT_COPY_CONSTRUCTOR(GreaterThanOrEqual);
4016 
4017  private:
4018   template <typename T> static bool Compute(T x, T y) { return x >= y; }
4019 };
4020 
4021 class HBelow final : public HCondition {
4022  public:
4023   HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
4024       : HCondition(kBelow, first, second, dex_pc) {
4025   }
4026
4027   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4028     return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4029   }
4030   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4031     return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4032   }
4033   HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4034                       HFloatConstant* y ATTRIBUTE_UNUSED) const override {
4035     LOG(FATAL) << DebugName() << " is not defined for float values";
4036     UNREACHABLE();
4037   }
4038   HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4039                       HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
4040     LOG(FATAL) << DebugName() << " is not defined for double values";
4041     UNREACHABLE();
4042   }
4043 
4044   DECLARE_INSTRUCTION(Below);
4045 
4046   IfCondition GetCondition() const override {
4047     return kCondB;
4048   }
4049
4050   IfCondition GetOppositeCondition() const override {
4051     return kCondAE;
4052   }
4053 
4054  protected:
4055   DEFAULT_COPY_CONSTRUCTOR(Below);
4056 
4057  private:
4058   template <typename T> static bool Compute(T x, T y) {
4059     return MakeUnsigned(x) < MakeUnsigned(y);
4060   }
4061 };
4062 
4063 class HBelowOrEqual final : public HCondition {
4064  public:
4065   HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
4066       : HCondition(kBelowOrEqual, first, second, dex_pc) {
4067   }
4068
4069   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4070     return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4071   }
4072   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4073     return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4074   }
4075   HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4076                       HFloatConstant* y ATTRIBUTE_UNUSED) const override {
4077     LOG(FATAL) << DebugName() << " is not defined for float values";
4078     UNREACHABLE();
4079   }
4080   HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4081                       HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
4082     LOG(FATAL) << DebugName() << " is not defined for double values";
4083     UNREACHABLE();
4084   }
4085 
4086   DECLARE_INSTRUCTION(BelowOrEqual);
4087 
4088   IfCondition GetCondition() const override {
4089     return kCondBE;
4090   }
4091
4092   IfCondition GetOppositeCondition() const override {
4093     return kCondA;
4094   }
4095 
4096  protected:
4097   DEFAULT_COPY_CONSTRUCTOR(BelowOrEqual);
4098 
4099  private:
4100   template <typename T> static bool Compute(T x, T y) {
4101     return MakeUnsigned(x) <= MakeUnsigned(y);
4102   }
4103 };
4104 
4105 class HAbove final : public HCondition {
4106  public:
4107   HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
4108       : HCondition(kAbove, first, second, dex_pc) {
4109   }
4110
4111   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4112     return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4113   }
4114   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4115     return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4116   }
4117   HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4118                       HFloatConstant* y ATTRIBUTE_UNUSED) const override {
4119     LOG(FATAL) << DebugName() << " is not defined for float values";
4120     UNREACHABLE();
4121   }
4122   HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4123                       HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
4124     LOG(FATAL) << DebugName() << " is not defined for double values";
4125     UNREACHABLE();
4126   }
4127 
4128   DECLARE_INSTRUCTION(Above);
4129 
4130   IfCondition GetCondition() const override {
4131     return kCondA;
4132   }
4133
4134   IfCondition GetOppositeCondition() const override {
4135     return kCondBE;
4136   }
4137 
4138  protected:
4139   DEFAULT_COPY_CONSTRUCTOR(Above);
4140 
4141  private:
4142   template <typename T> static bool Compute(T x, T y) {
4143     return MakeUnsigned(x) > MakeUnsigned(y);
4144   }
4145 };
4146 
4147 class HAboveOrEqual final : public HCondition {
4148  public:
4149   HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
4150       : HCondition(kAboveOrEqual, first, second, dex_pc) {
4151   }
4152
4153   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4154     return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4155   }
4156   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4157     return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4158   }
4159   HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4160                       HFloatConstant* y ATTRIBUTE_UNUSED) const override {
4161     LOG(FATAL) << DebugName() << " is not defined for float values";
4162     UNREACHABLE();
4163   }
4164   HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4165                       HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
4166     LOG(FATAL) << DebugName() << " is not defined for double values";
4167     UNREACHABLE();
4168   }
4169 
4170   DECLARE_INSTRUCTION(AboveOrEqual);
4171 
4172   IfCondition GetCondition() const override {
4173     return kCondAE;
4174   }
4175
4176   IfCondition GetOppositeCondition() const override {
4177     return kCondB;
4178   }
4179 
4180  protected:
4181   DEFAULT_COPY_CONSTRUCTOR(AboveOrEqual);
4182 
4183  private:
4184   template <typename T> static bool Compute(T x, T y) {
4185     return MakeUnsigned(x) >= MakeUnsigned(y);
4186   }
4187 };
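// The four conditions above compare the unsigned reinterpretation of their inputs via
// MakeUnsigned(). For example, with int32 constants, Compute(-1, 1) treats -1 as
// 0xFFFFFFFF, so HBelow(-1, 1) folds to false while HAbove(-1, 1) folds to true.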
4188 
4189 // Instruction to check how two inputs compare to each other.
4190 // Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1.
4191 class HCompare final : public HBinaryOperation {
4192  public:
4193   // Note that `comparison_type` is the type of comparison performed
4194   // between the comparison's inputs, not the type of the instantiated
4195   // HCompare instruction (which is always DataType::Type::kInt32).
4196   HCompare(DataType::Type comparison_type,
4197            HInstruction* first,
4198            HInstruction* second,
4199            ComparisonBias bias,
4200            uint32_t dex_pc)
4201       : HBinaryOperation(kCompare,
4202                          DataType::Type::kInt32,
4203                          first,
4204                          second,
4205                          SideEffectsForArchRuntimeCalls(comparison_type),
4206                          dex_pc) {
4207     SetPackedField<ComparisonBiasField>(bias);
4208   }
4209 
4210   template <typename T>
4211   int32_t Compute(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }
4212
4213   template <typename T>
4214   int32_t ComputeFP(T x, T y) const {
4215     DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
4216     DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
4217     // Handle the bias.
4218     return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compute(x, y);
4219   }
4220 
4221   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4222     // Note that there is no "cmp-int" Dex instruction so we shouldn't
4223     // reach this code path when processing a freshly built HIR
4224     // graph. However HCompare integer instructions can be synthesized
4225     // by the instruction simplifier to implement IntegerCompare and
4226     // IntegerSignum intrinsics, so we have to handle this case.
4227     return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4228   }
4229   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4230     return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4231   }
4232   HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
4233     return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4234   }
4235   HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
4236     return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4237   }
4238
4239   bool InstructionDataEquals(const HInstruction* other) const override {
4240     return GetPackedFields() == other->AsCompare()->GetPackedFields();
4241   }
4242
4243   ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
4244
4245   // Does this compare instruction have a "gt bias" (vs an "lt bias")?
4246   // Only meaningful for floating-point comparisons.
4247   bool IsGtBias() const {
4248     DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
4249     return GetBias() == ComparisonBias::kGtBias;
4250   }
4251 
4252   static SideEffects SideEffectsForArchRuntimeCalls(DataType::Type type ATTRIBUTE_UNUSED) {
4253     // Comparisons do not require a runtime call in any back end.
4254     return SideEffects::None();
4255   }
4256 
4257   DECLARE_INSTRUCTION(Compare);
4258 
4259  protected:
4260   static constexpr size_t kFieldComparisonBias = kNumberOfGenericPackedBits;
4261   static constexpr size_t kFieldComparisonBiasSize =
4262       MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
4263   static constexpr size_t kNumberOfComparePackedBits =
4264       kFieldComparisonBias + kFieldComparisonBiasSize;
4265   static_assert(kNumberOfComparePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
4266   using ComparisonBiasField =
4267       BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;
4268 
4269   // Return an integer constant containing the result of a comparison evaluated at compile time.
4270   HIntConstant* MakeConstantComparison(int32_t value, uint32_t dex_pc) const {
4271     DCHECK(value == -1 || value == 0 || value == 1) << value;
4272     return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
4273   }
4274 
4275   DEFAULT_COPY_CONSTRUCTOR(Compare);
4276 };
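// A minimal folding sketch (assuming both inputs are long constants and `graph` is a
// hypothetical name for the enclosing HGraph):
//   HCompare(DataType::Type::kInt64, graph->GetLongConstant(2), graph->GetLongConstant(3),
//            ComparisonBias::kNoBias, dex_pc)
// evaluates to the graph's IntConstant(-1), matching the Dex cmp-long result for 2 vs 3.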
4277 
4278 class HNewInstance final : public HExpression<1> {
4279  public:
4280   HNewInstance(HInstruction* cls,
4281                uint32_t dex_pc,
4282                dex::TypeIndex type_index,
4283                const DexFile& dex_file,
4284                bool finalizable,
4285                QuickEntrypointEnum entrypoint)
4286       : HExpression(kNewInstance,
4287                     DataType::Type::kReference,
4288                     SideEffects::CanTriggerGC(),
4289                     dex_pc),
4290         type_index_(type_index),
4291         dex_file_(dex_file),
4292         entrypoint_(entrypoint) {
4293     SetPackedFlag<kFlagFinalizable>(finalizable);
4294     SetRawInputAt(0, cls);
4295   }
4296 
4297   bool IsClonable() const override { return true; }
4298
4299   dex::TypeIndex GetTypeIndex() const { return type_index_; }
4300   const DexFile& GetDexFile() const { return dex_file_; }
4301
4302   // Calls runtime so needs an environment.
4303   bool NeedsEnvironment() const override { return true; }
4304
4305   // Can throw errors when out-of-memory or if it's not instantiable/accessible.
4306   bool CanThrow() const override { return true; }
4307
4308   bool NeedsChecks() const {
4309     return entrypoint_ == kQuickAllocObjectWithChecks;
4310   }
4311
4312   bool IsFinalizable() const { return GetPackedFlag<kFlagFinalizable>(); }
4313
4314   bool CanBeNull() const override { return false; }
4315
4316   QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }
4317
4318   void SetEntrypoint(QuickEntrypointEnum entrypoint) {
4319     entrypoint_ = entrypoint;
4320   }
4321
4322   HLoadClass* GetLoadClass() const {
4323     HInstruction* input = InputAt(0);
4324     if (input->IsClinitCheck()) {
4325       input = input->InputAt(0);
4326     }
4327     DCHECK(input->IsLoadClass());
4328     return input->AsLoadClass();
4329   }
4330 
4331   bool IsStringAlloc() const;
4332 
4333   DECLARE_INSTRUCTION(NewInstance);
4334 
4335  protected:
4336   DEFAULT_COPY_CONSTRUCTOR(NewInstance);
4337 
4338  private:
4339   static constexpr size_t kFlagFinalizable = kNumberOfGenericPackedBits;
4340   static constexpr size_t kNumberOfNewInstancePackedBits = kFlagFinalizable + 1;
4341   static_assert(kNumberOfNewInstancePackedBits <= kMaxNumberOfPackedBits,
4342                 "Too many packed fields.");
4343 
4344   const dex::TypeIndex type_index_;
4345   const DexFile& dex_file_;
4346   QuickEntrypointEnum entrypoint_;
4347 };
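// The single input of HNewInstance is either the HLoadClass of the allocated type or an
// HClinitCheck wrapping that HLoadClass; GetLoadClass() above looks through the check,
// i.e. it handles both HLoadClass -> HNewInstance and HLoadClass -> HClinitCheck -> HNewInstance.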
4348 
4349 enum IntrinsicNeedsEnvironmentOrCache {
4350   kNoEnvironmentOrCache,        // Intrinsic does not require an environment or dex cache.
4351   kNeedsEnvironmentOrCache      // Intrinsic requires an environment or requires a dex cache.
4352 };
4353 
4354 enum IntrinsicSideEffects {
4355   kNoSideEffects,     // Intrinsic does not have any heap memory side effects.
4356   kReadSideEffects,   // Intrinsic may read heap memory.
4357   kWriteSideEffects,  // Intrinsic may write heap memory.
4358   kAllSideEffects     // Intrinsic may read or write heap memory, or trigger GC.
4359 };
4360 
4361 enum IntrinsicExceptions {
4362   kNoThrow,  // Intrinsic does not throw any exceptions.
4363   kCanThrow  // Intrinsic may throw exceptions.
4364 };
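// Sketch of how these three enums are intended to be used together (see SetIntrinsic()
// below): when an invoke is recognized as an intrinsic, the recognizer can relax the
// conservative defaults, e.g. a side-effect-free, non-throwing intrinsic would pass
//   SetIntrinsic(intrinsic, kNoEnvironmentOrCache, kNoSideEffects, kNoThrow);
// allowing the invoke to be treated as movable and non-throwing.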
4365 
4366 class HInvoke : public HVariableInputSizeInstruction {
4367  public:
4368   bool NeedsEnvironment() const override;
4369 
4370   void SetArgumentAt(size_t index, HInstruction* argument) {
4371     SetRawInputAt(index, argument);
4372   }
4373 
4374   // Return the number of arguments.  This number can be lower than
4375   // the number of inputs returned by InputCount(), as some invoke
4376   // instructions (e.g. HInvokeStaticOrDirect) can have non-argument
4377   // inputs at the end of their list of inputs.
4378   uint32_t GetNumberOfArguments() const { return number_of_arguments_; }
4379
4380   uint32_t GetDexMethodIndex() const { return dex_method_index_; }
4381
4382   InvokeType GetInvokeType() const {
4383     return GetPackedField<InvokeTypeField>();
4384   }
4385
4386   Intrinsics GetIntrinsic() const {
4387     return intrinsic_;
4388   }
4389 
4390   void SetIntrinsic(Intrinsics intrinsic,
4391                     IntrinsicNeedsEnvironmentOrCache needs_env_or_cache,
4392                     IntrinsicSideEffects side_effects,
4393                     IntrinsicExceptions exceptions);
4394 
4395   bool IsFromInlinedInvoke() const {
4396     return GetEnvironment()->IsFromInlinedInvoke();
4397   }
4398
4399   void SetCanThrow(bool can_throw) { SetPackedFlag<kFlagCanThrow>(can_throw); }
4400
4401   bool CanThrow() const override { return GetPackedFlag<kFlagCanThrow>(); }
4402
4403   void SetAlwaysThrows(bool always_throws) { SetPackedFlag<kFlagAlwaysThrows>(always_throws); }
4404
4405   bool AlwaysThrows() const override { return GetPackedFlag<kFlagAlwaysThrows>(); }
4406
4407   bool CanBeMoved() const override { return IsIntrinsic() && !DoesAnyWrite(); }
4408
4409   bool InstructionDataEquals(const HInstruction* other) const override {
4410     return intrinsic_ != Intrinsics::kNone && intrinsic_ == other->AsInvoke()->intrinsic_;
4411   }
4412
4413   uint32_t* GetIntrinsicOptimizations() {
4414     return &intrinsic_optimizations_;
4415   }
4416
4417   const uint32_t* GetIntrinsicOptimizations() const {
4418     return &intrinsic_optimizations_;
4419   }
4420
4421   bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }
4422
4423   ArtMethod* GetResolvedMethod() const { return resolved_method_; }
4424   void SetResolvedMethod(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_);
4425 
4426   DECLARE_ABSTRACT_INSTRUCTION(Invoke);
4427 
4428  protected:
4429   static constexpr size_t kFieldInvokeType = kNumberOfGenericPackedBits;
4430   static constexpr size_t kFieldInvokeTypeSize =
4431       MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
4432   static constexpr size_t kFlagCanThrow = kFieldInvokeType + kFieldInvokeTypeSize;
4433   static constexpr size_t kFlagAlwaysThrows = kFlagCanThrow + 1;
4434   static constexpr size_t kNumberOfInvokePackedBits = kFlagAlwaysThrows + 1;
4435   static_assert(kNumberOfInvokePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
4436   using InvokeTypeField = BitField<InvokeType, kFieldInvokeType, kFieldInvokeTypeSize>;
4437 
4438   HInvoke(InstructionKind kind,
4439           ArenaAllocator* allocator,
4440           uint32_t number_of_arguments,
4441           uint32_t number_of_other_inputs,
4442           DataType::Type return_type,
4443           uint32_t dex_pc,
4444           uint32_t dex_method_index,
4445           ArtMethod* resolved_method,
4446           InvokeType invoke_type)
4447     : HVariableInputSizeInstruction(
4448           kind,
4449           return_type,
4450           SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
4451           dex_pc,
4452           allocator,
4453           number_of_arguments + number_of_other_inputs,
4454           kArenaAllocInvokeInputs),
4455       number_of_arguments_(number_of_arguments),
4456       dex_method_index_(dex_method_index),
4457       intrinsic_(Intrinsics::kNone),
4458       intrinsic_optimizations_(0) {
4459     SetPackedField<InvokeTypeField>(invoke_type);
4460     SetPackedFlag<kFlagCanThrow>(true);
4461     // Check mutator lock, constructors lack annotalysis support.
4462     Locks::mutator_lock_->AssertNotExclusiveHeld(Thread::Current());
4463     SetResolvedMethod(resolved_method);
4464   }
4465 
4466   DEFAULT_COPY_CONSTRUCTOR(Invoke);
4467 
4468   uint32_t number_of_arguments_;
4469   ArtMethod* resolved_method_;
4470   const uint32_t dex_method_index_;
4471   Intrinsics intrinsic_;
4472 
4473   // A magic word holding optimizations for intrinsics. See intrinsics.h.
4474   uint32_t intrinsic_optimizations_;
4475 };
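// Illustrative construction sketch (the local names `invoke` and `arguments` are
// hypothetical): after allocating a concrete HInvoke subclass with room for
// `number_of_arguments` inputs, the builder fills the argument slots in order:
//   for (size_t i = 0; i < arguments.size(); ++i) {
//     invoke->SetArgumentAt(i, arguments[i]);
//   }
// Any non-argument inputs occupy the remaining slots after the arguments.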
4476 
4477 class HInvokeUnresolved final : public HInvoke {
4478  public:
4479   HInvokeUnresolved(ArenaAllocator* allocator,
4480                     uint32_t number_of_arguments,
4481                     DataType::Type return_type,
4482                     uint32_t dex_pc,
4483                     uint32_t dex_method_index,
4484                     InvokeType invoke_type)
4485       : HInvoke(kInvokeUnresolved,
4486                 allocator,
4487                 number_of_arguments,
4488                 /* number_of_other_inputs= */ 0u,
4489                 return_type,
4490                 dex_pc,
4491                 dex_method_index,
4492                 nullptr,
4493                 invoke_type) {
4494   }
4495 
4496   bool IsClonable() const override { return true; }
4497 
4498   DECLARE_INSTRUCTION(InvokeUnresolved);
4499 
4500  protected:
4501   DEFAULT_COPY_CONSTRUCTOR(InvokeUnresolved);
4502 };
4503 
4504 class HInvokePolymorphic final : public HInvoke {
4505  public:
4506   HInvokePolymorphic(ArenaAllocator* allocator,
4507                      uint32_t number_of_arguments,
4508                      DataType::Type return_type,
4509                      uint32_t dex_pc,
4510                      uint32_t dex_method_index,
4511                      // resolved_method is the ArtMethod object corresponding to the polymorphic
4512                      // method (e.g. VarHandle.get), resolved using the class linker. It is needed
4513                      // to pass intrinsic information to the HInvokePolymorphic node.
4514                      ArtMethod* resolved_method)
4515       : HInvoke(kInvokePolymorphic,
4516                 allocator,
4517                 number_of_arguments,
4518                 /* number_of_other_inputs= */ 0u,
4519                 return_type,
4520                 dex_pc,
4521                 dex_method_index,
4522                 resolved_method,
4523                 kPolymorphic) {
4524   }
4525 
4526   bool IsClonable() const override { return true; }
4527 
4528   DECLARE_INSTRUCTION(InvokePolymorphic);
4529 
4530  protected:
4531   DEFAULT_COPY_CONSTRUCTOR(InvokePolymorphic);
4532 };
4533 
4534 class HInvokeCustom final : public HInvoke {
4535  public:
4536   HInvokeCustom(ArenaAllocator* allocator,
4537                 uint32_t number_of_arguments,
4538                 uint32_t call_site_index,
4539                 DataType::Type return_type,
4540                 uint32_t dex_pc)
4541       : HInvoke(kInvokeCustom,
4542                 allocator,
4543                 number_of_arguments,
4544                 /* number_of_other_inputs= */ 0u,
4545                 return_type,
4546                 dex_pc,
4547                 /* dex_method_index= */ dex::kDexNoIndex,
4548                 /* resolved_method= */ nullptr,
4549                 kStatic),
4550       call_site_index_(call_site_index) {
4551   }
4552 
4553   uint32_t GetCallSiteIndex() const { return call_site_index_; }
4554
4555   bool IsClonable() const override { return true; }
4556 
4557   DECLARE_INSTRUCTION(InvokeCustom);
4558 
4559  protected:
4560   DEFAULT_COPY_CONSTRUCTOR(InvokeCustom);
4561 
4562  private:
4563   uint32_t call_site_index_;
4564 };
4565 
4566 class HInvokeStaticOrDirect final : public HInvoke {
4567  public:
4568   // Requirements of this method call regarding the class
4569   // initialization (clinit) check of its declaring class.
4570   enum class ClinitCheckRequirement {  // private marker to avoid generate-operator-out.py from processing.
4571     kNone,      // Class already initialized.
4572     kExplicit,  // Static call having explicit clinit check as last input.
4573     kImplicit,  // Static call implicitly requiring a clinit check.
4574     kLast = kImplicit
4575   };
4576 
4577   // Determines how to load the target ArtMethod*.
4578   enum class MethodLoadKind {
4579     // Use a String init ArtMethod* loaded from Thread entrypoints.
4580     kStringInit,
4581 
4582     // Use the method's own ArtMethod* loaded by the register allocator.
4583     kRecursive,
4584 
4585     // Use PC-relative boot image ArtMethod* address that will be known at link time.
4586     // Used for boot image methods referenced by boot image code.
4587     kBootImageLinkTimePcRelative,
4588 
4589     // Load from an entry in the .data.bimg.rel.ro using a PC-relative load.
4590     // Used for app->boot calls with relocatable image.
4591     kBootImageRelRo,
4592 
4593     // Load from an entry in the .bss section using a PC-relative load.
4594     // Used for methods outside boot image referenced by AOT-compiled app and boot image code.
4595     kBssEntry,
4596 
4597     // Use ArtMethod* at a known address, embed the direct address in the code.
4598     // Used for JIT-compiled calls.
4599     kJitDirectAddress,
4600 
4601     // Make a runtime call to resolve and call the method. This is the last-resort-kind
4602     // used when other kinds are unimplemented on a particular architecture.
4603     kRuntimeCall,
4604   };
4605 
4606   // Determines the location of the code pointer.
4607   enum class CodePtrLocation {
4608     // Recursive call, use local PC-relative call instruction.
4609     kCallSelf,
4610 
4611     // Use native pointer from the ArtMethod*.
4612     // Used for @CriticalNative to avoid going through the compiled stub. This call goes through
4613     // a special resolution stub if the class is not initialized or no native code is registered.
4614     kCallCriticalNative,
4615 
4616     // Use code pointer from the ArtMethod*.
4617     // Used when we don't know the target code. This is also the last-resort-kind used when
4618     // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture.
4619     kCallArtMethod,
4620   };
4621 
4622   struct DispatchInfo {
4623     MethodLoadKind method_load_kind;
4624     CodePtrLocation code_ptr_location;
4625     // The method load data holds
4626     //   - thread entrypoint offset for kStringInit method if this is a string init invoke.
4627     //     Note that there are multiple string init methods, each having its own offset.
4628     //   - the method address for kJitDirectAddress
4629     uint64_t method_load_data;
4630   };
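  // Illustrative value only (not taken from actual compiler output): an AOT call into the
  // boot image could be described as
  //   DispatchInfo dispatch = { MethodLoadKind::kBootImageRelRo,
  //                             CodePtrLocation::kCallArtMethod,
  //                             /* method_load_data= */ 0u };
  // method_load_data is only meaningful for kStringInit and kJitDirectAddress.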
4631 
4632   HInvokeStaticOrDirect(ArenaAllocator* allocator,
4633                         uint32_t number_of_arguments,
4634                         DataType::Type return_type,
4635                         uint32_t dex_pc,
4636                         uint32_t method_index,
4637                         ArtMethod* resolved_method,
4638                         DispatchInfo dispatch_info,
4639                         InvokeType invoke_type,
4640                         MethodReference target_method,
4641                         ClinitCheckRequirement clinit_check_requirement)
4642       : HInvoke(kInvokeStaticOrDirect,
4643                 allocator,
4644                 number_of_arguments,
4645                 // There is potentially one extra argument for the HCurrentMethod input,
4646                 // and one other if the clinit check is explicit. These can be removed later.
4647                 (NeedsCurrentMethodInput(dispatch_info) ? 1u : 0u) +
4648                     (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u),
4649                 return_type,
4650                 dex_pc,
4651                 method_index,
4652                 resolved_method,
4653                 invoke_type),
4654         target_method_(target_method),
4655         dispatch_info_(dispatch_info) {
4656     SetPackedField<ClinitCheckRequirementField>(clinit_check_requirement);
4657   }
4658 
4659   bool IsClonable() const override { return true; }
4660
4661   void SetDispatchInfo(DispatchInfo dispatch_info) {
4662     bool had_current_method_input = HasCurrentMethodInput();
4663     bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info);
4664 
4665     // Using the current method is the default and once we find a better
4666     // method load kind, we should not go back to using the current method.
4667     DCHECK(had_current_method_input || !needs_current_method_input);
4668 
4669     if (had_current_method_input && !needs_current_method_input) {
4670       DCHECK_EQ(InputAt(GetCurrentMethodIndex()), GetBlock()->GetGraph()->GetCurrentMethod());
4671       RemoveInputAt(GetCurrentMethodIndex());
4672     }
4673     dispatch_info_ = dispatch_info;
4674   }
4675 
4676   DispatchInfo GetDispatchInfo() const {
4677     return dispatch_info_;
4678   }
4679 
4680   using HInstruction::GetInputRecords;  // Keep the const version visible.
4681   ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() override {
4682     ArrayRef<HUserRecord<HInstruction*>> input_records = HInvoke::GetInputRecords();
4683     if (kIsDebugBuild && IsStaticWithExplicitClinitCheck()) {
4684       DCHECK(!input_records.empty());
4685       DCHECK_GT(input_records.size(), GetNumberOfArguments());
4686       HInstruction* last_input = input_records.back().GetInstruction();
4687       // Note: `last_input` may be null during arguments setup.
4688       if (last_input != nullptr) {
4689         // `last_input` is the last input of a static invoke marked as having
4690         // an explicit clinit check. It must either be:
4691         // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or
4692         // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation.
4693         DCHECK(last_input->IsClinitCheck() || last_input->IsLoadClass()) << last_input->DebugName();
4694       }
4695     }
4696     return input_records;
4697   }
4698 
4699   bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const override {
4700     // We do not access the method via object reference, so we cannot do an implicit null check.
4701     // TODO: for intrinsics we can generate implicit null checks.
4702     return false;
4703   }
4704 
4705   bool CanBeNull() const override {
4706     return GetType() == DataType::Type::kReference && !IsStringInit();
4707   }
4708
4709   MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
4710   CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; }
4711   bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
4712   bool NeedsDexCacheOfDeclaringClass() const override;
4713   bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
4714   bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kJitDirectAddress; }
4715   bool HasPcRelativeMethodLoadKind() const {
4716     return GetMethodLoadKind() == MethodLoadKind::kBootImageLinkTimePcRelative ||
4717            GetMethodLoadKind() == MethodLoadKind::kBootImageRelRo ||
4718            GetMethodLoadKind() == MethodLoadKind::kBssEntry;
4719   }
4720 
4721   QuickEntrypointEnum GetStringInitEntryPoint() const {
4722     DCHECK(IsStringInit());
4723     return static_cast<QuickEntrypointEnum>(dispatch_info_.method_load_data);
4724   }
4725 
4726   uint64_t GetMethodAddress() const {
4727     DCHECK(HasMethodAddress());
4728     return dispatch_info_.method_load_data;
4729   }
4730 
4731   const DexFile& GetDexFileForPcRelativeDexCache() const;
4732 
4733   ClinitCheckRequirement GetClinitCheckRequirement() const {
4734     return GetPackedField<ClinitCheckRequirementField>();
4735   }
4736 
4737   // Is this instruction a call to a static method?
4738   bool IsStatic() const {
4739     return GetInvokeType() == kStatic;
4740   }
4741
4742   MethodReference GetTargetMethod() const {
4743     return target_method_;
4744   }
4745 
4746   // Does this method load kind need the current method as an input?
4747   static bool NeedsCurrentMethodInput(DispatchInfo dispatch_info) {
4748     return dispatch_info.method_load_kind == MethodLoadKind::kRecursive ||
4749            dispatch_info.method_load_kind == MethodLoadKind::kRuntimeCall ||
4750            dispatch_info.code_ptr_location == CodePtrLocation::kCallCriticalNative;
4751   }
4752 
4753   // Get the index of the current method input.
4754   size_t GetCurrentMethodIndex() const {
4755     DCHECK(HasCurrentMethodInput());
4756     return GetCurrentMethodIndexUnchecked();
4757   }
4758   size_t GetCurrentMethodIndexUnchecked() const {
4759     return GetNumberOfArguments();
4760   }
4761 
4762   // Check if the method has a current method input.
4763   bool HasCurrentMethodInput() const {
4764     if (NeedsCurrentMethodInput(GetDispatchInfo())) {
4765       DCHECK(InputAt(GetCurrentMethodIndexUnchecked()) == nullptr ||  // During argument setup.
4766              InputAt(GetCurrentMethodIndexUnchecked())->IsCurrentMethod());
4767       return true;
4768     } else {
4769       DCHECK(InputCount() == GetCurrentMethodIndexUnchecked() ||
4770              InputAt(GetCurrentMethodIndexUnchecked()) == nullptr ||  // During argument setup.
4771              !InputAt(GetCurrentMethodIndexUnchecked())->IsCurrentMethod());
4772       return false;
4773     }
4774   }
4775 
4776   // Get the index of the special input.
4777   size_t GetSpecialInputIndex() const {
4778     DCHECK(HasSpecialInput());
4779     return GetSpecialInputIndexUnchecked();
4780   }
4781   size_t GetSpecialInputIndexUnchecked() const {
4782     return GetNumberOfArguments() + (HasCurrentMethodInput() ? 1u : 0u);
4783   }
4784 
4785   // Check if the method has a special input.
4786   bool HasSpecialInput() const {
4787     size_t other_inputs =
4788         GetSpecialInputIndexUnchecked() + (IsStaticWithExplicitClinitCheck() ? 1u : 0u);
4789     size_t input_count = InputCount();
4790     DCHECK_LE(input_count - other_inputs, 1u) << other_inputs << " " << input_count;
4791     return other_inputs != input_count;
4792   }
4793 
4794   void AddSpecialInput(HInstruction* input) {
4795     // We allow only one special input.
4796     DCHECK(!HasSpecialInput());
4797     InsertInputAt(GetSpecialInputIndexUnchecked(), input);
4798   }
4799 
4800   // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
4801   // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck)
4802   // instruction; only relevant for static calls with explicit clinit check.
4803   void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) {
4804     DCHECK(IsStaticWithExplicitClinitCheck());
4805     size_t last_input_index = inputs_.size() - 1u;
4806     HInstruction* last_input = inputs_.back().GetInstruction();
4807     DCHECK(last_input != nullptr);
4808     DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
4809     RemoveAsUserOfInput(last_input_index);
4810     inputs_.pop_back();
4811     SetPackedField<ClinitCheckRequirementField>(new_requirement);
4812     DCHECK(!IsStaticWithExplicitClinitCheck());
4813   }
4814 
4815   // Is this a call to a static method whose declaring class has an
4816   // explicit initialization check in the graph?
4817   bool IsStaticWithExplicitClinitCheck() const {
4818     return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kExplicit);
4819   }
4820 
4821   // Is this a call to a static method whose declaring class has an
4822   // implicit initialization check requirement?
4823   bool IsStaticWithImplicitClinitCheck() const {
4824     return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kImplicit);
4825   }
4826 
4827   DECLARE_INSTRUCTION(InvokeStaticOrDirect);
4828 
4829  protected:
4830   DEFAULT_COPY_CONSTRUCTOR(InvokeStaticOrDirect);
4831 
4832  private:
4833   static constexpr size_t kFieldClinitCheckRequirement = kNumberOfInvokePackedBits;
4834   static constexpr size_t kFieldClinitCheckRequirementSize =
4835       MinimumBitsToStore(static_cast<size_t>(ClinitCheckRequirement::kLast));
4836   static constexpr size_t kNumberOfInvokeStaticOrDirectPackedBits =
4837       kFieldClinitCheckRequirement + kFieldClinitCheckRequirementSize;
4838   static_assert(kNumberOfInvokeStaticOrDirectPackedBits <= kMaxNumberOfPackedBits,
4839                 "Too many packed fields.");
4840   using ClinitCheckRequirementField = BitField<ClinitCheckRequirement,
4841                                                kFieldClinitCheckRequirement,
4842                                                kFieldClinitCheckRequirementSize>;
4843 
4844   // Cached values of the resolved method, to avoid needing the mutator lock.
4845   const MethodReference target_method_;
4846   DispatchInfo dispatch_info_;
4847 };
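// Input layout implied by the accessors above: the inputs of an HInvokeStaticOrDirect are
// ordered as
//   [ arguments..., optional HCurrentMethod, optional special input,
//     optional HClinitCheck/HLoadClass for an explicit clinit check (always last) ].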
4848 std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::MethodLoadKind rhs);
4849 std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::CodePtrLocation rhs);
4850 std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs);
4851 
4852 class HInvokeVirtual final : public HInvoke {
4853  public:
4854   HInvokeVirtual(ArenaAllocator* allocator,
4855                  uint32_t number_of_arguments,
4856                  DataType::Type return_type,
4857                  uint32_t dex_pc,
4858                  uint32_t dex_method_index,
4859                  ArtMethod* resolved_method,
4860                  uint32_t vtable_index)
4861       : HInvoke(kInvokeVirtual,
4862                 allocator,
4863                 number_of_arguments,
4864                 0u,
4865                 return_type,
4866                 dex_pc,
4867                 dex_method_index,
4868                 resolved_method,
4869                 kVirtual),
4870         vtable_index_(vtable_index) {
4871   }
4872 
4873   bool IsClonable() const override { return true; }
4874
4875   bool CanBeNull() const override {
4876     switch (GetIntrinsic()) {
4877       case Intrinsics::kThreadCurrentThread:
4878       case Intrinsics::kStringBufferAppend:
4879       case Intrinsics::kStringBufferToString:
4880       case Intrinsics::kStringBuilderAppendObject:
4881       case Intrinsics::kStringBuilderAppendString:
4882       case Intrinsics::kStringBuilderAppendCharSequence:
4883       case Intrinsics::kStringBuilderAppendCharArray:
4884       case Intrinsics::kStringBuilderAppendBoolean:
4885       case Intrinsics::kStringBuilderAppendChar:
4886       case Intrinsics::kStringBuilderAppendInt:
4887       case Intrinsics::kStringBuilderAppendLong:
4888       case Intrinsics::kStringBuilderAppendFloat:
4889       case Intrinsics::kStringBuilderAppendDouble:
4890       case Intrinsics::kStringBuilderToString:
4891         return false;
4892       default:
4893         return HInvoke::CanBeNull();
4894     }
4895   }
4896 
4897   bool CanDoImplicitNullCheckOn(HInstruction* obj) const override {
4898     // TODO: Add implicit null checks in intrinsics.
4899     return (obj == InputAt(0)) && !IsIntrinsic();
4900   }
4901 
4902   uint32_t GetVTableIndex() const { return vtable_index_; }
4903 
4904   DECLARE_INSTRUCTION(InvokeVirtual);
4905 
4906  protected:
4907   DEFAULT_COPY_CONSTRUCTOR(InvokeVirtual);
4908 
4909  private:
4910   // Cached value of the resolved method, to avoid needing the mutator lock.
4911   const uint32_t vtable_index_;
4912 };
4913 
4914 class HInvokeInterface final : public HInvoke {
4915  public:
4916   HInvokeInterface(ArenaAllocator* allocator,
4917                    uint32_t number_of_arguments,
4918                    DataType::Type return_type,
4919                    uint32_t dex_pc,
4920                    uint32_t dex_method_index,
4921                    ArtMethod* resolved_method,
4922                    uint32_t imt_index)
4923       : HInvoke(kInvokeInterface,
4924                 allocator,
4925                 number_of_arguments,
4926                 0u,
4927                 return_type,
4928                 dex_pc,
4929                 dex_method_index,
4930                 resolved_method,
4931                 kInterface),
4932         imt_index_(imt_index) {
4933   }
4934 
4935   bool IsClonable() const override { return true; }
4936 
4937   bool CanDoImplicitNullCheckOn(HInstruction* obj) const override {
4938     // TODO: Add implicit null checks in intrinsics.
4939     return (obj == InputAt(0)) && !IsIntrinsic();
4940   }
4941 
4942   bool NeedsDexCacheOfDeclaringClass() const override {
4943     // The assembly stub currently needs it.
4944     return true;
4945   }
4946 
4947   uint32_t GetImtIndex() const { return imt_index_; }
4948 
4949   DECLARE_INSTRUCTION(InvokeInterface);
4950 
4951  protected:
4952   DEFAULT_COPY_CONSTRUCTOR(InvokeInterface);
4953 
4954  private:
4955   // Cached value of the resolved method, to avoid needing the mutator lock.
4956   const uint32_t imt_index_;
4957 };
4958 
4959 class HNeg final : public HUnaryOperation {
4960  public:
4961   HNeg(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
4962       : HUnaryOperation(kNeg, result_type, input, dex_pc) {
4963     DCHECK_EQ(result_type, DataType::Kind(input->GetType()));
4964   }
4965 
4966   template <typename T> static T Compute(T x) { return -x; }
4967 
4968   HConstant* Evaluate(HIntConstant* x) const override {
4969     return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4970   }
4971   HConstant* Evaluate(HLongConstant* x) const override {
4972     return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
4973   }
4974   HConstant* Evaluate(HFloatConstant* x) const override {
4975     return GetBlock()->GetGraph()->GetFloatConstant(Compute(x->GetValue()), GetDexPc());
4976   }
4977   HConstant* Evaluate(HDoubleConstant* x) const override {
4978     return GetBlock()->GetGraph()->GetDoubleConstant(Compute(x->GetValue()), GetDexPc());
4979   }
4980 
4981   DECLARE_INSTRUCTION(Neg);
4982 
4983  protected:
4984   DEFAULT_COPY_CONSTRUCTOR(Neg);
4985 };
4986 
4987 class HNewArray final : public HExpression<2> {
4988  public:
4989   HNewArray(HInstruction* cls, HInstruction* length, uint32_t dex_pc, size_t component_size_shift)
4990       : HExpression(kNewArray, DataType::Type::kReference, SideEffects::CanTriggerGC(), dex_pc) {
4991     SetRawInputAt(0, cls);
4992     SetRawInputAt(1, length);
4993     SetPackedField<ComponentSizeShiftField>(component_size_shift);
4994   }
4995 
4996   bool IsClonable() const override { return true; }
4997 
4998   // Calls runtime so needs an environment.
4999   bool NeedsEnvironment() const override { return true; }
5000 
5001   // May throw NegativeArraySizeException, OutOfMemoryError, etc.
5002   bool CanThrow() const override { return true; }
5003 
5004   bool CanBeNull() const override { return false; }
5005 
5006   HLoadClass* GetLoadClass() const {
5007     DCHECK(InputAt(0)->IsLoadClass());
5008     return InputAt(0)->AsLoadClass();
5009   }
5010 
5011   HInstruction* GetLength() const {
5012     return InputAt(1);
5013   }
5014 
5015   size_t GetComponentSizeShift() {
5016     return GetPackedField<ComponentSizeShiftField>();
5017   }
5018 
5019   DECLARE_INSTRUCTION(NewArray);
5020 
5021  protected:
5022   DEFAULT_COPY_CONSTRUCTOR(NewArray);
5023 
5024  private:
5025   static constexpr size_t kFieldComponentSizeShift = kNumberOfGenericPackedBits;
5026   static constexpr size_t kFieldComponentSizeShiftSize = MinimumBitsToStore(3u);
5027   static constexpr size_t kNumberOfNewArrayPackedBits =
5028       kFieldComponentSizeShift + kFieldComponentSizeShiftSize;
5029   static_assert(kNumberOfNewArrayPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
5030   using ComponentSizeShiftField =
5031       BitField<size_t, kFieldComponentSizeShift, kFieldComponentSizeShiftSize>;
5032 };
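
// Illustrative only: the component size shift stored above is log2 of the element size, letting
// the code generator scale an index with a shift instead of a multiply. MinimumBitsToStore(3u)
// reserves two bits, enough for shifts 0..3 (1-, 2-, 4- and 8-byte components). The helper below
// is a hypothetical sketch, not the compiler's actual computation.
constexpr size_t ComponentSizeShiftExample(size_t component_size) {
  size_t shift = 0;
  while ((size_t{1} << shift) < component_size) {
    ++shift;
  }
  return shift;
}
static_assert(ComponentSizeShiftExample(sizeof(int32_t)) == 2, "int[] elements scale by 1 << 2");
static_assert(ComponentSizeShiftExample(sizeof(int64_t)) == 3, "long[] elements scale by 1 << 3");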
5033 
5034 class HAdd final : public HBinaryOperation {
5035  public:
5036   HAdd(DataType::Type result_type,
5037        HInstruction* left,
5038        HInstruction* right,
5039        uint32_t dex_pc = kNoDexPc)
5040       : HBinaryOperation(kAdd, result_type, left, right, SideEffects::None(), dex_pc) {
5041   }
5042 
5043   bool IsCommutative() const override { return true; }
5044 
5045   template <typename T> static T Compute(T x, T y) { return x + y; }
5046 
5047   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5048     return GetBlock()->GetGraph()->GetIntConstant(
5049         Compute(x->GetValue(), y->GetValue()), GetDexPc());
5050   }
5051   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5052     return GetBlock()->GetGraph()->GetLongConstant(
5053         Compute(x->GetValue(), y->GetValue()), GetDexPc());
5054   }
5055   HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
5056     return GetBlock()->GetGraph()->GetFloatConstant(
5057         Compute(x->GetValue(), y->GetValue()), GetDexPc());
5058   }
5059   HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
5060     return GetBlock()->GetGraph()->GetDoubleConstant(
5061         Compute(x->GetValue(), y->GetValue()), GetDexPc());
5062   }
5063 
5064   DECLARE_INSTRUCTION(Add);
5065 
5066  protected:
5067   DEFAULT_COPY_CONSTRUCTOR(Add);
5068 };
5069 
5070 class HSub final : public HBinaryOperation {
5071  public:
5072   HSub(DataType::Type result_type,
5073        HInstruction* left,
5074        HInstruction* right,
5075        uint32_t dex_pc = kNoDexPc)
5076       : HBinaryOperation(kSub, result_type, left, right, SideEffects::None(), dex_pc) {
5077   }
5078 
5079   template <typename T> static T Compute(T x, T y) { return x - y; }
5080 
5081   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5082     return GetBlock()->GetGraph()->GetIntConstant(
5083         Compute(x->GetValue(), y->GetValue()), GetDexPc());
5084   }
5085   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5086     return GetBlock()->GetGraph()->GetLongConstant(
5087         Compute(x->GetValue(), y->GetValue()), GetDexPc());
5088   }
5089   HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
5090     return GetBlock()->GetGraph()->GetFloatConstant(
5091         Compute(x->GetValue(), y->GetValue()), GetDexPc());
5092   }
5093   HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
5094     return GetBlock()->GetGraph()->GetDoubleConstant(
5095         Compute(x->GetValue(), y->GetValue()), GetDexPc());
5096   }
5097 
5098   DECLARE_INSTRUCTION(Sub);
5099 
5100  protected:
5101   DEFAULT_COPY_CONSTRUCTOR(Sub);
5102 };
5103 
5104 class HMul final : public HBinaryOperation {
5105  public:
5106   HMul(DataType::Type result_type,
5107        HInstruction* left,
5108        HInstruction* right,
5109        uint32_t dex_pc = kNoDexPc)
5110       : HBinaryOperation(kMul, result_type, left, right, SideEffects::None(), dex_pc) {
5111   }
5112 
5113   bool IsCommutative() const override { return true; }
5114 
5115   template <typename T> static T Compute(T x, T y) { return x * y; }
5116 
5117   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5118     return GetBlock()->GetGraph()->GetIntConstant(
5119         Compute(x->GetValue(), y->GetValue()), GetDexPc());
5120   }
5121   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5122     return GetBlock()->GetGraph()->GetLongConstant(
5123         Compute(x->GetValue(), y->GetValue()), GetDexPc());
5124   }
5125   HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
5126     return GetBlock()->GetGraph()->GetFloatConstant(
5127         Compute(x->GetValue(), y->GetValue()), GetDexPc());
5128   }
5129   HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
5130     return GetBlock()->GetGraph()->GetDoubleConstant(
5131         Compute(x->GetValue(), y->GetValue()), GetDexPc());
5132   }
5133 
5134   DECLARE_INSTRUCTION(Mul);
5135 
5136  protected:
5137   DEFAULT_COPY_CONSTRUCTOR(Mul);
5138 };
5139 
5140 class HDiv final : public HBinaryOperation {
5141  public:
5142   HDiv(DataType::Type result_type,
5143        HInstruction* left,
5144        HInstruction* right,
5145        uint32_t dex_pc)
5146       : HBinaryOperation(kDiv, result_type, left, right, SideEffects::None(), dex_pc) {
5147   }
5148 
5149   template <typename T>
5150   T ComputeIntegral(T x, T y) const {
5151     DCHECK(!DataType::IsFloatingPointType(GetType())) << GetType();
5152     // Our graph structure ensures we never have 0 for `y` during
5153     // constant folding.
5154     DCHECK_NE(y, 0);
5155     // Special case -1 to avoid getting a SIGFPE on x86(_64).
5156     return (y == -1) ? -x : x / y;
5157   }
5158 
5159   template <typename T>
5160   T ComputeFP(T x, T y) const {
5161     DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
5162     return x / y;
5163   }
5164 
5165   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5166     return GetBlock()->GetGraph()->GetIntConstant(
5167         ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5168   }
5169   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5170     return GetBlock()->GetGraph()->GetLongConstant(
5171         ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5172   }
5173   HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
5174     return GetBlock()->GetGraph()->GetFloatConstant(
5175         ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
5176   }
5177   HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
5178     return GetBlock()->GetGraph()->GetDoubleConstant(
5179         ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
5180   }
5181 
5182   DECLARE_INSTRUCTION(Div);
5183 
5184  protected:
5185   DEFAULT_COPY_CONSTRUCTOR(Div);
5186 };
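
// Illustrative only: why ComputeIntegral() above special-cases a divisor of -1. The one
// problematic input is INT32_MIN / -1 (and its int64 counterpart): the true quotient 2^31 is not
// representable, so the x86/x86-64 idiv instruction faults (delivered as SIGFPE), while Java
// semantics require the quotient to wrap back to INT32_MIN. Folding -x instead of x / -1 avoids
// ever executing that division. This assumes <limits> is reachable via the existing includes.
static_assert((int64_t{1} << 31) > std::numeric_limits<int32_t>::max(),
              "the exact quotient of INT32_MIN / -1 does not fit in int32_t");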
5187 
5188 class HRem final : public HBinaryOperation {
5189  public:
5190   HRem(DataType::Type result_type,
5191        HInstruction* left,
5192        HInstruction* right,
5193        uint32_t dex_pc)
5194       : HBinaryOperation(kRem, result_type, left, right, SideEffects::None(), dex_pc) {
5195   }
5196 
5197   template <typename T>
5198   T ComputeIntegral(T x, T y) const {
5199     DCHECK(!DataType::IsFloatingPointType(GetType())) << GetType();
5200     // Our graph structure ensures we never have 0 for `y` during
5201     // constant folding.
5202     DCHECK_NE(y, 0);
5203     // Special case -1 to avoid getting a SIGFPE on x86(_64).
5204     return (y == -1) ? 0 : x % y;
5205   }
5206 
5207   template <typename T>
5208   T ComputeFP(T x, T y) const {
5209     DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
5210     return std::fmod(x, y);
5211   }
5212 
5213   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5214     return GetBlock()->GetGraph()->GetIntConstant(
5215         ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5216   }
5217   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5218     return GetBlock()->GetGraph()->GetLongConstant(
5219         ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5220   }
5221   HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
5222     return GetBlock()->GetGraph()->GetFloatConstant(
5223         ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
5224   }
5225   HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
5226     return GetBlock()->GetGraph()->GetDoubleConstant(
5227         ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
5228   }
5229 
5230   DECLARE_INSTRUCTION(Rem);
5231 
5232  protected:
5233   DEFAULT_COPY_CONSTRUCTOR(Rem);
5234 };
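
// Illustrative only: Java's % truncates toward zero, so the remainder takes the sign of the
// dividend, matching C++ for non-overflowing operands; the only special case is a divisor of -1,
// where the remainder is defined to be 0 and ComputeIntegral() folds it directly to avoid the
// same idiv fault described for HDiv.
static_assert(-7 % 3 == -1 && 7 % -3 == 1, "the remainder follows the sign of the dividend");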
5235 
5236 class HMin final : public HBinaryOperation {
5237  public:
5238   HMin(DataType::Type result_type,
5239        HInstruction* left,
5240        HInstruction* right,
5241        uint32_t dex_pc)
5242       : HBinaryOperation(kMin, result_type, left, right, SideEffects::None(), dex_pc) {}
5243 
5244   bool IsCommutative() const override { return true; }
5245 
5246   // Evaluation for integral values.
5247   template <typename T> static T ComputeIntegral(T x, T y) {
5248     return (x <= y) ? x : y;
5249   }
5250 
5251   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5252     return GetBlock()->GetGraph()->GetIntConstant(
5253         ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5254   }
5255   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5256     return GetBlock()->GetGraph()->GetLongConstant(
5257         ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5258   }
5259   // TODO: Evaluation for floating-point values.
5260   HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
5261                       HFloatConstant* y ATTRIBUTE_UNUSED) const override { return nullptr; }
5262   HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
5263                       HDoubleConstant* y ATTRIBUTE_UNUSED) const override { return nullptr; }
5264 
5265   DECLARE_INSTRUCTION(Min);
5266 
5267  protected:
5268   DEFAULT_COPY_CONSTRUCTOR(Min);
5269 };
5270 
5271 class HMax final : public HBinaryOperation {
5272  public:
5273   HMax(DataType::Type result_type,
5274        HInstruction* left,
5275        HInstruction* right,
5276        uint32_t dex_pc)
5277       : HBinaryOperation(kMax, result_type, left, right, SideEffects::None(), dex_pc) {}
5278 
5279   bool IsCommutative() const override { return true; }
5280 
5281   // Evaluation for integral values.
5282   template <typename T> static T ComputeIntegral(T x, T y) {
5283     return (x >= y) ? x : y;
5284   }
5285 
5286   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5287     return GetBlock()->GetGraph()->GetIntConstant(
5288         ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5289   }
5290   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5291     return GetBlock()->GetGraph()->GetLongConstant(
5292         ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5293   }
5294   // TODO: Evaluation for floating-point values.
5295   HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
5296                       HFloatConstant* y ATTRIBUTE_UNUSED) const override { return nullptr; }
5297   HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
5298                       HDoubleConstant* y ATTRIBUTE_UNUSED) const override { return nullptr; }
5299 
5300   DECLARE_INSTRUCTION(Max);
5301 
5302  protected:
5303   DEFAULT_COPY_CONSTRUCTOR(Max);
5304 };
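
// Illustrative only: why the floating-point Evaluate() overloads of HMin/HMax return nullptr
// instead of folding. Java's Math.min/max must return NaN when either operand is NaN and must
// order -0.0 below +0.0, so the integral formula (x >= y) ? x : y is not sufficient: every
// ordered comparison against NaN is false, which would silently select the non-NaN operand.
static_assert(!(std::numeric_limits<double>::quiet_NaN() >= 0.0) &&
                  !(std::numeric_limits<double>::quiet_NaN() < 0.0),
              "NaN compares unordered, so (x >= y) ? x : y would drop a NaN operand");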
5305 
5306 class HAbs final : public HUnaryOperation {
5307  public:
5308   HAbs(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
5309       : HUnaryOperation(kAbs, result_type, input, dex_pc) {}
5310 
5311   // Evaluation for integral values.
5312   template <typename T> static T ComputeIntegral(T x) {
5313     return x < 0 ? -x : x;
5314   }
5315 
5316   // Evaluation for floating-point values.
5317   // Note, as a "quality of implementation", rather than pure "spec compliance",
5318   // we require that Math.abs() clears the sign bit (but changes nothing else)
5319   // for all floating-point numbers, including NaN (signaling NaN may become quiet though).
5320   // http://b/30758343
5321   template <typename T, typename S> static T ComputeFP(T x) {
5322     S bits = bit_cast<S, T>(x);
5323     return bit_cast<T, S>(bits & std::numeric_limits<S>::max());
5324   }
5325 
5326   HConstant* Evaluate(HIntConstant* x) const override {
5327     return GetBlock()->GetGraph()->GetIntConstant(ComputeIntegral(x->GetValue()), GetDexPc());
5328   }
5329   HConstant* Evaluate(HLongConstant* x) const override {
5330     return GetBlock()->GetGraph()->GetLongConstant(ComputeIntegral(x->GetValue()), GetDexPc());
5331   }
5332   HConstant* Evaluate(HFloatConstant* x) const override {
5333     return GetBlock()->GetGraph()->GetFloatConstant(
5334         ComputeFP<float, int32_t>(x->GetValue()), GetDexPc());
5335   }
5336   HConstant* Evaluate(HDoubleConstant* x) const override {
5337     return GetBlock()->GetGraph()->GetDoubleConstant(
5338         ComputeFP<double, int64_t>(x->GetValue()), GetDexPc());
5339   }
5340 
5341   DECLARE_INSTRUCTION(Abs);
5342 
5343  protected:
5344   DEFAULT_COPY_CONSTRUCTOR(Abs);
5345 };
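
// Illustrative only: ComputeFP() above clears exactly the IEEE-754 sign bit, because
// std::numeric_limits<S>::max() for the signed carrier type S is the all-ones pattern with the
// top bit clear. On raw float bit patterns (0xC0490FDB is approximately -pi as a float):
static_assert((0xC0490FDBu & static_cast<uint32_t>(std::numeric_limits<int32_t>::max()))
                  == 0x40490FDBu,
              "masking with INT32_MAX clears only the sign bit, leaving NaN payloads intact");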
5346 
5347 class HDivZeroCheck final : public HExpression<1> {
5348  public:
5349   // `HDivZeroCheck` can trigger GC, as it may call the `ArithmeticException`
5350   // constructor. However it can only do it on a fatal slow path so execution never returns to the
5351   // instruction following the current one; thus 'SideEffects::None()' is used.
5352   HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
5353       : HExpression(kDivZeroCheck, value->GetType(), SideEffects::None(), dex_pc) {
5354     SetRawInputAt(0, value);
5355   }
5356 
5357   bool IsClonable() const override { return true; }
5358   bool CanBeMoved() const override { return true; }
5359 
5360   bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
5361     return true;
5362   }
5363 
5364   bool NeedsEnvironment() const override { return true; }
5365   bool CanThrow() const override { return true; }
5366 
5367   DECLARE_INSTRUCTION(DivZeroCheck);
5368 
5369  protected:
5370   DEFAULT_COPY_CONSTRUCTOR(DivZeroCheck);
5371 };
5372 
5373 class HShl final : public HBinaryOperation {
5374  public:
5375   HShl(DataType::Type result_type,
5376        HInstruction* value,
5377        HInstruction* distance,
5378        uint32_t dex_pc = kNoDexPc)
5379       : HBinaryOperation(kShl, result_type, value, distance, SideEffects::None(), dex_pc) {
5380     DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
5381     DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
5382   }
5383 
5384   template <typename T>
5385   static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
5386     return value << (distance & max_shift_distance);
5387   }
5388 
5389   HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const override {
5390     return GetBlock()->GetGraph()->GetIntConstant(
5391         Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
5392   }
5393   HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const override {
5394     return GetBlock()->GetGraph()->GetLongConstant(
5395         Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
5396   }
5397   HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
5398                       HLongConstant* distance ATTRIBUTE_UNUSED) const override {
5399     LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
5400     UNREACHABLE();
5401   }
5402   HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
5403                       HFloatConstant* distance ATTRIBUTE_UNUSED) const override {
5404     LOG(FATAL) << DebugName() << " is not defined for float values";
5405     UNREACHABLE();
5406   }
5407   HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
5408                       HDoubleConstant* distance ATTRIBUTE_UNUSED) const override {
5409     LOG(FATAL) << DebugName() << " is not defined for double values";
5410     UNREACHABLE();
5411   }
5412 
5413   DECLARE_INSTRUCTION(Shl);
5414 
5415  protected:
5416   DEFAULT_COPY_CONSTRUCTOR(Shl);
5417 };
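
// Illustrative only: as with the dex shl-int/shl-long instructions, only the low five (int) or
// six (long) bits of the shift distance are significant, which is what masking with
// kMaxIntShiftDistance / kMaxLongShiftDistance implements in Compute() above. The helper below
// is a hypothetical standalone version of the int case.
constexpr int32_t ShlIntExample(int32_t value, int32_t distance) {
  return value << (distance & kMaxIntShiftDistance);
}
static_assert(ShlIntExample(1, 33) == 2, "an int shift by 33 behaves like a shift by 1");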
5418 
5419 class HShr final : public HBinaryOperation {
5420  public:
5421   HShr(DataType::Type result_type,
5422        HInstruction* value,
5423        HInstruction* distance,
5424        uint32_t dex_pc = kNoDexPc)
5425       : HBinaryOperation(kShr, result_type, value, distance, SideEffects::None(), dex_pc) {
5426     DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
5427     DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
5428   }
5429 
5430   template <typename T>
5431   static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
5432     return value >> (distance & max_shift_distance);
5433   }
5434 
5435   HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const override {
5436     return GetBlock()->GetGraph()->GetIntConstant(
5437         Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
5438   }
5439   HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const override {
5440     return GetBlock()->GetGraph()->GetLongConstant(
5441         Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
5442   }
5443   HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
5444                       HLongConstant* distance ATTRIBUTE_UNUSED) const override {
5445     LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
5446     UNREACHABLE();
5447   }
5448   HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
5449                       HFloatConstant* distance ATTRIBUTE_UNUSED) const override {
5450     LOG(FATAL) << DebugName() << " is not defined for float values";
5451     UNREACHABLE();
5452   }
5453   HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
5454                       HDoubleConstant* distance ATTRIBUTE_UNUSED) const override {
5455     LOG(FATAL) << DebugName() << " is not defined for double values";
5456     UNREACHABLE();
5457   }
5458 
5459   DECLARE_INSTRUCTION(Shr);
5460 
5461  protected:
5462   DEFAULT_COPY_CONSTRUCTOR(Shr);
5463 };
5464 
5465 class HUShr final : public HBinaryOperation {
5466  public:
5467   HUShr(DataType::Type result_type,
5468         HInstruction* value,
5469         HInstruction* distance,
5470         uint32_t dex_pc = kNoDexPc)
5471       : HBinaryOperation(kUShr, result_type, value, distance, SideEffects::None(), dex_pc) {
5472     DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
5473     DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
5474   }
5475 
5476   template <typename T>
5477   static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
5478     typedef typename std::make_unsigned<T>::type V;
5479     V ux = static_cast<V>(value);
5480     return static_cast<T>(ux >> (distance & max_shift_distance));
5481   }
5482 
5483   HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const override {
5484     return GetBlock()->GetGraph()->GetIntConstant(
5485         Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
5486   }
5487   HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const override {
5488     return GetBlock()->GetGraph()->GetLongConstant(
5489         Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
5490   }
5491   HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
5492                       HLongConstant* distance ATTRIBUTE_UNUSED) const override {
5493     LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
5494     UNREACHABLE();
5495   }
5496   HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
5497                       HFloatConstant* distance ATTRIBUTE_UNUSED) const override {
5498     LOG(FATAL) << DebugName() << " is not defined for float values";
5499     UNREACHABLE();
5500   }
5501   HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
5502                       HDoubleConstant* distance ATTRIBUTE_UNUSED) const override {
5503     LOG(FATAL) << DebugName() << " is not defined for double values";
5504     UNREACHABLE();
5505   }
5506 
5507   DECLARE_INSTRUCTION(UShr);
5508 
5509  protected:
5510   DEFAULT_COPY_CONSTRUCTOR(UShr);
5511 };
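
// Illustrative only: Java's >>> has no direct C++ operator, so Compute() above routes the value
// through the corresponding unsigned type; a plain >> on a negative signed value would perform
// an arithmetic shift on the usual implementations and smear the sign bit instead of inserting
// zeroes. The helper below is a hypothetical standalone version of the int case.
constexpr int32_t UShrIntExample(int32_t value, int32_t distance) {
  return static_cast<int32_t>(static_cast<uint32_t>(value) >> (distance & kMaxIntShiftDistance));
}
static_assert(UShrIntExample(-1, 28) == 0xf, "-1 >>> 28 yields 0xf rather than -1");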
5512 
5513 class HAnd final : public HBinaryOperation {
5514  public:
5515   HAnd(DataType::Type result_type,
5516        HInstruction* left,
5517        HInstruction* right,
5518        uint32_t dex_pc = kNoDexPc)
5519       : HBinaryOperation(kAnd, result_type, left, right, SideEffects::None(), dex_pc) {
5520   }
5521 
5522   bool IsCommutative() const override { return true; }
5523 
5524   template <typename T> static T Compute(T x, T y) { return x & y; }
5525 
5526   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5527     return GetBlock()->GetGraph()->GetIntConstant(
5528         Compute(x->GetValue(), y->GetValue()), GetDexPc());
5529   }
5530   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5531     return GetBlock()->GetGraph()->GetLongConstant(
5532         Compute(x->GetValue(), y->GetValue()), GetDexPc());
5533   }
5534   HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
5535                       HFloatConstant* y ATTRIBUTE_UNUSED) const override {
5536     LOG(FATAL) << DebugName() << " is not defined for float values";
5537     UNREACHABLE();
5538   }
5539   HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
5540                       HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
5541     LOG(FATAL) << DebugName() << " is not defined for double values";
5542     UNREACHABLE();
5543   }
5544 
5545   DECLARE_INSTRUCTION(And);
5546 
5547  protected:
5548   DEFAULT_COPY_CONSTRUCTOR(And);
5549 };
5550 
5551 class HOr final : public HBinaryOperation {
5552  public:
5553   HOr(DataType::Type result_type,
5554       HInstruction* left,
5555       HInstruction* right,
5556       uint32_t dex_pc = kNoDexPc)
5557       : HBinaryOperation(kOr, result_type, left, right, SideEffects::None(), dex_pc) {
5558   }
5559 
5560   bool IsCommutative() const override { return true; }
5561 
5562   template <typename T> static T Compute(T x, T y) { return x | y; }
5563 
5564   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5565     return GetBlock()->GetGraph()->GetIntConstant(
5566         Compute(x->GetValue(), y->GetValue()), GetDexPc());
5567   }
5568   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5569     return GetBlock()->GetGraph()->GetLongConstant(
5570         Compute(x->GetValue(), y->GetValue()), GetDexPc());
5571   }
5572   HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
5573                       HFloatConstant* y ATTRIBUTE_UNUSED) const override {
5574     LOG(FATAL) << DebugName() << " is not defined for float values";
5575     UNREACHABLE();
5576   }
5577   HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
5578                       HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
5579     LOG(FATAL) << DebugName() << " is not defined for double values";
5580     UNREACHABLE();
5581   }
5582 
5583   DECLARE_INSTRUCTION(Or);
5584 
5585  protected:
5586   DEFAULT_COPY_CONSTRUCTOR(Or);
5587 };
5588 
5589 class HXor final : public HBinaryOperation {
5590  public:
5591   HXor(DataType::Type result_type,
5592        HInstruction* left,
5593        HInstruction* right,
5594        uint32_t dex_pc = kNoDexPc)
5595       : HBinaryOperation(kXor, result_type, left, right, SideEffects::None(), dex_pc) {
5596   }
5597 
5598   bool IsCommutative() const override { return true; }
5599 
5600   template <typename T> static T Compute(T x, T y) { return x ^ y; }
5601 
5602   HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5603     return GetBlock()->GetGraph()->GetIntConstant(
5604         Compute(x->GetValue(), y->GetValue()), GetDexPc());
5605   }
5606   HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5607     return GetBlock()->GetGraph()->GetLongConstant(
5608         Compute(x->GetValue(), y->GetValue()), GetDexPc());
5609   }
5610   HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
5611                       HFloatConstant* y ATTRIBUTE_UNUSED) const override {
5612     LOG(FATAL) << DebugName() << " is not defined for float values";
5613     UNREACHABLE();
5614   }
5615   HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
5616                       HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
5617     LOG(FATAL) << DebugName() << " is not defined for double values";
5618     UNREACHABLE();
5619   }
5620 
5621   DECLARE_INSTRUCTION(Xor);
5622 
5623  protected:
5624   DEFAULT_COPY_CONSTRUCTOR(Xor);
5625 };
5626 
5627 class HRor final : public HBinaryOperation {
5628  public:
5629   HRor(DataType::Type result_type, HInstruction* value, HInstruction* distance)
5630       : HBinaryOperation(kRor, result_type, value, distance) {
5631   }
5632 
5633   template <typename T>
5634   static T Compute(T value, int32_t distance, int32_t max_shift_value) {
5635     typedef typename std::make_unsigned<T>::type V;
5636     V ux = static_cast<V>(value);
5637     if ((distance & max_shift_value) == 0) {
5638       return static_cast<T>(ux);
5639     } else {
5640       const V reg_bits = sizeof(T) * 8;
5641       return static_cast<T>(ux >> (distance & max_shift_value)) |
5642                            (value << (reg_bits - (distance & max_shift_value)));
5643     }
5644   }
5645 
5646   HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const override {
5647     return GetBlock()->GetGraph()->GetIntConstant(
5648         Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
5649   }
5650   HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const override {
5651     return GetBlock()->GetGraph()->GetLongConstant(
5652         Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
5653   }
5654   HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
5655                       HLongConstant* distance ATTRIBUTE_UNUSED) const override {
5656     LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
5657     UNREACHABLE();
5658   }
5659   HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
5660                       HFloatConstant* distance ATTRIBUTE_UNUSED) const override {
5661     LOG(FATAL) << DebugName() << " is not defined for float values";
5662     UNREACHABLE();
5663   }
5664   HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
5665                       HDoubleConstant* distance ATTRIBUTE_UNUSED) const override {
5666     LOG(FATAL) << DebugName() << " is not defined for double values";
5667     UNREACHABLE();
5668   }
5669 
5670   DECLARE_INSTRUCTION(Ror);
5671 
5672  protected:
5673   DEFAULT_COPY_CONSTRUCTOR(Ror);
5674 };
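
// Illustrative only: a rotate-right by d is a logical shift right by d OR'd with the bits that
// fall off re-entering at the top, mirroring Compute() above (including the d == 0 case, which
// must be handled separately to avoid a shift by the full register width). Hypothetical helper:
constexpr uint32_t RorExample(uint32_t value, int32_t distance) {
  const uint32_t d = static_cast<uint32_t>(distance) & kMaxIntShiftDistance;
  return (d == 0) ? value : ((value >> d) | (value << (32 - d)));
}
static_assert(RorExample(0x80000001u, 1) == 0xC0000000u, "the low bit rotates into the top bit");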
5675 
5676 // The value of a parameter in this method. Its location depends on
5677 // the calling convention.
5678 class HParameterValue final : public HExpression<0> {
5679  public:
5680   HParameterValue(const DexFile& dex_file,
5681                   dex::TypeIndex type_index,
5682                   uint8_t index,
5683                   DataType::Type parameter_type,
5684                   bool is_this = false)
5685       : HExpression(kParameterValue, parameter_type, SideEffects::None(), kNoDexPc),
5686         dex_file_(dex_file),
5687         type_index_(type_index),
5688         index_(index) {
5689     SetPackedFlag<kFlagIsThis>(is_this);
5690     SetPackedFlag<kFlagCanBeNull>(!is_this);
5691   }
5692 
5693   const DexFile& GetDexFile() const { return dex_file_; }
5694   dex::TypeIndex GetTypeIndex() const { return type_index_; }
5695   uint8_t GetIndex() const { return index_; }
5696   bool IsThis() const { return GetPackedFlag<kFlagIsThis>(); }
5697 
5698   bool CanBeNull() const override { return GetPackedFlag<kFlagCanBeNull>(); }
5699   void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }
5700 
5701   DECLARE_INSTRUCTION(ParameterValue);
5702 
5703  protected:
5704   DEFAULT_COPY_CONSTRUCTOR(ParameterValue);
5705 
5706  private:
5707   // Whether or not the parameter value corresponds to 'this' argument.
5708   static constexpr size_t kFlagIsThis = kNumberOfGenericPackedBits;
5709   static constexpr size_t kFlagCanBeNull = kFlagIsThis + 1;
5710   static constexpr size_t kNumberOfParameterValuePackedBits = kFlagCanBeNull + 1;
5711   static_assert(kNumberOfParameterValuePackedBits <= kMaxNumberOfPackedBits,
5712                 "Too many packed fields.");
5713 
5714   const DexFile& dex_file_;
5715   const dex::TypeIndex type_index_;
5716   // The index of this parameter in the parameters list. Must be less
5717   // than HGraph::number_of_in_vregs_.
5718   const uint8_t index_;
5719 };
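
// Illustrative only: kFlagIsThis and kFlagCanBeNull share the instruction's packed field, each
// occupying a single bit after the generic packed bits. The hypothetical helpers below sketch
// the Set/GetPackedFlag semantics with plain bit operations.
constexpr uint32_t SetPackedBitExample(uint32_t packed, size_t bit, bool value) {
  return value ? (packed | (uint32_t{1} << bit)) : (packed & ~(uint32_t{1} << bit));
}
constexpr bool GetPackedBitExample(uint32_t packed, size_t bit) {
  return ((packed >> bit) & 1u) != 0u;
}
static_assert(GetPackedBitExample(SetPackedBitExample(0u, 3, true), 3) &&
                  !GetPackedBitExample(SetPackedBitExample(0u, 3, true), 4),
              "a flag occupies exactly its own bit");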
5720 
5721 class HNot final : public HUnaryOperation {
5722  public:
5723   HNot(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
5724       : HUnaryOperation(kNot, result_type, input, dex_pc) {
5725   }
5726 
5727   bool CanBeMoved() const override { return true; }
5728   bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
5729     return true;
5730   }
5731 
5732   template <typename T> static T Compute(T x) { return ~x; }
5733 
5734   HConstant* Evaluate(HIntConstant* x) const override {
5735     return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
5736   }
5737   HConstant* Evaluate(HLongConstant* x) const override {
5738     return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
5739   }
5740   HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const override {
5741     LOG(FATAL) << DebugName() << " is not defined for float values";
5742     UNREACHABLE();
5743   }
5744   HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const override {
5745     LOG(FATAL) << DebugName() << " is not defined for double values";
5746     UNREACHABLE();
5747   }
5748 
5749   DECLARE_INSTRUCTION(Not);
5750 
5751  protected:
5752   DEFAULT_COPY_CONSTRUCTOR(Not);
5753 };
5754 
5755 class HBooleanNot final : public HUnaryOperation {
5756  public:
5757   explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
5758       : HUnaryOperation(kBooleanNot, DataType::Type::kBool, input, dex_pc) {
5759   }
5760 
5761   bool CanBeMoved() const override { return true; }
5762   bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
5763     return true;
5764   }
5765 
5766   template <typename T> static bool Compute(T x) {
5767     DCHECK(IsUint<1>(x)) << x;
5768     return !x;
5769   }
5770 
5771   HConstant* Evaluate(HIntConstant* x) const override {
5772     return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
5773   }
5774   HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const override {
5775     LOG(FATAL) << DebugName() << " is not defined for long values";
5776     UNREACHABLE();
5777   }
5778   HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const override {
5779     LOG(FATAL) << DebugName() << " is not defined for float values";
5780     UNREACHABLE();
5781   }
5782   HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const override {
5783     LOG(FATAL) << DebugName() << " is not defined for double values";
5784     UNREACHABLE();
5785   }
5786 
5787   DECLARE_INSTRUCTION(BooleanNot);
5788 
5789  protected:
5790   DEFAULT_COPY_CONSTRUCTOR(BooleanNot);
5791 };
5792 
5793 class HTypeConversion final : public HExpression<1> {
5794  public:
5795   // Instantiate a type conversion of `input` to `result_type`.
5796   HTypeConversion(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
5797       : HExpression(kTypeConversion, result_type, SideEffects::None(), dex_pc) {
5798     SetRawInputAt(0, input);
5799     // Invariant: We should never generate a conversion to a Boolean value.
5800     DCHECK_NE(DataType::Type::kBool, result_type);
5801   }
5802 
5803   HInstruction* GetInput() const { return InputAt(0); }
5804   DataType::Type GetInputType() const { return GetInput()->GetType(); }
5805   DataType::Type GetResultType() const { return GetType(); }
5806 
5807   bool IsClonable() const override { return true; }
5808   bool CanBeMoved() const override { return true; }
5809   bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
5810     return true;
5811   }
5812   // Return whether the conversion is implicit. This includes conversion to the same type.
5813   bool IsImplicitConversion() const {
5814     return DataType::IsTypeConversionImplicit(GetInputType(), GetResultType());
5815   }
5816 
5817   // Try to statically evaluate the conversion and return a HConstant
5818   // containing the result.  If the input cannot be converted, return nullptr.
5819   HConstant* TryStaticEvaluation() const;
5820 
5821   DECLARE_INSTRUCTION(TypeConversion);
5822 
5823  protected:
5824   DEFAULT_COPY_CONSTRUCTOR(TypeConversion);
5825 };
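
// Illustrative only (the authoritative rules live in DataType::IsTypeConversionImplicit()): a
// conversion is implicit when the value is already held in the required representation, e.g. a
// narrow integral value kept sign-extended in a 32-bit register needs no code to be read as an
// int32, whereas the reverse direction still needs an explicit truncation/sign-extension.
static_assert(static_cast<int32_t>(static_cast<int16_t>(-1)) == -1,
              "a sign-extended 16-bit value already denotes its int32 value");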
5826 
5827 static constexpr uint32_t kNoRegNumber = -1;
5828 
5829 class HNullCheck final : public HExpression<1> {
5830  public:
5831   // `HNullCheck` can trigger GC, as it may call the `NullPointerException`
5832   // constructor. However it can only do it on a fatal slow path so execution never returns to the
5833   // instruction following the current one; thus 'SideEffects::None()' is used.
5834   HNullCheck(HInstruction* value, uint32_t dex_pc)
5835       : HExpression(kNullCheck, value->GetType(), SideEffects::None(), dex_pc) {
5836     SetRawInputAt(0, value);
5837   }
5838 
5839   bool IsClonable() const override { return true; }
5840   bool CanBeMoved() const override { return true; }
5841   bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
5842     return true;
5843   }
5844 
5845   bool NeedsEnvironment() const override { return true; }
5846 
5847   bool CanThrow() const override { return true; }
5848 
5849   bool CanBeNull() const override { return false; }
5850 
5851   DECLARE_INSTRUCTION(NullCheck);
5852 
5853  protected:
5854   DEFAULT_COPY_CONSTRUCTOR(NullCheck);
5855 };
5856 
5857 // Embeds an ArtField and all the information required by the compiler. We cache
5858 // that information to avoid requiring the mutator lock every time we need it.
5859 class FieldInfo : public ValueObject {
5860  public:
5861   FieldInfo(ArtField* field,
5862             MemberOffset field_offset,
5863             DataType::Type field_type,
5864             bool is_volatile,
5865             uint32_t index,
5866             uint16_t declaring_class_def_index,
5867             const DexFile& dex_file)
5868       : field_(field),
5869         field_offset_(field_offset),
5870         field_type_(field_type),
5871         is_volatile_(is_volatile),
5872         index_(index),
5873         declaring_class_def_index_(declaring_class_def_index),
5874         dex_file_(dex_file) {}
5875 
5876   ArtField* GetField() const { return field_; }
5877   MemberOffset GetFieldOffset() const { return field_offset_; }
5878   DataType::Type GetFieldType() const { return field_type_; }
5879   uint32_t GetFieldIndex() const { return index_; }
5880   uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_;}
5881   const DexFile& GetDexFile() const { return dex_file_; }
5882   bool IsVolatile() const { return is_volatile_; }
5883 
5884  private:
5885   ArtField* const field_;
5886   const MemberOffset field_offset_;
5887   const DataType::Type field_type_;
5888   const bool is_volatile_;
5889   const uint32_t index_;
5890   const uint16_t declaring_class_def_index_;
5891   const DexFile& dex_file_;
5892 };
5893 
5894 class HInstanceFieldGet final : public HExpression<1> {
5895  public:
5896   HInstanceFieldGet(HInstruction* value,
5897                     ArtField* field,
5898                     DataType::Type field_type,
5899                     MemberOffset field_offset,
5900                     bool is_volatile,
5901                     uint32_t field_idx,
5902                     uint16_t declaring_class_def_index,
5903                     const DexFile& dex_file,
5904                     uint32_t dex_pc)
5905       : HExpression(kInstanceFieldGet,
5906                     field_type,
5907                     SideEffects::FieldReadOfType(field_type, is_volatile),
5908                     dex_pc),
5909         field_info_(field,
5910                     field_offset,
5911                     field_type,
5912                     is_volatile,
5913                     field_idx,
5914                     declaring_class_def_index,
5915                     dex_file) {
5916     SetRawInputAt(0, value);
5917   }
5918 
5919   bool IsClonable() const override { return true; }
5920   bool CanBeMoved() const override { return !IsVolatile(); }
5921 
5922   bool InstructionDataEquals(const HInstruction* other) const override {
5923     const HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
5924     return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
5925   }
5926 
5927   bool CanDoImplicitNullCheckOn(HInstruction* obj) const override {
5928     return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
5929   }
5930 
5931   size_t ComputeHashCode() const override {
5932     return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
5933   }
5934 
5935   const FieldInfo& GetFieldInfo() const { return field_info_; }
5936   MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
5937   DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
5938   bool IsVolatile() const { return field_info_.IsVolatile(); }
5939 
5940   void SetType(DataType::Type new_type) {
5941     DCHECK(DataType::IsIntegralType(GetType()));
5942     DCHECK(DataType::IsIntegralType(new_type));
5943     DCHECK_EQ(DataType::Size(GetType()), DataType::Size(new_type));
5944     SetPackedField<TypeField>(new_type);
5945   }
5946 
5947   DECLARE_INSTRUCTION(InstanceFieldGet);
5948 
5949  protected:
5950   DEFAULT_COPY_CONSTRUCTOR(InstanceFieldGet);
5951 
5952  private:
5953   const FieldInfo field_info_;
5954 };
5955 
5956 class HInstanceFieldSet final : public HExpression<2> {
5957  public:
5958   HInstanceFieldSet(HInstruction* object,
5959                     HInstruction* value,
5960                     ArtField* field,
5961                     DataType::Type field_type,
5962                     MemberOffset field_offset,
5963                     bool is_volatile,
5964                     uint32_t field_idx,
5965                     uint16_t declaring_class_def_index,
5966                     const DexFile& dex_file,
5967                     uint32_t dex_pc)
5968       : HExpression(kInstanceFieldSet,
5969                     SideEffects::FieldWriteOfType(field_type, is_volatile),
5970                     dex_pc),
5971         field_info_(field,
5972                     field_offset,
5973                     field_type,
5974                     is_volatile,
5975                     field_idx,
5976                     declaring_class_def_index,
5977                     dex_file) {
5978     SetPackedFlag<kFlagValueCanBeNull>(true);
5979     SetRawInputAt(0, object);
5980     SetRawInputAt(1, value);
5981   }
5982 
5983   bool IsClonable() const override { return true; }
5984 
5985   bool CanDoImplicitNullCheckOn(HInstruction* obj) const override {
5986     return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
5987   }
5988 
5989   const FieldInfo& GetFieldInfo() const { return field_info_; }
5990   MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
5991   DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
5992   bool IsVolatile() const { return field_info_.IsVolatile(); }
5993   HInstruction* GetValue() const { return InputAt(1); }
5994   bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
5995   void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }
5996 
5997   DECLARE_INSTRUCTION(InstanceFieldSet);
5998 
5999  protected:
6000   DEFAULT_COPY_CONSTRUCTOR(InstanceFieldSet);
6001 
6002  private:
6003   static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
6004   static constexpr size_t kNumberOfInstanceFieldSetPackedBits = kFlagValueCanBeNull + 1;
6005   static_assert(kNumberOfInstanceFieldSetPackedBits <= kMaxNumberOfPackedBits,
6006                 "Too many packed fields.");
6007 
6008   const FieldInfo field_info_;
6009 };
6010 
6011 class HArrayGet final : public HExpression<2> {
6012  public:
6013   HArrayGet(HInstruction* array,
6014             HInstruction* index,
6015             DataType::Type type,
6016             uint32_t dex_pc)
6017      : HArrayGet(array,
6018                  index,
6019                  type,
6020                  SideEffects::ArrayReadOfType(type),
6021                  dex_pc,
6022                  /* is_string_char_at= */ false) {
6023   }
6024 
6025   HArrayGet(HInstruction* array,
6026             HInstruction* index,
6027             DataType::Type type,
6028             SideEffects side_effects,
6029             uint32_t dex_pc,
6030             bool is_string_char_at)
6031       : HExpression(kArrayGet, type, side_effects, dex_pc) {
6032     SetPackedFlag<kFlagIsStringCharAt>(is_string_char_at);
6033     SetRawInputAt(0, array);
6034     SetRawInputAt(1, index);
6035   }
6036 
6037   bool IsClonable() const override { return true; }
6038   bool CanBeMoved() const override { return true; }
6039   bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
6040     return true;
6041   }
6042   bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const override {
6043     // TODO: We can be smarter here.
6044     // Currently, unless the array is the result of NewArray, the array access is always
6045     // preceded by some form of a null check necessary for the bounds check, usually an
6046     // implicit null check on the ArrayLength input to BoundsCheck or Deoptimize for
6047     // dynamic BCE. There are cases when these could be removed to produce better code.
6048     // If we ever add optimizations to do so we should allow an implicit check here
6049     // (as long as the address falls in the first page).
6050     //
6051     // As an example of such fancy optimization, we could eliminate BoundsCheck for
6052     //     a = cond ? new int[1] : null;
6053     //     a[0];  // The Phi does not need bounds check for either input.
6054     return false;
6055   }
6056 
6057   bool IsEquivalentOf(HArrayGet* other) const {
6058     bool result = (GetDexPc() == other->GetDexPc());
6059     if (kIsDebugBuild && result) {
6060       DCHECK_EQ(GetBlock(), other->GetBlock());
6061       DCHECK_EQ(GetArray(), other->GetArray());
6062       DCHECK_EQ(GetIndex(), other->GetIndex());
6063       if (DataType::IsIntOrLongType(GetType())) {
6064         DCHECK(DataType::IsFloatingPointType(other->GetType())) << other->GetType();
6065       } else {
6066         DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
6067         DCHECK(DataType::IsIntOrLongType(other->GetType())) << other->GetType();
6068       }
6069     }
6070     return result;
6071   }
6072 
6073   bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }
6074 
6075   HInstruction* GetArray() const { return InputAt(0); }
6076   HInstruction* GetIndex() const { return InputAt(1); }
6077 
6078   void SetType(DataType::Type new_type) {
6079     DCHECK(DataType::IsIntegralType(GetType()));
6080     DCHECK(DataType::IsIntegralType(new_type));
6081     DCHECK_EQ(DataType::Size(GetType()), DataType::Size(new_type));
6082     SetPackedField<TypeField>(new_type);
6083   }
6084 
6085   DECLARE_INSTRUCTION(ArrayGet);
6086 
6087  protected:
6088   DEFAULT_COPY_CONSTRUCTOR(ArrayGet);
6089 
6090  private:
6091   // We treat a String as an array, creating the HArrayGet from String.charAt()
6092   // intrinsic in the instruction simplifier. We can always determine whether
6093   // a particular HArrayGet is actually a String.charAt() by looking at the type
6094   // of the input but that requires holding the mutator lock, so we prefer to use
6095   // a flag, so that code generators don't need to do the locking.
6096   static constexpr size_t kFlagIsStringCharAt = kNumberOfGenericPackedBits;
6097   static constexpr size_t kNumberOfArrayGetPackedBits = kFlagIsStringCharAt + 1;
6098   static_assert(kNumberOfArrayGetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6099                 "Too many packed fields.");
6100 };
6101 
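// Stores a value into an array element. A store into a reference array may require a
// run-time type check and can then throw ArrayStoreException, e.g. for
//     Object[] a = ...;
//     a[i] = o;  // Needs a type check unless the static types make it redundant.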
6102 class HArraySet final : public HExpression<3> {
6103  public:
6104   HArraySet(HInstruction* array,
6105             HInstruction* index,
6106             HInstruction* value,
6107             DataType::Type expected_component_type,
6108             uint32_t dex_pc)
6109       : HArraySet(array,
6110                   index,
6111                   value,
6112                   expected_component_type,
6113                   // Make a best guess for side effects now, may be refined during SSA building.
6114                   ComputeSideEffects(GetComponentType(value->GetType(), expected_component_type)),
6115                   dex_pc) {
6116   }
6117 
6118   HArraySet(HInstruction* array,
6119             HInstruction* index,
6120             HInstruction* value,
6121             DataType::Type expected_component_type,
6122             SideEffects side_effects,
6123             uint32_t dex_pc)
6124       : HExpression(kArraySet, side_effects, dex_pc) {
6125     SetPackedField<ExpectedComponentTypeField>(expected_component_type);
6126     SetPackedFlag<kFlagNeedsTypeCheck>(value->GetType() == DataType::Type::kReference);
6127     SetPackedFlag<kFlagValueCanBeNull>(true);
6128     SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(false);
6129     SetRawInputAt(0, array);
6130     SetRawInputAt(1, index);
6131     SetRawInputAt(2, value);
6132   }
6133 
6134   bool IsClonable() const override { return true; }
6135 
6136   bool NeedsEnvironment() const override {
6137     // We call a runtime method to throw ArrayStoreException.
6138     return NeedsTypeCheck();
6139   }
6140 
6141   // Can throw ArrayStoreException.
6142   bool CanThrow() const override { return NeedsTypeCheck(); }
6143 
6144   bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const override {
6145     // TODO: Same as for ArrayGet.
6146     return false;
6147   }
6148 
6149   void ClearNeedsTypeCheck() {
6150     SetPackedFlag<kFlagNeedsTypeCheck>(false);
6151   }
6152 
6153   void ClearValueCanBeNull() {
6154     SetPackedFlag<kFlagValueCanBeNull>(false);
6155   }
6156 
6157   void SetStaticTypeOfArrayIsObjectArray() {
6158     SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(true);
6159   }
6160 
6161   bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
6162   bool NeedsTypeCheck() const { return GetPackedFlag<kFlagNeedsTypeCheck>(); }
6163   bool StaticTypeOfArrayIsObjectArray() const {
6164     return GetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>();
6165   }
6166 
6167   HInstruction* GetArray() const { return InputAt(0); }
6168   HInstruction* GetIndex() const { return InputAt(1); }
6169   HInstruction* GetValue() const { return InputAt(2); }
6170 
6171   DataType::Type GetComponentType() const {
6172     return GetComponentType(GetValue()->GetType(), GetRawExpectedComponentType());
6173   }
6174 
6175   static DataType::Type GetComponentType(DataType::Type value_type,
6176                                          DataType::Type expected_component_type) {
6177     // The Dex format does not type floating point index operations. Since the
6178     // `expected_component_type` comes from SSA building and can therefore not
6179     // be correct, we also check the value type. If it is a floating
6180     // point type, we must use that type.
6181     return ((value_type == DataType::Type::kFloat32) || (value_type == DataType::Type::kFloat64))
6182         ? value_type
6183         : expected_component_type;
6184   }
6185 
6186   DataType::Type GetRawExpectedComponentType() const {
6187     return GetPackedField<ExpectedComponentTypeField>();
6188   }
6189 
6190   static SideEffects ComputeSideEffects(DataType::Type type) {
6191     return SideEffects::ArrayWriteOfType(type).Union(SideEffectsForArchRuntimeCalls(type));
6192   }
6193 
6194   static SideEffects SideEffectsForArchRuntimeCalls(DataType::Type value_type) {
6195     return (value_type == DataType::Type::kReference) ? SideEffects::CanTriggerGC()
6196                                                       : SideEffects::None();
6197   }
6198 
6199   DECLARE_INSTRUCTION(ArraySet);
6200 
6201  protected:
6202   DEFAULT_COPY_CONSTRUCTOR(ArraySet);
6203 
6204  private:
6205   static constexpr size_t kFieldExpectedComponentType = kNumberOfGenericPackedBits;
6206   static constexpr size_t kFieldExpectedComponentTypeSize =
6207       MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
6208   static constexpr size_t kFlagNeedsTypeCheck =
6209       kFieldExpectedComponentType + kFieldExpectedComponentTypeSize;
6210   static constexpr size_t kFlagValueCanBeNull = kFlagNeedsTypeCheck + 1;
6211   // Cached information for the reference_type_info_ so that codegen
6212   // does not need to inspect the static type.
6213   static constexpr size_t kFlagStaticTypeOfArrayIsObjectArray = kFlagValueCanBeNull + 1;
6214   static constexpr size_t kNumberOfArraySetPackedBits =
6215       kFlagStaticTypeOfArrayIsObjectArray + 1;
6216   static_assert(kNumberOfArraySetPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
6217   using ExpectedComponentTypeField =
6218       BitField<DataType::Type, kFieldExpectedComponentType, kFieldExpectedComponentTypeSize>;
6219 };
6220 
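// Returns the length of an array. Also created for the String.length() and String.isEmpty()
// intrinsics, in which case the kFlagIsStringLength flag below is set.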
6221 class HArrayLength final : public HExpression<1> {
6222  public:
6223   HArrayLength(HInstruction* array, uint32_t dex_pc, bool is_string_length = false)
6224       : HExpression(kArrayLength, DataType::Type::kInt32, SideEffects::None(), dex_pc) {
6225     SetPackedFlag<kFlagIsStringLength>(is_string_length);
6226     // Note that arrays do not change length, so the instruction does not
6227     // depend on any write.
6228     SetRawInputAt(0, array);
6229   }
6230 
6231   bool IsClonable() const override { return true; }
6232   bool CanBeMoved() const override { return true; }
6233   bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
6234     return true;
6235   }
6236   bool CanDoImplicitNullCheckOn(HInstruction* obj) const override {
6237     return obj == InputAt(0);
6238   }
6239 
6240   bool IsStringLength() const { return GetPackedFlag<kFlagIsStringLength>(); }
6241 
6242   DECLARE_INSTRUCTION(ArrayLength);
6243 
6244  protected:
6245   DEFAULT_COPY_CONSTRUCTOR(ArrayLength);
6246 
6247  private:
6248   // We treat a String as an array, creating the HArrayLength from String.length()
6249   // or String.isEmpty() intrinsic in the instruction simplifier. We can always
6250   // determine whether a particular HArrayLength is actually a String.length() by
6251   // looking at the type of the input but that requires holding the mutator lock, so
6252   // we prefer to use a flag, so that code generators don't need to do the locking.
6253   static constexpr size_t kFlagIsStringLength = kNumberOfGenericPackedBits;
6254   static constexpr size_t kNumberOfArrayLengthPackedBits = kFlagIsStringLength + 1;
6255   static_assert(kNumberOfArrayLengthPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6256                 "Too many packed fields.");
6257 };
6258 
6259 class HBoundsCheck final : public HExpression<2> {
6260  public:
6261   // `HBoundsCheck` can trigger GC, as it may call the `IndexOutOfBoundsException`
6262   // constructor. However it can only do it on a fatal slow path so execution never returns to the
6263   // instruction following the current one; thus 'SideEffects::None()' is used.
6264   HBoundsCheck(HInstruction* index,
6265                HInstruction* length,
6266                uint32_t dex_pc,
6267                bool is_string_char_at = false)
6268       : HExpression(kBoundsCheck, index->GetType(), SideEffects::None(), dex_pc) {
6269     DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(index->GetType()));
6270     SetPackedFlag<kFlagIsStringCharAt>(is_string_char_at);
6271     SetRawInputAt(0, index);
6272     SetRawInputAt(1, length);
6273   }
6274 
6275   bool IsClonable() const override { return true; }
6276   bool CanBeMoved() const override { return true; }
6277   bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
6278     return true;
6279   }
6280 
6281   bool NeedsEnvironment() const override { return true; }
6282 
6283   bool CanThrow() const override { return true; }
6284 
6285   bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }
6286 
6287   HInstruction* GetIndex() const { return InputAt(0); }
6288 
6289   DECLARE_INSTRUCTION(BoundsCheck);
6290 
6291  protected:
6292   DEFAULT_COPY_CONSTRUCTOR(BoundsCheck);
6293 
6294  private:
6295   static constexpr size_t kFlagIsStringCharAt = kNumberOfGenericPackedBits;
6296   static constexpr size_t kNumberOfBoundsCheckPackedBits = kFlagIsStringCharAt + 1;
6297   static_assert(kNumberOfBoundsCheckPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6298                 "Too many packed fields.");
6299 };
6300 
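// Checks whether the current thread has a pending suspend request (e.g. for GC) and calls
// into the runtime if so; typically emitted at loop back edges to keep loops interruptible.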
6301 class HSuspendCheck final : public HExpression<0> {
6302  public:
6303   explicit HSuspendCheck(uint32_t dex_pc = kNoDexPc)
6304       : HExpression(kSuspendCheck, SideEffects::CanTriggerGC(), dex_pc),
6305         slow_path_(nullptr) {
6306   }
6307 
6308   bool IsClonable() const override { return true; }
6309 
6310   bool NeedsEnvironment() const override {
6311     return true;
6312   }
6313 
6314   void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
6315   SlowPathCode* GetSlowPath() const { return slow_path_; }
6316 
6317   DECLARE_INSTRUCTION(SuspendCheck);
6318 
6319  protected:
6320   DEFAULT_COPY_CONSTRUCTOR(SuspendCheck);
6321 
6322  private:
6323   // Only used for code generation, in order to share the same slow path between back edges
6324   // of a same loop.
6325   SlowPathCode* slow_path_;
6326 };
6327 
6328 // Pseudo-instruction which provides the native debugger with mapping information.
6329 // It ensures that we can generate line number and local variables at this point.
6330 class HNativeDebugInfo : public HExpression<0> {
6331  public:
6332   explicit HNativeDebugInfo(uint32_t dex_pc)
6333       : HExpression<0>(kNativeDebugInfo, SideEffects::None(), dex_pc) {
6334   }
6335 
6336   bool NeedsEnvironment() const override {
6337     return true;
6338   }
6339 
6340   DECLARE_INSTRUCTION(NativeDebugInfo);
6341 
6342  protected:
6343   DEFAULT_COPY_CONSTRUCTOR(NativeDebugInfo);
6344 };
6345 
6346 /**
6347  * Instruction to load a Class object.
6348  */
6349 class HLoadClass final : public HInstruction {
6350  public:
6351   // Determines how to load the Class.
6352   enum class LoadKind {
6353     // We cannot load this class. See HSharpening::SharpenLoadClass.
6354     kInvalid = -1,
6355 
6356     // Use the Class* from the method's own ArtMethod*.
6357     kReferrersClass,
6358 
6359     // Use PC-relative boot image Class* address that will be known at link time.
6360     // Used for boot image classes referenced by boot image code.
6361     kBootImageLinkTimePcRelative,
6362 
6363     // Load from an entry in the .data.bimg.rel.ro using a PC-relative load.
6364     // Used for boot image classes referenced by apps in AOT-compiled code.
6365     kBootImageRelRo,
6366 
6367     // Load from an entry in the .bss section using a PC-relative load.
6368     // Used for classes outside boot image referenced by AOT-compiled app and boot image code.
6369     kBssEntry,
6370 
6371     // Use a known boot image Class* address, embedded in the code by the codegen.
6372     // Used for boot image classes referenced by apps in JIT-compiled code.
6373     kJitBootImageAddress,
6374 
6375     // Load from the root table associated with the JIT compiled method.
6376     kJitTableAddress,
6377 
6378     // Load using a simple runtime call. This is the fall-back load kind when
6379     // the codegen is unable to use another appropriate kind.
6380     kRuntimeCall,
6381 
6382     kLast = kRuntimeCall
6383   };
6384 
6385   HLoadClass(HCurrentMethod* current_method,
6386              dex::TypeIndex type_index,
6387              const DexFile& dex_file,
6388              Handle<mirror::Class> klass,
6389              bool is_referrers_class,
6390              uint32_t dex_pc,
6391              bool needs_access_check)
6392       : HInstruction(kLoadClass,
6393                      DataType::Type::kReference,
6394                      SideEffectsForArchRuntimeCalls(),
6395                      dex_pc),
6396         special_input_(HUserRecord<HInstruction*>(current_method)),
6397         type_index_(type_index),
6398         dex_file_(dex_file),
6399         klass_(klass) {
6400     // Referrers class should not need access check. We never inline unverified
6401     // methods so we can't possibly end up in this situation.
6402     DCHECK(!is_referrers_class || !needs_access_check);
6403 
6404     SetPackedField<LoadKindField>(
6405         is_referrers_class ? LoadKind::kReferrersClass : LoadKind::kRuntimeCall);
6406     SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
6407     SetPackedFlag<kFlagIsInBootImage>(false);
6408     SetPackedFlag<kFlagGenerateClInitCheck>(false);
6409     SetPackedFlag<kFlagValidLoadedClassRTI>(false);
6410   }
6411 
6412   bool IsClonable() const override { return true; }
6413 
6414   void SetLoadKind(LoadKind load_kind);
6415 
6416   LoadKind GetLoadKind() const {
6417     return GetPackedField<LoadKindField>();
6418   }
6419 
6420   bool HasPcRelativeLoadKind() const {
6421     return GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
6422            GetLoadKind() == LoadKind::kBootImageRelRo ||
6423            GetLoadKind() == LoadKind::kBssEntry;
6424   }
6425 
6426   bool CanBeMoved() const override { return true; }
6427 
6428   bool InstructionDataEquals(const HInstruction* other) const override;
6429 
6430   size_t ComputeHashCode() const override { return type_index_.index_; }
6431 
6432   bool CanBeNull() const override { return false; }
6433 
6434   bool NeedsEnvironment() const override {
6435     return CanCallRuntime();
6436   }
6437 
6438   void SetMustGenerateClinitCheck(bool generate_clinit_check) {
6439     // The entrypoint the code generator is going to call does not do
6440     // clinit of the class.
6441     DCHECK(!NeedsAccessCheck());
6442     SetPackedFlag<kFlagGenerateClInitCheck>(generate_clinit_check);
6443   }
6444 
6445   bool CanCallRuntime() const {
6446     return NeedsAccessCheck() ||
6447            MustGenerateClinitCheck() ||
6448            GetLoadKind() == LoadKind::kRuntimeCall ||
6449            GetLoadKind() == LoadKind::kBssEntry;
6450   }
6451 
6452   bool CanThrow() const override {
6453     return NeedsAccessCheck() ||
6454            MustGenerateClinitCheck() ||
6455            // If the class is in the boot image, the lookup in the runtime call cannot throw.
6456            ((GetLoadKind() == LoadKind::kRuntimeCall ||
6457              GetLoadKind() == LoadKind::kBssEntry) &&
6458             !IsInBootImage());
6459   }
6460 
6461   ReferenceTypeInfo GetLoadedClassRTI() {
6462     if (GetPackedFlag<kFlagValidLoadedClassRTI>()) {
6463       // Note: The is_exact flag from the return value should not be used.
6464       return ReferenceTypeInfo::CreateUnchecked(klass_, /* is_exact= */ true);
6465     } else {
6466       return ReferenceTypeInfo::CreateInvalid();
6467     }
6468   }
6469 
6470   // Loaded class RTI is marked as valid by RTP if the klass_ is admissible.
6471   void SetValidLoadedClassRTI() REQUIRES_SHARED(Locks::mutator_lock_) {
6472     DCHECK(klass_ != nullptr);
6473     SetPackedFlag<kFlagValidLoadedClassRTI>(true);
6474   }
6475 
6476   dex::TypeIndex GetTypeIndex() const { return type_index_; }
6477   const DexFile& GetDexFile() const { return dex_file_; }
6478 
6479   bool NeedsDexCacheOfDeclaringClass() const override {
6480     return GetLoadKind() == LoadKind::kRuntimeCall;
6481   }
6482 
6483   static SideEffects SideEffectsForArchRuntimeCalls() {
6484     return SideEffects::CanTriggerGC();
6485   }
6486 
6487   bool IsReferrersClass() const { return GetLoadKind() == LoadKind::kReferrersClass; }
6488   bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }
6489   bool IsInBootImage() const { return GetPackedFlag<kFlagIsInBootImage>(); }
6490   bool MustGenerateClinitCheck() const { return GetPackedFlag<kFlagGenerateClInitCheck>(); }
6491 
6492   bool MustResolveTypeOnSlowPath() const {
6493     // Check that this instruction has a slow path.
6494     DCHECK(GetLoadKind() != LoadKind::kRuntimeCall);  // kRuntimeCall calls on main path.
6495     DCHECK(GetLoadKind() == LoadKind::kBssEntry || MustGenerateClinitCheck());
6496     return GetLoadKind() == LoadKind::kBssEntry;
6497   }
6498 
6499   void MarkInBootImage() {
6500     SetPackedFlag<kFlagIsInBootImage>(true);
6501   }
6502 
6503   void AddSpecialInput(HInstruction* special_input);
6504 
6505   using HInstruction::GetInputRecords;  // Keep the const version visible.
6506   ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
6507     return ArrayRef<HUserRecord<HInstruction*>>(
6508         &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
6509   }
6510 
6511   Handle<mirror::Class> GetClass() const {
6512     return klass_;
6513   }
6514 
6515   DECLARE_INSTRUCTION(LoadClass);
6516 
6517  protected:
6518   DEFAULT_COPY_CONSTRUCTOR(LoadClass);
6519 
6520  private:
6521   static constexpr size_t kFlagNeedsAccessCheck    = kNumberOfGenericPackedBits;
6522   static constexpr size_t kFlagIsInBootImage       = kFlagNeedsAccessCheck + 1;
6523   // Whether this instruction must generate the initialization check.
6524   // Used for code generation.
6525   static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInBootImage + 1;
6526   static constexpr size_t kFieldLoadKind           = kFlagGenerateClInitCheck + 1;
6527   static constexpr size_t kFieldLoadKindSize =
6528       MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
6529   static constexpr size_t kFlagValidLoadedClassRTI = kFieldLoadKind + kFieldLoadKindSize;
6530   static constexpr size_t kNumberOfLoadClassPackedBits = kFlagValidLoadedClassRTI + 1;
6531   static_assert(kNumberOfLoadClassPackedBits < kMaxNumberOfPackedBits, "Too many packed fields.");
6532   using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
6533 
6534   static bool HasTypeReference(LoadKind load_kind) {
6535     return load_kind == LoadKind::kReferrersClass ||
6536         load_kind == LoadKind::kBootImageLinkTimePcRelative ||
6537         load_kind == LoadKind::kBssEntry ||
6538         load_kind == LoadKind::kRuntimeCall;
6539   }
6540 
6541   void SetLoadKindInternal(LoadKind load_kind);
6542 
6543   // The special input is the HCurrentMethod for kRuntimeCall or kReferrersClass.
6544   // For other load kinds it's empty or possibly some architecture-specific instruction
6545   // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative.
6546   HUserRecord<HInstruction*> special_input_;
6547 
6548   // A type index and dex file where the class can be accessed. The dex file can be:
6549   // - The compiling method's dex file if the class is defined there too.
6550   // - The compiling method's dex file if the class is referenced there.
6551   // - The dex file where the class is defined. When the load kind can only be
6552   //   kBssEntry or kRuntimeCall, we cannot emit code for this `HLoadClass`.
6553   const dex::TypeIndex type_index_;
6554   const DexFile& dex_file_;
6555 
6556   Handle<mirror::Class> klass_;
6557 };
6558 std::ostream& operator<<(std::ostream& os, HLoadClass::LoadKind rhs);
6559 
6560 // Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
6561 inline void HLoadClass::SetLoadKind(LoadKind load_kind) {
6562   // The load kind should be determined before inserting the instruction to the graph.
6563   DCHECK(GetBlock() == nullptr);
6564   DCHECK(GetEnvironment() == nullptr);
6565   SetPackedField<LoadKindField>(load_kind);
6566   if (load_kind != LoadKind::kRuntimeCall && load_kind != LoadKind::kReferrersClass) {
6567     special_input_ = HUserRecord<HInstruction*>(nullptr);
6568   }
6569   if (!NeedsEnvironment()) {
6570     SetSideEffects(SideEffects::None());
6571   }
6572 }
6573 
6574 // Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
6575 inline void HLoadClass::AddSpecialInput(HInstruction* special_input) {
6576   // The special input is used for PC-relative loads on some architectures,
6577   // including literal pool loads, which are PC-relative too.
6578   DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
6579          GetLoadKind() == LoadKind::kBootImageRelRo ||
6580          GetLoadKind() == LoadKind::kBssEntry ||
6581          GetLoadKind() == LoadKind::kJitBootImageAddress) << GetLoadKind();
6582   DCHECK(special_input_.GetInstruction() == nullptr);
6583   special_input_ = HUserRecord<HInstruction*>(special_input);
6584   special_input->AddUseAt(this, 0);
6585 }
6586 
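/**
 * Instruction to load a String object, analogous to HLoadClass above.
 */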
6587 class HLoadString final : public HInstruction {
6588  public:
6589   // Determines how to load the String.
6590   enum class LoadKind {
6591     // Use PC-relative boot image String* address that will be known at link time.
6592     // Used for boot image strings referenced by boot image code.
6593     kBootImageLinkTimePcRelative,
6594 
6595     // Load from an entry in the .data.bimg.rel.ro using a PC-relative load.
6596     // Used for boot image strings referenced by apps in AOT-compiled code.
6597     kBootImageRelRo,
6598 
6599     // Load from an entry in the .bss section using a PC-relative load.
6600     // Used for strings outside boot image referenced by AOT-compiled app and boot image code.
6601     kBssEntry,
6602 
6603     // Use a known boot image String* address, embedded in the code by the codegen.
6604     // Used for boot image strings referenced by apps in JIT-compiled code.
6605     kJitBootImageAddress,
6606 
6607     // Load from the root table associated with the JIT compiled method.
6608     kJitTableAddress,
6609 
6610     // Load using a simple runtime call. This is the fall-back load kind when
6611     // the codegen is unable to use another appropriate kind.
6612     kRuntimeCall,
6613 
6614     kLast = kRuntimeCall,
6615   };
6616 
6617   HLoadString(HCurrentMethod* current_method,
6618               dex::StringIndex string_index,
6619               const DexFile& dex_file,
6620               uint32_t dex_pc)
6621       : HInstruction(kLoadString,
6622                      DataType::Type::kReference,
6623                      SideEffectsForArchRuntimeCalls(),
6624                      dex_pc),
6625         special_input_(HUserRecord<HInstruction*>(current_method)),
6626         string_index_(string_index),
6627         dex_file_(dex_file) {
6628     SetPackedField<LoadKindField>(LoadKind::kRuntimeCall);
6629   }
6630 
6631   bool IsClonable() const override { return true; }
6632 
6633   void SetLoadKind(LoadKind load_kind);
6634 
6635   LoadKind GetLoadKind() const {
6636     return GetPackedField<LoadKindField>();
6637   }
6638 
6639   bool HasPcRelativeLoadKind() const {
6640     return GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
6641            GetLoadKind() == LoadKind::kBootImageRelRo ||
6642            GetLoadKind() == LoadKind::kBssEntry;
6643   }
6644 
6645   const DexFile& GetDexFile() const {
6646     return dex_file_;
6647   }
6648 
6649   dex::StringIndex GetStringIndex() const {
6650     return string_index_;
6651   }
6652 
6653   Handle<mirror::String> GetString() const {
6654     return string_;
6655   }
6656 
6657   void SetString(Handle<mirror::String> str) {
6658     string_ = str;
6659   }
6660 
6661   bool CanBeMoved() const override { return true; }
6662 
6663   bool InstructionDataEquals(const HInstruction* other) const override;
6664 
6665   size_t ComputeHashCode() const override { return string_index_.index_; }
6666 
6667   // Will call the runtime if we need to load the string through
6668   // the dex cache and the string is not guaranteed to be there yet.
6669   bool NeedsEnvironment() const override {
6670     LoadKind load_kind = GetLoadKind();
6671     if (load_kind == LoadKind::kBootImageLinkTimePcRelative ||
6672         load_kind == LoadKind::kBootImageRelRo ||
6673         load_kind == LoadKind::kJitBootImageAddress ||
6674         load_kind == LoadKind::kJitTableAddress) {
6675       return false;
6676     }
6677     return true;
6678   }
6679 
6680   bool NeedsDexCacheOfDeclaringClass() const override {
6681     return GetLoadKind() == LoadKind::kRuntimeCall;
6682   }
6683 
6684   bool CanBeNull() const override { return false; }
6685   bool CanThrow() const override { return NeedsEnvironment(); }
6686 
6687   static SideEffects SideEffectsForArchRuntimeCalls() {
6688     return SideEffects::CanTriggerGC();
6689   }
6690 
6691   void AddSpecialInput(HInstruction* special_input);
6692 
6693   using HInstruction::GetInputRecords;  // Keep the const version visible.
6694   ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
6695     return ArrayRef<HUserRecord<HInstruction*>>(
6696         &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
6697   }
6698 
6699   DECLARE_INSTRUCTION(LoadString);
6700 
6701  protected:
6702   DEFAULT_COPY_CONSTRUCTOR(LoadString);
6703 
6704  private:
6705   static constexpr size_t kFieldLoadKind = kNumberOfGenericPackedBits;
6706   static constexpr size_t kFieldLoadKindSize =
6707       MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
6708   static constexpr size_t kNumberOfLoadStringPackedBits = kFieldLoadKind + kFieldLoadKindSize;
6709   static_assert(kNumberOfLoadStringPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
6710   using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
6711 
6712   void SetLoadKindInternal(LoadKind load_kind);
6713 
6714   // The special input is the HCurrentMethod for kRuntimeCall.
6715   // For other load kinds it's empty or possibly some architecture-specific instruction
6716   // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative.
6717   HUserRecord<HInstruction*> special_input_;
6718 
6719   dex::StringIndex string_index_;
6720   const DexFile& dex_file_;
6721 
6722   Handle<mirror::String> string_;
6723 };
6724 std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs);
6725 
6726 // Note: defined outside class to see operator<<(., HLoadString::LoadKind).
6727 inline void HLoadString::SetLoadKind(LoadKind load_kind) {
6728   // The load kind should be determined before inserting the instruction to the graph.
6729   DCHECK(GetBlock() == nullptr);
6730   DCHECK(GetEnvironment() == nullptr);
6731   DCHECK_EQ(GetLoadKind(), LoadKind::kRuntimeCall);
6732   SetPackedField<LoadKindField>(load_kind);
6733   if (load_kind != LoadKind::kRuntimeCall) {
6734     special_input_ = HUserRecord<HInstruction*>(nullptr);
6735   }
6736   if (!NeedsEnvironment()) {
6737     SetSideEffects(SideEffects::None());
6738   }
6739 }
6740 
6741 // Note: defined outside class to see operator<<(., HLoadString::LoadKind).
6742 inline void HLoadString::AddSpecialInput(HInstruction* special_input) {
6743   // The special input is used for PC-relative loads on some architectures,
6744   // including literal pool loads, which are PC-relative too.
6745   DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
6746          GetLoadKind() == LoadKind::kBootImageRelRo ||
6747          GetLoadKind() == LoadKind::kBssEntry ||
6748          GetLoadKind() == LoadKind::kJitBootImageAddress) << GetLoadKind();
6749   // HLoadString::GetInputRecords() returns an empty array at this point,
6750   // so use the GetInputRecords() from the base class to set the input record.
6751   DCHECK(special_input_.GetInstruction() == nullptr);
6752   special_input_ = HUserRecord<HInstruction*>(special_input);
6753   special_input->AddUseAt(this, 0);
6754 }
6755 
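// Instruction to load a java.lang.invoke.MethodHandle for the given method handle index
// through a runtime call, hence SideEffects::CanTriggerGC().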
6756 class HLoadMethodHandle final : public HInstruction {
6757  public:
6758   HLoadMethodHandle(HCurrentMethod* current_method,
6759                     uint16_t method_handle_idx,
6760                     const DexFile& dex_file,
6761                     uint32_t dex_pc)
6762       : HInstruction(kLoadMethodHandle,
6763                      DataType::Type::kReference,
6764                      SideEffectsForArchRuntimeCalls(),
6765                      dex_pc),
6766         special_input_(HUserRecord<HInstruction*>(current_method)),
6767         method_handle_idx_(method_handle_idx),
6768         dex_file_(dex_file) {
6769   }
6770 
6771   using HInstruction::GetInputRecords;  // Keep the const version visible.
6772   ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
6773     return ArrayRef<HUserRecord<HInstruction*>>(
6774         &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
6775   }
6776 
6777   bool IsClonable() const override { return true; }
6778 
6779   uint16_t GetMethodHandleIndex() const { return method_handle_idx_; }
6780 
6781   const DexFile& GetDexFile() const { return dex_file_; }
6782 
6783   static SideEffects SideEffectsForArchRuntimeCalls() {
6784     return SideEffects::CanTriggerGC();
6785   }
6786 
6787   DECLARE_INSTRUCTION(LoadMethodHandle);
6788 
6789  protected:
6790   DEFAULT_COPY_CONSTRUCTOR(LoadMethodHandle);
6791 
6792  private:
6793   // The special input is the HCurrentMethod for kRuntimeCall.
6794   HUserRecord<HInstruction*> special_input_;
6795 
6796   const uint16_t method_handle_idx_;
6797   const DexFile& dex_file_;
6798 };
6799 
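// Instruction to load a java.lang.invoke.MethodType for the given proto index
// through a runtime call, hence SideEffects::CanTriggerGC().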
6800 class HLoadMethodType final : public HInstruction {
6801  public:
6802   HLoadMethodType(HCurrentMethod* current_method,
6803                   dex::ProtoIndex proto_index,
6804                   const DexFile& dex_file,
6805                   uint32_t dex_pc)
6806       : HInstruction(kLoadMethodType,
6807                      DataType::Type::kReference,
6808                      SideEffectsForArchRuntimeCalls(),
6809                      dex_pc),
6810         special_input_(HUserRecord<HInstruction*>(current_method)),
6811         proto_index_(proto_index),
6812         dex_file_(dex_file) {
6813   }
6814 
6815   using HInstruction::GetInputRecords;  // Keep the const version visible.
6816   ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
6817     return ArrayRef<HUserRecord<HInstruction*>>(
6818         &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
6819   }
6820 
6821   bool IsClonable() const override { return true; }
6822 
6823   dex::ProtoIndex GetProtoIndex() const { return proto_index_; }
6824 
6825   const DexFile& GetDexFile() const { return dex_file_; }
6826 
6827   static SideEffects SideEffectsForArchRuntimeCalls() {
6828     return SideEffects::CanTriggerGC();
6829   }
6830 
6831   DECLARE_INSTRUCTION(LoadMethodType);
6832 
6833  protected:
6834   DEFAULT_COPY_CONSTRUCTOR(LoadMethodType);
6835 
6836  private:
6837   // The special input is the HCurrentMethod for kRuntimeCall.
6838   HUserRecord<HInstruction*> special_input_;
6839 
6840   const dex::ProtoIndex proto_index_;
6841   const DexFile& dex_file_;
6842 };
6843 
6844 /**
6845  * Performs an initialization check on its Class object input.
6846  */
6847 class HClinitCheck final : public HExpression<1> {
6848  public:
6849   HClinitCheck(HLoadClass* constant, uint32_t dex_pc)
6850       : HExpression(
6851             kClinitCheck,
6852             DataType::Type::kReference,
6853             SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
6854             dex_pc) {
6855     SetRawInputAt(0, constant);
6856   }
6857   // TODO: Make ClinitCheck clonable.
6858   bool CanBeMoved() const override { return true; }
6859   bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
6860     return true;
6861   }
6862 
6863   bool NeedsEnvironment() const override {
6864     // May call runtime to initialize the class.
6865     return true;
6866   }
6867 
6868   bool CanThrow() const override { return true; }
6869 
6870   HLoadClass* GetLoadClass() const {
6871     DCHECK(InputAt(0)->IsLoadClass());
6872     return InputAt(0)->AsLoadClass();
6873   }
6874 
6875   DECLARE_INSTRUCTION(ClinitCheck);
6876 
6877 
6878  protected:
6879   DEFAULT_COPY_CONSTRUCTOR(ClinitCheck);
6880 };
6881 
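// Reads a static field. Volatile gets cannot be moved (see CanBeMoved()); the memory
// ordering requirement is captured by SideEffects::FieldReadOfType(field_type, is_volatile).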
6882 class HStaticFieldGet final : public HExpression<1> {
6883  public:
6884   HStaticFieldGet(HInstruction* cls,
6885                   ArtField* field,
6886                   DataType::Type field_type,
6887                   MemberOffset field_offset,
6888                   bool is_volatile,
6889                   uint32_t field_idx,
6890                   uint16_t declaring_class_def_index,
6891                   const DexFile& dex_file,
6892                   uint32_t dex_pc)
6893       : HExpression(kStaticFieldGet,
6894                     field_type,
6895                     SideEffects::FieldReadOfType(field_type, is_volatile),
6896                     dex_pc),
6897         field_info_(field,
6898                     field_offset,
6899                     field_type,
6900                     is_volatile,
6901                     field_idx,
6902                     declaring_class_def_index,
6903                     dex_file) {
6904     SetRawInputAt(0, cls);
6905   }
6906 
6907 
6908   bool IsClonable() const override { return true; }
6909   bool CanBeMoved() const override { return !IsVolatile(); }
6910 
6911   bool InstructionDataEquals(const HInstruction* other) const override {
6912     const HStaticFieldGet* other_get = other->AsStaticFieldGet();
6913     return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
6914   }
6915 
6916   size_t ComputeHashCode() const override {
6917     return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
6918   }
6919 
6920   const FieldInfo& GetFieldInfo() const { return field_info_; }
6921   MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
6922   DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
6923   bool IsVolatile() const { return field_info_.IsVolatile(); }
6924 
6925   void SetType(DataType::Type new_type) {
6926     DCHECK(DataType::IsIntegralType(GetType()));
6927     DCHECK(DataType::IsIntegralType(new_type));
6928     DCHECK_EQ(DataType::Size(GetType()), DataType::Size(new_type));
6929     SetPackedField<TypeField>(new_type);
6930   }
6931 
6932   DECLARE_INSTRUCTION(StaticFieldGet);
6933 
6934  protected:
6935   DEFAULT_COPY_CONSTRUCTOR(StaticFieldGet);
6936 
6937  private:
6938   const FieldInfo field_info_;
6939 };
6940 
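// Writes a static field. Tracks whether the stored value can be null (kFlagValueCanBeNull)
// so that code generators and optimizations can emit better code when it cannot.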
6941 class HStaticFieldSet final : public HExpression<2> {
6942  public:
6943   HStaticFieldSet(HInstruction* cls,
6944                   HInstruction* value,
6945                   ArtField* field,
6946                   DataType::Type field_type,
6947                   MemberOffset field_offset,
6948                   bool is_volatile,
6949                   uint32_t field_idx,
6950                   uint16_t declaring_class_def_index,
6951                   const DexFile& dex_file,
6952                   uint32_t dex_pc)
6953       : HExpression(kStaticFieldSet,
6954                     SideEffects::FieldWriteOfType(field_type, is_volatile),
6955                     dex_pc),
6956         field_info_(field,
6957                     field_offset,
6958                     field_type,
6959                     is_volatile,
6960                     field_idx,
6961                     declaring_class_def_index,
6962                     dex_file) {
6963     SetPackedFlag<kFlagValueCanBeNull>(true);
6964     SetRawInputAt(0, cls);
6965     SetRawInputAt(1, value);
6966   }
6967 
6968   bool IsClonable() const override { return true; }
6969   const FieldInfo& GetFieldInfo() const { return field_info_; }
6970   MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
6971   DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
6972   bool IsVolatile() const { return field_info_.IsVolatile(); }
6973 
6974   HInstruction* GetValue() const { return InputAt(1); }
6975   bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
6976   void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }
6977 
6978   DECLARE_INSTRUCTION(StaticFieldSet);
6979 
6980  protected:
6981   DEFAULT_COPY_CONSTRUCTOR(StaticFieldSet);
6982 
6983  private:
6984   static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
6985   static constexpr size_t kNumberOfStaticFieldSetPackedBits = kFlagValueCanBeNull + 1;
6986   static_assert(kNumberOfStaticFieldSetPackedBits <= kMaxNumberOfPackedBits,
6987                 "Too many packed fields.");
6988 
6989   const FieldInfo field_info_;
6990 };
6991 
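// A StringBuilder append sequence collapsed into a single runtime call. The inputs are the
// values to append followed by an int constant describing their format; see FormatIndex().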
6992 class HStringBuilderAppend final : public HVariableInputSizeInstruction {
6993  public:
6994   HStringBuilderAppend(HIntConstant* format,
6995                        uint32_t number_of_arguments,
6996                        ArenaAllocator* allocator,
6997                        uint32_t dex_pc)
6998       : HVariableInputSizeInstruction(
6999             kStringBuilderAppend,
7000             DataType::Type::kReference,
7001             // The runtime call may read memory from inputs. It never writes outside
7002             // of the newly allocated result object (or newly allocated helper objects).
7003             SideEffects::AllReads().Union(SideEffects::CanTriggerGC()),
7004             dex_pc,
7005             allocator,
7006             number_of_arguments + /* format */ 1u,
7007             kArenaAllocInvokeInputs) {
7008     DCHECK_GE(number_of_arguments, 1u);  // There must be something to append.
7009     SetRawInputAt(FormatIndex(), format);
7010   }
7011 
7012   void SetArgumentAt(size_t index, HInstruction* argument) {
7013     DCHECK_LT(index, GetNumberOfArguments());
7014     SetRawInputAt(index, argument);
7015   }
7016 
7017   // Return the number of arguments, excluding the format.
7018   size_t GetNumberOfArguments() const {
7019     DCHECK_GE(InputCount(), 1u);
7020     return InputCount() - 1u;
7021   }
7022 
7023   size_t FormatIndex() const {
7024     return GetNumberOfArguments();
7025   }
7026 
7027   HIntConstant* GetFormat() {
7028     return InputAt(FormatIndex())->AsIntConstant();
7029   }
7030 
7031   bool NeedsEnvironment() const override { return true; }
7032 
7033   bool CanThrow() const override { return true; }
7034 
7035   bool CanBeNull() const override { return false; }
7036 
7037   DECLARE_INSTRUCTION(StringBuilderAppend);
7038 
7039  protected:
7040   DEFAULT_COPY_CONSTRUCTOR(StringBuilderAppend);
7041 };
7042 
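// The HUnresolved{Instance,Static}Field{Get,Set} instructions below handle accesses to
// fields that could not be resolved at compile time. They are implemented as runtime
// calls, hence the conservative side effects, NeedsEnvironment() and CanThrow().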
7043 class HUnresolvedInstanceFieldGet final : public HExpression<1> {
7044  public:
7045   HUnresolvedInstanceFieldGet(HInstruction* obj,
7046                               DataType::Type field_type,
7047                               uint32_t field_index,
7048                               uint32_t dex_pc)
7049       : HExpression(kUnresolvedInstanceFieldGet,
7050                     field_type,
7051                     SideEffects::AllExceptGCDependency(),
7052                     dex_pc),
7053         field_index_(field_index) {
7054     SetRawInputAt(0, obj);
7055   }
7056 
7057   bool IsClonable() const override { return true; }
7058   bool NeedsEnvironment() const override { return true; }
7059   bool CanThrow() const override { return true; }
7060 
7061   DataType::Type GetFieldType() const { return GetType(); }
7062   uint32_t GetFieldIndex() const { return field_index_; }
7063 
7064   DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet);
7065 
7066  protected:
7067   DEFAULT_COPY_CONSTRUCTOR(UnresolvedInstanceFieldGet);
7068 
7069  private:
7070   const uint32_t field_index_;
7071 };
7072 
7073 class HUnresolvedInstanceFieldSet final : public HExpression<2> {
7074  public:
7075   HUnresolvedInstanceFieldSet(HInstruction* obj,
7076                               HInstruction* value,
7077                               DataType::Type field_type,
7078                               uint32_t field_index,
7079                               uint32_t dex_pc)
7080       : HExpression(kUnresolvedInstanceFieldSet, SideEffects::AllExceptGCDependency(), dex_pc),
7081         field_index_(field_index) {
7082     SetPackedField<FieldTypeField>(field_type);
7083     DCHECK_EQ(DataType::Kind(field_type), DataType::Kind(value->GetType()));
7084     SetRawInputAt(0, obj);
7085     SetRawInputAt(1, value);
7086   }
7087 
7088   bool IsClonable() const override { return true; }
7089   bool NeedsEnvironment() const override { return true; }
7090   bool CanThrow() const override { return true; }
7091 
7092   DataType::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
7093   uint32_t GetFieldIndex() const { return field_index_; }
7094 
7095   DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet);
7096 
7097  protected:
7098   DEFAULT_COPY_CONSTRUCTOR(UnresolvedInstanceFieldSet);
7099 
7100  private:
7101   static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
7102   static constexpr size_t kFieldFieldTypeSize =
7103       MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
7104   static constexpr size_t kNumberOfUnresolvedInstanceFieldSetPackedBits =
7105       kFieldFieldType + kFieldFieldTypeSize;
7106   static_assert(kNumberOfUnresolvedInstanceFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
7107                 "Too many packed fields.");
7108   using FieldTypeField = BitField<DataType::Type, kFieldFieldType, kFieldFieldTypeSize>;
7109 
7110   const uint32_t field_index_;
7111 };
7112 
7113 class HUnresolvedStaticFieldGet final : public HExpression<0> {
7114  public:
7115   HUnresolvedStaticFieldGet(DataType::Type field_type,
7116                             uint32_t field_index,
7117                             uint32_t dex_pc)
7118       : HExpression(kUnresolvedStaticFieldGet,
7119                     field_type,
7120                     SideEffects::AllExceptGCDependency(),
7121                     dex_pc),
7122         field_index_(field_index) {
7123   }
7124 
7125   bool IsClonable() const override { return true; }
7126   bool NeedsEnvironment() const override { return true; }
7127   bool CanThrow() const override { return true; }
7128 
7129   DataType::Type GetFieldType() const { return GetType(); }
7130   uint32_t GetFieldIndex() const { return field_index_; }
7131 
7132   DECLARE_INSTRUCTION(UnresolvedStaticFieldGet);
7133 
7134  protected:
7135   DEFAULT_COPY_CONSTRUCTOR(UnresolvedStaticFieldGet);
7136 
7137  private:
7138   const uint32_t field_index_;
7139 };
7140 
7141 class HUnresolvedStaticFieldSet final : public HExpression<1> {
7142  public:
7143   HUnresolvedStaticFieldSet(HInstruction* value,
7144                             DataType::Type field_type,
7145                             uint32_t field_index,
7146                             uint32_t dex_pc)
7147       : HExpression(kUnresolvedStaticFieldSet, SideEffects::AllExceptGCDependency(), dex_pc),
7148         field_index_(field_index) {
7149     SetPackedField<FieldTypeField>(field_type);
7150     DCHECK_EQ(DataType::Kind(field_type), DataType::Kind(value->GetType()));
7151     SetRawInputAt(0, value);
7152   }
7153 
7154   bool IsClonable() const override { return true; }
7155   bool NeedsEnvironment() const override { return true; }
7156   bool CanThrow() const override { return true; }
7157 
7158   DataType::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
7159   uint32_t GetFieldIndex() const { return field_index_; }
7160 
7161   DECLARE_INSTRUCTION(UnresolvedStaticFieldSet);
7162 
7163  protected:
7164   DEFAULT_COPY_CONSTRUCTOR(UnresolvedStaticFieldSet);
7165 
7166  private:
7167   static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
7168   static constexpr size_t kFieldFieldTypeSize =
7169       MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
7170   static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
7171       kFieldFieldType + kFieldFieldTypeSize;
7172   static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
7173                 "Too many packed fields.");
7174   using FieldTypeField = BitField<DataType::Type, kFieldFieldType, kFieldFieldTypeSize>;
7175 
7176   const uint32_t field_index_;
7177 };
7178 
7179 // Implement the move-exception DEX instruction.
7180 class HLoadException final : public HExpression<0> {
7181  public:
7182   explicit HLoadException(uint32_t dex_pc = kNoDexPc)
7183       : HExpression(kLoadException, DataType::Type::kReference, SideEffects::None(), dex_pc) {
7184   }
7185 
7186   bool CanBeNull() const override { return false; }
7187 
7188   DECLARE_INSTRUCTION(LoadException);
7189 
7190  protected:
7191   DEFAULT_COPY_CONSTRUCTOR(LoadException);
7192 };
7193 
7194 // Implicit part of move-exception which clears thread-local exception storage.
7195 // Must not be removed because the runtime expects the TLS to get cleared.
7196 class HClearException final : public HExpression<0> {
7197  public:
7198   explicit HClearException(uint32_t dex_pc = kNoDexPc)
7199       : HExpression(kClearException, SideEffects::AllWrites(), dex_pc) {
7200   }
7201 
7202   DECLARE_INSTRUCTION(ClearException);
7203 
7204  protected:
7205   DEFAULT_COPY_CONSTRUCTOR(ClearException);
7206 };
7207 
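// Implements the `throw` DEX instruction: control never falls through to the next
// instruction, so it is a block-ending instruction (IsControlFlow()) and AlwaysThrows().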
7208 class HThrow final : public HExpression<1> {
7209  public:
7210   HThrow(HInstruction* exception, uint32_t dex_pc)
7211       : HExpression(kThrow, SideEffects::CanTriggerGC(), dex_pc) {
7212     SetRawInputAt(0, exception);
7213   }
7214 
7215   bool IsControlFlow() const override { return true; }
7216 
7217   bool NeedsEnvironment() const override { return true; }
7218 
7219   bool CanThrow() const override { return true; }
7220 
7221   bool AlwaysThrows() const override { return true; }
7222 
7223   DECLARE_INSTRUCTION(Throw);
7224 
7225  protected:
7226   DEFAULT_COPY_CONSTRUCTOR(Throw);
7227 };
7228 
7229 /**
7230  * Implementation strategies for the code generator of a HInstanceOf
7231  * or `HCheckCast`.
7232  */
7233 enum class TypeCheckKind {  // private marker to avoid generate-operator-out.py from processing.
7234   kUnresolvedCheck,       // Check against an unresolved type.
7235   kExactCheck,            // Can do a single class compare.
7236   kClassHierarchyCheck,   // Can just walk the super class chain.
7237   kAbstractClassCheck,    // Can just walk the super class chain, starting one up.
7238   kInterfaceCheck,        // No optimization yet when checking against an interface.
7239   kArrayObjectCheck,      // Can just check if the array is not primitive.
7240   kArrayCheck,            // No optimization yet when checking against a generic array.
7241   kBitstringCheck,        // Compare the type check bitstring.
7242   kLast = kBitstringCheck
7243 };
7244 
7245 std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs);
7246 
7247 // Note: HTypeCheckInstruction is just a helper class, not an abstract instruction with an
7248 // `IsTypeCheckInstruction()`. (New virtual methods in the HInstruction class have a high cost.)
7249 class HTypeCheckInstruction : public HVariableInputSizeInstruction {
7250  public:
7251   HTypeCheckInstruction(InstructionKind kind,
7252                         DataType::Type type,
7253                         HInstruction* object,
7254                         HInstruction* target_class_or_null,
7255                         TypeCheckKind check_kind,
7256                         Handle<mirror::Class> klass,
7257                         uint32_t dex_pc,
7258                         ArenaAllocator* allocator,
7259                         HIntConstant* bitstring_path_to_root,
7260                         HIntConstant* bitstring_mask,
7261                         SideEffects side_effects)
7262       : HVariableInputSizeInstruction(
7263           kind,
7264           type,
7265           side_effects,
7266           dex_pc,
7267           allocator,
7268           /* number_of_inputs= */ check_kind == TypeCheckKind::kBitstringCheck ? 4u : 2u,
7269           kArenaAllocTypeCheckInputs),
7270         klass_(klass) {
7271     SetPackedField<TypeCheckKindField>(check_kind);
7272     SetPackedFlag<kFlagMustDoNullCheck>(true);
7273     SetPackedFlag<kFlagValidTargetClassRTI>(false);
7274     SetRawInputAt(0, object);
7275     SetRawInputAt(1, target_class_or_null);
7276     DCHECK_EQ(check_kind == TypeCheckKind::kBitstringCheck, bitstring_path_to_root != nullptr);
7277     DCHECK_EQ(check_kind == TypeCheckKind::kBitstringCheck, bitstring_mask != nullptr);
7278     if (check_kind == TypeCheckKind::kBitstringCheck) {
7279       DCHECK(target_class_or_null->IsNullConstant());
7280       SetRawInputAt(2, bitstring_path_to_root);
7281       SetRawInputAt(3, bitstring_mask);
7282     } else {
7283       DCHECK(target_class_or_null->IsLoadClass());
7284     }
7285   }
7286 
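  // Input layout: [object, target_class_or_null], plus [bitstring_path_to_root, bitstring_mask]
  // at indexes 2 and 3 for kBitstringCheck (see the constructor above).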
7287   HLoadClass* GetTargetClass() const {
7288     DCHECK_NE(GetTypeCheckKind(), TypeCheckKind::kBitstringCheck);
7289     HInstruction* load_class = InputAt(1);
7290     DCHECK(load_class->IsLoadClass());
7291     return load_class->AsLoadClass();
7292   }
7293 
7294   uint32_t GetBitstringPathToRoot() const {
7295     DCHECK_EQ(GetTypeCheckKind(), TypeCheckKind::kBitstringCheck);
7296     HInstruction* path_to_root = InputAt(2);
7297     DCHECK(path_to_root->IsIntConstant());
7298     return static_cast<uint32_t>(path_to_root->AsIntConstant()->GetValue());
7299   }
7300 
7301   uint32_t GetBitstringMask() const {
7302     DCHECK_EQ(GetTypeCheckKind(), TypeCheckKind::kBitstringCheck);
7303     HInstruction* mask = InputAt(3);
7304     DCHECK(mask->IsIntConstant());
7305     return static_cast<uint32_t>(mask->AsIntConstant()->GetValue());
7306   }
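
  // Illustrative sketch (an assumption about the generated code, not a definition): for
  // kBitstringCheck the code generators conceptually load the type check bitstring from the
  // object's class and test it against the two constants above, roughly
  //
  //   bits = object->klass->GetStatusBitstring()   // hypothetical accessor name
  //   hit  = (bits & GetBitstringMask()) == GetBitstringPathToRoot()
  //
  // The actual load sequence and field name are owned by the code generators.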
7307 
IsClonable()7308   bool IsClonable() const override { return true; }
CanBeMoved()7309   bool CanBeMoved() const override { return true; }
7310 
InstructionDataEquals(const HInstruction * other)7311   bool InstructionDataEquals(const HInstruction* other) const override {
7312     DCHECK(other->IsInstanceOf() || other->IsCheckCast()) << other->DebugName();
7313     return GetPackedFields() == down_cast<const HTypeCheckInstruction*>(other)->GetPackedFields();
7314   }
7315 
MustDoNullCheck()7316   bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
ClearMustDoNullCheck()7317   void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
GetTypeCheckKind()7318   TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
IsExactCheck()7319   bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }
7320 
GetTargetClassRTI()7321   ReferenceTypeInfo GetTargetClassRTI() {
7322     if (GetPackedFlag<kFlagValidTargetClassRTI>()) {
7323       // Note: The is_exact flag from the return value should not be used.
7324       return ReferenceTypeInfo::CreateUnchecked(klass_, /* is_exact= */ true);
7325     } else {
7326       return ReferenceTypeInfo::CreateInvalid();
7327     }
7328   }
7329 
7330   // Target class RTI is marked as valid by RTP if the klass_ is admissible.
SetValidTargetClassRTI()7331   void SetValidTargetClassRTI() REQUIRES_SHARED(Locks::mutator_lock_) {
7332     DCHECK(klass_ != nullptr);
7333     SetPackedFlag<kFlagValidTargetClassRTI>(true);
7334   }
7335 
GetClass()7336   Handle<mirror::Class> GetClass() const {
7337     return klass_;
7338   }
7339 
7340  protected:
7341   DEFAULT_COPY_CONSTRUCTOR(TypeCheckInstruction);
7342 
7343  private:
7344   static constexpr size_t kFieldTypeCheckKind = kNumberOfGenericPackedBits;
7345   static constexpr size_t kFieldTypeCheckKindSize =
7346       MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
7347   static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
7348   static constexpr size_t kFlagValidTargetClassRTI = kFlagMustDoNullCheck + 1;
7349   static constexpr size_t kNumberOfInstanceOfPackedBits = kFlagValidTargetClassRTI + 1;
7350   static_assert(kNumberOfInstanceOfPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
7351   using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;
7352 
7353   Handle<mirror::Class> klass_;
7354 };
7355 
7356 class HInstanceOf final : public HTypeCheckInstruction {
7357  public:
HInstanceOf(HInstruction * object,HInstruction * target_class_or_null,TypeCheckKind check_kind,Handle<mirror::Class> klass,uint32_t dex_pc,ArenaAllocator * allocator,HIntConstant * bitstring_path_to_root,HIntConstant * bitstring_mask)7358   HInstanceOf(HInstruction* object,
7359               HInstruction* target_class_or_null,
7360               TypeCheckKind check_kind,
7361               Handle<mirror::Class> klass,
7362               uint32_t dex_pc,
7363               ArenaAllocator* allocator,
7364               HIntConstant* bitstring_path_to_root,
7365               HIntConstant* bitstring_mask)
7366       : HTypeCheckInstruction(kInstanceOf,
7367                               DataType::Type::kBool,
7368                               object,
7369                               target_class_or_null,
7370                               check_kind,
7371                               klass,
7372                               dex_pc,
7373                               allocator,
7374                               bitstring_path_to_root,
7375                               bitstring_mask,
7376                               SideEffectsForArchRuntimeCalls(check_kind)) {}
7377 
IsClonable()7378   bool IsClonable() const override { return true; }
7379 
NeedsEnvironment()7380   bool NeedsEnvironment() const override {
7381     return CanCallRuntime(GetTypeCheckKind());
7382   }
7383 
CanCallRuntime(TypeCheckKind check_kind)7384   static bool CanCallRuntime(TypeCheckKind check_kind) {
7385     // TODO: Re-evaluate now that mips codegen has been removed.
7386     return check_kind != TypeCheckKind::kExactCheck;
7387   }
7388 
SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind)7389   static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) {
7390     return CanCallRuntime(check_kind) ? SideEffects::CanTriggerGC() : SideEffects::None();
7391   }
7392 
7393   DECLARE_INSTRUCTION(InstanceOf);
7394 
7395  protected:
7396   DEFAULT_COPY_CONSTRUCTOR(InstanceOf);
7397 };
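
// Illustrative sketch (assumed typical graph shape, not a definition): a Java `obj instanceof Foo`
// is generally built as a HLoadClass feeding the HInstanceOf, whose boolean result is consumed
// like any other value, e.g.
//
//   if (obj instanceof Foo) { ... }          HLoadClass(Foo)
//                                            HInstanceOf(obj, load_class, check_kind, ...)
//                                            HIf(instance_of)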
7398 
7399 class HBoundType final : public HExpression<1> {
7400  public:
7401   explicit HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc)
HExpression(kBoundType,DataType::Type::kReference,SideEffects::None (),dex_pc)7402       : HExpression(kBoundType, DataType::Type::kReference, SideEffects::None(), dex_pc),
7403         upper_bound_(ReferenceTypeInfo::CreateInvalid()) {
7404     SetPackedFlag<kFlagUpperCanBeNull>(true);
7405     SetPackedFlag<kFlagCanBeNull>(true);
7406     DCHECK_EQ(input->GetType(), DataType::Type::kReference);
7407     SetRawInputAt(0, input);
7408   }
7409 
7410   bool InstructionDataEquals(const HInstruction* other) const override;
IsClonable()7411   bool IsClonable() const override { return true; }
7412 
7413   // {Get,Set}Upper* should only be used in reference type propagation.
GetUpperBound()7414   const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; }
GetUpperCanBeNull()7415   bool GetUpperCanBeNull() const { return GetPackedFlag<kFlagUpperCanBeNull>(); }
7416   void SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null);
7417 
SetCanBeNull(bool can_be_null)7418   void SetCanBeNull(bool can_be_null) {
7419     DCHECK(GetUpperCanBeNull() || !can_be_null);
7420     SetPackedFlag<kFlagCanBeNull>(can_be_null);
7421   }
7422 
CanBeNull()7423   bool CanBeNull() const override { return GetPackedFlag<kFlagCanBeNull>(); }
7424 
7425   DECLARE_INSTRUCTION(BoundType);
7426 
7427  protected:
7428   DEFAULT_COPY_CONSTRUCTOR(BoundType);
7429 
7430  private:
7431   // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
7432   // is false then CanBeNull() cannot be true).
7433   static constexpr size_t kFlagUpperCanBeNull = kNumberOfGenericPackedBits;
7434   static constexpr size_t kFlagCanBeNull = kFlagUpperCanBeNull + 1;
7435   static constexpr size_t kNumberOfBoundTypePackedBits = kFlagCanBeNull + 1;
7436   static_assert(kNumberOfBoundTypePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
7437 
7438   // Encodes the uppermost class that this instruction can have. In other words,
7439   // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()).
7440   // It is used to bound the type in cases like:
7441   //   if (x instanceof ClassX) {
7442   //     // upper_bound_ will be ClassX
7443   //   }
7444   ReferenceTypeInfo upper_bound_;
7445 };
7446 
7447 class HCheckCast final : public HTypeCheckInstruction {
7448  public:
HCheckCast(HInstruction * object,HInstruction * target_class_or_null,TypeCheckKind check_kind,Handle<mirror::Class> klass,uint32_t dex_pc,ArenaAllocator * allocator,HIntConstant * bitstring_path_to_root,HIntConstant * bitstring_mask)7449   HCheckCast(HInstruction* object,
7450              HInstruction* target_class_or_null,
7451              TypeCheckKind check_kind,
7452              Handle<mirror::Class> klass,
7453              uint32_t dex_pc,
7454              ArenaAllocator* allocator,
7455              HIntConstant* bitstring_path_to_root,
7456              HIntConstant* bitstring_mask)
7457       : HTypeCheckInstruction(kCheckCast,
7458                               DataType::Type::kVoid,
7459                               object,
7460                               target_class_or_null,
7461                               check_kind,
7462                               klass,
7463                               dex_pc,
7464                               allocator,
7465                               bitstring_path_to_root,
7466                               bitstring_mask,
7467                               SideEffects::CanTriggerGC()) {}
7468 
IsClonable()7469   bool IsClonable() const override { return true; }
NeedsEnvironment()7470   bool NeedsEnvironment() const override {
7471     // Instruction may throw a ClassCastException.
7472     return true;
7473   }
7474 
CanThrow()7475   bool CanThrow() const override { return true; }
7476 
7477   DECLARE_INSTRUCTION(CheckCast);
7478 
7479  protected:
7480   DEFAULT_COPY_CONSTRUCTOR(CheckCast);
7481 };
7482 
7483 /**
7484  * @brief Memory barrier types (see "The JSR-133 Cookbook for Compiler Writers").
7485  * @details We define the combined barrier types that are actually required
7486  * by the Java Memory Model, rather than using exactly the terminology from
7487  * the JSR-133 cookbook.  These should, in many cases, be replaced by acquire/release
7488  * primitives.  Note that the JSR-133 cookbook generally does not deal with
7489  * store atomicity issues, and the recipes there are not always entirely sufficient.
7490  * The current recipe is as follows:
7491  * -# Use AnyStore ~= (LoadStore | StoreStore) ~= release barrier before volatile store.
7492  * -# Use AnyAny barrier after volatile store.  (StoreLoad is as expensive.)
7493  * -# Use LoadAny barrier ~= (LoadLoad | LoadStore) ~= acquire barrier after each volatile load.
7494  * -# Use StoreStore barrier after all stores but before return from any constructor whose
7495  *    class has final fields.
7496  * -# Use NTStoreStore to order non-temporal stores with respect to all later
7497  *    store-to-memory instructions.  Only generated together with non-temporal stores.
7498  */
7499 enum MemBarrierKind {
7500   kAnyStore,
7501   kLoadAny,
7502   kStoreStore,
7503   kAnyAny,
7504   kNTStoreStore,
7505   kLastBarrierKind = kNTStoreStore
7506 };
7507 std::ostream& operator<<(std::ostream& os, MemBarrierKind kind);
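
// Illustrative sketch restating the recipe above for a volatile field: a volatile store is
// bracketed by an AnyStore barrier before it and an AnyAny barrier after it, and a volatile load
// is followed by a LoadAny barrier, roughly
//
//   barrier(kAnyStore)     // release
//   store obj.f = v        // volatile store
//   barrier(kAnyAny)
//   ...
//   load obj.f             // volatile load
//   barrier(kLoadAny)      // acquire
//
// Code generators may instead fold these barriers into acquire/release memory accesses where the
// architecture provides them.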
7508 
7509 class HMemoryBarrier final : public HExpression<0> {
7510  public:
7511   explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc)
HExpression(kMemoryBarrier,SideEffects::AllWritesAndReads (),dex_pc)7512       : HExpression(kMemoryBarrier,
7513                     SideEffects::AllWritesAndReads(),  // Assume write/read on all fields/arrays.
7514                     dex_pc) {
7515     SetPackedField<BarrierKindField>(barrier_kind);
7516   }
7517 
IsClonable()7518   bool IsClonable() const override { return true; }
7519 
GetBarrierKind()7520   MemBarrierKind GetBarrierKind() { return GetPackedField<BarrierKindField>(); }
7521 
7522   DECLARE_INSTRUCTION(MemoryBarrier);
7523 
7524  protected:
7525   DEFAULT_COPY_CONSTRUCTOR(MemoryBarrier);
7526 
7527  private:
7528   static constexpr size_t kFieldBarrierKind = HInstruction::kNumberOfGenericPackedBits;
7529   static constexpr size_t kFieldBarrierKindSize =
7530       MinimumBitsToStore(static_cast<size_t>(kLastBarrierKind));
7531   static constexpr size_t kNumberOfMemoryBarrierPackedBits =
7532       kFieldBarrierKind + kFieldBarrierKindSize;
7533   static_assert(kNumberOfMemoryBarrierPackedBits <= kMaxNumberOfPackedBits,
7534                 "Too many packed fields.");
7535   using BarrierKindField = BitField<MemBarrierKind, kFieldBarrierKind, kFieldBarrierKindSize>;
7536 };
7537 
7538 // A constructor fence orders all prior stores to fields that could be accessed via a final field of
7539 // the specified object(s), with respect to any subsequent store that might "publish"
7540 // (i.e. make visible) the specified object to another thread.
7541 //
7542 // JLS 17.5.1 "Semantics of final fields" states that a freeze action happens
7543 // for all final fields (that were set) at the end of the invoked constructor.
7544 //
7545 // The constructor fence models the freeze actions for the final fields of an object
7546 // being constructed (semantically at the end of the constructor). Constructor fences
7547 // have a per-object affinity; two separate objects being constructed get two separate
7548 // constructor fences.
7549 //
7550 // (Note that if calling a super-constructor or forwarding to another constructor,
7551 // the freezes would happen at the end of *that* constructor being invoked.)
7552 //
7553 // The memory model guarantees that when the object being constructed is "published" after
7554 // constructor completion (i.e. escapes the current thread via a store), then any final field
7555 // writes must be observable on other threads (once they observe that publication).
7556 //
7557 // Further, anything written before the freeze, and read by dereferencing through the final field,
7558 // must also be visible (so final object field could itself have an object with non-final fields;
7559 // yet the freeze must also extend to them).
7560 //
7561 // Constructor example:
7562 //
7563 //     class HasFinal {
7564 //        final int field;                              Optimizing IR for <init>()V:
7565 //        HasFinal() {
7566 //          field = 123;                                HInstanceFieldSet(this, HasFinal.field, 123)
7567 //          // freeze(this.field);                      HConstructorFence(this)
7568 //        }                                             HReturn
7569 //     }
7570 //
7571 // HConstructorFence can serve double duty as a fence for new-instance/new-array allocations of
7572 // already-initialized classes; in that case the allocation must act as a "default-initializer"
7573 // of the object which effectively writes the class pointer "final field".
7574 //
7575 // For example, we can model default-initialization as roughly the equivalent of the following:
7576 //
7577 //     class Object {
7578 //       private final Class header;
7579 //     }
7580 //
7581 //  Java code:                                           Optimizing IR:
7582 //
7583 //     T new_instance<T>() {
7584 //       Object obj = allocate_memory(T.class.size);     obj = HInvoke(art_quick_alloc_object, T)
7585 //       obj.header = T.class;                           // header write is done by above call.
7586 //       // freeze(obj.header)                           HConstructorFence(obj)
7587 //       return (T)obj;
7588 //     }
7589 //
7590 // See also:
7591 // * DexCompilationUnit::RequiresConstructorBarrier
7592 // * QuasiAtomic::ThreadFenceForConstructor
7593 //
7594 class HConstructorFence final : public HVariableInputSizeInstruction {
7595                                   // A fence has variable inputs because the inputs can be removed
7596                                   // after prepare_for_register_allocation phase.
7597                                   // (TODO: In the future a fence could freeze multiple objects
7598                                   //        after merging two fences together.)
7599  public:
7600   // `fence_object` is the reference that needs to be protected for correct publication.
7601   //
7602   // It makes sense in the following situations:
7603   // * <init> constructors, it's the "this" parameter (i.e. HParameterValue, s.t. IsThis() == true).
7604   // * new-instance-like instructions, it's the return value (i.e. HNewInstance).
7605   //
7606   // After construction the `fence_object` becomes the 0th input.
7607   // This is not an input in a real sense, but just a convenient place to stash the information
7608   // about the associated object.
HConstructorFence(HInstruction * fence_object,uint32_t dex_pc,ArenaAllocator * allocator)7609   HConstructorFence(HInstruction* fence_object,
7610                     uint32_t dex_pc,
7611                     ArenaAllocator* allocator)
7612     // We strongly suspect there is not a more accurate way to describe the fine-grained reordering
7613     // constraints described in the class header. We claim that these SideEffects constraints
7614     // enforce a superset of the real constraints.
7615     //
7616     // The ordering described above is conservatively modeled with SideEffects as follows:
7617     //
7618     // * To prevent reordering of the publication stores:
7619     // ----> "Reads of objects" is the initial SideEffect.
7620     // * For every primitive final field store in the constructor:
7621     // ----> Union that field's type as a read (e.g. "Read of T") into the SideEffect.
7622     // * If there are any stores to reference final fields in the constructor:
7623     // ----> Use a more conservative "AllReads" SideEffect because any stores to any references
7624     //       that are reachable from `fence_object` also need to be prevented for reordering
7625     //       (and we do not want to do alias analysis to figure out what those stores are).
7626     //
7627     // In the implementation, this initially starts out as an "all reads" side effect; this is an
7628     // even more conservative approach than the one described above, and prevents all of the
7629     // above reordering without analyzing any of the instructions in the constructor.
7630     //
7631     // If in a later phase we discover that there are no writes to reference final fields,
7632     // we can refine the side effect to a smaller set of type reads (see above constraints).
7633       : HVariableInputSizeInstruction(kConstructorFence,
7634                                       SideEffects::AllReads(),
7635                                       dex_pc,
7636                                       allocator,
7637                                       /* number_of_inputs= */ 1,
7638                                       kArenaAllocConstructorFenceInputs) {
7639     DCHECK(fence_object != nullptr);
7640     SetRawInputAt(0, fence_object);
7641   }
7642 
7643   // The object associated with this constructor fence.
7644   //
7645   // (Note: This will be null after the prepare_for_register_allocation phase,
7646   // as all constructor fence inputs are removed there).
GetFenceObject()7647   HInstruction* GetFenceObject() const {
7648     return InputAt(0);
7649   }
7650 
7651   // Find all the HConstructorFence uses (`fence_use`) for `this` and:
7652   // - Delete `fence_use` from `this`'s use list.
7653   // - Delete `this` from `fence_use`'s inputs list.
7654   // - If the `fence_use` is dead, remove it from the graph.
7655   //
7656   // A fence is considered dead once it no longer has any uses
7657   // and all of the inputs are dead.
7658   //
7659   // This must *not* be called during/after prepare_for_register_allocation,
7660   // because that removes all the inputs to the fences but the fence is actually
7661   // still considered live.
7662   //
7663   // Returns how many HConstructorFence instructions were removed from graph.
7664   static size_t RemoveConstructorFences(HInstruction* instruction);
7665 
7666   // Combine all inputs of `this` and `other` instruction and remove
7667   // `other` from the graph.
7668   //
7669   // Inputs are unique after the merge.
7670   //
7671   // Requirement: `this` must not be the same as `other`.
7672   void Merge(HConstructorFence* other);
7673 
7674   // Check if this constructor fence is protecting
7675   // an HNewInstance or HNewArray that is also the immediate
7676   // predecessor of `this`.
7677   //
7678   // If `ignore_inputs` is true, then the immediate predecessor doesn't need
7679   // to be one of the inputs of `this`.
7680   //
7681   // Returns the associated HNewArray or HNewInstance,
7682   // or null otherwise.
7683   HInstruction* GetAssociatedAllocation(bool ignore_inputs = false);
7684 
7685   DECLARE_INSTRUCTION(ConstructorFence);
7686 
7687  protected:
7688   DEFAULT_COPY_CONSTRUCTOR(ConstructorFence);
7689 };
7690 
7691 class HMonitorOperation final : public HExpression<1> {
7692  public:
7693   enum class OperationKind {
7694     kEnter,
7695     kExit,
7696     kLast = kExit
7697   };
7698 
HMonitorOperation(HInstruction * object,OperationKind kind,uint32_t dex_pc)7699   HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc)
7700     : HExpression(kMonitorOperation,
7701                   SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
7702                   dex_pc) {
7703     SetPackedField<OperationKindField>(kind);
7704     SetRawInputAt(0, object);
7705   }
7706 
7707   // Instruction may go into runtime, so we need an environment.
NeedsEnvironment()7708   bool NeedsEnvironment() const override { return true; }
7709 
CanThrow()7710   bool CanThrow() const override {
7711     // Verifier guarantees that monitor-exit cannot throw.
7712     // This is important because it allows the HGraphBuilder to remove
7713     // a dead throw-catch loop generated for `synchronized` blocks/methods.
7714     return IsEnter();
7715   }
7716 
GetOperationKind()7717   OperationKind GetOperationKind() const { return GetPackedField<OperationKindField>(); }
IsEnter()7718   bool IsEnter() const { return GetOperationKind() == OperationKind::kEnter; }
7719 
7720   DECLARE_INSTRUCTION(MonitorOperation);
7721 
7722  protected:
7723   DEFAULT_COPY_CONSTRUCTOR(MonitorOperation);
7724 
7725  private:
7726   static constexpr size_t kFieldOperationKind = HInstruction::kNumberOfGenericPackedBits;
7727   static constexpr size_t kFieldOperationKindSize =
7728       MinimumBitsToStore(static_cast<size_t>(OperationKind::kLast));
7729   static constexpr size_t kNumberOfMonitorOperationPackedBits =
7730       kFieldOperationKind + kFieldOperationKindSize;
7731   static_assert(kNumberOfMonitorOperationPackedBits <= HInstruction::kMaxNumberOfPackedBits,
7732                 "Too many packed fields.");
7733   using OperationKindField = BitField<OperationKind, kFieldOperationKind, kFieldOperationKindSize>;
7734 };
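
// Illustrative sketch (assumed typical lowering, not a definition): a Java
// `synchronized (obj) { ... }` block is built as an enter/exit pair around the body, roughly
//
//   HMonitorOperation(obj, OperationKind::kEnter)
//   ...body...
//   HMonitorOperation(obj, OperationKind::kExit)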
7735 
7736 class HSelect final : public HExpression<3> {
7737  public:
HSelect(HInstruction * condition,HInstruction * true_value,HInstruction * false_value,uint32_t dex_pc)7738   HSelect(HInstruction* condition,
7739           HInstruction* true_value,
7740           HInstruction* false_value,
7741           uint32_t dex_pc)
7742       : HExpression(kSelect, HPhi::ToPhiType(true_value->GetType()), SideEffects::None(), dex_pc) {
7743     DCHECK_EQ(HPhi::ToPhiType(true_value->GetType()), HPhi::ToPhiType(false_value->GetType()));
7744 
7745     // First input must be `true_value` or `false_value` to allow codegens to
7746     // use the SameAsFirstInput allocation policy. We make it `false_value`, so
7747     // that architectures which implement HSelect as a conditional move also
7748     // will not need to invert the condition.
7749     SetRawInputAt(0, false_value);
7750     SetRawInputAt(1, true_value);
7751     SetRawInputAt(2, condition);
7752   }
7753 
IsClonable()7754   bool IsClonable() const override { return true; }
GetFalseValue()7755   HInstruction* GetFalseValue() const { return InputAt(0); }
GetTrueValue()7756   HInstruction* GetTrueValue() const { return InputAt(1); }
GetCondition()7757   HInstruction* GetCondition() const { return InputAt(2); }
7758 
CanBeMoved()7759   bool CanBeMoved() const override { return true; }
InstructionDataEquals(const HInstruction * other ATTRIBUTE_UNUSED)7760   bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
7761     return true;
7762   }
7763 
CanBeNull()7764   bool CanBeNull() const override {
7765     return GetTrueValue()->CanBeNull() || GetFalseValue()->CanBeNull();
7766   }
7767 
7768   DECLARE_INSTRUCTION(Select);
7769 
7770  protected:
7771   DEFAULT_COPY_CONSTRUCTOR(Select);
7772 };
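
// Illustrative sketch: a select-converted ternary `x = cond ? a : b` becomes
//
//   HSelect(condition = cond, true_value = a, false_value = b)
//
// stored with the input order (b, a, cond) chosen in the constructor above, so
// GetFalseValue() == b, GetTrueValue() == a and GetCondition() == cond.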
7773 
7774 class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> {
7775  public:
MoveOperands(Location source,Location destination,DataType::Type type,HInstruction * instruction)7776   MoveOperands(Location source,
7777                Location destination,
7778                DataType::Type type,
7779                HInstruction* instruction)
7780       : source_(source), destination_(destination), type_(type), instruction_(instruction) {}
7781 
GetSource()7782   Location GetSource() const { return source_; }
GetDestination()7783   Location GetDestination() const { return destination_; }
7784 
SetSource(Location value)7785   void SetSource(Location value) { source_ = value; }
SetDestination(Location value)7786   void SetDestination(Location value) { destination_ = value; }
7787 
7788   // The parallel move resolver marks moves as "in-progress" by clearing the
7789   // destination (but not the source).
MarkPending()7790   Location MarkPending() {
7791     DCHECK(!IsPending());
7792     Location dest = destination_;
7793     destination_ = Location::NoLocation();
7794     return dest;
7795   }
7796 
ClearPending(Location dest)7797   void ClearPending(Location dest) {
7798     DCHECK(IsPending());
7799     destination_ = dest;
7800   }
7801 
IsPending()7802   bool IsPending() const {
7803     DCHECK(source_.IsValid() || destination_.IsInvalid());
7804     return destination_.IsInvalid() && source_.IsValid();
7805   }
7806 
7807   // True if this blocks a move from the given location.
Blocks(Location loc)7808   bool Blocks(Location loc) const {
7809     return !IsEliminated() && source_.OverlapsWith(loc);
7810   }
7811 
7812   // A move is redundant if it's been eliminated, if its source and
7813   // destination are the same, or if its destination is unneeded.
IsRedundant()7814   bool IsRedundant() const {
7815     return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_);
7816   }
7817 
7818   // We clear both operands to indicate a move that's been eliminated.
Eliminate()7819   void Eliminate() {
7820     source_ = destination_ = Location::NoLocation();
7821   }
7822 
IsEliminated()7823   bool IsEliminated() const {
7824     DCHECK(!source_.IsInvalid() || destination_.IsInvalid());
7825     return source_.IsInvalid();
7826   }
7827 
GetType()7828   DataType::Type GetType() const { return type_; }
7829 
Is64BitMove()7830   bool Is64BitMove() const {
7831     return DataType::Is64BitType(type_);
7832   }
7833 
GetInstruction()7834   HInstruction* GetInstruction() const { return instruction_; }
7835 
7836  private:
7837   Location source_;
7838   Location destination_;
7839   // The type this move is for.
7840   DataType::Type type_;
7841   // The instruction this move is associated with. Null when this move is
7842   // for moving an input into the expected location of its user (including a phi user).
7843   // This is only used in debug mode, to ensure we do not connect interval siblings
7844   // in the same parallel move.
7845   HInstruction* instruction_;
7846 };
7847 
7848 std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs);
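
// Illustrative sketch of how the pending state above is used (a conceptual trace only; the real
// algorithm lives in the parallel move resolvers): consider the swap cycle
//
//   move A: r0 -> r1
//   move B: r1 -> r0
//
// While emitting A the resolver calls A.MarkPending(); it then sees that B.Blocks(r1) holds
// (B's source overlaps A's destination) and recurses into B, where the blocking move A is found
// to be pending, i.e. a cycle, which is broken with a scratch register or an exchange.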
7849 
7850 static constexpr size_t kDefaultNumberOfMoves = 4;
7851 
7852 class HParallelMove final : public HExpression<0> {
7853  public:
7854   explicit HParallelMove(ArenaAllocator* allocator, uint32_t dex_pc = kNoDexPc)
HExpression(kParallelMove,SideEffects::None (),dex_pc)7855       : HExpression(kParallelMove, SideEffects::None(), dex_pc),
7856         moves_(allocator->Adapter(kArenaAllocMoveOperands)) {
7857     moves_.reserve(kDefaultNumberOfMoves);
7858   }
7859 
AddMove(Location source,Location destination,DataType::Type type,HInstruction * instruction)7860   void AddMove(Location source,
7861                Location destination,
7862                DataType::Type type,
7863                HInstruction* instruction) {
7864     DCHECK(source.IsValid());
7865     DCHECK(destination.IsValid());
7866     if (kIsDebugBuild) {
7867       if (instruction != nullptr) {
7868         for (const MoveOperands& move : moves_) {
7869           if (move.GetInstruction() == instruction) {
7870             // Special case the situation where the move is for the spill slot
7871             // of the instruction.
7872             if ((GetPrevious() == instruction)
7873                 || ((GetPrevious() == nullptr)
7874                     && instruction->IsPhi()
7875                     && instruction->GetBlock() == GetBlock())) {
7876               DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind())
7877                   << "Doing parallel moves for the same instruction.";
7878             } else {
7879               DCHECK(false) << "Doing parallel moves for the same instruction.";
7880             }
7881           }
7882         }
7883       }
7884       for (const MoveOperands& move : moves_) {
7885         DCHECK(!destination.OverlapsWith(move.GetDestination()))
7886             << "Overlapped destination for two moves in a parallel move: "
7887             << move.GetSource() << " ==> " << move.GetDestination() << " and "
7888             << source << " ==> " << destination;
7889       }
7890     }
7891     moves_.emplace_back(source, destination, type, instruction);
7892   }
7893 
MoveOperandsAt(size_t index)7894   MoveOperands* MoveOperandsAt(size_t index) {
7895     return &moves_[index];
7896   }
7897 
NumMoves()7898   size_t NumMoves() const { return moves_.size(); }
7899 
7900   DECLARE_INSTRUCTION(ParallelMove);
7901 
7902  protected:
7903   DEFAULT_COPY_CONSTRUCTOR(ParallelMove);
7904 
7905  private:
7906   ArenaVector<MoveOperands> moves_;
7907 };
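
// Illustrative usage sketch (assuming an ArenaAllocator* `allocator`, e.g. the graph's allocator):
// a register swap scheduled during register allocation could be recorded as
//
//   HParallelMove* moves = new (allocator) HParallelMove(allocator);
//   moves->AddMove(Location::RegisterLocation(0), Location::RegisterLocation(1),
//                  DataType::Type::kInt32, /* instruction= */ nullptr);
//   moves->AddMove(Location::RegisterLocation(1), Location::RegisterLocation(0),
//                  DataType::Type::kInt32, /* instruction= */ nullptr);
//
// The two destinations do not overlap, so the debug checks in AddMove() pass; untangling the swap
// itself is left to the parallel move resolver at code generation time.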
7908 
7909 // This instruction computes an intermediate address pointing in the 'middle' of an object. The
7910 // result pointer cannot be handled by GC, so extra care is taken to make sure that this value is
7911 // never used across anything that can trigger GC.
7912 // The result of this instruction is not a pointer in the sense of `DataType::Type::kReference`,
7913 // so we represent it with the type `DataType::Type::kInt32`.
7914 class HIntermediateAddress final : public HExpression<2> {
7915  public:
HIntermediateAddress(HInstruction * base_address,HInstruction * offset,uint32_t dex_pc)7916   HIntermediateAddress(HInstruction* base_address, HInstruction* offset, uint32_t dex_pc)
7917       : HExpression(kIntermediateAddress,
7918                     DataType::Type::kInt32,
7919                     SideEffects::DependsOnGC(),
7920                     dex_pc) {
7921         DCHECK_EQ(DataType::Size(DataType::Type::kInt32),
7922                   DataType::Size(DataType::Type::kReference))
7923             << "kPrimInt and kPrimNot have different sizes.";
7924     SetRawInputAt(0, base_address);
7925     SetRawInputAt(1, offset);
7926   }
7927 
IsClonable()7928   bool IsClonable() const override { return true; }
CanBeMoved()7929   bool CanBeMoved() const override { return true; }
InstructionDataEquals(const HInstruction * other ATTRIBUTE_UNUSED)7930   bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
7931     return true;
7932   }
IsActualObject()7933   bool IsActualObject() const override { return false; }
7934 
GetBaseAddress()7935   HInstruction* GetBaseAddress() const { return InputAt(0); }
GetOffset()7936   HInstruction* GetOffset() const { return InputAt(1); }
7937 
7938   DECLARE_INSTRUCTION(IntermediateAddress);
7939 
7940  protected:
7941   DEFAULT_COPY_CONSTRUCTOR(IntermediateAddress);
7942 };
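
// Illustrative sketch (an assumption about how the shared instruction simplifier uses this on
// ARM/ARM64): the data offset of several accesses to the same array can be factored out, e.g.
//
//   HIntermediateAddress(array, data_offset)    // array + offset of the first element
//   HArrayGet(intermediate_address, index, ...)
//   HArraySet(intermediate_address, index, value, ...)
//
// Because the intermediate value is not a real reference, it must not live across anything that
// can trigger GC; the DependsOnGC side effect above conservatively enforces that.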
7943 
7944 
7945 }  // namespace art
7946 
7947 #include "nodes_vector.h"
7948 
7949 #if defined(ART_ENABLE_CODEGEN_arm) || defined(ART_ENABLE_CODEGEN_arm64)
7950 #include "nodes_shared.h"
7951 #endif
7952 #if defined(ART_ENABLE_CODEGEN_x86) || defined(ART_ENABLE_CODEGEN_x86_64)
7953 #include "nodes_x86.h"
7954 #endif
7955 
7956 namespace art {
7957 
7958 class OptimizingCompilerStats;
7959 
7960 class HGraphVisitor : public ValueObject {
7961  public:
7962   explicit HGraphVisitor(HGraph* graph, OptimizingCompilerStats* stats = nullptr)
stats_(stats)7963       : stats_(stats),
7964         graph_(graph) {}
~HGraphVisitor()7965   virtual ~HGraphVisitor() {}
7966 
VisitInstruction(HInstruction * instruction ATTRIBUTE_UNUSED)7967   virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {}
7968   virtual void VisitBasicBlock(HBasicBlock* block);
7969 
7970   // Visit the graph following basic block insertion order.
7971   void VisitInsertionOrder();
7972 
7973   // Visit the graph following dominator tree reverse post-order.
7974   void VisitReversePostOrder();
7975 
GetGraph()7976   HGraph* GetGraph() const { return graph_; }
7977 
7978   // Visit functions for instruction classes.
7979 #define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
7980   virtual void Visit##name(H##name* instr) { VisitInstruction(instr); }
7981 
7982   FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)
7983 
7984 #undef DECLARE_VISIT_INSTRUCTION
7985 
7986  protected:
7987   OptimizingCompilerStats* stats_;
7988 
7989  private:
7990   HGraph* const graph_;
7991 
7992   DISALLOW_COPY_AND_ASSIGN(HGraphVisitor);
7993 };
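
// Illustrative sketch (hypothetical visitor, for illustration only): thanks to the
// DECLARE_VISIT_INSTRUCTION expansion above, overriding VisitAdd() is enough to see every HAdd in
// the graph, e.g.
//
//   class CountAddsVisitor : public HGraphVisitor {
//    public:
//     explicit CountAddsVisitor(HGraph* graph) : HGraphVisitor(graph) {}
//     void VisitAdd(HAdd* add ATTRIBUTE_UNUSED) override { ++count_; }
//     size_t count_ = 0;
//   };
//
//   CountAddsVisitor visitor(graph);
//   visitor.VisitReversePostOrder();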
7994 
7995 class HGraphDelegateVisitor : public HGraphVisitor {
7996  public:
7997   explicit HGraphDelegateVisitor(HGraph* graph, OptimizingCompilerStats* stats = nullptr)
HGraphVisitor(graph,stats)7998       : HGraphVisitor(graph, stats) {}
~HGraphDelegateVisitor()7999   virtual ~HGraphDelegateVisitor() {}
8000 
8001   // Visit functions that delegate to the super class.
8002 #define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
8003   void Visit##name(H##name* instr) override { Visit##super(instr); }
8004 
8005   FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)
8006 
8007 #undef DECLARE_VISIT_INSTRUCTION
8008 
8009  private:
8010   DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor);
8011 };
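
// Illustrative sketch (hypothetical visitor): because each concrete visit forwards to its super
// class visit, overriding VisitBinaryOperation() on a delegate visitor sees every HAdd, HSub,
// HMul, etc. without listing them individually:
//
//   class BinaryOpCounter : public HGraphDelegateVisitor {
//    public:
//     explicit BinaryOpCounter(HGraph* graph) : HGraphDelegateVisitor(graph) {}
//     void VisitBinaryOperation(HBinaryOperation* bin_op ATTRIBUTE_UNUSED) override { ++count_; }
//     size_t count_ = 0;
//   };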
8012 
8013 // Create a clone of the instruction, insert the clone into the graph, replace the old
8014 // instruction with the clone, and remove the old instruction.
8015 HInstruction* ReplaceInstrOrPhiByClone(HInstruction* instr);
8016 
8017 // Create a clone for each clonable instruction/phi and replace the original with the clone.
8018 //
8019 // Used for testing the individual instruction cloner.
8020 class CloneAndReplaceInstructionVisitor : public HGraphDelegateVisitor {
8021  public:
CloneAndReplaceInstructionVisitor(HGraph * graph)8022   explicit CloneAndReplaceInstructionVisitor(HGraph* graph)
8023       : HGraphDelegateVisitor(graph), instr_replaced_by_clones_count_(0) {}
8024 
VisitInstruction(HInstruction * instruction)8025   void VisitInstruction(HInstruction* instruction) override {
8026     if (instruction->IsClonable()) {
8027       ReplaceInstrOrPhiByClone(instruction);
8028       instr_replaced_by_clones_count_++;
8029     }
8030   }
8031 
GetInstrReplacedByClonesCount()8032   size_t GetInstrReplacedByClonesCount() const { return instr_replaced_by_clones_count_; }
8033 
8034  private:
8035   size_t instr_replaced_by_clones_count_;
8036 
8037   DISALLOW_COPY_AND_ASSIGN(CloneAndReplaceInstructionVisitor);
8038 };
8039 
8040 // Iterator over the blocks that are part of the loop. Includes blocks that are
8041 // part of an inner loop. The blocks are iterated in the order of their
8042 // block ids.
8043 class HBlocksInLoopIterator : public ValueObject {
8044  public:
HBlocksInLoopIterator(const HLoopInformation & info)8045   explicit HBlocksInLoopIterator(const HLoopInformation& info)
8046       : blocks_in_loop_(info.GetBlocks()),
8047         blocks_(info.GetHeader()->GetGraph()->GetBlocks()),
8048         index_(0) {
8049     if (!blocks_in_loop_.IsBitSet(index_)) {
8050       Advance();
8051     }
8052   }
8053 
Done()8054   bool Done() const { return index_ == blocks_.size(); }
Current()8055   HBasicBlock* Current() const { return blocks_[index_]; }
Advance()8056   void Advance() {
8057     ++index_;
8058     for (size_t e = blocks_.size(); index_ < e; ++index_) {
8059       if (blocks_in_loop_.IsBitSet(index_)) {
8060         break;
8061       }
8062     }
8063   }
8064 
8065  private:
8066   const BitVector& blocks_in_loop_;
8067   const ArenaVector<HBasicBlock*>& blocks_;
8068   size_t index_;
8069 
8070   DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator);
8071 };
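
// Illustrative usage sketch: both loop-block iterators are driven by a plain for loop, e.g.
//
//   for (HBlocksInLoopIterator it(*loop_info); !it.Done(); it.Advance()) {
//     HBasicBlock* block = it.Current();
//     // ... process `block` ...
//   }
//
// where `loop_info` is the HLoopInformation of the loop being processed (a caller-side
// assumption, not something defined here).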
8072 
8073 // Iterator over the blocks that are part of the loop. Includes blocks that are
8074 // part of an inner loop. The blocks are iterated in reverse
8075 // post order.
8076 class HBlocksInLoopReversePostOrderIterator : public ValueObject {
8077  public:
HBlocksInLoopReversePostOrderIterator(const HLoopInformation & info)8078   explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info)
8079       : blocks_in_loop_(info.GetBlocks()),
8080         blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()),
8081         index_(0) {
8082     if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
8083       Advance();
8084     }
8085   }
8086 
Done()8087   bool Done() const { return index_ == blocks_.size(); }
Current()8088   HBasicBlock* Current() const { return blocks_[index_]; }
Advance()8089   void Advance() {
8090     ++index_;
8091     for (size_t e = blocks_.size(); index_ < e; ++index_) {
8092       if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
8093         break;
8094       }
8095     }
8096   }
8097 
8098  private:
8099   const BitVector& blocks_in_loop_;
8100   const ArenaVector<HBasicBlock*>& blocks_;
8101   size_t index_;
8102 
8103   DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator);
8104 };
8105 
8106 // Returns the int64_t value of a properly typed constant.
Int64FromConstant(HConstant * constant)8107 inline int64_t Int64FromConstant(HConstant* constant) {
8108   if (constant->IsIntConstant()) {
8109     return constant->AsIntConstant()->GetValue();
8110   } else if (constant->IsLongConstant()) {
8111     return constant->AsLongConstant()->GetValue();
8112   } else {
8113     DCHECK(constant->IsNullConstant()) << constant->DebugName();
8114     return 0;
8115   }
8116 }
8117 
8118 // Returns true iff instruction is an integral constant (and sets value on success).
IsInt64AndGet(HInstruction * instruction,int64_t * value)8119 inline bool IsInt64AndGet(HInstruction* instruction, /*out*/ int64_t* value) {
8120   if (instruction->IsIntConstant()) {
8121     *value = instruction->AsIntConstant()->GetValue();
8122     return true;
8123   } else if (instruction->IsLongConstant()) {
8124     *value = instruction->AsLongConstant()->GetValue();
8125     return true;
8126   } else if (instruction->IsNullConstant()) {
8127     *value = 0;
8128     return true;
8129   }
8130   return false;
8131 }
8132 
8133 // Returns true iff instruction is the given integral constant.
IsInt64Value(HInstruction * instruction,int64_t value)8134 inline bool IsInt64Value(HInstruction* instruction, int64_t value) {
8135   int64_t val = 0;
8136   return IsInt64AndGet(instruction, &val) && val == value;
8137 }
8138 
8139 // Returns true iff instruction is a zero bit pattern.
IsZeroBitPattern(HInstruction * instruction)8140 inline bool IsZeroBitPattern(HInstruction* instruction) {
8141   return instruction->IsConstant() && instruction->AsConstant()->IsZeroBitPattern();
8142 }
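
// Illustrative usage sketch: these helpers are convenient for pattern-matching operands; e.g. a
// simplification pass could test whether the second input of an instruction is the constant 1:
//
//   int64_t value = 0;
//   if (IsInt64AndGet(instruction->InputAt(1), &value) && value == 1) {
//     // ... simplify `instruction` ...
//   }
//
// or, equivalently, use IsInt64Value(instruction->InputAt(1), 1).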
8143 
8144 // Implement HInstruction::Is##type() for concrete instructions.
8145 #define INSTRUCTION_TYPE_CHECK(type, super)                                    \
8146   inline bool HInstruction::Is##type() const { return GetKind() == k##type; }
8147   FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
8148 #undef INSTRUCTION_TYPE_CHECK
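
// For example, for the concrete instruction HAdd the macro above expands to:
//
//   inline bool HInstruction::IsAdd() const { return GetKind() == kAdd; }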
8149 
8150 // Implement HInstruction::Is##type() for abstract instructions.
8151 #define INSTRUCTION_TYPE_CHECK_RESULT(type, super)                             \
8152   std::is_base_of<BaseType, H##type>::value,
8153 #define INSTRUCTION_TYPE_CHECK(type, super)                                    \
8154   inline bool HInstruction::Is##type() const {                                 \
8155     DCHECK_LT(GetKind(), kLastInstructionKind);                                \
8156     using BaseType = H##type;                                                  \
8157     static constexpr bool results[] = {                                        \
8158         FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK_RESULT)           \
8159     };                                                                         \
8160     return results[static_cast<size_t>(GetKind())];                            \
8161   }
8162 
FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK)8163   FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
8164 #undef INSTRUCTION_TYPE_CHECK
8165 #undef INSTRUCTION_TYPE_CHECK_RESULT
8166 
8167 #define INSTRUCTION_TYPE_CAST(type, super)                                     \
8168   inline const H##type* HInstruction::As##type() const {                       \
8169     return Is##type() ? down_cast<const H##type*>(this) : nullptr;             \
8170   }                                                                            \
8171   inline H##type* HInstruction::As##type() {                                   \
8172     return Is##type() ? static_cast<H##type*>(this) : nullptr;                 \
8173   }
8174 
8175   FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CAST)
8176 #undef INSTRUCTION_TYPE_CAST
8177 
8178 
8179 // Create space in `blocks` for adding `number_of_new_blocks` entries
8180 // starting after location `after`. Blocks following `after` are moved accordingly.
8181 inline void MakeRoomFor(ArenaVector<HBasicBlock*>* blocks,
8182                         size_t number_of_new_blocks,
8183                         size_t after) {
8184   DCHECK_LT(after, blocks->size());
8185   size_t old_size = blocks->size();
8186   size_t new_size = old_size + number_of_new_blocks;
8187   blocks->resize(new_size);
8188   std::copy_backward(blocks->begin() + after + 1u, blocks->begin() + old_size, blocks->end());
8189 }
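
// Illustrative example: with blocks = {b0, b1, b2, b3}, number_of_new_blocks = 2 and after = 1,
// the vector becomes {b0, b1, b2, b3, b2, b3}; entries 2 and 3 are stale copies that the caller
// is expected to overwrite with the new blocks, while the old b2 and b3 have moved to the end.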
8190 
8191 /*
8192  * Hunt "under the hood" of array lengths (leading to array references),
8193  * null checks (also leading to array references), and new arrays
8194  * (leading to the actual length). This makes it more likely related
8195  * instructions become actually comparable.
8196  */
HuntForDeclaration(HInstruction * instruction)8197 inline HInstruction* HuntForDeclaration(HInstruction* instruction) {
8198   while (instruction->IsArrayLength() ||
8199          instruction->IsNullCheck() ||
8200          instruction->IsNewArray()) {
8201     instruction = instruction->IsNewArray()
8202         ? instruction->AsNewArray()->GetLength()
8203         : instruction->InputAt(0);
8204   }
8205   return instruction;
8206 }
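
// Illustrative example: for `int[] a = new int[n]; ... a.length ...`, calling HuntForDeclaration
// on the HArrayLength walks through the HNullCheck and the HNewArray and returns `n`, so two
// length expressions rooted in the same allocation become directly comparable.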
8207 
IsAddOrSub(const HInstruction * instruction)8208 inline bool IsAddOrSub(const HInstruction* instruction) {
8209   return instruction->IsAdd() || instruction->IsSub();
8210 }
8211 
8212 void RemoveEnvironmentUses(HInstruction* instruction);
8213 bool HasEnvironmentUsedByOthers(HInstruction* instruction);
8214 void ResetEnvironmentInputRecords(HInstruction* instruction);
8215 
8216 // Detects an instruction that is >= 0. As long as the value is carried by
8217 // a single instruction, arithmetic wrap-around cannot occur.
8218 bool IsGEZero(HInstruction* instruction);
8219 
8220 }  // namespace art
8221 
8222 #endif  // ART_COMPILER_OPTIMIZING_NODES_H_
8223