/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_sinking.h"

#include "base/arena_bit_vector.h"
#include "base/bit_vector-inl.h"
#include "base/scoped_arena_allocator.h"
#include "base/scoped_arena_containers.h"
#include "common_dominator.h"
#include "nodes.h"

namespace art {

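// Code sinking moves instructions that are only needed on an uncommon path
// closer to, or into, that path, so that the common path does not execute
// them. Run() below treats every predecessor of the exit block that ends in a
// throw rather than a return as such an uncommon branch. An illustrative
// source pattern (not taken from this file) that benefits is argument
// checking, where the computation feeding a throw has no other users:
//
//   if (value < 0) {
//     throw new IllegalArgumentException("negative value: " + value);
//   }
//
// After inlining, the allocation of the exception and the computation of its
// message can be sunk into the throwing block.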
bool CodeSinking::Run() {
  HBasicBlock* exit = graph_->GetExitBlock();
  if (exit == nullptr) {
    // Infinite loop, just bail.
    return false;
  }
  // TODO(ngeoffray): we do not profile branches yet, so use throw instructions
  // as an indicator of an uncommon branch.
  for (HBasicBlock* exit_predecessor : exit->GetPredecessors()) {
    HInstruction* last = exit_predecessor->GetLastInstruction();
    // Any predecessor of the exit block that does not return must throw an exception.
    if (!last->IsReturn() && !last->IsReturnVoid()) {
      SinkCodeToUncommonBranch(exit_predecessor);
    }
  }
  return true;
}

static bool IsInterestingInstruction(HInstruction* instruction) {
  // Instructions from the entry block (for example constants) are never interesting to move.
  if (instruction->GetBlock() == instruction->GetBlock()->GetGraph()->GetEntryBlock()) {
    return false;
  }
  // We want to move moveable instructions that cannot throw, as well as
  // heap stores and allocations.

  // Volatile stores cannot be moved.
  if (instruction->IsInstanceFieldSet()) {
    if (instruction->AsInstanceFieldSet()->IsVolatile()) {
      return false;
    }
  }

  // Check allocations and strings first, as they can throw, but it is safe to move them.
  if (instruction->IsNewInstance() || instruction->IsNewArray() || instruction->IsLoadString()) {
    return true;
  }

  // Check whether it is safe to move a ConstructorFence.
  // (It is safe to move a fence that only protects the new-instance, but not one that
  // protects final field stores.)
  if (instruction->IsConstructorFence()) {
    HConstructorFence* ctor_fence = instruction->AsConstructorFence();

    // A fence with "0" inputs is dead and should've been removed in a prior pass.
    DCHECK_NE(0u, ctor_fence->InputCount());

    // TODO: this should be simplified to 'return true' since it's
    // potentially pessimizing any code sinking for inlined constructors with final fields.
    // TODO: double check that if the final field assignments are not moved,
    // then the fence is not moved either.

    return ctor_fence->GetAssociatedAllocation() != nullptr;
  }

  // All other instructions that can throw cannot be moved.
  if (instruction->CanThrow()) {
    return false;
  }

  // We can only sink stores into local allocations; other heap references can
  // be escaping. Note that allocations can escape too, but we only move
  // allocations if their users can move too, or are in the list of
  // post dominated blocks.
  if (instruction->IsInstanceFieldSet()) {
    if (!instruction->InputAt(0)->IsNewInstance()) {
      return false;
    }
  }

  if (instruction->IsArraySet()) {
    if (!instruction->InputAt(0)->IsNewArray()) {
      return false;
    }
  }

  // Heap accesses cannot go past instructions that have memory side effects, which
  // we are not tracking here. Note that the load/store elimination optimization
  // runs before this optimization, and should have removed interesting ones.
  // In theory, we could handle loads of local allocations, but this is currently
  // hard to test, as LSE removes them.
  if (instruction->IsStaticFieldGet() ||
      instruction->IsInstanceFieldGet() ||
      instruction->IsArrayGet()) {
    return false;
  }

  if (instruction->IsInstanceFieldSet() ||
      instruction->IsArraySet() ||
      instruction->CanBeMoved()) {
    return true;
  }
  return false;
}

static void AddInstruction(HInstruction* instruction,
                           const ArenaBitVector& processed_instructions,
                           const ArenaBitVector& discard_blocks,
                           ScopedArenaVector<HInstruction*>* worklist) {
  // Add to the work list if the instruction is not in the list of blocks
  // to discard, has not already been processed, and is of interest.
  if (!discard_blocks.IsBitSet(instruction->GetBlock()->GetBlockId()) &&
      !processed_instructions.IsBitSet(instruction->GetId()) &&
      IsInterestingInstruction(instruction)) {
    worklist->push_back(instruction);
  }
}

static void AddInputs(HInstruction* instruction,
                      const ArenaBitVector& processed_instructions,
                      const ArenaBitVector& discard_blocks,
                      ScopedArenaVector<HInstruction*>* worklist) {
  for (HInstruction* input : instruction->GetInputs()) {
    AddInstruction(input, processed_instructions, discard_blocks, worklist);
  }
}

static void AddInputs(HBasicBlock* block,
                      const ArenaBitVector& processed_instructions,
                      const ArenaBitVector& discard_blocks,
                      ScopedArenaVector<HInstruction*>* worklist) {
  for (HInstructionIterator it(block->GetPhis()); !it.Done(); it.Advance()) {
    AddInputs(it.Current(), processed_instructions, discard_blocks, worklist);
  }
  for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
    AddInputs(it.Current(), processed_instructions, discard_blocks, worklist);
  }
}

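// Returns whether `user` is a store into (or a constructor fence for) the
// object or array freshly allocated by `instruction`, located outside the
// post-dominated region. FindIdealPosition() can filter such uses out, so that
// stores which are themselves being sunk do not constrain where the
// allocation may be placed.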
static bool ShouldFilterUse(HInstruction* instruction,
                            HInstruction* user,
                            const ArenaBitVector& post_dominated) {
  if (instruction->IsNewInstance()) {
    return (user->IsInstanceFieldSet() || user->IsConstructorFence()) &&
        (user->InputAt(0) == instruction) &&
        !post_dominated.IsBitSet(user->GetBlock()->GetBlockId());
  } else if (instruction->IsNewArray()) {
    return (user->IsArraySet() || user->IsConstructorFence()) &&
        (user->InputAt(0) == instruction) &&
        !post_dominated.IsBitSet(user->GetBlock()->GetBlockId());
  }
  return false;
}


// Find the ideal position for moving `instruction`. If `filter` is true,
// we filter out store instructions into that instruction; those are processed
// first in step (3) of the sinking algorithm.
// This method is tailored to the sinking algorithm, unlike
// the generic HInstruction::MoveBeforeFirstUserAndOutOfLoops.
static HInstruction* FindIdealPosition(HInstruction* instruction,
                                       const ArenaBitVector& post_dominated,
                                       bool filter = false) {
  DCHECK(!instruction->IsPhi());  // Makes no sense for Phi.

  // Find the target block.
  CommonDominator finder(/* block= */ nullptr);
  for (const HUseListNode<HInstruction*>& use : instruction->GetUses()) {
    HInstruction* user = use.GetUser();
    if (!(filter && ShouldFilterUse(instruction, user, post_dominated))) {
      HBasicBlock* block = user->GetBlock();
      if (user->IsPhi()) {
        // Special case phis by taking the incoming block for regular ones,
        // or the dominator for catch phis.
        block = user->AsPhi()->IsCatchPhi()
            ? block->GetDominator()
            : block->GetPredecessors()[use.GetIndex()];
      }
      finder.Update(block);
    }
  }
  for (const HUseListNode<HEnvironment*>& use : instruction->GetEnvUses()) {
    DCHECK(!use.GetUser()->GetHolder()->IsPhi());
    DCHECK(!filter || !ShouldFilterUse(instruction, use.GetUser()->GetHolder(), post_dominated));
    finder.Update(use.GetUser()->GetHolder()->GetBlock());
  }
  HBasicBlock* target_block = finder.Get();
  if (target_block == nullptr) {
    // No user we can go next to? Likely an LSE or DCE limitation.
    return nullptr;
  }

  // Move to the first dominator not in a loop, if we can.
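  // Stay within the post-dominated region while hoisting out of loops, so the
  // instruction does not end up back on the common path.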
  while (target_block->IsInLoop()) {
    if (!post_dominated.IsBitSet(target_block->GetDominator()->GetBlockId())) {
      break;
    }
    target_block = target_block->GetDominator();
    DCHECK(target_block != nullptr);
  }

  // Bail if the instruction can throw and we are about to move into a catch block.
  if (instruction->CanThrow() && target_block->GetTryCatchInformation() != nullptr) {
    return nullptr;
  }

  // Find insertion position. No need to filter anymore, as we have found a
  // target block.
  HInstruction* insert_pos = nullptr;
  for (const HUseListNode<HInstruction*>& use : instruction->GetUses()) {
    if (use.GetUser()->GetBlock() == target_block &&
        (insert_pos == nullptr || use.GetUser()->StrictlyDominates(insert_pos))) {
      insert_pos = use.GetUser();
    }
  }
  for (const HUseListNode<HEnvironment*>& use : instruction->GetEnvUses()) {
    HInstruction* user = use.GetUser()->GetHolder();
    if (user->GetBlock() == target_block &&
        (insert_pos == nullptr || user->StrictlyDominates(insert_pos))) {
      insert_pos = user;
    }
  }
  if (insert_pos == nullptr) {
    // No user in `target_block`, insert before the control flow instruction.
    insert_pos = target_block->GetLastInstruction();
    DCHECK(insert_pos->IsControlFlow());
    // Avoid splitting HCondition from HIf to prevent unnecessary materialization.
    if (insert_pos->IsIf()) {
      HInstruction* if_input = insert_pos->AsIf()->InputAt(0);
      if (if_input == insert_pos->GetPrevious()) {
        insert_pos = if_input;
      }
    }
  }
  DCHECK(!insert_pos->IsPhi());
  return insert_pos;
}


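// Sink instructions towards `end_block` and the blocks it post dominates.
// The algorithm works in three steps:
//  (1) collect a (conservative) subset of the blocks post dominated by `end_block`,
//  (2) starting from the inputs of instructions in those blocks, find the
//      instructions whose uses all end up in the post-dominated region,
//  (3) move each such instruction just before its first use in that region.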
void CodeSinking::SinkCodeToUncommonBranch(HBasicBlock* end_block) {
  // Local allocator to discard data structures created below at the end of this optimization.
  ScopedArenaAllocator allocator(graph_->GetArenaStack());

  size_t number_of_instructions = graph_->GetCurrentInstructionId();
  ScopedArenaVector<HInstruction*> worklist(allocator.Adapter(kArenaAllocMisc));
  ArenaBitVector processed_instructions(&allocator, number_of_instructions, /* expandable= */ false);
  processed_instructions.ClearAllBits();
  ArenaBitVector post_dominated(&allocator, graph_->GetBlocks().size(), /* expandable= */ false);
  post_dominated.ClearAllBits();
  ArenaBitVector instructions_that_can_move(
      &allocator, number_of_instructions, /* expandable= */ false);
  instructions_that_can_move.ClearAllBits();
  ScopedArenaVector<HInstruction*> move_in_order(allocator.Adapter(kArenaAllocMisc));

  // Step (1): Visit post order to get a subset of blocks post dominated by `end_block`.
  // TODO(ngeoffray): Getting the full set of post-dominated blocks should be done by
  // computing the post dominator tree, but that could be too time consuming. Also,
  // we should start the analysis from blocks dominated by an uncommon branch, but we
  // don't profile branches yet.
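  // Walk the post order: once `end_block` has been seen, a block is added to
  // the set if all of its successors are already in it.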
  bool found_block = false;
  for (HBasicBlock* block : graph_->GetPostOrder()) {
    if (block == end_block) {
      found_block = true;
      post_dominated.SetBit(block->GetBlockId());
    } else if (found_block) {
      bool is_post_dominated = true;
      if (block->GetSuccessors().empty()) {
        // We currently bail for loops.
        is_post_dominated = false;
      } else {
        for (HBasicBlock* successor : block->GetSuccessors()) {
          if (!post_dominated.IsBitSet(successor->GetBlockId())) {
            is_post_dominated = false;
            break;
          }
        }
      }
      if (is_post_dominated) {
        post_dominated.SetBit(block->GetBlockId());
      }
    }
  }

  // Now that we have found a subset of post-dominated blocks, add to the worklist all inputs
  // of instructions in these blocks that are not themselves in these blocks.
  // Also find the common dominator of the found post dominated blocks, to help filtering
  // out un-movable uses in step (2).
  CommonDominator finder(end_block);
  for (size_t i = 0, e = graph_->GetBlocks().size(); i < e; ++i) {
    if (post_dominated.IsBitSet(i)) {
      finder.Update(graph_->GetBlocks()[i]);
      AddInputs(graph_->GetBlocks()[i], processed_instructions, post_dominated, &worklist);
    }
  }
  HBasicBlock* common_dominator = finder.Get();

  // Step (2): iterate over the worklist to find sinking candidates.
  while (!worklist.empty()) {
    HInstruction* instruction = worklist.back();
    if (processed_instructions.IsBitSet(instruction->GetId())) {
      // The instruction has already been processed, continue. This happens
      // when the instruction is the input/user of multiple instructions.
      worklist.pop_back();
      continue;
    }
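    // `can_move` is cleared as soon as a use prevents sinking altogether;
    // `all_users_in_post_dominated_blocks` stays true only if every use is
    // already in the target region, otherwise the users themselves must be
    // shown to be movable first.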
    bool all_users_in_post_dominated_blocks = true;
    bool can_move = true;
    // Check users of the instruction.
    for (const HUseListNode<HInstruction*>& use : instruction->GetUses()) {
      HInstruction* user = use.GetUser();
      if (!post_dominated.IsBitSet(user->GetBlock()->GetBlockId()) &&
          !instructions_that_can_move.IsBitSet(user->GetId())) {
        all_users_in_post_dominated_blocks = false;
        // If we've already processed this user, or the user cannot be moved, or
        // is not dominating the post dominated blocks, bail.
        // TODO(ngeoffray): The domination check is an approximation. We should
        // instead check if the dominated blocks post dominate the user's block,
        // but we do not have post dominance information here.
        if (processed_instructions.IsBitSet(user->GetId()) ||
            !IsInterestingInstruction(user) ||
            !user->GetBlock()->Dominates(common_dominator)) {
          can_move = false;
          break;
        }
      }
    }

    // Check environment users of the instruction. Some of these users require
    // the instruction not to move.
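    // A value referenced only by environments can normally be dropped from
    // them (see below), but not when the environment may actually be needed:
    // debuggable code, deoptimization points, instructions that may throw
    // into a catch block, and suspend checks in OSR-compiled methods.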
    if (all_users_in_post_dominated_blocks) {
      for (const HUseListNode<HEnvironment*>& use : instruction->GetEnvUses()) {
        HEnvironment* environment = use.GetUser();
        HInstruction* user = environment->GetHolder();
        if (!post_dominated.IsBitSet(user->GetBlock()->GetBlockId())) {
          if (graph_->IsDebuggable() ||
              user->IsDeoptimize() ||
              user->CanThrowIntoCatchBlock() ||
              (user->IsSuspendCheck() && graph_->IsCompilingOsr())) {
            can_move = false;
            break;
          }
        }
      }
    }
    if (!can_move) {
      // Instruction cannot be moved, mark it as processed and remove it from the work
      // list.
      processed_instructions.SetBit(instruction->GetId());
      worklist.pop_back();
    } else if (all_users_in_post_dominated_blocks) {
      // Instruction is a candidate for being sunk. Mark it as such, remove it from the
      // work list, and add its inputs to the work list.
      instructions_that_can_move.SetBit(instruction->GetId());
      move_in_order.push_back(instruction);
      processed_instructions.SetBit(instruction->GetId());
      worklist.pop_back();
      AddInputs(instruction, processed_instructions, post_dominated, &worklist);
      // Drop the environment uses that are not in the list of post-dominated blocks. This is
      // to help step (3) of this optimization, when we start moving instructions
      // closer to their use.
      for (const HUseListNode<HEnvironment*>& use : instruction->GetEnvUses()) {
        HEnvironment* environment = use.GetUser();
        HInstruction* user = environment->GetHolder();
        if (!post_dominated.IsBitSet(user->GetBlock()->GetBlockId())) {
          environment->RemoveAsUserOfInput(use.GetIndex());
          environment->SetRawEnvAt(use.GetIndex(), nullptr);
        }
      }
    } else {
      // The information we have on the users was not enough to decide whether the
      // instruction could be moved.
      // Add the users to the work list, and keep the instruction in the work list
      // to process it again once all users have been processed.
      for (const HUseListNode<HInstruction*>& use : instruction->GetUses()) {
        AddInstruction(use.GetUser(), processed_instructions, post_dominated, &worklist);
      }
    }
  }

  // Make sure we process instructions in dominated order. This is required for heap
  // stores.
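  // The comparator below orders dominated instructions first, so that the stores into an
  // allocation are repositioned before the allocation itself; the allocation can then
  // be sunk right before its first (already moved) use.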
  std::sort(move_in_order.begin(), move_in_order.end(), [](HInstruction* a, HInstruction* b) {
    return b->StrictlyDominates(a);
  });

  // Step (3): Try to move sinking candidates.
  for (HInstruction* instruction : move_in_order) {
    HInstruction* position = nullptr;
    if (instruction->IsArraySet()
            || instruction->IsInstanceFieldSet()
            || instruction->IsConstructorFence()) {
      if (!instructions_that_can_move.IsBitSet(instruction->InputAt(0)->GetId())) {
        // A store can trivially move, but it can safely do so only if the heap
        // location it stores to can also move.
        // TODO(ngeoffray): Handle allocation/store cycles by pruning these instructions
        // from the set and all their inputs.
        continue;
      }
      // Find the position of the instruction we're storing into, filtering out this
      // store and all other stores to that instruction.
      position = FindIdealPosition(instruction->InputAt(0), post_dominated, /* filter= */ true);

      // The position needs to be dominated by the store, in order for the store to move there.
      if (position == nullptr || !instruction->GetBlock()->Dominates(position->GetBlock())) {
        continue;
      }
    } else {
      // Find the ideal position within the post dominated blocks.
      position = FindIdealPosition(instruction, post_dominated);
      if (position == nullptr) {
        continue;
      }
    }
    // Bail if we could not find a position in the post dominated blocks (for example,
    // if there are multiple users whose common dominator is not in the list of
    // post dominated blocks).
    if (!post_dominated.IsBitSet(position->GetBlock()->GetBlockId())) {
      continue;
    }
    MaybeRecordStat(stats_, MethodCompilationStat::kInstructionSunk);
    instruction->MoveBefore(position, /* do_checks= */ false);
  }
}

}  // namespace art