/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_NODES_H_
#define ART_COMPILER_OPTIMIZING_NODES_H_

#include <algorithm>
#include <array>
#include <type_traits>

#include "art_method.h"
#include "base/arena_allocator.h"
#include "base/arena_bit_vector.h"
#include "base/arena_containers.h"
#include "base/arena_object.h"
#include "base/array_ref.h"
#include "base/intrusive_forward_list.h"
#include "base/iteration_range.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "base/quasi_atomic.h"
#include "base/stl_util.h"
#include "base/transform_array_ref.h"
#include "block_namer.h"
#include "class_root.h"
#include "compilation_kind.h"
#include "data_type.h"
#include "deoptimization_kind.h"
#include "dex/dex_file.h"
#include "dex/dex_file_types.h"
#include "dex/invoke_type.h"
#include "dex/method_reference.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "handle.h"
#include "handle_cache.h"
#include "intrinsics_enum.h"
#include "locations.h"
#include "mirror/class.h"
#include "mirror/method_type.h"
#include "offsets.h"
#include "reference_type_info.h"

namespace art HIDDEN {

class ArenaStack;
class CodeGenerator;
class GraphChecker;
class HBasicBlock;
class HCondition;
class HConstructorFence;
class HCurrentMethod;
class HDoubleConstant;
class HEnvironment;
class HFloatConstant;
class HGraphBuilder;
class HGraphVisitor;
class HInstruction;
class HIntConstant;
class HInvoke;
class HLongConstant;
class HNullConstant;
class HParameterValue;
class HPhi;
class HSuspendCheck;
class HTryBoundary;
class HVecCondition;
class FieldInfo;
class LiveInterval;
class LocationSummary;
class ProfilingInfo;
class SlowPathCode;
class SsaBuilder;

namespace mirror {
class DexCache;
}  // namespace mirror

static const int kDefaultNumberOfBlocks = 8;
static const int kDefaultNumberOfSuccessors = 2;
static const int kDefaultNumberOfPredecessors = 2;
static const int kDefaultNumberOfExceptionalPredecessors = 0;
static const int kDefaultNumberOfDominatedBlocks = 1;
static const int kDefaultNumberOfBackEdges = 1;

// The maximum (meaningful) distance (31) that can be used in an integer shift/rotate operation.
static constexpr int32_t kMaxIntShiftDistance = 0x1f;
// The maximum (meaningful) distance (63) that can be used in a long shift/rotate operation.
static constexpr int32_t kMaxLongShiftDistance = 0x3f;

static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1);
static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1);

static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1);

static constexpr uint32_t kNoDexPc = -1;

inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) {
  // For the purposes of the compiler, the dex files must actually be the same object
  // if we want to safely treat them as the same. This is especially important for JIT
  // as custom class loaders can open the same underlying file (or memory) multiple
  // times and provide different class resolution but no two class loaders should ever
  // use the same DexFile object - doing so is an unsupported hack that can lead to
  // all sorts of weird failures.
  return &lhs == &rhs;
}
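
// A minimal sketch of the identity-based contract above (hypothetical dex
// files, not actual API calls):
//   const DexFile& a = ...;  // opened by class loader A
//   const DexFile& b = ...;  // same bytes on disk, opened by class loader B
//   IsSameDexFile(a, a);     // true: same object
//   IsSameDexFile(a, b);     // false: distinct objects, even if bytes match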

enum IfCondition {
  // All types.
  kCondEQ,  // ==
  kCondNE,  // !=
  // Signed integers and floating-point numbers.
  kCondLT,  // <
  kCondLE,  // <=
  kCondGT,  // >
  kCondGE,  // >=
  // Unsigned integers.
  kCondB,   // <
  kCondBE,  // <=
  kCondA,   // >
  kCondAE,  // >=
  // First and last aliases.
  kCondFirst = kCondEQ,
  kCondLast = kCondAE,
};

enum GraphAnalysisResult {
  kAnalysisSkipped,
  kAnalysisInvalidBytecode,
  kAnalysisFailThrowCatchLoop,
  kAnalysisFailAmbiguousArrayOp,
  kAnalysisFailIrreducibleLoopAndStringInit,
  kAnalysisFailPhiEquivalentInOsr,
  kAnalysisSuccess,
};

std::ostream& operator<<(std::ostream& os, GraphAnalysisResult ga);

template <typename T>
static inline typename std::make_unsigned<T>::type MakeUnsigned(T x) {
  return static_cast<typename std::make_unsigned<T>::type>(x);
}
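
// For example (a quick sanity sketch): MakeUnsigned(int32_t{-1}) yields
// 0xFFFFFFFFu, and the result type is uint32_t.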

class HInstructionList : public ValueObject {
 public:
  HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {}

  void AddInstruction(HInstruction* instruction);
  void RemoveInstruction(HInstruction* instruction);

  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);

  // Return true if this list contains `instruction`.
  bool Contains(HInstruction* instruction) const;

  // Return true if `instruction1` is found before `instruction2` in
  // this instruction list and false otherwise. Aborts if neither of
  // these instructions is found.
  bool FoundBefore(const HInstruction* instruction1,
                   const HInstruction* instruction2) const;

  bool IsEmpty() const { return first_instruction_ == nullptr; }
  void Clear() { first_instruction_ = last_instruction_ = nullptr; }

  // Update the block of all instructions to be `block`.
  void SetBlockOfInstructions(HBasicBlock* block) const;

  void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list);
  void AddBefore(HInstruction* cursor, const HInstructionList& instruction_list);
  void Add(const HInstructionList& instruction_list);

  // Return the number of instructions in the list. This is an expensive operation.
  size_t CountSize() const;

 private:
  HInstruction* first_instruction_;
  HInstruction* last_instruction_;

  friend class HBasicBlock;
  friend class HGraph;
  friend class HInstruction;
  friend class HInstructionIterator;
  friend class HInstructionIteratorHandleChanges;
  friend class HBackwardInstructionIterator;

  DISALLOW_COPY_AND_ASSIGN(HInstructionList);
};
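
// A typical traversal sketch, assuming the iterator types declared as friends
// above (they are defined later in this file):
//   for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
//     HInstruction* current = it.Current();
//     ...
//   }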

// Control-flow graph of a method. Contains a list of basic blocks.
class HGraph : public ArenaObject<kArenaAllocGraph> {
 public:
  HGraph(ArenaAllocator* allocator,
         ArenaStack* arena_stack,
         VariableSizedHandleScope* handles,
         const DexFile& dex_file,
         uint32_t method_idx,
         InstructionSet instruction_set,
         InvokeType invoke_type = kInvalidInvokeType,
         bool dead_reference_safe = false,
         bool debuggable = false,
         CompilationKind compilation_kind = CompilationKind::kOptimized,
         int start_instruction_id = 0)
      : allocator_(allocator),
        arena_stack_(arena_stack),
        handle_cache_(handles),
        blocks_(allocator->Adapter(kArenaAllocBlockList)),
        reverse_post_order_(allocator->Adapter(kArenaAllocReversePostOrder)),
        linear_order_(allocator->Adapter(kArenaAllocLinearOrder)),
        entry_block_(nullptr),
        exit_block_(nullptr),
        number_of_vregs_(0),
        number_of_in_vregs_(0),
        temporaries_vreg_slots_(0),
        has_bounds_checks_(false),
        has_try_catch_(false),
        has_monitor_operations_(false),
        has_traditional_simd_(false),
        has_predicated_simd_(false),
        has_loops_(false),
        has_irreducible_loops_(false),
        has_direct_critical_native_call_(false),
        has_always_throwing_invokes_(false),
        dead_reference_safe_(dead_reference_safe),
        debuggable_(debuggable),
        current_instruction_id_(start_instruction_id),
        dex_file_(dex_file),
        method_idx_(method_idx),
        invoke_type_(invoke_type),
        in_ssa_form_(false),
        number_of_cha_guards_(0),
        instruction_set_(instruction_set),
        cached_null_constant_(nullptr),
        cached_int_constants_(std::less<int32_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
        cached_float_constants_(std::less<int32_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
        cached_long_constants_(std::less<int64_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
        cached_double_constants_(std::less<int64_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
        cached_current_method_(nullptr),
        art_method_(nullptr),
        compilation_kind_(compilation_kind),
        useful_optimizing_(false),
        cha_single_implementation_list_(allocator->Adapter(kArenaAllocCHA)) {
    blocks_.reserve(kDefaultNumberOfBlocks);
  }

  std::ostream& Dump(std::ostream& os,
                     CodeGenerator* codegen,
                     std::optional<std::reference_wrapper<const BlockNamer>> namer = std::nullopt);

  ArenaAllocator* GetAllocator() const { return allocator_; }
  ArenaStack* GetArenaStack() const { return arena_stack_; }

  HandleCache* GetHandleCache() { return &handle_cache_; }

  const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; }

  // An iterator over only the blocks that are still actually in the graph (when
  // blocks are removed they are replaced with 'nullptr' in GetBlocks to
  // simplify block-id assignment and avoid memmoves in the block-list).
  IterationRange<FilterNull<ArenaVector<HBasicBlock*>::const_iterator>> GetActiveBlocks() const {
    return FilterOutNull(MakeIterationRange(GetBlocks()));
  }
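
  // A minimal usage sketch:
  //   for (HBasicBlock* block : graph->GetActiveBlocks()) {
  //     ...  // `block` is never nullptr here.
  //   }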

  bool IsInSsaForm() const { return in_ssa_form_; }
  void SetInSsaForm() { in_ssa_form_ = true; }

  HBasicBlock* GetEntryBlock() const { return entry_block_; }
  HBasicBlock* GetExitBlock() const { return exit_block_; }
  bool HasExitBlock() const { return exit_block_ != nullptr; }

  void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; }
  void SetExitBlock(HBasicBlock* block) { exit_block_ = block; }

  void AddBlock(HBasicBlock* block);

  void ComputeDominanceInformation();
  void ClearDominanceInformation();
  void ClearLoopInformation();
  void FindBackEdges(ArenaBitVector* visited);
  GraphAnalysisResult BuildDominatorTree();
  GraphAnalysisResult RecomputeDominatorTree();
  void SimplifyCFG();
  void SimplifyCatchBlocks();

  // Analyze all natural loops in this graph. Returns a code specifying that it
  // was successful or the reason for failure. The method will fail if a loop
  // is a throw-catch loop, i.e. the header is a catch block.
  GraphAnalysisResult AnalyzeLoops() const;

  // Iterate over blocks to compute try block membership. Needs reverse post
  // order and loop information.
  void ComputeTryBlockInformation();

  // Inline this graph in `outer_graph`, replacing the given `invoke` instruction.
  // Returns the instruction to replace the invoke expression or null if the
  // invoke is for a void method. Note that the caller is responsible for replacing
  // and removing the invoke instruction.
  HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke);

  // Update the loop and try membership of `block`, which was spawned from `reference`.
  // In case `reference` is a back edge, `replace_if_back_edge` notifies whether `block`
  // should be the new back edge.
  // `has_more_specific_try_catch_info` will be set to true when inlining a try catch.
  void UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
                                             HBasicBlock* reference,
                                             bool replace_if_back_edge,
                                             bool has_more_specific_try_catch_info = false);

  // Adds a couple of blocks needed to test whether the loop body is entered,
  // to hold deoptimization instructions, etc.
  void TransformLoopHeaderForBCE(HBasicBlock* header);

  // Adds a new loop directly after the loop with the given header and exit.
  // Returns the new preheader.
  HBasicBlock* TransformLoopForVectorization(HBasicBlock* header,
                                             HBasicBlock* body,
                                             HBasicBlock* exit);

  // Removes `block` from the graph. Assumes `block` has been disconnected from
  // other blocks and has no instructions or phis.
  void DeleteDeadEmptyBlock(HBasicBlock* block);

  // Splits the edge between `block` and `successor` while preserving the
  // indices in the predecessor/successor lists. If there are multiple edges
  // between the blocks, the lowest indices are used.
  // Returns the new block which is empty and has the same dex pc as `successor`.
  HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor);

  void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor);

  // Splits the edge between `block` and `successor` and then updates the graph's RPO to keep
  // consistency without recomputing the whole graph.
  HBasicBlock* SplitEdgeAndUpdateRPO(HBasicBlock* block, HBasicBlock* successor);

  void OrderLoopHeaderPredecessors(HBasicBlock* header);

  // Transform a loop into a format with a single preheader.
  //
  // Each phi in the header should be split: the original one in the header should only hold
  // inputs reachable from the back edges and a single input from the preheader. The newly created
  // phi in the preheader should collate the inputs from the original multiple incoming blocks.
  //
  // Loops in the graph typically have a single preheader, so this method is used to "repair" loops
  // that no longer have this property.
  void TransformLoopToSinglePreheaderFormat(HBasicBlock* header);

  void SimplifyLoop(HBasicBlock* header);

  int32_t GetNextInstructionId() {
    CHECK_NE(current_instruction_id_, INT32_MAX);
    return current_instruction_id_++;
  }

  int32_t GetCurrentInstructionId() const {
    return current_instruction_id_;
  }

  void SetCurrentInstructionId(int32_t id) {
    CHECK_GE(id, current_instruction_id_);
    current_instruction_id_ = id;
  }

  void UpdateTemporariesVRegSlots(size_t slots) {
    temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_);
  }

  size_t GetTemporariesVRegSlots() const {
    DCHECK(!in_ssa_form_);
    return temporaries_vreg_slots_;
  }

  void SetNumberOfVRegs(uint16_t number_of_vregs) {
    number_of_vregs_ = number_of_vregs;
  }

  uint16_t GetNumberOfVRegs() const {
    return number_of_vregs_;
  }

  void SetNumberOfInVRegs(uint16_t value) {
    number_of_in_vregs_ = value;
  }

  uint16_t GetNumberOfInVRegs() const {
    return number_of_in_vregs_;
  }

  uint16_t GetNumberOfLocalVRegs() const {
    DCHECK(!in_ssa_form_);
    return number_of_vregs_ - number_of_in_vregs_;
  }

  const ArenaVector<HBasicBlock*>& GetReversePostOrder() const {
    return reverse_post_order_;
  }

  ArrayRef<HBasicBlock* const> GetReversePostOrderSkipEntryBlock() const {
    DCHECK(GetReversePostOrder()[0] == entry_block_);
    return ArrayRef<HBasicBlock* const>(GetReversePostOrder()).SubArray(1);
  }

  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetPostOrder() const {
    return ReverseRange(GetReversePostOrder());
  }

  const ArenaVector<HBasicBlock*>& GetLinearOrder() const {
    return linear_order_;
  }

  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetLinearPostOrder() const {
    return ReverseRange(GetLinearOrder());
  }

  bool HasBoundsChecks() const {
    return has_bounds_checks_;
  }

  void SetHasBoundsChecks(bool value) {
    has_bounds_checks_ = value;
  }

  // Is the code known to be robust against eliminating dead references
  // and the effects of early finalization?
  bool IsDeadReferenceSafe() const { return dead_reference_safe_; }

  void MarkDeadReferenceUnsafe() { dead_reference_safe_ = false; }

  bool IsDebuggable() const { return debuggable_; }

  // Returns a constant of the given type and value. If it does not exist
  // already, it is created and inserted into the graph. This method is only for
  // integral types.
  HConstant* GetConstant(DataType::Type type, int64_t value);

  // TODO: This is problematic for the consistency of reference type propagation
  // because it can be created anytime after the pass and thus it will be left
  // with an invalid type.
  HNullConstant* GetNullConstant();

  HIntConstant* GetIntConstant(int32_t value);
  HLongConstant* GetLongConstant(int64_t value);
  HFloatConstant* GetFloatConstant(float value);
  HDoubleConstant* GetDoubleConstant(double value);

  HCurrentMethod* GetCurrentMethod();

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  uint32_t GetMethodIdx() const {
    return method_idx_;
  }

  // Get the method name (without the signature), e.g. "<init>"
  const char* GetMethodName() const;

  // Get the pretty method name (class + name + optionally signature).
  std::string PrettyMethod(bool with_signature = true) const;

  InvokeType GetInvokeType() const {
    return invoke_type_;
  }

  InstructionSet GetInstructionSet() const {
    return instruction_set_;
  }

  bool IsCompilingOsr() const { return compilation_kind_ == CompilationKind::kOsr; }

  bool IsCompilingBaseline() const { return compilation_kind_ == CompilationKind::kBaseline; }

  CompilationKind GetCompilationKind() const { return compilation_kind_; }

  ArenaSet<ArtMethod*>& GetCHASingleImplementationList() {
    return cha_single_implementation_list_;
  }

  // In case of OSR we intend to use SuspendChecks as an entry point to the
  // function; for debuggable graphs we might deoptimize to interpreter from
  // SuspendChecks. In these cases we should always generate code for them.
  bool SuspendChecksAreAllowedToNoOp() const {
    return !IsDebuggable() && !IsCompilingOsr();
  }

  void AddCHASingleImplementationDependency(ArtMethod* method) {
    cha_single_implementation_list_.insert(method);
  }

  bool HasShouldDeoptimizeFlag() const {
    return number_of_cha_guards_ != 0 || debuggable_;
  }

  bool HasTryCatch() const { return has_try_catch_; }
  void SetHasTryCatch(bool value) { has_try_catch_ = value; }

  bool HasMonitorOperations() const { return has_monitor_operations_; }
  void SetHasMonitorOperations(bool value) { has_monitor_operations_ = value; }

  bool HasTraditionalSIMD() { return has_traditional_simd_; }
  void SetHasTraditionalSIMD(bool value) { has_traditional_simd_ = value; }

  bool HasPredicatedSIMD() { return has_predicated_simd_; }
  void SetHasPredicatedSIMD(bool value) { has_predicated_simd_ = value; }

  bool HasSIMD() const { return has_traditional_simd_ || has_predicated_simd_; }

  bool HasLoops() const { return has_loops_; }
  void SetHasLoops(bool value) { has_loops_ = value; }

  bool HasIrreducibleLoops() const { return has_irreducible_loops_; }
  void SetHasIrreducibleLoops(bool value) { has_irreducible_loops_ = value; }

  bool HasDirectCriticalNativeCall() const { return has_direct_critical_native_call_; }
  void SetHasDirectCriticalNativeCall(bool value) { has_direct_critical_native_call_ = value; }

  bool HasAlwaysThrowingInvokes() const { return has_always_throwing_invokes_; }
  void SetHasAlwaysThrowingInvokes(bool value) { has_always_throwing_invokes_ = value; }

  ArtMethod* GetArtMethod() const { return art_method_; }
  void SetArtMethod(ArtMethod* method) { art_method_ = method; }

  void SetProfilingInfo(ProfilingInfo* info) { profiling_info_ = info; }
  ProfilingInfo* GetProfilingInfo() const { return profiling_info_; }

  ReferenceTypeInfo GetInexactObjectRti() {
    return ReferenceTypeInfo::Create(handle_cache_.GetObjectClassHandle(), /* is_exact= */ false);
  }

  uint32_t GetNumberOfCHAGuards() const { return number_of_cha_guards_; }
  void SetNumberOfCHAGuards(uint32_t num) { number_of_cha_guards_ = num; }
  void IncrementNumberOfCHAGuards() { number_of_cha_guards_++; }

  void SetUsefulOptimizing() { useful_optimizing_ = true; }
  bool IsUsefulOptimizing() const { return useful_optimizing_; }

 private:
  void RemoveDeadBlocksInstructionsAsUsersAndDisconnect(const ArenaBitVector& visited) const;
  void RemoveDeadBlocks(const ArenaBitVector& visited);

  template <class InstructionType, typename ValueType>
  InstructionType* CreateConstant(ValueType value,
                                  ArenaSafeMap<ValueType, InstructionType*>* cache);

  void InsertConstant(HConstant* instruction);

  // Cache a float constant into the graph. This method should only be
  // called by the SsaBuilder when creating "equivalent" instructions.
  void CacheFloatConstant(HFloatConstant* constant);

  // See CacheFloatConstant comment.
  void CacheDoubleConstant(HDoubleConstant* constant);

  ArenaAllocator* const allocator_;
  ArenaStack* const arena_stack_;

  HandleCache handle_cache_;

  // List of blocks in insertion order.
  ArenaVector<HBasicBlock*> blocks_;

  // List of blocks to perform a reverse post order tree traversal.
  ArenaVector<HBasicBlock*> reverse_post_order_;

  // List of blocks to perform a linear order tree traversal. Unlike the reverse
  // post order, this order is not incrementally kept up-to-date.
  ArenaVector<HBasicBlock*> linear_order_;

  HBasicBlock* entry_block_;
  HBasicBlock* exit_block_;

  // The number of virtual registers in this method. Contains the parameters.
  uint16_t number_of_vregs_;

  // The number of virtual registers used by parameters of this method.
  uint16_t number_of_in_vregs_;

  // Number of vreg size slots that the temporaries use (used in baseline compiler).
  size_t temporaries_vreg_slots_;

  // Flag whether there are bounds checks in the graph. We can skip
  // BCE if it's false.
  bool has_bounds_checks_;

  // Flag whether there are try/catch blocks in the graph. We will skip
  // try/catch-related passes if it's false.
  bool has_try_catch_;

  // Flag whether there are any HMonitorOperation in the graph. If so, a
  // DexRegisterMap must be present to allow deadlock analysis for
  // non-debuggable code.
  bool has_monitor_operations_;

  // Flags whether SIMD (traditional or predicated) instructions appear in the graph.
  // If either is true, the code generators may have to be more careful spilling the wider
  // contents of SIMD registers.
  bool has_traditional_simd_;
  bool has_predicated_simd_;

  // Flag whether there are any loops in the graph. We can skip loop
  // optimization if it's false.
  bool has_loops_;

  // Flag whether there are any irreducible loops in the graph.
  bool has_irreducible_loops_;

  // Flag whether there are any direct calls to native code registered
  // for @CriticalNative methods.
  bool has_direct_critical_native_call_;

  // Flag whether the graph contains invokes that always throw.
  bool has_always_throwing_invokes_;

  // Is the code known to be robust against eliminating dead references
  // and the effects of early finalization? If false, dead reference variables
  // are kept if they might be visible to the garbage collector.
  // Currently this means that the class was declared to be dead-reference-safe,
  // the method accesses no reachability-sensitive fields or data, and the same
  // is true for any methods that were inlined into the current one.
  bool dead_reference_safe_;

  // Indicates whether the graph should be compiled in a way that
  // ensures full debuggability. If false, we can apply more
  // aggressive optimizations that may limit the level of debugging.
  const bool debuggable_;

  // The current id to assign to a newly added instruction. See HInstruction.id_.
  int32_t current_instruction_id_;

  // The dex file from which the method comes.
  const DexFile& dex_file_;

  // The method index in the dex file.
  const uint32_t method_idx_;

  // If inlined, this encodes how the callee is being invoked.
  const InvokeType invoke_type_;

  // Whether the graph has been transformed to SSA form. Only used
  // in debug mode to ensure we are not using properties only valid
  // for non-SSA form (like the number of temporaries).
  bool in_ssa_form_;

  // Number of CHA guards in the graph. Used to short-circuit the
  // CHA guard optimization pass when there is no CHA guard left.
  uint32_t number_of_cha_guards_;

  const InstructionSet instruction_set_;

  // Cached constants.
  HNullConstant* cached_null_constant_;
  ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_;
  ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_;
  ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_;
  ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_;

  HCurrentMethod* cached_current_method_;

  // The ArtMethod this graph is for. Note that for AOT, it may be null,
  // for example for methods whose declaring class could not be resolved
  // (such as when the superclass could not be found).
  ArtMethod* art_method_;

  // The `ProfilingInfo` associated with the method being compiled.
  ProfilingInfo* profiling_info_;

  // How we are compiling the graph: either optimized, osr, or baseline.
  // For osr, we will make all loops seen as irreducible and emit special
  // stack maps to mark compiled code entries which the interpreter can
  // directly jump to.
  const CompilationKind compilation_kind_;

  // Whether, after compiling baseline, it is still useful to re-optimize this
  // method.
  bool useful_optimizing_;

  // List of methods that are assumed to have single implementation.
  ArenaSet<ArtMethod*> cha_single_implementation_list_;

  friend class SsaBuilder;           // For caching constants.
  friend class SsaLivenessAnalysis;  // For the linear order.
  friend class HInliner;             // For the reverse post order.
  ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1);
  DISALLOW_COPY_AND_ASSIGN(HGraph);
};

class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> {
 public:
  HLoopInformation(HBasicBlock* header, HGraph* graph)
      : header_(header),
        suspend_check_(nullptr),
        irreducible_(false),
        contains_irreducible_loop_(false),
        back_edges_(graph->GetAllocator()->Adapter(kArenaAllocLoopInfoBackEdges)),
        // Make bit vector growable, as the number of blocks may change.
        blocks_(graph->GetAllocator(),
                graph->GetBlocks().size(),
                true,
                kArenaAllocLoopInfoBackEdges) {
    back_edges_.reserve(kDefaultNumberOfBackEdges);
  }

  bool IsIrreducible() const { return irreducible_; }
  bool ContainsIrreducibleLoop() const { return contains_irreducible_loop_; }

  void Dump(std::ostream& os);

  HBasicBlock* GetHeader() const {
    return header_;
  }

  void SetHeader(HBasicBlock* block) {
    header_ = block;
  }

  HSuspendCheck* GetSuspendCheck() const { return suspend_check_; }
  void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; }
  bool HasSuspendCheck() const { return suspend_check_ != nullptr; }

  void AddBackEdge(HBasicBlock* back_edge) {
    back_edges_.push_back(back_edge);
  }

  void RemoveBackEdge(HBasicBlock* back_edge) {
    RemoveElement(back_edges_, back_edge);
  }

  bool IsBackEdge(const HBasicBlock& block) const {
    return ContainsElement(back_edges_, &block);
  }

  size_t NumberOfBackEdges() const {
    return back_edges_.size();
  }

  HBasicBlock* GetPreHeader() const;

  const ArenaVector<HBasicBlock*>& GetBackEdges() const {
    return back_edges_;
  }

  // Returns the lifetime position of the back edge that has the
  // greatest lifetime position.
  size_t GetLifetimeEnd() const;

  void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) {
    ReplaceElement(back_edges_, existing, new_back_edge);
  }

  // Finds blocks that are part of this loop.
  void Populate();

  // Updates the blocks population of the loop and all of its outer loops
  // recursively after the population of the inner loop is updated.
  void PopulateInnerLoopUpwards(HLoopInformation* inner_loop);

  // Returns whether this loop information contains `block`.
  // Note that this loop information *must* be populated before entering this function.
  bool Contains(const HBasicBlock& block) const;

  // Returns whether this loop information is an inner loop of `other`.
  // Note that `other` *must* be populated before entering this function.
  bool IsIn(const HLoopInformation& other) const;

  // Returns true if `instruction` is not defined within this loop.
  bool IsDefinedOutOfTheLoop(HInstruction* instruction) const;

  const ArenaBitVector& GetBlocks() const { return blocks_; }

  void Add(HBasicBlock* block);
  void Remove(HBasicBlock* block);

  void ClearAllBlocks() {
    blocks_.ClearAllBits();
  }

  bool HasBackEdgeNotDominatedByHeader() const;

  bool IsPopulated() const {
    return blocks_.GetHighestBitSet() != -1;
  }

  bool DominatesAllBackEdges(HBasicBlock* block);

  bool HasExitEdge() const;

  // Resets back edge and blocks-in-loop data.
  void ResetBasicBlockData() {
    back_edges_.clear();
    ClearAllBlocks();
  }

 private:
  // Internal recursive implementation of `Populate`.
  void PopulateRecursive(HBasicBlock* block);
  void PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized);

  HBasicBlock* header_;
  HSuspendCheck* suspend_check_;
  bool irreducible_;
  bool contains_irreducible_loop_;
  ArenaVector<HBasicBlock*> back_edges_;
  ArenaBitVector blocks_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformation);
};
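
// A short usage sketch (remember that the loop info must be populated first):
//   HLoopInformation* loop = header->GetLoopInformation();
//   if (loop != nullptr && loop->Contains(*block) && !loop->IsIrreducible()) {
//     ...
//   }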

// Stores try/catch information for basic blocks.
// Note that HGraph is constructed so that catch blocks cannot simultaneously
// be try blocks.
class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> {
 public:
  // Try block information constructor.
  explicit TryCatchInformation(const HTryBoundary& try_entry)
      : try_entry_(&try_entry),
        catch_dex_file_(nullptr),
        catch_type_index_(dex::TypeIndex::Invalid()) {
    DCHECK(try_entry_ != nullptr);
  }

  // Catch block information constructor.
  TryCatchInformation(dex::TypeIndex catch_type_index, const DexFile& dex_file)
      : try_entry_(nullptr),
        catch_dex_file_(&dex_file),
        catch_type_index_(catch_type_index) {}

  bool IsTryBlock() const { return try_entry_ != nullptr; }

  const HTryBoundary& GetTryEntry() const {
    DCHECK(IsTryBlock());
    return *try_entry_;
  }

  bool IsCatchBlock() const { return catch_dex_file_ != nullptr; }

  bool IsValidTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_.IsValid();
  }

  dex::TypeIndex GetCatchTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_;
  }

  const DexFile& GetCatchDexFile() const {
    DCHECK(IsCatchBlock());
    return *catch_dex_file_;
  }

  void SetInvalidTypeIndex() {
    catch_type_index_ = dex::TypeIndex::Invalid();
  }

 private:
  // One of possibly several TryBoundary instructions entering the block's try.
  // Only set for try blocks.
  const HTryBoundary* try_entry_;

  // Exception type information. Only set for catch blocks.
  const DexFile* catch_dex_file_;
  dex::TypeIndex catch_type_index_;
};

static constexpr size_t kNoLifetime = -1;
static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1);

// A block in a method. Contains the list of instructions represented
// as a doubly-linked list. Each block knows its predecessors and
// successors.

class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
 public:
  explicit HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
      : graph_(graph),
        predecessors_(graph->GetAllocator()->Adapter(kArenaAllocPredecessors)),
        successors_(graph->GetAllocator()->Adapter(kArenaAllocSuccessors)),
        loop_information_(nullptr),
        dominator_(nullptr),
        dominated_blocks_(graph->GetAllocator()->Adapter(kArenaAllocDominated)),
        block_id_(kInvalidBlockId),
        dex_pc_(dex_pc),
        lifetime_start_(kNoLifetime),
        lifetime_end_(kNoLifetime),
        try_catch_information_(nullptr) {
    predecessors_.reserve(kDefaultNumberOfPredecessors);
    successors_.reserve(kDefaultNumberOfSuccessors);
    dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks);
  }

  const ArenaVector<HBasicBlock*>& GetPredecessors() const {
    return predecessors_;
  }

  size_t GetNumberOfPredecessors() const {
    return GetPredecessors().size();
  }

  const ArenaVector<HBasicBlock*>& GetSuccessors() const {
    return successors_;
  }

  ArrayRef<HBasicBlock* const> GetNormalSuccessors() const;
  ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const;

  bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) {
    return ContainsElement(successors_, block, start_from);
  }

  const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const {
    return dominated_blocks_;
  }

  bool IsEntryBlock() const {
    return graph_->GetEntryBlock() == this;
  }

  bool IsExitBlock() const {
    return graph_->GetExitBlock() == this;
  }

  bool IsSingleGoto() const;
  bool IsSingleReturn() const;
  bool IsSingleReturnOrReturnVoidAllowingPhis() const;
  bool IsSingleTryBoundary() const;

  // Returns true if this block emits nothing but a jump.
  bool IsSingleJump() const {
    HLoopInformation* loop_info = GetLoopInformation();
    return (IsSingleGoto() || IsSingleTryBoundary())
           // Back edges generate a suspend check.
           && (loop_info == nullptr || !loop_info->IsBackEdge(*this));
  }

  void AddBackEdge(HBasicBlock* back_edge) {
    if (loop_information_ == nullptr) {
      loop_information_ = new (graph_->GetAllocator()) HLoopInformation(this, graph_);
    }
    DCHECK_EQ(loop_information_->GetHeader(), this);
    loop_information_->AddBackEdge(back_edge);
  }

  // Registers a back edge; if the block was not a loop header before the call,
  // associates a newly created loop info with it.
  //
  // Used in SuperblockCloner to preserve the LoopInformation object instead of
  // resetting loop info for all blocks during back edge recalculation.
  void AddBackEdgeWhileUpdating(HBasicBlock* back_edge) {
    if (loop_information_ == nullptr || loop_information_->GetHeader() != this) {
      loop_information_ = new (graph_->GetAllocator()) HLoopInformation(this, graph_);
    }
    loop_information_->AddBackEdge(back_edge);
  }

  HGraph* GetGraph() const { return graph_; }
  void SetGraph(HGraph* graph) { graph_ = graph; }

  uint32_t GetBlockId() const { return block_id_; }
  void SetBlockId(int id) { block_id_ = id; }
  uint32_t GetDexPc() const { return dex_pc_; }

  HBasicBlock* GetDominator() const { return dominator_; }
  void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; }
  void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); }

  void RemoveDominatedBlock(HBasicBlock* block) {
    RemoveElement(dominated_blocks_, block);
  }

  void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) {
    ReplaceElement(dominated_blocks_, existing, new_block);
  }

  void ClearDominanceInformation();

  int NumberOfBackEdges() const {
    return IsLoopHeader() ? loop_information_->NumberOfBackEdges() : 0;
  }

  HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; }
  HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; }
  const HInstructionList& GetInstructions() const { return instructions_; }
  HInstruction* GetFirstPhi() const { return phis_.first_instruction_; }
  HInstruction* GetLastPhi() const { return phis_.last_instruction_; }
  const HInstructionList& GetPhis() const { return phis_; }

  HInstruction* GetFirstInstructionDisregardMoves() const;

  void AddSuccessor(HBasicBlock* block) {
    successors_.push_back(block);
    block->predecessors_.push_back(this);
  }

  void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t successor_index = GetSuccessorIndexOf(existing);
    existing->RemovePredecessor(this);
    new_block->predecessors_.push_back(this);
    successors_[successor_index] = new_block;
  }

  void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t predecessor_index = GetPredecessorIndexOf(existing);
    existing->RemoveSuccessor(this);
    new_block->successors_.push_back(this);
    predecessors_[predecessor_index] = new_block;
  }

  // Insert `this` between `predecessor` and `successor`. This method
  // preserves the indices, and will update the first edge found between
  // `predecessor` and `successor`.
  void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) {
    size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor);
    size_t successor_index = predecessor->GetSuccessorIndexOf(successor);
    successor->predecessors_[predecessor_index] = this;
    predecessor->successors_[successor_index] = this;
    successors_.push_back(successor);
    predecessors_.push_back(predecessor);
  }

  void RemovePredecessor(HBasicBlock* block) {
    predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block));
  }

  void RemoveSuccessor(HBasicBlock* block) {
    successors_.erase(successors_.begin() + GetSuccessorIndexOf(block));
  }

  void ClearAllPredecessors() {
    predecessors_.clear();
  }

  void AddPredecessor(HBasicBlock* block) {
    predecessors_.push_back(block);
    block->successors_.push_back(this);
  }

  void SwapPredecessors() {
    DCHECK_EQ(predecessors_.size(), 2u);
    std::swap(predecessors_[0], predecessors_[1]);
  }

  void SwapSuccessors() {
    DCHECK_EQ(successors_.size(), 2u);
    std::swap(successors_[0], successors_[1]);
  }

  size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const {
    return IndexOfElement(predecessors_, predecessor);
  }

  size_t GetSuccessorIndexOf(HBasicBlock* successor) const {
    return IndexOfElement(successors_, successor);
  }

  HBasicBlock* GetSinglePredecessor() const {
    DCHECK_EQ(GetPredecessors().size(), 1u);
    return GetPredecessors()[0];
  }

  HBasicBlock* GetSingleSuccessor() const {
    DCHECK_EQ(GetSuccessors().size(), 1u);
    return GetSuccessors()[0];
  }

  // Returns whether the first occurrence of `predecessor` in the list of
  // predecessors is at index `idx`.
  bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
    DCHECK_EQ(GetPredecessors()[idx], predecessor);
    return GetPredecessorIndexOf(predecessor) == idx;
  }

  // Create a new block between this block and its predecessors. The new block
  // is added to the graph, all predecessor edges are relinked to it and an edge
  // is created to `this`. Returns the new empty block. Reverse post order or
  // loop and try/catch information are not updated.
  HBasicBlock* CreateImmediateDominator();

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created, latter block. Note that this method will add the block to the
  // graph, create a Goto at the end of the former block and will create an edge
  // between the blocks. It will not, however, update the reverse post order or
  // loop and try/catch information.
  HBasicBlock* SplitBefore(HInstruction* cursor, bool require_graph_not_in_ssa_form = true);

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created block. Note that this method just updates raw block information,
  // like predecessors, successors, dominators, and instruction list. It does not
  // update the graph, reverse post order, loop information, nor make sure the
  // blocks are consistent (for example ending with a control flow instruction).
  HBasicBlock* SplitBeforeForInlining(HInstruction* cursor);

  // Similar to `SplitBeforeForInlining` but does it after `cursor`.
  HBasicBlock* SplitAfterForInlining(HInstruction* cursor);

  // Merge `other` at the end of `this`. Successors and dominated blocks of
  // `other` are changed to be successors and dominated blocks of `this`. Note
  // that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void MergeWithInlined(HBasicBlock* other);

  // Replace `this` with `other`. Predecessors, successors, and dominated blocks
  // of `this` are moved to `other`.
  // Note that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void ReplaceWith(HBasicBlock* other);

  // Merges the instructions of `other` at the end of `this`.
  void MergeInstructionsWith(HBasicBlock* other);

  // Merge `other` at the end of `this`. This method updates loops, reverse post
  // order, links to predecessors, successors, dominators and deletes the block
  // from the graph. The two blocks must be successive, i.e. `this` must be the
  // only predecessor of `other` and vice versa.
  void MergeWith(HBasicBlock* other);

  // Disconnects `this` from all its predecessors, successors and dominator,
  // removes it from all loops it is included in and eventually from the graph.
  // The block must not dominate any other block. Predecessors and successors
  // are safely updated.
  void DisconnectAndDelete();

  // Disconnects `this` from all its successors and updates their phis, if the successors have them.
  // If `visited` is provided, it will use the information to know if a successor is reachable and
  // skip updating those phis.
  void DisconnectFromSuccessors(const ArenaBitVector* visited = nullptr);

  // Removes the catch phi uses of the instructions in `this`, and then removes the instruction
  // itself. If `building_dominator_tree` is true, it will not remove the instruction as user, since
  // we do it in a previous step. This is a special case for building up the dominator tree: we want
  // to eliminate uses before inputs but we don't have domination information, so we remove all
  // connections from input/uses first before removing any instruction.
  // This method assumes the instructions have been removed from all users with the exception of
  // catch phis because of missing exceptional edges in the graph.
  void RemoveCatchPhiUsesAndInstruction(bool building_dominator_tree);

  void AddInstruction(HInstruction* instruction);
  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
  // Replace phi `initial` with `replacement` within this block.
  void ReplaceAndRemovePhiWith(HPhi* initial, HPhi* replacement);
  // Replace instruction `initial` with `replacement` within this block.
  void ReplaceAndRemoveInstructionWith(HInstruction* initial,
                                       HInstruction* replacement);
  void AddPhi(HPhi* phi);
  void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
  // RemoveInstruction and RemovePhi delete a given instruction from the respective
  // instruction list. With 'ensure_safety' set to true, it verifies that the
  // instruction is not in use and removes it from the use lists of its inputs.
  void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
  void RemovePhi(HPhi* phi, bool ensure_safety = true);
  void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);

  bool IsLoopHeader() const {
    return IsInLoop() && (loop_information_->GetHeader() == this);
  }

  bool IsLoopPreHeaderFirstPredecessor() const {
    DCHECK(IsLoopHeader());
    return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
  }

  bool IsFirstPredecessorBackEdge() const {
    DCHECK(IsLoopHeader());
    return GetLoopInformation()->IsBackEdge(*GetPredecessors()[0]);
  }

  HLoopInformation* GetLoopInformation() const {
    return loop_information_;
  }

  // Set the loop_information_ on this block. Overrides the current
  // loop_information_ if it is an outer loop of the passed loop information.
  // Note that this method is called while creating the loop information.
  void SetInLoop(HLoopInformation* info) {
    if (IsLoopHeader()) {
      // Nothing to do. This just means `info` is an outer loop.
    } else if (!IsInLoop()) {
      loop_information_ = info;
    } else if (loop_information_->Contains(*info->GetHeader())) {
      // Block is currently part of an outer loop. Make it part of this inner loop.
      // Note that a non-loop-header block having loop information means this
      // loop information has already been populated.
      loop_information_ = info;
    } else {
      // Block is part of an inner loop. Do not update the loop information.
      // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()`
      // at this point, because this method is being called while populating `info`.
    }
  }

  // Raw update of the loop information.
  void SetLoopInformation(HLoopInformation* info) {
    loop_information_ = info;
  }

  bool IsInLoop() const { return loop_information_ != nullptr; }

  TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }

  void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
    try_catch_information_ = try_catch_information;
  }

  bool IsTryBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
  }

  bool IsCatchBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
  }

  // Returns the try entry that this block's successors should have. They will
  // be in the same try, unless the block ends in a try boundary. In that case,
  // the appropriate try entry will be returned.
  const HTryBoundary* ComputeTryEntryOfSuccessors() const;

  bool HasThrowingInstructions() const;

  // Returns whether this block dominates the block passed as parameter.
  bool Dominates(const HBasicBlock* block) const;

  size_t GetLifetimeStart() const { return lifetime_start_; }
  size_t GetLifetimeEnd() const { return lifetime_end_; }

  void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
  void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }

  bool EndsWithControlFlowInstruction() const;
  bool EndsWithReturn() const;
  bool EndsWithIf() const;
  bool EndsWithTryBoundary() const;
  bool HasSinglePhi() const;

 private:
  HGraph* graph_;
  ArenaVector<HBasicBlock*> predecessors_;
  ArenaVector<HBasicBlock*> successors_;
  HInstructionList instructions_;
  HInstructionList phis_;
  HLoopInformation* loop_information_;
  HBasicBlock* dominator_;
  ArenaVector<HBasicBlock*> dominated_blocks_;
  uint32_t block_id_;
  // The dex program counter of the first instruction of this block.
  const uint32_t dex_pc_;
  size_t lifetime_start_;
  size_t lifetime_end_;
  TryCatchInformation* try_catch_information_;

  friend class HGraph;
  friend class HInstruction;
  // Allow manual control of the ordering of predecessors/successors.
  friend class OptimizingUnitTestHelper;

  DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
};

// Iterates over the LoopInformation of all loops which contain 'block'
// from the innermost to the outermost.
class HLoopInformationOutwardIterator : public ValueObject {
 public:
  explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
      : current_(block.GetLoopInformation()) {}

  bool Done() const { return current_ == nullptr; }

  void Advance() {
    DCHECK(!Done());
    current_ = current_->GetPreHeader()->GetLoopInformation();
  }

  HLoopInformation* Current() const {
    DCHECK(!Done());
    return current_;
  }

 private:
  HLoopInformation* current_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
};
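
// A minimal traversal sketch, walking from the innermost enclosing loop
// outwards:
//   for (HLoopInformationOutwardIterator it(*block); !it.Done(); it.Advance()) {
//     HLoopInformation* loop = it.Current();
//     ...
//   }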

#define FOR_EACH_CONCRETE_INSTRUCTION_SCALAR_COMMON(M) \
  M(Above, Condition) \
  M(AboveOrEqual, Condition) \
  M(Abs, UnaryOperation) \
  M(Add, BinaryOperation) \
  M(And, BinaryOperation) \
  M(ArrayGet, Instruction) \
  M(ArrayLength, Instruction) \
  M(ArraySet, Instruction) \
  M(Below, Condition) \
  M(BelowOrEqual, Condition) \
  M(BitwiseNegatedRight, BinaryOperation) \
  M(BooleanNot, UnaryOperation) \
  M(BoundsCheck, Instruction) \
  M(BoundType, Instruction) \
  M(CheckCast, Instruction) \
  M(ClassTableGet, Instruction) \
  M(ClearException, Instruction) \
  M(ClinitCheck, Instruction) \
  M(Compare, BinaryOperation) \
  M(ConstructorFence, Instruction) \
  M(CurrentMethod, Instruction) \
  M(ShouldDeoptimizeFlag, Instruction) \
  M(Deoptimize, Instruction) \
  M(Div, BinaryOperation) \
  M(DivZeroCheck, Instruction) \
  M(DoubleConstant, Constant) \
  M(Equal, Condition) \
  M(Exit, Instruction) \
  M(FloatConstant, Constant) \
  M(Goto, Instruction) \
  M(GreaterThan, Condition) \
  M(GreaterThanOrEqual, Condition) \
  M(If, Instruction) \
  M(InstanceFieldGet, Instruction) \
  M(InstanceFieldSet, Instruction) \
  M(InstanceOf, Instruction) \
  M(IntConstant, Constant) \
  M(IntermediateAddress, Instruction) \
  M(InvokeUnresolved, Invoke) \
  M(InvokeInterface, Invoke) \
  M(InvokeStaticOrDirect, Invoke) \
  M(InvokeVirtual, Invoke) \
  M(InvokePolymorphic, Invoke) \
  M(InvokeCustom, Invoke) \
  M(LessThan, Condition) \
  M(LessThanOrEqual, Condition) \
  M(LoadClass, Instruction) \
  M(LoadException, Instruction) \
  M(LoadMethodHandle, Instruction) \
  M(LoadMethodType, Instruction) \
  M(LoadString, Instruction) \
  M(LongConstant, Constant) \
  M(Max, Instruction) \
  M(MemoryBarrier, Instruction) \
  M(MethodEntryHook, Instruction) \
  M(MethodExitHook, Instruction) \
  M(Min, BinaryOperation) \
  M(MonitorOperation, Instruction) \
  M(Mul, BinaryOperation) \
  M(Neg, UnaryOperation) \
  M(NewArray, Instruction) \
  M(NewInstance, Instruction) \
  M(Nop, Instruction) \
  M(Not, UnaryOperation) \
  M(NotEqual, Condition) \
  M(NullConstant, Instruction) \
  M(NullCheck, Instruction) \
  M(Or, BinaryOperation) \
  M(PackedSwitch, Instruction) \
  M(ParallelMove, Instruction) \
  M(ParameterValue, Instruction) \
  M(Phi, Instruction) \
  M(Rem, BinaryOperation) \
  M(Return, Instruction) \
  M(ReturnVoid, Instruction) \
  M(Rol, BinaryOperation) \
  M(Ror, BinaryOperation) \
  M(Shl, BinaryOperation) \
  M(Shr, BinaryOperation) \
  M(StaticFieldGet, Instruction) \
  M(StaticFieldSet, Instruction) \
  M(StringBuilderAppend, Instruction) \
  M(UnresolvedInstanceFieldGet, Instruction) \
  M(UnresolvedInstanceFieldSet, Instruction) \
  M(UnresolvedStaticFieldGet, Instruction) \
  M(UnresolvedStaticFieldSet, Instruction) \
  M(Select, Instruction) \
  M(Sub, BinaryOperation) \
  M(SuspendCheck, Instruction) \
  M(Throw, Instruction) \
  M(TryBoundary, Instruction) \
  M(TypeConversion, Instruction) \
  M(UShr, BinaryOperation) \
  M(Xor, BinaryOperation)

#define FOR_EACH_CONCRETE_INSTRUCTION_VECTOR_COMMON(M) \
  M(VecReplicateScalar, VecUnaryOperation) \
  M(VecExtractScalar, VecUnaryOperation) \
  M(VecReduce, VecUnaryOperation) \
  M(VecCnv, VecUnaryOperation) \
  M(VecNeg, VecUnaryOperation) \
  M(VecAbs, VecUnaryOperation) \
  M(VecNot, VecUnaryOperation) \
  M(VecAdd, VecBinaryOperation) \
  M(VecHalvingAdd, VecBinaryOperation) \
  M(VecSub, VecBinaryOperation) \
  M(VecMul, VecBinaryOperation) \
  M(VecDiv, VecBinaryOperation) \
  M(VecMin, VecBinaryOperation) \
  M(VecMax, VecBinaryOperation) \
  M(VecAnd, VecBinaryOperation) \
  M(VecAndNot, VecBinaryOperation) \
  M(VecOr, VecBinaryOperation) \
  M(VecXor, VecBinaryOperation) \
  M(VecSaturationAdd, VecBinaryOperation) \
  M(VecSaturationSub, VecBinaryOperation) \
  M(VecShl, VecBinaryOperation) \
  M(VecShr, VecBinaryOperation) \
  M(VecUShr, VecBinaryOperation) \
  M(VecSetScalars, VecOperation) \
  M(VecMultiplyAccumulate, VecOperation) \
  M(VecSADAccumulate, VecOperation) \
  M(VecDotProd, VecOperation) \
  M(VecLoad, VecMemoryOperation) \
  M(VecStore, VecMemoryOperation) \
  M(VecPredSetAll, VecPredSetOperation) \
  M(VecPredWhile, VecPredSetOperation) \
  M(VecPredToBoolean, VecOperation) \
  M(VecEqual, VecCondition) \
  M(VecNotEqual, VecCondition) \
  M(VecLessThan, VecCondition) \
  M(VecLessThanOrEqual, VecCondition) \
  M(VecGreaterThan, VecCondition) \
  M(VecGreaterThanOrEqual, VecCondition) \
  M(VecBelow, VecCondition) \
  M(VecBelowOrEqual, VecCondition) \
  M(VecAbove, VecCondition) \
  M(VecAboveOrEqual, VecCondition) \
  M(VecPredNot, VecPredSetOperation)

#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M) \
  FOR_EACH_CONCRETE_INSTRUCTION_SCALAR_COMMON(M) \
  FOR_EACH_CONCRETE_INSTRUCTION_VECTOR_COMMON(M)
1442
1443 /*
1444 * Instructions, shared across several (not all) architectures.
1445 */
1446 #if !defined(ART_ENABLE_CODEGEN_arm) && !defined(ART_ENABLE_CODEGEN_arm64)
1447 #define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)
1448 #else
1449 #define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M) \
1450 M(DataProcWithShifterOp, Instruction) \
1451 M(MultiplyAccumulate, Instruction) \
1452 M(IntermediateAddressIndex, Instruction)
1453 #endif
1454
1455 #define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)
1456
1457 #define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)
1458
1459 #if defined(ART_ENABLE_CODEGEN_riscv64)
1460 #define FOR_EACH_CONCRETE_INSTRUCTION_RISCV64(M) M(Riscv64ShiftAdd, Instruction)
1461 #else
1462 #define FOR_EACH_CONCRETE_INSTRUCTION_RISCV64(M)
1463 #endif
1464
1465 #ifndef ART_ENABLE_CODEGEN_x86
1466 #define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
1467 #else
1468 #define FOR_EACH_CONCRETE_INSTRUCTION_X86(M) \
1469 M(X86ComputeBaseMethodAddress, Instruction) \
1470 M(X86LoadFromConstantTable, Instruction) \
1471 M(X86FPNeg, Instruction) \
1472 M(X86PackedSwitch, Instruction)
1473 #endif
1474
1475 #if defined(ART_ENABLE_CODEGEN_x86) || defined(ART_ENABLE_CODEGEN_x86_64)
1476 #define FOR_EACH_CONCRETE_INSTRUCTION_X86_COMMON(M) \
1477 M(X86AndNot, Instruction) \
1478 M(X86MaskOrResetLeastSetBit, Instruction)
1479 #else
1480 #define FOR_EACH_CONCRETE_INSTRUCTION_X86_COMMON(M)
1481 #endif
1482
1483 #define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)
1484
1485 #define FOR_EACH_CONCRETE_INSTRUCTION(M) \
1486 FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M) \
1487 FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M) \
1488 FOR_EACH_CONCRETE_INSTRUCTION_ARM(M) \
1489 FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M) \
1490 FOR_EACH_CONCRETE_INSTRUCTION_RISCV64(M) \
1491 FOR_EACH_CONCRETE_INSTRUCTION_X86(M) \
1492 FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M) \
1493 FOR_EACH_CONCRETE_INSTRUCTION_X86_COMMON(M)
1494
1495 #define FOR_EACH_ABSTRACT_INSTRUCTION(M) \
1496 M(Condition, BinaryOperation) \
1497 M(Constant, Instruction) \
1498 M(UnaryOperation, Instruction) \
1499 M(BinaryOperation, Instruction) \
1500 M(Invoke, Instruction) \
1501 M(VecOperation, Instruction) \
1502 M(VecUnaryOperation, VecOperation) \
1503 M(VecBinaryOperation, VecOperation) \
1504 M(VecMemoryOperation, VecOperation) \
1505 M(VecPredSetOperation, VecOperation) \
1506 M(VecCondition, VecPredSetOperation)
1507
1508 #define FOR_EACH_INSTRUCTION(M) \
1509 FOR_EACH_CONCRETE_INSTRUCTION(M) \
1510 FOR_EACH_ABSTRACT_INSTRUCTION(M)
1511
1512 #define FORWARD_DECLARATION(type, super) class H##type;
FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)1513 FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
1514 #undef FORWARD_DECLARATION
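
// For example, instantiating the X-macro list above with FORWARD_DECLARATION
// expands M(Add, BinaryOperation) to `class HAdd;`, yielding one forward
// declaration per instruction kind; the same list drives the InstructionKind
// enum, the visitor dispatch, and the Is##type/As##type helpers below.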

#define DECLARE_INSTRUCTION(type) \
 private: \
  H##type& operator=(const H##type&) = delete; \
 public: \
  const char* DebugName() const override { return #type; } \
  HInstruction* Clone(ArenaAllocator* arena) const override { \
    DCHECK(IsClonable()); \
    return new (arena) H##type(*this); \
  } \
  void Accept(HGraphVisitor* visitor) override

#define DECLARE_ABSTRACT_INSTRUCTION(type) \
 private: \
  H##type& operator=(const H##type&) = delete; \
 public:

#define DEFAULT_COPY_CONSTRUCTOR(type) H##type(const H##type& other) = default;
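
// Expansion sketch: inside a concrete class, DECLARE_INSTRUCTION(Add) injects
// DebugName() returning "Add", a Clone() that copy-constructs the node in the
// given arena, and the Accept() hook used for visitor dispatch. Accept() is
// deliberately left without a body, so the macro invocation is terminated
// with a semicolon at the use site.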

template <typename T>
class HUseListNode : public ArenaObject<kArenaAllocUseListNode>,
                     public IntrusiveForwardListNode<HUseListNode<T>> {
 public:
  // Get the instruction which has this use as one of the inputs.
  T GetUser() const { return user_; }
  // Get the position of the input record that this use corresponds to.
  size_t GetIndex() const { return index_; }
  // Set the position of the input record that this use corresponds to.
  void SetIndex(size_t index) { index_ = index; }

 private:
  HUseListNode(T user, size_t index)
      : user_(user), index_(index) {}

  T const user_;
  size_t index_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HUseListNode);
};

template <typename T>
using HUseList = IntrusiveForwardList<HUseListNode<T>>;

// This class is used by the HEnvironment and HInstruction classes to record the
// instructions they use and pointers to the corresponding HUseListNodes kept
// by the used instructions.
template <typename T>
class HUserRecord : public ValueObject {
 public:
  HUserRecord() : instruction_(nullptr), before_use_node_() {}
  explicit HUserRecord(HInstruction* instruction)
      : instruction_(instruction), before_use_node_() {}

  HUserRecord(const HUserRecord<T>& old_record, typename HUseList<T>::iterator before_use_node)
      : HUserRecord(old_record.instruction_, before_use_node) {}
  HUserRecord(HInstruction* instruction, typename HUseList<T>::iterator before_use_node)
      : instruction_(instruction), before_use_node_(before_use_node) {
    DCHECK(instruction_ != nullptr);
  }

  HInstruction* GetInstruction() const { return instruction_; }
  typename HUseList<T>::iterator GetBeforeUseNode() const { return before_use_node_; }
  typename HUseList<T>::iterator GetUseNode() const { return ++GetBeforeUseNode(); }

 private:
  // Instruction used by the user.
  HInstruction* instruction_;

  // Iterator before the corresponding entry in the use list kept by 'instruction_'.
  typename HUseList<T>::iterator before_use_node_;
};
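
// Note: storing an iterator to the node *before* the use (rather than to the
// use itself) is what lets HInstruction unlink a use in O(1) via
// IntrusiveForwardList<>::erase_after(), mirroring std::forward_list; see
// HInstruction::RemoveAsUserOfInput() below.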

// Helper class that extracts the input instruction from HUserRecord<HInstruction*>.
// This is used for HInstruction::GetInputs() to return a container wrapper providing
// HInstruction* values even though the underlying container has HUserRecord<>s.
struct HInputExtractor {
  HInstruction* operator()(HUserRecord<HInstruction*>& record) const {
    return record.GetInstruction();
  }
  const HInstruction* operator()(const HUserRecord<HInstruction*>& record) const {
    return record.GetInstruction();
  }
};

using HInputsRef = TransformArrayRef<HUserRecord<HInstruction*>, HInputExtractor>;
using HConstInputsRef = TransformArrayRef<const HUserRecord<HInstruction*>, HInputExtractor>;

/**
 * Side-effects representation.
 *
 * For write/read dependences on fields/arrays, the dependence analysis uses
 * type disambiguation (e.g. a float field write cannot modify the value of an
 * integer field read) and the access type (e.g. a reference array write cannot
 * modify the value of a reference field read [although it may modify the
 * reference fetch prior to reading the field, which is represented by its own
 * write/read dependence]). The analysis makes conservative points-to
 * assumptions on reference types (e.g. two arrays of the same type are assumed
 * to be the same, and any reference read depends on any reference write
 * without further regard to its type).
 *
 * kDependsOnGCBit is defined in the following way: instructions with
 * kDependsOnGCBit must not be alive across the point where garbage collection
 * might happen.
 *
 * Note: Instructions with kCanTriggerGCBit do not depend on each other.
 *
 * kCanTriggerGCBit must be used for instructions for which GC might happen on
 * the path across those instructions from the compiler's perspective (between
 * this instruction and the next one in the IR).
 *
 * Note: Instructions which can cause GC only on a fatal slow path do not need
 * kCanTriggerGCBit, as execution never returns to the instruction following the
 * exceptional one. However, execution may return to compiled code if there is a
 * catch block in the current method; for this purpose the TryBoundary exit
 * instruction has kCanTriggerGCBit set.
 *
 * The internal representation uses 38 bits and is described in the table below.
 * The first line indicates the side effect, and for field/array accesses the
 * second line indicates the type of the access (in the order of the
 * DataType::Type enum).
 * The two numbered lines below indicate the bit position in the bitfield (read
 * vertically).
 *
 *   |Depends on GC|ARRAY-R  |FIELD-R  |Can trigger GC|ARRAY-W  |FIELD-W  |
 *   +-------------+---------+---------+--------------+---------+---------+
 *   |             |DFJISCBZL|DFJISCBZL|              |DFJISCBZL|DFJISCBZL|
 *   |      3      |333333322|222222221|       1      |111111110|000000000|
 *   |      7      |654321098|765432109|       8      |765432109|876543210|
 *
 * Note that, to ease the implementation, 'changes' bits are least significant
 * bits, while 'dependency' bits are most significant bits.
 */
class SideEffects : public ValueObject {
 public:
  SideEffects() : flags_(0) {}

  static SideEffects None() {
    return SideEffects(0);
  }

  static SideEffects All() {
    return SideEffects(kAllChangeBits | kAllDependOnBits);
  }

  static SideEffects AllChanges() {
    return SideEffects(kAllChangeBits);
  }

  static SideEffects AllDependencies() {
    return SideEffects(kAllDependOnBits);
  }

  static SideEffects AllExceptGCDependency() {
    return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
  }

  static SideEffects AllWritesAndReads() {
    return SideEffects(kAllWrites | kAllReads);
  }

  static SideEffects AllWrites() {
    return SideEffects(kAllWrites);
  }

  static SideEffects AllReads() {
    return SideEffects(kAllReads);
  }

  static SideEffects FieldWriteOfType(DataType::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlag(type, kFieldWriteOffset));
  }

  static SideEffects ArrayWriteOfType(DataType::Type type) {
    return SideEffects(TypeFlag(type, kArrayWriteOffset));
  }

  static SideEffects FieldReadOfType(DataType::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlag(type, kFieldReadOffset));
  }

  static SideEffects ArrayReadOfType(DataType::Type type) {
    return SideEffects(TypeFlag(type, kArrayReadOffset));
  }

  // Returns side effects indicating that GC might happen across this instruction
  // from the compiler's perspective, so the next instruction in the IR would see it.
  //
  // See the SideEffects class comments.
  static SideEffects CanTriggerGC() {
    return SideEffects(1ULL << kCanTriggerGCBit);
  }

  // Returns side effects indicating that the instruction must not be alive across
  // a GC point.
  //
  // See the SideEffects class comments.
  static SideEffects DependsOnGC() {
    return SideEffects(1ULL << kDependsOnGCBit);
  }

  // Combines the side effects of this and the other.
  SideEffects Union(SideEffects other) const {
    return SideEffects(flags_ | other.flags_);
  }

  SideEffects Exclusion(SideEffects other) const {
    return SideEffects(flags_ & ~other.flags_);
  }

  void Add(SideEffects other) {
    flags_ |= other.flags_;
  }

  bool Includes(SideEffects other) const {
    return (other.flags_ & flags_) == other.flags_;
  }

  bool HasSideEffects() const {
    return (flags_ & kAllChangeBits);
  }

  bool HasDependencies() const {
    return (flags_ & kAllDependOnBits);
  }

  // Returns true if there are no side effects or dependencies.
  bool DoesNothing() const {
    return flags_ == 0;
  }

  // Returns true if something is written.
  bool DoesAnyWrite() const {
    return (flags_ & kAllWrites);
  }

  // Returns true if something is read.
  bool DoesAnyRead() const {
    return (flags_ & kAllReads);
  }

  // Returns true if potentially everything is written and read
  // (every type and every kind of access).
  bool DoesAllReadWrite() const {
    return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
  }

  bool DoesAll() const {
    return flags_ == (kAllChangeBits | kAllDependOnBits);
  }

  // Returns true if `this` may read something written by `other`.
  bool MayDependOn(SideEffects other) const {
    const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
    return (other.flags_ & depends_on_flags);
  }

  // Returns a string representation of the flags (for debugging only).
  // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
  std::string ToString() const {
    std::string flags = "|";
    for (int s = kLastBit; s >= 0; s--) {
      bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
      if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
        // This is a bit for the GC side effect.
        if (current_bit_is_set) {
          flags += "GC";
        }
        flags += "|";
      } else {
        // This is a bit for the array/field analysis.
        // The underscore character stands for the 'can trigger GC' bit.
        static const char* kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
        if (current_bit_is_set) {
          flags += kDebug[s];
        }
        if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
            (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
          flags += "|";
        }
      }
    }
    return flags;
  }

  bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }

 private:
  static constexpr int kFieldArrayAnalysisBits = 9;

  static constexpr int kFieldWriteOffset = 0;
  static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;

  static constexpr int kChangeBits = kCanTriggerGCBit + 1;

  static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
  static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kDependsOnGCBit = kLastBitForReads + 1;

  static constexpr int kLastBit = kDependsOnGCBit;
  static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;

  // Aliases.

  static_assert(kChangeBits == kDependOnBits,
                "the 'change' bits should match the 'depend on' bits.");

  static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
  static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
  static constexpr uint64_t kAllWrites =
      ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
  static constexpr uint64_t kAllReads =
      ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;

  // Translates a type to its bit flag. The type must correspond to a Java type.
  static uint64_t TypeFlag(DataType::Type type, int offset) {
    int shift;
    switch (type) {
      case DataType::Type::kReference: shift = 0; break;
      case DataType::Type::kBool: shift = 1; break;
      case DataType::Type::kInt8: shift = 2; break;
      case DataType::Type::kUint16: shift = 3; break;
      case DataType::Type::kInt16: shift = 4; break;
      case DataType::Type::kInt32: shift = 5; break;
      case DataType::Type::kInt64: shift = 6; break;
      case DataType::Type::kFloat32: shift = 7; break;
      case DataType::Type::kFloat64: shift = 8; break;
      default:
        LOG(FATAL) << "Unexpected data type " << type;
        UNREACHABLE();
    }
    DCHECK_LE(kFieldWriteOffset, shift);
    DCHECK_LT(shift, kArrayWriteOffset);
    return UINT64_C(1) << (shift + offset);
  }

  // Private constructor on direct flags value.
  explicit SideEffects(uint64_t flags) : flags_(flags) {}

  uint64_t flags_;
};
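
// Illustrative sketch (not part of the API): how passes typically combine and
// query these bit sets; `type` stands for some DataType::Type value.
//
//   SideEffects writes = SideEffects::FieldWriteOfType(type, /* is_volatile= */ false);
//   SideEffects reads = SideEffects::FieldReadOfType(type, /* is_volatile= */ false);
//   SideEffects both = writes.Union(reads);
//   DCHECK(reads.MayDependOn(writes));   // A typed read may observe a same-typed write.
//   DCHECK(!writes.MayDependOn(reads));  // Writes carry no 'depend on' bits.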

// An HEnvironment object contains the values of virtual registers at a given location.
class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
 public:
  static HEnvironment* Create(ArenaAllocator* allocator,
                              size_t number_of_vregs,
                              ArtMethod* method,
                              uint32_t dex_pc,
                              HInstruction* holder) {
    // The storage for vreg records is allocated right after the `HEnvironment` itself.
    static_assert(IsAligned<alignof(HUserRecord<HEnvironment*>)>(sizeof(HEnvironment)));
    static_assert(IsAligned<alignof(HUserRecord<HEnvironment*>)>(ArenaAllocator::kAlignment));
    size_t alloc_size = sizeof(HEnvironment) + number_of_vregs * sizeof(HUserRecord<HEnvironment*>);
    void* storage = allocator->Alloc(alloc_size, kArenaAllocEnvironment);
    return new (storage) HEnvironment(number_of_vregs, method, dex_pc, holder);
  }

  static HEnvironment* Create(ArenaAllocator* allocator,
                              const HEnvironment& to_copy,
                              HInstruction* holder) {
    return Create(allocator, to_copy.Size(), to_copy.GetMethod(), to_copy.GetDexPc(), holder);
  }

  void AllocateLocations(ArenaAllocator* allocator) {
    DCHECK(locations_ == nullptr);
    if (Size() != 0u) {
      locations_ = allocator->AllocArray<Location>(Size(), kArenaAllocEnvironmentLocations);
    }
  }

  void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
    if (parent_ != nullptr) {
      parent_->SetAndCopyParentChain(allocator, parent);
    } else {
      parent_ = Create(allocator, *parent, holder_);
      parent_->CopyFrom(parent);
      if (parent->GetParent() != nullptr) {
        parent_->SetAndCopyParentChain(allocator, parent->GetParent());
      }
    }
  }

  void CopyFrom(ArrayRef<HInstruction* const> locals);
  void CopyFrom(const HEnvironment* environment);

  // Copy from `env`. If it's a loop phi for `loop_header`, copy the first
  // input to the loop phi instead. This is for inserting instructions that
  // require an environment (like HDeoptimization) in the loop pre-header.
  void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header);

  void SetRawEnvAt(size_t index, HInstruction* instruction) {
    GetVRegs()[index] = HUserRecord<HEnvironment*>(instruction);
  }

  HInstruction* GetInstructionAt(size_t index) const {
    return GetVRegs()[index].GetInstruction();
  }

  void RemoveAsUserOfInput(size_t index) const;

  // Replaces the input at the position 'index' with the replacement; the replacement
  // and old input instructions' env_uses_ lists are adjusted. The function works
  // similarly to HInstruction::ReplaceInput.
  void ReplaceInput(HInstruction* replacement, size_t index);

  size_t Size() const { return number_of_vregs_; }

  HEnvironment* GetParent() const { return parent_; }

  void SetLocationAt(size_t index, Location location) {
    DCHECK_LT(index, number_of_vregs_);
    DCHECK(locations_ != nullptr);
    locations_[index] = location;
  }

  Location GetLocationAt(size_t index) const {
    DCHECK_LT(index, number_of_vregs_);
    DCHECK(locations_ != nullptr);
    return locations_[index];
  }

  uint32_t GetDexPc() const {
    return dex_pc_;
  }

  ArtMethod* GetMethod() const {
    return method_;
  }

  HInstruction* GetHolder() const {
    return holder_;
  }

  bool IsFromInlinedInvoke() const {
    return GetParent() != nullptr;
  }

  class EnvInputSelector {
   public:
    explicit EnvInputSelector(const HEnvironment* e) : env_(e) {}
    HInstruction* operator()(size_t s) const {
      return env_->GetInstructionAt(s);
    }
   private:
    const HEnvironment* env_;
  };

  using HConstEnvInputRef = TransformIterator<CountIter, EnvInputSelector>;
  IterationRange<HConstEnvInputRef> GetEnvInputs() const {
    IterationRange<CountIter> range(Range(Size()));
    return MakeIterationRange(MakeTransformIterator(range.begin(), EnvInputSelector(this)),
                              MakeTransformIterator(range.end(), EnvInputSelector(this)));
  }

 private:
  ALWAYS_INLINE HEnvironment(size_t number_of_vregs,
                             ArtMethod* method,
                             uint32_t dex_pc,
                             HInstruction* holder)
      : number_of_vregs_(dchecked_integral_cast<uint32_t>(number_of_vregs)),
        dex_pc_(dex_pc),
        holder_(holder),
        parent_(nullptr),
        method_(method),
        locations_(nullptr) {
  }

  ArrayRef<HUserRecord<HEnvironment*>> GetVRegs() {
    auto* vregs = reinterpret_cast<HUserRecord<HEnvironment*>*>(this + 1);
    return ArrayRef<HUserRecord<HEnvironment*>>(vregs, number_of_vregs_);
  }

  ArrayRef<const HUserRecord<HEnvironment*>> GetVRegs() const {
    auto* vregs = reinterpret_cast<const HUserRecord<HEnvironment*>*>(this + 1);
    return ArrayRef<const HUserRecord<HEnvironment*>>(vregs, number_of_vregs_);
  }

  const uint32_t number_of_vregs_;
  const uint32_t dex_pc_;

  // The instruction that holds this environment.
  HInstruction* const holder_;

  // The parent environment for inlined code.
  HEnvironment* parent_;

  // The environment's method, if resolved.
  ArtMethod* method_;

  // Locations assigned by the register allocator.
  Location* locations_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HEnvironment);
};
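
// Illustrative sketch (hypothetical values): building an environment for an
// instruction; `allocator`, `method`, `dex_pc`, `holder`, and `value` are
// assumed to come from the surrounding graph construction.
//
//   HEnvironment* env =
//       HEnvironment::Create(allocator, /* number_of_vregs= */ 2u, method, dex_pc, holder);
//   env->SetRawEnvAt(0, value);    // Raw: does not touch `value`'s env-use list...
//   value->AddEnvUseAt(env, 0);    // ...so the env use is recorded explicitly.
//   holder->SetRawEnvironment(env);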

std::ostream& operator<<(std::ostream& os, const HInstruction& rhs);

// Iterates over the environments of an instruction, from the innermost
// to the outermost (following the parent chain created by inlining).
class HEnvironmentIterator : public ValueObject {
 public:
  using iterator_category = std::forward_iterator_tag;
  using value_type = HEnvironment*;
  using difference_type = ptrdiff_t;
  using pointer = void;
  using reference = void;

  explicit HEnvironmentIterator(HEnvironment* cur) : cur_(cur) {}

  HEnvironment* operator*() const {
    return cur_;
  }

  HEnvironmentIterator& operator++() {
    DCHECK(cur_ != nullptr);
    cur_ = cur_->GetParent();
    return *this;
  }

  HEnvironmentIterator operator++(int) {
    HEnvironmentIterator prev(*this);
    ++(*this);
    return prev;
  }

  bool operator==(const HEnvironmentIterator& other) const {
    return other.cur_ == cur_;
  }

  bool operator!=(const HEnvironmentIterator& other) const {
    return !(*this == other);
  }

 private:
  HEnvironment* cur_;
};

class HInstruction : public ArenaObject<kArenaAllocInstruction> {
 public:
#define DECLARE_KIND(type, super) k##type,
  enum InstructionKind {  // Private marker so that generate-operator-out.py skips this enum.
    FOR_EACH_CONCRETE_INSTRUCTION(DECLARE_KIND)
    kLastInstructionKind
  };
#undef DECLARE_KIND

  HInstruction(InstructionKind kind, SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(kind, DataType::Type::kVoid, side_effects, dex_pc) {}

  HInstruction(InstructionKind kind, DataType::Type type, SideEffects side_effects, uint32_t dex_pc)
      : previous_(nullptr),
        next_(nullptr),
        block_(nullptr),
        dex_pc_(dex_pc),
        id_(-1),
        ssa_index_(-1),
        packed_fields_(0u),
        environment_(nullptr),
        locations_(nullptr),
        live_interval_(nullptr),
        lifetime_position_(kNoLifetime),
        side_effects_(side_effects),
        reference_type_handle_(ReferenceTypeInfo::CreateInvalid().GetTypeHandle()) {
    SetPackedField<InstructionKindField>(kind);
    SetPackedField<TypeField>(type);
    SetPackedFlag<kFlagReferenceTypeIsExact>(ReferenceTypeInfo::CreateInvalid().IsExact());
  }

  virtual ~HInstruction() {}

  std::ostream& Dump(std::ostream& os, bool dump_args = false);

  // Helper for dumping without argument information using operator<<.
  struct NoArgsDump {
    const HInstruction* ins;
  };
  NoArgsDump DumpWithoutArgs() const {
    return NoArgsDump{this};
  }
  // Helper for dumping with argument information using operator<<.
  struct ArgsDump {
    const HInstruction* ins;
  };
  ArgsDump DumpWithArgs() const {
    return ArgsDump{this};
  }

  HInstruction* GetNext() const { return next_; }
  HInstruction* GetPrevious() const { return previous_; }

  HInstruction* GetNextDisregardingMoves() const;
  HInstruction* GetPreviousDisregardingMoves() const;

  HBasicBlock* GetBlock() const { return block_; }
  ArenaAllocator* GetAllocator() const { return block_->GetGraph()->GetAllocator(); }
  void SetBlock(HBasicBlock* block) { block_ = block; }
  bool IsInBlock() const { return block_ != nullptr; }
  bool IsInLoop() const { return block_->IsInLoop(); }
  bool IsLoopHeaderPhi() const { return IsPhi() && block_->IsLoopHeader(); }
  bool IsIrreducibleLoopHeaderPhi() const {
    return IsLoopHeaderPhi() && GetBlock()->GetLoopInformation()->IsIrreducible();
  }

  virtual ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() = 0;

  ArrayRef<const HUserRecord<HInstruction*>> GetInputRecords() const {
    // One virtual method is enough, just const_cast<> and then re-add the const.
    return ArrayRef<const HUserRecord<HInstruction*>>(
        const_cast<HInstruction*>(this)->GetInputRecords());
  }

  HInputsRef GetInputs() {
    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
  }

  HConstInputsRef GetInputs() const {
    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
  }

  size_t InputCount() const { return GetInputRecords().size(); }
  HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); }

  bool HasInput(HInstruction* input) const {
    for (const HInstruction* i : GetInputs()) {
      if (i == input) {
        return true;
      }
    }
    return false;
  }

  void SetRawInputAt(size_t index, HInstruction* input) {
    SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input));
  }

  virtual void Accept(HGraphVisitor* visitor) = 0;
  virtual const char* DebugName() const = 0;

  DataType::Type GetType() const {
    return TypeField::Decode(GetPackedFields());
  }

  virtual bool NeedsEnvironment() const { return false; }
  virtual bool NeedsBss() const {
    return false;
  }

  uint32_t GetDexPc() const { return dex_pc_; }

  virtual bool IsControlFlow() const { return false; }

  // Can the instruction throw?
  // TODO: We should rename to CanVisiblyThrow, as some instructions (like HNewInstance)
  // could throw OOME, but it is still OK to remove them if they are unused.
  virtual bool CanThrow() const { return false; }

  // Does the instruction unconditionally throw an exception?
  virtual bool AlwaysThrows() const { return false; }
  // Will this instruction only cause async exceptions if it causes any at all?
  virtual bool OnlyThrowsAsyncExceptions() const {
    return false;
  }

  bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }

  bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
  bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }

  // Does not apply to all instructions, but having this at the top level greatly
  // simplifies the null check elimination.
  // TODO: Consider merging can_be_null into ReferenceTypeInfo.
  virtual bool CanBeNull() const {
    DCHECK_EQ(GetType(), DataType::Type::kReference) << "CanBeNull only applies to reference types";
    return true;
  }

  virtual bool CanDoImplicitNullCheckOn([[maybe_unused]] HInstruction* obj) const { return false; }

  // If this instruction will do an implicit null check, return the `HNullCheck` associated
  // with it. Otherwise return null.
  HNullCheck* GetImplicitNullCheck() const {
    // Go over previous non-move instructions that are emitted at use site.
    HInstruction* prev_not_move = GetPreviousDisregardingMoves();
    while (prev_not_move != nullptr && prev_not_move->IsEmittedAtUseSite()) {
      if (prev_not_move->IsNullCheck()) {
        return prev_not_move->AsNullCheck();
      }
      prev_not_move = prev_not_move->GetPreviousDisregardingMoves();
    }
    return nullptr;
  }

  virtual bool IsActualObject() const {
    return GetType() == DataType::Type::kReference;
  }

  // Sets the ReferenceTypeInfo. The RTI must be valid.
  void SetReferenceTypeInfo(ReferenceTypeInfo rti);
  // Same as above, but we only set it if it's valid. Otherwise, we don't change the current RTI.
  void SetReferenceTypeInfoIfValid(ReferenceTypeInfo rti);

  ReferenceTypeInfo GetReferenceTypeInfo() const {
    DCHECK_EQ(GetType(), DataType::Type::kReference);
    return ReferenceTypeInfo::CreateUnchecked(reference_type_handle_,
                                              GetPackedFlag<kFlagReferenceTypeIsExact>());
  }

  void AddUseAt(HInstruction* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: fixup_end remains valid across push_front().
    auto fixup_end = uses_.empty() ? uses_.begin() : ++uses_.begin();
    ArenaAllocator* allocator = user->GetBlock()->GetGraph()->GetAllocator();
    HUseListNode<HInstruction*>* new_node =
        new (allocator) HUseListNode<HInstruction*>(user, index);
    uses_.push_front(*new_node);
    FixUpUserRecordsAfterUseInsertion(fixup_end);
  }

  void AddEnvUseAt(HEnvironment* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: env_fixup_end remains valid across push_front().
    auto env_fixup_end = env_uses_.empty() ? env_uses_.begin() : ++env_uses_.begin();
    HUseListNode<HEnvironment*>* new_node =
        new (GetBlock()->GetGraph()->GetAllocator()) HUseListNode<HEnvironment*>(user, index);
    env_uses_.push_front(*new_node);
    FixUpUserRecordsAfterEnvUseInsertion(env_fixup_end);
  }

  void RemoveAsUserOfInput(size_t input) {
    HUserRecord<HInstruction*> input_use = InputRecordAt(input);
    HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
    input_use.GetInstruction()->uses_.erase_after(before_use_node);
    input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
  }

  void RemoveAsUserOfAllInputs() {
    for (const HUserRecord<HInstruction*>& input_use : GetInputRecords()) {
      HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
      input_use.GetInstruction()->uses_.erase_after(before_use_node);
      input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
    }
  }

  const HUseList<HInstruction*>& GetUses() const { return uses_; }
  const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }

  bool HasUses() const { return !uses_.empty() || !env_uses_.empty(); }
  bool HasEnvironmentUses() const { return !env_uses_.empty(); }
  bool HasNonEnvironmentUses() const { return !uses_.empty(); }
  bool HasOnlyOneNonEnvironmentUse() const {
    return !HasEnvironmentUses() && GetUses().HasExactlyOneElement();
  }

  bool IsRemovable() const {
    return
        !DoesAnyWrite() &&
        // TODO(solanes): Merge calls from IsSuspendCheck to IsControlFlow into one that doesn't
        // do virtual dispatching.
        !IsSuspendCheck() &&
        !IsNop() &&
        !IsParameterValue() &&
        // If we added an explicit barrier then we should keep it.
        !IsMemoryBarrier() &&
        !IsConstructorFence() &&
        !IsControlFlow() &&
        !CanThrow();
  }

  bool IsDeadAndRemovable() const {
    return !HasUses() && IsRemovable();
  }

  bool IsPhiDeadAndRemovable() const {
    DCHECK(IsPhi());
    DCHECK(IsRemovable()) << " phis are always removable";
    return !HasUses();
  }

  // Does this instruction dominate `other_instruction`?
  // Aborts if this instruction and `other_instruction` are different phis.
  bool Dominates(HInstruction* other_instruction) const;

  // Same, but strict: returns false if this instruction and
  // `other_instruction` are the same.
  bool StrictlyDominates(HInstruction* other_instruction) const;

  int GetId() const { return id_; }
  void SetId(int id) { id_ = id; }

  int GetSsaIndex() const { return ssa_index_; }
  void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
  bool HasSsaIndex() const { return ssa_index_ != -1; }

  bool HasEnvironment() const { return environment_ != nullptr; }
  HEnvironment* GetEnvironment() const { return environment_; }
  IterationRange<HEnvironmentIterator> GetAllEnvironments() const {
    return MakeIterationRange(HEnvironmentIterator(GetEnvironment()),
                              HEnvironmentIterator(nullptr));
  }
  // Set the `environment_` field. Raw because this method does not
  // update the uses lists.
  void SetRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    environment_ = environment;
  }

  void InsertRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ != nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    DCHECK(environment->GetParent() == nullptr);
    environment->parent_ = environment_;
    environment_ = environment;
  }

  void RemoveEnvironment();

  // Set the environment of this instruction, copying it from `environment`. The
  // uses lists are updated while copying.
  void CopyEnvironmentFrom(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetAllocator();
    environment_ = HEnvironment::Create(allocator, *environment, this);
    environment_->CopyFrom(environment);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
                                                HBasicBlock* block) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetAllocator();
    environment_ = HEnvironment::Create(allocator, *environment, this);
    environment_->CopyFromWithLoopPhiAdjustment(environment, block);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Returns the number of entries in the environment. Typically, that is the
  // number of dex registers in a method. It could be more in case of inlining.
  size_t EnvironmentSize() const;

  LocationSummary* GetLocations() const { return locations_; }
  void SetLocations(LocationSummary* locations) { locations_ = locations; }

  void ReplaceWith(HInstruction* instruction);
  void ReplaceUsesDominatedBy(HInstruction* dominator,
                              HInstruction* replacement,
                              bool strictly_dominated = true);
  void ReplaceEnvUsesDominatedBy(HInstruction* dominator, HInstruction* replacement);
  void ReplaceInput(HInstruction* replacement, size_t index);

  // This is almost the same as doing `ReplaceWith()`. But in this helper, the
  // uses of this instruction by `other` are *not* updated.
  void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) {
    ReplaceWith(other);
    other->ReplaceInput(this, use_index);
  }

  // Move `this` instruction before `cursor`.
  void MoveBefore(HInstruction* cursor, bool do_checks = true);

  // Move `this` before its first user and out of any loops. If there is no
  // out-of-loop user that dominates all other users, move the instruction
  // to the end of the out-of-loop common dominator of the users' blocks.
  //
  // This can be used only on non-throwing instructions with no side effects that
  // have at least one use but no environment uses.
  void MoveBeforeFirstUserAndOutOfLoops();

#define INSTRUCTION_TYPE_CHECK(type, super) \
  bool Is##type() const;

  FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK

#define INSTRUCTION_TYPE_CAST(type, super) \
  const H##type* As##type() const; \
  H##type* As##type(); \
  const H##type* As##type##OrNull() const; \
  H##type* As##type##OrNull();

  FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CAST)
#undef INSTRUCTION_TYPE_CAST
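
  // For example, for HAdd the macros above declare IsAdd(), AsAdd(), and
  // AsAddOrNull() (the definitions appear later in this file; the OrNull
  // variant yields nullptr when the kind does not match), so a pass can write:
  //
  //   if (instruction->IsAdd()) {
  //     HAdd* add = instruction->AsAdd();
  //     // ... use `add` ...
  //   }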

  // Return a clone of the instruction if it is clonable (shallow copy by default, custom copy
  // if a custom copy-constructor is provided for a particular type). If IsClonable() is false
  // for the instruction then the behaviour of this function is undefined.
  //
  // Note: It is semantically valid to create a clone of the instruction only until the
  // prepare_for_register_allocator phase, as lifetime, intervals and codegen info are not
  // copied.
  //
  // Note: HEnvironment and some other fields are not copied and are set to default values, see
  // 'explicit HInstruction(const HInstruction& other)' for details.
  virtual HInstruction* Clone([[maybe_unused]] ArenaAllocator* arena) const {
    LOG(FATAL) << "Cloning is not implemented for the instruction " <<
                  DebugName() << " " << GetId();
    UNREACHABLE();
  }

  virtual bool IsFieldAccess() const {
    return false;
  }

  virtual const FieldInfo& GetFieldInfo() const {
    CHECK(IsFieldAccess()) << "Only callable on field accessors, not " << DebugName() << " "
                           << *this;
    LOG(FATAL) << "Must be overridden by field accessors. Not implemented by " << *this;
    UNREACHABLE();
  }

  // Return whether the instruction can be cloned (copied).
  virtual bool IsClonable() const { return false; }

  // Returns whether the instruction can be moved within the graph.
  // TODO: this method is used by LICM and GVN with possibly different
  // meanings? split and rename?
  virtual bool CanBeMoved() const { return false; }

  // Returns whether any data encoded in the two instructions is equal.
  // This method does not look at the inputs. Both instructions must be
  // of the same type, otherwise the method has undefined behavior.
  virtual bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const {
    return false;
  }

  // Returns whether two instructions are equal, that is:
  // 1) They have the same type and contain the same data (InstructionDataEquals).
  // 2) Their inputs are identical.
  bool Equals(const HInstruction* other) const;

  InstructionKind GetKind() const { return GetPackedField<InstructionKindField>(); }

  virtual size_t ComputeHashCode() const {
    size_t result = GetKind();
    for (const HInstruction* input : GetInputs()) {
      result = (result * 31) + input->GetId();
    }
    return result;
  }

  SideEffects GetSideEffects() const { return side_effects_; }
  void SetSideEffects(SideEffects other) { side_effects_ = other; }
  void AddSideEffects(SideEffects other) { side_effects_.Add(other); }

  size_t GetLifetimePosition() const { return lifetime_position_; }
  void SetLifetimePosition(size_t position) { lifetime_position_ = position; }
  LiveInterval* GetLiveInterval() const { return live_interval_; }
  void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; }
  bool HasLiveInterval() const { return live_interval_ != nullptr; }

  bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); }

  // Returns whether the code generation of the instruction will require to have access
  // to the current method. Such instructions are:
  // (1): Instructions that require an environment, as calling the runtime requires
  //      to walk the stack and have the current method stored at a specific stack address.
  // (2): HCurrentMethod, potentially used by HInvokeStaticOrDirect, HLoadString, or HLoadClass
  //      to access the dex cache.
  bool NeedsCurrentMethod() const {
    return NeedsEnvironment() || IsCurrentMethod();
  }

  // Does this instruction have any use in an environment before
  // control flow hits 'other'?
  bool HasAnyEnvironmentUseBefore(HInstruction* other);

  // Remove all references to environment uses of this instruction.
  // The caller must ensure that this is safe to do.
  void RemoveEnvironmentUsers();

  bool IsEmittedAtUseSite() const { return GetPackedFlag<kFlagEmittedAtUseSite>(); }
  void MarkEmittedAtUseSite() { SetPackedFlag<kFlagEmittedAtUseSite>(true); }

 protected:
  // If set, the machine code for this instruction is assumed to be generated by
  // its users. Used by liveness analysis to compute use positions accordingly.
  static constexpr size_t kFlagEmittedAtUseSite = 0u;
  static constexpr size_t kFlagReferenceTypeIsExact = kFlagEmittedAtUseSite + 1;
  static constexpr size_t kFieldInstructionKind = kFlagReferenceTypeIsExact + 1;
  static constexpr size_t kFieldInstructionKindSize =
      MinimumBitsToStore(static_cast<size_t>(InstructionKind::kLastInstructionKind - 1));
  static constexpr size_t kFieldType =
      kFieldInstructionKind + kFieldInstructionKindSize;
  static constexpr size_t kFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
  static constexpr size_t kNumberOfGenericPackedBits = kFieldType + kFieldTypeSize;
  static constexpr size_t kMaxNumberOfPackedBits = sizeof(uint32_t) * kBitsPerByte;

  static_assert(kNumberOfGenericPackedBits <= kMaxNumberOfPackedBits,
                "Too many generic packed fields");

  using TypeField = BitField<DataType::Type, kFieldType, kFieldTypeSize>;

  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const {
    return GetInputRecords()[i];
  }

  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) {
    ArrayRef<HUserRecord<HInstruction*>> input_records = GetInputRecords();
    input_records[index] = input;
  }

  uint32_t GetPackedFields() const {
    return packed_fields_;
  }

  template <size_t flag>
  bool GetPackedFlag() const {
    return (packed_fields_ & (1u << flag)) != 0u;
  }

  template <size_t flag>
  void SetPackedFlag(bool value = true) {
    packed_fields_ = (packed_fields_ & ~(1u << flag)) | ((value ? 1u : 0u) << flag);
  }

  template <typename BitFieldType>
  typename BitFieldType::value_type GetPackedField() const {
    return BitFieldType::Decode(packed_fields_);
  }

  template <typename BitFieldType>
  void SetPackedField(typename BitFieldType::value_type value) {
    DCHECK(IsUint<BitFieldType::size>(static_cast<uintptr_t>(value)));
    packed_fields_ = BitFieldType::Update(value, packed_fields_);
  }

  // Copy construction for the instruction (used for the Clone function).
  //
  // Fields (e.g. lifetime, intervals and codegen info) associated with phases starting from
  // prepare_for_register_allocator are not copied (set to default values).
  //
  // Copy constructors must be provided for every HInstruction type; the default copy
  // constructor is fine for most of them. However, for some instructions a custom copy
  // constructor must be specified (when the instruction has non-trivially copyable fields
  // that need special handling when copied).
  explicit HInstruction(const HInstruction& other)
      : previous_(nullptr),
        next_(nullptr),
        block_(nullptr),
        dex_pc_(other.dex_pc_),
        id_(-1),
        ssa_index_(-1),
        packed_fields_(other.packed_fields_),
        environment_(nullptr),
        locations_(nullptr),
        live_interval_(nullptr),
        lifetime_position_(kNoLifetime),
        side_effects_(other.side_effects_),
        reference_type_handle_(other.reference_type_handle_) {
  }

 private:
  using InstructionKindField =
      BitField<InstructionKind, kFieldInstructionKind, kFieldInstructionKindSize>;

  void FixUpUserRecordsAfterUseInsertion(HUseList<HInstruction*>::iterator fixup_end) {
    auto before_use_node = uses_.before_begin();
    for (auto use_node = uses_.begin(); use_node != fixup_end; ++use_node) {
      HInstruction* user = use_node->GetUser();
      size_t input_index = use_node->GetIndex();
      user->SetRawInputRecordAt(input_index, HUserRecord<HInstruction*>(this, before_use_node));
      before_use_node = use_node;
    }
  }

  void FixUpUserRecordsAfterUseRemoval(HUseList<HInstruction*>::iterator before_use_node) {
    auto next = ++HUseList<HInstruction*>::iterator(before_use_node);
    if (next != uses_.end()) {
      HInstruction* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->InputRecordAt(next_index).GetInstruction() == this);
      next_user->SetRawInputRecordAt(next_index, HUserRecord<HInstruction*>(this, before_use_node));
    }
  }

  void FixUpUserRecordsAfterEnvUseInsertion(HUseList<HEnvironment*>::iterator env_fixup_end) {
    auto before_env_use_node = env_uses_.before_begin();
    for (auto env_use_node = env_uses_.begin(); env_use_node != env_fixup_end; ++env_use_node) {
      HEnvironment* user = env_use_node->GetUser();
      size_t input_index = env_use_node->GetIndex();
      user->GetVRegs()[input_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
      before_env_use_node = env_use_node;
    }
  }

  void FixUpUserRecordsAfterEnvUseRemoval(HUseList<HEnvironment*>::iterator before_env_use_node) {
    auto next = ++HUseList<HEnvironment*>::iterator(before_env_use_node);
    if (next != env_uses_.end()) {
      HEnvironment* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->GetVRegs()[next_index].GetInstruction() == this);
      next_user->GetVRegs()[next_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
    }
  }

  HInstruction* previous_;
  HInstruction* next_;
  HBasicBlock* block_;
  const uint32_t dex_pc_;

  // An instruction gets an id when it is added to the graph.
  // It reflects creation order. A negative id means the instruction
  // has not been added to the graph.
  int id_;

  // When doing liveness analysis, instructions that have uses get an SSA index.
  int ssa_index_;

  // Packed fields.
  uint32_t packed_fields_;

  // List of instructions that have this instruction as input.
  HUseList<HInstruction*> uses_;

  // List of environments that contain this instruction.
  HUseList<HEnvironment*> env_uses_;

  // The environment associated with this instruction. Not null if the instruction
  // might jump out of the method.
  HEnvironment* environment_;

  // Set by the code generator.
  LocationSummary* locations_;

  // Set by the liveness analysis.
  LiveInterval* live_interval_;

  // Set by the liveness analysis, this is the position in a linear
  // order of blocks where this instruction's live interval starts.
  size_t lifetime_position_;

  SideEffects side_effects_;

  // The reference handle part of the reference type info.
  // The IsExact() flag is stored in packed fields.
  // TODO: for primitive types this should be marked as invalid.
  ReferenceTypeInfo::TypeHandle reference_type_handle_;

  friend class GraphChecker;
  friend class HBasicBlock;
  friend class HEnvironment;
  friend class HGraph;
  friend class HInstructionList;
};

std::ostream& operator<<(std::ostream& os, HInstruction::InstructionKind rhs);
std::ostream& operator<<(std::ostream& os, const HInstruction::NoArgsDump rhs);
std::ostream& operator<<(std::ostream& os, const HInstruction::ArgsDump rhs);
std::ostream& operator<<(std::ostream& os, const HUseList<HInstruction*>& lst);
std::ostream& operator<<(std::ostream& os, const HUseList<HEnvironment*>& lst);

// Forward declarations for friends.
template <typename InnerIter> struct HSTLInstructionIterator;

// Iterates over the instructions, while preserving the next instruction
// in case the current instruction gets removed from the list by the user
// of this iterator.
class HInstructionIterator : public ValueObject {
 public:
  explicit HInstructionIterator(const HInstructionList& instructions)
      : instruction_(instructions.first_instruction_) {
    next_ = Done() ? nullptr : instruction_->GetNext();
  }

  bool Done() const { return instruction_ == nullptr; }
  HInstruction* Current() const { return instruction_; }
  void Advance() {
    instruction_ = next_;
    next_ = Done() ? nullptr : instruction_->GetNext();
  }

 private:
  HInstructionIterator() : instruction_(nullptr), next_(nullptr) {}

  HInstruction* instruction_;
  HInstruction* next_;

  friend struct HSTLInstructionIterator<HInstructionIterator>;
};
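
// Illustrative sketch (assumes an HBasicBlock* `block`): because the iterator
// caches the next instruction, the current one may be removed mid-iteration.
//
//   for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
//     HInstruction* current = it.Current();
//     if (current->IsDeadAndRemovable()) {
//       block->RemoveInstruction(current);  // Safe: `next_` was already saved.
//     }
//   }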
2707
2708 // Iterates over the instructions without saving the next instruction,
2709 // therefore handling changes in the graph potentially made by the user
2710 // of this iterator.
2711 class HInstructionIteratorHandleChanges : public ValueObject {
2712 public:
2713 explicit HInstructionIteratorHandleChanges(const HInstructionList& instructions)
2714 : instruction_(instructions.first_instruction_) {
2715 }
2716
2717 bool Done() const { return instruction_ == nullptr; }
2718 HInstruction* Current() const { return instruction_; }
2719 void Advance() {
2720 instruction_ = instruction_->GetNext();
2721 }
2722
2723 private:
2724 HInstructionIteratorHandleChanges() : instruction_(nullptr) {}
2725
2726 HInstruction* instruction_;
2727
2728 friend struct HSTLInstructionIterator<HInstructionIteratorHandleChanges>;
2729 };
2730
2731
2732 class HBackwardInstructionIterator : public ValueObject {
2733 public:
2734 explicit HBackwardInstructionIterator(const HInstructionList& instructions)
2735 : instruction_(instructions.last_instruction_) {
2736 next_ = Done() ? nullptr : instruction_->GetPrevious();
2737 }
2738
2739 explicit HBackwardInstructionIterator(HInstruction* instruction) : instruction_(instruction) {
2740 next_ = Done() ? nullptr : instruction_->GetPrevious();
2741 }
2742
2743 bool Done() const { return instruction_ == nullptr; }
2744 HInstruction* Current() const { return instruction_; }
2745 void Advance() {
2746 instruction_ = next_;
2747 next_ = Done() ? nullptr : instruction_->GetPrevious();
2748 }
2749
2750 private:
2751 HBackwardInstructionIterator() : instruction_(nullptr), next_(nullptr) {}
2752
2753 HInstruction* instruction_;
2754 HInstruction* next_;
2755
2756 friend struct HSTLInstructionIterator<HBackwardInstructionIterator>;
2757 };
2758
2759 template <typename InnerIter>
2760 struct HSTLInstructionIterator : public ValueObject {
2761 public:
2762 using iterator_category = std::forward_iterator_tag;
2763 using value_type = HInstruction*;
2764 using difference_type = ptrdiff_t;
2765 using pointer = void;
2766 using reference = void;
2767
2768 static_assert(std::is_same_v<InnerIter, HBackwardInstructionIterator> ||
2769 std::is_same_v<InnerIter, HInstructionIterator> ||
2770 std::is_same_v<InnerIter, HInstructionIteratorHandleChanges>,
2771 "Unknown wrapped iterator!");
2772
2773 explicit HSTLInstructionIterator(InnerIter inner) : inner_(inner) {}
2774 HInstruction* operator*() const {
2775 DCHECK(inner_.Current() != nullptr);
2776 return inner_.Current();
2777 }
2778
2779 HSTLInstructionIterator<InnerIter>& operator++() {
2780 DCHECK(*this != HSTLInstructionIterator<InnerIter>::EndIter());
2781 inner_.Advance();
2782 return *this;
2783 }
2784
2785 HSTLInstructionIterator<InnerIter> operator++(int) {
2786 HSTLInstructionIterator<InnerIter> prev(*this);
2787 ++(*this);
2788 return prev;
2789 }
2790
2791 bool operator==(const HSTLInstructionIterator<InnerIter>& other) const {
2792 return inner_.Current() == other.inner_.Current();
2793 }
2794
2795 bool operator!=(const HSTLInstructionIterator<InnerIter>& other) const {
2796 return !(*this == other);
2797 }
2798
2799 static HSTLInstructionIterator<InnerIter> EndIter() {
2800 return HSTLInstructionIterator<InnerIter>(InnerIter());
2801 }
2802
2803 private:
2804 InnerIter inner_;
2805 };
2806
2807 template <typename InnerIter>
2808 IterationRange<HSTLInstructionIterator<InnerIter>> MakeSTLInstructionIteratorRange(InnerIter iter) {
2809 return MakeIterationRange(HSTLInstructionIterator<InnerIter>(iter),
2810 HSTLInstructionIterator<InnerIter>::EndIter());
2811 }
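// Usage sketch (illustrative): the STL adapter enables range-based for loops
// over an instruction list, e.g.:
//   for (HInstruction* insn : MakeSTLInstructionIteratorRange(
//            HInstructionIterator(block->GetInstructions()))) {
//     Visit(insn);  // `Visit` is a hypothetical callback
//   }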
2812
2813 class HVariableInputSizeInstruction : public HInstruction {
2814 public:
2815 using HInstruction::GetInputRecords; // Keep the const version visible.
2816 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() override {
2817 return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
2818 }
2819
2820 void AddInput(HInstruction* input);
2821 void InsertInputAt(size_t index, HInstruction* input);
2822 void RemoveInputAt(size_t index);
2823
2824 // Removes all the inputs.
2825 // Also removes this instruction from each input's use list
2826 // (for non-environment uses only).
2827 void RemoveAllInputs();
2828
2829 protected:
2830 HVariableInputSizeInstruction(InstructionKind inst_kind,
2831 SideEffects side_effects,
2832 uint32_t dex_pc,
2833 ArenaAllocator* allocator,
2834 size_t number_of_inputs,
2835 ArenaAllocKind kind)
2836 : HInstruction(inst_kind, side_effects, dex_pc),
2837 inputs_(number_of_inputs, allocator->Adapter(kind)) {}
2838 HVariableInputSizeInstruction(InstructionKind inst_kind,
2839 DataType::Type type,
2840 SideEffects side_effects,
2841 uint32_t dex_pc,
2842 ArenaAllocator* allocator,
2843 size_t number_of_inputs,
2844 ArenaAllocKind kind)
2845 : HInstruction(inst_kind, type, side_effects, dex_pc),
2846 inputs_(number_of_inputs, allocator->Adapter(kind)) {}
2847
2848 DEFAULT_COPY_CONSTRUCTOR(VariableInputSizeInstruction);
2849
2850 ArenaVector<HUserRecord<HInstruction*>> inputs_;
2851 };
2852
2853 template<size_t N>
2854 class HExpression : public HInstruction {
2855 public:
2856 HExpression<N>(InstructionKind kind, SideEffects side_effects, uint32_t dex_pc)
2857 : HInstruction(kind, side_effects, dex_pc), inputs_() {}
2858 HExpression<N>(InstructionKind kind,
2859 DataType::Type type,
2860 SideEffects side_effects,
2861 uint32_t dex_pc)
2862 : HInstruction(kind, type, side_effects, dex_pc), inputs_() {}
2863 virtual ~HExpression() {}
2864
2865 using HInstruction::GetInputRecords; // Keep the const version visible.
2866 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
2867 return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
2868 }
2869
2870 protected:
2871 DEFAULT_COPY_CONSTRUCTOR(Expression<N>);
2872
2873 private:
2874 std::array<HUserRecord<HInstruction*>, N> inputs_;
2875
2876 friend class SsaBuilder;
2877 };
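// Fixed-arity instructions derive from HExpression<N> so that their N input
// records live in the inline std::array above, avoiding a per-instruction
// arena-allocated vector; HReturn below, for instance, is an HExpression<1>.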
2878
2879 // HExpression specialization for N=0.
2880 template<>
2881 class HExpression<0> : public HInstruction {
2882 public:
2883 using HInstruction::HInstruction;
2884
2885 virtual ~HExpression() {}
2886
2887 using HInstruction::GetInputRecords; // Keep the const version visible.
2888 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
2889 return ArrayRef<HUserRecord<HInstruction*>>();
2890 }
2891
2892 protected:
2893 DEFAULT_COPY_CONSTRUCTOR(Expression<0>);
2894
2895 private:
2896 friend class SsaBuilder;
2897 };
2898
2899 class HMethodEntryHook : public HExpression<0> {
2900 public:
2901 explicit HMethodEntryHook(uint32_t dex_pc)
2902 : HExpression(kMethodEntryHook, SideEffects::All(), dex_pc) {}
2903
2904 bool NeedsEnvironment() const override {
2905 return true;
2906 }
2907
2908 bool CanThrow() const override { return true; }
2909
2910 DECLARE_INSTRUCTION(MethodEntryHook);
2911
2912 protected:
2913 DEFAULT_COPY_CONSTRUCTOR(MethodEntryHook);
2914 };
2915
2916 class HMethodExitHook : public HExpression<1> {
2917 public:
2918 HMethodExitHook(HInstruction* value, uint32_t dex_pc)
2919 : HExpression(kMethodExitHook, SideEffects::All(), dex_pc) {
2920 SetRawInputAt(0, value);
2921 }
2922
2923 bool NeedsEnvironment() const override {
2924 return true;
2925 }
2926
2927 bool CanThrow() const override { return true; }
2928
2929 DECLARE_INSTRUCTION(MethodExitHook);
2930
2931 protected:
2932 DEFAULT_COPY_CONSTRUCTOR(MethodExitHook);
2933 };
2934
2935 // Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow
2936 // instruction that branches to the exit block.
2937 class HReturnVoid final : public HExpression<0> {
2938 public:
2939 explicit HReturnVoid(uint32_t dex_pc = kNoDexPc)
2940 : HExpression(kReturnVoid, SideEffects::None(), dex_pc) {
2941 }
2942
2943 bool IsControlFlow() const override { return true; }
2944
2945 DECLARE_INSTRUCTION(ReturnVoid);
2946
2947 protected:
2948 DEFAULT_COPY_CONSTRUCTOR(ReturnVoid);
2949 };
2950
2951 // Represents dex's RETURN opcodes. A HReturn is a control flow
2952 // instruction that branches to the exit block.
2953 class HReturn final : public HExpression<1> {
2954 public:
2955 explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc)
2956 : HExpression(kReturn, SideEffects::None(), dex_pc) {
2957 SetRawInputAt(0, value);
2958 }
2959
2960 bool IsControlFlow() const override { return true; }
2961
2962 DECLARE_INSTRUCTION(Return);
2963
2964 protected:
2965 DEFAULT_COPY_CONSTRUCTOR(Return);
2966 };
2967
2968 class HPhi final : public HVariableInputSizeInstruction {
2969 public:
2970 HPhi(ArenaAllocator* allocator,
2971 uint32_t reg_number,
2972 size_t number_of_inputs,
2973 DataType::Type type,
2974 uint32_t dex_pc = kNoDexPc)
2975 : HVariableInputSizeInstruction(
2976 kPhi,
2977 ToPhiType(type),
2978 SideEffects::None(),
2979 dex_pc,
2980 allocator,
2981 number_of_inputs,
2982 kArenaAllocPhiInputs),
2983 reg_number_(reg_number) {
2984 DCHECK_NE(GetType(), DataType::Type::kVoid);
2985 // Phis are constructed live and marked dead if conflicting or unused.
2986 // Individual steps of SsaBuilder should assume that if a phi has been
2987 // marked dead, it can be ignored and will be removed by SsaPhiElimination.
2988 SetPackedFlag<kFlagIsLive>(true);
2989 SetPackedFlag<kFlagCanBeNull>(true);
2990 }
2991
2992 bool IsClonable() const override { return true; }
2993
2994 // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
2995 static DataType::Type ToPhiType(DataType::Type type) {
2996 return DataType::Kind(type);
2997 }
2998
2999 bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }
3000
3001 void SetType(DataType::Type new_type) {
3002 // Make sure that only valid type changes occur. The following are allowed:
3003 // (1) int -> float/ref (primitive type propagation),
3004 // (2) long -> double (primitive type propagation).
3005 DCHECK(GetType() == new_type ||
3006 (GetType() == DataType::Type::kInt32 && new_type == DataType::Type::kFloat32) ||
3007 (GetType() == DataType::Type::kInt32 && new_type == DataType::Type::kReference) ||
3008 (GetType() == DataType::Type::kInt64 && new_type == DataType::Type::kFloat64));
3009 SetPackedField<TypeField>(new_type);
3010 }
3011
3012 bool CanBeNull() const override { return GetPackedFlag<kFlagCanBeNull>(); }
3013 void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }
3014
3015 uint32_t GetRegNumber() const { return reg_number_; }
3016
3017 void SetDead() { SetPackedFlag<kFlagIsLive>(false); }
3018 void SetLive() { SetPackedFlag<kFlagIsLive>(true); }
3019 bool IsDead() const { return !IsLive(); }
3020 bool IsLive() const { return GetPackedFlag<kFlagIsLive>(); }
3021
3022 bool IsVRegEquivalentOf(const HInstruction* other) const {
3023 return other != nullptr
3024 && other->IsPhi()
3025 && other->GetBlock() == GetBlock()
3026 && other->AsPhi()->GetRegNumber() == GetRegNumber();
3027 }
3028
3029 bool HasEquivalentPhi() const {
3030 if (GetPrevious() != nullptr && GetPrevious()->AsPhi()->GetRegNumber() == GetRegNumber()) {
3031 return true;
3032 }
3033 if (GetNext() != nullptr && GetNext()->AsPhi()->GetRegNumber() == GetRegNumber()) {
3034 return true;
3035 }
3036 return false;
3037 }
3038
3039 // Returns the next equivalent phi (searching forward from the current one) or null if none.
3040 // An equivalent phi is a phi having the same dex register and type.
3041 // It assumes that phis with the same dex register are adjacent.
3042 HPhi* GetNextEquivalentPhiWithSameType() {
3043 HInstruction* next = GetNext();
3044 while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) {
3045 if (next->GetType() == GetType()) {
3046 return next->AsPhi();
3047 }
3048 next = next->GetNext();
3049 }
3050 return nullptr;
3051 }
3052
3053 DECLARE_INSTRUCTION(Phi);
3054
3055 protected:
3056 DEFAULT_COPY_CONSTRUCTOR(Phi);
3057
3058 private:
3059 static constexpr size_t kFlagIsLive = HInstruction::kNumberOfGenericPackedBits;
3060 static constexpr size_t kFlagCanBeNull = kFlagIsLive + 1;
3061 static constexpr size_t kNumberOfPhiPackedBits = kFlagCanBeNull + 1;
3062 static_assert(kNumberOfPhiPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
3063
3064 const uint32_t reg_number_;
3065 };
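// Construction sketch (illustrative; `allocator`, `join_block` and the value
// instructions are assumed to exist): a phi merging two definitions of dex
// register 3 at a join point:
//   HPhi* phi = new (allocator) HPhi(
//       allocator, /* reg_number= */ 3u, /* number_of_inputs= */ 2u, DataType::Type::kInt32);
//   phi->SetRawInputAt(0, value_from_first_predecessor);
//   phi->SetRawInputAt(1, value_from_second_predecessor);
//   join_block->AddPhi(phi);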
3066
3067 // The exit instruction is the only instruction of the exit block.
3068 // Instructions aborting the method (HThrow and HReturn) must branch to the
3069 // exit block.
3070 class HExit final : public HExpression<0> {
3071 public:
3072 explicit HExit(uint32_t dex_pc = kNoDexPc)
3073 : HExpression(kExit, SideEffects::None(), dex_pc) {
3074 }
3075
3076 bool IsControlFlow() const override { return true; }
3077
3078 DECLARE_INSTRUCTION(Exit);
3079
3080 protected:
3081 DEFAULT_COPY_CONSTRUCTOR(Exit);
3082 };
3083
3084 // Jumps from one block to another.
3085 class HGoto final : public HExpression<0> {
3086 public:
3087 explicit HGoto(uint32_t dex_pc = kNoDexPc)
3088 : HExpression(kGoto, SideEffects::None(), dex_pc) {
3089 }
3090
3091 bool IsClonable() const override { return true; }
3092 bool IsControlFlow() const override { return true; }
3093
3094 HBasicBlock* GetSuccessor() const {
3095 return GetBlock()->GetSingleSuccessor();
3096 }
3097
3098 DECLARE_INSTRUCTION(Goto);
3099
3100 protected:
3101 DEFAULT_COPY_CONSTRUCTOR(Goto);
3102 };
3103
3104 class HConstant : public HExpression<0> {
3105 public:
3106 explicit HConstant(InstructionKind kind, DataType::Type type)
3107 : HExpression(kind, type, SideEffects::None(), kNoDexPc) {
3108 }
3109
3110 bool CanBeMoved() const override { return true; }
3111
3112 // Is this constant -1 in the arithmetic sense?
3113 virtual bool IsMinusOne() const { return false; }
3114 // Is this constant 0 in the arithmetic sense?
3115 virtual bool IsArithmeticZero() const { return false; }
3116 // Is this constant a 0-bit pattern?
3117 virtual bool IsZeroBitPattern() const { return false; }
3118 // Is this constant 1 in the arithmetic sense?
3119 virtual bool IsOne() const { return false; }
3120
3121 virtual uint64_t GetValueAsUint64() const = 0;
3122
3123 DECLARE_ABSTRACT_INSTRUCTION(Constant);
3124
3125 protected:
3126 DEFAULT_COPY_CONSTRUCTOR(Constant);
3127 };
3128
3129 class HNullConstant final : public HConstant {
3130 public:
3131 bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
3132 return true;
3133 }
3134
3135 uint64_t GetValueAsUint64() const override { return 0; }
3136
3137 size_t ComputeHashCode() const override { return 0; }
3138
3139 // The null constant representation is a 0-bit pattern.
3140 bool IsZeroBitPattern() const override { return true; }
3141
3142 DECLARE_INSTRUCTION(NullConstant);
3143
3144 protected:
3145 DEFAULT_COPY_CONSTRUCTOR(NullConstant);
3146
3147 private:
3148 explicit HNullConstant()
3149 : HConstant(kNullConstant, DataType::Type::kReference) {
3150 }
3151
3152 friend class HGraph;
3153 };
3154
3155 // Constants of type int. These can come from Dex instructions or be
3156 // synthesized (for example with the if-eqz instruction).
3157 class HIntConstant final : public HConstant {
3158 public:
3159 int32_t GetValue() const { return value_; }
3160
3161 uint64_t GetValueAsUint64() const override {
3162 return static_cast<uint64_t>(static_cast<uint32_t>(value_));
3163 }
3164
3165 bool InstructionDataEquals(const HInstruction* other) const override {
3166 DCHECK(other->IsIntConstant()) << other->DebugName();
3167 return other->AsIntConstant()->value_ == value_;
3168 }
3169
3170 size_t ComputeHashCode() const override { return GetValue(); }
3171
3172 bool IsMinusOne() const override { return GetValue() == -1; }
3173 bool IsArithmeticZero() const override { return GetValue() == 0; }
3174 bool IsZeroBitPattern() const override { return GetValue() == 0; }
3175 bool IsOne() const override { return GetValue() == 1; }
3176
3177 // Integer constants are used to encode Boolean values as well,
3178 // where 1 means true and 0 means false.
3179 bool IsTrue() const { return GetValue() == 1; }
3180 bool IsFalse() const { return GetValue() == 0; }
3181
3182 DECLARE_INSTRUCTION(IntConstant);
3183
3184 protected:
3185 DEFAULT_COPY_CONSTRUCTOR(IntConstant);
3186
3187 private:
3188 explicit HIntConstant(int32_t value)
3189 : HConstant(kIntConstant, DataType::Type::kInt32), value_(value) {
3190 }
3191 explicit HIntConstant(bool value)
3192 : HConstant(kIntConstant, DataType::Type::kInt32),
3193 value_(value ? 1 : 0) {
3194 }
3195
3196 const int32_t value_;
3197
3198 friend class HGraph;
3199 ART_FRIEND_TEST(GraphTest, InsertInstructionBefore);
3200 ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast);
3201 };
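// Note that constant instructions are interned per graph: callers obtain the
// canonical instance via HGraph::GetIntConstant() (and its siblings for the
// other constant kinds) rather than constructing one directly, which is why
// the constructors are private and HGraph is a friend.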
3202
3203 class HLongConstant final : public HConstant {
3204 public:
3205 int64_t GetValue() const { return value_; }
3206
3207 uint64_t GetValueAsUint64() const override { return value_; }
3208
3209 bool InstructionDataEquals(const HInstruction* other) const override {
3210 DCHECK(other->IsLongConstant()) << other->DebugName();
3211 return other->AsLongConstant()->value_ == value_;
3212 }
3213
3214 size_t ComputeHashCode() const override { return static_cast<size_t>(GetValue()); }
3215
3216 bool IsMinusOne() const override { return GetValue() == -1; }
3217 bool IsArithmeticZero() const override { return GetValue() == 0; }
3218 bool IsZeroBitPattern() const override { return GetValue() == 0; }
3219 bool IsOne() const override { return GetValue() == 1; }
3220
3221 DECLARE_INSTRUCTION(LongConstant);
3222
3223 protected:
3224 DEFAULT_COPY_CONSTRUCTOR(LongConstant);
3225
3226 private:
3227 explicit HLongConstant(int64_t value)
3228 : HConstant(kLongConstant, DataType::Type::kInt64),
3229 value_(value) {
3230 }
3231
3232 const int64_t value_;
3233
3234 friend class HGraph;
3235 };
3236
3237 class HFloatConstant final : public HConstant {
3238 public:
3239 float GetValue() const { return value_; }
3240
3241 uint64_t GetValueAsUint64() const override {
3242 return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_));
3243 }
3244
3245 bool InstructionDataEquals(const HInstruction* other) const override {
3246 DCHECK(other->IsFloatConstant()) << other->DebugName();
3247 return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64();
3248 }
3249
3250 size_t ComputeHashCode() const override { return static_cast<size_t>(GetValue()); }
3251
3252 bool IsMinusOne() const override {
3253 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f));
3254 }
3255 bool IsArithmeticZero() const override {
3256 return std::fpclassify(value_) == FP_ZERO;
3257 }
3258 bool IsArithmeticPositiveZero() const {
3259 return IsArithmeticZero() && !std::signbit(value_);
3260 }
3261 bool IsArithmeticNegativeZero() const {
3262 return IsArithmeticZero() && std::signbit(value_);
3263 }
3264 bool IsZeroBitPattern() const override {
3265 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(0.0f);
3266 }
3267 bool IsOne() const override {
3268 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f);
3269 }
3270 bool IsNaN() const {
3271 return std::isnan(value_);
3272 }
3273
3274 DECLARE_INSTRUCTION(FloatConstant);
3275
3276 protected:
3277 DEFAULT_COPY_CONSTRUCTOR(FloatConstant);
3278
3279 private:
3280 explicit HFloatConstant(float value)
3281 : HConstant(kFloatConstant, DataType::Type::kFloat32),
3282 value_(value) {
3283 }
3284 explicit HFloatConstant(int32_t value)
3285 : HConstant(kFloatConstant, DataType::Type::kFloat32),
3286 value_(bit_cast<float, int32_t>(value)) {
3287 }
3288
3289 const float value_;
3290
3291 // Only the SsaBuilder and HGraph can create floating-point constants.
3292 friend class SsaBuilder;
3293 friend class HGraph;
3294 };
3295
3296 class HDoubleConstant final : public HConstant {
3297 public:
3298 double GetValue() const { return value_; }
3299
3300 uint64_t GetValueAsUint64() const override { return bit_cast<uint64_t, double>(value_); }
3301
3302 bool InstructionDataEquals(const HInstruction* other) const override {
3303 DCHECK(other->IsDoubleConstant()) << other->DebugName();
3304 return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64();
3305 }
3306
3307 size_t ComputeHashCode() const override { return static_cast<size_t>(GetValue()); }
3308
3309 bool IsMinusOne() const override {
3310 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0));
3311 }
3312 bool IsArithmeticZero() const override {
3313 return std::fpclassify(value_) == FP_ZERO;
3314 }
3315 bool IsArithmeticPositiveZero() const {
3316 return IsArithmeticZero() && !std::signbit(value_);
3317 }
3318 bool IsArithmeticNegativeZero() const {
3319 return IsArithmeticZero() && std::signbit(value_);
3320 }
3321 bool IsZeroBitPattern() const override {
3322 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((0.0));
3323 }
3324 bool IsOne() const override {
3325 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0);
3326 }
3327 bool IsNaN() const {
3328 return std::isnan(value_);
3329 }
3330
3331 DECLARE_INSTRUCTION(DoubleConstant);
3332
3333 protected:
3334 DEFAULT_COPY_CONSTRUCTOR(DoubleConstant);
3335
3336 private:
3337 explicit HDoubleConstant(double value)
3338 : HConstant(kDoubleConstant, DataType::Type::kFloat64),
3339 value_(value) {
3340 }
3341 explicit HDoubleConstant(int64_t value)
3342 : HConstant(kDoubleConstant, DataType::Type::kFloat64),
3343 value_(bit_cast<double, int64_t>(value)) {
3344 }
3345
3346 const double value_;
3347
3348 // Only the SsaBuilder and HGraph can create floating-point constants.
3349 friend class SsaBuilder;
3350 friend class HGraph;
3351 };
3352
3353 // Conditional branch. A block ending with an HIf instruction must have
3354 // two successors.
3355 class HIf final : public HExpression<1> {
3356 public:
3357 explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
3358 : HExpression(kIf, SideEffects::None(), dex_pc),
3359 true_count_(std::numeric_limits<uint16_t>::max()),
3360 false_count_(std::numeric_limits<uint16_t>::max()) {
3361 SetRawInputAt(0, input);
3362 }
3363
3364 bool IsClonable() const override { return true; }
3365 bool IsControlFlow() const override { return true; }
3366
3367 HBasicBlock* IfTrueSuccessor() const {
3368 return GetBlock()->GetSuccessors()[0];
3369 }
3370
3371 HBasicBlock* IfFalseSuccessor() const {
3372 return GetBlock()->GetSuccessors()[1];
3373 }
3374
3375 void SetTrueCount(uint16_t count) { true_count_ = count; }
3376 uint16_t GetTrueCount() const { return true_count_; }
3377
3378 void SetFalseCount(uint16_t count) { false_count_ = count; }
3379 uint16_t GetFalseCount() const { return false_count_; }
3380
3381 DECLARE_INSTRUCTION(If);
3382
3383 protected:
3384 DEFAULT_COPY_CONSTRUCTOR(If);
3385
3386 private:
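// Branch profile counters (taken / not taken). The constructor initializes
// both to the maximum uint16_t value, which presumably serves as the
// "no profile data" sentinel until SetTrueCount()/SetFalseCount() are called.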
3387 uint16_t true_count_;
3388 uint16_t false_count_;
3389 };
3390
3391
3392 // Abstract instruction which marks the beginning and/or end of a try block and
3393 // links it to the respective exception handlers. Behaves the same as a Goto in
3394 // non-exceptional control flow.
3395 // Normal-flow successor is stored at index zero, exception handlers under
3396 // higher indices in no particular order.
3397 class HTryBoundary final : public HExpression<0> {
3398 public:
3399 enum class BoundaryKind {
3400 kEntry,
3401 kExit,
3402 kLast = kExit
3403 };
3404
3405 // SideEffects::CanTriggerGC prevents instructions with SideEffects::DependsOnGC from being
3406 // live across the edges entering a catch block, as GC might happen while an exception is
3407 // being thrown. A TryBoundary with BoundaryKind::kExit is conservatively used for this, as
3408 // there is no HInstruction with which a catch block must start.
3409 explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
3410 : HExpression(kTryBoundary,
3411 (kind == BoundaryKind::kExit) ? SideEffects::CanTriggerGC()
3412 : SideEffects::None(),
3413 dex_pc) {
3414 SetPackedField<BoundaryKindField>(kind);
3415 }
3416
3417 bool IsControlFlow() const override { return true; }
3418
3419 // Returns the block's non-exceptional successor (index zero).
3420 HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }
3421
3422 ArrayRef<HBasicBlock* const> GetExceptionHandlers() const {
3423 return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u);
3424 }
3425
3426 // Returns whether `handler` is among its exception handlers (non-zero index
3427 // successors).
3428 bool HasExceptionHandler(const HBasicBlock& handler) const {
3429 DCHECK(handler.IsCatchBlock());
3430 return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */);
3431 }
3432
3433 // If not present already, adds `handler` to its block's list of exception
3434 // handlers.
3435 void AddExceptionHandler(HBasicBlock* handler) {
3436 if (!HasExceptionHandler(*handler)) {
3437 GetBlock()->AddSuccessor(handler);
3438 }
3439 }
3440
3441 BoundaryKind GetBoundaryKind() const { return GetPackedField<BoundaryKindField>(); }
3442 bool IsEntry() const { return GetBoundaryKind() == BoundaryKind::kEntry; }
3443
3444 bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;
3445
3446 DECLARE_INSTRUCTION(TryBoundary);
3447
3448 protected:
3449 DEFAULT_COPY_CONSTRUCTOR(TryBoundary);
3450
3451 private:
3452 static constexpr size_t kFieldBoundaryKind = kNumberOfGenericPackedBits;
3453 static constexpr size_t kFieldBoundaryKindSize =
3454 MinimumBitsToStore(static_cast<size_t>(BoundaryKind::kLast));
3455 static constexpr size_t kNumberOfTryBoundaryPackedBits =
3456 kFieldBoundaryKind + kFieldBoundaryKindSize;
3457 static_assert(kNumberOfTryBoundaryPackedBits <= kMaxNumberOfPackedBits,
3458 "Too many packed fields.");
3459 using BoundaryKindField = BitField<BoundaryKind, kFieldBoundaryKind, kFieldBoundaryKindSize>;
3460 };
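// Construction sketch (illustrative; `try_block` and `catch_block` are assumed
// blocks): marking a try entry and linking a handler:
//   HTryBoundary* entry = new (allocator) HTryBoundary(
//       HTryBoundary::BoundaryKind::kEntry, dex_pc);
//   try_block->AddInstruction(entry);          // becomes the block's last instruction
//   entry->AddExceptionHandler(catch_block);   // appended as a successor at index >= 1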
3461
3462 // Deoptimizes to the interpreter upon checking a condition.
3463 class HDeoptimize final : public HVariableInputSizeInstruction {
3464 public:
3465 // Use this constructor when the `HDeoptimize` acts as a barrier across which
3466 // no code can be moved.
3467 HDeoptimize(ArenaAllocator* allocator,
3468 HInstruction* cond,
3469 DeoptimizationKind kind,
3470 uint32_t dex_pc)
3471 : HVariableInputSizeInstruction(
3472 kDeoptimize,
3473 SideEffects::All(),
3474 dex_pc,
3475 allocator,
3476 /* number_of_inputs= */ 1,
3477 kArenaAllocMisc) {
3478 SetPackedFlag<kFieldCanBeMoved>(false);
3479 SetPackedField<DeoptimizeKindField>(kind);
3480 SetRawInputAt(0, cond);
3481 }
3482
3483 bool IsClonable() const override { return true; }
3484
3485 // Use this constructor when the `HDeoptimize` guards an instruction, and any user
3486 // that relies on the deoptimization to pass should have its input be the `HDeoptimize`
3487 // instead of `guard`.
3488 // We set CanTriggerGC to prevent any intermediate address from being live
3489 // at the point of the `HDeoptimize`.
3490 HDeoptimize(ArenaAllocator* allocator,
3491 HInstruction* cond,
3492 HInstruction* guard,
3493 DeoptimizationKind kind,
3494 uint32_t dex_pc)
3495 : HVariableInputSizeInstruction(
3496 kDeoptimize,
3497 guard->GetType(),
3498 SideEffects::CanTriggerGC(),
3499 dex_pc,
3500 allocator,
3501 /* number_of_inputs= */ 2,
3502 kArenaAllocMisc) {
3503 SetPackedFlag<kFieldCanBeMoved>(true);
3504 SetPackedField<DeoptimizeKindField>(kind);
3505 SetRawInputAt(0, cond);
3506 SetRawInputAt(1, guard);
3507 }
3508
3509 bool CanBeMoved() const override { return GetPackedFlag<kFieldCanBeMoved>(); }
3510
3511 bool InstructionDataEquals(const HInstruction* other) const override {
3512 return (other->CanBeMoved() == CanBeMoved()) &&
3513 (other->AsDeoptimize()->GetDeoptimizationKind() == GetDeoptimizationKind());
3514 }
3515
3516 bool NeedsEnvironment() const override { return true; }
3517
3518 bool CanThrow() const override { return true; }
3519
3520 DeoptimizationKind GetDeoptimizationKind() const { return GetPackedField<DeoptimizeKindField>(); }
3521
3522 bool GuardsAnInput() const {
3523 return InputCount() == 2;
3524 }
3525
3526 HInstruction* GuardedInput() const {
3527 DCHECK(GuardsAnInput());
3528 return InputAt(1);
3529 }
3530
3531 void RemoveGuard() {
3532 RemoveInputAt(1);
3533 }
3534
3535 DECLARE_INSTRUCTION(Deoptimize);
3536
3537 protected:
3538 DEFAULT_COPY_CONSTRUCTOR(Deoptimize);
3539
3540 private:
3541 static constexpr size_t kFieldCanBeMoved = kNumberOfGenericPackedBits;
3542 static constexpr size_t kFieldDeoptimizeKind = kNumberOfGenericPackedBits + 1;
3543 static constexpr size_t kFieldDeoptimizeKindSize =
3544 MinimumBitsToStore(static_cast<size_t>(DeoptimizationKind::kLast));
3545 static constexpr size_t kNumberOfDeoptimizePackedBits =
3546 kFieldDeoptimizeKind + kFieldDeoptimizeKindSize;
3547 static_assert(kNumberOfDeoptimizePackedBits <= kMaxNumberOfPackedBits,
3548 "Too many packed fields.");
3549 using DeoptimizeKindField =
3550 BitField<DeoptimizationKind, kFieldDeoptimizeKind, kFieldDeoptimizeKindSize>;
3551 };
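// Usage sketch (illustrative; `condition` and `guarded_load` are assumed
// instructions): the guarding form, whose result stands in for the guarded
// value so that dependent users keep the deoptimization alive:
//   HDeoptimize* deopt = new (allocator) HDeoptimize(
//       allocator, condition, guarded_load, DeoptimizationKind::kBCE, dex_pc);
//   // deopt->GuardsAnInput() is true and deopt->GuardedInput() == guarded_load.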
3552
3553 // Represents a should_deoptimize flag. Currently used for CHA-based devirtualization.
3554 // The compiled code checks this flag value in a guard before a devirtualized call and,
3555 // if the flag is true, starts deoptimization.
3556 // The flag occupies a 4-byte slot on the stack.
3557 // TODO: allocate a register for this flag.
3558 class HShouldDeoptimizeFlag final : public HVariableInputSizeInstruction {
3559 public:
3560 // CHA guards are optimized only in a dedicated pass and have no side effects
3561 // with regard to other passes.
3562 HShouldDeoptimizeFlag(ArenaAllocator* allocator, uint32_t dex_pc)
3563 : HVariableInputSizeInstruction(kShouldDeoptimizeFlag,
3564 DataType::Type::kInt32,
3565 SideEffects::None(),
3566 dex_pc,
3567 allocator,
3568 0,
3569 kArenaAllocCHA) {
3570 }
3571
3572 // We do all CHA guard elimination/motion in a single pass, after which there is no
3573 // further guard elimination/motion, since a guard might have been used to justify
3574 // the elimination of another guard. Therefore, we pretend this guard cannot be moved,
3575 // to avoid having other optimizations try to move it.
3576 bool CanBeMoved() const override { return false; }
3577
3578 DECLARE_INSTRUCTION(ShouldDeoptimizeFlag);
3579
3580 protected:
3581 DEFAULT_COPY_CONSTRUCTOR(ShouldDeoptimizeFlag);
3582 };
3583
3584 // Represents the ArtMethod that was passed as a first argument to
3585 // the method. It is used by instructions that depend on it, like
3586 // instructions that work with the dex cache.
3587 class HCurrentMethod final : public HExpression<0> {
3588 public:
3589 explicit HCurrentMethod(DataType::Type type, uint32_t dex_pc = kNoDexPc)
3590 : HExpression(kCurrentMethod, type, SideEffects::None(), dex_pc) {
3591 }
3592
3593 DECLARE_INSTRUCTION(CurrentMethod);
3594
3595 protected:
3596 DEFAULT_COPY_CONSTRUCTOR(CurrentMethod);
3597 };
3598
3599 // Fetches an ArtMethod from the virtual table or the interface method table
3600 // of a class.
3601 class HClassTableGet final : public HExpression<1> {
3602 public:
3603 enum class TableKind {
3604 kVTable,
3605 kIMTable,
3606 kLast = kIMTable
3607 };
3608 HClassTableGet(HInstruction* cls,
3609 DataType::Type type,
3610 TableKind kind,
3611 size_t index,
3612 uint32_t dex_pc)
3613 : HExpression(kClassTableGet, type, SideEffects::None(), dex_pc),
3614 index_(index) {
3615 SetPackedField<TableKindField>(kind);
3616 SetRawInputAt(0, cls);
3617 }
3618
3619 bool IsClonable() const override { return true; }
3620 bool CanBeMoved() const override { return true; }
3621 bool InstructionDataEquals(const HInstruction* other) const override {
3622 return other->AsClassTableGet()->GetIndex() == index_ &&
3623 other->AsClassTableGet()->GetPackedFields() == GetPackedFields();
3624 }
3625
3626 TableKind GetTableKind() const { return GetPackedField<TableKindField>(); }
3627 size_t GetIndex() const { return index_; }
3628
3629 DECLARE_INSTRUCTION(ClassTableGet);
3630
3631 protected:
3632 DEFAULT_COPY_CONSTRUCTOR(ClassTableGet);
3633
3634 private:
3635 static constexpr size_t kFieldTableKind = kNumberOfGenericPackedBits;
3636 static constexpr size_t kFieldTableKindSize =
3637 MinimumBitsToStore(static_cast<size_t>(TableKind::kLast));
3638 static constexpr size_t kNumberOfClassTableGetPackedBits = kFieldTableKind + kFieldTableKindSize;
3639 static_assert(kNumberOfClassTableGetPackedBits <= kMaxNumberOfPackedBits,
3640 "Too many packed fields.");
3641 using TableKindField = BitField<TableKind, kFieldTableKind, kFieldTableKindSize>;
3642
3643 // The index of the ArtMethod in the table.
3644 const size_t index_;
3645 };
3646
3647 // PackedSwitch (jump table). A block ending with a PackedSwitch instruction will
3648 // have one successor for each entry in the switch table, and the final successor
3649 // will be the block containing the next Dex opcode.
3650 class HPackedSwitch final : public HExpression<1> {
3651 public:
3652 HPackedSwitch(int32_t start_value,
3653 uint32_t num_entries,
3654 HInstruction* input,
3655 uint32_t dex_pc = kNoDexPc)
3656 : HExpression(kPackedSwitch, SideEffects::None(), dex_pc),
3657 start_value_(start_value),
3658 num_entries_(num_entries) {
3659 SetRawInputAt(0, input);
3660 }
3661
3662 bool IsClonable() const override { return true; }
3663
3664 bool IsControlFlow() const override { return true; }
3665
3666 int32_t GetStartValue() const { return start_value_; }
3667
3668 uint32_t GetNumEntries() const { return num_entries_; }
3669
3670 HBasicBlock* GetDefaultBlock() const {
3671 // Last entry is the default block.
3672 return GetBlock()->GetSuccessors()[num_entries_];
3673 }
3674 DECLARE_INSTRUCTION(PackedSwitch);
3675
3676 protected:
3677 DEFAULT_COPY_CONSTRUCTOR(PackedSwitch);
3678
3679 private:
3680 const int32_t start_value_;
3681 const uint32_t num_entries_;
3682 };
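// For example, a packed-switch with start_value 10 and num_entries 3 has
// successors 0..2 for case values 10, 11 and 12, while successor 3 (the last
// one) is the fall-through block returned by GetDefaultBlock().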
3683
3684 class HUnaryOperation : public HExpression<1> {
3685 public:
3686 HUnaryOperation(InstructionKind kind,
3687 DataType::Type result_type,
3688 HInstruction* input,
3689 uint32_t dex_pc = kNoDexPc)
3690 : HExpression(kind, result_type, SideEffects::None(), dex_pc) {
3691 SetRawInputAt(0, input);
3692 }
3693
3694 // All of the UnaryOperation instructions are clonable.
3695 bool IsClonable() const override { return true; }
3696
3697 HInstruction* GetInput() const { return InputAt(0); }
3698 DataType::Type GetResultType() const { return GetType(); }
3699
3700 bool CanBeMoved() const override { return true; }
3701 bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
3702 return true;
3703 }
3704
3705 // Try to statically evaluate `this` and return a HConstant
3706 // containing the result of this evaluation. If `this` cannot
3707 // be evaluated as a constant, return null.
3708 HConstant* TryStaticEvaluation() const;
3709
3710 // Same but for `input` instead of GetInput().
3711 HConstant* TryStaticEvaluation(HInstruction* input) const;
3712
3713 // Apply this operation to `x`.
3714 virtual HConstant* Evaluate([[maybe_unused]] HIntConstant* x) const {
3715 LOG(FATAL) << DebugName() << " is not defined for int values";
3716 UNREACHABLE();
3717 }
3718 virtual HConstant* Evaluate([[maybe_unused]] HLongConstant* x) const {
3719 LOG(FATAL) << DebugName() << " is not defined for long values";
3720 UNREACHABLE();
3721 }
3722 virtual HConstant* Evaluate([[maybe_unused]] HFloatConstant* x) const {
3723 LOG(FATAL) << DebugName() << " is not defined for float values";
3724 UNREACHABLE();
3725 }
3726 virtual HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x) const {
3727 LOG(FATAL) << DebugName() << " is not defined for double values";
3728 UNREACHABLE();
3729 }
3730
3731 DECLARE_ABSTRACT_INSTRUCTION(UnaryOperation);
3732
3733 protected:
3734 DEFAULT_COPY_CONSTRUCTOR(UnaryOperation);
3735 };
3736
3737 class HBinaryOperation : public HExpression<2> {
3738 public:
3739 HBinaryOperation(InstructionKind kind,
3740 DataType::Type result_type,
3741 HInstruction* left,
3742 HInstruction* right,
3743 SideEffects side_effects = SideEffects::None(),
3744 uint32_t dex_pc = kNoDexPc)
3745 : HExpression(kind, result_type, side_effects, dex_pc) {
3746 SetRawInputAt(0, left);
3747 SetRawInputAt(1, right);
3748 }
3749
3750 // All of the BinaryOperation instructions are clonable.
3751 bool IsClonable() const override { return true; }
3752
3753 HInstruction* GetLeft() const { return InputAt(0); }
3754 HInstruction* GetRight() const { return InputAt(1); }
3755 DataType::Type GetResultType() const { return GetType(); }
3756
3757 virtual bool IsCommutative() const { return false; }
3758
3759 // Puts a constant on the right.
3760 // Returns whether the order was changed.
3761 bool OrderInputsWithConstantOnTheRight() {
3762 HInstruction* left = InputAt(0);
3763 HInstruction* right = InputAt(1);
3764 if (left->IsConstant() && !right->IsConstant()) {
3765 ReplaceInput(right, 0);
3766 ReplaceInput(left, 1);
3767 return true;
3768 }
3769 return false;
3770 }
3771
3772 // Order inputs by instruction id, but favor constant on the right side.
3773 // This helps GVN for commutative ops.
3774 void OrderInputs() {
3775 DCHECK(IsCommutative());
3776 HInstruction* left = InputAt(0);
3777 HInstruction* right = InputAt(1);
3778 if (left == right || (!left->IsConstant() && right->IsConstant())) {
3779 return;
3780 }
3781 if (OrderInputsWithConstantOnTheRight()) {
3782 return;
3783 }
3784 // Order according to instruction id.
3785 if (left->GetId() > right->GetId()) {
3786 ReplaceInput(right, 0);
3787 ReplaceInput(left, 1);
3788 }
3789 }
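// For example, OrderInputs() rewrites a commutative Add(IntConstant(5), v1)
// into Add(v1, IntConstant(5)), so that structurally equal expressions
// compare and hash identically during GVN.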
3790
3791 bool CanBeMoved() const override { return true; }
3792 bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
3793 return true;
3794 }
3795
3796 // Try to statically evaluate `this` and return a HConstant
3797 // containing the result of this evaluation. If `this` cannot
3798 // be evaluated as a constant, return null.
3799 HConstant* TryStaticEvaluation() const;
3800
3801 // Same but for `left` and `right` instead of GetLeft() and GetRight().
3802 HConstant* TryStaticEvaluation(HInstruction* left, HInstruction* right) const;
3803
3804 // Apply this operation to `x` and `y`.
3805 virtual HConstant* Evaluate([[maybe_unused]] HNullConstant* x,
3806 [[maybe_unused]] HNullConstant* y) const {
3807 LOG(FATAL) << DebugName() << " is not defined for the (null, null) case.";
3808 UNREACHABLE();
3809 }
3810 virtual HConstant* Evaluate([[maybe_unused]] HIntConstant* x,
3811 [[maybe_unused]] HIntConstant* y) const {
3812 LOG(FATAL) << DebugName() << " is not defined for the (int, int) case.";
3813 UNREACHABLE();
3814 }
3815 virtual HConstant* Evaluate([[maybe_unused]] HLongConstant* x,
3816 [[maybe_unused]] HLongConstant* y) const {
3817 LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
3818 UNREACHABLE();
3819 }
3820 virtual HConstant* Evaluate([[maybe_unused]] HLongConstant* x,
3821 [[maybe_unused]] HIntConstant* y) const {
3822 LOG(FATAL) << DebugName() << " is not defined for the (long, int) case.";
3823 UNREACHABLE();
3824 }
3825 virtual HConstant* Evaluate([[maybe_unused]] HFloatConstant* x,
3826 [[maybe_unused]] HFloatConstant* y) const {
3827 LOG(FATAL) << DebugName() << " is not defined for float values";
3828 UNREACHABLE();
3829 }
3830 virtual HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x,
3831 [[maybe_unused]] HDoubleConstant* y) const {
3832 LOG(FATAL) << DebugName() << " is not defined for double values";
3833 UNREACHABLE();
3834 }
3835
3836 // Returns an input that is constant and can legally be used as the right
3837 // input, or null if there is none.
3838 HConstant* GetConstantRight() const;
3839
3840 // If `GetConstantRight()` returns one of the inputs, this returns the other
3841 // one. Otherwise it returns null.
3842 HInstruction* GetLeastConstantLeft() const;
3843
3844 DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation);
3845
3846 protected:
3847 DEFAULT_COPY_CONSTRUCTOR(BinaryOperation);
3848 };
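// Constant folding hinges on the Evaluate() overloads above: for instance,
// when both inputs of an HAdd are HIntConstants, TryStaticEvaluation()
// dispatches to Evaluate(HIntConstant*, HIntConstant*) and yields the folded
// HIntConstant, with which the caller can then replace the HAdd.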
3849
3850 // The comparison bias applies to floating-point operations and indicates how NaN
3851 // comparisons are treated:
3852 enum class ComparisonBias { // private marker to prevent generate-operator-out.py from processing this enum.
3853 kNoBias, // bias is not applicable (e.g. for long operations)
3854 kGtBias, // return 1 for NaN comparisons
3855 kLtBias, // return -1 for NaN comparisons
3856 kLast = kLtBias
3857 };
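// Worked example: for the Java expression `x < y` on floats, dexers emit
// cmpg-float (kGtBias), so a NaN operand produces 1 and the `< 0` test is
// false; dually, `x > y` uses cmpl-float (kLtBias), so NaN produces -1 and
// the `> 0` test is false. Either way, NaN makes the ordered comparison fail.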
3858
3859 std::ostream& operator<<(std::ostream& os, ComparisonBias rhs);
3860
3861 class HCondition : public HBinaryOperation {
3862 public:
3863 HCondition(InstructionKind kind,
3864 HInstruction* first,
3865 HInstruction* second,
3866 uint32_t dex_pc = kNoDexPc)
3867 : HBinaryOperation(kind,
3868 DataType::Type::kBool,
3869 first,
3870 second,
3871 SideEffects::None(),
3872 dex_pc) {
3873 SetPackedField<ComparisonBiasField>(ComparisonBias::kNoBias);
3874 }
3875
3876 static HCondition* Create(HGraph* graph,
3877 IfCondition cond,
3878 HInstruction* lhs,
3879 HInstruction* rhs,
3880 uint32_t dex_pc = kNoDexPc);
3881
3882 // For code generation purposes, returns whether this instruction is just before
3883 // `instruction`, disregarding moves in between.
3884 bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;
3885
3886 DECLARE_ABSTRACT_INSTRUCTION(Condition);
3887
3888 virtual IfCondition GetCondition() const = 0;
3889
3890 virtual IfCondition GetOppositeCondition() const = 0;
3891
3892 bool IsGtBias() const { return GetBias() == ComparisonBias::kGtBias; }
3893 bool IsLtBias() const { return GetBias() == ComparisonBias::kLtBias; }
3894
3895 ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
3896 void SetBias(ComparisonBias bias) { SetPackedField<ComparisonBiasField>(bias); }
3897
3898 bool InstructionDataEquals(const HInstruction* other) const override {
3899 return GetPackedFields() == other->AsCondition()->GetPackedFields();
3900 }
3901
3902 bool IsFPConditionTrueIfNaN() const {
3903 DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3904 IfCondition if_cond = GetCondition();
3905 if (if_cond == kCondNE) {
3906 return true;
3907 } else if (if_cond == kCondEQ) {
3908 return false;
3909 }
3910 return ((if_cond == kCondGT) || (if_cond == kCondGE)) && IsGtBias();
3911 }
3912
3913 bool IsFPConditionFalseIfNaN() const {
3914 DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3915 IfCondition if_cond = GetCondition();
3916 if (if_cond == kCondEQ) {
3917 return true;
3918 } else if (if_cond == kCondNE) {
3919 return false;
3920 }
3921 return ((if_cond == kCondLT) || (if_cond == kCondLE)) && IsGtBias();
3922 }
3923
3924 protected:
3925 // Needed if we merge a HCompare into a HCondition.
3926 static constexpr size_t kFieldComparisonBias = kNumberOfGenericPackedBits;
3927 static constexpr size_t kFieldComparisonBiasSize =
3928 MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
3929 static constexpr size_t kNumberOfConditionPackedBits =
3930 kFieldComparisonBias + kFieldComparisonBiasSize;
3931 static_assert(kNumberOfConditionPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
3932 using ComparisonBiasField =
3933 BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;
3934
3935 template <typename T>
3936 int32_t Compare(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }
3937
3938 template <typename T>
3939 int32_t CompareFP(T x, T y) const {
3940 DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3941 DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
3942 // Handle the bias.
3943 return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compare(x, y);
3944 }
3945
3946 // Return an integer constant containing the result of a condition evaluated at compile time.
3947 HIntConstant* MakeConstantCondition(bool value) const {
3948 return GetBlock()->GetGraph()->GetIntConstant(value);
3949 }
3950
3951 DEFAULT_COPY_CONSTRUCTOR(Condition);
3952 };
3953
3954 // Instruction to check if two inputs are equal to each other.
3955 class HEqual final : public HCondition {
3956 public:
3957 HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3958 : HCondition(kEqual, first, second, dex_pc) {
3959 }
3960
3961 bool IsCommutative() const override { return true; }
3962
3963 HConstant* Evaluate([[maybe_unused]] HNullConstant* x,
3964 [[maybe_unused]] HNullConstant* y) const override {
3965 return MakeConstantCondition(true);
3966 }
3967 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
3968 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()));
3969 }
3970 // In the following Evaluate methods, a HCompare instruction has
3971 // been merged into this HEqual instruction; evaluate it as
3972 // `Compare(x, y) == 0`.
3973 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
3974 return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0));
3975 }
3976 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
3977 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0));
3978 }
3979 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
3980 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0));
3981 }
3982
3983 DECLARE_INSTRUCTION(Equal);
3984
3985 IfCondition GetCondition() const override {
3986 return kCondEQ;
3987 }
3988
3989 IfCondition GetOppositeCondition() const override {
3990 return kCondNE;
3991 }
3992
3993 protected:
3994 DEFAULT_COPY_CONSTRUCTOR(Equal);
3995
3996 private:
3997 template <typename T> static bool Compute(T x, T y) { return x == y; }
3998 };
3999
4000 class HNotEqual final : public HCondition {
4001 public:
4002 HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
4003 : HCondition(kNotEqual, first, second, dex_pc) {
4004 }
4005
4006 bool IsCommutative() const override { return true; }
4007
4008 HConstant* Evaluate([[maybe_unused]] HNullConstant* x,
4009 [[maybe_unused]] HNullConstant* y) const override {
4010 return MakeConstantCondition(false);
4011 }
4012 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4013 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()));
4014 }
4015 // In the following Evaluate methods, a HCompare instruction has
4016 // been merged into this HNotEqual instruction; evaluate it as
4017 // `Compare(x, y) != 0`.
4018 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4019 return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0));
4020 }
4021 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
4022 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0));
4023 }
4024 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
4025 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0));
4026 }
4027
4028 DECLARE_INSTRUCTION(NotEqual);
4029
4030 IfCondition GetCondition() const override {
4031 return kCondNE;
4032 }
4033
4034 IfCondition GetOppositeCondition() const override {
4035 return kCondEQ;
4036 }
4037
4038 protected:
4039 DEFAULT_COPY_CONSTRUCTOR(NotEqual);
4040
4041 private:
4042 template <typename T> static bool Compute(T x, T y) { return x != y; }
4043 };
4044
4045 class HLessThan final : public HCondition {
4046 public:
4047 HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
4048 : HCondition(kLessThan, first, second, dex_pc) {
4049 }
4050
4051 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4052 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()));
4053 }
4054 // In the following Evaluate methods, a HCompare instruction has
4055 // been merged into this HLessThan instruction; evaluate it as
4056 // `Compare(x, y) < 0`.
4057 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4058 return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0));
4059 }
4060 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
4061 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0));
4062 }
4063 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
4064 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0));
4065 }
4066
4067 DECLARE_INSTRUCTION(LessThan);
4068
4069 IfCondition GetCondition() const override {
4070 return kCondLT;
4071 }
4072
4073 IfCondition GetOppositeCondition() const override {
4074 return kCondGE;
4075 }
4076
4077 protected:
4078 DEFAULT_COPY_CONSTRUCTOR(LessThan);
4079
4080 private:
4081 template <typename T> static bool Compute(T x, T y) { return x < y; }
4082 };
4083
4084 class HLessThanOrEqual final : public HCondition {
4085 public:
4086 HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
4087 : HCondition(kLessThanOrEqual, first, second, dex_pc) {
4088 }
4089
4090 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4091 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()));
4092 }
4093 // In the following Evaluate methods, a HCompare instruction has
4094 // been merged into this HLessThanOrEqual instruction; evaluate it as
4095 // `Compare(x, y) <= 0`.
4096 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4097 return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0));
4098 }
4099 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
4100 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0));
4101 }
4102 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
4103 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0));
4104 }
4105
4106 DECLARE_INSTRUCTION(LessThanOrEqual);
4107
4108 IfCondition GetCondition() const override {
4109 return kCondLE;
4110 }
4111
4112 IfCondition GetOppositeCondition() const override {
4113 return kCondGT;
4114 }
4115
4116 protected:
4117 DEFAULT_COPY_CONSTRUCTOR(LessThanOrEqual);
4118
4119 private:
4120 template <typename T> static bool Compute(T x, T y) { return x <= y; }
4121 };
4122
4123 class HGreaterThan final : public HCondition {
4124 public:
4125 HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
4126 : HCondition(kGreaterThan, first, second, dex_pc) {
4127 }
4128
4129 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4130 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()));
4131 }
4132 // In the following Evaluate methods, a HCompare instruction has
4133 // been merged into this HGreaterThan instruction; evaluate it as
4134 // `Compare(x, y) > 0`.
4135 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4136 return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0));
4137 }
4138 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
4139 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0));
4140 }
4141 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
4142 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0));
4143 }
4144
4145 DECLARE_INSTRUCTION(GreaterThan);
4146
4147 IfCondition GetCondition() const override {
4148 return kCondGT;
4149 }
4150
4151 IfCondition GetOppositeCondition() const override {
4152 return kCondLE;
4153 }
4154
4155 protected:
4156 DEFAULT_COPY_CONSTRUCTOR(GreaterThan);
4157
4158 private:
4159 template <typename T> static bool Compute(T x, T y) { return x > y; }
4160 };
4161
4162 class HGreaterThanOrEqual final : public HCondition {
4163 public:
4164 HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
4165 : HCondition(kGreaterThanOrEqual, first, second, dex_pc) {
4166 }
4167
4168 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4169 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()));
4170 }
4171 // In the following Evaluate methods, a HCompare instruction has
4172 // been merged into this HGreaterThanOrEqual instruction; evaluate it as
4173 // `Compare(x, y) >= 0`.
4174 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4175 return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0));
4176 }
4177 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
4178 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0));
4179 }
4180 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
4181 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0));
4182 }
4183
4184 DECLARE_INSTRUCTION(GreaterThanOrEqual);
4185
4186 IfCondition GetCondition() const override {
4187 return kCondGE;
4188 }
4189
4190 IfCondition GetOppositeCondition() const override {
4191 return kCondLT;
4192 }
4193
4194 protected:
4195 DEFAULT_COPY_CONSTRUCTOR(GreaterThanOrEqual);
4196
4197 private:
4198 template <typename T> static bool Compute(T x, T y) { return x >= y; }
4199 };
4200
4201 class HBelow final : public HCondition {
4202 public:
4203 HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
4204 : HCondition(kBelow, first, second, dex_pc) {
4205 }
4206
4207 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4208 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()));
4209 }
4210 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4211 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()));
4212 }
4213
4214 DECLARE_INSTRUCTION(Below);
4215
4216 IfCondition GetCondition() const override {
4217 return kCondB;
4218 }
4219
4220 IfCondition GetOppositeCondition() const override {
4221 return kCondAE;
4222 }
4223
4224 protected:
4225 DEFAULT_COPY_CONSTRUCTOR(Below);
4226
4227 private:
4228 template <typename T> static bool Compute(T x, T y) {
4229 return MakeUnsigned(x) < MakeUnsigned(y);
4230 }
4231 };
4232
4233 class HBelowOrEqual final : public HCondition {
4234 public:
4235 HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
4236 : HCondition(kBelowOrEqual, first, second, dex_pc) {
4237 }
4238
4239 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4240 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()));
4241 }
4242 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4243 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()));
4244 }
4245
4246 DECLARE_INSTRUCTION(BelowOrEqual);
4247
4248 IfCondition GetCondition() const override {
4249 return kCondBE;
4250 }
4251
4252 IfCondition GetOppositeCondition() const override {
4253 return kCondA;
4254 }
4255
4256 protected:
4257 DEFAULT_COPY_CONSTRUCTOR(BelowOrEqual);
4258
4259 private:
4260 template <typename T> static bool Compute(T x, T y) {
4261 return MakeUnsigned(x) <= MakeUnsigned(y);
4262 }
4263 };
4264
4265 class HAbove final : public HCondition {
4266 public:
4267 HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
4268 : HCondition(kAbove, first, second, dex_pc) {
4269 }
4270
4271 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4272 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()));
4273 }
4274 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4275 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()));
4276 }
4277
4278 DECLARE_INSTRUCTION(Above);
4279
4280 IfCondition GetCondition() const override {
4281 return kCondA;
4282 }
4283
4284 IfCondition GetOppositeCondition() const override {
4285 return kCondBE;
4286 }
4287
4288 protected:
4289 DEFAULT_COPY_CONSTRUCTOR(Above);
4290
4291 private:
4292 template <typename T> static bool Compute(T x, T y) {
4293 return MakeUnsigned(x) > MakeUnsigned(y);
4294 }
4295 };
4296
4297 class HAboveOrEqual final : public HCondition {
4298 public:
4299 HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
4300 : HCondition(kAboveOrEqual, first, second, dex_pc) {
4301 }
4302
4303 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4304 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()));
4305 }
4306 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4307 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()));
4308 }
4309
4310 DECLARE_INSTRUCTION(AboveOrEqual);
4311
4312 IfCondition GetCondition() const override {
4313 return kCondAE;
4314 }
4315
4316 IfCondition GetOppositeCondition() const override {
4317 return kCondB;
4318 }
4319
4320 protected:
4321 DEFAULT_COPY_CONSTRUCTOR(AboveOrEqual);
4322
4323 private:
4324 template <typename T> static bool Compute(T x, T y) {
4325 return MakeUnsigned(x) >= MakeUnsigned(y);
4326 }
4327 };
4328
4329 // Instruction to check how two inputs compare to each other.
4330 // Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1.
4331 class HCompare final : public HBinaryOperation {
4332 public:
4333 // Note that `comparison_type` is the type of the comparison performed
4334 // between the comparison's inputs, not the type of the instantiated
4335 // HCompare instruction (which is always DataType::Type::kInt32).
4336 HCompare(DataType::Type comparison_type,
4337 HInstruction* first,
4338 HInstruction* second,
4339 ComparisonBias bias,
4340 uint32_t dex_pc)
4341 : HBinaryOperation(kCompare,
4342 DataType::Type::kInt32,
4343 first,
4344 second,
4345 SideEffectsForArchRuntimeCalls(comparison_type),
4346 dex_pc) {
4347 SetPackedField<ComparisonBiasField>(bias);
4348 SetPackedField<ComparisonTypeField>(comparison_type);
4349 }
4350
4351 template <typename T>
4352 int32_t Compute(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }
4353
4354 template <typename T>
4355 int32_t ComputeFP(T x, T y) const {
4356 DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
4357 DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
4358 // Handle the bias.
4359 return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compute(x, y);
4360 }
4361
4362 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4363 // Note that there is no "cmp-int" Dex instruction so we shouldn't
4364 // reach this code path when processing a freshly built HIR
4365 // graph. However HCompare integer instructions can be synthesized
4366 // by the instruction simplifier to implement IntegerCompare and
4367 // IntegerSignum intrinsics, so we have to handle this case.
4368 const int32_t value = DataType::IsUnsignedType(GetComparisonType()) ?
4369 Compute(x->GetValueAsUint64(), y->GetValueAsUint64()) :
4370 Compute(x->GetValue(), y->GetValue());
4371 return MakeConstantComparison(value);
4372 }
4373 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4374 const int32_t value = DataType::IsUnsignedType(GetComparisonType()) ?
4375 Compute(x->GetValueAsUint64(), y->GetValueAsUint64()) :
4376 Compute(x->GetValue(), y->GetValue());
4377 return MakeConstantComparison(value);
4378 }
4379 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
4380 return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()));
4381 }
4382 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
4383 return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()));
4384 }
4385
4386 bool InstructionDataEquals(const HInstruction* other) const override {
4387 return GetPackedFields() == other->AsCompare()->GetPackedFields();
4388 }
4389
4390 ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
4391
4392 DataType::Type GetComparisonType() const { return GetPackedField<ComparisonTypeField>(); }
4393
4394 void SetComparisonType(DataType::Type new_type) { SetPackedField<ComparisonTypeField>(new_type); }
4395
4396 // Does this compare instruction have a "gt bias" (vs an "lt bias")?
4397 // Only meaningful for floating-point comparisons.
4398 bool IsGtBias() const {
4399 DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
4400 return GetBias() == ComparisonBias::kGtBias;
4401 }
4402
4403 static SideEffects SideEffectsForArchRuntimeCalls([[maybe_unused]] DataType::Type type) {
4404 // Comparisons do not require a runtime call in any back end.
4405 return SideEffects::None();
4406 }
4407
4408 DECLARE_INSTRUCTION(Compare);
4409
4410 protected:
4411 static constexpr size_t kFieldComparisonBias = kNumberOfGenericPackedBits;
4412 static constexpr size_t kFieldComparisonBiasSize =
4413 MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
4414 static constexpr size_t kFieldComparisonType = kFieldComparisonBias + kFieldComparisonBiasSize;
4415 static constexpr size_t kFieldComparisonTypeSize =
4416 MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
4417 static constexpr size_t kNumberOfComparePackedBits =
4418 kFieldComparisonType + kFieldComparisonTypeSize;
4419 static_assert(kNumberOfComparePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
4420 using ComparisonBiasField =
4421 BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;
4422 using ComparisonTypeField =
4423 BitField<DataType::Type, kFieldComparisonType, kFieldComparisonTypeSize>;
4424
4425 // Return an integer constant containing the result of a comparison evaluated at compile time.
4426 HIntConstant* MakeConstantComparison(int32_t value) const {
4427 DCHECK(value == -1 || value == 0 || value == 1) << value;
4428 return GetBlock()->GetGraph()->GetIntConstant(value);
4429 }
4430
4431 DEFAULT_COPY_CONSTRUCTOR(Compare);
4432 };
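// A short sketch of the bias handling in HCompare::ComputeFP() above: when
// either input is NaN the comparison is unordered and the bias decides the
// result; for ordered inputs the bias is irrelevant.
//
//   ComputeFP(NAN, 0.0f)   // 1 under ComparisonBias::kGtBias, -1 under kLtBias
//   ComputeFP(1.0f, 0.0f)  // 1 regardless of bias (falls through to Compute())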
4433
4434 class HNewInstance final : public HExpression<1> {
4435 public:
4436 HNewInstance(HInstruction* cls,
4437 uint32_t dex_pc,
4438 dex::TypeIndex type_index,
4439 const DexFile& dex_file,
4440 bool finalizable,
4441 QuickEntrypointEnum entrypoint)
4442 : HExpression(kNewInstance,
4443 DataType::Type::kReference,
4444 SideEffects::CanTriggerGC(),
4445 dex_pc),
4446 type_index_(type_index),
4447 dex_file_(dex_file),
4448 entrypoint_(entrypoint) {
4449 SetPackedFlag<kFlagFinalizable>(finalizable);
4450 SetPackedFlag<kFlagPartialMaterialization>(false);
4451 SetRawInputAt(0, cls);
4452 }
4453
4454 bool IsClonable() const override { return true; }
4455
4456 void SetPartialMaterialization() {
4457 SetPackedFlag<kFlagPartialMaterialization>(true);
4458 }
4459
4460 dex::TypeIndex GetTypeIndex() const { return type_index_; }
4461 const DexFile& GetDexFile() const { return dex_file_; }
4462
4463 // Calls runtime so needs an environment.
4464 bool NeedsEnvironment() const override { return true; }
4465
4466 // Can throw an error, e.g. when out of memory or when the class is not instantiable or accessible.
4467 bool CanThrow() const override { return true; }
4468 bool OnlyThrowsAsyncExceptions() const override {
4469 return !IsFinalizable() && !NeedsChecks();
4470 }
4471
4472 bool NeedsChecks() const {
4473 return entrypoint_ == kQuickAllocObjectWithChecks;
4474 }
4475
4476 bool IsFinalizable() const { return GetPackedFlag<kFlagFinalizable>(); }
4477
4478 bool CanBeNull() const override { return false; }
4479
4480 bool IsPartialMaterialization() const {
4481 return GetPackedFlag<kFlagPartialMaterialization>();
4482 }
4483
4484 QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }
4485
4486 void SetEntrypoint(QuickEntrypointEnum entrypoint) {
4487 entrypoint_ = entrypoint;
4488 }
4489
4490 HLoadClass* GetLoadClass() const {
4491 HInstruction* input = InputAt(0);
4492 if (input->IsClinitCheck()) {
4493 input = input->InputAt(0);
4494 }
4495 DCHECK(input->IsLoadClass());
4496 return input->AsLoadClass();
4497 }
4498
4499 bool IsStringAlloc() const;
4500
4501 DECLARE_INSTRUCTION(NewInstance);
4502
4503 protected:
4504 DEFAULT_COPY_CONSTRUCTOR(NewInstance);
4505
4506 private:
4507 static constexpr size_t kFlagFinalizable = kNumberOfGenericPackedBits;
4508 static constexpr size_t kFlagPartialMaterialization = kFlagFinalizable + 1;
4509 static constexpr size_t kNumberOfNewInstancePackedBits = kFlagPartialMaterialization + 1;
4510 static_assert(kNumberOfNewInstancePackedBits <= kMaxNumberOfPackedBits,
4511 "Too many packed fields.");
4512
4513 const dex::TypeIndex type_index_;
4514 const DexFile& dex_file_;
4515 QuickEntrypointEnum entrypoint_;
4516 };
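// The two input shapes accepted by HNewInstance::GetLoadClass() above, sketched:
//
//   HLoadClass --> HNewInstance                    // returns InputAt(0) directly
//   HLoadClass --> HClinitCheck --> HNewInstance   // unwraps the HClinitCheck first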
4517
4518 enum IntrinsicNeedsEnvironment {
4519 kNoEnvironment, // Intrinsic does not require an environment.
4520 kNeedsEnvironment // Intrinsic requires an environment.
4521 };
4522
4523 enum IntrinsicSideEffects {
4524 kNoSideEffects, // Intrinsic does not have any heap memory side effects.
4525 kReadSideEffects, // Intrinsic may read heap memory.
4526 kWriteSideEffects, // Intrinsic may write heap memory.
4527 kAllSideEffects // Intrinsic may read or write heap memory, or trigger GC.
4528 };
4529
4530 enum IntrinsicExceptions {
4531 kNoThrow, // Intrinsic does not throw any exceptions.
4532 kCanThrow // Intrinsic may throw exceptions.
4533 };
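// These three enums parameterize HInvoke::SetIntrinsic() (declared below). A
// hedged usage sketch; the chosen intrinsic is only an example:
//
//   invoke->SetIntrinsic(Intrinsics::kMathAbsInt,
//                        kNoEnvironment,   // no runtime call, so no environment needed
//                        kNoSideEffects,   // reads/writes no heap memory
//                        kNoThrow);        // cannot throw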
4534
4535 // Determines how to load an ArtMethod*.
4536 enum class MethodLoadKind {
4537 // Use a String init ArtMethod* loaded from Thread entrypoints.
4538 kStringInit,
4539
4540 // Use the method's own ArtMethod* loaded by the register allocator.
4541 kRecursive,
4542
4543 // Use PC-relative boot image ArtMethod* address that will be known at link time.
4544 // Used for boot image methods referenced by boot image code.
4545 kBootImageLinkTimePcRelative,
4546
4547 // Load from a boot image entry in the .data.img.rel.ro using a PC-relative load.
4548 // Used for app->boot calls with relocatable image.
4549 kBootImageRelRo,
4550
4551 // Load from an app image entry in the .data.img.rel.ro using a PC-relative load.
4552 // Used for app image methods referenced by apps in AOT-compiled code.
4553 kAppImageRelRo,
4554
4555 // Load from an entry in the .bss section using a PC-relative load.
4556 // Used for methods outside boot image referenced by AOT-compiled app and boot image code.
4557 kBssEntry,
4558
4559 // Use ArtMethod* at a known address, embed the direct address in the code.
4560 // Used for JIT-compiled calls.
4561 kJitDirectAddress,
4562
4563 // Make a runtime call to resolve and call the method. This is the last-resort kind,
4564 // used when other kinds are unimplemented on a particular architecture.
4565 kRuntimeCall,
4566 };
4567
4568 // Determines the location of the code pointer of an invoke.
4569 enum class CodePtrLocation {
4570 // Recursive call, use local PC-relative call instruction.
4571 kCallSelf,
4572
4573 // Use the native pointer from the ArtMethod*.
4574 // Used for @CriticalNative to avoid going through the compiled stub. This call goes through
4575 // a special resolution stub if the class is not initialized or no native code is registered.
4576 kCallCriticalNative,
4577
4578 // Use code pointer from the ArtMethod*.
4579 // Used when we don't know the target code. This is also the last-resort kind, used
4580 // when other kinds are unimplemented or impractical (i.e. slow) on a particular architecture.
4581 kCallArtMethod,
4582 };
4583
4584 static inline bool IsPcRelativeMethodLoadKind(MethodLoadKind load_kind) {
4585 return load_kind == MethodLoadKind::kBootImageLinkTimePcRelative ||
4586 load_kind == MethodLoadKind::kBootImageRelRo ||
4587 load_kind == MethodLoadKind::kAppImageRelRo ||
4588 load_kind == MethodLoadKind::kBssEntry;
4589 }
4590
4591 class HInvoke : public HVariableInputSizeInstruction {
4592 public:
4593 bool NeedsEnvironment() const override;
4594
4595 void SetArgumentAt(size_t index, HInstruction* argument) {
4596 SetRawInputAt(index, argument);
4597 }
4598
4599 // Return the number of arguments. This number can be lower than
4600 // the number of inputs returned by InputCount(), as some invoke
4601 // instructions (e.g. HInvokeStaticOrDirect) can have non-argument
4602 // inputs at the end of their list of inputs.
4603 uint32_t GetNumberOfArguments() const { return number_of_arguments_; }
4604
4605 // Return the number of outgoing vregs.
4606 uint32_t GetNumberOfOutVRegs() const { return number_of_out_vregs_; }
4607
4608 InvokeType GetInvokeType() const {
4609 return GetPackedField<InvokeTypeField>();
4610 }
4611
4612 Intrinsics GetIntrinsic() const {
4613 return intrinsic_;
4614 }
4615
4616 void SetIntrinsic(Intrinsics intrinsic,
4617 IntrinsicNeedsEnvironment needs_env,
4618 IntrinsicSideEffects side_effects,
4619 IntrinsicExceptions exceptions);
4620
4621 bool IsFromInlinedInvoke() const {
4622 return GetEnvironment()->IsFromInlinedInvoke();
4623 }
4624
4625 void SetCanThrow(bool can_throw) { SetPackedFlag<kFlagCanThrow>(can_throw); }
4626
4627 bool CanThrow() const override { return GetPackedFlag<kFlagCanThrow>(); }
4628
4629 void SetAlwaysThrows(bool always_throws) { SetPackedFlag<kFlagAlwaysThrows>(always_throws); }
4630
4631 bool AlwaysThrows() const override final { return GetPackedFlag<kFlagAlwaysThrows>(); }
4632
4633 bool CanBeMoved() const override { return IsIntrinsic() && !DoesAnyWrite(); }
4634
4635 bool CanBeNull() const override;
4636
4637 bool InstructionDataEquals(const HInstruction* other) const override {
4638 return intrinsic_ != Intrinsics::kNone && intrinsic_ == other->AsInvoke()->intrinsic_;
4639 }
4640
4641 uint32_t* GetIntrinsicOptimizations() {
4642 return &intrinsic_optimizations_;
4643 }
4644
4645 const uint32_t* GetIntrinsicOptimizations() const {
4646 return &intrinsic_optimizations_;
4647 }
4648
4649 bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }
4650
4651 ArtMethod* GetResolvedMethod() const { return resolved_method_; }
4652 void SetResolvedMethod(ArtMethod* method, bool enable_intrinsic_opt);
4653
4654 MethodReference GetMethodReference() const { return method_reference_; }
4655
4656 const MethodReference GetResolvedMethodReference() const {
4657 return resolved_method_reference_;
4658 }
4659
4660 DECLARE_ABSTRACT_INSTRUCTION(Invoke);
4661
4662 protected:
4663 static constexpr size_t kFieldInvokeType = kNumberOfGenericPackedBits;
4664 static constexpr size_t kFieldInvokeTypeSize =
4665 MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
4666 static constexpr size_t kFlagCanThrow = kFieldInvokeType + kFieldInvokeTypeSize;
4667 static constexpr size_t kFlagAlwaysThrows = kFlagCanThrow + 1;
4668 static constexpr size_t kNumberOfInvokePackedBits = kFlagAlwaysThrows + 1;
4669 static_assert(kNumberOfInvokePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
4670 using InvokeTypeField = BitField<InvokeType, kFieldInvokeType, kFieldInvokeTypeSize>;
4671
4672 HInvoke(InstructionKind kind,
4673 ArenaAllocator* allocator,
4674 uint32_t number_of_arguments,
4675 uint32_t number_of_out_vregs,
4676 uint32_t number_of_other_inputs,
4677 DataType::Type return_type,
4678 uint32_t dex_pc,
4679 MethodReference method_reference,
4680 ArtMethod* resolved_method,
4681 MethodReference resolved_method_reference,
4682 InvokeType invoke_type,
4683 bool enable_intrinsic_opt)
4684 : HVariableInputSizeInstruction(
4685 kind,
4686 return_type,
4687 SideEffects::AllExceptGCDependency(), // Assume write/read on all fields/arrays.
4688 dex_pc,
4689 allocator,
4690 number_of_arguments + number_of_other_inputs,
4691 kArenaAllocInvokeInputs),
4692 method_reference_(method_reference),
4693 resolved_method_reference_(resolved_method_reference),
4694 number_of_arguments_(dchecked_integral_cast<uint16_t>(number_of_arguments)),
4695 number_of_out_vregs_(dchecked_integral_cast<uint16_t>(number_of_out_vregs)),
4696 intrinsic_(Intrinsics::kNone),
4697 intrinsic_optimizations_(0) {
4698 SetPackedField<InvokeTypeField>(invoke_type);
4699 SetPackedFlag<kFlagCanThrow>(true);
4700 SetResolvedMethod(resolved_method, enable_intrinsic_opt);
4701 }
4702
4703 DEFAULT_COPY_CONSTRUCTOR(Invoke);
4704
4705 ArtMethod* resolved_method_;
4706 const MethodReference method_reference_;
4707 // Cached values of the resolved method, to avoid needing the mutator lock.
4708 const MethodReference resolved_method_reference_;
4709
4710 uint16_t number_of_arguments_;
4711 uint16_t number_of_out_vregs_;
4712
4713 Intrinsics intrinsic_;
4714
4715 // A magic word holding optimizations for intrinsics. See intrinsics.h.
4716 uint32_t intrinsic_optimizations_;
4717 };
4718
4719 class HInvokeUnresolved final : public HInvoke {
4720 public:
4721 HInvokeUnresolved(ArenaAllocator* allocator,
4722 uint32_t number_of_arguments,
4723 uint32_t number_of_out_vregs,
4724 DataType::Type return_type,
4725 uint32_t dex_pc,
4726 MethodReference method_reference,
4727 InvokeType invoke_type)
4728 : HInvoke(kInvokeUnresolved,
4729 allocator,
4730 number_of_arguments,
4731 number_of_out_vregs,
4732 /* number_of_other_inputs= */ 0u,
4733 return_type,
4734 dex_pc,
4735 method_reference,
4736 nullptr,
4737 MethodReference(nullptr, 0u),
4738 invoke_type,
4739 /* enable_intrinsic_opt= */ false) {
4740 }
4741
4742 bool IsClonable() const override { return true; }
4743
4744 DECLARE_INSTRUCTION(InvokeUnresolved);
4745
4746 protected:
4747 DEFAULT_COPY_CONSTRUCTOR(InvokeUnresolved);
4748 };
4749
4750 class HInvokePolymorphic final : public HInvoke {
4751 public:
4752 HInvokePolymorphic(ArenaAllocator* allocator,
4753 uint32_t number_of_arguments,
4754 uint32_t number_of_out_vregs,
4755 uint32_t number_of_other_inputs,
4756 DataType::Type return_type,
4757 uint32_t dex_pc,
4758 MethodReference method_reference,
4759 // resolved_method is the ArtMethod object corresponding to the polymorphic
4760 // method (e.g. VarHandle.get), resolved using the class linker. It is needed
4761 // to pass intrinsic information to the HInvokePolymorphic node.
4762 ArtMethod* resolved_method,
4763 MethodReference resolved_method_reference,
4764 dex::ProtoIndex proto_idx)
4765 : HInvoke(kInvokePolymorphic,
4766 allocator,
4767 number_of_arguments,
4768 number_of_out_vregs,
4769 number_of_other_inputs,
4770 return_type,
4771 dex_pc,
4772 method_reference,
4773 resolved_method,
4774 resolved_method_reference,
4775 kPolymorphic,
4776 /* enable_intrinsic_opt= */ true),
4777 proto_idx_(proto_idx) {}
4778
4779 bool IsClonable() const override { return true; }
4780
4781 dex::ProtoIndex GetProtoIndex() { return proto_idx_; }
4782
4783 bool IsMethodHandleInvokeExact() const {
4784 return GetIntrinsic() == Intrinsics::kMethodHandleInvokeExact;
4785 }
4786
4787 bool CanTargetInstanceMethod() const {
4788 DCHECK(IsMethodHandleInvokeExact());
4789 return GetNumberOfArguments() >= 2 &&
4790 InputAt(1)->GetType() == DataType::Type::kReference;
4791 }
4792
4793 DECLARE_INSTRUCTION(InvokePolymorphic);
4794
4795 protected:
4796 dex::ProtoIndex proto_idx_;
4797 DEFAULT_COPY_CONSTRUCTOR(InvokePolymorphic);
4798 };
4799
4800 class HInvokeCustom final : public HInvoke {
4801 public:
4802 HInvokeCustom(ArenaAllocator* allocator,
4803 uint32_t number_of_arguments,
4804 uint32_t number_of_out_vregs,
4805 uint32_t call_site_index,
4806 DataType::Type return_type,
4807 uint32_t dex_pc,
4808 MethodReference method_reference,
4809 bool enable_intrinsic_opt)
4810 : HInvoke(kInvokeCustom,
4811 allocator,
4812 number_of_arguments,
4813 number_of_out_vregs,
4814 /* number_of_other_inputs= */ 0u,
4815 return_type,
4816 dex_pc,
4817 method_reference,
4818 /* resolved_method= */ nullptr,
4819 MethodReference(nullptr, 0u),
4820 kStatic,
4821 enable_intrinsic_opt),
4822 call_site_index_(call_site_index) {
4823 }
4824
4825 uint32_t GetCallSiteIndex() const { return call_site_index_; }
4826
4827 bool IsClonable() const override { return true; }
4828
4829 DECLARE_INSTRUCTION(InvokeCustom);
4830
4831 protected:
4832 DEFAULT_COPY_CONSTRUCTOR(InvokeCustom);
4833
4834 private:
4835 uint32_t call_site_index_;
4836 };
4837
4838 class HInvokeStaticOrDirect final : public HInvoke {
4839 public:
4840 // Requirements of this method call regarding the class
4841 // initialization (clinit) check of its declaring class.
4842 enum class ClinitCheckRequirement {  // Comment marker that keeps generate-operator-out.py from processing this enum.
4843 kNone, // Class already initialized.
4844 kExplicit, // Static call having explicit clinit check as last input.
4845 kImplicit, // Static call implicitly requiring a clinit check.
4846 kLast = kImplicit
4847 };
4848
4849 struct DispatchInfo {
4850 MethodLoadKind method_load_kind;
4851 CodePtrLocation code_ptr_location;
4852 // The method load data holds:
4853 // - the thread entrypoint offset for kStringInit, if this is a string init invoke;
4854 // note that there are multiple string init methods, each having its own offset.
4855 // - the method address for kJitDirectAddress.
4856 uint64_t method_load_data;
4857 };
4858
4859 HInvokeStaticOrDirect(ArenaAllocator* allocator,
4860 uint32_t number_of_arguments,
4861 uint32_t number_of_out_vregs,
4862 DataType::Type return_type,
4863 uint32_t dex_pc,
4864 MethodReference method_reference,
4865 ArtMethod* resolved_method,
4866 DispatchInfo dispatch_info,
4867 InvokeType invoke_type,
4868 MethodReference resolved_method_reference,
4869 ClinitCheckRequirement clinit_check_requirement,
4870 bool enable_intrinsic_opt)
4871 : HInvoke(kInvokeStaticOrDirect,
4872 allocator,
4873 number_of_arguments,
4874 number_of_out_vregs,
4875 // There is potentially one extra argument for the HCurrentMethod input,
4876 // and one other if the clinit check is explicit. These can be removed later.
4877 (NeedsCurrentMethodInput(dispatch_info) ? 1u : 0u) +
4878 (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u),
4879 return_type,
4880 dex_pc,
4881 method_reference,
4882 resolved_method,
4883 resolved_method_reference,
4884 invoke_type,
4885 enable_intrinsic_opt),
4886 dispatch_info_(dispatch_info) {
4887 SetPackedField<ClinitCheckRequirementField>(clinit_check_requirement);
4888 }
4889
4890 bool IsClonable() const override { return true; }
4891 bool NeedsBss() const override {
4892 return GetMethodLoadKind() == MethodLoadKind::kBssEntry;
4893 }
4894
4895 void SetDispatchInfo(DispatchInfo dispatch_info) {
4896 bool had_current_method_input = HasCurrentMethodInput();
4897 bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info);
4898
4899 // Using the current method is the default and once we find a better
4900 // method load kind, we should not go back to using the current method.
4901 DCHECK(had_current_method_input || !needs_current_method_input);
4902
4903 if (had_current_method_input && !needs_current_method_input) {
4904 DCHECK_EQ(InputAt(GetCurrentMethodIndex()), GetBlock()->GetGraph()->GetCurrentMethod());
4905 RemoveInputAt(GetCurrentMethodIndex());
4906 }
4907 dispatch_info_ = dispatch_info;
4908 }
4909
4910 DispatchInfo GetDispatchInfo() const {
4911 return dispatch_info_;
4912 }
4913
4914 using HInstruction::GetInputRecords; // Keep the const version visible.
4915 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() override {
4916 ArrayRef<HUserRecord<HInstruction*>> input_records = HInvoke::GetInputRecords();
4917 if (kIsDebugBuild && IsStaticWithExplicitClinitCheck()) {
4918 DCHECK(!input_records.empty());
4919 DCHECK_GT(input_records.size(), GetNumberOfArguments());
4920 HInstruction* last_input = input_records.back().GetInstruction();
4921 // Note: `last_input` may be null during arguments setup.
4922 if (last_input != nullptr) {
4923 // `last_input` is the last input of a static invoke marked as having
4924 // an explicit clinit check. It must either be:
4925 // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or
4926 // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation.
4927 DCHECK(last_input->IsClinitCheck() || last_input->IsLoadClass()) << last_input->DebugName();
4928 }
4929 }
4930 return input_records;
4931 }
4932
4933 bool CanDoImplicitNullCheckOn([[maybe_unused]] HInstruction* obj) const override {
4934 // We do not access the method via object reference, so we cannot do an implicit null check.
4935 // TODO: for intrinsics we can generate implicit null checks.
4936 return false;
4937 }
4938
4939 bool CanBeNull() const override;
4940
4941 MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
4942 CodePtrLocation GetCodePtrLocation() const {
4943 // We do CHA analysis after sharpening. When a method has CHA inlining, it
4944 // cannot call itself: if the CHA optimization turns out to be invalid, we
4945 // want to make sure the method is never executed again. So, while sharpening
4946 // can return kCallSelf, we bypass it here if there is a CHA optimization.
4947 if (dispatch_info_.code_ptr_location == CodePtrLocation::kCallSelf &&
4948 GetBlock()->GetGraph()->HasShouldDeoptimizeFlag()) {
4949 return CodePtrLocation::kCallArtMethod;
4950 } else {
4951 return dispatch_info_.code_ptr_location;
4952 }
4953 }
4954 bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
4955 bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
4956 bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kJitDirectAddress; }
4957 bool HasPcRelativeMethodLoadKind() const {
4958 return IsPcRelativeMethodLoadKind(GetMethodLoadKind());
4959 }
4960
4961 QuickEntrypointEnum GetStringInitEntryPoint() const {
4962 DCHECK(IsStringInit());
4963 return static_cast<QuickEntrypointEnum>(dispatch_info_.method_load_data);
4964 }
4965
4966 uint64_t GetMethodAddress() const {
4967 DCHECK(HasMethodAddress());
4968 return dispatch_info_.method_load_data;
4969 }
4970
4971 const DexFile& GetDexFileForPcRelativeDexCache() const;
4972
4973 ClinitCheckRequirement GetClinitCheckRequirement() const {
4974 return GetPackedField<ClinitCheckRequirementField>();
4975 }
4976
4977 // Is this instruction a call to a static method?
4978 bool IsStatic() const {
4979 return GetInvokeType() == kStatic;
4980 }
4981
4982 // Does this method load kind need the current method as an input?
4983 static bool NeedsCurrentMethodInput(DispatchInfo dispatch_info) {
4984 return dispatch_info.method_load_kind == MethodLoadKind::kRecursive ||
4985 dispatch_info.method_load_kind == MethodLoadKind::kRuntimeCall ||
4986 dispatch_info.code_ptr_location == CodePtrLocation::kCallCriticalNative;
4987 }
4988
4989 // Get the index of the current method input.
4990 size_t GetCurrentMethodIndex() const {
4991 DCHECK(HasCurrentMethodInput());
4992 return GetCurrentMethodIndexUnchecked();
4993 }
4994 size_t GetCurrentMethodIndexUnchecked() const {
4995 return GetNumberOfArguments();
4996 }
4997
4998 // Check if the method has a current method input.
4999 bool HasCurrentMethodInput() const {
5000 if (NeedsCurrentMethodInput(GetDispatchInfo())) {
5001 DCHECK(InputAt(GetCurrentMethodIndexUnchecked()) == nullptr || // During argument setup.
5002 InputAt(GetCurrentMethodIndexUnchecked())->IsCurrentMethod());
5003 return true;
5004 } else {
5005 DCHECK(InputCount() == GetCurrentMethodIndexUnchecked() ||
5006 InputAt(GetCurrentMethodIndexUnchecked()) == nullptr || // During argument setup.
5007 !InputAt(GetCurrentMethodIndexUnchecked())->IsCurrentMethod());
5008 return false;
5009 }
5010 }
5011
5012 // Get the index of the special input.
5013 size_t GetSpecialInputIndex() const {
5014 DCHECK(HasSpecialInput());
5015 return GetSpecialInputIndexUnchecked();
5016 }
5017 size_t GetSpecialInputIndexUnchecked() const {
5018 return GetNumberOfArguments() + (HasCurrentMethodInput() ? 1u : 0u);
5019 }
5020
5021 // Check if the method has a special input.
5022 bool HasSpecialInput() const {
5023 size_t other_inputs =
5024 GetSpecialInputIndexUnchecked() + (IsStaticWithExplicitClinitCheck() ? 1u : 0u);
5025 size_t input_count = InputCount();
5026 DCHECK_LE(input_count - other_inputs, 1u) << other_inputs << " " << input_count;
5027 return other_inputs != input_count;
5028 }
5029
5030 void AddSpecialInput(HInstruction* input) {
5031 // We allow only one special input.
5032 DCHECK(!HasSpecialInput());
5033 InsertInputAt(GetSpecialInputIndexUnchecked(), input);
5034 }
5035
5036 // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
5037 // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck)
5038 // instruction; only relevant for static calls with explicit clinit check.
5039 void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) {
5040 DCHECK(IsStaticWithExplicitClinitCheck());
5041 size_t last_input_index = inputs_.size() - 1u;
5042 HInstruction* last_input = inputs_.back().GetInstruction();
5043 DCHECK(last_input != nullptr);
5044 DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
5045 RemoveAsUserOfInput(last_input_index);
5046 inputs_.pop_back();
5047 SetPackedField<ClinitCheckRequirementField>(new_requirement);
5048 DCHECK(!IsStaticWithExplicitClinitCheck());
5049 }
5050
5051 // Is this a call to a static method whose declaring class has an
5052 // explicit initialization check in the graph?
5053 bool IsStaticWithExplicitClinitCheck() const {
5054 return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kExplicit);
5055 }
5056
5057 // Is this a call to a static method whose declaring class has an
5058 // implicit initialization check requirement?
5059 bool IsStaticWithImplicitClinitCheck() const {
5060 return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kImplicit);
5061 }
5062
5063 DECLARE_INSTRUCTION(InvokeStaticOrDirect);
5064
5065 protected:
5066 DEFAULT_COPY_CONSTRUCTOR(InvokeStaticOrDirect);
5067
5068 private:
5069 static constexpr size_t kFieldClinitCheckRequirement = kNumberOfInvokePackedBits;
5070 static constexpr size_t kFieldClinitCheckRequirementSize =
5071 MinimumBitsToStore(static_cast<size_t>(ClinitCheckRequirement::kLast));
5072 static constexpr size_t kNumberOfInvokeStaticOrDirectPackedBits =
5073 kFieldClinitCheckRequirement + kFieldClinitCheckRequirementSize;
5074 static_assert(kNumberOfInvokeStaticOrDirectPackedBits <= kMaxNumberOfPackedBits,
5075 "Too many packed fields.");
5076 using ClinitCheckRequirementField = BitField<ClinitCheckRequirement,
5077 kFieldClinitCheckRequirement,
5078 kFieldClinitCheckRequirementSize>;
5079
5080 DispatchInfo dispatch_info_;
5081 };
5082 std::ostream& operator<<(std::ostream& os, MethodLoadKind rhs);
5083 std::ostream& operator<<(std::ostream& os, CodePtrLocation rhs);
5084 std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs);
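// A sketch of the input layout implied by the HInvokeStaticOrDirect index
// helpers above (illustrative summary, not a normative spec):
//
//   [0, GetNumberOfArguments())   the arguments themselves
//   GetNumberOfArguments()        HCurrentMethod, if NeedsCurrentMethodInput()
//   GetSpecialInputIndex()        the special input (at most one), if present
//   last input                    HClinitCheck/HLoadClass for an explicit clinit check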
5085
5086 class HInvokeVirtual final : public HInvoke {
5087 public:
5088 HInvokeVirtual(ArenaAllocator* allocator,
5089 uint32_t number_of_arguments,
5090 uint32_t number_of_out_vregs,
5091 DataType::Type return_type,
5092 uint32_t dex_pc,
5093 MethodReference method_reference,
5094 ArtMethod* resolved_method,
5095 MethodReference resolved_method_reference,
5096 uint32_t vtable_index,
5097 bool enable_intrinsic_opt)
5098 : HInvoke(kInvokeVirtual,
5099 allocator,
5100 number_of_arguments,
5101 number_of_out_vregs,
5102 0u,
5103 return_type,
5104 dex_pc,
5105 method_reference,
5106 resolved_method,
5107 resolved_method_reference,
5108 kVirtual,
5109 enable_intrinsic_opt),
5110 vtable_index_(vtable_index) {
5111 }
5112
5113 bool IsClonable() const override { return true; }
5114
5115 bool CanDoImplicitNullCheckOn(HInstruction* obj) const override;
5116
5117 uint32_t GetVTableIndex() const { return vtable_index_; }
5118
5119 DECLARE_INSTRUCTION(InvokeVirtual);
5120
5121 protected:
5122 DEFAULT_COPY_CONSTRUCTOR(InvokeVirtual);
5123
5124 private:
5125 // Cached vtable index of the resolved method, to avoid needing the mutator lock.
5126 const uint32_t vtable_index_;
5127 };
5128
5129 class HInvokeInterface final : public HInvoke {
5130 public:
5131 HInvokeInterface(ArenaAllocator* allocator,
5132 uint32_t number_of_arguments,
5133 uint32_t number_of_out_vregs,
5134 DataType::Type return_type,
5135 uint32_t dex_pc,
5136 MethodReference method_reference,
5137 ArtMethod* resolved_method,
5138 MethodReference resolved_method_reference,
5139 uint32_t imt_index,
5140 MethodLoadKind load_kind,
5141 bool enable_intrinsic_opt)
5142 : HInvoke(kInvokeInterface,
5143 allocator,
5144 number_of_arguments + (NeedsCurrentMethod(load_kind) ? 1 : 0),
5145 number_of_out_vregs,
5146 0u,
5147 return_type,
5148 dex_pc,
5149 method_reference,
5150 resolved_method,
5151 resolved_method_reference,
5152 kInterface,
5153 enable_intrinsic_opt),
5154 imt_index_(imt_index),
5155 hidden_argument_load_kind_(load_kind) {
5156 }
5157
5158 static bool NeedsCurrentMethod(MethodLoadKind load_kind) {
5159 return load_kind == MethodLoadKind::kRecursive;
5160 }
5161
5162 bool IsClonable() const override { return true; }
5163 bool NeedsBss() const override {
5164 return GetHiddenArgumentLoadKind() == MethodLoadKind::kBssEntry;
5165 }
5166
5167 bool CanDoImplicitNullCheckOn(HInstruction* obj) const override {
5168 // TODO: Add implicit null checks in intrinsics.
5169 return (obj == InputAt(0)) && !IsIntrinsic();
5170 }
5171
5172 size_t GetSpecialInputIndex() const {
5173 return GetNumberOfArguments();
5174 }
5175
5176 void AddSpecialInput(HInstruction* input) {
5177 InsertInputAt(GetSpecialInputIndex(), input);
5178 }
5179
5180 uint32_t GetImtIndex() const { return imt_index_; }
5181 MethodLoadKind GetHiddenArgumentLoadKind() const { return hidden_argument_load_kind_; }
5182
5183 DECLARE_INSTRUCTION(InvokeInterface);
5184
5185 protected:
5186 DEFAULT_COPY_CONSTRUCTOR(InvokeInterface);
5187
5188 private:
5189 // Cached IMT index of the resolved method, to avoid needing the mutator lock.
5190 const uint32_t imt_index_;
5191
5192 // How the hidden argument (the interface method) is being loaded.
5193 const MethodLoadKind hidden_argument_load_kind_;
5194 };
5195
5196 class HNeg final : public HUnaryOperation {
5197 public:
5198 HNeg(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
5199 : HUnaryOperation(kNeg, result_type, input, dex_pc) {
5200 DCHECK_EQ(result_type, DataType::Kind(input->GetType()));
5201 }
5202
5203 template <typename T> static T Compute(T x) { return -x; }
5204
5205 HConstant* Evaluate(HIntConstant* x) const override {
5206 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()));
5207 }
5208 HConstant* Evaluate(HLongConstant* x) const override {
5209 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()));
5210 }
5211 HConstant* Evaluate(HFloatConstant* x) const override {
5212 return GetBlock()->GetGraph()->GetFloatConstant(Compute(x->GetValue()));
5213 }
5214 HConstant* Evaluate(HDoubleConstant* x) const override {
5215 return GetBlock()->GetGraph()->GetDoubleConstant(Compute(x->GetValue()));
5216 }
5217
5218 DECLARE_INSTRUCTION(Neg);
5219
5220 protected:
5221 DEFAULT_COPY_CONSTRUCTOR(Neg);
5222 };
5223
5224 class HNewArray final : public HExpression<2> {
5225 public:
5226 HNewArray(HInstruction* cls, HInstruction* length, uint32_t dex_pc, size_t component_size_shift)
5227 : HExpression(kNewArray, DataType::Type::kReference, SideEffects::CanTriggerGC(), dex_pc) {
5228 SetRawInputAt(0, cls);
5229 SetRawInputAt(1, length);
5230 SetPackedField<ComponentSizeShiftField>(component_size_shift);
5231 }
5232
5233 bool IsClonable() const override { return true; }
5234
5235 // Calls runtime so needs an environment.
5236 bool NeedsEnvironment() const override { return true; }
5237
5238 // May throw NegativeArraySizeException, OutOfMemoryError, etc.
5239 bool CanThrow() const override { return true; }
5240
5241 bool CanBeNull() const override { return false; }
5242
5243 HLoadClass* GetLoadClass() const {
5244 DCHECK(InputAt(0)->IsLoadClass());
5245 return InputAt(0)->AsLoadClass();
5246 }
5247
5248 HInstruction* GetLength() const {
5249 return InputAt(1);
5250 }
5251
5252 size_t GetComponentSizeShift() {
5253 return GetPackedField<ComponentSizeShiftField>();
5254 }
5255
5256 DECLARE_INSTRUCTION(NewArray);
5257
5258 protected:
5259 DEFAULT_COPY_CONSTRUCTOR(NewArray);
5260
5261 private:
5262 static constexpr size_t kFieldComponentSizeShift = kNumberOfGenericPackedBits;
5263 static constexpr size_t kFieldComponentSizeShiftSize = MinimumBitsToStore(3u);
5264 static constexpr size_t kNumberOfNewArrayPackedBits =
5265 kFieldComponentSizeShift + kFieldComponentSizeShiftSize;
5266 static_assert(kNumberOfNewArrayPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
5267 using ComponentSizeShiftField =
5268 BitField<size_t, kFieldComponentSizeShift, kFieldComponentSizeShiftSize>;
5269 };
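// An illustrative mapping for `component_size_shift` above, following primitive
// sizes (log2 of the component size in bytes):
//
//   boolean/byte arrays -> 0    char/short arrays  -> 1
//   int/float arrays    -> 2    long/double arrays -> 3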
5270
5271 class HAdd final : public HBinaryOperation {
5272 public:
5273 HAdd(DataType::Type result_type,
5274 HInstruction* left,
5275 HInstruction* right,
5276 uint32_t dex_pc = kNoDexPc)
5277 : HBinaryOperation(kAdd, result_type, left, right, SideEffects::None(), dex_pc) {
5278 }
5279
5280 bool IsCommutative() const override { return true; }
5281
5282 template <typename T> static T Compute(T x, T y) { return x + y; }
5283
5284 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5285 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue(), y->GetValue()));
5286 }
5287 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5288 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue(), y->GetValue()));
5289 }
5290 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
5291 return GetBlock()->GetGraph()->GetFloatConstant(Compute(x->GetValue(), y->GetValue()));
5292 }
5293 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
5294 return GetBlock()->GetGraph()->GetDoubleConstant(Compute(x->GetValue(), y->GetValue()));
5295 }
5296
5297 DECLARE_INSTRUCTION(Add);
5298
5299 protected:
5300 DEFAULT_COPY_CONSTRUCTOR(Add);
5301 };
5302
5303 class HSub final : public HBinaryOperation {
5304 public:
5305 HSub(DataType::Type result_type,
5306 HInstruction* left,
5307 HInstruction* right,
5308 uint32_t dex_pc = kNoDexPc)
5309 : HBinaryOperation(kSub, result_type, left, right, SideEffects::None(), dex_pc) {
5310 }
5311
5312 template <typename T> static T Compute(T x, T y) { return x - y; }
5313
5314 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5315 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue(), y->GetValue()));
5316 }
5317 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5318 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue(), y->GetValue()));
5319 }
5320 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
5321 return GetBlock()->GetGraph()->GetFloatConstant(Compute(x->GetValue(), y->GetValue()));
5322 }
5323 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
5324 return GetBlock()->GetGraph()->GetDoubleConstant(Compute(x->GetValue(), y->GetValue()));
5325 }
5326
5327 DECLARE_INSTRUCTION(Sub);
5328
5329 protected:
5330 DEFAULT_COPY_CONSTRUCTOR(Sub);
5331 };
5332
5333 class HMul final : public HBinaryOperation {
5334 public:
5335 HMul(DataType::Type result_type,
5336 HInstruction* left,
5337 HInstruction* right,
5338 uint32_t dex_pc = kNoDexPc)
5339 : HBinaryOperation(kMul, result_type, left, right, SideEffects::None(), dex_pc) {
5340 }
5341
5342 bool IsCommutative() const override { return true; }
5343
5344 template <typename T> static T Compute(T x, T y) { return x * y; }
5345
5346 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5347 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue(), y->GetValue()));
5348 }
5349 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5350 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue(), y->GetValue()));
5351 }
5352 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
5353 return GetBlock()->GetGraph()->GetFloatConstant(Compute(x->GetValue(), y->GetValue()));
5354 }
5355 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
5356 return GetBlock()->GetGraph()->GetDoubleConstant(Compute(x->GetValue(), y->GetValue()));
5357 }
5358
5359 DECLARE_INSTRUCTION(Mul);
5360
5361 protected:
5362 DEFAULT_COPY_CONSTRUCTOR(Mul);
5363 };
5364
5365 class HDiv final : public HBinaryOperation {
5366 public:
5367 HDiv(DataType::Type result_type,
5368 HInstruction* left,
5369 HInstruction* right,
5370 uint32_t dex_pc)
5371 : HBinaryOperation(kDiv, result_type, left, right, SideEffects::None(), dex_pc) {
5372 }
5373
5374 template <typename T>
5375 T ComputeIntegral(T x, T y) const {
5376 DCHECK(!DataType::IsFloatingPointType(GetType())) << GetType();
5377 // Our graph structure ensures we never have 0 for `y` during
5378 // constant folding.
5379 DCHECK_NE(y, 0);
5380 // Special case -1 to avoid getting a SIGFPE on x86(_64).
5381 return (y == -1) ? -x : x / y;
5382 }
5383
5384 template <typename T>
5385 T ComputeFP(T x, T y) const {
5386 DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
5387 return x / y;
5388 }
5389
5390 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5391 return GetBlock()->GetGraph()->GetIntConstant(ComputeIntegral(x->GetValue(), y->GetValue()));
5392 }
5393 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5394 return GetBlock()->GetGraph()->GetLongConstant(ComputeIntegral(x->GetValue(), y->GetValue()));
5395 }
5396 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
5397 return GetBlock()->GetGraph()->GetFloatConstant(ComputeFP(x->GetValue(), y->GetValue()));
5398 }
5399 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
5400 return GetBlock()->GetGraph()->GetDoubleConstant(ComputeFP(x->GetValue(), y->GetValue()));
5401 }
5402
5403 DECLARE_INSTRUCTION(Div);
5404
5405 protected:
5406 DEFAULT_COPY_CONSTRUCTOR(Div);
5407 };
5408
5409 class HRem final : public HBinaryOperation {
5410 public:
5411 HRem(DataType::Type result_type,
5412 HInstruction* left,
5413 HInstruction* right,
5414 uint32_t dex_pc)
5415 : HBinaryOperation(kRem, result_type, left, right, SideEffects::None(), dex_pc) {
5416 }
5417
5418 template <typename T>
5419 T ComputeIntegral(T x, T y) const {
5420 DCHECK(!DataType::IsFloatingPointType(GetType())) << GetType();
5421 // Our graph structure ensures we never have 0 for `y` during
5422 // constant folding.
5423 DCHECK_NE(y, 0);
5424 // Special case -1 to avoid getting a SIGFPE on x86(_64).
5425 return (y == -1) ? 0 : x % y;
5426 }
5427
5428 template <typename T>
5429 T ComputeFP(T x, T y) const {
5430 DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
5431 return std::fmod(x, y);
5432 }
5433
5434 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5435 return GetBlock()->GetGraph()->GetIntConstant(ComputeIntegral(x->GetValue(), y->GetValue()));
5436 }
5437 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5438 return GetBlock()->GetGraph()->GetLongConstant(ComputeIntegral(x->GetValue(), y->GetValue()));
5439 }
5440 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
5441 return GetBlock()->GetGraph()->GetFloatConstant(ComputeFP(x->GetValue(), y->GetValue()));
5442 }
5443 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
5444 return GetBlock()->GetGraph()->GetDoubleConstant(ComputeFP(x->GetValue(), y->GetValue()));
5445 }
5446
5447 DECLARE_INSTRUCTION(Rem);
5448
5449 protected:
5450 DEFAULT_COPY_CONSTRUCTOR(Rem);
5451 };
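// A worked example of the `-1` special case in HDiv/HRem above: on x86(_64),
// `idiv` raises SIGFPE for INT32_MIN / -1 because the true quotient, 2^31, is
// not representable in 32 bits. The special case instead matches Java semantics:
//
//   HDiv: ComputeIntegral(INT32_MIN, -1)  // -x, i.e. INT32_MIN (wraps around)
//   HRem: ComputeIntegral(INT32_MIN, -1)  // 0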
5452
5453 class HMin final : public HBinaryOperation {
5454 public:
5455 HMin(DataType::Type result_type,
5456 HInstruction* left,
5457 HInstruction* right,
5458 uint32_t dex_pc)
5459 : HBinaryOperation(kMin, result_type, left, right, SideEffects::None(), dex_pc) {}
5460
5461 bool IsCommutative() const override { return true; }
5462
5463 // Evaluation for integral values.
5464 template <typename T> static T ComputeIntegral(T x, T y) {
5465 return (x <= y) ? x : y;
5466 }
5467
5468 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5469 return GetBlock()->GetGraph()->GetIntConstant(ComputeIntegral(x->GetValue(), y->GetValue()));
5470 }
5471 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5472 return GetBlock()->GetGraph()->GetLongConstant(ComputeIntegral(x->GetValue(), y->GetValue()));
5473 }
5474 // TODO: Evaluation for floating-point values.
5475 HConstant* Evaluate([[maybe_unused]] HFloatConstant* x,
5476 [[maybe_unused]] HFloatConstant* y) const override {
5477 return nullptr;
5478 }
5479 HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x,
5480 [[maybe_unused]] HDoubleConstant* y) const override {
5481 return nullptr;
5482 }
5483
5484 DECLARE_INSTRUCTION(Min);
5485
5486 protected:
5487 DEFAULT_COPY_CONSTRUCTOR(Min);
5488 };
5489
5490 class HMax final : public HBinaryOperation {
5491 public:
5492 HMax(DataType::Type result_type,
5493 HInstruction* left,
5494 HInstruction* right,
5495 uint32_t dex_pc)
5496 : HBinaryOperation(kMax, result_type, left, right, SideEffects::None(), dex_pc) {}
5497
5498 bool IsCommutative() const override { return true; }
5499
5500 // Evaluation for integral values.
5501 template <typename T> static T ComputeIntegral(T x, T y) {
5502 return (x >= y) ? x : y;
5503 }
5504
5505 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5506 return GetBlock()->GetGraph()->GetIntConstant(ComputeIntegral(x->GetValue(), y->GetValue()));
5507 }
5508 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5509 return GetBlock()->GetGraph()->GetLongConstant(ComputeIntegral(x->GetValue(), y->GetValue()));
5510 }
5511 // TODO: Evaluation for floating-point values.
5512 HConstant* Evaluate([[maybe_unused]] HFloatConstant* x,
5513 [[maybe_unused]] HFloatConstant* y) const override {
5514 return nullptr;
5515 }
5516 HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x,
5517 [[maybe_unused]] HDoubleConstant* y) const override {
5518 return nullptr;
5519 }
5520
5521 DECLARE_INSTRUCTION(Max);
5522
5523 protected:
5524 DEFAULT_COPY_CONSTRUCTOR(Max);
5525 };
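// A note on the nullptr returns in HMin/HMax above: folding floating-point
// min/max is left as a TODO because the Java semantics are subtle, e.g.
// Math.min(-0.0, 0.0) must return -0.0 even though the two compare equal, and
// any NaN operand must produce NaN. Returning nullptr simply skips constant
// folding in those cases.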
5526
5527 class HAbs final : public HUnaryOperation {
5528 public:
5529 HAbs(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
5530 : HUnaryOperation(kAbs, result_type, input, dex_pc) {}
5531
5532 // Evaluation for integral values.
5533 template <typename T> static T ComputeIntegral(T x) {
5534 return x < 0 ? -x : x;
5535 }
5536
5537 // Evaluation for floating-point values.
5538 // Note: as a matter of "quality of implementation" rather than pure "spec compliance",
5539 // we require that Math.abs() clears the sign bit (but changes nothing else)
5540 // for all floating-point numbers, including NaN (though a signaling NaN may become quiet).
5541 // http://b/30758343
5542 template <typename T, typename S> static T ComputeFP(T x) {
5543 S bits = bit_cast<S, T>(x);
5544 return bit_cast<T, S>(bits & std::numeric_limits<S>::max());
5545 }
5546
5547 HConstant* Evaluate(HIntConstant* x) const override {
5548 return GetBlock()->GetGraph()->GetIntConstant(ComputeIntegral(x->GetValue()));
5549 }
5550 HConstant* Evaluate(HLongConstant* x) const override {
5551 return GetBlock()->GetGraph()->GetLongConstant(ComputeIntegral(x->GetValue()));
5552 }
5553 HConstant* Evaluate(HFloatConstant* x) const override {
5554 return GetBlock()->GetGraph()->GetFloatConstant(ComputeFP<float, int32_t>(x->GetValue()));
5555 }
5556 HConstant* Evaluate(HDoubleConstant* x) const override {
5557 return GetBlock()->GetGraph()->GetDoubleConstant(ComputeFP<double, int64_t>(x->GetValue()));
5558 }
5559
5560 DECLARE_INSTRUCTION(Abs);
5561
5562 protected:
5563 DEFAULT_COPY_CONSTRUCTOR(Abs);
5564 };
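// A worked example of HAbs::ComputeFP() above: std::numeric_limits<S>::max() is
// the all-ones pattern without the sign bit (0x7FFFFFFF for int32_t), so the
// AND clears exactly the sign bit:
//
//   ComputeFP<float, int32_t>(-0.0f)  // 0x80000000 & 0x7FFFFFFF -> +0.0f
//   ComputeFP<float, int32_t>(-2.5f)  // 0xC0200000 & 0x7FFFFFFF -> 2.5f
//   // A NaN stays a NaN; only its sign bit is cleared.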
5565
5566 class HDivZeroCheck final : public HExpression<1> {
5567 public:
5568 // `HDivZeroCheck` can trigger GC, as it may call the `ArithmeticException`
5569 // constructor. However, it can only do so on a fatal slow path, so execution never
5570 // returns to the instruction following this one; thus `SideEffects::None()` is used.
5571 HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
5572 : HExpression(kDivZeroCheck, value->GetType(), SideEffects::None(), dex_pc) {
5573 SetRawInputAt(0, value);
5574 }
5575
5576 bool IsClonable() const override { return true; }
5577 bool CanBeMoved() const override { return true; }
5578
5579 bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
5580 return true;
5581 }
5582
5583 bool NeedsEnvironment() const override { return true; }
5584 bool CanThrow() const override { return true; }
5585
5586 DECLARE_INSTRUCTION(DivZeroCheck);
5587
5588 protected:
5589 DEFAULT_COPY_CONSTRUCTOR(DivZeroCheck);
5590 };
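// An illustrative pairing (as produced by the graph builder): the zero check
// guards the division, which is what allows HDiv/HRem to DCHECK_NE(y, 0)
// during constant folding.
//
//   check = HDivZeroCheck(divisor)  // throws ArithmeticException when divisor == 0
//   HDiv(dividend, check)           // consumes the checked divisor as its right input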
5591
5592 class HShl final : public HBinaryOperation {
5593 public:
5594 HShl(DataType::Type result_type,
5595 HInstruction* value,
5596 HInstruction* distance,
5597 uint32_t dex_pc = kNoDexPc)
5598 : HBinaryOperation(kShl, result_type, value, distance, SideEffects::None(), dex_pc) {
5599 DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
5600 DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
5601 }
5602
5603 template <typename T>
5604 static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
5605 return value << (distance & max_shift_distance);
5606 }
5607
5608 HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const override {
5609 return GetBlock()->GetGraph()->GetIntConstant(
5610 Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance));
5611 }
5612 HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const override {
5613 return GetBlock()->GetGraph()->GetLongConstant(
5614 Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance));
5615 }
5616
5617 DECLARE_INSTRUCTION(Shl);
5618
5619 protected:
5620 DEFAULT_COPY_CONSTRUCTOR(Shl);
5621 };
5622
5623 class HShr final : public HBinaryOperation {
5624 public:
5625 HShr(DataType::Type result_type,
5626 HInstruction* value,
5627 HInstruction* distance,
5628 uint32_t dex_pc = kNoDexPc)
5629 : HBinaryOperation(kShr, result_type, value, distance, SideEffects::None(), dex_pc) {
5630 DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
5631 DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
5632 }
5633
5634 template <typename T>
5635 static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
5636 return value >> (distance & max_shift_distance);
5637 }
5638
5639 HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const override {
5640 return GetBlock()->GetGraph()->GetIntConstant(
5641 Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance));
5642 }
5643 HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const override {
5644 return GetBlock()->GetGraph()->GetLongConstant(
5645 Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance));
5646 }
5647
5648 DECLARE_INSTRUCTION(Shr);
5649
5650 protected:
5651 DEFAULT_COPY_CONSTRUCTOR(Shr);
5652 };
5653
5654 class HUShr final : public HBinaryOperation {
5655 public:
5656 HUShr(DataType::Type result_type,
5657 HInstruction* value,
5658 HInstruction* distance,
5659 uint32_t dex_pc = kNoDexPc)
5660 : HBinaryOperation(kUShr, result_type, value, distance, SideEffects::None(), dex_pc) {
5661 DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
5662 DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
5663 }
5664
5665 template <typename T>
5666 static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
5667 using V = std::make_unsigned_t<T>;
5668 V ux = static_cast<V>(value);
5669 return static_cast<T>(ux >> (distance & max_shift_distance));
5670 }
5671
5672 HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const override {
5673 return GetBlock()->GetGraph()->GetIntConstant(
5674 Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance));
5675 }
5676 HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const override {
5677 return GetBlock()->GetGraph()->GetLongConstant(
5678 Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance));
5679 }
5680
5681 DECLARE_INSTRUCTION(UShr);
5682
5683 protected:
5684 DEFAULT_COPY_CONSTRUCTOR(UShr);
5685 };
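// A worked example of the distance masking shared by HShl/HShr/HUShr above,
// matching Java's use of only the low 5 (int) or 6 (long) bits of the distance:
//
//   HShl::Compute<int32_t>(1, 33, kMaxIntShiftDistance)   // 33 & 31 == 1 -> 2
//   HShr::Compute<int32_t>(-8, 1, kMaxIntShiftDistance)   // arithmetic   -> -4
//   HUShr::Compute<int32_t>(-8, 1, kMaxIntShiftDistance)  // logical      -> 0x7FFFFFFC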
5686
5687 class HAnd final : public HBinaryOperation {
5688 public:
5689 HAnd(DataType::Type result_type,
5690 HInstruction* left,
5691 HInstruction* right,
5692 uint32_t dex_pc = kNoDexPc)
5693 : HBinaryOperation(kAnd, result_type, left, right, SideEffects::None(), dex_pc) {
5694 }
5695
5696 bool IsCommutative() const override { return true; }
5697
5698 template <typename T> static T Compute(T x, T y) { return x & y; }
5699
5700 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5701 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue(), y->GetValue()));
5702 }
5703 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5704 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue(), y->GetValue()));
5705 }
5706
5707 DECLARE_INSTRUCTION(And);
5708
5709 protected:
5710 DEFAULT_COPY_CONSTRUCTOR(And);
5711 };
5712
5713 class HOr final : public HBinaryOperation {
5714 public:
5715 HOr(DataType::Type result_type,
5716 HInstruction* left,
5717 HInstruction* right,
5718 uint32_t dex_pc = kNoDexPc)
5719 : HBinaryOperation(kOr, result_type, left, right, SideEffects::None(), dex_pc) {
5720 }
5721
5722 bool IsCommutative() const override { return true; }
5723
5724 template <typename T> static T Compute(T x, T y) { return x | y; }
5725
5726 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5727 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue(), y->GetValue()));
5728 }
5729 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5730 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue(), y->GetValue()));
5731 }
5732
5733 DECLARE_INSTRUCTION(Or);
5734
5735 protected:
5736 DEFAULT_COPY_CONSTRUCTOR(Or);
5737 };
5738
5739 class HXor final : public HBinaryOperation {
5740 public:
5741 HXor(DataType::Type result_type,
5742 HInstruction* left,
5743 HInstruction* right,
5744 uint32_t dex_pc = kNoDexPc)
5745 : HBinaryOperation(kXor, result_type, left, right, SideEffects::None(), dex_pc) {
5746 }
5747
5748 bool IsCommutative() const override { return true; }
5749
5750 template <typename T> static T Compute(T x, T y) { return x ^ y; }
5751
5752 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5753 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue(), y->GetValue()));
5754 }
5755 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5756 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue(), y->GetValue()));
5757 }
5758
5759 DECLARE_INSTRUCTION(Xor);
5760
5761 protected:
5762 DEFAULT_COPY_CONSTRUCTOR(Xor);
5763 };
5764
5765 class HRor final : public HBinaryOperation {
5766 public:
5767 HRor(DataType::Type result_type, HInstruction* value, HInstruction* distance)
5768 : HBinaryOperation(kRor, result_type, value, distance) {
5769 }
5770
5771 template <typename T>
5772 static T Compute(T value, int32_t distance, int32_t max_shift_value) {
5773 using V = std::make_unsigned_t<T>;
5774 V ux = static_cast<V>(value);
5775 if ((distance & max_shift_value) == 0) {
5776 return static_cast<T>(ux);
5777 } else {
5778 const V reg_bits = sizeof(T) * 8;
5779 return static_cast<T>(ux >> (distance & max_shift_value)) |
5780 (value << (reg_bits - (distance & max_shift_value)));
5781 }
5782 }
5783
5784 HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const override {
5785 return GetBlock()->GetGraph()->GetIntConstant(
5786 Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance));
5787 }
5788 HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const override {
5789 return GetBlock()->GetGraph()->GetLongConstant(
5790 Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance));
5791 }
5792
5793 DECLARE_INSTRUCTION(Ror);
5794
5795 protected:
5796 DEFAULT_COPY_CONSTRUCTOR(Ror);
5797 };
5798
5799 class HRol final : public HBinaryOperation {
5800 public:
5801 HRol(DataType::Type result_type, HInstruction* value, HInstruction* distance)
5802 : HBinaryOperation(kRol, result_type, value, distance) {}
5803
5804 template <typename T>
5805 static T Compute(T value, int32_t distance, int32_t max_shift_value) {
5806 return HRor::Compute(value, -distance, max_shift_value);
5807 }
5808
5809 HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const override {
5810 return GetBlock()->GetGraph()->GetIntConstant(
5811 Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance));
5812 }
5813 HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const override {
5814 return GetBlock()->GetGraph()->GetLongConstant(
5815 Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance));
5816 }
5817
5818 DECLARE_INSTRUCTION(Rol);
5819
5820 protected:
5821 DEFAULT_COPY_CONSTRUCTOR(Rol);
5822 };
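// A worked example for the rotate helpers above:
//
//   HRor::Compute<int32_t>(0x7FFFFFFE, 1, kMaxIntShiftDistance)  // -> 0x3FFFFFFF
//   // HRol delegates to HRor with a negated distance: rotating left by 1 is
//   // rotating right by -1 & 31 == 31 bit positions.
//   HRol::Compute<int32_t>(1, 1, kMaxIntShiftDistance)           // -> 2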
5823
5824 // The value of a parameter in this method. Its location depends on
5825 // the calling convention.
5826 class HParameterValue final : public HExpression<0> {
5827 public:
5828 HParameterValue(const DexFile& dex_file,
5829 dex::TypeIndex type_index,
5830 uint8_t index,
5831 DataType::Type parameter_type,
5832 bool is_this = false)
5833 : HExpression(kParameterValue, parameter_type, SideEffects::None(), kNoDexPc),
5834 dex_file_(dex_file),
5835 type_index_(type_index),
5836 index_(index) {
5837 SetPackedFlag<kFlagIsThis>(is_this);
5838 SetPackedFlag<kFlagCanBeNull>(!is_this);
5839 }
5840
5841 const DexFile& GetDexFile() const { return dex_file_; }
5842 dex::TypeIndex GetTypeIndex() const { return type_index_; }
5843 uint8_t GetIndex() const { return index_; }
5844 bool IsThis() const { return GetPackedFlag<kFlagIsThis>(); }
5845
5846 bool CanBeNull() const override { return GetPackedFlag<kFlagCanBeNull>(); }
5847 void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }
5848
5849 DECLARE_INSTRUCTION(ParameterValue);
5850
5851 protected:
5852 DEFAULT_COPY_CONSTRUCTOR(ParameterValue);
5853
5854 private:
5855 // Whether or not the parameter value corresponds to the 'this' argument.
5856 static constexpr size_t kFlagIsThis = kNumberOfGenericPackedBits;
5857 static constexpr size_t kFlagCanBeNull = kFlagIsThis + 1;
5858 static constexpr size_t kNumberOfParameterValuePackedBits = kFlagCanBeNull + 1;
5859 static_assert(kNumberOfParameterValuePackedBits <= kMaxNumberOfPackedBits,
5860 "Too many packed fields.");
5861
5862 const DexFile& dex_file_;
5863 const dex::TypeIndex type_index_;
5864 // The index of this parameter in the parameters list. Must be less
5865 // than HGraph::number_of_in_vregs_.
5866 const uint8_t index_;
5867 };
5868
5869 class HNot final : public HUnaryOperation {
5870 public:
5871 HNot(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
5872 : HUnaryOperation(kNot, result_type, input, dex_pc) {
5873 }
5874
5875 bool CanBeMoved() const override { return true; }
5876 bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
5877 return true;
5878 }
5879
5880 template <typename T> static T Compute(T x) { return ~x; }
5881
5882 HConstant* Evaluate(HIntConstant* x) const override {
5883 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()));
5884 }
5885 HConstant* Evaluate(HLongConstant* x) const override {
5886 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()));
5887 }
5888
5889 DECLARE_INSTRUCTION(Not);
5890
5891 protected:
5892 DEFAULT_COPY_CONSTRUCTOR(Not);
5893 };
5894
5895 class HBooleanNot final : public HUnaryOperation {
5896 public:
5897 explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
5898 : HUnaryOperation(kBooleanNot, DataType::Type::kBool, input, dex_pc) {
5899 }
5900
5901 bool CanBeMoved() const override { return true; }
5902 bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
5903 return true;
5904 }
5905
5906 template <typename T> static bool Compute(T x) {
5907 DCHECK(IsUint<1>(x)) << x;
5908 return !x;
5909 }
5910
5911 HConstant* Evaluate(HIntConstant* x) const override {
5912 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()));
5913 }
5914
5915 DECLARE_INSTRUCTION(BooleanNot);
5916
5917 protected:
5918 DEFAULT_COPY_CONSTRUCTOR(BooleanNot);
5919 };
5920
5921 class HTypeConversion final : public HExpression<1> {
5922 public:
5923 // Instantiate a type conversion of `input` to `result_type`.
5924 HTypeConversion(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
5925 : HExpression(kTypeConversion, result_type, SideEffects::None(), dex_pc) {
5926 SetRawInputAt(0, input);
5927 // Invariant: We should never generate a conversion to a Boolean value.
5928 DCHECK_NE(DataType::Type::kBool, result_type);
5929 }
5930
5931 HInstruction* GetInput() const { return InputAt(0); }
5932 DataType::Type GetInputType() const { return GetInput()->GetType(); }
5933 DataType::Type GetResultType() const { return GetType(); }
5934
5935 bool IsClonable() const override { return true; }
5936 bool CanBeMoved() const override { return true; }
5937 bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
5938 return true;
5939 }
5940 // Return whether the conversion is implicit. This includes conversion to the same type.
5941 bool IsImplicitConversion() const {
5942 return DataType::IsTypeConversionImplicit(GetInputType(), GetResultType());
  }
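  // For example, a conversion from kInt32 to kInt32 is implicit; and since small
  // integral values are held sign-/zero-extended in registers, a widening such as
  // kInt16 to kInt32 typically needs no code either.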
5944
5945 // Try to statically evaluate the conversion and return a HConstant
5946 // containing the result. If the input cannot be converted, return nullptr.
5947 HConstant* TryStaticEvaluation() const;
5948
  // Same as above, but evaluates the given `input` instead of GetInput().
5950 HConstant* TryStaticEvaluation(HInstruction* input) const;
5951
5952 DECLARE_INSTRUCTION(TypeConversion);
5953
5954 protected:
5955 DEFAULT_COPY_CONSTRUCTOR(TypeConversion);
5956 };
5957
5958 static constexpr uint32_t kNoRegNumber = -1;
5959
5960 class HNullCheck final : public HExpression<1> {
5961 public:
  // `HNullCheck` can trigger GC, as it may call the `NullPointerException` constructor.
  // However, it can only do so on a fatal slow path, so execution never returns to the
  // instruction following the current one; thus `SideEffects::None()` is used.
5965 HNullCheck(HInstruction* value, uint32_t dex_pc)
5966 : HExpression(kNullCheck, value->GetType(), SideEffects::None(), dex_pc) {
5967 SetRawInputAt(0, value);
5968 }
5969
5970 bool IsClonable() const override { return true; }
5971 bool CanBeMoved() const override { return true; }
5972 bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
5973 return true;
5974 }
5975
5976 bool NeedsEnvironment() const override { return true; }
5977
5978 bool CanThrow() const override { return true; }
5979
5980 bool CanBeNull() const override { return false; }
5981
5982 DECLARE_INSTRUCTION(NullCheck);
5983
5984 protected:
5985 DEFAULT_COPY_CONSTRUCTOR(NullCheck);
5986 };
5987
5988 // Embeds an ArtField and all the information required by the compiler. We cache
5989 // that information to avoid requiring the mutator lock every time we need it.
5990 class FieldInfo : public ValueObject {
5991 public:
5992 FieldInfo(ArtField* field,
5993 MemberOffset field_offset,
5994 DataType::Type field_type,
5995 bool is_volatile,
5996 uint32_t index,
5997 uint16_t declaring_class_def_index,
5998 const DexFile& dex_file)
5999 : field_(field),
6000 field_offset_(field_offset),
6001 field_type_(field_type),
6002 is_volatile_(is_volatile),
6003 index_(index),
6004 declaring_class_def_index_(declaring_class_def_index),
6005 dex_file_(dex_file) {}
6006
6007 ArtField* GetField() const { return field_; }
6008 MemberOffset GetFieldOffset() const { return field_offset_; }
6009 DataType::Type GetFieldType() const { return field_type_; }
6010 uint32_t GetFieldIndex() const { return index_; }
  uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_; }
6012 const DexFile& GetDexFile() const { return dex_file_; }
6013 bool IsVolatile() const { return is_volatile_; }
6014
6015 bool Equals(const FieldInfo& other) const {
6016 return field_ == other.field_ &&
6017 field_offset_ == other.field_offset_ &&
6018 field_type_ == other.field_type_ &&
6019 is_volatile_ == other.is_volatile_ &&
6020 index_ == other.index_ &&
6021 declaring_class_def_index_ == other.declaring_class_def_index_ &&
6022 &dex_file_ == &other.dex_file_;
6023 }
6024
6025 std::ostream& Dump(std::ostream& os) const {
6026 os << field_ << ", off: " << field_offset_ << ", type: " << field_type_
6027 << ", volatile: " << std::boolalpha << is_volatile_ << ", index_: " << std::dec << index_
6028 << ", declaring_class: " << declaring_class_def_index_ << ", dex: " << dex_file_;
6029 return os;
6030 }
6031
6032 private:
6033 ArtField* const field_;
6034 const MemberOffset field_offset_;
6035 const DataType::Type field_type_;
6036 const bool is_volatile_;
6037 const uint32_t index_;
6038 const uint16_t declaring_class_def_index_;
6039 const DexFile& dex_file_;
6040 };
6041
6042 inline bool operator==(const FieldInfo& a, const FieldInfo& b) {
6043 return a.Equals(b);
6044 }
6045
6046 inline std::ostream& operator<<(std::ostream& os, const FieldInfo& a) {
6047 return a.Dump(os);
6048 }
6049
6050 class HInstanceFieldGet final : public HExpression<1> {
6051 public:
6052 HInstanceFieldGet(HInstruction* object,
6053 ArtField* field,
6054 DataType::Type field_type,
6055 MemberOffset field_offset,
6056 bool is_volatile,
6057 uint32_t field_idx,
6058 uint16_t declaring_class_def_index,
6059 const DexFile& dex_file,
6060 uint32_t dex_pc)
6061 : HExpression(kInstanceFieldGet,
6062 field_type,
6063 SideEffects::FieldReadOfType(field_type, is_volatile),
6064 dex_pc),
6065 field_info_(field,
6066 field_offset,
6067 field_type,
6068 is_volatile,
6069 field_idx,
6070 declaring_class_def_index,
6071 dex_file) {
6072 SetRawInputAt(0, object);
6073 }
6074
6075 bool IsClonable() const override { return true; }
6076 bool CanBeMoved() const override { return !IsVolatile(); }
6077
6078 bool InstructionDataEquals(const HInstruction* other) const override {
6079 const HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
6080 return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
6081 }
6082
6083 bool CanDoImplicitNullCheckOn(HInstruction* obj) const override {
6084 return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
6085 }
6086
6087 size_t ComputeHashCode() const override {
6088 return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
6089 }
6090
6091 bool IsFieldAccess() const override { return true; }
6092 const FieldInfo& GetFieldInfo() const override { return field_info_; }
6093 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
6094 DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
6095 bool IsVolatile() const { return field_info_.IsVolatile(); }
6096
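  // Retype the load between integral types of the same size; used, for example,
  // when a following type conversion can be folded into the load itself (the
  // DCHECKs below enforce the same-size integral constraint).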
6097 void SetType(DataType::Type new_type) {
6098 DCHECK(DataType::IsIntegralType(GetType()));
6099 DCHECK(DataType::IsIntegralType(new_type));
6100 DCHECK_EQ(DataType::Size(GetType()), DataType::Size(new_type));
6101 SetPackedField<TypeField>(new_type);
6102 }
6103
6104 DECLARE_INSTRUCTION(InstanceFieldGet);
6105
6106 protected:
6107 DEFAULT_COPY_CONSTRUCTOR(InstanceFieldGet);
6108
6109 private:
6110 const FieldInfo field_info_;
6111 };
6112
6113 enum class WriteBarrierKind {
  // Emit the write barrier. This write barrier is not being relied on, so e.g. the code
  // generator can decide to skip it if the value stored is null. This is the default behavior.
6116 kEmitNotBeingReliedOn,
6117 // Emit the write barrier. This write barrier is being relied on and must be emitted.
6118 kEmitBeingReliedOn,
6119 // Skip emitting the write barrier. This could be set because:
  // A) The write barrier is not needed (i.e. the value is not a reference, or it is the
  //    null constant).
  // B) This write barrier was coalesced into another one, so there is no need to emit it.
6123 kDontEmit,
6124 kLast = kDontEmit
6125 };
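// For illustration: when two reference stores to the same object are emitted back to
// back, a write barrier elimination pass may keep only the first barrier (marking it
// kEmitBeingReliedOn) and mark the second one kDontEmit.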
6126 std::ostream& operator<<(std::ostream& os, WriteBarrierKind rhs);
6127
6128 class HInstanceFieldSet final : public HExpression<2> {
6129 public:
6130 HInstanceFieldSet(HInstruction* object,
6131 HInstruction* value,
6132 ArtField* field,
6133 DataType::Type field_type,
6134 MemberOffset field_offset,
6135 bool is_volatile,
6136 uint32_t field_idx,
6137 uint16_t declaring_class_def_index,
6138 const DexFile& dex_file,
6139 uint32_t dex_pc)
6140 : HExpression(kInstanceFieldSet,
6141 SideEffects::FieldWriteOfType(field_type, is_volatile),
6142 dex_pc),
6143 field_info_(field,
6144 field_offset,
6145 field_type,
6146 is_volatile,
6147 field_idx,
6148 declaring_class_def_index,
6149 dex_file) {
6150 SetPackedFlag<kFlagValueCanBeNull>(true);
6151 SetPackedField<WriteBarrierKindField>(WriteBarrierKind::kEmitNotBeingReliedOn);
6152 SetRawInputAt(0, object);
6153 SetRawInputAt(1, value);
6154 }
6155
6156 bool IsClonable() const override { return true; }
6157
6158 bool CanDoImplicitNullCheckOn(HInstruction* obj) const override {
6159 return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
6160 }
6161
6162 bool IsFieldAccess() const override { return true; }
6163 const FieldInfo& GetFieldInfo() const override { return field_info_; }
6164 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
6165 DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
6166 bool IsVolatile() const { return field_info_.IsVolatile(); }
6167 HInstruction* GetValue() const { return InputAt(1); }
6168 bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
6169 void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }
6170 WriteBarrierKind GetWriteBarrierKind() { return GetPackedField<WriteBarrierKindField>(); }
6171 void SetWriteBarrierKind(WriteBarrierKind kind) {
6172 DCHECK(kind != WriteBarrierKind::kEmitNotBeingReliedOn)
6173 << "We shouldn't go back to the original value.";
6174 DCHECK_IMPLIES(kind == WriteBarrierKind::kDontEmit,
6175 GetWriteBarrierKind() != WriteBarrierKind::kEmitBeingReliedOn)
6176 << "If a write barrier was relied on by other write barriers, we cannot skip emitting it.";
6177 SetPackedField<WriteBarrierKindField>(kind);
6178 }
6179
6180 DECLARE_INSTRUCTION(InstanceFieldSet);
6181
6182 protected:
6183 DEFAULT_COPY_CONSTRUCTOR(InstanceFieldSet);
6184
6185 private:
6186 static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
6187 static constexpr size_t kWriteBarrierKind = kFlagValueCanBeNull + 1;
6188 static constexpr size_t kWriteBarrierKindSize =
6189 MinimumBitsToStore(static_cast<size_t>(WriteBarrierKind::kLast));
6190 static constexpr size_t kNumberOfInstanceFieldSetPackedBits =
6191 kWriteBarrierKind + kWriteBarrierKindSize;
6192 static_assert(kNumberOfInstanceFieldSetPackedBits <= kMaxNumberOfPackedBits,
6193 "Too many packed fields.");
6194
6195 const FieldInfo field_info_;
6196 using WriteBarrierKindField =
6197 BitField<WriteBarrierKind, kWriteBarrierKind, kWriteBarrierKindSize>;
6198 };
6199
6200 class HArrayGet final : public HExpression<2> {
6201 public:
6202 HArrayGet(HInstruction* array,
6203 HInstruction* index,
6204 DataType::Type type,
6205 uint32_t dex_pc)
6206 : HArrayGet(array,
6207 index,
6208 type,
6209 SideEffects::ArrayReadOfType(type),
6210 dex_pc,
6211 /* is_string_char_at= */ false) {
6212 }
6213
6214 HArrayGet(HInstruction* array,
6215 HInstruction* index,
6216 DataType::Type type,
6217 SideEffects side_effects,
6218 uint32_t dex_pc,
6219 bool is_string_char_at)
6220 : HExpression(kArrayGet, type, side_effects, dex_pc) {
6221 SetPackedFlag<kFlagIsStringCharAt>(is_string_char_at);
6222 SetRawInputAt(0, array);
6223 SetRawInputAt(1, index);
6224 }
6225
6226 bool IsClonable() const override { return true; }
6227 bool CanBeMoved() const override { return true; }
6228 bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
6229 return true;
6230 }
6231 bool CanDoImplicitNullCheckOn([[maybe_unused]] HInstruction* obj) const override {
6232 // TODO: We can be smarter here.
    // Currently, unless the array is the result of NewArray, the array access is always
    // preceded by some form of null check necessary for the bounds check, usually an
    // implicit null check on the ArrayLength input to BoundsCheck or Deoptimize for
    // dynamic BCE. There are cases where these could be removed to produce better code.
    // If we ever add optimizations to do so, we should allow an implicit check here
    // (as long as the address falls within the first page).
6239 //
6240 // As an example of such fancy optimization, we could eliminate BoundsCheck for
6241 // a = cond ? new int[1] : null;
6242 // a[0]; // The Phi does not need bounds check for either input.
6243 return false;
6244 }
6245
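  // Two HArrayGets are equivalent if they are the int/float (or long/double)
  // aliases created for the same array access, i.e. same block, array, index
  // and dex pc but integral vs floating point type (see the debug checks below).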
6246 bool IsEquivalentOf(HArrayGet* other) const {
6247 bool result = (GetDexPc() == other->GetDexPc());
6248 if (kIsDebugBuild && result) {
6249 DCHECK_EQ(GetBlock(), other->GetBlock());
6250 DCHECK_EQ(GetArray(), other->GetArray());
6251 DCHECK_EQ(GetIndex(), other->GetIndex());
6252 if (DataType::IsIntOrLongType(GetType())) {
6253 DCHECK(DataType::IsFloatingPointType(other->GetType())) << other->GetType();
6254 } else {
6255 DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
6256 DCHECK(DataType::IsIntOrLongType(other->GetType())) << other->GetType();
6257 }
6258 }
6259 return result;
6260 }
6261
6262 bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }
6263
6264 HInstruction* GetArray() const { return InputAt(0); }
6265 HInstruction* GetIndex() const { return InputAt(1); }
6266
6267 void SetType(DataType::Type new_type) {
6268 DCHECK(DataType::IsIntegralType(GetType()));
6269 DCHECK(DataType::IsIntegralType(new_type));
6270 DCHECK_EQ(DataType::Size(GetType()), DataType::Size(new_type));
6271 SetPackedField<TypeField>(new_type);
6272 }
6273
6274 DECLARE_INSTRUCTION(ArrayGet);
6275
6276 protected:
6277 DEFAULT_COPY_CONSTRUCTOR(ArrayGet);
6278
6279 private:
  // We treat a String as an array, creating the HArrayGet from the String.charAt()
  // intrinsic in the instruction simplifier. We can always determine whether a
  // particular HArrayGet is actually a String.charAt() by looking at the type of
  // the input, but that requires holding the mutator lock, so we prefer to use a
  // flag so that code generators don't need to do the locking.
6285 static constexpr size_t kFlagIsStringCharAt = kNumberOfGenericPackedBits;
6286 static constexpr size_t kNumberOfArrayGetPackedBits = kFlagIsStringCharAt + 1;
6287 static_assert(kNumberOfArrayGetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6288 "Too many packed fields.");
6289 };
6290
6291 class HArraySet final : public HExpression<3> {
6292 public:
6293 HArraySet(HInstruction* array,
6294 HInstruction* index,
6295 HInstruction* value,
6296 DataType::Type expected_component_type,
6297 uint32_t dex_pc)
6298 : HArraySet(array,
6299 index,
6300 value,
6301 expected_component_type,
                // Make a best guess for side effects now; this may be refined during SSA building.
6303 ComputeSideEffects(GetComponentType(value->GetType(), expected_component_type)),
6304 dex_pc) {
6305 }
6306
6307 HArraySet(HInstruction* array,
6308 HInstruction* index,
6309 HInstruction* value,
6310 DataType::Type expected_component_type,
6311 SideEffects side_effects,
6312 uint32_t dex_pc)
6313 : HExpression(kArraySet, side_effects, dex_pc) {
6314 SetPackedField<ExpectedComponentTypeField>(expected_component_type);
6315 SetPackedFlag<kFlagNeedsTypeCheck>(value->GetType() == DataType::Type::kReference);
6316 SetPackedFlag<kFlagValueCanBeNull>(true);
6317 SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(false);
6318 SetPackedField<WriteBarrierKindField>(WriteBarrierKind::kEmitNotBeingReliedOn);
6319 SetRawInputAt(0, array);
6320 SetRawInputAt(1, index);
6321 SetRawInputAt(2, value);
6322 }
6323
6324 bool IsClonable() const override { return true; }
6325
6326 bool NeedsEnvironment() const override {
6327 // We call a runtime method to throw ArrayStoreException.
6328 return NeedsTypeCheck();
6329 }
6330
6331 // Can throw ArrayStoreException.
6332 bool CanThrow() const override { return NeedsTypeCheck(); }
6333
6334 bool CanDoImplicitNullCheckOn([[maybe_unused]] HInstruction* obj) const override {
6335 // TODO: Same as for ArrayGet.
6336 return false;
6337 }
6338
6339 void ClearTypeCheck() {
6340 SetPackedFlag<kFlagNeedsTypeCheck>(false);
6341 // Clear the `CanTriggerGC` flag too as we can only trigger a GC when doing a type check.
6342 SetSideEffects(GetSideEffects().Exclusion(SideEffects::CanTriggerGC()));
6343 // Clear the environment too as we can only throw if we need a type check.
6344 RemoveEnvironment();
6345 }
6346
6347 void ClearValueCanBeNull() {
6348 SetPackedFlag<kFlagValueCanBeNull>(false);
6349 }
6350
6351 void SetStaticTypeOfArrayIsObjectArray() {
6352 SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(true);
6353 }
6354
6355 bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
6356 bool NeedsTypeCheck() const { return GetPackedFlag<kFlagNeedsTypeCheck>(); }
6357 bool StaticTypeOfArrayIsObjectArray() const {
6358 return GetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>();
6359 }
6360
6361 HInstruction* GetArray() const { return InputAt(0); }
6362 HInstruction* GetIndex() const { return InputAt(1); }
6363 HInstruction* GetValue() const { return InputAt(2); }
6364
6365 DataType::Type GetComponentType() const {
6366 return GetComponentType(GetValue()->GetType(), GetRawExpectedComponentType());
6367 }
6368
6369 static DataType::Type GetComponentType(DataType::Type value_type,
6370 DataType::Type expected_component_type) {
    // The Dex format does not type floating point index operations. Since the
    // `expected_component_type` comes from SSA building and may therefore not
    // be correct, we also check the value type. If it is a floating point type,
    // we must use that type.
6375 return ((value_type == DataType::Type::kFloat32) || (value_type == DataType::Type::kFloat64))
6376 ? value_type
6377 : expected_component_type;
6378 }
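  // For example (hypothetical input): an untyped `aput` may reach SSA building with
  // `expected_component_type` kInt32 while the stored value is already typed kFloat32;
  // the component type used is then kFloat32.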
6379
6380 DataType::Type GetRawExpectedComponentType() const {
6381 return GetPackedField<ExpectedComponentTypeField>();
6382 }
6383
6384 static SideEffects ComputeSideEffects(DataType::Type type) {
6385 return SideEffects::ArrayWriteOfType(type).Union(SideEffectsForArchRuntimeCalls(type));
6386 }
6387
6388 static SideEffects SideEffectsForArchRuntimeCalls(DataType::Type value_type) {
6389 return (value_type == DataType::Type::kReference) ? SideEffects::CanTriggerGC()
6390 : SideEffects::None();
6391 }
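  // A reference store may call into the runtime for the type check, which can allocate
  // an ArrayStoreException and thus trigger GC; stores of other component types never do.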
6392
6393 WriteBarrierKind GetWriteBarrierKind() { return GetPackedField<WriteBarrierKindField>(); }
6394
6395 void SetWriteBarrierKind(WriteBarrierKind kind) {
6396 DCHECK(kind != WriteBarrierKind::kEmitNotBeingReliedOn)
6397 << "We shouldn't go back to the original value.";
6398 DCHECK_IMPLIES(kind == WriteBarrierKind::kDontEmit,
6399 GetWriteBarrierKind() != WriteBarrierKind::kEmitBeingReliedOn)
6400 << "If a write barrier was relied on by other write barriers, we cannot skip emitting it.";
6401 SetPackedField<WriteBarrierKindField>(kind);
6402 }
6403
6404 DECLARE_INSTRUCTION(ArraySet);
6405
6406 protected:
6407 DEFAULT_COPY_CONSTRUCTOR(ArraySet);
6408
6409 private:
6410 static constexpr size_t kFieldExpectedComponentType = kNumberOfGenericPackedBits;
6411 static constexpr size_t kFieldExpectedComponentTypeSize =
6412 MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
6413 static constexpr size_t kFlagNeedsTypeCheck =
6414 kFieldExpectedComponentType + kFieldExpectedComponentTypeSize;
6415 static constexpr size_t kFlagValueCanBeNull = kFlagNeedsTypeCheck + 1;
6416 // Cached information for the reference_type_info_ so that codegen
6417 // does not need to inspect the static type.
6418 static constexpr size_t kFlagStaticTypeOfArrayIsObjectArray = kFlagValueCanBeNull + 1;
6419 static constexpr size_t kWriteBarrierKind = kFlagStaticTypeOfArrayIsObjectArray + 1;
6420 static constexpr size_t kWriteBarrierKindSize =
6421 MinimumBitsToStore(static_cast<size_t>(WriteBarrierKind::kLast));
6422 static constexpr size_t kNumberOfArraySetPackedBits = kWriteBarrierKind + kWriteBarrierKindSize;
6423 static_assert(kNumberOfArraySetPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
6424 using ExpectedComponentTypeField =
6425 BitField<DataType::Type, kFieldExpectedComponentType, kFieldExpectedComponentTypeSize>;
6426
6427 using WriteBarrierKindField =
6428 BitField<WriteBarrierKind, kWriteBarrierKind, kWriteBarrierKindSize>;
6429 };
6430
6431 class HArrayLength final : public HExpression<1> {
6432 public:
6433 HArrayLength(HInstruction* array, uint32_t dex_pc, bool is_string_length = false)
6434 : HExpression(kArrayLength, DataType::Type::kInt32, SideEffects::None(), dex_pc) {
6435 SetPackedFlag<kFlagIsStringLength>(is_string_length);
6436 // Note that arrays do not change length, so the instruction does not
6437 // depend on any write.
6438 SetRawInputAt(0, array);
6439 }
6440
6441 bool IsClonable() const override { return true; }
6442 bool CanBeMoved() const override { return true; }
6443 bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
6444 return true;
6445 }
6446 bool CanDoImplicitNullCheckOn(HInstruction* obj) const override {
6447 return obj == InputAt(0);
6448 }
6449
6450 bool IsStringLength() const { return GetPackedFlag<kFlagIsStringLength>(); }
6451
6452 DECLARE_INSTRUCTION(ArrayLength);
6453
6454 protected:
6455 DEFAULT_COPY_CONSTRUCTOR(ArrayLength);
6456
6457 private:
  // We treat a String as an array, creating the HArrayLength from the String.length()
  // or String.isEmpty() intrinsic in the instruction simplifier. We can always
  // determine whether a particular HArrayLength is actually a String.length() by
  // looking at the type of the input, but that requires holding the mutator lock,
  // so we prefer to use a flag so that code generators don't need to do the locking.
6463 static constexpr size_t kFlagIsStringLength = kNumberOfGenericPackedBits;
6464 static constexpr size_t kNumberOfArrayLengthPackedBits = kFlagIsStringLength + 1;
6465 static_assert(kNumberOfArrayLengthPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6466 "Too many packed fields.");
6467 };
6468
6469 class HBoundsCheck final : public HExpression<2> {
6470 public:
  // `HBoundsCheck` can trigger GC, as it may call the `IndexOutOfBoundsException`
  // constructor. However, it can only do so on a fatal slow path, so execution never
  // returns to the instruction following the current one; thus `SideEffects::None()` is used.
6474 HBoundsCheck(HInstruction* index,
6475 HInstruction* length,
6476 uint32_t dex_pc,
6477 bool is_string_char_at = false)
6478 : HExpression(kBoundsCheck, index->GetType(), SideEffects::None(), dex_pc) {
6479 DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(index->GetType()));
6480 SetPackedFlag<kFlagIsStringCharAt>(is_string_char_at);
6481 SetRawInputAt(0, index);
6482 SetRawInputAt(1, length);
6483 }
6484
6485 bool IsClonable() const override { return true; }
6486 bool CanBeMoved() const override { return true; }
6487 bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
6488 return true;
6489 }
6490
6491 bool NeedsEnvironment() const override { return true; }
6492
6493 bool CanThrow() const override { return true; }
6494
6495 bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }
6496
6497 HInstruction* GetIndex() const { return InputAt(0); }
6498
6499 DECLARE_INSTRUCTION(BoundsCheck);
6500
6501 protected:
6502 DEFAULT_COPY_CONSTRUCTOR(BoundsCheck);
6503
6504 private:
6505 static constexpr size_t kFlagIsStringCharAt = kNumberOfGenericPackedBits;
6506 static constexpr size_t kNumberOfBoundsCheckPackedBits = kFlagIsStringCharAt + 1;
6507 static_assert(kNumberOfBoundsCheckPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6508 "Too many packed fields.");
6509 };
6510
6511 class HSuspendCheck final : public HExpression<0> {
6512 public:
6513 explicit HSuspendCheck(uint32_t dex_pc = kNoDexPc, bool is_no_op = false)
6514 : HExpression(kSuspendCheck, SideEffects::CanTriggerGC(), dex_pc),
6515 slow_path_(nullptr) {
6516 SetPackedFlag<kFlagIsNoOp>(is_no_op);
6517 }
6518
6519 bool IsClonable() const override { return true; }
6520
6521 bool NeedsEnvironment() const override {
6522 return true;
6523 }
6524
6525 void SetIsNoOp(bool is_no_op) { SetPackedFlag<kFlagIsNoOp>(is_no_op); }
  bool IsNoOp() const { return GetPackedFlag<kFlagIsNoOp>(); }

6529 void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
6530 SlowPathCode* GetSlowPath() const { return slow_path_; }
6531
6532 DECLARE_INSTRUCTION(SuspendCheck);
6533
6534 protected:
6535 DEFAULT_COPY_CONSTRUCTOR(SuspendCheck);
6536
6537 // True if the HSuspendCheck should not emit any code during codegen. It is
6538 // not possible to simply remove this instruction to disable codegen, as
  // other optimizations (e.g., CHAGuardVisitor::HoistGuard) depend on
6540 // HSuspendCheck being present in every loop.
6541 static constexpr size_t kFlagIsNoOp = kNumberOfGenericPackedBits;
6542 static constexpr size_t kNumberOfSuspendCheckPackedBits = kFlagIsNoOp + 1;
6543 static_assert(kNumberOfSuspendCheckPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6544 "Too many packed fields.");
6545
6546 private:
6547 // Only used for code generation, in order to share the same slow path between back edges
6548 // of a same loop.
6549 SlowPathCode* slow_path_;
6550 };
6551
6552 // Pseudo-instruction which doesn't generate any code.
// If `needs_environment` is true, it can be used to generate an environment. It is used, for
// example, to provide the native debugger with mapping information, ensuring that we can
// generate line numbers and local variables at this point.
6556 class HNop : public HExpression<0> {
6557 public:
6558 explicit HNop(uint32_t dex_pc, bool needs_environment)
6559 : HExpression<0>(kNop, SideEffects::None(), dex_pc), needs_environment_(needs_environment) {
6560 }
6561
6562 bool NeedsEnvironment() const override {
6563 return needs_environment_;
6564 }
6565
6566 DECLARE_INSTRUCTION(Nop);
6567
6568 protected:
6569 DEFAULT_COPY_CONSTRUCTOR(Nop);
6570
6571 private:
6572 bool needs_environment_;
6573 };
6574
6575 /**
6576 * Instruction to load a Class object.
6577 */
6578 class HLoadClass final : public HInstruction {
6579 public:
6580 // Determines how to load the Class.
6581 enum class LoadKind {
6582 // We cannot load this class. See HSharpening::SharpenLoadClass.
6583 kInvalid = -1,
6584
6585 // Use the Class* from the method's own ArtMethod*.
6586 kReferrersClass,
6587
6588 // Use PC-relative boot image Class* address that will be known at link time.
6589 // Used for boot image classes referenced by boot image code.
6590 kBootImageLinkTimePcRelative,
6591
6592 // Load from a boot image entry in the .data.img.rel.ro using a PC-relative load.
6593 // Used for boot image classes referenced by apps in AOT-compiled code.
6594 kBootImageRelRo,
6595
6596 // Load from an app image entry in the .data.img.rel.ro using a PC-relative load.
6597 // Used for app image classes referenced by apps in AOT-compiled code.
6598 kAppImageRelRo,
6599
6600 // Load from an entry in the .bss section using a PC-relative load.
6601 // Used for classes outside boot image referenced by AOT-compiled app and boot image code.
6602 kBssEntry,
6603
    // Load from an entry for a public class in the .bss section using a PC-relative load.
    // Used for classes that were unresolved during AOT-compilation outside the literal
    // package of the compiling class. Such classes are accessible only if they are public,
    // so the .bss entry shall be filled only if the resolved class is public.
    kBssEntryPublic,

    // Load from an entry for a package class in the .bss section using a PC-relative load.
    // Used for classes that were unresolved during AOT-compilation but within the literal
    // package of the compiling class. Such classes are accessible if they are public or in
    // the same package which, given the literal package match, requires only a matching
    // defining class loader; the .bss entry shall therefore be filled only if at least one
    // of those conditions holds. Note that all code in an oat file belongs to classes with
    // the same defining class loader.
    kBssEntryPackage,
6618
6619 // Use a known boot image Class* address, embedded in the code by the codegen.
6620 // Used for boot image classes referenced by apps in JIT-compiled code.
6621 kJitBootImageAddress,
6622
6623 // Load from the root table associated with the JIT compiled method.
6624 kJitTableAddress,
6625
6626 // Load using a simple runtime call. This is the fall-back load kind when
6627 // the codegen is unable to use another appropriate kind.
6628 kRuntimeCall,
6629
6630 kLast = kRuntimeCall
6631 };
6632
6633 HLoadClass(HCurrentMethod* current_method,
6634 dex::TypeIndex type_index,
6635 const DexFile& dex_file,
6636 Handle<mirror::Class> klass,
6637 bool is_referrers_class,
6638 uint32_t dex_pc,
6639 bool needs_access_check)
6640 : HInstruction(kLoadClass,
6641 DataType::Type::kReference,
6642 SideEffectsForArchRuntimeCalls(),
6643 dex_pc),
6644 special_input_(HUserRecord<HInstruction*>(current_method)),
6645 type_index_(type_index),
6646 dex_file_(dex_file),
6647 klass_(klass) {
    // The referrers class should not need an access check. We never inline
    // unverified methods, so we cannot possibly end up in this situation.
6650 DCHECK_IMPLIES(is_referrers_class, !needs_access_check);
6651
6652 SetPackedField<LoadKindField>(
6653 is_referrers_class ? LoadKind::kReferrersClass : LoadKind::kRuntimeCall);
6654 SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
6655 SetPackedFlag<kFlagIsInImage>(false);
6656 SetPackedFlag<kFlagGenerateClInitCheck>(false);
6657 SetPackedFlag<kFlagValidLoadedClassRTI>(false);
6658 }
6659
6660 bool IsClonable() const override { return true; }
6661
6662 void SetLoadKind(LoadKind load_kind);
6663
6664 LoadKind GetLoadKind() const {
6665 return GetPackedField<LoadKindField>();
6666 }
6667
6668 bool HasPcRelativeLoadKind() const {
6669 return GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
6670 GetLoadKind() == LoadKind::kBootImageRelRo ||
6671 GetLoadKind() == LoadKind::kAppImageRelRo ||
6672 GetLoadKind() == LoadKind::kBssEntry ||
6673 GetLoadKind() == LoadKind::kBssEntryPublic ||
6674 GetLoadKind() == LoadKind::kBssEntryPackage;
6675 }
6676
6677 bool CanBeMoved() const override { return true; }
6678
6679 bool InstructionDataEquals(const HInstruction* other) const override;
6680
6681 size_t ComputeHashCode() const override { return type_index_.index_; }
6682
6683 bool CanBeNull() const override { return false; }
6684
6685 bool NeedsEnvironment() const override {
6686 return CanCallRuntime();
6687 }
6688 bool NeedsBss() const override {
6689 LoadKind load_kind = GetLoadKind();
6690 return load_kind == LoadKind::kBssEntry ||
6691 load_kind == LoadKind::kBssEntryPublic ||
6692 load_kind == LoadKind::kBssEntryPackage;
6693 }
6694
6695 void SetMustGenerateClinitCheck(bool generate_clinit_check) {
6696 SetPackedFlag<kFlagGenerateClInitCheck>(generate_clinit_check);
6697 }
6698
6699 bool CanCallRuntime() const {
6700 return NeedsAccessCheck() ||
6701 MustGenerateClinitCheck() ||
6702 NeedsBss() ||
6703 GetLoadKind() == LoadKind::kRuntimeCall;
6704 }
6705
6706 bool CanThrow() const override {
6707 return NeedsAccessCheck() ||
6708 MustGenerateClinitCheck() ||
6709 // If the class is in the boot or app image, the lookup in the runtime call cannot throw.
6710 ((GetLoadKind() == LoadKind::kRuntimeCall || NeedsBss()) && !IsInImage());
6711 }
6712
6713 ReferenceTypeInfo GetLoadedClassRTI() {
6714 if (GetPackedFlag<kFlagValidLoadedClassRTI>()) {
6715 // Note: The is_exact flag from the return value should not be used.
6716 return ReferenceTypeInfo::CreateUnchecked(klass_, /* is_exact= */ true);
6717 } else {
6718 return ReferenceTypeInfo::CreateInvalid();
6719 }
6720 }
6721
6722 // Loaded class RTI is marked as valid by RTP if the klass_ is admissible.
6723 void SetValidLoadedClassRTI() {
6724 DCHECK(klass_ != nullptr);
6725 SetPackedFlag<kFlagValidLoadedClassRTI>(true);
6726 }
6727
6728 dex::TypeIndex GetTypeIndex() const { return type_index_; }
6729 const DexFile& GetDexFile() const { return dex_file_; }
6730
6731 static SideEffects SideEffectsForArchRuntimeCalls() {
6732 return SideEffects::CanTriggerGC();
6733 }
6734
6735 bool IsReferrersClass() const { return GetLoadKind() == LoadKind::kReferrersClass; }
6736 bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }
6737 bool IsInImage() const { return GetPackedFlag<kFlagIsInImage>(); }
6738 bool MustGenerateClinitCheck() const { return GetPackedFlag<kFlagGenerateClInitCheck>(); }
6739
6740 bool MustResolveTypeOnSlowPath() const {
6741 // Check that this instruction has a slow path.
6742 LoadKind load_kind = GetLoadKind();
6743 DCHECK(load_kind != LoadKind::kRuntimeCall); // kRuntimeCall calls on main path.
6744 bool must_resolve_type_on_slow_path =
6745 load_kind == LoadKind::kBssEntry ||
6746 load_kind == LoadKind::kBssEntryPublic ||
6747 load_kind == LoadKind::kBssEntryPackage;
6748 DCHECK(must_resolve_type_on_slow_path || MustGenerateClinitCheck());
6749 return must_resolve_type_on_slow_path;
6750 }
6751
6752 void MarkInImage() {
6753 SetPackedFlag<kFlagIsInImage>(true);
6754 }
6755
6756 void AddSpecialInput(HInstruction* special_input);
6757
6758 using HInstruction::GetInputRecords; // Keep the const version visible.
6759 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
6760 return ArrayRef<HUserRecord<HInstruction*>>(
6761 &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
6762 }
6763
6764 Handle<mirror::Class> GetClass() const {
6765 return klass_;
6766 }
6767
6768 DECLARE_INSTRUCTION(LoadClass);
6769
6770 protected:
6771 DEFAULT_COPY_CONSTRUCTOR(LoadClass);
6772
6773 private:
6774 static constexpr size_t kFlagNeedsAccessCheck = kNumberOfGenericPackedBits;
6775 // Whether the type is in an image (boot image or app image).
6776 static constexpr size_t kFlagIsInImage = kFlagNeedsAccessCheck + 1;
6777 // Whether this instruction must generate the initialization check.
6778 // Used for code generation.
6779 static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInImage + 1;
6780 static constexpr size_t kFieldLoadKind = kFlagGenerateClInitCheck + 1;
6781 static constexpr size_t kFieldLoadKindSize =
6782 MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
6783 static constexpr size_t kFlagValidLoadedClassRTI = kFieldLoadKind + kFieldLoadKindSize;
6784 static constexpr size_t kNumberOfLoadClassPackedBits = kFlagValidLoadedClassRTI + 1;
6785 static_assert(kNumberOfLoadClassPackedBits < kMaxNumberOfPackedBits, "Too many packed fields.");
6786 using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
6787
6788 static bool HasTypeReference(LoadKind load_kind) {
6789 return load_kind == LoadKind::kReferrersClass ||
6790 load_kind == LoadKind::kBootImageLinkTimePcRelative ||
6791 load_kind == LoadKind::kAppImageRelRo ||
6792 load_kind == LoadKind::kBssEntry ||
6793 load_kind == LoadKind::kBssEntryPublic ||
6794 load_kind == LoadKind::kBssEntryPackage ||
6795 load_kind == LoadKind::kRuntimeCall;
6796 }
6797
6798 void SetLoadKindInternal(LoadKind load_kind);
6799
6800 // The special input is the HCurrentMethod for kRuntimeCall or kReferrersClass.
6801 // For other load kinds it's empty or possibly some architecture-specific instruction
  // for PC-relative loads, e.g. kBssEntry* or kBootImageLinkTimePcRelative.
6803 HUserRecord<HInstruction*> special_input_;
6804
6805 // A type index and dex file where the class can be accessed. The dex file can be:
6806 // - The compiling method's dex file if the class is defined there too.
6807 // - The compiling method's dex file if the class is referenced there.
6808 // - The dex file where the class is defined. When the load kind can only be
6809 // kBssEntry* or kRuntimeCall, we cannot emit code for this `HLoadClass`.
6810 const dex::TypeIndex type_index_;
6811 const DexFile& dex_file_;
6812
6813 Handle<mirror::Class> klass_;
6814 };
6815 std::ostream& operator<<(std::ostream& os, HLoadClass::LoadKind rhs);
6816
6817 // Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
6818 inline void HLoadClass::SetLoadKind(LoadKind load_kind) {
  // The load kind should be determined before inserting the instruction into the graph.
6820 DCHECK(GetBlock() == nullptr);
6821 DCHECK(GetEnvironment() == nullptr);
6822 SetPackedField<LoadKindField>(load_kind);
6823 if (load_kind != LoadKind::kRuntimeCall && load_kind != LoadKind::kReferrersClass) {
6824 special_input_ = HUserRecord<HInstruction*>(nullptr);
6825 }
6826 if (!NeedsEnvironment()) {
6827 SetSideEffects(SideEffects::None());
6828 }
6829 }
6830
6831 // Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
6832 inline void HLoadClass::AddSpecialInput(HInstruction* special_input) {
6833 // The special input is used for PC-relative loads on some architectures,
6834 // including literal pool loads, which are PC-relative too.
6835 DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
6836 GetLoadKind() == LoadKind::kBootImageRelRo ||
6837 GetLoadKind() == LoadKind::kAppImageRelRo ||
6838 GetLoadKind() == LoadKind::kBssEntry ||
6839 GetLoadKind() == LoadKind::kBssEntryPublic ||
6840 GetLoadKind() == LoadKind::kBssEntryPackage ||
6841 GetLoadKind() == LoadKind::kJitBootImageAddress) << GetLoadKind();
6842 DCHECK(special_input_.GetInstruction() == nullptr);
6843 special_input_ = HUserRecord<HInstruction*>(special_input);
6844 special_input->AddUseAt(this, 0);
6845 }
6846
6847 class HLoadString final : public HInstruction {
6848 public:
6849 // Determines how to load the String.
6850 enum class LoadKind {
6851 // Use PC-relative boot image String* address that will be known at link time.
6852 // Used for boot image strings referenced by boot image code.
6853 kBootImageLinkTimePcRelative,
6854
6855 // Load from a boot image entry in the .data.img.rel.ro using a PC-relative load.
6856 // Used for boot image strings referenced by apps in AOT-compiled code.
6857 kBootImageRelRo,
6858
6859 // Load from an entry in the .bss section using a PC-relative load.
6860 // Used for strings outside boot image referenced by AOT-compiled app and boot image code.
6861 kBssEntry,
6862
6863 // Use a known boot image String* address, embedded in the code by the codegen.
6864 // Used for boot image strings referenced by apps in JIT-compiled code.
6865 kJitBootImageAddress,
6866
6867 // Load from the root table associated with the JIT compiled method.
6868 kJitTableAddress,
6869
6870 // Load using a simple runtime call. This is the fall-back load kind when
6871 // the codegen is unable to use another appropriate kind.
6872 kRuntimeCall,
6873
6874 kLast = kRuntimeCall,
6875 };
6876
6877 HLoadString(HCurrentMethod* current_method,
6878 dex::StringIndex string_index,
6879 const DexFile& dex_file,
6880 uint32_t dex_pc)
6881 : HInstruction(kLoadString,
6882 DataType::Type::kReference,
6883 SideEffectsForArchRuntimeCalls(),
6884 dex_pc),
6885 special_input_(HUserRecord<HInstruction*>(current_method)),
6886 string_index_(string_index),
6887 dex_file_(dex_file) {
6888 SetPackedField<LoadKindField>(LoadKind::kRuntimeCall);
6889 }
6890
6891 bool IsClonable() const override { return true; }
6892 bool NeedsBss() const override {
6893 return GetLoadKind() == LoadKind::kBssEntry;
6894 }
6895
6896 void SetLoadKind(LoadKind load_kind);
6897
6898 LoadKind GetLoadKind() const {
6899 return GetPackedField<LoadKindField>();
6900 }
6901
6902 bool HasPcRelativeLoadKind() const {
6903 return GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
6904 GetLoadKind() == LoadKind::kBootImageRelRo ||
6905 GetLoadKind() == LoadKind::kBssEntry;
6906 }
6907
6908 const DexFile& GetDexFile() const {
6909 return dex_file_;
6910 }
6911
6912 dex::StringIndex GetStringIndex() const {
6913 return string_index_;
6914 }
6915
6916 Handle<mirror::String> GetString() const {
6917 return string_;
6918 }
6919
6920 void SetString(Handle<mirror::String> str) {
6921 string_ = str;
6922 }
6923
6924 bool CanBeMoved() const override { return true; }
6925
6926 bool InstructionDataEquals(const HInstruction* other) const override;
6927
6928 size_t ComputeHashCode() const override { return string_index_.index_; }
6929
6930 // Will call the runtime if we need to load the string through
6931 // the dex cache and the string is not guaranteed to be there yet.
6932 bool NeedsEnvironment() const override {
6933 LoadKind load_kind = GetLoadKind();
6934 if (load_kind == LoadKind::kBootImageLinkTimePcRelative ||
6935 load_kind == LoadKind::kBootImageRelRo ||
6936 load_kind == LoadKind::kJitBootImageAddress ||
6937 load_kind == LoadKind::kJitTableAddress) {
6938 return false;
6939 }
6940 return true;
6941 }
6942
6943 bool CanBeNull() const override { return false; }
6944 bool CanThrow() const override { return NeedsEnvironment(); }
6945
6946 static SideEffects SideEffectsForArchRuntimeCalls() {
6947 return SideEffects::CanTriggerGC();
6948 }
6949
6950 void AddSpecialInput(HInstruction* special_input);
6951
6952 using HInstruction::GetInputRecords; // Keep the const version visible.
6953 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
6954 return ArrayRef<HUserRecord<HInstruction*>>(
6955 &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
6956 }
6957
6958 DECLARE_INSTRUCTION(LoadString);
6959
6960 protected:
6961 DEFAULT_COPY_CONSTRUCTOR(LoadString);
6962
6963 private:
6964 static constexpr size_t kFieldLoadKind = kNumberOfGenericPackedBits;
6965 static constexpr size_t kFieldLoadKindSize =
6966 MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
6967 static constexpr size_t kNumberOfLoadStringPackedBits = kFieldLoadKind + kFieldLoadKindSize;
6968 static_assert(kNumberOfLoadStringPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
6969 using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
6970
6971 void SetLoadKindInternal(LoadKind load_kind);
6972
6973 // The special input is the HCurrentMethod for kRuntimeCall.
6974 // For other load kinds it's empty or possibly some architecture-specific instruction
  // for PC-relative loads, e.g. kBssEntry or kBootImageLinkTimePcRelative.
6976 HUserRecord<HInstruction*> special_input_;
6977
6978 dex::StringIndex string_index_;
6979 const DexFile& dex_file_;
6980
6981 Handle<mirror::String> string_;
6982 };
6983 std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs);
6984
6985 // Note: defined outside class to see operator<<(., HLoadString::LoadKind).
6986 inline void HLoadString::SetLoadKind(LoadKind load_kind) {
  // The load kind should be determined before inserting the instruction into the graph.
6988 DCHECK(GetBlock() == nullptr);
6989 DCHECK(GetEnvironment() == nullptr);
6990 DCHECK_EQ(GetLoadKind(), LoadKind::kRuntimeCall);
6991 SetPackedField<LoadKindField>(load_kind);
6992 if (load_kind != LoadKind::kRuntimeCall) {
6993 special_input_ = HUserRecord<HInstruction*>(nullptr);
6994 }
6995 if (!NeedsEnvironment()) {
6996 SetSideEffects(SideEffects::None());
6997 }
6998 }
6999
7000 // Note: defined outside class to see operator<<(., HLoadString::LoadKind).
7001 inline void HLoadString::AddSpecialInput(HInstruction* special_input) {
7002 // The special input is used for PC-relative loads on some architectures,
7003 // including literal pool loads, which are PC-relative too.
7004 DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
7005 GetLoadKind() == LoadKind::kBootImageRelRo ||
7006 GetLoadKind() == LoadKind::kBssEntry ||
7007 GetLoadKind() == LoadKind::kJitBootImageAddress) << GetLoadKind();
7008 // HLoadString::GetInputRecords() returns an empty array at this point,
7009 // so use the GetInputRecords() from the base class to set the input record.
7010 DCHECK(special_input_.GetInstruction() == nullptr);
7011 special_input_ = HUserRecord<HInstruction*>(special_input);
7012 special_input->AddUseAt(this, 0);
7013 }
7014
7015 class HLoadMethodHandle final : public HInstruction {
7016 public:
7017 HLoadMethodHandle(HCurrentMethod* current_method,
7018 uint16_t method_handle_idx,
7019 const DexFile& dex_file,
7020 uint32_t dex_pc)
7021 : HInstruction(kLoadMethodHandle,
7022 DataType::Type::kReference,
7023 SideEffectsForArchRuntimeCalls(),
7024 dex_pc),
7025 special_input_(HUserRecord<HInstruction*>(current_method)),
7026 method_handle_idx_(method_handle_idx),
7027 dex_file_(dex_file) {
7028 }
7029
7030 using HInstruction::GetInputRecords; // Keep the const version visible.
7031 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
7032 return ArrayRef<HUserRecord<HInstruction*>>(
7033 &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
7034 }
7035
7036 bool IsClonable() const override { return true; }
7037
7038 uint16_t GetMethodHandleIndex() const { return method_handle_idx_; }
7039
7040 const DexFile& GetDexFile() const { return dex_file_; }
7041
7042 static SideEffects SideEffectsForArchRuntimeCalls() {
7043 return SideEffects::CanTriggerGC();
7044 }
7045
7046 bool CanThrow() const override { return true; }
7047
7048 bool NeedsEnvironment() const override { return true; }
7049
7050 DECLARE_INSTRUCTION(LoadMethodHandle);
7051
7052 protected:
7053 DEFAULT_COPY_CONSTRUCTOR(LoadMethodHandle);
7054
7055 private:
7056 // The special input is the HCurrentMethod for kRuntimeCall.
7057 HUserRecord<HInstruction*> special_input_;
7058
7059 const uint16_t method_handle_idx_;
7060 const DexFile& dex_file_;
7061 };
7062
7063 class HLoadMethodType final : public HInstruction {
7064 public:
7065 // Determines how to load the MethodType.
7066 enum class LoadKind {
7067 // Load from an entry in the .bss section using a PC-relative load.
7068 kBssEntry,
7069 // Load from the root table associated with the JIT compiled method.
7070 kJitTableAddress,
    // Load using a simple runtime call.
7072 kRuntimeCall,
7073
7074 kLast = kRuntimeCall,
7075 };
7076
7077 HLoadMethodType(HCurrentMethod* current_method,
7078 dex::ProtoIndex proto_index,
7079 const DexFile& dex_file,
7080 uint32_t dex_pc)
7081 : HInstruction(kLoadMethodType,
7082 DataType::Type::kReference,
7083 SideEffectsForArchRuntimeCalls(),
7084 dex_pc),
7085 special_input_(HUserRecord<HInstruction*>(current_method)),
7086 proto_index_(proto_index),
7087 dex_file_(dex_file) {
7088 SetPackedField<LoadKindField>(LoadKind::kRuntimeCall);
7089 }
7090
7091 using HInstruction::GetInputRecords; // Keep the const version visible.
7092 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
7093 return ArrayRef<HUserRecord<HInstruction*>>(
7094 &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
7095 }
7096
7097 bool IsClonable() const override { return true; }
7098
7099 void SetLoadKind(LoadKind load_kind);
7100
7101 LoadKind GetLoadKind() const {
7102 return GetPackedField<LoadKindField>();
7103 }
7104
7105 dex::ProtoIndex GetProtoIndex() const { return proto_index_; }
7106
7107 Handle<mirror::MethodType> GetMethodType() const { return method_type_; }
7108
7109 void SetMethodType(Handle<mirror::MethodType> method_type) { method_type_ = method_type; }
7110
7111 const DexFile& GetDexFile() const { return dex_file_; }
7112
7113 static SideEffects SideEffectsForArchRuntimeCalls() {
7114 return SideEffects::CanTriggerGC();
7115 }
7116
7117 bool CanThrow() const override { return true; }
7118
7119 bool NeedsEnvironment() const override { return true; }
7120
7121 DECLARE_INSTRUCTION(LoadMethodType);
7122
7123 protected:
7124 DEFAULT_COPY_CONSTRUCTOR(LoadMethodType);
7125
7126 private:
7127 static constexpr size_t kFieldLoadKind = kNumberOfGenericPackedBits;
7128 static constexpr size_t kFieldLoadKindSize =
7129 MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
7130 static constexpr size_t kNumberOfLoadMethodTypePackedBits = kFieldLoadKind + kFieldLoadKindSize;
7131 static_assert(kNumberOfLoadMethodTypePackedBits <= kMaxNumberOfPackedBits,
7132 "Too many packed fields.");
7133 using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
7134
7135 // The special input is the HCurrentMethod for kRuntimeCall.
7136 HUserRecord<HInstruction*> special_input_;
7137
7138 const dex::ProtoIndex proto_index_;
7139 const DexFile& dex_file_;
7140
7141 Handle<mirror::MethodType> method_type_;
7142 };
7143
7144 std::ostream& operator<<(std::ostream& os, HLoadMethodType::LoadKind rhs);
7145
7146 // Note: defined outside class to see operator<<(., HLoadMethodType::LoadKind).
7147 inline void HLoadMethodType::SetLoadKind(LoadKind load_kind) {
  // The load kind should be determined before inserting the instruction into the graph.
7149 DCHECK(GetBlock() == nullptr);
7150 DCHECK(GetEnvironment() == nullptr);
7151 DCHECK_EQ(GetLoadKind(), LoadKind::kRuntimeCall);
  DCHECK_IMPLIES(load_kind == LoadKind::kJitTableAddress, GetMethodType() != nullptr);
7153 SetPackedField<LoadKindField>(load_kind);
7154 }
7155
7156 /**
7157 * Performs an initialization check on its Class object input.
7158 */
7159 class HClinitCheck final : public HExpression<1> {
7160 public:
7161 HClinitCheck(HLoadClass* constant, uint32_t dex_pc)
7162 : HExpression(
7163 kClinitCheck,
7164 DataType::Type::kReference,
7165 SideEffects::AllExceptGCDependency(), // Assume write/read on all fields/arrays.
7166 dex_pc) {
7167 SetRawInputAt(0, constant);
7168 }
7169 // TODO: Make ClinitCheck clonable.
7170 bool CanBeMoved() const override { return true; }
7171 bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
7172 return true;
7173 }
7174
7175 bool NeedsEnvironment() const override {
7176 // May call runtime to initialize the class.
7177 return true;
7178 }
7179
7180 bool CanThrow() const override { return true; }
7181
7182 HLoadClass* GetLoadClass() const {
7183 DCHECK(InputAt(0)->IsLoadClass());
7184 return InputAt(0)->AsLoadClass();
7185 }
7186
  DECLARE_INSTRUCTION(ClinitCheck);

7190 protected:
7191 DEFAULT_COPY_CONSTRUCTOR(ClinitCheck);
7192 };
7193
7194 class HStaticFieldGet final : public HExpression<1> {
7195 public:
7196 HStaticFieldGet(HInstruction* cls,
7197 ArtField* field,
7198 DataType::Type field_type,
7199 MemberOffset field_offset,
7200 bool is_volatile,
7201 uint32_t field_idx,
7202 uint16_t declaring_class_def_index,
7203 const DexFile& dex_file,
7204 uint32_t dex_pc)
7205 : HExpression(kStaticFieldGet,
7206 field_type,
7207 SideEffects::FieldReadOfType(field_type, is_volatile),
7208 dex_pc),
7209 field_info_(field,
7210 field_offset,
7211 field_type,
7212 is_volatile,
7213 field_idx,
7214 declaring_class_def_index,
7215 dex_file) {
7216 SetRawInputAt(0, cls);
  }

7220 bool IsClonable() const override { return true; }
7221 bool CanBeMoved() const override { return !IsVolatile(); }
7222
7223 bool InstructionDataEquals(const HInstruction* other) const override {
7224 const HStaticFieldGet* other_get = other->AsStaticFieldGet();
7225 return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
7226 }
7227
7228 size_t ComputeHashCode() const override {
7229 return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
7230 }
7231
7232 bool IsFieldAccess() const override { return true; }
7233 const FieldInfo& GetFieldInfo() const override { return field_info_; }
7234 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
7235 DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
7236 bool IsVolatile() const { return field_info_.IsVolatile(); }
7237
7238 void SetType(DataType::Type new_type) {
7239 DCHECK(DataType::IsIntegralType(GetType()));
7240 DCHECK(DataType::IsIntegralType(new_type));
7241 DCHECK_EQ(DataType::Size(GetType()), DataType::Size(new_type));
7242 SetPackedField<TypeField>(new_type);
7243 }
7244
7245 DECLARE_INSTRUCTION(StaticFieldGet);
7246
7247 protected:
7248 DEFAULT_COPY_CONSTRUCTOR(StaticFieldGet);
7249
7250 private:
7251 const FieldInfo field_info_;
7252 };
7253
7254 class HStaticFieldSet final : public HExpression<2> {
7255 public:
7256 HStaticFieldSet(HInstruction* cls,
7257 HInstruction* value,
7258 ArtField* field,
7259 DataType::Type field_type,
7260 MemberOffset field_offset,
7261 bool is_volatile,
7262 uint32_t field_idx,
7263 uint16_t declaring_class_def_index,
7264 const DexFile& dex_file,
7265 uint32_t dex_pc)
7266 : HExpression(kStaticFieldSet,
7267 SideEffects::FieldWriteOfType(field_type, is_volatile),
7268 dex_pc),
7269 field_info_(field,
7270 field_offset,
7271 field_type,
7272 is_volatile,
7273 field_idx,
7274 declaring_class_def_index,
7275 dex_file) {
7276 SetPackedFlag<kFlagValueCanBeNull>(true);
7277 SetPackedField<WriteBarrierKindField>(WriteBarrierKind::kEmitNotBeingReliedOn);
7278 SetRawInputAt(0, cls);
7279 SetRawInputAt(1, value);
7280 }
7281
7282 bool IsClonable() const override { return true; }
7283 bool IsFieldAccess() const override { return true; }
7284 const FieldInfo& GetFieldInfo() const override { return field_info_; }
7285 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
7286 DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
7287 bool IsVolatile() const { return field_info_.IsVolatile(); }
7288
7289 HInstruction* GetValue() const { return InputAt(1); }
7290 bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
7291 void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }
7292
7293 WriteBarrierKind GetWriteBarrierKind() { return GetPackedField<WriteBarrierKindField>(); }
7294 void SetWriteBarrierKind(WriteBarrierKind kind) {
7295 DCHECK(kind != WriteBarrierKind::kEmitNotBeingReliedOn)
7296 << "We shouldn't go back to the original value.";
7297 DCHECK_IMPLIES(kind == WriteBarrierKind::kDontEmit,
7298 GetWriteBarrierKind() != WriteBarrierKind::kEmitBeingReliedOn)
7299 << "If a write barrier was relied on by other write barriers, we cannot skip emitting it.";
7300 SetPackedField<WriteBarrierKindField>(kind);
7301 }
7302
7303 DECLARE_INSTRUCTION(StaticFieldSet);
7304
7305 protected:
7306 DEFAULT_COPY_CONSTRUCTOR(StaticFieldSet);
7307
7308 private:
7309 static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
7310 static constexpr size_t kWriteBarrierKind = kFlagValueCanBeNull + 1;
7311 static constexpr size_t kWriteBarrierKindSize =
7312 MinimumBitsToStore(static_cast<size_t>(WriteBarrierKind::kLast));
7313 static constexpr size_t kNumberOfStaticFieldSetPackedBits =
7314 kWriteBarrierKind + kWriteBarrierKindSize;
7315 static_assert(kNumberOfStaticFieldSetPackedBits <= kMaxNumberOfPackedBits,
7316 "Too many packed fields.");
7317
7318 const FieldInfo field_info_;
7319 using WriteBarrierKindField =
7320 BitField<WriteBarrierKind, kWriteBarrierKind, kWriteBarrierKindSize>;
7321 };
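
// A minimal sketch (illustrative only, not code from any actual pass) of how the
// write barrier kind transitions above are intended to be used when one barrier
// is made to cover several stores to the same target class:
//
//   HStaticFieldSet* first = ...;   // hypothetical: an earlier store
//   HStaticFieldSet* second = ...;  // hypothetical: a later store to the same class
//   first->SetWriteBarrierKind(WriteBarrierKind::kEmitBeingReliedOn);
//   second->SetWriteBarrierKind(WriteBarrierKind::kDontEmit);
//
// The DCHECKs in SetWriteBarrierKind() then forbid both moving back to
// kEmitNotBeingReliedOn and setting kDontEmit on a barrier that others rely on.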
7322
7323 class HStringBuilderAppend final : public HVariableInputSizeInstruction {
7324 public:
7325 HStringBuilderAppend(HIntConstant* format,
7326 uint32_t number_of_arguments,
7327 uint32_t number_of_out_vregs,
7328 bool has_fp_args,
7329 ArenaAllocator* allocator,
7330 uint32_t dex_pc)
7331 : HVariableInputSizeInstruction(
7332 kStringBuilderAppend,
7333 DataType::Type::kReference,
7334 SideEffects::CanTriggerGC().Union(
7335 // The runtime call may read memory from inputs. It never writes outside
7336 // of the newly allocated result object or newly allocated helper objects,
7337 // except for float/double arguments where we reuse thread-local helper objects.
7338 has_fp_args ? SideEffects::AllWritesAndReads() : SideEffects::AllReads()),
7339 dex_pc,
7340 allocator,
7341 number_of_arguments + /* format */ 1u,
7342 kArenaAllocInvokeInputs),
7343 number_of_out_vregs_(number_of_out_vregs) {
7344 DCHECK_GE(number_of_arguments, 1u); // There must be something to append.
7345 SetRawInputAt(FormatIndex(), format);
7346 }
7347
7348 void SetArgumentAt(size_t index, HInstruction* argument) {
7349 DCHECK_LT(index, GetNumberOfArguments());  // Must not overwrite the format input.
7350 SetRawInputAt(index, argument);
7351 }
7352
7353 // Return the number of arguments, excluding the format.
7354 size_t GetNumberOfArguments() const {
7355 DCHECK_GE(InputCount(), 1u);
7356 return InputCount() - 1u;
7357 }
7358
7359 // Return the number of outgoing vregs.
7360 uint32_t GetNumberOfOutVRegs() const { return number_of_out_vregs_; }
7361
7362 size_t FormatIndex() const {
7363 return GetNumberOfArguments();
7364 }
7365
7366 HIntConstant* GetFormat() {
7367 return InputAt(FormatIndex())->AsIntConstant();
7368 }
7369
7370 bool NeedsEnvironment() const override { return true; }
7371
7372 bool CanThrow() const override { return true; }
7373
7374 bool CanBeNull() const override { return false; }
7375
7376 DECLARE_INSTRUCTION(StringBuilderAppend);
7377
7378 protected:
7379 DEFAULT_COPY_CONSTRUCTOR(StringBuilderAppend);
7380
7381 private:
7382 uint32_t number_of_out_vregs_;
7383 };
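
// Illustrative input layout, assuming a hypothetical append with three arguments:
//
//   inputs[0..2] = the appended values, set via SetArgumentAt(0..2, ...)
//   inputs[3]    = the format HIntConstant, so FormatIndex() == 3
//
// and consequently GetNumberOfArguments() == InputCount() - 1u == 3.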
7384
7385 class HUnresolvedInstanceFieldGet final : public HExpression<1> {
7386 public:
7387 HUnresolvedInstanceFieldGet(HInstruction* obj,
7388 DataType::Type field_type,
7389 uint32_t field_index,
7390 uint32_t dex_pc)
7391 : HExpression(kUnresolvedInstanceFieldGet,
7392 field_type,
7393 SideEffects::AllExceptGCDependency(),
7394 dex_pc),
7395 field_index_(field_index) {
7396 SetRawInputAt(0, obj);
7397 }
7398
7399 bool IsClonable() const override { return true; }
7400 bool NeedsEnvironment() const override { return true; }
7401 bool CanThrow() const override { return true; }
7402
7403 DataType::Type GetFieldType() const { return GetType(); }
7404 uint32_t GetFieldIndex() const { return field_index_; }
7405
7406 DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet);
7407
7408 protected:
7409 DEFAULT_COPY_CONSTRUCTOR(UnresolvedInstanceFieldGet);
7410
7411 private:
7412 const uint32_t field_index_;
7413 };
7414
7415 class HUnresolvedInstanceFieldSet final : public HExpression<2> {
7416 public:
7417 HUnresolvedInstanceFieldSet(HInstruction* obj,
7418 HInstruction* value,
7419 DataType::Type field_type,
7420 uint32_t field_index,
7421 uint32_t dex_pc)
7422 : HExpression(kUnresolvedInstanceFieldSet, SideEffects::AllExceptGCDependency(), dex_pc),
7423 field_index_(field_index) {
7424 SetPackedField<FieldTypeField>(field_type);
7425 DCHECK_EQ(DataType::Kind(field_type), DataType::Kind(value->GetType()));
7426 SetRawInputAt(0, obj);
7427 SetRawInputAt(1, value);
7428 }
7429
7430 bool IsClonable() const override { return true; }
7431 bool NeedsEnvironment() const override { return true; }
7432 bool CanThrow() const override { return true; }
7433
7434 DataType::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
7435 uint32_t GetFieldIndex() const { return field_index_; }
7436
7437 DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet);
7438
7439 protected:
7440 DEFAULT_COPY_CONSTRUCTOR(UnresolvedInstanceFieldSet);
7441
7442 private:
7443 static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
7444 static constexpr size_t kFieldFieldTypeSize =
7445 MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
7446 static constexpr size_t kNumberOfUnresolvedInstanceFieldSetPackedBits =
7447 kFieldFieldType + kFieldFieldTypeSize;
7448 static_assert(kNumberOfUnresolvedInstanceFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
7449 "Too many packed fields.");
7450 using FieldTypeField = BitField<DataType::Type, kFieldFieldType, kFieldFieldTypeSize>;
7451
7452 const uint32_t field_index_;
7453 };
7454
7455 class HUnresolvedStaticFieldGet final : public HExpression<0> {
7456 public:
7457 HUnresolvedStaticFieldGet(DataType::Type field_type,
7458 uint32_t field_index,
7459 uint32_t dex_pc)
7460 : HExpression(kUnresolvedStaticFieldGet,
7461 field_type,
7462 SideEffects::AllExceptGCDependency(),
7463 dex_pc),
7464 field_index_(field_index) {
7465 }
7466
7467 bool IsClonable() const override { return true; }
7468 bool NeedsEnvironment() const override { return true; }
7469 bool CanThrow() const override { return true; }
7470
7471 DataType::Type GetFieldType() const { return GetType(); }
7472 uint32_t GetFieldIndex() const { return field_index_; }
7473
7474 DECLARE_INSTRUCTION(UnresolvedStaticFieldGet);
7475
7476 protected:
7477 DEFAULT_COPY_CONSTRUCTOR(UnresolvedStaticFieldGet);
7478
7479 private:
7480 const uint32_t field_index_;
7481 };
7482
7483 class HUnresolvedStaticFieldSet final : public HExpression<1> {
7484 public:
7485 HUnresolvedStaticFieldSet(HInstruction* value,
7486 DataType::Type field_type,
7487 uint32_t field_index,
7488 uint32_t dex_pc)
7489 : HExpression(kUnresolvedStaticFieldSet, SideEffects::AllExceptGCDependency(), dex_pc),
7490 field_index_(field_index) {
7491 SetPackedField<FieldTypeField>(field_type);
7492 DCHECK_EQ(DataType::Kind(field_type), DataType::Kind(value->GetType()));
7493 SetRawInputAt(0, value);
7494 }
7495
7496 bool IsClonable() const override { return true; }
7497 bool NeedsEnvironment() const override { return true; }
7498 bool CanThrow() const override { return true; }
7499
7500 DataType::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
7501 uint32_t GetFieldIndex() const { return field_index_; }
7502
7503 DECLARE_INSTRUCTION(UnresolvedStaticFieldSet);
7504
7505 protected:
7506 DEFAULT_COPY_CONSTRUCTOR(UnresolvedStaticFieldSet);
7507
7508 private:
7509 static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
7510 static constexpr size_t kFieldFieldTypeSize =
7511 MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
7512 static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
7513 kFieldFieldType + kFieldFieldTypeSize;
7514 static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
7515 "Too many packed fields.");
7516 using FieldTypeField = BitField<DataType::Type, kFieldFieldType, kFieldFieldTypeSize>;
7517
7518 const uint32_t field_index_;
7519 };
7520
7521 // Implement the move-exception DEX instruction.
7522 class HLoadException final : public HExpression<0> {
7523 public:
7524 explicit HLoadException(uint32_t dex_pc = kNoDexPc)
7525 : HExpression(kLoadException, DataType::Type::kReference, SideEffects::None(), dex_pc) {
7526 }
7527
7528 bool CanBeNull() const override { return false; }
7529
7530 DECLARE_INSTRUCTION(LoadException);
7531
7532 protected:
7533 DEFAULT_COPY_CONSTRUCTOR(LoadException);
7534 };
7535
7536 // Implicit part of move-exception which clears thread-local exception storage.
7537 // Must not be removed because the runtime expects the TLS to get cleared.
7538 class HClearException final : public HExpression<0> {
7539 public:
7540 explicit HClearException(uint32_t dex_pc = kNoDexPc)
7541 : HExpression(kClearException, SideEffects::AllWrites(), dex_pc) {
7542 }
7543
7544 DECLARE_INSTRUCTION(ClearException);
7545
7546 protected:
7547 DEFAULT_COPY_CONSTRUCTOR(ClearException);
7548 };
7549
7550 class HThrow final : public HExpression<1> {
7551 public:
7552 HThrow(HInstruction* exception, uint32_t dex_pc)
7553 : HExpression(kThrow, SideEffects::CanTriggerGC(), dex_pc) {
7554 SetRawInputAt(0, exception);
7555 }
7556
7557 bool IsControlFlow() const override { return true; }
7558
7559 bool NeedsEnvironment() const override { return true; }
7560
7561 bool CanThrow() const override { return true; }
7562
7563 bool AlwaysThrows() const override { return true; }
7564
7565 DECLARE_INSTRUCTION(Throw);
7566
7567 protected:
7568 DEFAULT_COPY_CONSTRUCTOR(Throw);
7569 };
7570
7571 /**
7572 * Implementation strategies for the code generator of an `HInstanceOf`
7573 * or `HCheckCast`.
7574 */
7575 enum class TypeCheckKind {  // private marker to keep generate-operator-out.py from processing this enum.
7576 kUnresolvedCheck, // Check against an unresolved type.
7577 kExactCheck, // Can do a single class compare.
7578 kClassHierarchyCheck, // Can just walk the super class chain.
7579 kAbstractClassCheck, // Can just walk the super class chain, starting one up.
7580 kInterfaceCheck, // No optimization yet when checking against an interface.
7581 kArrayObjectCheck, // Can just check if the array is not primitive.
7582 kArrayCheck, // No optimization yet when checking against a generic array.
7583 kBitstringCheck, // Compare the type check bitstring.
7584 kLast = kBitstringCheck
7585 };
7586
7587 std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs);
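
// Illustrative examples of how source-level checks map to the kinds above
// (assuming the types resolve as usual; the actual kind is computed during
// graph building from the resolved class):
//
//   obj instanceof String     -> kExactCheck          (final class, single compare)
//   obj instanceof ArrayList  -> kClassHierarchyCheck (walk the super class chain)
//   obj instanceof Number     -> kAbstractClassCheck  (abstract, start one up)
//   obj instanceof Runnable   -> kInterfaceCheck
//   obj instanceof Object[]   -> kArrayObjectCheck    (array, non-primitive component)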
7588
7589 // Note: HTypeCheckInstruction is just a helper class, not an abstract instruction with an
7590 // `IsTypeCheckInstruction()`. (New virtual methods in the HInstruction class have a high cost.)
7591 class HTypeCheckInstruction : public HVariableInputSizeInstruction {
7592 public:
7593 HTypeCheckInstruction(InstructionKind kind,
7594 DataType::Type type,
7595 HInstruction* object,
7596 HInstruction* target_class_or_null,
7597 TypeCheckKind check_kind,
7598 Handle<mirror::Class> klass,
7599 uint32_t dex_pc,
7600 ArenaAllocator* allocator,
7601 HIntConstant* bitstring_path_to_root,
7602 HIntConstant* bitstring_mask,
7603 SideEffects side_effects)
7604 : HVariableInputSizeInstruction(
7605 kind,
7606 type,
7607 side_effects,
7608 dex_pc,
7609 allocator,
7610 /* number_of_inputs= */ check_kind == TypeCheckKind::kBitstringCheck ? 4u : 2u,
7611 kArenaAllocTypeCheckInputs),
7612 klass_(klass) {
7613 SetPackedField<TypeCheckKindField>(check_kind);
7614 SetPackedFlag<kFlagMustDoNullCheck>(true);
7615 SetPackedFlag<kFlagValidTargetClassRTI>(false);
7616 SetRawInputAt(0, object);
7617 SetRawInputAt(1, target_class_or_null);
7618 DCHECK_EQ(check_kind == TypeCheckKind::kBitstringCheck, bitstring_path_to_root != nullptr);
7619 DCHECK_EQ(check_kind == TypeCheckKind::kBitstringCheck, bitstring_mask != nullptr);
7620 if (check_kind == TypeCheckKind::kBitstringCheck) {
7621 DCHECK(target_class_or_null->IsNullConstant());
7622 SetRawInputAt(2, bitstring_path_to_root);
7623 SetRawInputAt(3, bitstring_mask);
7624 } else {
7625 DCHECK(target_class_or_null->IsLoadClass());
7626 }
7627 }
7628
7629 HLoadClass* GetTargetClass() const {
7630 DCHECK_NE(GetTypeCheckKind(), TypeCheckKind::kBitstringCheck);
7631 HInstruction* load_class = InputAt(1);
7632 DCHECK(load_class->IsLoadClass());
7633 return load_class->AsLoadClass();
7634 }
7635
7636 uint32_t GetBitstringPathToRoot() const {
7637 DCHECK_EQ(GetTypeCheckKind(), TypeCheckKind::kBitstringCheck);
7638 HInstruction* path_to_root = InputAt(2);
7639 DCHECK(path_to_root->IsIntConstant());
7640 return static_cast<uint32_t>(path_to_root->AsIntConstant()->GetValue());
7641 }
7642
7643 uint32_t GetBitstringMask() const {
7644 DCHECK_EQ(GetTypeCheckKind(), TypeCheckKind::kBitstringCheck);
7645 HInstruction* mask = InputAt(3);
7646 DCHECK(mask->IsIntConstant());
7647 return static_cast<uint32_t>(mask->AsIntConstant()->GetValue());
7648 }
7649
7650 bool IsClonable() const override { return true; }
7651 bool CanBeMoved() const override { return true; }
7652
7653 bool InstructionDataEquals(const HInstruction* other) const override {
7654 DCHECK(other->IsInstanceOf() || other->IsCheckCast()) << other->DebugName();
7655 return GetPackedFields() == down_cast<const HTypeCheckInstruction*>(other)->GetPackedFields();
7656 }
7657
7658 bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
7659 void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
7660 TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
7661 bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }
7662
7663 ReferenceTypeInfo GetTargetClassRTI() {
7664 if (GetPackedFlag<kFlagValidTargetClassRTI>()) {
7665 // Note: The is_exact flag from the return value should not be used.
7666 return ReferenceTypeInfo::CreateUnchecked(klass_, /* is_exact= */ true);
7667 } else {
7668 return ReferenceTypeInfo::CreateInvalid();
7669 }
7670 }
7671
7672 // Target class RTI is marked as valid by RTP if klass_ is admissible.
7673 void SetValidTargetClassRTI() {
7674 DCHECK(klass_ != nullptr);
7675 SetPackedFlag<kFlagValidTargetClassRTI>(true);
7676 }
7677
7678 Handle<mirror::Class> GetClass() const {
7679 return klass_;
7680 }
7681
7682 protected:
7683 DEFAULT_COPY_CONSTRUCTOR(TypeCheckInstruction);
7684
7685 private:
7686 static constexpr size_t kFieldTypeCheckKind = kNumberOfGenericPackedBits;
7687 static constexpr size_t kFieldTypeCheckKindSize =
7688 MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
7689 static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
7690 static constexpr size_t kFlagValidTargetClassRTI = kFlagMustDoNullCheck + 1;
7691 static constexpr size_t kNumberOfInstanceOfPackedBits = kFlagValidTargetClassRTI + 1;
7692 static_assert(kNumberOfInstanceOfPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
7693 using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;
7694
7695 Handle<mirror::Class> klass_;
7696 };
7697
7698 class HInstanceOf final : public HTypeCheckInstruction {
7699 public:
7700 HInstanceOf(HInstruction* object,
7701 HInstruction* target_class_or_null,
7702 TypeCheckKind check_kind,
7703 Handle<mirror::Class> klass,
7704 uint32_t dex_pc,
7705 ArenaAllocator* allocator,
7706 HIntConstant* bitstring_path_to_root,
7707 HIntConstant* bitstring_mask)
7708 : HTypeCheckInstruction(kInstanceOf,
7709 DataType::Type::kBool,
7710 object,
7711 target_class_or_null,
7712 check_kind,
7713 klass,
7714 dex_pc,
7715 allocator,
7716 bitstring_path_to_root,
7717 bitstring_mask,
7718 SideEffectsForArchRuntimeCalls(check_kind)) {}
7719
7720 bool IsClonable() const override { return true; }
7721
7722 bool NeedsEnvironment() const override {
7723 return CanCallRuntime(GetTypeCheckKind());
7724 }
7725
7726 static bool CanCallRuntime(TypeCheckKind check_kind) {
7727 // TODO: Re-evaluate now that mips codegen has been removed.
7728 return check_kind != TypeCheckKind::kExactCheck;
7729 }
7730
7731 static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) {
7732 return CanCallRuntime(check_kind) ? SideEffects::CanTriggerGC() : SideEffects::None();
7733 }
7734
7735 DECLARE_INSTRUCTION(InstanceOf);
7736
7737 protected:
7738 DEFAULT_COPY_CONSTRUCTOR(InstanceOf);
7739 };
7740
7741 class HBoundType final : public HExpression<1> {
7742 public:
7743 explicit HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc)
7744 : HExpression(kBoundType, DataType::Type::kReference, SideEffects::None(), dex_pc),
7745 upper_bound_(ReferenceTypeInfo::CreateInvalid()) {
7746 SetPackedFlag<kFlagUpperCanBeNull>(true);
7747 SetPackedFlag<kFlagCanBeNull>(true);
7748 DCHECK_EQ(input->GetType(), DataType::Type::kReference);
7749 SetRawInputAt(0, input);
7750 }
7751
7752 bool InstructionDataEquals(const HInstruction* other) const override;
7753 bool IsClonable() const override { return true; }
7754
7755 // {Get,Set}Upper* should only be used in reference type propagation.
7756 const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; }
7757 bool GetUpperCanBeNull() const { return GetPackedFlag<kFlagUpperCanBeNull>(); }
7758 void SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null);
7759
7760 void SetCanBeNull(bool can_be_null) {
7761 DCHECK(GetUpperCanBeNull() || !can_be_null);
7762 SetPackedFlag<kFlagCanBeNull>(can_be_null);
7763 }
7764
7765 bool CanBeNull() const override { return GetPackedFlag<kFlagCanBeNull>(); }
7766
7767 DECLARE_INSTRUCTION(BoundType);
7768
7769 protected:
7770 DEFAULT_COPY_CONSTRUCTOR(BoundType);
7771
7772 private:
7773 // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
7774 // is false then CanBeNull() cannot be true).
7775 static constexpr size_t kFlagUpperCanBeNull = kNumberOfGenericPackedBits;
7776 static constexpr size_t kFlagCanBeNull = kFlagUpperCanBeNull + 1;
7777 static constexpr size_t kNumberOfBoundTypePackedBits = kFlagCanBeNull + 1;
7778 static_assert(kNumberOfBoundTypePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
7779
7780 // Encodes the uppermost class that this instruction can have. In other words,
7781 // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()).
7782 // It is used to bound the type in cases like:
7783 // if (x instanceof ClassX) {
7784 // // upper_bound_ will be ClassX.
7785 // }
7786 ReferenceTypeInfo upper_bound_;
7787 };
7788
7789 class HCheckCast final : public HTypeCheckInstruction {
7790 public:
7791 HCheckCast(HInstruction* object,
7792 HInstruction* target_class_or_null,
7793 TypeCheckKind check_kind,
7794 Handle<mirror::Class> klass,
7795 uint32_t dex_pc,
7796 ArenaAllocator* allocator,
7797 HIntConstant* bitstring_path_to_root,
7798 HIntConstant* bitstring_mask)
7799 : HTypeCheckInstruction(kCheckCast,
7800 DataType::Type::kVoid,
7801 object,
7802 target_class_or_null,
7803 check_kind,
7804 klass,
7805 dex_pc,
7806 allocator,
7807 bitstring_path_to_root,
7808 bitstring_mask,
7809 SideEffects::CanTriggerGC()) {}
7810
7811 bool IsClonable() const override { return true; }
7812 bool NeedsEnvironment() const override {
7813 // Instruction may throw a CheckCastError.
7814 return true;
7815 }
7816
7817 bool CanThrow() const override { return true; }
7818
7819 DECLARE_INSTRUCTION(CheckCast);
7820
7821 protected:
7822 DEFAULT_COPY_CONSTRUCTOR(CheckCast);
7823 };
7824
7825 /**
7826 * @brief Memory barrier types (see "The JSR-133 Cookbook for Compiler Writers").
7827 * @details We define the combined barrier types that are actually required
7828 * by the Java Memory Model, rather than using exactly the terminology from
7829 * the JSR-133 cookbook. These should, in many cases, be replaced by acquire/release
7830 * primitives. Note that the JSR-133 cookbook generally does not deal with
7831 * store atomicity issues, and the recipes there are not always entirely sufficient.
7832 * The current recipe is as follows:
7833 * -# Use AnyStore ~= (LoadStore | StoreStore) ~= release barrier before volatile store.
7834 * -# Use AnyAny barrier after volatile store. (StoreLoad is as expensive.)
7835 * -# Use LoadAny barrier ~= (LoadLoad | LoadStore) ~= acquire barrier after each volatile load.
7836 * -# Use StoreStore barrier after all stores but before return from any constructor whose
7837 * class has final fields.
7838 * -# Use NTStoreStore to order non-temporal stores with respect to all later
7839 * store-to-memory instructions. Only generated together with non-temporal stores.
7840 */
7841 enum MemBarrierKind {
7842 kAnyStore,
7843 kLoadAny,
7844 kStoreStore,
7845 kAnyAny,
7846 kNTStoreStore,
7847 kLastBarrierKind = kNTStoreStore
7848 };
7849 std::ostream& operator<<(std::ostream& os, MemBarrierKind kind);
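
// A sketch of the recipe above applied around volatile accesses (illustrative
// pseudo-IR; the barriers may instead be folded into acquire/release accesses):
//
//   HMemoryBarrier(kAnyStore)   // release ~= LoadStore | StoreStore
//   ... volatile store ...
//   HMemoryBarrier(kAnyAny)     // full barrier after the volatile store
//
//   ... volatile load ...
//   HMemoryBarrier(kLoadAny)    // acquire ~= LoadLoad | LoadStore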
7850
7851 class HMemoryBarrier final : public HExpression<0> {
7852 public:
7853 explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc)
7854 : HExpression(kMemoryBarrier,
7855 SideEffects::AllWritesAndReads(), // Assume write/read on all fields/arrays.
7856 dex_pc) {
7857 SetPackedField<BarrierKindField>(barrier_kind);
7858 }
7859
7860 bool IsClonable() const override { return true; }
7861
7862 MemBarrierKind GetBarrierKind() { return GetPackedField<BarrierKindField>(); }
7863
7864 DECLARE_INSTRUCTION(MemoryBarrier);
7865
7866 protected:
7867 DEFAULT_COPY_CONSTRUCTOR(MemoryBarrier);
7868
7869 private:
7870 static constexpr size_t kFieldBarrierKind = HInstruction::kNumberOfGenericPackedBits;
7871 static constexpr size_t kFieldBarrierKindSize =
7872 MinimumBitsToStore(static_cast<size_t>(kLastBarrierKind));
7873 static constexpr size_t kNumberOfMemoryBarrierPackedBits =
7874 kFieldBarrierKind + kFieldBarrierKindSize;
7875 static_assert(kNumberOfMemoryBarrierPackedBits <= kMaxNumberOfPackedBits,
7876 "Too many packed fields.");
7877 using BarrierKindField = BitField<MemBarrierKind, kFieldBarrierKind, kFieldBarrierKindSize>;
7878 };
7879
7880 // A constructor fence orders all prior stores to fields that could be accessed via a final field of
7881 // the specified object(s), with respect to any subsequent store that might "publish"
7882 // (i.e. make visible) the specified object to another thread.
7883 //
7884 // JLS 17.5.1 "Semantics of final fields" states that a freeze action happens
7885 // for all final fields (that were set) at the end of the invoked constructor.
7886 //
7887 // The constructor fence models the freeze actions for the final fields of an object
7888 // being constructed (semantically at the end of the constructor). Constructor fences
7889 // have a per-object affinity; two separate objects being constructed get two separate
7890 // constructor fences.
7891 //
7892 // (Note that if calling a super-constructor or forwarding to another constructor,
7893 // the freezes happen at the end of *that* constructor being invoked.)
7894 //
7895 // The memory model guarantees that when the object being constructed is "published" after
7896 // constructor completion (i.e. escapes the current thread via a store), then any final field
7897 // writes must be observable on other threads (once they observe that publication).
7898 //
7899 // Further, anything written before the freeze, and read by dereferencing through the final field,
7900 // must also be visible (so final object field could itself have an object with non-final fields;
7901 // yet the freeze must also extend to them).
7902 //
7903 // Constructor example:
7904 //
7905 // class HasFinal {
7906 // final int field; Optimizing IR for <init>()V:
7907 // HasFinal() {
7908 // field = 123; HInstanceFieldSet(this, HasFinal.field, 123)
7909 // // freeze(this.field); HConstructorFence(this)
7910 // } HReturn
7911 // }
7912 //
7913 // HConstructorFence can serve double duty as a fence for new-instance/new-array allocations of
7914 // already-initialized classes; in that case the allocation must act as a "default-initializer"
7915 // of the object, which effectively writes the class pointer "final field".
7916 //
7917 // For example, we can model default-initialization as roughly the equivalent of the following:
7918 //
7919 // class Object {
7920 // private final Class header;
7921 // }
7922 //
7923 // Java code: Optimizing IR:
7924 //
7925 // T new_instance<T>() {
7926 // Object obj = allocate_memory(T.class.size); obj = HInvoke(art_quick_alloc_object, T)
7927 // obj.header = T.class; // header write is done by above call.
7928 // // freeze(obj.header) HConstructorFence(obj)
7929 // return (T)obj;
7930 // }
7931 //
7932 // See also:
7933 // * DexCompilationUnit::RequiresConstructorBarrier
7934 // * QuasiAtomic::ThreadFenceForConstructor
7935 //
7936 class HConstructorFence final : public HVariableInputSizeInstruction {
7937 // A fence has variable inputs because the inputs can be removed
7938 // after the prepare_for_register_allocation phase.
7939 // (TODO: In the future a fence could freeze multiple objects
7940 // after merging two fences together.)
7941 public:
7942 // `fence_object` is the reference that needs to be protected for correct publication.
7943 //
7944 // It makes sense in the following situations:
7945 // * <init> constructors, it's the "this" parameter (i.e. HParameterValue, s.t. IsThis() == true).
7946 // * new-instance-like instructions, it's the return value (i.e. HNewInstance).
7947 //
7948 // After construction the `fence_object` becomes the 0th input.
7949 // This is not an input in a real sense, but just a convenient place to stash the information
7950 // about the associated object.
7951 HConstructorFence(HInstruction* fence_object,
7952 uint32_t dex_pc,
7953 ArenaAllocator* allocator)
7954 // We strongly suspect there is no more accurate way to express the fine-grained reordering
7955 // constraints described in the class header. We claim that these SideEffects constraints
7956 // enforce a superset of the real constraints.
7957 //
7958 // The ordering described above is conservatively modeled with SideEffects as follows:
7959 //
7960 // * To prevent reordering of the publication stores:
7961 // ----> "Reads of objects" is the initial SideEffect.
7962 // * For every primitive final field store in the constructor:
7963 // ----> Union that field's type as a read (e.g. "Read of T") into the SideEffect.
7964 // * If there are any stores to reference final fields in the constructor:
7965 // ----> Use a more conservative "AllReads" SideEffect because any stores to any references
7966 // that are reachable from `fence_object` also need to be prevented for reordering
7967 // (and we do not want to do alias analysis to figure out what those stores are).
7968 //
7969 // In the implementation, this initially starts out as an "all reads" side effect; this is an
7970 // even more conservative approach than the one described above, and prevents all of the
7971 // above reordering without analyzing any of the instructions in the constructor.
7972 //
7973 // If in a later phase we discover that there are no writes to reference final fields,
7974 // we can refine the side effect to a smaller set of type reads (see above constraints).
7975 : HVariableInputSizeInstruction(kConstructorFence,
7976 SideEffects::AllReads(),
7977 dex_pc,
7978 allocator,
7979 /* number_of_inputs= */ 1,
7980 kArenaAllocConstructorFenceInputs) {
7981 DCHECK(fence_object != nullptr);
7982 SetRawInputAt(0, fence_object);
7983 }
7984
7985 // The object associated with this constructor fence.
7986 //
7987 // (Note: This will be null after the prepare_for_register_allocation phase,
7988 // as all constructor fence inputs are removed there).
7989 HInstruction* GetFenceObject() const {
7990 return InputAt(0);
7991 }
7992
7993 // Find all the HConstructorFence uses (`fence_use`) for `this` and:
7994 // - Delete `fence_use` from `this`'s use list.
7995 // - Delete `this` from `fence_use`'s inputs list.
7996 // - If the `fence_use` is dead, remove it from the graph.
7997 //
7998 // A fence is considered dead once it no longer has any uses
7999 // and all of the inputs are dead.
8000 //
8001 // This must *not* be called during/after prepare_for_register_allocation,
8002 // because that removes all the inputs to the fences but the fence is actually
8003 // still considered live.
8004 //
8005 // Returns how many HConstructorFence instructions were removed from the graph.
8006 static size_t RemoveConstructorFences(HInstruction* instruction);
8007
8008 // Combine all inputs of `this` and `other` instruction and remove
8009 // `other` from the graph.
8010 //
8011 // Inputs are unique after the merge.
8012 //
8013 // Requirement: `this` must not be the same as `other`.
8014 void Merge(HConstructorFence* other);
8015
8016 // Check if this constructor fence is protecting
8017 // an HNewInstance or HNewArray that is also the immediate
8018 // predecessor of `this`.
8019 //
8020 // If `ignore_inputs` is true, then the immediate predecessor doesn't need
8021 // to be one of the inputs of `this`.
8022 //
8023 // Returns the associated HNewArray or HNewInstance,
8024 // or null otherwise.
8025 HInstruction* GetAssociatedAllocation(bool ignore_inputs = false);
8026
8027 DECLARE_INSTRUCTION(ConstructorFence);
8028
8029 protected:
8030 DEFAULT_COPY_CONSTRUCTOR(ConstructorFence);
8031 };
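
// Illustrative sketch of Merge(): given two adjacent fences f1 (protecting obj1)
// and f2 (protecting obj2) with no publishing store in between, a pass may call
//
//   f1->Merge(f2);
//
// after which f1's inputs are the union {obj1, obj2} (duplicates removed) and f2
// has been removed from the graph.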
8032
8033 class HMonitorOperation final : public HExpression<1> {
8034 public:
8035 enum class OperationKind {
8036 kEnter,
8037 kExit,
8038 kLast = kExit
8039 };
8040
8041 HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc)
8042 : HExpression(kMonitorOperation,
8043 SideEffects::AllExceptGCDependency(), // Assume write/read on all fields/arrays.
8044 dex_pc) {
8045 SetPackedField<OperationKindField>(kind);
8046 SetRawInputAt(0, object);
8047 }
8048
8049 // The instruction may call into the runtime, so we need an environment.
8050 bool NeedsEnvironment() const override { return true; }
8051
8052 bool CanThrow() const override {
8053 // Verifier guarantees that monitor-exit cannot throw.
8054 // This is important because it allows the HGraphBuilder to remove
8055 // a dead throw-catch loop generated for `synchronized` blocks/methods.
8056 return IsEnter();
8057 }
8058
8059 OperationKind GetOperationKind() const { return GetPackedField<OperationKindField>(); }
8060 bool IsEnter() const { return GetOperationKind() == OperationKind::kEnter; }
8061
8062 DECLARE_INSTRUCTION(MonitorOperation);
8063
8064 protected:
8065 DEFAULT_COPY_CONSTRUCTOR(MonitorOperation);
8066
8067 private:
8068 static constexpr size_t kFieldOperationKind = HInstruction::kNumberOfGenericPackedBits;
8069 static constexpr size_t kFieldOperationKindSize =
8070 MinimumBitsToStore(static_cast<size_t>(OperationKind::kLast));
8071 static constexpr size_t kNumberOfMonitorOperationPackedBits =
8072 kFieldOperationKind + kFieldOperationKindSize;
8073 static_assert(kNumberOfMonitorOperationPackedBits <= HInstruction::kMaxNumberOfPackedBits,
8074 "Too many packed fields.");
8075 using OperationKindField = BitField<OperationKind, kFieldOperationKind, kFieldOperationKindSize>;
8076 };
8077
8078 class HSelect final : public HExpression<3> {
8079 public:
8080 HSelect(HInstruction* condition,
8081 HInstruction* true_value,
8082 HInstruction* false_value,
8083 uint32_t dex_pc)
8084 : HExpression(kSelect, HPhi::ToPhiType(true_value->GetType()), SideEffects::None(), dex_pc) {
8085 DCHECK_EQ(HPhi::ToPhiType(true_value->GetType()), HPhi::ToPhiType(false_value->GetType()));
8086
8087 // First input must be `true_value` or `false_value` to allow codegens to
8088 // use the SameAsFirstInput allocation policy. We make it `false_value`, so
8089 // that architectures which implement HSelect as a conditional move also
8090 // will not need to invert the condition.
8091 SetRawInputAt(0, false_value);
8092 SetRawInputAt(1, true_value);
8093 SetRawInputAt(2, condition);
8094 }
8095
8096 bool IsClonable() const override { return true; }
8097 HInstruction* GetFalseValue() const { return InputAt(0); }
8098 HInstruction* GetTrueValue() const { return InputAt(1); }
8099 HInstruction* GetCondition() const { return InputAt(2); }
8100
8101 bool CanBeMoved() const override { return true; }
8102 bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
8103 return true;
8104 }
8105
8106 bool CanBeNull() const override {
8107 return GetTrueValue()->CanBeNull() || GetFalseValue()->CanBeNull();
8108 }
8109
8110 void UpdateType() {
8111 DCHECK_EQ(HPhi::ToPhiType(GetTrueValue()->GetType()),
8112 HPhi::ToPhiType(GetFalseValue()->GetType()));
8113 SetPackedField<TypeField>(HPhi::ToPhiType(GetTrueValue()->GetType()));
8114 }
8115
8116 DECLARE_INSTRUCTION(Select);
8117
8118 protected:
8119 DEFAULT_COPY_CONSTRUCTOR(Select);
8120 };
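
// Illustrative construction for `x = cond ? a : b` (names are placeholders),
// showing how the input order above maps onto the accessors:
//
//   HSelect* select = new (allocator) HSelect(cond, /* true_value= */ a,
//                                             /* false_value= */ b, dex_pc);
//   // select->GetFalseValue() == b  (input 0, eligible for SameAsFirstInput)
//   // select->GetTrueValue()  == a  (input 1)
//   // select->GetCondition()  == cond  (input 2)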
8121
8122 class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> {
8123 public:
8124 MoveOperands(Location source,
8125 Location destination,
8126 DataType::Type type,
8127 HInstruction* instruction)
8128 : source_(source), destination_(destination), type_(type), instruction_(instruction) {}
8129
8130 Location GetSource() const { return source_; }
8131 Location GetDestination() const { return destination_; }
8132
8133 void SetSource(Location value) { source_ = value; }
8134 void SetDestination(Location value) { destination_ = value; }
8135
8136 // The parallel move resolver marks moves as "in-progress" by clearing the
8137 // destination (but not the source).
8138 Location MarkPending() {
8139 DCHECK(!IsPending());
8140 Location dest = destination_;
8141 destination_ = Location::NoLocation();
8142 return dest;
8143 }
8144
8145 void ClearPending(Location dest) {
8146 DCHECK(IsPending());
8147 destination_ = dest;
8148 }
8149
8150 bool IsPending() const {
8151 DCHECK(source_.IsValid() || destination_.IsInvalid());
8152 return destination_.IsInvalid() && source_.IsValid();
8153 }
8154
8155 // True if this blocks a move from the given location.
8156 bool Blocks(Location loc) const {
8157 return !IsEliminated() && source_.OverlapsWith(loc);
8158 }
8159
8160 // A move is redundant if it's been eliminated, if its source and
8161 // destination are the same, or if its destination is unneeded.
8162 bool IsRedundant() const {
8163 return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_);
8164 }
8165
8166 // We clear both operands to indicate a move that has been eliminated.
8167 void Eliminate() {
8168 source_ = destination_ = Location::NoLocation();
8169 }
8170
8171 bool IsEliminated() const {
8172 DCHECK_IMPLIES(source_.IsInvalid(), destination_.IsInvalid());
8173 return source_.IsInvalid();
8174 }
8175
8176 DataType::Type GetType() const { return type_; }
8177
8178 bool Is64BitMove() const {
8179 return DataType::Is64BitType(type_);
8180 }
8181
8182 HInstruction* GetInstruction() const { return instruction_; }
8183
8184 private:
8185 Location source_;
8186 Location destination_;
8187 // The type this move is for.
8188 DataType::Type type_;
8189 // The instruction this move is associated with. Null when this move is
8190 // for moving an input into the expected location of its user (including a phi user).
8191 // This is only used in debug mode, to ensure we do not connect interval siblings
8192 // in the same parallel move.
8193 HInstruction* instruction_;
8194 };
8195
8196 std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs);
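
// Illustrative sketch of the "in-progress" protocol used by the parallel move
// resolver (the move itself is a placeholder):
//
//   MoveOperands* move = ...;               // hypothetical pending candidate
//   Location dest = move->MarkPending();    // destination_ cleared, move pending
//   DCHECK(move->IsPending());
//   ...                                     // resolve moves blocked on `dest`
//   move->ClearPending(dest);               // destination_ restored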
8197
8198 static constexpr size_t kDefaultNumberOfMoves = 4;
8199
8200 class HParallelMove final : public HExpression<0> {
8201 public:
8202 explicit HParallelMove(ArenaAllocator* allocator, uint32_t dex_pc = kNoDexPc)
8203 : HExpression(kParallelMove, SideEffects::None(), dex_pc),
8204 moves_(allocator->Adapter(kArenaAllocMoveOperands)) {
8205 moves_.reserve(kDefaultNumberOfMoves);
8206 }
8207
8208 void AddMove(Location source,
8209 Location destination,
8210 DataType::Type type,
8211 HInstruction* instruction) {
8212 DCHECK(source.IsValid());
8213 DCHECK(destination.IsValid());
8214 if (kIsDebugBuild) {
8215 if (instruction != nullptr) {
8216 for (const MoveOperands& move : moves_) {
8217 if (move.GetInstruction() == instruction) {
8218 // Special case the situation where the move is for the spill slot
8219 // of the instruction.
8220 if ((GetPrevious() == instruction)
8221 || ((GetPrevious() == nullptr)
8222 && instruction->IsPhi()
8223 && instruction->GetBlock() == GetBlock())) {
8224 DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind())
8225 << "Doing parallel moves for the same instruction.";
8226 } else {
8227 DCHECK(false) << "Doing parallel moves for the same instruction.";
8228 }
8229 }
8230 }
8231 }
8232 for (const MoveOperands& move : moves_) {
8233 DCHECK(!destination.OverlapsWith(move.GetDestination()))
8234 << "Overlapped destination for two moves in a parallel move: "
8235 << move.GetSource() << " ==> " << move.GetDestination() << " and "
8236 << source << " ==> " << destination << " for " << SafePrint(instruction);
8237 }
8238 }
8239 moves_.emplace_back(source, destination, type, instruction);
8240 }
8241
8242 MoveOperands* MoveOperandsAt(size_t index) {
8243 return &moves_[index];
8244 }
8245
8246 size_t NumMoves() const { return moves_.size(); }
8247
8248 DECLARE_INSTRUCTION(ParallelMove);
8249
8250 protected:
8251 DEFAULT_COPY_CONSTRUCTOR(ParallelMove);
8252
8253 private:
8254 ArenaVector<MoveOperands> moves_;
8255 };
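
// Illustrative use (locations and type are placeholders):
//
//   HParallelMove* moves = new (allocator) HParallelMove(allocator);
//   moves->AddMove(Location::RegisterLocation(0),
//                  Location::StackSlot(16),
//                  DataType::Type::kInt32,
//                  /* instruction= */ nullptr);
//
// In debug builds AddMove() checks that no two moves write overlapping
// destinations and that interval siblings are not connected in the same
// parallel move.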
8256
8257 class HBitwiseNegatedRight final : public HBinaryOperation {
8258 public:
8259 HBitwiseNegatedRight(DataType::Type result_type,
8260 InstructionKind op,
8261 HInstruction* left,
8262 HInstruction* right,
8263 uint32_t dex_pc = kNoDexPc)
8264 : HBinaryOperation(
8265 kBitwiseNegatedRight, result_type, left, right, SideEffects::None(), dex_pc),
8266 op_kind_(op) {
8267 DCHECK(op == HInstruction::kAnd || op == HInstruction::kOr || op == HInstruction::kXor) << op;
8268 }
8269
8270 template <typename T, typename U>
8271 auto Compute(T x, U y) const -> decltype(x & ~y) {
8272 static_assert(std::is_same<decltype(x & ~y), decltype(x | ~y)>::value &&
8273 std::is_same<decltype(x & ~y), decltype(x ^ ~y)>::value,
8274 "Inconsistent negated bitwise types");
8275 switch (op_kind_) {
8276 case HInstruction::kAnd:
8277 return x & ~y;
8278 case HInstruction::kOr:
8279 return x | ~y;
8280 case HInstruction::kXor:
8281 return x ^ ~y;
8282 default:
8283 LOG(FATAL) << "Unreachable";
8284 UNREACHABLE();
8285 }
8286 }
8287
8288 bool InstructionDataEquals(const HInstruction* other) const override {
8289 return op_kind_ == other->AsBitwiseNegatedRight()->op_kind_;
8290 }
8291
8292 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
8293 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue(), y->GetValue()));
8294 }
8295
8296 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
8297 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue(), y->GetValue()));
8298 }
8299
8300 InstructionKind GetOpKind() const { return op_kind_; }
8301
8302 DECLARE_INSTRUCTION(BitwiseNegatedRight);
8303
8304 protected:
8305 DEFAULT_COPY_CONSTRUCTOR(BitwiseNegatedRight);
8306
8307 private:
8308 // Specifies the bitwise operation, which will be then negated.
8309 const InstructionKind op_kind_;
8310 };
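
// Illustrative: the instruction simplifier may rewrite `a & ~b` into
//
//   HBitwiseNegatedRight(DataType::Type::kInt32, HInstruction::kAnd, a, b)
//
// so that, e.g., arm64 codegen can emit a single BIC instead of MVN + AND
// (ORN and EON similarly cover the kOr and kXor cases).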
8311
8312 // This instruction computes an intermediate address pointing in the 'middle' of an object. The
8313 // result pointer cannot be handled by GC, so extra care is taken to make sure that this value is
8314 // never used across anything that can trigger GC.
8315 // The result of this instruction is not a pointer in the sense of `DataType::Type::kReference`,
8316 // so we represent it with the type `DataType::Type::kInt32`.
8317 class HIntermediateAddress final : public HExpression<2> {
8318 public:
8319 HIntermediateAddress(HInstruction* base_address, HInstruction* offset, uint32_t dex_pc)
8320 : HExpression(kIntermediateAddress,
8321 DataType::Type::kInt32,
8322 SideEffects::DependsOnGC(),
8323 dex_pc) {
8324 DCHECK_EQ(DataType::Size(DataType::Type::kInt32),
8325 DataType::Size(DataType::Type::kReference))
8326 << "kPrimInt and kPrimNot have different sizes.";
8327 SetRawInputAt(0, base_address);
8328 SetRawInputAt(1, offset);
8329 }
8330
8331 bool IsClonable() const override { return true; }
8332 bool CanBeMoved() const override { return true; }
8333 bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
8334 return true;
8335 }
8336 bool IsActualObject() const override { return false; }
8337
8338 HInstruction* GetBaseAddress() const { return InputAt(0); }
8339 HInstruction* GetOffset() const { return InputAt(1); }
8340
8341 DECLARE_INSTRUCTION(IntermediateAddress);
8342
8343 protected:
8344 DEFAULT_COPY_CONSTRUCTOR(IntermediateAddress);
8345 };
8346
8347
8348 } // namespace art
8349
8350 #include "nodes_vector.h"
8351
8352 #if defined(ART_ENABLE_CODEGEN_arm) || defined(ART_ENABLE_CODEGEN_arm64)
8353 #include "nodes_shared.h"
8354 #endif
8355 #if defined(ART_ENABLE_CODEGEN_x86) || defined(ART_ENABLE_CODEGEN_x86_64)
8356 #include "nodes_x86.h"
8357 #endif
8358 #if defined(ART_ENABLE_CODEGEN_riscv64)
8359 #include "nodes_riscv64.h"
8360 #endif
8361
8362 namespace art HIDDEN {
8363
8364 class OptimizingCompilerStats;
8365
8366 class HGraphVisitor : public ValueObject {
8367 public:
8368 explicit HGraphVisitor(HGraph* graph, OptimizingCompilerStats* stats = nullptr)
8369 : stats_(stats),
8370 graph_(graph) {}
8371 virtual ~HGraphVisitor() {}
8372
8373 virtual void VisitInstruction([[maybe_unused]] HInstruction* instruction) {}
8374 virtual void VisitBasicBlock(HBasicBlock* block);
8375
8376 // Visit the graph following basic block insertion order.
8377 void VisitInsertionOrder();
8378
8379 // Visit the graph following dominator tree reverse post-order.
8380 void VisitReversePostOrder();
8381
8382 HGraph* GetGraph() const { return graph_; }
8383
8384 // Visit functions for instruction classes.
8385 #define DECLARE_VISIT_INSTRUCTION(name, super) \
8386 virtual void Visit##name(H##name* instr) { VisitInstruction(instr); }
8387
8388 FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)
8389
8390 #undef DECLARE_VISIT_INSTRUCTION
8391
8392 protected:
8393 void VisitPhis(HBasicBlock* block);
8394 void VisitNonPhiInstructions(HBasicBlock* block);
8395
8396 OptimizingCompilerStats* stats_;
8397
8398 private:
8399 HGraph* const graph_;
8400
8401 DISALLOW_COPY_AND_ASSIGN(HGraphVisitor);
8402 };
8403
8404 class HGraphDelegateVisitor : public HGraphVisitor {
8405 public:
8406 explicit HGraphDelegateVisitor(HGraph* graph, OptimizingCompilerStats* stats = nullptr)
8407 : HGraphVisitor(graph, stats) {}
8408 virtual ~HGraphDelegateVisitor() {}
8409
8410 // Visit functions that delegate to the super class.
8411 #define DECLARE_VISIT_INSTRUCTION(name, super) \
8412 void Visit##name(H##name* instr) override { Visit##super(instr); }
8413
8414 FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)
8415
8416 #undef DECLARE_VISIT_INSTRUCTION
8417
8418 private:
8419 DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor);
8420 };
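
// A minimal sketch of a delegate visitor (illustrative only): because each
// generated Visit##name delegates to its super class visit, overriding
// VisitBinaryOperation observes every concrete binary operation (Add, Sub, ...).
//
//   class CountBinOpsVisitor final : public HGraphDelegateVisitor {
//    public:
//     explicit CountBinOpsVisitor(HGraph* graph) : HGraphDelegateVisitor(graph) {}
//     void VisitBinaryOperation([[maybe_unused]] HBinaryOperation* instr) override { ++count_; }
//     size_t count_ = 0;
//   };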
8421
8422 // Create a clone of the instruction, insert it into the graph, replace the old
8423 // instruction with the clone, and remove the old instruction from the graph.
8424 HInstruction* ReplaceInstrOrPhiByClone(HInstruction* instr);
8425
8426 // Create a clone for each clonable instruction/phi and replace the original with the clone.
8427 //
8428 // Used for testing the individual instruction cloner.
8429 class CloneAndReplaceInstructionVisitor final : public HGraphDelegateVisitor {
8430 public:
8431 explicit CloneAndReplaceInstructionVisitor(HGraph* graph)
8432 : HGraphDelegateVisitor(graph), instr_replaced_by_clones_count_(0) {}
8433
8434 void VisitInstruction(HInstruction* instruction) override {
8435 if (instruction->IsClonable()) {
8436 ReplaceInstrOrPhiByClone(instruction);
8437 instr_replaced_by_clones_count_++;
8438 }
8439 }
8440
8441 size_t GetInstrReplacedByClonesCount() const { return instr_replaced_by_clones_count_; }
8442
8443 private:
8444 size_t instr_replaced_by_clones_count_;
8445
8446 DISALLOW_COPY_AND_ASSIGN(CloneAndReplaceInstructionVisitor);
8447 };
8448
8449 // Iterator over the blocks that are part of the loop; includes blocks which are part
8450 // of an inner loop. The blocks are iterated in order of increasing
8451 // block id.
8452 class HBlocksInLoopIterator : public ValueObject {
8453 public:
8454 explicit HBlocksInLoopIterator(const HLoopInformation& info)
8455 : blocks_in_loop_(info.GetBlocks()),
8456 blocks_(info.GetHeader()->GetGraph()->GetBlocks()),
8457 index_(0) {
8458 if (!blocks_in_loop_.IsBitSet(index_)) {
8459 Advance();
8460 }
8461 }
8462
8463 bool Done() const { return index_ == blocks_.size(); }
8464 HBasicBlock* Current() const { return blocks_[index_]; }
8465 void Advance() {
8466 ++index_;
8467 for (size_t e = blocks_.size(); index_ < e; ++index_) {
8468 if (blocks_in_loop_.IsBitSet(index_)) {
8469 break;
8470 }
8471 }
8472 }
8473
8474 private:
8475 const BitVector& blocks_in_loop_;
8476 const ArenaVector<HBasicBlock*>& blocks_;
8477 size_t index_;
8478
8479 DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator);
8480 };
8481
8482 // Iterator over the blocks that are part of the loop; includes blocks which are part
8483 // of an inner loop. The order in which the blocks are iterated is reverse
8484 // post order.
8485 class HBlocksInLoopReversePostOrderIterator : public ValueObject {
8486 public:
8487 explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info)
8488 : blocks_in_loop_(info.GetBlocks()),
8489 blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()),
8490 index_(0) {
8491 if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
8492 Advance();
8493 }
8494 }
8495
8496 bool Done() const { return index_ == blocks_.size(); }
8497 HBasicBlock* Current() const { return blocks_[index_]; }
8498 void Advance() {
8499 ++index_;
8500 for (size_t e = blocks_.size(); index_ < e; ++index_) {
8501 if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
8502 break;
8503 }
8504 }
8505 }
8506
8507 private:
8508 const BitVector& blocks_in_loop_;
8509 const ArenaVector<HBasicBlock*>& blocks_;
8510 size_t index_;
8511
8512 DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator);
8513 };
8514
8515 // Iterator over the blocks that are part of the loop; includes blocks which are part
8516 // of an inner loop. The order in which the blocks are iterated is post order.
8517 class HBlocksInLoopPostOrderIterator : public ValueObject {
8518 public:
8519 explicit HBlocksInLoopPostOrderIterator(const HLoopInformation& info)
8520 : blocks_in_loop_(info.GetBlocks()),
8521 blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()),
8522 index_(blocks_.size() - 1) {
8523 if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
8524 Advance();
8525 }
8526 }
8527
8528 bool Done() const { return index_ < 0; }
8529 HBasicBlock* Current() const { return blocks_[index_]; }
8530 void Advance() {
8531 --index_;
8532 for (; index_ >= 0; --index_) {
8533 if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
8534 break;
8535 }
8536 }
8537 }
8538
8539 private:
8540 const BitVector& blocks_in_loop_;
8541 const ArenaVector<HBasicBlock*>& blocks_;
8542
8543 int32_t index_;
8544
8545 DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopPostOrderIterator);
8546 };
8547
8548 // Returns the int64_t value of a properly typed constant.
8549 inline int64_t Int64FromConstant(HConstant* constant) {
8550 if (constant->IsIntConstant()) {
8551 return constant->AsIntConstant()->GetValue();
8552 } else if (constant->IsLongConstant()) {
8553 return constant->AsLongConstant()->GetValue();
8554 } else {
8555 DCHECK(constant->IsNullConstant()) << constant->DebugName();
8556 return 0;
8557 }
8558 }
8559
8560 // Returns true iff instruction is an integral constant (and sets value on success).
8561 inline bool IsInt64AndGet(HInstruction* instruction, /*out*/ int64_t* value) {
8562 if (instruction->IsIntConstant()) {
8563 *value = instruction->AsIntConstant()->GetValue();
8564 return true;
8565 } else if (instruction->IsLongConstant()) {
8566 *value = instruction->AsLongConstant()->GetValue();
8567 return true;
8568 } else if (instruction->IsNullConstant()) {
8569 *value = 0;
8570 return true;
8571 }
8572 return false;
8573 }
8574
8575 // Returns true iff instruction is the given integral constant.
8576 inline bool IsInt64Value(HInstruction* instruction, int64_t value) {
8577 int64_t val = 0;
8578 return IsInt64AndGet(instruction, &val) && val == value;
8579 }
8580
8581 // Returns true iff instruction is a zero bit pattern.
8582 inline bool IsZeroBitPattern(HInstruction* instruction) {
8583 return instruction->IsConstant() && instruction->AsConstant()->IsZeroBitPattern();
8584 }
8585
8586 // Implement HInstruction::Is##type() for concrete instructions.
8587 #define INSTRUCTION_TYPE_CHECK(type, super) \
8588 inline bool HInstruction::Is##type() const { return GetKind() == k##type; }
8589 FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
8590 #undef INSTRUCTION_TYPE_CHECK
8591
8592 // Implement HInstruction::Is##type() for abstract instructions.
8593 #define INSTRUCTION_TYPE_CHECK_RESULT(type, super) \
8594 std::is_base_of<BaseType, H##type>::value,
8595 #define INSTRUCTION_TYPE_CHECK(type, super) \
8596 inline bool HInstruction::Is##type() const { \
8597 DCHECK_LT(GetKind(), kLastInstructionKind); \
8598 using BaseType = H##type; \
8599 static constexpr bool results[] = { \
8600 FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK_RESULT) \
8601 }; \
8602 return results[static_cast<size_t>(GetKind())]; \
8603 }
8604
8605 FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
8606 #undef INSTRUCTION_TYPE_CHECK
8607 #undef INSTRUCTION_TYPE_CHECK_RESULT
8608
8609 #define INSTRUCTION_TYPE_CAST(type, super) \
8610 inline const H##type* HInstruction::As##type() const { \
8611 DCHECK(Is##type()); \
8612 return down_cast<const H##type*>(this); \
8613 } \
8614 inline H##type* HInstruction::As##type() { \
8615 DCHECK(Is##type()); \
8616 return down_cast<H##type*>(this); \
8617 } \
8618 inline const H##type* HInstruction::As##type##OrNull() const { \
8619 return Is##type() ? down_cast<const H##type*>(this) : nullptr; \
8620 } \
8621 inline H##type* HInstruction::As##type##OrNull() { \
8622 return Is##type() ? down_cast<H##type*>(this) : nullptr; \
8623 }
8624
8625 FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CAST)
8626 #undef INSTRUCTION_TYPE_CAST
8627
8628
8629 // Create space in `blocks` for adding `number_of_new_blocks` entries
8630 // starting right after location `after`. Blocks past `after` are moved accordingly.
8631 inline void MakeRoomFor(ArenaVector<HBasicBlock*>* blocks,
8632 size_t number_of_new_blocks,
8633 size_t after) {
8634 DCHECK_LT(after, blocks->size());
8635 size_t old_size = blocks->size();
8636 size_t new_size = old_size + number_of_new_blocks;
8637 blocks->resize(new_size);
8638 std::copy_backward(blocks->begin() + after + 1u, blocks->begin() + old_size, blocks->end());
8639 }
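
// Illustrative: with blocks = [B0, B1, B2], MakeRoomFor(&blocks, 2u, 1u) yields
// [B0, B1, <slot>, <slot>, B2]; B2 has been moved to the end and the two new
// slots after index 1 are left for the caller to fill.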
8640
8641 /*
8642 * Hunt "under the hood" of array lengths (leading to array references),
8643 * null checks (also leading to array references), and new arrays
8644 * (leading to the actual length). This makes it more likely related
8645 * instructions become actually comparable.
8646 */
8647 inline HInstruction* HuntForDeclaration(HInstruction* instruction) {
8648 while (instruction->IsArrayLength() ||
8649 instruction->IsNullCheck() ||
8650 instruction->IsNewArray()) {
8651 instruction = instruction->IsNewArray()
8652 ? instruction->AsNewArray()->GetLength()
8653 : instruction->InputAt(0);
8654 }
8655 return instruction;
8656 }
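
// Illustrative: for a bounds check on `a[i]` where `a` was just allocated, the
// length typically arrives as ArrayLength(NullCheck(NewArray(n))); hunting
// peels all three layers and returns `n`, so two such lengths for the same
// array become directly comparable.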
8657
8658 inline bool IsAddOrSub(const HInstruction* instruction) {
8659 return instruction->IsAdd() || instruction->IsSub();
8660 }
8661
8662 void RemoveEnvironmentUses(HInstruction* instruction);
8663 bool HasEnvironmentUsedByOthers(HInstruction* instruction);
8664 void ResetEnvironmentInputRecords(HInstruction* instruction);
8665
8666 // Detects an instruction that is >= 0. As long as the value is carried by
8667 // a single instruction, arithmetic wrap-around cannot occur.
8668 bool IsGEZero(HInstruction* instruction);
8669
8670 } // namespace art
8671
8672 #endif // ART_COMPILER_OPTIMIZING_NODES_H_
8673