1 /*
2 * Copyright (C) 2016 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "instruction_builder.h"
18
19 #include "art_method-inl.h"
20 #include "base/arena_bit_vector.h"
21 #include "base/bit_vector-inl.h"
22 #include "base/logging.h"
23 #include "block_builder.h"
24 #include "class_linker-inl.h"
25 #include "code_generator.h"
26 #include "data_type-inl.h"
27 #include "dex/bytecode_utils.h"
28 #include "dex/dex_instruction-inl.h"
29 #include "driver/compiler_options.h"
30 #include "driver/dex_compilation_unit.h"
31 #include "entrypoints/entrypoint_utils-inl.h"
32 #include "handle_cache-inl.h"
33 #include "imtable-inl.h"
34 #include "intrinsics.h"
35 #include "intrinsics_utils.h"
36 #include "jit/jit.h"
37 #include "jit/profiling_info.h"
38 #include "mirror/dex_cache.h"
39 #include "oat/oat_file.h"
40 #include "optimizing/data_type.h"
41 #include "optimizing_compiler_stats.h"
42 #include "reflective_handle_scope-inl.h"
43 #include "scoped_thread_state_change-inl.h"
44 #include "sharpening.h"
45 #include "ssa_builder.h"
46 #include "well_known_classes.h"
47
48 namespace art HIDDEN {
49
50 namespace {
51
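// Helper used by the access checks below: two classes are considered to be in the
// same (runtime) package if they have the same class loader and the same package
// part of their descriptors.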
52 class SamePackageCompare {
53 public:
  explicit SamePackageCompare(const DexCompilationUnit& dex_compilation_unit)
55 : dex_compilation_unit_(dex_compilation_unit) {}
56
  bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
58 if (klass->GetClassLoader() != dex_compilation_unit_.GetClassLoader().Get()) {
59 return false;
60 }
61 if (referrers_descriptor_ == nullptr) {
62 const DexFile* dex_file = dex_compilation_unit_.GetDexFile();
63 uint32_t referrers_method_idx = dex_compilation_unit_.GetDexMethodIndex();
64 referrers_descriptor_ =
65 dex_file->GetMethodDeclaringClassDescriptor(dex_file->GetMethodId(referrers_method_idx));
66 referrers_package_length_ = PackageLength(referrers_descriptor_);
67 }
68 std::string temp;
69 const char* klass_descriptor = klass->GetDescriptor(&temp);
70 size_t klass_package_length = PackageLength(klass_descriptor);
71 return (referrers_package_length_ == klass_package_length) &&
72 memcmp(referrers_descriptor_, klass_descriptor, referrers_package_length_) == 0;
73 };
74
75 private:
  static size_t PackageLength(const char* descriptor) {
77 const char* slash_pos = strrchr(descriptor, '/');
78 return (slash_pos != nullptr) ? static_cast<size_t>(slash_pos - descriptor) : 0u;
79 }
80
81 const DexCompilationUnit& dex_compilation_unit_;
82 const char* referrers_descriptor_ = nullptr;
83 size_t referrers_package_length_ = 0u;
84 };
85
86 } // anonymous namespace
87
HInstructionBuilder::HInstructionBuilder(HGraph* graph,
89 HBasicBlockBuilder* block_builder,
90 SsaBuilder* ssa_builder,
91 const DexFile* dex_file,
92 const CodeItemDebugInfoAccessor& accessor,
93 DataType::Type return_type,
94 const DexCompilationUnit* dex_compilation_unit,
95 const DexCompilationUnit* outer_compilation_unit,
96 CodeGenerator* code_generator,
97 OptimizingCompilerStats* compiler_stats,
98 ScopedArenaAllocator* local_allocator)
99 : allocator_(graph->GetAllocator()),
100 graph_(graph),
101 dex_file_(dex_file),
102 code_item_accessor_(accessor),
103 return_type_(return_type),
104 block_builder_(block_builder),
105 ssa_builder_(ssa_builder),
106 code_generator_(code_generator),
107 dex_compilation_unit_(dex_compilation_unit),
108 outer_compilation_unit_(outer_compilation_unit),
109 compilation_stats_(compiler_stats),
110 local_allocator_(local_allocator),
111 locals_for_(local_allocator->Adapter(kArenaAllocGraphBuilder)),
112 current_block_(nullptr),
113 current_locals_(nullptr),
114 latest_result_(nullptr),
115 current_this_parameter_(nullptr),
116 loop_headers_(local_allocator->Adapter(kArenaAllocGraphBuilder)),
117 class_cache_(std::less<dex::TypeIndex>(), local_allocator->Adapter(kArenaAllocGraphBuilder)) {
118 loop_headers_.reserve(kDefaultNumberOfLoops);
119 }
120
HBasicBlock* HInstructionBuilder::FindBlockStartingAt(uint32_t dex_pc) const {
122 return block_builder_->GetBlockAt(dex_pc);
123 }
124
inline ScopedArenaVector<HInstruction*>* HInstructionBuilder::GetLocalsFor(HBasicBlock* block) {
126 ScopedArenaVector<HInstruction*>* locals = &locals_for_[block->GetBlockId()];
127 const size_t vregs = graph_->GetNumberOfVRegs();
128 if (locals->size() == vregs) {
129 return locals;
130 }
131 return GetLocalsForWithAllocation(block, locals, vregs);
132 }
133
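// Slow path of GetLocalsFor(): resizes the local-value vector to the number of vregs
// and, for catch blocks, eagerly creates catch phis for every vreg that currently
// holds a value.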
ScopedArenaVector<HInstruction*>* HInstructionBuilder::GetLocalsForWithAllocation(
135 HBasicBlock* block,
136 ScopedArenaVector<HInstruction*>* locals,
137 const size_t vregs) {
138 DCHECK_NE(locals->size(), vregs);
139 locals->resize(vregs, nullptr);
140 if (block->IsCatchBlock()) {
141 // We record incoming inputs of catch phis at throwing instructions and
142 // must therefore eagerly create the phis. Phis for undefined vregs will
143 // be deleted when the first throwing instruction with the vreg undefined
144 // is encountered. Unused phis will be removed by dead phi analysis.
145 for (size_t i = 0; i < vregs; ++i) {
146 // No point in creating the catch phi if it is already undefined at
147 // the first throwing instruction.
148 HInstruction* current_local_value = (*current_locals_)[i];
149 if (current_local_value != nullptr) {
150 HPhi* phi = new (allocator_) HPhi(
151 allocator_,
152 i,
153 0,
154 current_local_value->GetType());
155 block->AddPhi(phi);
156 (*locals)[i] = phi;
157 }
158 }
159 }
160 return locals;
161 }
162
inline HInstruction* HInstructionBuilder::ValueOfLocalAt(HBasicBlock* block, size_t local) {
164 ScopedArenaVector<HInstruction*>* locals = GetLocalsFor(block);
165 return (*locals)[local];
166 }
167
void HInstructionBuilder::InitializeBlockLocals() {
169 current_locals_ = GetLocalsFor(current_block_);
170
171 if (current_block_->IsCatchBlock()) {
172 // Catch phis were already created and inputs collected from throwing sites.
173 if (kIsDebugBuild) {
174 // Make sure there was at least one throwing instruction which initialized
175 // locals (guaranteed by HGraphBuilder) and that all try blocks have been
176 // visited already (from HTryBoundary scoping and reverse post order).
177 bool catch_block_visited = false;
178 for (HBasicBlock* current : graph_->GetReversePostOrder()) {
179 if (current == current_block_) {
180 catch_block_visited = true;
181 } else if (current->IsTryBlock()) {
182 const HTryBoundary& try_entry = current->GetTryCatchInformation()->GetTryEntry();
183 if (try_entry.HasExceptionHandler(*current_block_)) {
184 DCHECK(!catch_block_visited) << "Catch block visited before its try block.";
185 }
186 }
187 }
188 DCHECK_EQ(current_locals_->size(), graph_->GetNumberOfVRegs())
189 << "No instructions throwing into a live catch block.";
190 }
191 } else if (current_block_->IsLoopHeader()) {
192 // If the block is a loop header, we know we only have visited the pre header
193 // because we are visiting in reverse post order. We create phis for all initialized
194 // locals from the pre header. Their inputs will be populated at the end of
195 // the analysis.
196 for (size_t local = 0; local < current_locals_->size(); ++local) {
197 HInstruction* incoming =
198 ValueOfLocalAt(current_block_->GetLoopInformation()->GetPreHeader(), local);
199 if (incoming != nullptr) {
200 HPhi* phi = new (allocator_) HPhi(
201 allocator_,
202 local,
203 0,
204 incoming->GetType());
205 current_block_->AddPhi(phi);
206 (*current_locals_)[local] = phi;
207 }
208 }
209
210 // Save the loop header so that the last phase of the analysis knows which
211 // blocks need to be updated.
212 loop_headers_.push_back(current_block_);
213 } else if (current_block_->GetPredecessors().size() > 0) {
214 // All predecessors have already been visited because we are visiting in reverse post order.
215 // We merge the values of all locals, creating phis if those values differ.
216 for (size_t local = 0; local < current_locals_->size(); ++local) {
217 bool one_predecessor_has_no_value = false;
218 bool is_different = false;
219 HInstruction* value = ValueOfLocalAt(current_block_->GetPredecessors()[0], local);
220
221 for (HBasicBlock* predecessor : current_block_->GetPredecessors()) {
222 HInstruction* current = ValueOfLocalAt(predecessor, local);
223 if (current == nullptr) {
224 one_predecessor_has_no_value = true;
225 break;
226 } else if (current != value) {
227 is_different = true;
228 }
229 }
230
231 if (one_predecessor_has_no_value) {
232 // If one predecessor has no value for this local, we trust the verifier has
233 // successfully checked that there is a store dominating any read after this block.
234 continue;
235 }
236
237 if (is_different) {
238 HInstruction* first_input = ValueOfLocalAt(current_block_->GetPredecessors()[0], local);
239 HPhi* phi = new (allocator_) HPhi(
240 allocator_,
241 local,
242 current_block_->GetPredecessors().size(),
243 first_input->GetType());
244 for (size_t i = 0; i < current_block_->GetPredecessors().size(); i++) {
245 HInstruction* pred_value = ValueOfLocalAt(current_block_->GetPredecessors()[i], local);
246 phi->SetRawInputAt(i, pred_value);
247 }
248 current_block_->AddPhi(phi);
249 value = phi;
250 }
251 (*current_locals_)[local] = value;
252 }
253 }
254 }
255
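// Called before each throwing instruction inside a try block: appends the current
// value of every dex register to the corresponding catch phi of each exception
// handler, pruning phis for registers that are undefined at this point.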
void HInstructionBuilder::PropagateLocalsToCatchBlocks() {
257 const HTryBoundary& try_entry = current_block_->GetTryCatchInformation()->GetTryEntry();
258 for (HBasicBlock* catch_block : try_entry.GetExceptionHandlers()) {
259 ScopedArenaVector<HInstruction*>* handler_locals = GetLocalsFor(catch_block);
260 DCHECK_EQ(handler_locals->size(), current_locals_->size());
261 for (size_t vreg = 0, e = current_locals_->size(); vreg < e; ++vreg) {
262 HInstruction* handler_value = (*handler_locals)[vreg];
263 if (handler_value == nullptr) {
264 // Vreg was undefined at a previously encountered throwing instruction
265 // and the catch phi was deleted. Do not record the local value.
266 continue;
267 }
268 DCHECK(handler_value->IsPhi());
269
270 HInstruction* local_value = (*current_locals_)[vreg];
271 if (local_value == nullptr) {
272 // This is the first instruction throwing into `catch_block` where
273 // `vreg` is undefined. Delete the catch phi.
274 catch_block->RemovePhi(handler_value->AsPhi());
275 (*handler_locals)[vreg] = nullptr;
276 } else {
277 // Vreg has been defined at all instructions throwing into `catch_block`
278 // encountered so far. Record the local value in the catch phi.
279 handler_value->AsPhi()->AddInput(local_value);
280 }
281 }
282 }
283 }
284
void HInstructionBuilder::AppendInstruction(HInstruction* instruction) {
286 current_block_->AddInstruction(instruction);
287 InitializeInstruction(instruction);
288 }
289
void HInstructionBuilder::InsertInstructionAtTop(HInstruction* instruction) {
291 if (current_block_->GetInstructions().IsEmpty()) {
292 current_block_->AddInstruction(instruction);
293 } else {
294 current_block_->InsertInstructionBefore(instruction, current_block_->GetFirstInstruction());
295 }
296 InitializeInstruction(instruction);
297 }
298
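// Gives the newly added instruction an environment (a snapshot of the current dex
// register values) if it needs one.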
void HInstructionBuilder::InitializeInstruction(HInstruction* instruction) {
300 if (instruction->NeedsEnvironment()) {
301 HEnvironment* environment = HEnvironment::Create(
302 allocator_,
303 current_locals_->size(),
304 graph_->GetArtMethod(),
305 instruction->GetDexPc(),
306 instruction);
307 environment->CopyFrom(ArrayRef<HInstruction* const>(*current_locals_));
308 instruction->SetRawEnvironment(environment);
309 }
310 }
311
HInstruction* HInstructionBuilder::LoadNullCheckedLocal(uint32_t register_index, uint32_t dex_pc) {
313 HInstruction* ref = LoadLocal(register_index, DataType::Type::kReference);
314 if (!ref->CanBeNull()) {
315 return ref;
316 }
317
318 HNullCheck* null_check = new (allocator_) HNullCheck(ref, dex_pc);
319 AppendInstruction(null_check);
320 return null_check;
321 }
322
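// Runs after all blocks have been populated: back edges have now been visited, so the
// loop phis created in InitializeBlockLocals() can receive one input per predecessor.
// Phis for vregs that are undefined at a back edge are marked dead instead.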
void HInstructionBuilder::SetLoopHeaderPhiInputs() {
324 for (size_t i = loop_headers_.size(); i > 0; --i) {
325 HBasicBlock* block = loop_headers_[i - 1];
326 for (HInstructionIterator it(block->GetPhis()); !it.Done(); it.Advance()) {
327 HPhi* phi = it.Current()->AsPhi();
328 size_t vreg = phi->GetRegNumber();
329 for (HBasicBlock* predecessor : block->GetPredecessors()) {
330 HInstruction* value = ValueOfLocalAt(predecessor, vreg);
331 if (value == nullptr) {
332 // Vreg is undefined at this predecessor. Mark it dead and leave with
333 // fewer inputs than predecessors. SsaChecker will fail if not removed.
334 phi->SetDead();
335 break;
336 } else {
337 phi->AddInput(value);
338 }
339 }
340 }
341 }
342 }
343
static bool IsBlockPopulated(HBasicBlock* block) {
345 if (block->IsLoopHeader()) {
346 // Suspend checks were inserted into loop headers during building of dominator tree.
347 DCHECK(block->GetFirstInstruction()->IsSuspendCheck());
348 return block->GetFirstInstruction() != block->GetLastInstruction();
349 } else if (block->IsCatchBlock()) {
350 // Nops were inserted into the beginning of catch blocks.
351 DCHECK(block->GetFirstInstruction()->IsNop());
352 return block->GetFirstInstruction() != block->GetLastInstruction();
353 } else {
354 return !block->GetInstructions().IsEmpty();
355 }
356 }
357
bool HInstructionBuilder::Build() {
359 DCHECK(code_item_accessor_.HasCodeItem());
360 locals_for_.resize(
361 graph_->GetBlocks().size(),
362 ScopedArenaVector<HInstruction*>(local_allocator_->Adapter(kArenaAllocGraphBuilder)));
363
  // Find locations where we want to generate extra stackmaps for native debugging.
  // This allows us to generate the info only at interesting points (for example,
  // at the start of a Java statement) rather than before every dex instruction.
367 const bool native_debuggable = code_generator_ != nullptr &&
368 code_generator_->GetCompilerOptions().GetNativeDebuggable();
369 ArenaBitVector* native_debug_info_locations = nullptr;
370 if (native_debuggable) {
371 native_debug_info_locations = FindNativeDebugInfoLocations();
372 }
373
374 for (HBasicBlock* block : graph_->GetReversePostOrder()) {
375 current_block_ = block;
376 uint32_t block_dex_pc = current_block_->GetDexPc();
377
378 InitializeBlockLocals();
379
380 if (current_block_->IsEntryBlock()) {
381 InitializeParameters();
382 AppendInstruction(new (allocator_) HSuspendCheck(0u));
383 if (graph_->IsDebuggable() && code_generator_->GetCompilerOptions().IsJitCompiler()) {
384 AppendInstruction(new (allocator_) HMethodEntryHook(0u));
385 }
386 AppendInstruction(new (allocator_) HGoto(0u));
387 continue;
388 } else if (current_block_->IsExitBlock()) {
389 AppendInstruction(new (allocator_) HExit());
390 continue;
391 } else if (current_block_->IsLoopHeader()) {
392 HSuspendCheck* suspend_check = new (allocator_) HSuspendCheck(current_block_->GetDexPc());
393 current_block_->GetLoopInformation()->SetSuspendCheck(suspend_check);
394 // This is slightly odd because the loop header might not be empty (TryBoundary).
395 // But we're still creating the environment with locals from the top of the block.
396 InsertInstructionAtTop(suspend_check);
397 } else if (current_block_->IsCatchBlock()) {
      // We add an environment-emitting instruction at the beginning of each catch block, in
      // order to support try-catch inlining.
      // This is slightly odd because the catch block might not be empty (TryBoundary).
401 InsertInstructionAtTop(new (allocator_) HNop(block_dex_pc, /* needs_environment= */ true));
402 }
403
404 if (block_dex_pc == kNoDexPc || current_block_ != block_builder_->GetBlockAt(block_dex_pc)) {
405 // Synthetic block that does not need to be populated.
406 DCHECK(IsBlockPopulated(current_block_));
407 continue;
408 }
409
410 DCHECK(!IsBlockPopulated(current_block_));
411
412 for (const DexInstructionPcPair& pair : code_item_accessor_.InstructionsFrom(block_dex_pc)) {
413 if (current_block_ == nullptr) {
414 // The previous instruction ended this block.
415 break;
416 }
417
418 const uint32_t dex_pc = pair.DexPc();
419 if (dex_pc != block_dex_pc && FindBlockStartingAt(dex_pc) != nullptr) {
420 // This dex_pc starts a new basic block.
421 break;
422 }
423
424 if (current_block_->IsTryBlock() && IsThrowingDexInstruction(pair.Inst())) {
425 PropagateLocalsToCatchBlocks();
426 }
427
428 if (native_debuggable && native_debug_info_locations->IsBitSet(dex_pc)) {
429 AppendInstruction(new (allocator_) HNop(dex_pc, /* needs_environment= */ true));
430 }
431
432 // Note: There may be no Thread for gtests.
433 DCHECK(Thread::Current() == nullptr || !Thread::Current()->IsExceptionPending())
434 << dex_file_->PrettyMethod(dex_compilation_unit_->GetDexMethodIndex())
435 << " " << pair.Inst().Name() << "@" << dex_pc;
436 if (!ProcessDexInstruction(pair.Inst(), dex_pc)) {
437 return false;
438 }
439 DCHECK(Thread::Current() == nullptr || !Thread::Current()->IsExceptionPending())
440 << dex_file_->PrettyMethod(dex_compilation_unit_->GetDexMethodIndex())
441 << " " << pair.Inst().Name() << "@" << dex_pc;
442 }
443
444 if (current_block_ != nullptr) {
445 // Branching instructions clear current_block, so we know the last
446 // instruction of the current block is not a branching instruction.
447 // We add an unconditional Goto to the next block.
448 DCHECK_EQ(current_block_->GetSuccessors().size(), 1u);
449 AppendInstruction(new (allocator_) HGoto());
450 }
451 }
452
453 SetLoopHeaderPhiInputs();
454
455 return true;
456 }
457
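// Builds the graph body for a method that is itself an intrinsic and has no code item:
// either the intrinsic's HIR replacement or an invoke of the method itself, followed by
// the return.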
void HInstructionBuilder::BuildIntrinsic(ArtMethod* method) {
459 DCHECK(!code_item_accessor_.HasCodeItem());
460 DCHECK(method->IsIntrinsic());
461 if (kIsDebugBuild) {
462 ScopedObjectAccess soa(Thread::Current());
463 CHECK(!method->IsSignaturePolymorphic());
464 }
465
466 locals_for_.resize(
467 graph_->GetBlocks().size(),
468 ScopedArenaVector<HInstruction*>(local_allocator_->Adapter(kArenaAllocGraphBuilder)));
469
470 // Fill the entry block. Do not add suspend check, we do not want a suspend
471 // check in intrinsics; intrinsic methods are supposed to be fast.
472 current_block_ = graph_->GetEntryBlock();
473 InitializeBlockLocals();
474 InitializeParameters();
475 if (graph_->IsDebuggable() && code_generator_->GetCompilerOptions().IsJitCompiler()) {
476 AppendInstruction(new (allocator_) HMethodEntryHook(0u));
477 }
478 AppendInstruction(new (allocator_) HGoto(0u));
479
480 // Fill the body.
481 current_block_ = current_block_->GetSingleSuccessor();
482 InitializeBlockLocals();
483 DCHECK(!IsBlockPopulated(current_block_));
484
485 // Add the intermediate representation, if available, or invoke instruction.
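  // Count the arguments: wide (64-bit) arguments occupy two vregs, and the high vreg
  // holds nullptr, so it must not be counted as a separate argument.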
486 size_t in_vregs = graph_->GetNumberOfInVRegs();
487 size_t number_of_arguments =
488 in_vregs - std::count(current_locals_->end() - in_vregs, current_locals_->end(), nullptr);
489 uint32_t method_idx = dex_compilation_unit_->GetDexMethodIndex();
490 const char* shorty = dex_file_->GetMethodShorty(method_idx);
491 RangeInstructionOperands operands(graph_->GetNumberOfVRegs() - in_vregs, in_vregs);
492 if (!BuildSimpleIntrinsic(method, kNoDexPc, operands, shorty)) {
493 // Some intrinsics without intermediate representation still yield a leaf method,
494 // so build the invoke. Use HInvokeStaticOrDirect even for methods that would
495 // normally use an HInvokeVirtual (sharpen the call).
496 MethodReference target_method(dex_file_, method_idx);
497 HInvokeStaticOrDirect::DispatchInfo dispatch_info = {
498 MethodLoadKind::kRuntimeCall,
499 CodePtrLocation::kCallArtMethod,
500 /* method_load_data= */ 0u
501 };
502 InvokeType invoke_type = dex_compilation_unit_->IsStatic() ? kStatic : kDirect;
503 HInvokeStaticOrDirect* invoke = new (allocator_) HInvokeStaticOrDirect(
504 allocator_,
505 number_of_arguments,
506 /* number_of_out_vregs= */ in_vregs,
507 return_type_,
508 kNoDexPc,
509 target_method,
510 method,
511 dispatch_info,
512 invoke_type,
513 target_method,
514 HInvokeStaticOrDirect::ClinitCheckRequirement::kNone,
515 !graph_->IsDebuggable());
516 HandleInvoke(invoke, operands, shorty, /* is_unresolved= */ false);
517 }
518
519 // Add the return instruction.
520 if (return_type_ == DataType::Type::kVoid) {
521 if (graph_->IsDebuggable() && code_generator_->GetCompilerOptions().IsJitCompiler()) {
522 AppendInstruction(new (allocator_) HMethodExitHook(graph_->GetNullConstant(), kNoDexPc));
523 }
524 AppendInstruction(new (allocator_) HReturnVoid());
525 } else {
526 if (graph_->IsDebuggable() && code_generator_->GetCompilerOptions().IsJitCompiler()) {
527 AppendInstruction(new (allocator_) HMethodExitHook(latest_result_, kNoDexPc));
528 }
529 AppendInstruction(new (allocator_) HReturn(latest_result_));
530 }
531
532 // Fill the exit block.
533 DCHECK_EQ(current_block_->GetSingleSuccessor(), graph_->GetExitBlock());
534 current_block_ = graph_->GetExitBlock();
535 InitializeBlockLocals();
536 AppendInstruction(new (allocator_) HExit());
537 }
538
ArenaBitVector* HInstructionBuilder::FindNativeDebugInfoLocations() {
540 ArenaBitVector* locations = ArenaBitVector::Create(local_allocator_,
541 code_item_accessor_.InsnsSizeInCodeUnits(),
542 /* expandable= */ false,
543 kArenaAllocGraphBuilder);
  // The visitor gets called when the line number changes.
  // In other words, it marks the start of a new Java statement.
546 code_item_accessor_.DecodeDebugPositionInfo([&](const DexFile::PositionInfo& entry) {
547 locations->SetBit(entry.address_);
548 return false;
549 });
550 // Instruction-specific tweaks.
551 for (const DexInstructionPcPair& inst : code_item_accessor_) {
552 switch (inst->Opcode()) {
553 case Instruction::MOVE_EXCEPTION: {
        // Stop in native debugger after the exception has been moved.
        // The compiler also expects the move at the start of the basic block, so
        // we do not want to interfere by inserting native-debug-info before it.
557 locations->ClearBit(inst.DexPc());
558 DexInstructionIterator next = std::next(DexInstructionIterator(inst));
559 DCHECK(next.DexPc() != inst.DexPc());
560 if (next != code_item_accessor_.end()) {
561 locations->SetBit(next.DexPc());
562 }
563 break;
564 }
565 default:
566 break;
567 }
568 }
569 return locations;
570 }
571
HInstruction* HInstructionBuilder::LoadLocal(uint32_t reg_number, DataType::Type type) const {
573 HInstruction* value = (*current_locals_)[reg_number];
574 DCHECK(value != nullptr);
575
576 // If the operation requests a specific type, we make sure its input is of that type.
577 if (type != value->GetType()) {
578 if (DataType::IsFloatingPointType(type)) {
579 value = ssa_builder_->GetFloatOrDoubleEquivalent(value, type);
580 } else if (type == DataType::Type::kReference) {
581 value = ssa_builder_->GetReferenceTypeEquivalent(value);
582 }
583 DCHECK(value != nullptr);
584 }
585
586 return value;
587 }
588
void HInstructionBuilder::UpdateLocal(uint32_t reg_number, HInstruction* stored_value) {
590 DataType::Type stored_type = stored_value->GetType();
591 DCHECK_NE(stored_type, DataType::Type::kVoid);
592
593 // Storing into vreg `reg_number` may implicitly invalidate the surrounding
594 // registers. Consider the following cases:
595 // (1) Storing a wide value must overwrite previous values in both `reg_number`
596 // and `reg_number+1`. We store `nullptr` in `reg_number+1`.
597 // (2) If vreg `reg_number-1` holds a wide value, writing into `reg_number`
598 // must invalidate it. We store `nullptr` in `reg_number-1`.
599 // Consequently, storing a wide value into the high vreg of another wide value
600 // will invalidate both `reg_number-1` and `reg_number+1`.
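  // For example, if v0/v1 hold a wide value, then writing an int into v1 invalidates
  // v0, and writing a wide value into v1 invalidates both v0 and v2.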
601
602 if (reg_number != 0) {
603 HInstruction* local_low = (*current_locals_)[reg_number - 1];
604 if (local_low != nullptr && DataType::Is64BitType(local_low->GetType())) {
605 // The vreg we are storing into was previously the high vreg of a pair.
606 // We need to invalidate its low vreg.
607 DCHECK((*current_locals_)[reg_number] == nullptr);
608 (*current_locals_)[reg_number - 1] = nullptr;
609 }
610 }
611
612 (*current_locals_)[reg_number] = stored_value;
613 if (DataType::Is64BitType(stored_type)) {
614 // We are storing a pair. Invalidate the instruction in the high vreg.
615 (*current_locals_)[reg_number + 1] = nullptr;
616 }
617 }
618
void HInstructionBuilder::InitializeParameters() {
620 DCHECK(current_block_->IsEntryBlock());
621
622 // outer_compilation_unit_ is null only when unit testing.
623 if (outer_compilation_unit_ == nullptr) {
624 return;
625 }
626
627 const char* shorty = dex_compilation_unit_->GetShorty();
628 uint16_t number_of_parameters = graph_->GetNumberOfInVRegs();
629 uint16_t locals_index = graph_->GetNumberOfLocalVRegs();
630 uint16_t parameter_index = 0;
631
632 const dex::MethodId& referrer_method_id =
633 dex_file_->GetMethodId(dex_compilation_unit_->GetDexMethodIndex());
634 if (!dex_compilation_unit_->IsStatic()) {
635 // Add the implicit 'this' argument, not expressed in the signature.
636 HParameterValue* parameter = new (allocator_) HParameterValue(*dex_file_,
637 referrer_method_id.class_idx_,
638 parameter_index++,
639 DataType::Type::kReference,
640 /* is_this= */ true);
641 AppendInstruction(parameter);
642 UpdateLocal(locals_index++, parameter);
643 number_of_parameters--;
644 current_this_parameter_ = parameter;
645 } else {
646 DCHECK(current_this_parameter_ == nullptr);
647 }
648
649 const dex::ProtoId& proto = dex_file_->GetMethodPrototype(referrer_method_id);
650 const dex::TypeList* arg_types = dex_file_->GetProtoParameters(proto);
651 for (int i = 0, shorty_pos = 1; i < number_of_parameters; i++) {
652 HParameterValue* parameter = new (allocator_) HParameterValue(
653 *dex_file_,
654 arg_types->GetTypeItem(shorty_pos - 1).type_idx_,
655 parameter_index++,
656 DataType::FromShorty(shorty[shorty_pos]),
657 /* is_this= */ false);
658 ++shorty_pos;
659 AppendInstruction(parameter);
660 // Store the parameter value in the local that the dex code will use
661 // to reference that parameter.
662 UpdateLocal(locals_index++, parameter);
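    // Wide (long/double) parameters occupy two vregs; skip the extra slot.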
663 if (DataType::Is64BitType(parameter->GetType())) {
664 i++;
665 locals_index++;
666 parameter_index++;
667 }
668 }
669 }
670
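// Builds the comparison and HIf for the two conditional branch formats: 21t
// (if-<cond>z vA, compares vA against zero) and 22t (if-<cond> vA, vB). `T` is the
// condition instruction to create, e.g. HEqual for if-eq/if-eqz.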
671 template<typename T, bool kCompareWithZero>
void HInstructionBuilder::If_21_22t(const Instruction& instruction, uint32_t dex_pc) {
673 DCHECK_EQ(kCompareWithZero ? Instruction::Format::k21t : Instruction::Format::k22t,
674 Instruction::FormatOf(instruction.Opcode()));
675 HInstruction* value = LoadLocal(
676 kCompareWithZero ? instruction.VRegA_21t() : instruction.VRegA_22t(),
677 DataType::Type::kInt32);
678 T* comparison = nullptr;
679 if (kCompareWithZero) {
680 comparison = new (allocator_) T(value, graph_->GetIntConstant(0), dex_pc);
681 } else {
682 HInstruction* second = LoadLocal(instruction.VRegB_22t(), DataType::Type::kInt32);
683 comparison = new (allocator_) T(value, second, dex_pc);
684 }
685 AppendInstruction(comparison);
686 HIf* if_instr = new (allocator_) HIf(comparison, dex_pc);
687
688 ProfilingInfo* info = graph_->GetProfilingInfo();
689 if (info != nullptr && !graph_->IsCompilingBaseline()) {
690 BranchCache* cache = info->GetBranchCache(dex_pc);
691 if (cache != nullptr) {
692 if_instr->SetTrueCount(cache->GetTrue());
693 if_instr->SetFalseCount(cache->GetFalse());
694 }
695 }
696
697 // Append after setting true/false count, so that the builder knows if the
698 // instruction needs an environment.
699 AppendInstruction(if_instr);
700 current_block_ = nullptr;
701 }
702
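// The helpers below are named after the dex instruction format they decode (12x, 23x,
// 22s, 22b, ...); `T` is the HInstruction subclass to create for the operation.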
703 template<typename T>
void HInstructionBuilder::Unop_12x(const Instruction& instruction,
705 DataType::Type type,
706 uint32_t dex_pc) {
707 HInstruction* first = LoadLocal(instruction.VRegB_12x(), type);
708 AppendInstruction(new (allocator_) T(type, first, dex_pc));
709 UpdateLocal(instruction.VRegA_12x(), current_block_->GetLastInstruction());
710 }
711
void HInstructionBuilder::Conversion_12x(const Instruction& instruction,
713 DataType::Type input_type,
714 DataType::Type result_type,
715 uint32_t dex_pc) {
716 HInstruction* first = LoadLocal(instruction.VRegB_12x(), input_type);
717 AppendInstruction(new (allocator_) HTypeConversion(result_type, first, dex_pc));
718 UpdateLocal(instruction.VRegA_12x(), current_block_->GetLastInstruction());
719 }
720
721 template<typename T>
void HInstructionBuilder::Binop_23x(const Instruction& instruction,
723 DataType::Type type,
724 uint32_t dex_pc) {
725 HInstruction* first = LoadLocal(instruction.VRegB_23x(), type);
726 HInstruction* second = LoadLocal(instruction.VRegC_23x(), type);
727 AppendInstruction(new (allocator_) T(type, first, second, dex_pc));
728 UpdateLocal(instruction.VRegA_23x(), current_block_->GetLastInstruction());
729 }
730
731 template<typename T>
void HInstructionBuilder::Binop_23x_shift(const Instruction& instruction,
733 DataType::Type type,
734 uint32_t dex_pc) {
735 HInstruction* first = LoadLocal(instruction.VRegB_23x(), type);
736 HInstruction* second = LoadLocal(instruction.VRegC_23x(), DataType::Type::kInt32);
737 AppendInstruction(new (allocator_) T(type, first, second, dex_pc));
738 UpdateLocal(instruction.VRegA_23x(), current_block_->GetLastInstruction());
739 }
740
void HInstructionBuilder::Binop_23x_cmp(const Instruction& instruction,
742 DataType::Type type,
743 ComparisonBias bias,
744 uint32_t dex_pc) {
745 HInstruction* first = LoadLocal(instruction.VRegB_23x(), type);
746 HInstruction* second = LoadLocal(instruction.VRegC_23x(), type);
747 AppendInstruction(new (allocator_) HCompare(type, first, second, bias, dex_pc));
748 UpdateLocal(instruction.VRegA_23x(), current_block_->GetLastInstruction());
749 }
750
751 template<typename T>
void HInstructionBuilder::Binop_12x_shift(const Instruction& instruction,
753 DataType::Type type,
754 uint32_t dex_pc) {
755 HInstruction* first = LoadLocal(instruction.VRegA_12x(), type);
756 HInstruction* second = LoadLocal(instruction.VRegB_12x(), DataType::Type::kInt32);
757 AppendInstruction(new (allocator_) T(type, first, second, dex_pc));
758 UpdateLocal(instruction.VRegA_12x(), current_block_->GetLastInstruction());
759 }
760
761 template<typename T>
void HInstructionBuilder::Binop_12x(const Instruction& instruction,
763 DataType::Type type,
764 uint32_t dex_pc) {
765 HInstruction* first = LoadLocal(instruction.VRegA_12x(), type);
766 HInstruction* second = LoadLocal(instruction.VRegB_12x(), type);
767 AppendInstruction(new (allocator_) T(type, first, second, dex_pc));
768 UpdateLocal(instruction.VRegA_12x(), current_block_->GetLastInstruction());
769 }
770
771 template<typename T>
void HInstructionBuilder::Binop_22s(const Instruction& instruction, bool reverse, uint32_t dex_pc) {
773 HInstruction* first = LoadLocal(instruction.VRegB_22s(), DataType::Type::kInt32);
774 HInstruction* second = graph_->GetIntConstant(instruction.VRegC_22s());
775 if (reverse) {
776 std::swap(first, second);
777 }
778 AppendInstruction(new (allocator_) T(DataType::Type::kInt32, first, second, dex_pc));
779 UpdateLocal(instruction.VRegA_22s(), current_block_->GetLastInstruction());
780 }
781
782 template<typename T>
void HInstructionBuilder::Binop_22b(const Instruction& instruction, bool reverse, uint32_t dex_pc) {
784 HInstruction* first = LoadLocal(instruction.VRegB_22b(), DataType::Type::kInt32);
785 HInstruction* second = graph_->GetIntConstant(instruction.VRegC_22b());
786 if (reverse) {
787 std::swap(first, second);
788 }
789 AppendInstruction(new (allocator_) T(DataType::Type::kInt32, first, second, dex_pc));
790 UpdateLocal(instruction.VRegA_22b(), current_block_->GetLastInstruction());
791 }
792
// Does the method being compiled need any constructor barriers to be inserted?
// (Always 'false' for methods that aren't <init>.)
static bool RequiresConstructorBarrier(const DexCompilationUnit* cu) {
796 // Can be null in unit tests only.
797 if (UNLIKELY(cu == nullptr)) {
798 return false;
799 }
800
801 // Constructor barriers are applicable only for <init> methods.
802 if (LIKELY(!cu->IsConstructor() || cu->IsStatic())) {
803 return false;
804 }
805
806 return cu->RequiresConstructorBarrier();
807 }
808
// Returns true if `block` has only one successor, which starts at the next
// dex_pc after `instruction` at `dex_pc`.
static bool IsFallthroughInstruction(const Instruction& instruction,
812 uint32_t dex_pc,
813 HBasicBlock* block) {
814 uint32_t next_dex_pc = dex_pc + instruction.SizeInCodeUnits();
815 return block->GetSingleSuccessor()->GetDexPc() == next_dex_pc;
816 }
817
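// Lowers packed-switch/sparse-switch. An empty table just falls through; otherwise,
// depending on DexSwitchTable::ShouldBuildDecisionTree(), the switch becomes either a
// chain of HEqual/HIf comparisons or a single HPackedSwitch.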
void HInstructionBuilder::BuildSwitch(const Instruction& instruction, uint32_t dex_pc) {
819 HInstruction* value = LoadLocal(instruction.VRegA_31t(), DataType::Type::kInt32);
820 DexSwitchTable table(instruction, dex_pc);
821
822 if (table.GetNumEntries() == 0) {
823 // Empty Switch. Code falls through to the next block.
824 DCHECK(IsFallthroughInstruction(instruction, dex_pc, current_block_));
825 AppendInstruction(new (allocator_) HGoto(dex_pc));
826 } else if (table.ShouldBuildDecisionTree()) {
827 for (DexSwitchTableIterator it(table); !it.Done(); it.Advance()) {
828 HInstruction* case_value = graph_->GetIntConstant(it.CurrentKey());
829 HEqual* comparison = new (allocator_) HEqual(value, case_value, dex_pc);
830 AppendInstruction(comparison);
831 AppendInstruction(new (allocator_) HIf(comparison, dex_pc));
832
833 if (!it.IsLast()) {
834 current_block_ = FindBlockStartingAt(it.GetDexPcForCurrentIndex());
835 }
836 }
837 } else {
838 AppendInstruction(
839 new (allocator_) HPackedSwitch(table.GetEntryAt(0), table.GetNumEntries(), value, dex_pc));
840 }
841
842 current_block_ = nullptr;
843 }
844
845 template <DataType::Type type>
ALWAYS_INLINE inline void HInstructionBuilder::BuildMove(uint32_t dest_reg, uint32_t src_reg) {
847 // The verifier has no notion of a null type, so a move-object of constant 0
848 // will lead to the same constant 0 in the destination register. To mimic
849 // this behavior, we just pretend we haven't seen a type change (int to reference)
850 // for the 0 constant and phis. We rely on our type propagation to eventually get the
851 // types correct.
852 constexpr bool is_reference = type == DataType::Type::kReference;
853 HInstruction* value = is_reference ? (*current_locals_)[src_reg] : /* not needed */ nullptr;
854 if (is_reference && value->IsIntConstant()) {
855 DCHECK_EQ(value->AsIntConstant()->GetValue(), 0);
856 } else if (is_reference && value->IsPhi()) {
857 DCHECK(value->GetType() == DataType::Type::kInt32 ||
858 value->GetType() == DataType::Type::kReference);
859 } else {
860 value = LoadLocal(src_reg, type);
861 }
862 UpdateLocal(dest_reg, value);
863 }
864
void HInstructionBuilder::BuildReturn(const Instruction& instruction,
866 DataType::Type type,
867 uint32_t dex_pc) {
868 if (type == DataType::Type::kVoid) {
869 // Only <init> (which is a return-void) could possibly have a constructor fence.
870 // This may insert additional redundant constructor fences from the super constructors.
871 // TODO: remove redundant constructor fences (b/36656456).
872 if (RequiresConstructorBarrier(dex_compilation_unit_)) {
873 // Compiling instance constructor.
874 DCHECK_STREQ("<init>", graph_->GetMethodName());
875
876 HInstruction* fence_target = current_this_parameter_;
877 DCHECK(fence_target != nullptr);
878
879 AppendInstruction(new (allocator_) HConstructorFence(fence_target, dex_pc, allocator_));
880 MaybeRecordStat(
881 compilation_stats_,
882 MethodCompilationStat::kConstructorFenceGeneratedFinal);
883 }
884 if (graph_->IsDebuggable() && code_generator_->GetCompilerOptions().IsJitCompiler()) {
885 // Return value is not used for void functions. We pass NullConstant to
886 // avoid special cases when generating code.
887 AppendInstruction(new (allocator_) HMethodExitHook(graph_->GetNullConstant(), dex_pc));
888 }
889 AppendInstruction(new (allocator_) HReturnVoid(dex_pc));
890 } else {
891 DCHECK(!RequiresConstructorBarrier(dex_compilation_unit_));
892 HInstruction* value = LoadLocal(instruction.VRegA_11x(), type);
893 if (graph_->IsDebuggable() && code_generator_->GetCompilerOptions().IsJitCompiler()) {
894 AppendInstruction(new (allocator_) HMethodExitHook(value, dex_pc));
895 }
896 AppendInstruction(new (allocator_) HReturn(value, dex_pc));
897 }
898 current_block_ = nullptr;
899 }
900
static InvokeType GetInvokeTypeFromOpCode(Instruction::Code opcode) {
902 switch (opcode) {
903 case Instruction::INVOKE_STATIC:
904 case Instruction::INVOKE_STATIC_RANGE:
905 return kStatic;
906 case Instruction::INVOKE_DIRECT:
907 case Instruction::INVOKE_DIRECT_RANGE:
908 return kDirect;
909 case Instruction::INVOKE_VIRTUAL:
910 case Instruction::INVOKE_VIRTUAL_RANGE:
911 return kVirtual;
912 case Instruction::INVOKE_INTERFACE:
913 case Instruction::INVOKE_INTERFACE_RANGE:
914 return kInterface;
915 case Instruction::INVOKE_SUPER_RANGE:
916 case Instruction::INVOKE_SUPER:
917 return kSuper;
918 default:
919 LOG(FATAL) << "Unexpected invoke opcode: " << opcode;
920 UNREACHABLE();
921 }
922 }
923
924 // Try to resolve a method using the class linker. Return null if a method could
925 // not be resolved or the resolved method cannot be used for some reason.
926 // Also retrieve method data needed for creating the invoke intermediate
927 // representation while we hold the mutator lock here.
static ArtMethod* ResolveMethod(uint16_t method_idx,
929 ArtMethod* referrer,
930 const DexCompilationUnit& dex_compilation_unit,
931 /*inout*/InvokeType* invoke_type,
932 /*out*/MethodReference* resolved_method_info,
933 /*out*/uint16_t* imt_or_vtable_index,
934 /*out*/bool* is_string_constructor) {
935 ScopedObjectAccess soa(Thread::Current());
936
937 ClassLinker* class_linker = dex_compilation_unit.GetClassLinker();
938 Handle<mirror::ClassLoader> class_loader = dex_compilation_unit.GetClassLoader();
939
940 ArtMethod* resolved_method = nullptr;
941 if (referrer == nullptr) {
942 // The referrer may be unresolved for AOT if we're compiling a class that cannot be
943 // resolved because, for example, we don't find a superclass in the classpath.
944 resolved_method = class_linker->ResolveMethodId(
945 method_idx, dex_compilation_unit.GetDexCache(), class_loader);
946 } else if (referrer->SkipAccessChecks()) {
947 resolved_method = class_linker->ResolveMethodId(method_idx, referrer);
948 } else {
949 resolved_method = class_linker->ResolveMethodWithChecks(
950 method_idx,
951 referrer,
952 *invoke_type);
953 }
954
955 if (UNLIKELY(resolved_method == nullptr)) {
956 // Clean up any exception left by type resolution.
957 soa.Self()->ClearException();
958 return nullptr;
959 }
960 DCHECK(!soa.Self()->IsExceptionPending());
961
962 if (referrer == nullptr) {
963 ObjPtr<mirror::Class> referenced_class = class_linker->LookupResolvedType(
964 dex_compilation_unit.GetDexFile()->GetMethodId(method_idx).class_idx_,
965 dex_compilation_unit.GetDexCache().Get(),
966 class_loader.Get());
967 DCHECK(referenced_class != nullptr); // Must have been resolved when resolving the method.
968 if (class_linker->ThrowIfInvokeClassMismatch(referenced_class,
969 *dex_compilation_unit.GetDexFile(),
970 *invoke_type)) {
971 soa.Self()->ClearException();
972 return nullptr;
973 }
974 // The class linker cannot check access without a referrer, so we have to do it.
975 // Check if the declaring class or referencing class is accessible.
976 SamePackageCompare same_package(dex_compilation_unit);
977 ObjPtr<mirror::Class> declaring_class = resolved_method->GetDeclaringClass();
978 bool declaring_class_accessible = declaring_class->IsPublic() || same_package(declaring_class);
979 if (!declaring_class_accessible) {
980 // It is possible to access members from an inaccessible superclass
981 // by referencing them through an accessible subclass.
982 if (!referenced_class->IsPublic() && !same_package(referenced_class)) {
983 return nullptr;
984 }
985 }
986 // Check whether the method itself is accessible.
987 // Since the referrer is unresolved but the method is resolved, it cannot be
988 // inside the same class, so a private method is known to be inaccessible.
    // And without a resolved referrer, we cannot check for protected member access
    // in a superclass, so we handle only access to public members or access within the package.
991 if (resolved_method->IsPrivate() ||
992 (!resolved_method->IsPublic() && !declaring_class_accessible)) {
993 return nullptr;
994 }
995 }
996
  // We have to special case the invoke-super case, as ClassLinker::ResolveMethod does not.
  // We need to look at the referrer's super class vtable to know whether we need to
  // make this an invoke-unresolved to handle cross-dex invokes or abstract super methods, both of
  // which require runtime handling.
1001 if (*invoke_type == kSuper) {
1002 if (referrer == nullptr) {
      // We could not determine the method's class, so we need to wait until runtime.
1004 DCHECK(Runtime::Current()->IsAotCompiler());
1005 return nullptr;
1006 }
1007 ArtMethod* actual_method = FindSuperMethodToCall</*access_check=*/true>(
1008 method_idx, resolved_method, referrer, soa.Self());
1009 if (actual_method == nullptr) {
1010 // Clean up any exception left by method resolution.
1011 soa.Self()->ClearException();
1012 return nullptr;
1013 }
1014 if (!actual_method->IsInvokable()) {
1015 // Fail if the actual method cannot be invoked. Otherwise, the runtime resolution stub
1016 // could resolve the callee to the wrong method.
1017 return nullptr;
1018 }
1019 // Call GetCanonicalMethod in case the resolved method is a copy: for super calls, the encoding
1020 // of ArtMethod in BSS relies on not having copies there.
1021 resolved_method = actual_method->GetCanonicalMethod(class_linker->GetImagePointerSize());
1022 }
1023
1024 if (*invoke_type == kInterface) {
1025 if (resolved_method->GetDeclaringClass()->IsObjectClass()) {
1026 // If the resolved method is from j.l.Object, emit a virtual call instead.
1027 // The IMT conflict stub only handles interface methods.
1028 *invoke_type = kVirtual;
1029 } else {
1030 DCHECK(resolved_method->GetDeclaringClass()->IsInterface());
1031 }
1032 }
1033
1034 *resolved_method_info =
1035 MethodReference(resolved_method->GetDexFile(), resolved_method->GetDexMethodIndex());
1036 if (*invoke_type == kVirtual) {
1037 // For HInvokeVirtual we need the vtable index.
1038 *imt_or_vtable_index = resolved_method->GetVtableIndex();
1039 } else if (*invoke_type == kInterface) {
1040 // For HInvokeInterface we need the IMT index.
1041 *imt_or_vtable_index = resolved_method->GetImtIndex();
1042 DCHECK_EQ(*imt_or_vtable_index, ImTable::GetImtIndex(resolved_method));
1043 }
1044
1045 *is_string_constructor = resolved_method->IsStringConstructor();
1046
1047 return resolved_method;
1048 }
1049
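// Lowers an invoke-* dex instruction: resolves the target method, special-cases
// String.<init> and intrinsics, and otherwise creates the matching HInvoke node
// (HInvokeUnresolved when resolution fails).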
bool HInstructionBuilder::BuildInvoke(const Instruction& instruction,
1051 uint32_t dex_pc,
1052 uint32_t method_idx,
1053 const InstructionOperands& operands) {
1054 InvokeType invoke_type = GetInvokeTypeFromOpCode(instruction.Opcode());
1055 const char* shorty = dex_file_->GetMethodShorty(method_idx);
1056 DataType::Type return_type = DataType::FromShorty(shorty[0]);
1057
1058 // Remove the return type from the 'proto'.
1059 size_t number_of_arguments = strlen(shorty) - 1;
1060 if (invoke_type != kStatic) { // instance call
1061 // One extra argument for 'this'.
1062 number_of_arguments++;
1063 }
1064
1065 MethodReference resolved_method_reference(nullptr, 0u);
1066 bool is_string_constructor = false;
1067 uint16_t imt_or_vtable_index = DexFile::kDexNoIndex16;
1068 ArtMethod* resolved_method = ResolveMethod(method_idx,
1069 graph_->GetArtMethod(),
1070 *dex_compilation_unit_,
1071 &invoke_type,
1072 &resolved_method_reference,
1073 &imt_or_vtable_index,
1074 &is_string_constructor);
1075
1076 MethodReference method_reference(&graph_->GetDexFile(), method_idx);
1077 if (UNLIKELY(resolved_method == nullptr)) {
1078 DCHECK(!Thread::Current()->IsExceptionPending());
1079 MaybeRecordStat(compilation_stats_,
1080 MethodCompilationStat::kUnresolvedMethod);
1081 HInvoke* invoke = new (allocator_) HInvokeUnresolved(allocator_,
1082 number_of_arguments,
1083 operands.GetNumberOfOperands(),
1084 return_type,
1085 dex_pc,
1086 method_reference,
1087 invoke_type);
1088 return HandleInvoke(invoke, operands, shorty, /* is_unresolved= */ true);
1089 }
1090
1091 // Replace calls to String.<init> with StringFactory.
1092 if (is_string_constructor) {
1093 uint32_t string_init_entry_point = WellKnownClasses::StringInitToEntryPoint(resolved_method);
1094 HInvokeStaticOrDirect::DispatchInfo dispatch_info = {
1095 MethodLoadKind::kStringInit,
1096 CodePtrLocation::kCallArtMethod,
1097 dchecked_integral_cast<uint64_t>(string_init_entry_point)
1098 };
1099 // We pass null for the resolved_method to ensure optimizations
1100 // don't rely on it.
1101 HInvoke* invoke = new (allocator_) HInvokeStaticOrDirect(
1102 allocator_,
1103 number_of_arguments - 1,
1104 operands.GetNumberOfOperands() - 1,
1105 /* return_type= */ DataType::Type::kReference,
1106 dex_pc,
1107 method_reference,
1108 /* resolved_method= */ nullptr,
1109 dispatch_info,
1110 invoke_type,
1111 resolved_method_reference,
1112 HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit,
1113 !graph_->IsDebuggable());
1114 return HandleStringInit(invoke, operands, shorty);
1115 }
1116
1117 // Potential class initialization check, in the case of a static method call.
1118 HInvokeStaticOrDirect::ClinitCheckRequirement clinit_check_requirement =
1119 HInvokeStaticOrDirect::ClinitCheckRequirement::kNone;
1120 HClinitCheck* clinit_check = nullptr;
1121 if (invoke_type == kStatic) {
1122 clinit_check = ProcessClinitCheckForInvoke(dex_pc, resolved_method, &clinit_check_requirement);
1123 }
1124
1125 // Try to build an HIR replacement for the intrinsic.
1126 if (UNLIKELY(resolved_method->IsIntrinsic()) && !graph_->IsDebuggable()) {
1127 // All intrinsics are in the primary boot image, so their class can always be referenced
1128 // and we do not need to rely on the implicit class initialization check. The class should
1129 // be initialized but we do not require that here.
1130 DCHECK_NE(clinit_check_requirement, HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit);
1131 if (BuildSimpleIntrinsic(resolved_method, dex_pc, operands, shorty)) {
1132 return true;
1133 }
1134 }
1135
1136 HInvoke* invoke = nullptr;
1137 if (invoke_type == kDirect || invoke_type == kStatic || invoke_type == kSuper) {
1138 // For sharpening, we create another MethodReference, to account for the
1139 // kSuper case below where we cannot find a dex method index.
1140 bool has_method_id = true;
1141 if (invoke_type == kSuper) {
1142 uint32_t dex_method_index = method_reference.index;
1143 if (IsSameDexFile(*resolved_method_reference.dex_file,
1144 *dex_compilation_unit_->GetDexFile())) {
1145 // Update the method index to the one resolved. Note that this may be a no-op if
1146 // we resolved to the method referenced by the instruction.
1147 dex_method_index = resolved_method_reference.index;
1148 } else {
1149 // Try to find a dex method index in this caller's dex file.
1150 ScopedObjectAccess soa(Thread::Current());
1151 dex_method_index = resolved_method->FindDexMethodIndexInOtherDexFile(
1152 *dex_compilation_unit_->GetDexFile(), method_idx);
1153 }
1154 if (dex_method_index == dex::kDexNoIndex) {
1155 has_method_id = false;
1156 } else {
1157 method_reference.index = dex_method_index;
1158 }
1159 }
1160 HInvokeStaticOrDirect::DispatchInfo dispatch_info =
1161 HSharpening::SharpenLoadMethod(resolved_method,
1162 has_method_id,
1163 /* for_interface_call= */ false,
1164 code_generator_);
1165 if (dispatch_info.code_ptr_location == CodePtrLocation::kCallCriticalNative) {
1166 graph_->SetHasDirectCriticalNativeCall(true);
1167 }
1168 invoke = new (allocator_) HInvokeStaticOrDirect(allocator_,
1169 number_of_arguments,
1170 operands.GetNumberOfOperands(),
1171 return_type,
1172 dex_pc,
1173 method_reference,
1174 resolved_method,
1175 dispatch_info,
1176 invoke_type,
1177 resolved_method_reference,
1178 clinit_check_requirement,
1179 !graph_->IsDebuggable());
1180 if (clinit_check != nullptr) {
1181 // Add the class initialization check as last input of `invoke`.
1182 DCHECK_EQ(clinit_check_requirement, HInvokeStaticOrDirect::ClinitCheckRequirement::kExplicit);
1183 size_t clinit_check_index = invoke->InputCount() - 1u;
1184 DCHECK(invoke->InputAt(clinit_check_index) == nullptr);
1185 invoke->SetArgumentAt(clinit_check_index, clinit_check);
1186 }
1187 } else if (invoke_type == kVirtual) {
1188 invoke = new (allocator_) HInvokeVirtual(allocator_,
1189 number_of_arguments,
1190 operands.GetNumberOfOperands(),
1191 return_type,
1192 dex_pc,
1193 method_reference,
1194 resolved_method,
1195 resolved_method_reference,
1196 /*vtable_index=*/ imt_or_vtable_index,
1197 !graph_->IsDebuggable());
1198 } else {
1199 DCHECK_EQ(invoke_type, kInterface);
1200 if (kIsDebugBuild) {
1201 ScopedObjectAccess soa(Thread::Current());
1202 DCHECK(resolved_method->GetDeclaringClass()->IsInterface());
1203 }
1204 MethodLoadKind load_kind = HSharpening::SharpenLoadMethod(
1205 resolved_method,
1206 /* has_method_id= */ true,
1207 /* for_interface_call= */ true,
1208 code_generator_)
1209 .method_load_kind;
1210 invoke = new (allocator_) HInvokeInterface(allocator_,
1211 number_of_arguments,
1212 operands.GetNumberOfOperands(),
1213 return_type,
1214 dex_pc,
1215 method_reference,
1216 resolved_method,
1217 resolved_method_reference,
1218 /*imt_index=*/ imt_or_vtable_index,
1219 load_kind,
1220 !graph_->IsDebuggable());
1221 }
1222 return HandleInvoke(invoke, operands, shorty, /* is_unresolved= */ false);
1223 }
1224
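// A VarHandle accessor needs a return type check only for access modes that return
// the field value (get, get-and-update, compare-and-exchange) and only when the
// declared return type is a reference.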
static bool VarHandleAccessorNeedsReturnTypeCheck(HInvoke* invoke, DataType::Type return_type) {
1226 mirror::VarHandle::AccessModeTemplate access_mode_template =
1227 mirror::VarHandle::GetAccessModeTemplateByIntrinsic(invoke->GetIntrinsic());
1228
1229 switch (access_mode_template) {
1230 case mirror::VarHandle::AccessModeTemplate::kGet:
1231 case mirror::VarHandle::AccessModeTemplate::kGetAndUpdate:
1232 case mirror::VarHandle::AccessModeTemplate::kCompareAndExchange:
1233 return return_type == DataType::Type::kReference;
1234 case mirror::VarHandle::AccessModeTemplate::kSet:
1235 case mirror::VarHandle::AccessModeTemplate::kCompareAndSet:
1236 return false;
1237 }
1238 }
1239
1240 // This function initializes `VarHandleOptimizations`, does a number of static checks and disables
1241 // the intrinsic if some of the checks fail. This is necessary for the code generator to work (for
1242 // both the baseline and the optimizing compiler).
static void DecideVarHandleIntrinsic(HInvoke* invoke) {
1244 switch (invoke->GetIntrinsic()) {
1245 case Intrinsics::kVarHandleCompareAndExchange:
1246 case Intrinsics::kVarHandleCompareAndExchangeAcquire:
1247 case Intrinsics::kVarHandleCompareAndExchangeRelease:
1248 case Intrinsics::kVarHandleCompareAndSet:
1249 case Intrinsics::kVarHandleGet:
1250 case Intrinsics::kVarHandleGetAcquire:
1251 case Intrinsics::kVarHandleGetAndAdd:
1252 case Intrinsics::kVarHandleGetAndAddAcquire:
1253 case Intrinsics::kVarHandleGetAndAddRelease:
1254 case Intrinsics::kVarHandleGetAndBitwiseAnd:
1255 case Intrinsics::kVarHandleGetAndBitwiseAndAcquire:
1256 case Intrinsics::kVarHandleGetAndBitwiseAndRelease:
1257 case Intrinsics::kVarHandleGetAndBitwiseOr:
1258 case Intrinsics::kVarHandleGetAndBitwiseOrAcquire:
1259 case Intrinsics::kVarHandleGetAndBitwiseOrRelease:
1260 case Intrinsics::kVarHandleGetAndBitwiseXor:
1261 case Intrinsics::kVarHandleGetAndBitwiseXorAcquire:
1262 case Intrinsics::kVarHandleGetAndBitwiseXorRelease:
1263 case Intrinsics::kVarHandleGetAndSet:
1264 case Intrinsics::kVarHandleGetAndSetAcquire:
1265 case Intrinsics::kVarHandleGetAndSetRelease:
1266 case Intrinsics::kVarHandleGetOpaque:
1267 case Intrinsics::kVarHandleGetVolatile:
1268 case Intrinsics::kVarHandleSet:
1269 case Intrinsics::kVarHandleSetOpaque:
1270 case Intrinsics::kVarHandleSetRelease:
1271 case Intrinsics::kVarHandleSetVolatile:
1272 case Intrinsics::kVarHandleWeakCompareAndSet:
1273 case Intrinsics::kVarHandleWeakCompareAndSetAcquire:
1274 case Intrinsics::kVarHandleWeakCompareAndSetPlain:
1275 case Intrinsics::kVarHandleWeakCompareAndSetRelease:
1276 break;
1277 default:
1278 return; // Not a VarHandle intrinsic, skip.
1279 }
1280
1281 DCHECK(invoke->IsInvokePolymorphic());
1282 VarHandleOptimizations optimizations(invoke);
1283
1284 // Do only simple static checks here (those for which we have enough information). More complex
1285 // checks should be done in instruction simplifier, which runs after other optimization passes
1286 // that may provide useful information.
1287
1288 size_t expected_coordinates_count = GetExpectedVarHandleCoordinatesCount(invoke);
1289 if (expected_coordinates_count > 2u) {
1290 optimizations.SetDoNotIntrinsify();
1291 return;
1292 }
1293 if (expected_coordinates_count != 0u) {
1294 // Except for static fields (no coordinates), the first coordinate must be a reference.
1295 // Do not intrinsify if the reference is null as we would always go to slow path anyway.
1296 HInstruction* object = invoke->InputAt(1);
1297 if (object->GetType() != DataType::Type::kReference || object->IsNullConstant()) {
1298 optimizations.SetDoNotIntrinsify();
1299 return;
1300 }
1301 }
1302 if (expected_coordinates_count == 2u) {
1303 // For arrays and views, the second coordinate must be convertible to `int`.
1304 // In this context, `boolean` is not convertible but we have to look at the shorty
1305 // as compiler transformations can give the invoke a valid boolean input.
1306 DataType::Type index_type = GetDataTypeFromShorty(invoke, 2);
1307 if (index_type == DataType::Type::kBool ||
1308 DataType::Kind(index_type) != DataType::Type::kInt32) {
1309 optimizations.SetDoNotIntrinsify();
1310 return;
1311 }
1312 }
1313
1314 uint32_t number_of_arguments = invoke->GetNumberOfArguments();
1315 DataType::Type return_type = invoke->GetType();
1316 mirror::VarHandle::AccessModeTemplate access_mode_template =
1317 mirror::VarHandle::GetAccessModeTemplateByIntrinsic(invoke->GetIntrinsic());
1318 switch (access_mode_template) {
1319 case mirror::VarHandle::AccessModeTemplate::kGet:
1320 // The return type should be the same as varType, so it shouldn't be void.
1321 if (return_type == DataType::Type::kVoid) {
1322 optimizations.SetDoNotIntrinsify();
1323 return;
1324 }
1325 break;
1326 case mirror::VarHandle::AccessModeTemplate::kSet:
1327 if (return_type != DataType::Type::kVoid) {
1328 optimizations.SetDoNotIntrinsify();
1329 return;
1330 }
1331 break;
1332 case mirror::VarHandle::AccessModeTemplate::kCompareAndSet: {
1333 if (return_type != DataType::Type::kBool) {
1334 optimizations.SetDoNotIntrinsify();
1335 return;
1336 }
1337 uint32_t expected_value_index = number_of_arguments - 2;
1338 uint32_t new_value_index = number_of_arguments - 1;
1339 DataType::Type expected_value_type = GetDataTypeFromShorty(invoke, expected_value_index);
1340 DataType::Type new_value_type = GetDataTypeFromShorty(invoke, new_value_index);
1341 if (expected_value_type != new_value_type) {
1342 optimizations.SetDoNotIntrinsify();
1343 return;
1344 }
1345 break;
1346 }
1347 case mirror::VarHandle::AccessModeTemplate::kCompareAndExchange: {
1348 uint32_t expected_value_index = number_of_arguments - 2;
1349 uint32_t new_value_index = number_of_arguments - 1;
1350 DataType::Type expected_value_type = GetDataTypeFromShorty(invoke, expected_value_index);
1351 DataType::Type new_value_type = GetDataTypeFromShorty(invoke, new_value_index);
1352 if (expected_value_type != new_value_type || return_type != expected_value_type) {
1353 optimizations.SetDoNotIntrinsify();
1354 return;
1355 }
1356 break;
1357 }
1358 case mirror::VarHandle::AccessModeTemplate::kGetAndUpdate: {
1359 DataType::Type value_type = GetDataTypeFromShorty(invoke, number_of_arguments - 1);
1360 if (IsVarHandleGetAndAdd(invoke) &&
1361 (value_type == DataType::Type::kReference || value_type == DataType::Type::kBool)) {
1362 // We should only add numerical types.
1363 //
1364 // For byte array views, floating-point types are not allowed, see the javadoc comments for
1365 // java.lang.invoke.MethodHandles.byteArrayViewVarHandle(). But ART treats them as numeric
1366 // types in ByteArrayViewVarHandle::Access(). Consequently we do generate intrinsic code,
1367 // but it always fails the access mode check at runtime.
1368 optimizations.SetDoNotIntrinsify();
1369 return;
1370 } else if (IsVarHandleGetAndBitwiseOp(invoke) && !DataType::IsIntegralType(value_type)) {
1371 // We can only apply operators to bitwise integral types.
1372 // Note that bitwise VarHandle operations accept a non-integral boolean type and
1373 // perform the appropriate logical operation. However, the result is the same as
1374 // using the bitwise operation on our boolean representation and this fits well
1375 // with DataType::IsIntegralType() treating the compiler type kBool as integral.
1376 optimizations.SetDoNotIntrinsify();
1377 return;
1378 }
1379 if (value_type != return_type && return_type != DataType::Type::kVoid) {
1380 optimizations.SetDoNotIntrinsify();
1381 return;
1382 }
1383 break;
1384 }
1385 }
1386 }
1387
1388 bool HInstructionBuilder::BuildInvokePolymorphic(uint32_t dex_pc,
1389 uint32_t method_idx,
1390 dex::ProtoIndex proto_idx,
1391 const InstructionOperands& operands) {
1392 const char* shorty = dex_file_->GetShorty(proto_idx);
1393 DCHECK_EQ(1 + ArtMethod::NumArgRegisters(shorty), operands.GetNumberOfOperands());
1394 DataType::Type return_type = DataType::FromShorty(shorty[0]);
1395 size_t number_of_arguments = strlen(shorty);
1396 // We use ResolveMethod, which is also used in BuildInvoke, in order
1397 // not to duplicate code. As such, we need to provide is_string_constructor
1398 // even if we don't need it afterwards.
1399 InvokeType invoke_type = InvokeType::kPolymorphic;
1400 bool is_string_constructor = false;
1401 uint16_t imt_or_vtable_index = DexFile::kDexNoIndex16;
1402 MethodReference resolved_method_reference(nullptr, 0u);
1403 ArtMethod* resolved_method = ResolveMethod(method_idx,
1404 graph_->GetArtMethod(),
1405 *dex_compilation_unit_,
1406 &invoke_type,
1407 &resolved_method_reference,
1408 &imt_or_vtable_index,
1409 &is_string_constructor);
1410
1411 MethodReference method_reference(&graph_->GetDexFile(), method_idx);
1412
1413 // MethodHandle.invokeExact intrinsic needs to check whether call-site matches with MethodHandle's
1414 // type. To do that, MethodType corresponding to the call-site is passed as an extra input.
1415 // Other invoke-polymorphic calls do not need it.
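// Roughly: for a call like `int r = (int) mh.invokeExact(obj);` the call-site type
// (Object)int is recorded as a MethodType; at runtime it must match the MethodHandle's
// type exactly, otherwise WrongMethodTypeException is thrown before invoking the target.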
1416 bool can_be_intrinsified =
1417 static_cast<Intrinsics>(resolved_method->GetIntrinsic()) ==
1418 Intrinsics::kMethodHandleInvokeExact;
1419
1420 uint32_t number_of_other_inputs = can_be_intrinsified ? 1u : 0u;
1421
1422 HInvoke* invoke = new (allocator_) HInvokePolymorphic(allocator_,
1423 number_of_arguments,
1424 operands.GetNumberOfOperands(),
1425 number_of_other_inputs,
1426 return_type,
1427 dex_pc,
1428 method_reference,
1429 resolved_method,
1430 resolved_method_reference,
1431 proto_idx);
1432 if (!HandleInvoke(invoke, operands, shorty, /* is_unresolved= */ false)) {
1433 return false;
1434 }
1435
1436 DCHECK_EQ(invoke->AsInvokePolymorphic()->IsMethodHandleInvokeExact(), can_be_intrinsified);
1437
1438 if (invoke->GetIntrinsic() != Intrinsics::kNone &&
1439 invoke->GetIntrinsic() != Intrinsics::kMethodHandleInvoke &&
1440 invoke->GetIntrinsic() != Intrinsics::kMethodHandleInvokeExact &&
1441 VarHandleAccessorNeedsReturnTypeCheck(invoke, return_type)) {
1442 // Type check is needed because VarHandle intrinsics do not type check the retrieved reference.
1443 ScopedObjectAccess soa(Thread::Current());
1444 ArtMethod* referrer = graph_->GetArtMethod();
1445 dex::TypeIndex return_type_index =
1446 referrer->GetDexFile()->GetProtoId(proto_idx).return_type_idx_;
1447
1448 BuildTypeCheck(/* is_instance_of= */ false, invoke, return_type_index, dex_pc);
1449 latest_result_ = current_block_->GetLastInstruction();
1450 }
1451
1452 DecideVarHandleIntrinsic(invoke);
1453
1454 return true;
1455 }
1456
1457
1458 bool HInstructionBuilder::BuildInvokeCustom(uint32_t dex_pc,
1459 uint32_t call_site_idx,
1460 const InstructionOperands& operands) {
1461 dex::ProtoIndex proto_idx = dex_file_->GetProtoIndexForCallSite(call_site_idx);
1462 const char* shorty = dex_file_->GetShorty(proto_idx);
1463 DataType::Type return_type = DataType::FromShorty(shorty[0]);
1464 size_t number_of_arguments = strlen(shorty) - 1;
1465 // HInvokeCustom takes a kDexNoIndex method reference.
1466 MethodReference method_reference(&graph_->GetDexFile(), dex::kDexNoIndex);
1467 HInvoke* invoke = new (allocator_) HInvokeCustom(allocator_,
1468 number_of_arguments,
1469 operands.GetNumberOfOperands(),
1470 call_site_idx,
1471 return_type,
1472 dex_pc,
1473 method_reference,
1474 !graph_->IsDebuggable());
1475 return HandleInvoke(invoke, operands, shorty, /* is_unresolved= */ false);
1476 }
1477
1478 HNewInstance* HInstructionBuilder::BuildNewInstance(dex::TypeIndex type_index, uint32_t dex_pc) {
1479 ScopedObjectAccess soa(Thread::Current());
1480
1481 HLoadClass* load_class = BuildLoadClass(type_index, dex_pc);
1482
1483 HInstruction* cls = load_class;
1484 Handle<mirror::Class> klass = load_class->GetClass();
1485
1486 if (!IsInitialized(klass.Get())) {
1487 cls = new (allocator_) HClinitCheck(load_class, dex_pc);
1488 AppendInstruction(cls);
1489 }
1490
1491 // Only the access check entrypoint handles the finalizable class case. If we
1492 // need access checks, then we haven't resolved the method and the class may
1493 // again be finalizable.
1494 QuickEntrypointEnum entrypoint = kQuickAllocObjectInitialized;
1495 if (load_class->NeedsAccessCheck() ||
1496 klass == nullptr || // Finalizable/instantiable is unknown.
1497 klass->IsFinalizable() ||
1498 klass.Get() == klass->GetClass() || // Classes cannot be allocated in code
1499 !klass->IsInstantiable()) {
1500 entrypoint = kQuickAllocObjectWithChecks;
1501 }
1502 // We will always be able to resolve the string class since it is in the BCP.
1503 if (!klass.IsNull() && klass->IsStringClass()) {
1504 entrypoint = kQuickAllocStringObject;
1505 }
1506
1507 // Consider classes we haven't resolved as potentially finalizable.
1508 bool finalizable = (klass == nullptr) || klass->IsFinalizable();
1509
1510 HNewInstance* new_instance = new (allocator_) HNewInstance(
1511 cls,
1512 dex_pc,
1513 type_index,
1514 *dex_compilation_unit_->GetDexFile(),
1515 finalizable,
1516 entrypoint);
1517 AppendInstruction(new_instance);
1518
1519 return new_instance;
1520 }
1521
1522 void HInstructionBuilder::BuildConstructorFenceForAllocation(HInstruction* allocation) {
1523 DCHECK(allocation != nullptr &&
1524 (allocation->IsNewInstance() ||
1525 allocation->IsNewArray())); // corresponding to "new" keyword in JLS.
1526
1527 if (allocation->IsNewInstance()) {
1528 // STRING SPECIAL HANDLING:
1529 // -------------------------------
1530 // Strings have a real HNewInstance node, but it always ends up with 0 uses:
1531 // every use of a String HNewInstance is transformed to take the result of the
1532 // StringFactory invoke as its input instead.
1533 //
1534 // Do not emit an HConstructorFence here since it can inhibit some String new-instance
1535 // optimizations (to pass checker tests that rely on those optimizations).
1536 HNewInstance* new_inst = allocation->AsNewInstance();
1537 HLoadClass* load_class = new_inst->GetLoadClass();
1538
1539 Thread* self = Thread::Current();
1540 ScopedObjectAccess soa(self);
1541 StackHandleScope<1> hs(self);
1542 Handle<mirror::Class> klass = load_class->GetClass();
1543 if (klass != nullptr && klass->IsStringClass()) {
1544 return;
1545 // Note: Do not use allocation->IsStringAlloc, which requires
1546 // valid ReferenceTypeInfo; that is not available until after reference type
1547 // propagation (the instruction builder runs too early).
1548 }
1549 // (In terms of correctness, the StringFactory needs to provide its own
1550 // default initialization barrier, see below.)
1551 }
1552
1553 // JLS 17.4.5 "Happens-before Order" describes:
1554 //
1555 // The default initialization of any object happens-before any other actions (other than
1556 // default-writes) of a program.
1557 //
1558 // In our implementation the default initialization of an object to type T means
1559 // setting all of its initial data (object[0..size)) to 0, and setting the
1560 // object's class header (i.e. object.getClass() == T.class).
1561 //
1562 // In practice this fence ensures that the writes to the object header
1563 // are visible to other threads if this object escapes the current thread
1564 // (and in theory also the zero-initialization, but that happens automatically
1565 // when new memory pages are mapped in by the OS).
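// A sketch of the race this guards against: without a fence, publishing the new object
// via e.g. `sField = new Foo();` would let another thread that reads `sField` without
// synchronization observe the reference before the header/zero-initialization writes
// become visible, i.e. see a partially set up object.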
1566 HConstructorFence* ctor_fence =
1567 new (allocator_) HConstructorFence(allocation, allocation->GetDexPc(), allocator_);
1568 AppendInstruction(ctor_fence);
1569 MaybeRecordStat(
1570 compilation_stats_,
1571 MethodCompilationStat::kConstructorFenceGeneratedNew);
1572 }
1573
1574 static bool IsInImage(ObjPtr<mirror::Class> cls, const CompilerOptions& compiler_options)
1575 REQUIRES_SHARED(Locks::mutator_lock_) {
1576 if (Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(cls)) {
1577 return true;
1578 }
1579 if (compiler_options.IsGeneratingImage()) {
1580 std::string temp;
1581 const char* descriptor = cls->GetDescriptor(&temp);
1582 return compiler_options.IsImageClass(descriptor);
1583 } else {
1584 return false;
1585 }
1586 }
1587
1588 static bool IsSubClass(ObjPtr<mirror::Class> to_test, ObjPtr<mirror::Class> super_class)
1589 REQUIRES_SHARED(Locks::mutator_lock_) {
1590 return to_test != nullptr && !to_test->IsInterface() && to_test->IsSubClass(super_class);
1591 }
1592
1593 static bool HasTrivialClinit(ObjPtr<mirror::Class> klass, PointerSize pointer_size)
1594 REQUIRES_SHARED(Locks::mutator_lock_) {
1595 // Check if the class has encoded fields that trigger bytecode execution.
1596 // (Encoded fields are just a different representation of <clinit>.)
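// For example, `static final int X = 42;` typically ends up as an encoded kInt value with
// no corresponding bytecode in <clinit>, whereas `static final Foo X = new Foo();` needs
// real <clinit> code and would not be considered trivial here.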
1597 if (klass->NumStaticFields() != 0u) {
1598 DCHECK(klass->GetClassDef() != nullptr);
1599 EncodedStaticFieldValueIterator it(klass->GetDexFile(), *klass->GetClassDef());
1600 for (; it.HasNext(); it.Next()) {
1601 switch (it.GetValueType()) {
1602 case EncodedArrayValueIterator::ValueType::kBoolean:
1603 case EncodedArrayValueIterator::ValueType::kByte:
1604 case EncodedArrayValueIterator::ValueType::kShort:
1605 case EncodedArrayValueIterator::ValueType::kChar:
1606 case EncodedArrayValueIterator::ValueType::kInt:
1607 case EncodedArrayValueIterator::ValueType::kLong:
1608 case EncodedArrayValueIterator::ValueType::kFloat:
1609 case EncodedArrayValueIterator::ValueType::kDouble:
1610 case EncodedArrayValueIterator::ValueType::kNull:
1611 case EncodedArrayValueIterator::ValueType::kString:
1612 // Primitive, null or j.l.String initialization is permitted.
1613 break;
1614 case EncodedArrayValueIterator::ValueType::kType:
1615 // Type initialization can load classes and execute bytecode through a class loader
1616 // which can execute arbitrary bytecode. We do not optimize for known class loaders;
1617 // kType is rarely used (if ever).
1618 return false;
1619 default:
1620 // Other types in the encoded static field list are rejected by the DexFileVerifier.
1621 LOG(FATAL) << "Unexpected type " << it.GetValueType();
1622 UNREACHABLE();
1623 }
1624 }
1625 }
1626 // Check if the class has <clinit> that executes arbitrary code.
1627 // Initialization of static fields of the class itself with constants is allowed.
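// A trivial <clinit> accepted below looks roughly like
//   const/16 v0, 100
//   sput v0, LFoo;->x:I   (a static field of Foo itself)
//   return-void
// whereas any invoke, a store to another class's static field, or a non-primitive
// new-array makes the initializer non-trivial.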
1628 ArtMethod* clinit = klass->FindClassInitializer(pointer_size);
1629 if (clinit != nullptr) {
1630 const DexFile& dex_file = *clinit->GetDexFile();
1631 CodeItemInstructionAccessor accessor(dex_file, clinit->GetCodeItem());
1632 for (DexInstructionPcPair it : accessor) {
1633 switch (it->Opcode()) {
1634 case Instruction::CONST_4:
1635 case Instruction::CONST_16:
1636 case Instruction::CONST:
1637 case Instruction::CONST_HIGH16:
1638 case Instruction::CONST_WIDE_16:
1639 case Instruction::CONST_WIDE_32:
1640 case Instruction::CONST_WIDE:
1641 case Instruction::CONST_WIDE_HIGH16:
1642 case Instruction::CONST_STRING:
1643 case Instruction::CONST_STRING_JUMBO:
1644 // Primitive, null or j.l.String initialization is permitted.
1645 break;
1646 case Instruction::RETURN_VOID:
1647 break;
1648 case Instruction::SPUT:
1649 case Instruction::SPUT_WIDE:
1650 case Instruction::SPUT_OBJECT:
1651 case Instruction::SPUT_BOOLEAN:
1652 case Instruction::SPUT_BYTE:
1653 case Instruction::SPUT_CHAR:
1654 case Instruction::SPUT_SHORT:
1655 // Only initialization of a static field of the same class is permitted.
1656 if (dex_file.GetFieldId(it->VRegB_21c()).class_idx_ != klass->GetDexTypeIndex()) {
1657 return false;
1658 }
1659 break;
1660 case Instruction::NEW_ARRAY:
1661 // Only primitive arrays are permitted.
1662 if (Primitive::GetType(dex_file.GetTypeDescriptor(dex_file.GetTypeId(
1663 dex::TypeIndex(it->VRegC_22c())))[1]) == Primitive::kPrimNot) {
1664 return false;
1665 }
1666 break;
1667 case Instruction::APUT:
1668 case Instruction::APUT_WIDE:
1669 case Instruction::APUT_BOOLEAN:
1670 case Instruction::APUT_BYTE:
1671 case Instruction::APUT_CHAR:
1672 case Instruction::APUT_SHORT:
1673 case Instruction::FILL_ARRAY_DATA:
1674 case Instruction::NOP:
1675 // Allow initialization of primitive arrays (only constants can be stored).
1676 // Note: We expect NOPs used for fill-array-data-payload but accept all NOPs
1677 // (even unreferenced switch payloads if they make it through the verifier).
1678 break;
1679 default:
1680 return false;
1681 }
1682 }
1683 }
1684 return true;
1685 }
1686
1687 static bool HasTrivialInitialization(ObjPtr<mirror::Class> cls,
1688 const CompilerOptions& compiler_options)
1689 REQUIRES_SHARED(Locks::mutator_lock_) {
1690 Runtime* runtime = Runtime::Current();
1691 PointerSize pointer_size = runtime->GetClassLinker()->GetImagePointerSize();
1692
1693 // Check the superclass chain.
1694 for (ObjPtr<mirror::Class> klass = cls; klass != nullptr; klass = klass->GetSuperClass()) {
1695 if (klass->IsInitialized() && IsInImage(klass, compiler_options)) {
1696 break; // `klass` and its superclasses are already initialized in the boot or app image.
1697 }
1698 if (!HasTrivialClinit(klass, pointer_size)) {
1699 return false;
1700 }
1701 }
1702
1703 // Also check interfaces with default methods as they need to be initialized as well.
1704 ObjPtr<mirror::IfTable> iftable = cls->GetIfTable();
1705 DCHECK(iftable != nullptr);
1706 for (int32_t i = 0, count = iftable->Count(); i != count; ++i) {
1707 ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
1708 if (!iface->HasDefaultMethods()) {
1709 continue; // Initializing `cls` does not initialize this interface.
1710 }
1711 if (iface->IsInitialized() && IsInImage(iface, compiler_options)) {
1712 continue; // This interface is already initialized in the boot or app image.
1713 }
1714 if (!HasTrivialClinit(iface, pointer_size)) {
1715 return false;
1716 }
1717 }
1718 return true;
1719 }
1720
1721 bool HInstructionBuilder::IsInitialized(ObjPtr<mirror::Class> cls) const {
1722 if (cls == nullptr) {
1723 return false;
1724 }
1725
1726 // Check if the class will be initialized at runtime.
1727 if (cls->IsInitialized()) {
1728 const CompilerOptions& compiler_options = code_generator_->GetCompilerOptions();
1729 if (compiler_options.IsAotCompiler()) {
1730 // Assume loaded only if klass is in the boot or app image.
1731 if (IsInImage(cls, compiler_options)) {
1732 return true;
1733 }
1734 } else {
1735 DCHECK(compiler_options.IsJitCompiler());
1736 if (Runtime::Current()->GetJit()->CanAssumeInitialized(
1737 cls,
1738 compiler_options.IsJitCompilerForSharedCode())) {
1739 // For JIT, the class cannot revert to an uninitialized state.
1740 return true;
1741 }
1742 }
1743 }
1744
1745 // We can avoid the class initialization check for `cls` in static methods and constructors
1746 // in the very same class; invoking a static method involves a class initialization check
1747 // and so does the instance allocation that must be executed before invoking a constructor.
1748 // Other instance methods of the same class can run on an escaped instance
1749 // of an erroneous class. Even a superclass may need to be checked as the subclass
1750 // can be completely initialized while the superclass is initializing and the subclass
1751 // remains initialized when the superclass initializer throws afterwards. b/62478025
1752 // Note: The HClinitCheck+HInvokeStaticOrDirect merging can still apply.
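// Illustrative sketch of the subtlety above: if A.<clinit> (directly or indirectly)
// triggers initialization of its subclass B, B can complete initialization while A is
// still marked as initializing on the same thread; if A.<clinit> then throws, A becomes
// erroneous but B stays initialized, so B's methods may later run against an erroneous
// superclass.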
1753 auto is_static_method_or_constructor_of_cls = [cls](const DexCompilationUnit& compilation_unit)
1754 REQUIRES_SHARED(Locks::mutator_lock_) {
1755 return (compilation_unit.GetAccessFlags() & (kAccStatic | kAccConstructor)) != 0u &&
1756 compilation_unit.GetCompilingClass().Get() == cls;
1757 };
1758 if (is_static_method_or_constructor_of_cls(*outer_compilation_unit_) ||
1759 // Check also the innermost method. Though excessive copies of ClinitCheck can be
1760 // eliminated by GVN, that happens only after the decision whether to inline the
1761 // graph or not and that may depend on the presence of the ClinitCheck.
1762 // TODO: We should walk over the entire inlined method chain, but we don't pass that
1763 // information to the builder.
1764 is_static_method_or_constructor_of_cls(*dex_compilation_unit_)) {
1765 return true;
1766 }
1767
1768 // Otherwise, we may be able to avoid the check if `cls` is a superclass of a method being
1769 // compiled here (anywhere in the inlining chain) as the `cls` must have started initializing
1770 // before calling any `cls` or subclass methods. Static methods require a clinit check and
1771 // instance methods require an instance which cannot be created before doing a clinit check.
1772 // When a subclass of `cls` starts initializing, it starts initializing its superclass
1773 // chain up to `cls` without running any bytecode, i.e. without any opportunity for circular
1774 // initialization weirdness.
1775 //
1776 // If the initialization of `cls` is trivial (`cls` and its superclasses and superinterfaces
1777 // with default methods initialize only their own static fields using constant values), it must
1778 // complete, either successfully or by throwing and marking `cls` erroneous, without allocating
1779 // any instances of `cls` or subclasses (or any other class) and without calling any methods.
1780 // If it completes by throwing, no instances of `cls` shall be created and no subclass method
1781 // bytecode shall execute (see above), therefore the instruction we're building shall be
1782 // unreachable. By reaching the instruction, we know that `cls` was initialized successfully.
1783 //
1784 // TODO: We should walk over the entire inlined methods chain, but we don't pass that
1785 // information to the builder. (We could also check if we're guaranteed a non-null instance
1786 // of `cls` at this location but that's outside the scope of the instruction builder.)
1787 bool is_subclass = IsSubClass(outer_compilation_unit_->GetCompilingClass().Get(), cls);
1788 if (dex_compilation_unit_ != outer_compilation_unit_) {
1789 is_subclass = is_subclass ||
1790 IsSubClass(dex_compilation_unit_->GetCompilingClass().Get(), cls);
1791 }
1792 if (is_subclass && HasTrivialInitialization(cls, code_generator_->GetCompilerOptions())) {
1793 return true;
1794 }
1795
1796 return false;
1797 }
1798
1799 HClinitCheck* HInstructionBuilder::ProcessClinitCheckForInvoke(
1800 uint32_t dex_pc,
1801 ArtMethod* resolved_method,
1802 HInvokeStaticOrDirect::ClinitCheckRequirement* clinit_check_requirement) {
1803 ScopedObjectAccess soa(Thread::Current());
1804 ObjPtr<mirror::Class> klass = resolved_method->GetDeclaringClass();
1805
1806 HClinitCheck* clinit_check = nullptr;
1807 if (IsInitialized(klass)) {
1808 *clinit_check_requirement = HInvokeStaticOrDirect::ClinitCheckRequirement::kNone;
1809 } else {
1810 Handle<mirror::Class> h_klass = graph_->GetHandleCache()->NewHandle(klass);
1811 HLoadClass* cls = BuildLoadClass(h_klass->GetDexTypeIndex(),
1812 h_klass->GetDexFile(),
1813 h_klass,
1814 dex_pc,
1815 /* needs_access_check= */ false);
1816 if (cls != nullptr) {
1817 *clinit_check_requirement = HInvokeStaticOrDirect::ClinitCheckRequirement::kExplicit;
1818 clinit_check = new (allocator_) HClinitCheck(cls, dex_pc);
1819 AppendInstruction(clinit_check);
1820 } else {
1821 // Let the invoke handle this with an implicit class initialization check.
1822 *clinit_check_requirement = HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit;
1823 }
1824 }
1825 return clinit_check;
1826 }
1827
1828 bool HInstructionBuilder::SetupInvokeArguments(HInstruction* invoke,
1829 const InstructionOperands& operands,
1830 const char* shorty,
1831 ReceiverArg receiver_arg) {
1832 // Note: The `invoke` can be an intrinsic replacement, so it is not necessarily an HInvoke.
1833 // In that case, do not log errors, they shall be reported when we try to build the HInvoke.
1834 uint32_t shorty_index = 1; // Skip the return type.
1835 const size_t number_of_operands = operands.GetNumberOfOperands();
1836 bool argument_length_error = false;
1837
1838 size_t start_index = 0u;
1839 size_t argument_index = 0u;
1840 if (receiver_arg != ReceiverArg::kNone) {
1841 if (number_of_operands == 0u) {
1842 argument_length_error = true;
1843 } else {
1844 start_index = 1u;
1845 if (receiver_arg != ReceiverArg::kIgnored) {
1846 uint32_t obj_reg = operands.GetOperand(0u);
1847 HInstruction* arg = (receiver_arg == ReceiverArg::kPlainArg)
1848 ? LoadLocal(obj_reg, DataType::Type::kReference)
1849 : LoadNullCheckedLocal(obj_reg, invoke->GetDexPc());
1850 if (receiver_arg != ReceiverArg::kNullCheckedOnly) {
1851 invoke->SetRawInputAt(0u, arg);
1852 argument_index = 1u;
1853 }
1854 }
1855 }
1856 }
1857
1858 for (size_t i = start_index; i < number_of_operands; ++i, ++argument_index) {
1859 // Make sure we don't go over the expected arguments or over the number of
1860 // dex registers given. If the instruction was seen as dead by the verifier,
1861 // it hasn't been properly checked.
1862 if (UNLIKELY(shorty[shorty_index] == 0)) {
1863 argument_length_error = true;
1864 break;
1865 }
1866 DataType::Type type = DataType::FromShorty(shorty[shorty_index++]);
1867 bool is_wide = (type == DataType::Type::kInt64) || (type == DataType::Type::kFloat64);
1868 if (is_wide && ((i + 1 == number_of_operands) ||
1869 (operands.GetOperand(i) + 1 != operands.GetOperand(i + 1)))) {
1870 if (invoke->IsInvoke()) {
1871 // Longs and doubles should be in pairs, that is, sequential registers. The verifier should
1872 // reject any class where this is violated. However, the verifier only does these checks
1873 // on non-trivially-dead instructions, so we just bail out of the compilation.
1874 VLOG(compiler) << "Did not compile "
1875 << dex_file_->PrettyMethod(dex_compilation_unit_->GetDexMethodIndex())
1876 << " because of non-sequential dex register pair in wide argument";
1877 MaybeRecordStat(compilation_stats_,
1878 MethodCompilationStat::kNotCompiledMalformedOpcode);
1879 }
1880 return false;
1881 }
1882 HInstruction* arg = LoadLocal(operands.GetOperand(i), type);
1883 DCHECK(invoke->InputAt(argument_index) == nullptr);
1884 invoke->SetRawInputAt(argument_index, arg);
1885 if (is_wide) {
1886 ++i;
1887 }
1888 }
1889
1890 argument_length_error = argument_length_error || shorty[shorty_index] != 0;
1891 if (argument_length_error) {
1892 if (invoke->IsInvoke()) {
1893 VLOG(compiler) << "Did not compile "
1894 << dex_file_->PrettyMethod(dex_compilation_unit_->GetDexMethodIndex())
1895 << " because of wrong number of arguments in invoke instruction";
1896 MaybeRecordStat(compilation_stats_,
1897 MethodCompilationStat::kNotCompiledMalformedOpcode);
1898 }
1899 return false;
1900 }
1901
1902 if (invoke->IsInvokeStaticOrDirect() &&
1903 HInvokeStaticOrDirect::NeedsCurrentMethodInput(
1904 invoke->AsInvokeStaticOrDirect()->GetDispatchInfo())) {
1905 DCHECK_EQ(argument_index, invoke->AsInvokeStaticOrDirect()->GetCurrentMethodIndex());
1906 DCHECK(invoke->InputAt(argument_index) == nullptr);
1907 invoke->SetRawInputAt(argument_index, graph_->GetCurrentMethod());
1908 }
1909
1910 if (invoke->IsInvokeInterface() &&
1911 (invoke->AsInvokeInterface()->GetHiddenArgumentLoadKind() == MethodLoadKind::kRecursive)) {
1912 invoke->SetRawInputAt(invoke->AsInvokeInterface()->GetNumberOfArguments() - 1,
1913 graph_->GetCurrentMethod());
1914 }
1915
1916 if (invoke->IsInvokePolymorphic()) {
1917 HInvokePolymorphic* invoke_polymorphic = invoke->AsInvokePolymorphic();
1918
1919 // MethodHandle.invokeExact intrinsic expects MethodType corresponding to the call-site as an
1920 // extra input to determine whether to throw WrongMethodTypeException or execute target method.
1921 if (invoke_polymorphic->IsMethodHandleInvokeExact()) {
1922 HLoadMethodType* load_method_type =
1923 new (allocator_) HLoadMethodType(graph_->GetCurrentMethod(),
1924 invoke_polymorphic->GetProtoIndex(),
1925 graph_->GetDexFile(),
1926 invoke_polymorphic->GetDexPc());
1927 HSharpening::ProcessLoadMethodType(load_method_type,
1928 code_generator_,
1929 *dex_compilation_unit_,
1930 graph_->GetHandleCache()->GetHandles());
1931 invoke->SetRawInputAt(invoke_polymorphic->GetNumberOfArguments(), load_method_type);
1932 AppendInstruction(load_method_type);
1933 }
1934 }
1935
1936 return true;
1937 }
1938
1939 bool HInstructionBuilder::HandleInvoke(HInvoke* invoke,
1940 const InstructionOperands& operands,
1941 const char* shorty,
1942 bool is_unresolved) {
1943 DCHECK_IMPLIES(invoke->IsInvokeStaticOrDirect(),
1944 !invoke->AsInvokeStaticOrDirect()->IsStringInit());
1945
1946 ReceiverArg receiver_arg = (invoke->GetInvokeType() == InvokeType::kStatic)
1947 ? ReceiverArg::kNone
1948 : (is_unresolved ? ReceiverArg::kPlainArg : ReceiverArg::kNullCheckedArg);
1949 if (!SetupInvokeArguments(invoke, operands, shorty, receiver_arg)) {
1950 return false;
1951 }
1952
1953 AppendInstruction(invoke);
1954 latest_result_ = invoke;
1955
1956 return true;
1957 }
1958
1959 bool HInstructionBuilder::BuildSimpleIntrinsic(ArtMethod* method,
1960 uint32_t dex_pc,
1961 const InstructionOperands& operands,
1962 const char* shorty) {
1963 Intrinsics intrinsic = method->GetIntrinsic();
1964 DCHECK_NE(intrinsic, Intrinsics::kNone);
1965 constexpr DataType::Type kInt32 = DataType::Type::kInt32;
1966 constexpr DataType::Type kInt64 = DataType::Type::kInt64;
1967 constexpr DataType::Type kFloat32 = DataType::Type::kFloat32;
1968 constexpr DataType::Type kFloat64 = DataType::Type::kFloat64;
1969 ReceiverArg receiver_arg = method->IsStatic() ? ReceiverArg::kNone : ReceiverArg::kNullCheckedArg;
1970 HInstruction* instruction = nullptr;
1971 switch (intrinsic) {
1972 case Intrinsics::kIntegerRotateLeft:
1973 instruction = new (allocator_) HRol(kInt32, /*value=*/ nullptr, /*distance=*/ nullptr);
1974 break;
1975 case Intrinsics::kIntegerRotateRight:
1976 instruction = new (allocator_) HRor(kInt32, /*value=*/ nullptr, /*distance=*/ nullptr);
1977 break;
1978 case Intrinsics::kLongRotateLeft:
1979 instruction = new (allocator_) HRol(kInt64, /*value=*/ nullptr, /*distance=*/ nullptr);
1980 break;
1981 case Intrinsics::kLongRotateRight:
1982 instruction = new (allocator_) HRor(kInt64, /*value=*/ nullptr, /*distance=*/ nullptr);
1983 break;
1984 case Intrinsics::kIntegerCompare:
1985 instruction = new (allocator_) HCompare(
1986 kInt32, /*first=*/ nullptr, /*second=*/ nullptr, ComparisonBias::kNoBias, dex_pc);
1987 break;
1988 case Intrinsics::kLongCompare:
1989 instruction = new (allocator_) HCompare(
1990 kInt64, /*first=*/ nullptr, /*second=*/ nullptr, ComparisonBias::kNoBias, dex_pc);
1991 break;
1992 case Intrinsics::kIntegerSignum:
1993 instruction = new (allocator_) HCompare(
1994 kInt32, /*first=*/ nullptr, graph_->GetIntConstant(0), ComparisonBias::kNoBias, dex_pc);
1995 break;
1996 case Intrinsics::kLongSignum:
1997 instruction = new (allocator_) HCompare(
1998 kInt64, /*first=*/ nullptr, graph_->GetLongConstant(0), ComparisonBias::kNoBias, dex_pc);
1999 break;
2000 case Intrinsics::kFloatIsNaN:
2001 case Intrinsics::kDoubleIsNaN: {
2002 // IsNaN(x) is the same as x != x.
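// (Per IEEE 754, NaN is the only value that does not compare equal to itself, so the
// x != x comparison below is true exactly when x is NaN.)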
2003 instruction = new (allocator_) HNotEqual(/*first=*/ nullptr, /*second=*/ nullptr, dex_pc);
2004 instruction->AsCondition()->SetBias(ComparisonBias::kLtBias);
2005 break;
2006 }
2007 case Intrinsics::kStringCharAt:
2008 // We treat String as an array to allow DCE and BCE to seamlessly work on strings.
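// (E.g. modelling charAt as an HArrayGet with an explicit HBoundsCheck lets BCE remove
// the check when the index is already known to be smaller than the string's length.)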
2009 instruction = new (allocator_) HArrayGet(/*array=*/ nullptr,
2010 /*index=*/ nullptr,
2011 DataType::Type::kUint16,
2012 SideEffects::None(), // Strings are immutable.
2013 dex_pc,
2014 /*is_string_char_at=*/ true);
2015 break;
2016 case Intrinsics::kStringIsEmpty:
2017 case Intrinsics::kStringLength:
2018 // We treat String as an array to allow DCE and BCE to seamlessly work on strings.
2019 // For String.isEmpty(), we add a comparison with 0 below.
2020 instruction =
2021 new (allocator_) HArrayLength(/*array=*/ nullptr, dex_pc, /* is_string_length= */ true);
2022 break;
2023 case Intrinsics::kUnsafeLoadFence:
2024 case Intrinsics::kJdkUnsafeLoadFence:
2025 receiver_arg = ReceiverArg::kNullCheckedOnly;
2026 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kLoadAny, dex_pc);
2027 break;
2028 case Intrinsics::kUnsafeStoreFence:
2029 case Intrinsics::kJdkUnsafeStoreFence:
2030 receiver_arg = ReceiverArg::kNullCheckedOnly;
2031 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kAnyStore, dex_pc);
2032 break;
2033 case Intrinsics::kUnsafeFullFence:
2034 case Intrinsics::kJdkUnsafeFullFence:
2035 receiver_arg = ReceiverArg::kNullCheckedOnly;
2036 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kAnyAny, dex_pc);
2037 break;
2038 case Intrinsics::kVarHandleFullFence:
2039 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kAnyAny, dex_pc);
2040 break;
2041 case Intrinsics::kVarHandleAcquireFence:
2042 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kLoadAny, dex_pc);
2043 break;
2044 case Intrinsics::kVarHandleReleaseFence:
2045 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kAnyStore, dex_pc);
2046 break;
2047 case Intrinsics::kVarHandleLoadLoadFence:
2048 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kLoadAny, dex_pc);
2049 break;
2050 case Intrinsics::kVarHandleStoreStoreFence:
2051 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kStoreStore, dex_pc);
2052 break;
2053 case Intrinsics::kMathMinIntInt:
2054 instruction = new (allocator_) HMin(kInt32, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
2055 break;
2056 case Intrinsics::kMathMinLongLong:
2057 instruction = new (allocator_) HMin(kInt64, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
2058 break;
2059 case Intrinsics::kMathMinFloatFloat:
2060 instruction = new (allocator_) HMin(kFloat32, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
2061 break;
2062 case Intrinsics::kMathMinDoubleDouble:
2063 instruction = new (allocator_) HMin(kFloat64, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
2064 break;
2065 case Intrinsics::kMathMaxIntInt:
2066 instruction = new (allocator_) HMax(kInt32, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
2067 break;
2068 case Intrinsics::kMathMaxLongLong:
2069 instruction = new (allocator_) HMax(kInt64, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
2070 break;
2071 case Intrinsics::kMathMaxFloatFloat:
2072 instruction = new (allocator_) HMax(kFloat32, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
2073 break;
2074 case Intrinsics::kMathMaxDoubleDouble:
2075 instruction = new (allocator_) HMax(kFloat64, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
2076 break;
2077 case Intrinsics::kMathAbsInt:
2078 instruction = new (allocator_) HAbs(kInt32, /*input=*/ nullptr, dex_pc);
2079 break;
2080 case Intrinsics::kMathAbsLong:
2081 instruction = new (allocator_) HAbs(kInt64, /*input=*/ nullptr, dex_pc);
2082 break;
2083 case Intrinsics::kMathAbsFloat:
2084 instruction = new (allocator_) HAbs(kFloat32, /*input=*/ nullptr, dex_pc);
2085 break;
2086 case Intrinsics::kMathAbsDouble:
2087 instruction = new (allocator_) HAbs(kFloat64, /*input=*/ nullptr, dex_pc);
2088 break;
2089 default:
2090 // We do not have intermediate representation for other intrinsics.
2091 DCHECK(!IsIntrinsicWithSpecializedHir(intrinsic));
2092 return false;
2093 }
2094 DCHECK(instruction != nullptr);
2095 if (!SetupInvokeArguments(instruction, operands, shorty, receiver_arg)) {
2096 return false;
2097 }
2098
2099 switch (intrinsic) {
2100 case Intrinsics::kFloatIsNaN:
2101 case Intrinsics::kDoubleIsNaN:
2102 // Set the second input to be the same as first.
2103 DCHECK(instruction->IsNotEqual());
2104 DCHECK(instruction->InputAt(1u) == nullptr);
2105 instruction->SetRawInputAt(1u, instruction->InputAt(0u));
2106 break;
2107 case Intrinsics::kStringCharAt: {
2108 // Add bounds check.
2109 HInstruction* array = instruction->InputAt(0u);
2110 HInstruction* index = instruction->InputAt(1u);
2111 HInstruction* length =
2112 new (allocator_) HArrayLength(array, dex_pc, /*is_string_length=*/ true);
2113 AppendInstruction(length);
2114 HBoundsCheck* bounds_check =
2115 new (allocator_) HBoundsCheck(index, length, dex_pc, /*is_string_char_at=*/ true);
2116 AppendInstruction(bounds_check);
2117 graph_->SetHasBoundsChecks(true);
2118 instruction->SetRawInputAt(1u, bounds_check);
2119 break;
2120 }
2121 case Intrinsics::kStringIsEmpty: {
2122 // Compare the length with 0.
2123 DCHECK(instruction->IsArrayLength());
2124 AppendInstruction(instruction);
2125 HEqual* equal = new (allocator_) HEqual(instruction, graph_->GetIntConstant(0), dex_pc);
2126 instruction = equal;
2127 break;
2128 }
2129 default:
2130 break;
2131 }
2132
2133 AppendInstruction(instruction);
2134 latest_result_ = instruction;
2135
2136 return true;
2137 }
2138
2139 bool HInstructionBuilder::HandleStringInit(HInvoke* invoke,
2140 const InstructionOperands& operands,
2141 const char* shorty) {
2142 DCHECK(invoke->IsInvokeStaticOrDirect());
2143 DCHECK(invoke->AsInvokeStaticOrDirect()->IsStringInit());
2144
2145 if (!SetupInvokeArguments(invoke, operands, shorty, ReceiverArg::kIgnored)) {
2146 return false;
2147 }
2148
2149 AppendInstruction(invoke);
2150
2151 // This is a StringFactory call, not an actual String constructor. Its result
2152 // replaces the empty String pre-allocated by NewInstance.
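// Roughly: `new String(data)` compiles to `new-instance` + `invoke-direct String.<init>(...)`;
// the <init> invoke is treated as the corresponding StringFactory call, and below every vreg
// that still refers to the now-dead NewInstance is redirected to this call's result.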
2153 uint32_t orig_this_reg = operands.GetOperand(0);
2154 HInstruction* arg_this = LoadLocal(orig_this_reg, DataType::Type::kReference);
2155
2156 // Replacing the NewInstance might render it redundant. Keep a list of these
2157 // to be visited once it is clear whether it has remaining uses.
2158 if (arg_this->IsNewInstance()) {
2159 ssa_builder_->AddUninitializedString(arg_this->AsNewInstance());
2160 } else {
2161 DCHECK(arg_this->IsPhi());
2162 // We can get a phi as input of a String.<init> if there is a loop between the
2163 // allocation and the String.<init> call. As we don't know which other phis might alias
2164 // with `arg_this`, we keep a record of those invocations so we can later replace
2165 // the allocation with the invocation.
2166 // Add the actual 'this' input so the analysis knows what is the allocation instruction.
2167 // The input will be removed during the analysis.
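// A sketch of when the phi case arises: dex code may allocate the string first and only
// call String.<init> after a loop that computes the constructor arguments, e.g.
//   new-instance v0, Ljava/lang/String;
//   ... loop filling a char[] ...
//   invoke-direct {v0, ...}, Ljava/lang/String;-><init>([C)V
// so at the <init> site v0 is a loop phi rather than the HNewInstance itself.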
2168 invoke->AddInput(arg_this);
2169 ssa_builder_->AddUninitializedStringPhi(invoke);
2170 }
2171 // Walk over all vregs and replace any occurrence of `arg_this` with `invoke`.
2172 for (size_t vreg = 0, e = current_locals_->size(); vreg < e; ++vreg) {
2173 if ((*current_locals_)[vreg] == arg_this) {
2174 (*current_locals_)[vreg] = invoke;
2175 }
2176 }
2177 return true;
2178 }
2179
2180 static DataType::Type GetFieldAccessType(const DexFile& dex_file, uint16_t field_index) {
2181 const dex::FieldId& field_id = dex_file.GetFieldId(field_index);
2182 const char* type = dex_file.GetFieldTypeDescriptor(field_id);
2183 return DataType::FromShorty(type[0]);
2184 }
2185
2186 bool HInstructionBuilder::BuildInstanceFieldAccess(const Instruction& instruction,
2187 uint32_t dex_pc,
2188 bool is_put) {
2189 uint32_t source_or_dest_reg = instruction.VRegA_22c();
2190 uint32_t obj_reg = instruction.VRegB_22c();
2191 uint16_t field_index = instruction.VRegC_22c();
2192
2193 ScopedObjectAccess soa(Thread::Current());
2194 ArtField* resolved_field = ResolveField(field_index, /* is_static= */ false, is_put);
2195
2196 // Generate an explicit null check on the reference, unless the field access
2197 // is unresolved. In that case, we rely on the runtime to perform various
2198 // checks first, followed by a null check.
2199 HInstruction* object = (resolved_field == nullptr)
2200 ? LoadLocal(obj_reg, DataType::Type::kReference)
2201 : LoadNullCheckedLocal(obj_reg, dex_pc);
2202
2203 DataType::Type field_type = GetFieldAccessType(*dex_file_, field_index);
2204 if (is_put) {
2205 HInstruction* value = LoadLocal(source_or_dest_reg, field_type);
2206 HInstruction* field_set = nullptr;
2207 if (resolved_field == nullptr) {
2208 MaybeRecordStat(compilation_stats_,
2209 MethodCompilationStat::kUnresolvedField);
2210 field_set = new (allocator_) HUnresolvedInstanceFieldSet(object,
2211 value,
2212 field_type,
2213 field_index,
2214 dex_pc);
2215 } else {
2216 uint16_t class_def_index = resolved_field->GetDeclaringClass()->GetDexClassDefIndex();
2217 field_set = new (allocator_) HInstanceFieldSet(object,
2218 value,
2219 resolved_field,
2220 field_type,
2221 resolved_field->GetOffset(),
2222 resolved_field->IsVolatile(),
2223 field_index,
2224 class_def_index,
2225 *dex_file_,
2226 dex_pc);
2227 }
2228 AppendInstruction(field_set);
2229 } else {
2230 HInstruction* field_get = nullptr;
2231 if (resolved_field == nullptr) {
2232 MaybeRecordStat(compilation_stats_,
2233 MethodCompilationStat::kUnresolvedField);
2234 field_get = new (allocator_) HUnresolvedInstanceFieldGet(object,
2235 field_type,
2236 field_index,
2237 dex_pc);
2238 } else {
2239 uint16_t class_def_index = resolved_field->GetDeclaringClass()->GetDexClassDefIndex();
2240 field_get = new (allocator_) HInstanceFieldGet(object,
2241 resolved_field,
2242 field_type,
2243 resolved_field->GetOffset(),
2244 resolved_field->IsVolatile(),
2245 field_index,
2246 class_def_index,
2247 *dex_file_,
2248 dex_pc);
2249 }
2250 AppendInstruction(field_get);
2251 UpdateLocal(source_or_dest_reg, field_get);
2252 }
2253
2254 return true;
2255 }
2256
2257 void HInstructionBuilder::BuildUnresolvedStaticFieldAccess(const Instruction& instruction,
2258 uint32_t dex_pc,
2259 bool is_put,
2260 DataType::Type field_type) {
2261 uint32_t source_or_dest_reg = instruction.VRegA_21c();
2262 uint16_t field_index = instruction.VRegB_21c();
2263
2264 if (is_put) {
2265 HInstruction* value = LoadLocal(source_or_dest_reg, field_type);
2266 AppendInstruction(
2267 new (allocator_) HUnresolvedStaticFieldSet(value, field_type, field_index, dex_pc));
2268 } else {
2269 AppendInstruction(new (allocator_) HUnresolvedStaticFieldGet(field_type, field_index, dex_pc));
2270 UpdateLocal(source_or_dest_reg, current_block_->GetLastInstruction());
2271 }
2272 }
2273
2274 ArtField* HInstructionBuilder::ResolveField(uint16_t field_idx, bool is_static, bool is_put) {
2275 ScopedObjectAccess soa(Thread::Current());
2276
2277 ClassLinker* class_linker = dex_compilation_unit_->GetClassLinker();
2278 Handle<mirror::ClassLoader> class_loader = dex_compilation_unit_->GetClassLoader();
2279
2280 ArtField* resolved_field = class_linker->ResolveFieldJLS(field_idx,
2281 dex_compilation_unit_->GetDexCache(),
2282 class_loader);
2283 DCHECK_EQ(resolved_field == nullptr, soa.Self()->IsExceptionPending())
2284 << "field="
2285 << ((resolved_field == nullptr) ? "null" : resolved_field->PrettyField())
2286 << ", exception="
2287 << (soa.Self()->IsExceptionPending() ? soa.Self()->GetException()->Dump() : "null");
2288 if (UNLIKELY(resolved_field == nullptr)) {
2289 // Clean up any exception left by field resolution.
2290 soa.Self()->ClearException();
2291 return nullptr;
2292 }
2293
2294 if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
2295 return nullptr;
2296 }
2297
2298 // Check access.
2299 Handle<mirror::Class> compiling_class = dex_compilation_unit_->GetCompilingClass();
2300 if (compiling_class == nullptr) {
2301 // Check if the declaring class or referencing class is accessible.
2302 SamePackageCompare same_package(*dex_compilation_unit_);
2303 ObjPtr<mirror::Class> declaring_class = resolved_field->GetDeclaringClass();
2304 bool declaring_class_accessible = declaring_class->IsPublic() || same_package(declaring_class);
2305 if (!declaring_class_accessible) {
2306 // It is possible to access members from an inaccessible superclass
2307 // by referencing them through an accessible subclass.
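// For example, if pkg1 has a non-public class Base with a public field f and a public
// class Sub extends Base, code in another package can legally access the field via a
// field reference whose declared class is Sub, even though Base itself is inaccessible.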
2308 ObjPtr<mirror::Class> referenced_class = class_linker->LookupResolvedType(
2309 dex_compilation_unit_->GetDexFile()->GetFieldId(field_idx).class_idx_,
2310 dex_compilation_unit_->GetDexCache().Get(),
2311 class_loader.Get());
2312 DCHECK(referenced_class != nullptr); // Must have been resolved when resolving the field.
2313 if (!referenced_class->IsPublic() && !same_package(referenced_class)) {
2314 return nullptr;
2315 }
2316 }
2317 // Check whether the field itself is accessible.
2318 // Since the referrer is unresolved but the field is resolved, it cannot be
2319 // inside the same class, so a private field is known to be inaccessible.
2320 // And without a resolved referrer, we cannot check for protected member access
2321 // in a superclass, so we only allow access to public members or members within the same package.
2322 if (resolved_field->IsPrivate() ||
2323 (!resolved_field->IsPublic() && !declaring_class_accessible)) {
2324 return nullptr;
2325 }
2326 } else if (!compiling_class->CanAccessResolvedField(resolved_field->GetDeclaringClass(),
2327 resolved_field,
2328 dex_compilation_unit_->GetDexCache().Get(),
2329 field_idx)) {
2330 return nullptr;
2331 }
2332
2333 if (is_put) {
2334 if (resolved_field->IsFinal() &&
2335 (compiling_class.Get() != resolved_field->GetDeclaringClass())) {
2336 // Final fields can only be updated within their own class.
2337 // TODO: Only allow it in constructors. b/34966607.
2338 return nullptr;
2339 }
2340
2341 // Note: We do not need to resolve the field type for `get` opcodes.
2342 StackArtFieldHandleScope<1> rhs(soa.Self());
2343 ReflectiveHandle<ArtField> resolved_field_handle(rhs.NewHandle(resolved_field));
2344 if (resolved_field->ResolveType().IsNull()) {
2345 // ArtField::ResolveType() may fail as evidenced with a dexing bug (b/78788577).
2346 soa.Self()->ClearException();
2347 return nullptr; // Failure
2348 }
2349 resolved_field = resolved_field_handle.Get();
2350 }
2351
2352 return resolved_field;
2353 }
2354
2355 void HInstructionBuilder::BuildStaticFieldAccess(const Instruction& instruction,
2356 uint32_t dex_pc,
2357 bool is_put) {
2358 uint32_t source_or_dest_reg = instruction.VRegA_21c();
2359 uint16_t field_index = instruction.VRegB_21c();
2360
2361 ScopedObjectAccess soa(Thread::Current());
2362 ArtField* resolved_field = ResolveField(field_index, /* is_static= */ true, is_put);
2363
2364 if (resolved_field == nullptr) {
2365 MaybeRecordStat(compilation_stats_,
2366 MethodCompilationStat::kUnresolvedField);
2367 DataType::Type field_type = GetFieldAccessType(*dex_file_, field_index);
2368 BuildUnresolvedStaticFieldAccess(instruction, dex_pc, is_put, field_type);
2369 return;
2370 }
2371
2372 DataType::Type field_type = GetFieldAccessType(*dex_file_, field_index);
2373
2374 Handle<mirror::Class> klass =
2375 graph_->GetHandleCache()->NewHandle(resolved_field->GetDeclaringClass());
2376 HLoadClass* constant = BuildLoadClass(klass->GetDexTypeIndex(),
2377 klass->GetDexFile(),
2378 klass,
2379 dex_pc,
2380 /* needs_access_check= */ false);
2381
2382 if (constant == nullptr) {
2383 // The class cannot be referenced from this compiled code. Generate
2384 // an unresolved access.
2385 MaybeRecordStat(compilation_stats_,
2386 MethodCompilationStat::kUnresolvedFieldNotAFastAccess);
2387 BuildUnresolvedStaticFieldAccess(instruction, dex_pc, is_put, field_type);
2388 return;
2389 }
2390
2391 HInstruction* cls = constant;
2392 if (!IsInitialized(klass.Get())) {
2393 cls = new (allocator_) HClinitCheck(constant, dex_pc);
2394 AppendInstruction(cls);
2395 }
2396
2397 uint16_t class_def_index = klass->GetDexClassDefIndex();
2398 if (is_put) {
2399 // We need to keep the class alive before loading the value.
2400 HInstruction* value = LoadLocal(source_or_dest_reg, field_type);
2401 DCHECK_EQ(HPhi::ToPhiType(value->GetType()), HPhi::ToPhiType(field_type));
2402 AppendInstruction(new (allocator_) HStaticFieldSet(cls,
2403 value,
2404 resolved_field,
2405 field_type,
2406 resolved_field->GetOffset(),
2407 resolved_field->IsVolatile(),
2408 field_index,
2409 class_def_index,
2410 *dex_file_,
2411 dex_pc));
2412 } else {
2413 AppendInstruction(new (allocator_) HStaticFieldGet(cls,
2414 resolved_field,
2415 field_type,
2416 resolved_field->GetOffset(),
2417 resolved_field->IsVolatile(),
2418 field_index,
2419 class_def_index,
2420 *dex_file_,
2421 dex_pc));
2422 UpdateLocal(source_or_dest_reg, current_block_->GetLastInstruction());
2423 }
2424 }
2425
2426 void HInstructionBuilder::BuildCheckedDivRem(uint16_t out_vreg,
2427 uint16_t first_vreg,
2428 int64_t second_vreg_or_constant,
2429 uint32_t dex_pc,
2430 DataType::Type type,
2431 bool second_is_constant,
2432 bool is_div) {
2433 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
2434
2435 HInstruction* first = LoadLocal(first_vreg, type);
2436 HInstruction* second = nullptr;
2437 if (second_is_constant) {
2438 if (type == DataType::Type::kInt32) {
2439 second = graph_->GetIntConstant(second_vreg_or_constant);
2440 } else {
2441 second = graph_->GetLongConstant(second_vreg_or_constant);
2442 }
2443 } else {
2444 second = LoadLocal(second_vreg_or_constant, type);
2445 }
2446
2447 if (!second_is_constant ||
2448 (type == DataType::Type::kInt32 && second->AsIntConstant()->GetValue() == 0) ||
2449 (type == DataType::Type::kInt64 && second->AsLongConstant()->GetValue() == 0)) {
2450 second = new (allocator_) HDivZeroCheck(second, dex_pc);
2451 AppendInstruction(second);
2452 }
2453
2454 if (is_div) {
2455 AppendInstruction(new (allocator_) HDiv(type, first, second, dex_pc));
2456 } else {
2457 AppendInstruction(new (allocator_) HRem(type, first, second, dex_pc));
2458 }
2459 UpdateLocal(out_vreg, current_block_->GetLastInstruction());
2460 }
2461
2462 void HInstructionBuilder::BuildArrayAccess(const Instruction& instruction,
2463 uint32_t dex_pc,
2464 bool is_put,
2465 DataType::Type anticipated_type) {
2466 uint8_t source_or_dest_reg = instruction.VRegA_23x();
2467 uint8_t array_reg = instruction.VRegB_23x();
2468 uint8_t index_reg = instruction.VRegC_23x();
2469
2470 HInstruction* object = LoadNullCheckedLocal(array_reg, dex_pc);
2471 HInstruction* length = new (allocator_) HArrayLength(object, dex_pc);
2472 AppendInstruction(length);
2473 HInstruction* index = LoadLocal(index_reg, DataType::Type::kInt32);
2474 index = new (allocator_) HBoundsCheck(index, length, dex_pc);
2475 AppendInstruction(index);
2476 if (is_put) {
2477 HInstruction* value = LoadLocal(source_or_dest_reg, anticipated_type);
2478 // TODO: Insert a type check node if the type is Object.
2479 HArraySet* aset = new (allocator_) HArraySet(object, index, value, anticipated_type, dex_pc);
2480 ssa_builder_->MaybeAddAmbiguousArraySet(aset);
2481 AppendInstruction(aset);
2482 } else {
2483 HArrayGet* aget = new (allocator_) HArrayGet(object, index, anticipated_type, dex_pc);
2484 ssa_builder_->MaybeAddAmbiguousArrayGet(aget);
2485 AppendInstruction(aget);
2486 UpdateLocal(source_or_dest_reg, current_block_->GetLastInstruction());
2487 }
2488 graph_->SetHasBoundsChecks(true);
2489 }
2490
2491 HNewArray* HInstructionBuilder::BuildNewArray(uint32_t dex_pc,
2492 dex::TypeIndex type_index,
2493 HInstruction* length) {
2494 HLoadClass* cls = BuildLoadClass(type_index, dex_pc);
2495
2496 const char* descriptor = dex_file_->GetTypeDescriptor(dex_file_->GetTypeId(type_index));
2497 DCHECK_EQ(descriptor[0], '[');
2498 size_t component_type_shift = Primitive::ComponentSizeShift(Primitive::GetType(descriptor[1]));
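// E.g. for a new `int[]` the descriptor is "[I", so descriptor[1] == 'I' and the
// component size shift is 2 (4-byte elements); for "[J" (long[]) it is 3.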
2499
2500 HNewArray* new_array = new (allocator_) HNewArray(cls, length, dex_pc, component_type_shift);
2501 AppendInstruction(new_array);
2502 return new_array;
2503 }
2504
2505 HNewArray* HInstructionBuilder::BuildFilledNewArray(uint32_t dex_pc,
2506 dex::TypeIndex type_index,
2507 const InstructionOperands& operands) {
2508 const size_t number_of_operands = operands.GetNumberOfOperands();
2509 HInstruction* length = graph_->GetIntConstant(number_of_operands);
2510
2511 HNewArray* new_array = BuildNewArray(dex_pc, type_index, length);
2512 const char* descriptor = dex_file_->GetTypeDescriptor(type_index);
2513 DCHECK_EQ(descriptor[0], '[') << descriptor;
2514 char primitive = descriptor[1];
2515 DCHECK(primitive == 'I'
2516 || primitive == 'L'
2517 || primitive == '[') << descriptor;
2518 bool is_reference_array = (primitive == 'L') || (primitive == '[');
2519 DataType::Type type = is_reference_array ? DataType::Type::kReference : DataType::Type::kInt32;
2520
2521 for (size_t i = 0; i < number_of_operands; ++i) {
2522 HInstruction* value = LoadLocal(operands.GetOperand(i), type);
2523 HInstruction* index = graph_->GetIntConstant(i);
2524 HArraySet* aset = new (allocator_) HArraySet(new_array, index, value, type, dex_pc);
2525 ssa_builder_->MaybeAddAmbiguousArraySet(aset);
2526 AppendInstruction(aset);
2527 }
2528 latest_result_ = new_array;
2529
2530 return new_array;
2531 }
2532
2533 template <typename T>
2534 void HInstructionBuilder::BuildFillArrayData(HInstruction* object,
2535 const T* data,
2536 uint32_t element_count,
2537 DataType::Type anticipated_type,
2538 uint32_t dex_pc) {
2539 for (uint32_t i = 0; i < element_count; ++i) {
2540 HInstruction* index = graph_->GetIntConstant(i);
2541 HInstruction* value = graph_->GetIntConstant(data[i]);
2542 HArraySet* aset = new (allocator_) HArraySet(object, index, value, anticipated_type, dex_pc);
2543 ssa_builder_->MaybeAddAmbiguousArraySet(aset);
2544 AppendInstruction(aset);
2545 }
2546 }
2547
2548 void HInstructionBuilder::BuildFillArrayData(const Instruction& instruction, uint32_t dex_pc) {
2549 HInstruction* array = LoadNullCheckedLocal(instruction.VRegA_31t(), dex_pc);
2550
2551 int32_t payload_offset = instruction.VRegB_31t() + dex_pc;
2552 const Instruction::ArrayDataPayload* payload =
2553 reinterpret_cast<const Instruction::ArrayDataPayload*>(
2554 code_item_accessor_.Insns() + payload_offset);
2555 const uint8_t* data = payload->data;
2556 uint32_t element_count = payload->element_count;
2557
2558 if (element_count == 0u) {
2559 // For empty payload we emit only the null check above.
2560 return;
2561 }
2562
2563 HInstruction* length = new (allocator_) HArrayLength(array, dex_pc);
2564 AppendInstruction(length);
2565
2566 // The reference implementation of this DEX instruction appears to perform the bounds check
2567 // before doing any stores, so we emit a single check against the last index up front.
2568 HInstruction* last_index = graph_->GetIntConstant(payload->element_count - 1);
2569 AppendInstruction(new (allocator_) HBoundsCheck(last_index, length, dex_pc));
2570
2571 switch (payload->element_width) {
2572 case 1:
2573 BuildFillArrayData(array,
2574 reinterpret_cast<const int8_t*>(data),
2575 element_count,
2576 DataType::Type::kInt8,
2577 dex_pc);
2578 break;
2579 case 2:
2580 BuildFillArrayData(array,
2581 reinterpret_cast<const int16_t*>(data),
2582 element_count,
2583 DataType::Type::kInt16,
2584 dex_pc);
2585 break;
2586 case 4:
2587 BuildFillArrayData(array,
2588 reinterpret_cast<const int32_t*>(data),
2589 element_count,
2590 DataType::Type::kInt32,
2591 dex_pc);
2592 break;
2593 case 8:
2594 BuildFillWideArrayData(array,
2595 reinterpret_cast<const int64_t*>(data),
2596 element_count,
2597 dex_pc);
2598 break;
2599 default:
2600       LOG(FATAL) << "Unknown element width: " << payload->element_width;
2601 }
2602 graph_->SetHasBoundsChecks(true);
2603 }
2604
2605 void HInstructionBuilder::BuildFillWideArrayData(HInstruction* object,
2606 const int64_t* data,
2607 uint32_t element_count,
2608 uint32_t dex_pc) {
2609 for (uint32_t i = 0; i < element_count; ++i) {
2610 HInstruction* index = graph_->GetIntConstant(i);
2611 HInstruction* value = graph_->GetLongConstant(data[i]);
2612 HArraySet* aset =
2613 new (allocator_) HArraySet(object, index, value, DataType::Type::kInt64, dex_pc);
2614 ssa_builder_->MaybeAddAmbiguousArraySet(aset);
2615 AppendInstruction(aset);
2616 }
2617 }
2618
2619 void HInstructionBuilder::BuildLoadString(dex::StringIndex string_index, uint32_t dex_pc) {
2620 HLoadString* load_string =
2621 new (allocator_) HLoadString(graph_->GetCurrentMethod(), string_index, *dex_file_, dex_pc);
2622 HSharpening::ProcessLoadString(load_string,
2623 code_generator_,
2624 *dex_compilation_unit_,
2625 graph_->GetHandleCache()->GetHandles());
2626 AppendInstruction(load_string);
2627 }
2628
2629 HLoadClass* HInstructionBuilder::BuildLoadClass(dex::TypeIndex type_index, uint32_t dex_pc) {
2630 ScopedObjectAccess soa(Thread::Current());
2631 const DexFile& dex_file = *dex_compilation_unit_->GetDexFile();
2632 Handle<mirror::Class> klass = ResolveClass(soa, type_index);
2633 bool needs_access_check = LoadClassNeedsAccessCheck(type_index, klass.Get());
2634 return BuildLoadClass(type_index, dex_file, klass, dex_pc, needs_access_check);
2635 }
2636
2637 HLoadClass* HInstructionBuilder::BuildLoadClass(dex::TypeIndex type_index,
2638 const DexFile& dex_file,
2639 Handle<mirror::Class> klass,
2640 uint32_t dex_pc,
2641 bool needs_access_check) {
2642 // Try to find a reference in the compiling dex file.
2643 const DexFile* actual_dex_file = &dex_file;
2644 if (!IsSameDexFile(dex_file, *dex_compilation_unit_->GetDexFile())) {
2645 dex::TypeIndex local_type_index =
2646 klass->FindTypeIndexInOtherDexFile(*dex_compilation_unit_->GetDexFile());
2647 if (local_type_index.IsValid()) {
2648 type_index = local_type_index;
2649 actual_dex_file = dex_compilation_unit_->GetDexFile();
2650 }
2651 }
2652
2653   // We cannot use the referrer's class load kind if we need to do an access check.
2654   // If the `klass` is unresolved, we need an access check except for references to the
2655   // referrer's own class (see LoadClassNeedsAccessCheck()), so checking `!needs_access_check`
2656   // is enough. Otherwise, also check if the `klass` is the same as the compiling class,
2657   // which also conveniently rejects the case of an unresolved compiling class.
2658 bool is_referrers_class =
2659 !needs_access_check &&
2660 (klass == nullptr || outer_compilation_unit_->GetCompilingClass().Get() == klass.Get());
2661 // Note: `klass` must be from `graph_->GetHandleCache()`.
2662 HLoadClass* load_class = new (allocator_) HLoadClass(
2663 graph_->GetCurrentMethod(),
2664 type_index,
2665 *actual_dex_file,
2666 klass,
2667 is_referrers_class,
2668 dex_pc,
2669 needs_access_check);
2670
2671 HLoadClass::LoadKind load_kind = HSharpening::ComputeLoadClassKind(load_class,
2672 code_generator_,
2673 *dex_compilation_unit_);
2674
2675 if (load_kind == HLoadClass::LoadKind::kInvalid) {
2676     // We actually cannot reference this class; we're forced to bail.
2677 return nullptr;
2678 }
2679 // Load kind must be set before inserting the instruction into the graph.
2680 load_class->SetLoadKind(load_kind);
2681 AppendInstruction(load_class);
2682 return load_class;
2683 }
2684
2685 Handle<mirror::Class> HInstructionBuilder::ResolveClass(ScopedObjectAccess& soa,
2686 dex::TypeIndex type_index) {
2687 auto it = class_cache_.find(type_index);
2688 if (it != class_cache_.end()) {
2689 return it->second;
2690 }
2691
2692 ObjPtr<mirror::Class> klass = dex_compilation_unit_->GetClassLinker()->ResolveType(
2693 type_index, dex_compilation_unit_->GetDexCache(), dex_compilation_unit_->GetClassLoader());
2694 DCHECK_EQ(klass == nullptr, soa.Self()->IsExceptionPending());
2695 soa.Self()->ClearException(); // Clean up the exception left by type resolution if any.
2696
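  // Memoize the (possibly null) result in a handle from the graph's handle cache so that
  // repeated references to the same type index in this method do not redo resolution.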
2697 Handle<mirror::Class> h_klass = graph_->GetHandleCache()->NewHandle(klass);
2698 class_cache_.Put(type_index, h_klass);
2699 return h_klass;
2700 }
2701
2702 bool HInstructionBuilder::LoadClassNeedsAccessCheck(dex::TypeIndex type_index,
2703 ObjPtr<mirror::Class> klass) {
2704 if (klass == nullptr) {
2705 // If the class is unresolved, we can avoid access checks only for references to
2706 // the compiling class as determined by checking the descriptor and ClassLoader.
2707 if (outer_compilation_unit_->GetCompilingClass() != nullptr) {
2708       // The compiling class is resolved, so it is different from the unresolved class.
2709 return true;
2710 }
2711 if (dex_compilation_unit_->GetClassLoader().Get() !=
2712 outer_compilation_unit_->GetClassLoader().Get()) {
2713 // Resolving the same descriptor in a different ClassLoader than the
2714 // defining loader of the compiling class shall either fail to find
2715 // the class definition, or find a different one.
2716 // (Assuming no custom ClassLoader hierarchy with circular delegation.)
2717 return true;
2718 }
2719 // Check if the class is the outer method's class.
2720     // For the same dex file, compare type indexes; otherwise, compare descriptors.
2721 const DexFile* outer_dex_file = outer_compilation_unit_->GetDexFile();
2722 const DexFile* inner_dex_file = dex_compilation_unit_->GetDexFile();
2723 const dex::ClassDef& outer_class_def =
2724 outer_dex_file->GetClassDef(outer_compilation_unit_->GetClassDefIndex());
2725 if (IsSameDexFile(*inner_dex_file, *outer_dex_file)) {
2726 if (type_index != outer_class_def.class_idx_) {
2727 return true;
2728 }
2729 } else {
2730 const std::string_view outer_descriptor =
2731 outer_dex_file->GetTypeDescriptorView(outer_class_def.class_idx_);
2732 const std::string_view target_descriptor =
2733 inner_dex_file->GetTypeDescriptorView(type_index);
2734 if (outer_descriptor != target_descriptor) {
2735 return true;
2736 }
2737 }
2738 // For inlined methods we also need to check if the compiling class
2739 // is public or in the same package as the inlined method's class.
2740 if (dex_compilation_unit_ != outer_compilation_unit_ &&
2741 (outer_class_def.access_flags_ & kAccPublic) == 0) {
2742 DCHECK(dex_compilation_unit_->GetCompilingClass() != nullptr);
2743 SamePackageCompare same_package(*outer_compilation_unit_);
2744 if (!same_package(dex_compilation_unit_->GetCompilingClass().Get())) {
2745 return true;
2746 }
2747 }
2748 return false;
2749 } else if (klass->IsPublic()) {
2750 return false;
2751 } else if (dex_compilation_unit_->GetCompilingClass() != nullptr) {
2752 return !dex_compilation_unit_->GetCompilingClass()->CanAccess(klass);
2753 } else {
2754 SamePackageCompare same_package(*dex_compilation_unit_);
2755 return !same_package(klass);
2756 }
2757 }
2758
2759 void HInstructionBuilder::BuildLoadMethodHandle(uint16_t method_handle_index, uint32_t dex_pc) {
2760 const DexFile& dex_file = *dex_compilation_unit_->GetDexFile();
2761 HLoadMethodHandle* load_method_handle = new (allocator_) HLoadMethodHandle(
2762 graph_->GetCurrentMethod(), method_handle_index, dex_file, dex_pc);
2763 AppendInstruction(load_method_handle);
2764 }
2765
2766 void HInstructionBuilder::BuildLoadMethodType(dex::ProtoIndex proto_index, uint32_t dex_pc) {
2767 const DexFile& dex_file = *dex_compilation_unit_->GetDexFile();
2768 HLoadMethodType* load_method_type =
2769 new (allocator_) HLoadMethodType(graph_->GetCurrentMethod(), proto_index, dex_file, dex_pc);
2770 HSharpening::ProcessLoadMethodType(load_method_type,
2771 code_generator_,
2772 *dex_compilation_unit_,
2773 graph_->GetHandleCache()->GetHandles());
2774 AppendInstruction(load_method_type);
2775 }
2776
2777 void HInstructionBuilder::BuildTypeCheck(bool is_instance_of,
2778 HInstruction* object,
2779 dex::TypeIndex type_index,
2780 uint32_t dex_pc) {
2781 ScopedObjectAccess soa(Thread::Current());
2782 const DexFile& dex_file = *dex_compilation_unit_->GetDexFile();
2783 Handle<mirror::Class> klass = ResolveClass(soa, type_index);
2784 bool needs_access_check = LoadClassNeedsAccessCheck(type_index, klass.Get());
2785 TypeCheckKind check_kind = HSharpening::ComputeTypeCheckKind(
2786 klass.Get(), code_generator_, needs_access_check);
2787
2788 HInstruction* class_or_null = nullptr;
2789 HIntConstant* bitstring_path_to_root = nullptr;
2790 HIntConstant* bitstring_mask = nullptr;
2791 if (check_kind == TypeCheckKind::kBitstringCheck) {
2792 // TODO: Allow using the bitstring check also if we need an access check.
2793 DCHECK(!needs_access_check);
2794 class_or_null = graph_->GetNullConstant();
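    // For a bitstring check we do not load the class; instead we encode the target class's
    // SubtypeCheck path-to-root and mask as constants for the code generator to compare
    // against the bitstring stored in the object's class.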
2795 MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
2796 uint32_t path_to_root =
2797 SubtypeCheck<ObjPtr<mirror::Class>>::GetEncodedPathToRootForTarget(klass.Get());
2798 uint32_t mask = SubtypeCheck<ObjPtr<mirror::Class>>::GetEncodedPathToRootMask(klass.Get());
2799 bitstring_path_to_root = graph_->GetIntConstant(static_cast<int32_t>(path_to_root));
2800 bitstring_mask = graph_->GetIntConstant(static_cast<int32_t>(mask));
2801 } else {
2802 class_or_null = BuildLoadClass(type_index, dex_file, klass, dex_pc, needs_access_check);
2803 }
2804 DCHECK(class_or_null != nullptr);
2805
2806 if (is_instance_of) {
2807 AppendInstruction(new (allocator_) HInstanceOf(object,
2808 class_or_null,
2809 check_kind,
2810 klass,
2811 dex_pc,
2812 allocator_,
2813 bitstring_path_to_root,
2814 bitstring_mask));
2815 } else {
2816 // We emit a CheckCast followed by a BoundType. CheckCast is a statement
2817     // which may throw. If it succeeds, BoundType sets the new type of `object`
2818 // for all subsequent uses.
2819 AppendInstruction(
2820 new (allocator_) HCheckCast(object,
2821 class_or_null,
2822 check_kind,
2823 klass,
2824 dex_pc,
2825 allocator_,
2826 bitstring_path_to_root,
2827 bitstring_mask));
2828 AppendInstruction(new (allocator_) HBoundType(object, dex_pc));
2829 }
2830 }
2831
2832 void HInstructionBuilder::BuildTypeCheck(const Instruction& instruction,
2833 uint8_t destination,
2834 uint8_t reference,
2835 dex::TypeIndex type_index,
2836 uint32_t dex_pc) {
2837 HInstruction* object = LoadLocal(reference, DataType::Type::kReference);
2838 bool is_instance_of = instruction.Opcode() == Instruction::INSTANCE_OF;
2839
2840 BuildTypeCheck(is_instance_of, object, type_index, dex_pc);
2841
2842 if (is_instance_of) {
2843 UpdateLocal(destination, current_block_->GetLastInstruction());
2844 } else {
2845 DCHECK_EQ(instruction.Opcode(), Instruction::CHECK_CAST);
2846 UpdateLocal(reference, current_block_->GetLastInstruction());
2847 }
2848 }
2849
2850 bool HInstructionBuilder::ProcessDexInstruction(const Instruction& instruction, uint32_t dex_pc) {
2851 switch (instruction.Opcode()) {
2852 case Instruction::CONST_4: {
2853 int32_t register_index = instruction.VRegA_11n();
2854 HIntConstant* constant = graph_->GetIntConstant(instruction.VRegB_11n());
2855 UpdateLocal(register_index, constant);
2856 break;
2857 }
2858
2859 case Instruction::CONST_16: {
2860 int32_t register_index = instruction.VRegA_21s();
2861 HIntConstant* constant = graph_->GetIntConstant(instruction.VRegB_21s());
2862 UpdateLocal(register_index, constant);
2863 break;
2864 }
2865
2866 case Instruction::CONST: {
2867 int32_t register_index = instruction.VRegA_31i();
2868 HIntConstant* constant = graph_->GetIntConstant(instruction.VRegB_31i());
2869 UpdateLocal(register_index, constant);
2870 break;
2871 }
2872
2873 case Instruction::CONST_HIGH16: {
2874 int32_t register_index = instruction.VRegA_21h();
2875 HIntConstant* constant = graph_->GetIntConstant(instruction.VRegB_21h() << 16);
2876 UpdateLocal(register_index, constant);
2877 break;
2878 }
2879
2880 case Instruction::CONST_WIDE_16: {
2881 int32_t register_index = instruction.VRegA_21s();
2882 // Get 16 bits of constant value, sign extended to 64 bits.
2883 int64_t value = instruction.VRegB_21s();
2884 value <<= 48;
2885 value >>= 48;
2886 HLongConstant* constant = graph_->GetLongConstant(value);
2887 UpdateLocal(register_index, constant);
2888 break;
2889 }
2890
2891 case Instruction::CONST_WIDE_32: {
2892 int32_t register_index = instruction.VRegA_31i();
2893 // Get 32 bits of constant value, sign extended to 64 bits.
2894 int64_t value = instruction.VRegB_31i();
2895 value <<= 32;
2896 value >>= 32;
2897 HLongConstant* constant = graph_->GetLongConstant(value);
2898 UpdateLocal(register_index, constant);
2899 break;
2900 }
2901
2902 case Instruction::CONST_WIDE: {
2903 int32_t register_index = instruction.VRegA_51l();
2904 HLongConstant* constant = graph_->GetLongConstant(instruction.VRegB_51l());
2905 UpdateLocal(register_index, constant);
2906 break;
2907 }
2908
2909 case Instruction::CONST_WIDE_HIGH16: {
2910 int32_t register_index = instruction.VRegA_21h();
2911 int64_t value = static_cast<int64_t>(instruction.VRegB_21h()) << 48;
2912 HLongConstant* constant = graph_->GetLongConstant(value);
2913 UpdateLocal(register_index, constant);
2914 break;
2915 }
2916
2917     // Note that SSA building will refine the types for moves.
2918
2919 case Instruction::MOVE: {
2920 BuildMove<DataType::Type::kInt32>(instruction.VRegA_12x(), instruction.VRegB_12x());
2921 break;
2922 }
2923
2924 case Instruction::MOVE_FROM16: {
2925 BuildMove<DataType::Type::kInt32>(instruction.VRegA_22x(), instruction.VRegB_22x());
2926 break;
2927 }
2928
2929 case Instruction::MOVE_16: {
2930 BuildMove<DataType::Type::kInt32>(instruction.VRegA_32x(), instruction.VRegB_32x());
2931 break;
2932 }
2933
2934 case Instruction::MOVE_WIDE: {
2935 BuildMove<DataType::Type::kInt64>(instruction.VRegA_12x(), instruction.VRegB_12x());
2936 break;
2937 }
2938
2939 case Instruction::MOVE_WIDE_FROM16: {
2940 BuildMove<DataType::Type::kInt64>(instruction.VRegA_22x(), instruction.VRegB_22x());
2941 break;
2942 }
2943
2944 case Instruction::MOVE_WIDE_16: {
2945 BuildMove<DataType::Type::kInt64>(instruction.VRegA_32x(), instruction.VRegB_32x());
2946 break;
2947 }
2948
2949 case Instruction::MOVE_OBJECT: {
2950 BuildMove<DataType::Type::kReference>(instruction.VRegA_12x(), instruction.VRegB_12x());
2951 break;
2952 }
2953
2954 case Instruction::MOVE_OBJECT_FROM16: {
2955 BuildMove<DataType::Type::kReference>(instruction.VRegA_22x(), instruction.VRegB_22x());
2956 break;
2957 }
2958
2959 case Instruction::MOVE_OBJECT_16: {
2960 BuildMove<DataType::Type::kReference>(instruction.VRegA_32x(), instruction.VRegB_32x());
2961 break;
2962 }
2963
2964 case Instruction::RETURN_VOID: {
2965 BuildReturn(instruction, DataType::Type::kVoid, dex_pc);
2966 break;
2967 }
2968
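    // IF_XX expands to cases for both the two-register compare-and-branch (if-cc vA, vB, +off)
    // and the compare-with-zero variant (if-ccZ vA, +off).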
2969 #define IF_XX(comparison, cond) \
2970 case Instruction::IF_##cond: \
2971 If_21_22t<comparison, /* kCompareWithZero= */ false>(instruction, dex_pc); \
2972 break; \
2973 case Instruction::IF_##cond##Z: \
2974 If_21_22t<comparison, /* kCompareWithZero= */ true>(instruction, dex_pc); \
2975 break;
2976
2977 IF_XX(HEqual, EQ);
2978 IF_XX(HNotEqual, NE);
2979 IF_XX(HLessThan, LT);
2980 IF_XX(HLessThanOrEqual, LE);
2981 IF_XX(HGreaterThan, GT);
2982 IF_XX(HGreaterThanOrEqual, GE);
2983 #undef IF_XX
2984
2985 case Instruction::GOTO:
2986 case Instruction::GOTO_16:
2987 case Instruction::GOTO_32: {
2988 AppendInstruction(new (allocator_) HGoto(dex_pc));
2989 current_block_ = nullptr;
2990 break;
2991 }
2992
2993 case Instruction::RETURN: {
2994 BuildReturn(instruction, return_type_, dex_pc);
2995 break;
2996 }
2997
2998 case Instruction::RETURN_OBJECT: {
2999 BuildReturn(instruction, return_type_, dex_pc);
3000 break;
3001 }
3002
3003 case Instruction::RETURN_WIDE: {
3004 BuildReturn(instruction, return_type_, dex_pc);
3005 break;
3006 }
3007
3008 case Instruction::INVOKE_DIRECT:
3009 case Instruction::INVOKE_INTERFACE:
3010 case Instruction::INVOKE_STATIC:
3011 case Instruction::INVOKE_SUPER:
3012 case Instruction::INVOKE_VIRTUAL: {
3013 uint16_t method_idx = instruction.VRegB_35c();
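      // Format 35c encodes up to five argument registers; GetVarArgs() copies them into `args`
      // and the returned count is wrapped in VarArgsInstructionOperands for BuildInvoke().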
3014 uint32_t args[5];
3015 uint32_t number_of_vreg_arguments = instruction.GetVarArgs(args);
3016 VarArgsInstructionOperands operands(args, number_of_vreg_arguments);
3017 if (!BuildInvoke(instruction, dex_pc, method_idx, operands)) {
3018 return false;
3019 }
3020 break;
3021 }
3022
3023 case Instruction::INVOKE_DIRECT_RANGE:
3024 case Instruction::INVOKE_INTERFACE_RANGE:
3025 case Instruction::INVOKE_STATIC_RANGE:
3026 case Instruction::INVOKE_SUPER_RANGE:
3027 case Instruction::INVOKE_VIRTUAL_RANGE: {
3028 uint16_t method_idx = instruction.VRegB_3rc();
3029 RangeInstructionOperands operands(instruction.VRegC_3rc(), instruction.VRegA_3rc());
3030 if (!BuildInvoke(instruction, dex_pc, method_idx, operands)) {
3031 return false;
3032 }
3033 break;
3034 }
3035
3036 case Instruction::INVOKE_POLYMORPHIC: {
3037 uint16_t method_idx = instruction.VRegB_45cc();
3038 dex::ProtoIndex proto_idx(instruction.VRegH_45cc());
3039 uint32_t args[5];
3040 uint32_t number_of_vreg_arguments = instruction.GetVarArgs(args);
3041 VarArgsInstructionOperands operands(args, number_of_vreg_arguments);
3042 return BuildInvokePolymorphic(dex_pc, method_idx, proto_idx, operands);
3043 }
3044
3045 case Instruction::INVOKE_POLYMORPHIC_RANGE: {
3046 uint16_t method_idx = instruction.VRegB_4rcc();
3047 dex::ProtoIndex proto_idx(instruction.VRegH_4rcc());
3048 RangeInstructionOperands operands(instruction.VRegC_4rcc(), instruction.VRegA_4rcc());
3049 return BuildInvokePolymorphic(dex_pc, method_idx, proto_idx, operands);
3050 }
3051
3052 case Instruction::INVOKE_CUSTOM: {
3053 uint16_t call_site_idx = instruction.VRegB_35c();
3054 uint32_t args[5];
3055 uint32_t number_of_vreg_arguments = instruction.GetVarArgs(args);
3056 VarArgsInstructionOperands operands(args, number_of_vreg_arguments);
3057 return BuildInvokeCustom(dex_pc, call_site_idx, operands);
3058 }
3059
3060 case Instruction::INVOKE_CUSTOM_RANGE: {
3061 uint16_t call_site_idx = instruction.VRegB_3rc();
3062 RangeInstructionOperands operands(instruction.VRegC_3rc(), instruction.VRegA_3rc());
3063 return BuildInvokeCustom(dex_pc, call_site_idx, operands);
3064 }
3065
3066 case Instruction::NEG_INT: {
3067 Unop_12x<HNeg>(instruction, DataType::Type::kInt32, dex_pc);
3068 break;
3069 }
3070
3071 case Instruction::NEG_LONG: {
3072 Unop_12x<HNeg>(instruction, DataType::Type::kInt64, dex_pc);
3073 break;
3074 }
3075
3076 case Instruction::NEG_FLOAT: {
3077 Unop_12x<HNeg>(instruction, DataType::Type::kFloat32, dex_pc);
3078 break;
3079 }
3080
3081 case Instruction::NEG_DOUBLE: {
3082 Unop_12x<HNeg>(instruction, DataType::Type::kFloat64, dex_pc);
3083 break;
3084 }
3085
3086 case Instruction::NOT_INT: {
3087 Unop_12x<HNot>(instruction, DataType::Type::kInt32, dex_pc);
3088 break;
3089 }
3090
3091 case Instruction::NOT_LONG: {
3092 Unop_12x<HNot>(instruction, DataType::Type::kInt64, dex_pc);
3093 break;
3094 }
3095
3096 case Instruction::INT_TO_LONG: {
3097 Conversion_12x(instruction, DataType::Type::kInt32, DataType::Type::kInt64, dex_pc);
3098 break;
3099 }
3100
3101 case Instruction::INT_TO_FLOAT: {
3102 Conversion_12x(instruction, DataType::Type::kInt32, DataType::Type::kFloat32, dex_pc);
3103 break;
3104 }
3105
3106 case Instruction::INT_TO_DOUBLE: {
3107 Conversion_12x(instruction, DataType::Type::kInt32, DataType::Type::kFloat64, dex_pc);
3108 break;
3109 }
3110
3111 case Instruction::LONG_TO_INT: {
3112 Conversion_12x(instruction, DataType::Type::kInt64, DataType::Type::kInt32, dex_pc);
3113 break;
3114 }
3115
3116 case Instruction::LONG_TO_FLOAT: {
3117 Conversion_12x(instruction, DataType::Type::kInt64, DataType::Type::kFloat32, dex_pc);
3118 break;
3119 }
3120
3121 case Instruction::LONG_TO_DOUBLE: {
3122 Conversion_12x(instruction, DataType::Type::kInt64, DataType::Type::kFloat64, dex_pc);
3123 break;
3124 }
3125
3126 case Instruction::FLOAT_TO_INT: {
3127 Conversion_12x(instruction, DataType::Type::kFloat32, DataType::Type::kInt32, dex_pc);
3128 break;
3129 }
3130
3131 case Instruction::FLOAT_TO_LONG: {
3132 Conversion_12x(instruction, DataType::Type::kFloat32, DataType::Type::kInt64, dex_pc);
3133 break;
3134 }
3135
3136 case Instruction::FLOAT_TO_DOUBLE: {
3137 Conversion_12x(instruction, DataType::Type::kFloat32, DataType::Type::kFloat64, dex_pc);
3138 break;
3139 }
3140
3141 case Instruction::DOUBLE_TO_INT: {
3142 Conversion_12x(instruction, DataType::Type::kFloat64, DataType::Type::kInt32, dex_pc);
3143 break;
3144 }
3145
3146 case Instruction::DOUBLE_TO_LONG: {
3147 Conversion_12x(instruction, DataType::Type::kFloat64, DataType::Type::kInt64, dex_pc);
3148 break;
3149 }
3150
3151 case Instruction::DOUBLE_TO_FLOAT: {
3152 Conversion_12x(instruction, DataType::Type::kFloat64, DataType::Type::kFloat32, dex_pc);
3153 break;
3154 }
3155
3156 case Instruction::INT_TO_BYTE: {
3157 Conversion_12x(instruction, DataType::Type::kInt32, DataType::Type::kInt8, dex_pc);
3158 break;
3159 }
3160
3161 case Instruction::INT_TO_SHORT: {
3162 Conversion_12x(instruction, DataType::Type::kInt32, DataType::Type::kInt16, dex_pc);
3163 break;
3164 }
3165
3166 case Instruction::INT_TO_CHAR: {
3167 Conversion_12x(instruction, DataType::Type::kInt32, DataType::Type::kUint16, dex_pc);
3168 break;
3169 }
3170
3171 case Instruction::ADD_INT: {
3172 Binop_23x<HAdd>(instruction, DataType::Type::kInt32, dex_pc);
3173 break;
3174 }
3175
3176 case Instruction::ADD_LONG: {
3177 Binop_23x<HAdd>(instruction, DataType::Type::kInt64, dex_pc);
3178 break;
3179 }
3180
3181 case Instruction::ADD_DOUBLE: {
3182 Binop_23x<HAdd>(instruction, DataType::Type::kFloat64, dex_pc);
3183 break;
3184 }
3185
3186 case Instruction::ADD_FLOAT: {
3187 Binop_23x<HAdd>(instruction, DataType::Type::kFloat32, dex_pc);
3188 break;
3189 }
3190
3191 case Instruction::SUB_INT: {
3192 Binop_23x<HSub>(instruction, DataType::Type::kInt32, dex_pc);
3193 break;
3194 }
3195
3196 case Instruction::SUB_LONG: {
3197 Binop_23x<HSub>(instruction, DataType::Type::kInt64, dex_pc);
3198 break;
3199 }
3200
3201 case Instruction::SUB_FLOAT: {
3202 Binop_23x<HSub>(instruction, DataType::Type::kFloat32, dex_pc);
3203 break;
3204 }
3205
3206 case Instruction::SUB_DOUBLE: {
3207 Binop_23x<HSub>(instruction, DataType::Type::kFloat64, dex_pc);
3208 break;
3209 }
3210
3211 case Instruction::ADD_INT_2ADDR: {
3212 Binop_12x<HAdd>(instruction, DataType::Type::kInt32, dex_pc);
3213 break;
3214 }
3215
3216 case Instruction::MUL_INT: {
3217 Binop_23x<HMul>(instruction, DataType::Type::kInt32, dex_pc);
3218 break;
3219 }
3220
3221 case Instruction::MUL_LONG: {
3222 Binop_23x<HMul>(instruction, DataType::Type::kInt64, dex_pc);
3223 break;
3224 }
3225
3226 case Instruction::MUL_FLOAT: {
3227 Binop_23x<HMul>(instruction, DataType::Type::kFloat32, dex_pc);
3228 break;
3229 }
3230
3231 case Instruction::MUL_DOUBLE: {
3232 Binop_23x<HMul>(instruction, DataType::Type::kFloat64, dex_pc);
3233 break;
3234 }
3235
3236 case Instruction::DIV_INT: {
3237 BuildCheckedDivRem(instruction.VRegA_23x(),
3238 instruction.VRegB_23x(),
3239 instruction.VRegC_23x(),
3240 dex_pc,
3241 DataType::Type::kInt32,
3242 /* second_is_constant= */ false,
3243 /* is_div=*/ true);
3244 break;
3245 }
3246
3247 case Instruction::DIV_LONG: {
3248 BuildCheckedDivRem(instruction.VRegA_23x(),
3249 instruction.VRegB_23x(),
3250 instruction.VRegC_23x(),
3251 dex_pc,
3252 DataType::Type::kInt64,
3253 /* second_is_constant= */ false,
3254 /* is_div=*/ true);
3255 break;
3256 }
3257
3258 case Instruction::DIV_FLOAT: {
3259 Binop_23x<HDiv>(instruction, DataType::Type::kFloat32, dex_pc);
3260 break;
3261 }
3262
3263 case Instruction::DIV_DOUBLE: {
3264 Binop_23x<HDiv>(instruction, DataType::Type::kFloat64, dex_pc);
3265 break;
3266 }
3267
3268 case Instruction::REM_INT: {
3269 BuildCheckedDivRem(instruction.VRegA_23x(),
3270 instruction.VRegB_23x(),
3271 instruction.VRegC_23x(),
3272 dex_pc,
3273 DataType::Type::kInt32,
3274 /* second_is_constant= */ false,
3275 /* is_div=*/ false);
3276 break;
3277 }
3278
3279 case Instruction::REM_LONG: {
3280 BuildCheckedDivRem(instruction.VRegA_23x(),
3281 instruction.VRegB_23x(),
3282 instruction.VRegC_23x(),
3283 dex_pc,
3284 DataType::Type::kInt64,
3285 /* second_is_constant= */ false,
3286 /* is_div=*/ false);
3287 break;
3288 }
3289
3290 case Instruction::REM_FLOAT: {
3291 Binop_23x<HRem>(instruction, DataType::Type::kFloat32, dex_pc);
3292 break;
3293 }
3294
3295 case Instruction::REM_DOUBLE: {
3296 Binop_23x<HRem>(instruction, DataType::Type::kFloat64, dex_pc);
3297 break;
3298 }
3299
3300 case Instruction::AND_INT: {
3301 Binop_23x<HAnd>(instruction, DataType::Type::kInt32, dex_pc);
3302 break;
3303 }
3304
3305 case Instruction::AND_LONG: {
3306 Binop_23x<HAnd>(instruction, DataType::Type::kInt64, dex_pc);
3307 break;
3308 }
3309
3310 case Instruction::SHL_INT: {
3311 Binop_23x_shift<HShl>(instruction, DataType::Type::kInt32, dex_pc);
3312 break;
3313 }
3314
3315 case Instruction::SHL_LONG: {
3316 Binop_23x_shift<HShl>(instruction, DataType::Type::kInt64, dex_pc);
3317 break;
3318 }
3319
3320 case Instruction::SHR_INT: {
3321 Binop_23x_shift<HShr>(instruction, DataType::Type::kInt32, dex_pc);
3322 break;
3323 }
3324
3325 case Instruction::SHR_LONG: {
3326 Binop_23x_shift<HShr>(instruction, DataType::Type::kInt64, dex_pc);
3327 break;
3328 }
3329
3330 case Instruction::USHR_INT: {
3331 Binop_23x_shift<HUShr>(instruction, DataType::Type::kInt32, dex_pc);
3332 break;
3333 }
3334
3335 case Instruction::USHR_LONG: {
3336 Binop_23x_shift<HUShr>(instruction, DataType::Type::kInt64, dex_pc);
3337 break;
3338 }
3339
3340 case Instruction::OR_INT: {
3341 Binop_23x<HOr>(instruction, DataType::Type::kInt32, dex_pc);
3342 break;
3343 }
3344
3345 case Instruction::OR_LONG: {
3346 Binop_23x<HOr>(instruction, DataType::Type::kInt64, dex_pc);
3347 break;
3348 }
3349
3350 case Instruction::XOR_INT: {
3351 Binop_23x<HXor>(instruction, DataType::Type::kInt32, dex_pc);
3352 break;
3353 }
3354
3355 case Instruction::XOR_LONG: {
3356 Binop_23x<HXor>(instruction, DataType::Type::kInt64, dex_pc);
3357 break;
3358 }
3359
3360 case Instruction::ADD_LONG_2ADDR: {
3361 Binop_12x<HAdd>(instruction, DataType::Type::kInt64, dex_pc);
3362 break;
3363 }
3364
3365 case Instruction::ADD_DOUBLE_2ADDR: {
3366 Binop_12x<HAdd>(instruction, DataType::Type::kFloat64, dex_pc);
3367 break;
3368 }
3369
3370 case Instruction::ADD_FLOAT_2ADDR: {
3371 Binop_12x<HAdd>(instruction, DataType::Type::kFloat32, dex_pc);
3372 break;
3373 }
3374
3375 case Instruction::SUB_INT_2ADDR: {
3376 Binop_12x<HSub>(instruction, DataType::Type::kInt32, dex_pc);
3377 break;
3378 }
3379
3380 case Instruction::SUB_LONG_2ADDR: {
3381 Binop_12x<HSub>(instruction, DataType::Type::kInt64, dex_pc);
3382 break;
3383 }
3384
3385 case Instruction::SUB_FLOAT_2ADDR: {
3386 Binop_12x<HSub>(instruction, DataType::Type::kFloat32, dex_pc);
3387 break;
3388 }
3389
3390 case Instruction::SUB_DOUBLE_2ADDR: {
3391 Binop_12x<HSub>(instruction, DataType::Type::kFloat64, dex_pc);
3392 break;
3393 }
3394
3395 case Instruction::MUL_INT_2ADDR: {
3396 Binop_12x<HMul>(instruction, DataType::Type::kInt32, dex_pc);
3397 break;
3398 }
3399
3400 case Instruction::MUL_LONG_2ADDR: {
3401 Binop_12x<HMul>(instruction, DataType::Type::kInt64, dex_pc);
3402 break;
3403 }
3404
3405 case Instruction::MUL_FLOAT_2ADDR: {
3406 Binop_12x<HMul>(instruction, DataType::Type::kFloat32, dex_pc);
3407 break;
3408 }
3409
3410 case Instruction::MUL_DOUBLE_2ADDR: {
3411 Binop_12x<HMul>(instruction, DataType::Type::kFloat64, dex_pc);
3412 break;
3413 }
3414
3415 case Instruction::DIV_INT_2ADDR: {
3416 BuildCheckedDivRem(instruction.VRegA_12x(),
3417 instruction.VRegA_12x(),
3418 instruction.VRegB_12x(),
3419 dex_pc,
3420 DataType::Type::kInt32,
3421 /* second_is_constant= */ false,
3422 /* is_div=*/ true);
3423 break;
3424 }
3425
3426 case Instruction::DIV_LONG_2ADDR: {
3427 BuildCheckedDivRem(instruction.VRegA_12x(),
3428 instruction.VRegA_12x(),
3429 instruction.VRegB_12x(),
3430 dex_pc,
3431 DataType::Type::kInt64,
3432 /* second_is_constant= */ false,
3433 /* is_div=*/ true);
3434 break;
3435 }
3436
3437 case Instruction::REM_INT_2ADDR: {
3438 BuildCheckedDivRem(instruction.VRegA_12x(),
3439 instruction.VRegA_12x(),
3440 instruction.VRegB_12x(),
3441 dex_pc,
3442 DataType::Type::kInt32,
3443 /* second_is_constant= */ false,
3444 /* is_div=*/ false);
3445 break;
3446 }
3447
3448 case Instruction::REM_LONG_2ADDR: {
3449 BuildCheckedDivRem(instruction.VRegA_12x(),
3450 instruction.VRegA_12x(),
3451 instruction.VRegB_12x(),
3452 dex_pc,
3453 DataType::Type::kInt64,
3454 /* second_is_constant= */ false,
3455 /* is_div=*/ false);
3456 break;
3457 }
3458
3459 case Instruction::REM_FLOAT_2ADDR: {
3460 Binop_12x<HRem>(instruction, DataType::Type::kFloat32, dex_pc);
3461 break;
3462 }
3463
3464 case Instruction::REM_DOUBLE_2ADDR: {
3465 Binop_12x<HRem>(instruction, DataType::Type::kFloat64, dex_pc);
3466 break;
3467 }
3468
3469 case Instruction::SHL_INT_2ADDR: {
3470 Binop_12x_shift<HShl>(instruction, DataType::Type::kInt32, dex_pc);
3471 break;
3472 }
3473
3474 case Instruction::SHL_LONG_2ADDR: {
3475 Binop_12x_shift<HShl>(instruction, DataType::Type::kInt64, dex_pc);
3476 break;
3477 }
3478
3479 case Instruction::SHR_INT_2ADDR: {
3480 Binop_12x_shift<HShr>(instruction, DataType::Type::kInt32, dex_pc);
3481 break;
3482 }
3483
3484 case Instruction::SHR_LONG_2ADDR: {
3485 Binop_12x_shift<HShr>(instruction, DataType::Type::kInt64, dex_pc);
3486 break;
3487 }
3488
3489 case Instruction::USHR_INT_2ADDR: {
3490 Binop_12x_shift<HUShr>(instruction, DataType::Type::kInt32, dex_pc);
3491 break;
3492 }
3493
3494 case Instruction::USHR_LONG_2ADDR: {
3495 Binop_12x_shift<HUShr>(instruction, DataType::Type::kInt64, dex_pc);
3496 break;
3497 }
3498
3499 case Instruction::DIV_FLOAT_2ADDR: {
3500 Binop_12x<HDiv>(instruction, DataType::Type::kFloat32, dex_pc);
3501 break;
3502 }
3503
3504 case Instruction::DIV_DOUBLE_2ADDR: {
3505 Binop_12x<HDiv>(instruction, DataType::Type::kFloat64, dex_pc);
3506 break;
3507 }
3508
3509 case Instruction::AND_INT_2ADDR: {
3510 Binop_12x<HAnd>(instruction, DataType::Type::kInt32, dex_pc);
3511 break;
3512 }
3513
3514 case Instruction::AND_LONG_2ADDR: {
3515 Binop_12x<HAnd>(instruction, DataType::Type::kInt64, dex_pc);
3516 break;
3517 }
3518
3519 case Instruction::OR_INT_2ADDR: {
3520 Binop_12x<HOr>(instruction, DataType::Type::kInt32, dex_pc);
3521 break;
3522 }
3523
3524 case Instruction::OR_LONG_2ADDR: {
3525 Binop_12x<HOr>(instruction, DataType::Type::kInt64, dex_pc);
3526 break;
3527 }
3528
3529 case Instruction::XOR_INT_2ADDR: {
3530 Binop_12x<HXor>(instruction, DataType::Type::kInt32, dex_pc);
3531 break;
3532 }
3533
3534 case Instruction::XOR_LONG_2ADDR: {
3535 Binop_12x<HXor>(instruction, DataType::Type::kInt64, dex_pc);
3536 break;
3537 }
3538
3539 case Instruction::ADD_INT_LIT16: {
3540 Binop_22s<HAdd>(instruction, false, dex_pc);
3541 break;
3542 }
3543
3544 case Instruction::AND_INT_LIT16: {
3545 Binop_22s<HAnd>(instruction, false, dex_pc);
3546 break;
3547 }
3548
3549 case Instruction::OR_INT_LIT16: {
3550 Binop_22s<HOr>(instruction, false, dex_pc);
3551 break;
3552 }
3553
3554 case Instruction::XOR_INT_LIT16: {
3555 Binop_22s<HXor>(instruction, false, dex_pc);
3556 break;
3557 }
3558
3559 case Instruction::RSUB_INT: {
3560 Binop_22s<HSub>(instruction, true, dex_pc);
3561 break;
3562 }
3563
3564 case Instruction::MUL_INT_LIT16: {
3565 Binop_22s<HMul>(instruction, false, dex_pc);
3566 break;
3567 }
3568
3569 case Instruction::ADD_INT_LIT8: {
3570 Binop_22b<HAdd>(instruction, false, dex_pc);
3571 break;
3572 }
3573
3574 case Instruction::AND_INT_LIT8: {
3575 Binop_22b<HAnd>(instruction, false, dex_pc);
3576 break;
3577 }
3578
3579 case Instruction::OR_INT_LIT8: {
3580 Binop_22b<HOr>(instruction, false, dex_pc);
3581 break;
3582 }
3583
3584 case Instruction::XOR_INT_LIT8: {
3585 Binop_22b<HXor>(instruction, false, dex_pc);
3586 break;
3587 }
3588
3589 case Instruction::RSUB_INT_LIT8: {
3590 Binop_22b<HSub>(instruction, true, dex_pc);
3591 break;
3592 }
3593
3594 case Instruction::MUL_INT_LIT8: {
3595 Binop_22b<HMul>(instruction, false, dex_pc);
3596 break;
3597 }
3598
3599 case Instruction::DIV_INT_LIT16: {
3600 BuildCheckedDivRem(instruction.VRegA_22s(),
3601 instruction.VRegB_22s(),
3602 instruction.VRegC_22s(),
3603 dex_pc,
3604 DataType::Type::kInt32,
3605 /* second_is_constant= */ true,
3606 /* is_div=*/ true);
3607 break;
3608 }
3609
3610 case Instruction::DIV_INT_LIT8: {
3611 BuildCheckedDivRem(instruction.VRegA_22b(),
3612 instruction.VRegB_22b(),
3613 instruction.VRegC_22b(),
3614 dex_pc,
3615 DataType::Type::kInt32,
3616 /* second_is_constant= */ true,
3617 /* is_div=*/ true);
3618 break;
3619 }
3620
3621 case Instruction::REM_INT_LIT16: {
3622 BuildCheckedDivRem(instruction.VRegA_22s(),
3623 instruction.VRegB_22s(),
3624 instruction.VRegC_22s(),
3625 dex_pc,
3626 DataType::Type::kInt32,
3627 /* second_is_constant= */ true,
3628 /* is_div=*/ false);
3629 break;
3630 }
3631
3632 case Instruction::REM_INT_LIT8: {
3633 BuildCheckedDivRem(instruction.VRegA_22b(),
3634 instruction.VRegB_22b(),
3635 instruction.VRegC_22b(),
3636 dex_pc,
3637 DataType::Type::kInt32,
3638 /* second_is_constant= */ true,
3639 /* is_div=*/ false);
3640 break;
3641 }
3642
3643 case Instruction::SHL_INT_LIT8: {
3644 Binop_22b<HShl>(instruction, false, dex_pc);
3645 break;
3646 }
3647
3648 case Instruction::SHR_INT_LIT8: {
3649 Binop_22b<HShr>(instruction, false, dex_pc);
3650 break;
3651 }
3652
3653 case Instruction::USHR_INT_LIT8: {
3654 Binop_22b<HUShr>(instruction, false, dex_pc);
3655 break;
3656 }
3657
3658 case Instruction::NEW_INSTANCE: {
3659 HNewInstance* new_instance =
3660 BuildNewInstance(dex::TypeIndex(instruction.VRegB_21c()), dex_pc);
3661 DCHECK(new_instance != nullptr);
3662
3663 UpdateLocal(instruction.VRegA_21c(), current_block_->GetLastInstruction());
3664 BuildConstructorFenceForAllocation(new_instance);
3665 break;
3666 }
3667
3668 case Instruction::NEW_ARRAY: {
3669 dex::TypeIndex type_index(instruction.VRegC_22c());
3670 HInstruction* length = LoadLocal(instruction.VRegB_22c(), DataType::Type::kInt32);
3671 HNewArray* new_array = BuildNewArray(dex_pc, type_index, length);
3672
3673 UpdateLocal(instruction.VRegA_22c(), current_block_->GetLastInstruction());
3674 BuildConstructorFenceForAllocation(new_array);
3675 break;
3676 }
3677
3678 case Instruction::FILLED_NEW_ARRAY: {
3679 dex::TypeIndex type_index(instruction.VRegB_35c());
3680 uint32_t args[5];
3681 uint32_t number_of_vreg_arguments = instruction.GetVarArgs(args);
3682 VarArgsInstructionOperands operands(args, number_of_vreg_arguments);
3683 HNewArray* new_array = BuildFilledNewArray(dex_pc, type_index, operands);
3684 BuildConstructorFenceForAllocation(new_array);
3685 break;
3686 }
3687
3688 case Instruction::FILLED_NEW_ARRAY_RANGE: {
3689 dex::TypeIndex type_index(instruction.VRegB_3rc());
3690 RangeInstructionOperands operands(instruction.VRegC_3rc(), instruction.VRegA_3rc());
3691 HNewArray* new_array = BuildFilledNewArray(dex_pc, type_index, operands);
3692 BuildConstructorFenceForAllocation(new_array);
3693 break;
3694 }
3695
3696 case Instruction::FILL_ARRAY_DATA: {
3697 BuildFillArrayData(instruction, dex_pc);
3698 break;
3699 }
3700
3701 case Instruction::MOVE_RESULT:
3702 case Instruction::MOVE_RESULT_WIDE:
3703 case Instruction::MOVE_RESULT_OBJECT: {
3704 DCHECK(latest_result_ != nullptr);
3705 UpdateLocal(instruction.VRegA_11x(), latest_result_);
3706 latest_result_ = nullptr;
3707 break;
3708 }
3709
3710 case Instruction::CMP_LONG: {
3711 Binop_23x_cmp(instruction, DataType::Type::kInt64, ComparisonBias::kNoBias, dex_pc);
3712 break;
3713 }
3714
3715 case Instruction::CMPG_FLOAT: {
3716 Binop_23x_cmp(instruction, DataType::Type::kFloat32, ComparisonBias::kGtBias, dex_pc);
3717 break;
3718 }
3719
3720 case Instruction::CMPG_DOUBLE: {
3721 Binop_23x_cmp(instruction, DataType::Type::kFloat64, ComparisonBias::kGtBias, dex_pc);
3722 break;
3723 }
3724
3725 case Instruction::CMPL_FLOAT: {
3726 Binop_23x_cmp(instruction, DataType::Type::kFloat32, ComparisonBias::kLtBias, dex_pc);
3727 break;
3728 }
3729
3730 case Instruction::CMPL_DOUBLE: {
3731 Binop_23x_cmp(instruction, DataType::Type::kFloat64, ComparisonBias::kLtBias, dex_pc);
3732 break;
3733 }
3734
3735 case Instruction::NOP:
3736 break;
3737
3738 case Instruction::IGET:
3739 case Instruction::IGET_WIDE:
3740 case Instruction::IGET_OBJECT:
3741 case Instruction::IGET_BOOLEAN:
3742 case Instruction::IGET_BYTE:
3743 case Instruction::IGET_CHAR:
3744 case Instruction::IGET_SHORT: {
3745 if (!BuildInstanceFieldAccess(instruction, dex_pc, /* is_put= */ false)) {
3746 return false;
3747 }
3748 break;
3749 }
3750
3751 case Instruction::IPUT:
3752 case Instruction::IPUT_WIDE:
3753 case Instruction::IPUT_OBJECT:
3754 case Instruction::IPUT_BOOLEAN:
3755 case Instruction::IPUT_BYTE:
3756 case Instruction::IPUT_CHAR:
3757 case Instruction::IPUT_SHORT: {
3758 if (!BuildInstanceFieldAccess(instruction, dex_pc, /* is_put= */ true)) {
3759 return false;
3760 }
3761 break;
3762 }
3763
3764 case Instruction::SGET:
3765 case Instruction::SGET_WIDE:
3766 case Instruction::SGET_OBJECT:
3767 case Instruction::SGET_BOOLEAN:
3768 case Instruction::SGET_BYTE:
3769 case Instruction::SGET_CHAR:
3770 case Instruction::SGET_SHORT: {
3771 BuildStaticFieldAccess(instruction, dex_pc, /* is_put= */ false);
3772 break;
3773 }
3774
3775 case Instruction::SPUT:
3776 case Instruction::SPUT_WIDE:
3777 case Instruction::SPUT_OBJECT:
3778 case Instruction::SPUT_BOOLEAN:
3779 case Instruction::SPUT_BYTE:
3780 case Instruction::SPUT_CHAR:
3781 case Instruction::SPUT_SHORT: {
3782 BuildStaticFieldAccess(instruction, dex_pc, /* is_put= */ true);
3783 break;
3784 }
3785
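  // ARRAY_XX expands to an AGET and an APUT case for each element type, dispatching to
  // BuildArrayAccess() with the put flag false for loads and true for stores.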
3786 #define ARRAY_XX(kind, anticipated_type) \
3787 case Instruction::AGET##kind: { \
3788 BuildArrayAccess(instruction, dex_pc, false, anticipated_type); \
3789 break; \
3790 } \
3791 case Instruction::APUT##kind: { \
3792 BuildArrayAccess(instruction, dex_pc, true, anticipated_type); \
3793 break; \
3794 }
3795
3796 ARRAY_XX(, DataType::Type::kInt32);
3797 ARRAY_XX(_WIDE, DataType::Type::kInt64);
3798 ARRAY_XX(_OBJECT, DataType::Type::kReference);
3799 ARRAY_XX(_BOOLEAN, DataType::Type::kBool);
3800 ARRAY_XX(_BYTE, DataType::Type::kInt8);
3801 ARRAY_XX(_CHAR, DataType::Type::kUint16);
3802 ARRAY_XX(_SHORT, DataType::Type::kInt16);
3803
3804 case Instruction::ARRAY_LENGTH: {
3805 HInstruction* object = LoadNullCheckedLocal(instruction.VRegB_12x(), dex_pc);
3806 AppendInstruction(new (allocator_) HArrayLength(object, dex_pc));
3807 UpdateLocal(instruction.VRegA_12x(), current_block_->GetLastInstruction());
3808 break;
3809 }
3810
3811 case Instruction::CONST_STRING: {
3812 dex::StringIndex string_index(instruction.VRegB_21c());
3813 BuildLoadString(string_index, dex_pc);
3814 UpdateLocal(instruction.VRegA_21c(), current_block_->GetLastInstruction());
3815 break;
3816 }
3817
3818 case Instruction::CONST_STRING_JUMBO: {
3819 dex::StringIndex string_index(instruction.VRegB_31c());
3820 BuildLoadString(string_index, dex_pc);
3821 UpdateLocal(instruction.VRegA_31c(), current_block_->GetLastInstruction());
3822 break;
3823 }
3824
3825 case Instruction::CONST_CLASS: {
3826 dex::TypeIndex type_index(instruction.VRegB_21c());
3827 BuildLoadClass(type_index, dex_pc);
3828 UpdateLocal(instruction.VRegA_21c(), current_block_->GetLastInstruction());
3829 break;
3830 }
3831
3832 case Instruction::CONST_METHOD_HANDLE: {
3833 uint16_t method_handle_idx = instruction.VRegB_21c();
3834 BuildLoadMethodHandle(method_handle_idx, dex_pc);
3835 UpdateLocal(instruction.VRegA_21c(), current_block_->GetLastInstruction());
3836 break;
3837 }
3838
3839 case Instruction::CONST_METHOD_TYPE: {
3840 dex::ProtoIndex proto_idx(instruction.VRegB_21c());
3841 BuildLoadMethodType(proto_idx, dex_pc);
3842 UpdateLocal(instruction.VRegA_21c(), current_block_->GetLastInstruction());
3843 break;
3844 }
3845
3846 case Instruction::MOVE_EXCEPTION: {
3847 AppendInstruction(new (allocator_) HLoadException(dex_pc));
3848 UpdateLocal(instruction.VRegA_11x(), current_block_->GetLastInstruction());
3849 AppendInstruction(new (allocator_) HClearException(dex_pc));
3850 break;
3851 }
3852
3853 case Instruction::THROW: {
3854 HInstruction* exception = LoadLocal(instruction.VRegA_11x(), DataType::Type::kReference);
3855 AppendInstruction(new (allocator_) HThrow(exception, dex_pc));
3856 // We finished building this block. Set the current block to null to avoid
3857 // adding dead instructions to it.
3858 current_block_ = nullptr;
3859 break;
3860 }
3861
3862 case Instruction::INSTANCE_OF: {
3863 uint8_t destination = instruction.VRegA_22c();
3864 uint8_t reference = instruction.VRegB_22c();
3865 dex::TypeIndex type_index(instruction.VRegC_22c());
3866 BuildTypeCheck(instruction, destination, reference, type_index, dex_pc);
3867 break;
3868 }
3869
3870 case Instruction::CHECK_CAST: {
3871 uint8_t reference = instruction.VRegA_21c();
3872 dex::TypeIndex type_index(instruction.VRegB_21c());
3873 BuildTypeCheck(instruction, -1, reference, type_index, dex_pc);
3874 break;
3875 }
3876
3877 case Instruction::MONITOR_ENTER: {
3878 AppendInstruction(new (allocator_) HMonitorOperation(
3879 LoadLocal(instruction.VRegA_11x(), DataType::Type::kReference),
3880 HMonitorOperation::OperationKind::kEnter,
3881 dex_pc));
3882 graph_->SetHasMonitorOperations(true);
3883 break;
3884 }
3885
3886 case Instruction::MONITOR_EXIT: {
3887 AppendInstruction(new (allocator_) HMonitorOperation(
3888 LoadLocal(instruction.VRegA_11x(), DataType::Type::kReference),
3889 HMonitorOperation::OperationKind::kExit,
3890 dex_pc));
3891 graph_->SetHasMonitorOperations(true);
3892 break;
3893 }
3894
3895 case Instruction::SPARSE_SWITCH:
3896 case Instruction::PACKED_SWITCH: {
3897 BuildSwitch(instruction, dex_pc);
3898 break;
3899 }
3900
3901 case Instruction::UNUSED_3E ... Instruction::UNUSED_43:
3902 case Instruction::UNUSED_73:
3903 case Instruction::UNUSED_79:
3904 case Instruction::UNUSED_7A:
3905 case Instruction::UNUSED_E3 ... Instruction::UNUSED_F9: {
3906 VLOG(compiler) << "Did not compile "
3907 << dex_file_->PrettyMethod(dex_compilation_unit_->GetDexMethodIndex())
3908 << " because of unhandled instruction "
3909 << instruction.Name();
3910 MaybeRecordStat(compilation_stats_,
3911 MethodCompilationStat::kNotCompiledUnhandledInstruction);
3912 return false;
3913 }
3914 }
3915 return true;
3916 } // NOLINT(readability/fn_size)
3917
3918 ObjPtr<mirror::Class> HInstructionBuilder::LookupResolvedType(
3919 dex::TypeIndex type_index,
3920 const DexCompilationUnit& compilation_unit) const {
3921 return compilation_unit.GetClassLinker()->LookupResolvedType(
3922 type_index, compilation_unit.GetDexCache().Get(), compilation_unit.GetClassLoader().Get());
3923 }
3924
3925 ObjPtr<mirror::Class> HInstructionBuilder::LookupReferrerClass() const {
3926 // TODO: Cache the result in a Handle<mirror::Class>.
3927 const dex::MethodId& method_id =
3928 dex_compilation_unit_->GetDexFile()->GetMethodId(dex_compilation_unit_->GetDexMethodIndex());
3929 return LookupResolvedType(method_id.class_idx_, *dex_compilation_unit_);
3930 }
3931
3932 } // namespace art
3933