1 /*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "compiler_driver.h"
18
19 #include <unistd.h>
20
21 #ifndef __APPLE__
22 #include <malloc.h> // For mallinfo
23 #endif
24
25 #include <string_view>
26 #include <vector>
27
28 #include <android-base/logging.h>
29 #include <android-base/strings.h>
30
31 #include "aot_class_linker.h"
32 #include "art_field-inl.h"
33 #include "art_method-inl.h"
34 #include "base/arena_allocator.h"
35 #include "base/array_ref.h"
36 #include "base/bit_vector.h"
37 #include "base/hash_set.h"
38 #include "base/logging.h" // For VLOG
39 #include "base/pointer_size.h"
40 #include "base/stl_util.h"
41 #include "base/systrace.h"
42 #include "base/time_utils.h"
43 #include "base/timing_logger.h"
44 #include "class_linker-inl.h"
45 #include "class_root-inl.h"
46 #include "common_throws.h"
47 #include "compiled_method-inl.h"
48 #include "compiler.h"
49 #include "compiler_callbacks.h"
50 #include "compiler_driver-inl.h"
51 #include "dex/class_accessor-inl.h"
52 #include "dex/descriptors_names.h"
53 #include "dex/dex_file-inl.h"
54 #include "dex/dex_file_annotations.h"
55 #include "dex/dex_file_exception_helpers.h"
56 #include "dex/dex_instruction-inl.h"
57 #include "dex/verification_results.h"
58 #include "driver/compiler_options.h"
59 #include "driver/dex_compilation_unit.h"
60 #include "gc/accounting/card_table-inl.h"
61 #include "gc/accounting/heap_bitmap.h"
62 #include "gc/space/image_space.h"
63 #include "gc/space/space.h"
64 #include "handle_scope-inl.h"
65 #include "intrinsics_enum.h"
66 #include "intrinsics_list.h"
67 #include "jni/jni_internal.h"
68 #include "linker/linker_patch.h"
69 #include "mirror/class-inl.h"
70 #include "mirror/class_loader.h"
71 #include "mirror/dex_cache-inl.h"
72 #include "mirror/object-inl.h"
73 #include "mirror/object-refvisitor-inl.h"
74 #include "mirror/object_array-inl.h"
75 #include "mirror/throwable.h"
76 #include "object_lock.h"
77 #include "profile/profile_compilation_info.h"
78 #include "runtime.h"
79 #include "runtime_intrinsics.h"
80 #include "scoped_thread_state_change-inl.h"
81 #include "thread.h"
82 #include "thread_list.h"
83 #include "thread_pool.h"
84 #include "trampolines/trampoline_compiler.h"
85 #include "utils/atomic_dex_ref_map-inl.h"
86 #include "utils/swap_space.h"
87 #include "vdex_file.h"
88 #include "verifier/class_verifier.h"
89 #include "verifier/verifier_deps.h"
90 #include "verifier/verifier_enums.h"
91 #include "well_known_classes-inl.h"
92
93 namespace art {
94
95 static constexpr bool kTimeCompileMethod = !kIsDebugBuild;
96
97 // Print additional info during profile guided compilation.
98 static constexpr bool kDebugProfileGuidedCompilation = false;
99
100 // Max encoded fields allowed for initializing app image. Hardcode the number for now
101 // because 5000 should be large enough.
102 static constexpr uint32_t kMaxEncodedFields = 5000;
103
Percentage(size_t x,size_t y)104 static double Percentage(size_t x, size_t y) {
105 return 100.0 * (static_cast<double>(x)) / (static_cast<double>(x + y));
106 }
107
DumpStat(size_t x,size_t y,const char * str)108 static void DumpStat(size_t x, size_t y, const char* str) {
109 if (x == 0 && y == 0) {
110 return;
111 }
112 LOG(INFO) << Percentage(x, y) << "% of " << str << " for " << (x + y) << " cases";
113 }
114
115 class CompilerDriver::AOTCompilationStats {
116 public:
AOTCompilationStats()117 AOTCompilationStats()
118 : stats_lock_("AOT compilation statistics lock") {}
119
Dump()120 void Dump() {
121 DumpStat(resolved_instance_fields_, unresolved_instance_fields_, "instance fields resolved");
122 DumpStat(resolved_local_static_fields_ + resolved_static_fields_, unresolved_static_fields_,
123 "static fields resolved");
124 DumpStat(resolved_local_static_fields_, resolved_static_fields_ + unresolved_static_fields_,
125 "static fields local to a class");
126 DumpStat(safe_casts_, not_safe_casts_, "check-casts removed based on type information");
127 // Note, the code below subtracts the stat value so that when added to the stat value we have
128 // 100% of samples. TODO: clean this up.
129 DumpStat(type_based_devirtualization_,
130 resolved_methods_[kVirtual] + unresolved_methods_[kVirtual] +
131 resolved_methods_[kInterface] + unresolved_methods_[kInterface] -
132 type_based_devirtualization_,
133 "virtual/interface calls made direct based on type information");
134
135 const size_t total = std::accumulate(
136 class_status_count_,
137 class_status_count_ + static_cast<size_t>(ClassStatus::kLast) + 1,
138 0u);
139 for (size_t i = 0; i <= static_cast<size_t>(ClassStatus::kLast); ++i) {
140 std::ostringstream oss;
141 oss << "classes with status " << static_cast<ClassStatus>(i);
142 DumpStat(class_status_count_[i], total - class_status_count_[i], oss.str().c_str());
143 }
144
145 for (size_t i = 0; i <= kMaxInvokeType; i++) {
146 std::ostringstream oss;
147 oss << static_cast<InvokeType>(i) << " methods were AOT resolved";
148 DumpStat(resolved_methods_[i], unresolved_methods_[i], oss.str().c_str());
149 if (virtual_made_direct_[i] > 0) {
150 std::ostringstream oss2;
151 oss2 << static_cast<InvokeType>(i) << " methods made direct";
152 DumpStat(virtual_made_direct_[i],
153 resolved_methods_[i] + unresolved_methods_[i] - virtual_made_direct_[i],
154 oss2.str().c_str());
155 }
156 if (direct_calls_to_boot_[i] > 0) {
157 std::ostringstream oss2;
158 oss2 << static_cast<InvokeType>(i) << " method calls are direct into boot";
159 DumpStat(direct_calls_to_boot_[i],
160 resolved_methods_[i] + unresolved_methods_[i] - direct_calls_to_boot_[i],
161 oss2.str().c_str());
162 }
163 if (direct_methods_to_boot_[i] > 0) {
164 std::ostringstream oss2;
165 oss2 << static_cast<InvokeType>(i) << " method calls have methods in boot";
166 DumpStat(direct_methods_to_boot_[i],
167 resolved_methods_[i] + unresolved_methods_[i] - direct_methods_to_boot_[i],
168 oss2.str().c_str());
169 }
170 }
171 }
172
173 // Allow lossy statistics in non-debug builds.
174 #ifndef NDEBUG
175 #define STATS_LOCK() MutexLock mu(Thread::Current(), stats_lock_)
176 #else
177 #define STATS_LOCK()
178 #endif
179
ResolvedInstanceField()180 void ResolvedInstanceField() REQUIRES(!stats_lock_) {
181 STATS_LOCK();
182 resolved_instance_fields_++;
183 }
184
UnresolvedInstanceField()185 void UnresolvedInstanceField() REQUIRES(!stats_lock_) {
186 STATS_LOCK();
187 unresolved_instance_fields_++;
188 }
189
ResolvedLocalStaticField()190 void ResolvedLocalStaticField() REQUIRES(!stats_lock_) {
191 STATS_LOCK();
192 resolved_local_static_fields_++;
193 }
194
ResolvedStaticField()195 void ResolvedStaticField() REQUIRES(!stats_lock_) {
196 STATS_LOCK();
197 resolved_static_fields_++;
198 }
199
UnresolvedStaticField()200 void UnresolvedStaticField() REQUIRES(!stats_lock_) {
201 STATS_LOCK();
202 unresolved_static_fields_++;
203 }
204
205 // Indicate that type information from the verifier led to devirtualization.
PreciseTypeDevirtualization()206 void PreciseTypeDevirtualization() REQUIRES(!stats_lock_) {
207 STATS_LOCK();
208 type_based_devirtualization_++;
209 }
210
211 // A check-cast could be eliminated due to verifier type analysis.
SafeCast()212 void SafeCast() REQUIRES(!stats_lock_) {
213 STATS_LOCK();
214 safe_casts_++;
215 }
216
217 // A check-cast couldn't be eliminated due to verifier type analysis.
NotASafeCast()218 void NotASafeCast() REQUIRES(!stats_lock_) {
219 STATS_LOCK();
220 not_safe_casts_++;
221 }
222
223 // Register a class status.
AddClassStatus(ClassStatus status)224 void AddClassStatus(ClassStatus status) REQUIRES(!stats_lock_) {
225 STATS_LOCK();
226 ++class_status_count_[static_cast<size_t>(status)];
227 }
228
229 private:
230 Mutex stats_lock_;
231
232 size_t resolved_instance_fields_ = 0u;
233 size_t unresolved_instance_fields_ = 0u;
234
235 size_t resolved_local_static_fields_ = 0u;
236 size_t resolved_static_fields_ = 0u;
237 size_t unresolved_static_fields_ = 0u;
238 // Type based devirtualization for invoke interface and virtual.
239 size_t type_based_devirtualization_ = 0u;
240
241 size_t resolved_methods_[kMaxInvokeType + 1] = {};
242 size_t unresolved_methods_[kMaxInvokeType + 1] = {};
243 size_t virtual_made_direct_[kMaxInvokeType + 1] = {};
244 size_t direct_calls_to_boot_[kMaxInvokeType + 1] = {};
245 size_t direct_methods_to_boot_[kMaxInvokeType + 1] = {};
246
247 size_t safe_casts_ = 0u;
248 size_t not_safe_casts_ = 0u;
249
250 size_t class_status_count_[static_cast<size_t>(ClassStatus::kLast) + 1] = {};
251
252 DISALLOW_COPY_AND_ASSIGN(AOTCompilationStats);
253 };
254
CompilerDriver(const CompilerOptions * compiler_options,const VerificationResults * verification_results,size_t thread_count,int swap_fd)255 CompilerDriver::CompilerDriver(
256 const CompilerOptions* compiler_options,
257 const VerificationResults* verification_results,
258 size_t thread_count,
259 int swap_fd)
260 : compiler_options_(compiler_options),
261 verification_results_(verification_results),
262 compiler_(),
263 number_of_soft_verifier_failures_(0),
264 had_hard_verifier_failure_(false),
265 parallel_thread_count_(thread_count),
266 stats_(new AOTCompilationStats),
267 compiled_method_storage_(swap_fd),
268 max_arena_alloc_(0) {
269 DCHECK(compiler_options_ != nullptr);
270
271 compiled_method_storage_.SetDedupeEnabled(compiler_options_->DeduplicateCode());
272 compiler_.reset(Compiler::Create(*compiler_options, &compiled_method_storage_));
273 }
274
~CompilerDriver()275 CompilerDriver::~CompilerDriver() {
276 compiled_methods_.Visit(
277 [this]([[maybe_unused]] const DexFileReference& ref, CompiledMethod* method) {
278 if (method != nullptr) {
279 CompiledMethod::ReleaseSwapAllocatedCompiledMethod(GetCompiledMethodStorage(), method);
280 }
281 });
282 }
283
284
285 #define CREATE_TRAMPOLINE(type, abi, offset) \
286 if (Is64BitInstructionSet(GetCompilerOptions().GetInstructionSet())) { \
287 return CreateTrampoline64(GetCompilerOptions().GetInstructionSet(), \
288 abi, \
289 type ## _ENTRYPOINT_OFFSET(PointerSize::k64, offset)); \
290 } else { \
291 return CreateTrampoline32(GetCompilerOptions().GetInstructionSet(), \
292 abi, \
293 type ## _ENTRYPOINT_OFFSET(PointerSize::k32, offset)); \
294 }
295
CreateJniDlsymLookupTrampoline() const296 std::unique_ptr<const std::vector<uint8_t>> CompilerDriver::CreateJniDlsymLookupTrampoline() const {
297 CREATE_TRAMPOLINE(JNI, kJniAbi, pDlsymLookup)
298 }
299
300 std::unique_ptr<const std::vector<uint8_t>>
CreateJniDlsymLookupCriticalTrampoline() const301 CompilerDriver::CreateJniDlsymLookupCriticalTrampoline() const {
302 // @CriticalNative calls do not have the `JNIEnv*` parameter, so this trampoline uses the
303 // architecture-dependent access to `Thread*` using the managed code ABI, i.e. `kQuickAbi`.
304 CREATE_TRAMPOLINE(JNI, kQuickAbi, pDlsymLookupCritical)
305 }
306
CreateQuickGenericJniTrampoline() const307 std::unique_ptr<const std::vector<uint8_t>> CompilerDriver::CreateQuickGenericJniTrampoline()
308 const {
309 CREATE_TRAMPOLINE(QUICK, kQuickAbi, pQuickGenericJniTrampoline)
310 }
311
CreateQuickImtConflictTrampoline() const312 std::unique_ptr<const std::vector<uint8_t>> CompilerDriver::CreateQuickImtConflictTrampoline()
313 const {
314 CREATE_TRAMPOLINE(QUICK, kQuickAbi, pQuickImtConflictTrampoline)
315 }
316
CreateQuickResolutionTrampoline() const317 std::unique_ptr<const std::vector<uint8_t>> CompilerDriver::CreateQuickResolutionTrampoline()
318 const {
319 CREATE_TRAMPOLINE(QUICK, kQuickAbi, pQuickResolutionTrampoline)
320 }
321
CreateQuickToInterpreterBridge() const322 std::unique_ptr<const std::vector<uint8_t>> CompilerDriver::CreateQuickToInterpreterBridge()
323 const {
324 CREATE_TRAMPOLINE(QUICK, kQuickAbi, pQuickToInterpreterBridge)
325 }
326
CreateNterpTrampoline() const327 std::unique_ptr<const std::vector<uint8_t>> CompilerDriver::CreateNterpTrampoline()
328 const {
329 // We use QuickToInterpreterBridge to not waste one word in the Thread object.
330 // The Nterp trampoline gets replaced with the nterp entrypoint when loading
331 // an image.
332 CREATE_TRAMPOLINE(QUICK, kQuickAbi, pQuickToInterpreterBridge)
333 }
334 #undef CREATE_TRAMPOLINE
335
CompileAll(jobject class_loader,const std::vector<const DexFile * > & dex_files,TimingLogger * timings)336 void CompilerDriver::CompileAll(jobject class_loader,
337 const std::vector<const DexFile*>& dex_files,
338 TimingLogger* timings) {
339 DCHECK(!Runtime::Current()->IsStarted());
340
341 CheckThreadPools();
342
343 // Compile:
344 // 1) Compile all classes and methods enabled for compilation. May fall back to dex-to-dex
345 // compilation.
346 if (GetCompilerOptions().IsAnyCompilationEnabled()) {
347 Compile(class_loader, dex_files, timings);
348 }
349 if (GetCompilerOptions().GetDumpStats()) {
350 stats_->Dump();
351 }
352 }
353
354 // Does the runtime for the InstructionSet provide an implementation returned by
355 // GetQuickGenericJniStub allowing down calls that aren't compiled using a JNI compiler?
InstructionSetHasGenericJniStub(InstructionSet isa)356 static bool InstructionSetHasGenericJniStub(InstructionSet isa) {
357 switch (isa) {
358 case InstructionSet::kArm:
359 case InstructionSet::kArm64:
360 case InstructionSet::kThumb2:
361 case InstructionSet::kX86:
362 case InstructionSet::kX86_64: return true;
363 default: return false;
364 }
365 }
366
367 template <typename CompileFn>
CompileMethodHarness(Thread * self,CompilerDriver * driver,const dex::CodeItem * code_item,uint32_t access_flags,uint16_t class_def_idx,uint32_t method_idx,Handle<mirror::ClassLoader> class_loader,const DexFile & dex_file,Handle<mirror::DexCache> dex_cache,CompileFn compile_fn)368 static void CompileMethodHarness(
369 Thread* self,
370 CompilerDriver* driver,
371 const dex::CodeItem* code_item,
372 uint32_t access_flags,
373 uint16_t class_def_idx,
374 uint32_t method_idx,
375 Handle<mirror::ClassLoader> class_loader,
376 const DexFile& dex_file,
377 Handle<mirror::DexCache> dex_cache,
378 CompileFn compile_fn) {
379 DCHECK(driver != nullptr);
380 CompiledMethod* compiled_method;
381 uint64_t start_ns = kTimeCompileMethod ? NanoTime() : 0;
382 MethodReference method_ref(&dex_file, method_idx);
383
384 compiled_method = compile_fn(self,
385 driver,
386 code_item,
387 access_flags,
388 class_def_idx,
389 method_idx,
390 class_loader,
391 dex_file,
392 dex_cache);
393
394 if (kTimeCompileMethod) {
395 uint64_t duration_ns = NanoTime() - start_ns;
396 if (duration_ns > MsToNs(driver->GetCompiler()->GetMaximumCompilationTimeBeforeWarning())) {
397 LOG(WARNING) << "Compilation of " << dex_file.PrettyMethod(method_idx)
398 << " took " << PrettyDuration(duration_ns);
399 }
400 }
401
402 if (compiled_method != nullptr) {
403 driver->AddCompiledMethod(method_ref, compiled_method);
404 }
405
406 if (self->IsExceptionPending()) {
407 ScopedObjectAccess soa(self);
408 LOG(FATAL) << "Unexpected exception compiling: " << dex_file.PrettyMethod(method_idx) << "\n"
409 << self->GetException()->Dump();
410 }
411 }
412
413 // Checks whether profile guided compilation is enabled and if the method should be compiled
414 // according to the profile file.
ShouldCompileBasedOnProfile(const CompilerOptions & compiler_options,ProfileCompilationInfo::ProfileIndexType profile_index,MethodReference method_ref)415 static bool ShouldCompileBasedOnProfile(const CompilerOptions& compiler_options,
416 ProfileCompilationInfo::ProfileIndexType profile_index,
417 MethodReference method_ref) {
418 if (profile_index == ProfileCompilationInfo::MaxProfileIndex()) {
419 // No profile for this dex file. Check if we're actually compiling based on a profile.
420 if (!CompilerFilter::DependsOnProfile(compiler_options.GetCompilerFilter())) {
421 return true;
422 }
423 // Profile-based compilation without profile for this dex file. Do not compile the method.
424 DCHECK(compiler_options.GetProfileCompilationInfo() == nullptr ||
425 compiler_options.GetProfileCompilationInfo()->FindDexFile(*method_ref.dex_file) ==
426 ProfileCompilationInfo::MaxProfileIndex());
427 return false;
428 } else {
429 DCHECK(CompilerFilter::DependsOnProfile(compiler_options.GetCompilerFilter()));
430 const ProfileCompilationInfo* profile_compilation_info =
431 compiler_options.GetProfileCompilationInfo();
432 DCHECK(profile_compilation_info != nullptr);
433
434 bool result = profile_compilation_info->IsHotMethod(profile_index, method_ref.index);
435
436 // On non-low RAM devices, compile startup methods to potentially speed up
437 // startup.
438 if (!result && !Runtime::Current()->GetHeap()->IsLowMemoryMode()) {
439 result = profile_compilation_info->IsStartupMethod(profile_index, method_ref.index);
440 }
441
442 if (kDebugProfileGuidedCompilation) {
443 LOG(INFO) << "[ProfileGuidedCompilation] "
444 << (result ? "Compiled" : "Skipped") << " method:" << method_ref.PrettyMethod(true);
445 }
446
447
448 return result;
449 }
450 }
451
CompileMethodQuick(Thread * self,CompilerDriver * driver,const dex::CodeItem * code_item,uint32_t access_flags,uint16_t class_def_idx,uint32_t method_idx,Handle<mirror::ClassLoader> class_loader,const DexFile & dex_file,Handle<mirror::DexCache> dex_cache,ProfileCompilationInfo::ProfileIndexType profile_index)452 static void CompileMethodQuick(
453 Thread* self,
454 CompilerDriver* driver,
455 const dex::CodeItem* code_item,
456 uint32_t access_flags,
457 uint16_t class_def_idx,
458 uint32_t method_idx,
459 Handle<mirror::ClassLoader> class_loader,
460 const DexFile& dex_file,
461 Handle<mirror::DexCache> dex_cache,
462 ProfileCompilationInfo::ProfileIndexType profile_index) {
463 auto quick_fn = [profile_index]([[maybe_unused]] Thread* self,
464 CompilerDriver* driver,
465 const dex::CodeItem* code_item,
466 uint32_t access_flags,
467 uint16_t class_def_idx,
468 uint32_t method_idx,
469 Handle<mirror::ClassLoader> class_loader,
470 const DexFile& dex_file,
471 Handle<mirror::DexCache> dex_cache) {
472 DCHECK(driver != nullptr);
473 const VerificationResults* results = driver->GetVerificationResults();
474 DCHECK(results != nullptr);
475 MethodReference method_ref(&dex_file, method_idx);
476 CompiledMethod* compiled_method = nullptr;
477 if (results->IsUncompilableMethod(method_ref)) {
478 return compiled_method;
479 }
480
481 if ((access_flags & kAccNative) != 0) {
482 // Are we extracting only and have support for generic JNI down calls?
483 const CompilerOptions& compiler_options = driver->GetCompilerOptions();
484 if (!compiler_options.IsJniCompilationEnabled() &&
485 InstructionSetHasGenericJniStub(compiler_options.GetInstructionSet())) {
486 // Leaving this empty will trigger the generic JNI version
487 } else {
488 // Query any JNI optimization annotations such as @FastNative or @CriticalNative.
489 access_flags |= annotations::GetNativeMethodAnnotationAccessFlags(
490 dex_file, dex_file.GetClassDef(class_def_idx), method_idx);
491 const void* boot_jni_stub = nullptr;
492 if (!Runtime::Current()->GetHeap()->GetBootImageSpaces().empty()) {
493 // Skip the compilation for native method if found an usable boot JNI stub.
494 ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
495 std::string_view shorty = dex_file.GetMethodShortyView(dex_file.GetMethodId(method_idx));
496 boot_jni_stub = class_linker->FindBootJniStub(access_flags, shorty);
497 }
498 if (boot_jni_stub == nullptr) {
499 compiled_method =
500 driver->GetCompiler()->JniCompile(access_flags, method_idx, dex_file, dex_cache);
501 CHECK(compiled_method != nullptr);
502 }
503 }
504 } else if ((access_flags & kAccAbstract) != 0) {
505 // Abstract methods don't have code.
506 } else if (annotations::MethodIsNeverCompile(dex_file,
507 dex_file.GetClassDef(class_def_idx),
508 method_idx)) {
509 // Method is annotated with @NeverCompile and should not be compiled.
510 } else {
511 const CompilerOptions& compiler_options = driver->GetCompilerOptions();
512 // Don't compile class initializers unless kEverything.
513 bool compile = (compiler_options.GetCompilerFilter() == CompilerFilter::kEverything) ||
514 ((access_flags & kAccConstructor) == 0) || ((access_flags & kAccStatic) == 0);
515 // Check if we should compile based on the profile.
516 compile = compile && ShouldCompileBasedOnProfile(compiler_options, profile_index, method_ref);
517
518 if (compile) {
519 // NOTE: if compiler declines to compile this method, it will return null.
520 compiled_method = driver->GetCompiler()->Compile(code_item,
521 access_flags,
522 class_def_idx,
523 method_idx,
524 class_loader,
525 dex_file,
526 dex_cache);
527 ProfileMethodsCheck check_type = compiler_options.CheckProfiledMethodsCompiled();
528 if (UNLIKELY(check_type != ProfileMethodsCheck::kNone)) {
529 DCHECK(ShouldCompileBasedOnProfile(compiler_options, profile_index, method_ref));
530 bool violation = (compiled_method == nullptr);
531 if (violation) {
532 std::ostringstream oss;
533 oss << "Failed to compile "
534 << method_ref.dex_file->PrettyMethod(method_ref.index)
535 << "[" << method_ref.dex_file->GetLocation() << "]"
536 << " as expected by profile";
537 switch (check_type) {
538 case ProfileMethodsCheck::kNone:
539 break;
540 case ProfileMethodsCheck::kLog:
541 LOG(ERROR) << oss.str();
542 break;
543 case ProfileMethodsCheck::kAbort:
544 LOG(FATAL_WITHOUT_ABORT) << oss.str();
545 _exit(1);
546 }
547 }
548 }
549 }
550 }
551 return compiled_method;
552 };
553 CompileMethodHarness(self,
554 driver,
555 code_item,
556 access_flags,
557 class_def_idx,
558 method_idx,
559 class_loader,
560 dex_file,
561 dex_cache,
562 quick_fn);
563 }
564
Resolve(jobject class_loader,const std::vector<const DexFile * > & dex_files,TimingLogger * timings)565 void CompilerDriver::Resolve(jobject class_loader,
566 const std::vector<const DexFile*>& dex_files,
567 TimingLogger* timings) {
568 // Resolution allocates classes and needs to run single-threaded to be deterministic.
569 bool force_determinism = GetCompilerOptions().IsForceDeterminism();
570 ThreadPool* resolve_thread_pool = force_determinism
571 ? single_thread_pool_.get()
572 : parallel_thread_pool_.get();
573 size_t resolve_thread_count = force_determinism ? 1U : parallel_thread_count_;
574
575 for (size_t i = 0; i != dex_files.size(); ++i) {
576 const DexFile* dex_file = dex_files[i];
577 CHECK(dex_file != nullptr);
578 ResolveDexFile(class_loader,
579 *dex_file,
580 resolve_thread_pool,
581 resolve_thread_count,
582 timings);
583 }
584 }
585
ResolveConstStrings(const std::vector<const DexFile * > & dex_files,bool only_startup_strings,TimingLogger * timings)586 void CompilerDriver::ResolveConstStrings(const std::vector<const DexFile*>& dex_files,
587 bool only_startup_strings,
588 TimingLogger* timings) {
589 const ProfileCompilationInfo* profile_compilation_info =
590 GetCompilerOptions().GetProfileCompilationInfo();
591 if (only_startup_strings && profile_compilation_info == nullptr) {
592 // If there is no profile, don't resolve any strings. Resolving all of the strings in the image
593 // will cause a bloated app image and slow down startup.
594 return;
595 }
596 ScopedObjectAccess soa(Thread::Current());
597 StackHandleScope<1> hs(soa.Self());
598 ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
599 MutableHandle<mirror::DexCache> dex_cache(hs.NewHandle<mirror::DexCache>(nullptr));
600 size_t num_instructions = 0u;
601
602 for (const DexFile* dex_file : dex_files) {
603 dex_cache.Assign(class_linker->FindDexCache(soa.Self(), *dex_file));
604 TimingLogger::ScopedTiming t("Resolve const-string Strings", timings);
605
606 ProfileCompilationInfo::ProfileIndexType profile_index =
607 ProfileCompilationInfo::MaxProfileIndex();
608 if (profile_compilation_info != nullptr) {
609 profile_index = profile_compilation_info->FindDexFile(*dex_file);
610 if (profile_index == ProfileCompilationInfo::MaxProfileIndex()) {
611 // We have a `ProfileCompilationInfo` but no data for this dex file.
612 // The code below would not find any method to process.
613 continue;
614 }
615 }
616
617 // TODO: Implement a profile-based filter for the boot image. See b/76145463.
618 for (ClassAccessor accessor : dex_file->GetClasses()) {
619 // Skip methods that failed to verify since they may contain invalid Dex code.
620 if (GetClassStatus(ClassReference(dex_file, accessor.GetClassDefIndex())) <
621 ClassStatus::kRetryVerificationAtRuntime) {
622 continue;
623 }
624
625 for (const ClassAccessor::Method& method : accessor.GetMethods()) {
626 if (profile_compilation_info != nullptr) {
627 DCHECK_NE(profile_index, ProfileCompilationInfo::MaxProfileIndex());
628 // There can be at most one class initializer in a class, so we shall not
629 // call `ProfileCompilationInfo::ContainsClass()` more than once per class.
630 constexpr uint32_t kMask = kAccConstructor | kAccStatic;
631 const bool is_startup_clinit =
632 (method.GetAccessFlags() & kMask) == kMask &&
633 profile_compilation_info->ContainsClass(profile_index, accessor.GetClassIdx());
634
635 if (!is_startup_clinit) {
636 uint32_t method_index = method.GetIndex();
637 bool process_method = only_startup_strings
638 ? profile_compilation_info->IsStartupMethod(profile_index, method_index)
639 : profile_compilation_info->IsMethodInProfile(profile_index, method_index);
640 if (!process_method) {
641 continue;
642 }
643 }
644 }
645
646 // Resolve const-strings in the code. Done to have deterministic allocation behavior. Right
647 // now this is single-threaded for simplicity.
648 // TODO: Collect the relevant string indices in parallel, then allocate them sequentially
649 // in a stable order.
650 for (const DexInstructionPcPair& inst : method.GetInstructions()) {
651 switch (inst->Opcode()) {
652 case Instruction::CONST_STRING:
653 case Instruction::CONST_STRING_JUMBO: {
654 dex::StringIndex string_index((inst->Opcode() == Instruction::CONST_STRING)
655 ? inst->VRegB_21c()
656 : inst->VRegB_31c());
657 ObjPtr<mirror::String> string = class_linker->ResolveString(string_index, dex_cache);
658 CHECK(string != nullptr) << "Could not allocate a string when forcing determinism";
659 ++num_instructions;
660 break;
661 }
662
663 default:
664 break;
665 }
666 }
667 }
668 }
669 }
670 VLOG(compiler) << "Resolved " << num_instructions << " const string instructions";
671 }
672
673 // Initialize type check bit strings for check-cast and instance-of in the code. Done to have
674 // deterministic allocation behavior. Right now this is single-threaded for simplicity.
675 // TODO: Collect the relevant type indices in parallel, then process them sequentially in a
676 // stable order.
677
InitializeTypeCheckBitstrings(CompilerDriver * driver,ClassLinker * class_linker,Handle<mirror::DexCache> dex_cache,const DexFile & dex_file,const ClassAccessor::Method & method)678 static void InitializeTypeCheckBitstrings(CompilerDriver* driver,
679 ClassLinker* class_linker,
680 Handle<mirror::DexCache> dex_cache,
681 const DexFile& dex_file,
682 const ClassAccessor::Method& method)
683 REQUIRES_SHARED(Locks::mutator_lock_) {
684 for (const DexInstructionPcPair& inst : method.GetInstructions()) {
685 switch (inst->Opcode()) {
686 case Instruction::CHECK_CAST:
687 case Instruction::INSTANCE_OF: {
688 dex::TypeIndex type_index(
689 (inst->Opcode() == Instruction::CHECK_CAST) ? inst->VRegB_21c() : inst->VRegC_22c());
690 const char* descriptor = dex_file.GetTypeDescriptor(type_index);
691 // We currently do not use the bitstring type check for array or final (including
692 // primitive) classes. We may reconsider this in future if it's deemed to be beneficial.
693 // And we cannot use it for classes outside the boot image as we do not know the runtime
694 // value of their bitstring when compiling (it may not even get assigned at runtime).
695 if (descriptor[0] == 'L' && driver->GetCompilerOptions().IsImageClass(descriptor)) {
696 ObjPtr<mirror::Class> klass =
697 class_linker->LookupResolvedType(type_index,
698 dex_cache.Get(),
699 /* class_loader= */ nullptr);
700 CHECK(klass != nullptr) << descriptor << " should have been previously resolved.";
701 // Now assign the bitstring if the class is not final. Keep this in sync with sharpening.
702 if (!klass->IsFinal()) {
703 MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
704 SubtypeCheck<ObjPtr<mirror::Class>>::EnsureAssigned(klass);
705 }
706 }
707 break;
708 }
709
710 default:
711 break;
712 }
713 }
714 }
715
InitializeTypeCheckBitstrings(CompilerDriver * driver,const std::vector<const DexFile * > & dex_files,TimingLogger * timings)716 static void InitializeTypeCheckBitstrings(CompilerDriver* driver,
717 const std::vector<const DexFile*>& dex_files,
718 TimingLogger* timings) {
719 ScopedObjectAccess soa(Thread::Current());
720 StackHandleScope<1> hs(soa.Self());
721 ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
722 MutableHandle<mirror::DexCache> dex_cache(hs.NewHandle<mirror::DexCache>(nullptr));
723
724 for (const DexFile* dex_file : dex_files) {
725 dex_cache.Assign(class_linker->FindDexCache(soa.Self(), *dex_file));
726 TimingLogger::ScopedTiming t("Initialize type check bitstrings", timings);
727
728 for (ClassAccessor accessor : dex_file->GetClasses()) {
729 // Direct and virtual methods.
730 for (const ClassAccessor::Method& method : accessor.GetMethods()) {
731 InitializeTypeCheckBitstrings(driver, class_linker, dex_cache, *dex_file, method);
732 }
733 }
734 }
735 }
736
CheckThreadPools()737 inline void CompilerDriver::CheckThreadPools() {
738 DCHECK(parallel_thread_pool_ != nullptr);
739 DCHECK(single_thread_pool_ != nullptr);
740 }
741
EnsureVerifiedOrVerifyAtRuntime(jobject jclass_loader,const std::vector<const DexFile * > & dex_files)742 static void EnsureVerifiedOrVerifyAtRuntime(jobject jclass_loader,
743 const std::vector<const DexFile*>& dex_files) {
744 ScopedObjectAccess soa(Thread::Current());
745 StackHandleScope<2> hs(soa.Self());
746 Handle<mirror::ClassLoader> class_loader(
747 hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader)));
748 MutableHandle<mirror::Class> cls(hs.NewHandle<mirror::Class>(nullptr));
749 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
750
751 for (const DexFile* dex_file : dex_files) {
752 for (ClassAccessor accessor : dex_file->GetClasses()) {
753 cls.Assign(
754 class_linker->FindClass(soa.Self(), *dex_file, accessor.GetClassIdx(), class_loader));
755 if (cls == nullptr) {
756 soa.Self()->ClearException();
757 } else if (&cls->GetDexFile() == dex_file) {
758 DCHECK(cls->IsErroneous() ||
759 cls->IsVerified() ||
760 cls->ShouldVerifyAtRuntime() ||
761 cls->IsVerifiedNeedsAccessChecks())
762 << cls->PrettyClass()
763 << " " << cls->GetStatus();
764 }
765 }
766 }
767 }
768
PrepareDexFilesForOatFile(TimingLogger * timings)769 void CompilerDriver::PrepareDexFilesForOatFile([[maybe_unused]] TimingLogger* timings) {
770 compiled_classes_.AddDexFiles(GetCompilerOptions().GetDexFilesForOatFile());
771 }
772
773 class CreateConflictTablesVisitor : public ClassVisitor {
774 public:
CreateConflictTablesVisitor(VariableSizedHandleScope & hs)775 explicit CreateConflictTablesVisitor(VariableSizedHandleScope& hs)
776 : hs_(hs) {}
777
operator ()(ObjPtr<mirror::Class> klass)778 bool operator()(ObjPtr<mirror::Class> klass) override
779 REQUIRES_SHARED(Locks::mutator_lock_) {
780 if (Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass)) {
781 return true;
782 }
783 // Collect handles since there may be thread suspension in future EnsureInitialized.
784 to_visit_.push_back(hs_.NewHandle(klass));
785 return true;
786 }
787
FillAllIMTAndConflictTables()788 void FillAllIMTAndConflictTables() REQUIRES_SHARED(Locks::mutator_lock_) {
789 ScopedAssertNoThreadSuspension ants(__FUNCTION__);
790 for (Handle<mirror::Class> c : to_visit_) {
791 // Create the conflict tables.
792 FillIMTAndConflictTables(c.Get());
793 }
794 }
795
796 private:
FillIMTAndConflictTables(ObjPtr<mirror::Class> klass)797 void FillIMTAndConflictTables(ObjPtr<mirror::Class> klass)
798 REQUIRES_SHARED(Locks::mutator_lock_) {
799 if (!klass->ShouldHaveImt()) {
800 return;
801 }
802 if (visited_classes_.find(klass.Ptr()) != visited_classes_.end()) {
803 return;
804 }
805 if (klass->HasSuperClass()) {
806 FillIMTAndConflictTables(klass->GetSuperClass());
807 }
808 if (!klass->IsTemp()) {
809 Runtime::Current()->GetClassLinker()->FillIMTAndConflictTables(klass);
810 }
811 visited_classes_.insert(klass.Ptr());
812 }
813
814 VariableSizedHandleScope& hs_;
815 std::vector<Handle<mirror::Class>> to_visit_;
816 HashSet<mirror::Class*> visited_classes_;
817 };
818
PreCompile(jobject class_loader,const std::vector<const DexFile * > & dex_files,TimingLogger * timings,HashSet<std::string> * image_classes)819 void CompilerDriver::PreCompile(jobject class_loader,
820 const std::vector<const DexFile*>& dex_files,
821 TimingLogger* timings,
822 /*inout*/ HashSet<std::string>* image_classes) {
823 CheckThreadPools();
824
825 VLOG(compiler) << "Before precompile " << GetMemoryUsageString(false);
826
827 // Precompile:
828 // 1) Load image classes.
829 // 2) Resolve all classes.
830 // 3) For deterministic boot image, resolve strings for const-string instructions.
831 // 4) Attempt to verify all classes.
832 // 5) Attempt to initialize image classes, and trivially initialized classes.
833 // 6) Update the set of image classes.
834 // 7) For deterministic boot image, initialize bitstrings for type checking.
835
836 LoadImageClasses(timings, class_loader, image_classes);
837 VLOG(compiler) << "LoadImageClasses: " << GetMemoryUsageString(false);
838
839 if (compiler_options_->AssumeClassesAreVerified()) {
840 VLOG(compiler) << "Verify none mode specified, skipping verification.";
841 SetVerified(class_loader, dex_files, timings);
842 } else {
843 DCHECK(compiler_options_->IsVerificationEnabled());
844
845 if (compiler_options_->IsAnyCompilationEnabled()) {
846 // Avoid adding the dex files in the case where we aren't going to add compiled methods.
847 // This reduces RAM usage for this case.
848 for (const DexFile* dex_file : dex_files) {
849 // Can be already inserted. This happens for gtests.
850 if (!compiled_methods_.HaveDexFile(dex_file)) {
851 compiled_methods_.AddDexFile(dex_file);
852 }
853 }
854 }
855
856 // Resolve eagerly for compilations always, and for verifications only if we are running with
857 // multiple threads.
858 const bool should_resolve_eagerly =
859 compiler_options_->IsAnyCompilationEnabled() ||
860 (!GetCompilerOptions().IsForceDeterminism() && parallel_thread_count_ > 1);
861 if (should_resolve_eagerly) {
862 Resolve(class_loader, dex_files, timings);
863 VLOG(compiler) << "Resolve: " << GetMemoryUsageString(false);
864 }
865
866 Verify(class_loader, dex_files, timings);
867 VLOG(compiler) << "Verify: " << GetMemoryUsageString(false);
868
869 if (GetCompilerOptions().IsForceDeterminism() &&
870 (GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension())) {
871 // Resolve strings from const-string. Do this now to have a deterministic image.
872 ResolveConstStrings(dex_files, /*only_startup_strings=*/ false, timings);
873 VLOG(compiler) << "Resolve const-strings: " << GetMemoryUsageString(false);
874 } else if (GetCompilerOptions().ResolveStartupConstStrings()) {
875 ResolveConstStrings(dex_files, /*only_startup_strings=*/ true, timings);
876 }
877
878 if (had_hard_verifier_failure_ && GetCompilerOptions().AbortOnHardVerifierFailure()) {
879 // Avoid dumping threads. Even if we shut down the thread pools, there will still be three
880 // instances of this thread's stack.
881 LOG(FATAL_WITHOUT_ABORT) << "Had a hard failure verifying all classes, and was asked to abort "
882 << "in such situations. Please check the log.";
883 _exit(1);
884 } else if (number_of_soft_verifier_failures_ > 0 &&
885 GetCompilerOptions().AbortOnSoftVerifierFailure()) {
886 LOG(FATAL_WITHOUT_ABORT) << "Had " << number_of_soft_verifier_failures_ << " soft failure(s) "
887 << "verifying all classes, and was asked to abort in such situations. "
888 << "Please check the log.";
889 _exit(1);
890 }
891
892 if (GetCompilerOptions().IsAppImage() && had_hard_verifier_failure_) {
893 // Prune erroneous classes and classes that depend on them.
894 UpdateImageClasses(timings, image_classes);
895 VLOG(compiler) << "verify/UpdateImageClasses: " << GetMemoryUsageString(false);
896 }
897 }
898
899 if (GetCompilerOptions().IsGeneratingImage()) {
900 // We can only initialize classes when their verification bit is set.
901 if (compiler_options_->AssumeClassesAreVerified() ||
902 compiler_options_->IsVerificationEnabled()) {
903 if (kIsDebugBuild) {
904 EnsureVerifiedOrVerifyAtRuntime(class_loader, dex_files);
905 }
906 InitializeClasses(class_loader, dex_files, timings);
907 VLOG(compiler) << "InitializeClasses: " << GetMemoryUsageString(false);
908 }
909 {
910 // Create conflict tables, as the runtime expects boot image classes to
911 // always have their conflict tables filled.
912 ScopedObjectAccess soa(Thread::Current());
913 VariableSizedHandleScope hs(soa.Self());
914 CreateConflictTablesVisitor visitor(hs);
915 Runtime::Current()->GetClassLinker()->VisitClassesWithoutClassesLock(&visitor);
916 visitor.FillAllIMTAndConflictTables();
917 }
918
919 if (GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension()) {
920 UpdateImageClasses(timings, image_classes);
921 VLOG(compiler) << "UpdateImageClasses: " << GetMemoryUsageString(false);
922 }
923
924 if (kBitstringSubtypeCheckEnabled &&
925 GetCompilerOptions().IsForceDeterminism() && GetCompilerOptions().IsBootImage()) {
926 // Initialize type check bit string used by check-cast and instanceof.
927 // Do this now to have a deterministic image.
928 // Note: This is done after UpdateImageClasses() at it relies on the image
929 // classes to be final.
930 InitializeTypeCheckBitstrings(this, dex_files, timings);
931 }
932 }
933 }
934
935 class ResolveCatchBlockExceptionsClassVisitor : public ClassVisitor {
936 public:
ResolveCatchBlockExceptionsClassVisitor(Thread * self)937 explicit ResolveCatchBlockExceptionsClassVisitor(Thread* self)
938 : hs_(self),
939 dex_file_records_(),
940 unprocessed_classes_(),
941 exception_types_to_resolve_(),
942 boot_images_start_(Runtime::Current()->GetHeap()->GetBootImagesStartAddress()),
943 boot_images_size_(Runtime::Current()->GetHeap()->GetBootImagesSize()) {}
944
operator ()(ObjPtr<mirror::Class> c)945 bool operator()(ObjPtr<mirror::Class> c) override REQUIRES_SHARED(Locks::mutator_lock_) {
946 // Filter out classes from boot images we're compiling against.
947 // These have been processed when we compiled those boot images.
948 if (reinterpret_cast32<uint32_t>(c.Ptr()) - boot_images_start_ < boot_images_size_) {
949 DCHECK(Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(c));
950 return true;
951 }
952 // Filter out classes without methods.
953 // These include primitive types and array types which have no dex file.
954 if (c->GetMethodsPtr() == nullptr) {
955 return true;
956 }
957 auto it = dex_file_records_.find(&c->GetDexFile());
958 if (it != dex_file_records_.end()) {
959 DexFileRecord& record = it->second;
960 DCHECK_EQ(c->GetDexCache(), record.GetDexCache().Get());
961 DCHECK_EQ(c->GetClassLoader(), record.GetClassLoader().Get());
962 if (record.IsProcessedClass(c)) {
963 return true;
964 }
965 }
966 unprocessed_classes_.push_back(c);
967 return true;
968 }
969
FindAndResolveExceptionTypes(Thread * self,ClassLinker * class_linker)970 void FindAndResolveExceptionTypes(Thread* self, ClassLinker* class_linker)
971 REQUIRES_SHARED(Locks::mutator_lock_) {
972 // If we try to resolve any exception types, we need to repeat the process.
973 // Even if we failed to resolve an exception type, we could have resolved its supertype
974 // or some implemented interfaces as a side-effect (the exception type could implement
975 // another unresolved interface) and we need to visit methods of such new resolved
976 // classes as they shall be recorded as image classes.
977 while (FindExceptionTypesToResolve(class_linker)) {
978 ResolveExceptionTypes(self, class_linker);
979 }
980 }
981
982 private:
983 class DexFileRecord {
984 public:
DexFileRecord(Handle<mirror::DexCache> dex_cache,Handle<mirror::ClassLoader> class_loader)985 DexFileRecord(Handle<mirror::DexCache> dex_cache, Handle<mirror::ClassLoader> class_loader)
986 REQUIRES_SHARED(Locks::mutator_lock_)
987 : dex_cache_(dex_cache),
988 class_loader_(class_loader),
989 processed_classes_(/*start_bits=*/ dex_cache->GetDexFile()->NumClassDefs(),
990 /*expandable=*/ false,
991 Allocator::GetCallocAllocator()),
992 processed_exception_types_(/*start_bits=*/ dex_cache->GetDexFile()->NumTypeIds(),
993 /*expandable=*/ false,
994 Allocator::GetCallocAllocator()) {}
995
GetDexCache()996 Handle<mirror::DexCache> GetDexCache() {
997 return dex_cache_;
998 }
999
GetClassLoader()1000 Handle<mirror::ClassLoader> GetClassLoader() {
1001 return class_loader_;
1002 }
1003
IsProcessedClass(ObjPtr<mirror::Class> c)1004 bool IsProcessedClass(ObjPtr<mirror::Class> c) REQUIRES_SHARED(Locks::mutator_lock_) {
1005 DCHECK_LT(c->GetDexClassDefIndex(), dex_cache_->GetDexFile()->NumClassDefs());
1006 return processed_classes_.IsBitSet(c->GetDexClassDefIndex());
1007 }
1008
MarkProcessedClass(ObjPtr<mirror::Class> c)1009 void MarkProcessedClass(ObjPtr<mirror::Class> c) REQUIRES_SHARED(Locks::mutator_lock_) {
1010 DCHECK_LT(c->GetDexClassDefIndex(), dex_cache_->GetDexFile()->NumClassDefs());
1011 processed_classes_.SetBit(c->GetDexClassDefIndex());
1012 }
1013
IsProcessedExceptionType(dex::TypeIndex type_idx)1014 bool IsProcessedExceptionType(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_) {
1015 DCHECK_LT(type_idx.index_, dex_cache_->GetDexFile()->NumTypeIds());
1016 return processed_exception_types_.IsBitSet(type_idx.index_);
1017 }
1018
MarkProcessedExceptionType(dex::TypeIndex type_idx)1019 void MarkProcessedExceptionType(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_) {
1020 DCHECK_LT(type_idx.index_, dex_cache_->GetDexFile()->NumTypeIds());
1021 processed_exception_types_.SetBit(type_idx.index_);
1022 }
1023
1024 private:
1025 Handle<mirror::DexCache> dex_cache_;
1026 Handle<mirror::ClassLoader> class_loader_;
1027 BitVector processed_classes_;
1028 BitVector processed_exception_types_;
1029 };
1030
1031 struct ExceptionTypeReference {
1032 dex::TypeIndex exception_type_idx;
1033 Handle<mirror::DexCache> dex_cache;
1034 Handle<mirror::ClassLoader> class_loader;
1035 };
1036
1037 bool FindExceptionTypesToResolve(ClassLinker* class_linker)
1038 REQUIRES_SHARED(Locks::mutator_lock_);
1039
ResolveExceptionTypes(Thread * self,ClassLinker * class_linker)1040 void ResolveExceptionTypes(Thread* self, ClassLinker* class_linker)
1041 REQUIRES_SHARED(Locks::mutator_lock_) {
1042 DCHECK(!exception_types_to_resolve_.empty());
1043 for (auto [exception_type_idx, dex_cache, class_loader] : exception_types_to_resolve_) {
1044 ObjPtr<mirror::Class> exception_class =
1045 class_linker->ResolveType(exception_type_idx, dex_cache, class_loader);
1046 if (exception_class == nullptr) {
1047 VLOG(compiler) << "Failed to resolve exception class "
1048 << dex_cache->GetDexFile()->GetTypeDescriptorView(exception_type_idx);
1049 self->ClearException();
1050 } else {
1051 DCHECK(GetClassRoot<mirror::Throwable>(class_linker)->IsAssignableFrom(exception_class));
1052 }
1053 }
1054 exception_types_to_resolve_.clear();
1055 }
1056
1057 VariableSizedHandleScope hs_;
1058 SafeMap<const DexFile*, DexFileRecord> dex_file_records_;
1059 std::vector<ObjPtr<mirror::Class>> unprocessed_classes_;
1060 std::vector<ExceptionTypeReference> exception_types_to_resolve_;
1061 const uint32_t boot_images_start_;
1062 const uint32_t boot_images_size_;
1063 };
1064
FindExceptionTypesToResolve(ClassLinker * class_linker)1065 bool ResolveCatchBlockExceptionsClassVisitor::FindExceptionTypesToResolve(
1066 ClassLinker* class_linker) {
1067 // Thread suspension is not allowed while the `ResolveCatchBlockExceptionsClassVisitor`
1068 // is using a `std::vector<ObjPtr<mirror::Class>>`.
1069 ScopedAssertNoThreadSuspension ants(__FUNCTION__);
1070 DCHECK(unprocessed_classes_.empty());
1071 class_linker->VisitClasses(this);
1072 if (unprocessed_classes_.empty()) {
1073 return false;
1074 }
1075
1076 DCHECK(exception_types_to_resolve_.empty());
1077 const PointerSize pointer_size = class_linker->GetImagePointerSize();
1078 for (ObjPtr<mirror::Class> klass : unprocessed_classes_) {
1079 const DexFile* dex_file = &klass->GetDexFile();
1080 DexFileRecord& record = dex_file_records_.GetOrCreate(
1081 dex_file,
1082 // NO_THREAD_SAFETY_ANALYSIS: Called from unannotated `SafeMap<>::GetOrCreate()`.
1083 [&]() NO_THREAD_SAFETY_ANALYSIS {
1084 return DexFileRecord(hs_.NewHandle(klass->GetDexCache()),
1085 hs_.NewHandle(klass->GetClassLoader()));
1086 });
1087 DCHECK_EQ(klass->GetDexCache(), record.GetDexCache().Get());
1088 DCHECK_EQ(klass->GetClassLoader(), record.GetClassLoader().Get());
1089 DCHECK(!record.IsProcessedClass(klass));
1090 record.MarkProcessedClass(klass);
1091 for (ArtMethod& method : klass->GetDeclaredMethods(pointer_size)) {
1092 if (method.GetCodeItem() == nullptr) {
1093 continue; // native or abstract method
1094 }
1095 CodeItemDataAccessor accessor(method.DexInstructionData());
1096 if (accessor.TriesSize() == 0) {
1097 continue; // nothing to process
1098 }
1099 const uint8_t* handlers_ptr = accessor.GetCatchHandlerData();
1100 size_t num_encoded_catch_handlers = DecodeUnsignedLeb128(&handlers_ptr);
1101 for (size_t i = 0; i < num_encoded_catch_handlers; i++) {
1102 CatchHandlerIterator iterator(handlers_ptr);
1103 for (; iterator.HasNext(); iterator.Next()) {
1104 dex::TypeIndex exception_type_idx = iterator.GetHandlerTypeIndex();
1105 if (exception_type_idx.IsValid() &&
1106 !record.IsProcessedExceptionType(exception_type_idx)) {
1107 record.MarkProcessedExceptionType(exception_type_idx);
1108 // Add to set of types to resolve if not resolved yet.
1109 ObjPtr<mirror::Class> type = class_linker->LookupResolvedType(
1110 exception_type_idx, record.GetDexCache().Get(), record.GetClassLoader().Get());
1111 if (type == nullptr) {
1112 exception_types_to_resolve_.push_back(
1113 {exception_type_idx, record.GetDexCache(), record.GetClassLoader()});
1114 }
1115 }
1116 }
1117 handlers_ptr = iterator.EndDataPointer();
1118 }
1119 }
1120 }
1121 unprocessed_classes_.clear();
1122 return !exception_types_to_resolve_.empty();
1123 }
1124
CanIncludeInCurrentImage(ObjPtr<mirror::Class> klass)1125 static inline bool CanIncludeInCurrentImage(ObjPtr<mirror::Class> klass)
1126 REQUIRES_SHARED(Locks::mutator_lock_) {
1127 DCHECK(klass != nullptr);
1128 gc::Heap* heap = Runtime::Current()->GetHeap();
1129 if (heap->GetBootImageSpaces().empty()) {
1130 return true; // We can include any class when compiling the primary boot image.
1131 }
1132 if (heap->ObjectIsInBootImageSpace(klass)) {
1133 return false; // Already included in the boot image we're compiling against.
1134 }
1135 return AotClassLinker::CanReferenceInBootImageExtensionOrAppImage(klass, heap);
1136 }
1137
1138 class RecordImageClassesVisitor : public ClassVisitor {
1139 public:
RecordImageClassesVisitor(HashSet<std::string> * image_classes)1140 explicit RecordImageClassesVisitor(HashSet<std::string>* image_classes)
1141 : image_classes_(image_classes) {}
1142
operator ()(ObjPtr<mirror::Class> klass)1143 bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) {
1144 bool resolved = klass->IsResolved();
1145 DCHECK(resolved || klass->IsErroneousUnresolved());
1146 bool can_include_in_image = LIKELY(resolved) && CanIncludeInCurrentImage(klass);
1147 std::string temp;
1148 std::string_view descriptor(klass->GetDescriptor(&temp));
1149 if (can_include_in_image) {
1150 image_classes_->insert(std::string(descriptor)); // Does nothing if already present.
1151 } else {
1152 auto it = image_classes_->find(descriptor);
1153 if (it != image_classes_->end()) {
1154 VLOG(compiler) << "Removing " << (resolved ? "unsuitable" : "unresolved")
1155 << " class from image classes: " << descriptor;
1156 image_classes_->erase(it);
1157 }
1158 }
1159 return true;
1160 }
1161
1162 private:
1163 HashSet<std::string>* const image_classes_;
1164 };
1165
1166 // Verify that classes which contain intrinsics methods are in the list of image classes.
VerifyClassesContainingIntrinsicsAreImageClasses(HashSet<std::string> * image_classes)1167 static void VerifyClassesContainingIntrinsicsAreImageClasses(HashSet<std::string>* image_classes) {
1168 #define CHECK_INTRINSIC_OWNER_CLASS(_, __, ___, ____, _____, ClassName, ______, _______) \
1169 CHECK(image_classes->find(std::string_view(ClassName)) != image_classes->end());
1170
1171 ART_INTRINSICS_LIST(CHECK_INTRINSIC_OWNER_CLASS)
1172 #undef CHECK_INTRINSIC_OWNER_CLASS
1173 }
1174
1175 // We need to put classes required by app class loaders to the boot image,
1176 // otherwise we would not be able to store app class loaders in app images.
AddClassLoaderClasses(HashSet<std::string> * image_classes)1177 static void AddClassLoaderClasses(/* out */ HashSet<std::string>* image_classes) {
1178 ScopedObjectAccess soa(Thread::Current());
1179 // Well known classes have been loaded and shall be added to image classes
1180 // by the `RecordImageClassesVisitor`. However, there are fields with array
1181 // types which we need to add to the image classes explicitly.
1182 ArtField* class_loader_array_fields[] = {
1183 WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoaders,
1184 // BaseDexClassLoader.sharedLibraryLoadersAfter has the same array type as above.
1185 WellKnownClasses::dalvik_system_DexPathList_dexElements,
1186 };
1187 for (ArtField* field : class_loader_array_fields) {
1188 const char* field_type_descriptor = field->GetTypeDescriptor();
1189 DCHECK_EQ(field_type_descriptor[0], '[');
1190 image_classes->insert(field_type_descriptor);
1191 }
1192 }
1193
VerifyClassLoaderClassesAreImageClasses(HashSet<std::string> * image_classes)1194 static void VerifyClassLoaderClassesAreImageClasses(/* out */ HashSet<std::string>* image_classes) {
1195 ScopedObjectAccess soa(Thread::Current());
1196 ScopedAssertNoThreadSuspension sants(__FUNCTION__);
1197 ObjPtr<mirror::Class> class_loader_classes[] = {
1198 WellKnownClasses::dalvik_system_BaseDexClassLoader.Get(),
1199 WellKnownClasses::dalvik_system_DelegateLastClassLoader.Get(),
1200 WellKnownClasses::dalvik_system_DexClassLoader.Get(),
1201 WellKnownClasses::dalvik_system_DexFile.Get(),
1202 WellKnownClasses::dalvik_system_DexPathList.Get(),
1203 WellKnownClasses::dalvik_system_DexPathList__Element.Get(),
1204 WellKnownClasses::dalvik_system_InMemoryDexClassLoader.Get(),
1205 WellKnownClasses::dalvik_system_PathClassLoader.Get(),
1206 WellKnownClasses::java_lang_BootClassLoader.Get(),
1207 WellKnownClasses::java_lang_ClassLoader.Get(),
1208 };
1209 for (ObjPtr<mirror::Class> klass : class_loader_classes) {
1210 std::string temp;
1211 std::string_view descriptor = klass->GetDescriptor(&temp);
1212 CHECK(image_classes->find(descriptor) != image_classes->end());
1213 }
1214 ArtField* class_loader_fields[] = {
1215 WellKnownClasses::dalvik_system_BaseDexClassLoader_pathList,
1216 WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoaders,
1217 WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoadersAfter,
1218 WellKnownClasses::dalvik_system_DexFile_cookie,
1219 WellKnownClasses::dalvik_system_DexFile_fileName,
1220 WellKnownClasses::dalvik_system_DexPathList_dexElements,
1221 WellKnownClasses::dalvik_system_DexPathList__Element_dexFile,
1222 WellKnownClasses::java_lang_ClassLoader_parent,
1223 };
1224 for (ArtField* field : class_loader_fields) {
1225 std::string_view field_type_descriptor = field->GetTypeDescriptor();
1226 CHECK(image_classes->find(field_type_descriptor) != image_classes->end());
1227 }
1228 }
1229
1230 // Make a list of descriptors for classes to include in the image
LoadImageClasses(TimingLogger * timings,jobject class_loader,HashSet<std::string> * image_classes)1231 void CompilerDriver::LoadImageClasses(TimingLogger* timings,
1232 jobject class_loader,
1233 /*inout*/ HashSet<std::string>* image_classes) {
1234 CHECK(timings != nullptr);
1235 if (!GetCompilerOptions().IsGeneratingImage()) {
1236 return;
1237 }
1238
1239 TimingLogger::ScopedTiming t("LoadImageClasses", timings);
1240
1241 if (GetCompilerOptions().IsBootImage()) {
1242 // Image classes of intrinsics are loaded and shall be added
1243 // to image classes by the `RecordImageClassesVisitor`.
1244 // Add classes needed for storing class loaders in app images.
1245 AddClassLoaderClasses(image_classes);
1246 }
1247
1248 // Make a first pass to load all classes explicitly listed in the profile.
1249 Thread* self = Thread::Current();
1250 ScopedObjectAccess soa(self);
1251 StackHandleScope<2u> hs(self);
1252 Handle<mirror::ClassLoader> loader = hs.NewHandle(soa.Decode<mirror::ClassLoader>(class_loader));
1253 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
1254 CHECK(image_classes != nullptr);
1255 for (auto it = image_classes->begin(), end = image_classes->end(); it != end;) {
1256 const std::string& descriptor(*it);
1257 ObjPtr<mirror::Class> klass =
1258 class_linker->FindClass(self, descriptor.c_str(), descriptor.length(), loader);
1259 if (klass == nullptr) {
1260 VLOG(compiler) << "Failed to find class " << descriptor;
1261 it = image_classes->erase(it); // May cause some descriptors to be revisited.
1262 self->ClearException();
1263 } else {
1264 ++it;
1265 }
1266 }
1267
1268 // Resolve exception classes referenced by the loaded classes. The catch logic assumes
1269 // exceptions are resolved by the verifier when there is a catch block in an interested method.
1270 // Do this here so that exception classes appear to have been specified image classes.
1271 ResolveCatchBlockExceptionsClassVisitor resolve_exception_classes_visitor(self);
1272 resolve_exception_classes_visitor.FindAndResolveExceptionTypes(self, class_linker);
1273
1274 // We walk the roots looking for classes so that we'll pick up the
1275 // above classes plus any classes they depend on such super
1276 // classes, interfaces, and the required ClassLinker roots.
1277 RecordImageClassesVisitor visitor(image_classes);
1278 class_linker->VisitClasses(&visitor);
1279
1280 if (kIsDebugBuild && GetCompilerOptions().IsBootImage()) {
1281 VerifyClassesContainingIntrinsicsAreImageClasses(image_classes);
1282 VerifyClassLoaderClassesAreImageClasses(image_classes);
1283 }
1284
1285 if (GetCompilerOptions().IsBootImage()) {
1286 CHECK(!image_classes->empty());
1287 }
1288 }
1289
MaybeAddToImageClasses(Thread * self,ObjPtr<mirror::Class> klass,HashSet<std::string> * image_classes)1290 static void MaybeAddToImageClasses(Thread* self,
1291 ObjPtr<mirror::Class> klass,
1292 HashSet<std::string>* image_classes)
1293 REQUIRES_SHARED(Locks::mutator_lock_) {
1294 DCHECK_EQ(self, Thread::Current());
1295 DCHECK(klass->IsResolved());
1296 Runtime* runtime = Runtime::Current();
1297 gc::Heap* heap = runtime->GetHeap();
1298 if (heap->ObjectIsInBootImageSpace(klass)) {
1299 // We're compiling a boot image extension and the class is already
1300 // in the boot image we're compiling against.
1301 return;
1302 }
1303 const PointerSize pointer_size = runtime->GetClassLinker()->GetImagePointerSize();
1304 std::string temp;
1305 while (!klass->IsObjectClass()) {
1306 const char* descriptor = klass->GetDescriptor(&temp);
1307 if (image_classes->find(std::string_view(descriptor)) != image_classes->end()) {
1308 break; // Previously inserted.
1309 }
1310 image_classes->insert(descriptor);
1311 VLOG(compiler) << "Adding " << descriptor << " to image classes";
1312 for (size_t i = 0, num_interfaces = klass->NumDirectInterfaces(); i != num_interfaces; ++i) {
1313 ObjPtr<mirror::Class> interface = klass->GetDirectInterface(i);
1314 DCHECK(interface != nullptr);
1315 MaybeAddToImageClasses(self, interface, image_classes);
1316 }
1317 for (auto& m : klass->GetVirtualMethods(pointer_size)) {
1318 MaybeAddToImageClasses(self, m.GetDeclaringClass(), image_classes);
1319 }
1320 if (klass->IsArrayClass()) {
1321 MaybeAddToImageClasses(self, klass->GetComponentType(), image_classes);
1322 }
1323 klass = klass->GetSuperClass();
1324 }
1325 }
1326
1327 // Keeps all the data for the update together. Also doubles as the reference visitor.
1328 // Note: we can use object pointers because we suspend all threads.
1329 class ClinitImageUpdate {
1330 public:
ClinitImageUpdate(HashSet<std::string> * image_class_descriptors,Thread * self)1331 ClinitImageUpdate(HashSet<std::string>* image_class_descriptors,
1332 Thread* self) REQUIRES_SHARED(Locks::mutator_lock_)
1333 : hs_(self),
1334 image_class_descriptors_(image_class_descriptors),
1335 self_(self) {
1336 CHECK(image_class_descriptors != nullptr);
1337
1338 // Make sure nobody interferes with us.
1339 old_cause_ = self->StartAssertNoThreadSuspension("Boot image closure");
1340 }
1341
~ClinitImageUpdate()1342 ~ClinitImageUpdate() {
1343 // Allow others to suspend again.
1344 self_->EndAssertNoThreadSuspension(old_cause_);
1345 }
1346
1347 // Visitor for VisitReferences.
operator ()(ObjPtr<mirror::Object> object,MemberOffset field_offset,bool is_static) const1348 void operator()(ObjPtr<mirror::Object> object,
1349 MemberOffset field_offset,
1350 [[maybe_unused]] bool is_static) const REQUIRES_SHARED(Locks::mutator_lock_) {
1351 mirror::Object* ref = object->GetFieldObject<mirror::Object>(field_offset);
1352 if (ref != nullptr) {
1353 VisitClinitClassesObject(ref);
1354 }
1355 }
1356
1357 // java.lang.ref.Reference visitor for VisitReferences.
operator ()(ObjPtr<mirror::Class> klass,ObjPtr<mirror::Reference> ref) const1358 void operator()([[maybe_unused]] ObjPtr<mirror::Class> klass,
1359 [[maybe_unused]] ObjPtr<mirror::Reference> ref) const {}
1360
1361 // Ignore class native roots.
VisitRootIfNonNull(mirror::CompressedReference<mirror::Object> * root) const1362 void VisitRootIfNonNull(
1363 [[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {}
VisitRoot(mirror::CompressedReference<mirror::Object> * root) const1364 void VisitRoot([[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {}
1365
Walk()1366 void Walk() REQUIRES_SHARED(Locks::mutator_lock_) {
1367 // Find all the already-marked classes.
1368 WriterMutexLock mu(self_, *Locks::heap_bitmap_lock_);
1369 FindImageClassesVisitor visitor(this);
1370 Runtime::Current()->GetClassLinker()->VisitClasses(&visitor);
1371
1372 // Use the initial classes as roots for a search.
1373 for (Handle<mirror::Class> klass_root : image_classes_) {
1374 VisitClinitClassesObject(klass_root.Get());
1375 }
1376 ScopedAssertNoThreadSuspension ants(__FUNCTION__);
1377 for (Handle<mirror::Class> h_klass : to_insert_) {
1378 MaybeAddToImageClasses(self_, h_klass.Get(), image_class_descriptors_);
1379 }
1380 }
1381
1382 private:
1383 class FindImageClassesVisitor : public ClassVisitor {
1384 public:
FindImageClassesVisitor(ClinitImageUpdate * data)1385 explicit FindImageClassesVisitor(ClinitImageUpdate* data)
1386 : data_(data) {}
1387
operator ()(ObjPtr<mirror::Class> klass)1388 bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) {
1389 bool resolved = klass->IsResolved();
1390 DCHECK(resolved || klass->IsErroneousUnresolved());
1391 bool can_include_in_image =
1392 LIKELY(resolved) && LIKELY(!klass->IsErroneous()) && CanIncludeInCurrentImage(klass);
1393 std::string temp;
1394 std::string_view descriptor(klass->GetDescriptor(&temp));
1395 auto it = data_->image_class_descriptors_->find(descriptor);
1396 if (it != data_->image_class_descriptors_->end()) {
1397 if (can_include_in_image) {
1398 data_->image_classes_.push_back(data_->hs_.NewHandle(klass));
1399 } else {
1400 VLOG(compiler) << "Removing " << (resolved ? "unsuitable" : "unresolved")
1401 << " class from image classes: " << descriptor;
1402 data_->image_class_descriptors_->erase(it);
1403 }
1404 } else if (can_include_in_image) {
1405 // Check whether the class is initialized and has a clinit or static fields.
1406 // Such classes must be kept too.
1407 if (klass->IsInitialized()) {
1408 PointerSize pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
1409 if (klass->FindClassInitializer(pointer_size) != nullptr ||
1410 klass->NumStaticFields() != 0) {
1411 DCHECK(!Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass->GetDexCache()))
1412 << klass->PrettyDescriptor();
1413 data_->image_classes_.push_back(data_->hs_.NewHandle(klass));
1414 }
1415 }
1416 }
1417 return true;
1418 }
1419
1420 private:
1421 ClinitImageUpdate* const data_;
1422 };
1423
VisitClinitClassesObject(mirror::Object * object) const1424 void VisitClinitClassesObject(mirror::Object* object) const
1425 REQUIRES_SHARED(Locks::mutator_lock_) {
1426 DCHECK(object != nullptr);
1427 if (marked_objects_.find(object) != marked_objects_.end()) {
1428 // Already processed.
1429 return;
1430 }
1431
1432 // Mark it.
1433 marked_objects_.insert(object);
1434
1435 if (object->IsClass()) {
1436 // Add to the TODO list since MaybeAddToImageClasses may cause thread suspension. Thread
1437 // suspensionb is not safe to do in VisitObjects or VisitReferences.
1438 to_insert_.push_back(hs_.NewHandle(object->AsClass()));
1439 } else {
1440 // Else visit the object's class.
1441 VisitClinitClassesObject(object->GetClass());
1442 }
1443
1444 // If it is not a DexCache, visit all references.
1445 if (!object->IsDexCache()) {
1446 object->VisitReferences(*this, *this);
1447 }
1448 }
1449
1450 mutable VariableSizedHandleScope hs_;
1451 mutable std::vector<Handle<mirror::Class>> to_insert_;
1452 mutable HashSet<mirror::Object*> marked_objects_;
1453 HashSet<std::string>* const image_class_descriptors_;
1454 std::vector<Handle<mirror::Class>> image_classes_;
1455 Thread* const self_;
1456 const char* old_cause_;
1457
1458 DISALLOW_COPY_AND_ASSIGN(ClinitImageUpdate);
1459 };
1460
UpdateImageClasses(TimingLogger * timings,HashSet<std::string> * image_classes)1461 void CompilerDriver::UpdateImageClasses(TimingLogger* timings,
1462 /*inout*/ HashSet<std::string>* image_classes) {
1463 DCHECK(GetCompilerOptions().IsGeneratingImage());
1464 TimingLogger::ScopedTiming t("UpdateImageClasses", timings);
1465
1466 // Suspend all threads.
1467 ScopedSuspendAll ssa(__FUNCTION__);
1468
1469 ClinitImageUpdate update(image_classes, Thread::Current());
1470
1471 // Do the marking.
1472 update.Walk();
1473 }
1474
ProcessedInstanceField(bool resolved)1475 void CompilerDriver::ProcessedInstanceField(bool resolved) {
1476 if (!resolved) {
1477 stats_->UnresolvedInstanceField();
1478 } else {
1479 stats_->ResolvedInstanceField();
1480 }
1481 }
1482
ProcessedStaticField(bool resolved,bool local)1483 void CompilerDriver::ProcessedStaticField(bool resolved, bool local) {
1484 if (!resolved) {
1485 stats_->UnresolvedStaticField();
1486 } else if (local) {
1487 stats_->ResolvedLocalStaticField();
1488 } else {
1489 stats_->ResolvedStaticField();
1490 }
1491 }
1492
ComputeInstanceFieldInfo(uint32_t field_idx,const DexCompilationUnit * mUnit,bool is_put,const ScopedObjectAccess & soa)1493 ArtField* CompilerDriver::ComputeInstanceFieldInfo(uint32_t field_idx,
1494 const DexCompilationUnit* mUnit,
1495 bool is_put,
1496 const ScopedObjectAccess& soa) {
1497 // Try to resolve the field and compiling method's class.
1498 ArtField* resolved_field;
1499 ObjPtr<mirror::Class> referrer_class;
1500 Handle<mirror::DexCache> dex_cache(mUnit->GetDexCache());
1501 {
1502 Handle<mirror::ClassLoader> class_loader = mUnit->GetClassLoader();
1503 resolved_field = ResolveField(soa, dex_cache, class_loader, field_idx, /* is_static= */ false);
1504 referrer_class = resolved_field != nullptr
1505 ? ResolveCompilingMethodsClass(soa, dex_cache, class_loader, mUnit) : nullptr;
1506 }
1507 bool can_link = false;
1508 if (resolved_field != nullptr && referrer_class != nullptr) {
1509 std::pair<bool, bool> fast_path = IsFastInstanceField(
1510 dex_cache.Get(), referrer_class, resolved_field, field_idx);
1511 can_link = is_put ? fast_path.second : fast_path.first;
1512 }
1513 ProcessedInstanceField(can_link);
1514 return can_link ? resolved_field : nullptr;
1515 }
1516
ComputeInstanceFieldInfo(uint32_t field_idx,const DexCompilationUnit * mUnit,bool is_put,MemberOffset * field_offset,bool * is_volatile)1517 bool CompilerDriver::ComputeInstanceFieldInfo(uint32_t field_idx, const DexCompilationUnit* mUnit,
1518 bool is_put, MemberOffset* field_offset,
1519 bool* is_volatile) {
1520 ScopedObjectAccess soa(Thread::Current());
1521 ArtField* resolved_field = ComputeInstanceFieldInfo(field_idx, mUnit, is_put, soa);
1522
1523 if (resolved_field == nullptr) {
1524 // Conservative defaults.
1525 *is_volatile = true;
1526 *field_offset = MemberOffset(static_cast<size_t>(-1));
1527 return false;
1528 } else {
1529 *is_volatile = resolved_field->IsVolatile();
1530 *field_offset = resolved_field->GetOffset();
1531 return true;
1532 }
1533 }
1534
1535 class CompilationVisitor {
1536 public:
~CompilationVisitor()1537 virtual ~CompilationVisitor() {}
1538 virtual void Visit(size_t index) = 0;
1539 };
1540
1541 class ParallelCompilationManager {
1542 public:
ParallelCompilationManager(ClassLinker * class_linker,jobject class_loader,CompilerDriver * compiler,const DexFile * dex_file,ThreadPool * thread_pool)1543 ParallelCompilationManager(ClassLinker* class_linker,
1544 jobject class_loader,
1545 CompilerDriver* compiler,
1546 const DexFile* dex_file,
1547 ThreadPool* thread_pool)
1548 : index_(0),
1549 class_linker_(class_linker),
1550 class_loader_(class_loader),
1551 compiler_(compiler),
1552 dex_file_(dex_file),
1553 thread_pool_(thread_pool) {}
1554
GetClassLinker() const1555 ClassLinker* GetClassLinker() const {
1556 CHECK(class_linker_ != nullptr);
1557 return class_linker_;
1558 }
1559
GetClassLoader() const1560 jobject GetClassLoader() const {
1561 return class_loader_;
1562 }
1563
GetCompiler() const1564 CompilerDriver* GetCompiler() const {
1565 CHECK(compiler_ != nullptr);
1566 return compiler_;
1567 }
1568
GetDexFile() const1569 const DexFile* GetDexFile() const {
1570 CHECK(dex_file_ != nullptr);
1571 return dex_file_;
1572 }
1573
ForAll(size_t begin,size_t end,CompilationVisitor * visitor,size_t work_units)1574 void ForAll(size_t begin, size_t end, CompilationVisitor* visitor, size_t work_units)
1575 REQUIRES(!*Locks::mutator_lock_) {
1576 ForAllLambda(begin, end, [visitor](size_t index) { visitor->Visit(index); }, work_units);
1577 }
1578
1579 template <typename Fn>
ForAllLambda(size_t begin,size_t end,Fn fn,size_t work_units)1580 void ForAllLambda(size_t begin, size_t end, Fn fn, size_t work_units)
1581 REQUIRES(!*Locks::mutator_lock_) {
1582 Thread* self = Thread::Current();
1583 self->AssertNoPendingException();
1584 CHECK_GT(work_units, 0U);
1585
1586 index_.store(begin, std::memory_order_relaxed);
1587 for (size_t i = 0; i < work_units; ++i) {
1588 thread_pool_->AddTask(self, new ForAllClosureLambda<Fn>(this, end, fn));
1589 }
1590 thread_pool_->StartWorkers(self);
1591
1592 // Ensure we're suspended while we're blocked waiting for the other threads to finish (worker
1593 // thread destructor's called below perform join).
1594 CHECK_NE(self->GetState(), ThreadState::kRunnable);
1595
1596 // Wait for all the worker threads to finish.
1597 thread_pool_->Wait(self, true, false);
1598
1599 // And stop the workers accepting jobs.
1600 thread_pool_->StopWorkers(self);
1601 }
1602
NextIndex()1603 size_t NextIndex() {
1604 return index_.fetch_add(1, std::memory_order_seq_cst);
1605 }
1606
1607 private:
1608 template <typename Fn>
1609 class ForAllClosureLambda : public Task {
1610 public:
ForAllClosureLambda(ParallelCompilationManager * manager,size_t end,Fn fn)1611 ForAllClosureLambda(ParallelCompilationManager* manager, size_t end, Fn fn)
1612 : manager_(manager),
1613 end_(end),
1614 fn_(fn) {}
1615
Run(Thread * self)1616 void Run(Thread* self) override {
1617 while (true) {
1618 const size_t index = manager_->NextIndex();
1619 if (UNLIKELY(index >= end_)) {
1620 break;
1621 }
1622 fn_(index);
1623 self->AssertNoPendingException();
1624 }
1625 }
1626
Finalize()1627 void Finalize() override {
1628 delete this;
1629 }
1630
1631 private:
1632 ParallelCompilationManager* const manager_;
1633 const size_t end_;
1634 Fn fn_;
1635 };
1636
1637 AtomicInteger index_;
1638 ClassLinker* const class_linker_;
1639 const jobject class_loader_;
1640 CompilerDriver* const compiler_;
1641 const DexFile* const dex_file_;
1642 ThreadPool* const thread_pool_;
1643
1644 DISALLOW_COPY_AND_ASSIGN(ParallelCompilationManager);
1645 };
1646
1647 // A fast version of SkipClass above if the class pointer is available
1648 // that avoids the expensive FindInClassPath search.
SkipClass(jobject class_loader,const DexFile & dex_file,ObjPtr<mirror::Class> klass)1649 static bool SkipClass(jobject class_loader, const DexFile& dex_file, ObjPtr<mirror::Class> klass)
1650 REQUIRES_SHARED(Locks::mutator_lock_) {
1651 DCHECK(klass != nullptr);
1652 const DexFile& original_dex_file = klass->GetDexFile();
1653 if (&dex_file != &original_dex_file) {
1654 if (class_loader == nullptr) {
1655 LOG(WARNING) << "Skipping class " << klass->PrettyDescriptor() << " from "
1656 << dex_file.GetLocation() << " previously found in "
1657 << original_dex_file.GetLocation();
1658 }
1659 return true;
1660 }
1661 return false;
1662 }
1663
DCheckResolveException(mirror::Throwable * exception)1664 static void DCheckResolveException(mirror::Throwable* exception)
1665 REQUIRES_SHARED(Locks::mutator_lock_) {
1666 if (!kIsDebugBuild) {
1667 return;
1668 }
1669 std::string temp;
1670 const char* descriptor = exception->GetClass()->GetDescriptor(&temp);
1671 const char* expected_exceptions[] = {
1672 "Ljava/lang/ClassFormatError;",
1673 "Ljava/lang/ClassCircularityError;",
1674 "Ljava/lang/IllegalAccessError;",
1675 "Ljava/lang/IncompatibleClassChangeError;",
1676 "Ljava/lang/InstantiationError;",
1677 "Ljava/lang/LinkageError;",
1678 "Ljava/lang/NoClassDefFoundError;",
1679 "Ljava/lang/VerifyError;",
1680 };
1681 bool found = false;
1682 for (size_t i = 0; (found == false) && (i < arraysize(expected_exceptions)); ++i) {
1683 if (strcmp(descriptor, expected_exceptions[i]) == 0) {
1684 found = true;
1685 }
1686 }
1687 if (!found) {
1688 LOG(FATAL) << "Unexpected exception " << exception->Dump();
1689 }
1690 }
1691
1692 template <bool kApp>
1693 class ResolveTypeVisitor : public CompilationVisitor {
1694 public:
ResolveTypeVisitor(const ParallelCompilationManager * manager)1695 explicit ResolveTypeVisitor(const ParallelCompilationManager* manager) : manager_(manager) {
1696 }
Visit(size_t index)1697 void Visit(size_t index) override REQUIRES(!Locks::mutator_lock_) {
1698 const DexFile& dex_file = *manager_->GetDexFile();
1699 // For boot images we resolve all referenced types, such as arrays,
1700 // whereas for applications just those with classdefs.
1701 dex::TypeIndex type_idx = kApp ? dex_file.GetClassDef(index).class_idx_ : dex::TypeIndex(index);
1702 ClassLinker* class_linker = manager_->GetClassLinker();
1703 ScopedObjectAccess soa(Thread::Current());
1704 StackHandleScope<kApp ? 4u : 2u> hs(soa.Self());
1705 Handle<mirror::ClassLoader> class_loader(
1706 hs.NewHandle(soa.Decode<mirror::ClassLoader>(manager_->GetClassLoader())));
1707 // TODO: Fix tests that require `RegisterDexFile()` and use `FindDexCache()` in all cases.
1708 Handle<mirror::DexCache> dex_cache = hs.NewHandle(
1709 kApp ? class_linker->FindDexCache(soa.Self(), dex_file)
1710 : class_linker->RegisterDexFile(dex_file, class_loader.Get()));
1711 DCHECK(dex_cache != nullptr);
1712
1713 // Resolve the class.
1714 ObjPtr<mirror::Class> klass = class_linker->ResolveType(type_idx, dex_cache, class_loader);
1715 if (klass == nullptr) {
1716 mirror::Throwable* exception = soa.Self()->GetException();
1717 DCHECK(exception != nullptr);
1718 VLOG(compiler) << "Exception during type resolution: " << exception->Dump();
1719 if (exception->GetClass() == WellKnownClasses::java_lang_OutOfMemoryError.Get()) {
1720 // There's little point continuing compilation if the heap is exhausted.
1721 // Trying to do so would also introduce non-deterministic compilation results.
1722 LOG(FATAL) << "Out of memory during type resolution for compilation";
1723 }
1724 DCheckResolveException(exception);
1725 soa.Self()->ClearException();
1726 } else {
1727 if (kApp && manager_->GetCompiler()->GetCompilerOptions().IsCheckLinkageConditions()) {
1728 Handle<mirror::Class> hklass = hs.NewHandle(klass);
1729 bool is_fatal = manager_->GetCompiler()->GetCompilerOptions().IsCrashOnLinkageViolation();
1730 Handle<mirror::ClassLoader> defining_class_loader = hs.NewHandle(hklass->GetClassLoader());
1731 if (defining_class_loader.Get() != class_loader.Get()) {
1732 // Redefinition via different ClassLoaders.
1733 // This OptStat stuff is to enable logging from the APK scanner.
1734 if (is_fatal)
1735 LOG(FATAL) << "OptStat#" << hklass->PrettyClassAndClassLoader() << ": 1";
1736 else
1737 LOG(ERROR)
1738 << "LINKAGE VIOLATION: "
1739 << hklass->PrettyClassAndClassLoader()
1740 << " was redefined";
1741 }
1742 // Check that the current class is not a subclass of java.lang.ClassLoader.
1743 if (!hklass->IsInterface() &&
1744 hklass->IsSubClass(GetClassRoot<mirror::ClassLoader>(class_linker))) {
1745 // Subclassing of java.lang.ClassLoader.
1746 // This OptStat stuff is to enable logging from the APK scanner.
1747 if (is_fatal) {
1748 LOG(FATAL) << "OptStat#" << hklass->PrettyClassAndClassLoader() << ": 1";
1749 } else {
1750 LOG(ERROR)
1751 << "LINKAGE VIOLATION: "
1752 << hklass->PrettyClassAndClassLoader()
1753 << " is a subclass of java.lang.ClassLoader";
1754 }
1755 }
1756 CHECK(hklass->IsResolved()) << hklass->PrettyClass();
1757 }
1758 }
1759 }
1760
1761 private:
1762 const ParallelCompilationManager* const manager_;
1763 };
1764
ResolveDexFile(jobject class_loader,const DexFile & dex_file,ThreadPool * thread_pool,size_t thread_count,TimingLogger * timings)1765 void CompilerDriver::ResolveDexFile(jobject class_loader,
1766 const DexFile& dex_file,
1767 ThreadPool* thread_pool,
1768 size_t thread_count,
1769 TimingLogger* timings) {
1770 ScopedTrace trace(__FUNCTION__);
1771 TimingLogger::ScopedTiming t("Resolve Types", timings);
1772 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
1773
1774 // TODO: we could resolve strings here, although the string table is largely filled with class
1775 // and method names.
1776
1777 ParallelCompilationManager context(class_linker, class_loader, this, &dex_file, thread_pool);
1778 // For boot images we resolve all referenced types, such as arrays,
1779 // whereas for applications just those with classdefs.
1780 if (GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension()) {
1781 ResolveTypeVisitor</*kApp=*/ false> visitor(&context);
1782 context.ForAll(0, dex_file.NumTypeIds(), &visitor, thread_count);
1783 } else {
1784 ResolveTypeVisitor</*kApp=*/ true> visitor(&context);
1785 context.ForAll(0, dex_file.NumClassDefs(), &visitor, thread_count);
1786 }
1787 }
1788
SetVerified(jobject class_loader,const std::vector<const DexFile * > & dex_files,TimingLogger * timings)1789 void CompilerDriver::SetVerified(jobject class_loader,
1790 const std::vector<const DexFile*>& dex_files,
1791 TimingLogger* timings) {
1792 // This can be run in parallel.
1793 for (const DexFile* dex_file : dex_files) {
1794 CHECK(dex_file != nullptr);
1795 SetVerifiedDexFile(class_loader,
1796 *dex_file,
1797 parallel_thread_pool_.get(),
1798 parallel_thread_count_,
1799 timings);
1800 }
1801 }
1802
LoadAndUpdateStatus(const ClassAccessor & accessor,ClassStatus status,Handle<mirror::ClassLoader> class_loader,Thread * self)1803 static void LoadAndUpdateStatus(const ClassAccessor& accessor,
1804 ClassStatus status,
1805 Handle<mirror::ClassLoader> class_loader,
1806 Thread* self)
1807 REQUIRES_SHARED(Locks::mutator_lock_) {
1808 StackHandleScope<1> hs(self);
1809 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
1810 Handle<mirror::Class> cls(hs.NewHandle<mirror::Class>(
1811 class_linker->FindClass(self, accessor.GetDexFile(), accessor.GetClassIdx(), class_loader)));
1812 if (cls != nullptr) {
1813 // Check that the class is resolved with the current dex file. We might get
1814 // a boot image class, or a class in a different dex file for multidex, and
1815 // we should not update the status in that case.
1816 if (&cls->GetDexFile() == &accessor.GetDexFile()) {
1817 VLOG(compiler) << "Updating class status of " << accessor.GetDescriptor() << " to " << status;
1818 ObjectLock<mirror::Class> lock(self, cls);
1819 mirror::Class::SetStatus(cls, status, self);
1820 }
1821 } else {
1822 DCHECK(self->IsExceptionPending());
1823 self->ClearException();
1824 }
1825 }
1826
FastVerify(jobject jclass_loader,const std::vector<const DexFile * > & dex_files,TimingLogger * timings)1827 bool CompilerDriver::FastVerify(jobject jclass_loader,
1828 const std::vector<const DexFile*>& dex_files,
1829 TimingLogger* timings) {
1830 CompilerCallbacks* callbacks = Runtime::Current()->GetCompilerCallbacks();
1831 verifier::VerifierDeps* verifier_deps = callbacks->GetVerifierDeps();
1832 // If there exist VerifierDeps that aren't the ones we just created to output, use them to verify.
1833 if (verifier_deps == nullptr || verifier_deps->OutputOnly()) {
1834 return false;
1835 }
1836 TimingLogger::ScopedTiming t("Fast Verify", timings);
1837
1838 ScopedObjectAccess soa(Thread::Current());
1839 StackHandleScope<2> hs(soa.Self());
1840 Handle<mirror::ClassLoader> class_loader(
1841 hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader)));
1842 std::string error_msg;
1843
1844 verifier_deps->ValidateDependenciesAndUpdateStatus(
1845 soa.Self(),
1846 class_loader,
1847 dex_files);
1848
1849 bool compiler_only_verifies =
1850 !GetCompilerOptions().IsAnyCompilationEnabled() &&
1851 !GetCompilerOptions().IsGeneratingImage();
1852
1853 const bool is_generating_image = GetCompilerOptions().IsGeneratingImage();
1854
1855 // We successfully validated the dependencies, now update class status
1856 // of verified classes. Note that the dependencies also record which classes
1857 // could not be fully verified; we could try again, but that would hurt verification
1858 // time. So instead we assume these classes still need to be verified at
1859 // runtime.
1860 for (const DexFile* dex_file : dex_files) {
1861 // Fetch the list of verified classes.
1862 const std::vector<bool>& verified_classes = verifier_deps->GetVerifiedClasses(*dex_file);
1863 DCHECK_EQ(verified_classes.size(), dex_file->NumClassDefs());
1864 for (ClassAccessor accessor : dex_file->GetClasses()) {
1865 ClassStatus status = verified_classes[accessor.GetClassDefIndex()]
1866 ? ClassStatus::kVerifiedNeedsAccessChecks
1867 : ClassStatus::kRetryVerificationAtRuntime;
1868 if (compiler_only_verifies) {
1869 // Just update the compiled_classes_ map. The compiler doesn't need to resolve
1870 // the type.
1871 ClassReference ref(dex_file, accessor.GetClassDefIndex());
1872 const ClassStatus existing = ClassStatus::kNotReady;
1873 // Note: when dex files are compiled inidividually, the class may have
1874 // been verified in a previous stage. This means this insertion can
1875 // fail, but that's OK.
1876 compiled_classes_.Insert(ref, existing, status);
1877 } else {
1878 if (is_generating_image &&
1879 status == ClassStatus::kVerifiedNeedsAccessChecks &&
1880 GetCompilerOptions().IsImageClass(accessor.GetDescriptor())) {
1881 // If the class will be in the image, we can rely on the ArtMethods
1882 // telling that they need access checks.
1883 VLOG(compiler) << "Promoting "
1884 << std::string(accessor.GetDescriptor())
1885 << " from needs access checks to verified given it is an image class";
1886 status = ClassStatus::kVerified;
1887 }
1888 // Update the class status, so later compilation stages know they don't need to verify
1889 // the class.
1890 LoadAndUpdateStatus(accessor, status, class_loader, soa.Self());
1891 }
1892
1893 // Vdex marks class as unverified for two reasons only:
1894 // 1. It has a hard failure, or
1895 // 2. One of its method needs lock counting.
1896 //
1897 // The optimizing compiler expects a method to not have a hard failure before
1898 // compiling it, so for simplicity just disable any compilation of methods
1899 // of these classes.
1900 if (status == ClassStatus::kRetryVerificationAtRuntime) {
1901 ClassReference ref(dex_file, accessor.GetClassDefIndex());
1902 callbacks->AddUncompilableClass(ref);
1903 }
1904 }
1905 }
1906 return true;
1907 }
1908
Verify(jobject jclass_loader,const std::vector<const DexFile * > & dex_files,TimingLogger * timings)1909 void CompilerDriver::Verify(jobject jclass_loader,
1910 const std::vector<const DexFile*>& dex_files,
1911 TimingLogger* timings) {
1912 if (FastVerify(jclass_loader, dex_files, timings)) {
1913 return;
1914 }
1915
1916 // If there is no existing `verifier_deps` (because of non-existing vdex), or
1917 // the existing `verifier_deps` is not valid anymore, create a new one. The
1918 // verifier will need it to record the new dependencies. Then dex2oat can update
1919 // the vdex file with these new dependencies.
1920 // Dex2oat creates the verifier deps.
1921 // Create the main VerifierDeps, and set it to this thread.
1922 verifier::VerifierDeps* main_verifier_deps =
1923 Runtime::Current()->GetCompilerCallbacks()->GetVerifierDeps();
1924 // Verifier deps can be null when unit testing.
1925 if (main_verifier_deps != nullptr) {
1926 Thread::Current()->SetVerifierDeps(main_verifier_deps);
1927 // Create per-thread VerifierDeps to avoid contention on the main one.
1928 // We will merge them after verification.
1929 for (ThreadPoolWorker* worker : parallel_thread_pool_->GetWorkers()) {
1930 worker->GetThread()->SetVerifierDeps(
1931 new verifier::VerifierDeps(GetCompilerOptions().GetDexFilesForOatFile()));
1932 }
1933 }
1934
1935 // Verification updates VerifierDeps and needs to run single-threaded to be deterministic.
1936 bool force_determinism = GetCompilerOptions().IsForceDeterminism();
1937 ThreadPool* verify_thread_pool =
1938 force_determinism ? single_thread_pool_.get() : parallel_thread_pool_.get();
1939 size_t verify_thread_count = force_determinism ? 1U : parallel_thread_count_;
1940 for (const DexFile* dex_file : dex_files) {
1941 CHECK(dex_file != nullptr);
1942 VerifyDexFile(jclass_loader,
1943 *dex_file,
1944 verify_thread_pool,
1945 verify_thread_count,
1946 timings);
1947 }
1948
1949 if (main_verifier_deps != nullptr) {
1950 // Merge all VerifierDeps into the main one.
1951 for (ThreadPoolWorker* worker : parallel_thread_pool_->GetWorkers()) {
1952 std::unique_ptr<verifier::VerifierDeps> thread_deps(worker->GetThread()->GetVerifierDeps());
1953 worker->GetThread()->SetVerifierDeps(nullptr); // We just took ownership.
1954 main_verifier_deps->MergeWith(std::move(thread_deps),
1955 GetCompilerOptions().GetDexFilesForOatFile());
1956 }
1957 Thread::Current()->SetVerifierDeps(nullptr);
1958 }
1959 }
1960
1961 class VerifyClassVisitor : public CompilationVisitor {
1962 public:
VerifyClassVisitor(const ParallelCompilationManager * manager,verifier::HardFailLogMode log_level)1963 VerifyClassVisitor(const ParallelCompilationManager* manager, verifier::HardFailLogMode log_level)
1964 : manager_(manager),
1965 log_level_(log_level),
1966 sdk_version_(Runtime::Current()->GetTargetSdkVersion()) {}
1967
Visit(size_t class_def_index)1968 void Visit(size_t class_def_index) REQUIRES(!Locks::mutator_lock_) override {
1969 ScopedTrace trace(__FUNCTION__);
1970 ScopedObjectAccess soa(Thread::Current());
1971 const DexFile& dex_file = *manager_->GetDexFile();
1972 const dex::ClassDef& class_def = dex_file.GetClassDef(class_def_index);
1973 ClassLinker* class_linker = manager_->GetClassLinker();
1974 jobject jclass_loader = manager_->GetClassLoader();
1975 StackHandleScope<3> hs(soa.Self());
1976 Handle<mirror::ClassLoader> class_loader(
1977 hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader)));
1978 Handle<mirror::Class> klass = hs.NewHandle(
1979 class_linker->FindClass(soa.Self(), dex_file, class_def.class_idx_, class_loader));
1980 ClassReference ref(manager_->GetDexFile(), class_def_index);
1981 verifier::FailureKind failure_kind;
1982 if (klass == nullptr) {
1983 CHECK(soa.Self()->IsExceptionPending());
1984 soa.Self()->ClearException();
1985
1986 /*
1987 * At compile time, we can still structurally verify the class even if FindClass fails.
1988 * This is to ensure the class is structurally sound for compilation. An unsound class
1989 * will be rejected by the verifier and later skipped during compilation in the compiler.
1990 */
1991 Handle<mirror::DexCache> dex_cache(hs.NewHandle(class_linker->FindDexCache(
1992 soa.Self(), dex_file)));
1993 std::string error_msg;
1994 failure_kind =
1995 verifier::ClassVerifier::VerifyClass(soa.Self(),
1996 soa.Self()->GetVerifierDeps(),
1997 &dex_file,
1998 klass,
1999 dex_cache,
2000 class_loader,
2001 class_def,
2002 Runtime::Current()->GetCompilerCallbacks(),
2003 log_level_,
2004 sdk_version_,
2005 &error_msg);
2006 switch (failure_kind) {
2007 case verifier::FailureKind::kHardFailure: {
2008 manager_->GetCompiler()->SetHadHardVerifierFailure();
2009 break;
2010 }
2011 case verifier::FailureKind::kSoftFailure: {
2012 manager_->GetCompiler()->AddSoftVerifierFailure();
2013 break;
2014 }
2015 case verifier::FailureKind::kTypeChecksFailure: {
2016 // Don't record anything, we will do the type checks from the vdex
2017 // file at runtime.
2018 break;
2019 }
2020 case verifier::FailureKind::kAccessChecksFailure: {
2021 manager_->GetCompiler()->RecordClassStatus(ref, ClassStatus::kVerifiedNeedsAccessChecks);
2022 break;
2023 }
2024 case verifier::FailureKind::kNoFailure: {
2025 manager_->GetCompiler()->RecordClassStatus(ref, ClassStatus::kVerified);
2026 break;
2027 }
2028 }
2029 } else if (SkipClass(jclass_loader, dex_file, klass.Get())) {
2030 // Skip a duplicate class (as the resolved class is from another, earlier dex file).
2031 return; // Do not update state.
2032 } else {
2033 CHECK(klass->IsResolved()) << klass->PrettyClass();
2034 failure_kind = class_linker->VerifyClass(soa.Self(),
2035 soa.Self()->GetVerifierDeps(),
2036 klass,
2037 log_level_);
2038
2039 DCHECK_EQ(klass->IsErroneous(), failure_kind == verifier::FailureKind::kHardFailure);
2040 if (failure_kind == verifier::FailureKind::kHardFailure) {
2041 // ClassLinker::VerifyClass throws, which isn't useful in the compiler.
2042 CHECK(soa.Self()->IsExceptionPending());
2043 soa.Self()->ClearException();
2044 manager_->GetCompiler()->SetHadHardVerifierFailure();
2045 } else if (failure_kind == verifier::FailureKind::kSoftFailure) {
2046 manager_->GetCompiler()->AddSoftVerifierFailure();
2047 }
2048
2049 CHECK(klass->ShouldVerifyAtRuntime() ||
2050 klass->IsVerifiedNeedsAccessChecks() ||
2051 klass->IsVerified() ||
2052 klass->IsErroneous())
2053 << klass->PrettyDescriptor() << ": state=" << klass->GetStatus();
2054
2055 // Class has a meaningful status for the compiler now, record it.
2056 ClassStatus status = klass->GetStatus();
2057 if (status == ClassStatus::kInitialized) {
2058 // Initialized classes shall be visibly initialized when loaded from the image.
2059 status = ClassStatus::kVisiblyInitialized;
2060 }
2061 manager_->GetCompiler()->RecordClassStatus(ref, status);
2062
2063 // It is *very* problematic if there are resolution errors in the boot classpath.
2064 //
2065 // It is also bad if classes fail verification. For example, we rely on things working
2066 // OK without verification when the decryption dialog is brought up. It is thus highly
2067 // recommended to compile the boot classpath with
2068 // --abort-on-hard-verifier-error --abort-on-soft-verifier-error
2069 // which is the default build system configuration.
2070 if (kIsDebugBuild) {
2071 if (manager_->GetCompiler()->GetCompilerOptions().IsBootImage() ||
2072 manager_->GetCompiler()->GetCompilerOptions().IsBootImageExtension()) {
2073 if (!klass->IsResolved() || klass->IsErroneous()) {
2074 LOG(FATAL) << "Boot classpath class " << klass->PrettyClass()
2075 << " failed to resolve/is erroneous: state= " << klass->GetStatus();
2076 UNREACHABLE();
2077 }
2078 }
2079 if (klass->IsVerified()) {
2080 DCHECK_EQ(failure_kind, verifier::FailureKind::kNoFailure);
2081 } else if (klass->IsVerifiedNeedsAccessChecks()) {
2082 DCHECK_EQ(failure_kind, verifier::FailureKind::kAccessChecksFailure);
2083 } else if (klass->ShouldVerifyAtRuntime()) {
2084 DCHECK_NE(failure_kind, verifier::FailureKind::kHardFailure);
2085 // This could either be due to:
2086 // - kTypeChecksFailure, or
2087 // - kSoftFailure, or
2088 // - the superclass or interfaces not being verified.
2089 } else {
2090 DCHECK_EQ(failure_kind, verifier::FailureKind::kHardFailure);
2091 }
2092 }
2093 }
2094 verifier::VerifierDeps::MaybeRecordVerificationStatus(soa.Self()->GetVerifierDeps(),
2095 dex_file,
2096 class_def,
2097 failure_kind);
2098 soa.Self()->AssertNoPendingException();
2099 }
2100
2101 private:
2102 const ParallelCompilationManager* const manager_;
2103 const verifier::HardFailLogMode log_level_;
2104 const uint32_t sdk_version_;
2105 };
2106
VerifyDexFile(jobject class_loader,const DexFile & dex_file,ThreadPool * thread_pool,size_t thread_count,TimingLogger * timings)2107 void CompilerDriver::VerifyDexFile(jobject class_loader,
2108 const DexFile& dex_file,
2109 ThreadPool* thread_pool,
2110 size_t thread_count,
2111 TimingLogger* timings) {
2112 TimingLogger::ScopedTiming t("Verify Dex File", timings);
2113 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
2114 ParallelCompilationManager context(class_linker, class_loader, this, &dex_file, thread_pool);
2115 bool abort_on_verifier_failures = GetCompilerOptions().AbortOnHardVerifierFailure()
2116 || GetCompilerOptions().AbortOnSoftVerifierFailure();
2117 verifier::HardFailLogMode log_level = abort_on_verifier_failures
2118 ? verifier::HardFailLogMode::kLogInternalFatal
2119 : verifier::HardFailLogMode::kLogWarning;
2120 VerifyClassVisitor visitor(&context, log_level);
2121 context.ForAll(0, dex_file.NumClassDefs(), &visitor, thread_count);
2122
2123 // Make initialized classes visibly initialized.
2124 class_linker->MakeInitializedClassesVisiblyInitialized(Thread::Current(), /*wait=*/ true);
2125 }
2126
2127 class SetVerifiedClassVisitor : public CompilationVisitor {
2128 public:
SetVerifiedClassVisitor(const ParallelCompilationManager * manager)2129 explicit SetVerifiedClassVisitor(const ParallelCompilationManager* manager) : manager_(manager) {}
2130
Visit(size_t class_def_index)2131 void Visit(size_t class_def_index) REQUIRES(!Locks::mutator_lock_) override {
2132 ScopedTrace trace(__FUNCTION__);
2133 ScopedObjectAccess soa(Thread::Current());
2134 const DexFile& dex_file = *manager_->GetDexFile();
2135 const dex::ClassDef& class_def = dex_file.GetClassDef(class_def_index);
2136 ClassLinker* class_linker = manager_->GetClassLinker();
2137 jobject jclass_loader = manager_->GetClassLoader();
2138 StackHandleScope<3> hs(soa.Self());
2139 Handle<mirror::ClassLoader> class_loader(
2140 hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader)));
2141 Handle<mirror::Class> klass = hs.NewHandle(
2142 class_linker->FindClass(soa.Self(), dex_file, class_def.class_idx_, class_loader));
2143 // Class might have failed resolution. Then don't set it to verified.
2144 if (klass != nullptr) {
2145 // Only do this if the class is resolved. If even resolution fails, quickening will go very,
2146 // very wrong.
2147 if (klass->IsResolved() && !klass->IsErroneousResolved()) {
2148 if (klass->GetStatus() < ClassStatus::kVerified) {
2149 ObjectLock<mirror::Class> lock(soa.Self(), klass);
2150 // Set class status to verified.
2151 mirror::Class::SetStatus(klass, ClassStatus::kVerified, soa.Self());
2152 // Mark methods as pre-verified. If we don't do this, the interpreter will run with
2153 // access checks.
2154 InstructionSet instruction_set =
2155 manager_->GetCompiler()->GetCompilerOptions().GetInstructionSet();
2156 klass->SetSkipAccessChecksFlagOnAllMethods(GetInstructionSetPointerSize(instruction_set));
2157 }
2158 // Record the final class status if necessary.
2159 ClassReference ref(manager_->GetDexFile(), class_def_index);
2160 manager_->GetCompiler()->RecordClassStatus(ref, klass->GetStatus());
2161 }
2162 } else {
2163 Thread* self = soa.Self();
2164 DCHECK(self->IsExceptionPending());
2165 self->ClearException();
2166 }
2167 }
2168
2169 private:
2170 const ParallelCompilationManager* const manager_;
2171 };
2172
SetVerifiedDexFile(jobject class_loader,const DexFile & dex_file,ThreadPool * thread_pool,size_t thread_count,TimingLogger * timings)2173 void CompilerDriver::SetVerifiedDexFile(jobject class_loader,
2174 const DexFile& dex_file,
2175 ThreadPool* thread_pool,
2176 size_t thread_count,
2177 TimingLogger* timings) {
2178 TimingLogger::ScopedTiming t("Set Verified Dex File", timings);
2179 if (!compiled_classes_.HaveDexFile(&dex_file)) {
2180 compiled_classes_.AddDexFile(&dex_file);
2181 }
2182 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
2183 ParallelCompilationManager context(class_linker, class_loader, this, &dex_file, thread_pool);
2184 SetVerifiedClassVisitor visitor(&context);
2185 context.ForAll(0, dex_file.NumClassDefs(), &visitor, thread_count);
2186 }
2187
2188 class InitializeClassVisitor : public CompilationVisitor {
2189 public:
InitializeClassVisitor(const ParallelCompilationManager * manager)2190 explicit InitializeClassVisitor(const ParallelCompilationManager* manager) : manager_(manager) {}
2191
Visit(size_t class_def_index)2192 void Visit(size_t class_def_index) override {
2193 ScopedTrace trace(__FUNCTION__);
2194 jobject jclass_loader = manager_->GetClassLoader();
2195 const DexFile& dex_file = *manager_->GetDexFile();
2196 const dex::ClassDef& class_def = dex_file.GetClassDef(class_def_index);
2197
2198 ScopedObjectAccess soa(Thread::Current());
2199 StackHandleScope<3> hs(soa.Self());
2200 Handle<mirror::ClassLoader> class_loader(
2201 hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader)));
2202 Handle<mirror::Class> klass = hs.NewHandle(manager_->GetClassLinker()->FindClass(
2203 soa.Self(), dex_file, class_def.class_idx_, class_loader));
2204
2205 if (klass != nullptr) {
2206 if (!SkipClass(manager_->GetClassLoader(), dex_file, klass.Get())) {
2207 TryInitializeClass(soa.Self(), klass, class_loader);
2208 }
2209 manager_->GetCompiler()->stats_->AddClassStatus(klass->GetStatus());
2210 }
2211 // Clear any class not found or verification exceptions.
2212 soa.Self()->ClearException();
2213 }
2214
2215 // A helper function for initializing klass.
TryInitializeClass(Thread * self,Handle<mirror::Class> klass,Handle<mirror::ClassLoader> & class_loader)2216 void TryInitializeClass(Thread* self,
2217 Handle<mirror::Class> klass,
2218 Handle<mirror::ClassLoader>& class_loader)
2219 REQUIRES_SHARED(Locks::mutator_lock_) {
2220 const DexFile& dex_file = klass->GetDexFile();
2221 const dex::ClassDef* class_def = klass->GetClassDef();
2222 const dex::TypeId& class_type_id = dex_file.GetTypeId(class_def->class_idx_);
2223 const char* descriptor = dex_file.GetStringData(class_type_id.descriptor_idx_);
2224 StackHandleScope<3> hs(self);
2225 AotClassLinker* const class_linker = down_cast<AotClassLinker*>(manager_->GetClassLinker());
2226 Runtime* const runtime = Runtime::Current();
2227 const CompilerOptions& compiler_options = manager_->GetCompiler()->GetCompilerOptions();
2228 const bool is_boot_image = compiler_options.IsBootImage();
2229 const bool is_boot_image_extension = compiler_options.IsBootImageExtension();
2230 const bool is_app_image = compiler_options.IsAppImage();
2231
2232 // For boot image extension, do not initialize classes defined
2233 // in dex files belonging to the boot image we're compiling against.
2234 if (is_boot_image_extension &&
2235 runtime->GetHeap()->ObjectIsInBootImageSpace(klass->GetDexCache())) {
2236 // Also return early and don't store the class status in the recorded class status.
2237 return;
2238 }
2239 // Do not initialize classes in boot space when compiling app (with or without image).
2240 if ((!is_boot_image && !is_boot_image_extension) && klass->IsBootStrapClassLoaded()) {
2241 // Also return early and don't store the class status in the recorded class status.
2242 return;
2243 }
2244
2245 ClassStatus old_status = klass->GetStatus();
2246 // Only try to initialize classes that were successfully verified.
2247 if (klass->IsVerified()) {
2248 // Attempt to initialize the class but bail if we either need to initialize the super-class
2249 // or static fields.
2250 class_linker->EnsureInitialized(self, klass, false, false);
2251 DCHECK(!self->IsExceptionPending());
2252 old_status = klass->GetStatus();
2253 if (!klass->IsInitialized()) {
2254 // We don't want non-trivial class initialization occurring on multiple threads due to
2255 // deadlock problems. For example, a parent class is initialized (holding its lock) that
2256 // refers to a sub-class in its static/class initializer causing it to try to acquire the
2257 // sub-class' lock. While on a second thread the sub-class is initialized (holding its lock)
2258 // after first initializing its parents, whose locks are acquired. This leads to a
2259 // parent-to-child and a child-to-parent lock ordering and consequent potential deadlock.
2260 // We need to use an ObjectLock due to potential suspension in the interpreting code. Rather
2261 // than use a special Object for the purpose we use the Class of java.lang.Class.
2262 Handle<mirror::Class> h_klass(hs.NewHandle(klass->GetClass()));
2263 ObjectLock<mirror::Class> lock(self, h_klass);
2264 // Attempt to initialize allowing initialization of parent classes but still not static
2265 // fields.
2266 // Initialize dependencies first only for app or boot image extension,
2267 // to make TryInitializeClass() recursive.
2268 bool try_initialize_with_superclasses =
2269 is_boot_image ? true : InitializeDependencies(klass, class_loader, self);
2270 if (try_initialize_with_superclasses) {
2271 class_linker->EnsureInitialized(self, klass, false, true);
2272 DCHECK(!self->IsExceptionPending());
2273 }
2274 // Otherwise it's in app image or boot image extension but superclasses
2275 // cannot be initialized, no need to proceed.
2276 old_status = klass->GetStatus();
2277
2278 bool too_many_encoded_fields = (!is_boot_image && !is_boot_image_extension) &&
2279 klass->NumStaticFields() > kMaxEncodedFields;
2280
2281 bool have_profile = (compiler_options.GetProfileCompilationInfo() != nullptr) &&
2282 !compiler_options.GetProfileCompilationInfo()->IsEmpty();
2283 // If the class was not initialized, we can proceed to see if we can initialize static
2284 // fields. Limit the max number of encoded fields.
2285 if (!klass->IsInitialized() &&
2286 (is_app_image || is_boot_image || is_boot_image_extension) &&
2287 try_initialize_with_superclasses && !too_many_encoded_fields &&
2288 compiler_options.IsImageClass(descriptor) &&
2289 // TODO(b/274077782): remove this test.
2290 (have_profile || !is_boot_image_extension)) {
2291 bool can_init_static_fields = false;
2292 if (is_boot_image || is_boot_image_extension) {
2293 // We need to initialize static fields, we only do this for image classes that aren't
2294 // marked with the $NoPreloadHolder (which implies this should not be initialized
2295 // early).
2296 can_init_static_fields = !std::string_view(descriptor).ends_with("$NoPreloadHolder;");
2297 } else {
2298 CHECK(is_app_image);
2299 // The boot image case doesn't need to recursively initialize the dependencies with
2300 // special logic since the class linker already does this.
2301 // Optimization will be disabled in debuggable build, because in debuggable mode we
2302 // want the <clinit> behavior to be observable for the debugger, so we don't do the
2303 // <clinit> at compile time.
2304 can_init_static_fields =
2305 ClassLinker::kAppImageMayContainStrings &&
2306 !self->IsExceptionPending() &&
2307 !compiler_options.GetDebuggable() &&
2308 (compiler_options.InitializeAppImageClasses() ||
2309 NoClinitInDependency(klass, self, &class_loader));
2310 // TODO The checking for clinit can be removed since it's already
2311 // checked when init superclass. Currently keep it because it contains
2312 // processing of intern strings. Will be removed later when intern strings
2313 // and clinit are both initialized.
2314 }
2315
2316 if (can_init_static_fields) {
2317 VLOG(compiler) << "Initializing: " << descriptor;
2318 // TODO multithreading support. We should ensure the current compilation thread has
2319 // exclusive access to the runtime and the transaction. To achieve this, we could use
2320 // a ReaderWriterMutex but we're holding the mutator lock so we fail the check of mutex
2321 // validity in Thread::AssertThreadSuspensionIsAllowable.
2322
2323 // Resolve and initialize the exception type before enabling the transaction in case
2324 // the transaction aborts and cannot resolve the type.
2325 // TransactionAbortError is not initialized ant not in boot image, needed only by
2326 // compiler and will be pruned by ImageWriter.
2327 Handle<mirror::Class> exception_class =
2328 hs.NewHandle(class_linker->FindSystemClass(self, kTransactionAbortErrorDescriptor));
2329 bool exception_initialized =
2330 class_linker->EnsureInitialized(self, exception_class, true, true);
2331 DCHECK(exception_initialized);
2332
2333 // Run the class initializer in transaction mode.
2334 class_linker->EnterTransactionMode(is_app_image, klass.Get());
2335
2336 bool success = class_linker->EnsureInitialized(self, klass, true, true);
2337 // TODO we detach transaction from runtime to indicate we quit the transactional
2338 // mode which prevents the GC from visiting objects modified during the transaction.
2339 // Ensure GC is not run so don't access freed objects when aborting transaction.
2340
2341 {
2342 ScopedAssertNoThreadSuspension ants("Transaction end");
2343
2344 if (success) {
2345 class_linker->ExitTransactionMode();
2346 DCHECK(!runtime->IsActiveTransaction());
2347
2348 if (is_boot_image || is_boot_image_extension) {
2349 // For boot image and boot image extension, we want to put the updated
2350 // status in the oat class. This is not the case for app image as we
2351 // want to keep the ability to load the oat file without the app image.
2352 old_status = klass->GetStatus();
2353 }
2354 } else {
2355 CHECK(self->IsExceptionPending());
2356 mirror::Throwable* exception = self->GetException();
2357 VLOG(compiler) << "Initialization of " << descriptor << " aborted because of "
2358 << exception->Dump();
2359 std::ostream* file_log = manager_->GetCompiler()->
2360 GetCompilerOptions().GetInitFailureOutput();
2361 if (file_log != nullptr) {
2362 *file_log << descriptor << "\n";
2363 *file_log << exception->Dump() << "\n";
2364 }
2365 self->ClearException();
2366 class_linker->RollbackAllTransactions();
2367 CHECK_EQ(old_status, klass->GetStatus()) << "Previous class status not restored";
2368 }
2369 }
2370
2371 if (!success && (is_boot_image || is_boot_image_extension)) {
2372 // On failure, still intern strings of static fields and seen in <clinit>, as these
2373 // will be created in the zygote. This is separated from the transaction code just
2374 // above as we will allocate strings, so must be allowed to suspend.
2375 // We only need to intern strings for boot image and boot image extension
2376 // because classes that failed to be initialized will not appear in app image.
2377 if (&klass->GetDexFile() == manager_->GetDexFile()) {
2378 InternStrings(klass, class_loader);
2379 } else {
2380 DCHECK(!is_boot_image) << "Boot image must have equal dex files";
2381 }
2382 }
2383 }
2384 }
2385 // Clear exception in case EnsureInitialized has caused one in the code above.
2386 // It's OK to clear the exception here since the compiler is supposed to be fault
2387 // tolerant and will silently not initialize classes that have exceptions.
2388 self->ClearException();
2389
2390 // If the class still isn't initialized, at least try some checks that initialization
2391 // would do so they can be skipped at runtime.
2392 if (!klass->IsInitialized() && class_linker->ValidateSuperClassDescriptors(klass)) {
2393 old_status = ClassStatus::kSuperclassValidated;
2394 } else {
2395 self->ClearException();
2396 }
2397 self->AssertNoPendingException();
2398 }
2399 }
2400 if (old_status == ClassStatus::kInitialized) {
2401 // Initialized classes shall be visibly initialized when loaded from the image.
2402 old_status = ClassStatus::kVisiblyInitialized;
2403 }
2404 // Record the final class status if necessary.
2405 ClassReference ref(&dex_file, klass->GetDexClassDefIndex());
2406 // Back up the status before doing initialization for static encoded fields,
2407 // because the static encoded branch wants to keep the status to uninitialized.
2408 manager_->GetCompiler()->RecordClassStatus(ref, old_status);
2409
2410 if (kIsDebugBuild) {
2411 // Make sure the class initialization did not leave any local references.
2412 self->GetJniEnv()->AssertLocalsEmpty();
2413 }
2414
2415 if (!klass->IsInitialized() &&
2416 (is_boot_image || is_boot_image_extension) &&
2417 !compiler_options.IsPreloadedClass(PrettyDescriptor(descriptor))) {
2418 klass->SetInBootImageAndNotInPreloadedClasses();
2419 }
2420
2421 if (compiler_options.CompileArtTest()) {
2422 // For stress testing and unit-testing the clinit check in compiled code feature.
2423 if (kIsDebugBuild || std::string_view(descriptor).ends_with("$NoPreloadHolder;")) {
2424 klass->SetInBootImageAndNotInPreloadedClasses();
2425 }
2426 }
2427 }
2428
2429 private:
InternStrings(Handle<mirror::Class> klass,Handle<mirror::ClassLoader> class_loader)2430 void InternStrings(Handle<mirror::Class> klass, Handle<mirror::ClassLoader> class_loader)
2431 REQUIRES_SHARED(Locks::mutator_lock_) {
2432 DCHECK(manager_->GetCompiler()->GetCompilerOptions().IsBootImage() ||
2433 manager_->GetCompiler()->GetCompilerOptions().IsBootImageExtension());
2434 DCHECK(klass->IsVerified());
2435 DCHECK(!klass->IsInitialized());
2436
2437 StackHandleScope<1> hs(Thread::Current());
2438 Handle<mirror::DexCache> dex_cache = hs.NewHandle(klass->GetDexCache());
2439 const dex::ClassDef* class_def = klass->GetClassDef();
2440 ClassLinker* class_linker = manager_->GetClassLinker();
2441
2442 // Check encoded final field values for strings and intern.
2443 annotations::RuntimeEncodedStaticFieldValueIterator value_it(dex_cache,
2444 class_loader,
2445 manager_->GetClassLinker(),
2446 *class_def);
2447 for ( ; value_it.HasNext(); value_it.Next()) {
2448 if (value_it.GetValueType() == annotations::RuntimeEncodedStaticFieldValueIterator::kString) {
2449 // Resolve the string. This will intern the string.
2450 art::ObjPtr<mirror::String> resolved = class_linker->ResolveString(
2451 dex::StringIndex(value_it.GetJavaValue().i), dex_cache);
2452 CHECK(resolved != nullptr);
2453 }
2454 }
2455
2456 // Intern strings seen in <clinit>.
2457 ArtMethod* clinit = klass->FindClassInitializer(class_linker->GetImagePointerSize());
2458 if (clinit != nullptr) {
2459 for (const DexInstructionPcPair& inst : clinit->DexInstructions()) {
2460 if (inst->Opcode() == Instruction::CONST_STRING) {
2461 ObjPtr<mirror::String> s = class_linker->ResolveString(
2462 dex::StringIndex(inst->VRegB_21c()), dex_cache);
2463 CHECK(s != nullptr);
2464 } else if (inst->Opcode() == Instruction::CONST_STRING_JUMBO) {
2465 ObjPtr<mirror::String> s = class_linker->ResolveString(
2466 dex::StringIndex(inst->VRegB_31c()), dex_cache);
2467 CHECK(s != nullptr);
2468 }
2469 }
2470 }
2471 }
2472
ResolveTypesOfMethods(Thread * self,ArtMethod * m)2473 bool ResolveTypesOfMethods(Thread* self, ArtMethod* m)
2474 REQUIRES_SHARED(Locks::mutator_lock_) {
2475 // Return value of ResolveReturnType() is discarded because resolve will be done internally.
2476 ObjPtr<mirror::Class> rtn_type = m->ResolveReturnType();
2477 if (rtn_type == nullptr) {
2478 self->ClearException();
2479 return false;
2480 }
2481 const dex::TypeList* types = m->GetParameterTypeList();
2482 if (types != nullptr) {
2483 for (uint32_t i = 0; i < types->Size(); ++i) {
2484 dex::TypeIndex param_type_idx = types->GetTypeItem(i).type_idx_;
2485 ObjPtr<mirror::Class> param_type = m->ResolveClassFromTypeIndex(param_type_idx);
2486 if (param_type == nullptr) {
2487 self->ClearException();
2488 return false;
2489 }
2490 }
2491 }
2492 return true;
2493 }
2494
2495 // Pre resolve types mentioned in all method signatures before start a transaction
2496 // since ResolveType doesn't work in transaction mode.
PreResolveTypes(Thread * self,const Handle<mirror::Class> & klass)2497 bool PreResolveTypes(Thread* self, const Handle<mirror::Class>& klass)
2498 REQUIRES_SHARED(Locks::mutator_lock_) {
2499 PointerSize pointer_size = manager_->GetClassLinker()->GetImagePointerSize();
2500 for (ArtMethod& m : klass->GetMethods(pointer_size)) {
2501 if (!ResolveTypesOfMethods(self, &m)) {
2502 return false;
2503 }
2504 }
2505 if (klass->IsInterface()) {
2506 return true;
2507 } else if (klass->HasSuperClass()) {
2508 StackHandleScope<1> hs(self);
2509 MutableHandle<mirror::Class> super_klass(hs.NewHandle<mirror::Class>(klass->GetSuperClass()));
2510 for (int i = super_klass->GetVTableLength() - 1; i >= 0; --i) {
2511 ArtMethod* m = klass->GetVTableEntry(i, pointer_size);
2512 ArtMethod* super_m = super_klass->GetVTableEntry(i, pointer_size);
2513 if (!ResolveTypesOfMethods(self, m) || !ResolveTypesOfMethods(self, super_m)) {
2514 return false;
2515 }
2516 }
2517 for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
2518 super_klass.Assign(klass->GetIfTable()->GetInterface(i));
2519 if (klass->GetClassLoader() != super_klass->GetClassLoader()) {
2520 uint32_t num_methods = super_klass->NumVirtualMethods();
2521 for (uint32_t j = 0; j < num_methods; ++j) {
2522 ArtMethod* m = klass->GetIfTable()->GetMethodArray(i)->GetElementPtrSize<ArtMethod*>(
2523 j, pointer_size);
2524 ArtMethod* super_m = super_klass->GetVirtualMethod(j, pointer_size);
2525 if (!ResolveTypesOfMethods(self, m) || !ResolveTypesOfMethods(self, super_m)) {
2526 return false;
2527 }
2528 }
2529 }
2530 }
2531 }
2532 return true;
2533 }
2534
2535 // Initialize the klass's dependencies recursively before initializing itself.
2536 // Checking for interfaces is also necessary since interfaces that contain
2537 // default methods must be initialized before the class.
InitializeDependencies(const Handle<mirror::Class> & klass,Handle<mirror::ClassLoader> class_loader,Thread * self)2538 bool InitializeDependencies(const Handle<mirror::Class>& klass,
2539 Handle<mirror::ClassLoader> class_loader,
2540 Thread* self)
2541 REQUIRES_SHARED(Locks::mutator_lock_) {
2542 if (klass->HasSuperClass()) {
2543 StackHandleScope<1> hs(self);
2544 Handle<mirror::Class> super_class = hs.NewHandle(klass->GetSuperClass());
2545 if (!super_class->IsInitialized()) {
2546 this->TryInitializeClass(self, super_class, class_loader);
2547 if (!super_class->IsInitialized()) {
2548 return false;
2549 }
2550 }
2551 }
2552
2553 if (!klass->IsInterface()) {
2554 size_t num_interfaces = klass->GetIfTableCount();
2555 for (size_t i = 0; i < num_interfaces; ++i) {
2556 StackHandleScope<1> hs(self);
2557 Handle<mirror::Class> iface = hs.NewHandle(klass->GetIfTable()->GetInterface(i));
2558 if (iface->HasDefaultMethods() && !iface->IsInitialized()) {
2559 TryInitializeClass(self, iface, class_loader);
2560 if (!iface->IsInitialized()) {
2561 return false;
2562 }
2563 }
2564 }
2565 }
2566
2567 return PreResolveTypes(self, klass);
2568 }
2569
2570 // In this phase the classes containing class initializers are ignored. Make sure no
2571 // clinit appears in klass's super class chain and interfaces.
NoClinitInDependency(const Handle<mirror::Class> & klass,Thread * self,Handle<mirror::ClassLoader> * class_loader)2572 bool NoClinitInDependency(const Handle<mirror::Class>& klass,
2573 Thread* self,
2574 Handle<mirror::ClassLoader>* class_loader)
2575 REQUIRES_SHARED(Locks::mutator_lock_) {
2576 ArtMethod* clinit =
2577 klass->FindClassInitializer(manager_->GetClassLinker()->GetImagePointerSize());
2578 if (clinit != nullptr) {
2579 VLOG(compiler) << klass->PrettyClass() << ' ' << clinit->PrettyMethod(true);
2580 return false;
2581 }
2582 if (klass->HasSuperClass()) {
2583 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
2584 StackHandleScope<1> hs(self);
2585 Handle<mirror::Class> handle_scope_super(hs.NewHandle(super_class));
2586 if (!NoClinitInDependency(handle_scope_super, self, class_loader)) {
2587 return false;
2588 }
2589 }
2590
2591 uint32_t num_if = klass->NumDirectInterfaces();
2592 for (size_t i = 0; i < num_if; i++) {
2593 ObjPtr<mirror::Class> interface = klass->GetDirectInterface(i);
2594 DCHECK(interface != nullptr);
2595 StackHandleScope<1> hs(self);
2596 Handle<mirror::Class> handle_interface(hs.NewHandle(interface));
2597 if (!NoClinitInDependency(handle_interface, self, class_loader)) {
2598 return false;
2599 }
2600 }
2601
2602 return true;
2603 }
2604
2605 const ParallelCompilationManager* const manager_;
2606 };
2607
InitializeClasses(jobject jni_class_loader,const DexFile & dex_file,TimingLogger * timings)2608 void CompilerDriver::InitializeClasses(jobject jni_class_loader,
2609 const DexFile& dex_file,
2610 TimingLogger* timings) {
2611 TimingLogger::ScopedTiming t("InitializeNoClinit", timings);
2612
2613 // Initialization allocates objects and needs to run single-threaded to be deterministic.
2614 bool force_determinism = GetCompilerOptions().IsForceDeterminism();
2615 ThreadPool* init_thread_pool = force_determinism
2616 ? single_thread_pool_.get()
2617 : parallel_thread_pool_.get();
2618 size_t init_thread_count = force_determinism ? 1U : parallel_thread_count_;
2619
2620 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
2621 ParallelCompilationManager context(
2622 class_linker, jni_class_loader, this, &dex_file, init_thread_pool);
2623
2624 if (GetCompilerOptions().IsBootImage() ||
2625 GetCompilerOptions().IsBootImageExtension() ||
2626 GetCompilerOptions().IsAppImage()) {
2627 // Set the concurrency thread to 1 to support initialization for images since transaction
2628 // doesn't support multithreading now.
2629 // TODO: remove this when transactional mode supports multithreading.
2630 init_thread_count = 1U;
2631 }
2632 InitializeClassVisitor visitor(&context);
2633 context.ForAll(0, dex_file.NumClassDefs(), &visitor, init_thread_count);
2634
2635 // Make initialized classes visibly initialized.
2636 class_linker->MakeInitializedClassesVisiblyInitialized(Thread::Current(), /*wait=*/ true);
2637 }
2638
InitializeClasses(jobject class_loader,const std::vector<const DexFile * > & dex_files,TimingLogger * timings)2639 void CompilerDriver::InitializeClasses(jobject class_loader,
2640 const std::vector<const DexFile*>& dex_files,
2641 TimingLogger* timings) {
2642 for (const DexFile* dex_file : dex_files) {
2643 CHECK(dex_file != nullptr);
2644 InitializeClasses(class_loader, *dex_file, timings);
2645 }
2646 if (GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension()) {
2647 // Prune garbage objects created during aborted transactions.
2648 Runtime::Current()->GetHeap()->CollectGarbage(/* clear_soft_references= */ true);
2649 }
2650 }
2651
2652 template <typename CompileFn>
CompileDexFile(CompilerDriver * driver,jobject class_loader,const DexFile & dex_file,ThreadPool * thread_pool,size_t thread_count,TimingLogger * timings,const char * timing_name,CompileFn compile_fn)2653 static void CompileDexFile(CompilerDriver* driver,
2654 jobject class_loader,
2655 const DexFile& dex_file,
2656 ThreadPool* thread_pool,
2657 size_t thread_count,
2658 TimingLogger* timings,
2659 const char* timing_name,
2660 CompileFn compile_fn) {
2661 TimingLogger::ScopedTiming t(timing_name, timings);
2662 ParallelCompilationManager context(Runtime::Current()->GetClassLinker(),
2663 class_loader,
2664 driver,
2665 &dex_file,
2666 thread_pool);
2667 const CompilerOptions& compiler_options = driver->GetCompilerOptions();
2668 bool have_profile = (compiler_options.GetProfileCompilationInfo() != nullptr);
2669 bool use_profile = CompilerFilter::DependsOnProfile(compiler_options.GetCompilerFilter());
2670 ProfileCompilationInfo::ProfileIndexType profile_index = (have_profile && use_profile)
2671 ? compiler_options.GetProfileCompilationInfo()->FindDexFile(dex_file)
2672 : ProfileCompilationInfo::MaxProfileIndex();
2673
2674 auto compile = [&context, &compile_fn, profile_index](size_t class_def_index) {
2675 const DexFile& dex_file = *context.GetDexFile();
2676 SCOPED_TRACE << "compile " << dex_file.GetLocation() << "@" << class_def_index;
2677 ClassLinker* class_linker = context.GetClassLinker();
2678 jobject jclass_loader = context.GetClassLoader();
2679 ClassReference ref(&dex_file, class_def_index);
2680 const dex::ClassDef& class_def = dex_file.GetClassDef(class_def_index);
2681 ClassAccessor accessor(dex_file, class_def_index);
2682 CompilerDriver* const driver = context.GetCompiler();
2683 // Skip compiling classes with generic verifier failures since they will still fail at runtime
2684 DCHECK(driver->GetVerificationResults() != nullptr);
2685 if (driver->GetVerificationResults()->IsClassRejected(ref)) {
2686 return;
2687 }
2688 // Use a scoped object access to perform to the quick SkipClass check.
2689 ScopedObjectAccess soa(Thread::Current());
2690 StackHandleScope<3> hs(soa.Self());
2691 Handle<mirror::ClassLoader> class_loader(
2692 hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader)));
2693 Handle<mirror::Class> klass = hs.NewHandle(
2694 class_linker->FindClass(soa.Self(), dex_file, class_def.class_idx_, class_loader));
2695 Handle<mirror::DexCache> dex_cache;
2696 if (klass == nullptr) {
2697 soa.Self()->AssertPendingException();
2698 soa.Self()->ClearException();
2699 dex_cache = hs.NewHandle(class_linker->FindDexCache(soa.Self(), dex_file));
2700 } else if (SkipClass(jclass_loader, dex_file, klass.Get())) {
2701 // Skip a duplicate class (as the resolved class is from another, earlier dex file).
2702 return; // Do not update state.
2703 } else {
2704 dex_cache = hs.NewHandle(klass->GetDexCache());
2705 }
2706
2707 // Avoid suspension if there are no methods to compile.
2708 if (accessor.NumDirectMethods() + accessor.NumVirtualMethods() == 0) {
2709 return;
2710 }
2711
2712 // Go to native so that we don't block GC during compilation.
2713 ScopedThreadSuspension sts(soa.Self(), ThreadState::kNative);
2714
2715 // Compile direct and virtual methods.
2716 int64_t previous_method_idx = -1;
2717 for (const ClassAccessor::Method& method : accessor.GetMethods()) {
2718 const uint32_t method_idx = method.GetIndex();
2719 if (method_idx == previous_method_idx) {
2720 // smali can create dex files with two encoded_methods sharing the same method_idx
2721 // http://code.google.com/p/smali/issues/detail?id=119
2722 continue;
2723 }
2724 previous_method_idx = method_idx;
2725 compile_fn(soa.Self(),
2726 driver,
2727 method.GetCodeItem(),
2728 method.GetAccessFlags(),
2729 class_def_index,
2730 method_idx,
2731 class_loader,
2732 dex_file,
2733 dex_cache,
2734 profile_index);
2735 }
2736 };
2737 context.ForAllLambda(0, dex_file.NumClassDefs(), compile, thread_count);
2738 }
2739
Compile(jobject class_loader,const std::vector<const DexFile * > & dex_files,TimingLogger * timings)2740 void CompilerDriver::Compile(jobject class_loader,
2741 const std::vector<const DexFile*>& dex_files,
2742 TimingLogger* timings) {
2743 if (kDebugProfileGuidedCompilation) {
2744 const ProfileCompilationInfo* profile_compilation_info =
2745 GetCompilerOptions().GetProfileCompilationInfo();
2746 LOG(INFO) << "[ProfileGuidedCompilation] " <<
2747 ((profile_compilation_info == nullptr)
2748 ? "null"
2749 : profile_compilation_info->DumpInfo(dex_files));
2750 }
2751
2752 for (const DexFile* dex_file : dex_files) {
2753 CHECK(dex_file != nullptr);
2754 CompileDexFile(this,
2755 class_loader,
2756 *dex_file,
2757 parallel_thread_pool_.get(),
2758 parallel_thread_count_,
2759 timings,
2760 "Compile Dex File Quick",
2761 CompileMethodQuick);
2762 const ArenaPool* const arena_pool = Runtime::Current()->GetArenaPool();
2763 const size_t arena_alloc = arena_pool->GetBytesAllocated();
2764 max_arena_alloc_ = std::max(arena_alloc, max_arena_alloc_);
2765 Runtime::Current()->ReclaimArenaPoolMemory();
2766 }
2767
2768 VLOG(compiler) << "Compile: " << GetMemoryUsageString(false);
2769 }
2770
AddCompiledMethod(const MethodReference & method_ref,CompiledMethod * const compiled_method)2771 void CompilerDriver::AddCompiledMethod(const MethodReference& method_ref,
2772 CompiledMethod* const compiled_method) {
2773 DCHECK(GetCompiledMethod(method_ref) == nullptr) << method_ref.PrettyMethod();
2774 MethodTable::InsertResult result = compiled_methods_.Insert(method_ref,
2775 /*expected*/ nullptr,
2776 compiled_method);
2777 CHECK(result == MethodTable::kInsertResultSuccess);
2778 DCHECK(GetCompiledMethod(method_ref) != nullptr) << method_ref.PrettyMethod();
2779 }
2780
RemoveCompiledMethod(const MethodReference & method_ref)2781 CompiledMethod* CompilerDriver::RemoveCompiledMethod(const MethodReference& method_ref) {
2782 CompiledMethod* ret = nullptr;
2783 CHECK(compiled_methods_.Remove(method_ref, &ret));
2784 return ret;
2785 }
2786
GetCompiledClass(const ClassReference & ref,ClassStatus * status) const2787 bool CompilerDriver::GetCompiledClass(const ClassReference& ref, ClassStatus* status) const {
2788 DCHECK(status != nullptr);
2789 // The table doesn't know if something wasn't inserted. For this case it will return
2790 // ClassStatus::kNotReady. To handle this, just assume anything we didn't try to verify
2791 // is not compiled.
2792 if (!compiled_classes_.Get(ref, status) ||
2793 *status < ClassStatus::kRetryVerificationAtRuntime) {
2794 return false;
2795 }
2796 return true;
2797 }
2798
GetClassStatus(const ClassReference & ref) const2799 ClassStatus CompilerDriver::GetClassStatus(const ClassReference& ref) const {
2800 ClassStatus status = ClassStatus::kNotReady;
2801 if (!GetCompiledClass(ref, &status)) {
2802 classpath_classes_.Get(ref, &status);
2803 }
2804 return status;
2805 }
2806
RecordClassStatus(const ClassReference & ref,ClassStatus status)2807 void CompilerDriver::RecordClassStatus(const ClassReference& ref, ClassStatus status) {
2808 switch (status) {
2809 case ClassStatus::kErrorResolved:
2810 case ClassStatus::kErrorUnresolved:
2811 case ClassStatus::kNotReady:
2812 case ClassStatus::kResolved:
2813 case ClassStatus::kRetryVerificationAtRuntime:
2814 case ClassStatus::kVerifiedNeedsAccessChecks:
2815 case ClassStatus::kVerified:
2816 case ClassStatus::kSuperclassValidated:
2817 case ClassStatus::kVisiblyInitialized:
2818 break; // Expected states.
2819 default:
2820 LOG(FATAL) << "Unexpected class status for class "
2821 << PrettyDescriptor(
2822 ref.dex_file->GetClassDescriptor(ref.dex_file->GetClassDef(ref.index)))
2823 << " of " << status;
2824 }
2825
2826 ClassStateTable::InsertResult result;
2827 ClassStateTable* table = &compiled_classes_;
2828 do {
2829 ClassStatus existing = ClassStatus::kNotReady;
2830 if (!table->Get(ref, &existing)) {
2831 // A classpath class.
2832 if (kIsDebugBuild) {
2833 // Check to make sure it's not a dex file for an oat file we are compiling since these
2834 // should always succeed. These do not include classes in for used libraries.
2835 for (const DexFile* dex_file : GetCompilerOptions().GetDexFilesForOatFile()) {
2836 CHECK_NE(ref.dex_file, dex_file) << ref.dex_file->GetLocation();
2837 }
2838 }
2839 if (!classpath_classes_.HaveDexFile(ref.dex_file)) {
2840 // Boot classpath dex file.
2841 return;
2842 }
2843 table = &classpath_classes_;
2844 table->Get(ref, &existing);
2845 }
2846 if (existing >= status) {
2847 // Existing status is already better than we expect, break.
2848 break;
2849 }
2850 // Update the status if we now have a greater one. This happens with vdex,
2851 // which records a class is verified, but does not resolve it.
2852 result = table->Insert(ref, existing, status);
2853 CHECK(result != ClassStateTable::kInsertResultInvalidDexFile) << ref.dex_file->GetLocation();
2854 } while (result != ClassStateTable::kInsertResultSuccess);
2855 }
2856
GetCompiledMethod(MethodReference ref) const2857 CompiledMethod* CompilerDriver::GetCompiledMethod(MethodReference ref) const {
2858 CompiledMethod* compiled_method = nullptr;
2859 compiled_methods_.Get(ref, &compiled_method);
2860 return compiled_method;
2861 }
2862
GetMemoryUsageString(bool extended) const2863 std::string CompilerDriver::GetMemoryUsageString(bool extended) const {
2864 std::ostringstream oss;
2865 const gc::Heap* const heap = Runtime::Current()->GetHeap();
2866 const size_t java_alloc = heap->GetBytesAllocated();
2867 oss << "arena alloc=" << PrettySize(max_arena_alloc_) << " (" << max_arena_alloc_ << "B)";
2868 oss << " java alloc=" << PrettySize(java_alloc) << " (" << java_alloc << "B)";
2869 #if defined(__BIONIC__) || defined(__GLIBC__) || defined(ANDROID_HOST_MUSL)
2870 const struct mallinfo info = mallinfo();
2871 const size_t allocated_space = static_cast<size_t>(info.uordblks);
2872 const size_t free_space = static_cast<size_t>(info.fordblks);
2873 oss << " native alloc=" << PrettySize(allocated_space) << " (" << allocated_space << "B)"
2874 << " free=" << PrettySize(free_space) << " (" << free_space << "B)";
2875 #endif
2876 compiled_method_storage_.DumpMemoryUsage(oss, extended);
2877 return oss.str();
2878 }
2879
InitializeThreadPools()2880 void CompilerDriver::InitializeThreadPools() {
2881 size_t parallel_count = parallel_thread_count_ > 0 ? parallel_thread_count_ - 1 : 0;
2882 parallel_thread_pool_.reset(
2883 ThreadPool::Create("Compiler driver thread pool", parallel_count));
2884 single_thread_pool_.reset(ThreadPool::Create("Single-threaded Compiler driver thread pool", 0));
2885 }
2886
FreeThreadPools()2887 void CompilerDriver::FreeThreadPools() {
2888 parallel_thread_pool_.reset();
2889 single_thread_pool_.reset();
2890 }
2891
SetClasspathDexFiles(const std::vector<const DexFile * > & dex_files)2892 void CompilerDriver::SetClasspathDexFiles(const std::vector<const DexFile*>& dex_files) {
2893 classpath_classes_.AddDexFiles(dex_files);
2894 }
2895
2896 } // namespace art
2897