/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "runtime_image.h"

#include <lz4.h>
#include <unistd.h>

#include "android-base/file.h"
#include "android-base/stringprintf.h"
#include "android-base/strings.h"
#include "arch/instruction_set.h"
#include "arch/instruction_set_features.h"
#include "base/arena_allocator.h"
#include "base/arena_containers.h"
#include "base/bit_utils.h"
#include "base/file_utils.h"
#include "base/length_prefixed_array.h"
#include "base/scoped_flock.h"
#include "base/stl_util.h"
#include "base/systrace.h"
#include "base/unix_file/fd_file.h"
#include "base/utils.h"
#include "class_loader_context.h"
#include "class_loader_utils.h"
#include "class_root-inl.h"
#include "dex/class_accessor-inl.h"
#include "gc/space/image_space.h"
#include "mirror/object-inl.h"
#include "mirror/object-refvisitor-inl.h"
#include "mirror/object_array-alloc-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/object_array.h"
#include "mirror/string-inl.h"
#include "nterp_helpers.h"
#include "oat/image.h"
#include "oat/oat.h"
#include "profile/profile_compilation_info.h"
#include "scoped_thread_state_change-inl.h"
#include "vdex_file.h"

namespace art HIDDEN {

using android::base::StringPrintf;

/**
 * The native data structures that we store in the image.
 */
enum class NativeRelocationKind {
  kArtFieldArray,
  kArtMethodArray,
  kArtMethod,
  kImTable,
  // For dex cache arrays which can stay in memory even after startup. Those are
  // dex cache arrays whose size is below a given threshold, defined by
  // DexCache::ShouldAllocateFullArray.
  kFullNativeDexCacheArray,
  // For dex cache arrays which we will want to release after app startup.
  kStartupNativeDexCacheArray,
};
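
// Each native object copied into the image gets an entry in
// `native_relocations_`, mapping its runtime pointer to a
// (NativeRelocationKind, offset) pair. The kind selects which image section
// (and which backing buffer during generation) the offset is relative to,
// so the final image address can be computed once section offsets are known
// (see NativeLocationInImage below).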

/**
 * Helper class to generate an app image at runtime.
 */
class RuntimeImageHelper {
 public:
  explicit RuntimeImageHelper(gc::Heap* heap) :
    allocator_(Runtime::Current()->GetArenaPool()),
    objects_(allocator_.Adapter()),
    art_fields_(allocator_.Adapter()),
    art_methods_(allocator_.Adapter()),
    im_tables_(allocator_.Adapter()),
    metadata_(allocator_.Adapter()),
    dex_cache_arrays_(allocator_.Adapter()),
    string_reference_offsets_(allocator_.Adapter()),
    sections_(ImageHeader::kSectionCount, allocator_.Adapter()),
    object_offsets_(allocator_.Adapter()),
    classes_(allocator_.Adapter()),
    array_classes_(allocator_.Adapter()),
    dex_caches_(allocator_.Adapter()),
    class_hashes_(allocator_.Adapter()),
    native_relocations_(allocator_.Adapter()),
    boot_image_begin_(heap->GetBootImagesStartAddress()),
    boot_image_size_(heap->GetBootImagesSize()),
    image_begin_(boot_image_begin_ + boot_image_size_),
    // Note: image relocation considers the image header in the bitmap.
    object_section_size_(sizeof(ImageHeader)),
    intern_table_(InternStringHash(this), InternStringEquals(this)),
    class_table_(ClassDescriptorHash(this), ClassDescriptorEquals()) {}
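
  // Note: `image_begin_` is the end of the boot images, i.e. the app image
  // is laid out directly after them in the address space. Addresses of
  // copied objects are computed as
  // `image_begin_ + sizeof(ImageHeader) + offset` throughout this class.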

  bool Generate(std::string* error_msg) {
    if (!WriteObjects(error_msg)) {
      return false;
    }

    // Generate the sections information stored in the header.
    CreateImageSections();

    // Now that all sections have been created and we know their offset and
    // size, relocate native pointers inside classes and ImTables.
    RelocateNativePointers();

    // Generate the bitmap section, stored kElfSegmentAlignment-aligned after the sections data and
    // of size `object_section_size_` rounded up to kCardSize to match the bitmap size expected by
    // Loader::Init at art::gc::space::ImageSpace.
    size_t sections_end = sections_[ImageHeader::kSectionMetadata].End();
    image_bitmap_ = gc::accounting::ContinuousSpaceBitmap::Create(
        "image bitmap",
        reinterpret_cast<uint8_t*>(image_begin_),
        RoundUp(object_section_size_, gc::accounting::CardTable::kCardSize));
    for (uint32_t offset : object_offsets_) {
      DCHECK(IsAligned<kObjectAlignment>(image_begin_ + sizeof(ImageHeader) + offset));
      image_bitmap_.Set(
          reinterpret_cast<mirror::Object*>(image_begin_ + sizeof(ImageHeader) + offset));
    }
    const size_t bitmap_bytes = image_bitmap_.Size();
    auto* bitmap_section = &sections_[ImageHeader::kSectionImageBitmap];
    // The offset of the bitmap section should be aligned to kElfSegmentAlignment to enable mapping
    // the section from file to memory. However the section size doesn't have to be rounded up as
    // it is located at the end of the file. When mapping file contents to memory, if the last page
    // of the mapping is only partially filled with data, the rest will be zero-filled.
    *bitmap_section = ImageSection(RoundUp(sections_end, kElfSegmentAlignment), bitmap_bytes);

    // Compute boot image checksum and boot image components, to be stored in
    // the header.
    gc::Heap* const heap = Runtime::Current()->GetHeap();
    uint32_t boot_image_components = 0u;
    uint32_t boot_image_checksums = 0u;
    const std::vector<gc::space::ImageSpace*>& image_spaces = heap->GetBootImageSpaces();
    for (size_t i = 0u, size = image_spaces.size(); i != size; ) {
      const ImageHeader& header = image_spaces[i]->GetImageHeader();
      boot_image_components += header.GetComponentCount();
      boot_image_checksums ^= header.GetImageChecksum();
      DCHECK_LE(header.GetImageSpaceCount(), size - i);
      i += header.GetImageSpaceCount();
    }

    header_ = ImageHeader(
        /* image_reservation_size= */ RoundUp(sections_end, kElfSegmentAlignment),
        /* component_count= */ 1,
        image_begin_,
        sections_end,
        sections_.data(),
        /* image_roots= */ image_begin_ + sizeof(ImageHeader),
        /* oat_checksum= */ 0,
        /* oat_file_begin= */ 0,
        /* oat_data_begin= */ 0,
        /* oat_data_end= */ 0,
        /* oat_file_end= */ 0,
        heap->GetBootImagesStartAddress(),
        heap->GetBootImagesSize(),
        boot_image_components,
        boot_image_checksums,
        kRuntimePointerSize);

    // Data size includes everything except the bitmap and the header.
    header_.data_size_ = sections_end - sizeof(ImageHeader);

    // Write image methods - needs to happen after creation of the header.
    WriteImageMethods();

    return true;
  }

  void FillData(std::vector<uint8_t>& data) {
    // Note we don't write the header; we only have it reserved in `data`, as
    // Image::WriteData expects the object section to contain the image header.
    auto compute_dest = [&](const ImageSection& section) {
      return data.data() + section.Offset();
    };

    auto objects_section = header_.GetImageSection(ImageHeader::kSectionObjects);
    memcpy(compute_dest(objects_section) + sizeof(ImageHeader), objects_.data(), objects_.size());

    auto fields_section = header_.GetImageSection(ImageHeader::kSectionArtFields);
    memcpy(compute_dest(fields_section), art_fields_.data(), fields_section.Size());

    auto methods_section = header_.GetImageSection(ImageHeader::kSectionArtMethods);
    memcpy(compute_dest(methods_section), art_methods_.data(), methods_section.Size());

    auto im_tables_section = header_.GetImageSection(ImageHeader::kSectionImTables);
    memcpy(compute_dest(im_tables_section), im_tables_.data(), im_tables_section.Size());

    auto intern_section = header_.GetImageSection(ImageHeader::kSectionInternedStrings);
    intern_table_.WriteToMemory(compute_dest(intern_section));

    auto class_table_section = header_.GetImageSection(ImageHeader::kSectionClassTable);
    class_table_.WriteToMemory(compute_dest(class_table_section));

    auto string_offsets_section =
        header_.GetImageSection(ImageHeader::kSectionStringReferenceOffsets);
    memcpy(compute_dest(string_offsets_section),
           string_reference_offsets_.data(),
           string_offsets_section.Size());

    auto dex_cache_section = header_.GetImageSection(ImageHeader::kSectionDexCacheArrays);
    memcpy(compute_dest(dex_cache_section), dex_cache_arrays_.data(), dex_cache_section.Size());

    auto metadata_section = header_.GetImageSection(ImageHeader::kSectionMetadata);
    memcpy(compute_dest(metadata_section), metadata_.data(), metadata_section.Size());

    DCHECK_EQ(metadata_section.Offset() + metadata_section.Size(), data.size());
  }


  ImageHeader* GetHeader() {
    return &header_;
  }

  const gc::accounting::ContinuousSpaceBitmap& GetImageBitmap() const {
    return image_bitmap_;
  }

  const std::string& GetDexLocation() const {
    return dex_location_;
  }

 private:
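  // Note: implemented with unsigned arithmetic so that a single comparison
  // also rejects pointers below `boot_image_begin_` (the subtraction wraps
  // around to a value >= `boot_image_size_`).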
  bool IsInBootImage(const void* obj) const {
    return reinterpret_cast<uintptr_t>(obj) - boot_image_begin_ < boot_image_size_;
  }

  // Returns the image contents for `cls`. If `cls` is in the boot image, the
  // method just returns it.
  mirror::Class* GetClassContent(ObjPtr<mirror::Class> cls) REQUIRES_SHARED(Locks::mutator_lock_) {
    if (cls == nullptr || IsInBootImage(cls.Ptr())) {
      return cls.Ptr();
    }
    const dex::ClassDef* class_def = cls->GetClassDef();
    DCHECK(class_def != nullptr) << cls->PrettyClass();
    auto it = classes_.find(class_def);
    DCHECK(it != classes_.end()) << cls->PrettyClass();
    mirror::Class* result = reinterpret_cast<mirror::Class*>(objects_.data() + it->second);
    DCHECK(result->GetClass()->IsClass());
    return result;
  }

  // Returns a pointer that can be stored in `objects_`:
  // - The pointer itself for boot image objects,
  // - The future address in the image for all other objects.
  template <typename T> T* GetOrComputeImageAddress(ObjPtr<T> object)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (object == nullptr || IsInBootImage(object.Ptr())) {
      DCHECK(object == nullptr || Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(object));
      return object.Ptr();
    }

    if (object->IsClassLoader()) {
      // DexCache and Class point to class loaders. For runtime-generated app
      // images, we don't encode the class loader. It will be set when the
      // runtime is loading the image.
      return nullptr;
    }

    if (object->GetClass() == GetClassRoot<mirror::ClassExt>()) {
      // No need to encode `ClassExt`. If needed, it will be reconstructed at
      // runtime.
      return nullptr;
    }

    uint32_t offset = 0u;
    if (object->IsClass()) {
      offset = CopyClass(object->AsClass());
    } else if (object->IsDexCache()) {
      offset = CopyDexCache(object->AsDexCache());
    } else {
      offset = CopyObject(object);
    }
    return reinterpret_cast<T*>(image_begin_ + sizeof(ImageHeader) + offset);
  }

  void CreateImageSections() {
    sections_[ImageHeader::kSectionObjects] = ImageSection(0u, object_section_size_);
    sections_[ImageHeader::kSectionArtFields] =
        ImageSection(sections_[ImageHeader::kSectionObjects].End(), art_fields_.size());

    // Round up to the alignment for ArtMethod.
    static_assert(IsAligned<sizeof(void*)>(ArtMethod::Size(kRuntimePointerSize)));
    size_t cur_pos = RoundUp(sections_[ImageHeader::kSectionArtFields].End(), sizeof(void*));
    sections_[ImageHeader::kSectionArtMethods] = ImageSection(cur_pos, art_methods_.size());

    // Round up to the alignment for ImTables.
    cur_pos = RoundUp(sections_[ImageHeader::kSectionArtMethods].End(), sizeof(void*));
    sections_[ImageHeader::kSectionImTables] = ImageSection(cur_pos, im_tables_.size());

    // Round up to the alignment for conflict tables.
    cur_pos = RoundUp(sections_[ImageHeader::kSectionImTables].End(), sizeof(void*));
    sections_[ImageHeader::kSectionIMTConflictTables] = ImageSection(cur_pos, 0u);

    sections_[ImageHeader::kSectionRuntimeMethods] =
        ImageSection(sections_[ImageHeader::kSectionIMTConflictTables].End(), 0u);

    // Round up to the alignment the string table expects. See HashSet::WriteToMemory.
    cur_pos = RoundUp(sections_[ImageHeader::kSectionRuntimeMethods].End(), sizeof(uint64_t));

    size_t intern_table_bytes = intern_table_.WriteToMemory(nullptr);
    sections_[ImageHeader::kSectionInternedStrings] = ImageSection(cur_pos, intern_table_bytes);

    // Obtain the new position and round it up to the appropriate alignment.
    cur_pos = RoundUp(sections_[ImageHeader::kSectionInternedStrings].End(), sizeof(uint64_t));

    size_t class_table_bytes = class_table_.WriteToMemory(nullptr);
    sections_[ImageHeader::kSectionClassTable] = ImageSection(cur_pos, class_table_bytes);

    // Round up to the alignment of the offsets we are going to store.
    cur_pos = RoundUp(sections_[ImageHeader::kSectionClassTable].End(), sizeof(uint32_t));
    sections_[ImageHeader::kSectionStringReferenceOffsets] = ImageSection(
        cur_pos, string_reference_offsets_.size() * sizeof(string_reference_offsets_[0]));

    // Round up to the alignment dex cache arrays expect.
    cur_pos =
        RoundUp(sections_[ImageHeader::kSectionStringReferenceOffsets].End(), sizeof(void*));
    sections_[ImageHeader::kSectionDexCacheArrays] =
        ImageSection(cur_pos, dex_cache_arrays_.size());

    // Round up to the alignment expected for the metadata, which holds dex
    // cache arrays.
    cur_pos = RoundUp(sections_[ImageHeader::kSectionDexCacheArrays].End(), sizeof(void*));
    sections_[ImageHeader::kSectionMetadata] = ImageSection(cur_pos, metadata_.size());
  }
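
  // Resulting image layout, in section order (a sketch; each offset is
  // computed above, rounded up to its alignment):
  //   ImageHeader | objects | ArtFields | ArtMethods | ImTables
  //   | IMT conflict tables (empty) | runtime methods (empty)
  //   | interned strings | class table | string reference offsets
  //   | dex cache arrays | metadata
  // followed by the kElfSegmentAlignment-aligned bitmap section added in
  // Generate().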

  // Returns the copied mirror Object if in the image, or the object directly
  // if in the boot image. For the copy, this is really its content; it should
  // not be returned as an `ObjPtr` (as it's not a GC object), nor stored
  // anywhere.
  template<typename T> T* FromImageOffsetToRuntimeContent(uint32_t offset) {
    if (offset == 0u || IsInBootImage(reinterpret_cast<const void*>(offset))) {
      return reinterpret_cast<T*>(offset);
    }
    uint32_t vector_data_offset = FromImageOffsetToVectorOffset(offset);
    return reinterpret_cast<T*>(objects_.data() + vector_data_offset);
  }

  uint32_t FromImageOffsetToVectorOffset(uint32_t offset) const {
    DCHECK(!IsInBootImage(reinterpret_cast<const void*>(offset)));
    return offset - sizeof(ImageHeader) - image_begin_;
  }
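
  // These two helpers invert the encoding used by `GetOrComputeImageAddress`:
  // a non-boot-image "pointer" stored in a copied object is really
  // `image_begin_ + sizeof(ImageHeader) + offset`, so subtracting those two
  // terms recovers the offset into the `objects_` buffer.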

  class InternStringHash {
   public:
    explicit InternStringHash(RuntimeImageHelper* helper) : helper_(helper) {}

    // NO_THREAD_SAFETY_ANALYSIS as these helpers get passed to `HashSet`.
    size_t operator()(mirror::String* str) const NO_THREAD_SAFETY_ANALYSIS {
      int32_t hash = str->GetStoredHashCode();
      DCHECK_EQ(hash, str->ComputeHashCode());
      // An additional cast to prevent undesired sign extension.
      return static_cast<uint32_t>(hash);
    }

    size_t operator()(uint32_t entry) const NO_THREAD_SAFETY_ANALYSIS {
      return (*this)(helper_->FromImageOffsetToRuntimeContent<mirror::String>(entry));
    }

   private:
    RuntimeImageHelper* helper_;
  };

  class InternStringEquals {
   public:
    explicit InternStringEquals(RuntimeImageHelper* helper) : helper_(helper) {}

    // NO_THREAD_SAFETY_ANALYSIS as these helpers get passed to `HashSet`.
    bool operator()(uint32_t entry, mirror::String* other) const NO_THREAD_SAFETY_ANALYSIS {
      if (kIsDebugBuild) {
        Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
      }
      return other->Equals(helper_->FromImageOffsetToRuntimeContent<mirror::String>(entry));
    }

    bool operator()(uint32_t entry, uint32_t other) const NO_THREAD_SAFETY_ANALYSIS {
      return (*this)(entry, helper_->FromImageOffsetToRuntimeContent<mirror::String>(other));
    }

   private:
    RuntimeImageHelper* helper_;
  };

  using InternTableSet =
        HashSet<uint32_t, DefaultEmptyFn<uint32_t>, InternStringHash, InternStringEquals>;
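
  // Note: intern table entries are 32-bit values, not GC references: either a
  // boot image pointer or an address in the image being generated. That is
  // why the hash/equals helpers above go through
  // `FromImageOffsetToRuntimeContent` to reach the actual string contents.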

  class ClassDescriptorHash {
   public:
    explicit ClassDescriptorHash(RuntimeImageHelper* helper) : helper_(helper) {}

    uint32_t operator()(const ClassTable::TableSlot& slot) const NO_THREAD_SAFETY_ANALYSIS {
      uint32_t ptr = slot.NonHashData();
      if (helper_->IsInBootImage(reinterpret_cast32<const void*>(ptr))) {
        return reinterpret_cast32<mirror::Class*>(ptr)->DescriptorHash();
      }
      return helper_->class_hashes_.Get(helper_->FromImageOffsetToVectorOffset(ptr));
    }

   private:
    RuntimeImageHelper* helper_;
  };

  class ClassDescriptorEquals {
   public:
    ClassDescriptorEquals() {}

    bool operator()(const ClassTable::TableSlot& a, const ClassTable::TableSlot& b)
        const NO_THREAD_SAFETY_ANALYSIS {
      // No need to fetch the descriptor: we know the classes we are inserting
      // in the ClassTable are unique.
      return a.Data() == b.Data();
    }
  };

  using ClassTableSet = HashSet<ClassTable::TableSlot,
                                ClassTable::TableSlotEmptyFn,
                                ClassDescriptorHash,
                                ClassDescriptorEquals>;

  // Helper class to collect classes that we will generate in the image.
  class ClassTableVisitor {
   public:
    ClassTableVisitor(Handle<mirror::ClassLoader> loader, VariableSizedHandleScope& handles)
        : loader_(loader), handles_(handles) {}

    bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
      // Record app classes and boot classpath classes: app classes will be
      // generated in the image and put in the class table; boot classpath
      // classes will only be put in the class table.
      ObjPtr<mirror::ClassLoader> class_loader = klass->GetClassLoader();
      if (klass->IsResolved() && (class_loader == loader_.Get() || class_loader == nullptr)) {
        handles_.NewHandle(klass);
      }
      return true;
    }

   private:
    Handle<mirror::ClassLoader> loader_;
    VariableSizedHandleScope& handles_;
  };

  // Helper class visitor to filter out classes we cannot emit.
  class PruneVisitor {
   public:
    PruneVisitor(Thread* self,
                 RuntimeImageHelper* helper,
                 const ArenaSet<const DexFile*>& dex_files,
                 ArenaVector<Handle<mirror::Class>>& classes,
                 ArenaAllocator& allocator)
        : self_(self),
          helper_(helper),
          dex_files_(dex_files),
          visited_(allocator.Adapter()),
          classes_to_write_(classes) {}

    bool CanEmitHelper(Handle<mirror::Class> cls) REQUIRES_SHARED(Locks::mutator_lock_) {
      // If the class comes from a dex file which is not part of the primary
      // APK, don't encode it.
      if (!ContainsElement(dex_files_, &cls->GetDexFile())) {
        return false;
      }

      // Ensure pointers to classes in `cls` can also be emitted.
      StackHandleScope<1> hs(self_);
      MutableHandle<mirror::Class> other_class = hs.NewHandle(cls->GetSuperClass());
      if (!CanEmit(other_class)) {
        return false;
      }

      other_class.Assign(cls->GetComponentType());
      if (!CanEmit(other_class)) {
        return false;
      }

      for (size_t i = 0, num_interfaces = cls->NumDirectInterfaces(); i < num_interfaces; ++i) {
        other_class.Assign(cls->GetDirectInterface(i));
        DCHECK(other_class != nullptr);
        if (!CanEmit(other_class)) {
          return false;
        }
      }
      return true;
    }

    bool CanEmit(Handle<mirror::Class> cls) REQUIRES_SHARED(Locks::mutator_lock_) {
      if (cls == nullptr) {
        return true;
      }
      DCHECK(cls->IsResolved());
      // Only emit classes that are resolved and not erroneous.
      if (cls->IsErroneous()) {
        return false;
      }

      // Proxy classes are generated at runtime, so don't emit them.
      if (cls->IsProxyClass()) {
        return false;
      }

      // Classes in the boot image can be trivially encoded directly.
      if (helper_->IsInBootImage(cls.Get())) {
        return true;
      }

      if (cls->IsBootStrapClassLoaded()) {
        // We cannot encode classes that are part of the boot classpath.
        return false;
      }

      DCHECK(!cls->IsPrimitive());

      if (cls->IsArrayClass()) {
        if (cls->IsBootStrapClassLoaded()) {
          // For boot classpath arrays, we can only emit them if they are
          // in the boot image already.
          return helper_->IsInBootImage(cls.Get());
        }
        ObjPtr<mirror::Class> temp = cls.Get();
        while ((temp = temp->GetComponentType())->IsArrayClass()) {}
        StackHandleScope<1> hs(self_);
        Handle<mirror::Class> other_class = hs.NewHandle(temp);
        return CanEmit(other_class);
      }
      const dex::ClassDef* class_def = cls->GetClassDef();
      DCHECK_NE(class_def, nullptr);
      auto existing = visited_.find(class_def);
      if (existing != visited_.end()) {
        // Already processed.
        return existing->second == VisitState::kCanEmit;
      }

      visited_.Put(class_def, VisitState::kVisiting);
      if (CanEmitHelper(cls)) {
        visited_.Overwrite(class_def, VisitState::kCanEmit);
        return true;
      } else {
        visited_.Overwrite(class_def, VisitState::kCannotEmit);
        return false;
      }
    }

    void Visit(Handle<mirror::Object> obj) REQUIRES_SHARED(Locks::mutator_lock_) {
      MutableHandle<mirror::Class> cls(obj.GetReference());
      if (CanEmit(cls)) {
        if (cls->IsBootStrapClassLoaded()) {
          DCHECK(helper_->IsInBootImage(cls.Get()));
          // Insert the bootclasspath class in the class table.
          uint32_t hash = cls->DescriptorHash();
          helper_->class_table_.InsertWithHash(ClassTable::TableSlot(cls.Get(), hash), hash);
        } else {
          classes_to_write_.push_back(cls);
        }
      }
    }

   private:
    enum class VisitState {
      kVisiting,
      kCanEmit,
      kCannotEmit,
    };
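
    // Note: `kVisiting` also breaks cycles in `CanEmit`: a class reached
    // again while it is being visited compares unequal to `kCanEmit` and is
    // conservatively reported as not emittable for that inner query.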

    Thread* const self_;
    RuntimeImageHelper* const helper_;
    const ArenaSet<const DexFile*>& dex_files_;
    ArenaSafeMap<const dex::ClassDef*, VisitState> visited_;
    ArenaVector<Handle<mirror::Class>>& classes_to_write_;
  };

  void EmitClasses(Thread* self, Handle<mirror::ObjectArray<mirror::Object>> dex_cache_array)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    ScopedTrace trace("Emit strings and classes");
    ArenaSet<const DexFile*> dex_files(allocator_.Adapter());
    for (int32_t i = 0; i < dex_cache_array->GetLength(); ++i) {
      dex_files.insert(dex_cache_array->Get(i)->AsDexCache()->GetDexFile());
    }

    StackHandleScope<1> hs(self);
    Handle<mirror::ClassLoader> loader = hs.NewHandle(
        dex_cache_array->Get(0)->AsDexCache()->GetClassLoader());
    ClassTable* const class_table = loader->GetClassTable();
    if (class_table == nullptr) {
      return;
    }

    VariableSizedHandleScope handles(self);
    {
      ClassTableVisitor class_table_visitor(loader, handles);
      class_table->Visit(class_table_visitor);
    }

    ArenaVector<Handle<mirror::Class>> classes_to_write(allocator_.Adapter());
    classes_to_write.reserve(class_table->Size());
    {
      PruneVisitor prune_visitor(self, this, dex_files, classes_to_write, allocator_);
      handles.VisitHandles(prune_visitor);
    }

    for (Handle<mirror::Class> cls : classes_to_write) {
      {
        ScopedAssertNoThreadSuspension sants("Writing class");
        CopyClass(cls.Get());
      }
      self->AllowThreadSuspension();
    }

    // Relocate the type array entries. We do this now before creating image
    // sections because we may add new boot image classes into our
    // `class_table_`.
    for (auto entry : dex_caches_) {
      const DexFile& dex_file = *entry.first;
      mirror::DexCache* cache = reinterpret_cast<mirror::DexCache*>(&objects_[entry.second]);
      mirror::GcRootArray<mirror::Class>* old_types_array = cache->GetResolvedTypesArray();
      if (HasNativeRelocation(old_types_array)) {
        auto reloc_it = native_relocations_.find(old_types_array);
        DCHECK(reloc_it != native_relocations_.end());
        ArenaVector<uint8_t>& data =
            (reloc_it->second.first == NativeRelocationKind::kFullNativeDexCacheArray)
                ? dex_cache_arrays_ : metadata_;
        mirror::GcRootArray<mirror::Class>* content_array =
            reinterpret_cast<mirror::GcRootArray<mirror::Class>*>(
                data.data() + reloc_it->second.second);
        for (uint32_t i = 0; i < dex_file.NumTypeIds(); ++i) {
          ObjPtr<mirror::Class> cls = old_types_array->Get(i);
          if (cls == nullptr) {
            content_array->Set(i, nullptr);
          } else if (IsInBootImage(cls.Ptr())) {
            if (!cls->IsPrimitive()) {
              // The dex cache is concurrently updated by the app. If the class
              // collection logic in `PruneVisitor` did not see this class, insert it now.
              // Note that application class tables do not contain primitive
              // classes.
              uint32_t hash = cls->DescriptorHash();
              class_table_.InsertWithHash(ClassTable::TableSlot(cls.Ptr(), hash), hash);
            }
            content_array->Set(i, cls.Ptr());
          } else if (cls->IsArrayClass()) {
            std::string class_name;
            cls->GetDescriptor(&class_name);
            auto class_it = array_classes_.find(class_name);
            if (class_it == array_classes_.end()) {
              content_array->Set(i, nullptr);
            } else {
              mirror::Class* ptr = reinterpret_cast<mirror::Class*>(
                  image_begin_ + sizeof(ImageHeader) + class_it->second);
              content_array->Set(i, ptr);
            }
          } else {
            DCHECK(!cls->IsPrimitive());
            DCHECK(!cls->IsProxyClass());
            const dex::ClassDef* class_def = cls->GetClassDef();
            DCHECK_NE(class_def, nullptr);
            auto class_it = classes_.find(class_def);
            if (class_it == classes_.end()) {
              content_array->Set(i, nullptr);
            } else {
              mirror::Class* ptr = reinterpret_cast<mirror::Class*>(
                  image_begin_ + sizeof(ImageHeader) + class_it->second);
              content_array->Set(i, ptr);
            }
          }
        }
      }
    }
  }

  // Helper visitor returning the location of a native pointer in the image.
  class NativePointerVisitor {
   public:
    explicit NativePointerVisitor(RuntimeImageHelper* helper) : helper_(helper) {}

    template <typename T>
    T* operator()(T* ptr, [[maybe_unused]] void** dest_addr) const {
      return helper_->NativeLocationInImage(ptr, /* must_have_relocation= */ true);
    }

    template <typename T> T* operator()(T* ptr, bool must_have_relocation = true) const {
      return helper_->NativeLocationInImage(ptr, must_have_relocation);
    }

   private:
    RuntimeImageHelper* helper_;
  };

  template <typename T> T* NativeLocationInImage(T* ptr, bool must_have_relocation) const {
    if (ptr == nullptr || IsInBootImage(ptr)) {
      return ptr;
    }

    auto it = native_relocations_.find(ptr);
    if (it == native_relocations_.end()) {
      DCHECK(!must_have_relocation);
      return nullptr;
    }
    switch (it->second.first) {
      case NativeRelocationKind::kArtMethod:
      case NativeRelocationKind::kArtMethodArray: {
        uint32_t offset = sections_[ImageHeader::kSectionArtMethods].Offset();
        return reinterpret_cast<T*>(image_begin_ + offset + it->second.second);
      }
      case NativeRelocationKind::kArtFieldArray: {
        uint32_t offset = sections_[ImageHeader::kSectionArtFields].Offset();
        return reinterpret_cast<T*>(image_begin_ + offset + it->second.second);
      }
      case NativeRelocationKind::kImTable: {
        uint32_t offset = sections_[ImageHeader::kSectionImTables].Offset();
        return reinterpret_cast<T*>(image_begin_ + offset + it->second.second);
      }
      case NativeRelocationKind::kStartupNativeDexCacheArray: {
        uint32_t offset = sections_[ImageHeader::kSectionMetadata].Offset();
        return reinterpret_cast<T*>(image_begin_ + offset + it->second.second);
      }
      case NativeRelocationKind::kFullNativeDexCacheArray: {
        uint32_t offset = sections_[ImageHeader::kSectionDexCacheArrays].Offset();
        return reinterpret_cast<T*>(image_begin_ + offset + it->second.second);
      }
    }
  }
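
  // Note: the switch above is exhaustive over `NativeRelocationKind` and
  // every case returns, so no fall-through return is needed: the relocated
  // address is always the section offset plus the offset recorded in
  // `native_relocations_`, rebased on `image_begin_`.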

  template <typename Visitor>
  void RelocateMethodPointerArrays(mirror::Class* klass, const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    // A bit of magic here: we cast contents from our buffer to mirror::Class,
    // and do pointer comparison between 1) these classes, and 2) boot image
    // objects. Neither kind moves.

    // See if we need to fixup the vtable field.
    mirror::Class* super = FromImageOffsetToRuntimeContent<mirror::Class>(
        reinterpret_cast32<uint32_t>(
            klass->GetSuperClass<kVerifyNone, kWithoutReadBarrier>().Ptr()));
    DCHECK(super != nullptr) << "j.l.Object should never be in an app runtime image";
    mirror::PointerArray* vtable = FromImageOffsetToRuntimeContent<mirror::PointerArray>(
        reinterpret_cast32<uint32_t>(klass->GetVTable<kVerifyNone, kWithoutReadBarrier>().Ptr()));
    mirror::PointerArray* super_vtable = FromImageOffsetToRuntimeContent<mirror::PointerArray>(
        reinterpret_cast32<uint32_t>(super->GetVTable<kVerifyNone, kWithoutReadBarrier>().Ptr()));
    if (vtable != nullptr && vtable != super_vtable) {
      DCHECK(!IsInBootImage(vtable));
      vtable->Fixup(vtable, kRuntimePointerSize, visitor);
    }

    // See if we need to fixup entries in the IfTable.
    mirror::IfTable* iftable = FromImageOffsetToRuntimeContent<mirror::IfTable>(
        reinterpret_cast32<uint32_t>(
            klass->GetIfTable<kVerifyNone, kWithoutReadBarrier>().Ptr()));
    mirror::IfTable* super_iftable = FromImageOffsetToRuntimeContent<mirror::IfTable>(
        reinterpret_cast32<uint32_t>(
            super->GetIfTable<kVerifyNone, kWithoutReadBarrier>().Ptr()));
    int32_t iftable_count = iftable->Count();
    int32_t super_iftable_count = super_iftable->Count();
    for (int32_t i = 0; i < iftable_count; ++i) {
      mirror::PointerArray* methods = FromImageOffsetToRuntimeContent<mirror::PointerArray>(
          reinterpret_cast32<uint32_t>(
              iftable->GetMethodArrayOrNull<kVerifyNone, kWithoutReadBarrier>(i).Ptr()));
      mirror::PointerArray* super_methods = (i < super_iftable_count)
          ? FromImageOffsetToRuntimeContent<mirror::PointerArray>(
                reinterpret_cast32<uint32_t>(
                    super_iftable->GetMethodArrayOrNull<kVerifyNone, kWithoutReadBarrier>(i).Ptr()))
          : nullptr;
      if (methods != super_methods) {
        DCHECK(!IsInBootImage(methods));
        methods->Fixup(methods, kRuntimePointerSize, visitor);
      }
    }
  }

  template <typename Visitor, typename T>
  void RelocateNativeDexCacheArray(mirror::NativeArray<T>* old_method_array,
                                   uint32_t num_ids,
                                   const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (old_method_array == nullptr) {
      return;
    }

    auto it = native_relocations_.find(old_method_array);
    DCHECK(it != native_relocations_.end());
    ArenaVector<uint8_t>& data =
        (it->second.first == NativeRelocationKind::kFullNativeDexCacheArray)
            ? dex_cache_arrays_ : metadata_;

    mirror::NativeArray<T>* content_array =
        reinterpret_cast<mirror::NativeArray<T>*>(data.data() + it->second.second);
    for (uint32_t i = 0; i < num_ids; ++i) {
      // We may not have relocations for some entries, in which case we'll
      // just store null.
      content_array->Set(i, visitor(content_array->Get(i), /* must_have_relocation= */ false));
    }
  }

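  // Relocates both the dex cache's array pointers (the Set* calls below) and
  // the entries of the native method/field arrays. Entries of the type
  // GcRootArray are relocated separately in EmitClasses.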
  template <typename Visitor>
  void RelocateDexCacheArrays(mirror::DexCache* cache,
                              const DexFile& dex_file,
                              const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    mirror::NativeArray<ArtMethod>* old_method_array = cache->GetResolvedMethodsArray();
    cache->SetResolvedMethodsArray(visitor(old_method_array));
    RelocateNativeDexCacheArray(old_method_array, dex_file.NumMethodIds(), visitor);

    mirror::NativeArray<ArtField>* old_field_array = cache->GetResolvedFieldsArray();
    cache->SetResolvedFieldsArray(visitor(old_field_array));
    RelocateNativeDexCacheArray(old_field_array, dex_file.NumFieldIds(), visitor);

    mirror::GcRootArray<mirror::String>* old_strings_array = cache->GetStringsArray();
    cache->SetStringsArray(visitor(old_strings_array));

    mirror::GcRootArray<mirror::Class>* old_types_array = cache->GetResolvedTypesArray();
    cache->SetResolvedTypesArray(visitor(old_types_array));
  }

  void RelocateNativePointers() {
    ScopedTrace relocate_native_pointers("Relocate native pointers");
    ScopedObjectAccess soa(Thread::Current());
    NativePointerVisitor visitor(this);
    for (auto&& entry : classes_) {
      mirror::Class* cls = reinterpret_cast<mirror::Class*>(&objects_[entry.second]);
      cls->FixupNativePointers(cls, kRuntimePointerSize, visitor);
      RelocateMethodPointerArrays(cls, visitor);
    }
    for (auto&& entry : array_classes_) {
      mirror::Class* cls = reinterpret_cast<mirror::Class*>(&objects_[entry.second]);
      cls->FixupNativePointers(cls, kRuntimePointerSize, visitor);
      RelocateMethodPointerArrays(cls, visitor);
    }
    for (auto&& entry : native_relocations_) {
      if (entry.second.first == NativeRelocationKind::kImTable) {
        ImTable* im_table = reinterpret_cast<ImTable*>(im_tables_.data() + entry.second.second);
        RelocateImTable(im_table, visitor);
      }
    }
    for (auto&& entry : dex_caches_) {
      mirror::DexCache* cache = reinterpret_cast<mirror::DexCache*>(&objects_[entry.second]);
      RelocateDexCacheArrays(cache, *entry.first, visitor);
    }
  }

  void RelocateImTable(ImTable* im_table, const NativePointerVisitor& visitor) {
    for (size_t i = 0; i < ImTable::kSize; ++i) {
      ArtMethod* method = im_table->Get(i, kRuntimePointerSize);
      ArtMethod* new_method = nullptr;
      if (method->IsRuntimeMethod() && !IsInBootImage(method)) {
        // New IMT conflict method: just use the boot image version.
        // TODO: Consider copying the new IMT conflict method.
        new_method = Runtime::Current()->GetImtConflictMethod();
        DCHECK(IsInBootImage(new_method));
      } else {
        new_method = visitor(method);
      }
      if (method != new_method) {
        im_table->Set(i, new_method, kRuntimePointerSize);
      }
    }
  }

  void CopyFieldArrays(ObjPtr<mirror::Class> cls, uint32_t class_image_address)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    LengthPrefixedArray<ArtField>* fields[] = {
        cls->GetSFieldsPtr(), cls->GetIFieldsPtr(),
    };
    for (LengthPrefixedArray<ArtField>* cur_fields : fields) {
      if (cur_fields != nullptr) {
        // Copy the array.
        size_t number_of_fields = cur_fields->size();
        size_t size = LengthPrefixedArray<ArtField>::ComputeSize(number_of_fields);
        size_t offset = art_fields_.size();
        art_fields_.resize(offset + size);
        auto* dest_array =
            reinterpret_cast<LengthPrefixedArray<ArtField>*>(art_fields_.data() + offset);
        memcpy(dest_array, cur_fields, size);
        native_relocations_.Put(cur_fields,
                                std::make_pair(NativeRelocationKind::kArtFieldArray, offset));

        // Update the class pointer of individual fields.
        for (size_t i = 0; i != number_of_fields; ++i) {
          dest_array->At(i).GetDeclaringClassAddressWithoutBarrier()->Assign(
              reinterpret_cast<mirror::Class*>(class_image_address));
        }
      }
    }
  }

  void CopyMethodArrays(ObjPtr<mirror::Class> cls,
                        uint32_t class_image_address,
                        bool is_class_initialized)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    size_t number_of_methods = cls->NumMethods();
    if (number_of_methods == 0) {
      return;
    }

    size_t size = LengthPrefixedArray<ArtMethod>::ComputeSize(number_of_methods);
    size_t offset = art_methods_.size();
    art_methods_.resize(offset + size);
    auto* dest_array =
        reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(art_methods_.data() + offset);
    memcpy(dest_array, cls->GetMethodsPtr(), size);
    native_relocations_.Put(cls->GetMethodsPtr(),
                            std::make_pair(NativeRelocationKind::kArtMethodArray, offset));

    for (size_t i = 0; i != number_of_methods; ++i) {
      ArtMethod* method = &cls->GetMethodsPtr()->At(i);
      ArtMethod* copy = &dest_array->At(i);

      // Update the class pointer.
      ObjPtr<mirror::Class> declaring_class = method->GetDeclaringClass();
      if (declaring_class == cls) {
        copy->GetDeclaringClassAddressWithoutBarrier()->Assign(
            reinterpret_cast<mirror::Class*>(class_image_address));
      } else {
        DCHECK(method->IsCopied());
        if (!IsInBootImage(declaring_class.Ptr())) {
          DCHECK(classes_.find(declaring_class->GetClassDef()) != classes_.end());
          copy->GetDeclaringClassAddressWithoutBarrier()->Assign(
              reinterpret_cast<mirror::Class*>(
                  image_begin_ +
                  sizeof(ImageHeader) +
                  classes_.Get(declaring_class->GetClassDef())));
        }
      }

      // Record the native relocation of the method.
      uintptr_t copy_offset =
          reinterpret_cast<uintptr_t>(copy) - reinterpret_cast<uintptr_t>(art_methods_.data());
      native_relocations_.Put(method,
                              std::make_pair(NativeRelocationKind::kArtMethod, copy_offset));

      // Ignore the single-implementation info for abstract methods.
      if (method->IsAbstract()) {
        copy->SetHasSingleImplementation(false);
        copy->SetSingleImplementation(nullptr, kRuntimePointerSize);
      }

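      // Entrypoint selection summary: native methods start on the generic JNI
      // trampoline; methods of unverified classes (and, as a fallback, all
      // others) use the interpreter bridge; methods still needing a clinit
      // check go through the resolution trampoline; otherwise nterp is used
      // when supported.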
      // Set the entrypoint and data pointer of the method.
      StubType stub;
      if (method->IsNative()) {
        stub = StubType::kQuickGenericJNITrampoline;
      } else if (!cls->IsVerified()) {
        stub = StubType::kQuickToInterpreterBridge;
      } else if (!is_class_initialized && method->NeedsClinitCheckBeforeCall()) {
        stub = StubType::kQuickResolutionTrampoline;
      } else if (interpreter::IsNterpSupported() && CanMethodUseNterp(method)) {
        stub = StubType::kNterpTrampoline;
      } else {
        stub = StubType::kQuickToInterpreterBridge;
      }
      const std::vector<gc::space::ImageSpace*>& image_spaces =
          Runtime::Current()->GetHeap()->GetBootImageSpaces();
      DCHECK(!image_spaces.empty());
      const OatFile* oat_file = image_spaces[0]->GetOatFile();
      DCHECK(oat_file != nullptr);
      const OatHeader& header = oat_file->GetOatHeader();
      const void* entrypoint = header.GetOatAddress(stub);
      if (method->IsNative() && (is_class_initialized || !method->NeedsClinitCheckBeforeCall())) {
        // Use boot JNI stub if found.
        ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
        const void* boot_jni_stub = class_linker->FindBootJniStub(method);
        if (boot_jni_stub != nullptr) {
          entrypoint = boot_jni_stub;
        }
      }
      copy->SetEntryPointFromQuickCompiledCode(entrypoint);

      if (method->IsNative()) {
        StubType stub_type = method->IsCriticalNative()
            ? StubType::kJNIDlsymLookupCriticalTrampoline
            : StubType::kJNIDlsymLookupTrampoline;
        copy->SetEntryPointFromJni(header.GetOatAddress(stub_type));
      } else if (method->HasCodeItem()) {
        const uint8_t* code_item = reinterpret_cast<const uint8_t*>(method->GetCodeItem());
        DCHECK_GE(code_item, method->GetDexFile()->DataBegin());
        uint32_t code_item_offset = dchecked_integral_cast<uint32_t>(
            code_item - method->GetDexFile()->DataBegin());
        copy->SetDataPtrSize(
            reinterpret_cast<const void*>(code_item_offset), kRuntimePointerSize);
      }
    }
  }
  void CopyImTable(ObjPtr<mirror::Class> cls) REQUIRES_SHARED(Locks::mutator_lock_) {
    ImTable* table = cls->GetImt(kRuntimePointerSize);

    // If the table is null, shared (in the boot image), or already emitted,
    // we can skip.
    if (table == nullptr || IsInBootImage(table) || HasNativeRelocation(table)) {
      return;
    }
    const size_t size = ImTable::SizeInBytes(kRuntimePointerSize);
    size_t offset = im_tables_.size();
    im_tables_.resize(offset + size);
    uint8_t* dest = im_tables_.data() + offset;
    memcpy(dest, table, size);
    native_relocations_.Put(table, std::make_pair(NativeRelocationKind::kImTable, offset));
  }

  bool HasNativeRelocation(void* ptr) const {
    return native_relocations_.find(ptr) != native_relocations_.end();
  }


  static void LoadClassesFromReferenceProfile(
      Thread* self,
      const dchecked_vector<Handle<mirror::DexCache>>& dex_caches)
          REQUIRES_SHARED(Locks::mutator_lock_) {
    AppInfo* app_info = Runtime::Current()->GetAppInfo();
    std::string profile_file = app_info->GetPrimaryApkReferenceProfile();

    if (profile_file.empty()) {
      return;
    }

    // Lock the file; it could be concurrently updated by the system. Don't
    // block, as this is app startup sensitive.
    std::string error;
    ScopedFlock profile =
        LockedFile::Open(profile_file.c_str(), O_RDONLY, /*block=*/false, &error);

    if (profile == nullptr) {
      LOG(DEBUG) << "Couldn't lock the profile file " << profile_file << ": " << error;
      return;
    }

    ProfileCompilationInfo profile_info(/* for_boot_image= */ false);

    if (!profile_info.Load(profile->Fd())) {
      LOG(DEBUG) << "Could not load profile file";
      return;
    }

    StackHandleScope<1> hs(self);
    Handle<mirror::ClassLoader> class_loader =
        hs.NewHandle<mirror::ClassLoader>(dex_caches[0]->GetClassLoader());
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    ScopedTrace loading_classes("Loading classes from profile");
    for (auto dex_cache : dex_caches) {
      const DexFile* dex_file = dex_cache->GetDexFile();
      const ArenaSet<dex::TypeIndex>* class_types = profile_info.GetClasses(*dex_file);
      if (class_types == nullptr) {
        // This means the profile file did not reference the dex file, which is
        // the case if there are no classes or methods of that dex file in the
        // profile.
        continue;
      }

      for (dex::TypeIndex idx : *class_types) {
        // The index is greater than or equal to NumTypeIds if the type is an
        // extra descriptor, not referenced by the dex file.
        if (idx.index_ < dex_file->NumTypeIds()) {
          ObjPtr<mirror::Class> klass = class_linker->ResolveType(idx, dex_cache, class_loader);
          if (klass == nullptr) {
            self->ClearException();
            LOG(DEBUG) << "Failed to preload " << dex_file->PrettyType(idx);
            continue;
          }
        }
      }
    }
  }

  bool WriteObjects(std::string* error_msg) {
    ScopedTrace write_objects("Writing objects");
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    ScopedObjectAccess soa(Thread::Current());
    VariableSizedHandleScope handles(soa.Self());

    Handle<mirror::Class> object_array_class = handles.NewHandle(
        GetClassRoot<mirror::ObjectArray<mirror::Object>>(class_linker));

    Handle<mirror::ObjectArray<mirror::Object>> image_roots = handles.NewHandle(
        mirror::ObjectArray<mirror::Object>::Alloc(
            soa.Self(), object_array_class.Get(), ImageHeader::kImageRootsMax));

    if (image_roots == nullptr) {
      DCHECK(soa.Self()->IsExceptionPending());
      soa.Self()->ClearException();
      *error_msg = "Out of memory when trying to generate a runtime app image";
      return false;
    }

    // Find the dex files that will be used for generating the app image.
    dchecked_vector<Handle<mirror::DexCache>> dex_caches;
    FindDexCaches(soa.Self(), dex_caches, handles);

    if (dex_caches.size() == 0) {
      *error_msg = "Did not find dex caches to generate an app image";
      return false;
    }
    const OatDexFile* oat_dex_file = dex_caches[0]->GetDexFile()->GetOatDexFile();
    VdexFile* vdex_file = oat_dex_file->GetOatFile()->GetVdexFile();
    // The first entry in `dex_caches` contains the location of the primary APK.
    dex_location_ = oat_dex_file->GetDexFileLocation();

    size_t number_of_dex_files = vdex_file->GetNumberOfDexFiles();
    if (number_of_dex_files != dex_caches.size()) {
      // This means some dex files haven't been executed. For simplicity, just
      // register them and recollect dex caches.
      Handle<mirror::ClassLoader> loader = handles.NewHandle(dex_caches[0]->GetClassLoader());
      VisitClassLoaderDexFiles(soa.Self(), loader, [&](const art::DexFile* dex_file)
          REQUIRES_SHARED(Locks::mutator_lock_) {
        class_linker->RegisterDexFile(*dex_file, dex_caches[0]->GetClassLoader());
        return true;  // Continue with other dex files.
      });
      dex_caches.clear();
      FindDexCaches(soa.Self(), dex_caches, handles);
      if (number_of_dex_files != dex_caches.size()) {
        *error_msg = "Number of dex caches does not match number of dex files in the primary APK";
        return false;
      }
    }

    // If classes referenced in the reference profile are not loaded, preload
    // them. This makes sure we generate a good runtime app image, even if this
    // current app run did not load all startup classes.
    LoadClassesFromReferenceProfile(soa.Self(), dex_caches);

    // We store the checksums of the dex files used at runtime. These can be
    // different from the vdex checksums due to compact dex.
    std::vector<uint32_t> checksums(number_of_dex_files);
    uint32_t checksum_index = 0;
    for (const OatDexFile* current_oat_dex_file : oat_dex_file->GetOatFile()->GetOatDexFiles()) {
      const DexFile::Header* header =
          reinterpret_cast<const DexFile::Header*>(current_oat_dex_file->GetDexFilePointer());
      checksums[checksum_index++] = header->checksum_;
    }
    DCHECK_EQ(checksum_index, number_of_dex_files);

    // Create the fake OatHeader to store the dependencies of the image.
    SafeMap<std::string, std::string> key_value_store;
    Runtime* runtime = Runtime::Current();
    key_value_store.Put(OatHeader::kApexVersionsKey, runtime->GetApexVersions());
    key_value_store.Put(OatHeader::kBootClassPathKey,
                        android::base::Join(runtime->GetBootClassPathLocations(), ':'));
    key_value_store.Put(OatHeader::kBootClassPathChecksumsKey,
                        runtime->GetBootClassPathChecksums());
    key_value_store.Put(OatHeader::kClassPathKey,
                        oat_dex_file->GetOatFile()->GetClassLoaderContext());
    key_value_store.Put(OatHeader::kConcurrentCopying,
                        gUseReadBarrier ? OatHeader::kTrueValue : OatHeader::kFalseValue);

    std::unique_ptr<const InstructionSetFeatures> isa_features =
        InstructionSetFeatures::FromCppDefines();
    std::unique_ptr<OatHeader> oat_header(
        OatHeader::Create(kRuntimeQuickCodeISA,
                          isa_features.get(),
                          number_of_dex_files,
                          &key_value_store));

    // Create the byte array containing the oat header and dex checksums.
    uint32_t checksums_size = checksums.size() * sizeof(uint32_t);
    Handle<mirror::ByteArray> header_data = handles.NewHandle(
        mirror::ByteArray::Alloc(soa.Self(), oat_header->GetHeaderSize() + checksums_size));

    if (header_data == nullptr) {
      DCHECK(soa.Self()->IsExceptionPending());
      soa.Self()->ClearException();
      *error_msg = "Out of memory when trying to generate a runtime app image";
      return false;
    }

    memcpy(header_data->GetData(), oat_header.get(), oat_header->GetHeaderSize());
    memcpy(header_data->GetData() + oat_header->GetHeaderSize(), checksums.data(), checksums_size);

    // Create and populate the dex cache array.
    Handle<mirror::ObjectArray<mirror::Object>> dex_cache_array = handles.NewHandle(
        mirror::ObjectArray<mirror::Object>::Alloc(
            soa.Self(), object_array_class.Get(), dex_caches.size()));

    if (dex_cache_array == nullptr) {
      DCHECK(soa.Self()->IsExceptionPending());
      soa.Self()->ClearException();
      *error_msg = "Out of memory when trying to generate a runtime app image";
      return false;
    }

    for (uint32_t i = 0; i < dex_caches.size(); ++i) {
      dex_cache_array->Set(i, dex_caches[i].Get());
    }

    image_roots->Set(ImageHeader::kDexCaches, dex_cache_array.Get());
    image_roots->Set(ImageHeader::kClassRoots, class_linker->GetClassRoots());
    image_roots->Set(ImageHeader::kAppImageOatHeader, header_data.Get());

    {
      // Now that we have created all objects needed for the `image_roots`, copy
      // it into the buffer. Note that this will recursively copy all objects
      // contained in `image_roots`. That's acceptable as we have neither
      // cycles nor a deep graph.
      ScopedAssertNoThreadSuspension sants("Writing runtime app image");
      CopyObject(image_roots.Get());
    }

    // Emit classes defined in the app class loader (which will also indirectly
    // emit dex caches and their arrays).
    EmitClasses(soa.Self(), dex_cache_array);

    return true;
  }
1202   class FixupVisitor {
1203    public:
1204     FixupVisitor(RuntimeImageHelper* image, size_t copy_offset)
1205         : image_(image), copy_offset_(copy_offset) {}
1206 
1207     // We do not visit native roots. These are handled with other logic.
1208     void VisitRootIfNonNull(
1209         [[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {
1210       LOG(FATAL) << "UNREACHABLE";
1211     }
1212     void VisitRoot([[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {
1213       LOG(FATAL) << "UNREACHABLE";
1214     }
1215 
1216     void operator()(ObjPtr<mirror::Object> obj,
1217                     MemberOffset offset,
1218                     bool is_static) const
1219         REQUIRES_SHARED(Locks::mutator_lock_) {
1220       // We don't copy static fields; they are handled when we try to
1221       // initialize the class.
1222       ObjPtr<mirror::Object> ref =
1223           is_static ? nullptr : obj->GetFieldObject<mirror::Object>(offset);
1224       mirror::Object* address = image_->GetOrComputeImageAddress(ref);
1225       mirror::Object* copy =
1226           reinterpret_cast<mirror::Object*>(image_->objects_.data() + copy_offset_);
1227       copy->GetFieldObjectReferenceAddr<kVerifyNone>(offset)->Assign(address);
1228     }
1229 
1230     // java.lang.ref.Reference visitor.
1231     void operator()([[maybe_unused]] ObjPtr<mirror::Class> klass,
1232                     ObjPtr<mirror::Reference> ref) const REQUIRES_SHARED(Locks::mutator_lock_) {
1233       operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
1234     }
1235 
1236    private:
1237     RuntimeImageHelper* image_;
1238     size_t copy_offset_;
1239   };
1240 
1241   template <typename T>
1242   void CopyNativeDexCacheArray(uint32_t num_entries,
1243                                uint32_t max_entries,
1244                                mirror::NativeArray<T>* array) {
1245     if (array == nullptr) {
1246       return;
1247     }
1248 
1249     bool only_startup = !mirror::DexCache::ShouldAllocateFullArray(num_entries, max_entries);
1250     ArenaVector<uint8_t>& data = only_startup ? metadata_ : dex_cache_arrays_;
1251     NativeRelocationKind relocation_kind = only_startup
1252         ? NativeRelocationKind::kStartupNativeDexCacheArray
1253         : NativeRelocationKind::kFullNativeDexCacheArray;
1254 
1255     size_t size = num_entries * sizeof(void*);
1256     // We need to reserve space to store `num_entries` because ImageSpace doesn't have
1257     // access to the dex files when relocating dex caches.
1258     size_t offset = RoundUp(data.size(), sizeof(void*)) + sizeof(uintptr_t);
1259     data.resize(RoundUp(data.size(), sizeof(void*)) + sizeof(uintptr_t) + size);
1260     reinterpret_cast<uintptr_t*>(data.data() + offset)[-1] = num_entries;
1261 
1262     // Copy each entry individually. We cannot use memcpy, as the entries may be
1263     // updated concurrently by other mutator threads.
1264     mirror::NativeArray<T>* copy = reinterpret_cast<mirror::NativeArray<T>*>(data.data() + offset);
1265     for (uint32_t i = 0; i < num_entries; ++i) {
1266       copy->Set(i, array->Get(i));
1267     }
1268     native_relocations_.Put(array, std::make_pair(relocation_kind, offset));
1269   }
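       // The layout produced above is a pointer-aligned, length-prefixed
       // array. Sketch, where `offset` is the value recorded in
       // `native_relocations_`:
       //
       //   ... padding ... | uintptr_t num_entries | entry[0] | ... | entry[n-1]
       //                                           ^ data.data() + offset
       //
       // A hypothetical reader, given a pointer to entry[0], could recover the
       // length like so:
       //
       //   uint32_t NumEntriesOf(const uint8_t* array_start) {
       //     return static_cast<uint32_t>(
       //         reinterpret_cast<const uintptr_t*>(array_start)[-1]);
       //   }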
1270 
1271   template <typename T>
1272   mirror::GcRootArray<T>* CreateGcRootDexCacheArray(uint32_t num_entries,
1273                                                     uint32_t max_entries,
1274                                                     mirror::GcRootArray<T>* array) {
1275     if (array == nullptr) {
1276       return nullptr;
1277     }
1278     bool only_startup = !mirror::DexCache::ShouldAllocateFullArray(num_entries, max_entries);
1279     ArenaVector<uint8_t>& data = only_startup ? metadata_ : dex_cache_arrays_;
1280     NativeRelocationKind relocation_kind = only_startup
1281         ? NativeRelocationKind::kStartupNativeDexCacheArray
1282         : NativeRelocationKind::kFullNativeDexCacheArray;
1283     size_t size = num_entries * sizeof(GcRoot<T>);
1284     // We need to reserve space to store `num_entries` because ImageSpace doesn't have
1285     // access to the dex files when relocating dex caches.
1286     static_assert(sizeof(GcRoot<T>) == sizeof(uint32_t));
1287     size_t offset = data.size() + sizeof(uint32_t);
1288     data.resize(data.size() + sizeof(uint32_t) + size);
1289     reinterpret_cast<uint32_t*>(data.data() + offset)[-1] = num_entries;
1290     native_relocations_.Put(array, std::make_pair(relocation_kind, offset));
1291 
1292     return reinterpret_cast<mirror::GcRootArray<T>*>(data.data() + offset);
1293   }
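       // Same length-prefixed scheme as CopyNativeDexCacheArray above, except
       // the prefix is a uint32_t: GcRoot<T> is a 32-bit compressed reference,
       // so only 4-byte alignment is needed. Sketch:
       //
       //   ... | uint32_t num_entries | GcRoot<T> [0] | ... | GcRoot<T> [n-1]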
1294   static bool EmitDexCacheArrays() {
1295     // We need to treat dex cache arrays specially in an image for userfaultfd.
1296     // Disable for now. See b/270936884.
1297     return !gUseUserfaultfd;
1298   }
1299 
1300   uint32_t CopyDexCache(ObjPtr<mirror::DexCache> cache) REQUIRES_SHARED(Locks::mutator_lock_) {
1301     auto it = dex_caches_.find(cache->GetDexFile());
1302     if (it != dex_caches_.end()) {
1303       return it->second;
1304     }
1305     uint32_t offset = CopyObject(cache);
1306     dex_caches_.Put(cache->GetDexFile(), offset);
1307     // For dex caches, clear pointers to data that will be set at runtime.
1308     mirror::Object* copy = reinterpret_cast<mirror::Object*>(objects_.data() + offset);
1309     reinterpret_cast<mirror::DexCache*>(copy)->ResetNativeArrays();
1310     reinterpret_cast<mirror::DexCache*>(copy)->SetDexFile(nullptr);
1311 
1312     if (!EmitDexCacheArrays()) {
1313       return offset;
1314     }
1315 
1316     // Copy the ArtMethod array.
1317     mirror::NativeArray<ArtMethod>* resolved_methods = cache->GetResolvedMethodsArray();
1318     CopyNativeDexCacheArray(cache->GetDexFile()->NumMethodIds(),
1319                             mirror::DexCache::kDexCacheMethodCacheSize,
1320                             resolved_methods);
1321     // Store the array pointer in the dex cache, which will be relocated at the end.
1322     reinterpret_cast<mirror::DexCache*>(copy)->SetResolvedMethodsArray(resolved_methods);
1323 
1324     // Copy the ArtField array.
1325     mirror::NativeArray<ArtField>* resolved_fields = cache->GetResolvedFieldsArray();
1326     CopyNativeDexCacheArray(cache->GetDexFile()->NumFieldIds(),
1327                             mirror::DexCache::kDexCacheFieldCacheSize,
1328                             resolved_fields);
1329     // Store the array pointer in the dex cache, which will be relocated at the end.
1330     reinterpret_cast<mirror::DexCache*>(copy)->SetResolvedFieldsArray(resolved_fields);
1331 
1332     // Copy the type array.
1333     mirror::GcRootArray<mirror::Class>* resolved_types = cache->GetResolvedTypesArray();
1334     CreateGcRootDexCacheArray(cache->GetDexFile()->NumTypeIds(),
1335                               mirror::DexCache::kDexCacheTypeCacheSize,
1336                               resolved_types);
1337     // Store the array pointer in the dex cache, which will be relocated at the end.
1338     reinterpret_cast<mirror::DexCache*>(copy)->SetResolvedTypesArray(resolved_types);
1339 
1340     // Copy the string array.
1341     mirror::GcRootArray<mirror::String>* strings = cache->GetStringsArray();
1342     // Note: `new_strings` points to temporary data, and is only valid here.
1343     mirror::GcRootArray<mirror::String>* new_strings =
1344         CreateGcRootDexCacheArray(cache->GetDexFile()->NumStringIds(),
1345                                   mirror::DexCache::kDexCacheStringCacheSize,
1346                                   strings);
1347     // Store the array pointer in the dex cache, which will be relocated at the end.
1348     reinterpret_cast<mirror::DexCache*>(copy)->SetStringsArray(strings);
1349 
1350     // The code below copies new objects, so invalidate the address we have for
1351     // `copy`.
1352     copy = nullptr;
1353     if (strings != nullptr) {
1354       for (uint32_t i = 0; i < cache->GetDexFile()->NumStringIds(); ++i) {
1355         ObjPtr<mirror::String> str = strings->Get(i);
1356         if (str == nullptr || IsInBootImage(str.Ptr())) {
1357           new_strings->Set(i, str.Ptr());
1358         } else {
1359           uint32_t hash = static_cast<uint32_t>(str->GetStoredHashCode());
1360           DCHECK_EQ(hash, static_cast<uint32_t>(str->ComputeHashCode()))
1361               << "Dex cache strings should be interned";
1362           auto it2 = intern_table_.FindWithHash(str.Ptr(), hash);
1363           if (it2 == intern_table_.end()) {
1364             uint32_t string_offset = CopyObject(str);
1365             uint32_t address = image_begin_ + string_offset + sizeof(ImageHeader);
1366             intern_table_.InsertWithHash(address, hash);
1367             new_strings->Set(i, reinterpret_cast<mirror::String*>(address));
1368           } else {
1369             new_strings->Set(i, reinterpret_cast<mirror::String*>(*it2));
1370           }
1371           // To distinguish string references in the dex cache object from
1372           // string references in the array, we store an offset larger than
1373           // the size of a DexCache object. ClassLinker::VisitInternedStringReferences
1374           // knows how to decode this offset.
1375           string_reference_offsets_.emplace_back(
1376               sizeof(ImageHeader) + offset, sizeof(mirror::DexCache) + i);
1377         }
1378       }
1379     }
1380 
1381     return offset;
1382   }
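       // How the string addresses above are precomputed (illustrative
       // arithmetic):
       //
       //   image_begin_            // where the app image will be mapped
       //   + sizeof(ImageHeader)   // objects start right after the header
       //   + string_offset         // offset of the copy inside `objects_`
       //
       // For example, with a made-up image_begin_ of 0x70000000, a 512-byte
       // ImageHeader and a string_offset of 0x40, the address written into the
       // intern table and `new_strings` would be 0x70000240.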
1383 
1384   bool IsInitialized(mirror::Class* cls) REQUIRES_SHARED(Locks::mutator_lock_) {
1385     if (IsInBootImage(cls)) {
1386       const OatDexFile* oat_dex_file = cls->GetDexFile().GetOatDexFile();
1387       DCHECK(oat_dex_file != nullptr) << "We should always have an .oat file for a boot image";
1388       uint16_t class_def_index = cls->GetDexClassDefIndex();
1389       ClassStatus oat_file_class_status = oat_dex_file->GetOatClass(class_def_index).GetStatus();
1390       return oat_file_class_status == ClassStatus::kVisiblyInitialized;
1391     } else {
1392       return cls->IsVisiblyInitialized<kVerifyNone>();
1393     }
1394   }
1395   // Try to initialize `copy`. Note that `cls` may not be initialized.
1396   // This is called after the image generation logic has visited super classes
1397   // and super interfaces, so we can just check those directly.
1398   bool TryInitializeClass(mirror::Class* copy, ObjPtr<mirror::Class> cls, uint32_t class_offset)
1399       REQUIRES_SHARED(Locks::mutator_lock_) {
1400     if (!cls->IsVerified()) {
1401       return false;
1402     }
1403     if (cls->IsArrayClass()) {
1404       return true;
1405     }
1406 
1407     // Check if we have been able to initialize the super class.
1408     mirror::Class* super = GetClassContent(cls->GetSuperClass());
1409     DCHECK(super != nullptr)
1410         << "App image classes should always have a super class: " << cls->PrettyClass();
1411     if (!IsInitialized(super)) {
1412       return false;
1413     }
1414 
1415     // We don't initialize classes that have class initializers.
1416     if (cls->FindClassInitializer(kRuntimePointerSize) != nullptr) {
1417       return false;
1418     }
1419 
1420     // For non-interface classes, we require all implemented interfaces to be
1421     // initialized.
1422     if (!cls->IsInterface()) {
1423       for (size_t i = 0; i < cls->NumDirectInterfaces(); i++) {
1424         mirror::Class* itf = GetClassContent(cls->GetDirectInterface(i));
1425         if (!IsInitialized(itf)) {
1426           return false;
1427         }
1428       }
1429     }
1430 
1431     // Trivial case: no static fields.
1432     if (cls->NumStaticFields() == 0u) {
1433       return true;
1434     }
1435 
1436     // Go over all static fields and try to initialize them.
1437     EncodedStaticFieldValueIterator it(cls->GetDexFile(), *cls->GetClassDef());
1438     if (!it.HasNext()) {
1439       return true;
1440     }
1441 
1442     // Temporary string offsets in case we fail to initialize the class. We
1443     // only commit the offsets at the end of this method if we are successful.
1444     ArenaVector<AppImageReferenceOffsetInfo> string_offsets(allocator_.Adapter());
1445     ClassLinker* linker = Runtime::Current()->GetClassLinker();
1446     ClassAccessor accessor(cls->GetDexFile(), *cls->GetClassDef());
1447     for (const ClassAccessor::Field& field : accessor.GetStaticFields()) {
1448       if (!it.HasNext()) {
1449         break;
1450       }
1451       ArtField* art_field = linker->LookupResolvedField(field.GetIndex(),
1452                                                         cls->GetDexCache(),
1453                                                         cls->GetClassLoader(),
1454                                                         /* is_static= */ true);
1455       DCHECK_NE(art_field, nullptr);
1456       MemberOffset offset(art_field->GetOffset());
1457       switch (it.GetValueType()) {
1458         case EncodedArrayValueIterator::ValueType::kBoolean:
1459           copy->SetFieldBoolean<false>(offset, it.GetJavaValue().z);
1460           break;
1461         case EncodedArrayValueIterator::ValueType::kByte:
1462           copy->SetFieldByte<false>(offset, it.GetJavaValue().b);
1463           break;
1464         case EncodedArrayValueIterator::ValueType::kShort:
1465           copy->SetFieldShort<false>(offset, it.GetJavaValue().s);
1466           break;
1467         case EncodedArrayValueIterator::ValueType::kChar:
1468           copy->SetFieldChar<false>(offset, it.GetJavaValue().c);
1469           break;
1470         case EncodedArrayValueIterator::ValueType::kInt:
1471           copy->SetField32<false>(offset, it.GetJavaValue().i);
1472           break;
1473         case EncodedArrayValueIterator::ValueType::kLong:
1474           copy->SetField64<false>(offset, it.GetJavaValue().j);
1475           break;
1476         case EncodedArrayValueIterator::ValueType::kFloat:
1477           copy->SetField32<false>(offset, it.GetJavaValue().i);
1478           break;
1479         case EncodedArrayValueIterator::ValueType::kDouble:
1480           copy->SetField64<false>(offset, it.GetJavaValue().j);
1481           break;
1482         case EncodedArrayValueIterator::ValueType::kNull:
1483           copy->SetFieldObject<false>(offset, nullptr);
1484           break;
1485         case EncodedArrayValueIterator::ValueType::kString: {
1486           ObjPtr<mirror::String> str =
1487               linker->LookupString(dex::StringIndex(it.GetJavaValue().i), cls->GetDexCache());
1488           mirror::String* str_copy = nullptr;
1489           if (str == nullptr) {
1490             // String wasn't created yet.
1491             return false;
1492           } else if (IsInBootImage(str.Ptr())) {
1493             str_copy = str.Ptr();
1494           } else {
1495             uint32_t hash = static_cast<uint32_t>(str->GetStoredHashCode());
1496             DCHECK_EQ(hash, static_cast<uint32_t>(str->ComputeHashCode()))
1497                 << "Dex cache strings should be interned";
1498             auto string_it = intern_table_.FindWithHash(str.Ptr(), hash);
1499             if (string_it == intern_table_.end()) {
1500               // The string must be interned.
1501               uint32_t string_offset = CopyObject(str);
1502               // Reload the class copy after having copied the string.
1503               copy = reinterpret_cast<mirror::Class*>(objects_.data() + class_offset);
1504               uint32_t address = image_begin_ + string_offset + sizeof(ImageHeader);
1505               intern_table_.InsertWithHash(address, hash);
1506               str_copy = reinterpret_cast<mirror::String*>(address);
1507             } else {
1508               str_copy = reinterpret_cast<mirror::String*>(*string_it);
1509             }
1510             string_offsets.emplace_back(sizeof(ImageHeader) + class_offset, offset.Int32Value());
1511           }
1512           uint8_t* raw_addr = reinterpret_cast<uint8_t*>(copy) + offset.Int32Value();
1513           mirror::HeapReference<mirror::Object>* objref_addr =
1514               reinterpret_cast<mirror::HeapReference<mirror::Object>*>(raw_addr);
1515           objref_addr->Assign</* kIsVolatile= */ false>(str_copy);
1516           break;
1517         }
1518         case EncodedArrayValueIterator::ValueType::kType: {
1519           // Note that the referenced type may not have been processed yet
1520           // by the image generation logic. In this case we bail out for
1521           // simplicity.
1522           ObjPtr<mirror::Class> type =
1523               linker->LookupResolvedType(dex::TypeIndex(it.GetJavaValue().i), cls);
1524           mirror::Class* type_copy = nullptr;
1525           if (type == nullptr) {
1526             // Class wasn't resolved yet.
1527             return false;
1528           } else if (IsInBootImage(type.Ptr())) {
1529             // Make sure the type is in our class table.
1530             uint32_t hash = type->DescriptorHash();
1531             class_table_.InsertWithHash(ClassTable::TableSlot(type.Ptr(), hash), hash);
1532             type_copy = type.Ptr();
1533           } else if (type->IsArrayClass()) {
1534             std::string class_name;
1535             type->GetDescriptor(&class_name);
1536             auto class_it = array_classes_.find(class_name);
1537             if (class_it == array_classes_.end()) {
1538               return false;
1539             }
1540             type_copy = reinterpret_cast<mirror::Class*>(
1541                 image_begin_ + sizeof(ImageHeader) + class_it->second);
1542           } else {
1543             const dex::ClassDef* class_def = type->GetClassDef();
1544             DCHECK_NE(class_def, nullptr);
1545             auto class_it = classes_.find(class_def);
1546             if (class_it == classes_.end()) {
1547               return false;
1548             }
1549             type_copy = reinterpret_cast<mirror::Class*>(
1550                 image_begin_ + sizeof(ImageHeader) + class_it->second);
1551           }
1552           uint8_t* raw_addr = reinterpret_cast<uint8_t*>(copy) + offset.Int32Value();
1553           mirror::HeapReference<mirror::Object>* objref_addr =
1554               reinterpret_cast<mirror::HeapReference<mirror::Object>*>(raw_addr);
1555           objref_addr->Assign</* kIsVolatile= */ false>(type_copy);
1556           break;
1557         }
1558         default:
1559           LOG(FATAL) << "Unreachable";
1560       }
1561       it.Next();
1562     }
1563     // We have successfully initialized the class; we can now record the
1564     // string offsets.
1565     string_reference_offsets_.insert(
1566         string_reference_offsets_.end(), string_offsets.begin(), string_offsets.end());
1567     return true;
1568   }
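       // The switch above mirrors the dex format's static-values encoded
       // array: the i-th encoded value initializes the i-th static field in
       // field definition order, and trailing fields without an encoded value
       // keep their default (zero/null). A hypothetical Java class this
       // handles:
       //
       //   class C {
       //     static boolean FLAG = true;  // kBoolean -> SetFieldBoolean
       //     static int COUNT = 42;       // kInt     -> SetField32
       //     static String NAME = "art";  // kString  -> interned reference
       //     static Object LAZY;          // no encoded value, stays null
       //   }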
1569 
1570   uint32_t CopyClass(ObjPtr<mirror::Class> cls) REQUIRES_SHARED(Locks::mutator_lock_) {
1571     DCHECK(!cls->IsBootStrapClassLoaded());
1572     uint32_t offset = 0u;
1573     if (cls->IsArrayClass()) {
1574       std::string class_name;
1575       cls->GetDescriptor(&class_name);
1576       auto it = array_classes_.find(class_name);
1577       if (it != array_classes_.end()) {
1578         return it->second;
1579       }
1580       offset = CopyObject(cls);
1581       array_classes_.Put(class_name, offset);
1582     } else {
1583       const dex::ClassDef* class_def = cls->GetClassDef();
1584       auto it = classes_.find(class_def);
1585       if (it != classes_.end()) {
1586         return it->second;
1587       }
1588       offset = CopyObject(cls);
1589       classes_.Put(class_def, offset);
1590     }
1591 
1592     uint32_t hash = cls->DescriptorHash();
1593     // Save the hash; the `HashSet` implementation requires it for lookups.
1594     class_hashes_.Put(offset, hash);
1595     uint32_t class_image_address = image_begin_ + sizeof(ImageHeader) + offset;
1596     bool inserted =
1597         class_table_.InsertWithHash(ClassTable::TableSlot(class_image_address, hash), hash).second;
1598     DCHECK(inserted) << "Class " << cls->PrettyDescriptor()
1599                      << " (" << cls.Ptr() << ") already inserted";
1600 
1601     // Clear internal state.
1602     mirror::Class* copy = reinterpret_cast<mirror::Class*>(objects_.data() + offset);
1603     copy->SetClinitThreadId(static_cast<pid_t>(0u));
1604     if (cls->IsArrayClass()) {
1605       DCHECK(copy->IsVisiblyInitialized());
1606     } else {
1607       copy->SetStatusInternal(cls->IsVerified() ? ClassStatus::kVerified : ClassStatus::kResolved);
1608     }
1609 
1610     // Clear static field values.
1611     auto clear_class = [&] () REQUIRES_SHARED(Locks::mutator_lock_) {
1612       MemberOffset static_offset = cls->GetFirstReferenceStaticFieldOffset(kRuntimePointerSize);
1613       uint32_t ref_offsets = cls->GetReferenceInstanceOffsets();
1614       size_t size = cls->GetClassSize() - static_offset.Uint32Value();
1615       // Adjust for the overflow instance-offset bitmap, which is stored
1616       // after the static fields.
1617       if ((ref_offsets & mirror::Class::kVisitReferencesSlowpathMask) != 0) {
1618         ref_offsets &= ~mirror::Class::kVisitReferencesSlowpathMask;
1619         size -= ref_offsets * sizeof(uint32_t);
1620       }
1621       memset(objects_.data() + offset + static_offset.Uint32Value(), 0, size);
1622     };
1623     clear_class();
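         // What `clear_class` wipes (illustrative layout of a Class object):
         //
         //   | Class fields | static fields .......... | overflow ref bitmap |
         //                  ^ first reference static   ^ preserved
         //
         // When kVisitReferencesSlowpathMask is set, GetReferenceInstanceOffsets()
         // holds the bitmap's word count rather than inline offsets, which is
         // why `size` shrinks by `ref_offsets * sizeof(uint32_t)` above.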
1624 
1625     bool is_class_initialized = TryInitializeClass(copy, cls, offset);
1626     // Reload the copy; it may have moved during `TryInitializeClass`.
1627     copy = reinterpret_cast<mirror::Class*>(objects_.data() + offset);
1628     if (is_class_initialized) {
1629       copy->SetStatusInternal(ClassStatus::kVisiblyInitialized);
1630       if (!cls->IsArrayClass() && !cls->IsFinalizable()) {
1631         copy->SetObjectSizeAllocFastPath(RoundUp(cls->GetObjectSize(), kObjectAlignment));
1632       }
1633       if (cls->IsInterface()) {
1634         copy->SetAccessFlags(copy->GetAccessFlags() | kAccRecursivelyInitialized);
1635       }
1636     } else {
1637       // If we fail to initialize, remove initialization-related flags and
1638       // clear the static fields again.
1639       copy->SetObjectSizeAllocFastPath(std::numeric_limits<uint32_t>::max());
1640       copy->SetAccessFlags(copy->GetAccessFlags() & ~kAccRecursivelyInitialized);
1641       clear_class();
1642     }
1643 
1644     CopyFieldArrays(cls, class_image_address);
1645     CopyMethodArrays(cls, class_image_address, is_class_initialized);
1646     if (cls->ShouldHaveImt()) {
1647       CopyImTable(cls);
1648     }
1649 
1650     return offset;
1651   }
1652 
1653   // Copy `obj` into `objects_` and relocate its references. Returns the
1654   // offset within our buffer.
1655   uint32_t CopyObject(ObjPtr<mirror::Object> obj) REQUIRES_SHARED(Locks::mutator_lock_) {
1656     // Copy the object into `objects_`.
1657     size_t object_size = obj->SizeOf();
1658     size_t offset = objects_.size();
1659     DCHECK(IsAligned<kObjectAlignment>(offset));
1660     object_offsets_.push_back(offset);
1661     objects_.resize(RoundUp(offset + object_size, kObjectAlignment));
1662 
1663     mirror::Object* copy = reinterpret_cast<mirror::Object*>(objects_.data() + offset);
1664     mirror::Object::CopyRawObjectData(
1665         reinterpret_cast<uint8_t*>(copy), obj, object_size - sizeof(mirror::Object));
1666     // Clear any lockword data.
1667     copy->SetLockWord(LockWord::Default(), /* as_volatile= */ false);
1668     copy->SetClass(obj->GetClass());
1669 
1670     // Fixup reference pointers.
1671     FixupVisitor visitor(this, offset);
1672     obj->VisitReferences</*kVisitNativeRoots=*/ false>(visitor, visitor);
1673 
1674     if (obj->IsString()) {
1675       // Ensure a string always has its hashcode stored. This is checked at
1676       // runtime; boot images skip it so strings are not dirtied by the hashcode.
1677       reinterpret_cast<mirror::String*>(copy)->GetHashCode();
1678     }
1679 
1680     object_section_size_ += RoundUp(object_size, kObjectAlignment);
1681     return offset;
1682   }
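       // `objects_` mirrors the future objects section byte for byte, so the
       // final mapped address of any copy is a fixed function of its offset. A
       // hypothetical helper making the arithmetic used throughout explicit:
       //
       //   uint32_t ImageAddressOf(uint32_t offset_in_objects) const {
       //     return image_begin_ + sizeof(ImageHeader) + offset_in_objects;
       //   }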
1683 
1684   class CollectDexCacheVisitor : public DexCacheVisitor {
1685    public:
1686     explicit CollectDexCacheVisitor(VariableSizedHandleScope& handles) : handles_(handles) {}
1687 
1688     void Visit(ObjPtr<mirror::DexCache> dex_cache)
1689         REQUIRES_SHARED(Locks::dex_lock_, Locks::mutator_lock_) override {
1690       dex_caches_.push_back(handles_.NewHandle(dex_cache));
1691     }
1692     const std::vector<Handle<mirror::DexCache>>& GetDexCaches() const {
1693       return dex_caches_;
1694     }
1695    private:
1696     VariableSizedHandleScope& handles_;
1697     std::vector<Handle<mirror::DexCache>> dex_caches_;
1698   };
1699 
1700   // Find dex caches corresponding to the primary APK.
1701   void FindDexCaches(Thread* self,
1702                      dchecked_vector<Handle<mirror::DexCache>>& dex_caches,
1703                      VariableSizedHandleScope& handles)
1704       REQUIRES_SHARED(Locks::mutator_lock_) {
1705     ScopedTrace trace("Find dex caches");
1706     DCHECK(dex_caches.empty());
1707     // Collect all dex caches.
1708     ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
1709     CollectDexCacheVisitor visitor(handles);
1710     {
1711       ReaderMutexLock mu(self, *Locks::dex_lock_);
1712       class_linker->VisitDexCaches(&visitor);
1713     }
1714 
1715     // Find the primary APK.
1716     AppInfo* app_info = Runtime::Current()->GetAppInfo();
1717     for (Handle<mirror::DexCache> cache : visitor.GetDexCaches()) {
1718       if (app_info->GetRegisteredCodeType(cache->GetDexFile()->GetLocation()) ==
1719               AppInfo::CodeType::kPrimaryApk) {
1720         dex_caches.push_back(handles.NewHandle(cache.Get()));
1721         break;
1722       }
1723     }
1724 
1725     if (dex_caches.empty()) {
1726       return;
1727     }
1728 
1729     const OatDexFile* oat_dex_file = dex_caches[0]->GetDexFile()->GetOatDexFile();
1730     if (oat_dex_file == nullptr) {
1731       // We need a .oat file for loading an app image.
1732       dex_caches.clear();
1733       return;
1734     }
1735 
1736     // Store the dex caches in the order in which their corresponding dex files
1737     // are stored in the oat file. When we check for checksums at the point of
1738     // loading the image, we rely on this order.
1739     for (const OatDexFile* current : oat_dex_file->GetOatFile()->GetOatDexFiles()) {
1740       if (current != oat_dex_file) {
1741         for (Handle<mirror::DexCache> cache : visitor.GetDexCaches()) {
1742           if (cache->GetDexFile()->GetOatDexFile() == current) {
1743             dex_caches.push_back(handles.NewHandle(cache.Get()));
1744           }
1745         }
1746       }
1747     }
1748   }
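       // Note the resulting order: the primary APK's dex cache comes first,
       // then the remaining caches in oat file order. The image loader relies
       // on this order when it verifies the dex checksums stored alongside the
       // fake oat header.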
1749 
1750   static uint64_t PointerToUint64(void* ptr) {
1751     return reinterpret_cast64<uint64_t>(ptr);
1752   }
1753 
1754   void WriteImageMethods() {
1755     ScopedObjectAccess soa(Thread::Current());
1756     // We can just use plain runtime pointers.
1757     Runtime* runtime = Runtime::Current();
1758     header_.image_methods_[ImageHeader::kResolutionMethod] =
1759         PointerToUint64(runtime->GetResolutionMethod());
1760     header_.image_methods_[ImageHeader::kImtConflictMethod] =
1761         PointerToUint64(runtime->GetImtConflictMethod());
1762     header_.image_methods_[ImageHeader::kImtUnimplementedMethod] =
1763         PointerToUint64(runtime->GetImtUnimplementedMethod());
1764     header_.image_methods_[ImageHeader::kSaveAllCalleeSavesMethod] =
1765         PointerToUint64(runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveAllCalleeSaves));
1766     header_.image_methods_[ImageHeader::kSaveRefsOnlyMethod] =
1767         PointerToUint64(runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsOnly));
1768     header_.image_methods_[ImageHeader::kSaveRefsAndArgsMethod] =
1769         PointerToUint64(runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs));
1770     header_.image_methods_[ImageHeader::kSaveEverythingMethod] =
1771         PointerToUint64(runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveEverything));
1772     header_.image_methods_[ImageHeader::kSaveEverythingMethodForClinit] =
1773         PointerToUint64(runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveEverythingForClinit));
1774     header_.image_methods_[ImageHeader::kSaveEverythingMethodForSuspendCheck] =
1775         PointerToUint64(
1776             runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveEverythingForSuspendCheck));
1777   }
1778 
1779   // Header for the image, created at the end once we know the size of all
1780   // sections.
1781   ImageHeader header_;
1782 
1783   // Allocator for the various data structures allocated while generating
1784   // the image.
1785   ArenaAllocator allocator_;
1786 
1787   // Contents of the various sections.
1788   ArenaVector<uint8_t> objects_;
1789   ArenaVector<uint8_t> art_fields_;
1790   ArenaVector<uint8_t> art_methods_;
1791   ArenaVector<uint8_t> im_tables_;
1792   ArenaVector<uint8_t> metadata_;
1793   ArenaVector<uint8_t> dex_cache_arrays_;
1794 
1795   ArenaVector<AppImageReferenceOffsetInfo> string_reference_offsets_;
1796 
1797   // Bitmap of live objects in `objects_`. Populated from `object_offsets_`
1798   // once we know `object_section_size_`.
1799   gc::accounting::ContinuousSpaceBitmap image_bitmap_;
1800 
1801   // Sections stored in the header.
1802   ArenaVector<ImageSection> sections_;
1803 
1804   // A list of offsets in `objects_` where objects begin.
1805   ArenaVector<uint32_t> object_offsets_;
1806 
1807   ArenaSafeMap<const dex::ClassDef*, uint32_t> classes_;
1808   ArenaSafeMap<std::string, uint32_t> array_classes_;
1809   ArenaSafeMap<const DexFile*, uint32_t> dex_caches_;
1810   ArenaSafeMap<uint32_t, uint32_t> class_hashes_;
1811 
1812   ArenaSafeMap<void*, std::pair<NativeRelocationKind, uint32_t>> native_relocations_;
1813 
1814   // Cached values of boot image information.
1815   const uint32_t boot_image_begin_;
1816   const uint32_t boot_image_size_;
1817 
1818   // Where the image begins: just after the boot image.
1819   const uint32_t image_begin_;
1820 
1821   // Size of the `kSectionObjects` section.
1822   size_t object_section_size_;
1823 
1824   // The location of the primary APK / dex file.
1825   std::string dex_location_;
1826 
1827   // The intern table for strings that we will write to disk.
1828   InternTableSet intern_table_;
1829 
1830   // The class table holding classes that we will write to disk.
1831   ClassTableSet class_table_;
1832 
1833   friend class ClassDescriptorHash;
1834   friend class PruneVisitor;
1835   friend class NativePointerVisitor;
1836 };
1837 
1838 std::string RuntimeImage::GetRuntimeImageDir(const std::string& app_data_dir) {
1839   if (app_data_dir.empty()) {
1840     // The data directory is empty for tests.
1841     return "";
1842   }
1843   return app_data_dir + "/cache/oat_primary/";
1844 }
1845 
1846 // Note: this may return a relative path for tests.
1847 std::string RuntimeImage::GetRuntimeImagePath(const std::string& app_data_dir,
1848                                               const std::string& dex_location,
1849                                               const std::string& isa) {
1850   std::string basename = android::base::Basename(dex_location);
1851   std::string filename = ReplaceFileExtension(basename, kArtExtension);
1852 
1853   return GetRuntimeImageDir(app_data_dir) + isa + "/" + filename;
1854 }
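     // For example (paths are illustrative):
     //
     //   GetRuntimeImagePath("/data/data/com.example",
     //                       "/data/app/com.example/base.apk",
     //                       "arm64")
     //       -> "/data/data/com.example/cache/oat_primary/arm64/base.art"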
1855 
1856 std::string RuntimeImage::GetRuntimeImagePath(const std::string& dex_location) {
1857   return GetRuntimeImagePath(Runtime::Current()->GetProcessDataDirectory(),
1858                              dex_location,
1859                              GetInstructionSetString(kRuntimeQuickCodeISA));
1860 }
1861 
1862 static bool EnsureDirectoryExists(const std::string& directory, std::string* error_msg) {
1863   if (!OS::DirectoryExists(directory.c_str())) {
1864     static constexpr mode_t kDirectoryMode = S_IRWXU | S_IRGRP | S_IXGRP | S_IROTH | S_IXOTH;
1865     if (mkdir(directory.c_str(), kDirectoryMode) != 0) {
1866       *error_msg =
1867           StringPrintf("Could not create directory %s: %s", directory.c_str(), strerror(errno));
1868       return false;
1869     }
1870   }
1871   return true;
1872 }
1873 
1874 bool RuntimeImage::WriteImageToDisk(std::string* error_msg) {
1875   if (gPageSize != kMinPageSize) {
1876     *error_msg = "Writing runtime image is only supported on devices with 4K page size";
1877     return false;
1878   }
1879 
1880   gc::Heap* heap = Runtime::Current()->GetHeap();
1881   if (!heap->HasBootImageSpace()) {
1882     *error_msg = "Cannot generate an app image without a boot image";
1883     return false;
1884   }
1885   std::string oat_path = GetRuntimeImageDir(Runtime::Current()->GetProcessDataDirectory());
1886   if (!oat_path.empty() && !EnsureDirectoryExists(oat_path, error_msg)) {
1887     return false;
1888   }
1889 
1890   ScopedTrace generate_image_trace("Generating runtime image");
1891   std::unique_ptr<RuntimeImageHelper> image(new RuntimeImageHelper(heap));
1892   if (!image->Generate(error_msg)) {
1893     return false;
1894   }
1895 
1896   ScopedTrace write_image_trace("Writing runtime image to disk");
1897 
1898   const std::string path = GetRuntimeImagePath(image->GetDexLocation());
1899   if (!EnsureDirectoryExists(android::base::Dirname(path), error_msg)) {
1900     return false;
1901   }
1902 
1903   // We first generate the app image in a temporary file, which we will then
1904   // move to `path`.
1905   const std::string temp_path = ReplaceFileExtension(path, std::to_string(getpid()) + ".tmp");
1906   ImageFileGuard image_file;
1907   image_file.reset(OS::CreateEmptyFileWriteOnly(temp_path.c_str()));
1908 
1909   if (image_file == nullptr) {
1910     *error_msg = "Could not open " + temp_path + " for writing";
1911     return false;
1912   }
1913 
1914   std::vector<uint8_t> full_data(image->GetHeader()->GetImageSize());
1915   image->FillData(full_data);
1916 
1917   // Specify a default block size of 512K to enable parallel image decompression.
1918   static constexpr size_t kMaxImageBlockSize = 524288;
1919   // Use LZ4 as a good compromise between CPU time and compression ratio. LZ4HC
1920   // empirically takes 10x longer to compress.
1921   static constexpr ImageHeader::StorageMode kImageStorageMode = ImageHeader::kStorageModeLZ4;
1922   // Note: no need to update the checksum of the runtime app image: we have no
1923   // use for it, and computing it takes CPU time.
1924   if (!image->GetHeader()->WriteData(
1925           image_file,
1926           full_data.data(),
1927           reinterpret_cast<const uint8_t*>(image->GetImageBitmap().Begin()),
1928           kImageStorageMode,
1929           kMaxImageBlockSize,
1930           /* update_checksum= */ false,
1931           error_msg)) {
1932     return false;
1933   }
1934 
1935   if (!image_file.WriteHeaderAndClose(temp_path, image->GetHeader(), error_msg)) {
1936     return false;
1937   }
1938 
1939   if (rename(temp_path.c_str(), path.c_str()) != 0) {
1940     *error_msg =
1941         "Failed to move runtime app image to " + path + ": " + std::string(strerror(errno));
1942     // Unlink directly: we cannot use `image_file` as it has already been closed.
1943     unlink(temp_path.c_str());
1944     return false;
1945   }
1946 
1947   return true;
1948 }
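     // Note the crash-safety pattern above: all data is written to a
     // pid-qualified temporary file, the header is written last (so a
     // partially written file is never a valid image), and rename(2) publishes
     // the file atomically at `path`.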
1949 
1950 }  // namespace art
1951