xref: /aosp_15_r20/build/make/tools/releasetools/ota_from_target_files.py (revision 9e94795a3d4ef5c1d47486f9a02bb378756cea8a)
1#!/usr/bin/env python
2#
3# Copyright (C) 2008 The Android Open Source Project
4#
5# Licensed under the Apache License, Version 2.0 (the "License");
6# you may not use this file except in compliance with the License.
7# You may obtain a copy of the License at
8#
9#      http://www.apache.org/licenses/LICENSE-2.0
10#
11# Unless required by applicable law or agreed to in writing, software
12# distributed under the License is distributed on an "AS IS" BASIS,
13# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14# See the License for the specific language governing permissions and
15# limitations under the License.
16
17"""
18Given a target-files zipfile, produces an OTA package that installs that build.
19An incremental OTA is produced if -i is given, otherwise a full OTA is produced.
20
21Usage:  ota_from_target_files [options] input_target_files output_ota_package
22
23Common options that apply to both non-A/B and A/B OTAs
24
25  --downgrade
26      Intentionally generate an incremental OTA that updates from a newer build
27      to an older one (e.g. downgrading from P preview back to O MR1).
28      "ota-downgrade=yes" will be set in the package metadata file. A data wipe
29      will always be enforced when using this flag, so "ota-wipe=yes" will also
30      be included in the metadata file. The update-binary in the source build
31      will be used in the OTA package, unless --binary flag is specified. Please
32      also check the comment for --override_timestamp below.
33
34  -i  (--incremental_from) <file>
35      Generate an incremental OTA using the given target-files zip as the
36      starting build.
37
38  -k  (--package_key) <key>
39      Key to use to sign the package (default is the value of
40      default_system_dev_certificate from the input target-files's
41      META/misc_info.txt, or "build/make/target/product/security/testkey" if
42      that value is not specified).
43
44      For incremental OTAs, the default value is based on the source
45      target-files, not the target build.
46
47  --override_timestamp
48      Intentionally generate an incremental OTA that updates from a newer build
49      to an older one (based on timestamp comparison), by setting the downgrade
50      flag in the package metadata. This differs from the --downgrade flag in
51      that we don't enforce a data wipe, because we know for sure this is NOT
52      an actual downgrade case; the two builds just happen to be cut in
53      reverse order (e.g. from two branches). A legit use case is that we cut
54      a new build C (after having A and B), but want to enforce an update path
55      of A -> C -> B. Specifying --downgrade may not help since that would
56      enforce a data wipe for the C -> B update.
57
58      We used to set a fake timestamp in the package metadata for this flow. But
59      now we consolidate the two cases (i.e. an actual downgrade, or a downgrade
60      based on timestamp) with the same "ota-downgrade=yes" flag, with the
61      difference being whether "ota-wipe=yes" is set.
62
63  --wipe_user_data
64      Generate an OTA package that will wipe the user data partition when
65      installed.
66
67  --retrofit_dynamic_partitions
68      Generates an OTA package that updates a device to support dynamic
69      partitions (default False). This flag is implied when generating
70      an incremental OTA where the base build does not support dynamic
71      partitions but the target build does. For A/B, when this flag is set,
72      --skip_postinstall is implied.
73
74  --skip_compatibility_check
75      Skip checking compatibility of the input target files package.
76
77  --output_metadata_path
78      Write a copy of the metadata to a separate file. Therefore, users can
79      read the post build fingerprint without extracting the OTA package.
80
81  --force_non_ab
82      This flag can only be set on an A/B device that also supports non-A/B
83      updates. Implies --two_step.
84      If set, generates a non-A/B update package.
85      If not set, generates an A/B package for an A/B device and a non-A/B
86      package for a non-A/B device.
87
88  -o  (--oem_settings) <main_file[,additional_files...]>
89      Comma-separated list of files used to specify the expected OEM-specific
90      properties on the OEM partition of the intended device. Multiple expected
91      values can be used by providing multiple files. Only the first dict will
92      be used to compute the fingerprint, while the rest will be used to assert
93      OEM-specific properties.
94
95Non-A/B OTA specific options
96
97  -b  (--binary) <file>
98      Use the given binary as the update-binary in the output package, instead
99      of the binary in the build's target_files. Use for development only.
100
101  --block
102      Generate a block-based OTA for a non-A/B device. We have deprecated
103      support for file-based OTAs since O. Block-based OTA will be used by
104      default for all non-A/B devices. This flag is kept only to avoid
105      breaking existing callers.
106
107  -e  (--extra_script) <file>
108      Insert the contents of file at the end of the update script.
109
110  --full_bootloader
111      Similar to --full_radio. When generating an incremental OTA, always
112      include a full copy of the bootloader image.
113
114  --full_radio
115      When generating an incremental OTA, always include a full copy of the
116      radio image. This option is only meaningful when -i is specified, because
117      a full radio image is always included in a full OTA if applicable.
118
119  --log_diff <file>
120      Generate a log file that shows the differences between the source and
121      target builds for an incremental package. This option is only meaningful
122      when -i is specified.
123
124  --oem_no_mount
125      For devices with OEM-specific properties but without an OEM partition, do
126      not mount the OEM partition in the updater-script. This should be very
127      rarely used, since it's expected to have a dedicated OEM partition for
128      OEM-specific properties. Only meaningful when -o is specified.
129
130  --stash_threshold <float>
131      Specify the threshold that will be used to compute the maximum allowed
132      stash size (defaults to 0.8).
133
134  -t  (--worker_threads) <int>
135      Specify the number of worker-threads that will be used when generating
136      patches for incremental updates (defaults to 3).
137
138  --verify
139      Verify the checksums of the updated system and vendor (if any) partitions.
140      Non-A/B incremental OTAs only.
141
142  -2  (--two_step)
143      Generate a 'two-step' OTA package, where recovery is updated first, so
144      that any changes made to the system partition are done using the new
145      recovery (new kernel, etc.).
146
147A/B OTA specific options
148
149  --disable_fec_computation
150      Disable the on-device FEC data computation for incremental updates. The OTA will be larger, but installation will be faster.
151
152  --include_secondary
153      Additionally include the payload for secondary slot images (default:
154      False). Only meaningful when generating A/B OTAs.
155
156      By default, an A/B OTA package doesn't contain the images for the
157      secondary slot (e.g. system_other.img). Specifying this flag allows
158      generating a separate payload that will install secondary slot images.
159
160      Such a package needs to be applied in a two-stage manner, with a reboot
161      in-between. During the first stage, the updater applies the primary
162      payload only. Upon finishing, it reboots the device into the newly updated
163      slot. It then continues to install the secondary payload to the inactive
164      slot, but without switching the active slot at the end (needs the matching
165      support in update_engine, i.e. SWITCH_SLOT_ON_REBOOT flag).
166
167      Due to the special install procedure, the secondary payload will be always
168      generated as a full payload.
169
170  --payload_signer <signer>
171      Specify the signer when signing the payload and metadata for A/B OTAs.
172      By default (i.e. without this flag), it calls 'openssl pkeyutl' to sign
173      with the package private key. If the private key cannot be accessed
174      directly, a payload signer that knows how to do that should be specified.
175      The signer will be supplied with "-inkey <path_to_key>",
176      "-in <input_file>" and "-out <output_file>" parameters.
177
178  --payload_signer_args <args>
179      Specify the arguments needed for payload signer.
180
181  --payload_signer_maximum_signature_size <signature_size>
182      The maximum signature size (in bytes) that would be generated by the given
183      payload signer. Only meaningful when custom payload signer is specified
184      via '--payload_signer'.
185      If the signer uses a RSA key, this should be the number of bytes to
186      represent the modulus. If it uses an EC key, this is the size of a
187      DER-encoded ECDSA signature.
188
189  --payload_signer_key_size <key_size>
190      Deprecated. Use the '--payload_signer_maximum_signature_size' instead.
191
192  --boot_variable_file <path>
193      A file that contains the possible values of ro.boot.* properties. It's
194      used to calculate the possible runtime fingerprints when some
195      ro.product.* properties are overridden by the 'import' statement.
196      The file expects one property per line, and each line has the following
197      format: 'prop_name=value1,value2'. e.g. 'ro.boot.product.sku=std,pro'
198      The path specified can either be relative to the current working directory
199      or the path to a file inside of input_target_files.
200
201  --skip_postinstall
202      Skip the postinstall hooks when generating an A/B OTA package (default:
203      False). Note that this discards ALL the hooks, including non-optional
204      ones. Should only be used if caller knows it's safe to do so (e.g. all the
205      postinstall work is to dexopt apps and a data wipe will happen immediately
206      after). Only meaningful when generating A/B OTAs.
207
208  --partial "<PARTITION> [<PARTITION>[...]]"
209      Generate partial updates, overriding ab_partitions list with the given
210      list. Specify --partial= without a partition list to let the tooling
211      auto-detect the partial partition list.
212
213  --custom_image <custom_partition=custom_image>
214      Use the specified custom_image to update custom_partition when generating
215      an A/B OTA package. e.g. "--custom_image oem=oem.img --custom_image
216      cus=cus_test.img"
217
218  --disable_vabc
219      Disable Virtual A/B Compression, for builds that have compression enabled
220      by default.
221
222  --vabc_downgrade
223      Don't disable Virtual A/B Compression for downgrading OTAs.
224      For VABC downgrades, we must finish merging before doing the data wipe,
225      and since a data wipe is required for a downgrading OTA, this might
226      cause a long wait time in recovery.
227
228  --enable_vabc_xor
229      Enable the VABC XOR feature. This reduces the space requirements for the OTA, but OTA installation will be slower.
230
231  --force_minor_version
232      Override the update_engine minor version for delta generation.
233
234  --compressor_types
235      A colon ':' separated list of compressors. Allowed values are bz2 and brotli.
236
237  --enable_zucchini
238      Whether to enable the zucchini feature. Generates a smaller OTA but uses more memory, and OTA generation will take longer.
239
240  --enable_puffdiff
241      Whether to enable the puffdiff feature. Generates a smaller OTA but uses more memory, and OTA generation will take longer.
242
243  --enable_lz4diff
244      Whether to enable the lz4diff feature. Generates a smaller OTA for EROFS
245      builds but uses more memory.
246
247  --spl_downgrade
248      Force generation of an SPL downgrade OTA. Only needed if the target
249      build has an older SPL.
250
251  --vabc_compression_param
252      Compression algorithm to be used for VABC. Available options: gz, lz4, zstd, brotli, none.
253      Compression level can be specified by appending ",$LEVEL" to the option.
254      e.g. --vabc_compression_param=gz,9 specifies level 9 compression with the gz algorithm.
255
256  --security_patch_level
257      Override the security patch level in the target files.
258
259  --max_threads
260      Specify the maximum number of threads allowed when generating an A/B OTA.
261
262  --vabc_cow_version
263      Specify the VABC COW version to be used.
264
265  --compression_factor
266      Specify the maximum block size to be compressed at once during OTA. Supported options: 4k, 8k, 16k, 32k, 64k, 128k, 256k.
267
268  --full_ota_partitions
269      Specify a list of partitions that should be updated in full OTA fashion,
270      even if an incremental OTA is about to be generated.
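
Example invocations (illustrative; paths and keys are placeholders):

  # Generate a full OTA, signed with the default test key:
  ota_from_target_files target-files.zip full-ota.zip

  # Generate an incremental OTA from a previous build, signed with a given key:
  ota_from_target_files -k path/to/releasekey \
      -i previous-target-files.zip target-files.zip incremental-ota.zip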
271"""
272
273from __future__ import print_function
274
275import logging
276import multiprocessing
277import os
278import os.path
279import re
280import shutil
281import subprocess
282import sys
283import zipfile
284
285import care_map_pb2
286import common
287import ota_utils
288import payload_signer
289from ota_utils import (VABC_COMPRESSION_PARAM_SUPPORT, FinalizeMetadata, GetPackageMetadata,
290                       PayloadGenerator, SECURITY_PATCH_LEVEL_PROP_NAME, ExtractTargetFiles, CopyTargetFilesDir, TARGET_FILES_IMAGES_SUBDIR)
291from common import DoesInputFileContain, IsSparseImage
292import target_files_diff
293from non_ab_ota import GenerateNonAbOtaPackage
294from payload_signer import PayloadSigner
295
296if sys.hexversion < 0x02070000:
297  print("Python 2.7 or newer is required.", file=sys.stderr)
298  sys.exit(1)
299
300logger = logging.getLogger(__name__)
301
302OPTIONS = ota_utils.OPTIONS
303OPTIONS.verify = False
304OPTIONS.patch_threshold = 0.95
305OPTIONS.wipe_user_data = False
306OPTIONS.extra_script = None
307OPTIONS.worker_threads = multiprocessing.cpu_count() // 2
308if OPTIONS.worker_threads == 0:
309  OPTIONS.worker_threads = 1
310OPTIONS.two_step = False
311OPTIONS.include_secondary = False
312OPTIONS.block_based = True
313OPTIONS.updater_binary = None
314OPTIONS.oem_dicts = None
315OPTIONS.oem_source = None
316OPTIONS.oem_no_mount = False
317OPTIONS.full_radio = False
318OPTIONS.full_bootloader = False
319# Stash size cannot exceed cache_size * threshold.
320OPTIONS.cache_size = None
321OPTIONS.stash_threshold = 0.8
322OPTIONS.log_diff = None
323OPTIONS.extracted_input = None
324OPTIONS.skip_postinstall = False
325OPTIONS.skip_compatibility_check = False
326OPTIONS.disable_fec_computation = False
327OPTIONS.disable_verity_computation = False
328OPTIONS.partial = None
329OPTIONS.custom_images = {}
330OPTIONS.disable_vabc = False
331OPTIONS.spl_downgrade = False
332OPTIONS.vabc_downgrade = False
333OPTIONS.enable_vabc_xor = True
334OPTIONS.force_minor_version = None
335OPTIONS.compressor_types = None
336OPTIONS.enable_zucchini = False
337OPTIONS.enable_puffdiff = None
338OPTIONS.enable_lz4diff = False
339OPTIONS.vabc_compression_param = None
340OPTIONS.security_patch_level = None
341OPTIONS.max_threads = None
342OPTIONS.vabc_cow_version = None
343OPTIONS.compression_factor = None
344OPTIONS.full_ota_partitions = None
345
346
347POSTINSTALL_CONFIG = 'META/postinstall_config.txt'
348DYNAMIC_PARTITION_INFO = 'META/dynamic_partitions_info.txt'
349MISC_INFO = 'META/misc_info.txt'
350AB_PARTITIONS = 'META/ab_partitions.txt'
351
352# Files to be unzipped for target diffing purpose.
353TARGET_DIFFING_UNZIP_PATTERN = ['BOOT', 'RECOVERY', 'SYSTEM/*', 'VENDOR/*',
354                                'PRODUCT/*', 'SYSTEM_EXT/*', 'ODM/*',
355                                'VENDOR_DLKM/*', 'ODM_DLKM/*', 'SYSTEM_DLKM/*']
356RETROFIT_DAP_UNZIP_PATTERN = ['OTA/super_*.img', AB_PARTITIONS]
357
358# Images to be excluded from secondary payload. We essentially only keep
359# 'system_other' and bootloader partitions.
360SECONDARY_PAYLOAD_SKIPPED_IMAGES = [
361    'boot', 'dtbo', 'modem', 'odm', 'odm_dlkm', 'product', 'radio', 'recovery',
362    'system_dlkm', 'system_ext', 'vbmeta', 'vbmeta_system', 'vbmeta_vendor',
363    'vendor', 'vendor_boot']
364
365
366def _LoadOemDicts(oem_source):
367  """Returns the list of loaded OEM properties dict."""
368  if not oem_source:
369    return None
370
371  oem_dicts = []
372  for oem_file in oem_source:
373    oem_dicts.append(common.LoadDictionaryFromFile(oem_file))
374  return oem_dicts
375
376def ModifyKeyvalueList(content: str, key: str, value: str):
377  """ Update update the key value list with specified key and value
378  Args:
379    content: The string content of dynamic_partitions_info.txt. Each line
380      should be a key valur pair, where string before the first '=' are keys,
381      remaining parts are values.
382    key: the key of the key value pair to modify
383    value: the new value to replace with
384
385  Returns:
386    Updated content of the key value list
387  """
388  output_list = []
389  for line in content.splitlines():
390    if line.startswith(key+"="):
391      continue
392    output_list.append(line)
393  output_list.append("{}={}".format(key, value))
394  return "\n".join(output_list)
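
# Illustrative example (not executed), assuming hypothetical two-line content:
#   ModifyKeyvalueList("virtual_ab=true\nvirtual_ab_compression_method=gz",
#                      "virtual_ab_compression_method", "lz4")
#   # -> "virtual_ab=true\nvirtual_ab_compression_method=lz4"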
395
396def ModifyVABCCompressionParam(content, algo):
397  """ Update update VABC Compression Param in dynamic_partitions_info.txt
398  Args:
399    content: The string content of dynamic_partitions_info.txt
400    algo: The compression algorithm should be used for VABC. See
401          https://cs.android.com/android/platform/superproject/+/master:system/core/fs_mgr/libsnapshot/cow_writer.cpp;l=127;bpv=1;bpt=1?q=CowWriter::ParseOptions&sq=
402  Returns:
403    Updated content of dynamic_partitions_info.txt , with custom compression algo
404  """
405  return ModifyKeyvalueList(content, "virtual_ab_compression_method", algo)
406
407
408def UpdatesInfoForSpecialUpdates(content, partitions_filter,
409                                 delete_keys=None):
410  """ Updates info file for secondary payload generation, partial update, etc.
411
412    Scan each line in the info file, and remove the unwanted partitions from
413    the dynamic partition list in the related properties. e.g.
414    "super_google_dynamic_partitions_partition_list=system vendor product"
415    will become "super_google_dynamic_partitions_partition_list=system".
416
417  Args:
418    content: The content of the input info file. e.g. misc_info.txt.
419    partitions_filter: A function to filter the desired partitions from a given
420      list
421    delete_keys: A list of keys to delete in the info file
422
423  Returns:
424    A string of the updated info content.
425  """
426
427  output_list = []
428  # The suffix in partition_list variables that follows the name of the
429  # partition group.
430  list_suffix = 'partition_list'
431  for line in content.splitlines():
432    if line.startswith('#') or '=' not in line:
433      output_list.append(line)
434      continue
435    key, value = line.strip().split('=', 1)
436
437    if delete_keys and key in delete_keys:
438      pass
439    elif key.endswith(list_suffix):
440      partitions = value.split()
441      # TODO for partial update, partitions in the same group must be all
442      # updated or all omitted
443      partitions = filter(partitions_filter, partitions)
444      output_list.append('{}={}'.format(key, ' '.join(partitions)))
445    else:
446      output_list.append(line)
447  return '\n'.join(output_list)
448
449
450def GetTargetFilesZipForSecondaryImages(input_file, skip_postinstall=False):
451  """Returns a target-files.zip file for generating secondary payload.
452
453  Although the original target-files.zip already contains secondary slot
454  images (i.e. IMAGES/system_other.img), we need to rename the files to the
455  ones without _other suffix. Note that we cannot instead modify the names in
456  META/ab_partitions.txt, because there are no matching partitions on device.
457
458  For the partitions that don't have secondary images, the ones for primary
459  slot will be used. This is to ensure that we always have valid boot, vbmeta,
460  bootloader images in the inactive slot.
461
462  After writing system_other to the inactive slot's system partition,
463  PackageManagerService will read `ro.cp_system_other_odex`, and set
464  `sys.cppreopt` to "requested". Then, according to
465  system/extras/cppreopts/cppreopts.rc, init will mount system_other at
466  /postinstall, and execute `cppreopts` to copy optimized APKs from
467  /postinstall to /data.
468
469  Args:
470    input_file: The input target-files.zip file.
471    skip_postinstall: Whether to skip copying the postinstall config file.
472
473  Returns:
474    The filename of the target-files.zip for generating secondary payload.
475  """
476
477  def GetInfoForSecondaryImages(info_file):
478    """Updates info file for secondary payload generation."""
479    with open(info_file) as f:
480      content = f.read()
481    # Remove the virtual_ab flag from the secondary payload so that the OTA
482    # client doesn't use snapshots for the secondary update.
483    delete_keys = ['virtual_ab', "virtual_ab_retrofit"]
484    return UpdatesInfoForSpecialUpdates(
485        content, lambda p: p not in SECONDARY_PAYLOAD_SKIPPED_IMAGES,
486        delete_keys)
487
488  target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
489  target_zip = zipfile.ZipFile(target_file, 'w', allowZip64=True)
490
491  fileslist = []
492  for (root, dirs, files) in os.walk(input_file):
493    root = root[len(input_file):].lstrip("/")  # strip the input_file path prefix
494    fileslist.extend([os.path.join(root, d) for d in dirs])
495    fileslist.extend([os.path.join(root, d) for d in files])
496
497  input_tmp = input_file
498  for filename in fileslist:
499    unzipped_file = os.path.join(input_tmp, *filename.split('/'))
500    if filename == 'IMAGES/system_other.img':
501      common.ZipWrite(target_zip, unzipped_file, arcname='IMAGES/system.img')
502
503    # Primary images and friends need to be skipped explicitly.
504    elif filename in ('IMAGES/system.img',
505                      'IMAGES/system.map'):
506      pass
507
508    # Copy images that are not in SECONDARY_PAYLOAD_SKIPPED_IMAGES.
509    elif filename.startswith(('IMAGES/', 'RADIO/')):
510      image_name = os.path.basename(filename)
511      if image_name not in ['{}.img'.format(partition) for partition in
512                            SECONDARY_PAYLOAD_SKIPPED_IMAGES]:
513        common.ZipWrite(target_zip, unzipped_file, arcname=filename)
514
515    # Skip copying the postinstall config if requested.
516    elif skip_postinstall and filename == POSTINSTALL_CONFIG:
517      pass
518
519    elif filename.startswith('META/'):
520      # Remove the unnecessary partitions for secondary images from the
521      # ab_partitions file.
522      if filename == AB_PARTITIONS:
523        with open(unzipped_file) as f:
524          partition_list = f.read().splitlines()
525        partition_list = [partition for partition in partition_list if partition
526                          and partition not in SECONDARY_PAYLOAD_SKIPPED_IMAGES]
527        common.ZipWriteStr(target_zip, filename,
528                           '\n'.join(partition_list))
529      # Remove the unnecessary partitions from the dynamic partitions list.
530      elif (filename == 'META/misc_info.txt' or
531            filename == DYNAMIC_PARTITION_INFO):
532        modified_info = GetInfoForSecondaryImages(unzipped_file)
533        common.ZipWriteStr(target_zip, filename, modified_info)
534      else:
535        common.ZipWrite(target_zip, unzipped_file, arcname=filename)
536
537  common.ZipClose(target_zip)
538
539  return target_file
540
541
542def GetTargetFilesZipWithoutPostinstallConfig(input_file):
543  """Returns a target-files.zip that's not containing postinstall_config.txt.
544
545  This allows the brillo_update_payload script to skip writing all the
546  postinstall hooks in the generated payload. 'META/postinstall_config.txt'
547  is removed from the input target files if present; otherwise the input is
548  returned unchanged.
549
550  Args:
551    input_file: The input target-files.zip filename.
552
553  Returns:
554    The filename of target-files.zip that doesn't contain postinstall config.
555  """
556  config_path = os.path.join(input_file, POSTINSTALL_CONFIG)
557  if os.path.exists(config_path):
558    os.unlink(config_path)
559  return input_file
560
561
562def ParseInfoDict(target_file_path):
563  return common.LoadInfoDict(target_file_path)
564
565def ModifyTargetFilesDynamicPartitionInfo(input_file, key, value):
566  """Returns a target-files.zip with a custom VABC compression param.
567  Args:
568    input_file: The input target-files.zip path
569    vabc_compression_param: Custom Virtual AB Compression algorithm
570
571  Returns:
572    The path to modified target-files.zip
573  """
574  if os.path.isdir(input_file):
575    dynamic_partition_info_path = os.path.join(
576        input_file, *DYNAMIC_PARTITION_INFO.split("/"))
577    with open(dynamic_partition_info_path, "r") as fp:
578      dynamic_partition_info = fp.read()
579    dynamic_partition_info = ModifyKeyvalueList(
580        dynamic_partition_info, key, value)
581    with open(dynamic_partition_info_path, "w") as fp:
582      fp.write(dynamic_partition_info)
583    return input_file
584
585  target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
586  shutil.copyfile(input_file, target_file)
587  common.ZipDelete(target_file, DYNAMIC_PARTITION_INFO)
588  with zipfile.ZipFile(input_file, 'r', allowZip64=True) as zfp:
589    dynamic_partition_info = zfp.read(DYNAMIC_PARTITION_INFO).decode()
590    dynamic_partition_info = ModifyKeyvalueList(
591        dynamic_partition_info, key, value)
592    with zipfile.ZipFile(target_file, "a", allowZip64=True) as output_zip:
593      output_zip.writestr(DYNAMIC_PARTITION_INFO, dynamic_partition_info)
594  return target_file
595
596def GetTargetFilesZipForCustomVABCCompression(input_file, vabc_compression_param):
597  """Returns a target-files.zip with a custom VABC compression param.
598  Args:
599    input_file: The input target-files.zip path
600    vabc_compression_param: Custom Virtual AB Compression algorithm
601
602  Returns:
603    The path to modified target-files.zip
604  """
605  return ModifyTargetFilesDynamicPartitionInfo(input_file, "virtual_ab_compression_method", vabc_compression_param)
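
# Illustrative example (not executed): a hypothetical call like
#   GetTargetFilesZipForCustomVABCCompression(target_files, "lz4,9")
# rewrites dynamic_partitions_info.txt so that it contains
#   virtual_ab_compression_method=lz4,9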
606
607
608def GetTargetFilesZipForPartialUpdates(input_file, ab_partitions):
609  """Returns a target-files.zip for partial ota update package generation.
610
611  This function modifies ab_partitions list with the desired partitions before
612  calling the brillo_update_payload script. It also cleans up the reference to
613  the excluded partitions in the info file, e.g. misc_info.txt.
614
615  Args:
616    input_file: The input target-files.zip filename.
617    ab_partitions: A list of partitions to include in the partial update
618
619  Returns:
620    The filename of target-files.zip used for partial ota update.
621  """
622
623  original_ab_partitions = common.ReadFromInputFile(input_file, AB_PARTITIONS)
624
625  unrecognized_partitions = [partition for partition in ab_partitions if
626                             partition not in original_ab_partitions]
627  if unrecognized_partitions:
628    raise ValueError("Unrecognized partitions when generating partial updates",
629                     unrecognized_partitions)
630
631  logger.info("Generating partial updates for %s", ab_partitions)
632  for subdir in ["IMAGES", "RADIO", "PREBUILT_IMAGES"]:
633    image_dir = os.path.join(input_file, subdir)
634    if not os.path.exists(image_dir):
635      continue
636    for filename in os.listdir(image_dir):
637      filepath = os.path.join(image_dir, filename)
638      if filename.endswith(".img"):
639        partition_name = filename.removesuffix(".img")
640        if partition_name not in ab_partitions:
641          os.unlink(filepath)
642
643  common.WriteToInputFile(input_file, 'META/ab_partitions.txt',
644                          '\n'.join(ab_partitions))
645  CARE_MAP_ENTRY = "META/care_map.pb"
646  if DoesInputFileContain(input_file, CARE_MAP_ENTRY):
647    caremap = care_map_pb2.CareMap()
648    caremap.ParseFromString(
649        common.ReadBytesFromInputFile(input_file, CARE_MAP_ENTRY))
650    filtered = [
651        part for part in caremap.partitions if part.name in ab_partitions]
652    del caremap.partitions[:]
653    caremap.partitions.extend(filtered)
654    common.WriteBytesToInputFile(input_file, CARE_MAP_ENTRY,
655                                 caremap.SerializeToString())
656
657  for info_file in ['META/misc_info.txt', DYNAMIC_PARTITION_INFO]:
658    if not DoesInputFileContain(input_file, info_file):
659      logger.warning('Cannot find %s in input zipfile', info_file)
660      continue
661
662    content = common.ReadFromInputFile(input_file, info_file)
663    modified_info = UpdatesInfoForSpecialUpdates(
664        content, lambda p: p in ab_partitions)
665    if OPTIONS.vabc_compression_param and info_file == DYNAMIC_PARTITION_INFO:
666      modified_info = ModifyVABCCompressionParam(
667          modified_info, OPTIONS.vabc_compression_param)
668    common.WriteToInputFile(input_file, info_file, modified_info)
669
670  def IsInPartialList(postinstall_line: str):
671    idx = postinstall_line.find("=")
672    if idx < 0:
673      return False
674    key = postinstall_line[:idx]
675    logger.info("%s %s", key, ab_partitions)
676    for part in ab_partitions:
677      if key.endswith("_" + part):
678        return True
679    return False
680
681  if common.DoesInputFileContain(input_file, POSTINSTALL_CONFIG):
682    postinstall_config = common.ReadFromInputFile(
683        input_file, POSTINSTALL_CONFIG)
684    postinstall_config = [
685        line for line in postinstall_config.splitlines() if IsInPartialList(line)]
686    if postinstall_config:
687      postinstall_config = "\n".join(postinstall_config)
688      common.WriteToInputFile(
689          input_file, POSTINSTALL_CONFIG, postinstall_config)
690    else:
691      os.unlink(os.path.join(input_file, POSTINSTALL_CONFIG))
692
693  return input_file
694
695
696def GetTargetFilesZipForRetrofitDynamicPartitions(input_file,
697                                                  super_block_devices,
698                                                  dynamic_partition_list):
699  """Returns a target-files.zip for retrofitting dynamic partitions.
700
701  This allows brillo_update_payload to generate an OTA based on the exact
702  bits on the block devices. Postinstall is disabled.
703
704  Args:
705    input_file: The input target-files.zip filename.
706    super_block_devices: The list of super block devices
707    dynamic_partition_list: The list of dynamic partitions
708
709  Returns:
710    The filename of target-files.zip with *.img replaced with super_*.img for
711    each block device in super_block_devices.
712  """
713  assert super_block_devices, "No super_block_devices are specified."
714
715  replace = {'OTA/super_{}.img'.format(dev): 'IMAGES/{}.img'.format(dev)
716             for dev in super_block_devices}
717
718  # Remove partitions from META/ab_partitions.txt that are in
719  # dynamic_partition_list but not in super_block_devices so that
720  # brillo_update_payload won't generate update for those logical partitions.
721  ab_partitions_lines = common.ReadFromInputFile(
722      input_file, AB_PARTITIONS).split("\n")
723  ab_partitions = [line.strip() for line in ab_partitions_lines]
724  # Assert that all super_block_devices are in ab_partitions
725  super_device_not_updated = [partition for partition in super_block_devices
726                              if partition not in ab_partitions]
727  assert not super_device_not_updated, \
728      "{} is in super_block_devices but not in {}".format(
729          super_device_not_updated, AB_PARTITIONS)
730  # ab_partitions -= (dynamic_partition_list - super_block_devices)
731  to_delete = [AB_PARTITIONS]
732
733  # Always skip postinstall for a retrofit update.
734  to_delete += [POSTINSTALL_CONFIG]
735
736  # Delete dynamic_partitions_info.txt so that brillo_update_payload thinks this
737  # is a regular update on devices without dynamic partitions support.
738  to_delete += [DYNAMIC_PARTITION_INFO]
739
740  # Remove the existing partition images as well as the map files.
741  to_delete += list(replace.values())
742  to_delete += ['IMAGES/{}.map'.format(dev) for dev in super_block_devices]
743  for item in to_delete:
744    os.unlink(os.path.join(input_file, item))
745
746  # Write super_{foo}.img as {foo}.img.
747  for src, dst in replace.items():
748    assert DoesInputFileContain(input_file, src), \
749        'Missing {} in {}; {} cannot be written'.format(src, input_file, dst)
750    source_path = os.path.join(input_file, *src.split("/"))
751    target_path = os.path.join(input_file, *dst.split("/"))
752    os.rename(source_path, target_path)
753
754  # Write new ab_partitions.txt file
755  new_ab_partitions = os.path.join(input_file, AB_PARTITIONS)
756  with open(new_ab_partitions, 'w') as f:
757    for partition in ab_partitions:
758      if (partition in dynamic_partition_list and
759              partition not in super_block_devices):
760        logger.info("Dropping %s from ab_partitions.txt", partition)
761        continue
762      f.write(partition + "\n")
763
764  return input_file
765
766
767def GetTargetFilesZipForCustomImagesUpdates(input_file, custom_images: dict):
768  """Returns a target-files.zip for custom partitions update.
769
770  This function renames the given custom images in the IMAGES/ directory of
771  the extracted target files to their partition names.
772
773  Args:
774    input_file: The input target-files extracted directory
775    custom_images: A map of custom partitions and custom images.
776
777  Returns:
778    The extracted dir of a target-files.zip which has renamed the custom images
779    in the IMAGES/ to their partition names.
780  """
781  for custom_image in custom_images.values():
782    if not os.path.exists(os.path.join(input_file, "IMAGES", custom_image)):
783      raise ValueError("Specified custom image {} not found in target files {}, available images are {}",
784                       custom_image, input_file, os.listdir(os.path.join(input_file, "IMAGES")))
785
786  for custom_partition, custom_image in custom_images.items():
787    default_custom_image = '{}.img'.format(custom_partition)
788    if default_custom_image != custom_image:
789      src = os.path.join(input_file, 'IMAGES', custom_image)
790      dst = os.path.join(input_file, 'IMAGES', default_custom_image)
791      os.rename(src, dst)
792
793  return input_file
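
# Illustrative example (not executed), with a hypothetical custom image map:
#   GetTargetFilesZipForCustomImagesUpdates(extracted_dir, {"oem": "oem_test.img"})
#   # renames IMAGES/oem_test.img to IMAGES/oem.img inside extracted_dir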
794
795
796def GeneratePartitionTimestampFlags(partition_state):
797  partition_timestamps = [
798      part.partition_name + ":" + part.version
799      for part in partition_state]
800  return ["--partition_timestamps", ",".join(partition_timestamps)]
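
# Illustrative example (not executed): for a partition_state with hypothetical
# entries boot (version "1600000000") and system (version "1600000001"), this
# returns ["--partition_timestamps", "boot:1600000000,system:1600000001"].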
801
802
803def GeneratePartitionTimestampFlagsDowngrade(
804        pre_partition_state, post_partition_state):
805  assert pre_partition_state is not None
806  partition_timestamps = {}
807  for part in post_partition_state:
808    partition_timestamps[part.partition_name] = part.version
809  for part in pre_partition_state:
810    if part.partition_name in partition_timestamps:
811      partition_timestamps[part.partition_name] = \
812          max(part.version, partition_timestamps[part.partition_name])
813  return [
814      "--partition_timestamps",
815      ",".join([key + ":" + val for (key, val)
816                in partition_timestamps.items()])
817  ]
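
# Illustrative example (not executed): for a downgrade where the source build
# reports system version "200" and the target build reports "100", the larger
# value per partition is kept, yielding
#   ["--partition_timestamps", "system:200"]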
818
819
820def SupportsMainlineGkiUpdates(target_file):
821  """Return True if the build supports MainlineGKIUpdates.
822
823  This function scans the product.img file in IMAGES/ directory for
824  pattern |*/apex/com.android.gki.*.apex|. If there are files
825  matching this pattern, conclude that build supports mainline
826  GKI and return True
827
828  Args:
829    target_file: Path to a target_file.zip, or an extracted directory
830  Return:
831    True if this build supports Mainline GKI updates.
832  """
833  if target_file is None:
834    return False
835  if os.path.isfile(target_file):
836    target_file = common.UnzipTemp(target_file, ["IMAGES/product.img"])
837  assert os.path.isdir(target_file), \
838      "{} must be a path to a zip archive or a dir containing extracted"\
839      " target_files".format(target_file)
841  image_file = os.path.join(target_file, "IMAGES", "product.img")
842
843  if not os.path.isfile(image_file):
844    return False
845
846  if IsSparseImage(image_file):
847    # Unsparse the image
848    tmp_img = common.MakeTempFile(suffix=".img")
849    subprocess.check_output(["simg2img", image_file, tmp_img])
850    image_file = tmp_img
851
852  cmd = ["debugfs_static", "-R", "ls -p /apex", image_file]
853  output = subprocess.check_output(cmd).decode()
854
855  pattern = re.compile(r"com\.android\.gki\..*\.apex")
856  return pattern.search(output) is not None
857
858
859def ExtractOrCopyTargetFiles(target_file):
860  if os.path.isdir(target_file):
861    return CopyTargetFilesDir(target_file)
862  else:
863    return ExtractTargetFiles(target_file)
864
865
866def ValidateCompressionParam(target_info):
867  vabc_compression_param = OPTIONS.vabc_compression_param
868  if vabc_compression_param:
869    minimum_api_level_required = VABC_COMPRESSION_PARAM_SUPPORT[vabc_compression_param.split(",")[0]]
870    if target_info.vendor_api_level < minimum_api_level_required:
871      raise ValueError("Specified VABC compression param {} is only supported for API level >= {}, device is on API level {}".format(
872          vabc_compression_param, minimum_api_level_required, target_info.vendor_api_level))
873
874
875def GenerateAbOtaPackage(target_file, output_file, source_file=None):
876  """Generates an Android OTA package that has A/B update payload."""
877  # If input target_files are directories, create a copy so that we can modify
878  # them directly
879  target_info = common.BuildInfo(OPTIONS.info_dict, OPTIONS.oem_dicts)
880  if OPTIONS.disable_vabc and target_info.is_release_key:
881    raise ValueError("Disabling VABC on release-key builds is not supported.")
882
883  ValidateCompressionParam(target_info)
884  vabc_compression_param = target_info.vabc_compression_param
885
886  target_file = ExtractOrCopyTargetFiles(target_file)
887  if source_file is not None:
888    source_file = ExtractOrCopyTargetFiles(source_file)
889  # Stage the output zip package for package signing.
890  if not OPTIONS.no_signing:
891    staging_file = common.MakeTempFile(suffix='.zip')
892  else:
893    staging_file = output_file
894  output_zip = zipfile.ZipFile(staging_file, "w",
895                               compression=zipfile.ZIP_DEFLATED,
896                               allowZip64=True)
897
898  if source_file is not None:
899    source_file = ExtractTargetFiles(source_file)
900    if OPTIONS.full_ota_partitions:
901      for partition in OPTIONS.full_ota_partitions:
902        for subdir in TARGET_FILES_IMAGES_SUBDIR:
903          image_path = os.path.join(source_file, subdir, partition + ".img")
904          if os.path.exists(image_path):
905            logger.info(
906                "Ignoring source image %s for partition %s because it is configured to use full OTA", image_path, partition)
907            os.remove(image_path)
908    assert "ab_partitions" in OPTIONS.source_info_dict, \
909        "META/ab_partitions.txt is required for ab_update."
910    assert "ab_partitions" in OPTIONS.target_info_dict, \
911        "META/ab_partitions.txt is required for ab_update."
912    target_info = common.BuildInfo(OPTIONS.target_info_dict, OPTIONS.oem_dicts)
913    source_info = common.BuildInfo(OPTIONS.source_info_dict, OPTIONS.oem_dicts)
914    # If the source supports VABC, delta_generator/update_engine will attempt
915    # to use VABC. This is dangerous, as the target build won't have snapuserd
916    # to serve I/O requests when the device boots. Therefore, disable VABC if
917    # either build doesn't support it.
918    if not source_info.is_vabc or not target_info.is_vabc:
919      logger.info("Either source or target does not support VABC, disabling.")
920      OPTIONS.disable_vabc = True
921    if OPTIONS.vabc_compression_param is None and \
922            source_info.vabc_compression_param != target_info.vabc_compression_param:
923      logger.info("Source build and target build use different compression methods {} vs {}, default to source builds parameter {}".format(
924          source_info.vabc_compression_param, target_info.vabc_compression_param, source_info.vabc_compression_param))
925      vabc_compression_param = source_info.vabc_compression_param
926    # Virtual A/B COW version 3 was introduced in Android U with improved
927    # memory and install-time performance. OTAs where both the source build
928    # and the target build have VIRTUAL_AB_COW_VERSION = 3 can use the new
929    # format. Otherwise, fall back to older versions.
930    if not OPTIONS.vabc_cow_version:
931      if not source_info.vabc_cow_version or not target_info.vabc_cow_version:
932        logger.info("Source or Target doesn't have VABC_COW_VERSION specified, default to version 2")
933        OPTIONS.vabc_cow_version = 2
934      elif source_info.vabc_cow_version != target_info.vabc_cow_version:
935        logger.info("Source and Target have different cow VABC_COW_VERSION specified, default to minimum version")
936        OPTIONS.vabc_cow_version = min(source_info.vabc_cow_version, target_info.vabc_cow_version)
937
938    # Virtual AB Compression was introduced in Android S.
939    # Later, we backported VABC to Android R. But verity support was not
940    # backported, so if VABC is used and we are on Android R, disable
941    # verity computation.
942    if not OPTIONS.disable_vabc and source_info.is_android_r:
943      OPTIONS.disable_verity_computation = True
944      OPTIONS.disable_fec_computation = True
945
946  else:
947    assert "ab_partitions" in OPTIONS.info_dict, \
948        "META/ab_partitions.txt is required for ab_update."
949    source_info = None
950    if not OPTIONS.vabc_cow_version:
951      if not target_info.vabc_cow_version:
952          OPTIONS.vabc_cow_version = 2
953      elif target_info.vabc_cow_version >= "3" and target_info.vendor_api_level < 35:
954        logger.warning(
955              "This full OTA is configured to use VABC COW version 3,"
956              " which is supported since"
957              " Android API level 35, but the device was"
958              " launched with API level {}. If this full OTA is"
959              " served to a device running an old build, the OTA might fail due to an"
960              " unsupported VABC COW version. For safety, version 2 is used because"
961              " it's supported since day 1.".format(
962                  target_info.vendor_api_level))
963        OPTIONS.vabc_cow_version = 2
964    if OPTIONS.vabc_compression_param is None and vabc_compression_param:
965      minimum_api_level_required = VABC_COMPRESSION_PARAM_SUPPORT[
966          vabc_compression_param]
967      if target_info.vendor_api_level < minimum_api_level_required:
968        logger.warning(
969            "This full OTA is configured to use VABC compression algorithm"
970            " {}, which is supported since"
971            " Android API level {}, but device is "
972            "launched with {} . If this full OTA is"
973            " served to a device running old build, OTA might fail due to "
974            "unsupported compression parameter. For safety, gz is used because "
975            "it's supported since day 1.".format(
976                vabc_compression_param,
977                minimum_api_level_required,
978                target_info.vendor_api_level))
979        vabc_compression_param = "gz"
980
981  if OPTIONS.partial == []:
982    logger.info(
983        "Automatically detecting partial partition list from input target files.")
984    OPTIONS.partial = target_info.get(
985        "partial_ota_update_partitions_list").split()
986    assert OPTIONS.partial, (
987        "Input target_file does not have partial_ota_update_partitions_list"
988        " defined, failed to auto detect partial partition list. Please specify"
989        " the list of partitions to update manually via --partial=a,b,c , or"
990        " generate a complete OTA by removing the --partial option")
991    OPTIONS.partial.sort()
992    if source_info:
993      source_partial_list = source_info.get(
994          "partial_ota_update_partitions_list").split()
995      if source_partial_list:
996        source_partial_list.sort()
997        if source_partial_list != OPTIONS.partial:
998          logger.warning("Source build and target build have different partial partition lists. Source: %s, target: %s, taking the intersection.",
999                         source_partial_list, OPTIONS.partial)
1000          OPTIONS.partial = list(
1001              set(OPTIONS.partial) & set(source_partial_list))
1002          OPTIONS.partial.sort()
1003    logger.info("Automatically deduced partial partition list: %s",
1004                OPTIONS.partial)
1005
1006  if target_info.vendor_suppressed_vabc:
1007    logger.info("Vendor suppressed VABC. Disabling")
1008    OPTIONS.disable_vabc = True
1009
1010  # Both source and target build need to support VABC XOR for us to use it.
1011  # Source build's update_engine must be able to write XOR ops, and target
1012  # build's snapuserd must be able to interpret XOR ops.
1013  if not target_info.is_vabc_xor or OPTIONS.disable_vabc or \
1014          (source_info is not None and not source_info.is_vabc_xor):
1015    logger.info("VABC XOR Not supported, disabling")
1016    OPTIONS.enable_vabc_xor = False
1017
1018  if OPTIONS.vabc_compression_param == "none":
1019    logger.info(
1020        "VABC Compression algorithm is set to 'none', disabling VABC xor")
1021    OPTIONS.enable_vabc_xor = False
1022
1023  if OPTIONS.enable_vabc_xor:
1024    api_level = -1
1025    if source_info is not None:
1026      api_level = source_info.vendor_api_level
1027    if api_level == -1:
1028      api_level = target_info.vendor_api_level
1029
1030    # XOR is only supported on T and higher.
1031    if api_level < 33:
1032      logger.error("VABC XOR not supported on this vendor, disabling")
1033      OPTIONS.enable_vabc_xor = False
1034
1035  if OPTIONS.vabc_compression_param:
1036    vabc_compression_param = OPTIONS.vabc_compression_param
1037
1038  additional_args = []
1039
1040  # Prepare custom images.
1041  if OPTIONS.custom_images:
1042    if source_file is not None:
1043      source_file = GetTargetFilesZipForCustomImagesUpdates(
1044           source_file, OPTIONS.custom_images)
1045    target_file = GetTargetFilesZipForCustomImagesUpdates(
1046        target_file, OPTIONS.custom_images)
1047
1048  if OPTIONS.retrofit_dynamic_partitions:
1049    target_file = GetTargetFilesZipForRetrofitDynamicPartitions(
1050        target_file, target_info.get("super_block_devices").strip().split(),
1051        target_info.get("dynamic_partition_list").strip().split())
1052  elif OPTIONS.partial:
1053    target_file = GetTargetFilesZipForPartialUpdates(target_file,
1054                                                     OPTIONS.partial)
1055  if vabc_compression_param != target_info.vabc_compression_param:
1056    target_file = GetTargetFilesZipForCustomVABCCompression(
1057        target_file, vabc_compression_param)
1058  if OPTIONS.vabc_cow_version:
1059    target_file = ModifyTargetFilesDynamicPartitionInfo(target_file, "virtual_ab_cow_version", OPTIONS.vabc_cow_version)
1060  if OPTIONS.compression_factor:
1061    target_file = ModifyTargetFilesDynamicPartitionInfo(target_file, "virtual_ab_compression_factor", OPTIONS.compression_factor)
1062  if OPTIONS.skip_postinstall:
1063    target_file = GetTargetFilesZipWithoutPostinstallConfig(target_file)
1064  # Target_file may have been modified, reparse ab_partitions
1065  target_info.info_dict['ab_partitions'] = common.ReadFromInputFile(target_file,
1066                                                                    AB_PARTITIONS).strip().split("\n")
1067
1068  from check_target_files_vintf import CheckVintfIfTrebleEnabled
1069  CheckVintfIfTrebleEnabled(target_file, target_info)
1070
1071  # Allow boot_variable_file to also exist in target-files
1072  if OPTIONS.boot_variable_file:
1073    if not os.path.isfile(OPTIONS.boot_variable_file):
1074      OPTIONS.boot_variable_file = os.path.join(target_file, OPTIONS.boot_variable_file)
1075  # Metadata to comply with Android OTA package format.
1076  metadata = GetPackageMetadata(target_info, source_info)
1077  # Generate payload.
1078  payload = PayloadGenerator(
1079      wipe_user_data=OPTIONS.wipe_user_data, minor_version=OPTIONS.force_minor_version, is_partial_update=OPTIONS.partial, spl_downgrade=OPTIONS.spl_downgrade)
1080
1081  partition_timestamps_flags = []
1082  # Enforce a max timestamp this payload can be applied on top of.
1083  if OPTIONS.downgrade:
1084    # When generating an OTA between merged target-files, the partition build
1085    # date can decrease in the target, at the same time as ro.build.date.utc
1086    # increases, so always pick the largest value.
1087    max_timestamp = max(source_info.GetBuildProp("ro.build.date.utc"),
1088        str(metadata.postcondition.timestamp))
1089    partition_timestamps_flags = GeneratePartitionTimestampFlagsDowngrade(
1090        metadata.precondition.partition_state,
1091        metadata.postcondition.partition_state
1092    )
1093  else:
1094    max_timestamp = str(metadata.postcondition.timestamp)
1095    partition_timestamps_flags = GeneratePartitionTimestampFlags(
1096        metadata.postcondition.partition_state)
1097
1098  if not ota_utils.IsZucchiniCompatible(source_file, target_file):
1099    logger.warning(
1100        "Builds doesn't support zucchini, or source/target don't have compatible zucchini versions. Disabling zucchini.")
1101    OPTIONS.enable_zucchini = False
1102
1103  security_patch_level = target_info.GetBuildProp(
1104      "ro.build.version.security_patch")
1105  if OPTIONS.security_patch_level is not None:
1106    security_patch_level = OPTIONS.security_patch_level
1107
1108  additional_args += ["--security_patch_level", security_patch_level]
1109
1110  if OPTIONS.max_threads:
1111    additional_args += ["--max_threads", OPTIONS.max_threads]
1112
1113  additional_args += ["--enable_zucchini=" +
1114                      str(OPTIONS.enable_zucchini).lower()]
1115  if OPTIONS.enable_puffdiff is not None:
1116    additional_args += ["--enable_puffdiff=" +
1117                        str(OPTIONS.enable_puffdiff).lower()]
1118
1119  if not ota_utils.IsLz4diffCompatible(source_file, target_file):
1120    logger.warning(
1121        "Source build doesn't support lz4diff, or source/target don't have compatible lz4diff versions. Disabling lz4diff.")
1122    OPTIONS.enable_lz4diff = False
1123
1124  additional_args += ["--enable_lz4diff=" +
1125                      str(OPTIONS.enable_lz4diff).lower()]
1126
1127  env_override = {}
1128  if source_file and OPTIONS.enable_lz4diff:
1129    liblz4_path = os.path.join(source_file, "META", "liblz4.so")
1130    assert os.path.exists(
1131        liblz4_path), "liblz4.so not found in META/ dir of target file {}".format(liblz4_path)
1132    logger.info("Enabling lz4diff %s", liblz4_path)
1133    erofs_compression_param = OPTIONS.target_info_dict.get(
1134        "erofs_default_compressor")
1135    assert erofs_compression_param is not None, "'erofs_default_compressor' not found in META/misc_info.txt of target build. This is required to enable lz4diff."
1136    additional_args += ["--erofs_compression_param", erofs_compression_param]
1137    env_override["LD_PRELOAD"] = liblz4_path + \
1138        ":" + os.environ.get("LD_PRELOAD", "")
1139
1140  if OPTIONS.disable_vabc:
1141    additional_args += ["--disable_vabc=true"]
1142  if OPTIONS.enable_vabc_xor:
1143    additional_args += ["--enable_vabc_xor=true"]
1144  if OPTIONS.compressor_types:
1145    additional_args += ["--compressor_types", OPTIONS.compressor_types]
1146  additional_args += ["--max_timestamp", max_timestamp]
1147
1148  env = dict(os.environ)
1149  if env_override:
1150    logger.info("Using environment variables %s", env_override)
1151    env.update(env_override)
1152  payload.Generate(
1153      target_file,
1154      source_file,
1155      additional_args + partition_timestamps_flags,
1156      env=env
1157  )
1158
1159  # Sign the payload.
1160  pw = OPTIONS.key_passwords[OPTIONS.package_key]
1161  payload_signer = PayloadSigner(
1162      OPTIONS.package_key, OPTIONS.private_key_suffix,
1163      pw, OPTIONS.payload_signer)
1164  payload.Sign(payload_signer)
1165
1166  # Write the payload into output zip.
1167  payload.WriteToZip(output_zip)
1168
1169  # Generate and include the secondary payload that installs secondary images
1170  # (e.g. system_other.img).
1171  if OPTIONS.include_secondary:
1172    # We always include a full payload for the secondary slot, even when
1173    # building an incremental OTA. See the comments for "--include_secondary".
1174    secondary_target_file = GetTargetFilesZipForSecondaryImages(
1175        target_file, OPTIONS.skip_postinstall)
1176    secondary_payload = PayloadGenerator(secondary=True)
1177    secondary_payload.Generate(secondary_target_file,
1178                               additional_args=["--max_timestamp",
1179                                                max_timestamp])
1180    secondary_payload.Sign(payload_signer)
1181    secondary_payload.WriteToZip(output_zip)
1182
1183  # If dm-verity is supported for the device, copy contents of care_map
1184  # into A/B OTA package.
1185  if target_info.get("avb_enable") == "true":
1186    # Adds care_map if either the protobuf format or the plain text one exists.
1187    for care_map_name in ["care_map.pb", "care_map.txt"]:
1188      if not DoesInputFileContain(target_file, "META/" + care_map_name):
1189        continue
1190      care_map_data = common.ReadBytesFromInputFile(
1191          target_file, "META/" + care_map_name)
1192      # In order to support streaming, care_map needs to be packed as
1193      # ZIP_STORED.
1194      common.ZipWriteStr(output_zip, care_map_name, care_map_data,
1195                         compress_type=zipfile.ZIP_STORED)
1196      # break here to avoid going into else when care map has been handled
1197      break
1198    else:
1199      logger.warning("Cannot find care map file in target_file package")
1200
1201  # Add the source apex version for incremental ota updates, and write the
1202  # result apex info to the ota package.
1203  ota_apex_info = ota_utils.ConstructOtaApexInfo(target_file, source_file)
1204  if ota_apex_info is not None:
1205    common.ZipWriteStr(output_zip, "apex_info.pb", ota_apex_info,
1206                       compress_type=zipfile.ZIP_STORED)
1207
1208  # We haven't written the metadata entry yet, which will be handled in
1209  # FinalizeMetadata().
1210  common.ZipClose(output_zip)
1211
1212  FinalizeMetadata(metadata, staging_file, output_file,
1213                   package_key=OPTIONS.package_key)
1214
1215
1216def main(argv):
1217
1218  def option_handler(o, a: str):
1219    if o in ("-i", "--incremental_from"):
1220      OPTIONS.incremental_source = a
1221    elif o == "--full_radio":
1222      OPTIONS.full_radio = True
1223    elif o == "--full_bootloader":
1224      OPTIONS.full_bootloader = True
1225    elif o == "--wipe_user_data":
1226      OPTIONS.wipe_user_data = True
1227    elif o == "--downgrade":
1228      OPTIONS.downgrade = True
1229      OPTIONS.wipe_user_data = True
1230    elif o == "--override_timestamp":
1231      OPTIONS.downgrade = True
1232    elif o in ("-o", "--oem_settings"):
1233      OPTIONS.oem_source = a.split(',')
1234    elif o == "--oem_no_mount":
1235      OPTIONS.oem_no_mount = True
1236    elif o in ("-e", "--extra_script"):
1237      OPTIONS.extra_script = a
1238    elif o in ("-t", "--worker_threads"):
1239      if a.isdigit():
1240        OPTIONS.worker_threads = int(a)
1241      else:
1242        raise ValueError("Cannot parse value %r for option %r - only "
1243                         "integers are allowed." % (a, o))
1244    elif o in ("-2", "--two_step"):
1245      OPTIONS.two_step = True
1246    elif o == "--include_secondary":
1247      OPTIONS.include_secondary = True
1248    elif o == "--no_signing":
1249      OPTIONS.no_signing = True
1250    elif o == "--verify":
1251      OPTIONS.verify = True
1252    elif o == "--block":
1253      OPTIONS.block_based = True
1254    elif o in ("-b", "--binary"):
1255      OPTIONS.updater_binary = a
1256    elif o == "--stash_threshold":
1257      try:
1258        OPTIONS.stash_threshold = float(a)
1259      except ValueError:
1260        raise ValueError("Cannot parse value %r for option %r - expecting "
1261                         "a float" % (a, o))
1262    elif o == "--log_diff":
1263      OPTIONS.log_diff = a
1264    elif o == "--extracted_input_target_files":
1265      OPTIONS.extracted_input = a
1266    elif o == "--skip_postinstall":
1267      OPTIONS.skip_postinstall = True
1268    elif o == "--retrofit_dynamic_partitions":
1269      OPTIONS.retrofit_dynamic_partitions = True
1270    elif o == "--skip_compatibility_check":
1271      OPTIONS.skip_compatibility_check = True
1272    elif o == "--output_metadata_path":
1273      OPTIONS.output_metadata_path = a
1274    elif o == "--disable_fec_computation":
1275      OPTIONS.disable_fec_computation = True
1276    elif o == "--disable_verity_computation":
1277      OPTIONS.disable_verity_computation = True
1278    elif o == "--force_non_ab":
1279      OPTIONS.force_non_ab = True
1280    elif o == "--boot_variable_file":
1281      OPTIONS.boot_variable_file = a
1282    elif o == "--partial":
1283      if a:
1284        partitions = a.split()
1285        if not partitions:
1286          raise ValueError("Cannot parse partitions in {}".format(a))
1287      else:
1288        partitions = []
1289      OPTIONS.partial = partitions
1290    elif o == "--custom_image":
1291      custom_partition, custom_image = a.split("=", 1)
1292      OPTIONS.custom_images[custom_partition] = custom_image
1293    elif o == "--disable_vabc":
1294      OPTIONS.disable_vabc = True
1295    elif o == "--spl_downgrade":
1296      OPTIONS.spl_downgrade = True
1297      OPTIONS.wipe_user_data = True
1298    elif o == "--vabc_downgrade":
1299      OPTIONS.vabc_downgrade = True
1300    elif o == "--enable_vabc_xor":
1301      assert a.lower() in ["true", "false"]
1302      OPTIONS.enable_vabc_xor = a.lower() != "false"
1303    elif o == "--force_minor_version":
1304      OPTIONS.force_minor_version = a
1305    elif o == "--compressor_types":
1306      OPTIONS.compressor_types = a
1307    elif o == "--enable_zucchini":
1308      assert a.lower() in ["true", "false"]
1309      OPTIONS.enable_zucchini = a.lower() != "false"
1310    elif o == "--enable_puffdiff":
1311      assert a.lower() in ["true", "false"]
1312      OPTIONS.enable_puffdiff = a.lower() != "false"
1313    elif o == "--enable_lz4diff":
1314      assert a.lower() in ["true", "false"]
1315      OPTIONS.enable_lz4diff = a.lower() != "false"
1316    elif o == "--vabc_compression_param":
1317      words = a.split(",")
1318      assert 1 <= len(words) <= 2
1319      OPTIONS.vabc_compression_param = a.lower()
1320      if len(words) == 2:
1321        if not words[1].lstrip("-").isdigit():
1322          raise ValueError("Cannot parse value %r for option $COMPRESSION_LEVEL - only "
1323                           "integers are allowed." % words[1])
1324    elif o == "--security_patch_level":
1325      OPTIONS.security_patch_level = a
1326    elif o in ("--max_threads"):
1327      if a.isdigit():
1328        OPTIONS.max_threads = a
1329      else:
1330        raise ValueError("Cannot parse value %r for option %r - only "
1331                         "integers are allowed." % (a, o))
1332    elif o in ("--compression_factor"):
1333        values = ["4k", "8k", "16k", "32k", "64k", "128k", "256k"]
1334        if a[:-1].isdigit() and a in values and a.endswith("k"):
1335            OPTIONS.compression_factor = str(int(a[:-1]) * 1024)
1336        else:
1337            raise ValueError("Please specify value from following options: 4k, 8k, 16k, 32k, 64k, 128k", "256k")
1338
1339    elif o == "--vabc_cow_version":
1340      if a.isdigit():
1341        OPTIONS.vabc_cow_version = a
1342      else:
1343        raise ValueError("Cannot parse value %r for option %r - only "
1344                         "integers are allowed." % (a, o))
1345    elif o == "--full_ota_partitions":
1346      OPTIONS.full_ota_partitions = set(
1347          a.strip().strip("\"").strip("'").split(","))
1348    else:
1349      return False
1350    return True
1351
1352  args = common.ParseOptions(argv, __doc__,
1353                             extra_opts="b:k:i:d:e:t:2o:",
1354                             extra_long_opts=[
1355                                 "incremental_from=",
1356                                 "full_radio",
1357                                 "full_bootloader",
1358                                 "wipe_user_data",
1359                                 "downgrade",
1360                                 "override_timestamp",
1361                                 "extra_script=",
1362                                 "worker_threads=",
1363                                 "two_step",
1364                                 "include_secondary",
1365                                 "no_signing",
1366                                 "block",
1367                                 "binary=",
1368                                 "oem_settings=",
1369                                 "oem_no_mount",
1370                                 "verify",
1371                                 "stash_threshold=",
1372                                 "log_diff=",
1373                                 "extracted_input_target_files=",
1374                                 "skip_postinstall",
1375                                 "retrofit_dynamic_partitions",
1376                                 "skip_compatibility_check",
1377                                 "output_metadata_path=",
1378                                 "disable_fec_computation",
1379                                 "disable_verity_computation",
1380                                 "force_non_ab",
1381                                 "boot_variable_file=",
1382                                 "partial=",
1383                                 "custom_image=",
1384                                 "disable_vabc",
1385                                 "spl_downgrade",
1386                                 "vabc_downgrade",
1387                                 "enable_vabc_xor=",
1388                                 "force_minor_version=",
1389                                 "compressor_types=",
1390                                 "enable_zucchini=",
1391                                 "enable_puffdiff=",
1392                                 "enable_lz4diff=",
1393                                 "vabc_compression_param=",
1394                                 "security_patch_level=",
1395                                 "max_threads=",
1396                                 "vabc_cow_version=",
1397                                 "compression_factor=",
1398                                 "full_ota_partitions=",
1399                             ], extra_option_handler=[option_handler, payload_signer.signer_options])
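  # Signing-related flags are handled by the second entry in the handler list
  # (payload_signer.signer_options); each extra handler returns True only for
  # the options it consumes.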
1400  common.InitLogging()
1401
1402  if len(args) != 2:
1403    common.Usage(__doc__)
1404    sys.exit(1)
1405
1406  # Load the build info dicts from the zip directly or the extracted input
1407  # directory. We don't need to unzip the entire target-files zip, because the
1408  # full contents aren't needed for A/B OTAs (brillo_update_payload unzips what it needs on its own).
1409  # When loading the info dicts, we don't need to provide the second parameter
1410  # to common.LoadInfoDict(). Specifying the second parameter allows replacing
1411  # some properties with their actual paths, such as 'selinux_fc',
1412  # 'ramdisk_dir', which won't be used during OTA generation.
1413  if OPTIONS.extracted_input is not None:
1414    OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.extracted_input)
1415  else:
1416    OPTIONS.info_dict = common.LoadInfoDict(args[0])
1417
1418  if OPTIONS.wipe_user_data:
1419    if not OPTIONS.vabc_downgrade:
1420      logger.info("Detected downgrade/datawipe OTA."
1421                  "When wiping userdata, VABC OTA makes the user "
1422                  "wait in recovery mode for merge to finish. Disable VABC by "
1423                  "default. If you really want to do VABC downgrade, pass "
1424                  "--vabc_downgrade")
1425      OPTIONS.disable_vabc = True
1426    # We should only allow downgrading incrementals (as opposed to full).
1427    # Otherwise the device could be rolled back from an arbitrary build with
1428    # this full OTA package.
1429  if OPTIONS.incremental_source is None and OPTIONS.downgrade:
1430    raise ValueError("Cannot generate downgradable full OTAs")
1431
1432  # TODO(xunchang) For retrofit and partial updates, maybe we should rebuild
1433  # the target-file and reload the info_dict, so that the info stays
1434  # consistent with the modified target-file.
1435
1436  logger.info("--- target info ---")
1437  common.DumpInfoDict(OPTIONS.info_dict)
1438
1439  # Load the source build dict if applicable.
1440  if OPTIONS.incremental_source is not None:
1441    OPTIONS.target_info_dict = OPTIONS.info_dict
1442    OPTIONS.source_info_dict = ParseInfoDict(OPTIONS.incremental_source)
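    # For incrementals, OPTIONS.info_dict describes the target build; it is
    # aliased as target_info_dict above, while the source build gets its own
    # dict loaded from the incremental source package.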
1443
1444    logger.info("--- source info ---")
1445    common.DumpInfoDict(OPTIONS.source_info_dict)
1446
1447  if OPTIONS.partial:
1448    OPTIONS.info_dict['ab_partitions'] = \
1449        list(
1450            set(OPTIONS.info_dict['ab_partitions']) & set(OPTIONS.partial)
1451        )
1452    if OPTIONS.source_info_dict:
1453      OPTIONS.source_info_dict['ab_partitions'] = \
1454          list(
1455              set(OPTIONS.source_info_dict['ab_partitions']) &
1456              set(OPTIONS.partial)
1457          )
1458
1459  # Load OEM dicts if provided.
1460  OPTIONS.oem_dicts = _LoadOemDicts(OPTIONS.oem_source)
1461
1462  # Assume retrofitting dynamic partitions when the base build does not set
1463  # use_dynamic_partitions but the target build does.
1464  if (OPTIONS.source_info_dict and
1465      OPTIONS.source_info_dict.get("use_dynamic_partitions") != "true" and
1466          OPTIONS.target_info_dict.get("use_dynamic_partitions") == "true"):
1467    if OPTIONS.target_info_dict.get("dynamic_partition_retrofit") != "true":
1468      raise common.ExternalError(
1469          "Expect to generate incremental OTA for retrofitting dynamic "
1470          "partitions, but dynamic_partition_retrofit is not set in target "
1471          "build.")
1472    logger.info("Implicitly generating retrofit incremental OTA.")
1473    OPTIONS.retrofit_dynamic_partitions = True
1474
1475  # Skip postinstall for retrofitting dynamic partitions.
1476  if OPTIONS.retrofit_dynamic_partitions:
1477    OPTIONS.skip_postinstall = True
1478
1479  ab_update = OPTIONS.info_dict.get("ab_update") == "true"
1480  allow_non_ab = OPTIONS.info_dict.get("allow_non_ab") == "true"
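  # Both flags come from the target build's info dict (i.e. META/misc_info.txt
  # in the target-files package).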
1481  if OPTIONS.force_non_ab:
1482    assert allow_non_ab,\
1483        "--force_non_ab only allowed on devices that supports non-A/B"
1484    assert ab_update, "--force_non_ab only allowed on A/B devices"
1485
1486  generate_ab = not OPTIONS.force_non_ab and ab_update
1487
1488  # Use the default key to sign the package if not specified with package_key.
1489  # A package key is needed for A/B updates, so always define one if an
1490  # A/B update is being created.
1491  if not OPTIONS.no_signing or generate_ab:
1492    if OPTIONS.package_key is None:
1493      OPTIONS.package_key = OPTIONS.info_dict.get(
1494          "default_system_dev_certificate",
1495          "build/make/target/product/security/testkey")
1496    # Collect the password(s) needed for the package key.
1497    OPTIONS.key_passwords = common.GetKeyPasswords([OPTIONS.package_key])
1498
1499    # Only check for the existence of the key file when using the default
1500    # signer, because a custom signer might not need the key file at all.
1501    # b/191704641
1502    if not OPTIONS.payload_signer:
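      # The private key is expected alongside the certificate, named
      # "<package_key><private_key_suffix>" (the suffix is typically ".pk8").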
1503      private_key_path = OPTIONS.package_key + OPTIONS.private_key_suffix
1504      if not os.path.exists(private_key_path):
1505        raise common.ExternalError(
1506            "Private key {} doesn't exist. Make sure you passed the"
1507            " correct key path through -k option".format(
1508                private_key_path)
1509        )
1510      signapk_abs_path = os.path.join(
1511          OPTIONS.search_path, OPTIONS.signapk_path)
1512      if not os.path.exists(signapk_abs_path):
1513        raise common.ExternalError(
1514            "Failed to find sign apk binary {} in search path {}. Make sure the correct search path is passed via -p".format(OPTIONS.signapk_path, OPTIONS.search_path))
1515
1516  if OPTIONS.source_info_dict:
1517    source_build_prop = OPTIONS.source_info_dict["build.prop"]
1518    target_build_prop = OPTIONS.target_info_dict["build.prop"]
1519    source_spl = source_build_prop.GetProp(SECURITY_PATCH_LEVEL_PROP_NAME)
1520    target_spl = target_build_prop.GetProp(SECURITY_PATCH_LEVEL_PROP_NAME)
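    # SPL values are plain yyyy-mm-dd strings (e.g. "2021-02-05" < "2021-03-05"
    # is True), so the string comparison below orders them chronologically as
    # long as the separator is used consistently.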
1521    is_spl_downgrade = target_spl < source_spl
1522    if is_spl_downgrade and target_build_prop.GetProp("ro.build.tags") == "release-keys":
1523      raise common.ExternalError(
1524          "Target security patch level {} is older than source SPL {} "
1525          "A locked bootloader will reject SPL downgrade no matter "
1526          "what(even if data wipe is done), so SPL downgrade on any "
1527          "release-keys build is not allowed.".format(target_spl, source_spl))
1528
1529    logger.info("SPL downgrade on %s",
1530                target_build_prop.GetProp("ro.build.tags"))
1531    if is_spl_downgrade and not OPTIONS.spl_downgrade and not OPTIONS.downgrade:
1532      raise common.ExternalError(
1533          "Target security patch level {} is older than source SPL {} applying "
1534          "such OTA will likely cause device fail to boot. Pass --spl_downgrade "
1535          "to override this check. This script expects security patch level to "
1536          "be in format yyyy-mm-dd (e.x. 2021-02-05). It's possible to use "
1537          "separators other than -, so as long as it's used consistenly across "
1538          "all SPL dates".format(target_spl, source_spl))
1539    elif not is_spl_downgrade and OPTIONS.spl_downgrade:
1540      raise ValueError("--spl_downgrade specified but no actual SPL downgrade"
1541                       " detected. Please only pass in this flag if you want a"
1542                       " SPL downgrade. Target SPL: {} Source SPL: {}"
1543                       .format(target_spl, source_spl))
1544  if generate_ab:
1545    GenerateAbOtaPackage(
1546        target_file=args[0],
1547        output_file=args[1],
1548        source_file=OPTIONS.incremental_source)
1549
1550  else:
1551    GenerateNonAbOtaPackage(
1552        target_file=args[0],
1553        output_file=args[1],
1554        source_file=OPTIONS.incremental_source)
1555
1556  # Post-OTA-generation work.
1557  if OPTIONS.incremental_source is not None and OPTIONS.log_diff:
1558    logger.info("Generating diff logs...")
1559    logger.info("Unzipping target-files for diffing...")
1560    target_dir = common.UnzipTemp(args[0], TARGET_DIFFING_UNZIP_PATTERN)
1561    source_dir = common.UnzipTemp(
1562        OPTIONS.incremental_source, TARGET_DIFFING_UNZIP_PATTERN)
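    # Only the entries matching TARGET_DIFFING_UNZIP_PATTERN are extracted, so
    # the temporary directories contain just the files worth diffing.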
1563
1564    with open(OPTIONS.log_diff, 'w') as out_file:
1565      target_files_diff.recursiveDiff(
1566          '', source_dir, target_dir, out_file)
1567
1568  logger.info("done.")
1569
1570
1571if __name__ == '__main__':
1572  try:
1573    common.CloseInheritedPipes()
1574    main(sys.argv[1:])
1575  finally:
1576    common.Cleanup()
1577