def WriteIncrementalOTAPackage(target_zip, source_zip, output_zip):
  # Check whether the target package contains SYSTEM/recovery-from-boot.p.
  target_has_recovery_patch = HasRecoveryPatch(target_zip)
  # Check whether the source package contains SYSTEM/recovery-from-boot.p.
  source_has_recovery_patch = HasRecoveryPatch(source_zip)
  # If the options ask for a block-based OTA, delegate the whole job to
  # WriteBlockIncrementalOTAPackage.
  if (OPTIONS.block_based and
      target_has_recovery_patch and
      source_has_recovery_patch):
    return WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_zip)
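The block-based path hinges on HasRecoveryPatch. A minimal sketch of what that check amounts to, assuming the argument is a zipfile.ZipFile opened on the target-files package (illustrative, not the exact helper):

def HasRecoveryPatch(target_files_zip):
  # recovery-from-boot.p is emitted by the build when the recovery image can
  # be regenerated from boot.img on the device.
  try:
    target_files_zip.getinfo("SYSTEM/recovery-from-boot.p")
    return True
  except KeyError:
    return False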
  # Read recovery_api_version from the source and target packages.
  source_version = OPTIONS.source_info_dict["recovery_api_version"]
  target_version = OPTIONS.target_info_dict["recovery_api_version"]

  # If source_version is 0, the source build is too old to run the edify
  # updater-script that the edify_generator module produces, so warn about it.
  if source_version == 0:
    print ("WARNING: generating edify script for a source that "
           "can't install it.")
  script = edify_generator.EdifyGenerator(
      source_version, OPTIONS.target_info_dict,
      fstab=OPTIONS.source_info_dict["fstab"])
  oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties")
  recovery_mount_options = OPTIONS.source_info_dict.get(
      "recovery_mount_options")
  oem_dict = None
  if oem_props is not None and len(oem_props) > 0:
    if OPTIONS.oem_source is None:
      raise common.ExternalError("OEM source required for this build")
    script.Mount("/oem", recovery_mount_options)
    oem_dict = common.LoadDictionaryFromLines(
        open(OPTIONS.oem_source).readlines())
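The OEM source file is a plain key=value property file. A rough, self-contained sketch of the parsing that common.LoadDictionaryFromLines performs (illustrative, not the exact common.py code):

def load_dictionary_from_lines(lines):
  # Illustrative parser: skip blanks and comments, split on the first '='.
  d = {}
  for line in lines:
    line = line.strip()
    if not line or line.startswith("#"):
      continue
    if "=" in line:
      name, value = line.split("=", 1)
      d[name] = value
  return d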
  # Build a metadata dict to record values about the source/target builds.
  metadata = {
      "pre-device": GetOemProperty("ro.product.device", oem_props, oem_dict,
                                   OPTIONS.source_info_dict),
      "post-timestamp": GetBuildProp("ro.build.date.utc",
                                     OPTIONS.target_info_dict),
  }
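GetOemProperty prefers the OEM-supplied value only for keys listed in oem_fingerprint_properties. A hedged sketch of that fallback (GetBuildProp is the name already used above; the body here is an assumption, not the exact helper):

def get_oem_property(name, oem_props, oem_dict, info_dict):
  # Illustrative: OEM-overridden keys come from the OEM dictionary,
  # everything else from the build's own properties.
  if oem_props is not None and name in oem_props:
    return oem_dict[name]
  return GetBuildProp(name, info_dict)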
  # Initialize device_specific (the device-specific extension hooks).
  device_specific = common.DeviceSpecificParams(
      source_zip=source_zip,
      source_version=source_version,
      target_zip=target_zip,
      target_version=target_version,
      output_zip=output_zip,
      script=script,
      metadata=metadata,
      info_dict=OPTIONS.source_info_dict)
  # Compute the file-level differences for the system partition.
  system_diff = FileDifference("system", source_zip, target_zip, output_zip)

The __init__ method of the FileDifference class:

class FileDifference(object):
  def __init__(self, partition, source_zip, target_zip, output_zip):
    self.deferred_patch_list = None
    # Load the files of the given partition (system here) from the target
    # package and record their metadata.
    print "Loading target..."
    self.target_data = target_data = LoadPartitionFiles(target_zip, partition)
    # Load the files of the same partition from the source package and record
    # their metadata.
    print "Loading source..."
    self.source_data = source_data = LoadPartitionFiles(source_zip, partition)
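LoadPartitionFiles is assumed here to walk the zip entries under the partition's directory and wrap each one in a common.File keyed by its on-device path. A rough sketch under that assumption (the __init__ listing continues after it):

def load_partition_files(z, partition):
  # Illustrative: map "SYSTEM/app/Foo.apk" entries to
  # {"system/app/Foo.apk": common.File(...)} so the keys match on-device
  # paths; common.File(name, data) is assumed from releasetools.
  out = {}
  prefix = partition.upper() + "/"
  for info in z.infolist():
    if info.filename.startswith(prefix) and not info.filename.endswith("/"):
      device_path = partition + "/" + info.filename[len(prefix):]
      out[device_path] = common.File(device_path, z.read(info.filename))
  return out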
    self.verbatim_targets = verbatim_targets = []
    self.patch_list = patch_list = []
    diffs = []
    self.renames = renames = {}
    known_paths = set()
    largest_source_size = 0
    matching_file_cache = {}
    for fn, sf in source_data.items():
      assert fn == sf.name
      matching_file_cache["path:" + fn] = sf
      if fn in target_data.keys():
        AddToKnownPaths(fn, known_paths)
      # Only allow eligibility for filename/sha matching
      # if there isn't a perfect path match.
      if target_data.get(sf.name) is None:
        matching_file_cache["file:" + fn.split("/")[-1]] = sf
        matching_file_cache["sha:" + sf.sha1] = sf
    # Compare the path structure of the source and target packages to find
    # files that have been renamed.
    for fn in sorted(target_data.keys()):
      tf = target_data[fn]
      assert fn == tf.name
      sf = ClosestFileMatch(tf, matching_file_cache, renames)
      if sf is not None and sf.name != tf.name:
        print "File has moved from " + sf.name + " to " + tf.name
        renames[sf.name] = tf

      if sf is None or fn in OPTIONS.require_verbatim:
        # This file should be included verbatim
        if fn in OPTIONS.prohibit_verbatim:
          raise common.ExternalError("\"%s\" must be sent verbatim" % (fn,))
        print "send", fn, "verbatim"
        tf.AddToZip(output_zip)
        verbatim_targets.append((fn, tf.size, tf.sha1))
        if fn in target_data.keys():
          AddToKnownPaths(fn, known_paths)
      elif tf.sha1 != sf.sha1:
        # File is different; consider sending as a patch.
        diffs.append(common.Difference(tf, sf))  # queue the pair for diffing
      else:
        # Target file data identical to source (may still be renamed)
        pass
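known_paths accumulates every directory known to exist on the updated device, which is what the `path not in known_paths` test in the next loop relies on. A rough sketch of AddToKnownPaths under that reading (illustrative):

def add_to_known_paths(filename, known_paths):
  # Illustrative: record every ancestor directory of the file, stopping once
  # an already-known prefix is reached.
  dirs = filename.split("/")[:-1]
  while dirs:
    path = "/".join(dirs)
    if path in known_paths:
      break
    known_paths.add(path)
    dirs.pop()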
    common.ComputeDifferences(diffs)

    for diff in diffs:
      tf, sf, d = diff.GetPatch()
      path = "/".join(tf.name.split("/")[:-1])
      if d is None or len(d) > tf.size * OPTIONS.patch_threshold or \
          path not in known_paths:
        # Patch is almost as big as the file, so don't bother patching; or a
        # patch + rename cannot take place because the target directory does
        # not exist.
        tf.AddToZip(output_zip)
        verbatim_targets.append((tf.name, tf.size, tf.sha1))
        if sf.name in renames:
          del renames[sf.name]
        AddToKnownPaths(tf.name, known_paths)
      else:
        common.ZipWriteStr(output_zip, "patch/" + sf.name + ".p", d)
        patch_list.append((tf, sf, tf.size, common.sha1(d).hexdigest()))
        largest_source_size = max(largest_source_size, sf.size)

    self.largest_source_size = largest_source_size
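largest_source_size records the biggest file that will be patched in place. Back in WriteIncrementalOTAPackage it is used to verify that /cache can hold a working copy of the source file while patches are applied; a hedged fragment of that usage (CacheFreeSpaceCheck is assumed from edify_generator, and the exact surrounding conditions are omitted):

  # Sketch: have the updater script confirm /cache has room for the largest
  # source file before any patch is applied.
  if system_diff.patch_list:
    script.CacheFreeSpaceCheck(system_diff.largest_source_size)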