binman: Support replacing data in a cbfs

At present binman cannot replace data within a CBFS since it does not
allow rewriting of the files in that CBFS. Implement this by using the
new WriteData() method to handle the case.
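
The replacement flow this enables is roughly what the new tests below exercise; a
minimal sketch, assuming binman's control module is importable (as in the binman
test suite) and that the image was built with 'allow-repack':

    import control  # binman control module; import path assumed

    # Replace the 'u-boot' file inside the CBFS of an already-built image,
    # then read it back to confirm the new contents took effect.
    image_fname = 'image-updated.bin'       # hypothetical image file on disk
    entry_name = 'section/cbfs/u-boot'      # path of the CBFS child entry
    new_data = b'xxxx'                      # replacement contents

    control.WriteEntry(image_fname, entry_name, new_data, allow_resize=True)
    assert control.ReadEntry(image_fname, entry_name) == new_data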

Add a header to compressed data so that the amount of compressed data can
be determined without reference to the size of the containing entry. This
allows the entry to be larger than the contents, without causing errors in
decompression. This is necessary to cope with a compressed device tree
being updated in such a way that it shrinks after the entry size is
already set (an obscure case). The header is not used with CBFS, since CBFS has
its own metadata for this. Increase the number of passes allowed to resolve
entry positions, to handle this case.
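
As a sketch, the header is simply a 32-bit little-endian length prefixed to the
compressed payload, so any padding after the payload can be ignored (hypothetical
helper names; the real change is in tools.Compress()/Decompress() below):

    import struct

    def add_size_header(data):
        # Prefix the payload with its exact length so it can be recovered
        # even when the containing entry is larger than the data.
        return struct.pack('<I', len(data)) + data

    def strip_size_header(blob):
        # Read the 4-byte length and return only that many payload bytes,
        # ignoring any padding the entry may carry after it.
        size = struct.unpack('<I', blob[:4])[0]
        return blob[4:4 + size]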

Add a test for this new logic.

Signed-off-by: Simon Glass <sjg@chromium.org>
Simon Glass 2019-07-20 12:24:06 -06:00
parent 7210c89eac
commit eb0f4a4cb4
7 changed files with 87 additions and 9 deletions

View File

@@ -208,6 +208,7 @@ class CbfsFile(object):
cbfs_offset: Offset of file data in bytes from start of CBFS, or None to
place this file anywhere
data: Contents of file, uncompressed
orig_data: Original data added to the file, possibly compressed
data_len: Length of (possibly compressed) data in bytes
ftype: File type (TYPE_...)
compression: Compression type (COMPRESS_...)
@@ -226,6 +227,7 @@
self.offset = None
self.cbfs_offset = cbfs_offset
self.data = data
self.orig_data = data
self.ftype = ftype
self.compress = compress
self.memlen = None
@@ -240,9 +242,9 @@
"""Handle decompressing data if necessary"""
indata = self.data
if self.compress == COMPRESS_LZ4:
data = tools.Decompress(indata, 'lz4')
data = tools.Decompress(indata, 'lz4', with_header=False)
elif self.compress == COMPRESS_LZMA:
data = tools.Decompress(indata, 'lzma')
data = tools.Decompress(indata, 'lzma', with_header=False)
else:
data = indata
self.memlen = len(data)
@@ -361,9 +363,9 @@
elif self.ftype == TYPE_RAW:
orig_data = data
if self.compress == COMPRESS_LZ4:
data = tools.Compress(orig_data, 'lz4')
data = tools.Compress(orig_data, 'lz4', with_header=False)
elif self.compress == COMPRESS_LZMA:
data = tools.Compress(orig_data, 'lzma')
data = tools.Compress(orig_data, 'lzma', with_header=False)
self.memlen = len(orig_data)
self.data_len = len(data)
attr = struct.pack(ATTR_COMPRESSION_FORMAT,

View File

@@ -311,7 +311,7 @@ def ProcessImage(image, update_fdt, write_map, get_contents=True,
# since changing an offset from 0x100 to 0x104 (for example) can
# alter the compressed size of the device tree. So we need a
# third pass for this.
passes = 3
passes = 5
for pack_pass in range(passes):
try:
image.PackEntries()

View File

@@ -92,6 +92,11 @@ class TestEntry(unittest.TestCase):
dtb = entry.Entry.Create(None, self.GetNode(), 'u-boot-dtb')
self.assertEqual('u-boot-dtb', dtb.GetFdtEtype())
def testWriteChildData(self):
"""Test the WriteChildData() method of the base class"""
base = entry.Entry.Create(None, self.GetNode(), 'blob-dtb')
self.assertTrue(base.WriteChildData(base))
if __name__ == "__main__":
unittest.main()

View File

@@ -168,6 +168,7 @@ class Entry_cbfs(Entry):
self._cbfs_arg = fdt_util.GetString(node, 'cbfs-arch', 'x86')
self._cbfs_entries = OrderedDict()
self._ReadSubnodes()
self.reader = None
def ObtainContents(self, skip=None):
arch = cbfs_util.find_arch(self._cbfs_arg)
@@ -202,7 +203,7 @@
def _ReadSubnodes(self):
"""Read the subnodes to find out what should go in this IFWI"""
for node in self._node.subnodes:
entry = Entry.Create(self.section, node)
entry = Entry.Create(self, node)
entry.ReadNode()
entry._cbfs_name = fdt_util.GetString(node, 'cbfs-name', entry.name)
entry._type = fdt_util.GetString(node, 'cbfs-type')

View File

@@ -2485,7 +2485,7 @@ class TestFunctional(unittest.TestCase):
def testExtractCbfsRaw(self):
"""Test extracting CBFS compressed data without decompressing it"""
data = self._RunExtractCmd('section/cbfs/u-boot-dtb', decomp=False)
dtb = tools.Decompress(data, 'lzma')
dtb = tools.Decompress(data, 'lzma', with_header=False)
self.assertEqual(EXTRACT_DTB_SIZE, len(dtb))
def testExtractBadEntry(self):
@@ -2984,6 +2984,32 @@ class TestFunctional(unittest.TestCase):
self.assertEqual(0xff800000, desc.offset)
self.assertEqual(0xff800000, desc.image_pos)
def testReplaceCbfs(self):
"""Test replacing a single file in CBFS without changing the size"""
self._CheckLz4()
expected = b'x' * len(U_BOOT_DATA)
data = self._DoReadFileRealDtb('142_replace_cbfs.dts')
updated_fname = tools.GetOutputFilename('image-updated.bin')
tools.WriteFile(updated_fname, data)
entry_name = 'section/cbfs/u-boot'
control.WriteEntry(updated_fname, entry_name, expected,
allow_resize=True)
data = control.ReadEntry(updated_fname, entry_name)
self.assertEqual(expected, data)
def testReplaceResizeCbfs(self):
"""Test replacing a single file in CBFS with one of a different size"""
self._CheckLz4()
expected = U_BOOT_DATA + b'x'
data = self._DoReadFileRealDtb('142_replace_cbfs.dts')
updated_fname = tools.GetOutputFilename('image-updated.bin')
tools.WriteFile(updated_fname, data)
entry_name = 'section/cbfs/u-boot'
control.WriteEntry(updated_fname, entry_name, expected,
allow_resize=True)
data = control.ReadEntry(updated_fname, entry_name)
self.assertEqual(expected, data)
if __name__ == "__main__":
unittest.main()

View File

@@ -0,0 +1,37 @@
// SPDX-License-Identifier: GPL-2.0+
/dts-v1/;
/ {
	#address-cells = <1>;
	#size-cells = <1>;

	binman {
		size = <0xe00>;
		allow-repack;

		u-boot {
		};
		section {
			align = <0x100>;
			cbfs {
				size = <0x400>;
				u-boot {
					cbfs-type = "raw";
				};
				u-boot-dtb {
					cbfs-type = "raw";
					cbfs-compress = "lzma";
					cbfs-offset = <0x80>;
				};
			};
			u-boot-dtb {
				compress = "lz4";
			};
		};
		fdtmap {
		};
		image-header {
			location = "end";
		};
	};
};

View File

@@ -9,6 +9,7 @@ import command
import glob
import os
import shutil
import struct
import sys
import tempfile
@@ -377,7 +378,7 @@ def ToBytes(string):
return string.encode('utf-8')
return string
def Compress(indata, algo):
def Compress(indata, algo, with_header=True):
"""Compress some data using a given algorithm
Note that for lzma this uses an old version of the algorithm, not that
@@ -408,9 +409,12 @@ def Compress(indata, algo):
data = Run('gzip', '-c', fname, binary=True)
else:
raise ValueError("Unknown algorithm '%s'" % algo)
if with_header:
hdr = struct.pack('<I', len(data))
data = hdr + data
return data
def Decompress(indata, algo):
def Decompress(indata, algo, with_header=True):
"""Decompress some data using a given algorithm
Note that for lzma this uses an old version of the algorithm, not that
@@ -428,6 +432,9 @@ def Decompress(indata, algo):
"""
if algo == 'none':
return indata
if with_header:
data_len = struct.unpack('<I', indata[:4])[0]
indata = indata[4:4 + data_len]
fname = GetOutputFilename('%s.decomp.tmp' % algo)
with open(fname, 'wb') as fd:
fd.write(indata)
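
A usage sketch of the updated helpers, assuming the tools output directory has
been prepared with tools.PrepareOutputDir() and the external lz4 tool is
installed (CBFS callers pass with_header=False since CBFS stores the compressed
size in its own metadata):

    import tools  # patman's tools module, assumed to be on sys.path

    tools.PrepareOutputDir(None)                    # temp dir for the external tools
    packed = tools.Compress(b'hello world', 'lz4')  # 4-byte size header + lz4 stream
    plain = tools.Decompress(packed, 'lz4')         # header read and stripped
    assert plain == b'hello world'
    raw = tools.Compress(b'hello world', 'lz4', with_header=False)  # CBFS-style, no header
    tools.FinishOutputDir()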