mirror of
https://github.com/brain-hackers/u-boot-brain
synced 2024-10-01 00:50:43 +09:00
binman: Move compression into the Entry base class
Compression is currently available only with blobs. However we want to report the compression algorithm and uncompressed size for all entries, so that other entry types can support compression. This will help with the forthcoming 'list' feature which lists entries in the image. Move the compression properties into the base class. Also fix up the docs which had the wrong property name. Signed-off-by: Simon Glass <sjg@chromium.org>
This commit is contained in:
parent
53cd5d921d
commit
8287ee852d
@ -339,6 +339,10 @@ expand-size:
|
||||
limited by the size of the image/section and the position of the next
|
||||
entry.
|
||||
|
||||
compress:
|
||||
Sets the compression algorithm to use (for blobs only). See the entry
|
||||
documentation for details.
|
||||
|
||||
The attributes supported for images and sections are described below. Several
|
||||
are similar to those for entries.
|
||||
|
||||
@ -649,15 +653,16 @@ Compression
|
||||
-----------
|
||||
|
||||
Binman supports compression for 'blob' entries (those of type 'blob' and
|
||||
derivatives). To enable this for an entry, add a 'compression' property:
|
||||
derivatives). To enable this for an entry, add a 'compress' property:
|
||||
|
||||
blob {
|
||||
filename = "datafile";
|
||||
compression = "lz4";
|
||||
compress = "lz4";
|
||||
};
|
||||
|
||||
The entry will then contain the compressed data, using the 'lz4' compression
|
||||
algorithm. Currently this is the only one that is supported.
|
||||
algorithm. Currently this is the only one that is supported. The uncompressed
|
||||
size is written to the node in an 'uncomp-size' property, if -u is used.
|
||||
|
||||
|
||||
|
||||
|
@ -51,6 +51,8 @@ class Entry(object):
|
||||
offset: Offset of entry within the section, None if not known yet (in
|
||||
which case it will be calculated by Pack())
|
||||
size: Entry size in bytes, None if not known
|
||||
uncomp_size: Size of uncompressed data in bytes, if the entry is
|
||||
compressed, else None
|
||||
contents_size: Size of contents in bytes, 0 by default
|
||||
align: Entry start offset alignment, or None
|
||||
align_size: Entry size alignment, or None
|
||||
@ -58,6 +60,7 @@ class Entry(object):
|
||||
pad_before: Number of pad bytes before the contents, 0 if none
|
||||
pad_after: Number of pad bytes after the contents, 0 if none
|
||||
data: Contents of entry (string of bytes)
|
||||
compress: Compression algorithm used (e.g. 'lz4'), 'none' if none
|
||||
"""
|
||||
def __init__(self, section, etype, node, read_node=True, name_prefix=''):
|
||||
self.section = section
|
||||
@ -66,6 +69,7 @@ class Entry(object):
|
||||
self.name = node and (name_prefix + node.name) or 'none'
|
||||
self.offset = None
|
||||
self.size = None
|
||||
self.uncomp_size = None
|
||||
self.data = None
|
||||
self.contents_size = 0
|
||||
self.align = None
|
||||
@ -76,6 +80,7 @@ class Entry(object):
|
||||
self.offset_unset = False
|
||||
self.image_pos = None
|
||||
self._expand_size = False
|
||||
self.compress = 'none'
|
||||
if read_node:
|
||||
self.ReadNode()
|
||||
|
||||
@ -188,6 +193,8 @@ class Entry(object):
|
||||
for prop in ['offset', 'size', 'image-pos']:
|
||||
if not prop in self._node.props:
|
||||
state.AddZeroProp(self._node, prop)
|
||||
if self.compress != 'none':
|
||||
state.AddZeroProp(self._node, 'uncomp-size')
|
||||
err = state.CheckAddHashProp(self._node)
|
||||
if err:
|
||||
self.Raise(err)
|
||||
@ -198,6 +205,8 @@ class Entry(object):
|
||||
state.SetInt(self._node, 'size', self.size)
|
||||
state.SetInt(self._node, 'image-pos',
|
||||
self.image_pos - self.section.GetRootSkipAtStart())
|
||||
if self.uncomp_size is not None:
|
||||
state.SetInt(self._node, 'uncomp-size', self.uncomp_size)
|
||||
state.CheckSetHashValue(self._node, self.GetData)
|
||||
|
||||
def ProcessFdt(self, fdt):
|
||||
|
@ -33,8 +33,7 @@ class Entry_blob(Entry):
|
||||
def __init__(self, section, etype, node):
|
||||
Entry.__init__(self, section, etype, node)
|
||||
self._filename = fdt_util.GetString(self._node, 'filename', self.etype)
|
||||
self._compress = fdt_util.GetString(self._node, 'compress', 'none')
|
||||
self._uncompressed_size = None
|
||||
self.compress = fdt_util.GetString(self._node, 'compress', 'none')
|
||||
|
||||
def ObtainContents(self):
|
||||
self._filename = self.GetDefaultFilename()
|
||||
@ -50,21 +49,11 @@ class Entry_blob(Entry):
|
||||
# the data in chunks and avoid reading it all at once. For now
|
||||
# this seems like an unnecessary complication.
|
||||
indata = tools.ReadFile(self._pathname)
|
||||
if self._compress != 'none':
|
||||
self._uncompressed_size = len(indata)
|
||||
data = tools.Compress(indata, self._compress)
|
||||
if self.compress != 'none':
|
||||
self.uncomp_size = len(indata)
|
||||
data = tools.Compress(indata, self.compress)
|
||||
self.SetContents(data)
|
||||
return True
|
||||
|
||||
def GetDefaultFilename(self):
|
||||
return self._filename
|
||||
|
||||
def AddMissingProperties(self):
|
||||
Entry.AddMissingProperties(self)
|
||||
if self._compress != 'none':
|
||||
state.AddZeroProp(self._node, 'uncomp-size')
|
||||
|
||||
def SetCalculatedProperties(self):
|
||||
Entry.SetCalculatedProperties(self)
|
||||
if self._uncompressed_size is not None:
|
||||
state.SetInt(self._node, 'uncomp-size', self._uncompressed_size)
|
||||
|
Loading…
Reference in New Issue
Block a user