2018-06-02 00:38:16 +09:00
|
|
|
# SPDX-License-Identifier: GPL-2.0+
|
|
|
|
# Copyright (c) 2018 Google, Inc
|
|
|
|
# Written by Simon Glass <sjg@chromium.org>
|
2019-07-09 05:25:47 +09:00
|
|
|
|
|
|
|
"""Entry-type module for sections (groups of entries)
|
|
|
|
|
|
|
|
Sections are entries which can contain other entries. This allows hierarchical
|
|
|
|
images to be created.
|
|
|
|
"""
|
|
|
|
|
|
|
|
from collections import OrderedDict
|
|
|
|
import re
|
|
|
|
import sys
|
2018-06-02 00:38:16 +09:00
|
|
|
|
2020-04-18 09:09:03 +09:00
|
|
|
from binman.entry import Entry
|
|
|
|
from dtoc import fdt_util
|
2020-04-18 09:09:04 +09:00
|
|
|
from patman import tools
|
|
|
|
from patman import tout
|
2020-10-27 08:40:12 +09:00
|
|
|
from patman.tools import ToHexSize
|
2018-06-02 00:38:16 +09:00
|
|
|
|
|
|
|
|
|
|
|
class Entry_section(Entry):
    """Entry that contains other entries

    Properties / Entry arguments: (see binman README for more information)
        pad-byte: Pad byte to use when padding
        sort-by-offset: True if entries should be sorted by offset, False if
            they must be in-order in the device tree description
        end-at-4gb: Used to build an x86 ROM which ends at 4GB (2^32)
        skip-at-start: Number of bytes before the first entry starts. These
            effectively adjust the starting offset of entries. For example,
            if this is 16, then the first entry would start at 16. An entry
            with offset = 20 would in fact be written at offset 4 in the image
            file, since the first 16 bytes are skipped when writing.
        name-prefix: Adds a prefix to the name of every entry in the section
            when writing out the map

    Properties:
        allow_missing: True if this section permits external blobs to be
            missing their contents. An image is still produced in that case,
            but of course it will not work.

    Since a section is also an entry, it inherits all the properties of entries
    too.

    A section is an entry which can contain other entries, thus allowing
    hierarchical images to be created. See 'Sections and hierarchical images'
    in the binman README for more information.
    """
    def __init__(self, section, etype, node, test=False):
        # 'test' skips base-class init so unit tests can construct a bare
        # section without a full Entry setup
        if not test:
            super().__init__(section, etype, node)
        self._entries = OrderedDict()   # name -> Entry, in device-tree order
        self._pad_byte = 0              # byte value used to fill gaps
        self._sort = False              # sort entries by offset (sort-by-offset)
        self._skip_at_start = None      # bytes skipped before first entry
        self._end_4gb = False           # image ends at 4GB (x86 ROM layout)
|
|
|
|
|
2019-07-21 03:23:45 +09:00
|
|
|
    def ReadNode(self):
        """Read properties from the section node

        Reads the section-specific properties (pad-byte, sort-by-offset,
        end-at-4gb, skip-at-start, name-prefix, filename), validates the
        end-at-4gb/skip-at-start combination and then reads all subnode
        entries.

        Raises:
            ValueError: if end-at-4gb is used without a size, or together
                with skip-at-start
        """
        super().ReadNode()
        self._pad_byte = fdt_util.GetInt(self._node, 'pad-byte', 0)
        self._sort = fdt_util.GetBool(self._node, 'sort-by-offset')
        self._end_4gb = fdt_util.GetBool(self._node, 'end-at-4gb')
        self._skip_at_start = fdt_util.GetInt(self._node, 'skip-at-start')
        if self._end_4gb:
            # end-at-4gb implies the skip-at-start value (2^32 - size), so a
            # fixed size is required and an explicit skip-at-start conflicts
            if not self.size:
                self.Raise("Section size must be provided when using end-at-4gb")
            if self._skip_at_start is not None:
                self.Raise("Provide either 'end-at-4gb' or 'skip-at-start'")
            else:
                self._skip_at_start = 0x100000000 - self.size
        else:
            if self._skip_at_start is None:
                self._skip_at_start = 0
        self._name_prefix = fdt_util.GetString(self._node, 'name-prefix')
        filename = fdt_util.GetString(self._node, 'filename')
        if filename:
            self._filename = filename

        # Create Entry objects for all subnodes
        self._ReadEntries()
|
|
|
|
|
2019-07-09 05:25:47 +09:00
|
|
|
def _ReadEntries(self):
|
|
|
|
for node in self._node.subnodes:
|
2020-08-31 18:58:18 +09:00
|
|
|
if node.name.startswith('hash') or node.name.startswith('signature'):
|
2019-07-09 05:25:47 +09:00
|
|
|
continue
|
|
|
|
entry = Entry.Create(self, node)
|
2019-07-21 03:23:45 +09:00
|
|
|
entry.ReadNode()
|
2019-07-09 05:25:47 +09:00
|
|
|
entry.SetPrefix(self._name_prefix)
|
|
|
|
self._entries[node.name] = entry
|
2018-06-02 00:38:16 +09:00
|
|
|
|
2019-07-21 03:23:45 +09:00
|
|
|
def _Raise(self, msg):
|
|
|
|
"""Raises an error for this section
|
|
|
|
|
|
|
|
Args:
|
|
|
|
msg: Error message to use in the raise string
|
|
|
|
Raises:
|
|
|
|
ValueError()
|
|
|
|
"""
|
|
|
|
raise ValueError("Section '%s': %s" % (self._node.path, msg))
|
|
|
|
|
2019-07-21 03:23:28 +09:00
|
|
|
def GetFdts(self):
|
|
|
|
fdts = {}
|
2019-07-09 05:25:47 +09:00
|
|
|
for entry in self._entries.values():
|
2019-07-21 03:23:28 +09:00
|
|
|
fdts.update(entry.GetFdts())
|
|
|
|
return fdts
|
2018-09-14 19:57:24 +09:00
|
|
|
|
2018-07-07 01:27:40 +09:00
|
|
|
def ProcessFdt(self, fdt):
|
2019-07-09 05:25:47 +09:00
|
|
|
"""Allow entries to adjust the device tree
|
|
|
|
|
|
|
|
Some entries need to adjust the device tree for their purposes. This
|
|
|
|
may involve adding or deleting properties.
|
|
|
|
"""
|
|
|
|
todo = self._entries.values()
|
|
|
|
for passnum in range(3):
|
|
|
|
next_todo = []
|
|
|
|
for entry in todo:
|
|
|
|
if not entry.ProcessFdt(fdt):
|
|
|
|
next_todo.append(entry)
|
|
|
|
todo = next_todo
|
|
|
|
if not todo:
|
|
|
|
break
|
|
|
|
if todo:
|
|
|
|
self.Raise('Internal error: Could not complete processing of Fdt: remaining %s' %
|
|
|
|
todo)
|
|
|
|
return True
|
2018-07-07 01:27:40 +09:00
|
|
|
|
2018-09-14 19:57:29 +09:00
|
|
|
def ExpandEntries(self):
|
2019-07-09 05:25:47 +09:00
|
|
|
"""Expand out any entries which have calculated sub-entries
|
|
|
|
|
|
|
|
Some entries are expanded out at runtime, e.g. 'files', which produces
|
|
|
|
a section containing a list of files. Process these entries so that
|
|
|
|
this information is added to the device tree.
|
|
|
|
"""
|
2020-07-10 09:39:35 +09:00
|
|
|
super().ExpandEntries()
|
2019-07-09 05:25:47 +09:00
|
|
|
for entry in self._entries.values():
|
|
|
|
entry.ExpandEntries()
|
2018-09-14 19:57:29 +09:00
|
|
|
|
2020-10-27 08:40:17 +09:00
|
|
|
def AddMissingProperties(self, have_image_pos):
|
2019-07-09 05:25:47 +09:00
|
|
|
"""Add new properties to the device tree as needed for this entry"""
|
2020-10-27 08:40:17 +09:00
|
|
|
super().AddMissingProperties(have_image_pos)
|
|
|
|
if self.compress != 'none':
|
|
|
|
have_image_pos = False
|
2019-07-09 05:25:47 +09:00
|
|
|
for entry in self._entries.values():
|
2020-10-27 08:40:17 +09:00
|
|
|
entry.AddMissingProperties(have_image_pos)
|
2018-07-07 01:27:41 +09:00
|
|
|
|
2018-06-02 00:38:16 +09:00
|
|
|
    def ObtainContents(self):
        """Obtain the contents of every entry in this section

        Returns:
            True when all child contents are available (GetEntryContents()
            raises if they cannot be obtained within its retry passes)
        """
        return self.GetEntryContents()
|
2018-06-02 00:38:16 +09:00
|
|
|
|
2020-10-27 08:40:12 +09:00
|
|
|
def GetPaddedDataForEntry(self, entry):
|
|
|
|
"""Get the data for an entry including any padding
|
|
|
|
|
|
|
|
Gets the entry data and uses the section pad-byte value to add padding
|
|
|
|
before and after as defined by the pad-before and pad-after properties.
|
|
|
|
This does not consider alignment.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
entry: Entry to check
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
Contents of the entry along with any pad bytes before and
|
|
|
|
after it (bytes)
|
|
|
|
"""
|
2020-10-27 08:40:13 +09:00
|
|
|
pad_byte = (entry._pad_byte if isinstance(entry, Entry_section)
|
|
|
|
else self._pad_byte)
|
|
|
|
|
2020-10-27 08:40:12 +09:00
|
|
|
data = b''
|
|
|
|
# Handle padding before the entry
|
|
|
|
if entry.pad_before:
|
2020-10-27 08:40:14 +09:00
|
|
|
data += tools.GetBytes(self._pad_byte, entry.pad_before)
|
2020-10-27 08:40:12 +09:00
|
|
|
|
|
|
|
# Add in the actual entry data
|
|
|
|
data += entry.GetData()
|
|
|
|
|
|
|
|
# Handle padding after the entry
|
|
|
|
if entry.pad_after:
|
2020-10-27 08:40:14 +09:00
|
|
|
data += tools.GetBytes(self._pad_byte, entry.pad_after)
|
2020-10-27 08:40:13 +09:00
|
|
|
|
|
|
|
if entry.size:
|
|
|
|
data += tools.GetBytes(pad_byte, entry.size - len(data))
|
2020-10-27 08:40:12 +09:00
|
|
|
|
|
|
|
self.Detail('GetPaddedDataForEntry: size %s' % ToHexSize(self.data))
|
|
|
|
|
|
|
|
return data
|
|
|
|
|
2020-10-27 08:40:11 +09:00
|
|
|
    def _BuildSectionData(self):
        """Build the contents of a section

        This places all entries at the right place, dealing with padding before
        and after entries. It does not do padding for the section itself (the
        pad-before and pad-after properties in the section items) since that is
        handled by the parent section.

        Returns:
            Contents of the section (bytes), compressed if the section has a
            compression algorithm set
        """
        section_data = b''

        for entry in self._entries.values():
            data = self.GetPaddedDataForEntry(entry)
            # Handle empty space before the entry: offsets are relative to
            # skip-at-start, so subtract it and the data built so far
            pad = (entry.offset or 0) - self._skip_at_start - len(section_data)
            if pad > 0:
                section_data += tools.GetBytes(self._pad_byte, pad)

            # Add in the actual entry data
            section_data += data

        self.Detail('GetData: %d entries, total size %#x' %
                    (len(self._entries), len(section_data)))
        # Compression is a no-op when self.compress is 'none'
        return self.CompressData(section_data)
|
2018-06-02 00:38:16 +09:00
|
|
|
|
2020-10-27 08:40:12 +09:00
|
|
|
def GetPaddedData(self):
|
|
|
|
"""Get the data for a section including any padding
|
|
|
|
|
|
|
|
Gets the section data and uses the parent section's pad-byte value to
|
|
|
|
add padding before and after as defined by the pad-before and pad-after
|
|
|
|
properties. If this is a top-level section (i.e. an image), this is the
|
|
|
|
same as GetData(), since padding is not supported.
|
|
|
|
|
|
|
|
This does not consider alignment.
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
Contents of the section along with any pad bytes before and
|
|
|
|
after it (bytes)
|
|
|
|
"""
|
2020-10-27 08:40:13 +09:00
|
|
|
section = self.section or self
|
|
|
|
return section.GetPaddedDataForEntry(self)
|
2020-10-27 08:40:12 +09:00
|
|
|
|
2020-10-27 08:40:11 +09:00
|
|
|
def GetData(self):
|
2020-10-27 08:40:16 +09:00
|
|
|
"""Get the contents of an entry
|
|
|
|
|
|
|
|
This builds the contents of the section, stores this as the contents of
|
|
|
|
the section and returns it
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
bytes content of the section, made up for all all of its subentries.
|
|
|
|
This excludes any padding. If the section is compressed, the
|
|
|
|
compressed data is returned
|
|
|
|
"""
|
|
|
|
data = self._BuildSectionData()
|
|
|
|
self.SetContents(data)
|
|
|
|
return data
|
2020-10-27 08:40:11 +09:00
|
|
|
|
2018-08-02 06:22:37 +09:00
|
|
|
def GetOffsets(self):
|
|
|
|
"""Handle entries that want to set the offset/size of other entries
|
2018-06-02 00:38:16 +09:00
|
|
|
|
2018-08-02 06:22:37 +09:00
|
|
|
This calls each entry's GetOffsets() method. If it returns a list
|
2018-06-02 00:38:16 +09:00
|
|
|
of entries to update, it updates them.
|
|
|
|
"""
|
2019-07-09 05:25:47 +09:00
|
|
|
self.GetEntryOffsets()
|
2018-06-02 00:38:16 +09:00
|
|
|
return {}
|
|
|
|
|
2019-07-09 05:25:37 +09:00
|
|
|
def ResetForPack(self):
|
|
|
|
"""Reset offset/size fields so that packing can be done again"""
|
2020-07-10 09:39:35 +09:00
|
|
|
super().ResetForPack()
|
2019-07-09 05:25:47 +09:00
|
|
|
for entry in self._entries.values():
|
|
|
|
entry.ResetForPack()
|
2019-07-09 05:25:37 +09:00
|
|
|
|
2018-08-02 06:22:37 +09:00
|
|
|
    def Pack(self, offset):
        """Pack all entries into the section

        Packs the children first (which also sets this section's size via
        CheckSize()), then packs this section itself within its parent.

        Args:
            offset: Offset for this section within its parent

        Returns:
            Result of the base-class Pack() (presumably the next free offset
            after this section - see Entry.Pack())
        """
        self._PackEntries()
        return super().Pack(offset)
|
2018-06-02 00:38:16 +09:00
|
|
|
|
2019-07-09 05:25:47 +09:00
|
|
|
def _PackEntries(self):
|
2020-10-27 08:40:04 +09:00
|
|
|
"""Pack all entries into the section"""
|
2019-07-09 05:25:47 +09:00
|
|
|
offset = self._skip_at_start
|
|
|
|
for entry in self._entries.values():
|
|
|
|
offset = entry.Pack(offset)
|
|
|
|
self.size = self.CheckSize()
|
|
|
|
|
|
|
|
    def _ExpandEntries(self):
        """Expand any entries that are permitted to

        An entry with expand_size set grows to fill the space up to the next
        entry's offset (or the end of the section, for the last such entry).
        """
        # Holds the pending expandable entry until the next entry's offset
        # (its expansion limit) is known
        exp_entry = None
        for entry in self._entries.values():
            if exp_entry:
                exp_entry.ExpandToLimit(entry.offset)
                exp_entry = None
            if entry.expand_size:
                exp_entry = entry
        # The last expandable entry grows to the end of the section
        if exp_entry:
            exp_entry.ExpandToLimit(self.size)
|
|
|
|
|
|
|
|
def _SortEntries(self):
|
|
|
|
"""Sort entries by offset"""
|
|
|
|
entries = sorted(self._entries.values(), key=lambda entry: entry.offset)
|
|
|
|
self._entries.clear()
|
|
|
|
for entry in entries:
|
|
|
|
self._entries[entry._node.name] = entry
|
|
|
|
|
|
|
|
    def CheckEntries(self):
        """Check that entries do not overlap or extend outside the section

        Also sorts entries by offset (if sort-by-offset is enabled) and
        expands any expandable entries first, since both affect the final
        offsets being checked.

        Raises:
            ValueError: if an entry lies outside the section or overlaps the
                previous entry
        """
        if self._sort:
            self._SortEntries()
        self._ExpandEntries()
        # Track the end of the previous entry to detect overlaps
        offset = 0
        prev_name = 'None'
        for entry in self._entries.values():
            entry.CheckOffset()
            # Entries must lie within [skip_at_start, skip_at_start + size)
            if (entry.offset < self._skip_at_start or
                    entry.offset + entry.size > self._skip_at_start +
                    self.size):
                entry.Raise('Offset %#x (%d) size %#x (%d) is outside the '
                            "section '%s' starting at %#x (%d) "
                            'of size %#x (%d)' %
                            (entry.offset, entry.offset, entry.size, entry.size,
                             self._node.path, self._skip_at_start,
                             self._skip_at_start, self.size, self.size))
            # Zero-size entries may share an offset without overlapping
            if entry.offset < offset and entry.size:
                entry.Raise("Offset %#x (%d) overlaps with previous entry '%s' "
                            "ending at %#x (%d)" %
                            (entry.offset, entry.offset, prev_name, offset, offset))
            offset = entry.offset + entry.size
            prev_name = entry.GetPath()
|
2018-08-02 06:22:42 +09:00
|
|
|
|
2018-06-02 00:38:16 +09:00
|
|
|
def WriteSymbols(self, section):
|
|
|
|
"""Write symbol values into binary files for access at run time"""
|
2019-07-09 05:25:47 +09:00
|
|
|
for entry in self._entries.values():
|
|
|
|
entry.WriteSymbols(self)
|
2018-06-02 00:38:16 +09:00
|
|
|
|
2018-07-07 01:27:41 +09:00
|
|
|
def SetCalculatedProperties(self):
|
2020-07-10 09:39:35 +09:00
|
|
|
super().SetCalculatedProperties()
|
2019-07-09 05:25:47 +09:00
|
|
|
for entry in self._entries.values():
|
|
|
|
entry.SetCalculatedProperties()
|
|
|
|
|
|
|
|
def SetImagePos(self, image_pos):
|
2020-07-10 09:39:35 +09:00
|
|
|
super().SetImagePos(image_pos)
|
2020-10-27 08:40:17 +09:00
|
|
|
if self.compress == 'none':
|
|
|
|
for entry in self._entries.values():
|
|
|
|
entry.SetImagePos(image_pos + self.offset)
|
2018-07-07 01:27:41 +09:00
|
|
|
|
2018-06-02 00:38:16 +09:00
|
|
|
def ProcessContents(self):
|
2019-07-09 05:25:35 +09:00
|
|
|
sizes_ok_base = super(Entry_section, self).ProcessContents()
|
2019-07-09 05:25:47 +09:00
|
|
|
sizes_ok = True
|
|
|
|
for entry in self._entries.values():
|
|
|
|
if not entry.ProcessContents():
|
|
|
|
sizes_ok = False
|
2019-07-09 05:25:35 +09:00
|
|
|
return sizes_ok and sizes_ok_base
|
2018-06-02 00:38:16 +09:00
|
|
|
|
2018-08-02 06:22:37 +09:00
|
|
|
    def CheckOffset(self):
        """Check the offsets of all entries in this section (see CheckEntries())"""
        self.CheckEntries()
|
2018-06-02 00:38:20 +09:00
|
|
|
|
|
|
|
def WriteMap(self, fd, indent):
|
|
|
|
"""Write a map of the section to a .map file
|
|
|
|
|
|
|
|
Args:
|
|
|
|
fd: File to write the map to
|
|
|
|
"""
|
2019-07-09 05:25:47 +09:00
|
|
|
Entry.WriteMapLine(fd, indent, self.name, self.offset or 0,
|
|
|
|
self.size, self.image_pos)
|
|
|
|
for entry in self._entries.values():
|
|
|
|
entry.WriteMap(fd, indent + 1)
|
2018-07-18 04:25:38 +09:00
|
|
|
|
|
|
|
    def GetEntries(self):
        """Get the dict of entries in this section

        Returns:
            OrderedDict of entries, keyed by node name
        """
        return self._entries
|
|
|
|
|
|
|
|
    def GetContentsByPhandle(self, phandle, source_entry):
        """Get the data contents of an entry specified by a phandle

        This uses a phandle to look up a node and find the entry
        associated with it. Then it returns the contents of that entry.

        Args:
            phandle: Phandle to look up (integer)
            source_entry: Entry containing that phandle (used for error
                reporting)

        Returns:
            data from associated entry (as a string), or None if not found

        Raises:
            ValueError (via source_entry.Raise) if the phandle or its entry
            cannot be found
        """
        node = self._node.GetFdt().LookupPhandle(phandle)
        if not node:
            source_entry.Raise("Cannot find node for phandle %d" % phandle)
        # Match by node identity, since entry names may be prefixed
        for entry in self._entries.values():
            if entry._node == node:
                return entry.GetData()
        source_entry.Raise("Cannot find entry for node '%s'" % node.name)
|
|
|
|
|
2019-11-07 09:22:44 +09:00
|
|
|
    def LookupSymbol(self, sym_name, optional, msg, base_addr):
        """Look up a symbol in an ELF file

        Looks up a symbol in an ELF file. Only entry types which come from an
        ELF image can be used by this function.

        At present the only entry properties supported are:
            offset
            image_pos - 'base_addr' is added if this is not an end-at-4gb image
            size

        Args:
            sym_name: Symbol name in the ELF file to look up in the format
                _binman_<entry>_prop_<property> where <entry> is the name of
                the entry and <property> is the property to find (e.g.
                _binman_u_boot_prop_offset). As a special case, you can append
                _any to <entry> to have it search for any matching entry. E.g.
                _binman_u_boot_any_prop_offset will match entries called u-boot,
                u-boot-img and u-boot-nodtb)
            optional: True if the symbol is optional. If False this function
                will raise if the symbol is not found
            msg: Message to display if an error occurs
            base_addr: Base address of image. This is added to the returned
                image_pos in most cases so that the returned position indicates
                where the targetted entry/binary has actually been loaded. But
                if end-at-4gb is used, this is not done, since the binary is
                already assumed to be linked to the ROM position and using
                execute-in-place (XIP).

        Returns:
            Value that should be assigned to that symbol, or None if it was
            optional and not found

        Raises:
            ValueError if the symbol is invalid or not found, or references a
            property which is not supported
        """
        m = re.match(r'^_binman_(\w+)_prop_(\w+)$', sym_name)
        if not m:
            raise ValueError("%s: Symbol '%s' has invalid format" %
                             (msg, sym_name))
        entry_name, prop_name = m.groups()
        # ELF symbol names use '_' where node names use '-'
        entry_name = entry_name.replace('_', '-')
        entry = self._entries.get(entry_name)
        if not entry:
            # Handle the '<entry>_any' wildcard: accept the entry itself or
            # its -img / -nodtb variants
            if entry_name.endswith('-any'):
                root = entry_name[:-4]
                for name in self._entries:
                    if name.startswith(root):
                        rest = name[len(root):]
                        if rest in ['', '-img', '-nodtb']:
                            entry = self._entries[name]
        if not entry:
            err = ("%s: Entry '%s' not found in list (%s)" %
                   (msg, entry_name, ','.join(self._entries.keys())))
            if optional:
                print('Warning: %s' % err, file=sys.stderr)
                return None
            raise ValueError(err)
        if prop_name == 'offset':
            return entry.offset
        elif prop_name == 'image_pos':
            value = entry.image_pos
            # See base_addr in the docstring: only add it for non-XIP images
            if not self.GetImage()._end_4gb:
                value += base_addr
            return value
        if prop_name == 'size':
            return entry.size
        else:
            raise ValueError("%s: No such property '%s'" % (msg, prop_name))
|
|
|
|
|
|
|
|
def GetRootSkipAtStart(self):
|
|
|
|
"""Get the skip-at-start value for the top-level section
|
|
|
|
|
|
|
|
This is used to find out the starting offset for root section that
|
|
|
|
contains this section. If this is a top-level section then it returns
|
|
|
|
the skip-at-start offset for this section.
|
|
|
|
|
|
|
|
This is used to get the absolute position of section within the image.
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
Integer skip-at-start value for the root section containing this
|
|
|
|
section
|
|
|
|
"""
|
|
|
|
if self.section:
|
|
|
|
return self.section.GetRootSkipAtStart()
|
|
|
|
return self._skip_at_start
|
|
|
|
|
|
|
|
    def GetStartOffset(self):
        """Get the start offset for this section

        Returns:
            The first available offset in this section (typically 0, or the
            skip-at-start value when that property is set)
        """
        return self._skip_at_start
|
|
|
|
|
|
|
|
    def GetImageSize(self):
        """Get the size of the image containing this section

        Returns:
            Image size as an integer number of bytes, which may be None if the
            image size is dynamic and its sections have not yet been packed
        """
        return self.GetImage().size
|
2019-07-09 05:25:47 +09:00
|
|
|
|
|
|
|
def FindEntryType(self, etype):
|
|
|
|
"""Find an entry type in the section
|
|
|
|
|
|
|
|
Args:
|
|
|
|
etype: Entry type to find
|
|
|
|
Returns:
|
|
|
|
entry matching that type, or None if not found
|
|
|
|
"""
|
|
|
|
for entry in self._entries.values():
|
|
|
|
if entry.etype == etype:
|
|
|
|
return entry
|
|
|
|
return None
|
|
|
|
|
|
|
|
def GetEntryContents(self):
|
2020-10-27 08:40:07 +09:00
|
|
|
"""Call ObtainContents() for each entry in the section
|
2019-07-09 05:25:47 +09:00
|
|
|
"""
|
|
|
|
todo = self._entries.values()
|
|
|
|
for passnum in range(3):
|
|
|
|
next_todo = []
|
|
|
|
for entry in todo:
|
|
|
|
if not entry.ObtainContents():
|
|
|
|
next_todo.append(entry)
|
|
|
|
todo = next_todo
|
|
|
|
if not todo:
|
|
|
|
break
|
|
|
|
if todo:
|
|
|
|
self.Raise('Internal error: Could not complete processing of contents: remaining %s' %
|
|
|
|
todo)
|
|
|
|
return True
|
|
|
|
|
|
|
|
def _SetEntryOffsetSize(self, name, offset, size):
|
|
|
|
"""Set the offset and size of an entry
|
|
|
|
|
|
|
|
Args:
|
|
|
|
name: Entry name to update
|
|
|
|
offset: New offset, or None to leave alone
|
|
|
|
size: New size, or None to leave alone
|
|
|
|
"""
|
|
|
|
entry = self._entries.get(name)
|
|
|
|
if not entry:
|
|
|
|
self._Raise("Unable to set offset/size for unknown entry '%s'" %
|
|
|
|
name)
|
2020-07-10 09:39:41 +09:00
|
|
|
entry.SetOffsetSize(self._skip_at_start + offset if offset is not None
|
|
|
|
else None, size)
|
2019-07-09 05:25:47 +09:00
|
|
|
|
|
|
|
def GetEntryOffsets(self):
|
|
|
|
"""Handle entries that want to set the offset/size of other entries
|
|
|
|
|
|
|
|
This calls each entry's GetOffsets() method. If it returns a list
|
|
|
|
of entries to update, it updates them.
|
|
|
|
"""
|
|
|
|
for entry in self._entries.values():
|
|
|
|
offset_dict = entry.GetOffsets()
|
|
|
|
for name, info in offset_dict.items():
|
|
|
|
self._SetEntryOffsetSize(name, *info)
|
|
|
|
|
|
|
|
|
|
|
|
    def CheckSize(self):
        """Check that the section contents does not exceed its size, etc.

        Computes the size actually occupied by the entries and, if the section
        has no fixed size, derives one (including pad-before/pad-after and
        align-size). Also validates the fixed size against the contents and
        against align-size.

        Returns:
            Section size in bytes (the fixed size, or the derived one)

        Raises:
            ValueError: if contents exceed a fixed size, or the size does not
                match align-size
        """
        # Find the end of the furthest-reaching entry
        contents_size = 0
        for entry in self._entries.values():
            contents_size = max(contents_size, entry.offset + entry.size)

        # Entry offsets include skip-at-start, which is not stored in the file
        contents_size -= self._skip_at_start

        size = self.size
        if not size:
            # No fixed size: derive one from the contents plus padding
            size = self.pad_before + contents_size + self.pad_after
            size = tools.Align(size, self.align_size)

        if self.size and contents_size > self.size:
            self._Raise("contents size %#x (%d) exceeds section size %#x (%d)" %
                        (contents_size, contents_size, self.size, self.size))
        if not self.size:
            self.size = size
        if self.size != tools.Align(self.size, self.align_size):
            self._Raise("Size %#x (%d) does not match align-size %#x (%d)" %
                        (self.size, self.size, self.align_size,
                         self.align_size))
        return size
|
2019-07-09 05:25:43 +09:00
|
|
|
|
|
|
|
def ListEntries(self, entries, indent):
|
|
|
|
"""List the files in the section"""
|
2019-07-09 05:25:47 +09:00
|
|
|
Entry.AddEntryInfo(entries, indent, self.name, 'section', self.size,
|
|
|
|
self.image_pos, None, self.offset, self)
|
|
|
|
for entry in self._entries.values():
|
|
|
|
entry.ListEntries(entries, indent + 1)
|
2019-07-21 03:23:41 +09:00
|
|
|
|
|
|
|
def LoadData(self, decomp=True):
|
|
|
|
for entry in self._entries.values():
|
|
|
|
entry.LoadData(decomp)
|
|
|
|
self.Detail('Loaded data')
|
2019-07-21 03:23:46 +09:00
|
|
|
|
|
|
|
def GetImage(self):
|
|
|
|
"""Get the image containing this section
|
|
|
|
|
|
|
|
Note that a top-level section is actually an Image, so this function may
|
|
|
|
return self.
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
Image object containing this section
|
|
|
|
"""
|
|
|
|
if not self.section:
|
|
|
|
return self
|
|
|
|
return self.section.GetImage()
|
2019-07-21 03:23:55 +09:00
|
|
|
|
|
|
|
    def GetSort(self):
        """Check if the entries in this section will be sorted

        Returns:
            True if to be sorted (sort-by-offset property), False if entries
            will be left in the order they appear in the device tree
        """
        return self._sort
|
2019-07-21 03:24:04 +09:00
|
|
|
|
|
|
|
def ReadData(self, decomp=True):
|
|
|
|
tout.Info("ReadData path='%s'" % self.GetPath())
|
|
|
|
parent_data = self.section.ReadData(True)
|
|
|
|
tout.Info('%s: Reading data from offset %#x-%#x, size %#x' %
|
|
|
|
(self.GetPath(), self.offset, self.offset + self.size,
|
|
|
|
self.size))
|
|
|
|
data = parent_data[self.offset:self.offset + self.size]
|
|
|
|
return data
|
|
|
|
|
|
|
|
    def ReadChildData(self, child, decomp=True):
        """Read the data for a child entry out of this section's data

        Args:
            child: Child entry to read data for
            decomp: True to decompress the child's data using its 'compress'
                algorithm, False to return it as stored

        Returns:
            bytes for the child entry
        """
        tout.Debug("ReadChildData for child '%s'" % child.GetPath())
        parent_data = self.ReadData(True)
        # Child offsets include skip-at-start, which is not part of the data
        offset = child.offset - self._skip_at_start
        tout.Debug("Extract for child '%s': offset %#x, skip_at_start %#x, result %#x" %
                   (child.GetPath(), child.offset, self._skip_at_start, offset))
        data = parent_data[offset:offset + child.size]
        if decomp:
            indata = data
            data = tools.Decompress(indata, child.compress)
            if child.uncomp_size:
                tout.Info("%s: Decompressing data size %#x with algo '%s' to data size %#x" %
                          (child.GetPath(), len(indata), child.compress,
                           len(data)))
        return data
|
2019-07-21 03:24:05 +09:00
|
|
|
|
|
|
|
    def WriteChildData(self, child):
        """Handle writing the data in a child entry

        This section type needs no special action when a child's data is
        written, so this is a no-op that reports success.

        Returns:
            True, always
        """
        return True
|
2020-07-10 09:39:38 +09:00
|
|
|
|
|
|
|
def SetAllowMissing(self, allow_missing):
|
|
|
|
"""Set whether a section allows missing external blobs
|
|
|
|
|
|
|
|
Args:
|
|
|
|
allow_missing: True if allowed, False if not allowed
|
|
|
|
"""
|
2020-09-01 20:13:57 +09:00
|
|
|
self.allow_missing = allow_missing
|
2020-07-10 09:39:38 +09:00
|
|
|
for entry in self._entries.values():
|
|
|
|
entry.SetAllowMissing(allow_missing)
|
|
|
|
|
2020-07-10 09:39:40 +09:00
|
|
|
def CheckMissing(self, missing_list):
|
|
|
|
"""Check if any entries in this section have missing external blobs
|
|
|
|
|
|
|
|
If there are missing blobs, the entries are added to the list
|
|
|
|
|
|
|
|
Args:
|
|
|
|
missing_list: List of Entry objects to be added to
|
|
|
|
"""
|
|
|
|
for entry in self._entries.values():
|
|
|
|
entry.CheckMissing(missing_list)
|