
Merge tag 'dm-pull-29apr21' of https://source.denx.de/u-boot/custodians/u-boot-dm

buildman environment fix
binman FMAP improvements
minor test improvements and fixes
minor dm improvements
Tom Rini 2021-04-29 21:03:38 -04:00
commit 8ddaf94358
30 changed files with 487 additions and 292 deletions


@ -144,7 +144,7 @@ jobs:
export USER=azure
virtualenv -p /usr/bin/python3 /tmp/venv
. /tmp/venv/bin/activate
pip install pyelftools pytest pygit2
pip install -r test/py/requirements.txt
export UBOOT_TRAVIS_BUILD_DIR=/tmp/sandbox_spl
export PYTHONPATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc/pylibfdt
export PATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc:${PATH}


@ -151,7 +151,7 @@ Run binman, buildman, dtoc, Kconfig and patman testsuites:
export USER=gitlab;
virtualenv -p /usr/bin/python3 /tmp/venv;
. /tmp/venv/bin/activate;
pip install pyelftools pytest pygit2;
pip install -r test/py/requirements.txt;
export UBOOT_TRAVIS_BUILD_DIR=/tmp/sandbox_spl;
export PYTHONPATH="${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc/pylibfdt";
export PATH="${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc:${PATH}";


@ -303,6 +303,8 @@ fdt_addr_t ofnode_get_addr_size_index(ofnode node, int index, fdt_size_t *size)
{
int na, ns;
*size = FDT_SIZE_T_NONE;
if (ofnode_is_np(node)) {
const __be32 *prop_val;
u64 size64;
@ -347,6 +349,15 @@ fdt_addr_t ofnode_get_addr(ofnode node)
return ofnode_get_addr_index(node, 0);
}
fdt_size_t ofnode_get_size(ofnode node)
{
fdt_size_t size;
ofnode_get_addr_size_index(node, 0, &size);
return size;
}
int ofnode_stringlist_search(ofnode node, const char *property,
const char *string)
{


@ -265,7 +265,7 @@ int dm_scan_plat(bool pre_reloc_only)
static int dm_scan_fdt_node(struct udevice *parent, ofnode parent_node,
bool pre_reloc_only)
{
int ret = 0, err;
int ret = 0, err = 0;
ofnode node;
if (!ofnode_valid(parent_node))


@ -30,8 +30,6 @@ int scmi_dt_get_smt_buffer(struct udevice *dev, struct scmi_smt *smt)
int ret;
struct ofnode_phandle_args args;
struct resource resource;
fdt32_t faddr;
phys_addr_t paddr;
ret = dev_read_phandle_with_args(dev, "shmem", NULL, 0, 0, &args);
if (ret)
@ -41,21 +39,13 @@ int scmi_dt_get_smt_buffer(struct udevice *dev, struct scmi_smt *smt)
if (ret)
return ret;
/* TEMP workaround for ofnode_read_resource translation issue */
if (of_live_active()) {
paddr = resource.start;
} else {
faddr = cpu_to_fdt32(resource.start);
paddr = ofnode_translate_address(args.node, &faddr);
}
smt->size = resource_size(&resource);
if (smt->size < sizeof(struct scmi_smt_header)) {
dev_err(dev, "Shared memory buffer too small\n");
return -EINVAL;
}
smt->buf = devm_ioremap(dev, paddr, smt->size);
smt->buf = devm_ioremap(dev, resource.start, smt->size);
if (!smt->buf)
return -ENOMEM;


@ -863,7 +863,6 @@ static int jr2_probe(struct udevice *dev)
int i;
int ret;
struct resource res;
fdt32_t faddr;
phys_addr_t addr_base;
unsigned long addr_size;
ofnode eth_node, node, mdio_node;
@ -926,9 +925,8 @@ static int jr2_probe(struct udevice *dev)
if (ofnode_read_resource(mdio_node, 0, &res))
return -ENOMEM;
faddr = cpu_to_fdt32(res.start);
addr_base = ofnode_translate_address(mdio_node, &faddr);
addr_base = res.start;
addr_size = res.end - res.start;
/* If the bus is new then create a new bus */


@ -530,7 +530,6 @@ static int ocelot_probe(struct udevice *dev)
struct ocelot_private *priv = dev_get_priv(dev);
int i, ret;
struct resource res;
fdt32_t faddr;
phys_addr_t addr_base;
unsigned long addr_size;
ofnode eth_node, node, mdio_node;
@ -578,9 +577,8 @@ static int ocelot_probe(struct udevice *dev)
if (ofnode_read_resource(mdio_node, 0, &res))
return -ENOMEM;
faddr = cpu_to_fdt32(res.start);
addr_base = ofnode_translate_address(mdio_node, &faddr);
addr_base = res.start;
addr_size = res.end - res.start;
/* If the bus is new then create a new bus */


@ -482,7 +482,6 @@ static int serval_probe(struct udevice *dev)
struct serval_private *priv = dev_get_priv(dev);
int i, ret;
struct resource res;
fdt32_t faddr;
phys_addr_t addr_base;
unsigned long addr_size;
ofnode eth_node, node, mdio_node;
@ -533,9 +532,8 @@ static int serval_probe(struct udevice *dev)
if (ofnode_read_resource(mdio_node, 0, &res))
return -ENOMEM;
faddr = cpu_to_fdt32(res.start);
addr_base = ofnode_translate_address(mdio_node, &faddr);
addr_base = res.start;
addr_size = res.end - res.start;
/* If the bus is new then create a new bus */


@ -412,7 +412,6 @@ static int servalt_probe(struct udevice *dev)
struct servalt_private *priv = dev_get_priv(dev);
int i;
struct resource res;
fdt32_t faddr;
phys_addr_t addr_base;
unsigned long addr_size;
ofnode eth_node, node, mdio_node;
@ -461,9 +460,8 @@ static int servalt_probe(struct udevice *dev)
if (ofnode_read_resource(mdio_node, 0, &res))
return -ENOMEM;
faddr = cpu_to_fdt32(res.start);
addr_base = ofnode_translate_address(mdio_node, &faddr);
addr_base = res.start;
addr_size = res.end - res.start;
/* If the bus is new then create a new bus */


@ -510,6 +510,16 @@ phys_addr_t ofnode_get_addr_index(ofnode node, int index);
*/
phys_addr_t ofnode_get_addr(ofnode node);
/**
* ofnode_get_size() - get size from a node
*
* This reads the register size from a node
*
* @node: node to read from
* @return size of the address, or FDT_SIZE_T_NONE if not present or invalid
*/
fdt_size_t ofnode_get_size(ofnode node);
/**
* ofnode_stringlist_search() - find a string in a string list and return index
*


@ -24,15 +24,16 @@
typedef phys_addr_t fdt_addr_t;
typedef phys_size_t fdt_size_t;
#ifdef CONFIG_PHYS_64BIT
#define FDT_ADDR_T_NONE (-1U)
#define FDT_SIZE_T_NONE (-1U)
#ifdef CONFIG_PHYS_64BIT
#define fdt_addr_to_cpu(reg) be64_to_cpu(reg)
#define fdt_size_to_cpu(reg) be64_to_cpu(reg)
#define cpu_to_fdt_addr(reg) cpu_to_be64(reg)
#define cpu_to_fdt_size(reg) cpu_to_be64(reg)
typedef fdt64_t fdt_val_t;
#else
#define FDT_ADDR_T_NONE (-1U)
#define fdt_addr_to_cpu(reg) be32_to_cpu(reg)
#define fdt_size_to_cpu(reg) be32_to_cpu(reg)
#define cpu_to_fdt_addr(reg) cpu_to_be32(reg)


@ -53,14 +53,22 @@ struct udevice;
#define TPM2_PT_MAX_COMMAND_SIZE (u32)(TPM2_PT_FIXED + 30)
#define TPM2_PT_MAX_RESPONSE_SIZE (u32)(TPM2_PT_FIXED + 31)
/* event types */
#define EV_POST_CODE ((u32)0x00000001)
#define EV_NO_ACTION ((u32)0x00000003)
#define EV_SEPARATOR ((u32)0x00000004)
#define EV_S_CRTM_CONTENTS ((u32)0x00000007)
#define EV_S_CRTM_VERSION ((u32)0x00000008)
#define EV_CPU_MICROCODE ((u32)0x00000009)
#define EV_TABLE_OF_DEVICES ((u32)0x0000000B)
/*
* event types, cf.
* "TCG Server Management Domain Firmware Profile Specification",
* rev 1.00, 2020-05-01
*/
#define EV_POST_CODE ((u32)0x00000001)
#define EV_NO_ACTION ((u32)0x00000003)
#define EV_SEPARATOR ((u32)0x00000004)
#define EV_ACTION ((u32)0x00000005)
#define EV_TAG ((u32)0x00000006)
#define EV_S_CRTM_CONTENTS ((u32)0x00000007)
#define EV_S_CRTM_VERSION ((u32)0x00000008)
#define EV_CPU_MICROCODE ((u32)0x00000009)
#define EV_PLATFORM_CONFIG_FLAGS ((u32)0x0000000A)
#define EV_TABLE_OF_DEVICES ((u32)0x0000000B)
#define EV_COMPACT_HASH ((u32)0x0000000C)
/* TPMS_TAGGED_PROPERTY Structure */
struct tpms_tagged_property {


@ -942,7 +942,11 @@ int fdt_get_resource(const void *fdt, int node, const char *property,
while (ptr + na + ns <= end) {
if (i == index) {
res->start = fdtdec_get_number(ptr, na);
if (CONFIG_IS_ENABLED(OF_TRANSLATE))
res->start = fdt_translate_address(fdt, node, ptr);
else
res->start = fdtdec_get_number(ptr, na);
res->end = res->start;
res->end += fdtdec_get_number(&ptr[na], ns) - 1;
return 0;


@ -2326,13 +2326,15 @@ sub get_raw_comment {
# suffix: Suffix to expect on member, e.g. "_priv"
# warning: Warning name, e.g. "PRIV_AUTO"
sub u_boot_struct_name {
my ($line, $auto, $suffix, $warning) = @_;
my ($line, $auto, $suffix, $warning, $herecurr) = @_;
# Use _priv as a suffix for the device-private data struct
if ($line =~ /^\+\s*\.${auto}\s*=\s*sizeof\(struct\((\w+)\).*/) {
my $struct_name = $1;
if ($struct_name !~ /^\w+${suffix}/) {
WARN($warning, "struct \'$struct_name\' should have a ${suffix} suffix");
WARN($warning,
"struct \'$struct_name\' should have a ${suffix} suffix\n"
. $herecurr);
}
}
}
@ -2410,17 +2412,17 @@ sub u_boot_line {
}
# Check struct names for the 'auto' members of struct driver
u_boot_struct_name($line, "priv_auto", "_priv", "PRIV_AUTO");
u_boot_struct_name($line, "plat_auto", "_plat", "PLAT_AUTO");
u_boot_struct_name($line, "per_child_auto", "_priv", "CHILD_PRIV_AUTO");
u_boot_struct_name($line, "priv_auto", "_priv", "PRIV_AUTO", $herecurr);
u_boot_struct_name($line, "plat_auto", "_plat", "PLAT_AUTO", $herecurr);
u_boot_struct_name($line, "per_child_auto", "_priv", "CHILD_PRIV_AUTO", $herecurr);
u_boot_struct_name($line, "per_child_plat_auto", "_plat",
"CHILD_PLAT_AUTO");
"CHILD_PLAT_AUTO", $herecurr);
# Now the ones for struct uclass, skipping those in common with above
u_boot_struct_name($line, "per_device_auto", "_priv",
"DEVICE_PRIV_AUTO");
"DEVICE_PRIV_AUTO", $herecurr);
u_boot_struct_name($line, "per_device_plat_auto", "_plat",
"DEVICE_PLAT_AUTO");
"DEVICE_PLAT_AUTO", $herecurr);
}
sub process {


@ -261,3 +261,34 @@ static int dm_test_ofnode_is_enabled(struct unit_test_state *uts)
return 0;
}
DM_TEST(dm_test_ofnode_is_enabled, UT_TESTF_SCAN_PDATA | UT_TESTF_SCAN_FDT);
static int dm_test_ofnode_get_reg(struct unit_test_state *uts)
{
ofnode node;
fdt_addr_t addr;
fdt_size_t size;
node = ofnode_path("/translation-test@8000");
ut_assert(ofnode_valid(node));
addr = ofnode_get_addr(node);
size = ofnode_get_size(node);
ut_asserteq(0x8000, addr);
ut_asserteq(0x4000, size);
node = ofnode_path("/translation-test@8000/dev@1,100");
ut_assert(ofnode_valid(node));
addr = ofnode_get_addr(node);
size = ofnode_get_size(node);
ut_asserteq(0x9000, addr);
ut_asserteq(0x1000, size);
node = ofnode_path("/emul-mux-controller");
ut_assert(ofnode_valid(node));
addr = ofnode_get_addr(node);
size = ofnode_get_size(node);
ut_asserteq(FDT_ADDR_T_NONE, addr);
ut_asserteq(FDT_SIZE_T_NONE, size);
return 0;
}
DM_TEST(dm_test_ofnode_get_reg, UT_TESTF_SCAN_PDATA | UT_TESTF_SCAN_FDT);


@ -17,6 +17,7 @@ pyparsing==2.4.2
pytest==5.2.1
python-mimeparse==1.6.0
python-subunit==1.3.0
requests==2.25.1
six==1.12.0
testtools==2.3.0
traceback2==1.4.0


@ -135,25 +135,32 @@ static bool ut_test_run_on_flattree(struct unit_test *test)
static bool test_matches(const char *prefix, const char *test_name,
const char *select_name)
{
size_t len;
if (!select_name)
return true;
if (!strcmp(test_name, select_name))
/* Allow glob expansion in the test name */
len = select_name[strlen(select_name) - 1] == '*' ? strlen(select_name) : 0;
if (len-- == 1)
return true;
if (!prefix) {
if (!strncmp(test_name, select_name, len))
return true;
if (prefix) {
/* All tests have this prefix */
if (!strncmp(test_name, prefix, strlen(prefix)))
test_name += strlen(prefix);
} else {
const char *p = strstr(test_name, "_test_");
/* convert xxx_test_yyy to yyy, i.e. remove the suite name */
if (p)
test_name = p + 6;
} else {
/* All tests have this prefix */
if (!strncmp(test_name, prefix, strlen(prefix)))
test_name += strlen(prefix);
test_name = p + strlen("_test_");
}
if (!strcmp(test_name, select_name))
if (!strncmp(test_name, select_name, len))
return true;
return false;


@ -461,8 +461,12 @@ see www.flashrom.org/Flashrom for more information.
When used, this entry will be populated with an FMAP which reflects the
entries in the current image. Note that any hierarchy is squashed, since
FMAP does not support this. Also, CBFS entries appear as a single entry -
the sub-entries are ignored.
FMAP does not support this. Sections are represented as an area appearing
before its contents, so that it is possible to reconstruct the hierarchy
from the FMAP by using the offset information. This convention does not
seem to be documented, but is used in Chromium OS.
CBFS entries appear as a single entry, i.e. the sub-entries are ignored.
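Since this area-before-contents convention is otherwise undocumented, the following is a minimal sketch of how a consumer of the FMAP could rebuild the section hierarchy from the offsets. The rebuild_hierarchy() helper and its (offset, size, name) tuples are hypothetical stand-ins for decoded FMAP areas, not part of binman or fmap_util:

    # Sketch only: rebuild section nesting from FMAP areas, relying on each
    # section's area appearing immediately before the areas of its contents.
    def rebuild_hierarchy(areas):
        root = {'name': '<image>', 'offset': 0, 'size': None, 'children': []}
        stack = [root]
        for offset, size, name in areas:
            node = {'name': name, 'offset': offset, 'size': size, 'children': []}
            # Pop back out of any section this area no longer fits inside
            while len(stack) > 1 and not (
                    stack[-1]['offset'] <= offset and
                    offset + size <= stack[-1]['offset'] + stack[-1]['size']):
                stack.pop()
            stack[-1]['children'].append(node)
            stack.append(node)  # later areas may be nested inside this one
        return root

    # With areas like those checked by the FMAP functional test further down,
    # this yields SECTION0 containing RO_U_BOOT and SECTION1 containing RW_U_BOOT.
    tree = rebuild_hierarchy([(0, 16, 'SECTION0'), (0, 4, 'RO_U_BOOT'),
                              (16, 16, 'SECTION1'), (16, 4, 'RW_U_BOOT')])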
@ -804,6 +808,11 @@ Properties:
missing their contents. The second will produce an image but of
course it will not work.
Properties:
_allow_missing: True if this section permits external blobs to be
missing their contents. The second will produce an image but of
course it will not work.
Since a section is also an entry, it inherits all the properties of entries
too.


@ -28,8 +28,12 @@ class Entry_fmap(Entry):
When used, this entry will be populated with an FMAP which reflects the
entries in the current image. Note that any hierarchy is squashed, since
FMAP does not support this. Also, CBFS entries appear as a single entry -
the sub-entries are ignored.
FMAP does not support this. Sections are represented as an area appearing
before its contents, so that it is possible to reconstruct the hierarchy
from the FMAP by using the offset information. This convention does not
seem to be documented, but is used in Chromium OS.
CBFS entries appear as a single entry, i.e. the sub-entries are ignored.
"""
def __init__(self, section, etype, node):
super().__init__(section, etype, node)
@ -45,6 +49,17 @@ class Entry_fmap(Entry):
tout.Debug("fmap: Add entry '%s' type '%s' (%s subentries)" %
(entry.GetPath(), entry.etype, ToHexSize(entries)))
if entries and entry.etype != 'cbfs':
# Create an area for the section, which encompasses all entries
# within it
if entry.image_pos is None:
pos = 0
else:
pos = entry.image_pos - entry.GetRootSkipAtStart()
# Drop @ symbols in name
name = entry.name.replace('@', '')
areas.append(
fmap_util.FmapArea(pos, entry.size or 0, name, 0))
for subentry in entries.values():
_AddEntries(areas, subentry)
else:


@ -1341,6 +1341,7 @@ class TestFunctional(unittest.TestCase):
def testSplNoDtb(self):
"""Test that an image with spl/u-boot-spl-nodtb.bin can be created"""
self._SetupSplElf()
data = self._DoReadFile('052_u_boot_spl_nodtb.dts')
self.assertEqual(U_BOOT_SPL_NODTB_DATA, data[:len(U_BOOT_SPL_NODTB_DATA)])
@ -1594,26 +1595,41 @@ class TestFunctional(unittest.TestCase):
self.assertEqual(1, fhdr.ver_major)
self.assertEqual(0, fhdr.ver_minor)
self.assertEqual(0, fhdr.base)
self.assertEqual(16 + 16 +
fmap_util.FMAP_HEADER_LEN +
fmap_util.FMAP_AREA_LEN * 3, fhdr.image_size)
expect_size = fmap_util.FMAP_HEADER_LEN + fmap_util.FMAP_AREA_LEN * 5
self.assertEqual(16 + 16 + expect_size, fhdr.image_size)
self.assertEqual(b'FMAP', fhdr.name)
self.assertEqual(3, fhdr.nareas)
for fentry in fentries:
self.assertEqual(0, fentry.flags)
self.assertEqual(5, fhdr.nareas)
fiter = iter(fentries)
self.assertEqual(0, fentries[0].offset)
self.assertEqual(4, fentries[0].size)
self.assertEqual(b'RO_U_BOOT', fentries[0].name)
fentry = next(fiter)
self.assertEqual(b'SECTION0', fentry.name)
self.assertEqual(0, fentry.offset)
self.assertEqual(16, fentry.size)
self.assertEqual(0, fentry.flags)
self.assertEqual(16, fentries[1].offset)
self.assertEqual(4, fentries[1].size)
self.assertEqual(b'RW_U_BOOT', fentries[1].name)
fentry = next(fiter)
self.assertEqual(b'RO_U_BOOT', fentry.name)
self.assertEqual(0, fentry.offset)
self.assertEqual(4, fentry.size)
self.assertEqual(0, fentry.flags)
self.assertEqual(32, fentries[2].offset)
self.assertEqual(fmap_util.FMAP_HEADER_LEN +
fmap_util.FMAP_AREA_LEN * 3, fentries[2].size)
self.assertEqual(b'FMAP', fentries[2].name)
fentry = next(fiter)
self.assertEqual(b'SECTION1', fentry.name)
self.assertEqual(16, fentry.offset)
self.assertEqual(16, fentry.size)
self.assertEqual(0, fentry.flags)
fentry = next(fiter)
self.assertEqual(b'RW_U_BOOT', fentry.name)
self.assertEqual(16, fentry.offset)
self.assertEqual(4, fentry.size)
self.assertEqual(0, fentry.flags)
fentry = next(fiter)
self.assertEqual(b'FMAP', fentry.name)
self.assertEqual(32, fentry.offset)
self.assertEqual(expect_size, fentry.size)
self.assertEqual(0, fentry.flags)
def testBlobNamedByArg(self):
"""Test we can add a blob with the filename coming from an entry arg"""
@ -2063,20 +2079,29 @@ class TestFunctional(unittest.TestCase):
self.assertEqual(expected, data[:32])
fhdr, fentries = fmap_util.DecodeFmap(data[36:])
self.assertEqual(0x100, fhdr.image_size)
self.assertEqual(0x180, fhdr.image_size)
expect_size = fmap_util.FMAP_HEADER_LEN + fmap_util.FMAP_AREA_LEN * 4
fiter = iter(fentries)
self.assertEqual(0, fentries[0].offset)
self.assertEqual(4, fentries[0].size)
self.assertEqual(b'U_BOOT', fentries[0].name)
fentry = next(fiter)
self.assertEqual(b'U_BOOT', fentry.name)
self.assertEqual(0, fentry.offset)
self.assertEqual(4, fentry.size)
self.assertEqual(4, fentries[1].offset)
self.assertEqual(3, fentries[1].size)
self.assertEqual(b'INTEL_MRC', fentries[1].name)
fentry = next(fiter)
self.assertEqual(b'SECTION', fentry.name)
self.assertEqual(4, fentry.offset)
self.assertEqual(0x20 + expect_size, fentry.size)
self.assertEqual(36, fentries[2].offset)
self.assertEqual(fmap_util.FMAP_HEADER_LEN +
fmap_util.FMAP_AREA_LEN * 3, fentries[2].size)
self.assertEqual(b'FMAP', fentries[2].name)
fentry = next(fiter)
self.assertEqual(b'INTEL_MRC', fentry.name)
self.assertEqual(4, fentry.offset)
self.assertEqual(3, fentry.size)
fentry = next(fiter)
self.assertEqual(b'FMAP', fentry.name)
self.assertEqual(36, fentry.offset)
self.assertEqual(expect_size, fentry.size)
def testElf(self):
"""Basic test of ELF entries"""
@ -4272,6 +4297,7 @@ class TestFunctional(unittest.TestCase):
def testTplNoDtb(self):
"""Test that an image with tpl/u-boot-tpl-nodtb.bin can be created"""
self._SetupTplElf()
data = self._DoReadFile('192_u_boot_tpl_nodtb.dts')
self.assertEqual(U_BOOT_TPL_NODTB_DATA,
data[:len(U_BOOT_TPL_NODTB_DATA)])


@ -7,7 +7,7 @@
binman {
end-at-4gb;
size = <0x100>;
size = <0x180>;
u-boot {
};
section {


@ -182,6 +182,7 @@ class Builder:
only useful for testing in-tree builds.
work_in_output: Use the output directory as the work directory and
don't write to a separate output directory.
thread_exceptions: List of exceptions raised by thread jobs
Private members:
_base_board_dict: Last-summarised Dict of boards
@ -235,7 +236,8 @@ class Builder:
no_subdirs=False, full_path=False, verbose_build=False,
mrproper=False, per_board_out_dir=False,
config_only=False, squash_config_y=False,
warnings_as_errors=False, work_in_output=False):
warnings_as_errors=False, work_in_output=False,
test_thread_exceptions=False):
"""Create a new Builder object
Args:
@ -262,6 +264,9 @@ class Builder:
warnings_as_errors: Treat all compiler warnings as errors
work_in_output: Use the output directory as the work directory and
don't write to a separate output directory.
test_thread_exceptions: Used for tests only, True to make the
threads raise an exception instead of reporting their result.
This simulates a failure in the code somewhere
"""
self.toolchains = toolchains
self.base_dir = base_dir
@ -311,13 +316,16 @@ class Builder:
self._re_migration_warning = re.compile(r'^={21} WARNING ={22}\n.*\n=+\n',
re.MULTILINE | re.DOTALL)
self.thread_exceptions = []
self.test_thread_exceptions = test_thread_exceptions
if self.num_threads:
self._single_builder = None
self.queue = queue.Queue()
self.out_queue = queue.Queue()
for i in range(self.num_threads):
t = builderthread.BuilderThread(self, i, mrproper,
per_board_out_dir)
t = builderthread.BuilderThread(
self, i, mrproper, per_board_out_dir,
test_exception=test_thread_exceptions)
t.setDaemon(True)
t.start()
self.threads.append(t)
@ -1676,6 +1684,7 @@ class Builder:
Tuple containing:
- number of boards that failed to build
- number of boards that issued warnings
- list of thread exceptions raised
"""
self.commit_count = len(commits) if commits else 1
self.commits = commits
@ -1689,7 +1698,7 @@ class Builder:
Print('\rStarting build...', newline=False)
self.SetupBuild(board_selected, commits)
self.ProcessResult(None)
self.thread_exceptions = []
# Create jobs to build all commits for each board
for brd in board_selected.values():
job = builderthread.BuilderJob()
@ -1728,5 +1737,8 @@ class Builder:
rate = float(self.count) / duration.total_seconds()
msg += ', duration %s, rate %1.2f' % (duration, rate)
Print(msg)
if self.thread_exceptions:
Print('Failed: %d thread exceptions' % len(self.thread_exceptions),
colour=self.col.RED)
return (self.fail, self.warned)
return (self.fail, self.warned, self.thread_exceptions)


@ -89,16 +89,23 @@ class BuilderThread(threading.Thread):
Members:
builder: The builder which contains information we might need
thread_num: Our thread number (0-n-1), used to decide on a
temporary directory. If this is -1 then there are no threads
and we are the (only) main process
mrproper: Use 'make mrproper' before each reconfigure
per_board_out_dir: True to build in a separate persistent directory per
board rather than a thread-specific directory
test_exception: Used for testing; True to raise an exception instead of
reporting the build result
"""
def __init__(self, builder, thread_num, mrproper, per_board_out_dir):
def __init__(self, builder, thread_num, mrproper, per_board_out_dir,
test_exception=False):
"""Set up a new builder thread"""
threading.Thread.__init__(self)
self.builder = builder
self.thread_num = thread_num
self.mrproper = mrproper
self.per_board_out_dir = per_board_out_dir
self.test_exception = test_exception
def Make(self, commit, brd, stage, cwd, *args, **kwargs):
"""Run 'make' on a particular commit and board.
@ -344,10 +351,9 @@ class BuilderThread(threading.Thread):
# Write out the image and function size information and an objdump
env = result.toolchain.MakeEnvironment(self.builder.full_path)
with open(os.path.join(build_dir, 'out-env'), 'w',
encoding='utf-8') as fd:
with open(os.path.join(build_dir, 'out-env'), 'wb') as fd:
for var in sorted(env.keys()):
print('%s="%s"' % (var, env[var]), file=fd)
fd.write(b'%s="%s"' % (var, env[var]))
lines = []
for fname in BASE_ELF_FILENAMES:
cmd = ['%snm' % self.toolchain.cross, '--size-sort', fname]
@ -440,6 +446,22 @@ class BuilderThread(threading.Thread):
target = '%s-%s%s' % (base, dirname, ext)
shutil.copy(fname, os.path.join(build_dir, target))
def _SendResult(self, result):
"""Send a result to the builder for processing
Args:
result: CommandResult object containing the results of the build
Raises:
ValueError if self.test_exception is true (for testing)
"""
if self.test_exception:
raise ValueError('test exception')
if self.thread_num != -1:
self.builder.out_queue.put(result)
else:
self.builder.ProcessResult(result)
def RunJob(self, job):
"""Run a single job
@ -513,10 +535,7 @@ class BuilderThread(threading.Thread):
# We have the build results, so output the result
self._WriteResult(result, job.keep_outputs, job.work_in_output)
if self.thread_num != -1:
self.builder.out_queue.put(result)
else:
self.builder.ProcessResult(result)
self._SendResult(result)
else:
# Just build the currently checked-out build
result, request_config = self.RunCommit(None, brd, work_dir, True,
@ -525,10 +544,7 @@ class BuilderThread(threading.Thread):
work_in_output=job.work_in_output)
result.commit_upto = 0
self._WriteResult(result, job.keep_outputs, job.work_in_output)
if self.thread_num != -1:
self.builder.out_queue.put(result)
else:
self.builder.ProcessResult(result)
self._SendResult(result)
def run(self):
"""Our thread's run function
@ -538,5 +554,9 @@ class BuilderThread(threading.Thread):
"""
while True:
job = self.builder.queue.get()
self.RunJob(job)
try:
self.RunJob(job)
except Exception as e:
print('Thread exception:', e)
self.builder.thread_exceptions.append(e)
self.builder.queue.task_done()


@ -110,7 +110,7 @@ def ShowToolchainPrefix(boards, toolchains):
return None
def DoBuildman(options, args, toolchains=None, make_func=None, boards=None,
clean_dir=False):
clean_dir=False, test_thread_exceptions=False):
"""The main control code for buildman
Args:
@ -124,6 +124,11 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None,
arguments. This setting is useful for tests.
board: Boards() object to use, containing a list of available
boards. If this is None it will be created and scanned.
clean_dir: Used for tests only, indicates that the existing output_dir
should be removed before starting the build
test_thread_exceptions: Used for tests only, True to make the threads
raise an exception instead of reporting their result. This simulates
a failure in the code somewhere
"""
global builder
@ -328,7 +333,8 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None,
config_only=options.config_only,
squash_config_y=not options.preserve_config_y,
warnings_as_errors=options.warnings_as_errors,
work_in_output=options.work_in_output)
work_in_output=options.work_in_output,
test_thread_exceptions=test_thread_exceptions)
builder.force_config_on_failure = not options.quick
if make_func:
builder.do_make = make_func
@ -368,9 +374,11 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None,
if options.summary:
builder.ShowSummary(commits, board_selected)
else:
fail, warned = builder.BuildBoards(commits, board_selected,
options.keep_outputs, options.verbose)
if fail:
fail, warned, excs = builder.BuildBoards(
commits, board_selected, options.keep_outputs, options.verbose)
if excs:
return 102
elif fail:
return 100
elif warned and not options.ignore_warnings:
return 101


@ -16,6 +16,7 @@ from buildman import toolchain
from patman import command
from patman import gitutil
from patman import terminal
from patman import test_util
from patman import tools
settings_data = '''
@ -219,12 +220,28 @@ class TestFunctional(unittest.TestCase):
return command.RunPipe([[self._buildman_pathname] + list(args)],
capture=True, capture_stderr=True)
def _RunControl(self, *args, clean_dir=False, boards=None):
def _RunControl(self, *args, boards=None, clean_dir=False,
test_thread_exceptions=False):
"""Run buildman
Args:
args: List of arguments to pass
boards: Boards object to use, or None to use self._boards
clean_dir: Used for tests only, indicates that the existing output_dir
should be removed before starting the build
test_thread_exceptions: Used for tests only, True to make the threads
raise an exception instead of reporting their result. This simulates
a failure in the code somewhere
Returns:
result code from buildman
"""
sys.argv = [sys.argv[0]] + list(args)
options, args = cmdline.ParseArgs()
result = control.DoBuildman(options, args, toolchains=self._toolchains,
make_func=self._HandleMake, boards=boards or self._boards,
clean_dir=clean_dir)
clean_dir=clean_dir,
test_thread_exceptions=test_thread_exceptions)
self._builder = control.builder
return result
@ -555,6 +572,18 @@ class TestFunctional(unittest.TestCase):
self.assertTrue(os.path.exists(os.path.join(board0_dir, 'done')))
self.assertTrue(os.path.exists(os.path.join(board0_dir, 'out-env')))
def testEnvironmentUnicode(self):
"""Test there are no unicode errors when the env has non-ASCII chars"""
try:
varname = b'buildman_test_var'
os.environb[varname] = b'strange\x80chars'
self.assertEqual(0, self._RunControl('-o', self._output_dir))
board0_dir = os.path.join(self._output_dir, 'current', 'board0')
self.assertTrue(os.path.exists(os.path.join(board0_dir, 'done')))
self.assertTrue(os.path.exists(os.path.join(board0_dir, 'out-env')))
finally:
del os.environb[varname]
def testWorkInOutput(self):
"""Test the -w option which should write directly to the output dir"""
board_list = board.Boards()
@ -588,3 +617,10 @@ class TestFunctional(unittest.TestCase):
with self.assertRaises(SystemExit) as e:
self._RunControl('-w', clean_dir=False)
self.assertIn("specify -o", str(e.exception))
def testThreadExceptions(self):
"""Test that exceptions in threads are reported"""
with test_util.capture_sys_output() as (stdout, stderr):
self.assertEqual(102, self._RunControl('-o', self._output_dir,
test_thread_exceptions=True))
self.assertIn('Thread exception: test exception', stdout.getvalue())


@ -179,27 +179,35 @@ class Toolchain:
output and possibly unicode encoded output of all build tools by
adding LC_ALL=C.
Note that os.environb is used to obtain the environment, since in some
cases the environment may contain non-ASCII characters and we see
errors like:
UnicodeEncodeError: 'utf-8' codec can't encode characters in position
569-570: surrogates not allowed
Args:
full_path: Return the full path in CROSS_COMPILE and don't set
PATH
Returns:
Dict containing the environment to use. This is based on the current
environment, with changes as needed to CROSS_COMPILE, PATH and
LC_ALL.
Dict containing the (bytes) environment to use. This is based on the
current environment, with changes as needed to CROSS_COMPILE, PATH
and LC_ALL.
"""
env = dict(os.environ)
env = dict(os.environb)
wrapper = self.GetWrapper()
if self.override_toolchain:
# We'll use MakeArgs() to provide this
pass
elif full_path:
env['CROSS_COMPILE'] = wrapper + os.path.join(self.path, self.cross)
env[b'CROSS_COMPILE'] = tools.ToBytes(
wrapper + os.path.join(self.path, self.cross))
else:
env['CROSS_COMPILE'] = wrapper + self.cross
env['PATH'] = self.path + ':' + env['PATH']
env[b'CROSS_COMPILE'] = tools.ToBytes(wrapper + self.cross)
env[b'PATH'] = tools.ToBytes(self.path) + b':' + env[b'PATH']
env['LC_ALL'] = 'C'
env[b'LC_ALL'] = b'C'
return env
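For reference, a minimal sketch of the failure mode the bytes environment avoids, assuming a POSIX Python (where os.environb is available) and a UTF-8 locale; BUILDMAN_DEMO is just a placeholder variable name:

    import os

    # A value with an undecodable byte shows up in os.environ as a surrogate
    # escape, which cannot be re-encoded as UTF-8; os.environb returns raw bytes.
    os.environb[b'BUILDMAN_DEMO'] = b'strange\x80chars'
    try:
        os.environ['BUILDMAN_DEMO'].encode('utf-8')
    except UnicodeEncodeError as exc:
        print('str environment value cannot be encoded:', exc)
    print('bytes environment value is fine:', os.environb[b'BUILDMAN_DEMO'])
    del os.environb[b'BUILDMAN_DEMO']

This mirrors the testEnvironmentUnicode case added to the buildman functional tests in this series.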


@ -824,8 +824,6 @@ class DtbPlatdata():
self.buf('\t},\n')
def generate_uclasses(self):
if not self.check_instantiate(True):
return
self.out('\n')
self.out('#include <common.h>\n')
self.out('#include <dm.h>\n')
@ -1038,22 +1036,6 @@ class DtbPlatdata():
self.out(''.join(self.get_buf()))
def check_instantiate(self, require):
"""Check if self._instantiate is set to the required value
If not, this outputs a message into the current file
Args:
require: True to require --instantiate, False to require that it not
be enabled
"""
if require != self._instantiate:
self.out(
'/* This file is not used: --instantiate was %senabled */\n' %
('not ' if require else ''))
return False
return True
def generate_plat(self):
"""Generate device defintions for the platform data
@ -1064,8 +1046,6 @@ class DtbPlatdata():
See the documentation in doc/driver-model/of-plat.rst for more
information.
"""
if not self.check_instantiate(False):
return
self.out('/* Allow use of U_BOOT_DRVINFO() in this file */\n')
self.out('#define DT_PLAT_C\n')
self.out('\n')
@ -1102,8 +1082,6 @@ class DtbPlatdata():
See the documentation in doc/driver-model/of-plat.rst for more
information.
"""
if not self.check_instantiate(True):
return
self.out('#include <common.h>\n')
self.out('#include <dm.h>\n')
self.out('#include <dt-structs.h>\n')
@ -1216,7 +1194,7 @@ def run_steps(args, dtb_file, include_disabled, output, output_dirs, phase,
plat.assign_seqs()
# Figure out what output files we plan to generate
output_files = OUTPUT_FILES_COMMON
output_files = dict(OUTPUT_FILES_COMMON)
if instantiate:
output_files.update(OUTPUT_FILES_INST)
else:


@ -74,10 +74,6 @@ UCLASS_HEADER_COMMON = '''/*
*/
'''
UCLASS_HEADER = UCLASS_HEADER_COMMON + '''
/* This file is not used: --instantiate was not enabled */
'''
# Scanner saved from a previous run of the tests (to speed things up)
saved_scan = None
@ -412,7 +408,6 @@ U_BOOT_DRVINFO(spl_test3) = {
};
'''
uclass_text = UCLASS_HEADER
uclass_text_inst = '''
#include <common.h>
@ -511,15 +506,6 @@ DM_UCLASS_INST(testfdt) = {
},
};
'''
device_text = '''/*
* DO NOT MODIFY
*
* Declares the DM_DEVICE_INST() records.
* This was generated by dtoc from a .dtb (device tree binary) file.
*/
/* This file is not used: --instantiate was not enabled */
'''
device_text_inst = '''/*
* DO NOT MODIFY
@ -833,8 +819,7 @@ DM_DEVICE_INST(test0) = {
self.run_test(['all'], dtb_file, output)
data = tools.ReadFile(output, binary=False)
self._check_strings(
self.decl_text + self.device_text + self.platdata_text +
self.struct_text + self.uclass_text, data)
self.decl_text + self.platdata_text + self.struct_text, data)
def test_driver_alias(self):
"""Test output from a device tree file with a driver alias"""
@ -1537,8 +1522,7 @@ U_BOOT_DRVINFO(spl_test2) = {
self.run_test(['all'], dtb_file, output)
data = tools.ReadFile(output, binary=False)
self._check_strings(
self.decl_text + self.device_text + self.platdata_text +
self.struct_text + self.uclass_text, data)
self.decl_text + self.platdata_text + self.struct_text, data)
def test_no_command(self):
"""Test running dtoc without a command"""
@ -1566,8 +1550,7 @@ U_BOOT_DRVINFO(spl_test2) = {
self.assertIn("Must specify either output or output_dirs, not both",
str(exc.exception))
def test_output_dirs(self):
"""Test outputting files to a directory"""
def check_output_dirs(self, instantiate):
# Remove the directory so that files from other tests are not there
tools._RemoveOutputDir()
tools.PrepareOutputDir(None)
@ -1579,14 +1562,30 @@ U_BOOT_DRVINFO(spl_test2) = {
self.assertEqual(2, len(fnames))
dtb_platdata.run_steps(
['all'], dtb_file, False, None, [outdir], None, False,
['all'], dtb_file, False, None, [outdir], None, instantiate,
warning_disabled=True, scan=copy_scan())
fnames = glob.glob(outdir + '/*')
self.assertEqual(7, len(fnames))
return fnames
def test_output_dirs(self):
"""Test outputting files to a directory"""
fnames = self.check_output_dirs(False)
self.assertEqual(5, len(fnames))
leafs = set(os.path.basename(fname) for fname in fnames)
self.assertEqual(
{'dt-structs-gen.h', 'source.dts', 'dt-plat.c', 'source.dtb',
'dt-decl.h'},
leafs)
def test_output_dirs_inst(self):
"""Test outputting files to a directory with instantiation"""
fnames = self.check_output_dirs(True)
self.assertEqual(6, len(fnames))
leafs = set(os.path.basename(fname) for fname in fnames)
self.assertEqual(
{'dt-structs-gen.h', 'source.dts', 'source.dtb',
'dt-uclass.c', 'dt-decl.h', 'dt-device.c'},
leafs)
@ -1785,14 +1784,6 @@ U_BOOT_DRVINFO(spl_test2) = {
self._check_strings(self.decl_text_inst, data)
self.run_test(['platdata'], dtb_file, output, True)
with open(output) as infile:
data = infile.read()
self._check_strings(C_HEADER_PRE + '''
/* This file is not used: --instantiate was enabled */
''', data)
self.run_test(['uclass'], dtb_file, output, True)
with open(output) as infile:
data = infile.read()


@ -10,7 +10,15 @@ import sys
from patman import command
from patman import gitutil
from patman import terminal
from patman import tools
EMACS_PREFIX = r'(?:[0-9]{4}.*\.patch:[0-9]+: )?'
TYPE_NAME = r'([A-Z_]+:)?'
RE_ERROR = re.compile(r'ERROR:%s (.*)' % TYPE_NAME)
RE_WARNING = re.compile(EMACS_PREFIX + r'WARNING:%s (.*)' % TYPE_NAME)
RE_CHECK = re.compile(r'CHECK:%s (.*)' % TYPE_NAME)
RE_FILE = re.compile(r'#(\d+): (FILE: ([^:]*):(\d+):)?')
RE_NOTE = re.compile(r'NOTE: (.*)')
def FindCheckPatch():
top_level = gitutil.GetTopLevel()
@ -38,8 +46,148 @@ def FindCheckPatch():
sys.exit('Cannot find checkpatch.pl - please put it in your ' +
'~/bin directory or use --no-check')
def CheckPatchParseOneMessage(message):
"""Parse one checkpatch message
Args:
message: string to parse
Returns:
dict:
'type': error, warning or check
'msg': text message
'file': filename
'line': line number
"""
if RE_NOTE.match(message):
return {}
item = {}
err_match = RE_ERROR.match(message)
warn_match = RE_WARNING.match(message)
check_match = RE_CHECK.match(message)
if err_match:
item['cptype'] = err_match.group(1)
item['msg'] = err_match.group(2)
item['type'] = 'error'
elif warn_match:
item['cptype'] = warn_match.group(1)
item['msg'] = warn_match.group(2)
item['type'] = 'warning'
elif check_match:
item['cptype'] = check_match.group(1)
item['msg'] = check_match.group(2)
item['type'] = 'check'
else:
message_indent = ' '
print('patman: failed to parse checkpatch message:\n%s' %
(message_indent + message.replace('\n', '\n' + message_indent)),
file=sys.stderr)
return {}
file_match = RE_FILE.search(message)
# some messages have no file, catch those here
no_file_match = any(s in message for s in [
'\nSubject:', 'Missing Signed-off-by: line(s)',
'does MAINTAINERS need updating'
])
if file_match:
err_fname = file_match.group(3)
if err_fname:
item['file'] = err_fname
item['line'] = int(file_match.group(4))
else:
item['file'] = '<patch>'
item['line'] = int(file_match.group(1))
elif no_file_match:
item['file'] = '<patch>'
else:
message_indent = ' '
print('patman: failed to find file / line information:\n%s' %
(message_indent + message.replace('\n', '\n' + message_indent)),
file=sys.stderr)
return item
def CheckPatchParse(checkpatch_output, verbose=False):
"""Parse checkpatch.pl output
Args:
checkpatch_output: string to parse
verbose: True to print out every line of the checkpatch output as it is
parsed
Returns:
namedtuple containing:
ok: False=failure, True=ok
problems: List of problems, each a dict:
'type': error, warning or check
'msg': text message
'file': filename
'line': line number
errors: Number of errors
warnings: Number of warnings
checks: Number of checks
lines: Number of lines
stdout: checkpatch_output
"""
fields = ['ok', 'problems', 'errors', 'warnings', 'checks', 'lines',
'stdout']
result = collections.namedtuple('CheckPatchResult', fields)
result.stdout = checkpatch_output
result.ok = False
result.errors, result.warnings, result.checks = 0, 0, 0
result.lines = 0
result.problems = []
# total: 0 errors, 0 warnings, 159 lines checked
# or:
# total: 0 errors, 2 warnings, 7 checks, 473 lines checked
emacs_stats = r'(?:[0-9]{4}.*\.patch )?'
re_stats = re.compile(emacs_stats +
r'total: (\d+) errors, (\d+) warnings, (\d+)')
re_stats_full = re.compile(emacs_stats +
r'total: (\d+) errors, (\d+) warnings, (\d+)'
r' checks, (\d+)')
re_ok = re.compile(r'.*has no obvious style problems')
re_bad = re.compile(r'.*has style problems, please review')
# A blank line indicates the end of a message
for message in result.stdout.split('\n\n'):
if verbose:
print(message)
# either find stats, the verdict, or delegate
match = re_stats_full.match(message)
if not match:
match = re_stats.match(message)
if match:
result.errors = int(match.group(1))
result.warnings = int(match.group(2))
if len(match.groups()) == 4:
result.checks = int(match.group(3))
result.lines = int(match.group(4))
else:
result.lines = int(match.group(3))
elif re_ok.match(message):
result.ok = True
elif re_bad.match(message):
result.ok = False
else:
problem = CheckPatchParseOneMessage(message)
if problem:
result.problems.append(problem)
return result
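As a rough usage sketch, the new parser can be exercised on its own; the sample text below is hand-written in the shape of checkpatch.pl output (with --show-types), and the import assumes patman is on the Python path:

    from patman import checkpatch

    sample = '''WARNING:LONG_LINE: line over 80 characters
    #42: FILE: drivers/demo.c:10:

    total: 0 errors, 1 warnings, 12 lines checked

    your patch has style problems, please review.
    '''
    result = checkpatch.CheckPatchParse(sample)
    print(result.ok, result.errors, result.warnings, result.lines)
    for item in result.problems:
        print(item['type'], item['file'], item['line'], item['msg'])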
def CheckPatch(fname, verbose=False, show_types=False):
"""Run checkpatch.pl on a file.
"""Run checkpatch.pl on a file and parse the results.
Args:
fname: Filename to check
@ -61,112 +209,14 @@ def CheckPatch(fname, verbose=False, show_types=False):
lines: Number of lines
stdout: Full output of checkpatch
"""
fields = ['ok', 'problems', 'errors', 'warnings', 'checks', 'lines',
'stdout']
result = collections.namedtuple('CheckPatchResult', fields)
result.ok = False
result.errors, result.warnings, result.checks = 0, 0, 0
result.lines = 0
result.problems = []
chk = FindCheckPatch()
item = {}
args = [chk, '--no-tree']
if show_types:
args.append('--show-types')
result.stdout = command.Output(*args, fname, raise_on_error=False)
#pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE)
#stdout, stderr = pipe.communicate()
output = command.Output(*args, fname, raise_on_error=False)
# total: 0 errors, 0 warnings, 159 lines checked
# or:
# total: 0 errors, 2 warnings, 7 checks, 473 lines checked
emacs_prefix = '(?:[0-9]{4}.*\.patch:[0-9]+: )?'
emacs_stats = '(?:[0-9]{4}.*\.patch )?'
re_stats = re.compile(emacs_stats +
'total: (\\d+) errors, (\d+) warnings, (\d+)')
re_stats_full = re.compile(emacs_stats +
'total: (\\d+) errors, (\d+) warnings, (\d+)'
' checks, (\d+)')
re_ok = re.compile('.*has no obvious style problems')
re_bad = re.compile('.*has style problems, please review')
type_name = '([A-Z_]+:)?'
re_error = re.compile('ERROR:%s (.*)' % type_name)
re_warning = re.compile(emacs_prefix + 'WARNING:%s (.*)' % type_name)
re_check = re.compile('CHECK:%s (.*)' % type_name)
re_file = re.compile('#(\d+): (FILE: ([^:]*):(\d+):)?')
re_note = re.compile('NOTE: (.*)')
re_new_file = re.compile('new file mode .*')
indent = ' ' * 6
for line in result.stdout.splitlines():
if verbose:
print(line)
return CheckPatchParse(output, verbose)
# A blank line indicates the end of a message
if not line:
if item:
result.problems.append(item)
item = {}
continue
if re_note.match(line):
continue
# Skip lines which quote code
if line.startswith(indent):
continue
# Skip code quotes
if line.startswith('+'):
continue
if re_new_file.match(line):
continue
match = re_stats_full.match(line)
if not match:
match = re_stats.match(line)
if match:
result.errors = int(match.group(1))
result.warnings = int(match.group(2))
if len(match.groups()) == 4:
result.checks = int(match.group(3))
result.lines = int(match.group(4))
else:
result.lines = int(match.group(3))
continue
elif re_ok.match(line):
result.ok = True
continue
elif re_bad.match(line):
result.ok = False
continue
err_match = re_error.match(line)
warn_match = re_warning.match(line)
file_match = re_file.match(line)
check_match = re_check.match(line)
subject_match = line.startswith('Subject:')
if err_match:
item['cptype'] = err_match.group(1)
item['msg'] = err_match.group(2)
item['type'] = 'error'
elif warn_match:
item['cptype'] = warn_match.group(1)
item['msg'] = warn_match.group(2)
item['type'] = 'warning'
elif check_match:
item['cptype'] = check_match.group(1)
item['msg'] = check_match.group(2)
item['type'] = 'check'
elif file_match:
err_fname = file_match.group(3)
if err_fname:
item['file'] = err_fname
item['line'] = int(file_match.group(4))
else:
item['file'] = '<patch>'
item['line'] = int(file_match.group(1))
elif subject_match:
item['file'] = '<patch subject>'
item['line'] = None
else:
print('bad line "%s", %d' % (line, len(line)))
return result
def GetWarningMsg(col, msg_type, fname, line, msg):
'''Create a message for a given file/line


@ -25,13 +25,8 @@ from patman import terminal
from patman import tools
from patman.test_util import capture_sys_output
try:
import pygit2
HAVE_PYGIT2 = True
from patman import status
except ModuleNotFoundError:
HAVE_PYGIT2 = False
import pygit2
from patman import status
class TestFunctional(unittest.TestCase):
"""Functional tests for checking that patman behaves correctly"""
@ -458,7 +453,6 @@ complicated as possible''')
repo.branches.local.create('base', base_target)
return repo
@unittest.skipIf(not HAVE_PYGIT2, 'Missing python3-pygit2')
def testBranch(self):
"""Test creating patches from a branch"""
repo = self.make_git_tree()
@ -604,7 +598,6 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
["Found possible blank line(s) at end of file 'lib/fdtdec.c'"],
pstrm.commit.warn)
@unittest.skipIf(not HAVE_PYGIT2, 'Missing python3-pygit2')
def testNoUpstream(self):
"""Test CountCommitsToBranch when there is no upstream"""
repo = self.make_git_tree()
@ -642,7 +635,6 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
{'id': '1', 'name': 'Some patch'}]}
raise ValueError('Fake Patchwork does not understand: %s' % subpath)
@unittest.skipIf(not HAVE_PYGIT2, 'Missing python3-pygit2')
def testStatusMismatch(self):
"""Test Patchwork patches not matching the series"""
series = Series()
@ -652,7 +644,6 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
self.assertIn('Warning: Patchwork reports 1 patches, series has 0',
err.getvalue())
@unittest.skipIf(not HAVE_PYGIT2, 'Missing python3-pygit2')
def testStatusReadPatch(self):
"""Test handling a single patch in Patchwork"""
series = Series()
@ -665,7 +656,6 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
self.assertEqual('1', patch.id)
self.assertEqual('Some patch', patch.raw_subject)
@unittest.skipIf(not HAVE_PYGIT2, 'Missing python3-pygit2')
def testParseSubject(self):
"""Test parsing of the patch subject"""
patch = status.Patch('1')
@ -728,7 +718,6 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
self.assertEqual('RESEND', patch.prefix)
self.assertEqual(None, patch.version)
@unittest.skipIf(not HAVE_PYGIT2, 'Missing python3-pygit2')
def testCompareSeries(self):
"""Test operation of compare_with_series()"""
commit1 = Commit('abcd')
@ -831,7 +820,6 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
return patch.comments
raise ValueError('Fake Patchwork does not understand: %s' % subpath)
@unittest.skipIf(not HAVE_PYGIT2, 'Missing python3-pygit2')
def testFindNewResponses(self):
"""Test operation of find_new_responses()"""
commit1 = Commit('abcd')
@ -970,7 +958,6 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
return patch.comments
raise ValueError('Fake Patchwork does not understand: %s' % subpath)
@unittest.skipIf(not HAVE_PYGIT2, 'Missing python3-pygit2')
def testCreateBranch(self):
"""Test operation of create_branch()"""
repo = self.make_git_tree()
@ -1058,7 +1045,6 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
self.assertEqual('Reviewed-by: %s' % self.mary, next(lines))
self.assertEqual('Tested-by: %s' % self.leb, next(lines))
@unittest.skipIf(not HAVE_PYGIT2, 'Missing python3-pygit2')
def testParseSnippets(self):
"""Test parsing of review snippets"""
text = '''Hi Fred,
@ -1142,7 +1128,6 @@ line8
'line2', 'line3', 'line4', 'line5', 'line6', 'line7', 'line8']],
pstrm.snippets)
@unittest.skipIf(not HAVE_PYGIT2, 'Missing python3-pygit2')
def testReviewSnippets(self):
"""Test showing of review snippets"""
def _to_submitter(who):