Merge tag 'dm-pull5nov20' of git://git.denx.de/u-boot-dm

patman status subcommand to collect tags from Patchwork
patman showing email replies from Patchwork
sandbox poweroff command
minor fixes in binman, tests
Tom Rini 2020-11-06 11:27:14 -05:00
commit 22ad69b798
37 changed files with 2294 additions and 389 deletions

@ -140,7 +140,7 @@ jobs:
export USER=azure
virtualenv -p /usr/bin/python3 /tmp/venv
. /tmp/venv/bin/activate
pip install pyelftools pytest
pip install pyelftools pytest pygit2
export UBOOT_TRAVIS_BUILD_DIR=/tmp/sandbox_spl
export PYTHONPATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc/pylibfdt
export PATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc:${PATH}

@ -161,7 +161,7 @@ Run binman, buildman, dtoc, Kconfig and patman testsuites:
export USER=gitlab;
virtualenv -p /usr/bin/python3 /tmp/venv;
. /tmp/venv/bin/activate;
pip install pyelftools pytest;
pip install pyelftools pytest pygit2;
export UBOOT_TRAVIS_BUILD_DIR=/tmp/sandbox_spl;
export PYTHONPATH="${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc/pylibfdt";
export PATH="${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc:${PATH}";

@ -26,6 +26,7 @@ addons:
- python3-sphinx
- python3-virtualenv
- python3-pip
- python3-pygit2
- swig
- libpython-dev
- iasl

@ -92,6 +92,7 @@ config SANDBOX
bool "Sandbox"
select BOARD_LATE_INIT
select BZIP2
select CMD_POWEROFF
select DM
select DM_GPIO
select DM_I2C
@ -107,7 +108,7 @@ config SANDBOX
select PCI_ENDPOINT
select SPI
select SUPPORT_OF_CONTROL
select SYSRESET_CMD_POWEROFF if CMD_POWEROFF
select SYSRESET_CMD_POWEROFF
imply BITREVERSE
select BLOBLIST
imply CMD_DM

@ -53,7 +53,7 @@ int sandbox_eth_raw_os_is_local(const char *ifname)
}
ret = !!(ifr.ifr_flags & IFF_LOOPBACK);
out:
close(fd);
os_close(fd);
return ret;
}
@ -220,7 +220,7 @@ int sandbox_eth_raw_os_send(void *packet, int length,
struct sockaddr_in addr;
if (priv->local_bind_sd != -1)
close(priv->local_bind_sd);
os_close(priv->local_bind_sd);
/* A normal UDP socket is required to bind */
priv->local_bind_sd = socket(AF_INET, SOCK_DGRAM, 0);
@ -284,11 +284,11 @@ void sandbox_eth_raw_os_stop(struct eth_sandbox_raw_priv *priv)
{
free(priv->device);
priv->device = NULL;
close(priv->sd);
os_close(priv->sd);
priv->sd = -1;
if (priv->local) {
if (priv->local_bind_sd != -1)
close(priv->local_bind_sd);
os_close(priv->local_bind_sd);
priv->local_bind_sd = -1;
priv->local_bind_udp_port = 0;
}

@ -80,13 +80,21 @@ int os_open(const char *pathname, int os_flags)
flags |= O_CREAT;
if (os_flags & OS_O_TRUNC)
flags |= O_TRUNC;
/*
* During a cold reset execv() is used to relaunch the U-Boot binary.
* We must ensure that all files are closed in this case.
*/
flags |= O_CLOEXEC;
return open(pathname, flags, 0777);
}
int os_close(int fd)
{
return close(fd);
/* Do not close the console input */
if (fd)
return close(fd);
return -1;
}
int os_unlink(const char *pathname)
@ -814,3 +822,9 @@ void *os_find_text_base(void)
return base;
}
void os_relaunch(char *argv[])
{
execv(argv[0], argv);
os_exit(1);
}
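
The O_CLOEXEC comment above is what makes the execv()-based cold reset safe:
descriptors opened through os_open() are closed automatically across the
re-exec, while anything opened without the flag (notably stdin, stdout and
stderr) survives it. A minimal standalone sketch of that behaviour, assuming
only POSIX and not taken from this patch:

#include <fcntl.h>
#include <stdio.h>
#include <unistd.h>

/* Open one fd with O_CLOEXEC and one without, re-exec ourselves and
 * report which of the two survived the exec. */
int main(int argc, char *argv[])
{
	int keep, drop;

	if (argc == 1) {
		char arg[32];

		keep = open("/dev/null", O_RDONLY);
		drop = open("/dev/null", O_RDONLY | O_CLOEXEC);
		snprintf(arg, sizeof(arg), "%d:%d", keep, drop);
		execl(argv[0], argv[0], arg, (char *)NULL);	/* relaunch */
		return 1;	/* only reached if the exec failed */
	}
	if (sscanf(argv[1], "%d:%d", &keep, &drop) != 2)
		return 1;
	printf("plain fd still open:     %s\n",
	       fcntl(keep, F_GETFD) != -1 ? "yes" : "no");
	printf("O_CLOEXEC fd still open: %s\n",
	       fcntl(drop, F_GETFD) != -1 ? "yes" : "no");
	return 0;
}

The first line prints 'yes' and the second 'no', which is the property the
sandbox relies on when it relaunches the binary during a cold reset.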

@ -5,6 +5,7 @@
#include <common.h>
#include <command.h>
#include <dm/root.h>
#include <errno.h>
#include <init.h>
#include <os.h>
@ -19,6 +20,8 @@
DECLARE_GLOBAL_DATA_PTR;
static char **os_argv;
/* Compare two options so that they can be sorted into alphabetical order */
static int h_compare_opt(const void *p1, const void *p2)
{
@ -403,12 +406,35 @@ void state_show(struct sandbox_state *state)
printf("\n");
}
void sandbox_reset(void)
{
/* Do this here while it still has an effect */
os_fd_restore();
if (state_uninit())
os_exit(2);
if (dm_uninit())
os_exit(2);
/* Restart U-Boot */
os_relaunch(os_argv);
}
int main(int argc, char *argv[])
{
struct sandbox_state *state;
gd_t data;
int ret;
/*
* Copy argv[] so that we can pass the arguments in the original
* sequence when resetting the sandbox.
*/
os_argv = calloc(argc + 1, sizeof(char *));
if (!os_argv)
os_exit(1);
memcpy(os_argv, argv, sizeof(char *) * (argc + 1));
memset(&data, '\0', sizeof(data));
gd = &data;
gd->arch.text_base = os_find_text_base();

@ -358,6 +358,7 @@ void state_reset_for_test(struct sandbox_state *state)
/* No reset yet, so mark it as such. Always allow power reset */
state->last_sysreset = SYSRESET_COUNT;
state->sysreset_allowed[SYSRESET_POWER_OFF] = true;
state->sysreset_allowed[SYSRESET_COLD] = true;
state->allow_memio = false;
memset(&state->wdt, '\0', sizeof(state->wdt));

@ -84,6 +84,16 @@ void sandbox_set_enable_pci_map(int enable);
*/
int sandbox_read_fdt_from_file(void);
/**
* sandbox_reset() - reset sandbox
*
* This function implements the cold reboot of the sandbox. It relaunches the
* U-Boot binary with the same command line parameters as the original call.
* The PID of the process stays the same. All file descriptors that have not
* been opened with O_CLOEXEC stay open, including stdin, stdout and stderr.
*/
void sandbox_reset(void);
/* Exit sandbox (quit U-Boot) */
void sandbox_exit(void);

@ -12,6 +12,7 @@ U-Boot API documentation
linker_lists
pinctrl
rng
sandbox
serial
timer
unicode

doc/api/sandbox.rst (new file, 9 lines added)

@ -0,0 +1,9 @@
.. SPDX-License-Identifier: GPL-2.0+
Sandbox
=======
The following API routines are used to implement the U-Boot sandbox.
.. kernel-doc:: include/os.h
:internal:

@ -47,15 +47,35 @@ static int check_for_keys(struct udevice *dev, struct key_matrix_key *keys,
struct key_matrix_key *key;
static struct mbkp_keyscan last_scan;
static bool last_scan_valid;
struct mbkp_keyscan scan;
struct ec_response_get_next_event event;
struct mbkp_keyscan *scan = (struct mbkp_keyscan *)
&event.data.key_matrix;
unsigned int row, col, bit, data;
int num_keys;
int ret;
if (cros_ec_scan_keyboard(dev->parent, &scan)) {
debug("%s: keyboard scan failed\n", __func__);
/* Get pending MKBP event. It may not be a key matrix event. */
do {
ret = cros_ec_get_next_event(dev->parent, &event);
/* The EC has no events for us at this time. */
if (ret == -EC_RES_UNAVAILABLE)
return -EIO;
else if (ret)
break;
} while (event.event_type != EC_MKBP_EVENT_KEY_MATRIX);
/* Try the old command if the EC doesn't support the above. */
if (ret == -EC_RES_INVALID_COMMAND) {
if (cros_ec_scan_keyboard(dev->parent, scan)) {
debug("%s: keyboard scan failed\n", __func__);
return -EIO;
}
} else if (ret) {
debug("%s: Error getting next MKBP event. (%d)\n",
__func__, ret);
return -EIO;
}
*samep = last_scan_valid && !memcmp(&last_scan, &scan, sizeof(scan));
*samep = last_scan_valid && !memcmp(&last_scan, scan, sizeof(*scan));
/*
* This is a bit odd. The EC has no way to tell us that it has run
@ -64,14 +84,14 @@ static int check_for_keys(struct udevice *dev, struct key_matrix_key *keys,
* that this scan is the same as the last.
*/
last_scan_valid = true;
memcpy(&last_scan, &scan, sizeof(last_scan));
memcpy(&last_scan, scan, sizeof(last_scan));
for (col = num_keys = bit = 0; col < priv->matrix.num_cols;
col++) {
for (row = 0; row < priv->matrix.num_rows; row++) {
unsigned int mask = 1 << (bit & 7);
data = scan.data[bit / 8];
data = scan->data[bit / 8];
if ((data & mask) && num_keys < max_count) {
key = keys + num_keys++;
key->row = row;

@ -415,6 +415,21 @@ int cros_ec_scan_keyboard(struct udevice *dev, struct mbkp_keyscan *scan)
return 0;
}
int cros_ec_get_next_event(struct udevice *dev,
struct ec_response_get_next_event *event)
{
int ret;
ret = ec_command(dev, EC_CMD_GET_NEXT_EVENT, 0, NULL, 0,
event, sizeof(*event));
if (ret < 0)
return ret;
else if (ret != sizeof(*event))
return -EC_RES_INVALID_RESPONSE;
return 0;
}
int cros_ec_read_id(struct udevice *dev, char *id, int maxlen)
{
struct ec_response_get_version *r;

@ -56,6 +56,9 @@ static int sandbox_sysreset_request(struct udevice *dev, enum sysreset_t type)
switch (type) {
case SYSRESET_COLD:
state->last_sysreset = type;
if (!state->sysreset_allowed[type])
return -EACCES;
sandbox_reset();
break;
case SYSRESET_POWER_OFF:
state->last_sysreset = type;

@ -16,7 +16,7 @@
#ifdef CONFIG_BINMAN
/**
* binman_symname() - Internal fnuction to get a binman symbol name
* binman_symname() - Internal function to get a binman symbol name
*
* @entry_name: Name of the entry to look for (e.g. 'u_boot_spl')
* @_prop_name: Property value to get from that entry (e.g. 'pos')

@ -82,6 +82,17 @@ int cros_ec_read_id(struct udevice *dev, char *id, int maxlen);
*/
int cros_ec_scan_keyboard(struct udevice *dev, struct mbkp_keyscan *scan);
/**
* Get the next pending MKBP event from the ChromeOS EC device.
*
* Send a message requesting the next event and return the result.
*
* @param event Place to put the event.
* @return 0 if ok, <0 on error.
*/
int cros_ec_get_next_event(struct udevice *dev,
struct ec_response_get_next_event *event);
/**
* Read which image is currently running on the CROS-EC device.
*
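
A caller of cros_ec_get_next_event() is expected to drain events until it
finds the type it wants, falling back to cros_ec_scan_keyboard() on ECs that
do not know EC_CMD_GET_NEXT_EVENT. A minimal sketch of that pattern, mirroring
the cros_ec_keyb.c change earlier in this diff (the usual U-Boot headers and a
valid cros-ec 'dev' are assumed; this is not code from the patch):

static int fetch_key_matrix(struct udevice *dev, struct mbkp_keyscan *scan)
{
	struct ec_response_get_next_event event;
	int ret;

	/* Drain pending MKBP events until a key-matrix event arrives */
	do {
		ret = cros_ec_get_next_event(dev, &event);
		if (ret == -EC_RES_UNAVAILABLE)
			return -EIO;	/* nothing pending right now */
		else if (ret)
			break;		/* error, or command not supported */
	} while (event.event_type != EC_MKBP_EVENT_KEY_MATRIX);

	/* Older ECs only support the plain keyscan command */
	if (ret == -EC_RES_INVALID_COMMAND)
		return cros_ec_scan_keyboard(dev, scan) ? -EIO : 0;
	else if (ret)
		return ret;

	memcpy(scan, &event.data.key_matrix, sizeof(*scan));
	return 0;
}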

@ -19,30 +19,30 @@ struct sandbox_state;
/**
* Access to the OS read() system call
*
* \param fd File descriptor as returned by os_open()
* \param buf Buffer to place data
* \param count Number of bytes to read
* \return number of bytes read, or -1 on error
* @fd: File descriptor as returned by os_open()
* @buf: Buffer to place data
* @count: Number of bytes to read
* Return: number of bytes read, or -1 on error
*/
ssize_t os_read(int fd, void *buf, size_t count);
/**
* Access to the OS write() system call
*
* \param fd File descriptor as returned by os_open()
* \param buf Buffer containing data to write
* \param count Number of bytes to write
* \return number of bytes written, or -1 on error
* @fd: File descriptor as returned by os_open()
* @buf: Buffer containing data to write
* @count: Number of bytes to write
* Return: number of bytes written, or -1 on error
*/
ssize_t os_write(int fd, const void *buf, size_t count);
/**
* Access to the OS lseek() system call
*
* \param fd File descriptor as returned by os_open()
* \param offset File offset (based on whence)
* \param whence Position offset is relative to (see below)
* \return new file offset
* @fd: File descriptor as returned by os_open()
* @offset: File offset (based on whence)
* @whence: Position offset is relative to (see below)
* Return: new file offset
*/
off_t os_lseek(int fd, off_t offset, int whence);
@ -54,9 +54,9 @@ off_t os_lseek(int fd, off_t offset, int whence);
/**
* Access to the OS open() system call
*
* \param pathname Pathname of file to open
* \param flags Flags, like OS_O_RDONLY, OS_O_RDWR
* \return file descriptor, or -1 on error
* @pathname: Pathname of file to open
* @flags: Flags, like OS_O_RDONLY, OS_O_RDWR
* Return: file descriptor, or -1 on error
*/
int os_open(const char *pathname, int flags);
@ -68,42 +68,42 @@ int os_open(const char *pathname, int flags);
#define OS_O_TRUNC 01000
/**
* Access to the OS close() system call
* os_close() - access to the OS close() system call
*
* \param fd File descriptor to close
* \return 0 on success, -1 on error
* @fd: File descriptor to close
* Return: 0 on success, -1 on error
*/
int os_close(int fd);
/**
* Access to the OS unlink() system call
* os_unlink() - access to the OS unlink() system call
*
* \param pathname Path of file to delete
* \return 0 for success, other for error
* @pathname: Path of file to delete
* Return: 0 for success, other for error
*/
int os_unlink(const char *pathname);
/**
* Access to the OS exit() system call
* os_exit() - access to the OS exit() system call
*
* This exits with the supplied return code, which should be 0 to indicate
* success.
*
* @param exit_code exit code for U-Boot
* @exit_code: exit code for U-Boot
*/
void os_exit(int exit_code) __attribute__((noreturn));
/**
* Put tty into raw mode to mimic serial console better
* os_tty_raw() - put tty into raw mode to mimic serial console better
*
* @param fd File descriptor of stdin (normally 0)
* @param allow_sigs Allow Ctrl-C, Ctrl-Z to generate signals rather than
* be handled by U-Boot
* @fd: File descriptor of stdin (normally 0)
* @allow_sigs: Allow Ctrl-C, Ctrl-Z to generate signals rather than
* be handled by U-Boot
*/
void os_tty_raw(int fd, bool allow_sigs);
/**
* Restore the tty to its original mode
* os_fd_restore() - restore the tty to its original mode
*
* Call this to restore the original terminal mode, after it has been changed
* by os_tty_raw(). This is an internal function.
@ -111,144 +111,180 @@ void os_tty_raw(int fd, bool allow_sigs);
void os_fd_restore(void);
/**
* Acquires some memory from the underlying os.
* os_malloc() - acquires some memory from the underlying OS.
*
* \param length Number of bytes to be allocated
* \return Pointer to length bytes or NULL on error
* @length: Number of bytes to be allocated
* Return: Pointer to length bytes or NULL on error
*/
void *os_malloc(size_t length);
/**
* Free memory previous allocated with os_malloc()
* os_free() - free memory previous allocated with os_malloc()
*
* This returns the memory to the OS.
*
* \param ptr Pointer to memory block to free
* @ptr: Pointer to memory block to free
*/
void os_free(void *ptr);
/**
* Access to the usleep function of the os
* os_usleep() - access to the usleep function of the os
*
* \param usec Time to sleep in micro seconds
* @usec: time to sleep in micro seconds
*/
void os_usleep(unsigned long usec);
/**
* Gets a monotonic increasing number of nano seconds from the OS
*
* \return A monotonic increasing time scaled in nano seconds
* Return: a monotonic increasing time scaled in nano seconds
*/
uint64_t os_get_nsec(void);
/**
* Parse arguments and update sandbox state.
*
* @param state Sandbox state to update
* @param argc Argument count
* @param argv Argument vector
* @return 0 if ok, and program should continue;
* 1 if ok, but program should stop;
* -1 on error: program should terminate.
* @state: sandbox state to update
* @argc: argument count
* @argv: argument vector
* Return:
* * 0 if ok, and program should continue
* * 1 if ok, but program should stop
* * -1 on error: program should terminate
*/
int os_parse_args(struct sandbox_state *state, int argc, char *argv[]);
/*
* enum os_dirent_t - type of directory entry
*
* Types of directory entry that we support. See also os_dirent_typename in
* the C file.
*/
enum os_dirent_t {
OS_FILET_REG, /* Regular file */
OS_FILET_LNK, /* Symbolic link */
OS_FILET_DIR, /* Directory */
OS_FILET_UNKNOWN, /* Something else */
/**
* @OS_FILET_REG: regular file
*/
OS_FILET_REG,
/**
* @OS_FILET_LNK: symbolic link
*/
OS_FILET_LNK,
/**
* @OS_FILET_DIR: directory
*/
OS_FILET_DIR,
/**
* @OS_FILET_UNKNOWN: something else
*/
OS_FILET_UNKNOWN,
/**
* @OS_FILET_COUNT: number of directory entry types
*/
OS_FILET_COUNT,
};
/** A directory entry node, containing information about a single dirent */
/**
* struct os_dirent_node - directory node
*
* A directory entry node, containing information about a single dirent
*
*/
struct os_dirent_node {
struct os_dirent_node *next; /* Pointer to next node, or NULL */
ulong size; /* Size of file in bytes */
enum os_dirent_t type; /* Type of entry */
char name[0]; /* Name of entry */
/**
* @next: pointer to next node, or NULL
*/
struct os_dirent_node *next;
/**
* @size: size of file in bytes
*/
ulong size;
/**
* @type: type of entry
*/
enum os_dirent_t type;
/**
* @name: name of entry
*/
char name[0];
};
/**
* Get a directionry listing
* os_dirent_ls() - get a directory listing
*
* This allocates and returns a linked list containing the directory listing.
*
* @param dirname Directory to examine
* @param headp Returns pointer to head of linked list, or NULL if none
* @return 0 if ok, -ve on error
* @dirname: directory to examine
* @headp: on return pointer to head of linked list, or NULL if none
* Return: 0 if ok, -ve on error
*/
int os_dirent_ls(const char *dirname, struct os_dirent_node **headp);
/**
* Free directory list
* os_dirent_free() - free directory list
*
* This frees a linked list containing a directory listing.
*
* @param node Pointer to head of linked list
* @node: pointer to head of linked list
*/
void os_dirent_free(struct os_dirent_node *node);
/**
* Get the name of a directory entry type
* os_dirent_get_typename() - get the name of a directory entry type
*
* @param type Type to check
* @return string containing the name of that type, or "???" if none/invalid
* @type: type to check
* Return:
* string containing the name of that type,
* or "???" if none/invalid
*/
const char *os_dirent_get_typename(enum os_dirent_t type);
/**
* Get the size of a file
* os_get_filesize() - get the size of a file
*
* @param fname Filename to check
* @param size size of file is returned if no error
* @return 0 on success or -1 if an error ocurred
* @fname: filename to check
* @size: size of file is returned if no error
* Return: 0 on success or -1 if an error occurred
*/
int os_get_filesize(const char *fname, loff_t *size);
/**
* Write a character to the controlling OS terminal
* os_putc() - write a character to the controlling OS terminal
*
* This bypasses the U-Boot console support and writes directly to the OS
* stdout file descriptor.
*
* @param ch Character to write
* @ch: character to write
*/
void os_putc(int ch);
/**
* Write a string to the controlling OS terminal
* os_puts() - write a string to the controlling OS terminal
*
* This bypasses the U-Boot console support and writes directly to the OS
* stdout file descriptor.
*
* @param str String to write (note that \n is not appended)
* @str: string to write (note that \n is not appended)
*/
void os_puts(const char *str);
/**
* Write the sandbox RAM buffer to a existing file
* os_write_ram_buf() - write the sandbox RAM buffer to a existing file
*
* @param fname Filename to write memory to (simple binary format)
* @return 0 if OK, -ve on error
* @fname: filename to write memory to (simple binary format)
* Return: 0 if OK, -ve on error
*/
int os_write_ram_buf(const char *fname);
/**
* Read the sandbox RAM buffer from an existing file
* os_read_ram_buf() - read the sandbox RAM buffer from an existing file
*
* @param fname Filename containing memory (simple binary format)
* @return 0 if OK, -ve on error
* @fname: filename containing memory (simple binary format)
* Return: 0 if OK, -ve on error
*/
int os_read_ram_buf(const char *fname);
/**
* Jump to a new executable image
* os_jump_to_image() - jump to a new executable image
*
* This uses exec() to run a new executable image, after putting it in a
* temporary file. The same arguments and environment are passed to this
@ -261,22 +297,23 @@ int os_read_ram_buf(const char *fname);
* have access to this. It also means that the original
* memory filename passed to U-Boot will be left intact.
*
* @param dest Buffer containing executable image
* @param size Size of buffer
* @dest: buffer containing executable image
* @size: size of buffer
* Return: 0 if OK, -ve on error
*/
int os_jump_to_image(const void *dest, int size);
/**
* os_find_u_boot() - Determine the path to U-Boot proper
* os_find_u_boot() - determine the path to U-Boot proper
*
* This function is intended to be called from within sandbox SPL. It uses
* a few heuristics to find U-Boot proper. Normally it is either in the same
* directory, or the directory above (since u-boot-spl is normally in an
* spl/ subdirectory when built).
*
* @fname: Place to put full path to U-Boot
* @maxlen: Maximum size of @fname
* @return 0 if OK, -NOSPC if the filename is too large, -ENOENT if not found
* @fname: place to put full path to U-Boot
* @maxlen: maximum size of @fname
* Return: 0 if OK, -NOSPC if the filename is too large, -ENOENT if not found
*/
int os_find_u_boot(char *fname, int maxlen);
@ -286,23 +323,23 @@ int os_find_u_boot(char *fname, int maxlen);
* When called from SPL, this runs U-Boot proper. The filename is obtained by
* calling os_find_u_boot().
*
* @fname: Full pathname to U-Boot executable
* @return 0 if OK, -ve on error
* @fname: full pathname to U-Boot executable
* Return: 0 if OK, -ve on error
*/
int os_spl_to_uboot(const char *fname);
/**
* Read the current system time
* os_localtime() - read the current system time
*
* This reads the current Local Time and places it into the provided
* structure.
*
* @param rt Place to put system time
* @rt: place to put system time
*/
void os_localtime(struct rtc_time *rt);
/**
* os_abort() - Raise SIGABRT to exit sandbox (e.g. to debugger)
* os_abort() - raise SIGABRT to exit sandbox (e.g. to debugger)
*/
void os_abort(void);
@ -313,12 +350,12 @@ void os_abort(void);
*
* @start: Region start
* @len: Region length in bytes
* @return 0 if OK, -1 on error from mprotect()
* Return: 0 if OK, -1 on error from mprotect()
*/
int os_mprotect_allow(void *start, size_t len);
/**
* os_write_file() - Write a file to the host filesystem
* os_write_file() - write a file to the host filesystem
*
* This can be useful when debugging for writing data out of sandbox for
* inspection by external tools.
@ -326,7 +363,7 @@ int os_mprotect_allow(void *start, size_t len);
* @name: File path to write to
* @buf: Data to write
* @size: Size of data to write
* @return 0 if OK, -ve on error
* Return: 0 if OK, -ve on error
*/
int os_write_file(const char *name, const void *buf, int size);
@ -340,7 +377,7 @@ int os_write_file(const char *name, const void *buf, int size);
* @name: File path to read from
* @bufp: Returns buffer containing data read
* @sizep: Returns size of data
* @return 0 if OK, -ve on error
* Return: 0 if OK, -ve on error
*/
int os_read_file(const char *name, void **bufp, int *sizep);
@ -351,8 +388,23 @@ int os_read_file(const char *name, void **bufp, int *sizep);
* It can be useful to map the address of functions to the address listed in
* the u-boot.map file.
*
* @return address if found, else NULL
* Return: address if found, else NULL
*/
void *os_find_text_base(void);
/**
* os_relaunch() - restart the sandbox
*
* This function is used to implement the cold reboot of the sandbox.
* @argv\[0] specifies the binary that is started, while the calling process
* stops immediately. If the new binary cannot be started, the process is
* terminated and 1 is set as the shell return code.
*
* The PID of the process stays the same. All file descriptors that have not
* been opened with O_CLOEXEC stay open including stdin, stdout, stderr.
*
* @argv: NULL terminated list of command line parameters
*/
void os_relaunch(char *argv[]);
#endif
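
Since include/os.h now feeds the generated API documentation
(doc/api/sandbox.rst above), a short usage sketch of the directory helpers may
help readers. It assumes sandbox U-Boot, where printf() and the os_*()
functions declared above are available, and is not part of the patch:

/* List a host directory using the sandbox OS helpers declared above */
static int show_host_dir(const char *path)
{
	struct os_dirent_node *head, *node;
	int ret;

	ret = os_dirent_ls(path, &head);	/* allocates a linked list */
	if (ret)
		return ret;

	for (node = head; node; node = node->next)
		printf("%-4s %8lu %s\n",
		       os_dirent_get_typename(node->type),
		       node->size, node->name);

	os_dirent_free(head);			/* frees the whole list */

	return 0;
}

Calling show_host_dir("/tmp") prints one line per entry with its type, size
and name; "/tmp" is only an example path.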

@ -103,7 +103,7 @@ int binman_init(void)
return log_msg_ret("first image", -ENOENT);
binman->image = node;
}
binman->rom_offset = ROM_OFFSET_NONE;
binman_set_rom_offset(ROM_OFFSET_NONE);
return 0;
}

@ -50,6 +50,15 @@ config UT_LIB_RSA
endif
config UT_COMPRESSION
bool "Unit test for compression"
depends on UNIT_TEST
depends on CMDLINE && GZIP_COMPRESSED && BZIP2 && LZMA && LZO && LZ4
default y
help
Enables tests for compression and decompression routines for simple
sanity and for buffer overflow conditions.
config UT_LOG
bool "Unit tests for logging functions"
depends on UNIT_TEST

@ -2,11 +2,13 @@
#
# (C) Copyright 2012 The Chromium Authors
ifneq ($(CONFIG_SANDBOX),)
obj-$(CONFIG_$(SPL_)CMDLINE) += bloblist.o
endif
obj-$(CONFIG_$(SPL_)CMDLINE) += cmd/
obj-$(CONFIG_$(SPL_)CMDLINE) += cmd_ut.o
obj-$(CONFIG_$(SPL_)CMDLINE) += command_ut.o
obj-$(CONFIG_$(SPL_)CMDLINE) += compression.o
obj-$(CONFIG_$(SPL_)UT_COMPRESSION) += compression.o
obj-y += dm/
obj-$(CONFIG_$(SPL_)CMDLINE) += print_ut.o
obj-$(CONFIG_$(SPL_)CMDLINE) += str_ut.o

@ -37,7 +37,9 @@ static int dm_test_sysreset_base(struct unit_test_state *uts)
/* Device 2 is the cold sysreset device */
ut_assertok(uclass_get_device(UCLASS_SYSRESET, 2, &dev));
ut_asserteq(-ENOSYS, sysreset_request(dev, SYSRESET_WARM));
state->sysreset_allowed[SYSRESET_COLD] = false;
ut_asserteq(-EACCES, sysreset_request(dev, SYSRESET_COLD));
state->sysreset_allowed[SYSRESET_COLD] = true;
state->sysreset_allowed[SYSRESET_POWER] = false;
ut_asserteq(-EACCES, sysreset_request(dev, SYSRESET_POWER));
state->sysreset_allowed[SYSRESET_POWER] = true;
@ -71,22 +73,25 @@ static int dm_test_sysreset_walk(struct unit_test_state *uts)
struct sandbox_state *state = state_get_current();
/* If we generate a power sysreset, we will exit sandbox! */
state->sysreset_allowed[SYSRESET_WARM] = false;
state->sysreset_allowed[SYSRESET_COLD] = false;
state->sysreset_allowed[SYSRESET_POWER] = false;
state->sysreset_allowed[SYSRESET_POWER_OFF] = false;
ut_asserteq(-EACCES, sysreset_walk(SYSRESET_WARM));
ut_asserteq(-EACCES, sysreset_walk(SYSRESET_COLD));
ut_asserteq(-EACCES, sysreset_walk(SYSRESET_POWER));
ut_asserteq(-EACCES, sysreset_walk(SYSRESET_POWER_OFF));
/*
* Enable cold system reset - this should make cold system reset work,
* plus a warm system reset should be promoted to cold, since this is
* the next step along.
*/
state->sysreset_allowed[SYSRESET_COLD] = true;
state->sysreset_allowed[SYSRESET_WARM] = true;
ut_asserteq(-EINPROGRESS, sysreset_walk(SYSRESET_WARM));
ut_asserteq(-EINPROGRESS, sysreset_walk(SYSRESET_COLD));
ut_asserteq(-EACCES, sysreset_walk(SYSRESET_COLD));
ut_asserteq(-EACCES, sysreset_walk(SYSRESET_POWER));
state->sysreset_allowed[SYSRESET_COLD] = false;
state->sysreset_allowed[SYSRESET_COLD] = true;
state->sysreset_allowed[SYSRESET_POWER] = true;
return 0;

@ -6,11 +6,11 @@ import pytest
import signal
@pytest.mark.boardspec('sandbox')
@pytest.mark.buildconfigspec('sysreset')
def test_reset(u_boot_console):
"""Test that the "reset" command exits sandbox process."""
@pytest.mark.buildconfigspec('sysreset_cmd_poweroff')
def test_poweroff(u_boot_console):
"""Test that the "poweroff" command exits sandbox process."""
u_boot_console.run_command('reset', wait_for_prompt=False)
u_boot_console.run_command('poweroff', wait_for_prompt=False)
assert(u_boot_console.validate_exited())
@pytest.mark.boardspec('sandbox')

@ -245,7 +245,7 @@ class Entry(object):
state.SetInt(self._node, 'size', self.size)
base = self.section.GetRootSkipAtStart() if self.section else 0
if self.image_pos is not None:
state.SetInt(self._node, 'image-pos', self.image_pos)
state.SetInt(self._node, 'image-pos', self.image_pos - base)
if self.GetImage().allow_repack:
if self.orig_offset is not None:
state.SetInt(self._node, 'orig-offset', self.orig_offset, True)
@ -456,6 +456,22 @@ class Entry(object):
self.Detail('GetData: size %s' % ToHexSize(self.data))
return self.data
def GetPaddedData(self, data=None):
"""Get the data for an entry including any padding
Gets the entry data and uses its section's pad-byte value to add padding
before and after as defined by the pad-before and pad-after properties.
This does not consider alignment.
Returns:
Contents of the entry along with any pad bytes before and
after it (bytes)
"""
if data is None:
data = self.GetData()
return self.section.GetPaddedDataForEntry(self, data)
def GetOffsets(self):
"""Get the offsets for siblings

@ -71,7 +71,7 @@ class Entry_intel_ifwi(Entry_blob_ext):
for entry in self._ifwi_entries.values():
# First get the input data and put it in a file
data = entry.GetData()
data = entry.GetPaddedData()
uniq = self.GetUniqueName()
input_fname = tools.GetOutputFilename('input.%s' % uniq)
tools.WriteFile(input_fname, data)

@ -276,14 +276,14 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None,
options.branch)
upstream_commit = gitutil.GetUpstream(options.git_dir,
options.branch)
series = patchstream.GetMetaDataForList(upstream_commit,
series = patchstream.get_metadata_for_list(upstream_commit,
options.git_dir, 1, series=None, allow_overwrite=True)
series = patchstream.GetMetaDataForList(range_expr,
series = patchstream.get_metadata_for_list(range_expr,
options.git_dir, None, series, allow_overwrite=True)
else:
# Honour the count
series = patchstream.GetMetaDataForList(options.branch,
series = patchstream.get_metadata_for_list(options.branch,
options.git_dir, count, series=None, allow_overwrite=True)
else:
series = None

@ -11,9 +11,14 @@ This tool is a Python script which:
- Runs the patches through checkpatch.pl and its own checks
- Optionally emails them out to selected people
It also has some Patchwork features:
- shows review tags from Patchwork so you can update your local patches
- pulls these down into a new branch on request
- lists comments received on a series
It is intended to automate patch creation and make it a less
error-prone process. It is useful for U-Boot and Linux work so far,
since it uses the checkpatch.pl script.
since they use the checkpatch.pl script.
It is configured almost entirely by tags it finds in your commits.
This means that you can work on a number of different branches at
@ -187,6 +192,21 @@ Series-name: name
patman does not yet use it, but it is convenient to put the branch
name here to help you keep track of multiple upstreaming efforts.
Series-links: [id | version:id]...
Set the ID of the series in patchwork. You can set this after you send
out the series and look in patchwork for the resulting series. The
URL you want is the one for the series itself, not any particular patch.
E.g. for http://patchwork.ozlabs.org/project/uboot/list/?series=187331
the series ID is 187331. This property can have a list of series IDs,
one for each version of the series, e.g.
Series-links: 1:187331 2:188434 189372
Patman always uses the one without a version, since it assumes this is
the latest one. When this tag is provided, patman can compare your local
branch against patchwork to see what new reviews your series has
collected ('patman status').
Cover-letter:
This is the patch set title
blah blah
@ -337,6 +357,53 @@ These people will get the cover letter even if they are not on the To/Cc
list for any of the patches.
Patchwork Integration
=====================
Patman has a very basic integration with Patchwork. If you point patman to
your series on patchwork, it can show you what new reviews have appeared since
you sent your series.
To set this up, add a Series-link tag to one of the commits in your series
(see above).
Then you can type
patman status
and patman will show you each patch and what review tags have been collected,
for example:
...
21 x86: mtrr: Update the command to use the new mtrr
Reviewed-by: Wolfgang Wallner <wolfgang.wallner@br-automation.com>
+ Reviewed-by: Bin Meng <bmeng.cn@gmail.com>
22 x86: mtrr: Restructure so command execution is in
Reviewed-by: Wolfgang Wallner <wolfgang.wallner@br-automation.com>
+ Reviewed-by: Bin Meng <bmeng.cn@gmail.com>
...
This shows that patch 21 and 22 were sent out with one review but have since
attracted another review each. If the series needs changes, you can update
these commits with the new review tag before sending the next version of the
series.
To automatically pull these tags into a new branch, use the -d option:
patman status -d mtrr4
This will create a new 'mtrr4' branch which is the same as your current branch
but has the new review tags in it. The tags are added in alphabetic order and
are placed immediately after any existing ack/review/test/fixes tags, or at the
end. You can check that this worked with:
patman -b mtrr4 status
which should show that there are no new responses compared to this new branch.
There is also a -C option to list the comments received for each patch.
Example Work Flow
=================
@ -420,17 +487,33 @@ people on the list don't see your secret info.
Of course patches often attract comments and you need to make some updates.
Let's say one person sent comments and you get an Acked-by: on one patch.
Also, the patch on the list that you were waiting for has been merged,
so you can drop your wip commit. So you resync with upstream:
so you can drop your wip commit.
Take a look on patchwork and find out the URL of the series. This will be
something like http://patchwork.ozlabs.org/project/uboot/list/?series=187331
Add this to a tag in your top commit:
Series-link: http://patchwork.ozlabs.org/project/uboot/list/?series=187331
You can then use patman to collect the Acked-by tag into the correct commit,
creating a new 'version 2' branch for us-cmd:
patman status -d us-cmd2
git checkout us-cmd2
You can look at the comments in Patchwork or with:
patman status -C
Then you can resync with upstream:
git fetch origin (or whatever upstream is called)
git rebase origin/master
and use git rebase -i to edit the commits, dropping the wip one. You add
the ack tag to one commit:
and use git rebase -i to edit the commits, dropping the wip one.
Acked-by: Heiko Schocher <hs@denx.de>
update the Series-cc: in the top commit:
Then update the Series-cc: in the top commit to add the person who reviewed
the v1 series:
Series-cc: bfin, marex, Heiko Schocher <hs@denx.de>
@ -469,7 +552,9 @@ so to send them:
and it will create and send the version 2 series.
General points:
General points
==============
1. When you change back to the us-cmd branch days or weeks later all your
information is still there, safely stored in the commits. You don't need
@ -533,12 +618,10 @@ Most of these are indicated by a TODO in the code.
It would be nice if this could handle the In-reply-to side of things.
The tests are incomplete, as is customary. Use the --test flag to run them,
and make sure you are in the tools/patman directory first:
The tests are incomplete, as is customary. Use the 'test' subcommand to run
them:
$ cd /path/to/u-boot
$ cd tools/patman
$ ./patman --test
$ tools/patman/patman test
Error handling doesn't always produce friendly error messages - e.g.
putting an incorrect tag in a commit may provide a confusing message.
@ -551,3 +634,4 @@ a bad thing.
Simon Glass <sjg@chromium.org>
v1, v2, 19-Oct-11
revised v3 24-Nov-11
revised v4 Independence Day 2020, with Patchwork integration

@ -93,8 +93,9 @@ def CheckPatch(fname, verbose=False, show_types=False):
re_error = re.compile('ERROR:%s (.*)' % type_name)
re_warning = re.compile(emacs_prefix + 'WARNING:%s (.*)' % type_name)
re_check = re.compile('CHECK:%s (.*)' % type_name)
re_file = re.compile('#\d+: FILE: ([^:]*):(\d+):')
re_file = re.compile('#(\d+): (FILE: ([^:]*):(\d+):)?')
re_note = re.compile('NOTE: (.*)')
re_new_file = re.compile('new file mode .*')
indent = ' ' * 6
for line in result.stdout.splitlines():
if verbose:
@ -111,8 +112,10 @@ def CheckPatch(fname, verbose=False, show_types=False):
# Skip lines which quote code
if line.startswith(indent):
continue
# Skip code quotes and #<n>
if line.startswith('+') or line.startswith('#'):
# Skip code quotes
if line.startswith('+'):
continue
if re_new_file.match(line):
continue
match = re_stats_full.match(line)
if not match:
@ -150,8 +153,13 @@ def CheckPatch(fname, verbose=False, show_types=False):
item['msg'] = check_match.group(2)
item['type'] = 'check'
elif file_match:
item['file'] = file_match.group(1)
item['line'] = int(file_match.group(2))
err_fname = file_match.group(3)
if err_fname:
item['file'] = err_fname
item['line'] = int(file_match.group(4))
else:
item['file'] = '<patch>'
item['line'] = int(file_match.group(1))
elif subject_match:
item['file'] = '<patch subject>'
item['line'] = None

@ -27,6 +27,7 @@ class Commit:
rtags: Response tags (e.g. Reviewed-by) collected by the commit, dict:
key: rtag type (e.g. 'Reviewed-by')
value: Set of people who gave that rtag, each a name/email string
warn: List of warnings for this commit, each a str
"""
def __init__(self, hash):
self.hash = hash
@ -38,6 +39,10 @@ class Commit:
self.notes = []
self.change_id = None
self.rtags = collections.defaultdict(set)
self.warn = []
def __str__(self):
return self.subject
def AddChange(self, version, info):
"""Add a new change line to the change list for a version.

@ -54,14 +54,14 @@ def prepare_patches(col, branch, count, start, end, ignore_binary):
# Read the metadata from the commits
to_do = count - end
series = patchstream.GetMetaData(branch, start, to_do)
series = patchstream.get_metadata(branch, start, to_do)
cover_fname, patch_files = gitutil.CreatePatches(
branch, start, to_do, ignore_binary, series)
# Fix up the patch files to our liking, and insert the cover letter
patchstream.FixPatches(series, patch_files)
patchstream.fix_patches(series, patch_files)
if cover_fname and series.get('cover'):
patchstream.InsertCoverLetter(cover_fname, series, to_do)
patchstream.insert_cover_letter(cover_fname, series, to_do)
return series, cover_fname, patch_files
def check_patches(series, patch_files, run_checkpatch, verbose):
@ -170,9 +170,62 @@ def send(args):
ok = ok and gitutil.CheckSuppressCCConfig()
its_a_go = ok or args.ignore_errors
if its_a_go:
email_patches(
col, series, cover_fname, patch_files, args.process_tags,
its_a_go, args.ignore_bad_tags, args.add_maintainers,
args.limit, args.dry_run, args.in_reply_to, args.thread,
args.smtp_server)
email_patches(
col, series, cover_fname, patch_files, args.process_tags,
its_a_go, args.ignore_bad_tags, args.add_maintainers,
args.limit, args.dry_run, args.in_reply_to, args.thread,
args.smtp_server)
def patchwork_status(branch, count, start, end, dest_branch, force,
show_comments):
"""Check the status of patches in patchwork
This finds the series in patchwork using the Series-link tag, checks for new
comments and review tags, displays them and creates a new branch with the
review tags.
Args:
branch (str): Branch to create patches from (None = current)
count (int): Number of patches to produce, or -1 to produce patches for
the current branch back to the upstream commit
start (int): Start patch to use (0=first / top of branch)
end (int): End patch to use (0=last one in series, 1=one before that,
etc.)
dest_branch (str): Name of new branch to create with the updated tags
(None to not create a branch)
force (bool): With dest_branch, force overwriting an existing branch
show_comments (bool): True to display snippets from the comments
provided by reviewers
Raises:
ValueError: if the branch has no Series-link value
"""
if count == -1:
# Work out how many patches to send if we can
count = (gitutil.CountCommitsToBranch(branch) - start)
series = patchstream.get_metadata(branch, start, count - end)
warnings = 0
for cmt in series.commits:
if cmt.warn:
print('%d warnings for %s:' % (len(cmt.warn), cmt.hash))
for warn in cmt.warn:
print('\t', warn)
warnings += 1
print
if warnings:
raise ValueError('Please fix warnings before running status')
links = series.get('links')
if not links:
raise ValueError("Branch has no Series-links value")
# Find the link without a version number (we don't support versions yet)
found = [link for link in links.split() if not ':' in link]
if not found:
raise ValueError('Series-links has no current version (without :)')
# Import this here to avoid failing on other commands if the dependencies
# are not present
from patman import status
status.check_patchwork_status(series, found[0], branch, dest_branch, force,
show_comments)

(one file's diff is not shown here: it is too large to display)

@ -66,9 +66,13 @@ def CountCommitsToBranch(branch):
rev_range = '%s..%s' % (us, branch)
else:
rev_range = '@{upstream}..'
pipe = [LogCmd(rev_range, oneline=True), ['wc', '-l']]
stdout = command.RunPipe(pipe, capture=True, oneline=True).stdout
patch_count = int(stdout)
pipe = [LogCmd(rev_range, oneline=True)]
result = command.RunPipe(pipe, capture=True, capture_stderr=True,
oneline=True, raise_on_error=False)
if result.return_code:
raise ValueError('Failed to determine upstream: %s' %
result.stderr.strip())
patch_count = len(result.stdout.splitlines())
return patch_count
def NameRevision(commit_hash):

@ -86,8 +86,20 @@ AddCommonArgs(send)
send.add_argument('patchfiles', nargs='*')
test_parser = subparsers.add_parser('test', help='Run tests')
test_parser.add_argument('testname', type=str, default=None, nargs='?',
help="Specify the test to run")
AddCommonArgs(test_parser)
status = subparsers.add_parser('status',
help='Check status of patches in patchwork')
status.add_argument('-C', '--show-comments', action='store_true',
help='Show comments from each patch')
status.add_argument('-d', '--dest-branch', type=str,
help='Name of branch to create with collected responses')
status.add_argument('-f', '--force', action='store_true',
help='Force overwriting an existing branch')
AddCommonArgs(status)
# Parse options twice: first to get the project and second to handle
# defaults properly (which depends on project).
argv = sys.argv[1:]
@ -111,15 +123,23 @@ if args.cmd == 'test':
sys.argv = [sys.argv[0]]
result = unittest.TestResult()
suite = unittest.TestSuite()
loader = unittest.TestLoader()
for module in (test_checkpatch.TestPatch, func_test.TestFunctional):
suite = unittest.TestLoader().loadTestsFromTestCase(module)
suite.run(result)
if args.testname:
try:
suite.addTests(loader.loadTestsFromName(args.testname, module))
except AttributeError:
continue
else:
suite.addTests(loader.loadTestsFromTestCase(module))
suite.run(result)
for module in ['gitutil', 'settings', 'terminal']:
suite = doctest.DocTestSuite(module)
suite.run(result)
sys.exit(test_util.ReportResult('patman', None, result))
sys.exit(test_util.ReportResult('patman', args.testname, result))
# Process commits, produce patches files, check them, email them
elif args.cmd == 'send':
@ -147,3 +167,19 @@ elif args.cmd == 'send':
else:
control.send(args)
# Check status of patches in patchwork
elif args.cmd == 'status':
ret_code = 0
try:
control.patchwork_status(args.branch, args.count, args.start, args.end,
args.dest_branch, args.force,
args.show_comments)
except Exception as e:
terminal.Print('patman: %s: %s' % (type(e).__name__, e),
colour=terminal.Color.RED)
if args.debug:
print()
traceback.print_exc()
ret_code = 1
sys.exit(ret_code)

@ -2,10 +2,15 @@
# Copyright (c) 2011 The Chromium OS Authors.
#
"""Handles parsing a stream of commits/emails from 'git log' or other source"""
import collections
import datetime
import io
import math
import os
import re
import queue
import shutil
import tempfile
@ -15,38 +20,44 @@ from patman import gitutil
from patman.series import Series
# Tags that we detect and remove
re_remove = re.compile('^BUG=|^TEST=|^BRANCH=|^Review URL:'
'|Reviewed-on:|Commit-\w*:')
RE_REMOVE = re.compile(r'^BUG=|^TEST=|^BRANCH=|^Review URL:'
r'|Reviewed-on:|Commit-\w*:')
# Lines which are allowed after a TEST= line
re_allowed_after_test = re.compile('^Signed-off-by:')
RE_ALLOWED_AFTER_TEST = re.compile('^Signed-off-by:')
# Signoffs
re_signoff = re.compile('^Signed-off-by: *(.*)')
RE_SIGNOFF = re.compile('^Signed-off-by: *(.*)')
# Cover letter tag
re_cover = re.compile('^Cover-([a-z-]*): *(.*)')
RE_COVER = re.compile('^Cover-([a-z-]*): *(.*)')
# Patch series tag
re_series_tag = re.compile('^Series-([a-z-]*): *(.*)')
RE_SERIES_TAG = re.compile('^Series-([a-z-]*): *(.*)')
# Change-Id will be used to generate the Message-Id and then be stripped
re_change_id = re.compile('^Change-Id: *(.*)')
RE_CHANGE_ID = re.compile('^Change-Id: *(.*)')
# Commit series tag
re_commit_tag = re.compile('^Commit-([a-z-]*): *(.*)')
RE_COMMIT_TAG = re.compile('^Commit-([a-z-]*): *(.*)')
# Commit tags that we want to collect and keep
re_tag = re.compile('^(Tested-by|Acked-by|Reviewed-by|Patch-cc|Fixes): (.*)')
RE_TAG = re.compile('^(Tested-by|Acked-by|Reviewed-by|Patch-cc|Fixes): (.*)')
# The start of a new commit in the git log
re_commit = re.compile('^commit ([0-9a-f]*)$')
RE_COMMIT = re.compile('^commit ([0-9a-f]*)$')
# We detect these since checkpatch doesn't always do it
re_space_before_tab = re.compile('^[+].* \t')
RE_SPACE_BEFORE_TAB = re.compile('^[+].* \t')
# Match indented lines for changes
re_leading_whitespace = re.compile('^\s')
RE_LEADING_WHITESPACE = re.compile(r'^\s')
# Detect a 'diff' line
RE_DIFF = re.compile(r'^>.*diff --git a/(.*) b/(.*)$')
# Detect a context line, like '> @@ -153,8 +153,13 @@ CheckPatch
RE_LINE = re.compile(r'>.*@@ \-(\d+),\d+ \+(\d+),\d+ @@ *(.*)')
# States we can be in - can we use range() and still have comments?
STATE_MSG_HEADER = 0 # Still in the message header
@ -62,11 +73,10 @@ class PatchStream:
unwanted tags or inject additional ones. These correspond to the two
phases of processing.
"""
def __init__(self, series, name=None, is_log=False):
def __init__(self, series, is_log=False):
self.skip_blank = False # True to skip a single blank line
self.found_test = False # Found a TEST= line
self.lines_after_test = 0 # Number of lines found after TEST=
self.warn = [] # List of warnings we have collected
self.linenum = 1 # Output line number we are up to
self.in_section = None # Name of start...END section we are in
self.notes = [] # Series notes
@ -78,50 +88,98 @@ class PatchStream:
self.change_lines = [] # Lines of the current change
self.blank_count = 0 # Number of blank lines stored up
self.state = STATE_MSG_HEADER # What state are we in?
self.signoff = [] # Contents of signoff line
self.commit = None # Current commit
# List of unquoted test blocks, each a list of str lines
self.snippets = []
self.cur_diff = None # Last 'diff' line seen (str)
self.cur_line = None # Last context (@@) line seen (str)
self.recent_diff = None # 'diff' line for current snippet (str)
self.recent_line = None # '@@' line for current snippet (str)
self.recent_quoted = collections.deque([], 5)
self.recent_unquoted = queue.Queue()
self.was_quoted = None
def AddToSeries(self, line, name, value):
@staticmethod
def process_text(text, is_comment=False):
"""Process some text through this class using a default Commit/Series
Args:
text (str): Text to parse
is_comment (bool): True if this is a comment rather than a patch.
If True, PatchStream doesn't expect a patch subject at the
start, but jumps straight into the body
Returns:
PatchStream: object with results
"""
pstrm = PatchStream(Series())
pstrm.commit = commit.Commit(None)
infd = io.StringIO(text)
outfd = io.StringIO()
if is_comment:
pstrm.state = STATE_PATCH_HEADER
pstrm.process_stream(infd, outfd)
return pstrm
def _add_warn(self, warn):
"""Add a new warning to report to the user about the current commit
The new warning is added to the current commit if not already present.
Args:
warn (str): Warning to report
Raises:
ValueError: Warning is generated with no commit associated
"""
if not self.commit:
raise ValueError('Warning outside commit: %s' % warn)
if warn not in self.commit.warn:
self.commit.warn.append(warn)
def _add_to_series(self, line, name, value):
"""Add a new Series-xxx tag.
When a Series-xxx tag is detected, we come here to record it, if we
are scanning a 'git log'.
Args:
line: Source line containing tag (useful for debug/error messages)
name: Tag name (part after 'Series-')
value: Tag value (part after 'Series-xxx: ')
line (str): Source line containing tag (useful for debug/error
messages)
name (str): Tag name (part after 'Series-')
value (str): Tag value (part after 'Series-xxx: ')
"""
if name == 'notes':
self.in_section = name
self.skip_blank = False
if self.is_log:
self.series.AddTag(self.commit, line, name, value)
warn = self.series.AddTag(self.commit, line, name, value)
if warn:
self.commit.warn.append(warn)
def AddToCommit(self, line, name, value):
def _add_to_commit(self, name):
"""Add a new Commit-xxx tag.
When a Commit-xxx tag is detected, we come here to record it.
Args:
line: Source line containing tag (useful for debug/error messages)
name: Tag name (part after 'Commit-')
value: Tag value (part after 'Commit-xxx: ')
name (str): Tag name (part after 'Commit-')
"""
if name == 'notes':
self.in_section = 'commit-' + name
self.skip_blank = False
def AddCommitRtag(self, rtag_type, who):
def _add_commit_rtag(self, rtag_type, who):
"""Add a response tag to the current commit
Args:
key: rtag type (e.g. 'Reviewed-by')
who: Person who gave that rtag, e.g. 'Fred Bloggs <fred@bloggs.org>'
rtag_type (str): rtag type (e.g. 'Reviewed-by')
who (str): Person who gave that rtag, e.g.
'Fred Bloggs <fred@bloggs.org>'
"""
self.commit.AddRtag(rtag_type, who)
def CloseCommit(self):
def _close_commit(self):
"""Save the current commit into our commit list, and reset our state"""
if self.commit and self.is_log:
self.series.AddCommit(self.commit)
@ -135,24 +193,31 @@ class PatchStream:
self.skip_blank = True
self.section = []
def ParseVersion(self, value, line):
self.cur_diff = None
self.recent_diff = None
self.recent_line = None
def _parse_version(self, value, line):
"""Parse a version from a *-changes tag
Args:
value: Tag value (part after 'xxx-changes: '
line: Source line containing tag
value (str): Tag value (part after 'xxx-changes: '
line (str): Source line containing tag
Returns:
The version as an integer
int: The version as an integer
Raises:
ValueError: the value cannot be converted
"""
try:
return int(value)
except ValueError as str:
except ValueError:
raise ValueError("%s: Cannot decode version info '%s'" %
(self.commit.hash, line))
(self.commit.hash, line))
def FinalizeChange(self):
"""Finalize a (multi-line) change and add it to the series or commit"""
def _finalise_change(self):
"""_finalise a (multi-line) change and add it to the series or commit"""
if not self.change_lines:
return
change = '\n'.join(self.change_lines)
@ -165,7 +230,48 @@ class PatchStream:
self.commit.AddChange(self.change_version, change)
self.change_lines = []
def ProcessLine(self, line):
def _finalise_snippet(self):
"""Finish off a snippet and add it to the list
This is called when we get to the end of a snippet, i.e. when we enter
the next block of quoted text:
This is a comment from someone.
Something else
> Now we have some code <----- end of snippet
> more code
Now a comment about the above code
This adds the snippet to our list
"""
quoted_lines = []
while self.recent_quoted:
quoted_lines.append(self.recent_quoted.popleft())
unquoted_lines = []
valid = False
while not self.recent_unquoted.empty():
text = self.recent_unquoted.get()
if not (text.startswith('On ') and text.endswith('wrote:')):
unquoted_lines.append(text)
if text:
valid = True
if valid:
lines = []
if self.recent_diff:
lines.append('> File: %s' % self.recent_diff)
if self.recent_line:
out = '> Line: %s / %s' % self.recent_line[:2]
if self.recent_line[2]:
out += ': %s' % self.recent_line[2]
lines.append(out)
lines += quoted_lines + unquoted_lines
if lines:
self.snippets.append(lines)
def process_line(self, line):
"""Process a single line of a patch file or commit log
This process a line and returns a list of lines to output. The list
@ -184,31 +290,37 @@ class PatchStream:
don't want, and add things we think are required.
Args:
line: text line to process
line (str): text line to process
Returns:
list of output lines, or [] if nothing should be output
list: list of output lines, or [] if nothing should be output
Raises:
ValueError: a fatal error occurred while parsing, e.g. an END
without a starting tag, or two commits with two change IDs
"""
# Initially we have no output. Prepare the input line string
out = []
line = line.rstrip('\n')
commit_match = re_commit.match(line) if self.is_log else None
commit_match = RE_COMMIT.match(line) if self.is_log else None
if self.is_log:
if line[:4] == ' ':
line = line[4:]
# Handle state transition and skipping blank lines
series_tag_match = re_series_tag.match(line)
change_id_match = re_change_id.match(line)
commit_tag_match = re_commit_tag.match(line)
cover_match = re_cover.match(line)
signoff_match = re_signoff.match(line)
leading_whitespace_match = re_leading_whitespace.match(line)
series_tag_match = RE_SERIES_TAG.match(line)
change_id_match = RE_CHANGE_ID.match(line)
commit_tag_match = RE_COMMIT_TAG.match(line)
cover_match = RE_COVER.match(line)
signoff_match = RE_SIGNOFF.match(line)
leading_whitespace_match = RE_LEADING_WHITESPACE.match(line)
diff_match = RE_DIFF.match(line)
line_match = RE_LINE.match(line)
tag_match = None
if self.state == STATE_PATCH_HEADER:
tag_match = re_tag.match(line)
tag_match = RE_TAG.match(line)
is_blank = not line.strip()
if is_blank:
if (self.state == STATE_MSG_HEADER
@ -228,7 +340,7 @@ class PatchStream:
# but we are already in a section, this means 'END' is missing
# for that section, fix it up.
if self.in_section:
self.warn.append("Missing 'END' in section '%s'" % self.in_section)
self._add_warn("Missing 'END' in section '%s'" % self.in_section)
if self.in_section == 'cover':
self.series.cover = self.section
elif self.in_section == 'notes':
@ -238,15 +350,17 @@ class PatchStream:
if self.is_log:
self.commit.notes += self.section
else:
self.warn.append("Unknown section '%s'" % self.in_section)
# This should not happen
raise ValueError("Unknown section '%s'" % self.in_section)
self.in_section = None
self.skip_blank = True
self.section = []
# but we are already in a change list, that means a blank line
# is missing, fix it up.
if self.in_change:
self.warn.append("Missing 'blank line' in section '%s-changes'" % self.in_change)
self.FinalizeChange()
self._add_warn("Missing 'blank line' in section '%s-changes'" %
self.in_change)
self._finalise_change()
self.in_change = None
self.change_version = 0
@ -262,7 +376,8 @@ class PatchStream:
if self.is_log:
self.commit.notes += self.section
else:
self.warn.append("Unknown section '%s'" % self.in_section)
# This should not happen
raise ValueError("Unknown section '%s'" % self.in_section)
self.in_section = None
self.skip_blank = True
self.section = []
@ -271,14 +386,14 @@ class PatchStream:
# If we are not in a section, it is an unexpected END
elif line == 'END':
raise ValueError("'END' wihout section")
raise ValueError("'END' wihout section")
# Detect the commit subject
elif not is_blank and self.state == STATE_PATCH_SUBJECT:
self.commit.subject = line
# Detect the tags we want to remove, and skip blank lines
elif re_remove.match(line) and not commit_tag_match:
elif RE_REMOVE.match(line) and not commit_tag_match:
self.skip_blank = True
# TEST= should be the last thing in the commit, so remove
@ -296,26 +411,26 @@ class PatchStream:
self.in_section = 'cover'
self.skip_blank = False
elif name == 'letter-cc':
self.AddToSeries(line, 'cover-cc', value)
self._add_to_series(line, 'cover-cc', value)
elif name == 'changes':
self.in_change = 'Cover'
self.change_version = self.ParseVersion(value, line)
self.change_version = self._parse_version(value, line)
# If we are in a change list, key collected lines until a blank one
elif self.in_change:
if is_blank:
# Blank line ends this change list
self.FinalizeChange()
self._finalise_change()
self.in_change = None
self.change_version = 0
elif line == '---':
self.FinalizeChange()
self._finalise_change()
self.in_change = None
self.change_version = 0
out = self.ProcessLine(line)
out = self.process_line(line)
elif self.is_log:
if not leading_whitespace_match:
self.FinalizeChange()
self._finalise_change()
self.change_lines.append(line)
self.skip_blank = False
@ -326,9 +441,9 @@ class PatchStream:
if name == 'changes':
# value is the version number: e.g. 1, or 2
self.in_change = 'Series'
self.change_version = self.ParseVersion(value, line)
self.change_version = self._parse_version(value, line)
else:
self.AddToSeries(line, name, value)
self._add_to_series(line, name, value)
self.skip_blank = True
# Detect Change-Id tags
@ -336,8 +451,9 @@ class PatchStream:
value = change_id_match.group(1)
if self.is_log:
if self.commit.change_id:
raise ValueError("%s: Two Change-Ids: '%s' vs. '%s'" %
(self.commit.hash, self.commit.change_id, value))
raise ValueError(
"%s: Two Change-Ids: '%s' vs. '%s'" % self.commit.hash,
self.commit.change_id, value)
self.commit.change_id = value
self.skip_blank = True
@ -346,28 +462,28 @@ class PatchStream:
name = commit_tag_match.group(1)
value = commit_tag_match.group(2)
if name == 'notes':
self.AddToCommit(line, name, value)
self._add_to_commit(name)
self.skip_blank = True
elif name == 'changes':
self.in_change = 'Commit'
self.change_version = self.ParseVersion(value, line)
self.change_version = self._parse_version(value, line)
else:
self.warn.append('Line %d: Ignoring Commit-%s' %
(self.linenum, name))
self._add_warn('Line %d: Ignoring Commit-%s' %
(self.linenum, name))
# Detect the start of a new commit
elif commit_match:
self.CloseCommit()
self._close_commit()
self.commit = commit.Commit(commit_match.group(1))
# Detect tags in the commit message
elif tag_match:
rtag_type, who = tag_match.groups()
self.AddCommitRtag(rtag_type, who)
self._add_commit_rtag(rtag_type, who)
# Remove Tested-by self, since few will take much notice
if (rtag_type == 'Tested-by' and
who.find(os.getenv('USER') + '@') != -1):
self.warn.append("Ignoring %s" % line)
self._add_warn("Ignoring '%s'" % line)
elif rtag_type == 'Patch-cc':
self.commit.AddCc(who.split(','))
else:
@ -376,21 +492,42 @@ class PatchStream:
# Suppress duplicate signoffs
elif signoff_match:
if (self.is_log or not self.commit or
self.commit.CheckDuplicateSignoff(signoff_match.group(1))):
self.commit.CheckDuplicateSignoff(signoff_match.group(1))):
out = [line]
# Well that means this is an ordinary line
else:
# Look for space before tab
m = re_space_before_tab.match(line)
if m:
self.warn.append('Line %d/%d has space before tab' %
(self.linenum, m.start()))
mat = RE_SPACE_BEFORE_TAB.match(line)
if mat:
self._add_warn('Line %d/%d has space before tab' %
(self.linenum, mat.start()))
# OK, we have a valid non-blank line
out = [line]
self.linenum += 1
self.skip_blank = False
if diff_match:
self.cur_diff = diff_match.group(1)
# If this is quoted, keep recent lines
if not diff_match and self.linenum > 1 and line:
if line.startswith('>'):
if not self.was_quoted:
self._finalise_snippet()
self.recent_line = None
if not line_match:
self.recent_quoted.append(line)
self.was_quoted = True
self.recent_diff = self.cur_diff
else:
self.recent_unquoted.put(line)
self.was_quoted = False
if line_match:
self.recent_line = line_match.groups()
if self.state == STATE_DIFFS:
pass
@ -407,27 +544,27 @@ class PatchStream:
out += self.commit.notes
out += [''] + log
elif self.found_test:
if not re_allowed_after_test.match(line):
if not RE_ALLOWED_AFTER_TEST.match(line):
self.lines_after_test += 1
return out
def Finalize(self):
def finalise(self):
"""Close out processing of this patch stream"""
self.FinalizeChange()
self.CloseCommit()
self._finalise_snippet()
self._finalise_change()
self._close_commit()
if self.lines_after_test:
self.warn.append('Found %d lines after TEST=' %
self.lines_after_test)
self._add_warn('Found %d lines after TEST=' % self.lines_after_test)
def WriteMessageId(self, outfd):
def _write_message_id(self, outfd):
"""Write the Message-Id into the output.
This is based on the Change-Id in the original patch, the version,
and the prefix.
Args:
outfd: Output stream file object
outfd (io.IOBase): Output stream file object
"""
if not self.commit.change_id:
return
@ -461,27 +598,27 @@ class PatchStream:
# Join parts together with "." and write it out.
outfd.write('Message-Id: <%s@changeid>\n' % '.'.join(parts))
def ProcessStream(self, infd, outfd):
def process_stream(self, infd, outfd):
"""Copy a stream from infd to outfd, filtering out unwanting things.
This is used to process patch files one at a time.
Args:
infd: Input stream file object
outfd: Output stream file object
infd (io.IOBase): Input stream file object
outfd (io.IOBase): Output stream file object
"""
# Extract the filename from each diff, for nice warnings
fname = None
last_fname = None
re_fname = re.compile('diff --git a/(.*) b/.*')
self.WriteMessageId(outfd)
self._write_message_id(outfd)
while True:
line = infd.readline()
if not line:
break
out = self.ProcessLine(line)
out = self.process_line(line)
# Try to detect blank lines at EOF
for line in out:
@ -493,71 +630,124 @@ class PatchStream:
self.blank_count += 1
else:
if self.blank_count and (line == '-- ' or match):
self.warn.append("Found possible blank line(s) at "
"end of file '%s'" % last_fname)
self._add_warn("Found possible blank line(s) at end of file '%s'" %
last_fname)
outfd.write('+\n' * self.blank_count)
outfd.write(line + '\n')
self.blank_count = 0
self.Finalize()
self.finalise()
def insert_tags(msg, tags_to_emit):
"""Add extra tags to a commit message
def GetMetaDataForList(commit_range, git_dir=None, count=None,
series = None, allow_overwrite=False):
The tags are added after an existing block of tags if found, otherwise at
the end.
Args:
msg (str): Commit message
tags_to_emit (list): List of tags to emit, each a str
Returns:
(str) new message
"""
out = []
done = False
emit_tags = False
for line in msg.splitlines():
if not done:
signoff_match = RE_SIGNOFF.match(line)
tag_match = RE_TAG.match(line)
if tag_match or signoff_match:
emit_tags = True
if emit_tags and not tag_match and not signoff_match:
out += tags_to_emit
emit_tags = False
done = True
out.append(line)
if not done:
out.append('')
out += tags_to_emit
return '\n'.join(out)
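# Illustrative sketch (not part of the patch), with a made-up message and tag:
#
#   msg = 'cmd: Add poweroff\n\nSigned-off-by: Fred <fred@example.com>'
#   new_msg = insert_tags(msg, ['Reviewed-by: Joe <joe@example.com>'])
#   # the Reviewed-by line is appended after the existing Signed-off-by block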
def get_list(commit_range, git_dir=None, count=None):
"""Get a log of a list of comments
This returns the output of 'git log' for the selected commits
Args:
commit_range (str): Range of commits to count (e.g. 'HEAD..base')
git_dir (str): Path to git repository (None to use default)
count (int): Number of commits to list, or None for no limit
Returns
str: String containing the contents of the git log
"""
params = gitutil.LogCmd(commit_range, reverse=True, count=count,
git_dir=git_dir)
return command.RunPipe([params], capture=True).stdout
def get_metadata_for_list(commit_range, git_dir=None, count=None,
series=None, allow_overwrite=False):
"""Reads out patch series metadata from the commits
This does a 'git log' on the relevant commits and pulls out the tags we
are interested in.
Args:
commit_range: Range of commits to count (e.g. 'HEAD..base')
git_dir: Path to git repository (None to use default)
count: Number of commits to list, or None for no limit
series: Series object to add information into. By default a new series
commit_range (str): Range of commits to count (e.g. 'HEAD..base')
git_dir (str): Path to git repository (None to use default)
count (int): Number of commits to list, or None for no limit
series (Series): Object to add information into. By default a new series
is started.
allow_overwrite: Allow tags to overwrite an existing tag
allow_overwrite (bool): Allow tags to overwrite an existing tag
Returns:
A Series object containing information about the commits.
Series: Object containing information about the commits.
"""
if not series:
series = Series()
series.allow_overwrite = allow_overwrite
params = gitutil.LogCmd(commit_range, reverse=True, count=count,
git_dir=git_dir)
stdout = command.RunPipe([params], capture=True).stdout
ps = PatchStream(series, is_log=True)
stdout = get_list(commit_range, git_dir, count)
pst = PatchStream(series, is_log=True)
for line in stdout.splitlines():
ps.ProcessLine(line)
ps.Finalize()
pst.process_line(line)
pst.finalise()
return series
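# Illustrative sketch (not part of the patch): reading series metadata from
# the three commits below the current branch tip (range and count made up):
#
#   series = get_metadata_for_list('HEAD', count=3)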
def GetMetaData(branch, start, count):
def get_metadata(branch, start, count):
"""Reads out patch series metadata from the commits
This does a 'git log' on the relevant commits and pulls out the tags we
are interested in.
Args:
branch: Branch to use (None for current branch)
start: Commit to start from: 0=branch HEAD, 1=next one, etc.
count: Number of commits to list
"""
return GetMetaDataForList('%s~%d' % (branch if branch else 'HEAD', start),
None, count)
branch (str): Branch to use (None for current branch)
start (int): Commit to start from: 0=branch HEAD, 1=next one, etc.
count (int): Number of commits to list
def GetMetaDataForTest(text):
Returns:
Series: Object containing information about the commits.
"""
return get_metadata_for_list(
'%s~%d' % (branch if branch else 'HEAD', start), None, count)
def get_metadata_for_test(text):
"""Process metadata from a file containing a git log. Used for tests
Args:
text (str): Text of the git log output to process
Returns:
Series: Object containing information about the commits.
"""
series = Series()
ps = PatchStream(series, is_log=True)
pst = PatchStream(series, is_log=True)
for line in text.splitlines():
ps.ProcessLine(line)
ps.Finalize()
pst.process_line(line)
pst.finalise()
return series
def FixPatch(backup_dir, fname, series, commit):
def fix_patch(backup_dir, fname, series, cmt):
"""Fix up a patch file, by adding/removing as required.
We remove our tags from the patch file, insert changes lists, etc.
@ -566,18 +756,20 @@ def FixPatch(backup_dir, fname, series, commit):
A backup file is put into backup_dir (if not None).
Args:
fname: Filename to patch file to process
series: Series information about this patch set
commit: Commit object for this patch file
backup_dir (str): Path to directory to use to backup the file
fname (str): Filename to patch file to process
series (Series): Series information about this patch set
cmt (Commit): Commit object for this patch file
Return:
A list of errors, or [] if all ok.
list: A list of errors, each str, or [] if all ok.
"""
handle, tmpname = tempfile.mkstemp()
outfd = os.fdopen(handle, 'w', encoding='utf-8')
infd = open(fname, 'r', encoding='utf-8')
ps = PatchStream(series)
ps.commit = commit
ps.ProcessStream(infd, outfd)
pst = PatchStream(series)
pst.commit = cmt
pst.process_stream(infd, outfd)
infd.close()
outfd.close()
@ -585,46 +777,47 @@ def FixPatch(backup_dir, fname, series, commit):
if backup_dir:
shutil.copy(fname, os.path.join(backup_dir, os.path.basename(fname)))
shutil.move(tmpname, fname)
return ps.warn
return cmt.warn
def FixPatches(series, fnames):
def fix_patches(series, fnames):
"""Fix up a list of patches identified by filenames
The patch files are processed in place, and overwritten.
Args:
series: The series object
fnames: List of patch files to process
series (Series): The Series object
fnames (:type: list of str): List of patch files to process
"""
# Current workflow creates patches, so we shouldn't need a backup
backup_dir = None #tempfile.mkdtemp('clean-patch')
count = 0
for fname in fnames:
commit = series.commits[count]
commit.patch = fname
commit.count = count
result = FixPatch(backup_dir, fname, series, commit)
cmt = series.commits[count]
cmt.patch = fname
cmt.count = count
result = fix_patch(backup_dir, fname, series, cmt)
if result:
print('%d warnings for %s:' % (len(result), fname))
print('%d warning%s for %s:' %
(len(result), 's' if len(result) > 1 else '', fname))
for warn in result:
print('\t', warn)
print
print('\t%s' % warn)
print()
count += 1
print('Cleaned %d patches' % count)
print('Cleaned %d patch%s' % (count, 'es' if count > 1 else ''))
def InsertCoverLetter(fname, series, count):
def insert_cover_letter(fname, series, count):
"""Inserts a cover letter with the required info into patch 0
Args:
fname: Input / output filename of the cover letter file
series: Series object
count: Number of patches in the series
fname (str): Input / output filename of the cover letter file
series (Series): Series object
count (int): Number of patches in the series
"""
fd = open(fname, 'r')
lines = fd.readlines()
fd.close()
fil = open(fname, 'r')
lines = fil.readlines()
fil.close()
fd = open(fname, 'w')
fil = open(fname, 'w')
text = series.cover
prefix = series.GetPatchPrefix()
for line in lines:
@ -644,5 +837,5 @@ def InsertCoverLetter(fname, series, count):
# Now the change list
out = series.MakeChangeLog(None)
line += '\n' + '\n'.join(out)
fd.write(line)
fd.close()
fil.write(line)
fil.close()


@ -16,7 +16,7 @@ from patman import tools
# Series-xxx tags that we understand
valid_series = ['to', 'cc', 'version', 'changes', 'prefix', 'notes', 'name',
'cover_cc', 'process_log']
'cover_cc', 'process_log', 'links']
class Series(dict):
"""Holds information about a patch series, including all tags.
@ -59,6 +59,9 @@ class Series(dict):
line: Source line containing tag (useful for debug/error messages)
name: Tag name (part after 'Series-')
value: Tag value (part after 'Series-xxx: ')
Returns:
String warning if something went wrong, else None
"""
# If we already have it, then add to our list
name = name.replace('-', '_')
@ -78,9 +81,10 @@ class Series(dict):
else:
self[name] = value
else:
raise ValueError("In %s: line '%s': Unknown 'Series-%s': valid "
return ("In %s: line '%s': Unknown 'Series-%s': valid "
"options are %s" % (commit.hash, line, name,
', '.join(valid_series)))
return None
def AddCommit(self, commit):
"""Add a commit into our list of commits

tools/patman/status.py (new file, 482 lines)

@ -0,0 +1,482 @@
# SPDX-License-Identifier: GPL-2.0+
#
# Copyright 2020 Google LLC
#
"""Talks to the patchwork service to figure out what patches have been reviewed
and commented on. Provides a way to display review tags and comments.
Allows creation of a new branch based on the old but with the review tags
collected from patchwork.
"""
import collections
import concurrent.futures
from itertools import repeat
import re
import pygit2
import requests
from patman import patchstream
from patman.patchstream import PatchStream
from patman import terminal
from patman import tout
# Patches which are part of a multi-patch series are shown with a prefix like
# [prefix, version, sequence], for example '[RFC, v2, 3/5]'. All but the last
# part is optional. This decodes the string into groups. For single patches
# the [] part is not present:
# Groups: (ignore, ignore, ignore, prefix, version, sequence, subject)
RE_PATCH = re.compile(r'(\[(((.*),)?(.*),)?(.*)\]\s)?(.*)$')
# This decodes the sequence string into a patch number and patch count
RE_SEQ = re.compile(r'(\d+)/(\d+)')
def to_int(vals):
"""Convert a list of strings into integers, using 0 if not an integer
Args:
vals (list): List of strings
Returns:
list: List of integers, one for each input string
"""
out = [int(val) if val.isdigit() else 0 for val in vals]
return out
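# Illustrative sketch (not part of the original file), with a made-up sequence
# string: RE_SEQ splits '3/5' into its two numbers and to_int() converts them.
#
#   seq, count = to_int(RE_SEQ.match('3/5').groups())   # seq == 3, count == 5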
class Patch(dict):
"""Models a patch in patchwork
This class records information obtained from patchwork
Some of this information comes from the 'Patch' column:
[RFC,v2,1/3] dm: Driver and uclass changes for tiny-dm
This shows the prefix, version, seq, count and subject.
The other properties come from other columns in the display.
Properties:
pid (str): ID of the patch (typically an integer)
seq (int): Sequence number within series (1=first) parsed from sequence
string
count (int): Number of patches in series, parsed from sequence string
raw_subject (str): Entire subject line, e.g.
"[1/2,v2] efi_loader: Sort header file ordering"
prefix (str): Prefix string or None (e.g. 'RFC')
version (str): Version string or None (e.g. 'v2')
subject (str): Patch subject with [..] part removed (same as commit
subject)
"""
def __init__(self, pid):
super().__init__()
self.id = pid # Use 'id' to match what the Rest API provides
self.seq = None
self.count = None
self.prefix = None
self.version = None
self.raw_subject = None
self.subject = None
# These make us more like a dictionary
def __setattr__(self, name, value):
self[name] = value
def __getattr__(self, name):
return self[name]
def __hash__(self):
return hash(frozenset(self.items()))
def __str__(self):
return self.raw_subject
def parse_subject(self, raw_subject):
"""Parse the subject of a patch into its component parts
See RE_PATCH for details. The parsed info is placed into seq, count,
prefix, version, subject
Args:
raw_subject (str): Subject string to parse
Raises:
ValueError: the subject cannot be parsed
"""
self.raw_subject = raw_subject.strip()
mat = RE_PATCH.search(raw_subject.strip())
if not mat:
raise ValueError("Cannot parse subject '%s'" % raw_subject)
self.prefix, self.version, seq_info, self.subject = mat.groups()[3:]
mat_seq = RE_SEQ.match(seq_info) if seq_info else False
if mat_seq is None:
self.version = seq_info
seq_info = None
if self.version and not self.version.startswith('v'):
self.prefix = self.version
self.version = None
if seq_info:
if mat_seq:
self.seq = int(mat_seq.group(1))
self.count = int(mat_seq.group(2))
else:
self.seq = 1
self.count = 1
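# Illustrative sketch (not part of the original file), with a made-up subject:
#
#   patch = Patch('1234')
#   patch.parse_subject('[RFC,v2,3/5] dm: tidy up the uclass')
#   # patch.prefix == 'RFC', patch.version == 'v2',
#   # patch.seq == 3, patch.count == 5,
#   # patch.subject == 'dm: tidy up the uclass'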
class Review:
"""Represents a single review email collected in Patchwork
Patches can attract multiple reviews. Each consists of an author/date and
a variable number of 'snippets', which are groups of quoted and unquoted
text.
"""
def __init__(self, meta, snippets):
"""Create new Review object
Args:
meta (str): Text containing review author and date
snippets (list): List of snippets in the review, each a list of text
lines
"""
self.meta = ' : '.join([line for line in meta.splitlines() if line])
self.snippets = snippets
def compare_with_series(series, patches):
"""Compare a list of patches with a series it came from
This prints any problems as warnings
Args:
series (Series): Series to compare against
patches (:type: list of Patch): list of Patch objects to compare with
Returns:
tuple:
    dict:
        key: Commit number (0...n-1)
        value: Patch object for that commit
    dict:
        key: Patch number (0...n-1)
        value: Commit object for that patch
    list of str: Warnings for commits with no matching patch, and
        patches with no matching commit
"""
# Check the names match
warnings = []
patch_for_commit = {}
all_patches = set(patches)
for seq, cmt in enumerate(series.commits):
pmatch = [p for p in all_patches if p.subject == cmt.subject]
if len(pmatch) == 1:
patch_for_commit[seq] = pmatch[0]
all_patches.remove(pmatch[0])
elif len(pmatch) > 1:
warnings.append("Multiple patches match commit %d ('%s'):\n %s" %
(seq + 1, cmt.subject,
'\n '.join([p.subject for p in pmatch])))
else:
warnings.append("Cannot find patch for commit %d ('%s')" %
(seq + 1, cmt.subject))
# Check the names match
commit_for_patch = {}
all_commits = set(series.commits)
for seq, patch in enumerate(patches):
cmatch = [c for c in all_commits if c.subject == patch.subject]
if len(cmatch) == 1:
commit_for_patch[seq] = cmatch[0]
all_commits.remove(cmatch[0])
elif len(cmatch) > 1:
warnings.append("Multiple commits match patch %d ('%s'):\n %s" %
(seq + 1, patch.subject,
'\n '.join([c.subject for c in cmatch])))
else:
warnings.append("Cannot find commit for patch %d ('%s')" %
(seq + 1, patch.subject))
return patch_for_commit, commit_for_patch, warnings
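# Illustrative sketch (not part of the original file): the two mappings are
# indexed by position, so callers can look up the patchwork Patch for a given
# local commit (and vice versa):
#
#   patch_for_commit, commit_for_patch, warnings = compare_with_series(
#       series, patches)
#   first_patch = patch_for_commit.get(0)   # Patch for the first commit, if any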
def call_rest_api(subpath):
"""Call the patchwork API and return the result as JSON
Args:
subpath (str): URL subpath to use
Returns:
dict: Json result
Raises:
ValueError: the URL could not be read
"""
url = 'https://patchwork.ozlabs.org/api/1.2/%s' % subpath
response = requests.get(url)
if response.status_code != 200:
raise ValueError("Could not read URL '%s'" % url)
return response.json()
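# Illustrative sketch (not part of the original file): fetching a series from
# patchwork by its ID (the ID here is made up):
#
#   data = call_rest_api('series/123456/')
#   names = [pat['name'] for pat in data['patches']]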
def collect_patches(series, series_id, rest_api=call_rest_api):
"""Collect patch information about a series from patchwork
Uses the Patchwork REST API to collect information provided by patchwork
about the status of each patch.
Args:
series (Series): Series object corresponding to the local branch
containing the series
series_id (str): Patch series ID number
rest_api (function): API function to call to access Patchwork, for
testing
Returns:
list: List of patches sorted by sequence number, each a Patch object
Raises:
ValueError: if the URL could not be read or the web page does not follow
the expected structure
"""
data = rest_api('series/%s/' % series_id)
# Get all the rows, which are patches
patch_dict = data['patches']
count = len(patch_dict)
num_commits = len(series.commits)
if count != num_commits:
tout.Warning('Warning: Patchwork reports %d patches, series has %d' %
(count, num_commits))
patches = []
# Work through each row (patch) one at a time, collecting the information
warn_count = 0
for pw_patch in patch_dict:
patch = Patch(pw_patch['id'])
patch.parse_subject(pw_patch['name'])
patches.append(patch)
if warn_count > 1:
tout.Warning(' (total of %d warnings)' % warn_count)
# Sort patches by patch number
patches = sorted(patches, key=lambda x: x.seq)
return patches
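# Illustrative sketch (not part of the original file), with a made-up series ID:
#
#   patches = collect_patches(series, '210622')
#   # patches are sorted by sequence number, so patches[0] is patch 1/n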
def find_new_responses(new_rtag_list, review_list, seq, cmt, patch,
rest_api=call_rest_api):
"""Find new rtags collected by patchwork that we don't know about
This is designed to be run in parallel, once for each commit/patch
Args:
new_rtag_list (list): New rtags are written to new_rtag_list[seq]
list, each a dict:
key: Response tag (e.g. 'Reviewed-by')
value: Set of people who gave that response, each a name/email
string
review_list (list): New reviews are written to review_list[seq], a
    list of Review objects for this patch
seq (int): Position in new_rtag_list to update
cmt (Commit): Commit object for this commit
patch (Patch): Corresponding Patch object for this patch
rest_api (function): API function to call to access Patchwork, for
testing
"""
if not patch:
return
# Get the content for the patch email itself as well as all comments
data = rest_api('patches/%s/' % patch.id)
pstrm = PatchStream.process_text(data['content'], True)
rtags = collections.defaultdict(set)
for response, people in pstrm.commit.rtags.items():
rtags[response].update(people)
data = rest_api('patches/%s/comments/' % patch.id)
reviews = []
for comment in data:
pstrm = PatchStream.process_text(comment['content'], True)
if pstrm.snippets:
submitter = comment['submitter']
person = '%s <%s>' % (submitter['name'], submitter['email'])
reviews.append(Review(person, pstrm.snippets))
for response, people in pstrm.commit.rtags.items():
rtags[response].update(people)
# Find the tags that are not in the commit
new_rtags = collections.defaultdict(set)
base_rtags = cmt.rtags
for tag, people in rtags.items():
for who in people:
is_new = (tag not in base_rtags or
who not in base_rtags[tag])
if is_new:
new_rtags[tag].add(who)
new_rtag_list[seq] = new_rtags
review_list[seq] = reviews
def show_responses(rtags, indent, is_new):
"""Show rtags collected
Args:
rtags (dict): review tags to show
key: Response tag (e.g. 'Reviewed-by')
value: Set of people who gave that response, each a name/email string
indent (str): Indentation string to write before each line
is_new (bool): True if this output should be highlighted
Returns:
int: Number of review tags displayed
"""
col = terminal.Color()
count = 0
for tag in sorted(rtags.keys()):
people = rtags[tag]
for who in sorted(people):
terminal.Print(indent + '%s %s: ' % ('+' if is_new else ' ', tag),
newline=False, colour=col.GREEN, bright=is_new)
terminal.Print(who, colour=col.WHITE, bright=is_new)
count += 1
return count
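# Illustrative sketch (not part of the original file), with made-up values:
#
#   rtags = {'Reviewed-by': {'Joe Bloggs <joe@example.com>'}}
#   show_responses(rtags, '  ', True)
#   # prints '  + Reviewed-by: Joe Bloggs <joe@example.com>'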
def create_branch(series, new_rtag_list, branch, dest_branch, overwrite,
repo=None):
"""Create a new branch with review tags added
Args:
series (Series): Series object for the existing branch
new_rtag_list (list): List of review tags to add, one for each commit,
each a dict:
key: Response tag (e.g. 'Reviewed-by')
value: Set of people who gave that response, each a name/email
string
branch (str): Existing branch to update
dest_branch (str): Name of new branch to create
overwrite (bool): True to force overwriting dest_branch if it exists
repo (pygit2.Repository): Repo to use (use None unless testing)
Returns:
int: Total number of review tags added across all commits
Raises:
ValueError: if the destination branch name is the same as the original
branch, or it already exists and @overwrite is False
"""
if branch == dest_branch:
raise ValueError(
'Destination branch must not be the same as the original branch')
if not repo:
repo = pygit2.Repository('.')
count = len(series.commits)
new_br = repo.branches.get(dest_branch)
if new_br:
if not overwrite:
raise ValueError("Branch '%s' already exists (-f to overwrite)" %
dest_branch)
new_br.delete()
if not branch:
branch = 'HEAD'
target = repo.revparse_single('%s~%d' % (branch, count))
repo.branches.local.create(dest_branch, target)
num_added = 0
for seq in range(count):
parent = repo.branches.get(dest_branch)
cherry = repo.revparse_single('%s~%d' % (branch, count - seq - 1))
repo.merge_base(cherry.oid, parent.target)
base_tree = cherry.parents[0].tree
index = repo.merge_trees(base_tree, parent, cherry)
tree_id = index.write_tree(repo)
lines = []
if new_rtag_list[seq]:
for tag, people in new_rtag_list[seq].items():
for who in people:
lines.append('%s: %s' % (tag, who))
num_added += 1
message = patchstream.insert_tags(cherry.message.rstrip(),
sorted(lines))
repo.create_commit(
parent.name, cherry.author, cherry.committer, message, tree_id,
[parent.target])
return num_added
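# Illustrative sketch (not part of the original file): writing the collected
# tags into a new branch (branch names here are made up):
#
#   added = create_branch(series, new_rtag_list, 'us-cmd', 'us-cmd2',
#                         overwrite=False)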
def check_patchwork_status(series, series_id, branch, dest_branch, force,
show_comments, rest_api=call_rest_api,
test_repo=None):
"""Check the status of a series on Patchwork
This finds review tags and comments for a series in Patchwork, displaying
them to show what is new compared to the local series.
Args:
series (Series): Series object for the existing branch
series_id (str): Patch series ID number
branch (str): Existing branch to update, or None
dest_branch (str): Name of new branch to create, or None
force (bool): True to force overwriting dest_branch if it exists
show_comments (bool): True to show the comments on each patch
rest_api (function): API function to call to access Patchwork, for
testing
test_repo (pygit2.Repository): Repo to use (use None unless testing)
"""
patches = collect_patches(series, series_id, rest_api)
col = terminal.Color()
count = len(series.commits)
new_rtag_list = [None] * count
review_list = [None] * count
patch_for_commit, _, warnings = compare_with_series(series, patches)
for warn in warnings:
tout.Warning(warn)
patch_list = [patch_for_commit.get(c) for c in range(len(series.commits))]
with concurrent.futures.ThreadPoolExecutor(max_workers=16) as executor:
futures = executor.map(
find_new_responses, repeat(new_rtag_list), repeat(review_list),
range(count), series.commits, patch_list, repeat(rest_api))
for fresponse in futures:
if fresponse:
raise fresponse.exception()
num_to_add = 0
for seq, cmt in enumerate(series.commits):
patch = patch_for_commit.get(seq)
if not patch:
continue
terminal.Print('%3d %s' % (patch.seq, patch.subject[:50]),
colour=col.BLUE)
cmt = series.commits[seq]
base_rtags = cmt.rtags
new_rtags = new_rtag_list[seq]
indent = ' ' * 2
show_responses(base_rtags, indent, False)
num_to_add += show_responses(new_rtags, indent, True)
if show_comments:
for review in review_list[seq]:
terminal.Print('Review: %s' % review.meta, colour=col.RED)
for snippet in review.snippets:
for line in snippet:
quoted = line.startswith('>')
terminal.Print(' %s' % line,
colour=col.MAGENTA if quoted else None)
terminal.Print()
terminal.Print("%d new response%s available in patchwork%s" %
(num_to_add, 's' if num_to_add != 1 else '',
'' if dest_branch
else ' (use -d to write them to a new branch)'))
if dest_branch:
num_added = create_branch(series, new_rtag_list, branch,
dest_branch, force, test_repo)
terminal.Print(
"%d response%s added from patchwork into new branch '%s'" %
(num_added, 's' if num_added != 1 else '', dest_branch))


@ -34,14 +34,22 @@ class PrintLine:
newline: True to output a newline after the text
colour: Text colour to use
"""
def __init__(self, text, newline, colour):
def __init__(self, text, colour, newline=True, bright=True):
self.text = text
self.newline = newline
self.colour = colour
self.bright = bright
def __eq__(self, other):
return (self.text == other.text and
self.newline == other.newline and
self.colour == other.colour and
self.bright == other.bright)
def __str__(self):
return 'newline=%s, colour=%s, text=%s' % (self.newline, self.colour,
self.text)
return ("newline=%s, colour=%s, bright=%d, text='%s'" %
(self.newline, self.colour, self.bright, self.text))
def CalcAsciiLen(text):
"""Calculate the length of a string, ignoring any ANSI sequences
@ -136,7 +144,7 @@ def Print(text='', newline=True, colour=None, limit_to_line=False, bright=True):
global last_print_len
if print_test_mode:
print_test_list.append(PrintLine(text, newline, colour))
print_test_list.append(PrintLine(text, colour, newline, bright))
else:
if colour:
col = Color()
@ -159,11 +167,12 @@ def PrintClear():
print('\r%s\r' % (' '* last_print_len), end='', flush=True)
last_print_len = None
def SetPrintTestMode():
def SetPrintTestMode(enable=True):
"""Go into test mode, where all printing is recorded"""
global print_test_mode
print_test_mode = True
print_test_mode = enable
GetPrintTestLines()
def GetPrintTestLines():
"""Get a list of all lines output through Print()


@ -148,15 +148,15 @@ Signed-off-by: Simon Glass <sjg@chromium.org>
expfd.write(expected)
expfd.close()
# Normally by the time we call FixPatch we've already collected
# Normally by the time we call fix_patch we've already collected
# metadata. Here, we haven't, but at least fake up something.
# Set the "count" to -1 which tells FixPatch to use a bogus/fixed
# Set the "count" to -1 which tells fix_patch to use a bogus/fixed
# time for generating the Message-Id.
com = commit.Commit('')
com.change_id = 'I80fe1d0c0b7dd10aa58ce5bb1d9290b6664d5413'
com.count = -1
patchstream.FixPatch(None, inname, series.Series(), com)
patchstream.fix_patch(None, inname, series.Series(), com)
rc = os.system('diff -u %s %s' % (inname, expname))
self.assertEqual(rc, 0)