author    Markus Lehtonen <markus.lehtonen@linux.intel.com>  2014-09-11 15:28:48 +0300
committer Markus Lehtonen <markus.lehtonen@linux.intel.com>  2014-12-19 13:33:12 +0200
commit    10e7270d382adeef73001156ef1ab60eb0726c20
tree      4eaf5bbf1334efba67e62423fdcafef567374af0 /gbp
parent    4251243b995152289cd8cfd1444be6cea75148a2
Introduce pq-bb tool
This is a tool for managing patch-queues for packages maintained in the
BitBake packaging format (.bb recipes).

Change-Id: I7c2815c05495deb731106fbfea911494ac4984ac
Signed-off-by: Markus Lehtonen <markus.lehtonen@linux.intel.com>
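As a purely illustrative sketch of what the new BBFile class (added in gbp/bb/__init__.py below) provides: parsing a standalone recipe in "naive" mode, i.e. with cfg_data=None and no configured BitBake environment. The recipe file name and contents are made up for the example; it assumes gbp.bb is importable, and the naive parser does not need a running BitBake as long as no SRC_URI entries have to be resolved.

import os
import shutil
import tempfile

from gbp.bb import BBFile

# Made-up standalone recipe: PN and PV are taken from the file name,
# other variables are parsed naively (no BitBake data store involved).
tmpdir = tempfile.mkdtemp()
recipe = os.path.join(tmpdir, 'example_0.1.bb')
with open(recipe, 'w') as fobj:
    fobj.write('DESCRIPTION = "Example package"\n')
    fobj.write('PR = "r3"\n')

bbfile = BBFile(recipe)                  # cfg_data=None -> naive_parse_bb()
print bbfile.getVar('PN')                # 'example' (from the file name)
print bbfile.getVar('PV')                # '0.1'
print bbfile.version['release']          # 'r3' (parsed from the recipe)

shutil.rmtree(tmpdir)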
Diffstat (limited to 'gbp')
-rw-r--r--   gbp/bb/__init__.py      398
-rwxr-xr-x   gbp/scripts/pq_bb.py    427
-rwxr-xr-x   gbp/scripts/pq_rpm.py    19
3 files changed, 835 insertions, 9 deletions
diff --git a/gbp/bb/__init__.py b/gbp/bb/__init__.py
index 77af1f53..759ae06c 100644
--- a/gbp/bb/__init__.py
+++ b/gbp/bb/__init__.py
@@ -17,10 +17,18 @@
"""Bitbake helper functionality"""
import os
+import re
+import shutil
+import string
import subprocess
import sys
+import tempfile
+from collections import defaultdict
import gbp.log
+from gbp.errors import GbpError
+from gbp.git.repository import GitRepository, GitRepositoryError
+from gbp.scripts.common.buildpackage import dump_tree
bb = None
@@ -49,7 +57,7 @@ def init_tinfoil(config_only=False, tracking=False):
import bb.tinfoil
try:
tinfoil = bb.tinfoil.Tinfoil(tracking=tracking)
- except bb.BBHandledException:
+ except (SystemExit, bb.BBHandledException):
raise GbpError("Failed to initialize tinfoil")
tinfoil.prepare(config_only=config_only)
return tinfoil
@@ -62,5 +70,393 @@ def pkg_version(data):
'version': data.getVar('PV', True) + '-' + data.getVar('PR', True)}
+class BBFile(object):
+ """Class representing .bb meta data"""
+ var_ops = r'\+=|=\+|\?=|\?\?=|:=|='
+ vardef_re = re.compile(
+ r'(^(?P<name>\w+)\s*(?P<op>%s)\s*)(?P<value>\S.*)' % var_ops)
+
+
+ def __init__(self, path, cfg_data=None):
+ self.bb_file = os.path.basename(path)
+ self.bb_dir = os.path.abspath(os.path.dirname(path))
+
+ self._pkg_data = None
+ self._variables = {}
+ self.includes = []
+ self.localfiles = []
+
+ if cfg_data is not None:
+ self.parse_bb(path, cfg_data)
+ else:
+ self.naive_parse_bb(path)
+
+ @property
+ def version(self):
+ """Get version information as a dict"""
+ return {'upstreamversion': self.getVar('PV', True),
+ 'release': self.getVar('PR', True)}
+
+ @property
+ def bb_path(self):
+ """Full path of the bb file"""
+ return os.path.join(self.bb_dir, self.bb_file)
+
+ def parse_bb(self, path, cfg_data):
+ """Parse bb meta file"""
+ self._pkg_data = bb.cache.Cache.loadDataFull(path, [], cfg_data)
+
+ # Determine local packaging files
+ uris = (self.getVar('SRC_URI', True) or "").split()
+ fetcher = bb.fetch2.Fetch(uris, self._pkg_data)
+ bb_dir = os.path.dirname(self.getVar('FILE'))
+        # Also check for file existence as fetcher incorrectly returns some
+ # non-existent .bbclass files under the recipe directory
+ self.includes = [path for path in self.getVar('BBINCLUDED').split() if
+ path.startswith(bb_dir) and os.path.exists(path)]
+ self.localfiles = [path for path in fetcher.localpaths() if
+ path.startswith(bb_dir)]
+
+ def naive_parse_bb(self, path):
+ """Naive parsing of standalone recipes"""
+ # Some variable defaults
+ # e.g. take package name and version directly from recipe file name
+ self._variables['FILE'] = os.path.abspath(path)
+ fn_base, _fn_ext = os.path.splitext(os.path.basename(path))
+ split_base = fn_base.rsplit('_', 1)
+ if len(split_base) == 2:
+ self._variables['PN'] = split_base[0]
+ self._variables['PV'] = split_base[1]
+ else:
+ self._variables['PN'] = fn_base
+ self._variables['PV'] = '1.0'
+ self._variables['PR'] = 'r0'
+
+ def var_parse_cb(lines):
+ """Callback function for parsing variables"""
+ unwrapped = self.unwrap_lines(lines)
+ match = self.vardef_re.match(unwrapped)
+ if match:
+ var = match.groupdict()
+ value = self.unquote_val(var['value'])
+
+ if (var['name'] not in self._variables or
+ var['op'] in ('=', ':=')):
+ self._variables[var['name']] = value
+ elif var['op'] in ('+=', '=+'):
+ self._variables[var['name']] += ' ' + value
+ else:
+ splitted = unwrapped.split(None, 1)
+ if (len(splitted) > 1 and
+ splitted[0] in ('include', 'require')):
+ inc_fname = splitted[1].strip()
+ inc_path = os.path.join(os.path.dirname(path),
+ inc_fname)
+ self.includes.append(os.path.abspath(inc_path))
+ return lines + self.parse_file(inc_path, var_parse_cb)
+ return lines
+
+ # Parse variables from file
+ self.parse_file(path, var_parse_cb)
+
+ # Find local files
+ filedirs = [self.getVar('PN') + '-' + self.getVar('PV'),
+ self.getVar('PN'), 'files']
+ uris = (self.getVar('SRC_URI') or "").split()
+ for uri_str in uris:
+ uri = bb.fetch2.URI(uri_str)
+ if uri.scheme == 'file':
+ found = False
+ for path in [os.path.join(self.bb_dir, dirn, uri.path) for dirn
+ in filedirs]:
+ if os.path.exists(path):
+ self.localfiles.append(path)
+ found = True
+ break
+ if not found:
+ gbp.log.warn("Seemingly local file '%s' not found under "
+ "'%s'" % (uri_str, self.bb_dir))
+
+ def _expand_single(self, match):
+ """Expand single occurrence of a variable reference"""
+ if match.group(1) in self._variables:
+ return self._variables[match.group(1)]
+ return match.group(0)
+
+ def expand_val(self, val, rec=0):
+ """Expand variable"""
+ expanded = re.sub(r'\${(\w+)}', self._expand_single, val)
+ if expanded == val:
+ return expanded
+ elif rec < 20:
+            return self.expand_val(expanded, rec + 1)
+ else:
+ raise GbpError("Too many recursions when expanding variable value")
+
+ def getVar(self, var, expand=True):
+ """Get variable"""
+ if self._pkg_data:
+ return self._pkg_data.getVar(var, expand)
+ elif var in self._variables:
+ if expand:
+ return self.expand_val(self._variables[var])
+ else:
+ return self._variables[var]
+ return None
+
+ @staticmethod
+ def unquote_val(val):
+ """Unquote / strip variable value"""
+ return val.strip(string.whitespace + r'"\'\\')
+
+ @staticmethod
+ def unwrap_lines(lines):
+ """Return a joined string of multiple lines"""
+ return ''.join([re.sub(r'\\\s*$', '', line) for line in lines])
+
+ @staticmethod
+ def var_to_str(var, values, oper='+='):
+ """Create a well formatted string buffer containing a multiline variable
+ assignment"""
+ indent = ' ' * (len(var) + 2 + len(oper))
+ linebuf = ['%s %s "%s \\\n' % (var, oper, values[0])]
+ for val in values[1:]:
+ linebuf.append(indent + ' ' + val + '\\\n')
+ linebuf.append(indent + '"\n')
+ return linebuf
+
+ @staticmethod
+ def parse_file(filepath, cb_func):
+ """Parse recipe"""
+ ret_buf = []
+ with open(filepath) as fobj:
+ multiline = []
+ for line in fobj.readlines():
+ stripped = line.rstrip()
+ if not multiline:
+ if not stripped.endswith('\\'):
+ ret_buf.extend(cb_func([line]))
+ else:
+ multiline = [line]
+ else:
+ multiline.append(line)
+ if not stripped.endswith('\\'):
+ ret_buf.extend(cb_func(multiline))
+ multiline = []
+ return ret_buf
+
+ @staticmethod
+ def set_var_val(filepath, var, val):
+ """Set variable value in a recipe"""
+ class _Setter(object):
+ """Class for handling variable injections"""
+ def __init__(self):
+ self.was_set = False
+
+ def set_cb(self, lines):
+ """Parser callback for setting variable value"""
+ unwrapped = BBFile.unwrap_lines(lines)
+ match = BBFile.vardef_re.match(unwrapped)
+ if match and match.group('name') == var:
+ if not self.was_set:
+ self.was_set = True
+ print "Setting value %s = %s" % (var, val)
+ return ['%s = "%s"\n' % (var, val)]
+ else:
+ return []
+ return lines
+
+ # Parse file and set values
+ setter = _Setter()
+ linebuf = BBFile.parse_file(filepath, setter.set_cb)
+
+ # Write file
+ with open(filepath, 'w') as fobj:
+ if not setter.was_set:
+                fobj.write('%s = "%s"\n' % (var, val))
+ fobj.writelines(linebuf)
+
+ @staticmethod
+ def substitute_var_val(filepath, var, pattern, repl):
+        """Substitute values of a variable in a recipe"""
+ def subst_cb(lines):
+ """Parser callback for substituting variable values"""
+ unwrapped = BBFile.unwrap_lines(lines)
+ match = BBFile.vardef_re.match(unwrapped)
+ if match and match.group('name') == var:
+ filtered = []
+ for line in lines:
+ line = re.sub(pattern, repl, line)
+ # Drop empty lines
+ if not re.match(r'\s*\\\s*', line):
+ filtered.append(line)
+ return filtered
+ return lines
+
+ # Parse file and substitute values
+ linebuf = BBFile.parse_file(filepath, subst_cb)
+
+ # Write file
+ with open(filepath, 'w') as fobj:
+ fobj.writelines(linebuf)
+
+ @staticmethod
+ def append_var_val(filepath, var, new_vals):
+        """Append new values to a variable in a recipe"""
+ if not new_vals:
+ return
+
+ class _Finder(object):
+ """Class for recording definitions of variables"""
+ def __init__(self):
+ self.line_ind = 0
+ self.last_occurrence = -1
+
+ def find_last_occurrence_cb(self, lines):
+ """Get the point of insertion for the variable"""
+ unwrapped = BBFile.unwrap_lines(lines)
+ match = BBFile.vardef_re.match(unwrapped)
+ if match and match.group('name') == var:
+ self.last_occurrence = self.line_ind + len(lines) - 1
+ self.line_ind += len(lines)
+ return lines
+
+ finder = _Finder()
+ linebuf = BBFile.parse_file(filepath, finder.find_last_occurrence_cb)
+
+ # Prepare for appending values
+ quote = None
+ if finder.last_occurrence >= 0:
+ last_line = linebuf[finder.last_occurrence].rstrip()
+ # Guess indentation
+ match = BBFile.vardef_re.match(last_line)
+ if match:
+ indent = ' ' * (len(match.group(1)) + 1)
+ else:
+ indent = re.match(r'(\s*)', last_line).group(1)
+
+ # Guess point of insertion for new values and mangle the last line
+ if re.match(r'^\s*$', last_line[:-1]):
+ # Insert before the last line if it's an empty line (with a
+ # quotation character only)
+ insert_ind = finder.last_occurrence
+ indent += ' '
+ else:
+ # Else, remove the quotation character and append after the
+ # last line
+ quote = last_line[-1]
+ last_line = last_line[:-1] + ' \\\n'
+ linebuf[finder.last_occurrence] = last_line
+ insert_ind = finder.last_occurrence + 1
+ else:
+ indent = ' ' * (len(var) + 4)
+
+ # Write file
+ with open(filepath, 'w') as fobj:
+ if finder.last_occurrence > -1:
+ fobj.writelines(linebuf[:insert_ind])
+ for val in new_vals:
+ fobj.write(indent + val + ' \\\n')
+ if quote:
+ fobj.write(indent + quote + '\n')
+ fobj.writelines(linebuf[insert_ind:])
+ else:
+ fobj.writelines(BBFile.var_to_str(var, new_vals, '+='))
+ fobj.writelines(linebuf)
+
+def guess_bb_file(file_list, bbappend):
+ """Guess bb recipe from a list of filenames"""
+ recipes = []
+ file_exts = ['.bb'] if not bbappend else ['.bb', '.bbappend']
+ for ext in file_exts:
+ for filepath in file_list:
+ if filepath.endswith(ext):
+ gbp.log.debug("Found bb recipe file %s" % filepath)
+ recipes.append(filepath)
+ if len(recipes) == 0:
+ raise GbpError("No recipes found.")
+ return sorted(recipes)[-1]
+
+def bb_from_repo(cfg_data, repo, treeish, bb_path):
+ """Get and parse a bb recipe from a Git treeish"""
+ try:
+ tmpdir = tempfile.mkdtemp(prefix='gbp-bb_')
+ # Dump whole bb directory
+ dump_tree(repo, tmpdir, '%s:%s' % (treeish, os.path.dirname(bb_path)),
+ False)
+ fpath = os.path.join(tmpdir, os.path.basename(bb_path))
+ return BBFile(fpath, cfg_data)
+ except GitRepositoryError as err:
+ raise GbpError("Git error: %s" % err)
+ finally:
+ shutil.rmtree(tmpdir)
+
+def guess_bb_path_from_fs(topdir, recursive=True, bbappend=False):
+ """Guess a bitbake recipe file"""
+ file_list = []
+ if not topdir:
+ topdir = '.'
+ for root, dirs, files in os.walk(topdir):
+ file_list.extend([os.path.join(root, fname) for fname in files])
+ if not recursive:
+ del dirs[:]
+ # Skip .git dir in any case
+ if '.git' in dirs:
+ dirs.remove('.git')
+ return guess_bb_file(file_list, bbappend)
+
+def guess_bb_path_from_repo(repo, treeish=None, topdir='', recursive=True,
+ bbappend=False):
+ """Guess a bitbake recipe path from a git repository"""
+    topdir = topdir.rstrip('/') + '/' if topdir else ''
+ # Search from working copy
+ if not treeish:
+ abspath = guess_bb_path_from_fs(os.path.join(repo.path, topdir),
+ recursive, bbappend)
+ return os.path.relpath(abspath, repo.path)
+
+ # Search from treeish
+ try:
+ file_list = [nam for (mod, typ, sha, nam) in
+ repo.list_tree(treeish, recursive, topdir) if typ == 'blob']
+ except GitRepositoryError as err:
+ raise GbpError("Failed to search bb recipe from treeish %s, "
+ "Git error: %s" % (treeish, err))
+ return guess_bb_file(file_list, bbappend)
+
+def guess_bb_path(options, repo, treeish=None, bbappend=False):
+ """Guess recipe path, relative to repo rootdir"""
+ bb_path = options.bb_file
+ if options.bb_file:
+ if not treeish:
+ path = os.path.join(repo.path, bb_path)
+ if not os.path.exists(path):
+ raise GbpError("'%s' does not exist" % bb_path)
+ else:
+ try:
+ repo.show("%s:%s" % (treeish, bb_path))
+ except GbpError as err:
+ raise GbpError(str(err))
+ else:
+ bb_path = guess_bb_path_from_repo(repo, treeish, options.meta_dir,
+ bbappend=bbappend)
+ return bb_path
+
+def parse_bb(cfg_data, options, repo, treeish=None, bbappend=False):
+ """Find and parse a bb recipe from a repository"""
+ try:
+ bb_path = guess_bb_path(options, repo, treeish, bbappend=bbappend)
+ gbp.log.debug("Using recipe '%s'" % bb_path)
+ options.meta_dir = os.path.dirname(bb_path)
+ if treeish:
+ pkg_data = bb_from_repo(cfg_data, repo, treeish, bb_path)
+ else:
+ full_path = os.path.join(repo.path, bb_path)
+ pkg_data = BBFile(full_path, cfg_data)
+ except GbpError as err:
+ raise GbpError("Can't parse bb recipe: %s" % err)
+ return pkg_data
+
+
# Initialize module
bb = import_bb()
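For reference, BBFile.var_to_str() above is the formatting helper that append_var_val() falls back to when the variable is not yet defined in the recipe. A small illustrative call (the patch names are made up) and the multi-line assignment it produces:

from gbp.bb import BBFile

lines = BBFile.var_to_str('SRC_URI',
                          ['file://0001-fix.patch',
                           'file://0002-feature.patch'],
                          oper='+=')
print ''.join(lines)
# SRC_URI += "file://0001-fix.patch \
#             file://0002-feature.patch\
#            "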
diff --git a/gbp/scripts/pq_bb.py b/gbp/scripts/pq_bb.py
new file mode 100755
index 00000000..c8247bc8
--- /dev/null
+++ b/gbp/scripts/pq_bb.py
@@ -0,0 +1,427 @@
+# vim: set fileencoding=utf-8 :
+#
+# (C) 2011 Guido Günther <agx@sigxcpu.org>
+# (C) 2012 Intel Corporation <markus.lehtonen@linux.intel.com>
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+"""manage patches in a patch queue"""
+
+import ConfigParser
+import errno
+import os
+import shutil
+import sys
+
+import gbp.tmpfile as tempfile
+from gbp.config import GbpOptionParserBB
+from gbp.rpm.git import GitRepositoryError, RpmGitRepository
+from gbp.command_wrappers import GitCommand, CommandExecFailed
+from gbp.errors import GbpError
+import gbp.log
+from gbp.patch_series import PatchSeries, Patch
+from gbp.rpm import string_to_int
+from gbp.scripts.common.pq import (is_pq_branch, pq_branch_name, pq_branch_base,
+ apply_and_commit_patch, drop_pq)
+from gbp.scripts.pq_rpm import (generate_patches, safe_patches,
+ import_extra_files)
+from gbp.bb import bb, init_tinfoil, parse_bb, pkg_version
+
+# pylint: disable=bad-continuation
+
+USAGE_STRING = \
+"""%prog [options] action - maintain patches on a patch queue branch
+Actions:
+export         Export the patch queue / devel branch associated with the
+               current branch into a patch series and update the recipe file
+import         Create a patch queue / devel branch from the recipe file
+               and the patches in the current dir.
+rebase         Switch to the patch queue / devel branch associated with the
+               current branch and rebase it against upstream.
+drop           Drop (delete) the patch queue / devel branch associated with
+               the current branch.
+apply          Apply a patch
+switch         Switch to the patch-queue branch and vice versa."""
+
+
+def rm_patch_files(bbfile):
+ """Delete the patch files listed in the pkg meta data."""
+ unlinked = set()
+
+ # Go through local files
+ for path in bbfile.localfiles:
+ if path.endswith('.patch'):
+ gbp.log.debug("Removing patch '%s'" % path)
+ unlinked.add(os.path.basename(path))
+ try:
+ os.unlink(path)
+ except OSError as err:
+ if err.errno != errno.ENOENT:
+ raise GbpError("Failed to remove patch: %s" % err)
+ else:
+ gbp.log.debug("Patch %s does not exist." % path)
+ else:
+ gbp.log.debug("Unlink skipping non-local/non-patch file %s" % path)
+ uris = (bbfile.getVar('SRC_URI', False) or "").split()
+ return [uri for uri in uris if os.path.basename(uri) not in unlinked]
+
+
+def update_patch_series(repo, bbfile, start, end, options):
+ """Export patches to packaging directory and update recipe file"""
+ squash = options.patch_export_squash_until.split(':', 1)
+ if len(squash) == 1:
+ squash.append(None)
+ else:
+ squash[1] += '.diff'
+
+ # Unlink old (local) patch files and generate new patches
+ rm_patch_files(bbfile)
+
+ # Guess patch subdir
+ bb_dir = os.path.dirname(bbfile.getVar('FILE', True))
+ pkg_name = bbfile.getVar('PN', True)
+ pkg_ver = bbfile.getVar('PV', True)
+ subdir = pkg_name + '-' + pkg_ver
+ if not os.path.isdir(os.path.join(bb_dir, subdir)):
+ if os.path.isdir(os.path.join(bb_dir, pkg_name)):
+ subdir = pkg_name
+ elif os.path.isdir(os.path.join(bb_dir, 'files')):
+ subdir = 'files'
+ tgt_dir = os.path.join(bb_dir, subdir)
+
+ patches, _commands = generate_patches(repo, start, squash, end,
+ tgt_dir, options)
+ # TODO: implement commands processing (e.g. topic)
+ new_uris = ['file://' + patch for patch in patches]
+    bbfile.substitute_var_val(bbfile.bb_path, 'SRC_URI', r'file://\S+\.patch',
+ '')
+ bbfile.append_var_val(bbfile.bb_path, 'SRC_URI', new_uris)
+ return patches
+
+def var_to_str(var, value):
+ """Create a well formatted string buffer for a variable assignment"""
+ indent = ' ' * (len(var) + 3)
+ linebuf = ['%s = "%s \\\n' % (var, value[0])]
+ for val in value[1:]:
+ linebuf.append(indent + ' ' + val + '\\\n')
+ linebuf.append(indent + '"\n')
+ return linebuf
+
+
+def find_upstream_commit(repo, bbfile, upstream_tag):
+    """Find the commit corresponding to the upstream version"""
+ src_rev = bbfile.getVar('SRCREV', True)
+ if src_rev and src_rev != 'INVALID':
+        return src_rev
+
+ # Find tag
+ upstreamversion = bbfile.getVar('PV', True)
+ tag_str_fields = {'upstreamversion': upstreamversion,
+ 'vendor': 'Upstream'}
+ upstream_commit = repo.find_version(upstream_tag, tag_str_fields)
+ if not upstream_commit:
+ raise GbpError("Couldn't find upstream version %s" % upstreamversion)
+ return upstream_commit
+
+
+def export_patches(cfg, repo, options):
+ """Export patches from the pq branch into a packaging branch"""
+ current = repo.get_branch()
+ if is_pq_branch(current, options):
+ base = pq_branch_base(current, options)
+ gbp.log.info("On branch '%s', switching to '%s'" % (current, base))
+ repo.set_branch(base)
+ bbfile = parse_bb(cfg, options, repo)
+ pq_branch = current
+ else:
+ bbfile = parse_bb(cfg, options, repo)
+ pq_branch = pq_branch_name(current, options, pkg_version(bbfile))
+ upstream_commit = find_upstream_commit(repo, bbfile, options.upstream_tag)
+
+ export_treeish = options.export_rev if options.export_rev else pq_branch
+
+ update_patch_series(repo, bbfile, upstream_commit, export_treeish, options)
+
+ bb_dir = os.path.dirname(bbfile.getVar('FILE', True))
+ GitCommand('status')(['--', bb_dir])
+
+
+def bb_to_patch_series(bbfile):
+ """Get all local patches as a series"""
+ series = PatchSeries()
+ for path in bbfile.localfiles:
+ if path.endswith('.patch'):
+ series.append(Patch(path))
+ return series
+
+
+def import_bb_patches(cfg, repo, options):
+    """Apply the patches listed in a recipe onto a patch-queue branch"""
+ current = repo.get_branch()
+
+ if is_pq_branch(current, options):
+ base = pq_branch_base(current, options)
+ raise GbpError("Already on a patch-queue branch '%s' - doing "
+ "nothing." % current)
+ else:
+ bbfile = parse_bb(cfg, options, repo)
+ base = current
+ upstream_commit = find_upstream_commit(repo, bbfile, options.upstream_tag)
+ pq_branch = pq_branch_name(base, options, pkg_version(bbfile))
+
+ # Create pq-branch
+ if repo.has_branch(pq_branch) and not options.force:
+ raise GbpError("Patch-queue branch '%s' already exists. "
+ "Try 'rebase' instead." % pq_branch)
+ try:
+ if repo.get_branch() == pq_branch:
+ repo.force_head(upstream_commit, hard=True)
+ else:
+ repo.create_branch(pq_branch, upstream_commit, force=True)
+ except GitRepositoryError as err:
+ raise GbpError("Cannot create patch-queue branch '%s': %s" %
+ (pq_branch, err))
+
+ # Put patches in a safe place
+ in_queue = bb_to_patch_series(bbfile)
+ queue = safe_patches(in_queue, options.tmp_dir)
+ # Do import
+ try:
+ gbp.log.info("Switching to branch '%s'" % pq_branch)
+ repo.set_branch(pq_branch)
+ import_extra_files(repo, base, options.import_files)
+
+ if not queue:
+ return
+ gbp.log.info("Trying to apply patches from branch '%s' onto '%s'" %
+ (base, upstream_commit))
+ for patch in queue:
+ gbp.log.debug("Applying %s" % patch.path)
+ apply_and_commit_patch(repo, patch, fallback_author=None)
+ except (GbpError, GitRepositoryError) as err:
+ gbp.log.err('Import failed: %s' % err)
+ repo.force_head('HEAD', hard=True)
+ repo.set_branch(base)
+ repo.delete_branch(pq_branch)
+ raise
+
+ recipe_fn = os.path.basename(bbfile.getVar('FILE', True))
+ gbp.log.info("Patches listed in '%s' imported on '%s'" % (recipe_fn,
+ pq_branch))
+
+
+def rebase_pq(cfg, repo, options):
+ """Rebase pq branch on the correct upstream version"""
+ current = repo.get_branch()
+ if is_pq_branch(current, options):
+ base = pq_branch_base(current, options)
+ bbfile = parse_bb(cfg, options, repo, base)
+ else:
+ base = current
+ bbfile = parse_bb(cfg, options, repo)
+ upstream_commit = find_upstream_commit(repo, bbfile, options.upstream_tag)
+
+ switch_to_pq_branch(cfg, repo, base, options)
+ GitCommand("rebase")([upstream_commit])
+
+
+def switch_pq(cfg, repo, options):
+ """Switch to patch-queue branch if on base branch and vice versa"""
+ current = repo.get_branch()
+ if is_pq_branch(current, options):
+ base = pq_branch_base(current, options)
+ gbp.log.info("Switching to branch '%s'" % base)
+ repo.checkout(base)
+ else:
+ switch_to_pq_branch(cfg, repo, current, options)
+
+
+def drop_pq_bb(cfg, repo, options):
+ """Remove pq branch"""
+ current = repo.get_branch()
+ if is_pq_branch(current, options):
+ base = pq_branch_base(current, options)
+ bbfile = parse_bb(cfg, options, repo, base)
+ else:
+ bbfile = parse_bb(cfg, options, repo)
+ drop_pq(repo, current, options, pkg_version(bbfile))
+
+
+def switch_to_pq_branch(cfg, repo, branch, options):
+ """
+ Switch to patch-queue branch if not already there, create it if it
+ doesn't exist yet
+ """
+ if is_pq_branch(branch, options):
+ return
+
+ bbfile = parse_bb(cfg, options, repo, branch)
+ pq_branch = pq_branch_name(branch, options, pkg_version(bbfile))
+ if not repo.has_branch(pq_branch):
+ raise GbpError("Branch '%s' does not exist" % pq_branch)
+
+ gbp.log.info("Switching to branch '%s'" % pq_branch)
+ repo.set_branch(pq_branch)
+
+def apply_single_patch(cfg, repo, patchfile, options):
+ """Apply a single patch onto the pq branch"""
+ current = repo.get_branch()
+ if not is_pq_branch(current, options):
+ switch_to_pq_branch(cfg, repo, current, options)
+ patch = Patch(patchfile)
+ apply_and_commit_patch(repo, patch, fallback_author=None)
+
+def opt_split_cb(option, opt_str, value, parser):
+ """Split option string into a list"""
+ setattr(parser.values, option.dest, value.split(','))
+
+def build_parser(name):
+ """Create command line argument parser"""
+ try:
+ parser = GbpOptionParserBB(command=os.path.basename(name),
+ prefix='', usage=USAGE_STRING)
+ except ConfigParser.ParsingError as err:
+ gbp.log.err(err)
+ return None
+
+ parser.add_boolean_config_file_option(option_name="patch-numbers",
+ dest="patch_numbers")
+ parser.add_option("-v", "--verbose", action="store_true", dest="verbose",
+ default=False, help="Verbose command execution")
+ parser.add_option("--force", dest="force", action="store_true",
+ default=False,
+                      help="Import even if the patch-queue branch already exists")
+ parser.add_config_file_option(option_name="vendor", action="store",
+ dest="vendor")
+ parser.add_config_file_option(option_name="color", dest="color",
+ type='tristate')
+ parser.add_config_file_option(option_name="color-scheme",
+ dest="color_scheme")
+ parser.add_config_file_option(option_name="tmp-dir", dest="tmp_dir")
+ parser.add_config_file_option(option_name="upstream-tag",
+ dest="upstream_tag")
+ parser.add_config_file_option(option_name="bb-file", dest="bb_file")
+ parser.add_config_file_option(option_name="meta-dir",
+ dest="meta_dir")
+ parser.add_config_file_option(option_name="packaging-branch",
+ dest="packaging_branch",
+ help="Branch the packaging is being maintained on. Only relevant "
+        "if an invariable/single pq-branch is defined, in which case "
+ "this is used as the 'base' branch. Default is "
+ "'%(packaging-branch)s'")
+ parser.add_config_file_option(option_name="pq-branch", dest="pq_branch")
+ parser.add_config_file_option(option_name="import-files",
+ dest="import_files", type="string", action="callback",
+ callback=opt_split_cb)
+ parser.add_option("--export-rev", action="store", dest="export_rev",
+ default="",
+ help="Export patches from treeish object TREEISH instead of head "
+ "of patch-queue branch", metavar="TREEISH")
+ parser.add_config_file_option("patch-export-compress",
+ dest="patch_export_compress")
+ parser.add_config_file_option("patch-export-squash-until",
+ dest="patch_export_squash_until")
+ parser.add_config_file_option("patch-export-ignore-path",
+ dest="patch_export_ignore_path")
+ return parser
+
+def parse_args(argv):
+ """Parse command line arguments"""
+ parser = build_parser(argv[0])
+ if not parser:
+ return None, None
+
+ options, args = parser.parse_args(argv)
+ gbp.log.setup(options.color, options.verbose, options.color_scheme)
+ options.patch_export_compress = string_to_int(options.patch_export_compress)
+
+ return options, args
+
+
+def main(argv):
+    """Main function for the gbp pq-bb command"""
+ retval = 0
+
+ if not bb:
+ return 1
+
+ options, args = parse_args(argv)
+ if not options:
+ return 1
+
+ if len(args) < 2:
+ gbp.log.err("No action given.")
+ return 1
+ else:
+ action = args[1]
+
+ if args[1] in ["export", "import", "rebase", "drop", "switch"]:
+ pass
+ elif args[1] in ["apply"]:
+ if len(args) != 3:
+ gbp.log.err("No patch name given.")
+ return 1
+ else:
+ patchfile = args[2]
+ else:
+ gbp.log.err("Unknown action '%s'." % args[1])
+ return 1
+
+ try:
+ repo = RpmGitRepository(os.path.curdir)
+ except GitRepositoryError:
+ gbp.log.err("%s is not a git repository" % (os.path.abspath('.')))
+ return 1
+
+ if os.path.abspath('.') != repo.path:
+ gbp.log.warn("Switching to topdir before running commands")
+ os.chdir(repo.path)
+
+ try:
+ # Initialize BitBake
+ tinfoil = init_tinfoil(config_only=True, tracking=True)
+ bb_cfg_data = bb.data.createCopy(tinfoil.config_data)
+
+ # Create base temporary directory for this run
+ options.tmp_dir = tempfile.mkdtemp(dir=options.tmp_dir,
+ prefix='gbp-pq-bb_')
+ if action == "export":
+ export_patches(bb_cfg_data, repo, options)
+ elif action == "import":
+ import_bb_patches(bb_cfg_data, repo, options)
+ elif action == "drop":
+ drop_pq_bb(bb_cfg_data, repo, options)
+ elif action == "rebase":
+ rebase_pq(bb_cfg_data, repo, options)
+ elif action == "apply":
+ apply_single_patch(bb_cfg_data, repo, patchfile, options)
+ elif action == "switch":
+ switch_pq(bb_cfg_data, repo, options)
+ except CommandExecFailed:
+ retval = 1
+ except GitRepositoryError as err:
+ gbp.log.err("Git command failed: %s" % err)
+ retval = 1
+    except GbpError as err:
+        if str(err):
+ gbp.log.err(err)
+ retval = 1
+ finally:
+ shutil.rmtree(options.tmp_dir, ignore_errors=True)
+
+ return retval
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
+
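The USAGE_STRING and the dispatch at the end of main() above define the supported actions. A hedged sketch of invoking the script programmatically follows; the 'gbp-pq-bb' program name is an assumption, and an initialized BitBake environment plus a git repository in the current directory are required for the call to do anything useful.

import sys

from gbp.scripts.pq_bb import main

# argv[0] is only used as the option parser's command name; argv[1] selects
# the action (export, import, rebase, drop, apply, switch) and "apply" also
# expects a patch file as argv[2].
sys.exit(main(['gbp-pq-bb', 'import']))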
diff --git a/gbp/scripts/pq_rpm.py b/gbp/scripts/pq_rpm.py
index ee076aa3..d8fcbf2f 100755
--- a/gbp/scripts/pq_rpm.py
+++ b/gbp/scripts/pq_rpm.py
@@ -243,14 +243,13 @@ def parse_spec(options, repo, treeish=None):
return spec
-def find_upstream_commit(repo, spec, upstream_tag):
+def find_upstream_commit(repo, upstreamversion, upstream_tag):
"""Find commit corresponding upstream version"""
- tag_str_fields = {'upstreamversion': spec.upstreamversion,
+ tag_str_fields = {'upstreamversion': upstreamversion,
'vendor': 'Upstream'}
upstream_commit = repo.find_version(upstream_tag, tag_str_fields)
if not upstream_commit:
- raise GbpError("Couldn't find upstream version %s" %
- spec.upstreamversion)
+ raise GbpError("Couldn't find upstream version %s" % upstreamversion)
return upstream_commit
@@ -266,7 +265,8 @@ def export_patches(repo, options):
else:
spec = parse_spec(options, repo)
pq_branch = pq_branch_name(current, options, spec.version)
- upstream_commit = find_upstream_commit(repo, spec, options.upstream_tag)
+ upstream_commit = find_upstream_commit(repo, spec.upstreamversion,
+ options.upstream_tag)
export_treeish = options.export_rev if options.export_rev else pq_branch
@@ -377,7 +377,8 @@ def import_spec_patches(repo, options):
spec = parse_spec(options, repo)
spec_treeish = None
base = current
- upstream_commit = find_upstream_commit(repo, spec, options.upstream_tag)
+ upstream_commit = find_upstream_commit(repo, spec.upstreamversion,
+ options.upstream_tag)
packager = get_packager(spec)
pq_branch = pq_branch_name(base, options, spec.version)
@@ -434,7 +435,8 @@ def rebase_pq(repo, options):
else:
base = current
spec = parse_spec(options, repo)
- upstream_commit = find_upstream_commit(repo, spec, options.upstream_tag)
+ upstream_commit = find_upstream_commit(repo, spec.upstreamversion,
+ options.upstream_tag)
switch_to_pq_branch(repo, base, options)
GitCommand("rebase")([upstream_commit])
@@ -500,7 +502,8 @@ def convert_package(repo, options):
pq_branch = pq_branch_name(old_packaging, options, spec.version)
raise GbpError(err_msg_base + "pq branch %s already exists" % pq_branch)
# Check that the current branch is based on upstream
- upstream_commit = find_upstream_commit(repo, spec, options.upstream_tag)
+ upstream_commit = find_upstream_commit(repo, spec.upstreamversion,
+ options.upstream_tag)
if not is_ancestor(repo, upstream_commit, old_packaging):
raise GbpError(err_msg_base + "%s is not based on upstream version %s" %
(old_packaging, spec.upstreamversion))