author    Chulwoo Shin <cw1.shin@samsung.com>  2017-05-17 14:29:42 +0900
committer Chulwoo Shin <cw1.shin@samsung.com>  2017-05-17 14:29:42 +0900
commit    d79ac30dfe99fee5248a32a8200bb5ee50db637a (patch)
tree      09ba700197e638832a00079c5e48ae9b64d34e58
parent    ad3e8374749c2ca429cce0656c2d26e7339c83ad (diff)
[TIC-CORE] support the supplements tag
- support dependency analysis for supplements tag
- Add dependency analysis verification test case

Change-Id: I70e621f9fa338d769d569d4fc3f72ba0c10d5e45
Signed-off-by: Chulwoo Shin <cw1.shin@samsung.com>
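In rpm metadata, "A supplements B" is the inverse of "B recommends A": once B is installed, A becomes a weak install candidate. The parser in this change indexes supplements by the supplemented capability name, and the dependency analyzer queues the supplementing packages whenever a provider of that capability is selected. A minimal sketch of that lookup, mirroring the loop added in tic/dependency.py (package names are illustrative):

    # supplements index as built in tic/parser/repo_parser.py:
    #   capability name -> supplementing packages
    supplements = {'bluez': [{'name': 'bluez-config', 'data': {'name': 'bluez'}}]}
    selected_pkg = {'name': 'bluez', 'provides': [{'name': 'bluez'}]}
    weak_deps = []
    for provide in selected_pkg['provides']:
        if provide['name'] in supplements:
            # 'bluez-config' is queued as a weak install candidate
            weak_deps.extend(supplements[provide['name']])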
-rw-r--r--  test/test_dependency.py     1023
-rw-r--r--  tic/command.py                85
-rw-r--r--  tic/config.py                 23
-rw-r--r--  tic/dependency.py            173
-rw-r--r--  tic/parser/recipe_parser.py   45
-rw-r--r--  tic/parser/repo_parser.py     47
-rw-r--r--  tic/repo.py                    4
-rw-r--r--  tic/utils/file.py              2
-rw-r--r--  tic/utils/grabber.py           8
-rw-r--r--  tic/utils/rpmmisc.py          10
10 files changed, 1161 insertions, 259 deletions
diff --git a/test/test_dependency.py b/test/test_dependency.py
index 368097d..b09100f 100644
--- a/test/test_dependency.py
+++ b/test/test_dependency.py
@@ -16,122 +16,953 @@
# Contributors:
# - S-Core Co., Ltd
+
+import sys
+sys.path.append('/usr/lib64/python2.7/site-packages')
+
import os
-import time
+import shutil
import unittest
+import urlparse
+import rpm
+import zypp
+if not hasattr(zypp, 'PoolQuery') or \
+ not hasattr(zypp.RepoManager, 'loadSolvFile'):
+ raise ImportError("python-zypp in host system cannot support PoolQuery or "
+ "loadSolvFile interface, please update it to enhanced "
+ "version which can be found in download.tizen.org/tools")
-#from tic.dependency import analyze_dependency
-from tic.parser.repo_parser import RepodataParser
+from mic.utils.proxy import get_proxy_for
+from mic.utils.errors import CreatorError, RepoError, RpmError
+from mic.utils import fs_related, rpmmisc, runner
+from mic import msger
+from tic.parser.recipe_parser import default_recipe, RecipeParser
+from tic.dependency import get_installed_packages
from tic.repo import get_repodata_from_repos
+from tic.utils import misc
+from tic.command import DEFAULT_CACHEDIR, get_pkg_metadata
+from tic import config
-current_milli_time = lambda: int(round(time.time() * 1000))
-CWD = os.path.dirname(__file__) or '.'
-TEST_REPODATA_LOC=os.path.join(CWD, 'dependency_fixtures')
-DEFAULT_CACHEDIR='/var/tmp/tic-core/cached'
def suite():
- # return unittest.makeSuite(DependencyTest, ['testsun1', 'testTrue'])
return unittest.makeSuite(DependencyTest)
+class RepositoryStub:
+ def __init__(self):
+ self.name = None
+ self.baseurl = []
+ self.mirrorlist = None
+ self.proxy = None
+ self.proxy_username = None
+ self.proxy_password = None
+ self.nocache = False
+
+ self.enabled = True
+ self.autorefresh = True
+ self.keeppackages = True
+ self.priority = None
+
+class Zypp():
+ name = 'zypp'
+
+ def __init__(self, target_arch, instroot, cachedir, strict_mode = False):
+ self.cachedir = cachedir
+ self.instroot = instroot
+ self.target_arch = target_arch
+ self.strict_mode = strict_mode
+
+ self.__pkgs_license = {}
+ self.__pkgs_content = {}
+ self.__pkgs_vcsinfo = {}
+ self.repos = []
+ self.to_deselect = []
+ self.localpkgs = {}
+ self.repo_manager = None
+ self.repo_manager_options = None
+ self.Z = None
+ self.ts = None
+ self.ts_pre = None
+ self.incpkgs = {}
+ self.excpkgs = {}
+ self.pre_pkgs = []
+ self.check_pkgs = []
+ self.probFilterFlags = [ rpm.RPMPROB_FILTER_OLDPACKAGE,
+ rpm.RPMPROB_FILTER_REPLACEPKG ]
+
+ self.has_prov_query = True
+ self.install_debuginfo = False
+ # this can't be changed, it is used by zypp
+ self.tmp_file_path = '/var/tmp'
+
+ def doFileLogSetup(self, uid, logfile):
+ # don't do the file log for the livecd as it can lead to open fds
+ # being left and an inability to clean up after ourself
+ pass
+
+ def closeRpmDB(self):
+ pass
+
+ def close(self):
+ if self.ts:
+ self.ts.closeDB()
+ self.ts = None
+
+ if self.ts_pre:
+ self.ts_pre.closeDB()
+ self.ts_pre = None
+
+ self.closeRpmDB()
+
+ def __del__(self):
+ self.close()
+
+ def _cleanupRpmdbLocks(self, installroot):
+ # cleans up temporary files left by bdb so that differing
+ # versions of rpm don't cause problems
+ import glob
+ for f in glob.glob(installroot + "/var/lib/rpm/__db*"):
+ os.unlink(f)
+
+ def _cleanupZyppJunk(self, installroot):
+ try:
+ shutil.rmtree(os.path.join(installroot, '.zypp'))
+ except:
+ pass
+
+ def setup(self):
+ self._cleanupRpmdbLocks(self.instroot)
+ # '/var/tmp' is used by zypp to build its cache, so make sure
+ # it exists
+ if not os.path.exists(self.tmp_file_path):
+ os.makedirs(self.tmp_file_path)
+
+ def whatObsolete(self, pkg):
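+ # return the first pool item that obsoletes pkg's name/edition, or None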
+ query = zypp.PoolQuery()
+ query.addKind(zypp.ResKind.package)
+ query.addDependency(zypp.SolvAttr.obsoletes, pkg.name(), pkg.edition())
+ query.setMatchExact()
+ for pi in query.queryResults(self.Z.pool()):
+ return pi
+ return None
+
+ def _zyppQueryPackage(self, pkg):
+ query = zypp.PoolQuery()
+ query.addKind(zypp.ResKind.package)
+ query.addAttribute(zypp.SolvAttr.name, pkg)
+ query.setMatchExact()
+ for pi in query.queryResults(self.Z.pool()):
+ return pi
+ return None
+
+ def _splitPkgString(self, pkg):
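+ # split 'name.arch'; if the suffix is not an arch compatible with the
+ # target, treat the whole string as the package name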
+ sp = pkg.rsplit(".", 1)
+ name = sp[0]
+ arch = None
+ if len(sp) == 2:
+ arch = sp[1]
+ sysarch = zypp.Arch(self.target_arch)
+ if not zypp.Arch(arch).compatible_with (sysarch):
+ arch = None
+ name = ".".join(sp)
+ return name, arch
+
+ def selectPackage(self, pkg):
+ """Select a given package or package pattern, can be specified
+ with name.arch or name* or *name
+ """
+
+ if not self.Z:
+ self.__initialize_zypp()
+
+ def markPoolItem(obs, pi):
+ if obs is None:
+ pi.status().setToBeInstalled (zypp.ResStatus.USER)
+ else:
+ obs.status().setToBeInstalled (zypp.ResStatus.USER)
+
+ def cmpEVR(p1, p2):
+ # compare criterion: arch compatibility first, then repo
+ # priority, and version last
+ a1 = p1.arch()
+ a2 = p2.arch()
+ if str(a1) != str(a2):
+ if a1.compatible_with(a2):
+ return -1
+ else:
+ return 1
+ # Priority of a repository is an integer value between 0 (the
+ # highest priority) and 99 (the lowest priority)
+ pr1 = int(p1.repoInfo().priority())
+ pr2 = int(p2.repoInfo().priority())
+ if pr1 > pr2:
+ return -1
+ elif pr1 < pr2:
+ return 1
+
+ ed1 = p1.edition()
+ ed2 = p2.edition()
+ (e1, v1, r1) = map(str, [ed1.epoch(), ed1.version(), ed1.release()])
+ (e2, v2, r2) = map(str, [ed2.epoch(), ed2.version(), ed2.release()])
+ return rpm.labelCompare((e1, v1, r1), (e2, v2, r2))
+
+ found = False
+ startx = pkg.startswith("*")
+ endx = pkg.endswith("*")
+ ispattern = startx or endx
+ name, arch = self._splitPkgString(pkg)
+
+ q = zypp.PoolQuery()
+ q.addKind(zypp.ResKind.package)
+
+ if ispattern:
+ if startx and not endx:
+ pattern = '%s$' % (pkg[1:])
+ if endx and not startx:
+ pattern = '^%s' % (pkg[0:-1])
+ if endx and startx:
+ pattern = '%s' % (pkg[1:-1])
+ q.setMatchRegex()
+ q.addAttribute(zypp.SolvAttr.name, pattern)
+
+ elif arch:
+ q.setMatchExact()
+ q.addAttribute(zypp.SolvAttr.name, name)
+
+ else:
+ q.setMatchExact()
+ q.addAttribute(zypp.SolvAttr.name, pkg)
+
+ for pitem in sorted(
+ q.queryResults(self.Z.pool()),
+ cmp=lambda x,y: cmpEVR(zypp.asKindPackage(x), zypp.asKindPackage(y)),
+ reverse=True):
+ item = zypp.asKindPackage(pitem)
+ if item.name() in self.excpkgs.keys() and \
+ self.excpkgs[item.name()] == item.repoInfo().name():
+ continue
+ if item.name() in self.incpkgs.keys() and \
+ self.incpkgs[item.name()] != item.repoInfo().name():
+ continue
+
+ found = True
+ obspkg = self.whatObsolete(item)
+ if arch:
+ if arch == str(item.arch()):
+ pitem.status().setToBeInstalled (zypp.ResStatus.USER)
+ else:
+ markPoolItem(obspkg, pitem)
+ if not ispattern:
+ break
+
+ # No match by package name, so fall back to searching the
+ # package provides information
+ if not found and not ispattern:
+ q.addAttribute(zypp.SolvAttr.provides, pkg)
+ q.addAttribute(zypp.SolvAttr.name,'')
+
+ for pitem in sorted(
+ q.queryResults(self.Z.pool()),
+ cmp=lambda x,y: cmpEVR(zypp.asKindPackage(x), zypp.asKindPackage(y)),
+ reverse=True):
+ item = zypp.asKindPackage(pitem)
+ if item.name() in self.excpkgs.keys() and \
+ self.excpkgs[item.name()] == item.repoInfo().name():
+ continue
+ if item.name() in self.incpkgs.keys() and \
+ self.incpkgs[item.name()] != item.repoInfo().name():
+ continue
+
+ found = True
+ obspkg = self.whatObsolete(item)
+ markPoolItem(obspkg, pitem)
+ break
+
+ if found:
+ return None
+ else:
+ raise CreatorError("Unable to find package: %s" % (pkg,))
+
+ def inDeselectPackages(self, pitem):
+ """check if specified pacakges are in the list of inDeselectPackages
+ """
+ item = zypp.asKindPackage(pitem)
+ name = item.name()
+ for pkg in self.to_deselect:
+ startx = pkg.startswith("*")
+ endx = pkg.endswith("*")
+ ispattern = startx or endx
+ pkgname, pkgarch = self._splitPkgString(pkg)
+ if not ispattern:
+ if pkgarch:
+ if name == pkgname and str(item.arch()) == pkgarch:
+ return True
+ else:
+ if name == pkgname:
+ return True
+ else:
+ if startx and name.endswith(pkg[1:]):
+ return True
+ if endx and name.startswith(pkg[:-1]):
+ return True
+
+ return False
+
+ def deselectPackage(self, pkg):
+ """collect packages should not be installed"""
+ self.to_deselect.append(pkg)
+
+ def addRepository(self, name,
+ url = None,
+ mirrorlist = None,
+ proxy = None,
+ proxy_username = None,
+ proxy_password = None,
+ inc = None,
+ exc = None,
+ ssl_verify = False,
+ nocache = False,
+ cost=None,
+ priority=None):
+ # TODO: Handle cost attribute for repos
+
+ if not self.repo_manager:
+ self.__initialize_repo_manager()
+
+ if not proxy and url:
+ proxy = get_proxy_for(url)
+ if not proxy:
+ proxy = 'http://172.21.110.79:3128'
+
+ repo = RepositoryStub()
+ repo.name = name
+ repo.id = name
+ repo.proxy = proxy
+ repo.proxy_username = proxy_username
+ repo.proxy_password = proxy_password
+ repo.ssl_verify = ssl_verify
+ repo.nocache = nocache
+ repo.baseurl.append(url)
+ if inc:
+ for pkg in inc:
+ self.incpkgs[pkg] = name
+ if exc:
+ for pkg in exc:
+ self.excpkgs[pkg] = name
+
+ if mirrorlist:
+ repo.mirrorlist = mirrorlist
+
+ # Enable gpg check for verifying corrupt packages
+ repo.gpgcheck = 1
+ if priority is not None:
+ # priority 0 has issue in RepoInfo.setPriority
+ repo.priority = priority + 1
+
+ try:
+ repo_info = zypp.RepoInfo()
+ repo_info.setAlias(repo.name)
+ repo_info.setName(repo.name)
+ repo_info.setEnabled(repo.enabled)
+ repo_info.setAutorefresh(repo.autorefresh)
+ repo_info.setKeepPackages(repo.keeppackages)
+ baseurl = zypp.Url(repo.baseurl[0])
+
+ print(baseurl)
+
+ if not ssl_verify:
+ baseurl.setQueryParam("ssl_verify", "no")
+
+ print(baseurl)
+ if proxy:
+ host = urlparse.urlparse(proxy)[1]
+ # scheme, host, path, parm, query, frag = urlparse.urlparse(proxy)
+
+ proxyinfo = host.rsplit(":", 1)
+ host = proxyinfo[0]
+
+ port = "80"
+ if len(proxyinfo) > 1:
+ port = proxyinfo[1]
+
+ if proxy.startswith("socks") and len(proxy.rsplit(':', 1)) == 2:
+ host = proxy.rsplit(':', 1)[0]
+ port = proxy.rsplit(':', 1)[1]
+
+ # parse user/pass from proxy host
+ proxyinfo = host.rsplit("@", 1)
+ if len(proxyinfo) == 2:
+ host = proxyinfo[1]
+ # Known Issue: If password contains ":", which should be
+ # quoted, for example, use '123%3Aabc' instead of 123:abc
+ userpassinfo = proxyinfo[0].rsplit(":", 1)
+ if len(userpassinfo) == 2:
+ proxy_username = userpassinfo[0]
+ proxy_password = userpassinfo[1]
+ elif len(userpassinfo) == 1:
+ proxy_username = userpassinfo[0]
+
+ baseurl.setQueryParam ("proxy", host)
+ baseurl.setQueryParam ("proxyport", port)
+ if proxy_username:
+ baseurl.setQueryParam ("proxyuser", proxy_username)
+ if proxy_password:
+ baseurl.setQueryParam ("proxypass", proxy_password)
+ else:
+ baseurl.setQueryParam ("proxy", "_none_")
+
+ self.repos.append(repo)
+
+ repo_info.addBaseUrl(baseurl)
+
+ if repo.priority is not None:
+ repo_info.setPriority(repo.priority)
+
+ # this hack is used to change zypp credential file location
+ # the default one is $HOME/.zypp, which cause conflicts when
+ # installing some basic packages, and the location doesn't
+ # have any interface actually, so use a tricky way anyway
+ homedir = None
+ if 'HOME' in os.environ:
+ homedir = os.environ['HOME']
+ os.environ['HOME'] = '/'
+ else:
+ os.environ['HOME'] = '/'
+
+ self.repo_manager.addRepository(repo_info)
+
+ # save back the $HOME env
+ if homedir:
+ os.environ['HOME'] = homedir
+ else:
+ del os.environ['HOME']
+
+ self.__build_repo_cache(name)
+
+ except RuntimeError, e:
+ print(e)
+ print(str(e))
+ raise CreatorError(str(e))
+
+ print('repo: %s was added' % name)
+ return repo
+
+ def runInstall(self, checksize = 0):
+ os.environ["HOME"] = "/"
+ os.environ["LD_PRELOAD"] = ""
+ self.buildTransaction()
+
+ todo = zypp.GetResolvablesToInsDel(self.Z.pool())
+ installed_pkgs = todo._toInstall
+ dlpkgs = []
+
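+ # keep only real packages: skip patterns and anything explicitly deselected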
+ for pitem in installed_pkgs:
+ if not zypp.isKindPattern(pitem) and \
+ not self.inDeselectPackages(pitem):
+ item = zypp.asKindPackage(pitem)
+ dlpkgs.append(item)
+
+ if item.name() in self.check_pkgs:
+ self.check_pkgs.remove(item.name())
+
+ if not self.install_debuginfo or str(item.arch()) == "noarch":
+ continue
+
+ dipkg = self._zyppQueryPackage("%s-debuginfo" % item.name())
+ if dipkg:
+ ditem = zypp.asKindPackage(dipkg)
+ dlpkgs.append(ditem)
+ else:
+ print("No debuginfo rpm found for: %s" % item.name())
+
+ if self.check_pkgs:
+ raise CreatorError('Packages absent in image: %s' % ','.join(self.check_pkgs))
+
+ # record name/arch/version/release for each package to install
+ install_pkgs = {}
+ localpkgs = self.localpkgs.keys()
+ for pkg in dlpkgs:
+# if pkg.name() in localpkgs:
+# hdr = rpmmisc.readRpmHeader(self.ts, self.localpkgs[pkg.name()])
+# install_pkgs[pkg.name()] = {'name': pkg.name(),
+# 'arch': pkg.arch(),
+# 'version': pkg.edition().version(),
+# 'release': pkg.edition().release()}
+# else:
+ install_pkgs[pkg.name()] = {'name': pkg.name(),
+ 'arch': pkg.arch(),
+ 'version': pkg.edition().version(),
+ 'release': pkg.edition().release()}
+# total_count = len(dlpkgs)
+# print('total_count: %s' % total_count)
+ return install_pkgs
+
+
+ def getAllContent(self):
+ if self.__pkgs_content:
+ return self.__pkgs_content
+
+ if not self.ts:
+ self.__initialize_transaction()
+
+ mi = self.ts.dbMatch()
+ for hdr in mi:
+ lname = misc.RPM_FMT % {
+ 'name': hdr['name'],
+ 'arch': hdr['arch'],
+ 'version': hdr['version'],
+ 'release': hdr['release']
+ }
+ self.__pkgs_content[lname] = hdr['FILENAMES']
+
+ return self.__pkgs_content
+
+ def getPkgsLicense(self):
+ return self.__pkgs_license
+
+ def getFilelist(self, pkgname):
+ if not pkgname:
+ return None
+
+ if not self.ts:
+ self.__initialize_transaction()
+
+ mi = self.ts.dbMatch('name', pkgname)
+ for header in mi:
+ return header['FILENAMES']
+
+ def __initialize_repo_manager(self):
+ if self.repo_manager:
+ return
+
+ # Clean up repo metadata
+ shutil.rmtree(self.cachedir + "/etc", ignore_errors = True)
+ shutil.rmtree(self.cachedir + "/solv", ignore_errors = True)
+ shutil.rmtree(self.cachedir + "/raw", ignore_errors = True)
+
+ zypp.KeyRing.setDefaultAccept( zypp.KeyRing.ACCEPT_UNSIGNED_FILE
+ | zypp.KeyRing.ACCEPT_VERIFICATION_FAILED
+ | zypp.KeyRing.ACCEPT_UNKNOWNKEY
+ | zypp.KeyRing.TRUST_KEY_TEMPORARILY
+ )
+
+ self.repo_manager_options = \
+ zypp.RepoManagerOptions(zypp.Pathname(self.instroot))
+
+ self.repo_manager_options.knownReposPath = \
+ zypp.Pathname(self.cachedir + "/etc/zypp/repos.d")
+
+ self.repo_manager_options.repoCachePath = \
+ zypp.Pathname(self.cachedir)
+
+ self.repo_manager_options.repoRawCachePath = \
+ zypp.Pathname(self.cachedir + "/raw")
+
+ self.repo_manager_options.repoSolvCachePath = \
+ zypp.Pathname(self.cachedir + "/solv")
+
+ self.repo_manager_options.repoPackagesCachePath = \
+ zypp.Pathname(self.cachedir + "/packages")
+
+ self.repo_manager = zypp.RepoManager(self.repo_manager_options)
+
+ def __build_repo_cache(self, name):
+ repo = self.repo_manager.getRepositoryInfo(name)
+ if self.repo_manager.isCached(repo) or not repo.enabled():
+ return
+
+ print('Refreshing repository: %s ...' % name)
+ self.repo_manager.buildCache(repo, zypp.RepoManager.BuildIfNeeded)
+
+ def init_zypp(self):
+ self.Z = None
+
+ def __initialize_zypp(self):
+ if self.Z:
+ return
+
+ zconfig = zypp.ZConfig_instance()
+
+ # Set system architecture
+ if self.target_arch:
+ zconfig.setSystemArchitecture(zypp.Arch(self.target_arch))
+
+ # print("zypp architecture is <%s>" % zconfig.systemArchitecture())
+
+ # repoPackagesCachePath is corrected by this
+ self.repo_manager = zypp.RepoManager(self.repo_manager_options)
+ repos = self.repo_manager.knownRepositories()
+ for repo in repos:
+ if not repo.enabled():
+ continue
+ self.repo_manager.loadFromCache(repo)
+
+ self.Z = zypp.ZYppFactory_instance().getZYpp()
+ self.Z.initializeTarget(zypp.Pathname(self.instroot))
+ self.Z.target().load()
+
+ def buildTransaction(self):
+ if not self.Z.resolver().resolvePool():
+ probs = self.Z.resolver().problems()
+
+ for problem in probs:
+ print("repo problem: %s, %s" \
+ % (problem.description().decode("utf-8"),
+ problem.details().decode("utf-8")))
+
+ raise RepoError("found %d resolver problem, abort!" % len(probs))
+
+ def getLocalPkgPath(self, po):
+ repoinfo = po.repoInfo()
+ cacheroot = repoinfo.packagesPath()
+ location = po.location()
+ rpmpath = str(location.filename())
+ pkgpath = "%s/%s" % (cacheroot, os.path.basename(rpmpath))
+ return pkgpath
+
+ def installLocal(self, pkg, po=None, updateonly=False):
+ if not self.ts:
+ self.__initialize_transaction()
+
+ solvfile = "%s/.solv" % (self.cachedir)
+
+ rc, out = runner.runtool([fs_related.find_binary_path("rpms2solv"),
+ pkg])
+ if rc == 0:
+ f = open(solvfile, "w+")
+ f.write(out)
+ f.close()
+
+ warnmsg = self.repo_manager.loadSolvFile(solvfile,
+ os.path.basename(pkg))
+ if warnmsg:
+ print(warnmsg)
+
+ os.unlink(solvfile)
+ else:
+ print('Cannot get %s solv data.' % pkg)
+
+ hdr = rpmmisc.readRpmHeader(self.ts, pkg)
+ arch = zypp.Arch(hdr['arch'])
+ sysarch = zypp.Arch(self.target_arch)
+
+ if arch.compatible_with (sysarch):
+ pkgname = hdr['name']
+ self.localpkgs[pkgname] = pkg
+ self.selectPackage(pkgname)
+ print("Marking %s to be installed" % (pkg))
+
+ else:
+ print("Cannot add package %s to transaction. "
+ "Not a compatible architecture: %s" \
+ % (pkg, hdr['arch']))
+
+ def preinstallPkgs(self):
+ if not self.ts_pre:
+ self.__initialize_transaction()
+
+ self.ts_pre.order()
+ cb = rpmmisc.RPMInstallCallback(self.ts_pre)
+ cb.headmsg = "Preinstall"
+ installlogfile = "%s/__catched_stderr.buf" % (self.instroot)
+
+ # start to catch stderr output from librpm
+ msger.enable_logstderr(installlogfile)
+
+ errors = self.ts_pre.run(cb.callback, '')
+ # stop catch
+ msger.disable_logstderr()
+ self.ts_pre.closeDB()
+ self.ts_pre = None
+
+ if errors is not None:
+ if len(errors) == 0:
+ print('scriptlet or other non-fatal errors occurred '
+ 'during transaction.')
+
+ else:
+ for e in errors:
+ print(e[0])
+ raise RepoError('Could not run transaction.')
+
+ def installPkgs(self, package_objects):
+ if not self.ts:
+ self.__initialize_transaction()
+
+ # clean rpm lock
+ self._cleanupRpmdbLocks(self.instroot)
+ self._cleanupZyppJunk(self.instroot)
+ # Set filters
+ probfilter = 0
+ for flag in self.probFilterFlags:
+ probfilter |= flag
+ self.ts.setProbFilter(probfilter)
+ self.ts_pre.setProbFilter(probfilter)
+
+ localpkgs = self.localpkgs.keys()
+
+ for po in package_objects:
+ pkgname = po.name()
+ if pkgname in localpkgs:
+ rpmpath = self.localpkgs[pkgname]
+ else:
+ rpmpath = self.getLocalPkgPath(po)
+
+ if not os.path.exists(rpmpath):
+ # Maybe it is a local repo
+ rpmuri = self.get_url(po)
+ if rpmuri.startswith("file:/"):
+ rpmpath = rpmuri[5:]
+
+ if not os.path.exists(rpmpath):
+ raise RpmError("Error: %s doesn't exist" % rpmpath)
+
+ h = rpmmisc.readRpmHeader(self.ts, rpmpath)
+
+ if pkgname in self.pre_pkgs:
+ print("pre-install package added: %s" % pkgname)
+ self.ts_pre.addInstall(h, rpmpath, 'u')
+
+ self.ts.addInstall(h, rpmpath, 'u')
+
+ unresolved_dependencies = self.ts.check()
+ if not unresolved_dependencies:
+ if self.pre_pkgs:
+ self.preinstallPkgs()
+
+ self.ts.order()
+ cb = rpmmisc.RPMInstallCallback(self.ts)
+ installlogfile = "%s/__catched_stderr.buf" % (self.instroot)
+
+ # start to catch stderr output from librpm
+ msger.enable_logstderr(installlogfile)
+
+ errors = self.ts.run(cb.callback, '')
+ # stop catch
+ msger.disable_logstderr()
+ self.ts.closeDB()
+ self.ts = None
+
+ if errors is not None:
+ if len(errors) == 0:
+ print('scriptlet or other non-fatal errors occurred '
+ 'during transaction.')
+ if self.strict_mode:
+ raise CreatorError("mic failes to install some packages")
+ else:
+ for e in errors:
+ print(e[0])
+ raise RepoError('Could not run transaction.')
+
+ else:
+ for pkg, need, needflags, sense, key in unresolved_dependencies:
+ package = '-'.join(pkg)
+
+ if needflags == rpm.RPMSENSE_LESS:
+ deppkg = ' < '.join(need)
+ elif needflags == rpm.RPMSENSE_EQUAL:
+ deppkg = ' = '.join(need)
+ elif needflags == rpm.RPMSENSE_GREATER:
+ deppkg = ' > '.join(need)
+ else:
+ deppkg = '-'.join(need)
+
+ if sense == rpm.RPMDEP_SENSE_REQUIRES:
+ print("[%s] Requires [%s], which is not provided" \
+ % (package, deppkg))
+
+ elif sense == rpm.RPMDEP_SENSE_CONFLICTS:
+ print("[%s] Conflicts with [%s]" % (package, deppkg))
+
+ raise RepoError("Unresolved dependencies, transaction failed.")
+
+ def __initialize_transaction(self):
+ if not self.ts:
+ self.ts = rpm.TransactionSet(self.instroot)
+ # Set to not verify DSA signatures.
+ self.ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES|rpm._RPMVSF_NODIGESTS)
+
+ if not self.ts_pre:
+ self.ts_pre = rpm.TransactionSet(self.instroot)
+ # Just unpack the files, don't run scripts
+ self.ts_pre.setFlags(rpm.RPMTRANS_FLAG_ALLFILES | rpm.RPMTRANS_FLAG_NOSCRIPTS)
+ # Set to not verify DSA signatures.
+ self.ts_pre.setVSFlags(rpm._RPMVSF_NOSIGNATURES|rpm._RPMVSF_NODIGESTS)
+
+ def checkPkg(self, pkg):
+ ret = 1
+ if not os.path.exists(pkg):
+ return ret
+ ret = rpmmisc.checkRpmIntegrity('rpm', pkg)
+ if ret != 0:
+ print("package %s is damaged: %s" \
+ % (os.path.basename(pkg), pkg))
+
+ return ret
+
+ def _add_prob_flags(self, *flags):
+ for flag in flags:
+ if flag not in self.probFilterFlags:
+ self.probFilterFlags.append(flag)
+
+ def get_proxies(self, pobj):
+ if not pobj:
+ return None
+
+ proxy = None
+ proxies = None
+ repoinfo = pobj.repoInfo()
+ reponame = "%s" % repoinfo.name()
+ repos = filter(lambda r: r.name == reponame, self.repos)
+ repourl = str(repoinfo.baseUrls()[0])
+
+ if repos:
+ proxy = repos[0].proxy
+ if not proxy:
+ proxy = get_proxy_for(repourl)
+ if proxy:
+ proxies = {str(repourl.split(':')[0]): str(proxy)}
+
+ return proxies
+
+ def get_url(self, pobj):
+ if not pobj:
+ return None
+
+ name = str(pobj.repoInfo().name())
+ try:
+ repo = filter(lambda r: r.name == name, self.repos)[0]
+ except IndexError:
+ return None
+
+ location = pobj.location()
+ location = str(location.filename())
+ if location.startswith("./"):
+ location = location[2:]
+
+ # baseurl entries are plain strings here, so join as a URL path
+ return '/'.join([repo.baseurl[0].rstrip('/'), location])
+
+ def package_url(self, pkgname):
+
+ def cmpEVR(p1, p2):
+ ed1 = p1.edition()
+ ed2 = p2.edition()
+ (e1, v1, r1) = map(str, [ed1.epoch(), ed1.version(), ed1.release()])
+ (e2, v2, r2) = map(str, [ed2.epoch(), ed2.version(), ed2.release()])
+ return rpm.labelCompare((e1, v1, r1), (e2, v2, r2))
+
+ if not self.Z:
+ self.__initialize_zypp()
+
+ q = zypp.PoolQuery()
+ q.addKind(zypp.ResKind.package)
+ q.setMatchExact()
+ q.addAttribute(zypp.SolvAttr.name, pkgname)
+ items = sorted(q.queryResults(self.Z.pool()),
+ cmp=lambda x,y: cmpEVR(zypp.asKindPackage(x), zypp.asKindPackage(y)),
+ reverse=True)
+
+ if items:
+ item = zypp.asKindPackage(items[0])
+ url = self.get_url(item)
+ proxies = self.get_proxies(item)
+ return (url, proxies)
+
+ return (None, None)
+
class DependencyTest(unittest.TestCase):
def setUp(self):
# test environment setup
- self.repo_list = [{'name': 'local_base',
- 'url': 'file:/' + TEST_REPODATA_LOC + '/base'},
- {'name': 'local_mobile',
- 'url': 'file:/' + TEST_REPODATA_LOC + '/mobile'}]
-
- self.repodata_list = get_repodata_from_repos(self.repo_list, DEFAULT_CACHEDIR)
-
- self.pkg_group = None
- try:
- repo_parser = RepodataParser(self.repodata_list)
- self.pkg_group = repo_parser.parse()
- except Exception as e:
- raise self.failureException
-
+ config.TEST = True
+ self.architecture = 'armv7l'
+ self.recipe = default_recipe.get_default_recipe()
+ self.repo_init = False
+ self.zypp = Zypp(self.architecture, '/var/tmp/tic-core/test/inst', '/var/tmp/tic-core/test/cache', False)
+ self.tic_recipe = None
+ self.tic_repoinfo = None
+ self.tic_pkggroup = None
+ # init
+ self._init_zypp_repositories(self.recipe)
+ self._init_tic_metadata()
def tearDown(self):
# clear environment after test
- del self.repo_list
- del self.pkg_group
-
- def test_parse(self):
- self.assertNotEqual(self.pkg_group, None)
- self.assertNotEqual(self.pkg_group['pkg_list'], None)
- self.assertNotEqual(self.pkg_group['pkg2id'], None)
- self.assertEqual(len(self.pkg_group['pkg_list']), len(self.pkg_group['pkg2id']))
-
- pkg_list = self.pkg_group['pkg_list']
- pkg2id = self.pkg_group['pkg2id']
-
- # Consistency checks between pkg_list and pkg2id
- for pkg_id in range(len(pkg_list)):
- pkg_info = pkg_list[pkg_id]
- self.assertEqual(pkg_id, pkg2id.get(pkg_info['name']))
+ del self.recipe
+ del self.zypp
+ del self.tic_recipe
+ del self.tic_repoinfo
+ del self.tic_pkggroup
- def test_dependency(self):
- # 1st package install-depdency analysis
- # analyze_dependency(self.pkg_group)
- # print('time:', current_milli_time() - start_time)
- pkg_list = self.pkg_group['pkg_list']
-
- # 2nd package install-depdency analysis for test (using bruteforce)
- test_repo_parser = RepodataParser(self.repodata_list)
- test_pkg_group = test_repo_parser.parse()
- test_pkg_list = test_pkg_group.get('pkg_list')
- test_pkg2id = test_pkg_group.get('pkg2id')
-
- count = 1
- visited = [0]*len(test_pkg_list)
- for pkg_id in range(len(test_pkg_list)):
- visited[pkg_id] = count
- dep_set = self.dep_dfs(count, pkg_id, test_pkg_list, visited)
- test_pkg_list[pkg_id]['dependency'] = list(dep_set)
- count += 1
- # print('time:', current_milli_time() - start_time)
+ def test_analyze_dependency(self):
+ # get packages from repo using tic
+ pkg_dict = self.tic_pkggroup.get('pkg_dict')
+ target_meta = ('root', 'sub1', 'sub2')
- # compares the result of 1st and 2nd
- if len(pkg_list) != len(test_pkg_list):
- raise self.failureException
+ target_count = 0
+ # check the number of meta-pkgs
+ for pkg_name, pkg_data in pkg_dict.iteritems():
+ # is meta-pkg
+ if pkg_data.get('meta') and pkg_data['meta'] in target_meta:
+ target_count += 1
+ print('total target-meta: %d' % target_count)
- for pkg_id in range(len(pkg_list)):
- pkg_info = pkg_list[pkg_id]
- test_pkg_info = test_pkg_list[test_pkg2id[pkg_info['name']]]
-
- # package check
- if pkg_info['name'] != test_pkg_info['name']:
- raise self.failureException
-
- # dependency count
- if len(pkg_info['dependency']) != len(test_pkg_info['dependency']):
- print('pkg_info_1:', pkg_info['dependency'])
- print('pkg_info_2:', test_pkg_info['dependency'])
- raise self.failureException
-
- # dependency validation between pkg_info and test_pkg_info
- for dep in test_pkg_info.get('dependency'):
- if dep not in pkg_info['dependency']:
- print(dep, 'does not exist')
- raise self.failureException
-
- def dep_dfs(self, count, pkg_id, pkg_list, visited):
- dep_set = set([pkg_list[pkg_id]['name']])
-
- if pkg_list[pkg_id].get('requires'):
- for req in pkg_list[pkg_id].get('requires'):
- req_id = req.get('id')
- if req_id is not None:
- if visited[req_id] < count:
- visited[req_id] = count
- dep_set.update(self.dep_dfs(count, req_id, pkg_list, visited))
+ pkg_idx = 1
+ for pkg_name, pkg_data in pkg_dict.iteritems():
+ # is meta-pkg
+ if pkg_data.get('meta') and pkg_data['meta'] in target_meta:
+ print('#%d package: %s' % (pkg_idx, pkg_data['name']))
+ pkg_idx += 1
+ try:
+ zypp_pkgs = self._get_inst_pkgs_from_zypp([str(pkg_name)])
+ except RepoError:
+ # dependency package does not exist
+ continue
+
+ tic_pkgs = self._get_inst_pkgs_from_tic([str(pkg_name)])
+ print(' zypp pkgs(%s), tic_pkgs(%s)' % (len(zypp_pkgs), len(tic_pkgs)))
+ if len(zypp_pkgs) == len(tic_pkgs):
+ for pkg in tic_pkgs:
+ if pkg not in zypp_pkgs:
+ raise self.failureException
+ self.assertEqual(pkg_dict[pkg]['version']['ver'], zypp_pkgs[pkg]['version'])
+ self.assertEqual(pkg_dict[pkg]['version']['rel'], zypp_pkgs[pkg]['release'])
+ self.assertEqual(str(pkg_dict[pkg]['arch']), str(zypp_pkgs[pkg]['arch']))
else:
- pass
- #TODO: package doest not exist
- #print('def_dfs::', req['name'], 'is not exist (in dep_analysis)')
-
- return dep_set
+ self.assertEqual(len(zypp_pkgs), len(tic_pkgs))
+
+ def _init_zypp_repositories(self, recipe):
+ if not self.repo_init:
+ # add repositories
+ for repo_name in recipe['Recipe']['Repos']:
+ for repo_info in recipe['Repositories']:
+ if repo_info['Name'] == repo_name:
+ # TODO: handle proxy
+ self.zypp.addRepository(repo_name, repo_info['Url'])
+ break
+ self.repo_init = True
+
+ def _get_inst_pkgs_from_zypp(self, select_pkgs):
+ self.zypp.init_zypp()
+ # add install-pkg
+ for select_pkg in select_pkgs:
+ self.zypp.selectPackage(select_pkg)
+ # exclude pkg
+ for exclude_pkg in self.recipe['Recipe']['RemovePackages']:
+ self.zypp.deselectPackage(exclude_pkg)
+ return self.zypp.runInstall()
+ def _init_tic_metadata(self):
+ recipe_list = default_recipe.get_default_parameter()
+ recipe_parser = RecipeParser(recipe_list)
+ recipe_parser.parse()
+ self.tic_recipe = recipe_parser.get_merged_recipe()
+ self.tic_repoinfo = get_repodata_from_repos(self.tic_recipe.get('Repositories'), DEFAULT_CACHEDIR)
+ self.tic_pkggroup = get_pkg_metadata(self.tic_repoinfo, self.architecture)
+
+ def _get_inst_pkgs_from_tic(self, select_pkgs):
+ # reset pkg_group
+ self.tic_pkggroup = get_pkg_metadata(self.tic_repoinfo, self.architecture)
+ self.tic_recipe['Recipe']['ExtraPackages'] = select_pkgs
+ inst_pkgs = get_installed_packages(self.tic_recipe, self.tic_repoinfo, self.tic_pkggroup)
+ return inst_pkgs
+
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main() \ No newline at end of file
diff --git a/tic/command.py b/tic/command.py
index c66743a..c3dcf63 100644
--- a/tic/command.py
+++ b/tic/command.py
@@ -36,23 +36,10 @@ from tic.config import configmgr
DEFAULT_CACHEDIR=configmgr.setting['tempdir']
DEFAULT_ANALYSISDIR=os.path.join(DEFAULT_CACHEDIR, 'analysis')
DEFAULT_KICKSTARTDIR=os.path.join(DEFAULT_CACHEDIR, 'kickstart')
-DEFAULT_RECIPE_NAME='recipe.yaml';
+DEFAULT_RECIPE_NAME='recipe.yaml'
-def analyze(recipe_list):
+def get_pkg_metadata(repoinfo, architecture):
logger = logging.getLogger(__name__)
- if not recipe_list:
- logger.info('Use default recipe because there is no import data')
- recipe_list = default_recipe.getDefaultParameter()
-
- recipe_parser = RecipeParser(recipe_list)
- recipe_parser.parse()
- recipe_info = recipe_parser.getMergedRecipe()
-
- start_time = misc.get_timestamp()
- #Download repodata from repositories (Remote/Local)
- repoinfo = get_repodata_from_repos(recipe_info.get('Repositories'), DEFAULT_CACHEDIR)
- logger.info('time to get repodata from repo: %d ms', misc.get_timestamp() - start_time)
-
checksum_list=[]
for repo in repoinfo:
checksum_list.append(repo['checksum'])
@@ -61,49 +48,71 @@ def analyze(recipe_list):
pkg_group=None
if os.path.exists(analysis_file):
pkg_group=file.read_json(analysis_file)
-
- if not pkg_group or not pkg_group.get('pkg_dict'):
- start_time = misc.get_timestamp()
+ if not pkg_group or 'pkg_dict' not in pkg_group:
# Parse the xml files for the analysis of package (.rpm)
- repo_parser = RepodataParser('armv7l', repoinfo)
+ repo_parser = RepodataParser(architecture, repoinfo)
pkg_group = repo_parser.parse()
- logger.info('packages: %d, provides: %d, files: %d', len(pkg_group['pkg_dict']), len(pkg_group['provides']), len(pkg_group['files']))
- logger.info('time to parse repodata: %d ms', misc.get_timestamp() - start_time)
# dump to cached file
file.write_json_flock(analysis_file, pkg_group)
else:
logger.info('use a cache parsing data - %s', analysis_file)
+ return pkg_group
+
+def analyze(recipe_list):
+ def _make_result_data(repos, viewdata, pkggroup, inst_pkgs):
+ return {'view': viewdata,
+ 'data': {'packages': pkggroup.get('pkg_dict'),
+ 'provides': pkggroup.get('provides'),
+ 'files': pkggroup.get('files'),
+ 'groups': pkggroup.get('groups'),
+ 'conflicts': pkggroup.get('conflicts'),
+ 'supplements': pkggroup.get('supplements')},
+ 'recipes': repos,
+ 'installpackages': inst_pkgs}
+
+ logger = logging.getLogger(__name__)
+ if not recipe_list:
+ logger.info('Use default recipe because there is no import data')
+ recipe_list = default_recipe.get_default_parameter()
+
+ # parse recipe
+ recipe_parser = RecipeParser(recipe_list)
+ recipe_parser.parse()
+ recipe_info = recipe_parser.get_merged_recipe()
+ # Download repodata from repositories (Remote/Local)
+ repoinfo = get_repodata_from_repos(recipe_info.get('Repositories'), DEFAULT_CACHEDIR)
+ # parse repodata and build the pkg metadata
+ pkg_group = get_pkg_metadata(repoinfo, 'armv7l')
+ logger.info('pkgs: %d, provides: %d, files: %d',
+ len(pkg_group['pkg_dict']),
+ len(pkg_group['provides']),
+ len(pkg_group['files']))
start_time = misc.get_timestamp()
# Make a data for TIC (Tizen image creation)
view_data = make_view_data(pkg_group)
- logger.info('time to create view-tree: %d ms', misc.get_timestamp() - start_time)
+ logger.info('time to make view-tree: %d ms', misc.get_timestamp() - start_time)
+
# analyze install-dependency
start_time = misc.get_timestamp()
inst_packages = get_installed_packages(recipe_info, repoinfo, pkg_group)
- logger.info('installed package: %d', len(inst_packages))
+ logger.info('install-packages: %d', len(inst_packages))
logger.info('time to analyze dependency: %d ms', misc.get_timestamp() - start_time)
-
- result = {'view': view_data,
- 'data': {'packages': pkg_group.get('pkg_dict'),
- 'provides': pkg_group.get('provides'),
- 'files': pkg_group.get('files'),
- 'groups': pkg_group.get('groups'),
- 'conflicts': pkg_group.get('conflicts')},
- 'recipes': recipe_parser.getRepositories(),
- 'installpackages': inst_packages}
+ result = _make_result_data(recipe_parser.get_repositories(),
+ view_data,
+ pkg_group,
+ inst_packages)
return result
def imports(recipe_list):
logger = logging.getLogger(__name__)
if not recipe_list:
logger.info('Use default recipe because there is no import data')
- recipe_list = default_recipe.getDefaultParameter()
+ recipe_list = default_recipe.get_default_parameter()
recipe_parser = RecipeParser(recipe_list)
recipe_parser.parse()
- result = {'recipes': recipe_parser.getRepositories()}
- #result = {'imports': recipe_parser.getMergedRepositories()}
+ result = {'recipes': recipe_parser.get_repositories()}
return result
def exports(export_type, recipes, packages, outdir, filename=None):
@@ -124,17 +133,17 @@ def exports(export_type, recipes, packages, outdir, filename=None):
recipe_parser.parse()
result = None
if export_type == 'recipe':
- recipe_path = recipe_parser.export2Recipe(packages, outdir, filename)
+ recipe_path = recipe_parser.export_recipe(packages, outdir, filename)
logger.info('export the recipe to %s' % recipe_path)
result = {'path': recipe_path}
elif export_type == 'ks':
# 1. create yaml files
- yaml_info = recipe_parser.export2Yaml(packages, DEFAULT_KICKSTARTDIR)
+ yaml_info = recipe_parser.export_yaml(packages, DEFAULT_KICKSTARTDIR)
# 2. create kickstart(.ks) using kickstarter tool
options = KSoption(yaml_info.configs, yaml_info.repos, yaml_info.cachedir)
kswriter(options)
# check whether the ks exists
- recipe_info = recipe_parser.getMergedRecipe()
+ recipe_info = recipe_parser.get_merged_recipe()
baseline=recipe_info['Recipe'].get('Baseline')
ksname= ''.join([recipe_info['Recipe'].get('FileName'), '.ks'])
kspath=os.path.join(yaml_info.cachedir, baseline, ksname)
diff --git a/tic/config.py b/tic/config.py
index 4648f95..065146b 100644
--- a/tic/config.py
+++ b/tic/config.py
@@ -19,6 +19,7 @@
import os
import ConfigParser
+TEST = False
DEFAULT_MSG_CONF = "/etc/tic-core/message.conf"
DEFAULT_CONF = "/etc/tic-core/config.conf"
@@ -66,7 +67,7 @@ class ConfigMgr(object):
def __init__(self):
self._reset()
for conf_path in [DEFAULT_CONF, DEFAULT_MSG_CONF]:
- self._setConfig(conf_path)
+ self._set_config(conf_path)
def _reset(self):
for sec, vals in self.DEFAULT_TIC.iteritems():
@@ -74,18 +75,16 @@ class ConfigMgr(object):
for sec, vals in self.DEFAULT_MESSAGE.iteritems():
setattr(self, sec, vals)
- def _setConfig(self, conf):
- configParser = ConfigParser.ConfigParser()
+ def _set_config(self, conf):
+ config_parser = ConfigParser.ConfigParser()
try:
- if os.path.exists(conf):
- configParser.read(conf)
- for section in configParser.sections():
- for option in configParser.options(section):
- try:
- opt_attr=getattr(self, section)
- opt_attr[option]=configParser.get(section, option)
- except:
- pass
+ if not os.path.exists(conf):
+ return
+ config_parser.read(conf)
+ for section in config_parser.sections():
+ for option in config_parser.options(section):
+ opt_attr=getattr(self, section)
+ opt_attr[option]=config_parser.get(section, option)
except Exception as e:
print(e)
diff --git a/tic/dependency.py b/tic/dependency.py
index fefc53b..af98678 100644
--- a/tic/dependency.py
+++ b/tic/dependency.py
@@ -28,6 +28,19 @@ DEFAULT_PROFILE = 'EMPTY'
def get_installed_packages(recipe, repoinfo, pkg_group):
logger = logging.getLogger(__name__)
+ def _select_rpm_by_name(dep_name):
+ return _select_rpm(dict(name=dep_name))
+
+ def _select_rpm(dep_rpm, recommends=None):
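+ # resolve a dependency in order: provides -> file paths -> package name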
+ selected = None
+ if dep_rpm['name'] in provides:
+ selected = _select_rpm_from_provides(provides[dep_rpm['name']], dep_rpm, recommends)
+ elif dep_rpm['name'] in files:
+ selected = _select_rpm_from_files(files[dep_rpm['name']], dep_rpm)
+ elif dep_rpm['name'] in pkg_dict:
+ selected = pkg_dict.get(dep_rpm['name'])
+ return selected
+
def _select_rpm_from_files(fileList, require):
if not fileList or not require:
return None
@@ -44,8 +57,8 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
if not _check_conflicts(file_info):
return file_info
return pkg_dict.get(fileList[0])
-
- def _select_rpm(capability, require, recommends=None):
+
+ def _select_rpm_from_provides(capability, require, recommends=None):
provide_list = []
# 1. Choose the rpm included in version from provides
if require.get('ver'):
@@ -70,7 +83,7 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
# 2-1. Choose the default rpm or the selected rpm
for pro in provide_list:
provide_info = pkg_dict.get(pro['name'])
- if provide_info['name'] in required_inst_rpms or selected[provide_info['id']] >= 1:
+ if provide_info['name'] in required_inst_rpms or selected[provide_info['id']] > 0:
return provide_info
# 2-2. Choose the defualt profile
@@ -219,7 +232,7 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
_add_conflicts(pkg_info)
# Installation dependency analysis of rpm
- for dep_tag in [Dependency.REQUIRES, Dependency.RECOMMENDS]:
+ for dep_tag in [Dependency.REQUIRES]:
if pkg_info.get(dep_tag):
for req in pkg_info.get(dep_tag):
choose = None
@@ -227,23 +240,8 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
if req['name'] == pkg_info['name']:
continue
# Find dependency rpm based on capability/files
- if req['name'] in provides:
- # Select the rpm that meets the condition (version)
- if dep_tag == Dependency.REQUIRES:
- choose = _select_rpm(provides[req['name']], req, pkg_info.get('recommends'))
- else:
- choose = _select_rpm(provides[req['name']], req)
- elif req['name'] in files:
- choose = _select_rpm_from_files(files[req['name']], req)
- elif req['name'] in pkg_dict:
- choose = pkg_dict.get(req['name'])
+ choose = _select_rpm(req, pkg_info.get('recommends'))
- if dep_tag == Dependency.RECOMMENDS:
- # A Recommends B: B is installed when A is installed and B has no conflicts.
- if not choose or _check_conflicts(choose) is not None:
- #logger.info('%s recommended by %s is ignored for selection (Conflict)' % (req['name'], pkg_info['name']))
- continue
-
if choose:
# add forward/backward reference
_create_reference(pkg_info, choose)
@@ -297,7 +295,7 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
# delete backward reference of group node
for pkgname in g_pkg_list:
pkg = pkg_dict[pkgname]
- pkg['backward'] = None;
+ pkg['backward'] = None
group_visited[g_id][pkg['name']] = -1
return True
@@ -309,7 +307,7 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
for i in range(len(con_list)):
if con_list[i]['name'] == node['name']:
del con_list[i]
- break;
+ break
def _remove_reference(parent, node):
if parent is not None:
@@ -368,12 +366,12 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
for pro in new_pkg['provides']:
if r_name == pro['name']:
matched = True
- break;
+ break
if not matched and new_pkg.get('file'):
for fname in new_pkg['file']:
if r_name == fname:
matched = True
- break;
+ break
if not matched:
return False
return True
@@ -431,34 +429,29 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
# add rpms into conflicts table.
_add_conflicts(pkg_info)
+
+ # check weak dependencies (recommends, supplements)
+ if Dependency.RECOMMENDS in pkg_info:
+ for recommend in pkg_info[Dependency.RECOMMENDS]:
+ week_deps_rpms.append(recommend)
+ if Dependency.PROVIDES in pkg_info:
+ for provide in pkg_info[Dependency.PROVIDES]:
+ if provide['name'] in supplements:
+ week_deps_rpms.extend(supplements[provide['name']])
+
# Installation dependency analysis of rpm
- for dep_tag in [Dependency.REQUIRES, Dependency.RECOMMENDS]:
+ for dep_tag in [Dependency.REQUIRES]:
if pkg_info.get(dep_tag):
for req in pkg_info.get(dep_tag):
choose = None
# self-reference (e.g. vim-base)
if req['name'] == pkg_info['name']:
continue
- if req['name'] in provides:
- if dep_tag == Dependency.REQUIRES:
- choose = _select_rpm(provides[req['name']], req, pkg_info.get('recommends'))
- else:
- choose = _select_rpm(provides[req['name']], req)
- elif req['name'] in files:
- choose = _select_rpm_from_files(files[req['name']], req)
- elif req['name'] in pkg_dict:
- choose = pkg_dict.get(req['name'])
-
- if dep_tag == Dependency.RECOMMENDS:
- # A Recommends B: B is installed when A is installed and B has no conflicts.
- if not choose or _check_conflicts(choose) is not None:
- #logger.info('%s recommended by %s is ignored for selection (Conflict)' % (req['name'], pkg_info['name']))
- continue
+ # Select packages with inst-dependencies
+ choose = _select_rpm(req, pkg_info.get('recommends'))
if choose:
- # add refer count, only requires
- if dep_tag == Dependency.REQUIRES:
- _add_refer(choose, req)
+ _add_refer(choose, req)
if selected[choose['id']] == 0:
if not _check_dep_validation(choose, select_list):
@@ -471,10 +464,8 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
# recipe/repo
if not recipe or not repoinfo:
return []
-
group_set = set([])
pkg_set = set([])
-
if recipe['Recipe'].get('Groups'):
group_set.update(recipe['Recipe'].get('Groups'))
if recipe['Recipe'].get('ExtraPackages'):
@@ -499,14 +490,15 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
for pkgreq in pkglist.findall('packagereq'):
plist.append(pkgreq.text)
pkg_set.update(set(plist))
- group_set.discard(group_name);
+ group_set.discard(group_name)
pkg_dict = pkg_group.get('pkg_dict')
provides = pkg_group.get('provides')
files = pkg_group.get('files')
groups = pkg_group.get('groups')
conflicts = pkg_group.get('conflicts')
-
+ supplements = pkg_group.get('supplements')
+
number = [0] # for pkg count
scc_num = [0] # for scc count
group_num = [0] # for group count
@@ -519,22 +511,29 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
capabilities = {}
require_refer = {}
- candidate_rpms = set([])
- # add rpms to install
- select_rpms = set([])
# add reference value for rpm selection
required_inst_rpms = pkg_set.copy()
+ # add rpms to install
+ select_rpms = set([])
+ select_week_rpms = set([])
+ # etc
+ candidate_rpms = set([])
+ include_week_deps = []
+ exclude_week_deps = set([])
# 1. Check whether dependencies of rpm are available.
for pkg_name in pkg_set:
- selected_pkg = _get_pkg_info(pkg_name)
+ # select the matching package
+ selected_pkg = _select_rpm_by_name(pkg_name)
if selected_pkg:
if selected[selected_pkg['id']] == 0:
+ week_deps_rpms = []
select_list = []
comp_rpms = set([])
if _check_dep_validation(selected_pkg, select_list):
select_rpms.add(pkg_name)
candidate_rpms.update(comp_rpms)
+ include_week_deps.extend(week_deps_rpms)
else:
# case: conflict or rpm does not exist
_remove_dep_rpm(select_list)
@@ -544,6 +543,34 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
else:
logger.info(configmgr.message['package_not_exist'] % pkg_name)
+ # 2. Check weak dependencies (recommends/supplements)
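+ # _check_dep_validation can queue more weak deps; loop until none remain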
+ loop_deps = include_week_deps
+ while loop_deps:
+ include_week_deps = []
+ # recommends
+ for week_rpm in loop_deps:
+ if week_rpm['name'] in exclude_week_deps:
+ continue
+
+ selected_pkg = _select_rpm(week_rpm)
+ if selected_pkg:
+ if selected[selected_pkg['id']] == 0:
+ week_deps_rpms = []
+ select_list = []
+ comp_rpms = set([])
+ if _check_dep_validation(selected_pkg, select_list):
+ select_week_rpms.add(selected_pkg['name'])
+ candidate_rpms.update(comp_rpms)
+ include_week_deps.extend(week_deps_rpms)
+ else:
+ # case: conflict or rpm does not exist
+ _remove_dep_rpm(select_list)
+ exclude_week_deps.add(week_rpm['name'])
+ else:
+ # ignore package selection
+ logger.info('the weak dependent package (%s) does not exist.' % week_rpm['name'])
+ loop_deps = include_week_deps
+
# init conflict table and reference
number[0] = 0
selected = [0] * len(pkg_dict)
@@ -552,25 +579,27 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
required_inst_rpms.update(select_rpms)
required_inst_rpms.update(candidate_rpms)
- print(candidate_rpms)
-
- # 2. Analyze rpm installation dependencies.
- for pkg_name in select_rpms:
- progress['status'] = True
- selected_pkg = _get_pkg_info(pkg_name)
- if selected_pkg:
- rpm_stack = []
- selected_pkg['selfChecked'] = True
- if selected[selected_pkg['id']] == 0:
- inst_rpms = _analyze_dep(selected_pkg)
- if progress['status']:
- install_rpm.update(inst_rpms)
- else:
- # Error Case
- logger.info("[Dependency Issue] Could not install the %s" % selected_pkg)
- # delete forward/backward reference
- group_visited = {}
- _remove_reference(None, selected_pkg)
- else:
- logger.info(configmgr.message['package_not_exist'] % pkg_name)
+ # 3. Analyze rpm installation dependencies.
+ for idx, value in enumerate([select_rpms, select_week_rpms]):
+ for pkg_name in value:
+ progress['status'] = True
+ #selected_pkg = _get_pkg_info(pkg_name)
+ selected_pkg = _select_rpm_by_name(pkg_name)
+ if selected_pkg:
+ rpm_stack = []
+ if idx == 0:
+ # Only packages from select_rpms are marked as selfChecked
+ selected_pkg['selfChecked'] = True
+ if selected[selected_pkg['id']] == 0:
+ inst_rpms = _analyze_dep(selected_pkg)
+ if progress['status']:
+ install_rpm.update(inst_rpms)
+ else:
+ # Error Case
+ logger.info("[Dependency Issue] Could not install the %s" % selected_pkg)
+ # delete forward/backward reference
+ group_visited = {}
+ _remove_reference(None, selected_pkg)
+ else:
+ logger.info(configmgr.message['package_not_exist'] % pkg_name)
return list(install_rpm)
diff --git a/tic/parser/recipe_parser.py b/tic/parser/recipe_parser.py
index 82b9e4c..5165c64 100644
--- a/tic/parser/recipe_parser.py
+++ b/tic/parser/recipe_parser.py
@@ -90,9 +90,9 @@ class DefaultRecipe(object):
logger.info(err)
except yaml.YAMLError as err:
logger.info(err)
- def getDefaultRecipe(self):
+ def get_default_recipe(self):
return copy.deepcopy(self.DEFAULT_RECIPE)
- def getSystemConfig(self):
+ def get_system_config(self):
data = copy.deepcopy(self.DEFAULT_RECIPE)
for field in RECIPE_EXTEND_FIELD:
if field == 'Partitions':
@@ -102,7 +102,8 @@ class DefaultRecipe(object):
if data.get(field):
data[field] = []
return data
- def getDefaultParameter(self):
+ @classmethod
+ def get_default_parameter(cls):
return [dict(url=DEFAULT_RECIPE_NAME, type='recipe')]
default_recipe = DefaultRecipe()
@@ -115,7 +116,7 @@ class RecipeParser(object):
self._repositories = None
self._recipe = None
# add recipe input
- self.addRecipes(inputs)
+ self.add_recipes(inputs)
def parse(self):
logger = logging.getLogger(__name__)
@@ -131,7 +132,7 @@ class RecipeParser(object):
if data_type == 'recipe':
# default recipe
if data.get('url') == DEFAULT_RECIPE_NAME:
- self.recipes[data.get('url')] = default_recipe.getDefaultRecipe()
+ self.recipes[data.get('url')] = default_recipe.get_default_recipe()
else:
with contextlib.closing(urllib2.urlopen(data.get('url'))) as op:
self.recipes[data.get('url')] = yaml.load(op.read())
@@ -152,7 +153,7 @@ class RecipeParser(object):
logger.error(err)
raise TICError(configmgr.message['recipe_parse_error'] % data.get('url'))
- def addRecipes(self, inputs):
+ def add_recipes(self, inputs):
if inputs:
if isinstance(inputs, list):
for data in inputs:
@@ -160,12 +161,12 @@ class RecipeParser(object):
else:
self.inputs.append(inputs)
- def getRepositories(self):
+ def get_repositories(self):
if not self._repositories:
- self._repositories = self._getAllRepositories()
+ self._repositories = self._get_all_repositories()
return self._repositories
- def _getAllRepositories(self):
+ def _get_all_repositories(self):
repos = []
name_count = 1
for data in self.inputs:
@@ -201,7 +202,7 @@ class RecipeParser(object):
repos.append(data)
return repos
- def _renameRepository(self, repo_dict, repo_name):
+ def _rename_repository(self, repo_dict, repo_name):
number = repo_dict.get(repo_name)
new_name = ''.join([repo_name, '_', str(number)])
while(new_name in repo_dict):
@@ -210,9 +211,9 @@ class RecipeParser(object):
repo_dict[repo_name] = number + 1
return new_name
- def getMergedRepositories(self):
+ def get_merged_repositories(self):
result = []
- repositories = self.getRepositories()
+ repositories = self.get_repositories()
repo_name = {} # 'name': count
repo_url = {} # 'url': exist
for target in repositories:
@@ -224,7 +225,7 @@ class RecipeParser(object):
continue
# if repo's name is duplicated, rename it (postfix '_count')
if repo.get('name') in repo_name:
- repo['name'] = self._renameRepository(repo_name, repo['name'])
+ repo['name'] = self._rename_repository(repo_name, repo['name'])
else:
repo_name[repo['name']] = 1
repo_url[repo['url']] = 1
@@ -237,18 +238,18 @@ class RecipeParser(object):
if target.get('url') in repo_url:
continue
if target['name'] in repo_name:
- target['name'] = self._renameRepository(repo_name, target['name'])
+ target['name'] = self._rename_repository(repo_name, target['name'])
else:
repo_name[target['name']] = 1
repo_url[target['url']] = 1
result.append(target)
return result
- def getMergedRecipe(self):
+ def get_merged_recipe(self):
if self._recipe:
return self._recipe
- mergedInfo = default_recipe.getSystemConfig()
+ mergedInfo = default_recipe.get_system_config()
# merge recipe info
for i in xrange(len(self.inputs), 0, -1):
if self.inputs[i-1].get('type') == 'recipe':
@@ -278,15 +279,15 @@ class RecipeParser(object):
mergedInfo[extName].reverse()
# set repositories
- mergedInfo['Repositories'] = self.getMergedRepositories()
+ mergedInfo['Repositories'] = self.get_merged_repositories()
if mergedInfo.get('Repositories'):
for repo in mergedInfo['Repositories']:
mergedInfo['Recipe']['Repos'].append(repo['name'])
return mergedInfo
- def export2Recipe(self, packages, outdir, filename):
+ def export_recipe(self, packages, outdir, filename):
logger = logging.getLogger(__name__)
- recipe = self.getMergedRecipe()
+ recipe = self.get_merged_recipe()
make_dirs(outdir)
reciep_path = os.path.join(outdir, filename)
# set packages
@@ -315,9 +316,9 @@ class RecipeParser(object):
raise TICError(configmgr.message['recipe_convert_error'])
return reciep_path
- def export2Yaml(self, packages, filepath):
+ def export_yaml(self, packages, filepath):
logger = logging.getLogger(__name__)
- recipe = self.getMergedRecipe()
+ recipe = self.get_merged_recipe()
# config.yaml
config = dict(Default=None, Configurations=[])
config['Default'] = recipe.get('Recipe')
@@ -394,6 +395,6 @@ def YamlInfo(cachedir, configs, repos):
if __name__ == '__main__':
inputs = [{'url': DEFAULT_RECIPE_NAME, 'type': 'recipe'}, {'url': 'http://localhost/repo/recipe/recipe1.yaml', 'type': 'recipe'}]
parser = RecipeParser()
- parser.addRecipes(inputs)
+ parser.add_recipes(inputs)
parser.parse()
print(parser.repositories)
diff --git a/tic/parser/repo_parser.py b/tic/parser/repo_parser.py
index d09979d..f787879 100644
--- a/tic/parser/repo_parser.py
+++ b/tic/parser/repo_parser.py
@@ -20,7 +20,7 @@ import re
import logging
from lxml import etree
from tic.utils.error import TICError
-from tic.utils.rpmmisc import archPolicies, default_arch
+from tic.utils.rpmmisc import archPolicies, default_arch, compare_ver
from tic.config import configmgr
# meta pkg
@@ -54,20 +54,32 @@ class RepodataParser(object):
pkg_dict = pkg_group.get('pkg_dict')
provides_dict = pkg_group.get('provides')
files_dict = pkg_group.get('files')
+ supplements_dict = pkg_group.get('supplements')
meta_info = pkg_group.get('meta_info')
pkg_id = len(pkg_dict)
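+ # packages seen in this repository; a duplicate within the same repo
+ # keeps the newer version, one from another repo is skipped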
+ repo_pkg = {}
for pkg in pkg_list:
pkg_name = pkg.findtext(tag_dic['name'])
-
+ pkg_info = {}
# check whether a package is duplicated.
if pkg_name in pkg_dict:
- # TODO: Apply to policy of duplication
- # logger.warning('package(%s) is duplicated. exclude this package', pkg_name)
- continue
-
- pkg_info = {}
- pkg_info['id'] = pkg_id
+ if pkg_name in repo_pkg:
+ # version compare
+ ver_tag = pkg.find(tag_dic['version'])
+ new_pkg_ver = {'epoch':ver_tag.attrib['epoch'],
+ 'ver':ver_tag.attrib['ver'],
+ 'rel':ver_tag.attrib['rel']}
+ if compare_ver(new_pkg_ver, pkg_dict[pkg_name]['version']) < 1:
+ continue
+ pkg_info['id'] = pkg_dict[pkg_name]['id']
+ else:
+ # TODO: Apply to policy of duplication
+ # logger.warning('package(%s) is duplicated. exclude this package', pkg_name)
+ continue
+ if not pkg_info.get('id'):
+ pkg_info['id'] = pkg_id
+ pkg_id += 1
pkg_info['name'] = pkg_name
pkg_info['arch'] = pkg.findtext(tag_dic['arch'])
pkg_info['summary'] = pkg.findtext(tag_dic['summary'])
@@ -110,6 +122,7 @@ class RepodataParser(object):
pkg_info['version'] = {'epoch':ver_tag.attrib['epoch'],
'ver':ver_tag.attrib['ver'],
'rel':ver_tag.attrib['rel']}
+ repo_pkg[pkg_name] = pkg_info['version']
pkg_info['checksum'] = pkg.findtext(tag_dic['checksum'])
pkg_info['description'] = pkg.findtext(tag_dic['description'])
pkg_info['location'] = pkg.find(tag_dic['location']).attrib['href']
@@ -158,6 +171,18 @@ class RepodataParser(object):
_set_version(recommend, rpm)
dep_list.append(recommend)
pkg_info['recommends'] = dep_list
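+ # index supplements by the supplemented capability name for lookup
+ # during dependency analysis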
+ supplements_tag = format_tag.find(tag_dic['supplements'])
+ if supplements_tag is not None:
+ dep_list = []
+ for rpm in supplements_tag:
+ supplement = dict(name=rpm.attrib['name'])
+ _set_version(supplement, rpm)
+ if rpm.attrib['name'] in supplements_dict:
+ supplements_dict[rpm.attrib['name']].append({'name': pkg_name, 'data': supplement})
+ else:
+ supplements_dict[rpm.attrib['name']] = [{'name': pkg_name, 'data': supplement}]
+ dep_list.append(supplement)
+ pkg_info['supplements'] = dep_list
suggests_tag = format_tag.find(tag_dic['suggests'])
if suggests_tag is not None:
dep_list = []
@@ -176,7 +201,6 @@ class RepodataParser(object):
dep_list.append(file_t.text)
pkg_info['file'] = dep_list
pkg_dict[pkg_name] = pkg_info
- pkg_id += 1
def _prepare_requires_id(self, pkg_group):
logger = logging.getLogger(__name__)
@@ -222,6 +246,7 @@ class RepodataParser(object):
tags['conflicts'] = '{%s}conflicts' % root.nsmap['rpm']
tags['suggests'] = '{%s}suggests' % root.nsmap['rpm']
tags['recommends'] = '{%s}recommends' % root.nsmap['rpm']
+ tags['supplements'] = '{%s}supplements' % root.nsmap['rpm']
tags['file'] = '{%s}file' % root.nsmap[None]
return tags
@@ -232,7 +257,7 @@ class RepodataParser(object):
for pkg_elm in xml_root.findall(tag_dic['package']):
pkg_arch = pkg_elm.findtext(tag_dic['arch'])
if pkg_arch not in archPolicies[self.arch] and pkg_arch not in default_arch:
- continue;
+ continue
if not pkg_data.get(pkg_arch):
pkg_data[pkg_arch] = []
pkg_data[pkg_arch].append(pkg_elm)
@@ -266,8 +291,8 @@ class RepodataParser(object):
files={},
groups={},
conflicts={},
+ supplements={},
meta_info=dict(root=[], sub1=[], sub2=[], category=[]))
-
# parses the repodata (primary.xml)
# for xml_root in xml_list:
# self._xml_parse(pkg_group, xml_root, tag_dic)
diff --git a/tic/repo.py b/tic/repo.py
index de10826..a7c09d4 100644
--- a/tic/repo.py
+++ b/tic/repo.py
@@ -93,9 +93,9 @@ def get_repodata_from_repos(repos, cachedir):
# make temp_dir
base64url = base64.urlsafe_b64encode(baseurl)
- temp_dir = os.path.join(temp_path, base64url);
+ temp_dir = os.path.join(temp_path, base64url)
repomd_file = os.path.join(temp_dir, 'repomd.xml')
- file.make_dirs(temp_dir);
+ file.make_dirs(temp_dir)
#TODO: support local files(local directory)
# local/remote repository
diff --git a/tic/utils/file.py b/tic/utils/file.py
index 12e70b9..08952f0 100644
--- a/tic/utils/file.py
+++ b/tic/utils/file.py
@@ -91,7 +91,7 @@ def decompress_gzip(intput_path, output_path):
return output_path
def copyfile_flock(src, dest):
- ret = dest;
+ ret = dest
try:
with FileLock(dest):
shutil.copy(src, dest)
diff --git a/tic/utils/grabber.py b/tic/utils/grabber.py
index 0f886dd..ee0f949 100644
--- a/tic/utils/grabber.py
+++ b/tic/utils/grabber.py
@@ -20,6 +20,7 @@ import os
import logging
import urllib2
import contextlib
+import httplib
from urlgrabber import grabber
from tic.utils.error import TICError
from tic.utils import process
@@ -50,6 +51,8 @@ def myurlgrab2(url, filename):
msg = str(err)
logger.info(err)
raise TICError(msg)
+ except httplib.BadStatusLine as err:
+ raise TICError(str(err))
except urllib2.URLError as err:
logger.info(err)
raise TICError(configmgr.message['server_error'])
@@ -90,7 +93,4 @@ def myurlgrab(url, filename, proxies, progress_obj = None):
msg += ' on %s' % url
raise TICError(msg)
- return filename
-
-if __name__ == '__main__':
- pass \ No newline at end of file
+ return filename \ No newline at end of file
diff --git a/tic/utils/rpmmisc.py b/tic/utils/rpmmisc.py
index aa14c8e..70c390c 100644
--- a/tic/utils/rpmmisc.py
+++ b/tic/utils/rpmmisc.py
@@ -16,16 +16,17 @@
# Contributors:
# - S-Core Co., Ltd
+import os
import rpm
class Dependency(object):
REQUIRES='requires'
RECOMMENDS='recommends'
+ SUPPLEMENTS='supplements'
SUGGESTS='suggests'
PROVIDES='provides'
CONFILCTS='conflicts'
-
default_arch = ('noarch', 'src')
archPolicies = {
@@ -65,3 +66,10 @@ def meetRequireVersion(req_ver, cmp_ver):
elif cmp_ret == -1 and (req_ver['flags'] in ['GT', 'GE']):
return True
return False
+
+def readRpmHeader(ts, filename):
+ """ Read an rpm header. """
+ fd = os.open(filename, os.O_RDONLY)
+ h = ts.hdrFromFdno(fd)
+ os.close(fd)
+ return h
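
The readRpmHeader() helper added above can be exercised on its own; a short sketch (the rpm path is hypothetical):

    import rpm
    from tic.utils.rpmmisc import readRpmHeader

    ts = rpm.TransactionSet()
    # skip signature/digest verification, as the test's transaction sets do
    ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES | rpm._RPMVSF_NODIGESTS)
    hdr = readRpmHeader(ts, '/var/tmp/sample.rpm')  # hypothetical path
    print('%s-%s-%s' % (hdr['name'], hdr['version'], hdr['release']))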