summaryrefslogtreecommitdiff
path: root/tic
diff options
context:
space:
mode:
Diffstat (limited to 'tic')
-rw-r--r--tic/command.py85
-rw-r--r--tic/config.py23
-rw-r--r--tic/dependency.py173
-rw-r--r--tic/parser/recipe_parser.py45
-rw-r--r--tic/parser/repo_parser.py47
-rw-r--r--tic/repo.py4
-rw-r--r--tic/utils/file.py2
-rw-r--r--tic/utils/grabber.py8
-rw-r--r--tic/utils/rpmmisc.py10
9 files changed, 234 insertions, 163 deletions
diff --git a/tic/command.py b/tic/command.py
index c66743a..c3dcf63 100644
--- a/tic/command.py
+++ b/tic/command.py
@@ -36,23 +36,10 @@ from tic.config import configmgr
DEFAULT_CACHEDIR=configmgr.setting['tempdir']
DEFAULT_ANALYSISDIR=os.path.join(DEFAULT_CACHEDIR, 'analysis')
DEFAULT_KICKSTARTDIR=os.path.join(DEFAULT_CACHEDIR, 'kickstart')
-DEFAULT_RECIPE_NAME='recipe.yaml';
+DEFAULT_RECIPE_NAME='recipe.yaml'
-def analyze(recipe_list):
+def get_pkg_metadata(repoinfo, archecture):
logger = logging.getLogger(__name__)
- if not recipe_list:
- logger.info('Use default recipe because there is no import data')
- recipe_list = default_recipe.getDefaultParameter()
-
- recipe_parser = RecipeParser(recipe_list)
- recipe_parser.parse()
- recipe_info = recipe_parser.getMergedRecipe()
-
- start_time = misc.get_timestamp()
- #Download repodata from repositories (Remote/Local)
- repoinfo = get_repodata_from_repos(recipe_info.get('Repositories'), DEFAULT_CACHEDIR)
- logger.info('time to get repodata from repo: %d ms', misc.get_timestamp() - start_time)
-
checksum_list=[]
for repo in repoinfo:
checksum_list.append(repo['checksum'])
@@ -61,49 +48,71 @@ def analyze(recipe_list):
pkg_group=None
if os.path.exists(analysis_file):
pkg_group=file.read_json(analysis_file)
-
- if not pkg_group or not pkg_group.get('pkg_dict'):
- start_time = misc.get_timestamp()
+ if not pkg_group or 'pkg_dict' not in pkg_group:
# Parse the xml files for the analysis of package (.rpm)
- repo_parser = RepodataParser('armv7l', repoinfo)
+ repo_parser = RepodataParser(archecture, repoinfo)
pkg_group = repo_parser.parse()
- logger.info('packages: %d, provides: %d, files: %d', len(pkg_group['pkg_dict']), len(pkg_group['provides']), len(pkg_group['files']))
- logger.info('time to parse repodata: %d ms', misc.get_timestamp() - start_time)
# dump to cached file
file.write_json_flock(analysis_file, pkg_group)
else:
logger.info('use a cache parsing data - %s', analysis_file)
+ return pkg_group
+
+def analyze(recipe_list):
+ def _make_result_data(repos, viewdata, pkggroup, inst_pkgs):
+ return {'view': viewdata,
+ 'data': {'packages': pkggroup.get('pkg_dict'),
+ 'provides': pkggroup.get('provides'),
+ 'files': pkggroup.get('files'),
+ 'groups': pkggroup.get('groups'),
+ 'conflicts': pkggroup.get('conflicts'),
+ 'supplements': pkggroup.get('supplements')},
+ 'recipes': repos,
+ 'installpackages': inst_pkgs}
+
+ logger = logging.getLogger(__name__)
+ if not recipe_list:
+ logger.info('Use default recipe because there is no import data')
+ recipe_list = default_recipe.get_default_parameter()
+
+ # parse recipe
+ recipe_parser = RecipeParser(recipe_list)
+ recipe_parser.parse()
+ recipe_info = recipe_parser.get_merged_recipe()
+ # Download repodata from repositories (Remote/Local)
+ repoinfo = get_repodata_from_repos(recipe_info.get('Repositories'), DEFAULT_CACHEDIR)
+    # parse repodata and get pkg metadata
+ pkg_group = get_pkg_metadata(repoinfo, 'armv7l')
+ logger.info('pkgs: %d, provides: %d, files: %d',
+ len(pkg_group['pkg_dict']),
+ len(pkg_group['provides']),
+ len(pkg_group['files']))
start_time = misc.get_timestamp()
# Make a data for TIC (Tizen image creation)
view_data = make_view_data(pkg_group)
- logger.info('time to create view-tree: %d ms', misc.get_timestamp() - start_time)
+ logger.info('time to make view-tree: %d ms', misc.get_timestamp() - start_time)
+
# analyze install-dependency
start_time = misc.get_timestamp()
inst_packages = get_installed_packages(recipe_info, repoinfo, pkg_group)
- logger.info('installed package: %d', len(inst_packages))
+ logger.info('install-packages: %d', len(inst_packages))
logger.info('time to analyze dependency: %d ms', misc.get_timestamp() - start_time)
-
- result = {'view': view_data,
- 'data': {'packages': pkg_group.get('pkg_dict'),
- 'provides': pkg_group.get('provides'),
- 'files': pkg_group.get('files'),
- 'groups': pkg_group.get('groups'),
- 'conflicts': pkg_group.get('conflicts')},
- 'recipes': recipe_parser.getRepositories(),
- 'installpackages': inst_packages}
+ result = _make_result_data(recipe_parser.get_repositories(),
+ view_data,
+ pkg_group,
+ inst_packages)
return result
def imports(recipe_list):
logger = logging.getLogger(__name__)
if not recipe_list:
logger.info('Use default recipe because there is no import data')
- recipe_list = default_recipe.getDefaultParameter()
+ recipe_list = default_recipe.get_default_parameter()
recipe_parser = RecipeParser(recipe_list)
recipe_parser.parse()
- result = {'recipes': recipe_parser.getRepositories()}
- #result = {'imports': recipe_parser.getMergedRepositories()}
+ result = {'recipes': recipe_parser.get_repositories()}
return result
def exports(export_type, recipes, packages, outdir, filename=None):
@@ -124,17 +133,17 @@ def exports(export_type, recipes, packages, outdir, filename=None):
recipe_parser.parse()
result = None
if export_type == 'recipe':
- recipe_path = recipe_parser.export2Recipe(packages, outdir, filename)
+ recipe_path = recipe_parser.export_recipe(packages, outdir, filename)
logger.info('export the recipe to %s' % recipe_path)
result = {'path': recipe_path}
elif export_type == 'ks':
# 1. create yaml files
- yaml_info = recipe_parser.export2Yaml(packages, DEFAULT_KICKSTARTDIR)
+ yaml_info = recipe_parser.export_yaml(packages, DEFAULT_KICKSTARTDIR)
# 2. create kickstart(.ks) using kickstarter tool
options = KSoption(yaml_info.configs, yaml_info.repos, yaml_info.cachedir)
kswriter(options)
# check whether the ks exists
- recipe_info = recipe_parser.getMergedRecipe()
+ recipe_info = recipe_parser.get_merged_recipe()
baseline=recipe_info['Recipe'].get('Baseline')
ksname= ''.join([recipe_info['Recipe'].get('FileName'), '.ks'])
kspath=os.path.join(yaml_info.cachedir, baseline, ksname)
diff --git a/tic/config.py b/tic/config.py
index 4648f95..065146b 100644
--- a/tic/config.py
+++ b/tic/config.py
@@ -19,6 +19,7 @@
import os
import ConfigParser
+TEST = False
DEFAULT_MSG_CONF = "/etc/tic-core/message.conf"
DEFAULT_CONF = "/etc/tic-core/config.conf"
@@ -66,7 +67,7 @@ class ConfigMgr(object):
def __init__(self):
self._reset()
for conf_path in [DEFAULT_CONF, DEFAULT_MSG_CONF]:
- self._setConfig(conf_path)
+ self._set_config(conf_path)
def _reset(self):
for sec, vals in self.DEFAULT_TIC.iteritems():
@@ -74,18 +75,16 @@ class ConfigMgr(object):
for sec, vals in self.DEFAULT_MESSAGE.iteritems():
setattr(self, sec, vals)
- def _setConfig(self, conf):
- configParser = ConfigParser.ConfigParser()
+ def _set_config(self, conf):
+ config_parser = ConfigParser.ConfigParser()
try:
- if os.path.exists(conf):
- configParser.read(conf)
- for section in configParser.sections():
- for option in configParser.options(section):
- try:
- opt_attr=getattr(self, section)
- opt_attr[option]=configParser.get(section, option)
- except:
- pass
+ if not os.path.exists(conf):
+ return
+ config_parser.read(conf)
+ for section in config_parser.sections():
+ for option in config_parser.options(section):
+ opt_attr=getattr(self, section)
+ opt_attr[option]=config_parser.get(section, option)
except Exception as e:
print(e)
diff --git a/tic/dependency.py b/tic/dependency.py
index fefc53b..af98678 100644
--- a/tic/dependency.py
+++ b/tic/dependency.py
@@ -28,6 +28,19 @@ DEFAULT_PROFILE = 'EMPTY'
def get_installed_packages(recipe, repoinfo, pkg_group):
logger = logging.getLogger(__name__)
+ def _select_rpm_by_name(dep_name):
+ return _select_rpm(dict(name=dep_name))
+
+ def _select_rpm(dep_rpm, recommends=None):
+ selected = None
+ if dep_rpm['name'] in provides:
+ selected = _select_rpm_from_provides(provides[dep_rpm['name']], dep_rpm, recommends)
+ elif dep_rpm['name'] in files:
+ selected = _select_rpm_from_files(files[dep_rpm['name']], dep_rpm)
+ elif dep_rpm['name'] in pkg_dict:
+ selected = pkg_dict.get(dep_rpm['name'])
+ return selected
+
def _select_rpm_from_files(fileList, require):
if not fileList or not require:
return None
@@ -44,8 +57,8 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
if not _check_conflicts(file_info):
return file_info
return pkg_dict.get(fileList[0])
-
- def _select_rpm(capability, require, recommends=None):
+
+ def _select_rpm_from_provides(capability, require, recommends=None):
provide_list = []
# 1. Choose the rpm included in version from provides
if require.get('ver'):
@@ -70,7 +83,7 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
# 2-1. Choose the default rpm or the selected rpm
for pro in provide_list:
provide_info = pkg_dict.get(pro['name'])
- if provide_info['name'] in required_inst_rpms or selected[provide_info['id']] >= 1:
+ if provide_info['name'] in required_inst_rpms or selected[provide_info['id']] > 0:
return provide_info
    # 2-2. Choose the default profile
@@ -219,7 +232,7 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
_add_conflicts(pkg_info)
# Installation dependency analysis of rpm
- for dep_tag in [Dependency.REQUIRES, Dependency.RECOMMENDS]:
+ for dep_tag in [Dependency.REQUIRES]:
if pkg_info.get(dep_tag):
for req in pkg_info.get(dep_tag):
choose = None
@@ -227,23 +240,8 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
if req['name'] == pkg_info['name']:
continue
# Find dependency rpm based on capability/files
- if req['name'] in provides:
- # Select the rpm that meets the condition (version)
- if dep_tag == Dependency.REQUIRES:
- choose = _select_rpm(provides[req['name']], req, pkg_info.get('recommends'))
- else:
- choose = _select_rpm(provides[req['name']], req)
- elif req['name'] in files:
- choose = _select_rpm_from_files(files[req['name']], req)
- elif req['name'] in pkg_dict:
- choose = pkg_dict.get(req['name'])
+ choose = _select_rpm(req, pkg_info.get('recommends'))
- if dep_tag == Dependency.RECOMMENDS:
- # A Recommends B: B is installed when A is installed and B has no conflicts.
- if not choose or _check_conflicts(choose) is not None:
- #logger.info('%s recommended by %s is ignored for selection (Conflict)' % (req['name'], pkg_info['name']))
- continue
-
if choose:
# add forward/backward reference
_create_reference(pkg_info, choose)
@@ -297,7 +295,7 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
# delete backward reference of group node
for pkgname in g_pkg_list:
pkg = pkg_dict[pkgname]
- pkg['backward'] = None;
+ pkg['backward'] = None
group_visited[g_id][pkg['name']] = -1
return True
@@ -309,7 +307,7 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
for i in range(len(con_list)):
if con_list[i]['name'] == node['name']:
del con_list[i]
- break;
+ break
def _remove_reference(parent, node):
if parent is not None:
@@ -368,12 +366,12 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
for pro in new_pkg['provides']:
if r_name == pro['name']:
matched = True
- break;
+ break
if not matched and new_pkg.get('file'):
for fname in new_pkg['file']:
if r_name == fname:
matched = True
- break;
+ break
if not matched:
return False
return True
@@ -431,34 +429,29 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
# add rpms into conflicts table.
_add_conflicts(pkg_info)
+
+    # check weak dependencies (recommends, supplements)
+ if Dependency.RECOMMENDS in pkg_info:
+ for recommend in pkg_info[Dependency.RECOMMENDS]:
+ week_deps_rpms.append(recommend)
+ if Dependency.PROVIDES in pkg_info:
+ for provide in pkg_info[Dependency.PROVIDES]:
+ if provide['name'] in supplements:
+ week_deps_rpms.extend(supplements[provide['name']])
+
# Installation dependency analysis of rpm
- for dep_tag in [Dependency.REQUIRES, Dependency.RECOMMENDS]:
+ for dep_tag in [Dependency.REQUIRES]:
if pkg_info.get(dep_tag):
for req in pkg_info.get(dep_tag):
choose = None
# self-reference (e.g. vim-base)
if req['name'] == pkg_info['name']:
continue
- if req['name'] in provides:
- if dep_tag == Dependency.REQUIRES:
- choose = _select_rpm(provides[req['name']], req, pkg_info.get('recommends'))
- else:
- choose = _select_rpm(provides[req['name']], req)
- elif req['name'] in files:
- choose = _select_rpm_from_files(files[req['name']], req)
- elif req['name'] in pkg_dict:
- choose = pkg_dict.get(req['name'])
-
- if dep_tag == Dependency.RECOMMENDS:
- # A Recommends B: B is installed when A is installed and B has no conflicts.
- if not choose or _check_conflicts(choose) is not None:
- #logger.info('%s recommended by %s is ignored for selection (Conflict)' % (req['name'], pkg_info['name']))
- continue
+ # Select packages with inst-dependencies
+ choose = _select_rpm(req, pkg_info.get('recommends'))
if choose:
- # add refer count, only requires
- if dep_tag == Dependency.REQUIRES:
- _add_refer(choose, req)
+ _add_refer(choose, req)
if selected[choose['id']] == 0:
if not _check_dep_validation(choose, select_list):
@@ -471,10 +464,8 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
# recipe/repo
if not recipe or not repoinfo:
return []
-
group_set = set([])
pkg_set = set([])
-
if recipe['Recipe'].get('Groups'):
group_set.update(recipe['Recipe'].get('Groups'))
if recipe['Recipe'].get('ExtraPackages'):
@@ -499,14 +490,15 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
for pkgreq in pkglist.findall('packagereq'):
plist.append(pkgreq.text)
pkg_set.update(set(plist))
- group_set.discard(group_name);
+ group_set.discard(group_name)
pkg_dict = pkg_group.get('pkg_dict')
provides = pkg_group.get('provides')
files = pkg_group.get('files')
groups = pkg_group.get('groups')
conflicts = pkg_group.get('conflicts')
-
+ supplements = pkg_group.get('supplements')
+
number = [0] # for pkg count
scc_num = [0] # for scc count
group_num = [0] # for group count
@@ -519,22 +511,29 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
capabilities = {}
require_refer = {}
- candidate_rpms = set([])
- # add rpms to install
- select_rpms = set([])
# add reference value for rpm selection
required_inst_rpms = pkg_set.copy()
+ # add rpms to install
+ select_rpms = set([])
+ select_week_rpms = set([])
+ # etc
+ candidate_rpms = set([])
+ include_week_deps = []
+ exclude_week_deps = set([])
# 1. Check whether dependencies of rpm are available.
for pkg_name in pkg_set:
- selected_pkg = _get_pkg_info(pkg_name)
+ # select the matching package
+ selected_pkg = _select_rpm_by_name(pkg_name)
if selected_pkg:
if selected[selected_pkg['id']] == 0:
+ week_deps_rpms = []
select_list = []
comp_rpms = set([])
if _check_dep_validation(selected_pkg, select_list):
select_rpms.add(pkg_name)
candidate_rpms.update(comp_rpms)
+ include_week_deps.extend(week_deps_rpms)
else:
# case: conflict or rpm does not exist
_remove_dep_rpm(select_list)
@@ -544,6 +543,34 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
else:
logger.info(configmgr.message['package_not_exist'] % pkg_name)
+    # 2. Check weak dependencies (recommends/supplements)
+ loop_deps = include_week_deps
+ while loop_deps:
+ include_week_deps = []
+ # recommends
+ for week_rpm in loop_deps:
+ if week_rpm['name'] in exclude_week_deps:
+ continue
+
+ selected_pkg = _select_rpm(week_rpm)
+ if selected_pkg:
+ if selected[selected_pkg['id']] == 0:
+ week_deps_rpms = []
+ select_list = []
+ comp_rpms = set([])
+ if _check_dep_validation(selected_pkg, select_list):
+ select_week_rpms.add(selected_pkg['name'])
+ candidate_rpms.update(comp_rpms)
+ include_week_deps.extend(week_deps_rpms)
+ else:
+ # case: conflict or rpm does not exist
+ _remove_dep_rpm(select_list)
+ exclude_week_deps.add(week_rpm['name'])
+ else:
+ # ignore package selection
+                logger.info('the weak dependency package(%s) does not exist.' % week_rpm['name'])
+ loop_deps = include_week_deps
+
# init conflict table and reference
number[0] = 0
selected = [0] * len(pkg_dict)
@@ -552,25 +579,27 @@ def get_installed_packages(recipe, repoinfo, pkg_group):
required_inst_rpms.update(select_rpms)
required_inst_rpms.update(candidate_rpms)
- print(candidate_rpms)
-
- # 2. Analyze rpm installation dependencies.
- for pkg_name in select_rpms:
- progress['status'] = True
- selected_pkg = _get_pkg_info(pkg_name)
- if selected_pkg:
- rpm_stack = []
- selected_pkg['selfChecked'] = True
- if selected[selected_pkg['id']] == 0:
- inst_rpms = _analyze_dep(selected_pkg)
- if progress['status']:
- install_rpm.update(inst_rpms)
- else:
- # Error Case
- logger.info("[Dependency Issue] Could not install the %s" % selected_pkg)
- # delete forward/backward reference
- group_visited = {}
- _remove_reference(None, selected_pkg)
- else:
- logger.info(configmgr.message['package_not_exist'] % pkg_name)
+ # 3. Analyze rpm installation dependencies.
+ for idx, value in enumerate([select_rpms, select_week_rpms]):
+ for pkg_name in value:
+ progress['status'] = True
+ #selected_pkg = _get_pkg_info(pkg_name)
+ selected_pkg = _select_rpm_by_name(pkg_name)
+ if selected_pkg:
+ rpm_stack = []
+ if idx == 0:
+ # Only a select_rpms has a selfchecked
+ selected_pkg['selfChecked'] = True
+ if selected[selected_pkg['id']] == 0:
+ inst_rpms = _analyze_dep(selected_pkg)
+ if progress['status']:
+ install_rpm.update(inst_rpms)
+ else:
+ # Error Case
+ logger.info("[Dependency Issue] Could not install the %s" % selected_pkg)
+ # delete forward/backward reference
+ group_visited = {}
+ _remove_reference(None, selected_pkg)
+ else:
+ logger.info(configmgr.message['package_not_exist'] % pkg_name)
return list(install_rpm)
diff --git a/tic/parser/recipe_parser.py b/tic/parser/recipe_parser.py
index 82b9e4c..5165c64 100644
--- a/tic/parser/recipe_parser.py
+++ b/tic/parser/recipe_parser.py
@@ -90,9 +90,9 @@ class DefaultRecipe(object):
logger.info(err)
except yaml.YAMLError as err:
logger.info(err)
- def getDefaultRecipe(self):
+ def get_default_recipe(self):
return copy.deepcopy(self.DEFAULT_RECIPE)
- def getSystemConfig(self):
+ def get_system_config(self):
data = copy.deepcopy(self.DEFAULT_RECIPE)
for field in RECIPE_EXTEND_FIELD:
if field == 'Partitions':
@@ -102,7 +102,8 @@ class DefaultRecipe(object):
if data.get(field):
data[field] = []
return data
- def getDefaultParameter(self):
+ @classmethod
+ def get_default_parameter(cls):
return [dict(url=DEFAULT_RECIPE_NAME, type='recipe')]
default_recipe = DefaultRecipe()
@@ -115,7 +116,7 @@ class RecipeParser(object):
self._repositories = None
self._recipe = None
# add recipe input
- self.addRecipes(inputs)
+ self.add_recipes(inputs)
def parse(self):
logger = logging.getLogger(__name__)
@@ -131,7 +132,7 @@ class RecipeParser(object):
if data_type == 'recipe':
# default recipe
if data.get('url') == DEFAULT_RECIPE_NAME:
- self.recipes[data.get('url')] = default_recipe.getDefaultRecipe()
+ self.recipes[data.get('url')] = default_recipe.get_default_recipe()
else:
with contextlib.closing(urllib2.urlopen(data.get('url'))) as op:
self.recipes[data.get('url')] = yaml.load(op.read())
@@ -152,7 +153,7 @@ class RecipeParser(object):
logger.error(err)
raise TICError(configmgr.message['recipe_parse_error'] % data.get('url'))
- def addRecipes(self, inputs):
+ def add_recipes(self, inputs):
if inputs:
if isinstance(inputs, list):
for data in inputs:
@@ -160,12 +161,12 @@ class RecipeParser(object):
else:
self.inputs.append(inputs)
- def getRepositories(self):
+ def get_repositories(self):
if not self._repositories:
- self._repositories = self._getAllRepositories()
+ self._repositories = self._get_all_repositories()
return self._repositories
- def _getAllRepositories(self):
+ def _get_all_repositories(self):
repos = []
name_count = 1
for data in self.inputs:
@@ -201,7 +202,7 @@ class RecipeParser(object):
repos.append(data)
return repos
- def _renameRepository(self, repo_dict, repo_name):
+ def _rename_repository(self, repo_dict, repo_name):
number = repo_dict.get(repo_name)
new_name = ''.join([repo_name, '_', str(number)])
while(new_name in repo_dict):
@@ -210,9 +211,9 @@ class RecipeParser(object):
repo_dict[repo_name] = number + 1
return new_name
- def getMergedRepositories(self):
+ def get_merged_repositories(self):
result = []
- repositories = self.getRepositories()
+ repositories = self.get_repositories()
repo_name = {} # 'name': count
repo_url = {} # 'url': exist
for target in repositories:
@@ -224,7 +225,7 @@ class RecipeParser(object):
continue
# if repo's name is duplicated, rename it (postfix '_count')
if repo.get('name') in repo_name:
- repo['name'] = self._renameRepository(repo_name, repo['name'])
+ repo['name'] = self._rename_repository(repo_name, repo['name'])
else:
repo_name[repo['name']] = 1
repo_url[repo['url']] = 1
@@ -237,18 +238,18 @@ class RecipeParser(object):
if target.get('url') in repo_url:
continue
if target['name'] in repo_name:
- target['name'] = self._renameRepository(repo_name, target['name'])
+ target['name'] = self._rename_repository(repo_name, target['name'])
else:
repo_name[target['name']] = 1
repo_url[target['url']] = 1
result.append(target)
return result
- def getMergedRecipe(self):
+ def get_merged_recipe(self):
if self._recipe:
return self._recipe
- mergedInfo = default_recipe.getSystemConfig()
+ mergedInfo = default_recipe.get_system_config()
# merge recipe info
for i in xrange(len(self.inputs), 0, -1):
if self.inputs[i-1].get('type') == 'recipe':
@@ -278,15 +279,15 @@ class RecipeParser(object):
mergedInfo[extName].reverse()
# set repositories
- mergedInfo['Repositories'] = self.getMergedRepositories()
+ mergedInfo['Repositories'] = self.get_merged_repositories()
if mergedInfo.get('Repositories'):
for repo in mergedInfo['Repositories']:
mergedInfo['Recipe']['Repos'].append(repo['name'])
return mergedInfo
- def export2Recipe(self, packages, outdir, filename):
+ def export_recipe(self, packages, outdir, filename):
logger = logging.getLogger(__name__)
- recipe = self.getMergedRecipe()
+ recipe = self.get_merged_recipe()
make_dirs(outdir)
reciep_path = os.path.join(outdir, filename)
# set packages
@@ -315,9 +316,9 @@ class RecipeParser(object):
raise TICError(configmgr.message['recipe_convert_error'])
return reciep_path
- def export2Yaml(self, packages, filepath):
+ def export_yaml(self, packages, filepath):
logger = logging.getLogger(__name__)
- recipe = self.getMergedRecipe()
+ recipe = self.get_merged_recipe()
# config.yaml
config = dict(Default=None, Configurations=[])
config['Default'] = recipe.get('Recipe')
@@ -394,6 +395,6 @@ def YamlInfo(cachedir, configs, repos):
if __name__ == '__main__':
inputs = [{'url': DEFAULT_RECIPE_NAME, 'type': 'recipe'}, {'url': 'http://localhost/repo/recipe/recipe1.yaml', 'type': 'recipe'}]
parser = RecipeParser()
- parser.addRecipes(inputs)
+ parser.add_recipes(inputs)
parser.parse()
print(parser.repositories)
diff --git a/tic/parser/repo_parser.py b/tic/parser/repo_parser.py
index d09979d..f787879 100644
--- a/tic/parser/repo_parser.py
+++ b/tic/parser/repo_parser.py
@@ -20,7 +20,7 @@ import re
import logging
from lxml import etree
from tic.utils.error import TICError
-from tic.utils.rpmmisc import archPolicies, default_arch
+from tic.utils.rpmmisc import archPolicies, default_arch, compare_ver
from tic.config import configmgr
# meta pkg
@@ -54,20 +54,32 @@ class RepodataParser(object):
pkg_dict = pkg_group.get('pkg_dict')
provides_dict = pkg_group.get('provides')
files_dict = pkg_group.get('files')
+ supplements_dict = pkg_group.get('supplements')
meta_info = pkg_group.get('meta_info')
pkg_id = len(pkg_dict)
+ repo_pkg = {}
for pkg in pkg_list:
pkg_name = pkg.findtext(tag_dic['name'])
-
+ pkg_info = {}
# check whether a package is duplicated.
if pkg_name in pkg_dict:
- # TODO: Apply to policy of duplication
- # logger.warning('package(%s) is duplicated. exclude this package', pkg_name)
- continue
-
- pkg_info = {}
- pkg_info['id'] = pkg_id
+ if pkg_name in repo_pkg:
+ # version compare
+ ver_tag = pkg.find(tag_dic['version'])
+ new_pkg_ver = {'epoch':ver_tag.attrib['epoch'],
+ 'ver':ver_tag.attrib['ver'],
+ 'rel':ver_tag.attrib['rel']}
+ if compare_ver(new_pkg_ver, pkg_dict[pkg_name]['version']) < 1:
+ continue
+ pkg_info['id'] = pkg_dict[pkg_name]['id']
+ else:
+ # TODO: Apply to policy of duplication
+ # logger.warning('package(%s) is duplicated. exclude this package', pkg_name)
+ continue
+ if not pkg_info.get('id'):
+ pkg_info['id'] = pkg_id
+ pkg_id += 1
pkg_info['name'] = pkg_name
pkg_info['arch'] = pkg.findtext(tag_dic['arch'])
pkg_info['summary'] = pkg.findtext(tag_dic['summary'])
@@ -110,6 +122,7 @@ class RepodataParser(object):
pkg_info['version'] = {'epoch':ver_tag.attrib['epoch'],
'ver':ver_tag.attrib['ver'],
'rel':ver_tag.attrib['rel']}
+ repo_pkg[pkg_name] = pkg_info['version']
pkg_info['checksum'] = pkg.findtext(tag_dic['checksum'])
pkg_info['description'] = pkg.findtext(tag_dic['description'])
pkg_info['location'] = pkg.find(tag_dic['location']).attrib['href']
@@ -158,6 +171,18 @@ class RepodataParser(object):
_set_version(recommend, rpm)
dep_list.append(recommend)
pkg_info['recommends'] = dep_list
+ supplements_tag = format_tag.find(tag_dic['supplements'])
+ if supplements_tag is not None:
+ dep_list = []
+ for rpm in supplements_tag:
+ supplement = dict(name=rpm.attrib['name'])
+ _set_version(supplement, rpm)
+ if rpm.attrib['name'] in supplements_dict:
+ supplements_dict[rpm.attrib['name']].append({'name': pkg_name, 'data': supplement})
+ else:
+ supplements_dict[rpm.attrib['name']] = [{'name': pkg_name, 'data': supplement}]
+ dep_list.append(supplement)
+ pkg_info['supplements'] = dep_list
suggests_tag = format_tag.find(tag_dic['suggests'])
if suggests_tag is not None:
dep_list = []
@@ -176,7 +201,6 @@ class RepodataParser(object):
dep_list.append(file_t.text)
pkg_info['file'] = dep_list
pkg_dict[pkg_name] = pkg_info
- pkg_id += 1
def _prepare_requires_id(self, pkg_group):
logger = logging.getLogger(__name__)
@@ -222,6 +246,7 @@ class RepodataParser(object):
tags['conflicts'] = '{%s}conflicts' % root.nsmap['rpm']
tags['suggests'] = '{%s}suggests' % root.nsmap['rpm']
tags['recommends'] = '{%s}recommends' % root.nsmap['rpm']
+ tags['supplements'] = '{%s}supplements' % root.nsmap['rpm']
tags['file'] = '{%s}file' % root.nsmap[None]
return tags
@@ -232,7 +257,7 @@ class RepodataParser(object):
for pkg_elm in xml_root.findall(tag_dic['package']):
pkg_arch = pkg_elm.findtext(tag_dic['arch'])
if pkg_arch not in archPolicies[self.arch] and pkg_arch not in default_arch:
- continue;
+ continue
if not pkg_data.get(pkg_arch):
pkg_data[pkg_arch] = []
pkg_data[pkg_arch].append(pkg_elm)
@@ -266,8 +291,8 @@ class RepodataParser(object):
files={},
groups={},
conflicts={},
+ supplements={},
meta_info=dict(root=[], sub1=[], sub2=[], category=[]))
-
# parses the repodata (primary.xml)
# for xml_root in xml_list:
# self._xml_parse(pkg_group, xml_root, tag_dic)
diff --git a/tic/repo.py b/tic/repo.py
index de10826..a7c09d4 100644
--- a/tic/repo.py
+++ b/tic/repo.py
@@ -93,9 +93,9 @@ def get_repodata_from_repos(repos, cachedir):
# make temp_dir
base64url = base64.urlsafe_b64encode(baseurl)
- temp_dir = os.path.join(temp_path, base64url);
+ temp_dir = os.path.join(temp_path, base64url)
repomd_file = os.path.join(temp_dir, 'repomd.xml')
- file.make_dirs(temp_dir);
+ file.make_dirs(temp_dir)
#TODO: support local files(local directory)
# local/remote repository
diff --git a/tic/utils/file.py b/tic/utils/file.py
index 12e70b9..08952f0 100644
--- a/tic/utils/file.py
+++ b/tic/utils/file.py
@@ -91,7 +91,7 @@ def decompress_gzip(intput_path, output_path):
return output_path
def copyfile_flock(src, dest):
- ret = dest;
+ ret = dest
try:
with FileLock(dest):
shutil.copy(src, dest)
diff --git a/tic/utils/grabber.py b/tic/utils/grabber.py
index 0f886dd..ee0f949 100644
--- a/tic/utils/grabber.py
+++ b/tic/utils/grabber.py
@@ -20,6 +20,7 @@ import os
import logging
import urllib2
import contextlib
+import httplib
from urlgrabber import grabber
from tic.utils.error import TICError
from tic.utils import process
@@ -50,6 +51,8 @@ def myurlgrab2(url, filename):
msg = str(err)
logger.info(err)
raise TICError(msg)
+ except httplib.BadStatusLine as err:
+ raise TICError(str(err))
except urllib2.URLError as err:
logger.info(err)
raise TICError(configmgr.message['server_error'])
@@ -90,7 +93,4 @@ def myurlgrab(url, filename, proxies, progress_obj = None):
msg += ' on %s' % url
raise TICError(msg)
- return filename
-
-if __name__ == '__main__':
- pass \ No newline at end of file
+ return filename \ No newline at end of file
diff --git a/tic/utils/rpmmisc.py b/tic/utils/rpmmisc.py
index aa14c8e..70c390c 100644
--- a/tic/utils/rpmmisc.py
+++ b/tic/utils/rpmmisc.py
@@ -16,16 +16,17 @@
# Contributors:
# - S-Core Co., Ltd
+import os
import rpm
class Dependency(object):
REQUIRES='requires'
RECOMMENDS='recommends'
+ SUPPLEMENTS='supplements'
SUGGESTS='suggests'
PROVIDES='provides'
CONFILCTS='conflicts'
-
default_arch = ('noarch', 'src')
archPolicies = {
@@ -65,3 +66,10 @@ def meetRequireVersion(req_ver, cmp_ver):
elif cmp_ret == -1 and (req_ver['flags'] in ['GT', 'GE']):
return True
return False
+
+def readRpmHeader(ts, filename):
+ """ Read an rpm header. """
+ fd = os.open(filename, os.O_RDONLY)
+ h = ts.hdrFromFdno(fd)
+ os.close(fd)
+ return h