本文整理汇总了Python中rebasehelper.logger.logger.info函数的典型用法代码示例。如果您正苦于以下问题:Python info函数的具体用法?Python info怎么用?Python info使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了info函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: print_summary
def print_summary(cls, path):
    """Print summary information about the rebase and log it to a file.

    :param path: destination file for the summary report
    :raises RebaseHelperError: if the report file cannot be created
    """
    for key, value in six.iteritems(OutputLogger.get_summary_info()):
        logger.info("%s %s\n", key, value)
    try:
        LoggerHelper.add_file_handler(logger_report, path)
    except (OSError, IOError):
        raise RebaseHelperError("Can not create results file '%s'" % path)
    if OutputLogger.get_patches():
        cls.print_patches(OutputLogger.get_patches(), '\nSummary information about patches:')
    # report old and new builds, in that order
    for pkg in ('old', 'new'):
        build_info = OutputLogger.get_build(pkg)
        if build_info:
            cls.print_rpms(build_info, pkg.capitalize())
            cls.print_build_logs(build_info, pkg.capitalize())
    cls.print_pkgdiff_tool()
示例2: build
def build(cls, spec, results_dir, srpm, **kwargs):
    """Build binary RPMs from the given SRPM using rpmbuild.

    :param spec: SpecFile object
    :param results_dir: absolute path to DIR where results should be stored
    :param srpm: absolute path to SRPM
    :return: dict with:
        'rpm' -> list with absolute paths to RPMs
        'logs' -> list with absolute paths to build_logs
    """
    cls.logs = []
    rpm_results_dir = os.path.join(results_dir, "RPM")
    sources = spec.get_sources()
    patches = [patch.get_path() for patch in spec.get_patches()]
    with RpmbuildTemporaryEnvironment(sources, patches, spec.get_path(),
                                      rpm_results_dir) as tmp_env:
        env = tmp_env.env()
        tmp_dir = tmp_env.path()
        tmp_results_dir = env.get(RpmbuildTemporaryEnvironment.TEMPDIR_RESULTS)
        built_rpms = cls._build_rpm(srpm, tmp_dir, tmp_results_dir, rpm_results_dir,
                                    builder_options=cls.get_builder_options(**kwargs))
    logger.info("Building RPMs finished successfully")
    # map the RPMs built in the temporary environment onto results_dir
    rpms = [os.path.join(rpm_results_dir, os.path.basename(rpm)) for rpm in built_rpms]
    logger.verbose("Successfully built RPMs: '%s'", str(rpms))
    # collect every build log produced in the results directory
    cls.logs.extend(log for log in PathHelper.find_all_files(rpm_results_dir, '*.log'))
    logger.verbose("logs: '%s'", str(cls.logs))
    return dict(rpm=rpms, logs=cls.logs)
示例3: _scratch_build
def _scratch_build(cls, source, **kwargs):
    """Submit a scratch build of the given SRPM to Koji.

    :param source: path to the SRPM to build
    :return: tuple (rpms, logs, task_id); rpms and logs are None when
             'builds_nowait' is set, because the task is not waited for
    :raises BinaryPackageBuildError: if the Koji build task failed
    """
    session = cls.koji_helper.session_maker()
    remote = cls.koji_helper.upload_srpm(session, source)
    task_id = session.build(remote, cls.target_tag, cls.opts, priority=cls.priority)
    if kwargs['builds_nowait']:
        # caller does not want to wait for the build to finish
        return None, None, task_id
    weburl = cls.weburl + '/taskinfo?taskID=%i' % task_id
    # FIX: use lazy %-style logging args instead of eager string concatenation
    logger.info('Koji task_id is here:\n%s', weburl)
    session.logout()
    task_dict = cls.koji_helper.watch_koji_tasks(session, [task_id])
    task_list = []
    package_failed = False
    # FIX: iterate key/value pairs directly instead of six.iterkeys()
    # followed by a redundant task_dict[key] lookup
    for key, state in six.iteritems(task_dict):
        if state == koji.TASK_STATES['FAILED']:
            package_failed = True
        task_list.append(key)
    rpms, logs = cls.koji_helper.download_scratch_build(
        task_list, os.path.dirname(source).replace('SRPM', 'RPM'))
    if package_failed:
        weburl = '%s/taskinfo?taskID=%i' % (cls.weburl, task_list[0])
        logger.info('RPM build failed %s', weburl)
        logs.append(weburl)
        cls.logs.append(weburl)
        raise BinaryPackageBuildError
    logs.append(weburl)
    return rpms, logs, task_id
示例4: download_file
def download_file(url, destination_name):
    """Download a file using pycurl.

    :param url: URL from which to download the file
    :param destination_name: path where to store the downloaded file
    :return: None
    :raises ReferenceError: if the download fails
    """
    if os.path.exists(destination_name):
        # never overwrite a file that was already downloaded
        return
    with open(destination_name, 'wb') as f:
        curl = pycurl.Curl()
        curl.setopt(pycurl.URL, url)
        curl.setopt(pycurl.CONNECTTIMEOUT, 30)
        curl.setopt(pycurl.FOLLOWLOCATION, 1)
        curl.setopt(pycurl.MAXREDIRS, 5)
        curl.setopt(pycurl.TIMEOUT, 300)
        curl.setopt(pycurl.WRITEDATA, f)
        try:
            logger.info('Downloading sources from URL %s', url)
            curl.perform()
        except pycurl.error as error:
            # BUG FIX: the original passed url/error as extra constructor
            # arguments, so the message was never interpolated; format it
            # explicitly before raising
            raise ReferenceError("Downloading '%s' failed with error '%s'." % (url, error))
        finally:
            # close the handle on both the success and the failure path
            # (replaces the duplicated close() in except/else branches)
            curl.close()
示例5: print_summary
def print_summary(cls, path, results):
    """Print the rebase summary and store the report to a file.

    :param path: file the textual report is written into
    :param results: results store holding data gathered during the rebase
    """
    if results.get_summary_info():
        for name, info in results.get_summary_info().items():
            logger.info("%s %s\n", name, info)
    LoggerHelper.add_file_handler(logger_report, path)
    cls.results_store = results
    cls.print_success_message()
    logger_report.info("All result files are stored in %s", os.path.dirname(path))
    cls.print_changes_patch()
    cls.print_checkers_text_output(results.get_checkers())
    cls.print_build_log_hooks_result(results.get_build_log_hooks())
    if results.get_patches():
        cls.print_patches(results.get_patches())
    cls.print_message_and_separator("\nRPMS")
    # report builds of the old and new package version, in that order
    for version in ('old', 'new'):
        build = results.get_build(version)
        if build:
            cls.print_rpms_and_logs(build, version.capitalize())
示例6: prepare_next_run
def prepare_next_run(self, results_dir):
    """Decide whether another rebuild attempt should be made.

    Runs build log hooks (only after a failed build of the new RPM
    packages), lets the user modify the SPEC file in interactive mode,
    and cleans the per-version output directories before the next run.

    :param results_dir: path to the directory with rebase results
    :return: True if the build should be retried, False otherwise
    """
    changes_made = False
    # Running build log hooks only makes sense after a failed build of
    # new RPM packages; results_dir/new-build/RPM exists only if that
    # build has been run.
    if os.path.exists(os.path.join(results_dir, 'new-build', 'RPM')):
        changes_made = plugin_manager.build_log_hooks.run(self.spec_file, self.rebase_spec_file, **self.kwargs)
    # persist the current content of the rebase SPEC file
    self.rebase_spec_file.save()
    interactive = not self.conf.non_interactive
    if interactive and InputHelper.get_message('Do you want to try it one more time'):
        logger.info('Now it is time to make changes to %s if necessary.', self.rebase_spec_file.path)
    elif not interactive and changes_made:
        logger.info('Build log hooks made some changes to the SPEC file, starting the build process again.')
    else:
        return False
    if interactive and not InputHelper.get_message('Do you want to continue with the rebuild now'):
        return False
    # pick up potential manual modifications of the SPEC file
    self.rebase_spec_file._read_spec_content()  # pylint: disable=protected-access
    self.rebase_spec_file._update_data()  # pylint: disable=protected-access
    # drop the output directories of the previous attempt
    for subdir in ('old-build', 'new-build'):
        build_dir = os.path.join(results_dir, subdir)
        if os.path.exists(build_dir):
            shutil.rmtree(build_dir)
    return True
示例7: _correct_missing_files
def _correct_missing_files(cls, rebase_spec_file, files):
    """Add buildroot files missing from %files sections of the SPEC file.

    Each file is added to the %files section with the closest matching
    path, with known macros substituted back into the path.

    :param rebase_spec_file: SpecFile instance to be updated
    :param files: paths found in the buildroot but missing in the SPEC
    :return: nested dict mapping 'added' -> section name -> list of paths
    """
    macros = [m for m in rebase_spec_file.macros if m['name'] in MacroHelper.MACROS_WHITELIST]
    macros = MacroHelper.expand_macros(macros)
    # ensure maximal greediness: substitute the longest values first
    macros.sort(key=lambda k: len(k['value']), reverse=True)
    result = collections.defaultdict(lambda: collections.defaultdict(list))
    # FIX: 'path' instead of 'file' to avoid shadowing the builtin
    for path in files:
        section = cls._get_best_matching_files_section(rebase_spec_file, path)
        substituted_path = MacroHelper.substitute_path_with_macros(path, macros)
        try:
            # insert right after the last non-empty line of the section
            index = [i for i, line in enumerate(rebase_spec_file.spec_content.section(section)) if line][-1] + 1
        except IndexError:
            # section is empty
            index = 0
        rebase_spec_file.spec_content.section(section).insert(index, substituted_path)
        result['added'][section].append(substituted_path)
        logger.info("Added %s to '%s' section", substituted_path, section)
    return result
示例8: run
def run(self, versioneer, package_name, category, versioneer_blacklist=None):
    """Runs the specified versioneer or all versioneers subsequently
    until one of them succeeds.

    Args:
        versioneer (str): Name of a versioneer.
        package_name (str): Name of a package.
        category (str): Package category.
        versioneer_blacklist (list): List of versioneers that will be skipped.

    Returns:
        str: Latest upstream version of a package.
    """
    if versioneer_blacklist is None:
        versioneer_blacklist = []
    if versioneer:
        logger.info("Running '%s' versioneer", versioneer)
        return self.plugins[versioneer].run(package_name)
    # run all versioneers, except those disabled in config, categorized first
    candidates = [plugin for name, plugin in self.plugins.items()
                  if plugin and name not in versioneer_blacklist]
    candidates.sort(key=lambda plugin: not plugin.CATEGORIES)
    for candidate in candidates:
        # skip versioneers whose categories do not cover this package
        if candidate.CATEGORIES and category not in candidate.CATEGORIES:
            continue
        logger.info("Running '%s' versioneer", candidate.name)
        result = candidate.run(package_name)
        if result:
            return result
    return None
示例9: run
def run(self):
    """Main rebase flow: patch sources, build or fetch packages, compare
    the results and print a summary.

    :return: 1 when comparison packages could not be obtained, otherwise
             falls through (implicit None).
    """
    sources = self.prepare_sources()
    if not self.conf.build_only and not self.conf.comparepkgs:
        self.patch_sources(sources)
    if not self.conf.patch_only:
        if not self.conf.comparepkgs:
            # check build dependencies for rpmbuild
            if self.conf.buildtool == 'rpmbuild':
                Application.check_build_requires(self.spec_file)
            # Build packages
            build = self.build_packages()
            # Perform checks
        else:
            # packages for comparison were supplied directly instead
            build = self.get_rpm_packages(self.conf.comparepkgs)
        # We don't care dirname doesn't contain any RPM packages
        # Therefore return 1
        if not build:
            return 1
        if build:
            self.pkgdiff_packages()
    # print summary information
    self.print_summary()
    if not self.conf.keep_workspace:
        self._delete_workspace_dir()
    if self.debug_log_file:
        logger.info("Detailed debug log is located in '%s'", self.debug_log_file)
示例10: print_summary
def print_summary(cls, path):
    """Print summary information and make it available in the log file.

    :param path: path of the file the summary output is logged to
    :raises RebaseHelperError: if the results file cannot be created
    """
    # First of all we would like to print all summary information
    OutputLogger.set_info_text("Summary output is also available in log:", path)
    logger.info('\n')
    for key, value in six.iteritems(OutputLogger.get_summary_info()):
        logger.info("%s %s\n", key, value)
    try:
        LoggerHelper.add_file_handler(logger_output, path)
    except (OSError, IOError):
        # BUG FIX: path was passed as an extra constructor argument and
        # never interpolated into the message; format it explicitly
        # (consistent with the other print_summary variant)
        raise RebaseHelperError("Can not create results file '%s'" % path)
    type_pkgs = ['old', 'new']
    cls.print_patches(OutputLogger.get_patches(), '\nSummary information about patches:')
    for pkg in type_pkgs:
        type_pkg = OutputLogger.get_build(pkg)
        if type_pkg:
            cls.print_rpms(type_pkg, pkg.capitalize())
            cls.print_build_logs(type_pkg, pkg.capitalize())
    cls.print_pkgdiff_tool()
示例11: fill_dictionary
def fill_dictionary(cls, result_dir):
    """Parse files.xml and symbols.xml and collect the checker results.

    :param result_dir: directory containing the pkgdiff XML outputs
    :return: dict mapping each checker tag to a list of reported items
    """
    xml_files = ('files.xml', 'symbols.xml')
    results_dict = {tag: [] for tag in settings.CHECKER_TAGS}
    for name in xml_files:
        file_name = os.path.join(result_dir, name)
        logger.info('Processing %s file.', file_name)
        try:
            with open(file_name, "r") as f:
                lines = f.readlines()
        except IOError:
            # a missing XML file is simply skipped
            continue
        # wrap the fragments in a single root element so they parse
        # as one well-formed document
        lines.insert(0, '<pkgdiff>')
        lines.append('</pkgdiff>')
        pkgdiff_tree = ElementTree.fromstringlist(lines)
        for tag in settings.CHECKER_TAGS:
            for node in pkgdiff_tree.findall('.//' + tag):
                results_dict[tag].extend(entry.strip() for entry in node.text.strip().split('\n'))
    return results_dict
示例12: upload_srpm
def upload_srpm(cls, session, srpm):
    """Uploads SRPM to a Koji hub.

    Args:
        session (koji.ClientSession): Active Koji session instance.
        srpm (str): Valid path to SRPM.

    Returns:
        str: Remote path to the uploaded SRPM.

    Raises:
        RebaseHelperError: If upload failed.
    """
    def progress(uploaded, total, chunksize, t1, t2):  # pylint: disable=unused-argument
        # delegate to the shared progress bar; upload_start is captured
        # from the enclosing scope once the upload begins
        DownloadHelper.progress(total, uploaded, upload_start)

    # unique remote directory: timestamp plus a random letter suffix
    suffix = ''.join(random.choice(string.ascii_letters) for _ in range(8))
    path = os.path.join('cli-build', str(time.time()), suffix)
    logger.info('Uploading SRPM')
    # single try/except/finally is equivalent to the former nested form:
    # the finally block still runs before any raised error propagates
    try:
        upload_start = time.time()
        session.uploadWrapper(srpm, path, callback=progress)
    except koji.GenericError as e:
        raise RebaseHelperError('Upload failed: {}'.format(str(e)))
    finally:
        # terminate the progress-bar line in every case
        sys.stdout.write('\n')
        sys.stdout.flush()
    return os.path.join(path, os.path.basename(srpm))
示例13: run
def run(cls, spec_file, rebase_spec_file, **kwargs):
    """Rebuild non-existent local sources from instructions found in the
    comment lines directly above the corresponding Source tag in the
    SPEC file preamble.

    :param spec_file: original SpecFile
    :param rebase_spec_file: rebased SpecFile whose sources are checked
    :param kwargs: must contain 'workspace_dir' used for the build log
    """
    # find non-existent local sources
    sources = [idx for idx, src in enumerate(rebase_spec_file.sources)
               if not urllib.parse.urlparse(src).scheme and not os.path.isfile(src)]
    for idx in sources:
        if spec_file.sources[idx] == rebase_spec_file.sources[idx]:
            # skip sources that stayed unchanged
            continue
        source = rebase_spec_file.sources[idx]
        logger.info("Found non-existent source '%s'", source)
        # Source 0 may be written as either "Source:" or "Source0:"
        source_re = re.compile(r'^Source0?:' if idx == 0 else r'^Source{}:'.format(idx))
        comment_re = re.compile(r'^#')
        comments = None
        # find matching Source line in the SPEC file
        preamble = rebase_spec_file.spec_content.section('%package')
        for i in range(len(preamble)):
            if source_re.match(preamble[i]):
                # get all comments above this line
                for j in range(i - 1, 0, -1):
                    if not comment_re.match(preamble[j]):
                        comments = preamble[j+1:i]
                        break
                # NOTE(review): the scan stops at index 1 and never examines
                # preamble[0]; a comment block reaching the very top of the
                # section leaves 'comments' as None — confirm this is intended
                break
        if not comments:
            # nothing to do
            continue
        # update data so that RPM macros are populated correctly
        rebase_spec_file._update_data()  # pylint: disable=protected-access
        instructions = cls._get_instructions(comments,
                                             spec_file.get_version(),
                                             rebase_spec_file.get_version())
        logfile = os.path.join(kwargs['workspace_dir'], '{}.log'.format(source))
        cls._build_source_from_instructions(instructions, source, logfile)
示例14: _get_initial_sources_list
def _get_initial_sources_list(self):
    """Return all sources mentioned in the SPEC file.

    Remote archive sources that are not present locally are downloaded
    when downloading is enabled.

    :return: tuple (sources, tar_sources) of absolute paths, where
             tar_sources is the subset that are supported archives
    :raises RebaseHelperError: if a remote source cannot be downloaded
    """
    sources = []
    tar_sources = []
    # each item of self.spc.sources looks like (source, index, type);
    # type 1 selects Source entries (as opposed to Patch entries)
    sources_list = [x for x in self.spc.sources if x[2] == 1]
    remote_files_re = re.compile(r'(http:|https:|ftp:)//.*')
    # FIX: the enumerate() index was never used; iterate values directly
    for src in sorted(sources_list, key=lambda source: source[1]):
        abs_path = os.path.join(self.sources_location, os.path.basename(src[0]).strip())
        sources.append(abs_path)
        # FIX: str.endswith accepts a tuple — no need to build a list
        # just to test truthiness
        is_archive = src[0].endswith(tuple(Archive.get_supported_archives()))
        if is_archive:
            # if the source is a remote file and not yet present, download it
            if remote_files_re.search(src[0]) and self.download and not os.path.isfile(abs_path):
                # FIX: lazy %-style logging args instead of eager str.format()
                logger.debug("Tarball is not in absolute path %s "
                             "trying to download one from URL %s", abs_path, src[0])
                logger.info("Tarball is not in absolute path. Trying to download it from URL")
                try:
                    DownloadHelper.download_file(src[0], abs_path)
                except DownloadError as e:
                    raise RebaseHelperError("Failed to download file from URL {}. "
                                            "Reason: '{}'. ".format(src[0], str(e)))
            tar_sources.append(abs_path)
    return sources, tar_sources
示例15: _build_rpm
def _build_rpm(cls, srpm, results_dir, rpm_results_dir, root=None, arch=None, builder_options=None):
    """Build RPMs using mock.

    :param srpm: full path to the srpm.
    :param results_dir: abs path to dir where the log should be placed.
    :param rpm_results_dir: directory where rpms will be placed.
    :param root: path to where chroot should be built.
    :param arch: target architectures for the build.
    :param builder_options: builder_options for mock.
    :return: abs paths to the built (non-source) RPMs.
    :raises BinaryPackageBuildError: if mock exits with a non-zero status.
    """
    logger.info("Building RPMs")
    output = os.path.join(results_dir, "mock_output.log")
    cmd = [cls.CMD, '--old-chroot', '--rebuild', srpm, '--resultdir', results_dir]
    if root is not None:
        cmd.extend(['--root', root])
    if arch is not None:
        cmd.extend(['--arch', arch])
    if builder_options is not None:
        cmd.extend(builder_options)
    ret = ProcessHelper.run_subprocess(cmd, output_file=output)
    if ret == 0:
        # exclude the rebuilt source RPM from the result
        return [f for f in PathHelper.find_all_files(results_dir, '*.rpm') if not f.endswith('.src.rpm')]
    # build failed: locate the relevant mock log and record the expected
    # final locations of all build logs under rpm_results_dir
    logfile = Mock.get_mock_logfile_path(ret, rpm_results_dir, tmp_path=results_dir)
    # FIX: list(...) instead of a redundant identity list comprehension,
    # and a readable name instead of the ambiguous 'l'
    logs = list(PathHelper.find_all_files(results_dir, '*.log'))
    cls.logs.extend(os.path.join(rpm_results_dir, os.path.basename(log)) for log in logs)
    raise BinaryPackageBuildError("Building RPMs failed!", rpm_results_dir, logfile=logfile)