This page collects typical code examples of the Python method jenkins_jobs.parser.YamlParser.expandYaml: what the method does, how to call it, and how it is used in real projects. You can also read more about its containing class, jenkins_jobs.parser.YamlParser.
Twelve code examples of YamlParser.expandYaml are shown below, sorted by popularity by default. Upvoting the examples you find useful helps the site recommend better Python code samples.
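Before the examples, here is a minimal end-to-end sketch of where expandYaml sits in the flow: load YAML definitions, expand templates and macros through a ModuleRegistry, then hand the expanded job data to an XML generator. This is pieced together from Examples 4, 8 and 9 below and is only an illustration, not a definitive recipe: the 'jobs/' path is a placeholder, plugins_list=None skips plugin-version checks, and the return value of expandYaml (a plain job list in some jenkins-job-builder releases, a (jobs, views) tuple in later ones) varies between versions.

from jenkins_jobs.config import JJBConfig
from jenkins_jobs.parser import YamlParser
from jenkins_jobs.registry import ModuleRegistry
from jenkins_jobs.xml_config import XmlJobGenerator

jjb_config = JJBConfig()        # default configuration; point it at your own ini file if needed
jjb_config.validate()

parser = YamlParser(jjb_config)
parser.load_files(['jobs/'])    # hypothetical directory of YAML job definitions

registry = ModuleRegistry(jjb_config, plugins_list=None)
registry.set_parser_data(parser.data)

# Expand all job/view templates; pass a list of name globs to limit the expansion.
expanded = parser.expandYaml(registry, [])

# Later releases return (job_data_list, view_data_list); older ones return only the job list.
job_data_list = expanded[0] if isinstance(expanded, tuple) else expanded
xml_jobs = XmlJobGenerator(registry).generateXML(job_data_list)
print([job.name for job in xml_jobs])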
Example 1: execute
# Required import: from jenkins_jobs.parser import YamlParser [as alias]
# Or: from jenkins_jobs.parser.YamlParser import expandYaml [as alias]
def execute(self, options, jjb_config):
    builder = JenkinsManager(jjb_config)

    if options.del_jobs and options.del_views:
        raise JenkinsJobsException(
            '"--views-only" and "--jobs-only" cannot be used together.')

    fn = options.path
    registry = ModuleRegistry(jjb_config, builder.plugins_list)
    parser = YamlParser(jjb_config)

    if fn:
        parser.load_files(fn)
        parser.expandYaml(registry, options.name)
        jobs = [j['name'] for j in parser.jobs]
        views = [v['name'] for v in parser.views]
    else:
        jobs = options.name
        views = options.name

    if options.del_jobs:
        builder.delete_jobs(jobs)
    elif options.del_views:
        builder.delete_views(views)
    else:
        builder.delete_jobs(jobs)
        builder.delete_views(views)
Example 2: test_yaml_snippet
# Required import: from jenkins_jobs.parser import YamlParser [as alias]
# Or: from jenkins_jobs.parser.YamlParser import expandYaml [as alias]
def test_yaml_snippet(self):
    expected_xml = self._read_utf8_content()

    if self.conf_filename:
        config = configparser.ConfigParser()
        config.readfp(open(self.conf_filename))
    else:
        config = None

    parser = YamlParser(config)
    parser.parse(self.in_filename)

    # Generate the XML tree
    parser.expandYaml()
    parser.generateXML()

    parser.xml_jobs.sort(key=operator.attrgetter('name'))

    # Prettify generated XML
    pretty_xml = u"\n".join(job.output().decode('utf-8')
                            for job in parser.xml_jobs)

    self.assertThat(
        pretty_xml,
        testtools.matchers.DocTestMatches(expected_xml,
                                          doctest.ELLIPSIS |
                                          doctest.NORMALIZE_WHITESPACE |
                                          doctest.REPORT_NDIFF)
    )
Example 3: execute
# Required import: from jenkins_jobs.parser import YamlParser [as alias]
# Or: from jenkins_jobs.parser.YamlParser import expandYaml [as alias]
def execute(self, options, jjb_config):
    builder = JenkinsManager(jjb_config)

    fn = options.path
    registry = ModuleRegistry(jjb_config, builder.plugins_list)
    parser = YamlParser(jjb_config)

    if fn:
        parser.load_files(fn)
        parser.expandYaml(registry, options.name)
        jobs = [j["name"] for j in parser.jobs]
    else:
        jobs = options.name

    builder.delete_jobs(jobs)
Example 4: _generate_xmljobs
# Required import: from jenkins_jobs.parser import YamlParser [as alias]
# Or: from jenkins_jobs.parser.YamlParser import expandYaml [as alias]
def _generate_xmljobs(self, options, jjb_config=None):
    builder = JenkinsManager(jjb_config)

    logger.info("Updating jobs in {0} ({1})".format(
        options.path, options.names))
    orig = time.time()

    # Generate XML
    parser = YamlParser(jjb_config)
    registry = ModuleRegistry(jjb_config, builder.plugins_list)

    xml_job_generator = XmlJobGenerator(registry)
    xml_view_generator = XmlViewGenerator(registry)

    parser.load_files(options.path)
    registry.set_parser_data(parser.data)

    job_data_list, view_data_list = parser.expandYaml(
        registry, options.names)

    xml_jobs = xml_job_generator.generateXML(job_data_list)
    xml_views = xml_view_generator.generateXML(view_data_list)

    jobs = parser.jobs
    step = time.time()
    logging.debug('%d XML files generated in %ss',
                  len(jobs), str(step - orig))

    return builder, xml_jobs, xml_views
Example 5: test_yaml_snippet
# Required import: from jenkins_jobs.parser import YamlParser [as alias]
# Or: from jenkins_jobs.parser.YamlParser import expandYaml [as alias]
def test_yaml_snippet(self):
    config = self._get_config()

    expected_xml = self._read_utf8_content()

    parser = YamlParser(config)
    parser.parse(self.in_filename)

    registry = ModuleRegistry(config)
    registry.set_parser_data(parser.data)
    job_data_list = parser.expandYaml(registry)

    # Generate the XML tree
    xml_generator = XmlJobGenerator(registry)
    xml_jobs = xml_generator.generateXML(job_data_list)

    xml_jobs.sort(key=operator.attrgetter('name'))

    # Prettify generated XML
    pretty_xml = u"\n".join(job.output().decode('utf-8')
                            for job in xml_jobs)

    self.assertThat(
        pretty_xml,
        testtools.matchers.DocTestMatches(expected_xml,
                                          doctest.ELLIPSIS |
                                          doctest.REPORT_NDIFF)
    )
Example 6: execute
# Required import: from jenkins_jobs.parser import YamlParser [as alias]
# Or: from jenkins_jobs.parser.YamlParser import expandYaml [as alias]
def execute(self, options, jjb_config):
    builder = Builder(jjb_config)

    fn = options.path
    registry = ModuleRegistry(jjb_config, builder.plugins_list)

    for jobs_glob in options.name:
        parser = YamlParser(jjb_config)

        if fn:
            parser.load_files(fn)
            parser.expandYaml(registry, [jobs_glob])
            jobs = [j['name'] for j in parser.jobs]
        else:
            jobs = [jobs_glob]

        builder.delete_jobs(jobs)
Example 7: _generate_xmljobs
# Required import: from jenkins_jobs.parser import YamlParser [as alias]
# Or: from jenkins_jobs.parser.YamlParser import expandYaml [as alias]
def _generate_xmljobs(self, options, jjb_config=None):
    builder = Builder(jjb_config)

    logger.info("Updating jobs in {0} ({1})".format(options.path, options.names))
    orig = time.time()

    # Generate XML
    parser = YamlParser(jjb_config, builder.plugins_list)
    parser.load_files(options.path)
    parser.expandYaml(options.names)
    parser.generateXML()

    jobs = parser.jobs
    step = time.time()
    logging.debug("%d XML files generated in %ss", len(jobs), str(step - orig))

    return builder, parser.xml_jobs
Example 8: test_yaml_snippet
# Required import: from jenkins_jobs.parser import YamlParser [as alias]
# Or: from jenkins_jobs.parser.YamlParser import expandYaml [as alias]
def test_yaml_snippet(self):
    config = self._get_config()

    expected_xml = self._read_utf8_content().strip() \
        .replace('<BLANKLINE>', '').replace('\n\n', '\n')

    parser = YamlParser(config)
    parser.parse(self.in_filename)

    plugins_info = None
    if self.plugins_info_filename:
        plugins_info = self._read_yaml_content(self.plugins_info_filename)
        self.addDetail("plugins-info-filename",
                       text_content(self.plugins_info_filename))
        self.addDetail("plugins-info",
                       text_content(str(plugins_info)))

    registry = ModuleRegistry(config, plugins_info)
    registry.set_parser_data(parser.data)

    job_data_list, view_data_list = parser.expandYaml(registry)

    # Generate the XML tree
    xml_generator = XmlJobGenerator(registry)
    xml_jobs = xml_generator.generateXML(job_data_list)

    xml_jobs.sort(key=AlphanumSort)

    # check reference files are under correct path for folders
    prefix = os.path.dirname(self.in_filename)
    # split using '/' since fullname uses URL path separator
    expected_folders = list(set([
        os.path.normpath(
            os.path.join(prefix,
                         '/'.join(job_data['name'].split('/')[:-1])))
        for job_data in job_data_list
    ]))
    actual_folders = [os.path.dirname(f) for f in self.out_filenames]

    six.assertCountEqual(
        self,
        expected_folders, actual_folders,
        "Output file under wrong path, was '%s', should be '%s'" %
        (self.out_filenames[0],
         os.path.join(expected_folders[0],
                      os.path.basename(self.out_filenames[0]))))

    # Prettify generated XML
    pretty_xml = u"\n".join(job.output().decode('utf-8')
                            for job in xml_jobs) \
        .strip().replace('\n\n', '\n')

    self.assertThat(
        pretty_xml,
        testtools.matchers.DocTestMatches(expected_xml,
                                          doctest.ELLIPSIS |
                                          doctest.REPORT_NDIFF))
Example 9: assert_case
# Required import: from jenkins_jobs.parser import YamlParser [as alias]
# Or: from jenkins_jobs.parser.YamlParser import expandYaml [as alias]
def assert_case(case_name):
    case_source, case_result = (os.path.join(BASE_PATH, case_name + ext)
                                for ext in ['.yml', '.xml'])
    jjb_config = JJBConfig()
    builder = Builder(jjb_config)

    # Generate XML
    parser = YamlParser(jjb_config)
    registry = ModuleRegistry(jjb_config, builder.plugins_list)
    xml_generator = XmlJobGenerator(registry)

    parser.load_files(case_source)
    registry.set_parser_data(parser.data)

    job_data_list = parser.expandYaml(registry, [])
    xml_jobs = xml_generator.generateXML(job_data_list)

    result_xml = ET.XML(xml_jobs[0].output())
    expected_xml = ET.XML(open(case_result).read())
    assert ET.tostring(result_xml) == ET.tostring(expected_xml)
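Examples 10 through 12 below embed expandYaml inside a Builder class and use the older parser-driven flow, in which expandYaml takes only a list of job-name globs and the parser generates the XML itself. Boiled down, the older call sequence looks roughly like this (a sketch inferred from Examples 2, 7 and 10-12; 'jobs.yaml' and the glob are placeholders, and this constructor only applies to older jenkins-job-builder releases):

import configparser
from jenkins_jobs.parser import YamlParser

config = configparser.ConfigParser()   # or None for defaults, as in Example 2
parser = YamlParser(config)
parser.parse('jobs.yaml')              # hypothetical YAML definition file
parser.expandYaml(['my-job-*'])        # only a list of job-name globs, no registry
parser.generateXML()
print(len(parser.xml_jobs))            # generated XML jobs live on the parser itself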
Example 10: Builder
# Required import: from jenkins_jobs.parser import YamlParser [as alias]
# Or: from jenkins_jobs.parser.YamlParser import expandYaml [as alias]
class Builder(object):
    def __init__(self, jenkins_url, jenkins_user, jenkins_password,
                 config=None, jenkins_timeout=_DEFAULT_TIMEOUT,
                 ignore_cache=False, flush_cache=False, plugins_list=None):
        self.jenkins = Jenkins(jenkins_url, jenkins_user, jenkins_password,
                               jenkins_timeout)
        self.cache = CacheStorage(jenkins_url, flush=flush_cache)
        self.global_config = config
        self.ignore_cache = ignore_cache
        self._plugins_list = plugins_list

    @property
    def plugins_list(self):
        if self._plugins_list is None:
            self._plugins_list = self.jenkins.get_plugins_info()
        return self._plugins_list

    def load_files(self, fn):
        self.parser = YamlParser(self.global_config, self.plugins_list)

        # handle deprecated behavior, and check that it's not a file like
        # object as these may implement the '__iter__' attribute.
        if not hasattr(fn, '__iter__') or hasattr(fn, 'read'):
            logger.warning(
                'Passing single elements for the `fn` argument in '
                'Builder.load_files is deprecated. Please update your code '
                'to use a list as support for automatic conversion will be '
                'removed in a future version.')
            fn = [fn]

        files_to_process = []
        for path in fn:
            if not hasattr(path, 'read') and os.path.isdir(path):
                files_to_process.extend([os.path.join(path, f)
                                         for f in os.listdir(path)
                                         if (f.endswith('.yml')
                                             or f.endswith('.yaml'))])
            else:
                files_to_process.append(path)

        # symlinks used to allow loading of sub-dirs can result in duplicate
        # definitions of macros and templates when loading all from top-level
        unique_files = []
        for f in files_to_process:
            if hasattr(f, 'read'):
                unique_files.append(f)
                continue
            rpf = os.path.realpath(f)
            if rpf not in unique_files:
                unique_files.append(rpf)
            else:
                logger.warning("File '%s' already added as '%s', ignoring "
                               "reference to avoid duplicating yaml "
                               "definitions." % (f, rpf))

        for in_file in unique_files:
            # use of ask-for-permissions instead of ask-for-forgiveness
            # performs better when low use cases.
            if hasattr(in_file, 'name'):
                fname = in_file.name
            else:
                fname = in_file
            logger.debug("Parsing YAML file {0}".format(fname))
            if hasattr(in_file, 'read'):
                self.parser.parse_fp(in_file)
            else:
                self.parser.parse(in_file)

    def delete_old_managed(self, keep=None):
        jobs = self.jenkins.get_jobs()
        deleted_jobs = 0
        if keep is None:
            keep = [job.name for job in self.parser.xml_jobs]
        for job in jobs:
            if job['name'] not in keep:
                if self.jenkins.is_managed(job['name']):
                    logger.info("Removing obsolete jenkins job {0}"
                                .format(job['name']))
                    self.delete_job(job['name'])
                    deleted_jobs += 1
                else:
                    logger.info("Not deleting unmanaged jenkins job %s",
                                job['name'])
            else:
                logger.debug("Keeping job %s", job['name'])
        return deleted_jobs

    def delete_job(self, jobs_glob, fn=None):
        if fn:
            self.load_files(fn)
            self.parser.expandYaml([jobs_glob])
            jobs = [j['name'] for j in self.parser.jobs]
        else:
            jobs = [jobs_glob]

        if jobs is not None:
            logger.info("Removing jenkins job(s): %s" % ", ".join(jobs))
        for job in jobs:
            self.jenkins.delete_job(job)
            if(self.cache.is_cached(job)):
#......... (rest of the code omitted) .........
Example 11: Builder
# Required import: from jenkins_jobs.parser import YamlParser [as alias]
# Or: from jenkins_jobs.parser.YamlParser import expandYaml [as alias]
class Builder(object):
    def __init__(self, jenkins_url, jenkins_user, jenkins_password,
                 config=None, ignore_cache=False, flush_cache=False,
                 plugins_list=None):
        self.jenkins = Jenkins(jenkins_url, jenkins_user, jenkins_password)
        self.cache = CacheStorage(jenkins_url, flush=flush_cache)
        self.global_config = config
        self.ignore_cache = ignore_cache
        self._plugins_list = plugins_list

    @property
    def plugins_list(self):
        if self._plugins_list is None:
            self._plugins_list = self.jenkins.get_plugins_info()
        return self._plugins_list

    def load_files(self, fn):
        self.parser = YamlParser(self.global_config, self.plugins_list)

        # handle deprecated behavior
        if not hasattr(fn, '__iter__'):
            logger.warning(
                'Passing single elements for the `fn` argument in '
                'Builder.load_files is deprecated. Please update your code '
                'to use a list as support for automatic conversion will be '
                'removed in a future version.')
            fn = [fn]

        files_to_process = []
        for path in fn:
            if os.path.isdir(path):
                files_to_process.extend([os.path.join(path, f)
                                         for f in os.listdir(path)
                                         if (f.endswith('.yml')
                                             or f.endswith('.yaml'))])
            else:
                files_to_process.append(path)

        # symlinks used to allow loading of sub-dirs can result in duplicate
        # definitions of macros and templates when loading all from top-level
        unique_files = []
        for f in files_to_process:
            rpf = os.path.realpath(f)
            if rpf not in unique_files:
                unique_files.append(rpf)
            else:
                logger.warning("File '%s' already added as '%s', ignoring "
                               "reference to avoid duplicating yaml "
                               "definitions." % (f, rpf))

        for in_file in unique_files:
            # use of ask-for-permissions instead of ask-for-forgiveness
            # performs better when low use cases.
            if hasattr(in_file, 'name'):
                fname = in_file.name
            else:
                fname = in_file
            logger.debug("Parsing YAML file {0}".format(fname))
            if hasattr(in_file, 'read'):
                self.parser.parse_fp(in_file)
            else:
                self.parser.parse(in_file)

    def delete_old_managed(self, keep=None):
        jobs = self.jenkins.get_jobs()
        deleted_jobs = 0
        if keep is None:
            keep = [job.name for job in self.parser.xml_jobs]
        for job in jobs:
            if job['name'] not in keep and \
                    self.jenkins.is_managed(job['name']):
                logger.info("Removing obsolete jenkins job {0}"
                            .format(job['name']))
                self.delete_job(job['name'])
                deleted_jobs += 1
            else:
                logger.debug("Ignoring unmanaged jenkins job %s",
                             job['name'])
        return deleted_jobs

    def delete_job(self, jobs_glob, fn=None):
        if fn:
            self.load_files(fn)
            self.parser.expandYaml([jobs_glob])
            jobs = [j['name'] for j in self.parser.jobs]
        else:
            jobs = [jobs_glob]

        if jobs is not None:
            logger.info("Removing jenkins job(s): %s" % ", ".join(jobs))
        for job in jobs:
            self.jenkins.delete_job(job)
            if(self.cache.is_cached(job)):
                self.cache.set(job, '')

    def delete_all_jobs(self):
        jobs = self.jenkins.get_jobs()
        logger.info("Number of jobs to delete: %d", len(jobs))
        for job in jobs:
            self.delete_job(job['name'])
#......... (rest of the code omitted) .........
Example 12: Builder
# Required import: from jenkins_jobs.parser import YamlParser [as alias]
# Or: from jenkins_jobs.parser.YamlParser import expandYaml [as alias]
class Builder(object):
    def __init__(self, jjb_config):
        self.jenkins = Jenkins(jjb_config.jenkins['url'],
                               jjb_config.jenkins['user'],
                               jjb_config.jenkins['password'],
                               jjb_config.jenkins['timeout'])
        self.cache = CacheStorage(jjb_config.jenkins['url'],
                                  flush=jjb_config.builder['flush_cache'])
        self._plugins_list = jjb_config.builder['plugins_info']

        self.jjb_config = jjb_config

    @property
    def plugins_list(self):
        if self._plugins_list is None:
            self._plugins_list = self.jenkins.get_plugins_info()
        return self._plugins_list

    def delete_old_managed(self, keep=None):
        jobs = self.jenkins.get_jobs()
        deleted_jobs = 0
        if keep is None:
            keep = [job.name for job in self.parser.xml_jobs]
        for job in jobs:
            if job['name'] not in keep:
                if self.jenkins.is_managed(job['name']):
                    logger.info("Removing obsolete jenkins job {0}"
                                .format(job['name']))
                    self.delete_job(job['name'])
                    deleted_jobs += 1
                else:
                    logger.info("Not deleting unmanaged jenkins job %s",
                                job['name'])
            else:
                logger.debug("Keeping job %s", job['name'])
        return deleted_jobs

    def delete_job(self, jobs_glob, fn=None):
        self.parser = YamlParser(self.jjb_config, self.plugins_list)

        if fn:
            self.parser.load_files(fn)
            self.parser.expandYaml([jobs_glob])
            jobs = [j['name'] for j in self.parser.jobs]
        else:
            jobs = [jobs_glob]

        if jobs is not None:
            logger.info("Removing jenkins job(s): %s" % ", ".join(jobs))
        for job in jobs:
            self.jenkins.delete_job(job)
            if(self.cache.is_cached(job)):
                self.cache.set(job, '')
        self.cache.save()

    def delete_all_jobs(self):
        jobs = self.jenkins.get_jobs()
        logger.info("Number of jobs to delete: %d", len(jobs))
        self.jenkins.delete_all_jobs()
        # Need to clear the JJB cache after deletion
        self.cache.clear()

    @parallelize
    def changed(self, job):
        md5 = job.md5()

        changed = (self.jjb_config.builder['ignore_cache'] or
                   self.cache.has_changed(job.name, md5))
        if not changed:
            logger.debug("'{0}' has not changed".format(job.name))
        return changed

    def update_jobs(self, input_fn, jobs_glob=None, output=None,
                    n_workers=None):
        orig = time.time()
        self.parser = YamlParser(self.jjb_config, self.plugins_list)
        self.parser.load_files(input_fn)
        self.parser.expandYaml(jobs_glob)
        self.parser.generateXML()

        step = time.time()
        logging.debug('%d XML files generated in %ss',
                      len(self.parser.jobs), str(step - orig))

        logger.info("Number of jobs generated: %d", len(self.parser.xml_jobs))
        self.parser.xml_jobs.sort(key=operator.attrgetter('name'))

        if (output and not hasattr(output, 'write')
                and not os.path.isdir(output)):
            logger.info("Creating directory %s" % output)
            try:
                os.makedirs(output)
            except OSError:
                if not os.path.isdir(output):
                    raise

        if output:
            # ensure only wrapped once
            if hasattr(output, 'write'):
#......... (rest of the code omitted) .........