This article collects typical usage examples of Python's yaml.safe_dump function. If you have been wondering what exactly safe_dump does and how to use it, the curated examples below should help.
The following presents 15 code examples of the safe_dump function, taken from real open-source projects and ordered roughly by popularity.
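Before diving into the project code, here is a minimal sketch of the call itself (a hedged illustration, not taken from any of the projects below): yaml.safe_dump serializes a Python object using only YAML's safe representers. With no stream argument it returns the document as a string; given an open file it writes there instead. The default_flow_style=False and allow_unicode=True options seen throughout the examples select block-style output and keep non-ASCII text unescaped.

import yaml

data = {"name": "example", "ports": [8080, 8443]}

# No stream: safe_dump returns the YAML document as a string.
text = yaml.safe_dump(data, default_flow_style=False)

# With a stream: the document is written to the open file handle.
with open("example.yml", "w") as handle:
    yaml.safe_dump(data, handle, default_flow_style=False, allow_unicode=True)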
Example 1: flocker_deploy
def flocker_deploy(test_case, deployment_config, application_config):
"""
Run ``flocker-deploy`` with given configuration files.
:param test_case: The ``TestCase`` running this unit test.
:param dict deployment_config: The desired deployment configuration.
:param dict application_config: The desired application configuration.
"""
# This is duplicate code, see
# https://clusterhq.atlassian.net/browse/FLOC-1903
    control_node = environ.get("FLOCKER_ACCEPTANCE_CONTROL_NODE")
    if control_node is None:
        raise SkipTest("Set control node address using "
                       "FLOCKER_ACCEPTANCE_CONTROL_NODE environment variable.")
    # Read the certificate path after the skip check so a missing control
    # node variable raises SkipTest rather than an unrelated KeyError.
    certificate_path = environ["FLOCKER_ACCEPTANCE_API_CERTIFICATES_PATH"]
temp = FilePath(test_case.mktemp())
temp.makedirs()
deployment = temp.child(b"deployment.yml")
deployment.setContent(safe_dump(deployment_config))
application = temp.child(b"application.yml")
application.setContent(safe_dump(application_config))
check_call([b"flocker-deploy", b"--certificates-directory",
certificate_path, control_node, deployment.path,
application.path])
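A note on this example: it is Python 2 code, where safe_dump returns a str that Twisted's FilePath.setContent accepts as bytes. On Python 3 the dump would need an explicit encode first; a hedged one-liner:

deployment.setContent(safe_dump(deployment_config).encode("utf-8"))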
Example 2: _run_gemini_stats
def _run_gemini_stats(bam_file, data, out_dir):
"""Retrieve high level variant statistics from Gemini.
"""
out = {}
gemini_db = (data.get("variants", [{}])[0].get("population", {}).get("db")
if data.get("variants") else None)
if gemini_db:
gemini_stat_file = "%s-stats.yaml" % os.path.splitext(gemini_db)[0]
if not utils.file_uptodate(gemini_stat_file, gemini_db):
gemini = config_utils.get_program("gemini", data["config"])
tstv = subprocess.check_output([gemini, "stats", "--tstv", gemini_db])
gt_counts = subprocess.check_output([gemini, "stats", "--gts-by-sample", gemini_db])
dbsnp_count = subprocess.check_output([gemini, "query", gemini_db, "-q",
"SELECT count(*) FROM variants WHERE in_dbsnp==1"])
out["Transition/Transversion"] = tstv.split("\n")[1].split()[-1]
for line in gt_counts.split("\n"):
parts = line.rstrip().split()
if len(parts) > 0 and parts[0] == data["name"][-1]:
_, hom_ref, het, hom_var, _, total = parts
out["Variations (total)"] = int(total)
out["Variations (heterozygous)"] = int(het)
out["Variations (homozygous)"] = int(hom_var)
break
out["Variations (in dbSNP)"] = int(dbsnp_count.strip())
            if out.get("Variations (total)", 0) > 0:
out["Variations (in dbSNP) pct"] = "%.1f%%" % (out["Variations (in dbSNP)"] /
float(out["Variations (total)"]) * 100.0)
with open(gemini_stat_file, "w") as out_handle:
yaml.safe_dump(out, out_handle, default_flow_style=False, allow_unicode=False)
else:
with open(gemini_stat_file) as in_handle:
out = yaml.safe_load(in_handle)
return out
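Example 2 regenerates the stats YAML only when it is older than the Gemini database, then reloads the cached copy with safe_load on later calls. Distilled into a standalone sketch (file_uptodate is a bcbio helper; the mtime comparison below is an assumption about its behaviour):

import os
import yaml

def cached_yaml(cache_path, source_path, compute):
    # Recompute when the cache is missing or older than its source.
    stale = (not os.path.exists(cache_path)
             or os.path.getmtime(cache_path) < os.path.getmtime(source_path))
    if stale:
        result = compute()
        with open(cache_path, "w") as out_handle:
            yaml.safe_dump(result, out_handle, default_flow_style=False)
        return result
    with open(cache_path) as in_handle:
        return yaml.safe_load(in_handle)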
Example 3: __update_config
def __update_config(self):
dir_name = self.get_path()
if not os.path.exists(dir_name):
os.mkdir(dir_name)
    for directory in self.__get_diretories():
        os.mkdir(os.path.join(dir_name, directory))
filename = os.path.join(dir_name, 'node.conf')
values = {
'name' : self.name,
'status' : self.status,
'auto_bootstrap' : self.auto_bootstrap,
'interfaces' : self.network_interfaces,
'jmx_port' : self.jmx_port,
'config_options' : self.__config_options,
}
if self.pid:
values['pid'] = self.pid
if self.initial_token:
values['initial_token'] = self.initial_token
if self.__cassandra_dir is not None:
values['cassandra_dir'] = self.__cassandra_dir
if self.data_center:
values['data_center'] = self.data_center
if self.remote_debug_port:
values['remote_debug_port'] = self.remote_debug_port
with open(filename, 'w') as f:
yaml.safe_dump(values, f)
Example 4: loadResumeFile
def loadResumeFile():
"""
Sets the singleton stateDict object to the content of the resume file.
If the file is empty then it will create an empty one.
Raises:
        :class:`ooni.runner.InvalidResumeFile` if the resume file is not valid
"""
if not config.stateDict:
try:
with open(config.resume_filename) as f:
config.stateDict = yaml.safe_load(f)
        except Exception:
            # Any read or parse failure means the resume file is unusable.
            log.err("Error loading YAML file")
            raise InvalidResumeFile
if not config.stateDict:
with open(config.resume_filename, "w+") as f:
yaml.safe_dump(dict(), f)
config.stateDict = dict()
elif isinstance(config.stateDict, dict):
return
else:
log.err("The resume file is of the wrong format")
raise InvalidResumeFile
Example 5: test_dont_override_theme
def test_dont_override_theme(self, checkout_path, run):
tmpdir = tempfile.mkdtemp()
os.mkdir(os.path.join(tmpdir, 'docs'))
yaml_file = os.path.join(tmpdir, 'mkdocs.yml')
    with open(yaml_file, 'w') as f:
        yaml.safe_dump(
            {
                'theme': 'not-readthedocs',
                'theme_dir': 'not-readthedocs',
                'site_name': 'mkdocs',
                'docs_dir': 'docs',
            },
            f,
        )
checkout_path.return_value = tmpdir
self.searchbuilder = MkdocsHTML(
build_env=self.build_env,
python_env=None
)
self.searchbuilder.append_conf()
run.assert_called_with('cat', 'mkdocs.yml', cwd=mock.ANY)
    with open(yaml_file) as f:
        config = yaml.safe_load(f)
self.assertEqual(
config['theme_dir'],
'not-readthedocs'
)
Example 6: restore
def restore(self):
backup_yaml = self.get_backup_dict()
current_yaml = self.get_current_dict()
not_found_keys = []
for key, subkeys in self.keys_to_restore:
if not subkeys and key not in backup_yaml:
not_found_keys.append(key)
continue
if not subkeys:
current_yaml[key] = backup_yaml[key]
continue
backup_values = backup_yaml.get(key, {})
current_yaml[key] = current_yaml.get(key, {})
for subkey in subkeys:
if subkey not in backup_values:
not_found_keys.append("{0}/{1}".format(key, subkey))
else:
current_yaml[key][subkey] = backup_values[subkey]
if not_found_keys:
raise Exception(
"Not found values in backup for keys: {0}".format(
",".join(not_found_keys)))
old_path_name = "{0}.old".format(self.path)
new_path_name = "{0}.new".format(self.path)
shutil.copy2(self.path, old_path_name)
with open(new_path_name, "w") as new:
yaml.safe_dump(current_yaml, new, default_flow_style=False)
shutil.move(new_path_name, self.path)
self._post_restore_action()
Example 7: test_configuration_with_binary_strings
def test_configuration_with_binary_strings():
"""
Regression test: serialization was failing on binary strings
"""
import yaml
obj = '\xaa\xbb\x00\xff\xff\x00ABC'
assert yaml.load(yaml.dump(obj)) == obj
assert yaml.safe_load(yaml.safe_dump(obj)) == obj
obj = {'blob': '\xaa\xbb\x00\xff\xff\x00ABC'}
assert yaml.load(yaml.dump(obj)) == obj
assert yaml.safe_load(yaml.safe_dump(obj)) == obj
obj = {
'function': 'jobcontrol.utils.testing:job_simple_echo',
'title': None,
'notes': None,
# 'args': ('\xaa\xbb\x00\xff\xff\x00ABC',),
'args': '\xaa\xbb\x00\xff\xff\x00ABC',
'dependencies': [],
'kwargs': {},
'id': 'f974e89f-4ae3-40cc-8316-b78e42bd5cc8',
}
dump(obj)
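Example 7 is Python 2 code: literals such as '\xaa\xbb\x00\xff' are byte strings there, and the final dump() is a project-local jobcontrol helper. Under Python 3 the same literal is text and round-trips trivially, while real bytes objects are rejected by the safe dumper. A small sketch of that Python 3 behaviour (assuming a current PyYAML):

import yaml

try:
    yaml.safe_dump({"blob": b"\xaa\xbb\x00\xff"})
except yaml.representer.RepresenterError:
    # The safe representer has no rule for bytes;
    # encode to base64 or decode to str before dumping.
    print("safe_dump cannot represent bytes on Python 3")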
Example 8: _setup_config_files
def _setup_config_files(dst_dir, configs, post_process_config_file, fc_dir, sample_name="run", fc_date=None, fc_name=None):
# Setup the data structure
config_data_structure = {'details': configs}
if fc_date is not None:
config_data_structure['fc_date'] = fc_date
if fc_name is not None:
config_data_structure['fc_name'] = fc_name
# Dump the config to file
config_file = os.path.join(dst_dir,"%s-bcbb-config.yaml" % sample_name)
with open(config_file,'w') as fh:
fh.write(yaml.safe_dump(config_data_structure, default_flow_style=False, allow_unicode=True, width=1000))
# Copy post-process file
with open(post_process_config_file) as fh:
local_post_process = yaml.load(fh)
# Update galaxy config to point to the original location
local_post_process['galaxy_config'] = bcbio.utils.add_full_path(local_post_process['galaxy_config'],os.path.abspath(os.path.dirname(post_process_config_file)))
# Add job name and output paths to the cluster platform arguments
if 'distributed' in local_post_process and 'platform_args' in local_post_process['distributed']:
slurm_out = "%s-bcbb.log" % sample_name
local_post_process['distributed']['platform_args'] = "%s -J %s -o %s -D %s" % (local_post_process['distributed']['platform_args'], sample_name, slurm_out, dst_dir)
local_post_process_file = os.path.join(dst_dir,"%s-post_process.yaml" % sample_name)
with open(local_post_process_file,'w') as fh:
fh.write(yaml.safe_dump(local_post_process, default_flow_style=False, allow_unicode=True, width=1000))
# Write the command for running the pipeline with the configuration files
run_command_file = os.path.join(dst_dir,"%s-bcbb-command.txt" % sample_name)
with open(run_command_file,"w") as fh:
fh.write(" ".join([os.path.basename(__file__),"--only-run",os.path.basename(local_post_process_file), os.path.join("..",os.path.basename(dst_dir)), os.path.basename(config_file)]))
fh.write("\n")
return [os.path.basename(local_post_process_file), dst_dir, fc_dir, os.path.basename(config_file)]
Example 9: build_manifest_yaml
def build_manifest_yaml(manifest, msgs, srvs, actions, output_dir):
# by default, assume that packages are on wiki
m_yaml = {}
m_yaml['authors'] = manifest.author or ''
m_yaml['maintainers'] = manifest.maintainer or ''
m_yaml['brief'] = manifest.brief or ''
m_yaml['depends'] = manifest.depends or ''
m_yaml['description'] = manifest.description or ''
m_yaml['license'] = manifest.license or ''
m_yaml['msgs'] = msgs
m_yaml['srvs'] = srvs
m_yaml['actions'] = actions
m_yaml['url'] = manifest.url or ''
m_yaml['bugtracker'] = manifest.bugtracker or ''
m_yaml['repo_url'] = manifest.repo_url or ''
external_docs = manifest.get_export('doxymaker', 'external')
if external_docs:
m_yaml['external_docmentation'] = external_docs
metapackage = [e for e in manifest.exports if e.tagname == 'metapackage']
if metapackage:
m_yaml['package_type'] = 'metapackage'
else:
m_yaml['package_type'] = 'package'
deprecated = [e for e in manifest.exports if e.tagname == 'deprecated']
if deprecated:
m_yaml['deprecated'] = deprecated[0].content or "This package is deprecated."
with open(os.path.join(output_dir, 'manifest.yaml'), 'w') as f:
yaml.safe_dump(m_yaml, f, default_flow_style=False)
Example 10: write_transitions
def write_transitions(from_trans):
"""
Update the active transitions file safely.
    This function takes a parsed input file (which avoids invalid
    files or files that may be modified while the function is
    active) and ensures the transitions file is updated atomically
    to avoid locks.
@attention: This function may run B{within sudo}
@type from_trans: dict
@param from_trans: transitions dictionary, as returned by L{load_transitions}
"""
trans_file = Cnf["Dinstall::ReleaseTransitions"]
trans_temp = trans_file + ".tmp"
trans_lock = lock_file(trans_file)
temp_lock = lock_file(trans_temp)
destfile = file(trans_temp, 'w')
yaml.safe_dump(from_trans, destfile, default_flow_style=False)
destfile.close()
os.rename(trans_temp, trans_file)
os.close(temp_lock)
os.close(trans_lock)
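Both this example and Example 6 update a live YAML file by writing a sibling file first and renaming it into place; the rename is atomic on POSIX, so readers never observe a half-written document. A distilled sketch without the dak-specific Cnf and lock_file machinery (the function name and helper are illustrative, not from the source):

import os
import tempfile
import yaml

def atomic_yaml_write(path, data):
    # Create the temporary file in the target directory so the final
    # rename stays on one filesystem and therefore remains atomic.
    fd, tmp_path = tempfile.mkstemp(dir=os.path.dirname(path) or ".")
    try:
        with os.fdopen(fd, "w") as handle:
            yaml.safe_dump(data, handle, default_flow_style=False)
        os.replace(tmp_path, path)  # atomic replacement (Python 3.3+)
    except BaseException:
        os.unlink(tmp_path)
        raise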
Example 11: createscan
def createscan(specfile,outputfile):
spec = yaml.load(open(specfile))
analysis_info = recastapi.analysis.read.analysis_by_pub_identifier(*spec['pubkey'].split('/'))
if not analysis_info:
raise click.ClickException('Analysis {} not known, import it first.'.format(spec['pubkey']))
scanrequest = recastapi.request.write.scan_request(
analysis_info['id'],
spec['title'],
spec['description'],
spec['reason'],
spec['additional_information']
)
parnames = spec['parameters']
points = spec['points']
prlist, brlist = addpoints_to_scan(scanrequest['id'],spec['request_format'],parnames,points)
yaml.safe_dump({
'scan_id': scanrequest['id'],
'point_requests': prlist,
'basic_requests': brlist
},
open(outputfile,'w'),
default_flow_style = False
)
Example 12: synchronize_workspace_descriptor
def synchronize_workspace_descriptor(workspace, session) -> None:
"""
Updates both the workspace descriptor on disk and in
the database to contain the same essential data
:param workspace: the database workspace model
:param session: the current database session
:return:
"""
from son_editor.models.repository import Catalogue
with open(os.path.join(workspace.path, "workspace.yml"), "r+") as stream:
ws_descriptor = yaml.safe_load(stream)
if "catalogue_servers" not in ws_descriptor:
ws_descriptor["catalogue_servers"] = []
for catalogue_server in ws_descriptor["catalogue_servers"]:
if len([x for x in workspace.catalogues if x.name == catalogue_server['id']]) == 0:
session.add(Catalogue(name=catalogue_server['id'],
url=catalogue_server['url'],
publish=catalogue_server['publish'] == 'yes',
workspace=workspace)
)
for cat in workspace.catalogues:
if len([x for x in ws_descriptor["catalogue_servers"] if x['id'] == cat.name]) == 0:
catalogue_server = {'id': cat.name, 'url': cat.url, 'publish': cat.publish}
ws_descriptor['catalogue_servers'].append(catalogue_server)
ws_descriptor['name'] = workspace.name
ws_descriptor['ns_schema_index'] = workspace.ns_schema_index
ws_descriptor['vnf_schema_index'] = workspace.vnf_schema_index
        # Rewind and truncate so the dump replaces the file content instead
        # of being appended after what was just read in "r+" mode.
        stream.seek(0)
        yaml.safe_dump(ws_descriptor, stream)
        stream.truncate()
Example 13: save
def save(self):
with self.file_system.open(self.PENDING_TASK, 'w') as f:
yaml.safe_dump(self.data, f)
with self.file_system.open(self.REPEAT_TASK, 'w') as f:
yaml.safe_dump(self.repeat_data, f)
if not self.alarm_on:
self.schedule()
Example 14: _send_configuration
def _send_configuration(self,
application_config_yaml=COMPLEX_APPLICATION_YAML,
deployment_config_yaml=COMPLEX_DEPLOYMENT_YAML):
"""
Run ``flocker-deploy`` against the API server.
    :param application_config_yaml: Application configuration dictionary.
    :param deployment_config_yaml: Deployment configuration dictionary.
:return: ``Deferred`` that fires with a tuple (stdout, stderr,
exit code).
"""
app_config = FilePath(self.mktemp())
app_config.setContent(safe_dump(application_config_yaml))
deployment_config = FilePath(self.mktemp())
deployment_config.setContent(safe_dump(deployment_config_yaml))
# This duplicates some code in the acceptance tests...
# https://clusterhq.atlassian.net/browse/FLOC-1904
return getProcessOutputAndValue(
b"flocker-deploy", [
b"--certificates-directory", self.certificate_path.path,
b"--port",
unicode(self.port_number).encode("ascii"), b"localhost",
deployment_config.path, app_config.path
],
env=environ)
Example 15: run
def run(self, connection, args=None):
connection = super(TestOverlayAction, self).run(connection, args)
runner_path = self.data['test'][self.test_uuid]['overlay_path'][self.parameters['test_name']]
# now read the YAML to create a testdef dict to retrieve metadata
yaml_file = os.path.join(runner_path, self.parameters['path'])
# FIXME: check the existence at the same time as the open.
if not os.path.exists(yaml_file):
raise JobError("Unable to find test definition YAML: %s" % yaml_file)
with open(yaml_file, 'r') as test_file:
testdef = yaml.safe_load(test_file)
# FIXME: change lava-test-runner to accept a variable instead of duplicating the YAML?
with open("%s/testdef.yaml" % runner_path, 'w') as run_file:
yaml.safe_dump(testdef, run_file)
# write out the UUID of each test definition.
# FIXME: is this necessary any longer?
with open('%s/uuid' % runner_path, 'w') as uuid:
uuid.write(self.test_uuid)
# FIXME: does this match old-world test-shell & is it needed?
with open('%s/testdef_metadata' % runner_path, 'w') as metadata:
metadata.write(yaml.safe_dump(self.data['test'][self.test_uuid]['testdef_metadata']))
# Need actions for the run.sh script (calling parameter support in base class)
# and install script (also calling parameter support here.)
# this run then only does the incidental files.
self.results = {'success': self.test_uuid}
return connection