本文整理汇总了Python中yaml.dump函数的典型用法代码示例。如果您正苦于以下问题:Python dump函数的具体用法?Python dump怎么用?Python dump使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了dump函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: dump_yaml_by_year_week
def dump_yaml_by_year_week(items, root_dir=None):
    """ Dumps a set of files: ``$root_dir/$year/$week_number.yaml``

    :param items: iterable of raw fact dicts; each must carry a ``"since"``
        datetime and may carry any of the known keys copied below.
    :param root_dir: base directory of the dump tree (required).
    :return: total number of facts written.
    """
    assert root_dir
    # year -> ISO week number -> list of facts
    years = defaultdict(lambda: defaultdict(lambda: []))
    total_facts = 0
    for raw_fact in items:
        year = raw_fact["since"].year
        week = raw_fact["since"].isocalendar()[1]
        fact = OrderedDict()
        # copy only known keys, preserving this canonical order in the output
        keys = "activity", "category", "since", "until", "description", "tags", "hamster_fact_id"
        for key in keys:
            if key in raw_fact:
                fact[key] = raw_fact[key]
        years[year][week].append(fact)
        total_facts += 1
    for year in years:
        # bug fix: the inner loop variable was misleadingly named "month";
        # it is the ISO week number.  Also use .items() instead of the
        # Python-2-only .iteritems() so the helper works on both versions.
        for week, facts in years[year].items():
            year_dir = os.path.join(root_dir, str(year))
            if not os.path.exists(year_dir):
                os.makedirs(year_dir)
            week_file = os.path.join(year_dir, "{0:0>2}".format(week)) + ".yaml"
            with open(week_file, "w") as f:
                yaml.dump(facts, f, allow_unicode=True, default_flow_style=False)
    return total_facts
示例2: main
def main():
    """Mirror a Helm chart repository into a local GitHub-Pages docs tree.

    Downloads the upstream ``index.yaml`` from ``$CHARTS_URL``, rewrites every
    chart URL to point at ``$MIRROR_URL`` (derived from ``$GIT_REPO`` when
    unset), downloads any chart tarball not already mirrored, and writes the
    rewritten index under the mirror root.

    :raises RuntimeError: if ``$GIT_REPO`` is not set.
    """
    root = "/mnt/charts/docs"
    chart_url = os.environ.get(
        "CHARTS_URL", "https://kubernetes-charts.storage.googleapis.com/")
    repo_url = os.environ.get("GIT_REPO")
    if repo_url is None:
        raise RuntimeError("You must specify a git repo!")
    # derive "https://<user>.github.io/<repo>/" from the git URL path
    p = urlparse(repo_url)
    git_user = p.path.split("/")[-2]
    repo_name = p.path.split("/")[-1].split(".")[0]
    default_mirror = "https://%s.github.io/%s/" % (git_user.lower(), repo_name)
    mirror_url = os.environ.get("MIRROR_URL", default_mirror)
    index_file = "index.yaml"
    wget(chart_url + index_file, index_file)
    with open(index_file) as f:
        # bug fix: use safe_load -- the index comes from the network, and
        # yaml.load without an explicit Loader allows arbitrary object
        # construction (and is deprecated in PyYAML >= 5.1).
        index = yaml.safe_load(f)
    entries = index["entries"]
    new = index.copy()
    for name, charts in entries.items():
        for chart, new_chart in zip(charts, new["entries"][name]):
            url = chart["urls"][0]
            tar_name = url.split("/")[-1]
            target = os.path.join(root, tar_name)
            new_chart["urls"][0] = "/".join(
                [mirror_url[:-1] if mirror_url.endswith("/") else mirror_url, tar_name])
            # datetime format issue: re-serialize the parsed timestamp in the
            # zero-padded RFC3339-ish form the original index used
            new_chart["created"] = new_chart["created"].strftime('%Y-%m-%dT%H:%M:%S.%f000Z')
            if os.path.exists(target):
                continue  # tarball already mirrored; skip download
            wget(url, target)
    new["generated"] = new["generated"].strftime('%Y-%m-%dT%H:%M:%S.%f000Z')
    with open(os.path.join(root, "index.yaml"), "w") as f:
        yaml.dump(new, stream=f)
示例3: get_post_process_yaml
def get_post_process_yaml(data_dir, workdir):
    """Return the path of a bcbio post-process/system config for tests.

    Preference order: a full ``post_process.yaml`` in ``data_dir``; otherwise
    a copy of the installed ``bcbio_system.yaml`` rewritten into ``workdir``
    to point at the reduced test genomes; otherwise the bundled
    ``post_process-sample.yaml``.
    """
    try:
        # bcbiovm is optional; when present it knows the docker data dir
        from bcbiovm.docker.defaults import get_datadir
        datadir = get_datadir()
        system = os.path.join(datadir, "galaxy", "bcbio_system.yaml") if datadir else None
    except ImportError:
        system = None
    if system is None or not os.path.exists(system):
        try:
            _, system = load_system_config("bcbio_system.yaml")
        except ValueError:
            system = None
    sample = os.path.join(data_dir, "post_process-sample.yaml")
    std = os.path.join(data_dir, "post_process.yaml")
    if os.path.exists(std):
        return std
    elif system and os.path.exists(system):
        # create local config pointing to reduced genomes
        test_system = os.path.join(workdir, os.path.basename(system))
        with open(system) as in_handle:
            # bug fix: safe_load -- the system file is plain YAML, and
            # yaml.load without a Loader is unsafe/deprecated in PyYAML >= 5.1
            config = yaml.safe_load(in_handle)
        config["galaxy_config"] = os.path.join(data_dir, "universe_wsgi.ini")
        with open(test_system, "w") as out_handle:
            yaml.dump(config, out_handle)
        return test_system
    else:
        return sample
示例4: settings
def settings(request):
    """Render the merchant account-settings page (Django view, Python 2).

    Loads the requesting user's merchant profile and settings form, then
    best-effort fetches Coinbase account info for display; any Coinbase
    failure is printed and the page renders without that data.
    """
    profile = request.user.get_profile()
    merchant_settings = MerchantSettings.load_by_merchant(profile)
    settings = MerchantSettingsForm(instance=merchant_settings)
    account_info = dict()
    try:
        cb_api = coinbase.get_api_instance(profile)
        account_info = coinbase.get_account_info(cb_api)
        # debug dump of the fetched account structure to stdout
        print yaml.dump(account_info)
    except Exception as e:
        # best-effort: page still renders when Coinbase is unreachable;
        # the error is only printed, not logged -- NOTE(review): consider
        # a logger here
        print "%s: %s" % (e.__class__, e)
        cb_api = None
    transactions = list()
    # if cb_api:
    #     try:
    #         transactions = [x for x in cb_api.transactions()]
    #         if len(transactions) > 7:
    #             transactions = transactions[:7]
    #     except Exception as e:
    #         print "Exception getting transactions: %s %s" % (e.__class__, e)
    #         for tx in cb_api.transactions():
    #             print tx
    #             print dir(tx)
    data = {'settings_form': settings,
            'coinbase_api': cb_api,
            'account_info': account_info,
            'transactions': transactions,
            }
    t = loader.get_template("coinexchange/account/settings.html")
    c = CoinExchangeContext(request, data)
    return HttpResponse(t.render(c))
示例5: test_disks_flag
def test_disks_flag(self):
    """--disks is cloud-only and must base64-decode to a YAML dict."""
    # cluster deployments never accept EBS/PD mounts
    cluster_args = self.cluster_argv[:] + ["--disks", "ABCDFEG"]
    self.assertRaises(BadConfigurationException, ParseArgs, cluster_args, self.function)

    # a payload that parses to something other than a dict is rejected
    non_dict_layout = yaml.load("""
public1,
""")
    encoded_bad = base64.b64encode(yaml.dump(non_dict_layout))
    self.assertRaises(BadConfigurationException, ParseArgs,
                      self.cloud_argv[:] + ["--disks", encoded_bad],
                      self.function)

    # a proper mapping round-trips: parsed args echo the same dict back
    expected = {'public1' : 'vol-ABCDEFG'}
    dict_layout = yaml.load("""
public1 : vol-ABCDEFG
""")
    encoded_good = base64.b64encode(yaml.dump(dict_layout))
    parsed = ParseArgs(self.cloud_argv[:] + ["--disks", encoded_good], self.function).args
    self.assertEquals(expected, parsed.disks)
示例6: main
def main(run_info_yaml, lane, out_file, genome_build, barcode_type, trim, ascii, analysis, description, clear_description, verbose):
if verbose: print "Verifying that %s exists" % run_info_yaml
assert os.path.exists(run_info_yaml)
if verbose: print "Parsing %s" % run_info_yaml
with open(run_info_yaml) as fh:
run_info = yaml.load(fh)
if verbose: print "Extracting lane info"
if lane == 0:
lane_info = run_info
else:
for info in run_info:
if (int(info.get("lane",0)) == lane):
lane_info = [info]
break
for info in lane_info:
if verbose: print "Processing lane %s" % info["lane"]
_process_info(info,genome_build,barcode_type,trim,ascii,analysis,description,clear_description,verbose)
if out_file is not None:
with open(out_file,'w') as fh:
yaml.dump(run_info, fh, allow_unicode=True, default_flow_style=False)
else:
print yaml.dump(run_info, allow_unicode=True, default_flow_style=False)
示例7: gen_maestro_yaml
def gen_maestro_yaml():
    """Write ``maestro.yaml`` describing the weibo-emotion-app deployment.

    Reads the module-level ``cluster_name``, ``ships``, ``base_image``,
    ``num_instance_per_ship`` and ``base_port`` to lay out one service with
    ``len(ships) * num_instance_per_ship`` container instances.
    """
    service_name = "weibo-emotion-app"
    data = {
        "name": cluster_name,
        "ships": dict((ship, {"ip": ship}) for ship in ships),
        "services": {
            service_name: {
                "image": base_image,
                "instances": {},
            },
        },
        "audit": [
            {"type": "log", "file": "/tmp/maestro.log"}
        ],
    }
    instances = data["services"][service_name]["instances"]
    for ship_idx, ship in enumerate(ships):
        for slot in xrange(num_instance_per_ship):
            # globally unique, zero-based container number across all ships
            container_no = ship_idx * num_instance_per_ship + slot
            container_name = "%s%s" % (service_name, container_no)
            instances[container_name] = {
                "ship": ship,
                "ports": {"client": {"external": base_port + container_no, "exposed": 8000}},
                "lifecycle": {
                    "running": [{"type": "http", "port": "client"}],
                },
            }
    with open("maestro.yaml", "w") as conf:
        yaml.dump(data, conf, default_flow_style=False)
示例8: test_print_basic
def test_print_basic(monkeypatch):
    """'senza print' on a minimal definition renders a CloudFormation template."""
    # stub out all AWS connections
    monkeypatch.setattr('boto.cloudformation.connect_to_region', lambda x: MagicMock())
    monkeypatch.setattr('boto.iam.connect_to_region', lambda x: MagicMock())
    configuration = {
        'Type': 'Senza::Configuration',
        'ServerSubnets': {'eu-west-1': ['subnet-123']},
    }
    app_server = {
        'Type': 'Senza::TaupageAutoScalingGroup',
        'InstanceType': 't2.micro',
        'Image': 'AppImage',
        'TaupageConfig': {'runtime': 'Docker', 'source': 'foo/bar'},
    }
    definition = {
        'SenzaInfo': {'StackName': 'test'},
        'SenzaComponents': [{'Configuration': configuration},
                            {'AppServer': app_server}],
    }
    runner = CliRunner()
    with runner.isolated_filesystem():
        with open('myapp.yaml', 'w') as fd:
            yaml.dump(definition, fd)
        result = runner.invoke(cli, ['print', 'myapp.yaml', '--region=myregion', '123', '1.0-SNAPSHOT'],
                               catch_exceptions=False)
    # the rendered template must contain the CF header and our subnet
    assert 'AWSTemplateFormatVersion' in result.output
    assert 'subnet-123' in result.output
示例9: test_print_replace_mustache
def test_print_replace_mustache(monkeypatch):
    """{{Arguments.*}} mustache references are substituted in the output."""
    sg = MagicMock()
    sg.name = 'app-master-mind'
    sg.id = 'sg-007'
    # stub AWS; EC2 reports exactly one security group to resolve against
    monkeypatch.setattr('boto.cloudformation.connect_to_region', lambda x: MagicMock())
    monkeypatch.setattr('boto.ec2.connect_to_region', lambda x: MagicMock(get_all_security_groups=lambda: [sg]))
    monkeypatch.setattr('boto.iam.connect_to_region', lambda x: MagicMock())
    app_server = {
        'Image': 'AppImage',
        'InstanceType': 't2.micro',
        'SecurityGroups': ['app-{{Arguments.ApplicationId}}'],
        'IamRoles': ['app-{{Arguments.ApplicationId}}'],
        'TaupageConfig': {'runtime': 'Docker', 'source': 'foo/bar'},
        'Type': 'Senza::TaupageAutoScalingGroup',
    }
    definition = {
        'SenzaInfo': {'StackName': 'test',
                      'Parameters': [{'ApplicationId': {'Description': 'Application ID from kio'}}]},
        'SenzaComponents': [{'Configuration': {'ServerSubnets': {'eu-west-1': ['subnet-123']},
                                               'Type': 'Senza::Configuration'}},
                            {'AppServer': app_server}],
    }
    runner = CliRunner()
    with runner.isolated_filesystem():
        with open('myapp.yaml', 'w') as fd:
            yaml.dump(definition, fd)
        result = runner.invoke(cli, ['print', 'myapp.yaml', '--region=myregion', '123', 'master-mind'],
                               catch_exceptions=False)
    assert 'AWSTemplateFormatVersion' in result.output
    assert 'subnet-123' in result.output
    # the ApplicationId argument was spliced into the group name, and the
    # matching security group id was resolved from the EC2 stub
    assert 'app-master-mind' in result.output
    assert 'sg-007' in result.output
示例10: test_console
def test_console(monkeypatch):
    """'senza console' resolves stack name, IP and instance id to EC2 output."""
    stack = MagicMock(stack_name='test-1')
    instance = MagicMock()
    instance.tags = {'aws:cloudformation:stack-name': 'test-1'}
    ec2 = MagicMock()
    ec2.get_only_instances.return_value = [instance]
    ec2.get_console_output.return_value.output = b'**MAGIC-CONSOLE-OUTPUT**'
    monkeypatch.setattr('boto.ec2.connect_to_region', lambda x: ec2)
    monkeypatch.setattr('boto.cloudformation.connect_to_region',
                        lambda x: MagicMock(list_stacks=lambda stack_status_filters: [stack]))
    monkeypatch.setattr('boto.iam.connect_to_region', lambda x: MagicMock())
    runner = CliRunner()
    with runner.isolated_filesystem():
        with open('myapp.yaml', 'w') as fd:
            yaml.dump({'SenzaInfo': {'StackName': 'test'}}, fd)

        def console(*args):
            # run the console sub-command with exceptions surfaced, not caught
            return runner.invoke(cli, ['console'] + list(args),
                                 catch_exceptions=False)

        # by definition file + stack version
        assert '**MAGIC-CONSOLE-OUTPUT**' in console('myapp.yaml', '--region=myregion', '1').output
        # an unknown identifier matches nothing and prints nothing
        assert '' == console('foobar', '--region=myregion').output
        # by private IP address
        assert '**MAGIC-CONSOLE-OUTPUT**' in console('172.31.1.2', '--region=myregion').output
        # by EC2 instance id
        assert '**MAGIC-CONSOLE-OUTPUT**' in console('i-123', '--region=myregion').output
示例11: test_delete
def test_delete(monkeypatch):
    """'senza delete' removes one stack; multiple matches require --force."""
    cf = MagicMock()
    stack = MagicMock(stack_name='test-1')
    cf.list_stacks.return_value = [stack]
    monkeypatch.setattr('boto.cloudformation.connect_to_region', lambda x: cf)
    monkeypatch.setattr('boto.iam.connect_to_region', lambda x: MagicMock())
    runner = CliRunner()
    with runner.isolated_filesystem():
        with open('myapp.yaml', 'w') as fd:
            yaml.dump({'SenzaInfo': {'StackName': 'test'}}, fd)

        def delete(*extra):
            return runner.invoke(cli, ['delete', 'myapp.yaml', '--region=myregion'] + list(extra),
                                 catch_exceptions=False)

        # exactly one matching stack: deleted without further ceremony
        assert 'OK' in delete('1').output
        # two matching stacks: refuse unless --force is given
        cf.list_stacks.return_value = [stack, stack]
        assert 'Please use the "--force" flag if you really want to delete multiple stacks' in delete().output
        assert 'OK' in delete('--force').output
示例12: save_config
def save_config(config, path=None):
    """ Save given quaycon configuration to YAML file

    :param dict config:
        configuration to save
    :param path:
        where to save the configuration. Can be either a path to a file, or
        an opened file-like object. If kept to `None`, then configuration is
        saved to ~/.quaycon.yml if it already exists, otherwise to
        ~/.config/quaycon/config.yml
    """
    if path is None:
        # prefer the first default location that already exists ...
        for _path in DEFAULT_CONFIG_FILES:
            if osp.exists(_path):
                path = _path
                break
        if path is None:
            # ... otherwise fall back to the last default location (explicit
            # indexing instead of relying on the loop variable after the loop)
            path = DEFAULT_CONFIG_FILES[-1]
    if isinstance(path, string_types):
        LOGGER.info("Saving config in '{}'".format(path))
        parent = osp.dirname(path)
        # bug fix: a bare filename has dirname "" and os.makedirs("") raises;
        # only create the parent when there is one and it is missing
        if parent and not osp.isdir(parent):
            os.makedirs(parent)
        with open(path, "w") as ostr:
            yaml.dump(config, ostr, encoding="utf-8", default_flow_style=False, Dumper=Dumper)
    else:
        # path is an already-opened file-like object; caller owns its lifetime
        yaml.dump(config, path, encoding="utf-8", default_flow_style=False, Dumper=Dumper)
示例13: __init__
def __init__(self):
    """Set up temp config/data dirs, seed settings.yml, and wire up storage
    and the permissions handler for the test plugin.
    """
    configure(None)
    self.logger = getLogger("Permissions")
    self.confdir = tmpdir + "/config/"
    self.datadir = tmpdir + "/data/"
    try:
        os.makedirs(self.confdir)
        os.makedirs(self.datadir)
        self.logger.debug("Config and data dirs created.")
    except Exception:
        # dirs may already exist from a previous run; that is fine
        pass
    # bug fix: the settings file handle was opened inline and never closed;
    # a context manager guarantees it is flushed and closed deterministically
    with open(self.confdir + "settings.yml", "w") as settings_file:
        yaml.dump({"editor_warning": False}, settings_file)
    self.storage = StorageManager(self.confdir, self.datadir)
    self.data = self.storage.get_file(self, "data", formats.YAML,
                                      "permissions.yml")
    self.handler = permissionsHandler(self, self.data)
    super(TestPlugin, self).__init__(
        AttrDict(name="test", module="test_permissions"),
        AttrDict(name="python"),
    )
示例14: dump_yaml_by_year_month_day
def dump_yaml_by_year_month_day(items, root_dir=None):
    """ Dumps a set of files: ``$root_dir/$year/$month/$day.yaml``

    :param items: iterable of raw fact dicts; each must carry a ``"since"``
        datetime and may carry any of the known keys copied below.
    :param root_dir: base directory of the dump tree (required).
    :return: total number of facts written.
    """
    assert root_dir
    # year -> month -> day -> list of facts
    years = defaultdict(lambda: defaultdict(lambda: defaultdict(lambda: [])))
    total_facts = 0
    for raw_fact in items:
        year = raw_fact["since"].year
        month = raw_fact["since"].month
        day = raw_fact["since"].day
        fact = OrderedDict()
        # copy only known keys, preserving this canonical order in the output
        keys = "activity", "category", "since", "until", "description", "tags", "hamster_fact_id"
        for key in keys:
            if key in raw_fact:
                fact[key] = raw_fact[key]
        years[year][month][day].append(fact)
        total_facts += 1
    for year in years:
        for month in years[year]:
            # .items() instead of the Python-2-only .iteritems() so the
            # helper works on both versions
            for day, facts in years[year][month].items():
                month_dir = os.path.join(root_dir, str(year), "{0:0>2}".format(month))
                if not os.path.exists(month_dir):
                    os.makedirs(month_dir)
                day_file = os.path.join(month_dir, "{0:0>2}".format(day)) + ".yaml"
                with open(day_file, "w") as f:
                    yaml.dump(facts, f, allow_unicode=True, default_flow_style=False)
    return total_facts
示例15: main
def main():
    """Convert a JSON inventory dump (argv[1]) into an Ansible inventory tree.

    Creates ``inventory/<group>`` directories for top-level keys without a
    dot, writes each ``group_vars``/``host_vars`` entry as a YAML file, and
    writes the raw ``hosts.ini`` content verbatim.  Python 2 code: the
    ``encoding`` argument to ``json.load`` and the ``byteify`` helper both
    rely on Python 2 str semantics.
    """
    arg = sys.argv[1:]
    # bug fix: all file handles were previously opened without ever being
    # closed (the input file in particular); use context managers throughout
    with open(arg[0], 'r') as json_file:
        json_obj = json.load(json_file, encoding="latin-1")
    json_obj = byteify(json_obj)
    path = os.getcwd()
    for base_object in json_obj:
        if '.' not in base_object:
            new_directory = "%s/inventory/%s" % (path, base_object)
            if not os.path.exists(new_directory):
                os.makedirs(new_directory)
    for group_vars in json_obj['group_vars']:
        with open("%s/inventory/group_vars/%s" % (path, group_vars), "w") as stream:
            yaml.dump(json_obj['group_vars'][group_vars], stream, default_flow_style=False)
    for host_vars in json_obj['host_vars']:
        with open("%s/inventory/host_vars/%s" % (path, host_vars), "w") as stream:
            yaml.dump(json_obj['host_vars'][host_vars], stream, default_flow_style=False)
    with open("%s/inventory/hosts.ini" % path, "w") as stream:
        stream.write(json_obj['hosts.ini'])