本文整理汇总了Python中six.viewkeys函数的典型用法代码示例。如果您正苦于以下问题:Python viewkeys函数的具体用法?Python viewkeys怎么用?Python viewkeys使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了viewkeys函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: globals
def globals(Globals, **tagmap):
    '''Apply the tags in `Globals` back into the database.

    `Globals` is an iterable of `(ea, tags)` pairs where `tags` is a
    dictionary of tag names to values for the address `ea`.  Keyword
    arguments form a renaming map (`oldname=newname`) applied to each tag
    before it is written.  Returns the number of addresses processed.
    '''
    # NOTE(review): `apply` is a module-level object here (not the builtin);
    # its class name is only used to build the log-message prefix.
    global apply
    cls, tagmap_output = apply.__class__, u", {:s}".format(u', '.join(u"{:s}={:s}".format(internal.utils.string.escape(oldtag), internal.utils.string.escape(newtag)) for oldtag, newtag in six.iteritems(tagmap))) if tagmap else ''

    count = 0
    for ea, res in Globals:
        # Pick the namespace to write into: function tags if the address is
        # inside a function, database (global) tags otherwise.
        ns = func if func.within(ea) else db

        # grab the current (old) tag state
        state = ns.tag(ea)

        # transform the new tag state using the tagmap
        new = { tagmap.get(name, name) : value for name, value in six.viewitems(res) }

        # check if the tag mapping resulted in the deletion of a tag
        # (two source names mapped onto the same destination name)
        if len(new) != len(res):
            for name in six.viewkeys(res) - six.viewkeys(new):
                logging.warn(u"{:s}.globals(...{:s}) : Refusing requested tag mapping as it results in the tag \"{:s}\" overwriting the tag \"{:s}\" in the global {:#x}. The value {!s} would be replaced with {!s}.".format('.'.join((__name__, cls.__name__)), tagmap_output, internal.utils.string.escape(name, '"'), internal.utils.string.escape(tagmap[name], '"'), ea, internal.utils.string.repr(res[name]), internal.utils.string.repr(res[tagmap[name]])))
            pass

        # check what's going to be overwritten with different values prior to doing it
        for name in six.viewkeys(state) & six.viewkeys(new):
            if state[name] == new[name]: continue
            logging.warn(u"{:s}.globals(...{:s}) : Overwriting tag \"{:s}\" for global at {:#x} with new value {!s}. Old value was {!s}.".format('.'.join((__name__, cls.__name__)), tagmap_output, internal.utils.string.escape(name, '"'), ea, internal.utils.string.repr(new[name]), internal.utils.string.repr(state[name])))

        # now we can apply the tags to the global address; `dummy` is a
        # sentinel (presumably never equal to a real value -- confirm) so
        # unchanged tags are skipped.
        try:
            [ ns.tag(ea, name, value) for name, value in six.iteritems(new) if state.get(name, dummy) != value ]
        except:
            logging.warn(u"{:s}.globals(...{:s}) : Unable to apply tags ({!s}) to global {:#x}.".format('.'.join((__name__, cls.__name__)), tagmap_output, internal.utils.string.repr(new), ea), exc_info=True)

        # increase our counter
        count += 1
    return count
示例2: validate
def validate(self, doc):
    """Validate an annotation document; returns `doc` unchanged.

    NOTE(review): the early return below short-circuits the method, so the
    entire validation body after it is unreachable (dead code).  It looks
    like validation was deliberately disabled -- confirm intent before
    removing either the return or the checks.
    """
    return doc
    try:
        # The document must have exactly these top-level fields.
        assert set(six.viewkeys(doc)) == {
            '_id', 'imageId', 'skill', 'creatorId', 'lesionBoundary',
            'created'}
        assert isinstance(doc['imageId'], ObjectId)
        # The referenced image must exist.
        assert self.model('image', 'isic_archive').find(
            {'_id': doc['imageId']}).count()
        # TODO: better use of Enum
        assert doc['skill'] in {'novice', 'expert'}
        assert isinstance(doc['creatorId'], ObjectId)
        # The referenced creator must exist.
        assert self.model('user').find(
            {'_id': doc['creatorId']}).count()
        # The boundary must be a GeoJSON-style Feature.
        assert isinstance(doc['lesionBoundary'], dict)
        assert set(six.viewkeys(doc['lesionBoundary'])) == {
            'type', 'properties', 'geometry'}
        assert doc['lesionBoundary']['type'] == 'Feature'
        assert isinstance(doc['lesionBoundary']['properties'], dict)
        # 'source', 'startTime', 'stopTime' are required properties;
        # 'seedPoint' and 'tolerance' are optional.
        assert set(six.viewkeys(doc['lesionBoundary']['properties'])) <= {
            'source', 'startTime', 'stopTime', 'seedPoint', 'tolerance'}
        assert set(six.viewkeys(doc['lesionBoundary']['properties'])) >= {
            'source', 'startTime', 'stopTime'}
        assert doc['lesionBoundary']['properties']['source'] in {
            'autofill', 'manual pointlist'}
        assert isinstance(doc['lesionBoundary']['properties']['startTime'],
                          datetime.datetime)
        assert isinstance(doc['lesionBoundary']['properties']['stopTime'],
                          datetime.datetime)
        # The geometry must be a single-ring polygon.
        assert isinstance(doc['lesionBoundary']['geometry'], dict)
        assert set(six.viewkeys(doc['lesionBoundary']['geometry'])) == {
            'type', 'coordinates'}
        assert doc['lesionBoundary']['geometry']['type'] == 'Polygon'
        assert isinstance(doc['lesionBoundary']['geometry']['coordinates'],
                          list)
        assert len(doc['lesionBoundary']['geometry']['coordinates']) == 1
        assert isinstance(
            doc['lesionBoundary']['geometry']['coordinates'][0], list)
        assert len(doc['lesionBoundary']['geometry']['coordinates'][0]) > 2
        # The ring must be closed: first vertex equals last vertex.
        assert doc['lesionBoundary']['geometry']['coordinates'][0][0] == \
            doc['lesionBoundary']['geometry']['coordinates'][0][-1]
        # Every vertex is a numeric [x, y] pair.
        for coord in doc['lesionBoundary']['geometry']['coordinates'][0]:
            assert isinstance(coord, list)
            assert len(coord) == 2
            assert isinstance(coord[0], (int, float))
            assert isinstance(coord[1], (int, float))
        assert isinstance(doc['created'], datetime.datetime)
    except AssertionError:
        # TODO: message
        raise ValidationException('')
    return doc
示例3: once_a_day
def once_a_day(midnight_dt):
    """Per-day bookkeeping: advance clocks, clean up expired assets and
    apply overnight splits.  Uses `algo`, `self`, `data_portal` and
    `current_data` from the enclosing scope."""
    # Capture positions BEFORE advancing the date so that price lookups
    # resolve against the previous trading close rather than midnight.
    tracker = algo.perf_tracker
    open_positions = tracker.position_tracker.positions
    held_assets = algo.asset_finder.retrieve_all(open_positions)

    # Advance every clock to the new day.
    self.simulation_dt = midnight_dt
    algo.on_dt_changed(midnight_dt)

    # Expired assets are only cleaned up after the clock has rolled over
    # to the next day.
    self._cleanup_expired_assets(midnight_dt, held_assets)

    # Apply any splits that touch current positions or open orders.
    relevant_assets = (viewkeys(tracker.position_tracker.positions)
                       | viewkeys(algo.blotter.open_orders))
    if relevant_assets:
        overnight_splits = data_portal.get_splits(relevant_assets,
                                                  midnight_dt)
        if overnight_splits:
            algo.blotter.process_splits(overnight_splits)
            tracker.position_tracker.handle_splits(overnight_splits)
示例4: legend_aesthetics
def legend_aesthetics(self, layer, plot):
    """
    Return the aesthetics that contribute to the legend

    Parameters
    ----------
    layer : Layer
        Layer whose legend is to be drawn
    plot : ggplot
        Plot object

    Returns
    -------
    matched : list
        List of the names of the aesthetics that contribute
        to the legend.
    """
    legend_names = set(self.key.columns) - {'label'}

    # Aesthetics known to the layer: its own mapping, anything inherited
    # from the plot, plus the stat's defaults.
    layer_names = set(six.viewkeys(layer.mapping))
    if layer.inherit_aes:
        layer_names |= set(plot.mapping)
    layer_names |= set(six.viewkeys(layer.stat.DEFAULT_AES))

    # Aesthetics the geom can actually draw with.
    geom_names = layer.geom.REQUIRED_AES | six.viewkeys(layer.geom.DEFAULT_AES)

    # Keep the intersection, minus anything fixed by geom parameters.
    relevant = layer_names & geom_names & legend_names
    return list(relevant - set(layer.geom.aes_params))
示例5: once_a_day
def once_a_day(midnight_dt, current_data=self.current_data,
               data_portal=self.data_portal):
    # Generator run once per trading day: yields overnight capital changes,
    # then performs the day-roll bookkeeping.  The defaults bind
    # `self.current_data`/`self.data_portal` at definition time in the
    # enclosing scope.

    # process any capital changes that came overnight
    for capital_change in algo.calculate_capital_changes(
            midnight_dt, emission_rate=emission_rate,
            is_interday=True):
        yield capital_change

    # set all the timestamps
    self.simulation_dt = midnight_dt
    algo.on_dt_changed(midnight_dt)

    metrics_tracker.handle_market_open(
        midnight_dt,
        algo.data_portal,
    )

    # handle any splits that impact any positions or any open orders.
    assets_we_care_about = (
        viewkeys(metrics_tracker.positions) |
        viewkeys(algo.blotter.open_orders)
    )

    if assets_we_care_about:
        splits = data_portal.get_splits(assets_we_care_about,
                                        midnight_dt)
        if splits:
            algo.blotter.process_splits(splits)
            metrics_tracker.handle_splits(splits)
示例6: setup_params
def setup_params(self, data):
    """Validate the stat's parameters and fill in any missing defaults.

    Checks that `scale` is valid, normalizes `kernel` to the abbreviation
    that the underlying KDE implementation expects, and copies any missing
    parameters from `stat_density.DEFAULT_PARAMS`.

    Raises
    ------
    PlotnineError
        If `scale` or `kernel` is not one of the accepted values.
    """
    params = self.params.copy()

    valid_scale = ('area', 'count', 'width')
    if params['scale'] not in valid_scale:
        msg = "Parameter scale should be one of {}"
        raise PlotnineError(msg.format(valid_scale))

    # Map full kernel names to the abbreviations expected downstream.
    lookup = {
        'biweight': 'biw',
        'cosine': 'cos',
        'cosine2': 'cos2',
        'epanechnikov': 'epa',
        'gaussian': 'gau',
        'triangular': 'tri',
        'triweight': 'triw',
        'uniform': 'uni'}

    with suppress(KeyError):
        params['kernel'] = lookup[params['kernel'].lower()]

    if params['kernel'] not in six.viewvalues(lookup):
        msg = ("kernel should be one of {}. "
               "You may use the abbreviations {}")
        # BUG FIX: `six.viewvalues()` was previously called with no
        # argument, raising a TypeError instead of the intended
        # PlotnineError with the list of valid abbreviations.
        raise PlotnineError(msg.format(six.viewkeys(lookup),
                                       six.viewvalues(lookup)))

    # Backfill any parameters the user did not supply.
    missing_params = (six.viewkeys(stat_density.DEFAULT_PARAMS) -
                      six.viewkeys(params))
    for key in missing_params:
        params[key] = stat_density.DEFAULT_PARAMS[key]
    return params
示例7: sync_bay_status
def sync_bay_status(self, ctx):
    """Reconcile the status of in-progress bays with their Heat stacks."""
    try:
        LOG.debug('Starting to sync up bay status')
        osc = clients.OpenStackClients(ctx)
        # Only bays with an operation still in flight need syncing.
        status = [bay_status.CREATE_IN_PROGRESS,
                  bay_status.UPDATE_IN_PROGRESS,
                  bay_status.DELETE_IN_PROGRESS]
        filters = {'status': status}
        bays = objects.Bay.list(ctx, filters=filters)
        if not bays:
            return
        # Index bays and stacks by stack id so they can be matched up.
        sid_to_bay_mapping = {bay.stack_id: bay for bay in bays}
        bay_stack_ids = sid_to_bay_mapping.keys()
        stacks = osc.heat().stacks.list(global_tenant=True,
                                        filters={'id': bay_stack_ids})
        sid_to_stack_mapping = {s.id: s for s in stacks}

        # intersection of bays magnum has and heat has
        for sid in (six.viewkeys(sid_to_bay_mapping) &
                    six.viewkeys(sid_to_stack_mapping)):
            stack = sid_to_stack_mapping[sid]
            bay = sid_to_bay_mapping[sid]
            self._sync_existing_bay(bay, stack)

        # the stacks that magnum has but heat doesn't have
        for sid in (six.viewkeys(sid_to_bay_mapping) -
                    six.viewkeys(sid_to_stack_mapping)):
            bay = sid_to_bay_mapping[sid]
            self._sync_missing_heat_stack(bay)
    except Exception as e:
        # Periodic task: never propagate -- log and retry on the next run.
        LOG.warn(_LW("Ignore error [%s] when syncing up bay status."), e,
                 exc_info=True)
示例8: once_a_day
def once_a_day(midnight_dt, current_data=self.current_data,
               data_portal=self.data_portal):
    # Generator run once per trading day: yields overnight capital changes
    # and performs day-roll bookkeeping.  The defaults bind
    # `self.current_data`/`self.data_portal` at definition time in the
    # enclosing scope.
    perf_tracker = algo.perf_tracker

    # Get the positions before updating the date so that prices are
    # fetched for trading close instead of midnight
    positions = algo.perf_tracker.position_tracker.positions
    position_assets = algo.asset_finder.retrieve_all(positions)

    # set all the timestamps
    self.simulation_dt = midnight_dt
    algo.on_dt_changed(midnight_dt)

    # process any capital changes that came overnight
    for capital_change in algo.calculate_capital_changes(
            midnight_dt, emission_rate=emission_rate,
            is_interday=True):
        yield capital_change

    # we want to wait until the clock rolls over to the next day
    # before cleaning up expired assets.
    self._cleanup_expired_assets(midnight_dt, position_assets)

    # handle any splits that impact any positions or any open orders.
    assets_we_care_about = \
        viewkeys(perf_tracker.position_tracker.positions) | \
        viewkeys(algo.blotter.open_orders)

    if assets_we_care_about:
        splits = data_portal.get_splits(assets_we_care_about,
                                        midnight_dt)
        if splits:
            algo.blotter.process_splits(splits)
            perf_tracker.position_tracker.handle_splits(splits)
示例9: _dictionary_merge
def _dictionary_merge(dictionary_left, dictionary_right):
"""Merge two dictionaries preserving values for the same key.
:param dictionary_left:
A valid dictionary with keys and values.
Example:
dictionary_left = {1: 'A', 2: ['B', 'C'], 3: []}
:param dictionary_right:
A valid dictionary with keys and values.
Example:
dictionary_right = {1: 'A', 2: 'C', 4: 'E'}
:return:
A merged dictionary, which preserves both values in the situation
of a key conflict.
Example:
{1: ['A', 'A'], 2: [['B', 'C'], 'C'], 3: [[]], 4: ['E']}
"""
merged_dictionary = {}
for key in (viewkeys(dictionary_left) | viewkeys(dictionary_right)):
if key in dictionary_left:
merged_dictionary.setdefault(key, []).append(dictionary_left[key])
if key in dictionary_right:
merged_dictionary.setdefault(key, []).append(
dictionary_right[key])
return merged_dictionary
示例10: use_defaults
def use_defaults(self, data):
    """
    Combine data with defaults and set aesthetics from parameters

    stats should not override this method.

    Parameters
    ----------
    data : dataframe
        Data used for drawing the geom.

    Returns
    -------
    out : dataframe
        Data used for drawing the geom.
    """
    # Aesthetics that are neither mapped, set as parameters, nor already
    # columns in the data fall back to the defaults (required aesthetics
    # have no defaults and are excluded).
    missing = (self.aesthetics() -
               six.viewkeys(self.aes_params) -
               set(data.columns))

    for ae in missing-self.REQUIRED_AES:
        if self.DEFAULT_AES[ae] is not None:
            data[ae] = self.DEFAULT_AES[ae]

    # Aesthetics set as parameters always win, even over existing columns.
    # BUG FIX: the original recomputed a second `missing` set here that was
    # never used (dead code); the loop below intentionally assigns every
    # parameter, so the computation is simply removed.
    for ae in self.aes_params:
        data[ae] = self.aes_params[ae]

    return data
示例11: sync_bay_status
def sync_bay_status(self, ctx):
    """Periodic task: reconcile in-progress bay statuses with Heat stacks."""
    try:
        LOG.debug('Starting to sync up bay status')
        osc = clients.OpenStackClients(ctx)
        # Only bays with an operation still in flight need syncing.
        filters = [bay_status.CREATE_IN_PROGRESS,
                   bay_status.UPDATE_IN_PROGRESS,
                   bay_status.DELETE_IN_PROGRESS]
        bays = objects.Bay.list_all(ctx, filters=filters)
        if not bays:
            return
        # Index bays and stacks by stack id so they can be matched up.
        sid_to_bay_mapping = {bay.stack_id: bay for bay in bays}
        bay_stack_ids = sid_to_bay_mapping.keys()
        stacks = osc.heat().stacks.list(global_tenant=True,
                                        filters={'id': bay_stack_ids})
        sid_to_stack_mapping = {s.id: s for s in stacks}

        # Bays known to both magnum and heat: copy the stack status over.
        for sid in (six.viewkeys(sid_to_bay_mapping) &
                    six.viewkeys(sid_to_stack_mapping)):
            stack = sid_to_stack_mapping[sid]
            bay = sid_to_bay_mapping[sid]
            if bay.status != stack.stack_status:
                old_status = bay.status
                bay.status = stack.stack_status
                bay.save()
                LOG.info(_LI("Sync up bay with id %(id)s from "
                             "%(old_status)s to %(status)s."),
                         {'id': bay.id, 'old_status': old_status,
                          'status': bay.status})

        # Bays magnum knows about whose heat stack has disappeared.
        for sid in (six.viewkeys(sid_to_bay_mapping) -
                    six.viewkeys(sid_to_stack_mapping)):
            bay = sid_to_bay_mapping[sid]
            if bay.status == bay_status.DELETE_IN_PROGRESS:
                # Stack gone while deleting: the delete completed.
                bay.destroy()
                LOG.info(_LI("Bay with id %(id)s has been deleted due "
                             "to stack with id %(sid)s not found in "
                             "Heat."),
                         {'id': bay.id, 'sid': sid})
            elif bay.status == bay_status.CREATE_IN_PROGRESS:
                # Stack gone while creating: mark the create as failed.
                bay.status = bay_status.CREATE_FAILED
                bay.save()
                LOG.info(_LI("Bay with id %(id)s has been set to "
                             "%(status)s due to stack with id %(sid)s "
                             "not found in Heat."),
                         {'id': bay.id, 'status': bay.status,
                          'sid': sid})
            elif bay.status == bay_status.UPDATE_IN_PROGRESS:
                # Stack gone while updating: mark the update as failed.
                bay.status = bay_status.UPDATE_FAILED
                bay.save()
                LOG.info(_LI("Bay with id %(id)s has been set to "
                             "%(status)s due to stack with id %(sid)s "
                             "not found in Heat."),
                         {'id': bay.id, 'status': bay.status,
                          'sid': sid})
    except Exception as e:
        # Periodic task: swallow and log so the next run can retry.
        LOG.warn(_LW("Ignore error [%s] when syncing up bay status."), e,
                 exc_info=True)
示例12: _clean_running_config_from_removed_nets
def _clean_running_config_from_removed_nets(self):
    """Drop networks from the persisted running config that were removed
    but never purged from it."""
    persisted = RunningConfig()
    # Networks present on disk but absent from our in-memory running
    # config have been removed and must be purged.
    stale_nets = (six.viewkeys(persisted.networks) -
                  six.viewkeys(self.runningConfig.networks))
    for network in stale_nets:
        persisted.removeNetwork(network)
    persisted.save()
示例13: namespace
def namespace(cls):
    '''Wrap the class `cls` into a read-only configuration-namespace object.

    Non-underscore, non-callable attributes of `cls` become read-only
    properties on a freshly built type; an instance of that type is
    returned.  Attribute assignment is only allowed for the attributes
    that were kept verbatim (underscore-prefixed names, properties and
    callables).
    '''
    # turn all instances of things into read-only attributes
    attrs,properties,subclass = {},{},{}
    for k,v in cls.__dict__.items():
        # Qualify anything with a __name__ for nicer reprs/tracebacks.
        if hasattr(v, '__name__'):
            v.__name__ = '{}.{}'.format(cls.__name__,k)
        if k.startswith('_') or type(v) is property:
            # Private attributes and existing properties pass through.
            attrs[k] = v
        elif not six.callable(v) or isinstance(v,type):
            # Plain values (and nested classes) become read-only properties.
            properties[k] = v
        elif not hasattr(v, '__class__'):
            # NOTE(review): every object in CPython has `__class__`, so this
            # recursive-wrapping branch appears unreachable -- confirm.
            subclass[k] = namespace(v)
        else:
            attrs[k] = v
        continue

    def getprops(obj):
        # Render `obj` (name -> value) as aligned "name : value # doc" lines
        # for __repr__; column widths are computed over all entries.
        result = []
        col1,col2 = 0,0
        for k,v in obj.items():
            col1 = max((col1,len(k)))
            if isinstance(v, type):
                val = '<>'
            elif hasattr(v, '__class__'):
                val = '{!r}'.format(v)
            else:
                raise ValueError(k)
            # Only the first line of a docstring is shown.
            doc = v.__doc__.split('\n')[0] if v.__doc__ else None
            col2 = max((col2,len(val)))
            result.append((k, val, doc))
        return [('{name:{}} : {val:{}} # {doc}' if d else '{name:{}} : {val:{}}').format(col1,col2,name=k,val=v,doc=d) for k,v,d in result]

    def __repr__(self):
        # Header line with the class name (and docstring if any), then one
        # line per property, then one stub line per nested namespace.
        props = getprops(properties)
        descr = ('{{{!s}}} # {}\n' if cls.__doc__ else '{{{!s}}}\n')
        subs = ['{{{}.{}}}\n...'.format(cls.__name__,k) for k in subclass.keys()]
        res = descr.format(cls.__name__,cls.__doc__) + '\n'.join(props)
        if subs:
            return res + '\n' + '\n'.join(subs) + '\n'
        return res + '\n'

    def __setattr__(self, name, value):
        # Only attributes kept verbatim may be assigned; the generated
        # properties stay read-only.
        if name in six.viewkeys(attrs):
            object.__setattr__(self, name, value)
            return
        raise AttributeError('Configuration \'{:s}\' does not have field named \'{:s}\''.format(cls.__name__,name))

    attrs['__repr__'] = __repr__
    attrs['__setattr__'] = __setattr__
    # Bind `k` as a default argument so each property closure captures its
    # own key rather than the loop's last value.
    attrs.update((k,property(fget=lambda s,k=k:properties[k])) for k in six.viewkeys(properties))
    attrs.update((k,property(fget=lambda s,k=k:subclass[k])) for k in six.viewkeys(subclass))
    # Build the replacement type with the same name/bases and instantiate it.
    result = type(cls.__name__, cls.__bases__, attrs)
    return result()
示例14: assert_dict_equal
def assert_dict_equal(result, expected, path=(), msg="", **kwargs):
    """Assert two dicts are equal, reporting every differing value at once.

    `path` accumulates the access trail (e.g. ``.keys()``, ``[key]``) used
    in failure messages; `msg` and `kwargs` are forwarded to the nested
    comparisons.
    """
    # Fail fast if the key sets differ.
    _check_sets(viewkeys(result), viewkeys(expected), msg, path + (".%s()" % ("viewkeys" if PY2 else "keys"),), "key")

    failures = []
    for k, (resultv, expectedv) in iteritems(dzip_exact(result, expected)):
        try:
            assert_equal(resultv, expectedv, path=path + ("[%r]" % k,), msg=msg, **kwargs)
        except AssertionError as e:
            # Collect rather than raise so all mismatches are reported together.
            failures.append(str(e))

    if failures:
        raise AssertionError("\n".join(failures))
示例15: _adjust_az_filters
def _adjust_az_filters(self, filters):
    """Translate availability-zone filter keys to their agent equivalents.

    Keys not present in AZ_ATTRIBUTE_MAP pass through untouched.
    """
    # The `&` of the two key views yields a new set, so it is safe to pop
    # from `filters` while iterating it.
    translatable = six.viewkeys(filters) & six.viewkeys(AZ_ATTRIBUTE_MAP)
    for key in translatable:
        spec = AZ_ATTRIBUTE_MAP[key]
        values = filters.pop(key)
        if not values:
            continue
        converted = [spec['convert_to'](value) for value in values]
        filters.setdefault(spec['agent_key'], [])
        filters[spec['agent_key']] += converted
    return filters