This article collects typical usage examples of the Python method moto.compat.OrderedDict.keys. If you are unsure what OrderedDict.keys does or how to call it, the curated code examples below may help. You can also read further about the class it belongs to, moto.compat.OrderedDict.
A total of 6 code examples of the OrderedDict.keys method are shown below, sorted by popularity by default.
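Before the examples, a quick note on why keys() matters here: moto.compat.OrderedDict falls back to collections.OrderedDict on modern Pythons, so keys() preserves insertion order and the first/last keys correspond to the oldest/newest entries. A minimal, hypothetical sketch (plain Python, no moto required):

from collections import OrderedDict  # what moto.compat.OrderedDict resolves to on Python 2.7+/3.x

records = OrderedDict()
for seq in (1, 2, 3):
    records[seq] = "record-%d" % seq

keys = list(records.keys())
print(keys[0])   # 1 -> oldest sequence number
print(keys[-1])  # 3 -> newest sequence number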
Example 1: Shard
# Required module import: from moto.compat import OrderedDict [as alias]
# Or: from moto.compat.OrderedDict import keys [as alias]
class Shard(BaseModel):
def __init__(self, shard_id, starting_hash, ending_hash):
self._shard_id = shard_id
self.starting_hash = starting_hash
self.ending_hash = ending_hash
self.records = OrderedDict()
@property
def shard_id(self):
return "shardId-{0}".format(str(self._shard_id).zfill(12))
def get_records(self, last_sequence_id, limit):
last_sequence_id = int(last_sequence_id)
results = []
for sequence_number, record in self.records.items():
if sequence_number > last_sequence_id:
results.append(record)
last_sequence_id = sequence_number
if len(results) == limit:
break
return results, last_sequence_id
def put_record(self, partition_key, data, explicit_hash_key):
# Note: this function is not safe for concurrency
if self.records:
last_sequence_number = self.get_max_sequence_number()
else:
last_sequence_number = 0
sequence_number = last_sequence_number + 1
self.records[sequence_number] = Record(
partition_key, data, sequence_number, explicit_hash_key)
return sequence_number
def get_min_sequence_number(self):
if self.records:
return list(self.records.keys())[0]
return 0
def get_max_sequence_number(self):
if self.records:
return list(self.records.keys())[-1]
return 0
def to_json(self):
return {
"HashKeyRange": {
"EndingHashKey": str(self.ending_hash),
"StartingHashKey": str(self.starting_hash)
},
"SequenceNumberRange": {
"EndingSequenceNumber": self.get_max_sequence_number(),
"StartingSequenceNumber": self.get_min_sequence_number(),
},
"ShardId": self.shard_id
}
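The example above never calls keys() on an empty dict, and get_min_sequence_number / get_max_sequence_number rely entirely on insertion order. A self-contained, hypothetical sketch of that bookkeeping (MiniShard is not part of moto; it only mirrors the pattern):

from collections import OrderedDict  # stand-in for moto.compat.OrderedDict

class MiniShard(object):
    def __init__(self):
        self.records = OrderedDict()  # sequence_number -> payload

    def put_record(self, data):
        # keys are monotonically increasing, so the last key is the current maximum
        last = list(self.records.keys())[-1] if self.records else 0
        self.records[last + 1] = data
        return last + 1

    def get_records(self, last_sequence_id, limit):
        results = []
        for sequence_number, record in self.records.items():
            if sequence_number > last_sequence_id:
                results.append(record)
                last_sequence_id = sequence_number
                if len(results) == limit:
                    break
        return results, last_sequence_id

shard = MiniShard()
for payload in ("a", "b", "c"):
    shard.put_record(payload)

print(shard.get_records(last_sequence_id=0, limit=2))  # (['a', 'b'], 2)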
Example 2: Shard
# Required module import: from moto.compat import OrderedDict [as alias]
# Or: from moto.compat.OrderedDict import keys [as alias]
class Shard(object):
def __init__(self, shard_id):
self.shard_id = shard_id
self.records = OrderedDict()
def get_records(self, last_sequence_id, limit):
last_sequence_id = int(last_sequence_id)
results = []
for sequence_number, record in self.records.items():
if sequence_number > last_sequence_id:
results.append(record)
last_sequence_id = sequence_number
if len(results) == limit:
break
return results, last_sequence_id
def put_record(self, partition_key, data):
# Note: this function is not safe for concurrency
if self.records:
last_sequence_number = self.get_max_sequence_number()
else:
last_sequence_number = 0
sequence_number = last_sequence_number + 1
self.records[sequence_number] = Record(partition_key, data, sequence_number)
return sequence_number
def get_min_sequence_number(self):
if self.records:
return list(self.records.keys())[0]
return 0
def get_max_sequence_number(self):
if self.records:
return list(self.records.keys())[-1]
return 0
def to_json(self):
return {
"HashKeyRange": {
"EndingHashKey": "113427455640312821154458202477256070484",
"StartingHashKey": "0"
},
"SequenceNumberRange": {
"EndingSequenceNumber": self.get_max_sequence_number(),
"StartingSequenceNumber": self.get_min_sequence_number(),
},
"ShardId": self.shard_id
}
Example 3: Shard
# Required module import: from moto.compat import OrderedDict [as alias]
# Or: from moto.compat.OrderedDict import keys [as alias]
class Shard(BaseModel):
def __init__(self, shard_id, starting_hash, ending_hash):
self._shard_id = shard_id
self.starting_hash = starting_hash
self.ending_hash = ending_hash
self.records = OrderedDict()
@property
def shard_id(self):
return "shardId-{0}".format(str(self._shard_id).zfill(12))
def get_records(self, last_sequence_id, limit):
last_sequence_id = int(last_sequence_id)
results = []
secs_behind_latest = 0
for sequence_number, record in self.records.items():
if sequence_number > last_sequence_id:
results.append(record)
last_sequence_id = sequence_number
very_last_record = self.records[next(reversed(self.records))]
secs_behind_latest = very_last_record.created_at - record.created_at
if len(results) == limit:
break
millis_behind_latest = int(secs_behind_latest * 1000)
return results, last_sequence_id, millis_behind_latest
def put_record(self, partition_key, data, explicit_hash_key):
# Note: this function is not safe for concurrency
if self.records:
last_sequence_number = self.get_max_sequence_number()
else:
last_sequence_number = 0
sequence_number = last_sequence_number + 1
self.records[sequence_number] = Record(
partition_key, data, sequence_number, explicit_hash_key)
return sequence_number
def get_min_sequence_number(self):
if self.records:
return list(self.records.keys())[0]
return 0
def get_max_sequence_number(self):
if self.records:
return list(self.records.keys())[-1]
return 0
def get_sequence_number_at(self, at_timestamp):
if not self.records or at_timestamp < list(self.records.values())[0].created_at:
return 0
else:
# find the last item in the list that was created before
# at_timestamp
r = next((r for r in reversed(self.records.values()) if r.created_at < at_timestamp), None)
return r.sequence_number
def to_json(self):
return {
"HashKeyRange": {
"EndingHashKey": str(self.ending_hash),
"StartingHashKey": str(self.starting_hash)
},
"SequenceNumberRange": {
"EndingSequenceNumber": self.get_max_sequence_number(),
"StartingSequenceNumber": self.get_min_sequence_number(),
},
"ShardId": self.shard_id
}
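Example 3 extends the shard with get_sequence_number_at and a millis_behind_latest estimate. A hypothetical sketch of the timestamp lookup, using plain values instead of Record objects (the created_at timestamps are made up for illustration):

from collections import OrderedDict  # stand-in for moto.compat.OrderedDict

records = OrderedDict()  # sequence_number -> created_at (epoch seconds)
records[1] = 100.0
records[2] = 200.0
records[3] = 300.0

def sequence_number_at(at_timestamp):
    # 0 means "start of shard": no records yet, or the timestamp predates them all
    if not records or at_timestamp < list(records.values())[0]:
        return 0
    # walk newest-first and return the last record created before at_timestamp
    return next(seq for seq, created_at in reversed(list(records.items()))
                if created_at < at_timestamp)

print(sequence_number_at(250.0))  # 2
print(sequence_number_at(50.0))   # 0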
Example 4: CloudFormationBackend
# Required module import: from moto.compat import OrderedDict [as alias]
# Or: from moto.compat.OrderedDict import keys [as alias]
#......... part of the code is omitted here .........
stack = s
if stack is None:
raise ValidationError(stack_name)
else:
stack = self.create_stack(stack_name, template, parameters,
region_name, notification_arns, tags,
role_arn, create_change_set=True)
change_set_id = generate_changeset_id(change_set_name, region_name)
self.stacks[change_set_name] = {'Id': change_set_id,
'StackId': stack.stack_id}
self.change_sets[change_set_id] = stack
return change_set_id, stack.stack_id
def execute_change_set(self, change_set_name, stack_name=None):
stack = None
if change_set_name in self.change_sets:
# This means arn was passed in
stack = self.change_sets[change_set_name]
else:
for cs in self.change_sets:
if self.change_sets[cs].name == change_set_name:
stack = self.change_sets[cs]
if stack is None:
raise ValidationError(stack_name)
if stack.events[-1].resource_status == 'REVIEW_IN_PROGRESS':
stack._add_stack_event('CREATE_COMPLETE')
else:
stack._add_stack_event('UPDATE_IN_PROGRESS')
stack._add_stack_event('UPDATE_COMPLETE')
return True
def describe_stacks(self, name_or_stack_id):
stacks = self.stacks.values()
if name_or_stack_id:
for stack in stacks:
if stack.name == name_or_stack_id or stack.stack_id == name_or_stack_id:
return [stack]
if self.deleted_stacks:
deleted_stacks = self.deleted_stacks.values()
for stack in deleted_stacks:
if stack.stack_id == name_or_stack_id:
return [stack]
raise ValidationError(name_or_stack_id)
else:
return list(stacks)
def list_stacks(self):
return self.stacks.values()
def get_stack(self, name_or_stack_id):
all_stacks = dict(self.deleted_stacks, **self.stacks)
if name_or_stack_id in all_stacks:
            # Lookup by stack id - deleted stacks included
return all_stacks[name_or_stack_id]
else:
# Lookup by stack name - undeleted stacks only
for stack in self.stacks.values():
if stack.name == name_or_stack_id:
return stack
def update_stack(self, name, template, role_arn=None, parameters=None, tags=None):
stack = self.get_stack(name)
stack.update(template, role_arn, parameters=parameters, tags=tags)
return stack
def list_stack_resources(self, stack_name_or_id):
stack = self.get_stack(stack_name_or_id)
return stack.stack_resources
def delete_stack(self, name_or_stack_id):
if name_or_stack_id in self.stacks:
# Delete by stack id
stack = self.stacks.pop(name_or_stack_id, None)
stack.delete()
self.deleted_stacks[stack.stack_id] = stack
[self.exports.pop(export.name) for export in stack.exports]
return self.stacks.pop(name_or_stack_id, None)
else:
# Delete by stack name
for stack in list(self.stacks.values()):
if stack.name == name_or_stack_id:
self.delete_stack(stack.stack_id)
def list_exports(self, token):
all_exports = list(self.exports.values())
if token is None:
exports = all_exports[0:100]
next_token = '100' if len(all_exports) > 100 else None
else:
token = int(token)
exports = all_exports[token:token + 100]
next_token = str(token + 100) if len(all_exports) > token + 100 else None
return exports, next_token
def _validate_export_uniqueness(self, stack):
new_stack_export_names = [x.name for x in stack.exports]
export_names = self.exports.keys()
if not set(export_names).isdisjoint(new_stack_export_names):
raise ValidationError(stack.stack_id, message='Export names must be unique across a given region')
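The _validate_export_uniqueness helper at the end of Example 4 is where keys() does the work: existing export names are the keys of an OrderedDict, and set.isdisjoint() flags any overlap with the names a new stack wants to export. A hypothetical, standalone sketch:

from collections import OrderedDict  # stand-in for moto.compat.OrderedDict

exports = OrderedDict([("vpc-id", "vpc-123"), ("subnet-id", "subnet-456")])
new_stack_export_names = ["subnet-id", "bucket-name"]

if not set(exports.keys()).isdisjoint(new_stack_export_names):
    print("Export names must be unique across a given region")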
Example 5: ELBv2Backend
# Required module import: from moto.compat import OrderedDict [as alias]
# Or: from moto.compat.OrderedDict import keys [as alias]
#......... part of the code is omitted here .........
for target_group in self.target_groups.values():
if target_group.name == name:
raise DuplicateTargetGroupName()
valid_protocols = ['HTTPS', 'HTTP', 'TCP']
if kwargs.get('healthcheck_protocol') and kwargs['healthcheck_protocol'] not in valid_protocols:
raise InvalidConditionValueError(
"Value {} at 'healthCheckProtocol' failed to satisfy constraint: "
"Member must satisfy enum value set: {}".format(kwargs['healthcheck_protocol'], valid_protocols))
if kwargs.get('protocol') and kwargs['protocol'] not in valid_protocols:
raise InvalidConditionValueError(
"Value {} at 'protocol' failed to satisfy constraint: "
"Member must satisfy enum value set: {}".format(kwargs['protocol'], valid_protocols))
if kwargs.get('matcher') and FakeTargetGroup.HTTP_CODE_REGEX.match(kwargs['matcher']['HttpCode']) is None:
raise RESTError('InvalidParameterValue', 'HttpCode must be like 200 | 200-399 | 200,201 ...')
arn = make_arn_for_target_group(account_id=1, name=name, region_name=self.region_name)
target_group = FakeTargetGroup(name, arn, **kwargs)
self.target_groups[target_group.arn] = target_group
return target_group
def create_listener(self, load_balancer_arn, protocol, port, ssl_policy, certificate, default_actions):
balancer = self.load_balancers.get(load_balancer_arn)
if balancer is None:
raise LoadBalancerNotFoundError()
if port in balancer.listeners:
raise DuplicateListenerError()
arn = load_balancer_arn.replace(':loadbalancer/', ':listener/') + "/%s%s" % (port, id(self))
listener = FakeListener(load_balancer_arn, arn, protocol, port, ssl_policy, certificate, default_actions)
balancer.listeners[listener.arn] = listener
for action in default_actions:
if action['target_group_arn'] in self.target_groups.keys():
target_group = self.target_groups[action['target_group_arn']]
target_group.load_balancer_arns.append(load_balancer_arn)
return listener
def describe_load_balancers(self, arns, names):
balancers = self.load_balancers.values()
arns = arns or []
names = names or []
if not arns and not names:
return balancers
matched_balancers = []
matched_balancer = None
for arn in arns:
for balancer in balancers:
if balancer.arn == arn:
matched_balancer = balancer
if matched_balancer is None:
raise LoadBalancerNotFoundError()
elif matched_balancer not in matched_balancers:
matched_balancers.append(matched_balancer)
for name in names:
for balancer in balancers:
if balancer.name == name:
matched_balancer = balancer
if matched_balancer is None:
raise LoadBalancerNotFoundError()
elif matched_balancer not in matched_balancers:
matched_balancers.append(matched_balancer)
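In Example 5, create_listener uses keys() only for a membership test: if a default action points at a known target group ARN, the load balancer ARN is recorded on that target group (the same test could be written as "arn in self.target_groups"). A hypothetical sketch with made-up ARNs:

from collections import OrderedDict  # stand-in for moto.compat.OrderedDict

target_group_arn = "arn:aws:elasticloadbalancing:us-east-1:1:targetgroup/demo/abc"  # made-up ARN
load_balancer_arn = "arn:aws:elasticloadbalancing:us-east-1:1:loadbalancer/app/demo/def"  # made-up ARN

target_groups = OrderedDict()
target_groups[target_group_arn] = {"load_balancer_arns": []}

default_actions = [{"target_group_arn": target_group_arn}]
for action in default_actions:
    if action["target_group_arn"] in target_groups.keys():
        target_groups[action["target_group_arn"]]["load_balancer_arns"].append(load_balancer_arn)

print(target_groups[target_group_arn]["load_balancer_arns"])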
Example 6: CloudFormationBackend
# Required module import: from moto.compat import OrderedDict [as alias]
# Or: from moto.compat.OrderedDict import keys [as alias]
class CloudFormationBackend(BaseBackend):
def __init__(self):
self.stacks = OrderedDict()
self.deleted_stacks = {}
self.exports = OrderedDict()
def create_stack(self, name, template, parameters, region_name, notification_arns=None, tags=None, role_arn=None):
stack_id = generate_stack_id(name)
new_stack = FakeStack(
stack_id=stack_id,
name=name,
template=template,
parameters=parameters,
region_name=region_name,
notification_arns=notification_arns,
tags=tags,
role_arn=role_arn,
cross_stack_resources=self.exports,
)
self.stacks[stack_id] = new_stack
self._validate_export_uniqueness(new_stack)
for export in new_stack.exports:
self.exports[export.name] = export
return new_stack
def describe_stacks(self, name_or_stack_id):
stacks = self.stacks.values()
if name_or_stack_id:
for stack in stacks:
if stack.name == name_or_stack_id or stack.stack_id == name_or_stack_id:
return [stack]
if self.deleted_stacks:
deleted_stacks = self.deleted_stacks.values()
for stack in deleted_stacks:
if stack.stack_id == name_or_stack_id:
return [stack]
raise ValidationError(name_or_stack_id)
else:
return list(stacks)
def list_stacks(self):
return self.stacks.values()
def get_stack(self, name_or_stack_id):
all_stacks = dict(self.deleted_stacks, **self.stacks)
if name_or_stack_id in all_stacks:
            # Lookup by stack id - deleted stacks included
return all_stacks[name_or_stack_id]
else:
# Lookup by stack name - undeleted stacks only
for stack in self.stacks.values():
if stack.name == name_or_stack_id:
return stack
def update_stack(self, name, template, role_arn=None, parameters=None, tags=None):
stack = self.get_stack(name)
stack.update(template, role_arn, parameters=parameters, tags=tags)
return stack
def list_stack_resources(self, stack_name_or_id):
stack = self.get_stack(stack_name_or_id)
return stack.stack_resources
def delete_stack(self, name_or_stack_id):
if name_or_stack_id in self.stacks:
# Delete by stack id
stack = self.stacks.pop(name_or_stack_id, None)
stack.delete()
self.deleted_stacks[stack.stack_id] = stack
[self.exports.pop(export.name) for export in stack.exports]
return self.stacks.pop(name_or_stack_id, None)
else:
# Delete by stack name
for stack in list(self.stacks.values()):
if stack.name == name_or_stack_id:
self.delete_stack(stack.stack_id)
def list_exports(self, token):
all_exports = list(self.exports.values())
if token is None:
exports = all_exports[0:100]
next_token = '100' if len(all_exports) > 100 else None
else:
token = int(token)
exports = all_exports[token:token + 100]
next_token = str(token + 100) if len(all_exports) > token + 100 else None
return exports, next_token
def _validate_export_uniqueness(self, stack):
new_stack_export_names = [x.name for x in stack.exports]
export_names = self.exports.keys()
if not set(export_names).isdisjoint(new_stack_export_names):
raise ValidationError(stack.stack_id, message='Export names must be unique across a given region')
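Example 6 also shows list_exports paginating over the OrderedDict of exports in pages of 100, with the numeric offset serialized as the next token. A hypothetical sketch of that pagination, detached from the backend:

from collections import OrderedDict  # stand-in for moto.compat.OrderedDict

exports = OrderedDict(("export-%03d" % i, i) for i in range(150))  # made-up export values

def list_exports(token):
    all_exports = list(exports.values())
    start = 0 if token is None else int(token)
    page = all_exports[start:start + 100]
    next_token = str(start + 100) if len(all_exports) > start + 100 else None
    return page, next_token

page1, token = list_exports(None)
page2, token = list_exports(token)
print(len(page1), len(page2), token)  # 100 50 None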