本文整理汇总了Python中syntaxnet.util.check.In方法的典型用法代码示例。如果您正苦于以下问题:Python check.In方法的具体用法?Python check.In怎么用?Python check.In使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类syntaxnet.util.check
的用法示例。
在下文中一共展示了check.In方法的7个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: get_attrs_with_defaults
# 需要导入模块: from syntaxnet.util import check [as 别名]
# 或者: from syntaxnet.util.check import In [as 别名]
def get_attrs_with_defaults(parameters, defaults):
  """Populates a dictionary with run-time attributes.

  Given defaults, populates any overrides from 'parameters' with their
  corresponding converted values. 'defaults' should be typed. This is useful
  for specifying NetworkUnit-specific configuration options.

  Args:
    parameters: a <string, string> map.
    defaults: a <string, value> typed set of default values. Not mutated.

  Returns:
    dictionary populated with any overrides.

  Raises:
    ValueError: if a key in parameters is not present in defaults (raised by
      check.In, whose default error type is ValueError).
  """
  # Copy so the caller's 'defaults' dict is never mutated by overrides.
  attrs = dict(defaults)
  # .items() instead of Py2-only .iteritems(); works on both Python 2 and 3.
  for key, value in parameters.items():
    check.In(key, defaults, 'Unknown attribute: %s' % key)
    if isinstance(defaults[key], bool):
      # Booleans are parsed case-insensitively from their string form.
      attrs[key] = value.lower() == 'true'
    else:
      # Convert the string override to the type of the default value.
      attrs[key] = type(defaults[key])(value)
  return attrs
示例2: __init__
# 需要导入模块: from syntaxnet.util import check [as 别名]
# 或者: from syntaxnet.util.check import In [as 别名]
def __init__(self, component):
  """Sets up attributes, layers, and the optional padding parameter.

  Args:
    component: Parent ComponentBuilderBase object.
  """
  super(GatherNetwork, self).__init__(component)

  # Parse run-time attributes; only 'trainable_padding' is supported.
  self._attrs = get_attrs_with_defaults(
      component.spec.network_unit.parameters, {'trainable_padding': False})

  # A linked 'indices' feature of dimension 1 is mandatory.
  check.In('indices', self._linked_feature_dims,
           'Missing required linked feature')
  check.Eq(self._linked_feature_dims['indices'], 1,
           'Wrong dimension for "indices" feature')

  # The output carries every input column except the 'indices' one.
  self._dim = self._concatenated_input_dim - 1  # exclude 'indices'
  self._layers.append(Layer(component, 'outputs', self._dim))

  if self._attrs['trainable_padding']:
    # Learned padding vector, initialized near zero.
    padding = tf.get_variable(
        'pre_padding', [1, 1, self._dim],
        initializer=tf.random_normal_initializer(stddev=1e-4),
        dtype=tf.float32)
    self._params.append(padding)
示例3: __init__
# 需要导入模块: from syntaxnet.util import check [as 别名]
# 或者: from syntaxnet.util.check import In [as 别名]
def __init__(self, component):
  """Initializes weights and layers.

  Args:
    component: Parent ComponentBuilderBase object.
  """
  super(BiaffineDigraphNetwork, self).__init__(component)

  # This unit consumes exactly two linked features and no fixed features.
  check.Eq(len(self._fixed_feature_dims.items()), 0,
           'Expected no fixed features')
  check.Eq(len(self._linked_feature_dims.items()), 2,
           'Expected two linked features')
  check.In('sources', self._linked_feature_dims,
           'Missing required linked feature')
  check.In('targets', self._linked_feature_dims,
           'Missing required linked feature')

  self._source_dim = self._linked_feature_dims['sources']
  self._target_dim = self._linked_feature_dims['targets']

  # TODO(googleuser): Make parameter initialization configurable.
  init = tf.random_normal_initializer(stddev=1e-4)
  arc_weights = tf.get_variable(
      'weights_arc', [self._source_dim, self._target_dim], tf.float32, init)
  source_weights = tf.get_variable(
      'weights_source', [self._source_dim], tf.float32, init)
  root_weights = tf.get_variable('root', [self._source_dim], tf.float32, init)

  self._weights = [arc_weights, source_weights, root_weights]
  self._params.extend(self._weights)
  self._regularized_weights.extend(self._weights)

  # Negative Layer.dim indicates that the dimension is dynamic.
  # NOTE(review): sibling variants of this class pass `component` (not `self`)
  # as the first Layer argument — confirm which signature this version expects.
  self._layers.append(network_units.Layer(self, 'adjacency', -1))
示例4: __init__
# 需要导入模块: from syntaxnet.util import check [as 别名]
# 或者: from syntaxnet.util.check import In [as 别名]
def __init__(self, component):
  """Initializes weights and layers.

  Args:
    component: Parent ComponentBuilderBase object.
  """
  super(BiaffineDigraphNetwork, self).__init__(component)

  # Exactly two linked features ('sources' and 'targets'), no fixed features.
  check.Eq(len(self._fixed_feature_dims.items()), 0,
           'Expected no fixed features')
  check.Eq(len(self._linked_feature_dims.items()), 2,
           'Expected two linked features')
  check.In('sources', self._linked_feature_dims,
           'Missing required linked feature')
  check.In('targets', self._linked_feature_dims,
           'Missing required linked feature')

  self._source_dim = self._linked_feature_dims['sources']
  self._target_dim = self._linked_feature_dims['targets']

  # TODO(googleuser): Make parameter initialization configurable.
  self._weights = []
  for name, shape in (('weights_arc', [self._source_dim, self._target_dim]),
                      ('weights_source', [self._source_dim]),
                      ('root', [self._source_dim])):
    self._weights.append(
        tf.get_variable(name, shape, tf.float32,
                        tf.random_normal_initializer(stddev=1e-4)))

  self._params.extend(self._weights)
  self._regularized_weights.extend(self._weights)

  # Negative Layer.dim indicates that the dimension is dynamic.
  self._layers.append(network_units.Layer(component, 'adjacency', -1))
示例5: testCheckIn
# 需要导入模块: from syntaxnet.util import check [as 别名]
# 或者: from syntaxnet.util.check import In [as 别名]
def testCheckIn(self):
  """Tests check.In on tuples and dicts, with default and custom errors."""
  # Present elements pass without raising, for both dict keys and tuples.
  check.In('b', {'a': 1, 'b': 2}, 'bar')
  check.In('a', ('a', 'b', 'c'), 'foo')
  # A missing element raises ValueError by default, carrying the message.
  with self.assertRaisesRegexp(ValueError, 'bar'):
    check.In('d', ('a', 'b', 'c'), 'bar')
  # The raised exception type can be overridden via the fourth argument.
  with self.assertRaisesRegexp(RuntimeError, 'baz'):
    check.In('c', {'a': 1, 'b': 2}, 'baz', RuntimeError)
示例6: __init__
# 需要导入模块: from syntaxnet.util import check [as 别名]
# 或者: from syntaxnet.util.check import In [as 别名]
def __init__(self, component):
  """Initializes weights and layers.

  Args:
    component: Parent ComponentBuilderBase object.
  """
  super(BiaffineDigraphNetwork, self).__init__(component)

  # Exactly two linked features ('sources' and 'targets'), no fixed features.
  check.Eq(len(self._fixed_feature_dims.items()), 0,
           'Expected no fixed features')
  check.Eq(len(self._linked_feature_dims.items()), 2,
           'Expected two linked features')
  check.In('sources', self._linked_feature_dims,
           'Missing required linked feature')
  check.In('targets', self._linked_feature_dims,
           'Missing required linked feature')

  self._source_dim = self._linked_feature_dims['sources']
  self._target_dim = self._linked_feature_dims['targets']

  # Arc weights start orthogonal; source/root weights start at zero.
  arc = tf.get_variable('weights_arc',
                        [self._source_dim, self._target_dim], tf.float32,
                        tf.orthogonal_initializer())
  source = tf.get_variable('weights_source', [self._source_dim], tf.float32,
                           tf.zeros_initializer())
  root = tf.get_variable('root', [self._source_dim], tf.float32,
                         tf.zeros_initializer())
  self._weights = [arc, source, root]

  self._params.extend(self._weights)
  self._regularized_weights.extend(self._weights)

  # Add runtime hooks for pre-computed weights.
  self._derived_params.append(self._get_root_weights)
  self._derived_params.append(self._get_root_bias)

  # Negative Layer.dim indicates that the dimension is dynamic.
  self._layers.append(network_units.Layer(component, 'adjacency', -1))
示例7: __init__
# 需要导入模块: from syntaxnet.util import check [as 别名]
# 或者: from syntaxnet.util.check import In [as 别名]
def __init__(self, component):
"""Initializes layers.
Args:
component: Parent ComponentBuilderBase object.
"""
layers = [
network_units.Layer(self, 'lengths', -1),
network_units.Layer(self, 'scores', -1),
network_units.Layer(self, 'logits', -1),
network_units.Layer(self, 'arcs', -1),
]
super(MstSolverNetwork, self).__init__(component, init_layers=layers)
self._attrs = network_units.get_attrs_with_defaults(
component.spec.network_unit.parameters,
defaults={
'forest': False,
'loss': 'softmax',
'crf_max_dynamic_range': 20,
})
check.Eq(
len(self._fixed_feature_dims.items()), 0, 'Expected no fixed features')
check.Eq(
len(self._linked_feature_dims.items()), 2,
'Expected two linked features')
check.In('lengths', self._linked_feature_dims,
'Missing required linked feature')
check.In('scores', self._linked_feature_dims,
'Missing required linked feature')