This article collects typical usage examples of the Params.pop_int method from allennlp.common.params in Python. If you are wondering exactly what Params.pop_int does or how to use it, the curated code examples below should help; you can also read more about the containing class, allennlp.common.params.Params.
The following shows 7 code examples of Params.pop_int, ordered by popularity by default.
Example 1: prepare_environment
# Required import: from allennlp.common.params import Params [as alias]
# Or: from allennlp.common.params.Params import pop_int [as alias]
import random

import numpy
import torch

from allennlp.common.params import Params


def prepare_environment(params: Params):
    """
    Sets random seeds for reproducible experiments. This may not work as expected
    if you use it from within a Python project in which you have already imported PyTorch.
    If you use the scripts/run_model.py entry point to train models with this library,
    your experiments should be reasonably reproducible. If you are using this from your own
    project, you will want to call this function before importing PyTorch. Complete determinism
    is very difficult to achieve with libraries doing optimized linear algebra due to massively
    parallel execution, which is exacerbated by using GPUs.

    Parameters
    ----------
    params: Params object or dict, required.
        A ``Params`` object or dict holding the json parameters.
    """
    seed = params.pop_int("random_seed", 13370)
    numpy_seed = params.pop_int("numpy_seed", 1337)
    torch_seed = params.pop_int("pytorch_seed", 133)
    if seed is not None:
        random.seed(seed)
    if numpy_seed is not None:
        numpy.random.seed(numpy_seed)
    if torch_seed is not None:
        torch.manual_seed(torch_seed)
        # Seed all GPUs with the same seed if available.
        if torch.cuda.is_available():
            torch.cuda.manual_seed_all(torch_seed)
    # log_pytorch_version_info is defined alongside this function in allennlp and logs torch.__version__.
    log_pytorch_version_info()
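A quick usage sketch (the seed keys match the ones popped above; the values are just the defaults, shown partly as strings to illustrate that pop_int casts them to int):

config = Params({"random_seed": "13370", "numpy_seed": 1337, "pytorch_seed": 133})
prepare_environment(config)  # seeds random, numpy and torch, then logs the PyTorch version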
Example 2: from_params
# Required import: from allennlp.common.params import Params [as alias]
# Or: from allennlp.common.params.Params import pop_int [as alias]
def from_params(cls, params: Params) -> "EndpointSpanExtractor":
input_dim = params.pop_int("input_dim")
combination = params.pop("combination", "x,y")
num_width_embeddings = params.pop_int("num_width_embeddings", None)
span_width_embedding_dim = params.pop_int("span_width_embedding_dim", None)
bucket_widths = params.pop_bool("bucket_widths", False)
use_exclusive_start_indices = params.pop_bool("use_exclusive_start_indices", False)
params.assert_empty(cls.__name__)
return EndpointSpanExtractor(input_dim=input_dim,
combination=combination,
num_width_embeddings=num_width_embeddings,
span_width_embedding_dim=span_width_embedding_dim,
use_exclusive_start_indices=use_exclusive_start_indices,
bucket_widths=bucket_widths)
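A hedged usage sketch (the configuration values are invented for illustration); keys that are absent simply fall back to the defaults passed to pop, pop_int and pop_bool above:

extractor = EndpointSpanExtractor.from_params(Params({
    "input_dim": 200,
    "num_width_embeddings": 10,
    "span_width_embedding_dim": 20,
}))
# "combination" falls back to "x,y"; bucket_widths and use_exclusive_start_indices default to False.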
Example 3: from_params
# Required import: from allennlp.common.params import Params [as alias]
# Or: from allennlp.common.params.Params import pop_int [as alias]
def from_params(cls, params: Params) -> "BidirectionalEndpointSpanExtractor":
input_dim = params.pop_int("input_dim")
forward_combination = params.pop("forward_combination", "y-x")
backward_combination = params.pop("backward_combination", "x-y")
num_width_embeddings = params.pop_int("num_width_embeddings", None)
span_width_embedding_dim = params.pop_int("span_width_embedding_dim", None)
bucket_widths = params.pop_bool("bucket_widths", False)
use_sentinels = params.pop_bool("use_sentinels", True)
return BidirectionalEndpointSpanExtractor(input_dim=input_dim,
forward_combination=forward_combination,
backward_combination=backward_combination,
num_width_embeddings=num_width_embeddings,
span_width_embedding_dim=span_width_embedding_dim,
bucket_widths=bucket_widths,
use_sentinels=use_sentinels)
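Note that pop_int with an explicit default of None simply returns None when the key is missing, without attempting an int() cast, which is what makes the width-embedding keys above genuinely optional. A minimal sketch of that behaviour, assuming the standard Params semantics:

p = Params({"input_dim": "300"})
p.pop_int("input_dim")                   # -> 300; the string from the config is cast to int
p.pop_int("num_width_embeddings", None)  # -> None; the missing key falls back to the default as-is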
Example 4: from_params
# Required import: from allennlp.common.params import Params [as alias]
# Or: from allennlp.common.params.Params import pop_int [as alias]
@classmethod
def from_params(cls, params: Params, instances: Iterable['adi.Instance'] = None):
    """
    There are two possible ways to build a vocabulary: from a
    collection of instances, using :func:`Vocabulary.from_instances`, or
    from a pre-saved vocabulary, using :func:`Vocabulary.from_files`.
    This method wraps both of these options, allowing their specification
    from a ``Params`` object, generated from a JSON configuration file.

    Parameters
    ----------
    params: Params, required.
    instances: Iterable['adi.Instance'], optional.
        If ``params`` doesn't contain a ``directory_path`` key,
        the ``Vocabulary`` can be built directly from this collection of instances.

    Returns
    -------
    A ``Vocabulary``.
    """
    vocabulary_directory = params.pop("directory_path", None)
    if not vocabulary_directory and not instances:
        raise ConfigurationError("You must provide either a Params object containing a "
                                 "directory_path key or a collection of instances to build "
                                 "a vocabulary from.")
    if vocabulary_directory and instances:
        logger.info("Loading Vocab from files instead of dataset.")

    if vocabulary_directory:
        params.assert_empty("Vocabulary - from files")
        return Vocabulary.from_files(vocabulary_directory)

    min_count = params.pop("min_count", None)
    max_vocab_size = params.pop_int("max_vocab_size", None)
    non_padded_namespaces = params.pop("non_padded_namespaces", DEFAULT_NON_PADDED_NAMESPACES)
    pretrained_files = params.pop("pretrained_files", {})
    only_include_pretrained_words = params.pop_bool("only_include_pretrained_words", False)
    params.assert_empty("Vocabulary - from dataset")
    return Vocabulary.from_instances(instances=instances,
                                     min_count=min_count,
                                     max_vocab_size=max_vocab_size,
                                     non_padded_namespaces=non_padded_namespaces,
                                     pretrained_files=pretrained_files,
                                     only_include_pretrained_words=only_include_pretrained_words)
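A hedged sketch of the two code paths described in the docstring (the directory path and the instances variable below are hypothetical):

# Path 1: load a pre-saved vocabulary from disk; any other key would trip assert_empty.
vocab = Vocabulary.from_params(Params({"directory_path": "/path/to/saved/vocabulary"}))

# Path 2: build from instances; max_vocab_size is read with pop_int, so "50000" (a string) would also work.
vocab = Vocabulary.from_params(Params({"max_vocab_size": 50000}), instances=my_instances)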
Example 5: create_kwargs
# Required import: from allennlp.common.params import Params [as alias]
# Or: from allennlp.common.params.Params import pop_int [as alias]
def create_kwargs(cls: Type[T], params: Params, **extras) -> Dict[str, Any]:
    """
    Given some class, a `Params` object, and potentially other keyword arguments,
    create a dict of keyword args suitable for passing to the class's constructor.

    The function does this by finding the class's constructor, matching the constructor
    arguments to entries in the `params` object, and instantiating values for the parameters
    using the type annotation and possibly a from_params method.

    Any values that are provided in the `extras` will just be used as is.
    For instance, you might provide an existing `Vocabulary` this way.
    """
    # Get the signature of the constructor.
    signature = inspect.signature(cls.__init__)
    kwargs: Dict[str, Any] = {}

    # Iterate over all the constructor parameters and their annotations.
    for name, param in signature.parameters.items():
        # Skip "self". You're not *required* to call the first parameter "self",
        # so in theory this logic is fragile, but if you don't call the self parameter
        # "self" you kind of deserve what happens.
        if name == "self":
            continue

        # If the annotation is a compound type like typing.Dict[str, int],
        # it will have an __origin__ field indicating `typing.Dict`
        # and an __args__ field indicating `(str, int)`. We capture both.
        annotation = remove_optional(param.annotation)
        origin = getattr(annotation, '__origin__', None)
        args = getattr(annotation, '__args__', [])

        # The parameter is optional if its default value is not the "no default" sentinel.
        default = param.default
        optional = default != _NO_DEFAULT

        # Some constructors expect extra non-parameter items, e.g. vocab: Vocabulary.
        # We check the provided `extras` for these and just use them if they exist.
        if name in extras:
            kwargs[name] = extras[name]

        # The next case is when the parameter type is itself constructible from_params.
        elif hasattr(annotation, 'from_params'):
            if name in params:
                # Our params have an entry for this, so we use that.
                subparams = params.pop(name)

                if takes_arg(annotation.from_params, 'extras'):
                    # If annotation.from_params accepts **extras, we need to pass them all along.
                    # For example, `BasicTextFieldEmbedder.from_params` requires a Vocabulary
                    # object, but `TextFieldEmbedder.from_params` does not.
                    subextras = extras
                else:
                    # Otherwise, only supply the ones that are actual args; any additional ones
                    # will cause a TypeError.
                    subextras = {k: v for k, v in extras.items()
                                 if takes_arg(annotation.from_params, k)}

                # In some cases we allow a string instead of a param dict, so
                # we need to handle that case separately.
                if isinstance(subparams, str):
                    kwargs[name] = annotation.by_name(subparams)()
                else:
                    kwargs[name] = annotation.from_params(params=subparams, **subextras)
            elif not optional:
                # Not optional and not supplied, that's an error!
                raise ConfigurationError(f"expected key {name} for {cls.__name__}")
            else:
                kwargs[name] = default

        # If the parameter type is a Python primitive, just pop it off
        # using the correct casting pop_xyz operation.
        elif annotation == str:
            kwargs[name] = (params.pop(name, default)
                            if optional
                            else params.pop(name))
        elif annotation == int:
            kwargs[name] = (params.pop_int(name, default)
                            if optional
                            else params.pop_int(name))
        elif annotation == bool:
            kwargs[name] = (params.pop_bool(name, default)
                            if optional
                            else params.pop_bool(name))
        elif annotation == float:
            kwargs[name] = (params.pop_float(name, default)
                            if optional
                            else params.pop_float(name))

        # This is special logic for handling types like Dict[str, TokenIndexer],
        # which we create by instantiating each value from_params and returning the resulting dict.
        elif origin == Dict and len(args) == 2 and hasattr(args[-1], 'from_params'):
            value_cls = annotation.__args__[-1]
            value_dict = {}

            for key, value_params in params.pop(name, Params({})).items():
                value_dict[key] = value_cls.from_params(params=value_params, **extras)

            kwargs[name] = value_dict
    # ......... the remainder of this function is omitted .........
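To make the primitive-type dispatch concrete, here is a small sketch (ToyModule is invented purely to exercise create_kwargs): a constructor argument annotated as int is routed to params.pop_int, so a string value in the config is still cast correctly, while an argument with a default stays optional:

class ToyModule:  # hypothetical class, for illustration only
    def __init__(self, hidden_size: int, dropout: float = 0.1) -> None:
        self.hidden_size = hidden_size
        self.dropout = dropout

kwargs = create_kwargs(ToyModule, Params({"hidden_size": "512"}))
module = ToyModule(**kwargs)
# hidden_size was matched to pop_int ("512" -> 512); dropout kept its default of 0.1 via pop_float.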
Example 6: from_params
# Required import: from allennlp.common.params import Params [as alias]
# Or: from allennlp.common.params.Params import pop_int [as alias]
def from_params(cls, params: Params) -> "PassThroughEncoder":
input_dim = params.pop_int("input_dim")
params.assert_empty(cls.__name__)
return PassThroughEncoder(input_dim=input_dim)
Example 7: from_params
# Required import: from allennlp.common.params import Params [as alias]
# Or: from allennlp.common.params.Params import pop_int [as alias]
def from_params(cls, params: Params) -> "SelfAttentiveSpanExtractor":
input_dim = params.pop_int("input_dim")
params.assert_empty(cls.__name__)
return SelfAttentiveSpanExtractor(input_dim=input_dim)
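Finally, a short sketch of the Params behaviour all of these from_params methods rely on (assuming the standard allennlp.common.params semantics): a required key without a default is an error when missing, and assert_empty catches keys that were never consumed, such as typos in a config:

p = Params({"input_dim": "768", "input_dmi": 768})  # "input_dmi" is a deliberate typo
p.pop_int("input_dim")           # -> 768; JSON configs often hold strings, pop_int casts them to int
# p.pop_int("hidden_dim")        # no default given: a missing key raises ConfigurationError
# p.assert_empty("SomeEncoder")  # would also raise, because "input_dmi" was never popped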