本文整理汇总了Python中allennlp.data.fields.Field方法的典型用法代码示例。如果您正苦于以下问题:Python fields.Field方法的具体用法?Python fields.Field怎么用?Python fields.Field使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类allennlp.data.fields
的用法示例。
在下文中一共展示了fields.Field方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: text_to_instance
# 需要导入模块: from allennlp.data import fields [as 别名]
# 或者: from allennlp.data.fields import Field [as 别名]
def text_to_instance(self, context_tokens: List[Token], tokens: List[Token], tags: List[str] = None,
                     intents: List[str] = None, dialog_act: Dict[str, Any] = None) -> Instance:  # type: ignore
    """
    Build an ``Instance`` from pre-tokenized input, because we don't have a
    tokenizer in this class.

    Parameters
    ----------
    context_tokens : dialogue-context tokens, indexed with ``self._token_indexers``.
    tokens : tokens of the current utterance.
    tags : optional per-token tag sequence aligned with ``tokens``.
    intents : optional multi-label intent annotations.
    dialog_act : optional dialog-act annotation, stored in the metadata field
        (an absent annotation is stored as an empty dict).
    """
    # pylint: disable=arguments-differ
    fields: Dict[str, Field] = {}
    fields["context_tokens"] = TextField(context_tokens, self._token_indexers)
    fields["tokens"] = TextField(tokens, self._token_indexers)
    if tags is not None:
        fields["tags"] = SequenceLabelField(tags, fields["tokens"])
    if intents is not None:
        fields["intents"] = MultiLabelField(intents, label_namespace="intent_labels")
    # BUG FIX: the original assigned fields["metadata"] twice — a first
    # assignment without 'dialog_act' was always overwritten by the
    # if/else below, so the dead store has been removed.
    fields["metadata"] = MetadataField({
        "words": [x.text for x in tokens],
        'dialog_act': dialog_act if dialog_act is not None else {},
    })
    return Instance(fields)
示例2: text_to_instance
# 需要导入模块: from allennlp.data import fields [as 别名]
# 或者: from allennlp.data.fields import Field [as 别名]
def text_to_instance(self, tokens: List[Token], tags: List[str] = None, domain: str = None,
                     intent: str = None, dialog_act: Dict[str, Any] = None) -> Instance:  # type: ignore
    """
    Build an ``Instance`` from pre-tokenized input, because we don't have a
    tokenizer in this class.  Tags, domain, intent and dialog act are all
    optional annotations.
    """
    # pylint: disable=arguments-differ
    token_field = TextField(tokens, self._token_indexers)
    fields: Dict[str, Field] = {"tokens": token_field}
    if tags:
        fields["tags"] = SequenceLabelField(tags, token_field)
    if domain:
        fields["domain"] = LabelField(domain, label_namespace="domain_labels")
    if intent:
        fields["intent"] = LabelField(intent, label_namespace="intent_labels")
    words = [token.text for token in tokens]
    if dialog_act is not None:
        fields["metadata"] = MetadataField({"words": words, 'dialog_act': dialog_act})
    else:
        fields["metadata"] = MetadataField({"words": words, 'dialog_act': {}})
    return Instance(fields)
示例3: text_to_instance
# 需要导入模块: from allennlp.data import fields [as 别名]
# 或者: from allennlp.data.fields import Field [as 别名]
def text_to_instance(self,  # type: ignore
                     tokens: List[str],
                     entity_1: Tuple[int, int],
                     entity_2: Tuple[int, int],
                     label: str = None) -> Instance:
    """
    Build a relation-classification instance in the OpenAI-transformer
    format: both entity spans are copied in front of the standardized
    sentence, separated by delimiter tokens, and the whole sequence is
    wrapped in ``__start__`` ... ``__clf__`` markers.

    Parameters
    ----------
    tokens : the sentence tokens as strings.
    entity_1, entity_2 : inclusive ``(start, end)`` token indices of each
        entity span.  FIX: annotated as ``Tuple[int, int]`` — the original
        ``Tuple[int]`` declared a 1-tuple although both endpoints are
        indexed below.
    label : optional relation label.
    """
    # pylint: disable=arguments-differ
    fields: Dict[str, Field] = {}
    tokens = [OpenAISplitter._standardize(token) for token in tokens]
    # RHS is evaluated before rebinding, so the trailing `tokens` is the
    # full standardized sentence.
    tokens = (['__start__'] + tokens[entity_1[0]:entity_1[1] + 1] + ['__del1__']
              + tokens[entity_2[0]:entity_2[1] + 1] + ['__del2__'] + tokens + ['__clf__'])
    fields['sentence'] = TextField([Token(text=t) for t in tokens], self._token_indexers)
    if label:
        fields['label'] = LabelField(label)
    return Instance(fields)
示例4: text_to_instance
# 需要导入模块: from allennlp.data import fields [as 别名]
# 或者: from allennlp.data.fields import Field [as 别名]
def text_to_instance(self,  # type: ignore
                     item_id: Any,
                     question_text: str,
                     choice_text_list: List[str],
                     answer_id: int
                     ) -> Instance:
    """
    Multiple-choice QA instance: the tokenized question, a ``ListField``
    of tokenized answer choices, the gold answer index, and raw-text
    metadata for inspection/debugging.
    """
    # pylint: disable=arguments-differ
    question_tokens = self._tokenizer.tokenize(question_text)
    choice_token_lists = [self._tokenizer.tokenize(choice) for choice in choice_text_list]

    fields: Dict[str, Field] = {
        'question': TextField(question_tokens, self._token_indexers),
        'choices_list': ListField(
            [TextField(choice_tokens, self._token_indexers)
             for choice_tokens in choice_token_lists]),
        'label': LabelField(answer_id, skip_indexing=True),
        "metadata": MetadataField({
            "id": item_id,
            "question_text": question_text,
            "choice_text_list": choice_text_list,
            "question_tokens": [token.text for token in question_tokens],
            "choice_tokens_list": [[token.text for token in choice_tokens]
                                   for choice_tokens in choice_token_lists],
        }),
    }
    return Instance(fields)
示例5: text_to_instance
# 需要导入模块: from allennlp.data import fields [as 别名]
# 或者: from allennlp.data.fields import Field [as 别名]
def text_to_instance(self,
                     premise: str,
                     hypothesis: str,
                     hypothesis_structure: str,
                     label: str = None) -> Instance:
    """
    Tokenize a premise/hypothesis pair, keeping at most the last
    ``self._max_tokens`` tokens of each, attach the hypothesis structure
    via ``self._add_structure_to_fields``, and wrap it all in an
    ``Instance``.
    """
    premise_tokens = self._tokenizer.tokenize(premise)[-self._max_tokens:]
    hypothesis_tokens = self._tokenizer.tokenize(hypothesis)[-self._max_tokens:]

    fields: Dict[str, Field] = {
        'premise': TextField(premise_tokens, self._token_indexers),
        'hypothesis': TextField(hypothesis_tokens, self._token_indexers),
        'metadata': MetadataField({
            'premise': premise,
            'hypothesis': hypothesis,
            'premise_tokens': [t.text for t in premise_tokens],
            'hypothesis_tokens': [t.text for t in hypothesis_tokens],
        }),
    }
    self._add_structure_to_fields(hypothesis_structure, fields)
    if label:
        fields['label'] = LabelField(label)
    return Instance(fields)
示例6: text_to_instance
# 需要导入模块: from allennlp.data import fields [as 别名]
# 或者: from allennlp.data.fields import Field [as 别名]
def text_to_instance(self,  # type: ignore
                     premise: str,
                     hypothesis: str,
                     pid: str = None,
                     label: str = None) -> Instance:
    """
    Whitespace-tokenize a premise/hypothesis pair for evidence selection,
    optionally truncating both to ``self.max_l`` tokens.  The label goes
    into the ``selection_labels`` namespace.
    """
    # Removing code for parentheses in NLI
    p_tokens = [Token(word) for word in premise.split(' ')]
    h_tokens = [Token(word) for word in hypothesis.split(' ')]
    if self.max_l is not None:
        p_tokens, h_tokens = p_tokens[:self.max_l], h_tokens[:self.max_l]

    fields: Dict[str, Field] = {
        'premise': TextField(p_tokens, self._token_indexers),
        'hypothesis': TextField(h_tokens, self._token_indexers),
    }
    if label:
        fields['selection_label'] = LabelField(label, label_namespace='selection_labels')
    if pid:
        fields['pid'] = IdField(pid)
    return Instance(fields)
示例7: text_to_instance
# 需要导入模块: from allennlp.data import fields [as 别名]
# 或者: from allennlp.data.fields import Field [as 别名]
def text_to_instance(self,  # type: ignore
                     premise: str,
                     hypothesis: str,
                     pid: str = None,
                     label: str = None) -> Instance:
    """
    Whitespace-tokenize a premise/hypothesis pair, optionally truncating
    both to ``self.max_l`` tokens, with the NLI label in the default
    ``labels`` namespace.
    """
    # Removing code for parentheses in NLI
    p_tokens = [Token(word) for word in premise.split(' ')]
    h_tokens = [Token(word) for word in hypothesis.split(' ')]
    if self.max_l is not None:
        p_tokens, h_tokens = p_tokens[:self.max_l], h_tokens[:self.max_l]

    fields: Dict[str, Field] = {
        'premise': TextField(p_tokens, self._token_indexers),
        'hypothesis': TextField(h_tokens, self._token_indexers),
    }
    if label:
        fields['label'] = LabelField(label, label_namespace='labels')
    if pid:
        fields['pid'] = IdField(pid)
    return Instance(fields)
示例8: text_to_instance
# 需要导入模块: from allennlp.data import fields [as 别名]
# 或者: from allennlp.data.fields import Field [as 别名]
def text_to_instance(
    self,  # type: ignore
    query: List[str],
    slot_tags: List[str] = None,
    sql_template: str = None,
) -> Instance:
    """
    Template-based text-to-SQL instance: the query tokens plus, when both
    annotations are present, the per-token slot tags and the SQL-template
    label.
    """
    token_field = TextField([Token(word) for word in query], self._token_indexers)
    fields: Dict[str, Field] = {"tokens": token_field}
    # Slot tags are only meaningful relative to a known template, so both
    # annotations are required together.
    if slot_tags is not None and sql_template is not None:
        fields["slot_tags"] = SequenceLabelField(slot_tags, token_field,
                                                 label_namespace="slot_tags")
        fields["template"] = LabelField(sql_template, label_namespace="template_labels")
    return Instance(fields)
示例9: text_to_instance
# 需要导入模块: from allennlp.data import fields [as 别名]
# 或者: from allennlp.data.fields import Field [as 别名]
def text_to_instance(self,  # pylint: disable=arguments-differ
                     premise: str,
                     hypothesis: str,
                     label: str = None) -> Instance:
    """
    Tokenize an NLI premise/hypothesis pair, keeping only the last
    ``self._max_tokens`` tokens of each, and wrap them in an ``Instance``.
    """
    def _tail_tokens(text: str) -> List[Token]:
        # Re-wrap in plain Tokens so tokenizer-specific attributes don't leak.
        return [Token(t.text) for t in self._tokenizer.tokenize(text)[-self._max_tokens:]]

    fields: Dict[str, Field] = {}
    fields['premise'] = TextField(_tail_tokens(premise), self._token_indexers)
    fields['hypothesis'] = TextField(_tail_tokens(hypothesis), self._token_indexers)
    if label:
        fields['label'] = LabelField(label)
    return Instance(fields)
示例10: text_to_instance
# 需要导入模块: from allennlp.data import fields [as 别名]
# 或者: from allennlp.data.fields import Field [as 别名]
def text_to_instance(self,  # type: ignore
                     premise: str,
                     hypotheses: List[str],
                     label: int = None) -> Instance:
    """
    One premise against several candidate hypotheses: each hypothesis gets
    its own ``hypothesis{i}`` field, and ``label`` is the index of the
    correct one.
    """
    # pylint: disable=arguments-differ
    fields: Dict[str, Field] = {
        'premise': TextField(self._tokenizer.tokenize(premise), self._token_indexers),
    }
    # This could be another way to get randomness
    for index, hypothesis in enumerate(hypotheses):
        fields['hypothesis{}'.format(index)] = TextField(
            self._tokenizer.tokenize(hypothesis), self._token_indexers)
    if label is not None:
        fields['label'] = LabelField(label, skip_indexing=True)
    return Instance(fields)
示例11: text_to_instance
# 需要导入模块: from allennlp.data import fields [as 别名]
# 或者: from allennlp.data.fields import Field [as 别名]
def text_to_instance(self,  # type: ignore
                     premise: str,
                     hypotheses: List[str],
                     label: int = None) -> Instance:
    """
    Hypothesis-only variant of the multiple-choice reader: ``premise`` is
    accepted for interface compatibility but deliberately not encoded.
    """
    # pylint: disable=arguments-differ
    fields: Dict[str, Field] = {}
    # This could be another way to get randomness
    for index, hypothesis in enumerate(hypotheses):
        fields['hypothesis{}'.format(index)] = TextField(
            self._tokenizer.tokenize(hypothesis), self._token_indexers)
    if label is not None:
        fields['label'] = LabelField(label, skip_indexing=True)
    return Instance(fields)
示例12: text_to_instance
# 需要导入模块: from allennlp.data import fields [as 别名]
# 或者: from allennlp.data.fields import Field [as 别名]
def text_to_instance(self, vec: np.ndarray = None) -> Instance:  # type: ignore
    """
    Wrap a pre-computed feature vector in an ``Instance``.

    Parameters
    ----------
    vec : ``np.ndarray``, effectively required despite the ``None`` default.
        The pre-computed vector to classify; it is stored directly in an
        ``ArrayField``.
        NOTE(review): annotation corrected from ``str`` and docstring
        rewritten — the old one described ``text``/``label`` parameters
        this method does not have.  Confirm callers always pass an array.

    Returns
    -------
    An ``Instance`` containing a single field:
        tokens : ``ArrayField``
            The array holding ``vec``.
    """
    # pylint: disable=arguments-differ
    fields: Dict[str, Field] = {}
    fields['tokens'] = ArrayField(vec)
    return Instance(fields)
示例13: text_to_instance
# 需要导入模块: from allennlp.data import fields [as 别名]
# 或者: from allennlp.data.fields import Field [as 别名]
def text_to_instance(self,  # type: ignore
                     item_id: Any,
                     question_text: str,
                     choice_text_list: List[str],
                     answer_id: int) -> Instance:
    """
    Multiple-choice QA instance: the tokenized question, a ``ListField``
    of tokenized choices, the gold answer index, and raw-text metadata.
    """
    # pylint: disable=arguments-differ
    question_tokens = self._tokenizer.tokenize(question_text)
    per_choice_tokens = [self._tokenizer.tokenize(choice) for choice in choice_text_list]

    fields: Dict[str, Field] = {}
    fields['question'] = TextField(question_tokens, self._token_indexers)
    fields['choices_list'] = ListField(
        [TextField(choice_tokens, self._token_indexers)
         for choice_tokens in per_choice_tokens])
    fields['label'] = LabelField(answer_id, skip_indexing=True)
    fields["metadata"] = MetadataField({
        "id": item_id,
        "question_text": question_text,
        "choice_text_list": choice_text_list,
        "question_tokens": [token.text for token in question_tokens],
        "choice_tokens_list": [[token.text for token in choice_tokens]
                               for choice_tokens in per_choice_tokens],
    })
    return Instance(fields)
示例14: text_to_instance
# 需要导入模块: from allennlp.data import fields [as 别名]
# 或者: from allennlp.data.fields import Field [as 别名]
def text_to_instance(self, state: np.ndarray, action: int = None) -> Instance:  # type: ignore
    """
    Wrap an observed state array and, when supplied, the discrete action
    taken in that state.
    """
    # pylint: disable=arguments-differ
    fields: Dict[str, Field] = {"states": ArrayField(state)}
    if action is not None:
        fields["actions"] = LabelField(action, skip_indexing=True)
    return Instance(fields)
示例15: text_to_instance
# 需要导入模块: from allennlp.data import fields [as 别名]
# 或者: from allennlp.data.fields import Field [as 别名]
def text_to_instance(self,  # type: ignore
                     sentence: str,
                     head: str,
                     tail: str,
                     head_type: str=None,
                     tail_type: str=None,
                     label: str=None) -> Instance:
    """
    Build a relation-classification instance from a raw sentence plus the
    head and tail entity mentions.  A lowercased ``head#tail[#label]``
    identifier is recorded in the metadata field.
    """
    # pylint: disable=arguments-differ
    fields: Dict[str, Field] = {}
    instance_id = f'{head}#{tail}'
    if label:
        instance_id = f'{instance_id}#{label}'
    fields['metadata'] = MetadataField({'instance_id': instance_id.lower()})

    sentence_tokens = self._token_splitter.split_words(sentence)
    head_tokens = self._token_splitter.split_words(head)
    tail_tokens = self._token_splitter.split_words(tail)

    # TODO: this should not be done here
    if self._masking_mode == 'ner_least_specific':
        logger.info(f"Using masking mode 'ner_least_specific'.")
        # NOTE(review): head_type / tail_type are concatenated as token
        # lists here, so in this mode callers presumably pass token lists
        # despite the ``str`` annotation — confirm upstream.
        tokens = ([Token('__start__')]
                  + head_tokens + [Token('__del1__')] + head_type + [Token('__ent1__')]
                  + tail_tokens + [Token('__del2__')] + tail_type + [Token('__ent2__')]
                  + sentence_tokens + [Token('__clf__')])
    else:
        tokens = ([Token('__start__')] + head_tokens + [Token('__del1__')]
                  + tail_tokens + [Token('__del2__')] + sentence_tokens + [Token('__clf__')])
    fields['sentence'] = TextField(tokens, self._token_indexers)
    if label:
        fields['label'] = LabelField(label)
    return Instance(fields)