This article collects typical usage examples of Python's typing.Counter. If you are wondering what typing.Counter is for, how to use it, or what it looks like in real code, the hand-picked samples below may help. You can also read further about the typing module from which it comes.
The following presents 15 code examples of typing.Counter, sorted by popularity by default.
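All of the examples below share the same basic pattern: the return type is annotated with typing.Counter[...] while the value is built with collections.Counter at runtime. Here is a minimal, self-contained sketch of that pattern (the count_words function is illustrative only, not taken from any of the libraries below):

from collections import Counter
from typing import Counter as TCounter
from typing import Iterable


def count_words(words: Iterable[str]) -> TCounter[str]:
    """Count how often each word appears in an iterable of words."""
    # typing.Counter[str] is only the generic annotation; the runtime
    # object is an ordinary collections.Counter.
    return Counter(words)


print(count_words(['a', 'b', 'a']))  # Counter({'a': 2, 'b': 1})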
Example 1: count_functions
# Required import: import typing  [as alias]
# Or: from typing import Counter  [as alias]
def count_functions(graph: BELGraph) -> typing.Counter[str]:
    """Count the frequency of each function present in a graph.

    :param graph: A BEL graph
    :return: A Counter from {function: frequency}
    """
    return Counter(_function_iterator(graph))
Example 2: count_namespaces
# Required import: import typing  [as alias]
# Or: from typing import Counter  [as alias]
def count_namespaces(graph: BELGraph) -> typing.Counter[str]:
    """Count the frequency of each namespace across all nodes (that have namespaces).

    :param graph: A BEL graph
    :return: A Counter from {namespace: frequency}
    """
    return Counter(_iterate_namespaces(graph))
Example 3: count_names_by_namespace
# Required import: import typing  [as alias]
# Or: from typing import Counter  [as alias]
def count_names_by_namespace(graph: BELGraph, namespace: str) -> typing.Counter[str]:
    """Count the frequency of each name from a given namespace that appears in the graph.

    :param graph: A BEL graph
    :param namespace: A namespace prefix
    :return: A Counter from {name: frequency}
    :raises IndexError: if the namespace is not defined in the graph.
    """
    if namespace not in graph.defined_namespace_keywords:
        raise IndexError('{} is not defined in {}'.format(namespace, graph))
    return Counter(_namespace_filtered_iterator(graph, namespace))
Example 4: count_variants
# Required import: import typing  [as alias]
# Or: from typing import Counter  [as alias]
def count_variants(graph: BELGraph) -> typing.Counter[str]:
    """Count how many of each type of variant a graph has.

    :param graph: A BEL graph
    """
    return Counter(
        variant_data[KIND]
        for data in graph
        if has_variant(graph, data)
        for variant_data in data[VARIANTS]
    )
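The nested generator expression above (an if clause followed by a second for) can be hard to parse at first. Here is a standalone sketch of the same pattern using hypothetical plain-dict nodes in place of BEL graph data; the 'variants'/'kind' keys merely stand in for the VARIANTS/KIND constants:

from collections import Counter

nodes = [
    {'variants': [{'kind': 'hmod'}, {'kind': 'pmod'}]},
    {'variants': [{'kind': 'pmod'}]},
    {},  # a node without any variants
]

variant_counts = Counter(
    variant['kind']
    for node in nodes
    if 'variants' in node           # plays the role of has_variant()
    for variant in node['variants']
)
print(variant_counts)  # Counter({'pmod': 2, 'hmod': 1})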
Example 5: get_top_hubs
# Required import: import typing  [as alias]
# Or: from typing import Counter  [as alias]
def get_top_hubs(graph: BELGraph, *, n: Optional[int] = 15) -> List[Tuple[BaseEntity, int]]:
    """Get the top hubs in the graph by degree.

    :param graph: A BEL graph
    :param n: The number of top hubs to return. If None, returns all nodes.
    """
    return Counter(dict(graph.degree())).most_common(n=n)
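Because BELGraph is built on networkx, the Counter(dict(graph.degree())).most_common(n) idiom works on any networkx graph. A small sketch with a plain networkx.Graph (the toy edges are purely illustrative):

import networkx as nx
from collections import Counter

graph = nx.Graph()
graph.add_edges_from([('a', 'b'), ('a', 'c'), ('a', 'd'), ('b', 'c')])

# Wrap the degree view in a Counter to reuse most_common() for ranking.
top_hubs = Counter(dict(graph.degree())).most_common(2)
print(top_hubs)  # [('a', 3), ('b', 2)]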
Example 6: count_modifications
# Required import: import typing  [as alias]
# Or: from typing import Counter  [as alias]
def count_modifications(graph: BELGraph) -> Counter:
    """Get a modifications count dictionary."""
    return Counter(remove_falsy_values({
        'Translocations': len(get_translocated(graph)),
        'Degradations': len(get_degradations(graph)),
        'Molecular Activities': len(get_activities(graph)),
    }))
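remove_falsy_values is a helper from the same codebase; judging by its name and how it is used here, it presumably drops zero-valued entries before the Counter is built. A minimal stand-in with that assumed behavior (not the library's actual implementation):

from collections import Counter


def remove_falsy_values(counts: dict) -> dict:
    """Drop entries whose values are falsy (e.g. zero counts)."""
    return {key: value for key, value in counts.items() if value}


print(Counter(remove_falsy_values({'Translocations': 0, 'Degradations': 3})))
# Counter({'Degradations': 3})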
Example 7: count_error_types
# Required import: import typing  [as alias]
# Or: from typing import Counter  [as alias]
def count_error_types(graph: BELGraph) -> typing.Counter[str]:
    """Count the occurrence of each type of error in a graph.

    :return: A Counter of {error type: frequency}
    """
    return Counter(
        exc.__class__.__name__
        for _, exc, _ in graph.warnings
    )
Example 8: count_naked_names
# Required import: import typing  [as alias]
# Or: from typing import Counter  [as alias]
def count_naked_names(graph: BELGraph) -> typing.Counter[str]:
    """Count the frequency of each naked name (a name without a namespace).

    :return: A Counter from {name: frequency}
    """
    return Counter(_naked_names_iter(graph))
Example 9: __init__
# Required import: import typing  [as alias]
# Or: from typing import Counter  [as alias]
def __init__(self, corpus: Optional[Corpus] = None) -> None:
    r"""Initialize NGramCorpus.

    Parameters
    ----------
    corpus : Corpus
        The :py:class:`Corpus` from which to initialize the n-gram corpus.
        By default, this is None, which initializes an empty NGramCorpus.
        This can then be populated using NGramCorpus methods.

    Raises
    ------
    TypeError
        Corpus argument must be None or of type abydos.Corpus

    Example
    -------
    >>> tqbf = 'The quick brown fox jumped over the lazy dog.\n'
    >>> tqbf += 'And then it slept.\n And the dog ran off.'
    >>> ngcorp = NGramCorpus(Corpus(tqbf))


    .. versionadded:: 0.3.0

    """
    self.ngcorpus = Counter()  # type: TCounter[Optional[str]]

    if corpus is None:
        return
    elif isinstance(corpus, Corpus):
        self.corpus_importer(corpus)
    else:
        raise TypeError(
            'Corpus argument must be None or of type abydos.corpus.Corpus. '
            + str(type(corpus))
            + ' found.'
        )
Example 10: __init__
# Required import: import typing  [as alias]
# Or: from typing import Counter  [as alias]
def __init__(
    self,
    alphabet: Optional[
        Union[TCounter[str], Sequence[str], Set[str], int]
    ] = None,
    tokenizer: Optional[_Tokenizer] = None,
    **kwargs: Any
) -> None:
    """Initialize GilbertWells instance.

    Parameters
    ----------
    alphabet : Counter, collection, int, or None
        This represents the alphabet of possible tokens.
        See :ref:`alphabet <alphabet>` description in
        :py:class:`_TokenDistance` for details.
    tokenizer : _Tokenizer
        A tokenizer instance from the :py:mod:`abydos.tokenizer` package
    **kwargs
        Arbitrary keyword arguments

    Other Parameters
    ----------------
    qval : int
        The length of each q-gram. Using this parameter and tokenizer=None
        will cause the instance to use the QGram tokenizer with this
        q value.


    .. versionadded:: 0.4.0

    """
    super(GilbertWells, self).__init__(
        alphabet=alphabet, tokenizer=tokenizer, **kwargs
    )
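Examples 10 and 12-15 are all constructors of abydos token-distance classes that simply forward their arguments to _TokenDistance. A hedged usage sketch for the GilbertWells case (abydos distance classes generally expose sim() and dist(); the exact scores are omitted here because they depend on the measure):

from abydos.distance import GilbertWells

# Passing qval together with tokenizer=None makes the instance fall back
# to the QGram tokenizer with this q value, as the docstring above notes.
cmp = GilbertWells(qval=2)
print(cmp.sim('Niall', 'Neil'))   # similarity score
print(cmp.dist('Niall', 'Neil'))  # corresponding distance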
Example 11: get_count
# Required import: import typing  [as alias]
# Or: from typing import Counter  [as alias]
def get_count(
    self,
    ngram: Union[str, List[str]],
    corpus: Optional[TCounter[Optional[str]]] = None,
) -> int:
    r"""Get the count of an n-gram in the corpus.

    Parameters
    ----------
    ngram : str or List[str]
        The n-gram to retrieve the count of from the n-gram corpus
    corpus : Counter[str] or None
        The corpus

    Returns
    -------
    int
        The n-gram count

    Examples
    --------
    >>> tqbf = 'The quick brown fox jumped over the lazy dog.\n'
    >>> tqbf += 'And then it slept.\n And the dog ran off.'
    >>> ngcorp = NGramCorpus(Corpus(tqbf))
    >>> ngcorp.get_count('the')
    2
    >>> ngcorp.get_count('fox')
    1


    .. versionadded:: 0.3.0

    """
    if not corpus:
        corpus = self.ngcorpus

    # If ngram is empty, we're at our leaf node and should return the
    # value stored under None.
    if not ngram:
        return corpus[None]

    # Support strings or lists/tuples by splitting strings into tokens.
    if isinstance(ngram, str):
        ngram = ngram.split()

    # If ngram is not empty, check whether the next element is in the
    # corpus; if so, recurse -- if not, return 0.
    if ngram[0] in corpus:
        return self.get_count(
            ngram[1:],
            cast(Optional[TCounter[Optional[str]]], corpus[ngram[0]]),
        )
    return 0
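The recursion above walks a trie of nested Counters: each level maps a token to a child Counter, and the count of the n-gram ending at a node is stored under the key None. A standalone sketch of that lookup (the hand-built corpus is an assumption about the structure corpus_importer produces):

from collections import Counter

corpus = Counter()
corpus['the'] = Counter({None: 2, 'dog': Counter({None: 1})})
corpus['fox'] = Counter({None: 1})


def get_count(ngram, corpus):
    if not ngram:                  # leaf node: return the stored count
        return corpus[None]
    if isinstance(ngram, str):     # accept strings or token lists
        ngram = ngram.split()
    if ngram[0] in corpus:         # descend one level and recurse
        return get_count(ngram[1:], corpus[ngram[0]])
    return 0


print(get_count('the dog', corpus))  # 1
print(get_count('the cat', corpus))  # 0
print(get_count('fox', corpus))      # 1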
Example 12: __init__
# Required import: import typing  [as alias]
# Or: from typing import Counter  [as alias]
def __init__(
    self,
    alphabet: Optional[
        Union[TCounter[str], Sequence[str], Set[str], int]
    ] = None,
    tokenizer: Optional[_Tokenizer] = None,
    intersection_type: str = 'crisp',
    **kwargs: Any
) -> None:
    """Initialize KuhnsIV instance.

    Parameters
    ----------
    alphabet : Counter, collection, int, or None
        This represents the alphabet of possible tokens.
        See :ref:`alphabet <alphabet>` description in
        :py:class:`_TokenDistance` for details.
    tokenizer : _Tokenizer
        A tokenizer instance from the :py:mod:`abydos.tokenizer` package
    intersection_type : str
        Specifies the intersection type, and set type as a result:
        See :ref:`intersection_type <intersection_type>` description in
        :py:class:`_TokenDistance` for details.
    **kwargs
        Arbitrary keyword arguments

    Other Parameters
    ----------------
    qval : int
        The length of each q-gram. Using this parameter and tokenizer=None
        will cause the instance to use the QGram tokenizer with this
        q value.
    metric : _Distance
        A string distance measure class for use in the ``soft`` and
        ``fuzzy`` variants.
    threshold : float
        A threshold value, similarities above which are counted as
        members of the intersection for the ``fuzzy`` variant.


    .. versionadded:: 0.4.0

    """
    super(KuhnsIV, self).__init__(
        alphabet=alphabet,
        tokenizer=tokenizer,
        intersection_type=intersection_type,
        **kwargs
    )
Example 13: __init__
# Required import: import typing  [as alias]
# Or: from typing import Counter  [as alias]
def __init__(
    self,
    alphabet: Optional[
        Union[TCounter[str], Sequence[str], Set[str], int]
    ] = None,
    tokenizer: Optional[_Tokenizer] = None,
    intersection_type: str = 'crisp',
    **kwargs: Any
) -> None:
    """Initialize PearsonHeronII instance.

    Parameters
    ----------
    alphabet : Counter, collection, int, or None
        This represents the alphabet of possible tokens.
        See :ref:`alphabet <alphabet>` description in
        :py:class:`_TokenDistance` for details.
    tokenizer : _Tokenizer
        A tokenizer instance from the :py:mod:`abydos.tokenizer` package
    intersection_type : str
        Specifies the intersection type, and set type as a result:
        See :ref:`intersection_type <intersection_type>` description in
        :py:class:`_TokenDistance` for details.
    **kwargs
        Arbitrary keyword arguments

    Other Parameters
    ----------------
    qval : int
        The length of each q-gram. Using this parameter and tokenizer=None
        will cause the instance to use the QGram tokenizer with this
        q value.
    metric : _Distance
        A string distance measure class for use in the ``soft`` and
        ``fuzzy`` variants.
    threshold : float
        A threshold value, similarities above which are counted as
        members of the intersection for the ``fuzzy`` variant.


    .. versionadded:: 0.4.0

    """
    super(PearsonHeronII, self).__init__(
        alphabet=alphabet,
        tokenizer=tokenizer,
        intersection_type=intersection_type,
        **kwargs
    )
Example 14: __init__
# Required import: import typing  [as alias]
# Or: from typing import Counter  [as alias]
def __init__(
    self,
    alphabet: Optional[
        Union[TCounter[str], Sequence[str], Set[str], int]
    ] = None,
    tokenizer: Optional[_Tokenizer] = None,
    intersection_type: str = 'crisp',
    **kwargs: Any
) -> None:
    """Initialize BaulieuXV instance.

    Parameters
    ----------
    alphabet : Counter, collection, int, or None
        This represents the alphabet of possible tokens.
        See :ref:`alphabet <alphabet>` description in
        :py:class:`_TokenDistance` for details.
    tokenizer : _Tokenizer
        A tokenizer instance from the :py:mod:`abydos.tokenizer` package
    intersection_type : str
        Specifies the intersection type, and set type as a result:
        See :ref:`intersection_type <intersection_type>` description in
        :py:class:`_TokenDistance` for details.
    **kwargs
        Arbitrary keyword arguments

    Other Parameters
    ----------------
    qval : int
        The length of each q-gram. Using this parameter and tokenizer=None
        will cause the instance to use the QGram tokenizer with this
        q value.
    metric : _Distance
        A string distance measure class for use in the ``soft`` and
        ``fuzzy`` variants.
    threshold : float
        A threshold value, similarities above which are counted as
        members of the intersection for the ``fuzzy`` variant.


    .. versionadded:: 0.4.0

    """
    super(BaulieuXV, self).__init__(
        alphabet=alphabet,
        tokenizer=tokenizer,
        intersection_type=intersection_type,
        **kwargs
    )
Example 15: __init__
# Required import: import typing  [as alias]
# Or: from typing import Counter  [as alias]
def __init__(
    self,
    alphabet: Optional[
        Union[TCounter[str], Sequence[str], Set[str], int]
    ] = None,
    tokenizer: Optional[_Tokenizer] = None,
    intersection_type: str = 'crisp',
    **kwargs: Any
) -> None:
    """Initialize KentFosterII instance.

    Parameters
    ----------
    alphabet : Counter, collection, int, or None
        This represents the alphabet of possible tokens.
        See :ref:`alphabet <alphabet>` description in
        :py:class:`_TokenDistance` for details.
    tokenizer : _Tokenizer
        A tokenizer instance from the :py:mod:`abydos.tokenizer` package
    intersection_type : str
        Specifies the intersection type, and set type as a result:
        See :ref:`intersection_type <intersection_type>` description in
        :py:class:`_TokenDistance` for details.
    **kwargs
        Arbitrary keyword arguments

    Other Parameters
    ----------------
    qval : int
        The length of each q-gram. Using this parameter and tokenizer=None
        will cause the instance to use the QGram tokenizer with this
        q value.
    metric : _Distance
        A string distance measure class for use in the ``soft`` and
        ``fuzzy`` variants.
    threshold : float
        A threshold value, similarities above which are counted as
        members of the intersection for the ``fuzzy`` variant.


    .. versionadded:: 0.4.0

    """
    super(KentFosterII, self).__init__(
        alphabet=alphabet,
        tokenizer=tokenizer,
        intersection_type=intersection_type,
        **kwargs
    )