

Python collections.Sequence Method Code Examples

This article collects typical code examples of the collections.Sequence method in Python. If you are wondering how collections.Sequence is used in practice, or what it looks like in real code, the curated examples below may help. You can also explore further usage examples from the collections module.


The following presents 15 code examples of the collections.Sequence method, sorted by popularity by default.
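Before diving into the examples, here is a minimal sketch of the pattern they all share: using isinstance with Sequence to tell a list or tuple apart from a scalar, while excluding strings (which are also Sequences). This sketch is not taken from any of the projects below; the try/except import is an extra hedge, since the collections.Sequence alias was removed in Python 3.10 in favor of collections.abc.Sequence.

try:
    from collections import Sequence          # works on Python < 3.10
except ImportError:
    from collections.abc import Sequence      # required on Python 3.10+

def describe(value):
    """Return a label saying whether value is treated as a sequence."""
    if isinstance(value, Sequence) and not isinstance(value, str):
        return 'sequence of length {}'.format(len(value))
    return 'scalar: {!r}'.format(value)

print(describe([1, 2, 3]))   # sequence of length 3
print(describe('abc'))       # scalar: 'abc' (strings are excluded explicitly)
print(describe(42))          # scalar: 42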

Example 1: aggregate_output

# Required module import: import collections [as alias]
# Or: from collections import Sequence [as alias]
def aggregate_output(self):
        """Given a list of predictions from net, make a decision based on aggreagation rule"""
        if isinstance(self.predictions, collections.Sequence):
            logits = []
            for pred in self.predictions:
                logit = self.net.apply_argmax_softmax(pred).unsqueeze(0)
                logits.append(logit)

            logits = torch.cat(logits, 0)
            if self.aggregation == 'max':
                self.pred = logits.data.max(0)[0].max(1)
            elif self.aggregation == 'mean':
                self.pred = logits.data.mean(0).max(1)
            elif self.aggregation == 'weighted_mean':
                self.pred = (self.aggregation_weight.expand_as(logits) * logits).data.mean(0).max(1)
            elif self.aggregation == 'idx':
                self.pred = logits[self.aggregation_param].data.max(1)
        else:
            # Apply a softmax and return a segmentation map
            self.logits = self.net.apply_argmax_softmax(self.predictions)
            self.pred = self.logits.data.max(1) 
Developer: ozan-oktay, Project: Attention-Gated-Networks, Lines: 23, Source: aggregated_classifier.py

Example 2: __call__

# Required module import: import collections [as alias]
# Or: from collections import Sequence [as alias]
def __call__(self, image):
        if isinstance(self.sigma, collections.Sequence):
            sigma = random_num_generator(
                self.sigma, random_state=self.random_state)
        else:
            sigma = self.sigma
        if isinstance(self.mean, collections.Sequence):
            mean = random_num_generator(
                self.mean, random_state=self.random_state)
        else:
            mean = self.mean
        row, col, ch = image.shape
        gauss = self.random_state.normal(mean, sigma, (row, col, ch))
        gauss = gauss.reshape(row, col, ch)
        image += image * gauss
        return image 
Developer: ozan-oktay, Project: Attention-Gated-Networks, Lines: 18, Source: myImageTransformations.py

Example 3: to_tensor

# Required module import: import collections [as alias]
# Or: from collections import Sequence [as alias]
def to_tensor(data):
    """Convert objects of various python types to :obj:`torch.Tensor`.

    Supported types are: :class:`numpy.ndarray`, :class:`torch.Tensor`,
    :class:`Sequence`, :class:`int` and :class:`float`.
    """
    if isinstance(data, torch.Tensor):
        return data
    elif isinstance(data, np.ndarray):
        return torch.from_numpy(data)
    elif isinstance(data, Sequence) and not mmcv.is_str(data):
        return torch.tensor(data)
    elif isinstance(data, int):
        return torch.LongTensor([data])
    elif isinstance(data, float):
        return torch.FloatTensor([data])
    else:
        raise TypeError('type {} cannot be converted to tensor.'.format(
            type(data))) 
Developer: dingjiansw101, Project: AerialDetection, Lines: 21, Source: utils.py
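A minimal usage sketch for the to_tensor helper above, assuming torch, numpy, and mmcv are installed and the function (with its module-level imports) is in scope; the inputs are chosen to exercise the ndarray, Sequence, and int branches.

import numpy as np

print(to_tensor(np.zeros((2, 2))))   # ndarray  -> 2x2 float tensor
print(to_tensor([1, 2, 3]))          # Sequence -> tensor([1, 2, 3])
print(to_tensor(5))                  # int      -> LongTensor([5])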

Example 4: normalize

# Required module import: import collections [as alias]
# Or: from collections import Sequence [as alias]
def normalize(tensor, mean, std):
    """Normalize a tensor image with mean and standard deviation.
    .. note::
        This transform acts in-place, i.e., it mutates the input tensor.
    See :class:`~torchvision.transforms.Normalize` for more details.
    Args:
        tensor (Tensor): Tensor image of size (C, H, W) to be normalized.
        mean (sequence): Sequence of means for each channel.
        std (sequence): Sequence of standard deviations for each channel.
    Returns:
        Tensor: Normalized Tensor image.
    """
    if not _is_tensor_image(tensor):
        raise TypeError('tensor is not a torch image.')

    # This is faster than using broadcasting, don't change without benchmarking
    for t, m, s in zip(tensor, mean, std):
        t.sub_(m).div_(s)
    return tensor 
Developer: jbohnslav, Project: opencv_transforms, Lines: 21, Source: functional.py
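A short usage sketch for the normalize function above, assuming torch is installed and the module's _is_tensor_image helper is also in scope; note that the input tensor is modified in place.

import torch

img = torch.rand(3, 4, 4)   # (C, H, W) image with values in [0, 1]
normalize(img, mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
print(img.mean())           # channel values are now roughly zero-centered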

Example 5: __init__

# Required module import: import collections [as alias]
# Or: from collections import Sequence [as alias]
def __init__( self, exprs, savelist = False ):
        super(ParseExpression,self).__init__(savelist)
        if isinstance( exprs, _generatorType ):
            exprs = list(exprs)

        if isinstance( exprs, basestring ):
            self.exprs = [ Literal( exprs ) ]
        elif isinstance( exprs, collections.Sequence ):
            # if sequence of strings provided, wrap with Literal
            if all(isinstance(expr, basestring) for expr in exprs):
                exprs = map(Literal, exprs)
            self.exprs = list(exprs)
        else:
            try:
                self.exprs = list( exprs )
            except TypeError:
                self.exprs = [ exprs ]
        self.callPreparse = False 
Developer: vulscanteam, Project: vulscan, Lines: 20, Source: pyparsing.py

Example 6: flatten

# Required module import: import collections [as alias]
# Or: from collections import Sequence [as alias]
def flatten(x):
        """Flattens a cell state by concatenating a sequence of cell
        states along the last dimension. If the cell states are
        :tf_main:`LSTMStateTuple <contrib/rnn/LSTMStateTuple>`, only the
        hidden `LSTMStateTuple.h` is used.

        This process is used by default if :attr:`medium` is not provided
        to :meth:`_build`.
        """
        if isinstance(x, LSTMStateTuple):
            return x.h
        if isinstance(x, collections.Sequence):
            return tf.concat(
                [HierarchicalRNNEncoder.flatten(v) for v in x], -1)
        else:
            return x 
Developer: qkaren, Project: Counterfactual-StoryRW, Lines: 18, Source: hierarchical_encoders.py

Example 7: collate_fn

# Required module import: import collections [as alias]
# Or: from collections import Sequence [as alias]
def collate_fn(self):
        def collate(batch):
            if len(self.fields) == 1:
                batch = [batch, ]
            else:
                batch = list(zip(*batch))

            tensors = []
            for field, data in zip(self.fields.values(), batch):
                tensor = field.process(data)
                if isinstance(tensor, collections.Sequence) and any(isinstance(t, torch.Tensor) for t in tensor):
                    tensors.extend(tensor)
                else:
                    tensors.append(tensor)

            if len(tensors) > 1:
                return tensors
            else:
                return tensors[0]

        return collate 
Developer: aimagelab, Project: speaksee, Lines: 23, Source: dataset.py

Example 8: dict_gather

# Required module import: import collections [as alias]
# Or: from collections import Sequence [as alias]
def dict_gather(outputs, target_device, dim=0):
    """
    Gathers variables from different GPUs on a specified device
      (-1 means the CPU), with dictionary support.
    """
    def gather_map(outputs):
        out = outputs[0]
        if isinstance(out, Variable):
            # MJY(20180330) HACK:: force nr_dims > 0
            if out.dim() == 0:
                outputs = [o.unsqueeze(0) for o in outputs]
            return Gather.apply(target_device, dim, *outputs)
        elif out is None:
            return None
        elif isinstance(out, collections.Mapping):
            return {k: gather_map([o[k] for o in outputs]) for k in out}
        elif isinstance(out, collections.Sequence):
            return type(out)(map(gather_map, zip(*outputs)))
    return gather_map(outputs) 
Developer: XiaLiPKU, Project: EMANet, Lines: 21, Source: data_parallel.py

Example 9: build

# Required module import: import collections [as alias]
# Or: from collections import Sequence [as alias]
def build(self, input_shape):
    if isinstance(input_shape, collections.Sequence):
      input_shape = input_shape[0]
    out_channels = input_shape[1]

    if self.weights_initializer is None:
      weights_initializer = tf.keras.initializers.VarianceScaling
    else:
      weights_initializer = self.weights_initializer

    self.dense_H = tf.keras.layers.Dense(
        out_channels,
        activation=self.activation_fn,
        bias_initializer=self.biases_initializer,
        kernel_initializer=weights_initializer)
    self.dense_T = tf.keras.layers.Dense(
        out_channels,
        activation=tf.nn.sigmoid,
        bias_initializer=tf.constant_initializer(-1),
        kernel_initializer=weights_initializer)
    self.built = True 
Developer: deepchem, Project: deepchem, Lines: 23, Source: layers.py

Example 10: _serialize_internal

# Required module import: import collections [as alias]
# Or: from collections import Sequence [as alias]
def _serialize_internal(cls, msg):
        # insert the name of the class
        data = [msg.__class__.__name__]

        # get list of fields
        fields = msg.__class__.get_fields()
        for field_name, field_type in fields:
            attr = getattr(msg, field_name)
            if field_type is not None and attr is not None:
                # if attr has a field type defined deserialize that field
                data.extend(cls._serialize_internal(attr))
            else:
                if isinstance(attr, str) or isinstance(attr, bytes):
                    data.append(attr)
                elif isinstance(attr, collections.Sequence):
                    data.append([cls._serialize_internal(i) for i in attr])
                elif isinstance(attr, collections.Mapping):
                    data.append({k: cls._serialize_internal(v) for k, v in attr.items()})
                else:
                    data.append(attr)
        return data 
Developer: kippandrew, Project: tattle, Lines: 23, Source: messages.py

Example 11: __init__

# Required module import: import collections [as alias]
# Or: from collections import Sequence [as alias]
def __init__(self, config, queue, events, loop=None):
        """
        Initialize instance of the NodeManager class

        :param config: config object
        :param queue: broadcast queue
        :type config: tattle.config.Configuration
        :type events: tattle.event.EventManager
        :type queue: tattle.queue.BroadcastQueue
        """
        self.config = config
        self._queue = queue
        self._events = events
        self._loop = loop or asyncio.get_event_loop()
        self._leaving = False
        self._nodes = list()
        self._nodes_map = dict()
        self._nodes_lock = asyncio.Lock()
        self._suspect_nodes = dict()
        self._local_node_name = None
        self._local_node_seq = sequence.Sequence() 
Developer: kippandrew, Project: tattle, Lines: 23, Source: state.py

Example 12: _clone_node_with_lineno

# Required module import: import collections [as alias]
# Or: from collections import Sequence [as alias]
def _clone_node_with_lineno(node, parent, lineno):
    cls = node.__class__
    other_fields = node._other_fields
    _astroid_fields = node._astroid_fields
    init_params = {"lineno": lineno, "col_offset": node.col_offset, "parent": parent}
    postinit_params = {param: getattr(node, param) for param in _astroid_fields}
    if other_fields:
        init_params.update({param: getattr(node, param) for param in other_fields})
    new_node = cls(**init_params)
    if hasattr(node, "postinit") and _astroid_fields:
        for param, child in postinit_params.items():
            if child and not isinstance(child, collections.Sequence):
                cloned_child = _clone_node_with_lineno(
                    node=child, lineno=new_node.lineno, parent=new_node
                )
                postinit_params[param] = cloned_child
        new_node.postinit(**postinit_params)
    return new_node 
Developer: sofia-netsurv, Project: python-netsurv, Lines: 20, Source: brain_fstrings.py

Example 13: _sequence_like

# Required module import: import collections [as alias]
# Or: from collections import Sequence [as alias]
def _sequence_like(instance, args):
  """Converts the sequence `args` to the same type as `instance`.

  Args:
    instance: an instance of `tuple`, `list`, or a `namedtuple` class.
    args: elements to be converted to a sequence.

  Returns:
    `args` with the type of `instance`.
  """
  if (isinstance(instance, tuple) and
      hasattr(instance, "_fields") and
      isinstance(instance._fields, collections.Sequence) and
      all(isinstance(f, six.string_types) for f in instance._fields)):
    # This is a namedtuple
    return type(instance)(*args)
  else:
    # Not a namedtuple
    return type(instance)(args) 
Developer: ryfeus, Project: lambda-packs, Lines: 21, Source: nest.py
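A quick sketch of how _sequence_like behaves, assuming the function above is in scope; the namedtuple branch preserves the field structure, while plain tuples and lists are simply rebuilt with their own type.

import collections

Point = collections.namedtuple('Point', ['x', 'y'])

print(_sequence_like(Point(1, 2), [10, 20]))   # Point(x=10, y=20)
print(_sequence_like((1, 2), [10, 20]))        # (10, 20)
print(_sequence_like([1, 2], [10, 20]))        # [10, 20]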

Example 14: constrain_collection

# Required module import: import collections [as alias]
# Or: from collections import Sequence [as alias]
def constrain_collection(config: PushConfig, coll: Sequence) -> Sequence:
    """Constrains the collection to a size that is safe for Push program execution."""
    if len(coll) > config.collection_size_cap:
        return coll[:config.collection_size_cap]
    return coll 
Developer: erp12, Project: pyshgp, Lines: 7, Source: config.py
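A minimal sketch of the truncation behaviour, using a hypothetical stand-in for pyshgp's PushConfig that provides only the collection_size_cap attribute the function reads.

class _StubConfig:                      # hypothetical stand-in for PushConfig
    collection_size_cap = 3

print(constrain_collection(_StubConfig(), [1, 2, 3, 4, 5]))   # [1, 2, 3]
print(constrain_collection(_StubConfig(), [1, 2]))            # [1, 2] (unchanged)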

Example 15: convert

# Required module import: import collections [as alias]
# Or: from collections import Sequence [as alias]
def convert(self, value):
        if isinstance(value, collections.Sequence):
            return list(map(self._elem_converter, value))
        else:
            # TODO: Handle the case where the value is not a sequence.
            return [self._elem_converter(value)] 
Developer: prkumar, Project: uplink, Lines: 8, Source: typing_.py


Note: The collections.Sequence method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by many developers, and copyright remains with the original authors. For distribution and use, please refer to the corresponding projects' licenses; do not reproduce without permission.