

Python tensor.TensorVariable Usage Examples

This article collects typical usage examples of theano.tensor.TensorVariable in Python. If you are wondering how to use tensor.TensorVariable, what it is for, or what working code looks like, the curated examples below should help. You can also explore further usage examples from the theano.tensor module.


Fifteen code examples of tensor.TensorVariable are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code samples.

Example 1: ensure_2d_arguments

# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import TensorVariable [as alias]
def ensure_2d_arguments(f, squeeze_ret=True):
    """Decorator which ensures all of its function's arguments are 2D."""
    @wraps(f)
    def wrapped(*args, **kwargs):
        new_args = []
        for arg in args:
            if isinstance(arg, T.TensorVariable):
                if arg.ndim == 1:
                    arg = arg.dimshuffle("x", 0)
                elif arg.ndim > 2:
                    raise RuntimeError("ensure_2d_arguments wrapped a function"
                                       " which received an %i-d argument. "
                                       "Don't know what to do." % arg.ndim)
            new_args.append(arg)

        ret = f(*new_args, **kwargs)
        if squeeze_ret:
            if isinstance(ret, (list, tuple)):
                ret = [ret_i.squeeze() for ret_i in ret]
            elif isinstance(ret, T.TensorVariable):
                ret = ret.squeeze()
        return ret
    return wrapped 
Developer: stanfordnlp, Project: spinn, Lines: 25, Source file: theano_internal.py
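
A minimal usage sketch, assuming ensure_2d_arguments is defined as above in the same module (the decorated function and values are illustrative, not from the original project):

import theano
import theano.tensor as T

@ensure_2d_arguments
def row_sums(x):
    # x always arrives as a 2-D variable here, even if the caller passed a vector
    return x.sum(axis=1)

v = T.vector("v")
f = theano.function([v], row_sums(v), allow_input_downcast=True)
print(f([1.0, 2.0, 3.0]))  # the (1,)-shaped result is squeezed back to a scalar: 6.0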

Example 2: test_sanity_check_slice

# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import TensorVariable [as alias]
def test_sanity_check_slice(self):

        mySymbolicMatricesList = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()

        mySymbolicSlice = SliceType()()

        z = GetItem()(mySymbolicMatricesList, mySymbolicSlice)

        self.assertFalse(isinstance(z, T.TensorVariable))

        f = theano.function([mySymbolicMatricesList, mySymbolicSlice],
                            z)

        x = rand_ranged_matrix(-1000, 1000, [100, 101])

        self.assertTrue(numpy.array_equal(f([x], slice(0, 1, 1)), [x])) 
Developer: muhanzhang, Project: D-VAE, Lines: 19, Source file: test_basic.py

Example 3: make_node

# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import TensorVariable [as alias]
def make_node(self, x, index):
        assert isinstance(x.type, TypedListType)
        if not isinstance(index, Variable):
            if isinstance(index, slice):
                index = Constant(SliceType(), index)
                return Apply(self, [x, index], [x.type()])
            else:
                index = T.constant(index, ndim=0, dtype='int64')
                return Apply(self, [x, index], [x.ttype()])
        if isinstance(index.type, SliceType):
            return Apply(self, [x, index], [x.type()])
        elif isinstance(index, T.TensorVariable) and index.ndim == 0:
            assert index.dtype == 'int64'
            return Apply(self, [x, index], [x.ttype()])
        else:
            raise TypeError('Expected scalar or slice as index.') 
Developer: muhanzhang, Project: D-VAE, Lines: 18, Source file: basic.py

Example 4: shared_like

# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import TensorVariable [as alias]
def shared_like(variable, name=None, **kwargs):
    r"""Construct a shared variable to hold the value of a tensor variable.

    Parameters
    ----------
    variable : :class:`~tensor.TensorVariable`
        The variable whose dtype and ndim will be used to construct
        the new shared variable.
    name : :obj:`str` or :obj:`None`
        The name of the shared variable. If None, the name is determined
        based on the variable's name.
    \*\*kwargs
        Keyword arguments to pass to the :func:`~theano.shared` function.

    """
    variable = tensor.as_tensor_variable(variable)
    if name is None:
        name = "shared_{}".format(variable.name)
    return theano.shared(numpy.zeros((0,) * variable.ndim,
                                     dtype=variable.dtype),
                         name=name, **kwargs) 
Developer: rizar, Project: attention-lvcsr, Lines: 23, Source file: __init__.py
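
A brief usage sketch, assuming the shared_like shown above is importable (names are illustrative): allocate a shared buffer matching a symbolic variable's ndim and dtype.

import theano
from theano import tensor

x = tensor.matrix("x")
acc = shared_like(x)                  # picks up the name "shared_x"
print(acc.get_value().shape)          # (0, 0): empty, but with x's ndim
print(acc.get_value().dtype)          # matches x's dtype (floatX)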

Example 5: is_graph_input

# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import TensorVariable [as alias]
def is_graph_input(variable):
    """Check if variable is a user-provided graph input.

    To be considered an input the variable must have no owner, and not
    be a constant or shared variable.

    Parameters
    ----------
    variable : :class:`~tensor.TensorVariable`

    Returns
    -------
    bool
        ``True`` If the variable is a user-provided input to the graph.

    """
    return (not variable.owner and
            not isinstance(variable, SharedVariable) and
            not isinstance(variable, Constant)) 
Developer: rizar, Project: attention-lvcsr, Lines: 21, Source file: __init__.py
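
A quick check, as a sketch (assuming is_graph_input as defined above; the variables are illustrative): only free variables with no owner qualify.

import numpy
import theano
from theano import tensor

x = tensor.vector("x")
w = theano.shared(numpy.ones(3), name="w")
y = x * w
print(is_graph_input(x))  # True: no owner, not shared, not constant
print(is_graph_input(w))  # False: shared variable
print(is_graph_input(y))  # False: produced by the multiplication, so it has an owner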

Example 6: put_hook

# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import TensorVariable [as alias]
def put_hook(variable, hook_fn, *args):
    r"""Put a hook on a Theano variables.

    Ensures that the hook function is executed every time when the value
    of the Theano variable is available.

    Parameters
    ----------
    variable : :class:`~tensor.TensorVariable`
        The variable to put a hook on.
    hook_fn : function
        The hook function. Should take a single argument: the variable's
        value.
    \*args : list
        Positional arguments to pass to the hook function.

    """
    return printing.Print(global_fn=lambda _, x: hook_fn(x, *args))(variable) 
Developer: rizar, Project: attention-lvcsr, Lines: 20, Source file: __init__.py
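
A usage sketch, assuming put_hook as defined above (the graph below is illustrative): print an intermediate value every time the compiled function computes it.

import theano
from theano import tensor

x = tensor.vector("x")
y = tensor.exp(x)
y = put_hook(y, lambda value: print("exp(x) =", value))  # hook fires at run time
f = theano.function([x], y.sum(), allow_input_downcast=True)
f([0.0, 1.0])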

Example 7: apply

# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import TensorVariable [as alias]
def apply(self, input_):
        """Apply the linear transformation.

        Parameters
        ----------
        input_ : :class:`~tensor.TensorVariable`
            The input on which to apply the transformation

        Returns
        -------
        output : :class:`~tensor.TensorVariable`
            The transformed input plus optional bias

        """
        output = tensor.dot(input_, self.W)
        if self.use_bias:
            output += self.b
        return output 
Developer: rizar, Project: attention-lvcsr, Lines: 20, Source file: simple.py
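
What apply computes, as a standalone sketch (shapes and parameter values here are illustrative assumptions, not from the original brick): an affine map output = input · W + b built from shared parameters.

import numpy
import theano
from theano import tensor

floatX = theano.config.floatX
x = tensor.matrix("x")
W = theano.shared(numpy.random.randn(4, 3).astype(floatX), name="W")
b = theano.shared(numpy.zeros(3, dtype=floatX), name="b")
y = tensor.dot(x, W) + b
f = theano.function([x], y)
print(f(numpy.ones((2, 4), dtype=floatX)).shape)  # (2, 3)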

Example 8: get_output_shape_for

# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import TensorVariable [as alias]
def get_output_shape_for(self, input_shape, **kwargs):
        # self.crop is a tensor --> we cannot know in advance how much
        # we will crop
        if isinstance(self.crop, T.TensorVariable):
            if self.data_format == 'bc01':
                input_shape = list(input_shape)
                input_shape[2] = None
                input_shape[3] = None
            else:
                input_shape = list(input_shape)
                input_shape[1] = None
                input_shape[2] = None
        # self.crop is a list of ints
        else:
            if self.data_format == 'bc01':
                input_shape = list(input_shape)
                input_shape[2] -= self.crop[0]
                input_shape[3] -= self.crop[1]
            else:
                input_shape = list(input_shape)
                input_shape[1] -= self.crop[0]
                input_shape[2] -= self.crop[1]
        return input_shape 
Developer: fvisin, Project: reseg, Lines: 25, Source file: layers.py
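
A shape-inference sketch; the demo class below is a hypothetical stand-in (not from the original project) that assumes get_output_shape_for is available at module level. With concrete crop amounts the spatial dimensions shrink; with a symbolic crop they become unknown.

import theano.tensor as T

class _CropShapeDemo(object):
    # minimal stand-in exposing the attributes the method reads
    def __init__(self, crop, data_format='bc01'):
        self.crop = crop
        self.data_format = data_format
    get_output_shape_for = get_output_shape_for  # reuse the function above as a method

print(_CropShapeDemo([2, 3]).get_output_shape_for((None, 16, 32, 32)))
# -> [None, 16, 30, 29]
print(_CropShapeDemo(T.iscalar('crop')).get_output_shape_for((None, 16, 32, 32)))
# -> [None, 16, None, None]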

Example 9: build_loss_func

# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import TensorVariable [as alias]
def build_loss_func(loss, uncertain_inputs=False, name='loss_fn',
                    *args, **kwargs):
    '''
        Utility function for compiling a Theano graph corresponding to a loss
        function.
    '''
    mx = tt.vector('mx') if uncertain_inputs else tt.matrix('mx')
    Sx = tt.matrix('Sx') if uncertain_inputs else None
    inputs = [mx, Sx] if uncertain_inputs else [mx]
    # add extra input variables
    inputs += [a for a in args
               if type(a) is theano.tensor.TensorVariable
               and len(a.get_parents()) == 0]
    inputs += [k for k in kwargs.values()
               if type(k) is theano.tensor.TensorVariable
               and len(k.get_parents()) == 0]
    outputs = loss(mx, Sx, *args, **kwargs)
    return theano.function(inputs, outputs, name=name,
                           allow_input_downcast=True) 
Developer: mcgillmrl, Project: kusanagi, Lines: 21, Source file: cost.py
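
An illustrative call, assuming build_loss_func as defined above; the quadratic loss below is hypothetical and only meant to show the compilation flow. With uncertain_inputs=False the compiled function takes just the state mean.

import theano
import theano.tensor as tt

def quadratic_loss(mx, Sx):
    # Sx is None here because uncertain_inputs=False
    return (mx ** 2).sum(axis=-1)

loss_fn = build_loss_func(quadratic_loss, uncertain_inputs=False)
print(loss_fn([[1.0, 2.0], [3.0, 4.0]]))  # 1+4 = 5 and 9+16 = 25, one value per row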

Example 10: build_distance_based_cost

# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import TensorVariable [as alias]
def build_distance_based_cost(uncertain_inputs=False, name='loss_fn',
                              *args, **kwargs):
    '''
        Utility function for compiling a Theano graph corresponding to a loss
        function.
    '''
    mx = tt.vector('mx') if uncertain_inputs else tt.matrix('mx')
    Sx = tt.matrix('Sx') if uncertain_inputs else None
    Q = kwargs.pop('Q', tt.matrix('Q'))
    target = kwargs.pop('target', tt.vector('target'))
    angi = kwargs.pop('angle_dims', [])
    inputs = [mx, Sx] if uncertain_inputs else [mx]
    if type(target) is tt.TensorVariable and len(target.get_parents()) == 0:
        inputs += [target]
    if type(Q) is tt.TensorVariable and len(Q.get_parents()) == 0:
        inputs += [Q]
    if type(angi) is tt.TensorVariable and len(angi.get_parents()) == 0:
        inputs += [angi]
    outputs = distance_based_cost(mx, Sx, target=target, Q=Q, angle_dims=angi,
                                  *args, **kwargs)
    return theano.function(inputs, outputs, name=name,
                           allow_input_downcast=True) 
Developer: mcgillmrl, Project: kusanagi, Lines: 24, Source file: cost.py

Example 11: shared_like

# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import TensorVariable [as alias]
def shared_like(variable, name=None):
    """Construct a shared variable to hold the value of a tensor variable.

    Parameters
    ----------
    variable : :class:`~tensor.TensorVariable`
        The variable whose dtype and ndim will be used to construct
        the new shared variable.
    name : :obj:`str` or :obj:`None`
        The name of the shared variable. If None, the name is determined
        based on the variable's name.

    """
    variable = tensor.as_tensor_variable(variable)
    if name is None:
        name = "shared_{}".format(variable.name)
    return theano.shared(numpy.zeros((0,) * variable.ndim,
                                     dtype=variable.dtype),
                         name=name) 
Developer: caglar, Project: Attentive_reader, Lines: 21, Source file: blocks_utils.py

Example 12: variables

# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import TensorVariable [as alias]
def variables(self):
        return filter(lambda x: isinstance(x, T.TensorVariable), gof.graph.inputs([self._logp])) 
Developer: ibab, Project: python-mle, Lines: 4, Source file: model.py

Example 13: __setstate__

# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import TensorVariable [as alias]
def __setstate__(self, d):
        self.__dict__.update(d)
        if "allow_gc" not in self.__dict__:
            self.allow_gc = True
            self.info['allow_gc'] = True
        if not hasattr(self, 'gpua'):
            self.gpua = False
            self.info['gpua'] = False
        if not hasattr(self, 'var_mappings'):
            # Generate the mappings between inner and outer inputs and outputs
            # if they haven't already been generated.
            self.var_mappings = self.get_oinp_iinp_iout_oout_mappings()
        if hasattr(self, 'fn'):
            if not hasattr(self, 'thunk_mit_mot_out_slices'):
                # The thunk has been compiled before mit_mot preallocation
                # feature was implemented. Mark every mit_mot output tap as
                # not having been preallocated
                self.mitmots_preallocated = [False] * self.n_mit_mot_outs

            if not hasattr(self, 'outs_is_tensor'):
                # The thunk has been compiled before the analysis, at
                # compilation time, of the location of the inputs and outputs.
                # Perform this analysis here.
                self.inps_is_tensor = [isinstance(out, theano.tensor.TensorVariable)
                                       for out in self.fn.maker.fgraph.inputs]
                self.outs_is_tensor = [isinstance(out, theano.tensor.TensorVariable)
                                       for out in self.fn.maker.fgraph.outputs]

        # Ensure that the graph associated with the inner function is valid.
        self.validate_inner_graph() 
Developer: muhanzhang, Project: D-VAE, Lines: 32, Source file: scan_op.py

Example 14: apply

# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import TensorVariable [as alias]
def apply(self, x):
        s = x.shape
        if isinstance(x, np.ndarray):
            return np.dot(x.reshape((s[0],np.prod(s[1:]))) - self.mean.get_value(), self.ZCA_mat.get_value()).reshape(s)
        elif isinstance(x, T.TensorVariable):
            return T.dot(x.flatten(2) - self.mean.dimshuffle('x',0), self.ZCA_mat).reshape(s)
        else:
            raise NotImplementedError("Whitening only implemented for numpy arrays or Theano TensorVariables") 
Developer: hendrycks, Project: GELUs, Lines: 10, Source file: nn.py

Example 15: invert

# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import TensorVariable [as alias]
def invert(self, x):
        s = x.shape
        if isinstance(x, np.ndarray):
            return (np.dot(x.reshape((s[0],np.prod(s[1:]))), self.inv_ZCA_mat.get_value()) + self.mean.get_value()).reshape(s)
        elif isinstance(x, T.TensorVariable):
            return (T.dot(x.flatten(2), self.inv_ZCA_mat) + self.mean.dimshuffle('x',0)).reshape(s)
        else:
            raise NotImplementedError("Whitening only implemented for numpy arrays or Theano TensorVariables")

# T.nnet.relu has some issues with very large inputs, this is more stable 
Developer: hendrycks, Project: GELUs, Lines: 12, Source file: nn.py


Note: The theano.tensor.TensorVariable examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from open-source projects contributed by the community, and copyright remains with the original authors. Please consult each project's license before redistributing or reusing the code; do not republish without permission.