

Python tensor.TensorVariable Examples

This article collects typical usage examples of theano.tensor.TensorVariable in Python. If you are unsure how to use tensor.TensorVariable, what it is for, or are looking for concrete examples of it in real code, the curated examples below may help. You can also browse further usage examples from its containing module, theano.tensor.


A total of 15 code examples of tensor.TensorVariable are shown below, sorted by popularity by default.

Example 1: ensure_2d_arguments

# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import TensorVariable [as alias]
def ensure_2d_arguments(f, squeeze_ret=True):
    """Decorator which ensures all of its function's arguments are 2D."""
    @wraps(f)
    def wrapped(*args, **kwargs):
        new_args = []
        for arg in args:
            if isinstance(arg, T.TensorVariable):
                if arg.ndim == 1:
                    arg = arg.dimshuffle("x", 0)
                elif arg.ndim > 2:
                    raise RuntimeError("ensure_2d_arguments wrapped a function"
                                       " which received an %i-d argument. "
                                       "Don't know what to do." % arg.ndim)
            new_args.append(arg)

        ret = f(*new_args, **kwargs)
        if squeeze_ret:
            if isinstance(ret, (list, tuple)):
                ret = [ret_i.squeeze() for ret_i in ret]
            elif isinstance(ret, T.TensorVariable):
                ret = ret.squeeze()
        return ret
    return wrapped 
Developer: stanfordnlp, Project: spinn, Lines: 25, Source: theano_internal.py
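
A minimal usage sketch of the decorator above (an illustration of ours, not project code; it assumes Theano is installed and that ensure_2d_arguments with its functools.wraps import is in scope): 1-D vector arguments are promoted to shape (1, n) before the wrapped function runs, and the result is squeezed back to 1-D.

# Usage sketch (assumption: ensure_2d_arguments from above is importable).
import theano
import theano.tensor as T

@ensure_2d_arguments
def add_rows(a, b):
    # Both a and b are guaranteed to be 2-D inside the wrapper.
    return a + b

v = T.vector("v")
w = T.vector("w")
out = add_rows(v, w)   # computed on (1, n) promoted inputs, then squeezed
f = theano.function([v, w], out, allow_input_downcast=True)
print(f([1., 2.], [3., 4.]))   # [4. 6.]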

Example 2: test_sanity_check_slice

# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import TensorVariable [as alias]
def test_sanity_check_slice(self):

        mySymbolicMatricesList = TypedListType(T.TensorType(
            theano.config.floatX, (False, False)))()

        mySymbolicSlice = SliceType()()

        z = GetItem()(mySymbolicMatricesList, mySymbolicSlice)

        self.assertFalse(isinstance(z, T.TensorVariable))

        f = theano.function([mySymbolicMatricesList, mySymbolicSlice],
                            z)

        x = rand_ranged_matrix(-1000, 1000, [100, 101])

        self.assertTrue(numpy.array_equal(f([x], slice(0, 1, 1)), [x])) 
Developer: muhanzhang, Project: D-VAE, Lines: 19, Source: test_basic.py

Example 3: make_node

# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import TensorVariable [as alias]
def make_node(self, x, index):
        assert isinstance(x.type, TypedListType)
        if not isinstance(index, Variable):
            if isinstance(index, slice):
                index = Constant(SliceType(), index)
                return Apply(self, [x, index], [x.type()])
            else:
                index = T.constant(index, ndim=0, dtype='int64')
                return Apply(self, [x, index], [x.ttype()])
        if isinstance(index.type, SliceType):
            return Apply(self, [x, index], [x.type()])
        elif isinstance(index, T.TensorVariable) and index.ndim == 0:
            assert index.dtype == 'int64'
            return Apply(self, [x, index], [x.ttype()])
        else:
            raise TypeError('Expected scalar or slice as index.') 
Developer: muhanzhang, Project: D-VAE, Lines: 18, Source: basic.py
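
A short sketch of how this make_node is typically reached (an assumption pieced together from the test in Example 2, not code from the D-VAE project): a plain Python integer index is neither a Variable nor a slice, so it takes the T.constant(..., dtype='int64') branch and the output is a single matrix.

# Sketch: indexing a symbolic typed list with a constant scalar index.
import theano
import theano.tensor as T
from theano.typed_list.type import TypedListType
from theano.typed_list.basic import GetItem

matrices = TypedListType(T.TensorType(theano.config.floatX, (False, False)))()
second = GetItem()(matrices, 1)    # constant scalar index -> int64 constant branch
f = theano.function([matrices], second)
# f([m0, m1]) returns m1 for two numpy matrices m0 and m1.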

Example 4: shared_like

# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import TensorVariable [as alias]
def shared_like(variable, name=None, **kwargs):
    r"""Construct a shared variable to hold the value of a tensor variable.

    Parameters
    ----------
    variable : :class:`~tensor.TensorVariable`
        The variable whose dtype and ndim will be used to construct
        the new shared variable.
    name : :obj:`str` or :obj:`None`
        The name of the shared variable. If None, the name is determined
        based on the variable's name.
    \*\*kwargs
        Keyword arguments to pass to the :func:`~theano.shared` function.

    """
    variable = tensor.as_tensor_variable(variable)
    if name is None:
        name = "shared_{}".format(variable.name)
    return theano.shared(numpy.zeros((0,) * variable.ndim,
                                     dtype=variable.dtype),
                         name=name, **kwargs) 
Developer: rizar, Project: attention-lvcsr, Lines: 23, Source: __init__.py
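
A quick usage sketch for shared_like as defined above (assuming numpy, theano, and theano.tensor are imported as in the snippet): the returned shared variable copies the dtype and ndim of the symbolic variable but starts out empty.

# Usage sketch (assumption: shared_like from above is in scope).
import numpy
import theano
from theano import tensor

features = tensor.matrix("features")
accumulator = shared_like(features)          # named "shared_features"
print(accumulator.ndim, accumulator.dtype)   # 2, same dtype as features
print(accumulator.get_value().shape)         # (0, 0) until a real value is set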

Example 5: is_graph_input

# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import TensorVariable [as alias]
def is_graph_input(variable):
    """Check if variable is a user-provided graph input.

    To be considered an input, the variable must have no owner and must
    not be a constant or shared variable.

    Parameters
    ----------
    variable : :class:`~tensor.TensorVariable`

    Returns
    -------
    bool
        ``True`` If the variable is a user-provided input to the graph.

    """
    return (not variable.owner and
            not isinstance(variable, SharedVariable) and
            not isinstance(variable, Constant)) 
Developer: rizar, Project: attention-lvcsr, Lines: 21, Source: __init__.py
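
A small illustration of the predicate above (assuming is_graph_input and the SharedVariable/Constant classes it references are in scope):

# Sketch (assumption: is_graph_input from above is importable).
import theano
import theano.tensor as T

x = T.matrix("x")                  # no owner, not shared, not constant
y = 2 * x                          # has an owner (the Apply node of the product)
w = theano.shared(0.0, name="w")   # shared variable

print(is_graph_input(x))   # True
print(is_graph_input(y))   # False
print(is_graph_input(w))   # False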

Example 6: put_hook

# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import TensorVariable [as alias]
def put_hook(variable, hook_fn, *args):
    r"""Put a hook on a Theano variables.

    Ensures that the hook function is executed every time when the value
    of the Theano variable is available.

    Parameters
    ----------
    variable : :class:`~tensor.TensorVariable`
        The variable to put a hook on.
    hook_fn : function
        The hook function. Should take a single argument: the variable's
        value.
    \*args : list
        Positional arguments to pass to the hook function.

    """
    return printing.Print(global_fn=lambda _, x: hook_fn(x, *args))(variable) 
Developer: rizar, Project: attention-lvcsr, Lines: 20, Source: __init__.py
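
A usage sketch for put_hook (assuming put_hook and theano.printing are in scope as in the snippet; the hook function report_shape is a made-up name): the hook runs every time the compiled function computes the hooked value.

# Sketch (assumption: put_hook from above is importable).
import numpy
import theano
import theano.tensor as T

def report_shape(value):
    print("intermediate value has shape", value.shape)

x = T.matrix("x")
y = put_hook(T.exp(x), report_shape)   # wraps T.exp(x) in a Print op with a hook
f = theano.function([x], y.sum())
f(numpy.ones((2, 3), dtype=theano.config.floatX))   # prints the (2, 3) shape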

Example 7: apply

# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import TensorVariable [as alias]
def apply(self, input_):
        """Apply the linear transformation.

        Parameters
        ----------
        input_ : :class:`~tensor.TensorVariable`
            The input on which to apply the transformation

        Returns
        -------
        output : :class:`~tensor.TensorVariable`
            The transformed input plus optional bias

        """
        output = tensor.dot(input_, self.W)
        if self.use_bias:
            output += self.b
        return output 
Developer: rizar, Project: attention-lvcsr, Lines: 20, Source: simple.py
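
The method above appears to belong to a Blocks-style Linear brick, so self.W and self.b are allocated elsewhere; the sketch below reproduces the same affine transformation with plain Theano shared variables (a standalone illustration, not the Blocks API):

# Standalone sketch of the same transformation (not the Linear brick itself).
import numpy
import theano
import theano.tensor as T

input_dim, output_dim = 4, 3
W = theano.shared(numpy.random.randn(input_dim, output_dim)
                  .astype(theano.config.floatX), name="W")
b = theano.shared(numpy.zeros(output_dim, dtype=theano.config.floatX), name="b")

input_ = T.matrix("input_")
output = T.dot(input_, W) + b      # same as apply() with use_bias=True
f = theano.function([input_], output)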

Example 8: get_output_shape_for

# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import TensorVariable [as alias]
def get_output_shape_for(self, input_shape, **kwargs):
        # self.crop is a tensor --> we cannot know in advance how much
        # we will crop
        if isinstance(self.crop, T.TensorVariable):
            if self.data_format == 'bc01':
                input_shape = list(input_shape)
                input_shape[2] = None
                input_shape[3] = None
            else:
                input_shape = list(input_shape)
                input_shape[1] = None
                input_shape[2] = None
        # self.crop is a list of ints
        else:
            if self.data_format == 'bc01':
                input_shape = list(input_shape)
                input_shape[2] -= self.crop[0]
                input_shape[3] -= self.crop[1]
            else:
                input_shape = list(input_shape)
                input_shape[1] -= self.crop[0]
                input_shape[2] -= self.crop[1]
        return input_shape 
Developer: fvisin, Project: reseg, Lines: 25, Source: layers.py

Example 9: build_loss_func

# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import TensorVariable [as alias]
def build_loss_func(loss, uncertain_inputs=False, name='loss_fn',
                    *args, **kwargs):
    '''
        Utility function for compiling a theano graph corresponding to a loss
        function.
    '''
    mx = tt.vector('mx') if uncertain_inputs else tt.matrix('mx')
    Sx = tt.matrix('Sx') if uncertain_inputs else None
    inputs = [mx, Sx] if uncertain_inputs else [mx]
    # add extra input variables
    inputs += [a for a in args
               if type(a) is theano.tensor.TensorVariable
               and len(a.get_parents()) == 0]
    inputs += [k for k in kwargs.values()
               if type(k) is theano.tensor.TensorVariable
               and len(k.get_parents()) == 0]
    outputs = loss(mx, Sx, *args, **kwargs)
    return theano.function(inputs, outputs, name=name,
                           allow_input_downcast=True) 
Developer: mcgillmrl, Project: kusanagi, Lines: 21, Source: cost.py
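
A usage sketch for build_loss_func (the loss function squared_error below is a made-up example, not part of the kusanagi project): symbolic kwargs with no parents, such as target here, are appended to the inputs of the compiled function.

# Sketch (assumptions: build_loss_func from above is in scope; squared_error is ours).
import numpy as np
import theano
import theano.tensor as tt

def squared_error(mx, Sx, target=None):
    # Sx is None when uncertain_inputs=False; mx is a matrix of states.
    return ((mx - target) ** 2).sum(axis=-1)

target = tt.vector('target')
loss_fn = build_loss_func(squared_error, uncertain_inputs=False, target=target)

states = np.random.randn(5, 3).astype(theano.config.floatX)
goal = np.zeros(3, dtype=theano.config.floatX)
print(loss_fn(states, goal))   # per-row squared distances to the goal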

Example 10: build_distance_based_cost

# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import TensorVariable [as alias]
def build_distance_based_cost(uncertain_inputs=False, name='loss_fn',
                              *args, **kwargs):
    '''
        Utility function for compiling a theano graph corresponding to a loss
        function.
    '''
    mx = tt.vector('mx') if uncertain_inputs else tt.matrix('mx')
    Sx = tt.matrix('Sx') if uncertain_inputs else None
    Q = kwargs.pop('Q', tt.matrix('Q'))
    target = kwargs.pop('target', tt.vector('target'))
    angi = kwargs.pop('angle_dims', [])
    inputs = [mx, Sx] if uncertain_inputs else [mx]
    if type(target) is tt.TensorVariable and len(target.get_parents()) == 0:
        inputs += [target]
    if type(Q) is tt.TensorVariable and len(Q.get_parents()) == 0:
        inputs += [Q]
    if type(angi) is tt.TensorVariable and len(angi.get_parents()) == 0:
        inputs += [angi]
    outputs = distance_based_cost(mx, Sx, target=target, Q=Q, angle_dims=angi,
                                  *args, **kwargs)
    return theano.function(inputs, outputs, name=name,
                           allow_input_downcast=True) 
Developer: mcgillmrl, Project: kusanagi, Lines: 24, Source: cost.py

Example 11: shared_like

# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import TensorVariable [as alias]
def shared_like(variable, name=None):
    """Construct a shared variable to hold the value of a tensor variable.

    Parameters
    ----------
    variable : :class:`~tensor.TensorVariable`
        The variable whose dtype and ndim will be used to construct
        the new shared variable.
    name : :obj:`str` or :obj:`None`
        The name of the shared variable. If None, the name is determined
        based on the variable's name.

    """
    variable = tensor.as_tensor_variable(variable)
    if name is None:
        name = "shared_{}".format(variable.name)
    return theano.shared(numpy.zeros((0,) * variable.ndim,
                                     dtype=variable.dtype),
                         name=name) 
Developer: caglar, Project: Attentive_reader, Lines: 21, Source: blocks_utils.py

Example 12: variables

# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import TensorVariable [as alias]
def variables(self):
        return filter(lambda x: isinstance(x, T.TensorVariable), gof.graph.inputs([self._logp])) 
Developer: ibab, Project: python-mle, Lines: 4, Source: model.py
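
A standalone sketch of the same idea (not the python-mle Model class itself): gof.graph.inputs walks the graph of an expression and returns its leaf variables, and the isinstance filter keeps the free TensorVariables while dropping constants.

# Sketch: collecting the free TensorVariable inputs of a symbolic expression.
import theano.tensor as T
from theano import gof

a = T.scalar("a")
b = T.scalar("b")
logp = (a + b) ** 2
free_vars = [v for v in gof.graph.inputs([logp]) if isinstance(v, T.TensorVariable)]
print(sorted(v.name for v in free_vars))   # ['a', 'b']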

Example 13: __setstate__

# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import TensorVariable [as alias]
def __setstate__(self, d):
        self.__dict__.update(d)
        if "allow_gc" not in self.__dict__:
            self.allow_gc = True
            self.info['allow_gc'] = True
        if not hasattr(self, 'gpua'):
            self.gpua = False
            self.info['gpua'] = False
        if not hasattr(self, 'var_mappings'):
            # Generate the mappings between inner and outer inputs and outputs
            # if they haven't already been generated.
            self.var_mappings = self.get_oinp_iinp_iout_oout_mappings()
        if hasattr(self, 'fn'):
            if not hasattr(self, 'thunk_mit_mot_out_slices'):
                # The thunk has been compiled before mit_mot preallocation
                # feature was implemented. Mark every mit_mot output tap as
                # not having been preallocated
                self.mitmots_preallocated = [False] * self.n_mit_mot_outs

            if not hasattr(self, 'outs_is_tensor'):
                # The thunk has been compiled before the analysis, at
                # compilation time, of the location of the inputs and outputs.
                # Perform this analysis here.
                self.inps_is_tensor = [isinstance(out, theano.tensor.TensorVariable)
                                       for out in self.fn.maker.fgraph.inputs]
                self.outs_is_tensor = [isinstance(out, theano.tensor.TensorVariable)
                                       for out in self.fn.maker.fgraph.outputs]

        # Ensure that the graph associated with the inner function is valid.
        self.validate_inner_graph() 
Developer: muhanzhang, Project: D-VAE, Lines: 32, Source: scan_op.py

Example 14: apply

# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import TensorVariable [as alias]
def apply(self, x):
        s = x.shape
        if isinstance(x, np.ndarray):
            return np.dot(x.reshape((s[0],np.prod(s[1:]))) - self.mean.get_value(), self.ZCA_mat.get_value()).reshape(s)
        elif isinstance(x, T.TensorVariable):
            return T.dot(x.flatten(2) - self.mean.dimshuffle('x',0), self.ZCA_mat).reshape(s)
        else:
            raise NotImplementedError("Whitening only implemented for numpy arrays or Theano TensorVariables") 
Developer: hendrycks, Project: GELUs, Lines: 10, Source: nn.py

Example 15: invert

# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import TensorVariable [as alias]
def invert(self, x):
        s = x.shape
        if isinstance(x, np.ndarray):
            return (np.dot(x.reshape((s[0],np.prod(s[1:]))), self.inv_ZCA_mat.get_value()) + self.mean.get_value()).reshape(s)
        elif isinstance(x, T.TensorVariable):
            return (T.dot(x.flatten(2), self.inv_ZCA_mat) + self.mean.dimshuffle('x',0)).reshape(s)
        else:
            raise NotImplementedError("Whitening only implemented for numpy arrays or Theano TensorVariables")

# T.nnet.relu has some issues with very large inputs, this is more stable 
Developer: hendrycks, Project: GELUs, Lines: 12, Source: nn.py


Note: The theano.tensor.TensorVariable examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by their authors; copyright of the source code remains with the original authors. Refer to each project's license before distributing or using the code. Do not reproduce without permission.