

Python theano.OpFromGraph Code Examples

This article collects typical usage examples of the theano.OpFromGraph method in Python. If you are unsure what theano.OpFromGraph does or how to use it in practice, the curated code examples below may help. You can also explore further usage examples from the theano package.


Eight code examples of the theano.OpFromGraph method are shown below, sorted by popularity by default.
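Before the project-specific examples, a minimal self-contained sketch may be useful; it is adapted from the Theano documentation's OpFromGraph example rather than from any of the projects below. OpFromGraph takes a list of input variables and a list of output variables and packages the subgraph between them into a single reusable Op:

import theano
import theano.tensor as T

# Build a small graph and wrap it as a single reusable Op.
x, y, z = T.scalars('x', 'y', 'z')
e = x + y * z
op = theano.OpFromGraph([x, y, z], [e])

# The wrapped Op can be applied to fresh variables like any built-in Op.
e2 = op(x, y, z) + op(z, y, x)
f = theano.function([x, y, z], e2)
print(f(1., 2., 3.))  # (1 + 2*3) + (3 + 2*1) = 12.0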

Example 1: test_gradient

# Required import: import theano [as alias]
# Or: from theano import OpFromGraph [as alias]
def test_gradient(self, output: str, wrt: str):
        theano.config.compute_test_value = 'ignore'
        interpolator = self.model._interpolator
        out = self.get_output(output)
        wrt_ = self.get_wrt(wrt)

        geo_model_T = theano.OpFromGraph(interpolator.theano_graph.input_parameters_loop,
                                         [theano.grad(out[0], wrt_)],
                                         inline=True,
                                         on_unused_input='ignore',
                                         name='test_'+output)

        i = interpolator.get_python_input_block()
        th_f = theano.function([], geo_model_T(*i), on_unused_input='warn')

        interpolator.theano_graph.sig_slope.set_value(20)

        return th_f() 
Author: cgre-aachen, Project: gempy, Lines: 20, Source: theano_op.py

Example 2: make_thunk

# Required import: import theano [as alias]
# Or: from theano import OpFromGraph [as alias]
def make_thunk(self, node, storage_map, compute_map, no_recycling):
        ret = super(OpFromGraph, self).make_thunk(node, storage_map,
                                                  compute_map, no_recycling)
        if not hasattr(self, "fn"):
            self.fn = orig_function(self.new_inputs,
                                    self.new_outputs,
                                    **self.kwargs)
        return ret 
Author: muhanzhang, Project: D-VAE, Lines: 10, Source: builders.py

Example 3: grad

# Required import: import theano [as alias]
# Or: from theano import OpFromGraph [as alias]
def grad(self, inputs, output_grads):
        if hasattr(self, "grad_ops"):
            grad_ops = self.grad_ops
        else:
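            # izip is itertools.izip on Python 2 (the built-in zip on Python 3)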
            gs = theano.gradient.grad(cost=None,
                                      known_grads=dict(izip(self.new_outputs,
                                                            output_grads)),
                                      wrt=self.new_inputs,
                                      disconnected_inputs='ignore')

            grad_ops = []
            for g in gs:
                if g is None:
                    grad_ops.append(lambda *args: None)
                else:
                    # It is normal if some inputs are not needed in order
                    # to compute the gradient, so we ignore them.
                    grad_ops.append(OpFromGraph(self.new_inputs + output_grads,
                                                [g],
                                                on_unused_input='ignore'))
            self.grad_ops = grad_ops

        return [go(*(inputs + output_grads)) for go in grad_ops]

# Since OpFromGraph contains a Theano compiled function, we should let
# DebugMode know about it 
Author: muhanzhang, Project: D-VAE, Lines: 28, Source: builders.py
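As a follow-up, here is a hedged end-to-end sketch (not from the D-VAE project) of what this grad machinery enables: calling theano.grad directly on the output of an OpFromGraph application.

import theano
import theano.tensor as T

x, y = T.scalars('x', 'y')
op = theano.OpFromGraph([x, y], [x * y + y])
z = op(x, y)
# Differentiating through the wrapped Op invokes grad() as defined above.
gx, gy = theano.grad(z, [x, y])
f = theano.function([x, y], [gx, gy])
print(f(2., 3.))  # d/dx = y = 3.0, d/dy = x + 1 = 3.0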

Example 4: test_opfromgraph

# Required import: import theano [as alias]
# Or: from theano import OpFromGraph [as alias]
def test_opfromgraph(self):
        # as with the scan tests above, insert foreign inputs into the
        # inner graph.
        outer = tensor.scalar("outer")
        shared = theano.shared(
            numpy.array(1., dtype=theano.config.floatX),
            name="shared")
        constant = tensor.constant(1., name="constant")
        z = outer * (shared + constant)

        # construct the inner graph
        a = tensor.scalar()
        b = tensor.scalar()
        r = a + b
        r.tag.replacement = z * (a - b)

        # construct the outer graph
        c = tensor.scalar()
        d = tensor.scalar()
        u = theano.OpFromGraph([a, b], [r])(c, d)
        t = z * u
        v, = map_variables(self.replacer, [t])
        t2 = z * v

        f = theano.function([c, d, outer], [t, t2])
        for m, n in itertools.combinations(range(10), 2):
            assert f(m, n, outer=0.5) == [m + n, m - n]

        # test that the unsupported case of replacement with a shared
        # variable with updates crashes
        shared.update = shared + 1
        self.assertRaises(NotImplementedError,
                          map_variables, self.replacer, [t]) 
Author: muhanzhang, Project: D-VAE, Lines: 35, Source: test_scan_utils.py

Example 5: __init__

# Required import: import theano [as alias]
# Or: from theano import OpFromGraph [as alias]
def __init__(self):
        x, y, z = T.scalars('xyz')
        e = x * y
        op = th.OpFromGraph([x, y], [e])
        e2 = op(x, y) + z
        op2 = th.OpFromGraph([x, y, z], [e2])
        e3 = op2(x, y, z) + z

        self.inputs = [x, y, z]
        self.outputs = [e3] 
Author: muhanzhang, Project: D-VAE, Lines: 12, Source: models.py
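The snippet does not show the surrounding class, but its inputs/outputs lists suggest the nested graph is meant to be compiled afterwards. A hedged usage sketch, assuming m is an instance of the class above and th is the theano alias used in the snippet:

# Hypothetical usage: compile and evaluate the doubly nested graph.
f = th.function(m.inputs, m.outputs)
print(f(2., 3., 4.))  # e3 = (2*3 + 4) + 4 = 14.0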

Example 6: __init__

# Required import: import theano [as alias]
# Or: from theano import OpFromGraph [as alias]
def __init__(self, fn):
        self.fn = fn
        # memoizes an OpFromGraph instance per tensor type
        self.ops = {} 
Author: SBU-BMI, Project: u24_lymphocyte, Lines: 6, Source: utils.py

Example 7: __call__

# Required import: import theano [as alias]
# Or: from theano import OpFromGraph [as alias]
def __call__(self, *args):
        # constants need to be converted to tensors manually
        def try_convert_tensor(arg):
            if treeano.utils.is_variable(arg):
                return arg
            else:
                return T.constant(arg, dtype=fX)

        # list() so args can be iterated more than once under Python 3
        args = list(map(try_convert_tensor, args))
        # OpFromGraph is opaque to Theano optimizations, so we need to move
        # things to GPU ourselves if needed.
        if theano.sandbox.cuda.cuda_enabled:
            maybe_to_gpu = theano.sandbox.cuda.as_cuda_ndarray_variable
        else:
            maybe_to_gpu = lambda x: x
        # move the input to GPU if needed.
        args = list(map(maybe_to_gpu, args))
        # note the tensor type of the input variable to the fn
        # (mainly dimensionality and dtype); we need to create a fitting Op.
        tensor_types = tuple([arg.type for arg in args])
        # create a suitable Op if not yet done
        if tensor_types not in self.ops:
            # create an input variable of the correct type
            inps = [tensor_type() for tensor_type in tensor_types]
            # pass it through the fn (and move to GPU if needed)
            outp = maybe_to_gpu(self.fn(*inps))
            # fix the forward expression
            op = theano.OpFromGraph(inps, [outp])
            # keep a reference to previous gradient
            op.overwritten_grad = op.grad
            # replace the gradient with our own
            op.grad = self.grad
            # Finally, we memoize the new Op
            self.ops[tensor_types] = op
        # apply the memoized Op to the input we got
        return self.ops[tensor_types](*args) 
Author: SBU-BMI, Project: u24_lymphocyte, Lines: 38, Source: utils.py
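Putting Examples 6 and 7 together, a hedged usage sketch follows; the wrapper's class name is not shown in the snippets, so GradOverridableOp below is a hypothetical stand-in, as is the custom self.grad implementation it relies on:

# Hypothetical usage: wrap an expression so that each distinct input
# tensor type gets its own memoized OpFromGraph with an overridden grad.
wrapped = GradOverridableOp(lambda v: T.nnet.sigmoid(v))
y1 = wrapped(T.matrix('a'))   # builds and memoizes an Op for matrix inputs
y2 = wrapped(T.vector('b'))   # a second Op is created for the vector type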

Example 8: set_th_op

# Required import: import theano [as alias]
# Or: from theano import OpFromGraph [as alias]
def set_th_op(self, output):
        interpolator = self.model._interpolator
        out = self.get_output(output)

        i = interpolator.get_python_input_block()
        theano.config.compute_test_value = 'ignore'
        self.th_op = theano.OpFromGraph(interpolator.theano_graph.input_parameters_loop,
                                        [out],
                                        inline=False,
                                        on_unused_input='ignore',
                                        name=output)
        return self.th_op 
Author: cgre-aachen, Project: gempy, Lines: 14, Source: theano_op.py
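A hedged usage sketch, mirroring the compile step shown in test_gradient (Example 1); the output name 'lith_block' is an assumption, not taken from the gempy snippet:

# Hypothetical usage, inside the same class as set_th_op:
th_op = self.set_th_op('lith_block')
i = self.model._interpolator.get_python_input_block()
th_f = theano.function([], th_op(*i), on_unused_input='warn')
result = th_f()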


Note: The theano.OpFromGraph examples in this article were compiled by 純淨天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from open-source projects contributed by various developers, and copyright remains with the original authors. For distribution and use, please refer to the corresponding project's license; do not reproduce without permission.