

Python theano.OpFromGraph Method Code Examples

This article collects typical usage examples of the theano.OpFromGraph method in Python: what it does, how to call it, and what real projects use it for. You can also explore other usage examples from the theano module.


Eight code examples of the theano.OpFromGraph method are shown below, ordered by popularity by default.
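
Before the project examples, here is a minimal sketch of the basic pattern they all share (compare Examples 4 and 5 below): OpFromGraph takes a list of input variables and a list of output expressions, and packages that subgraph as a reusable Op which can then be applied to other variables.

import theano
import theano.tensor as T

# Build a subgraph on placeholder inputs...
x, y = T.scalars('xy')
op = theano.OpFromGraph([x, y], [x * y + y])

# ...then apply the resulting Op to fresh variables like any other Op.
a, b = T.scalars('ab')
f = theano.function([a, b], op(a, b))
print(f(2.0, 3.0))  # 9.0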

Example 1: test_gradient

# Required import: import theano
# Or: from theano import OpFromGraph
def test_gradient(self, output: str, wrt: str):
    theano.config.compute_test_value = 'ignore'
    interpolator = self.model._interpolator
    out = self.get_output(output)
    wrt_ = self.get_wrt(wrt)

    # Wrap the gradient of the chosen output w.r.t. the chosen input as an Op
    geo_model_T = theano.OpFromGraph(interpolator.theano_graph.input_parameters_loop,
                                     [theano.grad(out[0], wrt_)],
                                     inline=True,
                                     on_unused_input='ignore',
                                     name='test_' + output)

    i = interpolator.get_python_input_block()
    th_f = theano.function([], geo_model_T(*i), on_unused_input='warn')

    interpolator.theano_graph.sig_slope.set_value(20)

    return th_f()
Author: cgre-aachen | Project: gempy | Lines: 20 | Source: theano_op.py

Example 2: make_thunk

# Required import: import theano
# Or: from theano import OpFromGraph
def make_thunk(self, node, storage_map, compute_map, no_recycling):
    ret = super(OpFromGraph, self).make_thunk(node, storage_map,
                                              compute_map, no_recycling)
    # Lazily compile the wrapped subgraph the first time a thunk is made
    if not hasattr(self, "fn"):
        self.fn = orig_function(self.new_inputs,
                                self.new_outputs,
                                **self.kwargs)
    return ret
Author: muhanzhang | Project: D-VAE | Lines: 10 | Source: builders.py

Example 3: grad

# Required import: import theano
# Or: from theano import OpFromGraph
def grad(self, inputs, output_grads):
    if hasattr(self, "grad_ops"):
        grad_ops = self.grad_ops
    else:
        # Backpropagate the supplied output gradients through the wrapped
        # subgraph instead of differentiating a scalar cost.
        # (izip is Python 2-style zip, e.g. itertools.izip)
        gs = theano.gradient.grad(cost=None,
                                  known_grads=dict(izip(self.new_outputs,
                                                        output_grads)),
                                  wrt=self.new_inputs,
                                  disconnected_inputs='ignore')

        grad_ops = []
        for g in gs:
            if g is None:
                grad_ops.append(lambda *args: None)
            else:
                # It is normal if some inputs are not needed in order
                # to compute the gradient, so we ignore them.
                grad_ops.append(OpFromGraph(self.new_inputs + output_grads,
                                            [g],
                                            on_unused_input='ignore'))
        self.grad_ops = grad_ops

    return [go(*(inputs + output_grads)) for go in grad_ops]

# Since OpFromGraph contains a Theano compiled function, we should let
# DebugMode know about it
Author: muhanzhang | Project: D-VAE | Lines: 28 | Source: builders.py
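
The core of this override is theano.gradient.grad(cost=None, known_grads=...), which backpropagates externally supplied output gradients through a graph rather than starting from a scalar cost. A minimal standalone sketch of that call:

import theano
import theano.tensor as T

x = T.scalar('x')
y = x ** 2
gy = T.scalar('gy')  # gradient flowing in from downstream

# No scalar cost: seed backpropagation with the supplied gradient for y.
gx, = theano.gradient.grad(cost=None, known_grads={y: gy}, wrt=[x])
f = theano.function([x, gy], gx)
print(f(3.0, 1.0))  # 6.0, i.e. dy/dx * gy = 2*x * gy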

Example 4: test_opfromgraph

# Required import: import theano
# Or: from theano import OpFromGraph
def test_opfromgraph(self):
    # as with the scan tests above, insert foreign inputs into the
    # inner graph.
    outer = tensor.scalar("outer")
    shared = theano.shared(
        numpy.array(1., dtype=theano.config.floatX),
        name="shared")
    constant = tensor.constant(1., name="constant")
    z = outer * (shared + constant)

    # construct the inner graph
    a = tensor.scalar()
    b = tensor.scalar()
    r = a + b
    r.tag.replacement = z * (a - b)

    # construct the outer graph
    c = tensor.scalar()
    d = tensor.scalar()
    u = theano.OpFromGraph([a, b], [r])(c, d)
    t = z * u
    v, = map_variables(self.replacer, [t])
    t2 = z * v

    f = theano.function([c, d, outer], [t, t2])
    for m, n in itertools.combinations(range(10), 2):
        assert f(m, n, outer=0.5) == [m + n, m - n]

    # test that the unsupported case of replacement with a shared
    # variable with updates crashes
    shared.update = shared + 1
    self.assertRaises(NotImplementedError,
                      map_variables, self.replacer, [t])
Author: muhanzhang | Project: D-VAE | Lines: 35 | Source: test_scan_utils.py

Example 5: __init__

# Required import: import theano
# Or: from theano import OpFromGraph
def __init__(self):
    # Assumes the aliases: import theano as th; import theano.tensor as T
    x, y, z = T.scalars('xyz')
    e = x * y
    op = th.OpFromGraph([x, y], [e])
    e2 = op(x, y) + z                      # x*y + z
    op2 = th.OpFromGraph([x, y, z], [e2])  # an OpFromGraph nested in another
    e3 = op2(x, y, z) + z                  # (x*y + z) + z

    self.inputs = [x, y, z]
    self.outputs = [e3]
Author: muhanzhang | Project: D-VAE | Lines: 12 | Source: models.py
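
A quick standalone check of what the nested construction computes (a sketch using the same aliases as above); e3 unrolls to (x*y + z) + z:

import theano as th
import theano.tensor as T

x, y, z = T.scalars('xyz')
op = th.OpFromGraph([x, y], [x * y])             # inner Op: x * y
op2 = th.OpFromGraph([x, y, z], [op(x, y) + z])  # nested Op: x*y + z
f = th.function([x, y, z], op2(x, y, z) + z)
print(f(2.0, 3.0, 4.0))  # 14.0 == (2*3 + 4) + 4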

Example 6: __init__

# Required import: import theano
# Or: from theano import OpFromGraph
def __init__(self, fn):
    self.fn = fn
    # memoizes an OpFromGraph instance per tensor type
    self.ops = {}
Author: SBU-BMI | Project: u24_lymphocyte | Lines: 6 | Source: utils.py

Example 7: __call__

# Required import: import theano
# Or: from theano import OpFromGraph
def __call__(self, *args):
    # constants need to be manually converted to tensors
    def try_convert_tensor(arg):
        if treeano.utils.is_variable(arg):
            return arg
        else:
            return T.constant(arg, dtype=fX)

    args = map(try_convert_tensor, args)  # (Python 2: map returns a list here)
    # OpFromGraph is opaque to Theano optimizations, so we need to move
    # things to GPU ourselves if needed.
    if theano.sandbox.cuda.cuda_enabled:
        maybe_to_gpu = theano.sandbox.cuda.as_cuda_ndarray_variable
    else:
        maybe_to_gpu = lambda x: x
    # move the input to GPU if needed.
    args = map(maybe_to_gpu, args)
    # note the tensor type of the input variable to the fn
    # (mainly dimensionality and dtype); we need to create a fitting Op.
    tensor_types = tuple([arg.type for arg in args])
    # create a suitable Op if not yet done
    if tensor_types not in self.ops:
        # create an input variable of the correct type
        inps = [tensor_type() for tensor_type in tensor_types]
        # pass it through the fn (and move to GPU if needed)
        outp = maybe_to_gpu(self.fn(*inps))
        # fix the forward expression
        op = theano.OpFromGraph(inps, [outp])
        # keep a reference to previous gradient
        op.overwritten_grad = op.grad
        # replace the gradient with our own
        op.grad = self.grad
        # Finally, we memoize the new Op
        self.ops[tensor_types] = op
    # apply the memoized Op to the input we got
    return self.ops[tensor_types](*args)
Author: SBU-BMI | Project: u24_lymphocyte | Lines: 38 | Source: utils.py
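
Examples 6 and 7 are two pieces of one helper class (its name is not shown in the excerpts) whose point is to swap in a custom gradient for a fixed forward expression. A standalone sketch of that core trick, mirroring the op.grad patching from Example 7; it relies on the older OpFromGraph API these projects target, where the gradient is looked up via op.grad. The straight-through gradient for T.round is a hypothetical choice for illustration (round's true derivative is zero almost everywhere):

import theano
import theano.tensor as T

x = T.scalar('x')
fwd = theano.OpFromGraph([x], [T.round(x)])  # fix the forward expression

# Replace the instance's grad, as Example 7 does with op.grad = self.grad:
# pretend d(round(x))/dx == 1 so gradients pass straight through.
fwd.grad = lambda inputs, output_grads: [output_grads[0]]

y = fwd(x)
g = theano.grad(y, x)
f = theano.function([x], g)
print(f(2.7))  # 1.0 instead of the usual zero gradient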

Example 8: set_th_op

# Required import: import theano
# Or: from theano import OpFromGraph
def set_th_op(self, output):
    interpolator = self.model._interpolator
    out = self.get_output(output)

    i = interpolator.get_python_input_block()
    theano.config.compute_test_value = 'ignore'
    # Wrap the full interpolator graph as a single reusable Op
    self.th_op = theano.OpFromGraph(interpolator.theano_graph.input_parameters_loop,
                                    [out],
                                    inline=False,
                                    on_unused_input='ignore',
                                    name=output)
    return self.th_op
Author: cgre-aachen | Project: gempy | Lines: 14 | Source: theano_op.py
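
Note the contrast with Example 1, which comes from the same file: there the Op is created with inline=True, so Theano expands the wrapped subgraph back into the caller's graph during optimization, while inline=False here keeps it as a separately compiled unit. A minimal illustration of the flag:

import theano
import theano.tensor as T

x = T.scalar('x')
# inline=True: the subgraph is expanded into the outer graph when optimizing
op_inlined = theano.OpFromGraph([x], [x ** 2], inline=True)
# inline=False (the default): the subgraph stays a pre-compiled black box
op_opaque = theano.OpFromGraph([x], [x ** 2], inline=False)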


Note: The theano.OpFromGraph examples in this article were compiled by 纯净天空 (vimsky) from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers; copyright of the source code remains with the original authors, and distribution and use should follow each project's license. Do not reproduce without permission.