

Python Problem.calc_gradient Method Code Examples

This article collects typical usage examples of the Python method openmdao.core.Problem.calc_gradient. If you are wondering how to call Problem.calc_gradient, what it returns, or how it is used in practice, the curated examples below should help. You can also browse further usage examples of the containing class, openmdao.core.Problem.


The 15 code examples of Problem.calc_gradient shown below are sorted by popularity.
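
Before the collected examples, here is a minimal, self-contained sketch of the typical calling pattern. It is an illustration assembled from the patterns in the examples below, not one of the collected examples itself, and it assumes an OpenMDAO 1.x-style API in which Problem, Group, IndepVarComp, and ExecComp can be imported from openmdao.api (older snippets below import from openmdao.core instead):

from openmdao.api import Problem, Group, IndepVarComp, ExecComp

# Build a tiny model: an independent variable p.x feeding comp.y = 2*x.
prob = Problem()
prob.root = Group()
prob.root.add('p', IndepVarComp('x', 3.0))
prob.root.add('comp', ExecComp('y = 2.0*x'))
prob.root.connect('p.x', 'comp.x')

prob.setup(check=False)
prob.run()

# calc_gradient returns the Jacobian of the listed unknowns with respect to the
# listed independent variables; with return_format='dict' it is keyed as
# J[unknown][param], and mode selects forward ('fwd'), reverse ('rev'), or
# finite-difference ('fd') derivatives.
J = prob.calc_gradient(['p.x'], ['comp.y'], mode='fwd', return_format='dict')
print(J['comp.y']['p.x'])   # expected: [[2.0]]

The examples that follow exercise this same call against larger models, different linear solvers, and all three derivative modes.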

Example 1: test_converge_diverge_compfd

# Required import: from openmdao.core import Problem [as alias]
# Or: from openmdao.core.Problem import calc_gradient [as alias]
    def test_converge_diverge_compfd(self):

        prob = Problem(impl=impl)
        prob.root = ConvergeDivergePar()
        prob.root.ln_solver = PetscKSP()

        # Force finite difference on comp2 and comp5; each sits under a parallel group.
        prob.root.par1.comp2.fd_options['force_fd'] = True
        prob.root.par2.comp5.fd_options['force_fd'] = True

        prob.setup(check=False)
        prob.run()

        # Make sure value is fine.
        assert_rel_error(self, prob['comp7.y1'], -102.7, 1e-6)

        indep_list = ['p.x']
        unknown_list = ['comp7.y1']

        J = prob.calc_gradient(indep_list, unknown_list, mode='fwd', return_format='dict')
        assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)

        J = prob.calc_gradient(indep_list, unknown_list, mode='rev', return_format='dict')
        assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)

        J = prob.calc_gradient(indep_list, unknown_list, mode='fd', return_format='dict')
        assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)
Developer: briantomko, Project: OpenMDAO, Lines: 29, Source: test_mpi_derivs.py

Example 2: test_double_arraycomp

# Required import: from openmdao.core import Problem [as alias]
# Or: from openmdao.core.Problem import calc_gradient [as alias]
    def test_double_arraycomp(self):
        # Mainly testing a bug in the array return for multiple arrays

        group = Group()
        group.add('x_param1', IndepVarComp('x1', np.ones((2))), promotes=['*'])
        group.add('x_param2', IndepVarComp('x2', np.ones((2))), promotes=['*'])
        group.add('mycomp', DoubleArrayComp(), promotes=['*'])

        prob = Problem(impl=impl)
        prob.root = group
        prob.root.ln_solver = PetscKSP()
        prob.setup(check=False)
        prob.run()

        Jbase = group.mycomp.JJ

        J = prob.calc_gradient(['x1', 'x2'], ['y1', 'y2'], mode='fwd',
                               return_format='array')
        diff = np.linalg.norm(J - Jbase)
        assert_rel_error(self, diff, 0.0, 1e-8)

        J = prob.calc_gradient(['x1', 'x2'], ['y1', 'y2'], mode='fd',
                               return_format='array')
        diff = np.linalg.norm(J - Jbase)
        assert_rel_error(self, diff, 0.0, 1e-8)

        J = prob.calc_gradient(['x1', 'x2'], ['y1', 'y2'], mode='rev',
                               return_format='array')
        diff = np.linalg.norm(J - Jbase)
        assert_rel_error(self, diff, 0.0, 1e-8)
Developer: briantomko, Project: OpenMDAO, Lines: 32, Source: test_petsc_ksp.py

Example 3: test_fan_out_grouped

# Required import: from openmdao.core import Problem [as alias]
# Or: from openmdao.core.Problem import calc_gradient [as alias]
    def test_fan_out_grouped(self):

        prob = Problem(impl=impl)
        prob.root = root = Group()

        root.add('p', IndepVarComp('x', 1.0))
        root.add('comp1', ExecComp(['y=3.0*x']))

        sub = root.add('sub', ParallelGroup())
        sub.add('comp2', ExecComp(['y=-2.0*x']))
        sub.add('comp3', ExecComp(['y=5.0*x']))

        root.add('c2', ExecComp(['y=-x']))
        root.add('c3', ExecComp(['y=3.0*x']))
        root.connect('sub.comp2.y', 'c2.x')
        root.connect('sub.comp3.y', 'c3.x')

        root.connect("comp1.y", "sub.comp2.x")
        root.connect("comp1.y", "sub.comp3.x")
        root.connect("p.x", "comp1.x")

        prob.root.ln_solver = LinearGaussSeidel()
        prob.root.sub.ln_solver = LinearGaussSeidel()

        prob.setup(check=False)
        prob.run()

        param = 'p.x'
        unknown_list = ['sub.comp2.y', "sub.comp3.y"]

        J = prob.calc_gradient([param], unknown_list, mode='fwd', return_format='dict')

        assert_rel_error(self, J[unknown_list[0]][param][0][0], -6.0, 1e-6)
        assert_rel_error(self, J[unknown_list[1]][param][0][0], 15.0, 1e-6)

        J = prob.calc_gradient([param], unknown_list, mode='rev', return_format='dict')
        assert_rel_error(self, J[unknown_list[0]][param][0][0], -6.0, 1e-6)
        assert_rel_error(self, J[unknown_list[1]][param][0][0], 15.0, 1e-6)

        unknown_list = ['c2.y', "c3.y"]

        J = prob.calc_gradient([param], unknown_list, mode='fwd', return_format='dict')

        assert_rel_error(self, J[unknown_list[0]][param][0][0], 6.0, 1e-6)
        assert_rel_error(self, J[unknown_list[1]][param][0][0], 45.0, 1e-6)

        J = prob.calc_gradient([param], unknown_list, mode='rev', return_format='dict')
        assert_rel_error(self, J[unknown_list[0]][param][0][0], 6.0, 1e-6)
        assert_rel_error(self, J[unknown_list[1]][param][0][0], 45.0, 1e-6)
Developer: briantomko, Project: OpenMDAO, Lines: 51, Source: test_mpi_derivs.py

Example 4: test_derivatives

# Required import: from openmdao.core import Problem [as alias]
# Or: from openmdao.core.Problem import calc_gradient [as alias]
    def test_derivatives(self):
        meta = MetaModel()
        meta.add_param('x', 0.)
        meta.add_output('f', 0.)
        meta.default_surrogate = FloatKrigingSurrogate()

        prob = Problem(Group())
        prob.root.add('meta', meta, promotes=['x'])
        prob.root.add('p', IndepVarComp('x', 0.), promotes=['x'])
        prob.setup(check=False)

        prob['meta.train:x'] = [0., .25, .5, .75, 1.]
        prob['meta.train:f'] = [1., .75, .5, .25, 0.]
        prob['x'] = 0.125
        prob.run()

        Jf = prob.calc_gradient(['x'], ['meta.f'], mode='fwd')
        Jr = prob.calc_gradient(['x'], ['meta.f'], mode='rev')

        assert_rel_error(self, Jf[0][0], -1.00011, 1.0e-5)
        assert_rel_error(self, Jr[0][0], -1.00011, 1.0e-5)

        stream = cStringIO()
        prob.check_partial_derivatives(out_stream=stream)

        abs_errors = findall(r'Absolute Error \(.+\) : (.+)', stream.getvalue())
        self.assertTrue(len(abs_errors) > 0)
        for match in abs_errors:
            abs_error = float(match)
            self.assertTrue(abs_error < 1e-6)
Developer: briantomko, Project: OpenMDAO, Lines: 32, Source: test_meta_model.py

Example 5: test_complex_step2

# Required import: from openmdao.core import Problem [as alias]
# Or: from openmdao.core.Problem import calc_gradient [as alias]
    def test_complex_step2(self):
        prob = Problem(Group())
        comp = prob.root.add('comp', ExecComp('y=x*x + x*2.0'))
        prob.root.add('p1', ParamComp('x', 2.0))
        prob.root.connect('p1.x', 'comp.x')

        comp.fd_options['force_fd'] = False

        prob.setup(check=False)
        prob.run()

        J = prob.calc_gradient(['p1.x'], ['comp.y'], mode='fwd', return_format='dict')
        assert_rel_error(self, J['comp.y']['p1.x'], np.array([6.0]), 0.00001)

        J = prob.calc_gradient(['p1.x'], ['comp.y'], mode='rev', return_format='dict')
        assert_rel_error(self, J['comp.y']['p1.x'], np.array([6.0]), 0.00001)
Developer: kishenr12, Project: OpenMDAO, Lines: 18, Source: test_exec_comp.py

Example 6: test_simple_jac

# Required import: from openmdao.core import Problem [as alias]
# Or: from openmdao.core.Problem import calc_gradient [as alias]
    def test_simple_jac(self):
        group = Group()
        group.add('x_param', IndepVarComp('x', 1.0), promotes=['*'])
        group.add('mycomp', ExecComp(['y=2.0*x']), promotes=['x', 'y'])

        prob = Problem()
        prob.root = group
        prob.root.ln_solver = DirectSolver()
        prob.setup(check=False)
        prob.run()

        J = prob.calc_gradient(['x'], ['y'], mode='fwd', return_format='dict')
        assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)

        J = prob.calc_gradient(['x'], ['y'], mode='rev', return_format='dict')
        assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)
Developer: briantomko, Project: OpenMDAO, Lines: 18, Source: test_ln_explicit.py

Example 7: test_simple

# Required import: from openmdao.core import Problem [as alias]
# Or: from openmdao.core.Problem import calc_gradient [as alias]
    def test_simple(self):
        group = Group()
        group.add('x_param', IndepVarComp('x', 1.0), promotes=['*'])
        group.add('mycomp', SimpleCompDerivMatVec(), promotes=['x', 'y'])

        prob = Problem(impl=impl)
        prob.root = group
        prob.root.ln_solver = PetscKSP()
        prob.setup(check=False)
        prob.run()

        J = prob.calc_gradient(['x'], ['y'], mode='fwd', return_format='dict')
        assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)

        J = prob.calc_gradient(['x'], ['y'], mode='rev', return_format='dict')
        assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)
Developer: briantomko, Project: OpenMDAO, Lines: 18, Source: test_petsc_ksp.py

Example 8: test_simple_in_group_matvec

# Required import: from openmdao.core import Problem [as alias]
# Or: from openmdao.core.Problem import calc_gradient [as alias]
    def test_simple_in_group_matvec(self):
        group = Group()
        sub = group.add('sub', Group(), promotes=['x', 'y'])
        group.add('x_param', ParamComp('x', 1.0), promotes=['*'])
        sub.add('mycomp', SimpleCompDerivMatVec(), promotes=['x', 'y'])

        prob = Problem()
        prob.root = group
        prob.root.ln_solver = ExplicitSolver()
        prob.setup(check=False)
        prob.run()

        J = prob.calc_gradient(['x'], ['y'], mode='fwd', return_format='dict')
        assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)

        J = prob.calc_gradient(['x'], ['y'], mode='rev', return_format='dict')
        assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)
Developer: jcchin, Project: project_clippy, Lines: 19, Source: test_ln_explicit.py

Example 9: test_fan_out

# Required import: from openmdao.core import Problem [as alias]
# Or: from openmdao.core.Problem import calc_gradient [as alias]
    def test_fan_out(self):

        prob = Problem()
        prob.root = FanOut()
        prob.root.ln_solver = DirectSolver()
        prob.setup(check=False)
        prob.run()

        indep_list = ['p.x']
        unknown_list = ['comp2.y', "comp3.y"]

        J = prob.calc_gradient(indep_list, unknown_list, mode='fwd', return_format='dict')
        assert_rel_error(self, J['comp2.y']['p.x'][0][0], -6.0, 1e-6)
        assert_rel_error(self, J['comp3.y']['p.x'][0][0], 15.0, 1e-6)

        J = prob.calc_gradient(indep_list, unknown_list, mode='rev', return_format='dict')
        assert_rel_error(self, J['comp2.y']['p.x'][0][0], -6.0, 1e-6)
        assert_rel_error(self, J['comp3.y']['p.x'][0][0], 15.0, 1e-6)
Developer: briantomko, Project: OpenMDAO, Lines: 20, Source: test_ln_explicit.py

Example 10: test_single_diamond

# Required import: from openmdao.core import Problem [as alias]
# Or: from openmdao.core.Problem import calc_gradient [as alias]
    def test_single_diamond(self):

        prob = Problem(impl=impl)
        prob.root = SingleDiamond()
        prob.root.ln_solver = PetscKSP()
        prob.setup(check=False)
        prob.run()

        indep_list = ['p.x']
        unknown_list = ['comp4.y1', 'comp4.y2']

        J = prob.calc_gradient(indep_list, unknown_list, mode='fwd', return_format='dict')
        assert_rel_error(self, J['comp4.y1']['p.x'][0][0], 25, 1e-6)
        assert_rel_error(self, J['comp4.y2']['p.x'][0][0], -40.5, 1e-6)

        J = prob.calc_gradient(indep_list, unknown_list, mode='rev', return_format='dict')
        assert_rel_error(self, J['comp4.y1']['p.x'][0][0], 25, 1e-6)
        assert_rel_error(self, J['comp4.y2']['p.x'][0][0], -40.5, 1e-6)
Developer: briantomko, Project: OpenMDAO, Lines: 20, Source: test_petsc_ksp.py

Example 11: test_fan_in

# Required import: from openmdao.core import Problem [as alias]
# Or: from openmdao.core.Problem import calc_gradient [as alias]
    def test_fan_in(self):

        prob = Problem(impl=impl)
        prob.root = FanIn()
        prob.root.ln_solver = PetscKSP()
        prob.setup(check=False)
        prob.run()

        indep_list = ['p1.x1', 'p2.x2']
        unknown_list = ['comp3.y']

        J = prob.calc_gradient(indep_list, unknown_list, mode='fwd', return_format='dict')
        assert_rel_error(self, J['comp3.y']['p1.x1'][0][0], -6.0, 1e-6)
        assert_rel_error(self, J['comp3.y']['p2.x2'][0][0], 35.0, 1e-6)

        J = prob.calc_gradient(indep_list, unknown_list, mode='rev', return_format='dict')
        assert_rel_error(self, J['comp3.y']['p1.x1'][0][0], -6.0, 1e-6)
        assert_rel_error(self, J['comp3.y']['p2.x2'][0][0], 35.0, 1e-6)
Developer: briantomko, Project: OpenMDAO, Lines: 20, Source: test_petsc_ksp.py

Example 12: test_no_derivatives

# Required import: from openmdao.core import Problem [as alias]
# Or: from openmdao.core.Problem import calc_gradient [as alias]
    def test_no_derivatives(self):

        prob = Problem()
        prob.root = Group()
        comp = prob.root.add('comp', ExecComp('y=x*2.0'))
        prob.root.add('p1', ParamComp('x', 2.0))
        prob.root.connect('p1.x', 'comp.x')

        comp.fd_options['force_fd'] = True

        prob.setup(check=False)
        prob.run()

        J = prob.calc_gradient(['p1.x'], ['comp.y'], mode='fwd', return_format='dict')
        assert_rel_error(self, J['comp.y']['p1.x'][0][0], 2.0, 1e-6)

        J = prob.calc_gradient(['p1.x'], ['comp.y'], mode='rev', return_format='dict')
        assert_rel_error(self, J['comp.y']['p1.x'][0][0], 2.0, 1e-6)
Developer: jcchin, Project: project_clippy, Lines: 20, Source: test_comp_fd_jacobian.py

Example 13: test_fan_in_grouped

# Required import: from openmdao.core import Problem [as alias]
# Or: from openmdao.core.Problem import calc_gradient [as alias]
    def test_fan_in_grouped(self):

        prob = Problem()
        prob.root = FanInGrouped()
        prob.root.ln_solver = ExplicitSolver()
        prob.setup(check=False)
        prob.run()

        param_list = ['p1.x1', 'p2.x2']
        unknown_list = ['comp3.y']

        J = prob.calc_gradient(param_list, unknown_list, mode='fwd', return_format='dict')
        assert_rel_error(self, J['comp3.y']['p1.x1'][0][0], -6.0, 1e-6)
        assert_rel_error(self, J['comp3.y']['p2.x2'][0][0], 35.0, 1e-6)

        J = prob.calc_gradient(param_list, unknown_list, mode='rev', return_format='dict')
        assert_rel_error(self, J['comp3.y']['p1.x1'][0][0], -6.0, 1e-6)
        assert_rel_error(self, J['comp3.y']['p2.x2'][0][0], 35.0, 1e-6)
Developer: jcchin, Project: project_clippy, Lines: 20, Source: test_ln_explicit.py

Example 14: test_fan_out_grouped

# Required import: from openmdao.core import Problem [as alias]
# Or: from openmdao.core.Problem import calc_gradient [as alias]
    def test_fan_out_grouped(self):

        prob = Problem(impl=impl)
        prob.root = FanOutGrouped()
        prob.root.ln_solver = PetscKSP()
        prob.setup(check=False)
        prob.run()

        indep_list = ['p.x']
        unknown_list = ['sub.comp2.y', "sub.comp3.y"]

        J = prob.calc_gradient(indep_list, unknown_list, mode='fwd', return_format='dict')
        assert_rel_error(self, J['sub.comp2.y']['p.x'][0][0], -6.0, 1e-6)
        assert_rel_error(self, J['sub.comp3.y']['p.x'][0][0], 15.0, 1e-6)

        J = prob.calc_gradient(indep_list, unknown_list, mode='rev', return_format='dict')
        assert_rel_error(self, J['sub.comp2.y']['p.x'][0][0], -6.0, 1e-6)
        assert_rel_error(self, J['sub.comp3.y']['p.x'][0][0], 15.0, 1e-6)
Developer: briantomko, Project: OpenMDAO, Lines: 20, Source: test_petsc_ksp.py

Example 15: test_fd_options_meta_step_size

# Required import: from openmdao.core import Problem [as alias]
# Or: from openmdao.core.Problem import calc_gradient [as alias]
    def test_fd_options_meta_step_size(self):

        class MetaParaboloid(Component):
            """ Evaluates the equation f(x,y) = (x-3)^2 + xy + (y+4)^2 - 3 """

            def __init__(self):
                super(MetaParaboloid, self).__init__()

                # Params
                self.add_param('x', 1.0, fd_step_size = 1.0e5)
                self.add_param('y', 1.0, fd_step_size = 1.0e5)

                # Unknowns
                self.add_output('f_xy', 0.0)

            def solve_nonlinear(self, params, unknowns, resids):
                """f(x,y) = (x-3)^2 + xy + (y+4)^2 - 3
                Optimal solution (minimum): x = 6.6667; y = -7.3333
                """

                x = params['x']
                y = params['y']

                f_xy = ((x-3.0)**2 + x*y + (y+4.0)**2 - 3.0)
                unknowns['f_xy'] = f_xy

            def jacobian(self, params, unknowns, resids):
                """Analytical derivatives"""

                x = params['x']
                y = params['y']
                J = {}

                J['f_xy', 'x'] = (2.0*x - 6.0 + y)
                J['f_xy', 'y'] = (2.0*y + 8.0 + x)

                return J

        prob = Problem()
        prob.root = Group()
        comp = prob.root.add('comp', MetaParaboloid())
        prob.root.add('p1', ParamComp('x', 15.0))
        prob.root.add('p2', ParamComp('y', 15.0))
        prob.root.connect('p1.x', 'comp.x')
        prob.root.connect('p2.y', 'comp.y')

        comp.fd_options['force_fd'] = True

        prob.setup(check=False)
        prob.run()

        # Make sure the (deliberately bad) per-parameter step size is used.
        # The finite-difference derivative should come out far too large.

        J = prob.calc_gradient(['p1.x'], ['comp.f_xy'], return_format='dict')
        self.assertGreater(J['comp.f_xy']['p1.x'][0][0], 1000.0)
Developer: jcchin, Project: project_clippy, Lines: 58, Source: test_comp_fd_jacobian.py


Note: The openmdao.core.Problem.calc_gradient examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from community-contributed open-source projects; copyright remains with the original authors, and redistribution or use should follow the corresponding project's license. Do not reproduce without permission.