This page collects typical usage examples of the Python method openmdao.main.expreval.ExprEvaluator.evaluate_gradient. If you are wondering how ExprEvaluator.evaluate_gradient is used in practice, the selected code examples below should help. You can also browse further usage examples of the containing class, openmdao.main.expreval.ExprEvaluator.
Six code examples of ExprEvaluator.evaluate_gradient are shown below, sorted by popularity by default. You can upvote the examples you find useful; your votes help the system recommend better Python code examples.
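As a quick orientation before the numbered examples, here is a minimal sketch of the calling pattern they all share: build an ExprEvaluator from an expression string and a scope, then call evaluate_gradient to get a dictionary of derivatives keyed by variable path. The sketch borrows the Simple component and the comp1.c value from Example 5; that component's definition is not shown on this page, so treat it as a placeholder for any component with a float output named c.

# Minimal sketch of the usage pattern shared by the examples below.
# 'Simple' is the test component from Example 5; its definition is not shown
# on this page, so it stands in for any component with a float output 'c'.
from openmdao.main.api import Assembly, set_as_top
from openmdao.main.expreval import ExprEvaluator

top = set_as_top(Assembly())
top.add('comp1', Simple())                       # component providing output 'c'
top.run()

exp = ExprEvaluator('3.0*comp1.c', top.driver)   # expression string + scope for name resolution
grad = exp.evaluate_gradient(scope=top)          # dict keyed by variable path, e.g. {'comp1.c': 3.0}

# Optional keyword arguments that appear in the examples:
#   stepsize -- step size for numerically approximated gradients (Example 5's
#               comments suggest simple expressions are differentiated analytically)
#   wrt      -- restrict the result to the listed variable paths
grad = exp.evaluate_gradient(scope=top, stepsize=1.0e-6, wrt=['comp1.c'])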
Example 1: Constraint
# Required import: from openmdao.main.expreval import ExprEvaluator [as alias]
# Or: from openmdao.main.expreval.ExprEvaluator import evaluate_gradient [as alias]
class Constraint(object):
    """ Object that stores info for a single constraint. """

    def __init__(self, lhs, comparator, rhs, scaler, adder, scope=None):
        self.lhs = ExprEvaluator(lhs, scope=scope)
        if not self.lhs.check_resolve():
            raise ValueError("Constraint '%s' has an invalid left-hand-side." \
                              % ' '.join([lhs, comparator, rhs]))
        self.comparator = comparator
        self.rhs = ExprEvaluator(rhs, scope=scope)
        if not self.rhs.check_resolve():
            raise ValueError("Constraint '%s' has an invalid right-hand-side." \
                              % ' '.join([lhs, comparator, rhs]))
        if not isinstance(scaler, float):
            raise ValueError("Scaler parameter should be a float")
        self.scaler = scaler
        if scaler <= 0.0:
            raise ValueError("Scaler parameter should be a float > 0")
        if not isinstance(adder, float):
            raise ValueError("Adder parameter should be a float")
        self.adder = adder

    def evaluate(self, scope):
        """Returns a tuple of the form (lhs, rhs, comparator, is_violated)."""
        lhs = (self.lhs.evaluate(scope) + self.adder)*self.scaler
        rhs = (self.rhs.evaluate(scope) + self.adder)*self.scaler
        return (lhs, rhs, self.comparator, not _ops[self.comparator](lhs, rhs))

    def evaluate_gradient(self, scope, stepsize=1.0e-6, wrt=None):
        """Returns the gradient of the constraint eq/ineq as a tuple of the
        form (lhs, rhs, comparator, is_violated)."""
        lhs = self.lhs.evaluate_gradient(scope=scope, stepsize=stepsize, wrt=wrt)
        for key, value in lhs.iteritems():
            lhs[key] = (value + self.adder)*self.scaler
        rhs = self.rhs.evaluate_gradient(scope=scope, stepsize=stepsize, wrt=wrt)
        for key, value in rhs.iteritems():
            rhs[key] = (value + self.adder)*self.scaler
        return (lhs, rhs, self.comparator, not _ops[self.comparator](lhs, rhs))

    def get_referenced_compnames(self):
        return self.lhs.get_referenced_compnames().union(
            self.rhs.get_referenced_compnames())

    def __str__(self):
        return ' '.join([self.lhs.text, self.comparator, self.rhs.text])
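To make Example 1 concrete, here is a small hedged usage sketch. The _ops mapping (comparator string to comparison function), the scope top, and the constraint values are illustrative assumptions, not part of the excerpt above.

# Hypothetical usage of the Constraint class from Example 1, assuming a scope
# 'top' in which comp1.c resolves to 7.0 (as in Example 5) and an _ops dict
# such as {'<': operator.lt, '>': operator.gt, '<=': operator.le, '>=': operator.ge}.
con = Constraint('comp1.c', '<', '10.0', scaler=1.0, adder=0.0, scope=top)

lhs, rhs, comparator, violated = con.evaluate(top)
# lhs == 7.0, rhs == 10.0, comparator == '<', violated == False (7 < 10 holds)

lhs_grad, rhs_grad, comparator, _ = con.evaluate_gradient(top)
# lhs_grad is a dict such as {'comp1.c': 1.0}; rhs_grad is the matching dict for
# the right-hand side (likely empty, since '10.0' references no variables).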
Example 2: test_eval_gradient_lots_of_vars
# Required import: from openmdao.main.expreval import ExprEvaluator [as alias]
# Or: from openmdao.main.expreval.ExprEvaluator import evaluate_gradient [as alias]
def test_eval_gradient_lots_of_vars(self):
    top = set_as_top(Assembly())
    top.add('comp1', B())

    # build expr
    expr = "2*comp1.in1 + 3*comp1.in11"
    exp = ExprEvaluator(expr, top.driver)
    grad = exp.evaluate_gradient(scope=top)
    assert_rel_error(self, grad['comp1.in1'], 2.0, 0.00001)
    assert_rel_error(self, grad['comp1.in11'], 3.0, 0.00001)

    expr = "asin(comp1.in1)"
    exp = ExprEvaluator(expr, top.driver)
    grad = exp.evaluate_gradient(scope=top)
    assert_rel_error(self, grad['comp1.in1'], 1.0, 0.00001)
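A brief note on the last assertion in Example 2: the derivative of asin(x) is 1/sqrt(1 - x**2), which equals 1.0 only at x = 0, so the expected gradient of 1.0 implies that comp1.in1 defaults to 0.0 in the B component (an inference from the expected value, not something shown on this page). The snippet below just confirms the arithmetic.

# Confirms the asin derivative asserted in Example 2, assuming comp1.in1 == 0.0
# (inferred from the expected gradient of 1.0; the B component is not shown here).
from math import sqrt

x = 0.0
d_asin = 1.0 / sqrt(1.0 - x**2)   # derivative of asin(x) at x
print(d_asin)                     # 1.0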
Example 3: test_eval_gradient_array
# Required import: from openmdao.main.expreval import ExprEvaluator [as alias]
# Or: from openmdao.main.expreval.ExprEvaluator import evaluate_gradient [as alias]
def test_eval_gradient_array(self):
    top = set_as_top(Assembly())
    top.add('comp1', A())
    top.run()

    # Uncomment these when arrays work
    exp = ExprEvaluator('4.0*comp1.b2d[0][1]*comp1.b2d[1][1]', top.driver)
    grad = exp.evaluate_gradient(scope=top)
    assert_rel_error(self, grad['comp1.b2d[0][1]'], 12.0, 0.00001)
    assert_rel_error(self, grad['comp1.b2d[1][1]'], 4.0, 0.00001)
Example 4: test_eval_gradient_array
# Required import: from openmdao.main.expreval import ExprEvaluator [as alias]
# Or: from openmdao.main.expreval.ExprEvaluator import evaluate_gradient [as alias]
def test_eval_gradient_array(self):
    top = set_as_top(Assembly())
    top.add('comp1', A())
    top.run()

    # Uncomment these when arrays work
    exp = ExprEvaluator('4.0*comp1.b2d[0][1]*comp1.b2d[1][1]', top.driver)
    grad = exp.evaluate_gradient(scope=top)
    assert_rel_error(self, grad['comp1.b2d[0][1]'], 12.0, 0.00001)
    assert_rel_error(self, grad['comp1.b2d[1][1]'], 4.0, 0.00001)

    exp = ExprEvaluator('comp1.c2d**2', top.driver)
    grad = exp.evaluate_gradient(scope=top)
    assert_rel_error(self, grad['comp1.c2d'][0,0], 0.0, 0.00001)
    assert_rel_error(self, grad['comp1.c2d'][1,1], 2.0, 0.00001)
    assert_rel_error(self, grad['comp1.c2d'][2,2], 4.0, 0.00001)
    assert_rel_error(self, grad['comp1.c2d'][3,3], 6.0, 0.00001)

    exp = ExprEvaluator('comp1.c1d**2', top.driver)
    grad = exp.evaluate_gradient(scope=top)
    assert_rel_error(self, grad['comp1.c1d'][0,0], 0.0, 0.00001)
    assert_rel_error(self, grad['comp1.c1d'][1,1], 2.0, 0.00001)
    assert_rel_error(self, grad['comp1.c1d'][2,2], 4.0, 0.00001)
    assert_rel_error(self, grad['comp1.c1d'][3,3], 6.0, 0.00001)

    exp = ExprEvaluator('comp1.a2d + comp1.c2d**2', top.driver)
    grad = exp.evaluate_gradient(scope=top)
    a2d_grad, c2d_grad = grad['comp1.a2d'], grad['comp1.c2d']
    assert_rel_error(self, a2d_grad[0,0], 1.0, 0.00001)
    assert_rel_error(self, a2d_grad[1,1], 1.0, 0.00001)
    assert_rel_error(self, a2d_grad[2,2], 1.0, 0.00001)
    assert_rel_error(self, a2d_grad[3,3], 1.0, 0.00001)
    assert_rel_error(self, c2d_grad[0,0], 0.0, 0.00001)
    assert_rel_error(self, c2d_grad[1,1], 2.0, 0.00001)
    assert_rel_error(self, c2d_grad[2,2], 4.0, 0.00001)
    assert_rel_error(self, c2d_grad[3,3], 6.0, 0.00001)
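Example 4 only checks the diagonal entries of the array gradients. The short numpy check below spells out why those diagonals come out as 0.0, 2.0, 4.0, 6.0: for an elementwise square the Jacobian is diag(2*x). The assumed values of comp1.c1d ([0., 1., 2., 3.]) are inferred from the expected results; the A component itself is not shown on this page.

# Analytic cross-check for the diagonal values asserted in Example 4.
# Assumes comp1.c1d == [0., 1., 2., 3.], which is consistent with the expected
# diagonal entries but is inferred, not shown, on this page.
import numpy as np

c1d = np.array([0., 1., 2., 3.])
jacobian = np.diag(2.0 * c1d)     # Jacobian of the elementwise square c1d**2

print(jacobian.diagonal())        # [0. 2. 4. 6.] -- matches the assertions above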
Example 5: test_eval_gradient
# Required import: from openmdao.main.expreval import ExprEvaluator [as alias]
# Or: from openmdao.main.expreval.ExprEvaluator import evaluate_gradient [as alias]
def test_eval_gradient(self):
    top = set_as_top(Assembly())
    top.add('comp1', Simple())
    top.run()

    exp = ExprEvaluator('3.0*comp1.c', top.driver)
    grad = exp.evaluate_gradient(scope=top)
    self.assertEqual(top.comp1.c, 7.0)
    assert_rel_error(self, grad['comp1.c'], 3.0, 0.00001)

    # Commented out this test, until we find a case that can't be
    # handled analytically
    # interface test: step size
    # (for linear slope, larger stepsize more accurate because of
    # python's rounding)
    #grad2 = exp.evaluate_gradient(scope=top, stepsize=0.1)
    #assert( abs(grad['comp1.c'] - 3.0) > abs(grad2['comp1.c'] - 3.0) )

    # More complicated, multiple comps
    top.add('comp2', Simple())
    exp = ExprEvaluator('comp2.b*comp1.c**2', top.driver)
    grad = exp.evaluate_gradient(scope=top)
    self.assertEqual(len(grad), 2)
    assert_rel_error(self, grad['comp1.c'], 70.0, 0.00001)
    assert_rel_error(self, grad['comp2.b'], 49.0, 0.00001)

    # test limited varset
    grad = exp.evaluate_gradient(scope=top, wrt=['comp2.b'])
    self.assertEqual(len(grad), 1)

    exp = ExprEvaluator('pow(comp2.b,2)', top.driver)
    grad = exp.evaluate_gradient(scope=top)
    assert_rel_error(self, grad['comp2.b'], 10.0, 0.00001)

    exp = ExprEvaluator('pow(comp2.b,3)', top.driver)
    grad = exp.evaluate_gradient(scope=top)
    assert_rel_error(self, grad['comp2.b'], 75.0, 0.00001)

    exp = ExprEvaluator('log(comp2.a)', top.driver)
    grad = exp.evaluate_gradient(scope=top)
    assert_rel_error(self, grad['comp2.a'], 1./top.comp2.a, 0.00001)

    exp = ExprEvaluator('sin(cos(comp2.b))+sqrt(comp2.a)/comp1.c', top.driver)
    grad = exp.evaluate_gradient(scope=top)
    g1 = -sin(top.comp2.b)*cos(cos(top.comp2.b))  # true gradient components
    g2 = (2*sqrt(top.comp2.a)*top.comp1.c)**-1
    g3 = -sqrt(top.comp2.a)/top.comp1.c**2
    assert_rel_error(self, grad['comp2.b'], g1, 0.00001)
    assert_rel_error(self, grad['comp2.a'], g2, 0.00001)
    assert_rel_error(self, grad['comp1.c'], g3, 0.00001)
Example 6: Constraint
# Required import: from openmdao.main.expreval import ExprEvaluator [as alias]
# Or: from openmdao.main.expreval.ExprEvaluator import evaluate_gradient [as alias]
#......... Part of the code omitted here .........
        rhs and combine them into a single expression by moving rhs
        terms over to the lhs. For example,
        for the constraint 'C1.x < C2.y + 7', return the expression
        'C1.x - C2.y - 7'. Depending on the direction of the operator,
        the sign of the expression may be flipped. The final form of
        the constraint, when evaluated, will be considered to be satisfied
        if it evaluates to a value <= 0.
        """
        scope = self.lhs.scope

        if self.comparator.startswith('>'):
            first = self.rhs.text
            second = self.lhs.text
        else:
            first = self.lhs.text
            second = self.rhs.text

        first_zero = False
        try:
            f = float(first)
        except Exception:
            pass
        else:
            if f == 0:
                first_zero = True

        second_zero = False
        try:
            f = float(second)
        except Exception:
            pass
        else:
            if f == 0:
                second_zero = True

        if first_zero:
            newexpr = "-(%s)" % second
        elif second_zero:
            newexpr = "%s" % first
        else:
            newexpr = '%s-(%s)' % (first, second)

        return ExprEvaluator(newexpr, scope)

    def copy(self):
        return Constraint(str(self.lhs), self.comparator, str(self.rhs),
                          scope=self.lhs.scope)

    def evaluate(self, scope):
        """Returns the value of the constraint as a sequence."""
        pcomp = getattr(scope, self.pcomp_name)
        if not pcomp.is_valid():
            pcomp.update_outputs(['out0'])
        val = pcomp.out0
        if isinstance(val, ndarray):
            return val.flatten()
        else:
            return [val]

    def evaluate_gradient(self, scope, stepsize=1.0e-6, wrt=None):
        """Returns the gradient of the constraint eq/ineq as a tuple of the
        form (lhs, rhs, comparator, is_violated)."""
        lhs = self.lhs.evaluate_gradient(scope=scope, stepsize=stepsize, wrt=wrt)
        if isinstance(self.rhs, float):
            rhs = 0.
        else:
            rhs = self.rhs.evaluate_gradient(scope=scope, stepsize=stepsize, wrt=wrt)
        return (lhs, rhs, self.comparator, not _ops[self.comparator](lhs, rhs))

    def get_referenced_compnames(self):
        """Returns a set of names of each component referenced by this
        constraint.
        """
        if isinstance(self.rhs, float):
            return self.lhs.get_referenced_compnames()
        else:
            return self.lhs.get_referenced_compnames().union(
                self.rhs.get_referenced_compnames())

    def get_referenced_varpaths(self, copy=True):
        """Returns a set of names of each variable referenced by this
        constraint.
        """
        if isinstance(self.rhs, float):
            return self.lhs.get_referenced_varpaths(copy=copy)
        else:
            return self.lhs.get_referenced_varpaths(copy=copy).union(
                self.rhs.get_referenced_varpaths(copy=copy))

    def __str__(self):
        return ' '.join([str(self.lhs), self.comparator, str(self.rhs)])

    def __eq__(self, other):
        if not isinstance(other, Constraint):
            return False
        return (self.lhs, self.comparator, self.rhs) == \
               (other.lhs, other.comparator, other.rhs)
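The docstring in the Example 6 excerpt describes folding a two-sided constraint into a single expression that is considered satisfied when it evaluates to <= 0. The stand-alone sketch below replays just the string-building part of that logic on plain strings, without an ExprEvaluator or scope, to illustrate how the comparator direction flips the sign; the zero-valued-side shortcuts in the real code are omitted.

# Stand-alone illustration of the combined-expression idea from Example 6's
# docstring. Only the string handling is mimicked; the real method also special-
# cases a zero-valued side and returns an ExprEvaluator bound to a scope.
def combined_text(lhs, comparator, rhs):
    if comparator.startswith('>'):
        first, second = rhs, lhs      # '>' flips which side is subtracted
    else:
        first, second = lhs, rhs
    return '%s-(%s)' % (first, second)

print(combined_text('C1.x', '<', 'C2.y + 7'))   # C1.x-(C2.y + 7)  -> satisfied if <= 0
print(combined_text('C1.x', '>', 'C2.y + 7'))   # C2.y + 7-(C1.x)  -> satisfied if <= 0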