This article collects typical code examples showing how the polynomial.Polynomial.coordinates method is used in Python. If you have been wondering what Polynomial.coordinates does, how to call it, or what it looks like in real code, the curated samples below may help. You can also explore further usage examples of its containing class, polynomial.Polynomial.
Below, 13 code examples of the Polynomial.coordinates method are shown, sorted by popularity by default.
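For orientation before the full examples, here is a minimal sketch of the basic call pattern, inferred from the snippets below rather than from any official documentation; the ctype keyword, callable evaluation, and the partial_derivatives method are assumptions drawn from those snippets, and the code follows the Python 2 print style used throughout this page.

from polynomial import Polynomial

# Create two symbolic coordinate variables (assumes the polynomial package
# used in the examples below is on the import path).
x, y = Polynomial.coordinates(2, ctype=float)

# Polynomials are built with ordinary arithmetic and evaluated by calling them.
f = x**2 + y**2 - 1
print f(1., 0.)                # 0.0 at a point on the unit circle (illustrative)

# Gradient polynomials, used repeatedly in the tests below.
print f.partial_derivatives()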
Example 1: test_orientation_reprojection
# Required import: from polynomial import Polynomial [as alias]
# Or: from polynomial.Polynomial import coordinates [as alias]
def test_orientation_reprojection(self):
    true_s = np.array([.1, .2, -.3])
    true_r = utils.cayley(true_s)
    true_k = 1. / (1. + np.dot(true_s, true_s))
    true_vars = np.r_[true_s, true_k]
    xs = np.random.rand(8, 3)
    true_ys = np.dot(xs, true_r.T)

    sym_vars = Polynomial.coordinates(4, ctype=float)
    x, y, z, w = sym_vars
    sym_s = sym_vars[:3]
    sym_k = sym_vars[3]
    sym_r = utils.cayley_mat(sym_s)
    sym_rd = utils.cayley_denom(sym_s)

    residuals = (np.dot(xs, sym_r.T) - true_ys * sym_rd).flatten()
    cost = sum(r**2 for r in residuals)
    gradients = cost.partial_derivatives()
    constraint = sym_k * (1 + np.dot(sym_s, sym_s)) - 1

    print 'Cost:', cost
    print 'Constraint:', constraint
    print 'At ground truth:'
    print ' Cost = ', cost(*true_vars)
    print ' Constraint = ', constraint(*true_vars)
    print ' Gradients = ', [p(*true_vars) for p in gradients]

    expansions = [solvers.all_monomials(sym_vars, 2) for _ in range(cost.num_vars)]
    for a in expansions:
        a.extend([z*z*w, x*x*w, y*y*w, z*z*w*w, z*w*w])

    minima = optimize.minimize_globally(cost,
                                        [constraint],
                                        expansions=expansions,
                                        diagnostic_solutions=[true_vars])
    print 'Minima: ', minima
Example 2: test_two_vars
# Required import: from polynomial import Polynomial [as alias]
# Or: from polynomial.Polynomial import coordinates [as alias]
def test_two_vars(self):
    x, y = Polynomial.coordinates(2)
    equations = [x**2 + y**2 - 1,
                 x-y]
    expansion_monomials = [
        [x, y],
        [x, y, x*y, x*x, y*y]
    ]
    result = solvers.solve_via_basis_selection(equations, expansion_monomials, x+y+1)
    expected_solutions = [np.sqrt([.5, .5]), -np.sqrt([.5, .5])]
    self.assert_solutions_found(result, expected_solutions)
Example 3: test_estimate_pose_and_depths
# Required import: from polynomial import Polynomial [as alias]
# Or: from polynomial.Polynomial import coordinates [as alias]
def test_estimate_pose_and_depths(self):
    np.random.seed(0)
    num_landmarks = 5

    # Generate ground truth
    true_cayleys = np.array([.1, .2, -.3])
    true_orientation = utils.cayley(true_cayleys)
    true_position = np.array([2., 3., 10.])

    # Generate observed quantities
    true_landmarks = np.random.randn(num_landmarks, 3)
    true_pfeatures = np.dot(true_landmarks - true_position, true_orientation.T)
    true_depths = np.sqrt(np.sum(np.square(true_pfeatures), axis=1))
    true_features = true_pfeatures / true_depths[:, None]
    true_vars = np.hstack((true_orientation.flatten(), true_position, true_depths))

    # Create symbolic quantities
    sym_vars = Polynomial.coordinates(12 + num_landmarks, ctype=float)
    sym_orientation = np.reshape(sym_vars[:9], (3, 3))
    sym_position = np.array(sym_vars[9:12])
    sym_depths = np.array(sym_vars[12:])

    residuals = []
    for i, (landmark, feature, sym_depth) in enumerate(zip(true_landmarks, true_features, sym_depths)):
        residual = np.dot(sym_orientation, landmark - sym_position) - sym_depth * feature
        residuals.extend(residual)

    constraints = (np.dot(sym_orientation.T, sym_orientation) - np.eye(3)).flatten()
    cost = sum(r**2 for r in residuals)
    gradients = cost.partial_derivatives()

    print 'Cost:', cost
    print 'Constraints:'
    for constraint in constraints:
        print ' ', constraint
    print 'At ground truth:'
    print ' Cost = ', cost(*true_vars)
    print ' Constraints = ', utils.evaluate_array(constraints, *true_vars)
    print ' Gradients = ', [p(*true_vars) for p in gradients]

    expansions = solvers.all_monomials(sym_vars, 2)
    minima = optimize.minimize_globally(cost,
                                        constraints,
                                        expansions=expansions,
                                        #diagnostic_solutions=[true_vars],
                                        )

    estimated_r = np.reshape(minima, (3, 3))
    error = np.linalg.norm(estimated_r - true_orientation)
    print 'Minima:\n', estimated_r
    print 'Ground truth:\n', true_orientation
    print 'Error:', error
Example 4: test_synthetic_ideal
# Required import: from polynomial import Polynomial [as alias]
# Or: from polynomial.Polynomial import coordinates [as alias]
def test_synthetic_ideal(self):
    zeros = [[-2., -3., 5., 6.], [4.5, 5., -1., 8.]]
    equations = ideal_from_variety(zeros, ctype=float)
    coords = Polynomial.coordinates(len(zeros[0]))
    expansion_monomials = solvers.all_monomials(coords, degree=1)
    lambda_poly = sum(xi * (i + 1) for i, xi in enumerate(coords)) + 1
    result = solvers.solve_via_basis_selection(
        equations,
        expansion_monomials,
        lambda_poly,
        diagnostic_solutions=[zeros[0]])
    self.assert_solutions_found(result, zeros)
Example 5: test_two_circles
# Required import: from polynomial import Polynomial [as alias]
# Or: from polynomial.Polynomial import coordinates [as alias]
def test_two_circles(self):
    x, y = Polynomial.coordinates(2)
    equations = [
        (x+2)**2 + (y+2)**2 - 25,
        (x-6)**2 + (y+2)**2 - 25,
    ]
    expansion_monomials = [x, y, x*y, x*x, y*y, x*x*y]
    expected_solutions = [(2, 1), (2, -5)]
    result = solvers.solve_via_basis_selection(equations,
                                               expansion_monomials,
                                               x+2*y-3,
                                               diagnostic_solutions=expected_solutions)
    self.assert_solutions_found(result, expected_solutions)
Example 6: find_critical_points
# Required import: from polynomial import Polynomial [as alias]
# Or: from polynomial.Polynomial import coordinates [as alias]
def find_critical_points(polynomial, constraints=None, expansions=2, lambda_poly=None, seed=0, **kwargs):
    """Find all local minima, local maxima, and saddle points of the given polynomial by solving the first order
    conditions."""
    system = polynomial.astype(float).partial_derivatives()
    system = filter(lambda p: p.total_degree > 0, system)
    if constraints is not None:
        system.extend(constraints)
    coords = Polynomial.coordinates(polynomial.num_vars, ctype=float)
    if lambda_poly is None:
        rng = random.Random(seed)  # use this rng for repeatability
        lambda_poly = sum(p * rng.random() for p in coords) + rng.random()
    if isinstance(expansions, numbers.Integral):
        expansions = [solvers.all_monomials(coords, expansions)] * len(system)
    return solvers.solve_via_basis_selection(system, expansions, lambda_poly, **kwargs)
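As a usage sketch (not part of the original code), find_critical_points could be applied to a polynomial with a known stationary point; the quadratic below and the expected outcome are illustrative assumptions, not verified output of the solver.

x, y = Polynomial.coordinates(2, ctype=float)

# A convex quadratic with a single critical point at (1, -2).
f = (x - 1)**2 + (y + 2)**2

# Solve the first-order conditions; the solver would be expected to recover (1, -2).
result = find_critical_points(f)
print result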
Example 7: test_three_circles
# Required import: from polynomial import Polynomial [as alias]
# Or: from polynomial.Polynomial import coordinates [as alias]
def test_three_circles(self):
    x, y, z = Polynomial.coordinates(3)
    equations = [
        (x-6)**2 + (y-1)**2 + (z-1)**2 - 25,
        (x-1)**2 + (y-6)**2 + (z-1)**2 - 25,
        (x-1)**2 + (y-1)**2 + (z-6)**2 - 25,
    ]
    expansion_monomials = solvers.all_monomials((x, y, z), degree=2)
    expected_solutions = [(1, 1, 1)]
    lambda_poly = x + 2*y + 3*z + 4
    result = solvers.solve_via_basis_selection(equations,
                                               expansion_monomials,
                                               lambda_poly,
                                               diagnostic_solutions=expected_solutions)
    self.assert_solutions_found(result, expected_solutions)
Example 8: test_three_vars
# Required import: from polynomial import Polynomial [as alias]
# Or: from polynomial.Polynomial import coordinates [as alias]
def test_three_vars(self):
    x, y, z = Polynomial.coordinates(3)
    equations = [
        x**2 + y**2 + z**2 - 1,
        x - y,
        x - z
    ]
    expansion_monomials = [
        [],
        [x, y, z],
        [x, y, z]
    ]
    result = solvers.solve_via_basis_selection(equations, expansion_monomials, x)
    point = np.sqrt(np.ones(3) / 3.)
    expected_solutions = [point, -point]
    self.assert_solutions_found(result, expected_solutions)
Example 9: test_estimate_orientation_9params
# Required import: from polynomial import Polynomial [as alias]
# Or: from polynomial.Polynomial import coordinates [as alias]
def test_estimate_orientation_9params(self):
    np.random.seed(0)
    true_s = np.array([.1, .2, -.3])
    true_r = utils.cayley(true_s)
    true_vars = true_r.flatten()
    observed_xs = np.random.rand(8, 3)
    observed_ys = np.dot(observed_xs, true_r.T)

    sym_vars = Polynomial.coordinates(9, ctype=float)
    sym_r = np.reshape(sym_vars, (3, 3))

    residuals = (np.dot(observed_xs, sym_r.T) - observed_ys).flatten()
    constraints = (np.dot(sym_r.T, sym_r) - np.eye(3)).flatten()
    cost = sum(r**2 for r in residuals)
    gradients = cost.partial_derivatives()

    print 'Cost:', cost
    print 'Constraints:'
    for constraint in constraints:
        print ' ', constraint
    print 'At ground truth:'
    print ' Cost = ', cost(*true_vars)
    print ' Constraints = ', utils.evaluate_array(constraints, *true_vars)
    print ' Gradients = ', [p(*true_vars) for p in gradients]

    expansions = [solvers.all_monomials(sym_vars, 2) for _ in range(cost.num_vars)]
    minima = optimize.minimize_globally(cost,
                                        constraints,
                                        expansions=expansions,
                                        #diagnostic_solutions=[true_vars],
                                        )

    estimated_r = np.reshape(minima, (3, 3))
    error = np.linalg.norm(estimated_r - true_r)
    print 'Minima:\n', estimated_r
    print 'Ground truth:\n', true_r
    print 'Error:', error
Example 10: test_range_optimization_2d
# Required import: from polynomial import Polynomial [as alias]
# Or: from polynomial.Polynomial import coordinates [as alias]
def test_range_optimization_2d(self):
    np.random.seed(0)
    bases = np.round(np.random.randn(2, 3) * 6.)
    true_position = np.array([-2., 1., 3.])
    true_ranges = np.array([np.linalg.norm(base - true_position) for base in bases])
    true_vars = np.hstack((true_position, true_ranges))

    sym_vars = Polynomial.coordinates(3 + len(bases), float)
    sym_position = sym_vars[:3]
    sym_ranges = sym_vars[3:]

    cost = sum((zz - z)**2 for zz, z in zip(sym_ranges, true_ranges))
    constraints = [np.sum(np.square(base - sym_position)) - sym_range*sym_range
                   for base, sym_range in zip(bases, sym_ranges)]
    lagrangian = make_langrangian(-cost, constraints)
    gradients = lagrangian.partial_derivatives()

    print 'Cost:', cost
    print 'Lagrangian:', lagrangian
    print 'Constraints:'
    for constraint in constraints:
        print ' ', constraint
    print 'Gradients:'
    for gradient in gradients:
        print ' ', gradient
    print 'At ground truth:'
    print ' Cost = ', cost(*true_vars)
    print ' Constraints = ', utils.evaluate_array(constraints, *true_vars)
    #print ' Gradients = ', [p(*true_vars) for p in gradients]

    expansions = 3  # solvers.all_monomials(sym_vars, 2) + [sym_vars[2]**3, sym_vars[0]**2*sym_vars[2]]
    minima = optimize.minimize_globally(-lagrangian, [], expansions=expansions, verbosity=2,
                                        diagnostic_solutions=[], include_grobner=False)

    error = np.linalg.norm(minima - true_position)
    print minima
    print true_position
    print 'Error:', error
Example 11: test_optimize_2d
# Required import: from polynomial import Polynomial [as alias]
# Or: from polynomial.Polynomial import coordinates [as alias]
def test_optimize_2d(self):
    np.random.seed(0)
    x, y = Polynomial.coordinates(2, float)
    fs = [
        x+1,
        y+1,
        x*x,
    ]
    true_solution = np.array([2., 5.])
    cost = sum((f(x, y) - f(*true_solution)) ** 2 for f in fs)

    import matplotlib.pyplot as plt
    dX, dY = np.meshgrid(np.linspace(-.1, .1, 50), np.linspace(-.1, .1, 50))
    X = true_solution[0] + dX
    Y = true_solution[1] + dY * 1j
    Z = np.abs(cost(X, Y))
    print np.min(Z), np.max(Z)
    plt.contourf(dX, dY, Z, levels=np.logspace(np.log10(np.min(Z)), np.log10(np.max(Z)), 16))
    plt.plot(0, 0, 'mx')
    plt.show()

    return

    gradients = cost.partial_derivatives()
    print 'Cost:', cost
    print 'Residuals:'
    for f in fs:
        print f(x, y) - f(*true_solution)
    print 'Gradients:'
    for gradient in gradients:
        print ' ', gradient(*true_solution)
    print 'Jacobian:'
    print polynomial_jacobian(fs)(*true_solution)

    expansions = 3  # solvers.all_monomials(sym_vars, 2) + [sym_vars[2]**3, sym_vars[0]**2*sym_vars[2]]
    minima = optimize.minimize_globally(cost, expansions=expansions, verbosity=2, constraints=fs[:2],
                                        #diagnostic_solutions=[true_vars]
                                        )
    np.testing.assert_array_almost_equal(minima, true_solution)
Example 12: solve_via_basis_selection
# Required import: from polynomial import Polynomial [as alias]
# Or: from polynomial.Polynomial import coordinates [as alias]
#......... part of the code omitted here .........
    # Check that lambda * b = action_b * b + action_r * r at solution
    for bi, b_row, r_row in zip(basis, c_action_b, c_action_r):
        assert len(r_row) == len(dependent)
        assert len(b_row) == len(basis)
        lhs = bi*lambda_poly
        rhs = (sum(cj*rj for cj, rj in zip(r_row, x_dependent)) +
               sum(cj*bj for cj, bj in zip(b_row, x_basis)))
        if diagnostic_solutions is not None:
            for solution in diagnostic_solutions:
                lvalue = lhs(*solution)
                rvalue = rhs(*solution)
                if verbosity >= 2:
                    print ' at %s, lhs=%s, rhs=%s' % (solution, lvalue, rvalue)
                if abs(lvalue - rvalue) > 1e-8:
                    raise DiagnosticError('expected action equation lhs (=%f) to match rhs (=%f) at %s:\n'
                                          ' lhs=%s\n rhs=%s' %
                                          (lvalue, rvalue, solution, lhs, rhs))

    # Compute action matrix form for lambda_poly * basis
    soln = np.linalg.solve(c1, c2)
    c_action = c_action_b - np.dot(c_action_r, soln)
    if verbosity >= 2:
        print 'Solution for required monomials:'
        print soln
        for row, monomial in zip(soln, dependent):
            print ' %s = - %s * basis {at points on variety}' % (as_monomial(monomial), row)

    # Check that basis + soln * required = 0 at diagnostic solutions
    if diagnostic_solutions is not None:
        c_solved = np.hstack((np.eye(len(required) + len(eliminated)), soln))
        for solution in diagnostic_solutions:
            values = np.dot(c_solved, evaluate_poly_vector(x_post, solution))
            if verbosity >= 2:
                print ' Evaluated at %s: %s' % (solution, values)
            if np.abs(values).max() > 1e-8:
                idx = np.abs(values).argmax()
                raise DiagnosticError('expected equation %d to evaluate to zero but received %f' % (idx, values[idx]))

    # Check that lambda * b = action * b at solution (note that lambda is a polynomial, not a matrix here)
    if verbosity >= 2 or diagnostic_solutions is not None:
        if verbosity >= 2:
            print 'Action matrix:'
            print c_action
        for bi, row in zip(basis, c_action):
            assert len(basis) == len(row)
            lhs = bi*lambda_poly
            rhs = sum(cj * bj for cj, bj in zip(row, x_basis))
            if verbosity >= 2:
                print ' %s * (%s) = %s = %s' % (as_term(bi, nvars), lambda_poly, lhs, rhs)
            if diagnostic_solutions is not None:
                for solution in diagnostic_solutions:
                    lvalue = lhs(*solution)
                    rvalue = rhs(*solution)
                    if verbosity >= 2:
                        print ' at %s, lhs=%s, rhs=%s' % (solution, lvalue, rvalue)
                    if abs(lvalue - rvalue) > 1e-8:
                        raise DiagnosticError('expected action equation lhs (=%f) to match rhs (=%f) at %s:\n'
                                              ' lhs=%s\n rhs=%s' %
                                              (lvalue, rvalue, solution, lhs, rhs))

    # Find indices within basis
    unit_index = basis.index(Polynomial.constant(1, nvars))

    # Compute eigenvalues and eigenvectors
    eigvals, eigvecs = np.linalg.eig(c_action)
    if verbosity >= 2:
        print 'Eigenvectors:'
        print eigvecs

    # Divide out the unit monomial row
    nrm = eigvecs[unit_index]
    mask = np.abs(nrm) > 1e-8
    monomial_values = (eigvecs[:, mask] / eigvecs[unit_index][mask]).T
    if verbosity >= 2:
        print 'Normalized eigenvectors:'
        print monomial_values

    # Test each solution
    solutions = []
    for values in monomial_values:
        #candidate = [eigvec[i]/eigvec[unit_index] for i in var_indices]
        for solution in solve_monomial_equations(basis, values):
            values = [f(*solution) for f in equations]
            if verbosity >= 1:
                print 'Candidate solution: %s -> Values = %s' % (solution, values)
            if np.linalg.norm(values) < 1e-8:
                solutions.append(solution)

    # Report final solutions
    base_vars = Polynomial.coordinates(lambda_poly.num_vars)
    if verbosity >= 1:
        print 'Solutions:'
        for solution in solutions:
            print ' ' + ' '.join('%s=%s' % (var, val) for var, val in zip(base_vars, solution))

    return SolutionSet(solutions)
Example 13: make_langrangian
# Required import: from polynomial import Polynomial [as alias]
# Or: from polynomial.Polynomial import coordinates [as alias]
def make_langrangian(objective, constraints):
    ex_vars = Polynomial.coordinates(objective.num_vars + len(constraints))
    orig_vars = ex_vars[:objective.num_vars]
    lg_vars = ex_vars[objective.num_vars:]
    return objective(*orig_vars) + sum(lg * c(*orig_vars) for lg, c in zip(lg_vars, constraints))