This article collects typical usage examples of the scipy.optimize.minimize method in Python. If you are wondering how to call optimize.minimize, what it does, or how it is commonly used, the selected code examples below may help. You can also read further about the scipy.optimize module that the method belongs to.
Fifteen code examples of optimize.minimize are shown below, ordered by popularity by default.
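Before the examples, here is a minimal, self-contained sketch (not taken from any of the examples below) showing the calling pattern most of them share: an objective, an optional analytic gradient supplied via jac, box bounds, and a method such as 'L-BFGS-B'.

import numpy as np
from scipy.optimize import minimize

def quadratic(w):
    """Objective that also returns its gradient (used with jac=True)."""
    value = np.sum((w - 3.0) ** 2)
    grad = 2.0 * (w - 3.0)
    return value, grad

res = minimize(quadratic, x0=np.zeros(2), jac=True,
               method='L-BFGS-B', bounds=[(0.0, 10.0), (0.0, 10.0)])
print(res.x, res.fun)  # roughly [3. 3.] and 0.0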
Example 1: test_bounded
# Required import: from scipy import optimize [as alias]
# Or: from scipy.optimize import minimize [as alias]
def test_bounded(make_quadratic, make_random):
random = make_random
a, b, c, data, bounds = make_quadratic
w0 = np.concatenate((random.randn(2), [1.5]))
res = minimize(qobj, w0, args=(data,), jac=True, bounds=bounds,
method='L-BFGS-B')
Ea_bfgs, Eb_bfgs, Ec_bfgs = res['x']
res = sgd(qobj, w0, data, bounds=bounds, eval_obj=True,
random_state=random)
Ea_sgd, Eb_sgd, Ec_sgd = res['x']
assert np.allclose((Ea_bfgs, Eb_bfgs, Ec_bfgs),
(Ea_sgd, Eb_sgd, Ec_sgd),
atol=5e-2, rtol=0)
Example 2: _calculate_CAR
# Required import: from scipy import optimize [as alias]
# Or: from scipy.optimize import minimize [as alias]
def _calculate_CAR(self, time, magnitude, error, minimize_method):
magnitude = magnitude.copy()
time = time.copy()
error = error.copy() ** 2
x0 = [10, 0.5]
bnds = ((0, 100), (0, 100))
with warnings.catch_warnings():
warnings.filterwarnings("ignore")
res = minimize(
_car_like,
x0,
args=(time, magnitude, error),
method=minimize_method,
bounds=bnds,
)
sigma, tau = res.x[0], res.x[1]
return sigma, tau
Example 3: linear_regression_np
# Required import: from scipy import optimize [as alias]
# Or: from scipy.optimize import minimize [as alias]
def linear_regression_np(X, y, l=1):
"""linear regression
args:
X: feature matrix, (m, n+1) # with intercept x0=1
y: target vector, (m, )
l: lambda constant for regularization
return: trained parameters
"""
# init theta
theta = np.ones(X.shape[1])
# train it
res = opt.minimize(fun=regularized_cost,
x0=theta,
args=(X, y, l),
method='TNC',
jac=regularized_gradient,
options={'disp': True})
return res
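For reference, regularized_cost and regularized_gradient are assumed by the example above but not shown. A minimal sketch of what they might look like (standard regularized least squares, with the intercept term theta[0] left unpenalized) is:

import numpy as np

def regularized_cost(theta, X, y, l=1):
    # least-squares cost plus an L2 penalty that skips the intercept
    m = X.shape[0]
    residual = X @ theta - y
    return residual @ residual / (2 * m) + l / (2 * m) * (theta[1:] @ theta[1:])

def regularized_gradient(theta, X, y, l=1):
    # gradient of the cost above; no penalty on theta[0]
    m = X.shape[0]
    grad = X.T @ (X @ theta - y) / m
    grad[1:] += l / m * theta[1:]
    return grad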
Example 4: betaseries_file
# Required import: from scipy import optimize [as alias]
# Or: from scipy.optimize import minimize [as alias]
def betaseries_file(tmpdir_factory,
deriv_betaseries_fname=deriv_betaseries_fname):
bfile = tmpdir_factory.mktemp("beta").ensure(deriv_betaseries_fname)
np.random.seed(3)
num_trials = 40
tgt_corr = 0.1
bs1 = np.random.rand(num_trials)
# create another betaseries with a target correlation
bs2 = minimize(lambda x: abs(tgt_corr - pearsonr(bs1, x)[0]),
np.random.rand(num_trials)).x
# stack the two beta series into a 4D array (1 x 1 x 2 x num_trials)
bs_data = np.array([[[bs1, bs2]]])
# the nifti image
bs_img = nib.Nifti1Image(bs_data, np.eye(4))
bs_img.to_filename(str(bfile))
return bfile
Example 5: invert_bfgs
# Required import: from scipy import optimize [as alias]
# Or: from scipy.optimize import minimize [as alias]
def invert_bfgs(gen_model, invert_model, ftr_model, im, z_predict=None, npx=64):
_f, z = invert_model
nz = gen_model.nz
if z_predict is None:
z_predict = np_rng.uniform(-1., 1., size=(1, nz))
else:
z_predict = floatX(z_predict)
z_predict = np.arctanh(z_predict)
im_t = gen_model.transform(im)
ftr = ftr_model(im_t)
prob = optimize.minimize(f_bfgs, z_predict, args=(_f, im_t, ftr),
tol=1e-6, jac=True, method='L-BFGS-B', options={'maxiter': 200})
print('n_iters = %3d, f = %.3f' % (prob.nit, prob.fun))
z_opt = prob.x
z_opt_n = floatX(z_opt[np.newaxis, :])
[f_opt, g, gx] = _f(z_opt_n, im_t, ftr)
gx = gen_model.inverse_transform(gx, npx=npx)
z_opt = np.tanh(z_opt)
return gx, z_opt, f_opt
Example 6: test_unbounded
# Required import: from scipy import optimize [as alias]
# Or: from scipy.optimize import minimize [as alias]
def test_unbounded(make_quadratic, make_random):
random = make_random
a, b, c, data, _ = make_quadratic
w0 = random.randn(3)
assert_opt = lambda Ea, Eb, Ec: \
np.allclose((a, b, c), (Ea, Eb, Ec), atol=1e-3, rtol=0)
for updater in [SGDUpdater, AdaDelta, AdaGrad, Momentum, Adam]:
res = sgd(qobj, w0, data, eval_obj=True, updater=updater(),
random_state=make_random)
assert_opt(*res['x'])
res = minimize(qobj, w0, args=(data,), jac=True, method='L-BFGS-B')
assert_opt(*res['x'])
res = minimize(qfun, w0, args=(data,), jac=qgrad, method='L-BFGS-B')
assert_opt(*res['x'])
res = minimize(qfun, w0, args=(data,), jac=False, method=None)
assert_opt(*res['x'])
Example 7: test_structured_params
# Required import: from scipy import optimize [as alias]
# Or: from scipy.optimize import minimize [as alias]
def test_structured_params(make_quadratic, make_random):
random = make_random
a, b, c, data, _ = make_quadratic
w0 = [Parameter(random.randn(2), Bound()),
Parameter(random.randn(1), Bound())
]
qobj_struc = lambda w12, w3, data: q_struc(w12, w3, data, qobj)
assert_opt = lambda Eab, Ec: \
np.allclose((a, b, c), (Eab[0], Eab[1], Ec), atol=1e-3, rtol=0)
nmin = structured_minimizer(minimize)
res = nmin(qobj_struc, w0, args=(data,), jac=True, method='L-BFGS-B')
assert_opt(*res.x)
nsgd = structured_sgd(sgd)
res = nsgd(qobj_struc, w0, data, eval_obj=True,
random_state=make_random)
assert_opt(*res.x)
qf_struc = lambda w12, w3, data: q_struc(w12, w3, data, qfun)
qg_struc = lambda w12, w3, data: q_struc(w12, w3, data, qgrad)
res = nmin(qf_struc, w0, args=(data,), jac=qg_struc, method='L-BFGS-B')
assert_opt(*res.x)
Example 8: test_log_params
# Required import: from scipy import optimize [as alias]
# Or: from scipy.optimize import minimize [as alias]
def test_log_params(make_quadratic, make_random):
random = make_random
a, b, c, data, _ = make_quadratic
w0 = np.abs(random.randn(3))
bounds = [Positive(), Bound(), Positive()]
assert_opt = lambda Ea, Eb, Ec: \
np.allclose((a, b, c), (Ea, Eb, Ec), atol=1e-3, rtol=0)
nmin = logtrick_minimizer(minimize)
res = nmin(qobj, w0, args=(data,), jac=True, method='L-BFGS-B',
bounds=bounds)
assert_opt(*res.x)
nsgd = logtrick_sgd(sgd)
res = nsgd(qobj, w0, data, eval_obj=True, bounds=bounds,
random_state=make_random)
assert_opt(*res.x)
nmin = logtrick_minimizer(minimize)
res = nmin(qfun, w0, args=(data,), jac=qgrad, method='L-BFGS-B',
bounds=bounds)
assert_opt(*res.x)
Example 9: test_logstruc_params
# Required import: from scipy import optimize [as alias]
# Or: from scipy.optimize import minimize [as alias]
def test_logstruc_params(make_quadratic, make_random):
random = make_random
a, b, c, data, _ = make_quadratic
w0 = [Parameter(random.gamma(2, size=(2,)), Positive()),
Parameter(random.randn(), Bound())
]
qobj_struc = lambda w12, w3, data: q_struc(w12, w3, data, qobj)
assert_opt = lambda Eab, Ec: \
np.allclose((a, b, c), (Eab[0], Eab[1], Ec), atol=1e-3, rtol=0)
nmin = structured_minimizer(logtrick_minimizer(minimize))
res = nmin(qobj_struc, w0, args=(data,), jac=True, method='L-BFGS-B')
assert_opt(*res.x)
nsgd = structured_sgd(logtrick_sgd(sgd))
res = nsgd(qobj_struc, w0, data, eval_obj=True, random_state=make_random)
assert_opt(*res.x)
qf_struc = lambda w12, w3, data: q_struc(w12, w3, data, qfun)
qg_struc = lambda w12, w3, data: q_struc(w12, w3, data, qgrad)
res = nmin(qf_struc, w0, args=(data,), jac=qg_struc, method='L-BFGS-B')
assert_opt(*res.x)
Example 10: _optimization_function
# Required import: from scipy import optimize [as alias]
# Or: from scipy.optimize import minimize [as alias]
def _optimization_function(self, objective_function: Callable[[base.ArrayLike], float]) -> base.ArrayLike:
# pylint:disable=unused-argument
budget = np.inf if self.budget is None else self.budget
best_res = np.inf
best_x: np.ndarray = self.current_bests["average"].x # np.zeros(self.dimension)
if self.initial_guess is not None:
best_x = np.array(self.initial_guess, copy=True) # copy, just to make sure it is not modified
remaining = budget - self._num_ask
while remaining > 0: # try to restart if budget is not elapsed
options: Dict[str, int] = {} if self.budget is None else {"maxiter": remaining}
res = scipyoptimize.minimize(
objective_function,
best_x if not self.random_restart else self._rng.normal(0.0, 1.0, self.dimension),
method=self.method,
options=options,
tol=0,
)
if res.fun < best_res:
best_res = res.fun
best_x = res.x
remaining = budget - self._num_ask
return best_x
Example 11: __init__
# Required import: from scipy import optimize [as alias]
# Or: from scipy.optimize import minimize [as alias]
def __init__(self, function: Function, bounds=None, *args, **kwargs):
"""
Initialize a :class:`Minimizer`.
Args:
function: :class:`Function` that will be minimized.
bounds: :class:`Bounds` defining the domain of the minimization \
process. If it is ``None`` the :class:`Function` :class:`Bounds` \
will be used.
*args: Passed to ``scipy.optimize.minimize``.
**kwargs: Passed to ``scipy.optimize.minimize``.
"""
self.env = function
self.function = function.function
self.bounds = self.env.bounds if bounds is None else bounds
self.args = args
self.kwargs = kwargs
Example 12: minimize
# Required import: from scipy import optimize [as alias]
# Or: from scipy.optimize import minimize [as alias]
def minimize(self, x: numpy.ndarray):
"""
Apply ``scipy.optimize.minimize`` to a single point.
Args:
x: Array representing a single point of the function to be minimized.
Returns:
Optimization result object returned by ``scipy.optimize.minimize``.
"""
def _optimize(_x):
try:
_x = _x.reshape((1,) + _x.shape)
y = self.function(_x)
except (ZeroDivisionError, RuntimeError):
y = numpy.inf
return y
bounds = ScipyBounds(
ub=self.bounds.high if self.bounds is not None else None,
lb=self.bounds.low if self.bounds is not None else None,
)
return minimize(_optimize, x, bounds=bounds, *self.args, **self.kwargs)
Example 13: minimize_point
# Required import: from scipy import optimize [as alias]
# Or: from scipy.optimize import minimize [as alias]
def minimize_point(self, x: numpy.ndarray) -> Tuple[numpy.ndarray, Scalar]:
"""
Minimize the target function passing one starting point.
Args:
x: Array representing a single point of the function to be minimized.
Returns:
Tuple containing a numpy array representing the best solution found, \
and the numerical value of the function at that point.
"""
optim_result = self.minimize(x)
point = optim_result["x"]
reward = float(optim_result["fun"])
return point, reward
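A hypothetical usage sketch for the Minimizer defined in Examples 11-13; the Function construction below is an assumption for illustration, not part of the original source.

import numpy
# assumed: Function wraps a batched objective together with its Bounds
function = Function(lambda batch: numpy.sum(batch ** 2, axis=1))
minimizer = Minimizer(function=function, method='L-BFGS-B')  # extra kwargs are forwarded to scipy.optimize.minimize
best_point, best_value = minimizer.minimize_point(numpy.array([0.5, -0.3]))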
Example 14: anneal_schedule
# Required import: from scipy import optimize [as alias]
# Or: from scipy.optimize import minimize [as alias]
def anneal_schedule(self, schedule='fast', use_wrapper=False):
""" Call anneal algorithm using specified schedule """
n = 0 # index of test function
if use_wrapper:
opts = {'upper': self.upper[n],
'lower': self.lower[n],
'ftol': 1e-3,
'maxiter': self.maxiter,
'schedule': schedule,
'disp': False}
res = minimize(self.fun[n], self.x0[n], method='anneal',
options=opts)
x, retval = res['x'], res['status']
else:
x, retval = anneal(self.fun[n], self.x0[n], full_output=False,
upper=self.upper[n], lower=self.lower[n],
feps=1e-3, maxiter=self.maxiter,
schedule=schedule, disp=False)
assert_almost_equal(x, self.sol[n], 2)
return retval
Example 15: test_minimize_l_bfgs_b_ftol
# Required import: from scipy import optimize [as alias]
# Or: from scipy.optimize import minimize [as alias]
def test_minimize_l_bfgs_b_ftol(self):
# Check that the `ftol` parameter in l_bfgs_b works as expected
v0 = None
for tol in [1e-1, 1e-4, 1e-7, 1e-10]:
opts = {'disp': False, 'maxiter': self.maxiter, 'ftol': tol}
sol = optimize.minimize(self.func, self.startparams,
method='L-BFGS-B', jac=self.grad,
options=opts)
v = self.func(sol.x)
if v0 is None:
v0 = v
else:
assert_(v < v0)
assert_allclose(v, self.func(self.solution), rtol=tol)