This article collects and summarizes typical usage examples of the OptimizeResult.nfev method from Python's scipy.optimize module. If you have been wondering what OptimizeResult.nfev does and how to use it, the curated examples below may help. You can also read more about the containing class, scipy.optimize.OptimizeResult.
The following presents 6 code examples of the OptimizeResult.nfev method, ordered by popularity by default.
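For orientation before the examples: OptimizeResult is a dict subclass with attribute-style access, and nfev is simply the field on it that records how many times the objective function was evaluated. A minimal sketch (the values are made up for illustration):

from scipy.optimize import OptimizeResult

res = OptimizeResult()
res.x = [1.0, 2.0]   # solution array
res.fun = 0.5        # objective value at the solution
res.nfev = 42        # number of objective-function evaluations
print(res.nfev)      # attribute access...
print(res['nfev'])   # ...and dict-style access both work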
Example 1: scipy_graduate_walk
# Required imports: from scipy.optimize import OptimizeResult [as alias]
# Or: from scipy.optimize.OptimizeResult import nfev [as alias]
def scipy_graduate_walk(*args, **kwargs):
"""Scipy-compatible graduate_walk function wrapper.
parameters:
args[0]: target, function to be minimized
args[1]: x0, starting point for minimization
dx=1e-8: step in change of the point
    dx_start=0.1: starting value for the dx step. Must be bigger than dx.
dx_step=0.1: change of dx on each iteration. Should be less than 1.
diagonal=False: defines directions for point movements. See
generate_all_directions
generate_nondiagonal_directions
for more information.
bounds=None: list of bounds for the movement
[[min, max], [min, max], ...]
if set to None, bounds are ignored
    ytol_rel=1e-8: relative tolerance for the search stop. See graduate_walk for
        more info.
returns:
OptimizeResult() object with properly set x, fun, nfev.
success is always set to True, status to 1
"""
target = args[0]
x0 = args[1]
    dx = kwargs.get('dx', 1e-8)
    dx_start = kwargs.get('dx_start', 0.1)
    dx_step = kwargs.get('dx_step', 0.1)
    if kwargs.get('diagonal'):
        directions = generate_all_directions(len(x0))
    else:
        directions = generate_nondiagonal_directions(len(x0))
    if kwargs.get('bounds') is not None:
        bounds = Bounds(kwargs['bounds'])
    else:
        bounds = None
    ytol_rel = kwargs.get('ytol_rel', 1e-8)
res = graduate_walk(target, x0, dx, directions, dx_start, dx_step,
bounds=bounds, ytol_rel=ytol_rel)
answ = OptimizeResult()
answ.x = res['x0']
answ.fun = res['fval']
answ.success = True
answ.status = 1
answ.nfev = res['fnval']
return answ
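Because the wrapper takes (target, x0) positionally and reads everything else from keyword arguments, it can be invoked directly, or passed as a custom method= to scipy.optimize.minimize. A hypothetical direct call, assuming graduate_walk and the direction generators from the same module are importable (the sphere objective is made up for illustration):

import numpy as np

def sphere(x):
    # simple convex test objective
    return np.sum(x ** 2)

res = scipy_graduate_walk(sphere, np.array([1.0, -2.0]), dx=1e-8, diagonal=True)
print(res.x, res.fun, res.nfev)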
Example 2: solve
# Required imports: from scipy.optimize import OptimizeResult [as alias]
# Or: from scipy.optimize.OptimizeResult import nfev [as alias]
def solve(self):
"""
Runs the DifferentialEvolutionSolver.
Returns
-------
res : OptimizeResult
        The optimization result represented as an ``OptimizeResult`` object.
Important attributes are: ``x`` the solution array, ``success`` a
Boolean flag indicating if the optimizer exited successfully and
``message`` which describes the cause of the termination. See
`OptimizeResult` for a description of other attributes. If `polish`
was employed, and a lower minimum was obtained by the polishing,
then OptimizeResult also contains the ``jac`` attribute.
"""
nit, warning_flag = 0, False
status_message = _status_message['success']
    # The population may have just been initialized (all entries are
    # np.inf). If it has, the initial energies need to be calculated.
# Although this is also done in the evolve generator it's possible
# that someone can set maxiter=0, at which point we still want the
# initial energies to be calculated (the following loop isn't run).
if np.all(np.isinf(self.population_energies)):
self.population_energies[:] = self._calculate_population_energies(
self.population)
self._promote_lowest_energy()
# do the optimisation.
    for nit in range(1, self.maxiter + 1):
# evolve the population by a generation
try:
next(self)
except StopIteration:
warning_flag = True
if self._nfev > self.maxfun:
status_message = _status_message['maxfev']
elif self._nfev == self.maxfun:
status_message = ('Maximum number of function evaluations'
' has been reached.')
break
if self.disp:
print("differential_evolution step %d: f(x)= %g"
% (nit,
self.population_energies[0]))
# should the solver terminate?
convergence = self.convergence
if (self.callback and
self.callback(self._scale_parameters(self.population[0]),
convergence=self.tol / convergence) is True):
warning_flag = True
status_message = ('callback function requested stop early '
'by returning True')
break
if np.any(np.isinf(self.population_energies)):
intol = False
else:
intol = (np.std(self.population_energies) <=
self.atol +
self.tol * np.abs(np.mean(self.population_energies)))
if warning_flag or intol:
break
else:
status_message = _status_message['maxiter']
warning_flag = True
DE_result = OptimizeResult(
x=self.x,
fun=self.population_energies[0],
nfev=self._nfev,
nit=nit,
message=status_message,
success=(warning_flag is not True))
if self.polish:
result = minimize(self.func,
np.copy(DE_result.x),
method='L-BFGS-B',
bounds=self.limits.T)
self._nfev += result.nfev
DE_result.nfev = self._nfev
if result.fun < DE_result.fun:
DE_result.fun = result.fun
DE_result.x = result.x
DE_result.jac = result.jac
# to keep internal state consistent
self.population_energies[0] = result.fun
self.population[0] = self._unscale_parameters(result.x)
return DE_result
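Outside of library internals, this solver is normally driven through the public scipy.optimize.differential_evolution function, which returns the same OptimizeResult. A minimal sketch against the released API (the two-dimensional Rosenbrock objective is just an illustration):

import numpy as np
from scipy.optimize import differential_evolution

def rosen(x):
    # classic Rosenbrock banana function
    return np.sum(100.0 * (x[1:] - x[:-1] ** 2) ** 2 + (1 - x[:-1]) ** 2)

result = differential_evolution(rosen, bounds=[(-5, 5), (-5, 5)], seed=1)
print(result.x, result.fun)
print(result.nfev)  # includes evaluations spent by the L-BFGS-B polishing step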
Example 3: solve
# Required imports: from scipy.optimize import OptimizeResult [as alias]
# Or: from scipy.optimize.OptimizeResult import nfev [as alias]
def solve(self):
"""
Runs the DifferentialEvolutionSolver.
Returns
-------
res : OptimizeResult
        The optimization result represented as an ``OptimizeResult`` object.
Important attributes are: ``x`` the solution array, ``success`` a
Boolean flag indicating if the optimizer exited successfully and
``message`` which describes the cause of the termination. See
`OptimizeResult` for a description of other attributes. If polish
was employed, then OptimizeResult also contains the ``hess_inv`` and
``jac`` attributes.
"""
nfev, nit, warning_flag = 0, 0, False
status_message = _status_message['success']
# calculate energies to start with
for index, candidate in enumerate(self.population):
parameters = self._scale_parameters(candidate)
self.population_energies[index] = self.func(parameters,
*self.args)
nfev += 1
if nfev > self.maxfun:
warning_flag = True
status_message = _status_message['maxfev']
break
minval = np.argmin(self.population_energies)
# put the lowest energy into the best solution position.
lowest_energy = self.population_energies[minval]
self.population_energies[minval] = self.population_energies[0]
self.population_energies[0] = lowest_energy
self.population[[0, minval], :] = self.population[[minval, 0], :]
if warning_flag:
return OptimizeResult(
x=self.x,
fun=self.population_energies[0],
nfev=nfev,
nit=nit,
message=status_message,
            success=(warning_flag is not True))
# do the optimisation.
for nit in range(1, self.maxiter + 1):
if self.dither is not None:
self.scale = self.random_number_generator.rand(
) * (self.dither[1] - self.dither[0]) + self.dither[0]
for candidate in range(np.size(self.population, 0)):
if nfev > self.maxfun:
warning_flag = True
status_message = _status_message['maxfev']
break
trial = self._mutate(candidate)
self._ensure_constraint(trial)
parameters = self._scale_parameters(trial)
energy = self.func(parameters, *self.args)
nfev += 1
if energy < self.population_energies[candidate]:
self.population[candidate] = trial
self.population_energies[candidate] = energy
if energy < self.population_energies[0]:
self.population_energies[0] = energy
self.population[0] = trial
# stop when the fractional s.d. of the population is less than tol
# of the mean energy
convergence = (np.std(self.population_energies) /
np.abs(np.mean(self.population_energies) +
_MACHEPS))
if self.disp:
print("differential_evolution step %d: f(x)= %g"
% (nit,
self.population_energies[0]))
if (self.callback and
self.callback(self._scale_parameters(self.population[0]),
convergence=self.tol / convergence) is True):
warning_flag = True
status_message = ('callback function requested stop early '
'by returning True')
break
if convergence < self.tol or warning_flag:
break
else:
status_message = _status_message['maxiter']
#......... remainder of this code omitted .........
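Examples 2, 3, and 6 all share the same stopping rule: terminate once the fractional standard deviation of the population energies falls below tol. A standalone sketch of that test (with _MACHEPS spelled out as machine epsilon):

import numpy as np

_MACHEPS = np.finfo(np.float64).eps

def population_converged(energies, tol=0.01):
    # fractional spread of the population relative to its mean energy
    convergence = np.std(energies) / np.abs(np.mean(energies) + _MACHEPS)
    return convergence < tol

print(population_converged([1.00, 1.001, 0.999]))  # True: energies are tightly clustered
print(population_converged([1.0, 5.0, 9.0]))       # False: population still spread out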
Example 4: dual_annealing
# Required imports: from scipy.optimize import OptimizeResult [as alias]
# Or: from scipy.optimize.OptimizeResult import nfev [as alias]
#......... earlier part of this code omitted .........
Package for R. The R Journal, Volume 5/1 (2013).
.. [6] Mullen, K. Continuous Global Optimization in R. Journal of
Statistical Software, 60(6), 1 - 45, (2014). DOI:10.18637/jss.v060.i06
Examples
--------
The following example is a 10-dimensional problem, with many local minima.
The function involved is called Rastrigin
(https://en.wikipedia.org/wiki/Rastrigin_function)
    >>> import numpy as np
    >>> from scipy.optimize import dual_annealing
>>> func = lambda x: np.sum(x*x - 10*np.cos(2*np.pi*x)) + 10*np.size(x)
>>> lw = [-5.12] * 10
>>> up = [5.12] * 10
>>> ret = dual_annealing(func, None, bounds=list(zip(lw, up)), seed=1234)
>>> print("global minimum: xmin = {0}, f(xmin) = {1:.6f}".format(
... ret.x, ret.fun))
global minimum: xmin = [-4.26437714e-09 -3.91699361e-09 -1.86149218e-09 -3.97165720e-09
-6.29151648e-09 -6.53145322e-09 -3.93616815e-09 -6.55623025e-09
-6.05775280e-09 -5.00668935e-09], f(xmin) = 0.000000
"""
if x0 is not None and not len(x0) == len(bounds):
raise ValueError('Bounds size does not match x0')
lu = list(zip(*bounds))
lower = np.array(lu[0])
upper = np.array(lu[1])
# Check that restart temperature ratio is correct
if restart_temp_ratio <= 0. or restart_temp_ratio >= 1.:
raise ValueError('Restart temperature ratio has to be in range (0, 1)')
# Checking bounds are valid
if (np.any(np.isinf(lower)) or np.any(np.isinf(upper)) or np.any(
np.isnan(lower)) or np.any(np.isnan(upper))):
raise ValueError('Some bounds values are inf values or nan values')
# Checking that bounds are consistent
if not np.all(lower < upper):
        raise ValueError('Bounds are not consistent min < max')
# Wrapper for the objective function
func_wrapper = ObjectiveFunWrapper(func, maxfun, *args)
    # Wrapper for the minimizer
minimizer_wrapper = LocalSearchWrapper(
bounds, func_wrapper, **local_search_options)
# Initialization of RandomState for reproducible runs if seed provided
rand_state = check_random_state(seed)
# Initialization of the energy state
energy_state = EnergyState(lower, upper, callback)
energy_state.reset(func_wrapper, rand_state, x0)
# Minimum value of annealing temperature reached to perform
# re-annealing
temperature_restart = initial_temp * restart_temp_ratio
# VisitingDistribution instance
visit_dist = VisitingDistribution(lower, upper, visit, rand_state)
# Strategy chain instance
strategy_chain = StrategyChain(accept, visit_dist, func_wrapper,
minimizer_wrapper, rand_state, energy_state)
# Run the search loop
need_to_stop = False
iteration = 0
message = []
t1 = np.exp((visit - 1) * np.log(2.0)) - 1.0
    while not need_to_stop:
for i in range(maxiter):
# Compute temperature for this step
s = float(i) + 2.0
t2 = np.exp((visit - 1) * np.log(s)) - 1.0
temperature = initial_temp * t1 / t2
iteration += 1
if iteration >= maxiter:
message.append("Maximum number of iteration reached")
need_to_stop = True
break
# Need a re-annealing process?
if temperature < temperature_restart:
energy_state.reset(func_wrapper, rand_state)
break
# starting strategy chain
val = strategy_chain.run(i, temperature)
if val is not None:
message.append(val)
need_to_stop = True
break
# Possible local search at the end of the strategy chain
if not no_local_search:
val = strategy_chain.local_search()
if val is not None:
message.append(val)
need_to_stop = True
break
# Return the OptimizeResult
res = OptimizeResult()
res.x = energy_state.xbest
res.fun = energy_state.ebest
res.nit = iteration
res.nfev = func_wrapper.nfev
res.njev = func_wrapper.ngev
res.message = message
return res
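dual_annealing ships in scipy.optimize from SciPy 1.2 onward, so the counters set above can be read straight off the returned object. A short sketch against the released API (not the development snapshot shown here, whose signature differs slightly):

import numpy as np
from scipy.optimize import dual_annealing

def rastrigin(x):
    # same Rastrigin objective as in the docstring example above
    return np.sum(x * x - 10 * np.cos(2 * np.pi * x)) + 10 * np.size(x)

bounds = [(-5.12, 5.12)] * 2
ret = dual_annealing(rastrigin, bounds=bounds, seed=1234, maxiter=200)
print(ret.x, ret.fun)
print(ret.nfev)  # objective evaluations, local searches included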
Example 5: optimize_stiefel
# Required imports: from scipy.optimize import OptimizeResult [as alias]
# Or: from scipy.optimize.OptimizeResult import nfev [as alias]
def optimize_stiefel(func, X0, args=(), tau_max=.5, max_it=1, tol=1e-6,
disp=False, tau_find_freq=100):
"""
Optimize a function over a Stiefel manifold.
:param func: Function to be optimized
:param X0: Initial point for line search
:param tau_max: Maximum step size
:param max_it: Maximum number of iterations
:param tol: Tolerance criteria to terminate line search
:param disp: Choose whether to display output
    :param args: Extra arguments passed to the function
    :param tau_find_freq: How often (in iterations) to redo the line search for the step size tau
"""
tol = float(tol)
assert tol > 0, 'Tolerance must be positive'
max_it = int(max_it)
assert max_it > 0, 'The maximum number of iterations must be a positive '\
+ 'integer'
tau_max = float(tau_max)
assert tau_max > 0, 'The parameter `tau_max` must be positive.'
k = 0
X = X0.copy()
nit = 0
nfev = 0
success = False
if disp:
        print('Stiefel Optimization'.center(80))
        print('{0:4s} {1:11s} {2:5s}'.format('It', 'F', '(F - F_old) / F_old'))
        print('-' * 30)
ls_func = LSFunc()
ls_func.func = func
decrease_tau = False
tau_max0 = tau_max
while nit <= max_it:
nit += 1
F, G = func(X, *args)
F_old = F
nfev += 1
A = compute_A(G, X)
ls_func.A = A
ls_func.X = X
ls_func.func_args = args
ls_func.tau_max = tau_max
increased_tau = False
if nit == 1 or decrease_tau or nit % tau_find_freq == 0:
# Need to minimize ls_func with respect to each argument
tau_init = np.linspace(-10, 0., 3)[:, None]
tau_d = np.linspace(-10, 0., 50)[:, None]
tau_all, F_all = pybgo.minimize(ls_func, tau_init, tau_d, fixed_noise=1e-16,
add_at_least=1, tol=1e-2, scale=True,
train_every=1)[:2]
nfev += tau_all.shape[0]
idx = np.argmin(F_all)
tau = np.exp(tau_all[idx, 0]) * tau_max
if tau_max - tau <= 1e-6:
tau_max = 1.2 * tau_max
if disp:
                    print('increasing tau_max to {0:1.5e}'.format(tau_max))
increased_tau = True
if decrease_tau:
tau_max = .8 * tau_max
if disp:
                    print('decreasing tau_max to {0:1.5e}'.format(tau_max))
decrease_tau = False
F = F_all[idx, 0]
else:
F = ls_func([np.log(tau / tau_max)])
delta_F = (F_old - F) / np.abs(F_old)
if delta_F < 0:
if disp:
                print('*** backtracking')
nit -= 1
decrease_tau = True
continue
X_old = X
X = Y_func(tau, X, A)
if disp:
            print('{0:4s} {1:1.5e} {2:5e} tau = {3:1.3e}, tau_max = {4:1.3e}'.format(
                str(nit).zfill(4), F, delta_F, tau, tau_max))
if delta_F <= tol:
if disp:
                print('*** Converged ***')
success = True
break
res = OptimizeResult()
res.tau_max = tau_max
res.X = X
res.nfev = nfev
res.nit = nit
res.fun = F
res.success = success
return res
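The helper functions compute_A and Y_func are not shown in this excerpt. In Wen-Yin-style curvilinear search on the Stiefel manifold they are typically the skew-symmetric curvature term and the Cayley-transform retraction; the sketch below is a plausible reconstruction under that assumption, not the module's actual code:

import numpy as np

def compute_A(G, X):
    # Skew-symmetric term A = G X^T - X G^T (assumed here; see Wen & Yin,
    # "A feasible method for optimization with orthogonality constraints", 2013)
    return G @ X.T - X @ G.T

def Y_func(tau, X, A):
    # Cayley-transform retraction Y(tau) = (I + tau/2 A)^-1 (I - tau/2 A) X.
    # For skew-symmetric A this preserves Y^T Y = I, so Y stays on the manifold.
    eye = np.eye(X.shape[0])
    return np.linalg.solve(eye + 0.5 * tau * A, (eye - 0.5 * tau * A) @ X)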
Example 6: solve
# Required imports: from scipy.optimize import OptimizeResult [as alias]
# Or: from scipy.optimize.OptimizeResult import nfev [as alias]
# This variant additionally relies on the standard library time module (import time)
def solve(self):
nfev, nit, warning_flag = 0, 0, False
status_message = _status_message['success']
# calculate energies to start with
for index, candidate in enumerate(self.population):
parameters = self._scale_parameters(candidate)
self.population_energies[index] = self.func(parameters,
*self.args)
nfev += 1
if nfev > self.maxfun:
warning_flag = True
status_message = _status_message['maxfev']
break
minval = np.argmin(self.population_energies)
# put the lowest energy into the best solution position.
lowest_energy = self.population_energies[minval]
self.population_energies[minval] = self.population_energies[0]
self.population_energies[0] = lowest_energy
self.population[[0, minval], :] = self.population[[minval, 0], :]
if warning_flag:
return OptimizeResult(
x=self.x,
fun=self.population_energies[0],
nfev=nfev,
nit=nit,
message=status_message,
success=(warning_flag is not True))
# do the optimisation.
start_time = time.time()
nit = 0
while nit < self.maxiter + 1:
nit += 1
if start_time + self.max_execution_time < time.time():
warning_flag = True
status_message = 'Max execution time reached'
break
if self.dither is not None:
self.scale = self.random_number_generator.rand(
) * (self.dither[1] - self.dither[0]) + self.dither[0]
for candidate in range(np.size(self.population, 0)):
if nfev > self.maxfun:
warning_flag = True
status_message = _status_message['maxfev']
break
trial = self._mutate(candidate)
self._ensure_constraint(trial)
parameters = self._scale_parameters(trial)
energy = self.func(parameters, *self.args)
nfev += 1
if energy < self.population_energies[candidate]:
self.population[candidate] = trial
self.population_energies[candidate] = energy
if energy < self.population_energies[0]:
self.population_energies[0] = energy
self.population[0] = trial
# stop when the fractional s.d. of the population is less than tol
# of the mean energy
convergence = (np.std(self.population_energies) /
np.abs(np.mean(self.population_energies) +
_MACHEPS))
if self.disp:
print("differential_evolution step %d: f(x)= %g"
% (nit,
self.population_energies[0]))
if (self.callback and
self.callback(self._scale_parameters(self.population[0]),
convergence=self.tol / convergence) is True):
warning_flag = True
status_message = ('callback function requested stop early '
'by returning True')
break
if convergence < self.tol or warning_flag:
break
else:
status_message = _status_message['maxiter']
warning_flag = True
DE_result = OptimizeResult(
x=self.x,
fun=self.population_energies[0],
nfev=nfev,
nit=nit,
#......... remainder of this code omitted .........
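The only substantive change relative to example 3 is the wall-clock guard driven by self.max_execution_time. The same pattern applies to any iterative solver loop; a generic sketch (the step callback and names are made up for illustration):

import time

def run_with_time_budget(step, max_execution_time, max_iter=1000):
    # step() advances the solver by one generation; it returns True once converged.
    start_time = time.time()
    for nit in range(1, max_iter + 1):
        if time.time() - start_time > max_execution_time:
            return nit, 'Max execution time reached'
        if step():
            return nit, 'converged'
    return max_iter, 'maximum number of iterations reached'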