

Python optimize.OptimizeResult Class Code Examples

This article collects typical usage examples of the Python class scipy.optimize.OptimizeResult, gathered from open-source projects. If you are unsure what OptimizeResult is for or how it is used in practice, the curated class examples below should help.


Below are 15 code examples of the OptimizeResult class, ordered by popularity by default. You can upvote the examples you find useful; your feedback helps the system recommend better Python code examples.
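
Before the examples, a quick note on what `OptimizeResult` actually is: a `dict` subclass from `scipy.optimize` whose entries can also be read and written as attributes, which is why the examples below freely assign fields such as `res.x` and `res.fun`. A minimal standalone sketch (not taken from any project below):

from scipy.optimize import OptimizeResult

res = OptimizeResult()
res.x = [1.0, 2.0]          # attribute assignment creates a dict entry
res.fun = 0.5
res.success = True

print(res['x'])             # dict-style access: [1.0, 2.0]
print(res.fun)              # attribute-style access: 0.5
print(sorted(res.keys()))   # ['fun', 'success', 'x']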

Example 1: result

 def result(self):
     """ The OptimizeResult """
     res = OptimizeResult()
     res.x = self._xmin
     res.fun = self._fvalue
     res.message = self._message
     res.nit = self._step_record
     return res
Developer: mgje, Project: Python-Mathematik-Beispiele, Lines: 8, Source: gensa.py

Example 2: result

 def result(self):
     """ The OptimizeResult """
     res = OptimizeResult()
     res.x = self.es.xbest
     res.fun = self.es.ebest
     res.nit = self._iter
     res.ncall = self.owf.nb_fun_call
     return res
Developer: sgubianpm, Project: gensabench, Lines: 8, Source: _sda.py

Example 3: setup_method

    def setup_method(self):
        self.x0 = np.array(1)
        self.f0 = 0

        minres = OptimizeResult()
        minres.x = self.x0
        minres.fun = self.f0

        self.storage = Storage(minres)
Developer: beyondmetis, Project: scipy, Lines: 9, Source: test__basinhopping.py

Example 4: test_higher_f_rejected

    def test_higher_f_rejected(self):
        new_minres = OptimizeResult()
        new_minres.x = self.x0 + 1
        new_minres.fun = self.f0 + 1

        ret = self.storage.update(new_minres)
        minres = self.storage.get_lowest()
        assert_equal(self.x0, minres.x)
        assert_equal(self.f0, minres.fun)
        assert_(not ret)
Developer: beyondmetis, Project: scipy, Lines: 10, Source: test__basinhopping.py

Example 5: test_lower_f_accepted

    def test_lower_f_accepted(self):
        new_minres = OptimizeResult()
        new_minres.x = self.x0 + 1
        new_minres.fun = self.f0 - 1

        ret = self.storage.update(new_minres)
        minres = self.storage.get_lowest()
        assert_(self.x0 != minres.x)
        assert_(self.f0 != minres.fun)
        assert_(ret)
Developer: beyondmetis, Project: scipy, Lines: 10, Source: test__basinhopping.py

Example 6: create_result

def create_result(Xi, yi, space=None, rng=None, specs=None, models=None):
    """
    Initialize an `OptimizeResult` object.

    Parameters
    ----------
    * `Xi` [list of lists, shape=(n_iters, n_features)]:
        Location of the minimum at every iteration.

    * `yi` [array-like, shape=(n_iters,)]:
        Minimum value obtained at every iteration.

    * `space` [Space instance, optional]:
        Search space.

    * `rng` [RandomState instance, optional]:
        State of the random state.

    * `specs` [dict, optional]:
        Call specifications.

    * `models` [list, optional]:
        List of fit surrogate models.

    Returns
    -------
    * `res` [`OptimizeResult`, scipy object]:
        OptimizeResult instance with the required information.
    """
    res = OptimizeResult()
    yi = np.asarray(yi)
    if np.ndim(yi) == 2:
        res.log_time = np.ravel(yi[:, 1])
        yi = np.ravel(yi[:, 0])
    best = np.argmin(yi)
    res.x = Xi[best]
    res.fun = yi[best]
    res.func_vals = yi
    res.x_iters = Xi
    res.models = models
    res.space = space
    res.random_state = rng
    res.specs = specs
    return res
Developer: betatim, Project: scikit-optimize, Lines: 44, Source: utils.py
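
A hypothetical call to `create_result`, assuming scikit-optimize's `Space` and `Real` are importable; the `Xi`/`yi` values are made up for illustration:

import numpy as np
from skopt.space import Space, Real

Xi = [[0.1], [0.5], [0.9]]        # made-up evaluation points
yi = [0.8, 0.2, 0.6]              # made-up objective values

res = create_result(Xi, yi, space=Space([Real(0.0, 1.0)]))
print(res.x, res.fun)             # [0.5] 0.2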

Example 7: _tree_minimize

def _tree_minimize(func, dimensions, base_estimator, n_calls,
                   n_points, n_random_starts, random_state=None):
    rng = check_random_state(random_state)
    space = Space(dimensions)

    # Initialize with random points
    if n_random_starts <= 0:
        raise ValueError(
            "Expected n_random_starts > 0, got %d" % n_random_starts)

    if n_calls <= 0:
        raise ValueError(
            "Expected n_calls > 0, got %d" % n_calls)

    if n_calls < n_random_starts:
        raise ValueError(
            "Expected n_calls >= %d, got %d" % (n_random_starts, n_calls))

    Xi = space.rvs(n_samples=n_random_starts, random_state=rng)
    yi = [func(x) for x in Xi]
    if np.ndim(yi) != 1:
        raise ValueError(
            "The function to be optimized should return a scalar")

    # Tree-based optimization loop
    models = []

    n_model_iter = n_calls - n_random_starts
    for i in range(n_model_iter):
        rgr = clone(base_estimator)
        rgr.fit(space.transform(Xi), yi)
        models.append(rgr)

        # `rgr` predicts a constant for each leaf, which means the EI
        # has zero gradient over large distances. As a result we cannot
        # use gradient-based optimizers like BFGS, so we use random
        # sampling for the moment.
        X = space.transform(space.rvs(n_samples=n_points,
                                      random_state=rng))
        values = -gaussian_ei(X, rgr, np.min(yi))
        next_x = X[np.argmin(values)]

        next_x = space.inverse_transform(next_x.reshape((1, -1)))[0]
        next_y = func(next_x)
        Xi = np.vstack((Xi, next_x))
        yi.append(next_y)

    res = OptimizeResult()
    best = np.argmin(yi)
    res.x = Xi[best]
    res.fun = yi[best]
    res.func_vals = np.array(yi)
    res.x_iters = Xi
    res.models = models
    res.space = space

    return res
Developer: ErmiaAzarkhalili, Project: scikit-optimize, Lines: 57, Source: tree_opt.py
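
The `gaussian_ei` helper above is not shown in the excerpt. As a rough sketch (not the library implementation), expected improvement for minimization under a Gaussian posterior can be computed like this, assuming the regressor supports `predict(X, return_std=True)`:

import numpy as np
from scipy.stats import norm

def gaussian_ei_sketch(X, model, y_opt):
    # Sketch of expected improvement, not the scikit-optimize code.
    mu, std = model.predict(X, return_std=True)
    std = np.maximum(std, 1e-12)    # guard against zero predictive std
    z = (y_opt - mu) / std
    # EI(x) = (y_opt - mu) * Phi(z) + std * phi(z)
    return (y_opt - mu) * norm.cdf(z) + std * norm.pdf(z)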

Example 8: scipy_nlopt_cobyla

def scipy_nlopt_cobyla(*args, **kwargs):
    """Wraps nlopt library cobyla function to be compatible with scipy optimize

    parameters:
        args[0]: target, function to be minimized
        args[1]: x0, starting point for minimization
        bounds: list of bounds for the movement
                [[min, max], [min, max], ...]
        ftol_rel: same as in nlopt
        xtol_rel: same as in nlopt
            one of the tol_rel should be specified
    returns:
        OptimizeResult() object with properly set x, fun, success.
            status is not set when nlopt.RoundoffLimited is raised
    """
    answ = OptimizeResult()
    bounds = kwargs['bounds']

    opt = nlopt.opt(nlopt.LN_COBYLA, len(args[1]))
    opt.set_lower_bounds([i[0] for i in bounds])
    opt.set_upper_bounds([i[1] for i in bounds])
    if 'ftol_rel' in kwargs:
        opt.set_ftol_rel(kwargs['ftol_rel'])
    if 'xtol_rel' in kwargs:
        opt.set_xtol_rel(kwargs['xtol_rel'])
    opt.set_min_objective(args[0])

    x0 = list(args[1])

    try:
        x1 = opt.optimize(x0)
    except nlopt.RoundoffLimited:
        answ.x = x0
        answ.fun = args[0](x0)
        answ.success = False
        answ.message = 'nlopt.RoundoffLimited'
        return answ

    answ.x = x1
    answ.fun = args[0](x1)
    answ.success = opt.last_optimize_result() in [3, 4]
    answ.status = opt.last_optimize_result()
    if answ.fun != opt.last_optimum_value():
        print("Something's wrong:", answ.fun, opt.last_optimum_value())

    return answ
Developer: nishbo, Project: simsimpy, Lines: 46, Source: nlopt_wrap.py
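
A hypothetical call to the wrapper, assuming `nlopt` is installed. Note that `opt.set_min_objective` invokes the target with nlopt's `(x, grad)` callback signature, while the wrapper also calls the target with `x` alone, so the objective needs a defaulted `grad` parameter:

def sphere(x, grad=None):
    # COBYLA is derivative-free, so grad can be ignored
    return sum(v * v for v in x)

res = scipy_nlopt_cobyla(sphere, [0.5, -0.5],
                         bounds=[[-1, 1], [-1, 1]], ftol_rel=1e-8)
print(res.x, res.fun, res.success)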

Example 9: steepest_decent

def steepest_decent(fun, x0, fprime, args, tol=1.0e-4, maxiter=1000,
                    callback=None):
    '''Steepest descent method.
    '''
    x = numpy.array(x0)

    for itr in range(maxiter):
        direction = -1 * fprime(x, *args)
        alpha, obj_current, obj_next = armijo_stepsize(fun, x, fprime, direction, args=args)

        if numpy.linalg.norm(obj_current - obj_next) < tol:
            break

        x = x + alpha * direction
        if callback is not None:
            callback(x)

    result = OptimizeResult()
    result.x = x
    result.fun = fun(x, *args)
    result.nit = itr
    return result
Developer: anaguma2261, Project: TokyoWebmining_40_tkm2261, Lines: 22, Source: opt_func.py
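
The `armijo_stepsize` helper is not included in the excerpt. A minimal backtracking line-search sketch with the same assumed return convention (step size, objective at x, objective after the step):

import numpy

def armijo_stepsize(fun, x, fprime, direction, args=(),
                    alpha0=1.0, xi=1e-4, tau=0.5, max_backtracks=50):
    # Backtracking line search; a sketch, not the original helper.
    obj_current = fun(x, *args)
    slope = numpy.dot(fprime(x, *args), direction)  # directional derivative
    alpha = alpha0
    for _ in range(max_backtracks):
        obj_next = fun(x + alpha * direction, *args)
        if obj_next <= obj_current + xi * alpha * slope:  # Armijo condition
            break
        alpha *= tau                                      # shrink the step
    return alpha, obj_current, obj_next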

Example 10: newton_method

def newton_method(fun, x0, fprime, args, tol=1.0e-4, maxiter=1000,
                    callback=None):
    '''Newton's method, with the Armijo condition for the step size.
    '''
    x = numpy.array(x0)
    A, b = args

    for itr in range(maxiter):
        direction = -1 * numpy.linalg.solve(A, fprime(x, *args))
        alpha, obj_current, obj_next = armijo_stepsize(fun, x, fprime, direction, args=args)

        if numpy.linalg.norm(obj_current - obj_next) < tol:
            break

        x = x + alpha * direction
        if callback is not None:
            callback(x)

    result = OptimizeResult()
    result.x = x
    result.fun = fun(x, *args)
    result.nit = itr
    return result
Developer: anaguma2261, Project: TokyoWebmining_40_tkm2261, Lines: 23, Source: opt_func.py

Example 11: average_results

    def average_results(self):
        """
        group the results by minimizer and average over the runs
        """
        grouped_results = defaultdict(list)
        for res in self.results:
            grouped_results[res.name].append(res)

        averaged_results = dict()
        for name, result_list in grouped_results.items():
            newres = OptimizeResult()
            newres.name = name
            newres.mean_nfev = np.mean([r.nfev for r in result_list])
            newres.mean_njev = np.mean([r.njev for r in result_list])
            newres.mean_nhev = np.mean([r.nhev for r in result_list])
            newres.mean_time = np.mean([r.time for r in result_list])
            newres.ntrials = len(result_list)
            newres.nfail = len([r for r in result_list if not r.success])
            try:
                newres.ndim = len(result_list[0].x)
            except TypeError:
                newres.ndim = 1
            averaged_results[name] = newres
        return averaged_results.values()
Developer: ymarfoq, Project: outilACVDesagregation, Lines: 24, Source: benchmark_global_optimizers.py
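
The method relies on `OptimizeResult` accepting arbitrary keyword attributes, so each benchmark run can carry extra fields such as `name` and `time`. A standalone sketch of the same grouping idea, with made-up records:

import numpy as np
from collections import defaultdict
from scipy.optimize import OptimizeResult

results = [OptimizeResult(name='BFGS', nfev=120, time=0.8, success=True),
           OptimizeResult(name='BFGS', nfev=140, time=0.9, success=False)]

grouped = defaultdict(list)
for r in results:
    grouped[r.name].append(r)

for name, runs in grouped.items():
    print(name, np.mean([r.nfev for r in runs]),      # BFGS 130.0
          len([r for r in runs if not r.success]))    # 1 failure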

Example 12: scipy_graduate_walk

def scipy_graduate_walk(*args, **kwargs):
    """Scipy-compatible graduate_walk function wrapper.

    parameters:
        args[0]: target, function to be minimized
        args[1]: x0, starting point for minimization
        dx=1e-8: step in change of the point
        dx_start=0.1: starting value for dx step. Must be bigger than dx.
        dx_step=0.1: change of dx on each iteration. Should be less than 1.
        diagonal=False: defines directions for point movements. See
                generate_all_directions
                generate_nondiagonal_directions
            for more information.
        bounds=None: list of bounds for the movement
                [[min, max], [min, max], ...]
            if set to None, bounds are ignored
        ytol_rel=1e-8: relative tolerance for search stop. See graduate_walk for
            more info.
    returns:
        OptimizeResult() object with properly set x, fun, nfev.
            success is always set to True, status to 1
    """
    target = args[0]
    x0 = args[1]
    dx = kwargs.get('dx', 1e-8)
    dx_start = kwargs.get('dx_start', 0.1)
    dx_step = kwargs.get('dx_step', 0.1)
    if kwargs.get('diagonal'):
        directions = generate_all_directions(len(x0))
    else:
        directions = generate_nondiagonal_directions(len(x0))
    if kwargs.get('bounds') is not None:
        bounds = Bounds(kwargs['bounds'])
    else:
        bounds = None
    ytol_rel = kwargs.get('ytol_rel', 1e-8)

    res = graduate_walk(target, x0, dx, directions, dx_start, dx_step,
                        bounds=bounds, ytol_rel=ytol_rel)

    answ = OptimizeResult()
    answ.x = res['x0']
    answ.fun = res['fval']
    answ.success = True
    answ.status = 1
    answ.nfev = res['fnval']
    return answ
Developer: nishbo, Project: simsimpy, Lines: 47, Source: walk_search.py
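
A hypothetical call, assuming `graduate_walk` and its direction generators from the same project are importable; the objective and bounds are made up:

res = scipy_graduate_walk(lambda x: sum(v * v for v in x), [1.0, -2.0],
                          bounds=[[-3, 3], [-3, 3]],
                          diagonal=True, ytol_rel=1e-10)
print(res.x, res.fun, res.nfev)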

Example 13: solve

    def solve(self):
        """
        Runs the DifferentialEvolutionSolver.

        Returns
        -------
        res : OptimizeResult
            The optimization result represented as an ``OptimizeResult`` object.
            Important attributes are: ``x`` the solution array, ``success`` a
            Boolean flag indicating if the optimizer exited successfully and
            ``message`` which describes the cause of the termination. See
            `OptimizeResult` for a description of other attributes.  If `polish`
            was employed, and a lower minimum was obtained by the polishing,
            then OptimizeResult also contains the ``jac`` attribute.
        """
        nit, warning_flag = 0, False
        status_message = _status_message['success']

        # The population may have just been initialized (all entries are
        # np.inf). If it has, the initial energies have to be calculated.
        # Although this is also done in the evolve generator, it's possible
        # for someone to set maxiter=0, in which case we still want the
        # initial energies to be calculated (the following loop isn't run).
        if np.all(np.isinf(self.population_energies)):
            self.population_energies[:] = self._calculate_population_energies(
                self.population)
            self._promote_lowest_energy()

        # do the optimisation.
        for nit in range(1, self.maxiter + 1):
            # evolve the population by a generation
            try:
                next(self)
            except StopIteration:
                warning_flag = True
                if self._nfev > self.maxfun:
                    status_message = _status_message['maxfev']
                elif self._nfev == self.maxfun:
                    status_message = ('Maximum number of function evaluations'
                                      ' has been reached.')
                break

            if self.disp:
                print("differential_evolution step %d: f(x)= %g"
                      % (nit,
                         self.population_energies[0]))

            # should the solver terminate?
            convergence = self.convergence

            if (self.callback and
                    self.callback(self._scale_parameters(self.population[0]),
                                  convergence=self.tol / convergence) is True):

                warning_flag = True
                status_message = ('callback function requested stop early '
                                  'by returning True')
                break

            if np.any(np.isinf(self.population_energies)):
                intol = False
            else:
                intol = (np.std(self.population_energies) <=
                         self.atol +
                         self.tol * np.abs(np.mean(self.population_energies)))
            if warning_flag or intol:
                break

        else:
            status_message = _status_message['maxiter']
            warning_flag = True

        DE_result = OptimizeResult(
            x=self.x,
            fun=self.population_energies[0],
            nfev=self._nfev,
            nit=nit,
            message=status_message,
            success=(warning_flag is not True))

        if self.polish:
            result = minimize(self.func,
                              np.copy(DE_result.x),
                              method='L-BFGS-B',
                              bounds=self.limits.T)

            self._nfev += result.nfev
            DE_result.nfev = self._nfev

            if result.fun < DE_result.fun:
                DE_result.fun = result.fun
                DE_result.x = result.x
                DE_result.jac = result.jac
                # to keep internal state consistent
                self.population_energies[0] = result.fun
                self.population[0] = self._unscale_parameters(result.x)

        return DE_result
Developer: ElDeveloper, Project: scipy, Lines: 98, Source: _differentialevolution.py
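
This solver backs scipy's public `differential_evolution` function; a minimal call against the built-in Rosenbrock function returns the same kind of `OptimizeResult`:

from scipy.optimize import differential_evolution, rosen

result = differential_evolution(rosen, bounds=[(0, 2), (0, 2)], seed=1)
print(result.x, result.fun, result.nit, result.success)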

Example 14: dummy_minimize

def dummy_minimize(func, dimensions, n_calls=100, random_state=None):
    """Random search by uniform sampling within the given bounds.

    Parameters
    ----------
    * `func` [callable]:
        Function to minimize. Should take an array of parameters and
        return the function value.

    * `dimensions` [list, shape=(n_dims,)]:
        List of search space dimensions.
        Each search dimension can be defined either as

        - a `(lower_bound, upper_bound)` tuple (for `Real` or `Integer`
          dimensions),
        - a `(lower_bound, upper_bound, "prior")` tuple (for `Real`
          dimensions),
        - as a list of categories (for `Categorical` dimensions), or
        - an instance of a `Dimension` object (`Real`, `Integer` or
          `Categorical`).

    * `n_calls` [int, default=100]:
        Number of calls to `func` to find the minimum.

    * `random_state` [int, RandomState instance, or None (default)]:
        Set random state to something other than None for reproducible
        results.

    Returns
    -------
    * `res` [`OptimizeResult`, scipy object]:
        The optimization result returned as an OptimizeResult object.
        Important attributes are:

        - `x` [float]: location of the minimum.
        - `fun` [float]: function value at the minimum.
        - `x_iters` [array]: location of function evaluation for each
           iteration.
        - `func_vals` [array]: function value for each iteration.
        - `space` [Space]: the optimisation space.

        For more details related to the OptimizeResult object, refer
        http://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.OptimizeResult.html
    """
    rng = check_random_state(random_state)
    space = Space(dimensions)
    X = space.rvs(n_samples=n_calls, random_state=rng)

    init_y = func(X[0])
    if not np.isscalar(init_y):
        raise ValueError(
            "The function to be optimized should return a scalar")
    y = np.asarray([init_y] + [func(X[i]) for i in range(1, n_calls)])

    res = OptimizeResult()
    best = np.argmin(y)
    res.x = X[best]
    res.fun = y[best]
    res.func_vals = y
    res.x_iters = X
    res.space = space

    return res
Developer: ErmiaAzarkhalili, Project: scikit-optimize, Lines: 63, Source: dummy_opt.py
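
A minimal call, assuming scikit-optimize is installed and `dummy_minimize` is imported from `skopt`:

from skopt import dummy_minimize

res = dummy_minimize(lambda x: (x[0] - 0.3) ** 2,
                     dimensions=[(-1.0, 1.0)],
                     n_calls=50, random_state=0)
print(res.x, res.fun)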

Example 15: solve

    def solve(self):
        """
        Runs the DifferentialEvolutionSolver.

        Returns
        -------
        res : OptimizeResult
            The optimization result represented as an ``OptimizeResult`` object.
            Important attributes are: ``x`` the solution array, ``success`` a
            Boolean flag indicating if the optimizer exited successfully and
            ``message`` which describes the cause of the termination. See
            `OptimizeResult` for a description of other attributes. If polish
            was employed, then OptimizeResult also contains the ``hess_inv`` and
            ``jac`` attributes.
        """

        nfev, nit, warning_flag = 0, 0, False
        status_message = _status_message['success']

        # calculate energies to start with
        for index, candidate in enumerate(self.population):
            parameters = self._scale_parameters(candidate)
            self.population_energies[index] = self.func(parameters,
                                                        *self.args)
            nfev += 1

            if nfev > self.maxfun:
                warning_flag = True
                status_message = _status_message['maxfev']
                break

        minval = np.argmin(self.population_energies)

        # put the lowest energy into the best solution position.
        lowest_energy = self.population_energies[minval]
        self.population_energies[minval] = self.population_energies[0]
        self.population_energies[0] = lowest_energy

        self.population[[0, minval], :] = self.population[[minval, 0], :]

        if warning_flag:
            return OptimizeResult(
                           x=self.x,
                           fun=self.population_energies[0],
                           nfev=nfev,
                           nit=nit,
                           message=status_message,
                           success=(warning_flag != True))

        # do the optimisation.
        for nit in range(1, self.maxiter + 1):
            if self.dither is not None:
                self.scale = (self.random_number_generator.rand()
                              * (self.dither[1] - self.dither[0])
                              + self.dither[0])
            for candidate in range(np.size(self.population, 0)):
                if nfev > self.maxfun:
                    warning_flag = True
                    status_message = _status_message['maxfev']
                    break

                trial = self._mutate(candidate)
                self._ensure_constraint(trial)
                parameters = self._scale_parameters(trial)

                energy = self.func(parameters, *self.args)
                nfev += 1

                if energy < self.population_energies[candidate]:
                    self.population[candidate] = trial
                    self.population_energies[candidate] = energy

                    if energy < self.population_energies[0]:
                        self.population_energies[0] = energy
                        self.population[0] = trial

            # stop when the fractional s.d. of the population is less than tol
            # of the mean energy
            convergence = (np.std(self.population_energies) /
                           np.abs(np.mean(self.population_energies) +
                                  _MACHEPS))

            if self.disp:
                print("differential_evolution step %d: f(x)= %g"
                      % (nit,
                         self.population_energies[0]))

            if (self.callback and
                    self.callback(self._scale_parameters(self.population[0]),
                                  convergence=self.tol / convergence) is True):

                warning_flag = True
                status_message = ('callback function requested stop early '
                                  'by returning True')
                break

            if convergence < self.tol or warning_flag:
                break

        else:
            status_message = _status_message['maxiter']
#......... remainder of this example omitted .........
Developer: ymarfoq, Project: outilACVDesagregation, Lines: 101, Source: _differentialevolution.py


Note: The scipy.optimize.OptimizeResult class examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from open-source projects contributed by many developers; copyright of the source code remains with the original authors, and distribution and use should follow each project's license. Please do not repost without permission.