

Python hyperparameters.CategoricalHyperparameter Method Code Examples

This article collects typical usage examples of the Python method ConfigSpace.hyperparameters.CategoricalHyperparameter. If you are wondering what this method does or how to use it in practice, the curated examples below should help. You can also explore further usage examples from the containing module, ConfigSpace.hyperparameters.


A total of 15 code examples of the hyperparameters.CategoricalHyperparameter method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
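Before the collected examples, here is a minimal, hedged sketch (the parameter name 'optimizer' and its choices are invented for illustration) of how a CategoricalHyperparameter is typically declared, added to a ConfigurationSpace, and sampled:

import ConfigSpace as CS
import ConfigSpace.hyperparameters as CSH

# Build a small configuration space with one categorical hyperparameter.
cs = CS.ConfigurationSpace(seed=42)
optimizer = CSH.CategoricalHyperparameter('optimizer', choices=['sgd', 'adam', 'rmsprop'])
cs.add_hyperparameter(optimizer)

# Sample a random configuration; the value is always one of the declared choices.
config = cs.sample_configuration()
print(config['optimizer'])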

Example 1: _convert_hyper_parameters_to_cs

# Required module: from ConfigSpace import hyperparameters [as an alias]
# Or: from ConfigSpace.hyperparameters import CategoricalHyperparameter [as an alias]
def _convert_hyper_parameters_to_cs(self):
        # type: () -> CS.ConfigurationSpace
        cs = CS.ConfigurationSpace(seed=self._seed)
        for p in self._hyper_parameters:
            if isinstance(p, UniformParameterRange):
                hp = CSH.UniformFloatHyperparameter(
                    p.name, lower=p.min_value, upper=p.max_value, log=False, q=p.step_size)
            elif isinstance(p, UniformIntegerParameterRange):
                hp = CSH.UniformIntegerHyperparameter(
                    p.name, lower=p.min_value, upper=p.max_value, log=False, q=p.step_size)
            elif isinstance(p, DiscreteParameterRange):
                hp = CSH.CategoricalHyperparameter(p.name, choices=p.values)
            else:
                raise ValueError("HyperParameter type {} not supported yet with OptimizerBOHB".format(type(p)))
            cs.add_hyperparameter(hp)

        return cs 
Developer: allegroai, Project: trains, Lines: 19, Source: bandster.py

Example 2: _plot_budget

# Required module: from ConfigSpace import hyperparameters [as an alias]
# Or: from ConfigSpace.hyperparameters import CategoricalHyperparameter [as an alias]
def _plot_budget(self, df):
        limits = OrderedDict([('cost', {'lower': df['cost'].min(),
                                        'upper': df['cost'].max()})])
        for hp in self.runscontainer.scenario.cs.get_hyperparameters():
            if isinstance(hp, NumericalHyperparameter):
                limits[hp.name] = {'lower': hp.lower, 'upper': hp.upper}
                if hp.log:
                    limits[hp.name]['log'] = True
            elif isinstance(hp, CategoricalHyperparameter):
                # We pass strings as numbers and overwrite the labels
                df[hp.name].replace({v: i for i, v in enumerate(hp.choices)}, inplace=True)
                limits[hp.name] = {'lower': 0, 'upper': len(hp.choices) - 1, 'choices': hp.choices}
            else:
                raise ValueError("Hyperparameter %s of type %s causes undefined behaviour." % (hp.name, type(hp)))
        p = parallel_plot(df=df, axes=limits, color=df[df.columns[0]], palette=Viridis256)
        div = Div(text="Select up and down column grid lines to define filters. Double click a filter to reset it.")
        plot = column(div, p)
        return plot 
Developer: automl, Project: CAVE, Lines: 20, Source: parallel_coordinates.py

Example 3: get_configspace

# Required module: from ConfigSpace import hyperparameters [as an alias]
# Or: from ConfigSpace.hyperparameters import CategoricalHyperparameter [as an alias]
def get_configspace():
    """ Returns the configuration space for the network to be configured in the example. """
    config_space = CS.ConfigurationSpace()
    config_space.add_hyperparameters([
        CSH.CategoricalHyperparameter('activation', ['tanh', 'relu']),
        CS.UniformFloatHyperparameter(
            'learning_rate_init', lower=1e-6, upper=1e-2, log=True)])
    
    solver = CSH.CategoricalHyperparameter('solver', ['sgd', 'adam'])
    config_space.add_hyperparameter(solver)
    
    beta_1 = CS.UniformFloatHyperparameter('beta_1', lower=0, upper=1)
    config_space.add_hyperparameter(beta_1)
    
    condition = CS.EqualsCondition(beta_1, solver, 'adam')
    config_space.add_condition(condition)
    
    beta_2 = CS.UniformFloatHyperparameter('beta_2', lower=0, upper=1)
    config_space.add_hyperparameter(beta_2)
    
    condition = CS.EqualsCondition(beta_2, solver, 'adam')
    config_space.add_condition(condition)
    
    return config_space 
Developer: automl, Project: BOAH, Lines: 26, Source: helper_functions.py
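As a hedged follow-up (not part of the BOAH example above), sampling from the returned space shows that the conditional hyperparameters beta_1 and beta_2 are only active when solver == 'adam':

# Illustrative usage only; assumes get_configspace() from the example above is in scope.
cs = get_configspace()
for cfg in cs.sample_configuration(5):
    # beta_1 / beta_2 are present only in configurations where 'solver' is 'adam'.
    print(cfg.get('solver'), cfg.get('beta_1'), cfg.get('beta_2'))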

Example 4: get_hyperparameter_search_space

# Required module: from ConfigSpace import hyperparameters [as an alias]
# Or: from ConfigSpace.hyperparameters import CategoricalHyperparameter [as an alias]
def get_hyperparameter_search_space(self, dataset_info=None, **pipeline_config):
        pipeline_config = self.pipeline.get_pipeline_config(**pipeline_config)
        cs = ConfigSpace.ConfigurationSpace()

        possible_techniques = set(pipeline_config['batch_loss_computation_techniques']).intersection(self.batch_loss_computation_techniques.keys())
        hp_batch_loss_computation = CSH.CategoricalHyperparameter("batch_loss_computation_technique", sorted(possible_techniques))
        cs.add_hyperparameter(hp_batch_loss_computation)

        for name, technique in self.batch_loss_computation_techniques.items():
            if name not in possible_techniques:
                continue
            technique = self.batch_loss_computation_techniques[name]

            technique_cs = technique.get_hyperparameter_search_space(
                **self._get_search_space_updates(prefix=("batch_loss_computation_technique", name)))
            cs.add_configuration_space(prefix=name, configuration_space=technique_cs,
                delimiter=ConfigWrapper.delimiter, parent_hyperparameter={'parent': hp_batch_loss_computation, 'value': name})

        self._check_search_space_updates((possible_techniques, "*"))
        return cs 
Developer: automl, Project: Auto-PyTorch, Lines: 22, Source: train_node.py

Example 5: get_hyperparameter_search_space

# Required module: from ConfigSpace import hyperparameters [as an alias]
# Or: from ConfigSpace.hyperparameters import CategoricalHyperparameter [as an alias]
def get_hyperparameter_search_space(self, dataset_info=None, **pipeline_config):
        pipeline_config = self.pipeline.get_pipeline_config(**pipeline_config)
        cs = ConfigSpace.ConfigurationSpace()

        # add hyperparameters of initialization method
        possible_initialization_methods = set(pipeline_config["initialization_methods"]).intersection(self.initialization_methods.keys())
        selector = cs.add_hyperparameter(CSH.CategoricalHyperparameter("initialization_method", sorted(possible_initialization_methods)))

        for method_name, method_type in self.initialization_methods.items():
            if (method_name not in possible_initialization_methods):
                continue
            method_cs = method_type.get_hyperparameter_search_space(
                **self._get_search_space_updates(prefix=method_name))
            cs.add_configuration_space(prefix=method_name, configuration_space=method_cs, delimiter=ConfigWrapper.delimiter, 
                                       parent_hyperparameter={'parent': selector, 'value': method_name})

        # add hyperparameter of initializer
        initializer = self.initializers[pipeline_config["initializer"]]
        initializer_cs = initializer.get_hyperparameter_search_space(**self._get_search_space_updates(prefix="initializer"))
        cs.add_configuration_space(prefix="initializer", configuration_space=initializer_cs, delimiter=ConfigWrapper.delimiter)

        self._check_search_space_updates(("initializer", "*"), (possible_initialization_methods, "*"))
        return cs 
Developer: automl, Project: Auto-PyTorch, Lines: 25, Source: initialization_selector.py

Example 6: get_hyperparameter_search_space

# Required module: from ConfigSpace import hyperparameters [as an alias]
# Or: from ConfigSpace.hyperparameters import CategoricalHyperparameter [as an alias]
def get_hyperparameter_search_space(self, dataset_info=None, **pipeline_config):
        pipeline_config = self.pipeline.get_pipeline_config(**pipeline_config)
        cs = ConfigSpace.ConfigurationSpace()

        possible_networks = set(pipeline_config["networks"]).intersection(self.networks.keys())
        selector = cs.add_hyperparameter(CSH.CategoricalHyperparameter("network", sorted(possible_networks)))
        
        network_list = list()
        for network_name, network_type in self.networks.items():
            if (network_name not in possible_networks):
                continue
            network_list.append(network_name)
            network_cs = network_type.get_config_space(
                **self._get_search_space_updates(prefix=network_name))
            cs.add_configuration_space(prefix=network_name, configuration_space=network_cs, delimiter=ConfigWrapper.delimiter, 
                                       parent_hyperparameter={'parent': selector, 'value': network_name})
        self._check_search_space_updates((possible_networks, "*"))

        return cs 
Developer: automl, Project: Auto-PyTorch, Lines: 21, Source: network_selector.py

Example 7: get_hyperparameter_search_space

# Required module: from ConfigSpace import hyperparameters [as an alias]
# Or: from ConfigSpace.hyperparameters import CategoricalHyperparameter [as an alias]
def get_hyperparameter_search_space(self, dataset_info=None, **pipeline_config):
        pipeline_config = self.pipeline.get_pipeline_config(**pipeline_config)
        cs = ConfigSpace.ConfigurationSpace()
        
        possible_optimizer = set(pipeline_config["optimizer"]).intersection(self.optimizer.keys())
        selector = cs.add_hyperparameter(CSH.CategoricalHyperparameter("optimizer", sorted(possible_optimizer)))
        
        for optimizer_name, optimizer_type in self.optimizer.items():
            if (optimizer_name not in possible_optimizer):
                continue
            optimizer_cs = optimizer_type.get_config_space(
                **self._get_search_space_updates(prefix=optimizer_name))
            cs.add_configuration_space( prefix=optimizer_name, configuration_space=optimizer_cs, delimiter=ConfigWrapper.delimiter, 
                                        parent_hyperparameter={'parent': selector, 'value': optimizer_name})

        self._check_search_space_updates(possible_optimizer, "*")
        return cs 
Developer: automl, Project: Auto-PyTorch, Lines: 19, Source: optimizer_selector.py

Example 8: get_hyperparameter_search_space

# Required module: from ConfigSpace import hyperparameters [as an alias]
# Or: from ConfigSpace.hyperparameters import CategoricalHyperparameter [as an alias]
def get_hyperparameter_search_space(self, dataset_info=None, **pipeline_config):
        pipeline_config = self.pipeline.get_pipeline_config(**pipeline_config)
        cs = ConfigSpace.ConfigurationSpace()

        if pipeline_config['categorical_features'] is None or not any(pipeline_config['categorical_features']) or 'none' not in pipeline_config['preprocessors']:
            # no categorical features -> no embedding
            return cs

        possible_embeddings = set(pipeline_config["embeddings"]).intersection(self.embedding_modules.keys())
        selector = cs.add_hyperparameter(CSH.CategoricalHyperparameter("embedding", sorted(possible_embeddings), default_value="none"))
        
        for embedding_name, embedding_type in self.embedding_modules.items():
            if (embedding_name not in possible_embeddings):
                continue
            embedding_cs = embedding_type.get_config_space(pipeline_config['categorical_features'],
                **self._get_search_space_updates(prefix=embedding_name))
            cs.add_configuration_space(prefix=embedding_name, configuration_space=embedding_cs, delimiter=ConfigWrapper.delimiter, 
                                       parent_hyperparameter={'parent': selector, 'value': embedding_name})
        
        self._check_search_space_updates((possible_embeddings, "*"))
        return cs 
Developer: automl, Project: Auto-PyTorch, Lines: 23, Source: embedding_selector.py

Example 9: get_hyperparameter_search_space

# Required module: from ConfigSpace import hyperparameters [as an alias]
# Or: from ConfigSpace.hyperparameters import CategoricalHyperparameter [as an alias]
def get_hyperparameter_search_space(self, dataset_info=None, **pipeline_config):
        pipeline_config = self.pipeline.get_pipeline_config(**pipeline_config)
        cs = ConfigSpace.ConfigurationSpace()

        possible_preprocessors = set(pipeline_config["preprocessors"]).intersection(self.preprocessors.keys())
        selector = cs.add_hyperparameter(CSH.CategoricalHyperparameter("preprocessor", sorted(possible_preprocessors)))
        
        for preprocessor_name, preprocessor_type in self.preprocessors.items():
            if (preprocessor_name not in possible_preprocessors):
                continue
            preprocessor_cs = preprocessor_type.get_hyperparameter_search_space(dataset_info=dataset_info,
                **self._get_search_space_updates(prefix=preprocessor_name))
            cs.add_configuration_space( prefix=preprocessor_name, configuration_space=preprocessor_cs, delimiter=ConfigWrapper.delimiter, 
                                        parent_hyperparameter={'parent': selector, 'value': preprocessor_name})

        self._check_search_space_updates((possible_preprocessors, "*"))
        return cs 
Developer: automl, Project: Auto-PyTorch, Lines: 19, Source: preprocessor_selector.py

Example 10: get_hyperparameter_search_space

# Required module: from ConfigSpace import hyperparameters [as an alias]
# Or: from ConfigSpace.hyperparameters import CategoricalHyperparameter [as an alias]
def get_hyperparameter_search_space(self, dataset_info=None, **pipeline_config):
        pipeline_config = self.pipeline.get_pipeline_config(**pipeline_config)
        cs = ConfigSpace.ConfigurationSpace()

        possible_lr_scheduler = set(pipeline_config["lr_scheduler"]).intersection(self.lr_scheduler.keys())
        selector = cs.add_hyperparameter(CSH.CategoricalHyperparameter("lr_scheduler", sorted(possible_lr_scheduler)))
        
        for lr_scheduler_name, lr_scheduler_type in self.lr_scheduler.items():
            if (lr_scheduler_name not in possible_lr_scheduler):
                continue
            lr_scheduler_cs = lr_scheduler_type.get_config_space(
                **self._get_search_space_updates(prefix=lr_scheduler_name))
            cs.add_configuration_space( prefix=lr_scheduler_name, configuration_space=lr_scheduler_cs, delimiter=ConfigWrapper.delimiter, 
                                        parent_hyperparameter={'parent': selector, 'value': lr_scheduler_name})

        self._check_search_space_updates((possible_lr_scheduler, "*"))
        return cs 
Developer: automl, Project: Auto-PyTorch, Lines: 19, Source: lr_scheduler_selector.py

Example 11: get_hyperparameter_search_space

# Required module: from ConfigSpace import hyperparameters [as an alias]
# Or: from ConfigSpace.hyperparameters import CategoricalHyperparameter [as an alias]
def get_hyperparameter_search_space(self, **pipeline_config):
        pipeline_config = self.pipeline.get_pipeline_config(**pipeline_config)
        cs = ConfigSpace.ConfigurationSpace()

        hp_batch_loss_computation = cs.add_hyperparameter(CSH.CategoricalHyperparameter("batch_loss_computation_technique", sorted(self.batch_loss_computation_techniques.keys())))

        for name, technique in self.batch_loss_computation_techniques.items():
            parent = {'parent': hp_batch_loss_computation, 'value': name} if hp_batch_loss_computation is not None else None
            cs.add_configuration_space(prefix=name, configuration_space=technique.get_hyperparameter_search_space(**pipeline_config),
                delimiter=ConfigWrapper.delimiter, parent_hyperparameter=parent)

        possible_loss_comps = sorted(list(set(pipeline_config["batch_loss_computation_techniques"]).intersection(self.batch_loss_computation_techniques.keys())))

        if 'batch_loss_computation_techniques' not in pipeline_config.keys():
            cs.add_hyperparameter(CSH.CategoricalHyperparameter("batch_loss_computation_technique", possible_loss_comps))
            self._check_search_space_updates()

        return cs 
Developer: automl, Project: Auto-PyTorch, Lines: 20, Source: simple_train_node.py

Example 12: get_hyperparameter

# Required module: from ConfigSpace import hyperparameters [as an alias]
# Or: from ConfigSpace.hyperparameters import CategoricalHyperparameter [as an alias]
def get_hyperparameter(hyper_type, name, value_range, log = False):
    if isinstance(value_range, tuple) and len(value_range) == 2 and isinstance(value_range[1], bool) and \
        isinstance(value_range[0], (tuple, list)):
        value_range, log = value_range

    if len(value_range) == 0:
        raise ValueError(name + ': The range has to contain at least one element')
    if len(value_range) == 1:
        return CSH.Constant(name, int(value_range[0]) if isinstance(value_range[0], bool) else value_range[0])
    if len(value_range) == 2 and value_range[0] == value_range[1]:
        return CSH.Constant(name, int(value_range[0]) if isinstance(value_range[0], bool) else value_range[0])
    if hyper_type == CSH.CategoricalHyperparameter:
        return CSH.CategoricalHyperparameter(name, value_range)
    if hyper_type == CSH.UniformFloatHyperparameter:
        assert len(value_range) == 2, "Float HP range update for %s is specified by the two upper and lower values. %s given." %(name, len(value_range))
        return CSH.UniformFloatHyperparameter(name, lower=value_range[0], upper=value_range[1], log=log)
    if hyper_type == CSH.UniformIntegerHyperparameter:
        assert len(value_range) == 2, "Int HP range update for %s is specified by the two upper and lower values. %s given." %(name, len(value_range))
        return CSH.UniformIntegerHyperparameter(name, lower=value_range[0], upper=value_range[1], log=log)
    raise ValueError('Unknown type: %s for hp %s' % (hyper_type, name) ) 
Developer: automl, Project: Auto-PyTorch, Lines: 22, Source: config_space_hyperparameter.py
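A brief, hypothetical call pattern for the helper above (the names 'activation' and 'learning_rate' are invented for illustration): a multi-element range with CSH.CategoricalHyperparameter yields a categorical, a single-element range collapses to a Constant, and a ((low, high), bool) tuple unpacks into a value range plus a log flag:

# Hypothetical inputs; names are not taken from the Auto-PyTorch code base.
act_hp = get_hyperparameter(CSH.CategoricalHyperparameter, 'activation', ['relu', 'tanh', 'sigmoid'])
const_hp = get_hyperparameter(CSH.CategoricalHyperparameter, 'activation', ['relu'])  # collapses to CSH.Constant
lr_hp = get_hyperparameter(CSH.UniformFloatHyperparameter, 'learning_rate', ((1e-4, 1e-1), True))  # log-scale float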

Example 13: get_hyperparameter_search_space

# Required module: from ConfigSpace import hyperparameters [as an alias]
# Or: from ConfigSpace.hyperparameters import CategoricalHyperparameter [as an alias]
def get_hyperparameter_search_space(
        dataset_info=None,
        n_components=(10,2000),
        algorithm=('parallel', 'deflation'),
        whiten=(True, False),
        fun=('logcosh', 'exp', 'cube'),
    ):
        cs = ConfigSpace.ConfigurationSpace()

        n_components_hp = get_hyperparameter(CSH.UniformIntegerHyperparameter, "n_components", n_components)
        algorithm_hp = get_hyperparameter(CSH.CategoricalHyperparameter, 'algorithm', algorithm)
        whiten_hp = get_hyperparameter(CSH.CategoricalHyperparameter, 'whiten', whiten)
        fun_hp = get_hyperparameter(CSH.CategoricalHyperparameter, 'fun', fun)

        if True in whiten:
            cs.add_hyperparameters([n_components_hp, algorithm_hp, whiten_hp, fun_hp])
            cs.add_condition(CSC.EqualsCondition(n_components_hp, whiten_hp, True))

        return cs 
Developer: automl, Project: Auto-PyTorch, Lines: 21, Source: fast_ica.py

Example 14: get_hyperparameter_search_space

# Required module: from ConfigSpace import hyperparameters [as an alias]
# Or: from ConfigSpace.hyperparameters import CategoricalHyperparameter [as an alias]
def get_hyperparameter_search_space(
        dataset_info=None,
        kernel=('poly', 'rbf', 'sigmoid', 'cosine'),
        n_components=(10, 2000),
        gamma=((3.0517578125e-05, 8), True),
        degree=(2, 5),
        coef0=(-1, 1)
    ):
        cs = ConfigSpace.ConfigurationSpace()
        kernel_hp = add_hyperparameter(cs, CSH.CategoricalHyperparameter, 'kernel', kernel)
        add_hyperparameter(cs, CSH.UniformIntegerHyperparameter, "n_components", n_components)

        if "poly" in kernel:
            degree_hp = add_hyperparameter(cs, CSH.UniformIntegerHyperparameter, 'degree', degree)
            cs.add_condition(CSC.EqualsCondition(degree_hp, kernel_hp, "poly"))
        if set(["poly", "sigmoid"]) & set(kernel):
            coef0_hp = add_hyperparameter(cs, CSH.UniformFloatHyperparameter, "coef0", coef0)
            cs.add_condition(CSC.InCondition(coef0_hp, kernel_hp, list(set(["poly", "sigmoid"]) & set(kernel))))
        if set(["poly", "rbf", "sigmoid"]) & set(kernel):
            gamma_hp = add_hyperparameter(cs, CSH.UniformFloatHyperparameter, "gamma", gamma)
            cs.add_condition(CSC.InCondition(gamma_hp, kernel_hp, list(set(["poly", "rbf", "sigmoid"]) & set(kernel))))
        return cs 
Developer: automl, Project: Auto-PyTorch, Lines: 24, Source: kernel_pca.py

Example 15: _create_config_space

# Required module: from ConfigSpace import hyperparameters [as an alias]
# Or: from ConfigSpace.hyperparameters import CategoricalHyperparameter [as an alias]
def _create_config_space(dict_hyperparams):
    """Create the hyperparameters hyperspace."""
    config_space = ConfigurationSpace()

    if not isinstance(dict_hyperparams, dict):
        raise TypeError('Hyperparams must be a dictionary.')

    for name, hyperparam in dict_hyperparams.items():
        hp_type = hyperparam['type']

        if hp_type == 'int':
            hp_range = hyperparam.get('range') or hyperparam.get('values')
            hp_min = min(hp_range)
            hp_max = max(hp_range)
            hp_default = hyperparam.get('default') or hp_min
            config_space.add_hyperparameter(
                hp.UniformIntegerHyperparameter(name, hp_min, hp_max, default_value=hp_default))

        elif hp_type == 'float':
            hp_range = hyperparam.get('range') or hyperparam.get('values')
            hp_min = min(hp_range)
            hp_max = max(hp_range)
            hp_default = hyperparam.get('default') or hp_min
            config_space.add_hyperparameter(
                hp.UniformFloatHyperparameter(name, hp_min, hp_max, default_value=hp_default))

        elif hp_type == 'bool':
            hp_default = bool(hyperparam.get('default'))
            config_space.add_hyperparameter(
                hp.CategoricalHyperparameter(name, ['true', 'false'], default_value=hp_default))

        elif hp_type == 'str':
            hp_range = hyperparam.get('range') or hyperparam.get('values')
            hp_range = [_NONE if hp is None else hp for hp in hp_range]
            hp_default = hyperparam.get('default') or hp_range[0]
            hp_default = _NONE if hp_default is None else hp_default

            config_space.add_hyperparameter(
                hp.CategoricalHyperparameter(name, hp_range, default_value=hp_default))

    return config_space 
Developer: HDI-Project, Project: BTB, Lines: 43, Source: smac.py
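For clarity, a small, hypothetical dict_hyperparams that the helper above would accept; the 'type'/'range'/'values'/'default' keys simply mirror what _create_config_space reads and are not a documented BTB schema:

# Hypothetical input for illustration only.
dict_hyperparams = {
    'max_depth': {'type': 'int', 'range': [2, 10], 'default': 3},
    'learning_rate': {'type': 'float', 'range': [0.01, 0.3]},
    'criterion': {'type': 'str', 'values': ['gini', 'entropy'], 'default': 'gini'},
}
config_space = _create_config_space(dict_hyperparams)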


Note: The ConfigSpace.hyperparameters.CategoricalHyperparameter method examples in this article were collected by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers, and copyright remains with the original authors. Please consult the corresponding project's license before distributing or using the code; do not reproduce without permission.