
Python hyperparameters.CategoricalHyperparameter method code examples

This article collects typical usage examples of the ConfigSpace.hyperparameters.CategoricalHyperparameter method in Python. If you are wondering what hyperparameters.CategoricalHyperparameter does, how to call it, or what it looks like in real code, the curated method examples below may help. You can also explore further usage examples from the ConfigSpace.hyperparameters module, where this method is defined.


A total of 15 code examples of the hyperparameters.CategoricalHyperparameter method are shown below, sorted by popularity by default.
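Before the project-specific examples, here is a minimal, self-contained sketch of the basic API (the hyperparameter name 'optimizer' and its choices are illustrative only and do not come from any of the projects below):

import ConfigSpace as CS
import ConfigSpace.hyperparameters as CSH

# Build a small configuration space containing one categorical hyperparameter.
cs = CS.ConfigurationSpace(seed=42)
optimizer = CSH.CategoricalHyperparameter(
    "optimizer",                 # name of the hyperparameter
    choices=["sgd", "adam"],     # the allowed discrete values
    default_value="adam")        # optional default; must be one of the choices
cs.add_hyperparameter(optimizer)

# Sample a random configuration; the categorical value is always one of the choices.
config = cs.sample_configuration()
print(config["optimizer"])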

Example 1: _convert_hyper_parameters_to_cs

# Required import: from ConfigSpace import hyperparameters [as alias]
# Or: from ConfigSpace.hyperparameters import CategoricalHyperparameter [as alias]
def _convert_hyper_parameters_to_cs(self):
        # type: () -> CS.ConfigurationSpace
        cs = CS.ConfigurationSpace(seed=self._seed)
        for p in self._hyper_parameters:
            if isinstance(p, UniformParameterRange):
                hp = CSH.UniformFloatHyperparameter(
                    p.name, lower=p.min_value, upper=p.max_value, log=False, q=p.step_size)
            elif isinstance(p, UniformIntegerParameterRange):
                hp = CSH.UniformIntegerHyperparameter(
                    p.name, lower=p.min_value, upper=p.max_value, log=False, q=p.step_size)
            elif isinstance(p, DiscreteParameterRange):
                hp = CSH.CategoricalHyperparameter(p.name, choices=p.values)
            else:
                raise ValueError("HyperParameter type {} not supported yet with OptimizerBOHB".format(type(p)))
            cs.add_hyperparameter(hp)

        return cs 
Developer ID: allegroai, Project: trains, Lines of code: 19, Source file: bandster.py

Example 2: _plot_budget

# Required import: from ConfigSpace import hyperparameters [as alias]
# Or: from ConfigSpace.hyperparameters import CategoricalHyperparameter [as alias]
def _plot_budget(self, df):
        limits = OrderedDict([('cost', {'lower': df['cost'].min(),
                                        'upper': df['cost'].max()})])
        for hp in self.runscontainer.scenario.cs.get_hyperparameters():
            if isinstance(hp, NumericalHyperparameter):
                limits[hp.name] = {'lower': hp.lower, 'upper': hp.upper}
                if hp.log:
                    limits[hp.name]['log'] = True
            elif isinstance(hp, CategoricalHyperparameter):
                # We pass strings as numbers and overwrite the labels
                df[hp.name].replace({v: i for i, v in enumerate(hp.choices)}, inplace=True)
                limits[hp.name] = {'lower': 0, 'upper': len(hp.choices) - 1, 'choices': hp.choices}
            else:
                raise ValueError("Hyperparameter %s of type %s causes undefined behaviour." % (hp.name, type(hp)))
        p = parallel_plot(df=df, axes=limits, color=df[df.columns[0]], palette=Viridis256)
        div = Div(text="Select up and down column grid lines to define filters. Double click a filter to reset it.")
        plot = column(div, p)
        return plot 
Developer ID: automl, Project: CAVE, Lines of code: 20, Source file: parallel_coordinates.py

Example 3: get_configspace

# Required import: from ConfigSpace import hyperparameters [as alias]
# Or: from ConfigSpace.hyperparameters import CategoricalHyperparameter [as alias]
def get_configspace():
    """ Returns the configuration space for the network to be configured in the example. """
    config_space = CS.ConfigurationSpace()
    config_space.add_hyperparameters([
        CSH.CategoricalHyperparameter('activation', ['tanh', 'relu']),
        CS.UniformFloatHyperparameter(
            'learning_rate_init', lower=1e-6, upper=1e-2, log=True)])
    
    solver = CSH.CategoricalHyperparameter('solver', ['sgd', 'adam'])
    config_space.add_hyperparameter(solver)
    
    beta_1 = CS.UniformFloatHyperparameter('beta_1', lower=0, upper=1)
    config_space.add_hyperparameter(beta_1)
    
    condition = CS.EqualsCondition(beta_1, solver, 'adam')
    config_space.add_condition(condition)
    
    beta_2 = CS.UniformFloatHyperparameter('beta_2', lower=0, upper=1)
    config_space.add_hyperparameter(beta_2)
    
    condition = CS.EqualsCondition(beta_2, solver, 'adam')
    config_space.add_condition(condition)
    
    return config_space 
Developer ID: automl, Project: BOAH, Lines of code: 26, Source file: helper_functions.py

Example 4: get_hyperparameter_search_space

# Required import: from ConfigSpace import hyperparameters [as alias]
# Or: from ConfigSpace.hyperparameters import CategoricalHyperparameter [as alias]
def get_hyperparameter_search_space(self, dataset_info=None, **pipeline_config):
        pipeline_config = self.pipeline.get_pipeline_config(**pipeline_config)
        cs = ConfigSpace.ConfigurationSpace()

        possible_techniques = set(pipeline_config['batch_loss_computation_techniques']).intersection(self.batch_loss_computation_techniques.keys())
        hp_batch_loss_computation = CSH.CategoricalHyperparameter("batch_loss_computation_technique", sorted(possible_techniques))
        cs.add_hyperparameter(hp_batch_loss_computation)

        for name, technique in self.batch_loss_computation_techniques.items():
            if name not in possible_techniques:
                continue
            technique = self.batch_loss_computation_techniques[name]

            technique_cs = technique.get_hyperparameter_search_space(
                **self._get_search_space_updates(prefix=("batch_loss_computation_technique", name)))
            cs.add_configuration_space(prefix=name, configuration_space=technique_cs,
                delimiter=ConfigWrapper.delimiter, parent_hyperparameter={'parent': hp_batch_loss_computation, 'value': name})

        self._check_search_space_updates((possible_techniques, "*"))
        return cs 
Developer ID: automl, Project: Auto-PyTorch, Lines of code: 22, Source file: train_node.py

Example 5: get_hyperparameter_search_space

# Required import: from ConfigSpace import hyperparameters [as alias]
# Or: from ConfigSpace.hyperparameters import CategoricalHyperparameter [as alias]
def get_hyperparameter_search_space(self, dataset_info=None, **pipeline_config):
        pipeline_config = self.pipeline.get_pipeline_config(**pipeline_config)
        cs = ConfigSpace.ConfigurationSpace()

        # add hyperparameters of initialization method
        possible_initialization_methods = set(pipeline_config["initialization_methods"]).intersection(self.initialization_methods.keys())
        selector = cs.add_hyperparameter(CSH.CategoricalHyperparameter("initialization_method", sorted(possible_initialization_methods)))

        for method_name, method_type in self.initialization_methods.items():
            if (method_name not in possible_initialization_methods):
                continue
            method_cs = method_type.get_hyperparameter_search_space(
                **self._get_search_space_updates(prefix=method_name))
            cs.add_configuration_space(prefix=method_name, configuration_space=method_cs, delimiter=ConfigWrapper.delimiter, 
                                       parent_hyperparameter={'parent': selector, 'value': method_name})

        # add hyperparameter of initializer
        initializer = self.initializers[pipeline_config["initializer"]]
        initializer_cs = initializer.get_hyperparameter_search_space(**self._get_search_space_updates(prefix="initializer"))
        cs.add_configuration_space(prefix="initializer", configuration_space=initializer_cs, delimiter=ConfigWrapper.delimiter)

        self._check_search_space_updates(("initializer", "*"), (possible_initialization_methods, "*"))
        return cs 
Developer ID: automl, Project: Auto-PyTorch, Lines of code: 25, Source file: initialization_selector.py

Example 6: get_hyperparameter_search_space

# Required import: from ConfigSpace import hyperparameters [as alias]
# Or: from ConfigSpace.hyperparameters import CategoricalHyperparameter [as alias]
def get_hyperparameter_search_space(self, dataset_info=None, **pipeline_config):
        pipeline_config = self.pipeline.get_pipeline_config(**pipeline_config)
        cs = ConfigSpace.ConfigurationSpace()

        possible_networks = set(pipeline_config["networks"]).intersection(self.networks.keys())
        selector = cs.add_hyperparameter(CSH.CategoricalHyperparameter("network", sorted(possible_networks)))
        
        network_list = list()
        for network_name, network_type in self.networks.items():
            if (network_name not in possible_networks):
                continue
            network_list.append(network_name)
            network_cs = network_type.get_config_space(
                **self._get_search_space_updates(prefix=network_name))
            cs.add_configuration_space(prefix=network_name, configuration_space=network_cs, delimiter=ConfigWrapper.delimiter, 
                                       parent_hyperparameter={'parent': selector, 'value': network_name})
        self._check_search_space_updates((possible_networks, "*"))

        return cs 
Developer ID: automl, Project: Auto-PyTorch, Lines of code: 21, Source file: network_selector.py

Example 7: get_hyperparameter_search_space

# Required import: from ConfigSpace import hyperparameters [as alias]
# Or: from ConfigSpace.hyperparameters import CategoricalHyperparameter [as alias]
def get_hyperparameter_search_space(self, dataset_info=None, **pipeline_config):
        pipeline_config = self.pipeline.get_pipeline_config(**pipeline_config)
        cs = ConfigSpace.ConfigurationSpace()
        
        possible_optimizer = set(pipeline_config["optimizer"]).intersection(self.optimizer.keys())
        selector = cs.add_hyperparameter(CSH.CategoricalHyperparameter("optimizer", sorted(possible_optimizer)))
        
        for optimizer_name, optimizer_type in self.optimizer.items():
            if (optimizer_name not in possible_optimizer):
                continue
            optimizer_cs = optimizer_type.get_config_space(
                **self._get_search_space_updates(prefix=optimizer_name))
            cs.add_configuration_space( prefix=optimizer_name, configuration_space=optimizer_cs, delimiter=ConfigWrapper.delimiter, 
                                        parent_hyperparameter={'parent': selector, 'value': optimizer_name})

        self._check_search_space_updates(possible_optimizer, "*")
        return cs 
Developer ID: automl, Project: Auto-PyTorch, Lines of code: 19, Source file: optimizer_selector.py

Example 8: get_hyperparameter_search_space

# Required import: from ConfigSpace import hyperparameters [as alias]
# Or: from ConfigSpace.hyperparameters import CategoricalHyperparameter [as alias]
def get_hyperparameter_search_space(self, dataset_info=None, **pipeline_config):
        pipeline_config = self.pipeline.get_pipeline_config(**pipeline_config)
        cs = ConfigSpace.ConfigurationSpace()

        if pipeline_config['categorical_features'] is None or not any(pipeline_config['categorical_features']) or 'none' not in pipeline_config['preprocessors']:
            # no categorical features -> no embedding
            return cs

        possible_embeddings = set(pipeline_config["embeddings"]).intersection(self.embedding_modules.keys())
        selector = cs.add_hyperparameter(CSH.CategoricalHyperparameter("embedding", sorted(possible_embeddings), default_value="none"))
        
        for embedding_name, embedding_type in self.embedding_modules.items():
            if (embedding_name not in possible_embeddings):
                continue
            embedding_cs = embedding_type.get_config_space(pipeline_config['categorical_features'],
                **self._get_search_space_updates(prefix=embedding_name))
            cs.add_configuration_space(prefix=embedding_name, configuration_space=embedding_cs, delimiter=ConfigWrapper.delimiter, 
                                       parent_hyperparameter={'parent': selector, 'value': embedding_name})
        
        self._check_search_space_updates((possible_embeddings, "*"))
        return cs 
Developer ID: automl, Project: Auto-PyTorch, Lines of code: 23, Source file: embedding_selector.py

Example 9: get_hyperparameter_search_space

# Required import: from ConfigSpace import hyperparameters [as alias]
# Or: from ConfigSpace.hyperparameters import CategoricalHyperparameter [as alias]
def get_hyperparameter_search_space(self, dataset_info=None, **pipeline_config):
        pipeline_config = self.pipeline.get_pipeline_config(**pipeline_config)
        cs = ConfigSpace.ConfigurationSpace()

        possible_preprocessors = set(pipeline_config["preprocessors"]).intersection(self.preprocessors.keys())
        selector = cs.add_hyperparameter(CSH.CategoricalHyperparameter("preprocessor", sorted(possible_preprocessors)))
        
        for preprocessor_name, preprocessor_type in self.preprocessors.items():
            if (preprocessor_name not in possible_preprocessors):
                continue
            preprocessor_cs = preprocessor_type.get_hyperparameter_search_space(dataset_info=dataset_info,
                **self._get_search_space_updates(prefix=preprocessor_name))
            cs.add_configuration_space( prefix=preprocessor_name, configuration_space=preprocessor_cs, delimiter=ConfigWrapper.delimiter, 
                                        parent_hyperparameter={'parent': selector, 'value': preprocessor_name})

        self._check_search_space_updates((possible_preprocessors, "*"))
        return cs 
Developer ID: automl, Project: Auto-PyTorch, Lines of code: 19, Source file: preprocessor_selector.py

Example 10: get_hyperparameter_search_space

# Required import: from ConfigSpace import hyperparameters [as alias]
# Or: from ConfigSpace.hyperparameters import CategoricalHyperparameter [as alias]
def get_hyperparameter_search_space(self, dataset_info=None, **pipeline_config):
        pipeline_config = self.pipeline.get_pipeline_config(**pipeline_config)
        cs = ConfigSpace.ConfigurationSpace()

        possible_lr_scheduler = set(pipeline_config["lr_scheduler"]).intersection(self.lr_scheduler.keys())
        selector = cs.add_hyperparameter(CSH.CategoricalHyperparameter("lr_scheduler", sorted(possible_lr_scheduler)))
        
        for lr_scheduler_name, lr_scheduler_type in self.lr_scheduler.items():
            if (lr_scheduler_name not in possible_lr_scheduler):
                continue
            lr_scheduler_cs = lr_scheduler_type.get_config_space(
                **self._get_search_space_updates(prefix=lr_scheduler_name))
            cs.add_configuration_space( prefix=lr_scheduler_name, configuration_space=lr_scheduler_cs, delimiter=ConfigWrapper.delimiter, 
                                        parent_hyperparameter={'parent': selector, 'value': lr_scheduler_name})

        self._check_search_space_updates((possible_lr_scheduler, "*"))
        return cs 
Developer ID: automl, Project: Auto-PyTorch, Lines of code: 19, Source file: lr_scheduler_selector.py

Example 11: get_hyperparameter_search_space

# Required import: from ConfigSpace import hyperparameters [as alias]
# Or: from ConfigSpace.hyperparameters import CategoricalHyperparameter [as alias]
def get_hyperparameter_search_space(self, **pipeline_config):
        pipeline_config = self.pipeline.get_pipeline_config(**pipeline_config)
        cs = ConfigSpace.ConfigurationSpace()

        hp_batch_loss_computation = cs.add_hyperparameter(CSH.CategoricalHyperparameter("batch_loss_computation_technique", sorted(self.batch_loss_computation_techniques.keys())))

        for name, technique in self.batch_loss_computation_techniques.items():
            parent = {'parent': hp_batch_loss_computation, 'value': name} if hp_batch_loss_computation is not None else None
            cs.add_configuration_space(prefix=name, configuration_space=technique.get_hyperparameter_search_space(**pipeline_config),
                delimiter=ConfigWrapper.delimiter, parent_hyperparameter=parent)

        possible_loss_comps = sorted(list(set(pipeline_config["batch_loss_computation_techniques"]).intersection(self.batch_loss_computation_techniques.keys())))

        if 'batch_loss_computation_techniques' not in pipeline_config.keys():
            cs.add_hyperparameter(CSH.CategoricalHyperparameter("batch_loss_computation_technique", possible_loss_comps))
            self._check_search_space_updates()

        return cs 
Developer ID: automl, Project: Auto-PyTorch, Lines of code: 20, Source file: simple_train_node.py

Example 12: get_hyperparameter

# Required import: from ConfigSpace import hyperparameters [as alias]
# Or: from ConfigSpace.hyperparameters import CategoricalHyperparameter [as alias]
def get_hyperparameter(hyper_type, name, value_range, log = False):
    if isinstance(value_range, tuple) and len(value_range) == 2 and isinstance(value_range[1], bool) and \
        isinstance(value_range[0], (tuple, list)):
        value_range, log = value_range

    if len(value_range) == 0:
        raise ValueError(name + ': The range has to contain at least one element')
    if len(value_range) == 1:
        return CSH.Constant(name, int(value_range[0]) if isinstance(value_range[0], bool) else value_range[0])
    if len(value_range) == 2 and value_range[0] == value_range[1]:
        return CSH.Constant(name, int(value_range[0]) if isinstance(value_range[0], bool) else value_range[0])
    if hyper_type == CSH.CategoricalHyperparameter:
        return CSH.CategoricalHyperparameter(name, value_range)
    if hyper_type == CSH.UniformFloatHyperparameter:
        assert len(value_range) == 2, "Float HP range update for %s is specified by the two upper and lower values. %s given." %(name, len(value_range))
        return CSH.UniformFloatHyperparameter(name, lower=value_range[0], upper=value_range[1], log=log)
    if hyper_type == CSH.UniformIntegerHyperparameter:
        assert len(value_range) == 2, "Int HP range update for %s is specified by the two upper and lower values. %s given." %(name, len(value_range))
        return CSH.UniformIntegerHyperparameter(name, lower=value_range[0], upper=value_range[1], log=log)
    raise ValueError('Unknown type: %s for hp %s' % (hyper_type, name) ) 
Developer ID: automl, Project: Auto-PyTorch, Lines of code: 22, Source file: config_space_hyperparameter.py

Example 13: get_hyperparameter_search_space

# Required import: from ConfigSpace import hyperparameters [as alias]
# Or: from ConfigSpace.hyperparameters import CategoricalHyperparameter [as alias]
def get_hyperparameter_search_space(
        dataset_info=None,
        n_components=(10,2000),
        algorithm=('parallel', 'deflation'),
        whiten=(True, False),
        fun=('logcosh', 'exp', 'cube'),
    ):
        cs = ConfigSpace.ConfigurationSpace()

        n_components_hp = get_hyperparameter(CSH.UniformIntegerHyperparameter, "n_components", n_components)
        algorithm_hp = get_hyperparameter(CSH.CategoricalHyperparameter, 'algorithm', algorithm)
        whiten_hp = get_hyperparameter(CSH.CategoricalHyperparameter, 'whiten', whiten)
        fun_hp = get_hyperparameter(CSH.CategoricalHyperparameter, 'fun', fun)

        if True in whiten:
            cs.add_hyperparameters([n_components_hp, algorithm_hp, whiten_hp, fun_hp])
            cs.add_condition(CSC.EqualsCondition(n_components_hp, whiten_hp, True))

        return cs 
Developer ID: automl, Project: Auto-PyTorch, Lines of code: 21, Source file: fast_ica.py

Example 14: get_hyperparameter_search_space

# Required import: from ConfigSpace import hyperparameters [as alias]
# Or: from ConfigSpace.hyperparameters import CategoricalHyperparameter [as alias]
def get_hyperparameter_search_space(
        dataset_info=None,
        kernel=('poly', 'rbf', 'sigmoid', 'cosine'),
        n_components=(10, 2000),
        gamma=((3.0517578125e-05, 8), True),
        degree=(2, 5),
        coef0=(-1, 1)
    ):
        cs = ConfigSpace.ConfigurationSpace()
        kernel_hp = add_hyperparameter(cs, CSH.CategoricalHyperparameter, 'kernel', kernel)
        add_hyperparameter(cs, CSH.UniformIntegerHyperparameter, "n_components", n_components)

        if "poly" in kernel:
            degree_hp = add_hyperparameter(cs, CSH.UniformIntegerHyperparameter, 'degree', degree)
            cs.add_condition(CSC.EqualsCondition(degree_hp, kernel_hp, "poly"))
        if set(["poly", "sigmoid"]) & set(kernel):
            coef0_hp = add_hyperparameter(cs, CSH.UniformFloatHyperparameter, "coef0", coef0)
            cs.add_condition(CSC.InCondition(coef0_hp, kernel_hp, list(set(["poly", "sigmoid"]) & set(kernel))))
        if set(["poly", "rbf", "sigmoid"]) & set(kernel):
            gamma_hp = add_hyperparameter(cs, CSH.UniformFloatHyperparameter, "gamma", gamma)
            cs.add_condition(CSC.InCondition(gamma_hp, kernel_hp, list(set(["poly", "rbf", "sigmoid"]) & set(kernel))))
        return cs 
Developer ID: automl, Project: Auto-PyTorch, Lines of code: 24, Source file: kernel_pca.py

Example 15: _create_config_space

# Required import: from ConfigSpace import hyperparameters [as alias]
# Or: from ConfigSpace.hyperparameters import CategoricalHyperparameter [as alias]
def _create_config_space(dict_hyperparams):
    """Create the hyperparameters hyperspace."""
    config_space = ConfigurationSpace()

    if not isinstance(dict_hyperparams, dict):
        raise TypeError('Hyperparams must be a dictionary.')

    for name, hyperparam in dict_hyperparams.items():
        hp_type = hyperparam['type']

        if hp_type == 'int':
            hp_range = hyperparam.get('range') or hyperparam.get('values')
            hp_min = min(hp_range)
            hp_max = max(hp_range)
            hp_default = hyperparam.get('default') or hp_min
            config_space.add_hyperparameter(
                hp.UniformIntegerHyperparameter(name, hp_min, hp_max, default_value=hp_default))

        elif hp_type == 'float':
            hp_range = hyperparam.get('range') or hyperparam.get('values')
            hp_min = min(hp_range)
            hp_max = max(hp_range)
            hp_default = hyperparam.get('default') or hp_min
            config_space.add_hyperparameter(
                hp.UniformFloatHyperparameter(name, hp_min, hp_max, default_value=hp_default))

        elif hp_type == 'bool':
            # Map the boolean default onto the string choices expected by ConfigSpace,
            # since the default value must be one of the declared choices.
            hp_default = 'true' if hyperparam.get('default') else 'false'
            config_space.add_hyperparameter(
                hp.CategoricalHyperparameter(name, ['true', 'false'], default_value=hp_default))

        elif hp_type == 'str':
            hp_range = hyperparam.get('range') or hyperparam.get('values')
            hp_range = [_NONE if hp is None else hp for hp in hp_range]
            hp_default = hyperparam.get('default') or hp_range[0]
            hp_default = _NONE if hp_default is None else hp_default

            config_space.add_hyperparameter(
                hp.CategoricalHyperparameter(name, hp_range, default_value=hp_default))

    return config_space 
Developer ID: HDI-Project, Project: BTB, Lines of code: 43, Source file: smac.py


Note: The ConfigSpace.hyperparameters.CategoricalHyperparameter method examples in this article were compiled by 純淨天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The code snippets are taken from open-source projects contributed by their respective developers, and copyright remains with the original authors; please consult each project's license before distributing or using the code. Do not reproduce without permission.