

Python ConfigSpace.ConfigurationSpace Method Code Examples

This article collects typical usage examples of the ConfigSpace.ConfigurationSpace method in Python. If you are wondering how ConfigSpace.ConfigurationSpace is used in practice, what it does, or what real code that calls it looks like, the curated examples below may help. You can also explore further usage examples from the ConfigSpace package itself.


The following presents 15 code examples of the ConfigSpace.ConfigurationSpace method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
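Before the individual examples, here is a minimal, self-contained sketch of typical ConfigurationSpace usage. It is not taken from any of the projects below; the hyperparameter names ('optimizer', 'lr', 'num_layers', 'momentum') are illustrative, and only API calls that also appear in the examples are used.

# Minimal illustrative sketch; not from any of the projects listed below.
import ConfigSpace as CS
import ConfigSpace.hyperparameters as CSH

cs = CS.ConfigurationSpace(seed=1)

# A categorical, a log-scaled float, and an integer hyperparameter.
optimizer = CSH.CategoricalHyperparameter('optimizer', ['sgd', 'adam'])
lr = CSH.UniformFloatHyperparameter('lr', lower=1e-5, upper=1e-1, log=True)
num_layers = CSH.UniformIntegerHyperparameter('num_layers', lower=1, upper=8)
cs.add_hyperparameters([optimizer, lr, num_layers])

# 'momentum' is only active when optimizer == 'sgd'.
momentum = CSH.UniformFloatHyperparameter('momentum', lower=0.0, upper=0.99)
cs.add_hyperparameter(momentum)
cs.add_condition(CS.EqualsCondition(momentum, optimizer, 'sgd'))

# Sample a configuration and inspect it as a plain dict.
config = cs.sample_configuration()
print(config.get_dictionary())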

Example 1: setUp

# Module to import: import ConfigSpace [as alias]
# Or: from ConfigSpace import ConfigurationSpace [as alias]
def setUp(self):
		self.configspace = CS.ConfigurationSpace()

		self.HPs = []
		
		self.HPs.append( CS.CategoricalHyperparameter('parent', [1,2,3]))
		
		self.HPs.append( CS.CategoricalHyperparameter('child1_x1', ['foo','bar']))
		self.HPs.append( CS.UniformFloatHyperparameter('child2_x1', lower=-1, upper=1))
		self.HPs.append( CS.UniformIntegerHyperparameter('child3_x1', lower=-2, upper=5))

		self.configspace.add_hyperparameters(self.HPs)
		
		self.conditions = []
		
		self.conditions += [CS.EqualsCondition(self.HPs[1], self.HPs[0], 1)]
		self.conditions += [CS.EqualsCondition(self.HPs[2], self.HPs[0], 2)] 
		self.conditions += [CS.EqualsCondition(self.HPs[3], self.HPs[0], 3)]
		for cond in self.conditions:
			self.configspace.add_condition(cond)
Developer: automl, Project: HpBandSter, Lines of code: 21, Source file: test_config_generators.py

Example 2: setUp

# Module to import: import ConfigSpace [as alias]
# Or: from ConfigSpace import ConfigurationSpace [as alias]
def setUp(self):
		self.configspace = CS.ConfigurationSpace(42)
		
		self.add_hyperparameters()

		x_train_confs = [self.configspace.sample_configuration() for i in range(self.n_train)]
		self.x_train = np.array([c.get_array() for c in x_train_confs])

		x_test_confs = [self.configspace.sample_configuration() for i in range(self.n_test)]
		self.x_test = np.array([c.get_array() for c in x_test_confs])
		
		self.sm_x_train = self.sm_transform_data(self.x_train)
		self.sm_x_test = self.sm_transform_data(self.x_test)
	
		self.sm_kde = sm.nonparametric.KDEMultivariate(data=self.sm_x_train,  var_type=self.var_types, bw='cv_ml')
		self.hp_kde_full = MultivariateKDE(self.configspace, fully_dimensional=True, fix_boundary=False)
		self.hp_kde_factor = MultivariateKDE(self.configspace, fully_dimensional=False, fix_boundary=False)
		self.hp_kde_full.fit(self.x_train,  bw_estimator='mlcv')
		self.hp_kde_factor.fit(self.x_train,  bw_estimator='mlcv') 
Developer: automl, Project: HpBandSter, Lines of code: 21, Source file: test_kde.py
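A note on the pattern above (an assumed sketch, not part of test_kde.py): Configuration.get_array() returns the configuration's internal vector representation, with numerical hyperparameters scaled to the unit interval, which is the matrix the KDEs above are fitted on.

# Assumed illustration of get_array(); not taken from test_kde.py.
import numpy as np
import ConfigSpace as CS
import ConfigSpace.hyperparameters as CSH

cs = CS.ConfigurationSpace(seed=0)
cs.add_hyperparameter(CSH.UniformFloatHyperparameter('x', lower=-1, upper=1))

confs = [cs.sample_configuration() for _ in range(4)]
X = np.array([c.get_array() for c in confs])  # shape (4, 1), values scaled to [0, 1]
print(X)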

Example 3: test_write_new_config

# Module to import: import ConfigSpace [as alias]
# Or: from ConfigSpace import ConfigurationSpace [as alias]
def test_write_new_config(self):

        cs = CS.ConfigurationSpace()
        cs.add_hyperparameter(CS.CategoricalHyperparameter('test', [1]))

        with tempfile.TemporaryDirectory() as temp_dir:
            logger = json_result_logger(temp_dir)

            logger.new_config('1', cs.sample_configuration().get_dictionary(), {'test': 'test'})

            self.assertTrue(os.path.exists(temp_dir))
            self.assertTrue(os.path.exists(os.path.join(temp_dir, 'configs.json')))
            self.assertTrue(os.path.exists(os.path.join(temp_dir, 'results.json')))
            self.assertEqual(logger.config_ids, set('1'))

            with open(os.path.join(temp_dir, 'configs.json')) as fh:
                data = fh.read()
                data = data.rstrip()
                self.assertEqual(data, r'["1", {"test": 1}, {"test": "test"}]') 
Developer: automl, Project: HpBandSter, Lines of code: 21, Source file: test_result.py

Example 4: get_config_space

# Module to import: import ConfigSpace [as alias]
# Or: from ConfigSpace import ConfigurationSpace [as alias]
def get_config_space():
    config_space = CS.ConfigurationSpace()

    # architecture hyperparameters
    config_space.add_hyperparameter(CSH.UniformIntegerHyperparameter('nr_residual_blocks_1', lower=1, upper=16, log=True))
    config_space.add_hyperparameter(CSH.UniformIntegerHyperparameter('nr_residual_blocks_2', lower=1, upper=16, log=True))
    config_space.add_hyperparameter(CSH.UniformIntegerHyperparameter('nr_residual_blocks_3', lower=1, upper=16, log=True))
    config_space.add_hyperparameter(CSH.UniformIntegerHyperparameter('initial_filters', lower=8, upper=32, log=True))
    config_space.add_hyperparameter(CSH.UniformFloatHyperparameter('widen_factor_1', lower=0.5, upper=8, log=True))
    config_space.add_hyperparameter(CSH.UniformFloatHyperparameter('widen_factor_2', lower=0.5, upper=4, log=True))
    config_space.add_hyperparameter(CSH.UniformFloatHyperparameter('widen_factor_3', lower=0.5, upper=4, log=True))
    config_space.add_hyperparameter(CSH.UniformIntegerHyperparameter('res_branches_1', lower=1, upper=5, log=False))
    config_space.add_hyperparameter(CSH.UniformIntegerHyperparameter('res_branches_2', lower=1, upper=5, log=False))
    config_space.add_hyperparameter(CSH.UniformIntegerHyperparameter('res_branches_3', lower=1, upper=5, log=False))
    # other hyperparameters
    config_space.add_hyperparameter(CSH.UniformFloatHyperparameter('learning_rate', lower=1e-3, upper=1, log=True))
    config_space.add_hyperparameter(CSH.UniformIntegerHyperparameter('batch_size', lower=32, upper=128, log=True))
    config_space.add_hyperparameter(CSH.UniformFloatHyperparameter('weight_decay', lower=1e-5, upper=1e-3, log=True))
    config_space.add_hyperparameter(CSH.UniformFloatHyperparameter('momentum', lower=1e-3, upper=0.99, log=False))
    config_space.add_hyperparameter(CSH.UniformFloatHyperparameter('alpha', lower=0, upper=1, log=False))
    config_space.add_hyperparameter(CSH.UniformIntegerHyperparameter('length', lower=0, upper=20, log=False))
    config_space.add_hyperparameter(CSH.UniformFloatHyperparameter('death_rate', lower=0, upper=1, log=False))

    return config_space
Developer: arberzela, Project: EfficientNAS, Lines of code: 26, Source file: cifar10_worker.py

Example 5: _convert_hyper_parameters_to_cs

# Module to import: import ConfigSpace [as alias]
# Or: from ConfigSpace import ConfigurationSpace [as alias]
def _convert_hyper_parameters_to_cs(self):
        # type: () -> CS.ConfigurationSpace
        cs = CS.ConfigurationSpace(seed=self._seed)
        for p in self._hyper_parameters:
            if isinstance(p, UniformParameterRange):
                hp = CSH.UniformFloatHyperparameter(
                    p.name, lower=p.min_value, upper=p.max_value, log=False, q=p.step_size)
            elif isinstance(p, UniformIntegerParameterRange):
                hp = CSH.UniformIntegerHyperparameter(
                    p.name, lower=p.min_value, upper=p.max_value, log=False, q=p.step_size)
            elif isinstance(p, DiscreteParameterRange):
                hp = CSH.CategoricalHyperparameter(p.name, choices=p.values)
            else:
                raise ValueError("HyperParameter type {} not supported yet with OptimizerBOHB".format(type(p)))
            cs.add_hyperparameter(hp)

        return cs 
Developer: allegroai, Project: trains, Lines of code: 19, Source file: bandster.py

Example 6: create_configspace

# Module to import: import ConfigSpace [as alias]
# Or: from ConfigSpace import ConfigurationSpace [as alias]
def create_configspace(parameter_config):
        """
        Wrap the Worker's get_configspace() function for HpBandSter interface
        """
        cs = CS.ConfigurationSpace()
        params = []
        for config in parameter_config:
            p = AbstractProposer.parse_param_config(config)
            if p['type'] == 'choice':
                param = CS.CategoricalHyperparameter(p['name'], choices=p['range'])
            else:  # for int or float
                param = dict(name=p['name'])
                param['lower'], param['upper'] = min(p['range']), max(p['range'])
                if p['type'] == 'int':
                    param = CS.UniformIntegerHyperparameter(**param)
                else:
                    param = CS.UniformFloatHyperparameter(**param)
            params.append(param)
        cs.add_hyperparameters(params)
        return cs 
Developer: LGE-ARC-AdvancedAI, Project: auptimizer, Lines of code: 22, Source file: BOHBProposer.py

Example 7: get_configspace

# Module to import: import ConfigSpace [as alias]
# Or: from ConfigSpace import ConfigurationSpace [as alias]
def get_configspace():
    """ Returns the configuration space for the network to be configured in the example. """
    config_space = CS.ConfigurationSpace()
    config_space.add_hyperparameters([
        CSH.CategoricalHyperparameter('activation', ['tanh', 'relu']),
        CS.UniformFloatHyperparameter(
            'learning_rate_init', lower=1e-6, upper=1e-2, log=True)])
    
    solver = CSH.CategoricalHyperparameter('solver', ['sgd', 'adam'])
    config_space.add_hyperparameter(solver)
    
    beta_1 = CS.UniformFloatHyperparameter('beta_1', lower=0, upper=1)
    config_space.add_hyperparameter(beta_1)
    
    condition = CS.EqualsCondition(beta_1, solver, 'adam')
    config_space.add_condition(condition)
    
    beta_2 = CS.UniformFloatHyperparameter('beta_2', lower=0, upper=1)
    config_space.add_hyperparameter(beta_2)
    
    condition = CS.EqualsCondition(beta_2, solver, 'adam')
    config_space.add_condition(condition)
    
    return config_space 
Developer: automl, Project: BOAH, Lines of code: 26, Source file: helper_functions.py
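A possible follow-up (hypothetical usage, not part of helper_functions.py): because beta_1 and beta_2 are tied to solver == 'adam' via EqualsCondition, they only appear in sampled configurations when 'adam' is drawn.

# Hypothetical usage of the space returned by get_configspace() above.
cs = get_configspace()
for config in cs.sample_configuration(size=5):
    values = config.get_dictionary()
    # beta_1/beta_2 are present only when the sampled solver is 'adam'.
    print(values['solver'], 'beta_1' in values, 'beta_2' in values)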

Example 8: get_hyperparameter_search_space

# Module to import: import ConfigSpace [as alias]
# Or: from ConfigSpace import ConfigurationSpace [as alias]
def get_hyperparameter_search_space(self, dataset_info=None, **pipeline_config):
        pipeline_config = self.pipeline.get_pipeline_config(**pipeline_config)
        cs = ConfigSpace.ConfigurationSpace()

        possible_techniques = set(pipeline_config['batch_loss_computation_techniques']).intersection(self.batch_loss_computation_techniques.keys())
        hp_batch_loss_computation = CSH.CategoricalHyperparameter("batch_loss_computation_technique", sorted(possible_techniques))
        cs.add_hyperparameter(hp_batch_loss_computation)

        for name, technique in self.batch_loss_computation_techniques.items():
            if name not in possible_techniques:
                continue
            technique = self.batch_loss_computation_techniques[name]

            technique_cs = technique.get_hyperparameter_search_space(
                **self._get_search_space_updates(prefix=("batch_loss_computation_technique", name)))
            cs.add_configuration_space(prefix=name, configuration_space=technique_cs,
                delimiter=ConfigWrapper.delimiter, parent_hyperparameter={'parent': hp_batch_loss_computation, 'value': name})

        self._check_search_space_updates((possible_techniques, "*"))
        return cs 
Developer: automl, Project: Auto-PyTorch, Lines of code: 22, Source file: train_node.py
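The nesting idiom used above (and in the following Auto-PyTorch examples) can be shown in isolation. Below is a sketch under assumed, illustrative names ('technique', 'mixup', 'alpha'), not Auto-PyTorch code: add_configuration_space() copies a child space into the parent under a prefix and makes all of its hyperparameters conditional on the given parent value.

# Illustrative sketch of add_configuration_space(); names are made up, not from train_node.py.
import ConfigSpace as CS
import ConfigSpace.hyperparameters as CSH

parent_cs = CS.ConfigurationSpace()
technique = CSH.CategoricalHyperparameter('technique', ['standard', 'mixup'])
parent_cs.add_hyperparameter(technique)

mixup_cs = CS.ConfigurationSpace()
mixup_cs.add_hyperparameter(CSH.UniformFloatHyperparameter('alpha', lower=0.0, upper=1.0))

# All hyperparameters of mixup_cs are copied in as 'mixup:alpha' etc. and are only
# active when technique == 'mixup'.
parent_cs.add_configuration_space(prefix='mixup', configuration_space=mixup_cs,
                                  delimiter=':',
                                  parent_hyperparameter={'parent': technique, 'value': 'mixup'})
print(parent_cs.sample_configuration().get_dictionary())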

Example 9: get_hyperparameter_search_space

# Module to import: import ConfigSpace [as alias]
# Or: from ConfigSpace import ConfigurationSpace [as alias]
def get_hyperparameter_search_space(self, dataset_info=None, **pipeline_config):
        pipeline_config = self.pipeline.get_pipeline_config(**pipeline_config)
        cs = ConfigSpace.ConfigurationSpace()

        # add hyperparameters of initialization method
        possible_initialization_methods = set(pipeline_config["initialization_methods"]).intersection(self.initialization_methods.keys())
        selector = cs.add_hyperparameter(CSH.CategoricalHyperparameter("initialization_method", sorted(possible_initialization_methods)))

        for method_name, method_type in self.initialization_methods.items():
            if (method_name not in possible_initialization_methods):
                continue
            method_cs = method_type.get_hyperparameter_search_space(
                **self._get_search_space_updates(prefix=method_name))
            cs.add_configuration_space(prefix=method_name, configuration_space=method_cs, delimiter=ConfigWrapper.delimiter, 
                                       parent_hyperparameter={'parent': selector, 'value': method_name})

        # add hyperparameter of initializer
        initializer = self.initializers[pipeline_config["initializer"]]
        initializer_cs = initializer.get_hyperparameter_search_space(**self._get_search_space_updates(prefix="initializer"))
        cs.add_configuration_space(prefix="initializer", configuration_space=initializer_cs, delimiter=ConfigWrapper.delimiter)

        self._check_search_space_updates(("initializer", "*"), (possible_initialization_methods, "*"))
        return cs 
Developer: automl, Project: Auto-PyTorch, Lines of code: 25, Source file: initialization_selector.py

Example 10: get_hyperparameter_search_space

# Module to import: import ConfigSpace [as alias]
# Or: from ConfigSpace import ConfigurationSpace [as alias]
def get_hyperparameter_search_space(self, dataset_info=None, **pipeline_config):
        pipeline_config = self.pipeline.get_pipeline_config(**pipeline_config)
        cs = ConfigSpace.ConfigurationSpace()

        possible_networks = set(pipeline_config["networks"]).intersection(self.networks.keys())
        selector = cs.add_hyperparameter(CSH.CategoricalHyperparameter("network", sorted(possible_networks)))
        
        network_list = list()
        for network_name, network_type in self.networks.items():
            if (network_name not in possible_networks):
                continue
            network_list.append(network_name)
            network_cs = network_type.get_config_space(
                **self._get_search_space_updates(prefix=network_name))
            cs.add_configuration_space(prefix=network_name, configuration_space=network_cs, delimiter=ConfigWrapper.delimiter, 
                                       parent_hyperparameter={'parent': selector, 'value': network_name})
        self._check_search_space_updates((possible_networks, "*"))

        return cs 
Developer: automl, Project: Auto-PyTorch, Lines of code: 21, Source file: network_selector.py

Example 11: get_hyperparameter_search_space

# Module to import: import ConfigSpace [as alias]
# Or: from ConfigSpace import ConfigurationSpace [as alias]
def get_hyperparameter_search_space(self, dataset_info=None, **pipeline_config):
        pipeline_config = self.pipeline.get_pipeline_config(**pipeline_config)
        cs = ConfigSpace.ConfigurationSpace()

        if pipeline_config['categorical_features'] is None or not any(pipeline_config['categorical_features']) or 'none' not in pipeline_config['preprocessors']:
            # no categorical features -> no embedding
            return cs

        possible_embeddings = set(pipeline_config["embeddings"]).intersection(self.embedding_modules.keys())
        selector = cs.add_hyperparameter(CSH.CategoricalHyperparameter("embedding", sorted(possible_embeddings), default_value="none"))
        
        for embedding_name, embedding_type in self.embedding_modules.items():
            if (embedding_name not in possible_embeddings):
                continue
            embedding_cs = embedding_type.get_config_space(pipeline_config['categorical_features'],
                **self._get_search_space_updates(prefix=embedding_name))
            cs.add_configuration_space(prefix=embedding_name, configuration_space=embedding_cs, delimiter=ConfigWrapper.delimiter, 
                                       parent_hyperparameter={'parent': selector, 'value': embedding_name})
        
        self._check_search_space_updates((possible_embeddings, "*"))
        return cs 
Developer: automl, Project: Auto-PyTorch, Lines of code: 23, Source file: embedding_selector.py

Example 12: get_hyperparameter_search_space

# Module to import: import ConfigSpace [as alias]
# Or: from ConfigSpace import ConfigurationSpace [as alias]
def get_hyperparameter_search_space(self, dataset_info=None, **pipeline_config):
        pipeline_config = self.pipeline.get_pipeline_config(**pipeline_config)
        cs = ConfigSpace.ConfigurationSpace()

        possible_preprocessors = set(pipeline_config["preprocessors"]).intersection(self.preprocessors.keys())
        selector = cs.add_hyperparameter(CSH.CategoricalHyperparameter("preprocessor", sorted(possible_preprocessors)))
        
        for preprocessor_name, preprocessor_type in self.preprocessors.items():
            if (preprocessor_name not in possible_preprocessors):
                continue
            preprocessor_cs = preprocessor_type.get_hyperparameter_search_space(dataset_info=dataset_info,
                **self._get_search_space_updates(prefix=preprocessor_name))
            cs.add_configuration_space( prefix=preprocessor_name, configuration_space=preprocessor_cs, delimiter=ConfigWrapper.delimiter, 
                                        parent_hyperparameter={'parent': selector, 'value': preprocessor_name})

        self._check_search_space_updates((possible_preprocessors, "*"))
        return cs 
Developer: automl, Project: Auto-PyTorch, Lines of code: 19, Source file: preprocessor_selector.py

Example 13: get_hyperparameter_search_space

# Module to import: import ConfigSpace [as alias]
# Or: from ConfigSpace import ConfigurationSpace [as alias]
def get_hyperparameter_search_space(self, dataset_info=None, **pipeline_config):
        pipeline_config = self.pipeline.get_pipeline_config(**pipeline_config)
        cs = ConfigSpace.ConfigurationSpace()

        possible_lr_scheduler = set(pipeline_config["lr_scheduler"]).intersection(self.lr_scheduler.keys())
        selector = cs.add_hyperparameter(CSH.CategoricalHyperparameter("lr_scheduler", sorted(possible_lr_scheduler)))
        
        for lr_scheduler_name, lr_scheduler_type in self.lr_scheduler.items():
            if (lr_scheduler_name not in possible_lr_scheduler):
                continue
            lr_scheduler_cs = lr_scheduler_type.get_config_space(
                **self._get_search_space_updates(prefix=lr_scheduler_name))
            cs.add_configuration_space( prefix=lr_scheduler_name, configuration_space=lr_scheduler_cs, delimiter=ConfigWrapper.delimiter, 
                                        parent_hyperparameter={'parent': selector, 'value': lr_scheduler_name})

        self._check_search_space_updates((possible_lr_scheduler, "*"))
        return cs 
Developer: automl, Project: Auto-PyTorch, Lines of code: 19, Source file: lr_scheduler_selector.py

Example 14: get_hyperparameter_search_space

# Module to import: import ConfigSpace [as alias]
# Or: from ConfigSpace import ConfigurationSpace [as alias]
def get_hyperparameter_search_space(self, **pipeline_config):
        pipeline_config = self.pipeline.get_pipeline_config(**pipeline_config)
        cs = ConfigSpace.ConfigurationSpace()

        hp_batch_loss_computation = cs.add_hyperparameter(CSH.CategoricalHyperparameter("batch_loss_computation_technique", sorted(self.batch_loss_computation_techniques.keys())))

        for name, technique in self.batch_loss_computation_techniques.items():
            parent = {'parent': hp_batch_loss_computation, 'value': name} if hp_batch_loss_computation is not None else None
            cs.add_configuration_space(prefix=name, configuration_space=technique.get_hyperparameter_search_space(**pipeline_config),
                delimiter=ConfigWrapper.delimiter, parent_hyperparameter=parent)

        possible_loss_comps = sorted(list(set(pipeline_config["batch_loss_computation_techniques"]).intersection(self.batch_loss_computation_techniques.keys())))

        if 'batch_loss_computation_techniques' not in pipeline_config.keys():
            cs.add_hyperparameter(CSH.CategoricalHyperparameter("batch_loss_computation_technique", possible_loss_comps))
            self._check_search_space_updates()

        return cs 
Developer: automl, Project: Auto-PyTorch, Lines of code: 20, Source file: simple_train_node.py

Example 15: get_hyperparameter_search_space

# Module to import: import ConfigSpace [as alias]
# Or: from ConfigSpace import ConfigurationSpace [as alias]
def get_hyperparameter_search_space(self, **pipeline_config):
        import ConfigSpace as CS
        import ConfigSpace.hyperparameters as CSH
        cs = CS.ConfigurationSpace()

        augment = cs.add_hyperparameter(CSH.CategoricalHyperparameter('augment', [True, False]))
        autoaugment = cs.add_hyperparameter(CSH.CategoricalHyperparameter('autoaugment', [True, False]))
        fastautoaugment = cs.add_hyperparameter(CSH.CategoricalHyperparameter('fastautoaugment', [True, False]))

        cutout = cs.add_hyperparameter(CSH.CategoricalHyperparameter('cutout', [True, False]))
        cutout_length = cs.add_hyperparameter(CSH.UniformIntegerHyperparameter('length', lower=0, upper=20, log=False))
        cutout_holes = cs.add_hyperparameter(CSH.UniformIntegerHyperparameter('cutout_holes', lower=1, upper=3, log=False))

        cs.add_condition(CS.EqualsCondition(cutout_length, cutout, True))
        cs.add_condition(CS.EqualsCondition(cutout_holes, cutout, True))
        
        cs.add_condition(CS.EqualsCondition(autoaugment, augment, True))
        cs.add_condition(CS.EqualsCondition(fastautoaugment, augment, True))

        return cs 
Developer: automl, Project: Auto-PyTorch, Lines of code: 22, Source file: image_augmentation.py


Note: the ConfigSpace.ConfigurationSpace method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective authors, who retain the copyright to the source code. Please consult each project's license before redistributing or reusing the code, and do not repost this article without permission.