当前位置: 首页>>代码示例>>Python>>正文


Python ConfigSpace.EqualsCondition方法代码示例

本文整理汇总了Python中ConfigSpace.EqualsCondition方法的典型用法代码示例。如果您正苦于以下问题:Python ConfigSpace.EqualsCondition方法的具体用法?Python ConfigSpace.EqualsCondition怎么用?Python ConfigSpace.EqualsCondition使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在ConfigSpace的用法示例。


在下文中一共展示了ConfigSpace.EqualsCondition方法的10个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。

示例1: setUp

# 需要导入模块: import ConfigSpace [as 别名]
# 或者: from ConfigSpace import EqualsCondition [as 别名]
def setUp(self):
		"""Build a small conditional configuration space shared by the tests.

		One categorical 'parent' hyperparameter with three children
		(categorical, float, integer); each child is active for exactly
		one value of 'parent'.
		"""
		self.configspace = CS.ConfigurationSpace()

		self.HPs = []
		self.HPs.append(CS.CategoricalHyperparameter('parent', [1, 2, 3]))
		self.HPs.append(CS.CategoricalHyperparameter('child1_x1', ['foo', 'bar']))
		self.HPs.append(CS.UniformFloatHyperparameter('child2_x1', lower=-1, upper=1))
		self.HPs.append(CS.UniformIntegerHyperparameter('child3_x1', lower=-2, upper=5))

		self.configspace.add_hyperparameters(self.HPs)

		# child i is only active when parent == i
		self.conditions = [
			CS.EqualsCondition(self.HPs[1], self.HPs[0], 1),
			CS.EqualsCondition(self.HPs[2], self.HPs[0], 2),
			CS.EqualsCondition(self.HPs[3], self.HPs[0], 3),
		]
		# Plain loop instead of a list comprehension used only for side effects.
		for cond in self.conditions:
			self.configspace.add_condition(cond)
开发者ID:automl,项目名称:HpBandSter,代码行数:21,代码来源:test_config_generators.py

示例2: get_configspace

# 需要导入模块: import ConfigSpace [as 别名]
# 或者: from ConfigSpace import EqualsCondition [as 别名]
def get_configspace():
    """Return the configuration space for the network to be configured in the example.

    Hyperparameters: 'activation', 'learning_rate_init' (log-uniform),
    'solver', and Adam's decay rates 'beta_1'/'beta_2', which are only
    active when solver == 'adam'.
    """
    config_space = CS.ConfigurationSpace()
    config_space.add_hyperparameters([
        CSH.CategoricalHyperparameter('activation', ['tanh', 'relu']),
        # use the CSH alias consistently for hyperparameter classes (was CS.*)
        CSH.UniformFloatHyperparameter(
            'learning_rate_init', lower=1e-6, upper=1e-2, log=True)])

    solver = CSH.CategoricalHyperparameter('solver', ['sgd', 'adam'])
    config_space.add_hyperparameter(solver)

    # beta_1/beta_2 share range and condition; add them in one loop instead
    # of duplicating the three statements per parameter.
    for name in ('beta_1', 'beta_2'):
        beta = CSH.UniformFloatHyperparameter(name, lower=0, upper=1)
        config_space.add_hyperparameter(beta)
        config_space.add_condition(CS.EqualsCondition(beta, solver, 'adam'))

    return config_space
开发者ID:automl,项目名称:BOAH,代码行数:26,代码来源:helper_functions.py

示例3: get_hyperparameter_search_space

# 需要导入模块: import ConfigSpace [as 别名]
# 或者: from ConfigSpace import EqualsCondition [as 别名]
def get_hyperparameter_search_space(self, **pipeline_config):
        """Build the search space for the image-augmentation options.

        Four boolean switches plus the cutout geometry; the auto-augmentation
        variants depend on 'augment' and the cutout parameters on 'cutout'.
        """
        import ConfigSpace as CS
        import ConfigSpace.hyperparameters as CSH
        cs = CS.ConfigurationSpace()

        def add_switch(name):
            # boolean categorical; add_hyperparameter returns the hp object
            return cs.add_hyperparameter(CSH.CategoricalHyperparameter(name, [True, False]))

        augment = add_switch('augment')
        autoaugment = add_switch('autoaugment')
        fastautoaugment = add_switch('fastautoaugment')
        cutout = add_switch('cutout')

        cutout_length = cs.add_hyperparameter(
            CSH.UniformIntegerHyperparameter('length', lower=0, upper=20, log=False))
        cutout_holes = cs.add_hyperparameter(
            CSH.UniformIntegerHyperparameter('cutout_holes', lower=1, upper=3, log=False))

        # each child is only active when its parent switch is True
        for child, parent in ((cutout_length, cutout), (cutout_holes, cutout),
                              (autoaugment, augment), (fastautoaugment, augment)):
            cs.add_condition(CS.EqualsCondition(child, parent, True))

        return cs
开发者ID:automl,项目名称:Auto-PyTorch,代码行数:22,代码来源:image_augmentation.py

示例4: get_config_space

# 需要导入模块: import ConfigSpace [as 别名]
# 或者: from ConfigSpace import EqualsCondition [as 别名]
def get_config_space(
        num_layers=(1, 15),
        max_units=((10, 1024), True),
        activation=('sigmoid', 'tanh', 'relu'),
        mlp_shape=('funnel', 'long_funnel', 'diamond', 'hexagon', 'brick', 'triangle', 'stairs'),
        max_dropout=(0, 1.0),
        use_dropout=(True, False)
    ):
        """Build the configuration space for a shaped MLP.

        Each parameter is a (range, [log]) spec consumed by the project
        helpers get_hyperparameter/add_hyperparameter. 'max_dropout' is
        only active when 'use_dropout' is True.
        """
        cs = CS.ConfigurationSpace()

        mlp_shape_hp = get_hyperparameter(CSH.CategoricalHyperparameter, 'mlp_shape', mlp_shape)
        cs.add_hyperparameter(mlp_shape_hp)

        num_layers_hp = get_hyperparameter(CSH.UniformIntegerHyperparameter, 'num_layers', num_layers)
        cs.add_hyperparameter(num_layers_hp)
        max_units_hp = get_hyperparameter(CSH.UniformIntegerHyperparameter, "max_units", max_units)
        cs.add_hyperparameter(max_units_hp)

        # use the CSH alias consistently (was CS.CategoricalHyperparameter)
        use_dropout_hp = add_hyperparameter(cs, CSH.CategoricalHyperparameter, "use_dropout", use_dropout)

        # dropout strength only matters when dropout is enabled
        max_dropout_hp = add_hyperparameter(cs, CSH.UniformFloatHyperparameter, "max_dropout", max_dropout)
        cs.add_condition(CS.EqualsCondition(max_dropout_hp, use_dropout_hp, True))

        add_hyperparameter(cs, CSH.CategoricalHyperparameter, 'activation', activation)
        return cs
开发者ID:automl,项目名称:Auto-PyTorch,代码行数:27,代码来源:shapedmlpnet.py

示例5: insert_inter_node_hyperparameter_dependencies

# 需要导入模块: import ConfigSpace [as 别名]
# 或者: from ConfigSpace import EqualsCondition [as 别名]
def insert_inter_node_hyperparameter_dependencies(self, config_space, dataset_info=None, **pipeline_config):
        """Tie the embedding choice to the 'none' preprocessor option.

        Only applies when categorical features exist and 'none' is among the
        configured preprocessors; otherwise the space is returned unchanged.
        """
        categorical = pipeline_config['categorical_features']
        has_categoricals = categorical is not None and any(categorical)
        if not has_categoricals or 'none' not in pipeline_config['preprocessors']:
            # no categorical features -> no embedding
            return config_space

        embedding_hp = config_space.get_hyperparameter(
            EmbeddingSelector.get_name() + ConfigWrapper.delimiter + "embedding")
        preprocessor_hp = config_space.get_hyperparameter(
            PreprocessorSelector.get_name() + ConfigWrapper.delimiter + "preprocessor")

        config_space.add_condition(
            ConfigSpace.EqualsCondition(embedding_hp, preprocessor_hp, "none"))
        return config_space
开发者ID:automl,项目名称:Auto-PyTorch,代码行数:13,代码来源:embedding_selector.py

示例6: get_config_space

# 需要导入模块: import ConfigSpace [as 别名]
# 或者: from ConfigSpace import EqualsCondition [as 别名]
def get_config_space(
        num_groups=(1, 9),
        blocks_per_group=(1, 4),
        max_units=((10, 1024), True),
        activation=('sigmoid', 'tanh', 'relu'),
        max_shake_drop_probability=(0, 1),
        max_dropout=(0, 1.0),
        resnet_shape=('funnel', 'long_funnel', 'diamond', 'hexagon', 'brick', 'triangle', 'stairs'),
        use_dropout=(True, False),
        use_shake_shake=(True, False),
        use_shake_drop=(True, False)
    ):
        """Construct the hyperparameter space of a shaped ResNet.

        'max_shake_drop_probability' and 'max_dropout' are only added (and
        conditioned on their switches) when True is among the respective
        switch's allowed values.
        """
        space = CS.ConfigurationSpace()

        groups_hp = get_hyperparameter(CS.UniformIntegerHyperparameter, "num_groups", num_groups)
        space.add_hyperparameter(groups_hp)
        bpg_hp = get_hyperparameter(CS.UniformIntegerHyperparameter, "blocks_per_group", blocks_per_group)
        space.add_hyperparameter(bpg_hp)

        add_hyperparameter(space, CS.CategoricalHyperparameter, "activation", activation)
        dropout_switch = add_hyperparameter(space, CS.CategoricalHyperparameter, "use_dropout", use_dropout)
        add_hyperparameter(space, CS.CategoricalHyperparameter, "use_shake_shake", use_shake_shake)
        shake_drop_switch = add_hyperparameter(space, CS.CategoricalHyperparameter, "use_shake_drop", use_shake_drop)

        # shake-drop probability exists only if shake-drop can be switched on
        if True in use_shake_drop:
            prob_hp = add_hyperparameter(space, CS.UniformFloatHyperparameter,
                                         "max_shake_drop_probability", max_shake_drop_probability)
            space.add_condition(CS.EqualsCondition(prob_hp, shake_drop_switch, True))

        add_hyperparameter(space, CSH.CategoricalHyperparameter, 'resnet_shape', resnet_shape)
        add_hyperparameter(space, CSH.UniformIntegerHyperparameter, "max_units", max_units)

        # dropout strength exists only if dropout can be switched on
        if True in use_dropout:
            dropout_hp = add_hyperparameter(space, CSH.UniformFloatHyperparameter, "max_dropout", max_dropout)
            space.add_condition(CS.EqualsCondition(dropout_hp, dropout_switch, True))

        return space
开发者ID:automl,项目名称:Auto-PyTorch,代码行数:38,代码来源:shapedresnet.py

示例7: get_config_space

# 需要导入模块: import ConfigSpace [as 别名]
# 或者: from ConfigSpace import EqualsCondition [as 别名]
def get_config_space(growth_rate_range=(12, 40), nr_blocks=(3, 4), layer_range=([1, 12], [6, 24], [12, 64], [12, 64]), num_init_features=(32, 128), **kwargs):
        """Build the configuration space for a DenseNet.

        'layer_in_block_i' hyperparameters are created up to the maximum
        block count; blocks beyond the minimum are conditioned on the
        'blocks' hyperparameter being large enough.
        """
        import ConfigSpace as CS
        import ConfigSpace.hyperparameters as CSH
        from autoPyTorch.utils.config_space_hyperparameter import add_hyperparameter

        cs = CS.ConfigurationSpace()
        # use the locally imported CSH alias instead of the module-level ConfigSpace name
        growth_rate_hp = get_hyperparameter(CSH.UniformIntegerHyperparameter, 'growth_rate', growth_rate_range)
        cs.add_hyperparameter(growth_rate_hp)

        blocks_hp = get_hyperparameter(CSH.UniformIntegerHyperparameter, 'blocks', nr_blocks)
        cs.add_hyperparameter(blocks_hp)
        use_dropout = add_hyperparameter(cs, CSH.CategoricalHyperparameter, 'use_dropout', [True, False])
        dropout = add_hyperparameter(cs, CSH.UniformFloatHyperparameter, 'dropout', [0.0, 1.0])
        cs.add_condition(CS.EqualsCondition(dropout, use_dropout, True))

        # nr_blocks is either a plain (min, max) pair or a ((min, max), ...) spec
        if isinstance(nr_blocks[0], int):  # was: type(nr_blocks[0]) == int
            min_blocks = nr_blocks[0]
            max_blocks = nr_blocks[1]
        else:
            min_blocks = nr_blocks[0][0]
            max_blocks = nr_blocks[0][1]

        for i in range(1, max_blocks + 1):
            layer_hp = get_hyperparameter(CSH.UniformIntegerHyperparameter, 'layer_in_block_%d' % i, layer_range[i-1])
            cs.add_hyperparameter(layer_hp)

            # blocks above the minimum are only active when 'blocks' > i-1
            if i > min_blocks:
                cs.add_condition(CS.GreaterThanCondition(layer_hp, blocks_hp, i-1))

        return cs
开发者ID:automl,项目名称:Auto-PyTorch,代码行数:36,代码来源:densenet.py

示例8: get_configspace

# 需要导入模块: import ConfigSpace [as 别名]
# 或者: from ConfigSpace import EqualsCondition [as 别名]
def get_configspace():
		"""
		Build the configuration space with the needed hyperparameters.

		Besides float hyperparameters on a log scale, it also handles
		categorical parameters and conditional hyperparameters.
		:return: ConfigurationSpace object
		"""
		cs = CS.ConfigurationSpace()

		# BUGFIX: default_value was the string '1e-2'; a float default is expected.
		lr = CSH.UniformFloatHyperparameter('lr', lower=1e-6, upper=1e-1, default_value=1e-2, log=True)

		# For demonstration purposes, we add different optimizers as categorical hyperparameters.
		# To show how to use conditional hyperparameters with ConfigSpace, we'll add the optimizers 'Adam' and 'SGD'.
		# SGD has a different parameter 'momentum'.
		optimizer = CSH.CategoricalHyperparameter('optimizer', ['Adam', 'SGD'])

		sgd_momentum = CSH.UniformFloatHyperparameter('sgd_momentum', lower=0.0, upper=0.99, default_value=0.9, log=False)

		cs.add_hyperparameters([lr, optimizer, sgd_momentum])

		num_conv_layers = CSH.UniformIntegerHyperparameter('num_conv_layers', lower=1, upper=3, default_value=2)

		num_filters_1 = CSH.UniformIntegerHyperparameter('num_filters_1', lower=4, upper=64, default_value=16, log=True)
		num_filters_2 = CSH.UniformIntegerHyperparameter('num_filters_2', lower=4, upper=64, default_value=16, log=True)
		num_filters_3 = CSH.UniformIntegerHyperparameter('num_filters_3', lower=4, upper=64, default_value=16, log=True)

		cs.add_hyperparameters([num_conv_layers, num_filters_1, num_filters_2, num_filters_3])

		dropout_rate = CSH.UniformFloatHyperparameter('dropout_rate', lower=0.0, upper=0.9, default_value=0.5, log=False)
		num_fc_units = CSH.UniformIntegerHyperparameter('num_fc_units', lower=8, upper=256, default_value=32, log=True)

		cs.add_hyperparameters([dropout_rate, num_fc_units])

		# The hyperparameter sgd_momentum will be used, if the configuration
		# contains 'SGD' as optimizer.
		cond = CS.EqualsCondition(sgd_momentum, optimizer, 'SGD')
		cs.add_condition(cond)

		# You can also use inequality conditions:
		cond = CS.GreaterThanCondition(num_filters_2, num_conv_layers, 1)
		cs.add_condition(cond)

		cond = CS.GreaterThanCondition(num_filters_3, num_conv_layers, 2)
		cs.add_condition(cond)

		return cs
开发者ID:automl,项目名称:HpBandSter,代码行数:52,代码来源:example_5_keras_worker.py

示例9: get_configspace

# 需要导入模块: import ConfigSpace [as 别名]
# 或者: from ConfigSpace import EqualsCondition [as 别名]
def get_configspace():
		"""
		Build the configuration space with the needed hyperparameters.

		Besides float hyperparameters on a log scale, it also handles
		categorical parameters and conditional hyperparameters.
		:return: ConfigurationSpace object
		"""
		cs = CS.ConfigurationSpace()

		# BUGFIX: default_value was the string '1e-2'; a float default is expected.
		lr = CSH.UniformFloatHyperparameter('lr', lower=1e-6, upper=1e-1, default_value=1e-2, log=True)

		# For demonstration purposes, we add different optimizers as categorical hyperparameters.
		# To show how to use conditional hyperparameters with ConfigSpace, we'll add the optimizers 'Adam' and 'SGD'.
		# SGD has a different parameter 'momentum'.
		optimizer = CSH.CategoricalHyperparameter('optimizer', ['Adam', 'SGD'])

		sgd_momentum = CSH.UniformFloatHyperparameter('sgd_momentum', lower=0.0, upper=0.99, default_value=0.9, log=False)

		cs.add_hyperparameters([lr, optimizer, sgd_momentum])

		# The hyperparameter sgd_momentum will be used, if the configuration
		# contains 'SGD' as optimizer.
		cond = CS.EqualsCondition(sgd_momentum, optimizer, 'SGD')
		cs.add_condition(cond)

		num_conv_layers = CSH.UniformIntegerHyperparameter('num_conv_layers', lower=1, upper=3, default_value=2)

		num_filters_1 = CSH.UniformIntegerHyperparameter('num_filters_1', lower=4, upper=64, default_value=16, log=True)
		num_filters_2 = CSH.UniformIntegerHyperparameter('num_filters_2', lower=4, upper=64, default_value=16, log=True)
		num_filters_3 = CSH.UniformIntegerHyperparameter('num_filters_3', lower=4, upper=64, default_value=16, log=True)

		cs.add_hyperparameters([num_conv_layers, num_filters_1, num_filters_2, num_filters_3])

		# You can also use inequality conditions:
		cond = CS.GreaterThanCondition(num_filters_2, num_conv_layers, 1)
		cs.add_condition(cond)

		cond = CS.GreaterThanCondition(num_filters_3, num_conv_layers, 2)
		cs.add_condition(cond)

		dropout_rate = CSH.UniformFloatHyperparameter('dropout_rate', lower=0.0, upper=0.9, default_value=0.5, log=False)
		num_fc_units = CSH.UniformIntegerHyperparameter('num_fc_units', lower=8, upper=256, default_value=32, log=True)

		cs.add_hyperparameters([dropout_rate, num_fc_units])

		return cs
开发者ID:automl,项目名称:HpBandSter,代码行数:50,代码来源:example_5_pytorch_worker.py

示例10: get_config_space

# 需要导入模块: import ConfigSpace [as 别名]
# 或者: from ConfigSpace import EqualsCondition [as 别名]
def get_config_space(
        num_groups=((1, 9), False),
        blocks_per_group=((1, 4), False),
        num_units=((10, 1024), True),
        activation=('sigmoid', 'tanh', 'relu'),
        max_shake_drop_probability=(0, 1),
        dropout=(0, 1.0),
        use_shake_drop=(True, False),
        use_shake_shake=(True, False),
        use_dropout=(True, False),
        **kwargs
    ):
        """Build the configuration space for a ResNet.

        Parameters are (range, log) specs or value tuples consumed by the
        project helpers get_hyperparameter/add_hyperparameter. Per-group
        'num_units_i' / 'dropout_i' overrides may be passed via **kwargs
        and are consumed with kwargs.pop; leftovers trigger the final assert.
        """
        cs = ConfigSpace.ConfigurationSpace()

        num_groups_hp = get_hyperparameter(ConfigSpace.UniformIntegerHyperparameter, "num_groups", num_groups)
        cs.add_hyperparameter(num_groups_hp)
        blocks_per_group_hp = get_hyperparameter(ConfigSpace.UniformIntegerHyperparameter, "blocks_per_group", blocks_per_group)
        cs.add_hyperparameter(blocks_per_group_hp)
        add_hyperparameter(cs, ConfigSpace.CategoricalHyperparameter, "activation", activation)
        
        use_dropout_hp = get_hyperparameter(ConfigSpace.CategoricalHyperparameter, "use_dropout", use_dropout)
        cs.add_hyperparameter(use_dropout_hp)
        add_hyperparameter(cs, ConfigSpace.CategoricalHyperparameter, "use_shake_shake", use_shake_shake)
        
        # max_shake_drop_probability only exists if shake-drop can be enabled at all
        use_shake_drop_hp = add_hyperparameter(cs, ConfigSpace.CategoricalHyperparameter, "use_shake_drop", use_shake_drop)
        if True in use_shake_drop:
            shake_drop_prob_hp = add_hyperparameter(cs, ConfigSpace.UniformFloatHyperparameter, "max_shake_drop_probability",
                max_shake_drop_probability)
            cs.add_condition(ConfigSpace.EqualsCondition(shake_drop_prob_hp, use_shake_drop_hp, True))
        

        # it is the upper bound of the nr of groups, since the configuration will actually be sampled.
        # NOTE(review): assumes num_groups is the ((min, max), log) form — index 0 (presumably
        # the stem/input group) up to the maximum group count get their own hyperparameters.
        for i in range(0, num_groups[0][1] + 1):

            # kwargs.pop both reads the override and removes it, so the final assert
            # can detect unknown/unused updates.
            n_units_hp = add_hyperparameter(cs, ConfigSpace.UniformIntegerHyperparameter,
                "num_units_%d" % i, kwargs.pop("num_units_%d" % i, num_units))

            # groups beyond the minimum (indices 0 and 1 are unconditional) are only
            # active when 'num_groups' exceeds i-1
            if i > 1:
                cs.add_condition(ConfigSpace.GreaterThanCondition(n_units_hp, num_groups_hp, i - 1))

            if True in use_dropout:
                dropout_hp = add_hyperparameter(cs, ConfigSpace.UniformFloatHyperparameter,
                    "dropout_%d" % i, kwargs.pop("dropout_%d" % i, dropout))
                dropout_condition_1 = ConfigSpace.EqualsCondition(dropout_hp, use_dropout_hp, True)

                # dropout_i needs both the dropout switch and (for later groups)
                # a sufficiently large 'num_groups' -> combine with AndConjunction
                if i > 1:
                
                    dropout_condition_2 = ConfigSpace.GreaterThanCondition(dropout_hp, num_groups_hp, i - 1)

                    cs.add_condition(ConfigSpace.AndConjunction(dropout_condition_1, dropout_condition_2))
                else:
                    cs.add_condition(dropout_condition_1)
        # any override left in kwargs did not match an existing hyperparameter name
        assert len(kwargs) == 0, "Invalid hyperparameter updates for resnet: %s" % str(kwargs)
        return cs 
开发者ID:automl,项目名称:Auto-PyTorch,代码行数:56,代码来源:resnet.py


注:本文中的ConfigSpace.EqualsCondition方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。