当前位置: 首页>>代码示例>>Python>>正文


Python hyperparameters.UniformIntegerHyperparameter方法代码示例

本文整理汇总了Python中ConfigSpace.hyperparameters.UniformIntegerHyperparameter方法的典型用法代码示例。如果您正苦于以下问题:Python hyperparameters.UniformIntegerHyperparameter方法的具体用法?Python hyperparameters.UniformIntegerHyperparameter怎么用?Python hyperparameters.UniformIntegerHyperparameter使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在ConfigSpace.hyperparameters的用法示例。


在下文中一共展示了hyperparameters.UniformIntegerHyperparameter方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。

示例1: get_config_space

# 需要导入模块: from ConfigSpace import hyperparameters [as 别名]
# 或者: from ConfigSpace.hyperparameters import UniformIntegerHyperparameter [as 别名]
def get_config_space():
    """Assemble the EfficientNAS search space for the CIFAR-10 worker.

    Covers the architecture knobs (residual block counts, initial
    filters, widen factors, branch counts) and the training
    hyperparameters (learning rate, batch size, regularisation,
    cutout length, death rate).
    """
    config_space = CS.ConfigurationSpace()

    # (hyperparameter class, name, lower bound, upper bound, log-scale?)
    specs = [
        # architecture hyperparameters
        (CSH.UniformIntegerHyperparameter, 'nr_residual_blocks_1', 1, 16, True),
        (CSH.UniformIntegerHyperparameter, 'nr_residual_blocks_2', 1, 16, True),
        (CSH.UniformIntegerHyperparameter, 'nr_residual_blocks_3', 1, 16, True),
        (CSH.UniformIntegerHyperparameter, 'initial_filters', 8, 32, True),
        (CSH.UniformFloatHyperparameter, 'widen_factor_1', 0.5, 8, True),
        (CSH.UniformFloatHyperparameter, 'widen_factor_2', 0.5, 4, True),
        (CSH.UniformFloatHyperparameter, 'widen_factor_3', 0.5, 4, True),
        (CSH.UniformIntegerHyperparameter, 'res_branches_1', 1, 5, False),
        (CSH.UniformIntegerHyperparameter, 'res_branches_2', 1, 5, False),
        (CSH.UniformIntegerHyperparameter, 'res_branches_3', 1, 5, False),
        # other hyperparameters
        (CSH.UniformFloatHyperparameter, 'learning_rate', 1e-3, 1, True),
        (CSH.UniformIntegerHyperparameter, 'batch_size', 32, 128, True),
        (CSH.UniformFloatHyperparameter, 'weight_decay', 1e-5, 1e-3, True),
        (CSH.UniformFloatHyperparameter, 'momentum', 1e-3, 0.99, False),
        (CSH.UniformFloatHyperparameter, 'alpha', 0, 1, False),
        (CSH.UniformIntegerHyperparameter, 'length', 0, 20, False),
        (CSH.UniformFloatHyperparameter, 'death_rate', 0, 1, False),
    ]
    for hp_cls, hp_name, low, high, use_log in specs:
        config_space.add_hyperparameter(
            hp_cls(hp_name, lower=low, upper=high, log=use_log))

    return config_space
开发者ID:arberzela,项目名称:EfficientNAS,代码行数:26,代码来源:cifar10_worker.py

示例2: _convert_hyper_parameters_to_cs

# 需要导入模块: from ConfigSpace import hyperparameters [as 别名]
# 或者: from ConfigSpace.hyperparameters import UniformIntegerHyperparameter [as 别名]
def _convert_hyper_parameters_to_cs(self):
        # type: () -> CS.ConfigurationSpace
        """Translate the task's hyper-parameter ranges into a ConfigSpace.

        Each supported range type maps onto the corresponding ConfigSpace
        hyperparameter; an unsupported type raises ``ValueError``.  The
        space is seeded with ``self._seed`` so sampling is reproducible.
        """
        config_space = CS.ConfigurationSpace(seed=self._seed)
        for param in self._hyper_parameters:
            if isinstance(param, UniformParameterRange):
                converted = CSH.UniformFloatHyperparameter(
                    param.name, lower=param.min_value, upper=param.max_value,
                    log=False, q=param.step_size)
            elif isinstance(param, UniformIntegerParameterRange):
                converted = CSH.UniformIntegerHyperparameter(
                    param.name, lower=param.min_value, upper=param.max_value,
                    log=False, q=param.step_size)
            elif isinstance(param, DiscreteParameterRange):
                converted = CSH.CategoricalHyperparameter(param.name, choices=param.values)
            else:
                raise ValueError("HyperParameter type {} not supported yet with OptimizerBOHB".format(type(param)))
            config_space.add_hyperparameter(converted)

        return config_space
开发者ID:allegroai,项目名称:trains,代码行数:19,代码来源:bandster.py

示例3: get_hyperparameter_search_space

# 需要导入模块: from ConfigSpace import hyperparameters [as 别名]
# 或者: from ConfigSpace.hyperparameters import UniformIntegerHyperparameter [as 别名]
def get_hyperparameter_search_space(self, **pipeline_config):
        """Search space for the image-augmentation pipeline node.

        Cutout parameters are only active when 'cutout' is enabled, and
        the auto-augmentation variants only when 'augment' is enabled.
        """
        import ConfigSpace as CS
        import ConfigSpace.hyperparameters as CSH
        cs = CS.ConfigurationSpace()

        augment_hp = cs.add_hyperparameter(CSH.CategoricalHyperparameter('augment', [True, False]))
        autoaugment_hp = cs.add_hyperparameter(CSH.CategoricalHyperparameter('autoaugment', [True, False]))
        fastautoaugment_hp = cs.add_hyperparameter(CSH.CategoricalHyperparameter('fastautoaugment', [True, False]))

        cutout_hp = cs.add_hyperparameter(CSH.CategoricalHyperparameter('cutout', [True, False]))
        cutout_length_hp = cs.add_hyperparameter(CSH.UniformIntegerHyperparameter('length', lower=0, upper=20, log=False))
        cutout_holes_hp = cs.add_hyperparameter(CSH.UniformIntegerHyperparameter('cutout_holes', lower=1, upper=3, log=False))

        # Cutout parameters only matter when cutout itself is switched on.
        for child_hp in (cutout_length_hp, cutout_holes_hp):
            cs.add_condition(CS.EqualsCondition(child_hp, cutout_hp, True))

        # Auto-augmentation variants require 'augment' to be enabled.
        for child_hp in (autoaugment_hp, fastautoaugment_hp):
            cs.add_condition(CS.EqualsCondition(child_hp, augment_hp, True))

        return cs
开发者ID:automl,项目名称:Auto-PyTorch,代码行数:22,代码来源:image_augmentation.py

示例4: get_hyperparameter

# 需要导入模块: from ConfigSpace import hyperparameters [as 别名]
# 或者: from ConfigSpace.hyperparameters import UniformIntegerHyperparameter [as 别名]
def get_hyperparameter(hyper_type, name, value_range, log=False):
    """Create a ConfigSpace hyperparameter *name* from *value_range*.

    *value_range* is either a sequence of values / bounds, or a
    ``((lower, upper), log_flag)`` tuple that bundles the range with its
    own log-scale flag (which then overrides the *log* argument).
    A single-element range, or a two-element range whose bounds coincide,
    collapses to a ``Constant``.
    """
    # Unpack a ((lower, upper), bool) tuple carrying its own log flag.
    if isinstance(value_range, tuple) and len(value_range) == 2 \
            and isinstance(value_range[1], bool) and isinstance(value_range[0], (tuple, list)):
        value_range, log = value_range

    if len(value_range) == 0:
        raise ValueError(name + ': The range has to contain at least one element')

    # A single value, or a degenerate lower == upper range, is a constant.
    if len(value_range) == 1 or (len(value_range) == 2 and value_range[0] == value_range[1]):
        first = value_range[0]
        # CSH.Constant cannot hold a bool, so store it as 0/1.
        return CSH.Constant(name, int(first) if isinstance(first, bool) else first)

    if hyper_type == CSH.CategoricalHyperparameter:
        return CSH.CategoricalHyperparameter(name, value_range)
    if hyper_type == CSH.UniformFloatHyperparameter:
        assert len(value_range) == 2, "Float HP range update for %s is specified by the two upper and lower values. %s given." %(name, len(value_range))
        return CSH.UniformFloatHyperparameter(name, lower=value_range[0], upper=value_range[1], log=log)
    if hyper_type == CSH.UniformIntegerHyperparameter:
        assert len(value_range) == 2, "Int HP range update for %s is specified by the two upper and lower values. %s given." %(name, len(value_range))
        return CSH.UniformIntegerHyperparameter(name, lower=value_range[0], upper=value_range[1], log=log)
    raise ValueError('Unknown type: %s for hp %s' % (hyper_type, name))
开发者ID:automl,项目名称:Auto-PyTorch,代码行数:22,代码来源:config_space_hyperparameter.py

示例5: get_config_space

# 需要导入模块: from ConfigSpace import hyperparameters [as 别名]
# 或者: from ConfigSpace.hyperparameters import UniformIntegerHyperparameter [as 别名]
def get_config_space(
        num_layers=(1, 15),
        max_units=((10, 1024), True),
        activation=('sigmoid', 'tanh', 'relu'),
        mlp_shape=('funnel', 'long_funnel', 'diamond', 'hexagon', 'brick', 'triangle', 'stairs'),
        max_dropout=(0, 1.0),
        use_dropout=(True, False)
    ):
        """Search space for the shaped MLP network.

        'max_dropout' is only active when 'use_dropout' is True.  Ranges
        are either (lower, upper) or ((lower, upper), log_scale) tuples.
        """
        cs = CS.ConfigurationSpace()

        # Overall shape of the layer-width profile.
        cs.add_hyperparameter(get_hyperparameter(CSH.CategoricalHyperparameter, 'mlp_shape', mlp_shape))

        # Depth and maximum width of the network.
        cs.add_hyperparameter(get_hyperparameter(CSH.UniformIntegerHyperparameter, 'num_layers', num_layers))
        cs.add_hyperparameter(get_hyperparameter(CSH.UniformIntegerHyperparameter, "max_units", max_units))

        dropout_flag_hp = add_hyperparameter(cs, CS.CategoricalHyperparameter, "use_dropout", use_dropout)

        # Dropout magnitude only applies when dropout is switched on.
        dropout_rate_hp = add_hyperparameter(cs, CSH.UniformFloatHyperparameter, "max_dropout", max_dropout)
        cs.add_condition(CS.EqualsCondition(dropout_rate_hp, dropout_flag_hp, True))

        add_hyperparameter(cs, CSH.CategoricalHyperparameter, 'activation', activation)
        return cs
开发者ID:automl,项目名称:Auto-PyTorch,代码行数:27,代码来源:shapedmlpnet.py

示例6: get_hyperparameter_search_space

# 需要导入模块: from ConfigSpace import hyperparameters [as 别名]
# 或者: from ConfigSpace.hyperparameters import UniformIntegerHyperparameter [as 别名]
def get_hyperparameter_search_space(
        dataset_info=None,
        kernel=('poly', 'rbf', 'sigmoid', 'cosine'),
        n_components=((50, 10000), True),
        gamma=((3.0517578125e-05, 8), True),
        degree=(2, 5),
        coef0=(-1, 1)
    ):
        """Search space for the Nystroem kernel approximation.

        'degree', 'coef0' and 'gamma' are only added (and conditioned on
        'kernel') when a kernel that actually uses them is selectable.
        """
        cs = ConfigSpace.ConfigurationSpace()
        kernel_hp = add_hyperparameter(cs, CSH.CategoricalHyperparameter, 'kernel', kernel)
        add_hyperparameter(cs, CSH.UniformIntegerHyperparameter, "n_components", n_components)

        # Polynomial degree only exists for the poly kernel.
        if "poly" in kernel:
            degree_hp = add_hyperparameter(cs, CSH.UniformIntegerHyperparameter, 'degree', degree)
            cs.add_condition(CSC.EqualsCondition(degree_hp, kernel_hp, "poly"))

        # coef0 applies to the poly and sigmoid kernels.
        coef0_kernels = set(["poly", "sigmoid"]) & set(kernel)
        if coef0_kernels:
            coef0_hp = add_hyperparameter(cs, CSH.UniformFloatHyperparameter, "coef0", coef0)
            cs.add_condition(CSC.InCondition(coef0_hp, kernel_hp, list(coef0_kernels)))

        # gamma applies to every kernel except cosine.
        gamma_kernels = set(["poly", "rbf", "sigmoid"]) & set(kernel)
        if gamma_kernels:
            gamma_hp = add_hyperparameter(cs, CSH.UniformFloatHyperparameter, "gamma", gamma)
            cs.add_condition(CSC.InCondition(gamma_hp, kernel_hp, list(gamma_kernels)))

        return cs
开发者ID:automl,项目名称:Auto-PyTorch,代码行数:25,代码来源:nystroem.py

示例7: get_hyperparameter_search_space

# 需要导入模块: from ConfigSpace import hyperparameters [as 别名]
# 或者: from ConfigSpace.hyperparameters import UniformIntegerHyperparameter [as 别名]
def get_hyperparameter_search_space(
        dataset_info=None,
        n_components=(10,2000),
        algorithm=('parallel', 'deflation'),
        whiten=(True, False),
        fun=('logcosh', 'exp', 'cube'),
    ):
        """Search space for FastICA preprocessing.

        :param dataset_info: unused, kept for interface compatibility.
        :param n_components: (lower, upper) range for the component count.
        :param algorithm: FastICA algorithm choices.
        :param whiten: allowed values for the whiten flag.
        :param fun: approximation functions for neg-entropy.
        :return: the assembled ConfigurationSpace.
        """
        cs = ConfigSpace.ConfigurationSpace()

        n_components_hp = get_hyperparameter(CSH.UniformIntegerHyperparameter, "n_components", n_components)
        algorithm_hp = get_hyperparameter(CSH.CategoricalHyperparameter, 'algorithm', algorithm)
        whiten_hp = get_hyperparameter(CSH.CategoricalHyperparameter, 'whiten', whiten)
        fun_hp = get_hyperparameter(CSH.CategoricalHyperparameter, 'fun', fun)

        # BUGFIX: previously the hyperparameters were only registered inside
        # the `if True in whiten` branch, so whiten=(False,) returned an
        # empty space that silently dropped algorithm/fun/n_components too.
        cs.add_hyperparameters([n_components_hp, algorithm_hp, whiten_hp, fun_hp])
        if True in whiten:
            # n_components is only meaningful when whitening is enabled.
            cs.add_condition(CSC.EqualsCondition(n_components_hp, whiten_hp, True))

        return cs
开发者ID:automl,项目名称:Auto-PyTorch,代码行数:21,代码来源:fast_ica.py

示例8: get_hyperparameter_search_space

# 需要导入模块: from ConfigSpace import hyperparameters [as 别名]
# 或者: from ConfigSpace.hyperparameters import UniformIntegerHyperparameter [as 别名]
def get_hyperparameter_search_space(
        dataset_info=None,
        kernel=('poly', 'rbf', 'sigmoid', 'cosine'),
        n_components=(10, 2000),
        gamma=((3.0517578125e-05, 8), True),
        degree=(2, 5),
        coef0=(-1, 1)
    ):
        """Search space for kernel PCA preprocessing.

        Kernel-specific parameters ('degree', 'coef0', 'gamma') are only
        created, and conditioned on 'kernel', when at least one selectable
        kernel uses them.
        """
        cs = ConfigSpace.ConfigurationSpace()
        kernel_choices = set(kernel)

        kernel_hp = add_hyperparameter(cs, CSH.CategoricalHyperparameter, 'kernel', kernel)
        add_hyperparameter(cs, CSH.UniformIntegerHyperparameter, "n_components", n_components)

        if "poly" in kernel:
            # Polynomial degree only exists for the poly kernel.
            degree_hp = add_hyperparameter(cs, CSH.UniformIntegerHyperparameter, 'degree', degree)
            cs.add_condition(CSC.EqualsCondition(degree_hp, kernel_hp, "poly"))

        coef0_users = set(["poly", "sigmoid"]) & kernel_choices
        if coef0_users:
            coef0_hp = add_hyperparameter(cs, CSH.UniformFloatHyperparameter, "coef0", coef0)
            cs.add_condition(CSC.InCondition(coef0_hp, kernel_hp, list(coef0_users)))

        gamma_users = set(["poly", "rbf", "sigmoid"]) & kernel_choices
        if gamma_users:
            gamma_hp = add_hyperparameter(cs, CSH.UniformFloatHyperparameter, "gamma", gamma)
            cs.add_condition(CSC.InCondition(gamma_hp, kernel_hp, list(gamma_users)))

        return cs
开发者ID:automl,项目名称:Auto-PyTorch,代码行数:24,代码来源:kernel_pca.py

示例9: __init__

# 需要导入模块: from ConfigSpace import hyperparameters [as 别名]
# 或者: from ConfigSpace.hyperparameters import UniformIntegerHyperparameter [as 别名]
def __init__(
            self, hp_ranges: HyperparameterRanges_CS, resource_attr_key: str,
            resource_attr_range: Tuple[int, int]):
        """Wrap *hp_ranges* with an additional resource hyperparameter.

        :param hp_ranges: Base hyperparameter ranges to extend.
        :param resource_attr_key: Key of the resource attribute (suffix of
            the extended hyperparameter's name).
        :param resource_attr_range: (min, max) resource values; min >= 1.
        """
        assert resource_attr_range[0] >= 1
        assert resource_attr_range[1] >= resource_attr_range[0]
        self.hp_ranges = hp_ranges
        self.resource_attr_key = resource_attr_key
        self.resource_attr_range = resource_attr_range
        # Deep-copy so the original config space stays untouched while we
        # append the resource attribute to the extended one.
        extended_space = copy.deepcopy(hp_ranges.config_space)
        self.resource_attr_name = RESOURCE_ATTR_PREFIX + resource_attr_key
        # Allowed range is [1, resource_attr_range[1]].
        resource_hp = CSH.UniformIntegerHyperparameter(
            name=self.resource_attr_name, lower=1,
            upper=resource_attr_range[1])
        extended_space.add_hyperparameter(resource_hp)
        self.hp_ranges_ext = HyperparameterRanges_CS(
            extended_space, name_last_pos=self.resource_attr_name)
开发者ID:awslabs,项目名称:autogluon,代码行数:19,代码来源:config_ext.py

示例10: _create_config_space

# 需要导入模块: from ConfigSpace import hyperparameters [as 别名]
# 或者: from ConfigSpace.hyperparameters import UniformIntegerHyperparameter [as 别名]
def _create_config_space(dict_hyperparams):
    """Create the hyperparameters hyperspace.

    :param dict_hyperparams: mapping of hyperparameter name to a spec dict
        with a 'type' key ('int', 'float', 'bool' or 'str'), a 'range' or
        'values' key, and an optional 'default'.
    :returns: the assembled ``ConfigurationSpace``.
    :raises TypeError: if *dict_hyperparams* is not a dict.
    """
    config_space = ConfigurationSpace()

    if not isinstance(dict_hyperparams, dict):
        raise TypeError('Hyperparams must be a dictionary.')

    for name, hyperparam in dict_hyperparams.items():
        hp_type = hyperparam['type']

        if hp_type == 'int':
            hp_range = hyperparam.get('range') or hyperparam.get('values')
            hp_min = min(hp_range)
            hp_max = max(hp_range)
            # BUGFIX: explicit `is None` check so a legitimate falsy default
            # (0) is kept; `or` silently replaced it with hp_min.
            hp_default = hyperparam.get('default')
            if hp_default is None:
                hp_default = hp_min
            config_space.add_hyperparameter(
                hp.UniformIntegerHyperparameter(name, hp_min, hp_max, default_value=hp_default))

        elif hp_type == 'float':
            hp_range = hyperparam.get('range') or hyperparam.get('values')
            hp_min = min(hp_range)
            hp_max = max(hp_range)
            # Same falsy-default fix as the int branch (0.0 is a valid default).
            hp_default = hyperparam.get('default')
            if hp_default is None:
                hp_default = hp_min
            config_space.add_hyperparameter(
                hp.UniformFloatHyperparameter(name, hp_min, hp_max, default_value=hp_default))

        elif hp_type == 'bool':
            # BUGFIX: booleans are modelled as the categorical choices
            # 'true'/'false', so the default must be one of those strings;
            # passing the raw bool made ConfigSpace reject it as an illegal
            # default value.
            hp_default = 'true' if hyperparam.get('default') else 'false'
            config_space.add_hyperparameter(
                hp.CategoricalHyperparameter(name, ['true', 'false'], default_value=hp_default))

        elif hp_type == 'str':
            hp_range = hyperparam.get('range') or hyperparam.get('values')
            # Map None choices to the _NONE sentinel (loop variable renamed so
            # it no longer shadows the `hp` module alias).
            hp_range = [_NONE if choice is None else choice for choice in hp_range]
            # Missing default -> first choice; an explicit None default maps
            # to the _NONE sentinel (previously dead code because of `or`).
            hp_default = hyperparam.get('default', hp_range[0])
            hp_default = _NONE if hp_default is None else hp_default

            config_space.add_hyperparameter(
                hp.CategoricalHyperparameter(name, hp_range, default_value=hp_default))

    return config_space
开发者ID:HDI-Project,项目名称:BTB,代码行数:43,代码来源:smac.py

示例11: get_hyperparameter_search_space

# 需要导入模块: from ConfigSpace import hyperparameters [as 别名]
# 或者: from ConfigSpace.hyperparameters import UniformIntegerHyperparameter [as 别名]
def get_hyperparameter_search_space(self, dataset_info=None, **pipeline_config):
        """Search space for the data loader: currently just the batch size."""
        import ConfigSpace
        import ConfigSpace.hyperparameters as CSH

        pipeline_config = self.pipeline.get_pipeline_config(**pipeline_config)
        cs = ConfigSpace.ConfigurationSpace()

        # Default to a log-scaled batch size in [32, 500] unless the user
        # supplied an override via the search-space updates.
        default_range = ((32, 500), True)
        batch_size_range = self._get_search_space_updates().get('batch_size', default_range)
        add_hyperparameter(cs, CSH.UniformIntegerHyperparameter, 'batch_size', batch_size_range)
        self._check_search_space_updates('batch_size')
        return cs
开发者ID:automl,项目名称:Auto-PyTorch,代码行数:13,代码来源:create_dataloader.py

def is_constant(hyper):
    """Return ``(True, value)`` when *hyper* can only ever take a single
    value, otherwise ``(False, None)``.
    """
    if isinstance(hyper, CSH.Constant):
        return True, hyper.value

    numeric_types = (CSH.UniformFloatHyperparameter, CSH.UniformIntegerHyperparameter)
    if isinstance(hyper, numeric_types):
        # A numerically degenerate range is effectively a constant.
        if abs(hyper.upper - hyper.lower) < 1e-10:
            return True, hyper.lower
    elif isinstance(hyper, CSH.CategoricalHyperparameter) and len(hyper.choices) == 1:
        return True, hyper.choices[0]

    return False, None
开发者ID:automl,项目名称:Auto-PyTorch,代码行数:15,代码来源:modify_config_space.py

示例13: get_config_space

# 需要导入模块: from ConfigSpace import hyperparameters [as 别名]
# 或者: from ConfigSpace.hyperparameters import UniformIntegerHyperparameter [as 别名]
def get_config_space(
        num_groups=(1, 9),
        blocks_per_group=(1, 4),
        max_units=((10, 1024), True),
        activation=('sigmoid', 'tanh', 'relu'),
        max_shake_drop_probability=(0, 1),
        max_dropout=(0, 1.0),
        resnet_shape=('funnel', 'long_funnel', 'diamond', 'hexagon', 'brick', 'triangle', 'stairs'),
        use_dropout=(True, False),
        use_shake_shake=(True, False),
        use_shake_drop=(True, False)
    ):
        """Search space for the shaped ResNet.

        'max_shake_drop_probability' and 'max_dropout' are conditional on
        their respective enable flags, and only created when the flag can
        actually be True.
        """
        cs = CS.ConfigurationSpace()

        # Network depth: number of groups and residual blocks per group.
        cs.add_hyperparameter(get_hyperparameter(CS.UniformIntegerHyperparameter, "num_groups", num_groups))
        cs.add_hyperparameter(get_hyperparameter(CS.UniformIntegerHyperparameter, "blocks_per_group", blocks_per_group))
        add_hyperparameter(cs, CS.CategoricalHyperparameter, "activation", activation)
        dropout_flag_hp = add_hyperparameter(cs, CS.CategoricalHyperparameter, "use_dropout", use_dropout)
        add_hyperparameter(cs, CS.CategoricalHyperparameter, "use_shake_shake", use_shake_shake)

        # Shake-drop probability is conditional on shake-drop being enabled.
        shake_drop_flag_hp = add_hyperparameter(cs, CS.CategoricalHyperparameter, "use_shake_drop", use_shake_drop)
        if True in use_shake_drop:
            drop_prob_hp = add_hyperparameter(
                cs, CS.UniformFloatHyperparameter, "max_shake_drop_probability",
                max_shake_drop_probability)
            cs.add_condition(CS.EqualsCondition(drop_prob_hp, shake_drop_flag_hp, True))

        add_hyperparameter(cs, CSH.CategoricalHyperparameter, 'resnet_shape', resnet_shape)
        add_hyperparameter(cs, CSH.UniformIntegerHyperparameter, "max_units", max_units)

        # Dropout magnitude is conditional on dropout being enabled.
        if True in use_dropout:
            dropout_rate_hp = add_hyperparameter(cs, CSH.UniformFloatHyperparameter, "max_dropout", max_dropout)
            cs.add_condition(CS.EqualsCondition(dropout_rate_hp, dropout_flag_hp, True))

        return cs
开发者ID:automl,项目名称:Auto-PyTorch,代码行数:38,代码来源:shapedresnet.py

示例14: get_config_space

# 需要导入模块: from ConfigSpace import hyperparameters [as 别名]
# 或者: from ConfigSpace.hyperparameters import UniformIntegerHyperparameter [as 别名]
def get_config_space(
        num_layers=((1, 15), False),
        num_units=((10, 1024), True),
        activation=('sigmoid', 'tanh', 'relu'),
        dropout=(0.0, 0.8),
        use_dropout=(True, False),
        **kwargs
    ):
        """Build the MLP search space.

        Creates ``num_units_i`` (and, when dropout may be enabled,
        ``dropout_i``) hyperparameters for every possible layer ``i`` up to
        the maximum depth, each one conditional on ``num_layers`` actually
        reaching layer ``i``.  Per-layer ranges may be overridden via
        ``num_units_<i>`` / ``dropout_<i>`` kwargs; any unconsumed kwargs
        entry is rejected at the end.  Ranges are either ``(lower, upper)``
        or ``((lower, upper), log_scale)`` tuples.
        """
        cs = CS.ConfigurationSpace()

        num_layers_hp = get_hyperparameter(CSH.UniformIntegerHyperparameter, 'num_layers', num_layers)
        cs.add_hyperparameter(num_layers_hp)
        use_dropout_hp = add_hyperparameter(cs, CS.CategoricalHyperparameter, "use_dropout", use_dropout)

        # One width (and optional dropout) hyperparameter per potential
        # layer, up to the maximum depth num_layers[0][1].
        for i in range(1, num_layers[0][1] + 1):
            # A per-layer kwargs override takes precedence over the shared
            # num_units default range (pop() consumes it for the final check).
            n_units_hp = get_hyperparameter(CSH.UniformIntegerHyperparameter, "num_units_%d" % i, kwargs.pop("num_units_%d" % i, num_units))
            cs.add_hyperparameter(n_units_hp)

            # Layers beyond the minimum depth are only active when
            # num_layers is large enough to include them.
            if i > num_layers[0][0]:
                cs.add_condition(CS.GreaterThanCondition(n_units_hp, num_layers_hp, i - 1))

            if True in use_dropout:
                dropout_hp = get_hyperparameter(CSH.UniformFloatHyperparameter, "dropout_%d" % i, kwargs.pop("dropout_%d" % i, dropout))
                cs.add_hyperparameter(dropout_hp)
                dropout_condition_1 = CS.EqualsCondition(dropout_hp, use_dropout_hp, True)

                if i > num_layers[0][0]:
                    # Dropout for layer i requires both use_dropout == True
                    # and the layer to actually exist.
                    dropout_condition_2 = CS.GreaterThanCondition(dropout_hp, num_layers_hp, i - 1)
                    cs.add_condition(CS.AndConjunction(dropout_condition_1, dropout_condition_2))
                else:
                    cs.add_condition(dropout_condition_1)

        add_hyperparameter(cs, CSH.CategoricalHyperparameter,'activation', activation)
        # Every kwargs entry must have been consumed by a pop() above.
        assert len(kwargs) == 0, "Invalid hyperparameter updates for mlpnet: %s" % str(kwargs)
        return(cs)
开发者ID:automl,项目名称:Auto-PyTorch,代码行数:37,代码来源:mlpnet.py

示例15: get_config_space

# 需要导入模块: from ConfigSpace import hyperparameters [as 别名]
# 或者: from ConfigSpace.hyperparameters import UniformIntegerHyperparameter [as 别名]
def get_config_space(   nr_main_blocks=[1, 8], nr_residual_blocks=([1, 16], True), initial_filters=([8, 32], True), widen_factor=([0.5, 4], True), 
                            res_branches=([1, 5], False), filters_size=[3, 3], **kwargs):
        """Build the ResNet search space.

        For every potential main block ``i`` this creates
        ``nr_residual_blocks_i``, ``widen_factor_i`` and ``res_branches_i``
        hyperparameters, each conditional on ``nr_main_blocks`` being at
        least ``i``.  Ranges are ``[lower, upper]`` lists, optionally
        wrapped as ``(range, log_scale)``.

        NOTE(review): ``filters_size`` and ``kwargs`` are currently unused —
        the per-block filter-size lines are commented out.  The mutable
        list defaults are never mutated here, but tuples would be the safer
        convention.
        """
        import ConfigSpace as CS
        import ConfigSpace.hyperparameters as CSH

        cs = CS.ConfigurationSpace()

        nr_main_blocks_hp = get_hyperparameter(ConfigSpace.UniformIntegerHyperparameter, "nr_main_blocks", nr_main_blocks)
        cs.add_hyperparameter(nr_main_blocks_hp)
        initial_filters_hp = get_hyperparameter(ConfigSpace.UniformIntegerHyperparameter, "initial_filters", initial_filters)
        cs.add_hyperparameter(initial_filters_hp)
        # add_hyperparameter(cs, CSH.UniformIntegerHyperparameter, 'nr_convs', nr_convs, log=True)
        death_rate_hp = get_hyperparameter(ConfigSpace.UniformFloatHyperparameter, "death_rate", ([0,1], False))
        cs.add_hyperparameter(death_rate_hp)

        # nr_main_blocks may arrive as [min, max] or ([min, max], log_flag);
        # unwrap accordingly to get the block-count bounds.
        if type(nr_main_blocks[0]) is int:
            main_blocks_min = nr_main_blocks[0]
            main_blocks_max = nr_main_blocks[1]
        else:
            main_blocks_min = nr_main_blocks[0][0]
            main_blocks_max = nr_main_blocks[0][1]

        for i in range(1, main_blocks_max + 1):
            blocks_hp = get_hyperparameter(ConfigSpace.UniformIntegerHyperparameter, 'nr_residual_blocks_%d' % i, nr_residual_blocks)
            blocks = cs.add_hyperparameter(blocks_hp)
            widen_hp = get_hyperparameter(ConfigSpace.UniformFloatHyperparameter, 'widen_factor_%d' % i, widen_factor)
            widen = cs.add_hyperparameter(widen_hp)
            branches_hp = get_hyperparameter(ConfigSpace.UniformIntegerHyperparameter, 'res_branches_%d' % i, res_branches)
            branches = cs.add_hyperparameter(branches_hp)
            # filters = add_hyperparameter(cs, CSH.UniformIntegerHyperparameter, 'filters_size_%d' % i, filters_size, log=False)

            # Hyperparameters of blocks above the minimum depth only apply
            # when nr_main_blocks is large enough to include them.
            if i > main_blocks_min:
                cs.add_condition(CS.GreaterThanCondition(blocks_hp, nr_main_blocks_hp, i-1))
                cs.add_condition(CS.GreaterThanCondition(widen_hp, nr_main_blocks_hp, i-1))
                cs.add_condition(CS.GreaterThanCondition(branches_hp, nr_main_blocks_hp, i-1))
                # cs.add_condition(CS.GreaterThanCondition(filters, main_blocks, i-1))

        return cs
开发者ID:automl,项目名称:Auto-PyTorch,代码行数:41,代码来源:resnet.py


注:本文中的ConfigSpace.hyperparameters.UniformIntegerHyperparameter方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。