当前位置: 首页>>代码示例>>Python>>正文


Python hyperparameters.UniformFloatHyperparameter方法代码示例

本文整理汇总了Python中ConfigSpace.hyperparameters.UniformFloatHyperparameter方法的典型用法代码示例。如果您正苦于以下问题:Python hyperparameters.UniformFloatHyperparameter方法的具体用法?Python hyperparameters.UniformFloatHyperparameter怎么用?Python hyperparameters.UniformFloatHyperparameter使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在ConfigSpace.hyperparameters的用法示例。


在下文中一共展示了hyperparameters.UniformFloatHyperparameter方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。

示例1: get_config_space

# 需要导入模块: from ConfigSpace import hyperparameters [as 别名]
# 或者: from ConfigSpace.hyperparameters import UniformFloatHyperparameter [as 别名]
def get_config_space():
    """Assemble the search space for the CIFAR-10 worker.

    Contains the architecture hyperparameters (residual block counts,
    widen factors, branch counts, initial filter count) followed by the
    training hyperparameters (learning rate, batch size, weight decay,
    momentum, and the remaining regularisation knobs).
    """
    config_space = CS.ConfigurationSpace()

    # (constructor, name, lower, upper, log-scale) — one entry per
    # hyperparameter, added in the same order as before.
    specs = (
        # architecture hyperparameters
        (CSH.UniformIntegerHyperparameter, 'nr_residual_blocks_1', 1, 16, True),
        (CSH.UniformIntegerHyperparameter, 'nr_residual_blocks_2', 1, 16, True),
        (CSH.UniformIntegerHyperparameter, 'nr_residual_blocks_3', 1, 16, True),
        (CSH.UniformIntegerHyperparameter, 'initial_filters', 8, 32, True),
        (CSH.UniformFloatHyperparameter, 'widen_factor_1', 0.5, 8, True),
        (CSH.UniformFloatHyperparameter, 'widen_factor_2', 0.5, 4, True),
        (CSH.UniformFloatHyperparameter, 'widen_factor_3', 0.5, 4, True),
        (CSH.UniformIntegerHyperparameter, 'res_branches_1', 1, 5, False),
        (CSH.UniformIntegerHyperparameter, 'res_branches_2', 1, 5, False),
        (CSH.UniformIntegerHyperparameter, 'res_branches_3', 1, 5, False),
        # other hyperparameters
        (CSH.UniformFloatHyperparameter, 'learning_rate', 1e-3, 1, True),
        (CSH.UniformIntegerHyperparameter, 'batch_size', 32, 128, True),
        (CSH.UniformFloatHyperparameter, 'weight_decay', 1e-5, 1e-3, True),
        (CSH.UniformFloatHyperparameter, 'momentum', 1e-3, 0.99, False),
        (CSH.UniformFloatHyperparameter, 'alpha', 0, 1, False),
        (CSH.UniformIntegerHyperparameter, 'length', 0, 20, False),
        (CSH.UniformFloatHyperparameter, 'death_rate', 0, 1, False),
    )
    for make_hp, hp_name, low, high, log_scale in specs:
        config_space.add_hyperparameter(
            make_hp(hp_name, lower=low, upper=high, log=log_scale))

    return config_space
开发者ID:arberzela,项目名称:EfficientNAS,代码行数:26,代码来源:cifar10_worker.py

示例2: _convert_hyper_parameters_to_cs

# 需要导入模块: from ConfigSpace import hyperparameters [as 别名]
# 或者: from ConfigSpace.hyperparameters import UniformFloatHyperparameter [as 别名]
def _convert_hyper_parameters_to_cs(self):
        # type: () -> CS.ConfigurationSpace
        """Translate ``self._hyper_parameters`` into a ConfigSpace space.

        The space is seeded with ``self._seed`` so sampling is reproducible.
        Raises ValueError for parameter types this backend cannot convert.
        """
        space = CS.ConfigurationSpace(seed=self._seed)
        for param in self._hyper_parameters:
            # NOTE(review): the isinstance order is preserved from the
            # original — presumably the range types are unrelated classes;
            # verify before reordering these branches.
            if isinstance(param, UniformParameterRange):
                converted = CSH.UniformFloatHyperparameter(
                    param.name, lower=param.min_value, upper=param.max_value, log=False, q=param.step_size)
            elif isinstance(param, UniformIntegerParameterRange):
                converted = CSH.UniformIntegerHyperparameter(
                    param.name, lower=param.min_value, upper=param.max_value, log=False, q=param.step_size)
            elif isinstance(param, DiscreteParameterRange):
                converted = CSH.CategoricalHyperparameter(param.name, choices=param.values)
            else:
                raise ValueError("HyperParameter type {} not supported yet with OptimizerBOHB".format(type(param)))
            space.add_hyperparameter(converted)

        return space
开发者ID:allegroai,项目名称:trains,代码行数:19,代码来源:bandster.py

示例3: get_hyperparameter

# 需要导入模块: from ConfigSpace import hyperparameters [as 别名]
# 或者: from ConfigSpace.hyperparameters import UniformFloatHyperparameter [as 别名]
def get_hyperparameter(hyper_type, name, value_range, log = False):
    """Build a ConfigSpace hyperparameter of *hyper_type* named *name*.

    *value_range* may either be the raw range/choices, or a 2-tuple
    ``((range), log_flag)`` that bundles the range with its log-scale flag.
    Degenerate ranges (one element, or identical lower/upper) collapse to
    a Constant.  Raises ValueError on an empty range or an unknown type.
    """
    bundled_with_log_flag = (
        isinstance(value_range, tuple)
        and len(value_range) == 2
        and isinstance(value_range[1], bool)
        and isinstance(value_range[0], (tuple, list))
    )
    if bundled_with_log_flag:
        value_range, log = value_range

    if len(value_range) == 0:
        raise ValueError(name + ': The range has to contain at least one element')

    # A single value, or equal bounds, is effectively a constant.
    if len(value_range) == 1 or (len(value_range) == 2 and value_range[0] == value_range[1]):
        first = value_range[0]
        # Constants cannot hold bools, so coerce them to ints.
        return CSH.Constant(name, int(first) if isinstance(first, bool) else first)

    if hyper_type == CSH.CategoricalHyperparameter:
        return CSH.CategoricalHyperparameter(name, value_range)
    if hyper_type == CSH.UniformFloatHyperparameter:
        assert len(value_range) == 2, "Float HP range update for %s is specified by the two upper and lower values. %s given." %(name, len(value_range))
        return CSH.UniformFloatHyperparameter(name, lower=value_range[0], upper=value_range[1], log=log)
    if hyper_type == CSH.UniformIntegerHyperparameter:
        assert len(value_range) == 2, "Int HP range update for %s is specified by the two upper and lower values. %s given." %(name, len(value_range))
        return CSH.UniformIntegerHyperparameter(name, lower=value_range[0], upper=value_range[1], log=log)
    raise ValueError('Unknown type: %s for hp %s' % (hyper_type, name) )
开发者ID:automl,项目名称:Auto-PyTorch,代码行数:22,代码来源:config_space_hyperparameter.py

示例4: get_config_space

# 需要导入模块: from ConfigSpace import hyperparameters [as 别名]
# 或者: from ConfigSpace.hyperparameters import UniformFloatHyperparameter [as 别名]
def get_config_space(
        categorical_features=None,
        min_unique_values_for_embedding=((3, 300), True),
        dimension_reduction=(0, 1),
        **kwargs
    ):
        """Search space for the learned entity-embedding layer.

        One ``dimension_reduction_<i>`` float is added per categorical
        feature; per-feature overrides may be supplied via **kwargs.
        Returns an empty space when there are no categorical features.
        """
        if categorical_features is None or not any(categorical_features):
            return CS.ConfigurationSpace()
        cs = CS.ConfigurationSpace()
        cs.add_hyperparameter(get_hyperparameter(
            CSH.UniformIntegerHyperparameter, "min_unique_values_for_embedding", min_unique_values_for_embedding))
        # One embedding-dimension hyperparameter per categorical column.
        n_categorical = sum(1 for flag in categorical_features if flag)
        for idx in range(n_categorical):
            hp_name = "dimension_reduction_" + str(idx)
            cs.add_hyperparameter(get_hyperparameter(
                CSH.UniformFloatHyperparameter, hp_name, kwargs.pop(hp_name, dimension_reduction)))
        # Anything left over was a typo or an unknown update.
        assert len(kwargs) == 0, "Invalid hyperparameter updates for learned embedding: %s" % str(kwargs)
        return cs
开发者ID:automl,项目名称:Auto-PyTorch,代码行数:20,代码来源:embedding.py

示例5: get_config_space

# 需要导入模块: from ConfigSpace import hyperparameters [as 别名]
# 或者: from ConfigSpace.hyperparameters import UniformFloatHyperparameter [as 别名]
def get_config_space(
        num_layers=(1, 15),
        max_units=((10, 1024), True),
        activation=('sigmoid', 'tanh', 'relu'),
        mlp_shape=('funnel', 'long_funnel', 'diamond', 'hexagon', 'brick', 'triangle', 'stairs'),
        max_dropout=(0, 1.0),
        use_dropout=(True, False)
    ):
        """Search space for the shaped MLP network.

        ``max_dropout`` is only active when ``use_dropout`` is True,
        expressed through an EqualsCondition.
        """
        cs = CS.ConfigurationSpace()

        cs.add_hyperparameter(get_hyperparameter(CSH.CategoricalHyperparameter, 'mlp_shape', mlp_shape))
        cs.add_hyperparameter(get_hyperparameter(CSH.UniformIntegerHyperparameter, 'num_layers', num_layers))
        cs.add_hyperparameter(get_hyperparameter(CSH.UniformIntegerHyperparameter, "max_units", max_units))

        dropout_flag = add_hyperparameter(cs, CS.CategoricalHyperparameter, "use_dropout", use_dropout)
        dropout_amount = add_hyperparameter(cs, CSH.UniformFloatHyperparameter, "max_dropout", max_dropout)
        # Dropout strength only matters when dropout is switched on.
        cs.add_condition(CS.EqualsCondition(dropout_amount, dropout_flag, True))

        add_hyperparameter(cs, CSH.CategoricalHyperparameter, 'activation', activation)
        return cs
开发者ID:automl,项目名称:Auto-PyTorch,代码行数:27,代码来源:shapedmlpnet.py

示例6: get_hyperparameter_search_space

# 需要导入模块: from ConfigSpace import hyperparameters [as 别名]
# 或者: from ConfigSpace.hyperparameters import UniformFloatHyperparameter [as 别名]
def get_hyperparameter_search_space(
        dataset_info=None,
        kernel=('poly', 'rbf', 'sigmoid', 'cosine'),
        n_components=((50, 10000), True),
        gamma=((3.0517578125e-05, 8), True),
        degree=(2, 5),
        coef0=(-1, 1)
    ):
        """Search space for the Nystroem kernel approximation.

        ``degree``, ``coef0`` and ``gamma`` are conditioned on the kernels
        that actually use them.
        """
        cs = ConfigSpace.ConfigurationSpace()
        kernel_hp = add_hyperparameter(cs, CSH.CategoricalHyperparameter, 'kernel', kernel)
        add_hyperparameter(cs, CSH.UniformIntegerHyperparameter, "n_components", n_components)

        offered = set(kernel)
        if "poly" in offered:
            degree_hp = add_hyperparameter(cs, CSH.UniformIntegerHyperparameter, 'degree', degree)
            cs.add_condition(CSC.EqualsCondition(degree_hp, kernel_hp, "poly"))

        coef0_kernels = {"poly", "sigmoid"} & offered
        if coef0_kernels:
            coef0_hp = add_hyperparameter(cs, CSH.UniformFloatHyperparameter, "coef0", coef0)
            cs.add_condition(CSC.InCondition(coef0_hp, kernel_hp, list(coef0_kernels)))

        gamma_kernels = {"poly", "rbf", "sigmoid"} & offered
        if gamma_kernels:
            gamma_hp = add_hyperparameter(cs, CSH.UniformFloatHyperparameter, "gamma", gamma)
            cs.add_condition(CSC.InCondition(gamma_hp, kernel_hp, list(gamma_kernels)))

        return cs
开发者ID:automl,项目名称:Auto-PyTorch,代码行数:25,代码来源:nystroem.py

示例7: get_hyperparameter_search_space

# 需要导入模块: from ConfigSpace import hyperparameters [as 别名]
# 或者: from ConfigSpace.hyperparameters import UniformFloatHyperparameter [as 别名]
def get_hyperparameter_search_space(
        dataset_info=None,
        kernel=('poly', 'rbf', 'sigmoid', 'cosine'),
        n_components=(10, 2000),
        gamma=((3.0517578125e-05, 8), True),
        degree=(2, 5),
        coef0=(-1, 1)
    ):
        """Search space for kernel PCA preprocessing.

        Kernel-specific hyperparameters (degree, coef0, gamma) are gated
        behind conditions on the selected kernel.
        """
        cs = ConfigSpace.ConfigurationSpace()
        kernel_hp = add_hyperparameter(cs, CSH.CategoricalHyperparameter, 'kernel', kernel)
        add_hyperparameter(cs, CSH.UniformIntegerHyperparameter, "n_components", n_components)

        active = set(kernel)
        if "poly" in active:
            degree_hp = add_hyperparameter(cs, CSH.UniformIntegerHyperparameter, 'degree', degree)
            cs.add_condition(CSC.EqualsCondition(degree_hp, kernel_hp, "poly"))

        with_coef0 = {"poly", "sigmoid"} & active
        if with_coef0:
            coef0_hp = add_hyperparameter(cs, CSH.UniformFloatHyperparameter, "coef0", coef0)
            cs.add_condition(CSC.InCondition(coef0_hp, kernel_hp, list(with_coef0)))

        with_gamma = {"poly", "rbf", "sigmoid"} & active
        if with_gamma:
            gamma_hp = add_hyperparameter(cs, CSH.UniformFloatHyperparameter, "gamma", gamma)
            cs.add_condition(CSC.InCondition(gamma_hp, kernel_hp, list(with_gamma)))
        return cs
开发者ID:automl,项目名称:Auto-PyTorch,代码行数:24,代码来源:kernel_pca.py

示例8: _create_config_space

# 需要导入模块: from ConfigSpace import hyperparameters [as 别名]
# 或者: from ConfigSpace.hyperparameters import UniformFloatHyperparameter [as 别名]
def _create_config_space(dict_hyperparams):
    """Create the hyperparameters hyperspace.

    *dict_hyperparams* maps a hyperparameter name to a spec dict with a
    'type' key ('int'/'float'/'bool'/'str'), a 'range' or 'values' list,
    and an optional 'default'.  Returns a ConfigurationSpace; raises
    TypeError when the argument is not a dict.
    """
    config_space = ConfigurationSpace()

    if not isinstance(dict_hyperparams, dict):
        raise TypeError('Hyperparams must be a dictionary.')

    for name, hyperparam in dict_hyperparams.items():
        hp_type = hyperparam['type']

        if hp_type == 'int':
            hp_range = hyperparam.get('range') or hyperparam.get('values')
            hp_min = min(hp_range)
            hp_max = max(hp_range)
            # BUG FIX: explicit None check so a legitimate falsy default (0)
            # is honored instead of being replaced by the range minimum.
            hp_default = hyperparam.get('default')
            if hp_default is None:
                hp_default = hp_min
            config_space.add_hyperparameter(
                hp.UniformIntegerHyperparameter(name, hp_min, hp_max, default_value=hp_default))

        elif hp_type == 'float':
            hp_range = hyperparam.get('range') or hyperparam.get('values')
            hp_min = min(hp_range)
            hp_max = max(hp_range)
            # BUG FIX: same None check — 0.0 is a valid default.
            hp_default = hyperparam.get('default')
            if hp_default is None:
                hp_default = hp_min
            config_space.add_hyperparameter(
                hp.UniformFloatHyperparameter(name, hp_min, hp_max, default_value=hp_default))

        elif hp_type == 'bool':
            # BUG FIX: the choices are the *strings* 'true'/'false', so the
            # default must also be one of those strings — passing a Python
            # bool as default_value is rejected by ConfigSpace.
            hp_default = 'true' if hyperparam.get('default') else 'false'
            config_space.add_hyperparameter(
                hp.CategoricalHyperparameter(name, ['true', 'false'], default_value=hp_default))

        elif hp_type == 'str':
            hp_range = hyperparam.get('range') or hyperparam.get('values')
            # Map None choices to the _NONE sentinel (renamed the loop var so
            # it no longer shadows the ConfigSpace.hyperparameters alias `hp`).
            hp_range = [_NONE if choice is None else choice for choice in hp_range]
            # BUG FIX: distinguish "no default given" (fall back to the first
            # choice) from an explicit None default (map it to _NONE); the
            # old `get('default') or ...` made the _NONE mapping dead code.
            if 'default' in hyperparam:
                hp_default = hyperparam['default']
            else:
                hp_default = hp_range[0]
            hp_default = _NONE if hp_default is None else hp_default

            config_space.add_hyperparameter(
                hp.CategoricalHyperparameter(name, hp_range, default_value=hp_default))

    return config_space
开发者ID:HDI-Project,项目名称:BTB,代码行数:43,代码来源:smac.py

示例9: create_cs_from_pandaframe

# 需要导入模块: from ConfigSpace import hyperparameters [as 别名]
# 或者: from ConfigSpace.hyperparameters import UniformFloatHyperparameter [as 别名]
def create_cs_from_pandaframe(self, data):
        """Fallback: derive a ConfigurationSpace from a DataFrame's columns,
        treating every column as a float parameter spanning its observed
        range padded by 1 on each side."""
        # TODO use from pyimp after https://github.com/automl/ParameterImportance/issues/72 is implemented
        warnings.warn("No parameter configuration space (pcs) provided! "
                      "Interpreting all parameters as floats. This might lead "
                      "to suboptimal analysis.", RuntimeWarning)
        self.logger.debug("Interpreting as parameters: %s", data.columns)
        lower_bounds = data.min()  # observed per-column extremes define the ranges
        upper_bounds = data.max()
        cs = ConfigurationSpace(seed=42)
        for column in data.columns:
            cs.add_hyperparameter(UniformFloatHyperparameter(
                column, lower=lower_bounds[column] - 1, upper=upper_bounds[column] + 1))
        return cs
开发者ID:automl,项目名称:CAVE,代码行数:14,代码来源:csv2rh.py

示例10: is_constant

# 需要导入模块: from ConfigSpace import hyperparameters [as 别名]
# 或者: from ConfigSpace.hyperparameters import UniformFloatHyperparameter [as 别名]
def is_constant(hyper):
    """Return ``(True, value)`` if *hyper* can only take one value,
    otherwise ``(False, None)``."""
    if isinstance(hyper, CSH.Constant):
        return True, hyper.value

    if isinstance(hyper, (CSH.UniformFloatHyperparameter, CSH.UniformIntegerHyperparameter)):
        # Bounds that (almost) coincide leave no room to vary.
        if abs(hyper.upper - hyper.lower) < 1e-10:
            return True, hyper.lower
        return False, None

    if isinstance(hyper, CSH.CategoricalHyperparameter):
        if len(hyper.choices) == 1:
            return True, hyper.choices[0]

    return False, None
开发者ID:automl,项目名称:Auto-PyTorch,代码行数:15,代码来源:modify_config_space.py

示例11: get_config_space

# 需要导入模块: from ConfigSpace import hyperparameters [as 别名]
# 或者: from ConfigSpace.hyperparameters import UniformFloatHyperparameter [as 别名]
def get_config_space(
        num_groups=(1, 9),
        blocks_per_group=(1, 4),
        max_units=((10, 1024), True),
        activation=('sigmoid', 'tanh', 'relu'),
        max_shake_drop_probability=(0, 1),
        max_dropout=(0, 1.0),
        resnet_shape=('funnel', 'long_funnel', 'diamond', 'hexagon', 'brick', 'triangle', 'stairs'),
        use_dropout=(True, False),
        use_shake_shake=(True, False),
        use_shake_drop=(True, False)
    ):
        """Search space for the shaped ResNet.

        Shake-drop probability and dropout strength are conditional on their
        respective boolean switches being True.
        """
        cs = CS.ConfigurationSpace()

        cs.add_hyperparameter(get_hyperparameter(CS.UniformIntegerHyperparameter, "num_groups", num_groups))
        cs.add_hyperparameter(get_hyperparameter(CS.UniformIntegerHyperparameter, "blocks_per_group", blocks_per_group))
        add_hyperparameter(cs, CS.CategoricalHyperparameter, "activation", activation)
        dropout_flag = add_hyperparameter(cs, CS.CategoricalHyperparameter, "use_dropout", use_dropout)
        add_hyperparameter(cs, CS.CategoricalHyperparameter, "use_shake_shake", use_shake_shake)

        shake_drop_flag = add_hyperparameter(cs, CS.CategoricalHyperparameter, "use_shake_drop", use_shake_drop)
        if True in use_shake_drop:
            shake_drop_prob = add_hyperparameter(
                cs, CS.UniformFloatHyperparameter, "max_shake_drop_probability", max_shake_drop_probability)
            # Only meaningful when shake-drop can actually be enabled.
            cs.add_condition(CS.EqualsCondition(shake_drop_prob, shake_drop_flag, True))

        add_hyperparameter(cs, CSH.CategoricalHyperparameter, 'resnet_shape', resnet_shape)
        add_hyperparameter(cs, CSH.UniformIntegerHyperparameter, "max_units", max_units)

        if True in use_dropout:
            dropout_amount = add_hyperparameter(cs, CSH.UniformFloatHyperparameter, "max_dropout", max_dropout)
            cs.add_condition(CS.EqualsCondition(dropout_amount, dropout_flag, True))

        return cs
开发者ID:automl,项目名称:Auto-PyTorch,代码行数:38,代码来源:shapedresnet.py

示例12: get_config_space

# 需要导入模块: from ConfigSpace import hyperparameters [as 别名]
# 或者: from ConfigSpace.hyperparameters import UniformFloatHyperparameter [as 别名]
def get_config_space(growth_rate_range=(12, 40), nr_blocks=(3, 4), layer_range=([1, 12], [6, 24], [12, 64], [12, 64]), num_init_features=(32, 128), **kwargs):
        # Search space for the DenseNet backbone: growth rate, number of dense
        # blocks, per-block layer counts (conditional on the block count), and
        # an optional dropout hyperparameter gated behind 'use_dropout'.
        #
        # NOTE(review): `num_init_features` is only referenced by commented-out
        # code below and is otherwise unused — confirm whether it should be
        # wired in or dropped from the signature.
        # NOTE(review): CS/CSH/add_hyperparameter are imported locally, but
        # `ConfigSpace` and `get_hyperparameter` must come from the enclosing
        # module scope — this function is not self-contained.

        import ConfigSpace as CS
        import ConfigSpace.hyperparameters as CSH
        from autoPyTorch.utils.config_space_hyperparameter import add_hyperparameter

        cs = CS.ConfigurationSpace()
        growth_rate_hp = get_hyperparameter(ConfigSpace.UniformIntegerHyperparameter, 'growth_rate', growth_rate_range)
        cs.add_hyperparameter(growth_rate_hp)
        # add_hyperparameter(cs,   CSH.UniformFloatHyperparameter, 'bn_size', [2, 4])
        # add_hyperparameter(cs, CSH.UniformIntegerHyperparameter, 'num_init_features', num_init_features, log=True)
        # add_hyperparameter(cs,    CSH.CategoricalHyperparameter, 'bottleneck', [True, False])

        blocks_hp = get_hyperparameter(ConfigSpace.UniformIntegerHyperparameter, 'blocks', nr_blocks)
        cs.add_hyperparameter(blocks_hp)
        use_dropout =   add_hyperparameter(cs,    CSH.CategoricalHyperparameter, 'use_dropout', [True, False])
        dropout =       add_hyperparameter(cs,   CSH.UniformFloatHyperparameter, 'dropout', [0.0, 1.0])
        cs.add_condition(CS.EqualsCondition(dropout, use_dropout, True))

        # nr_blocks may be either a plain (min, max) pair or the bundled
        # ((min, max), log) form accepted by get_hyperparameter.
        if type(nr_blocks[0]) == int:
            min_blocks = nr_blocks[0]
            max_blocks = nr_blocks[1]
        else:
            min_blocks = nr_blocks[0][0]
            max_blocks = nr_blocks[0][1]

        # One layer-count hyperparameter per possible block; blocks beyond the
        # guaranteed minimum are only active when 'blocks' is large enough.
        for i in range(1, max_blocks+1):
            layer_hp = get_hyperparameter(ConfigSpace.UniformIntegerHyperparameter, 'layer_in_block_%d' % i, layer_range[i-1])
            cs.add_hyperparameter(layer_hp)
            
            if i > min_blocks:
                cs.add_condition(CS.GreaterThanCondition(layer_hp, blocks_hp, i-1))

        return cs
开发者ID:automl,项目名称:Auto-PyTorch,代码行数:36,代码来源:densenet.py

示例13: get_config_space

# 需要导入模块: from ConfigSpace import hyperparameters [as 别名]
# 或者: from ConfigSpace.hyperparameters import UniformFloatHyperparameter [as 别名]
def get_config_space(
        learning_rate=((1e-4, 0.1), True),
        weight_decay=(1e-5, 0.1)
    ):
        """Optimizer search space: learning rate (log-scale) and weight decay."""
        cs = CS.ConfigurationSpace()
        for hp_name, hp_range in (('learning_rate', learning_rate), ('weight_decay', weight_decay)):
            add_hyperparameter(cs, CSH.UniformFloatHyperparameter, hp_name, hp_range)
        return cs
开发者ID:automl,项目名称:Auto-PyTorch,代码行数:10,代码来源:optimizer.py

示例14: get_config_space

# 需要导入模块: from ConfigSpace import hyperparameters [as 别名]
# 或者: from ConfigSpace.hyperparameters import UniformFloatHyperparameter [as 别名]
def get_config_space(
        gamma=(0.8, 0.9999)
    ):
        """Learning-rate scheduler search space: the decay factor gamma."""
        search_space = CS.ConfigurationSpace()
        add_hyperparameter(search_space, CSH.UniformFloatHyperparameter, 'gamma', gamma)
        return search_space
开发者ID:automl,项目名称:Auto-PyTorch,代码行数:8,代码来源:lr_schedulers.py

示例15: get_hyperparameter_search_space

# 需要导入模块: from ConfigSpace import hyperparameters [as 别名]
# 或者: from ConfigSpace.hyperparameters import UniformFloatHyperparameter [as 别名]
def get_hyperparameter_search_space(
        dataset_info=None,
        n_components=((50, 10000), True),
        gamma=((3.0517578125e-05, 8), True),
    ):
        """Search space for random kitchen sinks: component count and gamma,
        both on a log scale."""
        cs = ConfigSpace.ConfigurationSpace()
        gamma_hp = get_hyperparameter(CSH.UniformFloatHyperparameter, "gamma", gamma)
        n_components_hp = get_hyperparameter(CSH.UniformIntegerHyperparameter, "n_components", n_components)
        cs.add_hyperparameters([gamma_hp, n_components_hp])
        return cs
开发者ID:automl,项目名称:Auto-PyTorch,代码行数:12,代码来源:kitchen_sinks.py


注:本文中的ConfigSpace.hyperparameters.UniformFloatHyperparameter方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。