

Python ConfigSpace.hyperparameters Code Examples

This article collects typical usage examples of ConfigSpace.hyperparameters in Python. If you are wondering what ConfigSpace.hyperparameters is for, or how to use it in your own code, the curated examples below should help. You can also explore further usage examples of the ConfigSpace package it belongs to.


Below are 5 code examples of ConfigSpace.hyperparameters, sorted by popularity by default; all of them are taken from the automl/Auto-PyTorch project.
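Before looking at the examples, note that ConfigSpace.hyperparameters is the submodule that defines the hyperparameter classes (integer, float and categorical) used throughout. A minimal, self-contained sketch of the basic API, assuming only that the ConfigSpace package is installed (the names and ranges are illustrative):

import ConfigSpace as CS
import ConfigSpace.hyperparameters as CSH

cs = CS.ConfigurationSpace()
# Integer, float and categorical hyperparameters; log=True samples the range on a log scale.
cs.add_hyperparameter(CSH.UniformIntegerHyperparameter("batch_size", 32, 500, log=True))
cs.add_hyperparameter(CSH.UniformFloatHyperparameter("dropout", 0.0, 1.0))
cs.add_hyperparameter(CSH.CategoricalHyperparameter("activation", ["relu", "tanh"]))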

Example 1: get_hyperparameter_search_space

# Required import: import ConfigSpace [as alias]
# Or: from ConfigSpace import hyperparameters [as alias]
def get_hyperparameter_search_space(self, dataset_info=None, **pipeline_config):
        import ConfigSpace
        import ConfigSpace.hyperparameters as CSH
        # This is a method of an Auto-PyTorch pipeline node: self.pipeline,
        # self.initialization_methods, self.initializers and ConfigWrapper are
        # provided by the surrounding class and its module-level imports.
        pipeline_config = self.pipeline.get_pipeline_config(**pipeline_config)
        cs = ConfigSpace.ConfigurationSpace()

        # add hyperparameters of initialization method
        possible_initialization_methods = set(pipeline_config["initialization_methods"]).intersection(self.initialization_methods.keys())
        selector = cs.add_hyperparameter(CSH.CategoricalHyperparameter("initialization_method", sorted(possible_initialization_methods)))

        for method_name, method_type in self.initialization_methods.items():
            if (method_name not in possible_initialization_methods):
                continue
            method_cs = method_type.get_hyperparameter_search_space(
                **self._get_search_space_updates(prefix=method_name))
            cs.add_configuration_space(prefix=method_name, configuration_space=method_cs, delimiter=ConfigWrapper.delimiter, 
                                       parent_hyperparameter={'parent': selector, 'value': method_name})

        # add hyperparameter of initializer
        initializer = self.initializers[pipeline_config["initializer"]]
        initializer_cs = initializer.get_hyperparameter_search_space(**self._get_search_space_updates(prefix="initializer"))
        cs.add_configuration_space(prefix="initializer", configuration_space=initializer_cs, delimiter=ConfigWrapper.delimiter)

        self._check_search_space_updates(("initializer", "*"), (possible_initialization_methods, "*"))
        return cs 
Developer: automl, Project: Auto-PyTorch, Lines of code: 25, Source: initialization_selector.py
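The core pattern in Example 1 is nesting a per-method sub-space under a categorical selector with add_configuration_space and parent_hyperparameter, so a method's hyperparameters are only active when that method is selected. A self-contained sketch of the same pattern in plain ConfigSpace (the method name "sparse" and its sub-space are invented for illustration):

import ConfigSpace as CS
import ConfigSpace.hyperparameters as CSH

cs = CS.ConfigurationSpace()
# Top-level categorical hyperparameter that chooses the initialization method.
selector = cs.add_hyperparameter(
    CSH.CategoricalHyperparameter("initialization_method", ["sparse", "default"]))

# A hypothetical sub-space for the "sparse" method.
sparse_cs = CS.ConfigurationSpace()
sparse_cs.add_hyperparameter(CSH.UniformFloatHyperparameter("sparsity", 0.1, 0.9))

# Attach the sub-space under the prefix "sparse"; its hyperparameters are only
# active when the selector takes the value "sparse".
cs.add_configuration_space(prefix="sparse", configuration_space=sparse_cs,
                           delimiter=":",
                           parent_hyperparameter={"parent": selector, "value": "sparse"})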

Example 2: get_hyperparameter_search_space

# Required import: import ConfigSpace [as alias]
# Or: from ConfigSpace import hyperparameters [as alias]
def get_hyperparameter_search_space(self, dataset_info=None, **pipeline_config):
        import ConfigSpace
        import ConfigSpace.hyperparameters as CSH
        from autoPyTorch.utils.config_space_hyperparameter import add_hyperparameter

        pipeline_config = self.pipeline.get_pipeline_config(**pipeline_config)
        cs = ConfigSpace.ConfigurationSpace()

        batch_size_range = self._get_search_space_updates().get('batch_size', ((32, 500), True))
        add_hyperparameter(cs, CSH.UniformIntegerHyperparameter, 'batch_size', batch_size_range)
        self._check_search_space_updates('batch_size')
        return cs 
Developer: automl, Project: Auto-PyTorch, Lines of code: 13, Source: create_dataloader.py
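add_hyperparameter (and the get_hyperparameter used in Examples 3-5) are Auto-PyTorch helpers from autoPyTorch.utils.config_space_hyperparameter, not part of ConfigSpace itself. Judging from how they are called, a value range is either a plain (lower, upper) pair or a ((lower, upper), log_flag) pair requesting a log scale. A rough, hypothetical sketch of the numeric case only; the real helper may differ in details:

import ConfigSpace as CS
import ConfigSpace.hyperparameters as CSH

def add_hyperparameter_sketch(cs, hp_type, name, value_range):
    # Hypothetical stand-in for Auto-PyTorch's add_hyperparameter helper,
    # covering only the numeric (integer/float) hyperparameter types.
    if len(value_range) == 2 and isinstance(value_range[0], (tuple, list)):
        (lower, upper), log = value_range        # ((lower, upper), log_flag) form
    else:
        lower, upper = value_range               # plain (lower, upper) form
        log = False
    hp = hp_type(name, lower=lower, upper=upper, log=log)
    cs.add_hyperparameter(hp)
    return hp

# The same call as in Example 2: a log-scaled integer in [32, 500].
cs = CS.ConfigurationSpace()
add_hyperparameter_sketch(cs, CSH.UniformIntegerHyperparameter, 'batch_size', ((32, 500), True))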

Example 3: get_config_space

# Required import: import ConfigSpace [as alias]
# Or: from ConfigSpace import hyperparameters [as alias]
def get_config_space(   nr_main_blocks=[1, 8], nr_residual_blocks=([1, 16], True), initial_filters=([8, 32], True), widen_factor=([0.5, 4], True),
                            res_branches=([1, 5], False), filters_size=[3, 3], **kwargs):

        import ConfigSpace                  # the body below uses both the full name
        import ConfigSpace as CS            # and the CS alias
        import ConfigSpace.hyperparameters as CSH
        # get_hyperparameter is an Auto-PyTorch helper (same module as the
        # add_hyperparameter helper imported in Example 4).
        from autoPyTorch.utils.config_space_hyperparameter import get_hyperparameter

        cs = CS.ConfigurationSpace()

        nr_main_blocks_hp = get_hyperparameter(ConfigSpace.UniformIntegerHyperparameter, "nr_main_blocks", nr_main_blocks)
        cs.add_hyperparameter(nr_main_blocks_hp)
        initial_filters_hp = get_hyperparameter(ConfigSpace.UniformIntegerHyperparameter, "initial_filters", initial_filters)
        cs.add_hyperparameter(initial_filters_hp)
        # add_hyperparameter(cs, CSH.UniformIntegerHyperparameter, 'nr_convs', nr_convs, log=True)
        death_rate_hp = get_hyperparameter(ConfigSpace.UniformFloatHyperparameter, "death_rate", ([0,1], False))
        cs.add_hyperparameter(death_rate_hp)

        if type(nr_main_blocks[0]) is int:
            main_blocks_min = nr_main_blocks[0]
            main_blocks_max = nr_main_blocks[1]
        else:
            main_blocks_min = nr_main_blocks[0][0]
            main_blocks_max = nr_main_blocks[0][1]
	    
        for i in range(1, main_blocks_max + 1):
            blocks_hp = get_hyperparameter(ConfigSpace.UniformIntegerHyperparameter, 'nr_residual_blocks_%d' % i, nr_residual_blocks)
            blocks = cs.add_hyperparameter(blocks_hp)
            widen_hp = get_hyperparameter(ConfigSpace.UniformFloatHyperparameter, 'widen_factor_%d' % i, widen_factor)
            widen = cs.add_hyperparameter(widen_hp)
            branches_hp = get_hyperparameter(ConfigSpace.UniformIntegerHyperparameter, 'res_branches_%d' % i, res_branches)
            branches = cs.add_hyperparameter(branches_hp)
            # filters = add_hyperparameter(cs, CSH.UniformIntegerHyperparameter, 'filters_size_%d' % i, filters_size, log=False)

            if i > main_blocks_min:
                cs.add_condition(CS.GreaterThanCondition(blocks_hp, nr_main_blocks_hp, i-1))
                cs.add_condition(CS.GreaterThanCondition(widen_hp, nr_main_blocks_hp, i-1))
                cs.add_condition(CS.GreaterThanCondition(branches_hp, nr_main_blocks_hp, i-1))
                # cs.add_condition(CS.GreaterThanCondition(filters, main_blocks, i-1))

        return cs 
Developer: automl, Project: Auto-PyTorch, Lines of code: 41, Source: resnet.py
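Examples 3-5 all rely on the same conditioning trick: per-block hyperparameters are only active when the block index does not exceed the sampled number of blocks, expressed with GreaterThanCondition. A self-contained sketch of that pattern (the names and ranges are illustrative):

import ConfigSpace as CS
import ConfigSpace.hyperparameters as CSH

cs = CS.ConfigurationSpace()
min_blocks, max_blocks = 1, 4
n_blocks = cs.add_hyperparameter(
    CSH.UniformIntegerHyperparameter("nr_main_blocks", min_blocks, max_blocks))

for i in range(1, max_blocks + 1):
    width = cs.add_hyperparameter(
        CSH.UniformIntegerHyperparameter("width_%d" % i, 8, 64))
    if i > min_blocks:
        # width_i is only active when nr_main_blocks > i - 1, i.e. when the
        # i-th block actually exists in the sampled architecture.
        cs.add_condition(CS.GreaterThanCondition(width, n_blocks, i - 1))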

Example 4: get_config_space

# Required import: import ConfigSpace [as alias]
# Or: from ConfigSpace import hyperparameters [as alias]
def get_config_space(growth_rate_range=(12, 40), nr_blocks=(3, 4), layer_range=([1, 12], [6, 24], [12, 64], [12, 64]), num_init_features=(32, 128), **kwargs):

        import ConfigSpace                  # the body below uses both the full name
        import ConfigSpace as CS            # and the CS alias
        import ConfigSpace.hyperparameters as CSH
        from autoPyTorch.utils.config_space_hyperparameter import add_hyperparameter, get_hyperparameter

        cs = CS.ConfigurationSpace()
        growth_rate_hp = get_hyperparameter(ConfigSpace.UniformIntegerHyperparameter, 'growth_rate', growth_rate_range)
        cs.add_hyperparameter(growth_rate_hp)
        # add_hyperparameter(cs,   CSH.UniformFloatHyperparameter, 'bn_size', [2, 4])
        # add_hyperparameter(cs, CSH.UniformIntegerHyperparameter, 'num_init_features', num_init_features, log=True)
        # add_hyperparameter(cs,    CSH.CategoricalHyperparameter, 'bottleneck', [True, False])

        blocks_hp = get_hyperparameter(ConfigSpace.UniformIntegerHyperparameter, 'blocks', nr_blocks)
        cs.add_hyperparameter(blocks_hp)
        use_dropout =   add_hyperparameter(cs,    CSH.CategoricalHyperparameter, 'use_dropout', [True, False])
        dropout =       add_hyperparameter(cs,   CSH.UniformFloatHyperparameter, 'dropout', [0.0, 1.0])
        cs.add_condition(CS.EqualsCondition(dropout, use_dropout, True))

        if type(nr_blocks[0]) == int:
            min_blocks = nr_blocks[0]
            max_blocks = nr_blocks[1]
        else:
            min_blocks = nr_blocks[0][0]
            max_blocks = nr_blocks[0][1]

        for i in range(1, max_blocks+1):
            layer_hp = get_hyperparameter(ConfigSpace.UniformIntegerHyperparameter, 'layer_in_block_%d' % i, layer_range[i-1])
            cs.add_hyperparameter(layer_hp)
            
            if i > min_blocks:
                cs.add_condition(CS.GreaterThanCondition(layer_hp, blocks_hp, i-1))

        return cs 
Developer: automl, Project: Auto-PyTorch, Lines of code: 36, Source: densenet.py
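Example 4 also uses EqualsCondition: the dropout rate is only sampled when use_dropout is True. The same idea in plain ConfigSpace, without the Auto-PyTorch helpers:

import ConfigSpace as CS
import ConfigSpace.hyperparameters as CSH

cs = CS.ConfigurationSpace()
use_dropout = cs.add_hyperparameter(
    CSH.CategoricalHyperparameter("use_dropout", [True, False]))
dropout = cs.add_hyperparameter(
    CSH.UniformFloatHyperparameter("dropout", 0.0, 1.0))

# "dropout" is only active, and only appears in sampled configurations,
# when "use_dropout" equals True.
cs.add_condition(CS.EqualsCondition(dropout, use_dropout, True))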

Example 5: get_config_space

# Required import: import ConfigSpace [as alias]
# Or: from ConfigSpace import hyperparameters [as alias]
def get_config_space(   nr_main_blocks=[3, 7], initial_filters=([8, 32], True), nr_sub_blocks=([1, 4], False),
                            op_types = ["inverted_residual", "dwise_sep_conv"], kernel_sizes=[3, 5],  strides=[1,2],
                            output_filters = [[12, 16, 20],
                                              [18, 24, 30],
                                              [24, 32, 40],
                                              [48, 64, 80],
                                              [72, 96, 120],
                                              [120, 160, 200], 
                                              [240, 320, 400]],   # the idea is to search for e.g. 0.75, 1, 1.25* output_filters(mainblock number)
                            skip_connection = [True, False], se_ratios = [0, 0.25], **kwargs):
                            
        import ConfigSpace                  # the body below uses both the full name
        import ConfigSpace as CS            # and the CS alias
        import ConfigSpace.hyperparameters as CSH
        from autoPyTorch.utils.config_space_hyperparameter import get_hyperparameter

        cs = CS.ConfigurationSpace()

        
        main_blocks_hp = get_hyperparameter(ConfigSpace.UniformIntegerHyperparameter, "nr_main_blocks", nr_main_blocks)
        initial_filters_hp = get_hyperparameter(ConfigSpace.UniformIntegerHyperparameter, "initial_filters", initial_filters)
        cs.add_hyperparameter(main_blocks_hp)
        cs.add_hyperparameter(initial_filters_hp)

        if type(nr_main_blocks[0]) == int:
            min_blocks = nr_main_blocks[0]
            max_blocks = nr_main_blocks[1]
        else:
            min_blocks = nr_main_blocks[0][0]
            max_blocks = nr_main_blocks[0][1]
	    
        for i in range(1, max_blocks + 1):
            sub_blocks_hp = get_hyperparameter(ConfigSpace.UniformIntegerHyperparameter, 'nr_sub_blocks_%d' % i, nr_sub_blocks)
            op_type_hp = get_hyperparameter(ConfigSpace.CategoricalHyperparameter, 'op_type_%d' % i, op_types)
            kernel_size_hp = get_hyperparameter(ConfigSpace.CategoricalHyperparameter, 'kernel_size_%d' % i, kernel_sizes)
            stride_hp = get_hyperparameter(ConfigSpace.CategoricalHyperparameter, 'stride_%d' % i, strides)
            out_filters_hp = get_hyperparameter(ConfigSpace.CategoricalHyperparameter, 'out_filters_%d' % i, output_filters[i-1])             # take output_filters list i-1 as options
            se_ratio_hp = get_hyperparameter(ConfigSpace.CategoricalHyperparameter, 'se_ratio_%d' % i, se_ratios)
            cs.add_hyperparameter(sub_blocks_hp)
            cs.add_hyperparameter(op_type_hp)
            cs.add_hyperparameter(kernel_size_hp)
            cs.add_hyperparameter(stride_hp)
            cs.add_hyperparameter(out_filters_hp)
            cs.add_hyperparameter(se_ratio_hp)
            skip_con = cs.add_hyperparameter(CSH.CategoricalHyperparameter('skip_con_%d' % i, [True, False]))

            if i > min_blocks:
                cs.add_condition(CS.GreaterThanCondition(sub_blocks_hp, main_blocks_hp, i-1))
                cs.add_condition(CS.GreaterThanCondition(op_type_hp, main_blocks_hp, i-1))
                cs.add_condition(CS.GreaterThanCondition(kernel_size_hp, main_blocks_hp, i-1))
                cs.add_condition(CS.GreaterThanCondition(stride_hp, main_blocks_hp, i-1))
                cs.add_condition(CS.GreaterThanCondition(out_filters_hp, main_blocks_hp, i-1))
                cs.add_condition(CS.GreaterThanCondition(skip_con, main_blocks_hp, i-1))
                cs.add_condition(CS.GreaterThanCondition(se_ratio_hp, main_blocks_hp, i-1))

        return cs 
Developer: automl, Project: Auto-PyTorch, Lines of code: 56, Source: mobilenet.py
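Whichever of these methods builds the space, downstream code (in Auto-PyTorch's case, the hyperparameter optimizer) consumes it by inspecting and sampling configurations. A short self-contained sketch, with a toy space standing in for the real one:

import ConfigSpace as CS
import ConfigSpace.hyperparameters as CSH

cs = CS.ConfigurationSpace()
cs.add_hyperparameter(CSH.UniformIntegerHyperparameter("nr_main_blocks", 3, 7))
cs.add_hyperparameter(CSH.CategoricalHyperparameter("op_type_1", ["inverted_residual", "dwise_sep_conv"]))

print(cs.get_hyperparameter_names())       # names of all hyperparameters in the space
print(cs.get_default_configuration())      # default value for every hyperparameter
for config in cs.sample_configuration(5):  # draw several random configurations
    # Each configuration behaves like a dict; conditionally inactive
    # hyperparameters are simply omitted from it.
    print(config.get_dictionary())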


Note: The ConfigSpace.hyperparameters examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective authors; the source code copyright remains with the original authors, and distribution and use are governed by the corresponding project licenses. Please do not reproduce without permission.