本文整理汇总了Python中nets.nasnet.nasnet.cifar_config方法的典型用法代码示例。如果您正苦于以下问题:Python nasnet.cifar_config方法的具体用法?Python nasnet.cifar_config怎么用?Python nasnet.cifar_config使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类nets.nasnet.nasnet
的用法示例。
在下文中一共展示了nasnet.cifar_config方法的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: testNoAuxHeadCifarModel
# Required import: from nets.nasnet import nasnet
# Alias used in the example: from nets.nasnet.nasnet import cifar_config
def testNoAuxHeadCifarModel(self):
  """Checks the aux head is built iff the `use_aux_head` hparam is set.

  Builds the CIFAR NASNet graph twice (aux head on and off) and asserts
  that the 'AuxLogits' end point is present exactly when requested.
  """
  batch_size = 5
  height, width = 32, 32  # CIFAR input resolution
  num_classes = 10
  for use_aux_head in (True, False):
    # Fresh graph per iteration so the two builds don't collide.
    tf.reset_default_graph()
    inputs = tf.random_uniform((batch_size, height, width, 3))
    tf.train.create_global_step()
    config = nasnet.cifar_config()
    # The hparam is declared as an int (0/1), hence the cast.
    config.set_hparam('use_aux_head', int(use_aux_head))
    with slim.arg_scope(nasnet.nasnet_cifar_arg_scope()):
      _, end_points = nasnet.build_nasnet_cifar(inputs, num_classes,
                                                config=config)
    self.assertEqual('AuxLogits' in end_points, use_aux_head)
示例2: testOverrideHParamsCifarModel
# Required import: from nets.nasnet import nasnet
# Alias used in the example: from nets.nasnet.nasnet import cifar_config
def testOverrideHParamsCifarModel(self):
  """Checks that overriding `data_format` to NCHW is honored.

  With NCHW the 'Stem' end point should be channels-first:
  [batch, 96, 32, 32] instead of [batch, 32, 32, 96].
  """
  batch_size = 5
  height, width = 32, 32  # CIFAR input resolution
  num_classes = 10
  inputs = tf.random_uniform((batch_size, height, width, 3))
  tf.train.create_global_step()
  config = nasnet.cifar_config()
  config.set_hparam('data_format', 'NCHW')
  with slim.arg_scope(nasnet.nasnet_cifar_arg_scope()):
    _, end_points = nasnet.build_nasnet_cifar(
        inputs, num_classes, config=config)
  # 96 stem filters appear in the channel (second) dimension under NCHW.
  self.assertListEqual(
      end_points['Stem'].shape.as_list(), [batch_size, 96, 32, 32])
示例3: testUseBoundedAcitvationCifarModel
# Required import: from nets.nasnet import nasnet
# Alias used in the example: from nets.nasnet.nasnet import cifar_config
def testUseBoundedAcitvationCifarModel(self):
  """Checks `use_bounded_activation` swaps every Relu for Relu6.

  Builds the graph with the hparam on and off and inspects the graph def:
  when bounded activations are requested all Relu* ops must be Relu6,
  otherwise none may be.
  """
  batch_size = 1
  height, width = 32, 32  # CIFAR input resolution
  num_classes = 10
  for use_bounded_activation in (True, False):
    # Fresh graph per iteration so node inspection sees only one build.
    tf.reset_default_graph()
    inputs = tf.random_uniform((batch_size, height, width, 3))
    config = nasnet.cifar_config()
    config.set_hparam('use_bounded_activation', use_bounded_activation)
    with slim.arg_scope(nasnet.nasnet_cifar_arg_scope()):
      _, _ = nasnet.build_nasnet_cifar(
          inputs, num_classes, config=config)
    for node in tf.get_default_graph().as_graph_def().node:
      if node.op.startswith('Relu'):
        self.assertEqual(node.op == 'Relu6', use_bounded_activation)
示例4: testNoAuxHeadCifarModel
# Required import: from nets.nasnet import nasnet
# Alias used in the example: from nets.nasnet.nasnet import cifar_config
def testNoAuxHeadCifarModel(self):
  """Checks the aux head is built iff the `use_aux_head` hparam is set.

  Builds the CIFAR NASNet graph twice (aux head on and off) and asserts
  that the 'AuxLogits' end point is present exactly when requested.
  """
  batch_size = 5
  height, width = 32, 32  # CIFAR input resolution
  num_classes = 10
  for use_aux_head in (True, False):
    # Fresh graph per iteration so the two builds don't collide.
    tf.reset_default_graph()
    inputs = tf.random.uniform((batch_size, height, width, 3))
    tf.train.create_global_step()
    config = nasnet.cifar_config()
    # The hparam is declared as an int (0/1), hence the cast.
    config.set_hparam('use_aux_head', int(use_aux_head))
    with slim.arg_scope(nasnet.nasnet_cifar_arg_scope()):
      _, end_points = nasnet.build_nasnet_cifar(inputs, num_classes,
                                                config=config)
    self.assertEqual('AuxLogits' in end_points, use_aux_head)
示例5: testOverrideHParamsCifarModel
# Required import: from nets.nasnet import nasnet
# Alias used in the example: from nets.nasnet.nasnet import cifar_config
def testOverrideHParamsCifarModel(self):
  """Checks that overriding `data_format` to NCHW is honored.

  With NCHW the 'Stem' end point should be channels-first:
  [batch, 96, 32, 32] instead of [batch, 32, 32, 96].
  """
  batch_size = 5
  height, width = 32, 32  # CIFAR input resolution
  num_classes = 10
  inputs = tf.random.uniform((batch_size, height, width, 3))
  tf.train.create_global_step()
  config = nasnet.cifar_config()
  config.set_hparam('data_format', 'NCHW')
  with slim.arg_scope(nasnet.nasnet_cifar_arg_scope()):
    _, end_points = nasnet.build_nasnet_cifar(
        inputs, num_classes, config=config)
  # 96 stem filters appear in the channel (second) dimension under NCHW.
  self.assertListEqual(
      end_points['Stem'].shape.as_list(), [batch_size, 96, 32, 32])
示例6: testUseBoundedAcitvationCifarModel
# Required import: from nets.nasnet import nasnet
# Alias used in the example: from nets.nasnet.nasnet import cifar_config
def testUseBoundedAcitvationCifarModel(self):
  """Checks `use_bounded_activation` swaps every Relu for Relu6.

  Builds the graph with the hparam on and off and inspects the graph def:
  when bounded activations are requested all Relu* ops must be Relu6,
  otherwise none may be.
  """
  batch_size = 1
  height, width = 32, 32  # CIFAR input resolution
  num_classes = 10
  for use_bounded_activation in (True, False):
    # Fresh graph per iteration so node inspection sees only one build.
    tf.reset_default_graph()
    inputs = tf.random.uniform((batch_size, height, width, 3))
    config = nasnet.cifar_config()
    config.set_hparam('use_bounded_activation', use_bounded_activation)
    with slim.arg_scope(nasnet.nasnet_cifar_arg_scope()):
      _, _ = nasnet.build_nasnet_cifar(
          inputs, num_classes, config=config)
    for node in tf.get_default_graph().as_graph_def().node:
      if node.op.startswith('Relu'):
        self.assertEqual(node.op == 'Relu6', use_bounded_activation)