This article collects typical usage examples of the Python method object_detection.utils.learning_schedules.exponential_decay_with_burnin. If you are unsure what learning_schedules.exponential_decay_with_burnin does or how to call it, the curated examples below should help; you can also explore the containing module, object_detection.utils.learning_schedules, for related usage.
The following presents 5 code examples of learning_schedules.exponential_decay_with_burnin, ordered by popularity by default.
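Before turning to the examples, here is a minimal usage sketch. The hyperparameter values and the pairing with MomentumOptimizer are illustrative assumptions rather than anything taken from the examples below; the schedule holds burnin_learning_rate constant for the first burnin_steps, then switches to an exponentially decaying rate.

# A minimal sketch (TF1 graph mode); all values here are illustrative.
import tensorflow as tf
from object_detection.utils import learning_schedules

global_step = tf.train.get_or_create_global_step()
learning_rate = learning_schedules.exponential_decay_with_burnin(
    global_step,
    learning_rate_base=0.01,         # rate in effect once burn-in ends
    learning_rate_decay_steps=1000,  # staircase decay interval
    learning_rate_decay_factor=0.95,
    burnin_learning_rate=0.001,      # constant rate during burn-in
    burnin_steps=500)
optimizer = tf.train.MomentumOptimizer(learning_rate, momentum=0.9)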
Example 1: testExponentialDecayWithBurnin
# Required import: from object_detection.utils import learning_schedules
# Or: from object_detection.utils.learning_schedules import exponential_decay_with_burnin
# This excerpt also assumes: import tensorflow as tf
def testExponentialDecayWithBurnin(self):
  global_step = tf.placeholder(tf.int32, [])
  learning_rate_base = 1.0
  learning_rate_decay_steps = 3
  learning_rate_decay_factor = .1
  burnin_learning_rate = .5
  burnin_steps = 2
  # Burn-in rate for steps 0-1, then staircase decay from the base rate.
  exp_rates = [.5, .5, 1, .1, .1, .1, .01, .01]
  learning_rate = learning_schedules.exponential_decay_with_burnin(
      global_step, learning_rate_base, learning_rate_decay_steps,
      learning_rate_decay_factor, burnin_learning_rate, burnin_steps)
  with self.test_session() as sess:
    output_rates = []
    for input_global_step in range(8):
      output_rate = sess.run(learning_rate,
                             feed_dict={global_step: input_global_step})
      output_rates.append(output_rate)
    self.assertAllClose(output_rates, exp_rates)
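The expected rates above follow a simple closed form: the burn-in rate applies while global_step < burnin_steps, and afterwards the rate is learning_rate_base * learning_rate_decay_factor ** (global_step // learning_rate_decay_steps). The snippet below is a plain-Python restatement of that rule as this test exercises it, not the library implementation:

# Reference restatement of the schedule tested above (not the library code).
def expected_rate(step, base=1.0, decay_steps=3, decay_factor=0.1,
                  burnin_rate=0.5, burnin_steps=2):
  if step < burnin_steps:
    return burnin_rate  # constant burn-in rate
  return base * decay_factor ** (step // decay_steps)  # staircase decay

print([round(expected_rate(s), 6) for s in range(8)])
# [0.5, 0.5, 1.0, 0.1, 0.1, 0.1, 0.01, 0.01]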
Example 2: testExponentialDecayWithBurnin
# Required import: from object_detection.utils import learning_schedules
# Or: from object_detection.utils.learning_schedules import exponential_decay_with_burnin
# This excerpt also assumes: import numpy as np
def testExponentialDecayWithBurnin(self):
  def graph_fn(global_step):
    learning_rate_base = 1.0
    learning_rate_decay_steps = 3
    learning_rate_decay_factor = .1
    burnin_learning_rate = .5
    burnin_steps = 2
    min_learning_rate = .05
    learning_rate = learning_schedules.exponential_decay_with_burnin(
        global_step, learning_rate_base, learning_rate_decay_steps,
        learning_rate_decay_factor, burnin_learning_rate, burnin_steps,
        min_learning_rate)
    assert learning_rate.op.name.endswith('learning_rate')
    return (learning_rate,)
  output_rates = [
      self.execute(graph_fn, [np.array(i).astype(np.int64)]) for i in range(9)
  ]
  # The final step is clamped at min_learning_rate (.05).
  exp_rates = [.5, .5, 1, 1, 1, .1, .1, .1, .05]
  self.assertAllClose(output_rates, exp_rates, rtol=1e-4)
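Two details distinguish this variant from Example 1, judging purely from the expected rates: the decay exponent here counts steps after burn-in ends (which is why steps 2 through 4 all stay at the base rate), and min_learning_rate acts as a floor (step 8 would otherwise decay to .01). A plain-Python restatement under those assumptions:

# Reference restatement of the behavior tested above (not the library code).
def expected_rate(step, base=1.0, decay_steps=3, decay_factor=0.1,
                  burnin_rate=0.5, burnin_steps=2, min_rate=0.05):
  if step < burnin_steps:
    return burnin_rate
  # In this variant, decay is measured from the end of burn-in.
  rate = base * decay_factor ** ((step - burnin_steps) // decay_steps)
  return max(rate, min_rate)  # min_learning_rate is a hard floor

print([round(expected_rate(s), 6) for s in range(9)])
# [0.5, 0.5, 1.0, 1.0, 1.0, 0.1, 0.1, 0.1, 0.05]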
Example 3: testExponentialDecayWithBurnin
# Required import: from object_detection.utils import learning_schedules
# Or: from object_detection.utils.learning_schedules import exponential_decay_with_burnin
# This excerpt also assumes: import numpy as np
def testExponentialDecayWithBurnin(self):
  def graph_fn(global_step):
    learning_rate_base = 1.0
    learning_rate_decay_steps = 3
    learning_rate_decay_factor = .1
    burnin_learning_rate = .5
    burnin_steps = 2
    learning_rate = learning_schedules.exponential_decay_with_burnin(
        global_step, learning_rate_base, learning_rate_decay_steps,
        learning_rate_decay_factor, burnin_learning_rate, burnin_steps)
    assert learning_rate.op.name.endswith('learning_rate')
    return (learning_rate,)
  output_rates = [
      self.execute(graph_fn, [np.array(i).astype(np.int64)]) for i in range(8)
  ]
  exp_rates = [.5, .5, 1, .1, .1, .1, .01, .01]
  self.assertAllClose(output_rates, exp_rates, rtol=1e-4)
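The expected rates here match Example 1: this excerpt exercises the signature without min_learning_rate, so the reference computation shown after Example 1 applies unchanged.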
Example 4: testExponentialDecayWithBurnin
# Required import: from object_detection.utils import learning_schedules
# Or: from object_detection.utils.learning_schedules import exponential_decay_with_burnin
# This excerpt also assumes: import numpy as np
def testExponentialDecayWithBurnin(self):
  def graph_fn(global_step):
    learning_rate_base = 1.0
    learning_rate_decay_steps = 3
    learning_rate_decay_factor = .1
    burnin_learning_rate = .5
    burnin_steps = 2
    learning_rate = learning_schedules.exponential_decay_with_burnin(
        global_step, learning_rate_base, learning_rate_decay_steps,
        learning_rate_decay_factor, burnin_learning_rate, burnin_steps)
    return (learning_rate,)
  output_rates = [
      self.execute(graph_fn, [np.array(i).astype(np.int64)]) for i in range(8)
  ]
  exp_rates = [.5, .5, 1, .1, .1, .1, .01, .01]
  self.assertAllClose(output_rates, exp_rates, rtol=1e-4)
Example 5: _create_learning_rate
# Required import: from object_detection.utils import learning_schedules
# Or: from object_detection.utils.learning_schedules import exponential_decay_with_burnin
# This excerpt also assumes: import tensorflow as tf
def _create_learning_rate(learning_rate_config):
  """Create optimizer learning rate based on config.

  Args:
    learning_rate_config: A LearningRate proto message.

  Returns:
    A learning rate.

  Raises:
    ValueError: when using an unsupported learning rate type, or when a
      manual-step schedule is empty.
  """
  learning_rate = None
  learning_rate_type = learning_rate_config.WhichOneof('learning_rate')
  if learning_rate_type == 'constant_learning_rate':
    config = learning_rate_config.constant_learning_rate
    learning_rate = tf.constant(config.learning_rate, dtype=tf.float32,
                                name='learning_rate')
  if learning_rate_type == 'exponential_decay_learning_rate':
    config = learning_rate_config.exponential_decay_learning_rate
    learning_rate = learning_schedules.exponential_decay_with_burnin(
        tf.train.get_or_create_global_step(),
        config.initial_learning_rate,
        config.decay_steps,
        config.decay_factor,
        burnin_learning_rate=config.burnin_learning_rate,
        burnin_steps=config.burnin_steps,
        min_learning_rate=config.min_learning_rate,
        staircase=config.staircase)
  if learning_rate_type == 'manual_step_learning_rate':
    config = learning_rate_config.manual_step_learning_rate
    if not config.schedule:
      raise ValueError('Empty learning rate schedule.')
    learning_rate_step_boundaries = [x.step for x in config.schedule]
    learning_rate_sequence = [config.initial_learning_rate]
    learning_rate_sequence += [x.learning_rate for x in config.schedule]
    learning_rate = learning_schedules.manual_stepping(
        tf.train.get_or_create_global_step(), learning_rate_step_boundaries,
        learning_rate_sequence, config.warmup)
  if learning_rate_type == 'cosine_decay_learning_rate':
    config = learning_rate_config.cosine_decay_learning_rate
    learning_rate = learning_schedules.cosine_decay_with_warmup(
        tf.train.get_or_create_global_step(),
        config.learning_rate_base,
        config.total_steps,
        config.warmup_learning_rate,
        config.warmup_steps,
        config.hold_base_rate_steps)
  if learning_rate is None:
    raise ValueError('Learning_rate %s not supported.' % learning_rate_type)
  return learning_rate
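In practice this builder is driven by the LearningRate message from a TensorFlow Object Detection API config. The sketch below shows one way to call it directly; it assumes the LearningRate message lives in object_detection.protos.optimizer_pb2 (its usual location), and the numeric values are made up. The field names inside exponential_decay_learning_rate mirror the proto fields read by the function above.

# Sketch: build a learning rate tensor from a text-format LearningRate proto.
# Assumes optimizer_pb2.LearningRate exists; values are placeholders.
from google.protobuf import text_format
from object_detection.protos import optimizer_pb2

config_text = """
  exponential_decay_learning_rate {
    initial_learning_rate: 0.004
    decay_steps: 1000
    decay_factor: 0.95
    burnin_learning_rate: 0.0004
    burnin_steps: 500
  }
"""
learning_rate_config = text_format.Parse(config_text,
                                         optimizer_pb2.LearningRate())
learning_rate = _create_learning_rate(learning_rate_config)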