This article collects typical usages of the Python function tensorflow.python.training.learning_rate_decay.exponential_decay. If you have been wondering what exactly exponential_decay does and how to use it, the curated code examples here may help.
The following shows 9 code examples of exponential_decay, sorted by popularity by default.
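Before the examples, here is a minimal pure-Python sketch of the formula exponential_decay computes, inferred from the expected values asserted in the tests below; the helper name and structure are illustrative, not TensorFlow's implementation:

def decayed_learning_rate(learning_rate, global_step, decay_steps, decay_rate,
                          staircase=False):
  # staircase=True uses integer division, so the rate drops in discrete steps
  # every `decay_steps` steps; otherwise the decay is continuous.
  if staircase:
    exponent = global_step // decay_steps
  else:
    exponent = global_step / decay_steps
  return learning_rate * decay_rate ** exponent

# Matches the assertions in the tests below:
# decayed_learning_rate(0.1, 100, 3, 0.96, staircase=True) == 0.1 * 0.96 ** 33
# decayed_learning_rate(0.05, 5, 10, 0.96) == 0.05 * 0.96 ** 0.5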
Example 1: testStaircase
def testStaircase(self):
  with self.test_session():
    step = state_ops.variable_op([], dtypes.int32)
    assign_100 = state_ops.assign(step, 100)
    assign_1 = state_ops.assign(step, 1)
    assign_2 = state_ops.assign(step, 2)
    decayed_lr = learning_rate_decay.exponential_decay(
        0.1, step, 3, 0.96, staircase=True)
    # No change to learning rate
    assign_1.op.run()
    self.assertAllClose(decayed_lr.eval(), 0.1, 1e-6)
    assign_2.op.run()
    self.assertAllClose(decayed_lr.eval(), 0.1, 1e-6)
    # Decayed learning rate
    assign_100.op.run()
    expected = 0.1 * 0.96 ** (100 // 3)
    self.assertAllClose(decayed_lr.eval(), expected, 1e-6)
Example 2: testVariables
def testVariables(self):
  step = variables.VariableV1(1)
  assign_1 = step.assign(1)
  assign_2 = step.assign(2)
  assign_100 = step.assign(100)
  decayed_lr = learning_rate_decay.exponential_decay(
      .1, step, 3, 0.96, staircase=True)
  self.evaluate(variables.global_variables_initializer())
  # No change to learning rate
  self.evaluate(assign_1.op)
  self.assertAllClose(self.evaluate(decayed_lr), .1, 1e-6)
  self.evaluate(assign_2.op)
  self.assertAllClose(self.evaluate(decayed_lr), .1, 1e-6)
  # Decayed learning rate
  self.evaluate(assign_100.op)
  expected = .1 * 0.96 ** (100 // 3)
  self.assertAllClose(self.evaluate(decayed_lr), expected, 1e-6)
Example 3: testVariables
def testVariables(self):
  with self.test_session():
    step = variables.Variable(1)
    assign_1 = step.assign(1)
    assign_2 = step.assign(2)
    assign_100 = step.assign(100)
    decayed_lr = learning_rate_decay.exponential_decay(
        0.1, step, 3, 0.96, staircase=True)
    variables.initialize_all_variables().run()
    # No change to learning rate
    assign_1.op.run()
    self.assertAllClose(decayed_lr.eval(), 0.1, 1e-6)
    assign_2.op.run()
    self.assertAllClose(decayed_lr.eval(), 0.1, 1e-6)
    # Decayed learning rate
    assign_100.op.run()
    expected = 0.1 * 0.96 ** (100 // 3)
    self.assertAllClose(decayed_lr.eval(), expected, 1e-6)
Example 4: testStaircase
def testStaircase(self):
  with self.test_session():
    step = gen_state_ops._variable(shape=[], dtype=dtypes.int32,
                                   name="step", container="", shared_name="")
    assign_100 = state_ops.assign(step, 100)
    assign_1 = state_ops.assign(step, 1)
    assign_2 = state_ops.assign(step, 2)
    decayed_lr = learning_rate_decay.exponential_decay(.1, step, 3, 0.96,
                                                       staircase=True)
    # No change to learning rate
    assign_1.op.run()
    self.assertAllClose(decayed_lr.eval(), .1, 1e-6)
    assign_2.op.run()
    self.assertAllClose(decayed_lr.eval(), .1, 1e-6)
    # Decayed learning rate
    assign_100.op.run()
    expected = .1 * 0.96 ** (100 // 3)
    self.assertAllClose(decayed_lr.eval(), expected, 1e-6)
Example 5: testStaircase
def testStaircase(self):
  if context.executing_eagerly():
    step = resource_variable_ops.ResourceVariable(0)
    self.evaluate(variables.global_variables_initializer())
    decayed_lr = learning_rate_decay.exponential_decay(
        .1, step, 3, 0.96, staircase=True)
    # No change to learning rate due to staircase
    expected = .1
    self.evaluate(step.assign(1))
    self.assertAllClose(self.evaluate(decayed_lr), expected, 1e-6)
    expected = .1
    self.evaluate(step.assign(2))
    self.assertAllClose(self.evaluate(decayed_lr), .1, 1e-6)
    # Decayed learning rate
    expected = .1 * 0.96 ** (100 // 3)
    self.evaluate(step.assign(100))
    self.assertAllClose(self.evaluate(decayed_lr), expected, 1e-6)
Example 6: testContinuous
def testContinuous(self):
  with self.test_session():
    step = 5
    decayed_lr = learning_rate_decay.exponential_decay(0.05, step, 10, 0.96)
    expected = .05 * 0.96 ** (5.0 / 10.0)
    self.assertAllClose(decayed_lr.eval(), expected, 1e-6)
Example 7: testContinuous
def testContinuous(self):
  self.evaluate(variables.global_variables_initializer())
  step = 5
  decayed_lr = learning_rate_decay.exponential_decay(0.05, step, 10, 0.96)
  expected = .05 * 0.96 ** (5.0 / 10.0)
  self.assertAllClose(self.evaluate(decayed_lr), expected, 1e-6)
Example 8: f2
def f2():
  # lr_init, lr_gstep, decay_steps and lr_dec are defined in the enclosing
  # scope; the final positional argument True enables staircase decay.
  return learning_rate_decay.exponential_decay(lr_init, lr_gstep,
                                               decay_steps, lr_dec, True)
Example 9: make_opt
def make_opt():
  # Decay the learning rate from 1.0 by a factor of 0.9 every 10 global steps.
  gstep = training_util.get_or_create_global_step()
  lr = learning_rate_decay.exponential_decay(1.0, gstep, 10, 0.9)
  return training.GradientDescentOptimizer(lr)
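The examples above exercise a TF 1.x internal module. For comparison, here is a minimal sketch of the same schedule written against the public TF 2.x API, assuming tf.keras.optimizers.schedules.ExponentialDecay; the values mirror Example 9 and are illustrative only:

import tensorflow as tf

# Decay from 1.0 by a factor of 0.9 every 10 steps (continuous decay,
# as in make_opt above; pass staircase=True for stepwise decay).
lr_schedule = tf.keras.optimizers.schedules.ExponentialDecay(
    initial_learning_rate=1.0, decay_steps=10, decay_rate=0.9, staircase=False)
optimizer = tf.keras.optimizers.SGD(learning_rate=lr_schedule)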