

Python backend.cast_to_floatx Method Code Examples

This article collects typical usage examples of the Python method keras.backend.cast_to_floatx. If you are unsure what backend.cast_to_floatx does, how to call it, or what real-world usage looks like, the selected code examples below may help. You can also explore further usage examples for the keras.backend module.


The following shows 15 code examples of the backend.cast_to_floatx method, sorted by popularity by default.
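Before the examples, here is a minimal sketch (not taken from any of the projects below) of what the method itself does: K.cast_to_floatx casts a Python number or a NumPy array to Keras's configured default float type, K.floatx() (typically 'float32').

import numpy as np
from keras import backend as K

print(K.floatx())                       # the configured default float type, e.g. 'float32'
scalar = K.cast_to_floatx(1.)           # a plain float cast to that dtype
array = K.cast_to_floatx(np.arange(3))  # a NumPy int array cast to that dtype
print(array.dtype)                      # float32 under the default configuration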

Example 1: get_constants

# Required import: from keras import backend [as alias]
# Or: from keras.backend import cast_to_floatx [as alias]
def get_constants(self, x):
    constants = []
    if 0 < self.dropout_U < 1:
      ones = K.ones_like(K.reshape(x[:, 0, 0], (-1, 1)))
      ones = K.tile(ones, (1, self.output_dim))
      B_U = [K.in_train_phase(K.dropout(ones, self.dropout_U), ones) for _ in range(3)]
      constants.append(B_U)
    else:
      constants.append([K.cast_to_floatx(1.) for _ in range(3)])

    if 0 < self.dropout_W < 1:
      input_shape = self.input_spec[0].shape
      input_dim = input_shape[-1]
      ones = K.ones_like(K.reshape(x[:, 0, 0], (-1, 1)))
      ones = K.tile(ones, (1, input_dim))
      B_W = [K.in_train_phase(K.dropout(ones, self.dropout_W), ones) for _ in range(3)]
      constants.append(B_W)
    else:
      constants.append([K.cast_to_floatx(1.) for _ in range(3)])
    return constants 
Author: LaurentMazare, Project: deep-models, Lines: 22, Source: rhn.py
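Most of the get_constants examples on this page follow the same Keras 1.x recurrent-layer pattern: when dropout is enabled, a mask is built from a ones tensor with K.dropout wrapped in K.in_train_phase; when it is disabled, K.cast_to_floatx(1.) is appended instead, so multiplying the state by the "mask" becomes a no-op. A small standalone sketch of that idea (variable names are illustrative, not from the project above):

import numpy as np
from keras import backend as K

hidden = K.variable(np.ones((2, 4)))                # stand-in for a recurrent state
identity_mask = K.cast_to_floatx(1.)                # the "dropout disabled" fallback
dropout_mask = K.dropout(K.ones_like(hidden), 0.5)  # rescaled Bernoulli mask

print(K.eval(hidden * identity_mask))   # unchanged: multiplying by 1.0 is a no-op
print(K.eval(hidden * dropout_mask))    # roughly half the entries zeroed, the rest scaled up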

Example 2: get_constants

# Required import: from keras import backend [as alias]
# Or: from keras.backend import cast_to_floatx [as alias]
def get_constants(self, x):
    constants = []
    if 0 < self.dropout_U < 1:
        ones = K.ones_like(K.reshape(x[:, 0, 0], (-1, 1)))
        ones = K.tile(ones, (1, self.hidden_recurrent_dim))
        B_U = K.in_train_phase(K.dropout(ones, self.dropout_U), ones)
        constants.append(B_U)
    else:
        constants.append(K.cast_to_floatx(1.))

    if self.consume_less == 'cpu' and 0 < self.dropout_W < 1:
        input_shape = self.input_spec[0].shape
        input_dim = input_shape[-1]
        ones = K.ones_like(K.reshape(x[:, 0, 0], (-1, 1)))
        ones = K.tile(ones, (1, input_dim))
        B_W = K.in_train_phase(K.dropout(ones, self.dropout_W), ones)
        constants.append(B_W)
    else:
        constants.append(K.cast_to_floatx(1.))

    return constants
Author: bnsnapper, Project: keras_bn_library, Lines: 23, Source: rnnrbm.py

Example 3: get_constants

# Required import: from keras import backend [as alias]
# Or: from keras.backend import cast_to_floatx [as alias]
def get_constants(self, x):
		constants = []
		if 0 < self.dropout_U < 1:
			ones = K.ones_like(K.reshape(x[:, 0, 0], (-1, 1)))
			ones = K.tile(ones, (1, self.input_dim))
			B_U = [K.in_train_phase(K.dropout(ones, self.dropout_U), ones) for _ in range(4)]
			constants.append(B_U)
		else:
			constants.append([K.cast_to_floatx(1.) for _ in range(4)])

		if 0 < self.dropout_W < 1:
			input_shape = K.int_shape(x)
			input_dim = input_shape[-1]
			ones = K.ones_like(K.reshape(x[:, 0, 0], (-1, 1)))
			ones = K.tile(ones, (1, int(input_dim)))
			B_W = [K.in_train_phase(K.dropout(ones, self.dropout_W), ones) for _ in range(4)]
			constants.append(B_W)
		else:
			constants.append([K.cast_to_floatx(1.) for _ in range(4)])
		return constants 
Author: bnsnapper, Project: keras_bn_library, Lines: 22, Source: recurrent.py

Example 4: get_constants

# Required import: from keras import backend [as alias]
# Or: from keras.backend import cast_to_floatx [as alias]
def get_constants(self, x):
        constants = []
        if 0 < self.dropout_U < 1:
            ones = K.ones_like(K.reshape(x[:, 0, 0], (-1, 1)))
            ones = K.tile(ones, (1, self.output_dim))
            B_U = [K.in_train_phase(K.dropout(ones, self.dropout_U), ones) for _ in range(4)]
            constants.append(B_U)
        else:
            constants.append([K.cast_to_floatx(1.) for _ in range(4)])

        if 0 < self.dropout_W < 1:
            input_shape = self.input_spec[0].shape
            input_dim = input_shape[-1]
            ones = K.ones_like(K.reshape(x[:, 0, 0], (-1, 1)))
            ones = K.tile(ones, (1, int(input_dim)))
            B_W = [K.in_train_phase(K.dropout(ones, self.dropout_W), ones) for _ in range(4)]
            constants.append(B_W)
        else:
            constants.append([K.cast_to_floatx(1.) for _ in range(4)])
        return constants 
Author: SigmaQuan, Project: NTM-Keras, Lines: 22, Source: lstm2ntm.py

Example 5: get_constants

# Required import: from keras import backend [as alias]
# Or: from keras.backend import cast_to_floatx [as alias]
def get_constants(self, x):
      constants = []
      if 0 < self.dropout_U < 1:
          ones = K.ones_like(K.reshape(x[:, 0, 0], (-1, 1)))
          ones = K.concatenate([ones] * self.output_dim, 1)
          B_U = K.in_train_phase(K.dropout(ones, self.dropout_U), ones)
          constants.append(B_U)
      else:
          constants.append(K.cast_to_floatx(1.))
      if self.consume_less == 'cpu' and 0 < self.dropout_W < 1:
          input_shape = self.input_spec[0].shape
          input_dim = input_shape[-1]
          ones = K.ones_like(K.reshape(x[:, 0, 0], (-1, 1)))
          ones = K.concatenate([ones] * input_dim, 1)
          B_W = K.in_train_phase(K.dropout(ones, self.dropout_W), ones)
          constants.append(B_W)
      else:
          constants.append(K.cast_to_floatx(1.))
      return constants 
Author: commaai, Project: research, Lines: 21, Source: layers.py

Example 6: get_constants

# Required import: from keras import backend [as alias]
# Or: from keras.backend import cast_to_floatx [as alias]
def get_constants(self, x):
        constants = []
        if 0 < self.dropout_U < 1:
            ones = K.ones_like(K.reshape(x[:, 0, 0], (-1, 1)))
            ones = K.tile(ones, (1, self.output_dim))
            B_U = [K.in_train_phase(K.dropout(ones, self.dropout_U), ones) for _ in range(3)]
            constants.append(B_U)
        else:
            constants.append([K.cast_to_floatx(1.) for _ in range(3)])

        if 0 < self.dropout_W < 1:
            input_shape = K.int_shape(x)
            input_dim = input_shape[-1]
            ones = K.ones_like(K.reshape(x[:, 0, 0], (-1, 1)))
            ones = K.tile(ones, (1, int(input_dim)))
            B_W = [K.in_train_phase(K.dropout(ones, self.dropout_W), ones) for _ in range(3)]
            constants.append(B_W)
        else:
            constants.append([K.cast_to_floatx(1.) for _ in range(3)])
        return constants 
Author: wentaozhu, Project: recurrent-attention-for-QA-SQUAD-based-on-keras, Lines: 22, Source: QnA.py

Example 7: get_constants

# Required import: from keras import backend [as alias]
# Or: from keras.backend import cast_to_floatx [as alias]
def get_constants(self, inputs, training=None):
        constants = []
        '''if 0 < self.dropout_U < 1:
            ones = K.ones_like(K.reshape(x[:, 0, 0], (-1, 1)))
            ones = K.tile(ones, (1, self.units))
            B_U = [K.in_train_phase(K.dropout(ones, self.dropout_U), ones) for _ in range(3)]
            constants.append(B_U)
        else:
            constants.append([K.cast_to_floatx(1.) for _ in range(3)])

        if 0 < self.dropout_W < 1:
            input_shape = K.int_shape(x)
            input_dim = input_shape[-1]
            ones = K.ones_like(K.reshape(x[:, 0, 0], (-1, 1)))
            ones = K.tile(ones, (1, int(input_dim)))
            B_W = [K.in_train_phase(K.dropout(ones, self.dropout_W), ones) for _ in range(3)]
            constants.append(B_W)
        else:'''
        constants.append([K.cast_to_floatx(1.) for _ in range(3)])
        return constants 
Author: wentaozhu, Project: recurrent-attention-for-QA-SQUAD-based-on-keras, Lines: 22, Source: rnnlayer.py

Example 8: get_constants

# Required import: from keras import backend [as alias]
# Or: from keras.backend import cast_to_floatx [as alias]
def get_constants(self, x):
        constants = []
        if 0 < self.dropout_U < 1:
            ones = K.ones_like(K.reshape(x[:, 0, 0], (-1, 1)))
            ones = K.concatenate([ones] * self.output_dim, 1)
            B_U = [K.in_train_phase(K.dropout(ones, self.dropout_U), ones) for _ in range(4)]
            constants.append(B_U)
        else:
            constants.append([K.cast_to_floatx(1.) for _ in range(4)])

        if 0 < self.dropout_W < 1:
            input_shape = self.input_spec[0].shape
            input_dim = input_shape[-1]
            ones = K.ones_like(K.reshape(x[:, 0, 0], (-1, 1)))
            ones = K.concatenate([ones] * input_dim, 1)
            B_W = [K.in_train_phase(K.dropout(ones, self.dropout_W), ones) for _ in range(4)]
            constants.append(B_W)
        else:
            constants.append([K.cast_to_floatx(1.) for _ in range(4)])
        return constants 
Author: braingineer, Project: ikelos, Lines: 22, Source: rtn.py

Example 9: get_constants

# Required import: from keras import backend [as alias]
# Or: from keras.backend import cast_to_floatx [as alias]
def get_constants(self, inputs, training=None):
        constants = []
        constants.append([K.cast_to_floatx(1.) for _ in range(3)])

        if 0. < self.recurrent_dropout < 1:
            ones = K.ones_like(K.reshape(inputs[:, 0, 0], (-1, 1)))
            ones = K.tile(ones, (1, self.units))

            def dropped_inputs():
                return K.dropout(ones, self.recurrent_dropout)
            rec_dp_mask = [K.in_train_phase(dropped_inputs,
                                            ones,
                                            training=training) for _ in range(3)]
            constants.append(rec_dp_mask)
        else:
            constants.append([K.cast_to_floatx(1.) for _ in range(3)])
        return constants 
Author: Tuyki, Project: TT_RNN, Lines: 19, Source: TTRNN.py

Example 10: get_constants

# Required import: from keras import backend [as alias]
# Or: from keras.backend import cast_to_floatx [as alias]
def get_constants(self, inputs, training=None):
        constants = []
        if 0. < self.recurrent_dropout < 1.:
            ones = K.ones_like(K.reshape(inputs[:, 0, 0], (-1, 1)))
            ones = K.tile(ones, (1, self.units))

            def dropped_inputs():
                return K.dropout(ones, self.recurrent_dropout)

            rec_dp_mask = [K.in_train_phase(dropped_inputs,
                                            ones,
                                            training=training) for _ in range(3)]
            constants.append(rec_dp_mask)
        else:
            constants.append([K.cast_to_floatx(1.) for _ in range(3)])
        return constants 
Author: aspuru-guzik-group, Project: chemical_vae, Lines: 18, Source: tgru_k2_gpu.py

Example 11: bbalpha_softmax_cross_entropy_with_mc_logits

# Required import: from keras import backend [as alias]
# Or: from keras.backend import cast_to_floatx [as alias]
def bbalpha_softmax_cross_entropy_with_mc_logits(alpha):
    alpha = K.cast_to_floatx(alpha)
    def loss(y_true, mc_logits):
        # log(p_ij), p_ij = softmax(logit_ij)
        #assert mc_logits.ndim == 3
        mc_log_softmax = mc_logits - K.max(mc_logits, axis=2, keepdims=True)
        mc_log_softmax = mc_log_softmax - K.log(K.sum(K.exp(mc_log_softmax), axis=2, keepdims=True))
        mc_ll = K.sum(y_true * mc_log_softmax, -1)  # N x K
        K_mc = mc_ll.get_shape().as_list()[1]	# only for tensorflow
        return - 1. / alpha * (logsumexp(alpha * mc_ll, 1) + K.log(1.0 / K_mc))
    return loss


Author: YingzhenLi, Project: Dropout_BBalpha, Lines: 17, Source: BBalpha_dropout.py
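A self-contained sketch of how this loss could be exercised. The logsumexp helper below is an assumed stand-in for the one defined elsewhere in BBalpha_dropout.py, and the shapes (batch, MC samples, classes) are illustrative; paste this in the same module as the function above so the loss can resolve logsumexp.

import numpy as np
from keras import backend as K

def logsumexp(x, axis=None):
    # numerically stable log-sum-exp; stand-in for the helper in the original file
    x_max = K.max(x, axis=axis, keepdims=True)
    return K.log(K.sum(K.exp(x - x_max), axis=axis)) + K.max(x, axis=axis)

loss_fn = bbalpha_softmax_cross_entropy_with_mc_logits(alpha=0.5)

n, k_mc, n_classes = 8, 10, 3                       # batch, MC samples, classes
mc_logits = K.variable(np.random.randn(n, k_mc, n_classes))
labels = np.eye(n_classes)[np.random.randint(0, n_classes, size=n)]
y_true = K.variable(np.repeat(labels[:, None, :], k_mc, axis=1))  # labels tiled over the MC axis

print(K.eval(loss_fn(y_true, mc_logits)).shape)     # one loss value per example: (8,)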

Example 12: __init__

# Required import: from keras import backend [as alias]
# Or: from keras.backend import cast_to_floatx [as alias]
def __init__(self, l1=0., l2=0.,**kwargs):
        self.l1 = K.cast_to_floatx(l1)
        self.l2 = K.cast_to_floatx(l2)
        self.uses_learning_phase = True
        super(ActivityRegularizerOneDim, self).__init__(**kwargs)
        #self.layer = None 
Author: wentaozhu, Project: deep-mil-for-whole-mammogram-classification, Lines: 8, Source: customlayers.py

Example 13: get_constants

# Required import: from keras import backend [as alias]
# Or: from keras.backend import cast_to_floatx [as alias]
def get_constants(self, x):
        print("begin get_constants(self, x)")
        constants = []
        if 0 < self.dropout_U < 1:
            ones = K.ones_like(K.reshape(x[:, 0, 0], (-1, 1)))
            ones = K.tile(ones, (1, self.controller_output_dim))
            B_U = [K.in_train_phase(K.dropout(ones, self.dropout_U), ones) for _ in range(4)]
            constants.append(B_U)
        else:
            constants.append([K.cast_to_floatx(1.) for _ in range(4)])

        if 0 < self.dropout_W < 1:
            input_shape = self.input_spec[0].shape
            input_dim = input_shape[-1]
            ones = K.ones_like(K.reshape(x[:, 0, 0], (-1, 1)))
            ones = K.tile(ones, (1, int(input_dim)))
            B_W = [K.in_train_phase(K.dropout(ones, self.dropout_W), ones) for _ in range(4)]
            constants.append(B_W)
        else:
            constants.append([K.cast_to_floatx(1.) for _ in range(4)])

        # if 0 < self.dropout_R < 1:
        #     input_shape = self.input_spec[0].shape
        #     input_dim = input_shape[-1]
        #     ones = K.ones_like(K.reshape(x[:, 0, 0], (-1, 1)))
        #     ones = K.tile(ones, (1, int(input_dim)))
        #     B_R = [K.in_train_phase(K.dropout(ones, self.dropout_R), ones) for _ in range(4)]
        #     constants.append(B_R)
        # else:
        #     constants.append([K.cast_to_floatx(1.) for _ in range(4)])

        print("end get_constants(self, x)")
        return constants 
Author: SigmaQuan, Project: NTM-Keras, Lines: 35, Source: ntm.py

Example 14: __init__

# Required import: from keras import backend [as alias]
# Or: from keras.backend import cast_to_floatx [as alias]
def __init__(self, epsilon=0.0025, **kwargs):
        super(SineReLU, self).__init__(**kwargs)
        self.supports_masking = True
        self.epsilon = K.cast_to_floatx(epsilon) 
Author: keras-team, Project: keras-contrib, Lines: 6, Source: sinerelu.py
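A minimal usage sketch for the layer whose constructor is shown above (assuming it is exposed as keras_contrib.layers.SineReLU, as in the keras-contrib package):

from keras.models import Sequential
from keras.layers import Dense
from keras_contrib.layers import SineReLU

model = Sequential([
    Dense(64, input_shape=(20,)),
    SineReLU(epsilon=0.0025),   # the same epsilon default as in the constructor above
    Dense(1, activation='sigmoid'),
])
model.compile(optimizer='adam', loss='binary_crossentropy')
model.summary()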

Example 15: test_clip

# Required import: from keras import backend [as alias]
# Or: from keras.backend import cast_to_floatx [as alias]
def test_clip():
    clip_instance = constraints.clip()
    clipped = clip_instance(K.variable(example_array))
    assert(np.max(np.abs(K.eval(clipped))) <= K.cast_to_floatx(0.01))
    clip_instance = constraints.clip(0.1)
    clipped = clip_instance(K.variable(example_array))
    assert(np.max(np.abs(K.eval(clipped))) <= K.cast_to_floatx(0.1)) 
Author: keras-team, Project: keras-contrib, Lines: 9, Source: constraints_test.py


Note: The keras.backend.cast_to_floatx examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by their respective authors; copyright remains with the original authors, and any distribution or use should follow the corresponding project's license. Do not republish without permission.