This article collects typical usage examples of the Python method keras.backend.random_binomial. If you are wondering what backend.random_binomial does, how to call it, or how it is used in practice, the curated code examples below may help. You can also explore the other methods of the keras.backend module.
The following shows 8 code examples of backend.random_binomial, sorted by popularity by default.
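Before the full examples, a quick way to see what random_binomial returns is to evaluate a small mask directly. This is a minimal sketch, assuming an older standalone Keras (or a tf.keras version that still exposes K.random_binomial on the backend module):

import keras.backend as K

# Draw a 3x4 tensor of independent Bernoulli(0.5) samples (values 0.0 or 1.0).
mask = K.random_binomial((3, 4), p=0.5, dtype=K.floatx())
print(K.eval(mask))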
Example 1: call
# Required import: from keras import backend [as alias]
# Or: from keras.backend import random_binomial [as alias]
def call(self, x, mask=None):
    if self.mode == 'maximum_likelihood':
        # draw maximum likelihood sample from Bernoulli distribution
        #   x* = argmax_x p(x) = 1  if p(x=1) >= 0.5
        #                        0  otherwise
        return K.round(x)
    elif self.mode == 'random':
        # draw random sample from Bernoulli distribution
        #   x* = x ~ p(x) = 1  if p(x=1) > uniform(0, 1)
        #                   0  otherwise
        #return self.srng.binomial(size=x.shape, n=1, p=x, dtype=K.floatx())
        return K.random_binomial(x.shape, p=x, dtype=K.floatx())
    elif self.mode == 'mean_field':
        # draw mean-field approximation sample from Bernoulli distribution
        #   x* = E[p(x)] = E[Bern(x; p)] = p
        return x
    elif self.mode == 'nrlu':
        return nrlu(x)
    else:
        raise NotImplementedError('Unknown sample mode!')
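The three main branches above can be reproduced outside the layer. The snippet below is a minimal standalone sketch (not the original layer class) that treats a constant tensor as the Bernoulli probabilities p(x=1):

import keras.backend as K

probs = K.constant([[0.1, 0.5, 0.9]])
ml_sample   = K.round(probs)                       # maximum likelihood: 1 iff p(x=1) >= 0.5
rand_sample = K.random_binomial((1, 3), p=probs,
                                dtype=K.floatx())  # stochastic draw: 1 iff p(x=1) > uniform(0, 1)
mean_field  = probs                                # mean-field: E[Bern(x; p)] = p
print(K.eval(ml_sample), K.eval(rand_sample), K.eval(mean_field))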
Example 2: sample_h_given_x
# Required import: from keras import backend [as alias]
# Or: from keras.backend import random_binomial [as alias]
def sample_h_given_x(self, x):
    h_pre = K.dot(x, self.Wrbm) + self.bh
    h_sigm = self.activation(self.scaling_h_given_x * h_pre)
    # dropout noise
    #if(0.0 < self.p < 1.0):
    #    noise_shape = self._get_noise_shape(h_sigm)
    #    h_sigm = K.in_train_phase(K.dropout(h_sigm, self.p, noise_shape), h_sigm)
    if(self.hidden_unit_type == 'binary'):
        h_samp = K.random_binomial(shape=h_sigm.shape, p=h_sigm)
        # random sample
        #   \hat{h} = 1,  if p(h=1|x) > uniform(0, 1)
        #             0,  otherwise
    elif(self.hidden_unit_type == 'nrlu'):
        h_samp = nrlu(h_pre)
    else:
        h_samp = h_sigm
    if(0.0 < self.p < 1.0):
        noise_shape = self._get_noise_shape(h_samp)
        h_samp = K.in_train_phase(K.dropout(h_samp, self.p, noise_shape), h_samp)
    return h_samp, h_pre, h_sigm
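For context, the binary branch of sample_h_given_x can be exercised on its own. The sketch below uses hypothetical stand-ins (W, bh and a fixed batch shape) for the layer's Wrbm, bh and scaling attributes:

import numpy as np
import keras.backend as K

x  = K.constant(np.random.rand(8, 6).astype('float32'))          # visible units, batch of 8
W  = K.constant(0.01 * np.random.randn(6, 4).astype('float32'))  # visible-to-hidden weights
bh = K.zeros((4,))                                                # hidden bias

h_pre  = K.dot(x, W) + bh                      # pre-activation
h_sigm = K.sigmoid(h_pre)                      # p(h=1 | x)
h_samp = K.random_binomial((8, 4), p=h_sigm)   # binary sample: 1 where p(h=1|x) > uniform(0, 1)
print(K.eval(h_samp))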
Example 3: sample_x_given_h
# Required import: from keras import backend [as alias]
# Or: from keras.backend import random_binomial [as alias]
def sample_x_given_h(self, h):
    x_pre = K.dot(h, self.Wrbm.T) + self.bx
    if(self.visible_unit_type == 'gaussian'):
        x_samp = self.scaling_x_given_h * x_pre
        return x_samp, x_samp, x_samp
    else:
        x_sigm = K.sigmoid(self.scaling_x_given_h * x_pre)
        x_samp = K.random_binomial(shape=x_sigm.shape, p=x_sigm)
        return x_samp, x_pre, x_sigm
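The visible-unit direction mirrors Example 2: Gaussian visibles keep the linear pre-activation as the sample, while binary visibles draw from a Bernoulli with the sigmoid as its mean. A minimal sketch with hypothetical h, W and bx in place of the layer's attributes:

import numpy as np
import keras.backend as K

h  = K.constant(np.random.rand(8, 4).astype('float32'))
W  = K.constant(0.01 * np.random.randn(6, 4).astype('float32'))
bx = K.zeros((6,))

x_pre      = K.dot(h, K.transpose(W)) + bx
x_gaussian = x_pre                                          # 'gaussian' visible units
x_binary   = K.random_binomial((8, 6), p=K.sigmoid(x_pre))  # binary visible units
print(K.eval(x_binary))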
Example 4: call
# Required import: from keras import backend [as alias]
# Or: from keras.backend import random_binomial [as alias]
def call(self, input, deterministic=False, **kwargs):
    if self.gain is not None:
        input = input * self.gain
    if deterministic or not self.strength:
        return input
    in_shape = self.input_shape
    in_axes = range(len(in_shape))
    in_shape = [in_shape[axis] if in_shape[axis] is not None else input.shape[axis] for axis in in_axes]  # None => Theano expr
    rnd_shape = [in_shape[axis] for axis in self.axes]
    broadcast = [self.axes.index(axis) if axis in self.axes else 'x' for axis in in_axes]
    one = K.constant(1)
    if self.mode == 'drop':
        p = one - self.strength
        rnd = K.random_binomial(tuple(rnd_shape), p=p, dtype=input.dtype) / p
    elif self.mode == 'mul':
        rnd = (one + self.strength) ** K.random_normal(tuple(rnd_shape), dtype=input.dtype)
    elif self.mode == 'prop':
        coef = self.strength * K.constant(np.sqrt(np.float32(self.input_shape[1])))
        rnd = K.random_normal(tuple(rnd_shape), dtype=input.dtype) * coef + one
    else:
        raise ValueError('Invalid GDropLayer mode', self.mode)
    if self.normalize:
        rnd = rnd / K.sqrt(K.mean(rnd ** 2, axis=3, keepdims=True))
    return input * K.permute_dimensions(rnd, broadcast)
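The 'drop' branch is an inverted-dropout mask built from random_binomial: each kept element is rescaled by 1/p so the expected activation is unchanged. A minimal sketch with a hypothetical drop strength of 0.2:

import keras.backend as K

strength = 0.2                     # drop probability (illustrative value)
keep_p   = 1.0 - strength
x    = K.random_normal((4, 8))
mask = K.random_binomial((4, 8), p=keep_p) / keep_p   # 0 with prob 0.2, 1/0.8 with prob 0.8
print(K.eval(x * mask))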
Example 5: _random_arr
# Required import: from keras import backend [as alias]
# Or: from keras.backend import random_binomial [as alias]
def _random_arr(self, count, p):
    return K.random_binomial((count,), p=p)
Example 6: _build_global_switch
# Required import: from keras import backend [as alias]
# Or: from keras.backend import random_binomial [as alias]
def _build_global_switch(self):
    # A randomly sampled tensor that will signal if the batch
    # should use global or local droppath
    return K.equal(K.random_binomial((), p=self.global_p, seed=self.switch_seed), 1.)
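The empty shape () yields a single scalar draw, so the whole batch flips one coin; K.switch can then route between two branches. A quick standalone check, assuming a global probability of 0.5:

import keras.backend as K

use_global = K.equal(K.random_binomial((), p=0.5), 1.0)         # scalar boolean tensor
branch     = K.switch(use_global, K.constant(1.0), K.constant(0.0))
print(K.eval(branch))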
Example 7: call
# Required import: from keras import backend [as alias]
# Or: from keras.backend import random_binomial [as alias]
def call(self, x, mask=None):
    if isinstance(x, list):
        x, _ = x
    if mask is not None and isinstance(mask, list):
        mask, _ = mask
    if 0. < self.dropout < 1.:
        retain_p = 1. - self.dropout
        dims = self.W._keras_shape[:-1]
        B = K.random_binomial(dims, p=retain_p) * (1. / retain_p)
        B = K.expand_dims(B)
        W = K.in_train_phase(self.W * B, self.W)
    else:
        W = self.W
    if self.mode == 'matrix':
        return K.gather(W, x)
    elif self.mode == 'tensor':
        # quick and dirty: only allowing for 3dim inputs when it's tensor mode
        assert K.ndim(x) == 3
        # put sequence on first; gather; take diagonal across shared batch dimension
        # in other words, W is (B, S, F)
        # incoming x is (B, S, A)
        inds = K.arange(self.W._keras_shape[0])
        #out = K.gather(K.permute_dimensions(W, (1,0,2)), x).diagonal(axis1=0, axis2=3)
        #return K.permute_dimensions(out, (3,0,1,2))
        ### method above doesn't do grads =.=
        # tensor abc goes to bac, indexed onto with xyz, goes to xyzac,
        #   x == a, so shape to xayzc == xxyzc
        # take diagonal on first two: xyzc
        #out = K.colgather()
        out = K.gather(K.permute_dimensions(W, (1,0,2)), x)
        out = K.permute_dimensions(out, (0,3,1,2,4))
        out = K.gather(out, (inds, inds))
        return out
    else:
        raise Exception('sanity check. should not be here.')

    #all_dims = T.arange(len(self.W._keras_shape))
    #first_shuffle = [all_dims[self.embed_dim]] + all_dims[:self.embed_dim] + all_dims[self.embed_dim+1:]
    ## 1. take diagonal from 0th to
    ## change of tactics
    ## embed on time or embed on batch. that's all I'm supporting.
    ## if it's embed on time, then, x.ndim+1 is where batch will be, and is what
    ## i need to take the diagonal over.
    ## now dim shuffle the xdims + 1 to the front.
    #todo: get second shuffle or maybe find diagonal calculations
    #out = K.gather(W, x)
    #return out

    ### reference
    #A = S(np.arange(60).reshape(3,4,5))
    #x = S(np.random.randint(0, 4, (3,4,10)))
    #x_emb = A.dimshuffle(1,0,2)[x].dimshuffle(0,3,1,2,4)[T.arange(A.shape[0]), T.arange(A.shape[0])]
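The dropout applied to W above zeroes (and rescales) whole embedding rows rather than individual weights: one Bernoulli draw per vocabulary entry, broadcast across the embedding dimension. A minimal sketch with a hypothetical 10x5 embedding matrix:

import numpy as np
import keras.backend as K

W = K.constant(np.random.randn(10, 5).astype('float32'))    # (vocab, embed_dim)
retain_p = 0.8
B = K.random_binomial((10,), p=retain_p) * (1.0 / retain_p)  # one draw per vocabulary row
B = K.expand_dims(B)                                         # (10, 1), broadcasts over columns
W_dropped = W * B                                            # whole rows dropped or rescaled together
x = K.constant([[1, 3, 7]], dtype='int32')
print(K.eval(K.gather(W_dropped, x)).shape)                  # (1, 3, 5)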
Example 8: _get_sampler_by_string
# Required import: from keras import backend [as alias]
# Or: from keras.backend import random_binomial [as alias]
def _get_sampler_by_string(self, loss):
    output = self.outputs[0]
    inputs = self.inputs
    if loss in ["MSE", "mse", "mean_squared_error"]:
        output += samplers.random_normal(K.shape(output), mean=0.0, std=1.0)
        draw_sample = K.function(inputs + [K.learning_phase()], [output])

        def sample_gaussian(inputs, use_dropout=False):
            '''
            Helper to draw samples from a Gaussian distribution
            '''
            return draw_sample(inputs + [int(use_dropout)])[0]

        return sample_gaussian
    elif loss == "binary_crossentropy":
        output = K.random_binomial(K.shape(output), p=output)
        draw_sample = K.function(inputs + [K.learning_phase()], [output])

        def sample_binomial(inputs, use_dropout=False):
            '''
            Helper to draw samples from a binomial distribution
            '''
            return draw_sample(inputs + [int(use_dropout)])[0]

        return sample_binomial
    elif loss in ["mean_absolute_error", "mae", "MAE"]:
        output += samplers.random_laplace(K.shape(output), mu=0.0, b=1.0)
        draw_sample = K.function(inputs + [K.learning_phase()], [output])

        def sample_laplace(inputs, use_dropout=False):
            '''
            Helper to draw samples from a Laplace distribution
            '''
            return draw_sample(inputs + [int(use_dropout)])[0]

        return sample_laplace
    elif loss == "mixture_of_gaussians":
        pi, mu, log_sig = densities.split_mixture_of_gaussians(output, self.n_components)
        samples = samplers.random_gmm(pi, mu, K.exp(log_sig))
        draw_sample = K.function(inputs + [K.learning_phase()], [samples])
        return lambda inputs, use_dropout: draw_sample(inputs + [int(use_dropout)])[0]
    else:
        raise NotImplementedError("Unrecognised loss: %s. "
                                  "Cannot build a generic sampler" % loss)
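For the binary_crossentropy branch in particular, the pattern is to treat the network's sigmoid outputs as Bernoulli means and draw one 0/1 sample per output unit. The sketch below is a self-contained, hypothetical version of that branch (a tiny one-layer model, assuming standalone Keras with a graph-mode backend where K.function and K.learning_phase behave as above):

import numpy as np
import keras.backend as K
from keras.models import Sequential
from keras.layers import Dense

model = Sequential([Dense(3, activation='sigmoid', input_shape=(4,))])

probs   = model.outputs[0]                            # sigmoid outputs used as Bernoulli means
samples = K.random_binomial(K.shape(probs), p=probs)  # one 0/1 draw per output unit
draw    = K.function(model.inputs + [K.learning_phase()], [samples])

x_batch = np.random.rand(2, 4).astype('float32')
print(draw([x_batch, 0])[0])                          # 0 = test phase, so dropout stays off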