This article collects typical usage examples of the Python method rbm.RBM.fit. If you are wondering what RBM.fit does, how to call it, or where to find it used in practice, the hand-picked code examples below should help. You can also explore further usage examples of the containing class rbm.RBM.
The following presents 5 code examples of RBM.fit, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
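For orientation, here is a minimal sketch of the sklearn-style call pattern that several of the examples below rely on. The argument names (n_components, n_iter, verbose) are taken from the snippets; the toy data and all numeric values are illustrative assumptions, not part of any of the original projects.
import numpy
from rbm import RBM

# toy binary data: 100 samples with 50 visible units (illustrative only)
X = (numpy.random.rand(100, 50) > 0.5).astype(numpy.float64)

# argument names mirror Examples 1 and 5; values are placeholders
model = RBM(n_components=16, n_iter=10, verbose=True)
model.fit(X)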
Example 1: fit_network
# Required import: from rbm import RBM [as alias]
# Alternatively: from rbm.RBM import fit [as alias]
def fit_network(self, X, labels=None):
    if labels is None:
        labels = numpy.zeros((X.shape[0], 2))
    self.layers = []
    temp_X = X
    # greedy layer-wise pretraining: each RBM is trained on the previous layer's hidden activations
    for j in range(self.num_layers):
        print "\nTraining Layer %i" % (j + 1)
        print "components: %i" % self.components[j]
        print "batch_size: %i" % self.batch_size[j]
        print "learning_rate: %0.3f" % self.learning_rate[j]
        print "bias_learning_rate: %0.3f" % self.bias_learning_rate[j]
        print "epochs: %i" % self.epochs[j]
        print "Sparsity: %s" % str(self.sparsity_rate[j])
        print "Sparsity Phi: %s" % str(self.phi)
        if j != 0:
            self.plot_weights = False
        model = RBM(n_components=self.components[j], batch_size=self.batch_size[j],
                    learning_rate=self.learning_rate[j], regularization_mu=self.sparsity_rate[j],
                    n_iter=self.epochs[j], verbose=True, learning_rate_bias=self.bias_learning_rate[j],
                    plot_weights=self.plot_weights, plot_histograms=self.plot_histograms, phi=self.phi)
        if j + 1 == self.num_layers and labels is not None:
            # top layer: fit jointly with the labels
            model.fit(numpy.asarray(temp_X), numpy.asarray(labels))
        else:
            model.fit(numpy.asarray(temp_X))
        temp_X = model._mean_hiddens(temp_X)  # hidden layer given visible units
        print "Trained Layer %i\n" % (j + 1)
        self.layers.append(model)
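The call to model._mean_hiddens above is what turns each trained layer's output into the next layer's input. As a rough sketch of the quantity such a method typically computes in a Bernoulli RBM (the actual implementation in the rbm module may differ), assuming weights W of shape (n_hidden, n_visible) and a hidden bias b:
import numpy

def mean_hiddens(v, W, b):
    # conditional mean of the hidden units given the visible units:
    # p(h = 1 | v) = sigmoid(v . W^T + b)
    return 1.0 / (1.0 + numpy.exp(-(numpy.dot(v, W.T) + b)))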
Example 2: pretrain_rbm_layers
# Required import: from rbm import RBM [as alias]
# Alternatively: from rbm.RBM import fit [as alias]
def pretrain_rbm_layers(v, validation_v=None, n_hidden=[], gibbs_steps=[], batch_size=[], num_epochs=[], learning_rate=[], probe_epochs=[]):
    rbm_layers = []
    n_rbm = len(n_hidden)
    # create rbm layers
    for i in range(n_rbm):
        rbm = RBM(n_hidden=n_hidden[i],
                  gibbs_steps=gibbs_steps[i],
                  batch_size=batch_size[i],
                  num_epochs=num_epochs[i],
                  learning_rate=learning_rate[i],
                  probe_epochs=probe_epochs[i])
        rbm_layers.append(rbm)
    # pretrain rbm layers
    input = v
    validation_input = validation_v
    for rbm, i in zip(rbm_layers, range(len(rbm_layers))):
        print '### pretraining RBM Layer {i}'.format(i=i)
        rbm.fit(input, validation_input)
        output = rbm.sample_h_given_v(input, rbm.params['W'], rbm.params['c'])
        if validation_input is not None:
            validation_output = rbm.sample_h_given_v(validation_input, rbm.params['W'], rbm.params['c'])
        else:
            validation_output = None
        input = output
        validation_input = validation_output
    return rbm_layers
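A hypothetical call to pretrain_rbm_layers, with per-layer hyperparameter lists matching the signature above; train_X, valid_X and all numeric values are illustrative assumptions rather than values from the original project.
rbm_layers = pretrain_rbm_layers(
    train_X,                     # training visible units, e.g. shape (n_samples, n_visible)
    validation_v=valid_X,
    n_hidden=[256, 64],          # two stacked RBMs
    gibbs_steps=[1, 1],
    batch_size=[32, 32],
    num_epochs=[10, 10],
    learning_rate=[0.1, 0.1],
    probe_epochs=[1, 1],
)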
Example 3: RBM
# Required import: from rbm import RBM [as alias]
# Alternatively: from rbm.RBM import fit [as alias]
'step_config': 1,
'learning_rate': 0.1,
'weight_decay': 0}

# initialize model object
rbm = RBM(layers=layers)

if args.model_file:
    assert os.path.exists(args.model_file), '%s not found' % args.model_file
    logger.info('loading initial model state from %s' % args.model_file)
    rbm.load_weights(args.model_file)

# setup standard fit callbacks
callbacks = Callbacks(rbm, train_set, output_file=args.output_file,
                      progress_bar=args.progress_bar)

# add a callback to calculate
if args.serialize > 0:
    # add callback for saving checkpoint file
    # every args.serialize epochs
    checkpoint_schedule = args.serialize
    checkpoint_model_path = args.save_path
    callbacks.add_serialize_callback(checkpoint_schedule, checkpoint_model_path)

rbm.fit(train_set, optimizer=optimizer, num_epochs=num_epochs, callbacks=callbacks)

# forward-propagate the first validation minibatch through the trained model
for mb_idx, (x_val, y_val) in enumerate(valid_set):
    hidden = rbm.fprop(x_val)
    break
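The loop above only propagates the first validation minibatch through the trained RBM before breaking. If the hidden representation of the whole validation set were needed, the same interface could be used along these lines (a sketch assuming rbm.fprop accepts each minibatch exactly as shown above):
# collect hidden representations for every validation minibatch
hidden_features = []
for x_val, y_val in valid_set:
    hidden_features.append(rbm.fprop(x_val))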
Example 4: GlorotUniform
# Required import: from rbm import RBM [as alias]
# Alternatively: from rbm.RBM import fit [as alias]
'sparse_cost': 0.001,
'sparse_target': 0.01,
'persistant': False,
'kPCD': 1,
'use_fast_weights': False
}
n_epochs = 1
init = GlorotUniform()
# it seems that the data have shape 30x30x30, though I think it should be 24 with padding=2
layers = [RBMConvolution3D([6, 6, 6, 48], strides=2, padding=0, init=init, name='l1_conv'),
          RBMConvolution3D([5, 5, 5, 160], strides=2, padding=0, init=init, name='l2_conv'),
          RBMConvolution3D([4, 4, 4, 512], strides=2, padding=0, init=init, name='l3_conv'),
          RBMLayer(1200, init=init, name='l4_rbm'),
          RBMLayerWithLabels(4000, n_classes, name='l4_rbm_with_labels')]
rbm = RBM(layers=layers)
# callbacks = Callbacks(rbm, data, output_file='./output.hdf5')
callbacks = Callbacks(rbm, data)
t = time.time()
rbm.fit(data, optimizer=parameters, num_epochs=n_epochs, callbacks=callbacks)
t = time.time() - t
print "Training time: ", t
Example 5: expit
# Required import: from rbm import RBM [as alias]
# Alternatively: from rbm.RBM import fit [as alias]
# In[6]:
shape = observed_data.variables["Prcp"][:].shape
lt = 176-1
ln = 23-1
y = observed_data.variables["Prcp"][:, lt, ln]
normalized_gridded = (gridded - gridded[:400].mean(axis=0)) / gridded[:400].std(axis=0)
#normalized_gridded = (normalized_gridded.T - normalized_gridded.T.mean(axis=0)) / normalized_gridded.T.std(axis=0)
#normalized_gridded = normalized_gridded.T
def expit(x, beta=1):
    return 1 / (1 + numpy.exp(-beta * x))
squashed_gridded = expit(normalized_gridded, beta=1)
height, bins = numpy.histogram(squashed_gridded, bins=100)
pyplot.bar(bins[:-1], height, width=1/100.)
pyplot.imshow(squashed_gridded[13].reshape(nlat,nlon))
# In[7]:
boltzmann = RBM(n_iter=100, plot_histograms=True, verbose=True, n_components=500)
boltzmann.fit(squashed_gridded)
# In[ ]:
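If the learned features were needed after fitting, Example 1 suggests the same RBM class exposes a _mean_hiddens helper; assuming that applies here as well (an assumption, since Example 5 never calls it), the hidden activations could be inspected like this:
# assumed to exist based on Example 1; the result would have shape (n_samples, 500)
hidden_activations = boltzmann._mean_hiddens(squashed_gridded)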