This article collects typical usage examples of the sklearn.hmm.GaussianHMM.fit method in Python. If you have been wondering what exactly GaussianHMM.fit does, how to call it, or what real uses of it look like, the curated code examples below may help. You can also explore further usage examples of the class this method belongs to, sklearn.hmm.GaussianHMM.
A total of 15 code examples of GaussianHMM.fit are shown below, sorted by popularity by default.
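All of the examples share the same basic call pattern, so a minimal, self-contained sketch of it may help before diving in. Note that the sklearn.hmm module only shipped with older scikit-learn releases (it was later deprecated and removed, and lives on as the separate hmmlearn package), and that in this old API fit expects a list of observation sequences rather than a single array. The toy data here is illustrative only:

# Minimal sketch; assumes an old scikit-learn release that still ships sklearn.hmm
import numpy as np
from sklearn.hmm import GaussianHMM

rng = np.random.RandomState(42)
X = np.column_stack([rng.randn(500), rng.randn(500)])  # rows are time steps, columns are features

model = GaussianHMM(n_components=3, covariance_type="diag", n_iter=1000)
model.fit([X])                    # the old API takes a list of observation sequences
hidden_states = model.predict(X)  # most likely hidden state for each time step

print(model.transmat_)            # learned state-transition matrix
print(model.means_)               # per-state means of the Gaussian emissions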
Example 1: predictWithHMM
# Required import: from sklearn.hmm import GaussianHMM [as alias]
# Or: from sklearn.hmm.GaussianHMM import fit [as alias]
def predictWithHMM(index, window=252):
    training_X = X[range(index-window, index), :]
    training_y = actual_y[range(index-window, index)]
    testing_X = X[index, :].reshape(1, training_X.shape[1])
    testing_y = y[index]
    # PCA DATA
    if perform_pca:
        pca = PCA(n_components=pca_components)
        pca.fit(training_X)
        training_X = pca.transform(training_X)
        testing_X = pca.transform(testing_X)
    model = GaussianHMM(n_components, "diag", n_iter=1000)
    model.fit([training_X])
    hidden_states = model.predict(training_X)
    predicted_hidden_state = model.predict(testing_X)
    # DO PROBABILISTIC APPROACH
    # pr = model.predict_proba(testing_X)
    # print pr
    prob = 0
    state_idx = (hidden_states == predicted_hidden_state)
    median_val = np.mean(training_y[state_idx])
    return int(median_val > 0), testing_y, prob
Example 2: train
# Required import: from sklearn.hmm import GaussianHMM [as alias]
# Or: from sklearn.hmm.GaussianHMM import fit [as alias]
def train(X, n_components):
    ###############################################################################
    # Run Gaussian HMM
    print("fitting to HMM and decoding ...")
    # make an HMM instance and execute fit
    model = GaussianHMM(n_components, covariance_type="diag", n_iter=2000)
    model.fit([X])
    # predict the optimal sequence of internal hidden state
    hidden_states = model.predict(X)
    print("done\n")
    ###############################################################################
    # print trained parameters and plot
    print("Transition matrix")
    print(model.transmat_)
    print()
    print("means and vars of each hidden state")
    for i in range(n_components):
        print("%dth hidden state" % i)
        print("mean = ", model.means_[i])
        print("var = ", np.diag(model.covars_[i]))
        print()
    return hidden_states, model
Example 3: use_hmm
# Required import: from sklearn.hmm import GaussianHMM [as alias]
# Or: from sklearn.hmm.GaussianHMM import fit [as alias]
def use_hmm(img_times, change_vals, fps=10, min_secs_for_train_to_pass=8):
    from sklearn.hmm import GaussianHMM
    X = np.column_stack(change_vals)
    n_components = 2
    model = GaussianHMM(n_components, covariance_type="diag", n_iter=1000)
    model.fit([X.T])
    #thresh = 10**-15
    #model.transmat_ = np.array([[1-thresh,thresh],[1-thresh,thresh]])
    hidden_states = model.predict(X.T)
    # print trained parameters and plot
    print("Transition matrix")
    print(model.transmat_)
    print()
    print("means and vars of each hidden state")
    for i in range(n_components):
        print("%dth hidden state" % i)
        print("mean = ", model.means_[i])
        print("var = ", np.diag(model.covars_[i]))
        print()
    if model.means_[0][0] > model.means_[1][0]:  # assume most frames have no train; switch labels if necessary
        hidden_states = 1 - hidden_states
    train_spotted = filter_out_short_motions(hidden_states, min_secs_for_train_to_pass, fps)
    plot_timeline(img_times, change_vals, hidden_states, train_spotted)
    utils.copy_image_subset(config.experiment_data_frames, config.experiment_output_frames_hmm, np.nonzero(train_spotted)[0])
    return train_spotted
Example 4: create_hmm_by_label
# Required import: from sklearn.hmm import GaussianHMM [as alias]
# Or: from sklearn.hmm.GaussianHMM import fit [as alias]
def create_hmm_by_label(label):
    seqs = get_sequences_by_label(label)
    n_states = 3
    hmm = GaussianHMM(n_states, covariance_type="diag", n_iter=1000)
    hmm.fit([seqs])
    return hmm
Example 5: run
# Required import: from sklearn.hmm import GaussianHMM [as alias]
# Or: from sklearn.hmm.GaussianHMM import fit [as alias]
def run(self, protos):
    models = []
    for nstate, label, seq in protos:
        train = self._training.run(seq)
        f1, f2 = self._feature.run(train, True)
        o = np.vstack((f1[:, 1], f2)).T
        (start, trans) = self.init_left_right_model(nstate)
        clf = GaussianHMM(n_components=nstate, covariance_type=self._covar,
                          transmat=trans, startprob=start)
        clf.fit(np.array([o]))
        models.append({'id': label, 'model': clf})
    self._models = models
    return models
Example 6: create_hmm_by_labels
# Required import: from sklearn.hmm import GaussianHMM [as alias]
# Or: from sklearn.hmm.GaussianHMM import fit [as alias]
def create_hmm_by_labels(labels, dbs):
    seqs_all = []
    for label in labels:
        seqs = get_sequences_by_label_multi_dbs(label, dbs)
        seqs_all.append(seqs)
    seqs_all = np.array(seqs_all)[0]
    #print seqs_all
    #print np.shape(seqs_all)
    n_states = 3
    hmm = GaussianHMM(n_states, covariance_type="full", n_iter=1000)
    hmm.fit(seqs_all)
    return hmm
Example 7: HMM
# Required import: from sklearn.hmm import GaussianHMM [as alias]
# Or: from sklearn.hmm.GaussianHMM import fit [as alias]
def HMM(data, sid, means_prior=None):
    # data is _not_ an event-frame, but an array
    # of the most recent trade events
    # Create scikit-learn model using the means
    # from the previous model as a prior
    model = GaussianHMM(HIDDEN_STATES, covariance_type="diag", n_iter=10,
                        means_prior=means_prior, means_weight=0.5)
    # Extract variation and volume
    diff = data.variation[sid].values
    volume = data.volume[sid].values
    X = np.column_stack([diff, volume])
    if len(diff) < HIDDEN_STATES:
        return None
    # Estimate model
    model.fit([X])
    return model
Example 8: get_hmms
# Required import: from sklearn.hmm import GaussianHMM [as alias]
# Or: from sklearn.hmm.GaussianHMM import fit [as alias]
def get_hmms(self):
    for gesture_type in self.gesture_types:
        print_status("Get_Hmms", "Fitting for gesture_type: " + gesture_type)
        ### Step 1: fill hmm_examples appropriately ###
        hmm_examples = []
        for gesture in self.gestures[gesture_type]:
            hmm_rep = gesture.get_hmm_rep()
            hmm_examples.append(hmm_rep)
        ### Step 2: fit parameters for the hmm ###
        hmm = GaussianHMM(self.num_hmm_states)
        hmm.fit(hmm_examples)
        ### Step 3: store the hmm in self.hmms ###
        self.hmms[gesture_type] = hmm
        print_inner_status(gesture_type, "predicted the following sequences: (score: sequence)")
        for example in hmm_examples:
            print " ", hmm.score(example), ": ", hmm.predict(example)
Example 9: gaussian_hmm_model
# Required import: from sklearn.hmm import GaussianHMM [as alias]
# Or: from sklearn.hmm.GaussianHMM import fit [as alias]
def gaussian_hmm_model(stock_market_quote, n_components=5):
    close_v = np.asarray(stock_market_quote.get_closing_price())
    volume = np.asanyarray(stock_market_quote.get_volume())
    volume = volume[:-1]
    diff = close_v[1:] - close_v[:-1]
    close_v = close_v[1:]
    X = np.column_stack([diff, volume])
    model = GaussianHMM(n_components, covariance_type="diag")
    model.fit([X])
    hidden_states = model.predict(X)
    print "Transition matrix"
    print model.transmat_
    print ""
    print "means and vars of each hidden state"
    for i in xrange(n_components):
        print "%dth hidden state" % i
        print "mean = ", model.means_[i]
        print "var = ", np.diag(model.covars_[i])
        print ""
    '''Visualization of Closing Price with respect to Volume, clustered by
    hidden states of data
    '''
    fig = mlp.figure()
    ax = fig.add_subplot(111)
    for i in xrange(n_components):
        idx = (hidden_states == i)
        ax.plot(volume[idx], close_v[idx], 'o', label="%dth hidden state" % i)
    ax.legend()
    ax.set_xlabel('Volume of Stock', fontsize=20)
    ax.set_ylabel('Closing Price of Stock', fontsize=20)
    ax.set_title("""Quote's Volume and closing volume change
                 in different hidden states""")
    ax.grid(True)
    mlp.show()
Example 10: hmm
# Required import: from sklearn.hmm import GaussianHMM [as alias]
# Or: from sklearn.hmm.GaussianHMM import fit [as alias]
def hmm(samples):
    model = GaussianHMM(n_components=3)
    samples = samples.dropna()
    idx = samples.index
    if samples.values.ndim < 2:
        #import pdb; pdb.set_trace()
        m = samples.values.shape
        samples = samples.values.reshape(m[0], 1)
    model.fit([samples])
    #_, states = model.decode(samples, algorithm='map')
    framelogprob = model._compute_log_likelihood(samples)
    logprob, fwdlattice = model._do_forward_pass(framelogprob)
    n, _ = model.means_.shape
    frame = pd.DataFrame(
        framelogprob, index=idx, columns=map(lambda x: "frame_"+str(x), range(n)))
    forward = pd.DataFrame(
        fwdlattice, index=idx, columns=map(lambda x: "forward_"+str(x), range(n)))
    #import pdb; pdb.set_trace()
    predict = pd.DataFrame(
        (fwdlattice-framelogprob)[1:, :], index=idx[:-1], columns=map(lambda x: "predict_"+str(x), range(n)))
    import pdb; pdb.set_trace()
    return model, frame.join(forward)
Example 11: main
# Required import: from sklearn.hmm import GaussianHMM [as alias]
# Or: from sklearn.hmm.GaussianHMM import fit [as alias]
def main():
    """
    First ARG: list of training files
    Second ARG: save name for model
    """
    file1 = sys.argv[1]
    outname = sys.argv[2]
    file_list = [f[0:-1] for f in open(file1, 'r')]
    models, transitions, priors = calc_transmat(file_list)
    hmm = GaussianHMM(
        transitions.shape[0],
        "full",
        #startprob=priors,
        n_iter=500,
        transmat=transitions,
        init_params='mcs',
        params='mcs',
    )
    feats, _ = load_feats_labels(file_list)
    feat, lab = load_feats_labels(file_list)
    #hmm.means_ = np.transpose(models['mean'])
    #hmm.covars_ = models['sigma']
    print 'Fitting'
    start = timeit.default_timer()
    hmm.fit([np.transpose(feat)])
    stop = timeit.default_timer()
    print 'Training Time: ' + str(stop - start)
    features, labels = load_feats_labels(['audio.arff'])
    _, seq = hmm.decode(np.transpose(features))
    #print filter(lambda(x,y): x==y, zip(labels, map(int2label, seq)))
    print len(filter(lambda (x, y): x == y, zip(labels, map(int2label, seq))))
    pickle.dump(hmm, open(outname, "wb"))
    plt.imshow(transitions, interpolation='nearest')
    plt.show()
Example 12: HMM
# Required import: from sklearn.hmm import GaussianHMM [as alias]
# Or: from sklearn.hmm.GaussianHMM import fit [as alias]
class HMM(object):
    '''
    class for creating and manipulating HMM model
    '''
    def __init__(self, **kwargs):
        if 'steam_obj' not in kwargs:
            self.steam_obj = Steam()
        else:
            self.steam_obj = kwargs['steam_obj']
        if 'weather_obj' not in kwargs:
            self.weather_obj = Weather()
        else:
            self.weather_obj = kwargs['weather_obj']
        steam_obj = self.steam_obj
        weather_obj = self.weather_obj
        hour_of_day = steam_obj.ts.index.map(lambda x: x.hour + (x.minute/60.0))
        day_of_week = steam_obj.ts.index.map(lambda x: x.dayofweek)
        df_hmm = pd.DataFrame({'steam': steam_obj.ts, 'weather': weather_obj.ts,
                               'hour_of_day': hour_of_day, 'day_of_week': day_of_week}, index=steam_obj.ts.index)
        # it is important that the column order is maintained
        # while slicing the HMM model
        self.df_hmm, self.X_hmm = self.gen_meta_data(steam_obj, weather_obj)
        if 'n_states' not in kwargs:
            self.plot_elbow(3, 15)
        else:
            self.n_states = kwargs['n_states']

    def __len__(self):
        return len(self.X_hmm)

    def build_model(self):
        n_states = self.n_states
        X_hmm = self.X_hmm
        self.model = GaussianHMM(n_states, covariance_type='diag', n_iter=1000)
        self.model.fit([X_hmm])
        self.hidden_states = self.model.predict(X_hmm)

    def build_forecast_model(self):
        model = self.model
        n_states = self.n_states
        model_forecast = copy.deepcopy(model)
        model_forecast.n_features = model.n_features - 1
        model_forecast._means_ = model.means_[:, 1:]
        model_forecast._covars_ = model._covars_[:, 1:]
        self.model_forecast = model_forecast

    def gen_meta_data(self, steam_obj=None, weather_obj=None):
        if steam_obj != None:
            hour_of_day = steam_obj.ts.index.map(lambda x: x.hour + (x.minute/60.0))
            day_of_week = steam_obj.ts.index.map(lambda x: x.dayofweek)
            df_hmm = pd.DataFrame({'steam': steam_obj.ts, 'weather': weather_obj.ts,
                                   'hour_of_day': hour_of_day}, index=steam_obj.ts.index)
            #df_hmm = pd.DataFrame({'steam':steam_obj.ts,'weather':weather_obj.ts, \
            #                       'hour_of_day':hour_of_day,'day_of_week':day_of_week},index=steam_obj.ts.index)
            # X_hmm = df_hmm.as_matrix(columns=['steam','weather'])
            X_hmm = df_hmm.as_matrix(columns=['steam', 'weather', 'hour_of_day'])
            #X_hmm = df_hmm.as_matrix(columns=['steam','weather','hour_of_day','day_of_week'])
        else:
            hour_of_day = weather_obj.ts.index.map(lambda x: x.hour + (x.minute/60.0))
            day_of_week = weather_obj.ts.index.map(lambda x: x.dayofweek)
            df_hmm = pd.DataFrame({'weather': weather_obj.ts,
                                   'hour_of_day': hour_of_day}, index=weather_obj.ts.index)
            #df_hmm = pd.DataFrame({'weather':weather_obj.ts, \
            #                       'hour_of_day':hour_of_day,'day_of_week':day_of_week},index=weather_obj.ts.index)
            # X_hmm = df_hmm.as_matrix(columns=['weather'])
            X_hmm = df_hmm.as_matrix(columns=['weather', 'hour_of_day'])
            #X_hmm = df_hmm.as_matrix(columns=['weather','hour_of_day','day_of_week'])
        return df_hmm, X_hmm

    def plot_model(self, x_ax=None, y_ax=None):
        X_hmm = self.X_hmm
        steam_ts = self.steam_obj.ts
        if x_ax == None:
            x_ax = np.asarray([item.to_datetime() for item in steam_ts.index])
        if y_ax == None:
            y_ax = X_hmm[:, 0]
        hidden_states = self.hidden_states
        n_states = self.n_states
        fig = plt.figure()
        ax = fig.add_subplot(111)
        for i in xrange(n_states):
            print i
            idx = (hidden_states == i)
            if i < 7:
                ax.plot(x_ax[idx], y_ax[idx], 'o', label='%dth state' % i)
            elif i < 14:
                ax.plot(x_ax[idx], y_ax[idx], 'x', label='%dth state' % i)
            elif i < 21:
                ax.plot(x_ax[idx], y_ax[idx], '+', label='%dth state' % i)
            elif i < 28:
                ax.plot(x_ax[idx], y_ax[idx], '*', label='%dth state' % i)
        ax.set_title('%d State HMM' % (n_states))
        ax.legend()
        ax.set_ylabel('Load (Mlb/Hr)')
        ax.set_xlabel('Time')
        ax.grid(True)
        plt.show()

    def plot_elbow(self, start, end):
        #......... remainder of this method omitted .........
Example 13: __init__
# Required import: from sklearn.hmm import GaussianHMM [as alias]
# Or: from sklearn.hmm.GaussianHMM import fit [as alias]
class GaussianHmmLib:
    """
    ref: http://scikit-learn.org/0.14/auto_examples/applications/plot_hmm_stock_analysis.html
    https://www.quantopian.com/posts/inferring-latent-states-using-a-gaussian-hidden-markov-model
    bear market: smaller mean, higher variance
    bull market: higher mean, smaller variance
    """

    def __init__(self, dbhandler, *args, **kwargs):
        self.dbhandler = dbhandler
        self.sids = self.dbhandler.stock.ids
        self.n_components = int(kwargs.pop('n_components')) or 5
        self.n_iter = int(kwargs.pop('n_iter')) or 1000

    def run(self, data):
        sid = self.sids[0]
        self.dates = data[sid]['price'].values
        self.close_v = data[sid]['close_v'].values
        self.volume = data[sid]['volume'].values[1:]
        # take diff of close value
        # this makes len(diff) = len(close_t) - 1
        # therefore, other quantities also need to be shifted
        self.diff = self.close_v[1:] - self.close_v[:-1]
        # pack diff and volume for training
        self.X = np.column_stack([self.diff, self.volume])
        # make an HMM instance and execute fit
        self.model = GaussianHMM(self.n_components, covariance_type="diag", n_iter=self.n_iter)
        self.model.fit([self.X], n_iter=self.n_iter)
        # predict the optimal sequence of internal hidden state
        self.hidden_states = self.model.predict(self.X)

    def report(self):
        # print trained parameters and plot
        print "Transition matrix"
        print self.model.transmat_
        print ""
        print "means and vars of each hidden state"
        for i in xrange(self.n_components):
            print "%dth hidden state" % i
            print "mean = ", self.model.means_[i]
            print "var = ", np.diag(self.model.covars_[i])
            print ""
        years = YearLocator()    # every year
        months = MonthLocator()  # every month
        yearsFmt = DateFormatter('%Y')
        fig = plt.figure()
        ax = fig.add_subplot(111)
        for i in xrange(self.n_components):
            # use fancy indexing to plot data in each state
            idx = (self.hidden_states == i)
            ax.plot_date(self.dates[idx], self.close_v[idx], 'o', label="%dth hidden state" % i)
        ax.legend()
        # format the ticks
        ax.xaxis.set_major_locator(years)
        ax.xaxis.set_major_formatter(yearsFmt)
        ax.xaxis.set_minor_locator(months)
        ax.autoscale_view()
        # format the coords message box
        ax.fmt_xdata = DateFormatter('%Y-%m-%d')
        ax.fmt_ydata = lambda x: '$%1.2f' % x
        ax.grid(True)
        fig.autofmt_xdate()
        plt.savefig("gaussianhmm_%s.png" % (self.sids[0]))
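The docstring of GaussianHmmLib maps market regimes onto the statistics of the fitted states (bear: smaller mean, higher variance; bull: larger mean, smaller variance). Below is a hedged sketch, not part of the original example, of how the states of a fitted model such as self.model could be ranked along those lines; the regime labels are purely illustrative:

# Illustrative helper (assumes a fitted GaussianHMM whose first feature is the
# day-over-day price change): rank hidden states by that feature's mean.
import numpy as np

def rank_states_by_mean(model):
    order = np.argsort(model.means_[:, 0])  # ascending: "bear-like" first, "bull-like" last
    for state in order:
        avg_var = np.diag(model.covars_[state]).mean()
        print("state %d: mean = %.4f, avg var = %.4f" % (state, model.means_[state, 0], avg_var))
    return order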
Example 14: overlapped_samples
# Required import: from sklearn.hmm import GaussianHMM [as alias]
# Or: from sklearn.hmm.GaussianHMM import fit [as alias]
t, last_index = overlapped_samples(file_path, incident_reported_time=int(incident_time), overlap=5, window=10, with_end=2)
if t is None:
    print file_path, 'is bad'
else:
    model.means_ = means
    model.covars_ = covs
    print 'shape initial', np.shape(covs)
    '''
    best_seq = model.decode(t)
    print 'initial,', best_seq
    print 'final means', model.means_
    print 'initial trans', tmat
    print 'initial startprobs', smat, sum(smat)
    '''
    model.fit([t])
    best_seq = model.decode(t)
    print 'file', file_path
    print 'final,', best_seq
    #print 'final means', model.means_
    #print 'final trans', model.transmat_
    #print 'final startprob', model.startprob_
    if np.isnan(model.means_).any() == False and np.isnan(model.covars_).any() == False:
        means = model.means_
        covs = np.array([np.diag(model.covars_[0])])
        for i in range(1, model.n_components):
            covs = np.vstack((covs, [np.diag(model.covars_[i])]))
        print 'shape after', np.shape(covs)
        tmat = model.transmat_
Example 15: GaussianHMM
# Required import: from sklearn.hmm import GaussianHMM [as alias]
# Or: from sklearn.hmm.GaussianHMM import fit [as alias]
print "Doing replicate", repInx, "/", numReps, "with", numState, "states"
sys.stdout.flush()
# cluster all the available data and use that as initial point
means = cluster.KMeans(n_clusters=numState).fit(indata.iloc[:, 0:num_data]).cluster_centers_
cv = np.cov(indata.iloc[:, 0:num_data].T)
covars = mixture.distribute_covar_matrix_to_match_covariance_type(cv, "tied", num_data)
covars[covars == 0] = 1e-5
model = GaussianHMM(numState, covariance_type="tied", n_iter=1000,
                    init_params='abdefghijklnopqrstuvwxyzABDEFGHIJKLNOPQRSTUVWXYZ')
model.means_ = means
model.covars_ = covars
print("Fitting model...")
sys.stdout.flush()
model.fit(data)
print("Decoding states...")
sys.stdout.flush()
# do a loop over everything and record in one long array
states = np.array([])
score = 0
for i in range(0, len(data)):
    hidden_states = model.decode(data[i])
    states = np.append(states, hidden_states[1])
    score = score + model.score(data[i])
print("Saving data...")
sys.stdout.flush()
# save the states and LLH