This article collects typical usage examples of the GaussianHMM class from Python's sklearn.hmm module. If you have been wondering what the GaussianHMM class does, how to use it, or what GaussianHMM code looks like in practice, the curated class examples here may help.
The following presents 15 code examples of the GaussianHMM class, sorted by popularity by default.
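All of the examples below target the old scikit-learn API, in which GaussianHMM.fit takes a list of observation sequences, each an array of shape (n_samples, n_features). The sklearn.hmm module was later removed from scikit-learn and is maintained separately as the hmmlearn package, whose fit instead takes one concatenated array plus per-sequence lengths. A minimal sketch of the fit/predict/score pattern shared by the examples, on synthetic data (the 2-feature layout and all numbers are purely illustrative):

import numpy as np
from sklearn.hmm import GaussianHMM   # from hmmlearn.hmm import GaussianHMM in current code

# Synthetic observation sequence: 500 samples, 2 features (illustrative only)
rng = np.random.RandomState(0)
X = np.column_stack([rng.randn(500), rng.rand(500)])

model = GaussianHMM(n_components=3, covariance_type="diag", n_iter=100)
model.fit([X])                      # old API: a list of sequences
# model.fit(X, lengths=[len(X)])    # hmmlearn equivalent: concatenated array + lengths

hidden_states = model.predict(X)    # Viterbi decoding of the hidden state sequence
log_likelihood = model.score(X)     # total log-likelihood of the sequence
print(model.transmat_)
print(model.means_)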
Example 1: predictWithHMM

def predictWithHMM(index, window=252):
    # Rolling window: fit on the `window` observations before `index`,
    # then predict the single observation at `index`.
    # X, actual_y, y, perform_pca, pca_components and n_components are module-level globals.
    training_X = X[range(index - window, index), :]
    training_y = actual_y[range(index - window, index)]
    testing_X = X[index, :].reshape(1, training_X.shape[1])
    testing_y = y[index]

    # Optionally reduce dimensionality with PCA before fitting the HMM
    if perform_pca:
        pca = PCA(n_components=pca_components)
        pca.fit(training_X)
        training_X = pca.transform(training_X)
        testing_X = pca.transform(testing_X)

    model = GaussianHMM(n_components, "diag", n_iter=1000)
    model.fit([training_X])

    hidden_states = model.predict(training_X)
    predicted_hidden_state = model.predict(testing_X)

    # Probabilistic approach (unused):
    # pr = model.predict_proba(testing_X)
    # print pr

    prob = 0
    # Average the training targets that fall in the predicted hidden state
    state_idx = (hidden_states == predicted_hidden_state)
    median_val = np.mean(training_y[state_idx])
    return int(median_val > 0), testing_y, prob
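The function relies on those module-level globals rather than arguments, so calling it requires a surrounding backtest loop. A hypothetical driver (the names mirror the globals above; the hit-rate metric is only an assumption about how the predictions might be evaluated):

# Hypothetical rolling backtest over the global feature matrix X
window = 252                       # one trading year, as in the default above
predictions, actuals = [], []
for index in range(window, X.shape[0]):
    pred, actual, _ = predictWithHMM(index, window)
    predictions.append(pred)
    actuals.append(actual)
hit_rate = np.mean([int(p == int(a > 0)) for p, a in zip(predictions, actuals)])
print("directional hit rate:", hit_rate)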
Example 2: use_hmm

def use_hmm(img_times, change_vals, fps=10, min_secs_for_train_to_pass=8):
    from sklearn.hmm import GaussianHMM
    X = np.column_stack(change_vals)

    # Two hidden states: "no train" vs. "train passing"
    n_components = 2
    model = GaussianHMM(n_components, covariance_type="diag", n_iter=1000)
    model.fit([X.T])
    #thresh = 10**-15
    #model.transmat_ = np.array([[1-thresh, thresh], [1-thresh, thresh]])
    hidden_states = model.predict(X.T)

    # print trained parameters
    print("Transition matrix")
    print(model.transmat_)
    print()
    print("means and vars of each hidden state")
    for i in range(n_components):
        print("%dth hidden state" % i)
        print("mean = ", model.means_[i])
        print("var = ", np.diag(model.covars_[i]))
        print()

    # assume most frames have no train; switch labels if necessary
    if model.means_[0][0] > model.means_[1][0]:
        hidden_states = 1 - hidden_states

    train_spotted = filter_out_short_motions(hidden_states, min_secs_for_train_to_pass, fps)
    plot_timeline(img_times, change_vals, hidden_states, train_spotted)
    utils.copy_image_subset(config.experiment_data_frames,
                            config.experiment_output_frames_hmm,
                            np.nonzero(train_spotted)[0])
    return train_spotted
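filter_out_short_motions is not shown in this example. A plausible implementation, given the arguments, would simply discard "train present" runs shorter than the minimum time a train needs to pass; the following is only a sketch under that assumption, and the real helper may differ:

# Hypothetical stand-in for filter_out_short_motions: zero out 1-runs shorter
# than min_secs * fps frames.
def filter_out_short_motions(hidden_states, min_secs, fps):
    min_len = int(min_secs * fps)
    filtered = np.array(hidden_states, dtype=int).copy()
    start = None
    for i, s in enumerate(np.append(filtered, 0)):   # trailing 0 closes an open run
        if s == 1 and start is None:
            start = i
        elif s != 1 and start is not None:
            if i - start < min_len:
                filtered[start:i] = 0
            start = None
    return filtered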
Example 3: train

def train(X, n_components):
    ###############################################################################
    # Run Gaussian HMM
    print("fitting to HMM and decoding ...")

    # make an HMM instance and execute fit
    model = GaussianHMM(n_components, covariance_type="diag", n_iter=2000)
    model.fit([X])

    # predict the optimal sequence of internal hidden states
    hidden_states = model.predict(X)
    print("done\n")

    ###############################################################################
    # print trained parameters
    print("Transition matrix")
    print(model.transmat_)
    print()
    print("means and vars of each hidden state")
    for i in range(n_components):
        print("%dth hidden state" % i)
        print("mean = ", model.means_[i])
        print("var = ", np.diag(model.covars_[i]))
        print()

    return hidden_states, model
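A hypothetical call on synthetic data (the data and the choice of four states are assumptions for illustration):

import numpy as np
rng = np.random.RandomState(42)
X = np.column_stack([rng.randn(300), 5.0 + 0.1 * rng.randn(300)])
hidden_states, model = train(X, n_components=4)
print(hidden_states[:20])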
Example 4: create_hmm_by_label

def create_hmm_by_label(label):
    seqs = get_sequences_by_label(label)
    n_states = 3
    hmm = GaussianHMM(n_states, covariance_type="diag", n_iter=1000)
    hmm.fit([seqs])
    return hmm
Example 5: HMMGestureMonitor

class HMMGestureMonitor(GestureMonitor):

    def __init__(self, _train_ms_list, _gesture_name, FeatureExtractor=AVFeatureExtractor):
        GestureMonitor.__init__(self, _train_ms_list, _gesture_name, FeatureExtractor)

    def train(self, motion_sequences):
        # One feature sequence per motion sequence; drop any containing NaNs
        dfs = [ms.get_dataframe() for ms in motion_sequences]
        examples = [self.feature_extractor.extract(df) for df in dfs]
        examples = [e for e in examples if not np.isnan(np.sum(e))]
        self.hmm = GaussianHMM(n_components=5).fit(examples)
        self.score_threshold = GMScoreThreshold(self.hmm.score, examples)
        self.window_timespans = self.calculate_window_timespans(motion_sequences)

    def classify_window_df(self, window_df):
        features = self.feature_extractor.extract(window_df)
        score = self.score_threshold.classify(features)
        return score

    def get_current_reaction(self):
        scores = [self.hmm.score(self.feature_extractor.extract(window_df))
                  for window_df in self.get_window_dfs()]
        if len(scores) > 0:
            return np.max(scores)
        else:
            return None
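GMScoreThreshold is not defined in this snippet. One plausible reading, given that it is built from the HMM's score function and the training examples, is a threshold derived from the distribution of training log-likelihoods; the following is only a sketch under that assumption, and the n_std cutoff is arbitrary:

# Hypothetical stand-in for GMScoreThreshold
class GMScoreThreshold(object):
    def __init__(self, score_fn, examples, n_std=2.0):
        self.score_fn = score_fn
        scores = np.array([score_fn(e) for e in examples])
        self.threshold = scores.mean() - n_std * scores.std()

    def classify(self, features):
        # True if the window scores at least as well as typical training data
        return self.score_fn(features) >= self.threshold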
Example 6: run

def run(self, protos):
    models = []
    for nstate, label, seq in protos:
        train = self._training.run(seq)
        f1, f2 = self._feature.run(train, True)
        o = np.vstack((f1[:, 1], f2)).T

        # Left-to-right topology: the start and transition structure is fixed up front
        (start, trans) = self.init_left_right_model(nstate)
        clf = GaussianHMM(n_components=nstate, covariance_type=self._covar,
                          transmat=trans, startprob=start)
        clf.fit(np.array([o]))
        models.append({'id': label, 'model': clf})
    self._models = models
    return models
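init_left_right_model is not shown. For a left-to-right HMM it would typically return a start distribution concentrated on state 0 and a transition matrix allowing only self-loops and single forward steps; a sketch under that assumption (the 0.5 self-loop probability is arbitrary, and EM re-estimates the non-zero entries while the zeros stay zero):

# Hypothetical left-to-right initialisation
def init_left_right_model(nstate, self_loop=0.5):
    start = np.zeros(nstate)
    start[0] = 1.0
    trans = np.zeros((nstate, nstate))
    for i in range(nstate):
        if i == nstate - 1:
            trans[i, i] = 1.0                  # absorbing final state
        else:
            trans[i, i] = self_loop            # stay in the current state
            trans[i, i + 1] = 1.0 - self_loop  # or move one state forward
    return start, trans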
Example 7: create_hmm_by_labels

def create_hmm_by_labels(labels, dbs):
    seqs_all = []
    for label in labels:
        seqs = get_sequences_by_label_multi_dbs(label, dbs)
        seqs_all.append(seqs)
    seqs_all = np.array(seqs_all)[0]
    #print seqs_all
    #print np.shape(seqs_all)
    n_states = 3
    hmm = GaussianHMM(n_states, covariance_type="full", n_iter=1000)
    hmm.fit(seqs_all)
    return hmm
Example 8: get_trained_model

def get_trained_model(rootpath, condition, n_states, n_iterations, feature, cov_type):
    # File names follow the pattern <condition>-cond-<feature>-feat-<n_states>-states-<n_iterations>-iter-*.txt
    fname_mean = condition + '-cond-' + feature + '-feat-' + str(n_states) + '-states-' + str(n_iterations) + '-iter-mean.txt'
    fname_cov = condition + '-cond-' + feature + '-feat-' + str(n_states) + '-states-' + str(n_iterations) + '-iter-cov.txt'
    fname_tmat = condition + '-cond-' + feature + '-feat-' + str(n_states) + '-states-' + str(n_iterations) + '-iter-transtion.txt'

    constructed_path_mean = rootpath + condition + '/' + fname_mean
    mean = np.loadtxt(constructed_path_mean)

    # Flag (but do not delete) states whose mean vector is essentially zero
    iter_list = range(n_states)
    iter_list.reverse()
    deleted_means = []
    for i in iter_list:
        if mean[i][mean[i] > 0.01].shape[0] == 0:
            print 'skipping deleting ith mean:', i, mean[i]
            #mean = np.delete(mean, i, 0)
            #deleted_means.append(i)

    constructed_path_cov = rootpath + condition + '/' + fname_cov
    if cov_type == 'full':
        cov = load_full(constructed_path_cov, n_states, 10)
    else:
        cov = np.loadtxt(constructed_path_cov)

    constructed_path_tmat = rootpath + condition + '/' + fname_tmat
    tmat = np.loadtxt(constructed_path_tmat)

    # fix tmat if any of the means and covariances were deleted above
    deleted_means.sort()
    deleted_means.reverse()
    for di in deleted_means:
        tmat = np.delete(tmat, di, 1)
        tmat = np.delete(tmat, di, 0)

    # Start in the first state; renormalize each row of the transition matrix
    smat = np.zeros(tmat.shape[0])
    smat[0] = 1.0
    sum_fix = np.sum(tmat, axis=1)
    sum_fix = 1.0 / sum_fix
    #print tmat
    for i in range(tmat.shape[0]):
        tmat[i] = tmat[i] * sum_fix[i]
    #print 'corrected\n', tmat

    if n_states != tmat.shape[0]:
        print 'removed some states, n_states now corrected to:', tmat.shape[0], 'was originally', n_states
        n_states = tmat.shape[0]

    # Build the model from the loaded parameters; means and covariances are set directly below
    model = GaussianHMM(n_components=n_states, covariance_type=cov_type, startprob=smat, transmat=tmat, n_iter=0, init_params='mc')
    model.means_ = mean
    model.covars_ = cov
    return model
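The matching save step is not part of this example. For the non-full covariance branch it could plausibly be a trio of np.savetxt calls using the same file-name pattern as the loader; the following is only a sketch under that assumption (the format consumed by load_full for full covariances cannot be reconstructed from this snippet, and the exact array shapes expected for covars_ depend on covariance_type):

# Hypothetical save-side counterpart (diagonal covariances only)
def save_trained_model(model, rootpath, condition, n_states, n_iterations, feature):
    prefix = (rootpath + condition + '/' + condition + '-cond-' + feature + '-feat-'
              + str(n_states) + '-states-' + str(n_iterations) + '-iter-')
    np.savetxt(prefix + 'mean.txt', model.means_)
    # one row of per-feature variances per state (shape convention is an assumption)
    np.savetxt(prefix + 'cov.txt', np.array([np.diag(c) for c in model.covars_]))
    np.savetxt(prefix + 'transtion.txt', model.transmat_)   # spelling matches the loader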
Example 9: train

def train(self, motion_sequences):
    # Same training routine as HMMGestureMonitor.train in Example 5
    dfs = [ms.get_dataframe() for ms in motion_sequences]
    examples = [self.feature_extractor.extract(df) for df in dfs]
    examples = [e for e in examples if not np.isnan(np.sum(e))]
    self.hmm = GaussianHMM(n_components=5).fit(examples)
    self.score_threshold = GMScoreThreshold(self.hmm.score, examples)
    self.window_timespans = self.calculate_window_timespans(motion_sequences)
Example 10: __init__

def __init__(self, n_states, n_features):
    from sklearn.hmm import GaussianHMM
    # params='stmc': update startprob, transmat, means and covariances during fit
    self.impl = GaussianHMM(n_states, params='stmc')
    self._sequences = None
    self.means_ = None
    self.vars_ = None
    self.transmat_ = None
    self.startprob_ = None
Example 11: HMM

def HMM(data, sid, means_prior=None):
    # data is _not_ an event-frame, but an array
    # of the most recent trade events

    # Create scikit-learn model using the means
    # from the previous model as a prior
    model = GaussianHMM(HIDDEN_STATES,
                        covariance_type="diag",
                        n_iter=10,
                        means_prior=means_prior,
                        means_weight=0.5)

    # Extract variation and volume
    diff = data.variation[sid].values
    volume = data.volume[sid].values
    X = np.column_stack([diff, volume])

    if len(diff) < HIDDEN_STATES:
        return None

    # Estimate model
    model.fit([X])
    return model
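means_prior and means_weight set a prior on the state means for MAP re-estimation, so successive refits are pulled toward the previous model's means rather than starting from scratch. A hypothetical refit loop (the data source, symbol and update cadence are assumptions):

# Hypothetical warm-started refits: feed the previous means back in as the prior
model = None
for data in stream_of_event_batches():          # hypothetical data source
    prior = model.means_ if model is not None else None
    new_model = HMM(data, sid='AAPL', means_prior=prior)
    if new_model is not None:
        model = new_model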
Example 12: get_hmms

def get_hmms(self):
    for gesture_type in self.gesture_types:
        print_status("Get_Hmms", "Fitting for gesture_type: " + gesture_type)

        ### Step 1: fill hmm_examples appropriately ###
        hmm_examples = []
        for gesture in self.gestures[gesture_type]:
            hmm_rep = gesture.get_hmm_rep()
            hmm_examples.append(hmm_rep)

        ### Step 2: fit parameters for the hmm ###
        hmm = GaussianHMM(self.num_hmm_states)
        hmm.fit(hmm_examples)

        ### Step 3: store the hmm in self.hmms ###
        self.hmms[gesture_type] = hmm

        print_inner_status(gesture_type, "predicted the following sequences: (score: sequence)")
        for example in hmm_examples:
            print "    ", hmm.score(example), ": ", hmm.predict(example)
Example 13: gaussian_hmm_model

def gaussian_hmm_model(stock_market_quote, n_components=5):
    close_v = np.asarray(stock_market_quote.get_closing_price())
    volume = np.asanyarray(stock_market_quote.get_volume())
    volume = volume[:-1]

    # First-order differences of the closing price, aligned with volume
    diff = close_v[1:] - close_v[:-1]
    close_v = close_v[1:]
    X = np.column_stack([diff, volume])

    model = GaussianHMM(n_components, covariance_type="diag")
    model.fit([X])
    hidden_states = model.predict(X)

    print "Transition matrix"
    print model.transmat_
    print ""
    print "means and vars of each hidden state"
    for i in xrange(n_components):
        print "%dth hidden state" % i
        print "mean = ", model.means_[i]
        print "var = ", np.diag(model.covars_[i])
        print ""

    '''Visualization of closing price with respect to volume, clustered by
    the hidden states of the data
    '''
    fig = mlp.figure()
    ax = fig.add_subplot(111)
    for i in xrange(n_components):
        idx = (hidden_states == i)
        ax.plot(volume[idx], close_v[idx], 'o', label="%dth hidden state" % i)
    ax.legend()
    ax.set_xlabel('Volume of Stock', fontsize=20)
    ax.set_ylabel('Closing Price of Stock', fontsize=20)
    ax.set_title("""Quote's volume and closing price change
    in different hidden states""")
    ax.grid(True)
    mlp.show()
Example 14: main

def main():
    """
    First ARG: list of training files
    Second ARG: save name for model
    """
    file1 = sys.argv[1]
    outname = sys.argv[2]
    file_list = [f[0:-1] for f in open(file1, 'r')]
    models, transitions, priors = calc_transmat(file_list)

    hmm = GaussianHMM(
        transitions.shape[0],
        "full",
        #startprob=priors,
        n_iter=500,
        transmat=transitions,
        init_params='mcs',
        params='mcs',
    )

    feats, _ = load_feats_labels(file_list)
    feat, lab = load_feats_labels(file_list)
    #hmm.means_ = np.transpose(models['mean'])
    #hmm.covars_ = models['sigma']

    print 'Fitting'
    start = timeit.default_timer()
    hmm.fit([np.transpose(feat)])
    stop = timeit.default_timer()
    print 'Training Time: ' + str(stop - start)

    # Decode a held-out file and count correctly labelled frames
    features, labels = load_feats_labels(['audio.arff'])
    _, seq = hmm.decode(np.transpose(features))
    #print filter(lambda (x, y): x == y, zip(labels, map(int2label, seq)))
    print len(filter(lambda (x, y): x == y, zip(labels, map(int2label, seq))))

    pickle.dump(hmm, open(outname, "wb"))
    plt.imshow(transitions, interpolation='nearest')
    plt.show()
Example 15: hmm

def hmm(samples):
    model = GaussianHMM(n_components=3)
    samples = samples.dropna()
    idx = samples.index

    # Reshape a 1-D Series into a single-feature column vector
    if samples.values.ndim < 2:
        #import pdb; pdb.set_trace()
        m = samples.values.shape
        samples = samples.values.reshape(m[0], 1)
    model.fit([samples])
    #_, states = model.decode(samples, algorithm='map')

    # Per-frame log-likelihoods and the forward lattice (internal API)
    framelogprob = model._compute_log_likelihood(samples)
    logprob, fwdlattice = model._do_forward_pass(framelogprob)

    n, _ = model.means_.shape
    frame = pd.DataFrame(
        framelogprob, index=idx, columns=map(lambda x: "frame_" + str(x), range(n)))
    forward = pd.DataFrame(
        fwdlattice, index=idx, columns=map(lambda x: "forward_" + str(x), range(n)))
    #import pdb; pdb.set_trace()
    predict = pd.DataFrame(
        (fwdlattice - framelogprob)[1:, :], index=idx[:-1],
        columns=map(lambda x: "predict_" + str(x), range(n)))
    #import pdb; pdb.set_trace()
    return model, frame.join(forward)
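The snippet reaches into the private _compute_log_likelihood and _do_forward_pass methods to get per-frame quantities. The public score_samples method exposes closely related information: the total log-likelihood and the per-frame state posteriors (smoothed with both forward and backward passes, unlike the forward-only lattice above). A sketch of that alternative inside hmm(), reusing the same model, samples and idx:

# Possible public-API alternative to the private calls above
logprob, posteriors = model.score_samples(samples)
posterior_frame = pd.DataFrame(
    posteriors, index=idx,
    columns=["posterior_%d" % i for i in range(model.means_.shape[0])])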