

Python GaussianHMM.predict Method Code Examples

This article collects and summarizes typical usage examples of the Python method hmmlearn.hmm.GaussianHMM.predict. If you are wondering how exactly to use GaussianHMM.predict, or what working calls to it look like, the curated code examples below may help. You can also explore further usage examples of its containing class, hmmlearn.hmm.GaussianHMM.


The following presents 15 code examples of GaussianHMM.predict, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
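
Before working through the examples, here is a minimal, self-contained sketch of the typical fit/predict workflow. It uses the current hmmlearn API, where fit and predict take a single 2-D array plus an optional lengths argument; the synthetic data and parameter values are purely illustrative. Note that several of the older examples below instead pass a list of sequences, e.g. fit([X]), which only works with early hmmlearn releases.

import numpy as np
from hmmlearn.hmm import GaussianHMM

# Two synthetic 1-D sequences drawn from different regimes (illustrative data only)
rng = np.random.RandomState(0)
seq1 = rng.normal(loc=0.0, scale=1.0, size=(100, 1))
seq2 = rng.normal(loc=5.0, scale=1.0, size=(80, 1))

# Current hmmlearn API: concatenate the sequences and pass their lengths
X = np.concatenate([seq1, seq2])
lengths = [len(seq1), len(seq2)]

model = GaussianHMM(n_components=2, covariance_type="diag", n_iter=100)
model.fit(X, lengths)

# predict returns the Viterbi-decoded hidden state for every row of X
hidden_states = model.predict(X, lengths)
print(hidden_states[:10])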

Example 1: bench_gaussian_hmm

# Required import: from hmmlearn.hmm import GaussianHMM [as alias]
# Or: from hmmlearn.hmm.GaussianHMM import predict [as alias]
def bench_gaussian_hmm(size):
    title = "benchmarking Gaussian HMM on a sample of size {0}".format(size)
    print(title.center(36, " "))
    ghmm = GaussianHMM()
    ghmm.means_ = [[42], [24]]
    ghmm.covars_ = [[1], [1]]

    with timed_step("generating sample"):
        sample, _states = ghmm.sample(size)

    with timed_step("fitting"):
        fit = GaussianHMM(n_components=2).fit([sample])

    with timed_step("estimating states"):
        fit.predict(sample)
Author: LoganDing, Project: hmmlearn, Lines: 17, Source: speed.py

Example 2: fit

# Required import: from hmmlearn.hmm import GaussianHMM [as alias]
# Or: from hmmlearn.hmm.GaussianHMM import predict [as alias]
	def fit(self):

		if self.verbose:
			print "[Clustering] Clearing old model and segmentation"
		
		self.segmentation = []
		self.model = []


		new_segments = []
		new_model = []

		g = GaussianHMM(n_components=self.n_components)

		all_demos = self._demonstrations[0]
		lens = [np.shape(self._demonstrations[0])[0]]
		for i in range(1, len(self._demonstrations)):
			all_demos = np.concatenate([all_demos,self._demonstrations[i]])
			lens.append(np.shape(self._demonstrations[i])[0])

		g.fit(all_demos,lens) 
			
		for d in self._demonstrations:
			new_segments.append(self.findTransitions(g.predict(d)))
			#print g.predict(d)
			new_model.append(g)

		self.segmentation = new_segments
		self.model = new_model
Author: BerkeleyAutomation, Project: tsc, Lines: 31, Source: clustering.py

Example 3: main

# Required import: from hmmlearn.hmm import GaussianHMM [as alias]
# Or: from hmmlearn.hmm.GaussianHMM import predict [as alias]
def main(args):
    x, X = loadDiffRows(args.diffFile)
    model = GaussianHMM(n_components=3,
                        covariance_type="diag",
                        n_iter=100000000000)
    model.transmat_ = numpy.array([[0.5, 0.5, 0.0],
                                   [0.0, 0.5, 0.5],
                                   [0.0, 0.0, 1.0]])
    model.fit(X)
    print(model.transmat_)
    model.transmat_[0][2] = 0.
    model.transmat_[1][0] = 0.
    model.transmat_[2][0] = 0.
    model.transmat_[2][1] = 0.
    
    exp = args.outFile.split('/')[-1].split('_')[0]
    with open(args.outFile, 'w') as fout:
        print('exp\tbin\treads\tstate', file=fout)
        for seq in X:
            hiddenStates = model.predict(seq)
            for idx,v in enumerate(zip(x,hiddenStates)):
                r,h = v
                print(exp + '\t' + str(idx) + '\t'
                      + str(r) + '\t' + str(h),
                      file=fout)
Author: samesense, Project: pol2_states, Lines: 27, Source: hmm.py
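
Example 3 assigns a left-to-right transition matrix before fitting and then zeroes entries again afterwards, because fit re-estimates transmat_ by default. A hedged alternative sketch is shown below: excluding 't' from params and init_params (standard hmmlearn conventions) keeps the hand-specified transitions fixed during training. The observation data here is hypothetical.

import numpy as np
from hmmlearn.hmm import GaussianHMM

# Update start probabilities ('s'), means ('m') and covariances ('c'),
# but neither initialize nor re-estimate the transition matrix ('t').
model = GaussianHMM(n_components=3, covariance_type="diag", n_iter=100,
                    init_params="smc", params="smc")
model.transmat_ = np.array([[0.5, 0.5, 0.0],
                            [0.0, 0.5, 0.5],
                            [0.0, 0.0, 1.0]])

X = np.random.RandomState(1).normal(size=(300, 1))  # hypothetical observations
model.fit(X)
print(model.transmat_)          # left-to-right structure preserved
hidden_states = model.predict(X)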

Example 4: mainHMM

# Required import: from hmmlearn.hmm import GaussianHMM [as alias]
# Or: from hmmlearn.hmm.GaussianHMM import predict [as alias]
def mainHMM(filePrefix):
    X_train, length_train, X_test, length_test = loadOneRoute(filePrefix)
    # Run Gaussian HMM
    print "fitting to HMM and decoding ..."
    model = GaussianHMM(n_components=4, covariance_type="diag", n_iter=2000).fit(X_train[:, 0:5], length_train)
    hidden_states = model.predict(X_test[:, 0:5], length_test)
    print "done"

    print hidden_states[0:20]
    print hidden_states[20:40]
    print hidden_states[40:60]
    print hidden_states[60:80]

    # Print trained parameters and plot
    print("Transition matrix")
    print(model.transmat_)
    print("Start Prob")
    print(model.startprob_)

    print("Means and vars of each hidden state")
    for i in range(model.n_components):
        print("{0}th hidden state".format(i))
        print("mean = ", model.means_[i])
        print("var = ", np.diag(model.covars_[i]))


    print np.array(hidden_states).reshape((sum(length_test), 1))
Author: windy-lf, Project: AirTicketPredicting, Lines: 29, Source: HMM_test.py

Example 5: __init__

# Required import: from hmmlearn.hmm import GaussianHMM [as alias]
# Or: from hmmlearn.hmm.GaussianHMM import predict [as alias]
class HMM:
    __slots__ = [
        "model"
    ]

    def __init__(self):
        pass


    def draw(self, data):
        figure()
        plot(range(len(data)),data,alpha=0.8,color='red')
        show()


    def train(self, data, n_components):
        print("Training Data: %s" % data)
        self.data = data
        self.model = GaussianHMM(n_components, algorithm='viterbi', covariance_type='diag')
        X = np.reshape(data, (len(data),1))
        self.model = self.model.fit([X])

        self.hidden_states = self.model.predict(X)
        print("Sequence of States: " % self.hidden_states)


    def eval(self, obs):
        print("Testing Data: %s" % obs)
        X = np.reshape(obs, (len(obs),1))
        print("Eval: %s" % str(self.model.score(X)))


    def plot(self):
        fig = figure(facecolor="white")
        ax = fig.add_subplot(111)

        for i in range(self.model.n_components):
            # use fancy indexing to plot data in each state
            idx = (self.hidden_states == i)
            ax.plot(np.array(range(len(self.data)))[idx], np.array(self.data)[idx], '.', label="State %d" % (i+1))

        ax.legend()
        show()
Author: mkdmkk, Project: infaas, Lines: 45, Source: hmm.py
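
A possible way to drive the wrapper class above, with fabricated data (the method names come from the example itself; this is a usage sketch, not part of the original project):

import numpy as np

# hypothetical 1-D observation sequence
data = np.sin(np.linspace(0, 10, 200)) + 0.1 * np.random.randn(200)

h = HMM()
h.train(data, n_components=3)   # fits a GaussianHMM and decodes the hidden states
h.eval(data[:50])               # log-likelihood of a held-out slice
h.plot()                        # scatter of observations coloured by decoded state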

Example 6: HmmClassifier

# Required import: from hmmlearn.hmm import GaussianHMM [as alias]
# Or: from hmmlearn.hmm.GaussianHMM import predict [as alias]
class HmmClassifier():
    def __init__(self, referenceSeqs, inputSeq):
        self.referenceSeqs = referenceSeqs
        self.inputSeq = inputSeq

        # feel free to change this model
        self.model = GaussianHMM(n_components=2, covariance_type="full", n_iter=2000)

    def predict(self):
        probs = []
        for referenceSeq in self.referenceSeqs:
            #print "reference: {}".format(referenceSeq)
            self.model.fit(referenceSeq)
            hidden_states = self.model.predict(referenceSeq)
            prob = self.model.score(self.inputSeq)
            probs.append(prob)

        # return the index of the max prob
        return probs.index(max(probs))
Author: lujunzju, Project: AirTicketPredicting, Lines: 21, Source: HmmClassifier.py
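
A hedged usage sketch for the classifier above, with fabricated reference sequences (each sequence is a 2-D array of shape (n_samples, n_features), as hmmlearn expects):

import numpy as np

rng = np.random.RandomState(42)
reference_a = rng.normal(0.0, 1.0, size=(120, 1))   # hypothetical class-A reference
reference_b = rng.normal(3.0, 1.0, size=(120, 1))   # hypothetical class-B reference
query = rng.normal(2.8, 1.0, size=(40, 1))          # sequence to classify

clf = HmmClassifier(referenceSeqs=[reference_a, reference_b], inputSeq=query)
best_index = clf.predict()   # index of the reference whose fitted HMM scores the query highest
print("closest reference:", best_index)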

Example 7: calculate_weights

# Required import: from hmmlearn.hmm import GaussianHMM [as alias]
# Or: from hmmlearn.hmm.GaussianHMM import predict [as alias]
    def calculate_weights(self, date, amount):
        if self.stacked == False:
            for elements in self.tradingDates:
                if elements.get('dt') >= self.start_date and elements.get('dt') <= date :
                    self.trainingDates.append(elements['dt'])
            for assetCode in self.asset_codes:
                assetValues = []
#                 for each_date in self.trainingDates:
#                     assetValues.append(StockData.objects.filter(dt=each_date,ticker=assetCode).values("price_close")[0]['price_close'])
                assetValues = [StockData.objects.filter(dt=each_date,ticker=assetCode).values("price_close")[0]['price_close'] for each_date in self.trainingDates]    
                self.historical_Data[assetCode] = assetValues
            self.stacked = True
        else:
            assetValues = []
            for assetCode in self.asset_codes:
                self.historical_Data[assetCode].append(StockData.objects.filter(dt=date,ticker=assetCode).values("price_close")[0]['price_close'])    
        
        target = {'money': amount}    
        for assetCode in self.asset_codes:
            close_v = np.array(self.historical_Data[assetCode])
            diff = np.diff(close_v)
            X = np.column_stack([diff])
            model = GaussianHMM(n_components=2, covariance_type="diag", n_iter=1000).fit(X)
            hidden_states = model.predict(X)
            stableProb = 0
            if hidden_states[len(hidden_states) - 1] == 1:
                stableProb = model.transmat_[1][1]
            else:
                stableProb = 0
            target[assetCode] = stableProb
            target['money'] -= stableProb * close_v[len(close_v) - 1]
            
        self.weight = []
        self.weight.append(target['money'])
#         for assetCode in self.asset_codes:
#             self.weight.append(target[assetCode])
        self.weight += [target[assetCode] for assetCode in self.asset_codes]    
        return self.weight
Author: taeheumcho, Project: AI_project_taeheum, Lines: 40, Source: Strategy_HMM.py

Example 8: hmmtest

# Required import: from hmmlearn.hmm import GaussianHMM [as alias]
# Or: from hmmlearn.hmm.GaussianHMM import predict [as alias]
def hmmtest(trade_data, test_data):
    # pack diff and volume for training
    # delete records containing infinity
    X = test_data[test_data['Strategy_Gross_Return_RDP_5'] != float("inf")]
    X = test_data
    ###############################################################################
    # Run Gaussian HMM
    #print("fitting to HMM and decoding ...", end='')
    n_components = 4
    covariance_type = 'full'
    n_iter = 1000
    
    # make an HMM instance and execute fit
    model = GaussianHMM(n_components=n_components, covariance_type=covariance_type, n_iter=n_iter).fit(X)
    #model= GMMHMM(n_components=4,n_mix=3,covariance_type="diag", n_iter=100).fit(X)
    # model = MultinomialHMM(n_components=4, n_iter=100).fit(X)
    # predict the optimal sequence of internal hidden state
    hidden_states = model.predict(X)
    
    #print("done\n")
    
    ###############################################################################
    # print trained parameters and plot
    #print("Transition matrix")
    #print(model.transmat_)
    #print()
    
    print("means and vars of each hidden state")
    for i in range(model.n_components):
        print("%dth hidden state" % i)
        print("mean = ", model.means_[i])
        print("var = ", np.diag(model.covars_[i]))
        
        
    plotHmmState(model, hidden_states, trade_data)
    
    return model
Author: jp1989326, Project: Quant-Ver1, Lines: 39, Source: hmmPeriodDivison.py

Example 9: GaussianHMM

# Required import: from hmmlearn.hmm import GaussianHMM [as alias]
# Or: from hmmlearn.hmm.GaussianHMM import predict [as alias]
dim_h = 5
N_train = 500
n_stocks = 1
X = in_data[:N_train,:(n_stocks*3)]
n_factors = X.shape[1] / n_stocks

# Make an HMM instance and execute fit

model = GaussianHMM(n_components=dim_h, covariance_type="diag", 
					n_iter=1000).fit(in_data_ema[:(N_train),:])

RMSE_train = np.zeros(N_train)
ER_train = np.zeros(N_train)

# Predict the optimal sequence of internal hidden state
hidden_states = model.predict(in_data_ema[:N_train,:])
	# NOTE: the enclosing `for i in ...:` loop header is omitted from this excerpt
	state_cur = hidden_states[i]
	# model.transmat_
	pred_ind = np.arange(n_stocks) * n_factors
	
	mean_cur = model.means_[state_cur,:]
	mean_pred = mean_cur[pred_ind]
	# need 
	prev_ema = in_data_ema[i,pred_ind]
	mean_pred = rm_ema(mean_pred, prev_ema, n_ema=n_ema)
	
	covar_cur = model.covars_[state_cur,:]
	covar_pred = covar_cur[pred_ind,:][:,pred_ind]
	covar_pred = rm_ema(covar_pred, 0, n_ema=n_ema)

	y_true = in_data[(i+1),pred_ind]
Author: mufan-li, Project: Qtn2016, Lines: 33, Source: hmm.py

Example 10: predict_states

# Required import: from hmmlearn.hmm import GaussianHMM [as alias]
# Or: from hmmlearn.hmm.GaussianHMM import predict [as alias]
def predict_states(X,group_id,empirical_states):
	#print("fitting to HMM and decoding ...")
	max_state_number = (group_id+1)*10
	n_components = 2
	
	# make an HMM instance and execute fit
	model = GaussianHMM(n_components, covariance_type="diag", n_iter=1000)
	
	# Train several HMMs to avoid local minima
	max_score = 0
	max_proba_states = []
	transmat = [[]]
	n = 2
	for i in range(1,n):
		model.fit([X])
		score = model.decode(X)[0]
		if i==1 or max_score < score:
			max_score = score
			max_proba_states = model.predict(X)
			transmat = model.transmat_
		
		'''	
		print "score", score
		# predict the optimal sequence of internal hidden state
		hidden_states = model.predict(X)
		print hidden_states
		'''
	# end multiple training
	
	#print max_score, max_proba_states, transmat
	
	# Compare the state with empirical states
	max_proba_states = max_proba_states.tolist()
	max_proba_states_inver = []
	for s in max_proba_states:
		max_proba_states_inver.append(0 if s == 1 else 1)
	
	#print empirical_states, max_proba_states, max_proba_states_inver
	
	difference_state = np.subtract(np.array(max_proba_states),np.array(empirical_states)).tolist()
	difference_state_inver = np.subtract(np.array(max_proba_states_inver),np.array(empirical_states)).tolist()
	
	difference = np.sum(np.power(difference_state,2))
	difference_inver = np.sum(np.power(difference_state_inver,2))
	
	#print difference, difference_inver
	
	if(difference_inver < difference):
		max_proba_states = max_proba_states_inver
	# end switch bits
	
	# Predict future state
	future_states_proba = np.dot([0,1],transmat)
	future_state = 0
	if future_states_proba[1] > future_states_proba[0]:
		future_state = 1	
	# End
	
	result_states = max_proba_states+[future_state for i in range(0,max_state_number-len(max_proba_states))];
	return result_states		
	print("done\n")
Author: xwtt8, Project: Hospital-Readmission-, Lines: 63, Source: multiFeaturesHmm.py
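
Example 10 projects the state distribution one step ahead with a single dot product against the learned transition matrix. The same idea extends to a k-step horizon via a matrix power; the sketch below uses a hypothetical two-state chain and is not part of the original code.

import numpy as np

def propagate_state_distribution(transmat, current_dist, k):
    """Return the state distribution after k transitions of a Markov chain."""
    dist = np.asarray(current_dist, dtype=float)
    return dist @ np.linalg.matrix_power(np.asarray(transmat), k)

transmat = np.array([[0.9, 0.1],
                     [0.3, 0.7]])
print(propagate_state_distribution(transmat, [0.0, 1.0], k=1))  # one step, as in the example
print(propagate_state_distribution(transmat, [0.0, 1.0], k=5))  # five steps ahead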

Example 11: smooth

# Required import: from hmmlearn.hmm import GaussianHMM [as alias]
# Or: from hmmlearn.hmm.GaussianHMM import predict [as alias]
     # (excerpt begins mid-conditional; the opening `if` branch header is omitted)
         fillValue = 30.0
     elif parameter == 'Length':
         fillValue = 325.0
     else:
         fillValue = 0
     if (parameter + '_smoothed') not in fbf.columns:
         fbf[parameter] = fbf[parameter].fillna(method='pad', limit=5).fillna(fillValue)
         fbf = smooth(fbf, parameter)
         fbf.to_pickle(directory + '/frame_by_frame_synced.pickle')
 
 #CREATE HIDDEN MARKOV MODEL
 
 _fbf = fbf.loc[fbf['synced_time'] > np.timedelta64(0,'ns')]  #take only post-stimulus data
 X = np.column_stack(_fbf[ i +'_smoothed'] for i in parameters)
 
 state_values = pd.DataFrame(THE_model.predict(X), columns=['state'])
 #DISCARD CASES WHERE ONE OR MORE STATES OCCURS RARELY (<1%).
 DISCARD = False
 for i in list(set(state_values['state'])):
     if (len(state_values[state_values['state']==i]) / float(len(state_values)) < 0.005) & (len(state_values[state_values['state']==i]) >0):
         print i, len(state_values), len(state_values[state_values['state'] == i]), '\t', FLY_ID
         state_values.loc[state_values['state']==i, 'state'] = np.nan
         #DISCARD = True
 state_values['state'] = state_values['state'].fillna(method='pad').fillna(method='bfill')
 state_values = np.array(state_values['state']) 
 
 statesdf = pd.DataFrame(state_values, columns=['state'], index = _fbf.index)
 statesdf['FLY_ID'] = FLY_ID
 try:
     statesdf['GROUP'] = GROUP
     statesdf.to_pickle(directory + '/states.pickle')
Author: dbath, Project: wahnsinn, Lines: 33, Source: hmm_transition_probability.py

Example 12: GaussianHMM

# Required import: from hmmlearn.hmm import GaussianHMM [as alias]
# Or: from hmmlearn.hmm.GaussianHMM import predict [as alias]
#spx_ret = spx_ret * 1000.0
rets = np.column_stack([spx_ret])

# Create the Gaussian Hidden markov Model and fit it
# to the SPY returns data, outputting a score
hmm_model = GaussianHMM(
    n_components=3,                     # number of states
    covariance_type="full",             # full covariance matrix vs diagonal
    n_iter=1000                         # number of iterations
).fit(rets)

print("Model Score:", hmm_model.score(rets))

# Plot the in sample hidden states closing values
# Predict the hidden states array
hidden_states = hmm_model.predict(rets)

print('Percentage of hidden state 1 = %f' % (sum(hidden_states)/len(hidden_states)))

print("Transition matrix")
print(hmm_model.transmat_)

print("Means and vars of each hidden state")
for i in range(hmm_model.n_components):                   # 0 is down, 1 is up
    print("{0}th hidden state".format(i))
    print("mean = ", hmm_model.means_[i])
    print("var = ", np.diag(hmm_model.covars_[i]))

fig, axs = plt.subplots(hmm_model.n_components, sharex=True, sharey=True)
colours = cm.rainbow(np.linspace(0, 1, hmm_model.n_components))
for i, (ax, colour) in enumerate(zip(axs, colours)):
Author: homeoffice-ys, Project: EliteQuant_Python, Lines: 33, Source: hidden_markov_chain.py
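
Example 12 reports the share of hidden state 1 as sum(hidden_states) / len(hidden_states), which only reads naturally for a two-state labelling; with n_components=3 a per-state occupancy count is more informative. A small sketch, assuming hidden_states is the 1-D integer array produced by predict above:

import numpy as np

def state_occupancy(hidden_states, n_components):
    """Fraction of time steps spent in each hidden state."""
    counts = np.bincount(hidden_states, minlength=n_components)
    return counts / counts.sum()

# e.g. print(state_occupancy(hidden_states, hmm_model.n_components))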

Example 13: runHmm

# Required import: from hmmlearn.hmm import GaussianHMM [as alias]
# Or: from hmmlearn.hmm.GaussianHMM import predict [as alias]
def runHmm(patient_record,date_list,group_id,empirical_states):
###############################################################################
# Processing the data
	max_state_number = (group_id+1)*10
	
	X = np.zeros(shape=(max(len(patient_record),2),20))
	index = 0
	for date in date_list:
		tmp_list = []
		#print(date)
		for key, value in patient_record[date].iteritems():
			tmp_list.append(value)
		X[index] = np.array(tmp_list)
		index+=1
		
	# if no lab test is available, train with an all zero array
	if X.shape[0]  == 0:
		X = np.zeros(shape=(2,20))
	elif X.shape[0] == 1:
		X[1] = np.zeros(shape=(1,20))
		
	#print(X)	
	#print(X.shape)
	
###############################################################################
# Run Gaussian HMM
	print("fitting to HMM and decoding ...")
	n_components = 2
	
	# make an HMM instance and execute fit
	model = GaussianHMM(n_components, covariance_type="diag", n_iter=1000)
	
	# Train several HMMs to avoid local minima
	max_score = 0
	max_proba_states = []
	transmat = [[]]
	n = 2
	for i in range(1,n):
		model.fit([X])
		score = model.decode(X)[0]
		if i==1 or max_score < score:
			max_score = score
			max_proba_states = model.predict(X)
			transmat = model.transmat_
		
		'''	
		print "score", score
		# predict the optimal sequence of internal hidden state
		hidden_states = model.predict(X)
		print hidden_states
		'''
	# end multiple training
	
	#print max_score, max_proba_states, transmat
	
	# Compare the state with empirical states
	max_proba_states = max_proba_states.tolist()
	max_proba_states_inver = []
	for s in max_proba_states:
		max_proba_states_inver.append(0 if s == 1 else 1)
	
	#print empirical_states, max_proba_states, max_proba_states_inver
	
	difference_state = np.subtract(np.array(max_proba_states),np.array(empirical_states)).tolist()
	difference_state_inver = np.subtract(np.array(max_proba_states_inver),np.array(empirical_states)).tolist()
	
	difference = np.sum(np.power(difference_state,2))
	difference_inver = np.sum(np.power(difference_state_inver,2))
	
	#print difference, difference_inver
	
	if(difference_inver < difference):
		max_proba_states = max_proba_states_inver
	# end switch bits
	
	# Predict future state
	future_states_proba = np.dot([0,1],transmat)
	future_state = 0
	if future_states_proba[1] > future_states_proba[0]:
		future_state = 1	
	# End
	
	result_states = max_proba_states+[future_state for i in range(0,max_state_number-len(max_proba_states))];
	
	return result_states
	'''
	state = [0,1]
	transmat = np.array(model.transmat_)
	
	print np.dot(state,transmat)
	
	print np.array(model.transmat_)
	
	#print (hidden_states)
	#print (hidden_states.shape)
	'''
		
	print("done\n")
Author: xwtt8, Project: Hospital-Readmission-, Lines: 100, Source: unsplearnHmm.py

Example 14: GaussianHMM

# Required import: from hmmlearn.hmm import GaussianHMM [as alias]
# Or: from hmmlearn.hmm.GaussianHMM import predict [as alias]
import pylab as pl
import numpy as np
from hmmlearn.hmm import GaussianHMM
from matplotlib.dates import YearLocator, MonthLocator, DateFormatter
import nyc

###############################################################################
# print trained parameters and plot
###############################################################################

new_x = np.asarray(train_set)

n_comps = 6
model = GaussianHMM(n_comps)
model.fit([new_x])
hidden_states = model.predict(new_x)

print("means and vars of each hidden state")
for i in range(n_comps):
    print("%dth hidden state" % i)
    print("mean = ", model.means_[i])
    print("var = ", np.diag(model.covars_[i]))
    print()

years = YearLocator()   # every year
months = MonthLocator()  # every month
yearsFmt = DateFormatter('%Y')
fig = pl.figure()
ax = fig.add_subplot(111)

ald = np.asarray(all_days)
Author: xldenis, Project: instabike, Lines: 33, Source: hmm.py

示例15: print

# 需要导入模块: from hmmlearn.hmm import GaussianHMM [as 别名]
# 或者: from hmmlearn.hmm.GaussianHMM import predict [as 别名]
diff = np.diff(close_v)
dates = dates[1:]
close_v = close_v[1:]

# Pack diff and volume for training.
X = np.column_stack([diff, volume])

###############################################################################
# Run Gaussian HMM
print("fitting to HMM and decoding ...", end="")

# Make an HMM instance and execute fit
model = GaussianHMM(n_components=4, covariance_type="diag", n_iter=1000).fit(X)

# Predict the optimal sequence of internal hidden state
hidden_states = model.predict(X)

print("done")

###############################################################################
# Print trained parameters and plot
print("Transition matrix")
print(model.transmat_)
print()

print("Means and vars of each hidden state")
for i in range(model.n_components):
    print("{0}th hidden state".format(i))
    print("mean = ", model.means_[i])
    print("var = ", np.diag(model.covars_[i]))
    print()
Author: BiuBiuBiLL, Project: hmmlearn, Lines: 33, Source: plot_hmm_stock_analysis.py
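
As a companion to predict, hmmlearn also provides predict_proba, which returns the posterior probability of each hidden state at every time step; this can be more informative than the hard Viterbi labelling when states are ambiguous. A minimal sketch reusing the model and X from Example 15 (assumed to be in scope):

# Posterior state probabilities, shape (n_samples, n_components)
posteriors = model.predict_proba(X)
print(posteriors[:5])

# The argmax of the posteriors usually agrees with predict(), but the two can
# differ because predict() decodes a jointly optimal state path (Viterbi).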


Note: The hmmlearn.hmm.GaussianHMM.predict examples in this article were compiled by 纯净天空 from GitHub/MSDocs and other open-source code and documentation platforms. The code snippets were selected from open-source projects contributed by various developers; copyright in the source code belongs to the original authors. Please refer to the corresponding project's license before distributing or using the code, and do not reproduce this article without permission.