This article collects typical usage examples of the Python method hyperopt.Trials.trial_attachments. If you have been wondering what Trials.trial_attachments does, how to call it, or what real code that uses it looks like, the hand-picked code examples below should help. You can also explore further usage examples of the containing class, hyperopt.Trials.
The following presents 8 code examples of the Trials.trial_attachments method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
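Before the examples, here is a minimal, self-contained sketch of the pattern they all rely on: the objective function returns an 'attachments' dict alongside the loss, and Trials.trial_attachments later reads those values back for a given trial. The objective, the search space, and the 'std' attachment key below are illustrative assumptions, not taken from any of the examples.

import numpy as np
from hyperopt import STATUS_OK, Trials, fmin, hp, tpe

def objective(params):
    # Hypothetical evaluation: pretend these are cross-validation fold losses.
    fold_losses = np.abs(np.random.randn(5)) * (params['x'] ** 2 + 0.1)
    return {
        'loss': float(fold_losses.mean()),
        'status': STATUS_OK,
        # Anything placed under 'attachments' is stored with the trial and can
        # be read back later via Trials.trial_attachments(trial)[key].
        'attachments': {'std': float(fold_losses.std())},
    }

trials = Trials()
best = fmin(objective, {'x': hp.uniform('x', -1.0, 1.0)},
            algo=tpe.suggest, max_evals=20, trials=trials)

best_ind = int(np.argmin(trials.losses()))
best_std = trials.trial_attachments(trials.trials[best_ind])['std']
print(best, best_std)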
Example 1: optimize_model_pytorch
# Required import: from hyperopt import Trials [as alias]
# Or: from hyperopt.Trials import trial_attachments [as alias]
def optimize_model_pytorch(device, args, train_GWAS, train_y, test_GWAS, test_y, out_folder="", startupJobs=40, maxevals=200, noOut=False):
    global numTrials_pytorch
    numTrials_pytorch = 0
    trials = Trials()
    trial_wrapper = partial(trial_pytorch, device=device, args=args, train_GWAS=train_GWAS, train_y=train_y, test_GWAS=test_GWAS, test_y=test_y)
    best_pars = fmin(trial_wrapper, parameter_space_pytorch(), algo=partial(tpe.suggest, n_startup_jobs=startupJobs), max_evals=maxevals, trials=trials)
    # Print the selected 'best' hyperparameters.
    if not noOut:
        print('\nBest hyperparameter settings: ', space_eval(parameter_space_pytorch(), best_pars), '\n')
    # Loop through the 1st entry in the dict that holds all the lookup keys.
    regression = True
    for p in trials.trials[0]['misc']['idxs']:
        plot_optimization_pytorch(trials, p, regression, out_folder=out_folder)
    best_pars = space_eval(parameter_space_pytorch(), best_pars)  # turn the indices into the actual params in the valid parameter space
    # Override the number of epochs with the early-stopping epoch of the best trial.
    lowestLossIndex = np.argmin(trials.losses())
    best_pars['earlyStopEpochs'] = trials.trial_attachments(trials.trials[lowestLossIndex])['highestAcc_epoch']
    best_pars['earlyStopEpochs'] += 1  # +1 because epochs are 0-based otherwise
    best_pars['epochs'] = best_pars['earlyStopEpochs']
    if best_pars['epochs'] <= 0:
        best_pars['epochs'] = 1  # we don't want a network without any training, as that would cause a problem for deep dreaming
    return best_pars
Example 2: run
# Required import: from hyperopt import Trials [as alias]
# Or: from hyperopt.Trials import trial_attachments [as alias]
def run(self):
    start = time.time()
    trials = Trials()
    best = fmin(self._obj, self.model_param_space._build_space(), tpe.suggest, self.max_evals, trials)
    best_params = space_eval(self.model_param_space._build_space(), best)
    best_params = self.model_param_space._convert_int_param(best_params)
    trial_rmses = np.asarray(trials.losses(), dtype=float)
    best_ind = np.argmin(trial_rmses)
    best_rmse_mean = trial_rmses[best_ind]
    best_rmse_std = trials.trial_attachments(trials.trials[best_ind])["std"]
    self.logger.info("-"*50)
    self.logger.info("Best RMSE")
    self.logger.info(" Mean: %.6f"%best_rmse_mean)
    self.logger.info(" std: %.6f"%best_rmse_std)
    self.logger.info("Best param")
    self.task._print_param_dict(best_params)
    end = time.time()
    _sec = end - start
    _min = int(_sec/60.)
    self.logger.info("Time")
    if _min > 0:
        self.logger.info(" %d mins"%_min)
    else:
        self.logger.info(" %d secs"%_sec)
    self.logger.info("-"*50)
Example 3: TunningParamter
# Required import: from hyperopt import Trials [as alias]
# Or: from hyperopt.Trials import trial_attachments [as alias]
def TunningParamter(param, data, features, feature, source_name, real_value, int_boolean):
    data = data[~pd.isnull(all_data[feature])]
    print(data.shape)
    ISOTIMEFORMAT = '%Y-%m-%d %X'
    start = time.strftime(ISOTIMEFORMAT, time.localtime())
    trials = Trials()
    objective = lambda p: trainModel(p, data, features, feature, source_name, real_value, int_boolean)
    best_parameters = fmin(objective, param, algo=tpe.suggest, max_evals=param['max_evals'], trials=trials)
    # now we need to get the best params
    trials_loss = np.asanyarray(trials.losses(), dtype=float)
    best_loss = min(trials_loss)
    ind = np.where(trials_loss == best_loss)[0][0]
    best_loss_std = trials.trial_attachments(trials.trials[ind])['std']
    end = time.strftime(ISOTIMEFORMAT, time.localtime())
    dumpMessage(best_parameters, best_loss, best_loss_std, param['task'], source_name, start, end)
Example 4: minimize
# Required import: from hyperopt import Trials [as alias]
# Or: from hyperopt.Trials import trial_attachments [as alias]
def minimize(self, restarts=2, epochs=600, tune_space=None):
    from hyperopt import fmin, tpe, Trials
    if tune_space is None:
        initial_values = self.tf_session.run(self.variables)
        tune_space = self._make_tune_space(initial_values)
    # TODO: This report structure has the downside of not writing
    # anything to disk until it's 100% complete.
    reports = []
    # Make minimize deterministic
    R = np.random.RandomState(self.seed)
    for restarts in range(restarts):
        trials = Trials()
        best = fmin(fn=self._evaluate,
                    space=tune_space,
                    algo=tpe.suggest,
                    max_evals=epochs,
                    trials=trials,
                    rstate=R)
        self._assign_values(best)
        reports.extend(trials.trial_attachments(t)['report'] for t in trials.trials)
    return self.evaluator.make_agg_report(reports)
Example 5: open
# Required import: from hyperopt import Trials [as alias]
# Or: from hyperopt.Trials import trial_attachments [as alias]
log_handler = open(log_file, 'wb')
writer = csv.writer(log_handler)
headers = ['trial_counter', 'kappa_mean', 'kappa_std']
for k, v in sorted(param_space.items()):
    headers.append(k)
writer.writerow(headers)
log_handler.flush()
print("************************************************************")
print("Search for the best params")
# global trial_counter
trial_counter = 0
trials = Trials()
objective = lambda p: hyperopt_wrapper(p, feat_name)
best_params = fmin(objective, param_space, algo=tpe.suggest,
                   trials=trials, max_evals=param_space["max_evals"])
for f in int_feat:
    if f in best_params:
        best_params[f] = int(best_params[f])
print("************************************************************")
print("Best params")
for k, v in best_params.items():
    print(" %s: %s" % (k, v))
trial_kappas = -np.asarray(trials.losses(), dtype=float)
best_kappa_mean = max(trial_kappas)
ind = np.where(trial_kappas == best_kappa_mean)[0][0]
best_kappa_std = trials.trial_attachments(trials.trials[ind])['std']
print("Kappa stats")
print(" Mean: %.6f\n Std: %.6f" % (best_kappa_mean, best_kappa_std))
Example 6: zip
# Required import: from hyperopt import Trials [as alias]
# Or: from hyperopt.Trials import trial_attachments [as alias]
train = pd.read_csv("../data/train.process.csv")
for feat_name, feat_fold in zip(feat_names, feat_folders):
    # first we need to read the data for our model
    # (this reduces the time spent reading data)
    print('read data for training')
    print('generate model in condition %s' % feat_name)
    print("Search for the best models")
    print("feat_name %s" % feat_name)
    # to reduce the time spent reading data;
    # note that train.shape[0] = 39774
    ISOTIMEFORMAT = '%Y-%m-%d %X'
    start_time = time.strftime(ISOTIMEFORMAT, time.localtime())
    param_space = para_spaces[feat_name]
    trials = Trials()
    objective = lambda p: trainModel(p, feat_fold, feat_name)
    best_params = fmin(objective, param_space, algo=tpe.suggest,
                       trials=trials, max_evals=param_space["max_evals"])
    print(type(best_params))
    print(best_params)
    for f in int_feat:
        if f in best_params:
            best_params[f] = int(best_params[f])
    trial_acc = -np.asanyarray(trials.losses(), dtype=float)
    best_acc_mean = max(trial_acc)
    ind = np.where(trial_acc == best_acc_mean)[0][0]
    best_acc_std = trials.trial_attachments(trials.trials[ind])['std']
    end_time = time.strftime(ISOTIMEFORMAT, time.localtime())
    dumpModelMessage(best_params, best_acc_mean, best_acc_std, feat_fold, feat_name, start_time, end_time)
    print("Best stats")
    print('Mean: %.6f \nStd: %.6f \n' % (best_acc_mean, best_acc_std))
Example 7: Trials
# Required import: from hyperopt import Trials [as alias]
# Or: from hyperopt.Trials import trial_attachments [as alias]
    param_space = param_spaces[model_name]
    trials = Trials()
    objective = lambda p: hyperopt_wrapper(p, feat_key, model_name, train, loss)
    best_params = fmin(objective, param_space, algo=tpe.suggest,
                       trials=trials, max_evals=max_evals)
    for f in int_feat:
        if f in best_params:
            best_params[f] = int(best_params[f])
    print("************************************************************")
    print("Best params")
    for k, v in best_params.items():
        print(" %s: %s" % (k, v))
    trial_losses = -np.asarray(trials.losses(), dtype=float)
    best_loss_mean = max(trial_losses)
    ind = np.where(trial_losses == best_loss_mean)[0][0]
    best_loss_std = trials.trial_attachments(trials.trials[ind])['std']
    print("Loss stats")
    print(" Mean: %.6f\n Std: %.6f" % (best_loss_mean, best_loss_std))
else:
    print('-------- generating submission -------')
    test = pd.read_csv(test_file, index_col=False)
    test_ids = test['ID']
    test.drop('ID', axis=1, inplace=True)
    best_params = loads(dumps(cv_scores.find({'model_name': model_name, 'feat_key': feat_key}).sort([('loss_cv_mean', -1)]).limit(1)))[0]
    print("Best params")
    for k, v in best_params.items():
        print(" %s: %s" % (k, v))
Example 8: print
# Required import: from hyperopt import Trials [as alias]
# Or: from hyperopt.Trials import trial_attachments [as alias]
data = [X_all, y_class_tr_all, y_reg_tr_all]
# =========================== Search the best params ===========================
print("------------------------------------------------------------------------")
print("-------- Search the best params for %s --------" % ftmodnm)
starttime = time.clock()
log_handler = log(ftmodnm)
trial_counter = 0
ftmodinfo = [model, data]
trials = Trials()
objective = lambda p: hyperopt_wrapper(p, ftmodinfo)
best_params = fmin(objective, param, algo=tpe.suggest, trials=trials, max_evals=param["max_evals"])
for f in modp.int_feat():
    if f in best_params:
        best_params[f] = int(best_params[f])
elapsed = round((time.clock() - starttime) / 60.0, 2)
print("************************************************************")
print("Best params for %s in %.2f min" %(ftmodnm, elapsed))
for k, v in best_params.items():
print(" %s: %s" % (k, v))
trial_RMSEs = np.asarray(trials.losses(), dtype=float)
best_RMSE_mean = min(trial_RMSEs)
ind = np.where(trial_RMSEs == best_RMSE_mean)[0][0]
best_RMSE_std = trials.trial_attachments(trials.trials[ind])['std']
print("RMSE stats")
print(" Mean: %.6f\n Std: %.6f" % (best_RMSE_mean, best_RMSE_std))
print(" Trial: %s" % str(ind + 1))
print("************************************************************")
print()