本文整理汇总了Python中pandas.HDFStore.close方法的典型用法代码示例。如果您正苦于以下问题:Python HDFStore.close方法的具体用法?Python HDFStore.close怎么用?Python HDFStore.close使用的例子?那么恭喜您,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类pandas.HDFStore的用法示例。
在下文中一共展示了HDFStore.close方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: build_from_openfisca
# 需要导入模块: from pandas import HDFStore [as 别名]
# 或者: from pandas.HDFStore import close [as 别名]
def build_from_openfisca(directory=None):
    """Build the age-structure table from OpenFisca survey simulations.

    Runs a ``SurveySimulation`` for each year in 2006-2009, extracts the
    age structure, merges all years into one DataFrame and stores the
    result under the "openfisca" key of the ``H5_FILENAME`` HDF5 file.

    Parameters
    ----------
    directory : str, optional
        Destination directory of the HDF5 file; defaults to the directory
        containing this module.
    """
    df_age_final = None
    for yr in range(2006, 2010):
        simulation = SurveySimulation()
        simulation.set_config(year=yr)
        simulation.set_param()
        simulation.set_survey()
        df_age = get_age_structure(simulation)
        # Move the weights under a per-year column, dropping the raw 'wprm'.
        df_age[yr] = df_age['wprm']
        del df_age['wprm']
        if df_age_final is None:
            df_age_final = df_age
        else:
            df_age_final = df_age_final.merge(df_age)
    if directory is None:
        directory = os.path.dirname(__file__)
    fname = os.path.join(directory, H5_FILENAME)
    store = HDFStore(fname)
    try:
        # print() is portable across Python 2/3 for a single argument.
        print(df_age_final.dtypes)
        store.put("openfisca", df_age_final)
    finally:
        # Close the store even if the put fails, so the file handle is
        # never leaked.
        store.close()
示例2: build_actualisation_groups
# 需要导入模块: from pandas import HDFStore [as 别名]
# 或者: from pandas.HDFStore import close [as 别名]
def build_actualisation_groups(self, filename=None):
    '''
    Builds actualisation groups.

    Reads the 'vars' and 'names' tables from the actualisation HDF5 file,
    groups the variables by actualisation coefficient into
    ``self.actualisation_vars`` and computes in ``self.coeffs_df`` the
    cumulated actualisation factor from the survey year up to the
    simulation year.

    Parameters
    ----------
    filename : str, optional
        Path of the HDF5 file; defaults to ``actualisation_groups.h5``
        inside the configured data directory.
    '''
    if filename is None:
        data_dir = CONF.get('paths', 'data_dir')
        filename = os.path.join(data_dir, "actualisation_groups.h5")
    store = HDFStore(filename)
    try:
        df = store['vars']
        # Map each coefficient to the list of variables it actualises.
        # ('groups' instead of the previous 'vars', which shadowed the
        # builtin; Series.unique() replaces the free-standing unique().)
        groups = dict()
        for coeff in sorted(df['coeff'].dropna().unique()):
            groups[coeff] = list(df[df['coeff'] == coeff]['var'])
        self.actualisation_vars = groups
        self.coeffs_df = store['names']
    finally:
        # Release the HDF5 handle even if a table is missing.
        store.close()
    self.coeffs_df['coeff'] = self.coeffs_df['coeff'].str.replace(' ', '')  # remove spaces
    yr = 1 * self.survey_year
    self.coeffs_df['value'] = 1
    # Cumulate the yearly factors up to the simulation year; a missing
    # year column contributes a neutral factor of 1.
    while yr < self.datesim_year:
        if yr in self.coeffs_df.columns:
            factor = self.coeffs_df[yr]
        else:
            factor = 1
        self.coeffs_df['value'] = self.coeffs_df['value'] * factor
        yr += 1
    self.coeffs_df.set_index(['coeff'], inplace=True)
示例3: show_temp
# 需要导入模块: from pandas import HDFStore [as 别名]
# 或者: from pandas.HDFStore import close [as 别名]
def show_temp(config_files_directory=default_config_files_directory):
    """Log the contents of the temporary HDF5 store.

    Parameters
    ----------
    config_files_directory : str
        Directory holding the configuration files used to locate the store.
    """
    hdf_file_path = get_tmp_file_path(config_files_directory=config_files_directory)
    store = HDFStore(hdf_file_path)
    try:
        log.info("{}".format(store))
    finally:
        # Guarantee the handle is released even if logging raises.
        store.close()
示例4: download
# 需要导入模块: from pandas import HDFStore [as 别名]
# 或者: from pandas.HDFStore import close [as 别名]
def download():
    """ Convenience method that downloads all the weather data required
    for the machine learning examples.

    Collects 2001-2011 GSOD data for a fixed set of stations and saves
    each result under its own key in 'weather.h5'.
    """
    reader = GSODDataReader()
    year_list = range(2001, 2012)
    # Store key -> collect_data keyword arguments, one entry per station.
    stations = [
        ('austin', dict(station_name='AUSTIN CAMP MABRY', state='TX', country='US')),
        ('houston', dict(station_name='HOUSTON/D.W. HOOKS', state='TX', country='US')),
        ('nyc', dict(station_name='NEW YORK/LA GUARDIA', state='NY', country='US')),
        ('newark', dict(station_name='NEWARK INTL AIRPORT', state='NJ', country='US')),
        ('punta_arenas', dict(station_name='PUNTA ARENAS', country='CH')),
        ('wellington', dict(station_name='WELLINGTON AIRPORT', country='NZ')),
    ]
    store = HDFStore('weather.h5')
    try:
        for key, kwargs in stations:
            store[key] = reader.collect_data(year_list, exact_station=True,
                                             **kwargs)
    finally:
        # Close the store even when a download or write fails, so the
        # HDF5 file is never left open.
        store.close()
示例5: HDFStorePanel
# 需要导入模块: from pandas import HDFStore [as 别名]
# 或者: from pandas.HDFStore import close [as 别名]
class HDFStorePanel(BaseIO):
    """Benchmark reading and appending wide-format Panels via HDFStore."""

    goal_time = 0.2

    def setup(self):
        self.fname = '__test__.h5'
        # Panel is deprecated in recent pandas; swallow the warnings so
        # they do not pollute the benchmark run.
        with warnings.catch_warnings(record=True):
            item_labels = ['Item%03d' % i for i in range(20)]
            minor_labels = ['E%03d' % i for i in range(25)]
            self.p = Panel(np.random.randn(20, 1000, 25),
                           items=item_labels,
                           major_axis=date_range('1/1/2000', periods=1000),
                           minor_axis=minor_labels)
            self.store = HDFStore(self.fname)
            self.store.append('p1', self.p)

    def teardown(self):
        # Release the HDF5 handle before deleting the benchmark file.
        self.store.close()
        self.remove(self.fname)

    def time_read_store_table_panel(self):
        with warnings.catch_warnings(record=True):
            self.store.select('p1')

    def time_write_store_table_panel(self):
        with warnings.catch_warnings(record=True):
            self.store.append('p2', self.p)
示例6: convert_fiducial
# 需要导入模块: from pandas import HDFStore [as 别名]
# 或者: from pandas.HDFStore import close [as 别名]
def convert_fiducial(filename, output_type="csv"):
    '''
    Converts the fiducial comparison HDF5 files into a CSV file.

    Parameters
    ----------
    filename : str
        HDF5 file.
    output_type : str, optional
        Type of file to output.
    '''
    store = HDFStore(filename)
    try:
        data_columns = dict()
        for key in store.keys():
            # Sort the columns before averaging.  DataFrame.sort(axis=1)
            # was removed from pandas; sort_index is the equivalent.
            data = store[key].sort_index(axis=1)
            mean_data = data.mean(axis=1)
            # Strip the leading '/' from the store key.
            data_columns[key[1:]] = mean_data
    finally:
        store.close()

    df = DataFrame(data_columns)
    # os.path.splitext keeps interior dots in the name; the previous
    # "".join(filename.split(".")[:-1]) silently deleted them
    # (e.g. "a.b.h5" became "ab.csv").
    output_name = os.path.splitext(filename)[0] + "." + output_type
    df.to_csv(output_name)
示例7: main
# 需要导入模块: from pandas import HDFStore [as 别名]
# 或者: from pandas.HDFStore import close [as 别名]
def main():
    """Train one network per fold of a 3-fold split of the gene dataset.

    The dataset is split into three folds; for each fold the other two
    are concatenated into the training set, and the trained results are
    written to the 'predGeneExp1.h5' HDF store.
    """
    # the loaded data is a DataFrame
    genedata = load_gene_dataset()
    # randomly split the dataset to three folds
    # this code should be improved in the future
    kfold = 3.0
    data_kfold = {}
    train, fold1 = train_test_split(genedata, test_size=1 / kfold)
    data_kfold['fold1'] = fold1
    fold3, fold2 = train_test_split(train, test_size=0.5)
    data_kfold['fold2'] = fold2
    data_kfold['fold3'] = fold3
    # now we want to train a network for each fold
    # store the results in h5 file
    geneStore = HDFStore('predGeneExp1.h5')
    try:
        for key in data_kfold:
            print(key)
            X_val, y_val = get_input_output(data_kfold[key])
            # The two remaining folds form the training set.  On Python 3,
            # dict.keys() returns a view without .remove(), which made the
            # original `keys.remove(key)` crash; build a list instead.
            keys = [k for k in data_kfold if k != key]
            training_data = pd.concat([data_kfold[keys[0]], data_kfold[keys[1]]])
            X_train, y_train = get_input_output(training_data)
            print(keys)
            # use these data to train the network
            main_training(key, X_train, y_train, X_val, y_val, geneStore)
    finally:
        # the h5 store must be closed after use
        geneStore.close()
示例8: save_temp
# 需要导入模块: from pandas import HDFStore [as 别名]
# 或者: from pandas.HDFStore import close [as 别名]
def save_temp(dataframe, name=None, year=None, config_files_directory=default_config_files_directory):
    """
    Save a temporary table

    Parameters
    ----------
    dataframe : pandas DataFrame
        the dataframe to save
    name : string, default None
        key of the table inside the store
    year : integer, default None
        year of the data

    Returns
    -------
    bool
        True on success.

    Raises
    ------
    Exception
        If ``year`` or ``name`` is not provided.
    """
    if year is None:
        raise Exception("year is needed")
    if name is None:
        raise Exception("name is needed")
    hdf_file_path = get_tmp_file_path(config_files_directory=config_files_directory)
    store = HDFStore(hdf_file_path)
    try:
        log.info("{}".format(store))
        store_path = "{}/{}".format(year, name)
        # Use `in store`, which matches keys with or without the leading
        # '/'; the previous `store_path in store.keys()` never matched
        # because keys() returns paths prefixed with '/'.
        if store_path in store:
            del store[store_path]
        # Write through the already-open store instead of re-opening the
        # same file with DataFrame.to_hdf while this handle is held.
        store.put(store_path, dataframe)
    finally:
        store.close()
    return True
示例9: in_store
# 需要导入模块: from pandas import HDFStore [as 别名]
# 或者: from pandas.HDFStore import close [as 别名]
def in_store(self, path):
    """Return True when *path* exists in the HDF store at ``self.path``."""
    store = HDFStore(self.path)
    try:
        return path in store
    finally:
        # Always release the HDF5 handle before returning.
        store.close()
示例10: SAVE_ChangeDictOrder
# 需要导入模块: from pandas import HDFStore [as 别名]
# 或者: from pandas.HDFStore import close [as 别名]
def SAVE_ChangeDictOrder(_processedEvents):
    '''Change the nesting order for the final HDF database - instead of
    correct/attention it will go attention/correct etc.

    Parameters
    ----------
    _processedEvents : iterable of dict
        One entry per recording, keyed [correct|incorrect][attention|motor];
        zipped positionally with names derived from the module-level
        ``events_names`` — assumes both have the same order. TODO confirm.
    '''
    # NOTE(review): hard-coded user-specific path; consider making this
    # configurable.
    h_path = "/Users/ryszardcetnarski/Desktop/Nencki/TD/HDF/"
    # Strip the '_EVENTS' suffix because the path in the HDF store must
    # match exactly; otherwise nothing was saved.
    all_event_names = sorted([name.replace('_EVENTS', '') for name in events_names if bef_aft_dict[bef_aft_switch + '_mat'] in name])
    store = HDFStore(h_path +bef_aft_dict[bef_aft_switch+ '_hdf'])
    for _data, recording in zip(_processedEvents, all_event_names):
        # Progress marker, one 'I' per recording.
        print('I')
        # Derive the subject id from the recording file name: text after
        # the last '/', minus a 4-char extension, '-' replaced by '_'.
        sname = recording.rfind("/") +1
        subId = recording[sname:-4].replace("-", "_")
        # NOTE(review): convert_objects() is deprecated/removed in modern
        # pandas — infer_objects() is the replacement.
        store[subId + '/events/attention/correct'] = _data['correct']['attention'].convert_objects()
        store[subId + '/events/motor/correct'] = _data['correct']['motor'].convert_objects()
        store[subId + '/events/attention/incorrect'] = _data['incorrect']['attention'].convert_objects()
        store[subId + '/events/motor/incorrect'] = _data['incorrect']['motor'].convert_objects()
        #print(_data['incorrect']['motor'].convert_objects())
    store.close()
示例11: _get
# 需要导入模块: from pandas import HDFStore [as 别名]
# 或者: from pandas.HDFStore import close [as 别名]
def _get(self, path):
    """Return the object stored under *path*, or None when absent."""
    store = HDFStore(self.path)
    try:
        result = store[path] if path in store else None
    finally:
        # Always release the HDF5 handle.
        store.close()
    return result
示例12: put
# 需要导入模块: from pandas import HDFStore [as 别名]
# 或者: from pandas.HDFStore import close [as 别名]
def put(self, path, obj):
    """Store *obj* under *path* in the HDF store, replacing any existing
    entry.

    Parameters
    ----------
    path : str
        Key inside the HDF store at ``self.path``.
    obj : object
        pandas object to persist.
    """
    store = HDFStore(self.path)
    try:
        if path in store:
            # print() for Python 3 compatibility, consistent with the
            # sibling remove() method.
            print("updating %s" % path)
            # Drop the stale entry first so the write is a clean replace.
            store.remove(path)
        store[path] = obj
    finally:
        # Release the HDF5 handle even if the write fails.
        store.close()
示例13: test_chunk
# 需要导入模块: from pandas import HDFStore [as 别名]
# 或者: from pandas.HDFStore import close [as 别名]
def test_chunk():
    # Timing harness: runs the 2011 survey simulation with 1..4 chunks and
    # prints per-chunk-count wall times plus the resulting aggregates.
    # NOTE(review): Python 2 print statements throughout; `writer` is
    # assigned but never used.
    print "debut"
    writer = None
    years = range(2011,2012)
    filename = destination_dir+'output3.h5'
    store = HDFStore(filename)
    for year in years:
        yr = str(year)
        # fname = "Agg_%s.%s" %(str(yr), "xls")
        simu = SurveySimulation()
        simu.set_config(year = yr)
        simu.set_param()
        # NOTE(review): function-level import; also time.clock() was
        # removed in Python 3.8 (perf_counter is the replacement).
        import time
        tps = {}
        # Measure compute() time for each chunk count.
        for nb_chunk in range(1,5):
            deb_chunk = time.clock()
            simu.set_config(survey_filename='C:\\Til\\output\\to_run_leg.h5', num_table=3, chunks_count=nb_chunk ,
                            print_missing=False)
            simu.compute()
            tps[nb_chunk] = time.clock() - deb_chunk
        voir = simu.output_table.table3['foy']
        print len(voir)
        # NOTE(review): interactive debugger breakpoint left in — remove
        # before running unattended.
        pdb.set_trace()
        agg3 = Aggregates()
        agg3.set_simulation(simu)
        agg3.compute()
        df1 = agg3.aggr_frame
        print df1.to_string()
        print tps
    # NOTE(review): the store is opened but never written to before close.
    store.close()
示例14: func_wrapper
# 需要导入模块: from pandas import HDFStore [as 别名]
# 或者: from pandas.HDFStore import close [as 别名]
def func_wrapper(*args, **kwargs):
    # Decorator wrapper: opens the temporary HDF store at `file_path`,
    # injects it as the `temporary_store` keyword argument of the wrapped
    # `func`, and guarantees the store is closed afterwards.
    # `func` and `file_path` come from the enclosing decorator scope,
    # which is outside this chunk.
    temporary_store = HDFStore(file_path)
    try:
        return func(*args, temporary_store = temporary_store, **kwargs)
    finally:
        # Collect first so lingering references to store-backed objects
        # are freed before the file handle is closed.
        gc.collect()
        temporary_store.close()
示例15: remove
# 需要导入模块: from pandas import HDFStore [as 别名]
# 或者: from pandas.HDFStore import close [as 别名]
def remove(self, path):
    """Delete *path* from the HDF store when present, flushing the file
    to disk with fsync after the removal."""
    store = HDFStore(self.path)
    try:
        if path in store:
            print("removing %s" % path)
            store.remove(path)
            store.flush(fsync=True)
    finally:
        # Always release the HDF5 handle.
        store.close()