本文整理汇总了Python中opus_core.store.attribute_cache.AttributeCache类的典型用法代码示例。如果您正苦于以下问题:Python AttributeCache类的具体用法?Python AttributeCache怎么用?Python AttributeCache使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了AttributeCache类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: QCDataSizeTests
class QCDataSizeTests(opus_unittest.OpusTestCase):
    """Exercise DatasetSizeModel's detection of attribute-size mismatches in a cache."""

    def setUp(self):
        # Work inside a throwaway cache directory so tests never touch real data.
        self.temp_dir = tempfile.mkdtemp(prefix='opus_tmp_attribute_cache')
        self.table_name = 'test_table'
        self.storage = AttributeCache(self.temp_dir)

    def tearDown(self):
        if os.path.exists(self.temp_dir):
            rmtree(self.temp_dir)

    def test_detect(self):
        # Build a cache in which one table carries attributes of different
        # lengths: two rows written for 1980 versus three rows for 1979.
        SimulationState().set_current_time(1980)
        data_1980 = {'int_column': np.array([100, 70], dtype="int32"),
                     'bool_column': np.array([False, True])}
        # On disk this becomes e.g. 'int_column.li4' on a little-endian machine.
        self.storage.write_table(self.table_name, data_1980)
        SimulationState().set_current_time(1979)
        data_1979 = {'flt_column': np.array([10, 70, 5.7], dtype="float32")}
        self.storage.write_table(self.table_name, data_1979)
        mismatch_counts = DatasetSizeModel(self.temp_dir).run()
        SimulationState().set_current_time(2000)
        # Exactly one column disagrees with the majority size.
        self.assertEqual(mismatch_counts.sum(), 1)
        # The model must leave the simulation clock where we set it.
        self.assertEqual(SimulationState().get_current_time(), 2000)
示例2: compare_travel_data_sets
def compare_travel_data_sets(self):
    """Compare the copied travel-data csv against the cached travel data.

    Returns True when every cell of both travel-cost matrices matches.
    """
    # Locate the csv copy produced by the MATSim run.
    copied_travel_data_location = os.path.join(self.destination, "opus_matsim", "tmp")
    if not os.path.exists(copied_travel_data_location):
        raise StandardError("Travel data not found: %s" % copied_travel_data_location)
    logger.log_status("Get copied travel data: %s" % copied_travel_data_location)
    table_name = "travel_data"
    travel_data_attribute = "single_vehicle_to_work_travel_cost"
    # Load the csv copy and turn the attribute into a zone-by-zone matrix;
    # missing od-pairs are filled with the sentinel 999.
    csv_data_set = TravelDataDataset(
        in_storage=csv_storage(storage_location=copied_travel_data_location),
        in_table_name=table_name)
    csv_matrix = csv_data_set.get_attribute_as_matrix(travel_data_attribute, fill=999)
    # Do the same for the travel data already sitting in the attribute cache.
    year = self.run_config["base_year"] + 2
    attribute_cache = AttributeCache(cache_directory=self.run_config["cache_directory"])
    cached_data_set = TravelDataDataset(
        in_storage=attribute_cache.get_flt_storage_for_year(year),
        in_table_name=table_name)
    cached_matrix = cached_data_set.get_attribute_as_matrix(travel_data_attribute, fill=999)
    from numpy import savetxt  # local import kept for the debugging dumps below
    savetxt(os.path.join(self.destination, "origin_travel_data.txt"), csv_matrix, fmt="%f")
    savetxt(os.path.join(self.destination, "existing_travel_data"), cached_matrix, fmt="%f")
    # Element-wise comparison of both matrices.
    return (csv_matrix == cached_matrix).all()
示例3: opusRun
def opusRun(progressCB, logCB, params):
    """Export one csv table (or all of them) into a year of an opus attribute cache."""
    # Normalize every key and value of the incoming parameters to str.
    params_dict = dict((str(key), str(val)) for key, val in params.iteritems())
    opus_data_directory = params_dict['opus_data_directory']
    opus_data_year = params_dict['opus_data_year']
    csv_data_path = params_dict['csv_data_path']
    table_name = params_dict['csv_table_name']

    input_storage = csv_storage(storage_location=csv_data_path)
    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    output_storage = attribute_cache.get_flt_storage_for_year(opus_data_year)
    SimulationState().set_current_time(opus_data_year)
    SessionConfiguration(new_instance=True,
                         package_order=[],
                         in_storage=AttributeCache())

    if table_name == 'ALL':
        logCB('caching all tables...\n')
        tables_to_export = input_storage.get_table_names()
    else:
        tables_to_export = [table_name]
    for table in tables_to_export:
        logCB("Exporting table '%s' to year %s of cache located at %s...\n" %
              (table, opus_data_year, opus_data_directory))
        ExportStorage().export_dataset(dataset_name=table,
                                       in_storage=input_storage,
                                       out_storage=output_storage)
    logCB("Successfully exported all tables.")
示例4: opusRun
def opusRun(progressCB, logCB, params):
    """Export an esri table (or every table) into a year of an opus attribute cache."""
    # Normalize every key and value of the incoming parameters to str.
    params_dict = dict((str(key), str(val)) for key, val in params.iteritems())
    esri_data_path = params_dict["esri_data_path"]
    esri_table_name = params_dict["esri_table_name"]
    opus_data_directory = params_dict["opus_data_directory"]
    opus_data_year = params_dict["opus_data_year"]

    input_storage = esri_storage(storage_location=esri_data_path)
    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    output_storage = attribute_cache.get_flt_storage_for_year(opus_data_year)
    SimulationState().set_current_time(opus_data_year)
    SessionConfiguration(new_instance=True, package_order=[], in_storage=AttributeCache())

    if esri_table_name == "ALL":
        logCB("Sending all tables to OPUS storage...\n")
        for table in input_storage.get_table_names():
            ExportStorage().export_dataset(dataset_name=table,
                                           in_storage=input_storage,
                                           out_storage=output_storage)
    else:
        logCB("Exporting table '%s' to OPUS storage located at %s...\n"
              % (esri_table_name, opus_data_directory))
        ExportStorage().export_dataset(dataset_name=esri_table_name,
                                       in_storage=input_storage,
                                       out_storage=output_storage)
        # NOTE(review): the source indentation is flattened; this final log is
        # assumed to belong to the single-table branch (its message names the
        # specific table) — confirm against the original file.
        logCB("Finished exporting table '%s'\n" % (esri_table_name))
示例5: test
def test(self):
    """Export a dbf test table into the cache and verify the loaded values."""
    opus_core_path = OpusPackage().get_opus_core_path()
    dbf_directory = os.path.join(opus_core_path, 'tests', 'data', 'dbf')
    table_name = 'test_logical'
    cache_directory = self._temp_dir
    year = 1000
    ExportDbfTableToCacheCommand(
        dbf_directory=dbf_directory,
        table_name=table_name,
        cache_directory=cache_directory,
        year=year,
    ).execute()
    attribute_cache = AttributeCache(cache_directory=cache_directory)
    # Point the simulation clock at the export year; restore it afterwards.
    old_time = SimulationState().get_current_time()
    SimulationState().set_current_time(year)
    values = attribute_cache.load_table(table_name)
    self.assertEqual(set(['keyid', 'works']), set(values.keys()))
    self.assert_(ma.allequal(array([1, 2, 3, 4, 5]), values['keyid']))
    # 'works' is a logical dbf column; -1 marks an undefined value.
    self.assert_(ma.allequal(array([1, 1, -1, 0, 0]), values['works']))
    SimulationState().set_current_time(old_time)
示例6: opusRun
def opusRun(progressCB, logCB, config):
    """Cache the travel-model output table(s) into the opus attribute cache.

    config must hold a "travel_model_configuration" dict with the entries
    travel_data_dir, year_dir, travel_data_path and travel_data_table_name.
    """
    tm_config = config["travel_model_configuration"]
    # Stringify keys and values in place.  Iterate over a SNAPSHOT of the
    # items: the original iterated tm_config.iteritems() while assigning
    # tm_config[str(key)], which raises "dictionary changed size during
    # iteration" whenever str(key) is a key object not already present
    # (e.g. a unicode key in Python 2).
    for key, val in list(tm_config.items()):
        tm_config[str(key)] = str(val)
    opus_data_directory = tm_config["travel_data_dir"]
    opus_data_year = tm_config["year_dir"]
    csv_data_path = tm_config["travel_data_path"]
    table_name = tm_config["travel_data_table_name"]

    input_storage = csv_storage(storage_location=csv_data_path)
    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    output_storage = attribute_cache.get_flt_storage_for_year(opus_data_year)
    SimulationState().set_current_time(opus_data_year)
    SessionConfiguration(new_instance=True, package_order=[], in_storage=AttributeCache())

    if table_name == "ALL":
        logCB("caching all tables...\n")
        lst = input_storage.get_table_names()
    else:
        lst = [table_name]
    for i in lst:
        logCB("Exporting table '%s' to year %s of cache located at %s...\n" % (i, opus_data_year, opus_data_directory))
        ExportStorage().export_dataset(dataset_name=i, in_storage=input_storage, out_storage=output_storage)
    logCB("Successfully exported all tables.")
示例7: opusRun
def opusRun(progressCB, logCB, params):
    """Export tables from an opus attribute cache to csv files, optionally
    running a follow-up command on the produced files."""
    # Normalize every key and value of the incoming parameters to str.
    params_dict = dict((str(key), str(val)) for key, val in params.iteritems())
    # Output csv data path
    csv_data_path = params_dict['csv_data_path']
    # Data classification - Database (must be specified)
    opus_data_directory = params_dict['opus_data_directory']
    # Data classification - Dataset (explicit or ALL)
    opus_data_year = params_dict['opus_data_year']
    # Data classification - Array (explicit or ALL)
    opus_table_name = params_dict['opus_table_name']
    execute_after_export = params_dict['execute_after_export']

    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    # Years are the 4-digit subdirectories of the cache.
    attribute_cache_years = [int(y) for y in os.listdir(opus_data_directory)
                             if y.isdigit() and len(y) == 4]
    if opus_data_year != 'ALL':
        attribute_cache_years = [opus_data_year]
    for year in attribute_cache_years:
        input_storage = attribute_cache.get_flt_storage_for_year(year)
        output_storage = csv_storage(storage_location=csv_data_path)
        SimulationState().set_current_time(year)
        SessionConfiguration(new_instance=True,
                             package_order=[],
                             in_storage=AttributeCache())
        if opus_table_name != 'ALL':
            opus_table_name_list = [opus_table_name]
        else:
            opus_table_name_list = input_storage.get_table_names()
        for table in opus_table_name_list:
            logCB("Exporting %s, %s, %s\n" % (table, year, opus_data_directory))
            ExportStorage().export_dataset(
                dataset_name=table,
                in_storage=input_storage,
                out_storage=output_storage,
            )
    logCB("Successfully exported all datasets.")
    if execute_after_export:
        # Hand the exported file paths to the user-supplied command.
        file_name_list = [output_storage._get_file_path_for_table(t)
                          for t in opus_table_name_list]
        subprocess.Popen([execute_after_export] + file_name_list)
示例8: opusRun
def opusRun(progressCB, logCB, params):
    """Export tables from an SQL database into a year of an opus attribute cache.

    table_name may be 'ALL' or a whitespace-separated list of table names.
    Progress is reported through progressCB as a 0-100 percentage.
    """
    params_dict = {}
    for key, val in params.iteritems():
        params_dict[str(key)] = str(val)
    opus_data_directory = params_dict['opus_data_directory']
    opus_data_directory = paths.prepend_opus_home_if_relative(opus_data_directory)
    opus_data_year = params_dict['opus_data_year']
    database_name = params_dict['database_name']
    table_name = params_dict['table_name']
    database_server_connection = params_dict['database_server_connection']
    overwrite = params_dict['overwrite']

    dbs_config = DatabaseServerConfiguration(database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration=dbs_config)
    opusdb = server.get_database(database_name=database_name, create_if_doesnt_exist=False)
    input_storage = sql_storage(storage_location=opusdb)
    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    output_storage = attribute_cache.get_flt_storage_for_year(opus_data_year)
    SimulationState().set_current_time(opus_data_year)
    SessionConfiguration(new_instance=True,
                         package_order=[],
                         in_storage=AttributeCache())

    if table_name == 'ALL':
        lst = input_storage.get_table_names()
    else:
        # Accept a whitespace-separated list of table names.
        lst = re.split(' +', table_name.strip())
    tables = len(lst)
    lst_out = create_list_string(lst, ', ')
    logCB('caching tables:\n%s\n' % lst_out)
    for j, i in enumerate(lst, start=1):
        logCB("Exporting table '%s' to year %s of cache located at %s...\n" %
              (i, opus_data_year, opus_data_directory))
        try:
            ExportStorage().export_dataset(
                dataset_name=i,
                in_storage=input_storage,
                out_storage=output_storage,
                overwrite=overwrite,
            )
        except Exception as e:
            # BUG FIX: the original used a bare 'except:', which also swallows
            # KeyboardInterrupt/SystemExit and discards the failure reason.
            # Catch Exception only and include the cause in the log.
            logCB('Error in exporting %s: %s\n' % (i, e))
        progressCB(100 * j / tables)
    logCB('successfully cached tables:\n%s\n' % lst_out)
示例9: DatasetSizeModel
class DatasetSizeModel(Model):
    """Checks if all datasets after collapsing over all years have attributes of the same size."""

    def __init__(self, directory=None):
        # Default to the simulation's current cache directory.
        if directory is None:
            directory = SimulationState().get_cache_directory()
        self.cache = AttributeCache(directory)

    def run(self):
        """Scan every table of the cache and report columns whose length
        disagrees with the table's majority column size.

        Returns a pandas Series, indexed by table name, counting the
        mismatching columns per table (0 for consistent tables).  The
        simulation clock is restored before returning.
        """
        year_orig = SimulationState().get_current_time()
        years = self.years_in_cache()
        SimulationState().set_current_time(years[0])
        storages = {}
        for year in years:
            storages[year] = flt_storage(os.path.join(self.cache.get_storage_location(), '%s' % year))
        tables = self.cache.get_table_names()
        counts = pd.Series(np.zeros(len(tables), dtype="int32"), index=tables)
        for table in tables:
            columns = self.cache._get_column_names_and_years(table)
            values = []
            names = []
            colyears = []
            for col, year in columns:
                if col in names:
                    # A column can appear in several years; only the first
                    # (most recent) occurrence counts.
                    continue
                data = storages[year].load_table(table, column_names=col)
                values.append(data[col].size)
                names.append(col)
                colyears.append(year)
            values = np.array(values)
            if all(values == values[0]):
                continue  # all attributes have the same size
            # There is an inconsistency in attribute lengths: list every
            # column that differs from the most common (majority) size.
            names = np.array(names)
            colyears = np.array(colyears)
            sizes, size_counts = np.unique(values, return_counts=True)
            majority_size = sizes[np.argmax(size_counts)]
            # BUG FIX: '!=' replaces the Python-2-only '<>' operator.
            idx = np.where(values != majority_size)[0]
            df = pd.DataFrame({"column": names[idx], "year": colyears[idx], "size": values[idx]})
            # pd.concat replaces DataFrame.append, which is deprecated and
            # removed in pandas 2.x.
            df = pd.concat([df,
                            pd.DataFrame({"column": np.array(["all other columns"]),
                                          "year": np.array([years[0]]),
                                          "size": np.array([majority_size])})])
            logger.log_status("Inconsistency in table ", table, ":\n", df)
            counts[table] = df.shape[0] - 1
        SimulationState().set_current_time(year_orig)
        logger.log_status("Model total:", counts.sum(), ' size inconsistencies found.')
        return counts

    def years_in_cache(self):
        """Sorted list of the year subdirectories present in the cache."""
        return self.cache._get_sorted_list_of_years(start_with_current_year=False)
示例10: setup_environment
def setup_environment(cache_directory, year, package_order, additional_datasets=None):
    """Initialize the opus simulation environment for one cache year.

    Points SimulationState at cache_directory/year, creates a fresh
    SessionConfiguration backed by the attribute cache, and returns its
    dataset pool.  Entries of additional_datasets (name -> dataset) replace
    the pool's datasets of the same name.
    """
    # BUG FIX: the original used the mutable default 'additional_datasets={}',
    # a shared-state hazard; use None and substitute an empty dict locally.
    if additional_datasets is None:
        additional_datasets = {}
    gc.collect()
    ss = SimulationState(new_instance=True)
    ss.set_cache_directory(cache_directory)
    ss.set_current_time(year)
    ac = AttributeCache()
    storage = ac.get_flt_storage_for_year(year)
    sc = SessionConfiguration(new_instance=True,
                              package_order=package_order,
                              in_storage=ac)
    logger.log_status("Setup environment for year %s. Use cache directory %s." % (year, storage.get_storage_location()))
    dp = sc.get_dataset_pool()
    for name, ds in additional_datasets.iteritems():
        dp.replace_dataset(name, ds)
    return dp
示例11: test
def test(self):
    """Write a cache table, export it to dbf, and verify the dbf contents."""
    # Set up a test cache.
    storage = AttributeCache(cache_directory=self._temp_dir)
    SimulationState().set_current_time(2000)
    table_name = 'foo'
    values = {
        'attribute1': array([1, 2, 3], dtype=int32),
        'attribute2': array([4, 5, 6], dtype=int32),
    }
    storage.write_table(table_name, values)
    table_dir = os.path.join(self._temp_dir, '2000', table_name)
    self.assert_(os.path.exists(table_dir))
    actual = set(os.listdir(table_dir))
    # File suffix encodes endianness, e.g. 'li4' on little-endian machines.
    expected = set(['attribute1.%(endian)si4' % replacements, 'attribute2.%(endian)si4' % replacements])
    self.assertEqual(expected, actual)
    exporter = ExportCacheToDbfTableCommand(
        cache_directory=self._temp_dir,
        year='2000',
        table_name=table_name,
        dbf_directory=self._temp_dir,
        decimalcount=4,
    )
    exporter.execute()
    out_storage = dbf_storage(self._temp_dir)
    db = _dbf_class(out_storage._get_file_path_for_table(table_name))
    length = max([len(values[key]) for key in values.keys()])
    i = 0
    field_type = {}
    for name, type in [field.fieldInfo()[:2] for field in db.header.fields]:
        field_type[name] = type
    for rec in db:
        for key in values.keys():
            # BUG FIX: the original compared with "is 'F'", which tests object
            # identity and only worked via CPython string interning; use '=='.
            if field_type[key.upper()] == 'F':
                # Float fields: compare to 4 decimal places (the decimalcount).
                self.assertAlmostEqual(values[key][i], rec[key], 4)
            else:
                self.assertEqual(values[key][i], rec[key])
        i = i + 1
    # assertEqual replaces the deprecated alias assertEquals.
    self.assertEqual(length, i, msg="More values expected than the dbf file contains")
    db.close()
示例12: opusRun
def opusRun(progressCB, logCB, params):
    """Export tables from an opus attribute cache to esri storage (when available)."""
    # Normalize every key and value of the incoming parameters to str.
    params_dict = dict((str(key), str(val)) for key, val in params.iteritems())
    # Output esri data path
    esri_data_path = params_dict['esri_data_path']
    # Data classification - Database (must be specified)
    opus_data_directory = params_dict['opus_data_directory']
    # Data classification - Dataset (explicit or ALL)
    opus_data_year = params_dict['opus_data_year']
    # Data classification - Array (explicit or ALL)
    opus_table_name = params_dict['opus_table_name']

    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    # Years are the 4-digit subdirectories of the cache.
    attribute_cache_years = [int(y) for y in os.listdir(opus_data_directory)
                             if y.isdigit() and len(y) == 4]
    if opus_data_year != 'ALL':
        attribute_cache_years = [opus_data_year]
    for year in attribute_cache_years:
        input_storage = attribute_cache.get_flt_storage_for_year(year)
        # esri output is only possible when the esri libraries were importable.
        if esri_is_avail:
            output_storage = esri_storage(storage_location=esri_data_path)
        else:
            output_storage = None
        SimulationState().set_current_time(year)
        SessionConfiguration(new_instance=True,
                             package_order=[],
                             in_storage=AttributeCache())
        if opus_table_name != 'ALL':
            table_list = [opus_table_name]
        else:
            table_list = input_storage.get_table_names()
        for table in table_list:
            logCB("Exporting %s, %s, %s\n" % (table, year, opus_data_directory))
            ExportStorage().export_dataset(
                dataset_name=table,
                in_storage=input_storage,
                out_storage=output_storage,
            )
示例13: run
def run(self, directory=None, check_size=True):
    """
    Compare the structure of the cache in "directory" against self.reference_storage.

    "directory" is the cache to be compared to the reference. It should not include
    the year as the model checks all years. Set "check_size" to False if no size
    check of the datasets is required.

    Returns a pandas DataFrame with one row per table that differs (missing
    columns, extra columns, or — when check_size is on — a different row count).
    """
    if directory is None:
        directory = SimulationState().get_cache_directory()
    self.cache = AttributeCache(directory)
    year_orig = SimulationState().get_current_time()
    years = self.years_in_cache()
    SimulationState().set_current_time(years[0])
    storages = {}
    for year in years:
        storages[year] = flt_storage(os.path.join(self.cache.get_storage_location(), '%s' % year))
    df = pd.DataFrame(columns=["Table", "Less-than-ref", "More-than-ref", "Year", "Size", "Size-ref"])
    tables = self.cache.get_table_names()
    for table in tables:
        columns_list = self.cache.get_column_names(table)
        # The builtin set replaces the long-deprecated sets.Set.
        columns = set(columns_list)
        ref_columns_list = self.reference_storage.get_column_names(table, lowercase=True)
        ref_columns = set(ref_columns_list)
        more = columns.difference(ref_columns)
        less = ref_columns.difference(columns)
        samesize = True
        if check_size:
            # Dataset size is taken from the first column of each table.
            table_size = self.cache.load_table(table, columns_list[0])[columns_list[0]].size
            reftable_size = self.reference_storage.load_table(table, ref_columns_list[0])[ref_columns_list[0]].size
            # BUG FIX: '!=' replaces the Python-2-only '<>' operator.
            if table_size != reftable_size:
                samesize = False
        if len(more) == 0 and len(less) == 0 and samesize:
            continue
        df.loc[df.shape[0]] = [table, ', '.join(less), ', '.join(more), '', 0, 0]
        if len(more) == 0 and samesize:
            continue
        # if there are columns in the "more" column, write out the corresponding years
        columns_and_years = self.cache._get_column_names_and_years(table)
        more_years = []
        for col, year in columns_and_years:
            if col in more:
                more_years.append(year)
        df.loc[df.shape[0]-1, "Year"] = ', '.join(np.unique(np.array(more_years).astype("str")))
        if not samesize:  # there is difference in table sizes
            df.loc[df.shape[0]-1, "Size"] = table_size
            df.loc[df.shape[0]-1, "Size-ref"] = reftable_size
    # BUG FIX: the original saved year_orig but never restored it, leaving the
    # simulation clock at years[0]; restore it as DatasetSizeModel.run does.
    SimulationState().set_current_time(year_orig)
    if not check_size or (df['Size'].sum() == 0 and df['Size-ref'].sum() == 0):
        # remove the size columns if not used
        del df['Size']
        del df['Size-ref']
    if df.shape[0] > 0:
        logger.log_status("Differences in data structure relative to %s:" % self.reference_storage.get_storage_location())
        logger.log_status(df)
    else:
        logger.log_status("Data structure corresponds to the one in %s" % self.reference_storage.get_storage_location())
    return df
示例14: opusRun
def opusRun(progressCB, logCB, params):
    """Export tables from an opus attribute cache into an SQL database."""
    # Normalize every key and value of the incoming parameters to str.
    params_dict = dict((str(key), str(val)) for key, val in params.iteritems())
    database_name = params_dict['database_name']
    opus_data_directory = params_dict['opus_data_directory']
    opus_data_year = params_dict['opus_data_year']
    opus_table_name = params_dict['opus_table_name']
    database_server_connection = params_dict['database_server_connection']

    dbs_config = DatabaseServerConfiguration(database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration=dbs_config)
    opusdb = server.get_database(database_name=database_name)

    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    # Years are the 4-digit subdirectories of the cache.
    attribute_cache_years = [int(y) for y in os.listdir(opus_data_directory)
                             if y.isdigit() and len(y) == 4]
    if opus_data_year != 'ALL':
        attribute_cache_years = [opus_data_year]
    for year in attribute_cache_years:
        # Cache is the input side here; the SQL database is the output side.
        input_storage = attribute_cache.get_flt_storage_for_year(year)
        output_storage = sql_storage(storage_location=opusdb)
        SimulationState().set_current_time(year)
        SessionConfiguration(new_instance=True,
                             package_order=[],
                             in_storage=AttributeCache())
        if opus_table_name != 'ALL':
            table_list = [opus_table_name]
        else:
            table_list = input_storage.get_table_names()
        for table in table_list:
            logCB("Exporting %s, %s, %s\n" % (table, year, opus_data_directory))
            ExportStorage().export_dataset(
                dataset_name=table,
                in_storage=input_storage,
                out_storage=output_storage,
            )
示例15: opusRun
def opusRun(progressCB, logCB, params):
    """Export one SQL table (or all of them) into a year of an opus attribute cache."""
    # Normalize every key and value of the incoming parameters to str.
    params_dict = dict((str(key), str(val)) for key, val in params.iteritems())
    opus_data_directory = params_dict['opus_data_directory']
    opus_data_year = params_dict['opus_data_year']
    database_name = params_dict['database_name']
    table_name = params_dict['table_name']
    database_server_connection = params_dict['database_server_connection']

    dbs_config = DatabaseServerConfiguration(database_configuration=database_server_connection)
    server = DatabaseServer(database_server_configuration=dbs_config)
    opusdb = server.get_database(database_name=database_name)
    input_storage = sql_storage(storage_location=opusdb)
    attribute_cache = AttributeCache(cache_directory=opus_data_directory)
    output_storage = attribute_cache.get_flt_storage_for_year(opus_data_year)
    SimulationState().set_current_time(opus_data_year)
    SessionConfiguration(new_instance=True,
                         package_order=[],
                         in_storage=AttributeCache())

    if table_name == 'ALL':
        logCB('caching all tables...\n')
        for table in input_storage.get_table_names():
            ExportStorage().export_dataset(
                dataset_name=table,
                in_storage=input_storage,
                out_storage=output_storage,
            )
    else:
        logCB("Exporting table '%s' to year %s of cache located at %s...\n" %
              (table_name, opus_data_year, opus_data_directory))
        ExportStorage().export_dataset(
            dataset_name=table_name,
            in_storage=input_storage,
            out_storage=output_storage)