This page collects representative usage examples of the Python method opus_core.store.attribute_cache.AttributeCache.write_table. If you are wondering what AttributeCache.write_table does, how to call it, or where it fits, the curated examples below should help; you can also look further into the containing class, opus_core.store.attribute_cache.AttributeCache.
Eight code examples of AttributeCache.write_table are shown below, ordered by popularity by default. The snippets assume the usual Opus test-suite imports (numpy, SimulationState, opus_unittest, and so on), which the source page omits.
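Before the examples, here is a minimal, self-contained sketch of the typical call pattern. The directory prefix, table name, and data are illustrative only; the import paths and the year-based cache layout follow what the examples below use.

# Minimal usage sketch (illustrative values; import paths as used in the
# examples below). AttributeCache.write_table stores each attribute of a
# table as its own binary file under <cache_directory>/<year>/<table_name>/.
import tempfile
import numpy as np
from opus_core.store.attribute_cache import AttributeCache
from opus_core.simulation_state import SimulationState

cache_dir = tempfile.mkdtemp(prefix='opus_tmp_attribute_cache')
storage = AttributeCache(cache_dir)
SimulationState().set_current_time(2000)  # write_table targets the current year
storage.write_table('test_table', {'int_column': np.array([100, 70], dtype='int32')})
# e.g. <cache_dir>/2000/test_table/int_column.li4 on a little-endian machine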
Example 1: QCDataSizeTests
# Required import: from opus_core.store.attribute_cache import AttributeCache [as alias]
# Or: from opus_core.store.attribute_cache.AttributeCache import write_table [as alias]
class QCDataSizeTests(opus_unittest.OpusTestCase):
    def setUp(self):
        self.temp_dir = tempfile.mkdtemp(prefix='opus_tmp_attribute_cache')
        self.table_name = 'test_table'
        self.storage = AttributeCache(self.temp_dir)

    def tearDown(self):
        if os.path.exists(self.temp_dir):
            rmtree(self.temp_dir)

    def test_detect(self):
        # Create a cache where the table has attributes of different lengths:
        # size 2 in 1980 and size 3 in 1979.
        SimulationState().set_current_time(1980)
        table_data = {'int_column': np.array([100, 70], dtype="int32"),
                      'bool_column': np.array([False, True])}
        # The file name will be e.g. 'int_column.li4' on a little-endian machine.
        self.storage.write_table(self.table_name, table_data)
        SimulationState().set_current_time(1979)
        table_data = {'flt_column': np.array([10, 70, 5.7], dtype="float32")}
        self.storage.write_table(self.table_name, table_data)
        SimulationState().set_current_time(2000)
        res = DatasetSizeModel(self.temp_dir).run()
        self.assertEqual(res.sum(), 1)
        # The model run should have restored the original year.
        self.assertEqual(SimulationState().get_current_time(), 2000)
Example 2: setUp
# Required import: from opus_core.store.attribute_cache import AttributeCache [as alias]
# Or: from opus_core.store.attribute_cache.AttributeCache import write_table [as alias]
def setUp(self):
    self.temp_dir = tempfile.mkdtemp(prefix='opus_tmp_attribute_cache')
    table_name = 'test_table'
    storage = AttributeCache(self.temp_dir)
    self.temp_refdir = tempfile.mkdtemp(prefix='opus_tmp_reference_cache')
    refstorage = AttributeCache(self.temp_refdir)
    # Create two caches with the same table but with different attributes.
    # The simulation cache has two years.
    SimulationState().set_current_time(2010)
    table_data = {'int_column': np.array([100, 70], dtype="int32"),
                  'bool_column': np.array([False, True])}
    storage.write_table(table_name, table_data)
    SimulationState().set_current_time(2000)
    table_data = {'flt_column': np.array([10, 70], dtype="float32")}
    storage.write_table(table_name, table_data)
    # Create the reference cache.
    SimulationState().set_current_time(2005)
    table_data = {'str_column': np.array(['a', 'b']),
                  'bool_column': np.array([False, True])}
    refstorage.write_table(table_name, table_data)
    # Create another simulation cache with a table of a different length.
    self.temp_dir2 = tempfile.mkdtemp(prefix='opus_tmp_attribute_cache2')
    storage = AttributeCache(self.temp_dir2)
    SimulationState().set_current_time(2010)
    table_data = {'str_column': np.array(['a', 'b', 'c']),
                  'bool_column': np.array([False, True, True])}
    storage.write_table(table_name, table_data)
Example 3: test
# Required import: from opus_core.store.attribute_cache import AttributeCache [as alias]
# Or: from opus_core.store.attribute_cache.AttributeCache import write_table [as alias]
def test(self):
    # Set up a test cache.
    storage = AttributeCache(cache_directory=self._temp_dir)
    SimulationState().set_current_time(2000)
    table_name = 'foo'
    values = {
        'attribute1': array([1, 2, 3], dtype=int32),
        'attribute2': array([4, 5, 6], dtype=int32),
    }
    storage.write_table(table_name, values)
    table_dir = os.path.join(self._temp_dir, '2000', table_name)
    self.assertTrue(os.path.exists(table_dir))
    # 'replacements' (defined elsewhere in the test module) maps 'endian'
    # to the machine's byte-order prefix, e.g. 'l' on a little-endian machine.
    actual = set(os.listdir(table_dir))
    expected = set(['attribute1.%(endian)si4' % replacements,
                    'attribute2.%(endian)si4' % replacements])
    self.assertEqual(expected, actual)
    exporter = ExportCacheToDbfTableCommand(
        cache_directory=self._temp_dir,
        year='2000',
        table_name=table_name,
        dbf_directory=self._temp_dir,
        decimalcount=4,
    )
    exporter.execute()
    out_storage = dbf_storage(self._temp_dir)
    db = _dbf_class(out_storage._get_file_path_for_table(table_name))
    length = max([len(values[key]) for key in values.keys()])
    i = 0
    field_type = {}
    for name, ftype in [field.fieldInfo()[:2] for field in db.header.fields]:
        field_type[name] = ftype
    for rec in db:
        for key in values.keys():
            # Compare with '==' rather than 'is': string identity is not guaranteed.
            if field_type[key.upper()] == 'F':
                self.assertAlmostEqual(values[key][i], rec[key], 4)
            else:
                self.assertEqual(values[key][i], rec[key])
        i = i + 1
    self.assertEqual(length, i, msg="More values expected than the dbf file contains")
    db.close()
Example 4: MergeCache
# Required import: from opus_core.store.attribute_cache import AttributeCache [as alias]
# Or: from opus_core.store.attribute_cache.AttributeCache import write_table [as alias]
class MergeCache(Model):
    """Merge multiple years of one cache directory into a single one
    that can be used, for example, for a warm start."""
    def __init__(self, directory):
        self.storage = AttributeCache(directory)

    def run(self, year, cleanup_settings={}):
        SimulationState().set_current_time(year)
        tables = self.storage.get_table_names()
        for table in tables:
            tabdata = self.storage.load_table(table)
            # Delete attributes listed in cleanup_settings before re-writing.
            if table in cleanup_settings.keys():
                for attr in cleanup_settings[table]:
                    if attr in tabdata.keys():
                        logger.log_status('Deleting attribute %s in %s.' % (attr, table))
                        del tabdata[attr]
            self.storage.write_table(table, tabdata)
        logger.log_status('Deleting all computed tables.')
        self.storage.delete_computed_tables()
        logger.log_status('Cache directory merged into %s' % year)
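A hypothetical usage sketch (the cache path, table name, and attribute name are placeholders, not from the original source): merge every cached year into 2010, dropping one attribute from the households table while re-writing.

# All values below are illustrative. MergeCache re-writes every table into
# the year set via SimulationState, then deletes the computed tables.
merger = MergeCache('/path/to/simulation_cache')
merger.run(2010, cleanup_settings={'households': ['computed_income_class']})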
Example 5: setUp
# Required import: from opus_core.store.attribute_cache import AttributeCache [as alias]
# Or: from opus_core.store.attribute_cache.AttributeCache import write_table [as alias]
def setUp(self):
    household_data = {
        'household_id': array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]),
        'blockgroup_id': array([1, 1, 1, 1, 2, 2, 2, 2, 2, 3]),
    }
    fraction_data = {
        # fraction_id is unused, but necessary for dataset_pool to fetch the data
        'fraction_id': array([1, 2, 3, 4, 5, 6, 6]),
        'blockgroup_id': array([1, 1, 1, 2, 2, 2, 3]),
        'zone_id': array([1, 2, 3, 3, 4, 5, 6]),
        'fraction': array([0.25, 0.25, 0.5, 0.2, 0.4, 0.4, 1.0]),
    }
    blockgroup_data = {
        # unused by the model, for result verification only
        'blockgroup_id': array([1, 2, 3]),
    }
    zone_data = {
        # unused by the model, for result verification only
        'zone_id': array([1, 2, 3, 4, 5, 6]),
    }
    self.tmp_dir = tempfile.mkdtemp(prefix='urbansim_tmp')
    SimulationState().set_cache_directory(self.tmp_dir)
    attribute_cache = AttributeCache()
    self.dataset_pool = SessionConfiguration(new_instance=True,
                                             package_order=['urbansim', 'opus_core'],
                                             in_storage=attribute_cache).get_dataset_pool()
    attribute_cache.write_table(table_name='households', table_data=household_data)
    attribute_cache.write_table(table_name='fractions', table_data=fraction_data)
    attribute_cache.write_table(table_name='blockgroups', table_data=blockgroup_data)
    attribute_cache.write_table(table_name='zones', table_data=zone_data)
    self.household = self.dataset_pool.get_dataset('household')
    self.fraction = self.dataset_pool.get_dataset('fraction')
    self.blockgroup = self.dataset_pool.get_dataset('blockgroup')
    self.zone = self.dataset_pool.get_dataset('zone')
Example 6: Tests
# Required import: from opus_core.store.attribute_cache import AttributeCache [as alias]
# Or: from opus_core.store.attribute_cache.AttributeCache import write_table [as alias]
class Tests(opus_unittest.OpusTestCase):
    def setUp(self):
        building_data = {
            "building_id": array([1, 2, 3, 4, 5, 6, 7, 8]),
            "parcel_id": array([1, 2, 2, 3, 4, 4, 5, 5]),
            "non_residential_sqft": array([6, 2, 3, 6, 1, 2, 5, 0]),
            "residential_units": array([0, 0, 0, 0, 0, 0, 1, 1]),
            "price_per_unit": array([50, 21, 32, 15, 60, 90, 100, 200]),
        }
        parcel_data = {
            "parcel_id": array([1, 2, 3, 4, 5]),
            "generic_land_use_type_id": array([6, 6, 3, 4, 1]),
            "raz_id": array([3, 4, 5, 5, 6]),
        }
        job_data = {
            "job_id": array([1, 2, 3, 4, 5, 6, 7, 8]),
            "building_id": array([1, 1, 2, 3, 6, 1, 6, 4]),
            # for reference (derived via parcels):
            # 'parcel_id': array([1, 1, 2, 2, 4, 1, 4, 3]),
            # 'raz_id':    array([3, 3, 4, 4, 5, 3, 5, 5]),
            "sector_id": array([13, 12, 13, 12, 13, 13, 12, 13]),
            "dummy_id": array([1, 2, 3, 4, 5, 6, 7, 8]),
        }
        self.tmp_dir = tempfile.mkdtemp(prefix="urbansim_tmp")
        SimulationState().set_cache_directory(self.tmp_dir)
        self.attribute_cache = AttributeCache()
        self.dataset_pool = SessionConfiguration(
            new_instance=True, package_order=["urbansim", "opus_core"], in_storage=self.attribute_cache
        ).get_dataset_pool()
        self.attribute_cache.write_table(table_name="buildings", table_data=building_data)
        self.attribute_cache.write_table(table_name="parcels", table_data=parcel_data)
        self.attribute_cache.write_table(table_name="jobs", table_data=job_data)
        self.jobs = self.dataset_pool.get_dataset("job")
        self.buildings = self.dataset_pool.get_dataset("building")

    def tearDown(self):
        shutil.rmtree(self.tmp_dir)

    def test_add_and_remove_agents(self):
        scheduled_events_data = {
            "year": array([2000, 2000, 2000, 2000, 2000]),
            "action": array(["remove", "remove", "add", "add", "target"]),
            "amount": array([1, 1, 4, 3, 7]),
            "sector_id": array([13, 12, -1, 11, 12]),
            "building_id": array([-1, -1, -1, 8, -1]),
            "raz_id": array([3, 5, 5, -1, -1]),
        }
        storage = StorageFactory().get_storage("dict_storage")
        storage.write_table(table_name="events", table_data=scheduled_events_data)
        events_dataset = Dataset(in_storage=storage, in_table_name="events", id_name=[])
        model = ScheduledEventsModel(self.jobs, scheduled_events_dataset=events_dataset)
        model.run(year=2000, dataset_pool=self.dataset_pool)
        # Check that there are indeed 18 jobs in total after running the model:
        # 8 original - 2 removed + 4 + 3 added + 5 to reach the sector-12 target of 7.
        results = self.jobs.size()
        should_be = 18
        self.assertEqual(should_be, results, "Error, should_be: %s, but result: %s" % (should_be, results))
        # Examine each action in turn:
        results = logical_and(self.jobs.get_attribute("sector_id") == 13,
                              self.jobs.get_attribute("raz_id") == 3).sum()
        should_be = 2 - 1
        self.assertEqual(should_be, results, "Error, should_be: %s, but result: %s" % (should_be, results))
        results = logical_and(self.jobs.get_attribute("sector_id") == 12,
                              self.jobs.get_attribute("raz_id") == 5).sum()
        should_be = 1 - 1
        self.assertEqual(should_be, results, "Error, should_be: %s, but result: %s" % (should_be, results))
        results = (self.jobs.get_attribute("raz_id") == 5).sum()
        should_be = 3 - 1 + 4
        self.assertEqual(should_be, results, "Error, should_be: %s, but result: %s" % (should_be, results))
        results = logical_and(self.jobs.get_attribute("sector_id") == 11,
                              self.jobs.get_attribute("building_id") == 8).sum()
        should_be = 0 + 3
        self.assertEqual(should_be, results, "Error, should_be: %s, but result: %s" % (should_be, results))
        results = (self.jobs.get_attribute("sector_id") == 12).sum()
        should_be = 7
        self.assertEqual(should_be, results, "Error, should_be: %s, but result: %s" % (should_be, results))
#......... (the rest of this example's code is omitted here) .........
Example 7: setUp
# Required import: from opus_core.store.attribute_cache import AttributeCache [as alias]
# Or: from opus_core.store.attribute_cache.AttributeCache import write_table [as alias]
def setUp(self):
    building_data = {
        'building_id': array([1, 2, 3, 4, 5, 6, 7, 8]),
        'parcel_id': array([1, 2, 2, 3, 4, 4, 5, 5]),
        'non_residential_sqft': array([6, 2, 3, 6, 1, 2, 5, 0]),
        'residential_units': array([0, 0, 0, 0, 0, 0, 1, 1]),
    }
    parcel_data = {
        'parcel_id': array([1, 2, 3, 4, 5]),
        'generic_land_use_type_id': array([6, 6, 3, 4, 1]),
        'raz_id': array([3, 4, 5, 5, 6]),
    }
    job_data = {
        'job_id': array([1, 2, 3, 4, 5, 6, 7, 8]),
        'building_id': array([1, 1, 2, 3, 6, 1, 6, 4]),
        'sector_id': array([13, 12, 13, 12, 13, 13, 12, 13]),
        'dummy_id': array([1, 2, 3, 4, 5, 6, 7, 8]),
    }
    household_data = {
        'household_id': array([1, 2]),
        'building_id': array([7, 8]),
        'persons': array([3, 4]),
    }
    person_data = {
        'person_id': array([1, 2, 3, 4, 5, 6, 7]),
        'household_id': array([1, 1, 1, 2, 2, 2, 2]),
        'job_id': array([2, 1, -1, -1, 3, 4, 7]),
    }
    refinement_data = {
        'refinement_id': arange(1, 8),
        'year': array([2021, 2021, 2021, 2022, 2023, 2024, 2024]),
        'transaction_id': array([1, 1, 1, 2, 3, 1, 1]),
        'action': array(['subtract', 'subtract', 'add', 'target', 'add', 'add', 'set_value']),
        'amount': array([2, 1, 4, 7, 1, 1, -1]),
        'agent_dataset': array(['job',
                                'job',
                                'job',
                                'household',
                                'household',
                                'household',
                                'person']),
        'agent_expression': array(['job.sector_id==13',
                                   'job.sector_id==13',
                                   '',
                                   'household.household_id>0',
                                   'household.persons>5',
                                   'household.persons==3',
                                   'person.job_id']),
        'location_expression': array(['urbansim.building.raz_id==3',
                                      'urbansim.building.raz_id==4',
                                      '(urbansim.building.raz_id==5) * (building.disaggregate(parcel.generic_land_use_type_id)==4)',
                                      'urbansim.building.raz_id==6',
                                      'urbansim.building.raz_id==6',
                                      'urbansim.building.raz_id==6',
                                      'household.refinement_id==6']),
        'location_capacity_attribute': array(['',
                                              'non_residential_sqft',
                                              'non_residential_sqft',
                                              'residential_units',
                                              'residential_units',
                                              '',
                                              '']),
    }
    self.tmp_dir = tempfile.mkdtemp(prefix='urbansim_tmp')
    SimulationState().set_cache_directory(self.tmp_dir)
    attribute_cache = AttributeCache()
    self.dataset_pool = SessionConfiguration(new_instance=True,
                                             package_order=['urbansim', 'opus_core'],
                                             in_storage=attribute_cache).get_dataset_pool()
    attribute_cache.write_table(table_name='buildings', table_data=building_data)
    attribute_cache.write_table(table_name='parcels', table_data=parcel_data)
    attribute_cache.write_table(table_name='households', table_data=household_data)
    attribute_cache.write_table(table_name='jobs', table_data=job_data)
    attribute_cache.write_table(table_name='persons', table_data=person_data)
    attribute_cache.write_table(table_name='refinements', table_data=refinement_data)
    self.refinement = self.dataset_pool.get_dataset('refinement')
    self.jobs = self.dataset_pool.get_dataset('job')
    self.persons = self.dataset_pool.get_dataset('person')
    self.hhs = self.dataset_pool.get_dataset('household')
    self.buildings = self.dataset_pool.get_dataset('building')
Example 8: Tests
# Required import: from opus_core.store.attribute_cache import AttributeCache [as alias]
# Or: from opus_core.store.attribute_cache.AttributeCache import write_table [as alias]
class Tests(opus_unittest.OpusTestCase):
    """unittest"""
    def setUp(self, attribute_cache=True):
        hh_data = {
            'household_id': array([1, 2, 3, 4]),
            'building_id': array([11, 22, 33, 22]),
            'size': array([4, 3, 2, 1]),
            'income': array([51, 52, 53, 54]) * 1000,
            'keep': array([4.1, 4.2, 4.3, 4.4]),
        }
        p_data = {
            'person_id': array([1, 2, 3, 5, 6, 7, 8, 9, 10]),
            'household_id': array([1, 1, 1, 2, 2, 3, 3, 3, 4]),
            'age': array([75, 71, 29, 56, 16, 22, 20, 96, 88]),
        }
        if attribute_cache:
            self.tmp_dir = tempfile.mkdtemp(prefix='urbansim_tmp')
            SimulationState().set_cache_directory(self.tmp_dir)
            self.attribute_cache = AttributeCache()
            self.dataset_pool = SessionConfiguration(new_instance=True,
                                                     package_order=['urbansim', 'opus_core'],
                                                     in_storage=self.attribute_cache
                                                     ).get_dataset_pool()
            self.attribute_cache.write_table(table_name='households', table_data=hh_data)
            self.attribute_cache.write_table(table_name='persons', table_data=p_data)
            self.hh_ds = self.dataset_pool.get_dataset('household')
            self.p_ds = self.dataset_pool.get_dataset('person')
        else:
            storage = StorageFactory().get_storage('dict_storage')
            storage.write_table(table_name='households', table_data=hh_data)
            self.hh_ds = Dataset(in_storage=storage, in_table_name='households',
                                 dataset_name='household')
            storage.write_table(table_name='persons', table_data=p_data)
            self.p_ds = Dataset(in_storage=storage, in_table_name='persons',
                                dataset_name='person')
        # Write demographic data to an hdf5 file.
        self.dmgh_data_dir = tempfile.mkdtemp(prefix='urbansim_tmp')
        self.dmgh_data_file = os.path.join(self.dmgh_data_dir, 'demographic_data.h5')
        out_fh = h5py.File(self.dmgh_data_file, 'w')
        n_hhs = 5
        hh_dtype = {'names': ['year', 'household_id', 'income', 'head_person_id'],
                    'formats': ['i4', 'i4', 'f8', 'i4']}
        hhdata = out_fh.create_dataset('household', shape=(n_hhs,), dtype=hh_dtype,
                                       compression='gzip', compression_opts=9)
        hhs = [(2000, 5, 65000.0, 9),
               (2000, 1, 61000.0, 3),
               (2000, 2, 62000.0, 4),
               (2000, 3, 63000.0, 7),
               (2001, 1, 71000.0, 3)]
        hhdata[:] = array(hhs, dtype=hh_dtype)
        n_ps = 16
        ps_dtype = {'names': ['year', 'person_id', 'household_id', 'age'],
                    'formats': ['i4', 'i4', 'i4', 'i4']}
        psdata = out_fh.create_dataset('person', shape=(n_ps,), dtype=ps_dtype,
                                       compression='gzip', compression_opts=9)
        ps = [(2000,  1, 1, 76),
              (2000,  2, 1, 72),
              (2000,  3, 1, 30),
              (2000,  4, 2, -1),
              (2000,  5, 2, 57),
              (2000,  6, 2, 17),
              (2000,  9, 5, 67),
              (2000, 10, 5, 71),
              (2000,  7, 3, 23),
              (2000,  8, 3, 21),
              (2000, 81, 3, 2),
              (2001,  1, 1, 77),
              (2001,  2, 1, 73),
              (2001,  3, 1, 31),
              (2001,  4, 1, 35),
              (2001, 31, 1, 1)]
        psdata[:] = array(ps, dtype=ps_dtype)
        # Re-group the flat per-dataset tables into per-year groups,
        # e.g. /2000/household and /2001/person, then drop the flat tables.
        dataset_names = ['household', 'person']
        for dataset_name in dataset_names:
            for year in unique(out_fh[dataset_name][:, 'year']):
                year_str = str(year)
                group = out_fh.get(year_str, None)
                if group is None:
                    group = out_fh.create_group(year_str)
                is_year = out_fh[dataset_name][:, 'year'] == year
                group.create_dataset(dataset_name, data=out_fh[dataset_name][is_year])
            del out_fh[dataset_name]
        out_fh.close()
#......... (the rest of this example's code is omitted here) .........