This article collects typical usage examples of the Python method opus_core.session_configuration.SessionConfiguration.get_dataset. If you have been wondering what SessionConfiguration.get_dataset does, how it is called, or what real uses look like, the curated code samples below should help. You can also look further into the containing class, opus_core.session_configuration.SessionConfiguration.
The following presents 12 code examples of SessionConfiguration.get_dataset, ordered by popularity by default.
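All twelve examples share the same basic pattern: a SessionConfiguration is created once around an AttributeCache, and later code retrieves its dataset pool and asks that pool for datasets by name. The sketch below illustrates the pattern in isolation; it is not taken from any single example, and the cache path, base year, and package order are placeholders to adapt to your own Opus project.

from opus_core.session_configuration import SessionConfiguration
from opus_core.simulation_state import SimulationState
from opus_core.store.attribute_cache import AttributeCache

# Placeholder values -- point these at your own UrbanSim cache and project packages.
cache_directory = '/path/to/urbansim_cache'
base_year = 2000

# Tell the simulation state where the cache lives and which year is current,
# then build the (singleton) session configuration around an AttributeCache.
simulation_state = SimulationState(new_instance=True)
simulation_state.set_cache_directory(cache_directory)
simulation_state.set_current_time(base_year)
SessionConfiguration(new_instance=True,
                     package_order=['urbansim', 'opus_core'],
                     in_storage=AttributeCache())

# Any later code can recover the same configuration and fetch datasets from its pool.
dataset_pool = SessionConfiguration().get_dataset_pool()
households = dataset_pool.get_dataset('household')
zones = dataset_pool.get_dataset('zone')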
Example 1: run

# Required import: from opus_core.session_configuration import SessionConfiguration [as alias]
# Or: from opus_core.session_configuration.SessionConfiguration import get_dataset [as alias]
def run(self):
    dataset_pool = SessionConfiguration().get_dataset_pool()

    # Copy the tcd/tco/vpd/vpo attributes from the baseline-scenario zones onto
    # the active zone dataset, then remove them from the source dataset.
    z_scen0 = dataset_pool.get_dataset('zones_baseline')
    tcd = z_scen0['tcd']
    tco = z_scen0['tco']
    vpd = z_scen0['vpd']
    vpo = z_scen0['vpo']

    zones = dataset_pool.get_dataset('zone')
    zones.modify_attribute('tcd', tcd)
    zones.modify_attribute('tco', tco)
    zones.modify_attribute('vpd', vpd)
    zones.modify_attribute('vpo', vpo)

    z_scen0.delete_one_attribute('tcd')
    z_scen0.delete_one_attribute('tco')
    z_scen0.delete_one_attribute('vpd')
    z_scen0.delete_one_attribute('vpo')
Example 2: __init__

# Required import: from opus_core.session_configuration import SessionConfiguration [as alias]
# Or: from opus_core.session_configuration.SessionConfiguration import get_dataset [as alias]
def __init__(self, config):
    ss = SimulationState(new_instance=True)
    ss.set_current_time(config['base_year'])
    ss.set_cache_directory(config['cache_directory'])

    SessionConfiguration(new_instance=True,
                         package_order=config['dataset_pool_configuration'].package_order,
                         in_storage=AttributeCache())
    #if not os.path.exists(config['cache_directory']):  ## if cache exists, it will automatically skip
    cacher = CreateBaseyearCache()
    cache_dir = cacher.run(config)

    if 'estimation_database_configuration' in config:
        db_server = DatabaseServer(config['estimation_database_configuration'])
        db = db_server.get_database(config['estimation_database_configuration'].database_name)
        out_storage = StorageFactory().get_storage(
            'sql_storage',
            storage_location = db)
    else:
        output_cache = os.path.join(config['cache_directory'], str(config['base_year']+1))
        out_storage = StorageFactory().get_storage('flt_storage', storage_location=output_cache)

    dataset_pool = SessionConfiguration().get_dataset_pool()
    households = dataset_pool.get_dataset("household")
    buildings = dataset_pool.get_dataset("building")
    zones = dataset_pool.get_dataset("zone")
    zone_ids = zones.get_id_attribute()
    capacity_attribute_name = "residential_units"  #_of_use_id_%s" % id
    capacity_variable_name = "%s=sanfrancisco.zone.aggregate_%s_from_building" % \
                             (capacity_attribute_name, capacity_attribute_name)
    buildings.compute_variables("sanfrancisco.building.zone_id", dataset_pool=dataset_pool)
    zones.compute_variables(capacity_variable_name, dataset_pool=dataset_pool)
    building_zone_id = buildings.get_attribute('zone_id')
    # is_household_unplaced = datasets['household'].get_attribute("building_id") <= 0
    is_household_unplaced = 1  # all households are unplaced
    household_building_id = zeros(households.size(), dtype='int32') - 1  # datasets['household'].get_attribute("building_id")

    # Assign each unplaced household a building in its zone, with probability
    # proportional to the building's residential capacity.
    for zone_id in zone_ids:
        capacity = zones.get_attribute_by_id(capacity_attribute_name, zone_id)
        is_household_in_this_zone = (households.get_attribute('zone_id') == zone_id)
        is_unplaced_household_in_this_zone = is_household_in_this_zone * is_household_unplaced
        is_building_in_this_zone = (building_zone_id == zone_id)
        # if not is_household_in_this_zone.sum() <= capacity:
        if capacity == 0 or is_household_in_this_zone.sum() == 0:
            print "WARNING: zone %s has %s households but only %s units" % (zone_id, is_household_in_this_zone.sum(), capacity)
            continue

        prob = buildings.get_attribute(capacity_attribute_name) * is_building_in_this_zone / array(capacity, dtype=float64)
        r = random(sum(is_unplaced_household_in_this_zone))
        prob_cumsum = ncumsum(prob)
        index_to_bldg = searchsorted(prob_cumsum, r)
        household_building_id[where(is_unplaced_household_in_this_zone)] = buildings.get_attribute_by_index('building_id', index_to_bldg)

    # import pdb;pdb.set_trace()
    households.set_values_of_one_attribute('building_id', household_building_id)
    households.write_dataset(out_table_name='households', out_storage=out_storage)
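The placement loop in Example 2 assigns each household to a building with probability proportional to the building's residential capacity within its zone, by drawing uniform random numbers and mapping them through a cumulative sum with searchsorted. The stripped-down numpy sketch below, with made-up capacities, shows just that sampling step; it is an illustration of the technique, not code from the example.

import numpy as np

# Hypothetical capacities of four buildings in one zone.
capacity = np.array([10, 0, 30, 60], dtype=float)

# Probability of each building receiving a household, proportional to capacity.
prob = capacity / capacity.sum()

# Draw one uniform number per household to place, then map it onto a building
# index via the cumulative distribution -- the same cumsum/searchsorted trick
# used in Example 2.  Buildings with zero capacity are never drawn.
n_households = 5
r = np.random.random(n_households)
index_to_bldg = np.searchsorted(np.cumsum(prob), r)
print(index_to_bldg)   # e.g. array([3, 2, 3, 3, 0])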
Example 3: run

# Required import: from opus_core.session_configuration import SessionConfiguration [as alias]
# Or: from opus_core.session_configuration.SessionConfiguration import get_dataset [as alias]
def run(self, year, condition=None, max_iter=10):
    """
    'year' is the current year of the simulation.
    'condition' should be a boolean expression defined on any dataset.
    The method iterates over the given models until all values of the expression are True.
    'max_iter' gives the maximum number of iterations to run if 'condition' is not fulfilled.
    If 'max_iter' is None, there is no limit, so the condition must be fulfilled for the loop to terminate.
    If 'condition' is None, the set of models is run only once.
    """
    self.config['years'] = (year, year)
    if condition is None:
        return self.model_system.run_in_same_process(self.config)

    dataset_pool = SessionConfiguration().get_dataset_pool()
    variable_name = VariableName(condition)
    dataset = dataset_pool.get_dataset(variable_name.get_dataset_name())
    condition_value = dataset.compute_variables(variable_name, dataset_pool=dataset_pool)
    result = None
    iter = 1
    while not alltrue(condition_value):
        result = self.model_system.run_in_same_process(self.config)
        if max_iter is None or iter > max_iter:
            break
        iter = iter + 1
        # force the condition to be recomputed on the freshly run data
        dataset = SessionConfiguration().get_dataset_pool().get_dataset(variable_name.get_dataset_name())
        dataset.delete_computed_attributes()
        condition_value = dataset.compute_variables(variable_name,
                                                    dataset_pool=SessionConfiguration().get_dataset_pool())
    if not alltrue(condition_value):
        logger.log_status('%s did not converge. Maximum number of iterations (%s) reached.' % (self.model_name, max_iter))
    else:
        logger.log_status('%s converged in %s iterations.' % (self.model_name, iter-1))
    return result
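What makes Example 3 interesting for get_dataset is that the condition string decides at run time which dataset is fetched: VariableName extracts the dataset name from the expression, and the pool supplies that dataset so the condition can be evaluated on it. The following is a minimal sketch of that resolution step on its own, assuming a session configuration has already been initialized as in the opening sketch; the condition shown is an illustrative placeholder, not one used by the example.

from opus_core.session_configuration import SessionConfiguration
from opus_core.variables.variable_name import VariableName

# Illustrative boolean expression defined on the household dataset.
condition = "household.income >= 0"

variable_name = VariableName(condition)
dataset_pool = SessionConfiguration().get_dataset_pool()

# get_dataset_name() reports 'household', and get_dataset() pulls that dataset
# from the pool so the expression can be computed on it.
dataset = dataset_pool.get_dataset(variable_name.get_dataset_name())
condition_value = dataset.compute_variables(variable_name, dataset_pool=dataset_pool)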
Example 4: run

# Required import: from opus_core.session_configuration import SessionConfiguration [as alias]
# Or: from opus_core.session_configuration.SessionConfiguration import get_dataset [as alias]
def run(self, config, year, *args, **kwargs):
    """This is the main entry point. It gets the appropriate configuration info from the
    travel_model_configuration part of this config, and then copies the specified
    UrbanSim data into files for the travel model to read.
    """
    cache_directory = config['cache_directory']
    simulation_state = SimulationState()
    simulation_state.set_cache_directory(cache_directory)
    simulation_state.set_current_time(year)
    attribute_cache = AttributeCache()
    dataset_pool = SessionConfiguration(new_instance=True,
                                        package_order=config['dataset_pool_configuration'].package_order,
                                        in_storage=attribute_cache).get_dataset_pool()
    #cache_storage = AttributeCache().get_flt_storage_for_year(year_for_base_year_cache)
    #datasets = DatasetFactory().create_datasets_from_flt(config.get('datasets_to_preload',{}),
    #                                                     "urbansim",
    #                                                     additional_arguments={'in_storage': attribute_cache})
    zone_set = dataset_pool.get_dataset('travel_zone')
    self.prepare_for_run(config['travel_model_configuration'], year)
    self.create_travel_model_input_file(config=config,
                                        year=year,
                                        zone_set=zone_set,
                                        datasets=dataset_pool,
                                        *args, **kwargs)
Example 5: run

# Required import: from opus_core.session_configuration import SessionConfiguration [as alias]
# Or: from opus_core.session_configuration.SessionConfiguration import get_dataset [as alias]
def run(self):
    """Recompute the household 'county' attribute from the county of the parcel
    that the household's building sits on.
    """
    dataset_pool = SessionConfiguration().get_dataset_pool()
    household_set = dataset_pool.get_dataset("household")
    household_set.delete_one_attribute("county")
    county = household_set.compute_variables(
        "_county = household.disaggregate(parcel.county_id, intermediates=[building])"
    )
    household_set.add_primary_attribute(name="county", data=county)
Example 6: prepare_for_run

# Required import: from opus_core.session_configuration import SessionConfiguration [as alias]
# Or: from opus_core.session_configuration.SessionConfiguration import get_dataset [as alias]
def prepare_for_run(self, control_total_dataset_name=None, control_total_table=None, control_total_storage=None):
    if (control_total_storage is None) or ((control_total_table is None) and (control_total_dataset_name is None)):
        # No explicit storage/table given: fall back to the control totals in the dataset pool.
        dataset_pool = SessionConfiguration().get_dataset_pool()
        self.control_totals = dataset_pool.get_dataset('annual_%s_control_total' % self.dataset.get_dataset_name())
        return self.control_totals
    if not control_total_dataset_name:
        control_total_dataset_name = DatasetFactory().dataset_name_for_table(control_total_table)
    self.control_totals = DatasetFactory().search_for_dataset(control_total_dataset_name,
                                                              package_order=SessionConfiguration().package_order,
                                                              arguments={'in_storage': control_total_storage,
                                                                         'in_table_name': control_total_table,
                                                                         'id_name': []})
    return self.control_totals
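When no explicit storage is passed, Example 6 resolves the control totals purely by naming convention through the dataset pool. Assuming the session configuration from the opening sketch is in place, that self.dataset is the household dataset, and that the cache actually contains an annual household control totals table, the fallback branch amounts to the following sketch.

from opus_core.session_configuration import SessionConfiguration

dataset_pool = SessionConfiguration().get_dataset_pool()
household = dataset_pool.get_dataset('household')

# get_dataset_name() returns 'household', so the pool is asked for the dataset
# named 'annual_household_control_total'.
control_totals = dataset_pool.get_dataset('annual_%s_control_total' % household.get_dataset_name())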
Example 7: prepare_for_run

# Required import: from opus_core.session_configuration import SessionConfiguration [as alias]
# Or: from opus_core.session_configuration.SessionConfiguration import get_dataset [as alias]
def prepare_for_run(self, scheduled_events_dataset_name=None, scheduled_events_table=None, scheduled_events_storage=None):
    if (scheduled_events_storage is None) or ((scheduled_events_table is None) and (scheduled_events_dataset_name is None)):
        ## this should not happen
        dataset_pool = SessionConfiguration().get_dataset_pool()
        self.scheduled_events = dataset_pool.get_dataset('scheduled_%s_events' % self.dataset.get_dataset_name())
        return self.scheduled_events
    if not scheduled_events_dataset_name:
        scheduled_events_dataset_name = DatasetFactory().dataset_name_for_table(scheduled_events_table)
    self.scheduled_events = DatasetFactory().search_for_dataset(scheduled_events_dataset_name,
                                                                package_order=SessionConfiguration().package_order,
                                                                arguments={'in_storage': scheduled_events_storage,
                                                                           'in_table_name': scheduled_events_table,
                                                                           'id_name': []})
    return self.scheduled_events
Example 8: prepare_for_run

# Required import: from opus_core.session_configuration import SessionConfiguration [as alias]
# Or: from opus_core.session_configuration.SessionConfiguration import get_dataset [as alias]
def prepare_for_run(self, dataset_name=None, table_name=None, storage=None):
    if (storage is None) or ((table_name is None) and (dataset_name is None)):
        # No explicit storage/table given: take the target vacancy dataset from the pool.
        dataset_pool = SessionConfiguration().get_dataset_pool()
        dataset = dataset_pool.get_dataset('target_vacancy')
        return dataset
    if not dataset_name:
        dataset_name = DatasetFactory().dataset_name_for_table(table_name)
    dataset = DatasetFactory().search_for_dataset(dataset_name,
                                                  package_order=SessionConfiguration().package_order,
                                                  arguments={'in_storage': storage,
                                                             'in_table_name': table_name,
                                                             'id_name': []})
    if self.target_vancy_dataset is None:
        self.target_vancy_dataset = dataset
    return dataset
Example 9: prepare_for_run

# Required import: from opus_core.session_configuration import SessionConfiguration [as alias]
# Or: from opus_core.session_configuration.SessionConfiguration import get_dataset [as alias]
def prepare_for_run(self, dataset_name=None, table_name=None, storage=None):
    """Load the target vacancies table."""
    if (storage is None) or ((table_name is None) and (dataset_name is None)):
        dataset_pool = SessionConfiguration().get_dataset_pool()
        dataset = dataset_pool.get_dataset("target_vacancy")
        return dataset
    if not dataset_name:
        dataset_name = DatasetFactory().dataset_name_for_table(table_name)
    dataset = DatasetFactory().search_for_dataset(
        dataset_name,
        package_order=SessionConfiguration().package_order,
        arguments={"in_storage": storage, "in_table_name": table_name, "id_name": []},
    )
    if self.target_vancy_dataset is None:
        self.target_vancy_dataset = dataset

    # Load the household control totals table, used to anticipate next year's number of households.
    control_totals_dataset = DatasetFactory().search_for_dataset(
        "control_totals",
        package_order=SessionConfiguration().package_order,
        arguments={"in_storage": storage, "in_table_name": "annual_household_control_totals", "id_name": []},
    )
    if self.control_totals is None:
        self.control_totals = control_totals_dataset

    # Load the employment control totals table, used to anticipate next year's employment.
    employment_control_totals_dataset = DatasetFactory().search_for_dataset(
        "control_totals",
        package_order=SessionConfiguration().package_order,
        arguments={"in_storage": storage, "in_table_name": "annual_employment_control_totals", "id_name": []},
    )
    if self.employment_control_totals is None:
        self.employment_control_totals = employment_control_totals_dataset
    return dataset
Example 10: target_func

# Required import: from opus_core.session_configuration import SessionConfiguration [as alias]
# Or: from opus_core.session_configuration.SessionConfiguration import get_dataset [as alias]
def target_func(self, est_v, func=lambda x, y: np.sum(np.abs(x - y)), **kwargs):
    """Target function."""
    simulation_state = SimulationState()
    simulation_state.set_current_time(self.base_year)
    simulation_state.set_cache_directory(self.cache_directory)
    attribute_cache = AttributeCache()
    dataset_pool = SessionConfiguration(
        new_instance=True, package_order=self.package_order, in_storage=attribute_cache
    ).get_dataset_pool()

    # Collect the calibration datasets; in this excerpt, calib_datasets starts out empty,
    # and subset / subset_patterns are expected to come from the surrounding context of
    # the full source (they are not shown here).
    calib_datasets = {}
    for dataset_name, calib_attr in calib_datasets.iteritems():
        dataset = dataset_pool.get_dataset(dataset_name, dataset_arguments={"id_name": []})
        assert (
            subset is None
            or subset.get(dataset_name, None) is None
            or subset_patterns is None
            or subset_patterns.get(dataset_name, None) is None
        )
        if subset is not None and subset.get(dataset_name, None) is not None:
            subset_attr, subset_cond = subset.get(dataset_name)
            index = np.in1d(dataset[subset_attr], subset_cond)
        elif subset_patterns is not None and subset_patterns.get(dataset_name, None) is not None:
            subset_attr, subset_pattern = subset_patterns.get(dataset_name)
            index = array([True if re.search(subset_pattern, attr_v) else False for attr_v in dataset[subset_attr]])
        else:
            index = arange(dataset.size(), dtype="i")
        calib_datasets[dataset_name] = [dataset, calib_attr, index]

    prediction = self.update_prediction(est_v, simulation_state, dataset_pool, calib_datasets, **kwargs)
    ## allow keys in target not appearing in prediction,
    ## assuming their values to be 0
    ### every key in target should appear in prediction
    # assert np.all( np.in1d(self.target.keys(), prediction.keys()) )
    target = np.array(self.target.values())
    predct = np.array([prediction[k] if prediction.has_key(k) else 0 for k in self.target.keys()])
    results = func(predct, target)
    return results
Example 11: run

# Required import: from opus_core.session_configuration import SessionConfiguration [as alias]
# Or: from opus_core.session_configuration.SessionConfiguration import get_dataset [as alias]
def run(self, config, year, *args, **kwargs):
    """This is the main entry point. It gets the appropriate values from the
    travel_model_configuration part of this config, and then copies the specified
    data into the specified travel_data variable names. Results in
    a new travel_data cache for year+1.
    """
    cache_directory = config['cache_directory']
    simulation_state = SimulationState()
    simulation_state.set_current_time(year)
    simulation_state.set_cache_directory(cache_directory)

    logger.start_block('Getting data from travel model')
    next_year = year + 1
    flt_dir_for_next_year = os.path.join(cache_directory, str(next_year))
    if not os.path.exists(flt_dir_for_next_year):
        os.mkdir(flt_dir_for_next_year)
    attribute_cache = AttributeCache()
    dataset_pool = SessionConfiguration(new_instance=True,
                                        package_order=config['dataset_pool_configuration'].package_order,
                                        in_storage=attribute_cache).get_dataset_pool()
    zone_set = dataset_pool.get_dataset('zone')
    # zone_set = ZoneDataset(in_storage_location=flt_dir_for_this_year,
    #                        in_storage_type='flt_storage',
    #                        in_table_name='zones')
    zone_set.load_dataset()
    self.prepare_for_run(config['travel_model_configuration'], year)
    travel_data_set = self.get_travel_data_from_travel_model(config, year, zone_set,
                                                             *args, **kwargs)
    logger.end_block()

    logger.start_block('Writing travel data to cache')
    out_storage = StorageFactory().get_storage('flt_storage', storage_location=flt_dir_for_next_year)
    # out_storage = flt_storage(Resources(data={"storage_location":flt_dir_for_next_year}))
    travel_data_set.write_dataset(attributes=travel_data_set.get_known_attribute_names(),
                                  out_storage=out_storage,
                                  out_table_name='travel_data')
    logger.end_block()
Example 12: SimulationState

# Required import: from opus_core.session_configuration import SessionConfiguration [as alias]
# Or: from opus_core.session_configuration.SessionConfiguration import get_dataset [as alias]
SimulationState().set_cache_directory(cache_directory)
# SimulationState().set_current_time(year)
SessionConfiguration(new_instance=True,
                     package_order=package_order,
                     in_storage=AttributeCache())

for year in range(base_year+1, end_year+1, 1):
    SimulationState().set_current_time(year)
    # SessionConfiguration(new_instance=True,
    #                      package_order=package_order,
    #                      in_storage=AttributeCache())
    dataset_pool = SessionConfiguration().get_dataset_pool()
    dataset_pool.remove_all_datasets()
    # dataset_pool = DatasetPool(
    #     package_order=['psrc','urbansim','opus_core'],
    #     storage=AttributeCache())
    proposal_set = dataset_pool.get_dataset("development_project_proposal")
    template_component = dataset_pool.get_dataset("development_template_component")
    from urbansim_parcel.datasets.development_project_proposal_component_dataset import create_from_proposals_and_template_components
    proposal_component = create_from_proposals_and_template_components(proposal_set,
                                                                       template_component,
                                                                       dataset_pool=dataset_pool)
    proposal_component.write_dataset(out_storage=AttributeCache().get_flt_storage_for_year(year),
                                     out_table_name="development_project_proposal_components")