This article collects typical usage examples of the SessionConfiguration.size method from the Python module opus_core.session_configuration. If you are unsure what SessionConfiguration.size does, how to use it, or want to see it in action, the hand-picked code examples below may help. You can also read more about the containing class, opus_core.session_configuration.SessionConfiguration.
Two code examples of the SessionConfiguration.size method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code samples.
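Both examples follow the same basic pattern: create a SessionConfiguration whose dataset pool reads from an attribute cache, pull a dataset out of the pool, and call size() on it to get the number of records. The sketch below only illustrates that pattern and is not taken from either example; it assumes a working Opus/UrbanSim installation, the cache path, year, and package_order are placeholder values, and the SimulationState, AttributeCache, and logger import paths are the usual opus_core locations.

from opus_core.session_configuration import SessionConfiguration
from opus_core.simulation_state import SimulationState        # usual opus_core location
from opus_core.store.attribute_cache import AttributeCache    # usual opus_core location
from opus_core.logger import logger                           # usual opus_core location

# Point the simulation state at a cached base year (placeholder path and year).
SimulationState().set_cache_directory('/path/to/urbansim_cache')
SimulationState().set_current_time(2000)

# Start a fresh session whose dataset pool reads from the attribute cache.
SessionConfiguration(new_instance=True,
                     package_order=['urbansim', 'opus_core'],  # placeholder package order
                     in_storage=AttributeCache())

# Datasets pulled from the pool report their number of records via size().
households = SessionConfiguration().get_dataset_from_pool("household")
logger.log_status("household dataset has %s records" % households.size())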
Example 1: __init__
# Required import: from opus_core.session_configuration import SessionConfiguration [as alias]
# Or: from opus_core.session_configuration.SessionConfiguration import size [as alias]
def __init__(self, config):
    if 'estimation_database_configuration' in config:
        db_server = DatabaseServer(config['estimation_database_configuration'])
        db = db_server.get_database(config['estimation_database_configuration'].database_name)
        out_storage = StorageFactory().build_storage_for_dataset(
            type='sql_storage', storage_location=db)
    else:
        out_storage = StorageFactory().get_storage(type='flt_storage',
            storage_location=os.path.join(config['cache_directory'], str(config['base_year']+1)))

    simulation_state = SimulationState()
    simulation_state.set_cache_directory(config['cache_directory'])
    simulation_state.set_current_time(config['base_year'])
    attribute_cache = AttributeCache()
    SessionConfiguration(new_instance=True,
                         package_order=config['dataset_pool_configuration'].package_order,
                         in_storage=attribute_cache)

    if not os.path.exists(os.path.join(config['cache_directory'], str(config['base_year']))):
        #raise RuntimeError, "datasets uncached; run prepare_estimation_data.py first"
        CacheScenarioDatabase().run(config, unroll_gridcells=False)

    for dataset_name in config['datasets_to_preload']:
        SessionConfiguration().get_dataset_from_pool(dataset_name)

    households = SessionConfiguration().get_dataset_from_pool("household")
    household_ids = households.get_id_attribute()
    workers = households.get_attribute("workers")

    hh_ids = []
    member_ids = []
    is_worker = []
    job_ids = []
    for i in range(households.size()):
        if workers[i] > 0:
            hh_ids += [household_ids[i]] * workers[i]
            member_ids += range(1, workers[i]+1)
            is_worker += [1] * workers[i]
            job_ids += [-1] * workers[i]

    in_storage = StorageFactory().get_storage('dict_storage')

    persons_table_name = 'persons'
    in_storage.write_table(
        table_name=persons_table_name,
        table_data={
            'person_id': arange(len(hh_ids))+1,
            'household_id': array(hh_ids),
            'member_id': array(member_ids),
            'is_worker': array(is_worker),
            'job_id': array(job_ids),
            },
        )

    persons = PersonDataset(in_storage=in_storage, in_table_name=persons_table_name)
    persons.write_dataset(out_storage=out_storage, out_table_name=persons_table_name)
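As a side note, the worker-expansion loop above (the block driven by households.size()) can be expressed without an explicit Python loop. The sketch below is purely illustrative and not part of the original example; it only assumes that household_ids and workers are the integer arrays returned by get_id_attribute() and get_attribute("workers"), and builds the same table_data dictionary with NumPy.

from numpy import arange, repeat, ones

def build_persons_table_data(household_ids, workers):
    # Mirror the workers[i] > 0 check: households with no workers contribute no rows.
    counts = workers.clip(min=0)
    n_persons = counts.sum()
    # member_id restarts at 1 within each household: subtract each row's household offset.
    offsets = repeat(counts.cumsum() - counts, counts)
    return {
        'person_id': arange(n_persons) + 1,
        'household_id': repeat(household_ids, counts),
        'member_id': arange(n_persons) - offsets + 1,
        'is_worker': ones(n_persons, dtype='int32'),
        'job_id': -ones(n_persons, dtype='int32'),
        }

# Hypothetical usage:
#   table_data = build_persons_table_data(households.get_id_attribute(),
#                                         households.get_attribute("workers"))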
Example 2: HLCMEstimator
# Required import: from opus_core.session_configuration import SessionConfiguration [as alias]
# Or: from opus_core.session_configuration.SessionConfiguration import size [as alias]
class HLCMEstimator(Estimator):

    def estimate(self, spec_var=None, spec_py=None,
                 submodel_string="workers",
                 agent_sample_rate=0.005, alt_sample_size=None):
        """
        """
        CLOSE = 0.001
        sampler = "opus_core.samplers.weighted_sampler"
        if alt_sample_size is None:
            sampler = None
        date_time_str = strftime("%Y_%m_%d__%H_%M", localtime())
        agent_sample_rate_str = "__ASR_" + str(agent_sample_rate)
        alt_sample_size_str = "_ALT_" + str(alt_sample_size)
        info_file = date_time_str + agent_sample_rate_str + alt_sample_size_str + "__info.txt"
        logger.enable_file_logging(date_time_str + agent_sample_rate_str + alt_sample_size_str + "__run.txt")
        logger.enable_memory_logging()
        logger.log_status("Constrained Estimation with agent sample rate of %s and alternative sample size %s\n" % \
                          (agent_sample_rate, alt_sample_size))

        t1 = time()
        SimulationState().set_current_time(2000)
        self.nbs = SessionConfiguration().get_dataset_from_pool("neighborhood")
        self.hhs = SessionConfiguration().get_dataset_from_pool('household')

        depts, lambda_value = compute_lambda(self.nbs)
        supply, vacancy_rate = compute_supply_and_vacancy_rate(self.nbs, depts, lambda_value)
        self.nbs.set_values_of_one_attribute("supply", supply)
        dataset_pool = SessionConfiguration().get_dataset_pool()
        dataset_pool.add_datasets_if_not_included({'vacancy_rate': vacancy_rate,
                                                   'sample_rate': agent_sample_rate
                                                   })
        SessionConfiguration()["CLOSE"] = CLOSE
        SessionConfiguration()['info_file'] = info_file

        if self.save_estimation_results:
            out_storage = StorageFactory().build_storage_for_dataset(type='sql_storage',
                                                                     storage_location=self.out_con)

        if spec_py is not None:
            reload(spec_py)
            spec_var = spec_py.specification

        if spec_var is not None:
            self.specification = load_specification_from_dictionary(spec_var)
        else:
            in_storage = StorageFactory().build_storage_for_dataset(type='sql_storage',
                                                                    storage_location=self.in_con)
            self.specification = EquationSpecification(in_storage=in_storage)
            self.specification.load(in_table_name="household_location_choice_model_specification")

        #submodel_string = "workers"

        seed(71)  # was: seed(71,110)
        self.model_name = "household_location_choice_model"

        model = HouseholdLocationChoiceModelCreator().get_model(
            location_set=self.nbs,
            submodel_string=submodel_string,
            sampler=sampler,
            # proportion of the agent set that should be used for the estimation
            estimation_size_agents=agent_sample_rate * 100/20,
            sample_size_locations=alt_sample_size,  # choice set size (includes current location)
            compute_capacity_flag=True,
            probabilities="opus_core.mnl_probabilities",
            choices="urbansim.lottery_choices",
            run_config=Resources({"capacity_string": "supply"}),
            estimate_config=Resources({"capacity_string": "supply", "compute_capacity_flag": True}))

        # TODO: since households_for_estimation currently is the same as households,
        # create_households_for_estimation becomes unnecessary
        #agent_set, agents_index_for_estimation = create_households_for_estimation(self.hhs, self.in_con)
        agent_set = self.hhs
        agents_index_for_estimation = arange(self.hhs.size())

        self.result = model.estimate(self.specification,
                                     agent_set=agent_set,
                                     agents_index=agents_index_for_estimation,
                                     debuglevel=self.debuglevel,
                                     procedure="urbansim.constrain_estimation_bhhh_two_loops")  #"urbansim.constrain_estimation_bhhh"

        # save estimation results
        if self.save_estimation_results:
            self.save_results(out_storage)

        logger.log_status("Estimation done. " + str(time()-t1) + " s")