This page collects typical usage examples of the Python function tests.utils.helpers.job_from_file, taken from the OpenQuake test suite. If you are unsure what job_from_file does or how to call it, the hand-picked examples below should help.
The 15 code examples shown below are ordered by popularity (most frequently referenced first).
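Before the examples, here is a minimal sketch of the pattern most of these tests share: resolve a .gem configuration file, load it with helpers.job_from_file, and then exercise the returned job object. The attributes used here (job_id, params, sections) appear in the examples below, but the exact behaviour of job_from_file and the config file name are assumptions inferred from this page, not taken from the helper's source.

# Minimal usage sketch (assumed behaviour; the config file name is
# hypothetical and the returned object's API is inferred from the
# examples on this page):
from tests.utils import helpers

def load_demo_job():
    # Resolve a test configuration file shipped with the test data.
    config_path = helpers.get_data_path('config.gem')  # hypothetical file
    # Build the job/calculation object that the tests below exercise.
    job = helpers.job_from_file(config_path)
    # Typical attributes used by the tests: job.job_id, job.params,
    # job.sections.
    return job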
Example 1: setUp
def setUp(self):
    client = kvs.get_client()
    # Delete managed job id info so we can predict the job key
    # which will be allocated for us
    client.delete(kvs.tokens.CURRENT_JOBS)
    self.generated_files = []
    self.job = helpers.job_from_file(helpers.get_data_path(CONFIG_FILE))
    self.job_with_includes = \
        helpers.job_from_file(helpers.get_data_path(CONFIG_WITH_INCLUDES))
Example 2: test_compute_disagg_matrix
def test_compute_disagg_matrix(self):
    """Test the core function of the main disaggregation task."""
    # for the given test input data, we expect the calculator to return
    # this gmv:
    expected_gmv = 0.2259803374787534
    the_job = helpers.job_from_file(DISAGG_DEMO_CONFIG_FILE)
    helpers.store_hazard_logic_trees(the_job)
    site = shapes.Site(0.0, 0.0)
    poe = 0.1
    result_dir = tempfile.gettempdir()
    gmv, matrix_path = disagg_core.compute_disagg_matrix(
        the_job, site, poe, result_dir)
    # Now test the following:
    # 1) The matrix file exists
    # 2) The matrix file has a size > 0
    # 3) Check that the returned GMV is what we expect
    # Here we don't test the actual matrix contents or the hdf5 file;
    # there are tests on the Java side which verify the actual data in the
    # matrix, plus other tests on the Python side which deal with saving
    # the matrix.
    self.assertTrue(os.path.exists(matrix_path))
    self.assertTrue(os.path.getsize(matrix_path) > 0)
    self.assertEqual(expected_gmv, gmv)
    # For clean up, delete the hdf5 we generated.
    os.unlink(matrix_path)
Example 3: test_job_with_only_hazard_config_only_has_hazard_section
def test_job_with_only_hazard_config_only_has_hazard_section(self):
    FLAGS.include_defaults = False
    try:
        job_with_only_hazard = \
            helpers.job_from_file(helpers.get_data_path(HAZARD_ONLY))
        self.assertEqual(["HAZARD"], job_with_only_hazard.sections)
    finally:
        FLAGS.include_defaults = True
Example 4: test_deterministic_job_completes
def test_deterministic_job_completes(self):
    """
    Exercise the deterministic risk job and make sure it runs end-to-end.
    """
    risk_job = helpers.job_from_file(TEST_JOB_FILE)
    # KVS garbage collection is going to be called asynchronously by the
    # job. We don't actually want that to happen.
    with patch('subprocess.Popen'):
        risk_job.launch()
Example 5: test_default_validators_scenario_job
def test_default_validators_scenario_job(self):
    """Test to ensure that a Scenario job always includes the
    :class:`openquake.job.config.ScenarioComputationValidator`."""
    scenario_job_path = helpers.demo_file('scenario_risk/config.gem')
    scenario_job = helpers.job_from_file(scenario_job_path)
    validators = config.default_validators(scenario_job.sections,
                                           scenario_job.params)
    self.assertTrue(any(
        isinstance(v, ScenarioComputationValidator) for v in validators))
Example 6: setUp
def setUp(self):
    self.job_ctxt = helpers.job_from_file(os.path.join(helpers.DATA_DIR,
                                          'config.gem'))
    [input] = models.inputs4job(self.job_ctxt.job_id,
                                input_type="exposure")
    owner = models.OqUser.objects.get(user_name="openquake")
    emdl = input.model()
    if not emdl:
        emdl = models.ExposureModel(
            owner=owner, input=input, description="RCT exposure model",
            category="RCT villas", stco_unit="roofs",
            stco_type="aggregated")
        emdl.save()
    asset_data = [
        ((0, 0), shapes.Site(10.0, 10.0),
         {u'stco': 5.07, u'asset_ref': u'a5625',
          u'taxonomy': u'rctc-ad-83'}),
        ((0, 1), shapes.Site(10.1, 10.0),
         {u'stco': 5.63, u'asset_ref': u'a5629',
          u'taxonomy': u'rctc-ad-83'}),
        ((1, 0), shapes.Site(10.0, 10.1),
         {u'stco': 11.26, u'asset_ref': u'a5630',
          u'taxonomy': u'rctc-ad-83'}),
        ((1, 1), shapes.Site(10.1, 10.1),
         {u'stco': 5.5, u'asset_ref': u'a5636',
          u'taxonomy': u'rctc-ad-83'}),
    ]
    assets = emdl.exposuredata_set.filter(taxonomy="rctc-ad-83"). \
        order_by("id")
    for idx, (gcoo, site, adata) in enumerate(asset_data):
        if not assets:
            location = geos.GEOSGeometry(site.point.to_wkt())
            asset = models.ExposureData(exposure_model=emdl, site=location,
                                        **adata)
            asset.save()
        else:
            asset = assets[idx]
        GRID_ASSETS[gcoo] = asset
    self.grid = shapes.Grid(shapes.Region.from_coordinates(
        [(10.0, 10.0), (10.0, 10.1), (10.1, 10.1), (10.1, 10.0)]), 0.1)
    # this is the expected output of grid_assets_iterator and an input of
    # asset_losses_per_site
    self.grid_assets = [
        (shapes.GridPoint(self.grid, 0, 0), GRID_ASSETS[(0, 0)]),
        (shapes.GridPoint(self.grid, 1, 0), GRID_ASSETS[(0, 1)]),
        (shapes.GridPoint(self.grid, 0, 1), GRID_ASSETS[(1, 0)]),
        (shapes.GridPoint(self.grid, 1, 1), GRID_ASSETS[(1, 1)])]
Example 7: test_default_validators_disagg_job
def test_default_validators_disagg_job(self):
    """Test to ensure that a Disaggregation job always includes the
    :class:`openquake.job.config.DisaggregationValidator`.
    """
    da_job_path = helpers.demo_file('disaggregation/config.gem')
    da_job = helpers.job_from_file(da_job_path)
    validators = config.default_validators(da_job.sections, da_job.params)
    # test that the default validators include a DisaggregationValidator
    self.assertTrue(any(
        isinstance(v, DisaggregationValidator) for v in validators))
Example 8: test_can_store_and_read_jobs_from_kvs
def test_can_store_and_read_jobs_from_kvs(self):
    flags_debug_default = flags.FLAGS.debug
    flags.FLAGS.debug = "debug"
    try:
        self.job = helpers.job_from_file(
            os.path.join(helpers.DATA_DIR, CONFIG_FILE))
        job_from_kvs = CalculationProxy.from_kvs(self.job.job_id)
        self.assertEqual(flags.FLAGS.debug, job_from_kvs.params.pop("debug"))
        self.assertEqual(self.job, job_from_kvs)
    finally:
        helpers.cleanup_loggers()
        # Restore the default global FLAGS.debug level
        # so we don't break stuff.
        flags.FLAGS.debug = flags_debug_default
Example 9: test_default_validators_classical_job
def test_default_validators_classical_job(self):
    """Test to ensure that a classical job always includes the
    :class:`openquake.job.config.ClassicalValidator`.
    """
    classical_risk_job_path = helpers.demo_file(
        'classical_psha_based_risk/config.gem')
    classical_risk_job = helpers.job_from_file(classical_risk_job_path)
    validators = config.default_validators(classical_risk_job.sections,
                                           classical_risk_job.params)
    self.assertTrue(
        any(isinstance(v, ClassicalValidator) for v in validators))
Example 10: setUp
def setUp(self):
    self.job = helpers.job_from_file(os.path.join(helpers.DATA_DIR,
                                     'config.gem'))
    self.grid = shapes.Grid(shapes.Region.from_coordinates(
        [(1.0, 3.0), (1.0, 4.0), (2.0, 4.0), (2.0, 3.0)]),
        1.0)
    # this is the expected output of grid_assets_iterator and an input of
    # asset_losses_per_site
    self.grid_assets = [
        (shapes.GridPoint(self.grid, 0, 0), GRID_ASSETS[(0, 0)]),
        (shapes.GridPoint(self.grid, 1, 0), GRID_ASSETS[(0, 1)]),
        (shapes.GridPoint(self.grid, 0, 1), GRID_ASSETS[(1, 0)]),
        (shapes.GridPoint(self.grid, 1, 1), GRID_ASSETS[(1, 1)])]
Example 11: setUp
def setUp(self):
    kvs.flush()
    self.job = helpers.job_from_file(SCENARIO_SMOKE_TEST)
    self.job.params[NUMBER_OF_CALC_KEY] = "1"
    self.job.params["SERIALIZE_RESULTS_TO"] = "xml"
    # saving the default java implementation
    self.default = det.ScenarioEventBasedMixin.compute_ground_motion_field
    self.grid = self.job.region.grid
    self.job.to_kvs()
Example 12: test_read_sites_from_exposure
def test_read_sites_from_exposure(self):
    """
    Test reading site data from an exposure file using
    :py:function:`openquake.risk.job.general.read_sites_from_exposure`.
    """
    job_config_file = helpers.smoketest_file('simplecase/config.gem')
    test_job = helpers.job_from_file(job_config_file)
    expected_sites = [
        shapes.Site(-118.077721, 33.852034),
        shapes.Site(-118.067592, 33.855398),
        shapes.Site(-118.186739, 33.779013)]
    self.assertEqual(expected_sites,
                     general.read_sites_from_exposure(test_job))
Example 13: test_spawn_job_supervisor
def test_spawn_job_supervisor(self):
    class FakeProcess(object):
        pid = 42

    oq_config.Config().cfg['supervisor']['exe'] = '/supervise me'
    job = helpers.job_from_file(helpers.get_data_path(CONFIG_FILE))
    with patch('subprocess.Popen') as popen:
        popen.return_value = FakeProcess()
        spawn_job_supervisor(job_id=job.job_id, pid=54321)
        self.assertEqual(popen.call_count, 1)
        self.assertEqual(popen.call_args,
                         ((['/supervise me', str(job.job_id), '54321'], ),
                          {'env': os.environ}))
    job = OqJob.objects.get(pk=job.job_id)
    self.assertEqual(job.supervisor_pid, 42)
    self.assertEqual(job.job_pid, 54321)
Example 14: test_hazard_engine_jobber_runs
def test_hazard_engine_jobber_runs(self):
    """Construction of LogicTreeProcessor in Java should not throw
    errors, and should have params loaded from KVS."""
    hazengine = helpers.job_from_file(TEST_JOB_FILE)
    self.generated_files.append(hazengine.super_config_path)
    with mixins.Mixin(hazengine, openquake.hazard.job.HazJobMixin):
        hazengine.execute()
        source_model_key = tokens.source_model_key(hazengine.job_id)
        self.kvs_client.get(source_model_key)
        # We have the random seed in the config, so this is guaranteed
        # TODO(JMC): Add this back in
        # self.assertEqual(source_model, TEST_SOURCE_MODEL)
        gmpe_key = tokens.gmpe_key(hazengine.job_id)
        self.kvs_client.get(gmpe_key)
Example 15: test_read_sites_from_exposure
def test_read_sites_from_exposure(self):
    # Test reading site data from an exposure file using
    # :py:function:`openquake.risk.read_sites_from_exposure`.
    job_cfg = helpers.testdata_path('simplecase/config.gem')
    test_job = helpers.job_from_file(job_cfg)
    calc = core.EventBasedRiskCalculator(test_job)
    calc.store_exposure_assets()
    expected_sites = set([
        shapes.Site(-118.077721, 33.852034),
        shapes.Site(-118.067592, 33.855398),
        shapes.Site(-118.186739, 33.779013)])
    actual_sites = set(engine.read_sites_from_exposure(test_job))
    self.assertEqual(expected_sites, actual_sites)