本文整理汇总了Python中tvb.core.services.operation_service.OperationService.prepare_operations方法的典型用法代码示例。如果您正苦于以下问题:Python OperationService.prepare_operations方法的具体用法?Python OperationService.prepare_operations怎么用?Python OperationService.prepare_operations使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类tvb.core.services.operation_service.OperationService
的用法示例。
在下文中一共展示了OperationService.prepare_operations方法的7个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: create_group
# 需要导入模块: from tvb.core.services.operation_service import OperationService [as 别名]
# 或者: from tvb.core.services.operation_service.OperationService import prepare_operations [as 别名]
def create_group(test_user=None, test_project=None, subject="John Doe"):
    """
    Create a group of 2 operations, each with at least one resultant DataType.

    :param test_user: user owning the operations; created on the fly when None
    :param test_project: project to launch into; created on the fly when None
    :param subject: value stored as the DataType subject metadata
    :returns: tuple (datatypes resulted in the group, operation group id)
    """
    if test_user is None:
        test_user = TestFactory.create_user()
    if test_project is None:
        test_project = TestFactory.create_project(test_user)
    # Resolve the test adapter and its algorithm metadata.
    algo_group = dao.find_group('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
    category = dao.get_category_by_id(algo_group.fk_category)
    algorithm = dao.get_algorithm_by_group(algo_group.id)
    adapter = TestFactory.create_adapter(algo_group=algo_group, test_project=test_project)
    adapter.meta_data = {DataTypeMetaData.KEY_SUBJECT: subject}
    # Ranging over 'param_5' with two values produces a group of two operations.
    launch_args = {model.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
    # Prepare the operations group, then execute both synchronously.
    service = OperationService()
    operations = service.prepare_operations(test_user.id, test_project.id, algorithm,
                                            category, {}, **launch_args)[0]
    for operation in operations[:2]:
        service.launch_operation(operation.id, False, adapter)
    group_id = operations[0].fk_operation_group
    return dao.get_datatype_in_group(operation_group_id=group_id), group_id
示例2: __init__
# 需要导入模块: from tvb.core.services.operation_service import OperationService [as 别名]
# 或者: from tvb.core.services.operation_service.OperationService import prepare_operations [as 别名]
#.........这里部分代码省略.........
"Simulation State not found for %s, " "thus we are unable to branch from it!" % burst_config.name
)
self.logger.error(exc)
raise exc
simulation_state = simulation_state[0]
burst_config.update_simulation_parameter("simulation_state", simulation_state.gid)
burst_config = burst_configuration.clone()
count = dao.count_bursts_with_name(burst_config.name, burst_config.fk_project)
burst_config.name = burst_config.name + "_" + launch_mode + str(count)
## 2. Create Operations and do the actual launch
if launch_mode in [LAUNCH_NEW, LAUNCH_BRANCH]:
## New Burst entry in the history
burst_id = self._store_burst_config(burst_config)
thread = threading.Thread(
target=self._async_launch_and_prepare,
kwargs={
"burst_config": burst_config,
"simulator_index": simulator_index,
"simulator_id": simulator_id,
"user_id": user_id,
},
)
thread.start()
return burst_id, burst_config.name
else:
## Continue simulation
## TODO
return burst_config.id, burst_config.name
@transactional
def _prepare_operations(self, burst_config, simulator_index, simulator_id, user_id):
"""
Prepare all required operations for burst launch.
"""
project_id = burst_config.fk_project
burst_id = burst_config.id
workflow_step_list = []
starting_index = simulator_index + 1
sim_algo = FlowService().get_algorithm_by_identifier(simulator_id)
metadata = {DataTypeMetaData.KEY_BURST: burst_id}
launch_data = burst_config.get_all_simulator_values()[0]
operations, group = self.operation_service.prepare_operations(
user_id, project_id, sim_algo, sim_algo.algo_group.group_category, metadata, **launch_data
)
group_launched = group is not None
if group_launched:
starting_index += 1
for tab in burst_config.tabs:
for portlet_cfg in tab.portlets:
### For each portlet configuration stored, update the step index ###
### and also change the dynamic parameters step indexes to point ###
### to the simulator outputs. ##
if portlet_cfg is not None:
analyzers = portlet_cfg.analyzers
visualizer = portlet_cfg.visualizer
for entry in analyzers:
entry.step_index = starting_index
self.workflow_service.set_dynamic_step_references(entry, simulator_index)
workflow_step_list.append(entry)
starting_index += 1
### Change the dynamic parameters to point to the last adapter from this portlet execution.
示例3: FlowContollerTest
# 需要导入模块: from tvb.core.services.operation_service import OperationService [as 别名]
# 或者: from tvb.core.services.operation_service.OperationService import prepare_operations [as 别名]
#.........这里部分代码省略.........
launch_params['simulation_length'] = '[10000,10001,10002]'
launch_params[model.RANGE_PARAMETER_1] = 'simulation_length'
launch_params = {"simulator_parameters": json.dumps(launch_params)}
burst_id = json.loads(self.burst_c.launch_burst("new", "test_burst", **launch_params))['id']
return dao.get_burst_by_id(burst_id)
def _wait_for_burst_ops(self, burst_config):
    """
    Poll the DB until at least one operation of the given burst exists,
    or the timeout (50 polls of 1s) expires.

    :param burst_config: burst entity whose operations are awaited
    :returns: list of operations found for the burst (may be empty on timeout)
    """
    waited = 1
    timeout = 50
    operations = dao.get_operations_in_burst(burst_config.id)
    # Idiomatic emptiness check; the loop already refreshes `operations`,
    # so the redundant extra query after the loop was dropped.
    while not operations and waited <= timeout:
        sleep(1)
        waited += 1
        operations = dao.get_operations_in_burst(burst_config.id)
    return operations
def test_stop_burst_operation(self):
    """Stopping a single (non-group) burst operation marks it CANCELED."""
    burst = self._long_burst_launch()
    first_op = self._wait_for_burst_ops(burst)[0]
    self.assertFalse(first_op.has_finished)
    # is_group=0, remove=False: cancel only, keep the operation entity
    self.flow_c.stop_burst_operation(first_op.id, 0, False)
    reloaded = dao.get_operation_by_id(first_op.id)
    self.assertEqual(reloaded.status, model.STATUS_CANCELED)
def test_stop_burst_operation_group(self):
    """Stopping a burst operation group cancels every operation in it."""
    burst = self._long_burst_launch(True)
    launched_ops = self._wait_for_burst_ops(burst)
    group_id = 0
    for launched in launched_ops:
        self.assertFalse(launched.has_finished)
        group_id = launched.fk_operation_group
    # is_group=1: the id denotes an operation group rather than one operation
    self.flow_c.stop_burst_operation(group_id, 1, False)
    for launched in launched_ops:
        refreshed = dao.get_operation_by_id(launched.id)
        self.assertEqual(refreshed.status, model.STATUS_CANCELED)
def test_remove_burst_operation(self):
    """Stopping with remove=True deletes the operation entity entirely."""
    burst = self._long_burst_launch()
    first_op = self._wait_for_burst_ops(burst)[0]
    self.assertFalse(first_op.has_finished)
    # remove=True: the operation should no longer be retrievable afterwards
    self.flow_c.stop_burst_operation(first_op.id, 0, True)
    self.assertTrue(dao.try_get_operation_by_id(first_op.id) is None)
def test_remove_burst_operation_group(self):
    """Stopping a group with remove=True deletes all its operations."""
    burst = self._long_burst_launch(True)
    launched_ops = self._wait_for_burst_ops(burst)
    group_id = 0
    for launched in launched_ops:
        self.assertFalse(launched.has_finished)
        group_id = launched.fk_operation_group
    # is_group=1, remove=True: every member operation must disappear
    self.flow_c.stop_burst_operation(group_id, 1, True)
    for launched in launched_ops:
        self.assertTrue(dao.try_get_operation_by_id(launched.id) is None)
def _launch_test_algo_on_cluster(self, **data):
    """
    Prepare operations for TestAdapter1 with the given launch kwargs and
    submit them to the cluster.

    :returns: the list of prepared operations
    """
    group = dao.find_group("tvb.tests.framework.adapters.testadapter1", "TestAdapter1")
    adapter = FlowService().build_adapter_instance(group)
    algo_group = adapter.algorithm_group
    category = dao.get_category_by_id(algo_group.fk_category)
    algorithm = dao.get_algorithm_by_group(algo_group.id)
    operations, _ = self.operation_service.prepare_operations(
        self.test_user.id, self.test_project.id, algorithm, category, {},
        ABCAdapter.LAUNCH_METHOD, **data)
    self.operation_service._send_to_cluster(operations, adapter)
    return operations
def test_stop_operations(self):
    """Stopping a single running cluster operation marks it CANCELED."""
    launched = self._launch_test_algo_on_cluster(test1_val1=5, test1_val2=5)
    running = dao.get_operation_by_id(launched[0].id)
    self.assertFalse(running.has_finished)
    self.flow_c.stop_operation(running.id, 0, False)
    stopped = dao.get_operation_by_id(running.id)
    self.assertEqual(stopped.status, model.STATUS_CANCELED)
def test_stop_operations_group(self):
    """Stopping an operation group cancels all of its member operations."""
    # Ranging over 'test1_val1' yields a group of three operations.
    launch_args = {model.RANGE_PARAMETER_1: "test1_val1", "test1_val1": '5,6,7', 'test1_val2': 5}
    launched = self._launch_test_algo_on_cluster(**launch_args)
    group_id = 0
    for entry in launched:
        refreshed = dao.get_operation_by_id(entry.id)
        self.assertFalse(refreshed.has_finished)
        group_id = refreshed.fk_operation_group
    self.flow_c.stop_operation(group_id, 1, False)
    for entry in launched:
        refreshed = dao.get_operation_by_id(entry.id)
        self.assertEqual(refreshed.status, model.STATUS_CANCELED)
示例4: TestWorkflow
# 需要导入模块: from tvb.core.services.operation_service import OperationService [as 别名]
# 或者: from tvb.core.services.operation_service.OperationService import prepare_operations [as 别名]
class TestWorkflow(TransactionalTestCase):
"""
Test that workflow conversion methods are valid.
"""
def transactional_setup_method(self):
    """
    Prepare the test environment: a fresh user and project, and the
    workflow / burst / operation / flow service instances under test.
    """
    self.test_user = TestFactory.create_user()
    self.test_project = TestFactory.create_project(self.test_user)
    # Services are independent of each other; instantiate them all up front.
    self.flow_service = FlowService()
    self.operation_service = OperationService()
    self.burst_service = BurstService()
    self.workflow_service = WorkflowService()
def transactional_teardown_method(self):
    """Clean up: drop the test project's file structure and leftover folders."""
    FilesHelper().remove_project_structure(self.test_project.name)
    self.delete_project_folders()
def __create_complex_workflow(self, workflow_step_list):
    """
    Create a burst whose workflow is built from the given steps, then fire
    the first resulting operation (the workflow engine chains the rest).

    :param workflow_step_list: workflow steps appended after the first
        (datatype-input) simulator step
    :returns: id of the newly stored burst
    """
    burst = TestFactory.store_burst(self.test_project.id)
    input_dt = datatypes_factory.DatatypesFactory()._store_datatype(Datatype1())
    first_algo = self.flow_service.get_algorithm_by_module_and_class(
        "tvb.tests.framework.adapters.testadapter1", "TestAdapterDatatypeInput")
    meta = {DataTypeMetaData.KEY_BURST: burst.id}
    launch_kwargs = {"test_dt_input": input_dt.gid, 'test_non_dt_input': '0'}
    operations, group = self.operation_service.prepare_operations(
        self.test_user.id, self.test_project.id, first_algo,
        first_algo.algorithm_category, meta, **launch_kwargs)
    workflows = self.workflow_service.create_and_store_workflow(
        project_id=self.test_project.id, burst_id=burst.id,
        simulator_index=0, simulator_id=first_algo.id, operations=operations)
    self.operation_service.prepare_operations_for_workflowsteps(
        workflow_step_list, workflows, self.test_user.id,
        burst.id, self.test_project.id, group, operations)
    # Launch only the first operation; subsequent steps run via the workflow.
    if len(operations) > 0:
        self.operation_service.launch_operation(operations[0].id, False)
    return burst.id
def test_workflow_generation(self):
    """
    A simple test just for the fact that a workflow is created an ran,
    no dynamic parameters are passed. In this case we create a two steps
    workflow: step1 - tvb.tests.framework.adapters.testadapter2.TestAdapter2
              step2 - tvb.tests.framework.adapters.testadapter1.TestAdapter1
    The first adapter doesn't return anything and the second returns one
    tvb.datatypes.datatype1.Datatype1 instance. We check that the steps
    are actually ran by checking that two operations are created and that
    one dataType is stored.
    """
    steps = [TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter2",
                                              "TestAdapter2", step_index=1,
                                              static_kwargs={"test2": 2}),
             TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter1",
                                              "TestAdapter1", step_index=2,
                                              static_kwargs={"test1_val1": 1, "test1_val2": 1})]
    self.__create_complex_workflow(steps)
    stored = dao.get_datatypes_in_project(self.test_project.id)
    assert len(stored) == 2, "DataType from second step was not stored."
    # Both the input datatype and the workflow result must be Datatype1.
    for datatype in stored[:2]:
        assert datatype.type == 'Datatype1', "Wrong type was stored."
    finished, started, error, _, _ = dao.get_operation_numbers(self.test_project.id)
    assert finished == 3, "Didnt start operations for both adapters in workflow."
    assert started == 0, "Some operations from workflow didnt finish."
    assert error == 0, "Some operations finished with error status."
def test_workflow_dynamic_params(self):
"""
A simple test just for the fact that dynamic parameters are passed properly
between two workflow steps:
step1 - tvb.tests.framework.adapters.testadapter1.TestAdapter1
step2 - tvb.tests.framework.adapters.testadapter3.TestAdapter3
The first adapter returns a tvb.datatypes.datatype1.Datatype1 instance.
The second adapter has this passed as a dynamic workflow parameter.
We check that the steps are actually ran by checking that two operations
#.........这里部分代码省略.........
示例5: TestOperationService
# 需要导入模块: from tvb.core.services.operation_service import OperationService [as 别名]
# 或者: from tvb.core.services.operation_service.OperationService import prepare_operations [as 别名]
#.........这里部分代码省略.........
data = {"test": 100}
TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) - 1)
tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
with pytest.raises(NoMemoryAvailableException):
self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
self._assert_no_dt2()
def test_launch_operation_HDD_full_space_started_ops(self):
    """
    Launching must raise NoMemoryAvailableException when the disk quota is
    exceeded once the space reserved by already-started operations counts.
    """
    reserved_size = 100
    adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
    # A STARTED operation whose estimated size eats into the quota.
    running_op = model.Operation(self.test_user.id, self.test_project.id, adapter.stored_adapter.id, "",
                                 status=model.STATUS_STARTED, estimated_disk_size=reserved_size)
    dao.store_entity(running_op)
    launch_args = {"test": 100}
    # Quota is one unit short of required + reserved -> guaranteed to fail.
    TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**launch_args) + reserved_size - 1)
    temp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
    with pytest.raises(NoMemoryAvailableException):
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                  temp_folder, **launch_args)
    self._assert_no_dt2()
def test_stop_operation(self):
    """
    Test that an operation is successfully stopped.
    """
    adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter2", "TestAdapter2")
    data = {"test": 5}
    algo = adapter.stored_adapter
    algo_category = dao.get_category_by_id(algo.fk_category)
    operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id, algo,
                                                              algo_category, {}, **data)
    self.operation_service._send_to_cluster(operations, adapter)
    self.operation_service.stop_operation(operations[0].id)
    operation = dao.get_operation_by_id(operations[0].id)
    # BUG FIX: the original `assert operation.status, model.STATUS_CANCELED == "..."`
    # only checked that `status` was truthy; now the equality is actually asserted.
    assert operation.status == model.STATUS_CANCELED, "Operation should have been canceled!"
def test_stop_operation_finished(self):
    """
    Test that an operation that is already finished is not changed by the stop operation.
    """
    adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter1", "TestAdapter1")
    data = {"test1_val1": 5, 'test1_val2': 5}
    algo = adapter.stored_adapter
    algo_category = dao.get_category_by_id(algo.fk_category)
    operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id, algo,
                                                              algo_category, {}, **data)
    self.operation_service._send_to_cluster(operations, adapter)
    # Mark the operation FINISHED before requesting the stop.
    operation = dao.get_operation_by_id(operations[0].id)
    operation.status = model.STATUS_FINISHED
    dao.store_entity(operation)
    self.operation_service.stop_operation(operations[0].id)
    operation = dao.get_operation_by_id(operations[0].id)
    # BUG FIX: the original `assert operation.status, model.STATUS_FINISHED == "..."`
    # only checked truthiness of `status`; now the equality is actually asserted.
    assert operation.status == model.STATUS_FINISHED, "Operation shouldn't have been canceled!"
def test_array_from_string(self):
"""
Simple test for parse array on 1d, 2d and 3d array.
"""
row = {'description': 'test.',
'default': 'None',
示例6: BurstService
# 需要导入模块: from tvb.core.services.operation_service import OperationService [as 别名]
# 或者: from tvb.core.services.operation_service.OperationService import prepare_operations [as 别名]
#.........这里部分代码省略.........
simulation_state = dao.get_generic_entity(SIMULATION_DATATYPE_MODULE + "." + SIMULATION_DATATYPE_CLASS,
burst_config.id, "fk_parent_burst")
if simulation_state is None or len(simulation_state) < 1:
exc = BurstServiceException("Simulation State not found for %s, "
"thus we are unable to branch from it!" % burst_config.name)
self.logger.error(exc)
raise exc
simulation_state = simulation_state[0]
burst_config.update_simulation_parameter("simulation_state", simulation_state.gid)
burst_config = burst_configuration.clone()
count = dao.count_bursts_with_name(burst_config.name, burst_config.fk_project)
burst_config.name = burst_config.name + "_" + launch_mode + str(count)
## 2. Create Operations and do the actual launch
if launch_mode in [LAUNCH_NEW, LAUNCH_BRANCH]:
## New Burst entry in the history
burst_id = self._store_burst_config(burst_config)
thread = threading.Thread(target=self._async_launch_and_prepare,
kwargs={'burst_config': burst_config,
'simulator_index': simulator_index,
'simulator_id': simulator_id,
'user_id': user_id})
thread.start()
return burst_id, burst_config.name
else:
## Continue simulation
## TODO
return burst_config.id, burst_config.name
@transactional
def _prepare_operations(self, burst_config, simulator_index, simulator_id, user_id):
"""
Prepare all required operations for burst launch.
"""
project_id = burst_config.fk_project
burst_id = burst_config.id
workflow_step_list = []
starting_index = simulator_index + 1
sim_algo = FlowService().get_algorithm_by_identifier(simulator_id)
metadata = {DataTypeMetaData.KEY_BURST: burst_id}
launch_data = burst_config.get_all_simulator_values()[0]
operations, group = self.operation_service.prepare_operations(user_id, project_id, sim_algo,
sim_algo.algorithm_category, metadata,
**launch_data)
group_launched = group is not None
if group_launched:
starting_index += 1
for tab in burst_config.tabs:
for portlet_cfg in tab.portlets:
### For each portlet configuration stored, update the step index ###
### and also change the dynamic parameters step indexes to point ###
### to the simulator outputs. ##
if portlet_cfg is not None:
analyzers = portlet_cfg.analyzers
visualizer = portlet_cfg.visualizer
for entry in analyzers:
entry.step_index = starting_index
self.workflow_service.set_dynamic_step_references(entry, simulator_index)
workflow_step_list.append(entry)
starting_index += 1
### Change the dynamic parameters to point to the last adapter from this portlet execution.
示例7: OperationServiceTest
# 需要导入模块: from tvb.core.services.operation_service import OperationService [as 别名]
# 或者: from tvb.core.services.operation_service.OperationService import prepare_operations [as 别名]
#.........这里部分代码省略.........
group.id,
"",
status=model.STATUS_STARTED,
estimated_disk_size=space_taken_by_started,
)
dao.store_entity(started_operation)
adapter = FlowService().build_adapter_instance(group)
data = {"test": 100}
TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) + space_taken_by_started - 1)
tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
self.assertRaises(
NoMemoryAvailableException,
self.operation_service.initiate_operation,
self.test_user,
self.test_project.id,
adapter,
tmp_folder,
**data
)
self._assert_no_dt2()
def test_stop_operation(self):
    """
    Test that an operation is successfully stopped.
    """
    group = dao.find_group("tvb.tests.framework.adapters.testadapter2", "TestAdapter2")
    adapter = FlowService().build_adapter_instance(group)
    algo_group = adapter.algorithm_group
    category = dao.get_category_by_id(algo_group.fk_category)
    algorithm = dao.get_algorithm_by_group(algo_group.id)
    operations, _ = self.operation_service.prepare_operations(
        self.test_user.id, self.test_project.id, algorithm, category, {}, test=5)
    self.operation_service._send_to_cluster(operations, adapter)
    self.operation_service.stop_operation(operations[0].id)
    stopped = dao.get_operation_by_id(operations[0].id)
    self.assertEqual(stopped.status, model.STATUS_CANCELED, "Operation should have been canceled!")
def test_stop_operation_finished(self):
    """
    Test that an operation that is already finished is not changed by the stop operation.
    """
    group = dao.find_group("tvb.tests.framework.adapters.testadapter1", "TestAdapter1")
    adapter = FlowService().build_adapter_instance(group)
    algo_group = adapter.algorithm_group
    category = dao.get_category_by_id(algo_group.fk_category)
    algorithm = dao.get_algorithm_by_group(algo_group.id)
    operations, _ = self.operation_service.prepare_operations(
        self.test_user.id, self.test_project.id, algorithm, category, {},
        test1_val1=5, test1_val2=5)
    self.operation_service._send_to_cluster(operations, adapter)
    # Mark the operation FINISHED before asking for the stop.
    finished_op = dao.get_operation_by_id(operations[0].id)
    finished_op.status = model.STATUS_FINISHED
    dao.store_entity(finished_op)
    self.operation_service.stop_operation(operations[0].id)
    refreshed = dao.get_operation_by_id(operations[0].id)
    self.assertEqual(refreshed.status, model.STATUS_FINISHED, "Operation shouldn't have been canceled!")
def test_array_from_string(self):
"""