This article collects typical usage examples of the Python method tvb.core.services.operation_service.OperationService.launch_operation. If you are wondering what OperationService.launch_operation does, how to call it, or simply want to see it used in real code, the curated samples below may help. You can also explore other usages of its containing class, tvb.core.services.operation_service.OperationService.
Four code examples of the OperationService.launch_operation method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code samples.
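Before diving into the examples, here is a minimal sketch of the call pattern they all share. The names user_id, project_id, algo, algo_category, args and adapter_instance are placeholders for values the examples obtain from their test factories, and the comments describe the usage only as it appears in these samples:

from tvb.core.services.operation_service import OperationService

service = OperationService()
# prepare_operations builds the Operation entities for a user/project and returns a tuple
# whose first element is the list of operations (see Example 1):
# operations, operation_group = service.prepare_operations(user_id, project_id, algo,
#                                                          algo_category, {}, **args)
# Every example passes False as the second argument of launch_operation, which appears to
# request a local (non-cluster) launch; a pre-built adapter instance may also be passed:
# service.launch_operation(operations[0].id, False, adapter_instance)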
Example 1: create_group
# Required import: from tvb.core.services.operation_service import OperationService [as alias]
# Or: from tvb.core.services.operation_service.OperationService import launch_operation [as alias]
def create_group(test_user=None, test_project=None, subject="John Doe"):
    """
    Create a group of 2 operations, each with at least one resultant DataType.
    """
    if test_user is None:
        test_user = TestFactory.create_user()
    if test_project is None:
        test_project = TestFactory.create_project(test_user)
    ### Retrieve Adapter instance
    algo_group = dao.find_group('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
    algo_category = dao.get_category_by_id(algo_group.fk_category)
    algo = dao.get_algorithm_by_group(algo_group.id)
    adapter_inst = TestFactory.create_adapter(algo_group=algo_group, test_project=test_project)
    adapter_inst.meta_data = {DataTypeMetaData.KEY_SUBJECT: subject}
    args = {model.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
    ### Prepare Operations group. Execute them synchronously
    service = OperationService()
    operations = service.prepare_operations(test_user.id, test_project.id, algo, algo_category, {}, **args)[0]
    service.launch_operation(operations[0].id, False, adapter_inst)
    service.launch_operation(operations[1].id, False, adapter_inst)
    resulted_dts = dao.get_datatype_in_group(operation_group_id=operations[0].fk_operation_group)
    return resulted_dts, operations[0].fk_operation_group
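A short note on this helper: both operations in the group are launched directly with launch_operation, passing False as the second argument (apparently a local, non-cluster launch) together with the pre-built adapter instance, so the resulting DataTypes can be queried from the operation group immediately afterwards.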
Example 2: TestWorkflow
# Required import: from tvb.core.services.operation_service import OperationService [as alias]
# Or: from tvb.core.services.operation_service.OperationService import launch_operation [as alias]
class TestWorkflow(TransactionalTestCase):
    """
    Test that workflow conversion methods are valid.
    """

    def transactional_setup_method(self):
        """
        Sets up the testing environment;
        saves config file;
        creates a test user, a test project;
        creates burst, operation, flow and workflow services
        """
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.workflow_service = WorkflowService()
        self.burst_service = BurstService()
        self.operation_service = OperationService()
        self.flow_service = FlowService()

    def transactional_teardown_method(self):
        """
        Remove project folders and clean up database.
        """
        FilesHelper().remove_project_structure(self.test_project.name)
        self.delete_project_folders()

    def __create_complex_workflow(self, workflow_step_list):
        """
        Creates a burst with a complex workflow with a given list of workflow steps.
        :param workflow_step_list: a list of workflow steps that will be used in the
                                   creation of a new workflow for a new burst
        """
        burst_config = TestFactory.store_burst(self.test_project.id)
        stored_dt = datatypes_factory.DatatypesFactory()._store_datatype(Datatype1())
        first_step_algorithm = self.flow_service.get_algorithm_by_module_and_class(
            "tvb.tests.framework.adapters.testadapter1", "TestAdapterDatatypeInput")
        metadata = {DataTypeMetaData.KEY_BURST: burst_config.id}
        kwargs = {"test_dt_input": stored_dt.gid, 'test_non_dt_input': '0'}
        operations, group = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id,
                                                                      first_step_algorithm,
                                                                      first_step_algorithm.algorithm_category,
                                                                      metadata, **kwargs)
        workflows = self.workflow_service.create_and_store_workflow(project_id=self.test_project.id,
                                                                    burst_id=burst_config.id,
                                                                    simulator_index=0,
                                                                    simulator_id=first_step_algorithm.id,
                                                                    operations=operations)
        self.operation_service.prepare_operations_for_workflowsteps(workflow_step_list, workflows, self.test_user.id,
                                                                    burst_config.id, self.test_project.id, group,
                                                                    operations)
        # fire the first op
        if len(operations) > 0:
            self.operation_service.launch_operation(operations[0].id, False)
        return burst_config.id

    def test_workflow_generation(self):
        """
        A simple test just for the fact that a workflow is created and run,
        no dynamic parameters are passed. In this case we create a two-step
        workflow: step1 - tvb.tests.framework.adapters.testadapter2.TestAdapter2
                  step2 - tvb.tests.framework.adapters.testadapter1.TestAdapter1
        The first adapter doesn't return anything and the second returns one
        tvb.datatypes.datatype1.Datatype1 instance. We check that the steps
        are actually run by checking that two operations are created and that
        one dataType is stored.
        """
        workflow_step_list = [TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter2",
                                                               "TestAdapter2", step_index=1,
                                                               static_kwargs={"test2": 2}),
                              TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter1",
                                                               "TestAdapter1", step_index=2,
                                                               static_kwargs={"test1_val1": 1, "test1_val2": 1})]
        self.__create_complex_workflow(workflow_step_list)
        stored_datatypes = dao.get_datatypes_in_project(self.test_project.id)
        assert len(stored_datatypes) == 2, "DataType from second step was not stored."
        assert stored_datatypes[0].type == 'Datatype1', "Wrong type was stored."
        assert stored_datatypes[1].type == 'Datatype1', "Wrong type was stored."
        finished, started, error, _, _ = dao.get_operation_numbers(self.test_project.id)
        assert finished == 3, "Didn't start operations for both adapters in workflow."
        assert started == 0, "Some operations from workflow didn't finish."
        assert error == 0, "Some operations finished with error status."

    def test_workflow_dynamic_params(self):
        """
        A simple test just for the fact that dynamic parameters are passed properly
        between two workflow steps:
                  step1 - tvb.tests.framework.adapters.testadapter1.TestAdapter1
                  step2 - tvb.tests.framework.adapters.testadapter3.TestAdapter3
        The first adapter returns a tvb.datatypes.datatype1.Datatype1 instance.
        The second adapter has this passed as a dynamic workflow parameter.
        We check that the steps are actually run by checking that two operations
        #......... part of the code is omitted here .........
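Worth noting in this example: only the first operation of the workflow is launched explicitly with launch_operation. The finished == 3 assertion in test_workflow_generation suggests that, once that operation completes, the remaining workflow steps prepared by prepare_operations_for_workflowsteps are chained and executed by the framework itself.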
Example 3: TestOperationService
# Required import: from tvb.core.services.operation_service import OperationService [as alias]
# Or: from tvb.core.services.operation_service.OperationService import launch_operation [as alias]
class TestOperationService(BaseTestCase):
    """
    Test class for the introspection module. Some tests from here do async launches. For those
    cases Transactional tests won't work.
    TODO: this is still to be refactored, for being huge, with duplicates and many irrelevant checks
    """

    def setup_method(self):
        """
        Reset the database before each test.
        """
        self.clean_database()
        initialize_storage()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.operation_service = OperationService()
        self.backup_hdd_size = TvbProfile.current.MAX_DISK_SPACE

    def teardown_method(self):
        """
        Reset the database when the test is done.
        """
        TvbProfile.current.MAX_DISK_SPACE = self.backup_hdd_size
        self.clean_database()

    def _assert_no_dt2(self):
        count = dao.count_datatypes(self.test_project.id, Datatype2)
        assert 0 == count

    def _assert_stored_dt2(self, expected_cnt=1):
        count = dao.count_datatypes(self.test_project.id, Datatype2)
        assert expected_cnt == count
        datatype = dao.try_load_last_entity_of_type(self.test_project.id, Datatype2)
        assert datatype.subject == DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored."
        return datatype

    def test_datatypes_groups(self):
        """
        Tests if the dataType group is set correctly on the dataTypes resulting from the same operation group.
        """
        flow_service = FlowService()
        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        assert len(all_operations) == 0, "There should be no operation"
        adapter_instance = TestFactory.create_adapter('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        data = {model.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
        ## Create Group of operations
        flow_service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)
        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        assert len(all_operations) == 1, "Expected one operation group"
        assert all_operations[0][2] == 2, "Expected 2 operations in group"
        operation_group_id = all_operations[0][3]
        assert operation_group_id is not None, "The operation should be part of a group."
        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        ## Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)
        resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
        assert len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes))
        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
        assert dt.fk_datatype_group == datatype_group.id, "DataTypeGroup is incorrect"

    def test_initiate_operation(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        module = "tvb.tests.framework.adapters.testadapter1"
        class_name = "TestAdapter1"
        adapter = TestFactory.create_adapter(module, class_name)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test1_val1": 5, "test1_val2": 5}
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        res = self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                        tmp_folder, **data)
        assert res.index("has finished.") > 10, "Operation didn't finish"
        group = dao.get_algorithm_by_module(module, class_name)
        assert group.module == 'tvb.tests.framework.adapters.testadapter1', "Wrong data stored."
        assert group.classname == 'TestAdapter1', "Wrong data stored."
        dts, count = dao.get_values_of_datatype(self.test_project.id, Datatype1)
        assert count == 1
        assert len(dts) == 1
        datatype = dao.get_datatype_by_id(dts[0][0])
        assert datatype.subject == DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored."
        assert datatype.type == output_type, "Wrong data stored."

    #......... part of the code is omitted here .........
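Unlike the group-launch examples above, test_initiate_operation does not call launch_operation directly; judging by the assertions, initiate_operation wraps creating and running a single operation and returns a status message containing "has finished." once it completes.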
Example 4: OperationServiceTest
# Required import: from tvb.core.services.operation_service import OperationService [as alias]
# Or: from tvb.core.services.operation_service.OperationService import launch_operation [as alias]
class OperationServiceTest(BaseTestCase):
    """
    Test class for the introspection module. Some tests from here do async launches. For those
    cases Transactional tests won't work.
    TODO: this is still to be refactored, for being huge, with duplicates and many irrelevant checks
    """

    def setUp(self):
        """
        Reset the database before each test.
        """
        self.clean_database()
        initialize_storage()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.operation_service = OperationService()
        self.backup_hdd_size = TvbProfile.current.MAX_DISK_SPACE

    def tearDown(self):
        """
        Reset the database when the test is done.
        """
        TvbProfile.current.MAX_DISK_SPACE = self.backup_hdd_size
        self.clean_database()

    def _assert_no_dt2(self):
        count = dao.count_datatypes(self.test_project.id, Datatype2)
        self.assertEqual(0, count)

    def _assert_stored_dt2(self, expected_cnt=1):
        count = dao.count_datatypes(self.test_project.id, Datatype2)
        self.assertEqual(expected_cnt, count)
        datatype = dao.try_load_last_entity_of_type(self.test_project.id, Datatype2)
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        return datatype

    def test_datatypes_groups(self):
        """
        Tests if the dataType group is set correctly on the dataTypes resulting from the same operation group.
        """
        flow_service = FlowService()
        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 0, "There should be no operation")
        algogroup = dao.find_group("tvb.tests.framework.adapters.testadapter3", "TestAdapter3")
        group, _ = flow_service.prepare_adapter(self.test_project.id, algogroup)
        adapter_instance = flow_service.build_adapter_instance(group)
        data = {model.RANGE_PARAMETER_1: "param_5", "param_5": [1, 2]}
        ## Create Group of operations
        flow_service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)
        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 1, "Expected one operation group")
        self.assertEqual(all_operations[0][2], 2, "Expected 2 operations in group")
        operation_group_id = all_operations[0][3]
        self.assertNotEquals(operation_group_id, None, "The operation should be part of a group.")
        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        ## Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)
        resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
        self.assertTrue(len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes)))
        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
        self.assertEqual(dt.fk_datatype_group, datatype_group.id, "DataTypeGroup is incorrect")

    def test_initiate_operation(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        module = "tvb.tests.framework.adapters.testadapter1"
        class_name = "TestAdapter1"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test1_val1": 5, "test1_val2": 5}
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        res = self.operation_service.initiate_operation(
            self.test_user, self.test_project.id, adapter, tmp_folder, **data
        )
        self.assertTrue(res.index("has finished.") > 10, "Operation didn't finish")
        group = dao.find_group(module, class_name)
        self.assertEqual(group.module, "tvb.tests.framework.adapters.testadapter1", "Wrong data stored.")
        self.assertEqual(group.classname, "TestAdapter1", "Wrong data stored.")
        dts, count = dao.get_values_of_datatype(self.test_project.id, Datatype1)
        self.assertEqual(count, 1)
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")

    def test_delete_dt_free_HDD_space(self):
        """
        #......... part of the code is omitted here .........
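This last example is essentially the unittest-style counterpart of Example 3: the same scenarios written with setUp/tearDown and self.assertEqual instead of setup_method and plain assert statements, and with the adapter built through the older dao.find_group / FlowService().build_adapter_instance path rather than TestFactory.create_adapter.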