This article collects typical usage examples of the Python class tvb.core.services.flow_service.FlowService. If you are wondering what the FlowService class does, how it is used, or what FlowService code looks like in practice, the curated class examples below may help.
A total of 15 code examples of the FlowService class are shown, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
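Before diving into the examples, here is a minimal, hypothetical sketch of the pattern most of them share: instantiate FlowService, look up a stored algorithm group, build its adapter, and fire an operation. The helper name launch_test_adapter, the test adapter module/class, and the keyword parameters are illustrative placeholders, and the code assumes a fully initialized TVB 1.x framework (database, user and project already created).

from tvb.core.entities.storage import dao
from tvb.core.services.flow_service import FlowService


def launch_test_adapter(user, project, **kwargs):
    # Hypothetical helper: look up an algorithm group by module/class (placeholder names),
    # build the corresponding adapter and schedule it as an operation for the given
    # user and project. This mirrors the pattern used throughout the examples below.
    flow_service = FlowService()
    algo_group = dao.find_group("tvb.tests.framework.adapters.testadapter1", "TestAdapter1")
    adapter = flow_service.build_adapter_instance(algo_group)
    # fire_operation persists and launches the operation(s); keyword arguments become
    # the adapter's launch parameters (e.g. test1_val1=5, test1_val2=5 in Example 6).
    return flow_service.fire_operation(adapter, user, project.id, **kwargs)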
Example 1: test_launch_two_ops_HDD_full_space
def test_launch_two_ops_HDD_full_space(self):
    """
    Launch two operations, giving the user only enough disk space for the first one to finish;
    after the user's HDD quota is updated, the second one should fail.
    """
    module = "tvb.tests.framework.adapters.testadapter3"
    class_name = "TestAdapterHDDRequired"
    group = dao.find_group(module, class_name)
    adapter = FlowService().build_adapter_instance(group)
    data = {"test": 100}
    TvbProfile.current.MAX_DISK_SPACE = 1 + float(adapter.get_required_disk_size(**data))
    tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
    self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
    datatype = self._assert_stored_dt2()
    # Now update the maximum disk size to be less than the size of the previously resulted datatype (kB to MB)
    # plus what is estimated to be required by the next one (B to MB)
    TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size - 1) + float(
        adapter.get_required_disk_size(**data) - 1
    )
    self.assertRaises(
        NoMemoryAvailableException,
        self.operation_service.initiate_operation,
        self.test_user,
        self.test_project.id,
        adapter,
        tmp_folder,
        **data
    )
    self._assert_stored_dt2()
Example 2: test_launch_operation_HDD_full_space_started_ops
def test_launch_operation_HDD_full_space_started_ops(self):
    """
    Test the actual operation flow by executing a test adapter.
    """
    space_taken_by_started = 100
    module = "tvb.tests.framework.adapters.testadapter3"
    class_name = "TestAdapterHDDRequired"
    group = dao.find_group(module, class_name)
    started_operation = model.Operation(
        self.test_user.id,
        self.test_project.id,
        group.id,
        "",
        status=model.STATUS_STARTED,
        estimated_disk_size=space_taken_by_started,
    )
    dao.store_entity(started_operation)
    adapter = FlowService().build_adapter_instance(group)
    data = {"test": 100}
    TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) + space_taken_by_started - 1)
    tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
    self.assertRaises(
        NoMemoryAvailableException,
        self.operation_service.initiate_operation,
        self.test_user,
        self.test_project.id,
        adapter,
        tmp_folder,
        **data
    )
    self._assert_no_dt2()
Example 3: _BaseLinksTest
class _BaseLinksTest(TransactionalTestCase):
    GEORGE1st = "george the grey"
    GEORGE2nd = "george"

    def setUpTVB(self):
        """
        Creates a user, an algorithm and 2 projects.
        Project src_project will have an operation and 2 datatypes.
        Project dest_project will be empty.
        Initializes a flow and a project service.
        """
        datatype_factory = DatatypesFactory()
        self.user = datatype_factory.user
        self.src_project = datatype_factory.project
        self.red_datatype = datatype_factory.create_simple_datatype(subject=self.GEORGE1st)
        self.blue_datatype = datatype_factory.create_datatype_with_storage(subject=self.GEORGE2nd)
        # create the destination project
        self.dest_project = TestFactory.create_project(admin=datatype_factory.user, name="destination")
        self.flow_service = FlowService()
        self.project_service = ProjectService()

    def tearDown(self):
        self.clean_database(delete_folders=True)

    def red_datatypes_in(self, project_id):
        return self.flow_service.get_available_datatypes(project_id, Datatype1)[1]

    def blue_datatypes_in(self, project_id):
        return self.flow_service.get_available_datatypes(project_id, Datatype2)[1]
Example 4: test_datatypes_groups
def test_datatypes_groups(self):
    """
    Tests that the DataType group is set correctly on the DataTypes resulting from the same operation group.
    """
    flow_service = FlowService()
    all_operations = dao.get_filtered_operations(self.test_project.id, None)
    self.assertEqual(len(all_operations), 0, "There should be no operation")
    algogroup = dao.find_group("tvb.tests.framework.adapters.testadapter3", "TestAdapter3")
    group, _ = flow_service.prepare_adapter(self.test_project.id, algogroup)
    adapter_instance = flow_service.build_adapter_instance(group)
    data = {model.RANGE_PARAMETER_1: "param_5", "param_5": [1, 2]}
    ## Create Group of operations
    flow_service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)
    all_operations = dao.get_filtered_operations(self.test_project.id, None)
    self.assertEqual(len(all_operations), 1, "Expected one operation group")
    self.assertEqual(all_operations[0][2], 2, "Expected 2 operations in group")
    operation_group_id = all_operations[0][3]
    self.assertNotEquals(operation_group_id, None, "The operation should be part of a group.")
    self.operation_service.stop_operation(all_operations[0][0])
    self.operation_service.stop_operation(all_operations[0][1])
    ## Make sure operations are executed
    self.operation_service.launch_operation(all_operations[0][0], False)
    self.operation_service.launch_operation(all_operations[0][1], False)
    resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
    self.assertTrue(len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes)))
    dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
    datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
    self.assertEqual(dt.fk_datatype_group, datatype_group.id, "DataTypeGroup is incorrect")
Example 5: fire_simulation
def fire_simulation(project_id=1, **kwargs):
    project = dao.get_project_by_id(project_id)
    flow_service = FlowService()
    # below the holy procedure to launch with the correct parameters taken from the defaults
    stored_adapter = flow_service.get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)
    simulator_adapter = ABCAdapter.build_adapter(stored_adapter)
    flatten_interface = simulator_adapter.flaten_input_interface()
    itree_mngr = flow_service.input_tree_manager
    prepared_flatten_interface = itree_mngr.fill_input_tree_with_options(flatten_interface, project.id,
                                                                         stored_adapter.fk_category)
    launch_args = {}
    for entry in prepared_flatten_interface:
        value = entry['default']
        if isinstance(value, dict):
            value = str(value)
        if hasattr(value, 'tolist'):
            value = value.tolist()
        launch_args[entry['name']] = value
    launch_args.update(**kwargs)
    # end of magic
    launched_operation = flow_service.fire_operation(simulator_adapter, project.administrator,
                                                     project.id, **launch_args)[0]
    return launched_operation
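For orientation, a hypothetical call to the helper above. The simulation_length keyword is an assumed simulator parameter, used here only to show how **kwargs overrides the defaults, and it presumes a running TVB framework with an existing project whose id is 1.

# Launch a simulation with default parameters, overriding only the length (keyword name assumed).
operation = fire_simulation(project_id=1, simulation_length=100.0)
# model.Operation entities expose their current state, e.g.:
print(operation.status)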
Example 6: test_initiate_operation
def test_initiate_operation(self):
    """
    Test the actual operation flow by executing a test adapter.
    """
    module = "tvb.tests.framework.adapters.testadapter1"
    class_name = "TestAdapter1"
    group = dao.find_group(module, class_name)
    adapter = FlowService().build_adapter_instance(group)
    output = adapter.get_output()
    output_type = output[0].__name__
    data = {"test1_val1": 5, "test1_val2": 5}
    tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
    res = self.operation_service.initiate_operation(
        self.test_user, self.test_project.id, adapter, tmp_folder, **data
    )
    self.assertTrue(res.index("has finished.") > 10, "Operation didn't finish")
    group = dao.find_group(module, class_name)
    self.assertEqual(group.module, "tvb.tests.framework.adapters.testadapter1", "Wrong data stored.")
    self.assertEqual(group.classname, "TestAdapter1", "Wrong data stored.")
    dts, count = dao.get_values_of_datatype(self.test_project.id, Datatype1)
    self.assertEqual(count, 1)
    self.assertEqual(len(dts), 1)
    datatype = dao.get_datatype_by_id(dts[0][0])
    self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
    self.assertEqual(datatype.type, output_type, "Wrong data stored.")
Example 7: test_datatypes_groups
def test_datatypes_groups(self):
    """
    Tests that the DataType group is set correctly on the DataTypes resulting from the same operation group.
    """
    flow_service = FlowService()
    all_operations = dao.get_filtered_operations(self.test_project.id, None)
    assert len(all_operations) == 0, "There should be no operation"
    adapter_instance = TestFactory.create_adapter('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
    data = {model.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
    ## Create Group of operations
    flow_service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)
    all_operations = dao.get_filtered_operations(self.test_project.id, None)
    assert len(all_operations) == 1, "Expected one operation group"
    assert all_operations[0][2] == 2, "Expected 2 operations in group"
    operation_group_id = all_operations[0][3]
    assert operation_group_id is not None, "The operation should be part of a group."
    self.operation_service.stop_operation(all_operations[0][0])
    self.operation_service.stop_operation(all_operations[0][1])
    ## Make sure operations are executed
    self.operation_service.launch_operation(all_operations[0][0], False)
    self.operation_service.launch_operation(all_operations[0][1], False)
    resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
    assert len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes))
    dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
    datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
    assert dt.fk_datatype_group == datatype_group.id, "DataTypeGroup is incorrect"
Example 8: _store_imported_datatypes_in_db
def _store_imported_datatypes_in_db(self, project, all_datatypes, dt_burst_mappings, burst_ids_mapping):
    def by_time(dt):
        return dt.create_date or datetime.now()

    if burst_ids_mapping is None:
        burst_ids_mapping = {}
    if dt_burst_mappings is None:
        dt_burst_mappings = {}
    all_datatypes.sort(key=by_time)
    for datatype in all_datatypes:
        old_burst_id = dt_burst_mappings.get(datatype.gid)
        if old_burst_id is not None:
            datatype.fk_parent_burst = burst_ids_mapping[old_burst_id]
        datatype_allready_in_tvb = dao.get_datatype_by_gid(datatype.gid)
        if not datatype_allready_in_tvb:
            # Compute disk size. Similar to ABCAdapter._capture_operation_results.
            # No need to close the h5 as we have not written to it.
            associated_file = os.path.join(datatype.storage_path, datatype.get_storage_file_name())
            datatype.disk_size = FilesHelper.compute_size_on_disk(associated_file)
            self.store_datatype(datatype)
        else:
            FlowService.create_link([datatype_allready_in_tvb.id], project.id)
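The linking branch at the end of Example 8 is also worth noting on its own: when a DataType with the same gid already exists, the importer links the existing entity into the target project instead of storing a duplicate. A hypothetical standalone use, where existing_datatype and destination_project are placeholder entities already loaded from the database:

# Link an already-stored DataType (by database id) into another project,
# rather than importing it a second time.
FlowService.create_link([existing_datatype.id], destination_project.id)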
Example 9: test_launch_two_ops_HDD_full_space
def test_launch_two_ops_HDD_full_space(self):
    """
    Launch two operations, giving the user only enough disk space for the first one to finish;
    after the user's HDD quota is updated, the second one should fail.
    """
    module = "tvb.tests.framework.adapters.testadapter3"
    class_name = "TestAdapterHDDRequired"
    group = dao.find_group(module, class_name)
    adapter = FlowService().build_adapter_instance(group)
    output = adapter.get_output()
    output_type = output[0].__name__
    data = {"test": 100}
    TvbProfile.current.MAX_DISK_SPACE = (1 + float(adapter.get_required_disk_size(**data)))
    tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
    self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                              tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
    dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
    self.assertEqual(len(dts), 1)
    datatype = dao.get_datatype_by_id(dts[0][0])
    self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
    self.assertEqual(datatype.type, output_type, "Wrong data stored.")
    # Now update the maximum disk size to be less than the size of the previously resulted datatype (kB to MB)
    # plus what is estimated to be required by the next one (B to MB)
    TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size - 1) + \
                                        float(adapter.get_required_disk_size(**data) - 1)
    self.assertRaises(NoMemoryAvailableException, self.operation_service.initiate_operation, self.test_user,
                      self.test_project.id, adapter,
                      tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
    dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
    self.assertEqual(len(dts), 1)
Example 10: test_adapter_memory
def test_adapter_memory(self):
    """
    Test that the adapter reports the expected required memory size
    (the test adapter is expected to return 42).
    """
    algo_group = dao.find_group("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
    adapter = FlowService().build_adapter_instance(algo_group)
    self.assertEqual(42, adapter.get_required_memory_size())
Example 11: test_launch_operation_HDD_with_space
def test_launch_operation_HDD_with_space(self):
    """
    Test the actual operation flow by executing a test adapter.
    """
    module = "tvb.tests.framework.adapters.testadapter3"
    class_name = "TestAdapterHDDRequired"
    group = dao.find_group(module, class_name)
    adapter = FlowService().build_adapter_instance(group)
    data = {"test": 100}
    TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data))
    tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
    self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
    self._assert_stored_dt2()
Example 12: test_launch_operation_HDD_full_space
def test_launch_operation_HDD_full_space(self):
    """
    Test the actual operation flow by executing a test adapter.
    """
    module = "tvb.tests.framework.adapters.testadapter3"
    class_name = "TestAdapterHDDRequired"
    group = dao.find_group(module, class_name)
    adapter = FlowService().build_adapter_instance(group)
    data = {"test": 100}
    TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) - 1)
    tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
    self.assertRaises(NoMemoryAvailableException, self.operation_service.initiate_operation, self.test_user,
                      self.test_project.id, adapter,
                      tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
    self._assert_no_dt2()
Example 13: setUp
def setUp(self):
    """ Prepare some entities to work with during tests:"""
    self.flow_service = FlowService()
    self.test_user = TestFactory.create_user()
    self.test_project = TestFactory.create_project(admin=self.test_user)
    ### Insert some starting data in the database.
    categ1 = model.AlgorithmCategory('one', True)
    self.categ1 = dao.store_entity(categ1)
    categ2 = model.AlgorithmCategory('two', rawinput=True)
    self.categ2 = dao.store_entity(categ2)
    group1 = model.AlgorithmGroup("test_module1", "classname1", categ1.id)
    self.algo_group1 = dao.store_entity(group1)
    group2 = model.AlgorithmGroup("test_module2", "classname2", categ2.id)
    self.algo_group2 = dao.store_entity(group2)
    group3 = model.AlgorithmGroup("test_module3", "classname3", categ1.id)
    self.algo_group3 = dao.store_entity(group3)
    group_v = model.AlgorithmGroup(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_VALID_CLASS, categ2.id)
    self.algo_group_v = dao.store_entity(group_v)
    algo_v = model.Algorithm(self.algo_group_v.id, 'ident', name='', req_data='', param_name='', output='')
    self.algorithm_v = dao.store_entity(algo_v)
    algo1 = model.Algorithm(self.algo_group1.id, 'id', name='', req_data='', param_name='', output='')
    self.algorithm1 = dao.store_entity(algo1)
Example 14: __init__
def __init__(self, conf):
    """
    :param conf: burst configuration entity
    """
    self.logger = get_logger(__name__)
    self.flow_service = FlowService()
    self.conf = conf
Example 15: __init__
def __init__(self):
    self.logger = get_logger(self.__class__.__module__)
    self.user_service = UserService()
    self.flow_service = FlowService()
    analyze_category = self.flow_service.get_launchable_non_viewers()
    self.analyze_category_link = '/flow/step/' + str(analyze_category.id)
    self.analyze_adapters = None
    self.connectivity_tab_link = '/flow/step_connectivity'
    view_category = self.flow_service.get_visualisers_category()
    conn_id = self.flow_service.get_algorithm_by_module_and_class(CONNECTIVITY_MODULE, CONNECTIVITY_CLASS)[1].id
    connectivity_link = self.get_url_adapter(view_category.id, conn_id)
    self.connectivity_submenu = [dict(title="Large Scale Connectivity", subsection="connectivity",
                                      description="View Connectivity Regions. Perform Connectivity lesions",
                                      link=connectivity_link),
                                 dict(title="Local Connectivity", subsection="local",
                                      link='/spatial/localconnectivity/step_1/1',
                                      description="Create or view existent Local Connectivity entities.")]
    self.burst_submenu = [dict(link='/burst', subsection='burst',
                               title='Simulation Cockpit', description='Manage simulations'),
                          dict(link='/burst/dynamic', subsection='dynamic',
                               title='Phase plane', description='Configure model dynamics')]