This article collects typical usage examples of the Python method tvb.core.services.flow_service.FlowService.build_adapter_instance. If you are looking for answers to questions such as how exactly FlowService.build_adapter_instance is used, or what calling it looks like in real code, the curated examples below should help. You can also explore further usage examples of the containing class, tvb.core.services.flow_service.FlowService.
The following shows 7 code examples of the FlowService.build_adapter_instance method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
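Before the full examples, here is a minimal sketch of the typical call pattern. The adapter module and class names are simply borrowed from Example 1 below, and a configured TVB database together with the usual dao import from tvb.core.entities.storage are assumed rather than shown.
from tvb.core.entities.storage import dao
from tvb.core.services.flow_service import FlowService

flow_service = FlowService()
# Look up the stored AlgorithmGroup for the adapter (module/class names taken from Example 1 below).
algo_group = dao.find_group("tvb.tests.framework.adapters.testadapter3", "TestAdapter3")
# build_adapter_instance turns an AlgorithmGroup into a launchable adapter object;
# per Example 5, it raises OperationException when the class does not inherit from ABCAdapter.
adapter_instance = flow_service.build_adapter_instance(algo_group)
# The instance is then typically passed to fire_operation(adapter_instance, user, project_id, **kwargs).
The examples that follow show this same pattern in context: building the adapter from an AlgorithmGroup and then firing one or more operations with it.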
Example 1: test_datatypes_groups
# Required import: from tvb.core.services.flow_service import FlowService [as alias]
# Or: from tvb.core.services.flow_service.FlowService import build_adapter_instance [as alias]
def test_datatypes_groups(self):
"""
Tests that the DataTypeGroup is set correctly on the DataTypes resulting from the same operation group.
"""
flow_service = FlowService()
all_operations = dao.get_filtered_operations(self.test_project.id, None)
self.assertEqual(len(all_operations), 0, "There should be no operation")
algogroup = dao.find_group("tvb.tests.framework.adapters.testadapter3", "TestAdapter3")
group, _ = flow_service.prepare_adapter(self.test_project.id, algogroup)
adapter_instance = flow_service.build_adapter_instance(group)
data = {model.RANGE_PARAMETER_1: "param_5", "param_5": [1, 2]}
## Create Group of operations
flow_service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)
all_operations = dao.get_filtered_operations(self.test_project.id, None)
self.assertEqual(len(all_operations), 1, "Expected one operation group")
self.assertEqual(all_operations[0][2], 2, "Expected 2 operations in group")
operation_group_id = all_operations[0][3]
self.assertNotEquals(operation_group_id, None, "The operation should be part of a group.")
self.operation_service.stop_operation(all_operations[0][0])
self.operation_service.stop_operation(all_operations[0][1])
## Make sure operations are executed
self.operation_service.launch_operation(all_operations[0][0], False)
self.operation_service.launch_operation(all_operations[0][1], False)
resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
self.assertTrue(len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes)))
dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
self.assertEqual(dt.fk_datatype_group, datatype_group.id, "DataTypeGroup is incorrect")
Example 2: fire_simulation
# Required import: from tvb.core.services.flow_service import FlowService [as alias]
# Or: from tvb.core.services.flow_service.FlowService import build_adapter_instance [as alias]
def fire_simulation(project_id=1, **kwargs):
project = dao.get_project_by_id(project_id)
flow_service = FlowService()
# below the holy procedure to launch with the correct parameters taken from the defaults
_, algo_group = flow_service.get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)
simulator_adapter = flow_service.build_adapter_instance(algo_group)
flatten_interface = simulator_adapter.flaten_input_interface()
prepared_flatten_interface = flow_service.prepare_parameters(flatten_interface, project.id,
algo_group.fk_category)
launch_args = {}
for entry in prepared_flatten_interface:
value = entry['default']
if isinstance(value, dict):
value = str(value)
if hasattr(value, 'tolist'):
value = value.tolist()
launch_args[entry['name']] = value
launch_args.update(**kwargs)
# end of magic
launched_operation = flow_service.fire_operation(simulator_adapter, project.administrator,
project.id, **launch_args)[0]
return launched_operation
Example 3: ModelValidator
# Required import: from tvb.core.services.flow_service import FlowService [as alias]
# Or: from tvb.core.services.flow_service.FlowService import build_adapter_instance [as alias]
class ModelValidator(object):
overwrites = {}
def __init__(self, overwrites=None, settings_file=None):
""" Parameters can be overwritten either from a settigns file or from a dictionary. """
if overwrites is not None:
self.overwrites.update(overwrites)
if settings_file is not None:
settings = open(settings_file).read()
for line in settings.split('\n'):
key, value = line.split('=')
self.overwrites[key.strip()] = value.strip()
if KEY_PROJECT not in self.overwrites:
raise Exception("Settings file should contain the id of the project: %s=1" % KEY_PROJECT)
self.project = dao.get_project_by_id(self.overwrites[KEY_PROJECT])
self.flow_service = FlowService()
self.operation_service = OperationService()
def launch_validation(self):
"""
Prepare the arguments to be submitted and launch actual operations group.
TODO: Now get the results and check if any errors
"""
_, algo_group = self.flow_service.get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)
simulator_adapter = self.flow_service.build_adapter_instance(algo_group)
launch_args = {}
flatten_interface = simulator_adapter.flaten_input_interface()
prepared_flatten_interface = self.flow_service.prepare_parameters(flatten_interface, self.project.id,
algo_group.fk_category)
for entry in prepared_flatten_interface:
value = entry['default']
if isinstance(value, dict):
value = str(value)
if hasattr(value, 'tolist'):
value = value.tolist()
launch_args[entry['name']] = value
launch_args.update(self.overwrites)
nr_of_operations = 1
for key in self.overwrites:
if key.startswith(PARAM_RANGE_PREFIX):
range_values = self.operation_service.get_range_values(launch_args, key)
nr_of_operations *= len(range_values)
do_launch = False
print "Warning! This will launch %s operations. Do you agree? (yes/no)" % nr_of_operations
while 1:
accept = raw_input()
if accept.lower() == 'yes':
do_launch = True
break
if accept.lower() == 'no':
do_launch = False
break
print "Please type either yes or no"
if do_launch:
self.launched_operations = self.flow_service.fire_operation(simulator_adapter, self.project.administrator,
self.project.id, **launch_args)
return self.validate_results(0)
else:
return "Operation canceled by user."
def validate_results(self, last_verified_index):
error_count = 0
while last_verified_index < len(self.launched_operations):
operation_to_check = self.launched_operations[last_verified_index]
operation = dao.get_operation_by_id(operation_to_check.id)
if operation.status == STATUS_STARTED:
sleep(10)
if operation.status == STATUS_ERROR:
sys.stdout.write("E(" + str(operation_to_check.id) + ")")
error_count += 1
last_verified_index += 1
sys.stdout.flush()
if operation.status == STATUS_FINISHED:
last_verified_index += 1
sys.stdout.write('.')
sys.stdout.flush()
if error_count:
return "%s operations in error; %s operations successfully." % (error_count,
len(self.launched_operations) - error_count)
return "All operations finished successfully!"
Example 4: SpatioTemporalController
# Required import: from tvb.core.services.flow_service import FlowService [as alias]
# Or: from tvb.core.services.flow_service.FlowService import build_adapter_instance [as alias]
class SpatioTemporalController(BaseController):
"""
Base class which contains methods related to spatio-temporal actions.
"""
def __init__(self):
BaseController.__init__(self)
self.flow_service = FlowService()
self.logger = get_logger(__name__)
editable_entities = [dict(link='/spatial/stimulus/region/step_1_submit/1/1', title='Region Stimulus',
subsection='regionstim', description='Create a new Stimulus on Region level'),
dict(link='/spatial/stimulus/surface/step_1_submit/1/1', title='Surface Stimulus',
subsection='surfacestim', description='Create a new Stimulus on Surface level')]
self.submenu_list = editable_entities
@expose_page
@settings
def index(self, **data):
"""
Displays the main page for the spatio temporal section.
"""
template_specification = {'title': "Spatio temporal", 'data': data, 'mainContent': 'header_menu'}
return self.fill_default_attributes(template_specification)
@staticmethod
def display_surface(surface_gid):
"""
Generates the HTML for displaying the surface with the given ID.
"""
surface = ABCAdapter.load_entity_by_gid(surface_gid)
common.add2session(PARAM_SURFACE, surface_gid)
url_vertices_pick, url_normals_pick, url_triangles_pick = surface.get_urls_for_pick_rendering()
url_vertices, url_normals, _, url_triangles = surface.get_urls_for_rendering()
return {
'urlVerticesPick': json.dumps(url_vertices_pick),
'urlTrianglesPick': json.dumps(url_triangles_pick),
'urlNormalsPick': json.dumps(url_normals_pick),
'urlVertices': json.dumps(url_vertices),
'urlTriangles': json.dumps(url_triangles),
'urlNormals': json.dumps(url_normals),
'brainCenter': json.dumps(surface.center())
}
@staticmethod
def prepare_entity_interface(input_list):
"""
Prepares the input tree obtained from a creator.
"""
return {'inputList': input_list,
common.KEY_PARAMETERS_CONFIG: False}
def get_creator_and_interface(self, creator_module, creator_class, datatype_instance, lock_midpoint_for_eq=None):
"""
Returns a Tuple: a creator instance and a dictionary for the creator interface.
The interface is prepared for rendering, it is populated with existent data, in case of a
parameter of type DataType. The name of the attributes are also prefixed to identify groups.
"""
algo_group = self.flow_service.get_algorithm_by_module_and_class(creator_module, creator_class)[1]
group, _ = self.flow_service.prepare_adapter(common.get_current_project().id, algo_group)
# The interface returned by 'prepare_adapter' above is intentionally not used here,
# because the selects that display dataTypes would also contain the 'All' entry.
datatype_instance.trait.bound = traited_interface.INTERFACE_ATTRIBUTES_ONLY
input_list = datatype_instance.interface[traited_interface.INTERFACE_ATTRIBUTES]
if lock_midpoint_for_eq is not None:
for idx in lock_midpoint_for_eq:
input_list[idx] = self._lock_midpoints(input_list[idx])
category = self.flow_service.get_visualisers_category()
input_list = self.flow_service.prepare_parameters(input_list, common.get_current_project().id, category.id)
input_list = ABCAdapter.prepare_param_names(input_list)
return self.flow_service.build_adapter_instance(group), input_list
@staticmethod
def get_series_json(data, label):
""" For each data point entry, build the FLOT specific JSON. """
return '{"data": %s, "label": "%s"}' % (json.dumps(data), label)
@staticmethod
def build_final_json(list_of_series):
""" Given a list with all the data points, build the final FLOT json. """
return '[' + ','.join(list_of_series) + ']'
@staticmethod
def get_ui_message(list_of_equation_names):
"""
The message returned by this method should be displayed if
the equation with the given name couldn't be evaluated in all points.
"""
if list_of_equation_names:
return ("Could not evaluate the " + ", ".join(list_of_equation_names) + " equation(s) "
#......... some code omitted here .........
Example 5: FlowServiceTest
# Required import: from tvb.core.services.flow_service import FlowService [as alias]
# Or: from tvb.core.services.flow_service.FlowService import build_adapter_instance [as alias]
class FlowServiceTest(TransactionalTestCase):
"""
This class contains tests for the tvb.core.services.flow_service module.
"""
def setUp(self):
""" Prepare some entities to work with during tests:"""
self.flow_service = FlowService()
self.test_user = TestFactory.create_user()
self.test_project = TestFactory.create_project(admin=self.test_user)
### Insert some starting data in the database.
categ1 = model.AlgorithmCategory('one', True)
self.categ1 = dao.store_entity(categ1)
categ2 = model.AlgorithmCategory('two', rawinput=True)
self.categ2 = dao.store_entity(categ2)
group1 = model.AlgorithmGroup("test_module1", "classname1", categ1.id)
self.algo_group1 = dao.store_entity(group1)
group2 = model.AlgorithmGroup("test_module2", "classname2", categ2.id)
self.algo_group2 = dao.store_entity(group2)
group3 = model.AlgorithmGroup("test_module3", "classname3", categ1.id)
self.algo_group3 = dao.store_entity(group3)
group_v = model.AlgorithmGroup(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_VALID_CLASS, categ2.id)
self.algo_group_v = dao.store_entity(group_v)
algo_v = model.Algorithm(self.algo_group_v.id, 'ident', name='', req_data='', param_name='', output='')
self.algorithm_v = dao.store_entity(algo_v)
algo1 = model.Algorithm(self.algo_group1.id, 'id', name='', req_data='', param_name='', output='')
self.algorithm1 = dao.store_entity(algo1)
def tearDown(self):
for algo in [self.algorithm1, self.algorithm_v]:
dao.remove_entity(model.Algorithm, algo.id)
for group in [self.algo_group1, self.algo_group2, self.algo_group3, self.algo_group_v]:
dao.remove_entity(model.AlgorithmGroup, group.id)
for categ in [self.categ1, self.categ2]:
dao.remove_entity(model.AlgorithmCategory, categ.id)
def test_groups_for_categories(self):
"""
Test getting algorithms for specific categories.
"""
category1 = self.flow_service.get_groups_for_categories([self.categ1])
category2 = self.flow_service.get_groups_for_categories([self.categ2])
dummy = model.AlgorithmCategory('dummy', rawinput=True)
dummy.id = 999
unexisting_cat = self.flow_service.get_groups_for_categories([dummy])
self.assertEqual(len(category1), 2)
self.assertEqual(len(category2), 2)
self.assertEqual(len(unexisting_cat), 0)
for group in category1:
if group.module not in ["test_module1", "test_module3"]:
self.fail("Some invalid data retrieved")
for group in category2:
if group.module not in ["test_module2", TEST_ADAPTER_VALID_MODULE]:
self.fail("Some invalid data retrieved")
def test_get_broup_by_identifier(self):
"""
Test for get_algo_group_by_identifier.
"""
algo_ret = self.flow_service.get_algo_group_by_identifier(self.algo_group1.id)
self.assertEqual(algo_ret.id, self.algo_group1.id, "ID-s are different!")
self.assertEqual(algo_ret.module, self.algo_group1.module, "Modules are different!")
self.assertEqual(algo_ret.fk_category, self.algo_group1.fk_category, "Categories are different!")
self.assertEqual(algo_ret.classname, self.algo_group1.classname, "Class names are different!")
def test_build_adapter_instance(self):
"""
Test standard flow for building an adapter instance.
"""
algo_group = dao.find_group(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_VALID_CLASS)
adapter = ABCAdapter.build_adapter(algo_group)
self.assertTrue(isinstance(adapter, ABCSynchronous), "Something went wrong with valid data!")
def test_build_adapter_invalid(self):
"""
Test flow for trying to build an adapter that does not inherit from ABCAdapter.
"""
group = dao.find_group(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_INVALID_CLASS)
self.assertRaises(OperationException, self.flow_service.build_adapter_instance, group)
def test_prepare_adapter(self):
"""
#......... some code omitted here .........
Example 6: ProjectStructureTest
# Required import: from tvb.core.services.flow_service import FlowService [as alias]
# Or: from tvb.core.services.flow_service.FlowService import build_adapter_instance [as alias]
#......... some code omitted here .........
self.project_service.remove_datatype(project.id, first_dt.gid)
self._check_if_datatype_was_removed(first_dt)
self._check_if_datatype_was_removed(second_dt)
self._check_if_datatype_was_removed(datatype_group)
self._check_datatype_group_removed(dt_group_id, datatype_group.fk_operation_group)
def test_remove_datatype_group(self):
"""
Tests the deletion of a datatype group.
"""
project, dt_group_id, first_dt, second_dt = self._create_datatype_group()
datatype_group = dao.get_generic_entity(model.DataTypeGroup, dt_group_id)[0]
self.project_service.remove_datatype(project.id, datatype_group.gid)
self._check_if_datatype_was_removed(first_dt)
self._check_if_datatype_was_removed(second_dt)
self._check_if_datatype_was_removed(datatype_group)
self._check_datatype_group_removed(dt_group_id, datatype_group.fk_operation_group)
def _create_mapped_arrays(self, project_id):
"""
:param project_id: the project in which the arrays are created
:return: a list of dummy `MappedArray`
"""
count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
self.assertEqual(count, 0)
algo_group = dao.find_group('tvb.tests.framework.adapters.ndimensionarrayadapter', 'NDimensionArrayAdapter')
group, _ = self.flow_service.prepare_adapter(project_id, algo_group)
adapter_instance = self.flow_service.build_adapter_instance(group)
data = {'param_1': 'some value'}
#create 3 data types
self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
self.assertEqual(count, 1)
self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
self.assertEqual(count, 2)
self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
array_wrappers, count = self.flow_service.get_available_datatypes(project_id,
"tvb.datatypes.arrays.MappedArray")
self.assertEqual(count, 3)
return array_wrappers
def _create_operation(self, project_id, algorithm_id):
"""
dummy operation
:param project_id: the project in which the operation is created
:param algorithm_id: the algorithm to be run for the operation
:return: a dummy `Operation` with the given specifications
"""
algorithm = dao.get_algorithm_by_id(algorithm_id)
meta = {DataTypeMetaData.KEY_SUBJECT: "John Doe",
DataTypeMetaData.KEY_STATE: "RAW_DATA"}
operation = model.Operation(self.test_user.id, project_id, algorithm.id, 'test params',
meta=json.dumps(meta), status=model.STATUS_FINISHED)
return dao.store_entity(operation)
Example 7: SerializationManager
# Required import: from tvb.core.services.flow_service import FlowService [as alias]
# Or: from tvb.core.services.flow_service.FlowService import build_adapter_instance [as alias]
class SerializationManager(object):
"""
Constructs data types based on a burst configuration.
Updates the burst configuration.
"""
def __init__(self, conf):
"""
:param conf: burst configuration entity
"""
self.logger = get_logger(__name__)
self.flow_service = FlowService()
self.conf = conf
def _build_simulator_adapter(self):
_, group = self.flow_service.get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)
return self.flow_service.build_adapter_instance(group)
def has_model_pse_ranges(self):
""" Returns True if the burst configuration describes a range on a model parameter """
first_range = self.conf.get_simulation_parameter_value(RANGE_PARAMETER_1)
second_range = self.conf.get_simulation_parameter_value(RANGE_PARAMETER_2)
first_range_on = first_range is not None and str(first_range).startswith(MODEL_PARAMETERS)
second_range_on = second_range is not None and str(second_range).startswith(MODEL_PARAMETERS)
return first_range_on or second_range_on
def _get_params_dict(self):
""" Convert ui inputs from the configuration to python types """
simulator_adapter = self._build_simulator_adapter()
return simulator_adapter.convert_ui_inputs(self.conf.get_all_simulator_values()[0], False)
def __make_instance_from_burst_config(self, params_dict, parent_class, class_name_key, params_key):
""" This is used internally to create a model or an integrator based on the burst config """
class_name = self.conf.get_simulation_parameter_value(class_name_key)
parameters = params_dict[params_key]
noise_framework.build_noise(parameters)
try:
return get_traited_instance_for_name(class_name, parent_class, parameters)
except Exception:
self.logger.exception("Could not create an instance of %s with the given parameters. "
"A new instance will be created with the default values." % class_name)
return get_traited_instance_for_name(class_name, parent_class, {})
def __make_shallow_model(self):
""" Creates a model of the type present in the config without setting any parameters on it """
class_name = self.conf.get_simulation_parameter_value(PARAM_MODEL)
return get_traited_instance_for_name(class_name, Model, {})
def make_model_and_integrator(self):
"""
:return: A model and an integrator.
:rtype: Model, Integrator
"""
params_dict = self._get_params_dict()
model = self.__make_instance_from_burst_config(params_dict, Model, PARAM_MODEL, MODEL_PARAMETERS)
integrator = self.__make_instance_from_burst_config(params_dict, Integrator,
PARAM_INTEGRATOR, INTEGRATOR_PARAMETERS)
return model, integrator
def get_connectivity(self):
""" Prepare Connectivity """
connectivity_gid = self.conf.get_simulation_parameter_value(PARAM_CONNECTIVITY)
return ABCAdapter.load_entity_by_gid(connectivity_gid)
def get_surface(self):
""" Prepare Surface """
surface_gid = self.conf.get_simulation_parameter_value(PARAM_SURFACE)
if surface_gid:
return ABCAdapter.load_entity_by_gid(surface_gid)
return None
@staticmethod
def group_parameter_values_by_name(model_parameters_list):
"""
@:param model_parameters_list: Given a list of model parameters like this:
[{"a": 2.0, 'b': 1.0},
{"a": 3.0, 'b': 7.0}])
@:return: This method will group them by param name to get:
{'a': [2.0, 3.0], 'b': [1.0, 7.0]}
"""
ret = {}
for model_parameters in model_parameters_list:
for param_name, param_val in model_parameters.iteritems():
if param_name not in ret:
ret[param_name] = []
ret[param_name].append(param_val)
return ret
def write_model_parameters(self, model_name, model_parameters_list):
"""
Update model parameters in burst config.
#......... some code omitted here .........