

Python FlowService.prepare_adapter Method Code Examples

This article collects typical usage examples of the Python method tvb.core.services.flow_service.FlowService.prepare_adapter. If you are wondering what FlowService.prepare_adapter does, how to call it, or what real-world uses look like, the curated code examples below should help. You can also explore further usage examples of the containing class, tvb.core.services.flow_service.FlowService.


Six code examples of FlowService.prepare_adapter are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
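
Before the examples, here is a minimal sketch (not part of the collected examples) of the typical call pattern, assuming the older tvb-framework API in which prepare_adapter returns a (group, interface) tuple; newer versions (see Examples 2 and 6) return only the input interface. The adapter module/class names, project id, user, and parameters below are hypothetical placeholders.

from tvb.core.entities.storage import dao
from tvb.core.services.flow_service import FlowService

flow_service = FlowService()
project_id = 1        # placeholder: id of an existing project
current_user = None   # placeholder: a logged-in model.User entity

# Locate the stored algorithm group for a hypothetical adapter.
algo_group = dao.find_group('tvb.adapters.some_module', 'SomeAdapter')

# Prepare the adapter: returns the group plus a rendering-ready input tree
# populated with the project's existing dataTypes.
group, interface = flow_service.prepare_adapter(project_id, algo_group)

# Build an adapter instance and launch an operation with user-supplied parameters.
adapter_instance = flow_service.build_adapter_instance(group)
flow_service.fire_operation(adapter_instance, current_user, project_id, **{'param_1': 'some value'})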

Example 1: test_datatypes_groups

# Required import: from tvb.core.services.flow_service import FlowService [as alias]
# Or: from tvb.core.services.flow_service.FlowService import prepare_adapter [as alias]
    def test_datatypes_groups(self):
        """
        Tests whether the dataType group is set correctly on the dataTypes resulting from the same operation group.
        """
        flow_service = FlowService()

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 0, "There should be no operation")

        algogroup = dao.find_group("tvb.tests.framework.adapters.testadapter3", "TestAdapter3")
        group, _ = flow_service.prepare_adapter(self.test_project.id, algogroup)
        adapter_instance = flow_service.build_adapter_instance(group)
        data = {model.RANGE_PARAMETER_1: "param_5", "param_5": [1, 2]}
        ## Create Group of operations
        flow_service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 1, "Expected one operation group")
        self.assertEqual(all_operations[0][2], 2, "Expected 2 operations in group")

        operation_group_id = all_operations[0][3]
        self.assertNotEquals(operation_group_id, None, "The operation should be part of a group.")

        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        ## Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)

        resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
        self.assertTrue(len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes)))

        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
        self.assertEqual(dt.fk_datatype_group, datatype_group.id, "DataTypeGroup is incorrect")
Developer: lcosters, Project: tvb-framework, Lines of code: 37, Source file: operation_service_test.py

Example 2: test_reduce_dimension_component

# Required import: from tvb.core.services.flow_service import FlowService [as alias]
# Or: from tvb.core.services.flow_service.FlowService import prepare_adapter [as alias]
    def test_reduce_dimension_component(self):
        """
        Tests the generation of the component which allows the user
        to select one dimension from a multi-dimensional array.
        """
        flow_service = FlowService()
        array_count = self.count_all_entities(MappedArray)
        assert 0 == array_count, "Expected to find no data"
        adapter_instance = NDimensionArrayAdapter()
        PARAMS = {}
        OperationService().initiate_prelaunch(self.operation, adapter_instance, {}, **PARAMS)
        inserted_arrays, array_count = flow_service.get_available_datatypes(self.test_project.id, MappedArray)
        assert 1 == array_count, "Problems when inserting data"

        algorithm = flow_service.get_algorithm_by_module_and_class(
            'tvb.tests.framework.adapters.ndimensionarrayadapter', 'NDimensionArrayAdapter')
        interface = flow_service.prepare_adapter(self.test_project.id, algorithm)
        self.template_specification['inputList'] = interface
        resulted_html = _template2string(self.template_specification)
        self.soup = BeautifulSoup(resulted_html)

        found_divs = self.soup.find_all('p', attrs=dict(id="dimensionsDiv_input_data"))
        assert len(found_divs) == 1, "Data generated incorrect"

        gid = inserted_arrays[0][2]
        cherrypy.session = {'user': self.test_user}
        entity = dao.get_datatype_by_gid(gid)
        component_content = FlowController().gettemplatefordimensionselect(gid, "input_data")
        self.soup = BeautifulSoup(component_content)

        #check dimensions
        found_selects_0 = self.soup.find_all('select', attrs=dict(id="dimId_input_data_dimensions_0"))
        found_selects_1 = self.soup.find_all('select', attrs=dict(id="dimId_input_data_dimensions_1"))
        found_selects_2 = self.soup.find_all('select', attrs=dict(id="dimId_input_data_dimensions_2"))
        assert len(found_selects_0) == 1, "select not found"
        assert len(found_selects_1) == 1, "select not found"
        assert len(found_selects_2) == 1, "select not found"

        #check the aggregation functions selects
        agg_selects_0 = self.soup.find_all('select', attrs=dict(id="funcId_input_data_dimensions_0"))
        agg_selects_1 = self.soup.find_all('select', attrs=dict(id="funcId_input_data_dimensions_1"))
        agg_selects_2 = self.soup.find_all('select', attrs=dict(id="funcId_input_data_dimensions_2"))
        assert len(agg_selects_0) == 1, "incorrect first dim"
        assert len(agg_selects_1) == 1, "incorrect second dim"
        assert len(agg_selects_2) == 1, "incorrect third dim."

        data_shape = entity.shape
        assert len(data_shape) == 3, "Shape of the array is incorrect"
        for i in range(data_shape[0]):
            options = self.soup.find_all('option', attrs=dict(value=gid + "_0_" + str(i)))
            assert len(options) == 1, "Generated option is incorrect"
            assert options[0].text == "Time " + str(i), "The label of the option is not correct"
            assert options[0].parent["name"] == "input_data_dimensions_0"
        for i in range(data_shape[1]):
            options = self.soup.find_all('option', attrs=dict(value=gid + "_1_" + str(i)))
            assert len(options) == 1, "Generated option is incorrect"
            assert options[0].text == "Channel " + str(i), "Option's label incorrect"
            assert options[0].parent["name"] == "input_data_dimensions_1", "incorrect parent"
        for i in range(data_shape[2]):
            options = self.soup.find_all('option', attrs=dict(value=gid + "_2_" + str(i)))
            assert len(options) == 1, "Generated option is incorrect"
            assert options[0].text == "Line " + str(i), "The label of the option is not correct"
            assert options[0].parent["name"] == "input_data_dimensions_2"

        #check the expected hidden fields
        expected_shape = self.soup.find_all('input', attrs=dict(id="input_data_expected_shape"))
        assert len(expected_shape) == 1, "The generated option is not correct"
        assert expected_shape[0]["value"] == "expected_shape_", "The generated option is not correct"
        input_hidden_op = self.soup.find_all('input', attrs=dict(id="input_data_operations"))
        assert len(input_hidden_op) == 1, "The generated option is not correct"
        assert input_hidden_op[0]["value"] == "operations_", "The generated option is not correct"
        input_hidden_dim = self.soup.find_all('input', attrs=dict(id="input_data_expected_dim"))
        assert len(input_hidden_dim) == 1, "The generated option is not correct"
        assert input_hidden_dim[0]["value"] == "requiredDim_1", "The generated option is not correct"
        input_hidden_shape = self.soup.find_all('input', attrs=dict(id="input_data_array_shape"))
        assert len(input_hidden_shape) == 1, "The generated option is not correct"
        assert input_hidden_shape[0]["value"] == "[5, 1, 3]", "The generated option is not correct"

        #check only the first option from the aggregations functions selects
        options = self.soup.find_all('option', attrs=dict(value="func_none"))
        assert len(options) == 3, "The generated option is not correct"
Developer: the-virtual-brain, Project: tvb-framework, Lines of code: 83, Source file: genshi_test.py

Example 3: SpatioTemporalController

# Required import: from tvb.core.services.flow_service import FlowService [as alias]
# Or: from tvb.core.services.flow_service.FlowService import prepare_adapter [as alias]
class SpatioTemporalController(BaseController):
    """
    Base class which contains methods related to spatio-temporal actions.
    """

    def __init__(self):
        BaseController.__init__(self)
        self.flow_service = FlowService()
        self.logger = get_logger(__name__)
        editable_entities = [dict(link='/spatial/stimulus/region/step_1_submit/1/1', title='Region Stimulus',
                                  subsection='regionstim', description='Create a new Stimulus on Region level'),
                             dict(link='/spatial/stimulus/surface/step_1_submit/1/1', title='Surface Stimulus',
                                  subsection='surfacestim', description='Create a new Stimulus on Surface level')]
        self.submenu_list = editable_entities


    @expose_page
    @settings
    def index(self, **data):
        """
        Displays the main page for the spatio temporal section.
        """
        template_specification = {'title': "Spatio temporal", 'data': data, 'mainContent': 'header_menu'}
        return self.fill_default_attributes(template_specification)



    @staticmethod
    def display_surface(surface_gid):
        """
        Generates the HTML for displaying the surface with the given ID.
        """
        surface = ABCAdapter.load_entity_by_gid(surface_gid)
        common.add2session(PARAM_SURFACE, surface_gid)
        url_vertices_pick, url_normals_pick, url_triangles_pick = surface.get_urls_for_pick_rendering()
        url_vertices, url_normals, _, url_triangles = surface.get_urls_for_rendering()

        return {
            'urlVerticesPick': json.dumps(url_vertices_pick),
            'urlTrianglesPick': json.dumps(url_triangles_pick),
            'urlNormalsPick': json.dumps(url_normals_pick),
            'urlVertices': json.dumps(url_vertices),
            'urlTriangles': json.dumps(url_triangles),
            'urlNormals': json.dumps(url_normals),
            'brainCenter': json.dumps(surface.center())
        }


    @staticmethod
    def prepare_entity_interface(input_list):
        """
        Prepares the input tree obtained from a creator.
        """
        return {'inputList': input_list,
                common.KEY_PARAMETERS_CONFIG: False}


    def get_creator_and_interface(self, creator_module, creator_class, datatype_instance, lock_midpoint_for_eq=None):
        """
        Returns a Tuple: a creator instance and a dictionary for the creator interface.
        The interface is prepared for rendering, it is populated with existent data, in case of a
        parameter of type DataType. The name of the attributes are also prefixed to identify groups.
        """
        algo_group = self.flow_service.get_algorithm_by_module_and_class(creator_module, creator_class)[1]
        group, _ = self.flow_service.prepare_adapter(common.get_current_project().id, algo_group)

        # The interface returned above by the flow service's 'prepare_adapter' is not used here,
        # because the selects that display dataTypes would also contain the 'All' entry.
        datatype_instance.trait.bound = traited_interface.INTERFACE_ATTRIBUTES_ONLY
        input_list = datatype_instance.interface[traited_interface.INTERFACE_ATTRIBUTES]
        if lock_midpoint_for_eq is not None:
            for idx in lock_midpoint_for_eq:
                input_list[idx] = self._lock_midpoints(input_list[idx])
        category = self.flow_service.get_visualisers_category()
        input_list = self.flow_service.prepare_parameters(input_list, common.get_current_project().id, category.id)
        input_list = ABCAdapter.prepare_param_names(input_list)

        return self.flow_service.build_adapter_instance(group), input_list


    @staticmethod
    def get_series_json(data, label):
        """ For each data point entry, build the FLOT specific JSON. """
        return '{"data": %s, "label": "%s"}' % (json.dumps(data), label)


    @staticmethod
    def build_final_json(list_of_series):
        """ Given a list with all the data points, build the final FLOT json. """
        return '[' + ','.join(list_of_series) + ']'


    @staticmethod
    def get_ui_message(list_of_equation_names):
        """
        The message returned by this method should be displayed if
        the equation with the given name couldn't be evaluated in all points.
        """
        if list_of_equation_names:
            return ("Could not evaluate the " + ", ".join(list_of_equation_names) + " equation(s) "
#......... (rest of the code omitted) .........
Developer: amitsaroj001, Project: tvb-framework, Lines of code: 103, Source file: base_spatio_temporal_controller.py

Example 4: FlowServiceTest

# Required import: from tvb.core.services.flow_service import FlowService [as alias]
# Or: from tvb.core.services.flow_service.FlowService import prepare_adapter [as alias]
class FlowServiceTest(TransactionalTestCase):
    """
    This class contains tests for the tvb.core.services.flow_service module.
    """


    def setUp(self):
        """ Prepare some entities to work with during tests:"""

        self.flow_service = FlowService()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(admin=self.test_user)
        ### Insert some starting data in the database.
        categ1 = model.AlgorithmCategory('one', True)
        self.categ1 = dao.store_entity(categ1)
        categ2 = model.AlgorithmCategory('two', rawinput=True)
        self.categ2 = dao.store_entity(categ2)

        group1 = model.AlgorithmGroup("test_module1", "classname1", categ1.id)
        self.algo_group1 = dao.store_entity(group1)
        group2 = model.AlgorithmGroup("test_module2", "classname2", categ2.id)
        self.algo_group2 = dao.store_entity(group2)
        group3 = model.AlgorithmGroup("test_module3", "classname3", categ1.id)
        self.algo_group3 = dao.store_entity(group3)

        group_v = model.AlgorithmGroup(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_VALID_CLASS, categ2.id)
        self.algo_group_v = dao.store_entity(group_v)

        algo_v = model.Algorithm(self.algo_group_v.id, 'ident', name='', req_data='', param_name='', output='')
        self.algorithm_v = dao.store_entity(algo_v)

        algo1 = model.Algorithm(self.algo_group1.id, 'id', name='', req_data='', param_name='', output='')
        self.algorithm1 = dao.store_entity(algo1)


    def tearDown(self):
        for algo in [self.algorithm1, self.algorithm_v]:
            dao.remove_entity(model.Algorithm, algo.id)

        for group in [self.algo_group1, self.algo_group2, self.algo_group3, self.algo_group_v]:
            dao.remove_entity(model.AlgorithmGroup, group.id)

        for categ in [self.categ1, self.categ2]:
            dao.remove_entity(model.AlgorithmCategory, categ.id)


    def test_groups_for_categories(self):
        """
        Test getting algorithms for specific categories.
        """
        category1 = self.flow_service.get_groups_for_categories([self.categ1])
        category2 = self.flow_service.get_groups_for_categories([self.categ2])

        dummy = model.AlgorithmCategory('dummy', rawinput=True)
        dummy.id = 999
        unexisting_cat = self.flow_service.get_groups_for_categories([dummy])

        self.assertEqual(len(category1), 2)
        self.assertEqual(len(category2), 2)
        self.assertEqual(len(unexisting_cat), 0)

        for group in category1:
            if group.module not in ["test_module1", "test_module3"]:
                self.fail("Some invalid data retrieved")
        for group in category2:
            if group.module not in ["test_module2", TEST_ADAPTER_VALID_MODULE]:
                self.fail("Some invalid data retrieved")



    def test_get_group_by_identifier(self):
        """
        Test for get_algo_group_by_identifier.
        """
        algo_ret = self.flow_service.get_algo_group_by_identifier(self.algo_group1.id)
        self.assertEqual(algo_ret.id, self.algo_group1.id, "ID-s are different!")
        self.assertEqual(algo_ret.module, self.algo_group1.module, "Modules are different!")
        self.assertEqual(algo_ret.fk_category, self.algo_group1.fk_category, "Categories are different!")
        self.assertEqual(algo_ret.classname, self.algo_group1.classname, "Class names are different!")


    def test_build_adapter_instance(self):
        """
        Test standard flow for building an adapter instance.
        """
        algo_group = dao.find_group(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_VALID_CLASS)
        adapter = ABCAdapter.build_adapter(algo_group)
        self.assertTrue(isinstance(adapter, ABCSynchronous), "Something went wrong with valid data!")


    def test_build_adapter_invalid(self):
        """
        Test flow for trying to build an adapter that does not inherit from ABCAdapter.
        """
        group = dao.find_group(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_INVALID_CLASS)
        self.assertRaises(OperationException, self.flow_service.build_adapter_instance, group)


    def test_prepare_adapter(self):
        """
#......... (rest of the code omitted) .........
Developer: amitsaroj001, Project: tvb-framework, Lines of code: 103, Source file: flow_service_test.py

Example 5: ProjectStructureTest

# Required import: from tvb.core.services.flow_service import FlowService [as alias]
# Or: from tvb.core.services.flow_service.FlowService import prepare_adapter [as alias]

#......... (rest of the code omitted) .........
        project, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        datatype_group = dao.get_generic_entity(model.DataTypeGroup, dt_group_id)[0]

        self.project_service.remove_datatype(project.id, first_dt.gid)
        self._check_if_datatype_was_removed(first_dt)
        self._check_if_datatype_was_removed(second_dt)
        self._check_if_datatype_was_removed(datatype_group)
        self._check_datatype_group_removed(dt_group_id, datatype_group.fk_operation_group)


    def test_remove_datatype_group(self):
        """
        Tests the deletion of a datatype group.
        """
        project, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        datatype_group = dao.get_generic_entity(model.DataTypeGroup, dt_group_id)[0]

        self.project_service.remove_datatype(project.id, datatype_group.gid)
        self._check_if_datatype_was_removed(first_dt)
        self._check_if_datatype_was_removed(second_dt)
        self._check_if_datatype_was_removed(datatype_group)
        self._check_datatype_group_removed(dt_group_id, datatype_group.fk_operation_group)


    def _create_mapped_arrays(self, project_id):
        """
        :param project_id: the project in which the arrays are created
        :return: a list of dummy `MappedArray`
        """
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(count, 0)
        
        algo_group = dao.find_group('tvb.tests.framework.adapters.ndimensionarrayadapter', 'NDimensionArrayAdapter')
        group, _ = self.flow_service.prepare_adapter(project_id, algo_group)

        adapter_instance = self.flow_service.build_adapter_instance(group)
        data = {'param_1': 'some value'}
        #create 3 data types
        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(count, 1)
        
        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(count, 2)
        
        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        array_wrappers, count = self.flow_service.get_available_datatypes(project_id,
                                                                          "tvb.datatypes.arrays.MappedArray")
        self.assertEqual(count, 3)

        return array_wrappers


    def _create_operation(self, project_id, algorithm_id):
        """
        Create a dummy operation.
        :param project_id: the project in which the operation is created
        :param algorithm_id: the algorithm to be run for the operation
        :return: a dummy `Operation` with the given specifications
        """
        algorithm = dao.get_algorithm_by_id(algorithm_id)
        meta = {DataTypeMetaData.KEY_SUBJECT: "John Doe",
                DataTypeMetaData.KEY_STATE: "RAW_DATA"}
        operation = model.Operation(self.test_user.id, project_id, algorithm.id, 'test params',
                                    meta=json.dumps(meta), status=model.STATUS_FINISHED)
Developer: amitsaroj001, Project: tvb-framework, Lines of code: 70, Source file: project_structure_test.py

Example 6: FlowServiceTest

# Required import: from tvb.core.services.flow_service import FlowService [as alias]
# Or: from tvb.core.services.flow_service.FlowService import prepare_adapter [as alias]
class FlowServiceTest(TransactionalTestCase):
    """
    This class contains tests for the tvb.core.services.flow_service module.
    """


    def setUp(self):
        """ Prepare some entities to work with during tests:"""

        self.flow_service = FlowService()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(admin=self.test_user)

        category = dao.get_uploader_categories()[0]
        self.algorithm = dao.store_entity(model.Algorithm(TEST_ADAPTER_VALID_MODULE,
                                                          TEST_ADAPTER_VALID_CLASS, category.id))


    def tearDown(self):
        dao.remove_entity(model.Algorithm, self.algorithm)


    def test_get_uploaders(self):

        result = self.flow_service.get_upload_algorithms()
        self.assertEqual(29, len(result))
        found = False
        for algo in result:
            if algo.classname == self.algorithm.classname and algo.module == self.algorithm.module:
                found = True
                break
        self.assertTrue(found, "Uploader incorrectly returned")


    def test_get_analyze_groups(self):

        category, groups = self.flow_service.get_analyze_groups()
        self.assertEqual(category.displayname, 'Analyze')
        self.assertTrue(len(groups) > 1)
        self.assertTrue(isinstance(groups[0], model.AlgorithmTransientGroup))


    def test_get_visualizers_for_group(self):

        _, op_group_id = TestFactory.create_group(self.test_user, self.test_project)
        dt_group = dao.get_datatypegroup_by_op_group_id(op_group_id)
        result = self.flow_service.get_visualizers_for_group(dt_group.gid)
        # Only the discrete PSE viewer is expected
        self.assertEqual(1, len(result))
        self.assertEqual(DISCRETE_PSE_ADAPTER_CLASS, result[0].classname)


    def test_get_launchable_algorithms(self):

        factory = DatatypesFactory()
        conn = factory.create_connectivity(4)[1]
        ts = factory.create_timeseries(conn)
        result = self.flow_service.get_launchable_algorithms(ts.gid)
        self.assertTrue('Analyze' in result)
        self.assertTrue('View' in result)



    def test_get_group_by_identifier(self):
        """
        Test for the get_algorithm_by_identifier.
        """
        algo_ret = self.flow_service.get_algorithm_by_identifier(self.algorithm.id)
        self.assertEqual(algo_ret.id, self.algorithm.id, "ID-s are different!")
        self.assertEqual(algo_ret.module, self.algorithm.module, "Modules are different!")
        self.assertEqual(algo_ret.fk_category, self.algorithm.fk_category, "Categories are different!")
        self.assertEqual(algo_ret.classname, self.algorithm.classname, "Class names are different!")


    def test_build_adapter_instance(self):
        """
        Test standard flow for building an adapter instance.
        """
        adapter = TestFactory.create_adapter(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_VALID_CLASS)
        self.assertTrue(isinstance(adapter, ABCSynchronous), "Something went wrong with valid data!")


    def test_build_adapter_invalid(self):
        """
        Test flow for trying to build an adapter that does not inherit from ABCAdapter.
        """
        group = dao.get_algorithm_by_module(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_INVALID_CLASS)
        self.assertRaises(IntrospectionException, ABCAdapter.build_adapter, group)


    def test_prepare_adapter(self):
        """
        Test preparation of an adapter.
        """
        stored_adapter = dao.get_algorithm_by_module(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_VALID_CLASS)
        interface = self.flow_service.prepare_adapter(self.test_project.id, stored_adapter)
        self.assertTrue(isinstance(stored_adapter, model.Algorithm), "Something went wrong with valid data!")
        self.assertTrue("name" in interface[0], "Bad interface created!")
        self.assertEquals(interface[0]["name"], "test", "Bad interface!")
        self.assertTrue("type" in interface[0], "Bad interface created!")
#......... (rest of the code omitted) .........
Developer: gummadhav, Project: tvb-framework, Lines of code: 103, Source file: flow_service_test.py


Note: The tvb.core.services.flow_service.FlowService.prepare_adapter examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by their authors, and copyright remains with the original authors; please consult each project's license before distributing or using the code. Do not reproduce without permission.