This article collects typical usage examples of the Python method tvb.tests.framework.datatypes.datatypes_factory.DatatypesFactory.get_project. If you have been wondering what DatatypesFactory.get_project does, or how to use it in your own code, the curated examples below should help. You can also explore further usage examples of the containing class, tvb.tests.framework.datatypes.datatypes_factory.DatatypesFactory.
The following 15 code examples of the DatatypesFactory.get_project method are sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
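All 15 examples share the same fixture pattern: a DatatypesFactory instance creates a test user and a test project, get_project()/get_user() return them, and the teardown removes the project's on-disk structure. Below is a minimal sketch of that shared fixture; the import paths for TransactionalTestCase and FilesHelper are assumptions (they are not shown in the examples and may differ between TVB versions).

# Assumed import paths -- adjust to your TVB version if they differ.
from tvb.core.entities.file.files_helper import FilesHelper                # assumption
from tvb.tests.framework.core.base_testcase import TransactionalTestCase   # assumption
from tvb.tests.framework.datatypes.datatypes_factory import DatatypesFactory


class MyGetProjectTest(TransactionalTestCase):
    """Hypothetical test case showing the setUp/tearDown pattern used in the examples below."""

    def setUp(self):
        # The factory creates a test user and a test project in the test database.
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

    def tearDown(self):
        # Remove the project folder structure created during the test.
        FilesHelper().remove_project_structure(self.test_project.name)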
Example 1: ObjSurfaceImporterTest
# Required import: from tvb.tests.framework.datatypes.datatypes_factory import DatatypesFactory [as alias]
# Or: from tvb.tests.framework.datatypes.datatypes_factory.DatatypesFactory import get_project [as alias]
class ObjSurfaceImporterTest(TransactionalTestCase):
    """
    Unit-tests for Obj Surface importer.
    """

    torrus = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'test_torus.obj')
    face = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'face_surface.obj')

    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

    def tearDown(self):
        FilesHelper().remove_project_structure(self.test_project.name)

    def _importSurface(self, import_file_path=None):
        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.obj_importer', 'ObjSurfaceImporter')
        importer = ABCAdapter.build_adapter(group)
        args = {'data_file': import_file_path,
                "surface_type": FACE,
                DataTypeMetaData.KEY_SUBJECT: "John"}
        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)
        data_types = FlowService().get_available_datatypes(self.test_project.id, FaceSurface)[0]
        self.assertEqual(1, len(data_types), "Project should contain only one data type.")
        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(surface is not None, "Surface should not be None")
        return surface

    def test_import_quads_no_normals(self):
        """
        Test that import works with a file which contains quads and no normals.
        """
        surface = self._importSurface(self.face)
        self.assertEqual(8614, len(surface.vertices))
        self.assertEqual(8614, len(surface.vertex_normals))
        self.assertEqual(17224, len(surface.triangles))

    def test_import_simple_with_normals(self):
        """
        Test that import works with an OBJ file which includes normals.
        """
        surface = self._importSurface(self.torrus)
        self.assertEqual(441, surface.number_of_vertices)
        self.assertEqual(441, len(surface.vertex_normals))
        self.assertEqual(800, surface.number_of_triangles)
Example 2: EEGMonitorTest
# Required import: from tvb.tests.framework.datatypes.datatypes_factory import DatatypesFactory [as alias]
# Or: from tvb.tests.framework.datatypes.datatypes_factory.DatatypesFactory import get_project [as alias]
class EEGMonitorTest(TransactionalTestCase):
    """
    Unit-tests for EEG Viewer.
    """

    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a surface;
        imports a CFF data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()
        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
        self.assertTrue(self.connectivity is not None)

    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch(self):
        """
        Check that all required keys are present in output from EegMonitor launch.
        """
        zip_path = os.path.join(os.path.dirname(sensors_dataset.__file__),
                                'EEG_unit_vectors_BrainProducts_62.txt.bz2')
        TestFactory.import_sensors(self.test_user, self.test_project, zip_path, 'EEG Sensors')
        sensors = TestFactory.get_entity(self.test_project, SensorsEEG())
        time_series = self.datatypeFactory.create_timeseries(self.connectivity, 'EEG', sensors)
        viewer = EegMonitor()
        result = viewer.launch(time_series)
        expected_keys = ['tsNames', 'groupedLabels', 'tsModes', 'tsStateVars', 'longestChannelLength',
                         'label_x', 'entities', 'page_size', 'number_of_visible_points',
                         'extended_view', 'initialSelection', 'ag_settings']
        for key in expected_keys:
            self.assertTrue(key in result, "key not found %s" % key)
        expected_ag_settings = ['channelsPerSet', 'channelLabels', 'noOfChannels', 'translationStep',
                                'normalizedSteps', 'nan_value_found', 'baseURLS', 'pageSize',
                                'nrOfPages', 'timeSetPaths', 'totalLength', 'number_of_visible_points',
                                'extended_view', 'measurePointsSelectionGIDs']
        ag_settings = json.loads(result['ag_settings'])
        for key in expected_ag_settings:
            self.assertTrue(key in ag_settings, "ag_settings should have the key %s" % key)
Example 3: TestObjSurfaceImporter
# Required import: from tvb.tests.framework.datatypes.datatypes_factory import DatatypesFactory [as alias]
# Or: from tvb.tests.framework.datatypes.datatypes_factory.DatatypesFactory import get_project [as alias]
class TestObjSurfaceImporter(TransactionalTestCase):
    """
    Unit-tests for Obj Surface importer.
    """

    torus = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'test_torus.obj')
    face = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'face_surface.obj')

    def transactional_setup_method(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

    def transactional_teardown_method(self):
        FilesHelper().remove_project_structure(self.test_project.name)

    def _import_surface(self, import_file_path=None):
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.obj_importer', 'ObjSurfaceImporter')
        args = {'data_file': import_file_path,
                "surface_type": FACE,
                DataTypeMetaData.KEY_SUBJECT: "John"}
        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)
        data_types = FlowService().get_available_datatypes(self.test_project.id, FaceSurface)[0]
        assert 1 == len(data_types), "Project should contain only one data type."
        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        assert surface is not None, "Surface should not be None"
        return surface

    def test_import_quads_no_normals(self):
        """
        Test that import works with a file which contains quads and no normals.
        """
        surface = self._import_surface(self.face)
        assert 8614 == len(surface.vertices)
        assert 8614 == len(surface.vertex_normals)
        assert 17224 == len(surface.triangles)

    def test_import_simple_with_normals(self):
        """
        Test that import works with an OBJ file which includes normals.
        """
        surface = self._import_surface(self.torus)
        assert 441 == surface.number_of_vertices
        assert 441 == len(surface.vertex_normals)
        assert 800 == surface.number_of_triangles
Example 4: ZIPSurfaceImporterTest
# Required import: from tvb.tests.framework.datatypes.datatypes_factory import DatatypesFactory [as alias]
# Or: from tvb.tests.framework.datatypes.datatypes_factory.DatatypesFactory import get_project [as alias]
class ZIPSurfaceImporterTest(TransactionalTestCase):
    """
    Unit-tests for Zip Surface importer.
    """

    surf_skull = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'outer_skull_4096.zip')

    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

    def tearDown(self):
        FilesHelper().remove_project_structure(self.test_project.name)

    def _importSurface(self, import_file_path=None):
        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.zip_surface_importer', 'ZIPSurfaceImporter')
        importer = ABCAdapter.build_adapter(group)
        args = {
            'uploaded': import_file_path, 'surface_type': OUTER_SKULL,
            'zero_based_triangles': True,
            DataTypeMetaData.KEY_SUBJECT: "John"
        }
        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)
        data_types = FlowService().get_available_datatypes(self.test_project.id, SkullSkin)[0]
        self.assertEqual(1, len(data_types), "Project should contain only one data type.")
        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(surface is not None, "Surface should not be None")
        return surface

    def test_import_surf_zip(self):
        surface = self._importSurface(self.surf_skull)
        self.assertEqual(4096, len(surface.vertices))
        self.assertEqual(4096, surface.number_of_vertices)
        self.assertEqual(8188, len(surface.triangles))
        self.assertEqual(8188, surface.number_of_triangles)
        self.assertEqual('', surface.user_tag_3)
        self.assertTrue(surface.valid_for_simulations)
Example 5: TimeSeriesTest
# Required import: from tvb.tests.framework.datatypes.datatypes_factory import DatatypesFactory [as alias]
# Or: from tvb.tests.framework.datatypes.datatypes_factory.DatatypesFactory import get_project [as alias]
class TimeSeriesTest(TransactionalTestCase):
    """
    Unit-tests for Time Series Viewer.
    """

    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a surface;
        imports a CFF data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()
        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
        self.assertTrue(self.connectivity is not None)

    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch(self):
        """
        Check that all required keys are present in output from the TimeSeries viewer launch.
        """
        timeseries = self.datatypeFactory.create_timeseries(self.connectivity)
        viewer = TimeSeries()
        result = viewer.launch(timeseries)
        expected_keys = [
            "t0",
            "shape",
            "preview",
            "labelsStateVar",
            "labelsModes",
            "mainContent",
            "labels",
            "labels_json",
            "figsize",
            "dt",
        ]
        for key in expected_keys:
            self.assertTrue(key in result)
Example 6: MatTimeSeriesImporterTest
# Required import: from tvb.tests.framework.datatypes.datatypes_factory import DatatypesFactory [as alias]
# Or: from tvb.tests.framework.datatypes.datatypes_factory.DatatypesFactory import get_project [as alias]
class MatTimeSeriesImporterTest(TransactionalTestCase):

    base_pth = os.path.join(os.path.dirname(tvb_data.__file__), 'berlinSubjects', 'QL_20120814')
    bold_path = os.path.join(base_pth, 'QL_BOLD_regiontimecourse.mat')
    connectivity_path = os.path.join(base_pth, 'QL_20120814_Connectivity.zip')

    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()
        self._import_connectivity()

    def tearDown(self):
        FilesHelper().remove_project_structure(self.test_project.name)

    def _import_connectivity(self):
        group = dao.find_group('tvb.adapters.uploaders.zip_connectivity_importer', 'ZIPConnectivityImporter')
        importer = ABCAdapter.build_adapter(group)
        ### Launch Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id,
                                     uploaded=self.connectivity_path, Data_Subject='QL')
        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())

    def test_import_bold(self):
        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.mat_timeseries_importer', 'MatTimeSeriesImporter')
        importer = ABCAdapter.build_adapter(group)
        args = dict(data_file=self.bold_path, dataset_name='QL_20120824_DK_BOLD_timecourse', structure_path='',
                    transpose=False, slice=None, sampling_rate=1000, start_time=0,
                    tstype='region',
                    tstype_parameters_option_region_connectivity=self.connectivity.gid,
                    Data_Subject="QL")
        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)
        tsr = TestFactory.get_entity(self.test_project, TimeSeriesRegion())
        self.assertEqual((661, 1, 68, 1), tsr.read_data_shape())
Example 7: TestZIPSurfaceImporter
# Required import: from tvb.tests.framework.datatypes.datatypes_factory import DatatypesFactory [as alias]
# Or: from tvb.tests.framework.datatypes.datatypes_factory.DatatypesFactory import get_project [as alias]
class TestZIPSurfaceImporter(TransactionalTestCase):
    """
    Unit-tests for Zip Surface importer.
    """

    surf_skull = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'outer_skull_4096.zip')

    def transactional_setup_method(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

    def transactional_teardown_method(self):
        FilesHelper().remove_project_structure(self.test_project.name)

    def _importSurface(self, import_file_path=None):
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.zip_surface_importer', 'ZIPSurfaceImporter')
        args = {'uploaded': import_file_path, 'surface_type': OUTER_SKULL,
                'zero_based_triangles': True,
                DataTypeMetaData.KEY_SUBJECT: "John"}
        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)
        data_types = FlowService().get_available_datatypes(self.test_project.id, SkullSkin)[0]
        assert 1 == len(data_types), "Project should contain only one data type."
        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        assert surface is not None, "Surface should not be None"
        return surface

    def test_import_surf_zip(self):
        surface = self._importSurface(self.surf_skull)
        assert 4096 == len(surface.vertices)
        assert 4096 == surface.number_of_vertices
        assert 8188 == len(surface.triangles)
        assert 8188 == surface.number_of_triangles
        assert '' == surface.user_tag_3
        assert surface.valid_for_simulations
Example 8: ConnectivityViewerTest
# Required import: from tvb.tests.framework.datatypes.datatypes_factory import DatatypesFactory [as alias]
# Or: from tvb.tests.framework.datatypes.datatypes_factory.DatatypesFactory import get_project [as alias]
class ConnectivityViewerTest(TransactionalTestCase):
    """
    Unit-tests for Connectivity Viewer.
    """

    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a surface;
        imports a CFF data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()
        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
        self.assertTrue(self.connectivity is not None)

    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch(self):
        """
        Check that all required keys are present in output from ConnectivityViewer launch.
        """
        viewer = ConnectivityViewer()
        result = viewer.launch(self.connectivity)
        expected_keys = ['weightsMin', 'weightsMax', 'urlWeights', 'urlVertices',
                         'urlTriangles', 'urlTracts', 'urlPositions', 'urlNormals',
                         'rightHemisphereJson', 'raysArray', 'rayMin', 'rayMax', 'positions',
                         'leftHemisphereJson', 'connectivity_entity', 'bothHemisphereJson']
        for key in expected_keys:
            self.assertTrue(key in result)
Example 9: TestCovarianceViewer
# Required import: from tvb.tests.framework.datatypes.datatypes_factory import DatatypesFactory [as alias]
# Or: from tvb.tests.framework.datatypes.datatypes_factory.DatatypesFactory import get_project [as alias]
class TestCovarianceViewer(TransactionalTestCase):
    """
    Unit-tests for Covariance Viewer.
    """

    def transactional_setup_method(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a surface;
        imports a CFF data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()
        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
        assert self.connectivity is not None

    def transactional_teardown_method(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch(self):
        """
        Check that all required keys are present in output from CovarianceVisualizer launch.
        """
        time_series = self.datatypeFactory.create_timeseries(self.connectivity)
        covariance = self.datatypeFactory.create_covariance(time_series)
        viewer = CovarianceVisualizer()
        result = viewer.launch(covariance)
        expected_keys = ['matrix_shape', 'matrix_data', 'mainContent', 'isAdapter']
        for key in expected_keys:
            assert key in result
Example 10: TestTimeSeries
# Required import: from tvb.tests.framework.datatypes.datatypes_factory import DatatypesFactory [as alias]
# Or: from tvb.tests.framework.datatypes.datatypes_factory.DatatypesFactory import get_project [as alias]
class TestTimeSeries(TransactionalTestCase):
    """
    Unit-tests for Time Series Viewer.
    """

    def transactional_setup_method(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a surface;
        imports a CFF data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()
        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
        assert self.connectivity is not None

    def transactional_teardown_method(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch(self):
        """
        Check that all required keys are present in output from the TimeSeries viewer launch.
        """
        timeseries = self.datatypeFactory.create_timeseries(self.connectivity)
        viewer = TimeSeries()
        result = viewer.launch(timeseries)
        expected_keys = ['t0', 'shape', 'preview', 'labelsStateVar', 'labelsModes',
                         'mainContent', 'labels', 'labels_json', 'figsize', 'dt']
        for key in expected_keys:
            assert key in result
Example 11: NetworkxImporterTest
# Required import: from tvb.tests.framework.datatypes.datatypes_factory import DatatypesFactory [as alias]
# Or: from tvb.tests.framework.datatypes.datatypes_factory.DatatypesFactory import get_project [as alias]
class NetworkxImporterTest(TransactionalTestCase):
    """
    Unit-tests for the NetworkX Connectivity importer.
    """

    upload_file = os.path.join(os.path.dirname(__file__), "test_data", 'connectome_83.gpickle')

    def setUp(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

    def tearDown(self):
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_import(self):
        count_before = self.count_all_entities(Connectivity)
        self.assertEqual(0, count_before)
        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.networkx_importer', 'NetworkxConnectivityImporter')
        importer = ABCAdapter.build_adapter(group)
        args = {'data_file': self.upload_file,
                DataTypeMetaData.KEY_SUBJECT: "John"}
        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)
        count_after = self.count_all_entities(Connectivity)
        self.assertEqual(1, count_after)
        conn = self.get_all_entities(Connectivity)[0]
        self.assertEqual(83, conn.number_of_regions)
Example 12: TestNetworkxImporter
# Required import: from tvb.tests.framework.datatypes.datatypes_factory import DatatypesFactory [as alias]
# Or: from tvb.tests.framework.datatypes.datatypes_factory.DatatypesFactory import get_project [as alias]
class TestNetworkxImporter(TransactionalTestCase):
    """
    Unit-tests for the NetworkX Connectivity importer.
    """

    upload_file = os.path.join(os.path.dirname(__file__), "test_data", 'connectome_83.gpickle')

    def transactional_setup_method(self):
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

    def transactional_teardown_method(self):
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_import(self):
        count_before = self.count_all_entities(Connectivity)
        assert 0 == count_before
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.networkx_importer',
                                              'NetworkxConnectivityImporter')
        args = {'data_file': self.upload_file,
                DataTypeMetaData.KEY_SUBJECT: "John"}
        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)
        count_after = self.count_all_entities(Connectivity)
        assert 1 == count_after
        conn = self.get_all_entities(Connectivity)[0]
        assert 83 == conn.number_of_regions
Example 13: CrossCoherenceViewerTest
# Required import: from tvb.tests.framework.datatypes.datatypes_factory import DatatypesFactory [as alias]
# Or: from tvb.tests.framework.datatypes.datatypes_factory.DatatypesFactory import get_project [as alias]
class CrossCoherenceViewerTest(TransactionalTestCase):
    """
    Unit-tests for Cross Coherence Viewer.
    """

    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a surface;
        imports a CFF data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()
        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
        self.assertTrue(self.connectivity is not None)

    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch(self):
        """
        Check that all required keys are present in output from CrossCoherenceVisualizer launch.
        """
        time_series = self.datatypeFactory.create_timeseries(self.connectivity)
        cross_coherence = self.datatypeFactory.create_crosscoherence(time_series)
        viewer = CrossCoherenceVisualizer()
        result = viewer.launch(cross_coherence)
        expected_keys = ['matrix_data', 'matrix_shape', 'matrix_strides', 'frequency']
        for key in expected_keys:
            self.assertTrue(key in result)
Example 14: TestSensorsImporter
# Required import: from tvb.tests.framework.datatypes.datatypes_factory import DatatypesFactory [as alias]
# Or: from tvb.tests.framework.datatypes.datatypes_factory.DatatypesFactory import get_project [as alias]
class TestSensorsImporter(TransactionalTestCase):
    """
    Unit-tests for Sensors importer.
    """

    EEG_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'eeg_unitvector_62.txt.bz2')
    MEG_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'meg_151.txt.bz2')

    def transactional_setup_method(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project and a `Sensors_Importer`
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()
        self.importer = Sensors_Importer()

    def transactional_teardown_method(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def _import(self, import_file_path, sensors_type, expected_data):
        """
        This method is used for importing sensors
        :param import_file_path: absolute path of the file to be imported
        """
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.sensors_importer', 'Sensors_Importer')
        args = {'sensors_file': import_file_path, 'sensors_type': sensors_type}
        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)
        data_types = FlowService().get_available_datatypes(self.test_project.id,
                                                           expected_data.module + "." + expected_data.type)[0]
        assert 1 == len(data_types), "Project should contain only one data type = Sensors."
        time_series = ABCAdapter.load_entity_by_gid(data_types[0][2])
        assert time_series is not None, "Sensors instance should not be none"
        return time_series

    def test_import_eeg_sensors(self):
        """
        This method tests import of a file containing EEG sensors.
        """
        eeg_sensors = self._import(self.EEG_FILE, self.importer.EEG_SENSORS, SensorsEEG())
        expected_size = 62
        assert eeg_sensors.labels is not None
        assert expected_size == len(eeg_sensors.labels)
        assert expected_size == len(eeg_sensors.locations)
        assert (expected_size, 3) == eeg_sensors.locations.shape
        assert expected_size == eeg_sensors.number_of_sensors

    def test_import_meg_sensors(self):
        """
        This method tests import of a file containing MEG sensors.
        """
        meg_sensors = self._import(self.MEG_FILE, self.importer.MEG_SENSORS, SensorsMEG())
        expected_size = 151
        assert meg_sensors.labels is not None
        assert expected_size == len(meg_sensors.labels)
        assert expected_size == len(meg_sensors.locations)
        assert (expected_size, 3) == meg_sensors.locations.shape
        assert expected_size == meg_sensors.number_of_sensors
        assert meg_sensors.has_orientation
        assert expected_size == len(meg_sensors.orientations)
        assert (expected_size, 3) == meg_sensors.orientations.shape

    def test_import_meg_without_orientation(self):
        """
        This method tests import of a file without orientation.
        """
        try:
            self._import(self.EEG_FILE, self.importer.MEG_SENSORS, SensorsMEG())
            raise AssertionError("Import should fail in case of a MEG import without orientation.")
        except OperationException:
            # Expected exception
            pass

    def test_import_internal_sensors(self):
        """
        This method tests import of a file containing internal sensors.
        """
        internal_sensors = self._import(self.EEG_FILE, self.importer.INTERNAL_SENSORS, SensorsInternal())
        expected_size = 62
        assert internal_sensors.labels is not None
        assert expected_size == len(internal_sensors.labels)
        assert expected_size == len(internal_sensors.locations)
        assert (expected_size, 3) == internal_sensors.locations.shape
        assert expected_size == internal_sensors.number_of_sensors
Example 15: SensorViewersTest
# Required import: from tvb.tests.framework.datatypes.datatypes_factory import DatatypesFactory [as alias]
# Or: from tvb.tests.framework.datatypes.datatypes_factory.DatatypesFactory import get_project [as alias]
class SensorViewersTest(TransactionalTestCase):
    """
    Unit-tests for Sensors viewers.
    """

    EXPECTED_KEYS_INTERNAL = {'urlMeasurePoints': None, 'urlMeasurePointsLabels': None, 'noOfMeasurePoints': 103,
                              'minMeasure': 0, 'maxMeasure': 103, 'urlMeasure': None, 'shelfObject': None}

    EXPECTED_KEYS_EEG = EXPECTED_KEYS_INTERNAL.copy()
    EXPECTED_KEYS_EEG.update({'urlVertices': None, 'urlTriangles': None, 'urlLines': None, 'urlNormals': None,
                              'boundaryURL': '', 'urlAlphas': '', 'urlAlphasIndices': '',
                              'noOfMeasurePoints': 62, 'maxMeasure': 62})

    EXPECTED_KEYS_MEG = EXPECTED_KEYS_EEG.copy()
    EXPECTED_KEYS_MEG.update({'noOfMeasurePoints': 151, 'maxMeasure': 151})

    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user and a test project, and imports a face surface.
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()
        ## Import Shelf Face Object
        zip_path = os.path.join(os.path.dirname(surfaces_dataset.__file__), 'face_surface_old.zip')
        TestFactory.import_surface_zip(self.test_user, self.test_project, zip_path, FACE, True)

    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch_EEG(self):
        """
        Check that all required keys are present in output from EegSensorViewer launch.
        """
        ## Import Sensors
        zip_path = os.path.join(os.path.dirname(sensors_dataset.__file__), 'EEG_unit_vectors_BrainProducts_62.txt.bz2')
        TestFactory.import_sensors(self.test_user, self.test_project, zip_path, Sensors_Importer.EEG_SENSORS)
        sensors = TestFactory.get_entity(self.test_project, SensorsEEG())

        ## Import EEGCap
        zip_path = os.path.join(os.path.dirname(surfaces_dataset.__file__), 'eeg_skin_surface.zip')
        TestFactory.import_surface_zip(self.test_user, self.test_project, zip_path, EEG_CAP, True)
        eeg_cap_surface = TestFactory.get_entity(self.test_project, EEGCap())

        viewer = EegSensorViewer()
        viewer.current_project_id = self.test_project.id

        ## Launch with EEG Cap selected
        result = viewer.launch(sensors, eeg_cap_surface)
        self.assert_compliant_dictionary(self.EXPECTED_KEYS_EEG, result)
        for key in ['urlVertices', 'urlTriangles', 'urlLines', 'urlNormals']:
            self.assertIsNotNone(result[key], "Value at key %s should not be None" % key)

        ## Launch without EEG Cap
        result = viewer.launch(sensors)
        self.assert_compliant_dictionary(self.EXPECTED_KEYS_EEG, result)
        for key in ['urlVertices', 'urlTriangles', 'urlLines', 'urlNormals']:
            self.assertTrue(not result[key] or result[key] == "[]",
                            "Value at key %s should be None or empty, but is %s" % (key, result[key]))

    def test_launch_MEG(self):
        """
        Check that all required keys are present in output from MEGSensorViewer launch.
        """
        zip_path = os.path.join(os.path.dirname(sensors_dataset.__file__), 'meg_channels_reg13.txt.bz2')
        TestFactory.import_sensors(self.test_user, self.test_project, zip_path, Sensors_Importer.MEG_SENSORS)
        sensors = TestFactory.get_entity(self.test_project, SensorsMEG())

        viewer = MEGSensorViewer()
        viewer.current_project_id = self.test_project.id

        result = viewer.launch(sensors)
        self.assert_compliant_dictionary(self.EXPECTED_KEYS_MEG, result)

    def test_launch_internal(self):
        """
        Check that all required keys are present in output from InternalSensorViewer launch.
        """
        zip_path = os.path.join(os.path.dirname(sensors_dataset.__file__), 'internal_39.txt.bz2')
        TestFactory.import_sensors(self.test_user, self.test_project, zip_path, Sensors_Importer.INTERNAL_SENSORS)
        sensors = TestFactory.get_entity(self.test_project, SensorsInternal())

        viewer = InternalSensorViewer()
        viewer.current_project_id = self.test_project.id

        result = viewer.launch(sensors)
        self.assert_compliant_dictionary(self.EXPECTED_KEYS_INTERNAL, result)