This article collects typical usage examples of the Python class tvb.core.adapters.abcadapter.ABCAdapter. If you have been wondering what exactly the ABCAdapter class does, how to use it, or what it looks like in real code, the curated class examples below may help.
15 code examples of the ABCAdapter class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
Example 1: read_datatype_attribute
def read_datatype_attribute(self, entity_gid, dataset_name, flatten=False, datatype_kwargs='null', **kwargs):
    """
    Retrieve from a given DataType a property or a method result.

    :returns: JSON representation of the attribute.
    :param entity_gid: GID of the DataType entity
    :param dataset_name: name of the DataType property/method
    :param flatten: whether the result should be flattened before returning (mainly for WebGL
        data, e.g. vertices/triangles). Ignored if the attribute is not an ndarray.
    :param datatype_kwargs: if passed, a dictionary of the form {'name': 'gid'}; for each such
        pair a load_entity is performed and kwargs is updated with the result
    :param kwargs: extra parameters passed when dataset_name is a method.
    """
    self.logger.debug("Starting to read HDF5: " + entity_gid + "/" + dataset_name + "/" + str(kwargs))
    entity = ABCAdapter.load_entity_by_gid(entity_gid)
    datatype_kwargs = json.loads(datatype_kwargs)
    if datatype_kwargs:
        for key, value in datatype_kwargs.iteritems():
            kwargs[key] = ABCAdapter.load_entity_by_gid(value)

    dataset = getattr(entity, dataset_name)
    if not kwargs:
        # why the deep copy?
        result = copy.deepcopy(dataset)
    else:
        result = dataset(**kwargs)

    if isinstance(result, numpy.ndarray):
        # for ndarrays, honor the flatten kwarg and convert to a list, since ndarrays are not JSON-able
        if flatten is True or flatten == "True":
            result = result.flatten()
        return result.tolist()
    return result
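A minimal usage sketch of the method above. The `controller` instance, the GID strings, and the `get_some_slice` method name are hypothetical placeholders, not names from TVB:

import json

surface_gid = "0123abcd-placeholder"
# read a plain ndarray property, flattened for WebGL consumption:
vertices = controller.read_datatype_attribute(surface_gid, "vertices", flatten="True")
# call a method, first resolving one kwarg from another DataType's GID:
spec = json.dumps({"region_mapping": "4567efgh-placeholder"})
values = controller.read_datatype_attribute(surface_gid, "get_some_slice", datatype_kwargs=spec, index=0)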
Example 2: update_default_values
def update_default_values(portlet_interface, portlet_configuration):
    """
    Update the defaults from each AdapterConfiguration entity with the values
    stored in the corresponding workflow step held in the PortletConfiguration
    entity.

    :param portlet_interface: a list of AdapterConfiguration entities.
    :param portlet_configuration: a PortletConfiguration entity.
    """
    # Check for any defaults first in analyzer steps
    if portlet_configuration.analyzers:
        for adapter_idx in xrange(len(portlet_interface[:-1])):
            saved_configuration = portlet_configuration.analyzers[adapter_idx]
            replaced_defaults_dict = ABCAdapter.fill_defaults(portlet_interface[adapter_idx].interface,
                                                              saved_configuration.static_param)
            portlet_interface[adapter_idx].interface = replaced_defaults_dict

    # Check for visualization defaults
    if portlet_configuration.visualizer:
        saved_configuration = portlet_configuration.visualizer
        replaced_defaults_dict = ABCAdapter.fill_defaults(portlet_interface[-1].interface,
                                                          saved_configuration.static_param)
        portlet_interface[-1].interface = replaced_defaults_dict
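The heavy lifting here is done by ABCAdapter.fill_defaults. A minimal sketch of its contract, under the assumption that the interface tree is a list of {'name': ..., 'default': ...} dictionaries (the real TVB tree is richer, with nested options):

interface = [{'name': 'weight', 'type': 'float', 'default': '0.0'}]
saved_params = {'weight': '2.5'}
interface = ABCAdapter.fill_defaults(interface, saved_params)
# interface[0]['default'] is now '2.5'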
Example 3: load_region_stimulus
def load_region_stimulus(self, region_stimulus_gid, from_step=None):
    """
    Loads the interface for the selected region stimulus.
    """
    selected_region_stimulus = ABCAdapter.load_entity_by_gid(region_stimulus_gid)
    temporal_eq = selected_region_stimulus.temporal
    spatial_eq = selected_region_stimulus.spatial
    connectivity = selected_region_stimulus.connectivity
    weights = selected_region_stimulus.weight

    temporal_eq_type = temporal_eq.__class__.__name__
    spatial_eq_type = spatial_eq.__class__.__name__
    default_dict = {'temporal': temporal_eq_type, 'spatial': spatial_eq_type,
                    'connectivity': connectivity.gid, 'weight': json.dumps(weights)}
    for param in temporal_eq.parameters:
        prepared_name = 'temporal_parameters_option_' + str(temporal_eq_type)
        prepared_name = prepared_name + '_parameters_parameters_' + str(param)
        default_dict[prepared_name] = str(temporal_eq.parameters[param])
    for param in spatial_eq.parameters:
        prepared_name = 'spatial_parameters_option_' + str(spatial_eq_type) + '_parameters_parameters_' + str(param)
        default_dict[prepared_name] = str(spatial_eq.parameters[param])

    input_list = self.get_creator_and_interface(REGION_STIMULUS_CREATOR_MODULE,
                                                REGION_STIMULUS_CREATOR_CLASS, StimuliRegion())[1]
    input_list = ABCAdapter.fill_defaults(input_list, default_dict)
    context = common.get_from_session(KEY_REGION_CONTEXT)
    context.reset()
    context.update_from_interface(input_list)
    context.equation_kwargs[DataTypeMetaData.KEY_TAG_1] = selected_region_stimulus.user_tag_1
    context.set_active_stimulus(region_stimulus_gid)
    return self.do_step(from_step)
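The two loops above flatten nested equation parameters into the form-field naming scheme used by the web interface. A small illustration of the composed name, assuming a temporal equation class PulseTrain with an 'onset' parameter (both names chosen for the example):

temporal_eq_type = "PulseTrain"   # assumed equation class name
param = "onset"                   # assumed parameter name
prepared_name = ('temporal_parameters_option_' + temporal_eq_type +
                 '_parameters_parameters_' + param)
# -> 'temporal_parameters_option_PulseTrain_parameters_parameters_onset'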
Example 4: read_datatype_attribute
def read_datatype_attribute(self, entity_gid, dataset_name, flatten=False, datatype_kwargs='null', **kwargs):
    """
    Retrieve from a given DataType a property or a method result.

    :returns: JSON with a NumPy array
    :param entity_gid: GID of the DataType entity
    :param dataset_name: name of the DataType property/method
    :param flatten: whether the result should be flattened before returning (mainly for WebGL
        data, e.g. vertices/triangles)
    :param datatype_kwargs: if passed, a dictionary of the form {'name': 'gid'}; for each such
        pair a load_entity is performed and kwargs is updated with the result
    :param kwargs: extra parameters passed when dataset_name is a method.
    """
    try:
        self.logger.debug("Starting to read HDF5: " + entity_gid + "/" + dataset_name + "/" + str(kwargs))
        entity = ABCAdapter.load_entity_by_gid(entity_gid)
        if kwargs is None:
            kwargs = {}
        datatype_kwargs = json.loads(datatype_kwargs)
        if datatype_kwargs is not None:
            for key in datatype_kwargs:
                kwargs[key] = ABCAdapter.load_entity_by_gid(datatype_kwargs[key])

        if len(kwargs) < 1:
            numpy_array = copy.deepcopy(getattr(entity, dataset_name))
        else:
            # resolve the method by name and call it with the loaded kwargs
            numpy_array = getattr(entity, dataset_name)(**kwargs)
        if (flatten is True) or (flatten == "True"):
            numpy_array = numpy_array.flatten()
        return numpy_array.tolist()
    except Exception as excep:
        self.logger.error("Could not retrieve complex entity field: " + str(entity_gid) + "/" + str(dataset_name))
        self.logger.exception(excep)
Example 5: __init__
def __init__(self, list_of_entities_to_store):
    """
    Expects a list of 'DataType' instances.
    """
    ABCAdapter.__init__(self)
    if (list_of_entities_to_store is None
            or not isinstance(list_of_entities_to_store, list)
            or len(list_of_entities_to_store) == 0):
        raise Exception("The adapter expects a list of entities")
    self.list_of_entities_to_store = list_of_entities_to_store
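A sketch of how such an adapter would be constructed; the StoreAdapter class name and the entity variables are hypothetical stand-ins for whatever concrete subclass defines this __init__:

adapter = StoreAdapter([datatype_a, datatype_b])   # fine: a non-empty list of DataTypes
adapter = StoreAdapter([])                         # raises Exception: empty list rejected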
Example 6: setUp
def setUp(self):
    """
    Set up any additionally needed parameters.
    """
    super(GenshiTestGroup, self).setUp()

    xml_group_path = os.path.join('tests', 'framework', 'interfaces', 'web', "test_group.xml")
    algo_group = dao.find_group('tvb.tests.framework.adapters.testgroupadapter', 'TestGroupAdapter', xml_group_path)
    self.xml_group_adapter = ABCAdapter.build_adapter(algo_group)
    input_tree = self.xml_group_adapter.get_input_tree()
    input_tree = ABCAdapter.prepare_param_names(input_tree)
    self.template_specification['inputList'] = input_tree
    self.template_specification[common.KEY_PARAMETERS_CONFIG] = False
    resulted_html = _template2string(self.template_specification)
    self.soup = BeautifulSoup(resulted_html)
Example 7: _import
def _import(self, import_file_path, surface_gid, connectivity_gid):
    """
    This method is used for importing region mappings.

    :param import_file_path: absolute path of the file to be imported
    :param surface_gid: GID of the surface the mapping refers to
    :param connectivity_gid: GID of the associated connectivity
    """
    # Retrieve Adapter instance
    group = dao.find_group("tvb.adapters.uploaders.region_mapping_importer", "RegionMapping_Importer")
    importer = ABCAdapter.build_adapter(group)
    args = {"mapping_file": import_file_path,
            "surface": surface_gid,
            "connectivity": connectivity_gid,
            DataTypeMetaData.KEY_SUBJECT: "test"}

    now = datetime.datetime.now()
    # Launch import Operation
    FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

    # During setup we import a CFF which creates an additional RegionMapping,
    # so here we have to find our mapping (the one just imported)
    data_filter = FilterChain(fields=[FilterChain.datatype + ".create_date"], operations=[">"], values=[now])
    region_mapping = self._get_entity(RegionMapping(), data_filter)
    return region_mapping
Example 8: test_wrong_shape
def test_wrong_shape(self):
    """
    Verifies that importing a projection matrix with a mismatched shape raises an exception.
    """
    group = dao.find_group("tvb.adapters.uploaders.projection_matrix_importer",
                           "ProjectionMatrixSurfaceEEGImporter")
    importer = ABCAdapter.build_adapter(group)

    file_path = os.path.join(os.path.abspath(os.path.dirname(dataset.__file__)),
                             "projection_eeg_62_surface_16k.mat")
    args = {"projection_file": file_path,
            "dataset_name": "ProjectionMatrix",
            "sensors": self.sensors.gid,
            "surface": self.surface.gid,
            DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT}

    try:
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)
        self.fail("This was expected not to run! 62 rows in projection matrix, but 65 sensors.")
    except OperationException:
        pass
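The same negative test can be phrased more compactly with unittest's assertRaises; a sketch, equivalent under the same setup:

self.assertRaises(OperationException, FlowService().fire_operation,
                  importer, self.test_user, self.test_project.id, **args)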
Example 9: test_happy_flow_surface_import
def test_happy_flow_surface_import(self):
    """
    Verifies the happy flow for importing a surface.
    """
    dt_count_before = TestFactory.get_entity_count(self.test_project, ProjectionSurfaceEEG())
    group = dao.find_group("tvb.adapters.uploaders.projection_matrix_importer",
                           "ProjectionMatrixSurfaceEEGImporter")
    importer = ABCAdapter.build_adapter(group)

    file_path = os.path.join(os.path.abspath(os.path.dirname(dataset.__file__)),
                             "projection_eeg_65_surface_16k.npy")
    args = {"projection_file": file_path,
            "dataset_name": "ProjectionMatrix",
            "sensors": self.sensors.gid,
            "surface": self.surface.gid,
            DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT}

    FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)
    dt_count_after = TestFactory.get_entity_count(self.test_project, ProjectionSurfaceEEG())
    self.assertEqual(dt_count_before + 1, dt_count_after)
Example 10: get_surface_model_parameters_data
def get_surface_model_parameters_data(self, default_selected_model_param=None):
    """
    Returns a dictionary which contains all the data needed for drawing the model parameters.
    """
    context_model_parameters = base.get_from_session(KEY_CONTEXT_MPS)
    if default_selected_model_param is None:
        default_selected_model_param = context_model_parameters.prepared_model_parameter_names.values()[0]

    equation_displayer = EquationDisplayer()
    equation_displayer.trait.bound = interface.INTERFACE_ATTRIBUTES_ONLY
    input_list = equation_displayer.interface[interface.INTERFACE_ATTRIBUTES]
    input_list[0] = self._lock_midpoints(input_list[0])

    options = []
    for original_param, modified_param in context_model_parameters.prepared_model_parameter_names.items():
        attributes = deepcopy(input_list)
        self._fill_default_values(attributes, modified_param)
        option = {'name': original_param, 'value': modified_param, 'attributes': attributes}
        options.append(option)

    input_list = [{'name': 'model_param', 'type': 'select', 'default': default_selected_model_param,
                   'label': 'Model param', 'required': True, 'options': options}]
    input_list = ABCAdapter.prepare_param_names(input_list)
    return {base.KEY_PARAMETERS_CONFIG: False, 'inputList': input_list,
            'applied_equations': context_model_parameters.get_configure_info()}
Author: HuifangWang | Project: the-virtual-brain-website | Lines: 26 | Source file: surface_model_parameters_controller.py
Example 11: cdata2eeg_mapping
def cdata2eeg_mapping(eeg_mapping_data, meta, storage_path, expected_shape=0):
    """
    Currently not used.
    """
    tmpdir = os.path.join(gettempdir(), eeg_mapping_data.parent_cfile.get_unique_cff_name())
    LOG.debug("Using temporary folder for EEG_Mapping import: " + tmpdir)
    _zipfile = ZipFile(eeg_mapping_data.parent_cfile.src, 'r', ZIP_DEFLATED)
    eeg_projection_path = _zipfile.extract(eeg_mapping_data.src, tmpdir)
    eeg_projection_data = read_matlab_data(eeg_projection_path, constants.DATA_NAME_PROJECTION)

    if eeg_projection_data.shape[1] < expected_shape:
        padding = numpy.zeros((eeg_projection_data.shape[0], expected_shape - eeg_projection_data.shape[1]))
        eeg_projection_data = numpy.hstack((eeg_projection_data, padding))

    gid = dao.get_last_data_with_uid(meta[constants.KEY_SURFACE_UID], surfaces.CorticalSurface)
    surface_data = ABCAdapter.load_entity_by_gid(gid)

    projection_matrix = projections.ProjectionSurfaceEEG(storage_path=storage_path)
    projection_matrix.projection_data = eeg_projection_data
    projection_matrix.sources = surface_data
    projection_matrix.sensors = None
    # TODO: if we decide to use this method, we will need to find a manner to fill the sensors.
    return projection_matrix
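A minimal, self-contained illustration of the zero-padding step above (the shapes are made up for the example):

import numpy

data = numpy.ones((62, 3))
expected_shape = 5
if data.shape[1] < expected_shape:
    padding = numpy.zeros((data.shape[0], expected_shape - data.shape[1]))
    data = numpy.hstack((data, padding))
# data.shape is now (62, 5); the appended columns are all zeros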
Example 12: cdata2local_connectivity
def cdata2local_connectivity(local_connectivity_data, meta, storage_path, expected_length=0):
    """
    From a CData entry in CFF, create a LocalConnectivity entity.
    """
    # expected_length = cortex.region_mapping.shape[0]
    tmpdir = os.path.join(gettempdir(), local_connectivity_data.parent_cfile.get_unique_cff_name())
    LOG.debug("Using temporary folder for Local Connectivity import: " + tmpdir)
    _zipfile = ZipFile(local_connectivity_data.parent_cfile.src, 'r', ZIP_DEFLATED)
    local_connectivity_path = _zipfile.extract(local_connectivity_data.src, tmpdir)

    gid = dao.get_last_data_with_uid(meta[constants.KEY_SURFACE_UID], surfaces.CorticalSurface)
    surface_data = ABCAdapter.load_entity_by_gid(gid)

    local_connectivity = surfaces.LocalConnectivity()
    local_connectivity.storage_path = storage_path
    local_connectivity_data = read_matlab_data(local_connectivity_path, constants.DATA_NAME_LOCAL_CONN)

    if local_connectivity_data.shape[0] < expected_length:
        padding = sparse.csc_matrix((local_connectivity_data.shape[0],
                                     expected_length - local_connectivity_data.shape[0]))
        local_connectivity_data = sparse.hstack([local_connectivity_data, padding])
        padding = sparse.csc_matrix((expected_length - local_connectivity_data.shape[0],
                                     local_connectivity_data.shape[1]))
        local_connectivity_data = sparse.vstack([local_connectivity_data, padding])

    local_connectivity.equation = None
    local_connectivity.matrix = local_connectivity_data
    local_connectivity.surface = surface_data

    uid = meta[constants.KEY_UID] if constants.KEY_UID in meta else None
    if os.path.isdir(tmpdir):
        shutil.rmtree(tmpdir)
    return local_connectivity, uid
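The same padding idea as in Example 11, but for sparse matrices; a stand-alone sketch with illustrative dimensions:

from scipy import sparse

m = sparse.csc_matrix((3, 3))               # a 3x3 local-connectivity-like matrix
expected_length = 5
pad_cols = sparse.csc_matrix((m.shape[0], expected_length - m.shape[1]))
m = sparse.hstack([m, pad_cols])            # now 3x5
pad_rows = sparse.csc_matrix((expected_length - m.shape[0], m.shape[1]))
m = sparse.vstack([m, pad_rows])            # now 5x5, zero-padded on both axes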
Example 13: get_template_for_adapter
def get_template_for_adapter(self, project_id, step_key, algo_group, submit_url, session_reset=True, is_burst=True):
    """ Get the Input HTML Interface template for a given adapter. """
    try:
        if session_reset:
            self.context.clean_from_session()

        group = None
        # Cache some values in session, for performance
        previous_tree = self.context.get_current_input_tree()
        previous_sub_step = self.context.get_current_substep()
        if not session_reset and previous_tree is not None and previous_sub_step == algo_group.id:
            adapter_interface = previous_tree
        else:
            group, adapter_interface = self.flow_service.prepare_adapter(project_id, algo_group)
            self.context.add_adapter_to_session(algo_group, adapter_interface)

        category = self.flow_service.get_category_by_id(step_key)
        title = "Fill parameters for step " + category.displayname.lower()
        if group:
            title = title + " - " + group.displayname

        current_defaults = self.context.get_current_default()
        if current_defaults is not None:
            # Change default values in tree, according to selected input
            adapter_interface = ABCAdapter.fill_defaults(adapter_interface, current_defaults)

        template_specification = dict(submitLink=submit_url, inputList=adapter_interface, title=title)
        self._populate_section(algo_group, template_specification, is_burst)
        return template_specification
    except OperationException as oexc:
        self.logger.error("Inconsistent Adapter")
        self.logger.exception(oexc)
        common.set_warning_message('Inconsistent Adapter! Please review the link (development problem)!')
Example 14: test_build_adapter_instance
def test_build_adapter_instance(self):
    """
    Test the standard flow for building an adapter instance.
    """
    algo_group = dao.find_group(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_VALID_CLASS)
    adapter = ABCAdapter.build_adapter(algo_group)
    self.assertTrue(isinstance(adapter, ABCSynchronous), "Something went wrong with valid data!")
Example 15: get_series_array_discrete
def get_series_array_discrete(self, datatype_group_gid, backPage, color_metric=None, size_metric=None):
    """
    Create new data for when the user chooses to refresh from the UI.
    """
    if color_metric == 'None':
        color_metric = None
    if size_metric == 'None':
        size_metric = None

    algorithm = self.flow_service.get_algorithm_by_module_and_class(DISCRETE_PSE_ADAPTER_MODULE,
                                                                    DISCRETE_PSE_ADAPTER_CLASS)
    adapter = ABCAdapter.build_adapter(algorithm)
    if self._is_compatible(algorithm, datatype_group_gid):
        try:
            pse_context = adapter.prepare_parameters(datatype_group_gid, backPage, color_metric, size_metric)
            return dict(series_array=pse_context.series_array,
                        has_started_ops=pse_context.has_started_ops)
        except LaunchException as ex:
            error_msg = urllib.quote(ex.message)
    else:
        error_msg = urllib.quote("Discrete PSE is incompatible (most probably due to result size being too large).")

    name = urllib.quote(adapter._ui_name)
    raise cherrypy.HTTPRedirect(REDIRECT_MSG % (name, error_msg))