本文整理汇总了Python中tvb.core.entities.file.files_helper.FilesHelper.write_operation_metadata方法的典型用法代码示例。如果您正苦于以下问题:Python FilesHelper.write_operation_metadata方法的具体用法?Python FilesHelper.write_operation_metadata怎么用?Python FilesHelper.write_operation_metadata使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类tvb.core.entities.file.files_helper.FilesHelper的用法示例。
在下文中一共展示了FilesHelper.write_operation_metadata方法的10个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: _export_linked_datatypes
# 需要导入模块: from tvb.core.entities.file.files_helper import FilesHelper [as 别名]
# 或者: from tvb.core.entities.file.files_helper.FilesHelper import write_operation_metadata [as 别名]
def _export_linked_datatypes(self, project, zip_file):
    """
    Add the datatypes linked (not owned) by *project* to *zip_file*.

    A synthetic TVB-importer operation is created so that, at import time,
    the archived files land in a dedicated operation folder. Does nothing
    when the project has no linked datatypes.
    """
    helper = FilesHelper()
    paths_to_export = self._get_linked_datatypes_storage_path(project)
    # do not export an empty operation
    if not paths_to_export:
        return

    # Make an import operation which will contain links to other projects
    importer_algo = dao.get_algorithm_by_module(TVB_IMPORTER_MODULE, TVB_IMPORTER_CLASS)
    fake_op = model.Operation(None, project.id, importer_algo.id, '')
    fake_op.project = project
    fake_op.algorithm = importer_algo
    fake_op.id = 'links-to-external-projects'
    fake_op.start_now()
    fake_op.mark_complete(model.STATUS_FINISHED)

    # write operation.xml to disk
    helper.write_operation_metadata(fake_op)

    folder = helper.get_operation_folder(fake_op.project.name, fake_op.id)
    meta_xml = helper.get_operation_meta_file_path(fake_op.project.name, fake_op.id)
    folder_base = os.path.basename(folder)

    # add operation.xml
    zip_file.write(meta_xml, folder_base + '/' + os.path.basename(meta_xml))

    # add linked datatypes to archive in the import operation
    for linked_path in paths_to_export:
        zip_file.write(linked_path, folder_base + '/' + os.path.basename(linked_path))

    # remove these files, since we only want them in export archive
    helper.remove_folder(folder)
示例2: _adapt_epileptor_simulations
# 需要导入模块: from tvb.core.entities.file.files_helper import FilesHelper [as 别名]
# 或者: from tvb.core.entities.file.files_helper.FilesHelper import write_operation_metadata [as 别名]
def _adapt_epileptor_simulations():
    """
    Previous Simulations on EpileptorWithPermitivity model, should be converted to use the Epileptor model.
    As the parameters from the two models are having different ranges and defaults, we do not translate parameters,
    we only set the Epileptor as model instead of EpileptorPermittivityCoupling, and leave the model params to defaults.
    """
    session = SA_SESSIONMAKER()
    epileptor_old = "EpileptorPermittivityCoupling"
    epileptor_new = "Epileptor"
    param_model = "model"
    try:
        # Fetch every operation whose JSON parameters mention the old model name.
        all_ep_ops = session.query(model.Operation).filter(
            model.Operation.parameters.ilike('%"' + epileptor_old + '"%')).all()
        files_helper = FilesHelper()
        all_bursts = dict()
        for ep_op in all_ep_ops:
            try:
                op_params = parse_json_parameters(ep_op.parameters)
                # The ilike filter may match operations where the old name appears in
                # some other parameter; only convert exact model matches.
                if op_params[param_model] != epileptor_old:
                    LOGGER.debug("Skipping op " + str(op_params[param_model]) + " -- " + str(ep_op))
                    continue
                LOGGER.debug("Updating " + str(op_params))
                op_params[param_model] = epileptor_new
                ep_op.parameters = json.dumps(op_params, cls=MapAsJson.MapAsJsonEncoder)
                LOGGER.debug("New params:" + ep_op.parameters)
                # Keep the on-disk operation.xml in sync with the DB record.
                files_helper.write_operation_metadata(ep_op)
                burst = dao.get_burst_for_operation_id(ep_op.id)
                if burst is not None:
                    LOGGER.debug("Updating burst:" + str(burst))
                    burst.prepare_after_load()
                    burst.simulator_configuration[param_model] = {'value': epileptor_new}
                    burst._simulator_configuration = json.dumps(burst.simulator_configuration,
                                                                cls=MapAsJson.MapAsJsonEncoder)
                    # FIX: dict.has_key() is Python-2-only; `in` works on both Python 2 and 3.
                    if burst.id not in all_bursts:
                        all_bursts[burst.id] = burst
            except Exception:
                # Best-effort migration: log and continue with the next operation.
                LOGGER.exception("Could not process " + str(ep_op))
        session.add_all(all_ep_ops)
        session.add_all(all_bursts.values())
        session.commit()
    except Exception:
        LOGGER.exception("Could not update Simulation Epileptor Params")
    finally:
        session.close()
示例3: FilesHelperTest
# 需要导入模块: from tvb.core.entities.file.files_helper import FilesHelper [as 别名]
# 或者: from tvb.core.entities.file.files_helper.FilesHelper import write_operation_metadata [as 别名]
class FilesHelperTest(TransactionalTestCase):
"""
This class contains tests for the tvb.core.entities.file.files_helper module.
"""
PROJECT_NAME = "test_proj"
def setUp(self):
    """
    Set up the context needed by the tests: a fresh FilesHelper,
    plus a test user owning a test project.
    """
    self.files_helper = FilesHelper()
    self.test_user = TestFactory.create_user()
    self.test_project = TestFactory.create_project(self.test_user, self.PROJECT_NAME)
def tearDown(self):
    """ Remove generated project during tests. """
    # Tests create project folders on disk; remove them so runs stay isolated.
    self.delete_project_folders()
def test_check_created(self):
    """ Test standard flows for check created. """
    # The default call must create the root storage folder.
    self.files_helper.check_created()
    self.assertTrue(os.path.exists(root_storage), "Storage not created!")
    # An explicit sub-folder must be created as well.
    sub_folder = os.path.join(root_storage, "test")
    self.files_helper.check_created(sub_folder)
    self.assertTrue(os.path.exists(root_storage), "Storage not created!")
    self.assertTrue(os.path.exists(sub_folder), "Test directory not created!")
def test_get_project_folder(self):
    """
    Test the get_project_folder method which should create a folder in case
    it doesn't already exist.
    """
    root_folder = self.files_helper.get_project_folder(self.test_project)
    self.assertTrue(os.path.exists(root_folder), "Folder doesn't exist")
    # Requesting a sub-folder must create it too.
    sub_folder = self.files_helper.get_project_folder(self.test_project, "43")
    self.assertTrue(os.path.exists(root_folder), "Folder doesn't exist")
    self.assertTrue(os.path.exists(sub_folder), "Folder doesn't exist")
def test_rename_project_structure(self):
    """ Try to rename the folder structure of a project. Standard flow. """
    # Ensure the structure exists on disk before renaming it.
    self.files_helper.get_project_folder(self.test_project)
    rename_result = self.files_helper.rename_project_structure(self.test_project.name, "new_name")
    self.assertNotEqual(rename_result[0], rename_result[1], "Rename didn't take effect.")
def test_rename_structure_same_name(self):
    """ Try to rename the folder structure of a project. Same name. """
    self.files_helper.get_project_folder(self.test_project)
    # Renaming a project to its current name must be rejected.
    with self.assertRaises(FileStructureException):
        self.files_helper.rename_project_structure(self.test_project.name, self.PROJECT_NAME)
def test_remove_project_structure(self):
    """ Check that remove project structure deletes the corresponding folder. Standard flow. """
    project_folder = self.files_helper.get_project_folder(self.test_project)
    self.assertTrue(os.path.exists(project_folder), "Folder was not created.")
    self.files_helper.remove_project_structure(self.test_project.name)
    self.assertFalse(os.path.exists(project_folder), "Project folder not deleted.")
def test_write_project_metadata(self):
    """ Write XML for test-project, then read it back and compare entities. """
    self.files_helper.write_project_metadata(self.test_project)
    meta_file = self.files_helper.get_project_meta_file_path(self.PROJECT_NAME)
    self.assertTrue(os.path.exists(meta_file))
    # Re-hydrate a Project entity from the XML that was just written.
    meta_dict = XMLReader(meta_file).read_metadata()
    reloaded = model.Project(None, None)
    reloaded.from_dict(meta_dict, self.test_user.id)
    self.assertEqual(self.test_project.name, reloaded.name)
    self.assertEqual(self.test_project.description, reloaded.description)
    self.assertEqual(self.test_project.gid, reloaded.gid)
    # Compare the full dict forms, ignoring the volatile timestamp.
    original_dict = self.test_project.to_dict()[1]
    del original_dict['last_updated']
    reloaded_dict = reloaded.to_dict()[1]
    del reloaded_dict['last_updated']
    self.assertDictContainsSubset(original_dict, reloaded_dict)
    self.assertDictContainsSubset(reloaded_dict, original_dict)
def test_write_operation_metadata(self):
"""
Test that a correct XML is created for an operation.
"""
operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
expected_file = self.files_helper.get_operation_meta_file_path(self.PROJECT_NAME, operation.id)
self.assertFalse(os.path.exists(expected_file))
self.files_helper.write_operation_metadata(operation)
self.assertTrue(os.path.exists(expected_file))
operation_meta = XMLReader(expected_file).read_metadata()
loaded_operation = model.Operation(None, None, None, None)
loaded_operation.from_dict(operation_meta, dao)
expected_dict = operation.to_dict()[1]
#.........这里部分代码省略.........
示例4: __init__
# 需要导入模块: from tvb.core.entities.file.files_helper import FilesHelper [as 别名]
# 或者: from tvb.core.entities.file.files_helper.FilesHelper import write_operation_metadata [as 别名]
#.........这里部分代码省略.........
image_name = operation.algorithm.name.replace(' ', '-')
else:
# default to a generic name
image_name = "figure"
figure_count = dao.get_figure_count(project.id, user.id) + 1
return 'TVB-%s-%s' % (image_name, figure_count)
def store_result_figure(self, project, user, img_type, export_data, image_name=None, operation_id=None):
    """
    Store into a file, Result Image and reference in DB.
    """
    store_path, file_name = self._image_path(project.name, img_type)

    # Dump the raw export data with the writer matching its type.
    if img_type == FigureService._TYPE_PNG:    # PNG file from canvas
        self._write_png(store_path, export_data)
    elif img_type == FigureService._TYPE_SVG:  # SVG file from svg viewer
        self._write_svg(store_path, export_data)

    if operation_id:
        operation = dao.get_operation_by_id(operation_id)
    else:
        operation = operation_id = None

    image_name = self._generate_image_name(project, user, operation, image_name)

    # Persist the figure reference, then reload so lazy fields are populated.
    figure_entity = model.ResultFigure(operation_id, user.id, project.id,
                                       FigureService._DEFAULT_SESSION_NAME,
                                       image_name, file_name, img_type)
    figure_entity = dao.store_entity(figure_entity)
    figure = dao.load_figure(figure_entity.id)

    # Write image meta data to disk
    self.file_helper.write_image_metadata(figure)
    if operation:
        # Force writing operation meta data on disk.
        # This is important later for operation import
        self.file_helper.write_operation_metadata(operation)
def retrieve_result_figures(self, project, user, selected_session_name='all_sessions'):
    """
    Retrieve from DB all the stored Displayer previews that belongs to the specified session. The
    previews are for current user and project; grouped by session.
    """
    result, previews_info = dao.get_previews(project.id, user.id, selected_session_name)
    for session_name in result:
        for figure in result[session_name]:
            folder = self.file_helper.get_images_folder(project.name)
            full_path = os.path.join(folder, figure.file_path)
            # Turn the stored relative file name into a URL path for the client.
            figure.file_path = utils.path2url_part(full_path)
    return result, previews_info
@staticmethod
def load_figure(figure_id):
    """
    Loads a stored figure by its id.

    :param figure_id: primary key of the figure to load
    :return: the figure entity as loaded by the DB layer
    """
    return dao.load_figure(figure_id)
def edit_result_figure(self, figure_id, **data):
    """
    Retrieve and edit a previously stored figure.
    """
    figure = dao.load_figure(figure_id)
    figure.session_name = data['session_name']
    figure.name = data['name']
    dao.store_entity(figure)

    # Reload from DB so lazy fields are populated, then sync the XML metadata
    # attached to the image.
    refreshed_figure = dao.load_figure(figure_id)
    self.file_helper.write_image_metadata(refreshed_figure)
def remove_result_figure(self, figure_id):
    """
    Remove figure from DB and file storage.
    """
    figure = dao.load_figure(figure_id)

    # Delete the stored image file from disk, if still present.
    images_folder = self.file_helper.get_images_folder(figure.project.name)
    stored_image = os.path.join(images_folder, figure.file_path)
    if os.path.exists(stored_image):
        os.remove(stored_image)
    self.file_helper.remove_image_metadata(figure)

    # Remove figure reference from DB.
    return dao.remove_entity(model.ResultFigure, figure_id)
示例5: __init__
# 需要导入模块: from tvb.core.entities.file.files_helper import FilesHelper [as 别名]
# 或者: from tvb.core.entities.file.files_helper.FilesHelper import write_operation_metadata [as 别名]
#.........这里部分代码省略.........
@staticmethod
def get_operations_in_group(operation_group):
    """ Return all the operations from an operation group. """
    # Thin DB-layer delegate; keeps the service API uniform.
    return dao.get_operations_in_group(operation_group.id)
@staticmethod
def is_upload_operation(operation_gid):
    """ Returns True only if the operation with the given GID is an upload operation. """
    # Delegates straight to the DB layer.
    return dao.is_upload_operation(operation_gid)
@staticmethod
def get_all_operations_for_uploaders(project_id):
    """ Returns all finished upload operations. """
    # Delegates straight to the DB layer.
    return dao.get_all_operations_for_uploaders(project_id)
def set_operation_and_group_visibility(self, entity_gid, is_visible, is_operation_group=False):
    """
    Sets the operation visibility.
    If 'is_operation_group' is True than this method will change the visibility for all
    the operation from the OperationGroup with the GID field equal to 'entity_gid'.
    """
    def apply_visibility(operation):
        # workaround:
        # 'reload' the operation so that it has the project property set.
        # get_operations_in_group does not eager load it and now we're out of a
        # sqlalchemy session; write_operation_metadata requires that property.
        operation = dao.get_operation_by_id(operation.id)
        # end hack
        operation.visible = is_visible
        self.structure_helper.write_operation_metadata(operation)
        dao.store_entity(operation)

    def apply_to_group(operation_group_id):
        for member_op in dao.get_operations_in_group(operation_group_id):
            apply_visibility(member_op)

    if is_operation_group:
        apply_to_group(dao.get_operationgroup_by_gid(entity_gid).id)
    else:
        operation = dao.get_operation_by_gid(entity_gid)
        if operation.fk_operation_group is not None:
            # An operation that belongs to a group toggles the entire group.
            apply_to_group(operation.fk_operation_group)
        else:
            apply_visibility(operation)
def get_operation_details(self, operation_gid, is_group):
"""
:returns: an entity OperationOverlayDetails filled with all information for current operation details.
"""
if is_group:
operation_group = self.get_operation_group_by_gid(operation_gid)
operation = dao.get_operations_in_group(operation_group.id, False, True)
## Reload, to make sure all attributes lazy are populated as well.
operation = dao.get_operation_by_gid(operation.gid)
no_of_op_in_group = dao.get_operations_in_group(operation_group.id, is_count=True)
datatype_group = self.get_datatypegroup_by_op_group_id(operation_group.id)
count_result = dao.count_datatypes_in_group(datatype_group.id)
示例6: __init__
# 需要导入模块: from tvb.core.entities.file.files_helper import FilesHelper [as 别名]
# 或者: from tvb.core.entities.file.files_helper.FilesHelper import write_operation_metadata [as 别名]
#.........这里部分代码省略.........
dao.store_entity(datatype_group)
def initiate_prelaunch(self, operation, adapter_instance, temp_files, **kwargs):
    """
    Public method.
    This should be the common point in calling an adapter- method.

    :param operation: Operation entity to launch
    :param adapter_instance: adapter that will process the operation
    :param temp_files: temporary files removed after a successful run
    :param kwargs: UI-provided input parameters for the adapter
    :return: result message produced by the adapter
    """
    result_msg = ""
    try:
        unique_id = None
        if self.ATT_UID in kwargs:
            unique_id = kwargs[self.ATT_UID]
        filtered_kwargs = adapter_instance.prepare_ui_inputs(kwargs)
        self.logger.debug("Launching operation " + str(operation.id) + " with " + str(filtered_kwargs))
        operation = dao.get_operation_by_id(operation.id)  # Load Lazy fields
        # Normalize parameter keys to plain strings before passing to the adapter.
        params = dict()
        for k, value_ in filtered_kwargs.items():
            params[str(k)] = value_
        # Compute how much disk the user may still consume before launching.
        disk_space_per_user = TvbProfile.current.MAX_DISK_SPACE
        pending_op_disk_space = dao.compute_disk_size_for_started_ops(operation.fk_launched_by)
        user_disk_space = dao.compute_user_generated_disk_size(operation.fk_launched_by)  # From kB to Bytes
        available_space = disk_space_per_user - pending_op_disk_space - user_disk_space
        result_msg, nr_datatypes = adapter_instance._prelaunch(operation, unique_id, available_space, **params)
        # Reload: _prelaunch may have changed the DB record.
        operation = dao.get_operation_by_id(operation.id)
        ## Update DB stored kwargs for search purposes, to contain only valuable params (no unselected options)
        operation.parameters = json.dumps(kwargs)
        operation.mark_complete(model.STATUS_FINISHED)
        if nr_datatypes > 0:
            #### Write operation meta-XML only if some result are returned
            self.file_helper.write_operation_metadata(operation)
        dao.store_entity(operation)
        self._remove_files(temp_files)
    except zipfile.BadZipfile as excep:
        msg = "The uploaded file is not a valid ZIP!"
        self._handle_exception(excep, temp_files, msg, operation)
    except TVBException as excep:
        self._handle_exception(excep, temp_files, excep.message, operation)
    except MemoryError:
        msg = ("Could not execute operation because there is not enough free memory." +
               " Please adjust operation parameters and re-launch it.")
        self._handle_exception(Exception(msg), temp_files, msg, operation)
    except Exception as excep1:
        msg = "Could not launch Operation with the given input data!"
        self._handle_exception(excep1, temp_files, msg, operation)
    ### Try to find next workflow Step. It might throw WorkflowException
    next_op_id = self.workflow_service.prepare_next_step(operation.id)
    self.launch_operation(next_op_id)
    return result_msg
def _send_to_cluster(self, operations, adapter_instance, current_username="unknown"):
    """ Initiate operation on cluster, one backend call per operation. """
    for op in operations:
        try:
            BACKEND_CLIENT.execute(str(op.id), current_username, adapter_instance)
        except Exception as launch_error:
            # A failed submission marks the operation, then continues with the rest.
            self._handle_exception(launch_error, {}, "Could not start operation!", op)
    return operations
示例7: __init__
# 需要导入模块: from tvb.core.entities.file.files_helper import FilesHelper [as 别名]
# 或者: from tvb.core.entities.file.files_helper.FilesHelper import write_operation_metadata [as 别名]
class WorkflowService:
"""
service layer for work-flow entity.
"""
def __init__(self):
    # Module-bound logger plus the helper used to write operation metadata on disk.
    self.logger = get_logger(self.__class__.__module__)
    self.file_helper = FilesHelper()
def persist_operation_state(self, operation, operation_status, message=None):
    """
    Update Operation instance state. Store it in DB and on HDD.
    :param operation: Operation instance
    :param operation_status: new status
    :param message: message in case of error
    :return: operation instance changed
    """
    operation.mark_complete(operation_status, unicode(message))
    dao.store_entity(operation)
    # Reload so lazy fields are available, then mirror the state into operation.xml.
    reloaded_op = dao.get_operation_by_id(operation.id)
    self.file_helper.write_operation_metadata(reloaded_op)
    return reloaded_op
@staticmethod
def store_workflow_step(workflow_step):
    """
    Store a workflow step entity.
    """
    # Simple DB delegate kept on the service for API uniformity.
    dao.store_entity(workflow_step)
@staticmethod
def create_and_store_workflow(project_id, burst_id, simulator_index, simulator_id, operations):
    """
    Create and store one workflow per simulator operation, for the given project and burst.
    :param simulator_index: the index of the simulator in the workflow
    :param simulator_id: the id of the simulator adapter
    :param operations: a list with the operations created for the simulator steps
    """
    stored_workflows = []
    for sim_operation in operations:
        workflow = dao.store_entity(model.Workflow(project_id, burst_id))
        stored_workflows.append(workflow)
        # Each workflow gets a single simulation step bound to its operation.
        step = model.WorkflowStep(algorithm_id=simulator_id, workflow_id=workflow.id,
                                  step_index=simulator_index, static_param=sim_operation.parameters)
        step.fk_operation = sim_operation.id
        dao.store_entity(step)
    return stored_workflows
@staticmethod
def set_dynamic_step_references(workflow_step, step_reference):
    """
    :param workflow_step: a valid instance of a workflow_step
    :param step_reference: the step to which every dataType reference index should be set
    For each dynamic parameter of the given workflow_step set the 'step_index' at step_reference.
    """
    params = workflow_step.dynamic_param
    for param_name in params:
        params[param_name][WorkflowStepConfiguration.STEP_INDEX_KEY] = step_reference
    # Re-assign so the (possibly property-backed) attribute persists the change — TODO confirm.
    workflow_step.dynamic_param = params
def prepare_next_step(self, last_executed_op_id):
"""
If the operation with id 'last_executed_op_id' resulted after
the execution of a workflow step then this method will launch
the operation corresponding to the next step from the workflow.
"""
try:
current_step, next_workflow_step = self._get_data(last_executed_op_id)
if next_workflow_step is not None:
operation = dao.get_operation_by_id(next_workflow_step.fk_operation)
dynamic_param_names = next_workflow_step.dynamic_workflow_param_names
if len(dynamic_param_names) > 0:
op_params = json.loads(operation.parameters)
for param_name in dynamic_param_names:
dynamic_param = op_params[param_name]
former_step = dao.get_workflow_step_by_step_index(next_workflow_step.fk_workflow,
dynamic_param[
WorkflowStepConfiguration.STEP_INDEX_KEY])
if type(dynamic_param[WorkflowStepConfiguration.DATATYPE_INDEX_KEY]) is IntType:
datatypes = dao.get_results_for_operation(former_step.fk_operation)
op_params[param_name] = datatypes[
dynamic_param[WorkflowStepConfiguration.DATATYPE_INDEX_KEY]].gid
else:
previous_operation = dao.get_operation_by_id(former_step.fk_operation)
op_params[param_name] = json.loads(previous_operation.parameters)[
dynamic_param[WorkflowStepConfiguration.DATATYPE_INDEX_KEY]]
operation.parameters = json.dumps(op_params)
operation = dao.store_entity(operation)
return operation.id
elif current_step is not None:
current_workflow = dao.get_workflow_by_id(current_step.fk_workflow)
current_workflow.status = current_workflow.STATUS_FINISHED
dao.store_entity(current_workflow)
#.........这里部分代码省略.........
示例8: __init__
# 需要导入模块: from tvb.core.entities.file.files_helper import FilesHelper [as 别名]
# 或者: from tvb.core.entities.file.files_helper.FilesHelper import write_operation_metadata [as 别名]
class WorkflowService:
"""
service layer for work-flow entity.
"""
def __init__(self):
    # Logger named after this module; FilesHelper writes metadata files to disk.
    self.logger = get_logger(self.__class__.__module__)
    self.file_helper = FilesHelper()
def persist_operation_state(self, operation, operation_status, message=None):
    """
    Update Operation instance state. Store it in DB and on HDD.
    :param operation: Operation instance
    :param operation_status: new status
    :param message: message in case of error
    :return: operation instance changed
    """
    # NOTE(review): unicode(None) yields u'None'; verify callers always pass a real message on error.
    operation.mark_complete(operation_status, unicode(message))
    dao.store_entity(operation)
    # Reload to get lazy fields, then mirror the new state into operation.xml on disk.
    operation = dao.get_operation_by_id(operation.id)
    self.file_helper.write_operation_metadata(operation)
    return operation
@staticmethod
def store_workflow_step(workflow_step):
    """
    Store a workflow step entity.
    """
    # Thin delegate to the DB layer.
    dao.store_entity(workflow_step)
@staticmethod
def create_and_store_workflow(project_id, burst_id, simulator_index, simulator_id, operations):
    """
    Create and store the workflow given the project, user and burst in which the workflow is created.
    :param simulator_index: the index of the simulator in the workflow
    :param simulator_id: the id of the simulator adapter
    :param operations: a list with the operations created for the simulator steps
    :return: the list of stored Workflow entities, one per operation
    """
    workflows = []
    for operation in operations:
        # One workflow per simulator operation, each holding a single simulation step.
        new_workflow = model.Workflow(project_id, burst_id)
        new_workflow = dao.store_entity(new_workflow)
        workflows.append(new_workflow)
        simulation_step = model.WorkflowStep(algorithm_id=simulator_id, workflow_id=new_workflow.id,
                                             step_index=simulator_index, static_param=operation.parameters)
        simulation_step.fk_operation = operation.id
        dao.store_entity(simulation_step)
    return workflows
@staticmethod
def set_dynamic_step_references(workflow_step, step_reference):
    """
    :param workflow_step: a valid instance of a workflow_step
    :param step_reference: the step to which every dataType reference index should be set
    For each dynamic parameter of the given workflow_step set the 'step_index' at step_reference.
    """
    dynamic_params = workflow_step.dynamic_param
    for entry in dynamic_params:
        dynamic_params[entry][WorkflowStepConfiguration.STEP_INDEX_KEY] = step_reference
    # Re-assign so the attribute setter (if any) persists the change — TODO confirm.
    workflow_step.dynamic_param = dynamic_params
def prepare_next_step(self, last_executed_op_id):
"""
If the operation with id 'last_executed_op_id' resulted after
the execution of a workflow step then this method will launch
the operation corresponding to the next step from the workflow.
"""
try:
current_step, next_workflow_step = self._get_data(last_executed_op_id)
if next_workflow_step is not None:
operation = dao.get_operation_by_id(next_workflow_step.fk_operation)
dynamic_param_names = next_workflow_step.dynamic_workflow_param_names
if len(dynamic_param_names) > 0:
op_params = json.loads(operation.parameters)
for param_name in dynamic_param_names:
dynamic_param = op_params[param_name]
former_step = dao.get_workflow_step_by_step_index(next_workflow_step.fk_workflow,
dynamic_param[
WorkflowStepConfiguration.STEP_INDEX_KEY])
if type(dynamic_param[WorkflowStepConfiguration.DATATYPE_INDEX_KEY]) is IntType:
datatypes = dao.get_results_for_operation(former_step.fk_operation)
op_params[param_name] = datatypes[
dynamic_param[WorkflowStepConfiguration.DATATYPE_INDEX_KEY]].gid
else:
previous_operation = dao.get_operation_by_id(former_step.fk_operation)
op_params[param_name] = json.loads(previous_operation.parameters)[
dynamic_param[WorkflowStepConfiguration.DATATYPE_INDEX_KEY]]
operation.parameters = json.dumps(op_params)
operation = dao.store_entity(operation)
return operation.id
elif current_step is not None:
current_workflow = dao.get_workflow_by_id(current_step.fk_workflow)
current_workflow.status = current_workflow.STATUS_FINISHED
dao.store_entity(current_workflow)
#.........这里部分代码省略.........
示例9: _adapt_simulation_monitor_params
# 需要导入模块: from tvb.core.entities.file.files_helper import FilesHelper [as 别名]
# 或者: from tvb.core.entities.file.files_helper.FilesHelper import write_operation_metadata [as 别名]
def _adapt_simulation_monitor_params():
    """
    For previous simulation with EEG monitor, adjust the change of input parameters.

    The old single parameter 'EEG_projection_matrix_data' is replaced by three
    parameters (projection, sensors, region mapping). Both the operation
    parameters and any associated burst configuration are rewritten.
    """
    session = SA_SESSIONMAKER()
    param_connectivity = "connectivity"
    param_eeg_proj_old = "monitors_parameters_option_EEG_projection_matrix_data"
    param_eeg_proj_new = "monitors_parameters_option_EEG_projection"
    param_eeg_sensors = "monitors_parameters_option_EEG_sensors"
    param_eeg_rm = "monitors_parameters_option_EEG_region_mapping"
    try:
        all_eeg_ops = session.query(model.Operation).filter(
            model.Operation.parameters.ilike('%"' + param_eeg_proj_old + '"%')).all()
        files_helper = FilesHelper()
        all_bursts = dict()
        for eeg_op in all_eeg_ops:
            try:
                op_params = parse_json_parameters(eeg_op.parameters)
                LOGGER.debug("Updating " + str(op_params))
                old_projection_guid = op_params[param_eeg_proj_old]
                connectivity_guid = op_params[param_connectivity]
                rm = dao.get_generic_entity(RegionMapping, connectivity_guid, "_connectivity")[0]
                dt = dao.get_generic_entity(model.DataType, old_projection_guid, "gid")[0]
                if dt.type == 'ProjectionSurfaceEEG':
                    LOGGER.debug("Previous Prj is surface: " + old_projection_guid)
                    new_projection_guid = old_projection_guid
                else:
                    # NOTE(review): SQL assembled by string concatenation; the values come from
                    # our own DB (gids / ids), but bound parameters would be safer — consider text(...).bindparams.
                    new_projection_guid = session.execute(text("""SELECT DT.gid
                        FROM "MAPPED_PROJECTION_MATRIX_DATA" PMO, "DATA_TYPES" DTO,
                             "MAPPED_PROJECTION_MATRIX_DATA" PM, "DATA_TYPES" DT
                        WHERE DTO.id=PMO.id and DT.id=PM.id and PM._sensors=PMO._sensors and
                              PM._sources='""" + rm._surface + """' and
                              DTO.gid='""" + old_projection_guid + """';""")).fetchall()[0][0]
                    LOGGER.debug("New Prj is surface: " + str(new_projection_guid))
                sensors_guid = session.execute(text("""SELECT _sensors
                    FROM "MAPPED_PROJECTION_MATRIX_DATA"
                    WHERE id = '""" + str(dt.id) + """';""")).fetchall()[0][0]
                # Swap the old parameter for the three new ones.
                del op_params[param_eeg_proj_old]
                op_params[param_eeg_proj_new] = str(new_projection_guid)
                op_params[param_eeg_sensors] = str(sensors_guid)
                op_params[param_eeg_rm] = str(rm.gid)
                eeg_op.parameters = json.dumps(op_params, cls=MapAsJson.MapAsJsonEncoder)
                LOGGER.debug("New params:" + eeg_op.parameters)
                # Keep the on-disk operation.xml in sync with the DB record.
                files_helper.write_operation_metadata(eeg_op)
                burst = dao.get_burst_for_operation_id(eeg_op.id)
                if burst is not None:
                    LOGGER.debug("Updating burst:" + str(burst))
                    burst.prepare_after_load()
                    del burst.simulator_configuration[param_eeg_proj_old]
                    burst.simulator_configuration[param_eeg_proj_new] = {'value': str(new_projection_guid)}
                    burst.simulator_configuration[param_eeg_sensors] = {'value': str(sensors_guid)}
                    burst.simulator_configuration[param_eeg_rm] = {'value': str(rm.gid)}
                    burst._simulator_configuration = json.dumps(burst.simulator_configuration,
                                                                cls=MapAsJson.MapAsJsonEncoder)
                    # FIX: dict.has_key() is Python-2-only; `in` works on both Python 2 and 3.
                    if burst.id not in all_bursts:
                        all_bursts[burst.id] = burst
            except Exception:
                # Best-effort migration: log and continue with the next operation.
                LOGGER.exception("Could not process " + str(eeg_op))
        session.add_all(all_eeg_ops)
        session.add_all(all_bursts.values())
        session.commit()
    except Exception:
        LOGGER.exception("Could not update Simulation Monitor Params")
    finally:
        session.close()
示例10: __init__
# 需要导入模块: from tvb.core.entities.file.files_helper import FilesHelper [as 别名]
# 或者: from tvb.core.entities.file.files_helper.FilesHelper import write_operation_metadata [as 别名]
class FigureService:
    """
    Service layer for Figure entities.
    """
    # Supported export formats.
    _TYPE_PNG = "png"
    _TYPE_SVG = "svg"
    # Branding-bar images appended below every stored figure.
    _BRANDING_BAR_PNG = os.path.join(os.path.dirname(__file__), "resources", "branding_bar.png")
    _BRANDING_BAR_SVG = os.path.join(os.path.dirname(__file__), "resources", "branding_bar.svg")
    _DEFAULT_SESSION_NAME = "Default"
    # File-name prefix; the image-type extension is appended at store time.
    _DEFAULT_IMAGE_FILE_NAME = "snapshot."

    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.file_helper = FilesHelper()
def store_result_figure(self, project, user, img_type, operation_id, export_data):
    """
    Store into a file, Result Image and reference in DB.

    :param project: Project entity the figure belongs to
    :param user: User entity storing the figure
    :param img_type: one of FigureService._TYPE_PNG / _TYPE_SVG
    :param operation_id: id of the operation the figure resulted from
    :param export_data: base64-encoded PNG data, or an SVG document string
    """
    # Generate path where to store image
    store_path = self.file_helper.get_images_folder(project.name, operation_id)
    store_path = utils.get_unique_file_name(store_path, FigureService._DEFAULT_IMAGE_FILE_NAME + img_type)[0]
    file_path = os.path.split(store_path)[1]
    if img_type == FigureService._TYPE_PNG:  # PNG file from canvas
        imgData = base64.b64decode(export_data)  # decode the image
        fakeImgFile = StringIO(imgData)  # PIL.Image only opens from file, so fake one
        origImg = Image.open(fakeImgFile)
        brandingBar = Image.open(FigureService._BRANDING_BAR_PNG)
        finalSize = (origImg.size[0],  # original width
                     origImg.size[1] + brandingBar.size[1])  # original height + brandingBar height
        finalImg = Image.new("RGBA", finalSize)
        finalImg.paste(origImg, (0, 0))  # add the original image
        finalImg.paste(brandingBar, (0, origImg.size[1]))  # add the branding bar, below the original
        # the extra width will be discarded
        finalImg.save(store_path)  # store to disk
    elif img_type == FigureService._TYPE_SVG:  # SVG file from svg viewer
        dom = xml.dom.minidom.parseString(export_data)
        figureSvg = dom.getElementsByTagName('svg')[0]  # get the original image
        dom = xml.dom.minidom.parse(FigureService._BRANDING_BAR_SVG)
        brandingSvg = dom.getElementsByTagName('svg')[0]  # get the branding bar
        brandingSvg.setAttribute("y", figureSvg.getAttribute("height"))  # position it below the figure
        finalSvg = dom.createElement('svg')  # prepare the final svg
        width = figureSvg.getAttribute('width').replace('px', '')  # same width as original figure
        finalSvg.setAttribute("width", width)
        height = float(figureSvg.getAttribute('height').replace('px', ''))  # increase original height with
        height += float(brandingSvg.getAttribute('height').replace('px', ''))  # branding bar's height
        finalSvg.setAttribute("height", str(height))
        finalSvg.appendChild(figureSvg)  # add the image
        finalSvg.appendChild(brandingSvg)  # and the branding bar
        # FIX: use a context manager so the file handle is closed even if writexml raises
        # (the original open()/close() pair leaked the handle on error).
        with open(store_path, 'w') as dest:
            finalSvg.writexml(dest)  # store to disk
    operation = dao.get_operation_by_id(operation_id)
    file_name = 'TVB-%s-%s' % (operation.algorithm.name.replace(' ', '-'), operation_id)  # e.g. TVB-Algo-Name-352
    # Store entity into DB
    entity = model.ResultFigure(operation_id, user.id, project.id, FigureService._DEFAULT_SESSION_NAME,
                                file_name, file_path, img_type)
    entity = dao.store_entity(entity)
    # Load instance from DB to have lazy fields loaded
    figure = dao.load_figure(entity.id)
    # Write image meta data to disk
    self.file_helper.write_image_metadata(figure)
    # Force writing operation meta data on disk.
    # This is important later for operation import
    self.file_helper.write_operation_metadata(operation)
def retrieve_result_figures(self, project, user, selected_session_name='all_sessions'):
    """
    Retrieve from DB all the stored Displayer previews that belongs to the specified session. The
    previews are for current user and project; grouped by session.
    """
    result, previews_info = dao.get_previews(project.id, user.id, selected_session_name)
    for session_name in result:
        for figure in result[session_name]:
            # Turn the stored relative file name into a URL path for the client.
            folder = self.file_helper.get_images_folder(project.name, figure.operation.id)
            figure.file_path = utils.path2url_part(os.path.join(folder, figure.file_path))
    return result, previews_info
#.........这里部分代码省略.........