本文整理汇总了Python中tvb.core.entities.file.files_helper.FilesHelper.update_operation_metadata方法的典型用法代码示例。如果您正苦于以下问题:Python FilesHelper.update_operation_metadata方法的具体用法?Python FilesHelper.update_operation_metadata怎么用?Python FilesHelper.update_operation_metadata使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类tvb.core.entities.file.files_helper.FilesHelper的用法示例。
在下文中一共展示了FilesHelper.update_operation_metadata方法的2个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: FilesHelperTest
# 需要导入模块: from tvb.core.entities.file.files_helper import FilesHelper [as 别名]
# 或者: from tvb.core.entities.file.files_helper.FilesHelper import update_operation_metadata [as 别名]
#.........这里部分代码省略.........
self.assertTrue(os.path.exists(expected_file))
project_meta = XMLReader(expected_file).read_metadata()
loaded_project = model.Project(None, None)
loaded_project.from_dict(project_meta, self.test_user.id)
self.assertEqual(self.test_project.name, loaded_project.name)
self.assertEqual(self.test_project.description, loaded_project.description)
self.assertEqual(self.test_project.gid, loaded_project.gid)
expected_dict = self.test_project.to_dict()[1]
del expected_dict['last_updated']
found_dict = loaded_project.to_dict()[1]
del found_dict['last_updated']
self.assertDictContainsSubset(expected_dict, found_dict)
self.assertDictContainsSubset(found_dict, expected_dict)
def test_write_operation_metadata(self):
    """
    Test that a correct XML metadata file is created for an operation,
    and that it can afterwards be updated through update_operation_metadata.
    """
    operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
    expected_file = self.files_helper.get_operation_meta_file_path(self.PROJECT_NAME, operation.id)
    self.assertFalse(os.path.exists(expected_file))
    self.files_helper.write_operation_metadata(operation)
    self.assertTrue(os.path.exists(expected_file))
    # Round-trip: read the XML back and rebuild an Operation entity from it.
    operation_meta = XMLReader(expected_file).read_metadata()
    loaded_operation = model.Operation(None, None, None, None)
    loaded_operation.from_dict(operation_meta, dao)
    expected_dict = operation.to_dict()[1]
    found_dict = loaded_operation.to_dict()[1]
    # Compare as strings: the XML round-trip does not preserve value types.
    # NOTE: .items() instead of the Python-2-only .iteritems(), so this test
    # also runs under Python 3 (the file already uses six elsewhere).
    for key, value in expected_dict.items():
        self.assertEqual(str(value), str(found_dict[key]))
    # Now validate that operation metaData can be also updated
    self.assertNotEqual("new_group_name", found_dict['user_group'])
    self.files_helper.update_operation_metadata(self.PROJECT_NAME, "new_group_name", operation.id)
    found_dict = XMLReader(expected_file).read_metadata()
    self.assertEqual("new_group_name", found_dict['user_group'])
def test_remove_dt_happy_flow(self):
    """
    Happy flow for removing a file related to a DataType.
    """
    folder_path = self.files_helper.get_project_folder(self.test_project, "42")
    datatype = MappedType()
    datatype.storage_path = folder_path
    # Create an empty storage file and close the handle immediately: the
    # original left the file object dangling, which leaks a descriptor and
    # can make the removal below fail on platforms that lock open files.
    open(datatype.get_storage_file_path(), 'w').close()
    self.assertTrue(os.path.exists(datatype.get_storage_file_path()), "Test file was not created!")
    self.files_helper.remove_datatype(datatype)
    self.assertFalse(os.path.exists(datatype.get_storage_file_path()), "Test file was not deleted!")
def test_remove_dt_non_existent(self):
    """
    Removing a DataType that has no H5 file on disk should
    complete without raising.
    """
    project_folder = self.files_helper.get_project_folder(self.test_project, "42")
    missing_datatype = MappedType()
    missing_datatype.storage_path = project_folder
    storage_file = missing_datatype.get_storage_file_path()
    # Precondition: the backing file really is absent.
    self.assertFalse(os.path.exists(storage_file))
    self.files_helper.remove_datatype(missing_datatype)
def test_move_datatype(self):
"""
Make sure associated H5 file is moved to a correct new location.
示例2: __init__
# 需要导入模块: from tvb.core.entities.file.files_helper import FilesHelper [as 别名]
# 或者: from tvb.core.entities.file.files_helper.FilesHelper import update_operation_metadata [as 别名]
#.........这里部分代码省略.........
group = dao.get_generic_entity(model.OperationGroup, new_data[CommonDetails.CODE_OPERATION_GROUP_ID])
if group and len(group) > 0 and new_group_name != group[0].name:
group = group[0]
exists_group = dao.get_generic_entity(model.OperationGroup, new_group_name, 'name')
if exists_group:
raise StructureException("Group '" + new_group_name + "' already exists.")
group.name = new_group_name
dao.store_entity(group)
else:
operation = dao.get_operation_by_id(datatype.fk_from_operation)
operation.user_group = new_group_name
dao.store_entity(operation)
# 2. Update dateType fields:
datatype.subject = new_data[DataTypeOverlayDetails.DATA_SUBJECT]
datatype.state = new_data[DataTypeOverlayDetails.DATA_STATE]
if DataTypeOverlayDetails.DATA_TAG_1 in new_data:
datatype.user_tag_1 = new_data[DataTypeOverlayDetails.DATA_TAG_1]
if DataTypeOverlayDetails.DATA_TAG_2 in new_data:
datatype.user_tag_2 = new_data[DataTypeOverlayDetails.DATA_TAG_2]
if DataTypeOverlayDetails.DATA_TAG_3 in new_data:
datatype.user_tag_3 = new_data[DataTypeOverlayDetails.DATA_TAG_3]
if DataTypeOverlayDetails.DATA_TAG_4 in new_data:
datatype.user_tag_4 = new_data[DataTypeOverlayDetails.DATA_TAG_4]
if DataTypeOverlayDetails.DATA_TAG_5 in new_data:
datatype.user_tag_5 = new_data[DataTypeOverlayDetails.DATA_TAG_5]
datatype = dao.store_entity(datatype)
# 3. Update MetaData in H5 as well.
datatype.persist_full_metadata()
# 4. Update the group_name/user_group into the operation meta-data file
operation = dao.get_operation_by_id(datatype.fk_from_operation)
self.structure_helper.update_operation_metadata(operation.project.name, new_group_name,
str(datatype.fk_from_operation), from_group)
def get_datatype_and_datatypegroup_inputs_for_operation(self, operation_gid, selected_filter):
    """
    Return the dataTypes that are used as input parameters for the given operation.

    :param operation_gid: GID of the operation whose inputs are reviewed
    :param selected_filter: a visibility filter; when it is the 'relevant' view,
                            only visible dataTypes are considered
    :returns: list of input dataTypes; any dataType belonging to a dataType group
              is replaced by its group, and each group appears at most once
    """
    all_datatypes = self._review_operation_inputs(operation_gid)[0]
    # Hoist the loop-invariant filter check out of the loop.
    relevant_only = selected_filter.display_name == StaticFiltersFactory.RELEVANT_VIEW
    if relevant_only:
        datatype_inputs = [datatype for datatype in all_datatypes if datatype.visible]
    else:
        datatype_inputs = list(all_datatypes)
    datatypes = []
    datatype_groups = dict()
    for data_type in datatype_inputs:
        if data_type.fk_datatype_group is None:
            datatypes.append(data_type)
        elif data_type.fk_datatype_group not in datatype_groups:
            # First dataType seen from this group: load the group once.
            dt_group = dao.get_datatype_by_id(data_type.fk_datatype_group)
            datatype_groups[data_type.fk_datatype_group] = dt_group
    # .values() works on both Python 2 and 3, so no six wrapper is needed here.
    datatypes.extend(datatype_groups.values())
    return datatypes