This page collects typical usage examples of the Python class lazyflow.utility.PathComponents. If you are wondering what PathComponents is for and how to use it, the curated examples below may help.
In total, 15 code examples of the PathComponents class are shown below, ordered by popularity.
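Before diving into the examples, here is a minimal sketch of the core PathComponents API as the examples below use it. The input path is hypothetical, and the values shown in the comments follow the behavior implied by the examples: a path may combine an external file location with an internal HDF5 dataset path, the parsed components are writable, and totalPath() reassembles the full path.

from lazyflow.utility import PathComponents

# Hypothetical input: an hdf5 file combined with an internal dataset path.
comp = PathComponents('/home/user/data/volume.h5/exported/predictions')

print(comp.externalPath)       # '/home/user/data/volume.h5'  (the file on disk)
print(comp.externalDirectory)  # '/home/user/data'            (directory of that file)
print(comp.filenameBase)       # 'volume'                     (filename without extension)
print(comp.extension)          # '.h5'
print(comp.internalPath)       # '/exported/predictions'      (path inside the hdf5 file)

# Components are writable; totalPath() reassembles the full path.
comp.internalPath = '/exported/segmentation'
print(comp.totalPath())        # '/home/user/data/volume.h5/exported/segmentation'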
Example 1: configure_operator_with_parsed_args
def configure_operator_with_parsed_args(self, parsed_args):
    """
    Helper function for headless workflows.
    Configures this applet's top-level operator according to the settings provided in ``parsed_args``.

    :param parsed_args: Must be an ``argparse.Namespace`` as returned by :py:meth:`parse_known_cmdline_args()`.
    """
    # TODO: Support image stack inputs by checking for globstrings and converting to hdf5.
    input_paths = parsed_args.input_files

    input_infos = []
    for p in input_paths:
        info = DatasetInfo()
        info.location = DatasetInfo.Location.FileSystem

        # Convert all paths to absolute
        # (otherwise they are relative to the project file, which probably isn't what the user meant)
        comp = PathComponents(p)
        comp.externalPath = os.path.abspath(comp.externalPath)
        info.filePath = comp.totalPath()
        info.nickname = comp.filenameBase
        input_infos.append(info)

    opDataSelection = self.topLevelOperator
    opDataSelection.DatasetGroup.resize( len(input_infos) )
    for lane_index, info in enumerate(input_infos):
        # Only one dataset role in pixel classification
        opDataSelection.DatasetGroup[lane_index][0].setValue( info )
Example 2: configure_operator_with_parsed_args
def configure_operator_with_parsed_args(self, parsed_args):
    """
    Helper function for headless workflows.
    Configures this applet's top-level operator according to the settings provided in ``parsed_args``.

    :param parsed_args: Must be an ``argparse.Namespace`` as returned by :py:meth:`parse_known_cmdline_args()`.
    """
    input_paths = parsed_args.input_files

    # If the user doesn't want image stacks to be copied into the project file,
    # we generate hdf5 volumes in a temporary directory and use those files instead.
    if parsed_args.preconvert_stacks:
        import tempfile
        input_paths = self.convertStacksToH5( input_paths, tempfile.gettempdir() )

    input_infos = []
    for p in input_paths:
        info = DatasetInfo()
        info.location = DatasetInfo.Location.FileSystem

        # Convert all paths to absolute
        # (otherwise they are relative to the project file, which probably isn't what the user meant)
        comp = PathComponents(p)
        comp.externalPath = os.path.abspath(comp.externalPath)
        info.filePath = comp.totalPath()
        info.nickname = comp.filenameBase
        input_infos.append(info)

    opDataSelection = self.topLevelOperator
    opDataSelection.DatasetGroup.resize( len(input_infos) )
    for lane_index, info in enumerate(input_infos):
        # Only one dataset role in pixel classification
        opDataSelection.DatasetGroup[lane_index][0].setValue( info )
Example 3: _applyInternalPathToTempOps
def _applyInternalPathToTempOps(self, index):
    if index == -1:
        return

    newInternalPath = str( self.internalDatasetNameComboBox.currentText() )

    # Save a copy of our settings
    oldInfos = {}
    for laneIndex, op in self.tempOps.items():
        oldInfos[laneIndex] = copy.copy( op.Dataset.value )

    # Attempt to apply to all temp operators
    try:
        for laneIndex, op in self.tempOps.items():
            info = copy.copy( op.Dataset.value )
            pathComponents = PathComponents(info.filePath)
            if pathComponents.internalPath != newInternalPath:
                pathComponents.internalPath = newInternalPath
                info.filePath = pathComponents.totalPath()
                op.Dataset.setValue( info )
        self._error_fields.discard('Internal Dataset Name')
        return True
    except Exception as e:
        # Revert everything back to the previous state
        for laneIndex, op in self.tempOps.items():
            op.Dataset.setValue( oldInfos[laneIndex] )

        msg = "Could not set new internal path settings due to an exception:\n"
        msg += "{}".format( e )
        log_exception( logger, msg )
        QMessageBox.warning(self, "Error", msg)
        self._error_fields.add('Internal Dataset Name')
        return False
Example 4: handleImportLabelsAction
def handleImportLabelsAction():
    # Find the directory of the most recently opened image file
    mostRecentImageFile = PreferencesManager().get( 'DataSelection', 'recent image' )
    if mostRecentImageFile is not None:
        defaultDirectory = os.path.split(mostRecentImageFile)[0]
    else:
        defaultDirectory = os.path.expanduser('~')
    fileNames = DataSelectionGui.getImageFileNamesToOpen(self, defaultDirectory)
    fileNames = list(map(str, fileNames))

    # For now, we require a single hdf5 file
    if len(fileNames) > 1:
        QMessageBox.critical(self, "Too many files",
                             "Labels must be contained in a single hdf5 volume.")
        return
    if len(fileNames) == 0:
        # user cancelled
        return

    file_path = fileNames[0]
    internal_paths = DataSelectionGui.getPossibleInternalPaths(file_path)
    if len(internal_paths) == 0:
        QMessageBox.critical(self, "No volumes in file",
                             "Couldn't find a suitable dataset in your hdf5 file.")
        return
    if len(internal_paths) == 1:
        internal_path = internal_paths[0]
    else:
        dlg = H5VolumeSelectionDlg(internal_paths, self)
        if dlg.exec_() == QDialog.Rejected:
            return
        selected_index = dlg.combo.currentIndex()
        internal_path = str(internal_paths[selected_index])

    path_components = PathComponents(file_path)
    path_components.internalPath = str(internal_path)

    try:
        top_op = self.topLevelOperatorView
        opReader = OpInputDataReader(parent=top_op.parent)
        opReader.FilePath.setValue( path_components.totalPath() )

        # Reorder the axes
        op5 = OpReorderAxes(parent=top_op.parent)
        op5.AxisOrder.setValue( top_op.LabelInputs.meta.getAxisKeys() )
        op5.Input.connect( opReader.Output )

        # Finally, import the labels
        top_op.importLabels( top_op.current_view_index(), op5.Output )
    finally:
        op5.cleanUp()
        opReader.cleanUp()
Example 5: generateBatchPredictions
def generateBatchPredictions(workflow, batchInputPaths, batchExportDir, batchOutputSuffix, exportedDatasetName):
    """
    Compute the predictions for each of the specified batch input files,
    and export them to corresponding h5 files.
    """
    batchInputPaths = convertStacksToH5(batchInputPaths)

    batchInputInfos = []
    for p in batchInputPaths:
        info = DatasetInfo()
        info.location = DatasetInfo.Location.FileSystem

        # Convert all paths to absolute
        # (otherwise they are relative to the project file, which probably isn't what the user meant)
        comp = PathComponents(p)
        comp.externalPath = os.path.abspath(comp.externalPath)
        info.filePath = comp.totalPath()
        batchInputInfos.append(info)

    # Configure batch input operator
    opBatchInputs = workflow.batchInputApplet.topLevelOperator
    opBatchInputs.Dataset.setValues( batchInputInfos )

    # Configure batch export operator
    opBatchResults = workflow.batchResultsApplet.topLevelOperator
    opBatchResults.ExportDirectory.setValue(batchExportDir)
    opBatchResults.Format.setValue(ExportFormat.H5)
    opBatchResults.Suffix.setValue(batchOutputSuffix)
    opBatchResults.InternalPath.setValue(exportedDatasetName)
    opBatchResults.SelectedSlices.setValue([30])

    logger.info( "Exporting data to " + opBatchResults.OutputDataPath[0].value )

    # Set up progress display handling (just logging for now)
    currentProgress = [None]
    def handleProgress(percentComplete):
        if currentProgress[0] != percentComplete:
            currentProgress[0] = percentComplete
            logger.info("Batch job: {}% complete.".format(percentComplete))
    progressSignal = opBatchResults.ProgressSignal[0].value
    progressSignal.subscribe( handleProgress )

    # Make it happen!
    result = opBatchResults.ExportResult[0].value
    return result
Example 6: getPartiallyFormattedName
def getPartiallyFormattedName(self, lane_index, path_format_string):
    ''' Takes the format string for the output file, fills in the most important placeholders, and returns it '''
    raw_dataset_info = self.dataSelectionApplet.topLevelOperator.DatasetGroup[lane_index][0].value
    project_path = self.shell.projectManager.currentProjectPath
    project_dir = os.path.dirname(project_path)
    dataset_dir = PathComponents(raw_dataset_info.filePath).externalDirectory
    abs_dataset_dir = make_absolute(dataset_dir, cwd=project_dir)

    known_keys = {}
    known_keys['dataset_dir'] = abs_dataset_dir
    nickname = raw_dataset_info.nickname.replace('*', '')
    if os.path.pathsep in nickname:
        nickname = PathComponents(nickname.split(os.path.pathsep)[0]).filenameBase
    known_keys['nickname'] = nickname
    known_keys['result_type'] = self.dataExportTrackingApplet.topLevelOperator.SelectedPlugin._value

    # use partial formatting to fill in non-coordinate name fields
    partially_formatted_name = format_known_keys(path_format_string, known_keys)
    return partially_formatted_name
Example 7: getPartiallyFormattedName
def getPartiallyFormattedName(self, lane_index: int, path_format_string: str) -> str:
    ''' Takes the format string for the output file, fills in the most important placeholders, and returns it '''
    raw_dataset_info = self.topLevelOperator.RawDatasetInfo[lane_index].value
    project_path = self.topLevelOperator.WorkingDirectory.value
    dataset_dir = PathComponents(raw_dataset_info.filePath).externalDirectory
    abs_dataset_dir = make_absolute(dataset_dir, cwd=project_path)
    nickname = raw_dataset_info.nickname.replace('*', '')
    if os.path.pathsep in nickname:
        nickname = PathComponents(nickname.split(os.path.pathsep)[0]).filenameBase
    known_keys = {
        'dataset_dir': abs_dataset_dir,
        'nickname': nickname,
        'result_type': self.topLevelOperator.SelectedPlugin._value,
    }
    return format_known_keys(path_format_string, known_keys)
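Examples 6 and 7 center on format_known_keys, which performs partial formatting: placeholders whose values are already known (here {dataset_dir} and {nickname}) are substituted immediately, while unknown placeholders such as slicing coordinates are left intact for a later formatting pass. The following standalone sketch illustrates that idea; it assumes nothing about the lazyflow implementation beyond this substitute-what-you-know behavior, and the helper name and format string are hypothetical.

import string

def partial_format(fmt, known_keys):
    # Substitute only the placeholders present in known_keys;
    # re-emit every other placeholder unchanged for a later pass.
    result = []
    for literal, field, spec, conv in string.Formatter().parse(fmt):
        result.append(literal)
        if field is None:
            continue
        if field in known_keys:
            result.append(str(known_keys[field]))
        else:
            # Rebuild the untouched placeholder, including any conversion/spec.
            placeholder = '{' + field
            if conv:
                placeholder += '!' + conv
            if spec:
                placeholder += ':' + spec
            placeholder += '}'
            result.append(placeholder)
    return ''.join(result)

print(partial_format('{dataset_dir}/{nickname}_t{t_start}-{t_stop}.h5',
                     {'dataset_dir': '/abs/data', 'nickname': 'volume'}))
# -> '/abs/data/volume_t{t_start}-{t_stop}.h5'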
Example 8: append_lane
def append_lane(workflow, input_filepath, axisorder=None):
    # Sanity checks
    assert isinstance(workflow, PixelClassificationWorkflow)
    opPixelClassification = workflow.pcApplet.topLevelOperator
    assert opPixelClassification.Classifier.ready()

    # If the filepath is a globstring, convert the stack to h5
    input_filepath = DataSelectionApplet.convertStacksToH5( [input_filepath], TMP_DIR )[0]

    info = DatasetInfo()
    info.location = DatasetInfo.Location.FileSystem
    info.filePath = input_filepath

    comp = PathComponents(input_filepath)

    # Convert all (non-url) paths to absolute
    # (otherwise they are relative to the project file, which probably isn't what the user meant)
    if not isUrl(input_filepath):
        comp.externalPath = os.path.abspath(comp.externalPath)
        info.filePath = comp.totalPath()
    info.nickname = comp.filenameBase
    if axisorder:
        info.axistags = vigra.defaultAxistags(axisorder)

    logger.debug( "adding lane: {}".format( info ) )

    opDataSelection = workflow.dataSelectionApplet.topLevelOperator

    # Add a lane
    num_lanes = len( opDataSelection.DatasetGroup )+1
    logger.debug( "num_lanes: {}".format( num_lanes ) )
    opDataSelection.DatasetGroup.resize( num_lanes )

    # Configure it.
    role_index = 0 # raw data
    opDataSelection.DatasetGroup[-1][role_index].setValue( info )

    # Sanity check
    assert len(opPixelClassification.InputImages) == num_lanes

    return opPixelClassification
Example 9: append_lane
def append_lane(workflow, input_filepath, axisorder=None):
    """
    Add a lane to the project file for the given input file.

    If axisorder is given, override the default axisorder for
    the file and force the project to use the given one.

    Globstrings are supported, in which case the files are converted to HDF5 first.
    """
    # If the filepath is a globstring, convert the stack to h5
    input_filepath = DataSelectionApplet.convertStacksToH5( [input_filepath], tempfile.mkdtemp() )[0]

    info = DatasetInfo()
    info.location = DatasetInfo.Location.FileSystem
    info.filePath = input_filepath

    comp = PathComponents(input_filepath)

    # Convert all (non-url) paths to absolute
    # (otherwise they are relative to the project file, which probably isn't what the user meant)
    if not isUrl(input_filepath):
        comp.externalPath = os.path.abspath(comp.externalPath)
        info.filePath = comp.totalPath()
    info.nickname = comp.filenameBase
    if axisorder:
        info.axistags = vigra.defaultAxistags(axisorder)

    logger.debug( "adding lane: {}".format( info ) )

    opDataSelection = workflow.dataSelectionApplet.topLevelOperator

    # Add a lane
    num_lanes = len( opDataSelection.DatasetGroup )+1
    logger.debug( "num_lanes: {}".format( num_lanes ) )
    opDataSelection.DatasetGroup.resize( num_lanes )

    # Configure it.
    role_index = 0 # raw data
    opDataSelection.DatasetGroup[-1][role_index].setValue( info )
Example 10: post_process_lane_export
def post_process_lane_export(self, lane_index):
    settings, selected_features = self.trackingApplet.topLevelOperator.getLane(lane_index).get_table_export_settings()
    if settings:
        self.dataExportApplet.progressSignal.emit(0)
        raw_dataset_info = self.dataSelectionApplet.topLevelOperator.DatasetGroup[lane_index][0].value

        project_path = self.shell.projectManager.currentProjectPath
        project_dir = os.path.dirname(project_path)
        dataset_dir = PathComponents(raw_dataset_info.filePath).externalDirectory
        abs_dataset_dir = make_absolute(dataset_dir, cwd=project_dir)

        known_keys = {}
        known_keys['dataset_dir'] = abs_dataset_dir
        nickname = raw_dataset_info.nickname.replace('*', '')
        if os.path.pathsep in nickname:
            nickname = PathComponents(nickname.split(os.path.pathsep)[0]).filenameBase
        known_keys['nickname'] = nickname

        # use partial formatting to fill in non-coordinate name fields
        name_format = settings['file path']
        partially_formatted_name = format_known_keys( name_format, known_keys )
        settings['file path'] = partially_formatted_name

        req = self.trackingApplet.topLevelOperator.getLane(lane_index).export_object_data(
                lane_index,
                # FIXME: Even in non-headless mode, we can't show the gui because we're running in a non-main thread.
                #        That's not a huge deal, because there's still a progress bar for the overall export.
                show_gui=False)
        req.wait()
        self.dataExportApplet.progressSignal.emit(100)

    # Restore state of axis ranges
    parameters = self.trackingApplet.topLevelOperator.Parameters.value
    parameters['time_range'] = self.prev_time_range
    parameters['x_range'] = self.prev_x_range
    parameters['y_range'] = self.prev_y_range
    parameters['z_range'] = self.prev_z_range
Example 11: create_default_headless_dataset_info
def create_default_headless_dataset_info(cls, filepath):
    """
    filepath may be a globstring or a full hdf5 path+dataset
    """
    comp = PathComponents(filepath)
    nickname = comp.filenameBase

    # Remove globstring syntax.
    if '*' in nickname:
        nickname = nickname.replace('*', '')
    if os.path.pathsep in nickname:
        nickname = PathComponents(nickname.split(os.path.pathsep)[0]).filenameBase

    info = DatasetInfo()
    info.location = DatasetInfo.Location.FileSystem
    info.nickname = nickname
    info.filePath = filepath

    # Convert all (non-url) paths to absolute
    # (otherwise they are relative to the project file, which probably isn't what the user meant)
    if not isUrl(filepath):
        comp.externalPath = os.path.abspath(comp.externalPath)
        info.filePath = comp.totalPath()
    return info
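The globstring cleanup above matters for image-stack inputs: for a pattern such as /data/slices/img_*.png, filenameBase still contains the '*', which must not leak into nicknames used in output filenames. A small hypothetical illustration of that derivation:

from lazyflow.utility import PathComponents

# Hypothetical globstring describing an image stack.
globstring = '/data/slices/img_*.png'
nickname = PathComponents(globstring).filenameBase  # 'img_*'
nickname = nickname.replace('*', '')                # 'img_'
print(nickname)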
Example 12: setupOutputs
def setupOutputs(self):
    self.cleanupOnDiskView()

    # FIXME: If RawData becomes unready() at the same time as RawDatasetInfo(), then
    #        we have no guarantees about which one will trigger setupOutputs() first.
    #        It is therefore possible for 'RawDatasetInfo' to appear ready() to us,
    #        even though its upstream partner is UNready. We are about to get the
    #        unready() notification, but it will come too late to prevent our
    #        setupOutputs method from being called.
    #        Without proper graph setup transaction semantics, we have to use this
    #        hack as a workaround.
    try:
        rawInfo = self.RawDatasetInfo.value
    except:
        for oslot in self.outputs.values():
            if oslot.partner is None:
                oslot.meta.NOTREADY = True
        return

    selection_index = self.InputSelection.value
    if not self.Inputs[selection_index].ready():
        for oslot in self.outputs.values():
            if oslot.partner is None:
                oslot.meta.NOTREADY = True
        return
    self._opFormattedExport.Input.connect( self.Inputs[selection_index] )

    dataset_dir = PathComponents(rawInfo.filePath).externalDirectory
    abs_dataset_dir, _ = getPathVariants(dataset_dir, self.WorkingDirectory.value)
    known_keys = {}
    known_keys['dataset_dir'] = abs_dataset_dir
    nickname = rawInfo.nickname.replace('*', '')
    if '//' in nickname:
        nickname = PathComponents(nickname.split('//')[0]).filenameBase
    known_keys['nickname'] = nickname

    # Disconnect to open the 'transaction'
    if self._opImageOnDiskProvider is not None:
        self._opImageOnDiskProvider.TransactionSlot.disconnect()
    self._opFormattedExport.TransactionSlot.disconnect()

    # Blank the internal path while we manipulate the external path
    # to avoid invalid intermediate states of ExportPath
    self._opFormattedExport.OutputInternalPath.setValue( "" )

    # use partial formatting to fill in non-coordinate name fields
    name_format = self.OutputFilenameFormat.value
    partially_formatted_name = format_known_keys( name_format, known_keys )

    # Convert to absolute path before configuring the internal op
    abs_path, _ = getPathVariants( partially_formatted_name, self.WorkingDirectory.value )
    self._opFormattedExport.OutputFilenameFormat.setValue( abs_path )

    # use partial formatting on the internal dataset name, too
    internal_dataset_format = self.OutputInternalPath.value
    partially_formatted_dataset_name = format_known_keys( internal_dataset_format, known_keys )
    self._opFormattedExport.OutputInternalPath.setValue( partially_formatted_dataset_name )

    # Re-connect to finish the 'transaction'
    self._opFormattedExport.TransactionSlot.connect( self.TransactionSlot )
    if self._opImageOnDiskProvider is not None:
        self._opImageOnDiskProvider.TransactionSlot.connect( self.TransactionSlot )

    self.setupOnDiskView()
Example 13: enumerate
    return dataset_keys

if __name__ == "__main__":
    import sys
    import argparse

    #sys.argv += "/tmp/example_slice.h5/data /tmp/example_slice2.h5/data --export_drange=(0,255) --output_format=png --pipeline_result_drange=(1,2)".split()

    # Construct a parser with all the 'normal' export options, and add arg for prediction_image_paths.
    parser = DataExportApplet.make_cmdline_parser( argparse.ArgumentParser() )
    parser.add_argument("prediction_image_paths", nargs='+', help="Path(s) to your exported predictions.")
    parsed_args = parser.parse_args()
    parsed_args, unused_args = DataExportApplet.parse_known_cmdline_args( sys.argv[1:], parsed_args )

    # As a convenience, auto-determine the internal dataset path if possible.
    for index, input_path in enumerate(parsed_args.prediction_image_paths):
        path_comp = PathComponents(input_path, os.getcwd())

        if not parsed_args.output_internal_path:
            parsed_args.output_internal_path = "segmentation"

        if path_comp.extension in PathComponents.HDF5_EXTS and path_comp.internalDatasetName == "":
            with h5py.File(path_comp.externalPath, 'r') as f:
                all_internal_paths = all_dataset_internal_paths(f)

            if len(all_internal_paths) == 1:
                path_comp.internalPath = all_internal_paths[0]
                parsed_args.prediction_image_paths[index] = path_comp.totalPath()
            elif len(all_internal_paths) == 0:
                sys.stderr.write("Could not find any datasets in your input file:\n"
                                 "{}\n".format(input_path))
                sys.exit(1)
            else:
                sys.stderr.write("Found more than one dataset in your input file:\n"
Example 14: filter
    f.visit(allkeys.append)
    dataset_keys = list(filter(lambda key: isinstance(f[key], h5py.Dataset),
                               allkeys))
    return dataset_keys

if __name__ == "__main__":
    import sys
    import argparse

    # Construct a parser with all the 'normal' export options, and add arg for input_path.
    parser = DataExportApplet.make_cmdline_parser( argparse.ArgumentParser() )
    parser.add_argument("input_path", help="Path to your exported predictions.")
    parsed_args = parser.parse_args()

    # As a convenience, auto-determine the internal dataset path if possible.
    path_comp = PathComponents(parsed_args.input_path, os.getcwd())
    if path_comp.extension in PathComponents.HDF5_EXTS and path_comp.internalDatasetName == "":
        with h5py.File(path_comp.externalPath, 'r') as f:
            all_internal_paths = all_dataset_internal_paths(f)

        if len(all_internal_paths) == 1:
            path_comp.internalPath = all_internal_paths[0]
            parsed_args.input_path = path_comp.totalPath()
        elif len(all_internal_paths) == 0:
            sys.stderr.write("Could not find any datasets in your input file.")
            sys.exit(1)
        else:
            sys.stderr.write("Found more than one dataset in your input file.\n"
                             "Please specify the dataset name, e.g. /path/to/myfile.h5/internal/dataset_name")
            sys.exit(1)
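Examples 13 and 14 both call an all_dataset_internal_paths helper whose body is only partially visible here (the fragment at the top of Example 14 is its tail). Based on that fragment, a plausible self-contained reconstruction looks like the following; treat it as a sketch rather than the exact original:

import h5py

def all_dataset_internal_paths(f):
    """Return the internal paths of all datasets in an open h5py file."""
    allkeys = []
    f.visit(allkeys.append)  # collect every group/dataset key in the file
    # Keep only the keys that refer to datasets (not groups).
    dataset_keys = [key for key in allkeys
                    if isinstance(f[key], h5py.Dataset)]
    return dataset_keys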
Example 15: configure_operator_with_parsed_args
def configure_operator_with_parsed_args(self, parsed_args):
    """
    Helper function for headless workflows.
    Configures this applet's top-level operator according to the settings provided in ``parsed_args``.

    :param parsed_args: Must be an ``argparse.Namespace`` as returned by :py:meth:`parse_known_cmdline_args()`.
    """
    role_names = self.topLevelOperator.DatasetRoles.value
    role_paths = collections.OrderedDict()
    if role_names:
        for role_index, role_name in enumerate(role_names):
            arg_name = self._role_name_to_arg_name(role_name)
            input_paths = getattr(parsed_args, arg_name)
            role_paths[role_index] = input_paths

    if parsed_args.input_files:
        # We allow the file list to go to the 'default' role, but only if no other roles were explicitly configured.
        for role_index, input_paths in role_paths.items():
            if input_paths:
                # FIXME: This error message could be more helpful.
                role_args = map( self._role_name_to_arg_name, role_names )
                role_args = map( lambda s: '--' + s, role_args )
                role_args_str = ", ".join( role_args )
                raise Exception("Invalid command line arguments: All roles must be configured explicitly.\n"
                                "Use the following flags to specify which files are matched with which inputs:\n"
                                + role_args_str )
        role_paths = { 0 : parsed_args.input_files }

    for role_index, input_paths in role_paths.items():
        # If the user doesn't want image stacks to be copied into the project file,
        # we generate hdf5 volumes in a temporary directory and use those files instead.
        if parsed_args.preconvert_stacks:
            import tempfile
            input_paths = self.convertStacksToH5( input_paths, tempfile.gettempdir() )

        input_infos = []
        for p in input_paths:
            info = DatasetInfo()
            info.location = DatasetInfo.Location.FileSystem
            info.filePath = p

            comp = PathComponents(p)

            # Convert all (non-url) paths to absolute
            # (otherwise they are relative to the project file, which probably isn't what the user meant)
            if not isUrl(p):
                comp.externalPath = os.path.abspath(comp.externalPath)
                info.filePath = comp.totalPath()
            info.nickname = comp.filenameBase

            # Remove globstring syntax.
            if '*' in info.nickname:
                info.nickname = info.nickname.replace('*', '')
            if os.path.pathsep in info.nickname:
                info.nickname = PathComponents(info.nickname.split(os.path.pathsep)[0]).filenameBase
            input_infos.append(info)

        opDataSelection = self.topLevelOperator
        existing_lanes = len(opDataSelection.DatasetGroup)
        opDataSelection.DatasetGroup.resize( max(len(input_infos), existing_lanes) )
        for lane_index, info in enumerate(input_infos):
            opDataSelection.DatasetGroup[lane_index][role_index].setValue( info )

        need_warning = False
        for lane_index in range(len(input_infos)):
            output_slot = opDataSelection.ImageGroup[lane_index][role_index]
            if output_slot.meta.prefer_2d:
                need_warning = True
                break

        if need_warning:
            logger.warn("*******************************************************************************************")
            logger.warn("Some of your input data is stored in a format that is not efficient for 3D access patterns.")
            logger.warn("Performance may suffer as a result. For best performance, use a chunked HDF5 volume.")
            logger.warn("*******************************************************************************************")