

Python opDataSelection.DatasetInfo Class Code Examples

This article collects typical usage examples of the Python class ilastik.applets.dataSelection.opDataSelection.DatasetInfo. If you have been wondering what the DatasetInfo class is for, how to use it, or what real-world usage looks like, the curated examples below should help.


A total of 15 code examples of the DatasetInfo class are shown below; by default they are sorted by popularity.
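
Before the individual examples, here is a minimal sketch of the pattern that most of them share: create a DatasetInfo, point it at a file, and hand it to the data selection operator of the current workflow. In this sketch the workflow object and the '/path/to/raw_data.h5' path are placeholders, and note that the slot name varies between ilastik versions (older code uses Dataset, newer code uses DatasetGroup), as the examples below show.

from ilastik.applets.dataSelection.opDataSelection import DatasetInfo

# Describe the input file
info = DatasetInfo()
info.filePath = '/path/to/raw_data.h5'   # placeholder path

# Hand the info to the workflow's data selection operator
opDataSelection = workflow.dataSelectionApplet.topLevelOperator
opDataSelection.Dataset.resize(1)        # older API; newer versions use DatasetGroup
opDataSelection.Dataset[0].setValue(info)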

Example 1: debug_with_new

def debug_with_new(shell, workflow):
    """
    (Function for debug and testing.)
    """
    projFilePath = "/magnetic/test_watershed_project.ilp"

    # New project
    shell.createAndLoadNewProject(projFilePath)

    # Add a file
    from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
    info = DatasetInfo()
    #info.filePath = '/magnetic/gigacube.h5'
    #info.filePath = '/magnetic/synapse_small.npy'
    info.filePath = '/magnetic/synapse_small.npy_results.h5'
    #info.filePath = '/magnetic/singleslice.h5'
    opDataSelection = workflow.dataSelectionApplet.topLevelOperator
    opDataSelection.Dataset.resize(1)
    opDataSelection.Dataset[0].setValue(info)

    # Select the watershed drawer
    shell.setSelectedAppletDrawer(1)

    # Save the project
    shell.onSaveProjectActionTriggered()
Author: fblumenthal, Project: ilastik, Lines: 25, Source: vigraWatershedWorkflowMain.py

Example 2: debug_with_new

def debug_with_new(shell):
    """
    (Function for debug and testing.)
    """
    #projFilePath = "/magnetic/synapse_debug_data/object_prediction.ilp"
    projFilePath = "/magnetic/stuart_object_predictions.ilp"

    # New project
    shell.createAndLoadNewProject(projFilePath)
    workflow = shell.projectManager.workflow

    # Add a file
    from ilastik.applets.dataSelection.opDataSelection import DatasetInfo

    rawInfo = DatasetInfo()
    #rawInfo.filePath = '/magnetic/synapse_debug_data/block256.h5/cube'
    #rawInfo.filePath = '/magnetic/synapse_small_4d.h5/volume/data'
    rawInfo.filePath = '/magnetic/validation_slices_20_40_3200_4000_1200_2000.h5/volume/data'
    opRawDataSelection = workflow.rawDataSelectionApplet.topLevelOperator
    opRawDataSelection.Dataset.resize(1)
    opRawDataSelection.Dataset[0].setValue(rawInfo)

    predictionInfo = DatasetInfo()
    #predictionInfo.filePath = '/magnetic/synapse_debug_data/block256_spots_predictions.h5/cube'
    #predictionInfo.filePath = '/magnetic/synapse_small_4d_synapse_predictions.h5/volume/data'
    predictionInfo.filePath = '/magnetic/validation_slices_20_40_3200_4000_1200_2000_pred.h5/volume/data'
    opPredDataSelection = workflow.predictionSelectionApplet.topLevelOperator
    opPredDataSelection.Dataset.resize(1)
    opPredDataSelection.Dataset[0].setValue(predictionInfo)

    # Select the feature drawer
    shell.setSelectedAppletDrawer(2)
Author: fblumenthal, Project: ilastik, Lines: 32, Source: synapseBlockwiseGui.py

Example 3: impl

        def impl():
            projFilePath = self.PROJECT_FILE
            shell = self.shell

            # New project
            shell.createAndLoadNewProject(projFilePath, self.workflowClass())
            workflow = shell.projectManager.workflow

            from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
            opDataSelection = workflow.dataSelectionApplet.topLevelOperator
            for i, dataFile in enumerate(self.SAMPLE_DATA):
                # Add a file
                info = DatasetInfo()

                info.filePath = dataFile


                opDataSelection.DatasetGroup.resize(i+1)
                opDataSelection.DatasetGroup[i][0].setValue(info)

            # Set some features
            opFeatures = workflow.featureSelectionApplet.topLevelOperator
            #                    sigma:   0.3    0.7    1.0    1.6    3.5    5.0   10.0
            selections = numpy.array( [[True, False, False, False, False, False, False],
                                       [True, False, False, False, False, False, False],
                                       [True, False, False, False, False, False, False],
                                       [False, False, False, False, False, False, False],
                                       [False, False, False, False, False, False, False],
                                       [False, False, False, False, False, False, False]] )
            opFeatures.SelectionMatrix.setValue(selections)
Author: ilastik, Project: ilastik, Lines: 30, Source: testObjectCountingGuiSelectingTools.py

Example 4: _get_template_dataset_infos

    def _get_template_dataset_infos(self, input_axes=None):
        """
        Sometimes the default settings for an input file are not suitable (e.g. the axistags need to be changed).
        We assume the LAST non-batch input in the workflow has settings that will work for all batch processing inputs.
        Here, we get the DatasetInfo objects from that lane and store them as 'templates' to modify for all batch-processing files.
        """
        template_infos = {}

        # If there isn't an available dataset to use as a template
        if len(self.dataSelectionApplet.topLevelOperator.DatasetGroup) == 0:
            num_roles = len(self.dataSelectionApplet.topLevelOperator.DatasetRoles.value)
            for role_index in range(num_roles):
                template_infos[role_index] = DatasetInfo()
                template_infos[role_index].axistags = vigra.defaultAxistags(input_axes)
            return template_infos

        # Use the LAST non-batch input file as our 'template' for DatasetInfo settings (e.g. axistags)
        template_lane = len(self.dataSelectionApplet.topLevelOperator.DatasetGroup) - 1
        opDataSelectionTemplateView = self.dataSelectionApplet.topLevelOperator.getLane(template_lane)

        for role_index, info_slot in enumerate(opDataSelectionTemplateView.DatasetGroup):
            if info_slot.ready():
                template_infos[role_index] = info_slot.value
            else:
                template_infos[role_index] = DatasetInfo()
            if input_axes:
                # Support the --input_axes arg to override input axis order, same as DataSelection applet.
                template_infos[role_index].axistags = vigra.defaultAxistags(input_axes)
        return template_infos
Author: sc65, Project: ilastik, Lines: 29, Source: batchProcessingApplet.py
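
As the docstring above explains, the returned infos serve as templates that are later modified for each batch-processing file. The following is a hypothetical sketch of that step; the make_batch_info helper and the batch_path variable are illustrative and not taken from batchProcessingApplet.py.

import copy

def make_batch_info(template_info, batch_path):
    # Copy the per-role template so settings such as axistags carry over,
    # then point the copy at the batch-processing file.
    info = copy.copy(template_info)
    info.filePath = batch_path
    return info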

Example 5: test_fake_data_source

    def test_fake_data_source(self):
        graph = lazyflow.graph.Graph()
        reader = OperatorWrapper(OpDataSelection, graph=graph,
                                 operator_kwargs={'forceAxisOrder': False})
        reader.ProjectFile.setValue(self.projectFile)
        reader.WorkingDirectory.setValue(os.getcwd())
        reader.ProjectDataGroup.setValue('DataSelection/local_data')

        info = DatasetInfo()
        # Will be read from the filesystem since the data won't be found in the project file.
        info.location = DatasetInfo.Location.ProjectInternal
        info.filePath = self.testRawDataFileName
        info.internalPath = ""
        info.invertColors = False
        info.convertToGrayscale = False
        # Use *fake* data source
        info.realDataSource = False
        info.axistags = vigra.defaultAxistags('tczyx')
        info.laneShape = self.imgData.shape
        info.laneDtype = self.imgData.dtype

        reader.Dataset.setValues([info])

        # Verify that the data selection operator now returns fake data
        # with the expected shape and type
        imgData = reader.Image[0][...].wait()

        assert imgData.shape == self.imgData.shape
        assert imgData.dtype == self.imgData.dtype
        expected_fake_data = numpy.zeros(info.laneShape, dtype=info.laneDtype)
        numpy.testing.assert_array_equal(imgData, expected_fake_data)
Author: ilastik, Project: ilastik, Lines: 31, Source: testOpDataSelection.py

Example 6: testBasic3DstacksFromFileList

    def testBasic3DstacksFromFileList(self):
        for ext, fileNames in list(self.imgFileLists2D.items()):
            fileNameString = os.path.pathsep.join(fileNames)
            graph = lazyflow.graph.Graph()
            reader = OperatorWrapper(OpDataSelection, graph=graph, operator_kwargs={'forceAxisOrder': False})
            reader.ProjectFile.setValue(self.projectFile)
            reader.WorkingDirectory.setValue(os.getcwd())
            reader.ProjectDataGroup.setValue('DataSelection/local_data')

            info = DatasetInfo(filepath=fileNameString)
            # Will be read from the filesystem since the data won't be found in the project file.
            info.location = DatasetInfo.Location.ProjectInternal
            info.internalPath = ""
            info.invertColors = False
            info.convertToGrayscale = False

            reader.Dataset.setValues([info])

            # Read the test files using the data selection operator and verify the contents
            imgData3D = reader.Image[0][...].wait()

            # Check raw images
            assert imgData3D.shape == self.imgData3D.shape, (imgData3D.shape, self.imgData3D.shape)
            # skip this if image was saved compressed:
            if any(x.strip('.') in ext.lower() for x in self.compressedExtensions):
                print("Skipping raw comparison for compressed data: {}".format(ext))
                continue
            numpy.testing.assert_array_equal(imgData3D, self.imgData3D)
Author: ilastik, Project: ilastik, Lines: 28, Source: testOpDataSelection.py

Example 7: test_load_single_file_with_list

    def test_load_single_file_with_list(self):
        graph = lazyflow.graph.Graph()
        reader = OperatorWrapper(OpDataSelection, graph=graph, operator_kwargs={'forceAxisOrder': False})
        reader.ProjectFile.setValue(self.projectFile)
        reader.WorkingDirectory.setValue(os.getcwd())
        reader.ProjectDataGroup.setValue('DataSelection/local_data')

        fileNameString = os.path.pathsep.join(self.file_names)
        info = DatasetInfo(filepath=fileNameString)
        # Will be read from the filesystem since the data won't be found in the project file.
        info.location = DatasetInfo.Location.ProjectInternal
        info.internalPath = ""
        info.invertColors = False
        info.convertToGrayscale = False

        reader.Dataset.setValues([info])

        # Read the test files using the data selection operator and verify the contents
        imgData = reader.Image[0][...].wait()
        print('imgData', reader.Image.meta.axistags, reader.Image.meta.original_axistags)

        # Check raw images
        assert imgData.shape == self.imgData3Dct.shape, (imgData.shape, self.imgData3Dct.shape)

        numpy.testing.assert_array_equal(imgData, self.imgData3Dct)
Author: ilastik, Project: ilastik, Lines: 25, Source: testOpDataSelection.py

Example 8: impl

 def impl():
     projFilePath = self.PROJECT_FILE
 
     shell = self.shell
     workflow = self.workflow
     
     # New project
     shell.createAndLoadNewProject(projFilePath)
 
     # Add a file
     from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
     info = DatasetInfo()
     info.filePath = self.SAMPLE_DATA
     opDataSelection = workflow.dataSelectionApplet.topLevelOperator
     opDataSelection.Dataset.resize(1)
     opDataSelection.Dataset[0].setValue(info)
     
     # Set some features
     featureGui = workflow.featureSelectionApplet.gui
     opFeatures = workflow.featureSelectionApplet.topLevelOperator
     #                    sigma:   0.3    0.7    1.0    1.6    3.5    5.0   10.0
     selections = numpy.array( [[True, False, False, False, False, False, False],
                                [True, False, False, False, False, False, False],
                                [True, False, False, False, False, False, False],
                                [False, False, False, False, False, False, False],
                                [False, False, False, False, False, False, False],
                                [False, False, False, False, False, False, False]] )
     opFeatures.SelectionMatrix.setValue(selections)
 
     # Save and close
     shell.projectManager.saveProject()
     shell.ensureNoCurrentProject(assertClean=True)
Author: kemaleren, Project: ilastik, Lines: 32, Source: testPixelClassificationGui.py

Example 9: impl

        def impl():
            projFilePath = self.PROJECT_FILE
            shell = self.shell
            
            # New project
            shell.createAndLoadNewProject(projFilePath)
            workflow = shell.projectManager.workflow

            from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
            opDataSelection = workflow.dataSelectionApplet.topLevelOperator
            for i, dataFile in enumerate(self.SAMPLE_DATA):        
                # Add a file
                info = DatasetInfo()
                info.filePath = dataFile
                opDataSelection.Dataset.resize(i+1)
                opDataSelection.Dataset[i].setValue(info)
            
            # Set some features
            opFeatures = workflow.featureSelectionApplet.topLevelOperator
            opFeatures.FeatureIds.setValue( OpPixelFeaturesPresmoothed.DefaultFeatureIds )
            opFeatures.Scales.setValue( [0.3, 0.7, 1, 1.6, 3.5, 5.0, 10.0] )
            #                    sigma:   0.3    0.7    1.0    1.6    3.5    5.0   10.0
            selections = numpy.array( [[True, False, False, False, False, False, False],
                                       [True, False, False, False, False, False, False],
                                       [True, False, False, False, False, False, False],
                                       [False, False, False, False, False, False, False],
                                       [False, False, False, False, False, False, False],
                                       [False, False, False, False, False, False, False]] )
            opFeatures.SelectionMatrix.setValue(selections)
      
            # Save and close
            shell.projectManager.saveProject()
            shell.ensureNoCurrentProject(assertClean=True)
Author: fblumenthal, Project: ilastik, Lines: 33, Source: testPixelClassificationMultiImageGui.py

Example 10: impl

        def impl():
            projFilePath = self.PROJECT_FILE
        
            shell = self.shell
            
            # New project
            shell.createAndLoadNewProject(projFilePath)
            workflow = shell.projectManager.workflow
        
            # Add a file
            from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
            info = DatasetInfo()
            info.filePath = self.SAMPLE_DATA
            opDataSelection = workflow.dataSelectionApplet.topLevelOperator
            opDataSelection.Dataset.resize(1)
            opDataSelection.Dataset[0].setValue(info)
            
            # Set some features
            opFeatures = workflow.featureSelectionApplet.topLevelOperator
            opFeatures.FeatureIds.setValue( OpPixelFeaturesPresmoothed.DefaultFeatureIds )
            opFeatures.Scales.setValue( [0.3, 0.7, 1, 1.6, 3.5, 5.0, 10.0] )
            #                    sigma:   0.3    0.7    1.0    1.6    3.5    5.0   10.0
            selections = numpy.array( [[True, True, True, True, True, True, False],
                                       [True, True, True, True, True, True, False],
                                       [True, True, True, True, True, True, False],
                                       [True, True, True, True, True, True, False],
                                       [True, True, True, True, True, True, False],
                                       [True, True, True, True, True, True, False]] )

            opFeatures.SelectionMatrix.setValue(selections)
Author: JaimeIvanCervantes, Project: ilastik, Lines: 30, Source: testPixelClassificationBenchmarking.py

Example 11: testWeirdAxisInfos

    def testWeirdAxisInfos(self):
        """
        If we add a dataset that has the channel axis in the wrong place, 
        the operator should automatically transpose it to be last.
        """
        weirdAxisFilename = os.path.join(self.workingDir, 'WeirdAxes.npy')
        expected_data = numpy.random.random( (3,100,100) )
        numpy.save(weirdAxisFilename, expected_data)

        info = DatasetInfo()
        info.filePath = weirdAxisFilename
        info.axistags = vigra.defaultAxistags('cxy')
        
        graph = Graph()
        op = OpDataSelectionGroup(graph=graph, forceAxisOrder=False)
        op.WorkingDirectory.setValue( self.workingDir )
        op.DatasetRoles.setValue( ['RoleA'] )

        op.DatasetGroup.resize( 1 )
        op.DatasetGroup[0].setValue( info )

        assert op.ImageGroup[0].ready()
        
        data_from_op = op.ImageGroup[0][:].wait()
        
        assert data_from_op.dtype == expected_data.dtype 
        assert data_from_op.shape == expected_data.shape, (data_from_op.shape, expected_data.shape)
        assert (data_from_op == expected_data).all()

        # op.Image is a synonym for op.ImageGroup[0]
        assert op.Image.ready()
        assert (op.Image[:].wait() == expected_data).all()
        
        # Ensure that files opened by the inner operators are closed before we exit.
        op.DatasetGroup.resize(0)
Author: ilastik, Project: ilastik, Lines: 35, Source: testOpDataSelectionGroup.py

Example 12: create_new_tst_project

    def create_new_tst_project(cls):
        # Instantiate 'shell'
        shell = HeadlessShell(  )
        
        # Create a blank project file and load it.
        newProjectFilePath = cls.PROJECT_FILE
        newProjectFile = ProjectManager.createBlankProjectFile(newProjectFilePath, PixelClassificationWorkflow, [])
        newProjectFile.close()
        shell.openProjectFile(newProjectFilePath)
        workflow = shell.workflow
        
        # Add a file
        from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
        info = DatasetInfo()
        info.filePath = cls.SAMPLE_DATA
        opDataSelection = workflow.dataSelectionApplet.topLevelOperator
        opDataSelection.DatasetGroup.resize(1)
        opDataSelection.DatasetGroup[0][0].setValue(info)
        
        
        # Set some features
        ScalesList = [0.3, 0.7, 1, 1.6, 3.5, 5.0, 10.0]    
        FeatureIds = [ 'GaussianSmoothing',
                       'LaplacianOfGaussian',
                       'StructureTensorEigenvalues',
                       'HessianOfGaussianEigenvalues',
                       'GaussianGradientMagnitude',
                       'DifferenceOfGaussians' ]

        opFeatures = workflow.featureSelectionApplet.topLevelOperator
        opFeatures.Scales.setValue( ScalesList )
        opFeatures.FeatureIds.setValue( FeatureIds )

        #                    sigma:   0.3    0.7    1.0    1.6    3.5    5.0   10.0
        selections = numpy.array( [[True, False, False, False, False, False, False],
                                   [True, False, False, False, False, False, False],
                                   [True, False, False, False, False, False, False],
                                   [False, False, False, False, False, False, False],
                                   [False, False, False, False, False, False, False],
                                   [False, False, False, False, False, False, False]] )
        opFeatures.SelectionMatrix.setValue(selections)
    
        # Add some labels directly to the operator
        opPixelClass = workflow.pcApplet.topLevelOperator

        opPixelClass.LabelNames.setValue(['Label 1', 'Label 2'])

        slicing1 = sl[0:1,0:10,0:10,0:1,0:1]
        labels1 = 1 * numpy.ones(slicing2shape(slicing1), dtype=numpy.uint8)
        opPixelClass.LabelInputs[0][slicing1] = labels1

        slicing2 = sl[0:1,0:10,10:20,0:1,0:1]
        labels2 = 2 * numpy.ones(slicing2shape(slicing2), dtype=numpy.uint8)
        opPixelClass.LabelInputs[0][slicing2] = labels2

        # Save and close
        shell.projectManager.saveProject()
        del shell
Author: JensNRAD, Project: ilastik_public, Lines: 58, Source: testPixelClassificationHeadless.py

Example 13: testCreateExportDirectory

    def testCreateExportDirectory(self):
        """
        Test that the batch operator can create the export directory if it doesn't exist yet.
        """
        # Start by writing some test data to disk.
        self.testData = numpy.random.random((1,10,10,10,1))
        numpy.save(self.testDataFileName, self.testData)

        cwd = os.getcwd()
        info = DatasetInfo()
        info.filePath = os.path.join(cwd, 'NpyTestData.npy')
        
        graph = Graph()
        opBatchIo = OpBatchIo(graph=graph)
        opInput = OpInputDataReader(graph=graph)
        opInput.FilePath.setValue( info.filePath )
        
        # Our test "processing pipeline" is just a smoothing operator.
        opSmooth = OpGaussianSmoothing(graph=graph)
        opSmooth.Input.connect( opInput.Output )
        opSmooth.sigma.setValue(3.0)
        
        exportDir = os.path.join(cwd, 'exported_data')
        opBatchIo.ExportDirectory.setValue( exportDir )
        opBatchIo.Suffix.setValue( '_smoothed' )
        opBatchIo.Format.setValue( ExportFormat.H5 )
        opBatchIo.DatasetPath.setValue( info.filePath )
        opBatchIo.WorkingDirectory.setValue( cwd )
        
        internalPath = 'path/to/data'
        opBatchIo.InternalPath.setValue( internalPath )
        
        opBatchIo.ImageToExport.connect( opSmooth.Output )
        
        dirty = opBatchIo.Dirty.value
        assert dirty == True
        
        outputPath = opBatchIo.OutputDataPath.value
        assert outputPath == os.path.join(exportDir, 'NpyTestData_smoothed.h5', internalPath)
        
        result = opBatchIo.ExportResult.value
        assert result
        
        dirty = opBatchIo.Dirty.value
        assert dirty == False
        
        # Check the file
        smoothedPath = PathComponents(outputPath).externalPath
        with h5py.File(smoothedPath, 'r') as f:
            assert internalPath in f
            assert f[internalPath].shape == self.testData.shape
        try:
            os.remove(smoothedPath)
            os.rmdir(exportDir)
        except:
            pass
Author: bheuer, Project: ilastik, Lines: 56, Source: testOpBatchIo.py

Example 14: basicImpl

 def basicImpl(self):
     cwd = os.getcwd()
     info = DatasetInfo()
     info.filePath = os.path.join(cwd, self.testDataFileName)
     
     graph = Graph()
     opBatchIo = OpBatchIo(graph=graph)
     opInput = OpInputDataReader(graph=graph)
     opInput.FilePath.setValue( info.filePath )
     
     # Our test "processing pipeline" is just a smoothing operator.
     opSmooth = OpGaussianSmoothing(graph=graph)
     opSmooth.Input.connect( opInput.Output )
     opSmooth.sigma.setValue(3.0)
     
     opBatchIo.ExportDirectory.setValue( '' )
     opBatchIo.Suffix.setValue( '_smoothed' )
     opBatchIo.Format.setValue( ExportFormat.H5 )
     opBatchIo.DatasetPath.setValue( info.filePath )
     opBatchIo.WorkingDirectory.setValue( cwd )
     
     internalPath = 'path/to/data'
     opBatchIo.InternalPath.setValue( internalPath )
     
     opBatchIo.ImageToExport.connect( opSmooth.Output )
     
     dirty = opBatchIo.Dirty.value
     assert dirty == True
     
     outputPath = opBatchIo.OutputDataPath.value
     assert outputPath == os.path.join(cwd, 'NpyTestData_smoothed.h5/' + internalPath)
     
     result = opBatchIo.ExportResult.value
     assert result
     
     dirty = opBatchIo.Dirty.value
     assert dirty == False
     
     # Check the file
     smoothedPath = os.path.join(cwd, 'NpyTestData_smoothed.h5')
     with h5py.File(smoothedPath, 'r') as f:
         assert internalPath in f
         assert f[internalPath].shape == self.expectedDataShape
         assert (f[internalPath][:] == opSmooth.Output[:].wait()).all()
     try:
         os.remove(smoothedPath)
     except:
         pass
     
     # Check the exported image
     assert ( opBatchIo.ExportedImage[:].wait() == opSmooth.Output[:].wait() ).all()
Author: bheuer, Project: ilastik, Lines: 51, Source: testOpBatchIo.py

Example 15: loadProject

 def loadProject(shell, workflow):
     if not os.path.exists(projectFilename):
         shell.createAndLoadNewProject(projectFilename)
     else:
         shell.openProjectFile(projectFilename)
     workflow.setCarvingGraphFile(carvingGraphFilename)
     # Add a file
     from ilastik.applets.dataSelection.opDataSelection import DatasetInfo
     info = DatasetInfo()
     info.filePath = carvingGraphFilename + "/graph/raw"
     opDataSelection = workflow.dataSelectionApplet.topLevelOperator
     opDataSelection.Dataset.resize(1)
     opDataSelection.Dataset[0].setValue(info)
     shell.setSelectedAppletDrawer(2)
Author: LimpingTwerp, Project: applet-workflows, Lines: 14, Source: carving.py


Note: The ilastik.applets.dataSelection.opDataSelection.DatasetInfo class examples in this article were compiled from open-source code hosted on GitHub, MSDocs, and similar platforms. The snippets are taken from open-source projects contributed by their original authors, and copyright remains with those authors; consult each project's license before redistributing or reusing the code. Please do not republish this article without permission.