This article collects typical usage examples of the Python class capsul.qt_gui.widgets.PipelineDevelopperView. If you are wondering what the PipelineDevelopperView class does, how to use it, or what it looks like in real code, the curated examples below should help.
The following presents 14 code examples of the PipelineDevelopperView class, sorted by popularity by default.
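Before the examples, here is a minimal sketch of the pattern they all share: create a Qt application, instantiate a pipeline, wrap it in a PipelineDevelopperView, and run the event loop. This is a distilled illustration rather than code from any single example; the pipeline name "my_package.my_pipeline.xml" is a placeholder.

import sys

from PySide import QtGui
from capsul.process import get_process_instance
from capsul.qt_gui.widgets import PipelineDevelopperView

# Build the Qt application that hosts the view.
app = QtGui.QApplication(sys.argv)

# Instantiate a pipeline, e.g. from an XML description or a Pipeline class.
# "my_package.my_pipeline.xml" is a placeholder, not a real pipeline.
pipeline = get_process_instance("my_package.my_pipeline.xml")

# The optional flags also appear in examples 7 and 8 below.
view = PipelineDevelopperView(pipeline, show_sub_pipelines=True,
                              allow_open_controller=True)
view.show()
app.exec_()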
Example 1: test_simple_run
def test_simple_run(self):
    """ Method to test a simple 1-cpu call with the scheduler.
    """
    # Configure the environment
    study_config = StudyConfig(
        modules=[],
        use_smart_caching=True,
        output_directory=self.outdir,
        number_of_cpus=1,
        generate_logging=True,
        use_scheduler=True)

    # Create pipeline
    pipeline = get_process_instance(self.pipeline_name)
    pipeline.date_in_filename = True

    # Set pipeline input parameters
    dicom_dataset = get_sample_data("dicom")
    dcmfolder = os.path.join(self.outdir, "dicom")
    if not os.path.isdir(dcmfolder):
        os.makedirs(dcmfolder)
    shutil.copy(dicom_dataset.barre, os.path.join(dcmfolder, "heart.dcm"))
    pipeline.source_dir = dcmfolder

    # View pipeline
    if 0:
        from capsul.qt_gui.widgets import PipelineDevelopperView
        from PySide import QtGui

        app = QtGui.QApplication(sys.argv)
        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()

    # Execute the pipeline in the configured study
    study_config.run(pipeline)
Example 2: ActivationInspectorApp
class ActivationInspectorApp(Application):
    """ ActivationInspector application.
    """
    # Load some meta information
    from capsul.info import __version__ as _version
    from capsul.info import NAME as _application_name
    from capsul.info import ORGANISATION as _organisation_name

    def __init__(self, pipeline_path, record_file=None, *args, **kwargs):
        """ Method to initialize the ActivationInspectorApp class.

        Parameters
        ----------
        pipeline_path: str (mandatory)
            the name of the pipeline we want to load.
        record_file: str (optional)
            a file where the pipeline activation steps are stored.
        """
        # Inheritance
        super(ActivationInspectorApp, self).__init__(*args, **kwargs)

        # Load the pipeline
        self.pipeline = get_process_instance(pipeline_path)

        # Initialize the application
        self.record_file = record_file
        self.window = None
        self.init_window()

    def init_window(self):
        """ Method to initialize the main window.
        """
        # First set some meta information
        self.setApplicationName(self._application_name)
        self.setOrganizationName(self._organisation_name)
        self.setApplicationVersion(self._version)

        # Get the user interface description from capsul resources
        ui_file = os.path.join(
            os.path.dirname(__file__), "activation_inspector.ui")
        #ui_file = os.path.join(resources.__path__[0], "activation_inspector.ui")

        # Create and show the activation/pipeline/controller windows
        self.pipeline_window = PipelineDevelopperView(self.pipeline)
        self.controller_window = ScrollControllerWidget(self.pipeline, live=True)
        self.activation_window = ActivationInspector(
            self.pipeline, ui_file, self.record_file,
            developper_view=self.pipeline_window)
        self.pipeline_window.show()
        self.activation_window.show()
        self.controller_window.show()

        return True
Example 3: init_window
def init_window(self):
    """ Method to initialize the main window.
    """
    # First set some meta information
    self.setApplicationName(self._application_name)
    self.setOrganizationName(self._organisation_name)
    self.setApplicationVersion(self._version)

    # Get the user interface description from capsul resources
    ui_file = os.path.join(
        os.path.dirname(__file__), "activation_inspector.ui")
    #ui_file = os.path.join(resources.__path__[0], "activation_inspector.ui")

    # Create and show the activation/pipeline/controller windows
    self.pipeline_window = PipelineDevelopperView(self.pipeline)
    self.controller_window = ScrollControllerWidget(self.pipeline, live=True)
    self.activation_window = ActivationInspector(
        self.pipeline, ui_file, self.record_file,
        developper_view=self.pipeline_window)
    self.pipeline_window.show()
    self.activation_window.show()
    self.controller_window.show()

    return True
Example 4: test
    except: pass
    try:
        os.unlink(output_name)
    except: pass


def test():
    """ Function to execute unittest
    """
    suite = unittest.TestLoader().loadTestsFromTestCase(TestPipelineWithTemp)
    runtime = unittest.TextTestRunner(verbosity=2).run(suite)
    return runtime.wasSuccessful()


if __name__ == "__main__":
    print("RETURNCODE: ", test())

    if 1:
        import sys
        from soma.qt_gui.qt_backend import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        app = QtGui.QApplication(sys.argv)
        pipeline = MyPipeline()
        pipeline.input_image = '/data/file.txt'
        pipeline.output_image = '/data/output_file.txt'

        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()
        del view1
Example 5: pilot_bet
def pilot_bet(enable_display=False):
    """
    BET
    ===

    Brain extraction with FSL.

    Start to import required modules:
    """
    import os
    from mmutils.toy_datasets import get_sample_data
    from capsul.study_config import StudyConfig
    from capsul.process import get_process_instance

    """
    Study configuration
    -------------------

    We first define the working directory and guarantee this folder exists on
    the file system:
    """
    working_dir = "/volatile/nsap/catalogue/pclinfmri/fsl_bet"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)

    """
    And then define the study configuration:
    """
    study_config = StudyConfig(
        modules=["MatlabConfig", "SPMConfig", "FSLConfig", "NipypeConfig"],
        use_smart_caching=False,
        fsl_config="/etc/fsl/4.1/fsl.sh",
        use_fsl=True,
        output_directory=working_dir,
        number_of_cpus=1,
        generate_logging=True,
        use_scheduler=True)

    """
    Load the toy dataset
    --------------------

    To do so, we use the get_sample_data function to download the toy
    dataset on the local file system (here localizer data):
    """
    toy_dataset = get_sample_data("localizer")

    """
    The toy_dataset is an Enum structure with some specific elements of
    interest:

    * fmri: the functional volume.
    * anat: the structural volume.
    * TR: the repetition time.

    Processing definition
    ---------------------

    First create the
    :ref:`brain extraction pipeline <clinfmri.preproc.FslBet>` that
    defines the different steps of the processing:
    """
    pipeline = get_process_instance("clinfmri.utils.fsl_bet.xml")
    print(pipeline.get_input_spec())

    """
    It is possible to display the pipeline.
    """
    if enable_display:
        import sys
        from PySide import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        app = QtGui.QApplication(sys.argv)
        view = PipelineDevelopperView(pipeline)
        view.show()
        app.exec_()

    """
    Now we need to parametrize this pipeline:
    """
    pipeline.input_image_file = toy_dataset.anat
    pipeline.generate_binary_mask = True
    pipeline.bet_threshold = 0.5

    """
    The pipeline is now ready to be run:
    """
    study_config.run(pipeline, executer_qc_nodes=False, verbose=1)

    """
    Results
    -------

    Finally, we print the pipeline outputs:
    """
    print("\nOUTPUTS\n")
    for trait_name, trait_value in pipeline.get_outputs().items():
        print("{0}: {1}".format(trait_name, trait_value))
Example 6: pilot_new_segment
def pilot_new_segment(enable_display=False):
    """
    New Segment
    ===========

    Unified SPM segmentation: segments, bias corrects and spatially
    normalises.

    Start to import required modules:
    """
    import os
    from mmutils.toy_datasets import get_sample_data
    from capsul.study_config import StudyConfig
    from capsul.process import get_process_instance

    """
    Study configuration
    -------------------

    We first define the working directory and guarantee this folder exists on
    the file system:
    """
    working_dir = "/volatile/nsap/catalogue/pclinfmri/spm_newsegment"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)

    """
    And then define the study configuration:
    """
    study_config = StudyConfig(
        modules=["MatlabConfig", "SPMConfig", "FSLConfig", "NipypeConfig"],
        use_smart_caching=False,
        matlab_exec="/neurospin/local/bin/matlab",
        use_matlab=True,
        spm_directory="/i2bm/local/spm8",
        use_spm=True,
        output_directory=working_dir,
        number_of_cpus=1,
        generate_logging=True,
        use_scheduler=True)

    """
    Load the toy dataset
    --------------------

    To do so, we use the get_sample_data function to download the toy
    dataset on the local file system (here localizer data):
    """
    toy_dataset = get_sample_data("localizer")
    template_dataset = get_sample_data("mni_1mm")

    """
    The toy_dataset is an Enum structure with some specific elements of
    interest:

    * fmri: the functional volume.
    * anat: the structural volume.
    * TR: the repetition time.

    Processing definition
    ---------------------

    First create the
    :ref:`segmentation pipeline <clinfmri.utils.SpmNewSegment>`
    that defines the different steps of the processing:
    """
    pipeline = get_process_instance("clinfmri.utils.spm_new_segment.xml")
    print(pipeline.get_input_spec())

    """
    It is possible to display the pipeline.
    """
    if enable_display:
        import sys
        from PySide import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        app = QtGui.QApplication(sys.argv)
        view = PipelineDevelopperView(pipeline)
        view.show()
        app.exec_()

    """
    Now we need to parametrize this pipeline:
    """
    pipeline.channel_files = [toy_dataset.mean]
    pipeline.reference_volume = template_dataset.brain

    """
    The pipeline is now ready to be run:
    """
    study_config.run(pipeline, executer_qc_nodes=False, verbose=1)

    """
    Results
    -------

    Finally, we print the pipeline outputs:
    """
    print("\nOUTPUTS\n")
    for trait_name, trait_value in pipeline.get_outputs().items():
        # ... the rest of the code is omitted ...
Example 7: write_state
if 0:
    def write_state():
        state_file_name = '/tmp/state.json'
        json.dump(pipeline.pipeline_state(), open(state_file_name, 'w'))
        print('Wrote', state_file_name)

    import sys
    #from PySide import QtGui
    from soma.qt_gui import qt_backend
    qt_backend.set_qt_backend('PyQt4')
    from soma.qt_gui.qt_backend import QtGui
    from capsul.qt_gui.widgets import PipelineDevelopperView
    #from capsul.qt_gui.widgets import PipelineUserView
    from capsul.process import get_process_instance

    app = QtGui.QApplication(sys.argv)
    pipeline = get_process_instance(MainTestPipeline)
    pipeline.on_trait_change(write_state, 'selection_changed')
    view1 = PipelineDevelopperView(pipeline, show_sub_pipelines=True,
                                   allow_open_controller=True)
    view1.add_embedded_subpipeline('switch_pipeline', scale=0.7)
    view1.add_embedded_subpipeline('way1_1', scale=0.4)
    view1.add_embedded_subpipeline('way2_1', scale=0.4)
    view1.show()
    #view2 = PipelineUserView(pipeline)
    #view2.show()
    app.exec_()
    del view1
    #del view2
Example 8: test
    suite = unittest.TestLoader().loadTestsFromTestCase(TestPipeline)
    runtime = unittest.TextTestRunner(verbosity=2).run(suite)
    return runtime.wasSuccessful()


if __name__ == "__main__":
    test()

    if 1:
        from PySide import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        app = QtGui.QApplication(sys.argv)
        pipeline = MyPipeline()
        pipeline.input_image = ["toto", "tutu", "titi"]
        pipeline.dynamic_parameter = [3, 1, 4]
        pipeline.other_input = 0
        pipeline2 = pipeline.nodes["iterative"].process
        pipeline2.scene_scale_factor = 0.5
        pipeline.node_position = {'inputs': (50.0, 50.0),
                                  'iterative': (267.0, 56.0),
                                  'outputs': (1124.0, 96.0)}

        view1 = PipelineDevelopperView(pipeline, show_sub_pipelines=True,
                                       allow_open_controller=True)
        view1.add_embedded_subpipeline('iterative')
        view1.show()
        app.exec_()
        del view1
Example 9: pilot_qa_fmri
def pilot_qa_fmri():
    """
    Imports
    -------

    This code needs the 'capsul' and 'mmutils' packages in order to
    instantiate and execute the pipeline and to get a toy dataset.
    These packages are available in the 'neurospin' source list or on PyPI.
    """
    # Capsul import
    from capsul.study_config.study_config import StudyConfig
    from capsul.process.loader import get_process_instance

    # Mmutils import
    from mmutils.toy_datasets import get_sample_data

    """
    Parameters
    ----------

    The 'pipeline_name' parameter contains the location of the pipeline XML
    description that will perform the quality assurance, and 'outdir' the
    location of the pipeline's results: in this case a temporary directory.
    """
    pipeline_name = "mmqa.fmri.fmri_quality_assurance_bbox.xml"
    outdir = tempfile.mkdtemp()

    """
    Capsul configuration
    --------------------

    A 'StudyConfig' has to be instantiated in order to execute the pipeline
    properly. It enables us to define the results directory through the
    'output_directory' attribute, the number of CPUs to be used through the
    'number_of_cpus' attribute, and to specify that we want a log of the
    processing steps through 'generate_logging'. The 'use_scheduler' option
    must be set to True if more than 1 CPU is used.
    """
    study_config = StudyConfig(
        number_of_cpus=1,
        generate_logging=True,
        use_scheduler=True,
        output_directory=outdir)

    """
    Get the toy dataset
    -------------------

    The toy dataset is composed of a functional image that is downloaded
    if necessary through the 'get_sample_data' function and exported
    locally.
    """
    localizer_dataset = get_sample_data("localizer_extra")

    """
    Pipeline definition
    -------------------

    The pipeline XML description is first imported through the
    'get_process_instance' method, and the resulting pipeline instance is
    parametrized: in this example we set the functional image, the
    repetition time, the region-of-interest size, and the score output
    file.
    """
    pipeline = get_process_instance(pipeline_name)
    pipeline.image_file = localizer_dataset.fmri
    pipeline.repetition_time = 2.0
    pipeline.exclude_volume = []
    pipeline.roi_size = 21
    pipeline.score_file = os.path.join(outdir, "scores.json")

    """
    Pipeline representation
    -----------------------

    By executing this block of code, a pipeline representation can be
    displayed. This representation is composed of boxes connected to each
    other.
    """
    if 0:
        from capsul.qt_gui.widgets import PipelineDevelopperView
        from PySide import QtGui

        app = QtGui.QApplication(sys.argv)
        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()

    """
    Pipeline execution
    ------------------

    Finally the pipeline is executed in the defined 'study_config'.
    """
    study_config.run(pipeline)

    """
    Access the result
    -----------------
    # ... the rest of the code is omitted ...
Example 10: get_process_instance
"""
Processing definition: create the <clinfmri.preproc.FmriPreproc> pipeline
that defines the different steps of the processing.
"""
pipeline = get_process_instance("clinfmri.preproc.fmri_preproc.xml")

"""
It is possible to display the pipeline.
"""
if args.display:
    import sys
    from PySide import QtGui
    from capsul.qt_gui.widgets import PipelineDevelopperView

    app = QtGui.QApplication(sys.argv)
    view = PipelineDevelopperView(pipeline)
    view.show()
    app.exec_()

"""
Now parametrize the pipeline.
"""
pipeline.fmri_file = funcfile
pipeline.realign_register_to_mean = True
pipeline.select_slicer = args.timings
pipeline.select_normalization = args.normalization
pipeline.force_repetition_time = args.repetition_time
pipeline.force_slice_orders = args.slice_order
pipeline.realign_wrap = [0, 1, 0]
pipeline.realign_write_wrap = [0, 1, 0]
pipeline.ref_slice = args.ref_slice
Example 11: pilot_dcm2nii
def pilot_dcm2nii():
    """
    Imports
    -------

    This code needs the 'capsul' and 'mmutils' packages in order to
    instantiate and execute the pipeline and to get a toy dataset.
    These packages are available in the 'neurospin' source list or on PyPI.
    """
    import os
    import sys
    import shutil
    import tempfile
    from capsul.study_config.study_config import StudyConfig
    from capsul.process.loader import get_process_instance
    from mmutils.toy_datasets import get_sample_data

    """
    Parameters
    ----------

    The 'pipeline_name' parameter contains the location of the pipeline XML
    description that will perform the DICOMs conversion, and 'outdir' the
    location of the pipeline's results: in this case a temporary directory.
    """
    pipeline_name = "dcmio.dcmconverter.dcm_to_nii.xml"
    outdir = tempfile.mkdtemp()

    """
    Capsul configuration
    --------------------

    A 'StudyConfig' has to be instantiated in order to execute the pipeline
    properly. It enables us to define the results directory through the
    'output_directory' attribute, the number of CPUs to be used through the
    'number_of_cpus' attribute, and to specify that we want a log of the
    processing steps through 'generate_logging'. The 'use_scheduler' option
    must be set to True if more than 1 CPU is used.
    """
    study_config = StudyConfig(
        modules=[],
        output_directory=outdir,
        number_of_cpus=1,
        generate_logging=True,
        use_scheduler=True)

    """
    Get the toy dataset
    -------------------

    The toy dataset is composed of a 3D heart dicom image that is downloaded
    if necessary through the 'get_sample_data' function and exported
    locally in a 'heart.dcm' file.
    """
    dicom_dataset = get_sample_data("dicom")
    dcmfolder = os.path.join(outdir, "dicom")
    if not os.path.isdir(dcmfolder):
        os.makedirs(dcmfolder)
    shutil.copy(dicom_dataset.barre, os.path.join(dcmfolder, "heart.dcm"))

    """
    Pipeline definition
    -------------------

    The pipeline XML description is first imported through the
    'get_process_instance' method, and the resulting pipeline instance is
    parametrized: in this example we decided to set the date in the converted
    file name and we set two DICOM directories to be converted in Nifti
    format.
    """
    pipeline = get_process_instance(pipeline_name)
    pipeline.date_in_filename = True
    pipeline.dicom_directories = [dcmfolder, dcmfolder]
    pipeline.additional_informations = [[("Provided by", "[email protected]")],
                                        [("Provided by", "[email protected]"),
                                         ("TR", "1500")]]
    pipeline.dcm_tags = [("TR", [("0x0018", "0x0080")]),
                         ("TE", [("0x0018", "0x0081")])]

    """
    Pipeline representation
    -----------------------

    By executing this block of code, a pipeline representation can be
    displayed. This representation is composed of boxes connected to each
    other.
    """
    if 0:
        from capsul.qt_gui.widgets import PipelineDevelopperView
        from PySide import QtGui

        app = QtGui.QApplication(sys.argv)
        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()

    """
    Pipeline execution
    ------------------
    # ... the rest of the code is omitted ...
Example 12: pilot_preproc_spm_fmri
def pilot_preproc_spm_fmri(enable_display=False):
    """
    FMRI preprocessings
    ===================

    Preprocessing with the SPM slice timing and a normalization to a given
    template.

    Start to import required modules:
    """
    import os
    from mmutils.toy_datasets import get_sample_data
    from capsul.study_config import StudyConfig
    from capsul.api import get_process_instance

    """
    Study configuration
    -------------------

    We first define the working directory and guarantee this folder exists on
    the file system:
    """
    working_dir = "/volatile/nsap/catalogue/pclinfmri/fmri_preproc_spm_fmri"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)

    """
    Then define the study configuration:
    """
    study_config = StudyConfig(
        modules=["MatlabConfig", "SPMConfig", "FSLConfig", "NipypeConfig"],
        use_smart_caching=False,
        fsl_config="/etc/fsl/4.1/fsl.sh",
        use_fsl=True,
        matlab_exec="/neurospin/local/bin/matlab",
        use_matlab=True,
        spm_directory="/i2bm/local/spm8",
        use_spm=True,
        output_directory=working_dir,
        number_of_cpus=1,
        generate_logging=True,
        use_scheduler=True,)

    """
    Load the toy dataset
    --------------------

    To do so, we use the get_sample_data function to download the toy
    dataset on the local file system (here localizer data):
    """
    toy_dataset = get_sample_data("localizer")
    template_dataset = get_sample_data("mni_1mm")

    """
    The toy_dataset is an Enum structure with some specific elements of
    interest:

    * fmri: the functional volume.
    * anat: the structural volume.
    * TR: the repetition time.

    Processing definition
    ---------------------

    First create the
    :ref:`fMRI preprocessing pipeline <clinfmri.preproc.FmriPreproc>` that
    defines the different steps of the processing:
    """
    pipeline = get_process_instance("clinfmri.preproc.converted_fmri_preproc")
    print(pipeline.get_input_spec())

    """
    Now we need to parametrize this pipeline:
    """
    pipeline.fmri_file = toy_dataset.fmri
    pipeline.structural_file = toy_dataset.anat
    pipeline.realign_register_to_mean = True
    pipeline.select_slicer = "spm"
    pipeline.select_normalization = "fmri"
    pipeline.template_file = template_dataset.brain
    pipeline.force_repetition_time = toy_dataset.TR
    pipeline.force_slice_orders = [index + 1 for index in range(40)]

    """
    It is possible to display the pipeline.
    """
    if enable_display:
        import sys
        from PySide import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        app = QtGui.QApplication(sys.argv)
        view = PipelineDevelopperView(pipeline)
        view.show()
        app.exec_()

    """
    The pipeline is now ready to be run:
    """
    study_config.run(pipeline, executer_qc_nodes=False, verbose=1)
    # ... the rest of the code is omitted ...
Example 13: pilot_gdti_estimation
def pilot_gdti_estimation():
    """
    Generalized diffusion tensor estimation
    =======================================
    """
    # System import
    import os
    import sys
    import datetime
    import PySide.QtGui as QtGui

    # CAPSUL import
    from capsul.qt_gui.widgets import PipelineDevelopperView
    from capsul.study_config.study_config import StudyConfig
    from capsul.process.loader import get_process_instance

    """
    Study configuration
    -------------------

    We first define the working directory and guarantee this folder exists on
    the file system:
    """
    working_dir = "/volatile/nsap/clindmri/gdti"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)

    """
    And then define the study configuration (here we activate the smart
    caching module that will be able to remember which process has already
    been processed):
    """
    study_config = StudyConfig(
        modules=["SmartCachingConfig"],
        use_smart_caching=True,
        output_directory=working_dir)

    # Create pipeline
    start_time = datetime.datetime.now()
    print("Start Pipeline Creation", start_time)
    pipeline = get_process_instance("clindmri.estimation.gdti.xml")
    print("Done in {0} seconds.".format(datetime.datetime.now() - start_time))

    # View pipeline
    if 0:
        app = QtGui.QApplication(sys.argv)
        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()
        del view1

    # Set pipeline input parameters
    pipeline.dfile = "/volatile/imagen/dmritest/000000022453/DTI/000000022453s011a1001.nii.gz"
    pipeline.bvalfile = "/volatile/imagen/dmritest/000000022453/DTI/000000022453s011a1001.bval"
    pipeline.bvecfile = "/volatile/imagen/dmritest/000000022453/DTI/000000022453s011a1001.bvec"
    pipeline.order = 2
    pipeline.odf = False
    print("Done in {0} seconds.".format(datetime.datetime.now() - start_time))

    # Execute the pipeline in the configured study
    study_config.run(pipeline, verbose=1)
Example 14: pilot_fsl_preproc
def pilot_fsl_preproc():
    """
    FSL preprocessings
    ==================
    """
    # System import
    import os
    import sys
    import datetime
    import PySide.QtGui as QtGui

    # CAPSUL import
    from capsul.qt_gui.widgets import PipelineDevelopperView
    from capsul.study_config.study_config import StudyConfig
    from capsul.process.loader import get_process_instance

    """
    Study configuration
    -------------------

    We first define the working directory and guarantee this folder exists on
    the file system:
    """
    working_dir = "/volatile/nsap/clindmri/fslpreproc"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)

    """
    And then define the study configuration (here we activate the smart
    caching module that will be able to remember which process has already
    been processed):
    """
    study_config = StudyConfig(
        modules=["SmartCachingConfig", "FSLConfig", "MatlabConfig",
                 "SPMConfig", "NipypeConfig"],
        use_smart_caching=True,
        fsl_config="/etc/fsl/4.1/fsl.sh",
        use_fsl=True,
        output_directory=working_dir)

    # Create pipeline
    start_time = datetime.datetime.now()
    print("Start Pipeline Creation", start_time)
    pipeline = get_process_instance("clindmri.preproc.fsl_preproc.xml")
    print("Done in {0} seconds.".format(datetime.datetime.now() - start_time))

    # View pipeline
    if 0:
        app = QtGui.QApplication(sys.argv)
        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()
        del view1

    # Set pipeline input parameters
    pipeline.dfile = "/volatile/imagen/dmritest/000000022453/DTI/000000022453s011a1001.nii.gz"
    pipeline.bvalfile = "/volatile/imagen/dmritest/000000022453/DTI/000000022453s011a1001.bval"
    pipeline.bvecfile = "/volatile/imagen/dmritest/000000022453/DTI/000000022453s011a1001.bvec"
    print("Done in {0} seconds.".format(datetime.datetime.now() - start_time))

    #print(pipeline.nodes["eddy"].process._nipype_interface.inputs)
    print(pipeline.nodes["eddy"].process._nipype_interface.cmdline)

    # Execute the pipeline in the configured study
    study_config.run(pipeline, verbose=1)
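The examples above all show PipelineDevelopperView as a stand-alone window, but since it is displayed with a plain .show() call it behaves like any ordinary Qt widget and can be embedded in a larger application. Below is a minimal hedged sketch of that idea, assuming only the QWidget behaviour demonstrated above; "MyPipeline" is a placeholder for any pipeline class from the examples.

import sys

from PySide import QtGui
from capsul.qt_gui.widgets import PipelineDevelopperView

app = QtGui.QApplication(sys.argv)
# "MyPipeline" is a placeholder pipeline class, as in examples 4 and 8.
pipeline = MyPipeline()

# Embed the view in a QMainWindow like any other QWidget.
window = QtGui.QMainWindow()
window.setWindowTitle("Pipeline inspector")
view = PipelineDevelopperView(pipeline, show_sub_pipelines=True)
window.setCentralWidget(view)
window.show()
app.exec_()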