当前位置: 首页>>代码示例>>Python>>正文


Python BlitzGateway.isConnected方法代码示例

本文整理汇总了Python中omero.gateway.BlitzGateway.isConnected方法的典型用法代码示例。如果您正苦于以下问题:Python BlitzGateway.isConnected方法的具体用法?Python BlitzGateway.isConnected怎么用?Python BlitzGateway.isConnected使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在omero.gateway.BlitzGateway的用法示例。


在下文中一共展示了BlitzGateway.isConnected方法的2个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。

示例1: Omg

# 需要导入模块: from omero.gateway import BlitzGateway [as 别名]
# 或者: from omero.gateway.BlitzGateway import isConnected [as 别名]
class Omg(object):
    """
    OMERO gateway that wraps Blitz gateway and CLI, intended for
    scripting and interactive work.

    Attributes
    ----------
    conn : Blitz gateway connection

    """

    def __init__(self, conn=None, user=None, passwd=None,
                 server=SERVER, port=PORT, skey=None):
        """
        Requires active Blitz connection OR username plus password or sesskey
        """
        # Need either a ready-made connection, or a username together with
        # a password or an existing session key.
        have_credentials = (user is not None and
                            (passwd is not None or skey is not None))
        if conn is None and not have_credentials:
            raise ValueError("Bad parameters," + self.__init__.__doc__)
        if conn is not None:
            # Re-use the caller's connection, but only if it is still live.
            if not conn.isConnected():
                raise ValueError("Cannot initialize with closed connection!")
            self.conn = conn
        elif passwd is not None:
            # Fresh login with username and password.
            self.conn = BlitzGateway(user, passwd, host=server, port=port)
            self.conn.connect()
        else:
            # Join an existing server session via its key.
            self.conn = BlitzGateway(user, host=server, port=port)
            self.conn.connect(skey)
        if not self.conn.isConnected():
            print("Failed to open connection :-(")
        else:
            # Cache connection details for later reference / display.
            self._server = self.conn.host
            self._port = self.conn.port
            self._user = self.conn.getUser().getName()
            self._key = self.conn.getSession().getUuid().getValue()
            print("Connected to {0} (port {1}) as {2}, session key={3}".format(
                  self._server, self._port, self._user, self._key))

    def ls(self):
        """
        Print groups, then projects/datasets/images for current group.
        """
        # Group membership first.
        print("Groups for {0}:-".format(self.conn.getUser().getName()))
        for grp_id, grp_name in self._ls_groups():
            print("  {0} ({1})".format(grp_name, str(grp_id)))
        # Then the Project -> Dataset -> Image tree for the active group.
        active_grp = self.conn.getGroupFromContext()
        grp_id, grp_name = active_grp.getId(), active_grp.getName()
        print("\nData for current group, {0} ({1}):-".format(grp_name, grp_id))
        for proj_id, proj_name in self._ls_projects():
            print("  Project: {0} ({1})".format(proj_name, str(proj_id)))
            for ds_id, ds_name in self._ls_datasets(proj_id):
                print("    Dataset: {0} ({1})".format(ds_name, str(ds_id)))
                for im_id, im_name in self._ls_images(ds_id):
                    print("      Image: {0} ({1})".format(im_name, str(im_id)))
        # TODO, list orphaned Datasets and Images

    def _ls_groups(self):
        """list groups (id, name) this session is a member of"""
        pairs = []
        for grp in self.conn.getGroupsMemberOf():
            pairs.append((grp.getId(), grp.getName()))
        return pairs

    def _ls_projects(self):
        """list projects (id, name) in the current session group"""
        # Restrict listing to projects owned by the session user.
        owner_id = self.conn.getUserId()
        projects = self.conn.listProjects(owner_id)
        return [(p.getId(), p.getName()) for p in projects]

    def _ls_datasets(self, proj_id):
        """list datasets (id, name) within the project id given"""
        project = self.conn.getObject("Project", proj_id)
        return [(ds.getId(), ds.getName()) for ds in project.listChildren()]

    def _ls_images(self, dset_id):
        """list images (id, name) within the dataset id given"""
        dataset = self.conn.getObject("Dataset", dset_id)
        return [(im.getId(), im.getName()) for im in dataset.listChildren()]

    def chgrp(self, group_id):
        """
        Change group for this session to the group_id given.

        Parameters
        ----------
        group_id : int
            ID of the OMERO group to switch the current session to.
            Presumably must be one of the groups listed by _ls_groups()
            -- server enforces membership; not checked here.
        """
        self.conn.setGroupForSession(group_id)

    def get(self, im_id, get_att=True):
        """
        Download the specified image as an OME-TIFF to current directory,
        with attachments also downloaded to folder: img_path + '_attachments'
        Return : path to downloaded image
        """
        img = self.conn.getObject("Image", oid=im_id)
        img_name = self._unique_name(img.getName(), im_id)
        img_path = os.path.join(os.getcwd(), img_name)
        img_file = open(str(img_path + ".ome.tiff"), "wb")
        fsize, blockgen = img.exportOmeTiff(bufsize=65536)
        for block in blockgen:
            img_file.write(block)
        img_file.close()
        fa_type = omero.model.FileAnnotationI
        attachments = [ann for ann in img.listAnnotations()
#.........这里部分代码省略.........
开发者ID:graemeball,项目名称:omero_scripts,代码行数:103,代码来源:iomero.py

示例2: run

# 需要导入模块: from omero.gateway import BlitzGateway [as 别名]
# 或者: from omero.gateway.BlitzGateway import isConnected [as 别名]
def run():
    """
    Launch (remote) Priism ER deconvolution job on a list of images.
    Results imported back into dataset of origin for each image.
    """

    # Build GUI dialog for user to choose images & update parameters
    client = script.client(
        "ER_Deconvolution.py", "ER deconvolution",

        script.String(
            "Data_Type", optional=False,
            grouping="1", values=[rstring('Image')], default="Image"),

        script.List(
            "IDs", optional=False,
            description="image IDs (must have original .dv file!)",
            grouping='2').ofType(rlong(0)),

        script.Int(
            "alpha", optional=False,
            description='regularization parameter "alpha" - try 1000-10000',
            grouping='3', default=job['par.alpha'], min=0),

        script.Float(
            "lambda f", optional=False,
            description='smoothing parameter "lambda f" - try 0.1-1.0',
            grouping='4', default=job['par.lamf'], min=0.0, max=1.0),

        script.Int(
            "iterations", optional=False,
            description="number of iterations - try 10-100",
            grouping='5', default=job['par.niter'], min=0),

        version="0.99",
        authors=["Graeme Ball"],
        institutions=["Dundee Imaging Facility"],
        contact="[email protected]"
    )

    try:
        tempdir = None
        input_image_ids = [int(n) for n in client.getInput("IDs", unwrap=True)]
        job['par.alpha'] = client.getInput("alpha", unwrap=True)
        job['par.lamf'] = client.getInput("lambda f", unwrap=True)
        job['par.niter'] = client.getInput("iterations", unwrap=True)

        conn = BlitzGateway(client_obj=client)
        user = str(conn.getUser().getName())
        group = str(conn.getGroupFromContext().getName())
        sid = client.getSessionId()

        # export images (must be .dv!) to shared / temp storage
        tempdir = mktempdir(user, TEMP)
        inputs = []
        for iid in input_image_ids:
            try:
                path = export_original_dvfile(conn, iid, tempdir)
                image = conn.getObject("Image", iid)
                fail(image is None, "No such image, ID=%d" % iid)
                did = image.getParent().getId()
                #did = image.getDataset().getId()
                inputs.append({'imageID': iid, 'path': path, 'datasetID': did})
            except RuntimeError as e:
                print "Fail: " + str(e)

        jobs = []
        for inp in inputs:
            command = dict(job)  # copy
            command['inputs'] = [inp]  # only 1 input image for this job
            jobs.append(json.dumps([command]))  # only 1 command for this job
        # N.B. '.jobs' file format more flexible than needed here
        # write jobs definition file (1 line json string per job)
        jobs_filepath = os.path.join(tempdir, jobs_filename)
        with open(jobs_filepath, 'w') as f:
            f.writelines(["%s\n" % j for j in jobs])

        # poll filesystem, checking for results
        client.enableKeepAlive(KEEPALIVE_PULSE)
        results_filepath = os.path.join(tempdir, results_filename)
        result_count = 0  # results .json file grows as results appear
        import_count = 0  # ensure we only attempt to import each result once
        tstart = time.time()
        while result_count < len(inputs) and (time.time() - tstart) < TIMEOUT:
            fail(not conn.isConnected(), "Connection lost!")
            alive_filepath = os.path.join(tempdir, alive_check_filename)
            with open(alive_filepath, 'w') as f:
                f.write("%f\n%d" % (time.time(), RESULTS_POLL_PULSE))
            time.sleep(RESULTS_POLL_PULSE)
            if os.path.exists(results_filepath):
                with open(results_filepath, 'r') as fr:
                    results = fr.readlines()  # 1 line json string per result
                    new_results = results[import_count:]
                    import_count += import_results(new_results, user, group,
                                                   sid, conn)
                    result_count = len(results)
        if result_count < len(inputs):
            print "Job timed out after %d seconds, %d results imported" % \
                (TIMEOUT, import_count)

#.........这里部分代码省略.........
开发者ID:graemeball,项目名称:omero-user-scripts,代码行数:103,代码来源:ER_Deconvolution.py


注:本文中的omero.gateway.BlitzGateway.isConnected方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。