This article collects typical usage examples of the Python method omero.gateway.BlitzGateway.getGroupFromContext. If you are wondering what BlitzGateway.getGroupFromContext does, how to use it, or want worked examples, the curated code samples below should help. You can also explore further usage examples of the containing class, omero.gateway.BlitzGateway.
Five code examples of BlitzGateway.getGroupFromContext are presented below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
Example 1: connect_to_omero
# Required import: from omero.gateway import BlitzGateway [as alias]
# Alternatively: from omero.gateway.BlitzGateway import getGroupFromContext [as alias]
def connect_to_omero(user, password, host, port=4064):
    conn = BlitzGateway(user, password, host=host, port=port)
    print conn.connect()
    user = conn.getUser()
    print "Current user:"
    print " ID:", user.getId()
    print " Username:", user.getName()
    print " Full Name:", user.getFullName()
    print "Member of:"
    for g in conn.getGroupsMemberOf():
        print " ID:", g.getId(), " Name:", g.getName()
    group = conn.getGroupFromContext()
    print "Current group: ", group.getName()
    return conn
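A minimal usage sketch for this helper (not part of the original example; the host name and credentials are placeholders, and conn.close() is assumed to be available as in current omero.gateway releases):

from omero.gateway import BlitzGateway  # required import, as noted above

conn = connect_to_omero("username", "password", "omero.example.org")
try:
    pass  # ... work with the connection here ...
finally:
    conn.close()  # release the session when finished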
Example 2: TestTree
# Required import: from omero.gateway import BlitzGateway [as alias]
# Alternatively: from omero.gateway.BlitzGateway import getGroupFromContext [as alias]
# ... (beginning of the class code omitted here) ...
            project.name = rstring(self.names[index])
            datasets = [DatasetI(), DatasetI(), DatasetI(), DatasetI()]
            for index, dataset in enumerate(datasets):
                dataset.name = rstring(self.names[index])
                project.linkDataset(dataset)
        return self.update.saveAndReturnArray(projects)

    @pytest.fixture
    def datasets(self):
        """
        Returns four new OMERO Datasets with required fields set and with
        names that can be used to exercise sorting semantics.
        """
        to_save = [DatasetI(), DatasetI(), DatasetI(), DatasetI()]
        for index, dataset in enumerate(to_save):
            dataset.name = rstring(self.names[index])
        # Non-orphaned Dataset to catch issues with queries where non-orphaned
        # datasets are included in the results.
        project = ProjectI()
        project.name = rstring(self.uuid())
        dataset = DatasetI()
        dataset.name = rstring(self.uuid())
        project.linkDataset(dataset)
        self.update.saveAndReturnObject(project)
        return self.update.saveAndReturnArray(to_save)

    @pytest.fixture
    def datasets_different_users(self):
        """
        Returns two new OMERO Datasets created by different users with
        required fields set.
        """
        client = self.conn.c
        group = self.conn.getGroupFromContext()._obj
        datasets = list()
        # User that has already been created by the "client" fixture
        user, name = self.user_and_name(client)
        self.add_experimenters(group, [user])
        for name in (rstring(self.uuid()), rstring(self.uuid())):
            client, user = self.new_client_and_user(group=group)
            try:
                dataset = DatasetI()
                dataset.name = name
                update_service = client.getSession().getUpdateService()
                datasets.append(update_service.saveAndReturnObject(dataset))
            finally:
                client.closeSession()
        return datasets

    @pytest.fixture
    def screens(self):
        """
        Returns four new OMERO Screens with required fields set and with names
        that can be used to exercise sorting semantics.
        """
        to_save = [ScreenI(), ScreenI(), ScreenI(), ScreenI()]
        for index, screen in enumerate(to_save):
            screen.name = rstring(self.names[index])
        return self.update.saveAndReturnArray(to_save)

    @pytest.fixture
    def screens_different_users(self):
        """
        Returns two new OMERO Screens created by different users with
        required fields set.
        """
Example 3:
# Required import: from omero.gateway import BlitzGateway [as alias]
# Alternatively: from omero.gateway.BlitzGateway import getGroupFromContext [as alias]
# Current session details
# =============================================================
# By default, you will have logged into your 'current' group in OMERO. This
# can be changed by switching group in the OMERO.insight or OMERO.web clients.
user = conn.getUser()
print "Current user:"
print " ID:", user.getId()
print " Username:", user.getName()
print " Full Name:", user.getFullName()
print "Member of:"
for g in conn.getGroupsMemberOf():
    print " ID:", g.getId(), " Name:", g.getName()
group = conn.getGroupFromContext()
print "Current group: ", group.getName()
print "Other Members of current group:"
for exp in conn.listColleagues():
    print " ID:", exp.getId(), exp.getOmeName(), " Name:", exp.getFullName()
print "Owner of:"
for g in conn.listOwnedGroups():
    print " ID:", g.getId(), " Name:", g.getName()
# New in OMERO 5
print "Admins:"
for exp in conn.getAdministrators():
    print " ID:", exp.getId(), exp.getOmeName(), " Name:", exp.getFullName()
Example 4: Omg
# Required import: from omero.gateway import BlitzGateway [as alias]
# Alternatively: from omero.gateway.BlitzGateway import getGroupFromContext [as alias]
class Omg(object):
    """
    OMERO gateway that wraps Blitz gateway and CLI, intended for
    scripting and interactive work.

    Attributes
    ----------
    conn : Blitz gateway connection
    """

    def __init__(self, conn=None, user=None, passwd=None,
                 server=SERVER, port=PORT, skey=None):
        """
        Requires active Blitz connection OR username plus password or sesskey
        """
        if conn is None and (user is None or (passwd is None and skey is None)):
            raise ValueError("Bad parameters," + self.__init__.__doc__)
        if conn is not None:
            if conn.isConnected():
                self.conn = conn
            else:
                raise ValueError("Cannot initialize with closed connection!")
        else:
            if passwd is not None:
                self.conn = BlitzGateway(user, passwd, host=server, port=port)
                self.conn.connect()
            else:
                self.conn = BlitzGateway(user, host=server, port=port)
                self.conn.connect(skey)
        if self.conn.isConnected():
            self._server = self.conn.host
            self._port = self.conn.port
            self._user = self.conn.getUser().getName()
            self._key = self.conn.getSession().getUuid().getValue()
            print("Connected to {0} (port {1}) as {2}, session key={3}".format(
                self._server, self._port, self._user, self._key))
        else:
            print("Failed to open connection :-(")

    def ls(self):
        """
        Print groups, then projects/datasets/images for current group.
        """
        print("Groups for {0}:-".format(self.conn.getUser().getName()))
        for gid, gname in self._ls_groups():
            print(" {0} ({1})".format(gname, str(gid)))
        curr_grp = self.conn.getGroupFromContext()
        gid, gname = curr_grp.getId(), curr_grp.getName()
        print("\nData for current group, {0} ({1}):-".format(gname, gid))
        for pid, pname in self._ls_projects():
            print(" Project: {0} ({1})".format(pname, str(pid)))
            for did, dname in self._ls_datasets(pid):
                print(" Dataset: {0} ({1})".format(dname, str(did)))
                for iid, iname in self._ls_images(did):
                    print(" Image: {0} ({1})".format(iname, str(iid)))
        # TODO, list orphaned Datasets and Images

    def _ls_groups(self):
        """list groups (id, name) this session is a member of"""
        groups = self.conn.getGroupsMemberOf()
        return [(group.getId(), group.getName()) for group in groups]

    def _ls_projects(self):
        """list projects (id, name) in the current session group"""
        projs = self.conn.listProjects(self.conn.getUserId())
        return [(proj.getId(), proj.getName()) for proj in projs]

    def _ls_datasets(self, proj_id):
        """list datasets (id, name) within the project id given"""
        dsets = self.conn.getObject("Project", proj_id).listChildren()
        return [(dset.getId(), dset.getName()) for dset in dsets]

    def _ls_images(self, dset_id):
        """list images (id, name) within the dataset id given"""
        imgs = self.conn.getObject("Dataset", dset_id).listChildren()
        return [(img.getId(), img.getName()) for img in imgs]

    def chgrp(self, group_id):
        """
        Change group for this session to the group_id given.
        """
        self.conn.setGroupForSession(group_id)

    def get(self, im_id, get_att=True):
        """
        Download the specified image as an OME-TIFF to current directory,
        with attachments also downloaded to folder: img_path + '_attachments'
        Return : path to downloaded image
        """
        img = self.conn.getObject("Image", oid=im_id)
        img_name = self._unique_name(img.getName(), im_id)
        img_path = os.path.join(os.getcwd(), img_name)
        img_file = open(str(img_path + ".ome.tiff"), "wb")
        fsize, blockgen = img.exportOmeTiff(bufsize=65536)
        for block in blockgen:
            img_file.write(block)
        img_file.close()
        fa_type = omero.model.FileAnnotationI
        attachments = [ann for ann in img.listAnnotations()
# ... (rest of the code omitted here) ...
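A hypothetical interactive session with this wrapper (server, credentials, and IDs below are placeholders, not from the original source):

omg = Omg(user="alice", passwd="secret", server="omero.example.org")
omg.ls()                 # print groups, then projects/datasets/images
omg.chgrp(5)             # switch the session to group id 5
img_path = omg.get(123)  # download image 123 as OME-TIFF, with attachments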
Example 5: run
# Required import: from omero.gateway import BlitzGateway [as alias]
# Alternatively: from omero.gateway.BlitzGateway import getGroupFromContext [as alias]
def run():
    """
    Launch (remote) Priism ER deconvolution job on a list of images.
    Results imported back into dataset of origin for each image.
    """
    # Build GUI dialog for user to choose images & update parameters
    client = script.client(
        "ER_Deconvolution.py", "ER deconvolution",
        script.String(
            "Data_Type", optional=False,
            grouping="1", values=[rstring('Image')], default="Image"),
        script.List(
            "IDs", optional=False,
            description="image IDs (must have original .dv file!)",
            grouping='2').ofType(rlong(0)),
        script.Int(
            "alpha", optional=False,
            description='regularization parameter "alpha" - try 1000-10000',
            grouping='3', default=job['par.alpha'], min=0),
        script.Float(
            "lambda f", optional=False,
            description='smoothing parameter "lambda f" - try 0.1-1.0',
            grouping='4', default=job['par.lamf'], min=0.0, max=1.0),
        script.Int(
            "iterations", optional=False,
            description="number of iterations - try 10-100",
            grouping='5', default=job['par.niter'], min=0),
        version="0.99",
        authors=["Graeme Ball"],
        institutions=["Dundee Imaging Facility"],
        contact="[email protected]"
    )

    try:
        tempdir = None
        input_image_ids = [int(n) for n in client.getInput("IDs", unwrap=True)]
        job['par.alpha'] = client.getInput("alpha", unwrap=True)
        job['par.lamf'] = client.getInput("lambda f", unwrap=True)
        job['par.niter'] = client.getInput("iterations", unwrap=True)

        conn = BlitzGateway(client_obj=client)
        user = str(conn.getUser().getName())
        group = str(conn.getGroupFromContext().getName())
        sid = client.getSessionId()

        # export images (must be .dv!) to shared / temp storage
        tempdir = mktempdir(user, TEMP)
        inputs = []
        for iid in input_image_ids:
            try:
                path = export_original_dvfile(conn, iid, tempdir)
                image = conn.getObject("Image", iid)
                fail(image is None, "No such image, ID=%d" % iid)
                did = image.getParent().getId()
                # did = image.getDataset().getId()
                inputs.append({'imageID': iid, 'path': path, 'datasetID': did})
            except RuntimeError as e:
                print "Fail: " + str(e)

        jobs = []
        for inp in inputs:
            command = dict(job)  # copy
            command['inputs'] = [inp]  # only 1 input image for this job
            jobs.append(json.dumps([command]))  # only 1 command for this job
        # N.B. '.jobs' file format more flexible than needed here

        # write jobs definition file (1 line json string per job)
        jobs_filepath = os.path.join(tempdir, jobs_filename)
        with open(jobs_filepath, 'w') as f:
            f.writelines(["%s\n" % j for j in jobs])

        # poll filesystem, checking for results
        client.enableKeepAlive(KEEPALIVE_PULSE)
        results_filepath = os.path.join(tempdir, results_filename)
        result_count = 0  # results .json file grows as results appear
        import_count = 0  # ensure we only attempt to import each result once
        tstart = time.time()
        while result_count < len(inputs) and (time.time() - tstart) < TIMEOUT:
            fail(not conn.isConnected(), "Connection lost!")
            alive_filepath = os.path.join(tempdir, alive_check_filename)
            with open(alive_filepath, 'w') as f:
                f.write("%f\n%d" % (time.time(), RESULTS_POLL_PULSE))
            time.sleep(RESULTS_POLL_PULSE)
            if os.path.exists(results_filepath):
                with open(results_filepath, 'r') as fr:
                    results = fr.readlines()  # 1 line json string per result
                new_results = results[import_count:]
                import_count += import_results(new_results, user, group,
                                               sid, conn)
                result_count = len(results)
        if result_count < len(inputs):
            print "Job timed out after %d seconds, %d results imported" % \
                (TIMEOUT, import_count)
# ... (rest of the code omitted here) ...
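The remainder of the script is elided in the source. An OMERO script of this shape conventionally ends by reporting a message and closing the client; the finally block below is a sketch under that assumption, not the original code (the shutil temp-directory cleanup is likewise hypothetical):

    finally:
        if tempdir is not None:
            shutil.rmtree(tempdir, ignore_errors=True)  # hypothetical temp cleanup
        client.setOutput("Message", rstring("ER deconvolution script finished"))
        client.closeSession()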