本文整理匯總了Python中gevent.queue.JoinableQueue.get方法的典型用法代碼示例。如果您正苦於以下問題:Python JoinableQueue.get方法的具體用法?Python JoinableQueue.get怎麽用?Python JoinableQueue.get使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在類gevent.queue.JoinableQueue
的用法示例。
在下文中一共展示了JoinableQueue.get方法的15個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Python代碼示例。
示例1: test_main
# 需要導入模塊: from gevent.queue import JoinableQueue [as 別名]
# 或者: from gevent.queue.JoinableQueue import get [as 別名]
def test_main(self):
queue = JoinableQueue()
print dir(queue)
queue.put(1)
queue.put(3)
queue.put(2)
queue.put(6)
print queue.qsize()
print '1', queue.get(), queue.get()
示例2: test_api
# 需要導入模塊: from gevent.queue import JoinableQueue [as 別名]
# 或者: from gevent.queue.JoinableQueue import get [as 別名]
def test_api(self):
    """Drain search results from a JoinableQueue until every worker greenlet
    has finished and the queue is empty."""
    queue = JoinableQueue()
    task_group = self.api.search('terminator', queue)
    while True:
        # Snapshot completion *before* blocking on the queue, so an item
        # enqueued after this check is still collected next iteration.
        all_done = all(g.ready() for g in task_group.greenlets)
        try:
            item = queue.get(timeout=1.0)
        except Empty:
            if all_done:
                log.info('queue is empty and all jobs are done, quitting')
                break
            log.info(
                'queue was empty and jobs are still running, retrying'
            )
            continue
        try:
            log.info('%r', item)
        finally:
            # Always acknowledge the item so queue.join() can return.
            queue.task_done()
    task_group.join()
    queue.join()
    log.info('joined everything')
示例3: test_service
# 需要導入模塊: from gevent.queue import JoinableQueue [as 別名]
# 或者: from gevent.queue.JoinableQueue import get [as 別名]
def test_service():
    "Service() should implement the basic needs of an async service"
    # Given a Service subclass that records the package it processed
    class MyService(Service):
        def __init__(self, my_mock, result_queue=None):
            self.my_mock = my_mock
            super(MyService, self).__init__(
                callback=self.run,
                result_queue=result_queue,
            )

        def run(self, package, sender_data):
            self.my_mock.ran = package

    recorder = Mock()
    results = JoinableQueue()
    service = MyService(recorder, result_queue=results)

    # When I queue a package to be processed by my service and start the
    # service with 1 concurrent worker
    service.queue('gherkin==0.1.0', 'main')
    service.consume()

    # Then the package shows up on the result queue and was processed
    results.get().should.equal('gherkin==0.1.0')
    recorder.ran.should.equal('gherkin==0.1.0')
示例4: on_search
# 需要導入模塊: from gevent.queue import JoinableQueue [as 別名]
# 或者: from gevent.queue.JoinableQueue import get [as 別名]
def on_search(self, query):
    """Run a search for *query* and stream each result to the client,
    emitting 'done' once all worker greenlets finish and the queue drains."""
    log.debug('search for %r', query)
    queue = JoinableQueue()
    task_group = g.api.search(query, queue)
    while True:
        # Completion is checked before blocking so that a timeout on an
        # empty queue can cleanly end the loop.
        done = all(worker.ready() for worker in task_group)
        try:
            result = queue.get(timeout=1.0)
        except Empty:
            if done:
                break
            continue
        try:
            self.emit('result', result._asdict())
        finally:
            queue.task_done()
    queue.join()
    task_group.join()
    self.emit('done', query)
示例5: GQueue
# 需要導入模塊: from gevent.queue import JoinableQueue [as 別名]
# 或者: from gevent.queue.JoinableQueue import get [as 別名]
class GQueue(object):
    """Decorator-driven job queue backed by gevent's JoinableQueue.

    Functions wrapped with ``job`` enqueue their calls instead of running
    them; greenlet workers started via ``run_worker`` execute the calls.
    """

    def __init__(self):
        self._queue = JoinableQueue()

    def job(self, func):
        """Wrap *func* so that calling it queues the call for a worker."""
        @functools.wraps(func)
        def enqueue(*args, **kwargs):
            self._queue.put([func, args, kwargs])
        return enqueue

    def join(self):
        """Block until every queued job has been executed."""
        self._queue.join()

    def work(self):
        """Worker loop: pop and execute jobs forever, acknowledging each."""
        while True:
            func, args, kwargs = self._queue.get()
            try:
                func(*args, **kwargs)
            finally:
                self._queue.task_done()

    def run_worker(self, num=1):
        """Spawn *num* greenlets running the worker loop."""
        for _ in range(num):
            gevent.spawn(self.work)
示例6: _run
# 需要導入模塊: from gevent.queue import JoinableQueue [as 別名]
# 或者: from gevent.queue.JoinableQueue import get [as 別名]
def _run(self):
    """Wait for files matching ``self.path``, tail them all, and forward
    each received line to ``self.output`` as an Event."""
    # Poll until the glob pattern matches at least one file.
    matched = glob.glob(self.path)
    while not matched:
        gevent.sleep(0.01)
        matched = glob.glob(self.path)
    line_queue = JoinableQueue()
    self.logger.debug('Tailing %s' % ', '.join(matched))
    self.tails = [Tail(name, line_queue, self.statedir) for name in matched]
    while True:
        data = line_queue.get()
        if data:
            # Drop a single trailing newline before emitting.
            if data.endswith('\n'):
                data = data[:-1]
            self.logger.debug('Received: %r' % data)
            self.output.put(Event(data=data))
        line_queue.task_done()
示例7: init
# 需要導入模塊: from gevent.queue import JoinableQueue [as 別名]
# 或者: from gevent.queue.JoinableQueue import get [as 別名]
def init():
    """Bootstrap the patch system (generator that yields progress messages).

    Steps, in order:
      1. register the built-in core patch source,
      2. load persisted sources from the ``patch_source`` DB table,
      3. prune external plugin directories that are no longer valid sources,
      4. outside test mode, add the default community repo,
      5. run one full update pass, yielding progress strings,
      6. restart if an update requires it, then start the patch loop greenlet.
    """
    global patch_loop_greenlet
    global core_source
    # add core source
    # NOTE(review): sig appears to be public-key material used to verify
    # patches from the core server -- confirm against CoreSource.
    sig = [
        14493609762890313342166277786717882067186706504725349899906780741747713356290787356528733464152980047783620946593111196306463577744063955815402148552860145629259653950818107505393643383587083768290613402372295707034951885912924020308782786221888333312179957359121890467597304281160325135791414295786807436357,
        1836340799499544967344676626569366761238237327637553699677615341837866857178638560803752775147141401436473176143062386392930849127511639810150938435062071285028855634164277748937448362731305104091415548874264676030905340846245037152836818535938439214826659048244377315288514582697466079356264083762738266643,
        89884656743115795873895609296394864029741047392531316591432509289601210992615631812974174607675153482641606235553368183778569185786977952044726620763937252233940116059625337686768538445873713070762889839480360220508177637118657209098549890835520224254015051271431737736621385544038152276933973262030194906397,
        1224239220300762038953555488069442663256999688439
    ]
    with transaction:
        core_source = CoreSource(id=platform, url=settings.patchserver, sig=sig, contact='[email protected]')
    # load sources
    with transaction, db.Cursor() as c:
        aa = c.execute("SELECT * FROM patch_source")
        for a in aa.fetchall():
            try:
                id = json.loads(a['id'])
                data = json.loads(a['data'])
                # update old repo urls
                if 'url' in data and data['url'].startswith('http://patch.download.am'):
                    data['url'] = data['url'].replace('http://patch.download.am', 'http://repo.download.am')
                # git-backed sources are distinguished purely by URL suffix
                if 'url' in data and data['url'].endswith('.git'):
                    source = GitSource(id=id, **data)
                else:
                    source = PatchSource(id=id, **data)
                if source.enabled:
                    patch_group.spawn(source.check)
            except TypeError:
                log.critical("broken row: {}".format(a))
                traceback.print_exc()
    # delete useless repos
    for extern in os.listdir(settings.external_plugins):
        if extern not in sources or not sources[extern].enabled:
            path = os.path.join(settings.external_plugins, extern)
            # only plain directories that are not git checkouts are removed
            if os.path.isdir(path) and not os.path.exists(os.path.join(path, '.git')):
                log.info('deleting useless external repo {}'.format(path))
                try:
                    really_clean_repo(path)
                except:
                    # best-effort cleanup; failure to delete is non-fatal
                    pass
    default_sources = dict(
        downloadam='http://community.download.am/dlam-config.yaml'
    )
    if not test_mode:
        for id, url in default_sources.iteritems():
            if id not in sources and url not in config_urls:
                yield 'adding default repo {}'.format(id)
                try:
                    source = add_source(url)
                    if source is None:
                        continue
                except:
                    traceback.print_exc()
                else:
                    if isinstance(source, BasicSource) and source.enabled:
                        patch_group.spawn(source.check)
    # check and apply updates
    from gevent.queue import JoinableQueue
    y = JoinableQueue()
    complete = list()

    def source_complete_callback(source):
        # emit one final progress message once every source has reported
        complete.append(source)
        if len(complete) == len(sources):
            y.put('updating {} / {}'.format(len(complete), len(sources)))
    gevent.spawn(patch_all, 30, False, source_complete_callback=source_complete_callback)
    gevent.sleep(0.2)
    yield 'updating {} / {}'.format(len(complete), len(sources))
    # keep yielding progress while any patch greenlet is still running
    while len(patch_group):
        try:
            x = y.get(timeout=1)
        except:
            continue
        yield x
    patch_group.join()
    execute_restart()
    # start the patch loop
    patch_loop_greenlet = gevent.spawn(patch_loop)
示例8: ScoringService
# 需要導入模塊: from gevent.queue import JoinableQueue [as 別名]
# 或者: from gevent.queue.JoinableQueue import get [as 別名]
class ScoringService(Service):
"""A service that assigns a score to submission results.
A submission result is ready to be scored when its compilation is
unsuccessful (in this case, no evaluation will be performed) or
after it has been evaluated. The goal of scoring is to use the
evaluations to determine score, score_details, public_score,
public_score_details and ranking_score_details (all non-null).
Scoring is done by the compute_score method of the ScoreType
defined by the dataset of the result.
ScoringService keeps a queue of (submission_id, dataset_id) pairs
identifying submission results to score. A greenlet is spawned to
consume this queue, one item at a time. The queue is filled by the
new_evaluation and the invalidate_submissions RPC methods, and by a
sweeper greenlet, whose duty is to regularly check all submissions
in the database and put the unscored ones in the queue (this check
can also be forced by the search_jobs_not_done RPC method).
"""
# How often we look for submission results not scored.
SWEEPER_TIMEOUT = 347.0
def __init__(self, shard):
    """Initialize the ScoringService.

    shard: service shard identifier, forwarded to Service.__init__
        # NOTE(review): type/semantics defined by the Service base class -- confirm.
    """
    Service.__init__(self, shard)
    # Set up communication with ProxyService.
    self.proxy_service = self.connect_to(ServiceCoord("ProxyService", 0))
    # Set up and spawn the scorer.
    # TODO Link to greenlet: when it dies, log CRITICAL and exit.
    self._scorer_queue = JoinableQueue()
    gevent.spawn(self._scorer_loop)
    # Set up and spawn the sweeper.
    # TODO Link to greenlet: when it dies, log CRITICAL and exit.
    self._sweeper_start = None
    self._sweeper_event = Event()
    gevent.spawn(self._sweeper_loop)
def _scorer_loop(self):
    """Monitor the queue, scoring its top element.

    Infinite loop: block until an item is available, score it, and carry
    on.  Errors during scoring are logged and suppressed so the loop
    never dies; the queue item is always acknowledged.
    """
    while True:
        job = self._scorer_queue.get()
        submission_id, dataset_id = job
        try:
            self._score(submission_id, dataset_id)
        except Exception:
            logger.error("Unexpected error when scoring submission %d on "
                         "dataset %d.", submission_id, dataset_id,
                         exc_info=True)
        finally:
            self._scorer_queue.task_done()
def _score(self, submission_id, dataset_id):
"""Assign a score to a submission result.
This is the core of ScoringService: here we retrieve the result
from the database, check if it is in the correct status,
instantiate its ScoreType, compute its score, store it back in
the database and tell ProxyService to update RWS if needed.
submission_id (int): the id of the submission that has to be
scored.
dataset_id (int): the id of the dataset to use.
"""
with SessionGen() as session:
# Obtain submission.
submission = Submission.get_from_id(submission_id, session)
if submission is None:
raise ValueError("Submission %d not found in the database." %
submission_id)
# Obtain dataset.
dataset = Dataset.get_from_id(dataset_id, session)
if dataset is None:
raise ValueError("Dataset %d not found in the database." %
dataset_id)
# Obtain submission result.
submission_result = submission.get_result(dataset)
# It means it was not even compiled (for some reason).
if submission_result is None:
raise ValueError("Submission result %d(%d) was not found." %
(submission_id, dataset_id))
# Check if it's ready to be scored.
if not submission_result.needs_scoring():
#.........這裏部分代碼省略.........
示例9: __init__
# 需要導入模塊: from gevent.queue import JoinableQueue [as 別名]
# 或者: from gevent.queue.JoinableQueue import get [as 別名]
class Migrator:
def __init__(self, scheme, create_devices=True,
write_data=True,
start_date="2000-01-01T00:00:00Z",
end_date="2014-12-31T00:00:00Z",
pool_size=3):
self.scheme = scheme
self.create_devices = create_devices
self.should_write_data = write_data
self.start_date = start_date
self.end_date = end_date
self.tdb = TDBClient(scheme.db_key, scheme.db_key,
scheme.db_secret,
base_url=scheme.db_baseurl)
iq_endpoint = HTTPEndpoint(scheme.iq_baseurl,
scheme.iq_key,
scheme.iq_secret)
self.tiq = TIQClient(iq_endpoint)
self.queue = JoinableQueue()
self.lock = Lock()
self.dp_count = 0
self.req_count = 0
self.dp_reset = time.time()
for i in range(pool_size):
gevent.spawn(self.worker)
def worker(self):
while True:
series = self.queue.get()
try:
self.migrate_series(series)
finally:
self.queue.task_done()
def migrate_all_series(self, start_key="", limit=None):
start_time = time.time()
(keys, tags, attrs) = self.scheme.identity_series_filter()
series_set = self.tdb.list_series(keys, tags, attrs)
# Keep our own state of whether we passed the resume point, so we don't
# need to assume client and server sort strings the same.
found_first_series = False
series_count = 0
for series in series_set:
if not found_first_series and series.key < start_key:
continue
else:
found_first_series = True
if limit and series_count >= limit:
print("Reached limit of %d devices, stopping." % (limit))
break
if self.scheme.identity_series_client_filter(series):
# If the series looks like an identity series,
# queue it to be processed by the threadpool
self.queue.put(series)
series_count += 1
self.queue.join()
end_time = time.time()
print("Exporting {} devices took {} seconds".format(series_count, end_time - start_time))
def migrate_series(self, series):
print(" Beginning to migrate series: %s" % (series.key))
error = False
try:
if self.create_devices:
error = self.create_device(series)
if self.should_write_data and not error:
error = self.write_data(series)
except Exception, e:
logging.exception(e)
error = True
if not error:
print("COMPLETED migrating for series %s" % (series.key))
else:
print("ERROR migrating series %s" % (series.key))
示例10: Importer
# 需要導入模塊: from gevent.queue import JoinableQueue [as 別名]
# 或者: from gevent.queue.JoinableQueue import get [as 別名]
class Importer(object):
def __init__(self, creds, pool_size=POOL_SIZE):
self.client = get_session(creds['host'],
creds['key'],
creds['secret'])
self.queue = JoinableQueue(maxsize=POOL_SIZE*2)
for i in range(pool_size):
gevent.spawn(self.worker)
def worker(self):
while True:
job = self.queue.get()
typ = job.get('type')
try:
if typ == 'device':
self._process_device(job['data'])
elif typ == 'datapoints':
self._process_datapoints(job['data'])
finally:
self.queue.task_done()
def write_devices(self, devices):
for device in devices:
self.queue.put({'type': 'device', 'data': device})
self.queue.join()
def write_datapoints_from_file(self, infile):
points = {}
lineno = 0
for line in infile:
lineno += 1
(device, sensor, ts, val) = line.split('\t')
pts = points.setdefault(device, {}).setdefault(sensor, [])
pts.append({'t': ts, 'v': float(val)})
if lineno % 1000 == 0:
self.queue.put({'type': 'datapoints', 'data': points})
points = {}
if points:
self.queue.put({'type': 'datapoints', 'data': points})
self.queue.join()
def _process_device(self, device, retries=5):
res = self.client.create_device(device)
if res.successful != tempoiq.response.SUCCESS:
if 'A device with that key already exists' in res.body:
print("Skipping creating existing device {}"
.format(device['key']))
return
if retries > 0:
print("Retrying device create {}, error {}"
.format(device['key'], res.body))
self._process_device(device, retries - 1)
else:
print("Retries exceeded; couldn't create device {}"
.format(device['key']))
def _process_datapoints(self, write_request, retries=5):
try:
res = self.client.write(write_request)
except Exception, e:
print("ERROR with request: --->")
print(json.dumps(write_request, default=WriteEncoder().default))
raise e
if res.successful != tempoiq.response.SUCCESS:
if retries > 0:
print("Retrying write, error was: {}".format(res.body))
return self._process_datapoints(write_request, retries - 1)
else:
print("Retries exceeded; lost data!")
print(json.dumps(write_request, default=WriteEncoder().default))
return True
return False
示例11: InterceptedStreamsMixin
# 需要導入模塊: from gevent.queue import JoinableQueue [as 別名]
# 或者: from gevent.queue.JoinableQueue import get [as 別名]
class InterceptedStreamsMixin(object):
    """
    Mixin class for GethProcess instances that fans every stdout and
    stderr line out to a set of registered callback functions, using a
    producer/consumer pair of greenlet-serviced queues per stream.
    """
    stdout_callbacks = None
    stderr_callbacks = None

    def __init__(self, *args, **kwargs):
        super(InterceptedStreamsMixin, self).__init__(*args, **kwargs)
        self.stdout_callbacks = []
        self.stdout_queue = JoinableQueue()
        self.stderr_callbacks = []
        self.stderr_queue = JoinableQueue()

    def register_stdout_callback(self, callback_fn):
        self.stdout_callbacks.append(callback_fn)

    def register_stderr_callback(self, callback_fn):
        self.stderr_callbacks.append(callback_fn)

    def produce_stdout_queue(self):
        # Read lines until EOF (b'' sentinel), yielding to the hub each time.
        for raw_line in iter(self.proc.stdout.readline, b''):
            self.stdout_queue.put(raw_line)
            gevent.sleep(0)

    def produce_stderr_queue(self):
        for raw_line in iter(self.proc.stderr.readline, b''):
            self.stderr_queue.put(raw_line)
            gevent.sleep(0)

    def consume_stdout_queue(self):
        while True:
            raw_line = self.stdout_queue.get()
            stripped = raw_line.strip()
            for callback in self.stdout_callbacks:
                callback(stripped)
            gevent.sleep(0)

    def consume_stderr_queue(self):
        while True:
            raw_line = self.stderr_queue.get()
            stripped = raw_line.strip()
            for callback in self.stderr_callbacks:
                callback(stripped)
            gevent.sleep(0)

    def start(self):
        super(InterceptedStreamsMixin, self).start()
        # Two producers feed the queues; two consumers drain them.
        for worker in (self.produce_stdout_queue,
                       self.produce_stderr_queue,
                       self.consume_stdout_queue,
                       self.consume_stderr_queue):
            gevent.spawn(worker)

    def stop(self):
        super(InterceptedStreamsMixin, self).stop()
        # Give each queue up to 5s to drain; a Timeout is not an error.
        for pending in (self.stdout_queue, self.stderr_queue):
            try:
                pending.join(5)
            except Timeout:
                pass
示例12: Service
# 需要導入模塊: from gevent.queue import JoinableQueue [as 別名]
# 或者: from gevent.queue.JoinableQueue import get [as 別名]
class Service(object):
    """Base class for asynchronous, greenlet-pooled package services.

    Jobs are queued as ``(package, (sender_name, data))`` tuples and run
    through ``callback`` on a greenlet pool.  Successful results are fanned
    out to subscriber services and, when configured, to ``result_queue``;
    failures are collected in ``failed_queue``.
    """

    def __init__(self, callback, **args):
        self.callback = callback
        self.result_queue = args.get('result_queue')
        self.package_queue = JoinableQueue()
        self.failed_queue = []
        self.env = args.get('env')
        self.main_greenlet = None
        self.pool = Pool(args.get('concurrency'))
        self.should_run = True
        self.subscribers = []
        self.logger = Logger(self.name, args.get('log_level'))

    @property
    def name(self):
        """Lower-cased class name; the service's identity in logs/routing."""
        return self.__class__.__name__.lower()

    def queue(self, package, sender_name, **data):
        """Enqueue *package* on behalf of *sender_name* with optional data.

        NOTE(review): the original statement here,
        ``assert (sender_name == 'downloadmanager' and data.get('path')) or True``,
        was vacuous -- ``... or True`` always passes -- so it has been removed
        as dead code.  If downloadmanager senders must supply ``path``,
        enforce it with an explicit check that raises, not an assert.
        """
        self.package_queue.put((package, (sender_name, data)))
        self.logger.level(3, ' * queue(from=%s, to=%s, package=%s, data=%s)',
                          sender_name, self.name, package, data)

    def consume(self):
        """Take one job off the queue and run it on the pool (non-blocking)."""
        package, sender_data = self.package_queue.get()
        self.pool.spawn(self._run_service, package, sender_data)
        self.logger.level(3, ' * %s.run(package=%s, sender_data=%s)',
                          self.name, package, sender_data)

    def subscribe(self, other):
        """Register self to be notified of every package *other* processes."""
        other.subscribers.append(self)

    def loop(self):
        """Consume jobs until ``stop()`` clears the run flag."""
        while self.should_run:
            self.consume()

    def start(self):
        """Run the consume loop in a background greenlet."""
        self.main_greenlet = gevent.spawn(self.loop)

    def stop(self, force=False):
        # This will force the current iteration on `loop()` to be the last
        # one, so the thing we're processing will be able to finish;
        self.should_run = False
        # if the caller is in a hurry, we'll just kill everything mercilessly
        if force and self.main_greenlet:
            self.main_greenlet.kill()

    def _run_service(self, package, sender_data):
        """Invoke the callback, then fan results out to subscribers/queue.

        NotForMe aborts silently; ReportableError and any other exception
        are recorded in ``failed_queue`` and logged, but never propagated.
        """
        try:
            data = self.callback(package, sender_data)
        except NotForMe:
            return
        except ReportableError as exc:
            self.failed_queue.append((package, exc))
            self.logger.level(0, "Error: %s", exc)
        except BaseException as exc:
            self.failed_queue.append((package, exc))
            self.logger.traceback(4,
                'failed to run %s (requested by:%s) for package %s:',
                self.name, sender_data[0], package, exc=exc)
        else:
            # Let's notify our subscribers
            for subscriber in self.subscribers:
                subscriber.queue(package, self.name, **(data or {}))
            # If the callback worked, let's go ahead and tell the world. If
            # and only if requested by the caller, of course.
            if self.result_queue:
                self.result_queue.put(package)
示例13: WebServer
# 需要導入模塊: from gevent.queue import JoinableQueue [as 別名]
# 或者: from gevent.queue.JoinableQueue import get [as 別名]
class WebServer(Flask):
def __init__(self, *args, **kwargs):
super(WebServer, self).__init__(*args, **kwargs)
print 'Webserver started'
self.debug = True
self.cmd_queue = JoinableQueue()
self.event_queue = JoinableQueue()
self.cmd_id = 0
self.cmd_results = {}
gevent.spawn(self.send_commands_to_debugger)
gevent.spawn(self.receive_events_from_debugger)
def do_command(self, cmd, args=''):
cmd_id = self.generate_cmd_id()
self.cmd_results[cmd_id] = AsyncResult()
self.cmd_queue.put((
cmd_id,
json.dumps({
'cmd' : cmd,
'args' : args,
}))
)
result = self.cmd_results[cmd_id].wait()
return json.loads(result)
def generate_cmd_id(self):
self.cmd_id += 1
return self.cmd_id
def send_commands_to_debugger(self):
print 'start send_commands_to_debugger'
conn = None
while True:
cmd_id, cmd = self.cmd_queue.get()
if not cmd:
break
print 'send command', cmd
conn = socket.create_connection(config.command_socket_addr)
conn.send(cmd)
result = ''
while True:
data = conn.recv(4096)
if not data: break
result += data
cmd_result = self.cmd_results.pop(cmd_id)
cmd_result.set(result)
conn.close()
def receive_events_from_debugger(self):
print 'start receive_events_from_debugger'
self.event_server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.event_server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.event_server.bind(config.event_socket_addr)
self.event_server.listen(16)
conn, _ = self.event_server.accept()
while True:
self.event_queue.put(conn.recv(4096))
def clear_event_queue(self):
self.event_queue = JoinableQueue()
def shutdown(self):
self.event_server.close()
示例14: HttpScanner
# 需要導入模塊: from gevent.queue import JoinableQueue [as 別名]
# 或者: from gevent.queue.JoinableQueue import get [as 別名]
class HttpScanner(object):
def __init__(self, args):
    """
    Initialise HTTP scanner
    :param args: parsed command-line options
    :return:
    """
    self.args = args
    self.output = HttpScannerOutput(args)
    self._init_scan_options()

    # Reading files
    self.output.write_log("Reading files and deduplicating.", logging.INFO)
    self.hosts = self._file_to_list(args.hosts)
    self.urls = self._file_to_list(args.urls)

    #
    self._calc_urls()
    summary = 'Loaded %i hosts %i urls' % (self.hosts_count, self.urls_count)
    if self.args.ports is not None:
        summary += ' %i ports' % len(self.args.ports)
    self.output.print_and_log(summary)

    # Without SYN scanning, expand every host across all requested ports.
    if self.args.ports is not None and not self.args.syn:
        self.hosts = [helper.generate_url(host, port)
                      for host in self.hosts
                      for port in self.args.ports]
        #
        self._calc_urls()
    self.output.print_and_log('%i full urls to scan' % self.full_urls_count)

    # Queue and workers
    self.hosts_queue = JoinableQueue()
    self.workers = []
def _file_to_list(self, filename, dedup=True):
    """
    Get list from file
    :param filename: file to read
    :param dedup: drop duplicate lines when True
    :return: list of lines
    """
    if not (path.exists(filename) and path.isfile(filename)):
        self.output.print_and_log('File %s not found!' % filename, logging.ERROR)
        exit(-1)
    # Preparing lines list: keep only non-empty lines.
    lines = [line for line in open(filename).read().split('\n')
             if line is not None and len(line) > 0]
    if not lines:
        self.output.print_and_log('File %s is empty!' % filename, logging.ERROR)
        exit(-1)
    return helper.deduplicate(lines) if dedup else lines
def _init_scan_options(self):
# Session
self.session = session()
self.session.timeout = self.args.timeout
self.session.verify = False
# TODO: debug and check
# self.session.mount("http://", HTTPAdapter(max_retries=self.args.max_retries))
# self.session.mount("https://", HTTPAdapter(max_retries=self.args.max_retries))
# http://stackoverflow.com/questions/15431044/can-i-set-max-retries-for-requests-request
# Max retries
adapters.DEFAULT_RETRIES = self.args.max_retries
# TOR
if self.args.tor:
self.output.write_log("TOR usage detected. Making some checks.")
self.session.proxies = {
'http': 'socks5://127.0.0.1:9050',
'https': 'socks5://127.0.0.1:9050'
}
url = 'http://ifconfig.me/ip'
real_ip, tor_ip = None, None
# Ger real IP address
try:
real_ip = get(url).text.strip()
except Exception as exception:
self.output.print_and_log("Couldn't get real IP address. Check yout internet connection.",
logging.ERROR)
self.output.write_log(str(exception), logging.ERROR)
exit(-1)
# Get TOR IP address
try:
tor_ip = self.session.get(url).text.strip()
except Exception as exception:
self.output.print_and_log("TOR socks proxy doesn't seem to be working.", logging.ERROR)
self.output.write_log(str(exception), logging.ERROR)
exit(-1)
# Show IP addresses
self.output.print_and_log('Real IP: %s TOR IP: %s' % (real_ip, tor_ip))
#.........這裏部分代碼省略.........
示例15: get
# 需要導入模塊: from gevent.queue import JoinableQueue [as 別名]
# 或者: from gevent.queue.JoinableQueue import get [as 別名]
def get(self, *args, **kw):
    """Fetch an item via JoinableQueue.get and log it for debugging."""
    item = JoinableQueue.get(self, *args, **kw)
    log.debug("operating on {}".format(item))
    return item