本文整理汇总了Python中tornado.ioloop.IOLoop.set_blocking_signal_threshold方法的典型用法代码示例。如果您正苦于以下问题:Python IOLoop.set_blocking_signal_threshold方法的具体用法?Python IOLoop.set_blocking_signal_threshold怎么用?Python IOLoop.set_blocking_signal_threshold使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类 tornado.ioloop.IOLoop 的用法示例。
在下文中一共展示了IOLoop.set_blocking_signal_threshold方法的1个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: TornadoOctopus
# 需要导入模块: from tornado.ioloop import IOLoop [as 别名]
# 或者: from tornado.ioloop.IOLoop import set_blocking_signal_threshold [as 别名]
#.........这里部分代码省略.........
self.running_urls -= 1
handler(url, response)
return
logging.info('Fetching %s...' % url)
request = HTTPRequest(
url=url,
method=method,
connect_timeout=self.connect_timeout_in_seconds,
request_timeout=self.request_timeout_in_seconds,
prepare_curl_callback=self.handle_curl_callback,
**kw
)
self.http_client.fetch(request, self.handle_request(url, handler))
def handle_curl_callback(self, curl):
    """Configure the curl handle right before a request is issued.

    When connection reuse is disabled, force curl to open a brand-new
    connection instead of recycling one from its pool.
    """
    if self.allow_connection_reuse:
        return
    curl.setopt(pycurl.FRESH_CONNECT, 1)
def get_next_url(self, request_url=None, handler=None, method=None, **kw):
    """Dispatch the next URL, pulling one from the queue when none is given.

    With no explicit *request_url*, the most recently queued request is
    popped; when the queue is empty this silently does nothing.
    """
    if request_url is None:
        try:
            request_url, handler, method, kw = self.url_queue.pop()
        except IndexError:
            # Nothing pending — bail out quietly.
            return
    self.fetch_next_url(request_url, handler, method, **kw)
def fetch_next_url(self, request_url, handler, method, **kw):
    """Fetch *request_url*, honouring the rate limiter when one is set.

    Returns True when the fetch was dispatched. Returns False when the
    limiter denied a slot: the request is re-queued and a retry is
    scheduled after the limiter's miss timeout.
    """
    limiter = self.limiter
    if not limiter or limiter.acquire(request_url):
        logging.debug('Queue has space available for fetching %s.', request_url)
        self.fetch(request_url, handler, method, **kw)
        return True
    logging.info('Could not acquire limit for url "%s".', request_url)
    self.url_queue.append((request_url, handler, method, kw))
    # Retry once the limiter's miss window has elapsed.
    retry_in = timedelta(seconds=limiter.limiter_miss_timeout_ms / 1000.0)
    self.ioloop.add_timeout(retry_in, self.get_next_url)
    limiter.publish_lock_miss(request_url)
    return False
def handle_request(self, url, callback):
    """Build the completion handler passed to the HTTP client for *url*.

    The returned closure converts the tornado response via
    from_tornado_response, caches non-error results when caching is on,
    releases the rate limiter, invokes *callback* (exceptions are logged,
    never propagated), keeps pulling queued URLs while concurrency
    allows, and stops the loop once nothing is in flight or queued.
    """
    def handle(response):
        logging.debug('Handler called for url %s...', url)
        self.running_urls -= 1
        result = self.from_tornado_response(url, response)
        logging.info('Got response(%s) from %s.', result.status_code, url)
        # Cache anything below the client-error range when caching is enabled.
        if self.cache and result and result.status_code < 399:
            logging.debug('Putting %s into cache.', url)
            self.response_cache.put(url, result)
        if self.limiter:
            self.limiter.release(url)
        try:
            callback(url, result)
        except Exception:
            # User callbacks must never kill the fetch pipeline.
            logging.exception('Error calling callback for %s.', url)
        if self.running_urls < self.concurrency and self.url_queue:
            self.get_next_url()
        logging.debug('Getting %d urls and still have %d more urls to get...', self.running_urls, self.remaining_requests)
        if self.running_urls < 1 and self.remaining_requests == 0:
            logging.debug('Nothing else to get. Stopping Octopus...')
            self.stop()
    return handle
def handle_wait_timeout(self, signal_number, frames):
    """Signal handler armed by wait(): force-stop the loop on timeout."""
    logging.debug('Timeout waiting for IOLoop to finish. Stopping IOLoop manually.')
    self.stop(force=True)
def wait(self, timeout=10):
    """Run the IOLoop until every queued/in-flight URL has been handled.

    A truthy *timeout* (seconds) arms set_blocking_signal_threshold so
    handle_wait_timeout can force-stop a stuck loop; a falsy value waits
    indefinitely. Returns immediately when there is nothing to process.
    """
    self.last_timeout = timeout
    if not self.url_queue and not self.running_urls:
        logging.debug('No urls to wait for. Returning immediately.')
        return
    if not timeout:
        logging.debug('Waiting for urls to be retrieved.')
    else:
        logging.debug('Waiting for urls to be retrieved for %s seconds.', timeout)
        self.ioloop.set_blocking_signal_threshold(timeout, self.handle_wait_timeout)
    logging.info('Starting IOLoop with %d URLs still left to process.', self.remaining_requests)
    self.ioloop.start()
@property
def remaining_requests(self):
    """Number of queued requests that have not been dispatched yet."""
    pending = self.url_queue
    return len(pending)
def stop(self, force=False):
    """Stop the IOLoop.

    *force* is accepted for signature compatibility with callers such as
    handle_wait_timeout but is not used in the visible implementation.
    """
    logging.info('Stopping IOLoop with %d URLs still left to process.', self.remaining_requests)
    self.ioloop.stop()