本文整理匯總了Python中scrapy.xlib.tx.HTTPConnectionPool.closeCachedConnections方法的典型用法代碼示例。如果您正苦於以下問題:Python HTTPConnectionPool.closeCachedConnections方法的具體用法?Python HTTPConnectionPool.closeCachedConnections怎麽用?Python HTTPConnectionPool.closeCachedConnections使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在類scrapy.xlib.tx.HTTPConnectionPool
的用法示例。
在下文中一共展示了HTTPConnectionPool.closeCachedConnections方法的6個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Python代碼示例。
示例1: HTTP11DownloadHandler
# 需要導入模塊: from scrapy.xlib.tx import HTTPConnectionPool [as 別名]
# 或者: from scrapy.xlib.tx.HTTPConnectionPool import closeCachedConnections [as 別名]
class HTTP11DownloadHandler(object):
    """HTTP/1.1 download handler backed by a shared persistent connection pool."""

    def __init__(self, settings):
        # One persistent pool per handler; per-host concurrency mirrors the
        # CONCURRENT_REQUESTS_PER_DOMAIN setting.
        pool = HTTPConnectionPool(reactor, persistent=True)
        pool.maxPersistentPerHost = settings.getint('CONCURRENT_REQUESTS_PER_DOMAIN')
        pool._factory.noisy = False
        self._pool = pool
        self._contextFactoryClass = load_object(settings['DOWNLOADER_CLIENTCONTEXTFACTORY'])
        self._contextFactory = self._contextFactoryClass()
        # Seconds to wait before forcing the close() deferred to fire.
        self._disconnect_timeout = 1

    def download_request(self, request, spider):
        """Return a deferred that fires with the downloaded response."""
        downloader = ScrapyAgent(contextFactory=self._contextFactory, pool=self._pool)
        return downloader.download_request(request)

    def close(self):
        """Close cached pool connections, guarding against an unbounded hang.

        closeCachedConnections can hang forever on network or server issues
        (see https://twistedmatrix.com/trac/ticket/7738), and it does not
        handle external errbacks, so we fire its deferred ourselves after
        `_disconnect_timeout` seconds via a callback.
        """
        closed = self._pool.closeCachedConnections()
        timeout_call = reactor.callLater(self._disconnect_timeout, closed.callback, [])

        def _cancel_timeout(result):
            # The pool closed on its own; the watchdog is no longer needed.
            if timeout_call.active():
                timeout_call.cancel()
            return result

        closed.addBoth(_cancel_timeout)
        return closed
示例2: HTTP11DownloadHandler
# 需要導入模塊: from scrapy.xlib.tx import HTTPConnectionPool [as 別名]
# 或者: from scrapy.xlib.tx.HTTPConnectionPool import closeCachedConnections [as 別名]
class HTTP11DownloadHandler(object):
    """HTTP/1.1 download handler with TLS-method-aware context factory support."""

    def __init__(self, settings):
        # Persistent pool shared by every request this handler issues.
        pool = HTTPConnectionPool(reactor, persistent=True)
        pool.maxPersistentPerHost = settings.getint('CONCURRENT_REQUESTS_PER_DOMAIN')
        pool._factory.noisy = False
        self._pool = pool

        self._sslMethod = openssl_methods[settings.get('DOWNLOADER_CLIENT_TLS_METHOD')]
        self._contextFactoryClass = load_object(settings['DOWNLOADER_CLIENTCONTEXTFACTORY'])
        # Prefer a context factory that accepts an OpenSSL method; fall back to
        # the no-argument constructor (with a warning) for older factories.
        try:
            self._contextFactory = self._contextFactoryClass(method=self._sslMethod)
        except TypeError:
            self._contextFactory = self._contextFactoryClass()
            msg = """
 '%s' does not accept `method` argument (type OpenSSL.SSL method,\
 e.g. OpenSSL.SSL.SSLv23_METHOD).\
 Please upgrade your context factory class to handle it or ignore it.""" % (
                settings['DOWNLOADER_CLIENTCONTEXTFACTORY'],)
            warnings.warn(msg)

        self._default_maxsize = settings.getint('DOWNLOAD_MAXSIZE')
        self._default_warnsize = settings.getint('DOWNLOAD_WARNSIZE')
        # Seconds before close() forces its deferred to fire.
        self._disconnect_timeout = 1

    def download_request(self, request, spider):
        """Return a deferred that fires with the downloaded response.

        Per-spider `download_maxsize` / `download_warnsize` attributes
        override the project-wide defaults when present.
        """
        downloader = ScrapyAgent(
            contextFactory=self._contextFactory,
            pool=self._pool,
            maxsize=getattr(spider, 'download_maxsize', self._default_maxsize),
            warnsize=getattr(spider, 'download_warnsize', self._default_warnsize))
        return downloader.download_request(request)

    def close(self):
        """Close cached pool connections, guarding against an unbounded hang.

        closeCachedConnections can hang forever on network or server issues
        (see https://twistedmatrix.com/trac/ticket/7738), and it does not
        handle external errbacks, so we fire its deferred ourselves after
        `_disconnect_timeout` seconds via a callback.
        """
        closed = self._pool.closeCachedConnections()
        timeout_call = reactor.callLater(self._disconnect_timeout, closed.callback, [])

        def _cancel_timeout(result):
            # The pool closed on its own; the watchdog is no longer needed.
            if timeout_call.active():
                timeout_call.cancel()
            return result

        closed.addBoth(_cancel_timeout)
        return closed
示例3: HTTP11DownloadHandler
# 需要導入模塊: from scrapy.xlib.tx import HTTPConnectionPool [as 別名]
# 或者: from scrapy.xlib.tx.HTTPConnectionPool import closeCachedConnections [as 別名]
class HTTP11DownloadHandler(object):
    """Minimal HTTP/1.1 download handler over a persistent connection pool."""

    def __init__(self, settings):
        self._pool = HTTPConnectionPool(reactor, persistent=True)
        self._contextFactoryClass = load_object(settings['DOWNLOADER_CLIENTCONTEXTFACTORY'])
        self._contextFactory = self._contextFactoryClass()

    def download_request(self, request, spider):
        """Return a deferred that fires with the downloaded response."""
        downloader = ScrapyAgent(contextFactory=self._contextFactory, pool=self._pool)
        return downloader.download_request(request)

    def close(self):
        """Tear down all cached (persistent) connections in the pool."""
        return self._pool.closeCachedConnections()
示例4: HTTP11DownloadHandler
# 需要導入模塊: from scrapy.xlib.tx import HTTPConnectionPool [as 別名]
# 或者: from scrapy.xlib.tx.HTTPConnectionPool import closeCachedConnections [as 別名]
class HTTP11DownloadHandler(object):
    """HTTP/1.1 download handler with a tuned persistent connection pool."""

    def __init__(self, settings):
        # Cap per-host persistent connections at the configured domain concurrency.
        pool = HTTPConnectionPool(reactor, persistent=True)
        pool.maxPersistentPerHost = settings.getint("CONCURRENT_REQUESTS_PER_DOMAIN")
        pool._factory.noisy = False
        self._pool = pool
        self._contextFactoryClass = load_object(settings["DOWNLOADER_CLIENTCONTEXTFACTORY"])
        self._contextFactory = self._contextFactoryClass()

    def download_request(self, request, spider):
        """Return a deferred that fires with the downloaded response."""
        downloader = ScrapyAgent(contextFactory=self._contextFactory, pool=self._pool)
        return downloader.download_request(request)

    def close(self):
        """Tear down all cached (persistent) connections in the pool."""
        return self._pool.closeCachedConnections()
示例5: MyDownloadHandler
# 需要導入模塊: from scrapy.xlib.tx import HTTPConnectionPool [as 別名]
# 或者: from scrapy.xlib.tx.HTTPConnectionPool import closeCachedConnections [as 別名]
class MyDownloadHandler(object):
    """Download interface invoked by the layer above (Scrapy's downloader)."""

    def __init__(self, settings):
        # Keep a persistent pool so close() has connections to tear down,
        # even though the actual download is delegated to MyLogicDownloader.
        pool = HTTPConnectionPool(reactor, persistent=True)
        pool.maxPersistentPerHost = settings.getint('CONCURRENT_REQUESTS_PER_DOMAIN')
        pool._factory.noisy = False
        self._pool = pool
        self._contextFactoryClass = load_object(settings['DOWNLOADER_CLIENTCONTEXTFACTORY'])
        self._contextFactory = self._contextFactoryClass()

    def download_request(self, request, spider):
        """Main (asynchronous) download entry point; returns a Twisted deferred."""
        return MyLogicDownloader().download(request)

    def close(self):
        """Tear down all cached (persistent) connections in the pool."""
        return self._pool.closeCachedConnections()
示例6: SeleniumDownloadHandler
# 需要導入模塊: from scrapy.xlib.tx import HTTPConnectionPool [as 別名]
# 或者: from scrapy.xlib.tx.HTTPConnectionPool import closeCachedConnections [as 別名]
class SeleniumDownloadHandler(object):
    """Download interface that delegates page fetching to a Selenium-based downloader."""

    def __init__(self, settings):
        # Pool is kept only so close() can release persistent connections;
        # downloads themselves go through SeleniumLogicDownloader.
        pool = HTTPConnectionPool(reactor, persistent=True)
        pool.maxPersistentPerHost = settings.getint('CONCURRENT_REQUESTS_PER_DOMAIN')
        pool._factory.noisy = False
        self._pool = pool
        self._contextFactoryClass = load_object(settings['DOWNLOADER_CLIENTCONTEXTFACTORY'])
        self._contextFactory = self._contextFactoryClass()
        # NOTE(review): unused in this class's visible methods; presumably
        # consumed by the Selenium downloader or kept for parity — confirm.
        self._disconnect_timeout = 1

    def download_request(self, request, spider):
        """Return a deferred for the Selenium-driven download."""
        return SeleniumLogicDownloader().download(request)

    def close(self):
        """Tear down all cached (persistent) connections in the pool."""
        return self._pool.closeCachedConnections()