當前位置: 首頁>>代碼示例>>Python>>正文


Python HTTPConnectionPool.closeCachedConnections方法代碼示例

本文整理匯總了Python中scrapy.xlib.tx.HTTPConnectionPool.closeCachedConnections方法的典型用法代碼示例。如果您正苦於以下問題:Python HTTPConnectionPool.closeCachedConnections方法的具體用法?Python HTTPConnectionPool.closeCachedConnections怎麽用?Python HTTPConnectionPool.closeCachedConnections使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在scrapy.xlib.tx.HTTPConnectionPool的用法示例。


在下文中一共展示了HTTPConnectionPool.closeCachedConnections方法的6個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Python代碼示例。

示例1: HTTP11DownloadHandler

# 需要導入模塊: from scrapy.xlib.tx import HTTPConnectionPool [as 別名]
# 或者: from scrapy.xlib.tx.HTTPConnectionPool import closeCachedConnections [as 別名]
class HTTP11DownloadHandler(object):
    """HTTP/1.1 download handler backed by a shared persistent
    Twisted connection pool."""

    def __init__(self, settings):
        # One persistent pool for all requests; cap connections per host at
        # the configured per-domain concurrency and silence factory logging.
        pool = HTTPConnectionPool(reactor, persistent=True)
        pool.maxPersistentPerHost = settings.getint('CONCURRENT_REQUESTS_PER_DOMAIN')
        pool._factory.noisy = False
        self._pool = pool
        self._contextFactoryClass = load_object(settings['DOWNLOADER_CLIENTCONTEXTFACTORY'])
        self._contextFactory = self._contextFactoryClass()
        # Seconds to wait for the pool to drain before forcing close() to finish.
        self._disconnect_timeout = 1

    def download_request(self, request, spider):
        """Return a deferred for the HTTP download"""
        handler_agent = ScrapyAgent(contextFactory=self._contextFactory,
                                    pool=self._pool)
        return handler_agent.download_request(request)

    def close(self):
        """Close the pool's cached connections, forcing completion after
        ``_disconnect_timeout`` seconds.

        ``closeCachedConnections`` can hang on network or server issues and
        does not honour external errbacks (Twisted ticket
        https://twistedmatrix.com/trac/ticket/7738), so we fire its deferred
        ourselves via a delayed call, and cancel that call if the pool
        finishes on its own first.
        """
        closed = self._pool.closeCachedConnections()
        forced = reactor.callLater(self._disconnect_timeout, closed.callback, [])

        def _cancel_forced_callback(result):
            # Pool drained (or timeout fired): drop the pending forced
            # callback so the deferred cannot be fired twice.
            if forced.active():
                forced.cancel()
            return result

        closed.addBoth(_cancel_forced_callback)
        return closed
開發者ID:amogh14,項目名稱:fintra,代碼行數:36,代碼來源:http11.py

示例2: HTTP11DownloadHandler

# 需要導入模塊: from scrapy.xlib.tx import HTTPConnectionPool [as 別名]
# 或者: from scrapy.xlib.tx.HTTPConnectionPool import closeCachedConnections [as 別名]
class HTTP11DownloadHandler(object):
    """HTTP/1.1 download handler with a shared persistent connection pool,
    configurable TLS method, and per-spider download size limits."""

    def __init__(self, settings):
        # Single persistent pool shared by every request; connection count
        # per host mirrors the per-domain concurrency setting.
        pool = HTTPConnectionPool(reactor, persistent=True)
        pool.maxPersistentPerHost = settings.getint('CONCURRENT_REQUESTS_PER_DOMAIN')
        pool._factory.noisy = False
        self._pool = pool

        self._sslMethod = openssl_methods[settings.get('DOWNLOADER_CLIENT_TLS_METHOD')]
        self._contextFactoryClass = load_object(settings['DOWNLOADER_CLIENTCONTEXTFACTORY'])
        # Prefer a context factory that accepts an OpenSSL method; fall back
        # to the no-argument constructor for older factory classes and warn.
        try:
            self._contextFactory = self._contextFactoryClass(method=self._sslMethod)
        except TypeError:
            self._contextFactory = self._contextFactoryClass()
            msg = """
 '%s' does not accept `method` argument (type OpenSSL.SSL method,\
 e.g. OpenSSL.SSL.SSLv23_METHOD).\
 Please upgrade your context factory class to handle it or ignore it.""" % (
                settings['DOWNLOADER_CLIENTCONTEXTFACTORY'],)
            warnings.warn(msg)
        self._default_maxsize = settings.getint('DOWNLOAD_MAXSIZE')
        self._default_warnsize = settings.getint('DOWNLOAD_WARNSIZE')
        # Seconds to wait for the pool to drain before forcing close() to finish.
        self._disconnect_timeout = 1

    def download_request(self, request, spider):
        """Return a deferred for the HTTP download"""
        # Spiders may override the global size limits via attributes.
        maxsize = getattr(spider, 'download_maxsize', self._default_maxsize)
        warnsize = getattr(spider, 'download_warnsize', self._default_warnsize)
        agent = ScrapyAgent(contextFactory=self._contextFactory, pool=self._pool,
                            maxsize=maxsize, warnsize=warnsize)
        return agent.download_request(request)

    def close(self):
        """Close the pool's cached connections, forcing completion after
        ``_disconnect_timeout`` seconds.

        ``closeCachedConnections`` can hang on network or server issues and
        does not honour external errbacks (Twisted ticket
        https://twistedmatrix.com/trac/ticket/7738), so we fire its deferred
        ourselves via a delayed call, and cancel that call if the pool
        finishes on its own first.
        """
        closed = self._pool.closeCachedConnections()
        forced = reactor.callLater(self._disconnect_timeout, closed.callback, [])

        def _cancel_forced_callback(result):
            # Pool drained (or timeout fired): drop the pending forced
            # callback so the deferred cannot be fired twice.
            if forced.active():
                forced.cancel()
            return result

        closed.addBoth(_cancel_forced_callback)
        return closed
開發者ID:Manuel4131,項目名稱:scrapy,代碼行數:53,代碼來源:http11.py

示例3: HTTP11DownloadHandler

# 需要導入模塊: from scrapy.xlib.tx import HTTPConnectionPool [as 別名]
# 或者: from scrapy.xlib.tx.HTTPConnectionPool import closeCachedConnections [as 別名]
class HTTP11DownloadHandler(object):
    """Minimal HTTP/1.1 download handler: one persistent connection pool
    plus the configured client TLS context factory."""

    def __init__(self, settings):
        self._pool = HTTPConnectionPool(reactor, persistent=True)
        self._contextFactoryClass = load_object(settings['DOWNLOADER_CLIENTCONTEXTFACTORY'])
        self._contextFactory = self._contextFactoryClass()

    def download_request(self, request, spider):
        """Return a deferred for the HTTP download"""
        handler_agent = ScrapyAgent(contextFactory=self._contextFactory,
                                    pool=self._pool)
        return handler_agent.download_request(request)

    def close(self):
        # Deferred fires once all cached pool connections are closed.
        return self._pool.closeCachedConnections()
開發者ID:HolyDays,項目名稱:scrapy,代碼行數:16,代碼來源:http11.py

示例4: HTTP11DownloadHandler

# 需要導入模塊: from scrapy.xlib.tx import HTTPConnectionPool [as 別名]
# 或者: from scrapy.xlib.tx.HTTPConnectionPool import closeCachedConnections [as 別名]
class HTTP11DownloadHandler(object):
    """HTTP/1.1 download handler sharing one quiet persistent pool, capped
    at the per-domain concurrency limit."""

    def __init__(self, settings):
        pool = HTTPConnectionPool(reactor, persistent=True)
        pool.maxPersistentPerHost = settings.getint("CONCURRENT_REQUESTS_PER_DOMAIN")
        # Suppress per-connection factory log noise.
        pool._factory.noisy = False
        self._pool = pool
        self._contextFactoryClass = load_object(settings["DOWNLOADER_CLIENTCONTEXTFACTORY"])
        self._contextFactory = self._contextFactoryClass()

    def download_request(self, request, spider):
        """Return a deferred for the HTTP download"""
        return ScrapyAgent(contextFactory=self._contextFactory,
                           pool=self._pool).download_request(request)

    def close(self):
        # Deferred fires once all cached pool connections are closed.
        return self._pool.closeCachedConnections()
開發者ID:alvarofierroclavero,項目名稱:scrapy,代碼行數:17,代碼來源:http11.py

示例5: MyDownloadHandler

# 需要導入模塊: from scrapy.xlib.tx import HTTPConnectionPool [as 別名]
# 或者: from scrapy.xlib.tx.HTTPConnectionPool import closeCachedConnections [as 別名]
class MyDownloadHandler(object):
    """Download interface invoked by the upper (downloader) layer."""

    def __init__(self, settings):
        pool = HTTPConnectionPool(reactor, persistent=True)
        pool.maxPersistentPerHost = settings.getint('CONCURRENT_REQUESTS_PER_DOMAIN')
        pool._factory.noisy = False
        self._pool = pool
        self._contextFactoryClass = load_object(settings['DOWNLOADER_CLIENTCONTEXTFACTORY'])
        self._contextFactory = self._contextFactoryClass()

    def download_request(self, request, spider):
        """Main (asynchronous) download entry point; returns a Twisted
        deferred for the download result."""
        # Actual fetching is delegated to the custom logic downloader.
        return MyLogicDownloader().download(request)

    def close(self):
        # Deferred fires once all cached pool connections are closed.
        return self._pool.closeCachedConnections()
開發者ID:shaobenbin,項目名稱:apk-crawler,代碼行數:18,代碼來源:mydownloader.py

示例6: SeleniumDownloadHandler

# 需要導入模塊: from scrapy.xlib.tx import HTTPConnectionPool [as 別名]
# 或者: from scrapy.xlib.tx.HTTPConnectionPool import closeCachedConnections [as 別名]
class SeleniumDownloadHandler(object):
    """Download interface that delegates fetching to a Selenium-based
    logic downloader while keeping a standard connection pool."""

    def __init__(self, settings):
        pool = HTTPConnectionPool(reactor, persistent=True)
        pool.maxPersistentPerHost = settings.getint('CONCURRENT_REQUESTS_PER_DOMAIN')
        pool._factory.noisy = False
        self._pool = pool
        self._contextFactoryClass = load_object(settings['DOWNLOADER_CLIENTCONTEXTFACTORY'])
        self._contextFactory = self._contextFactoryClass()
        # NOTE(review): set but never read in this class — presumably kept
        # for parity with HTTP11DownloadHandler; confirm before removing.
        self._disconnect_timeout = 1

    def download_request(self, request, spider):
        # Delegate the actual fetch to the Selenium logic downloader.
        return SeleniumLogicDownloader().download(request)

    def close(self):
        # Deferred fires once all cached pool connections are closed.
        return self._pool.closeCachedConnections()
開發者ID:fennywhu,項目名稱:jstest,代碼行數:21,代碼來源:mydownloader.py


注:本文中的scrapy.xlib.tx.HTTPConnectionPool.closeCachedConnections方法示例由純淨天空整理自Github/MSDocs等開源代碼及文檔管理平台,相關代碼片段篩選自各路編程大神貢獻的開源項目,源碼版權歸原作者所有,傳播和使用請參考對應項目的License;未經允許,請勿轉載。