

Python DeferredSemaphore.acquire Method Code Examples

This article collects typical usage examples of the Python method twisted.internet.defer.DeferredSemaphore.acquire. If you are wondering what DeferredSemaphore.acquire does, how to use it, or what it looks like in real code, the curated examples below may help. You can also explore further usage examples of twisted.internet.defer.DeferredSemaphore.


The following presents 9 code examples of the DeferredSemaphore.acquire method, sorted by popularity by default.
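
Before the collected examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of the basic pattern: DeferredSemaphore.acquire() returns a Deferred that fires once one of the semaphore's tokens is free, and release() hands the token back to the next waiter.

from twisted.internet import reactor
from twisted.internet.defer import DeferredSemaphore, inlineCallbacks

sem = DeferredSemaphore(2)  # at most two holders at any one time

@inlineCallbacks
def limited_task(n):
    yield sem.acquire()          # fires when a token becomes available
    try:
        print("task %d holds the semaphore" % n)
    finally:
        sem.release()            # give the token to the next waiter

for i in range(5):
    limited_task(i)

reactor.callLater(0.1, reactor.stop)
reactor.run()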

Example 1: PlotlyStreamProducer

# Required import: from twisted.internet.defer import DeferredSemaphore [as alias]
# Or: from twisted.internet.defer.DeferredSemaphore import acquire [as alias]
class PlotlyStreamProducer(object):
    """Implements a producer that copies from a buffer to a plot.ly
    connection.
    """
    implements(IBodyProducer)
    length = UNKNOWN_LENGTH

    def __init__(self, buffer, start_callback=None):
        self.buffer = buffer
        self._done = False
        self._flush = DeferredSemaphore(1)
        self._waiter = DeferredSemaphore(1)
        self._flush.acquire()
        self._started = start_callback
        self._keepalive = LoopingCall(self._send_keepalive)

    @inlineCallbacks
    def startProducing(self, consumer):
        self._keepalive.start(60)
        self._started.callback(None)
        while True:
            # if paused, this will block
            yield self._waiter.acquire()
            while len(self.buffer):
                v = self.buffer.pop(0)
                if v is not None:
                    consumer.write(json.dumps(v))
                consumer.write("\n")
            yield self._waiter.release()

            if self._done: 
                return
            yield self._flush.acquire()

    def pauseProducing(self):
        return self._waiter.acquire()

    def resumeProducing(self):
        return self._waiter.release()

    def stopProducing(self):
        self._done = True
        if self._keepalive.running:
            self._keepalive.stop()

    def _send_keepalive(self):
        self.buffer.append(None)
        self.flush()

    def flush(self):
        if self._flush.tokens == 0:
            self._flush.release()
Developer: ahaas, Project: smap, Lines: 54, Source: plotly.py

Example 2: ThreadedUrllib2TestMixin

# Required import: from twisted.internet.defer import DeferredSemaphore [as alias]
# Or: from twisted.internet.defer.DeferredSemaphore import acquire [as alias]
class ThreadedUrllib2TestMixin(object):

    def setUp(self):
        self._semaphore = DeferredSemaphore(2)

    def tearDown(self):
        pass


    def getPages(self, count, url):
        return gatherResults([self.getPage(url) for i in xrange(0, count)])

    @inlineCallbacks
    def getPage(self, url):
        yield self._semaphore.acquire()
        page = yield deferToThread(self._openPage, url)
        self._semaphore.release()
        returnValue(page)

    def _openPage(self, url):
        log.msg("Opening url: %r" % url)
        return urlopen(url).read()

    @inlineCallbacks
    def getPageLength(self, url):
        response = yield self.getPage(url)
        returnValue(len(response))
Developer: alexstaytuned, Project: tx-pendrell, Lines: 29, Source: _test_comparison.py

Example 3: __init__

# Required import: from twisted.internet.defer import DeferredSemaphore [as alias]
# Or: from twisted.internet.defer.DeferredSemaphore import acquire [as alias]
class DeferredConcurrencyLimiter:
    """Initiliaze me, and then use me as a decorator, to limit the ammount of defers that can execute asynchronously."""
    
    def __init__(self, tokens = 5):
        if tokens < 1:
            raise ValueError("tokens must be > 0")
        
        if tokens == 1:
            self.lock = DeferredLock()
        else:
            self.lock = DeferredSemaphore(tokens)
    
    def _releaseLock(self, response, lock):
        lock.release()
        return response
    
    def _lockAcquired(self, lock, f, *a, **kw):
        d = maybeDeferred(f, *a, **kw)
        d.addBoth(self._releaseLock, lock)
        return d
    
    def __call__(self, f):
        @wraps(f)
        def wrapped(*a, **kw):
            d = self.lock.acquire()
            d.addCallback(self._lockAcquired, f, *a, **kw)
            return d
        
        return wrapped
Developer: e000, Project: prickle, Lines: 31, Source: util.py
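
A hypothetical usage sketch of the decorator above (the fetch function and URLs are illustrative, not part of the prickle project): because __call__ wraps the target in maybeDeferred, the decorated function may return either a plain value or a Deferred, and at most tokens calls are in flight at once.

from twisted.web.client import getPage

limiter = DeferredConcurrencyLimiter(tokens=3)

@limiter
def fetch(url):
    # getPage returns a Deferred; the limiter allows at most 3 concurrent fetches
    return getPage(url)

deferreds = [fetch("http://example.com/page/%d" % i) for i in range(10)]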

Example 4: TwistedWebTestMixin

# Required import: from twisted.internet.defer import DeferredSemaphore [as alias]
# Or: from twisted.internet.defer.DeferredSemaphore import acquire [as alias]
class TwistedWebTestMixin(object):

    def setUp(self):
        self._semaphore = DeferredSemaphore(2)

    def tearDown(self):
        pass


    def getPages(self, count, url):
        return gatherResults([self.getPage(url) for i in xrange(0, count)])

    @inlineCallbacks
    def getPage(self, url):
        yield self._semaphore.acquire()
        page = yield tx_getPage(url)
        self._semaphore.release()
        returnValue(page)

    @inlineCallbacks
    def getPageLength(self, url):
        response = yield self.getPage(url)
        returnValue(len(response))
Developer: alexstaytuned, Project: tx-pendrell, Lines: 26, Source: _test_comparison.py

Example 5: RateLimitedClient

# Required import: from twisted.internet.defer import DeferredSemaphore [as alias]
# Or: from twisted.internet.defer.DeferredSemaphore import acquire [as alias]
class RateLimitedClient(object):
    """A Web client with per-second request limit.
    """

    # Max number of requests per second (can be < 1.0)
    rate_limit = None
    # Grace delay (seconds) when the server throttles us
    grace_delay = 30
    # Max number of parallel requests
    max_concurrency = 5

    def __init__(self, time=None):
        self.sem = DeferredSemaphore(self.max_concurrency)
        self.grace_deferred = None
        self.logger = logging.getLogger("webclient")
        self.time = time or reactor
        self.last_request = 0.0

    def _enable_grace_delay(self, delay):
        if self.grace_deferred:
            # Already enabled by an earlier concurrent request
            return
        self.grace_deferred = Deferred()

        def expire():
            g = self.grace_deferred
            self.grace_deferred = None
            g.callback(None)

        reactor.callLater(self.grace_delay, expire)

    def _delay_if_necessary(self, func, *args, **kwargs):
        d = Deferred()
        d.addCallback(lambda _: func(*args, **kwargs))
        trigger = None
        if self.grace_deferred:
            trigger = self.grace_deferred
        elif self.rate_limit:
            delay = (self.last_request + 1.0 / self.rate_limit) - self.time.seconds()
            if delay > 0:
                self.logger.debug("inserting rate limit delay of %.1f", delay)
                trigger = Deferred()
                self.time.callLater(delay, trigger.callback, None)
        (trigger or maybeDeferred(lambda: None)).chainDeferred(d)
        return d

    def get_page(self, url, *args, **kwargs):
        if isinstance(url, unicode):
            url = url.encode("utf8")

        def schedule_request(_):
            return self._delay_if_necessary(issue_request, None)

        def issue_request(_):
            self.last_request = self.time.seconds()
            self.logger.debug("fetching %r", url)
            return getPage(url, *args, **kwargs)

        def handle_success(value):
            self.sem.release()
            self.logger.debug("got %d bytes for %r", len(value), url)
            return value

        def handle_error(failure):
            self.sem.release()
            failure.trap(HTTPError)
            self.logger.debug("got HTTP error %s", failure.value)
            self.trap_throttling(failure)
            delay = self.grace_delay
            self.logger.warning("we are throttled, delaying by %.1f seconds", delay)
            self._enable_grace_delay(delay)
            # auto-retry
            return do_get_page()

        def do_get_page():
            # We acquire the semaphore *before* seeing if we should delay
            # the request, so that we avoid pounding on the server when
            # the grace period is entered.
            d = self.sem.acquire()
            d.addCallback(schedule_request)
            d.addCallbacks(handle_success, handle_error)
            return d

        return do_get_page()

    def trap_throttling(self, failure):
        """Trap HTTP failures and return if we are
        throttled by the distant site, else re-raise.
        """
        e = failure.value
        if e.status in ("400", "420", "500", "503"):
            return
        failure.raiseException()
Developer: pagedegeek, Project: ipinion-rank, Lines: 95, Source: ipinion.py
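
A brief hypothetical usage sketch (the URL is illustrative): rate_limit and grace_delay are plain class attributes and can be overridden on an instance, while max_concurrency is read in __init__ and therefore has to be set on the class (or a subclass) before construction.

client = RateLimitedClient()
client.rate_limit = 2.0        # at most two requests per second
d = client.get_page(u"http://example.com/api/items")
d.addCallback(lambda body: len(body))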

Example 6: AggregationResponseCache

# Required import: from twisted.internet.defer import DeferredSemaphore [as alias]
# Or: from twisted.internet.defer.DeferredSemaphore import acquire [as alias]
class AggregationResponseCache(object):
    '''
    This holds all the responses being aggregated for a single destination.
    
    One of the main challenges here is to make sure while we're sending the responses,
    we don't get a new response in and not send it.
    '''


    def __init__(self, numSecondsToWait, numMessagesToWaitFor, chordNode):
        '''
        Constructor
        '''
        self.numSecondsToWait = numSecondsToWait
        self.numMessagesToWaitFor = numMessagesToWaitFor
        self.numSecondsToWait = numSecondsToWait
        self.chordNode = chordNode
        self.semaphore = DeferredSemaphore(1)
        self.messageList = [] # Holds tuples of (message, envelope)
        
        # Construct a timer to wait
        self.timerID = None
        
    def addResponse(self, message, envelope):
        '''We use a semaphore to ensure we don't modify the list while sending.'''
        d = self.semaphore.acquire()
        d.addCallback(self._addResponse, message, envelope)
        
    def _addResponse(self, dummy_defResult, message, envelope):
        '''This is called only once we have the semaphore.'''         
        self.messageList.append ( (message, envelope) )
        
        print("DEBUG: AggRespCache: %s  adding message %s " % (self.chordNode.nodeLocation.port, message))
        
        if len(self.messageList) >= self.numMessagesToWaitFor:
            # Send it!
            self._sendResponse()
        else:
            # Make sure a timer is running
            if self.timerID is None or not self.timerID.active():
                self.timerID = reactor.callLater(self.numSecondsToWait, self.sendResponse)
            
            # We're done.
            self.semaphore.release()    
        
            
    def sendResponse(self):
        '''Acquire the semaphore, then send the response.'''
        d = self.semaphore.acquire()
        d.addCallback(self._sendResponse)
        
    
    def _sendResponse(self, dummy_deferResult=None):
        '''Send the response but only after acquiring the semaphore
        '''
        # Copy the list
        messagesListCopy = self.messageList
        self.messageList = []
        
        # Release the semaphore
        self.semaphore.release()
        
        # Stop the timer if it's still going
        if self.timerID is not None and self.timerID.active():
            self.timerID.cancel()
            self.timerID = None
        
        print("DEBUG: AggResponseCache-Sending %d Messages %s" % (len(messagesListCopy), self.chordNode.nodeLocation.port))
        
        # Send a P2P message to the dest with all the responses
        d = self.chordNode.sendSyncMultipleMessage(messagesListCopy, 'p2p') # Will this break message authentication?
        d.addCallback(self.sendAcks, messagesListCopy)
        d.addErrback(self.sendResponseFailed)

#     def emptyMessageList(self, _):
#         self.messageList = []
        
    def sendAcks(self, resultsDict, messageList):
        # Send ACK messages to the nodes for which we aggregated
        
        for (_message, envelope) in messageList:
            # Get the status to return
            msgID = envelope['msgID']
            if msgID not in resultsDict:
                status = False
            else:
                status = resultsDict[msgID]

            d = self.chordNode.sendSingleAck(msgID, envelope['source'], status)
            d.addErrback(self.sendAckFailed, envelope['source'])
                        
            
    def sendAckFailed(self, fail, sourceNode):
        log.err("We failed to SendAck for source %s" % sourceNode, fail)
            
        
    def sendResponseFailed(self, theFailure):
        log.err(theFailure)    
        
        
#......... some of the code is omitted here .........
Developer: danfleck, Project: Class-Chord, Lines: 103, Source: AggregationResponseCache.py
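
Stripped of the Chord-specific parts, the core pattern above is a DeferredSemaphore(1) used as a mutex around a shared list: every access path first acquires the semaphore and only releases it after the list has been read or swapped out. A generic, self-contained sketch of that pattern (the names are illustrative):

from twisted.internet.defer import DeferredSemaphore

lock = DeferredSemaphore(1)   # one token, so it behaves as a mutex
pending = []

def add_item(item):
    d = lock.acquire()
    d.addCallback(_locked_add, item)
    return d

def _locked_add(_lock, item):
    # Only runs once the semaphore has been acquired.
    try:
        pending.append(item)
    finally:
        lock.release()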

Example 7: __init__

# Required import: from twisted.internet.defer import DeferredSemaphore [as alias]
# Or: from twisted.internet.defer.DeferredSemaphore import acquire [as alias]

#......... some of the code is omitted here .........
                # create a new one
                d = self._newConnection(conn_str)

                def connected_cb(indx_conn):
                    logging.debug("IndxConnectionPool _connect connected_cb ({0})".format(indx_conn))
                    self.connections[conn_str].getFree().remove(indx_conn)
                    self.connections[conn_str].getInuse().append(indx_conn)
                    self.semaphore.release()
                    return_d.callback(indx_conn)
                    return

                d.addCallbacks(connected_cb, err_cb)
                return

            # wait for a connection
            def wait_cb(conn):
                logging.debug("IndxConnectionPool _connect wait_cb ({0})".format(conn))
                # already put in 'inuse'
                return_d.callback(conn)
                return

            self.semaphore.release()
            self.connections[conn_str].getWaiting().append(wait_cb)
            return

        def locked_cb(empty):
            logging.debug("IndxConnectionPool _connect locked_cb")
            if conn_str not in self.connections:
                self._newConnections(conn_str).addCallbacks(succeed_cb, err_cb)
            else:
                threads.deferToThread(succeed_cb, None)
#                succeed_cb(None)

        self.semaphore.acquire().addCallbacks(locked_cb, err_cb)
        return return_d

    def _closeOldConnection(self):
        """ Close the oldest connection, so we can open a new one up. """
        # is already in a semaphore lock, from _newConnection
        logging.debug("IndxConnectionPool _closeOldConnection")

        ### we could force quit them through postgresql like this - but instead we kill them from inside
        #query = "SELECT * FROM pg_stat_activity WHERE state = 'idle' AND application_name = %s AND query != 'LISTEN wb_new_version' ORDER BY state_change LIMIT 1;"
        #params = [indx_pg2.APPLICATION_NAME]

        return_d = Deferred()

        def err_cb(failure):
            return_d.errback(failure)

        ages = {}
        for conn_str, dbpool in self.connections.items():
            lastused = dbpool.getTime()
            if lastused not in ages:
                ages[lastused] = []
            ages[lastused].append(dbpool)

        times = ages.keys()
        times.sort()

        pool_queue = []
        for timekey in times:
            pools = ages[timekey]
            pool_queue.extend(pools)

        def removed_cb(count):
Developer: sociam, Project: indx, Lines: 70, Source: connectionpool.py

Example 8: DBConnectionPool

# Required import: from twisted.internet.defer import DeferredSemaphore [as alias]
# Or: from twisted.internet.defer.DeferredSemaphore import acquire [as alias]
class DBConnectionPool():
    """ A pool of DB connections for a specific connection string / DB. """

    def __init__(self, conn_str):
        self.waiting = []
        self.inuse = []
        self.free = []

        self.semaphore = DeferredSemaphore(1)
        self.updateTime()

    def __unicode__(self):
        return self.__str__()

    def __str__(self):
        return "waiting: {0}, inuse: {1}, free: {2}, semaphore: {3}, lastused: {4}".format(self.waiting, self.inuse, self.free, self.semaphore, self.lastused)

    def updateTime(self):
        self.lastused = time.mktime(time.gmtime()) # epoch time

    def getTime(self):
        return self.lastused

    def getWaiting(self):
        self.updateTime()
        return self.waiting

    def getInuse(self):
        self.updateTime()
        return self.inuse

    def getFree(self):
        self.updateTime()
        return self.free

    def freeConnection(self, conn):
        """ Free a connection from this DBPool. """

        def locked_cb(empty):
            logging.debug("DBConnectionPool locked_cb")
            self.getInuse().remove(conn)
            
            if len(self.getWaiting()) > 0:
                callback = self.getWaiting().pop()
                self.getInuse().append(conn)
                self.semaphore.release()
                callback(conn)
            else: 
                self.getFree().append(conn)
                self.semaphore.release()

        def err_cb(failure):
            failure.trap(Exception)
            logging.error("DBConnectionPool free, err_cb: {0}".format(failure.value))
            self.semaphore.release()

        self.semaphore.acquire().addCallbacks(locked_cb, err_cb)


    def removeAll(self, count):
        """ Remove all free connections (usually because they're old and we're in
            a freeing-up period).
        """
        logging.debug("DBConnectionPool removeAll called, count: {0}".format(count))
        return_d = Deferred()
        self.updateTime()

        def err_cb(failure):
            self.semaphore.release()
            return_d.errback(failure)

        def locked_cb(count):
            # immediately close the free connections
            while len(self.free) > 0:
                conn = self.free.pop(0)
                conn.close()
                count += 1

            self.semaphore.release()
            return_d.callback(count)

        self.semaphore.acquire().addCallbacks(lambda s: locked_cb(count), err_cb)
        return return_d
Developer: sociam, Project: indx, Lines: 85, Source: connectionpool.py
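
As a design note, the acquire()/addCallbacks()/release() pattern used throughout this class can also be written with DeferredSemaphore.run(), which acquires the semaphore, invokes the callable via maybeDeferred, and releases the semaphore once the resulting Deferred fires. A minimal illustrative sketch (the free_connections list is a stand-in, not the class above):

from twisted.internet.defer import DeferredSemaphore

sem = DeferredSemaphore(1)
free_connections = []  # stand-in for the pool's list of idle connections

def close_free_connections(count):
    # Runs only while the semaphore is held; run() releases it afterwards.
    while free_connections:
        free_connections.pop(0).close()
        count += 1
    return count

d = sem.run(close_free_connections, 0)  # fires with the number of connections closed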

Example 9: BaseQtWebKitMiddleware

# Required import: from twisted.internet.defer import DeferredSemaphore [as alias]
# Or: from twisted.internet.defer.DeferredSemaphore import acquire [as alias]

#......... some of the code is omitted here .........
        qt_request = QNetworkRequest(QUrl(scrapy_request.url))
        for header, values in scrapy_request.headers.items():
            qt_request.setRawHeader(header, b', '.join(values))

        try:
            operation = HTTP_METHOD_TO_QT_OPERATION[scrapy_request.method]
        except KeyError:
            operation = QNetworkAccessManager.CustomOperation
            qt_request.setAttribute(QNetworkRequest.CustomVerbAttribute,
                                    scrapy_request.method)

        qt_request.setAttribute(QNetworkRequest.CacheSaveControlAttribute,
                                False)

        req_body = QByteArray(scrapy_request.body)

        return qt_request, operation, req_body

    @inlineCallbacks
    def process_request(self, request, spider):
        if self.cookies_middleware:
            yield self.cookies_middleware.process_request(request, spider)

        if isinstance(request, QtWebKitRequest):
            if request.webpage:
                # Request is to continue processing with an existing webpage
                # object.
                webpage = request.webpage
                request = request.replace(webpage=None)
                webpage.networkAccessManager().request = request
                returnValue(self._handle_page_request(spider, request,
                                                      webpage))
            else:
                yield self.semaphore.acquire()
                response = yield self.create_page(request, spider)
                returnValue(response)

    def process_response(self, request, response, spider):
        if self.cookies_middleware:
            return self.cookies_middleware.process_response(request, response,
                                                            spider)
        else:
            return response

    def ensure_qapplication(self):
        """Create and setup a QApplication if one does not already exist."""
        if not QApplication.instance():
            args = ["scrapy"]
            if self.qt_platform is not None:
                args.extend(["-platform", self.qt_platform])
            app = QApplication(args)
            self._schedule_qt_event_loop(app)
            _QApplicationStopper(self._crawler.signals, app)

    def create_page(self, request, spider):
        """

        Create a webpage object, load a request on it, return a deferred that
        fires with a response on page load.

        """

        self.ensure_qapplication()

        webpage = WebPage()
        self._setup_page(webpage,
Developer: ArturGaspar, Project: scrapy-qtwebkit, Lines: 70, Source: __init__.py


Note: the twisted.internet.defer.DeferredSemaphore.acquire examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from open-source projects contributed by many developers, and the copyright of the source code belongs to the original authors. Please consult the corresponding project's License before distributing or using the code; do not reproduce without permission.