

Python LifoQueue.empty method code examples

This article collects and summarizes typical usage examples of the queue.LifoQueue.empty method in Python. If you have been wondering exactly how to use LifoQueue.empty, what it is for, or what calling it looks like in practice, the curated examples below should help. You can also explore further usage examples of queue.LifoQueue, the class this method belongs to.


The following sections present 11 code examples of LifoQueue.empty, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
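As a quick orientation before the project examples: LifoQueue is the standard library's stack-like queue, and empty() reports whether it currently holds no items. A minimal, self-contained sketch (standard library only, not taken from any of the projects below):

from queue import LifoQueue

stack = LifoQueue()
for item in ("a", "b", "c"):
    stack.put(item)

# Drain the stack; items come back in last-in, first-out order: c, b, a
while not stack.empty():
    print(stack.get())

Note that in multi-threaded code empty() is only a snapshot: another thread may add or remove items between the check and a subsequent get(), which is why several of the examples below combine it with task_done() and join().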

Example 1: ThreadedNormalWorker

# Required import: from queue import LifoQueue [as alias]
# Or: from queue.LifoQueue import empty [as alias]
class ThreadedNormalWorker(object):
    def __init__(self, print_errors=False):
        self.print_errors = print_errors
        self.queue = LifoQueue()

    def get_url_bulk(self):
        normals = Normals.objects(access_success=False)
        for i in normals:
            self.queue.put(item=i)

    def grab_from_queue(self):
        while not self.queue.empty():
            url = self.queue.get()
            normals_finder = NormalsSpider(url=url.url,
                                           print_errors=self.print_errors)
            normals_finder.update_normals_data()
            print(url.url)
            self.queue.task_done()

    def start(self, n_threads):
        self.get_url_bulk()
        for i in range(n_threads):
            thread = Thread(target=self.grab_from_queue)  # pass the method itself; calling it here would run it in the main thread
            thread.start()
        self.queue.join()
Author: joaoTrevizoli, Project: climatemps-data, Lines: 27, Source: bots.py
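A hedged usage sketch for the worker above, assuming the project's Normals and NormalsSpider objects are importable (their modules are project-specific and not shown here):

worker = ThreadedNormalWorker(print_errors=True)
worker.start(n_threads=4)  # fills the queue once, then drains it from four threads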

Example 2: match_query

# Required import: from queue import LifoQueue [as alias]
# Or: from queue.LifoQueue import empty [as alias]
    def match_query(self, query):
        '''Given a search query, return a tuple containing a regex match and
        trigger object that matches the given query.  If no match can be found,
        return a tuple of (None, None).'''

        sink = LifoQueue()

        while not self.triggers.empty():
            trigger = self.triggers.get()
            match = trigger.pattern.match(query)

            if match:
                break

            else:
                sink.put(trigger)
                trigger = None

        while not sink.empty():
            self.triggers.put(sink.get())

        if trigger:
            self.triggers.put(trigger)
            return (match, trigger)

        return (None, None)
Author: jreese, Project: congress, Lines: 28, Source: congress.py
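The drain-and-restore idiom above can be reproduced with nothing but the standard library; a minimal sketch with hypothetical trigger objects (these names are illustrative and not part of the congress project):

import re
from collections import namedtuple
from queue import LifoQueue

Trigger = namedtuple('Trigger', 'pattern name')

triggers = LifoQueue()
triggers.put(Trigger(re.compile(r'hello\b'), 'greeting'))
triggers.put(Trigger(re.compile(r'bye\b'), 'farewell'))

sink = LifoQueue()
match, hit = None, None
while not triggers.empty():
    trigger = triggers.get()
    match = trigger.pattern.match('hello world')
    if match:
        hit = trigger
        break
    sink.put(trigger)          # remember the triggers that did not match

while not sink.empty():        # put the non-matching triggers back
    triggers.put(sink.get())
if hit:
    triggers.put(hit)

print(hit.name if hit else None)  # -> greeting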

Example 3: __init__

# Required import: from queue import LifoQueue [as alias]
# Or: from queue.LifoQueue import empty [as alias]
class QueryQueue:
    def __init__(self):
        self.queue = LifoQueue()
        self.comm_sender = CommSender()
        th = threading.Thread(target=self.send_require)
        th.start()

    def put(self, item):
        self.queue.put(item)

    def send_require(self):
        while True:
            time.sleep(1)
            c = ConnInfo.objects.all()[0]
            q = QueryInfo.objects.all()[0]
            r = RoomInfo.objects.all()[0]
            # if logged out or not connected, just flush the queue
            if c.is_log == "False" or c.is_conn == "False":
                while not self.queue.empty():
                    self.queue.get()
                continue

            # otherwise take the most recent item and discard the rest of the queue
            if not self.queue.empty():
                query = self.queue.get()
                while not self.queue.empty():
                    self.queue.get()
                #
                m = ModeInfo.objects.all()[0]
                s = SensorInfo.objects.all()[0]
                ss = SettingInfo.objects.all()[0]
                if m.mode == 'cold' and ss.target_temp > s.current_temp:
                    query = 'standby'
                elif m.mode == 'hot' and ss.target_temp < s.current_temp:
                    query = 'standby'
                #
                q.query_speed = query
                q.save()
                resp = self.comm_sender.send_msg(data={'type': 'require', 'source': r.room_number, 'speed': query})
                # if the query is standby, switch to standby immediately
                if query == 'standby' and resp.json()['ack_nak'] == 'ACK':
                    q.current_speed = 'standby'
                    q.query_speed = 'None'
                    q.save()
Author: bupt1309SE, Project: AirSlave, Lines: 46, Source: speed_query.py
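The core idiom in send_require (keep only the newest request and discard the backlog) can be isolated into a small, standard-library-only sketch; the Django models used above are project-specific and not reproduced here:

from queue import LifoQueue

q = LifoQueue()
for speed in ('low', 'medium', 'high'):
    q.put(speed)

latest = None
if not q.empty():
    latest = q.get()      # the most recent request wins
    while not q.empty():  # discard the stale ones
        q.get()
print(latest)             # -> high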

Example 4: inorder_walk

# Required import: from queue import LifoQueue [as alias]
# Or: from queue.LifoQueue import empty [as alias]
def inorder_walk(a_root_node: BSTreeNode):
    node_stack = LifoQueue()
    current_item = a_root_node
    while True:
        while current_item:
            node_stack.put(current_item)
            current_item = current_item.left_child
        if node_stack.empty():
            break
        tmp_item = node_stack.get()
        yield tmp_item

        current_item = tmp_item.right_child
Author: lostinplace, Project: filtered-intervaltree, Lines: 15, Source: bs_tree_funcs.py
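A minimal usage sketch for the generator above, assuming a bare-bones node class with the left_child and right_child attributes it relies on (BSTreeNode itself lives elsewhere in the project; the Node class here is a stand-in):

class Node:
    def __init__(self, key, left=None, right=None):
        self.key = key
        self.left_child = left
        self.right_child = right

# Tree:    2
#         / \
#        1   3
root = Node(2, Node(1), Node(3))
print([n.key for n in inorder_walk(root)])  # -> [1, 2, 3]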

Example 5: index

# Required import: from queue import LifoQueue [as alias]
# Or: from queue.LifoQueue import empty [as alias]
    def index(self, conf):

        session = requests.session()
        urls = LifoQueue()

        allowed_domains = conf['allowed_domains'].split(',')
        start = conf['url']
        ignore = re.compile(conf['ignore'])

        found = set([start])
        urls.put(start)

        while not urls.empty():
            url = urls.get()

            r = session.get(url)

            for link in BeautifulSoup(r.content, 'lxml').find_all('a'):
                link_href = link.get('href')

                if not link_href:
                    continue

                if link_href.startswith('/'):
                    link_href = urljoin(url, link_href)

                parsed = urlparse(link_href)

                if parsed.hostname not in allowed_domains:
                    continue

                if conf['ignore'] and ignore.match(link_href):
                    continue

                if link_href not in found:
                    found.add(link_href)
                    urls.put(link_href)

            file = MemoryFile(r.content)
            file.url = url
            file.mimetype = 'text/html'
            file.size = 0
            file.modified = None

            yield file
Author: acidtv, Project: Ducky, Lines: 47, Source: http.py

Example 6: range

# Required import: from queue import LifoQueue [as alias]
# Or: from queue.LifoQueue import empty [as alias]
str="abcdefghijk"
import random
#Queue
q=Queue(10)
for i in range(10):
	q.put(random.choice(str))
print("size=",q.qsize())
while not q.empty():
	print(q.get())
	q.task_done()

#Lifo Queue
print("-"*10,"lifo_queue","-"*10)
lifoq=LifoQueue(10)
for i in range(10):
	lifoq.put_nowait(random.choice(str))
while not lifoq.empty():
	print(lifoq.get_nowait())
	lifoq.task_done()

#Priority Queue
print("-"*10,"priority queue","-"*10)
pq=PriorityQueue(10)
for i in range(10):
	pq.put_nowait(random.choice(str))
while not pq.empty():
	print(pq.get_nowait())
	pq.task_done()

Author: denisyq, Project: code, Lines: 30, Source: note_queue.py
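The three queue flavours differ only in retrieval order. A deterministic follow-up sketch (same standard-library classes, a fixed input instead of random choices) makes the contrast explicit:

from queue import Queue, LifoQueue, PriorityQueue

items = [3, 1, 2]
for cls in (Queue, LifoQueue, PriorityQueue):
    q = cls()
    for item in items:
        q.put(item)
    drained = []
    while not q.empty():
        drained.append(q.get())
    print(cls.__name__, drained)

# Queue         [3, 1, 2]   (first in, first out)
# LifoQueue     [2, 1, 3]   (last in, first out)
# PriorityQueue [1, 2, 3]   (smallest item first)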

Example 7: CrawlerType2

# Required import: from queue import LifoQueue [as alias]
# Or: from queue.LifoQueue import empty [as alias]
class CrawlerType2(BaseCrawler):
    def __init__(self, name, start_url, list_of_urls, number_of_threads,
                 delayed_request=False, max_allowed_error=10):
        super().__init__(name, start_url, number_of_threads,
                         delay_request=delayed_request,
                         max_err=max_allowed_error)
        self.url_list = list_of_urls
        self.task_queue = LifoQueue()

    def run(self):
        """
        Function to be called by subclasses to start crawler
        """
        while True:
            # Crawl cycle starts
            print_util.print_info(
                'Starting crawl with {0}'.format(
                    self.name
                ),
                Colors.BLACK
            )
            # Add URLs to task queue
            for url in self.url_list:
                self.task_queue.put(
                    {
                        'type': 0,
                        'url': url,
                        'n_errors': 0
                    }
                )
            # Start all threads
            threads = []
            for n in range(1, self.number_of_threads + 1):
                temp_thread = Thread(
                    target=self.threader,
                    args=(n,)
                )
                threads.append(temp_thread)
                temp_thread.start()
            # Wait for threads to finish
            for temp_thread in threads:
                temp_thread.join()
                # Crawl cycle ends

    def threader(self, thread_id):
        """
        Worker function
        :param thread_id: As usual
        """
        while not self.task_queue.empty():

            task = self.task_queue.get()
            if task['n_errors'] >= self.max_allowed_errors:
                print_util.print_warning(
                    '{0} --> Too many errors in task {1}. Skipping.'.format(
                        thread_id,
                        task
                    )
                )
                continue

            print_util.print_info(
                '{0} --> New task : {1}'.format(
                    thread_id,
                    task
                )
            )

            try:
                if task['type'] == 0:
                    self.get_artists(
                        thread_id,
                        task['url']
                    )
                elif task['type'] == 1:
                    self.get_artist(
                        thread_id,
                        task['url'],
                        task['artist']
                    )
                elif task['type'] == 2:
                    self.get_songs_from_page(
                        thread_id,
                        task['url'],
                        task['artist']
                    )
                elif task['type'] == 3:
                    self.get_song(
                        thread_id,
                        task['url'],
                        task['song'],
                        task['artist']
                    )
                print_util.print_info(
                    '{0} --> Task complete : {1}'.format(
                        thread_id,
                        task
                    ),
                    Colors.GREEN
                )
#......... remaining code omitted .........
Author: iiitv, Project: lyrics-crawler, Lines: 103, Source: base_crawler.py

Example 8: CrawlerType0

# Required import: from queue import LifoQueue [as alias]
# Or: from queue.LifoQueue import empty [as alias]
class CrawlerType0(BaseCrawler):
    def __init__(self, name, start_url, list_of_url, number_of_threads,
                 max_err=10, delay_request=False):

        # Constructor for BaseCrawler
        """
        Crawler for the websites of type 0.
        :param list_of_url: List of URLs to start with.
        """
        super().__init__(name, start_url, number_of_threads, max_err,
                         delay_request)

        # Initialize data members
        self.task_queue = LifoQueue()
        self.url_list = list_of_url

    def threader(self, thread_id):
        """
        Worker function.
        :return:
        :param thread_id: Assigned ID of thread.
        """
        while not self.task_queue.empty():  # While there are any tasks

            task = self.task_queue.get()  # Get one of them

            if task['n_errors'] >= self.max_allowed_errors:  # Too many errors
                print_util.print_warning(
                    '{0} --> Too many errors in task {1}. Skipping.'.format(
                        thread_id,
                        task
                    )
                )
                continue

            print_util.print_info(
                '{0} --> New task : {1}'.format(
                    thread_id,
                    task
                )
            )  # Log the task

            try:

                # Call corresponding function
                if task['type'] == 0:
                    self.get_movies(
                        thread_id,
                        task['url']
                    )
                elif task['type'] == 1:
                    self.download_movie(
                        thread_id,
                        task['url'],
                        task['movie']
                    )
                elif task['type'] == 2:
                    self.download_song(
                        thread_id,
                        task['url'],
                        task['song'],
                        task['movie'],
                        task['movie_url']
                    )

                print_util.print_info(
                    '{0} --> Task complete : {1}'.format(
                        thread_id,
                        task
                    ),
                    Colors.GREEN
                )  # Log success

            except Exception as e:  # Some error
                print_util.print_error(
                    '{0} --> Error : {1}'.format(
                        thread_id,
                        e
                    )
                )  # Log it
                task['n_errors'] += 1  # Increment number of errors
                self.task_queue.put(task)  # Put back in queue

    def run(self):
        """
        Function to be called by subclasses to start crawler.
        """
        while True:
            # Crawl cycle start
            print_util.print_info(
                'Starting new crawl with {0}.'.format(
                    self.name
                ),
                Colors.BLACK
            )
            # Add all URLs to task queue
            for url in self.url_list:
                self.task_queue.put(
                    {
                        'type': 0,
#......... remaining code omitted .........
Author: iiitv, Project: lyrics-crawler, Lines: 103, Source: base_crawler.py

Example 9: CrawlerType1

# Required import: from queue import LifoQueue [as alias]
# Or: from queue.LifoQueue import empty [as alias]
class CrawlerType1(BaseCrawler):
    def __init__(self, name, start_url, list_of_url, number_of_threads,
                 delay_request=False, max_allowed_errors=3):
        """

        :param name: As usual
        :param start_url: As usual
        :param list_of_url: As usual
        :param number_of_threads: As usual
        :param delay_request: As usual
        :param max_allowed_errors: As usual
        """
        super().__init__(name, start_url, number_of_threads=number_of_threads,
                         delay_request=delay_request,
                         max_err=max_allowed_errors)
        self.url_list = list_of_url
        self.task_queue = LifoQueue()

    def run(self):
        """
        Method called from subclasses to start crawling process
        """
        while True:
            # Crawl cycle starts
            print_util.print_info(
                'Starting new crawl with {0}'.format(
                    self.name
                ),
                Colors.BLACK
            )
            # Add all URLs to task queue
            for url in self.url_list:
                self.task_queue.put(
                    {
                        'type': 0,
                        'url': url,
                        'n_errors': 0
                    }
                )
            # Start all threads
            threads = []
            for n in range(1, self.number_of_threads + 1):
                temp_thread = Thread(
                    target=self.threader,
                    args=(n,)
                )
                threads.append(temp_thread)
                temp_thread.start()
            for temp_thread in threads:
                temp_thread.join()
                # Crawl cycle ends

    def threader(self, thread_id):
        """
        Worker function
        :param thread_id: As usual
        """
        while not self.task_queue.empty():
            task = self.task_queue.get()

            if task['n_errors'] >= self.max_allowed_errors:
                print_util.print_warning(
                    '{0} --> Too many errors in task {1}. Skipping.'.format(
                        thread_id,
                        task
                    )
                )
                continue

            print_util.print_info(
                '{0} --> New task : {1}'.format(
                    thread_id,
                    task
                )
            )

            try:
                if task['type'] == 0:
                    self.get_artists(
                        thread_id,
                        task['url']
                    )
                elif task['type'] == 1:
                    self.get_artist_albums(
                        thread_id,
                        task['url'],
                        task['artist']
                    )
                elif task['type'] == 2:
                    self.get_song(
                        thread_id,
                        task['url'],
                        task['song'],
                        task['album'],
                        task['album_url'],
                        task['artist']
                    )

                print_util.print_info(
                    '{0} --> Task complete : {1}'.format(
#......... remaining code omitted .........
Author: iiitv, Project: lyrics-crawler, Lines: 103, Source: base_crawler.py

Example 10: UnhandledExceptionHandler

# Required import: from queue import LifoQueue [as alias]
# Or: from queue.LifoQueue import empty [as alias]

#......... preceding code omitted .........
        :param frame: The interrupted stack frame
        :type frame: frame
        """
        signal_names = {
            signal.SIGTERM: 'SIGTERM',
            signal.SIGINT: 'SIGINT',
        }
        self._logger.info('{} signal received. Triggering teardown.', signal_names[sig])
        raise AppTeardown

    def __enter__(self):
        """
        Enables this to be used as a context manager. No special handling is needed on enter.
        """
        pass

    def __exit__(self, exc_type, exc_value, traceback):
        """
        Enables this to be used as a context manager. If an exception was raised during the execution block (inside the
        "with" statement) then exc_value will be set to the exception object.

        There are four situations in which we can go through this method:
        1. Exception, on main thread
            - The exception is logged and in some cases (e.g., SystemExit) may be immediately reraised.
            - Teardown callbacks are executed.
            - Example: A KeyboardInterrupt exception raised because user presses ctrl-c / sends SIGINT signal

        2. Exception, not on main thread
            - The exception is logged and in some cases may be passed to the main thread to be reraised.
            - Teardown callbacks are executed.
            - Example: Any unhandled exception that is raised on a SafeThread

        3. Normal exit, on main thread
            - We check to see if there was an exception that we need to reraise on the main thread. In almost all cases
              we will *not* reraise an exception on the main thread since it has already been logged and teardown
              callbacks have already been executed on the thread that raised the exception.
            - Teardown callbacks are *not* executed.
            - Example: A SystemExit exception raised by sys.exit() is passed from a SafeThread to the main thread to
                       make Python set the exit code.

        4. Normal exit, not on main thread
            - Do nothing! All is well.
        """
        if exc_value:
            # An exception occurred during execution, so run the teardown callbacks. We use a lock here since multiple
            # threads could raise exceptions at the same time and we only want to execute these once.
            with self._handling_lock:
                if not isinstance(exc_value, (SystemExit, AppTeardown, KeyboardInterrupt)):
                    # It is not very useful to log the SystemExit exception since it is raised by sys.exit(), and thus
                    # application exit is completely expected.
                    self._logger.exception('Unhandled exception handler caught exception.')

                while not self._teardown_callback_stack.empty():
                    callback, args, kwargs = self._teardown_callback_stack.get()
                    self._logger.debug('Executing teardown callback: {}', callback)
                    try:
                        callback(*args, **kwargs)
                    except:  # pylint: disable=bare-except
                        # Also catch any exception that occurs during a teardown callback and log it.
                        self._teardown_callback_raised_exception = True
                        self._logger.exception('Exception raised by teardown callback {}', callback)

                self._handled_exceptions.put(exc_value)

        if current_thread() is main_thread():
            # The usage of this class on the main thread is a special case since only exceptions raised on the main
            # thread may affect the exit code of the overall application. Any unhandled exceptions raised on child
            # threads will only interrupt execution on that particular thread.
            #
            # This main-thread-only code path serves to ensure that exceptions raised on child threads during a `with
            # unhandled_exception_handler` block will also raise an exception on the main thread upon exit of the main
            # thread's `with unhandled_exception_handler` block. This ensures we will set a failing exit code even if
            # an exception is raised on a child thread.
            #
            # Note: this only works for child threads protected by the UnhandledExceptionHandler (e.g., an instance of
            # a SafeThread).
            #
            # We check the self._handled_exceptions queue to see if there was an exception that we want to reraise. We
            # only care about the first exception on the queue -- it was the first caught exception so it "wins".
            if not self._handled_exceptions.empty():
                handled_exception = self._handled_exceptions.get()

                # We reraise SystemExit on the main thread -- this specific exception is how Python controls setting
                # the process exit code, and that only works if raised on the main thread.
                if isinstance(handled_exception, SystemExit):
                    raise handled_exception

                # We also want to make sure the process exit code is set non-zero if the UnhandledExceptionHandler
                # handled any Exception at all. (Note: this does not include AppTeardown or KeyboardInterrupt, which
                # both inherit from BaseException.)
                if isinstance(handled_exception, Exception):
                    raise SystemExit(self.HANDLED_EXCEPTION_EXIT_CODE)

            # If an exception was raised while executing one of the teardown callbacks, also make sure to exit with a
            # non-zero exit code.
            if self._teardown_callback_raised_exception:
                raise SystemExit(self.EXCEPTION_DURING_TEARDOWN_EXIT_CODE)

        # Returning True from this method tells Python not to re-raise the exc_value exception on the current thread.
        return True
Author: mdengler, Project: ClusterRunner, Lines: 104, Source: unhandled_exception_handler.py
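The teardown stack in this example is a plain LifoQueue of (callback, args, kwargs) tuples. A reduced sketch of the drain loop outside the class (the names here are illustrative and not part of ClusterRunner):

import logging
from queue import LifoQueue

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger('teardown')

teardown_stack = LifoQueue()
teardown_stack.put((logger.info, ('closing database',), {}))
teardown_stack.put((logger.info, ('stopping workers',), {}))

# The most recently registered callbacks run first.
while not teardown_stack.empty():
    callback, args, kwargs = teardown_stack.get()
    try:
        callback(*args, **kwargs)
    except Exception:
        logger.exception('Exception raised by teardown callback %s', callback)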

Example 11: __init__

# Required import: from queue import LifoQueue [as alias]
# Or: from queue.LifoQueue import empty [as alias]
class Grid:

    def __init__(self, columns=2, rows=2, allowed_paths=[Path.up, Path.right, Path.down, Path.left], verbose=False):
        self._columns = columns
        self._rows = rows
        self._allowed_paths = allowed_paths
        self._verbose = verbose

        self._pos_x = 0
        self._pos_y = 0
        self._move_history = LifoQueue()
        self._last_move = None

        self._create_grid_matrix()

    def _create_grid_matrix(self):
        self._grid_matrix = []
        for r in range(0, self._rows + 1):
            self._grid_matrix.append([])
            for c in range(0, self._columns + 1):
                open_paths = []
                if Path.up    in self._allowed_paths and r > 0            : open_paths.append(Path.up)
                if Path.right in self._allowed_paths and c < self._columns: open_paths.append(Path.right)
                if Path.down  in self._allowed_paths and r < self._rows   : open_paths.append(Path.down)
                if Path.left  in self._allowed_paths and c > 0            : open_paths.append(Path.left)
                self._grid_matrix[r].append(Intersection(open_paths))

    def get_intersection(self):
        return self._grid_matrix[self._pos_y][self._pos_x]

    def get_open_paths(self):
        return self._grid_matrix[self._pos_y][self._pos_x].get_open_paths()

    def backtrack(self):
        if self._move_history.empty():
            if self._verbose:
                print('! No more paths to backtrack from.')
            return False

        self._last_move = self._move_history.get()
        self.get_intersection().reset()

        if   self._last_move == Path.up   : self._pos_y += 1
        elif self._last_move == Path.right: self._pos_x -= 1
        elif self._last_move == Path.down : self._pos_y -= 1
        elif self._last_move == Path.left : self._pos_x += 1
        else:
            if self._verbose:
                print('! Unable to backtrack anymore.')
            return False

        return True

    def move(self, path=None):
        open_paths = self.get_open_paths()
        if self._last_move is not None and self._last_move in open_paths:
            open_paths.remove(self._last_move)
            self._last_move = None

        if path is None:
            if len(open_paths) > 0:
                path = open_paths[0]
            else:
                if self._verbose:
                    print('! No more open paths to move into.')
                return False
        elif path not in open_paths:
            if self._verbose:
                print('! Unable to move {}.'.format(path.name))
            return False

        self.get_intersection().use_path(path)
        self._move_history.put(path)

        if   path == Path.up   : self._pos_y -= 1
        elif path == Path.right: self._pos_x += 1
        elif path == Path.down : self._pos_y += 1
        elif path == Path.left : self._pos_x -= 1

        return True

    def is_at_start(self):
        return self._pos_x == self._pos_y == 0

    def is_at_end(self):
        return self._pos_y == self._rows and self._pos_x == self._columns

    def _to_string(self):
        COL_WIDTH = 3
        ROW_HEIGHT = 1

        output = ''
        for r in range(0, self._rows + 1):
            for c in range(0, self._columns + 1):
                if self._pos_y == r and self._pos_x == c:
                    area = 'x'
                else:
                    path = self._grid_matrix[r][c].get_used_path()
                    if path == Path.up:
                        area = '^'
#......... remaining code omitted .........
Author: ChyrosNX, Project: NephX7, Lines: 103, Source: p15_lattice_paths.py


Note: The queue.LifoQueue.empty examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from open-source projects contributed by many developers, and copyright of the source code remains with the original authors. Please consult the corresponding project's license before distributing or using the code; reproduction without permission is not allowed.