

Python BaseManager.get_task_queue Method Code Examples

This article collects typical usage examples of the Python method multiprocessing.managers.BaseManager.get_task_queue. If you have been wondering how exactly BaseManager.get_task_queue is used, or are looking for working examples of it, the curated code samples below may help. You can also explore further usage examples of its containing class, multiprocessing.managers.BaseManager.


The following presents 7 code examples of the BaseManager.get_task_queue method, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
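
Before the examples, here is a minimal, self-contained sketch of the pattern they all share. It is not taken from any of the projects below; it assumes Python 3 and a platform whose default multiprocessing start method is fork (e.g. Linux), since under the spawn start method the lambda callable would have to be a picklable module-level function. The server registers a callable under the name 'get_task_queue' and starts the manager; the client registers the same name without a callable, connects, and calls get_task_queue() to obtain a proxy to the shared queue.

import queue
from multiprocessing.managers import BaseManager

task_queue = queue.Queue()

class ServerManager(BaseManager):
    pass

class ClientManager(BaseManager):
    pass

# Server side: expose the queue over the network under the name 'get_task_queue'.
ServerManager.register('get_task_queue', callable=lambda: task_queue)
# Client side: register the name only; the actual queue lives in the manager process.
ClientManager.register('get_task_queue')

if __name__ == '__main__':
    # Start the manager process, serving the queue on port 5000.
    server = ServerManager(address=('', 5000), authkey=b'abc')
    server.start()

    # Connect as a "client" and fetch a proxy to the shared queue.
    client = ClientManager(address=('127.0.0.1', 5000), authkey=b'abc')
    client.connect()
    task = client.get_task_queue()

    task.put('hello')
    print(task.get())   # -> 'hello', round-tripped through the manager process

    server.shutdown()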

Example 1: __init__

# Required import: from multiprocessing.managers import BaseManager [as alias]
# Or: from multiprocessing.managers.BaseManager import get_task_queue [as alias]
	def __init__(self):
		class QueueManager(BaseManager):
			pass
		# This QueueManager only fetches the Queues over the network, so register the names only:
		QueueManager.register('get_task_queue')
		QueueManager.register('get_result_queue')
		# Connect to the server, i.e. the machine running taskmanager.py:
		server_addr = '10.1.142.100'
		print('Connect to server %s...' % server_addr)
		# The port and authkey must match the settings in taskmanager.py exactly:
		m = QueueManager(address=(server_addr, 8888), authkey=b'abc')
		# Connect over the network:
		m.connect()
		# Get proxies for the Queues:
		self.task = m.get_task_queue()
		self.result = m.get_result_queue()
Developer: PatricKuo, Project: AFOJ-Core, Lines of code: 18, Source file: judgeclient.py
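
Once this __init__ has run, self.task and self.result are proxies to the queues served by taskmanager.py. The work loop below is a hypothetical sketch, not part of judgeclient.py; judge_submission is a placeholder for whatever judging logic the project actually uses.

	def run(self):
		# Hypothetical consumer loop: take a job from the shared task queue,
		# process it locally, and push the outcome to the shared result queue.
		while True:
			job = self.task.get()            # blocks until the server enqueues a task
			verdict = judge_submission(job)  # placeholder, not a real AFOJ-Core function
			self.result.put(verdict)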

Example 2: __init__

# Required import: from multiprocessing.managers import BaseManager [as alias]
# Or: from multiprocessing.managers.BaseManager import get_task_queue [as alias]
	def __init__(self):
		# Queue for dispatching tasks (Queue is the Python 2 stdlib module, imported at module level):
		task_queue = Queue.Queue()
		# Queue for receiving results:
		result_queue = Queue.Queue()
		class QueueManager(BaseManager):
			pass
		# Register both Queues on the network; the callable argument binds each Queue object:
		QueueManager.register('get_task_queue', callable=lambda: task_queue)
		QueueManager.register('get_result_queue', callable=lambda: result_queue)
		# Bind port 8888 and set the authkey 'abc':
		manager = QueueManager(address=('', 8888), authkey=b'abc')
		# Start the manager and its Queues:
		manager.start()
		# Obtain the network-accessible Queue proxies:
		self.task = manager.get_task_queue()
		self.result = manager.get_result_queue()
Developer: PatricKuo, Project: AFOJ-Core, Lines of code: 19, Source file: Judge.py
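
After this __init__ completes, the manager process owns both queues and self.task / self.result are local proxies. The following is a hedged sketch of how the server side might dispatch work and collect results; the dispatch method and the Judge class name are assumptions, not code from AFOJ-Core.

	def dispatch(self, submissions):
		# Hypothetical dispatcher: enqueue every submission, then collect
		# one result per submission from the connected judge clients.
		for sub in submissions:
			self.task.put(sub)
		return [self.result.get() for _ in submissions]

# Possible usage (assumed; the class holding __init__ above is called Judge here):
# judge = Judge()                                 # starts the manager on port 8888
# verdicts = judge.dispatch(['sub-1', 'sub-2'])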

Example 3: SpiderWork

# Required import: from multiprocessing.managers import BaseManager [as alias]
# Or: from multiprocessing.managers.BaseManager import get_task_queue [as alias]
class SpiderWork(object):
    def __init__(self):
        BaseManager.register('get_task_queue')
        BaseManager.register('get_result_queue')

        server_addr = '127.0.0.1'
        print ('Connect to server %s...' % server_addr)

        self.m=BaseManager(address=(server_addr,8001),authkey='qiye'.encode('utf-8'))
        print 'connecting...'
        self.m.connect()
        print 'connected'

        self.task = self.m.get_task_queue()
        self.result = self.m.get_result_queue()

        self.downloader = HtmlDownloader()
        self.parser = HtmlParser()
        print 'spider init finished'


    def crawl(self):
        while True:
            try:
                # print self.task
                if not self.task.empty():
                    url = self.task.get()

                    if url == 'end':
                        print ('stop...')
                        # Notify the other nodes to stop:
                        self.result.put({'new_urls':'end','data':'end'})
                        return
                    print ('spider is working on %s'%url)
                    content = self.downloader.download(url)
                    new_urls, data = self.parser.parser(url, content)
                    self.result.put({'new_urls':new_urls,'data':data})
            except EOFError as e:
                print 'lost connection to the control node'
                return
            except Exception as e:
                print e
                print 'crawl failed'
Developer: cawind2, Project: TestTu, Lines of code: 45, Source file: SpiderWork.py
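
To run this spider node, the control node must already be serving both registered queues on 127.0.0.1:8001 with authkey 'qiye'. A minimal assumed launcher (not taken from the TestTu project) would be:

if __name__ == '__main__':
    # Connect to the already-running control node, then start crawling.
    spider = SpiderWork()
    spider.crawl()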

Example 4: SlaveWork

# Required import: from multiprocessing.managers import BaseManager [as alias]
# Or: from multiprocessing.managers.BaseManager import get_task_queue [as alias]
class SlaveWork(object):

    def __init__(self):

        # Initialize this worker node's connection for the distributed setup.
        # Step 1: register the names of the queue-getter methods with BaseManager.
        BaseManager.register('get_task_queue')
        BaseManager.register('get_result_queue')

        # Step 2: connect to the server.
        server_addr = '127.0.0.1'
        # The port and authkey must match the settings of the server process exactly:
        self.m = BaseManager(address=(server_addr, 8081), authkey=b'seven')
        # Connect over the network:
        self.m.connect()

        # Step 3: obtain the queue proxies.
        self.task = self.m.get_task_queue()
        self.result = self.m.get_result_queue()

        # Initialize the page downloader and parser.
        self.downloader = HtmlDownloader()
        self.parser = HtmlParse()

    def crawl(self):
        while(True):
            try:
                if not self.task.empty():
                    url = self.task.get()
                    if url == 'end':
                        print("The control node has told this spider node to stop")
                        self.result.put({'new_urls': 'end', 'data': 'end'})
                        return
                    print('Spider node is parsing: %s' % url)
                    content = self.downloader.download(url)
                    new_urls, data = self.parser.parser(url, content)
                    self.result.put({"new_urls": new_urls, "data": data})
            except EOFError:
                print("Lost connection to the control node")
                return
            except Exception:
                print('Crawl failed')
Developer: shisiying, Project: crawer_python, Lines of code: 44, Source file: SlaveWork.py

Example 5: worker

# Required import: from multiprocessing.managers import BaseManager [as alias]
# Or: from multiprocessing.managers.BaseManager import get_task_queue [as alias]
def worker():
    # load 'session.txt', or call login() to generate it
    try:
        with open('session.txt', 'rb') as f:
            headers = cPickle.load(f)
            cookies = cPickle.load(f)
    except:
        print '[-] No session.txt found, calling login()...'
        session = DoubanLogin().login()
        headers = session.headers
        cookies = session.cookies

    # connect to manager
    BaseManager.register('get_task_queue')
    BaseManager.register('get_result_queue')
    print 'Connect to server %s:%s...' % (SERVER_ADDR, PORT)
    worker = BaseManager(address=(SERVER_ADDR, PORT), authkey='douban')
    worker.connect()
    task = worker.get_task_queue()
    result = worker.get_result_queue()

    # start listening
    print '[-] Waiting...'
    while True:
        try:
            id_ = task.get()
            print '[~] Running task...'
            info = get_user_info.get_info(id_, headers=headers, cookies=cookies)
            print '[+] Information returned.\n'
            result.put(info)
            print '[-] Waiting...'
            time.sleep(DELAY_TIME)

        except Exception, e:
            print e
            exit()
Developer: Kr0c, Project: DoubanRobot, Lines of code: 38, Source file: worker.py

Example 6: QueueManager

# Required import: from multiprocessing.managers import BaseManager [as alias]
# Or: from multiprocessing.managers.BaseManager import get_task_queue [as alias]
# Additional imports needed by this worker script (Python 3):
import time
import queue

# Create a similar QueueManager:
class QueueManager(BaseManager):
    pass

# This QueueManager only fetches the Queues over the network, so register the names only:
QueueManager.register('get_task_queue')
QueueManager.register('get_result_queue')

# Connect to the server, i.e. the machine running task_master.py:
server_addr = '127.0.0.1'
print('Connect to server %s...' % server_addr)
# The port and authkey must match the settings in task_master.py exactly:
m = QueueManager(address=(server_addr, 5000), authkey=b'abc')
# Connect over the network:
m.connect()
# Get the Queue proxies:
task = m.get_task_queue()
result = m.get_result_queue()
# Pull tasks from the task queue and write the results to the result queue:
for i in range(10):
    try:
        n = task.get(timeout=1)
        print('run task %d * %d...' % (n, n))
        r = '%d * %d = %d' % (n, n, n * n)
        time.sleep(1)
        result.put(r)
    except queue.Empty:
        print('task queue is empty.')
# Done:
print('worker exit.')
Developer: zthxxx, Project: python-OpinionMonitoring, Lines of code: 32, Source file: c.py

Example 7: manager

# Required import: from multiprocessing.managers import BaseManager [as alias]
# Or: from multiprocessing.managers.BaseManager import get_task_queue [as alias]
def manager():
    # load 'session.txt', or call login() to generate it
    try:
        with open('session.txt', 'rb') as f:
            headers = cPickle.load(f)
            cookies = cPickle.load(f)
    except:
        print '[-] No session.txt found, calling login()...'
        session = DoubanLogin().login()
        headers = session.headers
        cookies = session.cookies

    # create task queue and result queue
    task_queue = Queue.Queue()
    result_queue = Queue.Queue()

    # register
    BaseManager.register('get_task_queue', callable=lambda: task_queue)
    BaseManager.register('get_result_queue', callable=lambda: result_queue)

    # bind PORT, set the authkey
    manager = BaseManager(address=('', PORT), authkey='douban')
    manager.start()
    task = manager.get_task_queue()
    result = manager.get_result_queue()

    # load task file
    done = task2file.load('done.txt')
    todo = task2file.load('todo.txt')

    # initial task (used if there is no task file)
    new = set([INIT_ID])
    todo = (todo | (new - (new & done)))
    count = 1

    try:
        while len(todo) != 0:
            try:
                # select an id_ then send it to worker's task queue
                id_ = todo.pop()
                task.put(id_)
                print '\n[+] ========  No.%d  ID: %s  ========' % (count, id_)
                print '[~] Send to task queue...'
                time.sleep(DELAY_TIME)
                new = get_user_id.get_id(id_, headers=headers, cookies=cookies)

                # set() operation, add new IDs to todo
                add = (new - (new & done))
                todo = (todo | add)
                print '[+] Newly discovered user IDs: %d' % len(add)
                print '[~] Receiving User Information...'
                data = result.get()

                # save user information to 'info.txt'
                get_user_info.write2file('info.txt', data)
                print '[+] User information saved to info.txt'
                # add id_ to done
                done.add(id_)
                count += 1

                # the todo set can grow quickly, so persist both sets to disk promptly
                task2file.save('todo.txt', todo)
                task2file.save('done.txt', done)

            except Exception, e:
                print e
                exit()

    finally:
        manager.shutdown()
        print '\n[+] Manager exit.'
        exit()
Developer: Kr0c, Project: DoubanRobot, Lines of code: 74, Source file: manager.py


Note: The multiprocessing.managers.BaseManager.get_task_queue examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by various developers; copyright of the source code remains with the original authors. Please consult each project's license before redistributing or using the code. Do not reproduce this article without permission.