

Python BaseManager.connect Method Code Examples

This article collects and summarizes typical usage examples of the Python method multiprocessing.managers.BaseManager.connect. If you are unsure what BaseManager.connect does or how to call it, the curated examples below should help. You can also explore further usage examples of the class it belongs to, multiprocessing.managers.BaseManager.


The following shows 15 code examples of the BaseManager.connect method, sorted by popularity by default.

Example 1: main

# Required import: from multiprocessing.managers import BaseManager
def main():
    BaseManager.register('run_rpc_function')
    manager = BaseManager(address=('127.0.0.1', 51999), authkey=b'my_authkey')
    manager.connect()
    returned_proxy_object = manager.run_rpc_function('SOME LOUD TEXT')
    print(returned_proxy_object)
    # _getvalue() returns the underlying value; a custom proxy class (see BaseProxy)
    # might be a cleaner way to get it back
    print(returned_proxy_object._getvalue())
Author: brentpayne, Project: ExampleCode, Source: example_rpc_client.py
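
Example 1 is only the client half: connect() presumes a manager server is already exporting run_rpc_function on 127.0.0.1:51999. A minimal sketch of such a server is given below; only the registered name, address and authkey are taken from the example, while the function body and the serving loop are assumptions for illustration.

from multiprocessing.managers import BaseManager

def run_rpc_function(text):
    # assumed behaviour, purely for illustration: quiet the shouting and send it back
    return text.lower()

if __name__ == '__main__':
    BaseManager.register('run_rpc_function', callable=run_rpc_function)
    manager = BaseManager(address=('127.0.0.1', 51999), authkey=b'my_authkey')
    server = manager.get_server()
    server.serve_forever()

With a server like this running, the client's call returns a proxy for the resulting string, and _getvalue() retrieves the actual value.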

Example 2: StartConnect

# Required import: from multiprocessing.managers import BaseManager
    def StartConnect(self, serveraddress="localhost", port=80, key=None):
        if self.queueManager is None:
            try:
                key = key.encode("utf-8")
            except AttributeError:
                # key is None or already bytes
                pass
            BaseManager.register('GetTaskQueue')
            BaseManager.register('GetResultQueue')
            queueManager = BaseManager(address=(serveraddress, port), authkey=key)
            queueManager.connect()
            self.queueManager = queueManager
Author: zthxxx, Project: python-OpinionMonitoring, Source: ProcessingQueueNode.py

Example 3: analyze_found

# Required import: from multiprocessing.managers import BaseManager
    def analyze_found(self, found_nodes):
        if len(found_nodes) > 0:
            manager = BaseManager(address=('', self._share_port), authkey=self._auth)
            manager.register('get_meta_data')
            manager.connect()
            distances, max_dist = manager.get_meta_data()
            for n in found_nodes:
                try:
                    if max_dist - distances[n._id] > self._threshold:
                        # Do something here
                        print("*** Node " + str(n._id) + " hit the threshold")
                except KeyError:
                    # Query here to see if the node exists. If it does, the node
                    # does not link to the origin
                    print("*** Node " + str(n._id) + " may not link to origin")
Author: admgrn, Project: Switcharoo, Source: transverse.py

Example 4: __init__

# Required import: from multiprocessing.managers import BaseManager
    def __init__(self):
        class QueueManager(BaseManager):
            pass
        # This QueueManager only fetches Queues over the network, so registration
        # only needs the names:
        QueueManager.register('get_task_queue')
        QueueManager.register('get_result_queue')
        # Connect to the server, i.e. the machine running taskmanager.py:
        server_addr = '10.1.142.100'
        print('Connect to server %s...' % server_addr)
        # The port and authkey must match the settings in taskmanager.py exactly:
        m = QueueManager(address=(server_addr, 8888), authkey=b'abc')
        # Connect over the network:
        m.connect()
        # Get the Queue objects:
        self.task = m.get_task_queue()
        self.result = m.get_result_queue()
Author: PatricKuo, Project: AFOJ-Core, Source: judgeclient.py

Example 5: connect

# Required import: from multiprocessing.managers import BaseManager
    def connect(self, pythonexec=None, parent=None):
        "Custom connection method that will start up a new server"

        # fork a new server process with the correct Python interpreter (py3/venv)
        if pythonexec:
            # warning: this may not work when frozen (e.g. py2exe)
            command = pythonexec + " -u %s --server" % __file__

            import wx
            
            class MyProcess(wx.Process):
                "Custom Process Class to handle OnTerminate event method"

                def OnTerminate(self, pid, status):
                    "Clean up on termination (prevent SEGV!)"
                
                def OnClose(self, evt):
                    "Termitate the process on exit"
                    # prevent the server continues running after the IDE closes
                    print("closing pid", self.GetPid())
                    self.Kill(self.GetPid())
                    print("killed")


            self.process = MyProcess(parent)
            parent.Bind(wx.EVT_CLOSE, self.process.OnClose)
            #process.Redirect()
            flags = wx.EXEC_ASYNC
            if wx.Platform == '__WXMSW__':
                flags |= wx.EXEC_NOHIDE
            wx.Execute(command, flags, self.process)

            return BaseManager.connect(self)
Author: reingart, Project: rad2py, Source: autocompletion.py

Example 6: start

# Required import: from multiprocessing.managers import BaseManager
    def start(self):
        BaseManager.register('get_dispatched_job_queue')
        BaseManager.register('get_finished_job_queue')

        server = '127.0.0.1'
        print('Connect to server %s...' % server)
        manager = BaseManager(address=(server, 8888), authkey=b'jobs')
        manager.connect()

        dispatched_jobs = manager.get_dispatched_job_queue()
        finished_jobs = manager.get_finished_job_queue()

        while True:
            job = dispatched_jobs.get(timeout=1)
            print('Run job: %s ' % job.job_id)
            time.sleep(1)
            finished_jobs.put(job)
Author: kongxx, Project: garbagecan, Source: slave.py
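
Example 6 likewise assumes a master process serving the two job queues on 127.0.0.1:8888. A sketch of that side is shown below; the queue names, address and authkey follow the example, while the dummy dispatch loop (plain integers rather than real job objects with a job_id) is an assumption for illustration.

import queue
from multiprocessing.managers import BaseManager

dispatched_job_queue = queue.Queue()
finished_job_queue = queue.Queue()

def get_dispatched_job_queue():
    return dispatched_job_queue

def get_finished_job_queue():
    return finished_job_queue

if __name__ == '__main__':
    BaseManager.register('get_dispatched_job_queue', callable=get_dispatched_job_queue)
    BaseManager.register('get_finished_job_queue', callable=get_finished_job_queue)
    manager = BaseManager(address=('127.0.0.1', 8888), authkey=b'jobs')
    manager.start()
    dispatched = manager.get_dispatched_job_queue()
    finished = manager.get_finished_job_queue()
    for job_id in range(5):
        dispatched.put(job_id)  # a real master would enqueue job objects carrying a job_id
    for _ in range(5):
        print('Finished job:', finished.get())
    manager.shutdown()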

Example 7: SpiderWork

# Required import: from multiprocessing.managers import BaseManager
class SpiderWork(object):
    def __init__(self):
        BaseManager.register('get_task_queue')
        BaseManager.register('get_result_queue')

        server_addr = '127.0.0.1'
        print('Connect to server %s...' % server_addr)

        self.m = BaseManager(address=(server_addr, 8001), authkey='qiye'.encode('utf-8'))
        print('connecting...')
        self.m.connect()
        print('connected')

        self.task = self.m.get_task_queue()
        self.result = self.m.get_result_queue()

        self.downloader = HtmlDownloader()
        self.parser = HtmlParser()
        print('spider init finished')

    def crawl(self):
        while True:
            try:
                # print(self.task)
                if not self.task.empty():
                    url = self.task.get()

                    if url == 'end':
                        print('stop...')
                        # notify the other nodes to stop
                        self.result.put({'new_urls': 'end', 'data': 'end'})
                        return
                    print('spider is working on %s' % url)
                    content = self.downloader.download(url)
                    new_urls, data = self.parser.parser(url, content)
                    self.result.put({'new_urls': new_urls, 'data': data})
            except EOFError as e:
                print('cannot connect to the other nodes')
                return
            except Exception as e:
                print(e)
                print('crawl failed')
Author: cawind2, Project: TestTu, Source: SpiderWork.py

Example 8: SlaveWork

# Required import: from multiprocessing.managers import BaseManager
class SlaveWork(object):

    def __init__(self):

        # Initialize the connection of the worker node in the distributed setup.
        # Step 1: use BaseManager to register the names of the methods that fetch the queues
        BaseManager.register('get_task_queue')
        BaseManager.register('get_result_queue')

        # Step 2: connect to the server
        server_addr = '127.0.0.1'
        # The port and authkey must match the settings of the server process exactly:
        self.m = BaseManager(address=(server_addr, 8081), authkey=b'seven')
        # Connect over the network:
        self.m.connect()

        # Step 3: get the queue objects
        self.task = self.m.get_task_queue()
        self.result = self.m.get_result_queue()

        # Initialize the page downloader and parser
        self.downloader = HtmlDownloader()
        self.parser = HtmlParse()

    def crawl(self):
        while True:
            try:
                if not self.task.empty():
                    url = self.task.get()
                    if url == 'end':
                        print("The control node told the spider nodes to stop working")
                        self.result.put({'new_urls': 'end', 'data': 'end'})
                        return
                    print('Spider node is parsing: %s' % url)
                    content = self.downloader.download(url)
                    new_urls, data = self.parser.parser(url, content)
                    self.result.put({"new_urls": new_urls, "data": data})
            except EOFError:
                print("Failed to connect to the worker node")
                return
            except Exception:
                print('Crawl failed')
Author: shisiying, Project: crawer_python, Source: SlaveWork.py

Example 9: start

# Required import: from multiprocessing.managers import BaseManager
    def start(self):
        BaseManager.register('get_dispatch_queue')
        BaseManager.register('get_finished_queue')

        server = ('127.0.0.1', 8000)
        manager = BaseManager(address=server, authkey=b'dispatcher')
        manager.connect()
        print('Connected to server {}'.format(server))

        dispatch_queue = manager.get_dispatch_queue()
        finished_queue = manager.get_finished_queue()

        while True:
            job = dispatch_queue.get()
            if job is None:
                print('Sentinel Detected, terminating...')
                dispatch_queue.put(job)
                break
            print('Working on job: {}'.format(job))
            work_time = min(2, max(0.5, gauss(1, 1)))
            sleep(work_time)
            print('Job finished: {}'.format(job))
            finished_queue.put(job)
Author: Time1ess, Project: MyCodes, Source: job_dispatch.py

Example 10: QueueClient

# Required import: from multiprocessing.managers import BaseManager
class QueueClient(object):

    def __init__(self, queue_name, server_host, server_port, server_authkey):
        '''clients need to know which queue on our server they want to use'''
        self.queue_name = queue_name

        '''as the names suggest, server_host, server_port and server_authkey
        are needed to connect to our queue server
        '''
        self.host = server_host
        self.port = server_port
        self.authkey = server_authkey

        '''This is an important step: we need to tell the BaseManager class about this
        queue so that, when we connect to our server, we can get a proxy object for it.
        Notice that we are not passing any callable here, for the same reason.'''
        BaseManager.register(self.queue_name)

        '''let's create an instance of the BaseManager class so we can connect to the server'''
        self.manager = BaseManager(address=(self.host, self.port),
                                   authkey=self.authkey)
        self.manager.connect()

    def get_queue(self):
        '''this is an important method: we use getattr() to look up the registered
        queue name on our manager instance and return the queue.
        Remember that in the QueueServer class we registered a callable that returns the
        actual database_queue object; here we ask the manager for that callable,
        which we can then call to get the proxy queue object.
        '''
        queue_callable = getattr(self.manager, self.queue_name)

        '''now that we have the callable, we just call it and it returns the
        proxy queue object (remember it is a proxy, so we can perform all the
        operations that the Queue.Queue class provides, and they are relayed back to
        the original queue on our server)'''
        return queue_callable()
Author: eulhaque, Project: avoid-picture-duplication---reclaim-your-space, Source: client.py
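
A possible way to use this class is sketched below. The queue name matches the database_queue mentioned in the docstrings, but the host, port and authkey are hypothetical and must match whatever the corresponding QueueServer actually serves.

client = QueueClient('database_queue', 'localhost', 50000, b'secret')  # hypothetical connection details
database_queue = client.get_queue()          # proxy for the queue living on the server
database_queue.put('/photos/duplicate_candidate.jpg')
print(database_queue.qsize())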

Example 11: start

# Required import: from multiprocessing.managers import BaseManager
    def start(self, execute=True, serverip='127.0.0.1', port=8888):
        # Register the dispatched-job queue and the finished-job queue on the network
        BaseManager.register('get_dispatched_job_queue')
        BaseManager.register('get_finished_job_queue')

        server = serverip
        print('Connect to server %s...' % server)
        manager = BaseManager(address=(server, port), authkey=authkey)
        manager.connect()
        # Use the methods registered above to get the queues
        dispatched_jobs = manager.get_dispatched_job_queue()
        finished_jobs = manager.get_finished_job_queue()

        # Run jobs and return the results; this only simulates running a job,
        # so what is returned is just the job that was received
        while True:
            if dispatched_jobs.empty():
                time.sleep(1)
                print('queue is empty, wait 1 sec...')
                continue

            job = dispatched_jobs.get(timeout=timeout)
            print('Run job: %s ' % job.id)
            project = job.project
            project = etl.LoadProject_dict(project)
            module = project.modules[job.jobname]
            count = 0
            try:
                generator = etl.parallel_reduce(module, [job.config], execute)
                for r in generator:
                    count += 1
            except Exception as e:
                print(e)
            print('finish job, id %s, count %s' % (job.id, count))
            resultjob = JobResult(job.jobname, count, job.id)

            finished_jobs.put(resultjob)
Author: Veterun, Project: etlpy, Source: distributed.py

Example 12: worker

# Required import: from multiprocessing.managers import BaseManager
def worker():
    # load 'session.txt', or call login() to generate it
    try:
        with open('session.txt', 'rb') as f:
            headers = pickle.load(f)
            cookies = pickle.load(f)
    except Exception:
        print('[-] No session.txt file, calling login()...')
        session = DoubanLogin().login()
        headers = session.headers
        cookies = session.cookies

    # connect to manager
    BaseManager.register('get_task_queue')
    BaseManager.register('get_result_queue')
    print('Connect to server %s:%s...' % (SERVER_ADDR, PORT))
    worker = BaseManager(address=(SERVER_ADDR, PORT), authkey=b'douban')
    worker.connect()
    task = worker.get_task_queue()
    result = worker.get_result_queue()

    # start listening
    print('[-] Waiting...')
    while True:
        try:
            id_ = task.get()
            print('[~] Running task...')
            info = get_user_info.get_info(id_, headers=headers, cookies=cookies)
            print('[+] Information returned.\n')
            result.put(info)
            print('[-] Waiting...')
            time.sleep(DELAY_TIME)

        except Exception as e:
            print(e)
            exit()
Author: Kr0c, Project: DoubanRobot, Source: worker.py

Example 13: platforms

# Required import: from multiprocessing.managers import BaseManager
def platforms():
    # 'platforms' must be registered before connect(); in the original client.py
    # this is presumably done at module level
    BaseManager.register('platforms')
    mgr = BaseManager(address=('127.0.0.1', 7777), authkey=b'xxx')
    mgr.connect()
    print(mgr.platforms()._getvalue())
Author: lowitty, Project: zacademy, Source: client.py

Example 14: get_cache

# Required import: from multiprocessing.managers import BaseManager
    def get_cache(self):
        manager = BaseManager(address=('', self.port), authkey=self.auth)
        manager.register('get_cache')
        manager.connect()
        return manager.get_cache()
Author: admgrn, Project: Switcharoo, Source: cachemanager.py

Example 15: start_session

# Required import: from multiprocessing.managers import BaseManager
def start_session(ip, port, passwd):
    mgr = BaseManager(address=(ip, port), authkey=passwd)
    mgr.connect()
    return mgr
Author: lowitty, Project: selenium, Source: base_clint_for_selenium.py
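
Note that start_session does not register any names itself; the caller is expected to call BaseManager.register() for whatever the server exposes before connecting. A hypothetical usage sketch follows: the name get_status, the address and the key are illustrative assumptions, and a server exporting the same name must already be listening.

BaseManager.register('get_status')               # hypothetical typeid; must match the server side
mgr = start_session('127.0.0.1', 7777, b'secret')
status = mgr.get_status()                        # returns a proxy for the server-side object
print(status._getvalue())                        # fetch the actual value behind the proxy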


Note: The multiprocessing.managers.BaseManager.connect examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their authors; copyright of the source code remains with the original authors, and distribution or reuse should follow the corresponding project's license. Please do not reproduce this article without permission.