

Python utils.run_in_thread Function Code Examples

This article collects and summarizes typical usage examples of the run_in_thread function from pyspider.libs.utils. If you are wondering what run_in_thread does, how to call it, or what real-world usage looks like, the curated examples below should help.


A total of 15 code examples of the run_in_thread function are shown below, sorted by popularity by default.
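
All of the examples share one pattern: utils.run_in_thread(func, *args, **kwargs) starts func in a background thread, forwarding any extra positional and keyword arguments, and returns the started threading.Thread object (the setUpClass examples keep this return value so the test can join the thread later). A component's XML-RPC interface is typically started this way while its main loop runs in the foreground. The sketch below illustrates the pattern with a hypothetical DummyService class that merely stands in for the scheduler/fetcher components used in the real examples.

import time

from pyspider.libs import utils


class DummyService(object):
    """Hypothetical stand-in for a pyspider component (scheduler, fetcher, ...)."""

    def __init__(self):
        self.running = True

    def xmlrpc_run(self, port=24444):
        # placeholder for the blocking XML-RPC loop a real component runs here
        while self.running:
            time.sleep(0.1)

    def run(self):
        # placeholder for the component's main work loop
        while self.running:
            time.sleep(0.1)


service = DummyService()
# start the RPC interface in a background thread; port=24444 is forwarded as a keyword argument
rpc_thread = utils.run_in_thread(service.xmlrpc_run, port=24444)
# the tests also run the main loop in a thread; the CLI commands call service.run() directly instead
main_thread = utils.run_in_thread(service.run)

time.sleep(0.5)
service.running = False
rpc_thread.join()
main_thread.join()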

Example 1: setUpClass

 def setUpClass(self):
     self.inqueue = Queue(10)
     self.outqueue = Queue(10)
     self.fetcher = Fetcher(self.inqueue, self.outqueue)
     self.rpc = xmlrpclib.ServerProxy('http://localhost:%d' % 24444)
     self.xmlrpc_thread = utils.run_in_thread(self.fetcher.xmlrpc_run, port=24444)
     self.thread = utils.run_in_thread(self.fetcher.run)
Author: BCriswell, Project: pyspider, Lines: 7, Source: test_fetcher.py

Example 2: setUpClass

    def setUpClass(self):
        shutil.rmtree("./data/tests", ignore_errors=True)
        os.makedirs("./data/tests")

        ctx = run.cli.make_context(
            "test",
            [
                "--taskdb",
                "sqlite+taskdb:///data/tests/task.db",
                "--projectdb",
                "sqlite+projectdb:///data/tests/projectdb.db",
                "--resultdb",
                "sqlite+resultdb:///data/tests/resultdb.db",
            ],
            None,
            obj=dict(testing_mode=True),
        )
        self.ctx = run.cli.invoke(ctx)

        ctx = run.scheduler.make_context("scheduler", [], self.ctx)
        scheduler = run.scheduler.invoke(ctx)
        utils.run_in_thread(scheduler.xmlrpc_run)
        utils.run_in_thread(scheduler.run)

        time.sleep(1)
Author: jiangwaniot, Project: pyspider, Lines: 25, Source: test_run.py

Example 3: bloomfilter

def bloomfilter(ctx, xmlrpc, xmlrpc_host, xmlrpc_port, key, capacity, error, redis):
    """
    Run bloomfilter, only one bloomfilter is allowed.
    """
    g = ctx.obj

    if os.name == 'nt':
        from pyspider.filter import BloomFilter
        bloomfilter = BloomFilter(key, capacity, error)
    else:
        from pyspider.filter import RedisBloomFilter
        from six.moves.urllib.parse import urlparse
        parsed = urlparse(redis)  # parse the connection URL passed via the `redis` option
        # ParseResult(scheme='', netloc='127.0.0.1:6379', path='/0', params='', query='', fragment='')
        bloomfilter = RedisBloomFilter(key, capacity, error,
            parsed.hostname, parsed.port, int(parsed.path.strip('/') or 0))

    g.instances.append(bloomfilter)
    if g.get('testing_mode'):
        return bloomfilter

    if xmlrpc:
        utils.run_in_thread(bloomfilter.xmlrpc_run, port=xmlrpc_port, bind=xmlrpc_host)
    bloomfilter.run()
Author: eromoe, Project: pyspider, Lines: 25, Source: run.py
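
Example 3 derives the Redis host, port, and database number from a connection URL with six.moves.urllib.parse.urlparse. A standalone check of that parsing logic, using a hypothetical Redis URL:

from six.moves.urllib.parse import urlparse

parsed = urlparse('redis://127.0.0.1:6379/0')   # hypothetical Redis URL
print(parsed.hostname)                           # '127.0.0.1'
print(parsed.port)                               # 6379
print(int(parsed.path.strip('/') or 0))          # database number: 0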

Example 4: scheduler

def scheduler(ctx, xmlrpc, xmlrpc_host, xmlrpc_port,
              inqueue_limit, delete_time, active_tasks, loop_limit, scheduler_cls,
              threads):
    """
    Run Scheduler, only one scheduler is allowed.
    """
    g = ctx.obj
    Scheduler = load_cls(None, None, scheduler_cls)

    kwargs = dict(taskdb=g.taskdb, projectdb=g.projectdb, resultdb=g.resultdb,
                  newtask_queue=g.newtask_queue, status_queue=g.status_queue,
                  out_queue=g.scheduler2fetcher, data_path=g.get('data_path', 'data'))
    if threads:
        kwargs['threads'] = int(threads)

    scheduler = Scheduler(**kwargs)
    scheduler.INQUEUE_LIMIT = inqueue_limit
    scheduler.DELETE_TIME = delete_time
    scheduler.ACTIVE_TASKS = active_tasks
    scheduler.LOOP_LIMIT = loop_limit

    g.instances.append(scheduler)
    if g.get('testing_mode'):
        return scheduler

    if xmlrpc:
        utils.run_in_thread(scheduler.xmlrpc_run, port=xmlrpc_port, bind=xmlrpc_host)
    scheduler.run()
Author: runt18, Project: pyspider, Lines: 28, Source: run.py

Example 5: setUpClass

    def setUpClass(self):
        import tests.data_test_webpage
        import httpbin

        self.httpbin_thread = utils.run_in_subprocess(httpbin.app.run, port=14887, passthrough_errors=False)
        self.httpbin = 'http://127.0.0.1:14887'

        self.inqueue = Queue(10)
        self.outqueue = Queue(10)
        self.fetcher = Fetcher(self.inqueue, self.outqueue)
        self.fetcher.phantomjs_proxy = '127.0.0.1:25555'
        self.rpc = xmlrpc_client.ServerProxy('http://localhost:%d' % 24444)
        self.xmlrpc_thread = utils.run_in_thread(self.fetcher.xmlrpc_run, port=24444)
        self.thread = utils.run_in_thread(self.fetcher.run)
        self.proxy_thread = subprocess.Popen(['pyproxy', '--username=binux',
                                              '--password=123456', '--port=14830',
                                              '--debug'], close_fds=True)
        self.proxy = '127.0.0.1:14830'
        try:
            self.phantomjs = subprocess.Popen(['phantomjs',
                os.path.join(os.path.dirname(__file__),
                    '../pyspider/fetcher/phantomjs_fetcher.js'),
                '25555'])
        except OSError:
            self.phantomjs = None
        time.sleep(0.5)
Author: eromoe, Project: pyspider, Lines: 26, Source: test_fetcher.py

Example 6: fetcher

def fetcher(ctx, xmlrpc, xmlrpc_host, xmlrpc_port, poolsize, proxy, user_agent,
            timeout, phantomjs_endpoint, splash_endpoint, fetcher_cls,
            async_mode=True, get_object=False, no_input=False):
    """
    Run Fetcher.
    """
    g = ctx.obj
    Fetcher = load_cls(None, None, fetcher_cls)

    if no_input:
        inqueue = None
        outqueue = None
    else:
        inqueue = g.scheduler2fetcher
        outqueue = g.fetcher2processor
    fetcher = Fetcher(inqueue=inqueue, outqueue=outqueue,
                      poolsize=poolsize, proxy=proxy, async_mode=async_mode)
    fetcher.phantomjs_proxy = phantomjs_endpoint or g.phantomjs_proxy
    fetcher.splash_endpoint = splash_endpoint
    if user_agent:
        fetcher.user_agent = user_agent
    if timeout:
        fetcher.default_options = copy.deepcopy(fetcher.default_options)
        fetcher.default_options['timeout'] = timeout

    g.instances.append(fetcher)
    if g.get('testing_mode') or get_object:
        return fetcher

    if xmlrpc:
        utils.run_in_thread(fetcher.xmlrpc_run, port=xmlrpc_port, bind=xmlrpc_host)
    fetcher.run()
Author: QxxDmaku, Project: pyspider, Lines: 32, Source: run.py

Example 7: run_fetcher

def run_fetcher(g=g):
    from pyspider.fetcher.tornado_fetcher import Fetcher
    fetcher = Fetcher(inqueue=g.scheduler2fetcher, outqueue=g.fetcher2processor)
    fetcher.phantomjs_proxy = g.phantomjs_proxy

    run_in_thread(fetcher.xmlrpc_run, port=g.fetcher_xmlrpc_port, bind=g.webui_host)
    fetcher.run()
Author: BCriswell, Project: pyspider, Lines: 7, Source: run.py

Example 8: scheduler

def scheduler(
    ctx, xmlrpc, xmlrpc_host, xmlrpc_port, inqueue_limit, delete_time, active_tasks, loop_limit, scheduler_cls
):
    g = ctx.obj
    Scheduler = load_cls(None, None, scheduler_cls)

    scheduler = Scheduler(
        taskdb=g.taskdb,
        projectdb=g.projectdb,
        resultdb=g.resultdb,
        newtask_queue=g.newtask_queue,
        status_queue=g.status_queue,
        out_queue=g.scheduler2fetcher,
        data_path=g.get("data_path", "data"),
    )
    scheduler.INQUEUE_LIMIT = inqueue_limit
    scheduler.DELETE_TIME = delete_time
    scheduler.ACTIVE_TASKS = active_tasks
    scheduler.LOOP_LIMIT = loop_limit

    g.instances.append(scheduler)
    if g.get("testing_mode"):
        return scheduler

    if xmlrpc:
        utils.run_in_thread(scheduler.xmlrpc_run, port=xmlrpc_port, bind=xmlrpc_host)
    scheduler.run()
Author: 0xa-cc, Project: pyspider, Lines: 27, Source: run.py

Example 9: setUpClass

    def setUpClass(self):
        import tests.data_test_webpage
        import httpbin

        self.httpbin_thread = utils.run_in_subprocess(httpbin.app.run, port=14887)
        self.httpbin = "http://127.0.0.1:14887"

        self.inqueue = Queue(10)
        self.outqueue = Queue(10)
        self.fetcher = Fetcher(self.inqueue, self.outqueue)
        self.fetcher.phantomjs_proxy = "127.0.0.1:25555"
        self.rpc = xmlrpc_client.ServerProxy("http://localhost:%d" % 24444)
        self.xmlrpc_thread = utils.run_in_thread(self.fetcher.xmlrpc_run, port=24444)
        self.thread = utils.run_in_thread(self.fetcher.run)
        self.proxy_thread = subprocess.Popen(
            ["pyproxy", "--username=binux", "--password=123456", "--port=14830", "--debug"], close_fds=True
        )
        self.proxy = "127.0.0.1:14830"
        try:
            self.phantomjs = subprocess.Popen(
                [
                    "phantomjs",
                    os.path.join(os.path.dirname(__file__), "../pyspider/fetcher/phantomjs_fetcher.js"),
                    "25555",
                ]
            )
        except OSError:
            self.phantomjs = None
        time.sleep(0.5)
Author: appleboy1977, Project: pyspider, Lines: 29, Source: test_fetcher.py

Example 10: run_scheduler

def run_scheduler(g=g):
    from pyspider.scheduler import Scheduler
    scheduler = Scheduler(taskdb=g.taskdb, projectdb=g.projectdb, resultdb=g.resultdb,
            newtask_queue=g.newtask_queue, status_queue=g.status_queue,
            out_queue=g.scheduler2fetcher)
    g.scheduler = scheduler
    run_in_thread(scheduler.xmlrpc_run)
    scheduler.run()
Author: CoralResort, Project: pyspider, Lines: 8, Source: run.py

Example 11: run_scheduler

def run_scheduler(g=g):
    from pyspider.scheduler import Scheduler
    scheduler = Scheduler(taskdb=g.taskdb, projectdb=g.projectdb, resultdb=g.resultdb,
            newtask_queue=g.newtask_queue, status_queue=g.status_queue,
            out_queue=g.scheduler2fetcher)
    if g.demo_mode:
        scheduler.INQUEUE_LIMIT = 1000

    run_in_thread(scheduler.xmlrpc_run, port=g.scheduler_xmlrpc_port, bind=g.webui_host)
    scheduler.run()
Author: BCriswell, Project: pyspider, Lines: 10, Source: run.py

Example 12: fetcher

def fetcher(ctx, xmlrpc, xmlrpc_host, xmlrpc_port):
    g = ctx.obj
    from pyspider.fetcher.tornado_fetcher import Fetcher
    fetcher = Fetcher(inqueue=g.scheduler2fetcher, outqueue=g.fetcher2processor)
    fetcher.phantomjs_proxy = g.phantomjs_proxy
    g.instances.append(fetcher)

    if xmlrpc:
        run_in_thread(fetcher.xmlrpc_run, port=xmlrpc_port, bind=xmlrpc_host)
    fetcher.run()
Author: YORYOR, Project: pyspider, Lines: 10, Source: run.py

Example 13: test_40_multiple_threading_error

    def test_40_multiple_threading_error(self):
        def put(q):
            for i in range(100):
                q.put("DATA_%d" % i)

        def get(q):
            for i in range(100):
                q.get()

        utils.run_in_thread(put, self.q3)
        get(self.q3)
Author: 1337573v3, Project: pyspider, Lines: 11, Source: test_message_queue.py
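
Example 13 also shows that extra positional arguments (here the queue self.q3) are forwarded to the target function: the producer runs in a background thread while the calling thread consumes. A minimal standalone sketch of the same pattern with a standard-library queue (not taken from the test suite):

from six.moves import queue as Queue

from pyspider.libs import utils


def put(q):
    for i in range(100):
        q.put("DATA_%d" % i)


q = Queue.Queue(maxsize=100)
utils.run_in_thread(put, q)   # producer in a background thread; q is forwarded as a positional argument
for _ in range(100):
    q.get()                   # consumer in the calling thread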

Example 14: setUpClass

 def setUpClass(self):
     self.inqueue = Queue(10)
     self.outqueue = Queue(10)
     self.fetcher = Fetcher(self.inqueue, self.outqueue)
     self.fetcher.phantomjs_proxy = 'localhost:25555'
     self.rpc = xmlrpclib.ServerProxy('http://localhost:%d' % 24444)
     self.xmlrpc_thread = utils.run_in_thread(self.fetcher.xmlrpc_run, port=24444)
     self.thread = utils.run_in_thread(self.fetcher.run)
     self.phantomjs = subprocess.Popen(['phantomjs',
         os.path.join(os.path.dirname(__file__),
             '../pyspider/fetcher/phantomjs_fetcher.js'),
         '25555'])
Author: aleemb, Project: pyspider, Lines: 12, Source: test_fetcher.py

Example 15: run_scheduler

 def run_scheduler():
     scheduler = Scheduler(taskdb=get_taskdb(), projectdb=get_projectdb(),
                           newtask_queue=self.newtask_queue, status_queue=self.status_queue,
                           out_queue=self.scheduler2fetcher, data_path="./data/tests/",
                           resultdb=get_resultdb())
     scheduler.UPDATE_PROJECT_INTERVAL = 0.1
     scheduler.LOOP_INTERVAL = 0.1
     scheduler.INQUEUE_LIMIT = 10
     Scheduler.DELETE_TIME = 0
     scheduler._last_tick = int(time.time())  # not dispatch cronjob
     run_in_thread(scheduler.xmlrpc_run, port=self.scheduler_xmlrpc_port)
     scheduler.run()
Author: 7472741, Project: pyspider, Lines: 12, Source: test_scheduler.py


Note: The pyspider.libs.utils.run_in_thread examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are taken from open-source projects contributed by their respective authors, and copyright remains with those authors; please check each project's license before using or redistributing the code. Do not reproduce this article without permission.