当前位置: 首页>>代码示例>>Python>>正文


Python result.ResultSet类代码示例

本文整理汇总了Python中celery.result.ResultSet的典型用法代码示例。如果您正苦于以下问题:Python ResultSet类的具体用法?Python ResultSet怎么用?Python ResultSet使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。


在下文中一共展示了ResultSet类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。

示例1: aggregate_result_set

    def aggregate_result_set(self, agg, acc):
        """
        Fold the celery AsyncResults in ``self.results`` into the
        accumulator with the given aggregation function.

        :param agg: the aggregation function, (acc, val) -> new acc
        :param acc: the initial value of the accumulator
        :returns: the final value of the accumulator
        """
        if not self.results:
            return acc
        backend = current_app().backend
        is_amqp = backend.__class__.__name__.startswith("AMQP")
        for task_id, result_dict in ResultSet(self.results).iter_native():
            check_mem_usage()  # warn if too much memory is used
            result = result_dict["result"]
            if isinstance(result, BaseException):
                raise result
            self.received.append(len(result))
            acc = agg(acc, result.unpickle())
            if is_amqp:
                # work around a celery bug
                del backend._cache[task_id]
        return acc
开发者ID:acerisara,项目名称:oq-engine,代码行数:25,代码来源:tasks.py

示例2: test_iterate_respects_subpolling_interval

    def test_iterate_respects_subpolling_interval(self):
        """iterate() must sleep for the backend's subpolling interval
        between polls, and must not sleep when the interval is zero."""
        res_a = self.app.AsyncResult(uuid())
        res_b = self.app.AsyncResult(uuid())
        backend = res_a.backend = res_b.backend = Mock()
        backend.subpolling_interval = 10

        ready = res_a.ready = res_b.ready = Mock()

        def fail_on_next_call(*args, **kwargs):
            # first call reports "not ready"; every later call raises
            ready.side_effect = KeyError()
            return False

        ready.return_value = False
        ready.side_effect = fail_on_next_call

        rset = ResultSet([res_a, res_b])
        with self.dummy_copy():
            with patch("celery.result.time") as _time:
                with self.assertRaises(KeyError):
                    list(rset.iterate())
                _time.sleep.assert_called_with(10)

            backend.subpolling_interval = 0
            with patch("celery.result.time") as _time:
                with self.assertRaises(KeyError):
                    ready.return_value = False
                    ready.side_effect = fail_on_next_call
                    list(rset.iterate())
                self.assertFalse(_time.sleep.called)
开发者ID:slyons,项目名称:celery,代码行数:29,代码来源:test_result.py

示例3: aggregate_result_set

        def aggregate_result_set(self, agg, acc):
            """
            Fold a set of celery AsyncResults into the accumulator
            with the given aggregation function.

            :param agg: the aggregation function, (acc, val) -> new acc
            :param acc: the initial value of the accumulator
            :returns: the final value of the accumulator
            """
            if isinstance(self.oqtask, types.FunctionType):
                # celery is not in use; defer to the parent implementation
                return super(OqTaskManager, self).aggregate_result_set(
                    agg, acc)
            if not self.results:
                return acc
            backend = current_app().backend
            is_amqp = backend.__class__.__name__.startswith('AMQP')
            for task_id, result_dict in ResultSet(self.results).iter_native():
                # drop the first matching id (same effect as index + pop)
                self.task_ids.remove(task_id)
                parallel.check_mem_usage()  # warn if too much memory is used
                result = result_dict['result']
                if isinstance(result, BaseException):
                    raise result
                self.received.append(len(result))
                acc = agg(acc, result.unpickle())
                if is_amqp:
                    # work around a celery bug
                    del backend._cache[task_id]
            return acc
开发者ID:mehmadi,项目名称:oq-engine,代码行数:31,代码来源:tasks.py

示例4: test_result_set_error

    def test_result_set_error(self, manager):
        """A failed task inside a ResultSet must not mask successful ones."""
        assert manager.inspect().ping()

        result_set = ResultSet([raise_error.delay(), add.delay(1, 1)])
        result_set.get(timeout=TIMEOUT, propagate=False)

        failed, succeeded = result_set.results
        assert failed.failed()
        assert succeeded.successful()
开发者ID:tothegump,项目名称:celery,代码行数:8,代码来源:test_canvas.py

示例5: test_times_out

 def test_times_out(self):
     """iterate(timeout=...) must raise TimeoutError when a result
     never becomes ready.

     Bug fix: the original passed the ``uuid`` function object itself to
     AsyncResult instead of calling it — ``uuid()`` generates the task id.
     """
     res = self.app.AsyncResult(uuid())
     res.ready = Mock()
     res.ready.return_value = False
     rset = ResultSet([res])
     with self.dummy_copy():
         with patch("celery.result.time"):
             with self.assertRaises(TimeoutError):
                 list(rset.iterate(timeout=1))
开发者ID:slyons,项目名称:celery,代码行数:9,代码来源:test_result.py

示例6: testbuf

def testbuf(padbytes=0, megabytes=0):
    """Queue 32 sleeping tasks carrying a payload of the given size,
    then join on the whole set and print the results.

    :param padbytes: extra payload bytes per task
    :param megabytes: extra payload megabytes per task
    """
    padding = float(padbytes) + 2 ** 20 * float(megabytes)
    print('> padding: %r' % (padding, ))

    results = []
    for _ in range(8 * 4):
        results.append(sleeping.delay(1, kw='x' * int(padding)))
        time.sleep(0.01)  # small gap between submissions

    print(ResultSet(results).join())
开发者ID:NarrativeTeam,项目名称:celery,代码行数:11,代码来源:testbuf.py

示例7: test_add_discard

    def test_add_discard(self):
        """add()/discard() accept AsyncResult instances and raw task ids,
        and discard() is idempotent."""
        rset = ResultSet([])
        rset.add(AsyncResult("1"))
        self.assertIn(AsyncResult("1"), rset.results)
        rset.discard(AsyncResult("1"))
        rset.discard(AsyncResult("1"))  # discarding twice is a no-op
        rset.discard("1")               # discarding by id also works
        self.assertNotIn(AsyncResult("1"), rset.results)

        rset.update([AsyncResult("2")])
开发者ID:harmv,项目名称:celery,代码行数:10,代码来源:test_result.py

示例8: __init__

    def __init__(self, config, pool, manager_conf, pick_conf, cleaner):
        """
        :param config: mapping with retry/backoff/threshold settings
        :param pool: worker pool used to run tasks
        :param manager_conf: configuration forwarded to the base manager
        :param pick_conf: pick configuration forwarded to the base manager
        :param cleaner: cleanup helper stored for later use
        """
        super(manager, self).__init__(manager_conf, pick_conf)
        self.path = getenv('MW_HOME')
        # fix: identity comparison with None (was `!= None`, PEP 8 E711)
        assert self.path is not None
        self.tasks = ResultSet([])

        self.backoff = int(config['retry_backoff'])
        self.powlim = int(config['max_backoff_power'])
        # backend codes to retry
        # CONFIRM: we're not retrying with other codes
        codes = []
        for key in ('retry_forever_list', 'retry_sometime_list'):
            if config[key] != '':
                codes.extend(int(c) for c in config[key].split(','))
        self.backend_retry = set(codes)
        # fingerprint score/duration thresholds, keyed by media type
        self.thresholds = defaultdict(lambda: {})
        for media in ('audio', 'video'):
            self.thresholds[media]['score'] = int(
                config['fp_%s_score' % media])
            self.thresholds[media]['duration'] = int(
                config['fp_%s_duration' % media])

        self.task_set_join_timeout = int(config['task_set_join_timeout'])

        self.pool = pool
        self.cleaner = cleaner
        self.taskm = defaultdict(dict)
开发者ID:dlf412,项目名称:thunderCopyright,代码行数:28,代码来源:manager.py

示例9: saveResultsAndCleanUp

    def saveResultsAndCleanUp(self):
        """
            Executes after the retrieval is done: waits (poll every 7s)
            for any outstanding celery tasks, then dumps each writer's
            table to a JSON file in the experiment directory.
        """
        if self.use_celery:
            print("Waiting for tasks to complete...")
            pending = ResultSet(self.tasks)
            while not pending.ready():
                try:
                    time.sleep(7)
                except KeyboardInterrupt:
                    print("Cancelled waiting")
                    break
            print("All tasks finished.")

        for name in self.writers:
            writer = self.writers[name]
            out_path = os.path.join(
                self.exp["exp_dir"], writer.table_name + ".json")
            writer.saveAsJSON(out_path)
开发者ID:danieldmm,项目名称:minerva,代码行数:17,代码来源:precomputed_pipeline.py

示例10: get_result

def get_result(request):
	"""Poll the status of a parent task and its spawned subtasks.

	Expects a POST carrying 'task_id'; fills ``ret`` with an overall
	state ('PENDING' / 'SUCCESS' / 'FAILURE') plus progress messages.
	"""
	ret = { 'status': 'error', 'result': '', 'messages': [ '', ],  }
	if request.method == 'POST' and request.user:
		try:
			user = MDBUser.objects.get(username=request.user.username)
		# fix: `except Exception, e` is Python-2-only syntax
		except Exception as e:
			ret['messages'][0] = "<strong>FATAL</strong>(get_result.user): %s" % e
		else:
			# Note: this is NOT status of tasks, 'success' here means that
			# get_result() request was processed correctly
			ret['status'] = 'success'
			async_res = AsyncResult(request.POST['task_id'])
			if async_res.ready():
				# Get all subtasks spawned by parent.
				# fix: this was `None` (ust_get_ids disabled), which made the
				# loop below raise TypeError; default to an empty list instead.
				subtasks = []  # ust_get_ids(user)
				# Create list of AsyncResults from list of task_ids
				async_results = []
				for task_id in subtasks:
					async_results.append(AsyncResult(task_id))
				# And also ResultSet for convenience
				async_res_set = ResultSet(async_results)
				ret['messages'][0] = 'parent task %s: %d of %d subtasks completed' %\
									 (request.POST['task_id'][:8],
									  async_res_set.completed_count(),
									  async_res_set.total,
									 )
				# All tasks completed ?
				if async_res_set.ready():
					# All tasks done, forget about those task ids
					#ust_clear_ids(user)
					# Any of them failed ?
					if async_res_set.failed():
						ret['result'] = 'FAILURE'
						for async_res in async_results:
							if async_res.state == 'FAILURE':
								ret['messages'].append("<strong>ERROR</strong>(get_result.FAILURE): '%s':'%s'" %\
													   (async_res.task_id[:8], async_res.result, ))
					else:
						ret['result'] = 'SUCCESS'
				else:
					ret['result'] = 'PENDING'
			else:
				ret['result'] = 'PENDING'
				ret['messages'][0] = 'parent task %s: PENDING' % \
					(request.POST['task_id'], )
	# NOTE(review): original snippet built `ret` but never returned it; a
	# Django view must return a response — presumably the original wrapped
	# this in a JSON response. Returning the dict so callers can serialize.
	return ret
开发者ID:velsa,项目名称:mdbox,代码行数:45,代码来源:views.py

示例11: aggregate_result_set

    def aggregate_result_set(self, agg, acc):
        """
        Fold every pending result into the accumulator with the given
        aggregation function, dispatching on the distribution mode.

        :param agg: the aggregation function, (acc, val) -> new acc
        :param acc: the initial value of the accumulator
        :returns: the final value of the accumulator
        """
        if not self.results:
            return acc

        distribute = oq_distribute()  # not called for distribute == 'no'

        if distribute == 'celery':
            backend = current_app().backend
            is_amqp = backend.__class__.__name__.startswith('AMQP')
            for task_id, result_dict in ResultSet(self.results).iter_native():
                # drop the first matching id (same effect as index + pop)
                self.task_ids.remove(task_id)
                check_mem_usage()  # warn if too much memory is used
                result = result_dict['result']
                if isinstance(result, BaseException):
                    raise result
                self.received.append(len(result))
                acc = agg(acc, result.unpickle())
                if is_amqp:
                    # work around a celery bug
                    del backend._cache[task_id]
            return acc

        elif distribute == 'futures':
            for future in as_completed(self.results):
                # log a warning if too much memory is used
                check_mem_usage()
                result = future.result()
                if isinstance(result, BaseException):
                    raise result
                self.received.append(len(result))
                acc = agg(acc, result.unpickle())
            return acc
开发者ID:gem,项目名称:oq-risklib,代码行数:44,代码来源:parallel.py

示例12: aggregate_result_set

    def aggregate_result_set(self, agg, acc):
        """
        Fold a set of celery AsyncResults into the accumulator with the
        given aggregation function.

        :param agg: the aggregation function, (acc, val) -> new acc
        :param acc: the initial value of the accumulator
        :returns: the final value of the accumulator
        """
        if not self.results:
            return acc
        backend = current_app().backend
        for task_id, result_dict in ResultSet(self.results).iter_native():
            check_mem_usage()  # log a warning if too much memory is used
            result = result_dict['result']
            if isinstance(result, BaseException):
                raise result
            acc = agg(acc, result.unpickle())
            del backend._cache[task_id]  # work around a celery bug
        return acc
开发者ID:serkansevilgen,项目名称:oq-engine,代码行数:21,代码来源:tasks.py

示例13: test_get

 def test_get(self):
     """get() must dispatch to join_native() only when the backend
     advertises native-join support, otherwise to join()."""
     rset = ResultSet(map(AsyncResult, [1, 2, 3]))
     backend = rset.results[0].backend = Mock()
     rset.join_native = Mock()
     rset.join = Mock()
     backend.supports_native_join = False
     rset.get()
     self.assertTrue(rset.join.called)
     backend.supports_native_join = True
     rset.get()
     self.assertTrue(rset.join_native.called)
开发者ID:aliscott,项目名称:celery,代码行数:11,代码来源:test_result.py

示例14: distributetasks

    def distributetasks(self, dnsublist, targetdnlist, workername=""):
        """
        Fan out one prefilter subtask per source-DN chunk via celery.

        :param dnsublist: list of source-DN chunks, one subtask each
        :param targetdnlist: target DNs passed to every subtask
        :param workername: optional worker restriction for the subtasks
        :returns: dict with the main task id, subtask count and run state
        """
        # Send the Task to Celery Queue
        import ltecpxx.mrosimpleexecutor as r

        taskid = uuid.uuid1().int  # create a unique main task id
        self.tasklist.append({taskid: ''})
        resultset = ResultSet([])
        # fix: configure celery once, outside the loop — the original
        # re-created Celery() per iteration and set EAGER twice
        celery = Celery()
        celery.conf.CELERY_ALWAYS_EAGER = True
        for sourcedns in dnsublist:
            # send sub tasks for the main task to Celery
            args = (sourcedns, targetdnlist)
            kwargs = {'workername': workername}
            result = r.doTargetprefilter.apply_async(args, kwargs)
            resultset.add(result)
            print("Result Is Done %s "  % result.ready())
        self.tasklist[-1][taskid] = resultset
        print("Task List Conents", self.tasklist)
        # return the status of the operation
        resp = {'TaskId': taskid, 'NumberofSubtasks': len(dnsublist),
                'RunState': str(RunState.Submitted)}
        return resp
开发者ID:riomus,项目名称:mypy,代码行数:23,代码来源:ltecp.py

示例15: initialise_fb_user

def initialise_fb_user(domain_uri, access_token):
    """Fetch a Facebook user's photos, process and filter them, upload
    the training set, and kick off training for that user's group."""
    fb_user = get_fb_user(access_token)
    group_name = fb_user.id

    photos = get_fb_photos(access_token)

    if settings.USE_ASYNC:
        pending = ResultSet(
            [process_fb_photo.delay(d, access_token) for d in photos['data']])
        batches = pending.join()
    else:
        batches = [process_fb_photo(d, access_token) for d in photos['data']]
    # each task returns a batch of photos; flatten into one list
    processed_photos = [p for photos in batches for p in photos]

    filtered_photos = filter_fb_photos_for_training(processed_photos)
    media_uri = urlparse.urljoin(domain_uri, settings.MEDIA_URL)

    if settings.USE_ASYNC:
        uploads = ResultSet(
            [upload_fb_photos_for_training.delay([p], group_name, media_uri)
             for p in filtered_photos])
        uploads.join()
    else:
        upload_fb_photos_for_training(filtered_photos, group_name, media_uri)

    train_fb_photos(group_name)
开发者ID:algobunny,项目名称:djv,代码行数:23,代码来源:tasks.py


注:本文中的celery.result.ResultSet类示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。