

Python HgPushlogProcess.run Method Code Examples

This article collects typical usage examples of the Python method treeherder.etl.pushlog.HgPushlogProcess.run. If you are unsure what HgPushlogProcess.run does or how to call it in your own code, the curated examples below should help. You can also explore further usage examples of the class it belongs to, treeherder.etl.pushlog.HgPushlogProcess.


A total of 15 code examples of the HgPushlogProcess.run method are shown below, sorted by popularity by default.
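All of the examples share the same basic calling pattern: construct an HgPushlogProcess and pass its run() method the repository's json-pushes URL plus the repository name. The following is a minimal sketch of that pattern, not taken from any single example; the repository name and URL are placeholders, and it assumes a working treeherder environment (Django settings loaded and the repository already registered), since run() both fetches the pushlog over HTTP and writes the pushes to the database.

from treeherder.etl.pushlog import HgPushlogProcess

# Placeholder repository details; substitute a repository configured in your treeherder instance.
repo_name = "mozilla-central"
repo_url = "https://hg.mozilla.org/mozilla-central"

process = HgPushlogProcess()
# Fetch the version-2 json-pushes feed for the repository and ingest every push it returns.
process.run(repo_url + "/json-pushes/?full=1&version=2", repo_name)

The management-command examples below extend this call with keyword arguments such as changeset= (to ingest the push containing a single changeset) and last_push_id= (which those examples use to fetch only pushes newer than a given push id).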

Example 1: fetch_hg_push_log

# Required import: from treeherder.etl.pushlog import HgPushlogProcess [as alias]
# Or: from treeherder.etl.pushlog.HgPushlogProcess import run [as alias]
def fetch_hg_push_log(repo_name, repo_url):
    """
    Run an HgPushlog ETL process
    """
    newrelic.agent.add_custom_parameter("repo_name", repo_name)
    process = HgPushlogProcess()
    process.run(repo_url + '/json-pushes/?full=1&version=2', repo_name)
Developer: AnthonyMeaux, Project: treeherder, Lines of code: 9, Source: buildapi_tasks.py

Example 2: test_empty_json_pushes

# Required import: from treeherder.etl.pushlog import HgPushlogProcess [as alias]
# Or: from treeherder.etl.pushlog.HgPushlogProcess import run [as alias]
def test_empty_json_pushes(jm, test_base_dir,
                           test_repository, mock_post_json,
                           activate_responses):
    """
    Gracefully handle getting an empty list of pushes from json-pushes

    """

    pushlog_fake_url = "http://www.thisismypushlog.com/?full=1&version=2"

    # the pushlog endpoint returns an empty list of pushes
    empty_push_json = json.dumps({"lastpushid": 123, "pushes": {}})
    responses.add(responses.GET, pushlog_fake_url,
                  body=empty_push_json, status=200,
                  content_type='application/json',
                  match_querystring=True,
                  )

    process = HgPushlogProcess()
    process.run(pushlog_fake_url, jm.project)

    pushes_stored = jm.get_dhub().execute(
        proc="jobs_test.selects.result_set_ids",
        return_type='tuple'
    )

    assert len(pushes_stored) == 0
Developer: KWierso, Project: treeherder, Lines of code: 29, Source: test_pushlog.py

Example 3: test_ingest_hg_pushlog

# Required import: from treeherder.etl.pushlog import HgPushlogProcess [as alias]
# Or: from treeherder.etl.pushlog.HgPushlogProcess import run [as alias]
def test_ingest_hg_pushlog(jm, test_base_dir,
                           test_repository, mock_post_json,
                           activate_responses):
    """ingesting a number of pushes should populate result set and revisions"""

    pushlog_path = os.path.join(test_base_dir, 'sample_data', 'hg_pushlog.json')
    with open(pushlog_path) as f:
        pushlog_content = f.read()
    pushlog_fake_url = "http://www.thisismypushlog.com"
    push_num = 10
    responses.add(responses.GET, pushlog_fake_url,
                  body=pushlog_content, status=200,
                  content_type='application/json')

    process = HgPushlogProcess()

    process.run(pushlog_fake_url, jm.project)

    pushes_stored = jm.get_dhub().execute(
        proc="jobs_test.selects.result_set_ids",
        return_type='tuple'
    )

    assert len(pushes_stored) == push_num

    revisions_stored = jm.get_dhub().execute(
        proc="jobs_test.selects.revision_ids",
        return_type='tuple'
    )

    assert len(revisions_stored) == 15
Developer: KWierso, Project: treeherder, Lines of code: 33, Source: test_pushlog.py

Example 4: test_ingest_hg_pushlog_already_stored

# Required import: from treeherder.etl.pushlog import HgPushlogProcess [as alias]
# Or: from treeherder.etl.pushlog.HgPushlogProcess import run [as alias]
def test_ingest_hg_pushlog_already_stored(jm, initial_data, test_base_dir,
                                          test_repository, mock_post_json, activate_responses):
    """test that trying to ingest a push already stored doesn't doesn't affect
    all the pushes in the request,
    e.g. trying to store [A,B] with A already stored, B will be stored"""

    pushlog_path = os.path.join(test_base_dir, 'sample_data', 'hg_pushlog.json')
    with open(pushlog_path) as f:
        pushlog_content = f.read()
    pushes = list(json.loads(pushlog_content).values())
    first_push, second_push = pushes[0:2]

    pushlog_fake_url = "http://www.thisismypushlog.com/?full=1"

    # store the first push only
    first_push_json = json.dumps({"1": first_push})
    responses.add(responses.GET, pushlog_fake_url,
                  body=first_push_json, status=200,
                  content_type='application/json',
                  match_querystring=True,
                  )

    process = HgPushlogProcess()
    process.run(pushlog_fake_url, jm.project)

    pushes_stored = jm.get_dhub().execute(
        proc="jobs_test.selects.result_set_ids",
        return_type='tuple'
    )

    assert len(pushes_stored) == 1

    # store both first and second push
    first_and_second_push_json = json.dumps(
        {"1": first_push, "2": second_push}
    )
    responses.add(
        responses.GET,
        pushlog_fake_url + "&fromchange=2c25d2bbbcd6ddbd45962606911fd429e366b8e1",
        body=first_and_second_push_json,
        status=200, content_type='application/json',
        match_querystring=True)

    process = HgPushlogProcess()

    process.run(pushlog_fake_url, jm.project)

    pushes_stored = jm.get_dhub().execute(
        proc="jobs_test.selects.result_set_ids",
        return_type='tuple'
    )

    assert len(pushes_stored) == 2
Developer: EdgarChen, Project: treeherder, Lines of code: 56, Source: test_pushlog.py

Example 5: _handle

# Required import: from treeherder.etl.pushlog import HgPushlogProcess [as alias]
# Or: from treeherder.etl.pushlog.HgPushlogProcess import run [as alias]
    def _handle(self, *args, **options):
        if len(args) != 2:
            raise CommandError("Need to specify (only) branch and changeset")

        (project, changeset) = args

        # get reference to repo
        rdm = RefDataManager()
        repos = filter(lambda x: x['name'] == project,
                       rdm.get_all_repository_info())
        if not repos:
            raise CommandError("No project found named '%s'" % project)
        repo = repos[0]

        # make sure all tasks are run synchronously / immediately
        settings.CELERY_ALWAYS_EAGER = True

        # get hg pushlog
        pushlog_url = '%s/json-pushes/?full=1&version=2' % repo['url']

        # ingest this particular revision for this project
        process = HgPushlogProcess()
        # Use the actual push SHA, in case the changeset specified was a tag
        # or branch name (eg tip). HgPushlogProcess returns the full SHA.
        push_sha = process.run(pushlog_url, project, changeset=changeset)[:12]

        Builds4hJobsProcess().run(project_filter=project,
                                  revision_filter=push_sha,
                                  job_group_filter=options['filter_job_group'])
        PendingJobsProcess().run(project_filter=project,
                                 revision_filter=push_sha,
                                 job_group_filter=options['filter_job_group'])
        RunningJobsProcess().run(project_filter=project,
                                 revision_filter=push_sha,
                                 job_group_filter=options['filter_job_group'])
Developer: EricRahm, Project: treeherder, Lines of code: 37, Source: ingest_push.py

Example 6: _handle

# Required import: from treeherder.etl.pushlog import HgPushlogProcess [as alias]
# Or: from treeherder.etl.pushlog.HgPushlogProcess import run [as alias]
    def _handle(self, *args, **options):
        if len(args) != 2:
            raise CommandError("Need to specify (only) branch and changeset")

        (project, changeset) = args

        # get reference to repo
        repo = Repository.objects.get(name=project, active_status='active')

        # make sure all tasks are run synchronously / immediately
        settings.CELERY_ALWAYS_EAGER = True

        # get hg pushlog
        pushlog_url = '%s/json-pushes/?full=1&version=2' % repo.url

        # ingest this particular revision for this project
        process = HgPushlogProcess()
        # Use the actual push SHA, in case the changeset specified was a tag
        # or branch name (eg tip). HgPushlogProcess returns the full SHA.
        push_sha = process.run(pushlog_url, project, changeset=changeset)

        Builds4hJobsProcess().run(project_filter=project,
                                  revision_filter=push_sha,
                                  job_group_filter=options['filter_job_group'])
        PendingJobsProcess().run(project_filter=project,
                                 revision_filter=push_sha,
                                 job_group_filter=options['filter_job_group'])
        RunningJobsProcess().run(project_filter=project,
                                 revision_filter=push_sha,
                                 job_group_filter=options['filter_job_group'])
Developer: AnthonyMeaux, Project: treeherder, Lines of code: 32, Source: ingest_push.py

Example 7: test_ingest_hg_pushlog_already_stored

# Required import: from treeherder.etl.pushlog import HgPushlogProcess [as alias]
# Or: from treeherder.etl.pushlog.HgPushlogProcess import run [as alias]
def test_ingest_hg_pushlog_already_stored(jm, test_base_dir,
                                          test_repository, activate_responses):
    """test that trying to ingest a push already stored doesn't doesn't affect
    all the pushes in the request,
    e.g. trying to store [A,B] with A already stored, B will be stored"""

    pushlog_path = os.path.join(test_base_dir, 'sample_data', 'hg_pushlog.json')
    with open(pushlog_path) as f:
        pushlog_json = json.load(f)
    pushes = list(pushlog_json['pushes'].values())
    first_push, second_push = pushes[0:2]

    pushlog_fake_url = "http://www.thisismypushlog.com/?full=1&version=2"

    # store the first push only
    first_push_json = json.dumps({"lastpushid": 1, "pushes": {"1": first_push}})
    responses.add(responses.GET, pushlog_fake_url,
                  body=first_push_json, status=200,
                  content_type='application/json',
                  match_querystring=True,
                  )

    process = HgPushlogProcess()
    process.run(pushlog_fake_url, jm.project)

    assert Push.objects.count() == 1

    # store both first and second push
    first_and_second_push_json = json.dumps(
        {"lastpushid": 2, "pushes": {"1": first_push, "2": second_push}}
    )

    responses.add(
        responses.GET,
        pushlog_fake_url + "&startID=1",
        body=first_and_second_push_json,
        status=200, content_type='application/json',
        match_querystring=True)

    process = HgPushlogProcess()

    process.run(pushlog_fake_url, jm.project)

    assert Push.objects.count() == 2
Developer: askeing, Project: treeherder, Lines of code: 46, Source: test_pushlog.py

Example 8: test_ingest_hg_pushlog

# Required import: from treeherder.etl.pushlog import HgPushlogProcess [as alias]
# Or: from treeherder.etl.pushlog.HgPushlogProcess import run [as alias]
def test_ingest_hg_pushlog(jm, initial_data, test_base_dir,
                           test_repository, mock_post_json,
                           activate_responses, pulse_resultset_consumer):
    """ingesting a number of pushes should populate result set and revisions"""

    pushlog_path = os.path.join(test_base_dir, 'sample_data', 'hg_pushlog.json')
    with open(pushlog_path) as f:
        pushlog_content = f.read()
    pushlog_fake_url = "http://www.thisismypushlog.com"
    push_num = 10
    responses.add(responses.GET, pushlog_fake_url,
                  body=pushlog_content, status=200,
                  content_type='application/json')

    process = HgPushlogProcess()

    process.run(pushlog_fake_url, jm.project)

    pushes_stored = jm.get_dhub().execute(
        proc="jobs_test.selects.result_set_ids",
        return_type='tuple'
    )

    assert len(pushes_stored) == push_num

    rev_to_push = set()
    for push in json.loads(pushlog_content).values():
        # Add each rev to the set; remember we shorten them all down to 12 chars
        rev_to_push.add(push['changesets'][-1]['node'][0:12])

    # Ensure for each push we sent a pulse notification...
    for _ in range(0, push_num):
        message = pulse_resultset_consumer.get(block=True, timeout=2)
        content = json.loads(message.body)
        assert content['revision'] in rev_to_push
        # Ensure we don't match the same revision twice...
        rev_to_push.remove(content['revision'])

    revisions_stored = jm.get_dhub().execute(
        proc="jobs_test.selects.revision_ids",
        return_type='tuple'
    )

    assert len(revisions_stored) == 15
Developer: EdgarChen, Project: treeherder, Lines of code: 46, Source: test_pushlog.py

Example 9: test_ingest_hg_pushlog

# Required import: from treeherder.etl.pushlog import HgPushlogProcess [as alias]
# Or: from treeherder.etl.pushlog.HgPushlogProcess import run [as alias]
def test_ingest_hg_pushlog(test_repository, test_base_dir,
                           activate_responses):
    """ingesting a number of pushes should populate result set and revisions"""

    pushlog_path = os.path.join(test_base_dir, 'sample_data', 'hg_pushlog.json')
    with open(pushlog_path) as f:
        pushlog_content = f.read()
    pushlog_fake_url = "http://www.thisismypushlog.com"
    responses.add(responses.GET, pushlog_fake_url,
                  body=pushlog_content, status=200,
                  content_type='application/json')

    process = HgPushlogProcess()

    process.run(pushlog_fake_url, test_repository.name)

    # should be 10 pushes, 15 revisions
    assert Push.objects.count() == 10
    assert Commit.objects.count() == 15
Developer: SebastinSanty, Project: treeherder, Lines of code: 21, Source: test_pushlog.py

Example 10: test_ingest_hg_pushlog

# Required import: from treeherder.etl.pushlog import HgPushlogProcess [as alias]
# Or: from treeherder.etl.pushlog.HgPushlogProcess import run [as alias]
def test_ingest_hg_pushlog(jm, initial_data, test_base_dir, test_repository,
                           mock_post_json_data):
    """ingesting a number of pushes should populate result set and revisions"""

    pushlog = os.path.join(test_base_dir, 'sample_data', 'hg_pushlog.json')
    process = HgPushlogProcess()

    process.run("file://{0}".format(pushlog), jm.project)

    pushes_stored = jm.get_jobs_dhub().execute(
        proc="jobs_test.selects.result_set_ids", return_type='tuple')

    assert len(pushes_stored) == 10

    revisions_stored = jm.get_jobs_dhub().execute(
        proc="jobs_test.selects.revision_ids", return_type='tuple')

    assert len(revisions_stored) == 15

    jm.disconnect()
Developer: jonasfj, Project: treeherder-service, Lines of code: 22, Source: test_pushlog.py

Example 11: test_empty_json_pushes

# Required import: from treeherder.etl.pushlog import HgPushlogProcess [as alias]
# Or: from treeherder.etl.pushlog.HgPushlogProcess import run [as alias]
def test_empty_json_pushes(test_repository, test_base_dir,
                           activate_responses):
    """
    Gracefully handle getting an empty list of pushes from json-pushes

    """

    pushlog_fake_url = "http://www.thisismypushlog.com/?full=1&version=2"

    # the pushlog endpoint returns an empty list of pushes
    empty_push_json = json.dumps({"lastpushid": 123, "pushes": {}})
    responses.add(responses.GET, pushlog_fake_url,
                  body=empty_push_json, status=200,
                  content_type='application/json',
                  match_querystring=True,
                  )

    process = HgPushlogProcess()
    process.run(pushlog_fake_url, test_repository.name)

    assert Push.objects.count() == 0
Developer: SebastinSanty, Project: treeherder, Lines of code: 23, Source: test_pushlog.py

Example 12: _handle

# Required import: from treeherder.etl.pushlog import HgPushlogProcess [as alias]
# Or: from treeherder.etl.pushlog.HgPushlogProcess import run [as alias]
    def _handle(self, *args, **options):
        project = options['project']
        changeset = options['changeset']

        if not options['last_n_pushes'] and not changeset:
            raise CommandError('must specify --last-n-pushes or a positional '
                               'changeset argument')

        # get reference to repo
        repo = Repository.objects.get(name=project, active_status='active')

        if options['last_n_pushes']:
            last_push_id = last_push_id_from_server(repo)
            fetch_push_id = max(1, last_push_id - options['last_n_pushes'])
            logger.info('last server push id: %d; fetching push %d and newer',
                        last_push_id, fetch_push_id)
        else:
            fetch_push_id = None

        # make sure all tasks are run synchronously / immediately
        settings.CELERY_TASK_ALWAYS_EAGER = True

        # get hg pushlog
        pushlog_url = '%s/json-pushes/?full=1&version=2' % repo.url

        # ingest this particular revision for this project
        process = HgPushlogProcess()
        # Use the actual push SHA, in case the changeset specified was a tag
        # or branch name (eg tip). HgPushlogProcess returns the full SHA.
        process.run(pushlog_url, project, changeset=changeset, last_push_id=fetch_push_id)

        # Only perform additional processing if fetching a single changeset
        # because we only have the sha1 of the tip-most push in "last N pushes"
        # mode and can't filter appropriately.
        if not fetch_push_id:
            raise CommandError(
                'This command is not yet able to ingest Taskcluster jobs automatically. '
                'Please manually configure pulse job ingestion using this guide: '
                'https://treeherder.readthedocs.io/pulseload.html'
            )
Developer: edmorley, Project: treeherder, Lines of code: 42, Source: ingest_push.py

Example 13: test_ingest_hg_pushlog_cache_last_push

# Required import: from treeherder.etl.pushlog import HgPushlogProcess [as alias]
# Or: from treeherder.etl.pushlog.HgPushlogProcess import run [as alias]
def test_ingest_hg_pushlog_cache_last_push(jm, initial_data, test_repository,
                                           test_base_dir, mock_post_json,
                                           activate_responses):
    """
    ingesting a number of pushes should cache the top revision of the last push
    """

    pushlog_path = os.path.join(test_base_dir, 'sample_data',
                                'hg_pushlog.json')
    with open(pushlog_path) as f:
        pushlog_content = f.read()
    pushlog_fake_url = "http://www.thisismypushlog.com"
    responses.add(responses.GET, pushlog_fake_url, body=pushlog_content,
                  status=200, content_type='application/json')

    process = HgPushlogProcess()
    process.run(pushlog_fake_url, jm.project)

    pushlog_dict = json.loads(pushlog_content)
    pushes = pushlog_dict['pushes']
    max_push_id = max([int(k) for k in pushes.keys()])

    assert cache.get("test_treeherder:last_push_id") == max_push_id
Developer: adusca, Project: treeherder, Lines of code: 25, Source: test_pushlog.py

Example 14: _handle

# Required import: from treeherder.etl.pushlog import HgPushlogProcess [as alias]
# Or: from treeherder.etl.pushlog.HgPushlogProcess import run [as alias]
    def _handle(self, *args, **options):
        project = options['project']
        changeset = options['changeset']

        if not options['last_n_pushes'] and not changeset:
            raise CommandError('must specify --last-n-pushes or a positional '
                               'changeset argument')

        # get reference to repo
        repo = Repository.objects.get(name=project, active_status='active')

        if options['last_n_pushes']:
            last_push_id = last_push_id_from_server(repo)
            fetch_push_id = max(1, last_push_id - options['last_n_pushes'])
            logger.info('last server push id: %d; fetching push %d and newer'
                        % (last_push_id, fetch_push_id))
        else:
            fetch_push_id = None

        # make sure all tasks are run synchronously / immediately
        settings.CELERY_ALWAYS_EAGER = True

        # get hg pushlog
        pushlog_url = '%s/json-pushes/?full=1&version=2' % repo.url

        # ingest this particular revision for this project
        process = HgPushlogProcess()
        # Use the actual push SHA, in case the changeset specified was a tag
        # or branch name (eg tip). HgPushlogProcess returns the full SHA.
        push_sha = process.run(pushlog_url, project, changeset=changeset,
                               last_push_id=fetch_push_id)

        # Only perform additional processing if fetching a single changeset
        # because we only have the sha1 of the tip-most push in "last N pushes"
        # mode and can't filter appropriately.
        if not fetch_push_id:
            group_filter = options['filter_job_group']
            Builds4hJobsProcess().run(project_filter=project,
                                      revision_filter=push_sha,
                                      job_group_filter=group_filter)
            PendingJobsProcess().run(project_filter=project,
                                     revision_filter=push_sha,
                                     job_group_filter=group_filter)
            RunningJobsProcess().run(project_filter=project,
                                     revision_filter=push_sha,
                                     job_group_filter=group_filter)
Developer: MikeLing, Project: treeherder, Lines of code: 48, Source: ingest_push.py

Example 15: fetch_hg_push_log

# Required import: from treeherder.etl.pushlog import HgPushlogProcess [as alias]
# Or: from treeherder.etl.pushlog.HgPushlogProcess import run [as alias]
def fetch_hg_push_log(repo_name, repo_url):
    """
    Run an HgPushlog ETL process
    """
    process = HgPushlogProcess()
    process.run(repo_url + '/json-pushes/?full=1&version=2', repo_name)
Developer: arpan98, Project: treeherder, Lines of code: 8, Source: buildapi_tasks.py


Note: The treeherder.etl.pushlog.HgPushlogProcess.run examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are drawn from community-contributed open-source projects, and copyright remains with the original authors; consult each project's License before redistributing or reusing the code. Do not reproduce without permission.