

Python JournalFixtureFactory.make_many_journal_sources method code examples

This article collects typical usage examples of the Python method doajtest.fixtures.JournalFixtureFactory.make_many_journal_sources. If you have been wondering what JournalFixtureFactory.make_many_journal_sources does, how to call it, or what it looks like in real code, the hand-picked examples below should help. You can also explore further usage examples of its containing class, doajtest.fixtures.JournalFixtureFactory.


The following presents 11 code examples of the JournalFixtureFactory.make_many_journal_sources method, sorted by popularity by default.
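
Before jumping into the full test cases, here is a minimal usage sketch distilled from the examples below. It assumes the DOAJ test environment (the doajtest fixtures and a running index) is available; the portality.models import path is an assumption, since the examples omit their own imports.

# Minimal sketch, assuming the DOAJ test environment is set up;
# the portality.models import path is an assumption.
from doajtest.fixtures import JournalFixtureFactory
from portality import models

# Build five journal source dicts, flagged as currently in DOAJ
sources = JournalFixtureFactory.make_many_journal_sources(5, in_doaj=True)

for source in sources:
    journal = models.Journal(**source)   # each source dict unpacks into a Journal model
    journal.save(blocking=True)          # block until the record is indexed

As the examples show, the method accepts the number of sources either positionally or as count=, takes an in_doaj flag, and returns plain dictionaries rather than saved model objects, so each test decides when and how to persist them.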

Example 1: test_03_withdraw

# Required import: from doajtest.fixtures import JournalFixtureFactory [as alias]
# Or: from doajtest.fixtures.JournalFixtureFactory import make_many_journal_sources [as alias]
    def test_03_withdraw(self):
        acc = models.Account()
        acc.set_name("testuser")
        ctx = self._make_and_push_test_context(acc=acc)

        sources = JournalFixtureFactory.make_many_journal_sources(10, in_doaj=True)
        ids = []
        articles = []
        for source in sources:
            j = models.Journal(**source)
            j.save()
            ids.append(j.id)

            pissn = j.bibjson().get_identifiers(j.bibjson().P_ISSN)
            eissn = j.bibjson().get_identifiers(j.bibjson().E_ISSN)
            asource = ArticleFixtureFactory.make_article_source(pissn=pissn[0], eissn=eissn[0], with_id=False)
            a = models.Article(**asource)
            a.save()
            articles.append(a.id)

        time.sleep(2)

        change_in_doaj(ids, False)

        time.sleep(2)

        for id in ids:
            j = models.Journal.pull(id)
            assert j.is_in_doaj() is False

        for id in articles:
            a = models.Article.pull(id)
            assert a.is_in_doaj() is False

        ctx.pop()
Developer: DOAJ | Project: doaj | Lines: 37 | Source: test_withdraw_reinstate.py

Example 2: setUp

# Required import: from doajtest.fixtures import JournalFixtureFactory [as alias]
# Or: from doajtest.fixtures.JournalFixtureFactory import make_many_journal_sources [as alias]
    def setUp(self):
        super(TestTaskJournalBulkEdit, self).setUp()

        self.default_eg = EditorGroupFixtureFactory.setup_editor_group_with_editors()

        acc = models.Account()
        acc.set_id("0987654321")
        acc.set_email("[email protected]")
        acc.save()

        egs = EditorGroupFixtureFactory.make_editor_group_source("1234567890", "0987654321")
        egm = models.EditorGroup(**egs)
        egm.save(blocking=True)

        self.journals = []
        for j_src in JournalFixtureFactory.make_many_journal_sources(count=TEST_JOURNAL_COUNT):
            self.journals.append(models.Journal(**j_src))
            self.journals[-1].set_editor_group("1234567890")
            self.journals[-1].set_editor("0987654321")
            self.journals[-1].save(blocking=True)

        self.forbidden_accounts = [
            AccountFixtureFactory.make_editor_source()['id'],
            AccountFixtureFactory.make_assed1_source()['id'],
            AccountFixtureFactory.make_assed2_source()['id'],
            AccountFixtureFactory.make_assed3_source()['id']
        ]

        self._make_and_push_test_context(acc=models.Account(**AccountFixtureFactory.make_managing_editor_source()))
Developer: DOAJ | Project: doaj | Lines: 31 | Source: test_task_journal_bulkedit.py

Example 3: test_02_reinstate_task

# Required import: from doajtest.fixtures import JournalFixtureFactory [as alias]
# Or: from doajtest.fixtures.JournalFixtureFactory import make_many_journal_sources [as alias]
    def test_02_reinstate_task(self):
        sources = JournalFixtureFactory.make_many_journal_sources(10, in_doaj=False)
        ids = []
        articles = []
        for source in sources:
            j = models.Journal(**source)
            j.save()
            ids.append(j.id)

            pissn = j.bibjson().get_identifiers(j.bibjson().P_ISSN)
            eissn = j.bibjson().get_identifiers(j.bibjson().E_ISSN)
            asource = ArticleFixtureFactory.make_article_source(pissn=pissn[0], eissn=eissn[0], with_id=False, in_doaj=False)
            a = models.Article(**asource)
            a.save()
            articles.append(a.id)

        time.sleep(2)

        job = SetInDOAJBackgroundTask.prepare("testuser", journal_ids=ids, in_doaj=True)
        SetInDOAJBackgroundTask.submit(job)

        time.sleep(2)

        for id in ids:
            j = models.Journal.pull(id)
            assert j.is_in_doaj() is True

        for id in articles:
            a = models.Article.pull(id)
            assert a.is_in_doaj() is True
Developer: DOAJ | Project: doaj | Lines: 32 | Source: test_withdraw_reinstate.py

Example 4: test_03_oai_resumption_token

# Required import: from doajtest.fixtures import JournalFixtureFactory [as alias]
# Or: from doajtest.fixtures.JournalFixtureFactory import make_many_journal_sources [as alias]
    def test_03_oai_resumption_token(self):
        """ Test the behaviour of the ResumptionToken in the OAI interface"""

        # Set the OAI interface to only return two identifiers at a time
        app.config['OAIPMH_LIST_IDENTIFIERS_PAGE_SIZE'] = 2

        [j0, j1, j2, j3, j4] = JournalFixtureFactory.make_many_journal_sources(5, in_doaj=True)

        # Save a single journal in the index
        jm0 = models.Journal(**j0)
        jm0.save(blocking=True)

        # ListIdentifiers - we expect no resumptionToken because all results are returned
        with app.test_client() as t_client:
            resp = t_client.get('/oai?verb=ListIdentifiers&metadataPrefix=oai_dc')
            t = etree.fromstring(resp.data)
            assert t.xpath('//oai:identifier', namespaces=self.oai_ns)[0].text == 'oai:doaj.org/journal:journalid0'
            assert t.xpath('//oai:resumptionToken', namespaces=self.oai_ns) == []

        # Populate index with 4 more journals
        for j in [j1, j2, j3, j4]:
            jm = models.Journal(**j)
            jm.save(blocking=True)

        # ListIdentifiers - we expect 5 total results and a resumptionToken to fetch the rest
        with app.test_client() as t_client:
            resp = t_client.get('/oai?verb=ListIdentifiers&metadataPrefix=oai_dc')
            t = etree.fromstring(resp.data)
            #print etree.tostring(t, pretty_print=True)
            rt = t.xpath('//oai:resumptionToken', namespaces=self.oai_ns)[0]
            assert rt.get('completeListSize') == '5'
            assert rt.get('cursor') == '2'

            # Get the next result
            resp2 = t_client.get('/oai?verb=ListIdentifiers&resumptionToken={0}'.format(rt.text))
            t = etree.fromstring(resp2.data)
            #print etree.tostring(t, pretty_print=True)
            rt2 = t.xpath('//oai:resumptionToken', namespaces=self.oai_ns)[0]
            assert rt2.get('completeListSize') == '5'
            assert rt2.get('cursor') == '4'

            # And the final result - check we get an empty resumptionToken
            resp3 = t_client.get('/oai?verb=ListIdentifiers&resumptionToken={0}'.format(rt2.text))
            t = etree.fromstring(resp3.data)
            #print etree.tostring(t, pretty_print=True)
            rt3 = t.xpath('//oai:resumptionToken', namespaces=self.oai_ns)[0]
            assert rt3.get('completeListSize') == '5'
            assert rt3.get('cursor') == '5'
            assert rt3.text is None

            # We should get an error if we request again with an empty resumptionToken
            resp4 = t_client.get('/oai?verb=ListIdentifiers&resumptionToken={0}'.format(rt3.text))
            assert resp4.status_code == 200                                   # fixme: should this be a real error code?
            t = etree.fromstring(resp4.data)
            #print etree.tostring(t, pretty_print=True)

            err = t.xpath('//oai:error', namespaces=self.oai_ns)[0]
            assert 'the resumptionToken argument is invalid or expired' in err.text
Developer: DOAJ | Project: doaj | Lines: 60 | Source: test_oaipmh.py

Example 5: test_11_iterate

# Required import: from doajtest.fixtures import JournalFixtureFactory [as alias]
# Or: from doajtest.fixtures.JournalFixtureFactory import make_many_journal_sources [as alias]
    def test_11_iterate(self):
        for jsrc in JournalFixtureFactory.make_many_journal_sources(count=99, in_doaj=True):
            j = models.Journal(**jsrc)
            j.save()
        time.sleep(2)  # index all the journals
        journal_ids = []
        theqgen = models.JournalQuery()
        for j in models.Journal.iterate(q=theqgen.all_in_doaj(), page_size=10):
            journal_ids.append(j.id)
        journal_ids = list(set(journal_ids))  # keep only unique ids
        assert len(journal_ids) == 99
        assert len(self.list_today_journal_history_files()) == 99
Developer: DOAJ | Project: doaj | Lines: 14 | Source: test_models.py

Example 6: test_05_date_ranges

# Required import: from doajtest.fixtures import JournalFixtureFactory [as alias]
# Or: from doajtest.fixtures.JournalFixtureFactory import make_many_journal_sources [as alias]
    def test_05_date_ranges(self):
        """ Check that the interface adheres to the dates that records were added """

        # Set the OAI interface to only return one identifier at a time
        app.config['OAIPMH_LIST_IDENTIFIERS_PAGE_SIZE'] = 1

        journals = JournalFixtureFactory.make_many_journal_sources(4, in_doaj=True)

        now = datetime.utcnow()
        yesterday = datetime.utcnow() - timedelta(days=1)
        day_before_yesterday = datetime.utcnow() - timedelta(days=2)
        two_days_before_yesterday = datetime.utcnow() - timedelta(days=3)

        # Save half of our journals 2 days ago
        with freeze_time(day_before_yesterday):
            for j in journals[:2]:
                jm = models.Journal(**j)
                jm.save(blocking=True)

        # Save the other half of our journals today
        with freeze_time(now):
            for j in journals[2:]:
                jm = models.Journal(**j)
                jm.save(blocking=True)

        # Request OAI journals since yesterday (looking for today's results only)
        with app.test_client() as t_client:
            resp = t_client.get('/oai?verb=ListRecords&metadataPrefix=oai_dc&from={0}'.format(yesterday.strftime('%Y-%m-%d')))
            t = etree.fromstring(resp.data)
            #print etree.tostring(t, pretty_print=True)
            rt = t.xpath('//oai:resumptionToken', namespaces=self.oai_ns)[0]
            assert rt.get('completeListSize') == '2'
            assert rt.get('cursor') == '1'

            for title in t.xpath('//dc:title', namespaces=self.oai_ns):
                assert title.text in [journals[2]['bibjson']['title'], journals[3]['bibjson']['title']]

        # Request OAI journals from 3 days ago to yesterday (expecting the 2 days ago results)
        with app.test_client() as t_client:
            resp = t_client.get('/oai?verb=ListRecords&metadataPrefix=oai_dc&from={0}&until={1}'.format(
                two_days_before_yesterday.strftime('%Y-%m-%d'), yesterday.strftime('%Y-%m-%d')))
            t = etree.fromstring(resp.data)
            #print etree.tostring(t, pretty_print=True)
            rt = t.xpath('//oai:resumptionToken', namespaces=self.oai_ns)[0]
            assert rt.get('completeListSize') == '2'
            assert rt.get('cursor') == '1'

            for title in t.xpath('//dc:title', namespaces=self.oai_ns):
                assert title.text in [journals[0]['bibjson']['title'], journals[1]['bibjson']['title']]
Developer: DOAJ | Project: doaj | Lines: 51 | Source: test_oaipmh.py

Example 7: setUp

# Required import: from doajtest.fixtures import JournalFixtureFactory [as alias]
# Or: from doajtest.fixtures.JournalFixtureFactory import make_many_journal_sources [as alias]
    def setUp(self):
        super(TestTaskJournalBulkDelete, self).setUp()

        ArticleBulkDeleteBackgroundTask.BATCH_SIZE = 13

        self.journals = []
        self.articles = []
        for j_src in JournalFixtureFactory.make_many_journal_sources(count=TEST_JOURNAL_COUNT):
            j = models.Journal(**j_src)
            self.journals.append(j)
            j.save()
            for i in range(0, TEST_ARTICLES_PER_JOURNAL):
                a = models.Article(**ArticleFixtureFactory.make_article_source(with_id=False, eissn=j.bibjson().first_eissn, pissn=j.bibjson().first_pissn))
                a.save()
                self.articles.append(a)

        sleep(2)

        self._make_and_push_test_context(acc=models.Account(**AccountFixtureFactory.make_managing_editor_source()))
Developer: DOAJ | Project: doaj | Lines: 21 | Source: test_task_article_bulk_delete.py

Example 8: test_04_oai_changing_index

# Required import: from doajtest.fixtures import JournalFixtureFactory [as alias]
# Or: from doajtest.fixtures.JournalFixtureFactory import make_many_journal_sources [as alias]
    def test_04_oai_changing_index(self):
        """ Check that changes to the index don't appear in in-progress requests """

        # Set the OAI interface to only return two identifiers at a time
        app.config['OAIPMH_LIST_IDENTIFIERS_PAGE_SIZE'] = 2

        journals = JournalFixtureFactory.make_many_journal_sources(4, in_doaj=True)

        # Save our journals to the index
        for j in journals[:3]:
            jm = models.Journal(**j)
            jm.save(blocking=True)

        # ListRecords - we expect 3 total results and a resumptionToken to fetch the rest
        yesterday = (datetime.utcnow() - timedelta(days=1)).strftime('%Y-%m-%d')
        with app.test_client() as t_client:
            resp = t_client.get('/oai?verb=ListRecords&metadataPrefix=oai_dc&from={0}'.format(yesterday))
            t = etree.fromstring(resp.data)
            #print etree.tostring(t, pretty_print=True)
            rt = t.xpath('//oai:resumptionToken', namespaces=self.oai_ns)[0]
            assert rt.get('completeListSize') == '3'
            assert rt.get('cursor') == '2'

            # Save another journal to the index
            [j] = journals[3:]
            jm = models.Journal(**j)
            jm.save(blocking=True)

            # Get the next result - the new journal shouldn't be added to the results
            resp2 = t_client.get('/oai?verb=ListRecords&resumptionToken={0}'.format(rt.text))
            t = etree.fromstring(resp2.data)
            #print etree.tostring(t, pretty_print=True)
            rt2 = t.xpath('//oai:resumptionToken', namespaces=self.oai_ns)[0]
            assert rt2.get('completeListSize') == '3'
            assert rt2.get('cursor') == '3'

            # Start a new request - we should see the new journal
            resp3 = t_client.get('/oai?verb=ListRecords&metadataPrefix=oai_dc&from={0}'.format(yesterday))
            t = etree.fromstring(resp3.data)
            #print etree.tostring(t, pretty_print=True)
            rt = t.xpath('//oai:resumptionToken', namespaces=self.oai_ns)[0]
            assert rt.get('completeListSize') == '4'
Developer: DOAJ | Project: doaj | Lines: 44 | Source: test_oaipmh.py

Example 9: test_02_oai_journals

# Required import: from doajtest.fixtures import JournalFixtureFactory [as alias]
# Or: from doajtest.fixtures.JournalFixtureFactory import make_many_journal_sources [as alias]
    def test_02_oai_journals(self):
        """test if the OAI-PMH journal feed returns records and only displays journals accepted in DOAJ"""
        journal_sources = JournalFixtureFactory.make_many_journal_sources(2, in_doaj=True)
        j_public = models.Journal(**journal_sources[0])
        j_public.save(blocking=True)

        j_private = models.Journal(**journal_sources[1])
        j_private.set_in_doaj(False)
        j_private.save(blocking=True)

        with app.test_client() as t_client:
            resp = t_client.get('/oai?verb=ListRecords&metadataPrefix=oai_dc')
            assert resp.status_code == 200

            t = etree.fromstring(resp.data)
            records = t.xpath('/oai:OAI-PMH/oai:ListRecords', namespaces=self.oai_ns)

            # Check we only have one journal returned
            assert len(records[0].xpath('//oai:record', namespaces=self.oai_ns)) == 1

            # Check we have the correct journal
            assert records[0].xpath('//dc:title', namespaces=self.oai_ns)[0].text == j_public.bibjson().title
Developer: DOAJ | Project: doaj | Lines: 24 | Source: test_oaipmh.py

Example 10: test_public_data_dump

# Required import: from doajtest.fixtures import JournalFixtureFactory [as alias]
# Or: from doajtest.fixtures.JournalFixtureFactory import make_many_journal_sources [as alias]
    def test_public_data_dump(self, name, kwargs):

        clean_arg = kwargs.get("clean")
        prune_arg = kwargs.get("prune")
        types_arg = kwargs.get("types")
        journals_arg = kwargs.get("journals")
        articles_arg = kwargs.get("articles")
        batch_size_arg = kwargs.get("batch_size")
        tmp_write_arg = kwargs.get("tmp_write")
        store_write_arg = kwargs.get("store_write")

        status_arg = kwargs.get("status")

        ###############################################
        ## set up

        clean = True if clean_arg == "yes" else False if clean_arg == "no" else None
        prune = True if prune_arg == "yes" else False if prune_arg == "no" else None
        types = types_arg if types_arg != "-" else None

        journal_count = int(journals_arg)
        article_count = int(articles_arg)
        batch_size = int(batch_size_arg)
        journal_file_count = 0 if journal_count == 0 else (journal_count / batch_size) + 1
        article_file_count = 0 if article_count == 0 else (article_count / batch_size) + 1
        first_article_file_records = 0 if article_count == 0 else batch_size if article_count > batch_size else article_count
        first_journal_file_records = 0 if journal_count == 0 else batch_size if journal_count > batch_size else journal_count

        # add the data to the index first, to maximise the time it has to become available for search
        sources = JournalFixtureFactory.make_many_journal_sources(journal_count, in_doaj=True)
        jids = []
        for i in range(len(sources)):
            source = sources[i]
            journal = models.Journal(**source)
            journal.save()
            jids.append((journal.id, journal.last_updated))

        aids = []
        for i in range(article_count):
            source = ArticleFixtureFactory.make_article_source(
                eissn="{x}000-0000".format(x=i),
                pissn="0000-{x}000".format(x=i),
                with_id=False,
                doi="10.123/{x}".format(x=i),
                fulltext="http://example.com/{x}".format(x=i)
            )
            article = models.Article(**source)
            article.save()
            aids.append((article.id, article.last_updated))

        # construct some test data in the local store
        container_id = app.config["STORE_PUBLIC_DATA_DUMP_CONTAINER"]
        localStore = store.StoreLocal(None)
        localStoreFiles = []
        if clean or prune:
            for i in range(5):
                localStore.store(container_id, "doaj_article_data_2018-01-0" + str(i) + ".tar.gz",
                                 source_stream=StringIO("test"))
                localStore.store(container_id, "doaj_journal_data_2018-01-0" + str(i) + ".tar.gz",
                                 source_stream=StringIO("test"))
            localStoreFiles = localStore.list(container_id)

        app.config["DISCOVERY_RECORDS_PER_FILE"] = batch_size

        # set the mocks for store write failures
        if tmp_write_arg == "fail":
            app.config["STORE_TMP_IMPL"] = StoreMockFactory.no_writes_classpath()

        if store_write_arg == "fail":
            app.config["STORE_IMPL"] = StoreMockFactory.no_writes_classpath()

        # block until all the records are saved
        for jid, lu in jids:
            models.Journal.block(jid, lu, sleep=0.05)
        for aid, lu in aids:
            models.Article.block(aid, lu, sleep=0.05)

        ###########################################################
        # Execution

        job = PublicDataDumpBackgroundTask.prepare("testuser", clean=clean, prune=prune, types=types)
        task = PublicDataDumpBackgroundTask(job)
        BackgroundApi.execute(task)

        # make sure we have a fresh copy of the job
        job = task.background_job
        assert job.status == status_arg

        if job.status != "error":
            article_url = models.cache.Cache.get_public_data_dump().get("article", {}).get("url")
            if types_arg in ["-", "all", "article"]:
                assert article_url is not None
            else:
                assert article_url is None

            journal_url = models.cache.Cache.get_public_data_dump().get("journal", {}).get("url")
            if types_arg in ["-", "all", "journal"]:
                assert journal_url is not None
            else:
                assert journal_url is None
#......... the rest of this example is omitted .........
Developer: DOAJ | Project: doaj | Lines: 103 | Source: test_tasks_public_data_dump.py

Example 11: test_journal_csv

# Required import: from doajtest.fixtures import JournalFixtureFactory [as alias]
# Or: from doajtest.fixtures.JournalFixtureFactory import make_many_journal_sources [as alias]
    def test_journal_csv(self, name, kwargs):

        prune_arg = kwargs.get("prune")
        tmp_write_arg = kwargs.get("tmp_write")
        main_write_arg = kwargs.get("main_write")
        journals_arg = kwargs.get("journals")
        journals_no_issn_arg = kwargs.get("journals_no_issn")
        not_in_doaj_arg = kwargs.get("not_in_doaj")
        journals_with_articles_arg = kwargs.get("journals_with_articles")

        raises_arg = kwargs.get("raises")

        ###############################################
        ## set up

        raises = EXCEPTIONS.get(raises_arg)
        prune = True if prune_arg == "True" else False if prune_arg == "False" else None
        journal_count = int(journals_arg)
        journals_no_issn_count = int(journals_no_issn_arg)
        not_in_doaj_count = int(not_in_doaj_arg)
        journals_with_articles_count = int(journals_with_articles_arg)

        if tmp_write_arg == "fail":
            app.config["STORE_TMP_IMPL"] = StoreMockFactory.no_writes_classpath()

        if main_write_arg == "fail":
            app.config["STORE_IMPL"] = StoreMockFactory.no_writes_classpath()

        journals = []
        if journal_count > 0:
            journals += [models.Journal(**s) for s in JournalFixtureFactory.make_many_journal_sources(count=journal_count, in_doaj=True)]

        comparisons = {}
        articles = []
        for i in range(len(journals)):
            journal = journals[i]
            bj = journal.bibjson()
            bj.alternative_title = u"Заглавие на журнала"   # checking mixed unicode
            issns = journal.bibjson().issns()
            source1 = ArticleFixtureFactory.make_article_source(eissn=issns[0], pissn=issns[1], with_id=False, in_doaj=False)
            articles.append(models.Article(**source1))
            comparisons[issns[0]] = {"issns" : issns, "article_count": 0, "article_latest" : ""}
            if i < journals_with_articles_count:
                source2 = ArticleFixtureFactory.make_article_source(eissn=issns[0], pissn=issns[1], with_id=False, in_doaj=True)
                article2 = models.Article(**source2)
                article2.set_created("2019-0{i}-01T00:00:00Z".format(i=i + 1))
                articles.append(article2)

                source3 = ArticleFixtureFactory.make_article_source(eissn=issns[0], pissn=issns[1], with_id=False, in_doaj=True)
                article3 = models.Article(**source3)
                article3.set_created("2019-0{i}-02T00:00:00Z".format(i=i + 1))
                articles.append(article3)

                comparisons[issns[0]]["article_count"] = 2
                comparisons[issns[0]]["article_latest"] = "2019-0{i}-02T00:00:00Z".format(i=i + 1)

        if journals_no_issn_count > 0:
            noissns = [models.Journal(**s) for s in JournalFixtureFactory.make_many_journal_sources(count=journals_no_issn_count, in_doaj=True)]
            for i in range(len(noissns)):
                noissn = noissns[i]
                bj = noissn.bibjson()
                bj.remove_identifiers(idtype=bj.P_ISSN)
                bj.remove_identifiers(idtype=bj.E_ISSN)
                noissn.set_id("no_issn_{i}".format(i=i))
            journals += noissns

        if not_in_doaj_count > 0:
            nots = [models.Journal(**s) for s in JournalFixtureFactory.make_many_journal_sources(count=not_in_doaj_count, in_doaj=False)]
            for i in range(len(nots)):
                n = nots[i]
                n.set_id("not_in_doaj_{i}".format(i=i))
            journals += nots

        jids = []
        for i in range(len(journals)):
            journals[i].save()
            jids.append((journals[i].id, journals[i].last_updated))

        aids = []
        for i in range(len(articles)):
            articles[i].save()
            aids.append((articles[i].id, articles[i].last_updated))

        if prune:
            self.localStore.store(self.container_id, "journalcsv__doaj_20180101_0000_utf8.csv", source_stream=StringIO("test1"))
            self.localStore.store(self.container_id, "journalcsv__doaj_20180601_0000_utf8.csv", source_stream=StringIO("test2"))
            self.localStore.store(self.container_id, "journalcsv__doaj_20190101_0000_utf8.csv", source_stream=StringIO("test3"))

        models.Journal.blockall(jids)
        models.Article.blockall(aids)

        ###########################################################
        # Execution

        if raises is not None:
            with self.assertRaises(raises):
                self.svc.csv(prune)

                tempFiles = self.tmpStore.list(self.container_id)
                assert len(tempFiles) == 0
#......... the rest of this example is omitted .........
Developer: DOAJ | Project: doaj | Lines: 103 | Source: test_bll_journal_csv.py


Note: The doajtest.fixtures.JournalFixtureFactory.make_many_journal_sources examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are drawn from open-source projects contributed by their authors, and copyright remains with the original authors; please consult each project's license before distributing or reusing the code. Do not reproduce this article without permission.