

Python InternetArchiveCommon.get_docketxml_name Method Code Examples

This article collects typical usage examples of the InternetArchiveCommon.get_docketxml_name method in Python. If you are wondering exactly how to use InternetArchiveCommon.get_docketxml_name, or what it is used for, the hand-picked code examples below should help. You can also browse further usage examples for the InternetArchiveCommon module in which the method is defined.


The following presents 4 code examples of the InternetArchiveCommon.get_docketxml_name method, sorted by popularity by default.
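
Before the examples, here is a minimal usage sketch. It assumes InternetArchiveCommon is importable and aliased to IACommon, matching the convention used in the examples below; the court identifier and case number are hypothetical placeholders rather than values taken from the project.

# A minimal sketch, not taken verbatim from the recap-server source.
import InternetArchiveCommon as IACommon

court = "nysd"        # hypothetical PACER court identifier
casenum = "1234567"   # hypothetical PACER case number

# Build the docket XML filename used when storing a case docket on the Internet Archive.
docketname = IACommon.get_docketxml_name(court, casenum)
print(docketname)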

Example 1: _cron_process_docketXML

# Required import: import InternetArchiveCommon [as alias]
# Or: from InternetArchiveCommon import get_docketxml_name [as alias]
def _cron_process_docketXML(docket, ppentry):
    ''' Required to have the lock. '''

    court = docket.casemeta["court"]
    casenum = docket.casemeta["pacer_case_num"]

    # Force '0' in the XML on docs that failed to upload.
    _update_docs_availability(docket)

    # The docket filename
    docketname = IACommon.get_docketxml_name(court, casenum)

    # Step 1: Try to fetch the existing docket from IA
    docketstring, fetcherror = IADirect.get_docket_string(court, casenum)

    if docketstring:
        # Got the existing docket-- put merged docket file.
        ia_docket, parse_msg = DocketXML.parse_xml_string(docketstring)

        if ia_docket:
            put_result, put_msg = _cron_me_up(ia_docket, docket, ppentry)

            print "  %s %s" % (docketname, put_msg)
        else:
            print "  %s docket parsing error: %s" % (docketname, parse_msg)

    elif fetcherror is IADirect.FETCH_NO_FILE:
        # Bucket exists but no docket-- put a new docket file.
        put_result, put_msg = put_docket(docket, court, casenum, ppentry)

        print "  %s put into existing bucket: %s" % (docketname, put_msg)

    elif fetcherror is IADirect.FETCH_NO_BUCKET:
        # Bucket doesn't exist-- make the bucket and put a new docket file.
        put_result, put_msg = put_docket(docket, court, casenum, ppentry,
                                         newbucket=1)

        print "  %s put into new bucket: %s" % (docketname, put_msg)

    elif fetcherror is IADirect.FETCH_URLERROR:
        # Couldn't get the IA docket

        # Unset the processing flag for later
#        ppentry.processing = 0
#        ppentry.save()
        # Leave the pickle file for later
        # Drop Lock Here?

        print "  %s timed out.  wait for next cron." % (docketname)

    else:
        # Unknown fetch error.

        # Unset the processing flag for later
#        ppentry.processing = 0
#        ppentry.save()
        # Drop Lock Here?

        # Leave the pickle file for later
        print "  %s unknown fetch error.  wait for next cron." % (docketname)
Developer: brianwc, Project: recap-server, Lines: 62, Source file: InternetArchive.py

Example 2: do_me_up

# Required import: import InternetArchiveCommon [as alias]
# Or: from InternetArchiveCommon import get_docketxml_name [as alias]
def do_me_up(docket):
    ''' Download, merge and update the docket with IA. '''
    # Pickle this object for do_me_up by the cron process.

    court = docket.get_court()
    casenum = docket.get_casenum()

    docketname = IACommon.get_docketxml_name(court, casenum)

    # Check if this docket is already scheduled to be processed.
    query = PickledPut.objects.filter(filename=docketname)

    try:
        ppentry = query[0]
    except IndexError:
        # Not already scheduled, so schedule it now.
        ppentry = PickledPut(filename=docketname, docket=1)

        try:
            ppentry.save()
        except IntegrityError:
            # Try again.
            do_me_up(docket)
        else:
            # Pickle this object.
            pickle_success, msg = IA.pickle_object(docket, docketname)

            if pickle_success:
                # Ready for processing.
                ppentry.ready = 1
                ppentry.save()

                logging.info("do_me_up: ready. %s" % (docketname))
            else:
                # Pickle failed, remove from DB.
                ppentry.delete()
                logging.error("do_me_up: %s %s" % (msg, docketname))

    else:
        # Already scheduled.
        # If there is a lock for this case, it's being uploaded. Don't merge now
        locked = BucketLockManager.lock_exists(court, casenum)
        if ppentry.ready and not locked:
            # Docket is waiting to be processed by cron job.

            # Revert state back to 'not ready' so we can do local merge.
            ppentry.ready = 0
            ppentry.save()

            # Fetch and unpickle the waiting docket.
            prev_docket, unpickle_msg = IA.unpickle_object(docketname)

            if prev_docket:

                # Do the local merge.
                prev_docket.merge_docket(docket)

                # Pickle it back
                pickle_success, pickle_msg = \
                    IA.pickle_object(prev_docket, docketname)

                if pickle_success:
                    # Merged and ready.
                    ppentry.ready = 1
                    ppentry.save()
                    logging.info("do_me_up: merged and ready. %s" %(docketname))
                else:
                    # Re-pickle failed, delete.
                    ppentry.delete()
                    logging.error("do_me_up: re-%s %s" % (pickle_msg,
                                                          docketname))

            else:
                # Unpickle failed
                ppentry.delete()
                IA.delete_pickle(docketname)
                logging.error("do_me_up: %s %s" % (unpickle_msg, docketname))


        # Ignore if in any of the other three possible state...
        #   because another cron job is already doing work on this entity
        # Don't delete DB entry or pickle file.
        elif ppentry.ready and locked:
            pass
            #logging.debug("do_me_up: %s discarded, processing conflict." %
            #              (docketname))
        elif not ppentry.ready and not locked:
            pass
            #logging.debug("do_me_up: %s discarded, preparation conflict." %
            #              (docketname))
        else:
            logging.error("do_me_up: %s discarded, inconsistent state." %
                          (docketname))
Developer: janderse, Project: recap-server, Lines: 95, Source file: UploadHandler.py

Example 3: len

# Required import: import InternetArchiveCommon [as alias]
# Or: from InternetArchiveCommon import get_docketxml_name [as alias]
                  .order_by('-lastdate', '-modified')

    yesterday = datetime.datetime.now() - datetime.timedelta(1)

    old_or_avail_query = doc_query.filter(available=1) \
                         | doc_query.filter(modified__lte=yesterday)
    query = None
    try:
        query = old_or_avail_query[0]
    except IndexError:
        try:
            query = doc_query[0]
        except IndexError:
            query = None
        else:
            ppquery = PickledPut.objects.filter(filename=IACommon.get_docketxml_name(court, casenum))
            if len(ppquery) > 0:
                query = None

    if query:
        try:
            # we only have a last date for documents that have been uploaded
            date = query.lastdate.strftime("%m/%d/%y")
        except AttributeError:
            try:
                date = query.modified.strftime("%m/%d/%y")
            except AttributeError:
                date = "Unknown"
Developer: brianwc, Project: recap-server, Lines: 32, Source file: views.py

Example 4: len

# Required import: import InternetArchiveCommon [as alias]
# Or: from InternetArchiveCommon import get_docketxml_name [as alias]
    yesterday = datetime.datetime.now() - datetime.timedelta(1)

    old_or_avail_query = doc_query.filter(available=1) | \
                         doc_query.filter(modified__lte=yesterday)
    query = None
    try:
        query = old_or_avail_query[0]
    except IndexError:
        try:
            query = doc_query[0]
        except IndexError:
            query = None
        else:
            ppquery = PickledPut.objects.filter(
                filename=IACommon.get_docketxml_name(court, casenum))
            if len(ppquery) > 0:
                query = None

    if query:
        try:
            # we only have a last date for documents that have been uploaded
            date = query.lastdate.strftime("%m/%d/%y")
        except AttributeError:
            try:
                date = query.modified.strftime("%m/%d/%y")
            except AttributeError:
                date = "Unknown"

        response = {
            "docket_url": IACommon.get_dockethtml_url(court,
Developer: freelawproject, Project: recap-server, Lines: 32, Source file: views.py


Note: The InternetArchiveCommon.get_docketxml_name examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by their respective developers; copyright remains with the original authors. Please follow the corresponding project's license when distributing or using the code, and do not reproduce this article without permission.