当前位置: 首页>>代码示例>>Python>>正文


Python Dataset_File.save方法代码示例

本文整理汇总了Python中tardis.tardis_portal.models.Dataset_File.save方法的典型用法代码示例。如果您正苦于以下问题:Python Dataset_File.save方法的具体用法?Python Dataset_File.save怎么用?Python Dataset_File.save使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在tardis.tardis_portal.models.Dataset_File的用法示例。


在下文中一共展示了Dataset_File.save方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。

示例1: add_datafile_to_dataset

# 需要导入模块: from tardis.tardis_portal.models import Dataset_File [as 别名]
# 或者: from tardis.tardis_portal.models.Dataset_File import save [as 别名]
def add_datafile_to_dataset(dataset, filepath, size):
    """
    Add datafile metadata to a dataset for a file already on disk.

    :param dataset: dataset whose directory the file lives under
    :type dataset: :class:`tardis.tardis_portal.models.Dataset`
    :param filepath: the full OS path to the file
    :type filepath: string
    :param size: the file size in bytes
    :type size: string
    :rtype: the new datafile object
    """

    experiment_path = path.join(settings.FILE_STORE_PATH,
                                str(dataset.experiment.id))

    # Strip the experiment directory prefix; the remainder (which begins
    # with a '/') becomes a 'file://'-style relative URL.
    urlpath = 'file:/' + filepath[len(experiment_path):]
    filename = urlpath.rpartition('/')[2]

    # NOTE(review): removed the unused local `dataset_path` computed here
    # in the original -- it was never read.
    datafile = Dataset_File(dataset=dataset, filename=filename,
                            url=urlpath, size=size, protocol='')
    datafile.save()

    return datafile
开发者ID:grischa,项目名称:mytardis-mrtardis,代码行数:27,代码来源:staging.py

示例2: testLocalFile

# 需要导入模块: from tardis.tardis_portal.models import Dataset_File [as 别名]
# 或者: from tardis.tardis_portal.models.Dataset_File import save [as 别名]
    def testLocalFile(self):
        """A local replica should be picked up and verified by verify_files()."""
        payload = urandom(1024)
        uploaded = ContentFile(payload, 'background_task_testfile')

        # Build a datafile plus a replica pointing at the stored upload.
        datafile = Dataset_File(dataset=self.dataset)
        datafile.filename = uploaded.name
        datafile.size = len(payload)
        datafile.sha512sum = hashlib.sha512(payload).hexdigest()
        datafile.save()
        replica = Replica(datafile=datafile,
                          url=write_uploaded_file_to_dataset(self.dataset,
                                                             uploaded),
                          location=Location.get_default_location())
        replica.save()

        def fetch_replica(df):
            return Replica.objects.get(datafile=df)

        # Saving auto-verified the replica; undo that so the task has work.
        replica.verified = False
        replica.save(update_fields=['verified'])

        # Not verified yet...
        expect(fetch_replica(datafile).verified).to_be(False)

        # ...but the background task verifies the local file.
        verify_files()
        expect(fetch_replica(datafile).verified).to_be(True)
开发者ID:TheGoodRob,项目名称:mytardis,代码行数:30,代码来源:test_tasks.py

示例3: add_staged_file_to_dataset

# 需要导入模块: from tardis.tardis_portal.models import Dataset_File [as 别名]
# 或者: from tardis.tardis_portal.models.Dataset_File import save [as 别名]
def add_staged_file_to_dataset(rel_filepath, dataset_id, username,
                               mimetype="application/octet-stream"):
    """
    Move a file from the user's staging area into a dataset's store
    directory and record it as a Dataset_File.

    May be replaced by main code functions; quick and dirty hack.

    :param rel_filepath: file path relative to the user's staging root
    :param dataset_id: primary key of the target dataset
    :param username: owner of the staging area the file comes from
    :param mimetype: MIME type recorded on the new datafile
    :rtype: the newly created Dataset_File
    """
    originfilepath = os.path.join(get_full_staging_path(username), rel_filepath)
    dataset = Dataset.objects.get(pk=dataset_id)
    newDatafile = Dataset_File()
    newDatafile.dataset = dataset
    newDatafile.size = os.path.getsize(originfilepath)
    newDatafile.protocol = "tardis"
    newDatafile.mimetype = mimetype
    # Files live under <FILE_STORE_PATH>/<experiment id>/<dataset id>/.
    file_dir = "/" + str(dataset.experiment.id) + "/" + str(dataset.id) + "/"
    file_path = file_dir + rel_filepath
    prelim_full_file_path = settings.FILE_STORE_PATH + file_path
    # Avoid clobbering an existing file with the same name.
    full_file_path = duplicate_file_check_rename(prelim_full_file_path)
    newDatafile.filename = os.path.basename(full_file_path)
    # URL is the path relative to the dataset directory in the store.
    newDatafile.url = "%s://%s" % (newDatafile.protocol,
                                   full_file_path[
            len(settings.FILE_STORE_PATH) + len(file_dir):])
    if not os.path.exists(os.path.dirname(full_file_path)):
        os.makedirs(os.path.dirname(full_file_path))
    shutil.move(originfilepath, full_file_path)
    newDatafile.save()
    # Return the new record so callers can use it (previously discarded,
    # unlike the sibling add_datafile_to_dataset which returns its object).
    return newDatafile
开发者ID:aaryani,项目名称:aa-migration-test1,代码行数:28,代码来源:utils.py

示例4: process_enclosure

# 需要导入模块: from tardis.tardis_portal.models import Dataset_File [as 别名]
# 或者: from tardis.tardis_portal.models.Dataset_File import save [as 别名]
    def process_enclosure(self, dataset, enclosure):
        """
        Create a Dataset_File (and a Replica) for an atom feed enclosure.

        Optional enclosure attributes (mime, length, hash) are copied when
        present.  Ingestion is rejected (datafile left without a replica)
        when the enclosure URL does not match a registered Location.
        """
        filename = getattr(enclosure, 'title', basename(enclosure.href))
        datafile = Dataset_File(filename=filename, dataset=dataset)
        # Sentinel distinguishes "attribute absent" from "attribute is None",
        # matching the original try/except AttributeError behaviour.
        missing = object()
        mimetype = getattr(enclosure, 'mime', missing)
        if mimetype is not missing:
            datafile.mimetype = mimetype
        length = getattr(enclosure, 'length', missing)
        if length is not missing:
            datafile.size = length
        # Renamed from `hash`, which shadowed the builtin.
        checksums = getattr(enclosure, 'hash', missing)
        if checksums is not missing:
            # Split on white space, then ':' to get tuples to feed into dict
            hashdict = dict([s.partition(':')[::2] for s in checksums.split()])
            # Set SHA-512 sum
            datafile.sha512sum = hashdict['sha-512']
        datafile.save()
        url = enclosure.href
        # This means we will allow the atom feed to feed us any enclosure
        # URL that matches a registered location.  Maybe we should restrict
        # this to a specific location.
        location = Location.get_location_for_url(url)
        if not location:
            logger.error('Rejected ingestion for unknown location %s' % url)
            return

        replica = Replica(datafile=datafile, url=url,
                          location=location)
        replica.protocol = enclosure.href.partition('://')[0]
        replica.save()
        self.make_local_copy(replica)
开发者ID:crawley,项目名称:mytardis-app-atom,代码行数:36,代码来源:atom_ingest.py

示例5: testRemoteFile

# 需要导入模块: from tardis.tardis_portal.models import Dataset_File [as 别名]
# 或者: from tardis.tardis_portal.models.Dataset_File import save [as 别名]
    def testRemoteFile(self):
        """A file:// replica only verifies once its backing content exists."""
        payload = urandom(1024)
        with NamedTemporaryFile() as tmp:
            # Create new Datafile pointing at the (still empty) temp file.
            datafile = Dataset_File(dataset=self.dataset)
            datafile.filename = 'background_task_testfile'
            datafile.size = len(payload)
            datafile.sha512sum = hashlib.sha512(payload).hexdigest()
            datafile.url = 'file://' + path.abspath(tmp.name)
            datafile.save()

            def reload_datafile(df):
                return Dataset_File.objects.get(id=df.id)

            # The backing file is empty, so verification must not succeed.
            expect(reload_datafile(datafile).verified).to_be(False)
            verify_files()
            expect(reload_datafile(datafile).verified).to_be(False)
            expect(reload_datafile(datafile).is_local()).to_be(False)

            # Now write the real content and flush it to disk.
            tmp.write(payload)
            tmp.flush()

            # Verification should succeed and the file becomes local.
            verify_files()
            expect(reload_datafile(datafile).verified).to_be(True)
            expect(reload_datafile(datafile).is_local()).to_be(True)
开发者ID:JMSS-IT-11-2012,项目名称:mytardis,代码行数:31,代码来源:test_tasks.py

示例6: clone

# 需要导入模块: from tardis.tardis_portal.models import Dataset_File [as 别名]
# 或者: from tardis.tardis_portal.models.Dataset_File import save [as 别名]
 def clone(cls, oldInstance, newDescription, username):
     """
     Create a new instance copied from *oldInstance*.

     Parameters are copied except those named in ``cls.doNotCopyParams``;
     uploaded files (other than ``.jobfile`` files) are copied into the
     user's staging path and registered as new staged datafiles.

     :param oldInstance: instance to clone from
     :param newDescription: description for the new instance
     :param username: user whose staging path receives the copied files
     :rtype: the new instance
     """
     newInstance = cls(description=newDescription,
                       experiment_id=oldInstance.dataset.experiment.id)
     # Copy parameter values, preserving numeric vs. string storage.
     for param in oldInstance.parameters:
         if param.name.name not in cls.doNotCopyParams:
             if param.name.isNumeric():
                 value = param.numerical_value
             else:
                 value = param.string_value
             newInstance.new_param(param.name.name, value)
     import shutil
     import os
     # Copy each uploaded file (skipping job files) into staging and
     # attach it to the new instance's dataset via the "staging" protocol.
     for filename in oldInstance.get_params("uploaded_file", value=True):
         if filename[-8:] != ".jobfile":
             thisfile = Dataset_File.objects.get(
                 dataset=oldInstance.dataset,
                 filename=filename)
             shutil.copy(thisfile.get_absolute_filepath(),
                         get_full_staging_path(username))
             newfileurl = os.path.join(get_full_staging_path(username),
                                       filename)
             newDatafile = Dataset_File(
                 dataset=newInstance.dataset,
                 url=newfileurl,
                 protocol="staging",
                 mimetype=thisfile.mimetype,
                 )
             newDatafile.save()
     return newInstance
开发者ID:aaryani,项目名称:aa-migration-test1,代码行数:31,代码来源:task.py

示例7: _make_dataset

# 需要导入模块: from tardis.tardis_portal.models import Dataset_File [as 别名]
# 或者: from tardis.tardis_portal.models.Dataset_File import save [as 别名]
 def _make_dataset(self, exp, filenames):
     """Create a dataset under *exp* containing one datafile per filename."""
     dataset = Dataset(experiment=exp)
     dataset.save()
     for name in filenames:
         datafile = Dataset_File(dataset=dataset, size=41, protocol='file')
         datafile.filename = name
         # Point each file at the test 'data' directory next to this module.
         datafile.url = 'file://' + path.join(path.dirname(__file__),
                                              'data', datafile.filename)
         datafile.save()
开发者ID:conkiztador,项目名称:mytardis,代码行数:10,代码来源:test_integrity.py

示例8: _create_test_dataset

# 需要导入模块: from tardis.tardis_portal.models import Dataset_File [as 别名]
# 或者: from tardis.tardis_portal.models.Dataset_File import save [as 别名]
def _create_test_dataset(nosDatafiles):
    """Create and save a Dataset holding *nosDatafiles* dummy datafiles."""
    dataset = Dataset(description='happy snaps of plumage')
    dataset.save()
    for _ in range(nosDatafiles):
        datafile = Dataset_File(
            dataset=dataset,
            url='http://planet-python.org/' + str(_next_id()))
        datafile.save()
    dataset.save()
    return dataset
开发者ID:JasonBoyka,项目名称:mytardis,代码行数:10,代码来源:test_rmexperiment.py

示例9: _build

# 需要导入模块: from tardis.tardis_portal.models import Dataset_File [as 别名]
# 或者: from tardis.tardis_portal.models.Dataset_File import save [as 别名]
 def _build(dataset, filename, url, protocol):
     """Create a saved datafile plus a replica at the default location."""
     from tardis.tardis_portal.models import \
         Dataset_File, Replica, Location
     new_file = Dataset_File(dataset=dataset, filename=filename)
     new_file.save()
     new_replica = Replica(datafile=new_file, url=url,
                           protocol=protocol,
                           location=Location.get_default_location())
     new_replica.save()
     return new_file
开发者ID:cvl-em-apm,项目名称:mytardis,代码行数:12,代码来源:test_models.py

示例10: _create_test_dataset

# 需要导入模块: from tardis.tardis_portal.models import Dataset_File [as 别名]
# 或者: from tardis.tardis_portal.models.Dataset_File import save [as 别名]
def _create_test_dataset(nosDatafiles):
    """Create a dataset with *nosDatafiles* bogus-checksum datafiles, each
    backed by a replica at the default location."""
    dataset = Dataset(description='happy snaps of plumage')
    dataset.save()
    for _ in range(nosDatafiles):
        datafile = Dataset_File(dataset=dataset, size='21', sha512sum='bogus')
        datafile.save()
        replica = Replica(
            datafile=datafile,
            url='http://planet-python.org/' + str(_next_id()),
            location=Location.get_default_location())
        replica.save()
    dataset.save()
    return dataset
开发者ID:TheGoodRob,项目名称:mytardis,代码行数:14,代码来源:test_rmexperiment.py

示例11: create_datafile

# 需要导入模块: from tardis.tardis_portal.models import Dataset_File [as 别名]
# 或者: from tardis.tardis_portal.models.Dataset_File import save [as 别名]
        def create_datafile(index):
            """Build a verified datafile from fixture jeol_sem_test<index>.txt."""
            fixture = path.join(path.dirname(__file__), 'fixtures',
                                'jeol_sem_test%d.txt' % index)
            size, sha512sum = get_size_and_sha512sum(fixture)

            new_file = Dataset_File(dataset=dataset,
                                    filename=path.basename(fixture),
                                    url='file://' + path.abspath(fixture),
                                    protocol='file',
                                    size=size,
                                    sha512sum=sha512sum)
            new_file.verify()
            new_file.save()
            return new_file
开发者ID:JMSS-IT-11-2012,项目名称:mytardis,代码行数:17,代码来源:test_jeolsem.py

示例12: create_staging_datafile

# 需要导入模块: from tardis.tardis_portal.models import Dataset_File [as 别名]
# 或者: from tardis.tardis_portal.models.Dataset_File import save [as 别名]
def create_staging_datafile(filepath, username, dataset_id):
    """Register a file in the user's staging area as a datafile backed by
    a 'staging' replica."""
    dataset = Dataset.objects.get(id=dataset_id)
    url, size = get_staging_url_and_size(username, filepath)

    datafile = Dataset_File(dataset=dataset,
                            filename=path.basename(filepath),
                            size=size)
    replica = Replica(datafile=datafile,
                      protocol='staging',
                      url=url,
                      location=Location.get_location('staging'))
    # Verify before the datafile is saved; empty checksums are tolerated
    # for staged files.
    replica.verify(allowEmptyChecksums=True)
    datafile.save()
    # Re-attach so the replica picks up the datafile's primary key.
    replica.datafile = datafile
    replica.save()
开发者ID:crawley,项目名称:mytardis,代码行数:17,代码来源:tasks.py

示例13: generate_datafile

# 需要导入模块: from tardis.tardis_portal.models import Dataset_File [as 别名]
# 或者: from tardis.tardis_portal.models.Dataset_File import save [as 别名]
def generate_datafile(path, dataset, content=None, size=-1, 
                      verify=True, verified=True):
    '''Generates a datafile AND a replica to hold its contents.

    :param path: store-relative path for the replica, or None to derive
        "<experiment id>/<dataset id>/<datafile id>" after an initial save
    :param dataset: dataset the new datafile belongs to
    :param content: optional bytes written to the backing file
    :param size: recorded size when no content is supplied
    :param verify: when True (and content is given), run replica.verify()
    :param verified: verified flag used when verification is skipped
    :rtype: (datafile, replica) tuple
    '''
    from tardis.tardis_portal.models import \
        Dataset_File, Replica, Location

    # Checksum enforcement is disabled while the record is created and
    # restored in the finally block below.
    saved = settings.REQUIRE_DATAFILE_CHECKSUMS 
    settings.REQUIRE_DATAFILE_CHECKSUMS = False
    try:
        datafile = Dataset_File()
        if content:
            datafile.size = str(len(content))
        else:
            datafile.size = str(size)
        # Normally we use any old string for the datafile path, but some
        # tests require the path to be the same as what 'staging' would use
        if path == None:
            # Save first so the datafile has an id to embed in the path.
            datafile.dataset_id = dataset.id
            datafile.save()
            path = "%s/%s/%s" % (dataset.get_first_experiment().id,
                                 dataset.id, datafile.id)

        filepath = os.path.normpath(FILE_STORE_PATH + '/' + path)
        if content:
            # Best-effort: create the directory and remove any stale file;
            # failures here are deliberately ignored.
            try:
                os.makedirs(os.path.dirname(filepath))
                os.remove(filepath)
            except:
                pass
            file = open(filepath, 'wb+')
            file.write(content)
            file.close()
        datafile.mimetype = "application/unspecified"
        datafile.filename = os.path.basename(filepath)
        datafile.dataset_id = dataset.id
        datafile.save()

        location = _infer_location(path)
        replica = Replica(datafile=datafile, url=path, protocol='',
                          location=location)
        if verify and content:
            if not replica.verify(allowEmptyChecksums=True):
                raise RuntimeError('verify failed!?!')
        else:
            # Skip real verification; just record the requested flag.
            replica.verified = verified
        replica.save()
        return (datafile, replica)
    finally:
        settings.REQUIRE_DATAFILE_CHECKSUMS = saved
开发者ID:guillaumeprevost,项目名称:mytardis,代码行数:50,代码来源:generate.py

示例14: _make_data_file

# 需要导入模块: from tardis.tardis_portal.models import Dataset_File [as 别名]
# 或者: from tardis.tardis_portal.models.Dataset_File import save [as 别名]
def _make_data_file(dataset, filename, content):
    # TODO:
    # create datasetfile

    f = mktemp()
    print "Inside make data file ", f
    open(f, "w+b").write(content)
    df = Dataset_File()
    df.dataset = dataset
    df.filename = filename
    df.url = 'file://'+f
    df.protocol = "staging"
    df.size = len(content)
    df.verify(allowEmptyChecksums=True)
    df.save()
    print "Df ---", df
开发者ID:bioscience-data-platform,项目名称:mytardis_hpc_app,代码行数:18,代码来源:views.py

示例15: aadd_staged_file_to_dataset

# 需要导入模块: from tardis.tardis_portal.models import Dataset_File [as 别名]
# 或者: from tardis.tardis_portal.models.Dataset_File import save [as 别名]
def aadd_staged_file_to_dataset(rel_filepath, dataset_id, username,
                               mimetype="application/octet-stream"):
    """
    Register a file from the user's staging path against a dataset.

    Quick-and-dirty helper; may be replaced by main code functions.
    """
    staging_root = get_full_staging_path(username)
    originfilepath = os.path.join(staging_root, rel_filepath)
    target_dataset = Dataset.objects.get(pk=dataset_id)

    staged_file = Dataset_File(dataset=target_dataset,
                               url=originfilepath,
                               protocol="staging",
                               mimetype=mimetype)
    staged_file.save()
开发者ID:aaryani,项目名称:aa-migration-test1,代码行数:20,代码来源:utils.py


注:本文中的tardis.tardis_portal.models.Dataset_File.save方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。