本文整理汇总了Python中tardis.tardis_portal.models.Dataset_File.url方法的典型用法代码示例。如果您正苦于以下问题:Python Dataset_File.url方法的具体用法?Python Dataset_File.url怎么用?Python Dataset_File.url使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类tardis.tardis_portal.models.Dataset_File
的用法示例。
在下文中一共展示了Dataset_File.url方法的7个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: testRemoteFile
# 需要导入模块: from tardis.tardis_portal.models import Dataset_File [as 别名]
# 或者: from tardis.tardis_portal.models.Dataset_File import url [as 别名]
def testRemoteFile(self):
    """A file:// datafile must not verify until its on-disk content exists."""
    payload = urandom(1024)
    with NamedTemporaryFile() as tmp:
        # Register a datafile that points at the (still empty) temp file.
        df = Dataset_File(dataset=self.dataset)
        df.filename = 'background_task_testfile'
        df.size = len(payload)
        df.sha512sum = hashlib.sha512(payload).hexdigest()
        df.url = 'file://' + path.abspath(tmp.name)
        df.save()

        def fetch(record):
            # Re-read from the DB so we observe what verify_files() persisted.
            return Dataset_File.objects.get(id=record.id)

        # Empty file on disk: verification must fail both before and after
        # the background task runs, and the file is not considered local.
        expect(fetch(df).verified).to_be(False)
        verify_files()
        expect(fetch(df).verified).to_be(False)
        expect(fetch(df).is_local()).to_be(False)

        # Now write the real content and flush it out to disk.
        tmp.write(payload)
        tmp.flush()

        # With matching content in place, verification should succeed.
        verify_files()
        expect(fetch(df).verified).to_be(True)
        expect(fetch(df).is_local()).to_be(True)
示例2: add_staged_file_to_dataset
# 需要导入模块: from tardis.tardis_portal.models import Dataset_File [as 别名]
# 或者: from tardis.tardis_portal.models.Dataset_File import url [as 别名]
def add_staged_file_to_dataset(rel_filepath, dataset_id, username,
                               mimetype="application/octet-stream"):
    """
    Move a file from the user's staging area into the dataset's file store
    and register it as a Dataset_File.

    :param rel_filepath: path of the staged file, relative to the user's
        staging directory
    :param dataset_id: primary key of the target Dataset
    :param username: owner of the staging area the file comes from
    :param mimetype: MIME type recorded on the new datafile

    May be replaced by main code functions; quick and dirty hack to get it
    working.
    """
    originfilepath = os.path.join(get_full_staging_path(username), rel_filepath)
    dataset = Dataset.objects.get(pk=dataset_id)
    newDatafile = Dataset_File()
    newDatafile.dataset = dataset
    newDatafile.size = os.path.getsize(originfilepath)
    newDatafile.protocol = "tardis"
    newDatafile.mimetype = mimetype
    # Store layout: /<experiment id>/<dataset id>/<relative path>
    file_dir = "/" + str(dataset.experiment.id) + "/" + str(dataset.id) + "/"
    file_path = file_dir + rel_filepath
    prelim_full_file_path = settings.FILE_STORE_PATH + file_path
    # Avoid clobbering an existing file with the same name.
    full_file_path = duplicate_file_check_rename(prelim_full_file_path)
    newDatafile.filename = os.path.basename(full_file_path)
    # The url is the (possibly renamed) path relative to the store + dir
    # prefix, i.e. "tardis://<relative path>".
    newDatafile.url = "%s://%s" % (newDatafile.protocol,
                                   full_file_path[
                                       len(settings.FILE_STORE_PATH) + len(file_dir):])
    # EAFP: create the directory and tolerate it already existing, instead
    # of the racy exists()-then-makedirs() check (TOCTOU).
    dest_dir = os.path.dirname(full_file_path)
    try:
        os.makedirs(dest_dir)
    except OSError:
        if not os.path.isdir(dest_dir):
            raise
    shutil.move(originfilepath, full_file_path)
    newDatafile.save()
示例3: _make_dataset
# 需要导入模块: from tardis.tardis_portal.models import Dataset_File [as 别名]
# 或者: from tardis.tardis_portal.models.Dataset_File import url [as 别名]
def _make_dataset(self, exp, filenames):
    """Create a Dataset under *exp* with one file:// datafile per name."""
    dataset = Dataset(experiment=exp)
    dataset.save()
    data_dir = path.join(path.dirname(__file__), 'data')
    for name in filenames:
        record = Dataset_File(dataset=dataset, size=41, protocol='file')
        record.filename = name
        record.url = 'file://' + path.join(data_dir, name)
        record.save()
示例4: _make_data_file
# 需要导入模块: from tardis.tardis_portal.models import Dataset_File [as 别名]
# 或者: from tardis.tardis_portal.models.Dataset_File import url [as 别名]
def _make_data_file(dataset, filename, content):
# TODO:
# create datasetfile
f = mktemp()
print "Inside make data file ", f
open(f, "w+b").write(content)
df = Dataset_File()
df.dataset = dataset
df.filename = filename
df.url = 'file://'+f
df.protocol = "staging"
df.size = len(content)
df.verify(allowEmptyChecksums=True)
df.save()
print "Df ---", df
示例5: testLocalFile
# 需要导入模块: from tardis.tardis_portal.models import Dataset_File [as 别名]
# 或者: from tardis.tardis_portal.models.Dataset_File import url [as 别名]
def testLocalFile(self):
    """A datafile written via write_uploaded_file_to_dataset verifies."""
    payload = urandom(1024)
    upload = ContentFile(payload, 'background_task_testfile')

    # Register a datafile backed by the uploaded content.
    df = Dataset_File(dataset=self.dataset)
    df.filename = upload.name
    df.size = len(payload)
    df.sha512sum = hashlib.sha512(payload).hexdigest()
    df.url = write_uploaded_file_to_dataset(self.dataset, upload)
    df.save()

    def fetch(record):
        # Re-read from the DB to see what verify_files() persisted.
        return Dataset_File.objects.get(id=record.id)

    # Not verified until the background task has run.
    expect(fetch(df).verified).to_be(False)
    # After the task runs, the local content should verify.
    verify_files()
    expect(fetch(df).verified).to_be(True)
示例6: generate_datafile
# 需要导入模块: from tardis.tardis_portal.models import Dataset_File [as 别名]
# 或者: from tardis.tardis_portal.models.Dataset_File import url [as 别名]
def generate_datafile(path, dataset, content=None, size=-1,
                      verify=True, verified=True):
    """
    Create and save a Dataset_File for *dataset*, optionally writing
    *content* into the file store at *path*.

    :param path: store-relative path for the datafile, or None to derive
        the "staging"-style path <experiment>/<dataset>/<datafile id>
    :param dataset: the Dataset the file belongs to
    :param content: bytes to write to disk; when None only the DB record
        is created and *size* is recorded instead
    :param size: recorded size when no content is given
    :param verify: when True (and content was written), run verification
        and raise RuntimeError if it fails
    :param verified: verified flag to set when verification is skipped
    :returns: the saved Dataset_File
    """
    from tardis.tardis_portal.models import Dataset_File
    datafile = Dataset_File()
    # Normally we use any old string for the datafile path, but some
    # tests require the path to be the same as what 'staging' would use.
    # The record must be saved first so datafile.id exists for the path.
    if path is None:
        datafile.dataset_id = dataset.id
        datafile.save()
        path = "%s/%s/%s" % (dataset.get_first_experiment().id,
                             dataset.id, datafile.id)
    filepath = os.path.normpath(FILE_STORE_PATH + '/' + path)
    if content:
        # Best-effort: make sure the directory exists and any stale file
        # is removed. OSError replaces the original bare except, which
        # silently swallowed *every* exception type.
        try:
            os.makedirs(os.path.dirname(filepath))
            os.remove(filepath)
        except OSError:
            pass
        # Context manager closes the handle (the original never closed it
        # explicitly and shadowed the 'file' builtin).
        with open(filepath, 'wb+') as f:
            f.write(content)
    datafile.url = path
    datafile.mimetype = "application/unspecified"
    datafile.filename = os.path.basename(filepath)
    datafile.dataset_id = dataset.id
    if content:
        datafile.size = str(len(content))
    else:
        datafile.size = str(size)
    if verify and content:
        if not datafile.verify(allowEmptyChecksums=True):
            raise RuntimeError('verify failed!?!')
    else:
        datafile.verified = verified
    datafile.save()
    return datafile
示例7: _create_datafile
# 需要导入模块: from tardis.tardis_portal.models import Dataset_File [as 别名]
# 或者: from tardis.tardis_portal.models.Dataset_File import url [as 别名]
def _create_datafile():
    """Build a fully public experiment/dataset and attach a TIFF datafile.

    Returns the saved Dataset_File, owned by a fresh "testuser" account.
    """
    owner = User.objects.create_user("testuser", "[email protected]", "pwd")
    owner.save()
    UserProfile(user=owner).save()

    experiment = Experiment.objects.create(
        title="IIIF Test",
        created_by=owner,
        public_access=Experiment.PUBLIC_ACCESS_FULL)
    experiment.save()

    # Grant the creator full ownership rights on the experiment.
    acl = ExperimentACL(
        experiment=experiment,
        pluginId="django_user",
        entityId=str(owner.id),
        isOwner=True,
        canRead=True,
        canWrite=True,
        canDelete=True,
        aclOwnershipType=ExperimentACL.OWNER_OWNED)
    acl.save()

    dataset = Dataset()
    dataset.save()
    dataset.experiments.add(experiment)
    dataset.save()

    # Render a small TIFF image into an uploaded-file wrapper.
    upload = TemporaryUploadedFile("iiif_stored_file", None, None, None)
    with Image(filename="magick:rose") as img:
        img.format = "tiff"
        img.save(file=upload.file)
    upload.file.flush()

    # Create new Datafile backed by the uploaded TIFF.
    datafile = Dataset_File(dataset=dataset)
    datafile.size = os.path.getsize(upload.file.name)
    datafile.filename = "iiif_named_file"
    datafile.url = write_uploaded_file_to_dataset(dataset, upload)
    datafile.verify(allowEmptyChecksums=True)
    datafile.save()
    return datafile