This article collects typical usage examples of the Python class tardis.tardis_portal.models.Dataset_File. If you have been wondering what Dataset_File is for and how to use it, the curated examples below should help.
The following presents 15 code examples of the Dataset_File class, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
Example 1: clone
def clone(cls, oldInstance, newDescription, username):
    newInstance = cls(description=newDescription,
                      experiment_id=oldInstance.dataset.experiment.id)
    # copy every parameter except those flagged as do-not-copy
    for param in oldInstance.parameters:
        if param.name.name not in cls.doNotCopyParams:
            if param.name.isNumeric():
                value = param.numerical_value
            else:
                value = param.string_value
            newInstance.new_param(param.name.name, value)
    import shutil
    import os
    # copy every uploaded file (except job files) into the user's
    # staging area and register it against the new dataset
    for filename in oldInstance.get_params("uploaded_file", value=True):
        if filename[-8:] != ".jobfile":
            thisfile = Dataset_File.objects.get(
                dataset=oldInstance.dataset,
                filename=filename)
            shutil.copy(thisfile.get_absolute_filepath(),
                        get_full_staging_path(username))
            newfileurl = os.path.join(get_full_staging_path(username),
                                      filename)
            newDatafile = Dataset_File(
                dataset=newInstance.dataset,
                url=newfileurl,
                protocol="staging",
                mimetype=thisfile.mimetype,
            )
            newDatafile.save()
    return newInstance
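
A hedged usage sketch; JobParameterSet is a stand-in name, since the excerpt does not show which parameter-set class defines clone() as a classmethod:

# hypothetical caller: any class providing doNotCopyParams,
# new_param() and this clone() classmethod would fit
newSet = JobParameterSet.clone(oldSet,
                               newDescription='re-run with new inputs',
                               username='alice')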
Example 2: add_datafile_to_dataset
def add_datafile_to_dataset(dataset, filepath, size):
    """
    Adds datafile metadata to a dataset

    :param dataset: the dataset whose directory is to be written to
    :type dataset: :class:`tardis.tardis_portal.models.Dataset`
    :param filepath: the full OS path to the file
    :type filepath: string
    :param size: the file size in bytes
    :type size: string
    :returns: the new datafile object
    :rtype: :class:`tardis.tardis_portal.models.Dataset_File`
    """
    experiment_path = path.join(settings.FILE_STORE_PATH,
                                str(dataset.experiment.id))
    dataset_path = path.join(experiment_path, str(dataset.id))
    urlpath = 'file:/' + filepath[len(experiment_path):]
    filename = urlpath.rpartition('/')[2]

    datafile = Dataset_File(dataset=dataset, filename=filename,
                            url=urlpath, size=size, protocol='')
    datafile.save()
    return datafile
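
A minimal usage sketch, assuming a configured MyTardis environment; the id and filename are illustrative. Note that filepath must sit below the experiment's directory under FILE_STORE_PATH, because the file:/ url is derived by slicing off that prefix:

import os
from django.conf import settings
from tardis.tardis_portal.models import Dataset

dataset = Dataset.objects.get(id=1)   # illustrative id
filepath = os.path.join(settings.FILE_STORE_PATH,
                        str(dataset.experiment.id),
                        str(dataset.id), 'raw.dat')
datafile = add_datafile_to_dataset(dataset, filepath,
                                   str(os.path.getsize(filepath)))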
Example 3: _create_test_dataset
def _create_test_dataset(nosDatafiles):
    ds_ = Dataset(description='happy snaps of plumage')
    ds_.save()
    for i in range(nosDatafiles):
        df_ = Dataset_File(dataset=ds_,
                           url='http://planet-python.org/' + str(_next_id()))
        df_.save()
    ds_.save()
    return ds_
Example 4: _make_dataset
def _make_dataset(self, exp, filenames):
    dataset = Dataset(experiment=exp)
    dataset.save()
    for filename in filenames:
        df = Dataset_File(dataset=dataset, size=41, protocol='file')
        df.filename = filename
        df.url = 'file://' + path.join(path.dirname(__file__),
                                       'data', df.filename)
        df.save()
Example 5: setUp
def setUp(self):
    # create a test user
    self.user = User.objects.create_user(username='DownloadTestUser',
                                         email='',
                                         password='secret')

    # create a public experiment
    self.experiment1 = Experiment(title='Experiment 1',
                                  created_by=self.user,
                                  public=True)
    self.experiment1.save()

    # create a non-public experiment
    self.experiment2 = Experiment(title='Experiment 2',
                                  created_by=self.user,
                                  public=False)
    self.experiment2.save()

    # dataset1 belongs to experiment1
    self.dataset1 = Dataset(experiment=self.experiment1)
    self.dataset1.save()

    # dataset2 belongs to experiment2
    self.dataset2 = Dataset(experiment=self.experiment2)
    self.dataset2.save()

    # absolute path first
    filename = 'testfile.txt'
    self.dest1 = abspath(join(settings.FILE_STORE_PATH,
                              '%s' % self.experiment1.id))
    self.dest2 = abspath(join(settings.FILE_STORE_PATH,
                              '%s' % self.experiment2.id))
    if not exists(self.dest1):
        mkdir(self.dest1)
    if not exists(self.dest2):
        mkdir(self.dest2)

    testfile1 = abspath(join(self.dest1, filename))
    f = open(testfile1, 'w')
    f.write("Hello World!\n")
    f.close()

    testfile2 = abspath(join(self.dest2, filename))
    f = open(testfile2, 'w')
    f.write("Hello World!\n")
    f.close()

    self.dataset_file1 = Dataset_File(dataset=self.dataset1,
                                      filename=filename,
                                      protocol='tardis',
                                      url='tardis://%s' % filename)
    self.dataset_file1.save()

    self.dataset_file2 = Dataset_File(dataset=self.dataset2,
                                      filename=basename(filename),
                                      protocol='tardis',
                                      url='tardis://%s' % filename)
    self.dataset_file2.save()
Example 6: _build
def _build(dataset, filename, url, protocol):
    from tardis.tardis_portal.models import \
        Dataset_File, Replica, Location
    datafile = Dataset_File(dataset=dataset, filename=filename)
    datafile.save()
    replica = Replica(datafile=datafile, url=url,
                      protocol=protocol,
                      location=Location.get_default_location())
    replica.save()
    return datafile
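
This helper reflects the datafile/replica split in later MyTardis models: Dataset_File carries the file's metadata, while each Replica records one physical copy (its url, protocol, and storage Location). An illustrative call, assuming dataset is a saved Dataset:

df = _build(dataset, 'spectrum.dat',
            'file:///tmp/spectrum.dat', 'file')
# the file's single copy is now recorded as a Replica at the default Location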
Example 7: add_staged_file_to_dataset
def add_staged_file_to_dataset(rel_filepath, dataset_id, username,
                               mimetype="application/octet-stream"):
    """
    Add a file from the user's staging path to a dataset.
    May be replaced by main code functions; quick and dirty
    hack to get it working.
    """
    originfilepath = os.path.join(get_full_staging_path(username),
                                  rel_filepath)
    dataset = Dataset.objects.get(pk=dataset_id)

    newDatafile = Dataset_File()
    newDatafile.dataset = dataset
    newDatafile.size = os.path.getsize(originfilepath)
    newDatafile.protocol = "tardis"
    newDatafile.mimetype = mimetype

    file_dir = "/" + str(dataset.experiment.id) + "/" + str(dataset.id) + "/"
    file_path = file_dir + rel_filepath
    prelim_full_file_path = settings.FILE_STORE_PATH + file_path
    full_file_path = duplicate_file_check_rename(prelim_full_file_path)
    newDatafile.filename = os.path.basename(full_file_path)
    newDatafile.url = "%s://%s" % (
        newDatafile.protocol,
        full_file_path[len(settings.FILE_STORE_PATH) + len(file_dir):])

    if not os.path.exists(os.path.dirname(full_file_path)):
        os.makedirs(os.path.dirname(full_file_path))
    shutil.move(originfilepath, full_file_path)
    newDatafile.save()
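
An illustrative call, assuming 'results/out.csv' has already been placed in alice's staging area and a dataset with id 42 exists:

add_staged_file_to_dataset('results/out.csv', 42, 'alice',
                           mimetype='text/csv')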
Example 8: _create_test_dataset
def _create_test_dataset(nosDatafiles):
    ds_ = Dataset(description='happy snaps of plumage')
    ds_.save()
    for i in range(nosDatafiles):
        df_ = Dataset_File(dataset=ds_, size='21', sha512sum='bogus')
        df_.save()
        rep_ = Replica(datafile=df_,
                       url='http://planet-python.org/' + str(_next_id()),
                       location=Location.get_default_location())
        rep_.save()
    ds_.save()
    return ds_
Example 9: create_staging_datafile
def create_staging_datafile(filepath, username, dataset_id):
    dataset = Dataset.objects.get(id=dataset_id)
    url, size = get_staging_url_and_size(username, filepath)
    datafile = Dataset_File(dataset=dataset,
                            filename=path.basename(filepath),
                            size=size)
    replica = Replica(datafile=datafile,
                      protocol='staging',
                      url=url,
                      location=Location.get_location('staging'))
    replica.verify(allowEmptyChecksums=True)
    datafile.save()
    replica.datafile = datafile
    replica.save()
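
Two details worth noting in this ordering: the Dataset_File is saved before the Replica, since the replica's datafile foreign key needs a saved object with a primary key behind it; and allowEmptyChecksums=True presumably lets verification pass for a staged file that arrives without a pre-recorded checksum.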
Example 10: create_datafile
def create_datafile(index):
    testfile = path.join(path.dirname(__file__), 'fixtures',
                         'jeol_sem_test%d.txt' % index)
    size, sha512sum = get_size_and_sha512sum(testfile)
    datafile = Dataset_File(dataset=dataset,
                            filename=path.basename(testfile),
                            url='file://' + path.abspath(testfile),
                            protocol='file',
                            size=size,
                            sha512sum=sha512sum)
    datafile.verify()
    datafile.save()
    return datafile
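
For reference, a minimal standalone sketch of what a helper like get_size_and_sha512sum presumably computes (an illustration, not the library's actual implementation):

import hashlib
import os

def size_and_sha512(filepath, blocksize=65536):
    # stream the file in chunks so large files never sit fully in memory
    digest = hashlib.sha512()
    with open(filepath, 'rb') as f:
        for chunk in iter(lambda: f.read(blocksize), b''):
            digest.update(chunk)
    return os.path.getsize(filepath), digest.hexdigest()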
Example 11: _make_data_file
def _make_data_file(dataset, filename, content):
    # TODO:
    # create datasetfile
    f = mktemp()
    print "Inside make data file ", f
    open(f, "w+b").write(content)
    df = Dataset_File()
    df.dataset = dataset
    df.filename = filename
    df.url = 'file://' + f
    df.protocol = "staging"
    df.size = len(content)
    df.verify(allowEmptyChecksums=True)
    df.save()
    print "Df ---", df
Example 12: testRemoteFile
def testRemoteFile(self):
    content = urandom(1024)
    with NamedTemporaryFile() as f:
        # Create new Datafile
        datafile = Dataset_File(dataset=self.dataset)
        datafile.filename = 'background_task_testfile'
        datafile.size = len(content)
        datafile.sha512sum = hashlib.sha512(content).hexdigest()
        datafile.url = 'file://' + path.abspath(f.name)
        datafile.save()

        def get_datafile(datafile):
            return Dataset_File.objects.get(id=datafile.id)

        # Check that it won't verify as it stands
        expect(get_datafile(datafile).verified).to_be(False)
        verify_files()
        expect(get_datafile(datafile).verified).to_be(False)
        expect(get_datafile(datafile).is_local()).to_be(False)

        # Fill in the content
        f.write(content)
        f.flush()

        # Check it now verifies
        verify_files()
        expect(get_datafile(datafile).verified).to_be(True)
        expect(get_datafile(datafile).is_local()).to_be(True)
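
What this test demonstrates is that verification is content-driven rather than record-driven: saving the Dataset_File record is not enough, and verified stays False while the temporary file is still empty. Only once bytes matching the stored sha512sum are in place does a verify_files() pass flip both verified and is_local() to True.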
Example 13: aadd_staged_file_to_dataset
def aadd_staged_file_to_dataset(rel_filepath, dataset_id, username,
                                mimetype="application/octet-stream"):
    """
    Add a file from the user's staging path to a dataset.
    May be replaced by main code functions; quick and dirty
    hack to get it working.
    """
    originfilepath = os.path.join(get_full_staging_path(username),
                                  rel_filepath)
    dataset = Dataset.objects.get(pk=dataset_id)
    newDatafile = Dataset_File(
        dataset=dataset,
        url=originfilepath,
        protocol="staging",
        mimetype=mimetype,
    )
    newDatafile.save()
Example 14: _create_datafile
def _create_datafile():
    user = User.objects.create_user("testuser", "[email protected]", "pwd")
    user.save()
    UserProfile(user=user).save()

    Location.force_initialize()

    full_access = Experiment.PUBLIC_ACCESS_FULL
    experiment = Experiment.objects.create(title="IIIF Test",
                                           created_by=user,
                                           public_access=full_access)
    experiment.save()
    ObjectACL(content_object=experiment,
              pluginId="django_user",
              entityId=str(user.id),
              isOwner=True,
              canRead=True,
              canWrite=True,
              canDelete=True,
              aclOwnershipType=ObjectACL.OWNER_OWNED).save()

    dataset = Dataset()
    dataset.save()
    dataset.experiments.add(experiment)
    dataset.save()

    # Create new Datafile
    tempfile = TemporaryUploadedFile("iiif_stored_file", None, None, None)
    with Image(filename="magick:rose") as img:
        img.format = "tiff"
        img.save(file=tempfile.file)
        tempfile.file.flush()
    datafile = Dataset_File(dataset=dataset,
                            size=os.path.getsize(tempfile.file.name),
                            filename="iiif_named_file")
    replica = Replica(datafile=datafile,
                      url=write_uploaded_file_to_dataset(dataset, tempfile),
                      location=Location.get_default_location())
    replica.verify(allowEmptyChecksums=True)
    datafile.save()
    replica.datafile = datafile
    replica.save()
    return datafile
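
"magick:rose" refers to one of ImageMagick's built-in sample images, which keeps this fixture self-contained: no image file needs to ship with the test suite. The image-generation step in isolation looks roughly like this (a minimal sketch assuming the wand package and ImageMagick are installed; the output path is illustrative):

from wand.image import Image

# render ImageMagick's built-in rose image and save it as TIFF
with Image(filename='magick:rose') as img:
    img.format = 'tiff'
    img.save(filename='/tmp/rose.tiff')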
Example 15: fpupload
def fpupload(request, dataset_id):
    """
    Uploads all files picked by filepicker to the dataset

    :param request: an HTTP Request instance
    :type request: :class:`django.http.HttpRequest`
    :param dataset_id: the dataset_id
    :type dataset_id: integer
    :returns: a response whose JSON body contains ``{"result": true}`` on success
    :rtype: :class:`django.http.HttpResponse`
    """
    dataset = Dataset.objects.get(id=dataset_id)
    logger.debug('called fpupload')

    if request.method == 'POST':
        logger.debug('got POST')
        for key, val in request.POST.items():
            splits = val.split(",")
            for url in splits:
                try:
                    fp = FilepickerFile(url)
                except ValueError:
                    pass
                else:
                    picked_file = fp.get_file()
                    filepath = write_uploaded_file_to_dataset(dataset,
                                                              picked_file)
                    datafile = Dataset_File(dataset=dataset,
                                            filename=picked_file.name,
                                            size=picked_file.size)
                    replica = Replica(datafile=datafile,
                                      url=filepath,
                                      protocol='',
                                      location=Location.get_default_location())
                    replica.verify(allowEmptyChecksums=True)
                    datafile.save()
                    replica.datafile = datafile
                    replica.save()

        return HttpResponse(json.dumps({"result": True}))
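
A hedged sketch of exercising the view with Django's test client; the URL path is illustrative and must match however fpupload is wired into urls.py. Since the view walks every request.POST item and splits each value on commas, the key name is arbitrary, and a real run would fetch the Filepicker URL over the network:

from django.test.client import Client

client = Client()
response = client.post('/ajax/dataset/1/fpupload/',
                       {'url_list': 'https://www.filepicker.io/api/file/abc123'})
assert response.content == '{"result": true}'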