This article collects typical usage examples of the Python class tardis.tardis_portal.models.Location. If you have been wondering what the Location class is for, how to use it, or want to see it in real code, the curated examples below may help.
The following 15 code examples of the Location class are shown, sorted by popularity by default. You can upvote the examples you find useful; your ratings help the system surface better Python code samples.
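Before the examples, here is a minimal sketch of the Location calls that recur below (force_initialize, load_location, get_default_location, get_location, get_location_for_url). The location name 'demo-store' and its URL are made up for illustration; the call patterns are taken from the examples that follow.

from tardis.tardis_portal.models import Location

# Populate the default set of locations ('local', 'staging', ...)
Location.force_initialize()

# Register an additional location (the name and URL here are hypothetical)
Location.load_location({
    'name': 'demo-store',
    'url': 'file:///tmp/demo-store',
    'type': 'external',
    'priority': 10,
    'transfer_provider': 'local'})

# Look a location up by name, take the default, or resolve one from a URL
local = Location.get_default_location()
demo = Location.get_location('demo-store')
loc = Location.get_location_for_url('file:///tmp/demo-store/some/file')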
Example 1: test_location
def test_location(self):
    from tardis.tardis_portal.models import Location
    self.assertEquals(Location.get_default_location().name,
                      'local')
    self.assertEquals(Location.get_location('staging').name,
                      'staging')
    self.assertEquals(len(Location.objects.all()), 6)
Example 2: setUp
def setUp(self):
    self.user = generate_user('fred')
    Location.force_initialize()
    self.experiment = generate_experiment(users=[self.user])
    self.dataset = generate_dataset(experiments=[self.experiment])
    self.server = SimpleHttpTestServer()
    self.server.start()
Example 3: testReplicaVerify
def testReplicaVerify(self):
    from django.conf import settings
    saved = settings.REQUIRE_DATAFILE_CHECKSUMS
    try:
        Location.get_location('test')
        datafile, replica = generate_datafile("1/2/3", self.dataset,
                                              "Hi mum")
        settings.REQUIRE_DATAFILE_CHECKSUMS = True
        self.assertTrue(replica.verify(), 'Replica.verify() failed.')
        replica.datafile.sha512sum = ''
        replica.datafile.md5sum = ''
        self.assertFalse(
            replica.verify(),
            'Replica.verify() succeeded despite no checksum '
            '(settings.REQUIRE_DATAFILE_CHECKSUMS=True).')
        self.assertFalse(replica.verify(allowEmptyChecksums=False),
                         'Replica.verify() succeeded despite no checksum '
                         '(allowEmptyChecksums=False)')
        settings.REQUIRE_DATAFILE_CHECKSUMS = False
        datafile.sha512sum = None
        datafile.md5sum = None
        self.assertTrue(replica.verify(allowEmptyChecksums=True),
                        'Replica.verify() failed wrongly '
                        '(allowEmptyChecksums=True)')
        datafile.sha512sum = None
        datafile.md5sum = None
        self.assertTrue(replica.verify(),
                        'Replica.verify() failed wrongly')
    finally:
        settings.REQUIRE_DATAFILE_CHECKSUMS = saved
Example 4: testMigrateStoreWithSpaces
def testMigrateStoreWithSpaces(self):
    dest = Location.get_location('test')
    local = Location.get_location('local')
    datafile, replica = generate_datafile('1/1/Hi Mum', self.dataset,
                                          "Hi mum")
    datafile2, replica2 = generate_datafile('1/1/Hi Dad', self.dataset,
                                            "Hi dad")
    path = datafile.get_absolute_filepath()
    self.assertTrue(os.path.exists(path))
    path2 = datafile2.get_absolute_filepath()
    self.assertTrue(os.path.exists(path2))
    # Migrate them
    migrate_replica(replica, dest)
    self.assertFalse(os.path.exists(path))
    migrate_replica(replica2, dest)
    self.assertFalse(os.path.exists(path2))
    # Bring them back
    migrate_replica(datafile.get_preferred_replica(), local)
    self.assertTrue(os.path.exists(path))
    migrate_replica(datafile2.get_preferred_replica(), local)
    self.assertTrue(os.path.exists(path2))
Example 5: setUp
def setUp(self):
    from tardis.tardis_portal import models
    from tempfile import mkdtemp, mktemp
    from django.conf import settings
    from os import path
    import os
    # Disconnect post_save signal
    from django.db.models.signals import post_save
    from tardis.tardis_portal.models import \
        staging_hook, Dataset_File, Replica, Location
    post_save.disconnect(staging_hook, sender=Replica)
    from django.contrib.auth.models import User
    user = 'tardis_user1'
    pwd = 'secret'
    email = ''
    self.user = User.objects.create_user(user, email, pwd)
    try:
        os.makedirs(settings.GET_FULL_STAGING_PATH_TEST)
    except OSError:
        pass
    self.temp = mkdtemp(dir=settings.GET_FULL_STAGING_PATH_TEST)
    self.file = mktemp(dir=self.temp)
    content = 'test file'
    with open(self.file, "w+b") as f:
        f.write(content)
    Location.force_initialize()
    # make datafile
    exp = models.Experiment(title='test exp1',
                            institution_name='monash',
                            created_by=self.user)
    exp.save()
    # make dataset
    dataset = models.Dataset(description="dataset description...")
    dataset.save()
    dataset.experiments.add(exp)
    dataset.save()
    # create datafile
    df = models.Dataset_File(dataset=dataset, size=len(content),
                             filename=path.basename(self.file),
                             md5sum='f20d9f2072bbeb6691c0f9c5099b01f3')
    df.save()
    # create replica
    base_url = 'file://' + settings.GET_FULL_STAGING_PATH_TEST
    location = Location.load_location({
        'name': 'staging-test-yyy', 'url': base_url, 'type': 'external',
        'priority': 10, 'transfer_provider': 'local'})
    replica = models.Replica(datafile=df, url='file://' + self.file,
                             protocol="staging", location=location)
    replica.verify()
    replica.save()
    self.replica = replica
Example 6: setUp
def setUp(self):
    # Create test owner without enough details
    username, email, password = ('testuser',
                                 '[email protected]',
                                 'password')
    user = User.objects.create_user(username, email, password)
    profile = UserProfile(user=user, isDjangoAccount=True)
    profile.save()
    Location.force_initialize()
    # Create test experiment and make user the owner of it
    experiment = Experiment(title='Text Experiment',
                            institution_name='Test Uni',
                            created_by=user)
    experiment.save()
    acl = ObjectACL(
        pluginId='django_user',
        entityId=str(user.id),
        content_object=experiment,
        canRead=True,
        canWrite=True,
        canDelete=True,
        isOwner=True,
        aclOwnershipType=ObjectACL.OWNER_OWNED)
    acl.save()
    dataset = Dataset(description='dataset description...')
    dataset.save()
    dataset.experiments.add(experiment)
    dataset.save()

    def create_datafile(filename):
        testfile = path.join(path.dirname(__file__), 'fixtures',
                             filename)
        size, sha512sum = get_size_and_sha512sum(testfile)
        datafile = Dataset_File(dataset=dataset,
                                filename=path.basename(testfile),
                                size=size,
                                sha512sum=sha512sum)
        datafile.save()
        base_url = 'file://' + path.abspath(path.dirname(testfile))
        location = Location.load_location({
            'name': 'test-grabber', 'url': base_url, 'type': 'external',
            'priority': 10, 'transfer_provider': 'local'})
        replica = Replica(datafile=datafile,
                          url='file://' + path.abspath(testfile),
                          protocol='file',
                          location=location)
        replica.verify()
        replica.save()
        return Dataset_File.objects.get(pk=datafile.pk)

    self.dataset = dataset
    self.datafiles = [create_datafile('data_grabber_test1.admin'),
                      create_datafile('testfile.txt')]
Example 7: setUp
def setUp(self):
    self.user = generate_user('fred')
    Location.force_initialize()
    self.experiment = generate_experiment(
        users=[self.user],
        title='Meanwhile, down in the archives ...',
        url='http://example.com/something')
    self.dataset = generate_dataset(experiments=[self.experiment])
Example 8: setUp
def setUp(self):
    from django.contrib.auth.models import User
    from tardis.tardis_portal.models import Location
    user = 'tardis_user1'
    pwd = 'secret'
    email = ''
    self.user = User.objects.create_user(user, email, pwd)
    Location.force_initialize()
Example 9: setUp
def setUp(self):
    from os import path, mkdir
    from tempfile import mkdtemp
    user = 'tardis_user1'
    pwd = 'secret'
    email = ''
    self.user = User.objects.create_user(user, email, pwd)
    self.userProfile = UserProfile(user=self.user).save()
    self.test_dir = mkdtemp()
    Location.force_initialize()
    self.exp = Experiment(title='test exp1',
                          institution_name='monash', created_by=self.user)
    self.exp.save()
    acl = ObjectACL(
        pluginId=django_user,
        entityId=str(self.user.id),
        content_object=self.exp,
        canRead=True,
        isOwner=True,
        aclOwnershipType=ObjectACL.OWNER_OWNED,
    )
    acl.save()
    self.dataset = \
        Dataset(description='dataset description...')
    self.dataset.save()
    self.dataset.experiments.add(self.exp)
    self.dataset.save()
    self.experiment_path = path.join(
        settings.FILE_STORE_PATH,
        str(self.dataset.get_first_experiment().id))
    self.dataset_path = path.join(self.experiment_path,
                                  str(self.dataset.id))
    if not path.exists(self.experiment_path):
        mkdir(self.experiment_path)
    if not path.exists(self.dataset_path):
        mkdir(self.dataset_path)
    # write test file
    self.filename = 'testfile.txt'
    self.f1 = open(path.join(self.test_dir, self.filename), 'w')
    self.f1.write('Test file 1')
    self.f1.close()
    self.f1_size = path.getsize(path.join(self.test_dir,
                                          self.filename))
    self.f1 = open(path.join(self.test_dir, self.filename), 'r')
Example 10: _infer_location
def _infer_location(path):
    if urlparse.urlparse(path).scheme == '':
        loc = Location.get_default_location()
    else:
        loc = Location.get_location_for_url(path)
    if loc:
        return loc
    else:
        raise Exception('Cannot infer a location for %s' % path)
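A hypothetical usage sketch of the helper above; the paths shown are assumptions, not part of the original code:

# No scheme in the path: falls back to the default location.
loc = _infer_location('/data/experiment1/file.dat')
# A scheme is present: the location registered for that URL is returned,
# or an Exception is raised if none matches.
loc = _infer_location('http://localhost:4272/files/file.dat')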
Example 11: testRemoveExperimentData
def testRemoveExperimentData(self):
    # First with no sharing
    self._build()
    archive_location = Location.get_location('archtest')
    try:
        nos_experiments = Experiment.objects.count()
        nos_datasets = Dataset.objects.count()
        nos_datafiles = Dataset_File.objects.count()
        nos_replicas = Replica.objects.count()
        self.assertTrue(exists(self.replica.get_absolute_filepath()))
        remove_experiment_data(self.experiment,
                               'http://example.com/some.tar.gz',
                               archive_location)
        self.assertEquals(nos_experiments, Experiment.objects.count())
        self.assertEquals(nos_datasets, Dataset.objects.count())
        self.assertEquals(nos_datafiles, Dataset_File.objects.count())
        self.assertEquals(nos_replicas, Replica.objects.count())
        new_replica = self.datafile.get_preferred_replica()
        self.assertTrue(self.replica.id != new_replica.id)
        self.assertFalse(new_replica.stay_remote)
        self.assertTrue(new_replica.verified)
        self.assertEqual(self.replica.protocol, new_replica.protocol)
        self.assertEqual(archive_location.id, new_replica.location.id)
        self.assertEqual('http://example.com/some.tar.gz#1/1/1',
                         new_replica.url)
        self.assertFalse(exists(self.replica.get_absolute_filepath()))
    finally:
        self._clear()
    # (Check that the deletes cascaded ... )
    self.assertEquals(0, Dataset_File.objects.count())
    self.assertEquals(0, Replica.objects.count())

    # Repeat, but with the first dataset in 2 experiments.
    self._build()
    self.dataset.experiments.add(self.experiment2)
    archive_location = Location.get_location('archtest')
    try:
        nos_experiments = Experiment.objects.count()
        nos_datasets = Dataset.objects.count()
        nos_datafiles = Dataset_File.objects.count()
        nos_replicas = Replica.objects.count()
        self.assertTrue(exists(self.replica.get_absolute_filepath()))
        remove_experiment_data(self.experiment,
                               'http://example.com/some.tar.gz',
                               archive_location)
        self.assertEquals(nos_experiments, Experiment.objects.count())
        self.assertEquals(nos_datasets, Dataset.objects.count())
        self.assertEquals(nos_datafiles, Dataset_File.objects.count())
        self.assertEquals(nos_replicas, Replica.objects.count())
        new_replica = self.datafile.get_preferred_replica()
        self.assertTrue(self.replica.id == new_replica.id)
        self.assertTrue(exists(self.replica.get_absolute_filepath()))
        self.assertFalse(exists(self.replica2.get_absolute_filepath()))
    finally:
        self._clear()
Example 12: setUpClass
def setUpClass(cls):
    cls.priorcwd = os.getcwd()
    os.chdir(os.path.dirname(__file__) + '/atom_test')
    cls.server = TestWebServer()
    cls.server.start()
    Location.force_initialize()
    Location.load_location({
        'name': 'test-atom',
        'transfer_provider': 'http',
        'url': 'http://localhost:4272/files/',
        'type': 'external',
        'priority': 10})
    Location.load_location({
        'name': 'test-atom2',
        'transfer_provider': 'http',
        'url': 'http://mydatagrabber.cmm.uq.edu.au/files',
        'type': 'external',
        'priority': 10})
    files = path.realpath(path.join(path.dirname(__file__),
                                    'atom_test', 'files'))
    Location.load_location({
        'name': 'test-atom3',
        'transfer_provider': 'local',
        'url': 'file://' + files,
        'type': 'external',
        'priority': 10})
Example 13: process_enclosure
def process_enclosure(self, dataset, enclosure):
    filename = getattr(enclosure, 'title', basename(enclosure.href))
    datafile = Dataset_File(filename=filename, dataset=dataset)
    try:
        datafile.mimetype = enclosure.mime
    except AttributeError:
        pass
    try:
        datafile.size = enclosure.length
    except AttributeError:
        pass
    try:
        hash = enclosure.hash
        # Split on white space, then ':' to get tuples to feed into dict
        hashdict = dict([s.partition(':')[::2] for s in hash.split()])
        # Set SHA-512 sum
        datafile.sha512sum = hashdict['sha-512']
    except AttributeError:
        pass
    datafile.save()
    url = enclosure.href
    # This means we will allow the atom feed to feed us any enclosure
    # URL that matches a registered location. Maybe we should restrict
    # this to a specific location.
    location = Location.get_location_for_url(url)
    if not location:
        logger.error('Rejected ingestion for unknown location %s' % url)
        return
    replica = Replica(datafile=datafile, url=url,
                      location=location)
    replica.protocol = enclosure.href.partition('://')[0]
    replica.save()
    self.make_local_copy(replica)
Example 14: testScoring
def testScoring(self):
    self._setup()
    scorer = MigrationScorer(Location.get_location('local').id)
    self.assertEquals(2.0, scorer.datafile_score(self.df1))
    self.assertEquals(2, get_user_priority(self.user1))
    self.assertEquals(1, get_user_priority(self.user2))
    self.assertEquals(1.0, scorer.user_score(self.user1))
    self.assertEquals(2.0, scorer.user_score(self.user2))
    self.assertEquals(2.0, scorer.experiment_score(self.exp1))
    self.assertEquals(2.0, scorer.dataset_score(self.df1.dataset))
    self.assertEquals(4.0, scorer.score_datafile(self.df1))
    self.assertEquals([(self.df1, self.rep1, 4.0)],
                      scorer.score_datafiles_in_dataset(self.ds1))
    self.assertEquals([(self.df5, self.rep5, 8.0),
                       (self.df4, self.rep4, 6.0),
                       (self.df1, self.rep1, 4.0)],
                      scorer.score_datafiles_in_experiment(self.exp1))
    self.assertEquals([(self.df5, self.rep5, 8.0),
                       (self.df4, self.rep4, 6.0)],
                      scorer.score_datafiles_in_experiment(self.exp2))
    self.assertEquals([(self.df6, self.rep6, 5.0)],
                      scorer.score_datafiles_in_experiment(self.exp3))
    self.assertEquals([(self.df5, self.rep5, 8.0),
                       (self.df4, self.rep4, 6.0),
                       (self.df6, self.rep6, 5.0),
                       (self.df1, self.rep1, 4.0),
                       (self.df7, self.rep7, 0.0),
                       (self.df8, self.rep8, 0.0)],
                      scorer.score_all_datafiles())
    self.assertEquals([(self.df7, self.rep7, 0.0),
                       (self.df8, self.rep8, 0.0)],
                      scorer.score_datafiles_in_dataset(self.ds4))
Example 15: testScoringWithTimes
def testScoringWithTimes(self):
    self._setup()
    scorer = MigrationScorer(
        Location.get_location('local').id, {
            'user_priority_weighting': [5.0, 2.0, 1.0, 0.5, 0.2],
            'file_size_weighting': 1.0,
            'file_access_weighting': 1.0,
            'file_age_weighting': 1.0,
            'file_size_threshold': 0,
            'file_access_threshold': 0,
            'file_age_threshold': 1})
    self.assertEquals(0.0, scorer.datafile_score(self.df1))
    f = tempfile.NamedTemporaryFile(dir=settings.FILE_STORE_PATH)
    f.write("Hi Mom!!\n")
    rep = Replica.objects.get(pk=self.rep1.pk)
    rep.url = f.name
    rep.save()
    self.assertEquals(2.0, scorer.datafile_score(self.df1))
    older = time.time() - (60 * 60 * 24 + 300)
    os.utime(f.name, (older, older))
    self.assertEquals(3.0, scorer.datafile_score(self.df1))
    older = time.time() - (60 * 60 * 24 * 2 + 300)
    os.utime(f.name, (older, older))
    self.assertEquals(5.0, scorer.datafile_score(self.df1))
    f.close()