This article collects typical usage examples of the Python method pthelma.timeseries.Timeseries.read. If you are unsure what Timeseries.read does, how to call it, or what it looks like in practice, the curated examples below should help. You can also explore further usage examples of the class it belongs to, pthelma.timeseries.Timeseries.
The following shows 9 code examples of Timeseries.read, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
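Before going through the individual examples, here is a minimal sketch of the pattern they all share: construct a Timeseries (optionally with a time series id) and pass read() a file-like object containing the records. The id and sample records are hypothetical, and the "date,value,flags" line format is an assumption based on pthelma's plain-text time series format, not taken from the examples below.

# Minimal sketch (not from the examples below): basic Timeseries.read usage.
# The time series id, the sample records, and the "date,value,flags" line
# format are assumptions based on pthelma's plain-text format.
from io import StringIO

from pthelma.timeseries import Timeseries

records = StringIO("2014-01-01 08:00,10.0,\n"
                   "2014-01-02 08:00,11.5,\n")
ts = Timeseries(1234)   # hypothetical time series id
ts.read(records)        # parse the records into the Timeseries object
print(len(ts))          # number of records loaded (2 here)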
Example 1: setUp
# Required module import: from pthelma.timeseries import Timeseries [as alias]
# Or: from pthelma.timeseries.Timeseries import read [as alias]
def setUp(self):
    parms = settings.BITIA_TEST_ENHYDRIS_INSTALLATION
    self.cookies = enhydris_api.login(parms['base_url'],
                                      parms['user'],
                                      parms['password'])
    # Create two time series
    j = {
        'gentity': parms['station_id'],
        'variable': parms['variable_id'],
        'unit_of_measurement': parms['unit_of_measurement_id'],
        'time_zone': parms['time_zone_id'],
    }
    self.ts1_id = enhydris_api.post_model(
        parms['base_url'], self.cookies, 'Timeseries', j)
    self.ts2_id = enhydris_api.post_model(
        parms['base_url'], self.cookies, 'Timeseries', j)
    assert self.ts1_id != self.ts2_id
    # Add some data (all but the last record) to the database
    ts = Timeseries(self.ts1_id)
    ts.read(StringIO(self.timeseries1_top))
    enhydris_api.post_tsdata(parms['base_url'], self.cookies, ts)
    ts = Timeseries(self.ts2_id)
    ts.read(StringIO(self.timeseries2_top))
    enhydris_api.post_tsdata(parms['base_url'], self.cookies, ts)
    # Temporary directory for cache files
    self.tempdir = tempfile.mkdtemp()
Example 2: setUp
# Required module import: from pthelma.timeseries import Timeseries [as alias]
# Or: from pthelma.timeseries.Timeseries import read [as alias]
def setUp(self):
    self.parms = json.loads(os.getenv('PTHELMA_TEST_ENHYDRIS_API'))
    self.cookies = enhydris_api.login(self.parms['base_url'],
                                      self.parms['user'],
                                      self.parms['password'])
    # Create two time series
    j = {
        'gentity': self.parms['station_id'],
        'variable': self.parms['variable_id'],
        'unit_of_measurement': self.parms['unit_of_measurement_id'],
        'time_zone': self.parms['time_zone_id'],
        'time_step': 3,
        'timestamp_offset_minutes': 0,
        'timestamp_offset_months': 0,
        'remarks': 'Très importante',
    }
    self.ts1_id = enhydris_api.post_model(
        self.parms['base_url'], self.cookies, 'Timeseries', j)
    self.ts2_id = enhydris_api.post_model(
        self.parms['base_url'], self.cookies, 'Timeseries', j)
    assert self.ts1_id != self.ts2_id
    # Add some data (all but the last record) to the database
    ts = Timeseries(self.ts1_id)
    ts.read(StringIO(self.timeseries1_top))
    enhydris_api.post_tsdata(self.parms['base_url'], self.cookies, ts)
    ts = Timeseries(self.ts2_id)
    ts.read(StringIO(self.timeseries2_top))
    enhydris_api.post_tsdata(self.parms['base_url'], self.cookies, ts)
    # Temporary directory for cache files
    self.tempdir = tempfile.mkdtemp()
    self.savedcwd = os.getcwd()
    os.chdir(self.tempdir)
Example 3: test_get_ts_end_date
# Required module import: from pthelma.timeseries import Timeseries [as alias]
# Or: from pthelma.timeseries.Timeseries import read [as alias]
def test_get_ts_end_date(self):
    v = json.loads(os.getenv('PTHELMA_TEST_ENHYDRIS_API'))
    cookies = enhydris_api.login(v['base_url'], v['user'], v['password'])
    # Create a time series in the database
    j = {
        'gentity': v['station_id'],
        'variable': v['variable_id'],
        'unit_of_measurement': v['unit_of_measurement_id'],
        'time_zone': v['time_zone_id'],
    }
    ts_id = enhydris_api.post_model(v['base_url'], cookies, 'Timeseries',
                                    j)
    # Get its last date while it has no data
    date = enhydris_api.get_ts_end_date(v['base_url'], cookies, ts_id)
    self.assertEqual(date.isoformat(), '0001-01-01T00:00:00')
    # Now upload some data
    ts = Timeseries(ts_id)
    ts.read(StringIO(self.test_timeseries))
    enhydris_api.post_tsdata(v['base_url'], cookies, ts)
    # Get its last date
    date = enhydris_api.get_ts_end_date(v['base_url'], cookies, ts_id)
    self.assertEqual(date.isoformat(), '2014-01-05T08:00:00')
    # Get the last date of a nonexistent time series
    self.assertRaises(requests.HTTPError, enhydris_api.get_ts_end_date,
                      v['base_url'], cookies, ts_id + 1)
Example 4: check
# Required module import: from pthelma.timeseries import Timeseries [as alias]
# Or: from pthelma.timeseries.Timeseries import read [as alias]
def check(self, datadir):
    for parm in self.parameters:
        if not parm['ts_id']:
            continue
        actual_ts = Timeseries(parm['ts_id'])
        enhydris_api.read_tsdata(self.base_url, self.cookies, actual_ts)
        reference_ts = Timeseries()
        with open(os.path.join(
                datadir, 'generated', parm['expname'] + '.txt')) as f:
            reference_ts.read(f)
            precision = self.guess_precision(f)
        self.assertTimeseriesEqual(actual_ts, reference_ts, precision,
                                   parm['expname'] + '.txt')
Example 5: put
# Required module import: from pthelma.timeseries import Timeseries [as alias]
# Or: from pthelma.timeseries.Timeseries import read [as alias]
def put(self, request, pk, format=None):
    try:
        ts = Timeseries(id=int(pk))
        self.check_object_permissions(request, ts)
        result_if_error = status.HTTP_400_BAD_REQUEST
        ts.read(StringIO(request.DATA['timeseries_records']))
        result_if_error = status.HTTP_409_CONFLICT
        ts.append_to_db(connection, commit=False)
        return HttpResponse(str(len(ts)), content_type="text/plain")
    except ValueError as e:
        return HttpResponse(status=result_if_error,
                            content=str(e),
                            content_type="text/plain")
Example 6: test_update_timeseries_cache
# Required module import: from pthelma.timeseries import Timeseries [as alias]
# Or: from pthelma.timeseries.Timeseries import read [as alias]
def test_update_timeseries_cache(self):
    parms = settings.BITIA_TEST_ENHYDRIS_INSTALLATION
    bitia_timeseries_groups = {
        'one': [{'base_url': parms['base_url'],
                 'id': self.ts1_id,
                 'user': parms['user'],
                 'password': parms['password'],
                 },
                {'base_url': parms['base_url'],
                 'id': self.ts2_id,
                 'user': parms['user'],
                 'password': parms['password'],
                 },
                ],
    }
    with self.settings(BITIA_TIMESERIES_CACHE_DIR=self.tempdir,
                       BITIA_TIMESERIES_GROUPS=bitia_timeseries_groups):
        # Cache the two timeseries
        update_timeseries_cache()
        # Check that the cached stuff is what it should be
        file1, file2 = [os.path.join(self.tempdir, '{}.hts'.format(x))
                        for x in (self.ts1_id, self.ts2_id)]
        with open(file1) as f:
            self.assertEqual(f.read().replace('\r', ''),
                             self.timeseries1_top)
        with open(file2) as f:
            self.assertEqual(f.read().replace('\r', ''),
                             self.timeseries2_top)
        # Append a record to the database for each timeseries
        ts = Timeseries(self.ts1_id)
        ts.read(StringIO(self.timeseries1_bottom))
        enhydris_api.post_tsdata(parms['base_url'], self.cookies, ts)
        ts = Timeseries(self.ts2_id)
        ts.read(StringIO(self.timeseries2_bottom))
        enhydris_api.post_tsdata(parms['base_url'], self.cookies, ts)
        # Update the cache
        update_timeseries_cache()
        # Check that the cached stuff is what it should be
        file1, file2 = [os.path.join(self.tempdir, '{}.hts'.format(x))
                        for x in (self.ts1_id, self.ts2_id)]
        with open(file1) as f:
            self.assertEqual(f.read().replace('\r', ''),
                             self.test_timeseries1)
        with open(file2) as f:
            self.assertEqual(f.read().replace('\r', ''),
                             self.test_timeseries2)
Example 7: test_post_tsdata
# Required module import: from pthelma.timeseries import Timeseries [as alias]
# Or: from pthelma.timeseries.Timeseries import read [as alias]
def test_post_tsdata(self):
    v = json.loads(os.getenv('PTHELMA_TEST_ENHYDRIS_API'))
    cookies = enhydris_api.login(v['base_url'], v['user'], v['password'])
    # Create a time series in the database
    j = {
        'gentity': v['station_id'],
        'variable': v['variable_id'],
        'unit_of_measurement': v['unit_of_measurement_id'],
        'time_zone': v['time_zone_id'],
    }
    ts_id = enhydris_api.post_model(v['base_url'], cookies, 'Timeseries',
                                    j)
    # Now upload some data
    ts = Timeseries(ts_id)
    ts.read(StringIO(self.test_timeseries_top))
    enhydris_api.post_tsdata(v['base_url'], cookies, ts)
    # Read and check the time series
    url = enhydris_api.urljoin(v['base_url'],
                               'timeseries/d/{}/download/'.format(ts.id))
    r = requests.get(url, cookies=cookies)
    r.raise_for_status()
    self.assertEqual(get_after_blank_line(r.text),
                     self.test_timeseries_top)
    # Upload more data
    ts = Timeseries(ts_id)
    ts.read(StringIO(self.test_timeseries_bottom))
    enhydris_api.post_tsdata(v['base_url'], cookies, ts)
    # Read and check the time series
    url = enhydris_api.urljoin(v['base_url'],
                               'timeseries/d/{}/download/'.format(ts.id))
    r = requests.get(url, cookies=cookies)
    r.raise_for_status()
    self.assertEqual(get_after_blank_line(r.text),
                     self.test_timeseries)
Example 8: TestDst
# Required module import: from pthelma.timeseries import Timeseries [as alias]
# Or: from pthelma.timeseries.Timeseries import read [as alias]
class TestDst(TestCase):
    datafiledict = {'date_format': '%Y-%m-%dT%H:%M',
                    'timezone': 'Europe/Athens'}

    def setUp(self):
        get_server_from_env(self.__dict__)
        self.ref_ts = Timeseries(0)
        if not self.base_url:
            return
        self.cookies = enhydris_api.login(self.base_url, self.user,
                                          self.password)
        self.timeseries_id = create_timeseries(self.cookies, self.__dict__)
        self.ts = Timeseries(self.timeseries_id)

    def tearDown(self):
        if not self.base_url:
            return
        enhydris_api.delete_model(self.base_url, self.cookies,
                                  'Timeseries', self.timeseries_id)

    def run_test(self):
        if not self.base_url:
            return
        d = {'filename': full_testdata_filename(self.filename),
             'datafile_fields': str(self.timeseries_id),
             'datafile_format': 'irrelevant'}
        d.update(self.datafiledict)
        df = Datafile_simple(self.base_url, self.cookies, d)
        df.update_database()
        enhydris_api.read_tsdata(self.base_url, self.cookies, self.ts)
        self.assertEqual(len(self.ts), len(self.ref_ts))
        (items, ritems) = [x.items() for x in (self.ts, self.ref_ts)]
        for item, ritem in zip(items, ritems):
            self.assertEqual(item[0], ritem[0])
            self.assertAlmostEqual(item[1], ritem[1], 4)
            self.assertEqual(item[1].flags, ritem[1].flags)

    @skipUnless(os.getenv('PTHELMA_TEST_ENHYDRIS_API'), "see above")
    def test_to_dst(self):
        self.filename = 'data_at_change_to_dst.txt'
        with open(full_testdata_filename('timeseries_at_change_to_dst.txt')
                  ) as f:
            self.ref_ts.read(f)
        self.run_test()

    @skipUnless(os.getenv('PTHELMA_TEST_ENHYDRIS_API'), "see above")
    def test_from_dst(self):
        self.filename = 'data_at_change_from_dst.txt'
        with open(full_testdata_filename('timeseries_at_change_from_dst.txt')
                  ) as f:
            self.ref_ts.read(f)
        self.run_test()

    def test_fix_dst(self):
        d = {'filename': 'irrelevant',
             'datafile_fields': '0',
             'datafile_format': 'irrelevant'}
        d.update(self.datafiledict)
        df = Datafile_simple('http://irrelevant/', {}, d)
        self.assertEqual(df._fix_dst(datetime(2012, 10, 28, 2, 59)),
                         datetime(2012, 10, 28, 1, 59))
        self.assertEqual(df._fix_dst(datetime(2012, 10, 28, 3, 00)),
                         datetime(2012, 10, 28, 3, 00))
        self.assertEqual(df._fix_dst(datetime(2012, 10, 28, 4, 00)),
                         datetime(2012, 10, 28, 4, 00))
        # Now we pretend that the switch from dst hasn't occurred yet.
        # This is the only case when loggertodb should assume that
        # ambiguous times refer to before the switch.
        athens = pytz.timezone('Europe/Athens')
        now = athens.localize(datetime(2012, 10, 28, 3, 59), is_dst=True)
        self.assertEqual(df._fix_dst(datetime(2012, 10, 28, 2, 59), now=now),
                         datetime(2012, 10, 28, 1, 59))
        self.assertEqual(df._fix_dst(datetime(2012, 10, 28, 3, 00), now=now),
                         datetime(2012, 10, 28, 2, 00))
        self.assertEqual(df._fix_dst(datetime(2012, 10, 28, 4, 00), now=now),
                         datetime(2012, 10, 28, 4, 00))
        # Once more; the switch from DST has just occurred; now it
        # should be assumed that ambiguous times refer to after the
        # switch.
        now = athens.localize(datetime(2012, 10, 28, 3, 0), is_dst=False)
        self.assertEqual(df._fix_dst(datetime(2012, 10, 28, 2, 59), now=now),
                         datetime(2012, 10, 28, 1, 59))
        self.assertEqual(df._fix_dst(datetime(2012, 10, 28, 3, 00), now=now),
                         datetime(2012, 10, 28, 3, 00))
        self.assertEqual(df._fix_dst(datetime(2012, 10, 28, 4, 00), now=now),
                         datetime(2012, 10, 28, 4, 00))
Example 9: test_update
# Required module import: from pthelma.timeseries import Timeseries [as alias]
# Or: from pthelma.timeseries.Timeseries import read [as alias]
def test_update(self):
    self.parms = json.loads(os.getenv('PTHELMA_TEST_ENHYDRIS_API'))
    timeseries_group = [{'base_url': self.parms['base_url'],
                         'id': self.ts1_id,
                         'user': self.parms['user'],
                         'password': self.parms['password'],
                         'file': 'file1',
                         },
                        {'base_url': self.parms['base_url'],
                         'id': self.ts2_id,
                         'user': self.parms['user'],
                         'password': self.parms['password'],
                         'file': 'file2',
                         },
                        ]
    # Cache the two timeseries
    cache = TimeseriesCache(timeseries_group)
    cache.update()
    # Check that the cached stuff is what it should be
    with open('file1') as f:
        ts1_before = Timeseries()
        ts1_before.read_file(f)
    self.assertEqual(ts1_before.time_step.length_minutes, 1440)
    self.assertEqual(ts1_before.time_step.length_months, 0)
    c = StringIO()
    ts1_before.write(c)
    self.assertEqual(c.getvalue().replace('\r', ''),
                     self.timeseries1_top)
    with open('file2') as f:
        ts2_before = Timeseries()
        ts2_before.read_file(f)
    self.assertEqual(ts2_before.time_step.length_minutes, 1440)
    self.assertEqual(ts2_before.time_step.length_months, 0)
    c = StringIO()
    ts2_before.write(c)
    self.assertEqual(c.getvalue().replace('\r', ''),
                     self.timeseries2_top)
    # Append a record to the database for each timeseries
    ts = Timeseries(self.ts1_id)
    ts.read(StringIO(self.timeseries1_bottom))
    enhydris_api.post_tsdata(self.parms['base_url'], self.cookies, ts)
    ts = Timeseries(self.ts2_id)
    ts.read(StringIO(self.timeseries2_bottom))
    enhydris_api.post_tsdata(self.parms['base_url'], self.cookies, ts)
    # Update the cache
    cache.update()
    # Check that the cached stuff is what it should be
    with open('file1') as f:
        ts1_after = Timeseries()
        ts1_after.read_file(f)
    self.assertEqual(ts1_after.time_step.length_minutes, 1440)
    self.assertEqual(ts1_after.time_step.length_months, 0)
    c = StringIO()
    ts1_after.write(c)
    self.assertEqual(c.getvalue().replace('\r', ''),
                     self.test_timeseries1)
    with open('file2') as f:
        ts2_after = Timeseries()
        ts2_after.read_file(f)
    self.assertEqual(ts2_after.time_step.length_minutes, 1440)
    self.assertEqual(ts2_after.time_step.length_months, 0)
    c = StringIO()
    ts2_after.write(c)
    self.assertEqual(c.getvalue().replace('\r', ''),
                     self.test_timeseries2)
    # Check that the time series comments are the same before and after
    self.assertEqual(ts1_before.comment, ts1_after.comment)
    self.assertEqual(ts2_before.comment, ts2_after.comment)