This article collects typical usage examples of the whisper.info function in Python: what whisper.info does, how to call it, and what real-world code built on it looks like. If those are the questions that brought you here, the curated examples below should help.
The following presents 15 code examples of the info function, sorted by popularity by default.
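Before diving in, here is a minimal sketch of what whisper.info returns. The file name is illustrative; the keys shown are exactly the ones the examples below rely on:

import whisper

whisper.create('example.wsp', [(1, 60), (60, 60)])  # hypothetical path
header = whisper.info('example.wsp')

# header is a plain dict describing the file's metadata
print(header['aggregationMethod'])  # e.g. 'average'
print(header['xFilesFactor'])       # e.g. 0.5
print(header['maxRetention'])       # in seconds
for archive in header['archives']:
    # each archive dict carries 'offset', 'secondsPerPoint', 'points' and 'retention'
    print(archive['secondsPerPoint'], archive['points'], archive['retention'])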
Example 1: test_normal
def test_normal(self):
    whisper.create(self.filename, [(1, 60), (60, 60)])

    whisper.CACHE_HEADERS = True
    whisper.info(self.filename)
    whisper.info(self.filename)
    whisper.CACHE_HEADERS = False
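whisper.CACHE_HEADERS is a module-level flag, so the second info() call above is served from whisper's header cache. When toggling it in your own code, a save/restore pattern (the same one Example 4 below uses for whisper's other globals) avoids leaking the setting; a minimal sketch with a hypothetical path:

import whisper

original_caching = whisper.CACHE_HEADERS
whisper.CACHE_HEADERS = True
try:
    header = whisper.info('example.wsp')  # first call populates the cache
    header = whisper.info('example.wsp')  # second call hits the cache
finally:
    whisper.CACHE_HEADERS = original_caching  # restore the global either way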
Example 2: test_setAggregation
def test_setAggregation(self):
    """Create a db, change aggregation, xFilesFactor, then use info() to validate"""
    retention = [(1, 60), (60, 60)]

    # create a new db with a valid configuration
    whisper.create(self.db, retention)

    # try every AggregationMethod available
    for ag in whisper.aggregationMethods:
        for xff in 0.0, 0.2, 0.4, 0.7, 0.75, 1.0:
            # original xFilesFactor
            info0 = whisper.info(self.db)
            # optional xFilesFactor not passed
            whisper.setAggregationMethod(self.db, ag)

            # original value should not change
            info1 = whisper.info(self.db)
            self.assertEqual(info0['xFilesFactor'], info1['xFilesFactor'])
            # the selected aggregation method should have applied
            self.assertEqual(ag, info1['aggregationMethod'])

            # optional xFilesFactor used
            whisper.setAggregationMethod(self.db, ag, xff)
            # new info should match what we just set it to
            info2 = whisper.info(self.db)
            # packing and unpacking because
            # AssertionError: 0.20000000298023224 != 0.2
            target_xff = struct.unpack("!f", struct.pack("!f", xff))[0]
            self.assertEqual(info2['xFilesFactor'], target_xff)
            # same aggregationMethod assertion again, but double-checking since
            # we are playing with packed values and seek()
            self.assertEqual(ag, info2['aggregationMethod'])

    self._removedb()
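The pack/unpack round-trip above is needed because whisper stores xFilesFactor on disk as a big-endian 32-bit float, so reading it back yields the nearest single-precision value. A small helper (hypothetical name) makes the comparison explicit:

import struct

def as_float32(x):
    # round-trip through whisper's on-disk representation so that
    # 0.2 compares equal to the stored 0.20000000298023224
    return struct.unpack("!f", struct.pack("!f", x))[0]

# usage in the test above: self.assertEqual(info2['xFilesFactor'], as_float32(xff))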
Example 3: ccmerge
def ccmerge(path_from, path_to):
    """Carbon-copy style of merge: instead of propagating datapoints
    from the source WSP file through archives in the destination WSP file,
    each datapoint is copied to the corresponding slot in the corresponding
    archive. Works only with archives having the same retention schema.
    Only datapoints missing in the destination archive are copied
    (i.e. existing datapoints in the destination archive do not get overwritten)."""
    import fcntl
    import mmap
    import os

    # mmap_file() and seriesStart() are helpers defined elsewhere in this module
    map_from = mmap_file(path_from)

    fd = os.open(path_to, os.O_RDWR)
    fcntl.flock(fd, fcntl.LOCK_EX)
    map_to = mmap.mmap(fd, 0, prot=mmap.PROT_WRITE)

    srcHeader = whisper.info(path_from)
    dstHeader = whisper.info(path_to)

    srcArchives = srcHeader['archives']
    dstArchives = dstHeader['archives']

    for srcArchive, dstArchive in zip(srcArchives, dstArchives):
        for p in ('points', 'secondsPerPoint'):
            if srcArchive[p] != dstArchive[p]:
                raise Exception("%s and %s have different %s: %d vs %d"
                                % (path_from, path_to, p, srcArchive[p], dstArchive[p]))

    for srcArchive, dstArchive in zip(srcArchives, dstArchives):
        srcArchiveStart = seriesStart(srcArchive, map_from)
        dstArchiveStart = seriesStart(dstArchive, map_to)

        # source archive is empty => nothing to copy
        if srcArchiveStart is None:
            continue
        # destination archive is empty => does not matter which position we put data in
        if dstArchiveStart is None:
            dstArchiveStart = 0

        # find the difference in alignment
        step = srcArchive['secondsPerPoint']
        # offset in number of datapoints between source and destination archives
        alignmentDiff = (dstArchiveStart - srcArchiveStart) // step

        # iterate through points and copy them; offsets are interchangeable
        # between the two files because the retention schemas must match
        base_offset = srcArchive['offset']
        points = srcArchive['points']
        for pointInSrc in range(points):
            pointInDst = pointInSrc - alignmentDiff
            # archive is circular
            if pointInDst < 0:
                pointInDst += points
            if pointInDst >= points:
                pointInDst -= points

            dstOffset = base_offset + pointInDst * whisper.pointSize
            srcOffset = base_offset + pointInSrc * whisper.pointSize

            (dstTimestamp, dstValue) = whisper.struct.unpack(
                whisper.pointFormat, map_to[dstOffset:dstOffset + whisper.pointSize])
            # the destination archive already has this datapoint, no need to copy
            if dstTimestamp != 0:
                continue

            (srcTimestamp, srcValue) = whisper.struct.unpack(
                whisper.pointFormat, map_from[srcOffset:srcOffset + whisper.pointSize])
            # datapoint is missing in the source archive as well, nothing to copy
            if srcTimestamp == 0:
                continue

            # copy the datapoint
            map_to[dstOffset:dstOffset + whisper.pointSize] = whisper.struct.pack(
                whisper.pointFormat, srcTimestamp, srcValue)

    map_to.flush()
    os.close(fd)
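The slot arithmetic in ccmerge mirrors how whisper itself addresses points: each archive is a circular buffer of fixed-size records. A sketch of mapping a timestamp to a byte offset, assuming the archive's base timestamp is already known (e.g. from the seriesStart() helper above); this is an illustration, not whisper's exact internal code:

import whisper

def point_offset(archive, base_timestamp, timestamp):
    # distance from the base point, wrapped around the circular buffer
    step = archive['secondsPerPoint']
    slot = ((timestamp - base_timestamp) // step) % archive['points']
    return archive['offset'] + slot * whisper.pointSize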
Example 4: test_setAggregation
def test_setAggregation(self):
    """
    Create a db, change aggregation, xFilesFactor, then use info() to validate
    """
    original_lock = whisper.LOCK
    original_caching = whisper.CACHE_HEADERS
    original_autoflush = whisper.AUTOFLUSH

    whisper.LOCK = True
    whisper.AUTOFLUSH = True
    whisper.CACHE_HEADERS = True

    # create a new db with a valid configuration
    whisper.create(self.filename, self.retention)

    with AssertRaisesException(
            whisper.InvalidAggregationMethod(
                'Unrecognized aggregation method: yummy beer')):
        whisper.setAggregationMethod(self.filename, 'yummy beer')

    # try every AggregationMethod available
    for ag in whisper.aggregationMethods:
        for xff in 0.0, 0.2, 0.4, 0.7, 0.75, 1.0:
            # original xFilesFactor
            info0 = whisper.info(self.filename)
            # optional xFilesFactor not passed
            old_ag = whisper.setAggregationMethod(self.filename, ag)
            # should return the old aggregationMethod
            self.assertEqual(old_ag, info0['aggregationMethod'])

            # original value should not change
            info1 = whisper.info(self.filename)
            self.assertEqual(info0['xFilesFactor'], info1['xFilesFactor'])
            # the selected aggregation method should have applied
            self.assertEqual(ag, info1['aggregationMethod'])

            # optional xFilesFactor used
            old_ag = whisper.setAggregationMethod(self.filename, ag, xff)
            # should return the old aggregationMethod
            self.assertEqual(old_ag, info1['aggregationMethod'])
            # new info should match what we just set it to
            info2 = whisper.info(self.filename)
            # packing and unpacking because
            # AssertionError: 0.20000000298023224 != 0.2
            target_xff = struct.unpack("!f", struct.pack("!f", xff))[0]
            self.assertEqual(info2['xFilesFactor'], target_xff)
            # same aggregationMethod assertion again, but double-checking since
            # we are playing with packed values and seek()
            self.assertEqual(ag, info2['aggregationMethod'])

    with SimulatedCorruptWhisperFile():
        with AssertRaisesException(
                whisper.CorruptWhisperFile('Unable to read header', self.filename)):
            whisper.setAggregationMethod(self.filename, ag)

    whisper.LOCK = original_lock
    whisper.AUTOFLUSH = original_autoflush
    whisper.CACHE_HEADERS = original_caching
Example 5: test_info_bogus_file
def test_info_bogus_file(self):
    self.assertIsNone(whisper.info('bogus-file'))

    # Validate "corrupt" whisper metadata
    whisper.create(self.filename, self.retention)
    with SimulatedCorruptWhisperFile():
        with AssertRaisesException(
                whisper.CorruptWhisperFile('Unable to read header', self.filename)):
            whisper.info(self.filename)

    # Validate "corrupt" whisper archive data
    with SimulatedCorruptWhisperFile(corrupt_archive=True):
        with AssertRaisesException(
                whisper.CorruptWhisperFile('Unable to read archive0 metadata', self.filename)):
            whisper.info(self.filename)
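SimulatedCorruptWhisperFile and AssertRaisesException are helpers from whisper's own test suite and are not shown in these snippets. Roughly, AssertRaisesException differs from unittest's assertRaises in that it checks the exception message as well as the type; a sketch of what such a helper could look like:

class AssertRaisesException(object):
    def __init__(self, expected):
        self.expected = expected

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        if exc_type is None:
            raise AssertionError('%r was not raised' % self.expected)
        # require both the exact type and the exact message
        assert exc_type is type(self.expected)
        assert str(exc_value) == str(self.expected)
        return True  # swallow the expected exception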
Example 6: test_create
def test_create(self):
    """Create a db and use info() to validate"""
    retention = [(1, 60), (60, 60)]

    # check that an invalid configuration fails
    with self.assertRaises(whisper.InvalidConfiguration):
        whisper.create(self.db, [])

    # create a new db with a valid configuration
    whisper.create(self.db, retention)

    # attempt to create another db in the same file, this should fail
    with self.assertRaises(whisper.InvalidConfiguration):
        whisper.create(self.db, 0)

    info = whisper.info(self.db)

    # check header information
    self.assertEqual(info['maxRetention'],
                     max([a[0] * a[1] for a in retention]))
    self.assertEqual(info['aggregationMethod'], 'average')
    self.assertEqual(info['xFilesFactor'], 0.5)

    # check archive information
    self.assertEqual(len(info['archives']), len(retention))
    self.assertEqual(info['archives'][0]['points'], retention[0][1])
    self.assertEqual(info['archives'][0]['secondsPerPoint'],
                     retention[0][0])
    self.assertEqual(info['archives'][0]['retention'],
                     retention[0][0] * retention[0][1])
    self.assertEqual(info['archives'][1]['retention'],
                     retention[1][0] * retention[1][1])

    # remove database
    self._removedb()

    # as written, invalid aggregation methods default back to 'average'
    for bad_m in self.bad_methods:
        whisper.create(self.db, retention, xFilesFactor=None, aggregationMethod=bad_m)
        info = whisper.info(self.db)
        self.assertEqual(info['aggregationMethod'], 'average')
        self._removedb()

    for f in self.bad_xff:
        with self.assertRaises(ValueError):
            whisper.create(self.db, retention, xFilesFactor=f)
        # ensure files with bad xFilesFactors are not created
        with self.assertRaises(IOError):
            with open(self.db):
                pass
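self.db, self.bad_methods, self.bad_xff and _removedb() come from the test fixture, which is not shown. A plausible minimal version, with entirely hypothetical fixture values:

import os
import unittest

class WhisperTests(unittest.TestCase):  # hypothetical fixture
    db = 'db.wsp'
    bad_methods = ['linear', 'yummy beer']  # not in whisper.aggregationMethods
    bad_xff = [-1.0, 2.0]                   # outside the valid 0..1 range

    def _removedb(self):
        # clean up the whisper file between sub-cases
        if os.path.exists(self.db):
            os.unlink(self.db)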
Example 7: fetch
def fetch(self, startTime, endTime):
    try:
        data = whisper.fetch(self.fs_path, startTime, endTime)
    except IOError:
        log.exception("Failed fetch of whisper file '%s'" % self.fs_path)
        return None
    if not data:
        return None

    time_info, values = data
    (start, end, step) = time_info

    meta_info = whisper.info(self.fs_path)
    aggregation_method = meta_info['aggregationMethod']
    lowest_step = min([i['secondsPerPoint'] for i in meta_info['archives']])

    # Merge in data from carbon's cache
    cached_datapoints = []
    try:
        cached_datapoints = CarbonLink().query(self.real_metric_path)
    except:
        log.exception("Failed CarbonLink query '%s'" % self.real_metric_path)
        cached_datapoints = []

    if isinstance(cached_datapoints, dict):
        cached_datapoints = cached_datapoints.items()

    values = merge_with_cache(cached_datapoints,
                              start,
                              step,
                              values,
                              aggregation_method)

    return time_info, values
Example 8: test_create_and_info
def test_create_and_info(self):
    """
    Create a db and use info() to validate
    """
    # check that invalid configurations fail
    for retention in (0, []):
        with AssertRaisesException(
                whisper.InvalidConfiguration(
                    'You must specify at least one archive configuration!')):
            whisper.create(self.filename, retention)

    # create a new db with a valid configuration
    whisper.create(self.filename, self.retention)

    # Ensure another file can't be created when one exists already
    with AssertRaisesException(
            whisper.InvalidConfiguration(
                'File {0} already exists!'.format(self.filename))):
        whisper.create(self.filename, self.retention)

    info = whisper.info(self.filename)

    # check header information
    self.assertEqual(info['maxRetention'],
                     max([a[0] * a[1] for a in self.retention]))
    self.assertEqual(info['aggregationMethod'], 'average')
    self.assertEqual(info['xFilesFactor'], 0.5)

    # check archive information
    self.assertEqual(len(info['archives']), len(self.retention))
    self.assertEqual(info['archives'][0]['points'], self.retention[0][1])
    self.assertEqual(info['archives'][0]['secondsPerPoint'],
                     self.retention[0][0])
    self.assertEqual(info['archives'][0]['retention'],
                     self.retention[0][0] * self.retention[0][1])
    self.assertEqual(info['archives'][1]['retention'],
                     self.retention[1][0] * self.retention[1][1])
Example 9: fill_archives
def fill_archives(src, dst, startFrom):
    header = whisper.info(dst)
    archives = header['archives']
    archives = sorted(archives, key=lambda t: t['retention'])

    for archive in archives:
        fromTime = time.time() - archive['retention']
        if fromTime >= startFrom:
            continue

        (timeInfo, values) = whisper.fetch(dst, fromTime, startFrom)
        (start, end, step) = timeInfo
        gapstart = None
        for v in values:
            if not v and not gapstart:
                gapstart = start
            elif v and gapstart:
                # ignore single units lost
                if (start - gapstart) > archive['secondsPerPoint']:
                    fill(src, dst, gapstart - step, start)
                gapstart = None
            elif gapstart and start == end - step:
                fill(src, dst, gapstart - step, start)
            start += step

        startFrom = fromTime
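fill_archives comes from whisper-fill style tooling: it walks the destination's archives from shortest retention to longest and backfills only the gaps, delegating the actual copying of each time range to a fill() helper defined elsewhere. A typical (hypothetical) invocation:

import time

# backfill everything dst.wsp is missing from src.wsp, starting at "now"
fill_archives('src.wsp', 'dst.wsp', time.time())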
Example 10: _read_points
def _read_points(self, path):
    """Return a list of (timestamp, value, count, stage) tuples."""
    info = whisper.info(path)
    res = []
    if not info:
        return []

    archives = info["archives"]
    with io.open(path, "rb") as f:
        buf = f.read()

    stage0 = True
    for archive in archives:
        offset = archive["offset"]
        stage = bg_metric.Stage(
            precision=archive["secondsPerPoint"],
            points=archive["points"],
            stage0=stage0,
        )
        stage0 = False
        if stage in self._opts.ignored_stages:
            continue

        for _ in range(archive["points"]):
            timestamp, value = _POINT_STRUCT.unpack_from(buf, offset)
            offset += whisper.pointSize
            if timestamp == 0:
                continue
            elif self.time_start <= timestamp <= self.time_end:
                res.append((timestamp, value, 1, stage))
    return res
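Both this example and Example 13 reference a module-level _POINT_STRUCT that is not shown. Since they advance the offset by whisper.pointSize, the likely definition matches whisper's on-disk point format, a big-endian 32-bit timestamp followed by a 64-bit float; this is an assumption based on that usage:

import struct
import whisper

_POINT_STRUCT = struct.Struct(whisper.pointFormat)  # pointFormat is "!Ld"
assert _POINT_STRUCT.size == whisper.pointSize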
Example 11: test_create
def test_create(self):
    """Create a db and use info() to validate"""
    retention = [(1, 60), (60, 60)]

    # check that an invalid configuration fails
    with self.assertRaises(whisper.InvalidConfiguration):
        whisper.create(self.db, [])

    # create a new db with a valid configuration
    whisper.create(self.db, retention)

    # attempt to create another db in the same file, this should fail
    with self.assertRaises(whisper.InvalidConfiguration):
        whisper.create(self.db, 0)

    info = whisper.info(self.db)

    # check header information
    self.assertEqual(info['maxRetention'],
                     max([a[0] * a[1] for a in retention]))
    self.assertEqual(info['aggregationMethod'], 'average')
    self.assertEqual(info['xFilesFactor'], 0.5)

    # check archive information
    self.assertEqual(len(info['archives']), len(retention))
    self.assertEqual(info['archives'][0]['points'], retention[0][1])
    self.assertEqual(info['archives'][0]['secondsPerPoint'],
                     retention[0][0])
    self.assertEqual(info['archives'][0]['retention'],
                     retention[0][0] * retention[0][1])
    self.assertEqual(info['archives'][1]['retention'],
                     retention[1][0] * retention[1][1])

    # remove database
    self._removedb()
Example 12: fetch
def fetch(self, startTime, endTime):
    data = whisper.fetch(self.fs_path, startTime, endTime)
    if not data:
        return None

    time_info, values = data
    (start, end, step) = time_info

    meta_info = whisper.info(self.fs_path)
    lowest_step = min([i['secondsPerPoint'] for i in meta_info['archives']])

    # Merge in data from carbon's cache
    cached_datapoints = []
    try:
        # only merge cached data when fetching from the finest archive
        if step == lowest_step:
            cached_datapoints = CarbonLink.query(self.real_metric_path)
    except:
        log.exception("Failed CarbonLink query '%s'" % self.real_metric_path)
        cached_datapoints = []

    if isinstance(cached_datapoints, dict):
        cached_datapoints = cached_datapoints.items()

    for (timestamp, value) in cached_datapoints:
        interval = timestamp - (timestamp % step)
        try:
            i = int((interval - start) // step)
            values[i] = value
        except:
            pass

    return (time_info, values)
Example 13: _read_points
def _read_points(path):
    """Return a list of (timestamp, value)."""
    info = whisper.info(path)
    res = []
    if not info:
        return []

    archives = info["archives"]
    with open(path, "rb") as f:  # points are binary data, read in binary mode
        buf = f.read()

    for archive in archives:
        offset = archive["offset"]
        step = archive["secondsPerPoint"]
        expected_next_timestamp = 0

        for _ in range(archive["points"]):
            timestamp, val = _POINT_STRUCT.unpack_from(buf, offset)
            # Detect holes in data. The heuristic is the following:
            # - If a value is non-zero, it is assumed to be meaningful.
            # - If it is a zero with a fresh timestamp relative to the last
            #   time we saw meaningful data, it is assumed to be meaningful.
            # So it unfortunately skips leading zeroes after a gap.
            if timestamp != expected_next_timestamp and val == 0:
                expected_next_timestamp += step
                continue
            else:
                expected_next_timestamp = timestamp + step
            res.append((timestamp, val))
            offset += whisper.pointSize
    return res
Example 14: fetch
def fetch(self, startTime, endTime):
    data = whisper.fetch(self.fs_path, startTime, endTime)
    if not data:
        return None

    consolidationFunc = ""
    whisper_info = whisper.info(self.fs_path)
    if "aggregationMethod" in whisper_info:
        aggregationMethod = whisper_info["aggregationMethod"]
        if aggregationMethod == 'min' or aggregationMethod == 'max':
            consolidationFunc = aggregationMethod

    time_info, values = data
    (start, end, step) = time_info

    # Merge in data from carbon's cache
    try:
        cached_datapoints = CarbonLink.query(self.real_metric_path)
    except:
        log.exception("Failed CarbonLink query '%s'" % self.real_metric_path)
        cached_datapoints = []

    for (timestamp, value) in cached_datapoints:
        interval = timestamp - (timestamp % step)
        try:
            i = int((interval - start) // step)
            values[i] = value
        except:
            pass

    return (time_info, values, consolidationFunc)
Example 15: test_file_fetch_edge_cases
def test_file_fetch_edge_cases(self):
    """
    Test some of the edge cases in file_fetch() that should return
    None or raise an exception
    """
    whisper.create(self.filename, [(1, 60)])
    with open(self.filename, 'rb') as fh:
        msg = "Invalid time interval: from time '{0}' is after until time '{1}'"
        until_time = 0
        from_time = int(time.time()) + 100

        with AssertRaisesException(
                whisper.InvalidTimeInterval(msg.format(from_time, until_time))):
            whisper.file_fetch(fh, fromTime=from_time, untilTime=until_time)

        # fromTime > now, aka metrics from the future
        self.assertIsNone(
            whisper.file_fetch(fh, fromTime=int(time.time()) + 100,
                               untilTime=int(time.time()) + 200),
        )

        # untilTime before the oldest timestamp the archive can still hold
        headers = whisper.info(self.filename)
        the_past = int(time.time()) - headers['maxRetention'] - 200
        self.assertIsNone(
            whisper.file_fetch(fh, fromTime=the_past - 1, untilTime=the_past),
        )

        # untilTime > now: untilTime gets clamped to now
        now = int(time.time())
        self.assertEqual(
            whisper.file_fetch(fh, fromTime=now, untilTime=now + 200, now=now),
            ((now + 1, now + 2, 1), [None]),
        )
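For contrast with the edge cases above, a well-formed file_fetch() call returns ((start, end, step), values). A minimal sketch with a hypothetical path:

import time
import whisper

whisper.create('example.wsp', [(1, 60)])  # hypothetical path
whisper.update('example.wsp', 42.0)       # write one datapoint at "now"

with open('example.wsp', 'rb') as fh:
    (start, end, step), values = whisper.file_fetch(
        fh, fromTime=int(time.time()) - 30, untilTime=int(time.time()))

# step is 1 second here; most slots are None, one holds 42.0
print(step, len(values), values.count(None))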