msg = "Invalid time interval: from time '{0}' is after until time '{1}'"
until_time = 0
from_time = int(time.time()) + 100
with AssertRaisesException(
whisper.InvalidTimeInterval(msg.format(from_time, until_time))):
whisper.file_fetch(fh, fromTime=from_time, untilTime=until_time)
# fromTime > now aka metrics from the future
self.assertIsNone(
whisper.file_fetch(fh, fromTime=int(time.time()) + 100,
untilTime=int(time.time()) + 200),
)
# untilTime > oldest time stored in the archive
headers = whisper.info(self.filename)
the_past = int(time.time()) - headers['maxRetention'] - 200
self.assertIsNone(
whisper.file_fetch(fh, fromTime=the_past - 1, untilTime=the_past),
)
# untilTime > now, change untilTime to now
now = int(time.time())
self.assertEqual(
whisper.file_fetch(fh, fromTime=now, untilTime=now + 200, now=now),
((now + 1, now + 2, 1), [None]),
)
def test_set_xfilesfactor(self):
    """
    Create a whisper file
    Update xFilesFactor
    Check if update succeeded
    Check if exceptions get raised with wrong input
    """
    whisper.create(self.filename, [(1, 20)])
    target_xff = 0.42
    info0 = whisper.info(self.filename)
    old_xff = whisper.setXFilesFactor(self.filename, target_xff)
    # the return value should match the old xFilesFactor
    self.assertEqual(info0['xFilesFactor'], old_xff)
    info1 = whisper.info(self.filename)

    # other header information should not change
    self.assertEqual(info0['aggregationMethod'],
                     info1['aggregationMethod'])
    self.assertEqual(info0['maxRetention'], info1['maxRetention'])
    self.assertEqual(info0['archives'], info1['archives'])

    # pack and unpack as a 32-bit float because the on-disk value loses
    # precision, otherwise: AssertionError: 0.20000000298023224 != 0.2
    target_xff = struct.unpack("!f", struct.pack("!f", target_xff))[0]
    self.assertEqual(info1['xFilesFactor'], target_xff)

    with AssertRaisesException(
            whisper.InvalidXFilesFactor('Invalid xFilesFactor zero, not a '
                                        'float')):
        whisper.setXFilesFactor(self.filename, "zero")
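# Outside the test suite the same round-trip looks like this; a minimal
# sketch assuming a hypothetical scratch file.
import struct
import whisper

path = 'example.wsp'  # hypothetical
whisper.create(path, [(60, 1440)])  # 1-minute resolution, one day

old_xff = whisper.setXFilesFactor(path, 0.42)
print('previous xFilesFactor: %s' % old_xff)

# the header stores a 32-bit float, so compare against the rounded value
expected = struct.unpack('!f', struct.pack('!f', 0.42))[0]
assert whisper.info(path)['xFilesFactor'] == expected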
option_parser.add_option('--json', default=False, action='store_true',
                         help="Output results in JSON form")

(options, args) = option_parser.parse_args()

if len(args) < 1:
    option_parser.print_help()
    sys.exit(1)

path = args[0]
if len(args) > 1:
    field = args[1]
else:
    field = None

try:
    info = whisper.info(path)
except whisper.WhisperException as exc:
    raise SystemExit('[ERROR] %s' % str(exc))

info['fileSize'] = os.stat(path).st_size

if field:
    if field not in info:
        print('Unknown field "%s". Valid fields are %s' % (field, ','.join(info)))
        sys.exit(1)
    print(info[field])
    sys.exit(0)

if options.json:
    print(json.dumps(info, indent=2, separators=(',', ': ')))
else:
    # plain-text fallback: print each header field on its own line
    for key, value in info.items():
        print('%s: %s' % (key, value))
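# The same header is available programmatically; a minimal sketch with a
# hypothetical path. The trailing comment shows a representative result.
import json
import os
import whisper

path = 'storage/example.wsp'  # hypothetical
info = whisper.info(path)
info['fileSize'] = os.stat(path).st_size
print(json.dumps(info, indent=2, separators=(',', ': ')))
# e.g. {'maxRetention': 86400, 'xFilesFactor': 0.5,
#       'aggregationMethod': 'average',
#       'archives': [{'offset': 28, 'secondsPerPoint': 60, 'points': 1440,
#                     'retention': 86400, 'size': 17280}],
#       'fileSize': 17308}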
def get_metadata(self, metric, key):
    """
    Get metric metadata
    """
    if key != "aggregationMethod":
        raise ValueError("Invalid metadata key: %s" % key)
    path = self.get_path(metric)
    return whisper.info(path)["aggregationMethod"]
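# A sketch of the matching setter such a class might expose (hypothetical,
# not shown in the snippet above): whisper.setAggregationMethod writes the
# new method into the header and returns the previous one.
def set_metadata(self, metric, key, value):
    """
    Set metric metadata
    """
    if key != "aggregationMethod":
        raise ValueError("Invalid metadata key: %s" % key)
    path = self.get_path(metric)
    return whisper.setAggregationMethod(path, value)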
"""
method to process a given metric, and resize it if necessary
Parameters:
fullPath - full path to the metric whisper file
schemas - carbon storage schemas loaded from config
agg_schemas - carbon storage aggregation schemas load from confg
"""
schema_config_args = ''
schema_file_args = ''
rebuild = False
messages = ''
# get archive info from whisper file
info = whisper.info(fullPath)
# get graphite metric name from fullPath
metric = getMetricFromPath(fullPath)
# loop the carbon-storage schemas
for schema in schemas:
if schema.matches(metric):
# returns secondsPerPoint and points for this schema in tuple format
archive_config = [archive.getTuple() for archive in schema.archives]
break
# loop through the carbon-aggregation schemas
for agg_schema in agg_schemas:
if agg_schema.matches(metric):
xFilesFactor, aggregationMethod = agg_schema.archives
break
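# Hypothetical continuation of processMetric: decide whether the on-disk
# archives match the configured ones. Shown standalone with sample values;
# in the function above, 'info' and 'archive_config' come from the loops.
info = {'archives': [{'secondsPerPoint': 60, 'points': 1440}]}
archive_config = [(60, 1440), (300, 2016)]

existing_config = [(a['secondsPerPoint'], a['points']) for a in info['archives']]
if sorted(existing_config) != sorted(archive_config):
    print('archives differ: %s != %s' % (existing_config, archive_config))
    # a real script would resize/rebuild the whisper file here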
def fill_archives(src, dst, startFrom):
    header = whisper.info(dst)
    archives = header['archives']
    archives = sorted(archives, key=lambda t: t['retention'])

    for archive in archives:
        fromTime = time.time() - archive['retention']
        if fromTime >= startFrom:
            continue

        (timeInfo, values) = whisper.fetch(dst, fromTime, startFrom)
        (start, end, step) = timeInfo
        gapstart = None
        for v in values:
            if not v and not gapstart:
                gapstart = start
            elif v and gapstart:
                # ignore gaps of a single point (single units lost)
                if (start - gapstart) > archive['secondsPerPoint']:
                    fill(src, dst, gapstart - step, start)
                gapstart = None
            start += step
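# Typical invocation (a sketch, file names hypothetical): backfill every
# gap in dst.wsp from src.wsp, starting from now and walking backwards
# through progressively coarser archives.
import time
fill_archives('src.wsp', 'dst.wsp', startFrom=time.time())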
def _read_points(self, path):
    """Return a list of (timestamp, value)."""
    info = whisper.info(path)
    res = []
    if not info:
        return []

    archives = info["archives"]
    with io.open(path, "rb") as f:
        buf = f.read()

    stage0 = True
    for archive in archives:
        offset = archive["offset"]
        stage = bg_metric.Stage(
            precision=archive["secondsPerPoint"],
            points=archive["points"],
            stage0=stage0,
        )
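# The raw points in each archive are fixed-size records: whisper's
# pointFormat is "!Ld", a 4-byte timestamp plus an 8-byte double, i.e.
# 12 bytes per point. A sketch of decoding one archive's slice of 'buf',
# reusing 'archive' and 'res' from the loop above.
import struct

point_size = struct.calcsize(whisper.pointFormat)  # 12
offset = archive["offset"]
for i in range(archive["points"]):
    lo = offset + i * point_size
    timestamp, value = struct.unpack(whisper.pointFormat, buf[lo:lo + point_size])
    if timestamp:  # a zero timestamp marks a slot that was never written
        res.append((timestamp, value))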
for i, (timestamp, value) in enumerate(new_datapoints):
    if timestamp > last_timestamp:
        slice_end = i
        # keep only the points newer than what we already have
        all_datapoints += new_datapoints[i:]
        break
else:
    all_datapoints += new_datapoints

# materialize as lists so bisect can index into them below
# (map() returns a one-shot iterator on Python 3)
oldtimestamps = [p[0] for p in all_datapoints]
oldvalues = [p[1] for p in all_datapoints]

print("oldtimestamps: %s" % oldtimestamps)
# simply cleaning up some used memory
del all_datapoints

new_info = whisper.info(newfile)
new_archives = new_info['archives']

for archive in new_archives:
    step = archive['secondsPerPoint']
    fromTime = now - archive['retention'] + now % step
    untilTime = now + now % step + step
    print("(%s,%s,%s)" % (fromTime, untilTime, step))
    timepoints_to_update = range(fromTime, untilTime, step)
    print("timepoints_to_update: %s" % list(timepoints_to_update))
    newdatapoints = []
    for tinterval in zip(timepoints_to_update[:-1], timepoints_to_update[1:]):
        # TODO: set lo= for 'lefti' based on righti from the previous
        # iteration. Obviously, this is only safe if timepoints_to_update
        # is always increasing. Is it?
        lefti = bisect.bisect_left(oldtimestamps, tinterval[0])
        righti = bisect.bisect_left(oldtimestamps, tinterval[1], lo=lefti)
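# How the two bisect calls bracket the old points that fall inside one
# interval; a tiny self-contained illustration.
import bisect

timestamps = [10, 20, 30, 40, 50]
lefti = bisect.bisect_left(timestamps, 20)             # 1, first index >= 20
righti = bisect.bisect_left(timestamps, 40, lo=lefti)  # 3, first index >= 40
print(timestamps[lefti:righti])                        # [20, 30], points in [20, 40)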
def fill(src, dst, tstart, tstop):
    # fetch range start-stop from src, taking values from the highest
    # precision archive, thus optionally requiring multiple fetch + merges
    srcHeader = whisper.info(src)
    srcArchives = srcHeader['archives']
    srcArchives.sort(key=itemgetter('retention'))

    # find the oldest point in time stored by both files
    srcTime = int(time.time()) - srcHeader['maxRetention']
    if tstart < srcTime and tstop < srcTime:
        return

    # we want to retain as much precision as we can, hence we walk
    # backwards in time, skipping forward at most 'step' points at a time
    for archive in srcArchives:
        # skip over archives that don't have any data points in the
        # requested range
        rtime = time.time() - archive['retention']
        if tstop <= rtime:
            continue
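# A hedged sketch of the copy step that typically follows inside that
# loop, assuming fromTime/untilTime have already been clipped to this
# archive's range: fetch from src and write only non-None values to dst.
(timeInfo, values) = whisper.fetch(src, fromTime, untilTime)
(start, end, step) = timeInfo
pointsToWrite = [
    (t, value)
    for t, value in zip(range(start, end, step), values)
    if value is not None
]
whisper.update_many(dst, pointsToWrite)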
def getIntervals(self):
    start = time.time() - whisper.info(self.fs_path)['maxRetention']
    end = max(os.stat(self.fs_path).st_mtime, start)
    return [(start, end)]
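# Standalone equivalent (path hypothetical): the readable interval runs
# from the oldest retained point to the file's last modification time.
import os
import time
import whisper

path = 'storage/example.wsp'  # hypothetical
start = time.time() - whisper.info(path)['maxRetention']
end = max(os.stat(path).st_mtime, start)
print('readable interval: %d .. %d' % (start, end))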