# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# NOTE(review): fragment of a whisper test method — the enclosing `def`, and
# `self.filename` / `self.retention` / `AssertRaisesException`, live outside
# this view. TODO confirm against the full test case.
# Save module globals so they can presumably be restored after the test.
# NOTE(review): whisper.LOCK is overwritten below but its original value is
# never captured here — confirm it is restored elsewhere.
original_caching = whisper.CACHE_HEADERS
original_autoflush = whisper.AUTOFLUSH
whisper.LOCK = True
whisper.AUTOFLUSH = True
whisper.CACHE_HEADERS = True
# create a new db with a valid configuration
whisper.create(self.filename, self.retention)
# An unrecognized method must raise InvalidAggregationMethod with this message.
with AssertRaisesException(
whisper.InvalidAggregationMethod(
'Unrecognized aggregation method: yummy beer')):
whisper.setAggregationMethod(self.filename, 'yummy beer')
# set setting every AggregationMethod available
for ag in whisper.aggregationMethods:
# NOTE(review): xff is not used in the visible body — presumably consumed
# further down in the truncated part of this loop. TODO confirm.
for xff in 0.0, 0.2, 0.4, 0.7, 0.75, 1.0:
# original xFilesFactor
info0 = whisper.info(self.filename)
# optional xFilesFactor not passed
old_ag = whisper.setAggregationMethod(self.filename, ag)
# should return old aggregationmethod
self.assertEqual(old_ag, info0['aggregationMethod'])
# original value should not change
info1 = whisper.info(self.filename)
self.assertEqual(info0['xFilesFactor'], info1['xFilesFactor'])
# the selected aggregation method should have applied
self.assertEqual(ag, info1['aggregationMethod'])
# --- Fragment: rrd2whisper-style conversion script prologue ---
import optparse
# rrdtool is a hard requirement here: abort with a clear message if missing.
try:
import rrdtool
except ImportError as exc:
raise SystemExit('[ERROR] Missing dependency: %s' % str(exc))
try:
import whisper
except ImportError:
raise SystemExit('[ERROR] Please make sure whisper is installed properly')
# Ignore SIGPIPE
# NOTE(review): SIG_DFL restores the *default* disposition (die on SIGPIPE)
# rather than ignoring it (SIG_IGN) — the comment and the code disagree;
# confirm which is intended.
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
# Drop the whisper aggregation methods that have no RRD consolidation-function
# equivalent. NOTE(review): this aliases and mutates
# whisper.aggregationMethods in place — confirm no other code relies on the
# full list afterwards.
aggregationMethods = whisper.aggregationMethods
# RRD doesn't have a 'sum' or 'total' type
aggregationMethods.remove('sum')
# RRD doesn't have a 'absmax' type
aggregationMethods.remove('absmax')
# RRD doesn't have a 'absmin' type
aggregationMethods.remove('absmin')
option_parser = optparse.OptionParser(usage='''%prog rrd_path''')
option_parser.add_option(
'--xFilesFactor',
help="The xFilesFactor to use in the output file. " +
"Defaults to the input RRD's xFilesFactor",
default=None,
type='float')
# NOTE(review): fragment is truncated here — the add_option call below is
# never closed in the visible text.
option_parser.add_option(
# --- Fragment: whisper-create style script — builds the CLI option parser ---
option_parser = optparse.OptionParser(
usage='''%prog path timePerPoint:timeToStore [timePerPoint:timeToStore]*
%prog --estimate timePerPoint:timeToStore [timePerPoint:timeToStore]*
timePerPoint and timeToStore specify lengths of time, for example:
60:1440 60 seconds per datapoint, 1440 datapoints = 1 day of retention
15m:8 15 minutes per datapoint, 8 datapoints = 2 hours of retention
1h:7d 1 hour per datapoint, 7 days of retention
12h:2y 12 hours per datapoint, 2 years of retention
''')
option_parser.add_option('--xFilesFactor', default=0.5, type='float')
option_parser.add_option('--aggregationMethod', default='average',
type='string',
help="Function to use when aggregating values (%s)" %
', '.join(whisper.aggregationMethods))
option_parser.add_option('--overwrite', default=False, action='store_true')
# --estimate reports storage requirements instead of creating a file.
option_parser.add_option('--estimate', default=False, action='store_true',
help="Don't create a whisper file, estimate storage "
"requirements based on archive definitions")
option_parser.add_option('--sparse', default=False, action='store_true',
help="Create new whisper as sparse file")
option_parser.add_option('--fallocate', default=False, action='store_true',
help="Create new whisper and use fallocate")
(options, args) = option_parser.parse_args()
if options.estimate:
# At least one retention spec is required even in estimate mode.
if len(args) == 0:
option_parser.print_usage()
sys.exit(1)
# NOTE(review): fragment truncated — presumably a single comma-separated
# retention string is split into multiple specs below. TODO confirm.
if len(args) == 1 and args[0].find(",") > 0:
# --- Fragment: whisper-set-aggregation-method style script ---
import optparse
try:
import whisper
except ImportError:
raise SystemExit('[ERROR] Please make sure whisper is installed properly')
# Ignore SIGPIPE
# NOTE(review): SIG_DFL restores the default action rather than ignoring the
# signal (that would be SIG_IGN) — comment and code disagree; confirm intent.
try:
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
except AttributeError:
#windows? (SIGPIPE does not exist on Windows)
pass
option_parser = optparse.OptionParser(
usage='%%prog path <%s> [xFilesFactor]' % '|'.join(whisper.aggregationMethods))
(options, args) = option_parser.parse_args()
# Require at least path and aggregation method; xFilesFactor is optional.
if len(args) < 2:
option_parser.print_help()
sys.exit(1)
path = args[0]
aggregationMethod = args[1]
xFilesFactor = None
if len(args) == 3:
xFilesFactor = args[2]
# NOTE(review): fragment truncated — the matching `except` for this `try`
# is outside the visible text.
try:
oldAggregationMethod = whisper.setAggregationMethod(path, aggregationMethod, xFilesFactor)
timePerPoint and timeToStore specify lengths of time, for example:
60:1440 60 seconds per datapoint, 1440 datapoints = 1 day of retention
15m:8 15 minutes per datapoint, 8 datapoints = 2 hours of retention
1h:7d 1 hour per datapoint, 7 days of retention
12h:2y 12 hours per datapoint, 2 years of retention
''')
# NOTE(review): the lines above are the tail of a triple-quoted usage string
# whose opening (and the OptionParser call that consumes it) lies before this
# fragment — no comments can be placed inside it without altering the string.
# This appears to be a whisper-resize style CLI. TODO confirm.
option_parser.add_option(
'--xFilesFactor', default=None,
type='float', help="Change the xFilesFactor")
option_parser.add_option(
'--aggregationMethod', default=None,
type='string', help="Change the aggregation function (%s)" %
', '.join(whisper.aggregationMethods))
option_parser.add_option(
'--force', default=False, action='store_true',
help="Perform a destructive change")
option_parser.add_option(
'--newfile', default=None, action='store',
help="Create a new database file without removing the existing one")
option_parser.add_option(
'--nobackup', action='store_true',
help='Delete the .bak file after successful execution')
option_parser.add_option(
'--aggregate', action='store_true',
help='Try to aggregate the values to fit the new archive better.'
' Note that this will make things slower and use more memory.')
(options, args) = option_parser.parse_args()
# Build one schema entry per section of the aggregation config file.
# NOTE(review): fragment of a larger loader — `config`, `log`, `schemaList`
# and the DefaultSchema/PatternSchema/ListSchema classes are defined outside
# this view. Indentation reconstructed; original had it stripped.
for section in config.sections():
    options = dict(config.items(section))
    matchAll = options.get('match-all')
    pattern = options.get('pattern')
    listName = options.get('list')
    xFilesFactor = options.get('xfilesfactor')
    aggregationMethod = options.get('aggregationmethod')
    try:
        if xFilesFactor is not None:
            # float() raises ValueError on malformed input, handled below.
            xFilesFactor = float(xFilesFactor)
            # Explicit raise instead of `assert`: asserts are stripped when
            # Python runs with -O, silently disabling this validation.
            if not 0 <= xFilesFactor <= 1:
                raise ValueError("xFilesFactor must be within 0..1")
        if aggregationMethod is not None:
            if aggregationMethod not in whisper.aggregationMethods:
                raise ValueError("unknown aggregation method")
    except ValueError:
        # Was a bare `except:`, which also swallowed KeyboardInterrupt and
        # SystemExit; only validation errors should skip the section.
        log.msg("Invalid schemas found in %s." % section)
        continue
    archives = (xFilesFactor, aggregationMethod)
    if matchAll:
        mySchema = DefaultSchema(section, archives)
    elif pattern:
        mySchema = PatternSchema(section, pattern, archives)
    elif listName:
        mySchema = ListSchema(section, listName, archives)
    # NOTE(review): if a section matches none of the three kinds, mySchema
    # from a *previous* iteration is appended (or NameError on the first) —
    # preserved as-is here; confirm whether a `continue` guard is wanted.
    schemaList.append(mySchema)
# NOTE(review): fragment starting mid-expression — the opening of the HTTP
# request call (and the `try:` matching the `except` below) are outside this
# view. Posts ('path', metric) pairs to the tagMultiSeries endpoint.
self.graphite_url + '/tags/tagMultiSeries',
[('path', metric) for metric in metrics]
)
# Log the round-trip duration; `t` is presumably set just before the request.
log.debug("Tagged %s in %s" % (', '.join(metrics), time.time() - t), type='tagdb')
except Exception as err:
# Tagging is best-effort: failures are logged, never propagated to callers.
log.msg("Error tagging %s: %s" % (', '.join(metrics), err), type='tagdb')
# Optional whisper backend: register the plugin class only when the whisper
# module is importable; otherwise this backend is silently unavailable.
try:
import whisper
except ImportError:
pass
else:
class WhisperDatabase(TimeSeriesDatabase):
# Identifier used to select this backend in configuration.
plugin_name = 'whisper'
# Expose the aggregation methods supported by the whisper module.
aggregationMethods = whisper.aggregationMethods
def __init__(self, settings):
# Configure the whisper backend from carbon-style settings.
super(WhisperDatabase, self).__init__(settings)
self.data_dir = settings.LOCAL_DATA_DIR
self.sparse_create = settings.WHISPER_SPARSE_CREATE
self.fallocate_create = settings.WHISPER_FALLOCATE_CREATE
if settings.WHISPER_AUTOFLUSH:
log.msg("Enabling Whisper autoflush")
whisper.AUTOFLUSH = True
if settings.WHISPER_FALLOCATE_CREATE:
if whisper.CAN_FALLOCATE:
log.msg("Enabling Whisper fallocate support")
else:
log.err("WHISPER_FALLOCATE_CREATE is enabled but linking failed.")
# NOTE(review): __init__ appears to continue past the visible fragment.