# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# Build the scratch csv paths used for the tsfresh minmax feature
# extraction and remove any stale files left over from a previous run.
# NOTE(review): reconstructed from a whitespace-mangled fragment - the
# bodies of the `if os.path.isfile(...)` statements were at column 0 in
# the original paste (a SyntaxError); nesting restored.
minmax_fp_ts_csv = '%s/fpid.%s.%s.minmax_fp_ts.tsfresh.input.std.csv' % (
    settings.SKYLINE_TMP_DIR, str(fp_id), base_name)
if os.path.isfile(minmax_fp_ts_csv):
    os.remove(minmax_fp_ts_csv)
minmax_fp_fname_out = minmax_fp_ts_csv + '.transposed.csv'
if os.path.isfile(minmax_fp_fname_out):
    os.remove(minmax_fp_fname_out)
anomalous_ts_csv = '%s/%s.%s.minmax_anomalous_ts.tsfresh.std.csv' % (
    settings.SKYLINE_TMP_DIR, metric_timestamp, base_name)
if os.path.isfile(anomalous_ts_csv):
    os.remove(anomalous_ts_csv)
anomalous_fp_fname_out = anomalous_ts_csv + '.transposed.csv'
if os.path.isfile(anomalous_fp_fname_out):
    os.remove(anomalous_fp_fname_out)
# tsfresh extraction settings - disable the tqdm progress bar as this
# runs in a daemon context with no interactive terminal
tsf_settings = ReasonableFeatureExtractionSettings()
tsf_settings.disable_progressbar = True
# Feature sums stay None until both series are successfully extracted
minmax_fp_features_sum = None
minmax_anomalous_features_sum = None
# Extract tsfresh features from the minmax scaled features profile
# timeseries and write the transposed features csv to disk.
# NOTE(review): reconstructed from a whitespace-mangled, spliced fragment.
# The original paste had a `try:` with no `except` clause (a SyntaxError),
# a duplicated write loop referencing `ts_csv`/`metric` from another scope,
# and undefined `fname_in` output paths - unified here on the
# minmax_fp_ts_csv / minmax_fp_fname_out / base_name names this section
# defines above. Confirm against the upstream skyline ionosphere source.
if minmax_anomalous_ts and minmax_fp_ts:
    if not os.path.isfile(minmax_fp_ts_csv):
        datapoints = minmax_fp_ts
        converted = []
        for datapoint in datapoints:
            try:
                new_datapoint = [float(datapoint[0]), float(datapoint[1])]
                converted.append(new_datapoint)
            except:  # nosec
                # skip any datapoint that cannot be coerced to float
                continue
        # Write the converted datapoints as metric,timestamp,value rows
        for ts, value in converted:
            utc_ts_line = '%s,%s,%s\n' % (base_name, str(int(ts)), str(value))
            with open(minmax_fp_ts_csv, 'a') as fh:
                fh.write(utc_ts_line)
        del converted
    try:
        df = pd.read_csv(minmax_fp_ts_csv, delimiter=',', header=None, names=['metric', 'timestamp', 'value'])
        df.columns = ['metric', 'timestamp', 'value']
        tsf_settings = ReasonableFeatureExtractionSettings()
        # Disable tqdm progress bar
        tsf_settings.disable_progressbar = True
        df_features = extract_features(
            df, column_id='metric', column_sort='timestamp', column_kind=None,
            column_value=None, feature_extraction_settings=tsf_settings)
        del df
        # Transpose and create the transposed features csv
        df_t = df_features.transpose()
        df_t.to_csv(minmax_fp_fname_out)
        del df_t
    except:
        # @fixed: the original message had a single %s placeholder but two
        # arguments, which itself raises TypeError during formatting
        logger.error('error :: could not minmax scale current time series anomalous_timeseries for fp_id %s - %s' % (str(fp_id), str(base_name)))
    if len(minmax_anomalous_ts) > 0:
        logger.info('minmax_anomalous_ts is populated with %s data points' % str(len(minmax_anomalous_ts)))
    else:
        logger.error('error :: minmax_anomalous_ts is not populated')
else:
    logger.info('minmax scaled check will be skipped - anomalous_ts_values_count is %s and minmax_fp_ts is %s' % (str(anomalous_ts_values_count), str(minmax_fp_ts_values_count)))
# Scratch csv paths for the tsfresh minmax comparison (second pass -
# same paths as above, rebuilt without the stale-file removal).
# Feature sums stay None until both extractions succeed.
minmax_fp_features_sum = None
minmax_anomalous_features_sum = None
skyline_tmp = settings.SKYLINE_TMP_DIR
minmax_fp_ts_csv = '%s/fpid.%s.%s.minmax_fp_ts.tsfresh.input.std.csv' % (
    skyline_tmp, str(fp_id), base_name)
minmax_fp_fname_out = minmax_fp_ts_csv + '.transposed.csv'
anomalous_ts_csv = '%s/%s.%s.minmax_anomalous_ts.tsfresh.std.csv' % (
    skyline_tmp, metric_timestamp, base_name)
anomalous_fp_fname_out = anomalous_ts_csv + '.transposed.csv'
# tsfresh settings with the tqdm progress bar disabled
tsf_settings = ReasonableFeatureExtractionSettings()
tsf_settings.disable_progressbar = True
# Convert the minmax scaled features profile timeseries to float pairs
# before writing it out for tsfresh.
# NOTE(review): reconstructed from a whitespace-mangled fragment - nesting
# restored, and the dangling `except:` at the end of the paste (whose body
# was spliced with unrelated `current_logger` code) given its original
# `continue` body so the loop skips unparseable datapoints.
if minmax_anomalous_ts and minmax_fp_ts:
    if LOCAL_DEBUG:
        logger.debug('debug :: analyzing minmax_fp_ts and minmax_anomalous_ts')
    if not os.path.isfile(minmax_fp_ts_csv):
        if LOCAL_DEBUG:
            logger.debug('debug :: creating %s from minmax_fp_ts' % minmax_fp_ts_csv)
        datapoints = minmax_fp_ts
        converted = []
        for datapoint in datapoints:
            try:
                new_datapoint = [float(datapoint[0]), float(datapoint[1])]
                converted.append(new_datapoint)
            except:  # nosec
                # skip any datapoint that cannot be coerced to float
                continue
# Run the tsfresh feature extraction over df, logging start/success/failure
# with the calling context prefixed to each message.
# NOTE(review): assumes df, ts_csv, current_logger, log_context and
# TSFRESH_VERSION are defined earlier in the enclosing scope - confirm.
# @modified 20190413 - Bug #2934: Ionosphere - no mirage.redis.24h.json file
# Added log_context to report the context
current_logger.info('%s :: starting extract_features with %s' % (
    log_context, str(TSFRESH_VERSION)))
# False marks "not extracted" so callers can distinguish failure
df_features = False
try:
    # @modified 20161226 - Bug #1822: tsfresh extract_features process stalling
    # Changed to use the new ReasonableFeatureExtractionSettings that was
    # introduced in tsfresh-0.4.0 to exclude the computationally high cost
    # of extracting features from very static timeseries that has little to
    # no variation is the values, which results in features taking up to
    # almost 600 seconds to calculate on a timeseries of length 10075
    # (168h - 1 datapoint per 60s)
    # In terms of inline feature calculatation, always exclude
    # high_comp_cost features.
    # df_features = extract_features(df, column_id='metric', column_sort='timestamp', column_kind=None, column_value=None)
    tsf_settings = ReasonableFeatureExtractionSettings()
    # Disable tqdm progress bar
    tsf_settings.disable_progressbar = True
    df_features = extract_features(
        df, column_id='metric', column_sort='timestamp', column_kind=None,
        column_value=None, feature_extraction_settings=tsf_settings)
    # @modified 20190413 - Bug #2934: Ionosphere - no mirage.redis.24h.json file
    # Added log_context to report the context
    current_logger.info('%s :: features extracted from %s data' % (
        log_context, ts_csv))
except:
    # @fixed: traceback.print_exc() writes to stderr and returns None, so
    # current_logger.debug(trace) only ever logged 'None' - use
    # traceback.format_exc() to capture the traceback as a string
    trace = traceback.format_exc()
    current_logger.debug(trace)
    # @modified 20190413 - Bug #2934: Ionosphere - no mirage.redis.24h.json file
    # Added log_context to report the context
    fail_msg = 'error: %s :: extracting features with tsfresh from - %s' % (log_context, ts_csv)
    current_logger.error('%s' % fail_msg)