# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def handle_no_cache(context):
    """Handle lack-of-cache error, prompt user for index process.

    Informs the user that no wily cache exists, then asks whether to
    build one now.  On refusal the process exits with status 1;
    otherwise the user is prompted for the number of git revisions and
    the target source paths, and the ``build`` command is invoked
    through the Click *context*.

    :param context: Click context used to invoke the ``build`` command.
    """
    # No placeholders in the message, so a plain string (not an f-string) is correct.
    logger.error(
        "Could not locate wily cache, the cache is required to provide insights."
    )
    p = input("Do you want to run setup and index your project now? [y/N]")
    if p.lower() != "y":
        # User declined (default answer is "N"): abort with a failure status.
        exit(1)
    else:
        revisions = input("How many previous git revisions do you want to index? : ")
        revisions = int(revisions)  # NOTE(review): raises ValueError on non-numeric input — upstream behavior preserved
        path = input("Path to your source files; comma-separated for multiple: ")
        paths = path.split(",")
        # Delegate the actual indexing to the `build` command; operators=None
        # means "use the default operator set".
        context.invoke(build, max_revisions=revisions, targets=paths, operators=None)
# NOTE(review): fragment — appears to come from the body of wily's `diff`
# command; the enclosing `def` (and the definitions of `config`, `state`,
# `targets`, `revision`, `metrics`) are outside this view.  Indentation has
# been flattened by extraction.  Code kept byte-identical; comments only.

# Expand directories to paths
# Each harvested filename is made relative to the configured project root.
files = [
os.path.relpath(fn, config.path)
for fn in radon.cli.harvest.iter_filenames(targets)
]
logger.debug(f"Targeting - {files}")
# Resolve the revision to compare against: default to the most recent
# indexed revision when none was given on the CLI.
if not revision:
target_revision = state.index[state.default_archiver].last_revision
else:
# Ask the archiver (e.g. git) to resolve the user-supplied revision string.
rev = resolve_archiver(state.default_archiver).cls(config).find(revision)
logger.debug(f"Resolved {revision} to {rev.key} ({rev.message})")
try:
target_revision = state.index[state.default_archiver][rev.key]
except KeyError:
# Revision exists in the repo but was never indexed into the wily cache.
logger.error(
f"Revision {revision} is not in the cache, make sure you have run wily build."
)
exit(1)
logger.info(
f"Comparing current with {format_revision(target_revision.revision.key)} by {target_revision.revision.author_name} on {format_date(target_revision.revision.date)}."
)
# NOTE(review): fragment — metric/operator resolution followed by what looks
# like the start of a multiprocessing fan-out.  The `pool.starmap(` call is
# truncated mid-expression (its arguments and closing paren are missing), so
# this span is not syntactically complete Python.  Code kept byte-identical.

# Convert the list of metrics to a list of metric instances
operators = {resolve_operator(metric.split(".")[0]) for metric in metrics}
metrics = [(metric.split(".")[0], resolve_metric(metric)) for metric in metrics]
results = []
# Build a set of operators
# One pool worker per operator; the starmap arguments were lost in extraction.
with multiprocessing.Pool(processes=len(operators)) as pool:
operator_exec_out = pool.starmap(
data = []
# NOTE(review): the lines below appear spliced in from a different routine
# (single-metric resolution, presumably from the `rank` command).
operator, metric = resolve_metric_as_tuple(metric)
operator = operator.name
# NOTE(review): fragment — revision resolution for what appears to be wily's
# `rank` command (it logs a "Rank for ..." banner).  The enclosing `def` and
# the definitions of `config`, `revision_index`, `metric`, `path`, `operator`
# are outside this view.  Code kept byte-identical; comments only.

state = State(config)
# Default to the latest indexed revision when none was requested.
if not revision_index:
target_revision = state.index[state.default_archiver].last_revision
else:
# Resolve the user-supplied revision string via the default archiver.
rev = resolve_archiver(state.default_archiver).cls(config).find(revision_index)
logger.debug(f"Resolved {revision_index} to {rev.key} ({rev.message})")
try:
target_revision = state.index[state.default_archiver][rev.key]
except KeyError:
# Known to the repository but absent from the wily cache.
logger.error(
f"Revision {revision_index} is not in the cache, make sure you have run wily build."
)
exit(1)
logger.info(
f"-----------Rank for {metric.description} for {format_revision(target_revision.revision.key)} by {target_revision.revision.author_name} on {format_date(target_revision.revision.date)}.------------"
)
# With no explicit --path, rank every file the operator recorded for this revision.
if path is None:
files = target_revision.get_paths(config, state.default_archiver, operator)
logger.debug(f"Analysing {files}")
else:
# Resolve target paths when the cli has specified --path
if config.path != DEFAULT_PATH:
targets = [str(Path(config.path) / Path(path))]
# NOTE(review): fragment — the leading `else:` is orphaned by the splice
# (its body belongs to a different routine than the `if` it once paired
# with).  The lines below look like the tail of wily's `build` command:
# per-root metric aggregation, index persistence, and the `except`/`finally`
# of a `try:` that is outside this view.  Code kept byte-identical.

else:
if len(values) > 0:
# Aggregate per-file metric values into a per-directory total
# using the metric's aggregation function `func`.
result[str(root)]["total"][metric.name] = func(values)
# Carry this revision's results forward so deltas can be computed next pass.
prev_indices = set(result.keys())
prev_roots = roots
stats["operator_data"][operator_name] = result
bar.next()
prev_stats = stats
seed = False
# Record the revision's stats in the index and flush to disk.
ir = index.add(revision, operators=operators)
ir.store(config, _archiver, stats)
index.save()
bar.finish()
except Exception as e:
logger.error(f"Failed to build cache: {type(e)}: '{e}'")
raise e
finally:
# Reset the archive after every run back to the head of the branch
_archiver.finish()