def __call__(self, parser, namespace, values, option_string=None):
    try:
        lib50.logout()
    except lib50.Error:
        raise internal.Error(_("failed to logout"))
    else:
        termcolor.cprint(_("logged out successfully"), "green")
    parser.exit()
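
# Illustrative sketch (names assumed, not necessarily check50's): a zero-argument
# argparse action like the __call__ above is typically given nargs=0 in __init__
# and registered directly as the action of a flag.
import argparse

class _LogoutAction(argparse.Action):
    def __init__(self, option_strings, dest=argparse.SUPPRESS,
                 default=argparse.SUPPRESS, help="logout of check50"):
        super().__init__(option_strings, dest=dest, default=default, nargs=0, help=help)

    def __call__(self, parser, namespace, values, option_string=None):
        print("logged out successfully")
        parser.exit()

example_parser = argparse.ArgumentParser(prog="check50")
example_parser.add_argument("--logout", action=_LogoutAction)
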
"localedir": "locale",
"domain": "messages",
}
# Get config file
try:
config_file = lib50.config.get_config_filepath(check_dir)
except lib50.Error:
raise Error(_("Invalid slug for check50. Did you mean something else?"))
# Load config
with open(config_file) as f:
try:
config = CONFIG_LOADER.load(f.read())
except lib50.InvalidConfigError:
raise Error(_("Invalid slug for check50. Did you mean something else?"))
# Update the config with defaults
if isinstance(config, dict):
options.update(config)
# Apply translations
if options["translations"]:
if isinstance(options["translations"], dict):
translation_options.update(options["translations"])
options["translations"] = translation_options
return options
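
# Worked illustration of the merging above (values made up): top-level defaults are
# overlaid with whatever the YAML loader returned, and a partial "translations"
# mapping is layered over the locale defaults rather than replacing them.
example_options = {"checks": "__init__.py", "dependencies": None, "translations": None}
example_config = {"dependencies": ["requests"], "translations": {"domain": "checks"}}
example_options.update(example_config)

example_translations = {"localedir": "locale", "domain": "messages"}
if isinstance(example_options["translations"], dict):
    example_translations.update(example_options["translations"])
example_options["translations"] = example_translations
# example_options["translations"] == {"localedir": "locale", "domain": "checks"}
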
"""
for _i in range(pings):
# Query for check results.
res = requests.get(f"https://submit.cs50.io/api/results/check50", params={"commit_hash": commit_hash, "slug": slug})
results = res.json()
if res.status_code not in [404, 200]:
raise RemoteCheckError(results)
if res.status_code == 200 and results["received_at"] is not None:
break
time.sleep(sleep)
else:
# Terminate if no response
raise internal.Error(
_("check50 is taking longer than normal!\n"
"See https://submit.cs50.io/check50/{} for more detail").format(commit_hash))
if not results["check50"]:
raise RemoteCheckError(results)
if "error" in results["check50"]:
raise RemoteCheckError(results["check50"])
# TODO: Should probably check payload["version"] here to make sure major version is same as __version__
# (otherwise we may not be able to parse results)
return results["tag_hash"], {
"slug": results["check50"]["slug"],
"results": results["check50"]["results"],
import contextlib
import os
import time

import attr
import lib50
import requests
import termcolor

from . import internal, renderer, __version__
from .runner import CheckRunner

lib50.set_local_path(os.environ.get("CHECK50_PATH", "~/.local/share/check50"))

SLUG = None

class RemoteCheckError(internal.Error):
    def __init__(self, remote_json):
        super().__init__("check50 ran into an error while running checks! Please contact sysadmins@cs50.harvard.edu!")
        self.payload = {"remote_json": remote_json}

@contextlib.contextmanager
def nullcontext(entry_result=None):
    """This is just contextlib.nullcontext but that function is only available in 3.7+."""
    yield entry_result
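
# Sketch of why the no-op context manager is useful: the same with-statement can take
# either a real context manager or this fallback, without branching the body.
import tempfile

use_temporary_directory = False
manager = tempfile.TemporaryDirectory() if use_temporary_directory else nullcontext(".")
with manager as working_directory:
    print(working_directory)  # "." when the null context is used
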

def excepthook(cls, exc, tb):
    # All channels to output to
    outputs = excepthook.outputs

    for output in excepthook.outputs:

def after_every(self, func):
    """Run func at the end of every check.

    :param func: callback to be run after every check
    :raises check50.internal.Error: if called when a check is being run
    """
    if check_running:
        raise Error("cannot register callback to run after every check when check is running")
    self._after_everies.append(func)
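
# Hypothetical registration, assuming a module-level register instance like the one
# check50 exposes to extensions: the callback fires once after each check finishes.
def _report_done():
    print("check finished")

register.after_every(_report_done)
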
:param checks: YAML checks read from config
:type checks: dict
:param prompt: prompt user if ``out_file`` already exists
:type prompt: bool
:param out_file: file to write compiled checks
:type out_file: str
:returns: ``out_file``
:rtype: str
"""
file_path = check_dir / out_file

# Prompt to replace __init__.py (compile destination)
if prompt and file_path.exists():
    if not _yes_no_prompt("check50 will compile the YAML checks to __init__.py, are you sure you want to overwrite its contents?"):
        raise Error("Aborting: could not overwrite to __init__.py")

# Compile simple checks
with open(file_path, "w") as f:
    f.write(_simple.compile(checks))

return out_file
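
# Hypothetical call following the docstring above; "yaml_checks" is only a placeholder
# for whatever mapping the YAML loader produced, and prompt=True asks before
# overwriting an existing __init__.py.
yaml_checks = {}  # shape depends on the check configuration
compiled_file = compile_checks(yaml_checks, prompt=True, out_file="__init__.py")
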

def raise_invalid_slug(slug, offline=False):
    """Raise an error signalling slug is invalid for check50."""
    msg = _("Could not find checks for {}.").format(slug)

    similar_slugs = lib50.get_local_slugs("check50", similar_to=slug)[:3]
    if similar_slugs:
        msg += _(" Did you mean:")
        for similar_slug in similar_slugs:
            msg += f"\n {similar_slug}"
        msg += _("\nDo refer back to the problem specification if unsure.")

    if offline:
        msg += _("\nIf you are confident the slug is correct and you have an internet connection,"
                 " try running without --offline.")

    raise internal.Error(msg)
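
# Hypothetical call site: when a slug cannot be resolved, report it together with up
# to three similar locally cached slugs (the slug below is made up).
try:
    raise_invalid_slug("cs50/problems/hello", offline=True)
except internal.Error as error:
    print(error)
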

def run(self, check_names, files, working_area):
    """
    Run just the targeted checks, and the checks they depend on.
    Returns just the results of the targeted checks.
    """
    if len(set(check_names)) < len(check_names):
        raise internal.Error(_("Duplicate checks targeted: {}").format(check_names))

    # Find the docs for every check in the dependency map
    check_docs = {}
    for dependents in self.dependency_map.values():
        for dependent, doc in dependents:
            check_docs[dependent] = doc

    # For every targeted check, validate that it exists
    for check_name in check_names:
        if check_name not in check_docs:
            raise internal.Error(_("Unknown check {}").format(check_name))

    # Build an inverse dependency map, from a check to its dependency
    inverse_dependency_map = self._create_inverse_dependency_map()

    # Reconstruct a new dependency_map, consisting of the targeted checks and their dependencies
    new_dependency_map = collections.defaultdict(set)
    for check_name in check_names:
        cur_check_name = check_name
        while cur_check_name is not None:
            dependency_name = inverse_dependency_map[cur_check_name]
            new_dependency_map[dependency_name].add((cur_check_name, check_docs[cur_check_name]))
            cur_check_name = dependency_name

    # Temporarily replace dependency_map and run
    try:
        old_dependency_map = self.dependency_map
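
# Worked example of the dependency walk above (data made up): targeting only "world"
# reconstructs the minimal map containing its whole dependency chain.
import collections

inverse_example = {"exists": None, "hello": "exists", "world": "hello"}
docs_example = {"exists": "exists doc", "hello": "hello doc", "world": "world doc"}

minimal_map = collections.defaultdict(set)
current = "world"
while current is not None:
    parent = inverse_example[current]
    minimal_map[parent].add((current, docs_example[current]))
    current = parent
# minimal_map maps None -> {("exists", ...)}, "exists" -> {("hello", ...)},
# "hello" -> {("world", ...)}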