# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_log_invalid_level(writer, level):
    """Passing a level of an unsupported type to ``logger.log`` raises TypeError."""
    logger.add(writer)
    with pytest.raises(TypeError):
        logger.log(level, "test")
def test_edit_level(writer):
    """Editing an existing level's no/icon/color affects subsequent records.

    NOTE(review): the original expected-output literal was truncated
    (unterminated ``parse(`` call); the third expected line is reconstructed
    from the visible pattern — confirm against the full test file.
    """
    logger.level("info", no=0, color="", icon="[?]")
    fmt = "->{level.no}, {level.name}, {level.icon}, {message}<-"
    logger.add(writer, format=fmt, colorize=True)
    # Logged while the level's no is 0; it does not appear in the expected
    # output below.
    logger.log("info", "nope")
    logger.level("info", no=11)
    logger.log("info", "a")
    logger.level("info", icon="[!]")
    logger.log("info", "b")
    logger.level("info", color="")
    logger.log("info", "c")
    assert writer.read() == parse(
        "->11, info, [?], a<-\n"
        "->11, info, [!], b<-\n"
        "->11, info, [!], c<-\n"
    )
def test_bind_and_add_level(writer, using_bound):
    """A level registered via a bound logger is shared with the root logger.

    Whether ``level()`` is called on the bound logger or on the root one,
    both loggers can emit records at the new level.
    """
    bound = logger.bind()
    logger.add(writer, format="{level.name} {message}")
    target = bound if using_bound else logger
    target.level("bar", 15)
    logger.log("bar", "root")
    bound.log("bar", "bound")
    assert writer.read() == "bar root\nbar bound\n"
def test_log_formatting(writer, message, args, kwargs, expected, use_log_function):
    """Message formatting args/kwargs behave the same via ``log`` and ``debug``."""
    logger.add(writer, format="{message}", colorize=False)
    emit = (lambda *a, **k: logger.log(10, *a, **k)) if use_log_function else logger.debug
    emit(message, *args, **kwargs)
    assert writer.read() == expected + "\n"
# NOTE(review): this chunk starts mid-function — the first lines below are the
# trailing arguments of an error-logging call whose opening is outside this
# view; confirm against the full file.
pad,
total,
song,
e
)
# Success path: read tags out of the raw audio bytes and derive the target
# .mp3 path from the filename template.
else:
tags = audio_metadata.loads(audio).tags
filepath = gm_utils.template_to_filepath(template, tags).with_suffix('.mp3')
# Replace any file already at the destination, then write the audio bytes.
if filepath.is_file():
filepath.unlink()
filepath.parent.mkdir(parents=True, exist_ok=True)
filepath.touch()
filepath.write_bytes(audio)
# Report the completed download at the custom ACTION_SUCCESS level.
logger.log(
'ACTION_SUCCESS',
"({:>{}}/{}) Downloaded -- {} ({})",
songnum,
pad,
total,
filepath,
song['id']
)
# NOTE(review): this chunk starts mid-try inside an unseen loop —
# `operator`, `source`, `artifacts` and `summary` are bound by earlier,
# unseen code; confirm against the full file.
self.operators[operator].process(artifacts)
except Exception:
# Best-effort processing: count and log the operator failure, then
# continue with the next operator rather than aborting the run.
self.statsd.incr(f'error.operator.{operator}')
logger.exception(f"Unknown error in operator '{operator}'")
continue
# Record stats and update the summary.
types = artifact_types(artifacts)
summary.update(types)
for artifact_type in types:
self.statsd.incr(f'source.{source}.{artifact_type}', types[artifact_type])
self.statsd.incr(f'artifacts.{artifact_type}', types[artifact_type])
# Log the summary.
logger.log('NOTIFY', f"New artifacts: {dict(summary)}")
async def _monitor(sensor: Sensor, experiment: Experiment, dry_run: bool, strict: bool):
    """Stream readings from *sensor* into *experiment*.

    Each result yielded by ``sensor._monitor`` is wrapped in a ``Datapoint``
    (with elapsed time computed relative to ``experiment.start_time``) and
    pushed via ``experiment._update``.

    Args:
        sensor: the device to read from.
        experiment: the experiment receiving the datapoints.
        dry_run: forwarded to the sensor's monitoring coroutine.
        strict: if True, a read failure is escalated to ``RuntimeError``;
            otherwise it is only logged as a warning.

    Raises:
        RuntimeError: when reading fails and *strict* is True.
    """
    logger.debug(f"Started monitoring {sensor.name}")
    sensor._stop = False
    try:
        async for result in sensor._monitor(dry_run=dry_run):
            await experiment._update(
                device=sensor.name,
                datapoint=Datapoint(
                    data=result["data"],
                    timestamp=result["timestamp"],
                    experiment_elapsed_time=result["timestamp"] - experiment.start_time,
                ),
            )
    except Exception as e:
        logger.log("ERROR" if strict else "WARNING", f"Failed to read {sensor}!")
        if strict:
            # Chain the original exception so the root cause is preserved
            # in the traceback (was previously discarded).
            raise RuntimeError(str(e)) from e
def log(level, msg):
    """Dispatch a record by severity.

    Numeric levels of 40 (ERROR) and above are emitted as-is; anything
    lower is routed to the custom "DEBUG5" level with the caller depth
    adjusted so the record points at the real call site.
    """
    if level < 40:
        logger.opt(depth=6).log("DEBUG5", msg)
    else:
        logger.log(level, msg)
# NOTE(review): this chunk starts and ends mid-function — `song`,
# `google_songs`, `local_songs` and `args` come from earlier, unseen code,
# and the trailing natsorted(...) call is cut off; confirm against the
# full file.
song_id = song['id']
logger.trace(
"{} -- {} -- {} ({})",
title,
artist,
album,
song_id
)
else:
missing_songs = google_songs
# Explain why no hash comparison happened, if either side is empty.
if not google_songs and not local_songs:
logger.log('NORMAL', "No songs to compare hashes.")
elif not google_songs:
logger.log('NORMAL', "No Google songs to compare hashes.")
elif not local_songs:
logger.log('NORMAL', "No local songs to compare hashes.")
# Metadata comparison pass; when the hash pass already ran, only the
# songs it reported missing are re-checked.
if args.use_metadata:
if args.use_hash:
google_songs = missing_songs
if google_songs and local_songs:
logger.log('NORMAL', "Comparing metadata")
missing_songs = natsorted(
gm_utils.find_missing_items(
google_songs,
local_songs,
fields=['artist', 'album', 'title', 'tracknumber'],
normalize_values=True