# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
idx_checksum_before = hash.sha1(cache_file)
idx_date_before = date.get_file_mtime(cache_file)
new_file = os.path.join(tree_base_dir, 'some file.pyc')
with open(new_file, 'wb') as nf:
nf.write('somthing')
check_consistency = True
cache.get_cached_index(cache_dir, check_consistency, timeout,
tree_base_dir, licenses_data_dir, rules_data_dir)
assert tree_before == open(checksum_file).read()
assert idx_checksum_before == hash.sha1(cache_file)
assert idx_date_before == date.get_file_mtime(cache_file)
# if the treechecksum file dies the index is rebuilt
fileutils.delete(checksum_file)
idx_checksum_before = hash.sha1(cache_file)
check_consistency = False
cache.get_cached_index(cache_dir, check_consistency, timeout,
tree_base_dir, licenses_data_dir, rules_data_dir)
assert tree_before == open(checksum_file).read()
assert idx_date_before != date.get_file_mtime(cache_file)
# if the index cache file dies the index is rebuilt
fileutils.delete(cache_file)
check_consistency = False
cache.get_cached_index(cache_dir, check_consistency, timeout,
tree_base_dir, licenses_data_dir, rules_data_dir)
assert idx_checksum_before == hash.sha1(cache_file)
assert idx_date_before == date.get_file_mtime(cache_file)
# if the treechecksum file dies the index is rebuilt
fileutils.delete(checksum_file)
idx_checksum_before = hash.sha1(cache_file)
check_consistency = False
cache.get_cached_index(cache_dir, check_consistency, timeout,
tree_base_dir, licenses_data_dir, rules_data_dir)
assert tree_before == open(checksum_file).read()
assert idx_date_before != date.get_file_mtime(cache_file)
# if the index cache file dies the index is rebuilt
fileutils.delete(cache_file)
check_consistency = False
cache.get_cached_index(cache_dir, check_consistency, timeout,
tree_base_dir, licenses_data_dir, rules_data_dir)
assert tree_before == open(checksum_file).read()
assert os.path.exists(cache_file)
def test_tree_checksum_is_different_when_file_is_removed(self):
    """
    The tree checksum must change when a file is deleted from the tree.
    """
    test_dir = self.get_test_loc('cache/tree', copy=True)
    # drop a throwaway file into the tree, checksum, delete it, checksum again
    added_file = os.path.join(test_dir, 'some.py')
    with open(added_file, 'wb') as out:
        out.write(' ')
    checksum_with_file = cache.tree_checksum(test_dir)
    fileutils.delete(added_file)
    checksum_without_file = cache.tree_checksum(test_dir)
    assert checksum_with_file != checksum_without_file
scan_names = ', '.join(p.name for p in scanner_plugins)
echo_stderr('Scanning done.', fg='green' if success else 'red')
display_summary(codebase, scan_names, processes, verbose=verbose)
finally:
# remove temporary files
scancode_temp_dir = scancode_config.scancode_temp_dir
if keep_temp_files:
if not quiet:
msg = 'Keeping temporary files in: "{}".'.format(scancode_temp_dir)
echo_stderr(msg, fg='green' if success else 'red')
else:
if not quiet:
echo_stderr('Removing temporary files...', fg='green', nl=False)
from commoncode import fileutils
fileutils.delete(scancode_temp_dir)
if not quiet:
echo_stderr('done.', fg='green')
rc = 0 if success else 1
ctx.exit(rc)
with open(metadata, 'rb') as met:
content = met.read()
elif os.path.exists(metadata_gz):
content, warnings = get_gz_compressed_file_content(metadata_gz)
if warnings:
raise Exception('Failed to extract RubyGem .gem/metadata.gz file.\n' + '\n'.join(warnings))
else:
raise Exception('No gem metadata found in RubyGem .gem file.')
return content
finally:
if extract_loc:
fileutils.delete(extract_loc)
extractor = archive.get_extractor(location, kinds)
if TRACE:
logger.debug('extract_file: extractor: for: %(location)r with kinds: %(kinds)r : ' % locals()
+ getattr(extractor, '__module__', '')
+ '.' + getattr(extractor, '__name__', ''))
if extractor:
yield ExtractEvent(location, target, done=False, warnings=[], errors=[])
try:
# extract first to a temp directory: if there is an error, the
# extracted files will not be moved to target
tmp_tgt = fileutils.get_temp_dir(prefix='scancode-extract-')
abs_location = abspath(expanduser(location))
warns = extractor(abs_location, tmp_tgt) or []
warnings.extend(warns)
fileutils.copytree(tmp_tgt, target)
fileutils.delete(tmp_tgt)
except Exception as e:
errors = [str(e).strip(' \'"')]
if verbose:
errors.append(traceback.format_exc())
if TRACE:
tb = traceback.format_exc()
logger.debug('extract_file: ERROR: %(location)r: %(errors)r\n%(e)r\n%(tb)s' % locals())
finally:
yield ExtractEvent(location, target, done=True, warnings=warnings, errors=errors)
def cli(license_dir, source, trace, clean, match_text=False, match_approx=False):
    """
    Synchronize ScanCode licenses with an external license source.
    DIR is the directory to store (or load) external licenses.
    When using the dejacode source you need to set the
    'DEJACODE_API_URL' and 'DEJACODE_API_KEY' environment variables with
    your credentials.
    """
    # stash the CLI flag in the module-level TRACE so helpers can log
    global TRACE
    TRACE = trace

    if clean:
        # remove the license dir and its derived side-by-side work dirs
        fileutils.delete(license_dir)
        base_dir = license_dir.rstrip('/\\')
        for suffix in ('-new', '-update', '-del'):
            fileutils.delete(base_dir + suffix)

    # build the source backend for the requested source name and sync
    source_cls = SOURCES[source]
    source = source_cls(license_dir, match_text, match_approx)
    synchronize_licenses(source)
    print()
directory and copy the assets to this directory. The target
directory is deleted if it exists.
Raise HtmlAppAssetCopyWarning if the output_file is or
HtmlAppAssetCopyError if the copy was not possible.
"""
try:
if is_stdout(output_file):
raise HtmlAppAssetCopyWarning()
assets_dir = join(get_template_dir('html-app'), 'assets')
# delete old assets
tgt_dirs = get_html_app_files_dirs(output_file)
target_dir = join(*tgt_dirs)
if exists(target_dir):
delete(target_dir)
# copy assets
copytree(assets_dir, target_dir)
# write json data
# FIXME: this should a regular JSON scan format
root_path, assets_dir = get_html_app_files_dirs(output_file)
with codecs.open(join(root_path, assets_dir, 'data.json'), 'wb', encoding='utf-8') as f:
f.write('data=')
simplejson.dump(results, f, iterable_as_array=True)
# create help file
with codecs.open(join(root_path, assets_dir, 'help.html'), 'wb', encoding='utf-8') as f:
f.write(get_html_app_help(basename(output_file.name)))
except HtmlAppAssetCopyWarning, w:
raise w
# attempt extract first to a temp dir
temp_target1 = compat.unicode(fileutils.get_temp_dir(prefix='scancode-extract1-'))
try:
warnings = extractor1(abs_location, temp_target1)
if TRACE:
logger.debug('extract_with_fallback: temp_target1: %(temp_target1)r' % locals())
fileutils.copytree(temp_target1, abs_target_dir)
except:
try:
temp_target2 = compat.unicode(fileutils.get_temp_dir(prefix='scancode-extract2-'))
warnings = extractor2(abs_location, temp_target2)
if TRACE:
logger.debug('extract_with_fallback: temp_target2: %(temp_target2)r' % locals())
fileutils.copytree(temp_target2, abs_target_dir)
finally:
fileutils.delete(temp_target2)
finally:
fileutils.delete(temp_target1)
return warnings