# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_copyfile(path, repo_dir):
    """copyfile must behave identically for plain str and PathInfo args.

    Covers both destination kinds: a directory (file is copied into it
    under the source's basename) and a plain file path.
    """
    src = repo_dir.FOO
    dest = path
    src_info = PathInfo(repo_dir.BAR)
    dest_info = PathInfo(path)

    # str arguments
    copyfile(src, dest)
    if os.path.isdir(dest):
        assert filecmp.cmp(
            src, os.path.join(dest, os.path.basename(src)), shallow=False
        )
    else:
        assert filecmp.cmp(src, dest, shallow=False)

    # PathInfo arguments — mirror of the str case above.
    copyfile(src_info, dest_info)
    if os.path.isdir(dest_info.fspath):
        assert filecmp.cmp(
            src_info.fspath,
            os.path.join(dest_info.fspath, os.path.basename(src_info.fspath)),
            shallow=False,
        )
    else:
        # FIX(review): this closing branch was truncated in the original
        # (unclosed call + missing else), leaving the file unparseable.
        assert filecmp.cmp(src_info.fspath, dest_info.fspath, shallow=False)
def test_file_md5(repo_dir):
    """file_md5 must yield the same digest for a str path and a PathInfo."""
    path = repo_dir.FOO
    assert file_md5(path) == file_md5(PathInfo(path))
def test_stage_fname(add):
    """A stage file name is derived from its single output's basename."""
    output = mock.Mock()
    output.is_in_repo = False
    output.path_info = PathInfo("path/to/out.txt")

    result = Stage._stage_fname([output], add)

    assert result == "out.txt.dvc"
def test_get_state_record_for_inode(get_inode_mock, dvc_repo, repo_dir):
    """State records must round-trip for inodes beyond SQLite's int range."""
    state = State(dvc_repo, dvc_repo.config.config)

    # Pick an inode larger than SQLite can store natively; the State
    # must remap it internally (so _to_sqlite cannot be the identity).
    inode = state.MAX_INT + 2
    assert state._to_sqlite(inode) != inode

    path = os.path.join(dvc_repo.root_dir, repo_dir.FOO)
    md5 = file_md5(path)[0]
    get_inode_mock.side_effect = mock_get_inode(inode)

    with state:
        state.save(PathInfo(path), md5)
        assert state.get_state_record_for_inode(inode) is not None
def test_path_object_and_str_are_valid_arg_types(self):
    """contains_symlink_up_to accepts both plain str and PathInfo args."""
    base_path = "foo"
    target_path = os.path.join(base_path, "bar")

    # Same check, once with str arguments and once with PathInfo wrappers.
    for target, base in (
        (target_path, base_path),
        (PathInfo(target_path), PathInfo(base_path)),
    ):
        self.assertFalse(contains_symlink_up_to(target, base))
def test_should_relink_on_repeated_add(
    link, new_link, link_test_func, tmp_dir, dvc
):
    """Re-adding a file must relink it using the newly configured link type.

    `link`/`new_link` are cache link-type names (parametrized fixtures);
    `link_test_func` checks that a path is a link of the `new_link` kind.
    """
    from dvc.path_info import PathInfo

    # Start with the cache configured for the original link type.
    dvc.config.set("cache", "type", link)
    tmp_dir.dvc_gen({"foo": "foo", "bar": "bar"})
    # Replace "foo" with a `link`-type link pointing at "bar"'s content.
    os.remove("foo")
    getattr(dvc.cache.local, link)(PathInfo("bar"), PathInfo("foo"))
    # Switch the cache to the new link type and re-add; the existing link
    # should be replaced with one of the new kind.
    dvc.cache.local.cache_types = [new_link]
    dvc.add("foo")
    assert link_test_func("foo")
# NOTE(review): fragment — the enclosing `def` header is not visible in this
# view; this reads like the tail of a dir-cache loading method. Code is left
# byte-identical (original indentation was stripped upstream).
path_info = self.checksum_to_path_info(checksum)
# EAFP: read and JSON-parse the cached dir manifest; a missing or corrupt
# file is wrapped into the domain exception, preserving the original cause.
try:
with self.cache.open(path_info, "r") as fobj:
d = json.load(fobj)
except (ValueError, FileNotFoundError) as exc:
raise DirCacheError(checksum, cause=exc)
# A well-formed dir cache file deserializes to a list of entry dicts;
# anything else is logged and treated as empty rather than raising.
if not isinstance(d, list):
msg = "dir cache file format error '{}' [skipping the file]"
logger.error(msg.format(relpath(path_info)))
return []
# Convert each entry's stored POSIX relpath to the native path form.
for info in d:
# NOTE: here is a BUG, see comment to .as_posix() below
relative_path = PathInfo.from_posix(info[self.PARAM_RELPATH])
info[self.PARAM_RELPATH] = relative_path.fspath
return d
def _get_dir_info_checksum(self, dir_info):
    """Compute the checksum of a directory manifest.

    Serializes `dir_info` to a temporary JSON file (sorted keys keep the
    serialization — and hence the digest — deterministic), uploads it to a
    temporary location inside the cache, and returns ``(checksum, to_info)``
    where `checksum` carries the directory suffix and `to_info` is the
    uploaded file's location.
    """
    # FIX(review): the original used NamedTemporaryFile(delete=False).name,
    # which leaked both the open file descriptor and the on-disk temp file.
    fd, tmp = tempfile.mkstemp()
    try:
        with os.fdopen(fd, "w") as fobj:
            json.dump(dir_info, fobj, sort_keys=True)

        from_info = PathInfo(tmp)
        to_info = self.cache.path_info / tmp_fname("")
        self.cache.upload(from_info, to_info, no_progress_bar=True)

        checksum = self.get_file_checksum(to_info) + self.CHECKSUM_DIR_SUFFIX
        return checksum, to_info
    finally:
        # The local temp file is only needed for the upload.
        os.unlink(tmp)
# NOTE(review): fragment — the enclosing `def` header is outside this view and
# the final condition is cut mid-expression; code left byte-identical
# (original indentation was stripped upstream).
# Pass 1: each output path may be produced by at most one stage; a second
# producer raises with both offending stage file paths.
for stage in stages:
for out in stage.outs:
if out.path_info in outs:
stages = [stage.relpath, outs[out.path_info].stage.relpath]
raise OutputDuplicationError(str(out), stages)
outs[out.path_info] = out
# Pass 2: no output may live under a directory that is itself another
# stage's output (overlapping output paths).
for stage in stages:
for out in stage.outs:
for p in out.path_info.parents:
if p in outs:
raise OverlappingOutputPathsError(outs[p], out)
# Pass 3: a stage file must not be — or live under — any output path.
for stage in stages:
stage_path_info = PathInfo(stage.path)
for p in chain([stage_path_info], stage_path_info.parents):
if p in outs:
raise StagePathAsOutputError(stage.wdir, stage.relpath)
# Pass 4: add each stage as a graph node keyed by its repo-relative path;
# dependency-to-output edge wiring continues past the visible cut below.
for stage in stages:
node = relpath(stage.path, self.root_dir)
G.add_node(node, stage=stage)
for dep in stage.deps:
if dep.path_info is None:
continue
for out in outs:
if (
out == dep.path_info