Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
subname = test_url.replace(join_url(location, name), '', 1).strip('/')
channel_name = join_url(name, subname)
channel = _get_channel_for_name(channel_name)
return channel.location, channel_name, channel.scheme, channel.auth, channel.token
# Step 3. migrated_channel_aliases matches
for migrated_alias in context.migrated_channel_aliases:
if test_url.startswith(migrated_alias.location):
name = test_url.replace(migrated_alias.location, '', 1).strip('/')
ca = context.channel_alias
return ca.location, name, ca.scheme, ca.auth, ca.token
# Step 4. custom_channels matches
for name, channel in sorted(context.custom_channels.items(), reverse=True,
key=lambda x: len(x[0])):
that_test_url = join_url(channel.location, channel.name)
if tokenized_startswith(test_url.split('/'), that_test_url.split('/')):
subname = test_url.replace(that_test_url, '', 1).strip('/')
return (channel.location, join_url(channel.name, subname), scheme,
channel.auth, channel.token)
# Step 5. channel_alias match
ca = context.channel_alias
if ca.location and tokenized_startswith(test_url.split('/'), ca.location.split('/')):
name = test_url.replace(ca.location, '', 1).strip('/') or None
return ca.location, name, scheme, ca.auth, ca.token
# Step 6. not-otherwise-specified file://-type urls
if host is None:
# this should probably only happen with a file:// type url
assert port is None
location, name = test_url.rsplit('/', 1)
return channel.location, channel_name, channel.scheme, channel.auth, channel.token
# Step 3. migrated_channel_aliases matches
for migrated_alias in context.migrated_channel_aliases:
if test_url.startswith(migrated_alias.location):
name = test_url.replace(migrated_alias.location, '', 1).strip('/')
ca = context.channel_alias
return ca.location, name, ca.scheme, ca.auth, ca.token
# Step 4. custom_channels matches
for name, channel in sorted(context.custom_channels.items(), reverse=True,
key=lambda x: len(x[0])):
that_test_url = join_url(channel.location, channel.name)
if tokenized_startswith(test_url.split('/'), that_test_url.split('/')):
subname = test_url.replace(that_test_url, '', 1).strip('/')
return (channel.location, join_url(channel.name, subname), scheme,
channel.auth, channel.token)
# Step 5. channel_alias match
ca = context.channel_alias
if ca.location and tokenized_startswith(test_url.split('/'), ca.location.split('/')):
name = test_url.replace(ca.location, '', 1).strip('/') or None
return ca.location, name, scheme, ca.auth, ca.token
# Step 6. not-otherwise-specified file://-type urls
if host is None:
# this should probably only happen with a file:// type url
assert port is None
location, name = test_url.rsplit('/', 1)
if not location:
location = '/'
_scheme, _auth, _token = 'file', None, None
https://anaconda-repo.dev/packages/conda-forge/linux-64/repodata.json, add an
entry 'conda-forge: https://anaconda-repo.dev/packages'.
"""),
'custom_multichannels': dals("""
A multichannel is a metachannel composed of multiple channels. The two reserved
multichannels are 'defaults' and 'local'. The 'defaults' multichannel is
customized using the 'default_channels' parameter. The 'local'
multichannel is a list of file:// channel locations where conda-build stashes
successfully-built packages. Other multichannels can be defined with
custom_multichannels, where the key is the multichannel name and the value is
a list of channel names and/or channel urls.
"""),
'default_channels': dals("""
The list of channel names and/or urls used for the 'defaults' multichannel.
"""),
'disallow': dals("""
Package specifications to disallow installing. The default is to allow
all packages.
"""),
'envs_dirs': dals("""
The list of directories to search for named environments. When creating a new
named environment, the environment will be placed in the first writable
location.
"""),
'force': dals("""
Override any of conda's objections and safeguards for installing packages and
potentially breaking environments. Also re-installs the package, even if the
package is already installed. Implies --no-deps.
"""),
'json': dals("""
Ensure all output written to stdout is structured json.
"""),
with open(target_path) as fh:
created_file_contents = fh.read()
first_line, second_line, third_line, remainder = created_file_contents.split('\n', 3)
if on_win:
win_conda_exe = join(conda_prefix, 'Scripts', 'conda.exe')
assert first_line == 'set -gx CONDA_EXE (cygpath "%s")' % win_conda_exe
assert second_line == 'set _CONDA_ROOT (cygpath "%s")' % conda_prefix
assert third_line == 'set _CONDA_EXE (cygpath "%s")' % win_conda_exe
else:
assert first_line == 'set -gx CONDA_EXE "%s"' % join(conda_prefix, 'bin', 'conda')
assert second_line == 'set _CONDA_ROOT "%s"' % conda_prefix
assert third_line == 'set _CONDA_EXE "%s"' % join(conda_prefix, 'bin', 'conda')
with open(join(CONDA_PACKAGE_ROOT, 'shell', 'etc', 'fish', 'conf.d', 'conda.fish')) as fh:
original_contents = fh.read()
assert remainder == original_contents
result = install_conda_fish(target_path, conda_prefix)
assert result == Result.NO_CHANGE
def test_install_conda_fish(self):
    """Installing conda.fish into an empty prefix reports MODIFIED and writes
    the three activation header lines followed by the packaged script verbatim.
    """
    with tempdir() as temp_prefix:
        prefix = abspath(sys.prefix)
        target = join(temp_prefix, 'etc', 'fish', 'conf.d', 'conda.fish')
        assert install_conda_fish(target, prefix) == Result.MODIFIED

        with open(target) as fh:
            written = fh.read()
        # Header is exactly three lines; everything after is the stock script.
        line1, line2, line3, remainder = written.split('\n', 3)

        if on_win:
            exe = join(prefix, 'Scripts', 'conda.exe')
            expected = (
                'set -gx CONDA_EXE (cygpath "%s")' % exe,
                'set _CONDA_ROOT (cygpath "%s")' % prefix,
                'set _CONDA_EXE (cygpath "%s")' % exe,
            )
        else:
            exe = join(prefix, 'bin', 'conda')
            expected = (
                'set -gx CONDA_EXE "%s"' % exe,
                'set _CONDA_ROOT "%s"' % prefix,
                'set _CONDA_EXE "%s"' % exe,
            )
        assert (line1, line2, line3) == expected

        # The remainder must match the packaged conda.fish byte-for-byte.
        packaged = join(CONDA_PACKAGE_ROOT, 'shell', 'etc', 'fish', 'conf.d', 'conda.fish')
        with open(packaged) as fh:
            assert remainder == fh.read()
'perl-encode',
'perl-exporter',
'perl-getopt-long',
'perl-lib',
'perl-pod-usage',
'perl-time-hires',
'perl-pod-escapes',
'perl-extutils-makemaker',
'perl-test',
'perl-parent',
'perl-data-dumper',
'perl-test-harness',
)
all_package_names = set(info['name'] for info in itervalues(keep))
for fn, info in r6json['packages'].items():
if info['name'] in keep_list:
_keep[fn] = info
for dep in info['depends']:
dep = dep.split()[0]
if dep not in keep_list and dep not in all_package_names:
missing_in_whitelist.add(dep)
if missing_in_whitelist:
print(">>> missing 6 <<<")
pprint(missing_in_whitelist)
# patch 'perl-*' to include an explicit dependency on perl, as from the 'perl-threaded' package
perl_info_dicts = tuple(info for info in _keep.values() if info['name'].startswith('perl-'))
for info in perl_info_dicts:
if not any(dep.startswith("perl ") for dep in info['depends']):
info['depends'].append('perl 5.22.0*')
def get_index_r_1(subdir=context.subdir):
    # Build a SubdirData for the static 'channel-1' test index so dependency
    # resolution tests can run against a fixed, local repodata snapshot
    # instead of hitting the network.
    #
    # NOTE(review): the default `subdir=context.subdir` is evaluated once at
    # import time, not per call — presumably fine for test fixtures, but
    # confirm if context.subdir can change between invocations.
    with open(join(dirname(__file__), 'data', 'index.json')) as fi:
        packages = json.load(fi)
    # Wrap the raw package mapping in a minimal repodata envelope of the
    # shape SubdirData expects.
    repodata = {
        "info": {
            "subdir": subdir,
            "arch": context.arch_name,
            "platform": context.platform,
        },
        "packages": packages,
    }
    channel = Channel('https://conda.anaconda.org/channel-1/%s' % subdir)
    sd = SubdirData(channel)
    # Disable the implicit 'pip' dependency injected onto python packages so
    # the fixture's dependency graph matches the raw index exactly.
    with env_var("CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY", "false", stack_callback=conda_tests_ctxt_mgmt_def_pol):
        sd._process_raw_repodata_str(json.dumps(repodata))
    sd._loaded = True  # mark as loaded so SubdirData skips any real fetch
def test_package_info(self):
index_json_record = IndexRecord(build=0, build_number=0, name="test_foo", version=0,
channel='defaults', subdir=context.subdir, fn='doesnt-matter',
md5='0123456789')
icondata = "icondata"
package_metadata = PackageMetadata(
package_metadata_version=1,
noarch=Noarch(type="python", entry_points=["test:foo"]),
)
paths = [PathData(_path="test/path/1", file_mode=FileMode.text, path_type=PathType.hardlink,
prefix_placeholder="/opt/anaconda1anaconda2anaconda3", ),
PathData(_path="test/path/2", no_link=True, path_type=PathType.hardlink),
PathData(_path="test/path/3", path_type=PathType.softlink),
PathData(_path="menu/test.json", path_type=PathType.hardlink)]
paths_data = PathsData(paths_version=0, paths=paths)
package_info = PackageInfo(
extracted_package_dir='/some/path',
def test_strictness(self):
    """Strictness: 1 = name only, 2 = name + version, 3 = name + version +
    build (or any additional constraint such as build_number)."""
    cases = (
        ('foo', 1),
        ('foo 1.2', 2),
        ('foo 1.2 3', 3),
        ('foo 1.2 3 [channel=burg]', 3),
        # Seems odd, but this is needed for compatibility
        ('test* 1.2', 3),
    )
    for spec_str, expected in cases:
        assert MatchSpec(spec_str).strictness == expected
    # A keyword-only constraint also bumps strictness to 3.
    assert MatchSpec('foo', build_number=2).strictness == 3
a = MatchSpec(dst)
b = MatchSpec(a)
c = MatchSpec(dst, optional=True, target='burg')
d = MatchSpec(a, build='5')
assert a == b
assert hash(a) == hash(b)
assert a is b
assert a != c
assert hash(a) != hash(c)
assert a != d
assert hash(a) != hash(d)
p = MatchSpec(channel='defaults',name='python',version=VersionSpec('3.5*'))
assert p.match(Dist(channel='defaults', dist_name='python-3.5.3-1', name='python',
version='3.5.3', build_string='1', build_number=1, base_url=None,
platform=None))
assert not p.match(Dist(channel='defaults', dist_name='python-3.6.0-0', name='python',
version='3.6.0', build_string='0', build_number=0, base_url=None,
platform=None))
assert p.match(Dist(channel='defaults', dist_name='python-3.5.1-0', name='python',
version='3.5.1', build_string='0', build_number=0, base_url=None,
platform=None))
assert p.match(PackageRecord(name='python', version='3.5.1', build='0', build_number=0,
depends=('openssl 1.0.2*', 'readline 6.2*', 'sqlite',
'tk 8.5*', 'xz 5.0.5', 'zlib 1.2*', 'pip'),
channel=Channel(scheme='https', auth=None,
location='repo.anaconda.com', token=None,