    'perl-encode',
    'perl-exporter',
    'perl-getopt-long',
    'perl-lib',
    'perl-pod-usage',
    'perl-time-hires',
    'perl-pod-escapes',
    'perl-extutils-makemaker',
    'perl-test',
    'perl-parent',
    'perl-data-dumper',
    'perl-test-harness',
)

all_package_names = set(info['name'] for info in itervalues(keep))
for fn, info in r6json['packages'].items():
    if info['name'] in keep_list:
        _keep[fn] = info
        for dep in info['depends']:
            dep = dep.split()[0]
            if dep not in keep_list and dep not in all_package_names:
                missing_in_whitelist.add(dep)

if missing_in_whitelist:
    print(">>> missing 6 <<<")
    pprint(missing_in_whitelist)

# patch 'perl-*' to include an explicit dependency on perl, as from the 'perl-threaded' package
perl_info_dicts = tuple(info for info in _keep.values() if info['name'].startswith('perl-'))
for info in perl_info_dicts:
    if not any(dep.startswith("perl ") for dep in info['depends']):
        info['depends'].append('perl 5.22.0*')
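
# Most of these excerpts rely on conda's Python 2/3 compatibility helpers
# (typically imported from conda.common.compat). A minimal sketch of what
# itervalues/iteritems amount to on Python 3 -- an approximation for reference,
# not conda's verbatim implementation:
def itervalues(d, **kw):
    # iterate over a mapping's values without materializing a list
    return iter(d.values(**kw))

def iteritems(d, **kw):
    # iterate over a mapping's (key, value) pairs
    return iter(d.items(**kw))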

def _verify_transaction_level(prefix_setups):
    # 1. make sure we're not removing conda from conda's env
    # 2. make sure we're not removing a conda dependency from conda's env
    # 3. enforce context.disallowed_packages
    # 4. make sure we're not removing pinned packages without no-pin flag
    # 5. make sure conda-meta/history for each prefix is writable
    # TODO: Verification 4

    conda_prefixes = (join(context.root_prefix, 'envs', '_conda_'), context.root_prefix)
    conda_setups = tuple(setup for setup in itervalues(prefix_setups)
                         if setup.target_prefix in conda_prefixes)

    conda_unlinked = any(prec.name == 'conda'
                         for setup in conda_setups
                         for prec in setup.unlink_precs)

    conda_prec, conda_final_setup = next(
        ((prec, setup)
         for setup in conda_setups
         for prec in setup.link_precs
         if prec.name == 'conda'),
        (None, None)
    )

    if conda_unlinked and conda_final_setup is None:
        # means conda is being unlinked and not re-linked anywhere
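        # (The excerpt stops here. Presumably this branch reports an error such as
        # conda.exceptions.RemoveError, since unlinking conda from its own environment
        # without re-linking it would leave conda broken -- an inference, not code
        # shown in this excerpt.)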

def _get_pfe(self):
    from .package_cache_data import ProgressiveFetchExtract
    if self._pfe is not None:
        pfe = self._pfe
    elif not self.prefix_setups:
        self._pfe = pfe = ProgressiveFetchExtract(())
    else:
        link_precs = set(concat(stp.link_precs for stp in itervalues(self.prefix_setups)))
        self._pfe = pfe = ProgressiveFetchExtract(link_precs)
    return pfe
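
# Note on _get_pfe above: the ProgressiveFetchExtract instance is built lazily from the
# union of link_precs across all prefix setups and cached on self._pfe, so repeated calls
# reuse the same object. Downstream code would then presumably drive it with something
# like pfe.execute() to download and extract the needed packages (the execute() call is
# an assumption here, not shown in this excerpt).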

def custom_channels(self):
    from ..models.channel import Channel
    custom_channels = (Channel.make_simple_channel(self.channel_alias, url, name)
                       for name, url in iteritems(self._custom_channels))
    channels_from_multichannels = concat(channel for channel
                                         in itervalues(self.custom_multichannels))
    all_channels = odict((x.name, x) for x in (ch for ch in concatv(
        channels_from_multichannels,
        custom_channels,
    )))
    return all_channels
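
# Note on the excerpt above: because custom_channels comes after
# channels_from_multichannels in the concatv(...) call, a custom channel whose name
# matches a multichannel-derived entry replaces it when the odict is built
# (later duplicate keys win).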

                                                      session=session), url)
                                     for url in reversed(channel_urls)])
        for future in future_to_url:
            url = future_to_url[future]
            repodatas.append((url, future.result()))
    except ImportError:
        # concurrent.futures is only available in Python 3
        repodatas = map(lambda url: (url, fetch_repodata(url,
                                                         use_cache=use_cache, session=session)),
                        reversed(channel_urls))

    for url, repodata in repodatas:
        if repodata is None:
            continue
        new_index = repodata['packages']
        for info in itervalues(new_index):
            info['channel'] = url
        index.update(new_index)

    stdoutlog.info('\n')

    if unknown:
        add_unknown(index)
    if config.add_pip_as_python_dependency:
        add_pip_dependency(index)
    return index
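
# Sketch of the concurrent-fetch pattern the truncated block above presumably uses:
# submit one fetch per channel URL to a thread pool and collect (url, repodata) pairs
# as the futures complete. `fetch_repodata` is passed in as a stand-in callable, not
# conda's real signature; treat this as an illustration only.
from concurrent.futures import ThreadPoolExecutor, as_completed

def fetch_all(channel_urls, fetch_repodata, max_workers=10):
    # fetch repodata for every channel URL concurrently
    repodatas = []
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        future_to_url = {executor.submit(fetch_repodata, url): url for url in channel_urls}
        for future in as_completed(future_to_url):
            repodatas.append((future_to_url[future], future.result()))
    return repodatas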

        print('\n'.join(lines))
        return

    if args.show:
        from collections import OrderedDict
        d = OrderedDict((key, getattr(context, key))
                        for key in context.list_parameters())
        if context.json:
            print(json.dumps(d, sort_keys=True, indent=2, separators=(',', ': '),
                             cls=EntityEncoder))
        else:
            # coerce channels
            d['custom_channels'] = {
                channel.name: "%s://%s" % (channel.scheme, channel.location)
                for channel in itervalues(d['custom_channels'])
            }
            # TODO: custom_multichannels needs better formatting
            d['custom_multichannels'] = {k: json.dumps([text_type(c) for c in chnls])
                                         for k, chnls in iteritems(d['custom_multichannels'])}
            print('\n'.join(format_dict(d)))
        context.validate_configuration()
        return

    if args.describe:
        paramater_names = context.list_parameters()
        if context.json:
            print(json.dumps([context.describe_parameter(name) for name in paramater_names],
                             sort_keys=True, indent=2, separators=(',', ': '),
                             cls=EntityEncoder))
        else:
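
# The non-JSON `--describe` branch is cut off above. A minimal sketch of what a plain-text
# rendering could look like, using only the two context methods already seen in the excerpt
# (list_parameters / describe_parameter) and assuming the returned mapping carries a
# 'description' entry -- an assumption, not conda's actual formatting:
from conda.base.context import context

def print_parameter_descriptions():
    for name in context.list_parameters():
        details = context.describe_parameter(name)
        print("%s: %s" % (name, details.get('description', '')))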

                and any(dist.name == "conda" for dist in solution)):
            specs_map["conda"] = MatchSpec("conda")

        # add in explicitly requested specs from specs_to_add
        # this overrides any name-matching spec already in the spec map
        specs_map.update((s.name, s) for s in specs_to_add)

        # collect additional specs to add to the solution
        track_features_specs = pinned_specs = ()
        if context.track_features:
            track_features_specs = tuple(MatchSpec(x + '@') for x in context.track_features)
        if not ignore_pinned:
            pinned_specs = get_pinned_specs(self.prefix)

        final_environment_specs = IndexedSet(concatv(
            itervalues(specs_map),
            track_features_specs,
            pinned_specs,
        ))

        # We've previously checked `solution` for consistency (which at that point was the
        # pre-solve state of the environment). Now we check our compiled set of
        # `final_environment_specs` for the possibility of a solution. If there are conflicts,
        # we can often avoid them by neutering specs that have a target (e.g. removing the
        # version constraint) and also making them optional. The result here will be fewer
        # cases of `UnsatisfiableError` handed to users, at the cost of more packages being
        # modified or removed from the environment.
        conflicting_specs = r.get_conflicting_specs(tuple(final_environment_specs))
        if log.isEnabledFor(DEBUG):
            log.debug("conflicting specs: %s", dashlist(conflicting_specs))
        for spec in conflicting_specs:
            if spec.target:
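                # (The excerpt is cut off here. Presumably the conflicting spec is
                # replaced in final_environment_specs with a "neutered" spec built only
                # from its name and target, e.g. MatchSpec(spec.name, target=spec.target)
                # -- an approximation, not the verbatim source.)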

def _topo_sort_handle_cycles(cls, graph):
    # remove edges that point directly back to the node
    for k, v in iteritems(graph):
        v.discard(k)

    # disconnected nodes go first
    nodes_that_are_parents = set(node for parents in itervalues(graph) for node in parents)
    nodes_without_parents = (node for node in graph if not graph[node])
    disconnected_nodes = sorted(
        (node for node in nodes_without_parents if node not in nodes_that_are_parents),
        key=lambda x: x.name
    )
    for node in disconnected_nodes:
        yield node

    t = cls._toposort_raise_on_cycles(graph)

    while True:
        try:
            value = next(t)
            yield value
        except CyclicalDependencyError as e:
            # TODO: Turn this into a warning, but without being too annoying with
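            # (Truncated mid-comment. The handler presumably logs the detected cycle and
            # then breaks it -- for example by discarding one of the offending edges in
            # `graph` before continuing -- while a StopIteration from next(t) ends the
            # while-loop. This is an inference from context, not code shown here.)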

            specs_to_add,
            itervalues(specs_from_history_map),
        ))
        index, r = self._prepare(prepared_specs)

        if specs_to_remove:
            # In a previous implementation, we invoked SAT here via `r.remove()` to help with
            # spec removal, and then later invoked SAT again via `r.solve()`. Rather than
            # invoking SAT for spec removal determination, we can use the PrefixGraph and
            # simple tree traversal if we're careful about how we handle features. We still
            # invoke SAT via `r.solve()` later.
            _track_fts_specs = (spec for spec in specs_to_remove if 'track_features' in spec)
            feature_names = set(concat(spec.get_raw_value('track_features')
                                       for spec in _track_fts_specs))
            graph = PrefixGraph((index[dist] for dist in solution), itervalues(specs_map))

            removed_records = []
            for spec in specs_to_remove:
                # If the spec was a track_features spec, then we need to also remove every
                # package with a feature that matches the track_feature. The
                # `graph.remove_spec()` method handles that for us.
                log.trace("using PrefixGraph to remove records for %s", spec)
                removed_records.extend(graph.remove_spec(spec))

            for rec in removed_records:
                # We keep specs (minus the feature part) for the non provides_features packages
                # if they're in the history specs. Otherwise, we pop them from the specs_map.
                rec_has_a_feature = set(rec.features or ()) & feature_names
                if rec_has_a_feature and rec.name in specs_from_history_map:
                    spec = specs_map.get(rec.name, MatchSpec(rec.name))
                    spec._match_components.pop('features', None)
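                    # (The excerpt ends here. Presumably the stripped-down spec is then
                    # stored back into specs_map, while records without a kept history
                    # spec are popped from specs_map in the else branch -- inferred from
                    # the surrounding comments, not verbatim source.)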

def linked_data(prefix, ignore_channels=False):
    """
    Return a dictionary of the linked packages in prefix.
    """
    pd = PrefixData(prefix)
    return {Dist(prefix_record): prefix_record for prefix_record in itervalues(pd._prefix_records)}
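
# Hedged usage sketch for the helper above: map every package linked into the base
# environment to its prefix record. `context.root_prefix` is conda's base-prefix attribute;
# assuming each value is a PrefixRecord, it carries fields such as name and version.
from conda.base.context import context

for dist, prefix_record in linked_data(context.root_prefix).items():
    print(dist, prefix_record.version)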