# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def add_requirements(builder, cache):
    """Resolve the enclosing-scope `requirements` and register each result with *builder*.

    NOTE(review): `requirements` is a free variable here — presumably defined at module
    or enclosing-function scope; confirm against the full file.
    """
    resolved = resolve(requirements, cache=cache, interpreter=builder.interpreter)
    for dist in resolved:
        builder.add_requirement(dist.requirement)
        builder.add_distribution(dist.distribution)
def test_osx_platform_intel_issue_523():
    """Regression test for issue 523: building a pex with a known-bad Apple interpreter."""

    def bad_interpreter(include_site_extras=True):
        # Loads the known-bad Apple-shipped interpreter from its binary path.
        return PythonInterpreter.from_binary(_KNOWN_BAD_APPLE_INTERPRETER,
                                             include_site_extras=include_site_extras)

    interpreter = bad_interpreter(include_site_extras=False)
    with temporary_dir() as cache:
        # We need to run the bad interpreter with a modern, non-Apple-Extras setuptools in order to
        # successfully install psutil.
        for requirement in (SETUPTOOLS_REQUIREMENT, WHEEL_REQUIREMENT):
            # We can't use wheels since we're bootstrapping them.
            resolved = resolver.resolve([requirement],
                                        cache=cache,
                                        precedence=(SourcePackage, EggPackage),
                                        interpreter=interpreter)
            for dist in resolved:
                # Layer each bootstrapped dist onto the interpreter's extras.
                interpreter = interpreter.with_extra(dist.key, dist.version, dist.location)

        with nested(yield_pex_builder(installer_impl=WheelInstaller, interpreter=interpreter),
                    temporary_filename()) as (pb, pex_file):
            for dist in resolver.resolve(['psutil==5.4.3'],
                                         cache=cache,
                                         precedence=(SourcePackage, WheelPackage),
                                         interpreter=interpreter):
                pb.add_dist_location(dist.location)
            pb.build(pex_file)
# NB: We want PEX to find the bare bad interpreter at runtime.
def test_pex_run_conflicting_custom_setuptools_useable():
    # Here we use an older setuptools to build the pex which has a newer setuptools requirement.
    # These setuptools dists have different pkg_resources APIs:
    # $ diff \
    #   <(zipinfo -1 setuptools-20.3.1-py2.py3-none-any.whl | grep pkg_resources/ | sort) \
    #   <(zipinfo -1 setuptools-40.4.3-py2.py3-none-any.whl | grep pkg_resources/ | sort)
    # 2a3,4
    # > pkg_resources/py31compat.py
    # > pkg_resources/_vendor/appdirs.py
    with temporary_dir() as resolve_cache:
        old_setuptools_dists = resolve(['setuptools==20.3.1'], cache=resolve_cache)
        interpreter = PythonInterpreter.from_binary(
            sys.executable,
            path_extras=[d.location for d in old_setuptools_dists],
            include_site_extras=False)
        new_setuptools_dists = resolve(['setuptools==40.4.3'], cache=resolve_cache)
        with temporary_dir() as temp_dir:
            # The imported names only exist in the newer setuptools' pkg_resources.
            pex = write_simple_pex(
                temp_dir,
                'from pkg_resources import appdirs, py31compat',
                dists=new_setuptools_dists,
                interpreter=interpreter
            )
            returncode = PEX(pex.path()).run()
            assert returncode == 0
# Confirm separate PYTHONPATH entries, which we need to test namespace packages.
self.assertNotEqual(synthetic_target_one.target_base, synthetic_target_two.target_base)
targets = (synthetic_target_one, synthetic_target_two)
self.context(for_subsystems=[PythonInterpreterCache, PythonRepos])
interpreter_cache = PythonInterpreterCache.global_instance()
python_repos = PythonRepos.global_instance()
interpreter = interpreter_cache.select_interpreter_for_targets(targets)
# We need setuptools to import namespace packages under python 2 (via pkg_resources), so we
# prime the PYTHONPATH with a known good version of setuptools.
# TODO(John Sirois): We really should be emitting setuptools in a
# `synthetic_target_extra_dependencies` override in `ApacheThriftPyGen`:
# https://github.com/pantsbuild/pants/issues/5975
pythonpath = [os.path.join(get_buildroot(), t.target_base) for t in targets]
for resolved_dist in resolve([f'thrift=={self.get_thrift_version(apache_thrift_gen)}',
'setuptools==40.6.3'],
interpreter=interpreter,
context=python_repos.get_network_context(),
fetchers=python_repos.get_fetchers()):
pythonpath.append(resolved_dist.distribution.location)
process = subprocess.Popen([interpreter.binary,
'-c',
'from foo.bar.ttypes import One; from foo.baz.ttypes import Two'],
env={'PYTHONPATH': os.pathsep.join(pythonpath)},
stderr=subprocess.PIPE)
_, stderr = process.communicate()
self.assertEqual(0, process.returncode, stderr)
:param find_links: Additional paths to search for source packages during resolution.
"""
distributions = dict()
interpreter = interpreter or PythonInterpreter.get()
if not isinstance(interpreter, PythonInterpreter):
raise TypeError('Expected interpreter to be a PythonInterpreter, got {}'.format(type(interpreter)))
cache = python_setup.egg_cache_dir
platforms = get_platforms(platforms or python_setup.platforms)
fetchers = python_repos.get_fetchers()
if find_links:
fetchers.extend(Fetcher([path]) for path in find_links)
context = python_repos.get_network_context()
for platform in platforms:
distributions[platform] = resolve(
requirements=requirements,
interpreter=interpreter,
fetchers=fetchers,
platform=platform,
context=context,
cache=cache,
cache_ttl=ttl)
return distributions
def _resolve_plugins(self):
    """Resolve the configured plugin requirements and return their distributions.

    NOTE(review): unlike the other `_resolve_plugins` variants in this file, this one
    does not pass an `interpreter=` to the resolver — presumably this snippet predates
    that parameter; confirm against the class it belongs to.
    """
    logger.info('Resolving new plugins...:\n {}'.format('\n '.join(self._plugin_requirements)))
    # Effectively never expire.
    ten_years_in_seconds = 10 * 365 * 24 * 60 * 60
    resolved_dists = resolver.resolve(
        self._plugin_requirements,
        fetchers=self._python_repos.get_fetchers(),
        context=self._python_repos.get_network_context(),
        cache=self.plugin_cache_dir,
        cache_ttl=ten_years_in_seconds,
        allow_prereleases=PANTS_SEMVER.is_prerelease,
        # Plugins will all depend on `pantsbuild.pants` which is
        # distributed as a manylinux wheel.
        use_manylinux=True)
    return [rd.distribution for rd in resolved_dists]
def _resolve_plugins(self):
    """Resolve the configured plugin requirements against our interpreter.

    Returns the list of resolved distributions.
    """
    logger.info('Resolving new plugins...:\n {}'.format('\n '.join(self._plugin_requirements)))
    resolve_kwargs = dict(
        fetchers=self._python_repos.get_fetchers(),
        interpreter=self._interpreter,
        context=self._python_repos.get_network_context(),
        cache=self.plugin_cache_dir,
        # Effectively never expire.
        cache_ttl=10 * 365 * 24 * 60 * 60,
        allow_prereleases=PANTS_SEMVER.is_prerelease,
        # Plugins will all depend on `pantsbuild.pants` which is
        # distributed as a manylinux wheel.
        use_manylinux=True,
    )
    resolved = resolver.resolve(self._plugin_requirements, **resolve_kwargs)
    return [resolved_dist.distribution for resolved_dist in resolved]
:param find_links: Additional paths to search for source packages during resolution.
:return: Map of platform name -> list of :class:`pkg_resources.Distribution` instances needed
to satisfy the requirements on that platform.
"""
python_setup = self._python_setup_subsystem
python_repos = self._python_repos_subsystem
platforms = platforms or python_setup.platforms
find_links = find_links or []
distributions = {}
fetchers = python_repos.get_fetchers()
fetchers.extend(Fetcher([path]) for path in find_links)
for platform in platforms:
requirements_cache_dir = os.path.join(python_setup.resolver_cache_dir,
str(interpreter.identity))
resolved_dists = resolve(
requirements=[str(req.requirement) for req in requirements],
interpreter=interpreter,
fetchers=fetchers,
platform=platform,
context=python_repos.get_network_context(),
cache=requirements_cache_dir,
cache_ttl=python_setup.resolver_cache_ttl,
allow_prereleases=python_setup.resolver_allow_prereleases,
use_manylinux=python_setup.use_manylinux)
distributions[platform] = [resolved_dist.distribution for resolved_dist in resolved_dists]
return distributions
def _resolve_plugins(self):
    """Resolve all plugin requirements and return the distributions that satisfy them."""
    requirements = self._plugin_requirements
    logger.info('Resolving new plugins...:\n {}'.format('\n '.join(requirements)))
    repos = self._python_repos
    results = resolver.resolve(
        requirements,
        fetchers=repos.get_fetchers(),
        interpreter=self._interpreter,
        context=repos.get_network_context(),
        cache=self.plugin_cache_dir,
        # Effectively never expire.
        cache_ttl=10 * 365 * 24 * 60 * 60,
        allow_prereleases=PANTS_SEMVER.is_prerelease,
        # Plugins will all depend on `pantsbuild.pants` which is
        # distributed as a manylinux wheel.
        use_manylinux=True)
    return [r.distribution for r in results]