import sys
import conda.config


def main():
    # Hidden shell helper; the trailing comma makes ('..changeps1',) a tuple.
    assert sys.argv[1] in ('..changeps1',)
    if sys.argv[1] == '..changeps1':
        print(int(conda.config.changeps1))
        sys.exit(0)
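
A minimal way to exercise this helper in-process; the program name is a hypothetical stand-in, since in practice a shell wrapper passes '..changeps1' on the command line:

# Illustrative invocation only -- 'conda-helper' is a made-up program name.
import sys
sys.argv = ['conda-helper', '..changeps1']
main()  # prints 1 if conda's changeps1 setting is enabled, 0 otherwise, then exits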
# ... where NAME is taken to be the name of the environment.
import os

from traitlets import List  # IPython.utils.traitlets in older codebases

try:
    import conda
    HAVE_CONDA = True
except ImportError:
    HAVE_CONDA = False

# Take the default home dirs for conda and virtualenv as the defaults.
_default_conda_dirs = ['~/.conda/envs/']
_default_virtualenv_dirs = ['~/.virtualenvs']

# Check for the CONDA_ENV_PATH variable and add it to the list if set.
if os.environ.get('CONDA_ENV_PATH', False):
    _default_conda_dirs.append(os.environ['CONDA_ENV_PATH'].split('envs')[0])

# If we are running inside the root conda env we can get all the env dirs:
if HAVE_CONDA:
    import conda.config
    _default_conda_dirs += conda.config.envs_dirs

# Remove any duplicates.
_default_conda_dirs = list(set(map(os.path.expanduser, _default_conda_dirs)))

conda_env_dirs = List(
    _default_conda_dirs,
    config=True,
    help="List of directories in which conda environments are kept.")

virtualenv_env_dirs = List(
    _default_virtualenv_dirs,
    config=True,
    help="List of directories in which virtualenv environments are kept.")

blacklist_envs = List(
    config=True,
    help="Environments to ignore.")  # assumed completion of a truncated trait
import conda.config


def distribution_exists_on_channel(binstar_cli, meta, owner, channel='main'):
    """
    Determine whether a distribution exists on a specific channel.

    Note from @pelson: As far as I can see, there is no easy way to do this
    on binstar.
    """
    fname = '{}/{}.tar.bz2'.format(conda.config.subdir, meta.dist())
    distributions_on_channel = [dist['basename'] for dist in
                                binstar_cli.show_channel(owner=owner,
                                                         channel=channel)['files']]
    return fname in distributions_on_channel
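
A hedged usage sketch for the function above; the client constructor, recipe path, and owner name are illustrative assumptions rather than part of the excerpt:

# Hypothetical usage -- import paths and values below are assumptions.
from binstar_client.utils import get_server_api
from conda_build.metadata import MetaData

binstar_cli = get_server_api()       # authenticated anaconda.org/binstar client
meta = MetaData('path/to/recipe')    # recipe directory containing meta.yaml
if distribution_exists_on_channel(binstar_cli, meta, owner='my-org'):
    print('Distribution already on the main channel; skipping upload.')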
import os
from os.path import isdir

# bldpkgs_dir, update_index, fetch_index, get_index, url_path, config, cc and
# plan are supplied by the surrounding conda-build module in the original source.


def create_env(pref, specs, pypi=False):
    """Create a conda environment at prefix `pref` satisfying `specs`."""
    if not isdir(bldpkgs_dir):
        os.makedirs(bldpkgs_dir)
    update_index(bldpkgs_dir)
    # Remove the cache so that a refetch is made; this is necessary because
    # we add the local build repo URL.
    fetch_index.cache = {}
    index = get_index([url_path(config.croot)])
    cc.pkgs_dirs = cc.pkgs_dirs[:1]
    if pypi:
        from conda.from_pypi import install_from_pypi
        specs = install_from_pypi(pref, index, specs)
    actions = plan.install_actions(pref, index, specs)
    plan.display_actions(actions, index)
    plan.execute_actions(actions, index, verbose=True)
    # Ensure the prefix exists, even if empty (i.e. when specs are empty).
    if not isdir(pref):
        os.makedirs(pref)
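
A hedged call sketch, assuming the conda-build module context noted above is available; the prefix path and spec strings are illustrative values only:

# Hypothetical call -- prefix and specs are illustrative, not from the excerpt.
create_env('/opt/conda/envs/_build', ['python 3.4*', 'numpy 1.7*'])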
def _get_items(self):
    # Collect the names found in each configured envs directory, silently
    # skipping directories that do not exist.
    res = []
    for envs_dir in config.envs_dirs:
        try:
            res.extend(os.listdir(envs_dir))
        except OSError:
            pass
    return res
from __future__ import print_function, division, absolute_import

import os
import sys
from os.path import abspath, expanduser, join

import conda.config as cc

# Target Python/NumPy versions, overridable via environment variables.
CONDA_PY = int(os.getenv('CONDA_PY', cc.default_python.replace('.', '')))
CONDA_NPY = int(os.getenv('CONDA_NPY', 17))
PY3K = int(bool(CONDA_PY >= 30))

# Build root: inside the conda installation if it is writable, otherwise
# under the user's home directory.
if cc.root_writable:
    croot = join(cc.root_dir, 'conda-bld')
else:
    croot = abspath(expanduser('~/conda-bld'))

build_prefix = join(cc.envs_dirs[0], '_build')
test_prefix = join(cc.envs_dirs[0], '_test')


def _get_python(prefix):
    if sys.platform == 'win32':
        res = join(prefix, 'python.exe')
    else:
        res = join(prefix, 'bin/python')
        if PY3K:
            res += '3'
    return res


build_python = _get_python(build_prefix)
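
A small hedged sketch of how the resolved interpreter path might be used, for example to run a command inside the build environment:

# Illustrative only: run a command with the build environment's interpreter.
import subprocess
subprocess.check_call([build_python, '-c', 'import sys; print(sys.version)'])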
import argparse

# `config` is supplied by the surrounding conda-build module in the original
# source (it provides the default for --no-binstar-upload).


def main():
    p = argparse.ArgumentParser(
        description='tool for building conda packages'
    )
    p.add_argument(
        '-c', "--check",
        action="store_true",
        help="only check (validate) the recipe",
    )
    p.add_argument(
        "--no-binstar-upload",
        action="store_false",
        help="do not ask to upload the package to binstar",
        dest='binstar_upload',
        default=config.binstar_upload,
    )
    p.add_argument(
        "--output",
        action="store_true",
        help="output the conda package filename which would have been "
             "created and exit",
    )
    p.add_argument(
        '-s', "--source",
        action="store_true",
        help="only obtain the source (but don't build)",
    )
    p.add_argument(
        '-t', "--test",
        action="store_true",
        help="test package (assumes package is already built)",
    )
import json
import warnings
from os.path import join

# config, CondaSession, create_cache_dir and cache_fn_url come from the
# surrounding conda module in the original source.


def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
    if not config.ssl_verify:
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)

    session = session or CondaSession(ssl_verify=config.ssl_verify,
                                      proxy_servers=config.get_proxy_servers())

    cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url))
    try:
        with open(cache_path) as f:
            cache = json.load(f)
    except (IOError, ValueError):
        cache = {'packages': {}}

    if use_cache:
        return cache

    # Build conditional-request headers from the cached ETag / Last-Modified
    # values so the server can answer 304 Not Modified when nothing changed.
    headers = {}
    if "_etag" in cache:
        headers["If-None-Match"] = cache["_etag"]
    if "_mod" in cache:
        headers["If-Modified-Since"] = cache["_mod"]