except ValueError:
logger.warn("Unsupported hash name %s for package %s" % (link.hash_name, link))
try:
total_length = int(resp.headers['content-length'])
except (ValueError, KeyError, TypeError):
total_length = 0
downloaded = 0
show_progress = total_length > 40 * 1000 or not total_length
show_url = link.show_url
try:
if show_progress:
## FIXME: the URL can get really long in this message:
if total_length:
logger.start_progress('Downloading %s (%s): ' % (show_url, format_size(total_length)))
else:
logger.start_progress('Downloading %s (unknown size): ' % show_url)
else:
logger.notify('Downloading %s' % show_url)
logger.info('Downloading from URL %s' % link)
def resp_read(chunk_size):
try:
# Special case for urllib3.
try:
for chunk in resp.raw.stream(
chunk_size, decode_content=False):
yield chunk
except IncompleteRead as e:
raise ChunkedEncodingError(e)
except AttributeError:
# Standard file-like object.
while True:
    chunk = resp.raw.read(chunk_size)
    if not chunk:
        break
    yield chunk
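# A self-contained sketch (not from the original source) of the same
# chunk-streaming fallback used for plain file-like objects above; the
# function and sample names here are illustrative assumptions:
import io

def stream_chunks(fp, chunk_size=4096):
    # Read fixed-size chunks until EOF, mirroring resp_read's fallback path.
    while True:
        chunk = fp.read(chunk_size)
        if not chunk:
            break
        yield chunk

total = sum(len(c) for c in stream_chunks(io.BytesIO(b'x' * 10000)))
assert total == 10000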
@classmethod
def from_line(cls, name, comes_from=None):
"""Creates an InstallRequirement from a name, which might be a
requirement, filename, or URL.
"""
url = None
name = name.strip()
req = name
if is_url(name):
url = name
## FIXME: I think getting the requirement here is a bad idea:
#req = get_requirement_from_url(url)
req = None
elif is_filename(name):
if not os.path.exists(name):
logger.warn('Requirement %r looks like a filename, but the file does not exist'
% name)
url = filename_to_url(name)
#req = get_requirement_from_url(url)
req = None
return cls(req, comes_from, url=url)
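# A self-contained sketch of the dispatch from_line performs: classify an
# input line as a URL, a filename, or a bare requirement. The checks below
# are simplified assumptions, not pip's real is_url/is_filename helpers:
import os

def classify_line(line):
    line = line.strip()
    if line.lower().startswith(('http://', 'https://', 'file://')):
        return 'url'
    if os.path.sep in line or line.endswith(('.tar.gz', '.zip')):
        return 'filename'
    return 'requirement'

assert classify_line('http://example.com/pkg-1.0.tar.gz') == 'url'
assert classify_line('./downloads/pkg-1.0.zip') == 'filename'
assert classify_line('Markdown') == 'requirement'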
content = zip.read(name)
dest = os.path.join(package_path, name)
if not os.path.exists(os.path.dirname(dest)):
os.makedirs(os.path.dirname(dest))
if not content and dest.endswith(os.path.sep):
if not os.path.exists(dest):
os.makedirs(dest)
else:
f = open(dest, 'wb')
f.write(content)
f.close()
else:
to_save.append((name, zip.read(name)))
zip.close()
if not to_save:
logger.info('Removing now-empty zip file %s' % display_path(zip_filename))
os.unlink(zip_filename)
self.remove_filename_from_pth(zip_filename)
else:
logger.info('Removing entries in %s/ from zip file %s' % (module_name, display_path(zip_filename)))
zip = zipfile.ZipFile(zip_filename, 'w')
for name, content in to_save:
zip.writestr(name, content)
zip.close()
finally:
logger.indent -= 2
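# A self-contained sketch of the rewrite pattern above: copy every entry
# except a pruned prefix into a fresh archive. prune_zip and its arguments
# are illustrative, not part of the original code:
import zipfile

def prune_zip(zip_path, prefix):
    # Collect the entries to keep, then rewrite the archive from scratch,
    # mirroring the read-all/close/reopen-for-write sequence above.
    zf = zipfile.ZipFile(zip_path, 'r')
    to_save = [(n, zf.read(n)) for n in zf.namelist()
               if not n.startswith(prefix)]
    zf.close()
    zf = zipfile.ZipFile(zip_path, 'w')
    for name, content in to_save:
        zf.writestr(name, content)
    zf.close()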
if self.add_requirement(subreq):
reqs.append(subreq)
if not self.has_requirement(req_to_install.name):
#'unnamed' requirements will get added here
self.add_requirement(req_to_install)
self.reqs_to_cleanup.append(req_to_install)
if install:
self.successfully_downloaded.append(req_to_install)
if bundle and (req_to_install.url and req_to_install.url.startswith('file:///')):
self.copy_to_build_dir(req_to_install)
finally:
logger.indent -= 2
except ImportError:
raise CommandError(
"'pip wheel' requires setuptools >= 0.8 for dist-info support."
" To fix this, run: pip install --upgrade setuptools"
)
else:
if not hasattr(pkg_resources, 'DistInfoDistribution'):
raise CommandError(
"'pip wheel' requires setuptools >= 0.8 for dist-info "
"support. To fix this, run: pip install --upgrade "
"setuptools"
)
index_urls = [options.index_url] + options.extra_index_urls
if options.no_index:
logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
index_urls = []
if options.use_mirrors:
logger.deprecated("1.7",
"--use-mirrors has been deprecated and will be removed"
" in the future. Explicit uses of --index-url and/or "
"--extra-index-url is suggested.")
if options.mirrors:
logger.deprecated("1.7",
"--mirrors has been deprecated and will be removed in "
" the future. Explicit uses of --index-url and/or "
"--extra-index-url is suggested.")
index_urls += options.mirrors
session = self._build_session(options)
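# A self-contained sketch of how the index list above is assembled under
# the different flags; the _Options stand-in is illustrative only:
class _Options(object):
    index_url = 'https://pypi.python.org/simple/'
    extra_index_urls = ['https://mirror.example.org/simple/']
    no_index = False
    mirrors = ['https://pypi.mirror.example.net/']

options = _Options()
index_urls = [options.index_url] + options.extra_index_urls
if options.no_index:
    index_urls = []            # --no-index wins over every index option
index_urls += options.mirrors  # deprecated --mirrors entries are appended
assert len(index_urls) == 3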
def __init__(self, src_dir, requirements, successfully_downloaded):
self.src_dir = src_dir
self.comparison_cache = ({}, {}) # two maps: one a -> b, the other b -> a
self.pre_installed = {} # maps name -> PackageData
self.repo_up_to_date = {} # maps local git clone path -> boolean
self.requirements = requirements
self.successfully_downloaded = successfully_downloaded
try:
self.load_installed_distributions()
except Exception as e:
    logger.notify("Exception loading installed distributions: " + str(e))
raise
self.prefer_pinned_revision = False
def get_info(self, location):
"""Returns (url, revision), where both are strings"""
assert not location.rstrip('/').endswith(self.dirname), 'Bad directory: %s' % location
output = call_subprocess(
['svn', 'info', location], show_stdout=False, extra_environ={'LANG': 'C'})
match = _svn_url_re.search(output)
if not match:
logger.warn('Cannot determine URL of svn checkout %s' % display_path(location))
logger.info('Output that cannot be parsed: \n%s' % output)
return None, None
url = match.group(1).strip()
match = _svn_revision_re.search(output)
if not match:
logger.warn('Cannot determine revision of svn checkout %s' % display_path(location))
logger.info('Output that cannot be parsed: \n%s' % output)
return url, None
return url, match.group(1)
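# A self-contained sketch of the parsing get_info relies on. The regexes
# are assumptions modeled on pip's _svn_url_re/_svn_revision_re, and the
# sample text imitates `svn info` output under LANG=C:
import re

_url_re = re.compile(r'^URL: (.+)$', re.MULTILINE)
_revision_re = re.compile(r'^Revision: (\d+)$', re.MULTILINE)

sample = 'Path: .\nURL: http://svn.example.org/repo/trunk\nRevision: 1234\n'
url = _url_re.search(sample).group(1).strip()
revision = _revision_re.search(sample).group(1)
assert (url, revision) == ('http://svn.example.org/repo/trunk', '1234')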
# unpack and construct the requirement set
self.requirement_set.prepare_files(self.finder)
reqset = self.requirement_set.requirements.values()
buildset = [req for req in reqset if not req.is_wheel]
if not buildset:
return
# build the wheels
logger.notify(
'Building wheels for collected packages: %s' %
','.join([req.name for req in buildset])
)
logger.indent += 2
build_success, build_failure = [], []
for req in buildset:
if self._build_one(req):
build_success.append(req)
else:
build_failure.append(req)
logger.indent -= 2
# notify success/failure
if build_success:
logger.notify('Successfully built %s' % ' '.join([req.name for req in build_success]))
if build_failure:
logger.notify('Failed to build %s' % ' '.join([req.name for req in build_failure]))
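# A hedged sketch of what a _build_one-style helper could do: run
# `setup.py bdist_wheel` in the requirement's source directory and treat a
# zero exit status as success. The exact command pip used may differ:
import subprocess
import sys

def build_one(source_dir, wheel_dir):
    # Returns True when the wheel build subprocess exits cleanly.
    cmd = [sys.executable, 'setup.py', 'bdist_wheel', '-d', wheel_dir]
    return subprocess.call(cmd, cwd=source_dir) == 0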
if cache is not None:
cache.set_is_archive(url)
return None
logger.debug('Getting page %s' % url)
resp = urllib2.urlopen(url)
real_url = resp.geturl()
headers = resp.info()
inst = cls(resp.read(), real_url, headers)
except (urllib2.HTTPError, urllib2.URLError, socket.timeout, socket.error) as e:
desc = str(e)
if isinstance(e, socket.timeout):
log_meth = logger.info
level = 1
desc = 'timed out'
elif isinstance(e, urllib2.URLError):
log_meth = logger.info
if hasattr(e, 'reason') and isinstance(e.reason, socket.timeout):
desc = 'timed out'
level = 1
else:
level = 2
elif isinstance(e, urllib2.HTTPError) and e.code == 404:
## FIXME: notify?
log_meth = logger.info
level = 2
else:
log_meth = logger.info
level = 1
log_meth('Could not fetch URL %s: %s' % (link, desc))
log_meth('Will skip URL %s when looking for download links for %s' % (link.url, req))
if cache is not None:
cache.add_page_failure(url, level)
def print_results(hits, name_column_width=25, terminal_width=None):
installed_packages = [p.project_name for p in pkg_resources.working_set]
for hit in hits:
name = hit['name']
summary = hit['summary'] or ''
if terminal_width is not None:
# wrap and indent summary to fit terminal
summary = textwrap.wrap(summary, terminal_width - name_column_width - 5)
summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)
line = '%s - %s' % (name.ljust(name_column_width), summary)
try:
logger.notify(line)
if name in installed_packages:
dist = pkg_resources.get_distribution(name)
logger.indent += 2
try:
latest = highest_version(hit['versions'])
if dist.version == latest:
logger.notify('INSTALLED: %s (latest)' % dist.version)
else:
logger.notify('INSTALLED: %s' % dist.version)
logger.notify('LATEST: %s' % latest)
finally:
logger.indent -= 2
except UnicodeEncodeError:
pass
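# A self-contained sketch of the summary wrapping print_results performs,
# using the same textwrap strategy; the widths and text are illustrative:
import textwrap

name_column_width = 25
terminal_width = 60
summary = 'A long package summary that will not fit on a single line.'
wrapped = textwrap.wrap(summary, terminal_width - name_column_width - 5)
line = '%s - %s' % ('example-pkg'.ljust(name_column_width),
                    ('\n' + ' ' * (name_column_width + 3)).join(wrapped))
print(line)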