if 'HTTP_PROXY' in os.environ and scheme == 'http':
    kwargs['proxy'] = os.environ['HTTP_PROXY']
elif 'HTTPS_PROXY' in os.environ and scheme == 'https':
    kwargs['proxy'] = os.environ['HTTPS_PROXY']

async with session.get(url, timeout=timeout, **kwargs) as resp:
    if resp.status != 200:
        raise FailedDownload(filepath_partial, url, resp)
    else:
        filepath, skip = get_filepath(filepath_partial(resp, url), overwrite)
        if skip:
            return str(filepath)
        if callable(file_pb):
            file_pb = file_pb(position=token.n, unit='B', unit_scale=True,
                              desc=filepath.name, leave=False,
                              total=get_http_size(resp))
        else:
            file_pb = None
        # This queue will contain the downloaded chunks and their offsets
        # as tuples: (offset, chunk)
        downloaded_chunk_queue = asyncio.Queue()
        download_workers = []
        writer = self.loop.create_task(
            self._write_worker(downloaded_chunk_queue, file_pb, filepath))
        if max_splits and resp.headers.get('Accept-Ranges', None) == "bytes":
            content_length = int(resp.headers['Content-length'])
            split_length = content_length // max_splits
            ranges = [
                [start, start + split_length]
                for start in range(0, content_length, split_length)
            ]
def main():
    args = parse_args(sys.argv[1:])
    downloader = Downloader(max_conn=args.max_conn,
                            file_progress=not args.no_file_progress,
                            overwrite=args.overwrite)
    for url in args.urls:
        downloader.enqueue_file(url, path=args.directory)
    results = downloader.download()
    for i in results:
        print(i)

    err_str = ''
    for err in results.errors:
        err_str += f'{err.url} \t {err.exception}\n'
    if err_str:
        sys.exit(err_str)
def getenvironment(self):
    try:
        from parfive import Downloader
    except ImportError:
        log.error("To use gammapy download, install the parfive package!")
        return

    dl = Downloader(progress=False, file_progress=False)
    filename_env = "gammapy-" + self.release + "-environment.yml"
    url_file_env = RELEASES_BASE_URL + "/install/" + filename_env
    filepath_env = str(self.outfolder / filename_env)
    dl.enqueue_file(url_file_env, path=filepath_env)

    try:
        log.info(f"Downloading {url_file_env}")
        Path(filepath_env).parent.mkdir(parents=True, exist_ok=True)
        dl.download()
    except Exception as ex:
        log.error(ex)
        exit()
The proxy URL is read from the environment variables `HTTP_PROXY` or
`HTTPS_PROXY`, depending on the protocol of the `url` passed. Proxy
authentication (`proxy_auth`) should be passed as an `aiohttp.BasicAuth`
object, and proxy headers (`proxy_headers`) as a `dict` (a usage sketch
follows this method).
"""
overwrite = overwrite or self.overwrite

if path is None and filename is None:
    raise ValueError("Either path or filename must be specified.")
elif path is None:
    path = './'
path = pathlib.Path(path)

if not filename:
    filepath = partial(default_name, path)
elif callable(filename):
    filepath = filename
else:
    # Define a function because get_file expects a callback
    def filepath(*args):
        return path / filename

scheme = urllib.parse.urlparse(url).scheme

if scheme in ('http', 'https'):
    get_file = partial(self._get_http, url=url, filepath_partial=filepath,
                       overwrite=overwrite, **kwargs)
    self.http_queue.put_nowait(get_file)
elif scheme == 'ftp':
    if aioftp is None:
        raise ValueError("The aioftp package must be installed to download over FTP.")
def _start_loop(self, loop):
    # Setup asyncio loops
    if not loop:
        aio_pool = ThreadPoolExecutor(1)
        self.loop = asyncio.new_event_loop()
        self.run_until_complete = partial(run_in_thread, aio_pool, self.loop)
    else:
        self.loop = loop
        self.run_until_complete = self.loop.run_until_complete

    asyncio.set_event_loop(self.loop)

    # Setup queues
    self.http_queue = asyncio.Queue(loop=self.loop)
    self.http_tokens = asyncio.Queue(maxsize=self.max_conn, loop=self.loop)
    self.ftp_queue = asyncio.Queue(loop=self.loop)
    self.ftp_tokens = asyncio.Queue(maxsize=self.max_conn, loop=self.loop)
    for i in range(self.max_conn):
        self.http_tokens.put_nowait(Token(i + 1))
        self.ftp_tokens.put_nowait(Token(i + 1))
    download_workers = []
    writer = self.loop.create_task(
        self._write_worker(downloaded_chunks_queue, file_pb, filepath))

    download_workers.append(
        self.loop.create_task(
            self._ftp_download_worker(stream, downloaded_chunks_queue))
    )

    await asyncio.gather(*download_workers)
    await downloaded_chunks_queue.join()
    writer.cancel()
    return str(filepath)
except Exception as e:
    raise FailedDownload(filepath_partial, url, e)
    else:
        download_workers.append(
            self.loop.create_task(self._http_download_worker(
                session, url, chunksize, None, timeout, downloaded_chunk_queue, **kwargs
            ))
        )

    # run all the download workers
    await asyncio.gather(*download_workers)
    # join() waits till all the items in the queue have been processed
    await downloaded_chunk_queue.join()
    writer.cancel()
    return str(filepath)
except Exception as e:
    raise FailedDownload(filepath_partial, url, e)
def __str__(self):
    out = super().__repr__()
    if self.errors:
        out += '\nErrors:\n'
        for error in self.errors:
            if isinstance(error, FailedDownload):
                resp = self._get_nice_resp_repr(error.exception)
                out += f"(url={error.url}, response={resp})\n"
            else:
                out += "({})".format(repr(error))
    return out