def init_curl(self):
    self.rep = StringIO()
    self.header = ""
    self.pycurl = pycurl.Curl()
    self.pycurl.setopt(pycurl.FOLLOWLOCATION, 1)
    self.pycurl.setopt(pycurl.MAXREDIRS, 5)
    self.pycurl.setopt(pycurl.TIMEOUT, (self.timeout * 3600))
    self.pycurl.setopt(pycurl.CONNECTTIMEOUT, 30)
    self.pycurl.setopt(pycurl.NOSIGNAL, 1)
    self.pycurl.setopt(pycurl.NOPROGRESS, 0)
    self.pycurl.setopt(pycurl.PROGRESSFUNCTION, self.progress)
    self.pycurl.setopt(pycurl.AUTOREFERER, 1)
    self.pycurl.setopt(pycurl.BUFFERSIZE, self.bufferSize)
    self.pycurl.setopt(pycurl.SSL_VERIFYPEER, 0)
    if self.debug:
        self.pycurl.setopt(pycurl.VERBOSE, 1)
    if self.interface:
        self.pycurl.setopt(pycurl.INTERFACE, self.interface)
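# NOPROGRESS is turned off above, so libcurl will call self.progress
# periodically, but the callback itself is not shown in this fragment.
# A minimal sketch, assuming the classic four-argument PROGRESSFUNCTION
# signature (not the original author's implementation):
def progress(self, download_t, download_d, upload_t, upload_d):
    # All four values are byte counts; download_t may be 0 if the total
    # size is unknown. Returning a non-zero value aborts the transfer.
    if download_t:
        print "%.1f%% of %d bytes" % (download_d * 100.0 / download_t, download_t)
    return 0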
#!/usr/bin/python
import pycurl
import StringIO
import urllib
url = "http://10.23.248.79:8050/rest/2.0/channel/channel"
post_data_dic = {'method':'fetch_tag', 'apikey':'GkWwrvZrCaMQfCZ190ujndZm'}
crl = pycurl.Curl()
#crl.setopt(pycurl.VERBOSE,1)
crl.setopt(pycurl.FOLLOWLOCATION, 1)
crl.setopt(pycurl.MAXREDIRS, 5)
#crl.setopt(pycurl.AUTOREFERER,1)
crl.setopt(pycurl.CONNECTTIMEOUT, 60)
crl.setopt(pycurl.TIMEOUT, 300)
#crl.setopt(pycurl.PROXY,proxy)
crl.setopt(pycurl.HTTPPROXYTUNNEL,1)
#crl.setopt(pycurl.NOSIGNAL, 1)
crl.fp = StringIO.StringIO()
header = StringIO.StringIO()
crl.setopt(pycurl.USERAGENT, "dhgu hoho")
# Option -d/--data <data> HTTP POST data
crl.setopt(crl.POSTFIELDS, urllib.urlencode(post_data_dic))
crl.setopt(pycurl.URL, url)
crl.setopt(crl.HEADERFUNCTION, header.write)
crl.setopt(crl.WRITEFUNCTION, crl.fp.write)
crl.perform()
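# Once perform() returns, the response is sitting in the two StringIO
# buffers wired up above. A short follow-up sketch for reading it back
# (variable names as in the script above):
print "status: %d" % crl.getinfo(pycurl.RESPONSE_CODE)
print "headers:\n%s" % header.getvalue()
print "body:\n%s" % crl.fp.getvalue()
crl.close()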
for _url in urls:
    url, interface, item, method, symbol, threshold, floatingthreshold, floatingvalue, counter, attempt, groups = _url
    filename = "/tmp/%s_url_%04d" % (uuid, len(queue) + 1)
    queue.append((url, filename, interface, item, method, symbol, threshold, floatingthreshold, floatingvalue, counter, attempt, groups))
num_urls = len(urls)
m = pycurl.CurlMulti()
m.handles = []
for i in range(num_conn):
    c = pycurl.Curl()
    c.fp = None
    c.setopt(pycurl.FOLLOWLOCATION, 1)
    c.setopt(pycurl.MAXREDIRS, 5)
    c.setopt(pycurl.CONNECTTIMEOUT, 5)
    c.setopt(pycurl.TIMEOUT, 10)
    c.setopt(pycurl.NOSIGNAL, 1)
    m.handles.append(c)
# main loop
freelist = m.handles[:]
num_processed = 0
while num_processed < num_urls:
    while queue and freelist:
        url, filename, interface, item, method, symbol, threshold, floatingthreshold, floatingvalue, counter, attempt, groups = queue.pop()
        c = freelist.pop()
        c.fp = open(filename, "wb")
        c.setopt(pycurl.URL, url)
        c.setopt(pycurl.WRITEDATA, c.fp)
        m.add_handle(c)
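    # The fragment is cut off here: nothing above actually drives the
    # transfers. A minimal sketch of the standard CurlMulti drive loop
    # that would follow, based on the usual pycurl multi pattern (not
    # necessarily the original author's code):
    while True:
        ret, num_handles = m.perform()
        if ret != pycurl.E_CALL_MULTI_PERFORM:
            break
    # Collect finished transfers and recycle their handles
    while True:
        num_q, ok_list, err_list = m.info_read()
        for c in ok_list:
            c.fp.close()
            c.fp = None
            m.remove_handle(c)
            freelist.append(c)
        for c, errno, errmsg in err_list:
            c.fp.close()
            c.fp = None
            m.remove_handle(c)
            freelist.append(c)
        num_processed += len(ok_list) + len(err_list)
        if num_q == 0:
            break
    # Sleep until more data is available on any handle
    m.select(1.0)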
# Curl properties
verbose = curlprop(pycurl.VERBOSE, curlprop.WRITEONLY, False)
content_length_download = curlprop(pycurl.CONTENT_LENGTH_DOWNLOAD)
content_length_upload = curlprop(pycurl.CONTENT_LENGTH_UPLOAD)
# TODO: a content_length_download that takes resume_from into account;
# as with position, we need a total size for that
size_download = curlprop(pycurl.SIZE_DOWNLOAD)
size_upload = curlprop(pycurl.SIZE_UPLOAD)
resume_from = curlprop(pycurl.RESUME_FROM, curlprop.WRITEONLY)
response_code = curlprop(pycurl.RESPONSE_CODE)
follow_location = curlprop(pycurl.FOLLOWLOCATION, curlprop.WRITEONLY)
max_redirs = curlprop(pycurl.MAXREDIRS, curlprop.WRITEONLY)
connect_timeout = curlprop(pycurl.CONNECTTIMEOUT, curlprop.WRITEONLY)
timeout = curlprop(pycurl.TIMEOUT, curlprop.WRITEONLY)
userpwd = curlprop(pycurl.USERPWD, curlprop.WRITEONLY)
effective_url = curlprop(pycurl.EFFECTIVE_URL, curlprop.READONLY)
bind_address = curlprop(pycurl.INTERFACE, curlprop.WRITEONLY, filter=_prop_filter_bind_address)
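# None of the properties above work without the `curlprop` descriptor,
# which this fragment does not include. A minimal sketch of one plausible
# implementation, assuming each owning instance exposes its easy handle
# as `self.curl` (a hypothetical reconstruction, not the original class):
class curlprop(object):
    READONLY, WRITEONLY, READWRITE = range(3)

    def __init__(self, option, mode=READONLY, default=None, filter=None):
        self.option = option    # pycurl setopt/getinfo constant
        self.mode = mode
        self.default = default
        self.filter = filter    # optional write-side value transformer

    def __get__(self, obj, objtype=None):
        if obj is None:
            return self
        if self.mode == self.WRITEONLY:
            raise AttributeError("write-only curl property")
        return obj.curl.getinfo(self.option)

    def __set__(self, obj, value):
        if self.mode == self.READONLY:
            raise AttributeError("read-only curl property")
        if self.filter is not None:
            value = self.filter(obj, value)
        obj.curl.setopt(self.option, value)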
if verify:
    curl.setopt(pycurl.SSL_VERIFYPEER, True)
    if cafile:
        curl.setopt(pycurl.CAINFO, str(cafile))
    if capath:
        curl.setopt(pycurl.CAPATH, str(capath))
    # Neither given: leave the default CA bundle in place
else:
    # Disable SSL certificate verification
    curl.setopt(pycurl.SSL_VERIFYPEER, False)
if proxy is not None:
    curl.setopt(pycurl.PROXY, str(proxy))
# Timeouts
if connect_timeout is not None:
    curl.setopt(pycurl.CONNECTTIMEOUT, connect_timeout)
if timeout is not None:
    curl.setopt(pycurl.TIMEOUT, timeout)
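# Note: on systems where libcurl has no usable default CA bundle, the
# `certifi` package is a common source of one for CAINFO. A tiny sketch
# (certifi is an addition here, not something the snippet above uses):
import certifi
curl.setopt(pycurl.CAINFO, certifi.where())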
"""
Downloads an URL and stores the response to a directory with named as the host/IP
:param host_id:
:param url:
:return:
"""
output = StringIO.StringIO()
header = StringIO.StringIO()
print "[>] Trying to download URL: %s" % url
# Download file
try:
    # 'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)')
    c = pycurl.Curl()
    c.setopt(c.URL, url)
    c.setopt(pycurl.CONNECTTIMEOUT, 10)
    c.setopt(pycurl.TIMEOUT, 180)
    c.setopt(pycurl.FOLLOWLOCATION, 1)
    c.setopt(pycurl.USERAGENT, 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0)')
    c.setopt(c.WRITEFUNCTION, output.write)
    c.setopt(c.HEADERFUNCTION, header.write)
    c.perform()
    # Header parsing
    header_info = header_function(header.getvalue())
except Exception, e:
    if args.debug:
        traceback.print_exc()
    print_highlighted("[-] Error MESSAGE: %s" % str(e))
# Write file
if c.getinfo(c.RESPONSE_CODE) == 200:
    # Check folder
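    # The fragment stops here. Going by the docstring, the 200 branch
    # presumably saves `output` into a directory named after the host; a
    # hypothetical sketch of that step (paths and filenames are assumptions):
    import os
    out_dir = os.path.join("downloads", host_id)
    if not os.path.isdir(out_dir):
        os.makedirs(out_dir)
    with open(os.path.join(out_dir, "response.bin"), "wb") as f:
        f.write(output.getvalue())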
curl.setopt(pycurl.URL, request.url)
curl.setopt(pycurl.HTTPHEADER,
            ["%s: %s" % i for i in request.headers.iteritems()])
try:
    curl.setopt(pycurl.HEADERFUNCTION,
                functools.partial(_curl_header_callback, headers))
except:
    # Old version of curl; response will not include headers
    pass
if request.streaming_callback:
    curl.setopt(pycurl.WRITEFUNCTION, request.streaming_callback)
else:
    curl.setopt(pycurl.WRITEFUNCTION, buffer.write)
curl.setopt(pycurl.FOLLOWLOCATION, request.follow_redirects)
curl.setopt(pycurl.MAXREDIRS, request.max_redirects)
curl.setopt(pycurl.CONNECTTIMEOUT, int(request.connect_timeout))
curl.setopt(pycurl.TIMEOUT, int(request.request_timeout))
if request.user_agent:
    curl.setopt(pycurl.USERAGENT, request.user_agent)
else:
    curl.setopt(pycurl.USERAGENT, "Mozilla/5.0 (compatible; pycurl)")
if request.network_interface:
    curl.setopt(pycurl.INTERFACE, request.network_interface)
if request.use_gzip:
    curl.setopt(pycurl.ENCODING, "gzip,deflate")
else:
    curl.setopt(pycurl.ENCODING, "none")
# Set the request method through curl's awkward interface, which makes
# up a separate option name for almost every method
curl_options = {
    "GET": pycurl.HTTPGET,
def init_handle(self):
    """
    Sets common options on the curl handle.
    """
    self.c.setopt(pycurl.FOLLOWLOCATION, 1)
    self.c.setopt(pycurl.MAXREDIRS, 5)
    self.c.setopt(pycurl.CONNECTTIMEOUT, 30)
    self.c.setopt(pycurl.NOSIGNAL, 1)
    self.c.setopt(pycurl.NOPROGRESS, 1)
    if hasattr(pycurl, "AUTOREFERER"):
        self.c.setopt(pycurl.AUTOREFERER, 1)
    self.c.setopt(pycurl.SSL_VERIFYPEER, 0)
    # Low-speed interval: detects connection loss, but can abort the
    # download if the hoster stalls it
    self.c.setopt(pycurl.LOW_SPEED_TIME, 45)
    self.c.setopt(pycurl.LOW_SPEED_LIMIT, 5)
    # do not save the cookies
    self.c.setopt(pycurl.COOKIEFILE, b"")
    self.c.setopt(pycurl.COOKIEJAR, b"")
    # self.c.setopt(pycurl.VERBOSE, 1)
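    # When the low-speed cutoff above fires, perform() raises pycurl.error
    # with CURLE_OPERATION_TIMEDOUT (errno 28), the same code a plain
    # TIMEOUT produces. A small usage sketch (from some caller in the
    # same class; names assumed):
    try:
        self.c.perform()
    except pycurl.error, e:
        errno, errstr = e.args
        if errno == 28:
            print "transfer timed out or stalled: %s" % errstr
        else:
            raise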
def __init__(self, url, auth, verify):
    self.url = url
    self.received_buffer = BytesIO()
    headers = ['Cache-Control: no-cache', 'Accept: text/event-stream']
    self.curl = pycurl.Curl()
    self.curl.setopt(pycurl.URL, url)
    self.curl.setopt(pycurl.ENCODING, 'gzip')
    self.curl.setopt(pycurl.CONNECTTIMEOUT, 10)
    self.curl.setopt(pycurl.WRITEDATA, self.received_buffer)
    # Marathon >= 1.7.x returns 30x responses for /v2/events requests
    # when they hit a non-leader, so we follow redirects.
    self.curl.setopt(pycurl.FOLLOWLOCATION, True)
    self.curl.setopt(pycurl.MAXREDIRS, 1)
    self.curl.setopt(pycurl.UNRESTRICTED_AUTH, True)
    # The settings below keep the connection from hanging if it breaks
    # silently. Since marathon-lb only listens, a silent connection
    # failure would otherwise make it wait forever.
    #
    # Minimum bytes/second below which the transfer counts as "low
    # speed"; at 1, "low speed" effectively means 0 bytes/second.
    self.curl.setopt(pycurl.LOW_SPEED_LIMIT, 1)
    # How long (in seconds) the transfer may stay below that limit
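    # The fragment is cut off after the comment above; the option that
    # comment describes is LOW_SPEED_TIME. The original value is not
    # shown, so the number below is an assumption:
    self.curl.setopt(pycurl.LOW_SPEED_TIME, 60)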
# A queue of our requests, and the number of requests in flight
self.queue = []
self.retryQueue = []
# Background processing
self.threaded = threaded
self.processQueue = []
self.processor = threading.Thread(target=self.process)
self.num = 0
# Now instantiate a pool of easy handles
self.pool = []
for i in range(poolSize):
    c = pycurl.Curl()
    # It will need a file to write to
    c.fp = None
    # Set some options
    c.setopt(pycurl.CONNECTTIMEOUT, 15)
    c.setopt(pycurl.FOLLOWLOCATION, 1)
    c.setopt(pycurl.SHARE, self.share)
    # c.setopt(pycurl.FRESH_CONNECT, 1)
    # c.setopt(pycurl.FORBID_REUSE, 1)
    c.setopt(pycurl.MAXREDIRS, 5)
    c.setopt(pycurl.TIMEOUT, 15)
    c.setopt(pycurl.NOSIGNAL, 1)
    # Now add it to the pool
    self.pool.append(c)
self.multi.handles = self.pool[:]
# Now listen for certain events
self.signalWatchers = [pyev.Signal(sig, loop, self.signal) for sig in SIGSTOP]
self.timerWatcher = pyev.Timer(10.0, 0.0, loop, self.timer)
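# The pool above passes `self.share` to every handle, but the share
# object itself is never created in this fragment. A minimal sketch of
# the usual setup, letting pooled handles share DNS and cookie state
# (an assumption, not the original code):
share = pycurl.CurlShare()
share.setopt(pycurl.SH_SHARE, pycurl.LOCK_DATA_DNS)
share.setopt(pycurl.SH_SHARE, pycurl.LOCK_DATA_COOKIE)
# ... and each easy handle is then wired up with:
# c.setopt(pycurl.SHARE, share)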