Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def send_post(self):
    """Upload the form data to ``self.action`` using pycurl's multi
    interface so the transfer stays interruptible via ``self.stop``.

    Reads:  self.action, self.__form, self.proxy
    Sets:   self.curl, self.stop, self.http_code
    Raises: UploaderError when the upload is cancelled, or when the
            server answers 404/500.
    """
    self.curl = self.get_curl()
    self.curl.setopt(pycurl.URL, self.action)
    curl_post = self.curl
    # HTTPPOST needs a list of (name, value) pairs; list() keeps this
    # working when items() is a dict view (Python 3).
    curl_post.setopt(pycurl.HTTPPOST, list(self.__form.items()))
    if self.proxy:
        # each entry is a (option, value) pair for setopt
        for option in self.proxy:
            self.curl.setopt(*option)
    multi = pycurl.CurlMulti()
    try:
        multi.add_handle(curl_post)
    except pycurl.error:
        # handle may already be attached to a multi object; proceed as-is
        pass
    num_handles = 1
    self.stop = False
    while num_handles:
        # drain pending transfers until pycurl stops asking to be re-called
        while True:
            ret, num_handles = multi.perform()
            if ret != pycurl.E_CALL_MULTI_PERFORM:
                break
        if self.stop:
            raise UploaderError("Upload cancel")
        # wait up to 1s for socket activity before the next perform() round
        multi.select(1.0)
    self.http_code = curl_post.getinfo(pycurl.HTTP_CODE)
    if self.http_code in (404, 500):
        raise UploaderError("%s %s error" % (self.host, self.http_code))
    # TODO: wrap the result in a `response` object
def _download_part(self, part_num):
    """
    Download a part from the source URL. Returns a BytesIO buffer. The buffer's tell() method
    will return the size of the downloaded part, which may be less than the requested part
    size if the part is the last one for the URL.
    """
    buf = BytesIO()
    with closing(pycurl.Curl()) as c:
        c.setopt(c.URL, self.url)
        c.setopt(c.WRITEDATA, buf)
        c.setopt(c.FAILONERROR, 1)
        start, end = self._get_part_range(part_num)
        # HTTP/FTP byte ranges are inclusive, hence end - 1
        c.setopt(c.RANGE, "%i-%i" % (start, end - 1))
        try:
            c.perform()
        except pycurl.error as e:
            # Exception instances are not unpackable on Python 3; the
            # (code, message) pair lives in e.args.
            error_code, message = e.args
            if error_code == c.E_BAD_DOWNLOAD_RESUME:  # bad range for FTP
                pass
            elif error_code == c.E_HTTP_RETURNED_ERROR:
                # http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.4.17
                # 416 Requested Range Not Satisfiable: part lies past EOF
                if c.getinfo(c.RESPONSE_CODE) == 416:
                    pass
                else:
                    raise
            else:
                raise
            # Tolerated errors mean an empty part: discard any partial
            # data so buf.tell() reports a zero-length part.
            buf.seek(0)
            buf.truncate()
    return buf
# NOTE(review): fragment of a download worker loop — the enclosing
# `try:`/loop/`def` are outside this view, so `msg`, `pyfile`, `error`
# and the trailing `continue`/`except` belong to that outer scope.
if msg == "offline":
    # plugin reported the file as permanently unavailable
    pyfile.setStatus("offline")
    self.log.warning(_("Download is offline: %s") % pyfile.name)
elif msg == "temp. offline":
    # host signalled a transient outage; this status permits a later retry
    pyfile.setStatus("temp. offline")
    self.log.warning(_("Download is temporary offline: %s") % pyfile.name)
else:
    # any other message is treated as a hard failure
    pyfile.setStatus("failed")
    self.log.warning(_("Download failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": msg})
    pyfile.error = msg
# notify addons and release per-file resources, then move to the next item
self.core.addonManager.downloadFailed(pyfile)
self.clean(pyfile)
continue
# Python 2 except syntax; pairs with a `try:` outside this fragment.
except error, e:
    # the exception may carry (code, message) or just a message
    if len(e.args) == 2:
        code, msg = e.args
    else:
        code = 0
        msg = e.args
    self.log.debug("pycurl exception %s: %s" % (code, msg))
    # presumably these are retryable libcurl error codes (connect /
    # partial transfer / timeout / reset) — TODO confirm against libcurl
    if code in (7, 18, 28, 52, 56):
        self.log.warning(_("Couldn't connect to host or connection reset, waiting 1 minute and retry."))
        wait = time() + 60
        pyfile.waitUntil = wait
        pyfile.setStatus("waiting")
        # poll in 1-second steps until the retry deadline passes
        while time() < wait:
            sleep(1)
# NOTE(review): fragment of a sentiment-scoring helper — `post`, `cnt`
# and `sentiments` come from the enclosing (unseen) scope, and the final
# `return` belongs to that enclosing function.
try:
    cnt += 1
    buf = cStringIO.StringIO()
    c = pycurl.Curl()
    # POST the text to the text-processing.com sentiment API
    c.setopt(c.URL, 'http://text-processing.com/api/sentiment/')
    c.setopt(c.WRITEFUNCTION, buf.write)
    postdata = ''
    postdata = 'text=' + post
    # NOTE(review): `post` is sent unescaped; presumably it should be
    # URL-encoded (urllib.quote_plus) — TODO confirm
    c.setopt(c.POSTFIELDS, postdata)
    c.perform()
    val = buf.getvalue()
    # response body is parsed as a Python literal rather than JSON
    data = ast.literal_eval(val)
    data["post"] = post
    sentiments.append(data)
    buf.close()
# Python 2 except syntax; also shadows any module-level name `error`.
except pycurl.error, error:
    errno, errstr = error
    print "An error occured: ", errstr
print "sentiments computed for %d posts" % cnt
return sentiments
# NOTE(review): fragment of a cache-import routine. The `else:` below
# appears to pair with an `if` outside this view (local copy vs. remote
# fetch) and the `finally:` with an enclosing `try:` — confirm upstream.
try:
    # copy a local source file into the cache, reporting progress
    progress.start()
    copy2(source_file, local_filename)
    progress.done()
    self.umask_permissions(local_filename)
except IOError, e:  # Python 2 except syntax
    # errno 2 (ENOENT) with the destination path present — presumably
    # the source is missing; surface it as a cache-import failure
    if e.errno == 2 and os.path.exists(local_filename):
        raise CacheImportError('%s not found' % full_url)
    else:
        raise
else:
    # remote branch: download the blob instead of copying it locally
    try:
        get_remote_file(full_url, local_filename,
                        progress = progress)
    except pycurl.error, msg:
        raise CacheImportError('%s, %s' % (msg, full_url))
self.incorporate_file(local_filename, locator.blob_id)
mass_progress.note_finished(locator.blob_id)
mass_progress.write_progress()
finally:
    # the staged file is always removed, on success or failure
    if os.path.exists(local_filename):
        os.unlink(local_filename)
# NOTE(review): fragment of a backup-upload method — `definition`,
# `process_reader`, `json_loads` and `self._logger` come from the
# enclosing (unseen) scope.
response_body_stream = io.BytesIO()
url = definition.get_access().build_url(
    '/repository/collection/' + definition.get_collection_id() + '/backup', True)
curl = pycurl.Curl()
curl.setopt(curl.URL, url)
# verify TLS against the certifi CA bundle
curl.setopt(curl.CAINFO, certifi.where())
# stream the backup as an uploaded POST body read from process_reader
curl.setopt(curl.CUSTOMREQUEST, 'POST')
curl.setopt(curl.UPLOAD, 1)
curl.setopt(curl.READFUNCTION, process_reader.read_callback)
curl.setopt(curl.WRITEFUNCTION, response_body_stream.write)
curl.setopt(curl.VERBOSE, False)
try:
    curl.perform()
except pycurl.error as e:
    # prefer the reader's own exception: it explains why the stream broke
    # raise the exception directly
    if process_reader.exception:
        raise process_reader.exception
    raise Exception('HTTP request error: ' + str(e) + '. Probably the application ' +
                    'backup process exited or timed out unexpectedly. Read above messages for details')
response_body = response_body_stream.getvalue().decode('utf-8')
self._logger.debug('Request: ' + str(url))
self._logger.debug('response(' + response_body + ')')
try:
    _json = json_loads(response_body)
except JSONDecodeError:
    # non-JSON response: fall back to an empty dict
    _json = {}
# NOTE(review): fragment of a pycurl fetch routine — `c`, `payload`,
# `data_buf` and the helper functions come from the enclosing (unseen)
# scope; the final `except Exception` branch is cut off below.
# header_buf = BytesIO()
headers = {'count': 0, 'content': [{}]}
try:
    setup_curl_for_post(c, payload, data_buf, headers)  # header_buf)
    c.perform()
    # success: package the curl result with the request metadata
    resp = curl_result(c)
    resp['url'] = payload.get('url')
    resp['id'] = payload.get('id')
    resp['state'] = 'normal'
    resp['spider'] = 'pycurl'
    resp['payload'] = payload
    pycurl_get_resp(data_buf, headers, payload, resp)
    return resp
except pycurl.error as e:
    resp = curl_result(c)
    resp['url'] = payload.get('url')
    resp['id'] = payload.get('id')
    resp['state'] = 'error'
    resp['spider'] = 'pycurl'
    # pycurl.error args are (curl error code, description)
    resp['error_code'] = code = e.args[0]
    resp['error_desc'] = desc = e.args[1]
    # curl codes 18 and 47 presumably still yielded usable data, so the
    # state is softened to 'abnormal' — TODO confirm against libcurl docs
    if code in [18, 47]:
        resp['state'] = 'abnormal'
    pycurl_get_resp(data_buf, headers, payload, resp)
    return resp
except Exception as e:
    resp = curl_result(c)
    resp['url'] = payload.get('url')
    resp['id'] = payload.get('id')
    resp['state'] = 'critical'
def request(self):
    """Run the transfer on ``self.handler``, translating pycurl failures.

    SIGINT handling is delegated to ``self._sigint_handler`` for the
    duration of the transfer; a ``pycurl.error`` is re-raised as the
    error produced by ``build_network_error``.
    """
    try:
        with self._sigint_handler.handle_sigint():
            self.handler.perform()
    except pycurl.error as exc:
        # pycurl.error carries (curl error code, message)
        code, message = exc.args[0], exc.args[1]
        raise build_network_error(code, message)
def http_GET(url):
    """Fetch *url* with pycurl and return the numeric HTTP response code.

    The response body is captured (pycurl needs a write target) but
    discarded; only the status code matters to callers. On a transfer
    error the cause is printed and the last known response code is
    returned (0 when no response was received at all).
    """
    # Note: pycurl is very sensitive to unicode/string issues
    c = pycurl.Curl()
    # pycurl hands the write callback bytes; BytesIO (unlike StringIO)
    # accepts them on both Python 2 and 3. Also avoids shadowing the
    # builtin `buffer`.
    body = BytesIO()
    c.setopt(c.FAILONERROR, True)  # treat HTTP >= 400 as a pycurl.error
    c.setopt(c.WRITEFUNCTION, body.write)
    c.setopt(c.URL, str(url))
    try:
        c.perform()
    except pycurl.error as exc:
        # exceptions are not directly unpackable on Python 3; use .args,
        # which holds (curl error code, message)
        errstr = exc.args[1]
        print('Error, could not submit URL: ' + url)
        print('Error cause: ' + errstr)
    finally:
        # always read the status and release the handle, even when an
        # unexpected exception propagates
        status = c.getinfo(c.RESPONSE_CODE)
        c.close()
    return status