def init(self):
    self.ready.clear()
    self.login()
    self.corepool.spawn(self.loop)
    p = Pool(5)
    p.map(lambda f: f(), [
        self.refresh_buddy_list,
        self.refresh_group_list,
    ])
    # -------------
    # Keep these two calls: they affect the login process. Cunning Tencent!
    self.call_server('s:get_discus_list',
                     method='get',
                     clientid=self.clientid,
                     psessionid=self.psessionid,
                     key='',
                     vfwebqq=self.vfwebqq,
                     )
    self.call_server('s:get_self_info2', t=int(time.time() * 1000))
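The p.map(lambda f: f(), [...]) idiom above runs several zero-argument methods concurrently and blocks until all of them finish. A minimal self-contained sketch of the same pattern, with placeholder functions standing in for the client's refresh methods:

import gevent
from gevent.pool import Pool

def refresh_buddy_list():
    gevent.sleep(0.1)  # stand-in for a network round trip
    print("buddies refreshed")

def refresh_group_list():
    gevent.sleep(0.1)
    print("groups refreshed")

p = Pool(5)
p.map(lambda f: f(), [refresh_buddy_list, refresh_group_list])  # returns once both are done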
def _start_server(self, port):
    """Start a TCP (IPv4 or IPv6) server."""
    pool = Pool(POOL_SIZE)
    if self.networking.ip6_address == "::":
        # "::" means no specific IPv6 address is configured: listen on all IPv4 interfaces
        server = StreamServer(("0.0.0.0", port), self._handler, spawn=pool)
    else:
        # otherwise listen on all IPv6 interfaces
        server = StreamServer(("::", port), self._handler, spawn=pool)
    try:
        server.serve_forever()
    except KeyboardInterrupt:
        pass
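Passing spawn=pool caps the server at POOL_SIZE concurrent connection handlers; once the pool is full, further connections wait for a free slot. A runnable sketch of the same pattern (the echo handler and port are illustrative, not from the project above):

from gevent.pool import Pool
from gevent.server import StreamServer

def handler(sock, address):
    # echo a single chunk back to the client, then let the connection close
    data = sock.recv(1024)
    if data:
        sock.sendall(data)

pool = Pool(100)  # at most 100 handlers at once
server = StreamServer(("127.0.0.1", 8000), handler, spawn=pool)
server.serve_forever()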
            # excerpt: inside _delete's try block, after the DELETE request `r` returns
            if r.status_code != 204:
                errors.append({
                    'name': f,
                    'container': container,
                    'status_code': r.status_code,
                    'headers': r.headers,
                    'response': json.loads(r.text),
                })
        finally:
            if verbose > 1:
                print('Thread %3s: delete complete for %s' % (i, f))

    s = requests.Session()
    pool = Pool(size=threads)
    errors = []
    for i in range(threads):
        pool.spawn(_delete, i, self._queue, errors)
    pool.join()
    return errors
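The method fans a shared queue out to a fixed number of _delete workers and collects failures into a plain list, which is safe because gevent greenlets run cooperatively in a single thread. A self-contained sketch of that spawn/join pattern, with a made-up worker body in place of the real DELETE request:

from gevent.pool import Pool
from gevent.queue import Queue, Empty

def worker(i, queue, errors):
    while True:
        try:
            item = queue.get(block=False)
        except Empty:
            return  # queue drained, worker exits
        if item % 3 == 0:  # pretend every third item fails
            errors.append({'name': item, 'worker': i})

queue = Queue(items=range(10))
errors = []
pool = Pool(size=4)
for i in range(4):
    pool.spawn(worker, i, queue, errors)
pool.join()
print(errors)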
if __name__ == "__main__":
N = 1000
C = 10
url = URL('http://127.0.0.1/index.html')
qs = url.request_uri
def run(client):
response = client.get(qs)
response.read()
assert response.status_code == 200
client = HTTPClient.from_url(url, concurrency=C)
group = gevent.pool.Pool(size=C)
now = time.time()
for _ in xrange(N):
group.spawn(run, client)
group.join()
delta = time.time() - now
req_per_sec = N / delta
print "request count:%d, concurrenry:%d, %f req/s" % (
N, C, req_per_sec)
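Sizing the pool to the client's concurrency keeps at most C requests in flight while the loop enqueues all N runs. The same timing structure can be tried without a local web server by substituting a sleep for the HTTP round trip:

import time
import gevent
from gevent.pool import Pool

def fake_request(_):
    gevent.sleep(0.01)  # stand-in for client.get() + response.read()

N, C = 100, 10
pool = Pool(size=C)
start = time.time()
pool.map(fake_request, range(N))
print("%.0f req/s" % (N / (time.time() - start)))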
def __init__(self, listener, locals=None, banner=None, **server_args):
    """
    :keyword locals: If given, a dictionary of "builtin" values that will be available
        at the top-level.
    :keyword banner: If given, a string that will be printed to each connecting user.
    """
    group = Pool(greenlet_class=_Greenlet_stdreplace)  # no limit on number
    StreamServer.__init__(self, listener, spawn=group, **server_args)
    _locals = {'__doc__': None, '__name__': '__console__'}
    if locals:
        _locals.update(locals)
    self.locals = _locals
    self.banner = banner
    self.stderr = sys.stderr
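This is the constructor of gevent's backdoor console server; the unbounded Pool lets any number of operators attach at once. A typical way to use it, with an illustrative port and state dictionary:

from gevent.backdoor import BackdoorServer

state = {'counter': 0}  # hypothetical application state to inspect
server = BackdoorServer(('127.0.0.1', 5001),
                        banner="Connected to the app backdoor",
                        locals={'state': state})
server.serve_forever()  # then attach with: telnet 127.0.0.1 5001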
def __init__(self, age, ppid, listener, cfg, script):
    ProxyServer.__init__(self, listener, script,
                         spawn=Pool(cfg.worker_connections))
    if cfg.ssl_keyfile and cfg.ssl_certfile:
        self.wrap_socket = wrap_socket
        self.ssl_args = dict(
            keyfile=cfg.ssl_keyfile,
            certfile=cfg.ssl_certfile,
            server_side=True,
            cert_reqs=cfg.ssl_cert_reqs,
            ca_certs=cfg.ssl_ca_certs,
            suppress_ragged_eofs=True,
            do_handshake_on_connect=True)
        self.ssl_enabled = True
    self.name = cfg.name
    self.age = age
    self.ppid = ppid
def chunk_wrapper(feeds):
    from gevent import monkey; monkey.patch_all()
    from gevent.pool import Pool
    p = Pool(config.settings.fetcher.mix_pool)
    log.info(">> starting %d greenlets" % config.settings.fetcher.mix_pool)
    p.map(controllers.feed_worker, feeds)
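chunk_wrapper is meant to run in a fresh worker process, so it monkey-patches the standard library before building the pool; patching must happen before any blocking sockets are created, or the greenlets cannot yield to one another. A standalone sketch of the same ordering, with urllib standing in for whatever I/O controllers.feed_worker does:

from gevent import monkey; monkey.patch_all()  # must run before blocking I/O starts

import urllib.request
from gevent.pool import Pool

def feed_worker(url):
    # stand-in for the real feed fetcher
    with urllib.request.urlopen(url, timeout=10) as resp:
        return resp.status

pool = Pool(4)
print(pool.map(feed_worker, ['http://example.com/'] * 4))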
import argparse
import csv
import math
from operator import itemgetter
import gevent.monkey
gevent.monkey.patch_all()
from urllib.parse import urlparse
from closeio_api import Client as CloseIO_API
from gevent.pool import Pool
pool = Pool(7)
parser = argparse.ArgumentParser(description='Find duplicate leads in your Close org via lead name, email address, phone number, or lead url hostname')
parser.add_argument('--api-key', '-k', required=True, help='API Key')
parser.add_argument('--field', '-f', default='all', choices=['lead_name', 'email', 'phone', 'url', 'all'], required=False, help="Specify a field to compare uniqueness")
args = parser.parse_args()
# Initialize Close API Wrapper
api = CloseIO_API(args.api_key)
org_id = api.get('api_key/' + args.api_key)['organization_id']
org_name = api.get('organization/' + org_id, params={'_fields': 'name'})['name'].replace('/', '')
# Calculate number of slices necessary to get all leads
total_leads = api.get('lead', params={'_limit': 0, 'query': 'sort:created'})['total_results']
total_slices = int(math.ceil(float(total_leads) / 1000))
slices = range(1, total_slices + 1)
leads = []
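With the total split into 1,000-lead slices, each slice can be fetched by its own greenlet and appended to leads; no lock is needed because greenlets never run in parallel. One plausible way the slices get drained, continuing the names above (the paging parameters here are assumptions, not necessarily the script's exact Close API call):

def get_lead_slice(slice_num):
    # hypothetical: fetch one 1,000-lead page, sorted by creation date
    resp = api.get('lead', params={
        '_limit': 1000,
        '_skip': (slice_num - 1) * 1000,
        'query': 'sort:created',
    })
    leads.extend(resp['data'])

pool.map(get_lead_slice, slices)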
def pooledDownloadFile(self, inner_paths, pool_size=100, only_if_bad=False):
    self.log.debug("New downloadFile pool: len: %s, only if bad: %s" % (len(inner_paths), only_if_bad))
    self.worker_manager.started_task_num += len(inner_paths)
    pool = gevent.pool.Pool(pool_size)
    num_skipped = 0
    for inner_path in inner_paths:
        if not only_if_bad or inner_path in self.bad_files:
            pool.spawn(self.needFile, inner_path, update=True)
        else:
            num_skipped += 1
            self.worker_manager.started_task_num -= 1
    self.log.debug("Ended downloadFile pool len: %s, skipped: %s" % (len(inner_paths), num_skipped))