def get_elb_bucket_locations(self):
    elbs = self.manager.get_resource_manager('elb').resources()
    get_elb_attrs = functools.partial(
        _query_elb_attrs, self.manager.session_factory)
    with self.executor_factory(max_workers=2) as w:
        futures = []
        for elb_set in chunks(elbs, 100):
            futures.append(w.submit(get_elb_attrs, elb_set))
        for f in as_completed(futures):
            if f.exception():
                log.error("Error while scanning elb log targets: %s" % (
                    f.exception()))
                continue
            for tgt in f.result():
                yield tgt
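
All of these fragments batch their work through the same chunks helper. A minimal sketch of such a batching generator (a plausible reimplementation for illustration, not necessarily c7n's exact source) is:

def chunks(iterable, size=50):
    """Yield successive lists of at most `size` items from `iterable`."""
    batch = []
    for item in iterable:
        batch.append(item)
        if len(batch) == size:
            yield batch
            batch = []
    if batch:  # trailing partial batch
        yield batch

With that shape, chunks(elbs, 100) above turns one large ELB listing into a stream of 100-element work units, one per executor task.
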
# uniqueness constraint validity across apis?
resource_map = {r['id']: r for r in resources}
futures = {}
results = set()
with self.executor_factory(max_workers=2) as w:
    tasks = []
    for r in resources:
        r_method_set = method_set
        if method_set == 'all':
            r_method_set = r.get('resourceMethods', {}).keys()
        for m in r_method_set:
            tasks.append((r, m))
    for task_set in utils.chunks(tasks, 20):
        futures[w.submit(
            self.process_task_set, client, task_set)] = task_set
    for f in as_completed(futures):
        task_set = futures[f]
        if f.exception():
            self.manager.log.warning(
                "Error retrieving methods on resources %s",
                ["%s:%s" % (r['restApiId'], r['path'])
                 for r, mt in task_set])
            continue
        for m in f.result():
            if self.match(m):
                results.add(m['resourceId'])
                resource_map[m['resourceId']].setdefault(
                    ANNOTATION_KEY_MATCHED_METHODS, []).append(m)
# Reconstructed: the excerpt was cut here; matched ids map back to resources.
return [resource_map[rid] for rid in results]
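
The futures dict keyed by input above is a stock concurrent.futures pattern: keeping the submitted batch as the dict value lets the error path name exactly which inputs failed. Stripped of the API Gateway specifics, a runnable toy version (square_all standing in for process_task_set):

from concurrent.futures import ThreadPoolExecutor, as_completed

def square_all(batch):
    return [n * n for n in batch]

with ThreadPoolExecutor(max_workers=2) as w:
    futures = {w.submit(square_all, batch): batch
               for batch in ([1, 2], [3, 4])}
    for f in as_completed(futures):
        batch = futures[f]  # recover the inputs for error reporting
        if f.exception():
            print("failed batch: %s (%s)" % (batch, f.exception()))
            continue
        print(batch, "->", f.result())
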
# Excerpt from an EC2 resize action. The snippet began mid-statement; the
# stopped/running partitions below are reconstructed from context, and
# filter_instance_state is the assumed helper name.
stopped_instances = self.filter_instance_state(
    resources, ('stopped',))
running_instances = self.filter_instance_state(
    resources, ('running',))
# If requested, stop running instances first so their type can be changed.
if self.data.get('restart') and running_instances:
    Stop({'terminate-ephemeral': False},
         self.manager).process(running_instances)
    client = utils.local_session(
        self.manager.session_factory).client('ec2')
    waiter = client.get_waiter('instance_stopped')
    try:
        waiter.wait(
            InstanceIds=[r['InstanceId'] for r in running_instances])
    except ClientError as e:
        self.log.exception(
            "Exception stopping instances for resize:\n %s" % e)
for instance_set in utils.chunks(itertools.chain(
        stopped_instances, running_instances), 20):
    self.process_resource_set(instance_set)
# Restart anything we stopped above.
if self.data.get('restart') and running_instances:
    client.start_instances(
        InstanceIds=[i['InstanceId'] for i in running_instances])
return list(itertools.chain(stopped_instances, running_instances))
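
The instance_stopped waiter above is plain boto3; if the default polling cadence is too slow or too tight, it accepts an optional WaiterConfig (the instance id below is a placeholder):

import boto3

client = boto3.client('ec2')
waiter = client.get_waiter('instance_stopped')
waiter.wait(
    InstanceIds=['i-0123456789abcdef0'],            # placeholder id
    WaiterConfig={'Delay': 15, 'MaxAttempts': 40})  # poll every 15s, ~10m cap
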
def create_elb_active_attributes_tuples(self, elb_policy_tuples):
    """
    Creates a list of tuples for all attributes that are marked
    as "true" in the load balancer's policies, e.g.
    (myelb, ['Protocol-SSLv1', 'Protocol-SSLv2'])
    """
    active_policy_attribute_tuples = []
    client = local_session(self.manager.session_factory).client('elb')
    with self.executor_factory(max_workers=2) as w:
        futures = []
        for elb_policy_set in chunks(elb_policy_tuples, 50):
            futures.append(
                w.submit(self.process_elb_policy_set, client, elb_policy_set))
        for f in as_completed(futures):
            if f.exception():
                self.log.error(
                    "Exception processing elb policies \n %s" % (
                        f.exception()))
                continue
            for elb_policies in f.result():
                active_policy_attribute_tuples.append(elb_policies)
    return active_policy_attribute_tuples
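
For concreteness, the (name, [active attributes]) tuple shape the docstring describes can be built from toy data:

policy_attrs = {'Protocol-SSLv1': True,
                'Protocol-SSLv2': True,
                'Protocol-TLSv1': False}
active = sorted(name for name, enabled in policy_attrs.items() if enabled)
print(('myelb', active))
# -> ('myelb', ['Protocol-SSLv1', 'Protocol-SSLv2'])
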
# some variances between cpy and pypy, sniff detect
# (Python 2 code: func_closure, func_name, and unicode are py2 spellings.)
for closure in func.delay.func_closure:
    if getattr(closure.cell_contents, 'queue', None):
        ctx = closure.cell_contents
        break
q = Queue(ctx.queue, connection=connection)
argv = list(args)
argv.append(None)
# One job template, re-stamped below with a fresh id per argument set.
job = Job.create(
    func, args=argv, connection=connection,
    description="bucket-%s" % func.func_name,
    origin=q.name, status=JobStatus.QUEUED, timeout=ctx.timeout,
    result_ttl=0, ttl=ctx.ttl)
for n in chunks(nargs, 100):
    job.created_at = datetime.utcnow()
    with connection.pipeline() as pipe:
        for s in n:
            argv[-1] = s
            job._id = unicode(uuid4())  # noqa: F821
            job.args = argv
            q.enqueue_job(job, pipeline=pipe)
        pipe.execute()
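
The fragment above drives rq's low-level Job.create/enqueue_job so that a single job template can be re-stamped with a fresh id per argument set and flushed through one Redis pipeline, avoiding per-job round trips. The everyday high-level equivalent, minus that batching trick, is roughly (the worker path is hypothetical):

from redis import Redis
from rq import Queue

q = Queue('buckets', connection=Redis())
# 'mymodule.process_key_set' is a hypothetical worker, importable by rq workers
job = q.enqueue('mymodule.process_key_set', {'name': 'dev'}, ['k1', 'k2'])
print(job.id)
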
for b in buckets:
    connection.hset(
        'bucket-ages', bucket_id(account_info, b['Name']),
        b['CreationDate'].isoformat())
account_buckets = account_info.pop('buckets', None)
buckets = [n['Name'] for n in buckets
           if not account_buckets or n['Name'] in account_buckets]
account_not_buckets = account_info.pop('not-buckets', None)
buckets = [n for n in buckets
           if not account_not_buckets or n not in account_not_buckets]
log.info("processing %d buckets in account %s",
         len(buckets), account_info['name'])
for bucket_set in chunks(buckets, 50):
    invoke(process_bucket_set, account_info, bucket_set)
if not members:
    if not suspended_ids and not invited_ids:
        log.info("Region:%s All accounts already enabled", region)
    return list(active_ids)
if (len(members) + len(extant_ids)) > 1000:
    raise ValueError(
        ("Region:%s Guard Duty only supports "
         "1000 member accounts per master account") % region)
log.info(
    "Region:%s Enrolling %d accounts in guard duty", region, len(members))
unprocessed = []
for account_set in chunks(members, 25):
    unprocessed.extend(master_client.create_members(
        DetectorId=detector_id,
        AccountDetails=account_set).get('UnprocessedAccounts', []))
if unprocessed:
    log.warning(
        "Region:%s accounts were unprocessed - member create\n %s",
        region, format_event(unprocessed))
log.info("Region:%s Inviting %d member accounts", region, len(members))
unprocessed = []
for account_set in chunks(
        [m for m in members if m['AccountId'] not in invited_ids], 25):
    params = {'AccountIds': [m['AccountId'] for m in account_set],
              'DetectorId': detector_id}
    if message:
        params['Message'] = message
    # Reconstructed: the excerpt was cut before the invite call itself.
    unprocessed.extend(master_client.invite_members(
        **params).get('UnprocessedAccounts', []))
def process(self, resources, event=None):
    client = local_session(self.manager.session_factory).client('iam')
    with self.executor_factory(max_workers=2) as w:
        augment_set = [r for r in resources if self.annotation_key not in r]
        self.log.debug(
            "Querying %d users' api keys" % len(augment_set))
        list(w.map(
            functools.partial(self.get_user_keys, client),
            chunks(augment_set, 50)))
    matched = []
    for r in resources:
        k_matched = []
        for k in r[self.annotation_key]:
            if self.match(k):
                k_matched.append(k)
        for k in k_matched:
            k['c7n:matched-type'] = 'access'
        self.merge_annotation(r, self.matched_annotation_key, k_matched)
        if k_matched:
            matched.append(r)
    return matched
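
Unlike the submit/as_completed fragments, the augment step above uses executor.map, which yields results in input order and drains lazily; wrapping it in list() simply forces all batches to finish before the matching pass reads the annotations. A toy equivalent:

from concurrent.futures import ThreadPoolExecutor
from functools import partial

def annotate(tag, batch):
    for item in batch:
        item['tag'] = tag

records = [{'id': n} for n in range(5)]
batches = [records[:3], records[3:]]     # stand-in for chunks(records, 3)
with ThreadPoolExecutor(max_workers=2) as w:
    list(w.map(partial(annotate, 'seen'), batches))  # list() forces completion
print(records)
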
def flush(self):
    buf = self.buf
    self.buf = []
    for segment_set in utils.chunks(buf, 50):
        self.client.put_trace_segments(
            TraceSegmentDocuments=[
                s.serialize() for s in segment_set])
def process(self, resources, event=None):
    alias = utils.get_account_alias_from_sts(
        utils.local_session(self.manager.session_factory))
    message = {
        'event': event,
        'account_id': self.manager.config.account_id,
        'account': alias,
        'region': self.manager.config.region,
        'execution_id': self.manager.ctx.execution_id,
        'execution_start': self.manager.ctx.start_time,
        'policy': self.manager.data}
    message['action'] = self.expand_variables(message)
    for batch in utils.chunks(resources, self.batch_size):
        message['resources'] = self.prepare_resources(batch)
        receipt = self.send_data_message(message)
        self.log.info("sent message:%s policy:%s template:%s count:%s" % (
            receipt, self.manager.data['name'],
            self.data.get('template', 'default'), len(batch)))