def logs(args):
    # With a specific log group plus a stream or time bound, print (or export) the
    # matching log events; otherwise list log groups and their most recent streams.
    if args.log_group and (args.log_stream or args.start_time or args.end_time):
        if args.export:
            return export_and_print_log_events(args)
        else:
            return print_log_events(args)
    table = []
    group_cols = ["logGroupName"]
    stream_cols = ["logStreamName", "lastIngestionTime", "storedBytes"]
    args.columns = group_cols + stream_cols
    for group in paginate(clients.logs.get_paginator("describe_log_groups")):
        if args.log_group and group["logGroupName"] != args.log_group:
            continue
        n = 0
        for stream in paginate(clients.logs.get_paginator("describe_log_streams"),
                               logGroupName=group["logGroupName"], orderBy="LastEventTime", descending=True):
            now = datetime.utcnow().replace(microsecond=0)
            # Convert the stream's last ingestion timestamp (epoch milliseconds) into an age.
            stream["lastIngestionTime"] = now - datetime.utcfromtimestamp(stream.get("lastIngestionTime", 0) // 1000)
            table.append(dict(group, **stream))
            n += 1
            if n >= args.max_streams_per_group:
                break
    page_output(tabulate(table, args))
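# The listings throughout this excerpt call a shared paginate() helper that is not
# shown here. A minimal sketch of what such a helper can look like, assuming it
# simply drains a boto3 paginator and yields individual items from a known result
# key (the project's real helper may infer the result key itself):
def paginate_sketch(paginator, result_key, **kwargs):
    for page in paginator.paginate(**kwargs):
        yield from page.get(result_key, [])

# e.g. paginate_sketch(clients.logs.get_paginator("describe_log_groups"), "logGroups")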
def find_acm_cert(dns_name):
    for cert in paginate(clients.acm.get_paginator("list_certificates")):
        cert.update(clients.acm.describe_certificate(CertificateArn=cert["CertificateArn"])["Certificate"])
        for name in cert["SubjectAlternativeNames"]:
            if name in [dns_name, ".".join(["*"] + dns_name.split(".")[1:])]:
                return cert
    raise AegeaException("Unable to find ACM certificate for {}".format(dns_name))
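# The second candidate above is the wildcard form of the requested name, so a lookup
# for "api.example.com" also matches a certificate issued for "*.example.com":
# ".".join(["*"] + "api.example.com".split(".")[1:])  ->  "*.example.com"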
def filter(args):
    # Build filter_log_events arguments from the CLI options; start/end times are
    # converted to epoch milliseconds, as the CloudWatch Logs API expects.
    filter_args = dict(logGroupName=args.log_group)
    if args.log_stream:
        filter_args.update(logStreamNames=[args.log_stream])
    if args.pattern:
        filter_args.update(filterPattern=args.pattern)
    if args.start_time:
        filter_args.update(startTime=int(timestamp(args.start_time) * 1000))
    if args.end_time:
        filter_args.update(endTime=int(timestamp(args.end_time) * 1000))
    num_results = 0
    while True:
        for event in paginate(clients.logs.get_paginator("filter_log_events"), **filter_args):
            if "timestamp" not in event or "message" not in event:
                continue
            print_log_event(event)
            num_results += 1
        if args.follow:
            time.sleep(1)
        else:
            return SystemExit(os.EX_OK if num_results > 0 else os.EX_DATAERR)
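# print_log_event() is not shown in this excerpt. A minimal sketch, assuming each
# event carries the millisecond "timestamp" and "message" fields returned by
# filter_log_events (the real helper may format its output differently):
def print_log_event_sketch(event):
    print(datetime.utcfromtimestamp(event["timestamp"] / 1000).isoformat(), event["message"])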
def list_load_balancers():
    elbs = paginate(clients.elb.get_paginator("describe_load_balancers"))
    albs = paginate(clients.elbv2.get_paginator("describe_load_balancers"))
    return list(elbs) + list(albs)
def tasks(args):
    list_clusters = clients.ecs.get_paginator("list_clusters")
    list_tasks = clients.ecs.get_paginator("list_tasks")

    def list_tasks_worker(worker_args):
        cluster, status = worker_args
        return cluster, status, list(paginate(list_tasks, cluster=cluster, desiredStatus=status))

    def describe_tasks_worker(t, cluster=None):
        return clients.ecs.describe_tasks(cluster=cluster, tasks=t)["tasks"] if t else []

    task_descs = []
    if args.clusters is None:
        args.clusters = [__name__.replace(".", "_")] if args.tasks else list(paginate(list_clusters))
    if args.tasks:
        task_descs = describe_tasks_worker(args.tasks, cluster=args.clusters[0])
    else:
        # List tasks for every (cluster, desired status) pair in parallel, then describe
        # them in batches of 100, the maximum accepted by ecs.describe_tasks.
        with concurrent.futures.ThreadPoolExecutor() as executor:
            for cluster, status, tasks in executor.map(list_tasks_worker, product(args.clusters, args.desired_status)):
                worker = partial(describe_tasks_worker, cluster=cluster)
                descs = executor.map(worker, (tasks[pos:pos + 100] for pos in range(0, len(tasks), 100)))
                task_descs += sum(descs, [])
    page_output(tabulate(task_descs, args))
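# The generator expression above can be factored into a small batching helper. A
# sketch of that idea (chunk_size=100 matches the describe_tasks limit); this helper
# is not part of the excerpt:
def batched_sketch(items, chunk_size=100):
    for pos in range(0, len(items), chunk_size):
        yield items[pos:pos + chunk_size]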
def complete_queue_name(**kwargs):
    return [q["jobQueueName"] for q in paginate(clients.batch.get_paginator("describe_job_queues"))]
# Fragment: body of a pricing-data caching helper. "service_code", "filters", and
# "max_cache_age_days" come from the enclosing function, which is not part of this excerpt.
get_products_args = dict(ServiceCode=service_code,
                         Filters=[dict(Type="TERM_MATCH", Field=k, Value=v) for k, v in filters])
cache_key = hashlib.sha256(json.dumps(get_products_args, sort_keys=True).encode()).hexdigest()[:32]
service_code_filename = os.path.join(config.user_config_dir, "pricing_cache_{}.json.gz".format(cache_key))
try:
    # Reuse the on-disk cache if it exists and is newer than max_cache_age_days.
    cache_date = datetime.fromtimestamp(os.path.getmtime(service_code_filename))
    if cache_date < datetime.now() - timedelta(days=max_cache_age_days):
        raise Exception("Cache is too old, discard")
    with gzip.open(service_code_filename) as gz_fh:
        with io.BufferedReader(gz_fh) as buf_fh:
            pricing_data = json.loads(buf_fh.read().decode())
except Exception:
    # Cache miss or stale cache: fetch fresh data from the Pricing API (pinned to
    # us-east-1, one of the regions that offers the endpoint) and try to rewrite the
    # cache file, ignoring write failures.
    logger.info("Fetching pricing data for %s", service_code)
    client = boto3.client("pricing", region_name="us-east-1")
    pricing_data = [json.loads(p) for p in paginate(client.get_paginator("get_products"), **get_products_args)]
    try:
        with gzip.open(service_code_filename, "w") as fh:
            fh.write(json.dumps(pricing_data).encode())
    except Exception as e:
        print(e, file=sys.stderr)
return pricing_data
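# Hypothetical usage, assuming the fragment above is the body of a helper with a
# signature like get_pricing_data(service_code, filters, max_cache_age_days=30)
# (the real name and signature are not shown in this excerpt):
# offers = get_pricing_data("AmazonEC2", filters=[("location", "US East (N. Virginia)"),
#                                                 ("instanceType", "m5.large")])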
def ls(args):
    # List Route 53 resource record sets, one row per record value (or alias target).
    table = []
    rrs_cols = ["Name", "Type", "TTL"]
    record_cols = ["Value"]
    for zone in paginate(clients.route53.get_paginator("list_hosted_zones")):
        if args.zones and zone["Name"] not in args.zones + [z + "." for z in args.zones]:
            continue
        for rrs in paginate(clients.route53.get_paginator("list_resource_record_sets"), HostedZoneId=zone["Id"]):
            for record in rrs.get("ResourceRecords", [rrs.get("AliasTarget", {})]):
                row = [rrs.get(f) for f in rrs_cols]
                row += [record.get(f, record.get("DNSName")) for f in record_cols]
                row += [get_field(zone, "Config.PrivateZone"), zone["Id"].rpartition("/")[-1]]
                table.append(row)
    column_names = rrs_cols + record_cols + ["Private", "Id"]
    page_output(format_table(table, column_names=column_names, max_col_width=args.max_col_width))
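# get_field() is not shown in this excerpt. A minimal sketch, assuming it is a
# dotted-path lookup into nested dicts (e.g. "Config.PrivateZone" on a hosted zone):
def get_field_sketch(item, path):
    for key in path.split("."):
        if not isinstance(item, dict):
            return None
        item = item.get(key)
    return item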