Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def _get_state_file_from_s3(self, state_file_url, profile=None, region=None):
    """
    Download a Terraform state file from S3 and return it parsed as a dict.

    :param state_file_url: an ``s3://bucket/key`` style URL to the state file
    :type state_file_url: string
    :param profile: (optional) AWS credentials profile to build the session from
    :type profile: string
    :param region: (optional) AWS region, used together with ``profile``
    :type region: string

    :raises NoSuchStateFile: if the key does not exist in the bucket

    :rtype: dict
    """
    if profile:
        session = boto3.session.Session(profile_name=profile, region_name=region)
    else:
        session = get_boto3_session()
    s3 = session.resource('s3')
    # Strip the "s3://" scheme (5 chars); first segment is the bucket,
    # the rest re-joined is the object key.
    parts = state_file_url[5:].split('/')
    bucket = parts[0]
    filename = "/".join(parts[1:])
    key = s3.Object(bucket, filename)
    try:
        state_file = key.get()["Body"].read().decode('utf-8')
    except ClientError as ex:
        if ex.response['Error']['Code'] == 'NoSuchKey':
            # FIX: report the URL this call was actually given; the old code
            # referenced self.state_file_url, which may not exist as an
            # attribute and would mask the real error with AttributeError.
            raise NoSuchStateFile("Could not find Terraform state file {}".format(state_file_url))
        else:
            # Re-raise unchanged, preserving the original traceback.
            raise
    return json.loads(state_file)
# NOTE(review): this is a fragment of a larger health-check method — the
# enclosing `def`, the `if` branch matching the `elif lbtype == 'alb'` below,
# and the initialization of `success`, `desired_count` and `lbtype` are not
# visible here. Comments below describe only what these lines demonstrably do.
print("")
print("Load Balancer")
# Classic ELB path: ask the ELB API for per-instance health.
elb = get_boto3_session().client('elb')
response = elb.describe_instance_health(LoadBalancerName=self.load_balancer['load_balancer_name'])
states = response['InstanceStates']
# Fewer registered instances than desired counts as a failure.
if len(states) < desired_count:
success = False
for state in states:
# An instance is only healthy when InService with no error description.
if state['State'] != "InService" or state['Description'] != "N/A":
success = False
print(state['InstanceId'], state['State'], state['Description'])
elif lbtype == 'alb':
# ALB path: self.load_balancer is iterated as a list of target-group
# dicts here (vs. a single dict in the ELB branch above).
for target_group in self.load_balancer:
print("")
print("Target Group: {}".format(target_group['target_group_arn']))
alb = get_boto3_session().client('elbv2')
response = alb.describe_target_health(TargetGroupArn=target_group['target_group_arn'])
# Fewer registered targets than desired counts as a failure.
if len(response['TargetHealthDescriptions']) < desired_count:
success = False
for desc in response['TargetHealthDescriptions']:
if desc['TargetHealth']['State'] != 'healthy':
success = False
# Description is optional in TargetHealth; default to empty string.
print(
desc['Target']['Id'],
desc['TargetHealth']['State'],
desc['TargetHealth'].get('Description', '')
)
return success
def __init__(self, clusterName, yml=None):
    """
    :param clusterName: the name of the cluster in which we'll run our
    helper tasks
    :type clusterName: string
    :param yml: the task definition information for the task from our
    deployfish.yml file
    :type yml: dict
    """
    # FIX: avoid the mutable default argument ({} shared across all calls);
    # use a None sentinel and build a fresh dict per call instead.
    if yml is None:
        yml = {}
    self.clusterName = clusterName
    self.ecs = get_boto3_session().client('ecs')
    self.commands = {}
    self.from_yaml(yml)
    self.desired_task_definition = TaskDefinition(yml=yml)
    # Populated later from AWS; None until then.
    self.active_task_definition = None
def populate(self):
    """
    Lazily pull our values from AWS; do nothing if that has already happened.
    """
    if not self.populated:
        self.ssm = get_boto3_session().client('ssm')
        self.from_yaml(self.yml)
        self.from_aws()
        # Mark done so repeated calls are no-ops.
        self.populated = True
def put_string(self, data, key):
    """
    Write ``data`` to S3 as object ``key`` in the configured destination bucket.
    """
    client = get_boto3_session().client('s3')
    client.put_object(
        Bucket=self.config["dest"],
        Key=key,
        Body=data,
    )
def __init__(self, group_name=None, yml=None):
    """
    :param group_name: (optional) the name of the autoscaling group
    :type group_name: string
    :param yml: (optional) autoscaling group config from our deployfish.yml file
    :type yml: dict
    """
    # FIX: avoid the mutable default argument ({} shared across all calls);
    # use a None sentinel and build a fresh dict per call instead.
    if yml is None:
        yml = {}
    self.asg = get_boto3_session().client('autoscaling')
    self.__groupName = group_name
    self.from_yaml(yml)
    self.from_aws()
{
'namespace': 'local',
'name': 'test',
'dns_records': [
{
'type': 'A',
'ttl': '60',
}
],
:param yml: service discovery config from ``deployfish.yml`` as described above
:type yml: dict
"""
# NOTE(review): the lines above are the tail of a docstring whose `def` and
# opening quotes are not visible in this view; they are left untouched.
# AWS Cloud Map client used to manage the service discovery registry.
self.sd = get_boto3_session().client('servicediscovery')
# Establish default attribute values before applying config.
self.__defaults()
# `registry_arn` comes from a parameter on the (not visible) def line.
self._registry_arn = registry_arn
self.from_yaml(yml)
def __init__(self, task):
    """
    Set up a CloudWatch Events scheduler for ``task``, deriving the rule and
    target names from the task's ``taskName``.
    """
    scheduler_name = "{}-scheduler".format(task.taskName)
    self.task = task
    self.client = get_boto3_session().client('events')
    self.name = scheduler_name
    self.target_name = "{}-scheduler-target".format(task.taskName)
:param serviceName: the name of the ECS service to monitor
:type serviceName: string
:param clusterName: the name of the cluster the service is in
:type clusterName: string
:param aws: (optional) the dict returned by ``describe_alarms()`` for this Alarm
:type aws: dict
:param scaling_policy_arn: (optional) the ARN of the scaling policy that should be activated when the alarm
enters ALARM state.
:type scaling_policy_arn: string
"""
# NOTE(review): the lines above are the tail of an __init__ docstring whose
# `def` line and opening quotes are outside this view; left untouched.
self.cloudwatch = get_boto3_session().client('cloudwatch')
self.serviceName = serviceName
self.clusterName = clusterName
self.scaling_policy_arn = scaling_policy_arn
# Establish default attribute values, then layer on yml config and AWS state.
self.__defaults()
self.from_yaml(yml)
self.from_aws(aws)