How to use the localstack.utils.aws.aws_stack.connect_to_service function in localstack

To help you get started, we've selected a few examples of aws_stack.connect_to_service, drawn from popular ways localstack is used in public projects.

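For context, connect_to_service returns a boto3 client that is preconfigured to talk to the LocalStack endpoint of the given service, so the regular boto3 client API applies from there on. A minimal sketch, assuming a LocalStack instance is running locally (the queue name is purely illustrative):

from localstack.utils.aws import aws_stack

# returns a boto3 client pointed at the local SQS endpoint
sqs_client = aws_stack.connect_to_service('sqs')

# from here on, the standard boto3 client API applies
queue = sqs_client.create_queue(QueueName='example-queue')  # illustrative name
print(queue['QueueUrl'])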

github omrikiei / aws_logging_handlers / tests / __init__.py (View on GitHub)
def setUp(self):
        self.s3_client = aws_stack.connect_to_service('s3')
        self.bucket = "test_log_bucket"

        self.s3_client.create_bucket(Bucket=self.bucket)
        # remove any objects left over from previous runs; 'Contents' is
        # absent from the list_objects response when the bucket is empty
        objects = self.s3_client.list_objects(Bucket=self.bucket).get('Contents') or []
        if objects:
            self.s3_client.delete_objects(Bucket=self.bucket, Delete={
                'Objects': [{'Key': o['Key']} for o in objects]
            })

github localstack / localstack / tests / integration / test_lambda.py (View on GitHub)
def _run_forward_to_fallback_url(self, url, num_requests=3):
        lambda_client = aws_stack.connect_to_service('lambda')
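        # invocations of functions that do not exist are forwarded to the fallback URL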
        config.LAMBDA_FALLBACK_URL = url
        try:
            for i in range(num_requests):
                lambda_client.invoke(FunctionName='non-existing-lambda-%s' % i,
                    Payload=b'{}', InvocationType='RequestResponse')
        finally:
            config.LAMBDA_FALLBACK_URL = ''

github localstack / localstack / tests / integration / test_notifications.py (View on GitHub)
def _delete_notification_config(self):
        s3_client = aws_stack.connect_to_service('s3')
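        # putting an empty NotificationConfiguration removes all notification settings from the bucket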
        s3_client.put_bucket_notification_configuration(
            Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS, NotificationConfiguration={})
        config = s3_client.get_bucket_notification_configuration(Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS)
        self.assertFalse(config.get('QueueConfigurations'))
        self.assertFalse(config.get('TopicConfigurations'))

github localstack / localstack / tests / integration / test_dynamodb.py (View on GitHub)
def test_large_data_download(self):
        dynamodb = aws_stack.connect_to_resource('dynamodb')
        dynamodb_client = aws_stack.connect_to_service('dynamodb')

        aws_stack.create_dynamodb_table(TEST_DDB_TABLE_NAME_2, partition_key=PARTITION_KEY)
        table = dynamodb.Table(TEST_DDB_TABLE_NAME_2)

        # Create a large number of items
        num_items = 20
        for i in range(0, num_items):
            item = {PARTITION_KEY: 'id%s' % i, 'data1': 'foobar123 ' * 1000}
            table.put_item(Item=item)

        # Retrieve the items. The data will be transmitted to the client with chunked transfer encoding
        result = table.scan()  # the Table resource already knows its table name
        assert len(result['Items']) == num_items

        # Clean up
        dynamodb_client.delete_table(TableName=TEST_DDB_TABLE_NAME_2)

github localstack / localstack / tests / integration / test_stepfunctions.py (View on GitHub)
def setUpClass(cls):
        cls.lambda_client = aws_stack.connect_to_service('lambda')
        cls.s3_client = aws_stack.connect_to_service('s3')
        cls.sfn_client = aws_stack.connect_to_service('stepfunctions')

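        # build a single Lambda deployment package and reuse it for both test functions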
        zip_file = testutil.create_lambda_archive(
            load_file(TEST_LAMBDA_PYTHON),
            get_content=True,
            runtime=LAMBDA_RUNTIME_PYTHON36
        )
        testutil.create_lambda_function(
            func_name=TEST_LAMBDA_NAME_1,
            zip_file=zip_file,
            runtime=LAMBDA_RUNTIME_PYTHON36,
            envvars={'Hello': TEST_RESULT_VALUE}
        )
        testutil.create_lambda_function(
            func_name=TEST_LAMBDA_NAME_2,
            zip_file=zip_file,
            runtime=LAMBDA_RUNTIME_PYTHON36,
            envvars={'Hello': TEST_RESULT_VALUE}
        )

github localstack / localstack / tests / integration / test_cloudformation.py (View on GitHub)
def stream_exists(name):
    kinesis_client = aws_stack.connect_to_service('kinesis')
    streams = kinesis_client.list_streams()
    return name in streams['StreamNames']

github localstack / localstack / localstack / services / sns / sns_listener.py (View on GitHub)
def publish_message(topic_arn, req_data, subscription_arn=None):
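    # form-encoded POST parameters are parsed into lists; take the first 'Message' value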
    message = req_data['Message'][0]
    sqs_client = aws_stack.connect_to_service('sqs')
    for subscriber in SNS_SUBSCRIPTIONS.get(topic_arn, []):
        if subscription_arn not in [None, subscriber['SubscriptionArn']]:
            continue
        filter_policy = json.loads(subscriber.get('FilterPolicy') or '{}')
        message_attributes = get_message_attributes(req_data)
        if not check_filter_policy(filter_policy, message_attributes):
            continue
        if subscriber['Protocol'] == 'sqs':
            endpoint = subscriber['Endpoint']
            if 'sqs_queue_url' in subscriber:
                queue_url = subscriber.get('sqs_queue_url')
            elif '://' in endpoint:
                queue_url = endpoint
            else:
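                # the endpoint is a queue ARN; the queue name is the last colon-separated field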
                queue_name = endpoint.split(':')[5]
                queue_url = aws_stack.get_sqs_queue_url(queue_name)

github localstack / localstack / localstack / services / s3 / s3_listener.py (View on GitHub)
def bucket_exists(bucket_name):
    """Tests for the existence of the specified bucket. Returns the error code
    if the bucket does not exist (200 if the bucket does exist).
    """
    bucket_name = normalize_bucket_name(bucket_name)

    s3_client = aws_stack.connect_to_service('s3')
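    # head_bucket raises a ClientError if the bucket does not exist or is not accessible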
    try:
        s3_client.head_bucket(Bucket=bucket_name)
    except ClientError as err:
        error_code = err.response.get('Error').get('Code')
        return False, error_code

    return True, 200

github localstack / localstack / localstack / utils / cloudformation / template_deployer.py (View on GitHub)
elif resource_type == 'ApiGateway::Method':
            api_id = resolve_refs_recursively(stack_name, resource_props['RestApiId'], resources)
            res_id = resolve_refs_recursively(stack_name, resource_props['ResourceId'], resources)
            if not api_id or not res_id:
                return None
            res_obj = aws_stack.connect_to_service('apigateway').get_resource(restApiId=api_id, resourceId=res_id)
            match = [v for (k, v) in res_obj['resourceMethods'].items()
                     if resource_props['HttpMethod'] in (v.get('httpMethod'), k)]
            int_props = resource_props.get('Integration')
            if int_props:
                match = [m for m in match if
                    m.get('methodIntegration', {}).get('type') == int_props.get('Type') and
                    m.get('methodIntegration', {}).get('httpMethod') == int_props.get('IntegrationHttpMethod')]
            return any(match) or None
        elif resource_type == 'SQS::Queue':
            sqs_client = aws_stack.connect_to_service('sqs')
            queues = sqs_client.list_queues()
            result = list(filter(lambda item:
                # TODO possibly find a better way to compare resource_id with queue URLs
                item.endswith('/%s' % resource_id), queues.get('QueueUrls', [])))
            if not result:
                return None
            result = sqs_client.get_queue_attributes(QueueUrl=result[0], AttributeNames=['All'])['Attributes']
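            # mirror the queue ARN under the generic 'Arn' key used by the deployer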
            result['Arn'] = result['QueueArn']
            return result
        elif resource_type == 'SNS::Topic':
            topics = aws_stack.connect_to_service('sns').list_topics()
            result = list(filter(lambda item: item['TopicArn'] == resource_id, topics.get('Topics', [])))
            return result[0] if result else None
        elif resource_type == 'S3::Bucket':
            bucket_name = resource_props.get('BucketName') or resource_id
            return aws_stack.connect_to_service('s3').get_bucket_location(Bucket=bucket_name)

github localstack / localstack / localstack / services / s3 / s3_listener.py (View on GitHub)
def send_notification_for_subscriber(notif, bucket_name, object_path, version_id, api_method, action, event_name):
    bucket_name = normalize_bucket_name(bucket_name)

    if (not event_type_matches(notif['Event'], action, api_method) or
            not filter_rules_match(notif.get('Filter'), object_path)):
        return
    # send notification
    message = get_event_message(
        event_name=event_name, bucket_name=bucket_name,
        file_name=urlparse.urlparse(object_path[1:]).path,
        version_id=version_id
    )
    message = json.dumps(message)
    if notif.get('Queue'):
        sqs_client = aws_stack.connect_to_service('sqs')
        try:
            queue_url = queue_url_for_arn(notif['Queue'])
            sqs_client.send_message(QueueUrl=queue_url, MessageBody=message)
        except Exception as e:
            LOGGER.warning('Unable to send notification for S3 bucket "%s" to SQS queue "%s": %s' %
                (bucket_name, notif['Queue'], e))
    if notif.get('Topic'):
        sns_client = aws_stack.connect_to_service('sns')
        try:
            sns_client.publish(TopicArn=notif['Topic'], Message=message, Subject='Amazon S3 Notification')
        except Exception as e:
            LOGGER.warning('Unable to send notification for S3 bucket "%s" to SNS topic "%s": %s' %
                (bucket_name, notif['Topic'], e))
    # CloudFunction and LambdaFunction are semantically identical
    lambda_function_config = notif.get('CloudFunction') or notif.get('LambdaFunction')
    if lambda_function_config: