# Prepare paths and parameters
if type(message['Metadata'])==dict: #support for cellprofiler --print-groups output
    if message['output_structure']=='':
        watchtowerlogger=watchtower.CloudWatchLogHandler(log_group=LOG_GROUP_NAME, stream_name=str(message['Metadata'].values()),create_log_group=False)
        logger.addHandler(watchtowerlogger)
        printandlog('You must specify an output structure when passing Metadata as dictionaries',logger)
        logger.removeHandler(watchtowerlogger)
        return 'INPUT_PROBLEM'
    else:
        metadataID = message['output_structure']
        metadataForCall = ''
        for eachMetadata in message['Metadata'].keys():
            if eachMetadata not in metadataID:
                watchtowerlogger=watchtower.CloudWatchLogHandler(log_group=LOG_GROUP_NAME, stream_name=str(message['Metadata'].values()),create_log_group=False)
                logger.addHandler(watchtowerlogger)
                printandlog('Your specified output structure does not match the Metadata passed',logger)
                logger.removeHandler(watchtowerlogger)
                return 'INPUT_PROBLEM'
            else:
                metadataID = metadataID.replace(eachMetadata,message['Metadata'][eachMetadata])
                metadataForCall+=eachMetadata+'='+message['Metadata'][eachMetadata]+','
        message['Metadata']=metadataForCall[:-1]
elif 'output_structure' in message.keys():
    if message['output_structure']!='': #support for explicit output structuring
        watchtowerlogger=watchtower.CloudWatchLogHandler(log_group=LOG_GROUP_NAME, stream_name=message['Metadata'],create_log_group=False)
        logger.addHandler(watchtowerlogger)
        metadataID = message['output_structure']
        for eachMetadata in message['Metadata'].split(','):
            if eachMetadata.split('=')[0] not in metadataID:
                printandlog('Your specified output structure does not match the Metadata passed',logger)
                logger.removeHandler(watchtowerlogger)
                return 'INPUT_PROBLEM'
            else:
                metadataID = metadataID.replace(eachMetadata.split('=')[0],eachMetadata.split('=')[1])
        printandlog('metadataID ='+metadataID, logger)
        logger.removeHandler(watchtowerlogger)
    else: #backwards compatibility with 1.0.0 and/or no desire to structure output
        metadataID = '-'.join([x.split('=')[1] for x in message['Metadata'].split(',')]) # Strip equal signs from the metadata
else: #backwards compatibility with 1.0.0 and/or no desire to structure output
    metadataID = '-'.join([x.split('=')[1] for x in message['Metadata'].split(',')]) # Strip equal signs from the metadata

localOut = LOCAL_OUTPUT + '/%(MetadataID)s' % {'MetadataID': metadataID}
remoteOut = os.path.join(message['output'],metadataID)
replaceValues = {'PL':message['pipeline'], 'OUT':localOut, 'FL':message['data_file'],
def set_stream_logger(self):
logger = logging.getLogger(self.name)
logger.setLevel(self.level)
now = datetime.datetime.now()
stream_name = "{}/{}/{}".format(now.year, now.month, now.day)
logger.addHandler(
watchtower.CloudWatchLogHandler(
log_group='/cis/{}'.format(self.log_group_name),
stream_name=stream_name
)
)
self.logger = logger
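# A minimal standalone sketch of the same pattern as set_stream_logger above:
# build a date-based stream name and attach a CloudWatchLogHandler to a named
# logger. The log group name 'my-app' is a placeholder, and the keyword
# arguments follow the older watchtower API used throughout these examples
# (newer releases rename some of them, e.g. log_group -> log_group_name).
import datetime
import logging

import watchtower

def make_dated_logger(name, log_group='my-app', level=logging.INFO):
    logger = logging.getLogger(name)
    logger.setLevel(level)
    now = datetime.datetime.now()
    # One stream per calendar day, e.g. "2024/5/17"
    stream_name = "{}/{}/{}".format(now.year, now.month, now.day)
    logger.addHandler(
        watchtower.CloudWatchLogHandler(
            log_group=log_group,
            stream_name=stream_name,
        )
    )
    return logger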
"""
if "watchtower" not in sys.modules:
logging.error("CloudWatch logging was requested, but 'watchtower' is not available")
return
try:
logger = logging.getLogger()
args = {'create_log_group': False,
'log_group': log_group,
'stream_name': log_stream if log_stream else "default",
'send_interval': 3,
}
if region:
args['boto3_session'] = Session(region_name=region)
cw = watchtower.CloudWatchLogHandler(**args)
cw.setFormatter(get_formatter(level))
logger.addHandler(cw)
except Exception:
logging.exception("failed to setup watchtower")
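# A hedged sketch of the optional-import guard that the sys.modules check
# above relies on: try to import watchtower once at module load and fall back
# to console-only logging if it is missing. A failed import leaves no
# "watchtower" entry in sys.modules, which is what the check inspects.
import logging
import sys

try:
    import watchtower  # noqa: F401
except ImportError:
    logging.getLogger(__name__).warning(
        "watchtower not installed; CloudWatch logging disabled")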
log.setLevel(logging.INFO)
formatter = logging.Formatter('%(levelname)-8s %(message)s')
console_logger = logging.StreamHandler(stream=sys.stdout)
console_logger.setFormatter(formatter)
log.addHandler(console_logger)
if 'remote_logging' in settings:
remote_log_settings = settings['remote_logging']
aws_session = boto3.session.Session(
**make_aws_args(remote_log_settings)
)
log_stream_name = '{}-{}'.format(job_name, int(time.time()))
log.info('*** logging to AWS CloudWatch stream %s', log_stream_name)
aws_logger = watchtower.CloudWatchLogHandler(
log_group=remote_log_settings['log_group'],
stream_name=log_stream_name,
boto3_session=aws_session,
send_interval=5
)
aws_logger.setFormatter(formatter)
log.addHandler(aws_logger)
return log, formatter
def get_handler(self):
handler = watchtower.CloudWatchLogHandler(
log_group=self.__log_group,
stream_name=self.__stream_name,
send_interval=self.__send_interval,
boto3_session=Session(),
create_log_group=False,
)
handler.setFormatter(JsonFormatter('%(message)s %(threadName)s %(lineno)d %(pathname)s'))
return handler
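# A hedged usage sketch for a handler like the one returned by get_handler()
# above. JsonFormatter is assumed to come from the python-json-logger package
# (pythonjsonlogger.jsonlogger.JsonFormatter); the group and stream names are
# placeholders, and the keyword arguments follow the older watchtower API used
# in these examples.
import logging

import watchtower
from boto3.session import Session
from pythonjsonlogger.jsonlogger import JsonFormatter

handler = watchtower.CloudWatchLogHandler(
    log_group='my-service',    # placeholder log group
    stream_name='worker-1',    # placeholder stream
    send_interval=10,          # flush queued records every 10 seconds
    boto3_session=Session(),   # use the default credential chain
    create_log_group=False,    # assume the group already exists
)
handler.setFormatter(JsonFormatter('%(message)s %(threadName)s %(lineno)d %(pathname)s'))

logger = logging.getLogger('my-service')
logger.setLevel(logging.INFO)
logger.addHandler(handler)
logger.info('structured record', extra={'job_id': 42})  # extra keys land in the JSON payload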
log.addHandler(console_logger)
if 'remote_logging' in settings:
remote_log_settings = settings['remote_logging']
if remote_log_settings['destination'] != 'cloudwatch':
log.warning('*** unknown log destination %s, skipping',
remote_log_settings['destination'])
return log, formatter
aws_session = boto3.session.Session(
**_make_aws_args(remote_log_settings)
)
log_stream_name = '{}-{}'.format(job_name, int(time.time()))
log.info('*** logging to AWS CloudWatch stream %s', log_stream_name)
aws_logger = watchtower.CloudWatchLogHandler(
log_group=remote_log_settings['log_group'],
stream_name=log_stream_name,
boto3_session=aws_session,
send_interval=5
)
aws_logger.setFormatter(formatter)
log.addHandler(aws_logger)
return log, formatter
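# A hedged sketch of the 'remote_logging' settings block this setup expects.
# The 'destination' and 'log_group' keys are read directly above; the AWS keys
# shown here (region, profile) are assumptions about what _make_aws_args turns
# into boto3.session.Session() arguments.
settings = {
    'remote_logging': {
        'destination': 'cloudwatch',  # anything else is skipped with a warning
        'log_group': 'my-jobs',       # placeholder CloudWatch log group
        'region': 'us-east-1',        # assumed, consumed by _make_aws_args
        'profile': 'default',         # assumed, consumed by _make_aws_args
    }
}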
#API secured by secrets the AWS unicorns have
data = request.get_json()
req = requests.post(BACKEND_API+'/unicorn/'+unicorn_id, json={'snack':data['snack'],'teamid':data['teamid']})
return req.json(), req.status_code
api.add_resource(HealthCheck,'/healthcheck','/')
api.add_resource(Unicorn, '/unicorn')
api.add_resource(Unicorns, '/unicorn/')
if __name__ == '__main__':
#If running in prod - log to CWL
try:
import watchtower
handler = watchtower.CloudWatchLogHandler(log_group='CICDApiProxy',)
app.logger.addHandler(handler)
logging.getLogger("werkzeug").addHandler(handler)
except Exception:
print("Couldn't start CW Logging")
app.run(host='0.0.0.0')
'DATA': DATA_ROOT, 'Metadata': message['Metadata'], 'IN': message['input'],
'MetadataID':metadataID }
# See if this is a message you've already handled, if you've so chosen
if CHECK_IF_DONE_BOOL.upper() == 'TRUE':
try:
s3client=boto3.client('s3')
bucketlist=s3client.list_objects(Bucket=AWS_BUCKET,Prefix=remoteOut+'/')
objectsizelist=[k['Size'] for k in bucketlist['Contents']]
if len(objectsizelist)>=int(EXPECTED_NUMBER_FILES):
if 0 not in objectsizelist:
return 'SUCCESS'
except KeyError: #Returned if that folder does not exist
pass
# Start logging now that we have a job we care about
watchtowerlogger=watchtower.CloudWatchLogHandler(log_group=LOG_GROUP_NAME, stream_name=metadataID,create_log_group=False)
logger.addHandler(watchtowerlogger)
# Build and run CellProfiler command
cp2 = False
with open(os.path.join(replaceValues['DATA'],replaceValues['PL']), 'r') as openpipe:
for line in openpipe:
if 'DateRevision:2' in line: #comes from a CP2 pipeline
cp2 = True
cmdstem = 'cellprofiler -c -r -b '
if not cp2:
cmdstem = 'cellprofiler -c -r '
cpDone = localOut + '/cp.is.done'
if message['pipeline'][-3:]!='.h5':
cmd = cmdstem + '-p %(DATA)s/%(PL)s -i %(DATA)s/%(IN)s -o %(OUT)s -d ' + cpDone
cmd += ' --data-file=%(DATA)s/%(FL)s -g %(Metadata)s'
else: