def _load_storage_config(self, storage_config):
    # Assumes `import sys` at module level and an `eprint` helper defined
    # elsewhere in the module that prints to stderr.
    if 'store' not in storage_config:
        eprint('You must set the \'store\' field of \'storage\'')
        sys.exit(-1)
    if 'source' in storage_config:
        if not self._validate_storage_fields(storage_config['source']):
            sys.exit(-1)
        if not self._validate_storage_fields(storage_config['store']):
            sys.exit(-1)
        source_config = storage_config['source']
        store_config = storage_config['store']
    else:
        # No explicit 'source': the 'store' backend serves both roles.
        if not self._validate_storage_fields(storage_config['store']):
            sys.exit(-1)
        source_config = storage_config['store']
        store_config = storage_config['store']
    self.SOURCE_STORAGE = self._choose_storage_type(source_config)
    self.STORE_STORAGE = self._choose_storage_type(store_config)
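For reference, a minimal sketch of the dictionary shape `_load_storage_config` expects, inferred from the checks above (the 'fs' backend and the paths are illustrative placeholders):

storage_config = {
    'store': {'type': 'fs', 'location': '/var/lib/images'},
    # 'source' is optional; when omitted, 'store' serves both roles.
    'source': {'type': 'fs', 'location': '/var/lib/source-images'},
}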
def __init__(self):
    config = self._load_config_file()
    self.PORT = config.get('port', 6001)
    self.SECRET_KEY = config.get('secret_key')
    # 'cache_store' is an optional field
    if 'cache_store' in config:
        self._load_cache_store_config(config['cache_store'])
    # 'options' is an optional field
    if 'options' in config:
        self._load_options_config(config['options'])
    # 'storage' is a required field
    if 'storage' in config:
        self._load_storage_config(config['storage'])
    else:
        eprint('You must set the \'storage\' config')
        sys.exit(-1)
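A config file consumed by this constructor might look like the following (hypothetical; the on-disk format and the loader behind `_load_config_file` are not shown in the snippet):

config = {
    'port': 6001,
    'secret_key': 'change-me',                 # placeholder
    'cache_store': {'type': 'in-memory'},      # optional
    'storage': {                               # required
        'store': {'type': 'fs', 'location': '/var/lib/images'},
    },
}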
def __init__(self, aws_access_key_id, aws_secret_access_key, bucket_name, bucket_region, location, **kwargs):
    if not S3Storage.is_valid_region(bucket_region):
        eprint('\'{}\' region is not valid for S3'.format(bucket_region))
        sys.exit(-1)
    self.TYPE = 's3'
    self.AWS_ACCESS_KEY_ID = aws_access_key_id
    self.AWS_SECRET_ACCESS_KEY = aws_secret_access_key
    self.BUCKET_NAME = bucket_name
    self.BUCKET_REGION = bucket_region
    self.LOCATION = location
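Constructing the backend would then look roughly like this (credentials, bucket, and region are placeholders; `is_valid_region` is assumed to check against the known S3 region list):

storage = S3Storage(
    aws_access_key_id='AKIA...',        # placeholder credential
    aws_secret_access_key='...',        # placeholder credential
    bucket_name='my-image-bucket',
    bucket_region='us-east-1',
    location='images/',
)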
def _leaderboard_compute_overall_score(self, N=100):
    """Based on the NULL distribution, compute the overall score of model1.

    Not finalised.
    """
    import fitter
    import scipy.stats

    self._compute_pvalues_pred1(N=N)
    self._compute_pvalues_param1(N=N)
    # Fit a beta distribution to each NULL distance distribution.
    fit_param1 = fitter.Fitter(self.rdistance_param1)
    fit_param1.distributions = ['beta']
    fit_param1.fit()
    fit_pred1 = fitter.Fitter(self.rdistance_pred1)
    fit_pred1.distributions = ['beta']
    fit_pred1.fit()
    # p-values are the fitted beta CDF evaluated at the observed scores.
    self.pvalues_param1 = scipy.stats.beta.cdf(self.scores['param1'].scores,
                                               *fit_param1.fitted_param['beta'])
    self.pvalues_pred1 = scipy.stats.beta.cdf(self.scores['pred1'].scores,
                                              *fit_pred1.fitted_param['beta'])
    self.scores['pred1']['pvalues'] = self.pvalues_pred1
    self.scores['param1']['pvalues'] = self.pvalues_param1
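The core idea above in self-contained form, using scipy alone (synthetic data; the real code fits the beta distribution via the `fitter` package instead):

import numpy as np
import scipy.stats

rng = np.random.default_rng(0)
null_distances = rng.beta(2.0, 5.0, size=1000)   # stand-in NULL distribution
a, b, loc, scale = scipy.stats.beta.fit(null_distances)
observed_scores = np.array([0.1, 0.3, 0.6])
# p-value of each observed score under the fitted NULL beta distribution
pvalues = scipy.stats.beta.cdf(observed_scores, a, b, loc=loc, scale=scale)
print(pvalues)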
def run(self):
    """Run the fitting using the 3D Morphable Model."""
    # model = Model()
    # self.model = model.init_from_basel(model_pf)
    fitter = MorphableModelFitter(self.model)
    fitter.fit(self.image, self.anchors_pf)
@staticmethod
def _validate_cache_store_fields(cache_store_config):
    _required_fields = ('type',)
    if not all(field in cache_store_config for field in _required_fields):
        eprint('There are missing values for cache store: one of {}'.format(_required_fields))
        return False
    if cache_store_config['type'] not in ('redis', 'in-memory'):
        eprint('\'{}\' is not a supported type for cache store'.format(cache_store_config['type']))
        return False
    return True
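A cache_store section that passes this check can be as small as the following (values illustrative; only 'redis' and 'in-memory' are accepted, and the validator is called directly here for demonstration):

cache_store_config = {'type': 'redis'}    # or {'type': 'in-memory'}
assert _validate_cache_store_fields(cache_store_config)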
@staticmethod
def _validate_storage_fields(storage_config):
    _required_fields = ('type',)
    _required_fields_for_fs_storage = ('location',)
    _required_fields_for_s3_storage = (
        'aws_access_key_id', 'aws_secret_access_key', 'bucket_name', 'bucket_region', 'location')
    if not all(field in storage_config for field in _required_fields):
        eprint('There are missing values for storage: one of {}'.format(_required_fields))
        return False
    if storage_config['type'] == 'fs':
        if not all(field in storage_config for field in _required_fields_for_fs_storage):
            eprint('There are missing values for \'file system\' backed storage')
            return False
    elif storage_config['type'] == 's3':
        if not all(field in storage_config for field in _required_fields_for_s3_storage):
            eprint('There are missing values for \'s3\' backed storage')
            return False
    else:
        eprint('\'{}\' is not a supported storage type'.format(storage_config['type']))
        return False
    return True
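A quick sanity check of the two accepted backends (all values are placeholders):

fs_config = {'type': 'fs', 'location': '/var/lib/images'}
s3_config = {
    'type': 's3',
    'aws_access_key_id': 'AKIA...',
    'aws_secret_access_key': '...',
    'bucket_name': 'my-image-bucket',
    'bucket_region': 'us-east-1',
    'location': 'images/',
}
# Both should validate; an unrecognised type such as 'ftp' would not.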