    :type type: string
    :param format: a string specifying the format of the field,
        e.g. ``"DD.MM.YYYY"`` for a field of type ``"date"``
        (optional)
    :type format: string
    :returns: the field that was created
    :rtype: dict
    '''
    try:
        data_dict, errors = dictization_functions.validate(data_dict,
            schema.resource_schema_field_create_schema(), context)
    except exceptions.InvalidResourceIDException as e:
        raise toolkit.ValidationError(e)
    if errors:
        raise toolkit.ValidationError(errors)
    resource_id = data_dict.pop('resource_id')
    resource_dict = toolkit.get_action('resource_show')(context,
        {'id': resource_id})
    if data_dict.get('type') in ('date', 'time', 'datetime'):
        try:
            path = util.get_path_to_resource_file(resource_dict)
        except exceptions.ResourceFileDoesNotExistException:
            path = None
        if path:
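Below is a minimal, hypothetical sketch of how an extension might invoke a field-create action like the one whose body is shown above; the action name 'resource_schema_field_create' and the example ids and values are assumptions, not taken from the snippet.

# Hypothetical caller sketch; the action name and field values below are assumptions.
import ckan.plugins.toolkit as toolkit

def add_date_field_example(resource_id):
    # 'format' follows the docstring above: "DD.MM.YYYY" for a field of type "date".
    try:
        return toolkit.get_action('resource_schema_field_create')(data_dict={
            'resource_id': resource_id,
            'index': 0,
            'name': 'published',
            'type': 'date',
            'format': 'DD.MM.YYYY',
        })
    except toolkit.ValidationError as e:
        # The action raises ValidationError for schema errors or an invalid resource id.
        return e.error_dict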
def _load_and_validate_datapackage(url=None, upload=None):
    try:
        if _upload_attribute_is_valid(upload):
            dp = datapackage.DataPackage(upload.file)
        else:
            dp = datapackage.DataPackage(url)
        dp.validate()
    except (datapackage.exceptions.DataPackageException,
            datapackage.exceptions.SchemaError,
            datapackage.exceptions.ValidationError) as e:
        msg = {'datapackage': [e.message]}
        raise toolkit.ValidationError(msg)
    if not dp.safe():
        msg = {'datapackage': ['the Data Package has unsafe attributes']}
        raise toolkit.ValidationError(msg)
    return dp
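A hedged usage sketch for the helper above, assuming it is called from code in the same module (which already imports datapackage and toolkit); the return shape and the use of dp.descriptor are illustrative assumptions.

# Illustrative caller; not part of the original source.
def load_datapackage_example(url):
    try:
        dp = _load_and_validate_datapackage(url=url)
    except toolkit.ValidationError as e:
        # e.error_dict carries the {'datapackage': [...]} messages built above.
        return {'success': False, 'errors': e.error_dict}
    # Assuming datapackage-py exposes the parsed metadata as a .descriptor dict.
    return {'success': True, 'name': dp.descriptor.get('name')}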
def all_issues(get_query_dict):
    query, errors = toolkit.navl_validate(
        dict(get_query_dict),
        schema.issue_dataset_controller_schema()
    )
    if errors:
        raise toolkit.ValidationError(errors)
    query.pop('__extras', None)
    return _search_issues(include_datasets=True,
                          **query)
def record_show(context, data_dict):
    '''Retrieve an individual record
    :param context:
    :param data_dict:
    '''
    context[u'user'] = toolkit.c.user or toolkit.c.author
    schema = context.get(u'schema', nhm_schema.record_show_schema())
    data_dict, errors = toolkit.navl_validate(data_dict, schema, context)
    if errors:
        raise toolkit.ValidationError(errors)
    resource_id = toolkit.get_or_bust(data_dict, u'resource_id')
    record_id = toolkit.get_or_bust(data_dict, u'record_id')
    # Retrieve datastore record
    record_data_dict = {
        u'resource_id': resource_id,
        u'filters': {
            u'_id': record_id
        }
    }
    if u'version' in data_dict:
        record_data_dict[u'version'] = data_dict[u'version']
    search_result = toolkit.get_action(u'datastore_search')(context, record_data_dict)
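A hypothetical call sketch for the action above; the action name 'record_show' and the identifier values are assumptions based on the docstring and the fields the code reads.

# Hypothetical caller; resource/record ids and the optional version are made-up examples.
import ckan.plugins.toolkit as toolkit

def show_record_example(resource_id, record_id, version=None):
    data_dict = {u'resource_id': resource_id, u'record_id': record_id}
    if version is not None:
        # 'version' is only passed through when present, mirroring the check above.
        data_dict[u'version'] = version
    return toolkit.get_action(u'record_show')({}, data_dict)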
def _pages_update(context, data_dict):
    if db.pages_table is None:
        db.init_db(context['model'])
    org_id = data_dict.get('org_id')
    page = data_dict.get('page')
    # we need the page in the context for name validation
    context['page'] = page
    context['group_id'] = org_id
    data, errors = df.validate(data_dict, schema, context)
    if errors:
        raise p.toolkit.ValidationError(errors)
    out = db.Page.get(group_id=org_id, name=page)
    if not out:
        out = db.Page()
        out.group_id = org_id
        out.name = page
    items = ['title', 'content', 'name', 'private',
             'order', 'page_type', 'publish_date']
    for item in items:
        # backward compatible with older versions where page_type does not exist
        setattr(out, item, data.get(item, 'page' if item == 'page_type' else None))
    extras = {}
    extra_keys = set(schema.keys()) - set(items + ['id', 'created'])
    for key in extra_keys:
        if key in data:
            extras[key] = data.get(key)
def _create_and_upload_local_resource(context, resource):
    path = resource['path']
    del resource['path']
    if isinstance(path, list):
        path = path[0]
    try:
        with open(path, 'r') as f:
            _create_and_upload_resource(context, resource, f)
    except IOError:
        msg = {'datapackage': [(
            "Couldn't create some of the resources."
            " Please make sure that all resources' files are accessible."
        )]}
        raise toolkit.ValidationError(msg)
    :param format: a string specifying the format of the field,
        e.g. ``"DD.MM.YYYY"`` for a field of type ``"date"``
        (optional)
    :type format: string
    :returns: the updated field
    :rtype: dict
    '''
    try:
        data_dict, errors = dictization_functions.validate(data_dict,
            schema.resource_schema_field_update_schema(), context)
    except exceptions.InvalidResourceIDException as e:
        raise toolkit.ValidationError(e)
    if errors:
        raise toolkit.ValidationError(errors)
    if validate_only:
        return data_dict, errors
    resource_id = data_dict.pop('resource_id')
    index = data_dict['index']
    resource_dict = toolkit.get_action('resource_show')(context,
        {'id': resource_id})
    if data_dict.get('type') in ('date', 'time', 'datetime'):
        try:
            path = util.get_path_to_resource_file(resource_dict)
        except exceptions.ResourceFileDoesNotExistException:
            path = None
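For contrast with the create path earlier, a hedged sketch of an update call: the 'index' key identifies which existing field to change. The action name 'resource_schema_field_update' and the example values are assumptions.

# Hypothetical caller; the action name and field values are assumptions, not from the snippet.
import ckan.plugins.toolkit as toolkit

def update_field_format_example(resource_id):
    # Only the field at the given index is updated; 'format' again follows the docstring above.
    return toolkit.get_action('resource_schema_field_update')(data_dict={
        'resource_id': resource_id,
        'index': 0,
        'name': 'published',
        'type': 'date',
        'format': 'YYYY-MM-DD',
    })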
    :param resource_id: the id of the resource to which the event belongs
        (optional)
    :type resource_id: string
    :param package_id: the id of the package to which the event belongs
        (optional)
    '''
    schema = context.get('schema',
                         realtime_schema.realtime_broadcast_event_schema())
    data_dict, errors = _validate(data_dict, schema, context)
    if errors:
        raise p.toolkit.ValidationError(errors)
    if 'resource_id' not in data_dict and 'package_id' not in data_dict:
        raise p.toolkit.ValidationError(
            'Either resource_id or package_id, or both, have to be set')
    p.toolkit.check_access('realtime_broadcast_event', context, data_dict)
    event = EventFactory.build_event(data_dict)
    EventDispatcher.dispatch_one(event)
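A hedged example of a broadcast payload: per the check above, at least one of resource_id or package_id must be present. The 'event_type' key and its value are invented for illustration and may differ from the real schema.

# Hypothetical payload sketch; keys besides resource_id/package_id are assumptions.
import ckan.plugins.toolkit as toolkit

def broadcast_example(resource_id):
    data_dict = {
        'event_type': 'datastore_update',  # assumed field, not shown in the snippet above
        'resource_id': resource_id,        # satisfies the resource_id/package_id requirement
    }
    return toolkit.get_action('realtime_broadcast_event')({}, data_dict)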
def download_original_image(context, data_dict):
    '''
    Request an original image from the MAM. Before sending the request, a number of checks
    are performed:
        - the resource exists
        - the record exists on that resource
        - the image exists on that record
    :param context:
    :param data_dict:
    '''
    # validate the data
    schema = context.get(u'schema', nhm_schema.download_original_image_schema())
    data_dict, errors = validate(data_dict, schema, context)
    if errors:
        raise toolkit.ValidationError(errors)
    # Get the resource
    resource = toolkit.get_action(u'resource_show')(context,
                                                    {
                                                        u'id': data_dict[u'resource_id']
                                                    })
    # Retrieve datastore record
    search_result = toolkit.get_action(u'datastore_search')(context, {
        u'resource_id': data_dict[u'resource_id'],
        u'filters': {
            u'_id': data_dict[u'record_id']
        }
    })
    try:
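The snippet above is cut off at its final try block. As a hedged companion sketch, this is how the action might be invoked; only resource_id and record_id are grounded in the code shown, and the schema may require further keys that are not visible here.

# Hypothetical caller; the schema may require additional keys (e.g. which image to fetch).
import ckan.plugins.toolkit as toolkit

def request_original_image_example(resource_id, record_id):
    return toolkit.get_action(u'download_original_image')({}, {
        u'resource_id': resource_id,
        u'record_id': record_id,
    })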