Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
"LFRicArgDescriptor._validate_scalar(): expecting a scalar "
"argument but got an argument of type '{0}'.".
format(arg_type.args[0]))
# There must be at least 2 arguments to describe a scalar
if len(arg_type.args) != 2:
raise ParseError(
"In the LFRic API each 'meta_arg' entry must have 2 "
"arguments if its first argument is 'gh_{{r,i}}scalar', but "
"found {0} in '{1}'.".format(len(arg_type.args), arg_type))
# Test allowed accesses for scalars (read_only or reduction)
scalar_accesses = [AccessType.READ] + \
AccessType.get_valid_reduction_modes()
# Convert generic access types to GH_* names for error messages
api_config = Config.get().api_conf(API)
rev_access_mapping = api_config.get_reverse_access_mapping()
if self._access_type not in scalar_accesses:
api_specific_name = rev_access_mapping[self._access_type]
valid_reductions = AccessType.get_valid_reduction_names()
raise ParseError(
"In the LFRic API scalar arguments must have read-only "
"('gh_read') or a reduction {0} access but found '{1}' "
"in '{2}'.".format(valid_reductions, api_specific_name,
arg_type))
# Reduction access is currently only valid for real scalar arguments
if self._type != "gh_real" and self._access_type in \
AccessType.get_valid_reduction_modes():
raise ParseError(
"In the LFRic API a reduction access '{0}' is only valid "
"with a real scalar argument, but '{1}' was found in '{2}'.".
format(self._access_type.api_specific_name(),
def __init__(self, api=""):
    '''Record which API this object is associated with.

    :param str api: name of the API to use. If empty, the default API \
        from the global configuration is used instead.

    :raises ParseError: if a non-empty but unsupported API name is \
        supplied (raised from :py:func:`check_api`).
    '''
    if api:
        # An explicit API was requested - reject it if unrecognised.
        check_api(api)
        self._type = api
    else:
        # No API supplied so fall back on the configured default.
        self._type = Config.get().default_api
def check_api(api):
    '''Check that the supplied API is valid.

    :param str api: The API to check.

    :raises ParseError: if the supplied API is not recognised.
    '''
    supported = Config.get().supported_apis
    if api in supported:
        return
    raise ParseError(
        "utils.py:check_api: Unsupported API '{0}' specified. "
        "Supported types are {1}.".format(api, supported))
:raises TransformationError: if transformation is applied to a Loop \
without its parent Directive when \
optimisations are applied.
:raises TransformationError: if transformation is applied to an \
orphaned Directive without its parent \
Directive.
'''
# Check ExtractTrans specific constraints.
# Extracting distributed memory code is not supported due to
# generation of infrastructure calls to set halos dirty or clean.
# This constraint covers the presence of HaloExchange and
# GlobalSum classes as they are only generated when distributed
# memory is enabled.
if Config.get().distributed_memory:
raise TransformationError(
"Error in {0}: Distributed memory is not supported."
.format(str(self.name)))
# Check constraints not covered by valid_node_types for
# individual Nodes in node_list.
from psyclone.psyir.nodes import Loop
from psyclone.psyGen import BuiltIn, Directive, Kern, \
OMPParallelDirective, ACCParallelDirective
for node in node_list:
# Check that ExtractNode is not inserted between a Kernel or
# a BuiltIn call and its parent Loop.
if isinstance(node, (Kern, BuiltIn)) and \
isinstance(node.parent.parent, Loop):
if not isinstance(name, tuple) or not len(name) == 2 or \
not name[0] or not isinstance(name[0], str) or \
not name[1] or not isinstance(name[1], str):
raise TransformationError(
"Error in {0}. User-supplied region name must be a "
"tuple containing two non-empty strings."
"".format(self.name))
# pylint: enable=too-many-boolean-expressions
if "prefix" in options:
prefix = options["prefix"]
if prefix not in Config.get().valid_psy_data_prefixes:
raise TransformationError(
"Error in 'prefix' parameter: found '{0}', expected "
"one of {1} as defined in {2}"
.format(prefix, Config.get().valid_psy_data_prefixes,
Config.get().filename))
super(PSyDataTrans, self).validate(node_list, options)
# The checks below are only for the NEMO API and can be removed
# once #435 is done.
sched = node_list[0].ancestor(InvokeSchedule)
invoke = sched.invoke
if not isinstance(invoke, NemoInvoke):
return
# Get the parse tree of the routine containing this region
# pylint: disable=protected-access
ptree = invoke._ast
# pylint: enable=protected-access
# Search for the Specification_Part
if not walk([ptree], Fortran2003.Specification_Part):
be present on an OpenACC device before the kernel associated with
this Arguments object may be launched.
:returns: list of (Fortran) quantities
:rtype: list of str
'''
arg_list = []
# First off, specify the field object which we will de-reference in
# order to get any grid properties (if this kernel requires them).
# We do this as some compilers do less optimisation if we get (read-
# -only) grid properties from a field object that has read-write
# access.
grid_fld = self.find_grid_access()
grid_ptr = grid_fld.name + "%grid"
api_config = Config.get().api_conf("gocean1.0")
# TODO: #676 go_grid_data is actually a field property
data_fmt = api_config.grid_properties["go_grid_data"].fortran
arg_list.extend([grid_fld.name, data_fmt.format(grid_fld.name)])
for arg in self._args:
if arg.type == "scalar":
arg_list.append(arg.name)
elif arg.type == "field" and arg != grid_fld:
# The remote device will need the reference to the field
# object *and* the reference to the array within that object.
arg_list.extend([arg.name, data_fmt.format(arg.name)])
elif arg.type == "grid_property":
if grid_ptr not in arg_list:
# This kernel needs a grid property and therefore the
# pointer to the grid object must be copied to the device.
arg_list.append(grid_ptr)
arg_list.append(grid_ptr+"%"+arg.name)
def api_specific_name(self):
    '''This convenience function returns the name of the type in the
    current API. E.g. in a dynamo0.3 API, WRITE --> "gh_write"

    :returns: The API specific name.
    :rtype: str
    '''
    # Map this generic access type back onto the name used by the
    # currently-configured API.
    return Config.get().api_conf().get_reverse_access_mapping()[self]
raise ParseError(
"In the LFRic API the 1st argument of a 'meta_arg' "
"entry should be a valid argument type (one of {0}), but "
"found '{1}' in '{2}'.".
format(LFRicArgDescriptor.VALID_ARG_TYPE_NAMES,
argtype, arg_type))
# Check for a valid vector size in case of a binary
# operator expression
if separator:
self._validate_vector_size(separator, arg_type)
# The 2nd arg is an access descriptor. Permitted accesses for each
# argument type are dealt with in the related _validate methods.
# Convert from GH_* names to the generic access type
api_config = Config.get().api_conf(API)
access_mapping = api_config.get_access_mapping()
try:
self._access_type = access_mapping[arg_type.args[1].name]
except KeyError:
valid_names = api_config.get_valid_accesses_api()
raise ParseError(
"In the LFRic API the 2nd argument of a 'meta_arg' entry "
"must be a valid access descriptor (one of {0}), but found "
"'{1}' in '{2}'.".format(valid_names,
arg_type.args[1].name, arg_type))
# FIELD, OPERATOR and SCALAR argument type descriptors and checks
if self._type in LFRicArgDescriptor.VALID_FIELD_NAMES:
# Validate field arguments
self._validate_field(arg_type)
def _create_loop(self, parent, variable_name):
    '''
    Specialized method to create a NemoLoop instead of a
    generic Loop.

    :param parent: the parent of the node.
    :type parent: :py:class:`psyclone.psyir.nodes.Node`
    :param str variable_name: name of the iteration variable.

    :return: a new NemoLoop instance.
    :rtype: :py:class:`psyclone.nemo.NemoLoop`
    '''
    # The loop type is inferred from the name of the iteration
    # variable via the NEMO API configuration; any variable name
    # not present in that mapping is classified as "unknown".
    type_map = Config.get().api_conf("nemo").get_loop_type_mapping()
    new_loop = NemoLoop(parent=parent, variable_name=variable_name)
    new_loop.loop_type = type_map.get(variable_name, "unknown")
    return new_loop
def check_api(api):
    '''
    Check that the supplied API is valid.

    :param str api: The API to check.

    :raises ParseError: if the supplied API is not recognised.
    '''
    supported_apis = Config.get().supported_apis
    if api not in supported_apis:
        # NOTE(review): ParseError is imported locally, presumably to
        # avoid a circular import at module load time - confirm.
        from psyclone.parse import ParseError
        raise ParseError(
            "check_api: Unsupported API '{0}' specified. "
            "Supported types are {1}.".format(api, supported_apis))