def _create_call_graph(self):
    dag = DAG(nodes=['root'])
    queue = ['root']
    while queue:
        caller = queue.pop(0)
        callees = FindNodes(Call).visit(self.efuncs[caller])
        for callee in filter_ordered([i.name for i in callees]):
            if callee in self.efuncs:  # Exclude foreign Calls, e.g., MPI calls
                try:
                    dag.add_node(callee)
                    queue.append(callee)
                except KeyError:
                    # `callee` already in `dag`
                    pass
                dag.add_edge(callee, caller)

    # Sanity check
    assert dag.size == len(self.efuncs)

    return dag
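# All of these snippets revolve around `filter_ordered`, devito's
# order-preserving deduplication helper. As a minimal sketch of the idea (an
# illustration, not the library's exact implementation), it keeps the first
# occurrence of each element:
def filter_ordered_sketch(elements, key=None):
    key = key or (lambda e: e)
    seen = set()
    result = []
    for e in elements:
        k = key(e)
        if k not in seen:
            seen.add(k)
            result.append(e)
    return result

assert filter_ordered_sketch(['b', 'a', 'b', 'c', 'a']) == ['b', 'a', 'c']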
def __init__(self, stencil):  # NOTE: enclosing signature inferred from the body
    assert isinstance(stencil, Eq)
    self.stencil = stencil

    self.dimensions = []
    self.functions = []
    # Traverse stencil to determine meta information
    for e in preorder_traversal(self.stencil):
        if isinstance(e, SymbolicData):
            self.dimensions += list(e.indices)
            self.functions += [e]
        if isinstance(e, IndexedBase):
            self.dimensions += list(e.function.indices)
            self.functions += [e.function]
    # Filter collected dimensions and functions
    self.dimensions = filter_ordered(self.dimensions)
    self.functions = filter_ordered(self.functions)
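# For intuition, with toy data: a stencil touching two functions defined over
# the same (t, x) grid collects each dimension twice during traversal;
# `filter_ordered` collapses the duplicates while preserving first-occurrence
# order (dict.fromkeys used here as an order-preserving stand-in):
collected_dims = ['t', 'x', 't', 'x']
assert list(dict.fromkeys(collected_dims)) == ['t', 'x']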
def _build_dist_datamap(self, support=None):
    """
    Mapper ``M : MPI rank -> required sparse data``.
    """
    ret = {}
    support = support or self._support
    for i, s in enumerate(support):
        # Sparse point `i` is "required" by the following ranks
        for r in self.grid.distributor.glb_to_rank(s):
            ret.setdefault(r, []).append(i)
    return {k: filter_ordered(v) for k, v in ret.items()}
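# A toy walk-through of the datamap construction (hypothetical values): if
# sparse point 0 straddles ranks 0 and 1 while point 1 lives on rank 1 only,
# setdefault accumulates per-rank point lists and the final dict comprehension
# deduplicates them (dict.fromkeys standing in for filter_ordered):
point_to_ranks = {0: [0, 1], 1: [1]}
ret = {}
for i, ranks in point_to_ranks.items():
    for r in ranks:
        ret.setdefault(r, []).append(i)
datamap = {k: list(dict.fromkeys(v)) for k, v in ret.items()}
assert datamap == {0: [0], 1: [0, 1]}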
@cached_property
def indices(self):
    return tuple(filter_ordered(flatten(getattr(i, 'indices', ())
                                        for i in self._args_diff)))
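# The `getattr(i, 'indices', ())` fallback quietly skips arguments that carry
# no indices. A self-contained toy with plain tuples standing in for the
# arguments' `.indices` (hypothetical data):
from itertools import chain
arg_indices = [('x', 'y'), (), ('y', 't')]
flat = chain.from_iterable(arg_indices)
assert tuple(dict.fromkeys(flat)) == ('x', 'y', 't')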
@memoized_meth
def _index_matrix(self, offset):
    # Note about the use of *memoization*
    # Since this method is called by `_interpolation_indices`, using
    # memoization avoids a proliferation of symbolically identical
    # ConditionalDimensions for a given set of indirection indices

    # List of indirection indices for all adjacent grid points
    index_matrix = [tuple(idx + ii + offset for ii, idx
                          in zip(inc, self._coordinate_indices))
                    for inc in self._point_increments]

    # A unique symbol for each indirection index
    indices = filter_ordered(flatten(index_matrix))
    points = OrderedDict([(p, Symbol(name='ii_%s_%d' % (self.name, i)))
                          for i, p in enumerate(indices)])

    return index_matrix, points
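# The memoization rationale can be mimicked with functools.lru_cache, used
# here as a stand-in for devito's @memoized_meth (not the same decorator):
# repeated calls with the same offset return the very same objects, so no
# duplicate symbols are ever created.
from functools import lru_cache

class InterpDemo:
    @lru_cache(maxsize=None)
    def _index_matrix_demo(self, offset):
        return tuple(offset + inc for inc in (0, 1))

demo = InterpDemo()
assert demo._index_matrix_demo(3) is demo._index_matrix_demo(3)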
@classmethod
def reorder(cls, items, relations):
    # The relations are between dimensions, not intervals. So we take
    # care of that here
    ordering = filter_ordered(toposort(relations) + [i.dim for i in items])
    return sorted(items, key=lambda i: ordering.index(i.dim))
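# The ordering trick with toy values (stand-ins, not devito's toposort): the
# topological order of the relations comes first; appending every item's dim
# and deduplicating then slots in any dim that no relation mentions:
topo = ['t', 'x']            # pretend toposort(relations) returned this
item_dims = ['x', 'z', 't']  # the items' dims, in their current order
ordering = list(dict.fromkeys(topo + item_dims))
assert ordering == ['t', 'x', 'z']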
def process(func, state, **kwargs):  # NOTE: signature inferred; the snippet began mid-function
    # Build the call graph; the construction mirrors `_create_call_graph` above
    dag = DAG(nodes=['root'])
    queue = ['root']
    while queue:
        caller = queue.pop(0)
        callees = FindNodes(Call).visit(state._efuncs[caller])
        for callee in filter_ordered([i.name for i in callees]):
            if callee in state._efuncs:  # Exclude foreign Calls, e.g., MPI calls
                try:
                    dag.add_node(callee)
                    queue.append(callee)
                except KeyError:
                    # `callee` already in `dag`
                    pass
                dag.add_edge(callee, caller)
    assert dag.size == len(state._efuncs)

    # Apply `func`
    for i in dag.topological_sort():
        state._efuncs[i], metadata = func(state._efuncs[i], **kwargs)

        # Track any new Dimensions introduced by `func`
        state.dimensions.extend(list(metadata.get('dimensions', [])))

        # Track any new #include required by `func`
        state.includes.extend(list(metadata.get('includes', [])))
        state.includes = filter_ordered(state.includes)

        # Track any new ElementalFunctions
        state._efuncs.update(OrderedDict([(i.name, i)
                                          for i in metadata.get('efuncs', [])]))

        # If there's a change to the `args` and the `iet` is an efunc, then
        # we must update the call sites as well, as the arguments dropped down
        # to the efunc have just increased
        args = as_tuple(metadata.get('args'))
        if args:
            # `extif` avoids redundant updates to the parameters list, due
            # to multiple children wanting to add the same input argument
            extif = lambda v: list(v) + [e for e in args if e not in v]
            stack = [i] + dag.all_downstreams(i)
            for n in stack:
                efunc = state._efuncs[n]
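# The `extif` idiom in isolation (hypothetical values): it appends only the
# arguments not already present, so repeated updates triggered by multiple
# children leave the parameter list free of duplicates:
args = ('b', 'c')
extif = lambda v: list(v) + [e for e in args if e not in v]
params = extif(extif(['a', 'b']))  # applying it twice changes nothing further
assert params == ['a', 'b', 'c']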
def add_ldflags(self, flags):
    self.ldflags = filter_ordered(self.ldflags + list(as_tuple(flags)))
def add_libraries(self, libs):
    self.libraries = filter_ordered(self.libraries + list(as_tuple(libs)))
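# Both helpers are add-and-deduplicate in one step, so repeated calls with
# overlapping values are harmless (toy flag values, dict.fromkeys standing in
# for filter_ordered):
ldflags = ['-O3']
ldflags = list(dict.fromkeys(ldflags + ['-O3', '-shared']))
assert ldflags == ['-O3', '-shared']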