# From a TreeTransformer-style generic_visit: rewrites each statement list of a
# node while respecting the pending prepend/append/insert queues (deques).
for field, old_value in gast.iter_fields(node):
    if isinstance(old_value, list):
        if (type(node), field) in grammar.BLOCKS:
            self.to_prepend_block.append(deque())
            self.to_append_block.append(deque())
            self.to_insert.append(deque())
            new_values = copy(self.visit_statements(old_value))
            self.to_insert.pop()
        else:
            new_values = []
            for value in old_value:
                if isinstance(value, gast.AST):
                    value = self.visit(value)
                    if value is None:
                        continue
                    elif not isinstance(value, gast.AST):
                        new_values.extend(value)
                        continue
                new_values.append(value)
        if isinstance(node, gast.FunctionDef) and field == 'body':
            new_values.extendleft(self.to_insert_top)
            self.to_insert_top = deque([])
        if (type(node), field) in grammar.BLOCKS:
            new_values.extendleft(self.to_prepend_block.pop())
            # keep a trailing return statement last, even after appending
            return_ = None
            if new_values and isinstance(new_values[-1], gast.Return):
                return_ = new_values.pop()
            new_values.extend(self.to_append_block.pop())
            if return_:
                new_values.append(return_)
        old_value[:] = new_values
    elif isinstance(old_value, gast.AST):
        new_node = self.visit(old_value)
        if new_node is None:
            delattr(node, field)
        else:
            setattr(node, field, new_node)
# Using beniget def-use chains: find every function or class decorated with
# "boost", strip that decorator, and print the external nodes each function
# captures.
ancestors = beniget.Ancestors()
ancestors.visit(module)

boost = [d for d in duc.locals[module] if d.name() == "boost"][0]
chain = duc.chains[boost.node]
users = chain.users()

class_defs = []
function_defs = []
for user in users:
    func_or_class_def = ancestors.parent(user.node)
    if isinstance(func_or_class_def, ast.ClassDef):
        class_defs.append(func_or_class_def)
    elif isinstance(func_or_class_def, ast.FunctionDef):
        function_defs.append(func_or_class_def)

for fdef in function_defs:
    for node in fdef.decorator_list:
        if node.id == "boost":
            fdef.decorator_list.remove(node)

    capturex = CaptureX(module, fdef)
    capturex.visit(fdef)
    for node in capturex.external:
        # print(astunparse.dump(node))
        print(astunparse.unparse(node).strip())
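The snippet above assumes a gast tree (module) and an already populated beniget.DefUseChains instance (duc). A minimal setup sketch; the sample source string is purely illustrative:

import gast
import beniget

# Illustrative input: a module that imports and applies the "boost" decorator.
source = (
    "from transonic import boost\n"
    "\n"
    "@boost\n"
    "def double(x: int):\n"
    "    return 2 * x\n"
)
module = gast.parse(source)

duc = beniget.DefUseChains()
duc.visit(module)   # fills duc.chains and duc.locals used by the snippet above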
Args:
  node: An AST node on which to run dataflow analysis.
  analyses: Either an instance of the Forward or Backward dataflow analysis
    class, or a list or tuple of them.

Returns:
  node: The node, but now with annotations on the AST nodes containing the
    results of the dataflow analyses.
"""
if not isinstance(analyses, (tuple, list)):
    analyses = (analyses,)
for analysis in analyses:
    if not isinstance(analysis, (Forward, Backward)):
        raise TypeError('not a valid dataflow analysis object')

for child_node in gast.walk(node):
    if isinstance(child_node, gast.FunctionDef):
        cfg_obj = CfgBuilder().build_cfg(child_node)
        for analysis in analyses:
            if isinstance(analysis, Backward):
                analysis.visit(cfg_obj.exit)
            elif isinstance(analysis, Forward):
                analysis.visit(cfg_obj.entry)

for analysis in analyses:
    PropagateAnalysis(analysis).visit(node)
return node
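The excerpt above is the body of a helper that runs CFG-based dataflow analyses over every FunctionDef in a tree and then propagates the results onto the AST. A minimal usage sketch; run_dataflow, ReachingDefinitions and Liveness are illustrative names standing in for the real helper and for concrete Forward/Backward analysis classes:

import gast

source = (
    "def f(x):\n"
    "    y = x + 1\n"
    "    return y\n"
)
tree = gast.parse(source)

# Hypothetical names: `run_dataflow` is the helper whose body is shown above,
# `ReachingDefinitions` (Forward) and `Liveness` (Backward) are stand-ins for
# concrete analysis classes.
tree = run_dataflow(tree, ReachingDefinitions())
tree = run_dataflow(tree, [ReachingDefinitions(), Liveness()])
# The analyses leave their results as annotations on the AST nodes of `tree`.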
def outline(name, formal_parameters, out_parameters, stmts,
            has_return, has_break, has_cont):
    args = ast.arguments(
        [ast.Name(fp, ast.Param(), None) for fp in formal_parameters],
        None, [], [], None, [])
    if isinstance(stmts, ast.expr):
        assert not out_parameters, "no out parameters with expr"
        fdef = ast.FunctionDef(name, args, [ast.Return(stmts)], [], None)
    else:
        fdef = ast.FunctionDef(name, args, stmts, [], None)

        # This is part of a huge trick that plays with delayed type inference.
        # It basically computes the return type based on out parameters, and
        # the return statement is unconditionally added, so if we have other
        # returns there will be a computation of the output type based on the
        # __combined of the regular return types and this one. The original
        # returns have been patched above to have a different type that
        # cunningly combines with this output tuple.
        #
        # This is the only trick I found to let pythran compute both the
        # output variable type and the early return type. But hey, a dirty
        # one :-/
        stmts.append(
            ast.Return(
                ast.Tuple(
                    [ast.Name(fp, ast.Load(), None) for fp in out_parameters],
                    ast.Load())))
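At source level, the node appended above amounts to an unconditional trailing return of the out parameters as a tuple. A purely illustrative, hand-written sketch of the shape of an outlined function (the name and body are invented, not generated output):

# Purely illustrative: a statement list with out parameter `acc` gets an
# unconditional trailing `return (acc,)` appended, so the outlined function
# always ends by returning its out parameters.
def outlined_block(acc, x):
    acc = acc + x
    return (acc,)   # the Return(Tuple([...])) node built by `outline`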
# add extra metadata to this node
metadata.add(body, metadata.Comprehension(starget))
init = ast.Assign(
    [ast.Name(starget, ast.Store(), None, None)],
    ast.Call(
        ast.Attribute(
            ast.Name('__builtin__', ast.Load(), None, None),
            comp_type,
            ast.Load()),
        [], []))
result = ast.Return(ast.Name(starget, ast.Load(), None, None))
sargs = [ast.Name(arg, ast.Param(), None, None) for arg in args]
fd = ast.FunctionDef(name,
                     ast.arguments(sargs, [], None, [], [], None, []),
                     [init, body, result],
                     [], None, None)
metadata.add(fd, metadata.Local())
self.ctx.module.body.append(fd)
return ast.Call(
    ast.Name(name, ast.Load(), None, None),
    [ast.Name(arg.id, ast.Load(), None, None) for arg in sargs],
    [])  # no sharing !
def visit_Call(self, node):
    """
    Replace a function call with the inlined function's body.

    We can only inline when the call aliases to a single function.
    """
    func_aliases = self.aliases[node.func]
    if len(func_aliases) == 1:
        function_def = next(iter(func_aliases))
        if (isinstance(function_def, ast.FunctionDef) and
                function_def.name in self.inlinable):
            self.update = True
            to_inline = copy.deepcopy(self.inlinable[function_def.name])
            arg_to_value = dict()
            values = node.args
            values += to_inline.args.defaults[len(node.args) -
                                              len(to_inline.args.args):]
            for arg_fun, arg_call in zip(to_inline.args.args, values):
                v_name = "__pythran_inline{}{}{}".format(function_def.name,
                                                         arg_fun.id,
                                                         self.call_count)
                new_var = ast.Name(id=v_name,
                                   ctx=ast.Store(),
                                   annotation=None,
                                   type_comment=None)
                self.defs.append(ast.Assign(targets=[new_var],
                                            value=arg_call))
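At source level, the intent of this pass is the familiar inlining rewrite. A hand-written, schematic illustration (not actual pass output); the temporary names follow the "__pythran_inline" + callee + argument + call-count pattern built above:

# Before inlining: `add` aliases to exactly one FunctionDef, so it qualifies.
def add(a, b=1):
    return a + b

def caller_before(x):
    return add(x)

# After inlining (schematic): the call's arguments, including the default for
# `b`, are bound to fresh temporaries, and the callee's body replaces the call.
def caller_after(x):
    __pythran_inlineadda0 = x
    __pythran_inlineaddb0 = 1
    return __pythran_inlineadda0 + __pythran_inlineaddb0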
def prepare(self, node):
    '''Gather the analysis results required by this analysis.'''
    if isinstance(node, ast.Module):
        self.ctx.module = node
    elif isinstance(node, ast.FunctionDef):
        self.ctx.function = node
    for D in self.deps:
        d = D()
        d.attach(self.passmanager, self.ctx)
        result = d.run(node)
        setattr(self, uncamel(D.__name__), result)
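Each dependency's result ends up as an attribute named after its class, with CamelCase converted to snake_case by uncamel. A small sketch of that naming convention with a simplified stand-in for uncamel (the real pass manager ships its own implementation; the class names below are just example strings):

import re

def uncamel(name):
    # Simplified CamelCase -> snake_case helper, for illustration only.
    return re.sub(r'(?<!^)(?=[A-Z])', '_', name).lower()

# After prepare() runs, each dependency class listed in `self.deps` is exposed
# on the analysis as an attribute with this derived name:
print(uncamel('Aliases'))           # -> aliases
print(uncamel('UseDefChains'))      # -> use_def_chains
print(uncamel('PureExpressions'))   # -> pure_expressions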
def infer_function(self, node, ty_args: List['TyObj']):
    # TODO(momohatt): varargs
    assert isinstance(node, gast.FunctionDef)
    if node.args.vararg == []:
        assert len(ty_args) == len(node.args.args), \
            "Wrong number of arguments: expected {}, got {}".format(
                len(node.args.args), len(ty_args))

    if self.is_debug:
        print("\x1b[33m======================= function {} "
              "=======================\x1b[39m".format(node.name))

    for arg_node, ty in zip(node.args.args, ty_args):
        self.tyenv[arg_node.id] = ty
    for ty in ty_args:
        if isinstance(ty, TyUserDefinedClass):
            for attr, val in ty.instance.__dict__.items():
                self.attribute_tyenv[(ty.instance, attr)] = \
                    type_of_value(val)