# Extraction residue (Snyk advertising banner), not part of the original source:
# "Secure your code as it's written. Use Snyk Code to scan source code in minutes."
# NOTE(review): indentation was stripped from this file; code below is kept
# byte-for-byte, comments only. The nesting of the `if` branch is inferred,
# not certain — confirm against the original source before editing.
def visit_FunctionDef(self, node):
# Canonicalization pass: rewrite a function so every exit path flows through
# a single trailing `return <returned_value_key>` plus per-function
# "already returned" flag variables.
modified_node = self.generic_visit(node)
# Stack depth is read *before* the pop: it serves as the unique numeric
# suffix for this function's returned-flag variable name.
returned_id = len(self.func_returned_stack)
returned_flags = self.func_returned_stack.pop()
if returned_flags:
# Prepend `<returned_flag><id> = False` so transformed early-return sites
# (elsewhere in this pass, presumably) can set it — TODO confirm.
node.body.insert(0, gast.Assign(targets=[gast.Name(id=self.returned_flag + str(returned_id), ctx=gast.Store(), annotation=None)], value=gast.NameConstant(value=False)))
# Prepend `<returned_value_key> = None` as the default result holder.
node.body.insert(0, gast.Assign(targets=[gast.Name(id=self.returned_value_key, ctx=gast.Store(), annotation=None)], value=gast.NameConstant(value=None)))
# Append the single canonical `return <returned_value_key>`.
node.body.append(gast.Return(value=gast.Name(id=self.returned_value_key, ctx=gast.Load(), annotation=None)))
return modified_node
# NOTE(review): fragment — this `else:` belongs to a function whose header is
# not visible here (it mirrors the `outline` helper reproduced later in this
# file). `name`, `args`, `stmts`, `out_parameters`, `has_return`, `has_break`,
# `has_cont`, `PatchReturn`, `PatchBreakContinue` and `LOOP_NONE` are bound by
# that enclosing scope. Indentation was stripped; code kept byte-for-byte.
else:
# Statement case: outline the statements into a synthetic FunctionDef.
fdef = ast.FunctionDef(name, args, stmts, [], None)
# this is part of a huge trick that plays with delayed type inference
# it basically computes the return type based on out parameters, and
# the return statement is unconditionally added so if we have other
# returns, there will be a computation of the output type based on the
# __combined of the regular return types and this one The original
# returns have been patched above to have a different type that
# cunningly combines with this output tuple
#
# This is the only trick I found to let pythran compute both the output
# variable type and the early return type. But hey, a dirty one :-/
stmts.append(
ast.Return(
ast.Tuple(
[ast.Name(fp, ast.Load(), None) for fp in out_parameters],
ast.Load()
)
)
)
# Rewrite any pre-existing returns so their type combines with the
# synthetic out-parameter tuple appended above.
if has_return:
pr = PatchReturn(stmts[-1], has_break or has_cont)
pr.visit(fdef)
# When the outlined region had break/continue, tag the synthetic return
# with LOOP_NONE so the caller can distinguish "fell through" from
# "broke/continued".
if has_break or has_cont:
if not has_return:
stmts[-1].value = ast.Tuple([ast.Num(LOOP_NONE),
stmts[-1].value],
ast.Load())
pbc = PatchBreakContinue(stmts[-1])
# NOTE(review): fragment is cut off here — a pbc.visit(...) call presumably
# follows in the original; confirm before relying on this span.
# NOTE(review): indentation was stripped; code kept byte-for-byte.
def infer_stmt(self, node) -> 'TyObj':
# Type-inference dispatch over gast statement nodes: records the inferred
# type in self.nodetype[node], delegating to specialized infer_* helpers.
# Statements with no value (Delete/Assign/AugAssign/For) are typed TyNone.
if self.is_debug:
debug(gast.dump(node))
# Track the path of nodes currently being inferred — presumably for
# error reporting; confirm against the rest of the class.
self.stack.append(node)
if isinstance(node, gast.FunctionDef):
self.nodetype[node] = self.infer_FunctionDef(node)
elif isinstance(node, gast.Return):
# Return(expr? value)
if node.value is None:
# Bare `return` yields None.
self.nodetype[node] = TyNone()
else:
self.nodetype[node] = self.infer_expr(node.value)
elif isinstance(node, gast.Delete):
self.nodetype[node] = TyNone()
elif isinstance(node, gast.Assign):
self.infer_Assign(node)
self.nodetype[node] = TyNone()
elif isinstance(node, gast.AugAssign):
self.infer_AugAssign(node)
self.nodetype[node] = TyNone()
elif isinstance(node, gast.For):
self.infer_For(node)
self.nodetype[node] = TyNone()
# NOTE(review): fragment — the elif chain is cut off here; the original
# continues with further statement kinds.
# NOTE(review): fragment — starts mid-function; `cont_n`, `status_n`,
# `return_n`, `expected_return`, `has_cont`, `has_break`, `actual_call` and
# `EARLY_RET` are bound by the enclosing (not visible) scope. This builds the
# call site replacing an outlined region. Indentation was stripped; code kept
# byte-for-byte.
cont_ass = self.make_control_flow_handlers(cont_n, status_n,
expected_return,
has_cont, has_break)
# `status == EARLY_RET` test: the outlined function signalled an early return.
cmpr = ast.Compare(ast.Name(status_n, ast.Load(), None),
[ast.Eq()], [ast.Num(EARLY_RET)])
# Unpack (status, return_value, out_values) from the outlined call.
fast_return = [ast.Name(status_n, ast.Store(), None),
ast.Name(return_n, ast.Store(), None),
ast.Name(cont_n, ast.Store(), None)]
return [ast.Assign([ast.Tuple(fast_return, ast.Store())],
actual_call),
ast.If(cmpr,
[ast.Return(ast.Name(return_n, ast.Load(), None))],
cont_ass)]
elif has_break or has_cont:
# No early return possible: unpack (status, out_values) and let the
# control-flow handlers re-raise break/continue behavior.
cont_ass = self.make_control_flow_handlers(cont_n, status_n,
expected_return,
has_cont, has_break)
fast_return = [ast.Name(status_n, ast.Store(), None),
ast.Name(cont_n, ast.Store(), None)]
return [ast.Assign([ast.Tuple(fast_return, ast.Store())],
actual_call)] + cont_ass
elif expected_return:
# Plain data flow: just assign the outlined call's tuple result.
return ast.Assign([ast.Tuple(expected_return, ast.Store())],
actual_call)
else:
# No outputs at all: keep the call for its side effects.
return ast.Expr(actual_call)
# NOTE(review): indentation was stripped; code kept byte-for-byte.
def outline(name, formal_parameters, out_parameters, stmts,
has_return, has_break, has_cont):
# Turn `stmts` (an expression or a statement list) into a standalone
# FunctionDef named `name` taking `formal_parameters` and returning the
# `out_parameters` so the caller can reassign them.
args = ast.arguments(
[ast.Name(fp, ast.Param(), None) for fp in formal_parameters],
None, [], [], None, [])
if isinstance(stmts, ast.expr):
# Expression case: the whole body is `return <expr>`; out parameters
# make no sense here.
assert not out_parameters, "no out parameters with expr"
fdef = ast.FunctionDef(name, args, [ast.Return(stmts)], [], None)
else:
fdef = ast.FunctionDef(name, args, stmts, [], None)
# this is part of a huge trick that plays with delayed type inference
# it basically computes the return type based on out parameters, and
# the return statement is unconditionally added so if we have other
# returns, there will be a computation of the output type based on the
# __combined of the regular return types and this one The original
# returns have been patched above to have a different type that
# cunningly combines with this output tuple
#
# This is the only trick I found to let pythran compute both the output
# variable type and the early return type. But hey, a dirty one :-/
stmts.append(
ast.Return(
# NOTE(review): fragment — the call is cut off mid-expression here; the
# original continues with the tuple of out-parameter Names.
def __init__(self, astc, args, func_field):
    """Wrap a parsed function (or lambda) AST with its argument/field info.

    astc       -- project AST container; its ``nast`` attribute must hold a
                  gast.FunctionDef or gast.Lambda
    args       -- argument description object, stored as-is
    func_field -- field/namespace the function belongs to, stored as-is
    """
    super().__init__()
    node = astc.nast
    assert isinstance(node, (gast.FunctionDef, gast.Lambda))
    if isinstance(node, gast.FunctionDef):
        # NOTE(review): the name is read from astc.gast while the type check
        # is on astc.nast — presumably both refer to the same def; confirm.
        self.name = astc.gast.name
    else:
        # Lambdas are anonymous: use the conventional '<lambda>' name.
        self.name = (lambda: None).__name__
        # A Lambda body is a bare expression; normalize it into an explicit
        # Return so later passes can treat both node kinds uniformly.
        node.body = gast.Return(value=node.body)
    self.args = args
    self.func_field = func_field
    self.ast = node
    self.filename = astc.filename
    self.lineno = astc.lineno
>> from numpy.linalg import det
>> det(a)
Becomes
>> numpy.linalg.det(a)
"""
# (The lines above are the tail of a docstring whose opening quotes fall
# outside this fragment — no comments may be inserted before the closing
# quotes without changing the string. The enclosing def is also not visible.)
# Rewrite a bare imported name into its fully-qualified attribute path.
if node.id in self.symbols:
symbol = path_to_node(self.symbols[node.id])
# Literal symbols are left untouched — only real module identifiers are
# rewritten.
if not getattr(symbol, 'isliteral', lambda: False)():
parent = self.ancestors[node][-1]
# A module identifier may not be stored into a container or returned:
# that would let it escape as a first-class value.
blacklist = (ast.Tuple,
ast.List,
ast.Set,
ast.Return)
if isinstance(parent, blacklist):
raise PythranSyntaxError(
"Unsupported module identifier manipulation",
node)
# Replace Name with the dotted Attribute chain, keeping the original
# Load/Store context and source location.
new_node = path_to_attr(self.symbols[node.id])
new_node.ctx = node.ctx
ast.copy_location(new_node, node)
return new_node
# Not an imported symbol: leave the node as-is.
return node
# NOTE(review): fragment — starts mid-function inside a statement-analysis
# dispatch; `target_type`, `res_type`, `node`, `env`, `non_generic`,
# `symbol_of`, `unify`, `merge_unify`, `analyse`, `NoneType`,
# `InferenceError` and `PythranTypeError` come from the enclosing (not
# visible) scope. Indentation was stripped; code kept byte-for-byte.
try:
# AugAssign check (presumably): target and result types must unify.
unify(target_type, res_type)
except InferenceError:
raise PythranTypeError(
"Invalid update operand for `{}`: `{}` and `{}`".format(
symbol_of[type(node.op)],
res_type,
target_type
),
node
)
return env
elif isinstance(node, gast.Raise):
return env # TODO
elif isinstance(node, gast.Return):
# Inside a generator ('@gen' flag), return carries no result type.
if env['@gen']:
return env
if node.value is None:
ret_type = NoneType
else:
ret_type = analyse(node.value, env, non_generic)
# Merge with any previously seen return type; incompatible returns are
# a type error for the whole function.
if '@ret' in env:
try:
ret_type = merge_unify(env['@ret'], ret_type)
except InferenceError:
raise PythranTypeError(
"function may returns with incompatible types "
"`{}` and `{}`".format(env['@ret'], ret_type),
node
)
# NOTE(review): fragment cut off here — env['@ret'] is presumably updated
# and env returned in the original.
# NOTE(review): fragment — starts and ends mid-function in an automatic-
# differentiation primal-building pass; `body`, `dx`, `return_node`, `node`
# and the helpers (`quoting`, `template`, `anno`, `naming`) are bound by the
# enclosing (not visible) scope. Indentation was stripped; code kept
# byte-for-byte.
if self.preserve_result:
# Append an extra Assign operation to the primal body
# that saves the original output value
stored_result_node = quoting.quote(self.namer.unique('result'))
assign_stored_result = template.replace(
'result=orig_result',
result=stored_result_node,
orig_result=return_node.value)
body.append(assign_stored_result)
dx.elts.append(stored_result_node)
# All returned elements are read, not written, in the synthetic return.
for _dx in dx.elts:
_dx.ctx = gast.Load()
return_dx = gast.Return(value=dx)
# We add the stack as first argument of the primal
node.args.args = [self.stack] + node.args.args
# Rename the function to its primal name
func = anno.getanno(node, 'func')
node.name = naming.primal_name(func, self.wrt)
# The new body is the primal body plus the return statement
node.body = body + node.body[-1:]
# Find the cost; the first variable of potentially multiple return values
# The adjoint will receive a value for the initial gradient of the cost
y = node.body[-1].value
if isinstance(y, gast.Tuple):
y = y.elts[0]