# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# Confirm an email login/registration token and log the user in.
# NOTE(review): this fragment's enclosing function header is outside this
# view; indentation below is reconstructed from the control-flow keywords.
with auth_db.thread_lock:
    u = AuthEmailUser.search_by(token=token)
    if u and u.expires >= srtime.utc_now():
        # Valid, unexpired token: finish confirmation/registration.
        n = _verify_confirm(req.type, token, auth.need_complete_registration(u))
        # Delete other records whose user_name already equals this email but
        # whose unverified_email differs — presumably stale claims on the
        # same address; confirm against the AuthEmailUser schema.
        u.query.filter(
            (AuthEmailUser.user_name == u.unverified_email),
            AuthEmailUser.unverified_email != u.unverified_email,
        ).delete()
        u.user_name = u.unverified_email
        # One-time token: clear it so it cannot be replayed.
        u.token = None
        u.expires = None
        u.save()
        # auth.login is expected to raise (reply/redirect) on success, so
        # control never reaches the AssertionError below.
        auth.login(this_module, sim_type=req.type, model=u, display_name=n)
        raise AssertionError('auth.login returned unexpectedly')
    # From here on the token is either unknown or expired.
    if not u:
        pkdlog('login with invalid token={}', token)
    else:
        pkdlog(
            'login with expired token={}, email={}',
            token,
            u.unverified_email,
        )
    # if user is already logged in via email, then continue to the app
    if auth.user_if_logged_in(AUTH_METHOD):
        pkdlog(
            'user already logged in. ignoring invalid token: {}, user: {}',
            token,
            auth.logged_in_user(),
        )
        raise sirepo.util.Redirect(sirepo.uri.local_route(req.type))
    auth.login_fail_redirect(req.type, this_module, 'email-token')
def destroy_op(self, op):
    """Stop tracking *op* and tear it down.

    Args:
        op: the operation to destroy; must currently be in ``self._ops``
    """
    pkdlog('destroy_op={}', op.opId)
    self._ops.remove(op)
    op.destroy()
def sim_frame(frame_id, op):
    """Generate one simulation-frame reply.

    Parses ``frame_id``, runs ``op`` on the parsed frame arguments, and wraps
    the result in a JSON reply with cache headers chosen by the sim data.

    Args:
        frame_id (str): encoded frame identifier
        op (callable): produces the frame content from the parsed frame args
    Returns:
        reply object with JSON body and cache headers set
    Raises:
        converted exception with a user-facing message if ``op`` fails
    """
    frame_args, sim_data = sirepo.sim_data.parse_frame_id(frame_id)
    # document parsing the request
    sirepo.http_request.parse_post(req_data=frame_args, id=True)
    try:
        content = op(frame_args)
    except Exception as exc:
        pkdlog('error generating report frame_id={} stack={}', frame_id, pkdexc())
        raise sirepo.util.convert_exception(exc, display_text='Report not generated')
    reply = sirepo.http_reply.gen_json(content)
    # Cache only successful frames the client is allowed to keep.
    if 'error' in content or not sim_data.want_browser_frame_cache():
        sirepo.http_reply.headers_for_no_cache(reply)
    else:
        reply.headers['Cache-Control'] = 'private, max-age=31536000'
    return reply
def _remove_old_tmp_dirs():
    """Delete tmp dirs under the db root older than the cleanup cutoff.

    Walks ``srdb.root()`` looking for directories whose name ends with
    ``srdb.TMP_DIR_SUFFIX`` and whose mtime is older than
    ``srdb.TMP_DIR_CLEANUP_TIME`` seconds ago, and removes them.
    """
    pkdlog('scanning for stale tmp dirs')
    count = 0
    cutoff = time.time() - srdb.TMP_DIR_CLEANUP_TIME
    for dirpath, dirnames, filenames in os.walk(srdb.root()):
        if (dirpath.endswith(srdb.TMP_DIR_SUFFIX)
            and os.stat(dirpath).st_mtime < cutoff):
            pkdlog('removing stale tmp dir: {}', dirpath)
            pkio.unchecked_remove(dirpath)
            count += 1
            # Prune the walk: os.walk is top-down, so without this it would
            # try to descend into the tree we just removed.
            dirnames[:] = []
    pkdlog('finished scanning for stale tmp dirs ({} found)', count)
# NOTE(review): fragment — the `if runner_info_file.exists():` header that
# the first line belongs to is outside this view (the same pattern appears
# in full further down the file).
runner_info = pkjson.load_any(runner_info_file)
else:
# Legacy run_dir
runner_info = pkcollections.Dict(
version=1, backend='local', backend_info={},
)
# Only version 1 of the runner-info format is understood.
assert runner_info.version == 1
# run the job
cmd = ['sirepo', 'extract', subcmd, arg]
result = await local_process.run_extract_job(
io_loop, run_dir, cmd, runner_info.backend_info,
)
# stderr output is logged but does not by itself fail the job
if result.stderr:
pkdlog(
'got output on stderr ({} {}):\n{}',
run_dir, jhash,
result.stderr.decode('utf-8', errors='ignore'),
)
if result.returncode != 0:
pkdlog(
'failed with return code {} ({} {}), stdout:\n{}',
result.returncode,
run_dir,
subcmd,
result.stdout.decode('utf-8', errors='ignore'),
)
# NOTE(review): bare AssertionError carries no message; the detail is
# only in the log line above.
raise AssertionError
# On success the subprocess's stdout is the JSON result.
return pkjson.load_any(result.stdout)
# NOTE(review): fragment of a compute-status polling loop — merges a status
# reply `r` into self.db, persists it, and stops on a terminal job state.
# The enclosing loop and `try:` headers are outside this view.
if self.db.status == job.ERROR:
self.db.error = r.get('error', '')
if 'computeJobStart' in r:
self.db.computeJobStart = r.computeJobStart
if 'parallelStatus' in r:
self.db.parallelStatus.update(r.parallelStatus)
self.db.lastUpdateTime = r.parallelStatus.lastUpdateTime
else:
# sequential jobs don't send this
self.db.lastUpdateTime = int(time.time())
#TODO(robnagler) will need final frame count
self.__db_write()
if r.state in job.EXIT_STATUSES:
break
except Exception as e:
pkdlog('error={} stack={}', e, pkdexc())
self.db.status = job.ERROR
# NOTE(review): stores the exception object itself, not str(e) — confirm
# the db serialization handles that (elsewhere error is a string).
self.db.error = e
finally:
# Always tear down the op, whether the loop exited cleanly or raised.
self.destroy_op(op)
async def run_extract_job(self, io_loop, run_dir, jhash, subcmd, arg):
    """Run a ``sirepo extract`` subcommand for the report in ``run_dir``.

    Returns an empty dict when the report is missing.

    NOTE(review): this definition is truncated by the end of this view —
    the ``await local_process.run_extract_job(`` call is cut off mid-statement.
    """
    pkdc('{} {}: {} {}', run_dir, jhash, subcmd, arg)
    status = self.report_job_status(run_dir, jhash)
    if status is runner_client.JobStatus.MISSING:
        pkdlog('{} {}: report is missing; skipping extract job',
               run_dir, jhash)
        return {}
    # figure out which backend and any backend-specific info
    runner_info_file = run_dir.join(_RUNNER_INFO_BASENAME)
    if runner_info_file.exists():
        runner_info = pkjson.load_any(runner_info_file)
    else:
        # Legacy run_dir
        runner_info = pkcollections.Dict(
            version=1, backend='local', backend_info={},
        )
    # Only version 1 of the runner-info format is understood.
    assert runner_info.version == 1
    # run the job
    cmd = ['sirepo', 'extract', subcmd, arg]
    result = await local_process.run_extract_job(
# NOTE(review): fragment of a 2-D report builder — the `try:` paired with the
# `except` below, and the definitions of ar2d/x_range/y_range/info/shape_before,
# are outside this view.
ar2d = ndimage.rotate(ar2d, rotate_angle, reshape = rotate_reshape, mode='constant', order = 3)
pkdc('Size after rotate: {} Dimensions: {}', ar2d.size, ar2d.shape)
shape_rotate = list(ar2d.shape)
pkdc('x_range and y_range before rotate is [{},{}] and [{},{}]', x_range[0], x_range[1], y_range[0], y_range[1])
# Rescale the axis bounds by the ratio of the rotated shape to the original.
# NOTE(review): x bounds scale by shape[0] and y bounds by shape[1], yet the
# point counts below come from shape[1] (x) and shape[0] (y) — verify the
# row/column axis ordering is intended.
x_range[0] = shape_rotate[0]/shape_before[0]*x_range[0]
x_range[1] = shape_rotate[0]/shape_before[0]*x_range[1]
y_range[0] = shape_rotate[1]/shape_before[1]*y_range[0]
y_range[1] = shape_rotate[1]/shape_before[1]*y_range[1]
pkdc('x_range and y_range after rotate is [{},{}] and [{},{}]', x_range[0], x_range[1], y_range[0], y_range[1])
# Third element of each range is set to the rotated pixel count on that axis.
x_range[2] = ar2d.shape[1]
y_range[2] = ar2d.shape[0]
if info['title'] != 'Power Density': info['subtitle'] = info['subtitle'] + ' Image Rotate {}^0'.format(rotate_angle)
except Exception:
# NOTE(review): catches every exception, but the message only describes the
# import-failure case — any other rotate error is logged misleadingly.
pkdlog('Cannot rotate the image - scipy.ndimage.rotate() cannot be imported.')
if z_units:
z_label = u'{} [{}]'.format(z_label, z_units)
# Assemble the plot payload consumed by the client.
return pkcollections.Dict({
'x_range': x_range,
'y_range': y_range,
'x_label': info['x_label'],
'y_label': info['y_label'],
'z_label': _superscript(z_label),
'title': info['title'],
'subtitle': _superscript_2(info['subtitle']),
'z_matrix': ar2d.tolist(),
})
async def kill_all(self, run_dir):
    """Forcibly stop any jobs currently running in run_dir.

    Callers are expected to have inspected the jobs first (for example via
    run_dir_status) and decided they must be terminated.
    """
    info = self.report_jobs.get(run_dir)
    # Nothing to do unless a job is tracked for this dir and still running.
    if info is None or info.status is not runner_client.JobStatus.RUNNING:
        return
    pkdlog(
        'kill_all: killing job with jhash {} in {}',
        info.jhash, run_dir,
    )
    info.cancel_requested = True
    await info.report_job.kill(_KILL_TIMEOUT_SECS)
def _method_auth_state(values, uid):
    """Fill *values* with per-auth-method user info (userName, avatarUrl).

    Silently returns without modifying *values* when the method is not
    supported or no user model exists for *uid*.
    """
    if values.method not in _METHOD_MODULES:
        pkdlog('auth state method: "{}" not present in supported methods: {}', values.method, _METHOD_MODULES.keys())
        return
    module = _METHOD_MODULES[values.method]
    user = _method_user_model(module, uid)
    if not user:
        return
    values.userName = user.user_name
    # avatar_uri is optional per method module
    if hasattr(module, 'avatar_uri'):
        values.avatarUrl = module.avatar_uri(user, _AVATAR_SIZE)