Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
Returns:
Flask.response
"""
sirepo.auth.process_request()
try:
if path is None:
return call_api(_empty_route.func, {})
# werkzeug doesn't convert '+' to ' '
parts = re.sub(r'\+', ' ', path).split('/')
try:
route = _uri_to_route[parts[0]]
parts.pop(0)
except KeyError:
# sim_types (applications)
route = _default_route
kwargs = pkcollections.Dict()
for p in route.params:
if not parts:
if not p.is_optional:
raise sirepo.util.raise_not_found('{}: uri missing parameter ({})', path, p.name)
break
if p.is_path_info:
kwargs[p.name] = '/'.join(parts)
parts = None
break
kwargs[p.name] = parts.pop(0)
if parts:
raise sirepo.util.raise_not_found('{}: unknown parameters in uri ({})', parts, path)
return call_api(route.func, kwargs)
except Exception as e:
pkdlog('exception={} path={} stack={}', path, e, pkdexc())
raise
def for_sim_type(sim_type):
    """Get cfg for simulation type

    NOTE(review): this definition is repeated verbatim immediately below;
    the later ``def`` shadows this one — one copy should be removed.

    Args:
        sim_type (str): srw, warppba, etc.
    Returns:
        dict: application specific config
    """
    import pykern.pkcollections
    # Full parsed configuration; sim types with no app-specific section
    # simply are not keys in it.
    c = cfg()
    if sim_type not in c:
        return pykern.pkcollections.PKDict()
    # Wrap in a fresh PKDict (presumably so callers get a copy rather than
    # a reference into shared cfg state — confirm map_items semantics).
    return pykern.pkcollections.PKDict(
        pykern.pkcollections.map_items(c[sim_type]),
    )
def for_sim_type(sim_type):
    """Return the application-specific config for one simulation type.

    Args:
        sim_type (str): simulation type code (srw, warppba, etc.)
    Returns:
        dict: per-application configuration; empty when *sim_type* has
            no section in the global cfg
    """
    import pykern.pkcollections

    all_cfg = cfg()
    if sim_type in all_cfg:
        return pykern.pkcollections.PKDict(
            pykern.pkcollections.map_items(all_cfg[sim_type]),
        )
    return pykern.pkcollections.PKDict()
pass
if need_validate and do_validate:
srschema.validate_name(
data,
iterate_simulation_datafiles(
sim_type,
lambda res, _, d: res.append(d),
PKDict({'simulation.folder': s.folder}),
),
SCHEMA_COMMON.common.constants.maxSimCopies
)
srschema.validate_fields(data, get_schema(data.simulationType))
s.simulationSerial = _serial_new()
# Do not write simulationStatus or computeJobCacheKey
d = copy.deepcopy(data)
pkcollections.unchecked_del(d.models, 'simulationStatus', 'computeJobCacheKey')
write_json(fn, d)
return data
# Well-known file names written into a run directory by this template.
CURRENT_FILE = 'currents.npy'
MONITOR_LOGFILE = 'monitor.log'
OPTIMIZER_RESULT_FILE = 'opt.json'
STEERING_FILE = 'steering.json'
# Hex RGB colors for the dimension plot traces.
_DIM_PLOT_COLORS = [
    '#d0c383',
    '#9400d3'
]
# Monitor-name -> model-field lookup; empty at import time.
# NOTE(review): presumably populated lazily elsewhere — confirm where.
_MONITOR_TO_MODEL_FIELDS = pkcollections.Dict()
# Schema for this simulation type, loaded once at import time.
_SCHEMA = simulation_db.get_schema(SIM_TYPE)
# Hex RGB colors for the settings plot traces.
_SETTINGS_PLOT_COLORS = [
    '#ff0000',
    '#f4a442',
    '#e9ed2d',
    '#44c926',
    '#2656c9',
    '#3d25c4',
    '#7e23c4'
]
_SETTINGS_KICKER_SYMBOLS = PKDict(
hkick='square',
hpos='square',
def _start(self):
"""Detach a process from the controlling terminal and run it in the
background as a daemon.
"""
#POSIT: jid is valid docker name (word chars and dash)
self.cname = _DOCKER_CONTAINER_PREFIX + self.jid
ctx = pkcollections.Dict(
kill_secs=_KILL_TIMEOUT_SECS,
run_dir=self.run_dir,
run_log=self.run_dir.join(template_common.RUN_LOG),
run_secs=self.__run_secs(),
sh_cmd=self.__sh_cmd(),
)
script = str(self.run_dir.join(_DOCKER_CONTAINER_PREFIX + 'run.sh'))
with open(str(script), 'wb') as f:
f.write(pkjinja.render_resource('runner/docker.sh', ctx))
cmd = [
'run',
#TODO(robnagler) configurable
'--cpus=1',
'--detach',
'--init',
'--log-driver=json-file',
values.update(
t_image=half_lens,
t_source=(source if is_first else 0.0) + source_width,
)
else:
values.update(
t_image=source_width,
t_source=half_lens,
)
fields = sorted(values.keys())
return '''
oe = Shadow.OE(){}
beam.traceOE(oe, {})'''.format(_fields('oe', values, fields), count + is_obj)
common = pkcollections.Dict(
dummy=1.0,
fwrite=3,
)
# Same for all lenses (afaict)
common.update(
f_ext=1,
f_refrac=1,
t_incidence=0.0,
t_reflection=180.0,
)
common.fmirr = item.fmirr
if not _eq(item, 'fmirr', 'Plane'):
if _eq(item, 'useCCC', 'Yes'):
common.fmirr = 10
if _eq(item, 'fcyl', 'Yes'):
common.update(
def get_enums(schema, name):
    """Build a mapping of each enum value's name to itself.

    Args:
        schema (dict): schema containing an ``enum`` section
        name (str): which enum to extract
    Returns:
        pkcollections.Dict: enum value name mapped to itself
    """
    res = pkcollections.Dict()
    # Each entry is a (name, label, ...) sequence; only the name is kept.
    res.update({e[0]: e[0] for e in schema.enum[name]})
    return res
def validate_name(data, data_files, max_copies):
    """Validate and if necessary uniquify name

    If the simulation's name collides with another simulation in the same
    folder, append " 2", " 3", ... until it no longer collides, asserting
    when more than ``max_copies`` numbered copies would be required.

    Args:
        data (dict): what to validate; ``models.simulation.name`` may be
            rewritten in place
        data_files (list): simulation files already in the folder
        max_copies (int): maximum number of uniquified copies allowed
    """
    s = data.models.simulation
    # Names of other simulations (different id) that could collide with
    # ours; a plain dict suffices for this local membership lookup.
    taken = {}
    for d in data_files:
        d2 = d.models.simulation
        if d2.name.startswith(s.name) and d2.simulationId != s.simulationId:
            taken[d2.name] = d2.simulationId
    i = 2
    n2 = s.name
    while n2 in taken:
        n2 = '{} {}'.format(s.name, i)
        i += 1
    assert i - 1 <= max_copies, util.err(s.name, 'Too many copies: {} > {}', i - 1, max_copies)
    s.name = n2
def nginx_proxy():
    """Run an nginx reverse proxy in a Docker container.

    Development-only helper: renders the proxy config from a template into
    the run directory and bind-mounts it into the official nginx image.
    """
    assert pkconfig.channel_in('dev')
    proxy_dir = _run_dir().join('nginx_proxy').ensure(dir=True)
    with pkio.save_chdir(proxy_dir):
        conf = proxy_dir.join('default.conf')
        pkjinja.render_resource(
            'nginx_proxy.conf',
            dict(pkcollections.map_items(cfg)),
            output=conf,
        )
        pksubprocess.check_call_with_signals([
            'docker',
            'run',
            '--net=host',
            '--rm',
            '--volume={}:/etc/nginx/conf.d/default.conf'.format(conf),
            'nginx',
        ])