def display_workflow(run_id):
    sql_conn = get_db()
    df_workflow = pd.read_sql_query('SELECT workflow_name, time_began, rundir, run_id FROM workflows WHERE run_id=(?)',
                                    sql_conn, params=(run_id, ))
    df_workflows = pd.read_sql_query('SELECT workflow_name, time_began, rundir, run_id FROM workflows WHERE workflow_name=(?)',
                                     sql_conn, params=(df_workflow['workflow_name'][0], ))
    close_db()

    return html.Div(children=[
        html.H2('Workflow name'),
        html.H4(id='workflow_name', children=df_workflows['workflow_name'][0]),
        html.H2('Run Id'),
        html.H4(id='run_id', children=run_id),
        html.H2('Version'),
        html.H4(id='run_version', children=df_workflow['rundir'].iloc[0].split('/').pop()),
        dropdown(dataframe=df_workflows.sort_values(by='time_began', ascending=False)),
        dcc.Tabs(id="tabs", value='workflow', children=[
def display_workflow(run_id):
    sql_conn = get_db()
    df_workflow = pd.read_sql_query('SELECT workflow_name, time_began, rundir, run_id FROM workflows WHERE run_id=(?)',
                                    sql_conn, params=(run_id, ))

    if df_workflow.empty:
        close_db()
        return 'Run id ' + run_id + ' not found'

    df_workflows = pd.read_sql_query('SELECT workflow_name, time_began, rundir, run_id FROM workflows WHERE workflow_name=(?)',
                                     sql_conn, params=(df_workflow['workflow_name'][0], ))
    close_db()

    return html.Div(children=[
        html.Div(className='flex_container',
                 children=[
                     html.Img(className='parsl_logo',
                              src='../../../assets/parsl-logo.png'),
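display_workflow builds and returns a Dash component tree; it does not render anything itself. In a Dash app it would normally be invoked from a URL-routing callback. Below is a minimal sketch of that wiring, assuming a hypothetical app object and a /workflows/<run_id> route (this is not Parsl's actual routing code):

import dash
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output

app = dash.Dash(__name__)
app.layout = html.Div(children=[
    dcc.Location(id='url', refresh=False),  # mirrors the browser address bar
    html.Div(id='page-content')             # filled in by the routing callback
])


@app.callback(Output('page-content', 'children'), [Input('url', 'pathname')])
def route(pathname):
    # Hypothetical route: /workflows/<run_id> hands the run_id to the layout builder.
    if pathname and pathname.startswith('/workflows/'):
        return display_workflow(pathname.split('/')[-1])
    return html.Div('Page not found')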
def plot(self, option, columns, run_id):
    sql_conn = get_db()
    df_resources = pd.read_sql_query('SELECT psutil_process_time_system, timestamp, task_id FROM task_resources WHERE run_id=(?)',
                                     sql_conn, params=(run_id, ))
    df_task = pd.read_sql_query('SELECT task_id, task_time_returned FROM task WHERE run_id=(?)',
                                sql_conn, params=(run_id, ))
    close_db()

    # Evenly spaced bin edges spanning the observed system-time values,
    # one bin per requested column.
    min_range = min(df_resources['psutil_process_time_system'].astype('float'))
    max_range = max(df_resources['psutil_process_time_system'].astype('float'))
    time_step = (max_range - min_range) / columns

    x_axis = []
    for i in np.arange(min_range, max_range + time_step, time_step):
        x_axis.append(i)

    apps_dict = dict()
    for i in range(len(df_task)):
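The truncated loop above presumably buckets each task's samples into the `columns` bins spanned by x_axis. A hedged sketch of one way to finish that aggregation with NumPy follows (the helper name and the histogram-based approach are assumptions, not the dashboard's actual code):

import numpy as np


def bin_resource_usage(df_resources, value_column, columns):
    # Rebuild the evenly spaced bin edges computed in plot() above.
    values = df_resources[value_column].astype('float')
    min_range, max_range = values.min(), values.max()
    time_step = (max_range - min_range) / columns
    edges = np.arange(min_range, max_range + time_step, time_step)

    # Count how many samples fall into each bin; the edges from np.arange
    # are monotonically increasing, as np.histogram requires.
    counts, _ = np.histogram(values, bins=edges)
    return edges, counts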
            html.Td(html.A(children=dataframe.iloc[i]['workflow_name'], href=href(i))),
            html.Td(html.A(children=dataframe.iloc[i]['workflow_version'], href=href(i))),
            html.Td(html.A(children=num_to_timestamp(float(dataframe.iloc[i]['time_began']))
                    .strftime(DB_DATE_FORMAT) if dataframe.iloc[i]['time_began'] != 'None' else 'None', href=href(i))),
            html.Td(html.A(children=num_to_timestamp(float(dataframe.iloc[i]['time_completed']))
                    .strftime(DB_DATE_FORMAT) if dataframe.iloc[i]['time_completed'] != 'None' else 'None', href=href(i))),
            html.Td(html.A(children=dataframe.iloc[i]['tasks_completed_count'], href=href(i))),
            html.Td(html.A(children=dataframe.iloc[i]['tasks_failed_count'], href=href(i))),
            html.Td(html.A(children=dataframe.iloc[i]['user'], href=href(i))),
            html.Td(html.A(children=dataframe.iloc[i]['host'], href=href(i))),
            html.Td(html.A(children=dataframe.iloc[i]['rundir'], href=href(i)))
        ]) for i in range(len(dataframe))])
    )
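The Td/A rows above come from a helper that renders a DataFrame as an HTML table whose cells link to a per-row detail page. A simplified, hypothetical stand-in for such a helper is sketched below (the column handling and the href callback are assumptions, not the real dataframe_to_html_table):

import dash_html_components as html


def simple_dataframe_table(dataframe, href):
    # Header row taken from the DataFrame's column names.
    header = html.Tr([html.Th(col) for col in dataframe.columns])
    # One row per record; every cell links to that row's detail page.
    rows = [html.Tr([html.Td(html.A(children=dataframe.iloc[i][col], href=href(i)))
                     for col in dataframe.columns])
            for i in range(len(dataframe))]
    return html.Table([header] + rows)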
sql_conn = get_db()

layout = html.Div(children=[
    html.H1("Workflows"),
    dataframe_to_html_table(id='workflows_table',
                            dataframe=pd.read_sql_query("SELECT run_id, "
                                                        "workflow_name, "
                                                        "workflow_version, "
                                                        "time_began, "
                                                        "time_completed, "
                                                        "tasks_completed_count, "
                                                        "tasks_failed_count, "
                                                        "user, "
                                                        "host, "
                                                        "rundir FROM workflows", sql_conn)
                            .sort_values(
                                by=['time_began'],
import pandas as pd
import dash_html_components as html
from parsl.monitoring.web_app.app import get_db, close_db
from parsl.monitoring.web_app.utils import dataframe_to_html_table

sql_conn = get_db()

layout = html.Div(children=[
    html.H1("Workflows"),
    dataframe_to_html_table(id='workflows_table',
                            field='workflow_name',
                            dataframe=pd.read_sql_query("SELECT run_id, "
                                                        "workflow_name, "
                                                        "workflow_version, "
                                                        "time_began, "
                                                        "time_completed, "
                                                        "tasks_completed_count, "
                                                        "tasks_failed_count, "
                                                        "user, "
                                                        "host, "
                                                        "rundir FROM workflows", sql_conn)
                            .sort_values(
def plot(self, apps, run_id):
    sql_conn = get_db()
    df_status = pd.read_sql_query('SELECT run_id, task_id, task_status_name, timestamp FROM task_status WHERE run_id=(?)',
                                  sql_conn, params=(run_id, ))

    if type(apps) is dict:
        apps = ['', apps['label']]
    elif len(apps) == 1:
        apps.append('')

    df_task = pd.read_sql_query('SELECT task_id, task_func_name FROM task WHERE run_id=(?) AND task_func_name IN {apps}'.format(apps=tuple(apps)),
                                sql_conn, params=(run_id, ))
    close_db()

    def y_axis_setup(array):
        count = 0
        items = []
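y_axis_setup is cut off here; from how the status plots use it, it appears to turn an array of per-timestep values into a running count for the y axis. The body below is only a guess, offered as an assumption rather than the original implementation:

def y_axis_setup(array):
    # Running count of truthy entries, e.g. how many tasks have reached a
    # given status by each point on the x axis. Sketch only.
    count = 0
    items = []
    for value in array:
        if value:
            count += 1
        items.append(count)
    return items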
def plot(self, option, columns, run_id):
    sql_conn = get_db()
    df_resources = pd.read_sql_query('SELECT psutil_process_memory_resident, timestamp, task_id FROM task_resources WHERE run_id=(?)',
                                     sql_conn, params=(run_id, ))
    df_task = pd.read_sql_query('SELECT task_id, task_time_returned FROM task WHERE run_id=(?)',
                                sql_conn, params=(run_id, ))
    close_db()

    min_range = min(df_resources['psutil_process_memory_resident'].astype('float'))
    max_range = max(df_resources['psutil_process_memory_resident'].astype('float'))
    time_step = (max_range - min_range) / columns

    x_axis = []
    for i in np.arange(min_range, max_range + time_step, time_step):
        x_axis.append(i)

    apps_dict = dict()
    for i in range(len(df_task)):
def plot(self, minutes, seconds, run_id, apps=None):
    sql_conn = get_db()
    df_status = pd.read_sql_query('SELECT run_id, task_id, task_status_name, timestamp FROM task_status WHERE run_id=(?)',
                                  sql_conn, params=(run_id, ))

    if apps:
        if type(apps) is dict:
            apps = ['', apps['label']]
        elif len(apps) == 1:
            apps.append('')
        df_task = pd.read_sql_query('SELECT task_id, task_func_name FROM task WHERE run_id=(?) AND task_func_name IN {apps}'.format(apps=tuple(apps)),
                                    sql_conn, params=(run_id, ))
    else:
        df_task = pd.read_sql_query('SELECT task_id, task_func_name FROM task WHERE run_id=(?)',
                                    sql_conn, params=(run_id, ))
    close_db()
def total_tasks_plot_tasks(apps, minutes, seconds, run_id):
    # apps is sometimes a dict and sometimes a list. Normalizing it here ensures
    # that tuple(apps) renders as a valid SQL IN list in pd.read_sql_query.
    if type(apps) is dict:
        apps = ['', apps['label']]
    elif len(apps) == 1:
        apps.append('')

    sql_conn = get_db()
    df_status = pd.read_sql_query('SELECT run_id, task_id, task_status_name, timestamp FROM task_status WHERE run_id=(?)',
                                  sql_conn, params=(run_id, ))
    df_task = pd.read_sql_query('SELECT task_id, task_func_name FROM task WHERE run_id=(?) AND task_func_name IN {apps}'.format(apps=tuple(apps)),
                                sql_conn, params=(run_id, ))
    close_db()

    min_time = timestamp_to_int(min(df_status['timestamp']))
    max_time = timestamp_to_int(max(df_status['timestamp']))
    time_step = 60 * minutes + seconds

    x_axis = []
    for i in range(min_time, max_time, time_step):
        x_axis.append(num_to_timestamp(i).strftime(DB_DATE_FORMAT))

    # Fill apps_dict like: {app1: [#task1, #task2], app2: [#task4], app3: [#task3]}
    apps_dict = dict()
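The dict/list juggling around apps exists only so that tuple(apps) always renders as a valid SQL IN list (a single-element tuple would leave a trailing comma in the query text). Below is an alternative sketch that keeps the app names as bound parameters instead of formatting them into the SQL string; the function and variable names here are illustrative, not part of the dashboard:

import pandas as pd


def query_tasks_for_apps(sql_conn, run_id, apps):
    # Normalize the dropdown value: a dict means a single selection.
    if isinstance(apps, dict):
        apps = [apps['label']]

    # One '?' placeholder per app name keeps the values as bound parameters.
    placeholders = ', '.join('?' for _ in apps)
    query = ('SELECT task_id, task_func_name FROM task '
             'WHERE run_id=(?) AND task_func_name IN ({})'.format(placeholders))
    return pd.read_sql_query(query, sql_conn, params=(run_id, *apps))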