from kfp import gcp, onprem  # legacy KFP helpers used by the .apply() calls below

if use_gpu:
    training.image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer-gpu:0517114dc2b365a4a6d95424af6157ead774eff3'
    training.set_gpu_limit(1)
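# Note: `set_gpu_limit(1)` is the legacy-DSL shorthand for adding the
# Kubernetes resource limit `nvidia.com/gpu: 1`, i.e. roughly
# `training.container.add_resource_limit('nvidia.com/gpu', 1)`; 'nvidia' is
# the default vendor.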
prediction = dataflow_tf_predict_op(
    data_file_pattern=evaluation,
    schema=schema,
    target_column=target,
    model=training.output,
    run_mode=predict_mode,
    gcp_project=project,
    predictions_dir=output_template
).apply(gcp.use_gcp_secret('user-gcp-sa'))
cm = confusion_matrix_op(  # named `cm` so the `steps` list below resolves
    predictions=prediction.output,
    output_dir=output_template
).apply(gcp.use_gcp_secret('user-gcp-sa'))
if platform == 'GCP':
    deploy = kubeflow_deploy_op(  # GCP branch reconstructed from the on-cluster branch below
        model_dir=str(training.output) + '/export/export',
        server_name=tf_server_name
    )
else:
    deploy = kubeflow_deploy_op(
        cluster_name=project,
        model_dir=str(training.output) + '/export/export',
        pvc_name='users-pvc',
        # pvc_name=vop.outputs["name"],
        server_name=tf_server_name,
        service_type='NodePort',
    )
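# service_type='NodePort' exposes the TF Serving service on a static port of
# every cluster node, which suits on-prem clusters without a cloud load
# balancer; the GCP branch above presumably keeps the component's default
# service type.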
steps = [validation, preprocess, training, analysis, prediction, cm, roc, deploy]
for step in steps:
    if platform == 'GCP':
        step.apply(gcp.use_gcp_secret('user-gcp-sa'))
    else:
        step.apply(onprem.mount_pvc('users-pvc', 'local-storage', output))
        # step.apply(onprem.mount_pvc(vop.outputs["name"], 'local-storage', output))
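# A hedged sketch of compiling and running a pipeline like the fragment above
# (which references steps such as `validation`, `analysis`, and `roc` defined
# earlier in the full sample) with the legacy, pre-2.0 KFP SDK.
# `taxi_pipeline` is a placeholder for the decorated pipeline function this
# fragment belongs to, and the package name is an arbitrary choice.
import kfp
import kfp.compiler as compiler

compiler.Compiler().compile(taxi_pipeline, 'pipeline.tar.gz')
run = kfp.Client().create_run_from_pipeline_func(taxi_pipeline, arguments={})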
import kfp.dsl
import kfp.gcp

def sentiment_analysis_pipeline(  # name assumed: the signature head is truncated in the source
        project=kfp.dsl.PipelineParam('project'),  # assumed parameter; referenced below to authenticate
        gcp_temp_location=kfp.dsl.PipelineParam('runner', value='gs:///tmp'),
        input_path=kfp.dsl.PipelineParam('path', value='gs:///'),
        output_path=kfp.dsl.PipelineParam('path', value='gs:///output/output'),
        window=kfp.dsl.PipelineParam('window', value=280),
        period=kfp.dsl.PipelineParam('period', value=1)):
"""Defines the pipeline."""
sentiment_analysis_task = SentimentAnalysisOp(
'SentimentAnalysis',
project, # To authenticate.
gcp_temp_location,
input_path,
output_path,
window, period).apply(
kfp.gcp.use_gcp_secret()) # To apply gcp service account secret.
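# What `use_gcp_secret()` does, in outline: it mounts the named Kubernetes
# secret into the step's container and points GOOGLE_APPLICATION_CREDENTIALS
# at the mounted key file (the secret name defaults to 'user-gcp-sa'). A
# hand-rolled sketch of the same idea; the mount path and key-file name are
# assumptions mirroring the KFP defaults, not taken from the source:
from kubernetes import client as k8s_client

def use_gcp_secret_sketch(secret_name='user-gcp-sa'):
    def _apply(task):
        # Expose the secret to the step as a volume...
        task.add_volume(k8s_client.V1Volume(
            name=secret_name,
            secret=k8s_client.V1SecretVolumeSource(secret_name=secret_name)))
        task.container.add_volume_mount(k8s_client.V1VolumeMount(
            name=secret_name, mount_path='/secret/gcp-credentials'))
        # ...and tell Google client libraries where to find the key file.
        task.container.add_env_variable(k8s_client.V1EnvVar(
            name='GOOGLE_APPLICATION_CREDENTIALS',
            value='/secret/gcp-credentials/%s.json' % secret_name))
        return task
    return _apply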
import os
from kfp import gcp

def resnet_pipeline(  # name assumed: the signature head is truncated in the source
        project_id, output, region, model, version, tf_version,  # assumed order; all referenced in the body
        train_csv, validation_csv, labels, depth,
        train_batch_size=1024,
        eval_batch_size=1024,
        steps_per_eval=250,
        train_steps=10000,
        num_train_images=218593,
        num_eval_images=54648,
        num_label_classes=10):
    output_dir = os.path.join(str(output), '{{workflow.name}}')
    preprocess_staging = os.path.join(output_dir, 'staging')
    preprocess_output = os.path.join(output_dir, 'preprocessed_output')
    train_output = os.path.join(output_dir, 'model')
    preprocess = resnet_preprocess_op(project_id, preprocess_output, preprocess_staging, train_csv,
                                      validation_csv, labels, train_batch_size,
                                      eval_batch_size).apply(gcp.use_gcp_secret())
    train = resnet_train_op(project_id, preprocess_output, train_output, region, depth, train_batch_size,
                            eval_batch_size, steps_per_eval, train_steps, num_train_images, num_eval_images,
                            num_label_classes, tf_version).apply(gcp.use_gcp_secret())
    train.after(preprocess)
    export_output = os.path.join(str(train.outputs['job_dir']), 'export')
    deploy = resnet_deploy_op(export_output, model, version, project_id, region,
                              tf_version).apply(gcp.use_gcp_secret())
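# `train.after(preprocess)` declares an explicit ordering edge: the train step
# reads a GCS path rather than a KFP output artifact, so the DSL cannot infer
# the dependency from data flow. A minimal self-contained sketch of the same
# pattern (pipeline name, image, and commands are placeholders, not from the
# source):
import kfp.dsl as dsl

@dsl.pipeline(name='ordering-sketch', description='Explicit step ordering.')
def ordering_sketch():
    first = dsl.ContainerOp(name='preprocess', image='alpine:3.18',
                            command=['sh', '-c', 'echo preprocess'])
    second = dsl.ContainerOp(name='train', image='alpine:3.18',
                             command=['sh', '-c', 'echo train'])
    second.after(first)  # runs only after `first` completes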