Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
self.taskIndex=taskIndex
self.inputFile=inputFile
self.nocopy = DeepCopyProtector()
self.nocopy.totalWorkCompleted = totalWorkCompleted
def workflow(self) :
    """
    Read a single integer value from this task's input file, log it,
    delete the input file, and record the value in the shared
    accumulator under this task's index.
    """
    import os

    # 'with' guarantees the handle is closed even if read() or the
    # int() conversion raises; the original closed it manually.
    with open(self.inputFile, "rb") as infp:
        value = int(infp.read().strip())

    self.flowLog("File: %s Value: %i" % (self.inputFile, value))

    # The input file is consumed exactly once; remove it so a rerun
    # cannot pick up stale data.
    os.remove(self.inputFile)

    # addOrderedValue is presumably synchronized across concurrently
    # running tasks (see SyncronizedAccumulator) -- TODO confirm.
    self.nocopy.totalWorkCompleted.addOrderedValue(self.taskIndex, value)
class LaunchUntilWorkflow(WorkflowRunner) :
    # Repeatedly launches worker tasks until a cumulative work target is
    # reached.
    # NOTE(review): this class appears truncated in this chunk --
    # launchNextTask() references workerJob, which is not defined in the
    # visible code; confirm against the full file.

    def __init__(self):
        # Total amount of accumulated "work" at which launching stops.
        self.totalContinuousWorkTarget = 100

    def workflow(self):
        # Bookkeeping for launched tasks:
        taskByIndex = []        # task labels in launch order
        allTasks = set()        # every task label launched so far
        completedTasks = set()  # labels of tasks observed as complete
        # Shared accumulator for per-task results (spelling as in source).
        totalWorkCompleted = SyncronizedAccumulator()

        def launchNextTask() :
            # Label and output file name are derived from the launch index.
            taskIndex = len(allTasks)
            workerTaskLabel = "workerTask_%05i" % (taskIndex)
            workerTaskFile = "outputFile_%05i" % (taskIndex)
            # presumably workerJob is a worker-script path defined earlier
            # in the file -- verify
            workerTaskCmd=[sys.executable, workerJob, workerTaskFile]
#
import os.path
import sys
# add module path
#
scriptDir=os.path.abspath(os.path.dirname(__file__))
sys.path.append(os.path.abspath(os.path.join(scriptDir,os.pardir,os.pardir,"src")))
from pyflow import WorkflowRunner
# all pyflow workflows are written into classes derived from pyflow.WorkflowRunner:
#
class HelloWorkflow(WorkflowRunner) :
    """
    Minimal pyflow example: a workflow is defined by deriving from
    WorkflowRunner and overriding its workflow() method.
    """

    def workflow(self) :
        # A single shell task; its output is redirected to the file
        # helloWorld.out.txt.
        helloCmd = "echo 'Hello World!' > helloWorld.out.txt"
        self.addTask("easy_task1", helloCmd)
# Instantiate the workflow object; constructing it does not launch any
# tasks -- that happens when the runner is invoked later.
#
wflow = HelloWorkflow()
#
# very simple task scripts called by the demo:
#
# Paths of the toy task scripts exercised by the demo.
# NOTE(review): filePath is presumably this script's directory, defined
# earlier in the file -- verify.
testJobDir = os.path.join(filePath, "testtasks")
sleepjob = os.path.join(testJobDir, "sleeper.bash")  # sleeps
yelljob = os.path.join(testJobDir, "yeller.bash")  # generates some i/o
runjob = os.path.join(testJobDir, "runner.bash")  # runs at 100% cpu
# all pyflow workflows are written into classes derived from
# pyflow.WorkflowRunner:
#
class TestWorkflow(WorkflowRunner) :
    # a workflow is defined by overloading the
    # WorkflowRunner.workflow() method:
    #
    def workflow(self) :
        # A simple command task with no dependencies, labeled 'task1'.
        # yelljob is a demo-script path defined at module level.
        #
        cmd = "%s 1" % (yelljob)
        self.addTask("task1", cmd)
        # Another task which runs the same command, this time the
        # command is provided as an argument list. An argument list
        # can be useful when a command has many arguments or
        # complicated quoting issues.
        # NOTE(review): that second task appears truncated in this chunk --
        # confirm against the full demo script.
        #
import os.path
import sys
# add module path by hand
#
scriptDir = os.path.abspath(os.path.dirname(__file__))
sys.path.append(scriptDir + "/../../src")
from pyflow import WorkflowRunner
# all pyflow workflows are written into classes derived from
# pyflow.WorkflowRunner:
#
class MakeWorkflow(WorkflowRunner) :
    # a workflow is defined by overloading the
    # WorkflowRunner.workflow() method:
    #
    def workflow(self) :
        # This command 'configures' a makefile by copying a hidden
        # template to Makefile in the demo script's directory.
        #
        self.addTask("task1", "cd %s; cp .hidden Makefile" % scriptDir)
        # Sometimes you get to deal with make. The task below
        # demonstrates a make command which starts when the above task
        # completes. Make tasks are specified as directories which
        # contain a makefile. This task points to the directory of
        # this demo script, which contains a Makefile at the
        # completion of task1.
        # NOTE(review): the make task itself is not visible in this
        # chunk -- confirm against the full demo script.
        #
import os.path
import sys
# add module path by hand
#
sys.path.append(os.path.abspath(os.path.dirname(__file__)) + "/../../src")
from pyflow import WorkflowRunner
# all pyflow workflows are written into classes derived from pyflow.WorkflowRunner:
#
class RetryWorkflow(WorkflowRunner) :
    """
    Demo of pyflow's task retry settings. One task always succeeds and
    one always fails; the failing task exhausts its retries, so this
    demo as a whole is expected to fail.
    """

    def workflow(self) :
        self.flowLog("****** NOTE: This demo is supposed to fail ******")

        # Both tasks share the same wait/window/mode retry settings and
        # differ only in label, command, and retry count:
        sharedRetrySettings = dict(retryWait=2, retryWindow=0, retryMode="all")
        self.addTask("retry_task_success", "exit 0", retryMax=8, **sharedRetrySettings)
        self.addTask("retry_task_fail", "exit 1", retryMax=3, **sharedRetrySettings)
# Instantiate the workflow
#
rmGraphTmpCmd = getRmdirCmd() + [tmpGraphDir]
rmTask=self.addTask(preJoin(taskPrefix,"removeTmpDir"),rmGraphTmpCmd,dependencies=mergeTask)
graphStatsCmd = [self.params.mantaGraphStatsBin,"--global"]
graphStatsCmd.extend(["--graph-file",graphPath])
graphStatsCmd.extend(["--output-file",graphStatsPath])
graphStatsTask = self.addTask(preJoin(taskPrefix,"locusGraphStats"),graphStatsCmd,dependencies=mergeTask,memMb=self.params.mergeMemMb)
nextStepWait = set()
nextStepWait.add(checkTask)
return nextStepWait
class listFileWorkflow(WorkflowRunner) :
    """
    Creates a file which enumerates the values in a list, one line per item.
    """

    def __init__(self, listFile, listItems) :
        # listFile: path of the output file to write
        # listItems: iterable of strings, one per output line
        self.listFile = listFile
        self.listItems = listItems

    def workflow(self) :
        # 'with' guarantees the file is flushed and closed even if a
        # write raises; the original left the handle open.
        with open(self.listFile, "w") as fp:
            for listItem in self.listItems :
                fp.write(listItem + "\n")
def sortEvidenceBams(self, sortBamTasks, taskPrefix="", binStr="", dependencies=None):
#
# very simple task scripts called by the demo:
#
# Paths of the toy task scripts exercised by the demo; scriptDir is
# this script's directory, defined earlier in the file.
testJobDir=os.path.join(scriptDir,"testtasks")
sleepjob=os.path.join(testJobDir,"sleeper.bash")  # sleeps
yelljob=os.path.join(testJobDir,"yeller.bash")  # generates some i/o
runjob=os.path.join(testJobDir,"runner.bash")  # runs at 100% cpu
# all pyflow workflows are written into classes derived from
# pyflow.WorkflowRunner:
#
class SimpleWorkflow(WorkflowRunner) :
    # WorkflowRunner objects can create regular constructors to hold
    # run parameters or other state information:
    #
    def __init__(self,params) :
        # params: opaque run-parameter object supplied by the caller
        self.params=params

    # a workflow is defined by overloading the
    # WorkflowRunner.workflow() method:
    #
    def workflow(self) :
        # A simple command task with no dependencies, labeled 'task1';
        # yelljob is a demo-script path defined at module level.
        # NOTE(review): this method appears truncated in this chunk --
        # the addTask() call consuming cmd is not visible; confirm
        # against the full file.
        #
        cmd="%s 1" % (yelljob)
from pyflow import WorkflowRunner
#
# very simple task scripts called by the demo:
#
# Path of the toy sleep script used by the demo.
# NOTE(review): scriptDir is presumably defined earlier in the file -- verify.
testJobDir = os.path.join(scriptDir, "testtasks")
sleepjob = os.path.join(testJobDir, "sleeper.bash")  # sleeps
# all pyflow workflows are written into classes derived from
# pyflow.WorkflowRunner:
#
class MutexWorkflow(WorkflowRunner) :
    """
    Demonstrates mutex-restricted tasks: every task sharing the mutex
    label "test" runs one at a time, while unrestricted tasks may run
    concurrently.
    """

    def workflow(self) :
        sleepCmd = sleepjob + " 1"

        # Eight tasks serialized by a shared mutex label:
        for taskNum in range(8) :
            self.addTask("mutex_task_" + str(taskNum), sleepCmd, mutex="test")

        # Sixteen ordinary tasks for comparison -- no mutex, so these
        # are free to run in parallel:
        for taskNum in range(16) :
            self.addTask("normal_task_" + str(taskNum), sleepCmd)