@techreport{Saxena2011,
  author       = {Saxena, Tripti and Dubey, Abhishek},
  title        = {Meta-Tools For Designing Scientific Workflow Management Systems: {Part-I}, Survey},
  institution  = {Institute for Software Integrated Systems, Vanderbilt University},
  year         = {2011},
  number       = {ISIS-11-105},
  url          = {http://www.isis.vanderbilt.edu/sites/default/files/Survey-report.pdf},
  abstract     = {Scientific workflows require the coordination of data processing activities, resulting in executions driven by data dependencies. Due to the scales involved and the repetition of analysis, typically workflows are analyzed in coordinated campaigns, each execution managed and controlled by the workflow management system. In this respect, a workflow management system is required to (1) provide facilities for specifying workflows: intermediate steps, inputs/outputs, and parameters, (2) manage the execution of the workflow based on specified parameters, (3) provide facilities for managing data provenance, and (4) provide facilities to monitor the progress of the workflow, include facilities to detect anomalies, isolate faults and provide recovery actions. In this paper, part-I of a two part series, we provide a comparison of some state of the art workflow management systems with respect to these four primary requirements.},
  attachments  = {http://www.isis.vanderbilt.edu/sites/default/files/Survey-report.pdf},
  contribution = {colab},
  file         = {:Saxena2011-Meta-tools_for_Designing_Scientific_Workflow_Management_Systems_Survey.pdf:PDF},
  keywords     = {scientific workflows, workflow management, distributed computing, fault tolerance, data provenance, monitoring},
}