# Publications

• W. M. P. van der Aalst, A. Adriansyah, A. A. K. de Medeiros, F. Arcieri, T. Baier, T. Blickle, J. C. R. P. Bose, P. van den Brand, R. Brandtjen, J. C. A. M. Buijs, A. Burattin, J. Carmona, M. Castellanos, J. Claes, J. Cook, N. Costantini, F. Curbera, E. Damiani, M. de Leoni, P. Delias, B. F. van Dongen, M. Dumas, S. Dustdar, D. Fahland, D. R. Ferreira, W. Gaaloul, F. van Geffen, S. Goel, C. W. Günther, A. Guzzo, P. Harmon, A. H. M. ter Hofstede, J. Hoogland, J. Espen Ingvaldsen, K. Kato, R. Kuhn, A. Kumar, M. La Rosa, F. Maggi, D. Malerba, R. S. Mans, A. Manuel, M. McCreesh, P. Mello, J. Mendling, M. Montali, H. Motahari Nezhad, M. zur Muehlen, J. Munoz-Gama, L. Pontieri, J. Ribeiro, A. Rozinat, H. Seguel Pérez, R. Seguel Pérez, M. Sepúlveda, J. Sinur, P. Soffer, M. S. Song, A. Sperduti, G. Stilo, C. Stoel, K. Swenson, M. Talamo, W. Tan, C. Turner, J. Vanthienen, G. Varvaressos, H. M. W. Verbeek, M. Verdonk, R. Vigo, J. Wang, B. Weber, M. Weidlich, A. J. M. M. Weijters, L. Wen, M. Westergaard, and M. T. Wynn, “Process mining manifesto,” in Bpm 2011 workshops, part i, 2012, pp. 169-194.
[Bibtex]
@InProceedings{Aalst12,
Title = {Process Mining Manifesto},
Author = {Aalst, W. M. P. van der and Adriansyah, A. and Medeiros, A. K. Alves de and Arcieri, F. and Baier, T. and Blickle, T. and Bose, R. P. Jagadeesh Chandra and Brand, P. van den and Brandtjen, R. and Buijs, J. C. A. M. and Burattin, A. and Carmona, J. and Castellanos, M. and Claes, J. and Cook, J. and Costantini, N. and Curbera, F. and Damiani, E. and Leoni, M. de and Delias, P. and Dongen, B. F. van and Dumas, M. and Dustdar, S. and Fahland, D. and Ferreira, D. R. and Gaaloul, W. and Geffen, F. van and Goel, S. and G{\"u}nther, C. W. and Guzzo, A. and Harmon, P. and Hofstede, A. H. M. ter and Hoogland, J. and Espen Ingvaldsen, J. and Kato, K. and Kuhn, R. and Kumar, A. and La Rosa, M. and Maggi, F. and Malerba, D. and Mans, R. S. and Manuel, A. and McCreesh, M. and Mello, P. and Mendling, J. and Montali, M. and Motahari Nezhad, H. and zur Muehlen, M. and Munoz-Gama, J. and Pontieri, L. and Ribeiro, J. and Rozinat, A. and Seguel P{\'e}rez, H. and Seguel P{\'e}rez, R. and Sep{\'u}lveda, M. and Sinur, J. and Soffer, P. and Song, M. S. and Sperduti, A. and Stilo, G. and Stoel, C. and Swenson, K. and Talamo, M. and Tan, W. and Turner, C. and Vanthienen, J. and Varvaressos, G. and Verbeek, H. M. W. and Verdonk, M. and Vigo, R. and Wang, J. and Weber, B. and Weidlich, M. and Weijters, A. J. M. M. and Wen, L. and Westergaard, M. and Wynn, M. T.},
Booktitle = {BPM 2011 Workshops, Part I},
Year = {2012},
Editor = {Daniel, F. and Dustdar, S. and Barkaoui, K.},
Month = aug,
Organization = {LIMOS - Universit{\'e} Blaise Pascal, Clermont-Ferrand, France},
Pages = {169--194},
Publisher = {Springer-Verlag},
Series = {LNBIP},
Volume = {99},
Abstract = {Process mining techniques are able to extract knowledge from event logs commonly available in today's information systems. These techniques provide new means to discover, monitor, and improve processes in a variety of application domains. There are two main drivers for the growing interest in process mining. On the one hand, more and more events are being recorded, thus, providing detailed information about the history of processes. On the other hand, there is a need to improve and support business processes in competitive and rapidly changing environments. This manifesto is created by the IEEE Task Force on Process Mining and aims to promote the topic of process mining. Moreover, by defining a set of guiding principles and listing important challenges, this manifesto hopes to serve as a guide for software developers, scientists, consultants, business managers, and end-users. The goal is to increase the maturity of process mining as a new tool to improve the (re)design, control, and support of operational business processes.},
Owner = {hverbeek},
Timestamp = {2011.11.28},
Url = {http://www.win.tue.nl/ieeetfpm/doku.php?id=shared:process_mining_manifesto}
}
• W. M. P. van der Aalst, T. Basten, H. M. W. Verbeek, P. A. C. Verkoulen, and M. Voorhoeve, “Adaptive workflow: on the interplay between flexibility and support,” in Enterprise information systems, J. Filipe, Ed., Norwell: Kluwer Academic Publishers, 2000, pp. 63-70.
[Bibtex]
@InCollection{Aalst00,
Title = {Adaptive Workflow: On the Interplay between Flexibility and Support},
Author = {Aalst, W. M. P. van der and Basten, T. and Verbeek, H. M. W. and Verkoulen, P. A. C. and Voorhoeve, M.},
Booktitle = {Enterprise Information Systems},
Publisher = {Kluwer Academic Publishers},
Address = {Norwell},
Year = {2000},
Editor = {Filipe, J.},
Pages = {63--70},
Abstract = {Today's information systems do not support adaptive workflow: either the information system abstracts from the workflow processes at hand and focuses on the management of data and the execution of individual tasks via applications or the workflow is supported by the information system but it is hard to handle changes. This paper addresses this problem by classifying the types of changes. Based on this classification, issues such as syntactic/semantic correctness, case transfer, and management information are discussed. It turns out that the trade-off between flexibility and support raises challenging questions. Only some of these questions are answered in this paper, most of them require further research. Since the success of the next generation of workflow management systems depends on the ability to support adaptive workflow, it is important to provide answers for the questions raised in this paper.},
Owner = {hverbeek},
Timestamp = {2008.11.04},
}
• W. M. P. van der Aalst, T. Basten, H. M. W. Verbeek, P. A. C. Verkoulen, and M. Voorhoeve, “Adaptive workflow: on the interplay between flexibility and support,” in Proceedings of the first international conference on enterprise information systems, Setúbal, Portugal, 1999, pp. 353-360.
[Bibtex]
@InProceedings{Aalst99a,
Title = {Adaptive Workflow: On the Interplay between Flexibility and Support},
Author = {Aalst, W. M. P. van der and Basten, T. and Verbeek, H. M. W. and Verkoulen, P. A. C. and Voorhoeve, M.},
Booktitle = {Proceedings of the First International Conference on Enterprise Information Systems},
Year = {1999},
Editor = {Filipe, J. and Cordeiro, J.},
Month = mar,
Address = {Set{\'u}bal, Portugal},
Pages = {353--360},
Volume = {2},
Abstract = {Today's information systems do not support adaptive workflow: either the information system abstracts from the workflow processes at hand and focuses on the management of data and the execution of individual tasks via applications or the workflow is supported by the information system but it is hard to handle changes. This paper addresses this problem by classifying the types of changes. Based on this classification, issues such as syntactic/semantic correctness, case transfer, and management information are discussed. It turns out that the trade-off between flexibility and support raises challenging questions. Only some of these questions are answered in this paper, most of them require further research. Since the success of the next generation of workflow management systems depends on the ability to support adaptive workflow, it is important to provide answers for the questions raised in this paper.},
File = {Preprint of published paper:http\://www.win.tue.nl/~hverbeek/downloads/preprints/Aalst99a.pdf:PDF;First International Conference on Enterprise Information Systems:http\://ltodi.est.ips.pt/iceis/iceis1999.htm:URL},
Owner = {hverbeek},
Timestamp = {2008.11.04},
}
• W. M. P. van der Aalst, T. Basten, H. M. W. Verbeek, P. A. C. Verkoulen, and M. Voorhoeve, “Adaptive workflow: an approach based on inheritance,” in Proceedings of the ijcai99 workshop on intelligent workflow and process management: the new frontier for ai in business, Stockholm, Sweden, 1999, pp. 36-45.
[Bibtex]
@InProceedings{Aalst99,
Title = {Adaptive Workflow: An Approach Based on Inheritance},
Author = {Aalst, W. M. P. van der and Basten, T. and Verbeek, H. M. W. and Verkoulen, P. A. C. and Voorhoeve, M.},
Booktitle = {Proceedings of the IJCAI'99 Workshop on Intelligent Workflow and Process Management: The New Frontier for AI in Business},
Year = {1999},
Editor = {Ibrahim, M. and Drabble, B.},
Month = aug,
Address = {Stockholm, Sweden},
Pages = {36--45},
Abstract = {Today's information systems do not support adaptive workflow: either the information system abstracts from the workflow processes at hand and focuses on the management of data and the execution of individual tasks via applications or the workflow is supported by the information system but it is hard to handle changes. This paper addresses this problem by classifying the types of changes. Based on this classification, issues such as syntactic/semantic correctness, case transfer, and management information are discussed. It turns out that the trade-off between flexibility and support raises challenging questions. Some of these questions can be answered using advanced inheritance notions. This paper provides four inheritance-preserving transformation rules which can be used to avoid the typical problems related to change.},
File = {Workshop on Intelligent Workflow and Process Management\: The New Frontier for AI in Business:http\://www.aiai.ed.ac.uk/project/plan/event/99-IJCAI-WORKFLOW-WORKSHOP/:URL;IJCAI'99:http\://www.dsv.su.se/ijcai-99/:URL;Preprint of published paper:http\://www.win.tue.nl/~hverbeek/downloads/preprints/Aalst99.pdf:PDF},
Owner = {hverbeek},
Timestamp = {2008.11.04},
}
• W. M. P. van der Aalst, B. Benatallah, F. Casati, F. Curbera, and H. M. W. Verbeek, “Business process management: where business processes and web services meet,” Data and knowledge engineering (dke), vol. 61, iss. 1, pp. 1-5, 2007.
[Bibtex]
@article{Aalst07,
  author    = {Aalst, W. M. P. van der and Benatallah, B. and Casati, F. and Curbera, F. and Verbeek, H. M. W.},
  title     = {Business process management: Where business processes and web services meet},
  journal   = {Data and Knowledge Engineering (DKE)},
  year      = {2007},
  volume    = {61},
  number    = {1},
  pages     = {1--5},
  note      = {Editorial},
  doi       = {10.1016/j.datak.2006.04.005},
  file      = {Preprint of published paper:http\://www.win.tue.nl/~hverbeek/downloads/preprints/Aalst07.pdf:URL;Data and Knowledge Engineering:http\://www.sciencedirect.com/science/journal/0169023X:URL;Volume 61, issue 1:http\://www.sciencedirect.com/science/issue/5630-2007-999389998-645006:URL},
  owner     = {hverbeek},
  timestamp = {2008.11.04},
}
• W. M. P. van der Aalst, B. F. van Dongen, C. W. Günther, R. S. Mans, A. A. K. de Medeiros, A. Rozinat, V. Rubin, M. Song, H. M. W. Verbeek, and A. J. M. M. Weijters, “Prom 4.0: comprehensive support for real process analysis,” in Application and theory of petri nets and other models of concurrency 2007, Siedlce, Poland, 2007, pp. 484-494.
[Bibtex]
@InProceedings{Aalst07a,
Title = {{ProM} 4.0: Comprehensive Support for Real Process Analysis},
Author = {Aalst, W. M. P. van der and Dongen, B. F. van and G{\"u}nther, C. W. and Mans, R. S. and Medeiros, A. K. Alves de and Rozinat, A. and Rubin, V. and Song, M. and Verbeek, H. M. W. and Weijters, A. J. M. M.},
Booktitle = {Application and Theory of Petri nets and Other Models of Concurrency 2007},
Year = {2007},
Editor = {Kleijn, J. and Yakovlev, A.},
Month = jun,
Pages = {484--494},
Publisher = {Springer, Berlin, Germany},
Series = {Lecture Notes in Computer Science},
Volume = {4546},
Abstract = {This tool paper describes the functionality of ProM. Version 4.0 of ProM has been released at the end of 2006 and this version reflects recent achievements in process mining. Process mining techniques attempt to extract non-trivial and useful information from so-called ``event logs''. One element of process mining is control-flow discovery, i.e., automatically constructing a process model (e.g., a Petri net) describing the causal dependencies between activities. Control-flow discovery is an interesting and practically relevant challenge for Petri-net researchers and ProM provides an excellent platform for this. For example, the theory of regions, genetic algorithms, free-choice-net properties, etc. can be exploited to derive Petri nets based on example behavior. However, as we will show in this paper, the functionality of ProM 4.0 is not limited to control-flow discovery. ProM 4.0 also allows for the discovery of other perspectives (e.g., data and resources) and supports related techniques such as conformance checking, model extension, model transformation, verification, etc. This makes ProM a versatile tool for process analysis which is not restricted to model analysis but also includes log-based analysis.},
Doi = {10.1007/978-3-540-73094-1_28},
Owner = {hverbeek},
Timestamp = {2008.11.04},
}
• W. M. P. van der Aalst, B. F. van Dongen, C. W. Günther, A. Rozinat, H. M. W. Verbeek, and A. J. M. M. Weijters, “Prom: the process mining toolkit,” , 2009.
[Bibtex]
@Electronic{Aalst09a,
Title = {{ProM}: The Process Mining Toolkit},
Author = {Aalst, W. M. P. van der and Dongen, B. F. van and G{\"u}nther, C. W. and Rozinat, A. and Verbeek, H. M. W. and Weijters, A. J. M. M.},
Month = sep,
Organization = {CEUR-WS},
Url = {http://ceur-ws.org/Vol-489/paper3.pdf},
Year = {2009},
Abstract = {Nowadays, all kinds of information systems store detailed information in logs. Process mining has emerged as a way to analyze these systems based on these detailed logs. Unlike classical data mining, the focus of process mining is on processes. First, process mining allows us to extract a process model from an event log. Second, it allows us to detect discrepancies between a modeled process (as it was envisioned to be) and an event log (as it actually is). Third, it can enrich an existing model with knowledge derived from an event log. This paper presents our tool ProM, which is the world-leading tool in the area of process mining.},
Owner = {hverbeek},
Timestamp = {2009.09.15}
}
• W. M. P. van der Aalst, M. Dumas, A. H. M. ter Hofstede, N. Russell, H. M. W. Verbeek, and P. Wohed, “Life after bpel?,” in Formal techniques for computer systems and business processes (ws-fm 2005), Versailles, France, 2005, pp. 35-50.
[Bibtex]
@InProceedings{Aalst05,
Title = {Life after {BPEL}?},
Author = {Aalst, W. M. P. van der and Dumas, M. and Hofstede, A. H. M. ter and Russell, N. and Verbeek, H. M. W. and Wohed, P.},
Booktitle = {Formal Techniques for Computer Systems and Business Processes (WS-FM 2005)},
Year = {2005},
Editor = {Bravetti, M. and Kloul, L. and Zavattaro, G.},
Note = {Invited paper to the 2nd International Workshop on Web Services and Formal Methods},
Pages = {35--50},
Publisher = {Springer, Berlin, Germany},
Series = {Lecture Notes in Computer Science},
Volume = {3670},
Abstract = {The Business Process Execution Language for Web Services (BPEL) has emerged as a standard for specifying and executing processes. It is supported by vendors such as IBM and Microsoft and positioned as the ``process language of the Internet''. This paper provides a critical analysis of BPEL based on the so-called workflow patterns. It also discusses the need for languages like BPEL. Finally, the paper addresses several challenges not directly addressed by BPEL but highly relevant to the support of web services.},
Doi = {10.1007/11549970_4},
Owner = {hverbeek},
Timestamp = {2008.11.04},
}
• W. M. P. van der Aalst, M. Dumas, C. Ouyang, A. Rozinat, and H. M. W. Verbeek, “Conformance checking of service behavior,” Acm transactions on internet technology (toit), vol. 8, iss. 3, pp. 1-30, 2008.
[Bibtex]
@article{Aalst08a,
  author    = {Aalst, W. M. P. van der and Dumas, M. and Ouyang, C. and Rozinat, A. and Verbeek, H. M. W.},
  title     = {Conformance Checking of Service Behavior},
  journal   = {ACM Transactions on Internet Technology (TOIT)},
  year      = {2008},
  volume    = {8},
  number    = {3},
  pages     = {1--30},
  abstract  = {A service-oriented system is composed of independent software units, namely services, that interact with one another exclusively through message exchanges. The proper functioning of such system depends on whether or not each individual service behaves as the other services expect it to behave. Since services may be developed and operated independently, it is unrealistic to assume that this is always the case. This article addresses the problem of checking and quantifying how much the actual behavior of a service, as recorded in message logs, conforms to the expected behavior as specified in a process model. We consider the case where the expected behavior is defined using the BPEL industry standard (Business Process Execution Language for Web Services). BPEL process definitions are translated into Petri nets and Petri net-based conformance checking techniques are applied to derive two complementary indicators of conformance: fitness and appropriateness. The approach has been implemented in a toolset for business process analysis and mining, namely ProM, and has been tested in an environment comprising multiple Oracle BPEL servers.},
  doi       = {10.1145/1361186.1361189},
  url       = {http://portal.acm.org/ft_gateway.cfm?id=1361189&type=pdf&coll=GUIDE&dl=GUIDE&CFID=35617930&CFTOKEN=70387686},
  file      = {Preprint of published paper:http\://www.win.tue.nl/~hverbeek/downloads/preprints/Aalst08a:URL;CFP Special issue on Middleware for Service-Oriented Computing:http\://www.cis.uoguelph.ca/~qmahmoud/soa-middleware-cfp.pdf:PDF;ACM TOIT:http\://www.acm.org/toit/:URL},
  owner     = {hverbeek},
  timestamp = {2008.11.03},
}
• W. M. P. van der Aalst, M. Dumas, C. Ouyang, A. Rozinat, and H. M. W. Verbeek, “Choreography conformance checking: an approach based on bpel and petri nets (revised version),” BPMcenter.org, BPM Center Report BPM-06-31, 2006.
[Bibtex]
@TechReport{Aalst06,
Title = {Choreography Conformance Checking: An Approach Based on {BPEL} and {Petri} Nets (Revised Version)},
Author = {Aalst, W. M. P. van der and Dumas, M. and Ouyang, C. and Rozinat, A. and Verbeek, H. M. W.},
Institution = {BPMcenter.org},
Year = {2006},
Number = {BPM-06-31},
Type = {BPM Center Report},
Abstract = {A service-oriented system is composed of independent software units, namely services, that interact with one another exclusively through message exchanges. The proper functioning of such system depends on whether or not each individual service behaves as the other services expect it to behave. Since services may be developed and operated independently, it is unrealistic to assume that this is always the case. This paper addresses the problem of checking and quantifying how much the actual behavior of a service, as recorded in message logs, conforms to the expected behavior as specified in some process model. Concretely, we consider the case where the expected behavior is defined using the de-facto industry standard BPEL (i.e., the Business Process Execution Language for Web Services). BPEL process definitions are translated into Petri nets and Petri net-based conformance checking techniques are applied to quantify two complementary indicators of conformance: fitness and appropriateness. The approach is supported by ProM (a toolset for business process analysis and mining) and has been applied in a setting using multiple Oracle BPEL servers.},
Owner = {hverbeek},
Timestamp = {2008.11.04},
}
• W. M. P. van der Aalst, M. Dumas, C. Ouyang, A. Rozinat, and H. M. W. Verbeek, “Choreography conformance checking: an approach based on bpel and petri nets (extended version),” BPMcenter.org, BPM Center Report BPM-05-25, 2005.
[Bibtex]
@TechReport{Aalst05a,
Title = {Choreography Conformance Checking: An Approach Based on {BPEL} and {Petri} Nets (Extended Version)},
Author = {Aalst, W. M. P. van der and Dumas, M. and Ouyang, C. and Rozinat, A. and Verbeek, H. M. W.},
Institution = {BPMcenter.org},
Year = {2005},
Number = {BPM-05-25},
Type = {BPM Center Report},
Abstract = {Recently, languages such as BPEL and CDL have been proposed to describe the way services can interact from a behavioral perspective. The emergence of these languages heralds an era where richer service descriptions, going beyond WSDL-like interfaces, will be available. However, what can these richer service descriptions serve for? This paper investigates a possible usage of behavioral service descriptions, namely as input for conformance checking. Conformance checking is the act of verifying whether one or more parties stick to the agreed-upon behavior by observing the actual behavior, e.g., the exchange of messages between all parties. This paper shows that it is possible to translate BPEL business protocols to Petri nets and to relate SOAP messages to transitions in the Petri net. As a result, Petri net-based conformance checking techniques can be used to quantify fitness (whether the observed behavior is possible in the business protocol) and appropriateness (whether the observed behavior ``overfits'' or ``underfits'' the business protocol). Moreover, non-conformance can be visualized to pinpoint deviations. The approach has been implemented in the context of the ProM framework.},
Owner = {hverbeek},
Timestamp = {2008.11.04},
}
• W. M. P. van der Aalst, D. Hauschildt, and H. M. W. Verbeek, “A petri-net-based tool to analyze workflows,” in Proceedings of petri nets in system engineering (pnse97), Hamburg, Germany, 1997, pp. 78-90.
[Bibtex]
@InProceedings{Aalst97,
Title = {A {Petri}-net-based Tool to Analyze Workflows},
Author = {Aalst, W. M. P. van der and Hauschildt, D. and Verbeek, H. M. W.},
Booktitle = {Proceedings of Petri Nets in System Engineering (PNSE'97)},
Year = {1997},
Editor = {Farwer, B. and Moldt, D. and Stehr, M. O.},
Month = sep,
Note = {FBI-HH-B-205/97},
Pages = {78--90},
Publisher = {University of Hamburg},
Abstract = {Workflow management systems facilitate the everyday operation of business processes by taking care of the logistic control of work. In contrast to traditional information systems, they attempt to support frequent changes of the workflows at hand. Therefore, the need for analysis methods to verify the correctness of workflows is becoming more prominent. In this paper we present a tool based on Petri nets: Woflan. Woflan (WOrkFLow ANalyzer) is an analysis tool which can be used to verify the correctness of a workflow procedure. The analysis tool uses state-of-the-art techniques to find potential errors in the definition of a workflow procedure.},
Owner = {hverbeek},
Timestamp = {2008.11.04},
Url = {http://www.informatik.uni-hamburg.de/TGI/aktuelles/pnse97/papers/aalst.ps.gz}
}
• W. M. P. van der Aalst, K. M. van Hee, A. H. M. ter Hofstede, N. Sidorova, H. M. W. Verbeek, M. Voorhoeve, and M. T. Wynn, “Soundness of workflow nets with reset arcs is undecidable!,” in Proceedings of the international workshop on concurrency methods issues and applications (china’08), J. Kleijn and M. Koutny, Eds., Xi’an, China: Xidian University, 2008, pp. 57-72.
[Bibtex]
@InCollection{Aalst08b,
Title = {Soundness of Workflow Nets with Reset Arcs is Undecidable!},
Author = {Aalst, W. M. P. van der and Hee, K. M. van and Hofstede, A. H. M. ter and Sidorova, N. and Verbeek, H. M. W. and Voorhoeve, M. and Wynn, M. T.},
Booktitle = {Proceedings of the International Workshop on Concurrency Methods Issues and Applications (CHINA'08)},
Publisher = {Xidian University},
Address = {Xi'an, China},
Year = {2008},
Editor = {Kleijn, J. and Koutny, M.},
Month = jun,
Pages = {57--72},
Abstract = {Petri nets are often used to model and analyze workflows. Many workflow languages have been mapped onto Petri nets in order to provide formal semantics or to verify correctness properties. Typically, the so-called Workflow nets are used to model and analyze workflows and variants of the classical soundness property are used as a correctness notion. Since many workflow languages have cancelation features, a mapping to workflow nets is not always possible. Therefore, it is interesting to consider workflow nets with reset arcs. Unfortunately, soundness is undecidable for workflow nets with reset arcs. In this paper, we provide a proof and insights into the theoretical limits of workflow verification.},
Owner = {hverbeek},
Timestamp = {2008.11.03},
}
• W. M. P. van der Aalst, K. M. van Hee, A. H. M. ter Hofstede, N. Sidorova, H. M. W. Verbeek, M. Voorhoeve, and M. T. Wynn, “Soundness of workflow nets: classiffication, decidability, and analysis,” Formal aspects of computing, vol. 23, iss. 3, pp. 333-363, 2011.
[Bibtex]
@Article{Aalst11,
Title = {Soundness of Workflow Nets: Classification, Decidability, and Analysis},
Author = {Aalst, W. M. P. van der and Hee, K. M. van and Hofstede, A. H. M. ter and Sidorova, N. and Verbeek, H. M. W. and Voorhoeve, M. and Wynn, M. T.},
Journal = {Formal Aspects of Computing},
Year = {2011},
Month = may,
Number = {3},
Pages = {333--363},
Volume = {23},
Abstract = {Workflow nets, a particular class of Petri nets, have become one of the standard ways to model and analyze workflows. Typically, they are used as an abstraction of the workflow that is used to check the so-called soundness property. This property guarantees the absence of livelocks, deadlocks, and other anomalies that can be detected without domain knowledge. Several authors have proposed alternative notions of soundness and have suggested to use more expressive languages, e.g., models with cancellations or priorities. This paper provides an overview of the different notions of soundness and investigates these in the presence of different extensions of workflow nets. We will show that the eight soundness notions described in the literature are decidable for workflow nets. However, most extensions will make all of these notions undecidable. These new results show the theoretical limits of workflow verification. Moreover, we discuss some of the analysis approaches described in the literature.},
Doi = {10.1007/s00165-010-0161-4},
Owner = {hverbeek},
Timestamp = {2010.07.19}
}
• W. M. P. van der Aalst, K. M. van Hee, A. H. M. ter Hofstede, N. Sidorova, H. M. W. Verbeek, M. Voorhoeve, and M. T. Wynn, “Soundness of workflow nets with reset arcs,” Lncs transactions on petri nets and other models of concurrency (topnoc), vol. 5800, pp. 50-70, 2009.
[Bibtex]
@article{Aalst09,
  author    = {Aalst, W. M. P. van der and Hee, K. M. van and Hofstede, A. H. M. ter and Sidorova, N. and Verbeek, H. M. W. and Voorhoeve, M. and Wynn, M. T.},
  title     = {Soundness of Workflow Nets with Reset Arcs},
  journal   = {LNCS Transactions on Petri Nets and Other Models of Concurrency (ToPNoC)},
  year      = {2009},
  volume    = {5800},
  pages     = {50--70},
  abstract  = {Petri nets are often used to model and analyze workflows. Many workflow languages have been mapped onto Petri nets in order to provide formal semantics or to verify correctness properties. Typically, the so-called Workflow nets are used to model and analyze workflows and variants of the classical soundness property are used as a correctness notion. Since many workflow languages have cancelation features, a mapping to workflow nets is not always possible. Therefore, it is interesting to consider workflow nets with reset arcs. Unfortunately, soundness is undecidable for workflow nets with reset arcs. In this paper, we provide a proof and insights into the theoretical limits of workflow verification.},
  doi       = {10.1007/978-3-642-04856-2_3},
  owner     = {hverbeek},
  timestamp = {2009.06.05},
}
• W. M. P. van der Aalst, K. M. van Hee, A. H. M. ter Hofstede, N. Sidorova, H. M. W. Verbeek, M. Voorhoeve, and M. T. Wynn, “Soundness of workflow nets: classification, decidability, and analysis,” BPMcenter.org, BPM Center Report BPM-08-02, 2008.
[Bibtex]
@techreport{Aalst08c,
  author      = {Aalst, W. M. P. van der and Hee, K. M. van and Hofstede, A. H. M. ter and Sidorova, N. and Verbeek, H. M. W. and Voorhoeve, M. and Wynn, M. T.},
  title       = {Soundness of Workflow Nets: Classification, Decidability, and Analysis},
  institution = {BPMcenter.org},
  year        = {2008},
  number      = {BPM-08-02},
  type        = {BPM Center Report},
  abstract    = {Workflow nets, a particular class of Petri nets, have become one of the standard ways to model and analyze workflows. Typically, they are used as an abstraction of the workflow that is used to check the so-called soundness property. This property guarantees the absence of livelocks, deadlocks, and other anomalies that can be detected without domain knowledge. Several authors have proposed alternative notions of soundness and have suggested to use more expressive languages, e.g., models with cancellations or priorities. This paper provides an overview of the different notions of soundness and investigates these in the presence of different extensions of workflow nets. We will show that the eight soundness notions described in the literature are decidable for workflow nets. However, most extensions will make all of these notions undecidable. This nicely shows the theoretical limits of workflow verification. Moreover, we discuss some of the analysis approaches described in the literature.},
  owner       = {hverbeek},
  timestamp   = {2008.11.03},
}
• W. M. P. van der Aalst, A. Hirnschall, and H. M. W. Verbeek, “An alternative way to analyze workflow graphs,” in Conference on advanced information systems (caise) 2002, 2002, pp. 535-552.
[Bibtex]
@InProceedings{Aalst02,
Title = {An Alternative Way to Analyze Workflow Graphs},
Author = {Aalst, W. M. P. van der and Hirnschall, A. and Verbeek, H. M. W.},
Booktitle = {Conference on Advanced Information Systems (CAiSE) 2002},
Year = {2002},
Editor = {Banks Pidduck, A. and {\"O}zsu, T. and Mylopoulos, J. and Woo, C.},
Pages = {535--552},
Publisher = {Springer, Berlin, Germany},
Series = {Lecture Notes in Computer Science},
Volume = {2348},
Abstract = {At the CAiSE conference in Heidelberg in 1999, Wasim Sadiq and Maria Orlowska presented an algorithm to verify workflow graphs. The algorithm uses a set of reduction rules to detect structural conflicts. This paper shows that the set of reduction rules presented by Sadiq and Orlowska is not complete and proposes an alternative algorithm. The algorithm translates workflow graphs into so-called WF-nets. WF-nets are a class of Petri nets tailored towards workflow analysis. As a result, Petri-net theory and tools can be used to verify workflow graphs. In particular, our workflow verification tool Woflan can be used to detect design errors. It is shown that the absence of structural conflicts, i.e., deadlocks and lack of synchronization, conforms to soundness of the corresponding WF-net. In contrast to the algorithm presented by Sadiq and Orlowska, the algorithm presented in this paper is complete. Moreover, the complexity of this alternative algorithm is given.},
Doi = {10.1007/3-540-47961-9_37},
Owner = {hverbeek},
Timestamp = {2008.11.04}
}
• W. M. P. van der Aalst, A. Kalenkova, V. Rubin, and H. M. W. Verbeek, “Process discovery using localized events,” in Petri nets 2015, 2015.
[Bibtex]
@inproceedings{Aalst15,
  author    = {Aalst, W. M. P. van der and Kalenkova, A. and Rubin, V. and Verbeek, H. M. W.},
  title     = {Process Discovery Using Localized Events},
  booktitle = {Petri Nets 2015},
  year      = {2015},
  publisher = {Springer},
  note      = {Accepted for publication},
}
• W. M. P. van der Aalst, A. Kumar, and H. M. W. Verbeek, “Organizational modeling in uml and xml in the context of workflow systems,” in Proceedings of the 18th annual acm symposium on applied computing (sac 2003), 2003, pp. 603-608.
[Bibtex]
@InProceedings{Aalst03,
Title = {Organizational Modeling in {UML} and {XML} in the context of Workflow Systems},
Author = {Aalst, W. M. P. van der and Kumar, A. and Verbeek, H. M. W.},
Booktitle = {Proceedings of the 18th Annual ACM Symposium on Applied Computing (SAC 2003)},
Year = {2003},
Pages = {603--608},
Publisher = {ACM Press},
Abstract = {Workflow technology plays a key role as an enabler in E-Commerce applications, such as supply chains. Until recently the major share of the attention of workflow systems researchers has gone to the exchange of information in cross-organizational processes. Increasingly the focus is shifting from the exchange of data to support for interorganizational workflow processes. One of the initiatives in this direction has been XRL (eXchangeable Routing Language), an extendible instance-based language having an XML syntax and Petri-net semantics. In this paper, we move to the next level by extending XRL with organizational entities, structures, and rules. Hence, we describe an organizational model first in UML and then convert it into an XML DTD. Our organizational model allows for the specification of non-human resources, collections of resources (e.g., departments, teams, etc.), availability of resources, delegation, and role inheritance. Additional features of our proposal are the tight integration of organizational concepts and routing concepts. An important goal of this work is to create standard for organizational modeling much like the X.500 standard for directories.},
Owner = {hverbeek},
Timestamp = {2008.11.04},
Url = {http://portal.acm.org/ft_gateway.cfm?id=952652&type=pdf&dl=ACM&dl=ACM&CFID=14976314&CFTOKEN=70169400}
}
• W. M. P. van der Aalst, J. Mendling, B. F. van Dongen, and H. M. W. Verbeek, Fouten in sap-referentiemodel, 2006.
[Bibtex]
@Misc{Aalst06a,
Title = {Fouten in {SAP}-referentiemodel},
Author = {Aalst, W. M. P. van der and Mendling, J. and Dongen, B. F. van and Verbeek, H. M. W.},
HowPublished = {Automatiseringsgids},
Month = may,
Note = {in Dutch},
Year = {2006},
Abstract = {Onderzoek leert dat er in het SAP R/3-referentiemodel veel fouten zitten. Een zorgwekkende situatie, stellen de auteurs van dit artikel, omdat het model wordt gebruikt voor ondermeer trainingsdoeleinden, systeemselectie en procesverbetering. Fouten in modellen leiden tot falende systemen.},
File = {Automatiseringsgids:http\://www.automatiseringsgids.nl/:URL},
Owner = {hverbeek},
Timestamp = {2008.11.04},
Url = {http://www.automatiseringgids.nl/achtergrond/2006/20/fouten-in-sap-referentiemodel}
}
• W. M. P. van der Aalst, H. A. Reijers, A. J. M. M. Weijters, B. F. van Dongen, A. A. K. de Medeiros, M. Song, and H. M. W. Verbeek, “Business process mining: an industrial application,” Information systems (is), vol. 32, iss. 1, pp. 713-732, 2007.
[Bibtex]
@article{Aalst07b,
  author    = {Aalst, W. M. P. van der and Reijers, H. A. and Weijters, A. J. M. M. and Dongen, B. F. van and Medeiros, A. K. Alves de and Song, M. and Verbeek, H. M. W.},
  title     = {Business Process Mining: An Industrial Application},
  journal   = {Information Systems (IS)},
  year      = {2007},
  volume    = {32},
  number    = {1},
  pages     = {713--732},
  doi       = {10.1016/j.is.2006.05.003},
  keywords  = {Process mining; Social network analysis; Workflow management; Business process management; Business process analysis; Data mining; Petri nets},
  abstract  = {Contemporary information systems (e.g., WfM, ERP, CRM, SCM, and B2B systems) record business events in so-called event logs. Business process mining takes these logs to discover process, control, data, organizational, and social structures. Although many researchers are developing new and more powerful process mining techniques and software vendors are incorporating these in their software, few of the more advanced process mining techniques have been tested on real-life processes. This paper describes the application of process mining in one of the provincial offices of the Dutch National Public Works Department, responsible for the construction and maintenance of the road and water infrastructure. Using a variety of process mining techniques, we analyzed the processing of invoices sent by the various subcontractors and suppliers from three different perspectives: (1) the process perspective, (2) the organizational perspective, and (3) the case perspective. For this purpose, we used some of the tools developed in the context of the ProM framework. The goal of this paper is to demonstrate the applicability of process mining in general and our algorithms and tools in particular.},
  owner     = {hverbeek},
  timestamp = {2008.11.04}
}
• W. M. P. van der Aalst, V. Rubin, H. M. W. Verbeek, B. F. van Dongen, E. Kindler, and C. W. Günther, “Process mining: a two-step approach to balance between underfitting and overfitting,” Software and systems modeling (sosym), vol. 9, iss. 1, pp. 87-111, 2010.
[Bibtex]
@Article{Aalst10,
Title = {Process Mining: A Two-Step Approach to Balance Between Underfitting and Overfitting},
Author = {Aalst, W. M. P. van der and Rubin, V. and Verbeek, H. M. W. and Dongen, B. F. van and Kindler, E. and G\"{u}nther, C. W.},
Journal = {Software and Systems Modeling (SoSyM)},
Year = {2010},
Number = {1},
Pages = {87--111},
Volume = {9},
Abstract = {Process mining includes the automated discovery of processes from event logs. Based on observed events (e.g., activities being executed or messages being exchanged) a process model is constructed. One of the essential problems in process mining is that one cannot assume to have seen all possible behavior. At best, one has seen a representative subset. Therefore, classical synthesis techniques are not suitable as they aim at finding a model that is able to exactly reproduce the log. Existing process mining techniques try to avoid such ``overfitting'' by generalizing the model to allow for more behavior. This generalization is often driven by the representation language and very crude assumptions about completeness. As a result, parts of the model are ``overfitting'' (allow only what has actually been observed) while other parts may be ``underfitting'' (allow for much more behavior without strong support for it). None of the existing techniques enables the user to control the balance between ``overfitting'' and ``underfitting''. To address this, we propose a two-step approach. First, using a configurable approach, a transition system is constructed. Then, using the ``theory of regions'', the model is synthesized. The approach has been implemented in the context of ProM and overcomes many of the limitations of traditional approaches.},
Doi = {10.1007/s10270-008-0106-z},
Owner = {hverbeek},
Timestamp = {2008.11.04}
}
• W. M. P. van der Aalst, V. Rubin, H. M. W. Verbeek, B. F. van Dongen, E. Kindler, and C. W. Günther, “Process mining: a two-step approach to balance between underfitting and overfitting,” BPMcenter.org, BPM Center Report BPM-08-01, 2008.
[Bibtex]
@TechReport{Aalst08,
Title = {Process Mining: A Two-Step Approach to Balance Between Underfitting and Overfitting},
Author = {Aalst, W. M. P. van der and Rubin, V. and Verbeek, H. M. W. and Dongen, B. F. van and Kindler, E. and G\"{u}nther, C. W.},
Institution = {BPMcenter.org},
Year = {2008},
Number = {BPM-08-01},
Type = {BPM Center Report},
Abstract = {Process mining includes the automated discovery of processes from event logs. Based on observed events (e.g., activities being executed or messages being exchanged) a process model is constructed. One of the essential problems in process mining is that one cannot assume to have seen all possible behavior. At best one has seen a representative subset. Therefore, classical synthesis techniques are not suitable as they aim at finding a model that is able to exactly reproduce the log. Existing process mining techniques try to avoid such ``overfitting'' by generalizing the model to allow for more behavior. This generalization is often driven by the representation language and very crude assumptions about completeness. As a result parts of the model are ``overfitting'' (allow only what has actually been observed) while other parts may be ``underfitting'' (allow for much more behavior without strong support for it). None of the existing techniques enables the user to control the balance between ``overfitting'' and ``underfitting''. To address this, we propose a two-step approach. First, using a configurable approach, a transition system is constructed. Then, using the ``theory of regions'', the model is synthesized. The approach has been implemented in the context of ProM and overcomes many of the limitations of traditional approaches.},
Owner = {hverbeek},
Timestamp = {2008.11.03}
}
• W. M. P. van der Aalst and H. M. W. Verbeek, “Process discovery and conformance checking using passages,” Fundamenta informaticae, vol. 131, iss. 1, pp. 103-138, 2014.
[Bibtex]
@Article{Aalst14,
Title = {Process Discovery and Conformance Checking Using Passages},
Author = {Aalst, W. M. P. van der and Verbeek, H. M. W.},
Journal = {Fundamenta Informaticae},
Year = {2014},
Month = mar,
Number = {1},
Pages = {103--138},
Volume = {131},
Abstract = {The two most prominent process mining tasks are process discovery (i.e., learning a process model from an event log) and conformance checking (i.e., diagnosing and quantifying differences between observed and modeled behavior). The increasing availability of event data makes these tasks highly relevant for process analysis and improvement. Therefore, process mining is considered to be one of the key technologies for Business Process Management (BPM). However, as event logs and process models grow, process mining becomes more challenging. Therefore, we propose an approach to decompose process mining problems into smaller problems using the notion of passages. A passage is a pair of two non-empty sets of activities (X;Y) such that the set of direct successors of X is Y and the set of direct predecessors of Y is X. Any Petri net can be partitioned using passages. Moreover, process discovery and conformance checking can be done per passage and the results can be aggregated. This has advantages in terms of efficiency and diagnostics. Moreover, passages can be used to distribute process mining problems over a network of computers. Passages are supported through ProM plug-ins that automatically decompose process discovery and conformance checking tasks.},
Doi = {10.3233/FI-2014-1006},
Owner = {hverbeek},
Timestamp = {2012.12.10}
}
• W. M. P. van der Aalst and H. M. W. Verbeek, “Process discovery and conformance checking using passages,” BPMCenter.org, BPM Center report BPM-12-21, 2012.
[Bibtex]
@TechReport{Aalst12a,
Title = {Process Discovery and Conformance Checking Using Passages},
Author = {Aalst, W. M. P. van der and Verbeek, H. M. W.},
Institution = {BPMcenter.org},
Year = {2012},
Number = {BPM-12-21},
Type = {BPM Center Report},
Abstract = {The two most prominent process mining tasks are process discovery (i.e., learning a process model from an event log) and conformance checking (i.e., diagnosing and quantifying differences between observed and modeled behavior). The increasing availability of event data makes these tasks highly relevant for process analysis and improvement. Therefore, process mining is considered to be one of the key technologies for Business Process Management (BPM). However, as event logs and process models grow, process mining becomes more challenging. Therefore, we propose an approach to decompose process mining problems into smaller problems using the notion of passages. A passage is a pair of two non-empty sets of activities (X; Y ) such that the set of direct successors of X is Y and the set of direct predecessors of Y is X. Any Petri net can be partitioned using passages. Moreover, process discovery and conformance checking can be done per passage and the results can be aggregated. This has advantages in terms of efficiency and diagnostics. Moreover, passages can be used to distribute process mining problems over a network of computers. Passages are supported through ProM plug-ins that automatically decompose process discovery and conformance checking tasks.},
Owner = {hverbeek},
Timestamp = {2013.01.16}
}
• W. M. P. van der Aalst and H. M. W. Verbeek, “Process mining in web services: the websphere case,” Ieee bulletin of the technical committee on data engineering (tcde), vol. 31, iss. 3, pp. 46-49, 2008.
[Bibtex]
@Article{Aalst08d,
Title = {Process Mining in Web Services: The {WebSphere} Case},
Author = {Aalst, W. M. P. van der and Verbeek, H. M. W.},
Journal = {IEEE Bulletin of the Technical Committee on Data Engineering (TCDE)},
Year = {2008},
Number = {3},
Pages = {46--49},
Volume = {31},
Abstract = {Process mining has emerged as a way to discover or check the conformance of processes based on event logs. This enables organizations to learn from processes as they really take place. Since web services are distributed over autonomous parties, it is vital to monitor the correct execution of processes. Fortunately, the ``web services stack'' assists in collecting structured event logs. This information can be used to extract new information about service processes (e.g., bottlenecks, unused paths, etc.) and to check the conformance (e.g., deviations from some predefined process). In this paper, we illustrate the potential of process mining in the context of web services. In particular, we show what a process mining tool like ProM can contribute in IBM's WebSphere environment.},
Owner = {hverbeek},
Timestamp = {2008.11.03},
Url = {http://sites.computer.org/debull/A08Sept/aalst.pdf}
}
• W. M. P. van der Aalst, H. M. W. Verbeek, and A. Kumar, “Xrl/woflan: verification of an xml/petri-net based language for inter-organizational workflows,” in Proceedings of the 6th informs conference on information systems and technology (cist-2001), 2001, pp. 30-45.
[Bibtex]
@InProceedings{Aalst01,
Title = {{XRL}/{Woflan}: Verification of an {XML}/{Petri}-net Based Language for Inter-organizational Workflows},
Author = {Aalst, W. M. P. van der and Verbeek, H. M. W. and Kumar, A.},
Booktitle = {Proceedings of the 6th Informs Conference on Information Systems and Technology (CIST-2001)},
Year = {2001},
Editor = {Altinkemer, K. and Chari, K.},
Pages = {30--45},
Publisher = {Informs, Linthicum, MD},
Abstract = {Internet-based technology, E-commerce, and the rise of networked virtual enterprises have fueled the need for inter-organizational workflows. Although XML allows trading partners to exchange information, it cannot be used to coordinate activities in different organizational entities. Therefore, we developed a workflow language named XRL (eXchangeable Routing Language) for supporting cross-organizational processes. XRL uses XML for the representation of process definitions and Petri nets for its semantics. Since XRL is instance-based, workflow definitions can be changed on the fly and sent across organizational boundaries. These features are vital for today's dynamic and networked economy. However, these features make cross-organizational workflows susceptible to errors. In this paper, we present XRL/Woflan, a software tool using state-of-the-art Petri-net analysis techniques for verifying XRL workflows. The tool uses XSL (Extensible Style Language) Transformations (called XSLT) to translate XRL specifications to a specific class of Petri nets called workflow nets. The Petri-net representation is used to determine whether the workflow is correct. If the workflow is not correct, anomalies such as deadlocks and livelocks are reported. This approach also makes XRL extensible. Therefore, new, application-specific workflow patterns can be created and incorporated into XRL by expressing their semantics in XSLT.},
File = {6th INFORMS Conference on Information Systems and Technology (CIST-2001):http\://www.mgmt.purdue.edu/faculty/kemal/cist_MIAMI.htm:URL;Preprint of published paper:http\://www.win.tue.nl/~hverbeek/downloads/preprints/Aalst01.pdf:PDF},
Owner = {hverbeek},
Timestamp = {2008.11.04}
}
• W. M. P. van der Aalst, H. M. W. Verbeek, and A. Kumar, “Verification of xrl: an xml-based workflow language,” in Proceedings of the sixth international conference on cscw in design (cscwd 2001), 2001, pp. 427-432.
[Bibtex]
@InProceedings{Aalst01a,
Title = {Verification of {XRL}: An {XML}-based Workflow Language},
Author = {Aalst, W. M. P. van der and Verbeek, H. M. W. and Kumar, A.},
Booktitle = {Proceedings of the Sixth International Conference on CSCW in Design (CSCWD 2001)},
Year = {2001},
Editor = {Shen, W. and Lin, Z. and Barths, J.-P. and Kamel, M.},
Month = jul,
Pages = {427--432},
Abstract = {XRL (eXchangeable Routing Language) is an instance-based workflow language that uses XML for the representation of process definitions and Petri nets for its semantics. Since XRL is instance-based, workflow definitions can be changed on the fly and sent across organizational boundaries. These features are vital for today's dynamic and networked economy. However, the features also enable subtle, but highly disruptive, cross-organizational errors. On-the-fly changes and one-of-a-kind processes are destined to result in errors. Moreover, errors of a cross-organizational nature are difficult to repair. In this paper, we show soundness properties of XRL constructs by using a novel, constructive approach. We also describe a software tool based on XML and Petri-net technologies for verifying XRL workflows.},
File = {Sixth International Conference on CSCW in Design (CSCWD 2001):http\://www.cscwid.org/cscwd2001/:URL;Preprint of published paper:http\://www.win.tue.nl/~hverbeek/downloads/preprints/Aalst01a.pdf:PDF},
Owner = {hverbeek},
Timestamp = {2008.11.04}
}
• G. Acampora, A. Vitiello, B. Di Stefano, W. M. P. van der Aalst, C. W. Günther, and H. M. W. Verbeek, “IEEE 1849TM: The XES Standard: The Second IEEE Standard Sponsored by IEEE Computational Intelligence Society,” IEEE Computational Intelligence Magazine, pp. 4-8, 2017.
[Bibtex]
@Article{Acampora17,
Title = {{IEEE} 1849{\texttrademark}: The {XES} Standard: The Second {IEEE} Standard Sponsored by {IEEE} Computational Intelligence Society},
Author = {Acampora, G. and Vitiello, A. and Di Stefano, B. and Aalst, W. M. P. van der and G\"{u}nther, C. W. and Verbeek, H. M. W.},
Journal = {{IEEE} Computational Intelligence Magazine},
Year = {2017},
Month = may,
Pages = {4--8},
Doi = {10.1109/MCI.2017.2670420},
Owner = {hverbeek},
Timestamp = {2017.04.18}
}
• P. Barborka, L. Helm, G. Koldorfer, J. Mendling, G. Neumann, H. M. W. Verbeek, B. F. van Dongen, and W. M. P. van der Aalst, “Integration of epc-related tools with prom,” in Proceedings of fifth workshop on event-driven process chains (wi-epk 2006), Vienna, Austria, 2006, pp. 105-120.
[Bibtex]
@InProceedings{Barborka06,
Title = {Integration of {EPC}-related Tools with {ProM}},
Author = {Barborka, P. and Helm, L. and Koldorfer, G. and Mendling, J. and Neumann, G. and Verbeek, H. M. W. and Dongen, B. F. van and Aalst, W. M. P. van der},
Booktitle = {Proceedings of Fifth Workshop on Event-Driven Process Chains (WI-EPK 2006)},
Year = {2006},
Editor = {N\"{u}ttgens, M. and Rump, F. J. and Mendling, J.},
Month = dec,
Pages = {105--120},
Publisher = {Gesellschaft fuer Informatik, Bonn},
Abstract = {The heterogeneity of different formats for EPCs is a major problem for model interchange between specialized tools in practice. In this paper, we compare three different formats for storing EPCs, in particular, the proprietary formats of Microsoft Visio and ARIS Toolset as well as the tool-independent EPML format. Furthermore, we introduce the ProM framework and show using the case of a sales process model expressed in terms of an EPC that ProM is able to serve as a mediator between these formats. Beyond that, we demonstrate that the ProM framework can be used for the analysis of EPCs and to translate EPCs into YAWL models for execution or for further analysis.},
Owner = {hverbeek},
Timestamp = {2008.11.04}
}
• J. C. R. P. Bose, H. M. W. Verbeek, and W. M. P. van der Aalst, “Discovering hierarchical process models using prom,” in Caise forum 2011, London, UK, 2011, pp. 33-40.
[Bibtex]
@InProceedings{Bose11,
Title = {Discovering Hierarchical Process Models Using {ProM}},
Author = {Bose, R. P. Jagadeesh Chandra and Verbeek, H. M. W. and Aalst, W. M. P. van der},
Booktitle = {CAiSE Forum 2011},
Year = {2011},
Editor = {Nurcan, S.},
Month = jun,
Pages = {33--40},
Publisher = {CEUR-WS.org},
Series = {CEUR-WS},
Volume = {734},
Abstract = {Process models can be seen as "maps" describing the operational processes of organizations. Traditional process discovery algorithms have problems dealing with fine-grained event logs and less-structured processes. The discovered models (i.e., "maps") are spaghettilike and are difficult to comprehend or even misleading. One of the reasons for this can be attributed to the fact that the discovered models are flat (without any hierarchy). In this paper, we demonstrate the discovery of hierarchical process models using a set of interrelated plugins implemented in ProM. The hierarchy is enabled through the automated discovery of abstractions (of activities) with domain significance.},
Owner = {hverbeek},
Timestamp = {2011.05.13},
Url = {http://ceur-ws.org/Vol-734/PaperDemo05.pdf}
}
• P. M. Dixit, J. C. A. M. Buijs, H. M. W. Verbeek, and W. M. P. van der Aalst, “Fast incremental conformance analysis for interactive process discovery,” in Business information systems – 21st international conference, bis 2018, proceedings, 2018, pp. 163-175.
[Bibtex]
@InProceedings{Dixit18,
Title = {Fast Incremental Conformance Analysis for Interactive Process Discovery},
Author = {Dixit, P. M. and Buijs, J. C. A. M. and Verbeek, H. M. W. and Aalst, W. M. P. van der},
Booktitle = {Business Information Systems - 21st International Conference, BIS 2018, Proceedings},
Year = {2018},
Editor = {Abramowicz, W. and Paschke, A.},
Pages = {163--175},
Publisher = {Springer},
Series = {LNBIP},
Volume = {320},
Abstract = {Interactive process discovery allows users to specify domain knowledge while discovering process models with the help of event logs. Typically the coherence of an event log and a process model is calculated using conformance analysis. Many state-of-the-art conformance techniques emphasize on the correctness of the results, and hence can be slow, impractical and undesirable in interactive process discovery setting, especially when the process models are complex. In this paper, we present a framework (and its application) to calculate conformance fast enough to guide the user in interactive process discovery. The proposed framework exploits the underlying techniques used for interactive process discovery in order to incrementally update the conformance results. We trade the accuracy of conformance for performance. However, the user is also provided with some diagnostic information, which can be useful for decision making in an interactive process discovery setting. The results show that our approach can be considerably faster than the traditional approaches and hence better suited in an interactive setting.},
Doi = {10.1007/978-3-319-93931-5_12},
Owner = {hverbeek},
Timestamp = {2018.03.27},
Url = {http://www.scopus.com/inward/record.url?scp=85050635542&partnerID=8YFLogxK}
}
• P. M. Dixit, H. M. W. Verbeek, and W. M. P. van der Aalst, “Fast conformance analysis based on activity log abstraction,” in 2018 ieee 22nd international enterprise distributed object computing conference, proceedings, 2018, pp. 135-144.
[Bibtex]
@InProceedings{Dixit18b,
Title = {Fast Conformance Analysis based on Activity Log Abstraction},
Author = {Dixit, P. M. and Verbeek, H. M. W. and Aalst, W. M. P. van der},
Booktitle = {2018 IEEE 22nd International Enterprise Distributed Object Computing Conference, Proceedings},
Year = {2018},
Editor = {Nurcan, Selmin and Johnson, Pontus},
Month = oct,
Pages = {135--144},
Publisher = {IEEE},
Abstract = {Process mining techniques focus on bridging the gap between activity logs and business process management. Process discovery is a sub-field of process mining which uses activity logs in order to discover process models. Some process discovery techniques, such as interactive process discovery and genetic algorithms, rely on the so-called conformance analysis. In such techniques, process models are discovered in an incremental way, and the quality of the process models is quantified by the results of conformance analysis. State-of-the-art conformance analysis techniques are typically optimized and devised for one-time use. However, in process discovery settings which are incremental in nature, it is imperative to have fast conformance analysis. Moreover, the activity logs used for conformance analysis at each stage remain the same. In this paper, we propose an approach that exploits this fact in order to expedite conformance analysis by approximating the conformance results. We use an abstracted version of an activity log, which can be used to compare with the changing (or new) process models in an incremental process discovery setting. Our results show that the proposed technique is able to outperform traditional conformance techniques in terms of performance by approximating conformance scores.},
Doi = {10.1109/EDOC.2018.00026},
Owner = {hverbeek},
Timestamp = {2018.07.02},
Url = {https://ieeexplore.ieee.org/stamp/stamp.jsp?tp=&arnumber=8536157}
}
• P. M. Dixit, H. M. W. Verbeek, and W. M. P. van der Aalst, “Incremental computation of synthesis rules for free-choice petri nets,” in Formal aspects of component software – 15th international conference, facs 2018, proceedings, 2018, pp. 97-117.
[Bibtex]
@InProceedings{Dixit18c,
Title = {Incremental Computation of Synthesis Rules for Free-Choice {Petri} Nets},
Author = {Dixit, P. M. and Verbeek, H. M. W. and Aalst, W. M. P. van der},
Booktitle = {Formal Aspects of Component Software - 15th International Conference, FACS 2018, Proceedings},
Year = {2018},
Editor = {\"{O}lveczky, P. C. and Bae, K.},
Pages = {97--117},
Publisher = {Springer},
Series = {LNCS},
Volume = {11222},
Abstract = {In this paper, we propose a novel approach that calculates all the possible applications of synthesis rules, for well-formed free-choice Petri nets, in a speedy way to enable an interactive editing system. The proposed approach uses a so-called incremental synthesis structure, which can be used to extract all the synthesis rules, corresponding to a given net. Furthermore, this structure is updated incrementally, i.e. after usage of a synthesis rule, to obtain the incremental synthesis structure of the newly synthesized net. We prove that the proposed approach is correct and complete in order to synthesize any well-formed free-choice Petri net, starting with an initial well-formed atomic net and the corresponding incremental synthesis structure. A variant of the proposed approach has been implemented that allows interactive modeling (discovery) of sound business processes (from event logs). Experimental results show that the proposed approach is fast, and outperforms the baseline, and hence is well-suited for enabling interactive synthesis of very large nets.},
Comment = {Accepted for publication},
Owner = {hverbeek},
Timestamp = {2018.08.09}
}
• P. M. Dixit, H. M. W. Verbeek, J. C. A. M. Buijs, and W. M. P. v. d. Aalst, “Interactive data-driven process model construction,” in 37th international conference on conceptual modeling, er 2018, proceedings, 2018, pp. 251-265.
[Bibtex]
@InProceedings{Dixit18a,
Title = {Interactive Data-driven Process Model Construction},
Author = {Dixit, P. M. and Verbeek, H. M. W. and Buijs, J. C. A. M. and Aalst, W. M. P. van der},
Booktitle = {37th International Conference on Conceptual Modeling, ER 2018, Proceedings},
Year = {2018},
Editor = {Li, Z. and Trujillo, J. C. and Du, X. and Lee, M. L. and Davis, K. C. and Ling, T. W. and Li, G.},
Month = oct,
Pages = {251--265},
Publisher = {Springer},
Series = {LNCS},
Volume = {11157},
Abstract = {Process discovery algorithms address the problem of learning process models from event logs. Typically, in such settings a user's activity is limited to configuring the parameters of the discovery algorithm, and hence the user expertise/domain knowledge can not be incorporated during traditional process discovery. In a setting where the event logs are noisy, incomplete and/or contain uninteresting activities, the process models discovered by discovery algorithms are often inaccurate and/or incomprehensible. Furthermore, many of these automated techniques can produce unsound models and/or cannot discover duplicate activities, silent activities etc. To overcome such shortcomings, we introduce a new concept to interactively discover a process model, by combining a user's domain knowledge with the information from the event log. The discovered models are always sound and can have duplicate activities, silent activities etc. An objective evaluation and a case study shows that the proposed approach can outperform traditional discovery techniques.},
Comment = {Accepted for publication},
Doi = {10.1007/978-3-030-00847-5_19},
Owner = {hverbeek},
Timestamp = {2018.06.18}
}
• B. F. van Dongen, M. H. Jansen-Vullers, H. M. W. Verbeek, and W. M. P. van der Aalst, “Verification of the sap reference models using epc reduction, state space analysis, and invariants,” Computers in industry (cii), vol. 58, iss. 6, pp. 578-601, 2007.
[Bibtex]
@Article{Dongen07,
Title = {Verification of the {SAP} Reference Models using {EPC} Reduction, State Space Analysis, and Invariants},
Author = {Dongen, B. F. van and Jansen-Vullers, M. H. and Verbeek, H. M. W. and Aalst, W. M. P. van der},
Journal = {Computers in Industry (CiI)},
Year = {2007},
Number = {6},
Pages = {578--601},
Volume = {58},
Abstract = {A reference model is a generic conceptual model that formalizes recommended practices for a certain domain. Today, the SAP reference models are among the most comprehensive reference models, including over 4000 entity types and covering over 1000 business processes and inter-organizational scenarios. The SAP reference models use Event-driven Process Chains (EPCs) to model these processes and scenarios. Like other informal languages, EPCs are intended to support the transition from a business model to an executable model. For this reason, researchers have tried to formalize the semantics of EPCs. However, in their approaches, they fail to acknowledge the fact that in EPCs constructs exist that require human judgment to assess correctness. This paper aims to acknowledge this fact by introducing a two-step approach. First, the EPC is reduced using universally accepted reduction rules. Second, the reduced EPC is analyzed using a mixture of state-space analysis, invariants, and human judgment. This approach has been implemented in a tool, and applying this tool to the SAP reference models showed that these contain errors, which clearly shows the added value of this verification approach.},
Doi = {10.1016/j.compind.2007.01.001},
Owner = {hverbeek},
Timestamp = {2008.11.04}
}
• B. F. van Dongen, J. van Luin, and H. M. W. Verbeek, “Process mining in a multi-agent auctioning system,” in Fourth international workshop on modelling of objects, components, and agents, moca 2006, Turku, Finland, 2006, pp. 145-160.
[Bibtex]
@InProceedings{Dongen06,
Title = {Process Mining in a Multi-Agent Auctioning System},
Author = {Dongen, B. F. van and Luin, J. van and Verbeek, H. M. W.},
Booktitle = {Fourth International Workshop on Modelling of Objects, Components, and Agents, MOCA 2006},
Year = {2006},
Editor = {Moldt, D.},
Month = jun,
Note = {FBI-HH-B-272/06},
Pages = {145--160},
Publisher = {Department of Computer Science, University of Hamburg},
Series = {Bericht},
Volume = {272},
Abstract = {Both process mining and multi agent simulations are relatively new research areas, both evolving rapidly. In multi agent simulations, techniques are being developed to make agents more intelligent and more adaptive to their environment. In process mining, the focus has shifted from discovering a complete process model from an execution log, to the development of all kind of analysis techniques related to processes. In this paper, we combine the two research areas by showing that process mining techniques, applied to the communication logs of multi-agent simulations, can help in understanding the behaviour of agents. Furthermore, we show that process mining could actually help agents to adapt to their environment, by enabling them to discover why another agent makes certain decisions instead of just discovering that it made a decision.},
Owner = {hverbeek},
Timestamp = {2008.11.04}
}
• B. F. van Dongen, A. A. K. de Medeiros, H. M. W. Verbeek, A. J. M. M. Weijters, and W. M. P. van der Aalst, “The prom framework: a new era in process mining tool support,” in Application and theory of petri nets 2005, Miami, Florida, 2005, pp. 444-454.
[Bibtex]
@InProceedings{Dongen05,
Title = {The {ProM} framework: A new era in process mining tool support},
Author = {Dongen, B. F. van and Medeiros, A. K. Alves de and Verbeek, H. M. W. and Weijters, A. J. M. M. and Aalst, W. M. P. van der},
Booktitle = {Application and Theory of {Petri} Nets 2005},
Year = {2005},
Editor = {Ciardo, G. and Darondeau, P.},
Month = jun,
Pages = {444--454},
Publisher = {Springer, Berlin, Germany},
Series = {Lecture Notes in Computer Science},
Volume = {3536},
Doi = {10.1007/11494744_25},
Owner = {hverbeek},
Timestamp = {2008.11.04}
}
• B. F. van Dongen, H. M. W. Verbeek, and W. M. P. van der Aalst, “Verification of epcs: using reduction rules and petri nets,” in Advanced information systems engineering: 17th international conference (caise 2005), Porto, Portugal, 2005, pp. 372-386.
[Bibtex]
@InProceedings{Dongen05a,
Title = {Verification of EPCs: Using reduction rules and Petri nets},
Author = {Dongen, B. F. van and Verbeek, H. M. W. and Aalst, W. M. P. van der},
Booktitle = {Advanced Information Systems Engineering: 17th International Conference (CAiSE 2005)},
Year = {2005},
Editor = {Pastor, O. and Falc{\~a}o e Cunha, J.},
Month = {June},
Pages = {372--386},
Publisher = {Springer, Berlin, Germany},
Series = {Lecture Notes in Computer Science},
Volume = {3520},
Abstract = {Designing business models is a complicated and error prone task. On the one hand, business models need to be intuitive and easy to understand. On the other hand, ambiguities may lead to different interpretations and false consensus. Moreover, to configure process-aware information systems (e.g., a workflow system), the business model needs to be transformed into an executable model. Event-driven Process Chains (EPCs), but also other informal languages, are intended as a language to support the transition from a business model to an executable model. Many researchers have assigned formal semantics to EPCs and are using these semantics for execution and verification. In this paper, we use a different tactic. We propose a two-step approach where first the informal model is reduced and then verified in an interactive manner. This approach acknowledges that some constructs are correct or incorrect no matter what interpretation is used and that the remaining constructs require human judgment to assess correctness. This paper presents a software tool that supports this two-step approach and thus allows for the verification of real-life EPCs as illustrated by two case studies.},
Doi = {10.1007/11431855_26},
Owner = {hverbeek},
Timestamp = {2008.11.04},
}
• C. W. Günther and H. M. W. Verbeek, “XES standard definition,” BPM-14-09, 2014.
[Bibtex]
@TechReport{Guenther14,
Title = {{XES} -- Standard Definition},
Author = {G\"{u}nther, C. W. and Verbeek, H. M. W.},
Institution = {BPMcenter.org},
Year = {2014},
Number = {BPM-14-09},
Type = {BPM Center Report},
}
• H. S. Garcia Caballero, M. A. Westenberg, H. M. W. Verbeek, and W. M. P. van der Aalst, “Visual analytics for soundness verification of process models,” in Bpm 2017 international workshops, barcelona, spain, september 10-11, 2017, revised papers, E. Teniente and M. Weidlich, Eds., Springer, 2018, vol. 308, pp. 744-756.
[Bibtex]
@InCollection{GarciaCaballero18,
Title = {Visual analytics for soundness verification of process models},
Author = {Garcia Caballero, H. S. and Westenberg, M. A. and Verbeek, H. M. W. and Aalst, W. M. P. van der},
Booktitle = {BPM 2017 International Workshops, Barcelona, Spain, September 10-11, 2017, Revised Papers},
Publisher = {Springer},
Year = {2018},
Editor = {Teniente, E. and Weidlich, M.},
Pages = {744--756},
Series = {Lecture Notes in Business Information Processing},
Volume = {308},
Abstract = {Soundness validation of process models is a complex task for process modelers due to all the factors that must be taken into account. Although there are tools to verify this property, they do not provide users with easy information on where soundness starts breaking and under which conditions. Providing insights such as states in which problems occur, involved activities, or paths leading to those states, is crucial for process modelers to better understand why the model is not sound. In this paper we address the problem of validating the soundness property of a process model by using a novel visual approach and a new tool called PSVis (Petri net Soundness Visualization) supporting this approach. The PSVis tool aims to guide expert users through the process models in order to get insights into the problems that cause the process to be unsound.},
Doi = {10.1007/978-3-319-74030-0_59},
Owner = {hverbeek},
Timestamp = {2019.05.15}
}
• H. S. Garcia Caballero, M. A. Westenberg, H. M. W. Verbeek, and W. M. P. van der Aalst, “Visual analytics for soundness verification of process models,” in Proceedings of TAProViz 2017, 2017.
[Bibtex]
@InProceedings{GarciaCaballero17,
Title = {Visual Analytics for Soundness Verification of Process Models},
Author = {Garcia Caballero, H. S. and Westenberg, M. A. and Verbeek, H. M. W. and Aalst, W. M. P. van der},
Booktitle = {Proceedings of {TAProViz} 2017},
Year = {2017},
Note = {Accepted for publication},
Owner = {hverbeek},
Timestamp = {2017.07.06},
Url = {http://www.win.tue.nl/~hverbeek/wp-content/papercite-data/pdf/garciacaballero17.pdf}
}
• F. Gottschalk, W. M. P. van der Aalst, M. H. Jansen-Vullers, and H. M. W. Verbeek, “Protos2cpn: using colored petri nets for configuring and testing business processes,” in Proceedings of the seventh workshop on the practical use of coloured petri nets and cpn tools (cpn 2006), Aarhus, Denmark, 2006, pp. 137-156.
[Bibtex]
@InProceedings{Gottschalk06,
Title = {Protos2CPN: Using Colored Petri Nets for Configuring and Testing Business Processes},
Author = {Gottschalk, F. and Aalst, W. M. P. van der and Jansen-Vullers, M. H. and Verbeek, H. M. W.},
Booktitle = {Proceedings of the Seventh Workshop on the Practical Use of Coloured Petri Nets and CPN Tools (CPN 2006)},
Year = {2006},
Editor = {Jensen, K.},
Month = {October},
Pages = {137--156},
Publisher = {University of Aarhus},
Series = {DAIMI},
Volume = {579},
Abstract = {Protos is a popular tool for business process modelling used in more than 1500 organizations. It has a built-in Petri-net-based simulation engine which shows key performance indicators for the modelled processes. Reference process models offered for Protos reduce modelling efforts by providing generic solutions which only need to be adapted to individual requirements. However, the user can neither inspect or interact with simulations running in Protos, nor does Protos provide any explicit support for the adaptation of reference models. Hence, we aim at a more open and configurable simulation solution. To realize this we provide two transformations from Protos models to colored Petri nets (CPNs), which can be executed by CPN Tools. The first transformation enables the usage of the extensive simulation and measuring features of CPN Tools for the simulation of Protos models. The second transformation creates colored Petri nets with dedicated features for process configuration. Such configurable process models can be restricted directly within the process model without changing the model’s structure and provide therefore dedicated adaptation features for Protos’ reference process models.},
Owner = {hverbeek},
Timestamp = {2008.11.04},
}
• F. Gottschalk, W. M. P. van der Aalst, M. H. Jansen-Vullers, and H. M. W. Verbeek, “Protos2cpn: using colored petri nets for configuring and testing business processes,” International journal on software tools for technology transfer (sttt), vol. 10, iss. 1, pp. 95-110, 2008.
[Bibtex]
@Article{Gottschalk08,
Title = {Protos2CPN: using colored Petri nets for configuring and testing business processes},
Author = {Gottschalk, F. and Aalst, W. M. P. van der and Jansen-Vullers, M. H. and Verbeek, H. M. W.},
Journal = {International Journal on Software Tools for Technology Transfer (STTT)},
Year = {2008},
Number = {1},
Pages = {95--110},
Volume = {10},
Abstract = {Protos is a popular tool for business process modelling used in more than 1500 organizations. It has a built-in Petri-net-based simulation engine which shows key performance indicators for the modelled processes. Reference process models offered for Protos reduce modelling efforts by providing generic solutions which only need to be adapted to individual requirements. However, the user can neither inspect or interact with simulations running in Protos, nor does Protos provide any explicit support for the adaptation of reference models. Hence, we aim at a more open and configurable simulation solution. To realize this we provide two transformations from Protos models to colored Petri nets (CPNs), which can be executed by CPN Tools. The first transformation enables the usage of the extensive simulation and measuring features of CPN Tools for the simulation of Protos models. The second transformation creates colored Petri nets with dedicated features for process configuration. Such configurable process models can be restricted directly within the process model without changing the model’s structure and provide therefore dedicated adaptation features for Protos’ reference process models.},
Doi = {10.1007/s10009-007-0055-9},
Owner = {hverbeek},
Timestamp = {2008.11.03},
}
• D. Hauschildt, H. M. W. Verbeek, and W. M. P. van der Aalst, “Woflan: a petri-net-based workflow analyzer,” Eindhoven University of Technology, Eindhoven, The Netherlands, Computing Science Reports 97/12, 1997.
[Bibtex]
@TechReport{Hauschildt97,
Title = {WOFLAN: a Petri-net-based Workflow Analyzer},
Author = {Hauschildt, D. and Verbeek, H. M. W. and Aalst, W. M. P. van der},
Institution = {Eindhoven University of Technology},
Year = {1997},
Number = {97/12},
Type = {Computing Science Reports},
Abstract = {Workflow management systems facilitate the everyday operation of business processes by taking care of the logistic control of work. In contrast to traditional information systems, the attempt to support frequent changes of the workflows at hand. Therefore, the need for analysis methods to verify the correctness of workflows is becoming more prominent. In this monograph we present a tool based on Petri nets: Woflan. Woflan (WOrkFLow ANalyser) is an analysis tool which can be used to verify the correctness of a workflow procedure. The analysis tool uses state-of-the-art techniques to find potential errors in the definition of a workflow procedure.},
Owner = {hverbeek},
Timestamp = {2008.11.04},
Url = {http://alexandria.tue.nl/extra1/wskrap/publichtml/9715985.pdf}
}
• K. M. van Hee, H. A. Reijers, H. M. W. Verbeek, and L. Zerguini, “On the optimal allocation of resources in stochastic workflow nets,” in Proceedings of the seventeenth uk performance engineering workshop, 2001, pp. 23-34.
[Bibtex]
@InProceedings{Hee01,
Title = {On the Optimal Allocation of Resources in Stochastic Workflow Nets},
Author = {Hee, K. M. van and Reijers, H. A. and Verbeek, H. M. W. and Zerguini, L.},
Booktitle = {Proceedings of the Seventeenth UK Performance Engineering Workshop},
Year = {2001},
Editor = {Djemame, K. and Kara, M.},
Pages = {23--34},
Publisher = {Print Services University of Leeds, Leeds},
Abstract = {Stochastic workflow nets are used for modelling and analysing business processes. For a specific subclass, a marginal allocation strategy of resources is presented that minimises the mean sojourn time of individual cases. A popular alternative allocation strategy is shown to be sub-optimal.},
Owner = {hverbeek},
Timestamp = {2008.11.04},
}
• K. M. van Hee, N. Sidorova, C. Stahl, and H. M. W. Verbeek, “A price of service in a compositional soa framework,” Eindhoven University of Technology, Computer Science Report 07/16, 2007.
[Bibtex]
@TechReport{Hee07,
Title = {A Price of Service in a Compositional SOA Framework},
Author = {Hee, K. M. van and Sidorova, N. and Stahl, C. and Verbeek, H. M. W.},
Institution = {Eindhoven University of Technology},
Year = {2007},
Number = {07/16},
Type = {Computer Science Report},
Abstract = {In this paper we propose a framework for SOA covering such important features as proper termination (soundness) and correct correlation of tasks. Within this framework, we define a method for the calculation of the price of services. Our framework is compositional in the sense that composing a system from subsystems that meet our correctness requirements we obtain a system that still meets these requirements.},
Owner = {hverbeek},
Timestamp = {2008.11.04},
Url = {http://alexandria.tue.nl/repository/books/631702.pdf}
}
• K. M. van Hee, H. M. W. Verbeek, C. Stahl, and N. Sidorova, “A framework for linking and pricing no-cure-no-pay services,” Lncs transactions on petri nets and other models of concurrency (topnoc) ii, vol. 5460, pp. 192-207, 2009.
[Bibtex]
@Article{Hee09,
Title = {A Framework for Linking and Pricing No-Cure-No-Pay Services},
Author = {Hee, K. M. van and Verbeek, H. M. W. and Stahl, C. and Sidorova, N.},
Journal = {LNCS Transactions on Petri Nets and Other Models of Concurrency (ToPNoC) II},
Year = {2009},
Pages = {192--207},
Volume = {5460},
Abstract = {In this paper, we present a framework that allows us to orchestrate web services such that the web services involved in this orchestration interact properly. To achieve this, we predefine service interfaces and certain routing constructs. Furthermore, we define a number of rules to incrementally compute the price of such a properly interacting orchestration (i.e. a web service) from the price of its web services. The fact that a web service gets only payed after its service is delivered (no-cure-no-pay) is reflected by considering a probability of success. To determine a safe price that includes the \emph{risk} a web service takes, we consider the variance of costs.},
Doi = {10.1007/978-3-642-00899-3_11},
Owner = {hverbeek},
Timestamp = {2008.11.03},
}
• K. M. van Hee, H. M. W. Verbeek, C. Stahl, and N. Sidorova, “A framework for linking and pricing no-cure-no-pay services,” Eindhoven University of Technology, Computer Science Report 08/19, 2008.
[Bibtex]
@TechReport{Hee08,
Title = {A Framework for Linking and Pricing No-Cure-No-Pay Services},
Author = {Hee, K. M. van and Verbeek, H. M. W. and Stahl, C. and Sidorova, N.},
Institution = {Eindhoven University of Technology},
Year = {2008},
Number = {08/19},
Type = {Computer Science Report},
Abstract = {In this paper, we present a framework that allows us to orchestrate web services such that the web services involved in this orchestration interact properly. To achieve this, we predefine service interfaces and certain routing constructs. Furthermore, we define a number of rules to incrementally compute the price of such a properly interacting orchestration (i.e. a web service) from the price of its web services. The fact that a web service gets only payed after its service is delivered (no-cure-no-pay) is reflected by considering a probability of success. To determine a safe price that includes the risk a web service takes, we consider the variance of costs.},
Owner = {hverbeek},
Timestamp = {2008.11.03},
Url = {http://alexandria.tue.nl/repository/books/636538.pdf}
}
• B. F. A. Hompes, H. M. W. Verbeek, and W. M. P. van der Aalst, “Finding suitable activity clusters for decomposed process discovery,” in Simpda 2014 post-proceedings, 2015.
[Bibtex]
@InProceedings{Hompes15,
Title = {Finding Suitable Activity Clusters for Decomposed Process Discovery},
Author = {Hompes, B. F. A. and Verbeek, H. M. W. and Aalst, W. M. P. van der},
Booktitle = {SIMPDA 2014 Post-proceedings},
Year = {2015},
Note = {Accepted for publication},
}
• B. F. A. Hompes, H. M. W. Verbeek, and W. M. P. van der Aalst, “Finding suitable activity clusters for decomposed process discovery,” in Simpda 2014, 2014, pp. 16-30.
[Bibtex]
@InProceedings{Hompes14,
Title = {Finding Suitable Activity Clusters for Decomposed Process Discovery},
Author = {Hompes, B. F. A. and Verbeek, H. M. W. and Aalst, W. M. P. van der},
Booktitle = {SIMPDA 2014},
Year = {2014},
Pages = {16--30},
Publisher = {CEUR-WS.org},
Volume = {1293},
Url = {http://ceur-ws.org/Vol-1293/paper2.pdf}
}
• IEEE 1849 (XES) WG, “IEEE Standard for eXtensible Event Stream (XES) for Achieving Interoperability in Event Logs and Event Streams,” IEEE Std 1849-2016, pp. 1-50, 2016.
[Bibtex]
@Article{XES16,
Title = {{IEEE Standard for eXtensible Event Stream (XES) for Achieving Interoperability in Event Logs and Event Streams}},
Author = {{IEEE 1849 (XES) WG}},
Journal = {{IEEE Std 1849-2016}},
Year = {2016},
Month = {Nov},
Pages = {1--50},
Doi = {10.1109/IEEESTD.2016.7740858},
Keywords = {IEEE standards;XML;grammars;open systems;IEEE Std 1849-2016;IEEE standard;XES extension prototypes;XML schema;event logs;event streams;extensible event stream;grammar;information systems;interoperability;tag-based language;Behavioral sciences;Event recognition;Grammar;IEEE Standards;Information systems;Semantics;XML;IEEE 1849(TM);XML;event log;event stream;extensions;system behavior},
Owner = {hverbeek},
Timestamp = {2018.07.05}
}
• A. Kumar, W. M. P. van der Aalst, and H. M. W. Verbeek, “Dynamic work distribution in workflow management systems: how to balance quality and performance?,” Journal of management information systems (jmis), vol. 18, iss. 3, pp. 157-193, 2001.
[Bibtex]
@Article{Kumar01,
Title = {Dynamic Work Distribution in Workflow Management Systems: How to balance quality and performance?},
Author = {Kumar, A. and Aalst, W. M. P. van der and Verbeek, H. M. W.},
Journal = {Journal of Management Information Systems (JMIS)},
Year = {2001},
Number = {3},
Pages = {157--193},
Volume = {18},
Abstract = {Todays workflow management systems offer work items to workers using rather primitive mechanisms. While most workflow systems support a role-based distribution of work, they have problems dealing with unavailability of workers as a result of vacation or illness, overloading, context dependent suitability, deadlines, and delegation. As a result, the work is offered to too few, too many, or even the wrong set of workers. Current practice is to offer a work item to one person, thus causing problems when the person is not present or too busy, or to offer it to a set of people sharing a given role, thus not incorporating the qualifications and preferences of people. Literature on work distribution is typically driven by considerations related to authorizations and permissions. However, workflow processes are operational processes where there is a highly dynamic trade-off between security and performance. For example, an approaching deadline and an overloaded specialist may be the trigger to offer work items to lesser-qualified workers. This paper addresses this problem by proposing a systematic approach to dynamically create a balance between quality and performance issues in workflow systems. We illustrate and evaluate the proposed approach with a realistic example and also compare how a workflow system would implement this scenario to highlight the shortcomings of current, state of the art workflow systems. Finally, a detailed simulation model is used to validate our approach.},
Owner = {hverbeek},
Timestamp = {2008.11.04},
}
• W. L. J. Lee, J. Munoz-Gama, H. M. W. Verbeek, W. M. P. van der Aalst, and M. Sepúlveda, “Improving merging conditions for recomposing conformance checking,” in Proceedings of the BPI 2018 workshop, 2018.
[Bibtex]
@InProceedings{Lee18,
Title = {Improving merging conditions for recomposing conformance checking},
Author = {Lee, W. L. J. and Munoz-Gama, J. and Verbeek, H. M. W. and Aalst, W. M. P. van der and Sep{\'u}lveda, M.},
Booktitle = {Proceedings of the {BPI 2018} workshop},
Year = {2018},
Abstract = {Efficient conformance checking is a hot topic in the field of process mining. Much of the recent work focused on improving the scalability of alignment-based approaches to support the larger and more complex processes. This is needed because process mining is increasingly applied in areas where models and logs are “big”. Decomposition techniques are able to achieve significant performance gains by breaking down a conformance problem into smaller ones. Moreover, recent work showed that the alignment problem can be resolved in an iterative manner by alternating between aligning a set of decomposed subcomponents before merging the computed sub-alignments and recomposing subcomponents to fix merging issues. Despite experimental results showing the gain of applying recomposition in large scenarios, there is still a need for improving the merging step, where log traces can take numerous recomposition steps before reaching the required merging condition. This paper contributes by defining and structuring the recomposition step, and proposes strategies with significant performance improvement on synthetic and real-life datasets over both the state-of-the-art decomposed and monolithic approaches.},
Owner = {hverbeek},
Timestamp = {2018.06.27}
}
• W. L. J. Lee, H. M. W. Verbeek, J. Munoz-Gama, W. M. P. van der Aalst, and M. Sepúlveda, “Recomposing conformance: closing the circle on decomposed alignment-based conformance checking in process mining,” Information sciences, vol. 466, pp. 55-91, 2018.
[Bibtex]
@Article{Lee18a,
Title = {Recomposing conformance: Closing the circle on decomposed alignment-based conformance checking in process mining},
Author = {Lee, W. L. J. and Verbeek, H. M. W. and Munoz-Gama, J. and Aalst, W. M. P. van der and Sep{\'u}lveda, M.},
Journal = {Information Sciences},
Year = {2018},
Month = {October},
Pages = {55--91},
Volume = {466},
Abstract = {In the area of process mining, efficient conformance checking is one of the main challenges. Several process mining vendors are in the process of implementing conformance checking in their tools to allow the user to check how well a model fits an event log. Current approaches for conformance checking are monolithic and compute exact fitness values but this may take excessive time. Alternatively, one can use a decomposition approach, which runs much faster but does not always compute an exact fitness value.
This paper introduces a recomposition approach that takes the best of both: it returns the exact fitness value by using the decomposition approach in an iterative manner. Results show that similar speedups can be obtained as by using the decomposition approach, but now the exact fitness value is guaranteed. Even better, this approach supports a configurable time-bound: “Give me the best fitness estimation you can find within 10 minutes.” In such a case, the approach returns an interval that contains the exact fitness value. If such an interval is sufficiently narrow, there is no need to spend unnecessary time to compute the exact value.},
Doi = {10.1016/j.ins.2018.07.026},
Keywords = {Conformance checking; Process mining},
Owner = {hverbeek},
Timestamp = {2018.07.16}
}
• W. L. J. Lee, H. M. W. Verbeek, J. Munoz-Gama, W. M. P. van der Aalst, and M. Sepúlveda, “Replay using recomposition: alignment-based conformance checking in the large,” in 2017 bpm demo track and bpm dissertation award, bpm-d and da 2017, co-located with 15th international conference on business process management, bpm 2017, Barcelona, Spain, 2017.
[Bibtex]
@InProceedings{Lee17,
Title = {Replay using Recomposition: Alignment-Based Conformance Checking in the Large},
Author = {Lee, W. L. J. and Verbeek, H. M. W. and Munoz-Gama, J. and Aalst, W. M. P. van der and Sep\'{u}lveda, M.},
Booktitle = {2017 BPM Demo Track and BPM Dissertation Award, BPM-D and DA 2017, co-located with 15th International Conference on Business Process Management, BPM 2017},
Year = {2017},
Editor = {Mendling, J. and Weske, M. and Clariso, R. and Pentland, B. and Aalst, W. M. P. van der and Leopold, H. and Kumar, A.},
Month = {September},
Note = {Conditionally accepted},
Series = {CEUR Workshop Proceedings},
Volume = {1920},
Abstract = {In the area of process mining, efficient alignment-based conformance checking is a hot topic. Existing approaches for conformance checking are typically monolithic and compute exact fitness values. One limitation with monolithic approaches is that it may take a significant amount of computation time in large processes. Alternatively, decomposition approaches run much faster but do not always compute an exact fitness value. This paper presents the tool Replay using Recomposition which returns the exact fitness value and the resulting alignments using the decomposition approach in an iterative manner. Other than computing the exact fitness value, users can configure the balance between result accuracy and computation time to get a fitness interval within set constraints, e.g., "Give me the best fitness estimation you can find within 5 minutes".},
Owner = {hverbeek},
Timestamp = {2017.07.19},
Url = {http://www.win.tue.nl/~hverbeek/wp-content/papercite-data/pdf/lee17.pdf}
}
• N. Lohmann, H. M. W. Verbeek, and R. M. Dijkman, “Petri net transformations for business processes – a survey,” Lncs transactions on petri nets and other models of concurrency (topnoc) ii, vol. 5460, pp. 46-63, 2009.
[Bibtex]
@Article{Lohmann09,
Title = {Petri Net Transformations for Business Processes -- A Survey},
Author = {Lohmann, N. and Verbeek, H. M. W. and Dijkman, R. M.},
Journal = {LNCS Transactions on Petri Nets and Other Models of Concurrency (ToPNoC) II},
Year = {2009},
Pages = {46--63},
Volume = {5460},
Doi = {10.1007/978-3-642-00899-3_3},
Owner = {hverbeek},
Timestamp = {2008.11.03},
}
• N. Lohmann, H. M. W. Verbeek, C. Ouyang, and C. Stahl, “Comparing and evaluating petri net semantics for bpel,” International journal of business process integration and management (ijbpim), vol. 4, iss. 1, pp. 60-73, 2009.
[Bibtex]
@Article{Lohmann09a,
Title = {Comparing and Evaluating Petri Net Semantics for BPEL},
Author = {Lohmann, N. and Verbeek, H. M. W. and Ouyang, C. and Stahl, C.},
Journal = {International Journal of Business Process Integration and Management (IJBPIM)},
Year = {2009},
Number = {1},
Pages = {60--73},
Volume = {4},
Abstract = {We compare two Petri net semantics for the Web Services Business Process Execution Language (BPEL). The comparison reveals different modelling decisions. These decisions together with their consequences are discussed. We also give an overview of the different properties that can be verified on the resulting models. A case study helps to evaluate the corresponding compilers which transform a BPEL process into a Petri net model.},
Doi = {10.1504/IJBPIM.2009.026986},
Keywords = {business process modelling; business process execution language; Petri nets; semantics; formal methods; verification; BPEL; web services; WS-BPEL.},
Owner = {hverbeek},
Timestamp = {2008.11.03}
}
• N. Lohmann, H. M. W. Verbeek, C. Ouyang, C. Stahl, and W. M. P. van der Aalst, “Comparing and evaluating petri net semantics for bpel,” Eindhoven University of Technology, Computer Science Report 07/23, 2007.
[Bibtex]
@TechReport{Lohmann07,
Title = {Comparing and Evaluating Petri Net Semantics for BPEL},
Author = {Lohmann, N. and Verbeek, H. M. W. and Ouyang, C. and Stahl, C. and Aalst, W. M. P. van der},
Institution = {Eindhoven University of Technology},
Year = {2007},
Number = {07/23},
Type = {Computer Science Report},
Abstract = {We compare two Petri net semantics for the Web Services Business Process Execution Language (BPEL). The comparison reveals different modeling decisions. These decisions together with their consequences are discussed.We also give an overview of the different properties that can be verified on the resulting models. A case study helps to evaluate the corresponding compilers which transform a BPEL process into a Petri net model.},
Owner = {hverbeek},
Timestamp = {2008.11.04},
Url = {http://alexandria.tue.nl/repository/books/630637.pdf}
}
• R. S. Mans, W. M. P. van der Aalst, and H. M. W. Verbeek, “Defining and executing process mining workflows with rapidprom,” in Rapidminer world 2014 conference, 2014.
[Bibtex]
@InProceedings{Mans14,
Title = {Defining and Executing Process Mining Workflows with RapidProM},
Author = {Mans, R. S. and Aalst, W. M. P. van der and Verbeek, H. M. W.},
Booktitle = {RapidMiner World 2014 Conference},
Year = {2014},
}
• R. S. Mans, W. M. P. van der Aalst, and H. M. W. Verbeek, “Supporting process mining workflows with rapidprom,” in Bpm 2014 demos, CEUR-WS.org, 2014, vol. 1295, pp. 56-60.
[Bibtex]
@InCollection{Mans14a,
Title = {Supporting Process Mining Workflows with RapidProM},
Author = {Mans, R. S. and Aalst, W. M. P. van der and Verbeek, H. M. W.},
Booktitle = {BPM 2014 Demos},
Publisher = {CEUR-WS.org},
Year = {2014},
Pages = {56--60},
Volume = {1295},
Url = {http://ceur-ws.org/Vol-1295/paper5.pdf}
}
• J. Mendling, W. M. P. van der Aalst, B. F. van Dongen, and H. M. W. Verbeek, “Errors in the sap reference model,” , 2006.
[Bibtex]
@Electronic{Mendling06,
Title = {Errors in the SAP Reference Model},
Author = {Mendling, J. and Aalst, W. M. P. van der and Dongen, B. F. van and Verbeek, H. M. W.},
Language = {English},
Month = {June},
Organization = {BPTrends},
Url = {http://www.bptrends.com/deliver_file.cfm?fileType=publication&fileName=06%2D06%2DWP%2DErrorsInSAPRefModel%2DMendling%2Detal%2Epdf},
Year = {2006},
Abstract = {The SAP Reference Model is a set of information models that is utilized to guide the configuration of SAP systems. A big part of these models are business process models represented in the Eventdriven Process Chains (EPC) notation. These EPC models provide a easy to comprehend overview of SAP business functionality on an abstract level. As such, the SAP Reference Model is considered as an important tool to bridge the gap between business logic and information system implementation. In a recent research project, we found that there are several errors in the reference model: 5.6% of the more than 600 processes that we analyzed revealed errors.},
Owner = {hverbeek},
Timestamp = {2008.11.04}
}
• J. Mendling, W. M. P. van der Aalst, B. F. van Dongen, and H. M. W. Verbeek, Referenzmodell: sand im getriebe – webfehler, 2006.
[Bibtex]
@Misc{Mendling06a,
Title = {Referenzmodell: Sand im Getriebe - Webfehler},
Author = {Mendling, J. and Aalst, W. M. P. van der and Dongen, B. F. van and Verbeek, H. M. W.},
HowPublished = {iX - Magazin f{\"u}r Professionelle Informationstechnik},
Month = {August},
Note = {in German},
Year = {2006},
Abstract = {Referenzmodelle erleichtern die Einf{\"u}hrung und die Anpassung komplexer Softwaresysteme. Dazu sollten sie selbst fehlerfrei sein. SAPs Referenzmodell enth{\"a}lt trotzdem einige Unstimmigkeiten.},
File = {iX:http\://www.heise.de/ix/:URL},
Owner = {hverbeek},
Timestamp = {2008.11.04},
Url = {http://www.heise.de/kiosk/archiv/ix/2006/8/131}
}
• J. Mendling, M. Moser, G. Neumann, H. M. W. Verbeek, B. F. van Dongen, and W. M. P. van der Aalst, “Faulty epcs in the sap reference model,” in Business process management, bpm 2006, Vienna, Austria, 2006, pp. 451-457.
[Bibtex]
@InProceedings{Mendling06b,
Title = {Faulty EPCs in the SAP Reference Model},
Author = {Mendling, J. and Moser, M. and Neumann, G. and Verbeek, H. M. W. and Dongen, B. F. van and Aalst, W. M. P. van der},
Booktitle = {Business Process Management, BPM 2006},
Year = {2006},
Editor = {Dustdar, S. and Fiadeiro, J. and Sheth, A. P.},
Month = {September},
Pages = {451--457},
Publisher = {Springer, Berlin, Germany},
Series = {Lecture Notes in Computer Science},
Volume = {4102},
Abstract = {Little is known about error probability in enterprise models as they are usually kept private. The SAP reference model is a publically available model that contains more than 600 non-trivial process models expressed in terms of Event-driven Process Chains (EPCs). We have automatically translated these EPCs into YAWL models and analyzed these models using WofYAWL, a verification tool based on Petri nets, in order to acquire knowledge about errors in large enterprise models. We discovered that at least 34 of these EPCs contain errors (i.e., at least 5.6% is flawed) and analyzed which parts of the SAP reference model contain most errors. This systematic analysis of the SAP reference model illustrates the need for verification tools such as WofYAWL.},
Doi = {10.1007/11841760_38},
Owner = {hverbeek},
Timestamp = {2008.11.04},
}
• J. Mendling, M. Moser, G. Neumann, H. M. W. Verbeek, B. F. van Dongen, and W. M. P. van der Aalst, “A quantitative analysis of faulty epcs in the sap reference model,” BPMcenter.org, BPM Center Report BPM-06-08, 2006.
[Bibtex]
@TechReport{Mendling06c,
Title = {A Quantitative Analysis of Faulty {EPCs} in the {SAP} Reference Model},
Author = {Mendling, J. and Moser, M. and Neumann, G. and Verbeek, H. M. W. and Dongen, B. F. van and Aalst, W. M. P. van der},
Institution = {BPMcenter.org},
Year = {2006},
Number = {BPM-06-08},
Type = {BPM Center Report},
Abstract = {The SAP reference model contains more than 600 non-trivial process models expressed in terms of Event-driven Process Chains (EPCs). We have automatically translated these EPCs into YAWL models and analyzed these models using WofYAWL, a verification tool based on Petri nets. We discovered that at least 34 of these EPCs contain errors (i.e., at least 5.6% is flawed). We analyzed which parts of the SAP reference model contain most errors. Moreover, based on 15 characteristics (e.g., the size of the model), we used logistic regression to find possible predictors for these errors. This systematic analysis of the SAP reference model illustrates the need for verification tools such as WofYAWL.},
Owner = {hverbeek},
Timestamp = {2008.11.04},
}
• J. Mendling, H. M. W. Verbeek, B. F. van Dongen, W. M. P. van der Aalst, and G. Neumann, “Detection and prediction of errors in epcs of the sap reference model,” Data and knowledge engineering (dke), vol. 64, iss. 1, pp. 312-329, 2008.
[Bibtex]
@Article{Mendling08,
Title = {Detection and prediction of errors in {EPCs} of the {SAP} reference model},
Author = {Mendling, J. and Verbeek, H. M. W. and Dongen, B. F. van and Aalst, W. M. P. van der and Neumann, G.},
Journal = {Data and Knowledge Engineering (DKE)},
Year = {2008},
Number = {1},
Pages = {312--329},
Volume = {64},
Abstract = {Up to now there is neither data available on how many errors can be expected in process model collections, nor is it understood why errors are introduced. In this article, we provide empirical evidence for these questions based on the SAP reference model. This model collection contains about 600 process models expressed as Event-driven Process Chains (EPCs). We translated these EPCs into YAWL models, and analyzed them using the verification tool WofYAWL. We discovered that at least 34 of these EPCs contain errors. Moreover, we used logistic regression to show that complexity of EPCs has a significant impact on error probability.},
Doi = {10.1016/j.datak.2007.06.019},
Owner = {hverbeek},
Timestamp = {2008.11.03}
}
• W. Meulemans, W. M. Sonke, B. Speckmann, H. M. W. Verbeek, and K. A. B. Verbeek, “Optimal algorithms for compact linear layouts,” in 34th european workshop on computational geometry (eurocg2018), proceedings, 2018, p. 10:1–10:6.
[Bibtex]
@InProceedings{Meulemans18,
Title = {Optimal Algorithms for Compact Linear Layouts},
Author = {Meulemans, W. and Sonke, W. M. and Speckmann, B. and Verbeek, H. M. W. and Verbeek, K. A. B.},
Booktitle = {34th European Workshop on Computational Geometry ({EuroCG2018}), Proceedings},
Year = {2018},
Month = mar,
Pages = {10:1--10:6},
Doi = {10.1109/PacificVis.2018.00010},
Internal-Note = {NOTE(review): this DOI is identical to the PacificVis 2018 paper (entry Sonke18); EuroCG workshop proceedings normally carry no IEEE DOI -- verify},
Owner = {hverbeek},
Timestamp = {2018.02.07}
}
• C. Ouyang, W. M. P. van der Aalst, S. Breutel, M. Dumas, A. H. M. ter Hofstede, and H. M. W. Verbeek, “Wofbpel: a tool for automated analysis of bpel processes,” in Proceedings of service-oriented computing (icsoc 2005), Amsterdam, The Netherlands, 2005, pp. 484-489.
[Bibtex]
@InProceedings{Ouyang05a,
Title = {{WofBPEL}: A Tool for Automated Analysis of {BPEL} Processes},
Author = {Ouyang, C. and Aalst, W. M. P. van der and Breutel, S. and Dumas, M. and Hofstede, A. H. M. ter and Verbeek, H. M. W.},
Booktitle = {Proceedings of Service-Oriented Computing ({ICSOC} 2005)},
Year = {2005},
Editor = {Benatallah, B. and Casati, F. and Traverso, P.},
Month = dec,
Pages = {484--489},
Publisher = {Springer},
Address = {Berlin, Germany},
Series = {Lecture Notes in Computer Science},
Volume = {3826},
Doi = {10.1007/11596141_37},
Owner = {hverbeek},
Timestamp = {2008.11.04},
}
• C. Ouyang, W. M. P. van der Aalst, S. Breutel, M. Dumas, A. H. M. ter Hofstede, and H. M. W. Verbeek, “Formal semantics and analysis of control flow in ws-bpel,” BPMcenter.org, BPM Center Report BPM-05-15, 2005.
[Bibtex]
@TechReport{Ouyang05,
Title = {Formal Semantics and Analysis of Control Flow in {WS-BPEL}},
Author = {Ouyang, C. and Aalst, W. M. P. van der and Breutel, S. and Dumas, M. and Hofstede, A. H. M. ter and Verbeek, H. M. W.},
Institution = {BPMcenter.org},
Year = {2005},
Number = {BPM-05-15},
Type = {BPM Center Report},
Abstract = {Web service composition refers to the creation of new (Web) services by combination of functionality provided by existing ones. This paradigm has gained significant attention in the Web services community and is seen as a pillar for building service-oriented applications. A number of domain-specific languages for service composition have been proposed with consensus being formed around a process-oriented language known as WS-BPEL (or BPEL). The kernel of BPEL consists of simple communication primitives that may be combined using control-flow constructs expressing sequence, branching, parallelism, synchronisation, etc. As a result, BPEL process definitions lend themselves to static flow-based analysis techniques. In this report, we describe a tool that performs two useful types of static checks and extracts meta-data to optimise dynamic resource management. The tool operates by translating BPEL processes into Petri nets and exploiting existing Petri net analysis techniques. It relies on a comprehensive and rigorously defined mapping of BPEL constructs into Petri net structures.},
Owner = {hverbeek},
Timestamp = {2008.11.04},
}
• C. Ouyang, H. M. W. Verbeek, W. M. P. van der Aalst, S. Breutel, M. Dumas, and A. H. M. ter Hofstede, “Formal semantics and analysis of control flow in ws-bpel,” Science of computer programming (scp), vol. 67, pp. 162-198, 2007.
[Bibtex]
@Article{Ouyang07,
Title = {Formal Semantics and Analysis of Control Flow in {WS-BPEL}},
Author = {Ouyang, C. and Verbeek, H. M. W. and Aalst, W. M. P. van der and Breutel, S. and Dumas, M. and Hofstede, A. H. M. ter},
Journal = {Science of Computer Programming (SCP)},
Year = {2007},
Pages = {162--198},
Volume = {67},
Abstract = {Web service composition refers to the creation of new (Web) services by combining functionalities provided by existing ones. A number of domain-specific languages for service composition have been proposed, with consensus being formed around a process-oriented language known as WS-BPEL (or BPEL). The kernel of BPEL consists of simple communication primitives that may be combined using control-flow constructs expressing sequence, branching, parallelism, synchronization, etc. We present a comprehensive and rigorously defined mapping of BPEL constructs onto Petri net structures, and use this for the analysis of various dynamic properties related to unreachable activities, conflicting messages, garbage collection, conformance checking, and deadlocks and lifelocks in interaction processes. We use a mapping onto Petri nets because this allows us to use existing theoretical results and analysis tools. Unlike approaches based on finite state machines, we do not need to construct the state space, and can use structural analysis (e.g., transition invariants) instead. We have implemented a tool that translates BPEL processes into Petri nets and then applies Petri-net-based analysis techniques. This tool has been tested on different examples, and has been used to answer a variety of questions.},
Doi = {10.1016/j.scico.2007.03.002},
Owner = {hverbeek},
Timestamp = {2008.11.04}
}
• L. Raichelson, P. Soffer, and H. M. W. Verbeek, “Merging event logs: combining granularity levels for process flow analysis,” Information systems, vol. 71, pp. 211-227, 2017.
[Bibtex]
@Article{Raichelson17,
Title = {Merging event logs: Combining granularity levels for process flow analysis},
Author = {Raichelson, Lihi and Soffer, Pnina and Verbeek, H. M. W.},
Journal = {Information Systems},
Year = {2017},
Month = nov,
Pages = {211--227},
Volume = {71},
Abstract = {Process mining techniques enable the discovery and analysis of business processes and the identification of opportunities for improvement. Processes often comprise separately managed procedures documented in separate log files which are impossible to mine in an integrative manner as the complete end-to-end process flow is obscure. In this paper we present a merging algorithm that results in a comprehensive merged log that offers two views of the end-to-end process: the case view, tracking the order, and the instance view tracking the item. This enables the identification of process flow problems that could not be detected by previous techniques.
In addition, because our log-merging approach establishes the end-to-end process flow at two different abstraction levels, it is capable of handling both simple (n-to-one) and complex (n-to-many) relationships between log events. The unified log can be used by process mining techniques to identify flow problems, particularly at the point of integration between the processes under consideration. The procedure proposed in this paper has been implemented and evaluated using both synthetic logs and real-life logs.},
Doi = {10.1016/j.is.2017.08.010},
Owner = {hverbeek},
Timestamp = {2017.08.30}
}
• D. M. M. Schunselaar, T. F. van der Avoort, H. M. W. Verbeek, and W. M. P. van der Aalst, “Yawl in the cloud,” in Yawl 2013, Bonn, Germany, 2013.
[Bibtex]
@InProceedings{Schunselaar13,
Title = {{YAWL} in the Cloud},
Author = {Schunselaar, D. M. M. and Avoort, T. F. van der and Verbeek, H. M. W. and Aalst, W. M. P. van der},
Booktitle = {{YAWL} 2013},
Year = {2013},
Month = jun,
Number = {982},
Series = {CEUR-WS},
Abstract = {In the context of the CoSeLoG project (which involves 10 Dutch municipalities), we realised a proof-of-concept implementation based on YAWL. The municipalities want to share a common IT infrastructure and learn from one another, but also allow for local differences. Therefore, we extended YAWL to run in a cloud-based environment leveraging on existing configuration possibilities. To support "YAWL in the Cloud" we developed load-balancing capabilities that allow for the distribution of work over multiple YAWL engines. Moreover, we extended YAWL with multi-tenancy capabilities: one municipality may effectively use multiple engines without knowing it and one engine may safely run the processes of multiple municipalities.},
Owner = {hverbeek},
Timestamp = {2013.05.08}
}
• D. M. M. Schunselaar, H. Leopold, H. M. W. Verbeek, and W. M. P. van der Aalst, “Configuring configurable process models made easier: an automated approach,” in Bpm 2014 workshops, Springer, 2015, vol. 202, pp. 105-117.
[Bibtex]
@InCollection{Schunselaar14a,
Title = {Configuring Configurable Process Models Made Easier: An Automated Approach},
Author = {Schunselaar, D. M. M. and Leopold, H. and Verbeek, H. M. W. and Aalst, W. M. P. van der},
Booktitle = {{BPM} 2014 Workshops},
Publisher = {Springer},
Year = {2015},
Pages = {105--117},
Volume = {202},
Doi = {10.1007/978-3-319-15895-2_10},
}
• D. M. M. Schunselaar and H. M. W. Verbeek, “Task elimination may actually increase throughput time,” arXiv.org 2018.
[Bibtex]
@TechReport{Schunselaar18,
Title = {Task Elimination may Actually Increase Throughput Time},
Author = {Schunselaar, D. M. M. and Verbeek, H. M. W.},
Institution = {arXiv.org},
Year = {2018},
Note = {arXiv identifier 1812.11793},
Abstract = {The well-known Task Elimination redesign principle suggests to remove unnecessary tasks from a process to improve on time and cost. Although there seems to be a general consensus that removing work can only improve the throughput time of the process, this paper shows that this is not necessarily the case by providing an example that uses plain M/M/c activities. This paper also shows that the Task Automation and Parallelism redesign principles may also lead to longer throughput times. Finally, apart from these negative results, the paper also show under which assumption these redesign principles indeed can only improve the throughput time.},
Owner = {hverbeek},
Timestamp = {2019.01.07},
Url = {http://arxiv.org/abs/1812.11793}
}
• D. M. M. Schunselaar, H. M. W. Verbeek, W. M. P. van der Aalst, and H. A. Reijers, “Creating sound and reversible configurable process models using cosenets,” in Business information systems – 15th international conference, bis 2012, Vilnius, Lithuania, 2012, pp. 24-35.
[Bibtex]
@InProceedings{Schunselaar12,
Title = {Creating Sound and Reversible Configurable Process Models using {CoSeNets}},
Author = {Schunselaar, D. M. M. and Verbeek, H. M. W. and Aalst, W. M. P. van der and Reijers, H. A.},
Booktitle = {Business Information Systems - 15th International Conference, {BIS} 2012},
Year = {2012},
Editor = {Abramowicz, W. and Kriksciuniene, D. and Sakalauskas, V.},
Month = may,
Pages = {24--35},
Publisher = {Springer},
Series = {Lecture Notes in Business Information Processing},
Volume = {117},
Abstract = {All Dutch municipalities offer the same range of services, and the processes delivering these services are quite similar. Therefore, these municipalities can benefit from configurable process models. This requires the merging of existing process variants into configurable models. Unfortunately, existing merging techniques (1) allow for configurable process models which can be instantiated to unsound process models, and (2) are not always reversible, which means that not all original models can be obtained by instantiation of the configurable process model. In this paper, we propose to capture the control-flow of a process by a CoSeNet: a configurable, tree-like representation of the process model, which is sound by construction, and we describe how to merge two CoSeNets into another CoSeNet such that the merge is reversible. Initial experiments show that this approach does not influence complexity significantly, i.e. it results in similar complexities for the configurable process model compared to existing techniques, while it guarantees soundness and reversibility.},
Doi = {10.1007/978-3-642-30359-3_3},
Owner = {hverbeek},
Timestamp = {2012.03.08}
}
• D. M. M. Schunselaar, H. M. W. Verbeek, W. M. P. van der Aalst, and H. A. Reijers, “A framework for efficiently deciding language inclusion for sound unlabelled wf-nets,” in Joint proceedings of pnse’13 and modbe’13, milano, italy, june 24-25, 2013., 2013, pp. 135-154.
[Bibtex]
@InProceedings{Schunselaar13a,
Title = {A Framework for Efficiently Deciding Language Inclusion for Sound Unlabelled {WF-Nets}},
Author = {Schunselaar, D. M. M. and Verbeek, H. M. W. and Aalst, W. M. P. van der and Reijers, H. A.},
Booktitle = {Joint Proceedings of {PNSE'13} and {ModBE'13}, Milano, Italy, June 24-25, 2013},
Year = {2013},
Editor = {Moldt, D.},
Month = jun,
Pages = {135--154},
Publisher = {CEUR-WS.org},
Series = {CEUR Workshop Proceedings},
Volume = {989},
Owner = {hverbeek},
Timestamp = {2013.08.06},
Url = {http://ceur-ws.org/Vol-989/paper06.pdf}
}
• D. M. M. Schunselaar, H. M. W. Verbeek, W. M. P. v. d. Aalst, and H. A. Reijers, “A structural model comparison for finding the best performing models in a collection,” , BPM-15-05, 2015.
[Bibtex]
@TechReport{Schunselaar15,
Title = {A Structural Model Comparison for finding the Best Performing Models in a Collection},
Author = {Schunselaar, D. M. M. and Verbeek, H. M. W. and Aalst, W. M. P. van der and Reijers, H. A.},
Institution = {BPMcenter.org},
Year = {2015},
Number = {BPM-15-05},
Type = {BPM Center Report},
Internal-Note = {NOTE(review): institution/type inferred from the BPM-15-05 report number, matching the other BPM Center Report entries -- verify},
}
• D. M. M. Schunselaar, H. M. W. Verbeek, W. M. P. v. d. Aalst, and H. A. Reijers, “A structural model comparison for finding the best performing models in a collection,” in Bpm 2015 proceedings, 2015.
[Bibtex]
@InProceedings{Schunselaar15a,
Title = {A Structural Model Comparison for finding the Best Performing Models in a Collection},
Author = {Schunselaar, D. M. M. and Verbeek, H. M. W. and Aalst, W. M. P. van der and Reijers, H. A.},
Booktitle = {{BPM} 2015 Proceedings},
Year = {2015},
Note = {Accepted as short paper},
Publisher = {Springer},
Address = {Heidelberg},
}
• D. M. M. Schunselaar, H. M. W. Verbeek, W. M. P. van der Aalst, and H. A. Reijers, “Petra: process model based extensible toolset for redesign and analysis,” BPMcenter.org, BPM Center report BPM-14-01, 2014.
[Bibtex]
@TechReport{Schunselaar14,
Title = {{Petra}: Process model based Extensible Toolset for Redesign and Analysis},
Author = {Schunselaar, D. M. M. and Verbeek, H. M. W. and Aalst, W. M. P. van der and Reijers, H. A.},
Institution = {BPMcenter.org},
Year = {2014},
Number = {BPM-14-01},
Type = {BPM Center report},
Abstract = {In different settings, it is of great value to be able to compare the performance of processes that aim to fulfill the same purpose but do so in different ways. Petra is a toolset for the analysis of so-called process families, which support the use of a multitude of analysis tools, including simulation. Through the use of Petra, organisations can make an educated decision about the exact configuration of their processes as to satisfy their exact requirements and performance objectives. The CoSeLoG project, in which we work together with 10 municipalities, provides exactly the setting for this type of functionality to come into play.},
Owner = {hverbeek},
Timestamp = {2013.05.07},
}
• D. M. M. Schunselaar, H. M. W. Verbeek, W. M. P. van der Aalst, and H. A. Reijers, “Petra: process model based extensible toolset for redesign and analysis,” in Petri nets and software engineering 2014 (international workshop, pnse 2014), 2014, pp. 269-288.
[Bibtex]
@InProceedings{Schunselaar14c,
Title = {{Petra}: Process model based extensible toolset for redesign and analysis},
Author = {Schunselaar, D. M. M. and Verbeek, H. M. W. and Aalst, W. M. P. van der and Reijers, H. A.},
Booktitle = {Petri Nets and Software Engineering 2014 (International Workshop, {PNSE} 2014)},
Year = {2014},
Pages = {269--288},
Publisher = {CEUR-WS.org},
Volume = {1160},
Url = {http://ceur-ws.org/Vol-1160/paper16.pdf}
}
• D. M. M. Schunselaar, H. M. W. Verbeek, H. A. Reijers, and W. M. P. van der Aalst, “Yawl in the cloud: supporting process sharing and variability,” in Bpm 2014 workshops, Springer, 2015, vol. 202, pp. 367-379.
[Bibtex]
@InCollection{Schunselaar14b,
Title = {{YAWL} in the Cloud: Supporting Process Sharing and Variability},
Author = {Schunselaar, D. M. M. and Verbeek, H. M. W. and Reijers, H. A. and Aalst, W. M. P. van der},
Booktitle = {{BPM} 2014 Workshops},
Publisher = {Springer},
Year = {2015},
Pages = {367--379},
Volume = {202},
Doi = {10.1007/978-3-319-15895-2_31},
}
• D. M. M. Schunselaar, H. M. W. Verbeek, H. A. Reijers, and W. M. P. van der Aalst, “Using monotonicity to find optimal process configurations faster,” in Simpda 2014, 2014, pp. 123-137.
[Bibtex]
@InProceedings{Schunselaar14d,
Title = {Using Monotonicity to find Optimal Process Configurations Faster},
Author = {Schunselaar, D. M. M. and Verbeek, H. M. W. and Reijers, H. A. and Aalst, W. M. P. van der},
Booktitle = {{SIMPDA} 2014},
Year = {2014},
Pages = {123--137},
Publisher = {CEUR-WS.org},
Volume = {1293},
Url = {http://ceur-ws.org/Vol-1293/paper9.pdf}
}
• W. M. Sonke, K. A. B. Verbeek, W. Meulemans, H. M. W. Verbeek, and B. Speckmann, “Optimal algorithms for compact linear layouts,” in 2018 ieee pacific visualization symposium, pacificvis 2018, proceedings, 2018, pp. 1-10.
[Bibtex]
@InProceedings{Sonke18,
Title = {Optimal Algorithms for Compact Linear Layouts},
Author = {Sonke, W. M. and Verbeek, K. A. B. and Meulemans, W. and Verbeek, H. M. W. and Speckmann, B.},
Booktitle = {2018 {IEEE} Pacific Visualization Symposium, {PacificVis} 2018, Proceedings},
Year = {2018},
Month = may,
Pages = {1--10},
Publisher = {IEEE Computer Society},
Doi = {10.1109/PacificVis.2018.00010},
Owner = {hverbeek},
Timestamp = {2018.01.25}
}
• H. M. W. Verbeek, “Decomposed replay using hiding and reduction,” in PNSE 2016 workshop proceedings, Torun, Poland, 2016.
[Bibtex]
@InProceedings{Verbeek16a,
Title = {Decomposed Replay using Hiding and Reduction},
Author = {Verbeek, H. M. W.},
Booktitle = {{PNSE} 2016 Workshop Proceedings},
Year = {2016},
Editor = {Cabac, L. and Kristensen, L. and R{\"o}lke, H.},
Month = jun,
Abstract = {In the area of process mining, decomposed replay has been proposed to be able to deal with nets and logs containing many different activities. The main assumption behind this decomposition is that replaying many subnets and sublogs containing only some activities is faster then replaying a single net and log containing many activities. Although for many nets and logs this assumption does hold, there are also nets and logs for which it does not hold. This paper shows an example net and log for which the decomposed replay may take way more time, and provides an explanation why this is the case. Next, to mitigate this problem, this paper proposes an alternative decomposed replay, and shows that this alternative decomposed replay is faster than the monolithic replay even for the problematic cases as identified earlier. However, the alternative decomposed replay is often slower than the original decomposed approach. An advantage of the alternative decomposed approach over the original approach is that its cost estimates are typically better.},
Url = {http://www.win.tue.nl/~hverbeek/wp-content/papercite-data/pdf/verbeek16a.pdf}
}
• H. M. W. Verbeek, “Bpi challenge 2012: the transition system case,” in Bpm 2012 workshops, 2013, pp. 225-226.
[Bibtex]
@InProceedings{Verbeek13,
Title = {{BPI} Challenge 2012: The Transition System Case},
Author = {Verbeek, H. M. W.},
Booktitle = {{BPM} 2012 Workshops},
Year = {2013},
Editor = {La Rosa, M. and Soffer, P.},
Pages = {225--226},
Publisher = {Springer},
Series = {LNBIP},
Volume = {132},
Owner = {hverbeek},
Timestamp = {2012.10.08},
}
• H. M. W. Verbeek, “Decomposed replay using hiding and reduction as abstraction,” LNCS Transactions on Petri Nets and Other Models of Concurrency (ToPNoC), vol. XII, pp. 166-186, 2017.
[Bibtex]
@Article{Verbeek17,
Title = {Decomposed Replay Using Hiding and Reduction as Abstraction},
Author = {Verbeek, H. M. W.},
Journal = {{LNCS} Transactions on Petri Nets and Other Models of Concurrency ({ToPNoC})},
Year = {2017},
Pages = {166--186},
Volume = {XII},
Abstract = {In the area of process mining, decomposed replay has been proposed to be able to deal with nets and logs containing many different activities. The main assumption behind this decomposition is that replaying many subnets and sublogs containing only some activities is faster then replaying a single net and log containing many activities. Although for many nets and logs this assumption does hold, there are also nets and logs for which it does not hold. This paper shows an example net and log for which the decomposed replay may take way more time, and provides an explanation why this is the case. Next, to mitigate this problem, this paper proposes an alternative way to abstract the subnets from the single net, and shows that the decomposed replay using this alternative abstraction is faster than the monolithic replay even for the problematic cases as identified earlier. However, the alternative abstraction often results in longer computation times for the decomposed replay than the original abstraction. An advantage of the alternative abstraction over the original abstraction is that its cost estimates are typically better.},
Doi = {10.1007/978-3-662-55862-1_8},
Owner = {hverbeek},
Timestamp = {2017.03.07},
}
• H. M. W. Verbeek, “Decomposed process mining with divideandconquer,” in Bpm 2014 demos, CEUR-WS.org, 2014, vol. 1295, pp. 86-90.
[Bibtex]
@InCollection{Verbeek14a,
Title = {Decomposed Process Mining with {DivideAndConquer}},
Author = {Verbeek, H. M. W.},
Booktitle = {{BPM} 2014 Demos},
Publisher = {CEUR-WS.org},
Year = {2014},
Pages = {86--90},
Volume = {1295},
Url = {http://ceur-ws.org/Vol-1295/paper11.pdf}
}
• H. M. W. Verbeek, “Bpi challenge 2012: the transition system case,” , 2012.
[Bibtex]
@Electronic{Verbeek12b,
Title = {{BPI} Challenge 2012: The Transition System Case},
Author = {Verbeek, H. M. W.},
Month = sep,
Year = {2012},
Abstract = {The Transition System Miner, together with the Simple Log Filter and the Transition System Analyzer, is used to investigate the log used for the BPI Challenge 2012. Conclusions are drawn for the control-flow perspective, the date perspective, and the resource perspective, which shows the flexibility of the Miner. The results show that the process as captured in the event log is nicely structured, that it contains hardly any noise, and that the different events (Application, Offer, and Work Item) can be nicely captured by transition systems. Furthermore, it shows that the company who owns the process does not use case managers for handling the applications, as a lot of handover-of-works occur.},
Owner = {hverbeek},
Timestamp = {2012.09.17}
}
• H. M. W. Verbeek, “Verification of wf-nets,” PhD Thesis, 2004.
[Bibtex]
@PhdThesis{Verbeek04,
Title = {Verification of {WF}-nets},
Author = {Verbeek, H. M. W.},
School = {BETA},
Year = {2004},
Month = jun,
Abstract = {The workflow process definition (WPD) of a workflow management system (WFMS) is an important concept in this thesis. If we compare a WFMS to a conveyor belt system, then a WPD can be compared to a physical layout of the conveyor belts. Thus, both a WPD and a physical layout determine how items are moved around in the corresponding system.
However, a WPD can be far more complex than a layout of a conveyor belt system, because a conveyor belt system is forced to adhere to physical constraints, whereas a WFMS is not: A conveyor belt system is tangible and moves tangible work items around, whereas a WFMS, being a piece of software, is intangible and moves intangible information items (cases) around. For example, in a WFMS, cases can be copied with ease, which enables the parallel processing of multiple tasks on one case.
Unfortunately, todays WFMSs have almost no support for the verification of a WPD, even though there is a clear need for such a verification support. Because a WPD can be very complex, it may contain (very subtle) errors. If an erroneous WPD is put into production, then it might take a while before somebody realizes that there is something wrong, as the entire process is hidden in the WFMS. Only anticipated errors can be detected by the WFMS itself, the remaining errors can only be detected after having requested some reports from the WFMS and only if these reports contain sufficient information to detect the errors.
This thesis presents the WPD verification tool Woflan and its supporting concepts. Woflan maps a WPD onto a workflow net (WF-net, which is a Petri net with some additional requirements) and can verify, before the WPD is taken into production, the soundness property and four inheritance relations for the resulting WF-net. Note that he mappings used by Woflan should be of high quality, as the results obtained for the WF-net should be transferable to the originating WPD. This thesis presents such high-quality mappings for a number of commercial WFMSs (IBM MQSeries Workflow, Staffware, COSA Workflow), for a commercial BPR tool (Protos), and for a research workflow prototype (XRL).
The soundness property is a minimal property any WPD should satisfy, and consists of the following requirements: (i) whatever happens, every case can be completed, (ii) after a case has been completed, no dangling references are left behind to that case, and (iii) every part of the WPD is necessary for some case. Woflan exploits an existing relation between soundness and the (in Petri-net world well-known) boundedness and liveness properties for diagnosis purposes. Based on this relation, this thesis introduces a diagnosis process for the soundness property, and a concept of behavioral error sequences. The diagnosis process guides the designer of an unsound WPD towards correcting this WPD, using diagnostic information like the behavioral error sequences. These sequences pinpoint decisions in the WPD that, when taken into production, lead to a violation of the soundness property. This thesis presents a number of case studies that have put several mappings, the diagnosis process, and the behavioral error sequences to the test. From these case studies, we conclude that Woflan successfully guides the designer of an unsound WPD towards correcting that WPD.
The four inheritance relations can be used to guarantee that two WPDs behave in a similar way: If two WPDs share such a relation, then cases can be transferred between the WPDs without problems. Woflan implements algorithms to compute these four relations. However, a straightforward exhaustive search algorithm for computing the most complex relation, the life-cycle inheritance relation, can lead to excessive processing times. This thesis presents a backtracking algorithm that aims to alleviate this problem of excessive processing times for computing the life-cycle inheritance relation. This thesis also presents a number of case studies that have put both algorithms to the test. From these case studies, we conclude that the backtracking algorithm effectively reduces excessive processing times. From one of these case studies, we also concluded that the life-cycle inheritance relation can be very subtle and, therefore, hard to check for humans (even for experts). Therefore, we conclude that we need a tool like Woflan to check this inheritance relation.},
Owner = {hverbeek},
Timestamp = {2008.11.04},
Url = {http://alexandria.tue.nl/extra2/200411300.pdf}
}
• H. M. W. Verbeek and W. M. P. van der Aalst, “An experimental evaluation of passage-based process discovery,” in Bpi 2012 workshop pre-proceedings, Tallinn, 2012, pp. 71-76.
[Bibtex]
@InProceedings{Verbeek12,
Title = {An Experimental Evaluation of Passage-Based Process Discovery},
Author = {Verbeek, H. M. W. and Aalst, W. M. P. van der},
Booktitle = {{BPI} 2012 Workshop Pre-proceedings},
Year = {2012},
Editor = {Dongen, B. F. van and Ferreira, D. R. and Weber, B.},
Month = sep,
Note = {Accepted as short paper},
Pages = {71--76},
Abstract = {In the area of process mining, the ILP Miner is known for the fact that it always returns a Petri net that perfectly fits a given event log. Like for most process discovery algorithms, its complexity is linear in the size of the event log and exponential in the number of event classes (i.e., distinct activities). As a result, the potential gain by partitioning the event classes is much higher than the potential gain by partitioning the traces in the event log over multiple event logs. This paper proposes to use the so-called passages to split up the event classes over multiple event logs, and shows what the results are for seven large event logs. The results show that indeed the use of passages alleviates the complexity, but that much hinges on the size of the largest passage detected: The smaller this passage, the better the run time.},
Owner = {hverbeek},
Timestamp = {2012.07.12},
}
• H. M. W. Verbeek and W. M. P. v. d. Aalst, “Merging alignments for decomposed replay,” in Application and theory of Petri nets and concurrency, Torun, Poland, 2016, pp. 219-239.
[Bibtex]
@InProceedings{Verbeek16,
Title = {Merging Alignments for Decomposed Replay},
Author = {Verbeek, H. M. W. and Aalst, W. M. P. van der},
Booktitle = {Application and Theory of {Petri} Nets and Concurrency},
Year = {2016},
Editor = {Kordon, F. and Moldt, D.},
Month = jun,
Pages = {219--239},
Publisher = {Springer International Publishing},
Series = {LNCS},
Volume = {9698},
Abstract = {In the area of process mining, conformance checking aims to find an optimal alignment between an event log (which captures the activities that actually have happened) and a Petri net (which describes expected or normative behavior). Optimal alignments highlight discrepancies between observed and modeled behavior. To find an optimal alignment, a potentially challenging optimization problem needs to be solved based on a predefined cost function for misalignments. Unfortunately, this may be very time consuming for larger logs and models and often intractable. A solution is to decompose the problem of finding an optimal alignment in many smaller problems that are easier to solve. Decomposition can be used to detect conformance problems in less time and provides a lower bound for the costs of an optimal alignment. Although the existing approach is able to decide whether a trace fits or not, it does not provide an overall alignment. In this paper, we provide an algorithm that is able provide such an optimal alignment from the decomposed alignments if this is possible. Otherwise, the algorithm produces a so-called pseudo-alignment that can still be used to pinpoint non-conforming parts of log and model. The approach has been implemented in ProM and tested on various real-life event logs.},
Doi = {10.1007/978-3-319-39086-4_14},
Url = {http://www.win.tue.nl/~hverbeek/wp-content/papercite-data/pdf/verbeek16.pdf}
}
• H. M. W. Verbeek and W. M. P. van der Aalst, “An experimental evaluation of passage-based process discovery,” in Bpm 2012 workshop, 2013, pp. 205-210.
[Bibtex]
@InProceedings{Verbeek13a,
Title = {An Experimental Evaluation of Passage-Based Process Discovery},
Author = {Verbeek, H. M. W. and Aalst, W. M. P. van der},
Booktitle = {BPM 2012 Workshop},
Year = {2013},
Editor = {La Rosa, M. and Soffer, P.},
Pages = {205--210},
Publisher = {Springer},
Series = {LNBIP},
Volume = {132},
Owner = {hverbeek},
Timestamp = {2012.12.06}
}
• H. M. W. Verbeek and W. M. P. van der Aalst, “Analyzing bpel processes using petri nets,” in 2nd international workshop on applications of petri nets to coordination, workflow and business process management (pncwb 2005), Miami, Florida, 2005, pp. 59-78.
[Bibtex]
@InProceedings{Verbeek05,
Title = {Analyzing {BPEL} processes using {Petri} nets},
Author = {Verbeek, H. M. W. and Aalst, W. M. P. van der},
Booktitle = {2nd International Workshop on Applications of Petri Nets to Coordination, Workflow and Business Process Management (PNCWB 2005)},
Year = {2005},
Editor = {Marinescu, D.},
Month = jun,
Pages = {59--78},
Publisher = {Florida International University, Miami, Florida},
Abstract = {Some years ago, BEA, IBM, Microsoft, SAP AG, and Siebel Systems teamed up and proposed the Business Process Execution Language for Web Services (BPEL or BPEL4WS) for application integration within and across organizational boundaries. By now, BPEL has become the de facto standard in this Web services composition arena. However, little effort has been dedicated so far concerning the verification of the modeled business processes. For example, there is no support to detect possible deadlocks, or to detect parts of the process that are not viable. For so-called WF-nets (workflow nets), techniques and tools exist which make it possible to detect such anomalies. Therefore, we could detect these anomalies in a BPEL process model provided that we can successfully map this model onto a WF-net. This papers describes a first attempt to map a BPEL process model onto a WF-net. Although not all BPEL constructs have been mapped yet, the results seem promising, as we are able to map typical examples from the BPEL 1.1 specification onto WF-nets.},
Owner = {hverbeek},
Timestamp = {2008.11.04},
}
• H. M. W. Verbeek and W. M. P. van der Aalst, “Decomposing replay problems: a case study,” in Joint proceedings of pnse’13 and modbe’13, milano, italy, june 24-25, 2013., 2013, pp. 219-235.
[Bibtex]
@InProceedings{Verbeek13c,
Title = {Decomposing Replay Problems: A Case Study},
Author = {Verbeek, H. M. W. and Aalst, W. M. P. van der},
Booktitle = {Joint Proceedings of PNSE'13 and ModBE'13, Milano, Italy, June 24-25, 2013},
Year = {2013},
Editor = {Moldt, D.},
Month = jun,
Pages = {219--235},
Publisher = {CEUR-WS.org},
Series = {CEUR Workshop Proceedings},
Volume = {989},
Owner = {hverbeek},
Timestamp = {2013.08.06},
Url = {http://ceur-ws.org/Vol-989/paper07.pdf}
}
• H. M. W. Verbeek and W. M. P. van der Aalst, “Woflan 2.0: a petri-net-based workflow diagnosis tool,” in Application and theory of petri nets 2000, 2000, pp. 475-484.
[Bibtex]
@InProceedings{Verbeek00,
Title = {{Woflan} 2.0: A {Petri}-net-based Workflow Diagnosis Tool},
Author = {Verbeek, H. M. W. and Aalst, W. M. P. van der},
Booktitle = {Application and Theory of Petri Nets 2000},
Year = {2000},
Editor = {Nielsen, M. and Simpson, D.},
Pages = {475--484},
Publisher = {Springer, Berlin, Germany},
Series = {Lecture Notes in Computer Science},
Volume = {1825},
Abstract = {Workflow management technology promises a flexible solution facilitating the easy creation of new business processes and modification of existing ones. Unfortunately, most of today's workflow products allow for erroneous processes to be put in production: these products lack proper verification mechanisms in their process-definition tools for the created or modified processes. This paper presents the workflow diagnosis tool Woflan, which fills this gap. Using Petri-net based techniques, Woflan diagnoses process definitions before they are put into production. These process definitions can be imported from commercial workflow products. Furthermore, Woflan guides the modeler of a workflow process definition towards finding and correcting possible errors.},
Owner = {hverbeek},
Timestamp = {2008.11.04},
}
• H. M. W. Verbeek and W. M. P. van der Aalst, “Decomposed process mining: the ilp case,” in Bpm 2014 workshops, Springer, 2015, vol. 202, pp. 264-276.
[Bibtex]
@InCollection{Verbeek14,
Title = {Decomposed Process Mining: The {ILP} Case},
Author = {Verbeek, H. M. W. and Aalst, W. M. P. van der},
Booktitle = {BPM 2014 Workshops},
Publisher = {Springer},
Year = {2015},
Pages = {264--276},
Volume = {202},
Doi = {10.1007/978-3-319-15895-2_23},
}
• H. M. W. Verbeek and W. M. P. van der Aalst, “Decomposing replay problems: a case study,” BPMcenter.org, BPM Center report BPM-13-09, 2013.
[Bibtex]
@TechReport{Verbeek13b,
Title = {Decomposing Replay Problems: A Case Study},
Author = {Verbeek, H. M. W. and Aalst, W. M. P. van der},
Institution = {BPMcenter.org},
Year = {2013},
Number = {BPM-13-09},
Type = {BPM Center report},
Owner = {hverbeek},
Timestamp = {2013.05.07},
}
• H. M. W. Verbeek and W. M. P. van der Aalst, “An experimental evaluation of passage-based process discovery,” BPMcenter.org, BPM Center Report BPM-12-14, 2012.
[Bibtex]
@techreport{Verbeek12a,
  author      = {Verbeek, H. M. W. and Aalst, W. M. P. van der},
  title       = {An Experimental Evaluation of Passage-Based Process Discovery},
  institution = {BPMcenter.org},
  type        = {BPM Center Report},
  number      = {BPM-12-14},
  year        = {2012},
  abstract    = {In the area of process mining, the ILP Miner is known for the fact that it always returns a Petri net that perfectly fits a given event log. However, the downside of the ILP Miner is that its complexity is exponential in the number of event classes in that event log. As a result, the ILP Miner may take a very long time to return a Petri net. Partitioning the traces in the event log over multiple event logs does not really alleviate this problem. Like for most process discovery algorithms, the complexity is linear in the size of the event log and exponential in the number of event classes (i.e., distinct activities). Hence, the potential gain by partitioning the event classes is much higher. This paper proposes to use the so-called passages to split up the event classes over multiple event logs, and shows what the results are for seven large event logs. The results show that indeed the use of passages alleviates the complexity, but that much hinges on the size of the largest passage detected: The smaller this passage, the better the run time.},
  owner       = {hverbeek},
  timestamp   = {2011.10.03},
}
• H. M. W. Verbeek and W. M. P. van der Aalst, Configuring ibm websphere monitor for process mining, 2010.
[Bibtex]
@Unpublished{Verbeek10d,
Title = {Configuring {IBM} {WebSphere} Monitor for Process Mining},
Author = {Verbeek, H. M. W. and Aalst, W. M. P. van der},
Year = {2010},
Note = {Unpublished manuscript},
Abstract = {Process mining has emerged as a way to discover or check the conformance of processes based on event logs. This enables organizations to learn from processes as they really take place. Since web services are distributed over autonomous parties, it is vital to monitor the correct execution of service processes. Fortunately, the ``web services stack'' assists in collecting structured event logs. These logs can be used to extract new information about service processes (like bottlenecks) and to check the conformance. In this paper, we demonstrate that such an event log can be obtained in the context of the IBM's WebSphere environment. More specifically, we show how to configure the WebSphere Business Monitor in such a way that it collects all the information needed for generating an event log.},
Owner = {hverbeek},
Timestamp = {2012.09.20},
}
• H. M. W. Verbeek and W. M. P. van der Aalst, “On the verification of epcs using t-invariants,” BPMcenter.org, BPM Center Report BPM-06-05, 2006.
[Bibtex]
@TechReport{Verbeek06,
Title = {On the verification of {EPCs} using {T-invariants}},
Author = {Verbeek, H. M. W. and Aalst, W. M. P. van der},
Institution = {BPMcenter.org},
Year = {2006},
Number = {BPM-06-05},
Type = {BPM Center Report},
Abstract = {To verify a (business) process model, for example expressed in terms of an Event-driven Process Chain (EPC), most of the approaches described in literature require the construction of its state space. Unfortunately, for complex business processes the state space can be extremely large (if at all finite) and, as a result, constructing the state space may require excessive time. Moreover, semi-formal modeling languages such as the EPC language require a rather lenient interpretation of their semantics. To circumvent both the state-explosion problem and the semantics-related problems of EPCs, we propose an alternative approach based on transition invariants (T-invariants). T-invariants are well-known in the Petri-net community. They do not require the construction of the state space and can be computed efficiently. Moreover, we will show that our interpretation of T-invariants in this context can be used to deal effectively with the semantics-related problems of EPCs. To demonstrate our approach we will use two case studies: one is based on the reference model of SAP R/3 while the other one is based on a trade execution process within a large Dutch bank. We will also argue that the approach can be applied to other (informal or formal) modeling techniques.},
Owner = {hverbeek},
Timestamp = {2008.11.04},
}
• H. M. W. Verbeek, W. M. P. van der Aalst, and A. H. M. ter Hofstede, “Verifying workflows with cancellation regions and or-joins: an approach based on relaxed soundness and invariants,” The computer journal (cj), vol. 50, iss. 3, pp. 294-314, 2007.
[Bibtex]
@Article{Verbeek07,
Title = {Verifying Workflows with Cancellation Regions and {OR-joins}: An Approach Based on Relaxed Soundness and Invariants},
Author = {Verbeek, H. M. W. and Aalst, W. M. P. van der and Hofstede, A. H. M. ter},
Journal = {The Computer Journal (CJ)},
Year = {2007},
Number = {3},
Pages = {294--314},
Volume = {50},
Abstract = {YAWL (Yet Another Workflow Language) workflow language supports the most frequent control-flow patterns found in the current workflow practice. As a result, most workflow languages can be mapped onto YAWL without the loss of control-flow details, even languages allowing for advanced constructs such as cancellation regions and OR-joins. Hence, a verification approach for YAWL is desirable, because such an approach could be used for any workflow language that can be mapped onto YAWL. Unfortunately, cancellation regions and OR-joins are `non-local' properties, and in general we cannot even decide whether the desired final state is reachable if both patterns are present. This paper proposes a verification approach based on (i) an abstraction of the OR-join semantics; (ii) the relaxed soundness property; and (iii) transition invariants. This approach is correct (errors reported are really errors), but not necessarily complete (not every error might get reported). This incompleteness can be explained because, on the one hand, the approach abstracts from the OR-join semantics and on the other hand, it may use only transition invariants, which are structural properties. Nevertheless, our approach can be used to successfully detect errors in YAWL models. Moreover, the approach can be easily transferred to other workflow languages allowing for advanced constructs such as cancellations and OR-joins.},
Doi = {10.1093/comjnl/bxl074},
Owner = {hverbeek},
Timestamp = {2008.11.04},
Url = {http://comjnl.oxfordjournals.org/cgi/reprint/50/3/294}
}
• H. M. W. Verbeek, W. M. P. van der Aalst, and A. H. M. ter Hofstede, “Verifying workflows with cancellation regions and or-joins: an approach based on invariants,” Eindhoven University of Technology, BETA Working Paper 156, 2006.
[Bibtex]
@TechReport{Verbeek06a,
Title = {Verifying Workflows with Cancellation Regions and {OR-joins}: An Approach Based on Invariants},
Author = {Verbeek, H. M. W. and Aalst, W. M. P. van der and Hofstede, A. H. M. ter},
Institution = {Eindhoven University of Technology},
Year = {2006},
Month = jan,
Number = {156},
Type = {BETA Working Paper},
Abstract = {The YAWL (Yet Another Workflow Language) workflow language supports the most frequent control-flow patterns found in the current workflow practice. As a result, most workflow languages can be mapped onto YAWL without loss of control-flow details, even languages allowing for advanced constructs such as cancellation regions and OR-joins. At the moment no analysis techniques are available for such languages, because both cancellation regions and OR-joins are ``non-local'' properties and therefore difficult to verify. Hence, a verification approach for YAWL is desirable, because such an approach could be used for any workflow language that can be mapped onto YAWL. This paper introduces a verification approach for YAWL that abstracts from the actual semantics of the OR-join. This approach is correct (errors reported are really errors), but not necessarily complete (not every error might get reported). This incompleteness is due to the fact that the approach approximates the OR-join semantics. Nevertheless, our approach can be used to successfully detect errors in YAWL models. Moreover, the approach can easily be transferred to other workflow languages allowing for advanced constructs such as cancellations and OR-joins.},
Owner = {hverbeek},
Timestamp = {2008.11.04},
Url = {http://beta.ieis.tue.nl/node/1264}
}
• H. M. W. Verbeek, W. M. P. van der Aalst, and A. Kumar, “Xrl/woflan: verification and extensibility of an xml/petri-net-based language for inter-organizational workflows,” Information technology and management (itm), vol. 5, iss. 1–2, pp. 65-110, 2004.
[Bibtex]
@Article{Verbeek04a,
Title = {{XRL/Woflan}: Verification and Extensibility of an {XML}/{Petri}-net-based language for inter-organizational workflows},
Author = {Verbeek, H. M. W. and Aalst, W. M. P. van der and Kumar, A.},
Journal = {Information Technology and Management (ITM)},
Year = {2004},
Number = {1--2},
Pages = {65--110},
Volume = {5},
Abstract = {In this paper, we present XRL/Woflan. XRL/Woflan is a software tool using state-of-the-art Petri-net analysis techniques for verifying XRL workflows. The workflow language XRL (eXchangeable Routing Language) supports cross-organizational processes. XRL uses XML for the representation of process definitions and Petri nets for its semantics. XRL is instance-based, therefore, workflow definitions can be changed on the fly and sent across organizational boundaries. These features are vital for today's dynamic and networked economy. However, the features also enable subtle, but highly disruptive, cross-organizational errors. On-the-fly changes and one-of-a-kind processes are destined to result in errors. Moreover, errors of a cross-organizational nature are difficult to repair. XRL/Woflan uses eXtensible Stylesheet Language Transformations (XSLT) to transform XRL specifications to a specific class of Petri nets, and to allow users to design new routing constructs, thus making XRL extensible. The Petri-net representation is used to determine whether the workflow is correct. If the workflow is not correct, anomalies such as deadlocks and livelocks are reported.},
Owner = {hverbeek},
Timestamp = {2008.11.04},
}
• H. M. W. Verbeek, W. M. P. van der Aalst, and A. Kumar, “Xrl/woflan: verification of an xml/petri-net-based language for inter-organizational workflows,” Eindhoven University of Technology, Beta Working Paper 65, 2001.
[Bibtex]
@TechReport{Verbeek01,
Title = {{XRL/Woflan}: verification of an {XML}/{Petri}-net-based language for inter-organizational workflows},
Author = {Verbeek, H. M. W. and Aalst, W. M. P. van der and Kumar, A.},
Institution = {Eindhoven University of Technology},
Year = {2001},
Month = dec,
Number = {65},
Type = {Beta Working Paper},
Abstract = {Internet-based technology, E-commerce, and the rise of networked virtual enterprises have fueled the need for inter-organizational workflows. Although XML allows trading partners to exchange information, it cannot be used to coordinate activities in different organizational entities. Therefore, we developed a workflow language named XRL (eXchangeable Routing Language) for supporting cross-organizational processes. XRL uses XML for the representation of process definitions and Petri nets for its semantics. Since XRL is instance-based, workflow definitions can be changed on the fly and sent across organizational boundaries. These features are vital for today's dynamic and networked economy. However, the features also enable subtle, but highly disruptive, cross-organizational errors. On-the-fly changes and one-of-a-kind processes are destined to result in errors. Moreover, errors of a cross-organizational nature are difficult to repair. In this paper, we present XRL/Woflan. XRL/Woflan is a software tool using state-of-the-art Petri-net analysis techniques for verifying XRL workflows. The tool uses eXtensible Stylesheet Language Transformations (XSLT) to transform XRL specifications to a specific class of Petri nets. The Petri-net representation is used to determine whether the workflow is correct. If the workflow is not correct, anomalies such as deadlocks and livelocks are reported.},
Owner = {hverbeek},
Timestamp = {2008.11.04},
Url = {http://beta.ieis.tue.nl/node/1360}
}
• H. M. W. Verbeek, W. M. P. van der Aalst, and J. Munoz-Gama, “Divide and conquer,” BPMCenter.org, BPM Center Report BPM-16-06, 2016.
[Bibtex]
@TechReport{Verbeek16b,
Title = {Divide and Conquer},
Author = {Verbeek, H. M. W. and Aalst, W. M. P. van der and Munoz-Gama, J.},
Institution = {BPMcenter.org},
Year = {2016},
Number = {BPM-16-06},
Type = {BPM Center Report},
}
• H. M. W. Verbeek and T. Basten, “Deciding life-cycle inheritance on petri nets,” in 24th international conference on application and theory of petri nets (icatpn 2003), Eindhoven, The Netherlands, 2003, pp. 44-63.
[Bibtex]
@InProceedings{Verbeek03,
Title = {Deciding life-cycle inheritance on {Petri} nets},
Author = {Verbeek, H. M. W. and Basten, T.},
Booktitle = {24th International Conference on Application and Theory of Petri Nets (ICATPN 2003)},
Year = {2003},
Editor = {Aalst, W. M. P. van der and Best, E.},
Month = jun,
Pages = {44--63},
Publisher = {Springer, Berlin, Germany},
Series = {Lecture Notes in Computer Science},
Volume = {2679},
Abstract = {One of the key issues of object-oriented modeling is inheritance. It allows for the definition of a subclass that inherits features from some superclass. When considering the dynamic behavior of objects, as captured by their life cycles, there is no general agreement on the meaning of inheritance. Basten and Van der Aalst introduced the notion of life-cycle inheritance for this purpose. Unfortunately, the search tree needed for deciding life-cycle inheritance is in general prohibitively large. This paper presents a backtracking algorithm to decide life-cycle inheritance on Petri nets. The algorithm uses structural properties of both the base life cycle and the potential sub life cycle to prune the search tree. Test cases show that the results are promising.},
Owner = {hverbeek},
Timestamp = {2008.11.04},
}
• H. M. W. Verbeek, T. Basten, and W. M. P. van der Aalst, “Diagnosing workflow processes using woflan,” The computer journal, vol. 44, iss. 4, pp. 246-279, 2001.
[Bibtex]
@Article{Verbeek01a,
Title = {Diagnosing Workflow Processes using {Woflan}},
Author = {Verbeek, H. M. W. and Basten, T. and Aalst, W. M. P. van der},
Journal = {The Computer Journal},
Year = {2001},
Number = {4},
Pages = {246--279},
Volume = {44},
Abstract = {Workflow management technology promises a flexible solution for business-process support facilitating the easy creation of new business processes and modification of existing processes. Unfortunately, today's workflow products have no support for workflow verification. Errors made at design-time are not detected and result in very costly failures at run-time. This paper presents the verification tool Woflan. Woflan analyzes workflow process definitions downloaded from commercial workflow products using state-of-the-art Petri-net-based analysis techniques. This paper describes the functionality of Woflan emphasizing diagnostics to locate the source of a design error. Woflan is evaluated via two case studies, one involving twenty groups of students designing a complex workflow process and one involving an industrial workflow process designed by Staffware Benelux. The results are encouraging and show that Woflan guides the user in finding and correcting errors in the design of workflows.},
Doi = {10.1093/comjnl/44.4.246},
Owner = {hverbeek},
Timestamp = {2008.11.04},
Url = {http://comjnl.oxfordjournals.org/cgi/reprint/44/4/246}
}
• H. M. W. Verbeek, T. Basten, and W. M. P. van der Aalst, “Diagnosing workflow processes using woflan,” Eindhoven University of Technology, Eindhoven, The Netherlands, BETA Working Paper 48, 2000.
[Bibtex]
@TechReport{Verbeek00a,
Title = {Diagnosing Workflow Processes Using {Woflan}},
Author = {Verbeek, H. M. W. and Basten, T. and Aalst, W. M. P. van der},
Institution = {Eindhoven University of Technology},
Year = {2000},
Number = {48},
Type = {BETA Working Paper},
Abstract = {Workflow management technology promises a flexible solution for business-process support facilitating the easy creation of new business processes and modification of existing processes. Unfortunately, today's workflow products have no support for workflow verification. Errors made at design-time are not detected and result in very costly failures at run-time. This paper presents the verification tool Woflan. Woflan analyzes workflow process definitions downloaded from commercial workflow products using state-of-the-art Petri-net-based analysis techniques. This paper describes the functionality of Woflan emphasizing diagnostics to locate the source of a design error. Woflan is evaluated via two case studies, one involving twenty groups of students designing a complex workflow process and one involving an industrial workflow process designed by Staffware Benelux. The results are encouraging and show that Woflan guides the user in finding and correcting errors in the design of workflows.},
Owner = {hverbeek},
Timestamp = {2008.11.04},
Url = {http://beta.ieis.tue.nl/node/1377}
}
• H. M. W. Verbeek, T. Basten, and W. M. P. van der Aalst, “Diagnosing workflow processes using woflan,” Eindhoven University of Technology, Eindhoven, The Netherlands, Computing Science Report 99/02, 1999.
[Bibtex]
@TechReport{Verbeek99,
Title = {Diagnosing Workflow Processes using {Woflan}},
Author = {Verbeek, H. M. W. and Basten, T. and Aalst, W. M. P. van der},
Institution = {Eindhoven University of Technology},
Year = {1999},
Number = {99/02},
Type = {Computing Science Report},
Abstract = {Workflow management technology promises a flexible solution for business-process support facilitating the easy creation of new business processes and modification of existing processes. Unfortunately, today's workflow products have no support for workflow verification. Errors made at design-time are not detected and result in very costly failures at run-time. This paper presents the verification tool Woflan. Woflan analyzes workflow process definitions downloaded from commercial workflow products using state-of-the-art Petri-net-based analysis techniques. This paper describes the functionality of Woflan emphasizing new diagnostics to locate the source of a design error. Based on a case study (involving twenty groups of students designing a complex workflow process), these new diagnostics have been evaluated and the results have been used to develop a method to guide the user of Woflan in finding and correcting errors in the design of workflows.},
Owner = {hverbeek},
Timestamp = {2008.11.04},
Url = {http://alexandria.tue.nl/extra1/wskrap/publichtml/9912075.pdf}
}
• H. M. W. Verbeek, J. C. A. M. Buijs, B. F. van Dongen, and W. M. P. van der Aalst, “Prom 6: the process mining toolkit,” in Proc. of bpm demonstration track 2010, Hoboken, USA, 2010, pp. 34-39.
[Bibtex]
@InProceedings{Verbeek10c,
Title = {{ProM} 6: The Process Mining Toolkit},
Author = {Verbeek, H. M. W. and Buijs, J. C. A. M. and Dongen, B. F. van and Aalst, W. M. P. van der},
Booktitle = {Proc. of BPM Demonstration Track 2010},
Year = {2010},
Editor = {La Rosa, M.},
Month = sep,
Pages = {34--39},
Publisher = {CEUR-WS.org},
Series = {CEUR Workshop Proceedings},
Volume = {615},
Abstract = {Process mining has been around for a decade, and it has proven to be a very fertile and successful research field. Part of this success can be contributed to the ProM tool, which combines most of the existing process mining techniques as plug-ins in a single tool. ProM 6 removes many limitations that existed in the previous versions, in particular with respect to the tight integration between the tool and the GUI. ProM 6 has been developed from scratch and uses a completely redesigned architecture. The changes were driven by many real-life applications and new insights into the design of process analysis software. Furthermore, the introduction of XESame in this toolkit allows for the conversion of logs to the ProM native format without programming.},
Owner = {hverbeek},
Timestamp = {2010.07.22},
Url = {http://ceur-ws.org/Vol-615/paper13.pdf}
}
• H. M. W. Verbeek, J. C. A. M. Buijs, B. F. van Dongen, and W. M. P. van der Aalst, “Xes, xesame, and prom 6,” in Information system evolution, P. Soffer and E. Proper, Eds., Hammamet, Tunisia: Springer, 2011, vol. 72, pp. 60-75.
[Bibtex]
@InCollection{Verbeek11,
Title = {{XES}, {XESame}, and {ProM} 6},
Author = {Verbeek, H. M. W. and Buijs, J. C. A. M. and Dongen, B. F. van and Aalst, W. M. P. van der},
Booktitle = {Information System Evolution},
Publisher = {Springer},
Year = {2011},
Editor = {Soffer, P. and Proper, E.},
Month = jun,
Pages = {60--75},
Series = {Lecture Notes in Business Information Processing (LNBIP)},
Volume = {72},
Abstract = {Process mining has emerged as a new way to analyze business processes based on event logs. These events logs need to be extracted from operational systems and can subsequently be used to discover or check the conformance of processes. ProM is a widely used tool for process mining. In earlier versions of ProM, MXML was used as an input format. In future releases of ProM, a new logging format will be used: the eXtensible Event Stream (XES) format. This format has several advantages over MXML. The paper presents two tools that use this format - XESame and ProM 6 - and highlights the main innovations and the role of XES. XESame enables domain experts to specify how the event log should be extracted from existing systems and converted to XES. ProM 6 is a completely new process mining framework based on XES and enabling innovative process mining functionality.},
Doi = {10.1007/978-3-642-17722-4_5},
Owner = {hverbeek},
Timestamp = {2010.09.21}
}
• H. M. W. Verbeek, J. C. A. M. Buijs, B. F. van Dongen, and W. M. P. van der Aalst, Xes tools, 2010.
[Bibtex]
@Misc{Verbeek10b,
Title = {{XES} Tools},
Author = {Verbeek, H. M. W. and Buijs, J. C. A. M. and Dongen, B. F. van and Aalst, W. M. P. van der},
HowPublished = {CAiSE 2010 Forum},
Month = jun,
Year = {2010},
Abstract = {Process mining has emerged as a new way to analyze business processes based on event logs. These events logs need to be extracted from operational systems and can subsequently be used to discover or check the conformance of processes. ProM is a widely used tool for process mining. In earlier versions of ProM, MXML was used as an input format. In future releases of ProM, a new logging format will be used: the eXtensible Event Stream (XES) format. This format has several advantages over MXML. The paper presents two tools that use this format - XESame and ProM6 - and highlights the main innovations and the role of XES. XESame enables domain experts to specify how the event log should be extracted from existing systems and converted to XES. ProM6 is a completely new process mining framework based on XES and enabling innovative process mining functionality.},
Owner = {hverbeek},
Timestamp = {2010.06.25},
Url = {http://www.processmining.org/_media/publications/verbeek2010.pdf}
}
• H. M. W. Verbeek and B. F. van Dongen, “Translating labelled p/t nets into epcs for sake of communication,” Eindhoven University of Technology, BETA Working Paper 194, 2007.
[Bibtex]
@TechReport{Verbeek07a,
Title = {Translating labelled {P/T} nets into {EPCs} for sake of communication},
Author = {Verbeek, H. M. W. and Dongen, B. F. van},
Institution = {Eindhoven University of Technology},
Year = {2007},
Month = jan,
Number = {194},
Type = {BETA Working Paper},
Abstract = {Petri nets can be used to capture the behavior of a process in a formal and precise way. However, Petri nets are less suitable to communicate the process to its owner, as simple routing constructs in the process might require a large number of transitions. This paper introduces a translation from labelled P/T nets to EPCs in such a way that many transitions can be translated into one EPC connector. The algorithm even allows for translating a set of transitions into an OR connector, even though the concept of OR connectors (especially the OR join connector) has no real equal in Petri nets. Using this translation presented here, labelled P/T nets may be communicated to the process owner by means of the created EPC.},
Owner = {hverbeek},
Timestamp = {2008.11.04},
Url = {http://beta.ieis.tue.nl/node/1226}
}
• H. M. W. Verbeek, B. F. van Dongen, J. Mendling, and W. M. P. van der Aalst, “Interoperability in the prom framework,” in Proceedings of the caise06 workshops and doctoral consortium, Luxembourg, 2006, pp. 619-630.
[Bibtex]
@InProceedings{Verbeek06b,
Title = {Interoperability in the {ProM} Framework},
Author = {Verbeek, H. M. W. and Dongen, B. F. van and Mendling, J. and Aalst, W. M. P. van der},
Booktitle = {Proceedings of the CAiSE'06 Workshops and Doctoral Consortium},
Year = {2006},
Editor = {Latour, T. and Petit, M.},
Month = jun,
Pages = {619--630},
Publisher = {Presses Universitaires de Namur},
Abstract = {Originally the ProM framework was developed as a design artifact for the process mining domain, i.e., extracting process models from event logs. However, in recent years the scope of the framework has become broader and now includes process verification, social network analysis, conformance checking, verification based on temporal logic, etc. Moreover, the framework supports a wide variety of process models, e.g., Petri nets, Event-driven Process Chains (EPCs), Heuristics nets, YAWL models, and is plug-able, i.e., people can add plug-ins without changing the framework itself. This makes the ProM framework an interesting environment for model interoperability. For example, people can take transaction log from IBM's WebSphere, discover a process model in terms of a heuristics net, convert the heuristics net to a Petri net for analysis, load an EPC defined using the ARIS toolset, verify the EPC and convert it to a Petri net, determine the fitness of the ARIS model given the transaction log from WebSphere, and finally convert both models to a YAWL specification that is exported. Such application scenarios are supported by ProM and demonstrate true model interoperability. In this paper, we present ProM's interoperability capabilities using a running example.},
Owner = {hverbeek},
Timestamp = {2008.11.04},
}
• H. M. W. Verbeek, M. van Hattem, H. A. Reijers, and W. de Munk, “Protos 7.0: simulation made accessible,” in Application and theory of petri nets 2005, Miami, Florida, 2005, pp. 465-474.
[Bibtex]
@InProceedings{Verbeek05a,
Title = {{Protos} 7.0: Simulation made accessible},
Author = {Verbeek, H. M. W. and Hattem, M. van and Reijers, H. A. and Munk, W. de},
Booktitle = {Application and Theory of Petri nets 2005},
Year = {2005},
Address = {Miami, Florida},
Editor = {Ciardo, G. and Darondeau, P.},
Month = jun,
Pages = {465--474},
Publisher = {Springer, Berlin, Germany},
Series = {Lecture Notes in Computer Science},
Volume = {3536},
Abstract = {Many consider simulation to be a highly specialist activity: it is difficult to undertake and is even more difficult to understand its outcomes. The new version of the business process modeling tool Protos attempts to more closely integrate modeling and simulation facilities into one tool. The assumed benefit is that business professionals may more easily undertake simulation experiments when they are enabled with the same tool to extend their existing process models to carry out simulation experiments. This paper explains how the existing engine of the Petri-net based tool ExSpect is integrated into Protos 7.0. It also shows the extended user interface of Protos and the simulation reports it generates.},
Doi = {10.1007/11494744_27},
Owner = {hverbeek},
Timestamp = {2008.11.04},
}
• H. M. W. Verbeek, A. Hirnschall, and W. M. P. van der Aalst, “Xrl/flower: supporting inter-organizational workflows using xml/petri-net technology,” in Web services, e-business and the semantic web, 2002, pp. 93-108.
[Bibtex]
@InProceedings{Verbeek02,
Title = {{XRL/Flower}: Supporting Inter-Organizational Workflows Using {XML}/{Petri}-net Technology},
Author = {Verbeek, H. M. W. and Hirnschall, A. and Aalst, W. M. P. van der},
Booktitle = {Web Services, E-business and the Semantic Web},
Year = {2002},
Pages = {93--108},
Publisher = {Springer, Berlin, Germany},
Series = {Lecture Notes in Computer Science},
Volume = {2512},
Abstract = {In this paper, we present the architecture of XRL/Flower. XRL/Flower is a software tool, which benefits from the fact that it is based on both XML and Petri nets. Standard XML tools can be deployed to parse, check, and handle XRL documents. The Petri-net representation allows for a straight-forward and succinct implementation of the workflow engine. XRL constructs are automatically translated into Petri-net constructs. On the one hand, this allows for an efficient implementation. On the other hand, the system is easy to extend: For supporting a new routing primitive, only the translation to the Petri-net engine needs to be added and the engine itself does not need to change. Last, but not least, the Petri net representation can be analyzed using state-of-the-art analysis techniques and tools.},
Doi = {10.1007/3-540-36189-8_8},
Owner = {hverbeek},
Timestamp = {2008.11.04},
}
• H. M. W. Verbeek and F. Mannhardt, “The DrFurby Classifier submission to the Process Discovery Contest @ BPM 2016,” BPMCenter.org, BPM Center Report BPM-16-08, 2016.
[Bibtex]
@techreport{Verbeek16c,
  author      = {Verbeek, H. M. W. and Mannhardt, F.},
  title       = {The {DrFurby Classifier} submission to the {Process Discovery Contest @ BPM 2016}},
  institution = {BPMCenter.org},
  type        = {BPM Center Report},
  number      = {BPM-16-08},
  year        = {2016},
}
• H. M. W. Verbeek and R. Medeiros de Carvalho, “Log skeletons: a classification approach to process discovery,” arXiv.org, 2018.
[Bibtex]
@TechReport{Verbeek18,
Title = {Log Skeletons: A Classification Approach to Process Discovery},
Author = {Verbeek, H. M. W. and Medeiros de Carvalho, R.},
Institution = {arXiv.org},
Year = {2018},
Note = {arXiv Identifier 1806.08247},
Abstract = {To test the effectiveness of process discovery algorithms, a Process Discovery Contest (PDC) has been set up. This PDC uses a classification approach to measure this effectiveness: The better the discovered model can classify whether or not a new trace conforms to the event log, the better the discovery algorithm is supposed to be. Unfortunately, even the state-of-the-art fully-automated discovery algorithms score poorly on this classification. Even the best of these algorithms, the Inductive Miner, scored only 147 correct classified traces out of 200 traces on the PDC of 2017. This paper introduces the rule-based log skeleton model, which is closely related to the Declare constraint model, together with a way to classify traces using this model. This classification using log skeletons is shown to score better on the PDC of 2017 than state-of-the-art discovery algorithms: 194 out of 200. As a result, one can argue that the fully-automated algorithm to construct (or: discover) a log skeleton from an event log outperforms existing state-of-the-art fully-automated discovery algorithms.},
Owner = {hverbeek},
Timestamp = {2018.06.21},
Url = {https://arxiv.org/abs/1806.08247},
}
• H. M. W. Verbeek, J. Munoz-Gama, and W. M. P. van der Aalst, “Divide and conquer: a tool framework for supporting decomposed discovery in process mining,” The Computer Journal, vol. 60, iss. 11, pp. 1649-1674, 2017.
[Bibtex]
@Article{Verbeek17a,
Title = {Divide And Conquer: A Tool Framework for Supporting Decomposed Discovery in Process Mining},
Author = {Verbeek, H. M. W. and Munoz-Gama, J. and Aalst, W. M. P. van der},
Journal = {The Computer Journal},
Year = {2017},
Month = nov,
Number = {11},
Pages = {1649--1674},
Volume = {60},
Abstract = {In the area of process mining, decomposed replay has been proposed to be able to deal with nets and logs containing many different activities. The main assumption behind this decomposition is that replaying many subnets and sublogs containing only some activities is faster then replaying a single net and log containing many activities. Although for many nets and logs this assumption does hold, there are also nets and logs for which it does not hold. This paper shows an example net and log for which the decomposed replay may take way more time, and provides an explanation why this is the case. Next, to mitigate this problem, this paper proposes an alternative way to abstract the subnets from the single net, and shows that the decomposed replay using this alternative abstraction is faster than the monolithic replay even for the problematic cases as identified earlier. However, the alternative abstraction often results in longer computation times for the decomposed replay than the original abstraction. An advantage of the alternative abstraction over the original abstraction is that its cost estimates are typically better.},
Doi = {10.1093/comjnl/bxx040},
Owner = {hverbeek},
Timestamp = {2017.04.06},
}
• H. M. W. Verbeek, A. J. Pretorius, W. M. P. van der Aalst, and J. J. van Wijk, “On petri-net synthesis and attribute-based visualization,” in Proceedings of workshop on petri nets and software engineering (pnse’07), Siedlce, Poland, 2007, pp. 127-141.
[Bibtex]
@InProceedings{Verbeek07b,
Title = {On {Petri}-net synthesis and attribute-based visualization},
Author = {Verbeek, H. M. W. and Pretorius, A. J. and Aalst, W. M. P. van der and Wijk, J. J. van},
Booktitle = {Proceedings of Workshop on Petri Nets and Software Engineering (PNSE'07)},
Year = {2007},
Address = {Siedlce, Poland},
Editor = {Moldt, D. and Kordon, F. and Hee, K. M. van and Colom, J.-M. and Bastide, R.},
Month = jun,
Pages = {127--141},
Publisher = {Publishing House of University of Podlasie, Siedlce, Poland},
Abstract = {State space visualization is important for a good understanding of the system's behavior. Unfortunately, today's visualization tools typically ignore the fact that states might have attributes. Based on these attributes, some states can be considered equivalent after abstraction, and can thus be clustered, which simplifies the state space. Attribute-based visualization tools are the exception to this rule. These tools can deal with attributes. In this paper, we investigate an approach based on Petri nets. Places in a Petri net correspond in a straightforward way to attributes. Furthermore, we can use existing techniques to automatically derive a Petri net from some state space, that is, to automatically add attributes to that state space. As a result, we can use attribute-based visualization tools for any state space. Unfortunately, the approach is hampered by the fact that not every state space results in a usable Petri net.},
Owner = {hverbeek},
Timestamp = {2008.11.04},
}
• H. M. W. Verbeek, A. J. Pretorius, W. M. P. van der Aalst, and J. J. van Wijk, “Assessing state spaces using petri net synthesis and attribute-based visualisation,” Lncs transactions on petri nets and other models of concurrency (topnoc) i, vol. 5100, pp. 152-171, 2008.
[Bibtex]
@Article{Verbeek08,
Title = {Assessing State Spaces using Petri Net Synthesis and Attribute-based Visualisation},
Author = {Verbeek, H. M. W. and Pretorius, A. J. and Aalst, W. M. P. van der and Wijk, J. J. van},
Journal = {LNCS Transactions on Petri Nets and Other Models of Concurrency (ToPNoC) I},
Year = {2008},
Pages = {152--171},
Volume = {5100},
Abstract = {State spaces are commonly used representations of system behavior. A state space may be derived from a model of system behavior but can also be obtained through process mining. For a good understanding of the system's behavior, an analyst may need to assess the state space. Unfortunately, state spaces of realistic applications tend to be very large. This makes this assessment hard. In this paper, we tackle this problem by combining Petri-net synthesis (i.e., regions theory) and visualization. Using Petri-net synthesis we generate the attributes needed for attribute-based visualization. Using visualization we can assess the state space. We demonstrate that such an approach is possible and describe our implementation using existing tools. The only limiting factor of our approach is the performance of current synthesis techniques.},
Comment = {Appeared as Vol. 5100 in the LNCS series},
Doi = {10.1007/978-3-540-89287-8_10},
Owner = {hverbeek},
Timestamp = {2008.11.03},
}
• H. M. W. Verbeek, A. J. Pretorius, W. M. P. van der Aalst, and J. J. van Wijk, “Visualizing state spaces with petri nets,” Eindhoven University of Technology, Computer Science Report 07/01, 2007.
[Bibtex]
@TechReport{Verbeek07c,
Title = {Visualizing State Spaces with {Petri} nets},
Author = {Verbeek, H. M. W. and Pretorius, A. J. and Aalst, W. M. P. van der and Wijk, J. J. van},
Institution = {Eindhoven University of Technology},
Year = {2007},
Number = {07/01},
Type = {Computer Science Report},
Abstract = {Many analysis techniques are based on state spaces, e.g., based on some system model or system log, state spaces are generated that can be used to verify behavioral properties (e.g., absence of deadlocks). State spaces can be inspected automatically without any human interpretation. However, for a good understanding of the system's behavior the analyst needs to inspect and interpret the corresponding state space. Therefore, state space visualization is important. Unfortunately, this aspect has been neglected and today's analysis tools typically represent states spaces in such a way that they are difficult to interpret and inspection is only possible for toy examples. Attribute-based visualization methods address this issue by enabling users to study state spaces in terms of data they understand: the attributes associated with every state. These techniques can deal with many types of attributes. In this paper we propose an approach based on Petri nets. Using existing synthesis techniques based on regions, we automatically derive a Petri net from a given state space. We consider the places of this Petri net as new derived state attributes. Using these, we combine attribute-based visualizations of the state space with diagrams of the associated Petri net. This approach has been implemented and provides an innovative and versatile way to visualize state spaces.},
Owner = {hverbeek},
Timestamp = {2008.11.04},
Url = {http://alexandria.tue.nl/extra1/wskrap/publichtml/200701.pdf},
}
• H. M. W. Verbeek and R. A. van der Toorn, “Transit case study,” in 25th international conference on application and theory of petri nets (icatpn 2004), Bologna, Italy, 2004, pp. 391-410.
[Bibtex]
@InProceedings{Verbeek04b,
Title = {Transit Case Study},
Author = {Verbeek, H. M. W. and Toorn, R. A. van der},
Booktitle = {25th International Conference on Application and Theory of Petri Nets (ICATPN 2004)},
Year = {2004},
Address = {Bologna, Italy},
Editor = {Cortadella, J. and Reisig, W.},
Month = jun,
Pages = {391--410},
Publisher = {Springer, Berlin, Germany},
Series = {Lecture Notes in Computer Science},
Volume = {3099},
Abstract = {One of the key issues of object-oriented modeling is inheritance. It allows for the definition of a subclass that inherits features from some superclass. When considering the dynamic behavior of objects, as captured by their life cycles, there is no general agreement on the meaning of inheritance. Basten and Van der Aalst introduced the notion of life-cycle inheritance for this purpose. Unfortunately, the search tree needed for deciding life-cycle inheritance is in general prohibitively large. This paper presents a comparative study between two possible algorithms. The first algorithm uses structural properties of both the base life cycle and the potential sub life cycle to prune the search tree, while the second is a plain exhaustive search algorithm. Test cases show that the computation times of the second algorithm can indeed be prohibitively expensive (weeks), while the computation times of the first algorithm are all within acceptable limits (seconds). An unexpected result of this case study is that it shows that we need tools for checking life-cycle inheritance.},
Doi = {10.1007/b98283},
Owner = {hverbeek},
Timestamp = {2008.11.04},
}
• H. M. W. Verbeek and M. T. Wynn, “Verification,” in Modern business process automation: yawl and its support environment, A. H. M. ter Hofstede, W. M. P. van der Aalst, M. Adams, and N. Russell, Eds., Springer, Berlin, Germany, 2010, pp. 517-545.
[Bibtex]
@InCollection{Verbeek10a,
Title = {Verification},
Author = {Verbeek, H. M. W. and Wynn, M. T.},
Booktitle = {Modern Business Process Automation: YAWL and its Support Environment},
Publisher = {Springer, Berlin, Germany},
Year = {2010},
Chapter = {20},
Editor = {Hofstede, A. H. M. ter and Aalst, W. M. P. van der and Adams, M. and Russell, N.},
Pages = {517--545},
Series = {Database Management \& Info Retrieval},
Doi = {10.1007/978-3-642-03121-2_20},
Owner = {hverbeek},
Timestamp = {2009.06.05},
}
• H. M. W. Verbeek, M. T. Wynn, W. M. P. van der Aalst, and A. H. M. ter Hofstede, “Reduction rules for reset/inhibitor nets,” Journal of computer and system sciences, vol. 76, iss. 2, pp. 125-143, 2010.
[Bibtex]
@Article{Verbeek10,
Title = {Reduction Rules for Reset/Inhibitor Nets},
Author = {Verbeek, H. M. W. and Wynn, M. T. and Aalst, W. M. P. van der and Hofstede, A. H. M. ter},
Journal = {Journal of Computer and System Sciences},
Year = {2010},
Number = {2},
Pages = {125--143},
Volume = {76},
Abstract = {Reset/inhibitor nets are Petri nets extended with reset arcs and inhibitor arcs. These extensions can be used to model cancelation and blocking. A reset arc allows a transition to remove all tokens from a certain place when the transition fires. An inhibitor arc can stop a transition from being enabled if the place contains one or more tokens. While reset/inhibitor nets increase the expressive power of Petri nets, they also result in increased complexity of analysis techniques. One way of speeding up Petri net analysis is to apply reduction rules. Unfortunately, many of the rules defined for classical Petri nets do not hold in the presence of reset and/or inhibitor arcs. Moreover, new rules can be added. This is the first paper systematically presenting a comprehensive set of reduction rules for reset/inhibitor nets. These rules are liveness and boundedness preserving and are able to dramatically reduce models and their state spaces. It can be observed that most of the modeling languages used in practice have features related to cancelation and blocking. Therefore, this work is highly relevant for all kinds of application areas where analysis is currently intractable.},
Doi = {10.1016/j.jcss.2009.06.003},
Owner = {hverbeek},
Timestamp = {2009.06.05},
}
• J. J. L. C. Vogelaar, H. M. W. Verbeek, B. Luka, and W. M. P. van der Aalst, “Comparing business processes to determine the feasibility of configurable models: a case study,” in Bpm 2011 workshops, part ii, 2012, pp. 50-61.
[Bibtex]
@InProceedings{Vogelaar12,
Title = {Comparing Business Processes to Determine the Feasibility of Configurable Models: A Case Study},
Author = {Vogelaar, J. J. L. C. and Verbeek, H. M. W. and Luka, B. and Aalst, W. M. P. van der},
Booktitle = {BPM 2011 Workshops, Part II},
Year = {2012},
Editor = {Daniel, F. and Dustdar, S. and Barkaoui, K.},
Month = aug,
Organization = {LIMOS - Universit{\'e} Blaise Pascal, Clermont-Ferrand, France},
Pages = {50--61},
Publisher = {Springer-Verlag},
Series = {LNBIP},
Volume = {100},
Abstract = {Organizations are looking for ways to collaborate in the area of process management. Common practice until now is the (partial) standardization of processes. This has the main disadvantage that most organizations are forced to adapt their processes to adhere to the standard. In this paper we analyze and compare the actual processes of ten Dutch municipalities. Configurable process models provide a potential solution for the limitations of classical standardization processes as they contain all the behavior of individual processes, while only needing one model. The question rises where the limits are though. It is obvious that one configurable model containing all models that exist is undesirable. But are company-wide configurable models feasible? And how about cross-organizational configurable models, should all partners be considered or just certain ones? In this paper we apply a similarity metric on individual models to determine means of answering questions in this area. This way we propose a new means of determining beforehand whether configurable models are feasible. Using the selected metric we can identify more desirable partners and processes before computing a configurable process model.},
Doi = {10.1007/978-3-642-28115-0_6},
Owner = {hverbeek},
Timestamp = {2011.10.03},
}
• J. J. L. C. Vogelaar, H. M. W. Verbeek, B. Luka, and W. M. P. van der Aalst, “Comparing business processes to determine the feasibility of configurable models: a case study,” BPMcenter.org, BPM Center Report BPM-11-17, 2011.
[Bibtex]
@TechReport{Vogelaar11,
Title = {Comparing Business Processes to Determine the Feasibility of Configurable Models: A Case Study},
Author = {Vogelaar, J. J. L. C. and Verbeek, H. M. W. and Luka, B. and Aalst, W. M. P. van der},
Institution = {BPMcenter.org},
Year = {2011},
Number = {BPM-11-17},
Type = {BPM Center Report},
Abstract = {Organizations are looking for ways to collaborate in the area of process management. Common practice until now is the (partial) standardization of processes. This has the main disadvantage that most organizations are forced to adapt their processes to adhere to the standard. In this paper we analyze and compare the actual processes of ten Dutch municipalities. Configurable process models provide a potential solution for the limitations of classical standardization processes as they contain all the behavior of individual models, while only needing one model. The question rises where the limits are though. It is obvious that one configurable model containing all models that exist is undesirable. But are company-wide configurable models feasible? And how about cross-organizational configurable models, should all partners be considered or just certain ones? In this paper we apply a similarity metric on individual models to determine means of answering questions in this area. This way we propose a new means of determining beforehand whether configurable models are feasible. Using the selected metric we can identify more desirable partners and processes before computing configurable process models.},
Owner = {hverbeek},
Timestamp = {2011.10.03},
}
• J. M. E. M. van der Werf and H. M. W. Verbeek, “Online compliance monitoring of service landscapes,” in Bpm 2014 international workshops, Springer, 2015, vol. 202, pp. 89-95.
[Bibtex]
@InCollection{Werf14,
Title = {Online Compliance Monitoring of Service Landscapes},
Author = {Werf, J. M. E. M. van der and Verbeek, H. M. W.},
Booktitle = {BPM 2014 International Workshops},
Publisher = {Springer},
Year = {2015},
Series = {Lecture Notes in Business Information Processing},
Pages = {89--95},
Volume = {202},
Doi = {10.1007/978-3-319-15895-2_8},
}
• J. M. E. M. van der Werf, H. M. W. Verbeek, and W. M. P. van der Aalst, “Context-aware compliance checking,” in Bpm 2012 proceedings, Tallinn, 2012, pp. 98-113.
[Bibtex]
@InProceedings{Werf12,
Title = {Context-Aware Compliance Checking},
Author = {Werf, J. M. E. M. van der and Verbeek, H. M. W. and Aalst, W. M. P. van der},
Booktitle = {BPM 2012 Proceedings},
Year = {2012},
Address = {Tallinn},
Editor = {Barros, A. and Gal, A. and Kindler, E.},
Month = sep,
Note = {Accepted as regular paper},
Pages = {98--113},
Publisher = {Springer},
Series = {LNCS},
Volume = {7481},
Abstract = {Organizations face more and more the burden to show that their business is compliant with respect to many different boundaries. The activity of compliance checking is commonly referred to as auditing. As information systems supporting the organization's business record their usage, process mining techniques such as conformance checking offer the auditor novel tools to automate the auditing activity. However, these techniques tend to look at process instances (i.e., cases) in isolation, whereas many compliance rules can only be evaluated when considering interactions between cases and contextual information. For example, a rule like ``a paper should not be reviewed by a reviewer that has been a co-author'' cannot be checked without considering the corresponding context (i.e., other papers, other issues, other journals, etc.). To check such compliance rules, we link event logs to the context. Events modify a pre-existing context and constraints can be checked on the resulting context. The approach has been implemented in ProM. The resulting context is represented as an ontology, and the semantic web rule language is used to formalize constraints.},
Doi = {10.1007/978-3-642-32885-5_7},
Owner = {hverbeek},
Timestamp = {2012.07.12},
}
• M. Weske, W. M. P. van der Aalst, and H. M. W. Verbeek, “Advances in business process management,” Data & knowledge engineering (dke), vol. 50, iss. 1, pp. 1-8, 2004.
[Bibtex]
@Article{Weske04,
Title = {Advances in Business Process Management},
Author = {Weske, M. and Aalst, W. M. P. van der and Verbeek, H. M. W.},
Journal = {Data \& Knowledge Engineering (DKE)},
Year = {2004},
Number = {1},
Pages = {1--8},
Volume = {50},
Comment = {Guest editorial},
Doi = {10.1016/j.datak.2004.01.001},
Owner = {hverbeek},
Timestamp = {2009.01.28},
Url = {http://www.sciencedirect.com/science?_ob=MImg&_imagekey=B6TYX-4BJ5XYV-1-9&_cdi=5630&_user=499892&_orig=browse&_coverDate=07/31/2004&_sk=999499998&view=c&wchp=dGLbVzz-zSkWz&md5=25903ba0bbacc8c2d57c4d9a43abd28e&ie=/sdarticle.pdf},
}
• M. Westergaard and H. M. W. Verbeek, “Efficient implementation of prioritized transitions for high-level petri nets,” in Pnse’11, Newcastle upon Tyne, UK, 2011.
[Bibtex]
@InProceedings{Westergaard11,
Title = {Efficient Implementation of Prioritized Transitions for High-level Petri Nets},
Author = {Westergaard, M. and Verbeek, H. M. W.},
Booktitle = {PNSE'11},
Year = {2011},
Address = {Newcastle upon Tyne, UK},
Editor = {Moldt, D. and Duvigneau, M. and Hiraishi, K.},
Month = jun,
Note = {Accepted},
Abstract = {Transition priorities can be a useful mechanism when modeling using Petri nets. For example, high-priority transitions can be used to model exception handling and low-priority transitions can be used to model background tasks that should only be executed when no other transition is enabled. Transition priorities can be simulated in Petri nets using, e.g., inhibitor arcs, but such constructs tend to unnecessarily clutter models, making it useful to support priorities directly.
Computing the enabling of transitions in high-level Petri nets is an expensive operation and should be avoided. As transition priorities introduce a nonlocal enabling condition, at first sight this forces us to compute enabling for all transitions in a highest-priority-first order, but it is possible to do better. Here we describe our implementation of transition priorities in CPN Tools 3.0, where we minimize the number of enabling computations. We describe algorithms for executing transitions at random, useful for automatic simulation without user interactions, and for maintaining a set of known enabled transitions, useful for interactive user-guided simulation. Experiments show that using our algorithms we can execute 4-7 million transitions a minute for real-life models and more than 20 million transitions a minute for other models, a significant improvement over the 1-5 million transitions a minute possible for simpler algorithms.},
Owner = {hverbeek},
Timestamp = {2011.05.13},
Url = {http://ceur-ws.org/Vol-723/paper3.pdf},
}
• M. Westergaard and H. M. W. Verbeek, “Efficient implementation of simulation of prioritized transitions for high-level petri nets,” BPMcenter.org, BPM Center Report BPM-13-24, 2013.
[Bibtex]
@TechReport{Westergaard13,
Title = {Efficient Implementation of Simulation of Prioritized Transitions for High-level Petri Nets},
Author = {Westergaard, M. and Verbeek, H. M. W.},
Institution = {BPMcenter.org},
Year = {2013},
Number = {BPM-13-24},
Type = {BPM Center Report},
Abstract = {Transition priorities can be a useful mechanism when modeling using Petri nets. For example, exception handling can be modeled using high-priority transitions and background tasks can be modeled using low-priority transitions. Although transition priorities can be simulated in Petri nets using, e.g., inhibitor arcs, such constructs tend to unnecessarily clutter models. Hence, it is useful to support priorities directly. The main problem with transition priorities is that they introduce a nonlocal enabling condition. At first sight, this forces us to compute enabling for all transitions in a highest-priority-first order. However, this should be avoided whenever possible as computing whether transitions in high-level Petri nets are enabled is an expensive operation. This paper shows that we can minimize the number of enabling computations, and hence can do better. Experiments show that using the algorithms presented in this paper we can execute approximately 10 times as many transitions a second as is possible for simpler algorithms. This holds for both toy examples and real-life models, though the gain is often larger for real-life models.},
Owner = {hverbeek},
Timestamp = {2013.10.17},
}
• M. T. Wynn, H. M. W. Verbeek, W. M. P. van der Aalst, A. H. M. ter Hofstede, and D. Edmond, “Soundness-preserving reduction rules for reset workflow nets,” Information sciences, vol. 179, iss. 6, pp. 769-790, 2009.
[Bibtex]
@article{Wynn09,
  author    = {Wynn, M. T. and Verbeek, H. M. W. and Aalst, W. M. P. van der and Hofstede, A. H. M. ter and Edmond, D.},
  title     = {Soundness-preserving Reduction Rules for Reset Workflow Nets},
  journal   = {Information Sciences},
  volume    = {179},
  number    = {6},
  pages     = {769--790},
  year      = {2009},
  doi       = {10.1016/j.ins.2008.10.033},
  abstract  = {The application of reduction rules to any Petri net may assist in its analysis as its reduced version may be significantly smaller while still retaining the original net's essential properties. Reset nets extend Petri nets with the concept of a reset arc, allowing one to remove all tokens from a certain place. Such nets have a natural application in business process modelling where possible cancellation of activities need to be modelled explicitly and in workflow management where such process models with cancellation behaviours should be enacted correctly. As cancelling the entire workflow or even cancelling certain activities in a workflow has serious implications during execution (for instance, a workflow can deadlock because of cancellation), such workflows should be thoroughly tested before deployment. However, verification of large workflows with cancellation behaviour is time consuming and can become intractable due to the state space explosion problem. One way of speeding up verification of workflows based on reset nets is to apply reduction rules. Even though reduction rules exist for Petri nets and some of its subclasses and extensions, there are no documented reduction rules for reset nets. This paper systematically presents such reduction rules. Because we want to apply the results to the workflow domain, this paper focusses on Reset Workflow nets (RWF-nets), i.e. a subclass tailored to the modelling of workflows. The approach has been implemented in the context of the workflow system YAWL.},
  owner     = {hverbeek},
  timestamp = {2008.11.03},
}
• M. T. Wynn, H. M. W. Verbeek, W. M. P. van der Aalst, A. H. M. ter Hofstede, and D. Edmond, “Reduction rules for yawl workflows with cancellation regions and or-joins,” Information and software technology (infsof), vol. 51, pp. 1010-1020, 2009.
[Bibtex]
@Article{Wynn09b,
Title = {Reduction rules for {YAWL} workflows with cancellation regions and {OR}-joins},
Author = {Wynn, M. T. and Verbeek, H. M. W. and Aalst, W. M. P. van der and Hofstede, A. H. M. ter and Edmond, D.},
Journal = {Information and Software Technology (InfSof)},
Year = {2009},
Pages = {1010--1020},
Volume = {51},
Abstract = {As the need for concepts such as cancellation and OR-joins occurs naturally in business scenarios, comprehensive support in a workflow language is desirable. However, there is a clear trade-off between the expressive power of a language (i.e., introducing complex constructs such as cancellation and OR-joins) and ease of verification. When a workflow contains a large number of tasks and involves complex control flow dependencies, verification can take too much time or it may even be impossible. There are a number of different approaches to deal with this complexity. Reducing the size of the workflow, while preserving its essential properties with respect to a particular analysis problem, is one such approach. In this paper, we present a set of reduction rules for workflows with cancellation regions and OR-joins and demonstrate how they can be used to improve the efficiency of verification. Our results are presented in the context of the YAWL workflow language.},
Doi = {10.1016/j.infsof.2008.12.002},
Owner = {hverbeek},
Timestamp = {2009.02.06},
}
• M. T. Wynn, H. M. W. Verbeek, W. M. P. van der Aalst, A. H. M. ter Hofstede, and D. Edmond, “Reduction rules for reset workflow nets,” BPMcenter.org, BPM Center Report BPM-06-25, 2006.
[Bibtex]
@TechReport{Wynn06,
Title = {Reduction rules for {Reset Workflow Nets}},
Author = {Wynn, M. T. and Verbeek, H. M. W. and Aalst, W. M. P. van der and Hofstede, A. H. M. ter and Edmond, D.},
Institution = {BPMcenter.org},
Year = {2006},
Number = {BPM-06-25},
Type = {BPM Center Report},
Abstract = {When a workflow contains a large number of tasks and involves complex control flow dependencies, verification can take too much time or it may even be impossible. Reduction rules can be used to abstract from certain transitions and places in a large net and thus could cut down the size of the net used for verification. Petri nets have been proposed to model and analyse workflows and Petri nets reduction rules have been used for efficient verification of various properties of workflows, such as liveness and boundedness. Reset nets are Petri nets with reset arcs, which can remove tokens from places when a transition fires. The nature of reset arcs closely relates to the cancellation behaviour in workflows. As a result, reset nets have been proposed to formally represent workflows with cancellation behaviour, which is not easily modelled in ordinary Petri nets. Even though reduction rules exist for Petri nets, the nature of reset arcs could invalidate the transformation rules applicable to Petri nets. This motivated us to consider possible reduction rules for reset nets. In this paper, we propose a number of reduction rules for Reset Workflow Nets (RWF-nets) that are soundness preserving. These reduction rules are based on reduction rules available for Petri nets [19] and we present the necessary conditions under which these rules hold in the context of reset nets.},
Owner = {hverbeek},
Timestamp = {2008.11.04},
}
• M. T. Wynn, H. M. W. Verbeek, W. M. P. van der Aalst, A. H. M. ter Hofstede, and D. Edmond, “Reduction rules for yawl workflow nets with cancellation regions and or-joins,” BPMcenter.org, BPM Center Report BPM-06-24, 2006.
[Bibtex]
@TechReport{Wynn06a,
Title = {Reduction Rules for {YAWL} Workflow Nets with Cancellation Regions and {OR}-joins},
Author = {Wynn, M. T. and Verbeek, H. M. W. and Aalst, W. M. P. van der and Hofstede, A. H. M. ter and Edmond, D.},
Institution = {BPMcenter.org},
Year = {2006},
Number = {BPM-06-24},
Type = {BPM Center Report},
Abstract = {A reduction rule can transform a large net into a smaller and simple net while preserving certain interesting properties and it is usually applied before verification to reduce the complexity and to prevent state space explosion. Reset nets have been proposed to formally describe workflows with cancellation behaviour. In our previous work, we have presented a set of reduction rules for Reset Workflow Net (RWF-net), which is a subclass of reset nets. In this paper, we will present a set of reduction rules for YAWL nets with cancellation regions and OR-joins. The reduction rules for RWF-nets combined with the formal mappings from YAWL nets provide us with the means to define a set of reduction rules for YAWL nets. We will also demonstrate how these reduction rules can be used for efficient verification of YAWL nets with these features.},
Owner = {hverbeek},
Timestamp = {2008.11.04},
}
• M. T. Wynn, H. M. W. Verbeek, W. M. P. van der Aalst, A. H. M. ter Hofstede, and D. Edmond, “Business process verification — finally a reality!,” Business process management journal, vol. 15, iss. 1, pp. 74-92, 2009.
[Bibtex]
@Article{Wynn09a,
Title = {Business Process Verification -- Finally a Reality!},
Author = {Wynn, M. T. and Verbeek, H. M. W. and Aalst, W. M. P. van der and Hofstede, A. H. M. ter and Edmond, D.},
Journal = {Business Process Management Journal},
Year = {2009},
Number = {1},
Pages = {74--92},
Volume = {15},
Abstract = {The goal of this paper is to demonstrate that process verification has matured to a level where it can be used in practice. Earlier techniques assumed simplified process models without the more advanced constructs available in today's modelling languages (e.g., cancellation and OR-joins). This paper reports on new verification techniques that can be used to assess the correctness of real-life models.},
Doi = {10.1108/14637150910931479},
Owner = {hverbeek},
Timestamp = {2008.11.03},
}
• S. J. van Zelst, A. Burattin, B. F. van Dongen, and H. M. W. Verbeek, “Data streams in prom 6: a single-node architecture,” in Bpm 2014 demos, CEUR-WS.org, 2014, vol. 1295, pp. 81-85.
[Bibtex]
@InProceedings{Zelst14,
Title = {Data Streams in {ProM} 6: A Single-node Architecture},
Author = {Zelst, S. J. van and Burattin, A. and Dongen, B. F. van and Verbeek, H. M. W.},
Booktitle = {BPM 2014 Demos},
Publisher = {CEUR-WS.org},
Year = {2014},
Pages = {81--85},
Volume = {1295},
Url = {http://ceur-ws.org/Vol-1295/paper6.pdf},
}
• S. J. van Zelst, B. F. van Dongen, W. M. P. van der Aalst, and H. M. W. Verbeek, “Discovering workflow nets using integer linear programming,” Computing, vol. 100, iss. 5, pp. 529-556, 2018.
[Bibtex]
@Article{Zelst18,
Title = {Discovering Workflow Nets Using Integer Linear Programming},
Author = {Zelst, S. J. van and Dongen, B. F. van and Aalst, W. M. P. van der and Verbeek, H. M. W.},
Journal = {Computing},
Year = {2018},
Month = may,
Number = {5},
Pages = {529--556},
Volume = {100},
Abstract = {Process mining is concerned with the analysis, understanding and improvement of business processes. Process discovery, i.e. discovering a process model based on an event log, is considered the most challenging process mining task. State-of-the-art process discovery algorithms only discover local control flow patterns and are unable to discover complex, non-local patterns. Region theory based techniques, i.e. an established class of process discovery techniques, do allow for discovering such patterns. However, applying region theory directly results in complex, overfitting models, which is less desirable. Moreover, region theory does not cope with guarantees provided by state-of-the-art process discovery algorithms, both w.r.t. structural and behavioural properties of the discovered process models. In this paper we present an ILP-based process discovery approach, based on region theory, that guarantees to discover relaxed sound workflow nets. Moreover, we devise a filtering algorithm, based on the internal working of the ILP-formulation, that is able to cope with the presence of infrequent, exceptional behaviour. We have extensively evaluated the technique using different event logs with different levels of exceptional behaviour. Our experiments show that the presented approach allows us to leverage the inherent shortcomings of existing region-based approaches. The techniques presented are implemented and readily available in the HybridILPMiner package in the open-source process mining tool-kits ProM (http://promtools.org) and RapidProM (http://rapidprom.org).},
Doi = {10.1007/s00607-017-0582-5},
Owner = {hverbeek},
Timestamp = {2017.11.10},
}
• Tool demonstrations of the 24th international conference on application and theory of petri nets and the 1st international conference on business process management, 2003.
[Bibtex]
@Misc{03,
Title = {Tool Demonstrations of the 24th International Conference on Application and Theory of {Petri} Nets and the 1st International Conference on Business Process Management},
Month = jun,
Note = {Organized by Verbeek, H. M. W.},
Year = {2003},
Owner = {hverbeek},
Timestamp = {2008.11.04},
}