Dixit, P. M.; Verbeek, H. M. W.; van der Aalst, W. M. P.
Incremental Computation of Synthesis Rules for Free-Choice Petri nets Conference
Formal Aspects of Component Software - 15th International Conference, FACS 2018, Proceedings, vol. 11222, LNCS 2018.
@inproceedings{Dixit18c,
  author    = {P. M. Dixit and H. M. W. Verbeek and W. M. P. van der Aalst},
  editor    = {P. C. Ölveczky and K. Bae},
  title     = {Incremental Computation of Synthesis Rules for Free-Choice {Petri} nets},
  booktitle = {Formal Aspects of Component Software - 15th International Conference, FACS 2018, Proceedings},
  series    = {LNCS},
  volume    = {11222},
  pages     = {97--117},
  year      = {2018},
  date      = {2018-01-01},
  abstract  = {In this paper, we propose a novel approach that calculates all the possible applications of synthesis rules, for well-formed free-choice Petri nets, in a speedy way to enable an interactive editing system. The proposed approach uses a so-called incremental synthesis structure, which can be used to extract all the synthesis rules, corresponding to a given net. Furthermore, this structure is updated incrementally, i.e. after usage of a synthesis rule, to obtain the incremental synthesis structure of the newly synthesized net. We prove that the proposed approach is correct and complete in order to synthesize any well-formed free-choice Petri net, starting with an initial well-formed atomic net and the corresponding incremental synthesis structure. A variant of the proposed approach has been implemented that allows interactive modeling (discovery) of sound business processes (from event logs). Experimental results show that the proposed approach is fast, and outperforms the baseline, and hence is well-suited for enabling interactive synthesis of very large nets.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Garcia Caballero, H. S.; Westenberg, M. A.; Verbeek, H. M. W.; van der Aalst, W. M. P.
Visual analytics for soundness verification of process models Book Section
In: Teniente, E.; Weidlich, M. (Ed.): BPM 2017 International Workshops, Barcelona, Spain, September 10-11, 2017, Revised Papers, vol. 308, pp. 744–756, Springer, 2018.
@incollection{GarciaCaballero18,
  author    = {Garcia Caballero, H. S. and Westenberg, M. A. and Verbeek, H. M. W. and van der Aalst, W. M. P.},
  editor    = {E. Teniente and M. Weidlich},
  title     = {Visual analytics for soundness verification of process models},
  booktitle = {BPM 2017 International Workshops, Barcelona, Spain, September 10-11, 2017, Revised Papers},
  series    = {Lecture Notes in Business Information Processing},
  volume    = {308},
  pages     = {744--756},
  publisher = {Springer},
  doi       = {10.1007/978-3-319-74030-0_59},
  year      = {2018},
  date      = {2018-01-01},
  abstract  = {Soundness validation of process models is a complex task for process modelers due to all the factors that must be taken into account. Although there are tools to verify this property, they do not provide users with easy information on where soundness starts breaking and under which conditions. Providing insights such as states in which problems occur, involved activities, or paths leading to those states, is crucial for process modelers to better understand why the model is not sound. In this paper we address the problem of validating the soundness property of a process model by using a novel visual approach and a new tool called PSVis (Petri net Soundness Visualization) supporting this approach. The PSVis tool aims to guide expert users through the process models in order to get insights into the problems that cause the process to be unsound.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {incollection}
}
Lee, W. L. J.; Munoz-Gama, J.; Verbeek, H. M. W.; van der Aalst, W. M. P.; Sepúlveda, M.
Improving merging conditions for recomposing conformance checking Proceedings Article
In: Proceedings of the BPI 2018 workshop, 2018.
@inproceedings{Lee18,
  author    = {W. L. J. Lee and J. Munoz-Gama and H. M. W. Verbeek and W. M. P. van der Aalst and M. Sepúlveda},
  title     = {Improving merging conditions for recomposing conformance checking},
  booktitle = {Proceedings of the BPI 2018 workshop},
  year      = {2018},
  date      = {2018-01-01},
  urldate   = {2018-01-01},
  abstract  = {Efficient conformance checking is a hot topic in the field of process mining. Much of the recent work focused on improving the scalability of alignment-based approaches to support the larger and more complex processes. This is needed because process mining is increasingly applied in areas where models and logs are “big”. Decomposition techniques are able to achieve significant performance gains by breaking down a conformance problem into smaller ones. Moreover, recent work showed that the alignment problem can be resolved in an iterative manner by alternating between aligning a set of decomposed subcomponents before merging the computed sub-alignments and recomposing subcomponents to fix merging issues. Despite experimental results showing the gain of applying recomposition in large scenarios, there is still a need for improving the merging step, where log traces can take numerous recomposition steps before reaching the required merging condition. This paper contributes by defining and structuring the recomposition step, and proposes strategies with significant performance improvement on synthetic and real-life datasets over both the state-of-the-art decomposed and monolithic approaches.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Schunselaar, D. M. M.; Verbeek, H. M. W.
Task Elimination may Actually Increase Throughput Time Technical Report
arXiv.org 2018, (arXiv identifier 1812.11793).
@techreport{Schunselaar18,
  author      = {D. M. M. Schunselaar and H. M. W. Verbeek},
  title       = {Task Elimination may Actually Increase Throughput Time},
  institution = {arXiv.org},
  eprint      = {1812.11793},
  eprinttype  = {arXiv},
  url         = {http://arxiv.org/abs/1812.11793},
  year        = {2018},
  date        = {2018-01-01},
  urldate     = {2018-01-01},
  abstract    = {The well-known Task Elimination redesign principle suggests to remove unnecessary tasks from a process to improve on time and cost. Although there seems to be a general consensus that removing work can only improve the throughput time of the process, this paper shows that this is not necessarily the case by providing an example that uses plain M/M/c activities. This paper also shows that the Task Automation and Parallelism redesign principles may also lead to longer throughput times. Finally, apart from these negative results, the paper also show under which assumption these redesign principles indeed can only improve the throughput time.},
  note        = {arXiv identifier 1812.11793},
  keywords    = {},
  pubstate    = {published},
  tppubtype   = {techreport}
}
Verbeek, H. M. W.; de Carvalho, R. Medeiros
Log Skeletons: A Classification Approach to Process Discovery Technical Report
arXiv.org 2018, (arXiv Identifier 1806.08247).
@techreport{Verbeek18,
  author      = {H. M. W. Verbeek and R. Medeiros de Carvalho},
  title       = {Log Skeletons: A Classification Approach to Process Discovery},
  institution = {arXiv.org},
  eprint      = {1806.08247},
  eprinttype  = {arXiv},
  url         = {https://arxiv.org/abs/1806.08247},
  year        = {2018},
  date        = {2018-01-01},
  abstract    = {To test the effectiveness of process discovery algorithms, a Process Discovery Contest (PDC) has been set up. This PDC uses a classification approach to measure this effectiveness: The better the discovered model can classify whether or not a new trace conforms to the event log, the better the discovery algorithm is supposed to be. Unfortunately, even the state-of-the-art fully-automated discovery algorithms score poorly on this classification. Even the best of these algorithms, the Inductive Miner, scored only 147 correct classified traces out of 200 traces on the PDC of 2017. This paper introduces the rule-based log skeleton model, which is closely related to the Declare constraint model, together with a way to classify traces using this model. This classification using log skeletons is shown to score better on the PDC of 2017 than state-of-the-art discovery algorithms: 194 out of 200. As a result, one can argue that the fully-automated algorithm to construct (or: discover) a log skeleton from an event log outperforms existing state-of-the-art fully-automated discovery algorithms.},
  note        = {arXiv Identifier 1806.08247},
  keywords    = {},
  pubstate    = {published},
  tppubtype   = {techreport}
}
Raichelson, Lihi; Soffer, Pnina; Verbeek, H. M. W.
Merging event logs: Combining granularity levels for process flow analysis Journal Article
In: Information Systems, vol. 71, pp. 211–227, 2017.
@article{Raichelson17,
  author    = {Lihi Raichelson and Pnina Soffer and H. M. W. Verbeek},
  title     = {Merging event logs: Combining granularity levels for process flow analysis},
  journal   = {Information Systems},
  volume    = {71},
  pages     = {211--227},
  doi       = {10.1016/j.is.2017.08.010},
  year      = {2017},
  date      = {2017-11-01},
  abstract  = {Process mining techniques enable the discovery and analysis of business processes and the identification of opportunities for improvement. Processes often comprise separately managed procedures documented in separate log files which are impossible to mine in an integrative manner as the complete end-to-end process flow is obscure. In this paper we present a merging algorithm that results in a comprehensive merged log that offers two views of the end-to-end process: the case view, tracking the order, and the instance view tracking the item. This enables the identification of process flow problems that could not be detected by previous techniques.
In addition, because our log-merging approach establishes the end-to-end process flow at two different abstraction levels, it is capable of handling both simple (n-to-one) and complex (n-to-many) relationships between log events. The unified log can be used by process mining techniques to identify flow problems, particularly at the point of integration between the processes under consideration. The procedure proposed in this paper has been implemented and evaluated using both synthetic logs and real-life logs.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Verbeek, H. M. W.; Munoz-Gama, J.; van der Aalst, W. M. P.
Divide And Conquer: A Tool Framework for Supporting Decomposed Discovery in Process Mining Journal Article
In: The Computer Journal, vol. 60, no. 11, pp. 1649–1674, 2017.
@article{Verbeek17a,
  author    = {H. M. W. Verbeek and J. Munoz-Gama and W. M. P. van der Aalst},
  title     = {Divide And Conquer: A Tool Framework for Supporting Decomposed Discovery in Process Mining},
  journal   = {The Computer Journal},
  volume    = {60},
  number    = {11},
  pages     = {1649--1674},
  doi       = {10.1093/comjnl/bxx040},
  year      = {2017},
  date      = {2017-11-01},
  abstract  = {In the area of process mining, decomposed replay has been proposed to be able to deal with nets and logs containing many different activities. The main assumption behind this decomposition is that replaying many subnets and sublogs containing only some activities is faster then replaying a single net and log containing many activities. Although for many nets and logs this assumption does hold, there are also nets and logs for which it does not hold. This paper shows an example net and log for which the decomposed replay may take way more time, and provides an explanation why this is the case. Next, to mitigate this problem, this paper proposes an alternative way to abstract the subnets from the single net, and shows that the decomposed replay using this alternative abstraction is faster than the monolithic replay even for the problematic cases as identified earlier. However, the alternative abstraction often results in longer computation times for the decomposed replay than the original abstraction. An advantage of the alternative abstraction over the original abstraction is that its cost estimates are typically better.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Lee, W. L. J.; Verbeek, H. M. W.; Munoz-Gama, J.; van der Aalst, W. M. P.; Sepúlveda, M.
Replay using Recomposition: Alignment-Based Conformance Checking in the Large Proceedings Article
In: Mendling, J.; Weske, M.; Clariso, R.; Pentland, B.; van der Aalst, W. M. P.; Leopold, H.; Kumar, A. (Ed.): 2017 BPM Demo Track and BPM Dissertation Award, BPM-D and DA 2017, co-located with 15th International Conference on Business Process Management, BPM 2017, Barcelona, Spain, 2017, (Conditionally accepted).
@inproceedings{Lee17,
  author    = {W. L. J. Lee and H. M. W. Verbeek and J. Munoz-Gama and W. M. P. van der Aalst and M. Sepúlveda},
  editor    = {J. Mendling and M. Weske and R. Clariso and B. Pentland and W. M. P. van der Aalst and H. Leopold and A. Kumar},
  title     = {Replay using Recomposition: Alignment-Based Conformance Checking in the Large},
  booktitle = {2017 BPM Demo Track and BPM Dissertation Award, BPM-D and DA 2017, co-located with 15th International Conference on Business Process Management, BPM 2017},
  series    = {CEUR Workshop Proceedings},
  volume    = {1920},
  address   = {Barcelona, Spain},
  url       = {http://www.win.tue.nl/~hverbeek/wp-content/papercite-data/pdf/lee17.pdf},
  year      = {2017},
  date      = {2017-09-01},
  abstract  = {In the area of process mining, efficient alignment-based conformance checking is a hot topic. Existing approaches for conformance checking are typically monolithic and compute exact fitness values. One limitation with monolithic approaches is that it may take a significant amount of computation time in large processes. Alternatively, decomposition approaches run much faster but do not always compute an exact fitness value. This paper presents the tool Replay using Recomposition which returns the exact fitness value and the resulting alignments using the decomposition approach in an iterative manner. Other than computing the exact fitness value, users can configure the balance between result accuracy and computation time to get a fitness interval within set constraints, e.g., "Give me the best fitness estimation you can find within 5 minutes".},
  note      = {Conditionally accepted},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Acampora, G.; Vitiello, A.; Stefano, B. Di; van der Aalst, W. M. P.; Günther, C. W.; Verbeek, H. M. W.
IEEE 1849™: The XES Standard: The Second IEEE Standard Sponsored by IEEE Computational Intelligence Society Journal Article
In: IEEE Computational Intelligence Magazine, pp. 4–8, 2017.
@article{Acampora17,
  author    = {G. Acampora and A. Vitiello and B. Di Stefano and W. M. P. van der Aalst and C. W. Günther and H. M. W. Verbeek},
  title     = {{IEEE} 1849{\texttrademark}: The {XES} Standard: The Second {IEEE} Standard Sponsored by {IEEE} Computational Intelligence Society},
  journal   = {IEEE Computational Intelligence Magazine},
  pages     = {4--8},
  doi       = {10.1109/MCI.2017.2670420},
  year      = {2017},
  date      = {2017-05-01},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Garcia Caballero, H. S.; Westenberg, M. A.; Verbeek, H. M. W.; van der Aalst, W. M. P.
Visual Analytics for Soundness Verification of Process Models Proceedings Article
In: Proceedings of TAProViz 2017, 2017, (Accepted for publication).
@inproceedings{GarciaCaballero17,
  author    = {Garcia Caballero, H. S. and Westenberg, M. A. and Verbeek, H. M. W. and van der Aalst, W. M. P.},
  title     = {Visual Analytics for Soundness Verification of Process Models},
  booktitle = {Proceedings of TAProViz 2017},
  url       = {http://www.win.tue.nl/~hverbeek/wp-content/papercite-data/pdf/garciacaballero17.pdf},
  year      = {2017},
  date      = {2017-01-01},
  note      = {Accepted for publication},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Verbeek, H. M. W.
Decomposed Replay Using Hiding and Reduction as Abstraction Journal Article
In: LNCS Transactions on Petri Nets and Other Models of Concurrency (ToPNoC), vol. XII, pp. 166–186, 2017.
@article{Verbeek17,
  author    = {H. M. W. Verbeek},
  title     = {Decomposed Replay Using Hiding and Reduction as Abstraction},
  journal   = {LNCS Transactions on Petri Nets and Other Models of Concurrency (ToPNoC)},
  volume    = {XII},
  pages     = {166--186},
  doi       = {10.1007/978-3-662-55862-1_8},
  url       = {http://www.springerlink.com/content/f15t41545m061682/fulltext.pdf},
  year      = {2017},
  date      = {2017-01-01},
  abstract  = {In the area of process mining, decomposed replay has been proposed to be able to deal with nets and logs containing many different activities. The main assumption behind this decomposition is that replaying many subnets and sublogs containing only some activities is faster then replaying a single net and log containing many activities. Although for many nets and logs this assumption does hold, there are also nets and logs for which it does not hold. This paper shows an example net and log for which the decomposed replay may take way more time, and provides an explanation why this is the case. Next, to mitigate this problem, this paper proposes an alternative way to abstract the subnets from the single net, and shows that the decomposed replay using this alternative abstraction is faster than the monolithic replay even for the problematic cases as identified earlier. However, the alternative abstraction often results in longer computation times for the decomposed replay than the original abstraction. An advantage of the alternative abstraction over the original abstraction is that its cost estimates are typically better.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
IEEE 1849 (XES) WG
IEEE Standard for eXtensible Event Stream (XES) for Achieving Interoperability in Event Logs and Event Streams Journal Article
In: IEEE Std 1849-2016, pp. 1–50, 2016.
@article{XES16,
  author    = {{IEEE 1849 (XES) WG}},
  title     = {{IEEE} Standard for {eXtensible} Event Stream ({XES}) for Achieving Interoperability in Event Logs and Event Streams},
  journal   = {IEEE Std 1849-2016},
  pages     = {1--50},
  doi       = {10.1109/IEEESTD.2016.7740858},
  year      = {2016},
  date      = {2016-11-01},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Verbeek, H. M. W.
Decomposed Replay using Hiding and Reduction Proceedings Article
In: Cabac, L.; Kristensen, L.; Rölke, H. (Ed.): PNSE 2016 Workshop Proceedings, Torun, Poland, 2016, (Accepted for publication).
@inproceedings{Verbeek16a,
  author    = {H. M. W. Verbeek},
  editor    = {L. Cabac and L. Kristensen and H. Rölke},
  title     = {Decomposed Replay using Hiding and Reduction},
  booktitle = {PNSE 2016 Workshop Proceedings},
  address   = {Torun, Poland},
  url       = {http://www.win.tue.nl/~hverbeek/wp-content/papercite-data/pdf/verbeek16a.pdf},
  year      = {2016},
  date      = {2016-06-01},
  abstract  = {In the area of process mining, decomposed replay has been proposed to be able to deal with nets and logs containing many different activities. The main assumption behind this decomposition is that replaying many subnets and sublogs containing only some activities is faster then replaying a single net and log containing many activities. Although for many nets and logs this assumption does hold, there are also nets and logs for which it does not hold. This paper shows an example net and log for which the decomposed replay may take way more time, and provides an explanation why this is the case. Next, to mitigate this problem, this paper proposes an alternative decomposed replay, and shows that this alternative decomposed replay is faster than the monolithic replay even for the problematic cases as identified earlier. However, the alternative decomposed replay is often slower than the original decomposed approach. An advantage of the alternative decomposed approach over the original approach is that its cost estimates are typically better.},
  note      = {Accepted for publication},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Verbeek, H. M. W.; van der Aalst, W. M. P.
Merging Alignments for Decomposed Replay Proceedings Article
In: Kordon, F; Moldt, D. (Ed.): Application and Theory of Petri Nets and Concurrency, pp. 219–239, Springer International Publishing, Torun, Poland, 2016.
@inproceedings{Verbeek16,
  author    = {H. M. W. Verbeek and W. M. P. van der Aalst},
  editor    = {F. Kordon and D. Moldt},
  title     = {Merging Alignments for Decomposed Replay},
  booktitle = {Application and Theory of Petri Nets and Concurrency},
  series    = {LNCS},
  volume    = {9698},
  pages     = {219--239},
  publisher = {Springer International Publishing},
  address   = {Torun, Poland},
  doi       = {10.1007/978-3-319-39086-4_14},
  url       = {http://www.win.tue.nl/~hverbeek/wp-content/papercite-data/pdf/verbeek16.pdf},
  year      = {2016},
  date      = {2016-06-01},
  urldate   = {2016-06-01},
  abstract  = {In the area of process mining, conformance checking aims to find an optimal alignment between an event log (which captures the activities that actually have happened) and a Petri net (which describes expected or normative behavior). Optimal alignments highlight discrepancies between observed and modeled behavior. To find an optimal alignment, a potentially challenging optimization problem needs to be solved based on a predefined cost function for misalignments. Unfortunately, this may be very time consuming for larger logs and models and often intractable. A solution is to decompose the problem of finding an optimal alignment in many smaller problems that are easier to solve. Decomposition can be used to detect conformance problems in less time and provides a lower bound for the costs of an optimal alignment. Although the existing approach is able to decide whether a trace fits or not, it does not provide an overall alignment. In this paper, we provide an algorithm that is able provide such an optimal alignment from the decomposed alignments if this is possible. Otherwise, the algorithm produces a so-called pseudo-alignment that can still be used to pinpoint non-conforming parts of log and model. The approach has been implemented in ProM and tested on various real-life event logs.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Verbeek, H. M. W.; van der Aalst, W. M. P.; Munoz-Gama, J.
Divide and Conquer Technical Report
BPMCenter.org no. BPM-16-06, 2016.
@techreport{Verbeek16b,
  author      = {H. M. W. Verbeek and W. M. P. van der Aalst and J. Munoz-Gama},
  title       = {Divide and Conquer},
  institution = {BPMCenter.org},
  number      = {BPM-16-06},
  url         = {http://bpmcenter.org/wp-content/uploads/reports/2016/BPM-16-06.pdf},
  year        = {2016},
  date        = {2016-01-01},
  keywords    = {},
  pubstate    = {published},
  tppubtype   = {techreport}
}
Verbeek, H. M. W.; Mannhardt, F.
The DrFurby Classifier submission to the Process Discovery Contest @ BPM 2016 Technical Report
BPMCenter.org no. BPM-16-08, 2016.
@techreport{Verbeek16c,
  author      = {H. M. W. Verbeek and F. Mannhardt},
  title       = {The DrFurby Classifier submission to the Process Discovery Contest @ BPM 2016},
  institution = {BPMCenter.org},
  number      = {BPM-16-08},
  url         = {http://bpmcenter.org/wp-content/uploads/reports/2016/BPM-16-08.pdf},
  year        = {2016},
  date        = {2016-01-01},
  keywords    = {},
  pubstate    = {published},
  tppubtype   = {techreport}
}
van der Aalst, W. M. P.; Kalenkova, A.; Rubin, V.; Verbeek, H. M. W.
Process Discovery Using Localized Events Proceedings Article
In: Petri Nets 2015, Springer, 2015, (Accepted for publication).
@inproceedings{Aalst15,
  author    = {W. M. P. van der Aalst and A. Kalenkova and V. Rubin and H. M. W. Verbeek},
  title     = {Process Discovery Using Localized Events},
  booktitle = {Petri Nets 2015},
  publisher = {Springer},
  url       = {http://www.win.tue.nl/~hverbeek/downloads/preprints/Aalst15.pdf},
  year      = {2015},
  date      = {2015-01-01},
  note      = {Accepted for publication},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Hompes, B. F. A.; Verbeek, H. M. W.; van der Aalst, W. M. P.
Finding Suitable Activity Clusters for Decomposed Process Discovery Proceedings Article
In: SIMPDA 2014 Post-proceedings, 2015, (Accepted for publication).
@inproceedings{Hompes15,
  author    = {B. F. A. Hompes and H. M. W. Verbeek and W. M. P. van der Aalst},
  title     = {Finding Suitable Activity Clusters for Decomposed Process Discovery},
  booktitle = {SIMPDA 2014 Post-proceedings},
  url       = {http://www.win.tue.nl/~hverbeek/downloads/preprints/Hompes15.pdf},
  year      = {2015},
  date      = {2015-01-01},
  urldate   = {2015-01-01},
  note      = {Accepted for publication},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Schunselaar, D. M. M.; Leopold, H.; Verbeek, H. M. W.; van der Aalst, W. M. P.
Configuring Configurable Process Models Made Easier: An Automated Approach Book Section
In: BPM 2014 Workshops, vol. 202, pp. 105–117, Springer, 2015.
@incollection{Schunselaar14a,
  author    = {D. M. M. Schunselaar and H. Leopold and H. M. W. Verbeek and W. M. P. van der Aalst},
  title     = {Configuring Configurable Process Models Made Easier: An Automated Approach},
  booktitle = {BPM 2014 Workshops},
  volume    = {202},
  pages     = {105--117},
  publisher = {Springer},
  doi       = {10.1007/978-3-319-15895-2_10},
  url       = {http://www.win.tue.nl/~hverbeek/downloads/preprints/Schunselaar14a.pdf},
  year      = {2015},
  date      = {2015-01-01},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {incollection}
}
Schunselaar, D. M. M.; Verbeek, H. M. W.; van der Aalst, W. M. P.; Reijers, H. A.
A Structural Model Comparison for finding the Best Performing Models in a Collection Technical Report
no. BPM-15-05, 2015.
@techreport{Schunselaar15,
  author      = {D. M. M. Schunselaar and H. M. W. Verbeek and W. M. P. van der Aalst and H. A. Reijers},
  title       = {A Structural Model Comparison for finding the Best Performing Models in a Collection},
  institution = {BPMCenter.org},
  number      = {BPM-15-05},
  url         = {http://bpmcenter.org/wp-content/uploads/reports/2015/BPM-15-05.pdf},
  year        = {2015},
  date        = {2015-01-01},
  urldate     = {2015-01-01},
  keywords    = {},
  pubstate    = {published},
  tppubtype   = {techreport}
}