@inproceedings{wimmer2023d, title = {Towards Modeling Process Mining for Graphical Editors}, author = {MohammadHadi Dehghani and Luca Berardinelli and Manuel Wimmer }, url = {https://ieeexplore.ieee.org/document/10350801}, doi = {10.1109/MODELS-C59198.2023.00146}, isbn = {979-8-3503-2499-0}, year = {2023}, date = {2023-12-22}, urldate = {2023-12-22}, abstract = {Engineering tools typically offer graphical environments to help modelers deal with the complexity of designing software-intensive systems. Collecting and analyzing how different users perform modeling actions is a valuable asset to improve the user experience, the quality of the modeled system, and the evolution of the modeling language and accompanying tool support. This tool paper presents a novel tool that captures user interaction events in graphical modeling editors and enables mining modeling processes. Modeling events from Sirius-based graphical editors and GLSP-compliant editors are saved in IEEE eXtensible Event Stream (XES) format and integrated with the open-source ProM process mining tool as one example. By importing modeling traces into process mining tools, analysts can visualize and gain insights into the underlying modeling processes, enabling informed decision-making for designing modeling language and tool improvements. 
Initial experimental results demonstrate the applicability of our tool in capturing and analyzing user interactions on desktop and Web editors in solo and collaborative modeling sessions.}, howpublished = {2023 ACM/IEEE International Conference on Model Driven Engineering Languages and Systems Companion (MODELS-C)}, keywords = {Analytical models, Collaboration, Decision making, PROM, User experience, Visualization, Xenon}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{wimmer2023e, title = {Preface to the 3rd International Workshop on Model-Driven Engineering for Digital Twins (MoDDiT 2023)}, author = {Tony Clark and Loek Cleophas and Romina Eramo and Vinay Kulkarni and Manuel Wimmer}, url = {https://ieeexplore.ieee.org/document/10350496}, doi = {10.1109/MODELS-C59198.2023.00075}, isbn = {979-8-3503-2499-0}, year = {2023}, date = {2023-12-22}, urldate = {2023-12-22}, abstract = {Digital twin (DT) is a concept that is gaining growing attention in many disciplines to support engineering, monitoring, controlling, optimizing, and maintaining cyber-physical systems (CPSs) and beyond. It refers to the ability to clone an actual system into a virtual counterpart that reflects all the important properties and characteristics of the original systems within a specific application context. While the benefits of DTs have been demonstrated in many contexts, their development, maintenance, and evolution, yield major challenges. Part of these needs to be addressed from a Model-Driven Engineering (MDE) perspective. 
MoDDiT'23 aims at bringing together researchers and practitioners on DTs to shape the future of systematically designing, engineering, evolving, maintaining, and evaluating DTs across different disciplines.}, howpublished = {2023 ACM/IEEE International Conference on Model Driven Engineering Languages and Systems Companion (MODELS-C), Vasteras, Sweden, 1-6 October 2023.}, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{wimmer2023f, title = {Preface to the 5th Workshop on Artificial Intelligence and Model-Driven Engineering (MDE 2023)}, author = {Lola Burgueño and Dominik Bork and Jessie Galasso and Manuel Wimmer}, url = {https://ieeexplore.ieee.org/document/10350710}, doi = {10.1109/MODELS-C59198.2023.00093}, isbn = {979-8-3503-2499-0}, year = {2023}, date = {2023-12-22}, urldate = {2023-12-22}, abstract = {Model-driven engineering (MDE) and Artificial Intelligence (AI) have gained momentum in recent years, and the fusion of techniques and tools in the two domains paves the way for several applications. Such integrations—which we call MDE Intelligence—are bidirectional, i.e., MDE activities can benefit from the integration of AI ideas and, in return, AI can benefit from the automation and subject-matter-expert integration offered by MDE. 
The 5th edition of the Workshop on Artificial Intelligence and Model-driven Engineering (MDE Intelligence), held in conjunction with the IEEE/ACM 26th International Conference on Model-Driven Engineering Languages and Systems (MODELS 2023), follows up on the success of the previous four editions, and provides a forum to discuss, study, and explore the opportunities offered and the challenges raised by integrating AI and MDE.}, howpublished = {2023 ACM/IEEE International Conference on Model Driven Engineering Languages and Systems Companion (MODELS-C)}, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{wimmer2023g, title = {Preface to the 4th International Workshop on Modeling in Low-Code Development Platforms (LowCode 2023)}, author = {Dimitris Kolovos and Juan de Lara and Manuel Wimmer }, url = {https://ieeexplore.ieee.org/document/10350812}, doi = {10.1109/MODELS-C59198.2023.00134}, isbn = {979-8-3503-2498-3}, year = {2023}, date = {2023-12-22}, urldate = {2023-12-22}, abstract = {The growing need for secure, trustworthy, and cost-efficient software, the recent developments in cloud computing technologies, and the shortage of highly skilled professional software developers, have given rise to a new generation of low-code software development platforms, such as Google AppSheet and Microsoft PowerApps. Low-code platforms enable the development and deployment of fully functional applications using mainly visual abstractions and interfaces and requiring little or no procedural code. 
This makes them accessible to an increasingly digital-native and tech-savvy workforce who can directly and effectively contribute to the software development process, even if they lack a programming background.}, howpublished = {2023 ACM/IEEE International Conference on Model Driven Engineering Languages and Systems Companion (MODELS-C), Vasteras, Sweden, 1-6 October 2023.}, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @article{lehner2023f, title = {IoT: Chancen und Herausforderungen für Software-Entwickler:innen}, author = {Daniel Lehner}, url = {https://www.informatik-aktuell.de/betrieb/netzwerke/iot-chancen-und-herausforderungen-fuer-software-entwicklerinnen.html}, issn = {2511-7564}, year = {2023}, date = {2023-11-14}, urldate = {2023-11-14}, journal = {Informatik Aktuell}, abstract = {Das Internet of Things ermöglicht uns, in Software-Systemen auf Geräte in der "echten" (physischen) Welt zuzugreifen. Durch immer günstigere Hardware sowie die Erforschung neuer Einsatzmöglichkeiten findet das IoT mittlerweile Einzug in fast alle Lebensbereiche – von Haushalten (Smart Home) über Gesundheit (Quantify Me) und Pflege bis zur industriellen Produktion (Industrie 4.0). Die Entwicklung von Software, welche die Möglichkeiten dieser IoT-Geräte ausschöpft und erweitert, erlaubt es Software-Entwickler:innen, die damit einhergehende Innovation aktiv mitzugestalten. In diesem Artikel gehen wir darauf ein, welche Chancen wir als Software-Entwickler:innen nutzen können, indem wir Software für das IoT entwickeln, und welchen Herausforderungen wir bei der Entwicklung solcher Software gegenüberstehen.}, keywords = {IoT, Smart Home}, pubstate = {published}, tppubtype = {article} } @article{gemeinhardtc, title = {Quantum Combinatorial Optimization in the NISQ Era: A Systematic Mapping Study}, author = {Felix G. 
Gemeinhardt and Antonio Garmendia and Manuel Wimmer and Benjamin Weder and Frank Leymann}, url = {https://dl.acm.org/doi/pdf/10.1145/3620668 https://se.jku.at/quantum-combinatorial-optimization-in-the-nisq-era-a-systematic-mapping-study/}, doi = {10.1145/3620668}, year = {2023}, date = {2023-10-16}, urldate = {2023-10-16}, journal = {ACM Computing Surveys}, volume = {56}, number = {3}, pages = {36}, abstract = {The application of quantum computing to combinatorial optimization problems is attracting increasing research interest, resulting in diverse approaches and research streams. This study aims at identifying, classifying, and understanding existing solution approaches as well as typical use cases in the field. The obtained classification schemes are based on a full-text analysis of 156 included papers. Our results can be used by researchers and practitioners to (i) better understand adaptations to and utilizations of existing gate-based and quantum annealing approaches, and (ii) identify typical use cases for quantum computing in areas like graph optimization, routing and scheduling.}, keywords = {NISQ, quantum annealing, systematic mapping study, variational quantum algorithms}, pubstate = {published}, tppubtype = {article} } @proceedings{wimmer2023h, title = {Exploring Refactoring Operations for IEC 61499}, author = {Michael Oberlehner and Andreas Eigner and Manuel Wimmer and Alois Zoitl}, url = {https://ieeexplore.ieee.org/document/10275635}, doi = {10.1109/ETFA54631.2023.10275635}, isbn = {979-8-3503-3991-8}, year = {2023}, date = {2023-10-12}, urldate = {2023-10-12}, booktitle = {2023 IEEE 28th International Conference on Emerging Technologies and Factory Automation (ETFA)}, abstract = {In the domain of CPPS, long-term maintenance of software is necessary. Refactoring, a technique used to improve maintainability is a viable solution. However, there is a scarcity of research on refactoring in the context of CPPS. 
In this work, we propose a systematic approach for analyzing and deriving refactoring operations for IEC 61499. In particular, we propose a generic meta-model for block-based languages, which is accompanied by a corresponding generic refactoring catalog. Based on the generic meta-model, an existing refactoring catalog for IEC 61499 is extended by comparing and evaluating meta-models and their associated refactoring operations derived from literature of different modeling languages. We demonstrate that our approach provides a systematic way of transferring refactoring operations across block-based modeling languages and allows to extend existing refactoring collections for IEC 61499.}, howpublished = {2023 IEEE 28th International Conference on Emerging Technologies and Factory Automation (ETFA), Sinaia, Romania, 12-15 September 2023.}, keywords = {Cyber-Physical Systems, model-driven software engineering, Refactoring}, pubstate = {published}, tppubtype = {proceedings} } @inproceedings{Sint2023c, title = {An Interdisciplinary Course on Model-Based Systems Engineering}, author = {Azad Khandoker and Sabine Sint and Manuel Wimmer and Klaus Zemann}, url = {https://se.jku.at/an-interdisciplinary-course-on-model-based-systems-engineering/ https://ieeexplore.ieee.org/document/10350649}, doi = {10.1109/MODELS-C59198.2023.00033}, year = {2023}, date = {2023-10-06}, urldate = {2023-10-06}, booktitle = {2023 ACM/IEEE International Conference on Model Driven Engineering Languages and Systems Companion (MODELS-C), Västeras, Schweden, October 1-6/2023}, pages = {8}, abstract = {Model-Based Systems Engineering (MBSE) has emerged as a promising approach to design and develop complex engineering systems. 
Its adoption is steadily increasing in various industries and along additional system life cycle phases, showcasing its potential to enhance system development processes, to improve overall system performance, to support traceability, safety & security, maintenance, condition monitoring, upcycling, recycling, and even circular economy. As MBSE is becoming more prevalent in several industries, it is crucial to incorporate MBSE education into engineering curricula to prepare future engineers with the necessary knowledge, methods, skills, and tools. In this paper, we present our interdisciplinary MBSE course at Johannes Kepler University in Linz and further explore the challenges and opportunities of the current state of MBSE education for the effective integration of MBSE into engineering education.}, keywords = {MBSE}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{lehner2023b, title = {A Model-Driven Platform for Engineering Holistic Digital Twins}, author = {Daniel Lehner}, url = {https://se.jku.at/a-model-driven-platform-for-engineering-holistic-digital-twins/}, year = {2023}, date = {2023-10-06}, urldate = {2023-10-06}, booktitle = {26th International Conference on Model Driven Engineering Languages and Systems, MODELS 2023, Västeras, Schweden, October 1-6, 2023}, journal = {A Model-Driven Platform for Engineering Holistic Digital Twins, 26th International Conference on Model Driven Engineering Languages and Systems, MODELS 2023, Västeras, Schweden, October 1-6, 2023.}, abstract = {With the combination of software and physical devices into so-called cyber-physical systems (CPSs), Digital Twins (DTs) have emerged to handle the resulting complexity and efficiently connect software to physical devices, the so-called physical twins (PTs). 
While DTs have gained more and more interest in both industry and academia in recent years, several vendors started to provide so-called DT platforms that offer software tools that promise to make it easier to develop and maintain DTs. When investigating these platforms in more detail, we found that they require the redundant specification of information that is usually already defined in engineering models describing the underlying PT. Additionally, they focus on connecting services to the running PT. Most DT applications however also need a connection to a simulation of the PT, which is currently not supported by the examined DT platforms. As different DT platforms usually each use their own proprietary language and software tooling, it is also currently time-demanding to integrate them with the software services that realize functionality based on these platforms. In the described thesis project, we propose an extended DT platform that solves the mentioned problems by leveraging Model-driven Engineering (MDE) techniques. More precisely, we (i) develop model transformations from existing engineering models to the proprietary DT models used by current DT platforms, (ii) create a DT megamodel that integrates DT models of existing platforms with models representing different endpoints such as PTs or simulations, and generic service descriptions, and (iii) propose a workflow model to define the interactions between different services and DTs, and a method that automates the integration of services and DTs into DT architectures based on this workflow model and the DT megamodel. We aim to evaluate our work by performing case studies on a set of CPSs. In these case studies, we measure the steps required for setting up and maintaining DT architectures for these CPSs, comparing our extended DT platform to existing DT platform support. 
}, keywords = {Digital Twin, Model-Driven}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{eisenberg2023, title = {Model-Driven Optimization for Quantum Program Synthesis with MOMoT}, author = {Felix Gemeinhardt and Martin Eisenberg and Stefan Klikovits and Manuel Wimmer}, url = {https://se.jku.at/model-driven-optimization-for-quantum-program-synthesis-with-momot/ https://ieeexplore.ieee.org/document/10350515}, doi = {10.1109/MODELS-C59198.2023.00100}, year = {2023}, date = {2023-10-06}, urldate = {2023-10-06}, booktitle = {26th International Conference on Model Driven Engineering Languages and Systems MODELS 2023, Västeras, Schweden, October 1-6, 2023}, abstract = {In the realm of classical software engineering, model-driven optimization has been widely used for different problems such as (re)modularization of software systems. In this paper, we investigate how techniques from model-driven optimization can be applied in the context of quantum software engineering. In quantum computing, creating executable quantum programs is a highly non-trivial task which requires significant expert knowledge in quantum information theory and linear algebra. Although different approaches for automated quantum program synthesis exist—e.g., based on reinforcement learning and genetic programming—these approaches represent tailor-made solutions requiring dedicated encodings for quantum programs. This paper applies the existing model-driven optimization approach MOMoT to the problem of quantum program synthesis. 
We present the resulting platform for experimenting with quantum program synthesis and present a concrete demonstration for a well-known quantum algorithm.}, keywords = {Model-Driven, MOMoT}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Klikovits2023, title = {Towards Generating Model-Driven Speech Interfaces for Digital Twins}, author = {Ramya Jayaraman and Daniel Lehner and Stefan Klikovits and Manuel Wimmer }, url = {https://se.jku.at/towards-generating-model-driven-speech-interfaces-for-digital-twins/ https://ieeexplore.ieee.org/document/10350472}, doi = {10.1109/MODELS-C59198.2023.00080}, year = {2023}, date = {2023-10-06}, urldate = {2023-10-06}, booktitle = {26th International Conference on Model Driven Engineering Languages and Systems, MODELS 2023, Västeras, Schweden, October 1-6, 2023}, abstract = {The recent decade saw enormous advances of the capabilities of speech synthesis and speech recognition. While specific benefits depend on the individual applications, speech interfaces typically increase accessibility, enable “hands-free” and “no-screen” interaction, and often increase interaction speed and allow for more flexible usage patterns and increased multitasking, leading to higher user satisfaction. This paper presents a method to transfer these powerful benefits to the digital twin (DT) domain by automatically generating speech interfaces for DT applications. Our approach is based on Model Driven Engineering principles, where we automatically deduce command patterns from structural model information as used in, e.g., DT platform specifications. The speech interface generation is highly configurable and extendable, thus it can be applied to different DT models. The concepts behind the generator are generic as well, thus they can be ported to other applications and platforms. 
We validate our approach by applying it to two DT demonstration cases and provide a detailed description of the sketch interface configuration workflow.}, keywords = {Digital Twin, Model-Driven}, pubstate = {published}, tppubtype = {inproceedings} } @article{wimmer2023o, title = {Fault localization in DSLTrans model transformations by combining symbolic execution and spectrum-based analysis}, author = {Bentley James Oakes and Javier Troya and Jessie Galasso and Manuel Wimmer }, url = {https://link.springer.com/article/10.1007/s10270-023-01123-3}, doi = {10.1007/s10270-023-01123-3}, year = {2023}, date = {2023-09-29}, urldate = {2024-09-29}, journal = {Software and Systems Modeling}, volume = {22}, issue = {6}, pages = {22}, abstract = {The verification of model transformations is important for realizing robust model-driven engineering technologies and quality-assured automation. Many approaches for checking properties of model transformations have been proposed. Most of them have focused on the effective and efficient detection of property violations by contract checking. However, there remains the fault localization step between identifying a failing contract for a transformation based on verification feedback and precisely identifying the faulty rules. While there exist fault localization approaches in the model transformation verification literature, these require the creation and maintenance of test cases, which imposes an additional burden on the developer. In this paper, we combine transformation verification based on symbolic execution with spectrum-based fault localization techniques for identifying the faulty rules in DSLTrans model transformations. This fault localization approach operates on the path condition output of symbolic transformation checkers instead of requiring a set of test input models. 
In particular, we introduce a workflow for running the symbolic execution of a model transformation, evaluating the defined contracts for satisfaction, and computing different measures for tracking the faulty rules. We evaluate the effectiveness of spectrum-based analysis techniques for tracking faulty rules and compare our approach to previous works. We evaluate our technique by introducing known mutations into five model transformations. Our results show that the best spectrum-based analysis techniques allow for effective fault localization, showing an average EXAM score below 0.30 (less than 30% of the transformation needs to be inspected). These techniques are also able to locate the faulty rule in the top-three ranked rules in 70% of all cases. The impact of the model transformation, the type of mutation and the type of contract on the results is discussed. Finally, we also investigate the cases where the technique does not work properly, including discussion of a potential pre-check to estimate the prospects of the technique for a certain transformation.}, keywords = {}, pubstate = {published}, tppubtype = {article} } @article{wimmer2023m, title = {GRuM — A flexible model-driven runtime monitoring framework and its application to automated aerial and ground vehicles}, author = {Michael Vierhauser and Antonio Garmendia and Marco Stadler and Manuel Wimmer and Jane Cleland-Huang}, url = {https://www.sciencedirect.com/science/article/pii/S0164121223001280?via%3Dihub}, doi = {10.1016/j.jss.2023.111733}, year = {2023}, date = {2023-09-15}, urldate = {2023-09-15}, journal = {Journal of Systems and Software}, volume = {203}, pages = {16}, abstract = {Runtime monitoring is critical for ensuring safe operation and for enabling self-adaptive behavior of Cyber-Physical Systems (CPS). Monitors are established by identifying runtime properties of interest, creating probes to instrument the system, and defining constraints to be checked at runtime. 
For many systems, implementing and setting up a monitoring platform can be tedious and time-consuming, as generic monitoring platforms do not adequately cover domain-specific monitoring requirements. This situation is exacerbated when the System under Monitoring (SuM) evolves, requiring changes in the monitoring platform. Most existing approaches lack support for the automated generation and setup of monitors for diverse technologies and do not provide adequate support for dealing with system evolution. In this paper, we present GRuM (Generating CPS Runtime Monitors), a framework that combines model-driven techniques and runtime monitoring, to automatically generate a customized monitoring platform for a given SuM. Relevant properties are captured in a Domain Model Fragment, and changes to the SuM can be easily accommodated by automatically regenerating the platform code. To demonstrate the feasibility and performance we evaluated GRuM against two different systems using TurtleBot robots and Unmanned Aerial Vehicles. Results show that GRuM facilitates the creation and evolution of a runtime monitoring platform with little effort and that the platform can handle a substantial amount of events and data.}, keywords = {Cyber-Physical Systems, Model-Driven Engineering, runtime monitoring}, pubstate = {published}, tppubtype = {article} } @article{wimmer2023n, title = {Quo Vadis modeling?}, author = {Judith Michael and Dominik Bork and Manuel Wimmer and Heinrich C. Mayr }, url = {https://link.springer.com/article/10.1007/s10270-023-01128-y}, doi = {10.1007/s10270-023-01128-y}, year = {2023}, date = {2023-09-04}, urldate = {2024-09-04}, journal = {Software and Systems Modeling}, volume = {22}, issue = {6}, pages = {22}, abstract = {Models are the key tools humans use to manage complexity in description, development, and analysis. This applies to all scientific and engineering disciplines and in particular to the development of software and data-intensive systems. 
However, different methods and terminologies have become established in the individual disciplines, even in the sub-fields of Informatics, which raises the need for a comprehensive and cross-sectional analysis of the past, present, and future of modeling research. This paper aims to shed some light on how different modeling disciplines emerged and what characterizes them with a discussion of the potential toward a common modeling future. It focuses on the areas of software, data, and process modeling and reports on an analysis of the research approaches, goals, and visions pursued in each, as well as the methods used. This analysis is based on the results of a survey conducted in the communities concerned, on a bibliometric study, and on interviews with a prominent representative of each of these communities. The paper discusses the different viewpoints of the communities, their commonalities and differences, and identifies possible starting points for further collaboration. It further discusses current challenges for the communities in general and modeling as a research topic in particular and highlights visions for the future.}, keywords = {Data modeling, Information systems, Process modeling, Research communities, Software Engineering, Software Modeling}, pubstate = {published}, tppubtype = {article} } @inproceedings{lehner2023c, title = {An Architectural Extension for Digital Twin Platforms to Leverage Behavioral Models}, author = {Daniel Lehner and S.Gil and P.h. 
Mikkelsen and Peter Gorm Larsen and Manuel Wimmer }, url = {https://se.jku.at/an-architectural-extension-for-digital-twin-platforms-to-leverage-behavioral-models/ }, year = {2023}, date = {2023-08-30}, urldate = {2023-08-30}, booktitle = {19th International Conference on Automation Science and Engineering (CASE 2023), Cordis, Auckland, New Zealand, August 26-30}, abstract = {To reduce the work effort for developing and managing Digital Twin (DT)-based services, so-called DT platforms have emerged recently. These DT platforms make it easier to collect, store, and manage data from physical devices, but often they do not provide means for running behavioral models or synchronizing these with runtime data from the physical devices. These aspects are however crucial for developing many DT-based services. As a result, the implementation of a DT that uses behavioral models still requires a lot of implementation effort, which hinders the adoption of DTs for many applications, in particular for small and medium-sized enterprises that often do not have the resources to implement such complex systems.}, howpublished = {19th International Conference on Automation Science and Engineering (CASE 2023), Cordis, Auckland, New Zealand, August 26-30, 2023}, keywords = {Digital Twin, leverage behavioral Models}, pubstate = {published}, tppubtype = {inproceedings} } @inbook{wimmer2023i, title = {Leveraging Artificial Intelligence for Model-based Software Analysis and Design}, author = {Antonio Garmendia and Dominik Bork and Martin Eisenberg and Thiago Ferreira and Marouane Kessentini and Manuel Wimmer}, editor = {Jose Raul Romero and Inmaculada Medina-Bulo and Francisco Chicano}, url = {https://link.springer.com/chapter/10.1007/978-981-19-9948-2_4}, doi = {10.1007/978-981-19-9948-2_4}, isbn = {978-981-19-9947-5}, year = {2023}, date = {2023-07-20}, urldate = {2023-07-20}, booktitle = {Optimising the Software Development Process with Artificial Intelligence}, journal = {Optimising the 
Softeware Development }, pages = {93-177}, publisher = {Springer Verlag}, abstract = {Fundamental decisions are made in the early phases of software development. The typical outcomes of these phases are models of different kinds, such as architectural models, data models, and process models. Automation support is required to efficiently and effectively handle large models and conduct continuous quality improvement processes. Thus, several approaches have been proposed that integrate modeling with Artificial Intelligence (AI) methods such as Genetic Algorithms (GAs), among others. These approaches, e.g., transform models to improve their quality by searching for good solutions within the potential solution space. In this chapter, we first review existing applications of AI methods to model-based software engineering problems. Subsequently, we show a representative use case of how a model-based software analysis and design problem can be solved using GAs. In particular, we focus on the well-known and challenging modularization problem: splitting an overarching, monolithic model into smaller modules. We present two encodings, the model-based and the transformation-based encoding, which are both applied for the modularization of Entity-Relationship (ER) diagrams. 
We further discuss how these encodings may be adapted to other structural models and conclude with an outlook on future research lines related to software modeling intelligence.}, keywords = {Artificial intelligence, Model-Based Software}, pubstate = {published}, tppubtype = {inbook} } @inproceedings{wimmer2023b, title = {Hybrid Multi-Objective Genetic Programming for Parameterized Quantum Operator Discovery}, author = {Felix Gemeinhardt and Stefan Klikovits and Manuel Wimmer}, url = {https://se.jku.at/hybrid-multi-objective-genetic-programming-for-parameterized-quantum-operator-discovery/}, doi = {10.1145/3583133.3590696}, isbn = {979-8-4007-0120-7}, year = {2023}, date = {2023-07-19}, urldate = {2023-07-19}, booktitle = {Genetic and Evolutionary Computation Conference Companion (GECCO ’23 @ Lisbon), Lisbon, Portugal, July 15–19, 2023, hybrid}, pages = {4}, abstract = {The processing of quantum information is defined by quantum circuits. For applications on current quantum devices, these are usually parameterized, i.e., they contain operations with variable parameters. The design of such quantum circuits and aggregated higher-level quantum operators is a challenging task which requires significant knowledge in quantum information theory, provided a polynomial-sized solution can be found analytically at all. Moreover, finding an accurate solution with low computational cost represents a significant trade-off, particularly for the current generation of quantum computers. To tackle these challenges, we propose a multi-objective genetic programming approach—hybridized with a numerical parameter optimizer—to automate the synthesis of parameterized quantum operators. To demonstrate the benefits of the proposed approach, it is applied to a quantum circuit of a hybrid quantum-classical algorithm, and then compared to an analytical solution as well as a non-hybrid version. 
The results show that, compared to the non-hybrid version, our method produces more diverse solutions and more accurate quantum operators which even reach the quality of the analytical baseline. }, keywords = {Genetic Programming, Hybrid Research, Quantum Circuit Synthesis, Search-Based Quantum Software Engineering}, pubstate = {published}, tppubtype = {inproceedings} } @proceedings{gemeinhardtd, title = {Hybrid Multi-Objective Genetic Programming for Parameterized Quantum Operator Discovery}, author = {Felix G. Gemeinhardt and Stefan Klikovits and Manuel Wimmer}, url = {https://se.jku.at/hybrid-multi-objective-genetic-programming-for-parameterized-quantum-operator-discovery/}, doi = {10.1145/3583133.3590696}, isbn = {979-8-4007-0120-7}, year = {2023}, date = {2023-06-24}, urldate = {2023-06-24}, booktitle = {GECCO '23 Companion: Proceedings of the Companion Conference on Genetic and Evolutionary Computation}, abstract = {The processing of quantum information is defined by quantum circuits. For applications on current quantum devices, these are usually parameterized, i.e., they contain operations with variable parameters. The design of such quantum circuits and aggregated higher-level quantum operators is a challenging task which requires significant knowledge in quantum information theory, provided a polynomial-sized solution can be found analytically at all. Moreover, finding an accurate solution with low computational cost represents a significant trade-off, particularly for the current generation of quantum computers. To tackle these challenges, we propose a multi-objective genetic programming approach-hybridized with a numerical parameter optimizer – to automate the synthesis of parameterized quantum operators. To demonstrate the benefits of the proposed approach, it is applied to a quantum circuit of a hybrid quantum-classical algorithm, and then compared to an analytical solution as well as a non-hybrid version. 
The results show that, compared to the non-hybrid version, our method produces more diverse solutions and more accurate quantum operators which even reach the quality of the analytical baseline.}, howpublished = {The Genetic and Evolutionary Computation Conference (GECCO ’23@Lisbon)}, keywords = {Gene, Genetic Programming, Hybrid search, Quantum, Quantum Circuit Synthesis, Search-Based Quantum Software Engineering}, pubstate = {published}, tppubtype = {proceedings} } @article{wimmer2023l, title = {A Model-based Mode-switching Framework based on Security Vulnerability Scores}, author = {Michael Riegler and Johannes Sametinger and Michael Vierhauser and Manuel Wimmer}, url = {https://www.sciencedirect.com/science/article/pii/S0164121223000286?via%3Dihub}, doi = {10.1016/j.jss.2023.111633}, year = {2023}, date = {2023-06-15}, journal = {Journal of Systems and Software}, volume = {200}, pages = {16}, abstract = {Software vulnerabilities can affect critical systems within an organization impacting processes, workflows, privacy, and safety. When a software vulnerability becomes known, affected systems are at risk until appropriate updates become available and eventually deployed. This period can last from a few days to several months, during which attackers can develop exploits and take advantage of the vulnerability. It is tedious and time-consuming to keep track of vulnerabilities manually and perform necessary actions to shut down, update, or modify systems. Vulnerabilities affect system components, such as a web server, but sometimes only target specific versions or component combinations. In this paper, we propose a novel approach for automated mode switching of software systems to support system administrators in dealing with vulnerabilities and reducing the risk of exposure. We rely on model-driven techniques and use a multi-modal architecture to react to discovered vulnerabilities and provide automated contingency support. 
We have developed a dedicated domain-specific language to describe potential mitigation as mode switches. We have evaluated our approach with a web server case study, analyzing historical vulnerability data. Based on the vulnerabilities scores sum, we demonstrated that switching to less vulnerable modes reduced the attack surface in 98.9% of the analyzed time.}, keywords = {Domain-Specific Languages, mode switching, Resilience, Security, Vulnerabilities}, pubstate = {published}, tppubtype = {article} } @article{Sint2023, title = {A pattern catalog for augmenting Digital Twin models with behavior}, author = {Daniel Lehner and Sabine Sint and Martin Eisenberg and Manuel Wimmer }, url = {https://www.degruyter.com/document/doi/10.1515/auto-2022-0144/html }, doi = {10.1515/auto-2022-0144}, year = {2023}, date = {2023-06-07}, journal = {at - Automatisierungstechnik}, volume = {71}, number = {6}, pages = {20}, abstract = {Digital Twins are emerging as a solution to build and extend existing software systems to make better use of data produced by physical systems. For supporting the development of Digital Twins, several software vendors are offering dedicated tool support, often referred to as Digital Twin platforms. The modeling capabilities of these platforms are mostly concerned with structural viewpoints, i.e., providing an overview of available components including their current and historical sensor values. However, behavioral viewpoints did not yet receive much attention on these platforms. As behavioral models are often used during the design processes, e.g., for simulation and synthesis, it would be beneficial for having them included in Digital Twin platforms, e.g., for reasoning on the set of possible next actions or for checking the execution history to perform runtime validation. 
In this paper, we present a catalog of modeling patterns for augmenting Digital Twin models with behavioral models and their corresponding runtime information without requiring any extension of the code bases of Digital Twin platforms. We demonstrate the presented modeling patterns by applying them to the Digital Twin platform offered by Microsoft, in an additive manufacturing use case of a 3D printer in a production line.}, keywords = {Behavior Modeling, Digital Twin, Language engineering, Model-Driven Engineering, Modeling Patterns}, pubstate = {published}, tppubtype = {article} } @article{lehner2023e, title = {Digitale Zwillinge – viele Tools, ein Überblick}, author = {Daniel Lehner}, url = {https://www.heise.de/select/ix/2023/4/2232111035410609397}, year = {2023}, date = {2023-04-20}, urldate = {2023-04-20}, journal = {iX Magazine, Seite 88}, abstract = {D. Lehner: Digitale Zwillinge - viele Tools, ein Überblick in iX Magazine, page 88, April 2023. article Digitale Zwillinge gelten als Schlüsseltechnik in der vernetzten Industrieproduktion. Als virtuelle Abbilder von Maschinen machen sie nicht nur die Datenlandschaft übersichtlicher. Auswahl an Tools gibt es reichlich: Digitale Zwillinge sind eine Kernsoftwarekomponente der Industrie 4.0. Sie haben eine Reihe von Vorteilen und bieten eine einheitliche Schnittstelle sowohl zum physischen System als auch zur einer virtuellen Softwareversion des Geräts. So ermöglichen sie unter anderem die Simulation von Aktionen und Predictive Maintenance. Ein System für digitale Zwillinge besteht gewöhnlich aus drei Komponenten: dem zentralen Digital-Twin-Interface, einem verbindenden IoT-Hub und häufig einem Simulationstool. Alle drei Bereiche haben eigene Tools, die es in Einklang zu bringen gilt. Die derzeitige Marktsituation ist unübersichtlich, kein Anbieter bietet in allen Teilbereichen vollständige Softwareprodukte an. 
Das eigene Toolpaket sollte man daher sorgfältig zusammenzustellen.}, keywords = {digital twins}, pubstate = {published}, tppubtype = {article} } @article{wimmer2023, title = {Modelling assistants based on information reuse: a user evaluation for language engineering}, author = {Angel Mora Segura and Juan de Lara and Manuel Wimmer}, url = {https://se.jku.at/modelling-assistants-based-on-information-reuse-a-user-evaluation-for-language-engineering/}, doi = {10.1007/s10270-023-01094-5}, year = {2023}, date = {2023-04-17}, urldate = {2023-04-17}, journal = {Journal of Software Systems Modeling}, abstract = {Model-driven engineering (MDE) uses models as first-class artefacts during the software development lifecycle. MDE often relies on domain-specific languages (DSLs) to develop complex systems. The construction of a new DSL implies a deep understanding of a domain, whose relevant knowledge may be scattered in heterogeneous artefacts, like XML documents, (meta-)models, and ontologies, among others. This heterogeneity hampers their reuse during (meta-)modelling processes. Under the hypothesis that reusing heterogeneous knowledge helps in building more accurate models, more efficiently, in previous works we built a (meta-)modelling assistant called Extremo. Extremo represents heterogeneous information sources with a common data model, supports its uniform querying and reusing information chunks for building (meta-)models. To understand how and whether modelling assistants—like Extremo—help in designing a new DSL, we conducted an empirical study, which we report in this paper. In the study, participants had to build a meta-model, and we measured the accuracy of the artefacts, the perceived usability and utility and the time to completion of the task. Interestingly, our results show that using assistance did not lead to faster completion times. 
However, participants using Extremo were more effective and efficient, produced meta-models with higher levels of completeness and correctness, and overall perceived the assistant as useful. The results are not only relevant to Extremo, but we discuss their implications for future modelling assistants.}, keywords = {Empirical studies, Language engineering, Modelling, Modelling assistants, Modelling process}, pubstate = {published}, tppubtype = {article} } @article{lehner2023d, title = {Model-Driven Engineering of Digital Twins}, author = { Loek Cleophas and Thomas Godfrey and Djamel Eddine Khelladi and Daniel Lehner and Benoit Combemale and Bernhard Rumpe and Steffen Zschaler }, url = {https://drops.dagstuhl.de/storage/04dagstuhl-reports/volume12/issue09/22362/DagRep.12.9.20/DagRep.12.9.20.pdf}, doi = {10.4230/DagRep.12.9.20}, year = {2023}, date = {2023-04-04}, urldate = {2023-04-04}, journal = {Dagstuhl Report}, volume = {12}, issue = {9}, pages = {21}, abstract = {This report documents the program and the outcomes of Dagstuhl Seminar 22362 "Model-Driven Engineering of Digital Twins". Digital twins are an emerging concept with the potential for revolutionising the way we interact with the physical world. Digital twins can be used for improved analysis and understanding of complex systems as well as for control and transformation of these systems. Digital twins are themselves complex software systems, posing novel software-engineering challenges, which have so far not been sufficiently addressed by the software-engineering research community. The seminar aimed as a key outcome to contribute to a solid research roadmap for the new Software Engineering subdiscipline of Model-Based Development of Digital Twins. 
This paper is an intermediate result, which is thought to be further discussed in the research community that has also been built using this seminar.}, keywords = {Data Management, Digital Twin, digital twins, model management, Model-Driven Engineering, Models@Runtime, Software Engineering}, pubstate = {published}, tppubtype = {article} } @inproceedings{lehner2023, title = {Towards a Product Line Architecture for Digital Twins}, author = {Jerome Pfeiffer and Daniel Lehner and Andreas Wortmann and Manuel Wimmer}, url = {https://se.jku.at/wp-content/uploads/2023/01/2023_ICSA_DTProductLineArchitecture_CR_final.pdf}, year = {2023}, date = {2023-03-15}, urldate = {2023-03-15}, booktitle = {20th IEEE International Conference on Software Architecture (ICSA 2023), L' Aquila, Italy, March 13-17}, pages = {4}, abstract = {Digital twins are a new kind of software systems for which corresponding architectures in different engineering domains have emerged for enabling the efficient interaction of software systems with physical systems to realize cyber-physical systems (CPS). To facilitate the development of digital twins, various software platforms emerged in recent years, which often come with a certain architecture for the developed systems together with a set of domain-specific languages (DSLs) that help domain experts to configure the platform and implement the digital twins. This results in a set of architectures and DSLs which are currently used to realize the various concerns of digital twins. 
Thus, creating a comprehensive digital twin for a given system requires the combination of several architectures and DSLs, which is challenging as (i) the components of the different architectures have to be combined on a technological level, and (ii) the concerns specified with the different DSLs are developed in isolation which potentially leads to inconsistencies, especially during the evolution of digital twins.}, howpublished = {20th IEEE International Conference on Software Architecture (ICSA 2023), poster presentation, L'Aquila, Italy, 13-17 March}, keywords = {Digital Twin, Domain-Specific Languages, Product Lines, Software Integration}, pubstate = {published}, tppubtype = {inproceedings} } @inbook{wimmer2023j, title = {Engineering Digital Twins and Digital Shadows as Key Enablers for Industry 4.0}, author = {Stefan Braun and Manuela Dalibor and Nico Jansen and Matthias Jarke and Istvan Koren and Christoph Quix and Bernhard Rumpe and Manuel Wimmer and Andreas Wortmann }, editor = {Birgit Vogel-Heuser and Manuel Wimmer}, url = {https://link.springer.com/chapter/10.1007/978-3-662-65004-2_1}, doi = {10.1007/978-3-662-65004-2_1}, isbn = {978-3-662-65003-5}, year = {2023}, date = {2023-02-03}, booktitle = {Digital Transformation }, pages = {3-31}, publisher = {Springer Verlag}, abstract = {Industry 4.0 opens up new potentials for the automation and improvement of production processes, but the associated digitization also increases the complexity of this development. Monitoring and maintenance activities in production processes still require high manual effort and are only partially automated due to immature data aggregation and analysis, resulting in expensive downtimes, inefficient use of machines, and too much production of waste. To maintain control over the growing complexity and to provide insight into the production, concepts such as Digital Twins, Digital Shadows, and model-based systems engineering for Industry 4.0 emerge. 
Digital Shadows consist of data traces of an observed Cyber-Physical Production System. Digital Twins operate on Digital Shadows to enable novel analysis, monitoring, and optimization. We present a general overview of the concepts of Digital Twins, Digital Shadows, their usage and realization in Data Lakes, their development based on engineering models, and corresponding engineering challenges. This provides a foundation for implementing Digital Twins, which constitute a main driver for future innovations in Industry 4.0 digitization.}, keywords = {Data Lake, digital shadow, Digital Twin, Industry 4.0, Model-Based Systems Engineering}, pubstate = {published}, tppubtype = {inbook} } @book{wimmer2023k, title = {Digital Transformation: Core Technologies and Emerging Topics from a Computer Science Perspective}, editor = {Birgit Vogel-Heuser and Manuel Wimmer }, doi = {10.1007/978-3-662-65004-2}, isbn = {978-3-662-65003-5}, year = {2023}, date = {2023-02-03}, urldate = {2023-02-03}, publisher = {Springer Verlag}, abstract = {Digital Transformation in Industry 4.0/5.0 requires the effective and efficient application of digitalization technologies in the area of production systems. This book elaborates on concepts, techniques, and technologies from computer science in the context of Industry 4.0/5.0 and demonstrates their possible applications. Thus, the book serves as an orientation but also as a reference work for experts in the field of Industry 4.0/5.0 to successfully advance digitization in their companies.}, keywords = {Blockchain, Cloud Computing, Data Analytics, Data Management, digital, Digital Twin, Digitization, Industry 4.0, IoT, model integration}, pubstate = {published}, tppubtype = {book} } @mastersthesis{eisenberg2022b, title = {Reinforcement Learning for Model Transformations}, author = {Martin Eisenberg}, school = {Johannes Kepler University Linz}, note = {Advisor: Univ.-Prof. Mag. Dr. 
Rick Rabiser; Co-Advisor: Univ.-Prof. Mag. Dr. Manuel Wimmer}, url = {https://digital.obvsg.at/urn/urn:nbn:at:at-ubl:1-56819}, year = {2022}, date = {2022-12-19}, urldate = {2022-12-19}, abstract = {Model transformations (MTs) are a key technology of model-driven engineering (MDE), where models are at the center of engineering processes. They are used for various tasks in the whole development lifecycle such as the verification, debugging, and simulation of systems or to generate artifacts for documentational and deployment purposes. In-place transformations in particular are characterized by direct modification of a model's composition and features. Given a set of possible modification options and means to assess a model's quality, determining the right transformations in the right order is subject towards optimizing models. Employing techniques to carry out the search for quality-improving changes unites search-based optimization and MDE, where concepts from the latter can be used to model optimization problems. In order to solve such problems, existing approaches rely primarily on meta-heuristic search. In this work we apply for the first time reinforcement learning (RL) for in-place MTs. We identify the preliminaries to employ different RL approaches like the requirement of a model encoding for policy gradient methods. Furthermore, we provide a selection of algorithms for single- and multi-objective scenarios and evaluate them on several case studies. To this extent, a framework for model-driven optimization was extended to support value-based and policy-based methods. 
Evaluation results suggest that RL algorithms can compete with existing approaches performance-wise and motivate further investigation and research lines to embrace the benefits of machine learning approaches, such as transfer learning and generalization.}, keywords = {MDE, Model Transformations}, pubstate = {published}, tppubtype = {mastersthesis} } @inproceedings{Wimmer2022g, title = {From Coverage Computation to Fault Localization: A Generic Framework for Domain-Specific Languages}, author = {Faezeh Khorram and Erwan Bousse and Antonio Garmendia and Jean-Marie Mottu and Gerson Sunyé and Manuel Wimmer }, url = {https://se.jku.at/from-coverage-computation-to-fault-localization-a-generic-framework-for-domain-specific-languages/ https://dl.acm.org/doi/pdf/10.1145/3567512.3567532}, doi = {10.1145/3567512.3567532}, year = {2022}, date = {2022-12-01}, urldate = {2022-12-01}, booktitle = {Proceedings of the 15th ACM SIGPLAN International Conference on Software Language Engineering, SLE 2022, Auckland, New Zealand, December 5-10}, pages = {235-248}, abstract = {To test a system efficiently, we need to know how good are the defined test cases and to localize detected faults in the system. Measuring test coverage can address both concerns as it is a popular metric for test quality evaluation and, at the same time, is the foundation of advanced fault localization techniques. However, for Domain-Specific Languages (DSLs), coverage metrics and associated tools are usually manually defined for each DSL representing costly, error-prone, and non-reusable work. To address this problem, we propose a generic coverage computation and fault localization framework for DSLs. Considering a test suite executed on a model conforming to a DSL, we compute a coverage matrix based on three ingredients: the DSL specification, the coverage rules, and the model’s execution trace. 
Using the test execution result and the computed coverage matrix, the framework calculates the suspiciousness-based ranking of the model’s elements based on existing spectrum-based techniques to help the user in localizing the model’s faults. We provide a tool atop the Eclipse GEMOC Studio and evaluate our approach using four different DSLs, with 297 test cases for 21 models in total. Results show that we can successfully create meaningful coverage matrices for all investigated DSLs and models. The applied fault localization techniques are capable of identifying the defects injected in the models based on the provided coverage measurements, thus demonstrating the usefulness of the automatically computed measurements.}, keywords = {Coverage, Executable DSL, Executable Models, Fault localization, Testing}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{lehner2022, title = {A Community-Sourced View on Engineering Digital Twins: A Report from the EDT.Community}, author = {Loek Cleophas and Thomas Godfrey and Djamel Eddine Khelladi and Daniel Lehner and Benoit Combemale and Mark van den Brand and Michael Vierhauser and Manuel Wimmer and Steffen Zschaler}, editor = {Thomas Kühn and Vasco Sousa}, url = {https://dl.acm.org/doi/pdf/10.1145/3550356.3561549}, doi = {10.1145/3550356.3561549}, year = {2022}, date = {2022-11-09}, urldate = {2022-11-09}, booktitle = {Proceedings of the 25th International Conference on Model Driven Engineering Languages and Systems: Companion Proceedings, MODELS 2022, Montreal, Quebec, Canada, October 23-28}, pages = {481-485}, publisher = {ACM}, abstract = {Digital Twins are an important concept, enabling what-if scenario exploration, predictive maintenance, and other approaches. They help in saving time and physical resources when developing and evolving systems, whether natural or engineered. 
However, constructing and maintaining digital twins is a challenging engineering task - and, to date, there is a lack of understanding of the engineering techniques and methodologies required. To address these challenges, we created EDT.Community, a programme of seminars on the engineering of digital twins hosting digital twins experts from academia and industry. In this paper, we report on the main topics of discussion from the first year of the programme. We contribute by providing (1) a common understanding of open challenges in research and practice of the engineering of digital twins, and (2) an entry point to researchers who aim to close gaps in the current state of the art.}, keywords = {digital engineering, Digital Twin, Systems Engineering}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Wimmer2022h, title = {Using Trace Alignments for Measuring the Similarity between a Physical and its Digital Twin}, author = {Paula Munoz and Manuel Wimmer and Javier Troya and Antonio Vallecillo}, url = {https://se.jku.at/using-trace-alignments-for-measuring-the-similarity-between-a-physical-and-its-digital-twin/ https://dl.acm.org/doi/pdf/10.1145/3550356.3563135}, doi = {10.1145/3550356.3563135}, year = {2022}, date = {2022-11-09}, urldate = {2022-11-09}, booktitle = {Proceedings of the 25th International Conference on Model Driven Engineering Languages and Systems, MODELS '22, Montreal, Canada, October 23-28}, pages = {503-510}, abstract = {A common problem in the development of digital twin systems is the validation that the behavior of both twins, the physical and the digital, is the same, or at least similar enough given the requirements of the digital twin system. In this paper, we propose a method for the alignment of the traces of both twins. Traces are sequences of snapshots that capture the progressive states of each entity. Our approach is based on a bioinformatic algorithm that we adapt and use for the alignment of snapshots. 
Additionally, we include a set of measures to evaluate the quality of these alignments and reason about the level of fidelity of the digital twin system. Two case studies are used to demonstrate our proposal and evaluate its accuracy and effectiveness.}, keywords = {Conformance testing, Digital Twin, Trace analysis}, pubstate = {published}, tppubtype = {inproceedings} } @proceedings{Wimmer2022k, title = {Proceedings of the 25th International Conference on Model Driven Engineering Languages and Systems, (MODELS), Montreal, Quebec, Canada, October 23-28, 2022}, editor = {Eugene Syriani and Houari Sahraoui and Nelly Bencomo and Manuel Wimmer }, url = {https://doi.org/10.1145/3550355}, doi = {10.1145/3550355}, year = {2022}, date = {2022-10-27}, urldate = {2022-10-27}, publisher = {ACM}, abstract = {MODELS is the premier conference series for model-based software and systems engineering since 1998. It covers all aspects of modeling, from languages and methods to tools and applications. The conference is a forum for participants to exchange cutting-edge research results and innovative practical experiences around modeling, modeling languages, and modelbased software and systems engineering. Attendees of MODELS come from diverse backgrounds, including researchers, academics, engineers, and industrial professionals.}, keywords = {modeling languages, Systems Engineering}, pubstate = {published}, tppubtype = {proceedings} } @inproceedings{eisenberg2022, title = {Towards Reactive Planning with Digital Twins and Model-Driven Optimization}, author = {Martin Eisenberg and Daniel Lehner and Radek Sindelar and Manuel Wimmer }, editor = {Tiziana Margaria and Bernhard Steffen}, url = {https://link.springer.com/chapter/10.1007/978-3-031-19762-8_5}, doi = {10.1007/978-3-031-19762-8_5}, year = {2022}, date = {2022-10-21}, urldate = {2022-10-21}, booktitle = {Leveraging Applications of Formal Methods, Verification and Validation. 
Practice - 11th International Symposium, ISoLA 2022, Rhodes, Greece, October 22-30}, volume = {13704}, pages = {54-70}, publisher = {Springer Cham}, abstract = {Digital Twins are emerging in several domains. They allow to connect various models with running systems based on bi-directional data exchange. Thus, design models can be extended with runtime views which also opens the door for many additional techniques such as identifying unexpected system changes during runtime. However, dedicated reactions to these unexpected changes, such as adapting an existing plan which has been computed in advance and may no longer be seen beneficial, are still often neglected in Digital Twins. To tackle this shortcoming, we propose so-called reactive planning that integrates Digital Twins with planning approaches to react to unforeseen changes during plan execution. In particular, we introduce an extended Digital Twin architecture which allows to integrate existing model-driven optimization frameworks. Based on this integration, we present different strategies how the replanning can be performed by utilizing the information and services available in Digital Twins. We evaluate our approach for a stack allocation case study. 
This evaluation yields promising results on how to effectively improve existing plans during runtime, but also allows to identify future lines of research in this area.}, keywords = {Digital Twin, Models@Runtime, Optimization, Planning}, pubstate = {published}, tppubtype = {inproceedings} } @article{Wimmer2022f, title = {Revisiting Fault Localization Techniques for Model Transformations: Towards A Hybrid Approach}, author = {Paula Munoz and Javier Troya and Manuel Wimmer and Gerti Kappel}, url = {https://se.jku.at/revisiting-fault-localization-techniques-for-model-transformations-towards-a-hybrid-approach/ https://www.jot.fm/contents/issue_2022_04/article7.html}, doi = {10.5381/jot.2022.21.4.a7}, year = {2022}, date = {2022-10-10}, urldate = {2022-10-10}, journal = {Journal of Object Technology}, volume = {21}, number = {4}, pages = {4:1-17}, abstract = {The correctness of software built through model transformations highly depends on the correctness of these transformations. Different approaches have been proposed to ensure the correctness of model transformations by checking if pairs of input-output models satisfy a set of contracts. If a contract is not satisfied, at least one transformation rule must contain a bug. Localizing the rules that contain bugs is key for repairing the model transformation. Among others, Spectrum-Based Fault Localization (SBFL) is a dynamic technique to locate the faulty component of a software, and it has already been applied in the context of model transformations considering the rules as the components. As a result, this technique proposes an order (a so-called suspiciousness ranking) in which the rules should be inspected in order to locate the bug. However, SBFL relies on so-called suspiciousness formulae that were created in different domains, so none of them offers a perfect behavior in the context of model transformations. 
Indeed, some of the rankings for model transformations present many ties, so the tester is uncertain as of which rule to inspect first in the ties. In this paper, we explore how SBFL can be combined with static information in a hybrid approach in order to improve the results obtained from SBFL, specially in the case of ties in the rankings. Our evaluation shows the potential of the hybrid approach to improve previous SBFL results for model transformations.}, keywords = {Debugging, Fault localization, Model Transformations, Testing}, pubstate = {published}, tppubtype = {article} } @article{wimmer2022, title = {Conceptualizing Digital Twins}, author = {Romina Eramo and Francis Bordeleau and Mark van den Brand and Andreas Wortmann and Manuel Wimmer}, doi = { 10.1109/MS.2021.3130755}, year = {2022}, date = {2022-10-02}, urldate = {2022-10-02}, journal = {Journal of IEEE Software}, volume = {39}, issue = {2}, pages = {39-46}, abstract = {Properly arranging models, data sources, and their relations to engineer digital twins is challenging. We propose a conceptual modeling framework for digital twins that captures the combined usage of heterogeneous models and their respective evolving data for the twin’s entire lifecycle.}, keywords = {}, pubstate = {published}, tppubtype = {article} } @article{wimmer2022b, title = {Digital Twin Platforms: Requirements, Capabilities, and Future Prospects}, author = {Daniel Lehner and Jerome Pfeiffer and Erik-Felix Tinsel and Matthias Milan Strljic and Sabine Sint and Michael Vierhauser and Andreas Wortmann and Manuel Wimmer}, doi = {10.1109/MS.2021.3133795}, year = {2022}, date = {2022-10-02}, urldate = {2022-10-02}, journal = {Journal of IEEE Software}, volume = {39}, issue = {2}, pages = {53-61}, abstract = {Digital twins (DTs) have emerged as a paradigm for the virtual representation of complex systems alongside their underlying hardware. 
We investigate the benefits of Amazon, Eclipse, and Microsoft DT platforms and assess the extent to which they meet standard requirements.}, keywords = {}, pubstate = {published}, tppubtype = {article} } @article{Wimmer2022e, title = {A Cross-Domain Systematic Mapping Study on Software Engineering for Digital Twins}, author = {Manuela Dalibor and Nico Jansen and Bernhard Rumpe and David Schmalzing and Louis Wachtmeister and Manuel Wimmer and Andreas Wortmann}, url = {https://se.jku.at/a-cross-domain-systematic-mapping-study-on-software-engineering-for-digital-twins/}, doi = {10.1016/j.jss.2022.111361}, year = {2022}, date = {2022-09-22}, urldate = {2022-09-22}, journal = {Journal of Systems and Software}, volume = {193}, pages = {36}, abstract = {Digital Twins are currently investigated as the technological backbone for providing an enhanced understanding and management of existing systems as well as for designing new systems in various domains, e.g., ranging from single manufacturing components such as sensors to large-scale systems such as smart cities. Given the diverse application domains of Digital Twins, it is not surprising that the characterization of the term Digital Twin, as well as the needs for developing and operating Digital Twins are multi-faceted. Providing a better understanding what the commonalities and differences of Digital Twins in different contexts are, may allow to build reusable support for developing, running, and managing Digital Twins by providing dedicated concepts, techniques, and tool support. In this paper, we aim to uncover the nature of Digital Twins based on a systematic mapping study which is not limited to a particular application domain or technological space. We systematically retrieved a set of 1471 unique publications of which 356 were selected for further investigation. 
In particular, we analyzed the types of research and contributions made for Digital Twins, the expected properties Digital Twins have to fulfill, how Digital Twins are realized and operated, as well as how Digital Twins are finally evaluated. Based on this analysis, we also contribute a novel feature model for Digital Twins from a software engineering perspective as well as several observations to further guide future software engineering research in this area.}, keywords = {Digital Twin}, pubstate = {published}, tppubtype = {article} } @article{wimmer2022d, title = {Engineering Web Augmentation software: A development method for enabling end-user maintenance}, author = {Diego Firmenich and Sergio Firmenich and Gustavo Rossi and Irene Garrigós and Cesar Gonzalez-Mora and Manuel Wimmer }, url = {https://se.jku.at/engineering-web-augmentation-software-a-development-method-for-enabling-end-user-maintenance/}, doi = {10.1016/j.infsof.2021.106735}, year = {2022}, date = {2022-08-16}, urldate = {2022-08-16}, journal = {Journal of Information and Software Technology}, volume = {141}, pages = {19}, abstract = {Nowadays, end-users are able to adapt Web applications when some of their requirements have not been taken into account by developers. One possible way to do adaptations is by using Web Augmentation techniques. Web Augmentation allows end-users to modify the Web sites’ user interfaces once these are loaded on the client-side, i.e., in the browser. They achieve these adaptations by developing and/or installing Web browser plugins (“augmenters”) that modify the user interface with new functionalities. This particular kind of software artifacts requires special attention regarding maintenance as–in most cases–they depend on third-party resources, such as HTML pages. When these resources are upgraded, unexpected results during the augmentation process may occur. 
Many communities have arisen around Web Augmentation, and today there are large repositories where developers share their augmenters; end-users may give feedback about existing augmentations and even ask for new ones. Maintenance is a key phase in the augmenters’ life-cycle, and currently, this task falls (as usual) on the developers. In this paper, we present a participatory approach for allowing end-users without programming skills to participate in the augmenters’ maintenance phase. In order to allow this, we also provide support for the development phase to bootstrap a first version of the augmenter and to reduce the load on developers in both phases, development and maintenance. We present an analysis of more than eight thousand augmenters, which helped us devise the approach. Finally, we present an experiment with 48 participants to validate our approach.}, keywords = {end-user driven maintenance, End-user programming, Web adaptation, Web Augmentation}, pubstate = {published}, tppubtype = {article} } @inproceedings{lehner2022d, title = {Modeling Capabilities of Digital Twin Platforms – Old Wine in New Bottles?}, author = {Jerome Pfeiffer and Daniel Lehner and Andreas Wortmann and Manuel Wimmer }, url = {https://se.jku.at/modeling-capabilities-of-digital-twin-platforms-old-wine-in-new-bottles/ https://davidediruscio.github.io/ECMFA2022/ https://www.semanticscholar.org/paper/Modeling-Capabilities-of-Digital-Twin-Platforms-Old-Pfeiffer-Lehner/03b565e78f730d36b38820df3e9da1c0aa0dddcb}, year = {2022}, date = {2022-07-02}, urldate = {2022-07-02}, booktitle = {18th European Conference on Modelling Foundations and Applications, co-located with STAF 2022, Nantes, France, 6-7 July }, abstract = {Digital twins are seen as core technologies to tackle the growing complexity of cyber-physical systems to better understand, monitor, and optimize their behavior. 
Digital twin platforms aim to facilitate the systematic engineering of digital twins by providing dedicated languages and corresponding tools to describe their abilities. However, with the emergence of these languages for digital twins, the question arises what the nature of these languages is and how they differentiate from existing modeling languages already used in the area of cyber-physical systems. To shed more light on this new modeling area, we study in this paper the modeling capabilities of three industrial digital twin platforms and frame them in existing and well-known modeling concepts provided by UML. In particular, we (i) extract the conceptual metamodels of three industrial digital twin platforms, (ii) compare them with common object-oriented modeling concepts of UML, (iii) and provide first insight about the portability of models between the platforms by performing an experiment. In particular, we use UML class diagrams as an anchor for relating the modeling concepts of digital twin platforms and as pivot for DT platform portability. Our investigation summarizes current modeling capabilities of digital twin platforms to provide a better understanding of their shared concepts to developers using such platforms. It also shows that these modeling capabilities often rely on well-known modeling concepts, but also add some new aspects. The performed experiment additionally gives first insights into the portability of different DT platform metamodels. 
To sum up, this work can be seen as a starting point for uncovering the nature of digital twin modeling and providing a digital twin language family enabling developers to select appropriate modeling features for describing different aspects of digital twins without having to reinvent the wheel.}, keywords = {Digital Twin}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Wimmer2022m, title = {Flexible Model-Driven Runtime Monitoring Support for Cyber-Physical Systems}, author = {Marco Stadler and Michael Vierhauser and Antonio Garmendia and Manuel Wimmer and Jane Cleland-Huang}, url = {https://se.jku.at/flexible-model-driven-runtime-monitoring-support-for-cyber-physical-systems/ https://ieeexplore.ieee.org/stamp/stamp.jsp?tp=&arnumber=9793768}, doi = {10.1145/3510454.352864}, year = {2022}, date = {2022-06-13}, urldate = {2022-06-13}, booktitle = {2022 IEEE/ACM 44th International Conference on Software Engineering: Companion Proceedings (ICSE-Companion)}, pages = {2}, publisher = {IEEE }, abstract = {Providing adequate runtime monitoring is critical for ensuring safe operation and for enabling self-adaptive behavior of Cyber-Physical Systems. This requires identifying runtime properties of interest, creating Probes to instrument the system, and defining constraints to be checked at runtime. Implementing and setting up a monitoring framework for a system is typically a challenging task, and most existing approaches lack support for the automated generation and setup of monitors. 
GRuM significantly eases the task of creating monitors and maintaining them throughout the lifetime of the system by automatically generating runtime models and providing support for updating and adapting them when needed.}, keywords = {Cyber-Physical Systems, MDE, runtime monitoring}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Wimmer2022d, title = {Towards Interoperable Metamodeling Platforms: The Case of Bridging ADOxx and EMF}, author = {Dominik Bork and Konstantinos Anagnostou and Manuel Wimmer}, url = {https://se.jku.at/towards-interoperable-metamodeling-platforms-the-case-of-bridging-adoxx-and-emf/}, doi = {10.1007/978-3-031-07472-1_28}, year = {2022}, date = {2022-06-03}, urldate = {2022-06-03}, booktitle = {34th International Conference on Advanced Information Systems Engineering, CAiSE 2022, Leuven, Belgium, June 6-10, Lecture Notes in Computer Science}, volume = {13295}, pages = {479-497}, publisher = {Springer Cham}, abstract = {Metamodeling platforms are an important cornerstone for building domain-specific modeling languages in an efficient and effective way. Two prominent players in the field are ADOxx and the Eclipse Modeling Framework (EMF) which both provide rich ecosystems on modeling support and related technologies. However, until now, these two worlds live in isolation while there would be several benefits of having a bridge to exchange metamodels and models for different purposes (e.g., reuse of features and plugins that are only available on one platform, access to additional modeler and developer communities). Therefore, in this paper, we propose first steps toward establishing interoperability between ADOxx and EMF. For this, we thoroughly analyze the metamodeling concepts employed by both platforms before proposing a bridge that enables bidirectional exchange of metamodels. We evaluate the bidirectional bridge with several openly available metamodels created with ADOxx and EMF, respectively. 
Moreover, we quantitatively and qualitatively analyze the bridge by an evaluation that incorporates the instantiation and use of the metamodels on both platforms. We show that the metamodels can be exchanged without information loss and similar modeling experiences with respect to the resulting models can be achieved.}, keywords = {Metamodeling}, pubstate = {published}, tppubtype = {inproceedings} } @article{Wimmer2022p, title = {Guest editorial to the theme section on AI-enhanced model-driven engineering}, author = {Lola Burgueño and Jordi Cabot and Manuel Wimmer and Steffen Zschaler }, url = {https://se.jku.at/guest-editorial-to-the-theme-section-on-ai-enhanced-model-driven-engineering/ https://link.springer.com/content/pdf/10.1007/s10270-022-00988-0.pdf?pdf=button}, doi = {10.1007/s10270-022-00988-0}, year = {2022}, date = {2022-06-02}, urldate = {2022-06-02}, journal = {Journal of Software Systems Modeling}, volume = {21}, number = {3}, pages = {963-965}, abstract = {This theme section brings together the latest research at the intersection of artificial intelligence (AI) and model-driven engineering (MDE). Over the past years, we have witnessed a substantial rise of AI successfully applied to different domains, including software development and MDE. Dedicated events at the intersection of AI and MDE have been created, too, such as the MDE Intelligence workshop series co-located with the MODELS conference. This theme section covers research contributions integrating AI components into MDE approaches—increasing the current benefits of MDE processes and tools and pushing the limits of “classic” MDE with the goal to provide software and systems engineers with the right techniques to develop the next generation of highly complex model-based systems—and applications of MDE to the development of AI components. 
In total, nine submissions were accepted in the theme section after a thorough peer-reviewing process.}, keywords = {Artificial intelligence, Model-Driven Engineering, Software Engineering, Systems Engineering}, pubstate = {published}, tppubtype = {article} } @article{sint2021, title = {Towards a logical framework for ideal MBSE tool selection based on discipline specific requirements}, author = {Azad Khandoker and Sabine Sint and Guido Gessl and Klaus Zeman and Franz Jungreitmayr and Helmut Wahl and Andreas Wenigwieser and Roland Kretschmer}, url = {https://www.sciencedirect.com/science/article/pii/S0164121222000553?via%3Dihub}, doi = {10.1016/j.jss.2022.111306}, year = {2022}, date = {2022-05-18}, urldate = {2022-05-18}, journal = {Journal of Systems and Software}, volume = {189}, pages = {15}, abstract = {Model-Based Systems Engineering (MBSE) has emerged with great potential to fulfil the non-linearly rising demand in interdisciplinary engineering, e.g., product development. However, the variety and complexity of MBSE tools pose difficulties in particular industrial applications. This paper tries to serve as a guideline to find the ideal tool for a specific industrial application as well as to highlight the key criteria that an industry might consider. For this purpose, we propose a logical framework for MBSE tool selection, which is based on market research, the approaches of Quality Function Deployment (QFD), and decision matrix. As customers are at the centre of any product, accordingly the needs of MBSE tool users are addressed within this research as the fundamental starting point. Market research and extensive discussions with MBSE tool vendors and academia show the current situation of MBSE tools. To compare the performance of the considered tools, a set of user needs is defined. QFD is performed to analyse the user needs with respect to evaluable technical properties. Subsequently each tool performance is assessed using a decision matrix. 
Through this process, a well-defined functional structure of MBSE tools is sketched, and in order to identify the properties of an ideal tool, all the attributes of different MBSE tools are mapped to a common platform. For the purpose of evaluation, we apply our proposed logical framework to select an exemplary MBSE tool for interdisciplinary application.}, keywords = {Filtration method, MBSE, Software QFD, Tool selection}, pubstate = {published}, tppubtype = {article} } @article{sint, title = {Predicting the unpredictable: General Aviation (GA) aircraft cost estimation evaluation}, author = {Ali Shahriar and Azad Khandoker and Guido Gessl and Sabine Sint and M.A. Hamid and Abrar Tariq and Al Rahman }, url = {https://www.sciencedirect.com/science/article/pii/S0969699722000424?via%3Dihub#d1e663}, doi = {10.1016/j.jairtraman.2022.102221}, year = {2022}, date = {2022-05-07}, urldate = {2022-05-07}, journal = {Journal of Air Transport Management}, volume = {102}, pages = {11}, abstract = {Cost estimation is an important part of project planning as well as research endeavor. Since the well-established cost estimation models used in the GA aircraft industry are already several decades old, a re-evaluation of their applicability to current market conditions is essential. Reliable cost estimation may also improve the chances to get external funding - a vital point for start-ups. To tackle this issue, we developed a research method to investigate potential cost models for GA aircraft that can serve as a guideline for, e.g., start-ups and research works. After gathering existing cost estimation models, they are classified and analyzed to find the ones most suitable for small aircraft. For evaluation purpose, the two most promising ones are applied to data from existing aircraft models to compare their accuracy and finally the best one is coded as an application in Python to improve usability. 
With our presented research method we show a possibility to perform early cost estimation for small GA aircraft and offer a software tool to simplify its application.}, keywords = {Cost estimation, Cost model/method, Development cost, GA aircraft, Start-ups}, pubstate = {published}, tppubtype = {article} } @article{Wimmer2022o, title = {Guest editorial to the theme section on multi-level modeling}, author = {Adrian Rutle and Manuel Wimmer }, url = {https://link.springer.com/article/10.1007/s10270-022-00987-1#citeas}, doi = {10.1007/s10270-022-00987-1}, year = {2022}, date = {2022-04-12}, urldate = {2022-04-12}, journal = {Journal of Software Systems Modeling}, volume = {21}, number = {2}, pages = {447-449}, abstract = {Multi-level modeling (MLM) [5] represents a significant extension to the traditional two-level object-oriented paradigm with the potential to improve upon the utility, reliability, and complexity of models. Different from conventional approaches, MLM approaches allow for an arbitrary number of classification levels and introduce other concepts that foster expressiveness, reuse, and adaptability. A key aspect of the MLM paradigm is the use of entities (so-called clabjects) that are simultaneously types and instances [6], a feature which has consequences for conceptual modeling, for language engineering, and for the model-based development of software-intensive systems. MLM facilitates also deep instantiation [7], which, in contrast to shallow instantiation, allows model elements at a level to not only specify a scheme for elements at the next lower level but also to specify schemes for elements located at levels further down in the hierarchy. Different MLM approaches use different techniques to control and maintain this kind of instantiation. In Potency-based approaches [6, 8], for instance, a natural number (potency) is assigned to each model element indicating how many levels down in the hierarchy that element can be instantiated. 
Different variants of potency have been proposed to satisfy practical requirements, such as leap potency (facilitating jumps over levels) and depth (enforcing the last level at which an element may be instantiated).}, keywords = {}, pubstate = {published}, tppubtype = {article} } @proceedings{Wimmer2022l, title = {Software Quality: The Next Big Thing in Software Engineering and Quality}, editor = {Daniel Mendez and Manuel Wimmer and Dietmar Winkler and Stefan Biffl and Johannes Bergsmann}, url = {https://se.jku.at/software-quality-the-next-big-thing-in-software-engineering-and-quality/ https://link.springer.com/book/10.1007/978-3-031-04115-0}, doi = {10.1007/978-3-031-04115-0}, isbn = {978-3-031-04115-0}, year = {2022}, date = {2022-04-11}, urldate = {2022-04-11}, booktitle = {Software Quality: The Next Big Thing in Software Engineering and Quality - 14th International Conference on Software Quality, SWQD 2022, Vienna, Austria, May 17–19, 2022, Proceedings}, abstract = {This book constitutes the refereed proceedings of the 14th Software Quality Days Conference, SWQD 2022, held in Vienna, Austria, during May 17-19, 2022. The Software Quality Days (SWQD) conference started in 2009 and has grown to the biggest conference on software quality in Europe. The program of the SWQD conference is designed to encompass a stimulating mixture of practical presentations and new research topics in scientific presentations. The guiding conference topic of the SWQD 2022 is “What's The Next Big Thing in Software Engineering and Quality?”. The 4 full papers presented in this volume were carefully reviewed and selected from 8 submissions. The contributions were organized in two topical sections named: AI in Software Engineering; and Quality Assurance for Software-Intensive Systems. The book also contains two invited talks. 
}, keywords = {machine learning applications, quality assurance, software maintenance, software quality, software testing}, pubstate = {published}, tppubtype = {proceedings} } @proceedings{Wimmer2022j, title = {Fundamental Approaches to Software Engineering}, editor = {Einar Broch Johnsen and Manuel Wimmer}, url = {https://se.jku.at/fundamental-approaches-to-software-engineering/ https://link.springer.com/book/10.1007/978-3-030-99429-7}, doi = {10.1007/978-3-030-99429-7}, isbn = {978-3-030-99429-7}, year = {2022}, date = {2022-03-28}, urldate = {2022-03-28}, booktitle = {Fundamental Approaches to Software Engineering - 25th International Conference, FASE 2022, Held as Part of the European Joint Conferences on Theory and Practice of Software, ETAPS 2022, Munich, Germany, April 2–7, 2022}, number = {XIV}, pages = {357}, publisher = {Springer Cham}, abstract = {This open access book constitutes the proceedings of the 25th International Conference on Fundamental Approaches to Software Engineering, FASE 2022, which was held during April 4-5, 2022, in Munich, Germany, as part of the European Joint Conferences on Theory and Practice of Software, ETAPS 2022. The 17 regular papers presented in this volume were carefully reviewed and selected from 64 submissions. The proceedings also contain 3 contributions from the Test-Comp Competition. 
The papers deal with the foundations on which software engineering is built, including topics like software engineering as an engineering discipline, requirements engineering, software architectures, software quality, model-driven development, software processes, software evolution, AI-based software engineering, and the specification, design, and implementation of particular classes of systems, such as (self-)adaptive, collaborative, AI, embedded, distributed, mobile, pervasive, cyber-physical, or service-oriented applications.}, keywords = {embedded Systems, formal methods, model checking, model-driven software engineering, program analysis, software quality}, pubstate = {published}, tppubtype = {proceedings} } @article{Wimmer2022i, title = {Model Transformation Testing and Debugging: A Survey}, author = {Javier Troya and Sergio Segura and Lola Burgueño and Manuel Wimmer}, url = {https://se.jku.at/model-transformation-testing-and-debugging-a-survey/ https://dl.acm.org/doi/pdf/10.1145/3523056}, doi = {10.1145/3523056}, year = {2022}, date = {2022-02-28}, urldate = {2022-02-28}, journal = {Journal of ACM Computing Surveys}, volume = {55}, number = {4}, pages = {72:1-72:39}, abstract = {Model transformations are the key technique in Model-Driven Engineering (MDE) to manipulate and construct models. As a consequence, the correctness of software systems built with MDE approaches relies mainly on the correctness of model transformations, and thus, detecting and locating bugs in model transformations have been popular research topics in recent years. This surge of work has led to a vast literature on model transformation testing and debugging, which makes it challenging to gain a comprehensive view of the current state of the art. This is an obstacle for newcomers to this topic and MDE practitioners to apply these approaches. This paper presents a survey on testing and debugging model transformations based on the analysis of 140 papers on the topics. 
We explore the trends, advances, and evolution over the years, bringing together previously disparate streams of work and providing a comprehensive view of these thriving areas. In addition, we present a conceptual framework to understand and categorise the different proposals. Finally, we identify several open research challenges and propose specific action points for the model transformation community.}, keywords = {Debugging, Model Transformations, survey, Testing}, pubstate = {published}, tppubtype = {article} } @inproceedings{Wimmer2022n, title = {Workshop on Software Engineering in Cyber-Physical Production Systems}, author = {Rick Rabiser and Birgit Vogel-Heuser and Manuel Wimmer and Alois Zoitl}, editor = {Lars Grunske and Janet Siegmund and Andreas Vogelsang}, url = {https://se.jku.at/workshop-on-software-engineering-in-cyber-physical-production-systems/ https://dl.gi.de/bitstream/handle/20.500.12116/34551/C1-05.pdf?sequence=1&isAllowed=y}, doi = {10.18420/SE2021_53}, isbn = {978-3-88579-704-3}, year = {2022}, date = {2022-01-21}, urldate = {2022-01-21}, booktitle = {Software Engineering 2022, Fachtagung des GI-Fachbereichs Softwaretechnik, 21.-25. Februar, Virtuell}, volume = {P-320}, pages = {105-106}, publisher = {Gesellschaft für Informatik e.V.}, abstract = {This workshop focuses on Software Engineering in Cyber-Physical Production Systems. It is an interactive workshop opened by keynotes and statements by participants, followed by extensive discussions in break-out groups. 
The output of the workshop is a research roadmap as well as concrete networking activities to further establish a community in this interdisciplinary field.}, keywords = {Cyber-Physical Systems, Software Engineering, Workshop}, pubstate = {published}, tppubtype = {inproceedings} } @misc{mazak21, title = {Temporal Model-Driven Systems Engineering}, author = {Alexandra Mazak-Huemer}, url = {https://se.jku.at/wp-content/uploads/2021/11/HabilitationThesis_Alexandra_Mazak-Huemer.pdf}, year = {2021}, date = {2021-12-31}, urldate = {2021-12-31}, school = {Johannes Kepler University Linz}, abstract = {Due to the paradigm shift towards Industry 4.0, the role of software-intensive systems is becoming more and more important. In particular, the trend towards physical components being controlled by software has led to the Internet-of-Things (IoT) and Cyber-Physical-Systems (CPS). As a consequence, companies face highly complex systems that are undergoing a constant change process resulting from shorter innovation cycles and rapidly changing customer needs. It is important that they keep their high-level requirements organized and consistent over multiple revision cycles across the entire life cycle of such a system, i.e., from design over development to implementation and operation. Modeling is considered as a promising technique to better understand the dependencies within such complex systems. By following the Model-Driven Engineering (MDE) paradigm, systems are developed on a higher level of abstraction, and therefore, models are used as an integral part covering requirements, analysis, design, implementation, and verification. Although the term “model-integrated computing” has been coined almost twenty years ago, it has to be emphasized that the integration of models in the system life cycle is still mainly concerned with forward engineering, i.e., the development of new systems through generative techniques. 
Much less effort in MDE is spent on the evolutionary aspects of systems changing over time. For tackling this issue, models must no longer be considered as isolated one-shot system prescriptions, but as evolutionary and reusable descriptions of reality. The research scope of this cumulative habilitation thesis is explicitly addressing this evolutionary aspect by focusing on temporal aspects of models of CPS. It follows a Model-Driven Systems Engineering (MDSE) approach by identifying and integrating appropriate concepts, languages, techniques, and tools for the systematic adoption of models throughout the engineering process. Models are continuously revised, often by considering feedback from other resources, until they are released. However, also the feedback after the release, i.e., from the operation, is reflected in the models. In the first part of this cumulative habilitation thesis, we elaborate on the integration of data from heterogeneous sources in order to provide a homogenized meaningful stack of information from the running system to a higher level of abstraction. In the second part, we cover the evolutionary aspects of engineering artefacts, i.e., models. Thereby, the focus is not only to represent the current state to steer the system, but on the representation of the system’s history. 
In the final part, we provide MDE techniques for analyzing runtime data and extracting descriptive models for reasoning about and validating the operation of systems.}, keywords = {model-driven software engineering}, pubstate = {published}, tppubtype = {misc} } @inproceedings{wimmer2021i, title = {Preface to the 3rd Multi-Paradigm Modeling for Cyber-Physical Systems (MPM4CPS 2021)}, author = {Moussa Amrani and Dominique Blouin and Moharram Challenger and Julien Deantoni and Robert Heinrich and Manuel Wimmer}, url = {http://msdl.uantwerpen.be/conferences/MPM4CPS/2021/ https://ieeexplore.ieee.org/document/9643616}, doi = {10.1109/MODELS-C53483.2021.00049}, year = {2021}, date = {2021-12-28}, urldate = {2021-12-28}, booktitle = {ACM/IEEE International Conference on Model Driven Engineering Languages and Systems Companion, {MODELS} 2021 Companion, Fukuoka, Japan, virtual event, October 10-15}, journal = {International Conference on Model Driven Engineering Languages and Systems Companion, MODELS Companion 2021}, pages = {284-285}, abstract = {Multi-Paradigm Modelling (MPM) offers a foundational framework for connecting several engineering disciplines in a comprehensive and consistent way which is of particular importance for Cyber-Physical Systems (CPS). The MPM4CPS Workshop acts as a continuation of the successful MPM Workshop series hosted at the MODELS, but focuses on the application of the MPM approach to CPSs. 
The workshop aims at further advancing the state-of-the-art as well as identifying future research lines by bringing together international experts from academia and industry.}, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Govindasamy21, title = {Air Quality Management: An Exemplar for Model-Driven Digital Twin Engineering}, author = {Hari Shankar Govindasamy and Ramya Jayamaran and Burcu Taspinar and Daniel Lehner and Manuel Wimmer}, url = {https://ieeexplore.ieee.org/document/9643702}, doi = {10.1109/MODELS-C53483.2021.00040}, year = {2021}, date = {2021-12-28}, urldate = {2021-12-28}, booktitle = {ACM/IEEE International Conference on Model Driven Engineering Languages and Systems Companion, {MODELS} 2021 Companion, Fukuoka, Japan, virtual, October 10-15}, pages = {229-232}, publisher = {IEEE}, abstract = {Since its first mentioning in the literature, the concept of Digital Twin has gained traction in both industry and academia. However, there are still many open challenges when applying Digital Twins to industry-scale use cases. Applying Model-Driven Engineering techniques to the creation and maintenance of Digital Twins (also referred to as Model-Driven Digital Twin Engineering) promises automation and consistency throughout the life cycle of a Digital Twin. The exemplar provided in this paper can be used to identify open challenges when it comes to Model-Driven Digital Twin Engineering, and to demonstrate how approaches can solve them. This exemplar applies Digital Twins to an indoor air quality management use case, where CO2, temperature, and humidity values of rooms within a building are measured. These values can be used to derive actions to improve work productivity and reduce the risk for virus infections. 
We describe three applications that make use of this Digital Twin (i.e., runtime visualization, physical simulation, and ML-based predictions), and provide an online repository with the artefacts of this exemplar.}, keywords = {Workshop}, pubstate = {published}, tppubtype = {inproceedings} } @conference{wimmer2021q, title = {From In-Person to Distance Learning: Teaching Model-Driven Software Engineering in Remote Settings}, author = {Dominik Bork and Andreas Fend and Dominik Scheffknecht and Gerti Kappel and Manuel Wimmer}, year = {2021}, date = {2021-12-20}, urldate = {2021-12-20}, booktitle = {2021 ACM/IEEE International Conference on Model Driven Engineering Languages and Systems Companion (MODELS-C) - Educator Symposium, Fukuoka, Japan, virtual, October 10-15 2021.}, pages = {702-711}, publisher = {IEEE}, abstract = {The COVID-19 pandemic did not only dramatically impact the personal and social lives, for many academics, it also demanded immediate changes to the way their courses are taught. While a pragmatic approach is to do conventional lectures via video streaming platforms, much more may be done to educate students also in a remote setting properly. This particularly holds true for practice-oriented and technology-engaging courses. This paper describes our experience of transforming an in-person Master level class on model-driven software engineering into a distance learning one. We describe the structure, the content, the teaching and examination format, and the used platforms in detail. We critically reflect on our experiences and report the feedback gained by a postclass student evaluation. We believe this paper provides meaningful lessons learned and best practices for other educators challenged with the task of teaching similar courses in a remote setting. 
With this paper, we publish an openly available Github repository that features all course content including sample solutions for all practical lab assignments.}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @inproceedings{wimmer2021n, title = {Weaving Open Services with Runtime Models for Continuous Smart Cities KPIs Assessment}, author = {Martina De Sanctis and Ludovico Iovino and Maria Teresa Rossi and Manuel Wimmer}, editor = {Hakim Hacid and Odej Kao and Massimo Mecella and Naouel Moha and Hye-young Paik}, url = {https://se.jku.at/weaving-open-services-with-runtime-models-for-continuous-smart-cities-kpis-assessment/}, doi = {10.1007/978-3-030-91431-8_43}, year = {2021}, date = {2021-12-15}, urldate = {2021-12-15}, booktitle = {19th International Conference on Service-Oriented Computing, ICSOC 2021, November 22-25 }, volume = {13121}, pages = {672-681}, publisher = {Springer}, abstract = {The automatic Key Performance Indicators (KPIs) assessment for smart cities is challenging, since the input parameters needed for the KPIs calculations are highly dynamic and change with different frequencies. Moreover, they are provided by heterogeneous data sources (e.g., IoT infrastructures, Web Services, open repositories), with different access protocol. Open services are widely adopted in this area on top of open data, IoT, and cloud services. However, KPIs assessment frameworks based on smart city models are currently decoupled from open services. This limits the possibility of having runtime up-to-date data for KPIs assessment and synchronized reports. Thus, this paper presents a generic service-oriented middleware that connects open services and runtime models, applied to a model-based KPIs assessment framework for smart cities. It enables a continuous monitoring of the KPIs’ input parameters provided by open services, automating the data acquisition process and the continuous KPIs evaluation. 
Experiments show how the evolved framework enables a continuous KPIs evaluation, by drastically decreasing (∼88%) the latency compared to its baseline.}, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{lehner2021, title = {AML4DT: A Model-Driven Framework for Developing and Maintaining Digital Twins with AutomationML}, author = {Daniel Lehner and Sabine Sint and Michael Vierhauser and Wolfgang Narzt and Manuel Wimmer}, url = {https://se.jku.at/wp-content/uploads/2021/10/2021_etfa_aml4dt_cr_final.pdf}, doi = {10.1109/ETFA45728.2021.9613376}, year = {2021}, date = {2021-12-07}, urldate = {2021-09-08}, booktitle = {IEEE 26th International Conference on Emerging Technologies and Factory Automation, September 7-10, Västerås, Sweden, virtual event}, journal = {IEEE 26th International Conference on Emerging Technologies and Factory Automation, September 7-10, 2021, Västerås Sweden, virtual event.}, pages = {1-8}, publisher = {IEEE}, abstract = {As technologies such as the Internet of Things (IoT) and Cyber-Physical Systems (CPS) are becoming ubiquitous, systems adopting these technologies are getting increasingly complex. Digital Twins (DTs) provide comprehensive views on such systems, the data they generate during runtime, as well as their usage and evolution over time. Setting up the required infrastructure to run a Digital Twin is still an ambitious task that involves significant upfront efforts from domain experts, although existing knowledge about the systems, such as engineering models, may be already available for reuse. To address this issue, we present AML4DT, a model-driven framework supporting the development and maintenance of Digital Twin infrastructures by employing AutomationML (AML) models. We automatically establish a connection between systems and their DTs based on dedicated DT models. These DT models are automatically derived from existing AutomationML models, which are produced in the engineering phases of a system. 
Additionally, to alleviate the maintenance of the DTs, AML4DT facilitates the synchronization of the AutomationML models with the DT infrastructure for several evolution cases. A case study shows the benefits of developing and maintaining DTs based on AutomationML models using the proposed AML4DT framework. For this particular study, the effort of performing the required tasks could be reduced by about 50%.}, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Wiesmayr21, title = {A Model-based Execution Framework for Interpreting Control Software}, author = {Bianca Wiesmayr and Alois Zoitl and Antonio Garmendia and Manuel Wimmer}, url = {https://epub.jku.at/obvulioa/download/pdf/6404796?%20originalFilename=true}, doi = {10.1109/ETFA45728.2021.9613716}, year = {2021}, date = {2021-12-07}, urldate = {2021-12-07}, booktitle = {26th IEEE International Conference on Emerging Technologies and Factory Automation, ETFA 2021, Västeras, Sweden, virtual event, September 7-10, 2021}, publisher = {IEEE}, abstract = {Industrial standards define domain-specific languages that are frequently used for developing control software. For instance, IEC 61499 standardizes a graphical modeling language that includes a platform-independent application model. The application is composed of Function Blocks. A runtime can execute the model by implementing the semantics that is described in the standard in natural language. By defining an interpreter for IEC 61499 models, we can directly execute them without prior code generation. This enables providing feedback directly on the model level. We present an interpreter for Basic Function Blocks, which encapsulate a state-based Execution Control Chart. An existing EMF meta-model for IEC 61499 was extended with an operational semantics implemented in Java and Xtend. The test cases are defined either in Java or as an interface model. Such a model is standardized in IEC 61499 as Service Sequences. 
We evaluate our interpreter by executing the Basic Function Blocks that are defined in the standard and compare our results to those of the open-source runtime 4diac FORTE. As a practical use case, we show how developers can use the interpreter for unit testing self-defined Basic Function Blocks.}, keywords = {IEC 61499, industrial automation, Interpreter, Model-Driven Engineering, Operational semantics}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Garmendia2021, title = {Leveraging Model-Driven Technologies for JSON Artefacts: The Shipyard Case Study}, author = {Alessandro Colantoni and Antonio Garmendia and Luca Berardinelli and Manuel Wimmer and Johannes Bräuer}, url = {https://se.jku.at/wp-content/uploads/2021/08/Leveraging_Model_Driven_Technologies_for_JSONArtefacts__The_Shipyard_Case_Study.pdf https://ieeexplore.ieee.org/document/9592494}, doi = {10.1109/MODELS50736.2021.00033}, year = {2021}, date = {2021-10-21}, urldate = {2021-10-21}, booktitle = {IEEE/ACM 24th International Conference on Model Driven Engineering Languages and Systems (MODELS), October 10-15}, journal = {ACM / IEEE 24th International Conference on Model Driven Engineering Languages and Systems (MODELS), October 10-15, 2021}, abstract = {With JSON’s increasing adoption, the need for structural constraints and validation capabilities led to JSON Schema, a dedicated meta-language to specify languages which are in turn used to validate JSON documents. Currently, the standardisation process of JSON Schema and the implementation of adequate tool support (e.g., validators and editors) are work in progress. However, the periodic issuing of newer JSON Schema drafts makes tool development challenging. Nevertheless, many JSON Schemas as language definitions exist, but JSON documents are still mostly edited in basic text-based editors. 
To tackle this challenge, we investigate in this paper how Model-Driven Engineering (MDE) methods for language engineering can help in this area. Instead of re-inventing the wheel of building up particular technologies directly for JSON, we study how the existing MDE infrastructures may be utilized for JSON. In particular, we present a bridge between the JSONware and Modelware technical spaces to exchange languages and documents. Based on this bridge, our approach supports language engineers, domain experts, and tool providers in editing, validating, and generating tool support with enhanced capabilities for JSON schemas and their documents. We evaluate our approach with Shipyard, a JSON Schema-based language for the workflow specification for Keptn, an open-source tool for DevOps automation of cloud-native applications. The results of the case study show that proper editors and language evolution support from MDE can be reused and, at the same time, the surface syntax of JSON is maintained.}, keywords = {Model-Driven}, pubstate = {published}, tppubtype = {inproceedings} } @misc{wimmer2021p, title = {Searching for Models with Hybrid AI Techniques (Talk)}, author = {Martin Eisenberg and Hans-Peter Pichler and Antonio Garmendia and Manuel Wimmer }, url = {https://se.jku.at/searching-for-models-with-hybrid-ai-techniques/ https://workshop-cmai.github.io/2021/papers/%5BEisenberg+21%5DSearchingForModelsWithHybridAITechniques.pdf}, year = {2021}, date = {2021-10-21}, urldate = {2021-10-21}, booktitle = {3rd International Workshop on Conceptual Modeling Meets Artificial Intelligence (CMAI 2021), co-Located with the 40th International Conference on Conceptual Modeling (ER 2021), 18-21 October 2021, St. Johns, Canada, virtual}, abstract = {The Model-Driven Engineering (MDE) [3] paradigm advocates for the use of models as an abstraction layer to represent complex systems. Model transformations are a central technique within MDE [10]. 
They either modify existing models or create new ones from scratch. Generally, these models should represent an optimal state of the system that has to be found within a large space of possible solutions. Model-driven optimization [1, 2, 4–6, 9] is a research area within MDE that proposes to automatically find optimal solutions which are constructed by a set of transformation rules given certain objectives. In order to search into large solution spaces, model-driven optimization approaches combine the expressiveness of models and domain-specific modeling languages, with the computational effectiveness of Artificial Intelligence (AI) methods to find the best model for a particular scenario. In this talk, we will present the framework Marrying Optimization and Model Transformations (MOMoT) which formulates the quest of finding the best models as an optimization problem [2, 8]. By this, MOMoT provides a general bridge between MDE and AI in which users may apply different AI techniques for the model search without requiring problem-specific encodings. MOMoT is built atop of the Eclipse Modeling Framework (EMF) using Henshin as a model transformation tool and MOEA for providing different evolutionary algorithms for performing the search process. In a recent work, we extended MOMoT with reinforcement learning approaches for performing the search process [7]. We will present some case studies that show the applicability of MOMoT to several scenarios such as the class responsibility assignment, software modularization, and object-oriented refactoring. In addition, we compare the outcome and performance of different AI techniques and also show some interesting combinations of the different techniques. Based on this evaluation, we will present some research lines and lessons learned that we found of interest for the community. 
}, keywords = {Model Transformations, Model-Driven Engineering, MOMoT}, pubstate = {published}, tppubtype = {misc} } @inproceedings{wimmer2021j, title = {Preface to MoDDiT 2021 }, author = {Francis Bordeleau and Loek Cleophas and Benoit Combemale and Romina Eramo and Mark Brand and Manuel Wimmer and Andreas Wortmann}, url = {https://gemoc.org/events/moddit2021}, year = {2021}, date = {2021-10-15}, urldate = {2021-10-15}, booktitle = {1st International Workshop on Model-Driven Engineering for Digital Twins co-located with MODELS 2021, Fukuoka, Japan, virtual event, October 10-15}, journal = {International Conference on Model Driven Engineering Languages and Systems Companion, MODELS Companion 2021}, pages = {210-211}, abstract = {Digital twins promise tremendous potential to better understand and make use of cyber-physical systems in automotive, avionics, manufacturing, medicine, and many more domains. Despite many of the twinned systems being developed using models, engineering digital twins currently is ad-hoc and demands integrating different piecemeal technologies, which effectively hinders the application of digital twins. The focus of many digital twins and frameworks to create digital twins is on data acquisition and visualization via dashboards. Current research on digital twins focuses on specific implementations (bottom-up) or abstract models on how digital twins could be conceived (top down). Yet, there is a huge gap between both views that only research on model-driven engineering (MDE) can reduce. Hence, MDE is crucial to fully and systematically leverage the potential of digital twins. 
Currently, a venue bringing together experts from the modelling community on this topic is missing: ModDIT’21 brings together researchers on and developers of digital twins come together to shape the future of systematically designing, engineering, evolving, maintaining, and evaluating digital twins.}, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{wimmer2021k, title = {Preface to MDE Intelligence 2021}, author = {Loli Burgueño and Marouane Kessentini and Manuel Wimmer and Steffen Zschaler}, url = {https://mde-intelligence.github.io/}, year = {2021}, date = {2021-10-15}, urldate = {2021-10-15}, booktitle = {3rd Workshop on Artificial Intelligence and Model-Driven Engineering co-located with MODELS 2021, Fukuoka, Japan, virtual event, October 10-15}, journal = {International Conference on Model Driven Engineering Languages and Systems Companion, MODELS Companion 2021}, pages = {148-149}, abstract = {Artificial Intelligence (AI) has become part of everyone's life. It is used by companies to exploit the information they collect to improve the products and/or services they offer and, wanted or unwanted, it is present in almost every device around us. Lately, AI is also starting to impact all aspects of the system and software development lifecycle, from their upfront specification to their design, testing, deployment and maintenance, with the main goal of helping engineers produce systems and software faster and with better quality while being able to handle ever more complex systems. The hope is that AI will help dealing with the increasing complexity of systems and software. There is no doubt that MDE has been a means to tame until now part of this complexity. However, its adoption by industry still relies on their capacity to manage the underlying methodological changes including among other things the adoption of new tools. 
To go one step further, we believe there is a clear need for AI-empowered MDE, which will push the limits of "classic" MDE and provide the right techniques to develop the next generation of highly complex model-based system and software systems engineers will have to design tomorrow. This workshop provides a forum to discuss, study and explore the opportunities and challenges raised by the integration of AI and MDE.}, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{wimmer2021l, title = {Preface to Low-Code 2021}, author = {Davide Di Ruscio and Dimitris S. Kolovos and Juan Lara and Massimo Tisi and Manuel Wimmer}, url = {https://lowcode-workshop.github.io/}, year = {2021}, date = {2021-10-15}, urldate = {2021-10-15}, booktitle = {2nd Workshop on Modeling in Low-Code Development Platforms co-located with MODELS 2021, Fukuoka, Japan, virtual event, October 10-15}, journal = {International Conference on Model Driven Engineering Languages and Systems Companion, MODELS Companion 2021}, pages = {45-46}, abstract = {The growing need for secure, trustworthy, and cost-efficient software as well as recent developments in cloud computing technologies, and the shortage of highly skilled professional software developers, have given rise to a new generation of low-code software development platforms, such as Google AppMaker (soon AppSheet) and Microsoft PowerApps. Low-code platforms enable the development and deployment of fully functional applications using mainly visual abstractions and interfaces and requiring little or no procedural code. This makes them accessible to an increasingly digital-native and tech-savvy workforce who can directly and effectively contribute to the software development process, even if they lack a programming background. At the heart of low-code applications are typically models of the structure, the behaviour and the presentation of the application. 
Low-code application models need to be edited (using graphical and textual interfaces), validated, version-controlled and eventually transformed or interpreted to deliver user-facing applications. As all of these activities have been of core interest to the MoDELS community over the last two decades, we feel that a workshop on low-code software development at MoDELS is a very natural fit, and an opportunity to attract low-code platform vendors and users to our community, with substantial benefits to be reaped from both sides.}, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{eisenberg2021, title = {Towards Reinforcement Learning for In-Place Model Transformations}, author = {Martin Eisenberg and Hans-Peter Pichler and Antonio Garmendia and Manuel Wimmer}, url = {https://se.jku.at/wp-content/uploads/2021/08/paper-towards.pdf https://ieeexplore.ieee.org/abstract/document/9592463}, doi = {10.1109/MODELS50736.2021.00017}, year = {2021}, date = {2021-10-14}, urldate = {2021-10-14}, booktitle = {ACM / IEEE 24th International Conference on Model Driven Engineering Languages and Systems (MODELS), Fukuoka, Japan, October 10-15}, pages = {7}, abstract = {Model-driven optimization has gained much interest in the last years which resulted in several dedicated extensions for in-place model transformation engines. The main idea is to exploit domain-specific languages to define models which are optimized by applying a set of model transformation rules. Objectives are guiding the optimization processes which are currently mostly realized by meta-heuristic searchers such as different kinds of Genetic Algorithms. However, meta-heuristic search approaches are currently challenged by reinforcement learning approaches for solving optimization problems. In this new ideas paper, we apply for the first time reinforcement learning for in-place model transformations. 
In particular, we extend an existing model-driven optimization approach with reinforcement learning techniques. We experiment with value-based and policy-based techniques. We investigate several case studies for validating the feasibility of using reinforcement learning for model-driven optimization and compare the performance against existing approaches. The initial evaluation shows promising results but also helped in identifying future research lines for the whole model transformation community.}, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @misc{wimmer2021r, title = {Managing Quality in Software and Systems Engineering: A Modeling Perspective}, author = {Manuel Wimmer }, url = {https://2021.quatic.org/program/invited-keynotes}, year = {2021}, date = {2021-09-10}, urldate = {2021-09-10}, booktitle = {14th International Conference on the Quality of Information and Communications Technology - QUATIC 2021 Keynote Speaker, September 8-10, 2021, online}, journal = {14th International Conference on the Quality of Information and Communications Technology - QUATIC 2021 Keynote Speaker, September 8-10, 2021, online}, abstract = {We are currently facing a dramatically increasing complexity in engineering, operation, and management of systems with the emergence of Cyber-Physical Systems (CPS) and the Internet-of-Things (IoT). This demands for comprehensive and systematic views on all system aspects (e.g., mechanical engineering, electrical engineering, and software engineering) throughout the whole system life-cycle, moving Software Engineering closer to Systems Engineering. To engineer such interdisciplinary systems, modeling is traditionally considered as the technique to understand and simplify reality through abstraction. In addition, models are nowadays used beyond the engineering phases by connecting them to the observed runtime data of the operating systems in so-called digital twins. 
However, in order to use models and their digital twin extensions in an effective and efficient way, the quality of models must be ensured throughout the complete system life-cycle. In my talk, I will outline several concepts and techniques to assess, and if required, to improve the quality of models and associated artefacts, e.g., model transformations, language definitions, and digital twins. In particular, I will talk about AI-based techniques, which can be applied directly on models to improve their quality, e.g., by finding refactoring sequences. Finally, I will conclude with some lessons learned from several projects and outline future challenges for managing the quality of models in software and systems engineering.}, keywords = {Systems Engineering}, pubstate = {published}, tppubtype = {misc} } @article{gemeinhardt2021, title = {Quantum k-community detection: Algorithm proposals and cross-architectural evaluation}, author = {Felix G. Gemeinhardt and Robert Wille and Manuel Wimmer}, doi = {10.1007/s11128-021-03239-1}, year = {2021}, date = {2021-09-09}, urldate = {2021-09-09}, journal = {Journal of Quantum Information Processing}, volume = {20}, number = {9}, pages = {302}, publisher = {Springer}, abstract = {Emerging quantum technologies represent a promising alternative for solving hard combinatorial problems in the post Moore's law era. For practical purposes however, the current number of qubits limits the direct applicability to larger real world instances in the near-term future. Therefore, a promising strategy to overcome this issue is represented by hybrid quantum classical algorithms which leverage classical as well as quantum devices. One prominent example of a hard computational problem is the community detection problem: a partition of a graph into distinct communities such that the ratio between intra-community and inter-community connectivity is maximized. 
In this paper, we explore the current potential of quantum annealing and gate-based quantum technologies to solve the community detection problem for an arbitrary number of communities. For this purpose, existing algorithms are (re-)implemented and new hybrid algorithms, that can be run on gate-model devices, are proposed. Their performance on standardized benchmark graphs has been evaluated and compared to the one of a state-of-the-art classical heuristic algorithm. Although no quantum speed-up has been achieved, the existing quantum annealing based methods as well as the novel hybrid algorithms for gate based quantum computers yield modularity values, which are similar to those of the classical heuristic. However, the modular architecture of the used algorithms allows for fast utilization of more powerful quantum technologies once they become available.}, keywords = {}, pubstate = {published}, tppubtype = {article} } @inproceedings{lehner2021b, title = {Towards Flexible Evolution of Digital Twins with Fluent APIs}, author = {Daniel Lehner and Antonio Garmendia and Manuel Wimmer}, url = {https://se.jku.at/wp-content/uploads/2021/10/2021_etfa_coevolution_cr_final.pdf https://ieeexplore.ieee.org/document/9613222}, doi = {10.1109/ETFA45728.2021.9613222}, year = {2021}, date = {2021-09-09}, urldate = {2021-09-09}, booktitle = {IEEE 26th International Conference on Emerging Technologies and Factory Automation, September 7-10, Vasteras, Schweden, virtual event}, abstract = {With the increase of technologies such as the Internet of Things (IoT) and Cyber-Physical Systems, a huge amount of data is generated by current systems. To gain insights from this data, it must be combined with meta-information about its origins. Therefore, Digital Twins (DTs), as a common representation of a system and its data, are currently gaining traction in both industry and academia. 
However, these DTs have of course to be evolvable in order to reflect the high need of flexibility of the systems to support extensions, adaptations, customizations, etc. Evolving the DT representations currently not only involves a lot of manual effort, but might also lead to loss of data if not done correctly. To provide dedicated evolution support, we propose a dedicated framework for realizing evolution strategies between the schema, instance, and data level of a DT. In particular, we present a fluent API which allows the flexible but systematic manipulation of DTs during runtime and demonstrate its usage for a use case.}, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @article{wimmer2021gb, title = {MIKADO: a smart city KPIs assessment modeling framework}, author = {Martina De Sanctis and Ludovico Iovino and Maria Teresa Rossi and Manuel Wimmer}, url = {https://link.springer.com/article/10.1007/s10270-021-00907-9}, doi = {10.1007/s10270-021-00907-9}, year = {2021}, date = {2021-08-04}, urldate = {2021-08-04}, journal = {Journal of Software Systems Modeling}, pages = {28}, abstract = {Smart decision making plays a central role for smart city governance. It exploits data analytics approaches applied to collected data, for supporting smart cities stakeholders in understanding and effectively managing a smart city. Smart governance is performed through the management of key performance indicators (KPIs), reflecting the degree of smartness and sustainability of smart cities. Even though KPIs are gaining relevance, e.g., at European level, the existing tools for their calculation are still limited. They mainly consist in dashboards and online spreadsheets that are rigid, thus making the KPIs evolution and customization a tedious and error-prone process. 
In this paper, we exploit model-driven engineering (MDE) techniques, through metamodel-based domain-specific languages (DSLs), to build a framework called MIKADO for the automatic assessment of KPIs over smart cities. In particular, the approach provides support for both: (i) domain experts, by the definition of a textual DSL for an intuitive KPIs modeling process and (ii) smart cities stakeholders, by the definition of graphical editors for smart cities modeling. Moreover, dynamic dashboards are generated to support an intuitive visualization and interpretation of the KPIs assessed by our KPIs evaluation engine. We provide evaluation results by showing a demonstration case as well as studying the scalability of the KPIs evaluation engine and the general usability of the approach with encouraging results. Moreover, the approach is open and extensible to further manage comparison among smart cities, simulations, and KPIs interrelations.}, keywords = {}, pubstate = {published}, tppubtype = {article} } @article{wimmer2021e, title = {A Hitchhiker's Guide to Model-Driven Engineering for Data-Centric Systems}, author = {Benoit Combemale and Jörg Kienzle and Gunter Mussbacher and Hyacinth Ali and Daniel Amyot and Mojtaba Bagherzadeh and Edouard Batot and Nelly Bencomo and Benjamin Benni and Jean-Michel Bruel and Jordi Cabot and Betty Cheng and Philippe Collet and Gregor Engels and Robert Heinrich and Jean-Marc Jézéquel and Anne Koziolek and Sébastien Mosser and Ralf Reussner and Houari Sahraoui and Rijul Saini and June Sallou and Serge Stinckwich and Eugene Syriani and Manuel Wimmer}, url = {https://ieeexplore.ieee.org/document/9094197}, doi = {10.1109/MS.2020.2995125}, year = {2021}, date = {2021-07-13}, urldate = {2021-07-13}, journal = {Journal of IEEE Software}, volume = {38}, issue = {4}, pages = {71--84}, abstract = {A broad spectrum of application domains are increasingly making use of heterogeneous and large volumes of data with varying degrees of humans in the 
loop. The recent success of Artificial Intelligence (AI) and, in particular, Machine Learning (ML) further amplifies the relevance of data in the development, maintenance, evolution, and execution management of systems built with model-driven engineering techniques. Applications include critical infrastructure areas such as intelligent transportation, smart energy management, public healthcare, and emergency and disaster management; many of these systems are considered socio-technical systems given the human, social, and organizational factors that must be considered during the system life-cycle [1]. This article introduces a conceptual reference framework – the Models and Data (MODA) framework – to support a data-centric and model-driven approach for the integration of heterogeneous models and their respective data for the entire life-cycle of socio-technical systems.}, keywords = {Model-Driven Engineering}, pubstate = {published}, tppubtype = {article} } @inproceedings{wimmer2021o, title = {RoboMAX: Robotic Mission Adaptation eXemplars}, author = {Mehrnoosh Askarpour and Christos Tsigkanos and Claudio Menghi and Radu Calinescu and Patrizio Pelliccione and Sergio Garcia and Ricardo Caldas and Tim J. von Oertzen and Manuel Wimmer and Luca Berardinelli and Matteo Rossi and Marcello M. Bersani and Gabriel S. Rodrigues}, url = {https://ieeexplore.ieee.org/document/9462005}, doi = {10.1109/SEAMS51251.2021.00040}, isbn = {978-1-6654-0289-7}, year = {2021}, date = {2021-06-29}, urldate = {2021-06-29}, booktitle = {16th International Symposium on Software Engineering for Adaptive and Self-Managing Systems, SEAMS@ICSE 2021, Madrid, Spain, May 18-24}, pages = {245-251}, publisher = {IEEE}, abstract = {Emerging and future applications of robotic systems pose unique self-adaptation challenges. To support the research needed to address these challenges, we provide an extensible repository of robotic mission adaptation exemplars. 
Co-designed with robotic application stakeholders including researchers, developers, operators, and end-users, our repository captures key sources of uncertainty, adaptation concerns, and other distinguishing characteristics of such applications. An online form enables external parties to supply new exemplars for curation and inclusion into the repository. We envisage that our RoboMAX repository will enable the development, evaluation, and comparison of self-adaptation approaches for the robotic systems domain.}, keywords = {Adaptive Systems, Robots, Software Engineering, Stakeholders, Uncertainty}, pubstate = {published}, tppubtype = {inproceedings} } @article{wimmer2021f, title = {Guest editorial to the theme section on Multi-Paradigm Modeling for Cyber-Physical Systems}, author = {Eugene Syriani and Manuel Wimmer}, url = {https://se.jku.at/guest-editorial-to-the-theme-section-on-multi-paradigm-modeling-for-cyber-physical-systems/}, doi = {10.1007/s10270-021-00882-1}, year = {2021}, date = {2021-06-21}, urldate = {2021-06-21}, journal = {Journal of Software Systems Modeling}, volume = {20}, number = {3}, pages = {607-609}, abstract = {This theme section aims to disseminate the latest research results in the area of Multi-Paradigm Modeling for Cyber-Physical Systems (MPM4CPS). MPM has a long tradition within the Model-Driven Engineering community, e.g., several workshops have been held at the MODELS conference for over more than a decade. The MPM4CPS workshop series is a continuation of the successful MPM workshop series with a stronger focus on CPS as especially these systems pose several new challenges on the engineering process and beyond. This theme section covers papers on the foundations and applications of MPM for CPS. 
In total, we accepted five submissions for publication in the theme section after a thorough peer-reviewing process.}, keywords = {Cy Engineering, Cyber-Physical Systems, Model-Driven Engineering, Systems}, pubstate = {published}, tppubtype = {article} } @article{wimmer2021, title = {Dealing with Non-Functional Requirements in Model-Driven Development: A Survey}, author = {David Ameller and Xavier Franch and Cristina Gómez and Silverio Martínez-Fernández and J. Araujo and Stefan Biffl and Jordi Cabot and Vittorio Cortellessa and Daniel Méndez Fernández and Ana Moreira and Henry Muccini and Antonio Vallecillo and Manuel Wimmer and Vasco Amaral and Wolfgang Böhm and Hugo Brunelière and Loli Burgueño and Miguel Goulao and Sabine Teufl and Luca Berardinelli}, url = {https://se.jku.at/dealing-with-non-functional-requirements-in-model-driven-development-a-survey/}, doi = {10.1109/TSE.2019.2904476}, year = {2021}, date = {2021-05-19}, urldate = {2021-05-19}, journal = {Journal of IEEE Transactions on Software Engineering}, volume = {47}, number = {4}, pages = {818--835}, abstract = {Managing Non-Functional Requirements (NFRs) in software projects is challenging, and projects that adopt Model-Driven Development (MDD) are no exception. Although several methods and techniques have been proposed to face this challenge, there is still little evidence on how NFRs are handled in MDD by practitioners. Knowing more about the state of the practice may help researchers to steer their research and practitioners to improve their daily work. Objective: In this paper, we present our findings from an interview-based survey conducted with practitioners working in 18 different companies from 6 European countries. 
From a practitioner's point of view, the paper shows what barriers and benefits the management of NFRs as part of the MDD process can bring to companies, how NFRs are supported by MDD approaches, and which strategies are followed when (some) types of NFRs are not supported by MDD approaches. Results: Our study shows that practitioners perceive MDD adoption as a complex process with little to no tool support for NFRs, reporting productivity and maintainability as the types of NFRs expected to be supported when MDD is adopted. But in general, companies adapt MDD to deal with NFRs. When NFRs are not supported, the generated code is sometimes changed manually, thus compromising the maintainability of the software developed. However, the interviewed practitioners claim that the benefits of using MDD outweigh the extra effort required by these manual adaptations. Conclusion: Overall, the results indicate that it is important for practitioners to handle NFRs in MDD, but further research is necessary in order to lower the barrier for supporting a broad spectrum of NFRs with MDD. 
Still, much conceptual and tool implementation work seems to be necessary to lower the barrier of integrating the broad spectrum of NFRs in practice.}, keywords = {Companies, Productivity, Security; Analytical models, Software, Software Engineering, Unified modeling language}, pubstate = {published}, tppubtype = {article} } @inproceedings{wimmer2021c, title = {Towards a Model-Integrated Runtime Monitoring Infrastructure for Cyber-Physical Systems}, author = {Michael Vierhauser and Hussein Marah and Antonio Garmendia and Jane Cleland-Huang and Manuel Wimmer}, url = {https://se.jku.at/towards-a-model-integrated-runtime-monitoring-infrastructure-for-cyber-physical-systems/}, doi = {10.1109/ICSE-NIER52604.2021.00028}, year = {2021}, date = {2021-05-07}, urldate = {2021-05-07}, booktitle = {IEEE/ACM 43rd International Conference on Software Engineering: New Ideas and Emerging Results (ICSE-NIER), May 25-28, Madrid, Spain, virtual event}, publisher = {IEEE}, abstract = {Runtime monitoring is essential for ensuring the safe operation and enabling self-adaptive behavior of Cyber-Physical Systems (CPS). It requires the creation of system monitors, instrumentation for data collection, and the definition of constraints. All of these aspects need to evolve to accommodate changes in the system. However, most existing approaches lack support for the automated generation and set up of monitors and constraints for diverse technologies and do not provide adequate support for evolving the monitoring infrastructure. Without this support, constraints and monitors can become stale and become less effective in long-running, rapidly changing CPS. In this “new and emerging results” paper we propose a novel framework for model-integrated runtime monitoring. We combine model-driven techniques and runtime monitoring to automatically generate large parts of the monitoring framework and to reduce the maintenance effort necessary when parts of the monitored system change. 
We build a prototype and evaluate our approach against a system for controlling the flights of unmanned aerial vehicles.}, keywords = {Cyber-Physical Systems, Monitoring, Instruments, Prototypes, Maintenance engineering, Runtime}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Wally2020b, title = {The Digital Twin as a Core Component for Industry 4.0 Smart Production Planning}, author = {Petr Novak and Jiri Vyskocil and Bernhard Wally}, url = {https://www.sciencedirect.com/science/article/pii/S2405896320336314}, doi = {10.1016/j.ifacol.2020.12.2865}, year = {2021}, date = {2021-04-14}, urldate = {2021-04-14}, booktitle = {21st IFAC World Congress, July 11-17, 2020, Berlin, Germany, virtual}, abstract = {Production systems that adhere to the Industry 4.0 vision require new ways of control and integration of individual components, such as robots, transportation system shuttles or mobile platforms. This paper proposes a new production system control concept based on closing a feedback loop between a production planning system and a digital twin of the physical production system. The digital twin keeps up-to-date information about the current state of the physical production system and it is combined with the production planner utilizing artificial intelligence methods. Production recipes and concrete process instantiations are planned for each production order on-the-fly, based on the production system state retrieved from the digital twin. This approach provides a high flexibility in terms of ability to add and to remove products as well as production resources. It also enables error recovery by re-planning the production if some failure happens. 
The proposed approach is tested and evaluated on an internally hosted Industry 4.0 testbed, which confirms its efficiency and flexibility.}, keywords = {Digital Twin, Flexible and Reconfigurable Manufacturing Systems, Intelligent Manufacturing Systems, Production Planning and Control}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{gemeinhardt, title = {Towards Model-Driven Quantum Software Engineering}, author = {Felix Gemeinhardt and Antonio Garmendia and Manuel Wimmer}, url = {https://epub.jku.at/obvulioa/download/pdf/5894932?originalFilename=true}, doi = {10.1109/Q-SE52541.2021.00010}, year = {2021}, date = {2021-04-10}, urldate = {2021-04-10}, booktitle = {Second International Workshop on Quantum Software Engineering (Q-SE 2021) co-located with ICSE 2021, April 9-11, Qingdao, China, virtual event}, pages = {3}, publisher = {IEEE/ACM}, abstract = {Quantum technologies are emerging. Dedicated languages for programming Quantum machines are emerging as well and already used in different settings. Orthogonal to this development, Model-Driven Engineering (MDE) is explored to ease the development of software systems by applying modeling techniques such as Domain-Specific Modeling Languages and generative techniques such as code generation. In this position paper, we argue for a dedicated research line which deals with the exploration of how MDE may be applied for Quantum technologies. This combination would allow to speed-up the development of Quantum software, open the door for domain experts to utilize Quantum technologies, and may provide an additional abstraction layer over existing Quantum hardware architectures and programming languages. 
We outline several research challenges which we derived from a demonstration case of how to exploit domain-specific modeling for social network analysis on Quantum technologies.}, keywords = {Domain-Specific Languages, Model-Driven Engineering, Quantum Computing, Software Engineering}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{wimmer2021g, title = {Behavioral Interfaces for Executable DSLs}, author = {Dorian Leroy and Erwan Bousse and Manuel Wimmer and Benoit Combemale and Tanja Mayerhofer and Wieland Schwinger}, editor = {Anne Koziolek and Ina Schaefer and Christoph Seidl}, doi = {10.18420/SE2021_25}, isbn = {978-3-88579-704-3}, year = {2021}, date = {2021-02-18}, urldate = {2021-02-18}, booktitle = {Software Engineering 2021, Fachtagung des GI-Fachbereichs Softwaretechnik, February 22-26, Braunschweig, Deutschland, virtual event}, volume = {P310}, pages = {73-74}, publisher = {Gesellschaft für Informatik e.V.}, abstract = {A large amount of domain-specific languages (DSLs) are used to represent behavioral aspects of systems in the form of behavioral models [BCW17]. Executable domain-specific languages (xDSLs) enable the execution of behavioral models [Ma13]. While an execution is mostly driven by the model’s content (e.g., control structures, conditionals, transitions, method calls), many use cases require interacting with the running model, such as simulating scenarios in an automated or interactive way or coupling system models with environment models. The management of these interactions is usually hard-coded into the semantics of xDSLs, which prevents its reuse for other xDSLs and the provision of generic interaction tools. To tackle these issues, we propose a novel metalanguage for complementing the definition of xDSLs with explicit behavioral interfaces to enable external tools to interact with executable models in a unified way. 
A behavioral interface defines a set of events specifying how external tools can interact with models that conform to xDSLs implementing the interface. Additionally, we define two types of relationships involving behavioral interfaces: the implementation relationship and the subtyping relationship. An implementation relationship ties a behavioral interface to a given operational semantics implementation. Subtyping relationships allow to build event abstraction hierarchies, indicating that events from one interface can be abstracted or refined as events from another interface.}, keywords = {Domain-Specific Languages, Metamodeling}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{wimmer2021cb, title = {Workshop on Software Engineering in Cyber-Physical Production Systems (SECPPS’21)}, author = {Rick Rabiser and Birgit Vogel-Heuser and Manuel Wimmer and Alois Zoitl}, editor = {Anne Koziolek and Ina Schaefer and Christoph Seidl}, doi = {10.18420/SE2021_53}, year = {2021}, date = {2021-02-18}, urldate = {2021-02-18}, booktitle = {Software Engineering 2021, Fachtagung des GI-Fachbereichs Softwaretechnik, Workshop am Software Engineering in Cyber-Physical Production Systems (SECPPS’21), February 22-26, Bonn, Germany, virtual event}, volume = {P310}, pages = {133-134}, publisher = {Gesellschaft für Informatik e.V.}, abstract = {This workshop focuses on Software Engineering in Cyber-Physical Production Systems. It is an interactive workshop opened by keynotes and statements by participants, followed by extensive discussions in break-out groups. 
The output of the workshop is a research roadmap as well as concrete networking activities to further establish a community in this interdisciplinary field.}, keywords = {Cyber-Physical Systems}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Wimmer21, title = {From Model Versioning to Variability-Augmented Modelling Technologies}, author = {Manuel Wimmer}, editor = {Paul Grünbacher and Christoph Seidl and Deepak Dhungana and Helena Lovasz-Bukvova}, doi = {10.1145/3442391.3442394}, year = {2021}, date = {2021-02-12}, urldate = {2021-02-12}, booktitle = {VAMOS 2021, 15th International Working Conference on Variability Modelling of Software-Intensive Systems, February 9-11, Krems, Austria, virtual event}, pages = {2:1}, publisher = {ACM}, abstract = {Version control systems are an essential part of the software development infrastructure. While traditional systems mostly focus on code-based artefacts, recent trends such as Cyber Physical Systems (CPS) require to support model-based artefacts as well – especially in interdisciplinary settings. As a consequence, several dedicated approaches for model versioning have been proposed recently. 
In this talk, I will review the active research field of model versioning, establish a common terminology, introduce the various techniques and technologies applied in current model versioning systems, and conclude with open issues and challenges such as the need for variability-augmented modelling technologies.}, keywords = {Cyber-Physical Systems}, pubstate = {published}, tppubtype = {inproceedings} } @proceedings{DBLP:conf/swqd/2021, title = {Software Quality: Future Perspectives on Software Engineering Quality}, editor = {Dietmar Winkler and Stefan Biffl and Daniel Mendez and Manuel Wimmer and Johannes Bergsmann}, doi = {10.1007/978-3-030-65854-0}, isbn = {978-3-030-65853-3}, year = {2021}, date = {2021-01-27}, urldate = {2021-01-27}, volume = {404}, publisher = {Springer}, series = {LNBIP}, abstract = {This book constitutes the refereed proceedings of the 13th Software Quality Days Conference, SWQD 2021, which was planned to be held in Vienna, Austria, during January 19–21, 2021. Due to the COVID-19 pandemic, the conference was cancelled and will be merged with SWQD 2022. The Software Quality Days (SWQD) conference started in 2009 and has grown to the biggest conference on software quality in Europe with a strong community. The program of the SWQD conference is designed to encompass a stimulating mixture of practical presentations and new research topics in scientific presentations. The guiding conference topic of the SWQD 2021 is “Future Perspectives on Software Engineering Quality”. The 3 full papers and 5 short papers presented in this volume were carefully reviewed and selected from 13 submissions. The volume also contains 2 invited talks and one introductory paper for an interactive session. The contributions were organized in topical sections named: automation in software engineering; quality assurance for AI-based systems; machine learning applications; industry-academia collaboration; and experimentation in software engineering. 
}, keywords = {Machine Learning, Requirements Engineering, Software Architecture, software maintenance, software quality}, pubstate = {published}, tppubtype = {proceedings} } @article{wally2021, title = {Leveraging Iterative Plan Refinement for Reactive Smart Manufacturing Systems}, author = {Bernhard Wally and Jiri Novak and Petr Novak and Christian Huemer and Radek Sindelar and Petr Kaderar and Alexandra Mazak-Huemer and Manuel Wimmer}, url = {https://ieeexplore.ieee.org/document/9190077}, doi = {10.1109/TASE.2020.3018402}, year = {2021}, date = {2021-01-26}, urldate = {2021-01-26}, journal = {Journal of IEEE Transactions on Automation Science and Engineering}, volume = {18}, number = {1}, pages = {230-243}, abstract = {Industry 4.0 production systems must support flexibility in various dimensions, such as for the products to be produced, for the production processes to be applied, and for the available machinery. In this article, we present a novel approach to design and control smart manufacturing systems. The approach is reactive, that is responds to unplanned situations and implements an iterative refinement technique, that is, optimizes itself during runtime to better accommodate production goals. For realizing these advances, we present a model-driven methodology and we provide a prototypical implementation of such a production system. In particular, we employ Planning Domain Definition Language (PDDL) as our artificial intelligence environment for automated planning of production processes and combine it with one of the most prominent Industry 4.0 standards for the fundamental production system model: IEC 62264. We show how to plan the assembly of small trucks from available components and how to assign specific production operations to available production resources, including robotic manipulators and transportation system shuttles. 
Results of the evaluation indicate that the presented approach is feasible and that it is able to significantly strengthen the flexibility of production systems during runtime.}, keywords = {Automation, IEC Standards, Planning, Production Systems}, pubstate = {published}, tppubtype = {article} } @article{PASKALEVA2021, title = {Leveraging integration facades for model-based tool interoperability}, author = {Galina Paskaleva and Alexandra Mazak-Huemer and Manuel Wimmer and Thomas Bednar}, doi = {10.1016/j.autcon.2021.103689}, issn = {0926-5805}, year = {2021}, date = {2021-01-01}, urldate = {2021-01-01}, journal = {Journal of Automation in Construction}, volume = {128}, pages = {103689}, abstract = {Data exchange and management methods are of paramount importance in areas as complex as the Architecture, Engineering and Construction industries and Facility Management. For example, Big Open BIM requires seamless information flow among an arbitrary number of applications. The backbone of such information flow is a robust integration, whose tasks include overcoming technological as well as semantic and pragmatic gaps and conflicts both within and between data models. In this work, we introduce a method for integrating the pragmatics at design-time and the semantics of independent applications at run-time into so-called “integration facades”. We utilize Model-driven Engineering for the automatic discovery of functionalities and data models, and for finding a user-guided consensus. We present a case study involving the domains of architecture, building physics and structural engineering for evaluating our approach in object-oriented as well as data-oriented programming environments. 
The results produce, for each scenario, a single integration facade that acts as a single source of truth in the data exchange process.}, keywords = {}, pubstate = {published}, tppubtype = {article} } @inproceedings{Rossi21, title = {Leveraging Multi-Level Modeling for Multi-Domain Quality Assessment}, author = {Maria Teresa Rossi and Martina Dal Molin and Ludovico Iovino and Martina De Sanctis and Manuel Wimmer}, url = {https://www.computer.org/csdl/proceedings-article/models-c/2021/248400a546/1zutEepNq3m}, doi = {10.1109/MODELS-C53483.2021.00085}, year = {2021}, date = {2021-01-01}, urldate = {2021-01-01}, booktitle = {International Conference on Model Driven Engineering Languages and Systems Companion, MODELS Companion 2021}, pages = {546-555}, abstract = {Quality Evaluation Systems (QESs) are a class of software systems which receive evaluation requests and quality requirement specifications as inputs and produce results as outputs of an assessment process. This class of systems usually work with a quality model including quality definitions and metrics, and produce the output as a quantitative evaluation of a subject. QESs can be implemented using model-driven techniques and dedicated languages, for domain-specific evaluation of different subjects. However, applying traditional two-level metamodeling techniques for this scenario entails that every time a QES is required, a new modeling framework, with consequent engine for interpreting the newly defined models, must be re-developed from scratch. To overcome this repetitive process, in this paper, we propose a Multi-Level Modeling (MLM) approach for realizing the artifacts involved in the development phase of a QES which are reusable across multiple domains. 
We demonstrate the approach with running examples from three different application domains comprising different evaluation scenarios.}, keywords = {Workshop}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{wimmer2020c, title = {Automated Variability Injection for Graphical Modelling Languages}, author = {Antonio Garmendia and Manuel Wimmer and Esther Guerra and Elena Gómez-Martínez and Juan Lara}, url = {https://doi.org/10.1145/3425898.3426957}, doi = {10.1145/3425898.3426957}, year = {2020}, date = {2020-12-15}, urldate = {2020-12-15}, booktitle = {Proceedings of the 19th ACM SIGPLAN International Conference on Generative Programming, Concepts and Experiences (GPCE 2020), November 16–17, 2020, Chicago, USA, Virtual}, pages = {15-21}, publisher = {ACM}, abstract = {Model-based development approaches, such as Model-Driven Engineering (MDE), heavily rely on the use of modelling languages to achieve and automate software development tasks. To enable the definition of model variants (e.g., supporting the compact description of system families), one solution is to combine MDE with Software Product Lines. However, this is technically costly as it requires adapting many MDE artefacts associated to the modelling language – especially the meta-models and graphical environments. To alleviate this situation, we propose a method for the automated injection of variability into graphical modelling languages. Given the meta-model and graphical environment of a particular language, our approach permits configuring the allowed model variability, and the graphical environment is automatically adapted to enable creating models with variability. 
Our solution is implemented atop the Eclipse Modeling Framework and Sirius, and synthesizes adapted graphical editors integrated with FeatureIDE.}, keywords = {Model-Driven Engineering}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{BorkGW20, title = {Towards a Multi-Objective Modularization Approach for Entity-Relationship Models}, author = {Dominik Bork and Antonio Garmendia and Manuel Wimmer}, editor = {Judith Michael and Victoria Torres}, url = {http://ceur-ws.org/Vol-2716/paper4.pdf}, year = {2020}, date = {2020-11-05}, booktitle = {Forum, Demo and Posters 2020 co-located with 39th International Conference on Conceptual Modeling ER 2020, Vienna, Austria, November 3-6, 2020}, volume = {2716}, pages = {45-58}, publisher = {CEUR-WS.org}, abstract = {Legacy systems and their associated data models often evolve into large, monolithic artifacts. This threatens comprehensibility and maintainability by human beings. Breaking down a monolith into a modular structure is an established technique in software engineering. Several previous works aimed to adapt modularization also for conceptual data models. However, we currently see a research gap manifested in the absence of: (i) a flexible and extensible modularization concept for Entity Relationship (ER) models; (ii) of openly available tool support; and (iii) empirical evaluation. With this paper, we introduce a generic encoding of a modularization concept for ER models which enables the use of meta-heuristic search approaches. For the efficient application we introduce the ModulER tool. Eventually, we report on a twofold evaluation: First, we demonstrate feasibility and performance of the approach by two demonstration cases. 
Second, we report on an initial empirical experiment and a survey we conducted with modelers to compare automated modularizations with manually created ones and to better understand how humans approach ER modularization.}, keywords = {Modularization}, pubstate = {published}, tppubtype = {inproceedings} } @article{MazakWGCWK20, title = {Temporal Models on Time Series Databases}, author = {Alexandra Mazak-Huemer and Sabine Wolny and Abel Gómez and Jordi Cabot and Manuel Wimmer and Gerti Kappel}, url = {http://www.jot.fm/contents/issue_2020_03/article14.html}, doi = {10.5381/jot.2020.19.3.a14}, year = {2020}, date = {2020-11-03}, urldate = {2020-11-03}, journal = {Journal of Object Technology}, volume = {19}, number = {3}, pages = {3:1-15}, abstract = {With the emergence of Cyber-Physical Systems (CPS), several sophisticated runtime monitoring solutions have been proposed in order to deal with extensive execution logs. One promising development in this respect is the integration of time series databases that support the storage of massive amounts of historical data as well as to provide fast query capabilities to reason about runtime properties of such CPS. In this paper, we discuss how conceptual modeling can benefit from time series databases, and vice versa. In particular, we present how metamodels and their instances, i.e., models, can be partially mapped to time series databases. Thus, the traceability between design and simulation/runtime activities can be ensured by retrieving and accessing runtime information, i.e., time series data, in design models. On this basis, the contribution of this paper is four-fold. First, a dedicated profile for annotating design models for time series databases is presented. Second, a mapping for integrating the metamodeling framework EMF with InfluxDB is introduced as a technology backbone enabling two distinct mapping strategies for model information. 
Third, we demonstrate how continuous time series queries can be combined with the Object Constraint Language (OCL) for navigation through models, now enriched with derived runtime properties. Finally, we also present an initial evaluation of the different mapping strategies with respect to data storage and query performance. Our initial results show the efficiency of applying derived runtime properties as time series queries also for large model histories.}, keywords = {Time-Series}, pubstate = {published}, tppubtype = {article} } @inproceedings{Horvath0W20, title = {Towards the next generation of reactive model transformations on low-code platforms: three research lines}, author = {Benedek Horváth and Ákos Horváth and Manuel Wimmer}, editor = {Esther Guerra and Ludovico Iovino}, url = {https://doi.org/10.1145/3417990.3420199}, doi = {10.1145/3417990.3420199}, year = {2020}, date = {2020-10-29}, booktitle = {23rd International Conference on Model Driven Engineering Languages and Systems, Virtual Event, Canada, 16-23 October, 2020, Companion Proceedings}, pages = {65:1-65:10}, publisher = {ACM}, abstract = {Low-Code Development Platforms have emerged as the next-generation, cloud-enabled collaborative platforms. These platforms adopt the principles of Model-Driven Engineering, where models are used as first-class citizens to build complex systems, and model transformations are employed to keep a consistent view between the different aspects of them. Due to the online nature of low-code platforms, users expect them to be responsive, to complete complex operations in a short time. 
To support such complex collaboration scenarios, the next-generation of low-code platforms must (𝑖) offer a multi-tenant environment to manage the collaborative work of engineers, (𝑖𝑖) provide a model processing paradigm scaling up to hundreds of millions of elements, and (𝑖𝑖𝑖) provide engineers a set of selection criteria to choose the right model transformation engine in multi-tenant execution environments. In this paper, we outline three research lines to improve the performance of reactive model transformations on low-code platforms, by motivating our research with a case study from a systems engineering domain.}, keywords = {Model Transformations}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{ColantoniBW20, title = {DevOpsML: Towards Modeling DevOps Processes and Platforms}, author = {Alessandro Colantoni and Luca Berardinelli and Manuel Wimmer}, editor = {Esther Guerra and Ludovico Iovino}, url = {https://doi.org/10.1145/3417990.3420203}, doi = {10.1145/3417990.3420203}, year = {2020}, date = {2020-10-29}, booktitle = {23rd International Conference on Model Driven Engineering Languages and Systems, Virtual Event, Canada, 16-23 October, 2020, Companion Proceedings}, pages = {69:1-69:10}, publisher = {ACM}, abstract = {DevOps and Model Driven Engineering (MDE) provide differently skilled IT stakeholders with methodologies and tools for organizing and automating continuous software engineering activities–from development to operations, and using models as key engineering artifacts, respectively. Both DevOps and MDE aim at shortening the development life-cycle, dealing with complexity, and improve software process and product quality. The integration of DevOps and MDE principles and practices in low-code engineering platforms (LCEP) are gaining attention by the research community. 
However, at the same time, new requirements are upcoming for DevOps and MDE as LCEPs are often used by non-technical users, to deliver fully functional software. This is in particular challenging for current DevOps processes, which are mostly considered on the technological level, and thus, excluding most of the current LCEP users. The systematic use of models and modeling to lower the learning curve of DevOps processes and platforms seems beneficial to make them also accessible for non-technical users. In this paper, we introduce DevOpsML, a conceptual framework for modeling and combining DevOps processes and platforms. Tools along with their interfaces and capabilities are the building blocks of DevOps platform configurations, which can be mapped to software engineering processes of arbitrary complexity. We show our initial endeavors on DevOpsML and present a research roadmap how to employ the resulting DevOpsML framework for different use cases.}, keywords = {DevOpsML, Model-Driven Engineering}, pubstate = {published}, tppubtype = {inproceedings} } @article{AlkhaziAKLW20, title = {Multi-criteria test cases selection for model transformations}, author = {Bader Alkhazi and Chaima Abid and Marouane Kessentini and Dorian Leroy and Manuel Wimmer}, url = {https://doi.org/10.1007/s10515-020-00271-w}, doi = {10.1007/s10515-020-00271-w}, year = {2020}, date = {2020-10-26}, urldate = {2020-10-26}, journal = {Journal of Automated Software Engineering}, volume = {27}, number = {1}, pages = {91-118}, abstract = {Model transformations play an important role in the evolution of systems in various fields such as healthcare, automotive and aerospace industry. Thus, it is important to check the correctness of model transformation programs. Several approaches have been proposed to generate test cases for model transformations based on different coverage criteria (e.g., statements, rules, metamodel elements, etc.). 
However, the execution of a large number of test cases during the evolution of transformation programs is time-consuming and may include a lot of overlap between the test cases. In this paper, we propose a test case selection approach for model transformations based on multi-objective search. We use the non-dominated sorting genetic algorithm (NSGA-II) to find the best trade-offs between two conflicting objectives: (1) maximize the coverage of rules and (2) minimize the execution time of the selected test cases. We validated our approach on several evolution cases of medium and large ATLAS Transformation Language programs.}, keywords = {Model Transformations, multi-criteria selection}, pubstate = {published}, tppubtype = {article} } @article{AlkhaziAKW20, title = {On the value of quality attributes for refactoring ATL model transformations: A multi-objective approach}, author = {Bader Alkhazi and Chaima Abid and Marouane Kessentini and Manuel Wimmer}, url = {https://doi.org/10.1016/j.infsof.2019.106243}, doi = {10.1016/j.infsof.2019.106243}, year = {2020}, date = {2020-10-26}, urldate = {2020-10-26}, journal = {Journal of Information and Software Technology}, volume = {120}, pages = {106243}, abstract = {Model transformations play a fundamental role in Model-Driven Engineering (MDE) as they are used to manipulate models and to transform them between source and target metamodels. However, model transformation programs lack significant support to maintain good quality which is in contrast to established programming paradigms such as object-oriented programming. In order to improve the quality of model transformations, the majority of existing studies suggest manual support for the developers to execute a number of refactoring types on model transformation programs. 
Other recent studies aimed to automate the refactoring of model transformation programs, mostly focusing on the ATLAS Transformation Language (ATL), by improving mainly few quality metrics using a number of refactoring types.}, keywords = {ATL, Model Transformations}, pubstate = {published}, tppubtype = {article} } @inproceedings{BordeleauCEBW20, title = {Towards Model-Driven Digital Twin Engineering: Current Opportunities and Future Challenges}, author = {Francis Bordeleau and Benoit Combemale and Romina Eramo and Mark Brand and Manuel Wimmer}, editor = {Önder Babur and Joachim Enil and Birgit Vogel-Heuser}, url = {https://doi.org/10.1007/978-3-030-58167-1_4}, doi = {10.1007/978-3-030-58167-1_4}, year = {2020}, date = {2020-10-22}, booktitle = {Systems Modelling and Management - First International Conference, ICSMM 2020, Bergen, Norway, June 25-26, 2020, Proceedings}, volume = {1262}, pages = {43-54}, publisher = {Springer}, abstract = {Digital Twins have emerged since the beginning of this millennium to better support the management of systems based on (real-time) data collected in different parts of the operating systems. Digital Twins have been successfully used in many application domains, and thus, are considered as an important aspect of Model-Based Systems Engineering (MBSE). However, their development , maintenance, and evolution still face major challenges, in particular: (i) the management of heterogeneous models from different disciplines, (ii) the bi-directional synchronization of digital twins and the actual systems, and (iii) the support for collaborative development throughout the complete life-cycle. In the last decades, the Model-Driven Engineering (MDE) community has investigated these challenges in the context of software systems. Now the question arises, which results may be applicable for digital twin engineering as well. 
In this paper, we identify various MDE techniques and technologies which may contribute to tackle the three mentioned digital twin challenges as well as outline a set of open MDE research challenges that need to be addressed in order to move towards a digital twin engineering discipline.}, keywords = {Digital Twin, modeling languages}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{LehnerWMW20, title = {Towards a Reference Architecture for Leveraging Model Repositories for Digital Twins}, author = {Daniel Lehner and Sabine Wolny and Alexandra Mazak-Huemer and Manuel Wimmer}, url = {https://doi.org/10.1109/ETFA46521.2020.9212109}, doi = {10.1109/ETFA46521.2020.9212109}, year = {2020}, date = {2020-10-15}, booktitle = {25th IEEE International Conference on Emerging Technologies and Factory Automation, ETFA 2020, Vienna, Austria, September 8-11, 2020}, pages = {1077-1080}, publisher = {IEEE}, abstract = {In the area of Cyber-Physical Systems (CPS), the degree of complexity continuously increases mainly due to new key-enabling technologies supporting those systems. One way to deal with this increasing complexity is to create a digital representation of such systems, a so-called Digital Twin (DT), which virtually acts in parallel ideally across the entire life-cycle of a CPS. For this purpose, the DT uses simulated or real-time data to mimic operations, control, and may modify the CPS’s behaviour at runtime. However, building such DTs from scratch is not trivial, mainly due to the integration needed to deal with heterogeneous systems residing in different technological spaces. In order to tackle this challenge, Model-Driven Engineering (MDE) allows to logically model a CPS with its physical components. Usually, in MDE such “logical models” are created at design time which keep them detached from the deployed system during runtime. 
Instead of building bilateral solutions between each runtime environment and every engineering tool, a dedicated integration layer is needed which can deal with both, design and runtime aspects. Therefore, we present a reference architecture that allows on the one side to query data from model repositories to enrich the running system with design-time knowledge, and on the other side, to be able to reason about system states at runtime in design-time models. We introduce a model repository query and management engine as mediator and show its feasibility by a demonstration case.}, keywords = {Digital Twin}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{GarmendiaWMGL20, title = {Modelling Production System Families with AutomationML}, author = {Antonio Garmendia and Manuel Wimmer and Alexandra Mazak-Huemer and Esther Guerra and Juan Lara}, url = {https://doi.org/10.1109/ETFA46521.2020.9211894}, doi = {10.1109/ETFA46521.2020.9211894}, year = {2020}, date = {2020-10-15}, booktitle = {25th IEEE International Conference on Emerging Technologies and Factory Automation, ETFA 2020, Vienna, Austria, September 8-11, 2020}, pages = {1057-1060}, publisher = {IEEE}, abstract = {The description of families of production systems usually relies on the use of variability modelling. This aspect of modelling is gaining increasing interest with the emergence of Industry 4.0 to facilitate the product development as new requirements appear. As a consequence, there are several emerging modelling techniques able to apply variability in different domains. In this paper, we introduce an approach to establish product system families in AutomationML. Our approach is based on the definition of feature models describing the variability space, and on the assignment of presence conditions to AutomationML model elements. These conditions (de-)select the model elements depending on the chosen configuration. 
This way, it is possible to model a large set of model variants in a compact way using one single model. To realize our approach, we started from an existing EMF-based AutomationML workbench providing graphical modelling support. From these artifacts,we synthesized an extended graphical modelling editor with variability support, integrated with FeatureIDE. Furthermore, we validated our approach by creating and managing a production system family encompassing six scenarios of the Pick and Place Unit Industry 4.0 demonstrator.}, keywords = {AutomationML}, pubstate = {published}, tppubtype = {inproceedings} } @misc{wimmer2020k, title = {Modeling Language Engineering 4.0: From Design-Time to Runtime and Back Again}, author = {Manuel Wimmer}, url = {https://se.jku.at/modeling-language-engineering-4-0-from-design-time-to-runtime-and-back-again/ https://www.jku.at/news-events/events/detail/news/manuel-wimmer-modeling-language-engineering-40-from-design-time-to-runtime-and-back-again-keynot/ https://is.ieis.tue.nl/edoc20/keynotes/}, year = {2020}, date = {2020-10-08}, urldate = {2020-11-27}, booktitle = {EDOC 2020, October 5-8, Eindhoven, Niederlande, virtuel}, journal = {Software Systems Modeling}, volume = {19}, number = {1}, pages = {67-94}, abstract = {Modeling languages started as key elements for sketching and documenting software-intensive systems. Today, we often recognize a discrepancy between design models concentrating on the desired behaviour of a system and its real world correspondents reflecting deviations taking place at runtime. In order to close this gap, design models must not be static elements, but evolutionary ones. However, this requires a new generation of modeling languages equipped with an explicit runtime perspective incorporating operational data. Efficiently developing such modeling languages with novel language engineering methods is our quest in the research laboratory CDL-MINT (https://cdl-mint.se.jku.at). 
In particular, we focus on the model-driven continuous evolution
The topics relevant to this special issue include, but are not restricted to, the following: – Software management & processes – Software quality & technical debt – Cloud-native computing & DevOps – Embedded systems, edge computing & IoT }, keywords = {AI, DevOps, IoT, Smart Cities}, pubstate = {published}, tppubtype = {article} } @inproceedings{wimmer2020e, title = {A Graphical Toolkit for IEC 62264-2}, author = {Laurens Lang and Bernhard Wally and Christian Huemer and Radek Sindelar and Alexandra Mazak-Huemer and Manuel Wimmer}, editor = {Robert X. Gao and Kornel Ehmann}, url = {https://doi.org/10.1016/j.procir.2020.03.049}, doi = {10.1016/j.procir.2020.03.049}, year = {2020}, date = {2020-09-22}, booktitle = {53rd CIRP Conference on Manufactoring Systems 2020, July 1-3, 2020, Chicago, USA}, journal = {Procedia CIRP}, volume = {93}, pages = {532-537}, abstract = {Among the plethora of industrial standards available in the context of smart manufacturing, one series of standards is consistently being mentioned for dealing with manufacturing operations management: IEC 62264. Its second part provides a conceptual model for the description of production systems and their capabilities, including runtime information such as concrete maintenance schedules or achieved production goals. In this work, we present a concrete graphical syntax and toolkit for the creation and presentation of IEC 62264-2 compliant models, using techniques from model-driven (software) engineering. 
We have evaluated our tool by conducting a user study for assessing its usability and effectiveness.}, keywords = {IEC 62264, Model-Driven Engineering, Usability Engineering}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{SanctisIRW20, title = {A Flexible Architecture for Key Performance Indicators Assessment in Smart Cities}, author = {Martina De Sanctis and Ludovico Iovino and Maria Teresa Rossi and Manuel Wimmer}, editor = {Anton Jansen and Ivano Malavolta and Henry Muccini and Olaf Zimmermann}, url = {https://doi.org/10.1007/978-3-030-58923-3_8}, doi = {10.1007/978-3-030-58923-3_8}, isbn = {978-3-030-58922-6}, year = {2020}, date = {2020-09-19}, booktitle = {Software Architecture for Key Performance Indicators Assessment in Smart Cities}, volume = {12292}, pages = {118-135}, publisher = {Springer}, abstract = {The concept of smart and sustainable city has been on the agenda for the last decade. Smart governance is about the use of innovation for supporting enhanced decision making and planning to make a city smart, by leveraging on Key Performance Indicators (KPIs) as procedural tools. However, developing processes and instruments able to evaluate smart cities is still a challenging task, due to the rigidity showed by the existing frameworks in the definition of KPIs and modeling of the subjects to be evaluated. Web-based platforms, spreadsheets or even Cloud-based applications offer limited flexibility, if the stakeholder is interested not only in using but also in defining the pieces of the puzzle to be composed. In this paper we present a flexible architecture supporting a model-driven approach for the KPIs assessment in smart cities. 
It identifies both required and optional components and functionalities needed for realizing the automatic KPIs assessment, while showing flexibility points allowing for different specification of the architecture, thus of the overall methodology.}, keywords = {Flexible Architecture, Key Performance, Smart Cities}, pubstate = {published}, tppubtype = {inproceedings} } @article{WortmannBCW20, title = {Modeling languages in Industry 4.0: an extended systematic mapping study}, author = {Andreas Wortmann and Olivier Barais and Benoît Combemale and Manuel Wimmer}, url = {https://doi.org/10.1007/s10270-019-00757-6}, doi = {10.1007/s10270-019-00757-6}, year = {2020}, date = {2020-09-18}, urldate = {2020-09-18}, journal = {Journal of Software and Systems Modeling}, volume = {19}, number = {1}, pages = {67-94}, abstract = {Industry 4.0 integrates cyber-physical systems with the Internet of Things to optimize the complete value-added chain. Successfully applying Industry 4.0 requires the cooperation of various stakeholders from different domains. Domain-specific modeling languages promise to facilitate their involvement through leveraging (domain-specific) models to primary development artifacts. We aim to assess the use of modeling in Industry 4.0 through the lens of modeling languages in a broad sense. Based on an extensive literature review, we updated our systematic mapping study on modeling languages and modeling techniques used in Industry 4.0 (Wortmann et al., Conference on model-driven engineering languages and systems (MODELS’17), IEEE, pp 281–291, 2017) to include publications until February 2018. Overall, the updated study considers 3344 candidate publications that were systematically investigated until 408 relevant publications were identified. Based on these, we developed an updated map of the research landscape on modeling languages and techniques for Industry 4.0. 
Research on modeling languages in Industry 4.0 focuses on contributing methods to solve the challenges of digital representation and integration. To this end, languages from systems engineering and knowledge representation are applied most often but rarely combined. There also is a gap between the communities researching and applying modeling languages for Industry 4.0 that originates from different perspectives on modeling and related standards. From the vantage point of modeling, Industry 4.0 is the combination of systems engineering, with cyber-physical systems, and knowledge engineering. Research currently is splintered along topics and communities and accelerating progress demands for multi-disciplinary, integrated research efforts.}, keywords = {Industry 4.0, modeling languages}, pubstate = {published}, tppubtype = {article} } @article{LeroyBWMCS20, title = {Behavioral interfaces for executable DSLs}, author = {Dorian Leroy and Erwan Bousse and Manuel Wimmer and Tanja Mayerhofer and Benoît Combemale and Wieland Schwinger}, url = {https://link.springer.com/content/pdf/10.1007/s10270-020-00798-2.pdf}, doi = {10.1007/s10270-020-00798-2}, year = {2020}, date = {2020-09-18}, urldate = {2020-09-18}, journal = {Journal of Software and Systems Modeling}, volume = {19}, number = {4}, pages = {1015-1043}, abstract = {Executable domain-specific languages (DSLs) enable the execution of behavioral models. While an execution is mostly driven by the model content (e.g., control structures), many use cases require interacting with the running model, such as simulating scenarios in an automated or interactive way, or coupling the model with other models of the system or environment. The management of these interactions is usually hardcoded into the semantics of the DSL, which prevents its reuse for other DSLs and the provision of generic interaction-centric tools (e.g., event injector). 
In this paper, we propose a metalanguage for complementing the definition of executable DSLs with explicit behavioral interfaces to enable external tools to interact with executed models in a unified way. We implemented the proposed metalanguage in the GEMOC Studio and show how behavioral interfaces enable the realization of tools that are generic and thus usable for different executable DSLs.}, keywords = {behavioral interfaces, Domain-Specific Languages}, pubstate = {published}, tppubtype = {article} } @article{WolnyMCGW20, title = {Thirteen years of SysML: a systematic mapping study}, author = {Sabine Wolny and Alexandra Mazak-Huemer and Christine Carpella and Verena Geist and Manuel Wimmer}, url = {https://doi.org/10.1007/s10270-019-00735-y}, doi = {10.1007/s10270-019-00735-y}, year = {2020}, date = {2020-09-18}, urldate = {2020-09-18}, journal = {Journal of Software Systems Modeling}, volume = {19}, number = {1}, pages = {111-169}, abstract = {The OMG standard Systems Modeling Language (SysML) has been on the market for about thirteen years. This standard is an extended subset of UML providing a graphical modeling language for designing complex systems by considering software as well as hardware parts. Over the period of thirteen years, many publications have covered various aspects of SysML in different research fields. The aim of this paper is to conduct a systematic mapping study about SysML to identify the different categories of papers, (i) to get an overview of existing research topics and groups, (ii) to identify whether there are any publication trends, and (iii) to uncover possible missing links. We followed the guidelines for conducting a systematic mapping study by Petersen et al. (Inf Softw Technol 64:1–18, 2015) to analyze SysML publications from 2005 to 2017. 
Our analysis revealed the following main findings: (i) there is a growing scientific interest in SysML in the last years particularly in the research field of Software Engineering, (ii) SysML is mostly used in the design or validation phase, rather than in the implementation phase, (iii) the most commonly used diagram types are the SysML-specific requirement diagram, parametric diagram, and block diagram, together with the activity diagram and state machine diagram known from UML, (iv) SysML is a specific UML profile mostly used in systems engineering; however, the language has to be customized to accommodate domain-specific aspects, (v) related to collaborations for SysML research over the world, there are more individual research groups than large international networks. This study provides a solid basis for classifying existing approaches for SysML. Researchers can use our results (i) for identifying open research issues, (ii) for a better understanding of the state of the art, and (iii) as a reference for finding specific approaches about SysML.}, keywords = {SysML}, pubstate = {published}, tppubtype = {article} } @mastersthesis{Lehner2020, title = {Model-based Detection of Runtime Inconsistencies}, author = {Co-Advisor: DI Sabine Wolny Advisor: Univ.-Prof. Mag. Dr. Manuel Wimmer Lehner Daniel}, url = {https://www.researchgate.net/publication/344403670_Model-based_Detection_of_Runtime_Inconsistencies}, doi = {10.13140/RG.2.2.35159.50080}, year = {2020}, date = {2020-09-01}, urldate = {2020-09-01}, address = {Fakultät für Informatik}, school = {TU Wien, Fakultät für Informatik}, abstract = {With an increasing dependency on software in more and more aspects of our lives, the information whether these software systems work as expected at any point in time becomes crucial. 
However, limitations of existing approaches lead to a loss of information about consistency between expected and actual behavior, especially after system deployment.(1) Full validation of software artifacts is often not possible any longer in modern software systems. Increasing complexity leads to an exploding number of execution possibilities,which can be infeasible to compute and check. (2) Software testing allows to overcome this challenge by allowing to check particular parts of a system, but with limitations when it comes to consistency after system deployment. (3) Runtime monitoring systems provide information on system errors after deployment, but the high manual effort required to set up such a system limits its application in practice. In this thesis, it is examined to which extent reusing existing model information can provide a solution to the challenges faced with the approaches mentioned above. Therefore, the Inconsistency Detection Based on Models (IDBoM) framework is implemented as a showcase how existing model information can be used for inconsistency checking. The IDBoM framework uses UML Activity Diagrams and UML Class Diagrams as a representation of the expected behavior of a system, and UML Sequence Diagrams as representation of its actual behavior after deployment. After the inconsistency checking process is triggered by a running system, a set of consistency rules is executed on these diagrams to retrieve a checking result. This checking result indicates whether the actual behavior of the checked system is consistent with its expected behavior. 
In order to facilitate the consistency between expected system behavior and existing models, a reusable solution for programatically interacting with models with the focus on usability is created as part of the IDBoM framework.The intention of this solution is to contribute to the implementation of use cases for reusing existing model information, to increase benefits of keeping models up to date.An evaluation of the implemented artifact shows that full automation of a model-based inconsistency checking process is possible, from process initiation to processing of the checking result. The covered inconsistency cases are comparable to software testing.Execution times of the implemented inconsistency checking process scale linearly for all tested parameters. Finally, the usability of model interactions is improved compared to existing solutions on all tested parameters. These results indicate that a model-based inconsistency checking process can be used to provide information about the correct behavior of a system after deployment}, keywords = {Model-based Detection}, pubstate = {published}, tppubtype = {mastersthesis} } @article{MartinezWC20, title = {Efficient plagiarism detection for software modeling assignments}, author = {Salvador Martinez and Manuel Wimmer and Jordi Cabot}, url = {https://doi.org/10.1080/08993408.2020.1711495}, doi = {10.1080/08993408.2020.1711495}, year = {2020}, date = {2020-08-06}, urldate = {2020-08-06}, journal = {Journal of Computer Science Education}, volume = {30}, number = {2}, pages = {187-215}, abstract = {Reports suggest plagiarism is a common occurrence in universities. While plagiarism detection mechanisms exist for textual artifacts, this is less so for non-code related ones such as software design artifacts like models, metamodels or model transformations. Objective: To provide an efficient mechanism for the detection of plagiarism in repositories of Model-Driven Engineering (MDE) assignments. 
Method: Our approach is based on the adaptation of the Locality Sensitive Hashing, an approximate nearest neighbor search mechanism, to the modeling technical space. We evaluate our approach on a real use case consisting of two repositories containing 10 years of student answers to MDE course assignments. Findings: We have found that: (i) effectively, plagiarism occurred on the aforementioned course assignments (ii) our tool was able to efficiently detect them. Implications: Plagiarism detection must be integrated into the toolset and activities of MDE instructors in order to correctly evaluate students.}, keywords = {Software Modeling}, pubstate = {published}, tppubtype = {article} } @article{LeroyJBWC20, title = {Runtime Monitoring for Executable DSLs}, author = {Dorian Leroy and Pierre Jeanjean and Erwan Bousse and Manuel Wimmel and Benoit Combemale}, url = {https://doi.org/10.5381/jot.2020.19.2.a6}, doi = {10.5381/jot.2020.19.2.a6}, year = {2020}, date = {2020-08-03}, urldate = {2020-08-03}, journal = {Journal of Object Technology}, volume = {19}, number = {2}, pages = {6:1-23}, abstract = {Runtime monitoring is a fundamental technique used throughout the lifecycle of a system for many purposes, such as debugging, testing, or live analytics. While runtime monitoring for general purpose programming languages has seen a great amount of research, developing such complex facilities for any executable Domain Specific Language (DSL) remains a challenging, reoccurring and error prone task. A generic solution must both support a wide range of executable DSLs (xDSLs) and induce as little execution time overhead as possible. Our contribution is a fully generic approach based on a temporal property language with a semantics tailored for runtime verification. Properties can be compiled to efficient runtime monitors that can be attached to any kind of executable discrete event model within an integrated development environment. 
Efficiency is bolstered using a novel combination of structural model queries and complex event processing. Our evaluation on 3 xDSLs shows that the approach is applicable with an execution time overhead of 121% (on executions shorter than 1s), to 79% (on executions shorter than 20s) making it suitable for model testing and debugging.}, keywords = {Domain-Specific Languages, runtime monitoring}, pubstate = {published}, tppubtype = {article} } @article{wimmer2020cb, title = {Efficient execution of ATL model transformations using static analysis and parallelism}, author = {Jesus Sanchez Cuadrao and Loli Burgueño and Manuel Wimmer and Antonio Vallecillo}, doi = {10.1109/TSE.2020.3011388}, year = {2020}, date = {2020-07-23}, urldate = {2020-07-23}, journal = {Journal of IEEE Transactions on Software Engineering}, volume = {48}, issue = {4}, pages = {1097-1114}, abstract = {Although model transformations are considered to be the heart and soul of Model Driven Engineering (MDE), there are still several challenges that need to be addressed to unleash their full potential in industrial settings. Among other shortcomings, their performance and scalability remain unsatisfactory for dealing with large models, making their wide adoption difficult in practice. This paper presents A2L, a compiler for the parallel execution of ATL model transformations, which produces efficient code that can use existing multicore computer architectures, and applies effective optimizations at the transformation level using static analysis. We have evaluated its performance in both sequential and multi-threaded modes obtaining significant speedups with respect to current ATL implementations. 
In particular, we obtain speedups between 2.32x and 38.28x for the A2L sequential version, and between 2.40x and 245.83x when A2L is executed in parallel, with expected average speedups of 8.59x and 22.42x, respectively.}, keywords = {ATL, MDE, Model Transformations}, pubstate = {published}, tppubtype = {article} } @inproceedings{Wally2020bb, title = {The Digital Twin as a Core Component for Industry 4.0 Smart Production Planning}, author = {Petr Novak and Jiri Vyskocil and Bernhard Wally}, url = {http://ifatwww.et.uni-magdeburg.de/ifac2020/media/pdfs/3228.pdf https://www.sciencedirect.com/science/article/pii/S2405896320336314}, doi = {10.1016/j.ifacol.2020.12.2865}, year = {2020}, date = {2020-07-17}, urldate = {2020-07-17}, booktitle = {21st IFAC World Congress, July 11-17, 2020, Berlin, Germany, virtual}, abstract = {Production systems that adhere the Industry 4.0 vision require new ways of control and integration of individual components, such as robots, transportation system shuttles or mobile platforms. This paper proposes a new production system control concept based on closing a feedback loop between a production planning system and a digital twin of the physical production system. The digital twin keeps up-to-date information about the current state of the physical production system and it is combined with the production planner utilizing arti ficial intelligence methods. Production recipes and concrete process instantiations are planned for each production order on-the-fly, based on the production system state retrieved form the digital twin. This approach provides a high exibility in terms of ability to add and to remove products as well as production resources. It also enables error recovery by re-planning the production if some failure happens. 
The proposed approach is tested and evaluated on an internally hosted Industry 4.0 testbed, which con firms its effciency and exibility.}, keywords = {Digital Twin, Flexible and Reconfigurable Manufacturing Systems, Intelligent Manufacturing Systems, Production Planning and Control}, pubstate = {published}, tppubtype = {inproceedings} } @phdthesis{Neubauer2020, title = {A Framework for Modernizing Domain-Specific Languages - From XML Schema to Consistency-Achieving Editors with Reusable Notations}, author = {Advisor: Univ. -Prof. Mag. Dr. Manuel Wimmer Neubauer Patrick}, url = {https://doi.org/10.34726/hss.2020.31485}, doi = {10.34726/hss.2020.31485}, year = {2020}, date = {2020-07-08}, address = {A-1040 Wien, Karlsplatz 13}, school = {TU Wien, Fakultät für Informatik}, abstract = {The introduction of Extensible Markup Language (XML) Schema Definitions (XSDs) represented a tremendous leap towards the design of domain-specific languages (DSLs) by enabling machine processibility of domain models conforming to formally described language grammar, i.e. capturing vocabulary and valid sentences. Consequently, it elevated the need for automating the creation and maintenance of dedicated and modern integrated development environments (IDEs) evading inherent XML limitations, such as rigid angle-bracket syntax, as well as enabling the support of valid domain model construction. Techniques and tools provided by model-driven engineering frameworks and language workbench frameworks offer elementary assistance during the initial implementation of a DSL. These frameworks, however, fail to automate DSL generation due to disparities exposed by the transformation and synthesis of XSDs, language grammars, and metamodels. Moreover, fundamental differences in the nature of language grammars and metamodels challenge the construction and application of language notations. 
Although language workbenches are capable of integrating seamlessly dedicated validators, context assistants, and quick fix providers into domain-specific IDEs, their implementation and maintenance still requires proficient language knowledge and development. This thesis contributes towards addressing the above-mentioned problems. First, it proposes an approach to generate automatically DSL grammars from XSD-based languages by bridging gaps in the transformations of structural components, and by enriching metamodels with structural constraints imposed by XSD restrictions. Second, it proposes an approach to generate automatically domain-specific IDEs with accurate validators, sensible context assistants, and cost-effective quick fix providers by employing search-based software engineering at runtime. Third, it proposes an approach to formulate domain structure-agnostic textual notations for modeling languages by decoupling representational from structural information in grammar definitions, and by providing a language style framework capable of generating grammars from arbitrary metamodels and style specifications. 
In order to demonstrate the usefulness of the proposed approaches, the developed prototypical implementations are evaluated based on a comprehensive industrial standard for the classification and description of products, a comparison with state-of-the-art language workbench frameworks, integration with model assistance tooling, and individual case studies such as cloud topology and orchestration modeling.}, keywords = {Domain-Specific Languages, Model-Driven Engineering, XMl Schema, Xtext}, pubstate = {published}, tppubtype = {phdthesis} } @inproceedings{wimmer2020g, title = {Modeling Smart Cities}, author = {Ludovico Iovino and Manuel Wimmer and Juri Di Rocco}, url = {http://ceur-ws.org/Vol-2707/moscpreface.pdf}, year = {2020}, date = {2020-07-01}, booktitle = {STAF 2020 Workshop Proceedings: 4th Workshop on Model-Driven Engineering for the Internet-of-Things, co-located with Software Technologies: Applications and Foundations federation of conferences (STAF 2020) Bergen, Norway, June 22-26, 2020.}, volume = {2707}, pages = {3}, abstract = {Making a city ”smart” is an emerging strategy to mitigate the problems generated by the urban population growth and rapid urbanization. Each city models the technological, organizational and policy aspects of that city, and for this reason a smart city is an interplay among technological, organizational and policy innovation. The complexity of inter actions not only in hardware and software involved, but also in the actors and processes interplay, makes the availability of systematic design processes a must.Model Driven Engineering (MDE) improves coordination between the various stakeholders, resulting in the qualitative production of software and other artifacts involved. MDE has been successfully used in businesses with a need for complex and error-proof software, such as companies operating in the high tech industry. 
Integrating MDE approaches in Smart City design processes may lead to more robust solutions.}, keywords = {Smart Cities}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{wimmer2020f, title = {Model-Driven Engineering for the Internet-of-Things}, author = {Federico Ciccozzi and Nikolas Ferry and Sebastian Mosser and Amor Solberg and Manuel Wimmer}, url = {http://ceur-ws.org/Vol-2707/mde4iotpreface.pdf}, year = {2020}, date = {2020-06-30}, booktitle = {STAF 2020 Workshop Proceedings: 4th Workshop on Model-Driven Engineering for the Internet-of-Things, co-located with Software Technologies: Applications and Foundations federation of conferences (STAF 2020) Bergen, Norway, June 22-26, 2020.}, volume = {2707}, pages = {3}, abstract = {A recent forecast from the International Data Corporation (IDC) envi-sions that 41 billion Internet-of-Things (IoT) endpoints will be in use by20251, representing great business opportunities. The next generation IoTsystems needs to perform distributed processing and coordinated behavioracross IoT, edge and cloud infrastructures, manage the closed loop fromsensing to actuation, and cope with vast heterogeneity, scalability and dy-namicity of IoT systems and their environments.}, keywords = {Model-Driven Engineering}, pubstate = {published}, tppubtype = {inproceedings} } @phdthesis{Wally2020, title = {Smart Manufacturing Systems: Model-Driven Integration of ERP and MOM}, author = {Co-Advisor: Univ. -Prof. Mag. Dr. Manuel Wimmer Advisor: a.o.Univ.-Prof. Mag. Dr. 
Christian Huemer Wally Bernhard}, url = {https://www.researchgate.net/publication/342419445_Smart_Manufacturing_Systems_Model-Driven_Integration_of_ERP_and_MOM}, year = {2020}, date = {2020-06-17}, urldate = {2020-06-17}, address = {Institute of Information Systems Engineering Business Informatics Group (BIG) Favoritenstraße 9-11/194-3, 1040 Vienna, Austria}, school = {TU Wien, Institute of Information Systems Engineering Business Informatics Group (BIG)}, abstract = {Automated production systems are following a general technological trend: increasingly complex products, combined with drastically reduced lot-sizes per product variant, as well as shorter response and production times are being demanded. In order to be able to meet these expectations, modern IT systems at all levels of the automation hierarchy are required: from business related software at the corporate management level, down to the programmable logic controllers at the field level. For a well-designed coupling of systems that are located at different levels, it is necessary to find, define, and implement clear data conversion mechanisms - this endeavor is also known as vertical integration. At the same time, it is necessary to automate the inter-organizational data exchange - an aspect of horizontal integration. In this thesis, we are recapitulating a selection of own contributions in the context of information integration for smart manufacturing systems. With respect to conceptual models we have been employing established industrial standards, in order to facilitate industrial application. We have conceptualized, implemented and tested a series of conceptual models, inter-model mappings and transformations. 
Our approaches and implementations have been successfully evaluated by a number of experiments and case studies and are therefore a contribution towards model-driven smart manufacturing systems.}, keywords = {Model-Driven}, pubstate = {published}, tppubtype = {phdthesis} } @inproceedings{FranchSOFGVW20, title = {Towards Integrating Data-Driven Requirements Engineering into the Software Development Process: A Vision Paper}, author = {Xavier Franch and Norbert Seyff and Marc Oriol and Samuel Fricker and Iris Groher and Michael Vierhauser and Manuel Wimmer}, editor = {Nazim H. Madhavji and Liliana Pasquale and Alessio Ferrari and Stefania Gnesi}, url = {https://doi.org/10.1007/978-3-030-44429-7_10}, doi = {10.1007/978-3-030-44429-7_10}, year = {2020}, date = {2020-06-10}, booktitle = {26th International Working Conference on Requirements Engineering: Foundation for Software Quality, REFSQ 2020, Pisa, Italy, March 24-27, 2020, was postponed}, volume = {12045}, pages = {135-142}, publisher = {Springer}, series = {Lecture Notes in Computer Science}, abstract = {[Context and motivation] Modern software engineering processes have shifted from traditional upfront requirements engineering (RE) to a more continuous way of conducting RE, particularly including data-driven approaches. [Question/problem] However, current research on data-driven RE focuses more on leveraging certain techniques such as natural language processing or machine learning than on making the concept fit for facilitating its use in the entire software development process. [Principal ideas/results] In this paper, we propose a research agenda composed of six distinct research directions. These include a data-driven RE infrastructure, embracing data heterogeneity, context-aware adaptation, data analysis and decision support, privacy and confidentiality, and finally process integration. Each of these directions addresses challenges that impede the broader use of data-driven RE. 
[Contribution] For researchers, our research agenda provides topics relevant to investigate. For practitioners, overcoming the underlying challenges with the help of the proposed research will allow to adopt a data-driven RE approach and facilitate its seamless integration into modern software engineering. For users, the proposed research will enable the transparency, control, and security needed to trust software systems and software providers.}, keywords = {Requirements Engineering, Software Development}, pubstate = {published}, tppubtype = {inproceedings} } @article{wimmer2020d, title = {A Hitchhiker's Guide to Model-Driven Engineering for Data-Centric Systems}, author = {Benoit Combemale and Jörg Kienzle and Gunter Mussbacher and Hyacinth Ali and Daniel Amyot and Mojtaba Bagherzadeh and Edouard Batot and Nelly Bencomo and Benjamin Benni and Jean-Michel Bruel and Jordi Cabot and Betty Cheng and Philippe Collet and Gregor Engels and Robert Heinrich and Jean-Marc Jézéquel and Anne Koziolek and Sébastien Mosser and Ralf Reussner and Houari Sahraoui and Rijul Saini and June Sallou and Serge Stinckwich and Eugene Syriani and Manuel Wimmer}, url = {https://hal.inria.fr/hal-02612087/file/ieeesw-moda-preprint.pdf}, doi = {10.1109/MS.2020.2995125}, year = {2020}, date = {2020-05-18}, urldate = {2020-05-18}, journal = {IEEE Software}, volume = {38}, issue = {4}, pages = {71--84}, abstract = {A broad spectrum of application domains are increasingly making use of heterogeneous and large volumes of data with varying degrees of humans in the loop. The recent success of Artificial Intelligence (AI) and, in particular, Machine Learning (ML) further amplifies the relevance of data in the development, maintenance, evolution, and execution management of systems built with model-driven engineering techniques. 
Applications include critical infrastructure areas such as intelligent transportation, smart energy management, public healthcare, and emergency and disaster management; many of these systems are considered socio-technical systems given the human, social, and organizational factors that must be considered during the system life-cycle [1]. This article introduces a conceptual reference framework – the Models and Data (MODA) framework – to support a data-centric and model-driven approach for the integration of heterogeneous models and their respective data for the entire life-cycle of socio-technical systems.}, keywords = {Model-Driven Engineering}, pubstate = {published}, tppubtype = {article} } @workshop{wally2020c, title = {ISA-95 und AutomationML für die Modellierung von Produktionsanlagen und -prozessen}, author = {Bernhard Wally}, year = {2020}, date = {2020-04-23}, booktitle = {Workshop der PLIM Gruppe des Prostep IVIP Vereins}, journal = {Workshop der PLIM Gruppe des Prostep IVIP Vereins}, abstract = {Hr. Wally stellt Grundlagen von ISA-95 und das Zusammenspiel von ISA-95 und AutomationML vor Diskussion/Kommentare • Spontan ergibt sich die Einschätzung, dass alles aus ISA mit AML abgebildet werden kann • Es müsste nicht aller Inhalt in einem einzigen AML file abgebildet sein, die Information ist auch in Bibliotheken auslagerbar/zerlegbar. • Aktuelles Ziel ist eine Implementierung mit AML • Die Audi und ZF Abläufe sollen in AML modelliert werden • Typische Herausforderung bei mehreren Daten Quellen/-Senken ist die eindeutige Beschreibung der Objekte.}, keywords = {AutomationML}, pubstate = {published}, tppubtype = {workshop} } @mastersthesis{wimmer2020i, title = {Eine Cloud-basierte Programmierumgebung für QML}, author = {Advisor: Univ. -Prof. Mag. Dr. 
Manuel Wimmer Korath Christoph}, url = {https://repositum.tuwien.at/handle/20.500.12708/79723}, year = {2020}, date = {2020-03-02}, urldate = {2020-03-02}, address = {A-1040 Wien, Karlsplatz 13}, school = {TU Wien, Fakultät für Informatik}, abstract = {Das Aufkommen von Cloud-Computing in den letzten Jahren hat die Softwareentwicklung nachhaltig verändert, denn die Einsatzmöglichkeiten der Cloud könnten vielfältiger nicht sein. Ob der Einsatz im Internet of Things, als Basis für eine Streaming-Plattform oder für andere rechenintensive Operationen, das Aufkommen der Cloud hat viele neue Lösungen ermöglicht. Bei all den Möglichkeiten der Cloud scheint es nur logisch, diese als Softwareentwickler und Softwareentwicklerin nicht nur für das eigene Produkt einzusetzen, sondern auch zum eigenen Vorteil während der Softwareentwicklung zu nutzen. Das Konzept hinter dieser Arbeit greift diesen Gedanken auf und versucht, dazu einen Teil der Aufgaben der Softwareentwicklung in der Cloud abzubilden. Dafür soll eine Cloud-basierte Entwicklungsumgebung für QML entstehen. Mit Hilfe dieser soll die Entwicklung von Applikationen für mobile Geräte ermöglicht werden, wobei die Entwicklungsschritte beginnend beim Schreiben der Applikation bis zur Auslieferung dieser über den Cloud-Service abgedeckt werden können. Durch die Einbindung der Cloud sollen lokale Installationen von Entwicklungssoftware verringert und auf längere Sicht obsolet gemacht werden. Durch den Wegfall der lokalen Entwicklungsumgebungen kann eine häufige Fehlerquelle eliminiert werden, die Installation der Software selbst. Fehlerquellen wie diese, die dazu führen können, dass Anwender und Anwenderinnen ein Entwicklungstool nicht effektiv einsetzen können, sind die eigentlichen Hintergründe dieser Arbeit. Das Ergebnis dieser Arbeit zeigt, wie eine funktionierende Alternative zu lokalen Entwicklungsumgebungen aussehen kann. 
Dazu wurde eine Cloud-basierte IDE entwickelt, welche in ein bereits zum Teil existierendes Cloud-System integriert wurde. In einer eingehenden Recherche wurden zuerst mögliche Optionen für eine adäquate Basis der IDE, die für die weitere Entwicklung eingesetzt werden sollte, abgewogen. Durch Einbringen von Änderungen und durch Einbinden von weiteren Ressourcen wurde die Anwendung GitLab in eine Cloud-basierte IDE umgewandelt. Die Funktionen des Cloud-basierten Ansatzes wurde gemeinsam mit Experten und Expertinnen einer Analyse unterzogen, welche auch dazu genutzt wurde, um mögliche Schwachstellen und Verbesserungspotential zu identifizieren.}, keywords = {cloud-computing, Language Server Protocol, QML, Software Development}, pubstate = {published}, tppubtype = {mastersthesis} } @mastersthesis{wimmer2020, title = {Portierung eines universellen IoT-basierten Software Stacks}, author = {Advisor: Univ. -Prof. Mag. Dr. Manuel Wimmer Mann Markus}, url = {https://repositum.tuwien.at/handle/20.500.12708/78411}, year = {2020}, date = {2020-03-02}, urldate = {2020-03-02}, address = {A-1040 Wien, Karlsplatz 13}, school = {TU Wien, Fakultät für Informatik}, abstract = {Derzeit keine Kurzfassung verfügbar.}, keywords = {cross-platform, embedded, IoT, portability, Qt, software strack}, pubstate = {published}, tppubtype = {mastersthesis} } @inproceedings{0001BB0TZW20, title = {Searching for Optimal Models: Comparing Two Encoding Approaches}, author = {Stefan John and Alexandru Burdusel and Robert Bill and Daniel Strüber and Gabriele Taentzer and Steffen Zschaler and Manuel Wimmer}, editor = {Michael Felderer and Wilhelm Hasselbring and Rick Rabiser and Reiner Jung}, url = {https://doi.org/10.18420/SE2020_30}, doi = {10.18420/SE2020_30}, year = {2020}, date = {2020-02-06}, booktitle = {Software Engineering 2020, Fachtagung des GI-Fachbereichs Softwaretechnik, 24.-28. 
Februar 2020, Innsbruck, Austria}, volume = {P-300}, pages = {101-103}, publisher = {Gesellschaft für Informatik e.V.}, series = {LNI}, abstract = {Search-Based Software Engineering (SBSE) is about solving software development problems by formulating them as optimisation problems. In the last years, combining SBSE and Model-Driven Engineering (MDE), where models and model transformations are treated as key artifacts in the development of complex systems, has become increasingly popular. While search-based techniques have often successfully been applied to tackle MDE problems, a recent line of research investigates how a model-driven design can make optimisation more easily accessible to a wider audience. In previous model-driven optimisation efforts, a major design decision concerns the way in which solutions are encoded. Two main options have been explored: a model-based encoding representing candidate solutions as models, and a rule-based encoding representing them as sequences of transformation rule applications. While both encodings have been applied to different use cases, no study has yet compared them systematically. To close this gap, we evaluate both approaches on a common set of optimization problems, investigating their impact on the optimization performance. 
Additionally, we discuss their differences, strengths, and weaknesses, laying the foundation for a knowledgeable choice of the right encoding for the right problem.}, keywords = {Encoding}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{FeldmannKMV20, title = {Managing Inter-Model Inconsistencies in Model-based Systems Engineering}, author = {Stefan Feldmann and Konstantin Kernschmidt and Manuel Wimmer and Birgit Vogel-Heuser}, editor = {Michael Felderer and Wilhelm Hasselbring and Rick Rabiser and Reiner Jung}, url = {https://dl.gi.de/handle/20.500.12116/31706}, doi = {10.18420/SE2020_29}, year = {2020}, date = {2020-02-06}, booktitle = {Software Engineering 2020, Fachtagung des GI-Fachbereichs Softwaretechnik, 24.-28. Februar 2020, Innsbruck, Austria}, volume = {300}, pages = {99--100}, publisher = {Gesellschaft für Informatik e.V.}, abstract = {This work summarizes our paper [Fe19] originally published in the Journal of Systems and Software in 2019 about a model-based inconsistency management approach.}, keywords = {Model-Based Systems Engineering}, pubstate = {published}, tppubtype = {inproceedings} } @phdthesis{bill2020, title = {Model Integration by Hybrid Model Virtualization}, author = {Bill, Robert}, note = {Advisor: Prof. Dr. Gerti Kappel; Co-Advisor: Univ.-Prof. Mag. Dr. Manuel Wimmer}, url = {https://resolver.obvsg.at/urn:nbn:at:at-ubtuw:1-137914 http://hdl.handle.net/20.500.12708/1067}, year = {2020}, date = {2020-01-31}, address = {A-1040 Wien, Karlsplatz 13}, school = {TU Wien, Fakultät für Informatik}, abstract = {Multiple teams working on a single system may each have different viewpoints, and thus, use different models. These models may have partly shared, unique, or interrelated information, requiring model integration. To work faster and in a more parallel way, temporary inconsistencies between multiple models may be accepted. However, shared information only edited by a single team could still be immediately made known globally. 
The two main approaches to model integration are model virtualization, i.e., deriving all models from a single source of truth and model synchronization, i.e., propagating changes between different materialized models. While model virtualization does not allow temporary inconsistencies between models, model synchronization may require storing duplicate information redundantly, even if only a single team is involved. Thus, this thesis combines model virtualization with model synchronization into a hybrid approach. A new model virtualization approach helps arbitrarily adding or subtracting models from a base model. The base model can be a single model, an intersection or union of multiple models, a modification of another base model, or a model derivation. As we can store arbitrary (user) changes to the base model without affecting it, we allow temporary inconsistencies and arbitrary changes to the base model, e.g., as a result of changing the derivations source model. Incompatible changes never require user intervention, but just cause semantic constraint violations in a newly defined synchronization model, which is valid if and only if all inter-model constraints including feature derivations are fulfilled. To produce quickfix suggestions in (textual) model editors, optimal model synchronization is regarded as finding an optimal synchronization model. For this optimization, both model finders and heuristic search is employed. Model derivations can be specified using a new basic model derivation language, which includes both derivation and synchronization constraints in a single model. This allows for pure derivation by not editing the derived model as well as pure synchronization by specifying constraints just for inter-model consistency, but not for derivation. This hybrid approach is feasible and can support use cases like editing multiple models simultaneously using virtualization. 
Our proposed model repair does significantly reduce the number of (synchronization) constraint violations and prevent new ones due to improved autocompletion as shown in our evaluation scenarios.}, keywords = {Ecore, MDE, Model Engineering, Model Virtualization, OCL, Xtext}, pubstate = {published}, tppubtype = {phdthesis} } @inproceedings{CombemaleW19, title = {Towards a Model-Based DevOps for Cyber-Physical Systems}, author = {Benoît Combemale and Manuel Wimmer}, editor = {Jean-Michel Bruel and Manuel Mazzara and Bertrand Meyer}, url = {https://doi.org/10.1007/978-3-030-39306-9_6}, doi = {10.1007/978-3-030-39306-9_6}, year = {2020}, date = {2020-01-21}, booktitle = {Software Engineering Aspects of Continuous Development and New Paradigms of Software Production and Deployment - Second International Workshop, DEVOPS 2019, Château de Villebrumier, France, May 6-8, 2019, Revised Selected Papers}, volume = {12055}, pages = {84-94}, publisher = {Springer}, series = {Lecture Notes in Computer Science}, abstract = {The emerging field of Cyber-Physical Systems (CPS) calls for new scenarios of the use of models. In particular, CPS require to support both the integration of physical and cyber parts in innovative complex systems or production chains, together with the management of the data gathered from the environment to drive dynamic reconfiguration at runtime or finding improved designs. In such a context, the engineering of CPS must rely on models to uniformly reason about various heterogeneous concerns all along the system life cycle. In the last decades, the use of models has been intensively investigated both at design time for driving the development of complex systems, and at runtime as a reasoning layer to support deployment, monitoring and runtime adaptations. However, the approaches remain mostly independent. 
With the advent of DevOps principles, the engineering of CPS would benefit from supporting a smooth continuum of models from design to runtime, and vice versa. In this vision paper, we introduce a vision for supporting model-based DevOps practices, and we infer the corresponding research roadmap for the modeling community to address this vision by discussing a CPS demonstrator.}, keywords = {Cyber-Physical Systems, DevOps}, pubstate = {published}, tppubtype = {inproceedings} } @mastersthesis{wimmer2020h, title = {A Framework for Execution-based Model Profiling}, author = {Patsuk-Boesch, Polina}, note = {Advisor: Univ.-Prof. Mag. Dr. Manuel Wimmer; Co-Advisor: DI Mag. Dr. Alexandra Mazak-Huemer}, url = {https://resolver.obvsg.at/urn:nbn:at:at-ubtuw:1-135392 http://hdl.handle.net/20.500.12708/1190}, year = {2020}, date = {2020-01-10}, address = {A-1040 Wien, Karlsplatz 13}, school = {TU Wien, Fakultät für Informatik}, abstract = {In Model-Driven Engineering (MDE) models are put in the center and used throughout the software development process in prescriptive ways. Although these prescriptive models are important during system implementation, descriptive models derived from runtime data offer valuable information in later phases of the system life cycle. Unfortunately, such descriptive models are only marginally explored in the field of MDE. Current MDE approaches mostly neglect the possibility to describe an existing and operating system using the information upstream from operations to design. To create a link between prescriptive and descriptive models, we propose a unifying framework for a combined but loosely-coupled usage of MDE approaches and process mining (PM) techniques. This framework embodies the execution-based model profiling as a continuous process to improve prescriptive models at design-time through runtime information. We provide an evaluation case study in order to demonstrate the feasibility and benefits of the introduced approach. 
In this case study we implement a prototype of our framework to register logs from a running system. The implemented prototype transforms the registered logs into XES-format for further processing and analysis via PM algorithms. We prove that the resulting model profiles are sufficient enough for runtime verification. Furthermore, we demonstrate the possibility to maintain model profiles for multiple concerns, such as functionality, performance and components interrelations, through the unifying framework.}, keywords = {Model Profiling, Model-Driven Engineering, Process Mining}, pubstate = {published}, tppubtype = {mastersthesis} } @article{Syriani2019BW19, title = {Domain-Specific Model Distance Measures}, author = {Eugene Syriani and Robert Bill and Manuel Wimmer}, url = {https://doi.org/10.5381/jot.2019.18.3.a3}, doi = {10.5381/jot.2019.18.3.a3}, year = {2019}, date = {2019-12-31}, booktitle = {Proceedings of the 12th International Conference on Model Transformations (ICMT 2019)}, journal = {Journal of Object Technology}, volume = {18}, number = {3}, pages = {1--19}, abstract = {A lot of research was invested in the last decade to develop differencing methods to identify the changes performed between two model versions. Typically, these changes are captured in an explicit difference model. However, less attention was paid to quantifying the distance between model versions. While different versions of a model may have the same amount of differences, their distance to the base model may be drastically different. Therefore, we present distance metrics for models. We provide a method to automatically generate tool support for computing domain-specific distance measures. We show the benefits of distance measures over model differences in the use case of searching for the explanation of model evolution in terms of domain-specific change operations. 
The results of our experiments show that using distance metrics outperforms common difference models techniques.}, keywords = {Domain-Specific}, pubstate = {published}, tppubtype = {article} } @inproceedings{NeubauerBKPW19, title = {Reusable Textual Notations for Domain-Specific Languages}, author = {Patrick Neubauer and Robert Bill and Dimitris S. Kolovos and Richard F. Paige and Manuel Wimmer}, editor = {Achim D. Brucker and Gwendal Daniel and Frédéric Jouault}, url = {http://ceur-ws.org/Vol-2513/paper6.pdf}, year = {2019}, date = {2019-12-31}, booktitle = {19th International Workshop in OCL and Textual Modeling (OCL 2019) co-located with IEEE/ACM 22nd International Conference on Model Driven Engineering Languages and Systems (MODELS 2019), Munich, Germany, September 16, 2019}, volume = {2513}, pages = {67-80}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, abstract = {Domain-specific languages enable concise and precise formalization of domain concepts and promote direct employment by domain experts. Therefore, syntactic constructs are introduced to empower users to associate concepts and relationships with visual textual symbols. Model-based language engineering facilitates the description of concepts and relationships in an abstract manner. However, concrete representations are commonly attached to abstract do-main representations, such as annotations in metamodels, or directly encoded into language grammar and thus introduce redundancy between metamodel elements and grammar elements. In this work, we propose an approach that enables autonomous development and maintenance of domain concepts and textual language notations in a distinctive and metamodel-agnostic manner by employing style models containing grammar rule templates and injection-based property selection. 
We provide an implementation and showcase the proposed notation-specification language in a comparison with state of the art practices during the creation of notations for an executable domain-specific modeling language based on the Eclipse Modeling Framework and Xtext.}, keywords = {Domain-Specific Languages, textual notation}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Wolny2019mdrsi, title = {Model-driven Runtime State Identification}, author = {Sabine Wolny and Alexandra Mazak and Manuel Wimmer and Christian Huemer}, url = {https://cdl-mint.se.jku.at/wp-content/uploads/2020/04/EMISA_2019.pdf https://cdl-mint.se.jku.at/case-study-artefacts-for-emisa-2019/}, year = {2019}, date = {2019-12-20}, booktitle = {Proceedings of the Conference on Digital Ecosystems of the Future: Methods, Techniques and Applications (EMISA) - EMISA Forum}, volume = {39}, number = {1}, pages = {29-44}, abstract = {With new advances such as Cyber-Physical Systems (CPS) and Internet of Things (IoT), more and more discrete software systems interact with continuous physical systems. State machines are a classical approach to specify the intended behavior of discrete systems during development. However, the actual realized behavior may deviate from those specified models due to environmental impacts, or measurement inaccuracies. Accordingly, data gathered at runtime should be validated against the specified model. A first step in this direction is to identify the individual system states of each execution of a system at runtime. This is a particular challenge for continuous systems where system states may be only identified by listening to sensor value streams. A further challenge is to raise these raw value streams on a model level for checking purposes. To tackle these challenges, we introduce a model-driven runtime state identification approach. 
In particular, we automatically derive corresponding time-series database queries from state machines in order to identify system runtime states based on the sensor value streams of running systems. We demonstrate our approach for a subset of SysML and evaluate it based on a case study of a simulated environment of a five-axes grip-arm robot within a working station.}, keywords = {Module 3}, pubstate = {published}, tppubtype = {inproceedings} } @article{Feldmann2019imi, title = {Managing Inter-Model Inconsistencies in Model-Based Systems Engineering: Application in Automated Production Systems Engineering}, author = {Stefan Feldmann and Konstantin Kernschmidt and Manuel Wimmer and Birgit Vogel-Heuser}, doi = {10.1016/j.jss.2019.03.060}, year = {2019}, date = {2019-12-02}, journal = {Journal of Systems and Software}, volume = {153}, pages = {105--134}, abstract = {To cope with the challenge of managing the complexity of automated production systems, model-based approaches are applied increasingly. However, due to the multitude of different disciplines involved in automated production systems engineering, e.g., mechanical, electrical, and software engineering, several modeling languages are used within a project to describe the system from different perspectives. To ensure that the resulting system models are not contradictory, the necessity to continuously diagnose and handle inconsistencies within and in between models arises. This article proposes a comprehensive approach that allows stakeholders to specify, diagnose, and handle inconsistencies in model-based systems engineering. In particular, to explicitly capture the dependencies and consistency rules that must hold between the disparate engineering models, a dedicated graphical modeling language is proposed. By means of this language, stakeholders can specify, diagnose, and handle inconsistencies in the accompanying inconsistency management framework. 
The approach is implemented based on the Eclipse Modeling Framework (EMF) and evaluated based on a demonstrator project as well as a small user experiment. First findings indicate that the approach is expressive enough to capture typical dependencies and consistency rules in the automated production system domain and that it requires less effort compared to manually developing inter-model inconsistency management solutions.}, keywords = {Model-Based Systems Engineering}, pubstate = {published}, tppubtype = {article} } @inproceedings{BousseW19, title = {Domain-Level Observation and Control for Compiled Executable DSLs}, author = {Erwan Bousse and Manuel Wimmer}, editor = {Marouane Kessentini and Tao Yue and Alexander Pretschner and Sebastian Voss and Loli Burgueno}, url = {https://www.se.jku.at/domain-level-observation-and-control-for-compiled-executable-dsls/}, doi = {10.1109/MODELS.2019.000-6}, year = {2019}, date = {2019-12-02}, urldate = {2019-12-02}, booktitle = {22nd ACM/IEEE International Conference on Model Driven Engineering Languages and Systems, MODELS 2019, Munich, Germany, September 15-20, 2019}, pages = {150-160}, publisher = {IEEE}, abstract = {Executable Domain-Specific Languages (DSLs) are commonly defined with either operational semantics (i.e., interpretation) or translational semantics (i.e., compilation). An interpreted DSL relies on domain concepts to specify the possible execution states and steps, which enables the observation and control of executions using the very same domain concepts. In contrast, a compiled DSL relies on a transformation to an arbitrarily different target language. This creates a conceptual gap, where the execution can only be observed and controlled through target domain concepts, to the detriment of experts or tools that only understand the source domain. 
To address this problem, we propose a language engineering architecture for compiled DSLs that enables the observation and control of executions using source domain concepts. The architecture requires the definition of the source domain execution steps and states, along with a feedback manager that translates steps and states of the target domain back to the source domain. We evaluate the architecture with two different compiled DSLs, and show that it does enable domain-level observation and control while increasing execution time by 2× in the worst observed case.}, keywords = {Domain-Specific Languages}, pubstate = {published}, tppubtype = {inproceedings} } @incollection{Mazak2019dbmde, title = {On the Need for Data-based Model-driven Engineering}, author = {Alexandra Mazak and Sabine Wolny and Manuel Wimmer}, editor = {Stefan Biffl and Matthias Eckhart and Arndt Lüder and Edgar R. Weippl}, doi = {10.1007/978-3-030-25312-7_5}, year = {2019}, date = {2019-11-25}, urldate = {2019-11-25}, booktitle = {Security and Quality in Cyber-Physical Systems Engineering, With Forewords by Robert M. Lee and Tom Gilb}, pages = {103-127}, publisher = {Springer}, chapter = {5}, abstract = {In order to deal with the increasing complexity of modern systems such as in software-intensive environments, models are used in many research fields as abstract descriptions of reality. On the one side, a model serves as an abstraction for a specific purpose, as a kind of “blueprint” of a system, describing a system’s structure and desired behavior in the design phase. On the other side, there are so-called runtime models providing real abstractions of systems during runtime, e.g., to monitor runtime behavior. Today, we recognize a discrepancy between the early snapshots and their real world correspondents. To overcome this discrepancy, we propose to fully integrate models from the very beginning within the lifecycle of a system. 
As a first step in this direction, we introduce a data-based model-driven engineering approach where we provide a unifying framework to combine downstream information from the model-driven engineering process with upstream information gathered during a system’s operation at runtime, by explicitly considering also a timing component. We present this temporal model framework step-by-step by selected use cases with increasing complexity.}, keywords = {Data-Driven Engineering, Model Profiling, Model Repository, Model-Driven Engineering, Sequence mining}, pubstate = {published}, tppubtype = {incollection} } @article{BurguenoCFKLMPP19, title = {Contents for a Model-Based Software Engineering Body of Knowledge}, author = {Loli Burgueno and Federico Ciccozzi and Michalis Famelis and Gerti Kappel and Leen Lambers and Sébastien Mosser and Richard F. Paige and Alfonso Pierantonio and Arend Rensink and Rick Salay and Gabriele Taentzer and Antonio Vallecillo and Manuel Wimmer}, url = {https://link.springer.com/article/10.1007/s10270-019-00746-9}, doi = {10.1007/s10270-019-00746-9}, year = {2019}, date = {2019-10-31}, urldate = {2019-10-31}, journal = {Journal of Software and Systems Modeling}, volume = {18}, number = {6}, pages = {3193-3205}, abstract = {Although Model-Based Software Engineering (MBE) is a widely accepted Software Engineering (SE) discipline, no agreed-upon core set of concepts and practices (i.e., a Body of Knowledge) has been defined for it yet. With the goals of characterizing the contents of the MBE discipline, promoting a global consistent view of it, clarifying its scope with regard to other SE disciplines, and defining a foundation for the development of educational curricula on MBE, this paper proposes the contents for a Body of Knowledge for MBE. 
We also describe the methodology that we have used to come up with the proposed list of contents, as well as the results of a survey study that we conducted to sound out the opinion of the community on the importance of the proposed topics and their level of coverage in the existing SE curricula.}, keywords = {Model-Based Software Engineering}, pubstate = {published}, tppubtype = {article} } @inproceedings{Wolny2019reverse, title = {Automatic Reverse Engineering of Interaction Models from System Logs}, author = {Sabine Wolny and Alexandra Mazak and Manuel Wimmer}, url = {https://cdl-mint.se.jku.at/case-study-artefacts-for-etfa-2019/}, doi = {10.1109/ETFA.2019.8869502}, year = {2019}, date = {2019-10-24}, booktitle = {Proceedings of the 24th IEEE Conference on Emerging Technologies and Factory Automation (ETFA), Zaragoza, Spain, September 10-13, 2019}, pages = {57-64}, publisher = {IEEE}, abstract = {Nowadays, software- as well as hardware systems produce log files that enable a continuous monitoring of the system during its execution. Unfortunately, such text-based log traces are very long and difficult to read, and therefore, reasoning and analyzing runtime behavior is not straightforward. However, dealing with log traces is especially needed in cases, where (i) the execution of the system did not perform as intended, (ii) the process flow is unknown because there are no records, and/or (iii) the design models do not correspond to its realworld counterpart. These facts cause that log data has to be prepared in a more user-friendly way (e.g., in form of graphical representations) and it takes that algorithms are needed for automatically monitoring the system’s operation, and for tracking the system components interaction patterns. For this purpose we present an approach for transforming raw sensor data logs to a UML or SysML sequence diagram in order to provide a graphical representation for tracking log traces in a time-ordered manner. 
Based on this sequence diagram, we automatically identify interaction models in order to analyze the runtime behavior of system components. We implement this approach as prototypical plug-in in the modeling tool Enterprise Architect and evaluate it by an example of a self-driving car.}, keywords = {Module 3}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Wally2019gsa, title = {Generating Structured AutomationML Models from IEC 62264 Information}, author = {Bernhard Wally and Laurens Lang and Rafal Wlodarski and Radek Sindelar and Christian Huemer and Alexandra Mazak and Manuel Wimmer}, url = {https://cdl-mint.se.jku.at/generating-structured-automationml/}, year = {2019}, date = {2019-09-26}, booktitle = {Proceedings of the 5th AutomationML PlugFest 2019}, abstract = {AutomationML provides a versatile modeling environment for the description of production systems. However, when starting a new AutomationML project, or when serializing existing data with the AutomationML format, there are no rules on how to structure these models in a meaningful way. In this work, we present an approach for structuring AutomationML models, based on the IEC 62264 standard. 
In our approach we are implementing the process of serializing IEC 62264 information declaratively, by leveraging the power of model transformations, as they are applied in the context of model-driven (software) engineering.}, keywords = {ATL, AutomationML, IEC 62264, ISA-95, Model Transformations, Model-Driven Engineering, Module 2}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Wurl2019exploring, title = {Exploring Robustness in a Combined Feature Selection Approach}, author = {Alexander Wurl and Andreas Falkner and Alois Haselböck and Alexandra Mazak and Peter Filzmoser}, editor = {Slimane Hammoudi and Christoph Quix and Jorge Bernardino}, doi = {10.5220/0007924400840091}, year = {2019}, date = {2019-09-18}, booktitle = {Proceedings of the 8th International Conference on Data Science, Technology and Applications, DATA 2019, Prague, Czech Republic, July 26-28, 2019}, pages = {84-91}, publisher = {SciTePress}, abstract = {A crucial task in the bidding phase of industrial systems is a precise prediction of the number of hardware components of specific types for the proposal of a future project. Linear regression models, trained on data of past projects, are efficient in supporting such decisions. The number of features used by these regression models should be as small as possible, so that determining their quantities generates minimal effort. The fact that training data are often ambiguous, incomplete, and contain outlier makes challenging demands on the robustness of the feature selection methods used. We present a combined feature selection approach: (i) iteratively learn a robust well-fitted statistical model and rule out irrelevant features, (ii) perform redundancy analysis to rule out dispensable features. In a case study from the domain of hardware management in Rail Automation we show that this approach assures robustness in the calculation of hardware components. 
}, internal-note = {Ist auch noch nicht online.}, keywords = {Module 3}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{wimmer2019c, title = {Design Science for Model-Driven Software and Systems Engineering}, author = {Manuel Wimmer}, url = {https://modelsconf19.org/?page_id=1933}, year = {2019}, date = {2019-09-17}, booktitle = {IEEE / ACM 22nd International Conference on Model Driven Engineering Languages and Systems (MODELS), Doctoral Symposium, September 15-20, 2019 Munich, Germany}, abstract = {Design Science is a well-suited methodology to perform research in Model-Driven Software and Systems Engineering (MDSE). In addition, MDSE may help in performing Design Science with systematic methods to reason about possible designs. In my talk, I will give hints how to combine these two fields in order to have a solid basis for conducting a PhD thesis.}, keywords = {Model-Driven}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{wimmer2019d, title = {Flexible Modeling by Prototype-based Languages and Inconsistency Management: Two Experiences from the Production System Domain}, author = {Manuel Wimmer}, url = {https://docs.google.com/document/d/1CvJeu1sl69g59fZrZ4vYMhMUIFiBl5rPe9D0D7CiyAg/edit#}, year = {2019}, date = {2019-09-17}, booktitle = {FlexMDE 2019 - 5th Flexible MDE Workshop Tuesday, September 17, 2019, Munich, Germany, ACM/IEEE 22nd International Conference on Model Driven Engineering Languages and Systems (MODELS 2019)}, abstract = {Prototype/Clone-based modeling is an alternative to Class/Object-based modeling. While providing a good level of flexibility in the modeling process, prototype-based modeling languages also come with their own challenges. How such languages may provide flexibility and at the same time some degree of consistency is the first part of my talk. In the second part, I will present some ongoing work on dealing with consistency requirements by managing inconsistencies. 
In both parts, explicitly modeling consistency requirements and how to deal with occurring inconsistencies is the key. Finally, I conclude with an outlook on future challenges for flexible modeling in the model-based systems engineering domain.}, keywords = {Flexible Modeling}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Wurl2019companion, title = {A Conceptual Design of a Digital Companion for Failure Analysis in Rail Automation}, author = {Alexander Wurl and Andreas Falkner and Alois Haselböck and Alexandra Mazak}, editor = {Jörg Becker and Dmitriy Novikov}, doi = {10.1109/CBI.2019.00073}, year = {2019}, date = {2019-08-27}, booktitle = {Proceedings of the 21st IEEE Conference on Business Informatics (CBI 2019), Moscow, Russia, July 15-17, 2019, Volume 1 - Research Papers}, pages = {578--583}, publisher = {IEEE}, abstract = {In Rail Automation, a crucial task in the maintenance phase comprises the process of failure analysis. Domain experts are often faced with various challenges in analyzing large data volumes which reveal highly complex data structures. However, finding causes for potential failures and deciding how to optimize or repair the system may be extensively time consuming. To this end, we propose the concept of a digital companion which serves as continuous assistant recommending optimizations. A sequence of different data analytics methods within the digital companion enables the domain expert to reasonably manage and control the process of failure analysis. 
In illustrative examples, we give insights in the workflow of a digital companion and discuss the application in the domain of Rail Automation.}, keywords = {Module 3}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Wally2019durative, title = {Production Planning with IEC 62264 and PDDL}, author = {Bernhard Wally and Jiri Vyskocil and Petr Novak and Christian Huemer and Radek Sindelar and Petr Kadera and Alexandra Mazak and Manuel Wimmer}, url = {https://ieeexplore.ieee.org/document/8972050}, doi = {10.1109/INDIN41052.2019.8972050}, year = {2019}, date = {2019-07-24}, urldate = {2019-07-24}, booktitle = {Proceedings of the 17th IEEE International Conference on Industrial Informatics (INDIN 2019)}, abstract = {Smart production systems need to be able to adapt to changing environments and market needs. They have to reflect changes in (i) the reconfiguration of the production systems themselves, (ii) the processes they perform or (iii) the products they produce. Manual intervention for system adaptation is costly and potentially error-prone. 
In this article, we propose a model-driven approach for the automatic generation and regeneration of production plans that can be triggered anytime a change in any of the three aforementioned parameters occurs.}, keywords = {Manufacturing Automation, Model-Driven Engineering, Module 2, Process Planning}, pubstate = {published}, tppubtype = {inproceedings} } @article{Wally2019fps, title = {Flexible Production Systems: Automated Generation of Operations Plans Based on ISA-95 and PDDL}, author = {Bernhard Wally and Jiri Vyskocil and Petr Novak and Christian Huemer and Radek Sindelar and Petr Kadera and Alexandra Mazak and Manuel Wimmer}, doi = {10.1109/LRA.2019.2929991}, issn = {2377-3766}, year = {2019}, date = {2019-07-22}, urldate = {2019-07-22}, journal = {Journal of IEEE Robotics and Automation Letters}, volume = {4}, number = {4}, pages = {4062-4069}, abstract = {Model-driven engineering (MDE) provides tools and methods for the manipulation of formal models. In this letter, we leverage MDE for the transformation of production system models into flat files that are understood by general purpose planning tools and that enable the computation of "plans", i.e., sequences of production steps that are required to reach certain production goals. 
These plans are then merged back into the production system model, thus enriching the formalized production system knowledge.}, keywords = {Flexible Production Systems, ISA-95, Model-Driven Engineering, Module 2, PDDL}, pubstate = {published}, tppubtype = {article} } @article{Vogel-HeuserFWB19, title = {Herausforderungen in der interdisziplinären Entwicklung von Cyber-Physischen Produktionssystemen}, author = {Birgit Vogel-Heuser and Cesare Fantuzzi and Manuel Wimmer and Markus Böhm and Alexander Fay}, doi = {10.1515/auto-2018-0144}, year = {2019}, date = {2019-07-04}, journal = {Automatisierungstechnik}, volume = {67}, number = {6}, pages = {445-454}, abstract = {Modellbasierte Systementwicklung hat bereits Anwendung in der industriellen Entwicklung einer Vielzahl technischer Systeme gefunden. Die Verwendung verschiedener Modelle, z. B. für mechanische, elektrotechnische und automatisierungstechnische Systemaspekte sowie deren Varianten und Versionen unterstützt interdisziplinäre Innovationen, führt jedoch zu vielen Herausforderungen. Eine davon ist die heterogene Modelllandschaft, die insbesondere von überlappenden, teilweise redundant modellierten Informationen geprägt ist. Zudem unterliegen Entwicklungs-, Produktions- und Serviceprozesse ständig internen sowie auch externen Entwicklungszyklen. Zur Bewältigung dieser Herausforderungen können verschiedene Methoden und Techniken eingesetzt werden. In diesem Beitrag werden einige dieser Ansätze hinsichtlich ihrer Vorteile und Grenzen untersucht, und zwar das Konsistenz- bzw. Inkonsistenzmanagement von gekoppelten Modellen im Engineering, das disziplin-übergreifende Management des Engineering-Workflows sowie die Bedeutung von Smart Data Ansätzen bzw. 
modellbasiertem Wissen.}, keywords = {CPPS, Cyber-Physical Systems}, pubstate = {published}, tppubtype = {article} } @article{Wally2019variability, title = {Modeling Variability and Persisting Configurations in OPC UA}, author = {Bernhard Wally and Christian Huemer and Alexandra Mazak and Manuel Wimmer and Radek Sindelar}, doi = {10.1016/j.procir.2019.03.003}, issn = {2212-8271}, year = {2019}, date = {2019-06-24}, journal = {Procedia CIRP}, volume = {81}, pages = {13-18}, abstract = {Variability is crucial in the design of many advanced goods and it is also receiving increasing attention in production systems engineering. Since OPC Unified Architecture plays an important role when it comes to standardized information exchange in modern production systems, it can be a melting pot for information from various engineering domains, such as product design and production engineering — thus, it is an ideal place to hold variability information of products and production systems alike. Based on an initial variability information model we propose additional concepts for the persisting of configurations.}, keywords = {Configuration, Feature Model, Information Model, Module 2, OPC UA, OPC Unified Architecture, Variability}, pubstate = {published}, tppubtype = {article} } @misc{Wimmer2019Standard, title = {Industrial robot meets digital twin}, author = {Manuel Wimmer}, url = {https://derstandard.at/2000103109784/Industrieroboter-trifft-digitalen-Zwilling}, year = {2019}, date = {2019-05-19}, abstract = {A platform for intelligent industrial production is created at the Kepler University Linz. The aim is to create a virtual model. The digital twin describes one of the fundamental visions behind a digitized production of the future. 
The physical world is doubled in a virtual counter-world.}, keywords = {Digital Twin}, pubstate = {published}, tppubtype = {misc} } @workshop{Wimmer2019se, title = {Software Evolution in Time and Space: Unifying Version and Variability Management}, author = {Manuel Wimmer}, url = {https://www.dagstuhl.de/en/program/calendar/semhp/?semnr=19191}, year = {2019}, date = {2019-05-10}, urldate = {2019-05-10}, booktitle = {Dagstuhl Seminar 19191}, abstract = {Modern software systems evolve rapidly and often need to exist in many variants. Consider the Linux kernel with its uncountable number of variants. Each variant addresses different requirements, such as runtime environments ranging from Android phones to large super-computers and server farms. At the same time, the Linux kernel frequently boasts new versions, managed by thousands of developers. Yet, software versions – resulting from evolution in time – and variants – resulting from evolution in space – are managed radically differently. Version management relies on a version control system (Git) and sophisticated workflows – concepts that have been developed for decades in the field of software configuration management (SCM).}, keywords = {Empirical Evaluation, Software Configuration Management, Software Product Lines, Variability Management, Versioning}, pubstate = {published}, tppubtype = {workshop} } @mastersthesis{peherstorfer2019, title = {BIM and blockchain; a decentralized solution for a change management workflow in construction projects}, author = {Peherstorfer, David}, note = {Advisor: Univ.-Prof. Mag. Dr. Manuel Wimmer; Co-Advisor: Univ.-Ass. DI Galina Paskaleva}, url = {https://resolver.obvsg.at/urn:nbn:at:at-ubtuw:1-125414 http://hdl.handle.net/20.500.12708/13791}, year = {2019}, date = {2019-04-10}, address = {A-1040 Wien, Karlsplatz 13}, school = {TU Wien, Fakultät für Informatik}, abstract = {There is a big potential for process optimizations, due to the digitalization gap in the construction business. 
New digital technologies, as the Building Information Modelling (BIM), are increasingly being adapted by the stakeholders in this area. On the other hand, blockchain is a very new and innovative technology domain which has grown immensely in the last several years, and where people are now trying to find the right use-cases. Especially, the even newer field of smart contract development has opened the door for a large amount of possible applications, where it is neither clear if these can actually be implemented as envisioned, nor if there is even a need for a decentralized solution at all. In a construction project, changes on BIM models are only to be approved by the appropriate stakeholder. Therefore, we have combined the BIM models, which are stored using a Git repository, with a release management workflow, which is realised as a smart contract on the Ethereum blockchain. This enables the workflow to be transparent, traceable and its results to be immutable. The goal of this work is to create a prototype and compare it to other (off-chain) solutions and to evaluate if an application of a combination of BIM and blockchain yields an advantage in terms of costs and security.}, keywords = {Blockchain; Smart Contracts, Building Information Modelling, Ethereum}, pubstate = {published}, tppubtype = {mastersthesis} } @mastersthesis{eigner2019, title = {From Modeling Languages to Query Languages: A Generative Approach}, author = {Eigner, Alexander}, note = {Advisor: Univ.-Prof. Mag. Dr. Manuel Wimmer}, url = {https://resolver.obvsg.at/urn:nbn:at:at-ubtuw:1-125136 http://hdl.handle.net/20.500.12708/13795}, year = {2019}, date = {2019-03-21}, address = {A-1040 Wien, Karlsplatz 13}, school = {TU Wien, Fakultät für Informatik}, abstract = {The utilization of models and approaches, such as MDE, in engineering fields grows in popularity, because models provide useful means for the data-exchange, testing, validation and code generation. Domain experts prefer to use DSMLs over GPMLs. 
GPMLs can be hard to understand, since they require a sufficient education in computer science-related fields and may not be easily applicable for the modeling of domain-specific artefacts. In contrast, DSMLs are tailored towards particular domains and can thus be easily understood and applied by domain-experts. In the process of the ongoing digitalization models constantly grow in size and complexity. Thus, the need for querying models, which are usually created with DSMLs, grows as well, whereat model querying is not only important for information retrieval, but can also provide powerful means for the testing and validation of large systems. Although many well-established model querying approaches already exist, they are usually meant to be used by IT-experts and ignore the need of users from utterly different engineering fields for easy-to-use query languages, who lack the necessary IT know-how. Besides that, users, who lack the knowledge about the DSML's metamodels, may run into metamodel-related pitfalls. In order to meet these needs, an EMF-based prototype has been developed in the course of this thesis based on Wieringa's Engineering Cycle , that generates the model querying language MQL automatically from the Ecore metamodel of a chosen DSML and provides means for the specification and execution of MQL queries. This approach should provide query languages that resemble the original DSMLs as much as possible, by reusing and modifying the DSML's original elements. This prototype also generates an XText grammar specification that will be used for writing MQL queries. A model-to-text generator translates MQL queries into equivalent Viatra VQL queries, which are then executed by the MQL's Viatra-based query engine. Hence, MQL firstly tries to be easily usable by domain experts and secondly helps users, who lack knowledge about the DSML's metamodel, by ``guiding'' them through the DSML's syntactical features. 
A literature survey has yielded only one related work that can be considered as relatively comparable to the approach of this thesis. This result emphasizes the novelty of this approach and the relatively little amount of attention that has been paid to the addressed domain expert's needs so far. The MQL prototype has been evaluated in terms of query execution time and usability against Viatra VQL. The evaluation of the execution times shows, that MQL's Viatra VQL code generator needs to be optimized, in order to allow MQL to benefit from the rapidness of its underlying Viatra VQL query engine. Furthermore, MQL achieved higher scores in the Usability evaluation than Viatra VQL regarding the effectiveness, efficiency and satisfiability.}, keywords = {EMF, Model Transformations, Model-Driven Engineering}, pubstate = {published}, tppubtype = {mastersthesis} } @article{Kessentini2019ammmce, title = {Automated metamodel/model co-evolution: A search-based approach}, author = {Wael Kessentini and Houari Sahraoui and Manuel Wimmer}, doi = {10.1016/j.infsof.2018.09.003}, issn = {0950-5849}, year = {2019}, date = {2019-02-00}, urldate = {2019-02-00}, journal = {Journal of Information and Software Technology}, volume = {106}, pages = {49-67}, abstract = {Context: Metamodels evolve over time to accommodate new features, improve existing designs, and fix errors identified in previous releases. One of the obstacles that may limit the adaptation of new metamodels by developers is the extensive manual changes that have been applied to migrate existing models. Recent studies addressed the problem of automating the metamodel/model co-evolution based on manually defined migration rules. The definition of these rules requires the list of changes at the metamodel level which are difficult to fully identify. Furthermore, different possible alternatives may be available to translate a metamodel change to a model change. Thus, it is hard to generalize these co-evolution rules. 
Objective: We propose an alternative automated approach for the metamodel/model co-evolution. The proposed approach refines an initial model instantiated from the previous metamodel version to make it as conformant as possible to the new metamodel version by finding the best compromise between three objectives, namely minimizing (i) the non-conformities with new metamodel version, (ii) the changes to existing models, and (iii) the textual and structural dissimilarities between the initial and revised models. Method: We formulated the metamodel/model co-evolution as a multi-objective optimization problem to handle the different conflicting objectives using the Non-dominated Sorting Genetic Algorithm II (NSGA-II) and the Multi-Objective Particle Swarm Optimization (MOPSO). Results: We evaluated our approach on several evolution scenarios extracted from different widely used metamodels. The results confirm the effectiveness of our approach with average manual correctness, precision and recall respectively higher than 91%, 88% and 89% on the different co-evolution scenarios. Conclusion: A comparison with our previous work confirms the out-performance of our multi-objective formulation.}, keywords = {Co-Evolution}, pubstate = {published}, tppubtype = {article} } @inproceedings{IsakovicBWRRDKG19, title = {Sensyml: Simulation Environment for large-scale IoT Applications}, author = {Haris Isakovic and Vanja Bisanovic and Bernhard Wally and Thomas Rausch and Denise Ratasich and Schahram Dustdar and Gerti Kappel and Radu Grosu}, url = {https://ieeexplore.ieee.org/document/8927756}, doi = {10.1109/IECON.2019.8927756}, year = {2019}, date = {2019-01-01}, urldate = {2019-01-01}, booktitle = {IECON 2019 - 45th Annual Conference of the IEEE Industrial Electronics Society}, pages = {3024--3030}, publisher = {IEEE}, abstract = {IoT systems are becoming an increasingly important component of the civil and industrial infrastructure. 
With the growth of these IoT ecosystems, their complexity is also growing exponentially. In this paper we explore the problem of testing and evaluating large scale IoT systems at design time. To this end we employ simulated sensors with the physical and geographical characteristics of real sensors. Moreover, we propose Sensyml, a simulation environment that is capable of generating big data from cyber-physical models and real-world data. To the best of our knowledge it is the first approach to use a hybrid integration of real and simulated sensor data, that is also capable of being integrated into existing IoT systems. Sensyml is a cloud based Infrastructure-as-a-Service (IaaS) system that enables users to test both functionality and scalability of their IoT applications.}, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{MazakTdSE2019, title = {Entwicklung einer domänenspezifischen Sprache zur Modellierung von Komponentenversionen und ihren Abhängigkeiten}, author = {Alexandra Mazak and Sabine Wolny and Manuel Wimmer and Daniel Siegl and Konrad Wieland}, year = {2019}, date = {2019-01-01}, urldate = {2019-01-01}, booktitle = {Tag des Systems Engineering 2019, TdSE 2019, Munich, Germany}, pages = {153--156}, publisher = {GfSE Verlag}, abstract = {Durch die Verschmelzung softwaretechnischer Komponenten mit mechanischen Komponenten erhöht sich der Komplexitätsgrad von Systemen zunehmend. Um dieser Herausforderung zu begegnen wird in unterschiedlichen Anwendungsbereichen die Modellierung zur Komplexitätsreduktion verstärkt eingesetzt. Die Herausforderung in der Modellierung ist jedoch, dass im Verlauf des Modellierungsprozesses unterschiedliche Komponenten eines Systems in verschiedenen Versionen, Varianten und Abhängigkeiten vorliegen. Ein offener Punkt in der Modellversionsverwaltung ist die bisher fehlende Nachverfolgbarkeit von Abhängigkeiten zwischen Komponentenmodellen über verschiedene Versionen hinweg. 
Dieser Umstand führt beispielsweise bei Versionsupgrades zu möglichen Konflikten. Der vorgestellte Ansatz zeigt, wie solche Abhängigkeiten mittels einer domänenspezifischen Sprache versionsübergreifend und nachvollziehbar modelliert werden können. Wir präsentieren diesen Ansatz gemeinsam mit unserem Projektpartner der LieberLieber Software GmbH am Beispiel eines Softwareupdates für den Controller eines elektrischen Autofensterhebers.}, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @proceedings{2019mde4iot, title = {Joint Proceedings of the Workshop on Model-Driven Engineering for the Internet of Things (MDE4IoT) \& of the Workshop on Interplay of Model-Driven and Component-Based Software Engineering (ModComp) Co-located with the IEEE/ACM 22nd International Conference on Model Driven Engineering Languages and Systems (MODELS 2019), Munich, Germany, September 15 and 17, 2019}, editor = {Nicolas Ferry and Antonio Cicchetti and Federico Ciccozzi and Arnor Solberg and Manuel Wimmer and Andreas Wortmann}, url = {http://ceur-ws.org/Vol-2442}, year = {2019}, date = {2019-01-01}, urldate = {2019-01-01}, volume = {2442}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, keywords = {IoT, Model-Driven Engineering}, pubstate = {published}, tppubtype = {proceedings} } @proceedings{2019pnse, title = {Proceedings of the International Workshop on Petri Nets and Software Engineering (PNSE 2019), co-located with the 40th International Conference on Application and Theory of Petri Nets and Concurrency Petri Nets 2019 and the 19th International Conference on Application of Concurrency to System Design ACSD 2019 and the 1st IEEE International Conference on Process Mining Process Mining 2019, Aachen, Germany, June 23-28, 2019}, editor = {Daniel Moldt and Ekkart Kindler and Manuel Wimmer}, url = {http://ceur-ws.org/Vol-2424}, year = {2019}, date = {2019-01-01}, volume = {2424}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, keywords = 
{}, pubstate = {published}, tppubtype = {proceedings} } @inproceedings{WernerWA19, title = {A Generic Language for Query and Viewtype Generation By-Example}, author = {Christopher Werner and Manuel Wimmer and Uwe Aßmann}, editor = {Loli Burgueño and Alexander Pretschner and Sebastian Voss and Michel Chaudron and Jörg Kienzle and Markus Völter and Sébastien Gérard and Mansooreh Zahedi and Erwan Bousse and Arend Rensink and Fiona Polack and Gregor Engels and Gerti Kappel}, url = {https://ieeexplore.ieee.org/document/8904594}, doi = {10.1109/MODELS-C.2019.00059}, year = {2019}, date = {2019-01-01}, urldate = {2019-01-01}, booktitle = {22nd ACM/IEEE International Conference on Model Driven Engineering Languages and Systems Companion, MODELS Companion 2019, Munich, Germany, September 15-20, 2019}, pages = {379--386}, publisher = {IEEE}, abstract = {In model-driven engineering, powerful query/view languages exist to compute result sets/views from underlying models. However, to use these languages effectively, one must understand the query/view language concepts as well as the underlying models and metamodels structures. Consequently, it is a challenge for domain experts to create queries/views due to the lack of knowledge about the computer-internal abstract representation of models and metamodels. To better support domain experts in the query/view creation, the goal of this paper is the presentation of a generic concept to specify queries/views on models without requiring deep knowledge on the realization of modeling languages. The proposed concept is agnostic to specific modeling languages and allows the query/view generation by-example with a simple mechanism for filtering model elements. Based on this generic concept, a generic query/view language is proposed that uses role-oriented modeling for its non-intrusive application for specific modeling languages. 
The proposed language is demonstrated based on the role-based single underlying model (RSUM) approach for AutomationML to create queries/views by-example, and subsequently, associated viewtypes to modify the result set or view.}, keywords = {Workshop}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Wimmer19, title = {From Design-Time to Runtime and Back Again with Liquid Models}, author = {Manuel Wimmer}, editor = {Alessandra Bagnato and Hugo Brunelière and Loli Burgueño and Romina Eramo and Abel Gómez}, url = {http://ceur-ws.org/Vol-2405/05_invited.pdf}, year = {2019}, date = {2019-01-01}, urldate = {2019-01-01}, booktitle = {STAF 2019 Co-Located Events Joint Proceedings: 1st Junior Researcher Community Event, 2nd International Workshop on Model-Driven Engineering for Design-Runtime Interaction in Complex Systems, and 1st Research Project Showcase Workshop co-located with Software Technologies: Applications and Foundations (STAF 2019), Eindhoven, The Netherlands, July 15 - 19, 2019}, volume = {2405}, pages = {21--22}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, abstract = {Today, we recognize a discrepancy between design models concentrating on the desired behaviour of a system and its real world correspondents reflecting deviations taking place at runtime. In order to close this gap, design models must not be static, but evolutionary artefacts so to speak liquid models. Such liquid models are the cornerstone of our research project CDL-MINT (https://cdlmint.se.jku.at/) which is about the model-based continuous evolution of cyberphysical systems based on operational data gathered and analysed at runtime. In my talk, I will present some initial results of this project, in particular the liquid models architecture for linking design models with runtime concerns. I will also elaborate on the proposed technologies for the respective architectural layers and identify the research challenges ahead. 
}, keywords = {Workshop}, pubstate = {published}, tppubtype = {inproceedings} } @article{Burgueno2019quantities, title = {Specifying quantities in software models}, author = {Loli Burgueño and Tanja Mayerhofer and Manuel Wimmer and Antonio Vallecillo}, doi = {10.1016/j.infsof.2019.05.006}, issn = {0950-5849}, year = {2019}, date = {2019-00-00}, urldate = {2019-00-00}, journal = {Journal of Information and Software Technology}, volume = {113}, pages = {82-97}, abstract = {Context An essential requirement for the design and development of any engineering application that deals with real-world physical systems is the formal representation and processing of physical quantities, comprising both measurement uncertainty and units. Although solutions exist for several programming languages and simulation frameworks, this problem has not yet been fully solved for software models. Objective This paper shows how both measurement uncertainty and units can be effectively incorporated into software models, becoming part of their basic type systems. Method We introduce the main concepts and mechanisms needed for representing and handling physical quantities in software models. More precisely, we describe an extension of basic type Real, called Quantity, and a set of operations defined for the values of that type, together with a ready-to-use library of dimensions and units, which can be added to any modeling project. Results We show how our approach permits modelers to safely represent and operate with physical quantities, statically ensuring type- and unit-safe assignments and operations, prior to any simulation of the system or implementation in any programming language. Conclusion Our approach improves the expressiveness and type-safety of software models with respect to measurement uncertainty and units of physical quantities, and its effective use in modeling projects of physical systems. 
}, keywords = {Model-Based Engineering, Physical Quantities}, pubstate = {published}, tppubtype = {article} } @inproceedings{Wally2018IECAML, title = {IEC 62264-2 for AutomationML}, author = {Bernhard Wally and Christian Huemer and Alexandra Mazak and Manuel Wimmer}, url = {https://www.researchgate.net/publication/328095694_IEC_62264-2_for_AutomationML}, year = {2018}, date = {2018-10-25}, urldate = {2018-10-25}, booktitle = {Proceedings of the 5th AutomationML User Conference}, abstract = {IEC 62264-2 and AutomationML can co-exist as separate views on the same production system; the former with a slight bias towards the upper levels, the latter slightly biased towards the lower levels of the automation hierarchy. Still, there is quite some semantic and structural overlap between IEC 62264-2 and AutomationML. Therefore, a semantic and structural alignment of their entities on a metamodel and a model level seems appropriate. In this work, we will present such an alignment together with two combinable methods for integration: (i) tagging AutomationML elements with IEC 62264-2 roles and (ii) referencing external IEC 62264-2 data.}, keywords = {AutomationML, ISA-95}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Gomez2018temporalEMF, title = {TemporalEMF: A Temporal Metamodeling Framework}, author = {Abel Gómez and Jordi Cabot and Manuel Wimmer}, url = {https://modeling-languages.com/wp-content/uploads/2018/07/temporalEMF.pdf}, year = {2018}, date = {2018-10-25}, urldate = {2018-10-25}, booktitle = {Proceedings of the 37th International Conference on Conceptual Modeling (ER 2018)}, abstract = {Existing modeling tools provide direct access to the most current version of a model but very limited support to inspect the model state in the past. 
This typically requires looking for a model version (usually stored in some kind of external versioning system like Git) roughly corresponding to the desired period and using it to manually retrieve the required data. This approximate answer is not enough in scenarios that require a more precise and immediate response to temporal queries like complex collaborative co-engineering processes or runtime models. In this paper, we reuse well-known concepts from temporal languages to propose a temporal metamodeling framework, called TemporalEMF, that adds native temporal support for models. In our framework, models are automatically treated as temporal models and can be subjected to temporal queries to retrieve the model contents at different points in time. We have built our framework on top of the Eclipse Modeling Framework (EMF). Behind the scenes, the history of a model is transparently stored in a NoSQL database. We evaluate the resulting TemporalEMF framework with an Industry 4.0 case study about a production system simulator. The results show good scalability for storing and accessing temporal models without requiring changes to the syntax and semantics of the simulator.}, keywords = {EMF}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Wolny2018mde4iot, title = {An Initial Mapping Study on MDE4IoT}, author = {Sabine Wolny and Alexandra Mazak and Bernhard Wally}, url = {https://ceur-ws.org/Vol-2245/mde4iot_paper_6.pdf}, year = {2018}, date = {2018-10-24}, urldate = {2018-10-24}, booktitle = {Proceedings of the 2nd International Workshop on Model-Driven Engineering for the Internet-of-Things (MDE4IoT 2018)}, abstract = {The term “Internet of Things” (IoT) refers to a distributed network of physical objects and applications that create, transform and consume data. Due to the growing interest in digital transformation and Industry 4.0 topics, IoT is becoming more and more important. 
However, in order to correctly implement IoT concepts that are mostly highly complex, solutions and techniques must be provided to tackle a multitude of challenges such as heterogeneity, collaborative development, reusability of software artifacts, self-adaptation, etc. Model-Driven Engineering (MDE) uses the abstraction power of models to handle the complexity of systems and thus it may act as a key-enabler for IoT systems and applications. Therefore, we present an initial mapping study on the state-of-the-art in the field of MDE4IoT. This study aims to identify to which extent MDE techniques are currently being applied in the field of IoT, and which challenges are addressed.}, keywords = {IoT, Model-Driven Engineering, Module 2, Module 3}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Echeverria2018lsp, title = {Towards a Language Server Protocol Infrastructure for Graphical Modeling}, author = {R. Rodriguez-Echeverria and J. Canovas Izquierdo and Jordi Cabot and Manuel Wimmer}, url = {https://dl.acm.org/doi/10.1145/3239372.3239383}, doi = {10.1145/3239372.3239383}, year = {2018}, date = {2018-10-19}, urldate = {2018-10-19}, booktitle = {Proceedings of the ACM/IEEE 21st International Conference on Model Driven Engineering Languages and Systems (MODELS 2018)}, abstract = {In Model-Driven Engineering (MDE), models are often expressed following a graphical representation of their concepts and associations. MDE tooling allows developers to create models according to their graphical syntax and subsequently, generate code or other kind of models from them. However, the development of full-fledge graphical modeling tools is a challenging and complex task [18]. These tools usually address specific languages and platforms, as supporting multiple ones is not a viable option given the implementation and integration costs. 
Although the advantages of following the path defined by Language Server Protocol (LSP) are clear for IDE development aimed at graphical languages, currently the question about how to do it properly remains open as LSP has been defined without considering graphical languages. Basically, there is no scientific assessment or tool provider position on whether LSP provides enough expressiveness for graphical manipulations, whether it should be extended to support specific features of graphical edition or whether it would be best to ignore LSP in graphical modeling. Furthermore, LSP definition is still an ongoing work, thus it could be the right moment to suggest reasonable adaptations or extensions to provide support for graphical languages.}, keywords = {Language Server Protocol}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{DBLP:conf/models/KessentiniWS18, title = {Integrating the Designer in-the-loop for Metamodel/Model Co-Evolution via Interactive Computational Search}, author = {Wael Kessentini and Manuel Wimmer and Houari A. Sahraoui}, editor = {Andrzej Wasowski and Richard F. Paige and Øystein Haugen}, url = {https://doi.org/10.1145/3239372.3239375}, doi = {10.1145/3239372.3239375}, year = {2018}, date = {2018-10-19}, urldate = {2018-01-01}, booktitle = {Proceedings of the 21th ACM/IEEE International Conference on Model Driven Engineering Languages and Systems, MODELS 2018, Copenhagen, Denmark, October 14-19, 2018}, pages = {101--111}, publisher = {ACM}, abstract = {Metamodels evolve even more frequently than programming languages. This evolution process may result in a large number of instance models that are no longer conforming to the revised meta-model. On the one hand, the manual adaptation of models after the metamodels’ evolution can be tedious, error-prone, and time-consuming. On the other hand, the automated co-evolution of metamodels/models is challenging especially when new semantics is introduced to the metamodels. 
In this paper, we propose an interactive multi-objective approach that dynamically adapts and interactively suggests edit operations to developers and takes their feedback into consideration. Our approach uses NSGA-II to find a set of good edit operation sequences that minimizes the number of conformance errors, maximizes the similarity with the initial model (reduce the loss of information) and minimizes the number of proposed edit operations. The designer can approve, modify, or reject each of the recommended edit operations, and this feedback is then used to update the proposed rankings of recommended edit operations. We evaluated our approach on a set of metamodel/model coevolution case studies and compared it to fully automated coevolution techniques.}, keywords = {Co-Evolution}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Echeverria2018lspemf, title = {An LSP infrastructure to build EMF language servers for web-deployable model editors}, author = {Roberto Rodríguez-Echeverría and Javier Luis Cánovas Izquierdo and Manuel Wimmer and Jordi Cabot}, editor = {Regina Hebig and Thorsten Berger}, url = {http://ceur-ws.org/Vol-2245/mdetools_paper_3.pdf}, year = {2018}, date = {2018-10-15}, urldate = {2018-10-15}, booktitle = {Proceedings of MODELS 2018 Workshops: ModComp, MRT, OCL, FlexMDE, EXE, COMMitMDE, MDETools, GEMOC, MORSE, MDE4IoT, MDEbug, MoDeVVa, ME, MULTI, HuFaMo, AMMoRe, PAINS co-located with ACM/IEEE 21st International Conference on Model Driven Engineering Languages and Systems (MODELS 2018), Copenhagen, Denmark, October, 14, 2018}, volume = {2245}, pages = {326--335}, publisher = {CEUR-WS.org}, abstract = {The development of modern IDEs is still a challenging and time-consuming task, which requires implementing the support for language-specific features such as syntax highlighting or validation. 
When the IDE targets a graphical language, its development becomes even more complex due to the rendering and manipulation of the graphical notation symbols. To simplify the development of IDEs, the Language Server Protocol (LSP) proposes a decoupled approach based on language-agnostic clients and language-specific servers. LSP clients communicate changes to LSP servers, which validate and store language instances.
decades has forced the industry to use and investigate different development techniques to manage the many different aspects of the systems. In this context, the use of model driven engineering (MDE) has emerged and is now common practice for many engineering disciplines. However, this comes with important challenges. As set of main challenges relates to the fact that different modeling techniques, languages, and tools are required to deal with the different system aspects, and that support is required to ensure consistence and coherence between the different models. This paper identifies a number of the challenges and paints a roadmap on how tooling can support a multi-model integrated way of working.}, keywords = {Heterogeneous Modeling}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Ciccozzi2018bkmbse, title = {Towards a Body of Knowledge for Model-Based Software Engineering}, author = {Federico Ciccozzi and Michalis Famelis and Gerti Kappel and Leen Lambers and Sébastien Mosser and Richard F. Paige and Alfonso Pierantonio and Arend Rensink and Rick Salay and Gabi Taentzer and Antonio Vallecillo and Manuel Wimmer}, editor = {Önder Babur and Daniel Strüber and Silvia Abrahão and Loli Burgueño and Martin Gogolla and Joel Greenyer and Sahar Kokaly and Dimitris S. Kolovos and Tanja Mayerhofer and Mansooreh Zahedi}, url = {https://doi.org/10.1145/3270112.3270121}, doi = {10.1145/3270112.3270121}, year = {2018}, date = {2018-10-15}, urldate = {2018-10-15}, booktitle = {Proceedings of the 21st ACM/IEEE International Conference on Model Driven Engineering Languages and Systems: Companion Proceedings, MODELS 2018, Copenhagen, Denmark, October 14-19, 2018}, pages = {82--89}, publisher = {ACM}, abstract = {Model-based Software Engineering (MBSE) is now accepted as a Software Engineering (SE) discipline and is being taught as part of more general SE curricula. 
However, an agreed core of concepts, mechanisms and practices — which constitutes the Body of Knowledge of a discipline — has not been captured anywhere, and is only partially covered by the SE Body of Knowledge (SWEBOK). With the goals of characterizing the contents of the MBSE discipline, promoting a consistent view of it worldwide, clarifying its scope with regard to other SE disciplines, and defining a foundation for a curriculum development on MBSE, this paper provides a proposal for an extension of the contents of SWEBOK with the set of fundamental concepts, terms and mechanisms that should constitute the MBSE Body of Knowledge.
The survey is a preliminary effort in giving a structured overview on the state-of-the-practice within teaching modeling and model-driven engineering (from the point of view of the instructor).}, keywords = {Model-Driven Engineering}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Isakovic2018cpsiot, title = {CPS/IoT Ecosystem: a platform for research and education}, author = {Haris Isakovic and Denise Ratasich and Christian Hirsch and Michael Platzer and Bernhard Wally and Thomas Rausch and Dejan Nickovic and Willibald Krenn and Gerti Kappel and Schahram Dustdar and Radu Grosu}, url = {https://www.researchgate.net/publication/330542164_CPSIoT_Ecosystem_A_platform_for_research_and_education}, year = {2018}, date = {2018-10-05}, urldate = {2018-10-05}, booktitle = {Proceedings of the 14th Workshop on Embedded and Cyber-Physical Systems Education (WESE 2018)}, abstract = {The CPS/IoT Ecosystem project aims to build an IoT infrastructure that will be used as a platform for research and education in multiple disciplines related to CPS and IoT. The main objective is to provide a real-world infrastructure, and allow students and researchers explore its capabilities on actual use cases.}, keywords = {CPS, IoT, Module 2}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Kessentini2018ace, title = {Automated Co-Evolution of Metamodels and Transformation Rules: A Search-Based Approach}, author = {Wael Kessentini and Houari A. 
Sahraoui and Manuel Wimmer}, editor = {Thelma Elita Colanzi and Phil McMinn}, url = {https://doi.org/10.1007/978-3-319-99241-9_12}, doi = {10.1007/978-3-319-99241-9_12}, year = {2018}, date = {2018-09-09}, urldate = {2018-09-09}, booktitle = {Proceedings of the 10th Symposium on Search-Based Software Engineering (SBSE 2018), Montpellier, France, September 8-9, 2018}, volume = {11036}, pages = {229--245}, publisher = {Springer}, series = {Lecture Notes in Computer Science}, abstract = {Metamodels frequently change over time by adding new concepts or changing existing ones to keep track with the evolving problem domain they aim to capture. This evolution process impacts several depending artifacts such as model instances, constraints, as well as transformation rules. As a consequence, these artifacts have to be co-evolved to ensure their conformance with new metamodel versions. While several studies addressed the problem of metamodel/- model co-evolution3, the co-evolution of metamodels and transformation rules has been less studied. Currently, programmers have to manually change model transformations to make them consistent with the new metamodel versions which require the detection of which transformations to modify and how to properly change them. In this paper, we propose a novel search-based approach to recommend transformation rule changes to make transformations coherent with the new metamodel versions by finding a trade-off between maximizing the coverage of metamodel changes and minimizing the number of static errors in the transformation and the number of applied changes to the transformation. 
We implemented our approach for the ATLAS Transformation Language (ATL) and validated the proposed approach on four co-evolution case studies. We demonstrate the outperformance of our approach by comparing the quality of the automatically generated co-evolution solutions by NSGA-II with manually revised transformations, one mono-objective algorithm, and random search.
Manuel Wimmer Kletzander Christian}, url = {https://resolver.obvsg.at/urn:nbn:at:at-ubtuw:1-115837 http://hdl.handle.net/20.500.12708/7745}, year = {2018}, date = {2018-08-31}, address = {A-1040 Wien, Karlsplatz 13}, school = {TU Wien, Fakultät für Informatik}, abstract = {The interoperability for exchanging behavioural models between different tools in automation is only achieved by a small amount, which are supporting standardized import and export formats. There is no transformation framework existing for exchanging different behavioural models through a standardized kernel language. The literature describes several techniques to transform a modelling language into another pre-defined modelling language, but all of these are fixed to specific types of modelling languages and do not support the general exchange between any behavioural modelling language. In this thesis, I introduce a new technique that allows exchanging a small amount of behavioural modelling languages through a standardized kernel language based exchange framework. I am using the Intermediate Modelling Layer (IML) from the AutomationML consortium as a kernel language for exchanging activity-on-node networks (AONN) into GANTT charts and back. By doing a case study based evaluation the generated input and output models of the different behaviour modelling types are analysed for possible information loss after exchanging them. 
The round trip transformation from GANTT to AONN and back has no information loss, whereas AONN to GANTT and back loses information attributes like delay, latest start time point, earliest start time point and latest end time point.}, keywords = {AutomationML, Model Exchange, Transformations}, pubstate = {published}, tppubtype = {mastersthesis} } @inproceedings{Lueder2018mcr, title = {Modelling Consistency Rules within Production System Engineering}, author = {Arndt Lueder and Manuel Wimmer}, url = {https://ieeexplore.ieee.org/document/8560537}, doi = {10.1109/COASE.2018.8560537}, year = {2018}, date = {2018-08-24}, urldate = {2018-08-24}, booktitle = {Proceedings of the 14th International Conference on Automation Science and Engineering (CASE 2018)}, abstract = {The engineering of control systems is an essential part within the engineering of production systems cumulating various predecessor engineering activities. Therefore a high data quality of the predecessor activities has to be ensured especially avoiding inconsistencies between provided sets of engineering data. Within this paper, a methodology is sketched applicable to model engineering discipline crossing consistency rules to enable an automatic evaluation for consistency management. 
It is based on the use of AutomationML as production system modelling language but can be generalized to further modelling means.}, keywords = {Production System Engineering}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Wimmer2018amlaql, title = {From AutomationML to AutomationQL: A By-Example Query Language for CPPS Engineering Models}, author = {Manuel Wimmer and Alexandra Mazak}, url = {https://ieeexplore.ieee.org/abstract/document/8560448}, doi = {10.1109/COASE.2018.8560448}, year = {2018}, date = {2018-08-24}, urldate = {2018-08-24}, booktitle = {Proceedings of the 14th International Conference on Automation Science and Engineering (CASE 2018)}, abstract = {Model-based engineering is an emerging paradigm to deal with the complexity of multi-disciplinary engineering in CPPS projects. In such projects, different kinds of models are created during the lifecycle of a production system. AutomationML is a promising standard to provide a unifying format to represent and connect the different engineering models. Dedicated tool support has been developed for AutomationML in the last years to create and evolve models. However, when it comes to querying AutomationML models, implementation-related query languages have to be currently used. These languages have a certain complexity as they are not directly based on the concepts of AutomationML but on the underlying technological concepts and encodings of AutomationML. This often hinders the formulation of automatically executable queries by domain experts. In this paper, we propose a dedicated query language for AutomationML called Automation Query Language (AutomationQL) which is directly derived from AutomationML. Using this query language, queries can be defined in a by-example manner which allows engineers to formulate queries in terms of AutomationML concepts instead of switching to an implementation-oriented query language. 
We illustrate how AutomationQL is defined, how queries can be formulated as well as how tool support is provided to automatically evaluate the queries and represent their results. Finally, we contrast our solution with existing query languages and derive a roadmap for future research on AutomationQL.}, keywords = {AutomationML, CPPS}, pubstate = {published}, tppubtype = {inproceedings} } @mastersthesis{dopplinger2018, title = {Supporting Model Extensions in RubyTL}, author = {Co-Advisor: Univ. -Prof. Mag. Dr. Manuel Wimmer Advisor: O.Univ.-Prof. DI Mag. Dr. Gerti Kappel Dopplinger Marc}, url = {https://resolver.obvsg.at/urn:nbn:at:at-ubtuw:1-115693 http://hdl.handle.net/20.500.12708/5441}, year = {2018}, date = {2018-08-23}, address = {A-1040 Wien, Karlsplatz 13}, school = {TU Wien, Fakultät für Informatik}, abstract = {Model Engineering gets more important in software development because of the increasing use of models. At the same it is important the adapt and extend existing models. But this is sometimes not possible. For example the model was developed from somebody else or the model is used in another project and it is necessary to keep the compatibility. Therefore several lightweight extension mechanism have been developed. For example UML profile for UML diagrams or EMF profiles for standard diagrams of the Eclipse Modeling Framework (EMF). They allow to extend an already existing model without changing the original one. But unfortunately they have some drawbacks. Only a few transformation languages have a support for lightweight extensions and if they do only very basic. ATL can only access the profile with the underlying Java API. With RubyTL it is not possible to process profiles at all. This thesis covers the development of an extension which enables RubyTL to process EMF and UML profiles. Thereby should the extension be not integrated into the RubyTL code. This will be done with model processors. They integrate the profile into the existing model. 
Due to the circumstance that the profile is now a complete part of the diagram it is possible that the transformation language can access the stereotypes. Furthermore should it be possible to use the model processors for other transformation languages, like ATL. The goal is to enable the use of UML and EMF profiles also for other transformation languages. But they do not get integrated into the language. The model processors are used from a command line interface (CLI). The feasibility of the approach is demonstrated by using transformations to apply and read profile information in RubyTL and ATL. The resulting ATL transformations are also compared with ATL transformations using the basic ATL support based on the Java API for UML.}, keywords = {EMF Profiles, Ruby, Transformations, UML Profiles}, pubstate = {published}, tppubtype = {mastersthesis} } @inproceedings{Wally2018Rendenvous, title = {AutomationML, ISA-95 and Others: Rendezvous in the OPC UA Universe}, author = {Bernhard Wally and Christian Huemer and Alexandra Mazak and Manuel Wimmer}, url = {https://ieeexplore.ieee.org/document/8560600}, doi = {10.1109/COASE.2018.8560600}, year = {2018}, date = {2018-08-20}, urldate = {2018-08-20}, booktitle = {Proceedings of the 14th IEEE International Conference on Automation Science and Engineering (CASE 2018)}, abstract = {OPC Unified Architecture (UA) is a powerful and versatile platform for hosting information from a large variety of domains. In some cases, the domain-specific information models provide overlapping information, such as (i) different views on a specific entity or (ii) different levels of detail of a single entity. Emerging from a multi-disciplinary engineering process, these different views can stem from various tools that have been used to deal with that entity, or from different stages in an engineering process, e.g., from requirements engineering over system design and implementation to operations. 
In this work, we provide a small but expressive set of OPC UA reference types that unobtrusively allow the persistent instantiation of additional knowledge with respect to relations between OPC UA nodes. We will show the application of these reference types on the basis of a rendezvous of AutomationML and ISA-95 in an OPC UA server.}, keywords = {AutomationML, ISA-95, Module 2, OPC UA}, pubstate = {published}, tppubtype = {inproceedings} } @mastersthesis{proyer2018, title = {Transfer monitoring from University to Industry}, author = {Co-Advisor: DI Mag. Dr. Alexandra Mazak-Huemer Advisor: Ao.Univ.Prof. Mag. Dr. Christian Huemer Proyer Clemens}, url = {https://resolver.obvsg.at/urn:nbn:at:at-ubtuw:1-115769 http://hdl.handle.net/20.500.12708/5423}, year = {2018}, date = {2018-08-20}, address = {A-1040 Wien, Karlsplatz 13}, school = {TU Wien, Fakultät für Informatik}, abstract = {The measurement of the knowledge change of employees as well as the transfer is discussed in this thesis. Although these two terms are often used synonymously, there is a difference between them. Learning is adapting to a situation whereas transfer is applying the knowledge to similar situations. There are many approaches to measuring learning success or transfer, most of which originate in educational science. In this thesis we consider the special case of innovation courses, where there are further requirements that must be met. Unfortunately, the existing frameworks are not designed for these requirements and are therefore not sufficient. An innovation course is a long-term course in which employees of companies are taught in a certain topic. Such an innovation course consists of several modules for which both the measurement of learning success and knowledge transfer for the participants must take place. To achieve this and to make the measurements repeatable and objective, we have developed a framework. We use the Design Science Approach to develop the framework. 
However, the goal is not to create a static artefact that can only be applied to the course of our case study, but to design a framework that is also easily adaptable and applicable in other innovation courses or in a similar environment. To test and improve the framework, we use it in four modules of the DigiTrans 4.0 innovation course. For three of the four modules of our case study, the difference between the knowledge before the module and at the end is statistically significant. We also create linear models to explain or predict the transfer. The models are created with and without heteroscedasticity adjustment. The results of the models are slightly different, but show a common trend, which originates from the same background formula. Since these characteristics are known in the literature of knowledge transfer, the framework created is well suited for measuring the transfer.}, keywords = {framework, innovation course}, pubstate = {published}, tppubtype = {mastersthesis} } @manual{Wally2018AR, title = {Provisioning for MES and ERP - Support for IEC 62264-2 and B2MML}, author = {Bernhard Wally}, url = {https://publik.tuwien.ac.at/files/publik_276188.pdf}, year = {2018}, date = {2018-07-06}, urldate = {2018-07-06}, organization = {TU Wien, AutomationML e.V.}, keywords = {AutomationML, ISA-95, Model-Driven, OPC UA, Vertical Integration}, pubstate = {published}, tppubtype = {manual} } @inproceedings{Leroy2018tco, title = {Trace Comprehension Operators for Executable DSLs}, author = {Dorian Leroy and Erwan Bousse and A. Megna and Benoit Combemale and Manuel Wimmer}, doi = {10.1007/978-3-319-92997-2_19}, isbn = {978-3-319-92996-5}, year = {2018}, date = {2018-06-28}, booktitle = {Proceedings of the 14th European Conference on Modelling Foundations and Applications (ECMFA 2018)}, pages = {293-310}, abstract = {Recent approaches contribute facilities to breathe life into metamodels, thus making behavioral models directly executable. 
Such facilities are particularly helpful to better utilize a model over the time dimension, e.g., for early validation and verification. However, when even a small change is made to the model, to the language definition (e.g., semantic variation points), or to the external stimuli of an execution scenario, it remains difficult for a designer to grasp the impact of such a change on the resulting execution trace. This prevents accessible trade-off analysis and design-space exploration on behavioral models. In this paper, we propose a set of formally defined operators for analyzing execution traces. The operators include dynamic trace filtering, trace comparison with diff computation and visualization, and graph-based view extraction to analyze cycles. The operators are applied and validated on a demonstrative example that highlight their usefulness for the comprehension specific aspects of the underlying traces.}, keywords = {Executable DSL}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{wimmer2018, title = {Using Physical Quantities in Robot Software Models}, author = {Loli Burgueño and Tanja Mayerhofer and Manuel Wimmer and Antonio Vallecillo}, doi = {10.1145/3196558.3196562}, year = {2018}, date = {2018-05-28}, urldate = {2018-05-28}, booktitle = {Proceedings of the 1st International Workshop on Robotics Software Engineering (RoSE@ICSE 2018)}, pages = {23-28}, abstract = {One of the challenges of modeling any software application that deals with real-world physical systems resides in the correct representation of numerical values and their units. This paper shows how both measurement uncertainty and units can be effectively incorporated into software models, becoming part of their basic type systems, and illustrates this approach in the particular case of a robot language. 
We show how our approach allows robot modelers to safely represent and manipulate units and measurement uncertainties of the robots and their elements in a natural manner, statically ensuring unit-safe assignments and operations, as well as the propagation of uncertainty in the computations of derived attributes and operations. }, keywords = {Physical Quantities, Robot Software Models}, pubstate = {published}, tppubtype = {inproceedings} } @article{Ceravolo2018bds, title = {Big Data Semantics}, author = {Paolo Ceravolo and Antonia Azzini and Marco Angelini and Tiziana Catarci and Philippe Cudré-Mauroux and Ernesto Damiani and Alexandra Mazak and Maurice Van Keulen and Mustafa Jarrar and Giuseppe Santucci and Kai-Uwe Sattler and Monica Scannapieco and Manuel Wimmer and Robert Wrembel and Fadi Zaraket}, doi = {10.1007/s13740-018-0086-2}, issn = {1861-2040}, year = {2018}, date = {2018-05-23}, urldate = {2018-05-23}, journal = {Journal of Data Semantics}, volume = {7}, issue = {2}, pages = {65-85}, abstract = {Big Data technology has discarded traditional data modeling approaches as no longer applicable to distributed data processing. It is, however, largely recognized that Big Data impose novel challenges in data and infrastructure management. Indeed, multiple components and procedures must be coordinated to ensure a high level of data quality and accessibility for the application layers, e.g., data analytics and reporting. In this paper, the third of its kind co-authored by members of IFIP WG 2.6 on Data Semantics, we propose a review of the literature addressing these topics and discuss relevant challenges for future research. 
Based on our literature review, we argue that methods, principles, and perspectives developed by the Data Semantics community can significantly contribute to address Big Data challenges.}, keywords = {Big Data}, pubstate = {published}, tppubtype = {article} } @mastersthesis{plettenberg2018, title = {Frameworks for Distributed Big Data Processing: A Comparison in the Domain of Predictive Maintenance}, author = {Co-Advisor: DI Mag. Dr. Alexandra Mazak-Huemer Advisor: Univ.-Prof. Mag. Dr. Manuel Wimmer Plettenberg Rudolf}, editor = {Alex}, url = {https://resolver.obvsg.at/urn:nbn:at:at-ubtuw:1-116011 http://hdl.handle.net/20.500.12708/5431}, year = {2018}, date = {2018-04-16}, urldate = {2018-04-16}, address = {A-1040 Wien, Karlsplatz 13}, school = {TU Wien, Fakultät für Informatik}, abstract = {Predictive maintenance is a novel approach for making maintenance decisions, lowering maintenance costs, increasing a plants capacity and production volume, and positively affecting environmental and employee safety. In predictive maintenance, condition data of machines is constantly collected and analysed to predict future machine failures. Due to the high volume, velocity, and variety of gathered data, Big Data analytic frameworks are necessary to provide the desired results. The performance of these frameworks highly influences the overall performance of a predictive maintenance system, raising the need for tools to measure it. Benchmarks present such tools by defining general workloads for a system to measure its performance. Due to the wide popularity of Big Data analytics across industries, benchmarks for Big Data analytic frameworks are defined specifically for each domain. While there are currently many benchmarks available for other domains such as retail, social network, or search engines, there are none available for Big Data analytic frameworks in the application area of predictive maintenance. This thesis introduces the predictive maintenance benchmark (PMB). 
The PMB is a benchmark aimed at measuring the performance of Big Data analytic frameworks in the field of predictive maintenance. The data model and workload of the PMB represent typical tasks encountered by a predictive maintenance system. The PMB is implemented in the two most popular Big Data analytic ecosystems Hadoop and Spark and show Spark outperforming Hadoop in almost every task. For evaluation, findings gathered during implementation and execution of the PMB are analysed. Furthermore, the PMB results are validated against other studies comparing Hadoop and Spark.}, keywords = {Big Data, Machine Learning, Rasperry Pi}, pubstate = {published}, tppubtype = {mastersthesis} } @mastersthesis{detamble2018, title = {An Interactive Modeling Editor for QVT Relations}, author = {Advisor: Univ. -Prof. Mag. Dr. Manuel Wimmer Detamble Christian}, url = {https://resolver.obvsg.at/urn:nbn:at:at-ubtuw:1-109728 http://hdl.handle.net/20.500.12708/6109}, year = {2018}, date = {2018-03-27}, address = {A-1040 Wien, Karlsplatz 13}, school = {TU Wien, Fakultät für Informatik}, abstract = {Since its publication in 2008, Query/View/Transformation-Relations (QVTr) claims to be the standard model transformation language (MTL) for the declarative specification of model transformations, and has been used as an enabling formalism. In consideration of productivity being a central goal in MDE, it is vital for tools and editors to maximize the usability of their implementing MTL. However, taking into account the current state of the art in tools for QVTr, several shortcomings are revealed. First, the availability of matured tools is sparse, and furthermore, they have been developed with the goal to enable the underlying technology. Their design is not user-centered and, in particular, they lack from a poor level of automation and interactivity. 
In addition, we identify a lack of support for short feedback cycles, which significantly influences the usability of both the editor and implementing MTL. Finally, we consider the neglection of QVTr's concrete, graphical syntax in state of the art editors as unused potential for an increase in readability and traceability. In the context of this thesis, we shed light on the impact of an increase in interactivity, automation, readability, traceability, the usage of QVTr's graphical syntax, and of short feedback cycles on the usability of QVTr. For this purpose, we propose a theoretical concept comprising techniques to push the modeling process towards a user-centered approach. The underlying key principles of our concept comprise the so called outward modeling style, a suggestion-driven process, interactive graphical model visualizations and the enforcement of conventions. To show the feasibility of our approach, we conduct user experiments in an industrial context at the LieberLieber Software GmbH company in Vienna, using a prototypical implementation.}, keywords = {Model Transformations, Model-Driven Engineering}, pubstate = {published}, tppubtype = {mastersthesis} } @article{Wolny2018mdtsa, title = {Model-Driven Time-Series Analytics}, author = {Sabine Wolny and Alexandra Mazak and Manuel Wimmer and Rafael Konlechner and Gerti Kappel}, url = {https://www.academia.edu/82092068/Model_Driven_Time_Series_Analytics}, year = {2018}, date = {2018-02-27}, urldate = {2018-02-27}, journal = {International Journal of Conceptual Modeling}, volume = {13}, pages = {252-261}, abstract = {Tackling the challenge of managing the full life-cycle of systems requires a well-defined mix of approaches. While in the early phases model-driven approaches are frequently used to design systems, in the later phases data-driven approaches are used to reason on different key performance indicators of systems under operation. 
This immediately poses the question how operational data can be mapped back to design models to evaluate existing designs and to reason about future re-designs. In this paper, we present a novel approach for harmonizing model-driven and data-driven approaches. In particular, we introduce an architecture for time-series data management to analyse runtime properties of systems which is derived from design models. Having this systematic generation of time-series data management opens the door to analyse data through design models. We show how such data analytics is specified for modelling languages using standard metamodelling techniques and technologies.}, keywords = {Analytics, Model-Driven}, pubstate = {published}, tppubtype = {article} } @inproceedings{Mazak2017ebmp, title = {Execution-based Model Profiling}, author = {Alexandra Mazak and Manuel Wimmer and Polina Patsuk-Boesch}, doi = {10.1007/978-3-319-74161-1_3}, isbn = {978-3-319-74160-4}, year = {2018}, date = {2018-01-26}, urldate = {2018-01-26}, booktitle = {Post-Proceedings of the6th International Symposium on Data-Driven Process Discovery and Analysis}, volume = {307}, pages = {37-52}, publisher = {Springer International Publishing}, address = {Cham}, series = {Lecture Notes in Business Information Processing}, abstract = {In model-driven engineering (MDE), models are mostly used in prescriptive ways for system engineering. While prescriptive models are indeed an important ingredient to realize a system, for later phases in the systems’ lifecycles additional model types are beneficial to use. Unfortunately, current MDE approaches mostly neglect the information upstream in terms of descriptive models from operations to (re)design phases. To tackle this limitation, we propose execution-based model profiling as a continuous process to improve prescriptive models at design-time through runtime information. 
This approach incorporates knowledge in terms of model profiles from execution logs of the running system. To accomplish this, we combine techniques of process mining with runtime models of MDE. In the course of a case study, we make use of a traffic light system example to demonstrate the feasibility and benefits of the introduced execution-based model profiling approach. }, keywords = {Model Profiling, Module 3}, pubstate = {published}, tppubtype = {inproceedings} } @article{DBLP:journals/jss/BousseLCWB18, title = {Omniscient debugging for executable DSLs}, author = {Erwan Bousse and Dorian Leroy and Benoît Combemale and Manuel Wimmer and Benoit Baudry}, url = {https://doi.org/10.1016/j.jss.2017.11.025}, doi = {10.1016/j.jss.2017.11.025}, year = {2018}, date = {2018-01-01}, urldate = {2018-01-01}, journal = {Journal of Systems and Software}, volume = {137}, pages = {261--288}, abstract = {Omniscient debugging is a promising technique that relies on execution traces to enable free traversal of the states reached by a model (or program) during an execution. While a few General-Purpose Languages (GPLs) already have support for omniscient debugging, developing such a complex tool for any executable Domain Specific Language (DSL) remains a challenging and error prone task. A generic solution must: support a wide range of executable DSLs independently of the metaprogramming approaches used for implementing their semantics; be efficient for good responsiveness. Our contribution relies on a generic omniscient debugger supported by efficient generic trace management facilities. To support a wide range of executable DSLs, the debugger provides a common set of debugging facilities, and is based on a pattern to define runtime services independently of metaprogramming approaches. Results show that our debugger can be used with various executable DSLs implemented with different metaprogramming approaches. 
As compared to a solution that copies the model at each step, it is on average sixtimes more efficient in memory, and at least 2.2 faster when exploring past execution states, while only slowing down the execution 1.6 times on average.}, keywords = {}, pubstate = {published}, tppubtype = {article} } @inproceedings{Wimmer2018, title = {How do we teach modelling and model-driven engineering?: a survey}, author = {Federico Ciccozzi and Michalis Famelis and Gerti Kappel and Leen Lambers and Sébastien Mosser and Richard F. Paige and Alfonso Pierantonio and Arend Rensink and Rick Salay and Gabi Taentzer and Antonio Vallecillo and Manuel Wimmer}, editor = {Önder Babur and Daniel Strüber and Silvia Abrahão and Loli Burgueño and Martin Gogolla and Joel Greenyer and Sahar Kokaly and Dimitris S. Kolovos and Tanja Mayerhofer and Mansooreh Zahedi}, url = {https://doi.org/10.1145/3270112.3270129}, doi = {10.1145/3270112.3270129}, year = {2018}, date = {2018-01-01}, urldate = {2018-01-01}, booktitle = {Proceedings of the 21st ACM/IEEE International Conference on Model Driven Engineering Languages and Systems: Companion Proceedings, MODELS 2018, Copenhagen, Denmark, October 14-19, 2018}, pages = {122--129}, publisher = {ACM}, abstract = {Understanding the experiences of instructors teaching modelling and model-driven engineering is of great relevance to determining how MDE courses should be managed in terms of content, assessment, and teaching methods. In this paper, we report the results of a survey of 47 instructors in this field. Questions address course content, tools and technologies used, as well as positive and negative factors affecting learning outcomes. We analyse the results and summarise key findings with the potential of improving the state of teaching and learning practices. The survey is a preliminary effort in giving a structured overview on the state-of-the-practice within teaching modeling and model-driven engineering (from the point of view of the instructor). 
}, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{wimmer2018, title = {Interoperability and Integration in Future Production Systems}, author = {Christian Huemer and Gerti Kappel and Manuel Wimmer and Henderik A. Proper and Siegfried Reich and Wernher Behrendt and Stefan Thalmann and Georg Weichhart and Alois Zoitl}, editor = {Henderik A. Proper and Stefan Strecker and Christian Huemer and Christophe Feltus and Wided Guédria and Iván S. Razo-Zapata and Manel Brichni and David Rozier and Mikhail M. Komarov and Svetlana V. Maltseva and Sérgio Guerreiro}, url = {https://doi.org/10.1109/CBI.2018.10067}, doi = {10.1109/CBI.2018.10067}, isbn = {2378-1971}, year = {2018}, date = {2018-01-01}, urldate = {2018-01-01}, booktitle = {20th IEEE Conference on Business Informatics, CBI 2018, Vienna, Austria, July 11-14, 2018, Volume 2 - Research-in-Progress Papers and Workshops}, pages = {175-177}, publisher = {IEEE Computer Society}, abstract = {This panel discussion in the context of the IEEE International Conference of Business Informatics (CBI2018) focuses on topics that allow systems to interact and exchange information. Every system in this context has its own world model. Interactions between two systems will involve a partially shared model (including e.g. standard interfaces), and two detailed, private models.}, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @article{Mazak2018mbg, title = {Model-Based Generation of Run-Time Data Collection Systems Exploiting AutomationML}, author = {Alexandra Mazak and Arndt Lueder and Sabine Wolny and Manuel Wimmer and Dietmar Winkler and Ronald Rosendahl and H. Bayanifar and S. 
Biffl}, url = {https://www.degruyter.com/document/doi/10.1515/auto-2018-0022/html}, doi = {10.1515/auto-2018-0022}, year = {2018}, date = {2018-00-00}, urldate = {2018-00-00}, journal = {at - Automatisierungstechnik}, volume = {66}, pages = {819-833}, abstract = {Production system operators need support for collecting and pre-processing data on production systems consisting of several system components, as foundation for optimization and defect detection. Traditional approaches based on hard-coded programming of such runtime data collection systems take time and effort, and require both domain and technology knowledge. In this article, we introduce the AML-RTDC approach, which combines the strengths of AutomationML (AML) data modeling and model-driven engineering, to reduce the manual effort for realizing the run-time data collection (RTDC) system. We evaluate the feasibility of the AML-RTDC approach with a demonstration case about a lab-sized production system and a use case based on real-world requirements.}, keywords = {AutomationML}, pubstate = {published}, tppubtype = {article} } @article{Angel2018, title = {Automated modelling assistance by integrating heterogeneous information sources}, author = {Mora Segura Ángel and Juan Lara and Patrick Neubauer and Manuel Wimmer}, doi = {10.1016/j.cl.2018.02.002}, issn = {1477-8424}, year = {2018}, date = {2018-00-00}, journal = {Computer Languages, Systems & Structures}, volume = {53}, pages = {90-120}, abstract = {Model-Driven Engineering (MDE) uses models as its main assets in the software development process. The structure of a model is described through a meta-model. Even though modelling and meta-modelling are recurrent activities in MDE and a vast amount of MDE tools exist nowadays, they are tasks typically performed in an unassisted way. Usually, these tools cannot extract useful knowledge available in heterogeneous information sources like XML, RDF, CSV or other models and meta-models. 
We propose an approach to provide modelling and meta-modelling assistance. The approach gathers heterogeneous information sources in various technological spaces, and represents them uniformly in a common data model. This enables their uniform querying, by means of an extensible mechanism, which can make use of services, e.g., for synonym search and word sense analysis. The query results can then be easily incorporated into the (meta-)model being built. The approach has been realized in the Extremo tool, developed as an Eclipse plugin. Extremo has been validated in the context of two domains – production systems and process modelling – taking into account a large and complex industrial standard for classification and product description. Further validation results indicate that the integration of Extremo in various modelling environments can be achieved with low effort, and that the tool is able to handle information from most existing technological spaces.}, keywords = {Automated Modelling Assistance}, pubstate = {published}, tppubtype = {article} } @proceedings{Garrigos2018ctwe, title = {Current Trends in Web Engineering}, editor = {I. Garrigós and Manuel Wimmer}, doi = {10.1007/978-3-319-74433-9}, isbn = {978-3-319-74432-2}, year = {2018}, date = {2018-00-00}, urldate = {2018-00-00}, publisher = {Springer}, series = {Current Trends in Web Engineering - ICWE 2017 International Workshops, Liquid Multi-Device Software and EnWoT, practi-O-web, NLPIT, SoWeMine}, abstract = {This book constitutes the refereed thoroughly refereed post-workshop proceedings of the 17th International Conference on Web Engineering, ICWE 2017, held in Rome, Italy, in June 2017. The 24 revised full papers were selected from 34 submissions. 
The workshops complement the main conference, and explore new trends on core topics of Web engineering.}, keywords = {AI, cloud-computing, IoT, model-driven software engineering, Web Engineering}, pubstate = {published}, tppubtype = {proceedings} } @workshop{Draheim2018mlmt, title = {Multi-Level Model Transformation}, author = {D. Draheim and T. Holmes and Manuel Wimmer}, editor = {João Paulo A. Almeida and Ulrich Frank and Thomas Kühne}, url = {http://drops.dagstuhl.de/opus/volltexte/2018/8675/}, doi = {10.4230/DagRep.7.12.18}, issn = {2192-5283}, year = {2018}, date = {2018-00-00}, booktitle = {Dagstuhl Seminar 17492}, volume = {7}, number = {12}, publisher = {Schloss Dagstuhl - Leibniz-Zentrum fuer Informatik}, abstract = {This report documents the program and the outcomes of Dagstuhl Seminar 17492 "Multi-Level Modelling". This seminar brought together researchers and industry practitioners from the fields of conceptual modeling, ontologies, and formal foundations to discuss and share the benefits of Multi-Level Modelling (MLM), to develop an agreement on MLM terminology and scope, and to drive future research directions in MLM. Some foundational presentations were given by the seminar organizers to ground the discussions and provide an initial set of open questions which would lead to the formation of the working groups. In addition, six industry representatives gave talks explaining the needs, challenges, utility, and possible issues with adoption of MLM in industry. 
Based on the original seminar goals, the talks, and the resulting discussions, four working groups were established to investigate: the formal and ontological "Foundations"of MLM; promising "Applications" and potential evaluation criteria for MLM methods; the "Dynamic Aspects" of MLM, such as processes and behaviour; and, the use of and impact on "Model Transformations" in the context of MLM.}, keywords = {Metamodeling, Multi-Level Modeling}, pubstate = {published}, tppubtype = {workshop} } @inproceedings{Wolny2017, title = {Towards Continuous Behavior Mining}, author = {Sabine Wolny and Alexandra Mazak and Rafael Konlechner and Manuel Wimmer}, editor = {Paolo Ceravolo and Maurice Keulen and Kilian Stoffel}, url = {http://ceur-ws.org/Vol-2016/paper13.pdf}, issn = {1613-0073}, year = {2017}, date = {2017-12-07}, urldate = {2017-12-07}, booktitle = {Proceedings of the 7th International Symposium on Data-driven Process Discovery and Analysis (SIMPDA 2017), Neuchâtel, Switzerland, December 6-8 2017}, volume = {2016}, pages = {149-150}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, abstract = {With new advances in Cyber-Physical Systems (CPS) and Internet of Things (IoT), more and more discrete software controllers interact with continuous physical systems. Workflow models are a classical approach to define controllers. However, the effect of the associated actions that are activated by executing the workflow may not spontaneously be realized but have to be realized over time. Generally, behavioral model elements such as activities in workflow languages are displayed mostly as black box, meaning that it is not possible to trace variable changes over time in most of the classical modeling approaches. 
In this paper, we introduce an envisioned architecture to cope with this challenge.}, keywords = {Module 3, Sensor Values, Time-Series, Tracing Variable Changes}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Wally2017abs, title = {Aligning Business Services with Production Services: The Case of REA and ISA-95}, author = {Bernhard Wally and Christian Huemer and Alexandra Mazak}, doi = {10.1109/SOCA.2017.10}, year = {2017}, date = {2017-11-22}, booktitle = {Proceedings of the 10th IEEE International Conference on Service Oriented Computing and Applications (SOCA 2017)}, abstract = {"Industrie 4.0" aims at flexible production networks that require horizontal integration across companies. Evidently, any production related information exchanged in the network must be vertically forwarded to the corresponding service endpoints of the local production system. Accordingly, there is a need to align information that flows between companies and within each company. The Resource-Event-Agent (REA) business ontology describes a metamodel for internal business activities (e.g., production) and for inter-organizational exchange constellations on the enterprise resource planning (ERP) level. ISA-95 is a series of standards targeting the integration of enterprise control systems on the interface between ERP systems and manufacturing execution systems. Consequently, we align elements of REA and ISA-95 and define conversion rules for the transformation of elements from one system to the other. 
By interleaving the semantics of both standards, we formally strengthen the links between the services of the business level and the production level, and support multi-system adaptation in flexible production environments.}, keywords = {ISA-95, Module 3, REA, Vertical Integration}, pubstate = {published}, tppubtype = {inproceedings} } @article{Leymann01.1, title = {A Systematic Review of Cloud Modeling Languages}, author = {Alexander Bergmayr and Uwe Breitenbücher and Nicolas Ferry and Alessandro Rossini and Anor Solberg and Manuel Wimmer and Gerti Kappel and Frank Leymann}, url = {https://dl.acm.org/doi/10.1145/3150227}, doi = {10.1145/3150227}, year = {2017}, date = {2017-11-20}, urldate = {2017-11-20}, journal = {ACM Computing Surveys}, pages = {1-39}, abstract = {Modern cloud computing environments support a relatively high degree of automation in service provisioning, which allows cloud service customers (CSCs) to dynamically acquire services required for deploying cloud applications. Cloud modeling languages (CMLs) have been proposed to address the diversity of features provided by cloud computing environments and support different application scenarios, such as migrating existing applications to the cloud, developing new cloud applications, or optimizing them. There is, however, still much debate in the research community on what a CML is, and what aspects of a cloud application and its target cloud computing environment should be modeled by a CML. Furthermore, the distinction between CMLs on a fine-grain level exposing their modeling concepts is rarely made. In this article, we investigate the diverse features currently provided by existing CMLs. We classify and compare them according to a common framework with the goal to support CSCs in selecting the CML that fits the needs of their application scenario and setting. 
As a result, not only features of existing CMLs are pointed out for which extensive support is already provided but also in which existing CMLs are deficient, thereby suggesting a research agenda.}, keywords = {}, pubstate = {published}, tppubtype = {article} } @article{Wimmer11.1, title = {A Local and Global Tour on MOMoT}, author = {Robert Bill and Martin Fleck and Javier Troya and Tanja Mayerhofer and Manuel Wimmer}, url = {https://link.springer.com/article/10.1007/s10270-017-0644-3}, doi = {10.1007/s10270-017-0644-3}, year = {2017}, date = {2017-11-20}, urldate = {2017-11-20}, journal = {Journal of Software and Systems Modeling}, abstract = {Many model transformation scenarios require flexible execution strategies as they should produce models with the highest possible quality. At the same time, transformation problems often span a very large search space with respect to possible transformation results. Recently, different proposals for finding good transformation results without enumerating the complete search space have been proposed by using meta-heuristic search algorithms. However, determining the impact of the different kinds of search algorithms, such as local search or global search, on the transformation results is still an open research topic. In this paper, we present an extension to MOMoT, which is a search-based model transformation tool, for supporting not only global searchers for model transformation orchestrations, but also local ones. This leads to a model transformation framework that allows as the first of its kind multi-objective local and global search. By this, the advantages and disadvantages of global and local search for model transformation orchestration can be evaluated. This is done in a case-study-based evaluation, which compares different performance aspects of the local- and global-search algorithms available in MOMoT. 
Several interesting conclusions have been drawn from the evaluation: (1) local-search algorithms perform reasonable well with respect to both the search exploration and the execution time for small input models, (2) for bigger input models, their execution time can be similar to those of global-search algorithms, but global-search algorithms tend to outperform local-search algorithms in terms of search exploration, (3) evolutionary algorithms show limitations in situations where single changes of the solution can have a significant impact on the solution’s fitness.}, keywords = {}, pubstate = {published}, tppubtype = {article} } @inproceedings{Mazak2017tdse, title = {Sequence Pattern Mining: Automatisches Erkennen und Auswerten von Interaktionsmustern zwischen technischen Assets basierend auf SysML-Sequenzdiagrammen}, author = {Alexandra Mazak and Manuel Wimmer}, url = {https://www.hanser-elibrary.com/doi/epdf/10.3139/9783446455467.016 }, year = {2017}, date = {2017-11-09}, urldate = {2017-11-09}, booktitle = {Proceedings of Tag des Software Engineerings (TdSE 2017)}, address = {Paderborn}, abstract = {Mit Industrie 4.0 erhalten physische Systeme eine virtuelle Repräsentation um über das Internet der Dinge mit anderen Komponenten kommunizieren zu können. Dabei setzen I4.0-Systeme die Interaktionsfähigkeit der verwendeten Komponenten voraus. Wir zeigen wie SysML-Sequenzdiagramme als Teilmodelle der Verwaltungsschale von I4.0-Komponenten genutzt werden können, um den Nachrichtenaustausch zwischen Komponenten zu visualisieren und um daraus Interaktionsmuster zu erkennen. 
Bei diesem Vorgang, den wir „Sequence Pattern Mining“ nennen, werden Interaktionsmodelle erzeugt, um daraus das Laufzeitverhalten von Systemkomponenten analysieren zu können.}, keywords = {Module 3}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Wimmer23.1, title = {Virtual Textual Model Composition for Supporting Versioning and Aspect-Orientation}, author = {Robert Bill and Patrick Neubauer and Manuel Wimmer}, editor = {Benoît Combemale and Marjan Mernik and Bernhard Rumpe}, url = {https://doi.org/10.1145/3136014.3136037}, doi = {10.1145/3136014.3136037}, year = {2017}, date = {2017-10-20}, urldate = {2017-10-20}, booktitle = {Proceedings of the 10th ACM SIGPLAN International Conference on Software Language Engineering (SLE), Vancouver, BC, Canada, October 23-24 2017}, pages = {67-78}, publisher = {ACM}, abstract = {The maintenance of modern systems often requires developers to perform complex and error-prone cognitive tasks, which are caused by the obscurity, redundancy, and irrelevancy of code, distracting from essential maintenance tasks. Typical maintenance scenarios include multiple branches of code in repositories, which involves dealing with branch-interdependent changes, and aspects in aspect-oriented development, which requires in-depth knowledge of behavior-interdependent changes. Thus, merging branched files as well as validating the behavior of statically composed code requires developers to conduct exhaustive individual introspection. In this work we present VirtualEdit for associative, commutative, and invertible model composition. It allows simultaneous editing of multiple model versions or variants through dynamically derived virtual models. We implemented the approach in terms of an open-source framework that enables multi-version editing and aspect-orientation by selectively focusing on specific parts of code, which are significant for a particular engineering task. 
The VirtualEdit framework is evaluated based on its application to the most popular publicly available Xtext-based languages. Our results indicate that VirtualEdit can be applied to existing languages with reasonably low effort.}, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Leroy2017pacman, title = {Create and Play Your Pac-Man Game with the GEMOC Studio}, author = {Dorian Leroy and Manuel Wimmer and Erwan Bousse and Benoît Combemale and Wieland Schwinger}, editor = {Loli Burgueño and Jonathan Corley and Nelly Bencomo and Peter J. Clarke and Philippe Collet and Michalis Famelis and Sudipto Ghosh and Martin Gogolla and Joel Greenyer and Esther Guerra and Sahar Kokaly and Alfonso Pierantonio and Julia Rubin and Davide Di Ruscio}, url = {http://ceur-ws.org/Vol-2019/exe_1.pdf}, year = {2017}, date = {2017-09-18}, urldate = {2017-09-18}, booktitle = {Proceedings of MODELS 2017 Satellite Event: Workshops (ModComp, ME, EXE, COMMitMDE, MRT, MULTI, GEMOC, MoDeVVa, MDETools, FlexMDE, MDEbug), Posters, Doctoral Symposium, Educator Symposium, ACM Student Research Competition, and Tools and Demonstrations co-located with ACM/IEEE 20th International Conference on Model Driven Engineering Languages and Systems (MODELS 2017), Austin, TX, USA, September 17}, volume = {2019}, pages = {84--87}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, abstract = {Executable Domain-Specific Languages (DSLs) are used for defining the behaviors of systems. The operational semantics of such DSLs may define how conforming models react to stimuli from their environment. This commonly requires adapting the semantics to define both the possible domainlevel stimuli, and their handling during the execution. However, manually adapting the semantics for such cross-cutting concern is a complex and error-prone task. 
In this paper, we present an approach and a tool addressing this problem by augmenting the operational semantics for handling stimuli, and by automatically generating a complete behavioral language interface from this augmentation. At runtime, this interface can receive stimuli sent to models, and can safely handle them by interrupting the execution flow. This tool has been developed for the GEMOC Studio, a language and modeling workbench for executable DSLs. We demonstrate how it can be used to implement a Pac-Man DSL enabling the creation and execution of Pac-Man games. }, keywords = {Model-Driven}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Bousse2017dld, title = {Domain-level Debugging for Compiled DSLs with the GEMOC Studio}, author = {Erwan Bousse and Tanja Mayerhofer and Manuel Wimmer}, editor = {Loli Burgueño and Jonathan Corley and Nelly Bencomo and Peter J. Clarke and Philippe Collet and Michalis Famelis and Sudipto Ghosh and Martin Gogolla and Joel Greenyer and Esther Guerra and Sahar Kokaly and Alfonso Pierantonio and Julia Rubin and Davide Di Ruscio}, url = {http://ceur-ws.org/Vol-2019/mdebug_3.pdf}, year = {2017}, date = {2017-09-18}, urldate = {2017-09-18}, booktitle = {Proceedings of MODELS 2017 Satellite Event: Workshops (ModComp, ME, EXE, COMMitMDE, MRT, MULTI, GEMOC, MoDeVVa, MDETools, FlexMDE, MDEbug), Posters, Doctoral Symposium, Educator Symposium, ACM Student Research Competition, and Tools and Demonstrations co-located with ACM/IEEE 20th International Conference on Model Driven Engineering Languages and Systems (MODELS 2017), Austin, TX, USA, September 17}, volume = {2019}, pages = {457--459}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, abstract = {Executable Domain-Specific Languages (DSLs) are commonly defined with either operational semantics (i.e., interpretation) or translational semantics (i.e., compilation). 
An interpreted DSL relies on domain concepts to specify the possible execution states and steps of conforming models, which facilitates the observation and control of the execution using the very same domain concepts. In contrast, a compiled DSL relies on a transformation to an arbitrarily different executable target language, which creates a conceptual and technical gap between the considered domain and the target domain. In this tool demonstration paper, we present the implementation of our approach to supplement a compiled DSL with a feedback manager, which during execution translates execution steps and states of the target model back to the source domain. This enables the development and use of tools such as an omniscient debugger and a trace constructor for debugging compiled models. Our implementation was achieved for the GEMOC Studio, a language and modeling workbench that provides generic model debugging tools for interpreted DSLs. With our approach, these debugging tools can be also used for compiled DSLs. Our demonstration features the definition of a feedback manager for a subset of fUML that compiles to Petri nets. }, keywords = {Model-Driven}, pubstate = {published}, tppubtype = {inproceedings} } @article{Wimmer2017, title = {A feature-based survey of model view approaches}, author = {Hugo Bruneliere and Erik Burger and Jordi Cabot and Manuel Wimmer}, doi = {10.1007/s10270-017-0622-9}, year = {2017}, date = {2017-09-15}, urldate = {2017-09-15}, journal = {Software and Systems Modeling}, pages = {1--22}, abstract = {When dealing with complex systems, information is very often fragmented across many different models expressed within a variety of (modeling) languages. To provide the relevant information in an appropriate way to different kinds of stakeholders, (parts of) such models have to be combined and potentially revamped by focusing on concerns of particular interest for them. 
Thus, mechanisms to define and compute views over models are highly needed. Several approaches have already been proposed to provide (semi)automated support for dealing with such model views. This paper provides a detailed overview of the current state of the art in this area. To achieve this, we relied on our own experiences of designing and applying such solutions in order to conduct a literature review on this topic. As a result, we discuss the main capabilities of existing approaches and propose a corresponding research agenda. We notably contribute a feature model describing what we believe to be the most important characteristics of the support for views on models. We expect this work to be helpful to both current and potential future users and developers of model view techniques, as well as to any person generally interested in model-based software and systems engineering.}, keywords = {}, pubstate = {published}, tppubtype = {article} } @inproceedings{Wimmer2017cbvm, title = {Cardinality-Based Variability Modeling with AutomationML}, author = {Manuel Wimmer and Petr Novák and Radek Sindelár and Luca Berardinelli and Tanja Mayerhofer and Alexandra Mazak}, url = {https://doi.org/10.1109/ETFA.2017.8247711}, doi = {10.1109/ETFA.2017.8247711}, year = {2017}, date = {2017-09-13}, urldate = {2017-09-13}, booktitle = {Proceedings of the 22nd IEEE International Conference on Emerging Technologies and Factory Automation (ETFA 2017), Limassol, Cyprus, September 12-15 2017}, pages = {1--4}, publisher = {IEEE}, abstract = {Variability modeling is an emerging topic in the general field of systems engineering and, with current trends such as Industrie 4.0, it gains more and more interest in the domain of production systems. Therefore, it is not sufficient to describe systems in several specific cases, but instead families of systems have to be used. In this paper we introduce a role class library for AutomationML to explicitly represent variability. 
This allows to exchange not only system descriptions but also system family descriptions. We argue for a light-weight extension of AutomationML. The variability-based modeling approach is based on cardinalities, which is a well-known concept from conceptual modeling and feature modeling. Furthermore, we also show how instantiations of variability models can be validated by our EMF-based AutomationML workbench.}, keywords = {AutomationML, Module 2, Module 3, Variability}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Novak2017mbe, title = {Model-Based Engineering and Virtual Commissioning of Cyber-Physical Manufacturing Systems - Transportation System Case Study}, author = {Petr Novák and Petr Kadera and Manuel Wimmer}, url = {https://doi.org/10.1109/ETFA.2017.8247743}, doi = {10.1109/ETFA.2017.8247743}, year = {2017}, date = {2017-09-13}, urldate = {2017-09-13}, booktitle = {Proceedings of the 22nd IEEE International Conference on Emerging Technology and Factory Automation (ETFA 2017), Limassol, Cyprus, September 12-15 2017}, pages = {1--4}, publisher = {IEEE}, abstract = {Emerging manufacturing systems are becoming complex while their engineering and ramp-up phases have to be as short as possible in order to decrease the reaction time to new market demands as well as to minimize production line down-times causing financial losses. Since engineering knowledge is not shared satisfactorily, virtual commissioning of industrial plants is very complicated. This paper contributes to a better synthesis and analysis of manufacturing lines by integrating the simulation of manufacturing systems with the tool Siemens Plant Simulation and their engineering with Schmid P'X5 Configurator for Montratec. 
The proposed approach is based on the model-based techniques and it is demonstrated on a laboratory-scaled use-case showing its efficiency.}, keywords = {Case Study, Model-Based Engineering, Module 2, Virtual Commissioning}, pubstate = {published}, tppubtype = {inproceedings} } @mastersthesis{weghofer2017, title = {Moola - A Groovy-based Model Operation Orchestration Language}, author = {Advisor: Univ. -Prof. Mag. Dr. Manuel Wimmer Weghofer Stefan}, editor = {Manuel}, url = {https://resolver.obvsg.at/urn:nbn:at:at-ubtuw:1-112866 http://hdl.handle.net/20.500.12708/3416}, year = {2017}, date = {2017-09-13}, urldate = {2017-09-13}, address = {A-1040 Wien, Karlsplatz 13}, school = {TU Wien, Fakultät für Informatik}, abstract = {A fundamental part of Model-Driven Engineering (MDE) is the use of models and operations. Models represent information of a target system on varying levels of abstraction, while operations allow performing actions on one or more models, including model validation, model transformation, model merging, etc. In recent years, more and more such operations and languages to describe them were introduced to allow MDE to be applied to a wide spectrum of use cases. Today, many advanced scenarios can be expressed by MDE and the use of new operation languages. In every non-trivial project, multiple operations have to be executed in particular order to yield the final result. To orchestrate operations to so-called operation chain, tools and languages have been developed and included to development environments that help in defining complex operation chains and executing them whenever input models change. In this thesis, existing tools and languages for model operation orchestration are analyzed and compared against each other. Inspiration is taken from these tools and other domains, such as Build Management and Workflow Management, to create a new tool for describing operation chains, called Moola. 
Based on a feature list derived from real-life use cases, Moola is designed and later implemented as domain-specific language (DSL) on top of Groovy. Finally, Moola is evaluated against use cases taken from the ARTIST project.}, keywords = {Groovy, Model Operation Orchestration, Model-Driven Engineering, Moola, Operation Chains, Workflow Management}, pubstate = {published}, tppubtype = {mastersthesis} } @inproceedings{Novak2017ssm, title = {Slicing Simulation Models into Co-Simulations}, author = {Petr Novák and Manuel Wimmer and Petr Kadera}, editor = {Vladimír Marík and Wolfgang Wahlster and Thomas I. Strasser and Petr Kadera}, url = {https://doi.org/10.1007/978-3-319-64635-0_9}, doi = {10.1007/978-3-319-64635-0_9}, year = {2017}, date = {2017-08-28}, urldate = {2017-08-28}, booktitle = {Proceedings of the 8th International Conference on Industrial Applications of Holonic and Multi-Agent Systems (HoloMAS 2017), Lyon, France, August 28-30 2017}, volume = {10444}, pages = {111--124}, publisher = {Springer}, series = {Lecture Notes in Computer Science}, abstract = {The emerging generation of large-scale cyber-physical production systems, which represents a backbone of a trend denoted as Industrie 4.0, broadly adopts fundamentals laid by the multi-agent system paradigm. The joint roots of these concepts bring not only advantages such as flexibility, resilience or self-organization, but also severe issues such as difficult validation and verification of their behavior. Simulations are a well proven strategy facilitating these issues. Although simulations as virtual copies of real system behavior are useful test-beds for various experiments and optimizations along the entire industrial plant life-cycle, their design and integration are time-consuming and difficult. This paper proposes a new method to facilitate slicing of a monolithic simulation into a co-simulation, which is a simulation consisting of multiple inter-linked simulation units. 
The proposed method aims at specifying interfaces of the simulation units as well as routing signals for integrating the simulation units. The method improves engineering and re-design of co-simulations in terms of saving time and effort for creating and integrating complex co-simulations.}, keywords = {Co-Simulation, Module 2, Simulation Models}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Mazak2017repp, title = {Reverse Engineering of Production Processes based on Markov Chains}, author = {Alexandra Mazak and Manuel Wimmer and Polina Patsuk-Boesch}, url = {https://doi.org/10.1109/COASE.2017.8256182}, doi = {10.1109/COASE.2017.8256182}, year = {2017}, date = {2017-08-20}, urldate = {2017-08-20}, booktitle = {Proceedings of the 13th IEEE Conference on Automation Science and Engineering (CASE 2017), Xi'an, China, August 20-23 2017}, pages = {680--686}, publisher = {IEEE}, abstract = {Understanding and providing knowledge of production processes is crucial for flexible production systems as many decisions are postponed to the operation time. Furthermore, dealing with process improvements requires to have a clear picture about the status of the currently employed process. This becomes even more challenging with the emergence of Cyber-Physical Production Systems (CPPS). However, CPPS also provide the opportunity to observe the running processes by using concepts from IoT to producing logs for reflecting the events happening in the system during its execution. Therefore, we propose in this paper a fully automated approach for representing operational logs as models which additionally allows analytical means. In particular, we provide a transformation chain which allows the reverse engineering of Markov chains from event logs. 
The reverse engineered Markov chains allow to abstract the complexity of run-time information as well as to enable what-if analysis whenever improvements are needed by employing current model-based as well as measurement-based technologies. We demonstrate the approach based on a lab-sized transportation line system.}, keywords = {Markov Chains, Module 3, Reverse Engineering}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Wally2017mdvi, title = {A View on Model-Driven Vertical Integration: Alignment of Production Facility Models and Business Models}, author = {Bernhard Wally and Christian Huemer and Alexandra Mazak}, doi = {10.1109/COASE.2017.8256235}, year = {2017}, date = {2017-08-20}, booktitle = {Proceedings of the 13th IEEE Conference on Automation Science and Engineering (CASE 2017)}, abstract = {Smart manufacturing requires deeply integrated IT systems in order to foster flexibility in the setup, re-arrangement and use of attached manufacturing systems. In a vertical integration scenario, IT systems of different vendors might be in use and proprietary interfaces need to be defined in order to allow the exchange of relevant information from one system to another. In this paper we present a model-driven approach for vertical integration of IT systems. 
It is based on the application of industry standards for the representation of hierarchy level specific system properties and an alignment of their key concepts in order to provide bridging functions for the transformation between the different systems.}, keywords = {Business Models, Model-Driven, Module 3, Vertical Integration}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Wurl2017usdi, title = {Using Signifiers for Data Integration in Rail Automation}, author = {Alexander Wurl and Andreas Falkner and Alois Haselböck and Alexandra Mazak}, url = {https://dl.acm.org/doi/10.5220/0006416401720179}, doi = {10.5220/0006416401720179}, year = {2017}, date = {2017-07-24}, urldate = {2017-07-24}, booktitle = {Proceedings of the 6th International Conference on Data Science, Technology and Applications (DATA 2017)}, abstract = {In Rail Automation, planning future projects requires the integration of business-critical data from heterogeneous data sources. As a consequence, data quality of integrated data is crucial for the optimal utilization of the production capacity. Unfortunately, current integration approaches mostly neglect uncertainties and inconsistencies in the integration process in terms of railway specific data. To tackle these restrictions, we propose a semi-automatic process for data import, where the user resolves ambiguous data classifications. The task of finding the correct data warehouse classification of source values in a proprietary, often semi-structured format is supported by the notion of a signifier, which is a natural extension of composite primary keys. 
In a case study from the domain of asset management in Rail Automation we evaluate that this approach facilitates high-quality data integration while minimizing user interaction.}, keywords = {Data Integration, Module 3, Rail Automation, Signifiers}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Bill2017tmr, title = {On the Need for Temporal Model Repositories}, author = {Robert Bill and Alexandra Mazak and Manuel Wimmer and Birgit Vogel-Heuser}, editor = {Martina Seidl and Steffen Zschaler}, url = {https://doi.org/10.1007/978-3-319-74730-9_11}, doi = {10.1007/978-3-319-74730-9_11}, year = {2017}, date = {2017-07-17}, urldate = {2017-07-17}, booktitle = {Software Technologies: Applications and Foundations - STAF 2017 Collocated Workshops, Marburg, Germany, July 17-21, 2017, Revised Selected Papers}, volume = {10748}, pages = {136--145}, publisher = {Springer}, series = {Lecture Notes in Computer Science}, abstract = {Current model repositories often rely on existing versioning systems or standard database technologies. These approaches are sufficient for hosting different versions of models. However, the time dimension is often not explicitly represented and accessible. A more explicit presentation of time is needed in several use cases going beyond the classical system design phase support of models such as in simulation and runtime environments. In this paper, we discuss the need for introducing temporal model repositories and their prospective benefits. 
In particular, we outline several challenges which immediately arise when moving towards temporal model repositories, which are: storage, consistency, access, manipulation, and visualization of temporal models.}, keywords = {Model Repository, Module 3, Temporal}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Novak2017gsm, title = {Generation of Simulation Models in MATLAB-Simulink Based on AutomationML Plant Description}, author = {Petr Novak and Fajar Juang Ekaputra and Stefan Biffl}, url = {https://www.researchgate.net/publication/317232232_Generation_of_Simulation_Models_in_MATLAB-Simulink_Based_on_AutomationML_Plant_Description}, year = {2017}, date = {2017-07-09}, urldate = {2017-07-09}, booktitle = {Proceedings of the 20th World Congress of the International Federation of Automatic Control (IFAC WC 2017)}, abstract = {Process simulations are useful test-beds for experiments and optimizations along the entire industrial plant life-cycle. Shifting testing and tuning of industrial plants and their automation systems from the real world to simulated environments is a part of a virtualization, which is one of the key movements in emerging areas of Industry 4.0 and factories of the future. Although simulations bring a large variety of benefits, they suffer from a time-consuming and error-prone design phase, which limits their use in industrial practice. This paper proposes a new design method called AML2SIM, which transforms the real plant description represented in AutomationML (AML) and generates a dynamic simulation model (SIM). The proposed method significantly improves the engineering and re-design of simulation models in terms of saving time and effort of experts as the models can be easily re-generated based on a given AutomationML plant model. 
Simulations are assembled from simulation blocks that are shared among various projects in simulation libraries, hence the method contributes to reuse of simulation artifacts.}, keywords = {AutomationML, MATLAB, Module 2, Simulation Models, Simulink}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Novak2017abm, title = {Agent-Based Modeling and Simulation of Hybrid Cyber-Physical Systems}, author = {Petr Novák and Petr Kadera and Manuel Wimmer}, url = {https://doi.org/10.1109/CYBConf.2017.7985755}, doi = {10.1109/CYBConf.2017.7985755}, year = {2017}, date = {2017-06-21}, urldate = {2017-06-21}, booktitle = {Proceedings of the 3rd IEEE International Conference on Cybernetics (CYBCONF-2017), Exeter, United Kingdom, June 21-23 2017}, pages = {1--8}, publisher = {IEEE}, abstract = {Cyber-physical production systems are becoming more complex and heterogeneous in the frame of Internet of Cyber-Physical Things and Industry 4.0 environments. Behavior of such systems is difficult to analyze and control due to the system scale and emergency aspects. Multi-agent paradigm is a suitable formalism for modeling these kinds of systems and simulation modeling is important for getting an insight into the system, for synthesizing its control, and for analyzing performance and efficiency of systems under various circumstances. This paper is focused on simulation modeling for complex hybrid cyber-physical production systems. They combine continuous-time parts and discrete-event parts and the goal is to bring a methodology for addressing simulation for such complex hybrid systems. The paper presents two possible approaches how to unify access to both paradigms for system modeling and it explains how to combine them meaningfully. 
The proposed solution is based on the application of the traditional time-driven approach on the level of individual agents simulating cyber-physical components or sub-systems, and the dual value-driven approach frequently denoted as Quantized State Systems method in such a way that the benefits of both approaches are gained. The proposed approach simplifies synchronization and improves stability of coupled simulations by self-optimization of sampling periods for synchronizing the simulation agents for cyber-physical components.}, keywords = {Agent-Based, Module 2, Simulation}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Wimmer2017tag, title = {Towards Automatic Generation of Web-Based Modeling Editors}, author = {Manuel Wimmer and Irene Garrigós and Sergio Firmenich}, editor = {Jordi Cabot and Roberto De Virgilio and Riccardo Torlone}, url = {https://doi.org/10.1007/978-3-319-60131-1_31}, doi = {10.1007/978-3-319-60131-1_31}, isbn = {978-3-319-60131-1}, year = {2017}, date = {2017-06-08}, urldate = {2017-06-08}, booktitle = {Proceedings of the 17th International Conference on Web Engineering (ICWE 2017), Rome, Italy, June 5-8 2017}, volume = {10360}, pages = {446--454}, publisher = {Springer International Publishing}, series = {Lecture Notes in Computer Science}, abstract = {With the current trend of digitalization within a multitude of different domains, the need raises for effective approaches to capture domain knowledge. Modeling languages, especially, domain-specific modeling languages (DSMLs), are considered as an important method to involve domain experts in the system development. However, current approaches for developing DSMLs and generating modeling editors are mostly focusing on reusing the infrastructures provided by programming IDEs. On the other hand, several approaches exist for developing Web-based modeling editors using dedicated JavaScript frameworks. 
However, these frameworks do not exploit the high automation potential from DSML approaches to generate modeling editors from language specifications. Thus, the development of Web-based modeling editors requires still major programming efforts and dealing with recurring tasks. In this paper, we combine the best of both worlds by reusing the language specification techniques of DSML engineering approaches for generating Web-based modeling editors. In particular, we show how to combine two concrete approaches, namely Eugenia from DSML engineering and JointJS as a protagonist from JavaScript frameworks, and demonstrate the automation potential of establishing Web-based modeling editors. We present first results concerning two reference DSML examples which have been realized by our approach as Web-based modeling editors.}, keywords = {Web Engineering}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Artner2017tspm, title = {Towards Stochastic Performance Models for Web 2.0 Applications}, author = {Johannes Artner and Alexandra Mazak and Manuel Wimmer}, editor = {Jordi Cabot and Roberto De Virgilio and Riccardo Torlone}, url = {https://doi.org/10.1007/978-3-319-60131-1_21}, doi = {10.1007/978-3-319-60131-1_21}, year = {2017}, date = {2017-06-01}, urldate = {2017-06-01}, booktitle = {Proceedings of the 17th International Conference on Web Engineering (ICWE 2017), Rome, Italy, June 5-8 2017}, volume = {10360}, pages = {360--369}, publisher = {Springer}, series = {Lecture Notes in Computer Science}, abstract = {System performance is one of the most critical quality characteristics of Web applications which is typically expressed in response time, throughput, and utilization. These performance indicators, as well as the workload of a system, may be evaluated and analyzed by (i) model-based or (ii) measurement-based techniques. Given the complementary benefits offered by both techniques, it seems beneficial to combine them. 
For this purpose we introduce a combined performance engineering approach by presenting a concise way of describing user behavior by Markov models and derive from them workloads on resources. By means of an empirical user test, we evaluate the Markov assumption for a given Web 2.0 application which is an important prerequisite for our approach.}, keywords = {Module 3, Stochastic Performance Models, Web 2.0}, pubstate = {published}, tppubtype = {inproceedings} } @article{Mansoor2017mvr, title = {Multi-view refactoring of class and activity diagrams using a multi-objective evolutionary algorithm}, author = {Usman Mansoor and Marouane Kessentini and Manuel Wimmer and Kalyanmoy Deb}, doi = {10.1007/s11219-015-9284-4}, issn = {1573-1367}, year = {2017}, date = {2017-06-01}, urldate = {2017-06-01}, journal = {Software Quality Journal}, volume = {25}, number = {2}, pages = {473--501}, abstract = {To improve the quality of software systems, one of the widely used techniques is refactoring defined as the process of improving the design of an existing system by changing its internal structure without altering the external behavior. The majority of existing refactoring work focuses mainly on the source code level. The suggestion of refactorings at the model level is more challenging due to the difficulty to evaluate: (a) the impact of the suggested refactorings applied to a diagram on other related diagrams to improve the overall system quality, (b) their feasibility, and (c) interdiagram consistency. We propose, in this paper, a novel framework that enables software designers to apply refactoring at the model level. To this end, we used a multi-objective evolutionary algorithm to find a trade-off between improving the quality of class and activity diagrams. 
The proposed multi-objective approach provides a multi-view for software designers to evaluate the impact of suggested refactorings applied to class diagrams on related activity diagrams in order to evaluate the overall quality, and check their feasibility and behavior preservation. The statistical evaluation performed on models extracted from four open-source systems confirms the efficiency of our approach.}, keywords = {Multi-View}, pubstate = {published}, tppubtype = {article} } @article{Kessentini2017gesi, title = {Guest Editorial Special Issue on Computational Intelligence for Software Engineering and Services Computing}, author = {Marouane Kessentini and Manuel Wimmer}, url = {https://ieeexplore.ieee.org/document/7935486}, doi = {10.1109/TETCI.2017.2700659}, issn = {2471-285X}, year = {2017}, date = {2017-05-29}, urldate = {2017-05-29}, booktitle = {IEEE Trans. Emerging Topics in Comput. Intellig.}, journal = {IEEE Transactions on Emerging Topics in Computational Intelligence}, volume = {1}, number = {3}, pages = {143--144}, abstract = {The papers in this special section focus on computational intelligence for software engineering and services computing. Recently, there has been an increasing demand for complex systems in distributed and mobile environments. The development of these complex software systems is challenging, especially while dealing with dynamic, imprecise and uncertain information and environments. In the recent years, an emerging paradigm is to focus on the investigation and integration of computational intelligence tools into current software development practices to address the growing complexity of software systems and improving their robustness. This emerging paradigm uses various techniques from the computational intelligence literature (e.g., knowledge-transfer and data-driven search, fuzzy logic, machine learning, evolutionary computation, etc.) 
to address problems related to requirements engineering, services computing, cloud computing, Internet of Things (IoT), quality of services, software testing, model-driven engineering, etc.}, keywords = {Software Engineering}, pubstate = {published}, tppubtype = {article} } @inbook{Berardinelli2017mdse, title = {Model-Driven Systems Engineering: Principles and Applications in the CPPS Domain}, author = {Luca Berardinelli and Alexandra Mazak and Oliver Alt and Manuel Wimmer and Gerti Kappel}, editor = {Stefan Biffl and Arndt Lueder and Detlef Gerhard}, doi = {10.1007/978-3-319-56345-9_11}, isbn = {978-3-319-56345-9}, year = {2017}, date = {2017-05-07}, booktitle = {Multi-Disciplinary Engineering for Cyber-Physical Production Systems: Data Models and Software Solutions for Handling Complex Engineering Projects}, pages = {261--299}, publisher = {Springer International Publishing}, abstract = {To engineer large, complex, and interdisciplinary systems, modeling is considered as the universal technique to understand and simplify reality through abstraction, and thus, models are in the center as the most important artifacts throughout interdisciplinary activities within model-driven engineering processes. Model-Driven Systems Engineering (MDSE) is a systems engineering paradigm that promotes the systematic adoption of models throughout the engineering process by identifying and integrating appropriate concepts, languages, techniques, and tools. This chapter discusses current advances as well as challenges towards the adoption of model-driven approaches in cyber-physical production systems (CPPS) engineering. In particular, we discuss how modeling standards, modeling languages, and model transformations are employed to support current systems engineering processes in the CPPS domain, and we show their integration and application based on a case study concerning a lab-sized production system. 
The major outcome of this case study is the realization of an automated engineering tool chain, including the languages SysML, AML, and PMIF, to perform early design and validation.}, keywords = {CPPS, Model-Driven, Module 3, Systems Engineering}, pubstate = {published}, tppubtype = {inbook} } @inproceedings{Wally2017viai, title = {Entwining Plant Engineering Data and ERP Information: Vertical Integration with AutomationML and ISA-95}, author = {Bernhard Wally and Christian Huemer and Alexandra Mazak}, doi = {10.1109/ICCAR.2017.7942718}, isbn = {978-1-5090-6089-4}, year = {2017}, date = {2017-04-26}, booktitle = {Proceedings of the 3rd IEEE International Conference on Control, Automation and Robotics (ICCAR 2017)}, pages = {356--364}, abstract = {IT systems' integration in manufacturing companies is currently investigated in both academia and industry. While there can be found specialized systems and standards that tackle specific, e.g., production relevant problems, little has been done in the alignment of and transformation between such industrial standards. We will present the alignment of two specialized international standards, which will foster vertical system integration through detailed mapping of related concepts: (i) the Automation Markup Language (AML) standardizes the modeling of factory shop floors on top of the XML-based Computer Aided Engineering Exchange (CAEX) data format and (ii) ISA-95 is a series of standards targeting the integration of enterprise control systems, most prominent enterprise resource planning systems and manufacturing execution systems. In order to provide higher level semantics to lower level system descriptions, we have (i) aligned elements from AML and ISA-95 in order to make explicit both overlaps and complementary concepts and (ii) defined a ruleset for referencing external ISA-95 documents/elements from AML documents. 
Finally, we have developed a scenario that shows the potential use case for such an entwined use of AML and ISA-95.}, keywords = {Module 3}, pubstate = {published}, tppubtype = {inproceedings} } @mastersthesis{wiesenhofer2017, title = {Constraints and Models@Runtime for EMF Profiles}, author = {Wiesenhofer, Christian}, note = {Advisor: Univ.-Prof. Mag. Dr. Manuel Wimmer}, url = {https://resolver.obvsg.at/urn:nbn:at:at-ubtuw:1-98941 http://hdl.handle.net/20.500.12708/5123}, year = {2017}, date = {2017-04-19}, address = {A-1040 Wien, Karlsplatz 13}, school = {TU Wien, Fakultät für Informatik}, abstract = {Modeling languages play an essential part in the software engineering process. Currently, mostly UML is used for that purpose, but domain-specific modeling languages (DSMLs) get more and more attention. Their main benefit is a higher abstraction-level, which eases generating code from such models. One major drawback of DSMLs, is their time-consuming development. To tackle this problem the EMF Profiles project was founded. It provides a lightweight extension mechanism, just as UML profiles, to be used for DSMLs. This way models can be altered without modifying their whole metamodel and domain properties can be reused, thus reducing the required development time. In comparison to pure metamodel-based languages there are certain limitations in EMF Profiles. There is no way to model constraints regarding the restricted use of stereotypes or to include runtime behavior. A typical use case is for example to use multiple languages at once. However, considering these shortcomings, such an attempt is not possible. Thus the question emerged, how these features can be realized. In this thesis two extensions to EMF Profiles are presented and implemented as prototype, which is then evaluated using a case study. The research problems were solved by introducing an OCL constraint mechanism, which manages the stereotype application. 
Furthermore a generator was implemented to add AspectJ-based code fragments to profiles, so they can influence the runtime behavior of a model element. The case study was conducted by creating a base Petri net language and adding three Petri net extensions, implemented as EMF profiles, to it. All of their specifications could be fully implemented. Further metrics about the approach and the prototype were collected, in order to ensure it is assessable and comparable.}, keywords = {DSML, EMF, Models@Runtime, OCL, Runtime Support}, pubstate = {published}, tppubtype = {mastersthesis} } @phdthesis{novak2017, title = {Design and Integration of Simulation Models for Industrial Systems}, author = {Novak, Petr}, note = {Advisor: Dr. Radek Sindelar}, url = {https://dspace.cvut.cz/handle/10467/65523 https://dspace.cvut.cz/bitstream/handle/10467/65523/Disertace_Novak_Petr_2016.pdf?sequence=1&isAllowed=y}, year = {2017}, date = {2017-03-09}, urldate = {2017-03-09}, address = {České vysoké učení technické v Praze, Technická 1902/2, 166 27 Praha 6 - Dejvice-Praha 6, Czechia}, school = {Czech Technical University in Prague, Faculty of Electrical Engineering}, abstract = {Industrial systems are becoming complex and large-scale. Optimization of their operation and testing of their control systems are done on simulation models frequently, because simulated experiments are faster, cheaper, and repeatable compared to experiments done on real industrial plants. However, design and re-design of simulation models are difficult and time-consuming tasks. In addition, integration of simulation models within industrial automation systems is not satisfactory nowadays. This thesis is aimed at improving the design and integration phases of the simulation model life-cycle. In the area of the simulation model design, especially a component-based approach for simulation model creation is investigated and improved in this thesis. 
It assumes that engineering systems consist of atomic components that are connected into topologies of real industrial plants. The proposed method supports assembling simulation models from simulation components, which can be reused from previous simulation projects. Each real device can be simulated by one of the available implementations of the component, representing this device. The proposed solution is based on the utilization of the bond-graph theory to guarantee the compatibility of the interfaces of the connected component implementations and to support their selection. In addition, the bond-graph theory is used to support splitting a simulation model into a set of simulation modules and their integration into a simulation workflow. For all of these types of tasks, the bond-graph theory was enhanced with an explicit description of component interfaces and a new causality assignment algorithm was designed. This algorithm can be used not only for generation of simulation models, but also for verifications on a conceptual planning level, whether specific sets of simulation component implementations are sufficient to model particular plants. In the area of the simulation model integration, two research threads are followed. The first one is related to formalizing, capturing, and integrating knowledge about the real industrial plant, input and output tags, parameters of devices, and mappings of all these entities to simulation model components, variables, and parameters. Such engineering knowledge is used to support simulation model design and maintenance of existing simulation models when a real plant is changed. The second thread in the integration area is focused on interoperability of simulation modules on the level of the supervisory control and data acquisition of the automation pyramid. This task covers the access of simulations to runtime data, improved parameter setting, and version-control of simulation modules. 
This thesis contributes to the areas of the simulation modeling, knowledge representation, and distributed system integration. The most important results are (i) adaptation of the bond graph theory for non-traditional applications including selection of explicitly specified component implementations as well as a new causality assignment algorithm supporting this approach, (ii) utilization of ontologies for supporting simulation model design and integration, and (iii) improved simulation model integration.}, keywords = {Simulation Models}, pubstate = {published}, tppubtype = {phdthesis} } @inproceedings{Wally2017tsl, title = {ISA-95 based Task Specification Layer for REA in Production Environments}, author = {Bernhard Wally and Christian Huemer and Alexandra Mazak}, year = {2017}, date = {2017-03-07}, booktitle = {Proceedings of the 11th International Workshop on Value Modeling and Business Ontologies (VMBO 2017)}, abstract = {Resource-Event-Agent (REA) has been applied to various engineering and business domains, with a focus on transfer activities rather than transformation activities. In the context of smart manufacturing, vertical integration of IT systems (e.g., business applications and production control systems) is a key factor. 
In this work, we shed light on the integration of REA concepts into production environments by investigating properties of REA transformations and aligning them with concepts from an international standard for enterprise-control system integration (ISA-95).}, keywords = {ISA-95, Module 3, REA}, pubstate = {published}, tppubtype = {inproceedings} } @article{Bousse2017od, title = {Omniscient debugging for executable DSLs}, author = {Erwan Bousse and Dorian Leroy and Benoit Combemale and Manuel Wimmer and Benoit Baudry}, url = {https://www.sciencedirect.com/science/article/pii/S0164121217302765?via%3Dihub}, doi = {10.1016/j.jss.2017.11.025}, issn = {0164-1212}, year = {2017}, date = {2017-01-01}, urldate = {2017-01-01}, journal = {Journal of Systems and Software}, volume = {137}, pages = {261--288}, abstract = {Omniscient debugging is a promising technique that relies on execution traces to enable free traversal of the states reached by a model (or program) during an execution. While a few General-Purpose Languages (GPLs) already have support for omniscient debugging, developing such a complex tool for any executable Domain Specific Language (DSL) remains a challenging and error prone task. A generic solution must: support a wide range of executable DSLs independently of the metaprogramming approaches used for implementing their semantics; be efficient for good responsiveness. Our contribution relies on a generic omniscient debugger supported by efficient generic trace management facilities. To support a wide range of executable DSLs, the debugger provides a common set of debugging facilities, and is based on a pattern to define runtime services independently of metaprogramming approaches. Results show that our debugger can be used with various executable DSLs implemented with different metaprogramming approaches. 
As compared to a solution that copies the model at each step, it is on average six times more efficient in memory, and at least 2.2 times faster when exploring past execution states, while only slowing down the execution 1.6 times on average.}, keywords = {Domain-Specific Languages, Executable DSL, Execution Trace, Omniscient Debugging, Software Language Engineering}, pubstate = {published}, tppubtype = {article} } @article{Mayerhofer2017mdew, title = {A Model-Driven Engineering Workbench for CAEX Supporting Language Customization and Evolution}, author = {Tanja Mayerhofer and Manuel Wimmer and Luca Berardinelli and Rainer Drath}, url = {https://ieeexplore.ieee.org/document/8239624}, doi = {10.1109/TII.2017.2786780}, issn = {1551-3203}, year = {2017}, date = {2017-01-01}, urldate = {2017-01-01}, journal = {IEEE Transactions on Industrial Informatics}, abstract = {Computer Aided Engineering Exchange (CAEX) is one of the most promising standards when it comes to data exchange between engineering tools in the production system automation domain. This is also reflected by the current emergence of AutomationML (AML), which uses CAEX as its core representation language. However, with the increasing use of CAEX, important language engineering challenges arise. One of these challenges is the customization of CAEX for its usage in superior standards, such as AML, which requires the precise specification of the language including the formalization and validation of additional usage rules. Another highly topical challenge is the ongoing evolution of CAEX as has recently happened with the transition from version 2.15 to version 3.0. Further challenges include the provisioning of editing facilities and visualizations of CAEX documents such that they can be inspected and modified by engineers, and the development of transformations from and to CAEX such that different engineering artifacts can be exchanged via CAEX. 
In this paper, we take a language engineering point of view and present a model-driven engineering (MDE) workbench for CAEX that allows to address these and other challenges. In particular, we present how CAEX can be formulated in a model-based framework, which allows the application of MDE techniques, such as model validation, migration, editing, visualization, and transformation techniques, to solve a diverse set of language engineering challenges experienced for CAEX. We give an overview of the developed workbench and illustrate its benefits with a focus on customizing CAEX for AML and evolving CAEX documents from version 2.15 to 3.0.}, keywords = {AutomationML, CAEX, Model-Driven}, pubstate = {published}, tppubtype = {article} } @book{DBLP:series/synthesis/2017Brambilla, title = {Model-Driven Software Engineering in Practice, Second Edition}, author = {Marco Brambilla and Jordi Cabot and Manuel Wimmer}, url = {https://doi.org/10.2200/S00751ED2V01Y201701SWE004}, doi = {10.2200/S00751ED2V01Y201701SWE004}, year = {2017}, date = {2017-01-01}, urldate = {2017-01-01}, publisher = {Morgan & Claypool Publishers}, series = {Synthesis Lectures on Software Engineering}, abstract = {This book discusses how model-based approaches can improve the daily practice of software professionals. This is known as Model-Driven Software Engineering (MDSE) or, simply, Model-Driven Engineering (MDE). MDSE practices have proved to increase efficiency and effectiveness in software development, as demonstrated by various quantitative and qualitative studies. MDSE adoption in the software industry is foreseen to grow exponentially in the near future, e.g., due to the convergence of software development and business analysis. 
The aim of this book is to provide you with an agile and flexible tool to introduce you to the MDSE world, thus allowing you to quickly understand its basic principles and techniques and to choose the right set of MDSE instruments for your needs so that you can start to benefit from MDSE right away. The book is organized into two main parts. The first part discusses the foundations of MDSE in terms of basic concepts (i.e., models and transformations), driving principles, application scenarios, and current standards, like the well-known MDA initiative proposed by OMG (Object Management Group) as well as the practices on how to integrate MDSE in existing development processes. The second part deals with the technical aspects of MDSE, spanning from the basics on when and how to build a domain-specific modeling language, to the description of Model-to-Text and Model-to-Model transformations, and the tools that support the management of MDSE projects. }, keywords = {}, pubstate = {published}, tppubtype = {book} } @article{DBLP:journals/tse/FleckTKWA17, title = {Model Transformation Modularization as a Many-Objective Optimization Problem}, author = {Martin Fleck and Javier Troya and Marouane Kessentini and Manuel Wimmer and Bader Alkhazi}, url = {https://doi.org/10.1109/TSE.2017.2654255}, doi = {10.1109/TSE.2017.2654255}, year = {2017}, date = {2017-01-01}, urldate = {2017-01-01}, journal = {IEEE Transactions on Software Engineering}, volume = {43}, number = {11}, pages = {1009--1032}, abstract = {Model transformation programs are iteratively refined, restructured, and evolved due to many reasons such as fixing bugs and adapting existing transformation rules to new metamodels version. Thus, modular design is a desirable property for model transformations as it can significantly improve their evolution, comprehensibility, maintainability, reusability, and thus, their overall quality. 
Although language support for modularization of model transformations is emerging, model transformations are created as monolithic artifacts containing a huge number of rules. To the best of our knowledge, the problem of automatically modularizing model transformation programs was not addressed before in the current literature. These programs written in transformation languages, such as ATL, are implemented as one main module including a huge number of rules. To tackle this problem and improve the quality and maintainability of model transformation programs, we propose an automated search-based approach to modularize model transformations based on higher-order transformations. Their application and execution is guided by our search framework which combines an in-place transformation engine and a search-based algorithm framework. We demonstrate the feasibility of our approach by using ATL as concrete transformation language and NSGA-III as search algorithm to find a trade-off between different well-known conflicting design metrics for the fitness functions to evaluate the generated modularized solutions. To validate our approach, we apply it to a comprehensive dataset of model transformations. As the study shows, ATL transformations can be modularized automatically, efficiently, and effectively by our approach. We found that, on average, the majority of recommended modules, for all the ATL programs, by NSGA-III are considered correct with more than 84 percent of precision and 86 percent of recall when compared to manual solutions provided by active developers. 
The statistical analy...}, keywords = {}, pubstate = {published}, tppubtype = {article} } @inproceedings{DBLP:conf/wcre/NeubauerBMW17, title = {Automated generation of consistency-achieving model editors}, author = {Patrick Neubauer and Robert Bill and Tanja Mayerhofer and Manuel Wimmer}, editor = {Martin Pinzger and Gabriele Bavota and Andrian Marcus}, url = {https://doi.org/10.1109/SANER.2017.7884615}, doi = {10.1109/SANER.2017.7884615}, year = {2017}, date = {2017-01-01}, urldate = {2017-01-01}, booktitle = {IEEE 24th International Conference on Software Analysis, Evolution and Reengineering, SANER 2017, Klagenfurt, Austria, February 20-24, 2017}, pages = {127--137}, publisher = {IEEE Computer Society}, abstract = {The advances of domain-specific modeling languages (DSMLs) and their editors created with modern language work-benches, have convinced domain experts of applying them as important and powerful means in their daily endeavors. Despite the fact that such editors are proficient in retaining syntactical model correctness, they present major shortages in mastering the preservation of consistency in models with elaborated language-specific constraints which require language engineers to manually implement sophisticated editing capabilities. Consequently, there is a demand for automating procedures to support editor users in both comprehending as well as resolving consistency violations. In this paper, we present an approach to automate the generation of advanced editing support for DSMLs offering automated validation, content-assist, and quick fix capabilities beyond those created by state-of-the-art language workbenches that help domain experts in retaining and achieving the consistency of models. For validation, we show potential error causes for violated constraints, instead of only the context in which constraints are violated. 
The state-space explosion problem is mitigated by our approach resolving constraint violations by increasing the neighborhood scope in a three-stage process, seeking constraint repair solutions presented as quick fixes to the editor user. We illustrate and provide an initial evaluation of our approach based on an Xtext-based DSML for modeling service clusters.}, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{DBLP:conf/wcre/NeubauerBW17, title = {Modernizing domain-specific languages with XMLText and IntellEdit}, author = {Patrick Neubauer and Robert Bill and Manuel Wimmer}, editor = {Martin Pinzger and Gabriele Bavota and Andrian Marcus}, url = {https://doi.org/10.1109/SANER.2017.7884679}, doi = {10.1109/SANER.2017.7884679}, year = {2017}, date = {2017-01-01}, urldate = {2017-01-01}, booktitle = {IEEE 24th International Conference on Software Analysis, Evolution and Reengineering, SANER 2017, Klagenfurt, Austria, February 20-24, 2017}, pages = {565--566}, publisher = {IEEE Computer Society}, abstract = {The necessity of software evolution caused by novel requirements is often triggered alongside the advancement of underlying languages and tools. Although modern language workbenches decrease the opportunity cost of creating new language implementations, they do not offer automated and complete integration of existing languages. Moreover, they still require complex language engineering skills and extensive manual implementation effort to suit the expectations of domain experts, e.g., in terms of editor capabilities. In this work we present XMLIntellEdit-a framework for evolving domain-specific languages by automating the generation of modernized languages offering advanced editing capabilities, such as extended validation, content-assist, and quick fix solutions. Our approach builds on techniques from Model-Driven Engineering and Search-based Software Engineering research. 
Initial results indicate that XML Schema definitions containing restrictions can be applied for the automated generation of advanced editing facilities.}, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Sint2023b, title = {An Interdisciplinary Course on Model-Based Systems Engineering}, author = {Azad Khandoker and Sabine Sint and Manuel Wimmer and Klaus Zeman}, booktitle = {2023 ACM/IEEE International Conference on Model Driven Engineering Languages and Systems Companion (MODELS-C), Västerås, Sweden, October 1-6/2023.}, abstract = {Model-Based Systems Engineering (MBSE) has emerged as a promising approach to design and develop complex engineering systems. Its adoption is steadily increasing in various industries and along additional system life cycle phases, showcasing its potential to enhance system development processes, to improve overall system performance, to support traceability, safety \& security, maintenance, condition monitoring, upcycling, recycling, and even circular economy. As MBSE is becoming more prevalent in several industries, it is crucial to incorporate MBSE education into engineering curricula to prepare future engineers with the necessary knowledge, methods, skills, and tools. In this paper, we present our interdisciplinary MBSE course at Johannes Kepler University in Linz and further explore the challenges and opportunities of the current state of MBSE education for the effective integration of MBSE into engineering education.}, keywords = {MBSE}, pubstate = {forthcoming}, tppubtype = {inproceedings} }