Auf dieser Seite finden Sie sämtliche Publikationen, die an diesem Institut entstanden sind. Wenn Sie nach einer bestimmten Publikation suchen, die womöglich geschrieben wurde, bevor die entsprechende Autorin oder der entsprechende Autor an unser Institut gewechselt hat, besuchen Sie bitte die jeweilige Mitarbeiterseite.

Bitte beachten Sie, dass auch eine Auflistung sämtlicher am Institut entstandener studentischer Abschlussarbeiten existiert.

Publikationen des Instituts für Künstliche Intelligenz

% % This file was created by the TYPO3 extension % publications % --- Timezone: CEST % Creation date: 2024-03-29 % Creation time: 12:00:25 % --- Number of references % 447 % @Conference { Lin2024PlanVerificationComplexity, author = {Lin, Songtuan and Olz, Conny and Helmert, Malte and Bercher, Pascal}, title = {On the Computational Complexity of Plan Verification, (Bounded) Plan-Optimality Verification, and Bounded Plan Existence}, year = {2024}, booktitle = {Proceedings of the 38th AAAI Conference on Artificial Intelligence (AAAI 2024)}, publisher = {AAAI Press} } @Inproceedings { Olz2023ConjunctiveHTNEffects, author = {Olz, Conny and Bercher, Pascal}, title = {Can They Come Together? A Computational Complexity Analysis of Conjunctive Possible Effects of Compound HTN Planning Tasks}, year = {2023}, DOI = {https://doi.org/10.1609/icaps.v33i1.27209}, booktitle = {Proceedings of the 33rd International Conference on Automated Planning and Scheduling (ICAPS 2023)}, publisher = {AAAI Press}, pages = {314--323}, file_url = {t3://file?uid=479855} } @Conference { 367086889653_2023, author = {Wu, Ying Xian and Olz, Conny and Lin, Songtuan and Bercher, Pascal}, title = {Grounded (Lifted) Linearizer at the IPC 2023: Solving Partial Order HTN Problems by Linearizing Them}, year = {2023}, booktitle = {Proceedings of the 11th International Planning Competition: Planner Abstracts – Hierarchical Task Network (HTN) Planning Track (IPC)}, file_url = {t3://file?uid=483769} } @Inproceedings { Lin2023VerificationComplexity, author = {Lin, Songtuan and Olz, Conny and Helmert, Malte and Bercher, Pascal}, title = {On the Computational Complexity of Plan Verification, (Bounded) Plan-Optimality Verification, and Bounded Plan Existence}, year = {2023}, booktitle = {Proceedings of the 6th ICAPS Workshop on Hierarchical Planning (HPlan 2023)} } @Conference { Olz23PandaDealer, author = {Olz, Conny and H\"{o}ller, Daniel and Bercher, Pascal}, title = {The PANDADealer System for Totally Ordered 
HTN Planning in the 2023 IPC}, year = {2023}, booktitle = {Proceedings of the 11th International Planning Competition: Planner Abstracts – Hierarchical Task Network (HTN) Planning Track (IPC)}, file_url = {t3://file?uid=483768} } @Inproceedings { Olz2023Lookahead, author = {Olz, Conny and Bercher, Pascal}, title = {A Look-Ahead Technique for Search-Based HTN Planning: Reducing the Branching Factor by Identifying Inevitable Task Refinements}, year = {2023}, DOI = {https://doi.org/10.1609/socs.v16i1.27284}, booktitle = {Proceedings of the 16th International Symposium on Combinatorial Search (SoCS 2023)}, publisher = {AAAI Press}, pages = {65--73}, file_url = {t3://file?uid=479856} } @Conference { Lindner22BeyondCausalLinks, author = {Lindner, Felix and Olz, Conny}, title = {Step-by-Step Task Plan Explanations Beyond Causal Links}, year = {2022}, DOI = {https://doi.org/10.1109/RO-MAN53752.2022.9900590}, booktitle = {2022 31st IEEE International Conference on Robot \\& Human Interactive Communication (RO-MAN)}, publisher = {IEEE}, pages = {45--51}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2022/Lindner22PlanExplanationBeyondCL.pdf} } @Incollection { 677401578892_2022, author = {Krarup, Benjamin and Lindner, Felix and Krivic, Senka and Long, Derek}, title = {Understanding a Robot’s Guiding Ethical Principles via Automatically Generated Explanations}, year = {2022}, booktitle = {IEEE International Conference on Automation Science and Engineering (CASE)} } @Incollection { 159142454452_2022, author = {Hannibal, Glenda and Lindner, Felix}, title = {Towards A Questions-Driven Approach to Explainable Human-Robot Interaction}, year = {2022}, booktitle = {Robophilosophy 2022} } @Conference { 923461743627_2022, author = {Halilovic, Amar and Lindner, Felix}, title = {Explaining Local Path Plans Using LIME}, year = {2022}, booktitle = {31st International Conference on Robotics in Alpe-Adria-Danube Region (RAAD)} } @Conference { 
Olz2022POPrecsAndEffects, author = {Olz, Conny and Bercher, Pascal}, title = {On the Efficient Inference of Preconditions and Effects of Compound Tasks in Partially Ordered HTN Planning Domains}, abstract = {Recently, preconditions and effects of compound tasks based on their possible refinements have been introduced together with an efficient inference procedure to compute a subset of them. However, they were restricted to total-order HTN planning domains. In this paper we generalize the definitions and algorithm to the scenario of partially ordered domains.}, year = {2022}, booktitle = {Proceedings of the 5th ICAPS Workshop on Hierarchical Planning (HPlan 2022)}, pages = {47--51}, web_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2022/Olz2022POPrecsAndEffects.pdf}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2022/Olz2022POPrecsAndEffects.pdf} } @Inbook { 634936780812_2022, author = {Lindner, Felix}, title = {Digital Phenotyping and Mobile Sensing}, year = {2022}, chapter = {Defining Artificial Intelligence}, pages = {451-454} } @Inproceedings { IlGl22a, author = {Illich, Moritz and Glimm, Birte}, title = {Computing Concept Referring Expressions for Queries on Horn ALC Ontologies}, abstract = {Classical instance queries over an ontology only consider explicitly named individuals. Concept referring expressions (CREs) also allow for returning answers in the form of concepts that describe implicitly given individuals in terms of their relation to an explicitly named one. Existing approaches, e.g., based on tree automata, can neither be integrated into state-of-the-art OWL reasoners nor are they directly amenable for an efficient implementation. To address this, we devise a novel algorithm that uses highly optimized OWL reasoners as a black box. 
In addition to the standard criteria of singularity and certainty for CREs, we devise and consider the criterion of uniqueness of CREs for Horn ALC ontologies. The evaluation of our prototypical implementation shows that computing CREs for the most general concept (top) can be done in less than one minute for ontologies with thousands of individuals and concepts.}, year = {2022}, booktitle = {Proceedings of the 31st International Joint Conference on Artificial Intelligence and the 23rd European Conference on Artificial Intelligence (IJCAI-ECAI 2022)}, publisher = {Morgan Kaufmann}, keywords = {Reasoning, Description Logics, Optimisations, Optimizations, Concept Referring Expressions}, tags = {AutomatedReasoning}, file_url = {t3://file?uid=455865} } @Incollection { 644085957355_2022, author = {Karalus, Jakob and Lindner, Felix}, title = {Accelerating the Learning of TAMER with Counterfactual Explanations}, year = {2022}, reviewed = {1}, booktitle = {The 2022 IEEE International Conference on Development and Learning (ICDL 2022)} } @Inproceedings { Olz21RevealingEffects, author = {Olz, Conny and Biundo, Susanne and Bercher, Pascal}, title = {Revealing Hidden Preconditions and Effects of Compound HTN Planning Tasks – A Complexity Analysis}, year = {2021}, DOI = {https://doi.org/10.1609/aaai.v35i13.17414}, booktitle = {Proceedings of the 35th AAAI Conference on Artificial Intelligence (AAAI 2021)}, publisher = {AAAI Press}, pages = {11903--11912}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2021/Olz21CompoundEffects.pdf} } @Conference { 161353181642_2021, author = {Louise, Dennis and Bentzen, Martin Mose and Lindner, Felix and Fisher, Michael}, title = {Verifiable Machine Ethics in Changing Contexts}, year = {2021}, booktitle = {AAAI 2021} } @Inproceedings { Olz2021ComprehendHTNModels, author = {Olz, Conny and Wierzba, Eva and Bercher, Pascal and Lindner, Felix}, title = {Towards Improving the Comprehension of HTN Planning 
Domains by Means of Preconditions and Effects of Compound Tasks}, year = {2021}, booktitle = {Proceedings of the 10th Workshop on Knowledge Engineering for Planning and Scheduling (KEPS 2021)}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2021/Olz2021ComprehendHTNModels.pdf} } @Inbook { 414084394015_2021, author = {Lindner, Felix}, title = {Soziale Robotik und KI}, year = {2021}, publisher = {Springer} } @Conference { 108985300602_2021, author = {Karalus, Jakob and Halilovic, Amar and Lindner, Felix}, title = {Explanations in, Explanations out: Human-in-the-Loop Social Navigation Learning}, year = {2021}, booktitle = {ICDL Workshop on Human aligned Reinforcement Learning for Autonomous Agents and Robots} } @Inproceedings { StGl21a, author = {Steigmiller, Andreas and Glimm, Birte}, title = {Parallelised ABox Reasoning and Query Answering with Expressive Description Logics}, abstract = {Automated reasoning support is an important aspect of logic-based knowledge representation. The development of specialised procedures and sophisticated optimisation techniques significantly improved the performance even for complex reasoning tasks such as conjunctive query answering. Reasoning and query answering over knowledge bases with a large number of facts and expressive schemata remains, however, challenging. We propose a novel approach where the reasoning over assertional knowledge is split into small, similarly sized work packages to enable a parallelised processing with tableau algorithms, which are dominantly used for reasoning with more expressive Description Logics. To retain completeness in the presence of expressive schemata, we propose a specifically designed cache that allows for controlling and synchronising the interaction between the constructed partial models. 
We further report on encouraging performance improvements for the implementation of the techniques in the tableau-based reasoning system Konclude.}, year = {2021}, booktitle = {Proceedings of the 18th European Semantic Web Conference (ESWC 2021)}, publisher = {Springer-Verlag}, series = {Lecture Notes in Computer Science}, tags = {AutomatedReasoning KnowledgeModelling SemanticTechnologies} } @Inproceedings { QuAG21a, author = {Qiu, Haonan and Ayara, Adel and Glimm, Birte}, title = {Ontology-Based Map Data Quality Assurance}, abstract = {A lane-level, high-definition (HD) digital map is needed for autonomous cars to provide safety and security to the passengers. However, it continues to prove very difficult to produce error-free maps. To avoid the deactivation of autonomous driving (AD) mode caused by map errors, ensuring map data quality is a crucial task. We propose an ontology-based workflow for HD map data quality assurance, including semantic enrichment, violation detection, and violation handling. Evaluations show that our approach can successfully check the quality of map data and suggests that violation handling is even feasible on-the-fly in the car (on-board), avoiding the autonomous driving mode's deactivation.}, year = {2021}, booktitle = {Proceedings of the 18th European Semantic Web Conference (ESWC 2021)}, publisher = {Springer-Verlag}, series = {Lecture Notes in Computer Science}, tags = {AutomatedReasoning KnowledgeModelling} } @Inproceedings { WeLG21a, author = {Wenzel, Maximilian and Liebig, Thorsten and Glimm, Birte}, title = {HDT Bitmap Triple Indices for Efficient RDF Data Exploration}, abstract = {The exploration of large, unknown RDF data sets is difficult even for users who are familiar with Semantic Web technologies as, e.g., the SPARQL query language. The concept of faceted navigation offers a user-friendly exploration method through filters that are chosen such that no empty result sets occur. 
However, especially for large data sets, computing such filters is resource intensive and may cause considerable delays in the user interaction. One possibility for improving the performance is the generation of indices for partial solutions. In this paper, we propose and evaluate indices in form of the Bitmap Triple (BT) data structure, generated over the Header-Dictionary-Triples (HDT) RDF compression format. We show that the resulting indices can be utilized to efficiently compute the required exploratory operations for data sets with up to 150 million triples. In the experiments, the BT indices exhibit a stable performance and outperform other deployed approaches in four out of five compared operations.}, status = {1}, year = {2021}, booktitle = {Proceedings of the 18th European Semantic Web Conference (ESWC 2021)}, publisher = {Springer-Verlag}, series = {Lecture Notes in Computer Science}, tags = {AutomatedReasoning, KnowledgeModelling} } @Article { Bercher2021DIY, author = {Bercher, Pascal and Behnke, Gregor and Kraus, Matthias and Schiller, Marvin R. G. and Manstetten, Dietrich and Dambier, Michael and Dorna, Michael and Minker, Wolfgang and Glimm, Birte and Biundo, Susanne}, title = {Do It Yourself, but Not Alone: Companion-Technology for Home Improvement – Bringing a Planning-Based Interactive DIY Assistant to Life}, abstract = {We report on the technology transfer project “Do it yourself, but not alone: Companion-Technology for Home Improvement” that was carried out by Ulm University in cooperation with Robert Bosch GmbH. We developed a prototypical assistance system that assists a Do It Yourself (DIY) handyman in carrying out DIY projects. The assistant, based on various AI and dialog management capabilities, generates a sequence of detailed instructions that users may just follow or adapt according to their individual preferences. 
It features explanation capabilities as well as pro-active support based on communication with the user as well as with the involved tools. We report on the project’s main achievements, including the findings of various empirical studies conducted in various development stages of the prototype.}, year = {2021}, journal = {K\"{u}nstliche Intelligenz – Special Issue on NLP and Semantics}, web_url2 = {https://rdcu.be/cmGwb}, file_url = {http://link.springer.com/article/10.1007/s13218-021-00721-x} } @Conference { 821527562625_2021, author = {Karalus, Jakob and Lindner, Felix}, title = {Accelerating the Convergence of Human-in-the-Loop Reinforcement Learning with Counterfactual Explanations}, year = {2021}, day = {1}, reviewed = {1}, booktitle = {ICML Workshop on Human in the Loop Learning (HILL)} } @Inproceedings { QiAG20b, author = {Qiu, Haonan and Ayara, Adel and Glimm, Birte}, title = {Ontology-based Processing of Dynamic Maps in Automated Driving}, year = {2020}, booktitle = {Proceedings of the 12th International Conference on Knowledge Engineering and Ontology Development (KEOD 2020)}, publisher = {SciTePress}, tags = {AutomatedReasoning,KnowledgeModelling}, file_url = {t3://file?uid=431048} } @Inproceedings { Kraus2020ICMI, author = {Kraus, Matthias and Schiller, Marvin R. G. and Behnke, Gregor and Bercher, Pascal and Dorna, Michael and Dambier, Michael and Glimm, Birte and Biundo, Susanne and Minker, Wolfgang}, title = {Was that successful? On Integrating Proactive Meta-Dialogue in a DIY-Assistant System using Multimodal Cues}, abstract = {Effectively supporting novices during performance of complex tasks, e.g. do-it-yourself (DIY) projects, requires intelligent assistants to be more than mere instructors. In order to be accepted as a competent and trustworthy cooperation partner, they need to be able to actively participate in the project and engage in helpful conversations with users when assistance is necessary. 
Therefore, a new proactive version of the DIY-assistant \textsc\{Robert\} is presented in this paper. It extends the previous prototype by including the capability to initiate reflective meta-dialogues using multimodal cues. Two different strategies for reflective dialogue are implemented: A progress-based strategy initiates a reflective dialogue about previous experience with the assistance for encouraging the self-appraisal of the user. An activity-based strategy is applied for providing timely, task-dependent support. Therefore, user activities with a connected drill driver are tracked that trigger dialogues in order to reflect on the current task and to prevent task failure. An experimental study comparing the proactive assistant against the baseline version shows that proactive meta-dialogue is able to build user trust significantly better than a solely reactive system. Besides, the results provide interesting insights for the development of proactive dialogue assistants.}, year = {2020}, booktitle = {Proceedings of 22nd ACM International Conference on Multimodal Interaction (ICMI 2020)}, publisher = {ACM}, tags = {SFB-T3}, web_url = {https://dl.acm.org/doi/pdf/10.1145/3382507.3418818 - - \dqLink to the Conference Paper\dq}, file_url = {t3://file?uid=431053} } @Conference { 709640316532_2020, author = {Krarup, Benjamin and Krivic, Senka and Lindner, Felix and Long, Derek}, title = {Towards Contrastive Explanations for Comparing the Ethics of Plans}, year = {2020}, reviewed = {1}, booktitle = {Against Robot Dystopias: Thinking through the ethical, legal and societal issues of robotics and automation (AGAINST-20), Workshop at ICRA 2020} } @Conference { 564963994726_2020, author = {Lindner, Felix}, title = {Towards a Formalization of Explanations for Robots’ Actions and Beliefs}, status = {1}, year = {2020}, booktitle = {JOWO 2020 Proceedings of the FOIS Workshop Ontologies for Autonomous Robotics (ROBONTICS 2020)} } @Conference { 178388592553_2020, author = 
{Tolmeijer, Suzanne and Weiss, Astrid and Hanheide, Marc and Lindner, Felix and Powers, Thomas M. and Dixon, Clare and Tielman, Myrthe L.}, title = {Taxonomy of Trust-Relevant Failures and Mitigation Strategies}, year = {2020}, reviewed = {1}, DOI = {https://doi.org/10.1145/3319502.3374793}, booktitle = {HRI '20: Proceedings of the 2020 ACM/IEEE International Conference on Human-Robot Interaction} } @Inproceedings { 507003734620_2020, author = {Bercher, Pascal and Olz, Conny}, title = {POP ≡ POCL, right? Complexity Results for Partial Order (Causal Link) Makespan Minimization}, year = {2020}, DOI = {https://doi.org/10.1609/aaai.v34i06.6530}, booktitle = {Proceedings of the 34th AAAI Conference on Artificial Intelligence (AAAI 2020)}, publisher = {AAAI Press}, pages = {9785--9793}, tags = {SFB-T3}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2020/Bercher2020POPvsPOCL.pdf} } @Conference { 882114214833_2020, author = {Lindner, Felix}, title = {Permissibility-under-a-description reasoning for deontological robots}, status = {1}, year = {2020}, reviewed = {1}, booktitle = {RoboPhilosophy 2020} } @Inproceedings { DBLP:conf/dlog/SteigmillerG20, author = {Steigmiller, Andreas}, title = {Parallelised ABox Reasoning and Query Answering with Expressive Description Logics (Extended Abstract)}, year = {2020}, booktitle = {Proceedings of the 33rd International Workshop on Description Logics (DL 2020)}, volume = {2663}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, editor = {Stefan Borgwardt and Thomas Meyer}, tags = {AutomatedReasoning,KnowledgeModelling}, web_url = {http://ceur-ws.org/Vol-2663/abstract-21.pdf - - \dqCEUR WS\dq}, file_url = {t3://file?uid=431054} } @Inproceedings { Kraus2020Comparison, author = {Kraus, Matthias and Fischbach, Fabian and Jansen, Pascal and Minker, Wolfgang}, title = {A Comparison of Explicit and Implicit Proactive Dialogue Strategies for Conversational Recommendation}, year = 
{2020}, booktitle = {Proceedings of the 12th International Conference on Language Resources and Evaluation (LREC 2020)}, publisher = {ELRA}, pages = {429--435}, tags = {SFB-T3}, file_url = {https://www.aclweb.org/anthology/2020.lrec-1.54.pdf} } @Inproceedings { Behnke2020SuccinctGrounding, author = {Behnke, Gregor and H\"{o}ller, Daniel and Schmid, Alexander and Bercher, Pascal and Biundo, Susanne}, title = {On Succinct Groundings of HTN Planning Problems}, abstract = {The research in hierarchical planning has made considerable progress in the last few years. Many recent systems do not rely on hand-tailored advice anymore to find solutions, but are supposed to be domain-independent systems that come with sophisticated solving techniques. In principle, this development would make the comparison between systems easier (because the domains are not tailored to a single system anymore) and - much more important - also the integration into other systems, because the modeling process is less tedious (due to the lack of advice) and there is no (or less) commitment to a certain planning system the model is created for. However, these advantages are destroyed by the lack of a common input language and feature set supported by the different systems. In this paper, we propose an extension to PDDL, the description language used in non-hierarchical planning, to the needs of hierarchical planning systems.}, year = {2020}, booktitle = {Proceedings of the 34th AAAI Conference on Artificial Intelligence (AAAI 2020)}, publisher = {AAAI Press}, pages = {9775--9784}, tags = {SFB-T3}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2020/AAAI-BehnkeG.1770.pdf} } @Inproceedings { Behnke2020DIYAssistant, author = {Behnke, Gregor and Bercher, Pascal and Kraus, Matthias and Schiller, Marvin R. G. 
and Mickeleit, Kristof and H\"{a}ge, Timo and Dorna, Michael and Dambier, Michael and Minker, Wolfgang and Glimm, Birte and Biundo, Susanne}, title = {New Developments for Robert – Assisting Novice Users Even Better in DIY Projects}, year = {2020}, booktitle = {Proceedings of the 30th International Conference on Automated Planning and Scheduling (ICAPS 2020)}, publisher = {AAAI Press}, pages = {343--347}, keywords = {SFB-T3,Planning}, tags = {SFB-T3,Planning,KnowledgeModelling}, web_url = {https://aaai.org/ojs/index.php/ICAPS/article/view/6679/6533 - - \dqLink to the AAAI Paper Version\dq}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2020/Behnke2020DIYAssistant.pdf} } @Article { 265824339876_2020, author = {H\"{o}ller, Daniel and Bercher, Pascal and Behnke, Gregor and Biundo, Susanne}, title = {HTN Planning as Heuristic Progression Search}, year = {2020}, journal = {Journal of Artificial Intelligence Research (JAIR)}, volume = {67}, publisher = {AAAI Press}, pages = {835--880}, file_url = {https://jair.org/index.php/jair/article/view/11282/26578} } @Inproceedings { Hoeller2020HTNPlanRepair, author = {H\"{o}ller, Daniel and Bercher, Pascal and Behnke, Gregor and Biundo, Susanne}, title = {HTN Plan Repair via Model Transformation}, abstract = {To make planning feasible, planning models abstract from many details of the modeled system. When executing plans in the actual system, the model might be inaccurate in a critical point, and plan execution may fail. There are two options to handle this case: the previous solution can be modified to address the failure (plan repair), or the planning process can be re-started from the new situation (re-planning). In HTN planning, discarding the plan and generating a new one from the novel situation is not easily possible, because the HTN solution criteria make it necessary to take already executed actions into account. 
Therefore all approaches to repair plans in the literature are based on specialized algorithms. In this paper, we discuss the problem in detail and introduce a novel approach that makes it possible to use unchanged, off-the-shelf HTN planning systems to repair broken HTN plans. That way, no specialized solvers are needed.}, year = {2020}, booktitle = {Proceedings of the 43th German Conference on Artificial Intelligence (KI 2020)}, publisher = {Springer}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2020/Hoeller20Repair.pdf} } @Inproceedings { Hoeller20HDDL, author = {H\"{o}ller, Daniel and Behnke, Gregor and Bercher, Pascal and Biundo, Susanne and Fiorino, Humbert and Pellier, Damien and Alford, Ron}, title = {HDDL: An Extension to PDDL for Expressing Hierarchical Planning Problems}, abstract = {The research in hierarchical planning has made considerable progress in the last few years. Many recent systems do not rely on hand-tailored advice anymore to find solutions, but are supposed to be domain-independent systems that come with sophisticated solving techniques. In principle, this development would make the comparison between systems easier (because the domains are not tailored to a single system anymore) and - much more important - also the integration into other systems, because the modeling process is less tedious (due to the lack of advice) and there is no (or less) commitment to a certain planning system the model is created for. However, these advantages are destroyed by the lack of a common input language and feature set supported by the different systems. 
In this paper, we propose an extension to PDDL, the description language used in non-hierarchical planning, to the needs of hierarchical planning systems.}, year = {2020}, booktitle = {Proceedings of the 34th AAAI Conference on Artificial Intelligence (AAAI 2020)}, publisher = {AAAI Press}, pages = {9883--9891}, tags = {SFB-TRR-62,Planning,SFB-T3}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2020/Hoeller2020HDDL.pdf} } @Article { 752914869574_2020, author = {Lindner, Felix and Mattm\"{u}ller, Robert and Nebel, Bernhard}, title = {Evaluation of the Moral Permissibility of Action Plans}, year = {2020}, reviewed = {1}, DOI = {https://doi.org/10.1016/j.artint.2020.103350}, journal = {Artificial Intelligence}, volume = {287} } @Inproceedings { Kraus2020Umap, author = {Kraus, Matthias and Wagner, Nicolas and Minker, Wolfgang}, title = {Effects of Proactive Dialogue Strategies on Human-Computer Trust}, year = {2020}, booktitle = {Proceedings of the 28th ACM Conference on User Modeling, Adaptation and Personalization}, publisher = {ACM}, pages = {107--116}, tags = {SFB-T3}, file_url = {https://dl.acm.org/doi/pdf/10.1145/3340631.3394840} } @Inproceedings { QiAG20c, author = {Qiu, Haonan and Ayara, Adel and Glimm, Birte}, title = {A Knowledge-Spatial Architecture for Processing Dynamic Maps in Automated Driving}, year = {2020}, booktitle = {Proceedings of the ISWC 2020 Posters \\& Demonstrations Track}, volume = {2721}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, keywords = {Semantic Web}, tags = {AutomatedReasoning, KnowledgeModelling}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2020/QiAG20c.pdf} } @Inproceedings { QiAG20a, author = {Qiu, Haonan and Ayara, Adel and Glimm, Birte}, title = {A Knowledge Architecture Layer for Map Data in Autonomous Vehicles}, year = {2020}, booktitle = {Proceedings of the 23rd IEEE International Conference on Intelligent 
Transportation Systems (ITSC 2020)}, publisher = {IEEE}, tags = {AutomatedReasoning,KnowledgeModelling}, file_url = {t3://file?uid=431045}, note = {Best Paper Nomination} } @Phdthesis { Behnke2019Thesis, author = {Behnke, Gregor}, title = {Hierarchical planning through propositional logic - highly efficient, versatile, and flexible}, year = {2019}, month = {12}, day = {2}, web_url = {t3://file?uid=418459}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2019/thesis\_behnke\_main.pdf} } @Inproceedings { Olz2019PostOptimizing, author = {Olz, Conny and Bercher, Pascal}, title = {Eliminating Redundant Actions in Partially Ordered Plans -- A Complexity Analysis}, abstract = {In this paper we study the computational complexity of post-optimizing partially ordered plans, i.e., we investigate the problem that is concerned with detecting and deleting unnecessary actions. For totally ordered plans it can easily be tested in polynomial time whether a single action can be removed without violating executability. Identifying an executable sub-plan, i.e., asking whether k plan steps can be removed, is known to be NP-hard. We investigate the same questions for partially ordered input plans, as they are created by many search algorithms or used by real-world applications -- in particular time-critical ones that exploit parallelism of non-conflicting actions. More formally, we investigate the computational complexity of removing an action from a partially ordered solution plan in which every linearization is a solution in the classical sense while allowing ordering insertions afterwards to repair arising executability issues. It turns out that this problem is NP-complete -- even if just a single action is removed -- and thereby show that this reasoning task is harder than for totally ordered plans. 
Moreover, we identify the structural properties responsible for this hardness by providing a fixed-parameter tractability (FPT) result.}, year = {2019}, DOI = {https://doi.org/10.1609/icaps.v29i1.3493}, booktitle = {Proceedings of the 29th International Conference on Automated Planning and Scheduling (ICAPS 2019)}, publisher = {AAAI Press}, pages = {310--319}, tags = {SFB-T3}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2019/Olz2019StepElimination.pdf} } @Inproceedings { Hoeller2019OnGuiding, author = {H\"{o}ller, Daniel and Bercher, Pascal and Behnke, Gregor and Biundo, Susanne}, title = {On Guiding Search in HTN Planning with Classical Planning Heuristics}, abstract = {Planning is the task of finding a sequence of actions that achieves the goal(s) of an agent. It is solved based on a model describing the environment and how to change it. There are several approaches to solve planning tasks, two of the most popular are classical planning and hierarchical planning. Solvers are often based on heuristic search, but especially regarding domain-independent heuristics, techniques in classical planning are more sophisticated. However, due to the different problem classes, it is difficult to use them in hierarchical planning. 
In this paper we describe how to use arbitrary classical heuristics in hierarchical planning and show that the resulting system outperforms the state of the art in hierarchical planning.}, year = {2019}, booktitle = {Proceedings of the 28th International Joint Conference on Artificial Intelligence (IJCAI 2019)}, publisher = {IJCAI}, pages = {6171--6175}, tags = {SFB-T3}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2019/Hoeller2019ProgressionHeuristics.pdf} } @Inproceedings { Behnke2019GroundingWS, author = {Behnke, Gregor and H\"{o}ller, Daniel and Bercher, Pascal and Biundo, Susanne}, title = {More Succinct Grounding of HTN Planning Problems -- Preliminary Results}, abstract = {Planning systems usually operate on grounded representations of the planning problems during search. Further, planners that use translations into other combinatorial problems also often perform their translations based on a grounded model. Planning models, however, are commonly defined in a lifted formalism. As such, one of the first preprocessing steps a planner performs is to generate a grounded representation. In this paper we present a new approach for grounding HTN planning problems that produces smaller groundings than the previously published method. 
We expect this decrease in size to lead to more efficient planners.}, year = {2019}, booktitle = {Proceedings of the Second ICAPS Workshop on Hierarchical Planning}, pages = {40--48}, tags = {SFB-T3}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2019/Behnke2019Grounding.pdf} } @Inproceedings { Behnke2019ICP, author = {Behnke, Gregor and H\"{o}ller, Daniel and Bercher, Pascal and Biundo, Susanne and Pellier, Damien and Fiorino, Humbert and Alford, Ron}, title = {Hierarchical Planning in the IPC}, abstract = {Over the last years, the amount of research in hierarchical planning has increased, leading to significant improvements in the performance of planners. However, the research is diverging and planners are somewhat hard to compare against each other. This is mostly caused by the fact that there is no standard set of benchmark domains, nor even a common description language for hierarchical planning problems. As a consequence, the available planners support a widely varying set of features and (almost) none of them can solve (or even parse) any problem developed for another planner. With this paper, we propose to create a new track for the IPC in which hierarchical planners will compete. This competition will result in a standardised description language, broader support for core features of that language among planners, a set of benchmark problems, a means to fairly and objectively compare HTN planners, and for new challenges for planners. 
}, year = {2019}, booktitle = {Proceedings of the Workshop on the International Planning Competition}, tags = {Planning}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2019/Behnke2019HTNIPC.pdf} } @Inproceedings { Hoeller2019HDDL, author = {H\"{o}ller, Daniel and Behnke, Gregor and Bercher, Pascal and Biundo, Susanne and Fiorino, Humbert and Pellier, Damien and Alford, Ron}, title = {HDDL - A Language to Describe Hierarchical Planning Problems}, abstract = {The research in hierarchical planning has made considerable progress in the last few years. Many recent systems do not rely on hand-tailored advice anymore to find solutions, but are supposed to be domain-independent systems that come with sophisticated solving techniques. In principle, this development would make the comparison between systems easier (because the domains are not tailored to a single system anymore) and – much more important – also the integration into other systems, because the modeling process is less tedious (due to the lack of advice) and there is no (or less) commitment to a certain planning system the model is created for. However, these advantages are destroyed by the lack of a common input language and feature set supported by the different systems. In this paper, we propose an extension to PDDL, the description language used in non-hierarchical planning, to the needs of hierarchical planning systems. 
We restrict our language to a basic feature set shared by many recent systems, give an extension of PDDL’s EBNF syntax definition, and discuss our extensions, especially with respect to planner-specific input languages from related work.}, year = {2019}, booktitle = {Proceedings of the Second ICAPS Workshop on Hierarchical Planning}, pages = {6--14}, tags = {SFB-T3}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2019/Hoeller2019HDDL.pdf} } @Inproceedings { Behnke2019SAToptimal, author = {Behnke, Gregor and H\"{o}ller, Daniel and Biundo, Susanne}, title = {Finding Optimal Solutions in HTN Planning - A SAT-based Approach}, abstract = { Over the last years, several new approaches to Hierarchical Task Network (HTN) planning have been proposed that increased the overall performance of HTN planners. However, the focus has been on agile planning -- on finding a solution as quickly as possible. Little work has been done on finding \emph\{optimal\} plans. We show how the currently best-performing approach to HTN planning -- the translation into propositional logic -- can be utilised to find optimal plans. Such SAT-based planners usually bound the HTN problem to a certain depth of decomposition and then translate the problem into a propositional formula. To generate optimal plans, the \emph\{length\} of the solution has to be bounded instead of the decomposition \emph\{depth\}. We show the relationship between these bounds and how it can be handled algorithmically. Based on this, we propose an optimal SAT-based HTN planner and show that it performs favourably on a benchmark set. 
}, year = {2019}, booktitle = {Proceedings of the 28th International Joint Conference on Artificial Intelligence (IJCAI 2019)}, tags = {SFB-T3,Planning}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2019/Behnke2019satoptimal.pdf} } @Proceedings { Bercher2019HTNWorkshop, author = {Bercher, Pascal and Behnke, Gregor and Shivashankar, Vikas and Alford, Ron}, title = {Proceedings of the 2nd ICAPS Workshop on Hierarchical Planning}, year = {2019}, web_url = {https://icaps19.icaps-conference.org/workshops/Hierarchical-Planning/}, file_url = {https://icaps19.icaps-conference.org/workshops/Hierarchical-Planning/HPLAN2019Proceedings.pdf} } @Inproceedings { DBLP:conf/rweb/GlimmK19, author = {Glimm, Birte and Kazakov, Yevgeny}, title = {Classical Algorithms for Reasoning and Explanation in Description Logics}, year = {2019}, DOI = {10.1007/978-3-030-31423-1\_1}, booktitle = {Reasoning Web. Explainable Artificial Intelligence - 15th International Summer School}, volume = {11810}, publisher = {Springer}, series = {Lecture Notes in Computer Science}, editor = {Markus Kr\"{o}tzsch and Daria Stepanova}, pages = {1-64}, tags = {KnowledgeModeling}, web_url = {https://doi.org/10.1007/978-3-030-31423-1\\_1}, file_url = {t3://file?uid=420338} } @Inproceedings { 10.1007/978-3-030-30793-6_34, author = {Steigmiller, Andreas and Glimm, Birte}, title = {Absorption-Based Query Answering for Expressive Description Logics}, abstract = {Conjunctive query answering is an important reasoning task for logic-based knowledge representation formalisms, such as Description Logics, to query for instance data that is related in certain ways. Although many knowledge bases use language features of more expressive Description Logics, there are hardly any systems that support full conjunctive query answering for these logics. 
In fact, existing systems usually impose restrictions on the queries or only compute incomplete results.}, year = {2019}, isbn = {978-3-030-30793-6}, booktitle = {Proceedings of the 18th International Semantic Web Conference (ISWC 2019) Band 11778 aus Lecture Notes in Computer Science}, publisher = {Springer International Publishing}, address = {Cham}, editor = {Chiara Ghidini, Olaf Hartig, Maria Maleshkova, Vojt\v{e}ch Sv\'{a}tek,Isabel Cruz, Aidan Hogan}, pages = {593--611}, web_url = {https://link.springer.com/chapter/10.1007{\%}2F978-3-030-30793-6\_34}, file_url = {t3://file?uid=429519} } @Inbook { Kraus2019CloudCompanion, author = {Kraus, Matthias and Schiller, Marvin R. G. and Behnke, Gregor and Bercher, Pascal and Biundo, Susanne and Glimm, Birte and Minker, Wolfgang}, title = {9th International Workshop on Spoken Dialogue Systems}, abstract = {Companion systems are cooperative, cognitive systems aiming at assist-ing a user in everyday situations. Therefore, these systems require a high level ofavailability. One option to meet this requirement is to use a web-deployable archi-tecture. In this demo paper, we present a multimodal cloud-based dialogue frame-work for the development of a distributed, web-based companion system. 
The proposed framework is intended to provide an efficient, easily extensible, and scalable approach for these kinds of systems and will be demonstrated in a do-it-yourself assistance scenario.
We furthermore give pointers to some of the best-known planning systems capable of solving the respective problem classes.}, year = {2019}, booktitle = {Proceedings of the 28th International Joint Conference on Artificial Intelligence (IJCAI 2019)}, publisher = {IJCAI}, pages = {6267-6275}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2019/Bercher2019HierarchicalPlanningSurvey.pdf} } @Inproceedings { Behnke2019orderCharos, author = {Behnke, Gregor and H\"{o}ller, Daniel and Biundo, Susanne}, title = {Bringing Order to Chaos - A Compact Representation of Partial Order in SAT-based HTN Planning}, abstract = {HTN planning provides an expressive formalism to model complex application domains and has been widely used in real-world applications. However, the development of domain-independent planning techniques for such models is still lacking behind. The need to be informed about both - state-transition and hierarchy - makes the realisation of search-based approaches difficult, especially with unrestricted partial ordering of tasks in HTN domains. Recently, a translation of HTN planning problems into propositional logic has shown promising empirical results. Such planners benefit from a unified representation of state and hierarchy, but until now require very large formulae to represent partial order. In this paper, we introduce a novel encoding of HTN Planning as SAT. In contrast to related work, most of the reasoning on ordering relations is not left to the SAT solver, but done beforehand. 
This results in much smaller formulae and, as shown in our evaluation, in a planner that outperforms previous SAT-based approaches as well as the state-of-the-art in search-based HTN planning.}, year = {2019}, booktitle = {Proceedings of the 33rd AAAI Conference on Artificial Intelligence (AAAI 2019)}, publisher = {AAAI Press}, pages = {7520--7529}, keywords = {Planning}, tags = {SFB-T3}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2019/Behnke2019orderchaos.pdf} } @Inproceedings { DBLP:conf/dlog/SteigmillerG19, author = {Steigmiller, Andreas and Glimm, Birte}, title = {Absorption-Based Query Entailment Checking for Expressive Description Logics}, year = {2019}, booktitle = {Proceedings of the 32nd International Workshop on Description Logics (DL 2019) Band 2373 aus CEUR Workshop Proceedings}, publisher = {CEUR-WS.org}, web_url = {http://ceur-ws.org/Vol-2373/paper-25.pdf}, file_url = {http://ceur-ws.org/Vol-2373/paper-25.pdf} } @Article { Behnke2019bosch, author = {Behnke, Gregor and Schiller, Marvin R. G. and Kraus, Matthias and Bercher, Pascal and Schmautz, Mario and Dorna, Michael and Dambier, Michael and Minker, Wolfgang and Glimm, Birte and Biundo, Susanne}, title = {Alice in DIY wonderland or: Instructing novice users on how to use tools in DIY projects}, abstract = {We present the interactive assistant Robert that provides situation-adaptive support in the realisation of do-it-yourself (DIY) home improvement projects. Robert assists its users by providing comprehensive step-by-step instructions for completing the DIY project. Each instruction is illustrated with detailed graphics, written and spoken text, as well as with videos. They explain how the steps of the project have to be prepared and assembled and give precise instructions on how to operate the required electric devices. 
The step-by-step instructions are generated by a hierarchical planner, which enables Robert to adapt to a multitude of environments easily. Parts of the underlying model are derived from an ontology storing information about the available devices and resources. A dialogue manager capable of natural language interaction is responsible for hands-free interaction. We explain the required background technology and present preliminary results of an empirical evaluation.}, year = {2019}, DOI = {10.3233/AIC-180604}, journal = {AI Communications}, volume = {32}, publisher = {IOS Press}, pages = {31-57}, number = {1}, tags = {SFB-T3,Planning}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2019/Behnke2019bosch.pdf}, note = {The final publication is available at IOS Press through http://dx.doi.org/10.3233/AIC-180604}, annotation = {The final publication is available at IOS Press through http://dx.doi.org/10.3233/AIC-180604} } @Inproceedings { SQAG19a, author = {Suryawanshi, Yogita and Qiu, Haonan and Ayara, Adel and Glimm, Birte}, title = {An Ontological Model for Map Data in Automotive Systems}, year = {2019}, booktitle = {Proceedings of the 2nd IEEE International Conference on Artificial Intelligence and Knowledge Engineering (AIKE 2019)}, publisher = {IEEE}, pages = {140--147}, tags = {AutomatedReasoning,KnowledgeModelling}, web_url = {https://ieeexplore.ieee.org/stamp/stamp.jsp?tp=\\&arnumber=8791704 - - \dqLink to conference paper\dq}, file_url = {t3://file?uid=431057} } @Inproceedings { Behnke2018HomeImprovementSystem, author = {Behnke, Gregor and Schiller, Marvin R. G. and Kraus, Matthias and Bercher, Pascal and Schmautz, Mario and Dorna, Michael and Minker, Wolfgang and Glimm, Birte and Biundo, Susanne}, title = {Instructing Novice Users on How to Use Tools in DIY Projects}, abstract = {Novice users require assistance when performing handicraft tasks. 
Adequate instruction ensures task completion and conveys knowledge and abilities required to perform the task. We present an assistant teaching novice users how to operate electronic tools, such as drills, saws, and sanders, in the context of Do-It-Yourself (DIY) home improvement projects. First, the actions that need to be performed for the project are determined by a planner. Second, a dialogue manager capable of natural language interaction presents these actions as instructions to the user. Third, questions on these actions and involved objects are answered by generating appropriate ontology-based explanations.}, year = {2018}, DOI = {10.24963/ijcai.2018/844}, booktitle = {Proceedings of the 27th International Joint Conference on Artificial Intelligence and the 23rd European Conference on Artificial Intelligence (IJCAI-ECAI 2018)}, publisher = {IJCAI}, pages = {5805--5807}, tags = {SFB-T3}, web_url = {www.ijcai.org}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2018/Behnke2018DIY.pdf} } @Article { Behnke2018SATltljournal, author = {Behnke, Gregor and Biundo, Susanne}, title = {X and more Parallelism: Integrating LTL-Next into SAT-based Planning with Trajectory Constraints While Allowing for Even More Parallelism}, year = {2018}, DOI = {10.4114/intartif.vol21iss62pp75-90}, journal = {Inteligencia Artificial}, volume = {21}, pages = {75--90}, number = {62}, tags = {SFB-T3}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2018/Behnke2018SATltljournal.pdf} } @Inproceedings { Behnke2018MoreParallelLTL, author = {Behnke, Gregor and Biundo, Susanne}, title = {X and more Parallelism - Integrating LTL-Next into SAT-based Planning with Trajectory Constraints while Allowing for even more Parallelism}, abstract = {Linear temporal logic (LTL) provides expressive means to specify temporally extended goals as well as preferences. 
Recent research has focussed on compilation techniques, i.e., methods to alter the domain ensuring that every solution adheres to the temporally extended goals. This requires either new actions or an construction that is exponential in the size of the formula. A translation into boolean satisfiability (SAT) on the other hand requires neither. So far only one such encoding exists, which is based on the parallel ∃-step encoding for classical planning. We show a connection between it and recently developed compilation techniques for LTL, which may be exploited in the future. The major drawback of the encoding is that it is limited to LTL without the X operator. We show how to integrate X and describe two new encodings, which allow for more parallelism than the original encoding. An empirical evaluation shows that the new encodings outperform the current state-of-the-art encoding.}, year = {2018}, booktitle = {Proceedings of the Workshop on Constraint Satisfaction Techniques for Planning and Scheduling Problems (COPLAS)}, pages = {1--10}, tags = {SFB-T3}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2018/Behnke2018satltl.pdf} } @Inproceedings { Behnke2018treeSATICTAI, author = {Behnke, Gregor and H\"{o}ller, Daniel and Biundo, Susanne}, title = {Tracking Branches in Trees - A Propositional Encoding for Solving Partially-Ordered HTN Planning Problems}, abstract = {Planning via SAT has proven to be an efficient and versatile planning technique. Its declarative nature allows for an easy integration of additional constraints and can harness the progress made in the SAT community without the need to adapt the planner. However, there has been only little attention to SAT planning for hierarchical domains. To ease encoding, existing approaches for HTN planning require additional assumptions, like non-recursiveness or totally-ordered methods. Both limit the expressiveness of HTN planning severely. 
We propose the first propositional encodings which are able to solve general, i.e., partially-ordered, HTN planning problems, based on a previous encoding for totally-ordered problems. The empirical evaluation of our encoding shows that it outperforms existing HTN planners significantly.}, year = {2018}, booktitle = {Proceedings of the 30th IEEE International Conference on Tools with Artificial Intelligence (ICTAI 2018)}, publisher = {IEEE Computer Society}, pages = {73--80}, keywords = {SFB-TRR-62,Planning}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2018/Behnke2018treeSAT.pdf} } @Inproceedings { Behnke2018TreeSAT, author = {Behnke, Gregor and H\"{o}ller, Daniel and Biundo, Susanne}, title = {Tracking Branches in Trees - A Propositional Encoding for Solving Partially-Ordered HTN Planning Problems}, abstract = {Planning via SAT has proven to be an efficient and versatile planning technique. Its declarative nature allows for an easy integration of additional constraints and can harness the progress made in the SAT community without the need to adapt the planner. However, there has been only little attention to SAT planning for hierarchical domains. To ease encoding, existing approaches for HTN planning require additional assumptions, like non-recursiveness or totally-ordered methods. Both limit the expressiveness of HTN planning severely. We propose the first propositional encodings which are able to solve general, i.e., partially-ordered, HTN planning problems, based on a previous encoding for totally-ordered problems. 
The empirical evaluation of our encoding shows that it outperforms existing HTN planners significantly.}, year = {2018}, booktitle = {Proceedings of the First ICAPS Workshop on Hierarchical Planning}, pages = {40--47}, tags = {SFB-TRR-62,SFB-T3}, web_url = {http://icaps18.icaps-conference.org/hierarchicalplanning/}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2018/Behnke2018partSAT.pdf} } @Inproceedings { Leichtmann2018HumanPlanning, author = {Leichtmann, Benedikt and Bercher, Pascal and H\"{o}ller, Daniel and Behnke, Gregor and Biundo, Susanne and Nitsch, Verena and Baumann, Martin}, title = {Towards a Companion System Incorporating Human Planning Behavior -- A Qualitative Analysis of Human Strategies}, abstract = {User-friendly Companion Systems require Artificial Intelligence planning to take into account human planning behavior. We conducted a qualitative exploratory study of human planning in a knowledge rich, real-world scenario. Participants were tasked with setting up a home theater. The effect of strategy knowledge on problem solving was investigated by comparing the performance of two groups: one group (n = 23) with strategy instructions for problem-solving and a control group without such instructions (n = 16). We inductively identify behavioral patterns for human strategy use through Markov matrices. 
Based on the results, we derive implications for the design of planning-based assistance systems.}, year = {2018}, booktitle = {Proceedings der dritten transdisziplin\"{a}ren Konferenz \dqTechnische Unterst\"{u}tzungssysteme, die die Menschen wirklich wollen\dq (engl: Proceedings of the 3rd \dqTransdisciplinary Conference on Support Technologies\dq), TCST 2018}, pages = {89--98}, tags = {SFB-TRR-62,Planning,SFB-T3}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2018/Leichtmann2018HumanPlanningBehavior.pdf}, note = {This paper won the Best Paper Award.} } @Inproceedings { Behnke2018totSAT, author = {Behnke, Gregor and H\"{o}ller, Daniel and Biundo, Susanne}, title = {totSAT - Totally-Ordered Hierarchical Planning through SAT}, abstract = {In this paper, we propose a novel SAT-based planning approach for hierarchical planning by introducing the SAT-based planner totSAT for the class of totally-ordered HTN planning problems. We use the same general approach as SAT planning for classical planning does: bound the problem, translate the problem into a formula, and if the formula is not satisfiable, increase the bound. In HTN planning, a suitable bound is the maximum depth of decomposition. We show how totally-ordered HTN planning problems can be translated into a SAT formula, given this bound. Furthermore, we have conducted an extensive empirical evaluation to compare our new planner against state-of-the-art HTN planners. 
It shows that our technique outperforms any of these systems.}, year = {2018}, booktitle = {Proceedings of the 32nd AAAI Conference on Artificial Intelligence (AAAI 2018)}, publisher = {AAAI Press}, pages = {6110--6118}, event_name = {AAAI 2018}, event_place = {New Orleans}, keywords = {SFB-TRR-62,Planning}, tags = {SFB-TRR-62,Planning}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2018/Behnke2018totSAT.pdf} } @Proceedings { HierarchicalPlanningWorkshop2018, author = {Bercher, Pascal and H\"{o}ller, Daniel and Biundo, Susanne and Alford, Ron}, title = {Proceedings of the 1st ICAPS Workshop on Hierarchical Planning}, abstract = {The motivation for using hierarchical planning formalisms is manifold. It ranges from an explicit and predefined guidance of the plan generation process and the ability to represent complex problem solving and behavior patterns to the option of having different abstraction layers when communicating with a human user or when planning cooperatively. This led to a large set of different hierarchical formalisms and systems. With this workshop, we want to bring together scientists working on any aspect related to hierarchical planning to exchange ideas and foster cooperation. Hierarchies induce fundamental differences from classical, non-hierarchical planning, creating distinct computational properties and requiring separate algorithms for plan generation, plan verification, plan repair, and practical applications. Many of these aspects of hierarchical planning are still unexplored. This wide range of important yet insufficiently solved problems is reflected in the topics presented in this proceedings. Though the main focus lies on the development of planning systems, these tackle quite different classes of hierarchical problems and use several solving techniques. 
It includes work on real-time planning, planning with task insertion, distributed planning, and extensions of formalisms to enable real-world application. Beside solvers, the presented work includes techniques for the plan repair problem and discussions of the application in real-world problems.}, year = {2018}, editor = {Pascal Bercher and Daniel H\"{o}ller and Susanne Biundo and Ron Alford}, tags = {Planning}, web_url = {http://icaps18.icaps-conference.org/hierarchicalplanning/}, file_url = {http://icaps18.icaps-conference.org/fileadmin/alg/conferences/icaps18/workshops/workshop08/docs/HierarchicalPlanningProceedings.pdf} } @Inproceedings { Hoeller2018PlanRecPAIR, author = {H\"{o}ller, Daniel and Bercher, Pascal and Behnke, Gregor and Biundo, Susanne}, title = {Plan and Goal Recognition as HTN Planning}, abstract = {Plan- and Goal Recognition (PGR) is the task of inferring the goals and plans of an agent based on its actions. A few years ago, an approach has been introduced that successfully exploits the performance of planning systems to solve it. That way, no specialized solvers are needed and PGR benefits from present and future research in planning. The approach uses classical planning systems and needs to plan (at least) once for every possible goal. However, models in PGR are often structured in a hierarchical way, similar to Hierarchical Task Networks (HTNs). These models are strictly more expressive than those in classical planning and can describe partially ordered sets of tasks or multiple goals with interleaving plans. We present the approach PGR as HTN Planning that enables the recognition of complex agent behavior by using unmodified, off-the-shelf HTN planners. Planning is thereby needed only once, regardless of how many possible goals there are. 
Our evaluation shows that current planning systems are able to handle large models with thousands of possible goals and that the approach results in high recognition rates.}, year = {2018}, booktitle = {Proceedings of the AAAI 2018 Workshop on Plan, Activity, and Intent Recognition (PAIR 2018)}, pages = {607--613}, tags = {SFB-TRR-62,Planning}, web_url = {http://www.planrec.org/PAIR/PAIR18/Resources.html}, file_url = {http://www.planrec.org/PAIR/PAIR18/Papers/HollerPair18.pdf} } @Inproceedings { Hoeller2018PlanRec, author = {H\"{o}ller, Daniel and Behnke, Gregor and Bercher, Pascal and Biundo, Susanne}, title = {Plan and Goal Recognition as HTN Planning}, abstract = {Plan- and Goal Recognition (PGR) is the task of inferring the goals and plans of an agent based on its actions. Traditional approaches in PGR are based on a plan library including pairs of plans and corresponding goals. In recent years, the field successfully exploited the performance of planning systems for PGR. The main benefits are the presence of efficient solvers and well-established, compact formalisms for behavior representation. However, the expressivity of the STRIPS planning models used so far is limited, and models in PGR are often structured in a hierarchical way. We present the approach Plan and Goal Recognition as HTN Planning that combines the expressive but still compact grammar-like HTN representation with the advantage of using unmodified, off-the-shelf planning systems for PGR. 
Our evaluation shows that - using our approach - current planning systems are able to handle large models with thousands of possible goals, that the approach results in high recognition rates, and that it works even when the environment is partially observable, i.e., if the observer might miss observations.}, year = {2018}, booktitle = {Proceedings of the 30th IEEE International Conference on Tools with Artificial Intelligence (ICTAI 2018)}, publisher = {IEEE Computer Society}, pages = {466--473}, tags = {Planning}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2018/Hoeller2018PlanRec.pdf} } @Inproceedings { Bercher2018DissertationAbstract, author = {Bercher, Pascal}, title = {Hybrides Planen -- Von der Theorie zur Praxis}, abstract = {Die Dissertation legt Grundlagen, die es erlauben, Planungstechnologie der KünstlichenIntelligenz als Basis für flexible Assistenzsysteme einzusetzen. Die Aufgabe der automatischenHandlungsplanung ist es hierbei, selbständig einen Plan zu entwickeln, der dem Nutzer Schritt fürSchritt präsentiert wird und ihn oder sie bei der Bearbeitung einer entsprechenden Aufgabe anleitet.Durch die starke Miteinbeziehung eines menschlichen Nutzers ergeben sich viele neue Herausforde-rungen: Pläne müssen schnell gefunden werden; und sie sollen nicht nur korrekt sein, sondern auchkostengünstig und dem Nutzer plausibel erscheinen; und sie sollen erklärbar sein, um Transparenzzu schaffen. Aus diesem Grund wurde das hybride Planen gewählt, ein hierarchischer, nicht-linearerPlanungsansatz. Es wurden neue Komplexitätsergebnisse für das Planexistenz- und das Planverifika-tionsproblem erzielt; die ersten zulässigen Heuristiken erforscht, welche das Finden optimaler Plänegarantieren; und es wurde ein prototypisches Assistenzsystem realisiert, das seinen Nutzer bei demAufbau einer komplexen Heimkinoanlage unterstützt.}, year = {2018}, booktitle = {Proc. 
of Ausgezeichnete Informatikdissertationen}, publisher = {Gesellschaft f\"{u}r Informatik}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2018/Bercher2018DissertationAbstract.pdf} } @Inproceedings { Hoeller18Repair, author = {H\"{o}ller, Daniel and Bercher, Pascal and Behnke, Gregor and Biundo, Susanne}, title = {HTN Plan Repair Using Unmodified Planning Systems}, abstract = {To make planning feasible, planning models abstract from many details of the modeled system. When executing plans in the actual system, the model might be inaccurate in a critical point, and plan execution may fail. There are two options to handle this case: the previous solution can be modified to address the failure (Plan Repair), or the planning process can be re-started from the new situation (Re-Planning). In HTN planning, discarding the plan and generating a new one from the novel situation is not easily possible, because the HTN solution criteria make it necessary to take already executed actions into account. Therefore all approaches to repair plans in the literature are based on specialized algorithms. In this paper, we discuss the problem in detail and introduce a novel approach that makes it possible to use unchanged, off-the-shelf HTN planning systems to repair broken HTN plans. That way, no specialized solvers are needed. 
}, year = {2018}, booktitle = {Proceedings of the First ICAPS Workshop on Hierarchical Planning}, pages = {26--30}, tags = {SFB-TRR-62,SFB-T3,Planning}, web_url = {http://icaps18.icaps-conference.org/hierarchicalplanning/}, file_url = {www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2018/Hoeller18Repair.pdf} } @Phdthesis { Richter2018PhDThesis, author = {Richter, Felix}, title = {Hierarchical planning under uncertainty}, abstract = {The recent years have seen significant progress in the fields of computer science and the engineering sciences, leading to a plethora of systems and services aimed at simplifying the organization of everyday life. The great potential of these utilities is however hindered by their complexity as well as the complexity of their interplay. Automated assistance systems can help users overcome this challenge. At the core of these systems lies planning functionality, needed for automatically generating courses of action, or policies, that represent, e.g., step-by-step instructions. Often, planning requires accounting for uncertainty inherent in a given application domain, making the process of generating such instructions computationally difficult. Fortunately, many assistance tasks exhibit hierarchical structure that allows understanding planning tasks as a hierarchy of subtasks, each of which can be solved using a limited number of solution recipes. The Hierarchical Task Network planning approach can already exploit such structures in deterministic planning domains by representing subtasks and recipes using abstract actions and methods, respectively, and generating plans by iteratively refining an initial abstract plan. The main goal of this thesis is to create a similar planning approach suited for planning domains that exhibit uncertainty, modeled as Partially Observable Markov Decision Processes. 
Based on a newly introduced suitable policy representation formalism called logical finite state controllers, the concepts of abstract actions and methods are reintroduced to create the Partially Observable Hierarchical Task Network planning approach. Next, Monte-Carlo tree search in the space of partially abstract controllers is identified as a suitable means for planning. The approach is then empirically evaluated on four domains by comparing it to search in the space of histories, a state-of-the-art non-hierarchical planning approach also based on Monte-Carlo tree search. This reveals that, with comparable computational effort, the proposed approach leads to policies of superior quality, and that it scales well with problem size. Two further techniques are then proposed enhance the presented approach: one that further reduces the required controller construction effort during search by refining controllers only selectively where required, and one that combines hierarchical and non-hierarchical search in order to combine the advantages of both.}, type = {Dissertation}, year = {2018}, DOI = {10.18725/OPARU-5243}, school = {Ulm University}, address = {Germany}, tags = {SFB-TRR-62, Planning}, file_url = {https://oparu.uni-ulm.de/xmlui/bitstream/handle/123456789/5300/dissertation\_richter.pdf} } @Inproceedings { schillerEtAlMCI2018, author = {Schiller, Marvin R. G. and Behnke, Gregor and Bercher, Pascal and Kraus, Matthias and Dorna, Michael and Richter, Felix and Biundo, Susanne and Glimm, Birte and Minker, Wolfgang}, title = {Evaluating Knowledge-Based Assistance for DIY}, abstract = {We report on the development of a companion system incorporating hierarchical planning, ontology-based knowledge modeling and multimodal cloud-based dialog. As an application scenario, we consider the domain of do-it-yourself (DIY) home improvement involving the use of power tools. 
To test and – if necessary – adjust the developed techniques, user studies are conducted throughout the development phase. We present fundamental considerations and open questions encountered when testing the implemented prototype with potential users and report first observations from a current study.}, year = {2018}, booktitle = {Proceedings of MCI Workshop ``Digital Companion''}, pages = {925--930}, tags = {SFB-TRR-62,SFB-T3}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2018/MCI-WS19-schillerEtAl2018.pdf} } @Conference { KazSko:IJCAR18:Justifications, author = {Kazakov, Yevgeny and Skocovsky, Peter}, title = {Enumerating Justifications using Resolution}, abstract = {If a conclusion follows from a set of axioms, then its justification is a minimal subset of axioms for which the entailment holds. An entailment can have several justifications. Such justifications are commonly used for the purpose of debugging of incorrect entailments in Description Logic ontologies. Recently a number of SAT-based methods have been proposed that can enumerate all justifications for entailments in light-weight ontology languages, such as EL. These methods work by encoding EL inferences in propositional Horn logic, and finding minimal models that correspond to justifications using SAT solvers. In this paper, we propose a new procedure for enumeration of justifications that uses resolution with answer literals instead of SAT solvers. In comparison to SAT-based methods, our procedure can enumerate justifications in any user-defined order that extends the set inclusion relation. The procedure is easy to implement and, like resolution, can be parametrized with ordering and selection strategies. We have implemented this procedure in PULi - a new Java-based Proof Utility Library, and performed an empirical comparison of (several strategies of) our procedure and other SAT-based tools on popular EL ontologies.
The experiments show that our procedure provides a comparable, if not better, performance than those highly optimized tools. For example, using one of the strategies, we were able for the first time to compute all justifications for all entailed concept subsumptions in one of the largest commonly used medical ontologies, Snomed CT.}, year = {2018}, isbn = {978-3-319-94204-9}, DOI = {10.1007/978-3-319-94205-6}, booktitle = {IJCAR}, volume = {10900}, publisher = {Springer}, series = {Lecture Notes in Computer Science}, editor = {Didier Galmiche and Stephan Schulz and Roberto Sebastiani}, pages = {609--626}, tags = {KnowledgeModeling, LiveOntologies}, web_url = {https://doi.org/10.1007/978-3-319-94205-6\_40}, file_url = {/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2018/KazSko18Justifications\_IJCAR.pdf} } @Inproceedings { BrGl18a, author = {Brenner, Markus and Glimm, Birte}, title = {Embracing Change by Abstraction Materialization Maintenance for Large ABoxes}, abstract = {Abstraction Refinement is a recently introduced technique which allows for reducing materialization of an ontology with a large ABox to materialization of a smaller (compressed) 'abstraction' of this ontology. In this paper, we show how Abstraction Refinement can be adopted for incremental ABox materialization by combining it with the well-known DRed algorithm for materialization maintenance. Such a combination is non-trivial and to preserve soundness and completeness, already Horn ALCHI requires more complex abstractions.
Nevertheless, we show that significant benefits can be obtained for synthetic and real-world ontologies.}, year = {2018}, booktitle = {Proceedings of the 27th International Joint Conference on Artificial Intelligence and the 23rd European Conference on Artificial Intelligence (IJCAI-ECAI 2018)}, publisher = {AAAI Press}, tags = {AutomatedReasoning}, web_url = {www.ijcai.org}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2018/BrGl18a.pdf} } @Inproceedings { PSH2018, author = {Perleth, Tanja and Schiller, Marvin R. G. and Glimm, Birte}, title = {Applying a Model of Text Comprehension to Automated Verbalizations of EL Derivations}, year = {2018}, booktitle = {Proceedings of DL 2018, CEUR Workshop Proceedings Vol. 2211}, tags = {SFB-TRR-62,SFB-T3}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2018/PerlethSchillerGlimmDL2018.pdf} } @Inproceedings { Kraus2018CompanionCloudDemo, author = {Kraus, Matthias and Behnke, Gregor and Bercher, Pascal and Schiller, Marvin R. G. and Biundo, Susanne and Glimm, Birte and Minker, Wolfgang}, title = {A Multimodal Dialogue Framework for Cloud-Based Companion Systems}, abstract = {Abstract Companion systems are cooperative, cognitive systems aiming at assisting a user in everyday situations. Therefore, these systems require a high level of availability. One option to meet this requirement is to use a web-deployable architecture. In this demo paper, we present a multimodal cloud-based dialogue framework for the development of a distributed, web-based companion system. The proposed framework is intended to provide an efficient, easily extensible, and scalable approach for this kind of systems and will be demonstrated in a do-it-yourself assistance scenario.}, year = {2018}, booktitle = {Proc. 
of the 10th International Workshop on Spoken Dialog Systems Technology (IWSDS 2018)}, tags = {SFB-TRR-62,SFB-T3,Planning,KnowledgeModeling}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2018/Kraus2018CompanionCloudDemo.pdf} } @Inproceedings { Hoeller18Progression, author = {H\"{o}ller, Daniel and Bercher, Pascal and Behnke, Gregor and Biundo, Susanne}, title = {A Generic Method to Guide HTN Progression Search with Classical Heuristics}, abstract = {HTN planning combines actions that cause state transition with grammar-like decomposition of compound tasks that additionally restricts the structure of solutions. There are mainly two strategies to solve such planning problems: decomposition-based search in a plan space and progression-based search in a state space. Existing progression-based systems do either not rely on heuristics (e.g. SHOP2) or calculate their heuristics based on extended or modified models (e.g. GoDeL). Current heuristic planners for standard HTN models (e.g. PANDA) use decomposition-based search. Such systems represent search nodes more compactly due to maintaining a partial order between tasks, but they have no current state at hand during search. This makes the design of heuristics difficult. In this paper we present a progression-based heuristic HTN planning system: We (1) provide an improved progression algorithm, prove its correctness, and empirically show its efficiency gain; and (2) present an approach that allows to use arbitrary classical (non-hierarchical) heuristics in HTN planning. 
Our empirical evaluation shows that the resulting system outperforms the state-of-the-art in HTN planning.}, year = {2018}, booktitle = {Proceedings of the 28th International Conference on Automated Planning and Scheduling (ICAPS 2018)}, publisher = {AAAI Press}, pages = {114--122}, tags = {SFB-TRR-62,Planning,SFB-T3}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2018/Hoeller18Progression.pdf}, note = {This paper has won the ICAPS Best Student Paper Award} } @Phdthesis { Bercher2018Dissertation, author = {Bercher, Pascal}, title = {Hybrid Planning - From Theory to Practice}, abstract = {This work lays fundamental groundwork for the development of so-called Companion Systems - cognitive technical systems that are capable to reason about themselves, their users and environment, and to plan a course of action to achieve their users' goals. They are intelligent devices that assist their users in operating them: instead of the user having to learn how to operate the respective system, the system is intelligent and flexible enough to provide its functionality in a truly user-friendly way. To fully meet a user's demands, Companion Systems rely on a multi-facet of capabilities that stem from different disciplines, such as Artificial Intelligence (AI) planning, knowledge representation and reasoning, dialog management, and user interaction management, to name just a few. This thesis focuses on the relevant aspects of AI planning technology that are of importance for such systems. AI planning is the central technology for many \emph{Companion Systems} as it allows to compute a course of action that, if followed by its user, achieves his or her goals and therefore serves as a basis of providing advanced user assistance. This thesis is concerned with \emph{hybrid planning} -- a hierarchical planning formalism that is especially suited for the basis of providing assistance to human users.
Based on this formalism we will investigate the full endeavor of developing Companion Systems -- from theory to practice. The thesis presents a novel formalization for hierarchical planning problems, which has become a standard in the field. We present a categorization of different problem classes into which hybrid planning as well as other well-known problem classes fall. This formalization allowed to prove a series of novel complexity results that are of interest both for theoretical and practical considerations. For many of the identified classes we introduce novel heuristics that are used to speed up the solution generation process. Some of them are the very first for the respective problem class, and some are the first admissible ones, thereby allowing to find optimal solutions -- which is especially important when plans are generated for human users. We apply hybrid planning in a prototypical Companion System. It assists a user in the task of setting up a complex home entertainment system. Based on a declarative (planning) model of the available hardware and its functionality, the assistant computes a sequence of actions that the user simply needs to follow to complete the setup task. Several so-called user-centered planning capabilities are applied in this system, such as a technique for generating user-friendly linearizations of non-linear plans or the capability to answer questions about the necessity of actions -- an essential property to ensure transparency of the system's behavior. In conclusion: Most modern technical devices are still lacking true intelligence -- since no research such as AI planning is sufficiently applied, so there is still huge potential in making such devices really smart by implementing them as cognitive systems that effectively assist their human users. 
Applying the research presented in this thesis is one step towards achieving this goal.}, type = {Dissertation}, year = {2018}, DOI = {10.18725/OPARU-5242}, school = {Ulm University}, tags = {SFB-TRR-62, Planning}, web_url = {https://oparu.uni-ulm.de/xmlui/handle/123456789/5299}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2018/DissertationBercher.pdf} } @Techreport { Bercher2018AssemblyAssistant, author = {Bercher, Pascal and Richter, Felix and Honold, Frank and Nielsen, Florian and Sch\"{u}ssel, Felix and Geier, Thomas and H\"{o}rnle, Thilo and Reuter, Stephan and H\"{o}ller, Daniel and Behnke, Gregor and Weber, Michael and Dietmayer, Klaus and Minker, Wolfgang and Biundo, Susanne}, title = {A Companion-System Architecture for Realizing Individualized and Situation-Adaptive User Assistance}, abstract = {We show how techniques from various research areas -- most notably hierarchical planning, dialog management, and interaction management -- can be employed to realize individualized and situation-adaptive user assistance. We introduce a modular system architecture that is composed of domain-independent components implementing techniques from the respective areas. Systems based on this architecture -- so-called Companion-Systems -- can provide intelligent assistance in a broad variety of tasks. They provide a user- and situation-adapted sequence of instructions that show how to achieve the respective task. Additional explanations are, like the instructions themselves, automatically derived based on a declarative model of the current task. These systems can react to unforeseen execution failures, repairing their underlying plans if required. We introduce a prototype system that assists with setting up a home theater and use it as a running example as well as for an empirical evaluation with test subjects that shows the usefulness of our approach.
We summarize the work of more than half a decade of research and development done by various research groups from different disciplines. Here, for the first time, we explain the full integration of all components, thereby showing ``the complete picture'' of our approach to provide individualized and situation-adaptive user assistance.}, type = {technical report}, year = {2018}, DOI = {10.18725/OPARU-11023}, institution = {Ulm University}, keywords = {planning}, tags = {SFB-TRR-62}, file_url = {https://oparu.uni-ulm.de/xmlui/bitstream/handle/123456789/11080/AssemblyAssistant.pdf} } @Inproceedings { Behnke2017Sloth, author = {Behnke, Gregor and Nielsen, Florian and Schiller, Marvin R. G. and Bercher, Pascal and Kraus, Matthias and Minker, Wolfgang and Biundo, Susanne and Glimm, Birte}, title = {SLOTH - the Interactive Workout Planner}, abstract = {We present the mixed-initiative planning system SLOTH, which is designed to assist users in planning a fitness workout. Mixed-initiative planning systems are especially useful for companion systems, as they allow the seamless integration of the complex cognitive ability of planning into ambient assistance systems. This is achieved by integrating the user directly into the process of plan generation and thereby allowing him to specify these objectives and to be assisted in generating a plan that not only achieves his objectives, but at the same time also fits his preferences.
We present the capabilities that are integrated into SLOTH and discuss the design choices and considerata that have to be taken into account when constructing a mixed-initiative planning system.}, year = {2017}, DOI = {10.1109/COMPANION.2017.8287077}, booktitle = {Proceedings of the 2nd International Conference on Companion Technology (ICCT 2017)}, publisher = {IEEE}, tags = {SFB-TRR-62,Planning}, web_url = {http://ieeexplore.ieee.org/document/8287077/}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2017/Behnke2017Sloth.pdf} } @Proceedings { ArGK17a, title = {Proceedings of the 30th International Workshop on Description Logics (DL 2017)}, year = {2017}, volume = {1879}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, editor = {Artale, Alessandro and Glimm, Birte and Kontchakov, Roman}, web_url = {http://ceur-ws.org/Vol-1879/} } @Article { Ponomaryov:2017:PDL:3091105.3091119, author = {Ponomaryov, Denis and Soutchanski, Mikhail}, title = {Progression of Decomposed Local-Effect Action Theories}, year = {2017}, DOI = {10.1145/3091119}, journal = {ACM Transactions on Computational Logic}, volume = {18}, publisher = {ACM}, pages = {1--41}, number = {2}, tags = {SFB-TRR-62,AutomatedReasoning}, web_url = {http://doi.acm.org/10.1145/3091119} } @Inproceedings { MaKG17a, author = {Mathieu, Christian and Klusch, Matthias and Glimm, Birte}, title = {QSMat: Query-Based Materialization for Efficient RDF Stream Processing}, abstract = {This paper presents a novel approach, QSMat, for efficient RDF data stream querying with flexible query-based materialization. Previous work accelerates either the maintenance of a stream window materialization or the evaluation of a query over the stream. QSMat exploits knowledge of a given query and entailment rule-set to accelerate window materialization by avoiding inferences that provably do not affect the evaluation of the query. 
We prove that stream querying over the resulting partial window materializations with QSMat is sound and complete with regard to the query. A comparative experimental performance evaluation based on the Berlin SPARQL benchmark and with selected representative systems for stream reasoning shows that QSMat can significantly reduce window materialization size, reasoning overhead, and thus stream query evaluation time.}, year = {2017}, DOI = {10.1007/978-3-319-69548-8\_12}, booktitle = {Proceedings of the 8th International Conference on Knowledge Engineering and Semantic Web (KESW 2017)}, volume = {786}, publisher = {Springer-Verlag}, series = {Communications in Computer and Information Science}, pages = {159-174}, tags = {AutomatedReasoning}, web_url = {https://link.springer.com/chapter/10.1007/978-3-319-69548-8\_12}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2017/MaKG17a.pdf} } @Inproceedings { GlKT17b, author = {Glimm, Birte and Kazakov, Yevgeny and Tran, Trung-Kien}, title = {Scalable Reasoning by Abstraction in DL-Lite}, year = {2017}, booktitle = {Proceedings of the 30th International Workshop on Description Logics (DL 2017)}, volume = {1879}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, tags = {AutomatedReasoning}, web_url = {http://ceur-ws.org/Vol-1879/paper57.pdf}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2017/GlKT17b.pdf} } @Inproceedings { KazKliStu:DL17:Explanations, author = {Kazakov, Yevgeny and Klinov, Pavel and Stupnikov, Alexander}, title = {Towards Reusable Explanation Services in Protege}, abstract = {We present several extensions of the explanation facility of the ontology editor Protege. Currently, explanations of OWL entailments in Protege are provided as justifications--minimal subsets of axioms that entail the given axiom. 
The plugin called `explanation workbench' computes justifications using a black-box algorithm and displays them in a convenient way. Recently, several other (mostly glass-box) tools for computing justifications have been developed, and it would be of interest to use such tools in Protege. To facilitate the development of justification-based explanation plugins for Protege, we have separated the explanation workbench into two reusable components--a plugin for black-box computation of justifications and a plugin for displaying (any) justifications. Many glass-box methods compute justifications from proofs, and we have also developed a reusable plugin for this service that can be used with (any) proofs. In addition, we have developed an explanation plugin that displays such proofs directly. Both plugins can be used, e.g., with the proofs provided by the ELK reasoner. This paper describes design, features, and implementation of these plugins.}, year = {2017}, booktitle = {Proceedings of the 30th International Workshop on Description Logics (DL 2017)}, volume = {1879}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, editor = {Alessandro Artale and Birte Glimm and Roman Kontchakov}, tags = {KnowledgeModeling, ELK, LiveOntologies}, web_url = {http://ceur-ws.org/Vol-1879/paper31.pdf}, file_url = {/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2017/KazKliStu17Explanations\_DL.pdf} } @Inproceedings { ScSG17a, author = {Glimm, Birte and Schiller, Marvin R. G.
and Schiller, Florian}, title = {Testing the Adequacy of Automated Explanations of EL Subsumptions}, year = {2017}, booktitle = {Proceedings of the 30th International Workshop on Description Logics (DL 2017)}, volume = {1879}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, tags = {AutomatedReasoning,KnowledgeModelling}, web_url = {http://ceur-ws.org/Vol-1879/paper43.pdf}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2017/ScSG17a.pdf} } @Article { PMGG17a, author = {Parsia, Bijan and Matentzoglu, Nicolas and Goncalves, Rafael S. and Glimm, Birte and Steigmiller, Andreas}, title = {The OWL Reasoner Evaluation (ORE) 2015 Competition Report}, abstract = {The OWL Reasoner Evaluation competition is an annual competition (with an associated workshop) that pits OWL 2 compliant reasoners against each other on various standard reasoning tasks over naturally occurring problems. The 2015 competition was the third of its sort and had 14 reasoners competing in six tracks comprising three tasks (consistency, classification, and realisation) over two profiles (OWL 2 DL and EL). In this paper, we discuss the design, execution and results of the 2015 competition with particular attention to lessons learned for benchmarking, comparative experiments, and future competitions.}, year = {2017}, DOI = {10.1007/s10817-017-9406-8}, journal = {Journal of Automated Reasoning (JAR)}, volume = {59}, pages = {455-482}, number = {4}, keywords = {Reasoning, Description Logics, Optimisations, Optimizations}, tags = {AutomatedReasoning}, web_url = {https://doi.org/10.1007/s10817-017-9406-8}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2017/PMGG17a.pdf} } @Inproceedings { Behnke17Verify, author = {Behnke, Gregor and H\"{o}ller, Daniel and Biundo, Susanne}, title = {This is a solution! (... but is it though?) 
- Verifying solutions of hierarchical planning problems}, abstract = {Plan-Verification is the task of determining whether a plan is a solution to a given planning problem. Any plan verifier has, apart from showing that verifying plans is possible in practice, a wide range of possible applications. These include mixed-initiative planning, where a user is integrated into the planning process, and local search, e.g., for post-optimising plans or for plan repair. In addition to its practical interest, plan verification is also a problem worth investigating for theoretical reasons. Recent work showed plan verification for hierarchical planning problems to be NP-complete, as opposed to classical planning where it is in P. As such, plan verification for hierarchical planning problem was – until now – not possible. We describe the first plan verifier for hierarchical planning. It uses a translation of the problem into a SAT formula. Further we conduct an empirical evaluation, showing that the correct output is produced within acceptable time.}, year = {2017}, booktitle = {Proceedings of the 27th International Conference on Automated Planning and Scheduling (ICAPS 2017)}, publisher = {AAAI Press}, pages = {20--28}, tags = {SFB-TRR-62,Planning}, web_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2017/Behnke17Verify\_Talk.pdf}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2017/Behnke17Verify.pdf} } @Inbook { Behnke2017CompBookMIP, author = {Behnke, Gregor and Nielsen, Florian and Schiller, Marvin R. G. and Ponomaryov, Denis and Bercher, Pascal and Glimm, Birte and Minker, Wolfgang and Biundo, Susanne}, title = {To Plan for the User Is to Plan With the User -- Integrating User Interaction Into the Planning Process}, abstract = {Settings where systems and users work together to solve problems collaboratively are among the most challenging applications of Companion-Technology. 
So far we have seen how planning technology can be exploited to realize Companion-Systems that adapt flexibly to changes in the user’s situation and environment and provide detailed help for users to realize their goals. However, such systems lack the capability to generate their plans in cooperation with the user. In this chapter we go one step further and describe how to involve the user directly into the planning process. This enables users to integrate their wishes and preferences into plans and helps the system to produce individual plans, which in turn let the Companion-System gain acceptance and trust from the user. Such a Companion-System must be able to manage diverse interactions with a human user. A so-called mixed-initiative planning system integrates several Companion-Technologies which are described in this chapter. For example, a--not yet final--plan, including its flaws and solutions, must be presented to the user to provide a basis for her or his decision. We describe how a dialog manager can be constructed such that it can handle all communication with a user. Naturally, the dialog manager and the planner must use coherent models. We show how an ontology can be exploited to achieve such models. Finally, we show how the causal information included in plans can be used to answer the questions a user might have about a plan. 
The given capabilities of a system to integrate user decisions and to explain its own decisions to the user in an appropriate way are essential for systems that interact with human users.}, year = {2017}, DOI = {10.1007/978-3-319-43665-4\_7}, booktitle = {Companion Technology -- A Paradigm Shift in Human-Technology Interaction}, publisher = {Springer}, chapter = {7}, series = {Cognitive Technologies}, editor = {Susanne Biundo and Andreas Wendemuth}, pages = {123--144}, tags = {SFB-TRR-62,Planning}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2017/Behnke2017CompBookMIP.pdf} } @Inbook { Bercher2017CompBookUCPlanning, author = {Bercher, Pascal and H\"{o}ller, Daniel and Behnke, Gregor and Biundo, Susanne}, title = {User-Centered Planning}, abstract = {User-centered planning capabilities are core elements of Companion-Technology. They are used to implement the functional behavior of technical systems in a way that makes those systems Companion-able – able to serve users individually, to respect their actual requirements and needs, and to flexibly adapt to changes of the user’s situation and environment. This book chapter presents various techniques we have developed and integrated to realize user-centered planning. They are based on a hybrid planning approach that combines key principles also humans rely on when making plans: stepwise refining complex tasks into executable courses of action and considering causal relationships between actions. Since the generated plans impose only a partial order on actions, they allow for a highly flexible execution order as well. Planning for Companion-Systems may serve different purposes, depending on the application for which the system is created. Sometimes, plans are just like control programs and executed automatically in order to elicit the desired system behavior; but sometimes they are made for humans. 
In the latter case, plans have to be adequately presented and the definite execution order of actions has to coincide with the user's requirements and expectations. Furthermore, the system should be able to smoothly cope with execution errors. To this end, the plan generation capabilities are complemented by mechanisms for plan presentation, execution monitoring, and plan repair.}, year = {2017}, DOI = {10.1007/978-3-319-43665-4\_5}, booktitle = {Companion Technology -- A Paradigm Shift in Human-Technology Interaction}, publisher = {Springer}, chapter = {5}, series = {Cognitive Technologies}, editor = {Susanne Biundo and Andreas Wendemuth}, pages = {79--100}, tags = {SFB-TRR-62,Planning}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2017/Bercher2017CompBookUCPlanning.pdf} } @Inproceedings { KazPon:DL17:Integration, author = {Kazakov, Yevgeny and Ponomaryov, Denis}, title = {On the Complexity of Semantic Integration of OWL Ontologies}, abstract = {We propose a new mechanism for integration of OWL ontologies using semantic import relations. In contrast to the standard OWL importing, we do not require all axioms of the imported ontologies to be taken into account for reasoning tasks, but only their logical implications over a chosen signature. This property comes naturally in many ontology integration scenarios.
In this paper, we study the complexity of reasoning over ontologies with semantic import relations and establish a range of tight complexity bounds for various fragments of OWL.}, year = {2017}, booktitle = {Proceedings of the 30th International Workshop on Description Logics (DL 2017)}, volume = {1879}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, editor = {Alessandro Artale and Birte Glimm and Roman Kontchakov}, tags = {KnowledgeModeling, LiveOntologies}, web_url = {http://ceur-ws.org/Vol-1879/paper59.pdf}, file_url = {/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2017/KazPon17Integration\_DL.pdf} } @Conference { GliKazTra:17:Abstraction:Horn:SHOIF:AAAI, author = {Glimm, Birte and Kazakov, Yevgeny and Tran, Trung-Kien}, title = {Ontology Materialization by Abstraction Refinement in Horn SHOIF}, abstract = {Abstraction refinement is a recently introduced technique using which reasoning over large ABoxes is reduced to reasoning over small abstract ABoxes. Although the approach is sound for any classical Description Logic such as \{SROIQ\}, it is complete only for Horn \{ALCHOI\}. In this paper, we propose an extension of this method that is now complete for Horn \{SHOIF\} and also handles role- and equality-materialization. To show completeness, we use a tailored set of materialization rules that loosely decouple the ABox from the TBox. An empirical evaluation demonstrates that, despite the new features, the abstractions are still significantly smaller than the original ontologies and the materialization can be computed efficiently.}, year = {2017}, booktitle = {Proceedings of the 31st AAAI Conference on Artificial Intelligence}, publisher = {AAAI Press}, editor = {Satinder P. 
Singh and Shaul Markovitch}, pages = {1114--1120}, event_name = {31st AAAI Conference on Artificial Intelligence}, event_place = {San Francisco, California, USA}, tags = {AutomatedReasoning}, web_url = {http://aaai.org/ocs/index.php/AAAI/AAAI17/paper/view/14726}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2017/GlKT17a.pdf} } @Inproceedings { Bercher17AdmissibleHTNHeuristic, author = {Bercher, Pascal and Behnke, Gregor and H\"{o}ller, Daniel and Biundo, Susanne}, title = {An Admissible HTN Planning Heuristic}, abstract = {Hierarchical task network (HTN) planning is well-known for being an efficient planning approach. This is mainly due to the success of the HTN planning system SHOP2. However, its performance depends on hand-designed search control knowledge. At the time being, there are only very few domain-independent heuristics, which are designed for differing hierarchical planning formalisms. Here, we propose an admissible heuristic for standard HTN planning, which allows to find optimal solutions heuristically. It bases upon the so-called task decomposition graph (TDG), a data structure reflecting reachable parts of the task hierarchy. We show (both in theory and empirically) that rebuilding it during planning can improve heuristic accuracy thereby decreasing the explored search space. 
The evaluation further studies the heuristic both in terms of plan quality and coverage.}, year = {2017}, booktitle = {Proceedings of the 26th International Joint Conference on Artificial Intelligence (IJCAI 2017)}, publisher = {IJCAI}, pages = {480--488}, tags = {SFB-TRR-62,Planning,SFB-T3}, web_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2017/Bercher17AdmissibleHTNHeuristicTalk.pdf}, web_url2 = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2017/Bercher17AdmissibleHTNHeuristicPoster.pdf}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2017/Bercher17AdmissibleHTNHeuristic.pdf} } @Inbook { Siegert2017CompBookTicketSystem, author = {Siegert, Ingo and Sch\"{u}ssel, Felix and Schmidt, Miriam and Reuter, Stephan and Meudt, Sascha and Layher, Georg and Krell, Gerald and H\"{o}rnle, Thilo and Handrich, Sebastian and Al-Hamadi, Ayoub and Dietmayer, Klaus and Neumann, Heiko and Palm, G\"{u}nther and Schwenker, Friedhelm and Wendemuth, Andreas}, title = {Multi-modal Information Processing in Companion-Systems: A Ticket Purchase System}, abstract = {We demonstrate a successful multimodal dynamic human-computer interaction (HCI) in which the system adapts to the current situation and the user’s state is provided using the scenario of purchasing a train ticket. This scenario demonstrates that Companion Systems are facing the challenge of analyzing and interpreting explicit and implicit observations obtained from sensors under changing environmental conditions. In a dedicated experimental setup, a wide range of sensors was used to capture the situative context and the user, comprising video and audio capturing devices, laser scanners, a touch screen, and a depth sensor. Explicit signals describe a user’s direct interaction with the system, such as interaction gestures, speech and touch input. 
Implicit signals are not directly addressed to the system; they comprise the user’s situative context, his or her gesture, speech, body pose, facial expressions and prosody. Both multimodally fused explicit signals and interpreted information from implicit signals steer the application component, which was kept deliberately robust. The application offers stepwise dialogs gathering the most relevant information for purchasing a train ticket, where the dialog steps are sensitive and adaptable within the processing time to the interpreted signals and data. We further highlight the system’s potential for a fast-track ticket purchase when several pieces of information indicate a hurried user. A video of the complete scenario in German language is available at: http://www.uni-ulm.de/en/in/sfb-transregio-62/pr-and-press/videos.html All authors contributed equally.}, year = {2017}, DOI = {10.1007/978-3-319-43665-4\_25}, booktitle = {Companion Technology -- A Paradigm Shift in Human-Technology Interaction}, publisher = {Springer}, chapter = {25}, series = {Cognitive Technologies}, editor = {Susanne Biundo and Andreas Wendemuth}, pages = {493--500}, tags = {SFB-TRR-62}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2017/Siegert2017CompBookTicketSystem.pdf} } @Inbook { Hoernle2017CompBookReferenceArchitecture, author = {H\"{o}rnle, Thilo and Tornow, Michael and Honold, Frank and Schwegler, Reinhard and Heinemann, Ralph and Biundo, Susanne and Wendemuth, Andreas}, title = {Companion-Systems: A Reference Architecture}, abstract = {Companion-Technology for cognitive technical systems consists of a multitude of components that implement different properties. A primary point is the architecture which is responsible for the interoperability of all components. It defines the capabilities of the systems crucially. For research concerning the requirements and effects of the architecture, several demonstration scenarios were developed. 
Each of these demonstration scenarios focuses on some aspects of a Companion-System. For the implementation a middleware concept was used, having the capability to realize the major part of the Companion-Systems. Currently the system architecture takes up only a minor property in projects which are working on related research topics. For the description of an architecture representing the major part of possible Companion-Systems, the demonstration scenarios are studied with regard to their system structure and the constituting components. A monolithic architecture enables a simple system design and fast direct connections between the components, such as: sensors with their processing and fusion components, knowledge bases, planning components, dialog systems and interaction components. Herein, only a limited number of possible Companion-Systems can be represented. In a principled approach, a dynamic architecture, capable of including new components during run time, is able to represent almost all Companion-Systems. Furthermore, an approach for enhancing the architecture is introduced.}, year = {2017}, DOI = {10.1007/978-3-319-43665-4\_22}, booktitle = {Companion Technology -- A Paradigm Shift in Human-Technology Interaction}, publisher = {Springer}, chapter = {22}, series = {Cognitive Technologies}, editor = {Susanne Biundo and Andreas Wendemuth}, pages = {449--469}, tags = {SFB-TRR-62}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2017/Hoernle2017CompBookReferenceArchitecture.pdf} } @Inproceedings { Schiller17CouplingKnowledge, author = {Schiller, Marvin R. G. 
and Behnke, Gregor and Schmautz, Mario and Bercher, Pascal and Kraus, Matthias and Dorna, Michael and Minker, Wolfgang and Glimm, Birte and Biundo, Susanne}, title = {A Paradigm for Coupling Procedural and Conceptual Knowledge in Companion Systems}, abstract = {Companion systems are technical systems that adjust their functionality to the needs and the situation of an individual user. Consequently, companion systems are strongly knowledge-based. We propose a modelling paradigm for integrating procedural and conceptual knowledge which is targeted at companion systems that require a combination of planning and reasoning capabilities. The presented methodology couples the hierarchical task network (HTN) planning formalism with an ontology-based knowledge representation, thereby minimising redundancies in modelling and enabling the use of state-of-the-art reasoning and planning tools on the shared knowledge model. The approach is applied within a prototype of a companion system that assists novice users in the do-it-yourself (DIY) domain with the planning and execution of home improvement projects involving the use of power tools.}, year = {2017}, DOI = {10.1109/COMPANION.2017.8287072}, booktitle = {Proceedings of the 2nd International Conference on Companion Technology (ICCT 2017)}, publisher = {IEEE}, tags = {SFB-TRR-62,SFB-T3,Planning,KnowledgeModeling}, web_url = {http://ieeexplore.ieee.org/document/8287072/}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2017/SBSBKDMGB-ICCT2017.pdf} } @Inbook { Richter2017UserCenteredPlanningUnderUncertainty, author = {Richter, Felix and Biundo, Susanne}, title = {Addressing Uncertainty in Hierarchical User-Centered Planning}, abstract = {Companion-Systems need to reason about dynamic properties of their users, e.g., their emotional state, and the current state of the environment. 
The values of these properties are often not directly accessible; hence information on them must be pieced together from indirect, noisy or partial observations. To ensure probability-based treatment of partial observability on the planning level, planning problems can be modeled as Partially Observable Markov Decision Processes (POMDPs). While POMDPs can model relevant planning problems, it is algorithmically difficult to solve them. A starting point for mitigating this is that many domains exhibit hierarchical structures where plans consist of a number of higher-level activities, each of which can be implemented in different ways that are known a priori. We show how to make use of such structures in POMDPs using the Partially Observable HTN (POHTN) planning approach by developing a Partially Observable HTN (POHTN) action hierarchy for an example domain derived from an existing deterministic demonstration domain. We then apply Monte-Carlo Tree Search to POHTNs for generating plans and evaluate both the developed domain and the POHTN approach empirically.}, year = {2017}, DOI = {10.1007/978-3-319-43665-4\_6}, booktitle = {Companion Technology -- A Paradigm Shift in Human-Technology Interaction}, publisher = {Springer}, chapter = {6}, series = {Cognitive Technologies}, editor = {Susanne Biundo and Andreas Wendemuth}, pages = {101--121}, keywords = {SFB-TRR-62, Planning}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2017/Richter2017UserCenteredPlanningUnderUncertainty.pdf} } @Inbook { Geier2017CompBookMLKP, author = {Geier, Thomas and Biundo, Susanne}, title = {Multi-Level Knowledge Processing in Cognitive Technical Systems}, abstract = {Companion-Systems are composed of different modules that have to share a single, sound estimate of the current situation. 
While the long-term decision-making of automated planning requires knowledge about the user’s goals, short-term decisions, like choosing among modes of user-interaction, depend on properties such as lighting conditions. In addition to the diverse scopes of the involved models, a large portion of the information required within such a system cannot be directly observed, but has to be inferred from background knowledge and sensory data--sometimes via a cascade of abstraction layers, and often resulting in uncertain predictions. In this contribution, we interpret an existing cognitive technical system under the assumption that it solves a factored, partially observable Markov decision process. Our interpretation heavily draws from the concepts of probabilistic graphical models and hierarchical reinforcement learning, and fosters a view that cleanly separates between inference and decision making. The results are discussed and compared to those of existing approaches from other application domains.}, year = {2017}, DOI = {10.1007/978-3-319-43665-4\_2}, booktitle = {Companion Technology -- A Paradigm Shift in Human-Technology Interaction}, publisher = {Springer}, chapter = {2}, series = {Cognitive Technologies}, editor = {Susanne Biundo}, pages = {17--36}, tags = {SFB-TRR-62}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2017/Geier2017CompBookMLKP.pdf} } @Inbook { Biundo2017CompBookIntro, author = {Biundo, Susanne and Wendemuth, Andreas}, title = {An Introduction to Companion-Technology}, abstract = {Companion-technology enables a new generation of intelligent systems. These Companion-systems smartly adapt their functionality to a user’s individual requirements. They comply with his or her abilities, preferences, and current needs and adjust their behavior as soon as critical changes of the environment or changes of the user’s emotional state or disposition are observed. 
Companion-systems are distinguished by characteristics such as competence, individuality, adaptability, availability, cooperativeness, and trustworthiness. These characteristics are realized by integrating the technical functionality of systems with a combination of cognitive processes. Companion-systems are able to perceive the user and the environment; they reason about the current situation, exploit background knowledge, and provide and pursue appropriate plans of action; and they enter into a dialog with the user where they select the most suitable modes of interaction in terms of media, modalities and dialog strategies. This chapter introduces the essence of Companion-technology and sheds light on the huge range of its prospective applications.}, year = {2017}, DOI = {10.1007/978-3-319-43665-4\_1}, booktitle = {Companion Technology -- A Paradigm Shift in Human-Technology Interaction}, publisher = {Springer}, chapter = {1}, series = {Cognitive Technologies}, editor = {Susanne Biundo and Andreas Wendemuth}, pages = {1--15}, tags = {SFB-TRR-62}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2017/Biundo2017CompBookIntro.pdf} } @Book { Biundo2017ParadigmShift, author = {Biundo, Susanne and Wendemuth, Andreas}, title = {Companion Technology - A Paradigm Shift in Human-Technology Interaction}, abstract = {Future technical systems will be companion systems, competent assistants that provide their functionality in a completely individualized way, adapting to a user’s capabilities, preferences, requirements, and current needs, and taking into account both the emotional state and the situation of the individual user. This book presents the enabling technology for such systems. It introduces a variety of methods and techniques to implement an individualized, adaptive, flexible, and robust behavior for technical systems by means of cognitive processes, including perception, cognition, interaction, planning, and reasoning. 
The technological developments are complemented by empirical studies from psychological and neurobiological perspectives.}, year = {2017}, DOI = {10.1007/978-3-319-43665-4}, publisher = {Springer}, editor = {Susanne Biundo and Andreas Wendemuth}, keywords = {Companion Systems, Cognitive Systems, Artificial Intelligence (AI), Automated Planning, Knowledge Representation (KR), Human-Computer Interaction (HCI), Multimodal Interaction, Dialogue Management, Emotion Recognition, Information Fusion}, tags = {SFB-TRR-62,Planning}, web_url = {https://link.springer.com/book/10.1007/978-3-319-43665-4}, file_url = {https://link.springer.com/book/10.1007/978-3-319-43665-4} } @Inbook { Bercher2017CompBookHomeTheater, author = {Bercher, Pascal and Richter, Felix and H\"{o}rnle, Thilo and Geier, Thomas and H\"{o}ller, Daniel and Behnke, Gregor and Nielsen, Florian and Honold, Frank and Sch\"{u}ssel, Felix and Reuter, Stephan and Minker, Wolfgang and Weber, Michael and Dietmayer, Klaus and Biundo, Susanne}, title = {Advanced User Assistance for Setting Up a Home Theater}, abstract = {In many situations of daily life, such as in educational, work-related, or social contexts, one can observe an increasing demand for intelligent assistance systems. In this chapter, we show how such assistance can be provided in a wide range of application scenarios--based on the integration of user-centered planning with advanced dialog and interaction management capabilities. Our approach is demonstrated by a system that assists a user in the task of setting up a complex home theater. The theater consists of several hi-fi devices that need to be connected with each other using the available cables and adapters. In particular for technically inexperienced users, the task is quite challenging due to the high number of different ports of the devices and because the used cables might not be known to the user. 
Support is provided by presenting a detailed sequence of instructions that solves the task.}, year = {2017}, DOI = {10.1007/978-3-319-43665-4\_24}, booktitle = {Companion Technology -- A Paradigm Shift in Human-Technology Interaction}, publisher = {Springer}, chapter = {24}, series = {Cognitive Technologies}, editor = {Susanne Biundo and Andreas Wendemuth}, pages = {485--491}, tags = {SFB-TRR-62}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2017/Bercher2017CompBookHomeTheater.pdf} } @Inbook { Nothdurft17UserInvolvement, author = {Nothdurft, Florian and Bercher, Pascal and Behnke, Gregor and Minker, Wolfgang}, title = {User Involvement in Collaborative Decision-Making Dialog Systems}, abstract = {Mixed-initiative assistants are systems that support humans in their decision-making and problem-solving capabilities in a collaborative manner. Such systems have to integrate various artificial intelligence capabilities, such as knowledge representation, problem solving and planning, learning, discourse and dialog, and human-computer interaction. These systems aim at solving a given problem autonomously for the user, yet involve the user into the planning process for a collaborative decision-making, to respect e. g. user preferences. However, how the user is involved into the planning can be framed in various ways, using different involvement strategies, varying e. g. in their degree of user freedom. 
Hence, here we present results of a study examining the effects of different user involvement strategies on the user experience in a mixed-initiative system.}, year = {2017}, DOI = {10.1007/978-981-10-2585-3\_10}, booktitle = {Dialogues with Social Robots: Enablements, Analyses, and Evaluation}, publisher = {Springer}, chapter = {10}, editor = {Kristiina Jokinen and Graham Wilcock}, pages = {129--141}, tags = {SFB-TRR-62,Planning}, web_url = {http://www.springer.com/us/book/9789811025846}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2016/Nothdurft2016UserInvolvement.pdf}, note = {This book chapter was accepted at the 7th International Workshop On Spoken Dialogue Systems (IWSDS 2016)} } @Inproceedings { KazSko:DL17:Justifications, author = {Kazakov, Yevgeny and Skocovsky, Peter}, title = {Enumerating Justifications using Resolution}, abstract = {We propose a new procedure that can enumerate justifications of a logical entailment given a set of inferences using which this entailment can be derived from axioms in the ontology. The procedure is based on the extension of the resolution method with so-called answer literals. In comparison to other (SAT-based) methods for enumerating justifications, our procedure can enumerate justifications in any user-defined order that extends the subset relation. The procedure is easy to implement and can be parametrized with ordering and selection strategies used in resolution. We describe an implementation of the procedure provided in PULi--a new Java-based Proof Utility Library, and provide an empirical comparison of (several strategies of) our procedure and other SAT-based tools on popular EL ontologies. The experiments show that our procedure provides a comparable, if not better performance than those highly optimized tools. 
For example, using one of the strategies, we were able to compute all justifications for all direct subsumptions of Snomed CT in about 1.5 hours. No other tool used in our experiments was able to do it even within a much longer period. }, year = {2017}, booktitle = {Proceedings of the 30th International Workshop on Description Logics (DL 2017)}, volume = {1879}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, editor = {Alessandro Artale and Birte Glimm and Roman Kontchakov}, tags = {KnowledgeModeling, LiveOntologies}, web_url = {http://ceur-ws.org/Vol-1879/paper38.pdf}, file_url = {/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2017/KazSko17Justifications\_DL.pdf} } @Inbook { Reuter2017CompBookEnvironmentAdaption, author = {Reuter, Stephan and Scheel, Alexander and Geier, Thomas and Dietmayer, Klaus}, title = {Environment Adaption for Companion-Systems}, abstract = {One of the key characteristics of a Companion-System is the adaptation of its functionality to the user’s preferences and the environment. On the one hand, a dynamic environment model facilitates the adaption of output modalities in human computer interaction (HCI) to the current situation. On the other hand, continuous tracking of users in the proximity of the system allows for resuming a previously interrupted interaction. Thus, an environment perception system based on a robust multi-object tracking algorithm is required to provide these functionalities. In typical Companion-System applications, persons in the proximity are closely spaced, which leads to statistical dependencies in their behavior. The multi-object Bayes filter allows for modeling these statistical dependencies by representing the multi-object state using random finite sets. Based on the social force model and the knowledge base of the companion system, an approach to modeling object interactions is presented. 
In this work, the interaction model is incorporated into the prediction step of the sequential Monte Carlo (SMC) implementation of the multi-object Bayes filter. Further, an alternative implementation of the multi-object Bayes filter based on labeled random finite sets is outlined.}, year = {2017}, DOI = {10.1007/978-3-319-43665-4\_15}, booktitle = {Companion Technology -- A Paradigm Shift in Human-Technology Interaction}, publisher = {Springer}, chapter = {15}, series = {Cognitive Technologies}, editor = {Susanne Biundo and Andreas Wendemuth}, pages = {301--320}, tags = {SFB-TRR-62}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2017/Reuter2017CompBookEnvironmentAdaption.pdf} } @Inproceedings { Behnke2017Challenge, author = {Behnke, Gregor and Leichtmann, Benedikt and Bercher, Pascal and H\"{o}ller, Daniel and Nitsch, Verena and Baumann, Martin and Biundo, Susanne}, title = {Help me make a dinner! Challenges when assisting humans in action planning}, abstract = {A promising field of application for cognitive technical systems is individualised user assistance for complex tasks. Here, a companion system usually uses an AI planner to solve the underlying combinatorial problem. Often, the use of a bare black-box planning system is not sufficient to provide individualised assistance, but instead the user has to be able to control the process that generates the presented advice. Such an integration guarantees that the user will be satisfied with the assistance s/he is given, trust the advice more, and is thus more likely to follow it. 
In this paper, we provide a general theoretical view on this process, called mixed-initiative planning, and derive several research challenges from it.}, year = {2017}, booktitle = {Proceedings of the 2nd International Conference on Companion Technology (ICCT 2017)}, publisher = {IEEE}, tags = {SFB-TRR-62,Planning}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2017/Behnke2017Challenge.pdf} } @Inproceedings { BrGl17a, author = {Brenner, Markus and Glimm, Birte}, title = {Incremental Materialization Update via Abstraction Refinement}, year = {2017}, booktitle = {Proceedings of the 30th International Workshop on Description Logics (DL 2017)}, volume = {1879}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, tags = {AutomatedReasoning}, web_url = {http://ceur-ws.org/Vol-1879/paper19.pdf}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2017/GlBr17a.pdf} } @Inproceedings { Ulmschneider.2017, author = {Ulmschneider, Klaus and Glimm, Birte}, title = {Knowledge Graph: Semantic Representation and Assessment of Innovation Ecosystems}, abstract = {Innovative capacity is highly dependent upon knowledge and the possession of unique competences can be an important source of enduring strategic advantage. Hence, being able to identify, locate, measure, and assess competence occupants can be a decisive competitive edge. In this work, we introduce a framework that assists with performing such tasks. To achieve this, NLP-, rule-based, and machine learning techniques are employed to process raw data such as academic publications or patents. The framework gains normalized person and organization profiles and compiles identified entities (such as persons, organizations, or locations) into dedicated objects disambiguating and unifying where needed. 
The objects are then mapped with conceptual systems and stored along with identified semantic relations in a Knowledge Graph, which is constituted by RDF triples. An OWL reasoner allows for answering complex business queries, and in particular, to analyze and evaluate competences on multiple aggregation levels (i.e., single vs. collective) and dimensions (e.g., region, technological field of interest, time). In order to prove the general applicability of the framework and to illustrate how to solve concrete business cases from the automotive domain, it is evaluated with different datasets.}, year = {2017}, isbn = {978-3-319-69547-1}, DOI = {10.1007/978-3-319-69548-8\_15}, booktitle = {Proceedings of the 8th International Conference on Knowledge Engineering and Semantic Web (KESW 2017)}, publisher = {Springer}, address = {Berlin, Germany}, series = {Communications in Computer and Information Science (CCIS)}, editor = {Rozewski, Przemyslaw and Lange, Christoph}, pages = {211--226}, keywords = {Competence Analysis;Competence Assessment;Competence Detection;Computational Linguistics;Corporate Strategy;Data Mining;Decision Making;Expert Matching;Expert Mining;Information Extraction;Information Retrieval;Innovation Ecosystem;Knowledge Graph;Knowledge Representation;Machine Learning;Name Disambiguation;Name Normalization;Natural Language Processing;Ontology;Patent Analysis;Question Answering;Reasoning;Semantic Analysis;Semantic Technologies}, tags = {AutomatedReasoning,KnowledgeModelling}, web_url = {https://link.springer.com/chapter/10.1007/978-3-319-69548-8\_15}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2017/UlGl17a.pdf} } @Inproceedings { PMGG16a, author = {Parsia, Bijan and Matentzoglu, Nicolas and Goncalves, Rafael S. 
and Glimm, Birte and Steigmiller, Andreas}, title = {The OWL Reasoner Evaluation (ORE) 2015 Resources}, year = {2016}, booktitle = {Proceedings of the 15th International Semantic Web Conference (ISWC 2016)}, volume = {9982}, publisher = {Springer-Verlag}, series = {Lecture Notes in Computer Science}, editor = {Paul T. Groth and Elena Simperl and Alasdair J. G. Gray and Marta Sabou and Markus Kr\"{o}tzsch and Freddy L\'{e}cu\'{e} and Fabian Fl\"{o}ck and Yolanda Gil}, pages = {159--167}, keywords = {Benchmarking, Reasoning, Description Logics, Optimisations, Optimizations}, tags = {AutomatedReasoning}, web_url = {http://link.springer.com/chapter/10.1007{\%}2F978-3-319-46547-0\_17}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2016/PMGG16a.pdf}, note = {Best Resource Track Paper Award} } @Article { GlSt2016b, author = {Glimm, Birte and Stuckenschmidt, Heiner}, title = {Special Issue on Semantic Web}, year = {2016}, issn = {1610-1987}, DOI = {10.1007/s13218-016-0430-3}, journal = {KI - K\"{u}nstliche Intelligenz}, volume = {30}, pages = {113--115}, number = {2}, keywords = {Reasoning, Description Logics, Semantic Web}, tags = {AutomatedReasoning}, web_url = {http://dx.doi.org/10.1007/s13218-016-0430-3}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2016/GlSt2016b.pdf} } @Article { Biundo2016Editorial, author = {Biundo, Susanne and H\"{o}ller, Daniel and Bercher, Pascal}, title = {Special Issue on Companion Technologies}, abstract = {Dear reader, at present, we observe a rapid growth in the development of increasingly complex “intelligent” systems that serve users throughout all areas of their daily lives. They range from classical technical systems such as household appliances, cars, or consumer electronics through mobile apps and services to advanced service robots in various fields of application. 
While many of the rather conventional systems already provide multiple modalities to interact with, the most advanced are even equipped with cognitive abilities such as perception, cognition, and reasoning. However, the use of such complex technical systems and in particular the actual exploitation of their rich functionality remain challenging and quite often lead to users’ cognitive overload and frustration. Companion Technologies bridge the gap between the extensive functionality of technical systems and human users’ individual requirements and needs. They enable the construction of really smart – adaptive, flexible, and cooperative – technical systems by applying and fusing techniques from different areas of research. In our special issue we present interesting pieces of work – quite a number of new technical contributions, ongoing and completed research projects, several dissertation abstracts, as well as an interview – that are related to, or even fundamental for, Companion-Technology. 
In the community part of this issue, there is also a conference report on the first International Symposium on Companion-Technology.}, year = {2016}, DOI = {10.1007/s13218-015-0421-9}, journal = {KI - K\"{u}nstliche Intelligenz}, volume = {30}, publisher = {Springer}, pages = {5--9}, number = {1}, tags = {SFB-TRR-62,Planning}, web_url = {http://link.springer.com/article/10.1007/s13218-015-0421-9}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2016/Biundo2016Editorial.pdf}, note = {Special Issue on Companion Technologies} } @Inproceedings { DS3ESSV:2016DS3, author = {Siegert, Ingo and Reuter, Stephan and Sch\"{u}ssel, Felix and Layher, Georg and H\"{o}rnle, Thilo and Meudt, Sascha and Wendemuth, Andreas}, title = {Multimodal Information Processing: The Ticket Purchase - a Demonstration Scenario of the SFB/TRR-62}, abstract = {The demonstration scenario of the SFB/TRR-62 shows multimodal, dynamic interactions between a human being and a technical system that are adaptive to the situation and the emotional state. It uses the example of purchasing a train ticket to demonstrate how a companion system is able to adapt its dialog with the user according to the situational context and the emotions of the user. One special feature of this scenario are the simultaneous analyses and evaluations of explicit and implicit input data. The scenario demonstrates further how background knowledge about the user can be included; for example often visited destinations, the user's timetable or the number of travelers.}, year = {2016}, booktitle = {Elektronische Sprachsignalverarbeitung 2016. Tagungsband der 27. 
Konferenz}, volume = {81}, publisher = {TUDpress}, address = {Leipzig, Germany}, series = {Studientexte zur Sprachkommunikation}, editor = {Jokisch, Oliver}, pages = {111--118}, keywords = {SFB-TRR-62} } @Inproceedings { Ulmschneider.2016, author = {Ulmschneider, Klaus and Glimm, Birte}, title = {Semantic Exploitation of Implicit Patent Information}, abstract = {In recent years patents have become increasingly important for businesses to protect their intellectual capital and as a valuable source of information. Patent information is, however, not employed to its full potential and the interpretation of structured and unstructured patent information in large volumes remains a challenge. We address this by proposing an integrated interdisciplinary approach that uses natural language processing and machine learning techniques to formalize multilingual patent information in an ontology. The ontology further contains patent and domain specific knowledge, which allows for aligning patents with technological fields of interest and other business-related artifacts. Our empirical evaluation shows that for categorizing patents according to well-known technological fields of interest, the approach achieves high accuracy with selected feature sets compared to related work focussing on monolingual patents. 
We further show that combining OWL RL reasoning with SPARQL querying over the patent knowledge base allows for answering complex business queries and illustrate this with real-world use cases from the automotive domain.}, year = {2016}, isbn = {9781509042395}, DOI = {10.1109/SSCI.2016.7849943}, booktitle = {Proceedings of the 7th IEEE Symposium Series on Computational Intelligence (SSCI'16)}, publisher = {IEEE Computer Society}, address = {Red Hook, United States}, editor = {Jin, Yaochu and Kollias, Stefanos}, pages = {1--8}, keywords = {Computational Intelligence;Computational Linguistics;Corporate Strategy;Data Mining;Decision Making;Information Extraction;Information Retrieval;Knowledge Graph;Knowledge Representation;Machine Learning;Natural Language Processing;Patent Analysis;Reasoning;Semantic Analysis;Semantic Technologies}, tags = {AutomatedReasoning,KnowledgeModelling}, web_url = {http://ieeexplore.ieee.org/document/7849943/}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2016/UlGl16a.pdf} } @Inproceedings { GlKT16b, author = {Glimm, Birte and Kazakov, Yevgeny and Tran, Trung-Kien}, title = {Scalable Reasoning by Abstraction Beyond DL-Lite}, year = {2016}, booktitle = {Proceedings of the 10th International Conference on Web Reasoning and Rule Systems (RR 2016)}, volume = {9898}, publisher = {Springer-Verlag}, series = {Lecture Notes in Computer Science}, editor = {Magdalena Ortiz and Stefan Schlobach}, pages = {77--93}, keywords = {Reasoning, Description Logics, Optimisations, Optimizations, Materialization, Materialisation, Abstraction}, tags = {AutomatedReasoning}, web_url = {http://dx.doi.org/10.1007/978-3-319-45276-0\_7}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2016/GlKT16b.pdf}, note = {Best Student Paper Award} } @Phdthesis { DBLP:phd/dnb/Steigmiller16, author = {Steigmiller, Andreas}, title = {Optimisation of tableau-based reasoning systems for expressive 
description logics}, year = {2016}, school = {University of Ulm, Germany}, web_url = {https://oparu.uni-ulm.de/xmlui/handle/123456789/4042?locale-attribute=en}, file_url = {http://nbn-resolving.de/urn:nbn:de:bsz:289-oparu-4042-6} } @Inproceedings { GlKT16a, author = {Glimm, Birte and Kazakov, Yevgeny and Tran, Trung-Kien}, title = {Ontology Materialization by Abstraction Refinement in Horn SHOIF}, year = {2016}, booktitle = {Proceedings of the 29th International Workshop on Description Logics (DL 2016)}, volume = {1577}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, tags = {AutomatedReasoning}, web_url = {http://ceur-ws.org/Vol-1577/invited\_paper\_1.pdf}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2016/GlKT16a.pdf} } @Inproceedings { Behnke16ChangeThePlan, author = {Behnke, Gregor and H\"{o}ller, Daniel and Bercher, Pascal and Biundo, Susanne}, title = {Change the Plan - How hard can that be?}, abstract = {Interaction with users is a key capability of planning systems that are applied in real-world settings. Such a system has to be able to react appropriately to requests issued by its users. Most of these systems are based on a generated plan that is continually criticised by him, resulting in a mixed-initiative planning system. We present several practically relevant requests to change a plan in the setting of hierarchical task network planning and investigate their computational complexity. On the one hand, these results provide guidelines when constructing algorithms to execute the respective requests, but also provide translations to other well-known planning queries like plan existence or verification. 
These can be employed to extend an existing planner such that it can form the foundation of a mixed-initiative planning system simply by adding a translation layer on top.}, year = {2016}, booktitle = {Proceedings of the 26th International Conference on Automated Planning and Scheduling (ICAPS 2016)}, publisher = {AAAI Press}, pages = {38--46}, event_name = {26th International Conference on Automated Planning and Scheduling}, event_place = {London}, tags = {SFB-TRR-62,Planning}, web_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2016/Behnke16Change\_Talk.pdf}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2016/Behnke16ChangeThePlan.pdf} } @Inproceedings { Bercher16HybridPlanningComplexities, author = {Bercher, Pascal and H\"{o}ller, Daniel and Behnke, Gregor and Biundo, Susanne}, title = {More than a Name? On Implications of Preconditions and Effects of Compound HTN Planning Tasks}, abstract = {There are several formalizations for hierarchical planning. Many of them allow to specify preconditions and effects for compound tasks. They can be used, e.g., to assist during the modeling process by ensuring that the decomposition methods' plans ``implement'' the compound tasks' intended meaning. This is done based on so-called legality criteria that relate these preconditions and effects to the method's plans and pose further restrictions. Despite the variety of expressive hierarchical planning formalisms, most theoretical investigations are only known for standard HTN planning, where compound tasks are just names, i.e., no preconditions or effects can be specified. Thus, up to know, a direct comparison to other hierarchical planning formalisms is hardly possible and fundamental theoretical properties are yet unknown. 
We therefore investigate the theoretical impact of such preconditions and effects -- depending on the legality criteria known from the literature -- for two of the most basic questions to planning: plan existence and plan verification. It turns out that for all investigated legality criteria, the respective problems are as hard as in the HTN setting and therefore equally expressive.}, year = {2016}, booktitle = {Proceedings of the 22nd European Conference on Artificial Intelligence (ECAI 2016)}, publisher = {IOS Press}, pages = {225--233}, tags = {SFB-TRR-62,Planning}, web_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2016/Bercher16HybridPlanningComplexitiesSlides.pdf}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2016/Bercher16HybridPlanningComplexities.pdf} } @Article { Smith2016Interview, author = {Bercher, Pascal and H\"{o}ller, Daniel}, title = {Interview with David E. Smith}, abstract = {David E. Smith is a senior Researcher in the Intelligent Systems Division at NASA Ames Research Center. He received his Ph.D. in 1985 from Stanford University, and spent time as a Research Associate at Stanford, a Scientist at the Rockwell Palo Alto Science Center, and a Visiting Scholar at the University of Washington before joining NASA in 1997. Beginning in 1999, he served as the lead of the 18 member planning and scheduling group at NASA Ames for six years before abdicating to devote more time to research. Much of his research has focused on pushing the boundaries of AI planning technology to handle richer models of time, concurrency, exogenous events, uncertainty, and oversubscription. Smith served as an Associate Editor for the Journal of Artificial Intelligence Research (JAIR) from 2001-2004, and as Guest Editor for the JAIR Special Issue and Special Track on the 3rd and 4th International Planning Competitions. He served on the JAIR Advisory Board 2004-2007. 
Smith was recognized as a AAAI Fellow in 2005, and served on the AAAI Executive Council 2007-2010.}, year = {2016}, DOI = {10.1007/s13218-015-0403-y}, journal = {K\"{u}nstliche Intelligenz}, volume = {30}, publisher = {Springer}, pages = {101--105}, number = {1}, tags = {SFB-TRR-62,Planning}, web_url = {http://link.springer.com/article/10.1007/s13218-015-0403-y}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2016/Smith2016Interview.pdf}, note = {Special Issue on Companion Technologies} } @Inproceedings { ZhQG16a, author = {Zhou, Zhangquan and Qi, Guilin and Glimm, Birte}, title = {Exploring Parallel Tractability of Ontology Materialization}, abstract = {Materialization is an important reasoning service for applications built on the Web Ontology Language (OWL). To make materialization efficient in practice, current research focuses on deciding tractability of an ontology language and designing parallel reasoning algorithms. However, some well-known large-scale ontologies, such as YAGO, have been shown to have good performance for parallel reasoning, but they are expressed in ontology languages that are not parallelly tractable, i.e., the reasoning is inherently sequential in the worst case. This motivates us to study the problem of parallel tractability of ontology materialization from a theoretical perspective. That is, we aim to identify the ontologies for which materialization is parallelly tractable, i.e., in NC complexity. In this work, we focus on datalog rewritable ontology languages. We identify several classes of datalog rewritable ontologies (called parallelly tractable classes) such that materialization over them is parallelly tractable. We further investigate the parallel tractability of materialization of a datalog rewritable OWL fragment DHL (Description Horn Logic) and an extension of DHL that allows complex role inclusion axioms. 
Based on the above results, we analyze real-world datasets and show that many ontologies expressed in DHL or its extension belong to the parallelly tractable classes.}, year = {2016}, DOI = {10.3233/978-1-61499-672-9-73}, booktitle = {Proceedings of the 22nd European Conference on Artificial Intelligence (ECAI 2016)}, volume = {285}, publisher = {IOS Press}, series = {Frontiers in Artificial Intelligence and Applications}, pages = {73-81}, keywords = {Reasoning, Description Logics, Optimisations, Optimizations,Materialization, Materialisation, Paralellisation, Paralellization}, tags = {AutomatedReasoning}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2016/ZhQG16a.pdf} } @Article { Biundo2016CompanionSurvey, author = {Biundo, Susanne and H\"{o}ller, Daniel and Schattenberg, Bernd and Bercher, Pascal}, title = {Companion-Technology: An Overview}, abstract = {Companion-technology is an emerging field of cross-disciplinary research. It aims at developing technical systems that appear as \dqCompanions'' to their users. They serve as co-operative agents assisting in particular tasks or, in a more general sense, even give companionship to humans. Overall, Companion-technology enables technical systems to smartly adapt their services to individual users' current needs, their requests, situation, and emotion. 
We give an introduction to the field, discuss the most relevant application areas that will benefit from its developments, and review the related research projects.}, year = {2016}, DOI = {10.1007/s13218-015-0419-3}, journal = {K\"{u}nstliche Intelligenz}, volume = {30}, publisher = {Springer}, pages = {11-20}, number = {1}, tags = {SFB-TRR-62,Planning}, web_url = {http://link.springer.com/article/10.1007/s13218-015-0419-3}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2016/Biundo2016CompanionSurvey.pdf}, note = {Special Issue on Companion Technologies} } @Article { Biundo16CompanionTechnology, author = {Biundo, Susanne and Wendemuth, Andreas}, title = {Companion-Technology for Cognitive Technical Systems}, abstract = {We introduce the Transregional Collaborative Research Centre “Companion-Technology for Cognitive Technical Systems” – a cross-disciplinary endeavor towards the development of an enabling technology for Companion-systems. These systems completely adjust their functionality and service to the individual user. They comply with his or her capabilities, preferences, requirements, and current needs and adapt to the individual’s emotional state and ambient conditions. 
Companion-like behavior of technical systems is achieved through the investigation and implementation of cognitive abilities and their well-orchestrated interplay.}, year = {2016}, DOI = {10.1007/s13218-015-0414-8}, journal = {K\"{u}nstliche Intelligenz}, volume = {30}, publisher = {Springer}, pages = {71-75}, number = {1}, tags = {SFB-TRR-62}, web_url = {http://link.springer.com/article/10.1007/s13218-015-0414-8}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2016/Biundo16CompanionTechnology.pdf}, note = {Special Issue on Companion Technologies}, annotation = {Special Issue on Companion Technologies} } @Inproceedings { Alford16BoundToPlan, author = {Alford, Ron and Behnke, Gregor and H\"{o}ller, Daniel and Bercher, Pascal and Biundo, Susanne and Aha, David}, title = {Bound to Plan: Exploiting Classical Heuristics via Automatic Translations of Tail-Recursive HTN Problems}, abstract = {Hierarchical Task Network (HTN) planning is a formalism that can express constraints which cannot easily be expressed by classical (non-hierarchical) planning approaches. It enables reasoning about procedural structures and domain-specific search control knowledge. Yet the cornucopia of modern heuristic search techniques remains largely unincorporated in current HTN planners, in part because it is not clear how to estimate the goal distance for a partially-ordered task network. When using SHOP2-style progression, a task network of yet unprocessed tasks is maintained during search. In the general case it can grow arbitrarily large. However, many – if not most – existing HTN domains have a certain structure (called tail-recursive) where the network’s size is bounded. We show how this bound can be calculated and exploited to automatically translate tail-recursive HTN problems into non-hierarchical STRIPS representations, which allows using both hierarchical structures and classical planning heuristics. 
In principle, the approach can also be applied to non-tail-recursive HTNs by incrementally increasing the bound. We give three translations with different advantages and present the results of an empirical evaluation with several HTN domains that are translated to PDDL and solved by two current classical planning systems. Our results show that we can automatically find practical bounds for solving partially-ordered HTN problems. We also show that classical planners perform similarly with our automatic translations versus a previous hand-bounded HTN translation which is restricted to totally-ordered problems.}, year = {2016}, booktitle = {Proceedings of the 26th International Conference on Automated Planning and Scheduling (ICAPS 2016)}, publisher = {AAAI Press}, pages = {20--28}, event_name = {26th International Conference on Automated Planning and Scheduling}, event_place = {London}, tags = {SFB-TRR-62,Planning}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2016/Alford16BoundToPlan.pdf} } @Inproceedings { Hoeller16Expressivity, author = {H\"{o}ller, Daniel and Behnke, Gregor and Bercher, Pascal and Biundo, Susanne}, title = {Assessing the Expressivity of Planning Formalisms through the Comparison to Formal Languages}, abstract = {From a theoretical perspective, judging the expressivity of planning formalisms helps to understand the relationship of different representations and to infer theoretical properties. From a practical point of view, it is important to be able to choose the best formalism for a problem at hand, or to ponder the consequences of introducing new representation features. Most work on the expressivity is based either on compilation approaches, or on the computational complexity of the plan existence problem. Recently, we introduced a new notion of expressivity. 
It is based on comparing the structural complexity of the set of solutions to a planning problem by interpreting the set as a formal language and classifying it with respect to the Chomsky hierarchy. This is a more direct measure than the plan existence problem and enables also the comparison of formalisms that can not be compiled into each other. While existing work on that last approach focused on different hierarchical problem classes, this paper investigates STRIPS with and without conditional effects; though we also tighten some existing results on hierarchical formalisms. Our second contribution is a discussion on the language-based expressivity measure with respect to the other approaches.}, year = {2016}, booktitle = {Proceedings of the 26th International Conference on Automated Planning and Scheduling (ICAPS 2016)}, publisher = {AAAI Press}, pages = {158--165}, event_name = {26th International Conference on Automated Planning and Scheduling}, event_place = {London}, tags = {SFB-TRR-62,Planning}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2016/Hoeller16Expressivity.pdf} } @Article { GlSt2016a, author = {Glimm, Birte and Stuckenschmidt, Heiner}, title = {15 Years of Semantic Web: An Incomplete Survey}, year = {2016}, issn = {0933-1875}, DOI = {10.1007/s13218-016-0424-1}, journal = {KI - K\"{u}nstliche Intelligenz}, volume = {30}, pages = {117--130}, number = {2}, keywords = {Reasoning, Description Logics, Semantic Web}, tags = {AutomatedReasoning}, web_url = {http://dx.doi.org/10.1007/s13218-016-0424-1}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2016/GlSt2016a.pdf} } @Article { Glim2016a, author = {Glimm, Birte}, title = {Interview with Prof. Dr. 
Ian Horrocks, Professor at the Department of Computer Science of the University of Oxford}, year = {2016}, issn = {0933-1875}, DOI = {10.1007/s13218-016-0428-x}, journal = {KI - K\"{u}nstliche Intelligenz}, volume = {30}, pages = {201--203}, number = {2}, keywords = {Reasoning, Description Logics, Semantic Web}, tags = {AutomatedReasoning}, web_url = {http://dx.doi.org/10.1007/s13218-016-0428-x}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2016/Glim2016a.pdf} } @Inproceedings { BrGl15a, author = {Brenner, Markus and Glimm, Birte}, title = {Breaking the Black Box - Using Background Knowledge for Efficient Stream Reasoning}, abstract = {Current approaches to stream reasoning neglect knowledge about the system as a whole. We present first steps towards self-describing streams by outlining a possible definition of the data produced by differ- ent streams. We give an outlook on future paths and how such descrip- tions can be used to improve reasoning about the streamed data.}, year = {2015}, month = {9}, booktitle = {Proceedings of the International Symposium on Companion Technology (ISCT 2015)}, tags = {SFB-TRR-62,AutomatedReasoning}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2015/BrGl15a.pdf} } @Inproceedings { Nothdurft2015InterplayDialogPlanning, author = {Nothdurft, Florian and Behnke, Gregor and Bercher, Pascal and Biundo, Susanne and Minker, Wolfgang}, title = {The Interplay of User-Centered Dialog Systems and AI Planning}, abstract = {Technical systems evolve from simple dedicated task solvers to cooperative and competent assistants, helping the user with increasingly complex and demanding tasks. For this, they may proactively take over some of the users responsibilities and help to find or reach a solution for the user’s task at hand, using e.g., Artificial Intelligence (AI) Planning techniques. 
However, this intertwining of user-centered dialog and AI planning systems, often called mixed-initiative planning (MIP), does not only facilitate more intelligent and competent systems, but does also raise new questions related to the alignment of AI and human problem solving. In this paper, we describe our approach on integrating AI Planning techniques into a dialog system, explain reasons and effects of arising problems, and provide at the same time our solutions resulting in a coherent, userfriendly and efficient mixed-initiative system. Finally, we evaluate our MIP system and provide remarks on the use of explanations in MIP-related phenomena.}, year = {2015}, booktitle = {Proceedings of the 16th Annual Meeting of the Special Interest Group on Discourse and Dialogue (SIGDIAL)}, publisher = {Association for Computational Linguistics}, pages = {344--353}, event_place = {Prague, Czech Republic}, tags = {SFB-TRR-62,Planning}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2015/Nothdurft15MIP.pdf} } @Inproceedings { KPLT15a, author = {Kotzyba, Michael and Ponomaryov, Denis and Low, Thomas and Thiel, Marcus and N\"{u}rnberger, Andreas}, title = {Ontology-supported Exploratory Search for Physical Training Exercises}, year = {2015}, booktitle = {Proceedings of the ISWC 2015 Posters \\& Demonstrations Track}, volume = {1486}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, keywords = {Semantic Web}, tags = {AutomatedReasoning, SFB-TRR-62}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2015/KPLT15a.pdf} } @Article { StGl15b, author = {Steigmiller, Andreas and Glimm, Birte}, title = {Pay-As-You-Go Description Logic Reasoning by Coupling Tableau and Saturation Procedures}, year = {2015}, DOI = {10.1613/jair.4897}, journal = {Journal of Artificial Intelligence Research}, volume = {54}, pages = {535--592}, keywords = {Reasoning, Description Logics, Optimisations, 
Optimizations}, tags = {SFB-TRR-62,AutomatedReasoning}, web_url = {http://jair.org/media/4897/live-4897-9009-jair.pdf}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2015/StGl15b.pdf} } @Inproceedings { Hoernle15ArchitectureApproach, author = {H\"{o}rnle, Thilo and Tornow, Michael}, title = {Reference Architecture Approach for Companion-Systems}, abstract = {This paper focuses on an approach of a reference architecture for a Companion-System (CS). It enhances a technical system by a user adaptive interaction and a situation adjustive behavior. To generate a reference architecture existing demonstration systems were analyzed and combined in order to find the essential system components. All aspects of the architecture of a CS will be considered in detail. It is finally enhanced by a system controller which enables a dynamic adaption to the current system state in order to allow future extensions of the architecture.}, year = {2015}, booktitle = {Proc. of the Int. Symposium on Companion Technology (ISCT 2015)}, tags = {SFB-TRR-62}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2015/Hoernle15Architecture.pdf} } @Inproceedings { Bercher15UserCenteredDiscussion, author = {Bercher, Pascal and H\"{o}ller, Daniel and Behnke, Gregor and Biundo, Susanne}, title = {User-Centered Planning - A Discussion on Planning in the Presence of Human Users}, abstract = {AI planning forms a core capability of intelligent systems. It enables goal directed behavior and allows systems to react adequately and flexibly to the current situation. Further, it allows systems to provide advice to a human user on how to reach his or her goals. Though the process of finding a plan is, by itself, a hard computational problem, some new challenges arise when involving a human user into the process. 
Plans have to be generated in a certain way, so that the user can be included into the plan generation process in case he or she wishes to; the plans should be presented to the user in an adequate way to prevent confusion or even rejection; to improve the trust in the system, it needs to be able to explain its behavior or presented plans. Here, we discuss these challenges and give pointers on how to solve them.}, year = {2015}, booktitle = {Proceedings of the First International Symposium on Companion Technology (ISCT 2015)}, pages = {79--82}, event_name = {First International Symposium on Companion Technology (ISCT 2015)}, event_place = {Ulm, Germany}, tags = {SFB-TRR-62,Planning}, web_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2015/Bercher15ISCTPoster.pdf}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2015/Bercher15UserCenteredDiscussion.pdf} } @Inproceedings { DBLP:conf/semweb/ParsiaMGGS15, author = {Parsia, Bijan and Matentzoglu, Nicolas and Goncalves, Rafael S. 
and Glimm, Birte and Steigmiller, Andreas}, title = {The OWL Reasoner Evaluation (ORE) 2015 Competition Report}, year = {2015}, booktitle = {Proceedings of the 11th International Workshop on Scalable Semantic Web Knowledge Base Systems co-located with 14th International Semantic Web Conference (ISWC 2015)}, volume = {1457}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, editor = {Thorsten Liebig and Achille Fokoue}, pages = {2-15}, keywords = {Reasoning, Description Logics, Optimisations, Optimizations}, tags = {AutomatedReasoning}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2015/PMGG15a.pdf} } @Inproceedings { Alford15TightHTNBounds, author = {Alford, Ron and Bercher, Pascal and Aha, David}, title = {Tight Bounds for HTN Planning}, abstract = {Although HTN planning is in general undecidable, there are many syntactically identifiable sub-classes of HTN problems that can be decided. For these sub-classes, the decision procedures provide upper complexity bounds. Lower bounds were often not investigated in more detail, however. We generalize a propositional HTN formalization to one that is based upon a function-free first-order logic and provide tight upper and lower complexity results along three axes: whether variables are allowed in operator and method schemas, whether the initial task and methods must be totally ordered, and where recursion is allowed (arbitrary recursion, tail-recursion, and acyclic problems). 
Our findings have practical implications, both for the reuse of classical planning techniques for HTN planning, and for the design of efficient HTN algorithms}, year = {2015}, booktitle = {Proceedings of the 25th International Conference on Automated Planning and Scheduling (ICAPS 2015)}, publisher = {AAAI Press}, pages = {7--15}, tags = {SFB-TRR-62,Planning}, web_url = {https://www.youtube.com/watch?v=EFnsTzIVvUo}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2015/Alford15TightHTNBounds.pdf} } @Inproceedings { Alford15TightTIHTNBounds, author = {Alford, Ron and Bercher, Pascal and Aha, David}, title = {Tight Bounds for HTN planning with Task Insertion}, abstract = {Hierarchical Task Network (HTN) planning with Task Insertion (TIHTN planning) is a formalism that hybridizes classical planning with HTN planning by allowing the insertion of operators from outside the method hierarchy. This additional capability has some practical benefits, such as allowing more flexibility for design choices of HTN models: the task hierarchy may be specified only partially, since ``missing required tasks'' may be inserted during planning rather than prior planning by means of the (predefined) HTN methods. While task insertion in a hierarchical planning setting has already been applied in practice, its theoretical properties have not been studied in detail, yet -- only \EXPSPACE\{\} membership is known so far. We lower that bound proving \NEXPTIME-completeness and further prove tight complexity bounds along two axes: whether variables are allowed in method and action schemas, and whether methods must be totally ordered. 
We also introduce a new planning technique called \{\em acyclic progression\}, which we use to define provably efficient TIHTN planning algorithms.}, year = {2015}, booktitle = {Proceedings of the 25th International Joint Conference on Artificial Intelligence (IJCAI 2015)}, publisher = {AAAI Press}, pages = {1502--1508}, tags = {SFB-TRR-62,Planning}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2015/Alford15TightTIHTNBounds.pdf} } @Inproceedings { Alford15TightTIHTNBoundsAbstract, author = {Alford, Ron and Bercher, Pascal and Aha, David}, title = {Tight Bounds for HTN planning with Task Insertion (Extended Abstract)}, abstract = {Hierarchical Task Network (HTN) planning with task insertion (TIHTN planning) is a variant of HTN planning. In HTN planning, the only means to alter task networks is to decompose compound tasks. In TIHTN planning, tasks may also be inserted directly. In this paper we provide tight complexity bounds for TIHTN planning along two axis: whether variables are allowed and whether methods must be totally ordered.}, year = {2015}, booktitle = {Proceedings of the Eighth Annual Symposium on Combinatorial Search (SoCS)}, publisher = {AAAI Press}, pages = {221--222}, tags = {SFB-TRR-62,Planning}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2015/Alford15TightTIHTNBoundsAbstract.pdf} } @Article { UMGM15a, author = {Ulmschneider, Klaus and Michelberger, Bernd and Glimm, Birte and Mutschler, Bela and Reichert, Manfred}, title = {On Maintaining Semantic Networks: Challenges, Algorithms, Use Cases}, year = {2015}, DOI = {10.1108/IJWIS-04-2015-0014}, journal = {International Journal of Web Information Systems}, volume = {11}, pages = {291-326}, number = {3}, tags = {AutomatedReasoning}, web_url = {http://dx.doi.org/10.1108/IJWIS-04-2015-0014} } @Inproceedings { Behnke2015Verification, author = {Behnke, Gregor and H\"{o}ller, Daniel and Biundo, Susanne}, title = {On the 
Complexity of HTN Plan Verification and its Implications for Plan Recognition}, abstract = {In classical planning it is easy to verify if a given sequence of actions is a solution to a planning problem. It has to be checked whether the actions are applicable in the given order and if a goal state is reached after executing them. In this paper we show that verifying whether a plan is a solution to an HTN planning problem is much harder. More specifically, we prove that this problem is NP-complete, even for very simple HTN planning problems. Furthermore, this problem remains NP-complete if an executable sequence of tasks is already provided. HTN-like hierarchical structures are commonly used to represent plan libraries in plan and goal recognition. By applying our result to plan and goal recognition we provide insight into its complexity.}, year = {2015}, booktitle = {Proceedings of the 25th International Conference on Automated Planning and Scheduling (ICAPS 2015)}, publisher = {AAAI Press}, editor = {Ronen Brafman, Carmel Domshlak, Patrik Haslum, Shlomo Zilberstein}, pages = {25-33}, event_name = {25th International Conference on Automated Planning and Scheduling}, event_place = {Jerusalem}, tags = {SFB-TRR-62,Planning}, web_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2015/Behnke15Verify\_Talk.pdf}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2015/Behnke2015HTNVerification.pdf} } @Inproceedings { GKKS14a, author = {Glimm, Birte and Kazakov, Yevgeny and Kollia, Ilianna and Stamou, Giorgos}, title = {Lower and Upper Bounds for SPARQL Queries over OWL Ontologies}, year = {2015}, booktitle = {Proceedings of the 29th AAAI Conference on Artificial Intelligence (AAAI 2015)}, publisher = {AAAI Press}, keywords = {Description Logics, Query Answering, Semantic Web, SPARQL}, tags = {AutomatedReasoning}, file_url = 
{https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2015/GKKS15a.pdf} } @Inproceedings { Behnke15MIPDiscussion, author = {Behnke, Gregor and Schiller, Marvin R. G. and Ponomaryov, Denis and Nothdurft, Florian and Bercher, Pascal and Minker, Wolfgang and Glimm, Birte and Biundo, Susanne}, title = {A Unified Knowledge Base for Companion-Systems - A Case Study in Mixed-Initiative Planning}, abstract = {Companion systems aim to extend the abilities of ordinary technical systems, for instance by modeling the user's situation, by recognizing the user's intentions, and by being able to interact with the user and to adapt to her/him. Such a system depends on planning capabilities to determine which actions are necessary to achieve a particular goal. In many situations it may not be appropriate for a companion system to develop plans on its own, but instead it has to integrate the user while creating the plan, i.e., it needs to be mixed-initiative. Based on earlier work, we demonstrate how a central knowledge base for a mixed-initiative planning system can be designed. We outline various benefts our approach brings to bear within a companion system. 
Lastly, we present several requests a user might issue towards the mixed-initiative planning system and how they can be answered by harnessing the knowledge base.}, year = {2015}, booktitle = {Proceedings of the First International Symposium on Companion Technology (ISCT 2015)}, pages = {43--48}, event_name = {First International Symposium on Companion Technology (ISCT 2015)}, event_place = {Ulm, Germany}, tags = {SFB-TRR-62,Planning,AutomatedReasoning}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2015/Behnke15MIPDiscussion.pdf} } @Inproceedings { Bercher2015DemoSystem, author = {Bercher, Pascal and Richter, Felix and H\"{o}rnle, Thilo and Geier, Thomas and H\"{o}ller, Daniel and Behnke, Gregor and Nothdurft, Florian and Honold, Frank and Minker, Wolfgang and Weber, Michael and Biundo, Susanne}, title = {A Planning-based Assistance System for Setting Up a Home Theater}, abstract = {Modern technical devices are often too complex for many users to be able to use them to their full extent. Based on planning technology, we are able to provide advanced user assistance for operating technical devices. We present a system that assists a human user in setting up a complex home theater consisting of several HiFi devices. For a human user, the task is rather challenging due to a large number of different ports of the devices and the variety of available cables. The system supports the user by giving detailed instructions how to assemble the theater. 
Its performance is based on advanced user-centered planning capabilities including the generation, repair, and explanation of plans.}, year = {2015}, booktitle = {Proceedings of the 29th AAAI Conference on Artificial Intelligence (AAAI 2015)}, publisher = {AAAI Press}, pages = {4264--4265}, tags = {SFB-TRR-62,Planning}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2015/Bercher15DemoSystem.pdf} } @Inproceedings { Geier2015, author = {Geier, Thomas and Richter, Felix and Biundo, Susanne}, title = {Locally Conditioned Belief Propagation}, abstract = {Conditioned Belief Propagation (CBP) is an algorithm for approximate inference in probabilistic graphical models. It works by conditioning on a subset of variables and solving the remainder using loopy Belief Propagation. Unfortunately, CBP's runtime scales exponentially in the number of conditioned variables. Locally Conditioned Belief Propagation (LCBP) approximates the results of CBP by treating conditions locally, and in this way avoids the exponential blow-up. We formulate LCBP as a variational optimization problem and derive a set of update equations that can be used to solve it. We show empirically that LCBP delivers results that are close to those obtained from CBP, while the computational cost scales favorably with problem size. }, year = {2015}, booktitle = {Proceedings of the 31st Conference on Uncertainty in Artificial Intelligence (UAI)}, publisher = {AUAI Press}, pages = {296--305}, event_place = {Amsterdam}, keywords = {probablistic inference, belief propagation, approximate inference, graphical models}, tags = {SFB-TRR-62}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2015/Geier2015Lcbp.pdf} } @Inproceedings { BBBGPS2015OntologiesAndPlanning, author = {Behnke, Gregor and Bercher, Pascal and Biundo, Susanne and Glimm, Birte and Ponomaryov, Denis and Schiller, Marvin R. 
G.}, title = {Integrating Ontologies and Planning for Cognitive Systems}, abstract = {We present an approach for integrating ontological reasoning and planning within cognitive systems. Patterns and mechanisms that suitably link planning domains and interrelated knowledge in an ontology are devised. In particular, this enables the use of (standard) ontology reasoning for extending a (hierarchical) planning domain. Furthermore, explanations of plans generated by a cognitive system benefit from additional explanations relying on background knowledge in the ontology and inference. An application of this approach in the domain of fitness training is presented.}, year = {2015}, booktitle = {Proceedings of the 28th International Workshop on Description Logics (DL 2015)}, publisher = {CEUR Workshop Proceedings}, tags = {AutomatedReasoning, Planning, SFB-TRR-62}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2015/BBBG15a.pdf} } @Proceedings { DGGH2015a, title = {Informal Proceedings of the 4th International Workshop on OWL Reasoner Evaluation (ORE-2015) co-located with the 28th International Workshop on Description Logics (DL 2015)}, year = {2015}, volume = {1387}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, editor = {Michel Dumontier and Birte Glimm and Rafael S. Goncalves and Matthew Horridge and Ernesto Jimenez-Ruiz and Nicolas Matentzoglu and Bijan Parsia and Giorgos B. 
Stamou and Giorgos Stoilos}, keywords = {Reasoning, Description Logics, Optimisations, Optimizations}, tags = {AutomatedReasoning}, web_url = {http://ceur-ws.org/Vol-1387} } @Inproceedings { Bercher15DissertationAbstractDC, author = {Bercher, Pascal}, title = {Hybrid Planning - Theoretical Foundations and Practical Applications}, abstract = {The thesis presents a novel set-theoretic formalization of (propositional) hybrid planning – a planning framework that fuses Hierarchical Task Network (HTN) planning with Partial-Order Causal-Link (POCL) planning. Several sub classes thereof are identified that capture well-known problems such as HTN planning and POCL planning. For these problem classes, the complexity of the plan-existence problem is investigated, i.e., the problem of deciding whether there exists a solution for a given planning problem. For solving the problems of the respective problem classes, a hybrid planning algorithm is presented. Its search is guided by informed heuristics. 
Several such heuristics are introduced, both for POCL planning problems (i.e., problems without task hierarchy) and for hybrid planning problems (i.e., heuristics that are \dqhierarchy-aware\dq).}, year = {2015}, booktitle = {Doctoral Consortium at ICAPS 2015}, tags = {SFB-TRR-62,Planning}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2015/Bercher15DissertationAbstractDC.pdf} } @Article { Glodek201517, author = {Glodek, Michael and Honold, Frank and Geier, Thomas and Krell, Gerald and Nothdurft, Florian and Reuter, Stephan and Sch\"{u}ssel, Felix and H\"{o}rnle, Thilo and Dietmayer, Klaus and Minker, Wolfgang and Biundo, Susanne and Weber, Michael and Palm, G\"{u}nther and Schwenker, Friedhelm}, title = {Fusion paradigms in cognitive technical systems for human–computer interaction}, abstract = {Abstract Recent trends in human–computer interaction (HCI) show a development towards cognitive technical systems (CTS) to provide natural and efficient operating principles. To do so, a CTS has to rely on data from multiple sensors which must be processed and combined by fusion algorithms. Furthermore, additional sources of knowledge have to be integrated, to put the observations made into the correct context. Research in this field often focuses on optimizing the performance of the individual algorithms, rather than reflecting the requirements of CTS. This paper presents the information fusion principles in CTS architectures we developed for Companion Technologies. Combination of information generally goes along with the level of abstractness, time granularity and robustness, such that large CTS architectures must perform fusion gradually on different levels -- starting from sensor-based recognitions to highly abstract logical inferences. In our CTS application we sectioned information fusion approaches into three categories: perception-level fusion, knowledge-based fusion and application-level fusion. 
For each category, we introduce examples of characteristic algorithms. In addition, we provide a detailed protocol on the implementation performed in order to study the interplay of the developed algorithms.}, year = {2015}, issn = {0925-2312}, DOI = {10.1016/j.neucom.2015.01.076}, journal = {Neurocomputing}, volume = {161}, pages = {17--37}, keywords = {Human–computer interaction}, tags = {SFB-TRR-62}, file_url = {http://www.sciencedirect.com/science/article/pii/S0925231215002155/pdfft?md5=0f4a009f3f5ba7b155a82192be2a0dbe\&pid=1-s2.0-S0925231215002155-main.pdf} } @Inproceedings { StGL15a, author = {Steigmiller, Andreas and Glimm, Birte and Liebig, Thorsten}, title = {Completion Graph Caching for Expressive Description Logics}, year = {2015}, booktitle = {Proceedings of the 28th International Workshop on Description Logics (DL 2015)}, publisher = {CEUR Workshop Proceedings}, tags = {AutomatedReasoning}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2015/StGL15a.pdf} } @Inproceedings { Behnke2015Coherence, author = {Behnke, Gregor and Ponomaryov, Denis and Schiller, Marvin R. G. and Bercher, Pascal and Nothdurft, Florian and Glimm, Birte and Biundo, Susanne}, title = {Coherence Across Components in Cognitive Systems – One Ontology to Rule Them All}, abstract = {The integration of the various specialized components of cognitive systems poses a challenge, in particular for those architectures that combine planning, inference, and human-computer interaction (HCI). An approach is presented that exploits a single source of common knowledge contained in an ontology. Based upon the knowledge contained in it, specialized domain models for the cognitive systems’ components can be generated automatically. Our integration targets planning in the form of hierarchical planning, being well-suited for HCI as it mimics planning done by humans. 
We show how the hierarchical structures of such planning domains can be (partially) inferred from declarative background knowledge. The same ontology furnishes the structure of the interaction between the cognitive system and the user. First, explanations of plans presented to users are enhanced by ontology explanations. Second, a dialog domain is created from the ontology coherent with the planning domain. We demonstrate the application of our technique in a fitness training scenario.}, year = {2015}, booktitle = {Proceedings of the 24th International Joint Conference on Artificial Intelligence (IJCAI 2015)}, publisher = {AAAI Press}, pages = {1442--1449}, event_name = {IJCAI 2015}, event_place = {Buenos Aires}, tags = {SFB-TRR-62,Planning,AutomatedReasoning}, web_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2015/Behnke15IJCAI-poster.pdf}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2015/BPSB15a.pdf} } @Inproceedings { Richter2015believing, author = {Richter, Felix and Geier, Thomas and Biundo, Susanne}, title = {Believing in POMDPs}, abstract = {Partially observable Markov decision processes (POMDP) are well-suited for realizing sequential decision making capabilities that respect uncertainty in Companion systems that are to naturally interact with and assist human users. Unfortunately, their complexity prohibits modeling the entire Companion system as a POMDP. 
We therefore propose an approach that makes use of abstraction to enable employing POMDPs in Companion systems and discuss challenges for applying it.}, year = {2015}, booktitle = {Proceedings of the First International Symposium on Companion Technology (ISCT 2015)}, event_name = {First International Symposium on Companion Technology}, event_place = {Ulm, Germany}, keywords = {POMDP, cognitive technical systems}, tags = {SFB-TRR-62,Planning}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2015/believingPOMDPs.pdf} } @Inproceedings { KazKli:2015:Advancing-ELK:DL, author = {Kazakov, Yevgeny and Klinov, Pavel}, title = {Advancing ELK: Not Only Performance Matters}, abstract = {This paper reports on the recent development of ELK, a consequence-based reasoner for \$\mathcal\{EL^+\_\bot\}\$ ontologies. It covers novel reasoning techniques which aim at improving efficiency and providing foundation for new reasoning services. On the former front we present a simple optimization for handling of role composition axioms, such as transitivity, which substantially reduces the number of rule applications. For the latter, we describe a new rule application strategy that takes advantage of concept definitions to avoid many redundant inferences without making rules dependent on derived conclusions. This improvement is not visible to the end user but considerably simplifies implementation for incremental reasoning and proof generation. We also present a rewriting of low-level inferences used by ELK to higher-level proofs that can be defined in the standard DL syntax, and thus be used for automatic verification of reasoning results or (visual) ontology debugging. 
We demonstrate the latter capability using a new ELK Prot\'eg\'e plugin.}, year = {2015}, booktitle = {DL}, volume = {1350}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, editor = {Diego Calvanese and Boris Konev}, tags = {KnowledgeModeling, ELK, LiveOntologies}, web_url = {http://ceur-ws.org/Vol-1350/paper-27.pdf}, file_url = {/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2015/KazKli15Advancing-ELK\_DL.pdf} } @Inproceedings { GKKS15b, author = {Glimm, Birte and Kazakov, Yevgeny and Kollia, Ilianna and Stamou, Giorgos}, title = {Lower and Upper Bounds for SPARQL Queries over OWL Ontologies}, year = {2015}, booktitle = {Proceedings of the 28th International Workshop on Description Logics (DL 2015)}, publisher = {CEUR Workshop Proceedings}, keywords = {Description Logics, Query Answering, Semantic Web, SPARQL}, tags = {AutomatedReasoning}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2015/GKKS15a.pdf} } @Techreport { Geier2014CBPTR, author = {Geier, Thomas and Richter, Felix and Biundo, Susanne}, title = {Conditioned Belief Propagation Revisited: Extended Version}, abstract = {Belief Propagation (BP) applied to cyclic problems is a well known approximate inference scheme for probabilistic graphical models. To improve its accuracy, Conditioned Belief Propagation (CBP) has been proposed, which splits a problem into subproblems by conditioning on variables, applies BP to subproblems, and merges the results to produce an answer to the original problem. In this work, we propose a reformulated version of CBP that exhibits anytime behavior and allows for more specific tuning by formalizing a further aspect of the algorithm through the use of a leaf selection heuristic. 
We propose several simple and easy to compute heuristics and demonstrate their performance using an empirical evaluation on randomly generated problems.}, year = {2014}, month = {8}, institution = {Ulm University}, number = {UIB 2014-03}, tags = {SFB-TRR-62,KnowledgeModeling}, web_url = {http://vts.uni-ulm.de/doc.asp?id=9201 }, file_url = {http://vts.uni-ulm.de/docs/2014/9201/vts\_9201\_13795.pdf} } @Article { StLG14a, author = {Steigmiller, Andreas and Liebig, Thorsten and Glimm, Birte}, title = {Konclude: System Description}, year = {2014}, journal = {Journal of Web Semantics: Science, Services and Agents on the World Wide Web}, volume = {27}, number = {1}, keywords = {Reasoning, Description Logics, Optimisations, Optimizations}, tags = {AutomatedReasoning}, web_url = {http://www.websemanticsjournal.org/index.php/ps/article/view/366/388}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2014/StLG14a.pdf} } @Inproceedings { Hoeller2014HTNLanguage, author = {H\"{o}ller, Daniel and Behnke, Gregor and Bercher, Pascal and Biundo, Susanne}, title = {Language Classification of Hierarchical Planning Problems}, abstract = {Theoretical results on HTN planning are mostly related to the plan existence problem. In this paper, we study the structure of the generated plans in terms of the language they produce. We show that such languages are always context-sensitive. Furthermore we identify certain subclasses of HTN planning problems which generate either regular or context-free languages. 
Most importantly we have discovered that HTN planning problems, where preconditions and effects are omitted, constitute a new class of languages that lies strictly between the context-free and context-sensitive languages.}, year = {2014}, DOI = {10.3233/978-1-61499-419-0-447}, booktitle = {Proceedings of the 21st European Conference on Artificial Intelligence (ECAI 2014)}, volume = {263}, publisher = {IOS Press}, address = {Amsterdam}, series = {Frontiers in Artificial Intelligence and Applications}, editor = {Schaub, Torsten and Friedrich, Gerhard and O'Sullivan, Barry}, pages = {447--452}, event_name = {21st European Conference on Artificial Intelligence (ECAI 2014)}, event_place = {Prague, Czech Republic}, tags = {SFB-TRR-62,Planning}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2014/Hoeller2014HtnLanguages.pdf} } @Inproceedings { MUGM14a, author = {Michelberger, Bernd and Ulmschneider, Klaus and Glimm, Birte and Mutschler, Bela and Reichert, Manfred}, title = {Maintaining Semantic Networks: Challenges and Algorithms}, year = {2014}, booktitle = {Proceedings of the 16th International Conference on Information Integration and Web-Based Applications \& Services (iiWAS 2014)}, publisher = {ACM}, series = {ACM International Conference Proceedings}, keywords = {Semantic Networks}, tags = {AutomatedReasoning}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2014/MUGM14a.pdf} } @Inproceedings { Bercher14TDGHeuristics, author = {Bercher, Pascal and Keen, Shawn and Biundo, Susanne}, title = {Hybrid Planning Heuristics Based on Task Decomposition Graphs}, abstract = {Hybrid Planning combines Hierarchical Task Network (HTN) planning with concepts known from Partial-Order Causal-Link (POCL) planning. We introduce novel heuristics for Hybrid Planning that estimate the number of necessary modifications to turn a partial plan into a solution. 
These estimates are based on the task decomposition graph that contains all decompositions of the abstract tasks in the planning domain. Our empirical evaluation shows that the proposed heuristics can significantly improve planning performance.}, year = {2014}, booktitle = {Proceedings of the Seventh Annual Symposium on Combinatorial Search (SoCS 2014)}, publisher = {AAAI Press}, pages = {35--43}, tags = {SFB-TRR-62,Planning}, web_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2014/Bercher14HybridHeuristicsSlides.pdf}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2014/Bercher14HybridHeuristics.pdf} } @Inproceedings { StGL14b, author = {Steigmiller, Andreas and Glimm, Birte and Liebig, Thorsten}, title = {Optimised Absorption for Expressive Description Logics}, year = {2014}, booktitle = {Proceedings of the 27th International Workshop on Description Logics (DL 2014)}, volume = {1193}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, editor = {Meghyn Bienvenu and Magdalena Ortiz and Riccardo Rosati and Mantas Simkus}, keywords = {Reasoning, Description Logics, Optimisations, Optimizations}, tags = {AutomatedReasoning}, web_url = {http://ceur-ws.org/Vol-1193/paper\_49.pdf}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2014/StGL14b.pdf} } @Inproceedings { Nothdurft2014ProbabilisticExplanation, author = {Nothdurft, Florian and Richter, Felix and Minker, Wolfgang}, title = {Probabilistic Explanation Dialog Augmentation}, year = {2014}, booktitle = {Proceedings of the 10th International Conference on Intelligent Environments (IE), 2014}, publisher = {IEEE}, pages = {392--395}, tags = {SFB-TRR-62, Planning}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2014/Nothdurft2014ProbabilisticExplanation.pdf} } @Inproceedings { Nothdurft2014HCT, author = {Nothdurft, Florian and Richter, Felix and 
Minker, Wolfgang}, title = {Probabilistic Human-Computer Trust Handling}, abstract = {Human-computer trust has shown to be a critical factor in influencing the complexity and frequency of interaction in technical systems. Particularly incomprehensible situations in human-computer interaction may lead to a reduced users trust in the system and by that influence the style of interaction. Analogous to human-human interaction, explaining these situations can help to remedy negative effects. In this paper we present our approach of augmenting task-oriented dialogs with selected explanation dialogs to foster the human-computer trust relationship in those kinds of situations. We have conducted a web-based study testing the effects of different goals of explanations on the components of human-computer trust. Subsequently, we show how these results can be used in our probabilistic trust handling architecture to augment pre-defined task-oriented dialogs.}, year = {2014}, booktitle = {Proceedings of the 15th Annual Meeting of the Special Interest Group on Discourse and Dialogue (SIGDIAL)}, publisher = {Association for Computational Linguistics}, pages = {51--59}, tags = {SFB-TRR-62, Planning}, file_url = {http://www.aclweb.org/anthology/W/W14/W14-4307.pdf} } @Article { StGL14c, author = {Steigmiller, Andreas and Glimm, Birte and Liebig, Thorsten}, title = {Reasoning with Nominal Schemas through Absorption}, abstract = {Nominal schemas have recently been introduced as a new approach for the integration of DL-safe rules into the Description Logic framework. The efficient processing of knowledge bases with nominal schemas remains, however, challenging. We address this by extending the well-known optimisation of absorption as well as the standard tableau calculus to directly handle the (absorbed) nominal schema axioms. We implement the resulting extension of standard tableau calculi in a novel reasoning system and we integrate further optimisations. 
In our empirical evaluation, we show the effect of these optimisations and we find that the proposed approach performs well even when compared to other DL reasoners with dedicated rule support.}, year = {2014}, DOI = {10.1007/s10817-014-9310-4}, journal = {Journal of Automated Reasoning}, volume = {53}, publisher = {Springer-Verlag}, pages = {351--405}, number = {4}, keywords = {Nominal Schema, Reasoning, Description Logics, Tableau, Optimisations, Optimizations}, tags = {AutomatedReasoning}, web_url = {http://link.springer.com/article/10.1007/s10817-014-9310-4}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2014/StGL14c.pdf} } @Article { KazKroSim:14:ELK:JAR, author = {Kazakov, Yevgeny and Kr\"{o}tzsch, Markus and Siman\v{c}\'{i}k, Franti\v{s}ek}, title = {The Incredible ELK: From Polynomial Procedures to Efficient Reasoning with EL Ontologies}, abstract = {EL is a simple tractable Description Logic that features conjunctions and existential restrictions. Due to its favorable computational properties and relevance to existing ontologies, EL has become the language of choice for terminological reasoning in biomedical applications, and has formed the basis of the OWL EL profile of the Web ontology language OWL. 
This paper describes ELK - a high performance reasoner for OWL EL ontologies - and details various aspects from theory to implementation that make ELK one of the most competitive reasoning systems for EL ontologies available today.}, year = {2014}, DOI = {10.1007/s10817-013-9296-3}, journal = {JAR}, volume = {53}, pages = {1--61}, number = {1}, tags = {KnowledgeModeling, ELK, LiveOntologies}, file_url = {/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2014/KazKroSim13ELK\_JAR.pdf} } @Inproceedings { Pragst14CyberSecurity, author = {Pragst, Louisa and Richter, Felix and Bercher, Pascal and Schattenberg, Bernd and Biundo, Susanne}, title = {Introducing Hierarchy to Non-Hierarchical Planning Models – A Case Study for Behavioral Adversary Models}, abstract = {Hierarchical planning approaches are often pursued when it comes to a real-world application scenario, because they allow for incorporating additional expert knowledge into the domain. That knowledge can be used both for improving plan explanations and for reducing the explored search space. In case a non-hierarchical planning model is already available, for instance because a bottom-up modeling approach was used, one has to concern oneself with the question of how to introduce a hierarchy. 
This paper discusses the points to consider when adding a hierarchy to a non-hierarchical planning model using the example of the BAMS Cyber Security domain.}, year = {2014}, booktitle = {28th PuK Workshop \dqPlanen, Scheduling und Konfigurieren, Entwerfen\dq (PuK 2014)}, keywords = {Hybrid Planning, Hierarchical Planning, POCL Planning, Domain Modeling, Task Hierarchy, Abstractions}, tags = {SFB-TRR-62,Planning,KnowledgeModeling}, web_url2 = {http://www.puk-workshop.de/puk2014/prog.html}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2014/Pragst14CyberSecurity.pdf} } @Inproceedings { Bercher14HybridPlanningApplication, author = {Bercher, Pascal and Biundo, Susanne and Geier, Thomas and Hoernle, Thilo and Nothdurft, Florian and Richter, Felix and Schattenberg, Bernd}, title = {Plan, Repair, Execute, Explain - How Planning Helps to Assemble your Home Theater}, abstract = {In various social, work-related, or educational contexts, an increasing demand for intelligent assistance systems can be observed. In this paper, we present a domain-independent approach that combines a number of planning and interaction components to realize advanced user assistance. Based on a hybrid planning formalism, the components provide facilities including the generation, execution, and repair as well as the presentation and explanation of plans. We demonstrate the feasibility of our approach by means of a system that aims to assist users in the assembly of their home theater. 
An empirical evaluation shows the benefit of such a supportive system, in particular for persons with a lack of domain expertise.}, year = {2014}, booktitle = {Proceedings of the 24th International Conference on Automated Planning and Scheduling (ICAPS 2014)}, publisher = {AAAI Press}, pages = {386--394}, tags = {SFB-TRR-62,Planning,KnowledgeModeling}, web_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2014/Bercher14PlanRepairExecuteExplainSlides.pdf}, web_url2 = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2014/movie.zip}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2014/Bercher14PlanRepairExecuteExplain.pdf}, note = {The domain can be downloaded from http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2014/Bercher14PlanRepairExecuteExplain.zip} } @Article { GHMS14a, author = {Glimm, Birte and Horrocks, Ian and Motik, Boris and Stoilos, Giorgos and Wang, Zhe}, title = {HermiT: An OWL 2 Reasoner}, year = {2014}, DOI = {10.1007/s10817-014-9305-1}, journal = {Journal of Automated Reasoning (JAR)}, volume = {53}, pages = {245--269}, number = {3}, keywords = {Reasoning, Description Logics, Optimisations, Optimizations}, tags = {AutomatedReasoning}, web_url = {http://link.springer.com/article/10.1007{\%}2Fs10817-014-9305-1}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2014/GHMS14a.pdf} } @Inproceedings { KazKli:2014:Tableau-CB:DL, author = {Kazakov, Yevgeny and Klinov, Pavel}, title = {Bridging the Gap between Tableau and Consequence-Based Reasoning}, abstract = {We present a non-deterministic consequence-based procedure for the description logic ALCHI. 
Just like the similar style (deterministic) procedures for EL and Horn-SHIQ, our procedure explicitly derives subsumptions between concepts, but due to non-deterministic rules, not all of these subsumptions are consequences of the ontology. Instead, the consequences are only those subsumptions that can be derived regardless of the choices made in the application of the rules. This is similar to tableau-based procedures, for which an ontology is inconsistent if every expansion of the tableau eventually results in a clash. We report on a preliminary experimental evaluation of the procedure using a version of SNOMED CT with disjunctions, which demonstrates some promising potential.}, year = {2014}, booktitle = {DL}, volume = {1193}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, editor = {Meghyn Bienvenu and Magdalena Ortiz and Riccardo Rosati and Mantas Simkus}, pages = {579-590}, tags = {KnowledgeModeling, ELK, LiveOntologies}, web_url = {http://ceur-ws.org/Vol-1193/paper\_10.pdf}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2015/KazKli15Advancing-ELK\_DL.pdf} } @Inproceedings { KazKli:14:ELK:Tracing:ISWC, author = {Kazakov, Yevgeny and Klinov, Pavel}, title = {Goal-Directed Tracing of Inferences in EL Ontologies}, abstract = {EL is a family of tractable Description Logics (DLs) that is the basis of the OWL 2 EL profile. Unlike for many expressive DLs, reasoning in EL can be performed by computing a deductively-closed set of logical consequences of some specific form. In some ontology-based applications, e.g., for ontology debugging, knowing the logical consequences of the ontology axioms is often not sufficient. The user also needs to know from which axioms and how the consequences were derived. Although it is possible to record all inference steps during the application of rules, this is usually not done in practice to avoid the overheads. 
In this paper, we present a goal-directed method that can generate inferences for selected consequences in the deductive closure without re-applying all rules from scratch. We provide an empirical evaluation demonstrating that the method is fast and economical for large EL ontologies. Although the main benefits are demonstrated for EL reasoning, the method can be potentially applied to many other procedures based on deductive closure computation using fixed sets of rules.}, year = {2014}, booktitle = {ISWC}, volume = {8797}, publisher = {Springer}, series = {Lecture Notes in Computer Science}, pages = {196--211}, tags = {KnowledgeModeling, ELK, LiveOntologies}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2014/KazKli14Tracing\_ISWC.pdf} } @Article { Glodek2014, author = {Glodek, Michael and Geier, Thomas and Biundo, Susanne and Palm, G\"{u}nther}, title = {A layered architecture for probabilistic complex pattern recognition to detect user preferences}, abstract = {The recognition of complex patterns is nowadays one of the most challenging tasks in machine learning, and it promises to be of great benefit for many applications, e.g. by allowing advanced human computer interaction to access the user’s situative context. This work examines a layered architecture that operates on different temporal granularities to infer complex patterns of user preferences. Classical hidden Markov models (HMM), conditioned HMM (CHMM) and fuzzy CHMM (FCHMM) are compared to find the best configuration in the lower architecture layers. In the uppermost layer, a Markov logic network (MLN) is applied to infer the user preference in a probabilistic rule-based manner. For each layer a comprehensive evaluation is given. 
We provide empirical evidence showing that the layered architecture using FCHMM and MLN is well-suited to recognize patterns on different layers.}, year = {2014}, DOI = {10.1016/j.bica.2014.06.003}, journal = {Neural-Symbolic Networks for Cognitive Capacities}, tags = {SFB-TRR-62}, web_url = {http://www.sciencedirect.com/science/article/pii/S2212683X14000486} } @Inproceedings { GKLT14b, author = {Glimm, Birte and Kazakov, Yevgeny and Liebig, Thorsten and Tran, Trung-Kien and Vialard, Vincent}, title = {Abstraction Refinement for Ontology Materialization}, year = {2014}, DOI = {10.1007/978-3-319-11915-1\_12}, booktitle = {Proceedings of the 13th International Semantic Web Conference (ISWC 2014)}, volume = {8797}, publisher = {Springer-Verlag}, series = {Lecture Notes in Computer Science}, pages = {180-195}, keywords = {Reasoning, Description Logics, Optimisations, Optimizations}, tags = {AutomatedReasoning}, web_url = {http://link.springer.com/chapter/10.1007{\%}2F978-3-319-11915-1\_12}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2014/GKLT14b.pdf} } @Inproceedings { GKLT14a, author = {Glimm, Birte and Kazakov, Yevgeny and Liebig, Thorsten and Tran, Trung-Kien and Vialard, Vincent}, title = {Abstraction Refinement for Ontology Materialization}, year = {2014}, booktitle = {Proceedings of the 27th International Workshop on Description Logics (DL 2014)}, volume = {1193}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, pages = {180-195}, keywords = {Reasoning, Description Logics, Optimisations, Optimizations}, tags = {AutomatedReasoning}, web_url = {http://ceur-ws.org/Vol-1193/paper\_6.pdf}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2014/GKLT14a.pdf} } @Inproceedings { honold2014b, author = {Honold, Frank and Bercher, Pascal and Richter, Felix and Nothdurft, Florian and Geier, Thomas and Barth, Roland and H\"{o}rnle, Thilo and Sch\"{u}ssel, Felix and Reuter, 
Stephan and Rau, Matthias and Bertrand, Gregor and Seegebarth, Bastian and Kurzok, Peter and Schattenberg, Bernd and Minker, Wolfgang and Weber, Michael and Biundo, Susanne}, title = {Companion-Technology: Towards User- and Situation-Adaptive Functionality of Technical Systems}, abstract = {The properties of multimodality, individuality, adaptability, availability, cooperativeness and trustworthiness are at the focus of the investigation of Companion Systems. In this article, we describe the involved key components of such a system and the way they interact with each other. Along with the article comes a video, in which we demonstrate a fully functional prototypical implementation and explain the involved scientific contributions in a simplified manner. The realized technology considers the entire situation of the user and the environment in current and past states. The gained knowledge reflects the context of use and serves as basis for decision-making in the presented adaptive system.}, year = {2014}, DOI = {10.1109/IE.2014.60}, booktitle = {10th International Conference on Intelligent Environments (IE 2014)}, publisher = {IEEE}, pages = {378--381}, keywords = {Adaptive HCI; AI Planning; Dialog Management; Interaction Management; Companion Technology}, tags = {SFB-TRR-62,Planning,KnowledgeModeling}, web_url = {https://mirkwood.informatik.uni-ulm.de/sfbtrr62/companion-system.mp4}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2014/Honold2014HomeTheaterVideo.pdf} } @Inproceedings { Geier2014CBP, author = {Geier, Thomas and Richter, Felix and Biundo, Susanne}, title = {Conditioned Belief Propagation Revisited}, abstract = {Belief Propagation (BP) applied to cyclic problems is a well known approximate inference scheme for probabilistic graphical models. To improve the accuracy of BP, a divide-and-conquer approach termed Conditioned Belief Propagation (CBP) has been proposed in the literature. 
It recursively splits a problem by conditioning on variables, applies BP to subproblems, and merges the results to produce an answer to the original problem. In this essay, we propose a reformulated version of CBP that exhibits anytime behavior, and allows for more specific tuning by formalizing a further decision point that decides which subproblem is to be decomposed next. We propose some simple and easy to compute heuristics, and demonstrate their performance using an empirical evaluation on randomly generated problems.}, year = {2014}, DOI = {10.3233/978-1-61499-419-0-1011}, booktitle = {Proceedings of the 21st European Conference on Artificial Intelligence (ECAI 2014)}, volume = {263}, publisher = {IOS Press}, address = {Amsterdam}, series = {Frontiers in Artificial Intelligence and Applications}, editor = {Schaub, Torsten and Friedrich, Gerhard and O'Sullivan, Barry}, pages = {1011-1012}, event_name = {21st European Conference on Artificial Intelligence (ECAI 2014)}, event_place = {Prague, Czech Republic}, tags = {SFB-TRR-62,KnowledgeModeling}, web_url = {http://ebooks.iospress.nl/volumearticle/37095} } @Inproceedings { StGL14a, author = {Steigmiller, Andreas and Glimm, Birte and Liebig, Thorsten}, title = {Coupling Tableau Algorithms for Expressive Description Logics with Completion-based Saturation Procedures}, year = {2014}, DOI = {10.1007/978-3-319-08587-6\_35}, booktitle = {Proceedings of the 7th International Joint Conference on Automated Reasoning (IJCAR 2014)}, volume = {8562}, publisher = {Springer-Verlag}, series = {Lecture Notes in Computer Science}, editor = {St\'{e}phane Demri, Deepak Kapur and Christoph Weidenbach}, pages = {449-463}, keywords = {Reasoning, Description Logics, Optimisations, Optimizations}, tags = {AutomatedReasoning}, web_url = {http://link.springer.com/chapter/10.1007/978-3-319-08587-6\_35{\#}}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2014/StGL14a.pdf} } @Inproceedings { 
Hoeller14PlanLinearization, author = {H\"{o}ller, Daniel and Bercher, Pascal and Richter, Felix and Schiller, Marvin R. G. and Geier, Thomas and Biundo, Susanne}, title = {Finding User-friendly Linearizations of Partially Ordered Plans}, abstract = {Planning models usually do not discriminate between different possible execution orders of the actions within a plan, as long as the sequence remains executable. As the formal planning problem is an abstraction of the real world, it can very well occur that one linearization is more favorable than the other for reasons not captured by the planning model --- in particular if actions are performed by a human. Post-hoc linearization of plans is thus a way to improve the quality of a plan enactment. The cost of this transformation decouples from the planning process, and it allows to incorporate knowledge that cannot be expressed within the limitations of a certain planning formalism. In this paper we discuss the idea of finding useful plan linearizations within the formalism of hybrid planning (although the basic ideas are applicable to a broader class of planning models). 
We propose three concrete models for plan linearization, discuss their ramifications using the application domain of automated user-assistance, and sketch out ways how to empirically validate the assumptions underlying these user-centric models.}, year = {2014}, booktitle = {28th PuK Workshop \dqPlanen, Scheduling und Konfigurieren, Entwerfen\dq (PuK 2014)}, keywords = {Hybrid Planning, POCL Planning, HTN Planning, Plan Linearization, User-centered Planning, User Assistance, Plan Execution}, tags = {SFB-TRR-62,Planning}, web_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2014/Hoeller14PlanLinearizationSlides.pdf}, web_url2 = {http://www.puk-workshop.de/puk2014/prog.html}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2014/Hoeller14PlanLinearization.pdf} } @Inproceedings { Kaz:Kli:2014:Tracing:DL, author = {Kazakov, Yevgeny and Klinov, Pavel}, title = {Goal-Directed Tracing of Inferences in EL Ontologies}, abstract = {EL is a family of tractable Description Logics (DLs) that is the basis of the OWL 2 EL profile. Unlike for many expressive DLs, reasoning in EL can be performed by computing a deductively-closed set of logical consequences of some specific form. In some ontology-based applications, e.g., for ontology debugging, knowing the logical consequences of the ontology axioms is often not sufficient. The user also needs to know from which axioms and how the consequences were derived. Although it is possible to keep track of all inferences applied during reasoning, this is usually not done in practice to avoid the overheads. In this paper, we present a goal-directed method that can generate inferences for selected consequences in the deductive closure without re-applying all rules from scratch. We provide an empirical evaluation demonstrating that the method is fast and economical for large EL ontologies. 
Although the main benefits are demon- strated for EL reasoning, the method can be easily extended to other procedures based on deductive closure computation using fixed sets of rules.}, year = {2014}, booktitle = {DL}, volume = {1193}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, editor = {Meghyn Bienvenu and Magdalena Ortiz and Riccardo Rosati and Mantas Simkus}, pages = {221-232}, tags = {KnowledgeModeling, ELK, LiveOntologies}, web_url = {http://ceur-ws.org/Vol-1193/paper\_26.pdf}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2014/KazKli14Tracing\_DL.pdf} } @Article { KoGl13a, author = {Kollia, Ilianna and Glimm, Birte}, title = {Optimizing SPARQL Query Answering over OWL Ontologies}, abstract = {The SPARQL query language is currently being extended by the World Wide Web Consortium (W3C) with so-called entailment regimes. An entailment regime defines how queries are evaluated under more expressive semantics than SPARQL's standard simple entailment, which is based on sub-graph matching. The queries are very expressive since variables can occur within complex class expressions and can also bind to class or property names.In this paper, we describe a sound and complete algorithm for the OWL Direct Semantics entailment regime. We further propose several novel optimizations such as strategies for determining a good query execution order, query rewriting techniques, and show how specialized OWL reasoning tasks and the class and property hierarchy can be used to reduce the query execution time. For determining a good execution order, we propose a cost-based model, where the costs are based on information about the instances of classes and properties that are extracted from a model abstraction built by an OWL reasoner. We present two ordering strategies: a static and a dynamic one. 
For the dynamic case, we improve the performance by exploiting an individual clustering approach that allows for computing the cost functions based on one individual sample from a cluster.We provide a prototypical implementation and evaluate the efficiency of the proposed optimizations. Our experimental study shows that the static ordering usually outperforms the dynamic one when accurate statistics are available. This changes, however, when the statistics are less accurate, e.g., due to non-deterministic reasoning decisions. For queries that go beyond conjunctive instance queries we observe an improvement of up to three orders of magnitude due to the proposed optimizations.}, year = {2013}, month = {9}, DOI = {10.1613/jair.3872}, journal = {Journal of Artificial Intelligence Research (JAIR)}, volume = {48}, pages = {253-303}, keywords = {Reasoning, Description Logics, Optimisations, Optimizations}, tags = {SFB-TRR-62,AutomatedReasoning}, web_url = {https://www.jair.org/media/3872/live-3872-7402-jair.pdf}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2013/KoGl13a.pdf} } @Inproceedings { GKKS13a, author = {Glimm, Birte and Kazakov, Yevgeny and Kollia, Ilianna and Stamou, Giorgos}, title = {Using the TBox to Optimise SPARQL Queries}, abstract = {We present an approach for using schema knowledge from the TBox to optimize the evaluation of SPARQL queries. The queries are evaluated over an OWL ontology using the OWL Direct Semantics entailment regime. For conjunctive instance queries, we proceed by transforming the query into an ABox. We then show how the TBox and this (small) query ABox can be used to build a maximal equivalent query where the additional query atoms can be used for reducing the set of possible mappings for query variables. 
We also consider arbitrary SPARQL queries and show how the concept and role hierarchies can be used to prune the search space of possible answers based on the polarity of variable occurrences in the query. We provide a prototypical implementation and evaluate the efficiency of the proposed optimizations. Our experimental study shows that the use of the proposed optimizations leads to a significant improvement in the execution times of many queries.}, year = {2013}, booktitle = {Proceedings of the 2013 International Description Logic Workshop (DL 2013)}, publisher = {CEUR Workshop Proceedings}, keywords = {SPARQL, OWL, Description Logics, Query Answering, Optimisations, Optimizations}, tags = {AutomatedReasoning}, web_url = {http://ceur-ws.org/Vol-1014/paper\_80.pdf}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2013/GKKS13a.pdf} } @Inproceedings { Bercher13POCLHeuristics, author = {Bercher, Pascal and Geier, Thomas and Biundo, Susanne}, title = {Using State-Based Planning Heuristics for Partial-Order Causal-Link Planning}, abstract = {We present a technique which allows partial-order causal-link (POCL) planning systems to use heuristics known from state-based planning to guide their search. The technique encodes a given partially ordered partial plan as a new classical planning problem that yields the same set of solutions reachable from the given partial plan. As heuristic estimate of the given partial plan a state-based heuristic can be used estimating the goal distance of the initial state in the encoded problem. This technique also provides the first admissible heuristics for POCL planning, simply by using admissible heuristics from state-based planning. 
To show the potential of our technique, we conducted experiments where we compared two of the currently strongest heuristics from state-based planning with two of the currently best-informed heuristics from POCL planning.}, year = {2013}, booktitle = {Advances in Artificial Intelligence, Proceedings of the 36th German Conference on Artificial Intelligence (KI 2013)}, publisher = {Springer}, pages = {1--12}, tags = {SFB-TRR-62,Planning,KnowledgeModeling}, web_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2013/Bercher13POCLHeuristicsSlides.pdf}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2013/Bercher13POCLHeuristics.pdf} } @Inproceedings { SG13, author = {Schiller, Marvin R. G. and Glimm, Birte}, title = {Towards Explicative Inference for OWL}, abstract = {Automated reasoning in OWL is capable of inferences that are nontrivial for people to understand. We argue that the understanding of inferences would benefit from stepwise explanations. To build a system that supports such explicative inference, we propose a framework based on inference rules and proof tactics for OWL ontologies. In particular, the goal is to present inferences in a suitable and adaptable way to human users, and to predict whether certain inferences are harder to understand than others. 
This article outlines the conception of this framework and its benefits whose implementation is currently work in progress.}, year = {2013}, booktitle = {Proceedings of the 2013 International Description Logic Workshop (DL 2013)}, publisher = {CEUR Workshop Proceedings}, keywords = {OWL, Description Logics, Explanations, Justifications}, tags = {AutomatedReasoning}, web_url = {http://ceur-ws.org/Vol-1014/paper\_36.pdf}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2013/ScGl13a.pdf} } @Inproceedings { CGHM13a, author = {Chaussecourte, Pierre and Glimm, Birte and Horrocks, Ian and Motik, Boris and Pierre, Laurent}, title = {The Energy Management Adviser at EDF}, abstract = {The EMA (Energy Management Adviser) aims to produce personalised energy saving advice for EDF’s customers. The advice takes the form of one or more ``tips\\", and personalisation is achieved using semantic technologies: customers are described using RDF, an OWL ontology provides a conceptual model of the relevant domain (housing, environment, and so on) and the different kinds of tips, and SPARQL query answering is used to identify relevant tips. The current prototype provides tips to more than 300,000 EDF customers in France at least twice a year. 
The main challenges for our future work include providing a timely service for all of the 35 million EDF customers in France, simplifying the system's maintenance, and providing new ways for interacting with customers such as via a Web site.}, year = {2013}, DOI = {10.1007/978-3-642-41338-4\_4}, booktitle = {Proceedings of the 12th International Semantic Web Conference (ISWC 2013)}, volume = {8219}, publisher = {Springer-Verlag}, series = {Lecture Notes in Computer Science}, pages = {49-64}, keywords = {OWL, Description Logics, Explanations, Justifications}, tags = {AutomatedReasoning}, web_url = {http://link.springer.com/chapter/10.1007{\%}2F978-3-642-41338-4\_4}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2013/CGHM13a.pdf} } @Inproceedings { Bercher13POCLPreferences, author = {Bercher, Pascal and Ginter, Fabian and Biundo, Susanne}, title = {Search Strategies for Partial-Order Causal-Link Planning with Preferences}, abstract = {This paper studies how to solve classical planning problems with preferences by means of a partial-order causal-link (POCL) planning algorithm. Preferences are given by soft goals -- optional goals which increase a plan's benefit if satisfied at the end of a plan. Thus, we aim at finding a plan with the best \textitnet-benefit, which is the difference of the achieved preferences' benefit minus the cost of all actions in the plan that achieves them. While many approaches compile soft goals away, we study how they can be addressed natively by a POCL planning system. 
We propose novel search and flaw selection strategies for that problem class and evaluate them empirically.}, year = {2013}, booktitle = {27th PuK Workshop \dqPlanen, Scheduling und Konfigurieren, Entwerfen\dq (PuK 2013)}, pages = {29--40}, tags = {SFB-TRR-62,Planning}, web_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2013/Bercher13POCLPreferencesSlides.pdf}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2013/Bercher13POCLPreferences.pdf} } @Inproceedings { ProgrDecompSC, author = {Ponomaryov, Denis and Soutchanski, Mikhail}, title = {Progression of Decomposed Situation Calculus Theories}, year = {2013}, booktitle = {AAAI, The Twenty-Seventh Conference on Artificial Intelligence}, tags = {AutomatedReasoning}, file_url = {http://persons.iis.nsk.su/files/persons/pages/decprog-short.pdf} } @Proceedings { EGKK13a, title = {Proceedings of the 2013 International Description Logic Workshop (DL 2013), Ulm, Germany, July 23-26, 2013}, year = {2013}, booktitle = {Proceedings of the 2013 International Description Logic Workshop (DL 2013)}, volume = {1014}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, editor = {Eiter, Thomas and Glimm, Birte and Kazakov, Yevgeny and Kr\"{o}tzsch, Markus}, keywords = {Description Logics}, tags = {AutomatedReasoning} } @Inproceedings { Glodek2013LayeredLogic, author = {Glodek, Michael and Geier, Thomas and Biundo, Susanne and Schwenker, Friedhelm and Palm, G\"{u}nther}, title = {Recognizing User Preferences Based on Layered Activity Recognition and First-Order Logic}, abstract = {Only few cognitive architectures have been proposed that cover the complete range from recognizers working on the direct sensor input, to logical inference mechanisms of classical artificial intelligence (AI). 
Logical systems operate on abstract predicates, which are often related to an action-like state transition, especially when compared to the classes recognized by pattern recognition approaches. On the other hand, pattern recognition is often limited to static patterns, and temporal and multi-modal aspects of a class are often not regarded, e.g. by testing only on pre-segmented data. Recent trends in AI aim at developing applications and methods that are motivated by data-driven real world scenarios, while the field of pattern recognition attempts to push forward the boundary of pattern complexity. We propose a new generic architecture to close the gap between AI and pattern recognition approaches. In order to detect abstract complex patterns, we process sequential data in layers. On each layer, a set of elementary classes is recognized and the outcome of the classification is passed to the successive layer such that the time granularity increases. Layers can combine modalities, additional symbolic information or make use of reasoning algorithms. We evaluated our approach in an on-line scenario of activity recognition using three layers. 
The obtained results show that the combination of concepts from pattern recognition and high-level symbolic information leads to a prosperous and powerful symbiosis.}, year = {2013}, DOI = {10.1109/ICTAI.2013.101}, booktitle = {Proceedings of the 2013 IEEE 25th International Conference on Tools with Artificial Intelligence (ICTAI 2013)}, pages = {648-653}, keywords = {Conditioned hidden Markov model;Layered architecture;Markov logic network}, tags = {SFB-TRR-62}, web_url = {http://ieeexplore.ieee.org/xpl/articleDetails.jsp?tp=\\&arnumber=6735312} } @Inproceedings { Bercher13SampleFF, author = {Bercher, Pascal and Geier, Thomas and Richter, Felix and Biundo, Susanne}, title = {On Delete Relaxation in Partial-Order Causal-Link Planning}, abstract = {We prove a new complexity result for Partial-Order Causal-Link (POCL) planning, in which we study the hardness of refining a search node (i.e., a partial plan) to a valid solution given a delete effect-free domain model. While the corresponding decision problem is known to be polynomial in state-based search (where search nodes are states), it turns out to be intractable in the POCL setting. Since both the currently best-informed heuristics for POCL planning are based on delete relaxation, we hope that our result sheds some new light on the problem of designing heuristics for POCL planning. Based on this result, we developed a new variant of one of these heuristics which incorporates more information of the current partial plan. 
We evaluate our heuristic on several domains of the early International Planning Competitions and compare it with other POCL heuristics from the literature.}, year = {2013}, booktitle = {Proceedings of the 2013 IEEE 25th International Conference on Tools with Artificial Intelligence (ICTAI 2013)}, publisher = {IEEE Computer Society}, pages = {674--681}, tags = {SFB-TRR-62,Planning}, web_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2013/Bercher13SampleFFSlides.pdf}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2013/Bercher13SampleFF.pdf} } @Inproceedings { CTSG13a, author = {Cheptsov, Alexey and Tenschert, Axel and Schmidt, Paul and Glimm, Birte and Matthesius, Mauricio and Liebig, Thorsten}, title = {Introducing a New Scalable Data-as-a-Service Cloud Platform for Enriching Traditional Text Mining Techniques by Integrating Ontology Modelling and Natural Language Processing}, year = {2013}, booktitle = {Proceedings of the International Workshop on Big Web Data (BigWebData 2013)}, publisher = {Springer}, series = {Lecture Notes in Computer Science}, editor = {Axel Tenschert and Alexey Cheptsov}, keywords = {Big Data, Text Mining, Ontologies}, tags = {AutomatedReasoning}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2013/CTSG13a.pdf} } @Inproceedings { KazKli:13:ELK:Incremental:ISWC, author = {Kazakov, Yevgeny and Klinov, Pavel}, title = {Incremental Reasoning in OWL EL without Bookkeeping}, abstract = {We describe a method for updating the classification of ontologies expressed in the EL family of Description Logics after some axioms have been added or deleted. While incremental classification modulo additions is relatively straightforward, handling deletions is more problematic since it requires retracting logical consequences that are no longer valid. 
Known algorithms address this problem using various forms of bookkeeping to trace the consequences back to premises. But such additional data can consume memory and place an extra burden on the reasoner during application of inferences. In this paper, we present a technique, which avoids this extra cost while being very efficient for small incremental changes in ontologies. The technique is freely available as a part of the open-source EL reasoner ELK and its efficiency is demonstrated on naturally occurring and synthetic data.}, year = {2013}, DOI = {10.1007/978-3-642-41335-3\_15}, booktitle = {ISWC}, volume = {8218}, publisher = {Springer}, series = {Lecture Notes in Computer Science}, pages = {232-247}, tags = {KnowledgeModeling, ELK, LiveOntologies}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2013/KazKli13Incremental\_ISWC.pdf} } @Inproceedings { KazKli:13:Incremental:DL, author = {Kazakov, Yevgeny and Klinov, Pavel}, title = {Incremental Reasoning in EL+ without Bookkeeping}, abstract = {We describe a method for updating the classification of ontologies expressed in the EL family of Description Logics after some axioms have been added or deleted. While incremental classification modulo additions is relatively straightforward, handling deletions is more problematic since it requires retracting logical consequences that no longer hold. Known algorithms address this problem using various forms of bookkeeping to trace the consequences back to premises. But such additional data can consume memory and place an extra burden on the reasoner during application of inferences. 
In this paper, we present a technique, which avoids this extra cost while being still very efficient for small incremental changes in ontologies.}, year = {2013}, booktitle = {DL}, volume = {1014}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, pages = {294-315}, tags = {KnowledgeModeling, ELK, LiveOntologies}, web_url = {http://ceur-ws.org/Vol-1014/paper\_33.pdf}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2013/KazKli13Incremental\_DL.pdf} } @Inproceedings { StGL13b, author = {Steigmiller, Andreas and Glimm, Birte and Liebig, Thorsten}, title = {Extending Absorption to Nominal Schemas}, abstract = {Nominal schemas have recently been introduced as a new approach for the integration of DL-safe rules into the Description Logic framework. The efficient processing of knowledge bases with nominal schemas remains, however, challenging. We address this by extending the well-known optimisation of absorption \hlas well as the standard tableau calculus to directly handle the (absorbed) nominal schema axioms. We implement the resulting extension of standard tableau calculi in a novel reasoning system and we integrate further optimisations. 
In our empirical evaluation, we show the effect of these optimisations and we find that the proposed approach performs well even when compared to other DL reasoners with dedicated rule support.}, year = {2013}, booktitle = {Proceedings of the 26th International Description Logic Workshop (DL 2013)}, publisher = {CEUR Workshop Proceedings}, keywords = {Nominal Schema, Reasoning, Description Logics, Tableau, Optimisations, Optimizations}, tags = {AutomatedReasoning}, web_url = {http://ceur-ws.org/Vol-1014/paper\_20.pdf}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2013/StGL13b.pdf} } @Inproceedings { Kaz:Kli:2013:Android:ORE, author = {Kazakov, Yevgeny and Klinov, Pavel}, title = {Experimenting with ELK Reasoner on Android}, abstract = {This paper presents results of a preliminary evaluation of the OWL EL reasoner ELK running on a Google Nexus 4 cell phone under Android 4.2 OS. The results show that economic and well-engineered ontology reasoners can demonstrate acceptable performance when classifying ontologies with thousands of axioms and take advantage of multi-core CPUs of modern mobile devices. The paper emphasizes the engineering aspects of ELK's design and implementation which make this performance possible.}, year = {2013}, booktitle = {ORE}, volume = {1015}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, pages = {68-74}, tags = {KnowledgeModeling, ELK, LiveOntologies}, web_url = {http://ceur-ws.org/Vol-1015/paper\_9.pdf}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2013/KazKli13Android\_ORE.pdf} } @Inproceedings { Bercher13EncodingPlans, author = {Bercher, Pascal and Biundo, Susanne}, title = {Encoding Partial Plans for Heuristic Search}, abstract = {We propose a technique that allows any planning system that searches in the space of partial plans to make use of heuristics from the literature which are based on search in the space of states. 
The technique uses a problem encoding that reduces the problem of finding a heuristic value for a partial plan to finding a heuristic value for a state: It encodes a partial plan into a new planning problem, s.t. solutions for the new problem correspond to solutions reachable from the partial plan. Evaluating the goal distance of the partial plan then corresponds to evaluating the goal distance of the initial state in the new planning problem.}, year = {2013}, booktitle = {Proceedings of the 4th Workshop on Knowledge Engineering for Planning and Scheduling (KEPS 2013) at ICAPS 2013}, pages = {11--15}, tags = {SFB-TRR-62,Planning,KnowledgeModeling}, web_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2013/Bercher13EncodingPlansSlides.pdf}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2013/Bercher13EncodingPlans.pdf} } @Inproceedings { ConcDefinability, author = {Ponomaryov, Denis and Vlasov, Dmitry}, title = {Concept Definability and Interpolation in Enriched Models of EL-TBoxes}, year = {2013}, booktitle = {Proceedings of the 26th International Workshop on Description Logics (DL 2013)}, publisher = {CEUR Workshop Proceedings}, tags = {AutomatedReasoning}, file_url = {http://persons.iis.nsk.su/files/persons/pages/conceptinterpolation.pdf} } @Inproceedings { StGL13a, author = {Steigmiller, Andreas and Glimm, Birte and Liebig, Thorsten}, title = {Nominal Schema Absorption}, abstract = {Nominal schemas have recently been introduced as a new approach for the integration of DL-safe rules into the Description Logic framework. The efficient processing of knowledge bases with nominal schemas remains, however, challenging. We address this by extending the well-known optimisation of absorption as well as the standard tableau calculus to directly handle the (absorbed) nominal schema axioms. 
We implement the resulting extension of standard tableau calculi in a novel reasoning system and we integrate further optimisations. In our empirical evaluation, we show the effect of these optimisations and we find that the proposed approach performs well even when compared to other DL reasoners with dedicated rule support.}, year = {2013}, booktitle = {Proceedings of the 23rd International Joint Conference on Artificial Intelligence (IJCAI 2013)}, publisher = {AAAI Press/The MIT Press}, keywords = {Nominal Schema, Reasoning, Description Logics, Tableau, Optimisations, Optimizations}, tags = {AutomatedReasoning}, web_url = {http://www.aaai.org/ocs/index.php/IJCAI/IJCAI13/paper/view/6629}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2013/StGL13a.pdf} } @Inproceedings { NiGl12a, author = {Nikitina, Nadeschda and Glimm, Birte}, title = {Hitting the Sweetspot: Economic Rewriting of Knowledge Bases}, abstract = {In this paper, we consider the task of knowledge base extraction with its three conflicting requirements: the size of the extracted knowledge base, the size of the corresponding signature and the syntactic similarity of the extracted knowledge base with the originally given one. We demonstrate that, both, minimal module extraction and uniform interpolation, assign an absolute priority to one of these requirements, thereby limiting the possibilities to influence the other two. To account for scenarios, in which such an extreme prioritization is not necessary, we investigate the task of knowledge base extraction for EL based on two alternative, less restrictive notions of syntactic similarity with the second highest priority given to the knowledge base size. 
Moreover, to address scenarios, where computation time is important, we propose a tractable rewriting approach based on the chosen prioritization of requirements and empirically compare this novel technique with the existing implemented approaches with encouraging results.}, year = {2012}, month = {11}, DOI = {10.1007/978-3-642-35176-1\_25}, booktitle = {Proceedings of the 11th International Semantic Web Conference (ISWC 2012)}, volume = {7649}, publisher = {Springer-Verlag}, series = {Lecture Notes in Computer Science}, pages = {394-409}, keywords = {Reasoning, Description Logics, Optimisations, Optimizations, Modularisation, Modularization, Uniform Interpolation}, tags = {AutomatedReasoning}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2012/NiGl12a.pdf} } @Inproceedings { KoGl12b, author = {Kollia, Ilianna and Glimm, Birte}, title = {Cost Based Query Ordering over OWL Ontologies}, abstract = {The paper presents an approach for cost-based query planning for SPARQL queries issued over an OWL ontology using the OWL Direct Semantics entailment regime of SPARQL 1.1. The costs are based on information about the instances of classes and properties that are extracted from a model abstraction built by an OWL reasoner. A static and a dynamic algorithm are presented which use these costs to find optimal or near optimal execution orders for the atoms of a query. For the dynamic case, we exploit an individual clustering approach and compute the cost functions based only on at most one individual (sample) from each of the used clusters. We provide an experimental study which shows that, for queries for which accurate estimates are available from the beginning, the static usually outperforms the dynamic algorithm. However, when queries are issued over ontologies with disjunctive information and contain many atoms for which no accurate statistics can be extracted a-priori, then dynamic ordering is more promising. 
The use of cluster based sampling techniques leads to a performance improvement for queries with large intermediate result sizes.}, year = {2012}, month = {11}, DOI = {10.1007/978-3-642-35176-1\_15}, booktitle = {Proceedings of the 11th International Semantic Web Conference (ISWC 2012)}, volume = {7649}, publisher = {Springer-Verlag}, series = {Lecture Notes in Computer Science}, pages = {231-246}, keywords = {Reasoning, Description Logics, Optimisations, Optimizations}, tags = {SFB-TRR-62,AutomatedReasoning}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2012/KoGl12b.pdf} } @Inproceedings { Mueller2012HPOMDPs, author = {M\"{u}ller, Felix and Sp\"{a}th, Christian and Geier, Thomas and Biundo, Susanne}, title = {Exploiting Expert Knowledge in Factored POMDPs}, abstract = {Decision support in real-world applications is often challenging because one has to deal with large and only partially observable domains. In case of full observability, large domains are successfully tackled by making use of expert knowledge and employing methods like Hierarchical Task Network (HTN) planning. In this paper, we present an approach that transfers the advantages of HTN planning to partially observable domains. 
Experimental results for two implemented algorithms, UCT and A* search, show that our approach significantly speeds up the generation of high-quality policies: the policies generated by our approach consistently outperform policies generated by Symbolic Perseus and can be computed in less than 10{\%} of its runtime on average.}, year = {2012}, month = {8}, booktitle = {Proceedings of the 20th European Conference on Artificial Intelligence (ECAI 2012)}, publisher = {IOS Press}, pages = {606--611}, tags = {SFB-TRR-62,Planning}, web_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2012/Mueller12HierarchicalFSCsReloadedSlides.pdf}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2012/Mueller12HierarchicalFSCsReloaded.pdf} } @Inproceedings { Geier2012PersonTracking, author = {Geier, Thomas and Reuter, Stephan and Dietmayer, Klaus and Biundo, Susanne}, title = {Goal-Based Person Tracking Using a First-Order Probabilistic Model}, abstract = {This work addresses the problem of person tracking using additional background information. We augment a particle filter-based tracking algorithm with a first-order probabilistic model expressed through Markov Logic Networks to tackle the data association problem in domains with a high occlusion rate. Using a high-level model description allows us to easily integrate additional information like a floor plan or goal information into a joint model and resolve occlusion situations that would otherwise result in the loss of association. 
We discuss the engineered model in detail and give an empirical evaluation using an indoor setting.}, year = {2012}, month = {8}, booktitle = {Proceedings of the Ninth UAI Bayesian Modeling Applications Workshop (UAI-AW 2012)}, tags = {SFB-TRR-62, KnowledgeModeling}, web_url = {http://ceur-ws.org/Vol-962}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2012/Geier12TrackingGoals.pdf} } @Inproceedings { Elkawkagy12LandmarkStrategies, author = {Elkawkagy, Mohamed and Bercher, Pascal and Schattenberg, Bernd and Biundo, Susanne}, title = {Improving Hierarchical Planning Performance by the Use of Landmarks}, abstract = {Hierarchical landmarks can be extracted and used to reduce hierarchical domain models, thereby leading to substantial runtime improvements. In this work, we present novel domain-independent planning strategies based on these landmarks. Our empirical evaluation on four benchmark domains shows that these landmark-aware strategies outperform established search strategies in many cases, even for reduced domain models.}, year = {2012}, month = {7}, booktitle = {Proceedings of the 26th AAAI Conference on Artificial Intelligence ({AAAI} 2012)}, publisher = {AAAI Press}, pages = {1763--1769}, tags = {SFB-TRR-62,Planning}, web_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2012/Elkawkagy12LandmarkStrategiesPoster.pdf}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2012/Elkawkagy12LandmarkStrategies.pdf} } @Article { GHMS11a, author = {Glimm, Birte and Horrocks, Ian and Motik, Boris and Shearer, Rob and Stoilos, Giorgos}, title = {A Novel Approach to Ontology Classification}, abstract = {Ontology classification - the computation of the subsumption hierarchies for classes and properties - is a core reasoning service provided by all OWL reasoners known to us. 
A popular algorithm for computing the class hierarchy is the so-called Enhanced Traversal (ET) algorithm. In this paper we present a new classification algorithm that attempts to address certain shortcomings of ET and improve its performance. Apart from classification of classes, we also consider object and data property classification. Using several simple examples, we show that the algorithms commonly used to implement these tasks are incomplete even for relatively weak ontology languages. Furthermore, we show that property classification problems can be reduced to class classification problems, which allows us to classify properties using our optimised algorithm. We implemented all our algorithms in the OWL reasoner HermiT. The results of our performance evaluation show significant performance improvements on several well-known ontologies.}, year = {2012}, month = {7}, issn = {1570-8268}, DOI = {10.1016/j.websem.2011.12.007}, journal = {Journal of Web Semantics: Science, Services and Agents on the World Wide Web}, volume = {14}, publisher = {Elsevier Science Publishers (North-Holland)}, address = {Amsterdam}, pages = {84--101}, keywords = {Ontologies, OWL, Class Classification, Property Classification, Optimizations}, tags = {SFB-TRR-62,AutomatedReasoning}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2012/GHMS12a.pdf}, note = {Special Issue on Dealing with the Messiness of the Web of Data} } @Inproceedings { Seegebarth12Explanation, author = {Seegebarth, Bastian and M\"{u}ller, Felix and Schattenberg, Bernd and Biundo, Susanne}, title = {Making Hybrid Plans More Clear to Human Users - A Formal Approach for Generating Sound Explanations}, abstract = {Human users who execute an automatically generated plan want to understand the rationale behind it. 
Knowledge-rich plans are particularly suitable for this purpose, because they provide the means to give reason for causal, temporal, and hierarchical relationships between actions. Based on this information, focused arguments can be generated that constitute explanations on an appropriate level of abstraction. In this paper, we present a formal approach to plan explanation. Information about plans is represented as first-order logic formulae and explanations are constructed as proofs in the resulting axiomatic system. With that, plan explanations are provably correct w.r.t. the planning system that produced the plan. A prototype plan explanation system implements our approach and first experiments give evidence that finding plan explanations is feasible in real-time.}, year = {2012}, month = {6}, day = {27}, booktitle = {Proceedings of the 22nd International Conference on Automated Planning and Scheduling (ICAPS 2012)}, publisher = {AAAI Press}, pages = {225--233}, tags = {SFB-TRR-62,Planning}, web_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2012/Seegebarth12PlanExplanationSlides.pdf}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2012/Seegebarth12PlanExplanation.pdf} } @Inproceedings { StLG12b, author = {Steigmiller, Andreas and Liebig, Thorsten and Glimm, Birte}, title = {Extended Caching and Backjumping for Expressive Description Logics}, abstract = {With this contribution we push the boundary of some known optimisations such as caching to the very expressive Description Logic SROIQ.The developed method is based on a sophisticated dependency management and a precise unsatisfiability caching technique, which further enables better informed tableau backtracking and more efficient pruning. 
We empirically evaluate the proposed optimisation within the novel reasoning system Konclude and show that the proposed optimisations indeed result in significant performance improvements.}, year = {2012}, month = {6}, booktitle = {Proceedings of the 25th International Description Logic Workshop (DL 2012)}, volume = {846}, series = {CEUR Workshop Proceedings}, keywords = {Reasoning, Description Logics, Tableau, Optimisations, Optimizations}, tags = {AutomatedReasoning}, web_url = {http://ceur-ws.org/Vol-846/paper\_36.pdf}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2012/StLG12b.pdf} } @Inproceedings { StLG12a, author = {Steigmiller, Andreas and Liebig, Thorsten and Glimm, Birte}, title = {Extended Caching, Backjumping and Merging for Expressive Description Logics}, abstract = {With this contribution we push the boundary of some known optimisations such as caching to the very expressive Description Logic SROIQ.The developed method is based on a sophisticated dependency management and a precise unsatisfiability caching technique, which further enables better informed tableau backtracking and more efficient pruning. Additionally, we optimise the handling of cardinality restrictions, by introducing a strategy called pool-based merging. 
We empirically evaluate the proposed optimisations within the novel reasoning system Konclude and show that the proposed optimisations indeed result in significant performance improvements.}, year = {2012}, month = {6}, DOI = {10.1007/978-3-642-31365-3\_40}, booktitle = {Proceedings of the 6th International Joint Conference on Automated Reasoning (IJCAR 2012)}, volume = {7364}, series = {Lecture Notes in Computer Science}, pages = {514--529}, keywords = {Reasoning, Description Logics, Tableau, Optimisations, Optimizations}, tags = {AutomatedReasoning}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2012/StLG12a.pdf} } @Inproceedings { KoGl12a, author = {Kollia, Ilianna and Glimm, Birte}, title = {Cost Based Query Ordering over OWL Ontologies}, abstract = {The paper presents an approach for cost-based query planning for SPARQL queries issued over an OWL ontology using OWL's Direct Semantics. The costs are based on information about the instances of classes and properties that are extracted from a model abstraction built by an OWL reasoner. A static and a dynamic algorithm are presented which use these costs to find optimal or near optimal execution orders for the atoms of a query. For the dynamic case, we exploit an individual clustering approach and compute the cost functions based only on at most one individual (sample) from each of the used clusters. We provide an experimental study which shows that, for queries for which accurate estimates are available from the beginning, the static usually outperforms the dynamic algorithm. However, when queries are issued over ontologies with disjunctive information and contain many atoms for which no accurate statistics can be extracted a-priori, then dynamic ordering is more promising. 
The use of cluster based sampling techniques leads to a performance improvement for queries with huge intermediate result sizes.}, year = {2012}, month = {6}, booktitle = {Proceedings of the 25th International Description Logic Workshop (DL 2012)}, volume = {846}, series = {CEUR Workshop Proceedings}, keywords = {Reasoning, Description Logics, Tableau, Optimisations, Optimizations}, tags = {SFB-TRR-62,AutomatedReasoning}, web_url = {http://ceur-ws.org/Vol-846/paper\_40.pdf}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2012/KoGl12a.pdf} } @Inproceedings { Bercher12PreferenceHeuristic, author = {Bercher, Pascal and Biundo, Susanne}, title = {A Heuristic for Hybrid Planning with Preferences}, abstract = {In this paper, we introduce an admissible heuristic for hybrid planning with preferences. Hybrid planning is the fusion of hierarchical task network (HTN) planning with partial order causal link (POCL) planning. We consider preferences to be soft goals - facts one would like to see satisfied in a goal state, but which do not have to hold necessarily. Our heuristic estimates the best quality of any solution that can be developed from the current plan under consideration. 
It can thus be used by any branch-and-bound algorithm that performs search in the space of plans to prune suboptimal plans from the search space.}, year = {2012}, month = {5}, booktitle = {Proceedings of the Twenty-Fifth International Florida Artificial Intelligence Research Society Conference (FLAIRS 2012)}, publisher = {AAAI Press}, pages = {120--123}, tags = {SFB-TRR-62,Planning}, web_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2012/Bercher12PreferenceHeuristicPoster.pdf}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2012/Bercher12PreferenceHeuristic.pdf} } @Proceedings { GHKP12a, author = {Glimm, Birte and Hogan, Aidan and Kr\"{o}tzsch, Markus and Polleres, Axel}, title = {OWL: Yet to arrive on the Web of Data?}, year = {2012}, month = {4}, day = {16}, volume = {937}, publisher = {CEUR Workshop Proceedings}, event_name = {Proceedings of the 5th Linked Data on the Web Workshop (LDOW2012)}, keywords = {Ontologies, OWL, Linked Data, Semantic Web}, tags = {AutomatedReasoning}, web_url = {http://ceur-ws.org/Vol-937/ldow2012-paper-16.pdf}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2012/GHKP12a.pdf} } @Article { NiRG11c, author = {Nikitina, Nadeschda and Rudolph, Sebastian and Glimm, Birte}, title = {Interactive Ontology Revision}, abstract = {When ontological knowledge is acquired automatically, quality control is essential. Which part of the automatically acquired knowledge is appropriate for an application often depends on the context in which the knowledge base or ontology is used. In order to determine relevant and irrelevant or even wrong knowledge, we support the tightest possible quality assurance approach - an exhaustive manual inspection of the acquired data. 
By using automated reasoning, this process can be partially automatized: after each expert decision, axioms that are entailed by the already confirmed statements are automatically approved, whereas axioms that would lead to an inconsistency are declined. Starting from this consideration, this paper provides theoretical foundations, heuristics, optimization strategies and comprehensive experimental results for our approach to efficient reasoning-supported interactive ontology revision. We introduce and elaborate on the notions of revision states and revision closure as formal foundations of our method. Additionally, we propose a notion of axiom impact which is used to determine a beneficial order of axiom evaluation in order to further increase the effectiveness of ontology revision. The initial notion of impact is then further refined to take different validity ratios - the proportion of valid statements within a dataset - into account. Since the validity ratio is generally not known a priori - we show how one can work with an estimate that is continuously improved over the course of the inspection process.Finally, we develop the notion of decision spaces, which are structures for calculating and updating the revision closure and axiom impact. We optimize the computation performance further by employing partitioning techniques and provide an implementation supporting these optimizations as well as featuring a user front-end. 
Our evaluation shows that our ranking functions almost achieve the maximum possible automatization and that the computation time needed for each reasoning-based, automatic decision takes less than one second on average for our test dataset of over 25,000 statements.}, year = {2012}, month = {4}, issn = {1570-8268}, DOI = {10.1016/j.websem.2011.12.002}, journal = {Journal of Web Semantics: Science, Services and Agents on the World Wide Web}, volume = {12-13}, publisher = {Elsevier Science Publishers (North-Holland), Amsterdam}, pages = {118--130}, keywords = {Ontologies, Knowledge Representation, Automated Reasoning, Quality Assurance, OWL}, tags = {SFB-TRR-62,AutomatedReasoning}, web_url = {http://www.sciencedirect.com/science/article/pii/S1570826811001028}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2012/NiRG12a.pdf}, note = {Special Issue on Reasoning with Context in the Semantic Web} } @Inproceedings { Schattenberg12Mice, author = {Schattenberg, Bernd and Schulz, Andreas L. and Brechmann, Andr\'{e} and Ohl, Frank W. and Biundo, Susanne}, title = {Planning Models for Two-Way Avoidance and Reversal Learning}, abstract = {Reinforcement learning models can explain various aspects of two-way avoidance learning but do not provide a rationale for the relationship found between the dynamics of initial learning and those of reversal learning. Artificial Intelligence planning offers a novel way to conceptualize the learners' cognitive processes by providing an explicit representation of and reasoning about internal processing stages. 
Our hybrid planning and plan repair approach demonstrates that the empirically found relationships could be motivated from a consistent theoretical framework.}, year = {2012}, month = {2}, booktitle = {Proceedings of the 7th Vienna International Conference on Mathematical Modelling (MATHMOD 2012)}, address = {Vienna, Austria}, tags = {SFB-TRR-62,Planning}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2012/Schattenberg12PlanningModels.pdf} } @Proceedings { GlHu12a, title = {Proceedings of the ISWC 2012 Posters \& Demonstrations Track, Boston, USA, November 11-15, 2012}, year = {2012}, booktitle = {ISWC-PD International Semantic Web Conference (Posters \& Demos)}, volume = {914}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, editor = {Glimm, Birte and Huynh, David}, keywords = {Semantic Web}, tags = {AutomatedReasoning} } @Inproceedings { geier2012association, author = {Geier, Thomas and Reuter, Stephan and Dietmayer, Klaus and Biundo, Susanne}, title = {Track-Person Association Using a First-Order Probabilistic Model}, year = {2012}, DOI = {10.1109/ICTAI.2013.101}, booktitle = {Proceedings of the 24th IEEE International Conference on Tools with Artificial Intelligence (ICTAI)}, pages = {844--851}, event_name = {Tools with Artificial Intelligence}, event_place = {Athens, Greece}, keywords = {SFB-TRR-62, KnowledgeModeling}, web_url = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=6495131\\&tag=1}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2012/Geier2012TrackingB.pdf} } @Conference { KazKroSim12NominalsEL_KR, author = {Kazakov, Yevgeny and Kr\"{o}tzsch, Markus and Siman\v{c}\'{i}k, Franti\v{s}ek}, title = {Practical Reasoning with Nominals in the EL Family of Description Logics}, abstract = {The EL family of description logics (DLs) has been designed to provide a restricted syntax for commonly used DL constructors with the goal to guarantee 
polynomial complexity of reasoning. Yet, polynomial complexity does not always mean that the underlying reasoning procedure is efficient in practice. In this paper we consider a simple DL ELO from the EL family that admits nominals, and argue that existing polynomial reasoning procedures for ELO can be impractical for many realistic ontologies. To solve the problem, we describe an optimization strategy in which the inference rules required for reasoning with nominals are avoided as much as possible. The optimized procedure is evaluated within the reasoner ELK and demonstrated to perform well in practice.}, year = {2012}, isbn = {978-1-57735-560-1}, booktitle = {Proceedings of the Thirteenth International Conference on Principles of Knowledge Representation and Reasoning ({KR} 2012)}, publisher = {AAAI Press}, editor = {Gerhard Brewka and Thomas Eiter and Sheila A. McIlraith}, tags = {KnowledgeModeling, ELK, LiveOntologies}, web_url = {http://www.aaai.org/ocs/index.php/KR/KR12/paper/view/4540}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2012/KazKroSim12NominalsEL\_KR.pdf} } @Inproceedings { nothdurft2012using, author = {Nothdurft, Florian and Honold, Frank and Kurzok, Peter}, title = {Using explanations for runtime dialogue adaptation}, abstract = {In this demo paper we present a system that is capable of adapting the dialogue between human and so-called companion systems in real-time. Companion systems are continually available, co-operative, and reliable assistants which adapt to a user's capabilities, preferences, requirements, and current needs. Typically state-of-the art Human-Computer interfaces adapt the interaction only to pre-defined levels of expertise. 
In contrast, the presented system adapts the structure and content of the interaction to each user by including explanations to prepare him for upcoming tasks he has to solve together with the companion system.}, year = {2012}, organization = {ACM}, booktitle = {Proceedings of the 14th ACM international conference on Multimodal interaction}, pages = {63--64}, tags = {SFB-TRR-62}, web_url = {http://dl.acm.org/citation.cfm?id=2388676.2388694\\&coll=DL\\&dl=GUIDE}, file_url = {http://delivery.acm.org/10.1145/2390000/2388694/p63-nothdurft.pdf} } @Inproceedings { Wendemuth2012CompanionTechnology, author = {Wendemuth, Andreas and Biundo, Susanne}, title = {A Companion Technology for Cognitive Technical Systems}, abstract = {The Transregional Collaborative Research Centre SFB/TRR 62 \dqA Companion Technology for Cognitive Technical Systems\dq, funded by the German Research Foundation (DFG) at Ulm and Magdeburg sites, deals with the systematic and interdisciplinary study of cognitive abilities and their implementation in technical systems. The properties of multimodality, individuality, adaptability, availability, cooperativeness and trustworthiness are at the focus of the investigation. These characteristics show a new type of interactive device which is not only practical and efficient to operate, but as well agreeable, hence the term ”companion”. The realisation of such a technology is supported by technical advancement as well as by neurobiological findings. Companion technology has to consider the entire situation of the user, machine, environment and (if applicable) other people or third interacting parties, in current and historical states. 
This will reflect the mental state of the user, his embeddedness in the task, and how he is situated in the current process.}, year = {2012}, DOI = {10.1007/978-3-642-34584-5\_7}, booktitle = {Cognitive Behavioural Systems: COST 2102 International Training School, Revised Selected Papers}, publisher = {Springer Berlin Heidelberg}, series = {Lecture Notes in Computer Science}, editor = {Esposito, Anna and Vinciarelli, Alessandro and Hoffman, R\"{u}diger and M\"{u}ller, Vincent C.}, pages = {89--103}, tags = {SFB-TRR-62,Planning}, web_url = {http://link.springer.com/chapter/10.1007{\%}2F978-3-642-34584-5\_7} } @Proceedings { GlKr12a, title = {KI 2012: Advances in Artificial Intelligence - 35th Annual German Conference on AI, Saarbr\"{u}cken, Germany, September 24-27, 2012. Proceedings}, year = {2012}, isbn = {978-3-642-33346-0}, booktitle = {KI}, volume = {7526}, publisher = {Springer}, series = {Lecture Notes in Artificial Intelligence}, editor = {Glimm, Birte and Kr\"{u}ger, Antonio}, keywords = {Artificial Intelligence}, tags = {AutomatedReasoning} } @Techreport { StLG12c, author = {Steigmiller, Andreas and Liebig, Thorsten and Glimm, Birte}, title = {Extended Caching, Backjumping and Merging for Expressive Description Logics}, abstract = {With this contribution we push the boundary of some known optimisations such as caching to the very expressive Description Logic SROIQ.The developed method is based on a sophisticated dependency management and a precise unsatisfiability caching technique, which further enables better informed tableau backtracking and more efficient pruning. 
Additionally, we optimise the handling of cardinality restrictions, by introducing a strategy called pool-based merging.We empirically evaluate the proposed optimisations within the novel reasoning system Konclude and show that the proposed optimisations indeed result in significant performance improvements.}, year = {2012}, institution = {University of Ulm}, number = {TR-2012-01}, keywords = {Reasoning, Description Logics, Tableau, Optimisations, Optimizations}, tags = {AutomatedReasoning}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui/Ulmer\_Informatik\_Berichte/2012/UIB-2012-01.pdf} } @Techreport { KazKroSim12ELK_TR, author = {Kazakov, Yevgeny and Kr\"{o}tzsch, Markus and Siman\v{c}\'{i}k, Franti\v{s}ek}, title = {ELK: A Reasoner for OWL EL Ontologies}, abstract = {ELK is a specialized reasoner for the lightweight ontology language OWL EL. The practical utility of ELK is in its combination of high performance and comprehensive support for language features. At its core, ELK employs a consequence-based reasoning engine that can take advantage of multi-core and multi-processor systems. A modular architecture allows ELK to be used as a stand-alone application, Prot\'{e}g\'{e} plug-in, or programming library (either with or without the OWL API). This system description presents the current state of ELK.}, year = {2012}, institution = {University of Oxford}, tags = {AutomatedReasoning, ELK}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2012/KazKroSim12ELK\_TR.pdf} } @Inproceedings { KazKroSim12ELK_ORE, author = {Kazakov, Yevgeny and Kr\"{o}tzsch, Markus and Siman\v{c}\'{i}k, Franti\v{s}ek}, title = {ELK Reasoner: Architecture and Evaluation}, abstract = {ELK is a specialized reasoner for the lightweight ontology language OWL EL. The practical utility of ELK is in its combination of high performance and comprehensive support for language features. 
At its core, ELK employs a consequence-based reasoning engine that can take advantage of multi-core and multi-processor systems. A modular architecture allows ELK to be used as a stand-alone application, Prot\'{e}g\'{e} plug-in, or programming library (either with or without the OWL API). This system description presents the current state of ELK and experimental results with some difficult OWL EL ontologies.}, year = {2012}, booktitle = {Proceedings of the 1st International Workshop on OWL Reasoner Evaluation ({ORE} 2012)}, tags = {SemanticTechnologies, AutomatedReasoning, ELK}, web_url = {http://ceur-ws.org/Vol-858/ore2012\_paper10.pdf}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2012/KazKroSim12ELK\_ORE.pdf} } @Inproceedings { honold2012companion, author = {Honold, Frank and Sch\"{u}ssel, Felix and Nothdurft, Florian and Kurzok, Peter}, title = {Companion technology for multimodal interaction}, abstract = {We present a context adaptive approach for multimodal interaction for the use in cognitive technical systems, so called Companion Systems. Such systems yield properties of multimodality, individuality, adaptability, availability, cooperativeness and trustworthiness. These characteristics represent a new type of interactive systems that are not only practical and efficient to operate, but as well agreeable, hence the term \dqcompanion\dq. Companion technology has to consider the entire situation of the user, machine, and environment. The presented prototype depicts a system that offers assistance in the task of wiring the components of a home cinema system. 
The user interface for this task is not predefined, but built on the fly by dedicated fission and fusion components, thereby adapting the system's multimodal output and input capabilities to the user and the environment.}, year = {2012}, organization = {ACM}, booktitle = {Proceedings of the 14th ACM international conference on Multimodal interaction}, pages = {67--68}, tags = {SFB-TRR-62}, web_url = {http://dl.acm.org/citation.cfm?id=2388676.2388696\\&coll=DL\\&dl=GUIDE}, file_url = {http://delivery.acm.org/10.1145/2390000/2388696/p67-honold.pdf} } @Inproceedings { NiGR11a, author = {Nikitina, Nadeschda and Glimm, Birte and Rudolph, Sebastian}, title = {Wheat and Chaff - Practically Feasible Interactive Ontology Revision}, abstract = {When ontological knowledge is acquired automatically, quality control is essential. We consider the tightest possible approach - an exhaustive manual inspection of the acquired data. By using automated reasoning, we partially automate the process: after each expert decision, axioms that are entailed by the already approved statements are automatically approved, whereas axioms that would lead to an inconsistency are declined. Adequate axiom ranking strategies are essential in this setting to minimize the amount of expert decisions. In this paper, we present a generalization of the previously proposed ranking techniques which works well for arbitrary validity ratios - the proportion of valid statements within a dataset - whereas the previously described ranking functions were either tailored towards validity ratios of exactly 100{\%} and 0{\%} or were optimizing the worst case. The validity ratio - generally not known a priori - is continuously estimated over the course of the inspection process. We further employ partitioning techniques to significantly reduce the computational effort. 
We provide an implementation supporting all these optimizations as well as featuring a user front-end for successive axiom evaluation, thereby making our proposed strategy applicable to practical scenarios. This is witnessed by our evaluation showing that the novel parameterized ranking function almost achieves the maximum possible automation and that the computation time needed for each reasoning-based, automatic decision is reduced to less than one second on average for our test dataset of over 25,000 statements.}, year = {2011}, DOI = {10.1007/978-3-642-25073-6\_31}, booktitle = {Proceedings of the 10th International Semantic Web Conference (ISWC 2011)}, volume = {7031}, publisher = {Springer-Verlag}, pages = {487--503}, tags = {AutomatedReasoning}, web_url = {http://www.springerlink.com/content/r51v2xn6188kv4m2/}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2011/NiGR11a.pdf} } @Inproceedings { Bidot2011ServiceWorkflows, author = {Bidot, Julien and Goumopoulos, Christos and Calemis, Ioannis}, title = {Using AI planning and late binding for managing service workflows in intelligent environments}, abstract = {In this paper, we present an approach to aggregating and using devices that support the everyday life of human users in ambient intelligence environments. These execution environments are complex and changing over time, since the devices of the environments are numerous and heterogeneous, and they may appear or disappear at any time. In order to appropriately adapt the ambient system to a user's needs, we adopt a service-oriented approach; i.e., devices provide services that reflect their capabilities. The orchestration of the devices is actually realized with the help of Artificial Intelligence planning techniques and dynamic service binding. 
At design time, (i) a planning problem is created that consists of the user's goal to be achieved and the services currently offered by the intelligent environment, (ii) the planning problem is then solved using Hierarchical Task Network and Partial-Order Causal-Link planning techniques, (iii) and from the planning decisions taken to find solution plans, abstract service workflows are automatically generated. At run time, the abstract services are dynamically bound to devices that are actually present in the environment. Adaptation of the workflow instantiation is possible due to the late binding mechanism employed. The paper depicts the architecture of our system. It also describes the modeling and the life cycle of the workflows. We discuss the advantages and the limit of our approach with respect to related work and give specific details about implementation. We present some experimental results that validate our system in a real-world application scenario.}, year = {2011}, DOI = {10.1109/PERCOM.2011.5767580}, booktitle = {Proceedings of the Ninth Annual IEEE International Conference on Pervasive Computing and Communications (PerCom)}, pages = {156--163}, tags = {SFB-TRR-62,Planning}, web_url = {http://ieeexplore.ieee.org/stamp/stamp.jsp?tp=\\&arnumber=5767580} } @Inproceedings { LKR+11, author = {Thao Ly, Linh and Knuplesch, David and Rinderle-Ma, Stefanie and G\"{o}ser, Kevin and Pfeifer, Holger and Reichert, Manfred and Dadam, Peter}, title = {SeaFlows Toolset - Compliance Verification Made Easy for Process-aware Information Systems}, abstract = {In the light of an increasing demand on business process compliance, the verification of process models against compliance rules has become essential in enterprise computing. The SeaFlows Toolset featured in this paper extends process-aware information systems with compliance checking functionality. 
It provides a user-friendly environment for modeling compliance rules using a graph-based formalism and for enriching process models with these rules. To address a multitude of verification settings, we provide two complementary compliance checking approaches: The structural compliance checking approach derives structural criteria from compliance rules and applies them to detect incompliance. The data-aware behavioral compliance checking approach addresses the state explosion problem that can occur when the data dimension is explored during compliance checking. It performs context-sensitive automatic abstraction to derive an abstract process model which is more compact with regard to the data dimension enabling more efficient compliance checking. Altogether, SeaFlows Toolset constitutes a comprehensive and extensible framework for compliance checking of process models.}, year = {2011}, booktitle = {Information Systems Evolution -- CAiSE Forum 2010, Selected Extended Papers}, volume = {72}, publisher = {Springer}, series = {Lecture Notes in Business Information Processing}, editor = {Aalst, Will and Mylopoulos, John and Sadeh, Norman M. and Shaw, Michael J. and Szyperski, Clemens and Soffer, Pnina and Proper, Erik}, pages = {76--91}, file_url = {http://dbis.eprints.uni-ulm.de/687/1/Ly10\_CAiSEForume.pdf} } @Inproceedings { Schulz2011ReversalLearning, author = {Schulz, Andreas and Schattenberg, Bernd and Woldeit, Marie and Brechmann, Andr\'{e} and Biundo, Susanne and W. Ohl, Frank}, title = {Reinforcement learning and planning models for two-way-avoidance and reversal learning}, abstract = {The framework of reinforcement learning (RL), in particular temporal difference (TD) learning, is traditionally utilized to model Pavlovian and Instrumental Learning in animal and human subjects. Neurophysiological correlates model parameters, on the level of both single cells and fMRI data, support this approach ( Barto and Sutton 1998, Schulz 2001, Doya 2008). 
Notably, most of these models are applied to appetitive learning, as their nature demands that reinforcement is directly related to an action. In aversive learning this is not the case, at least not for directly observable actions of the animal. Therefore RL models of avoidance learning are studied only relatively recently (e.g. Moutoussis et al. 2008). The first aim of the present study was the investigation of RL models in the context of two-way avoidance and reversal learning. We used a well-established animal model for two-way avoidance, the Mongolian gerbil trained in a shuttle box Go/NoGo paradigm (Wetzel 1998; Ohl et al. 2001). We have extended the RL model of Moutoussis et al. and compared the results to the animal behavior.We also found, that Reinforcement Learning models can explain various aspects of two-way avoidance learning but do not provide a rationale for the relationship found between dynamics of initial learning and dynamics of reversal learning. However, the Artificial Intelligence (AI) paradigm of Automated Planning can provide a rationale for explaining such relationships. As the second aim of this study, we therefore modeled the avoidance learning by a Hybrid Planning ( Biundo and Schattenberg 2001) and plan repair approach (Bidot et al. 2008) and demonstrated that the empirically found relationships could be motivated from a consistent theoretical framework. 
The AI planning framework has the additional advantage over mere Reinforcement models to provide an explicit representation of and reasoning about \\"internal\\" processing stages.}, year = {2011}, booktitle = {Proceedings of the Annual Meeting of the Society For Neuroscience}, keywords = {avoidance,reinforcement learning, modeling}, tags = {SFB-TRR-62,Planning,KnowledgeModeling}, web_url = {http://www.abstractsonline.com/Plan/ViewAbstract.aspx?sKey=36dc39eb-9262-472e-9ee0-df4fc3c6abd5\\&cKey=f1c0c307-ad80-41c9-b32f-f5def2a643e0\\&mKey={\%}7b8334BE29-8911-4991-8C31-32B32DD5E6C8{\%}7d} } @Inproceedings { liebig_rscale!11, author = {Liebig, Thorsten and Opitz, Michael}, title = {Reasoning over Dynamic Data in Expressive Knowledge Bases with Rscale}, abstract = {We introduce Rscale, a secondary storage-aware OWL 2 RL reasoning system capable of dealing with incremental additions and deletions of facts. Our initial evaluation indicates that Rscale is suitable for stream reasoning scenarios characterized by expressive reasoning tasks triggered by a moderate change frequency.}, year = {2011}, booktitle = {Proceedings of Workshop on Ordering and Reasoning (OrdRing 2011)}, keywords = {OWL 2 RL, stream reasoning, incremental updates}, tags = {SemanticTechnologies,AutomatedReasoning}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2011/ordring11-liebig.pdf} } @Article { Biundo2011RealWorldPlanning, author = {Biundo, Susanne and Bidot, Julien and Schattenberg, Bernd}, title = {Planning in the Real World}, abstract = {In this article, we describe how real world planning problems can be solved by employing Artificial Intelligence planning techniques. We introduce the paradigm of hybrid planning, which is particularly suited for applications where plans are not intended to be automatically executed by systems, but are made for humans. 
Hybrid planning combines hierarchical planning – the stepwise refinement of complex tasks – with explicit reasoning about causal dependencies between actions, thereby reflecting exactly the kinds of reasoning humans perform when developing plans. We show how plans are generated and how failed plans are repaired in a way that guarantees stability. Our illustrating examples are taken from a domain model for disaster relief missions enforced upon extensive floods. Finally, we present a tool to support the challenging task of constructing planning domain models. The article ends with an overview of a wide varity of actual planning applications and outlines further such in the area of cognitive technical systems.}, year = {2011}, DOI = {10.1007/s00287-011-0562-7}, journal = {Informatik-Spektrum}, volume = {34}, pages = {443--454}, number = {5}, tags = {SFB-TRR-62,Planning}, web_url = {http://www.springerlink.com/content/514981j48161127x/}, file_url = {http://www.springerlink.com/content/514981j48161127x/fulltext.pdf} } @Article { owllink!11, author = {Liebig, Thorsten and Luther, Marko and Noppens, Olaf and Wessel, Michael}, title = {OWLlink}, abstract = {A semantic application typically is a heterogenous system of interconnected components, most notably a reasoner. OWLlink is an implementation-neutral protocol for communication between OWL 2 components, published as a W3C Member Submission. It specifies how to manage reasoning engines and their Knowledge Bases, how to assert axioms, and how to query inference results. A key feature of OWLlink is its extensibility, which allows the addition of required functionality to the protocol. We introduce the OWLlink structural specification as well as three bindings which use HTTP as concrete transport protocol for exchanging OWLlink messages rendered according to selected OWL 2 syntaxes. 
Finally, we report on existing APIs, reasoners and applications that implement OWLlink.}, year = {2011}, issn = {1570-0844}, DOI = {10.3233/SW-2011-0027}, journal = {Journal Semantic Web - Interoperability, Usability, Applicability}, volume = {2}, pages = {23--32}, number = {1}, keywords = {OWL, OWL 2, protocol, reasoning, Semantic Web}, tags = {SemanticTechnologies, AutomatedReasoning}, file_url = {http://www.semantic-web-journal.net/sites/default/files/swj64\_0.pdf} } @Inproceedings { Geier2011HybridDecidability, author = {Geier, Thomas and Bercher, Pascal}, title = {On the Decidability of HTN Planning with Task Insertion}, abstract = {The field of deterministic AI planning can roughly be divided into two approaches - classical state-based planning and hierarchical task network (HTN) planning. The plan existence problem of the former is known to be decidable while it has been proved undecidable for the latter. When extending HTN planning by allowing the unrestricted insertion of tasks and ordering constraints, one obtains a form of planning which is often referred to as \dqhybrid planning\dq.We present a simplified formalization of HTN planning with and without task insertion. We show that the plan existence problem is undecidable for the HTN setting without task insertion and that it becomes decidable when allowing task insertion. 
In the course of the proof, we obtain an upper complexity bound of EXPSPACE for the plan existence problem for propositional HTN planning with task insertion.}, year = {2011}, booktitle = {Proceedings of the 22nd International Joint Conference on Artificial Intelligence (IJCAI 2011)}, publisher = {AAAI Press}, pages = {1955--1961}, tags = {SFB-TRR-62,Planning}, web_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2011/Geier11HybridDecidabilityPoster.pdf}, web_url2 = {http://www.aaai.org/ocs/index.php/IJCAI/IJCAI11/paper/view/3194}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2011/Geier11HybridDecidability.pdf} } @Incollection { Bidot2011AmbientEnvironments, author = {Bidot, Julien and Biundo, Susanne}, title = {Artificial Intelligence Planning for Ambient Environments}, abstract = {In this chapter, we describe how Artificial Intelligence planning techniques are used in The Adapted and TRusted Ambient eCOlogies (ATRACO) in order to provide Sphere Adaptation . We introduce the Planning Agent (PA) which plays a central role in the realization and the structural adaptation of activity spheres. Based on the particular information included in the ontology of the execution environment, the PA delivers workflows that consist of the basic activities to be executed in order to achieve a user’s goals. The PA encapsulates a search engine for hybrid planning--the combination of hierarchical task network (HTN) planning and partial-order causal-link (POCL) planning . In this chapter, we describe a formal framework and a development platform for hybrid planning, PANDA. This platform allows for the implementation of many search strategies, and we explain how we realize the search engine of the PA by adapting and configuring PANDA specifically for addressing planning problems that are part of the ATRACO service composition. 
We describe how the PA interacts with the Sphere Manager and the Ontology Manager in order to create planning problems dynamically and generate workflows in the ATRACO-BPEL language. In addition, an excerpt of a planning domain for ATRACO is provided.}, year = {2011}, DOI = {10.1007/978-3-319-23452-6\_8}, booktitle = {Next Generation Intelligence Environments - Ambient Adaptive Systems}, edition = {1}, publisher = {Springer}, chapter = {6}, editor = {Tobias Heinroth and Wolfgang Minker}, pages = {195--225}, tags = {SFB-TRR-62,Planning} } @Article { Biundo2011CognitiveSystems, author = {Biundo, Susanne and Bercher, Pascal and Geier, Thomas and M\"{u}ller, Felix and Schattenberg, Bernd}, title = {Advanced user assistance based on AI planning}, abstract = {Artificial Intelligence technologies enable the implementation of cognitive systems with advanced planning and reasoning capabilities. This article presents an approach to use hybrid planning - a method that combines reasoning about procedural knowledge and causalities - to provide user-centered assistance.Based on a completely declarative description of actions, tasks, and solution methods, hybrid planning allows for the generation of knowledge-rich plans of action. The information those plans comprise includes causal dependencies between actions on both abstract and primitive levels as well as information about their hierarchical and temporal relationships.We present the hybrid planning approach in detail and show its potential by describing the realization of various assistance functionalities based on complex cognitive processes like the generation, repair, and explanation of plans. 
Advanced user assistance is demonstrated by means of a practical application scenario where an innovative electronic support mechanism helps a user to operate a complex mobile communication device.}, year = {2011}, issn = {1389--0417}, DOI = {10.1016/j.cogsys.2010.12.005}, journal = {Cognitive Systems Research}, volume = {12}, pages = {219--236}, number = {3-4}, keywords = {Cognitive technical systems, Companion-technology, Hybrid planning, Plan repair, Plan explanation, Real-world planning}, tags = {SFB-TRR-62,Planning}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2011/Biundo11AdvancedUserAssistance.pdf}, note = {Special Issue on Complex Cognition} } @Inproceedings { Geier2011OnlineInference, author = {Geier, Thomas and Biundo, Susanne}, title = {Approximate Online Inference for Dynamic Markov Logic Networks}, abstract = {We examine the problem of filtering for dynamic probabilistic systems using Markov Logic Networks. We propose a method to approximately compute the marginal probabilities for the current state variables that is suitable for online inference. Contrary to existing algorithms, our approach does not work on the level of belief propagation, but can be used with every algorithm suitable for inference in Markov Logic Networks, such as MCSAT. 
We present an evaluation of its performance on two dynamic domains.}, year = {2011}, DOI = {10.1109/ICTAI.2011.120}, booktitle = {Proceedings of the 23rd IEEE International Conference on Tools with Artificial Intelligence (ICTAI)}, pages = {764--768}, keywords = {dmln, mln, dynamic, inference}, tags = {SFB-TRR-62, KnowledgeModeling}, web_url = {http://www.cse.fau.edu/ictai2011/}, web_url2 = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=6103411}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2011/Geier11ApproxDMLNs.pdf} } @Inproceedings { Elkawkagy2011LandmarkStrategies, author = {Elkawkagy, Mohamed and Bercher, Pascal and Schattenberg, Bernd and Biundo, Susanne}, title = {Landmark-Aware Strategies for Hierarchical Planning}, abstract = {In hierarchical planning, landmarks are abstract tasks the decomposition of which are mandatory when trying to find a solution to a given problem. In this paper, we present novel domain-independent strategies that exploit landmark information to speed up the planning process. The empirical evaluation shows that the landmark-aware strategies outperform established search strategies for hierarchical planning.}, year = {2011}, booktitle = {Workshop on Heuristics for Domain-independent Planning (HDIP 2011) at ICAPS 2011}, pages = {73--79}, tags = {SFB-TRR-62,Planning}, web_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2011/Elkawkagy11LandmarkStrategiesSlides.pdf}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2011/Elkawkagy11LandmarkStrategies.pdf} } @Inproceedings { Mueller2011HierarchicalFSCs, author = {M\"{u}ller, Felix and Biundo, Susanne}, title = {HTN-Style Planning in Relational POMDPs Using First-Order FSCs}, abstract = {n this paper, a novel approach to hierarchical planning under partial observability in relational domains is presented. 
It combines hierarchical task network planning with the finite state controller (FSC) policy representation for partially observable Markov decision processes. Based on a new first-order generalization of FSCs, action hierarchies are defined as in traditional hierarchical planning, so that planning corresponds to finding the best plan in a given decomposition hierarchy of predefined, partially abstract FSCs. Finally, we propose an algorithm for solving planning problems in this setting. Our approach offers a way of practically dealing with real-world partial observability planning problems: it avoids the complexity originating from the dynamic programming backup operation required in many present-day policy generation algorithms.}, year = {2011}, booktitle = {Proceedings of the 34th Annual German Conference on Artificial Intelligence (KI 2011)}, publisher = {Springer}, editor = {Joscha Bach and Stefan Edelkamp}, pages = {216--227}, tags = {SFB-TRR-62,Planning}, web_url = {http://www.springerlink.com/content/g051ht236j73761m/}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2011/paper-hierarchicalFOFSCs.pdf} } @Inproceedings { Bercher2011Preferences, author = {Bercher, Pascal and Biundo, Susanne}, title = {Hybrid Planning with Preferences Using a Heuristic for Partially Ordered Plans}, abstract = {This paper is concerned with the problem of finding preferred plans in a hybrid planning setting, which is the fusion of classical and hierarchical planning. Here, we define preferences as weighted soft goals - facts one would like to see satisfied in a goal state, but which do not have to hold necessarily. We present a branch-and-bound algorithm that allows a broad variety of search strategies, as opposed to the majority of existing planning systems which usually perform progression. The algorithm prunes task networks from the search space which will never lead to a better solution than the best solution found so far. 
To this end, we developed an admissible heuristic, based on a combination of the h^2 heuristic and delete relaxation, which takes as input a task network and estimates the best quality of any solution that can be developed from it.}, year = {2011}, booktitle = {26th PuK Workshop \dqPlanen, Scheduling und Konfigurieren, Entwerfen\dq (PuK 2011)}, tags = {SFB-TRR-62,Planning}, web_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2011/Bercher11PreferencesSlides.pdf}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2011/Bercher11Preferences.pdf} } @Inproceedings { Elkawkagy2011HybridMultiAgent2, author = {Elkawkagy, Mohamed and Biundo, Susanne}, title = {Hybrid Multi-agent Planning}, abstract = {Although several approaches have been constructed for multi-agent planning, solving large planning problems is still quite difficult. In this paper, we present a new approach that integrates landmark preprocessing technique in the context of hierarchical planning with multi-agent planning. Our approach uses Dependent and Independent clustering techniques to break up the planning problem into smaller clusters. These clusters are solved individually according to landmark information, then the obtained individual plans are merged according to the notion of fragments to generate a final solution plan. In hierarchical planning, landmarks are those tasks that occur in the decomposition refinements on every plan development path. Hierarchical landmark technique shows how a preprocessing step that extracts landmarks from a hierarchical planning domain and problem description can be used to prune the search space that is to be explored before actual search is performed. 
The methodologies in this paper have been implemented successfully, and we will present some experimental results that give evidence for the considerable performance increase gained through our system.}, year = {2011}, booktitle = {Proceedings of the of the Ninth German Conference on Multi-Agent System Technologies (MATES 2011)}, publisher = {IOS Press}, pages = {16--28}, tags = {SFB-TRR-62,Planning}, web_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2011/Elkawkagy11HybridMultiAgentSlides.pdf}, web_url2 = {http://www.springerlink.com/content/e73066483x2j7502/}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2011/Elkawkagy11HybridMultiAgent.pdf} } @Inproceedings { Mattmueller10NondeterministicPlanning, author = {Mattm\"{u}ller, Robert and Ortlieb, Manuela and Helmert, Malte and Bercher, Pascal}, title = {Pattern Database Heuristics for Fully Observable Nondeterministic Planning}, abstract = {When planning in an uncertain environment, one is often interested in finding a contingent plan that prescribes appropriate actions for all possible states that may be encountered during the execution of the plan. We consider the problem of finding strong and strong cyclic plans for fully observable nondeterministic (FOND) planning problems. The algorithm we choose is LAO*, an informed explicit state search algorithm. We investigate the use of pattern database (PDB) heuristics to guide LAO* towards goal states. To obtain a fully domain-independent planning system, we use an automatic pattern selection procedure that performs local search in the space of pattern collections. 
The evaluation of our system on the FOND benchmarks of the Uncertainty Part of the International Planning Competition 2008 shows that in selected domains our approach is competitive with symbolic regression search in terms of problem coverage and speed, and that plan sizes are often significantly smaller than with symbolic regression search.}, year = {2010}, booktitle = {Proceedings of the 20th International Conference on Automated Planning and Scheduling (ICAPS 2010)}, publisher = {AAAI Press}, pages = {105--112}, tags = {SFB-TRR-62,Planning}, web_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2010/Mattmueller10NonDetPlanningSlides.pdf}, web_url2 = {http://www.aaai.org/ocs/index.php/ICAPS/ICAPS10/paper/view/1430}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2010/Mattmueller10NonDetPlanning.pdf} } @Article { Biundo2010mpanionSysteme, author = {Biundo, Susanne and Wendemuth, Andreas}, title = {Von kognitiven technischen Systemen zu Companion-Systemen}, abstract = {Hoffnung, l\"{a}stige Routineaufgaben zu automatisieren, der unerw\"{u}nschten Informationsflut Herr zu werden, erw\"{u}nschte Informationen zu erhalten, zeitraubende Koordinations- und Kommunikationsaufgaben zu bew\"{a}ltigen und sich von langweiligen Alltagsentscheidungen entlasten zu k\"{o}nnen. Von Informationstechnik erhoffen sich viele Menschen – entsprechend dem Heinzelm\"{a}nnchenmotiv – eine umfassende Assistenz, die ihren Wunsch nach individueller Produktivit\"{a}t und pers\"{o}nlicher Bequemlichkeit in einem immer komplexer werdenden Alltag erf\"{u}llt. Der dadurch gewonnene Freiraum soll ihnen erm\"{o}glichen, sich auf die interessanten und wesentlichen Aufgaben zu konzentrieren. Sie wollen sich diesen Aufgaben ohne technische Einschr\"{a}nkung jederzeit und \"{u}berall widmen k\"{o}nnen. 
Die damit verbundene Zeitersparnis f\"{u}r delegierbare Arbeit soll es au\"{s}erdem erm\"{o}glichen, in signifikanter Weise Kosten einzusparen und Waren oder Dienstleistungen deutlich preisg\"{u}nstiger anzubieten}, year = {2010}, DOI = {10.1007/s13218-010-0056-9}, journal = {K\"{u}nstliche Intelligenz}, volume = {24}, pages = {335--339}, number = {4}, tags = {SFB-TRR-62,Planning}, web_url = {http://www.springerlink.com/content/e558815w08k09531/}, file_url = {http://www.springerlink.com/content/e558815w08k09531/fulltext.pdf} } @Inproceedings { Biundo2010Explanations, author = {Bidot, Julien and Biundo, Susanne and Heinroth, Tobias and Minker, Wolfgang and Nothdurft, Florian and Schattenberg, Bernd}, title = {Verbal Explanations for Hybrid Planning}, abstract = {State-of-the-art AI planning systems are able to generate complex plans thanks to their efficient reasoning engines. In a large number of application domains, the plans are automatically executed by systems such as autonomous robots. In this context, it is not necessary to make these automated systems understand what they are actually doing during execution and why they are doing that. In other words, these systems do not need to understand the underlying semantics of the plans they execute and how these plans have been generated. However, there are a significant number of key application domains, such as disaster relief mission support or project planning, where plans are supposed to be executed by a human user who is not necessarily a planning expert, an application expert, or both. In addition, for real-world applications, the plans and the plan generation are often complex. In order to unlock a part of the application potential of the AI planning technology, it is necessary that the user trusts the technology. 
Increasing trust in AI planning systems requires the design and implementation of user-friendly interfaces and the development of plan explanation methods that allow for taking into consideration the human user’s queries related to some components of the plan about their meaning and relevance for the plan and giving back the appropriate information that answers these queries. The verbal communication by speech constitutes the most natural form of communication for humans. By means of natural language dialogs in this work, we focus on the explanation of plans that are generated by a refinement-based planning system. Contrary to most approaches presented in the literature that try to provide explanations when backtracking occurs in failure situations during search, we assume in this work that the plans for which explanations are looked for are consistent. We present a domain-independent approach to enabling verbal human queries and producing verbal plan explanations.}, year = {2010}, booktitle = {Proceedings of the Conference \dqMultikonferenz Wirtschaftsinformatik\dq (MKWI 2010), Teilkonferenz \\"Planen, Scheduling und Konfigurieren, Entwerfen\\" (PuK 2010)}, publisher = {Universit\"{a}tsverlag G\"{o}ttingen}, editor = {Matthias Schumann and Lutz M. Kolbe and Michael H. Breitner and Arne Frerichs}, pages = {2309--2320}, tags = {SFB-TRR-62,Planning}, file_url = {http://webdoc.sub.gwdg.de/univerlag/2010/mkwi/03\_anwendungen/planen\_scheduling/06\_verbal\_plan\_explanations\_for\_hybrid\_plannings.pdf} } @Proceedings { PPHM10b, title = {The Semantic Web - ISWC 2010 - 9th International Semantic Web Conference, ISWC 2010, Shanghai, China, November 7-11, 2010, Revised Selected Papers, Part II}, year = {2010}, isbn = {978-3-642-17748-4}, booktitle = {International Semantic Web Conference (2)}, volume = {6497}, publisher = {Springer}, series = {Lecture Notes in Computer Science}, editor = {Patel-Schneider, Peter F. 
and Pan, Yue and Hitzler, Pascal and Mika, Peter and Zhang, Lei and Pan, Jeff Z. and Horrocks, Ian and Glimm, Birte}, keywords = {Semantic Web}, tags = {AutomatedReasoning} } @Proceedings { PPHM10a, title = {The Semantic Web - ISWC 2010 - 9th International Semantic Web Conference, ISWC 2010, Shanghai, China, November 7-11, 2010, Revised Selected Papers, Part I}, year = {2010}, isbn = {978-3-642-17745-3}, booktitle = {International Semantic Web Conference (1)}, volume = {6496}, publisher = {Springer}, series = {Lecture Notes in Computer Science}, editor = {Patel-Schneider, Peter F. and Pan, Yue and Hitzler, Pascal and Mika, Peter and Zhang, Lei and Pan, Jeff Z. and Horrocks, Ian and Glimm, Birte}, keywords = {Semantic Web}, tags = {AutomatedReasoning} } @Inproceedings { noppens-etal!10, author = {Noppens, Olaf and Luther, Marko and Liebig, Thorsten}, title = {The OWLlink API: Teaching OWL Components a Common Protocol}, abstract = {We introduce the OWLlink API that implements the OWLlink protocol on top of the Java-based OWL API. Besides providing an API to access remote OWLlink reasoning engines, it turns any OWL API aware reasoner into an OWLlink server. As such the OWLlink API provides the missing piece to replace the outdated DIG protocol by OWLlink in applications such as Prot\'{e}g\'{e}.}, year = {2010}, booktitle = {Proceedings of the 7th International Workshop on OWL: Experiences and Directions (OWLED 2010)}, volume = {614}, series = {CEUR Workshop Proceedings}, editor = {Evren Sirin and Kendall Clark}, keywords = {owllink}, tags = {SemanticTechnologies}, file_url = {http://CEUR-WS.org/Vol-614/owled2010\_submission\_1.pdf} } @Inproceedings { liebig-etal!10, author = {Liebig, Thorsten and Steigmiller, Andreas and Noppens, Olaf}, title = {Scalability via Parallelization of OWL Reasoning}, abstract = {Practical scalability of reasoning is an important premise for the adoption of semantic technologies in a real-world setting. 
Many highly effective optimizations for reasoning with expressive OWL ontologies have been invented and implemented over the last decades. This paper describes our approach for concurrent computation of the nondeterministic choices inherent to the OWL tableau reasoning procedure for \mathcalSHIQ. We present the architecture of our parallel reasoner and briefly discuss our prototypical implementation as well as future work.}, year = {2010}, booktitle = {Proceedings of the 4th International Workshop on New Forms of Reasoning for the Semantic Web: Scalable and Dynamic (NeFoRS 2010)}, keywords = {OWL, Reasoning, Parallelization, stream reasoning}, tags = {AutomatedReasoning}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2010/nefors10-liebig.pdf} } @Techreport { OWLlink-specification-W3C!10, author = {Liebig, Thorsten and Luther, Marko and Noppens, Olaf}, title = {OWLlink: Structural Specification}, abstract = {The OWLlink interface provides an implementation-neutral mechanism for accessing OWL reasoner functionality. OWLlink is a refinement of the DIG protocol most notably with respect to query and language expressivity. It has been renamed to OWLlink since it relies on OWL 2 for the primitives of the modeling language, and is thus fully compatible with OWL. The set of OWLlink core primitives described in this document cover basic reasoner management, assertion of axioms and elementary ask functionality. An extension mechanism is provided to easily add any required functionality in a controlled way to the core language. This document defines the OWLlink core interface by providing a structural specification. A concrete binding into HTTP/1.1 and XML Schema is given in an accompanying document [OWLlink HTTP/XML Binding]. 
Further bindings are the [OWLlink HTTP/Functional Binding] and the [OWLlink HTTP/S-Expression Binding].}, type = {Member Submission}, year = {2010}, institution = {World Wide Web Consortium}, keywords = {owllink}, tags = {SemanticTechnologies}, web_url = {http://www.w3.org/Submission/owllink-structural-specification/} } @Techreport { OWLlink-httpxml-W3C!10, author = {Noppens, Olaf and Luther, Marko and Liebig, Thorsten and Wessel, Michael}, title = {OWLlink: HTTP/XML Binding}, abstract = {The OWLlink interface provides an implementation-neutral mechanism for accessing OWL reasoner functionality. OWLlink relies on OWL 2 for the primitives of the modeling language, and is thus fully compatible with OWL. The set of OWLlink primitives described in this document cover basic reasoner managment, assertion of axioms and elementary ask functionality. An extension mechanism is provided in order to easily add any required functionality in a controlled way to the core language. This document defines a concrete binding of OWLlink into HTTP/1.1 and XML Schema. A structural specification of OWLlink is given in an accompanying document [OWLlink Structural Specification].}, type = {Member Submission}, year = {2010}, institution = {World Wide Web Consortium}, keywords = {owllink}, tags = {SemanticTechnologies}, web_url = {http://www.w3.org/Submission/owllink-httpxml-binding/} } @Techreport { OWLlink-retraction-httpxml-W3C!10, author = {Noppens, Olaf and Liebig, Thorsten}, title = {OWLlink Extension: Retraction HTTP/XML Binding}, abstract = {OWLlink provides a declarative interface for -- among other things -- asserting OWL axioms to a Knowledge Base (KB) of an OWL reasoner. This OWLlink extension adds the ability to retract previously asserted axioms from KBs. 
This document describes the accompanying HTTP/XML Binding of this extension.}, type = {Member Submission}, year = {2010}, institution = {World Wide Web Consortium}, keywords = {owllink}, tags = {SemanticTechnologies}, web_url = {http://www.w3.org/Submission/owllink-extension-retraction-httpxml-binding/} } @Techreport { OWLlink-retraction-W3C!10, author = {Noppens, Olaf and Liebig, Thorsten}, title = {OWLlink Extension: Retraction}, abstract = {OWLlink provides a declarative interface for -- among other things -- asserting OWL axioms to a Knowledge Base (KB) of an OWL reasoner. This OWLlink extension adds the ability to retract previously asserted axioms from KBs.}, type = {Member Submission}, year = {2010}, institution = {World Wide Web Consortium}, keywords = {owllink}, tags = {SemanticTechnologies}, web_url = {http://www.w3.org/Submission/owllink-extension-retraction/} } @Inproceedings { KLR+10, author = {Knuplesch, David and Thao Ly, Linh and Rinderle-Ma, Stefanie and Pfeifer, Holger and Dadam, Peter}, title = {On Enabling Data-Aware Compliance Checking of Business Process Models}, abstract = {In the light of an increasing demand on business process compliance, the verification of process models against compliance rules has become essential in enterprise computing. To be broadly applicable compliance checking has to support data-aware compliance rules as well as to consider data conditions within a process model. Independently of the actual technique applied to accomplish compliance checking, data awareness means that in addition to the control ow dimension, the data dimension has to be explored during compliance checking. However, naive exploration of the data dimension can lead to state explosion. We address this issue by introducing an abstraction approach in this paper. We show how state explosion can be avoided by conducting compliance checking for an abstract process model and abstract compliance rules. 
Our abstraction approach can serve as preprocessing step to the actual compliance checking and provides the basis for more efficient application of existing compliance checking algorithms.}, year = {2010}, booktitle = {Proceedings of the 29th International Conference on Conceptual Modeling -- ER 2010}, volume = {6412}, publisher = {Springer}, series = {Lecture Notes in Computer Science}, editor = {Jeffrey Parsons and Motoshi Saeki and Peretz Shoval and Carson Woo and Yair Wand}, pages = {332--346}, file_url = {http://dbis.eprints.uni-ulm.de/665/3/KLRD10a.pdf} } @Inproceedings { Hahn!10, author = {Hahn, Clemens and Turlier, St\'{e}phane and Liebig, Thorsten and Gebhardt, Sascha and Roelle, Christopher}, title = {Metadata Aggregation for Personalized Music Playlists}, abstract = {The growing amount of digital music content and the increasing connectivity of vehicles raise new challenges in terms of media access for vehicle drivers. Creating easily a personalized playlist in vehicles involves a unified representation of various metadata, combined with a mobile architecture addressing media resolution and aggregation issues. This paper analyzes the technical aspects of mobile access to music metadata and its use in a personalized playlist generation scenario. 
A prototype illustrates this study and gives first results.}, year = {2010}, DOI = {10.1007/978-3-642-16607-5\_29}, booktitle = {Proceedings of the HCI in Work and Learning, Life and Leisure}, volume = {6389}, publisher = {Springer Berlin / Heidelberg}, series = {Lecture Notes in Computer Science}, editor = {Leitner, Gerhard and Hitz, Martin and Holzinger, Andreas}, pages = {427--442}, keywords = {meta data, content aggregation, mobile architecture, playlist creation}, tags = {SemanticTechnologies,KnowledgeModeling}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2010/hci10-han-turlier-liebig-et-all.pdf} } @Inproceedings { Elkawkagy10LandmarksInHTN, author = {Elkawkagy, Mohamed and Schattenberg, Bernd and Biundo, Susanne}, title = {Landmarks in Hierarchical Planning}, abstract = {In this paper we introduce a novel landmark technique for hierarchical planning. Landmarks are abstract tasks that are mandatory. They have to be performed by any solution plan. Our technique relies on a landmark extraction procedure that pre-processes a given planning problem by systematically analyzing the ways in which relevant abstract tasks can be decomposed. 
We show how the landmark information is used to guide hierarchical planning and present some experimental results that give evidence for the considerable performance increase gained through our technique.}, year = {2010}, booktitle = {Proceedings of the 19th European Conference on Artificial Intelligence (ECAI 2010)}, publisher = {IOS Press}, pages = {229--234}, tags = {SFB-TRR-62,Planning}, web_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2010/Elkawkagy10LandmarksInHierarchicalSlides.pdf}, web_url2 = {http://www.booksonline.iospress.nl/Content/View.aspx?piid=17747}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2010/Elkawkagy10LandmarksInHierarchical.pdf} } @Inproceedings { Elkawkagy10LandmarksInHybrid, author = {Elkawkagy, Mohamed and Bercher, Pascal and Schattenberg, Bernd and Biundo, Susanne}, title = {Exploiting Landmarks for Hybrid Planning}, abstract = {Very recently, the well-known concept of landmarks has been adapted from the classical planning setting to hierarchical planning. It was shown how a pre-processing step that extracts local landmarks from a planning domain and problem description can be used in order to prune the search space that is to be explored before the actual search is performed. This pruning technique eliminates all branches of the task decomposition tree, for which can be proven that they will never lead to a solution. In this paper, we investigate this technique in more detail and extend it by introducing search strategies which use these local landmarks in order to guide the planning process more effectively towards a solution. 
Our empirical evaluation shows that the pre-processing step dramatically improves performance because dead ends can be detected much earlier than without pruning and that our search strategies using the local landmarks outperform many other possible search strategies.}, year = {2010}, booktitle = {Proceedings of the 25th PuK Workshop ``Planen, Scheduling und Konfigurieren, Entwerfen'' (PuK 2010)}, tags = {SFB-TRR-62,Planning}, web_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2010/Elkawkagy10LandmarksInHybridSlides.pdf}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2010/Elkawkagy10LandmarksInHybrid.pdf} } @Inproceedings { avizienis-et-al!09, author = {Avizienis, Algirdas and Grigonyte, Gintare and Haller, Johann and von Henke, Friedrich and Liebig, Thorsten and Noppens, Olaf}, title = {Organizing Knowledge as an Ontology of the Domain of Resilient Computing by Means of Natural Language Processing - An Experience Report}, abstract = {Scientists typically need to take a large volume of information into account in order to deal with re-occurring tasks such as inspecting proceedings, finding related work, or reviewing papers. Our work aims at filling the gap between text documents and a structured representation of their content in the domain of resilient computing by combining computer linguistics and ontological methods. 
The results of our research include: a thesaurus of the domain, automatic clustering of the domain documents, a domain ontology, and a tool for constructing ontologies with the aid of domain thesauri.}, year = {2009}, booktitle = {Proceedings of the 22nd International Florida Artificial Intelligence Research Society Conference (FLAIRS-22)}, publisher = {AAAI Press}, keywords = {natural language processing}, tags = {SemanticTechnologies}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2009/knowledge-in-resilience.pdf} } @Inproceedings { luther-etal!09, author = {Luther, Marko and Liebig, Thorsten and B\"{o}hm, Sebastian and Noppens, Olaf}, title = {Who the Heck is the Father of Bob?}, abstract = {Finding the optimal selection of an OWL reasoner and service interface for a specific ontology-based application is challenging. Over time it has become more and more difficult to match application requirements with service offerings from available reasoning engines, in particular with recent optimizations for certain reasoning services and new reasoning algorithms for different fragments of OWL. This work is motivated by real-world experiences and reports about interesting findings in the course of developing an ontology-based application. Benchmarking outcomes of several reasoning engines are discussed -- especially with respect to accompanying sound and completeness tests. We compare the performance of various service and communication protocols in different computing environments. 
Hereby, it becomes apparent that these largely underrated components may have an enormous impact on the overall performance.}, year = {2009}, DOI = {10.1007/978-3-642-02121-3\_9}, booktitle = {Proceedings of the European Semantic Web Conference (ESWC 2009)}, volume = {5554}, publisher = {Springer Berlin / Heidelberg}, series = {Lecture Notes in Computer Science}, pages = {66--80}, keywords = {OWL, Reasoning}, tags = {SemanticTechnologies,AutomatedReasoning}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2009/TheFatherOfBob.pdf} } @Inproceedings { liebigViscover!09, author = {Liebig, Thorsten and Noppens, Olaf and von Henke, Friedrich}, title = {VIScover: Visualizing, Exploring, and Analysing Structured Data}, abstract = {Today's challenging task in intelligent data processing is not to store large volumes of interlinked data but to visualize, explore, and understand its explicit or implicit relationships. Our solution to this is the VIScover system. VIScover combines semantic technologies with interactive exploration and visualization techniques able to analyze large volumes of structured data. We briefly describe our VIScover system and show its potential using the example of the VAST 2009 social network and geospatial data set.}, year = {2009}, DOI = {10.1109/VAST.2009.5333946}, booktitle = {Proceedings of the IEEE Symposium on Visual Analytics Science and Technology Symposium (VAST 2009)}, publisher = {IEEE}, address = {Atlantic City, USA}, pages = {259--260}, keywords = {visualization}, tags = {SemanticTechnologies}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2009/VIScover-VAST09.pdf} } @Inproceedings { liebig-etal!09, author = {Liebig, Thorsten and Luther, Marko and Noppens, Olaf}, title = {The OWLlink Protocol}, abstract = {A semantic application typically is a heterogenous system of interconnected components, most notably a reasoner. 
OWLlink is an implementation-neutral protocol for communication between OWL 2 components. It specifies how to manage reasoning engines and their knowledge bases, how to assert axioms, and how to query inference results. A key feature of OWLlink is its extensibility, which allows the addition of required functionality to the protocol. We introduce the OWLlink structural specification and extension mechanism. Furthermore, we present two extensions, one for retrieving previously asserted axioms and one for retracting axioms from a reasoner. Finally, we describe a binding to HTTP/XML and give an overview of existing implementations.}, year = {2009}, booktitle = {Proceedings of the 6th International Workshop on OWL: Experiences and Directions (OWLED 2009)}, editor = {Rinke Hoekstra and Peter F. Patel-Schneider}, keywords = {owllink}, tags = {SemanticTechnologies}, file_url = {http://CEUR-WS.org/Vol-529/owled2009\_submission\_11.pdf} } @Inproceedings { Bercher2009PDBHeuristics, author = {Bercher, Pascal and Mattm\"{u}ller, Robert}, title = {Solving Non-deterministic Planning Problems with Pattern Database Heuristics}, abstract = {Non-determinism arises naturally in many real-world applications of action planning. Strong plans for this type of problems can be found using AO* search guided by an appropriate heuristic function. Most domain-independent heuristics considered in this context so far are based on the idea of ignoring delete lists and do not properly take the non-determinism into account. Therefore, we investigate the applicability of pattern database (PDB) heuristics to non-deterministic planning. PDB heuristics have emerged as rather informative in a deterministic context. Our empirical results suggest that PDB heuristics can also perform reasonably well in non-deterministic planning. 
Additionally, we present a generalization of the pattern additivity criterion known from classical planning to the non-deterministic setting.}, year = {2009}, isbn = {978-3-642-04616-2}, booktitle = {Proceedings of the 32nd Annual German Conference on Artificial Intelligence (KI 2009)}, volume = {5803}, publisher = {Springer}, series = {LNAI}, editor = {B\"{a}rbel Mertsching and Marcus Hund and Zaheer Aziz}, pages = {57--64}, tags = {SFB-TRR-62,Planning}, web_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2009/Bercher09NnDetPlanningSlides.pdf}, web_url2 = {http://www.springerlink.com/content/82604j8321324937/}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2009/Bercher09NnDetPlanning.pdf} } @Inproceedings { volke!09, author = {Volke, Mario and Liebig, Thorsten}, title = {Origo - A Client for a Distributed Semantic Social Network}, abstract = {Origo is a Web-application that enables users to manage their social community profiles utilizing semantic technologies. It allows to unite their different profiles and to browse through their semantic social network across various platforms.}, year = {2009}, booktitle = {Poster Proceedings of the 6th European Semantic Web Conference (ESWC 2009)}, keywords = {Semantic Web}, tags = {SemanticTechnologies}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2009/Origo-ESWC09.pdf} } @Phdthesis { schattenberg2009PhDThesis, author = {Schattenberg, Bernd}, title = {Hybrid Planning and Scheduling}, abstract = {Planning and scheduling are well-established disciplines in the field of Artificial Intelligence. They provide flexibility, robustness, and effectiveness to complex software systems in a variety of application areas. 
While planning is the process of finding a course of action that achieves a goal or performs a specified task, scheduling deals with the assignment of resources and time to given activities, taking into account resource restrictions and temporal dependencies. In other words, planning focuses on reasoning about causal structures and identifying the necessary actions for achieving a specific goal; scheduling concentrates on resource consumption and production for optimizing a coherent parameter assignment of a plan. As successful these techniques clearly are, the actual demands of complex, real-world applications go far beyond the potential of these single methods, however. They require an adequate integration of these problem solving methods as well as a combination of different planning and scheduling paradigms. Particularly important are abstraction-based, hierarchical approaches because of both their expressive knowledge representation and their efficiency in finding solutions. Current state-of-the-art systems rarely address the question of method integration; isolated approaches do so only in ad hoc implementations and mostly lack a proper formal basis.This thesis presents a formal framework for plan and schedule generation based on a well-founded conceptualization of refinement planning: An abstract problem specification is transformed stepwise into a concrete, executable solution. In each refinement step, plan deficiencies identify faulty or under-developed parts of the plan, which in turn triggers the generation of transformation operators that try to resolve them. All involved entities are explicitly represented and therefore transparent to the framework. 
This property allows for two novel aspects of our approach: First, any planning and scheduling methodology can be functionally decomposed and mapped on the deficiency announcement and plan transformation generation principle, and second, the framework allows for an explicit declaration of planning strategies. We first investigate the flexibility of the extremely modular system design by instantiating the framework in a variety of system configurations including classical partial-order causal-link (POCL) planning, hierarchical task-network (HTN) planning, and classical scheduling.As a key feature, the presented approach provides a formally integrated treatment of action and state abstraction, thus naturally combining causality-focused reasoning with hierarchical, procedure-oriented methods. While the use of procedural knowledge allows to rely on well-known, predefined solutions to planning problems, the non-hierarchical methods provide the flexibility to come up with non-standard solutions and to complete under-specified problem instances, respectively. The resulting technique of hybrid planning is capable of constructing a plan's causal and hierarchical structure across multiple levels of abstraction by using plan development options of both the POCL and HTN paradigms. We also present an integrated planning and scheduling system that is defined in our framework. For the first time, such a system is able to combine any ensemble of planning and scheduling technologies on the operational level and to address the respective deficiencies opportunistically. The accordingly unique representation of application domains incorporates temporal phenomena and resource manipulations not only for basic actions but on the abstract action level as well. 
This leads to the novel technique of hierarchical scheduling, in which the concept of abstraction is extended to resource representation and reasoning, for example resource aggregation and approximation.Thanks to its well-defined functional composition, the framework yields a major improvement with respect to the capabilities of planning and scheduling strategies. The explicitly represented information about a plan's deficiencies and development prospects makes it possible to utilize a new quality of knowledge sources, including relationships between deficiencies, refinements, and components in the plan to which they refer. This leads to the novel class of flexible strategies, which decide upon problem characteristics and the current state of the plan generation process, respectively. The most prominent representatives are our HotSpot and HotZone strategies, which take into account the structural dependencies between problematic elements in the plan when deciding upon their resolution. They are independent of both the application domain and the concrete framework instance. Therefore, they can be deployed for POCL planning as well as for integrated planning and scheduling, for example, and any other combination of methods the overall framework allows for. In addition, these strategy components are not only easily exchangeable, they can also be combined into sequences of decision procedures in which succeeding components fine-tune the choices of the preceding ones. We present the declarations of a comprehensive strategy repertoire, ranging from classical strategy components that implement well-known search principles from the literature to an assortment of flexible strategies.Our formal framework is not only a method for specifying a variety of planning and scheduling functionality, it also enables the derivation of software architectures for fielding the corresponding systems. 
We show how the formal entities of the framework can be directly mapped onto software artefacts in a knowledge-based multiagent architecture, which optimally supports concurrency -- by enabling parallel computations of plan deficiencies and refinement options -- as well as the paradigm of distributed knowledge management. While the former addresses the practical issue of managing multiple computational resources the latter matches perfectly the idea of different modules representing different planning and scheduling aspects. Our implementation of the framework resulted in a complex planning environment in which any planning and scheduling system can be easily compiled from a rich collection of functional components. By systematically alternating the system configuration and its parameters, it can also be used as a testbed for the evaluation of framework components, in particular planning strategies and refinement methods. This allowed for conducting a large-scale empirical study on dozens of strategy configurations, which is the first extensive experimental effort in the domain of hybrid planning. It concludes this thesis with four important results: First, we gain insights into the performance of members of our strategy portfolio on a set of benchmark problems. We thereby learned how to graduate performance measures and how to assess such test results. Second, we became familiar with the characteristics of the examined strategies, the experiment problems, and also of the benchmark domains. Third, our findings clearly support both the necessity and feasibility of systematic experimentation in order to identify suitable strategies for a given application domain. 
Last, but not least, our evaluation effort proves that our environment is an effective platform for orchestrating and operating component-based planning and scheduling systems, in terms of flexibility as well as in terms of efficiency.}, type = {PhD Thesis}, year = {2009}, school = {Ulm University, Germany}, tags = {SFB-TRR-62,Planning}, web_url = {http://vts.uni-ulm.de/query/longview.meta.asp?document\_id=6895}, file_url = {http://vts.uni-ulm.de/docs/2009/6895/vts\_6895\_9580.pdf} } @Inproceedings { noppens!09, author = {Noppens, Olaf and Liebig, Thorsten}, title = {Ontology Patterns and Beyond - Towards a Universal Pattern Language}, abstract = {In this paper we argue for a broader view of ontology patterns and therefore present different use-cases where drawbacks of the current declarative pattern languages can be seen. We also discuss use-cases where a declarative pattern approach can replace procedural-coded ontology patterns. With previous work on an ontology pattern language in mind we argue for a general pattern language.}, year = {2009}, booktitle = {Proceedings of the Workshop on Ontology Patterns (WOP 2009)}, volume = {516}, publisher = {CEUR Workshop Proceedings}, editor = {Eva Blomqvist and Kurt Sandkuhl and Francois Scharffe and Vojt\v ech Sv\'{a}tek}, pages = {179--186}, keywords = {ontology, patterns}, tags = {SemanticTechnologies,KnowledgeModeling}, file_url = {http://CEUR-WS.org/Vol-516/pap10.pdf} } @Inproceedings { noppens!09c, author = {Noppens, Olaf}, title = {Negative Property Assertion Pattern (NPAs)}, year = {2009}, booktitle = {Proceedings of the Workshop on Ontology Patterns (WOP 2009)}, volume = {516}, publisher = {CEUR Workshop Proceedings}, editor = {Eva Blomqvist and Kurt Sandkuhl and Francois Scharffe and Vojt\v ech Sv\'{a}tek}, pages = {120--123}, keywords = {ontology, patterns}, tags = {SemanticTechnologies,KnowledgeModeling}, file_url = {http://CEUR-WS.org/Vol-516/pat06.pdf} } @Inproceedings { Schattenberg2009Framework, author = 
{Schattenberg, Bernd and Bidot, Julien and Ge{\ss}ler, Sascha and Biundo, Susanne}, title = {A Framework for Interactive Hybrid Planning}, abstract = {Hybrid planning, the integration of hierarchical task decomposition and partial-order planning, provides a powerful mechanism to solve real-world planning problems. We present a domain-independent, mixed-initiative approach to plan generation that is based on a formal concept of hybrid planning. It allows for any interaction modalities and models of initiative while preserving the soundness of the plan generation process. Adequately involving the decision competences of end-users this way will improve the application potential as well as the acceptance of the technology.}, year = {2009}, isbn = {978-3-642-04616-2}, DOI = {10.1007/978-3-642-04617-9\_3}, booktitle = {Proceedings of the 32nd Annual German Conference on Artificial Intelligence (KI 2009)}, volume = {5803}, publisher = {Springer}, series = {LNAI 5803}, pages = {17--24}, tags = {Planning}, web_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2009/Schattenberg2009FrameworkSlides.pdf}, web_url2 = {http://www.springerlink.com/content/v3171u800470815r/}, file_url = {http://www.springerlink.com/content/v3171u800470815r/fulltext.pdf} } @Inproceedings { noppens!09b, author = {Noppens, Olaf}, title = {Concept Partition Pattern}, abstract = {The Partition Pattern is a logical pattern that introduces axioms which model a partition of concepts. A partition is a general structure which is divided into several disjoint parts. With respect to ontologies the structure is a concept which is divided into several pair-wise disjoint concepts. 
This pattern reflects the simplest case where a named concept is defined as a partition of concepts.}, year = {2009}, booktitle = {Proceedings of the Workshop on Ontology Patterns (WOP 2009)}, volume = {516}, publisher = {CEUR Workshop Proceedings}, editor = {Eva Blomqvist and Kurt Sandkuhl and Francois Scharffe and Vojt\v ech Sv\'{a}tek}, pages = {127--129}, keywords = {ontology, patterns}, tags = {SemanticTechnologies,KnowledgeModeling}, file_url = {http://CEUR-WS.org/Vol-516/pat08.pdf} } @Inproceedings { BMP09, author = {Bernardeschi, Cinzia and Masci, Paolo and Pfeifer, Holger}, title = {Analysis of Wireless Sensor Network Protocols in Dynamic Scenarios}, abstract = {We describe an approach to the analysis of protocols for wireless sensor networks in scenarios with mobile nodes and dynamic link quality. The approach is based on the theorem proving system PVS and can be used for formal specification, automated simulation and verification of the behaviour of the protocol. In order to demonstrate the applicability of the approach, we analyse the reverse path forwarding algorithm, which is the basic technique used for diffusion protocols for wireless sensor networks.}, year = {2009}, isbn = {978-3-642-05117-3}, DOI = {10.1007/978-3-642-05118-0\_8}, booktitle = {Stabilization, Safety, and Security of Distributed Systems, 11th International Symposium, SSS 2009, Lyon, France, November 3-6, 2009. 
Proceedings}, volume = {5873}, publisher = {Springer}, series = {Lecture Notes in Computer Science}, editor = {Rachid Guerraoui and Franck Petit}, pages = {105--119}, web_url = {http://www.springerlink.com/content/c652hn103301436j/?p=7b9957e9943242eca7b86428925ba5dd\&pi=7}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2009/bmp09.pdf} } @Article { Bidot2009Framework, author = {Bidot, Julien and Vidal, Thierry and Laborie, Philippe and Beck, J. Christopher}, title = {A Theoretical and Practical Framework for Scheduling in a Stochastic Environment}, abstract = {There are many systems and techniques that address stochastic planning and scheduling problems, based on distinct and sometimes opposite approaches, especially in terms of how generation and execution of the plan, or the schedule, are combined, and if and when knowledge about the uncertainties is taken into account. In many real-life problems, it appears that many of these approaches are needed and should be combined, which to our knowledge has never been done. In this paper, we propose a typology that distinguishes between proactive, progressive, and revision approaches. Then, focusing on scheduling and schedule execution, a theoretic model integrating those three approaches is defined. 
This model serves as a general template to implement a system that will fit specific application needs: we introduce and discuss our experimental prototypes which validate our model in part, and suggest how this framework could be extended to more general planning systems.}, year = {2009}, DOI = {10.1007/s10951-008-0080-x}, journal = {Journal of Scheduling}, volume = {12}, publisher = {Springer}, pages = {315--344}, number = {3}, tags = {SFB-TRR-62,Planning} } @Inproceedings { noppens-liebig!08, author = {Noppens, Olaf and Liebig, Thorsten}, title = {Understanding Interlinked Data - Visualising, Exploring and Analysing Ontologies}, abstract = {Companies are faced with managing as well as integrating large collections of distributed data today. Here, the challenging task is not to store these volumes of structured and interlinked data but to understand and analyze its explicit or implicit relationships. However, up to date there is virtually no support in navigating, visualizing or even analyzing structured data sets of this appropriately. This paper describes novel rendering techniques enabling a new level of visual analytics combined with interactive exploration principles. The underlying visualization rationale is driven by the principle of providing detail information with respect to qualitative as well as quantitative aspects on user demand while offering an overview at any time. 
By means of our prototypical implementation and a real-world data set we show how to answer several data specific tasks by interactive visual exploration.}, year = {2008}, booktitle = {Proceedings of the International Conferences on Knowledge Management and New Media Technology, Journal of Universal Computer Science}, editor = {Klaus Tochtermann and Hermann Maurer}, pages = {341--348}, keywords = {visualization, ontology}, tags = {SemanticTechnologies}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2008/ABoxVis.pdf} } @Inproceedings { noppens-libeig!08b, author = {Noppens, Olaf and Liebig, Thorsten}, title = {Realizing the hidden: interactive visualization and analysis of large volumes of structured data}, abstract = {An emerging trend in Web computing aims at collecting and integrating distributed data. For instance, community driven efforts recently have build ontological repositories made of large volumes of structured and interlinked data from various Web sources. Those repositories are extreme in the sense that they are extraordinary in size and dominated by assertional data incorporating only a small and typically lightweight schema. So far, users can find tools for building and browsing through large schemas but there is virtually no support in navigating, visualizing or even analyzing the data part of such a structured repository appropriately. This paper describes how to combine techniques from visual analytics and logical reasoning for interactive exploration of large volumes of interrelated data. Our approach utilizes visual abstraction techniques, semantical filters, as well as various level of detail information. The underlying visualization rationale is driven by the principle of providing detail information with respect to qualitative as well as quantitative aspects on user demand while offering an overview at any time. 
By means of our prototypical implementation and two real-world data sets we show how to answer several data specific tasks by interactive visual exploration.}, year = {2008}, isbn = {978-1-60558-141-5}, DOI = {10.1145/1385569.1385654}, booktitle = {Proceedings of the working conference on Advanced Visual Interfaces (AVI 2008)}, publisher = {ACM Press}, editor = {Stefano Levialdi}, pages = {44--48}, keywords = {visualization}, tags = {SemanticTechnologies}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2008/avi08-noppens.pdf} } @Inproceedings { Bidot2008PlanRepair, author = {Bidot, Julien and Biundo, Susanne and Schattenberg, Bernd}, title = {Plan Repair in Hybrid Planning}, abstract = {Hybrid Planning, the integration of hierarchical task decomposition and partial-order-causal-link planning provides a powerful mechanism to solve real-world planning problems. While the generation of hybrid plans is well understood, the problem of how to adequately deal with the various types of execution failures in this context has not yet been addressed. We present a domain-independent approach to plan repair in hybrid planning. It exploits the generation process of the failed plan by retracting decisions that led to the failed plan fragments. They are selectively replaced by suitable alternatives, and the repaired plan is completed by following the previous generation process as close as possible. This way, a stable solution is obtained, i.e. a repair of the failed plan that causes minimal perturbation. 
We show how plan repair fits into a formal framework for hybrid planning and present a generic refinement-retraction-and-repair algorithm.}, year = {2008}, DOI = {10.1007/978-3-540-85845-4\_21}, booktitle = {Proceedings of the 31st Annual German Conference on Artificial Intelligence (KI 2008)}, volume = {5243}, publisher = {Springer}, series = {Lecture Notes in Artificial Intelligence}, editor = {Andreas Dengel and Karsten Berns and Thomas Breuel and Frank Bomarius and Thomas R. Roth-Berghofer}, pages = {169--176}, tags = {Planning}, web_url = {http://www.springerlink.com/content/7h142500k7211717/}, file_url = {http://www.springerlink.com/content/7h142500k7211717/fulltext.pdf} } @Inproceedings { liebig-etal!08, author = {Liebig, Thorsten and Luther, Marko and Noppens, Olaf and Rodriguez, Mariano and Calvanese, Diego and Wessel, Michael and Horridge, Matthew and Bechhofer, Sean and Tsarkov, Dmitry and Sirin, Evren}, title = {OWLlink: DIG for OWL 2}, abstract = {The OWLlink interface provides an implementation-neutral mechanism for accessing OWL reasoner functionality. In contrast to its DL-oriented predecessor DIG, OWLlink relies on OWL 2 for the primitives of the modelling language, and is thus fully compatible with the forthcoming incarnation of OWL. The OWLlink core introduced in this document covers (i) basic reasoner management, (ii) assertion of axioms and (iii) elementary ask functionality. The OWLlink extension mechanism allows to easily add any required functionality in a controlled way to the core language. 
We introduce OWLlink by providing a structural specification and a concrete binding of the interface that defines how OWLlink messages can be encoded in XML and sent over HTTP.}, year = {2008}, booktitle = {Proceedings of the 5th OWLED Workshop on OWL: Experiences and Directions, collocated with the 7th International Semantic Web Conference (ISWC-2008), Karlsruhe, Germany, October 26-27, 2008}, volume = {432}, series = {CEUR Workshop Proceedings}, editor = {Catherine Dolbear and Alan Ruttenberg and Ulrike Sattler}, keywords = {owllink, OWL, OWL 2, Semantic Web}, tags = {SemanticTechnologies}, file_url = {http://ceur-ws.org/Vol-432/owled2008eu\_submission\_26.pdf} } @Incollection { Pfe08, author = {Pfeifer, Holger}, title = {Formal Methods in the Automotive Domain: The Case of TTA}, year = {2008}, isbn = {9780849380266}, booktitle = {Automotive Embedded Systems Handbook}, publisher = {Taylor and Francis CRC Press}, chapter = {15}, editor = {Nicolas Navet and Francoise Simonot-Lion}, web_url = {http://www.crcpress.com/product/isbn/9780849380266} } @Article { liebig-scheele!08, author = {Liebig, Thorsten and Scheele, Stephan}, title = {Explaining Entailments and Patching Modelling Flaws}, abstract = {Ontology authoring is a sophisticated task and requires domain as well as some amount of background knowledge in formal logic. In fact, it is not only novice users that are commonly faced with comprehension problems with respect to the influence of complex or nested ontological axioms on reasoning. 
We provide practical insights into the development of tableau-based methods for explaining the key inference services, namely unsatisfiability, subsumption, and non-subsumption as well as techniques for patching ontologies in order to establish a user desired entailment.}, year = {2008}, journal = {K\"{u}nstliche Intelligenz}, pages = {25--27}, number = {2}, keywords = {explaining, ontology}, tags = {SemanticTechnologies, KnowledgeModeling}, web_url = {http://www.kuenstliche-intelligenz.de/index.php?id=7764}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2008/kijournal-liebig-scheele08.pdf} } @Inproceedings { BMP08, author = {Bernardeschi, Cinzia and Masci, Paolo and Pfeifer, Holger}, title = {Early Prototyping of Wireless Sensor Network Algorithms in PVS}, abstract = {We describe an approach of using the evaluation mechanism of the specification and verification system PVS to support formal design exploration of WSN algorithms at the early stages of their development. The specification of the algorithm is expressed with an extensible set of programming primitives, and properties of interest are evaluated with ad hoc network simulators automatically generated from the formal specification. In particular, we build on the PVSio package as the core base for the network simulator. According to requirements, properties of interest can be simulated at different levels of abstraction. We illustrate our approach by specifying and simulating a standard routing algorithm for wireless sensor networks.}, year = {2008}, booktitle = {Computer Safety, Reliability, and Security, 27th International Conference, SAFECOMP 2008, Newcastle upon Tyne, UK, September 22-25, 2008, Proceedings}, volume = {5219}, publisher = {Springer}, series = {Lecture Notes in Computer Science}, editor = {Michael D. 
Harrison and Mark-Alexander Sujan}, pages = {346--359}, web_url = {http://www.springerlink.com/content/g6723722m7487460/?p=775f8bc1015a402db38b9281e63d316d\\&pi=28}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2008/safecomp2008.pdf} } @Inproceedings { Schattenberg2007FlexibleStrategies, author = {Schattenberg, Bernd and Bidot, Julien and Biundo, Susanne}, title = {On the Construction and Evaluation of Flexible Plan-Refinement Strategies}, abstract = {This paper describes a system for the systematic construction and evaluation of planning strategies. It is based on a proper formal account of refinement planning and allows to decouple plan-deficiency detection, refinement computation, and search control. In adopting this methodology, planning strategies can be explicitly described and easily deployed in various system configurations. We introduce novel domain-independent planning strategies that are applicable to a wide range of planning capabilities and methods. These so-called HotSpot strategies are guided by information about current plan defects and solution options. 
The results of a first empirical performance evaluation are presented in the context of hybrid planning.}, year = {2007}, DOI = {10.1007/978-3-540-74565-5\_28}, booktitle = {Advances in Artificial Intelligence, Proceedings of the 30th German Conference on Artificial Intelligence (KI 2007)}, volume = {4667}, publisher = {Springer}, address = {Osnabr\"{u}ck, Germany}, series = {Lecture Notes in Artificial Intelligence}, editor = {Joachim Hertzberg and Michael Beetz and Roman Englert}, pages = {367--381}, tags = {Planning}, web_url = {http://www.springerlink.com/content/lx1l87180v450468/}, file_url = {http://www.springerlink.com/content/lx1l87180v450468/fulltext.pdf} } @Inproceedings { Bidot2007Architecture, author = {Bidot, Julien and Vidal, Thierry and Laborie, Philippe and Christopher Beck, J.}, title = {Une architecture g\'{e}n\'{e}rale pour ordonnancer dans un environnement stochastique}, year = {2007}, booktitle = {Actes des 2es journ\'{e}es Francophones Planification, D\'{e}cision, Apprentissage pour la conduite de syst\`{e}mes (JFPDA)}, address = {Grenoble, France}, pages = {13--24} } @Inproceedings { noppens-liebig!07, author = {Noppens, Olaf and Liebig, Thorsten}, title = {Understanding Large Volumes of Interconnected Individuals by Visual Exploration}, abstract = {Ontologies are now used within an increasing number of real-world applications. So far, significant effort has been spend in building tools to support users in creating, maintaining, and browsing the terminological part of an ontology. On the other hand, only little work has been done in supporting the user to explore the manifold interconnected assertional knowledge in order to analyze, visualize, and understand this network of individuals. 
In this paper, we present a new efficient visualization and editing approach which allows to investigate relationships within large volumes of interlinked individuals in order to grasp the structure of the assertional knowledge more easily.}, year = {2007}, isbn = {978-3-540-72666-1}, DOI = {10.1007/978-3-540-72667-8\_58}, booktitle = {The Semantic Web: Research and Applications, Proceedings of the 4th European Semantic Web Conference (ESWC 2007)}, volume = {4519}, publisher = {Springer}, series = {Lecture Notes in Computer Science}, editor = {Enrico Franconi and Michael Kifer and Wolfgang May}, pages = {799--808}, keywords = {visualization}, tags = {SemanticTechnologies}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2007/eswc07-noppens.pdf} } @Article { SchattenbergKS07WorldOfPuK, author = {Schattenberg, Bernd and Krebs, Thorsten and Schumann, Ren\'{e}}, title = {The World of PUK - Service}, abstract = {Dieser Serviceteil zum Themenschwerpunkt ``Planen / Scheduling und Konfigurieren / Entwerfen\\" soll dem interessierten Leser einen ersten Einblick in das vielschichtige Gebiet erleichtern. Die f\"{u}r den Bereich wichtigen Wettbewerbe werden im Artikel von Stefan Edelkamp vorgestellt. Die Autoren haben, der Breite des Themengebietes entsprechend, diese \"{U}bersicht m\"{o}glichst weit angelegt, einen Anspruch auf Vollst\"{a}ndigkeit kann jedoch nicht erhoben werden. Wir hoffen, dass dieser Artikel Ihnen als Ausgangsbasis f\"{u}r eigene Recherchen in diesem spannenden und dynamischen Forschungsfeld dienen kann. 
Aus diesem Grund sind alle Eintr\"{a}ge, soweit m\"{o}glich, mit den aktuellen URLs angegeben, um weitere on-line Recherchen m\"{o}glichst effizient zu gestalten.}, year = {2007}, journal = {K\"{u}nstliche Intelligenz}, volume = {21}, pages = {44}, number = {1}, file_url = {http://www.kuenstliche-intelligenz.de/archives/ki-journal-20160917/index.php-id=7754\\&tx\_ki\_pi1[showUid]=1256\\&cHash=fa57c0ab2a.html} } @Article { Schattenberg07HybridSoftware, author = {Schattenberg, Bernd and Balzer, Steffen and Biundo, Susanne}, title = {Realizing Hybrid Planning Systems as Modern Software Environments}, abstract = {We present an architecture for planning and scheduling systems that addresses key requirements of real-world applications in a unique manner. It provides a robust, scalable, and flexible framework through the use of industrialstrength middleware and multi-agent technology. The architectural concepts extend knowledge-based components that dynamically perform and verify the system’s configuration; standardized components and communication protocols allow a seamless integration with third-party libraries and application environments. The system is based on a proper formal account of hybrid planning, the integration of HTN and POCL planning. The framework allows to decouple the detection of plan flaws, the computation of plan modifications, and search control. Consequently, planning and scheduling capabilities can be easily combined by orchestrating respective elementary modules and strategies. 
This platform can implement and evaluate various configurations of planning methods and strategies, without jeopardizing system consistency through interfering module activity.}, year = {2007}, journal = {K\"{u}nstliche Intelligenz}, volume = {21}, pages = {16--22}, number = {1}, web_url = {http://www.kuenstliche-intelligenz.de/archives/ki-journal-20160917/index.php-id=7754\\&tx\_ki\_pi1[showUid]=1248\\&cHash=6d9424bc18.html}, file_url = {http://www.kuenstliche-intelligenz.de/archives/ki-journal-20160917/fileadmin/template/main/archiv/2007\_1/realizing-schattenberg-web.pdf} } @Inproceedings { weithoener-etal!07, author = {Weith\"{o}ner, Timo and Liebig, Thorsten and Luther, Marko and B\"{o}hm, Sebastian and von Henke, Friedrich and Noppens, Olaf}, title = {Real-World Reasoning with OWL}, abstract = {This work is motivated by experiences in the course of de- veloping an ontology-based application within a real-world setting. We found out that current benchmarks are not well suited to provide help- ful hints for users who seek for an appropriate reasoning system able to deal with expressive terminological descriptions, large volumes of asser- tional data, and frequent updates in a sound and complete way. 
This paper tries to provide some insights into currently available reasoning approaches and aims at identifying requirements to make future bench- marks more useful for application developers.}, year = {2007}, isbn = {978-3-540-72666-1}, DOI = {10.1007/978-3-540-72667-8\_22}, booktitle = {The Semantic Web: Research and Applications, Proceedings of the 4th European Semantic Web Conference (ESWC 2007)}, volume = {4519}, publisher = {Springer}, series = {Lecture Notes in Computer Science}, editor = {Enrico Franconi and Michael Kifer and Wolfgang May}, pages = {296--310}, keywords = {OWL,reasoning}, tags = {SemanticTechnologies, AutomatedReasoning}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2007/eswc07-weithoener.pdf} } @Inproceedings { springerlink:10.1007/978-3-540-76890-6_41, author = {Liebig, Thorsten and M\"{u}ller, Felix}, title = {Parallelizing Tableaux-Based Description Logic Reasoning}, abstract = {Practical scalability of Description Logic (DL) reasoning is an important premise for the adoption of OWL in a real-world setting. Many highly efficient optimizations for the DL tableau calculus have been invented over the last decades. None of them aimed at parallelizing the tableau algorithm itself. This paper describes our approach for concurrent computation of the nondeterministic choices inherent to the standard tableau procedure. 
We discuss how this interrelates with the well-known optimization techniques and present first promising performance results when benchmarking our prototypical reasoner UUPR ( Ulm University Parallel Reasoner ) with a selection of established DL systems.}, year = {2007}, isbn = {978-3-540-76889-0}, DOI = {10.1007/978-3-540-76890-6\_41}, booktitle = {On the Move to Meaningful Internet Systems 2007: OTM 2007 Workshops}, volume = {4806}, publisher = {Springer Berlin / Heidelberg}, series = {Lecture Notes in Computer Science}, editor = {Robert Meersman and Zahir Tari and Pilar Herrero}, pages = {1135--1144}, tags = {SemanticTechnologies}, web_url = {http://www.springerlink.com/content/6326156250h20826/}, file_url = {http://www.springerlink.com/content/6326156250h20826/fulltext.pdf} } @Inproceedings { Weithoener2007RelationalReasoner, author = {Weith\"{o}ner, Timo}, title = {U2R2 - the Ulm University Relational Reasoner: System Description}, abstract = {This is a system description of the Ulm University Relational Reasoner (U2R2). The system merges rule based DL reasoning with technologies from relational database management systems. U2R2 implements a total forward chaining and materialization approach, which calculates and persistently stores all possible inferences whenever a knowledge base is loaded or altered. As a result U2R2 offers excellent query response times for TBox as well as ABox queries. The system is not limited by main memory restrictions as it leverages secondary storage, which allows to process huge knowledge bases even on standard desktop computers. 
Additional features include incremental reasoning, retraction, and availability of savepoints, which allow to restore previous system states.}, year = {2007}, booktitle = {14th International Conference on Logic for Programming, Artificial Intelligence, and Reasoning}, publisher = {Yerevan, Armenia}, series = {Short Paper Session Proceedings}, editor = {Nachum Dershowitz and Andrei Voronkov}, pages = {55--59}, tags = {AutomatedReasoning}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2007/lpar07.pdf} } @Article { PvH07, author = {Pfeifer, Holger and von Henke, Friedrich}, title = {Modular Formal Analysis of the Central Guardian in the Time-Triggered Architecture}, abstract = {The Time-Triggered Protocol TTP/C constitutes the core of the communication level of the Time-Triggered Architecture for dependable real-time systems. TTP/C ensures consistent data distribution, even in the presence of faults occurring to nodes or the communication channel. However, the protocol mechanisms of TTP/C rely on a rather optimistic fault hypothesis. Therefore, an independent component, the central guardian, employs static knowledge about the system to transform arbitrary node failures into failure modes that are covered by the fault hypothesis. This paper presents a modular formal analysis of the communication properties of TTP/C based on the guardian approach. Through a hierarchy of formal models, we give a precise description of the arguments that support the desired correctness properties of TTP/C. First, requirements for correct communication are expressed on an abstract level. By stepwise refinement we show both that these abstract requirements are met under the optimistic fault hypothesis, and how the guardian model allows a broader class of node failures to be tolerated. 
The models have been developed and mechanically checked using the specification and verification system PVS.}, year = {2007}, DOI = {10.1016/j.ress.2006.10.006}, journal = {Reliability Engineering \& System Safety}, volume = {92}, pages = {1538--1550}, number = {11}, web_url = {https://doi.org/10.1016/j.ress.2006.10.006}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2007/ress07.pdf} } @Inproceedings { liebig-etal!07, author = {Liebig, Thorsten and Noppens, Olaf and Weith\"{o}ner, Timo}, title = {Interactive Exploration of the Movie DB on a Semantic Level}, abstract = {The IMDb can be seen as an ontology made of a schema and a huge network of individuals. This allows to reason about movie data and to define filters in terms of declarative descriptions. We believe that many interesting queries about movies can be answered by interactive visual exploration utilizing browsing primitives such as aggregated club views, selective expansions, or drag-n-drop filters.}, year = {2007}, booktitle = {Proceedings of the IEEE Information Visualization Contest 2007 (InfoVis 2007)}, publisher = {IEEE Computer Society}, keywords = {visualization}, tags = {SemanticTechnologies}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2007/liebig-et-al-infovis07.pdf} } @Inproceedings { horridge-etal!07, author = {Horridge, Matthew and Bechhofer, Sean and Noppens, Olaf}, title = {Igniting the OWL 1.1 Touch Paper: The OWL API}, abstract = {This paper describes the design and implementation of an OWL 1.1 API, herein referred to as the OWL API. The API is designed to facilitate the manipulation of OWL 1.1 ontologies at a high level of abstraction for use by editors, reasoners and other tools. The API is based on the OWL 1.1 specification and influenced by the experience of designing and using the WonderWeb API and OWL-based applications. An overview of the basis for the design of the API is discussed along with major API functionality. 
The API is available from Source Forge: http://sourceforge.net/projects/owlapi.}, year = {2007}, booktitle = {Proceedings of the OWLED 2007 Workshop on OWL: Experiences and Directions, Innsbruck, Austria, June 6-7, 2007}, volume = {258}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, editor = {Christine Golbreich and Aditya Kalyanpur and Bijan Parsia}, keywords = {OWL, OWL 2, Semantic Web, ontology}, tags = {SemanticTechnologies, KnowledgeModeling}, file_url = {http://ceur-ws.org/Vol-258/paper19.pdf} } @Inproceedings { liebig-et-al_explaining!07, author = {Liebig, Thorsten and Scheele, Stephan and Lambertz, Julian}, title = {Explaining Subsumption and Patching Non-Subsumption with Tableaux Methods}, abstract = {We argue that tableaux-based methods are valuable for explaining as well as providing clues for repairing an unwanted non-subsumption. In comparison to axiom pinpointing our approach currently is restricted in language expressivity, but more robust with respect to large amounts of axioms and provides more detailed explanations. An extension to ABox explanations seems possible.}, year = {2007}, isbn = {978-88-6046-008-5}, booktitle = {Proceedings of the 2007 International Workshop on Description Logics (DL 2007)}, address = {Brixen, Italy}, editor = {Diego Calvanese and Enrico Franconi and Volker Haarslev and Domenico Lembo and Boris Motik and Anni-Yasmin Turhan and Sergio Tessaris}, pages = {537--538}, keywords = {explaining}, tags = {KnowledgeModeling,SemanticTechnologies}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2007/liebig-et-al-dl07.pdf} } @Inproceedings { noppens-etal!07, author = {Noppens, Olaf and Liebig, Thorsten and Schmidt, Patrick}, title = {MobiXpl - A SVG-based Mobile User Interface for Semantic Service Discovery}, abstract = {The vision of a mobile Web in which everyday as well as business-related services are accessible from anywhere and anytime will become reality very soon. 
Mobile operators then have to meet the challenge not to overwhelm users with potentially interesting services but to allow them to easily discover individual services in a flexible and transparent way. This paper describes a novel user interface using a graphical preference specification paradigm which allows preference-based service discovery on cell phone devices. Our implementation adopts recent trends with respect to visualization techniques, like Scalable Vector Graphics for mobile devices (SVG Tiny), as well as semantic service discovery strategies.}, year = {2007}, booktitle = {Proceedings of the 5th International Conference on Scalable Vector Graphics (SVG Open 2007)}, keywords = {service discovery, HCI}, tags = {SemanticTechnologies}, file_url = {http://www.svgopen.org/2007/papers/MobiXpl/index.html} } @Inproceedings { weithoenerl!07, author = {Weith\"{o}ner, Timo and Liebig, Thorsten and Luther, Marko and B\"{o}hm, Sebastian}, title = {DIG 2.0 Reference Middleware}, abstract = {The DIG protocol -- the de-facto standard for the communication between DL reasoners and their clients -- is currently being updated to a new version. The middleware mediates between different DIG protocols and allows to route requests to the appropriate server component.}, year = {2007}, booktitle = {Proceedings of the 3rd International Workshop on OWL: Experiences and Directions (OWLED 2007)}, address = {Innsbruck, Austria}, editor = {Bernardo Cuenca Grau and Pascal Hitzler and Conor Shankey and Evan Wallace}, keywords = {owllink}, tags = {SemanticTechnologies}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2007/owled07-weithoener.pdf} } @Inproceedings { Schattenberg2007UnifyingFramework, author = {Schattenberg, Bernd and Biundo, Susanne}, title = {A Unifying Framework For Hybrid Planning And Scheduling}, abstract = {Many real-world application domains that demand planning and scheduling support do not allow for a clear separation of these capabilities. 
Typically, an adequate mixture of both methodologies is required, since some aspects of the underlying planning problem imply consequences on the scheduling part and vice versa. Several integration efforts have been undertaken to couple planning and scheduling methods, most of them using separate planning and scheduling components which iteratively exchange partial solutions until both agree on a result. This paper presents a framework that provides a uniform integration of hybrid planning –the combination of operator based partial order planning and abstraction based hierarchical task network planning– and a hierarchical scheduling approach. It is based on a proper formal account of refinement planning, which allows for the formal definition of hybrid planning, scheduling, and search strategies. In a first step, the scheduling functionality is used to produce plans that comply with time restrictions and resource bounds. We show how the resulting framework is thereby able to perform novel kinds of search strategies that opportunistically interleave what used to be separate planning and scheduling processes.}, year = {2007}, DOI = {10.1007/978-3-540-69912-5\_27}, booktitle = {Advances in Artificial Intelligence, Proceedings of the 29th German Conference on Artificial Intelligence (KI 2006)}, volume = {4314}, publisher = {Springer}, series = {Lecture Notes in Artificial Intelligence}, editor = {Christian Freksa and Michael Kohlhase and Kerstin Schill}, pages = {361--373}, tags = {Planning}, web_url = {http://www.springerlink.com/content/3821231298565uk3/}, file_url = {http://www.springerlink.com/content/3821231298565uk3/fulltext.pdf} } @Inproceedings { Bidot2007SchedulingFramework, author = {Bidot, Julien and Vidal, Thierry and Laborie, Philippe and Christopher Beck, J.}, title = {A General Framework for Scheduling in a Stochastic Environment}, abstract = {There are many systems and techniques that address stochastic scheduling problems, based on distinct and 
sometimes opposite approaches, especially in terms of how scheduling and schedule execution are combined, and if and when knowledge about the uncertainties are taken into account. In many real-life problems, it appears that all these approaches are needed and should be combined, which to our knowledge has never been done. Hence it is first desirable to define a thorough classification of the techniques and systems, exhibiting relevant features: in this paper, we propose a three-dimension typology that distinguishes between proactive, progressive, and revision techniques. Then a theoretical representation model integrating those three distinct approaches is defined. This model serves as a general template within which parameters can be tuned to implement a system that will fit specific application needs: we briefly introduce in this paper our first experimental prototypes which validate our model.}, year = {2007}, booktitle = {Proceedings of the 20th International Joint Conference on Artificial Intelligence (IJCAI 2007)}, pages = {56--61}, file_url = {http://www.ijcai.org/papers07/Papers/IJCAI07-007.pdf} } @Techreport { liebig-reasoning-insights!06, author = {Liebig, Thorsten}, title = {Reasoning with OWL - System Support and Insights -}, abstract = {This report aims at summarizing the current activities around OWL, the Web Ontology Language. At first, the report will present details about the current effort towards a revision of the official OWL W3C recommendation, known as OWL 1.1. Secondly, it describes a selection of inference engines while discussing different approaches as well as conceptual limits. These systems are then empirically evaluated using a set of spot tests which are intentionally designed to be hard to solve but small in size. Thirdly, it discusses actual trends and forthcoming developments in the context of ontology development and ontology reasoning. 
As a whole this report tries to provide some insights into currently available reasoning systems in order to serve as a decision help for Semantic Web application designers.}, type = {Ulmer Informatik Berichte}, year = {2006}, institution = {Ulm University}, number = {2006-04}, keywords = {reasoning}, tags = {SemanticTechnologies,AutomatedReasoning}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2006/TR-U-Ulm-2006-04.pdf} } @Inproceedings { weithoener-et-al!06, author = {Weith\"{o}ner, Timo and Liebig, Thorsten and Luther, Marko and B\"{o}hm, Sebastian}, title = {What's Wrong with OWL Benchmarks?}, abstract = {This paper is motivated by experiences in course of developing an ontology-based application within a real-world setting. When comparing these experiences with results of well-known benchmarks we found out that current benchmarks are not well suited to provide helpful hints for application developers who seek for a reasoning system matching typical real-world needs. This work aims at identifying requirements to make future benchmarks more useful for application developers.}, year = {2006}, booktitle = {Proceedings of the Second International Workshop on Scalable Semantic Web Knowledge Base Systems (SSWS 2006)}, address = {Athens, GA, USA}, pages = {101--114}, keywords = {reasoning}, tags = {SemanticTechnologies, AutomatedReasoning}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2006/weithoener-et-al-ssws06.pdf} } @Inproceedings { Bidot2006, author = {Bidot, Julien and Laborie, Philippe and Beck, J. Christopher and Vidal, Thierry}, title = {Using Constraint Programming and Simulation for Execution Monitoring and Progressive Scheduling}, abstract = {The problem we tackle is progressive scheduling with temporal and resource uncertainty. Operation durations are imprecise and alternative resources may break down. Operation end times and resource breakdowns are observed during execution. 
In this paper, we assume we have a representation of uncertainty in the form of probability distributions which are used in the simulation of schedule execution. We generate the schedule piece by piece during execution and use simulation to monitor the execution of the partial schedule. This paper describes the basis on which the decision to select and schedule a new subset of operations is made}, year = {2006}, DOI = {10.3182/20060517-3-FR-2903.00313}, booktitle = {Proceedings of the Twelfth IFAC Symposium on Information Control Problems in Manufacturing (INCOM 2006)}, file_url = {https://cs05.informatik.uni-ulm.de/ki/Bidot/INCOM06.pdf} } @Inproceedings { Schattenberg2006WebTechnology, author = {Schattenberg, Bernd and Balzer, Steffen and Biundo, Susanne}, title = {Semantic Web Technology as a Basis for Planning and Scheduling Systems}, abstract = {This paper presents an architecture for planning and scheduling systems that addresses key requirements of real-world applications in a unique manner. The system provides a robust, scalable and flexible framework for planning and scheduling software through the use of industrial-strength middleware and multi-agent technology. The architectural concepts extend knowledge-based components that dynamically perform and verify the system's configuration. The use of standardized components and communication protocols allows a seamless integration with third-party libraries and existing application environments. The system is based on a proper formal account of hybrid planning, the integration of HTN and POCL planning. The theoretical framework allows to decouple flaw detection, modification computation, and search control. In adopting this methodology, planning and scheduling capabilities can be easily combined by orchestrating respective elementary modules and strategies. 
The conceptual platform can be used to implement and evaluate various configurations of planning methods and strategies, without jeopardizing system consistency through interfering module activity.}, year = {2006}, booktitle = {Proceedings of the 20th Workshop ``Planen und Konfigurieren'' (PuK 2006)}, address = {University of Bremen}, editor = {J\"{u}rgen Sauer}, pages = {26--36}, tags = {Planning}, web_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2006/Schattenberg2006WebTechnologySlides.pdf}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2006/Schattenberg2006WebTechnology.pdf} } @Inproceedings { Mueller2006ADLReasoner, author = {M\"{u}ller, Felix and Hanselmann, Michael and Liebig, Thorsten and Noppens, Olaf}, title = {A Tableaux-based Mobile DL Reasoner - An Experience Report}, year = {2006}, booktitle = {Proceedings of the 2006 International Workshop on Description Logics (DL 2006)}, address = {Lake District, UK}, tags = {SemanticTechnologies}, file_url = {https://cs05.informatik.uni-ulm.de/ki/Noppens/publications/dl06.pdf} } @Inproceedings { noppens-etal!06, author = {Noppens, Olaf and Luther, Marko and Wagner, Matthias and Paolucci, Massimo}, title = {Ontology-supported Preference Handling for Mobile Music Selection}, abstract = {Mobile music selection is becoming an emerging market. So far only little work has been spent in selection of semantically enriched music streams on tiny mobile devices. This paper discusses how ontology-based preferences can be used for the selection of mobile music and presents a service discovery toolbox which is responsible for service matchmaking and preference relaxation. 
On the basis of this toolbox we developed a ubiquitous user interface using a graphical service discovery paradigm.}, year = {2006}, booktitle = {Proceedings of the Multidisciplinary Workshop on Advances in Preference Handling}, keywords = {service discovery}, tags = {SemanticTechnologies}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2006/mwap06-noppens.pdf} } @Inproceedings { noppens-liebig!06, author = {Noppens, Olaf and Liebig, Thorsten}, title = {Interactive Visualization of Large OWL Instance Sets}, abstract = {The adaption of Semantic Web techniques in real-world applications showed that it becomes a more and more demanding issue to understand not only the conceptual knowledge of an ontology but also the highly dynamic knowledge consisting of individuals and relationships between them. In this paper, we present a visualization paradigm and a prototypical implementation which allows to interactively browse large sets of individuals and to discover relationships between them in an easy and animated manner. The approach is optimized for efficient navigation as well as manipulation of ontologies containing a large number of individuals.}, year = {2006}, booktitle = {Proceedings of the 3rd International Semantic Web User Interaction Workshop (SWUI'06)}, keywords = {OWL, visualization, ontology}, tags = {SemanticTechnologies}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2006/swui06-noppens.pdf} } @Inproceedings { turhan-etal!06, author = {Turhan, Anni-Yasmin and Bechhofer, Sean and Kaplunova, Alissa and Liebig, Thorsten and Luther, Marko and M\"{o}ller, Ralf and Noppens, Olaf and Patel-Schneider, Peter F. and Suntisrivaporn, Boontawee and Weith\"{o}ner, Timo}, title = {DIG 2.0 - Towards a Flexible Interface for Description Logic Reasoners}, abstract = {The DIG Interface provides an implementation-neutral mechanism for accessing Description Logic reasoner functionality. 
At a high level the interface can be realised as XML messages sent to the reasoner over HTTP connections, with the reasoner responding as appropriate. Key changes in the current version (DIG 2.0) include support for OWL 1.1 and well-defined mechanisms for extensions to the basic interface.}, year = {2006}, booktitle = {Proceedings of the 2nd International Workshop on OWL: Experiences and Directions (OWLED 2006)}, address = {Athens, GA, USA}, editor = {Bernardo Cuenca Grau and Pascal Hitzler and Conor Shankey and Evan Wallace}, keywords = {owllink}, tags = {SemanticTechnologies}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2006/owled06-turhan.pdf} } @Inproceedings { liebig-etal!05, author = {Liebig, Thorsten and Luther, Marko and Noppens, Olaf and Paolucci, Massimo and Wagner, Matthias}, title = {Building Applications and Tools for OWL - Experiences and Suggestions}, abstract = {The success of the Semantic Web will largely depend on whether W3Cs Web Ontology Language can reach broad acceptance and a critical mass of industry-strength applications. We have been exploit- ing the use of OWL with a particular focus on tool support for ontology authoring and on providing access to the Semantic Web for mobile appli- cations. In the latter case our vision is to overlay the Semantic Web on ubiquitous computing environments making it possible to represent and interlink content and services as well as users, devices, their capabilities and the functionality they offer. In this paper we present our first expe- riences and lessons learned from early work and try to give constructive feedback for possible enhancements of OWL and its tools.}, year = {2006}, booktitle = {Proceedings of the OWLED 2005 Workshop on OWL: Experiences and Directions}, volume = {188}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, editor = {Bernardo Cuenca Grau and Ian Horrocks and Bijan Parsia and Peter F. 
Patel-Schneider}, keywords = {OWL, Semantic Web, ontology}, tags = {SemanticTechnologies}, file_url = {http://ceur-ws.org/Vol-188/sub28.pdf} } @Inproceedings { Schattenberg06KnowledgeBasedMiddleware, author = {Schattenberg, Bernd and Balzer, Steffen and Biundo, Susanne}, title = {Knowledge-based Middleware as an Architecture for Planning and Scheduling Systems}, abstract = {We present an architecture that provides a robust, scalable and flexible software framework for planning and scheduling systems through the use of standardized industrial-strength middleware and multi-agent technology. It utilizes knowledgebased components that dynamically perform and verify the system's configuration. The system is based on a proper formal account of hybrid planning, the integration of HTN and POCL planning, which allows to decouple flaw detection, modification computation,and search control. In adopting this methodology, planning and scheduling capabilities can be easily combined by orchestrating respective elementary modules and strategies without jeopardizing system consistency through interfering module activity.}, year = {2006}, booktitle = {Proceedings of the 16th International Conference on Automated Planning and Scheduling (ICAPS 2006)}, publisher = {AAAI Press}, address = {Ambleside, The English Lake District, UK}, editor = {Derek Long and Stephen F. 
Smith and Daniel Borrajo and Thomas Lee McCluskey}, pages = {422--425}, tags = {Planning}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2006/Schattenberg06KnowledgeBasedMiddleware.pdf} } @Inproceedings { liebig-halfmann!05, author = {Liebig, Thorsten and Halfmann, Michael}, title = {A Tableau-based Explainer for DL Subsumption}, abstract = {This paper describes the implementation of a tableau-based reasoning component which is capable of providing quasi natural language explanations for subsumptions within ALEHFr+ TBoxes.}, year = {2005}, DOI = {10.1007/11554554\_26}, booktitle = {Proceedings of the International Conference on Automated Reasoning with Analytic Tableaux and Related Methods (TABLEAUX 2005)}, volume = {3702}, publisher = {Springer Berlin / Heidelberg}, series = {Lecture Notes in Computer Science}, editor = {Beckert, Bernhard}, pages = {323--327}, keywords = {explaining}, tags = {AutomatedReasoning,SemanticTechnologies}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2005/liebig-halfmann-tableaux05.pdf} } @Inproceedings { pantschenko-etal!05, author = {Pantschenko, Konstantin and Noppens, Olaf and Liebig, Thorsten}, title = {Grounding Web Services Semantically: Why and How?}, year = {2005}, booktitle = {W3C Workshop on Frameworks for Semantic in Services (W3C SWSF)}, keywords = {reasoning, service discovery}, tags = {SemanticTechnologies, KnowledgeModeling}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2005/swsf-pantschenko-et-al.pdf} } @Inproceedings { Schattenberg05FlexibleStrategies, author = {Schattenberg, Bernd and Weigl, Andreas and Biundo, Susanne}, title = {Hybrid Planning Using Flexible Strategies}, abstract = {In this paper we present a highly modular planning system architecture. It is based on a proper formal account of hybrid planning, which allows for the formal definition of (flexible) planning strategies. 
Groups of modules for flaw detection and plan refinement provide the basic functionalities of a planning system. The concept of explicit strategy modules serves to formulate and implement strategies that orchestrate the basic modules. This way a variety of fixed plan generation procedures as well as novel flexible planning strategies can easily be implemented and evaluated. We present a number of such strategies and show some first comparative performance results.}, year = {2005}, isbn = {3-540-28761-2}, DOI = {10.1007/11551263\_21}, booktitle = {Advances in Artificial Intelligence, Proceedings of the 28th German Conference on Artificial Intelligence (KI 2005)}, volume = {3698}, publisher = {Springer-Verlag Berlin Heidelberg}, series = {Lecture Notes in Artificial Intelligence}, pages = {249--263}, tags = {Planning}, web_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2005/Schattenberg05FlexibleStrategiesSlides.pdf}, web_url2 = {http://www.springerlink.com/content/ab18236tekp4wvfu/}, file_url = {http://www.springerlink.com/content/ab18236tekp4wvfu/fulltext.pdf} } @Article { liebig-noppens!05, author = {Liebig, Thorsten and Noppens, Olaf}, title = {OntoTrack: A Semantic Approach for Ontology Authoring}, abstract = {OntoTrack is an ontology authoring tool that combines a graph-based hierarchical layout and instant reasoning feedback within one single view. Currently OntoTrack can handle ontologies with an expressivity almost comparable to OWL Lite. The graphical representation provides an animated and zoomable subsumption graph with context sensitive features such as click-able miniature branches or selective detail views, together with drag-and-drop editing. Each editing step is instantly synchronised with an external reasoner in order to provide appropriate graphical feedback about relevant modelling consequences. 
A recent extention of OntoTrack provides an on-demand textual explanation for subsumption relationships between classes. This paper describes the key features of the current implementation and discusses future work, as well as some development issues.}, year = {2005}, DOI = {10.1016/j.websem.2005.06.004}, journal = {Web Semantics: Science, Services and Agents on the World Wide Web}, volume = {3}, pages = {116--131}, number = {2}, keywords = {OWL, Semantic Web, ontology}, tags = {SemanticTechnologies, KnowledgeModeling} } @Incollection { Biundo05PlanningUnderTemporalUncertainty, author = {Biundo, Susanne and Holzer, Roland and Schattenberg, Bernd}, title = {Project Planning Under Temporal Uncertainty}, abstract = {This paper presents an approach towards probabilistic planning with continuous time. It adopts stochastic concepts for continuous probabilities and integrates them into an HTN-based planning framework. Based on uncertain time durations associated with primitive tasks the time consumption probabilities of non-linear plans can be accumulated and thus an overall probability for a successful execution of complex plans can be computed. Furthermore, heuristics for the decomposition of abstract tasks can be derived that guide the search towards plans with a minimized average value/variance of their overall time consumption. An example from software project planning is used to demonstrate our approach.}, year = {2005}, booktitle = {Planning, Scheduling, and Constraint Satisfaction: From Theory to Practice}, volume = {117}, publisher = {IOS Press}, series = {Frontiers in Artificial Intelligence and Applications}, editor = {Luis A. Castillo and Daniel Borrajo and Miguel A. 
Salido and Angelo Oddi}, pages = {189--198}, tags = {Planning}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2004/Biundo04ProjectPlanning.pdf} } @Inproceedings { liebig-noppens!04b, author = {Liebig, Thorsten and Noppens, Olaf}, title = {OntoTrack: A New Ontology Authoring Approach}, abstract = {The following provides a short description of OntoTracks main features. In concrete, OntoTrack provides a sophisticated graph-based ontology layout with animated expansion and de-expansion of class descendants, zooming, and paning. Uses elaborated layout techniques like click-able miniature branches or selective detail views. Allows for editing features like mouse-over anchor buttons, graphical selections or restriction editing without switching into a special editing layout. Synchronizes every single editing step with an external reasoner in order to provide instant feedback about relevant modeling consequences. Implements instant search highlighting or an overlay representation of classes and properties.}, year = {2004}, booktitle = {The Semantic Web - ISWC 2004: Third International Semantic Web Conference, Hiroshima, Japan, November 7-11, 2004. Demo Track}, keywords = {Semantic Web, Ontology, OWL}, tags = {SemanticTechnologies,KnowledgeModeling}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2004/iswc04demo-liebig.pdf} } @Inproceedings { wagner-et-al!04, author = {Wagner, Matthias and Liebig, Thorsten and Balzer, Steffen and Kellerer, Wolfgang}, title = {Towards Semantic-based Service Discovery on Tiny Mobile Devices}, abstract = {The vision of a mobile Web in which the computing environment will be composed of various devices that are carried by different users as they go through their daily routine might soon become a reality. In this context, Web services and the Semantic Web are already recognized as important building blocks. 
On the other hand, so far only little work has been done to support the discovery and selection of semantically enriched Web services on mobile terminals. In this paper we present a graphical toolbox for preference-based service discovery on tiny mobile devices. Our prototypical toolbox is developed as an extension of the Prot\'{e}g\'{e} OWL plugin and consists of a component called MobiOnt responsible for service matchmaking and preference relaxation together with an emulator for different mobile devices called MobiXpl. Our work aims to serve as an open platform in order to gain experiences with different discovery strategies as well as mobile end-user interfaces.}, year = {2004}, booktitle = {Proceedings of the International Workshop on Semantic Web Technology for Mobile and Ubiquitous Applications}, address = {Hiroshima, Japan}, keywords = {service discovery}, tags = {KnowledgeModeling,SemanticTechnologies}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2004/iswc2004\_ws\_mobiONT.pdf} } @Inproceedings { liebig-pfeifer-vonhenke!04, author = {Liebig, Thorsten and Pfeifer, Holger and von Henke, Friedrich}, title = {Reasoning Services for an OWL Authoring Tool: An Experience Report}, abstract = {OWL has been designed to be a formal language for representing ontologies in the Semantic Web. In short, OWL is the result of combining an expressive Description Logic (DL) with techniques and standards of the Web. DLs have been well studied in the field of knowledge representation over the last decades. As one result, some highly optimized DL reasoners have been implemented, which provide an excellent starting point for building a sound and complete OWL DL/Lite reasoner. However, having a traditional DL system with standard functionality is not enough in the current context. So far, DL systems have been used by KR experts mainly in isolated application domains. 
Now, in order to make the Semantic Web happen far more flexible and interactive DL-based tools are needed for building, maintaining, linking, and applying ontologies even for non-experienced users. The importance of so-called non-standard inference services that support building and maintaining knowledge bases has been pointed out recently. We argue that the availability of those inference services is a fundamental premise for upcoming real-world Semantic Web systems and applications. Our experience in the course of developing the graphical ontology editor OntoTrack is a prime example here.}, year = {2004}, issn = {1613-0073}, booktitle = {Proceedings of the 2004 International Workshop on Description Logics (DL 2004)}, keywords = {ontology}, tags = {SemanticTechnologies,KnowledgeModeling}, file_url = {http://www.CEUR-WS.org/Vol-104/09Liebig-final.pdf} } @Inproceedings { LPvH:DL04, author = {Liebig, Thorsten and Pfeifer, Holger and von Henke, Friedrich}, title = {Reasoning Services for an OWL Authoring Tool: An Experience Report}, abstract = {OWL has been designed to be a formal language for representing ontologies in the Semantic Web. In short, OWL is the result of combining an expressive Description Logic (DL) with techniques and standards of the Web. DLs have been well studied in the field of knowledge representation over the last decades. As one result, some highly optimized DL reasoners have been implemented, which provide an excellent starting point for building a sound and complete OWL DL/Lite reasoner. However, having a traditional DL system with standard functionality is not enough in the current context. So far, DL systems have been used by KR experts mainly in isolated application domains. Now, in order to make the Semantic Web happen far more flexible and interactive DL-based tools are needed for building, maintaining, linking, and applying ontologies even for non-experienced users. 
The importance of so-called non-standard inference services that support building and maintaining knowledge bases has been pointed out recently. We argue that the availability of those inference services is a fundamental premise for upcoming real-world SemanticWeb systems and applications. Our experience in the course of developing the graphical ontology editor OntoTrack is a prime example here.}, year = {2004}, booktitle = {Proceedings of the 2004 International Workshop on Description Logics - DL2004}, volume = {104}, address = {Whistler, Canada}, series = {CEUR Workshop Proceedings, ISSN 1613-0073, online CEUR-WS.org/Vol-104/09Liebig-final.pdf}, editor = {Volker Haarslev and Ralf M\"{o}ller}, file_url = {https://cs05.informatik.uni-ulm.de/ki/Liebig/papers/liebig-pfeifer-vhenke-dl04.pdf} } @Inproceedings { Biundo04ProjectPlanning, author = {Biundo, Susanne and Holzer, Roland and Schattenberg, Bernd}, title = {Project Planning Under Temporal Uncertainty}, abstract = {This paper presents an approach towards probabilistic planning with continuous time. It adopts stochastic concepts for continuous probabilities and integrates them into an HTN-based planning framework. Based on uncertain time durations associated with primitive tasks the time consumption probabilities of non-linear plans can be accumulated and thus an overall probability for a successful execution of complex plans can be computed. Furthermore, heuristics for the decomposition of abstract tasks can be derived that guide the search towards plans with a minimized average value/variance of their overall time consumption. 
An example from software project planning is used to demonstrate our approach.}, year = {2004}, booktitle = {Proceedings of the Workshop on Planning and Scheduling: Bridging Theory to Practice at The 16th European Conference on Artificial Intelligence (ECAI 2004)}, tags = {Planning}, web_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2004/Biundo04ProjectPlanningSlides.pdf}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2004/Biundo04ProjectPlanning.pdf} } @Inproceedings { balzer-et-al!04, author = {Balzer, Steffen and Liebig, Thorsten and Wagner, Matthias}, title = {Pitfalls of OWL-S - A practical Semantic Web Use Case}, abstract = {OWL-S is a combined effort of the Semantic Web and the Web Service community to facilitate an intelligent service provisioning on the Semantic Web. The vision of OWL-S includes automatic service discovery, invocation, composition, orchestration and monitoring of Web-Services through their semantic descriptions. In this paper, we investigate the practical applicability of the current OWL-S specification and show that, in spite of the large momentum of OWL-S, significantly more work needs to be done before the vision of truly intelligent Semantic Web Services can become true. We therefore study the case of an autonomous travel agent that helps users with online hotel arrangements. The aim of our work is twofold: on the one side, we show step-by-step how a prototypical implementation can be realized based on current semantic technologies around UDDI, WSDL, and SOAP. On the other hand, we reveal pitfalls in the current version of OWL-S that severely limit its support for mechanizing service discovery, configuration, combination and automated execution. 
Throughout the paper, we present practical solutions and workarounds to existing OWL-S shortcomings and hope to therewith further stimulate the ongoing work on Semantic Web Services.}, year = {2004}, DOI = {10.1145/1035167.1035209}, booktitle = {Proceedings of the 2nd International Conference on Service Oriented Computing (ICSOC 2004)}, publisher = {ACM Press}, address = {New York City, USA}, pages = {289--298}, keywords = {reasoning}, tags = {SemanticTechnologies,KnowledgeModeling}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2004/balzer-et-al-icsoc04.pdf} } @Phdthesis { Sorea04, author = {Sorea, Maria}, title = {Verification of real-time systems through lazy approximations}, abstract = {We develop in this thesis effective verification techniques for real-time systems based on novel combinations of theorem proving and model checking. Although the resulting algorithms do not improve the worst-case complexity, they are usually much more effective in practice, since state spaces are only explored on demand. Moreover, in contrast to dedicated model-checking techniques for real-time systems, our algorithms are not restricted to specific modeling formalisms such as timed automata, and are therefore applicable for a much larger class of problems.The main contribution, lazy approximation, is an effective and complete method for verifying safety and liveness properties of real-time systems, and is based on predicate abstraction for timed automata, finite-state model checking, and counterexample-guided abstraction refinement. This is the first time that predicate abstraction is used for model checking real-time systems specified with real-time logics. This method is also complete for verifying liveness properties. The proposed technique is lazy in that approximations of real-time systems are computed on demand and incrementally refined until the desired property is refuted or verified. 
In this way, lazy approximation is significantly less memory and time consuming than conventional, region graph based verification methods for real-time systems.Lazy approximation requires information about counterexamples from failed model-checking attempts on the abstract, finite state space. We define a general form of counterexamples both for CTL and TCTL logics, in a symbolic way, as sequences over sets of states. We use symbolic counterexamples in the abstraction-refinement algorithm as a heuristic for selecting new abstraction predicates from the given set of abstraction predicates. The use of symbolic counterexamples for the lazy refinement of approximations has two main advantages compared to refinement methods based on linear counterexamples. First, the refinement process converge more quickly towards a strongly preserving abstraction since multiple spurious counterexamples are discarded in every refinement step. Second, lazy approximation is applicable for full TCTL, and not only for a fragment of universal formulas as is the case when using linear counterexamples.Bounded model checking (BMC) has been recently introduced as a technique for finding bugs in finite state systems. We extend the BMC paradigm to timed automata and LTL formulas augmented with a decidable set of clock constraints, and show that this yields a complete refutation method for real-time systems. Through the technique of k-induction it is possible to extend BMC for timed automata to proof by induction, providing therefore a complete verification method for timed automata and invariant properties.The model-checking problem for real-time systems can be recast as a validity problem in an appropriate logic, which in turn can be solved using theorem proving. Unfortunately, the satisfiability problem for existing branching-time logics with dense-time models is undecidable. We introduce the Event-Recording Logic (ERL) as a real-time extension of the modal mu-calculus with clock variables. 
ERL is the first decidable real-time logic for specifying branching-time properties of real-time systems. The decidability problem is shown to be EXPTIME complete.}, type = {PhD Thesis}, year = {2004}, school = {University of Ulm}, file_url = {http://www.cs.man.ac.uk/~msorea/diss/diss.pdf} } @Inproceedings { PvH04, author = {Pfeifer, Holger and von Henke, Friedrich}, title = {Modular Formal Analysis of the Central Guardian in the Time-Triggered Architecture}, abstract = {We present a modular formal analysis of the communication properties of the Time-Triggered Protocol TTP/C based on the guardian approach. The guardian is an independent component that employs static knowledge about the system to transform arbitrary node failures into failure modes that are covered by the rather optimistic fault hypothesis of TTP/C. Through a hierarchy of formal models, we give a precise description of the arguments that support the desired correctness properties of TTP/C. First, requirements for correct communication are expressed on an abstract level. By stepwise refinement we show that the abstract requirements are met under the optimistic fault hypothesis, and how the guardian model allows a broader class of failures be tolerated.}, year = {2004}, booktitle = {Proceedings of the 23rd International Conference on Computer Safety, Reliability, and Security (SAFECOMP)}, volume = {3219}, publisher = {Springer}, address = {Potsdam, Germany}, series = {Lecture Notes in Computer Science}, editor = {Maritta Heisel and Peter Liggesmeyer and Stefan Wittmann}, pages = {240--253}, file_url = {https://cs05.informatik.uni-ulm.de/ki/Pfeifer/safecomp2004.pdf} } @Inproceedings { weithoener-et-al!04, author = {Timo, Weith\"{o}ner and Liebig, Thorsten and Specht, G\"{u}nther}, title = {Efficient Processing of Huge Ontologies in Logic and Relational Databases}, abstract = {Today ontologies are heavily used in the sematic web. 
As they grow in size reasoning systems can't work without secondary storage anymore. Thus database technology is required for storing and processing huge ontologies. In this paper we present an efficient technique for representing and reasoning with ontologies in databases. We also present some benchmarking results in comparison with previous approaches.}, year = {2004}, DOI = {10.1007/978-3-540-30470-8\_13}, booktitle = {Proceedings of the International Conference on Ontologies, Databases and Application of Semantics (ODBASE 2004)}, publisher = {Springer Berlin / Heidelberg}, series = {Lecture Notes in Computer Science}, pages = {28--29}, number = {3292}, keywords = {reasoning}, tags = {SemanticTechnologies,AutomatedReasoning}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2004/weithoener-et-al-odbase04.pdf} } @Inproceedings { balzer-liebig!04, author = {Balzer, Steffen and Liebig, Thorsten}, title = {Bridging the Gap Between Abstract and Concrete Services - A Semantic Approach for Grounding OWL-S}, abstract = {OWL-S is one of the emerging standards for the semantic description of web services in order to enable their automatic discovery, execution and composition by software agents. An important task within automatic execution of OWL-S services is the bi-directional mapping between semantically higher level OWL-S service parameter descriptions and primitive XML Schema types of its grounding. OWL-S proposes to utilize XSL Transformations (XSLTs) for the mapping between this representation levels. However, this approach has a substantial shortcoming due to the fact that one OWL model can have many different RDF serializations whereas each requires a specific XSL stylesheet in the worst case. This severely limits the practical applicability of OWL-S in general. In this paper we present a simple but powerful approach of OWL-S parameter type mappings on a semantical basis. 
We therefore define an RDFS ontology of RDF Mappings that enable mappings between OWL and XML Schema types into simple XML Schema types and vice versa. We show how to integrate RDF Mappings into the OWL-S grounding ontology and prove their feasibility by describing our prototypical implementation}, year = {2004}, booktitle = {Proceedings of the International Workshop on Semantic Web Services: Preparing to Meet the World of Business Applications}, address = {Hiroshima, Japan}, keywords = {reasoning}, tags = {SemanticTechnologies,KnowledgeModeling}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2004/SWS2004-Balzer-Final.pdf} } @Inproceedings { Biundo04ContinuousResources, author = {Biundo, Susanne and Holzer, Roland and Schattenberg, Bernd}, title = {Dealing with Continuous Resources in AI Planning}, abstract = {This paper presents an approach towards probabilistic planning with continuous resources. It adopts stochastic concepts for continuous probabilities and integrates them into a STRIPS-based planning framework. The approach enables the construction of plans that are guaranteed to meet certain probability thresholds w.r.t. the consumption of critical resources. Furthermore, the consumption probabilities of multiple resources can be accumulated and thus an overall probability for a successful execution of an aggregate plan can be computed. 
We extend our approach to HTN-based planning and show how heuristics can be derived that lead to plans with a minimized average value/variance of their overall resource consumption.}, year = {2004}, booktitle = {Proceedings of the 4th International Workshop on Planning and Scheduling for Space (IWPSS 2004)}, publisher = {European Space Agency Publications Division}, address = {ESA-ESOC, Darmstadt, Germany}, pages = {213--218}, number = {WPP-228}, tags = {Planning}, file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2004/Biundo04ContinuousResources.pdf} } @Inproceedings { liebig-noppens!04, author = {Liebig, Thorsten and Noppens, Olaf}, title = {OntoTrack: Combining Browsing and Editing with Reasoning and Explaining for OWL Lite Ontologies}, abstract = {OntoTrack is a new browsing and editing ``in-one-view'' ontology authoring tool that combines a hierarchical graphical layout and instant reasoning feedback for (the most rational fraction of) OWL Lite. OntoTrack provides an animated and zoomable view with context sensitive features like click-able miniature branches or selective detail views together with drag-and-drop editing. Each editing step is instantly synchronized with an external reasoner in order to provide appropriate graphical feedback about relevant modeling consequences. The most recent feature of OntoTrack is an on demand textual explanation for subsumption and equivalence between or unsatisfiability of classes. This paper describes the key features of the current implementation and discusses future work as well as some development issues.}, year = {2004}, isbn = {3-540-23798-4}, DOI = {10.1007/978-3-540-30475-3\_18}, booktitle = {Proceedings of the Third International Semantic Web Conference (ISWC 2004)}, volume = {3298}, publisher = {Springer}, series = {Lecture Notes in Computer Science}, editor = {Sheila A. 
McIlraith and Dimitris Plexousakis and Frank van Harmelen}, pages = {244--258}, keywords = {OWL, OWL2, Semantic Web, ontology, visualization}, tags = {SemanticTechnologies, KnowledgeModeling}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2004/iswc04-liebig.pdf} } @Inproceedings { PvH04b, author = {Pfeifer, Holger and von Henke, Friedrich}, title = {Formal Modelling and Analysis of Fault Tolerance Properties in the Time-Triggered Architecture}, abstract = {The Time-Triggered Architecture is a distributed computer architecture for the implementation of highly dependable real-time systems specifically targeting embedded applications, such as digital control systems in the automotive and avionics domain. We have formally modelled and analysed various aspects of the underlying communication protocol TTP/C and its fault tolerance properties. This paper provides an overview of these analyses from a broader perspective and describes the relationships between the individual items. The algorithms implementing the basic protocol services of TTP/C are heavily intertwined and pose challenging problems for formal analysis. This is true not only with regard to the construction of formal proofs, but also for the development of the formal models themselves. We argue that an adequate structuring of the models and proofs along different levels of abstraction is necessary to enable the formal modelling of the central protocol algorithms, make their analysis feasible, and resolve the mutual dependencies among the services.}, year = {2004}, booktitle = {Proceedings of the 5th Symposium on Formal Methods for Automation and Safety in Railway and Automotive Systems (FORMS/FORMAT 2004)}, publisher = {Technical University of Braunschweig, Institute for Traffic Safety and Automation Engineering}, editor = {E. Schnieder and G. 
Tarnai}, pages = {230--240}, file_url = {https://cs05.informatik.uni-ulm.de/ki/Pfeifer/forms2004.pdf} } @Inproceedings { Sorea04LazyApproximation, author = {Sorea, Maria}, title = {Lazy Approximation for Dense Real-Time Systems}, abstract = {We propose an effective and complete method for verifying safety and liveness properties of timed systems, which is based on predicate abstraction for computing finite abstractions of timed automata and TCTL formulas, finite-state CTL model checking, and successive refinement of finite-state abstractions. Starting with some coarse abstraction of the given timed automaton and the TCTL formula we define a finite sequence of refined abstractions that converges to the region graph of the real-time system. In each step, new abstraction predicates are selected nondeterministically from a finite, predetermined basis of abstraction predicates. Symbolic counterexamples from failed model-checking attempts are used to heuristically choose a small set of new abstraction predicates for incrementally refining the current abstraction. Without sacrificing completeness, this algorithm usually does not require computing the complete region graph to decide model-checking problems. 
Abstraction refinement terminates quickly, asa multitude of spurious counterexamples is eliminated in every refinement stepthrough the use of symbolic counterexamples for TCTL.}, year = {2004}, isbn = {3-540-23167-6}, DOI = {10.1007/978-3-540-30206-3\_25}, booktitle = {Proceedings of the Joint Conference Formal Modelling and Analysis of Timed Systems (FORMATS) and Formal Techniques in Real-Time and Fault-Tolerant Systems (FTRTFT)}, volume = {3253}, publisher = {Springer}, series = {Lecture Notes in Computer Science}, editor = {Yassine Lakhnech and Sergio Yovine}, pages = {363--378}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2004/Sorea04Lazy.pdf} } @Inproceedings { SRSP04, author = {Steiner, Wilfried and Rushby, John and Sorea, Maria and Pfeifer, Holger}, title = {Model Checking a Fault-Tolerant Startup Algorithm: From Design Exploration To Exhaustive Fault Simulation}, abstract = {The increasing performance of modern model-checking tools offers high potential for the computer-aided design of fault-tolerant algorithms. Instead of re lying on human imagination to generate taxing failure scenarios to probe a fault-tolerant algorithm during development, we define the fault behavior of a faulty process at its interfaces to the remaining system and use model checking to automatically examine all possible failure scenarios. We call this approach \dqexhaustive fault simulation\dq. In this paper we illustrate exhaustive fault simulation using a new startup algorithm for the Time-Triggered Architecture (TTA) and show that this approach is fast enough to be deployed in the design loop. 
We use the SAL toolset from SRI for our experiments and describe an approach to modeling and analyzing fault-tolerant algorithms that exploits the capabilities of tools such as this.}, year = {2004}, booktitle = {Proceedings of the International Conference on Dependable Systems and Networks}, publisher = {IEEE Computer Society}, address = {Florence, Italy}, pages = {189--198}, web_url = {http://doi.ieeecomputersociety.org/10.1109/DSN.2004.1311889}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2004/dsn04.pdf} } @Inproceedings { zimmermann03ASMModeling, author = {Zimmermann, Wolf and Dold, Axel}, title = {A Framework for Modeling the Semantics of Expression Evaluation with Abstract State Machines}, abstract = {We present a framework for formalizing the semantics of expression evaluation using Abstract State Machines. Many programming languages allow some non-determinism for evaluating expressions. The semantics only have in common that arguments are evaluated before an operator is applied. The evaluation of one argument may be interleaved with the evaluation of the other arguments. However, programming languages usually restrict this most liberal evaluation order. For example, the expression evaluation may take into account short-circuit evaluation of boolean expressions which implies that right operands must not be evaluated before the complete left operand is evaluated. Our approach provides a generic expression evaluation semantics that only need to be instantiated adequatly. 
We demonstrate this approach by the example of Ada95, C, C++, Java, C{\#}, and Fortran.}, year = {2003}, booktitle = {Abstract State Machines 2003 -- Proceedings of the 10th International Workshop, ASM 2003}, pages = {391--406}, file_url = {http://www.informatik.uni-ulm.de/ki/Dold/expressions.pdf} } @Phdthesis { Herbst03phd, author = {Herbst, Joachim}, title = {Ein induktiver Ansatz zur Akquisition und Adaption von Workflow-Modellen.}, type = {PhD Thesis}, year = {2003}, school = {University of Ulm} } @Phdthesis { luther03phd, author = {Luther, Marko}, title = {Elaboration and Erasure in Type Theory}, abstract = {This thesis contributes to the construction of a convenient specification language on top of a type theoretic substrate. The subject arose in the context of the Typelab project that aimed at improving the machine assistance for the formal development of mathematics, software and hardware. Type theory was chosen as underlying theoretical framework, because it homogeneously comprises both the notion of computation and deduction. However, the price for its expressiveness is a verbose syntax. When I joined the Typelab group, my responsibility was to shape the external language of the Typelab system. Naturally, I first looked at related implementations. Most of them cope with the wordiness of type theory by allowing their users to omit on input redundant parts that can be inferred automatically through a process called elaboration. While the use of such a mechanism seems indispensable for serious verification tasks, I found the existing solutions unsatisfactory. Not only are the implemented algorithms seldom precisely documented and formally analyzed, they also lack strength. It is disappointing how much redundant information still has to be supplied on input. Furthermore, the ad hoc erasure algorithms, used to reduce the redundancy of expressions on output, often produce wordy or even ambiguous external representations. 
Such failures are especially fatal for interactive verification systems, where the output describing the actual system state is often the only hint for a user on how to proceed with a proof. To improve this situation, I developed the elaboration and erasure methods described in this thesis. The design of elaboration is inspired by the conciseness of functional programming languages and is formally grounded on type inference in the underlying type theory. To establish correctness, intermediate elaboration states are represented by open terms, adapting techniques recently developed for the representation of partial proofs. The erasure methods are based on estimations of the corresponding elaboration process and guarantee a successful reconstruction of the elided information. Experiments performed with Typelab proved the proposed methods both effective and efficient.}, type = {PhD Thesis}, year = {2003}, school = {Universit\"{a}t Ulm}, address = {Germany}, file_url = {http://www.informatik.uni-ulm.de/ki/Papers/luther03-diss.pdf} } @Phdthesis { Pfe03, author = {Pfeifer, Holger}, title = {Formal Analysis of Fault-Tolerant Algorithms in the Time-Triggered Architecture}, abstract = {This thesis is about formal analysis of fault-tolerant algorithms implemented in the Time-Triggered Protocol TTP/C. The services provided by this communication protocol are characterized by a deep integration and mutual dependencies, which constitute significant sources of complicacy for formal analysis in addition to the inherent difficulty of analyzing fault-tolerant distributed algorithms. Consequently, the main goal of this thesis is to explore and develop techniques to structure and organize the formal models that describe the protocol services and the fault assumptions, as well as the proofs in a way that enables the required correctness properties be established. 
Due to their central role for TTP/C we concentrate the formal analysis on the two most important algorithms of the protocol, namely clock synchronization and group membership. Because of the immense complexity and detail of TTP/C a formal approach must try to isolate the core algorithms from the integrated protocol and analyse them separately. However, as the services are tightly intertwined, the process of isolating a particular service also amounts to identifying the dependencies on and suitable interfaces to other protocol parts. On the other hand, if the formal analysis of a particular service is carried out in isolation of other protocol aspects, the question arises whether any correctness statements are meaningful for the integrated protocol. Therefore, the formal models and proofs must be structured in a way that enables the various isolated analyses to be combined into a larger context. In order to do so, the formal models must provide interfaces through which other models or proofs can exchange information or other requirements. For the provision of these interfaces abstraction plays a crucial role. We show how two flavours of abstraction techniques can provide the required separation of protocol services of TTP/C without compromising the possibility of adequately integrating the different analyses. 
Altogether, the formal analysis contributes to providing an isolated formal model for the single services, a clear statement of the interfaces and mutual dependencies, the realization of correctness propositions for the individual services, and finally, the demonstration of the validity of these correctness arguments for the integrated protocol description.}, type = {PhD Thesis}, year = {2003}, school = {Ulm University}, address = {Germany}, file_url = {https://cs05.informatik.uni-ulm.de/ki/Papers/pfeifer03-diss.pdf} } @Inproceedings { liebig-noppens!03, author = {Liebig, Thorsten and Noppens, Olaf}, title = {OntoTrack: Fast Browsing and Easy Editing of Large Ontologies}, abstract = {OntoTrack is a new browsing and editing ``in-one-view\\" ontology authoring tool. It combines a sophisticated graphical layout with mouse enabled editing features optimized for efficient navigation and manipulation of large ontologies. The system is based on SpaceTree [PGB02] and implemented in Java2D. OntoTrack provides animated expansion and de-expansion of class descendants, zooming, paning and uses elaborated layout techniques like click-able miniature branches or selective detail views. At the same time OntoTrack allows for quite a number of editing features using mouse-over anchor buttons and graphical selections without switching into a special editing layout. 
In addition, every single editing step is synchronized with an external reasoner in order to provide instant feedback about relevant modeling consequences.}, year = {2003}, booktitle = {Proceedings of the 2nd International Workshop on Evaluation of Ontology-based Tools (EON 2003)}, volume = {87}, publisher = {CEUR-WS.org}, series = {CEUR Workshop Proceedings}, editor = {York Sure and Oscar Corcho}, keywords = {OWL, OWL 2, Semantic Web, ontology, visualization}, tags = {SemanticTechnologies, KnowledgeModeling}, file_url = {http://sunsite.informatik.rwth-aachen.de/Publications/CEUR-WS//Vol-87/EON2003\_Liebig.pdf} } @Inproceedings { weithoener-et-al!03, author = {Weith\"{o}ner, Timo and Liebig, Thorsten and Specht, G\"{u}nther}, title = {Storing and Querying Ontologies in Logic Databases}, abstract = {The intersection of Description Logic inspired ontology languages with Logic Programs has been recently analyzed in [GHVD03]. The resulting language, called Description Logic Programs, covers RDF Schema and a notable portion of OWL Lite. However, the proposed mapping in [GHVD03] from the corresponding OWL fragment into Logic Programs has shown scalability as well as representational deficits within our experiments and analysis. In this paper we propose an alternative mapping resulting in lower computational complexity and more representational flexibility. We also present benchmarking results for both mappings with ontologies of different size and complexity.}, year = {2003}, booktitle = {Proceedings of the First International Workshop on Semantic Web and Databases (SWDB'03)}, address = {Berlin, Germany}, pages = {329--348}, keywords = {reasoning}, tags = {SemanticTechnologies,AutomatedReasoning}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2003/swdb03-weithoener\_etal.pdf} } @Article { Xu02FaultTolerantSystem, author = {Xu, Jie and Randell, Brian and Romanovsky, Alexander B. and Stroud, Robert J. and 
Zorzo, Avelino F. and Canver, Ercument and von Henke, Friedrich}, title = {Rigorous Development of an Embedded Fault-Tolerant System Based on Coordinated Atomic Actions}, abstract = {This paper describes our experience using coordinated atomic (CA) actions as a system structuring tool to design and validate a sophisticated and embedded control system for a complex industrial application that has high reliability and safety requirements. Our study is based on an extended production cell model, the specification and simulator for which were defined and developed by FZI (Forschungszentrum Informatik, Germany). This \dqFault-Tolerant Production Cell\dq represents a manufacturing process involving redundant mechanical devices (provided in order to enable continued production in the presence of machine faults). The challenge posed by the model specification is to design a control system that maintains specified safety and liveness properties even in the presence of a large number and variety of device and sensor failures. Based on an analysis of such failures, we provide in this paper details of: 1) a design for a control program that uses CA actions to deal with both safety-related and fault tolerance concerns and 2) the formal verification of this design based on the use of model-checking. We found that CA action structuring facilitated both the design and verification tasks by enabling the various safety problems (involving possible clashes of moving machinery) to be treated independently. Even complex situations involving the concurrent occurrence of any pairs of the many possible mechanical and sensor failures can be handled simply yet appropriately. 
The formal verification activity was performed in parallel with the design activity and the interaction between them resulted in a combined exercise in \dqdesign for validation\dq; formal verification was very valuable in identifying some very subtle residual bugs in early versions of our design which would have been difficult to detect otherwise.}, year = {2002}, DOI = {10.1109/12.980006}, journal = {IEEE Transactions on Computers}, volume = {51}, pages = {164--179}, number = {2} } @Inproceedings { Schattenberg02HierarchicalResources, author = {Schattenberg, Bernd and Biundo, Susanne}, title = {On the Identification and Use of Hierarchical Resources in Planning and Scheduling}, abstract = {Many real-world planning applications have to deal with resource allocation problems, and so does planning in the domain of crisis management assistance. In order to support resource allocation in these kind of applications, we present a new approach to the integration of scheduling capabilities and planning. The proposed methodology relies on a hybrid planner, which combines action and state abstraction by integrating hierarchical task network (HTN) planning and state based partial order causal link (POCL) planning into a common framework. We extend the abstraction mechanism of the planner to different kinds of abstraction for resources, namely subsumption, approximation, qualification, and aggregation. 
We show how these abstractions can be used when modeling the domain and how reasoning about resources can be performed in a flexible way, namely by merging opportunistic planning and scheduling strategies.}, year = {2002}, booktitle = {Proceedings of the 6th International Conference on Artificial Intelligence Planning Systems (AIPS 2002)}, publisher = {AAAI Press}, pages = {263--272}, tags = {Planning}, web_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2002/Schattenberg02HierarchicalResourcesSlides.pdf}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2002/Schattenberg02HierarchicalResources.pdf} } @Inproceedings { seitz2002docs, author = {Seitz, Alexander and Dannenberg, Matthias}, title = {Docs'n Drugs - Gegenwart und Zukunft}, abstract = {Der vorliegende Text liefert einen kurzen Abriss \"{u}ber den aktuellen Stand des Projektes \dqDocs 'n Drugs - Die Virtuelle Poliklinik\dq. Es werden die wichtigsten Programmkomponenten des Lehrsystems vorgestellt, sowie die Position, die das System im aktuellen Lehrbetrieb mittlerweile innehat. Au{\ss}erdem erfolgt ein Ausblick auf die noch verbleibende Projektphase sowie weitere Zukunftsperspektiven.}, year = {2002}, booktitle = {Proceedings of the 6. Workshop der AG CBT in der Medizin der GMDS}, file_url = {www.informatik.uni-ulm.de/ki/Seitz/papers/GMDS02.pdf} } @Inproceedings { schalk-et-all!02, author = {Schalk, Michael and Liebig, Thorsten and Illmann, Torsten and Kargl, Frank}, title = {Combining FIPA ACL With DAML OIL - A Case Study}, abstract = {The Collaboration and Coordination Infrastructure for personal Agents (CIA) is a Java-based multi-agent framework for personal assistance. Until now, inter-agent communication in CIA is done via topic-based communication channels with Java-based event classes. Information within these events is represented in proprietary classes, which are serialized for transfer. 
As a result, agent communication is limited to an a priori defined domain of information chunks to which collaborating agents have to be tailored. In order to achieve wider inter-operability we are currently evaluating the combination of two techniques. For standardized communication between heterogeneous agents we will use the FIPA Agent Communication Language (ACL). The DARPA Agent Communication Language / Ontology Inference Layer (DAML+OIL) will serve as content language for the ACL. This architecture seems to be a promising combination because of two reasons. First, agents of this kind are able to collaborate with other heterogeneous agents in an ad hoc manner because of the standardized FIPA communication interface. Second, they do not have to be tailored to proprietary content vocabularies in advance, because they can use ontology-based Semantic Web techniques as a mechanism for identification of the meaning of the terms they communicate. However, it has to be figured out if these two techniques fit seamlessly into a given agent architecture like CIA. Potential problems include the semantic compatibility of DAML and ACL for example. These and other questions have to be considered with respect to the highly dynamic infrastructure of a CIA system.}, year = {2002}, booktitle = {Proceedings of the Second International Workshop on Ontologies in Agent Systems (OAS'02)}, keywords = {ontology}, tags = {KnowledgeModeling}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2002/oas02-liebig.pdf} } @Inproceedings { Seitz02AnthoringSystem, author = {Seitz, Alexander and Dannenberg, Matthias and Liebhart, Hubert}, title = {An Authoring System for Instructionally Designed Tutoring Processes}, abstract = {This paper describes the design and implementation of a domain independent authoring tool for tutoring systems. 
The presented work incorporates standard authoring features, including a hierarchical development of learning material, but also supports users in a didactically suitable design of the tutoring process. Requirements for the realization of Goal Based Scenarios are fulfilled by a number of implemented add-ons.}, year = {2002}, booktitle = {Proceedings of the International Conference on Computers in Education (ICCE '02)}, pages = {1317--1318}, file_url = {http://www.computer.org/csdl/proceedings/icce/2002/1509/00/15091317.pdf} } @Techreport { Dold02RealisticCompiler, author = {Dold, Axel and von Henke, Friedrich and Vialard, Vincent and Goerigk, Wolfgang}, title = {A Mechanically Verified Compiling Specification for a Realistic Compiler}, abstract = {We report on a large formal verification effort in mechanically proving correct a compiling specification for a realistic bootstrap compiler from ComLisp (a subset of ANSI Common Lisp sufficiently expressive to serve as a compiler implementation language) to binary Transputer code using the PVS system. The compilation is carried out in five steps through a series of intermediate languages. In the first phase, ComLisp is translated into a stack intermediate language (SIL), where parameter passing is implemented by a stack technique. Expressions are transformed from a prefix notation into a postfix notation according to the stack principle. SIL is then compiled into Cint where the ComLisp data structures (s-expressions) and operators are implemented in linear integer memory using a run-time stack and a heap. These two steps are machine independent. In the compiler's backend, first control structures (loops, conditionals) of the intermediate language Cint are implemented by linear assembler code with relative jumps, the infinite memory model of Cint is realized on the finite Transputer memory, and the basic Cint statements for accessing the stack and heap are implemented by a sequence of assembler instructions. 
The fourth phase consists of the implementation of code instructions with large and negative word operands, while the last phase is concerned with the integration of the assembly program into the memory. The context of this work is the joint research effort Verifix aiming at developing methods for the construction of correct compilers for realistic programming languages and real target architectures.}, year = {2002}, institution = {University of Ulm}, number = {UIB 03-02}, file_url = {http://vts.uni-ulm.de/docs/2005/5349/vts\_5349.pdf} } @Inproceedings { Volz02ExecutionSystem, author = {Volz, Edwin and Martens, Alke and Seitz, Alexander}, title = {An Execution System for Variable Tutoring Processes}, abstract = {A multitude of adaptive computer-based training systems have been implemented in the last years. Whereas several aspects of these systems necessarily are dependent on the application domain, including the learning style and the system type, some other parts of the systems are very similar: usually they require an execution component, which steers and controls the interaction between the learner and the underlying data of the application domain, e.g. databases. For reusing purposes we have developed a highly flexible and adaptable domain independent execution system that allows the realization of different learning strategies in a web-based scenario. In addition to steering the interaction between learner and data, the execution system is enriched with model checking facilities. They provide a verification mechanism for learning material and thus can be used to support the authors. 
The described system is integrated into the web- and case-based project \dqDocs 'N Drugs\dq.}, year = {2002}, DOI = {10.1109/CIE.2002.1186005}, booktitle = {Proceedings of the International Conference on Computers in Education (ICCE '02)}, pages = {559--563} } @Inproceedings { Dold:01:verification, author = {Dold, Axel and Vialard, Vincent}, title = {A Mechanically Verified Compiling Specification for a Lisp Compiler}, abstract = {We report on an ongoing effort in mechanically proving correct a compiling specification for a bootstrap compiler from ComLisp (a subset of ANSI Common Lisp sufficiently expressive to serve as a compiler implementation language) to binary Transputer code using the PVS system. The compilation is carried out in four steps through a series of intermediate languages. This paper focuses on the first phase, namely, the compilation of ComLisp to the stack-intermediate language SIL, where parameter passing is implemented by a stack technique. The context of this work is the joint research effort Verifix aiming at developing methods for the construction of correct compilers for realistic programming languages.}, year = {2001}, booktitle = {FST TCS 2001: Foundations of Software Technology and Theoretical Computer Science}, volume = {2245}, publisher = {Springer}, series = {Lecture Notes in Computer Science}, editor = {Ramesh Hariharan and Madhavan Mukund and V. Vinay}, pages = {144--155}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2001/Dold01LispCompiler.pdf} } @Inproceedings { ns01ad, author = {Martens, Alke and Bernauer, Jochen and Illmann, Torsten and Seitz, Alexander}, title = {Docs 'n Drugs - The Virtual Polyclinic. An Intelligent Tutoring System for Web-Based and Case-Oriented Training in Medicine}, abstract = {Since the beginning of the year 2000 medical students of the University of Ulm are working in their curriculum with the web-based and case-oriented tutoring system \dqDocs 'n Drugs \textendash the virtual Polyclinic\dq. 
The system consists of different subsystems and services. One subsystem is the Training System. It is based on three models: the Tutoring Process Model, the Case Knowledge Model and the Medical Knowledge Model. They describe the tutoring process as a series of nodes and steps, depict the structure of the medical cases, and provide the structure of the medical knowledge respectively. Case knowledge and medical knowledge form the expert knowledge of the medical domain. Together with the tutoring process, they build the basis for automatic intelligent tutoring. After giving a deeper insight into the system architecture and the training case structure, an informal evaluation shows a first feedback of the learners.}, year = {2001}, booktitle = {Proceedings of the American Medical Informatics Conference}, publisher = {AMIA}, address = {Washington, USA}, pages = {433--437}, file_url = {http://www.ncbi.nlm.nih.gov/pmc/articles/PMC2243325/pdf/procamiasymp00002-0472.pdf} } @Inproceedings { PvH01, author = {Pfeifer, Holger and von Henke, Friedrich}, title = {Formal Analysis for Dependability Properties: the Time-Triggered Architecture Example}, abstract = {This paper describes the mechanized formal verification we have performed on some of the crucial algorithms used in the Time-Triggered Architecture (TTA) for safety-critical distributed control. 
We outline the approach taken to formally analyse the clock synchronization algorithm and the group membership service of TTA, summarize our experience and describe remaining challenges.}, year = {2001}, booktitle = {8th IEEE International Conference on Emerging Technologies and Factory Automation (ETFA 2001)}, publisher = {IEEE}, address = {Antibes Juan-les-Pins}, pages = {343--352}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2001/Pfeifer01DependabilityProperties.pdf} } @Inproceedings { Biundo01FromCrisisToRelief, author = {Biundo, Susanne and Schattenberg, Bernd}, title = {From Abstract Crisis to Concrete Relief (A Preliminary Report on Combining State Abstraction and HTN Planning)}, abstract = {Flexible support for crisis management can definitely be improved by making use of advanced planning capabilities. However, the complexity of the underlying domain often causes intractable efforts in modeling the domain as well as a huge search space to be explored by the system. A way to overcome these problems is to impose a structure not only according to tasks but also according to relationships between and properties of the objects involved, thereby using so-called decomposition axioms. We outline the prototype of a system that is capable of tackling planning for complex application domains. It is based on a well-founded combination of action and state abstractions. The paper presents the basic techniques and provides a formal semantic foundation of the approach. 
It introduces the planning system and illustrates its underlying principles by examples taken from the crisis management domain used in our ongoing project.}, year = {2001}, booktitle = {Proceedings of the 6th European Conference on Planning (ECP 2001)}, publisher = {AAAI Press}, pages = {157--168}, tags = {Planning}, web_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2001/Biundo01FromCrisisToReliefSlides.pdf}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2001/Biundo01FromCrisisToRelief.pdf} } @Incollection { Luther01ImplicitSyntax, author = {Luther, Marko}, title = {More On Implicit Syntax}, abstract = {Proof assistants based on type theories, such as Coq and Lego, allow users to omit subterms on input that can be inferred automatically. While those mechanisms are well known, ad-hoc algorithms are used to suppress subterms on output. As a result, terms might be printed identically although they differ in hidden parts. Such ambiguous representations may confuse users. Additionally, terms might be rejected by the type checker because the printer has erased too much type information. This paper addresses these problems by proposing effective erasure methods that guarantee successful term reconstruction, similar to the ones developed for the compression of proof-terms in Proof-Carrying Code environments. 
Experiences with the implementation in Typelab proved them both efficient and practical.}, year = {2001}, isbn = {978-3-540-42254-9}, DOI = {10.1007/3-540-45744-5\_31}, booktitle = {Automated Reasoning - First International Joint Conference (IJCAR)}, volume = {2083}, publisher = {Springer Berlin Heidelberg}, series = {Lecture Notes in Computer Science}, editor = {Gor\'{e}, Rajeev and Leitsch, Alexander and Nipkow, Tobias}, pages = {386--400}, file_url = {www.informatik.uni-ulm.de/ki/Papers/ijcar01.pdf} } @Article { Schattenberg2001PlanningAgents, author = {Schattenberg, Bernd and Uhrmacher, Adelinde M.}, title = {Planning Agents in James}, abstract = {Testing is an obligatory step in developing multi-agent systems. For testing multi-agent systems in virtual, dynamic environments, simulation systems are required that support a modular, declarative construction of experimental frames, that facilitate the embeddence of a variety of agent architectures, and that allow an efficient parallel, distributed execution. We introduce the system JAmes (A Java-Based Agent Modeling Environment for Simulation). In James agents and their dynamic environment are modeled as reflective, time triggered state automata. Its possibilities to compose experimental frames based on predefined components, to express temporal interdependencies, to capture the phenomenon of pro-activeness and reflectivity of agents are illuminated by experiments with planning agents. The underlying planning system is a general purpose system, about which no empirical results exist besides traditional static benchmark tests. 
We analyze the interplay between heuristics for selecting goals, viewing range, commitment strategies, explorativeness, and trust in the persistence of the world and uncover properties of the agent, the planning engine and the chosen test scenario: Tileworld.}, year = {2001}, DOI = {10.1109/5.910852}, journal = {Proceedings of the IEEE}, volume = {89}, pages = {158--173}, number = {2}, tags = {Planning}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2001/Schattenberg2001PlanningAgents.pdf} } @Inproceedings { 943863, author = {Illmann, Torsten and Seitz, Alexander and Martens, Alke and Weber, Michael}, title = {Structure of training cases in Web-based case-oriented training systems}, abstract = {In this paper we discuss the structure of training cases in web-based and case-oriented training systems. We combine the benefits of systems that support a structural representation of knowledge underlying a training case and those that allow a fine granularity of didactical aspects. Our results emerged from the development of a training system in medicine, \dqDocs 'n Drugs - The Virtual Polyclinic\dq. The abstraction from the proposed training case structure offers a good basis for general case-oriented training systems, an easy exchange of tutoring data, and the web-based indexing of learning material.}, year = {2001}, DOI = {10.1109/ICALT.2001.943863}, booktitle = {Proceedings of the IEEE International Conference on Advanced Learning Technologies 2001}, pages = {90--93} } @Inproceedings { ns00m, author = {Martens, Alke and Uhrmacher, Adelinde M.}, title = {How to Execute a Tutoring Process}, abstract = {The process of tutoring can be perceived as a discrete dynamic process and can be treated as such. Based on JAMES, modeling and execution are distinguished. The tutoring process is described in a compositional hierarchical manner. 
Agents are used to represent the user which moves from one interaction context to the next, steering and adapting the structure of the tutoring process. The first prototype has used the original JAMES execution mechanism. In contrast to the original application domain, a tutoring process comprises many facets of discrete control rather than of discrete event simulation. Therefore, the execution layer of JAMES has to be replaced to substitute \dqsimulation time\dq with \dqreal time\dq and to integrate the \dqhuman in the loop\dq into the execution more naturally.}, year = {2000}, booktitle = {Proceedings of Artificial Intelligence, Simulation and Planning}, publisher = {SCS}, address = {San Diego}, pages = {114--122} } @Incollection { Dold00GenericProgramming, author = {Dold, Axel}, title = {Software Development in PVS Using Generic Development Steps}, abstract = {This paper is concerned with a mechanized formal treatment of the transformational software development process in a unified framework. We utilize the PVS system to formally represent, verify and correctly apply generic software development steps and development methods from different existing transformational approaches. We illustrate our approach by representing the well-known divide-and-conquer paradigm, two optimization steps, and by formally deriving a mergesort program.}, year = {2000}, isbn = {978-3-540-41090-4}, DOI = {10.1007/3-540-39953-4\_12}, booktitle = {Generic Programming}, volume = {1766}, publisher = {Springer Berlin Heidelberg}, series = {Lecture Notes in Computer Science}, editor = {Jazayeri, Mehdi and Loos, R\"{u}diger G.K. and Musser, David R.}, pages = {146--161}, keywords = {formal verification; mechanized theorem proving; generic development steps; transformational software development}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2000/Dold00GenericProgramming.pdf} } @Inproceedings { ns01b, author = {Uhrmacher, Adelinde M. 
and Kullick, Bernd G.}, title = {Plug and Test - Software Agents in Virtual Environments}, abstract = {James, a Java based agent modeling environment for simulation has been developed to support the compositional construction of test beds for multi-agent systems and their execution in distributed environments. The modeling formalism of James imposes only few constraints on the modeling of agents and facilitates a \dqplug and test\dq with pieces of agent code which has been demonstrated in earlier work. However, even entire agents can be run in James as they are run in their run-time environment. The integration of agents as a whole is based on model templates which serve as the agents' interface and representative during the simulation run. The effort which is put into defining model templates for selected agent systems obviates the need for the single agent programmer to get acquainted with the underlying modeling and simulation formalism. Instead, the agent programmer can compose the experimental frame and test the programmed agents as they are. The approach is illustrated with agents of the mobile agent system, Mole}, year = {2000}, DOI = {10.1109/WSC.2000.899162}, booktitle = {Proceedings of the 2000 Winter Simulation Conference}, volume = {2}, publisher = {Wyndham Palace Resort \& Spa}, address = {Orlando, Florida, USA}, editor = {Joines, J.A. and Barton, R.R. and Kang, K. and Fishwick, P.A.}, pages = {1722--1729} } @Inproceedings { Liebig-et-al!00, author = {Liebig, Thorsten and Finkenzeller, Dieter and Luther, Marko}, title = {KB-VISION: A Tool for Graphical Manipulation and Visualization of Domain Models}, abstract = {Even if a problem solving method and a domain ontology has been identified, there still remains the problem of adding sufficient and consistent domain knowledge to a knowledge processing system. 
Our KB-Vision system supports this knowledge acquisition process by making use of a 3D graphical user interface in which domain objects can be easily created and composed to a domain model. Using object sensitive manipulation options and by evaluating each graphical action in the underlying knowledge representation formalism, the system ensures a consistent domain model. It can also be used as a knowledge-based graphical simulation environment for various reasoning components (e.g. planners, path generators).}, year = {2000}, booktitle = {Proceedings of the 10th European-Japanese Conference on Information Modelling and Knowledge Bases (EJC 2000)}, publisher = {IOS Press}, series = {Frontiers in Artificial Intelligence and Applications}, pages = {161--168}, keywords = {ontology}, tags = {SemanticTechnologies,KnowledgeModeling}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2000/ejc00-liebig.pdf} } @Inproceedings { ns01c, author = {Uhrmacher, Adelinde M. and Gugler, K.}, title = {Distributed, Parallel Simulation of Multiple, Deliberative Agents}, abstract = {Multi agent systems comprise multiple, deliberative agents embedded in and recreating patterns of interactions. Each agent's execution consumes considerable storage and calculation capacities. For testing multi agent systems, distributed parallel simulation techniques are required that take the dynamic pattern of composition and interaction of multi-agent systems into account. Analyzing the behavior of agents in virtual, dynamic environments necessitates relating the simulation time to the actual execution time of agents. Since the execution time of deliberative components can hardly be foretold, conservative techniques based on lookahead are not applicable. On the other hand, optimistic techniques become very expensive if mobile agents and the creation and deletion of model components are affected by a rollback. 
The developed simulation layer of JAMES (a Java Based Agent Modeling Environment for Simulation) implements a moderately optimistic strategy which splits simulation and external deliberation into different threads and allows simulation and deliberation to proceed concurrently by utilizing simulation events as synchronization points}, year = {2000}, DOI = {10.1109/PADS.2000.847150}, booktitle = {Proceedings of the 14th Workshop on Parallel and Distributed Simulation}, publisher = {IEEE Computer Society}, address = {Los Alamitos, California}, editor = {Bruce, David and Donatiello, Lorenzo and Turner, Stephen}, pages = {101--108}, file_url = {http://ieeexplore.ieee.org/stamp/stamp.jsp?tp=\&arnumber=847150} } @Inproceedings { Pfe00, author = {Pfeifer, Holger}, title = {Formal Verification of the TTP Group Membership Algorithm}, abstract = {The paper describes the formal verification of a fault-tolerant group membership algorithm that constitutes one of the central services of the Time-Triggered Protocol (TTP). The group membership algorithm is formally specified and verified using a diagrammatic representation of the algorithm. We describe the stepwise development of the diagram and outline the main part of the correctness proof. The verification has been mechanically checked with the PVS theorem prover.}, year = {2000}, booktitle = {Formal Methods for Distributed System Development -- Proceedings of FORTE XIII / PSTV XX 2000}, publisher = {Kluwer Academic Publishers}, address = {Pisa, Italy}, editor = {T. Bolognesi and D. 
Latella}, pages = {3--18}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2000/forte-pstv2000.pdf} } @Inproceedings { ns01k, author = {Martens, Alke and Illmann, Torsten and Seitz, Alexander and Scheuerer, Claudia and Weber, Michael and Bernauer, Jochen}, title = {CSCW in webbasierten Lehrsystemen}, abstract = {Fallorientiertes, webbasiertes Lernen gibt Studierenden die M\"{o}glichkeit, das von ihnen im Laufe des Studiums erworbene theoretische Wissen anhand konkreter Fallsituationen unabh\"{a}ngig von Zeit und Ort zu trainieren. Insbesondere in dem Bereich, der Anwendungsschwerpunkt des im Projekt \dqDocs 'n Drugs - Die Virtuelle Poliklinik\dq entwickelten Lehrsystems ist, der Medizin, bietet diese neue Art des Lernens eine sinnvolle und interessante Erg\"{a}nzung zu klassischen Lehrformen. Zus\"{a}tzlich zu der M\"{o}glichkeit, jedem Studierenden einzeln den Zugriff auf medizinische Lehrf\"{a}lle im Web zu bieten, arbeitet das System auch mit einem gezielten Einsatz von webbasierter Telematik und CSCW-Komponenten. Hierdurch wird nicht nur die Interaktion unter Studierenden und zwischen Studierenden und Tutoren erm\"{o}glicht, sondern auch der Austausch unter Tutoren bzw. unter Fallautoren gef\"{o}rdert.}, year = {2000}, booktitle = {Proceedings of 5. Workshop der AG CBT in der Medizin der GMDS}, pages = {90--95} } @Article { ns00ap, author = {Uhrmacher, Adelinde M. and Seitz, Alexander}, title = {Case-Based Simulation of Ecological and Biological Systems}, abstract = {Simulation means experimenting with a model aimed at analysing the behaviour of a dynamic system. The causal relationships of ecological and biological systems are often not completely known, which hampers the application of quantitative and even qualitative deductive simulation methodologies. If empirical studies that describe the system's behaviour are available, case-based methods can be used to develop a behaviour model from a set of data, i.e. 
prototypical cases. In contrast to purely inductive methods, case-based approaches directly apply cases to a given situation. Thereby, the specificity of cases is preserved for simulation. Two approaches and their applications illustrate principles, perspectives, and problems of case-based simulation.}, year = {2000}, journal = {Journal of Systems Analysis, Modelling and Simulation (SAMS)}, volume = {39}, pages = {215--234}, number = {2}, web_url = {http://portal.acm.org/citation.cfm?id=363182.363202} } @Inproceedings { ns01j, author = {Martens, Alke and Bernauer, Jochen and Illmann, Torsten and Scheuerer, Claudia and Seitz, Alexander}, title = {A Flexible Architecture for Constructing and Executing Tutoring Processes}, abstract = {The tutoring process of conventional case-oriented medical training systems can be characterised as either guided or unguided. In contrast to that, the aim of the system \dqDocs'n Drugs\dq is to distinguish between different levels of guidance. The author can realise the tutoring case as a guided, a half guided or an unguided tutoring process. The system architecture distinguishes between an authoring system and a tutoring system. Fundaments of these are the tutoring process model and the case data model. This structure allows the reuse of elements of existing tutoring cases. The tutoring cases can be realised in different languages, e.g. German and English.}, year = {2000}, booktitle = {Proceedings of the 45. Jahrestagung der GMDS, Hannover 2000}, pages = {494--498} } @Phdthesis { Dold:PhD, author = {Dold, Axel}, title = {Formal Software Development using Generic Development Steps}, abstract = {This dissertation is concerned with a mechanized formal treatment of the transformational software development process in a unified framework. As a formal vehicle, the specification and verification system PVS is utilized to integrate development steps and development methods from different existing transformational approaches. 
Integration comprises the formalization (that is, a representation in the PVS specification language), the verification, and the correct application of the generic development steps. Transformations of different kind and complexity are integrated into this framework. They include well-known algorithmic paradigms and problem solving strategies such as global-search and divide-and-conquer, as well as transformations for the modification of functional specifications such as transformations from the Bird-Meertens Formalism like fusion or Horner's rule, transformation steps for optimizing recursive functions, transformations on the level of procedural programs, and implementations of data structures. All software artifacts are represented generically within parameterized PVS theories which specify the semantic requirements on the parameters by means of assumptions and define the result of the transformation. Based on the semantic requirements, correctness of the generic step can be proved once and for all. Application of such a development step to a given problem is then carried out by providing a concrete instantiation for the parameters and verifying that it satisfies the theory requirements. Some of the problem-solving strategies are organized by means of taxonomies (that is, hierarchies of specializations for specific problem classes and / or data structures). This approach greatly improves the applicability of the transformations and leads to an elegant structure of the algorithmic knowledge captured in the development steps. As a basis for the realization of refinement hierarchies and for formal software development in PVS, a notion of refinement between parameterized PVS theories and a development methodology is presented which is based on correct partial instantiations of parameterized theories. The usability of the approach is illustrated by many examples of different complexity. 
They include, among others, the derivation of several search and sorting algorithms, the derivation of a Prolog interpreter, the implementation of a symbol table used in compilers, and finally, as a larger non-trivial case-study: the construction of a compiler program from a given relational compiling specification specifying the translation of a Common Lisp-like language into a stack-based intermediate language.}, type = {PhD Thesis}, year = {2000}, school = {Universit\"{a}t Ulm}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/2000/Dold00Thesis.pdf} } @Inproceedings { Dold99SoftwareDevelopment, author = {Dold, Axel}, title = {Formal Software Development using Generic Development Steps}, year = {1999}, booktitle = {Proceedings of the 11th Nordic Workshop on Programming Theory}, event_place = {Uppsala, Sweden} } @Incollection { Pfab99Verification, author = {Pfab, Stephan and Rue{\ss}, Harald and Owre, Sam and von Henke, Friedrich}, title = {Towards Light-Weight Verification and Heavy-Weight Testing}, abstract = {We give an overview on our approach to symbolic simulation in the PVS theorem prover and demonstrate its usage in the realm of validation by executing specification on incomplete data. In this way, one can test executable models for a possibly infinite class of test vectors with one run only. 
One of the main benefits of symbolic simulation in a theorem proving environment is enhanced productivity by early detection of errors and increased confidence in the specification itself.}, year = {1999}, isbn = {978-3-211-83282-0}, DOI = {10.1007/978-3-7091-6355-9\_14}, booktitle = {Tool Support for System Specification, Development and Verification}, publisher = {Springer Vienna}, series = {Advances in Computing Science}, editor = {Berghammer, Rudolf and Lakhnech, Yassine}, pages = {189--200}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/1999/Pfab99Verification.pdf} } @Inproceedings { hoefling-et-al!99, author = {H\"{o}fling, Bj\"{o}rn and Liebig, Thorsten and Webel, Lars and R\"{o}sner, Dietmar}, title = {Towards an Ontology for Substances and Related Actions}, abstract = {Modelling substances in knowledge representation has to be different from the treatment of discrete objects. For example liquids need a different approach to individuation. We propose an ontology which represents physical states and other properties of substances in a uniform way. Based on this we describe how to model a hierarchy of actions that can deal with such substances. For these actions a general distinction is made with respect to the type of properties the actions are changing. 
Further we describe an implementation in description logic allowing especially the definition of actions by specialization of more abstract actions and the inheritance of pre- and postconditions.}, year = {1999}, DOI = {10.1007/3-540-48775-1\_12}, booktitle = {Proceedings of the 11th European Workshop on Knowledge Acquisition, Modeling and Management (EKAW'99)}, volume = {1621}, publisher = {Springer Berlin / Heidelberg}, series = {Lecture Notes in Computer Science}, editor = {Fensel, Dieter and Studer, Rudi}, pages = {191--206}, keywords = {ontology}, tags = {KnowledgeModeling}, file_url = {fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/1999/ekaw99-liebig.pdf} } @Phdthesis { Kienzler99PhD, author = {Kienzler, Friedemann}, title = {Synthese versus Analyse in modellbasierten KI-Planungssystemen? DIAKON - ein auto-adaptiver diagnostischer L\"{o}sungsansatz f\"{u}r Aktionsplanungs- und Konfigurierungsprobleme}, type = {PhD Thesis}, year = {1999}, school = {Universit\"{a}t Ulm} } @Inproceedings { xu1999rigorous, author = {Xu, Jie and Randell, Brian and Romanovsky, A. and Stroud, Robert J. and Zorzo, Avelino F. and Canver, Ercument and von Henke, Friedrich}, title = {Rigorous development of a safety-critical system based on coordinated atomic actions}, abstract = {This paper describes our experience using coordinated atomic (CA) actions as a system structuring tool to design and validate a sophisticated control system for a complex industrial application that has high reliability and safety requirements. Our study is based on the \dqFault-Tolerant Production Cell\dq, which represents a manufacturing process involving redundant mechanical devices (provided in order to enable continued production in the presence of machine faults). The challenge posed by the model specification is to design a control system that maintains specified safety and liveness properties even in the presence of a large number and variety of device and sensor failures. 
We discuss in this paper: i) a design for a control program that uses CA actions to deal with both safety-related and fault tolerance concerns, and ii) the formal verification of this design based on the use of model-checking. We found that CA action structuring facilitated both the design and verification tasks by enabling the various safety problems (e.g. clashes of moving machinery) to be treated independently. The formal verification activity was performed in parallel with the design activity; the interaction between them resulted in a combined exercise in \dqdesign for validation\dq.}, year = {1999}, DOI = {10.1109/FTCS.1999.781035}, booktitle = {Twenty-Ninth Annual International Symposium on Fault-Tolerant Computing. Digest of Papers.}, pages = {68--75} } @Inproceedings { PR:99, author = {Pfeifer, Holger and Rue{\ss}, Harald}, title = {Polytypic Proof Construction}, abstract = {This paper deals with formalizations and verifications in type theory that are abstracted with respect to a class of datatypes; i.e. polytypic constructions. The main advantage of these developments are that they can not only be used to define functions in a generic way but also to formally state polytypic theorems and to synthesize polytypic proof objects in a formal way. This opens the door to mechanically proving many useful facts about large classes of datatypes once and for all.}, year = {1999}, booktitle = {Proceedings of the 12th International Conference on Theorem Proving in Higher Order Logics}, volume = {1690}, publisher = {Springer}, series = {Lecture Notes in Computer Science}, editor = {Y. Bertot and G. Dowek and A. Hirschowitz and C. Paulin and L. 
Th\'{e}ry}, pages = {55--72}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/1999/Pfeifer99Polytypic.pdf} } @Inproceedings { rintanen:numeric, author = {Rintanen, Jussi and Jungholt, Hartmut}, title = {Numeric State Variables in Constraint-Based Planning}, abstract = {We extend a planning algorithm to cover simple forms of arithmetics. The operator preconditions can refer to the values of numeric variables and the operator postconditions can modify the values of numeric variables. The basic planning algorithm is based on techniques from propositional satisfiability testing and does not restrict to forward or backward chaining. When several operations affect a numeric variable by increasing and decreasing its value in parallel, the effects have to be combined in a meaningful way. This problem is especially acute in planning algorithms that maintain an incomplete state description of every time point of a plan execution. The approach we take requires that for operators that are executed in parallel, all linearizations of the operations to total orders behave equivalently. We provide an efficient and general solution to the problem.}, year = {1999}, booktitle = {Recent Advances in AI Planning, Proceedings of the 5th European Conference on Planning (ECP '99)}, pages = {109--121}, file_url = {http://users.ics.aalto.fi/rintanen/jussi/papers/RintanenJungholt99.pdf} } @Inproceedings { ns00n, author = {Martens, Alke and Uhrmacher, Adelinde M.}, title = {Modelling Tutoring as a Dynamic Process - A Discrete Event Simulation Approach}, abstract = {Most of the work in the area of simulation and education is dedicated to the process of teaching students the essentials of dynamic systems by simulation. Thereby the system to be taught becomes the subject to be modeled and simulated. However, the process of tutoring can itself be perceived as a dynamic process and can be treated as such. 
To support a flexible and intelligent tutoring process we distinguish between model and simulation level. At the model level we employ a state-based, modular, and hierarchical agent-oriented model design. The user is represented as an agent which moves through the tutoring process accessing its structure. Thus, agents and their changing interaction structure become central.}, year = {1999}, booktitle = {Proceedings of European Simulation Multiconference ESM'99}, publisher = {SCS}, pages = {111--119}, file_url = {http://wwwmosi.informatik.uni-rostock.de/diemosiris/static/Papers\_archiv/esm99.pdf} } @Techreport { Canver99ModelChecking, author = {Canver, Ercument}, title = {Model-Checking zur Analyse von Message Sequence Charts \"{u}ber Statecharts}, abstract = {Die Unified Modeling Language (UML) enth\"{a}lt sowohl Statecharts als auch mit Sequence Diagrams eine Variante von Message Sequence Charts (MSCs). Da beide eingesetzt werden k\"{o}nnen, um verschiedene Aspekte eines Systems zu beschreiben, ist es sinnvoll, die Konsistenz zwischen beiden Beschreibungstechniken zu pr\"{u}fen. Im vorliegenden Bericht werden beide Ans\"{a}tze miteinander notationell und semantisch integriert und eine formale Konsistenzbeziehung formuliert. Zu diesem Zweck werden hier die MSC-Notation zu MSCCTL erweitert, die Semantik von Statecharts in Termini von Transitionssystemen beschrieben und in MSCCTL formulierte Anforderungen nach CTL abgebildet. Die eigentliche Analyse erfolgt unter Einsatz eines Model-Checkers. Dazu sind hier Abbildungsvorschriften f\"{u}r einen speziellen Model-Checker angegeben. 
Aufgrund der mit der Pr\"{u}fung von CTL-Formeln verbundenen Komplexit\"{a}t ist hier zudem ein Verfahren angegeben, um die Beweisverpflichtungen in eine einfacher zu handhabende Form umzusetzen.}, type = {Ulmer Informatik-Berichte}, year = {1999}, institution = {Universit\"{a}t Ulm}, web_url = {http://d-nb.info/1016487037}, file_url = {http://vts.uni-ulm.de/docs/2009/7093/vts\_7093\_9943.pdf} } @Inproceedings { rintanen:improvements, author = {Rintanen, Jussi}, title = {Improvements to the Evaluation of Quantified Boolean Formulae}, abstract = {We present a theorem-prover for quantified Boolean formulae and evaluate it on random quantified formulae and formulae that represent problems from automated planning. Even though the notion of quantified Boolean formula is theoretically important, automated reasoning with QBF has not been thoroughly investigated. Universal quantifiers are needed in representing many computational problems that cannot be easily translated to the propositional logic and solved by satisfiability algorithms. Therefore efficient reasoning with QBF is important. The Davis-Putnam procedure can be extended to evaluate quantified Boolean formulae. A straightforward algorithm of this kind is not very efficient. We identify universal quantifiers as the main area where improvements to the basic algorithm can be made. We present a number of techniques for reducing the amount of search that is needed, and evaluate their effectiveness by running the algorithm on a collection of formulae obtained from planning and generated randomly. 
For the structured problems we consider, the techniques lead to a dramatic speed-up.}, year = {1999}, booktitle = {Proceedings of the 16th International Joint Conference on Artificial Intelligence (IJCAI 1999)}, publisher = {Morgan Kaufmann Publishers}, pages = {1192--1197}, file_url = {http://www.eecs.berkeley.edu/~russell/classes/cs289/f04/readings/Rintanen:1999.pdf} } @Inproceedings { dold:formal, author = {Dold, Axel and Vialard, Vincent}, title = {Formal Verification of a Compiler Back-End Generic Checker Program}, abstract = {This paper reports on a non-trivial case-study carried out in the context of the German correct compiler construction project Verifix. The PVS system is here used as a vehicle to formally represent and verify a generic checker routine (run-time result verification) used in compiler back-ends. The checker verifies the results of a sophisticated labeling process of intermediate language expression trees with instances of compilation rule schemata. Starting from an operational specification (i.e. a set of recursive PVS functions), necessary declarative properties of the checker are formally stated and proved correct.}, year = {1999}, booktitle = {Proceedings of the Andrei Ershov Third International Conference on Perspectives of System Informatics (PSI'99)}, publisher = {Springer-Verlag}, series = {Lecture Notes in Computer Science}, pages = {470--480}, volume = {1755} } @Inproceedings { PSvH:99:DCCA7, author = {Pfeifer, Holger and Schwier, Detlef and von Henke, Friedrich}, title = {Formal Verification for Time-Triggered Clock Synchronization}, abstract = {Distributed dependable real-time systems crucially depend on fault-tolerant clock synchronization. This paper reports on the formal analysis of the clock synchronization service provided as an integral feature by the Time-Triggered Protocol (TTP), a communication protocol particularly suitable for safety-critical control applications, such as in automotive \dqby-wire\dq systems. 
We describe the formal model extracted from the TTP specification and its formal verification, using the PVS system. Verification of the central clock synchronization properties is achieved by linking the TTP model of the synchronization algorithm to a generic derivation of the properties from abstract assumptions, essentially establishing the TTP algorithm as a concrete instance of the generic one by verifying that it satisfies the abstract assumptions. We also show how the TTP algorithm provides the clock synchronization that is required by a previously proposed general framework for verifying time-triggered algorithms.}, year = {1999}, booktitle = {Dependable Computing for Critical Applications 7}, volume = {12}, publisher = {IEEE Computer Society}, series = {Dependable Computing and Fault-Tolerant Systems}, editor = {C. Weinstock and J. Rushby}, pages = {207--226}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/1999/Pfeifer99ClockSynchronization.pdf} } @Inproceedings { canver:formal, author = {Canver, Ercument and von Henke, Friedrich}, title = {Formal Development of Object-Based Systems in a Temporal Logic Setting}, abstract = {This paper presents TLO, an approach to the formal development of object-based systems in a temporal logic framework. The behavior of an object-based system is viewed as derivable from the behaviors of its constituent component objects. Temporal logic is a formalism well suited for specifying behavior of concurrent systems; it also provides conceptually simple notions of composition and refinement: Composition of objects is expressed as conjunction of the associated component specifications. The refinement relation between a low-level and a high-level specification requires that the former specification implies the latter. 
Specifically in an object-based approach, systems and their components need to be viewed as open systems: Each object guarantees some service (behavior), provided its environment conforms to certain assumptions. Hence, such components are most appropriately specified in an assumption/guarantee style. TLO adopts TLA as the underlying logical formalism. It encompasses a specification language for object-based designs and a corresponding method for specification and verification. The concepts are illustrated by an example involving both functional and fault-tolerance requirements.}, year = {1999}, DOI = {10.1007/978-0-387-35562-7\_33}, booktitle = {Formal Methods for Open Object-Based Distributed Systems (FMOODS '99)}, volume = {10}, publisher = {Springer US}, pages = {419--436} } @Article { rintanen:constructing, author = {Rintanen, Jussi}, title = {Constructing Conditional Plans by a Theorem-Prover}, abstract = {The research on conditional planning rejects the assumptions that there is no uncertainty or incompleteness of knowledge with respect to the state and changes of the system the plans operate on. Without these assumptions the sequences of operations that achieve the goals depend on the initial state and the outcomes of nondeterministic changes in the system. This setting raises the questions of how to represent the plans and how to perform plan search. The answers are quite different from those in the simpler classical framework. In this paper, we approach conditional planning from a new viewpoint that is motivated by the use of satisfiability algorithms in classical planning. Translating conditional planning to formulae in the propositional logic is not feasible because of inherent computational limitations. Instead, we translate conditional planning to quantified Boolean formulae. 
We discuss three formalizations of conditional planning as quantified Boolean formulae, and present experimental results obtained with a theorem-prover.}, year = {1999}, DOI = {10.1613/jair.591}, journal = {Journal of Artificial Intelligence Research (JAIR)}, pages = {323--352}, file_url = {http://arxiv.org/pdf/1105.5465} } @Book { ns00b, title = {1999 International Conference on Web-Based Modeling and Simulation}, year = {1999}, volume = {31}, publisher = {SCS}, address = {San Diego, CA}, editor = {A. Bruzzone and Adelinde M. Uhrmacher and E. Page}, number = {3} } @Inproceedings { seitz:a, author = {Seitz, Alexander}, title = {A Case-Based Methodology for Planning Individualized Case Oriented Tutoring}, abstract = {Case oriented tutoring gives students the possibility to practice their acquired theoretical knowledge in the context of concrete cases. Accordingly, tutoring systems for individualized learning have to take the skills of students at applying their knowledge to problem solving into account. This paper describes a case-based methodology for planning tutoring processes depending on the skills of individual users. We develop methods for retrieving tutoring plans of users with similar skills and present adaptation techniques for improving these plans based on the student's behavior during the corresponding tutoring process. The developed methodology is based on the notion that a student has to perform threads of associations in the process of problem solving.}, year = {1999}, DOI = {10.1007/3-540-48508-2\_23}, booktitle = {Case-Based Reasoning Research and Development - Third International Conference on Case-Based Reasoning (ICCBR '99)}, publisher = {Springer Berlin Heidelberg}, pages = {318--328} } @Article { ns00e, author = {Seitz, Alexander and Uhrmacher, Adelinde M. 
and Damm, D.}, title = {A Case-Based Prediction of Experimental Studies}, abstract = {Case-based approaches predict the behaviour of dynamic systems by analysing a given experimental setting in the context of others. To select similar cases and to control adaptation of cases, they employ general knowledge. If that is neither available nor inductively derivable, the knowledge implicit in cases can be utilized for a case-based ranking and adaptation of similar cases. We introduce the system OASES and its application to medical experimental studies to demonstrate this approach.}, year = {1999}, DOI = {10.1016/S0933-3657(98)00057-8}, journal = {International Journal on Artificial Intelligence in Medicine}, volume = {15}, pages = {255--273}, number = {3} } @Inproceedings { ns01m, author = {Seitz, Alexander and Martens, Alke and Bernauer, Jochen and Scheuerer, Claudia and Thomsen, Jens}, title = {An Architecture for Intelligent Support of Authoring and Tutoring in Medical Multimedia Learning Environments}, abstract = {Both representing cases as fixed scripts as well as the development of initial expert systems as a basis for computer based tutoring systems is particularly difficult and time intensive in a multi-institutional development project. On the other hand, structured systematic knowledge and a common terminology is necessary for automatic generation of information and quiz pages as well as automatic feedback. 
Therefore we provide an architecture that relates tutoring cases to general concept representation systems containing common terminologies, and facilitates an incremental acquisition of systematic knowledge.}, year = {1999}, booktitle = {Proceedings of the World Conference on Educational Multimedia, Hypermedia}, publisher = {AACE}, address = {Chesapeake, VA}, editor = {Piet Kommers and Griff Richards}, pages = {852--857}, file_url = {http://www.informatik.uni-rostock.de/~martens/Papers/dnd\_edmed99.pdf} } @Inproceedings { ns01l, author = {Illmann, Torsten and Weber, Michael and Martens, Alke and Seitz, Alexander}, title = {A Pattern-Oriented Design of a Web-Based and Case-Oriented Multimedia Training System in Medicine}, abstract = {In this paper, we introduce the design of a web-based and case-oriented multimedia training system in medicine. It allows students to learn systematic knowledge and decision making in medicine by computer-based solving of guided and unguided medical cases. We show how to meet complex requirements such as web-based client-server architecture, multiuser capability, telecollaboration among users, storage of large database models, database access over heterogeneous networks and knowledge-based intelligent authoring and tutoring. We further explain, how a design pattern, the presentation-abstraction-control pattern, can be applied to make the overall design of such a complex system. It helps to identify and integrate many different and modular designed components. It additionally provides the possibility to easily exchange certain functionality of the system without affecting other components. 
Finally we will show, that such a system architecture also applies to case-oriented training systems in other subjects.}, year = {1999}, booktitle = {4th World Conference on Integrated Design \& Process Technology} } @Phdthesis { strecker1999construction, author = {Strecker, Martin}, title = {Construction and Deduction in Type Theories}, abstract = {This dissertation is concerned with interactive proof construction and automated proof search in type theories, in particular the Calculus of Constructions and its subsystems. Type theories can be conceived as expressive logics which combine a functional programming language, strong typing and a higher-order logic. They are therefore a suitable formalism for specification and verification systems. However, due to their expressiveness, it is difficult to provide appropriate deductive support for type theories. This dissertation first examines general methods for proof construction in type theories and then explores how these methods can be refined to yield proof search procedures for specialized fragments of the language. Proof development in type theories usually requires the construction of a term having a given type in a given context. For the term to be constructed, a metavariable is introduced which is successively instantiated in the course of the proof. A naive use of metavariables leads to problems, such as non-commutativity of reduction and instantiation and the generation of ill-typed terms during reduction. For solving these problems, a calculus with explicit substitutions is introduced, and it is shown that this calculus preserves properties such as strong normalization and decidability of typing. In order to obtain a calculus appropriate for proof search, the usual natural deduction presentation of type theories is replaced by a sequent style presentation. It is shown that the calculus thus obtained is correct with respect to the original calculus.
Completeness (proved with a cut-elimination argument) is shown for all predicative fragments of the lambda cube. The dissertation concludes with a discussion of some techniques that make proof search practically applicable, such as unification and pruning of the proof search space by exploiting impermutabilities of the sequent calculus.}, type = {PhD Thesis}, year = {1999}, school = {Universit\"{a}t Ulm, Fakult\"{a}t f\"{u}r Informatik}, file_url = {http://www.informatik.uni-ulm.de/ki/Strecker/diss-strecker.pdf} } @Poster { Martens99Drugs, author = {Martens, Alke and Bernauer, Jochen}, title = {Docs 'n Drugs - A System for Case-Oriented and Web-based Training in Medicine}, abstract = {The tutoring process of conventional case-oriented medical training systems can be characterised as either guided or unguided. In contrast to that, the aim of the system \dqDocs'n Drugs\dq is to distinguish between different levels of guidance. The author can realise the tutoring case either as a guided, a half guided or a unguided tutoring process. The system architecture distinguishes between an authoring system and a tutoring system. Fundaments of these are the tutoring process model and the case-based knowledge model. This structure allows the reuse of elements of existing tutoring cases. The tutoring cases can be realised in German and English.}, type = {Poster}, year = {1999}, booktitle = {Proceedings of AMIA Symposium}, pages = {1115}, file_url = {http://www.ncbi.nlm.nih.gov/pmc/articles/PMC2232626/pdf/procamiasymp00004-1152.pdf} } @Inproceedings { Martens99MedicineTraining, author = {Martens, Alke and Bernauer, Jochen and Seitz, Alexander and Illmann, Torsten and Scheuerer, Claudia}, title = {Docs 'n Drugs - A System for Case-Oriented and Web-based Training in Medicine}, abstract = {The tutoring process of conventional case-oriented medical training systems can be characterised as either guided or unguided. 
In contrast to that, the aim of the system \dqDocs'n Drugs\dq is to distinguish between different levels of guidance. The author can realise the tutoring case either as a guided, a half guided or a unguided tutoring process. The system architecture distinguishes between an authoring system and a tutoring system. Fundaments of these are the tutoring process model and the case-based knowledge model. This structure allows the reuse of elements of existing tutoring cases. The tutoring cases can be realised in German and English.}, year = {1999}, booktitle = {Proceedings of AMIA Symposium}, file_url = {http://www.informatik.uni-ulm.de/ki/Seitz/papers/amia99.pdf} } @Inproceedings { ns01o, author = {Scheuerer, Claudia and Martens, Alke and Bernauer, Jochen and Illmann, Torsten and Seitz, Alexander and Weber, Michael}, title = {Docs 'n Drugs - a Web-Based and Case-Oriented Training System in Medicine}, abstract = {In the virtual hospital medical students, medical doctors and students of related disciplines will have the opportunity to study knowledge and decision making in medicine using a web-based and case-oriented program in order to train their understanding of diseases, and their ability to treat patients.}, year = {1999}, booktitle = {Proceedings of the 44. Jahrestagung der GMDS}, file_url = {http://wwwmosi.informatik.uni-rostock.de/diemosiris/static/Papers\_archiv/dnd\_gmds99.pdf} } @Inproceedings { ns01n, author = {Martens, Alke and Bernauer, Jochen and Illmann, Torsten and Scheuerer, Claudia and Seitz, Alexander and Weber, Michael}, title = {Docs 'n Drugs - Ein webbasiertes, multimediales Lehrsystem f\"{u}r die Medizin}, abstract = {\dqDocs'n Drugs - Die virtuelle Poliklinik\dq ist ein Projekt zur Entwicklung eines webbasierten, multimedialen Lehrsystems f\"{u}r die Medizin. Das Projekt basiert auf zwei Modellen, dem Falldaten- und dem Lehrproze{\ss}modell, und m\"{u}ndet in seiner Realisierung in zwei Systemen, dem eigentlichen webbasierten Lehrsystem und dem Autorensystem.
Zielgruppe des Lehrsystems sind Studierende der Medizin, die durch eine geplante curriculare Einbettung des Programmes fr\"{u}hzeitig an fallorientierte Gedankeng\"{a}nge und Verfahrensweisen herangef\"{u}hrt werden und auf diesem Weg eine Bereicherung ihres Lehrplanes erfahren}, year = {1999}, booktitle = {4. Workshop der AG CBT in der Medizin der GMDS}, pages = {65--72}, file_url = {http://www.informatik.uni-rostock.de/~martens/Papers/dnd\_cbt99.pdf} } @Incollection { Strecker:98a, author = {Strecker, Martin and Luther, Marko and von Henke, Friedrich}, title = {Interactive and Automated Proof Construction in Type Theory}, abstract = {This chapter gives a survey of Typelab, a specification and verification environment that integrates interactive proof development and automated proof search. Typelab is based on a constructive type theory, the Calculus of Constructions, which can be understood as a combination of a typed Lambda calculus and an expressive higher-order logic. Distinctive features of the type system are dependent function types (Pi types) for modeling polymorphism and dependent record types (Sigma types) for encoding specifications and mathematical theories. Type theory provides a homogeneous theoretical framework in which the construction of a function and the construction of a proof can be considered to be essentially the same activity. There is, however, a practical difference in that the development of a function requires more insight and therefore usually has to be performed under human guidance, whereas proof search can, to a large extent, be automated. Internally, Typelab exploits the homogeneity provided by type theory, while externally offering an interface to the human user which conceals most of the complexities of type theory. Interactive construction of proof objects is possible whenever desired; metavariables serve as placeholders which can be refined incrementally until the desired object is complete.
For procedures which can reasonably be automated, high-level tactics are available. In this respect, Typelab can be understood as a proof assistant which, in addition to the manipulations of formulae traditionally performed by theorem provers, permits to carry out operations on entities such as functions and types. For an illustration of program development in Typelab, the report TR-98-03 should be consulted, which holds an extended version of this chapter.}, year = {1998}, booktitle = {Automated Deduction --- A Basis for Applications}, publisher = {Kluwer Academic Publishers}, chapter = {3: Interactive Theorem Proving}, editor = {W. Bibel and P. Schmitt} } @Inproceedings { ns00q, author = {Seitz, Alexander and Uhrmacher, Adelinde M.}, title = {The Treatment of Time in a Case-Based Analysis of Experimental Medical Studies}, abstract = {Case-based approaches are employed within a multitude of application areas one of which is the prediction of dynamic behaviour. Given a situation the possible development after a time span shall be determined. If only a small set of heterogenously structured cases describing observations at a variety of time points is given to start with, as it is the case when experimental medical studies shall be analysed, it becomes necessary to analyse and evaluate the temporal horizon from case to case differently, and treat time as a first class variable. This is the strategy OASES (Our Approach to Simulation based on Experimental Studies) employs. For this purpose, OASES utilises knowledge implicit in cases for matching and adaptation. Whether different time points do match in the current situation or how different time points of observation might effect the development, is decided based on cases which time is an explicit part of. 
Keywords: Case-Based Reasoning; Case-Based Similarity Ranking; Case-Based Adaptation; Prediction; Experimental Studies}, year = {1998}, booktitle = {Advances in Artificial Intelligence, Proceedings of the 22nd German Conference on Artificial Intelligence (KI 1998)}, publisher = {Springer}, pages = {213--224} } @Incollection { PR98, author = {Pfeifer, Holger and Rue\"{s}, Harald}, title = {Polytypic Abstraction in Type Theory}, abstract = {This paper deals with formalizations and verifications in type theory that are abstracted with respect to a class of datatypes; i.e polytypic constructions. The main advantage of these developments are that they can not only be used to define functions in a generic way but also to formally state polytypic theorems and to synthesize polytypic proof objects in a formal way. This opens the door to mechanically proving many useful facts about large classes of datatypes once and for all.}, year = {1998}, booktitle = {Workshop on Generic Programming (WGP'98)}, publisher = {Dept. of Computing Science, Chalmers Univ. of Technology, and G\"{o}teborg Univ.}, editor = {Roland Backhouse and Tim Sheard}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/1998/Pfeifer98PolytypicAbstraction.pdf} } @Inproceedings { ns00r, author = {Uhrmacher, Adelinde M. 
and Tyschler, Petra and Tyschler, Dirk}, title = {Modeling Mobile Agents}, year = {1998}, booktitle = {Proceedings of the International Conference on Web-Based Modeling and Simulation}, pages = {15--20}, file_url = {http://www.researchgate.net/publication/222803651\_Modeling\_and\_simulation\_of\_mobile\_agents/file/3deec51a51179d6d80.pdf} } @Inproceedings { Dold98MechanizedVerification, author = {Dold, Axel and Gaul, Thilo and Zimmermann, Wolf}, title = {Mechanized Verification of Compiler Backends}, abstract = {We describe an approach to mechanically prove the correctness of BURS specifications and show how such a tool can be connected with BURS based back-end generators [9]. The proofs are based on the operational semantics of both source and target system languages specified by means of Abstract State Machines [15]. In [31] we decomposed the correctness condition based on these operational semantics into local correctness conditions for each BURS rule and showed that these local correctness conditions can be proven independently. The specification and verification system PVS is used to mechanically verify BURS-rules based on formal representations of the languages involved. In particular, we have defined PVS proof strategies which enable an automatic verification of the rules. Using PVS, several erroneous rules have been found. Moreover, from failed proof attempts we were able to correct them.}, year = {1998}, booktitle = {Proceedings of the International Workshop on Software Tools for Technology Transfer (STTT '98)}, event_place = {Aalborg, Denmark}, file_url = {http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.46.5876\&rep=rep1\&type=pdf} } @Inproceedings { Schwier98Verification, author = {Schwier, Detlef and von Henke, Friedrich}, title = {Mechanical Verification of Clock Synchronization Algorithms}, abstract = {Clock synchronization algorithms play a crucial role in a variety of fault-tolerant distributed architectures.
Although those algorithms are similar in their basic structure, the particular designs differ considerably, for instance in the way clock adjustments are computed. This paper develops a formal generic theory of clock synchronization algorithms which extracts the commonalities of specific algorithms and their correctness arguments; this generalizes previous work by Shankar and Miner by covering non-averaging adjustment functions, in addition to averaging algorithms. The generic theory is presented as a set of parameterized PVS theories, stating the general assumptions on parameters and demonstrating the verification of generic clock synchronization. The generic theory is then specialized to the class of algorithms using averaging functions, yielding a theory that corresponds to those of Shankar and Miner. As examples of the verification of concrete, published algorithms, the formal verification of an instance of an averaging algorithms (by Welch and Lynch) and of a non-averaging algorithm (by Srikanth and Toueg) is discussed.}, year = {1998}, booktitle = {Formal Techniques in Real-Time and Fault-Tolerant Systems (FTRTFT '98)}, publisher = {Springer}, series = {LNCS}, editor = {Anders P. Ravn and Hans Rischel}, pages = {262--271}, number = {1486}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/1998/Schwier98Verification.pdf} } @Article { Rintanen98lexicographicpriorities, author = {Rintanen, Jussi}, title = {Lexicographic Priorities in Default Logic}, abstract = {Resolving conflicts between default rules is a major subtask in performing default reasoning. A declarative way of controlling the resolution of conflicts is to assign priorities to default rules, and to prevent conflict resolution in ways that violate the priorities. This work extends Reiter's default logic with a priority mechanism that is based on lexicographic comparison. 
Given a default theory and a partial ordering on the defaults, the preferred extensions are the lexicographically best extensions. We discuss alternative ways of using lexicographic comparison, and investigate their properties and relations between them. The applicability of the priority mechanism to inheritance reasoning is investigated by presenting two translations from inheritance networks to prioritized default theories, and relating them to inheritance theories presented earlier by Gelfond and Przymusinska and by Brewka.}, year = {1998}, journal = {Artificial Intelligence}, volume = {106}, pages = {221--265} } @Inproceedings { Uhrmacher1998Agents, author = {Uhrmacher, Adelinde M. and Schattenberg, Bernd}, title = {Agents in Discrete Event Simulation}, abstract = {Test beds for multi-agent systems provide the means for experimenting with multiple agents that act and interact concurrently in their environment. Mostly, test beds constitute specialized tools which provide specific scenarios for testing specific agent-architectures. To facilitate reuse and a flexible compositional construction of experimental frames for multi-agent systems, JAMES, a Java Based Agent Modeling Environment for Simulation, explores the integration of agents within a general modeling and simulation formalism for discrete event systems. Therefore, a compositional and hierarchical model design which supports variable structure models is complemented with a distributed, concurrent execution. 
Two experiments with planning agents illustrate our approach.}, year = {1998}, organization = {The Society for Computer Simulation International (SCS)}, booktitle = {10th European Simulation Symposium ``Simulation in Industry -- Simulation Technology: Science and Art'' (ESS 1998)}, publisher = {SCS Publications, Ghent}, editor = {Andre Bargiela and Eugene Kerckhoffs}, pages = {129--136}, file_url = {http://www.informatik.uni-ulm.de/ki/Schattenberg/Publications/Papers/ess98.pdf} } @Techreport { Canver98Verification, author = {Canver, Ercument}, title = {Formal verification of a coordinated atomic action based design}, abstract = {Coordinated atomic actions (CAAs) have been used in a semi-formal way for the design of the production cell case study. This paper presents a formal specification and verification of the production cell building on this design. However, this report is not intended to present yet another formalization of the production cell case study but rather as an approach to formalizing a CAA based system design in order to formally verify its properties. Each CAA is modeled as an atomic state transition characterized by its pre- and postconditions. In order for such transitions to become enabled, conditions are formalized requiring all associated roles to be activated. Activation of roles is performed by controllers, which are again modeled in terms of state transitions. The state space of the production cell can be viewed as being finite; hence, the production cell is specified as a finite state transition system and the formal verification of the CAA-design is carried out using model-checking.}, year = {1998}, institution = {University of Ulm}, number = {UIB 98-05}, web_url = {http://vts.uni-ulm.de/doc.asp?id=8522}, file_url = {http://vts.uni-ulm.de/docs/2013/8522/vts\_8522\_12573.pdf} } @Inproceedings { Uhrmacher98Simulation, author = {Uhrmacher, Adelinde M.
and Seitz, Alexander}, title = {Fallbasierte Simulation \"{o}kologischer und biologischer Systeme}, abstract = {Simulation bedeutet ein Experimentieren mit Modellen. Die kausalen Zusammenh\"{a}nge \"{o}kologischer und biologischer Systeme sind h\"{a}ufig nur unvollst\"{a}ndig bekannt oder nur auf einem f\"{u}r die Simulation nicht nutzbaren Abstraktionsniveau formuliert. Qualitative deduktive Simulationsans\"{a}tze, die auf die Quantifizierung von Systemparametern verzichten k\"{o}nnen, erfordern eine bekannte und zudem wenig komplexe Systemstruktur und sind daher nur beschr\"{a}nkt anwendbar. Eine Alternative bieten fallbasierte Ans\"{a}tze, die ausgehendvon Daten ein Modell entwickeln, welches als Grundlage der Simulation genutzt werden kann. Im Gegensatz zu rein induktiven Methoden, wird das in F\"{a}llen vorliegende Wissen nicht generalisiert und dann angewendet, sondern die Spezifit\"{a}t einzelner Situationen bleibt f\"{u}r die Simulation erhalten. Das Modell in fallbasierten Methoden besteht aus einer Reihe prototypischer F\"{a}lle. Die F\"{a}lle werden erg\"{a}nzt durch Wissen, um die f\"{u}r eine bestimmte Situation prinzipiell geeigneten F\"{a}lle bestimmen und diese nach ihrer N\"{u}tzlichkeit bewerten zu k\"{o}nnen und durch Wissen, wie sich der pr\"{a}ferierte Fall auf die aktuelle Situation \"{u}bertragen l\"{a}\"{s}t. Zum Teil wird dieses Wissen vom Benutzer vordefiniert, oder das System leitet es induktiv aus den F\"{a}llen her oder nutzt die F\"{a}lle in ihrer eigentlichen Form. Letzteres unterst\"{u}tzt eine kontextsensitive Verarbeitung sehr heterogener F\"{a}lle. Anhand zweier Verfahren sollen M\"{o}glichkeiten und Anforderungen der fallbasierten Simulation untersucht werden. Gemeinsam ist beiden Verfahren, da\"{s} sie das Verhalten von dynamischen Systemen auf der Basis von F\"{a}llen vorhersagen, die durch eine Menge von unterschiedlich skalierten Parametern beschrieben sind. 
Der Ansatz des \dqFuzzy-Based Inductive Reasoning\dq (FIR) arbeitet auf Prim\"{a}rdaten, die als Zeitreihen vorliegen Anwendungsgebiet ist der Kohlendioxydzyklus in Biosph\"{a}re 2. Die Vereinheitlichung des Skalenniveaus in \dqFuzzy-Werte\dq ist Voraussetzung f\"{u}r alle weiteren Schritte in FIR. Das Wissen, das ben\"{o}tigt wird, um geeignete F\"{a}lle zu identifizieren, wird von FIR induktiv anhand der Fallbasis hergeleitet. Aufgrund der Homogenit\"{a}t der F\"{a}lle k\"{o}nnen danach einfache distanzorientierte Verfahren f\"{u}r die Pr\"{a}ferenzfindung und Adaption angewendet werden. OASES, \dqOur Approach to Simulate based on Experimental Studies\dq, basiert auf Sekund\"{a}rdaten, Ergebnisse experimenteller Studien, wie sie typischerweise in der Literatur gefunden werden. Die Fallbasis mit ca. 600 F\"{a}llen fa\"{s}t die Ergebnisse aus 45 Studien zur Knochenheilung zusammen. Versuchs- und Ergebnisparameter und deren Skalierung variieren von Fall zu Fall. Metrische Angaben sind je nach Kontext v\"{o}llig unterschiedlich zu bewerten. Eine Anpassung des Skalenniveaus ist ebenso wie die weiteren Phasen des fallbasierten Schlie\"{s}ens nur im Hinblick auf eine bestimmte gegebene Situation zu leisten. Um die geeigneten F\"{a}lle zu identifizieren, sie zu ordnen und letztendlich an die gegebene Situation anzupassen, interpretiert OASES das in der Fallbasis inh\"{a}rente Wissen situationsspezifisch.}, year = {1998}, booktitle = {Proceedings of the 8th Workshop, AK5, GI-Fachgruppe 4.5.9/4.6.3: Werkzeuge f\"{u}r die Simulation und Modellierung in Umweltanwendungen. Wissenschaftliche Berichte des Forschungszentrums Karlsruhe (FZKA)} } @Techreport { Damm98Knochenheilung, author = {Damm, D. and von Henke, Friedrich and Seitz, Alexander and Uhrmacher, Adelinde M. and Claes, L. 
and Wolf, S.}, title = {Ein fallbasiertes System f\"{u}r die Interpretation von Literatur zur Knochenheilung}, abstract = {Im Projekt SILK wurden fallbasierte Methoden konzipiert und implementiert, die es dem Wissenschaftler erlauben, den Stand der experimentellen Forschung systematisch zu analysieren, neue Ergebnisse im Kontext anderer einzuordnen und ausgew\"{a}hlte Fragestellungen der Wissenschaftler auf dieser Basis effizient zu beantworten. Als exemplarischer Anwendungsbereich diente das Gebiet der Knochen-heilung.}, year = {1998}, institution = {University of Ulm}, number = {UIB 98-01} } @Article { Rintanen98Complexity, author = {Rintanen, Jussi}, title = {Complexity of Prioritized Default Logics}, abstract = {In default reasoning, usually not all possible ways of resolving conflicts between default rules are acceptable. Criteria expressing acceptable ways of resolving the conflicts may be hardwired in the inference mechanism, for example specificity in inheritance reasoning can be handled this way, or they may be given abstractly as an ordering on the default rules. In this article we investigate formalizations of the latter approach in Reiter's default logic. Our goal is to analyze and compare the computational properties of three such formalizations in terms of their computational complexity: the prioritized default logics of Baader and Hollunder, and Brewka, and a prioritized default logic that is based on lexicographic comparison. 
The analysis locates the propositional variants of these logics on the second and third levels of the polynomial hierarchy, and identifies the boundary between tractable and intractable inference for restricted classes of prioritized default theories.}, year = {1998}, journal = {Journal of Artificial Intelligence Research (JAIR)}, volume = {9}, pages = {423--461}, file_url = {http://www.jair.org/media/554/live-554-1756-jair.pdf} } @Inproceedings { vHPP+:98, author = {von Henke, Friedrich and Pfab, Stephan and Pfeifer, Holger and Rue\"{s}, Harald}, title = {Case Studies in Meta-Level Theorem Proving}, abstract = {We describe an extension of the PVS system that provides a reasonably efficient and practical notion of reflection and thus allows for soundly adding formalized and verified new proof procedures. These proof procedures work on representations of a part of the underlying logic and their correctness is expressed at the object level using a computational reflection function. The implementation of the PVS system has been extended with an efficient evaluation mechanism, since the practicality of the approach heavily depends on careful engineering of the core system, including efficient normalization of functional expressions. We exemplify the process of applying meta-level proof procedures with a detailed description of the encoding of cancellation in commutative monoids and of the kernel of a BDD package.}, year = {1998}, booktitle = {Proceedings Intl. Conf. on Theorem Proving in Higher Order Logics}, volume = {1479}, publisher = {Springer}, series = {Lecture Notes in Computer Science}, editor = {J. Grundy and M. 
Newey}, pages = {461--478}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/1998/vHenke98TheoremProving.pdf} } @Inproceedings { Dold98Verification, author = {Dold, Axel and Gaul, Thilo and Vialard, Vincent and Zimmermann, Wolf}, title = {ASM-based Mechanized Verification of Compiler Back-Ends}, abstract = {We describe an approach to mechanically prove the correctness of BURS specifications and show how such a tool can be connected with BURS based back-end generators. The proofs are based on an operational semantics of both source and target system languages specified by means of Abstract State Machines (ASM's). The correctness condition based on these operational semantics is decomposed into local correctness conditions for each BURS rule and the rules can be proved independently. The specification and verification system PVS is used to mechanically verify BURS-rules based on formal representations of the languages involved. In particular, we have defined PVS proof strategies which enable an automatic verification of the rules. Using PVS, several erroneous rules have been found. Moreover, from failed proof attempts we were able to correct them.}, year = {1998}, booktitle = {Workshop on Abstract State Machines '98}, pages = {50--67}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/1998/Dold98Verification.pdf} } @Techreport { Typelab:98a, author = {Luther, Marko and Strecker, Martin}, title = {A guided tour through TYPELAB}, abstract = {This report gives a survey of T YPELAB, a specification and verification environment that integrates interactive proof development and automated proof search. T YPELAB is based on a constructive type theory, the Calculus of Constructions, which can be understood as a combination of a typed λ-calculus and an expressive higher-order logic. 
Distinctive features of the type system are dependent function types for modeling polymorphism and dependent record types for encoding specifications and mathematical theories. After presenting an extended example which demonstrates how program development by stepwise refinement of specifications can be carried out, the theory underlying the prover component of T YPELAB is described in detail. A calculus with metavariables and explicit substitutions is introduced, and the meta-theoretic properties of this calculus are analyzed. Furthermore, it is shown that this calculus provides an adequate foundation for automated proof search in fragments of the logic.}, year = {1998}, institution = {Universit\"{a}t Ulm}, number = {98-03}, file_url = {http://vts.uni-ulm.de/docs/2013/8520/vts\_8520\_12569.pdf} } @Inproceedings { Rintanen98aplanning, author = {Rintanen, Jussi}, title = {A Planning Algorithm not based on Directional Search}, abstract = {The initiative in STRIPS planning has recently been taken by work on propositional satisfiability. Best current planners, like Graphplan, and earlier planners originating in the partial-order or refinement planning community have proved in many cases to be inferior to general-purpose satisfiability algorithms in solving planning problems. However, no explanation of the success of programs like Walksat or relsat in planning has been offered. 
In this paper we discuss a simple planning algorithm that reconstructs the planner in the background of the SAT/CSP approach.}, year = {1998}, booktitle = {Proceedings of the 6th International Conference on Principles of Knowledge Representation and Reasoning (KR '98)}, publisher = {Morgan Kaufmann Publishers}, pages = {617--624}, file_url = {http://users.ics.aalto.fi/rintanen/jussi/papers/Rintanen98plan.pdf} } @Inproceedings { henke:typelab, author = {von Henke, Friedrich and Luther, Marko and Strecker, Martin}, title = {Typelab: An Environment for Modular Program Development}, year = {1997}, DOI = {10.1007/BFb0030645}, booktitle = {Theory and Practice of Software Development (TAPSOFT '97) - Proceedings of the 7th International Joint Conference CAAP/FASE}, volume = {1214}, publisher = {Springer Berlin Heidelberg}, pages = {849--854} } @Techreport { BPvHR97, author = {Bartels, F. and von Henke, Friedrich and Pfeifer, Holger and Rue\"{s}, Harald}, title = {Mechanizing Domain Theory}, abstract = {We describe an encoding of major parts of domain theory and fixed-point theory in the PVS extension of the simply-typed lambda-calculus; these formalizations comprise the encoding of mathematical structures like complete partial orders (domains), domain constructions, the Knaster-Tarski fixed-point theorem for monotonic functions, and variations of fixed-point induction. Altogether, these encodings form a conservative extension of the underlying PVS logic. A major problem of embedding mathematical theories like domain theory lies in the fact that developing and working with those theories usually generates myriads of applicability and type-correctness conditions. Our approach to exploiting the PVS devices of predicate subtypes and judgements to establish many applicability conditions behind the scenes leads to a considerable reduction in the number of the conditions that actually need to be proved. 
We illustrate the applicability of our encodings by means of simple examples including a mechanized fixed-point induction proof in the context of relating different semantics of imperative programming constructs.}, type = {Ulmer Informatik-Berichte}, year = {1997}, institution = {Universit\"{a}t Ulm, Fakult\"{a}t f\"{u}r Informatik}, number = {96-10}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/1997/Bartels97MechanizingDomainTheory.pdf}, note = {Major revision as of October 1997} } @Inproceedings { strecker:integrating, author = {Strecker, Martin and Sorea, Maria}, title = {Integrating an Equality Prover into a Software Development System Based on Type Theory}, abstract = {This paper reports on the integration of an untyped equational prover into a proof system based on an expressive constructive type theory. The proofs returned by the equational prover are effectively verified for type correctness, a proof term can be constructed. The scheme of proof translation described here is illustrated by the integration of the Discount prover into the software development system TYPELAB.}, year = {1997}, DOI = {10.1007/3540634932\_11}, booktitle = {Advances in Artificial Intelligence - Proceedings of the 21st Annual German Conference on Artificial Intelligence (KI '97)}, publisher = {Springer Berlin Heidelberg}, pages = {147--158} } @Techreport { PvHR97, author = {von Henke, Friedrich and Pfeifer, Holger and Rue\"{s}, Harald}, title = {Guided Tour Through a Mechanized Semantics of Simple Imperative Programming Constructs}, type = {Ulmer Informatik-Berichte}, year = {1997}, institution = {Universit\"{a}t Ulm, Fakult\"{a}t f\"{u}r Informatik}, number = {96-11}, note = {Major revision as of July 1997} } @Article { Canver97Steuerungssoftware, author = {Canver, Ercument and Gayen, Jan-Tecker and Moik, Adam}, title = {Formale Spezifikation von Steuerungssoftware am Beispiel einer Weiche}, abstract = {Dieser Beitrag beschreibt die 
Ergebnisse einer Fallstudie zur Erprobung der in dem System Verification Support Environment (VSE) bereitgestellten formalen Methoden am Beispiel einer computergesteuerten elektrisch ortsbedienten Weiche (EOW). VSE ist ein Werkzeug zur formalen Spezifikation und Verifikation von sicherheitsrelevanten Softwaresystemen. Die elektrisch ortsbediente Weiche ist ein Industrieprodukt aus dem Bereich des spurgef\"{u}hrten Verkehrs: sie wird in Gleisanlagen eingesetzt, die mit nur geringer Geschwindigkeit befahren werden d\"{u}rfen, die Weichenumstellung erfolgt bei Bedarf elektrisch vor Ort. F\"{u}r einen sicheren und ordnungsgem\"{a}\"{s}en Betrieb werden dennoch hohe Anforderungen an die Steuerungssoftware der EOW gestellt. Die Fallstudie wurde von einer interdisziplin\"{a}ren Arbeitsgruppe bearbeitet und es wurde versucht, das anwendungsorientierte Vorgehen eines Ingenieurs mit einer streng formalen Vorgehensweise aus der Informatik zu verkn\"{u}pfen. Der vorliegende Artikel wendet sich an alle Leser, die sich f\"{u}r Software mit Sicherheitsverantwortung interessieren.}, year = {1997}, journal = {atp -- Automatisierungstechnische Praxis}, volume = {39}, publisher = {R. Oldenbourg Verlag}, pages = {57--64}, number = {5} } @Inproceedings { DHPR97, author = {Dold, Axel and von Henke, Friedrich and Pfeifer, Holger and Rue\"{s}, Harald}, title = {Formal Verification of Transformations for Peephole Optimization}, year = {1997}, booktitle = {FME '97: Formal Methods: Their Industrial Application and Strengthened Foundations}, volume = {1313}, publisher = {Springer}, series = {Lecture Notes in Computer Science}, editor = {J. Fitzgerald and C. Jones and P. Lucas}, pages = {459--472} } @Article { ns00f, author = {Uhrmacher, Adelinde M.}, title = {Concepts of Object- and Agent-Oriented Simulation}, abstract = {Object-oriented concepts are widely employed in simulation. 
The increasing activity in developing agent-oriented simulation systems calls for exploring the relationship between object-oriented and agent-oriented modeling and simulation more closely. The paper constitutes one step to that enterprise.}, year = {1997}, journal = {Transactions of the Society for Computer Simulation International}, volume = {14}, pages = {59--67}, number = {2}, web_url = {http://dl.acm.org/citation.cfm?id=271934} } @Article { ns00g, author = {Uhrmacher, Adelinde M. and Cellier, Francois E. and Frye, R. J.}, title = {Applying Fuzzy-Based Inductive Reasoning to Analyze Qualitatively the Dynamic Behaviour of an Ecological System}, abstract = {In the last decade a variety of methodologies for representing and evaluating knowledge qualitatively has been developed, particularly within the field of Artificial Intelligence. Qualitative reasoning methodologies represent an alternative to quantitative modeling approaches, if the knowledge about the system of interest is imprecise or incomplete, as it is often the case when dealing with ecological systems. As most of the methodologies have not outgrown toy examples, it remains challenging to apply those methodologies to real world applications. In Biosphere 2, a closed ecological system, the level of O2 has dropped and the CO2 level has risen continuously during its closure between 1991 and 1993. The mechanisms of carbon cycles have been subject to multiple research efforts, and are therefore formulated as general rules in principle. However, the specific situation within Biosphere 2, a closed ecosystem, might influence the validity of these rules. Thus, the structure of the carbon cycle in Biosphere 2 is not well known, yet abundant data exist on some of the important fluxes and pools. 
Whereas deductive, quantitative as well as qualitative, methodologies need knowledge about the structure of the system to derive the behavior of the system, the fuzzy-based inductive reasoning methodology FIR derives inductively the behavior model by analyzing time series. The derived behavior model comprises cases and information how to retrieve prototypical cases that can be adapted to the given situation. Thus, FIR combines one-shot inductive and incremental case-based reasoning techniques in analyzing and forecasting dynamic systems.}, year = {1997}, journal = {International Journal on Applied Artificial Intelligence in Natural Resource Management}, volume = {11}, pages = {1--10}, number = {2} } @Inproceedings { cyrluk:an, author = {Cyrluk, David and M\"{o}ller, Oliver and Rue\"{s}, Harald}, title = {An Efficient Decision Procedure for the Theory of Fixed-Sized Bit-Vectors}, abstract = {In this paper we describe a decision procedure for the core theory of fixed-sized bit-vectors with extraction and composition that can readily be integrated into Shostak's procedure for deciding combinations of theories. Inputs to the solver are unquantified bit-vector equations t=u and the algorithm returns true if t=u is valid in the bit-vector theory, false if t=u is unsatisfiable, and a system of solved equations otherwise. The time complexity of the solver is |t|*log(n)+n^2, where t is the length of the bit-vector term t and n denotes the number of bits on either side of the equation. Then, the solver for the core bit-vector theory is extended to handle other bit-vector operations like bitwise logical operations, shifting, and arithmetic interpretations of bit-vectors. 
We develop a BDD-like data-structure called bit-vector BDDs to represent bit-vectors, various operations on bit-vectors, and a solver on bit-vector BDDs.}, year = {1997}, DOI = {10.1007/3-540-63166-6\_9}, booktitle = {Computer Aided Verification - 9th International Conference, (CAV '97)}, volume = {1254}, publisher = {Springer Berlin Heidelberg}, pages = {60--71} } @Inproceedings { ns00u, author = {Uhrmacher, Adelinde M.}, title = {Variable Structure Modelling - Discrete Events in Simulation}, abstract = {Variable structure models are models that entail in their description the possibility to change their own structure. Several modeling approaches which deal with variable structure modeling are presented. Different application domains pinpoint different perspectives in systems' modeling implying different solutions for variable structure modeling. Particularly, in the context of agent-oriented simulation variable structure modeling becomes a necessary precondition to describe individuals in processes of patterned interactions, embedded in and always (re-)creating perspectives. The focus of interest shifts from simulating systems to simulating actors, and the mediating role of variable structure modeling.}, year = {1996}, booktitle = {Proc. of the 6th Annual Conference on Artificial Intelligence, Simulation and Planning in High Autonomy Systems}, publisher = {IEEE-Press}, pages = {133--140} } @Incollection { ns00k, author = {Uhrmacher, Adelinde M.}, title = {Object-Oriented and Agent-Oriented Simulation-Implications for Social Science Applications}, year = {1996}, booktitle = {Social Science Micro Simulation- A Challenge for Computer Science}, publisher = {Springer}, editor = {Doran, J. and Gilbert, N. and M\"{u}ller, U. and Troitzsch, K.G.}, pages = {432--447} } @Article { ns00h, author = {Uhrmacher, Adelinde M. 
and Zeigler, Bernard P.}, title = {Variable Structure Modeling in Object-Oriented Simulation}, abstract = {Two different object-oriented modeling approaches, DEVS and EMSY, constitute the background to explore the area of variable structure modeling. Realizations of various kinds of structural changes are discussed in both approaches. Against the background of their prime application domains, both approaches deal with the problem of structural change differently. While DEVS emphasizes intelligent control of structural change, EMSY stresses the autonomous character of the system. Like autonomy and control, holism and reductionism play different roles in both approaches and affect the realization of structural changes. However, unlike the former which tend to transcend each other, the reductionistic and holistic view realized in the two modeling approaches prove to set a rigorous framework for variable structure modeling.}, year = {1996}, DOI = {10.1080/03081079608945128}, journal = {International Journal on General Systems}, volume = {24}, pages = {359--375}, number = {4} } @Inproceedings { vonHenke:96a, author = {von Henke, Friedrich and Luther, Marko and Pfeifer, Holger and Rue\"{s}, Harald and Schwier, Detlef and Strecker, Martin and Wagner, Matthias}, title = {The TYPELAB Specification and Verification Environment}, year = {1996}, DOI = {10.1007/BFb0014353}, booktitle = {Proceedings of the 5th International Conference on Algebraic Methodology and Software Technology (AMAST '96)}, volume = {1101}, publisher = {Springer}, series = {Lecture Notes in Computer Science}, editor = {M. Wirsing and M. Nivat}, pages = {604--607} } @Incollection { nebel1996terminologische, author = {Nebel, Bernhard}, title = {Terminologische Logiken}, year = {1996}, booktitle = {W\"{o}rterbuch der Kognitionswissenschaft}, publisher = {Klett-Cotta}, editor = {Gerhard Strube, B. Becker, C. Freksa, U. Hahn, K. Opwis and G. 
Plam}, pages = {385} } @Incollection { nebel1996subsumption, author = {Nebel, Bernhard}, title = {Subsumption}, year = {1996}, booktitle = {W\"{o}rterbuch der Kognitionswissenschaft}, publisher = {Klett-Cotta}, editor = {Gerhard Strube, B. Becker, C. Freksa, U. Hahn, K. Opwis and G. Plam}, pages = {695} } @Inproceedings { Strecker:96a, author = {Strecker, Martin and Luther, Marko and Wagner, Matthias}, title = {Structuring and Using a Knowledge Base of Mathematical Concepts: A Type-Theoretic Approach}, abstract = {This paper describes an approach to representing mathematical concepts in a knowledge base which is structured by a subsumption relation between concepts. Two kinds of concepts are examined: Propositional concepts, with the subsumption relation given by a generalized implication, and parameterized theories, with the subsumption relation given by theory morphisms. It is shown which kinds of reasoning activities can be supported by such a knowledge base. A type theory in which the entities to be represented are first-class objects serves as formal framework.}, year = {1996}, booktitle = {ECAI-96 Workshop on Representation of mathematical knowledge}, pages = {23--26} } @Inproceedings { nebel1996solving, author = {Nebel, Bernhard}, title = {Solving Hard Qualitative Temporal Reasoning Problems: Evaluating the Efficiency of Using the ORD-Horn Class}, abstract = {While the worst-case computational properties of Allen's calculus for qualitative temporal reasoning have been analyzed quite extensively, the determination of the empirical efficiency of algorithms for solving the consistency problem in this calculus has received only little research attention. In this paper, we will demonstrate that using the ORD-Horn class in Ladkin and Reinefeld's backtracking algorithm leads to performance improvements when deciding consistency of hard instances in Allen's calculus. 
For this purpose, we prove that Ladkin and Reinefeld's algorithm is complete when using the ORD-Horn class, we identify phase transition regions of the reasoning problem, and compare the improvements of ORD-Horn with other heuristic methods when applied to instances in the phase transition region. Finally, we give evidence that combining search methods orthogonally can dramatically improve the performance of the backtracking algorithm.}, year = {1996}, booktitle = {Proceedings of the 12th European Conference on Artificial Intelligence (ECAI'96)}, pages = {38--42} } @Incollection { nebel1996artificial, author = {Nebel, Bernhard}, title = {Artificial Intelligence: A Computational Perspective}, abstract = {Although the computational perspective on cognitive tasks has always played a major role in Artificial Intelligence, the interest in the precise determination of the computational costs that are required for solving typical AI problems has grown only recently. In this paper, we will describe what insights a computational complexity analysis can provide and what methods are available to deal with the complexity problem.}, year = {1996}, booktitle = {Principles of Knowledge Representation}, publisher = {CSLI Publications}, editor = {G. Brewka}, pages = {237--266} } @Inproceedings { rueß:modular, author = {Rue\"{s}, Harald and Shankar, Natarajan and K. Srivas, Mandayam}, title = {Modular Verification of SRT Division}, abstract = {We describe a formal specification and verification in PVS for the general theory of SRT division, and for the hardware design of a specific implementation. The specification demonstrates how attributes of the PVS language (in particular, predicate subtypes) allow the general theory to be developed in a readable manner that is similar to textbook presentations, while the PVS table construct allows direct specification of the implementation's quotient look-up table. 
Verification of the derivations in the SRT theory and for the data path and look-up table of the implementation are highly automated and performed for arbitrary, but finite precision; in addition, the theory is verified for general radix, while the implementation is specialized to radix 4. The effectiveness of the automation derives from PVS's tight integration of rewriting with decision procedures for equality, linear arithmetic over integers and rationals, and propositional logic. This example demonstrates that the resources of an expressive specification language and of a general-purpose theorem prover are not inimical to highly automated verification in this domain, and can contribute to clarity, generality, and reuse.}, year = {1996}, DOI = {10.1007/3-540-61474-5\_63}, booktitle = {Proceedings of the 8th International Conference on Computer Aided Verification (CAV '96)}, volume = {1102}, publisher = {Springer Berlin Heidelberg}, pages = {123--134} } @Techreport { dvhpr96, author = {Dold, Axel and von Henke, Friedrich and Pfeifer, Holger and Rue\"{s}, Harald}, title = {Generic Compilation Schemes for Simple Programming Constructs}, year = {1996}, institution = {Ulm University}, number = {96-12} } @Techreport { Canver96:FESEOWV, author = {Canver, Ercument and Gayen, Jan-Tecker and Moik, Adam}, title = {Formale Entwicklung der Steuerungssoftware f\"{u}r eine elektrisch ortsbediente Weiche mit VSE}, abstract = {Die computergesteuerte \dqelektrisch ortsbediente Weiche (EOW)\dq ist ein Produkt aus dem Bereich des spurgefuhrten Verkehrs und wird in Gleisanlagen eingesetzt, die mit nur geringer Geschwindigkeit befahren werden durfen. F\"{u}r einen sicheren und ordnungsgem\"{a}\"{s}en Betrieb werden dennoch hohe Anforderungen an die Steuerungssoftware der EOW gestellt. Die EOW ist als Industrieprodukt von hoher praktischer Relevanz. 
Da die Gr\"{o}\"{s}e der Steuerungssoftware in einem handhabbaren Rahmen liegt, bietet sich mit dieser Anwendung eine geeignete Fallstudie zur Erprobung der in dem System \dqVerification Support Environment (VSE)\dq bereitgestellten formalen Methoden, einem Werkzeug zur formalen Spezifikation und Verifikation von sicherheitsrelevanten Softwaresystemen. Die Fallstudie wurde von einer interdisziplinaren Arbeitsgruppe bearbeitet und es wurde versucht, das anwendungsorientierte Vorgehen eines Ingenieurs und die methodikorientierte formale Vorgehensweise eines Softwareentwicklers miteinander zu verkn\"{u}pfen. Vorliegender Bericht wendet sich insbesondere an diese beiden Lesergruppen. Zunachst werden die EOW und die Anforderungen an die Steuerungssoftware beschrieben und der mit VSE zu modellierende Softwareausschnitt definiert. Die Abgrenzung und Schnittstellen dieses Ausschnitts zum Gesamtsystem werden festgelegt. Danach ist an diesem Ausschnitt die VSE-Methodik illustriert. Abschlie\"{s}end sind die Erfahrungen und Schlu\"{s}folgerungen aus dieser Fallstudie zusammengefa\"{s}t.}, type = {UIB}, year = {1996}, institution = {Universit\"{a}t Ulm}, number = {96-01}, file_url = {http://vts.uni-ulm.de/docs/2009/7066/vts\_7066\_9889.pdf} } @Inproceedings { ns00t, author = {Seitz, Alexander and Uhrmacher, Adelinde M.}, title = {F\"{a}lle statt Modellwissen - Eine Anwendung auf dem Gebiet der Knochenheilung}, year = {1996}, booktitle = {4th German Workshop on Case-Based Reasoning} } @Inproceedings { ns00s, author = {Uhrmacher, Adelinde M.}, title = {Concepts of Object- and Agent-Oriented Simulation}, abstract = {Object-oriented concepts are widely employed in simulation. The increasing activity in developing agent-oriented simulation systems calls for exploring the relationship between object-oriented and agent-oriented modeling and simulation more closely. 
The paper constitutes one step to that enterprise.}, year = {1996}, booktitle = {Workshop on Multiagent Systems and Simulation}, pages = {1--8}, file_url = {http://wwwmosi.informatik.uni-rostock.de/diemosiris/static/Papers\_archiv/scs97.pdf} } @Inproceedings { Goerigk+96, author = {Goerigk, Wolfgang and Dold, Axel and Gaul, Thilo and Goos, Gerhard and Heberle, Andreas and von Henke, Friedrich and Hoffmann, Ulrich and Langmaack, Hans and Pfeifer, Holger and Rue\"{s}, Harald and Zimmermann, Wolf}, title = {Compiler Correctness and Implementation Verification: The {\em Verifix} Approach}, abstract = {Compiler correctness is crucial to the software engineering of safety critical software. It depends on both the correctness of the compiling specification and the correctness of the compiler implementation. We will discuss compiler correctness for practically relevant source languages and target machines in order to find an adequate correctness notion for the compiling specification, i. e. for the mapping from source to target programs with respect to their standard semantics, which allows for proving both specification and implementation correctness. We will sketch our approach of proving the correctness of the compiler implementation as a binary machine program, using a special technique of bootstrapping and double checking the results. We will discuss mechanical proof support for both compiling verification and compiler implementation verification in order to make them feasible parts of the software engineering of correct compilers. Verifix is a joint project on Correct Compilers funded by the Deutsche Forschungsgemeinschaft (DFG).}, year = {1996}, booktitle = {Proceedings of the Poster Session of CC '96 -- International Conference on Compiler Construction}, address = {IDA Technical Report LiTH-IDA-R-96-12, Link{\"o}ping, Sweden}, editor = {P. 
Fritzson}, pages = {65--73}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/1996/Goerigk96Verifix.pdf} } @Techreport { CMR96, author = {Cyrluk, David and M\"{o}ller, Oliver and Rue\"{s}, Harald}, title = {An Efficient Decision Procedure for a Theory of Fixed-Sized Bitvectors with Composition and Extraction}, abstract = {The theory of fixed-sized bitvectors with composition and extraction has been shown to be useful in the realm of hardware verification, and in this paper we develop an efficient algorithm for deciding this theory. A proper input is an unquantified bitvector equation, say t = u, and our algorithm returns true if t = u is valid in the bitvector theory, false if t = u is unsatisfiable, and a system of solved equations otherwise. The time complexity of this solver is O(|t| log(n) + n^2), where |t| is the length of the bitvector term t and n denotes the number of bits on either side of the equation. Moreover, the resulting procedure can readily be integrated into Shostak's procedure for deciding combinations of theories.}, type = {Ulmer Informatik-Berichte}, year = {1996}, institution = {Universit\"{a}t Ulm, Fakult\"{a}t f\"{u}r Informatik}, number = {96-8} } @Incollection { Ruess96Verification, author = {Rue\"{s}, Harald}, title = {Hierarchical verification of two-dimensional high-speed multiplication in PVS: A case study}, abstract = {It is shown how to use the PVS specification language and proof checker to present a hierarchical formalization of a two-dimensional, high-speed integer multiplier on the gate level. We first give an informal description of iterative array multiplier circuits together with a natural refinement into vertical and horizontal stages, and then show how the various features of PVS can be used to obtain a readable, high-level specification. The verification exploits the tight integration between rewriting, arithmetic decision procedures, and equality that is present in PVS. 
Altogether, this case study demonstrates that the resources of an expressive specification language and of a general-purpose theorem prover permit highly automated verification in this domain, and can contribute to clarity, generality, and reuse.}, year = {1996}, isbn = {978-3-540-61937-6}, DOI = {10.1007/BFb0031801}, booktitle = {Formal Methods in Computer-Aided Design}, volume = {1166}, publisher = {Springer Berlin Heidelberg}, series = {Lecture Notes in Computer Science}, editor = {Srivas, Mandayam and Camilleri, Albert}, pages = {79--93} } @Poster { Willy95Poster, author = {Willy, C. and Uhrmacher, Adelinde M. and Gerngross, H.}, title = {Leucozyte-Endothelium Interaction in Traumatology - A Knowledge-Based Systems Approach}, type = {Poster}, year = {1995}, address = {EFFORD Congress, Munich} } @Incollection { owsnicki-klewe1995wissensrepräsentation, author = {Owsnicki-Klewe, Bernd and von Luck, Kai and Nebel, Bernhard}, title = {Wissensrepr\"{a}sentation und Logik - Eine Einf\"{u}hrung}, abstract = {Wir geben eine Einf\"{u}hrung in die Modellierung mit Hilfe logischer Methoden, skizzieren das Design von Wissensrepr\"{a}sentationssystemen auf logischer und algorithmischer Ebene und diskutieren die Umsetzung in Implementationen.}, year = {1995}, booktitle = {Einf\"{u}hrung in die K\"{u}nstliche Intelligenz}, publisher = {Addison-Wesley}, editor = {G. G\"{o}rz}, pages = {3--54} } @Inproceedings { andré1995wip, author = {Andre, Elisabeth and Finkler, Wolfgang and Graf, Winfried and Harbusch, Karin and Heinsohn, Jochen and Kilger, Anne and Nebel, Bernhard and Profitlich, Hans-J\"{u}rgen and Rist, Thomas and Wahlster, Wolfgang and Butz, Andreas and Jameson, Anthony}, title = {WIP: From Multimedia to Intellimedia (Abstract of Video)}, abstract = {This video provides an overview of the WIP project. 
WIP aimed at the development of a presentation system that is able to generate a variety of multimedia documents considering generation parameters, such as target group, presentation objective, resource limitations, and target language (see Fig. 1). The major components of the WIP system are: a presentation planner that is responsible for determining the contents and selecting an appropriate medium combination, medium-specific generators and a layout manager that arranges the generated output in a document. The video gives a survey of the WIP architecture and demonstrates the performance of the single modules by several system runs. A basic assumption behind the WIP model is that no}, year = {1995}, booktitle = {Proceedings of the 14th International Joint Conference on Artificial Intelligence (IJCAI'95)}, pages = {2053--2054} } @Inproceedings { dold:representing, author = {Dold, Axel}, title = {Representing, Verifying and Applying Software Development Steps using the PVS System}, abstract = {In this paper generic software development steps of different complexity are represented and verified using the (higher-order, strongly typed) specification and verification system PVS. The transformations considered in this paper include “large” powerful steps encoding general algorithmic paradigms as well as “smaller” transformations for the operationalization of a descriptive specification. The application of these transformation patterns is illustrated by means of simple examples. Furthermore, we show how to guide proofs of correctness assertions about development steps. Finally, this work serves as a case-study and test for the usefulness of the PVS system.}, year = {1995}, DOI = {10.1007/3-540-60043-4\_69}, booktitle = {Proceedings of the 4th International Conference on Algebraic Methodology and Software Technology (AMAST '95)}, volume = {936}, publisher = {Springer Berlin Heidelberg}, series = {Lecture Notes in Computer Science}, editor = {V. S. 
Alagar and Maurice Nivat}, pages = {431--445} } @Article { Nebel1995TemporalRelations, author = {Nebel, Bernhard and B\"{u}rckert, Hans-J\"{u}rgen}, title = {Reasoning About Temporal Relations: A Maximal Tractable Subclass of Allen's Interval Algebra}, abstract = {We introduce a new subclass of Allen's interval algebra we call ``ORD-Horn subclass,\\" which is a strict superset of the ``pointisable subclass.\\" We prove that reasoning in the ORD-Horn subclass is a polynomial-time problem and show that the path-consistency method is sufficient for deciding satisfiability. Further, using an extensive machine-generated case analysis, we show that the ORD-Horn subclass is a maximal tractable subclass of the full algebra (assuming P=/=NP). In fact, it is the unique greatest tractable subclass amongst the subclasses that contain all basic relations.}, year = {1995}, DOI = {10.1145/200836.200848}, journal = {Journal of the ACM}, volume = {42}, publisher = {ACM}, pages = {43--66}, number = {1} } @Article { ns00i, author = {Uhrmacher, Adelinde M.}, title = {Reasoning about Changing Structure, A Modeling Concept for Ecological Systems}, abstract = {This paper focuses on reasoning about change within the object-oriented modeling system EMSY. EMSY has been developed to support modeling and simulation in the domains of ecology and biology. Ecological systems are described as entities consisting of a set of attributes. rules, information about composition, environment, and coupling structure. Change takes place as the change of single entities, and is initiated by them. 
This specific view of systems serves as a base to describe a characteristic phenomenon of ecological systems: the change of system structure.}, year = {1995}, DOI = {10.1080/08839519508945472}, journal = {Applied Artificial Intelligence}, volume = {9}, pages = {157--180}, number = {2}, file_url = {http://wwwmosi.informatik.uni-rostock.de/diemosiris/static/Papers\_archiv/jaai95.pdf} } @Article { Nebel1995427, author = {Nebel, Bernhard and Koehler, Jana}, title = {Plan reuse versus plan generation: a theoretical and empirical analysis}, abstract = {The ability of a planner to reuse parts of old plans is hypothesized to be a valuable tool for improving efficiency of planning by avoiding the repetition of the same planning effort. We test this hypothesis from an analytical and empirical point of view. A comparative worst-case complexity analysis of generation and reuse under different assumptions reveals that it is not possible to achieve a provable efficiency gain of reuse over generation. Further, assuming ``conservative'' plan modification, plan reuse can actually be strictly more difficult than plan generation. While these results do not imply that there won't be an efficiency gain in some situations, retrieval of a good plan may present a serious bottleneck for plan reuse systems, as we will show. 
Finally, we present the results of an empirical study of two different plan reuse systems, pointing out possible pitfalls one should be aware of when attempting to employ reuse methods.}, year = {1995}, DOI = {10.1016/0004-3702(94)00082-C}, journal = {Artificial Intelligence}, volume = {76}, pages = {427--454}, number = {1--2} } @Inproceedings { RPvH95, author = {Rue\"{s}, Harald and Pfeifer, Holger and von Henke, Friedrich}, title = {Formalization and Reasoning in a Reflective Architecture}, abstract = {This paper is concerned with developing a reflective architecture for formalizing and reasoning about entities that occur in the process of software development, such as specifications, theorems, programs, and proofs. The starting point is a syntactic extension of the type theory ECC. An encoding of this object calculus within itself comprises the meta-level, and reflection principles are provided for switching between different levels. These reflection principles are used to mix object- and meta-level reasoning, to generate \dqstandard\dq units by executing meta-operators, and to apply formal tactics that allow for abstraction from the base logic.}, year = {1995}, booktitle = {IJCAI 1995 Workshop on Reflection and Meta Level Architecture and their Application in AI}, address = {Montreal, Canada}, editor = {M. Ibrahim and P. Cointe and F. Cummins and F. Giunchiglia and J. Malenfant}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/1995/Ruess95FormalizationAndReasoning.pdf} } @Article { nebel1995komplexitätsanalysen, author = {Nebel, Bernhard}, title = {Komplexit\"{a}tsanalysen in der K\"{u}nstlichen Intelligenz}, abstract = {Die Analyse der Berechenbarkeitskomplexit\"{a}t von typischen KI-Problemen sowie der Entwurf effizienter Verfahren zum L\"{o}sen dieser Probleme hat in den letzten Jahren verst\"{a}rkt Interesse gefunden. 
In diesem Beitrag wollen wir auf den Sinn von Komplexit\"{a}tsanalysen eingehen, examplarisch das Vorgehen bei solchen Analysen darstellen und einige Methoden zum Umgang mit dem Komplexit\"{a}tsproblem skizzieren.}, year = {1995}, journal = {K\"{u}nstliche Intelligenz}, volume = {9}, pages = {6--14}, number = {2} } @Phdthesis { Ruess95phd, author = {Rue\"{s}, Harald}, title = {Formal Meta-Programming in the Calculus of Constructions}, type = {PhD Thesis}, year = {1995}, school = {University of Ulm} } @Inproceedings { Henke95Construction, author = {von Henke, Friedrich and Dold, Axel and Rue\"{s}, Harald and Schwier, Detlef and Strecker, Martin}, title = {Construction and Deduction Methods for the Formal Development of Software}, abstract = {In this paper we present an approach towards a framework based on the type theory ECC (Extended Calculus of Constructions) in which specifications, programs and operators for modular development by stepwise refinement can be formally described and reasoned about. We demonstrate how generic software development steps can be expressed as higher-order functions and how proofs about their asserted effects can be carried out in the underlying logical calculus. 
For formalizing transformations that require syntactic manipulation of objects, we introduce a two-level system combining a meta-level and an object level and show how to express and reason about transformations that faithfully represent object-level operators.}, year = {1995}, booktitle = {KORSO: Methods, Languages, and Tools for the Construction of Correct Software}, publisher = {Springer-Verlag}, series = {Lecture Notes in Computer Science}, editor = {Manfred Broy}, pages = {239--254}, file_url = {http://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.090/Publikationen/1995/Henke95Construction.pdf} } @Inproceedings { Nebel95QualitativeSpatialReasoning, author = {Nebel, Bernhard}, title = {Computational Properties of Qualitative Spatial Reasoning: First Results}, abstract = {While the computational properties of qualitative temporal reasoning have been analyzed quite thoroughly, the computational properties of qualitative spatial reasoning are not very well investigated. In fact, almost no completeness results are known for qualitative spatial calculi and no computational complexity analysis has been carried out yet. In this paper, we will focus on the so-called RCC approach and use Bennett's encoding of spatial reasoning in intuitionistic logic in order to show that consistency checking for the topological base relations can be done efficiently. Further, we show that path-consistency is sufficient for deciding global consistency. 
As a side-effect we prove a particular fragment of propositional intuitionistic logic to be tractable.}, year = {1995}, isbn = {978-3-540-60343-6}, DOI = {10.1007/3-540-60343-3\_40}, booktitle = {KI-95: Advances in Artificial Intelligence}, volume = {981}, publisher = {Springer Berlin Heidelberg}, series = {Lecture Notes in Computer Science}, editor = {Wachsmuth, Ipke and Rollinger, Claus-Rainer and Brauer, Wilfried}, pages = {233--244} } @Article { Bäckström95complexityresults, author = {B\"{a}ckstr\"{o}m, Christer and Nebel, Bernhard}, title = {Complexity Results for SAS+ Planning}, abstract = {We have previously reported a number of tractable planning problems defined in the SAS+ formalism. This paper complements these results by providing a complete map over the complexity of SAS+ planning under all combinations of the previously considered restrictions. We analyze the complexity both of finding a minimal plan and of finding any plan. In contrast to other complexity surveys of planning we study not only the complexity of the decision problems but also of the generation problems. We prove that the SAS+-PUS problem is the maximal tractable problem under the restrictions we have considered if we want to generate minimal plans. If we are satisfied with any plan, then we can generalize further to the SAS+-US problem, which we prove to be the maximal tractable problem in this case.}, year = {1995}, journal = {Computational Intelligence}, volume = {11}, pages = {625--655} } @Inproceedings { Nebel95BaseRevisionOperations, author = {Nebel, Bernhard}, title = {Base Revision Operations and Schemes: Semantics, Representation and Complexity}, abstract = {The theory of belief revision developed by G\"{a}rdenfors and his colleagues characterizes the classes of reasonable belief revision operations. However, some of the assumptions made in the theory of belief revision are unrealistic from a computational point of view. 
We address this problem by considering revision operations that are based on a priority ordering over a set of sentences representing a belief state instead of using preference relations over all sentences that are accepted in a belief state. In addition to providing a semantic justification for such operations, we investigate also the computational complexity. We show how to generate an epistemic entrenchment ordering for a belief state from an arbitrary priority ordering over a set of sentences representing the belief state and show that the resulting revision is very efficient. Finally, we show that some schemes for generating revision operations from bases can encode the preference relations more concisely than others.}, year = {1995}, isbn = {978-3-211-82713-0}, DOI = {10.1007/978-3-7091-2690-5\_11}, booktitle = {Proceedings of the ISSEK94 Workshop on Mathematical and Statistical Methods in Artificial Intelligence}, volume = {363}, publisher = {Springer-Verlag}, series = {International Centre for Mechanical Sciences}, editor = {Della Riccia, G. and Kruse, R. and Viertl, R.}, pages = {157--170} } @Techreport { dvhpr95, author = {Dold, Axel and von Henke, Friedrich and Pfeifer, Holger and Rue{\ss}, Harald}, title = {A Generic Specification for Verifying Peephole Optimizations}, year = {1995}, institution = {Ulm University}, number = {95-14} } @Proceedings { Lakemeyer:1994:645296, title = {Foundation of Knowledge Representation and Reasoning [the Book Grew out of an ECAI-92 Workshop]}, abstract = {This collection of thoroughly refereed papers presents state-of-the-art research results by well-known researchers on the foundations of knowledge representation and reasoning. In addition, there are two surveys, one by the volume editors intended as a guide to this book and another by Shoham and Cousins on mental attitudes. 
In total, the volume provides a well-organized report on current research in knowledge representation, which is one of the central subfields of AI. Except the surveys, the papers grew out of a workshop on Theoretical Foundations of Knowledge Representation and Reasoning, held in conjunction with the 10th European Conference on Artificial Intelligence (ECAI-92) in Vienna in August 1992.}, year = {1994}, isbn = {3-540-58107-3}, volume = {810}, publisher = {Springer-Verlag}, series = {LNAI}, editor = {Lakemeyer, Gerhard and Nebel, Bernhard}, web_url = {http://dl.acm.org/citation.cfm?id=645296\\&picked=prox} } @Inproceedings { nebel1994reasoning, author = {Nebel, Bernhard and B\"{u}rckert, Hans-J\"{u}rgen}, title = {Reasoning about Temporal Relations: A Maximal Tractable Subclass of Allen's Interval Algebra}, abstract = {We introduce a new subclass of Allen's interval algebra we call ``ORD-Horn subclass,\\" which is a strict superset of the ``pointisable subclass.\\" We prove that reasoning in the ORD-Horn subclass is a polynomial-time problem and show that the path-consistency method is sufficient for deciding satisfiability. Further, using an extensive machine-generated case analysis, we show that the ORD-Horn subclass is a maximal tractable subclass of the full algebra (assuming P=/=NP). In fact, it is the unique greatest tractable subclass amongst the subclasses that contain all basic relations.}, year = {1994}, booktitle = {Proceedings of the 12th National Conference of the American Association for Artificial Intelligence (AAAI'94)}, pages = {356--361} } @Article { nebel1994computational, author = {Nebel, Bernhard and B\"{a}ckstr\"{o}m, Christer}, title = {On the computational complexity of temporal projection, planning, and plan validation}, abstract = {One kind of temporal reasoning is temporal projection - the computation of the consequences of a set of events. 
This problem is related to a number of other temporal reasoning tasks such as plan validation and planning. We show that one particular, simple case of temporal projection on partially ordered events turns out to be harder than previously conjectured, while planning is easy under the same restrictions. Additionally, we show that plan validation is tractable for an even larger class of plans - the unconditional plans - for which temporal projection is NP-hard, thus indicating that temporal projection may not be a necessary ingredient in planning and plan validation. Analyzing the partial decision procedure for the temporal projection problem that has been proposed by other authors, we notice that it fails to be complete for unconditional plans, a case where we have shown plan validation tractable.}, year = {1994}, journal = {Artificial Intelligence}, volume = {66}, publisher = {Elsevier}, pages = {125--160}, number = {1}, web_url = {http://www.sciencedirect.com/science/article/pii/0004370294900051} } @Inproceedings { Nebel94QTI, author = {Nebel, Bernhard and B\"{u}rckert, Hans-J\"{u}rgen}, title = {Managing Qualitative Temporal Information: Expressiveness vs. Complexity}, abstract = {For natural language understanding and generation, plan generation and recognition, and knowledge representation, it is necessary to represent qualitative temporal information and to reason with it. Allen's interval calculus provides an appropriate framework for such a task. We introduce a new subclass of Allen's interval algebra we call ``ORD-Horn subclass,\\" which is a strict superset of the ``pointisable subclass.\\" We prove that reasoning in the ORD-Horn subclass is a polynomial-time problem and show that the path-consistency method is sufficient for deciding satisfiability. Further, using an extensive machine-generated case analysis, we show that the ORD-Horn subclass is a maximal tractable subclass of the full algebra (assuming P=/=NP). 
In fact, it is the unique greatest tractable subclass amongst the subclasses that contain all basic relations.}, year = {1994}, isbn = {3-540-57802-1}, booktitle = {Management and Processing of Complex Data Structures}, publisher = {Springer-Verlag Berlin}, editor = {K. von Luck and H. Marburger}, pages = {104--117} } @Inproceedings { ns00w, author = {Uhrmacher, Adelinde M.}, title = {Fuzzy-basiertes Induktives Schliessen - Eine Analyse der Kohlendioxiddynamik in Biosph\"{a}re 2}, abstract = {Die Biosph\"{a}re 2, gelegen in der W\"{u}ste Arizonas, bildet ein geschlossenes \"{O}kosystem, das dazu dient, in Langzeitversuchen Erfahrungen mit geschlossenen Mensch-Umwelt-Technik Systemen zu sammeln. W\"{a}hrend der ersten Testphase traten Ph\"{a}nomene in der Kohlendioxiddynamik auf, die eine Untersuchung der konkreten Situation des Kohlenstoffzyklus in Biosph\"{a}re 2 nahelegten. Da zum einen die kausale Struktur nur unzureichend bekannt ist, und zum anderen die Entwicklungen unterschiedlichster Variablen durch Zeitreihen dokumentiert sind, bietet sich eine induktive Herangehensweise an. Das Verfahren des Fuzzy-basierten Induktiven Schliessens (FIS), das zur Analyse der Kohlendioxiddynamik von Biosph\"{a}re 2 angewendet wird, ist f\"{u}r die Untersuchung und Simulation dynamischer Systeme entwickelt worden. Die Zeitreihen von m\"{o}glicherweise den Kohlendioxidverlauf beeinflussenden Gr\"{o}{\ss}en, wie z.B. Temperatur und Sonneneinstrahlung, dienen als Ausgangspunkt f\"{u}r FIS, die relevanten \dqInputs\dq zu identifizieren und \dqVerhaltensmodelle\dq f\"{u}r die Kohlendioxiddynamik zu entwickeln. 
Simulationsexperimente mit FIS demonstrieren die G\"{u}te der entwickelten Verhaltensmodelle.}, year = {1994}, booktitle = {GI-Fachgruppe Maschinelles Lernen}, pages = {95--104}, file_url = {http://wwwmosi.informatik.uni-rostock.de/diemosiris/static/Papers\_archiv/mal94.pdf} } @Incollection { lakemeyer1994foundations, author = {Lakemeyer, Gerhard and Nebel, Bernhard}, title = {Foundations of knowledge representation and reasoning}, abstract = {Knowledge representation (KR) is the area of Artificial Intelligence that deals with the problem of representing, maintaining, and manipulating knowledge about an application domain. Since virtually all Artificial Intelligence systems have to address this problem, KR is one of the central subfields of Artificial Intelligence. While knowledge about an application domain may be represented in a variety of forms, e.g., procedurally in form of program code or implicitly as patterns of activation in a neural network, research in the area of knowledge representation assumes an explicit and declarative representation, an assumption that distinguishes KR from research in, e.g., programming languages and neural networks. 
In this paper, we summarize the current state of the art in KR and classify the contributions in this volume.}, year = {1994}, isbn = {978-3-540-58107-9}, DOI = {10.1007/3-540-58107-3\_1}, booktitle = {Foundations of Knowledge Representation and Reasoning}, volume = {810}, publisher = {Springer Berlin Heidelberg}, series = {Lecture Notes in Computer Science}, editor = {Lakemeyer, Gerhard and Nebel, Bernhard}, pages = {1--12} } @Article { baader1994empirical, author = {Baader, Franz and Hollunder, Bernhard and Nebel, Bernhard and Profitlich, Hans-J\"{u}rgen and Franconi, Enrico}, title = {An Empirical Analysis of Optimization Techniques for Terminological Representation Systems or \dqMaking KRIS get a move on\dq}, abstract = {We consider different methods of optimizing the classification process of terminological representation systems, and evaluate their effect on three different types of test data. Though these techniques can probably be found in many existing systems, until now there has been no coherent description of these techniques and their impact on the performance of a system. One goal of this paper is to make such a description available for future implementors of terminological systems. 
Building the optimizations that came off best into the KRIS system greatly enhanced its efficiency.}, year = {1994}, DOI = {10.1007/BF00872105}, journal = {Applied Intelligence}, volume = {4}, publisher = {Kluwer Academic Publishers}, pages = {109--132}, number = {2}, web_url = {http://link.springer.com/article/10.1007{\%}2FBF00872105}, web_url2 = {http://scidok.sulb.uni-saarland.de/volltexte/2011/3799/} } @Book { dold1994formalisierung, author = {Dold, Axel}, title = {Formalisierung schematischer Algorithmen}, year = {1994}, publisher = {Universit\"{a}t Ulm}, series = {Ulmer Informatik-Berichte} } @Inproceedings { Nebel94baserevision, author = {Nebel, Bernhard}, title = {Base Revision Operations and Schemes: Semantics, Representation, and Complexity}, abstract = {The theory of belief revision developed by G\"{a}rdenfors and his colleagues characterizes the classes of reasonable belief revision operations. However, some of the assumptions made in the theory of belief revision are unrealistic from a computational point of view. We address this problem by considering revision operations that are based on a priority ordering over a set of sentences representing a belief state instead of using preference relations over all sentences that are accepted in a belief state. In addition to providing a semantic justification for such operations, we investigate also the computational complexity. We show how to generate an epistemic entrenchment ordering for a belief state from an arbitrary priority ordering over a set of sentences representing the belief state and show that the resulting revision is very efficient. 
Finally, we show that some schemes for generating revision operations from bases can encode the preference relations more concisely than others.}, year = {1994}, booktitle = {Proceedings of the 11th European Conference on Artificial Intelligence (ECAI '94)}, publisher = {John Wiley \\& Sons}, pages = {341--345} } @Article { bergamaschi1994automatic, author = {Bergamaschi, Sonia and Nebel, Bernhard}, title = {Automatic Building and Validation of Multiple Inheritance Complex Object Database Schemata}, abstract = {We present an intelligent tool for the acquisition of object oriented schemata supporting multiple inheritance, which preserves taxonomy coherence and performs taxonomic inferences. Its theoretical framework is based on terminological logics, which have been developed in the area of artificial intelligence. The framework includes a rigorous formalization of complex objects, which is able to express cyclic references on the schema and instance level; a subsumption algorithm, which computes all implied specialization relationships between types; and an algorithm to detect incoherent types, i.e., necessarily empty types. Using results from formal analyses of knowledge representation languages, we show that subsumption and incoherence detection are computationally intractable from a theoretical point of view. However, the problems appear to be feasible in almost all practical cases.}, year = {1994}, journal = {Applied Intelligence}, volume = {4}, pages = {185--204}, number = {2} } @Article { heinsohn1994an, author = {Heinsohn, Jochen and Kudenko, Daniel and Nebel, Bernhard and Profitlich, Hans-J\"{u}rgen}, title = {An Empirical Analysis of Terminological Representation Systems}, abstract = {The family of terminological representation systems has its roots in the representation system KL-ONE. Since the development of KL-ONE more than a dozen similar representation systems have been developed by various research groups. 
These systems vary along a number of dimensions. In this paper, we present the results of an empirical analysis of six such systems. Surprisingly, the systems turned out to be quite diverse, leading to problems when transporting knowledge bases from one system to another. Additionally, the runtime performance between different systems and knowledge bases varied more than we expected. Finally, our empirical runtime performance results give an idea of what runtime performance to expect from such representation systems. These findings complement previously reported analytical results about the computational complexity of reasoning in such systems.}, year = {1994}, journal = {Artificial Intelligence}, volume = {68}, pages = {367--397}, number = {2} } @Proceedings { Nebel94KI, title = {Advances in Artificial Intelligence: Proceedings of the 18th Annual German Conference}, abstract = {This volume presents the proceedings of the 18th German Annual Conference on Artificial Intelligence (KI-94), held in Saarbr\"{u}cken in September 1994. Besides the invited paper \dqAI approaches towards sensor-based support in road vehicles\dq by H.-H. Nagel, the book contains 33 full research papers and 12 poster presentations selected from a total of 98 contributions, half of them originating from outside Germany. The papers cover all relevant aspects of AI with a certain focus on knowledge representation and logical foundations of AI; further topics covered are neural network applications, logic programming, natural language, machine learning, and reasoning.}, year = {1994}, isbn = {3-540-58467-6}, volume = {861}, publisher = {Springer-Verlag}, series = {LNAI}, editor = {Nebel, Bernhard and Dreschler-Fischer, Leonie S.}, web_url = {http://link.springer.com/book/10.1007{\%}2F3-540-58467-6} } @Inproceedings { ns00v, author = {Uhrmacher, Adelinde M. 
and Arnold, R.}, title = {Distributing and Maintaining Knowledge - Agents in Variable Structure Environments}, abstract = {The maintaining and adaptation of knowledge within changing environments is one of the crucial aspects in decentralized controlled and distributed systems. To explore different strategies and their consequences, the authors use an example where processors, structured in a hierarchy, are hired or fired responding to the requests of the current work load. The modeling and simulation approach uses the actor metaphor of open systems, where the nodes of the hierarchy are perceived as autonomous agents with an internal explicit model about their environment. Questions about the distribution and maintenance of knowledge referring to the structure of systems, needs for cooperation, and the change of roles are discussed against the background of the example and complete the picture about the specific effects of the single strategies. DEVS, a knowledge-based simulation environment, constitutes the background of the authors' exploration.}, year = {1994}, DOI = {10.1109/AIHAS.1994.390487}, booktitle = {Proceedings of the Fifth Annual Conference on AI, Simulation, and Planning in High Autonomy Systems. Distributed Interactive Simulation Environments}, publisher = {IEEE-Press}, address = {Gainesville, FL, USA}, pages = {178--184} } @Techreport { pub5900, author = {Andre, Elisabeth and Graf, Winfried and Heinsohn, Jochen and Nebel, Bernhard and Profitlich, Hans-J\"{u}rgen and Rist, Thomas and Wahlster, Wolfgang}, title = {PPP: Personalized Plan-Based Presenter - Project Proposal}, abstract = {The aim of the project 'Personlized Plan-Based Presenter' (PPP) is to explore and develop innovative presentation techniques for future intelligent user interfaces. The central issues of the project are:1. Planning multimedia presentation acts2. Interactive multimedia presentations3. Monitoring the effectiveness of a presentation4. 
Providing a firm representation foundation. Presentation design can be viewed as a relatively unexplored area of common-sense reasoning. Unlike most research on common-sense reasoning to date, the PPP project does not deal with metadomain research on general design principles, but focuses on formal methods capturing some of the reasoning in the design space of presentations for specific and realistic domains. The development of an interactive, multimedia presentation system requires efforts from various research areas such as planning, knowledge representation, constraint processing, natural language, and knowledge-based graphics generation.}, year = {1993}, institution = {Deutsches Forschungszentrum f\"{u}r K\"{u}nstliche Intelligenz, DFKI}, series = {DFKI Documents, D}, number = {DFKI Document D-93-05} } @Incollection { Ullmann93VerificationSupport, author = {Ullmann, M. and Hauff, H. and Loevenich, D. and Kejwal, P. and F\"{o}rster, R. and Baur, P. and G\"{o}hner, P. and Drexler, R. and Reif, W. and Stephan, Werner and Wolpers, A. and Cleve, J. and Hutter, D. and Sengler, C. and Canver, Ercument}, title = {VSE Verification Support Environment}, abstract = {Um das Vertrauen in die Korrektheit eines IT-Systems (Informationstechnisches System) zu beurteilen, werden in einschl\"{a}gigen Sicherheitskriterien [IT-SK, ITSEC] Qualit\"{a}tsanforderungen u.a. an den Entwicklungsproze{\ss} der Sicherheitsfunktionalit\"{a}t von IT-Systemen gestellt. F\"{u}r die hohen Qualit\"{a}tsstufen [IT-SK]/Evaluationsstufen [ITSEC] wird insbesondere der Einsatz formaler Methoden zur Entwicklung der sicherheitsrelevanten Systemkomponenten vorgeschrieben. In diesem Beitrag wird das Entwicklungswerkzeug Verification Support Environment und seine Methodik zur Entwicklung vertrauensw\"{u}rdiger Software-Systeme vorgestellt. 
Dieses Werkzeug ist konzeptionell an den Anforderungen der h\"{o}heren Qualit\"{a}tsstufen/Evaluationsstufen einschl\"{a}giger Sicherheitskriterienwerke ausgerichtet. Das Neue und Au{\ss}ergew\"{o}hnliche an diesem Werkzeug gegen\"{u}ber klassischen CASE-Werkzeugen ist die M\"{o}glichkeit, \"{u}ber formale Spezifikations- und Verifikationsmethodiken, die Korrektheit ganzer Software-Systeme oder Teilen davon formal nachzuweisen.}, year = {1993}, isbn = {978-3-528-05344-4}, DOI = {10.1007/978-3-322-88782-5\_12}, booktitle = {Verl\"{a}{\ss}liche Informationssysteme}, publisher = {Vieweg+Teubner Verlag}, series = {DuD-Fachbeitr\"{a}ge}, editor = {Weck, Gerhard and Horster, Patrick}, pages = {175--190} } @Inproceedings { Foerster93VSE, author = {F\"{o}rster, R. and Kejwal, P. and Baur, P. and G\"{o}hner, P. and Cleve, J. and Drexler, R. and Hutter, D. and Sengler, C. and Siekmann and Stephan, Werner and Wolpers, A. and Reif, W. and Canver, Ercument and von Henke, Friedrich}, title = {Verification Support Environment (VSE)}, abstract = {The potential dangers from a malfunctioning of information processing systems range from simple loss of data to loss of life. This led many states to come up with (hierarchies of) criteria to evaluate the trustworthiness of software systems. The Verification Support Environment (VSE) was designed to satisfy the requirements of the higher levels of such catalogs of criteria. VSE complements the usual functionality of CASE tools with support for formal specification and verification of software systems or parts thereof. In this paper we will outline the formal concepts of VSE, the system architecture, and the system's application to real world examples, taking the VSE [Vorgehensmodell] as a guideline.}, year = {1993}, booktitle = {Proceedings of the 3. 
Deutscher IT-Sicherheitskongress des BSI}, pages = {327--337} } @Inproceedings { ns00x, author = {Uhrmacher, Adelinde M.}, title = {Variable Structure Models: Autonomy and Control - Answers from Two Different Modeling Approaches}, abstract = {Two different object-oriented modeling approaches, DEVS and EMSY, constitute the background to explore the area of variable structure modeling. The realization of various kinds of structural changes is discussed in both approaches. Against the background of their prime application domain, both approaches deal with the problem of structural change differently. While DEVS implies attention more to the intelligent control of structural change, EMSY emphasizes the autonomous character of the system. Nevertheless, control and autonomy are intertwined in both approaches to handle structural change.}, year = {1993}, DOI = {10.1109/AIHAS.1993.410588}, booktitle = {Proceedings of the 4th Annual Conference on AI, Simulation, and Planning in High Autonomy Systems - Integrating Virtual Reality and Model-Based Environments}, publisher = {IEEE Press}, address = {Tucson, AZ}, pages = {133--139} } @Inproceedings { Nebel1993PlanModVSPlanGenPuK, author = {Nebel, Bernhard and Koehler, Jana}, title = {Plan Modification versus Plan Generation: A Complexity-Theoretic Perspective}, year = {1993}, booktitle = {Planen und Konfigurieren (PuK'93)}, pages = {7--17} } @Inproceedings { Nebel1993PlanModVSPlanGen, author = {Nebel, Bernhard and Koehler, Jana}, title = {Plan Modification versus Plan Generation: A Complexity-Theoretic Perspective}, abstract = {The ability of a planner to modify a plan is considered as a valuable tool for improving efficiency of planning by avoiding the repetition of the same planning effort. 
From a computational complexity point of view, however, it is by no means obvious that modifying a plan is computationally as easy as planning from scratch if the modification has to follow the principle of ``conservatism,\\" i.e., to reuse as much of the old plan as possible. Indeed, considering propositional STRIPS planning, it turns out that conservative plan modification is as hard as planning and can sometimes be harder than plan generation. Furthermore, this holds even if we consider modification problems where the old and the new goal specification are similar. We put these results into perspective and discuss the relationship to existing plan modification systems.}, year = {1993}, booktitle = {Proceedings of the Thirteenth International Joint Conference on Artificial Intelligence (IJCAI '93)}, publisher = {Morgan Kaufmann}, pages = {1436--1441} } @Article { baader1993expressivity, author = {Baader, Franz and B\"{u}rckert, Hans-J\"{u}rgen and Nebel, Bernhard and Nutt, Werner and Smolka, Gert}, title = {On the expressivity of feature logics with negation, functional uncertainty, and sort equations}, abstract = {Feature logics are the logical basis for so-called unification grammars studied in computational linguistics. 
We investigate the expressivity of feature terms with negation and the functional uncertainty construct needed for the description of long-distance dependencies and obtain the following results: satisfiability of feature terms is undecidable, sort equations can be internalized, consistency of sort equations is decidable if there is at least one atom, and consistency of sort equations is undecidable if there is no atom.}, year = {1993}, journal = {Journal of Logic, Language and Information}, volume = {2}, publisher = {Springer}, pages = {1--18}, number = {1}, web_url = {http://link.springer.com/article/10.1007{\%}2FBF01051766} } @Inproceedings { backstrom1993complexity, author = {B\"{a}ckstr\"{o}m, Christer and Nebel, Bernhard}, title = {Complexity Results for SAS+ Planning}, abstract = {We have previously reported a number of tractable planning problems defined in the SAS+ formalism. This paper complements these results by providing a complete map over the complexity of SAS+ planning under all combinations of the previously considered restrictions. We analyse the complexity both of finding a minimal plan and of finding any plan. In contrast to other complexity surveys of planning we study not only the complexity of the existence problems but also of the search problems. We prove that the SAS+-PUS problem is the maximal tractable problem under the restrictions we have considered if we want to find a minimal plan. 
If we are satisfied with finding any plan, then we can generalize further to the SAS+-US problem, which we prove to be the maximal tractable problem in this case.}, year = {1993}, booktitle = {Proceedings of the Thirteenth International Joint Conference on Artificial Intelligence (IJCAI '93)} } @Book { padgham1993combining, author = {Padgham, Lin and Nebel, Bernhard}, title = {Combining classification and nonmonotonic inheritance reasoning: A first step}, abstract = {The formal analysis of semantic networks and frame systems led to the development of nonmonotonic inheritance networks and terminological logics. While nonmonotonic inheritance networks formalize the notion of defeasible inheritance of typical properties, terminological logics formalize the notion of defining concepts and reasoning about definitions. Although it seems to be desirable to (re-)unify the two approaches, such an attempt has not been made until now. In this paper, we will make a first step into this direction by specifying a nonmonotonic extension of a simple terminological logic.}, year = {1993}, booktitle = {Methodologies for Intelligent Systems (ISMIS '93)}, publisher = {Springer-Verlag}, pages = {132--141} } @Incollection { ns00l, author = {Uhrmacher, Adelinde M.}, title = {EMSY - An Extended Modeling System}, year = {1992}, booktitle = {AI, Expert Systems, and Symbolic Computing for Scientific Computation}, publisher = {North Holland, Amsterdam}, editor = {Houstis, E. N. and Rice, J. R.} } @Inproceedings { ns00y, author = {Uhrmacher, Adelinde M.}, title = {Qualitative and Quantitative Simulation - Some Comparative Aspects}, year = {1992}, booktitle = {SoftStat '91. Advances in Statistical Software.}, publisher = {Gustav Fischer}, address = {Stuttgart}, pages = {343-352} } @Inproceedings { Baur92VSE, author = {Baur, P. and Plasa and Kejwal, P. and Drexler, R. and Reif, W. and Stephan, Werner and Wolpers, A. and Hutter, D. and Sengler, C. 
and Canver, Ercument}, title = {The Verification Support Environment VSE}, year = {1992}, booktitle = {IFA Symposium on Safety, Security and Reliability of Computers} }