% IMPORTANT: The following is UTF-8 encoded. This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.

@PHDTHESIS{Ecker:975208,
      author       = {Ecker, Christian},
      othercontributors = {Brecher, Christian and Verl, Alexander},
      title        = {{F}lexible {I}nbetriebnahme roboterbasierter
                      {M}ontagesysteme durch {P}rozessdemonstration;
                      1. {A}uflage},
      volume       = {2023,31},
      school       = {RWTH Aachen University},
      type         = {Dissertation},
      address      = {Aachen},
      publisher    = {Apprimus Verlag},
      reportid     = {RWTH-2023-11893},
      series       = {Ergebnisse aus der Produktionstechnik},
      pages        = {1 Online-Ressource : Illustrationen},
      year         = {2023},
      note         = {Druckausgabe: 2023. - Auch veröffentlicht auf dem
                      Publikationsserver der RWTH Aachen University 2024;
                      Dissertation, RWTH Aachen University, 2023},
      abstract     = {A dynamic market environment and a high number of variants
                      are making it increasingly difficult for manufacturing
                      companies to adopt cost-effective operating parameters for
                      their production systems. Flexibility and adaptability of
                      the production equipment at the interface to the
                      manufactured product are therefore of increasing
                      importance. Regarding automated assembly using robot-based
                      systems, there is a particular lack of suitable methods
                      that allow employees to adapt processes independently to
                      meet new requirements. Therefore, this thesis focuses on
                      the development of an intuitive interaction method for
                      assembly workers, which enables the holistic learning of
                      automated assembly processes. The approach avoids the use
                      of control-specific programming procedures, which would
                      require a high degree of expert knowledge and coordination
                      of distributed control systems. Instead, a process-oriented
                      view of the control task is provided, which is aligned with
                      the domain knowledge of the operator. The interaction
                      method applies the concept of ``programming by
                      demonstration'' (PbD). Technically, the realized
                      interaction interface consists of a combination of motion
                      and gripping sensors as well as image processing, by which
                      the operator is detected during assembly demonstration. The
                      sensor data is used for interaction-parallel tracking of
                      the operator's actions and, subsequently, for the
                      recognition of defined basic assembly patterns. The
                      immediate visual feedback of recognized assembly steps
                      results in a direct feedback loop that enables operators to
                      validate assembly sequences and iteratively adjust process
                      parameters. The interpretation of sensor data is based on a
                      multi-agent approach and an event-driven architecture. For
                      information exchange, the implemented agent types use a
                      proposed communication protocol. The open and generic
                      design of the information flow facilitates extensibility by
                      additional assembly functions and sensor systems. The
                      application and validation of the concept are shown using a
                      demonstration system equipped with a freely programmable
                      robot system and flexibly and universally usable function
                      modules for assembly. Furthermore, the integration of the
                      interaction method into a comprehensive engineering
                      framework is described. The final evaluation characterizes
                      the designed commissioning approach in its effects on the
                      interacting operator (usability) and the achieved
                      flexibility for the assembly organization. It also provides
                      an assessment of its technical components' performance.},
      cin          = {417310 / 417200},
      ddc          = {620},
      cid          = {$I:(DE-82)417310_20140620$ / $I:(DE-82)417200_20140620$},
      typ          = {PUB:(DE-HGF)11 / PUB:(DE-HGF)3},
      doi          = {10.18154/RWTH-2023-11893},
      url          = {https://publications.rwth-aachen.de/record/975208},
}
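
% Usage sketch (kept as comments so the .bib file stays valid): the header
% above recommends a UTF-8-capable backend, so one option is biblatex with
% biber. The file name "references.bib" and the document text are assumptions,
% not part of this record; only the citation key "Ecker:975208" comes from it.
%
%   \documentclass{article}
%   \usepackage[backend=biber]{biblatex}
%   \addbibresource{references.bib}
%   \begin{document}
%   Process demonstration for robot-based assembly \cite{Ecker:975208}.
%   \printbibliography
%   \end{document}
%
% Typical build sequence: pdflatex, then biber, then pdflatex twice.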