@techreport{StollenwerkHinkenjannKlein2018,
  author      = {Stollenwerk, Katharina and Hinkenjann, Andr{\'e} and Klein, Reinhard},
  title       = {GraspDB14 -- Documentation on a database of grasp motions and its creation},
  isbn        = {978-3-96043-061-2},
  issn        = {1869-5272},
  doi         = {10.18418/978-3-96043-061-2},
  institution = {Hochschule Bonn-Rhein-Sieg University of Applied Sciences, Department of Computer Science},
  series      = {Technical Report / Hochschule Bonn-Rhein-Sieg University of Applied Sciences, Department of Computer Science},
  number      = {01-2018},
  pages       = {47},
  year        = {2018},
  abstract    = {Motion capture, often abbreviated mocap, generally aims at recording any kind of motion -- be it from a person or an object -- and at transforming it into a computer-readable format. The data recorded from (professional and non-professional) human actors are typically used for analysis in, e.g., medicine, sport sciences, or biomechanics to evaluate human motion across various factors. Motion capture is also widely used in the entertainment industry: in video games and films, realistic motion sequences and animations are generated through data-driven motion synthesis based on recorded motion (capture) data. Although the amount of publicly available full-body motion capture data is growing, the research community still lacks a comparable corpus of specialty motion data such as prehensile movements for everyday actions. On the one hand, such data can be used to enrich full-body motion capture data through hand-over animation, since full-body data are usually captured without hand motion due to the drastic difference in articulation detail. On the other hand, they provide a means to classify and analyse prehensile movements, with or without respect to the concrete object manipulated, and to transfer the acquired knowledge to other fields of research (e.g. from 'pure' motion analysis to robotics or biomechanics). The objective of this motion capture database is therefore to provide well-documented, free motion capture data for research purposes. The presented database, GraspDB14, contains over 2000 prehensile movements of ten different non-professional actors interacting with 15 different objects. Each grasp was realised five times by each actor. The motions are systematically named, containing an (anonymous) identifier for each actor as well as one for the object grasped or interacted with. The data were recorded as joint angles (and raw 8-bit sensor data), which can be transformed into positional 3D data (3D trajectories of each joint). In this document, we provide a detailed description of the GraspDB14 database and of its creation (for reproducibility). Chapter 2 gives a brief overview of motion capture techniques and of freely available motion capture databases for both full-body motions and hand motions, and briefly discusses how such data are made useful and re-used. Chapter 3 describes the database recording process and details the recording setup and the recorded scenarios, including a list of objects and performed types of interaction. Chapter 4 covers the file formats used, their contents, and naming patterns. We provide various tools for parsing, conversion, and visualisation of the recorded motion sequences and document their usage in Chapter 5.},
  language    = {en}
}