@inproceedings{SchaferJ2019JunFeasibilityAnalysisSensora,
    author    = {Sch{\"a}fer, Jeroen and Gebhard, Marion},
    title     = {Feasibility Analysis of Sensor Modalities to Control a Robot with Eye and Head Movements for Assistive Tasks},
    booktitle = {Proceedings of the 12th {ACM} International Conference on {PErvasive} Technologies Related to Assistive Environments},
    series    = {{PETRA} '19},
    year      = {2019},
    month     = jun,
    pages     = {482--488},
    publisher = {ACM},
    address   = {Rhodes, Greece},
    isbn      = {978-1-4503-6232-0},
    doi       = {10.1145/3316782.3322774},
    abstract  = {Assistive robotics has offered a way for people with severe motor disabilities (i. e. tetraplegics) to perform every day tasks without help. New sensor modalities to control a robot system are investigated within this work to enable tetraplegics to gain more autonomy in everyday life. In this work several modalities to capture information related to the user are tested and compared. The five sensor modalities, electrooculography, video-based eye tracking, MARG sensors, video-based head tracking and electromyography of the posterior auricular muscle, can be used to control a robot hands-free. It is proposed to use movements of the head as continuous control and eye movements as discrete event control. The tests show that the MARG sensors are most reliable to track head movements and eye tracking glasses to capture movements of the eyes.},
    copyright = {All rights reserved},
    keywords  = {assistive robotics,eye tracker,head tracker,HRI,human robot interaction,MARG sensor,sensor modalities}
}