@inproceedings{2844d54136694d3785bcd9b7493f6cc5,
  title     = {{EMOEEG}: A New Multimodal Dataset for Dynamic {EEG}-Based Emotion Recognition with Audiovisual Elicitation},
  abstract  = {EMOEEG is a multimodal dataset where physiological responses to both visual and audiovisual stimuli were recorded, along with videos of the subjects, with a view to developing affective computing systems, especially automatic emotion recognition systems. The experimental setup involves various physiological sensors, among which electroencephalographic sensors. The experiment is performed with 8 participants, 4 from both genders. The stimuli include both sequences of static images from the IAPS dataset, and short video excerpts focusing on negative fear-type emotions. The annotation is obtained by participant self assessment, after a calibration phase. In the case of video stimuli, a novel simplified dynamic annotation strategy is used to enhance the quality and consistency of the self-assessments. This paper also analyses the annotation results and provides a statistical study of inter-annotator agreement. The dataset will continue to grow and will be made publicly available.},
  keywords  = {Affective computing, Annotation, Arousal, Electroencephalography (EEG), Fear-type emotions, Inter-annotator agreement, Multimodal data, Valence},
  author    = {Conneau, {Anne Claire} and Hajlaoui, Ayoub and Chetouani, Mohamed and Essid, Slim},
  note      = {Publisher Copyright: {\textcopyright} 2017 EURASIP.; 25th European Signal Processing Conference, EUSIPCO 2017 ; Conference date: 28-08-2017 Through 02-09-2017},
  year      = {2017},
  month     = oct,
  day       = {23},
  doi       = {10.23919/EUSIPCO.2017.8081305},
  language  = {English},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  pages     = {738--742},
  booktitle = {25th European Signal Processing Conference, EUSIPCO 2017},
}