@mastersthesis {xGranero, title = {A Video Database for Analyzing Affective Physiological Responses}, year = {2019}, abstract = {

Affective computing, leveraged by machine learning techniques, is advancing rapidly in the task of affect recognition in videos. However, more annotated data are needed. Several studies have built large video datasets with emotion annotations; others have collected datasets of music videos or film scenes with physiological signals. However, none of them combines physiological signals with user-generated videos. This work presents GALLUS, a novel database of user-generated videos with affective physiological responses. The database comprises 775 videos that were previously annotated through an online crowdsourcing platform. Physiological responses, including electroencephalography, electrocardiography, galvanic skin response, facial emotion recognition, and eye gaze, were collected from 30 participants while they watched the stimuli. Our dataset will be made public to foster research in affect recognition.

}, author = {Granero, Marcel}, editor = {Borth, Damian and Weber, Barbara and Gir{\'o}-i-Nieto, Xavier} }