@inproceedings{2e460c8db4cb4646b582a6667f2ae492,
title = "Sensing Affective States using Facial Expression Analysis",
abstract = "An important factor for the next generation of Human Computer Interaction is the implementation of an interaction model that automatically reasons in context of the users goals, attitudes, affective characteristics and capabilities, and adapts the system accordingly. Although various techniques have been proposed for automatically detecting affective states using facial expression, this is still a research challenge in terms of classification accuracy. This paper investigates an extensible automatic affective state detection approach via the analysis of facial expressions from digital photographs. The main contribution of this study can be summarised in two points. Firstly, utilising facial point distance vectors within the representation of facial expressions is shown to be more accurate and robust in comparison to using standard Cartesian coordinates. Secondly, employing a two-stage Support Vector Machine-based classification model, entitled Hierarchical Parallelised Binary Sup- port Vector Machines (HPBSVM), is shown to improve classification performance over other machine learning techniques. The resulting classification model has been evaluated using two different facial expression datasets (namely CKPLUS and KDEF), yielding accuracy rates of 96.9% and 96.2% over each dataset respectively.",
keywords = "User Modelling, Facial Expression, Emotion Detection, Affective Computing, Human Computer Interaction",
author = "Anas Samara and Leo Galway and Raymond Bond and Hui Wang",
note = "10th International Conference on Ubiquitous Computing and Ambient Intelligence UCAmI 2016 ; Conference date: 08-08-2016",
year = "2016",
month = nov,
day = "2",
language = "English",
series = " Lecture Notes in Computer Science ",
publisher = "Springer",
pages = "341--352",
booktitle = "UCAmI 2016: Ubiquitous Computing and Ambient Intelligence",
}
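
The abstract's first contribution, representing a face by pairwise distances between detected facial landmark points rather than their raw Cartesian coordinates, can be sketched as follows. This is an illustrative sketch only, not the authors' implementation: the landmark detector is assumed to be external, and the paper's two-stage HPBSVM classifier is approximated here by a single off-the-shelf one-vs-one SVM from scikit-learn.

    # Illustrative sketch (not the authors' code): pairwise landmark-distance
    # features classified with an SVM.
    from itertools import combinations

    import numpy as np
    from sklearn.svm import SVC


    def distance_vector(landmarks: np.ndarray) -> np.ndarray:
        """Turn an (n_points, 2) array of facial landmark coordinates into a
        vector of pairwise Euclidean distances. Unlike raw Cartesian
        coordinates, these distances are invariant to face translation."""
        return np.array([
            np.linalg.norm(landmarks[i] - landmarks[j])
            for i, j in combinations(range(len(landmarks)), 2)
        ])


    def train_classifier(landmark_sets, labels):
        """landmark_sets: list of (n_points, 2) arrays, one per image;
        labels: emotion label per image. The paper's two-stage HPBSVM model
        is approximated here by a single one-vs-one SVC for brevity."""
        features = np.stack([distance_vector(lm) for lm in landmark_sets])
        clf = SVC(kernel="rbf", decision_function_shape="ovo")
        clf.fit(features, labels)
        return clf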