@article{J.2021,
  title    = {Facial emotions are accurately encoded in the brains of those with autism: A deep learning approach},
  author   = {Torres, M. J. and Clarkson, T. and Hauschild, K. and Luhmann, C. C. and Lerner, D. M. and Riccardi, G.},
  url      = {https://www.sciencedirect.com/science/article/abs/pii/S2451902221001075},
  year     = {2021},
  date     = {2021-04-16},
  journal  = {Biological Psychiatry: Cognitive Neuroscience and Neuroimaging},
  keywords = {Affective Computing, Autism, Machine Learning, Signal Annotation and Interpretation}
}
@misc{A.2020b,
  title         = {Emotion Carrier Recognition from Personal Narratives},
  author        = {Tammewar, A. and Cervone, A. and Riccardi, G.},
  url           = {https://sisl.disi.unitn.it/wp-content/uploads/2020/12/2008.07481.pdf},
  eprint        = {2008.07481},
  archiveprefix = {arXiv},
  year          = {2020},
  date          = {2020-08-17},
  keywords      = {Affective Computing, Natural Language Processing}
}
@article{F.2018,
  title    = {Annotating and Modeling Empathy in Spoken Conversations},
  author   = {Alam, F. and Danieli, M. and Riccardi, G.},
  url      = {https://www.sciencedirect.com/science/article/pii/S088523081730133X},
  year     = {2018},
  date     = {2018-07-01},
  journal  = {Computer Speech \& Language},
  volume   = {50},
  pages    = {40--61},
  keywords = {Affective Computing, Discourse, Signal Annotation and Interpretation}
}
@inproceedings{Riccardi2005,
  title     = {Grounding Emotions in Human-Machine Conversational Systems},
  author    = {Riccardi, G. and Hakkani-T{\"u}r, D.},
  url       = {https://sisl.disi.unitn.it/wp-content/uploads/2014/11/intetain05.pdf},
  year      = {2005},
  date      = {2005-01-01},
  booktitle = {Intelligent Technologies for Interactive Entertainment (INTETAIN)},
  series    = {Lecture Notes in Computer Science},
  publisher = {Springer-Verlag},
  pages     = {144--154},
  abstract  = {In this paper we investigate the role of user emotions in human-machine goal-oriented conversations. There has been a growing interest in predicting emotions from acted and non-acted spontaneous speech. Much of the research work has gone in determining what are the correct labels and improving emotion prediction accuracy. In this paper we evaluate the value of user emotional state towards a computational model of emotion processing. We consider a binary representation of emotions (positive vs. negative) in the context of a goal-driven conversational system. For each human-machine interaction we acquire the temporal emotion sequence going from the initial to the final conversational state. These traces are used as features to characterize the user state dynamics. We ground the emotion traces by associating its patterns to dialog strategies and their effectiveness. In order to quantify the value of emotion indicators, we evaluate their predictions in terms of speech recognition and spoken language understanding errors as well as task success or failure. We report results on the 11.5K dialog corpus samples from the How may I Help You? corpus.},
  keywords  = {Affective Computing, Conversational and Interactive Systems}
}
@comment{Stray paste: this paragraph was a verbatim duplicate of the abstract field of entry Riccardi2005 above, sitting outside any entry (BibTeX ignored it). Wrapped in @comment to make the intent explicit.}