2018
Maharjan, Suraj; Kar, Sudipta; Montes, Manuel; Gonzalez, Fabio A.; Solorio, Thamar
Letting Emotions Flow: Success Prediction by Modeling the Flow of Emotions in Books Inproceedings
In: Proceedings of the 2018 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies, Association for Computational Linguistics, New Orleans, Louisiana, 2018.
Abstract | Links | BibTeX | Tags: Attention Model, Emotion Flow, Emotion Shapes, Likability Classification, Multitask
@inproceedings{Maharjan2018,
  title     = {Letting Emotions Flow: Success Prediction by Modeling the Flow of Emotions in Books},
  author    = {Maharjan, Suraj and Kar, Sudipta and Montes, Manuel and Gonzalez, Fabio A. and Solorio, Thamar},
  url       = {http://www.aclweb.org/anthology/N18-2042},
  doi       = {10.18653/v1/N18-2042},
  year      = {2018},
  month     = jun,
  date      = {2018-06-01},
  booktitle = {Proceedings of the 2018 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies},
  publisher = {Association for Computational Linguistics},
  address   = {New Orleans, Louisiana},
  abstract  = {Books have the power to make us feel happiness, sadness, pain, surprise, or sorrow. An author's dexterity in the use of these emotions captivates readers and makes it difficult for them to put the book down. In this paper, we model the flow of emotions over a book using recurrent neural networks and quantify its usefulness in predicting the book's success. We obtained the best weighted F1-score of 0.690 for predicting books' success in a multitask setting (simultaneously predicting success and genre of books).},
  keywords  = {Attention Model, Emotion Flow, Emotion Shapes, Likability Classification, Multitask},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
Books have the power to make us feel happiness, sadness, pain, surprise, or sorrow. An author’s dexterity in the use of these emotions captivates readers and makes it difficult for them to put the book down. In this paper, we model the flow of emotions over a book using recurrent neural networks and quantify its usefulness in predicting the book’s success. We obtained the best weighted F1-score of 0.690 for predicting books’ success in a multitask setting (simultaneously predicting success and genre of books).