@inproceedings{960379face2941de9d51aedeb5e4bb77,
  title     = {Learning Contextual Features with Multi-Head Self-Attention for Fake News Detection},
  abstract  = {Automatic fake news detection has attracted great concern in recent years due to it{\textquoteright}s tremendous negative impacts on public. Since fake news is usually written to mislead readers, lexical features based methods have great limitations. Previous work has proven the effectiveness of contextual information for fake news detection. However, they ignore the influence of sequence order when extract features from contextual information. Inspired by transformer technique, we propose Contextual Features with Multi-head Self-attention model(CMS) to extract features from contextual information for fake news detection. CMS can automatic capture the dependencies between contextual information and learning a global representation from contextual information for fake news detection. Experimental results on the real-world data demonstrate the effectiveness of the proposed model.},
  keywords  = {Contextual information, Fake news detection, Multi-head self-attention},
  author    = {Wang, Yangqian and Han, Hao and Ding, Ye and Wang, Xuan and Liao, Qing},
  editor    = {Xu, Ruifeng and Wang, Jianzong and Zhang, Liang-Jie},
  booktitle = {Cognitive Computing -- {ICCC} 2019 - 3rd International Conference, Held as Part of the Services Conference Federation, {SCF} 2019, Proceedings},
  series    = {Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)},
  publisher = {Springer Verlag},
  address   = {Germany},
  pages     = {132--142},
  year      = {2019},
  doi       = {10.1007/978-3-030-23407-2_11},
  isbn      = {9783030234065},
  language  = {English},
  note      = {Publisher Copyright: {\textcopyright} Springer Nature Switzerland AG 2019.; 3rd International Conference on Cognitive Computing, ICCC 2019, held as part of the Services Conference Federation, SCF 2019 ; Conference date: 25-06-2019 Through 30-06-2019},
}