@inproceedings{3804b8bc72064934bf1d4beea91ec87d,
title = "Effective deep memory networks for distant supervised relation extraction",
abstract = "Distant supervised relation extraction (RE) has been an effective way of finding novel relational facts from text without labeled training data. Typically it can be formalized as a multi-instance multi-label problem. In this paper, we introduce a novel neural approach for distant supervised RE with special focus on attention mechanisms. Unlike the feature-based logistic regression model and compositional neural models such as CNN, our approach includes two major attention-based memory components, which are capable of explicitly capturing the importance of each context word for modeling the representation of the entity pair, as well as the intrinsic dependencies between relations. Such importance degree and dependency relationship are calculated with multiple computational layers, each of which is a neural attention model over an external memory. Experiment on real-world datasets shows that our approach performs significantly and consistently better than various baselines.",
author = "Xiaocheng Feng and Jiang Guo and Bing Qin and Ting Liu and Yongjie Liu",
year = "2017",
doi = "10.24963/ijcai.2017/559",
language = "英语",
series = "IJCAI International Joint Conference on Artificial Intelligence",
publisher = "International Joint Conferences on Artificial Intelligence",
pages = "4002--4008",
editor = "Carles Sierra",
booktitle = "26th International Joint Conference on Artificial Intelligence, IJCAI 2017",
address = "美国",
note = "26th International Joint Conference on Artificial Intelligence, IJCAI 2017 ; Conference date: 19-08-2017 Through 25-08-2017",
}