| { |
| "paper_id": "2019", |
| "header": { |
| "generated_with": "S2ORC 1.0.0", |
| "date_generated": "2023-01-19T14:54:58.049405Z" |
| }, |
| "title": "Document Retrieval based on Neural Network and Document Concept Graph", |
| "authors": [ |
| { |
| "first": "Chia-Hsin", |
| "middle": [], |
| "last": "\u76e7\u5bb6\u99a8", |
| "suffix": "", |
| "affiliation": { |
| "laboratory": "", |
| "institution": "National Taipei University of Technology", |
| "location": {} |
| }, |
| "email": "" |
| }, |
| { |
| "first": "", |
| "middle": [], |
| "last": "Lu", |
| "suffix": "", |
| "affiliation": { |
| "laboratory": "", |
| "institution": "National Taipei University of Technology", |
| "location": {} |
| }, |
| "email": "" |
| }, |
| { |
| "first": "Jenq-Haur", |
| "middle": [], |
| "last": "\u738b\u6b63\u8c6a", |
| "suffix": "", |
| "affiliation": { |
| "laboratory": "", |
| "institution": "Taipei University of Technology", |
| "location": {} |
| }, |
| "email": "" |
| }, |
| { |
| "first": "", |
| "middle": [], |
| "last": "Wang", |
| "suffix": "", |
| "affiliation": { |
| "laboratory": "", |
| "institution": "Taipei University of Technology", |
| "location": {} |
| }, |
| "email": "jhwang@csie.ntut.edu.tw" |
| } |
| ], |
| "year": "", |
| "venue": null, |
| "identifiers": {}, |
| "abstract": "If the search results can consider topics or similar situations, we can find results that are more in line with the user's expectations. Therefore, our research uses a neural network and a document concept graph to explore topic or semantic similarity. The experimental results show that the best macro-F1 is 70.0% in the classifier trained via the neural network. Combined with the calculation of the concept graph of the document, the nDCG score can reach 0.959 in terms of the similarity between the search content and the results. This proves that the results based on the neural network and the document concept graph can be used to complement and enhance the performance of information retrieval.", |
| "pdf_parse": { |
| "paper_id": "2019", |
| "_pdf_hash": "", |
| "abstract": [ |
| { |
| "text": "If the search results can consider topics or similar situations, we can find results that are more in line with the user's expectations. Therefore, our research uses a neural network and a document concept graph to explore topic or semantic similarity. The experimental results show that the best macro-F1 is 70.0% in the classifier trained via the neural network. Combined with the calculation of the concept graph of the document, the nDCG score can reach 0.959 in terms of the similarity between the search content and the results. This proves that the results based on the neural network and the document concept graph can be used to complement and enhance the performance of information retrieval.", |
| "cite_spans": [], |
| "ref_spans": [], |
| "eq_spans": [], |
| "section": "Abstract", |
| "sec_num": null |
| } |
| ], |
| "body_text": [ |
| { |
| "text": "EQUATION", |
| "cite_spans": [], |
| "ref_spans": [], |
| "eq_spans": [ |
| { |
| "start": 0, |
| "end": 8, |
| "text": "EQUATION", |
| "ref_id": "EQREF", |
| "raw_str": "( ) = ( , )", |
| "eq_num": "(5)" |
| } |
| ], |
| "section": "", |
| "sec_num": null |
| }, |
| { |
| "text": "EQUATION", |
| "cite_spans": [], |
| "ref_spans": [], |
| "eq_spans": [ |
| { |
| "start": 0, |
| "end": 8, |
| "text": "EQUATION", |
| "ref_id": "EQREF", |
| "raw_str": "= ( \u210e ( ) | \u2208 \u210e ( ( )))", |
| "eq_num": "(6)" |
| } |
| ], |
| "section": "", |
| "sec_num": null |
| } |
| ], |
| "back_matter": [], |
| "bib_entries": { |
| "BIBREF0": { |
| "ref_id": "b0", |
| "title": "International Conference on Learning Representations", |
| "authors": [], |
| "year": 2018, |
| "venue": "", |
| "volume": "", |
| "issue": "", |
| "pages": "", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "International Conference on Learning Representations, 2018.", |
| "links": null |
| }, |
| "BIBREF1": { |
| "ref_id": "b1", |
| "title": "Towards open set deep networks", |
| "authors": [ |
| { |
| "first": "A", |
| "middle": [], |
| "last": "Bendale", |
| "suffix": "" |
| }, |
| { |
| "first": "T", |
| "middle": [ |
| "E" |
| ], |
| "last": "Boult", |
| "suffix": "" |
| } |
| ], |
| "year": 2016, |
| "venue": "Proceedings of the IEEE conference on computer vision and pattern recognition", |
| "volume": "", |
| "issue": "", |
| "pages": "1563--1572", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "A. Bendale and T. E. Boult, \"Towards open set deep networks,\" in Proceedings of the IEEE conference on computer vision and pattern recognition, 2016, pp. 1563-1572.", |
| "links": null |
| }, |
| "BIBREF2": { |
| "ref_id": "b2", |
| "title": "Breaking the closed world assumption in text classification", |
| "authors": [ |
| { |
| "first": "G", |
| "middle": [], |
| "last": "Fei", |
| "suffix": "" |
| }, |
| { |
| "first": "B", |
| "middle": [], |
| "last": "Liu", |
| "suffix": "" |
| } |
| ], |
| "year": 2016, |
| "venue": "Proceedings of the 2016 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies", |
| "volume": "", |
| "issue": "", |
| "pages": "506--514", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "G. Fei and B. Liu, \"Breaking the closed world assumption in text classification,\" in Proceedings of the 2016 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies, 2016, pp. 506-514.", |
| "links": null |
| }, |
| "BIBREF3": { |
| "ref_id": "b3", |
| "title": "Multi-class open set recognition using probability of inclusion", |
| "authors": [ |
| { |
| "first": "L", |
| "middle": [ |
| "P" |
| ], |
| "last": "Jain", |
| "suffix": "" |
| }, |
| { |
| "first": "W", |
| "middle": [ |
| "J" |
| ], |
| "last": "Scheirer", |
| "suffix": "" |
| }, |
| { |
| "first": "T", |
| "middle": [ |
| "E" |
| ], |
| "last": "Boult", |
| "suffix": "" |
| } |
| ], |
| "year": 2014, |
| "venue": "European Conference on Computer Vision", |
| "volume": "", |
| "issue": "", |
| "pages": "393--409", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "L. P. Jain, W. J. Scheirer, and T. E. Boult, \"Multi-class open set recognition using probability of inclusion,\" in European Conference on Computer Vision, 2014: Springer, pp. 393-409.", |
| "links": null |
| }, |
| "BIBREF4": { |
| "ref_id": "b4", |
| "title": "Doc: Deep open classification of text documents", |
| "authors": [ |
| { |
| "first": "L", |
| "middle": [], |
| "last": "Shu", |
| "suffix": "" |
| }, |
| { |
| "first": "H", |
| "middle": [], |
| "last": "Xu", |
| "suffix": "" |
| }, |
| { |
| "first": "B", |
| "middle": [], |
| "last": "Liu", |
| "suffix": "" |
| } |
| ], |
| "year": 2017, |
| "venue": "Proceedings of the 2017 Conference on Empirical Methods in Natural Language Processing", |
| "volume": "", |
| "issue": "", |
| "pages": "2911--2916", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "L. Shu, H. Xu, and B. Liu, \"Doc: Deep open classification of text documents,\" in Proceedings of the 2017 Conference on Empirical Methods in Natural Language Processing, 2017, pp. 2911-2916.", |
| "links": null |
| }, |
| "BIBREF5": { |
| "ref_id": "b5", |
| "title": "Open-world Learning and Application to Product Classification", |
| "authors": [ |
| { |
| "first": "H", |
| "middle": [], |
| "last": "Xu", |
| "suffix": "" |
| }, |
| { |
| "first": "B", |
| "middle": [], |
| "last": "Liu", |
| "suffix": "" |
| }, |
| { |
| "first": "L", |
| "middle": [], |
| "last": "Shu", |
| "suffix": "" |
| }, |
| { |
| "first": "P", |
| "middle": [], |
| "last": "Yu", |
| "suffix": "" |
| } |
| ], |
| "year": 2019, |
| "venue": "The World Wide Web Conference", |
| "volume": "", |
| "issue": "", |
| "pages": "3413--3419", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "H. Xu, B. Liu, L. Shu, and P. Yu, \"Open-world Learning and Application to Product Classification,\" in The World Wide Web Conference, 2019: ACM, pp. 3413-3419.", |
| "links": null |
| }, |
| "BIBREF6": { |
| "ref_id": "b6", |
| "title": "Semantic documents relatedness using concept graph representation", |
| "authors": [ |
| { |
| "first": "Y", |
| "middle": [], |
| "last": "Ni", |
| "suffix": "" |
| } |
| ], |
| "year": 2016, |
| "venue": "Proceedings of the Ninth ACM International Conference on Web Search and Data Mining", |
| "volume": "", |
| "issue": "", |
| "pages": "635--644", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "Y. Ni et al., \"Semantic documents relatedness using concept graph representation,\" in Proceedings of the Ninth ACM International Conference on Web Search and Data Mining, 2016: ACM, pp. 635-644.", |
| "links": null |
| }, |
| "BIBREF7": { |
| "ref_id": "b7", |
| "title": "Entity-duet neural ranking: Understanding the role of knowledge graph semantics in neural information retrieval", |
| "authors": [ |
| { |
| "first": "Z", |
| "middle": [], |
| "last": "Liu", |
| "suffix": "" |
| }, |
| { |
| "first": "C", |
| "middle": [], |
| "last": "Xiong", |
| "suffix": "" |
| }, |
| { |
| "first": "M", |
| "middle": [], |
| "last": "Sun", |
| "suffix": "" |
| }, |
| { |
| "first": "Z", |
| "middle": [], |
| "last": "Liu", |
| "suffix": "" |
| } |
| ], |
| "year": 2018, |
| "venue": "Proceedings of the 56th Annual Meeting of the Association for Computational Linguistics", |
| "volume": "1", |
| "issue": "", |
| "pages": "2395--2405", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "Z. Liu, C. Xiong, M. Sun, and Z. Liu, \"Entity-duet neural ranking: Understanding the role of knowledge graph semantics in neural information retrieval,\" in Proceedings of the 56th Annual Meeting of the Association for Computational Linguistics, 2018, vol. 1, pp. 2395-2405.", |
| "links": null |
| }, |
| "BIBREF8": { |
| "ref_id": "b8", |
| "title": "Efficient estimation of word representations in vector space", |
| "authors": [ |
| { |
| "first": "T", |
| "middle": [], |
| "last": "Mikolov", |
| "suffix": "" |
| }, |
| { |
| "first": "K", |
| "middle": [], |
| "last": "Chen", |
| "suffix": "" |
| }, |
| { |
| "first": "G", |
| "middle": [], |
| "last": "Corrado", |
| "suffix": "" |
| }, |
| { |
| "first": "J", |
| "middle": [], |
| "last": "Dean", |
| "suffix": "" |
| } |
| ], |
| "year": 2013, |
| "venue": "ICLR Workshop Papers", |
| "volume": "", |
| "issue": "", |
| "pages": "", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "T. Mikolov, K. Chen, G. Corrado, and J. Dean, \"Efficient estimation of word representations in vector space,\" in ICLR Workshop Papers, 2013.", |
| "links": null |
| }, |
| "BIBREF9": { |
| "ref_id": "b9", |
| "title": "Tagme: on-the-fly annotation of short text fragments (by wikipedia entities)", |
| "authors": [ |
| { |
| "first": "P", |
| "middle": [], |
| "last": "Ferragina", |
| "suffix": "" |
| }, |
| { |
| "first": "U", |
| "middle": [], |
| "last": "Scaiella", |
| "suffix": "" |
| } |
| ], |
| "year": 2010, |
| "venue": "Proceedings of the 19th ACM international conference on Information and knowledge management", |
| "volume": "", |
| "issue": "", |
| "pages": "1625--1628", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "P. Ferragina and U. Scaiella, \"Tagme: on-the-fly annotation of short text fragments (by wikipedia entities),\" in Proceedings of the 19th ACM international conference on Information and knowledge management, 2010: ACM, pp. 1625-1628.", |
| "links": null |
| }, |
| "BIBREF10": { |
| "ref_id": "b10", |
| "title": "Distributed representations of sentences and documents", |
| "authors": [ |
| { |
| "first": "Q", |
| "middle": [], |
| "last": "Le", |
| "suffix": "" |
| }, |
| { |
| "first": "T", |
| "middle": [], |
| "last": "Mikolov", |
| "suffix": "" |
| } |
| ], |
| "year": 2014, |
| "venue": "International conference on machine learning", |
| "volume": "", |
| "issue": "", |
| "pages": "1188--1196", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "Q. Le and T. Mikolov, \"Distributed representations of sentences and documents,\" in International conference on machine learning, 2014, pp. 1188-1196.", |
| "links": null |
| }, |
| "BIBREF11": { |
| "ref_id": "b11", |
| "title": "Mining topics in documents: standing on the shoulders of big data", |
| "authors": [ |
| { |
| "first": "Z", |
| "middle": [], |
| "last": "Chen", |
| "suffix": "" |
| }, |
| { |
| "first": "B", |
| "middle": [], |
| "last": "Liu", |
| "suffix": "" |
| } |
| ], |
| "year": 2014, |
| "venue": "Proceedings of the 20th ACM SIGKDD international conference on Knowledge discovery and data mining", |
| "volume": "", |
| "issue": "", |
| "pages": "1116--1125", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "Z. Chen and B. Liu, \"Mining topics in documents: standing on the shoulders of big data,\" in Proceedings of the 20th ACM SIGKDD international conference on Knowledge discovery and data mining, 2014: ACM, pp. 1116-1125.", |
| "links": null |
| }, |
| "BIBREF12": { |
| "ref_id": "b12", |
| "title": "A framework for benchmarking entityannotation systems", |
| "authors": [ |
| { |
| "first": "M", |
| "middle": [], |
| "last": "Cornolti", |
| "suffix": "" |
| }, |
| { |
| "first": "P", |
| "middle": [], |
| "last": "Ferragina", |
| "suffix": "" |
| }, |
| { |
| "first": "M", |
| "middle": [], |
| "last": "Ciaramita", |
| "suffix": "" |
| } |
| ], |
| "year": 2013, |
| "venue": "Proceedings of the 22nd international conference on World Wide Web", |
| "volume": "", |
| "issue": "", |
| "pages": "249--260", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "M. Cornolti, P. Ferragina, and M. Ciaramita, \"A framework for benchmarking entity- annotation systems,\" in Proceedings of the 22nd international conference on World Wide Web, 2013: ACM, pp. 249-260.", |
| "links": null |
| }, |
| "BIBREF13": { |
| "ref_id": "b13", |
| "title": "Newsweeder: Learning to filter netnews", |
| "authors": [ |
| { |
| "first": "K", |
| "middle": [], |
| "last": "Lang", |
| "suffix": "" |
| } |
| ], |
| "year": 1995, |
| "venue": "Machine Learning Proceedings", |
| "volume": "", |
| "issue": "", |
| "pages": "331--339", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "K. Lang, \"Newsweeder: Learning to filter netnews,\" in Machine Learning Proceedings 1995: Elsevier, 1995, pp. 331-339.", |
| "links": null |
| } |
| }, |
| "ref_entries": { |
| "TABREF0": { |
| "content": "<table><tr><td colspan=\"2\">\u5728\u958b\u653e\u5f0f\u5206\u985e\u554f\u984c\u4e2d\uff0c\u8fd1\u5e74\u4f86\u5df2\u7d93\u5b58\u5728\u4e00\u4e9b\u53ef\u4ee5\u8fa8\u5225 unseen \u985e\u5225\u7684\u7814\u7a76\uff0c\u4f8b\u5982[2-4]\uff0c \u51fa\u5c64\uff0c\u5c0d\u61c9\u7684\u985e\u5225\u63a1\u7528\u6240\u6709\u6b63\u4f8b\u7684\u4f8b\u5b50\uff0c\u800c\u5176\u9918\u5269\u4e0b\u7684\u4f8b\u5b50\u7686\u4f5c\u70ba\u53cd\u4f8b\u3002 = ( \u210e ( ) | \u2208 \u210e ( )) (4)</td></tr><tr><td colspan=\"2\">(\u4e8c). Document Concept Construction 2. \u8a08\u7b97</td></tr><tr><td>\u6b64\u5c0f\u7bc0\u6211\u5011\u8a73\u5217</td><td>\u7684\u8a08\u7b97\u65b9\u6cd5\uff1a</td></tr><tr><td colspan=\"2\">\u4e00\u3001\u7dd2\u8ad6 \u904e\u53bb\u5df2\u6709\u7814\u7a76\u4f7f\u7528\u8a3b\u91cb\u3001\u95dc\u9375\u5b57\u3001\u6982\u5ff5\u7b49\u65b9\u5f0f\u4f86\u4ee3\u8868\u6fc3\u7e2e\u5f8c\u7684\u6587\u4ef6[10]\u3002\u5728[7]\u7684\u7814\u7a76 (1)\u3001 \u70ba\u4e86\u53d6\u5f97\u6574\u6bb5\u67e5\u8a62\u5167\u5bb9\u7684\u5411\u91cf\u8868\u793a\uff0c\u6211\u5011\u4f7f\u7528\u985e\u795e\u7d93\u7db2\u8def\u8a13\u7df4 Doc2vec[11]\u5411\u91cf\u6a21</td></tr><tr><td colspan=\"2\">\u53c3\u8003\u4e86 DOC \u67b6\u69cb\uff0c\u5728\u96fb\u5b50\u5546\u52d9\u7522\u54c1\u5206\u985e\u9032\u884c\u5be6\u9a57\uff0c\u89e3\u6c7a\u4e86\u8fa8\u5225\u65b0\u7522\u54c1\u985e\u5225\u7684\u554f\u984c\u3002 \u65b9\u6cd5\u4e2d\uff0c\u63d0\u53d6\u6587\u4ef6\u7684\u6982\u5ff5\uff0c\u5c07\u6982\u5ff5\u4f5c\u70ba\u7bc0\u9ede\uff0c\u908a\u4ee3\u8868\u5169\u500b\u6982\u5ff5\u7684\u4e0d\u540c\u9023\u7d50\u95dc\u4fc2\uff0c\u4e26\u5c07 \u578b\uff0c\u4f7f\u7528\u8a72\u6a21\u578b\u900f\u904e word embedding \u8f49\u70ba\u5411\u91cf\uff0c\u5c07\u67e5\u8a62\u5167\u5bb9\u8207\u6587\u4ef6\u6982\u5ff5\u5716\u7684\u6587\u4ef6 
\u96a8\u8457\u8cc7\u8a0a\u6642\u4ee3\u7684\u5230\u4f86\uff0c\u5982\u4f55\u4f7f\u6aa2\u7d22\u7cfb\u7d71\u66f4\u7b26\u5408\u4f7f\u7528\u8005\u671f\u5f85\uff0c\u662f\u6211\u5011\u4e3b\u8981\u7684\u7814\u7a76\u4e3b\u984c\u3002\u70ba \u7531\u4e0a\u8ff0\u7814\u7a76\u4f86\u770b\uff0c\u958b\u653e\u5f0f\u554f\u984c\u5df2\u6210\u70ba\u8fd1\u5e74\u4f86\u63a2\u8a0e\u8b70\u984c\u4e4b\u4e00\u3002\u56e0\u6b64\uff0c\u672c\u7814\u7a76\u53c3\u8003 DOC \u6b64\u6982\u5ff5\u5716\u4f86\u4ee3\u8868\u4e00\u500b\u6587\u4ef6\uff0c\u8a08\u7b97\u6982\u5ff5\u5716\u7684\u76f8\u4f3c\u4f86\u4ee3\u8868\u6587\u4ef6\u76f8\u4f3c\u5ea6\u3002\u800c\u5728\u672c\u7814\u7a76\u4e2d\uff0c\u6211 \u5411\u91cf\u5316\u3002 \u4e86\u4f7f\u6aa2\u7d22\u7cfb\u7d71\u80fd\u641c\u5c0b\u51fa\u542b\u6709\u8a9e\u610f\u6216\u4e3b\u984c\u76f8\u8fd1\u7684\u7d50\u679c\uff0c\u6211\u5011\u63a2\u8a0e\u5982\u4f55\u53d6\u5f97\u6587\u4ef6\u7684\u8a9e\u610f\u6216\u8005 \u67b6\u69cb\uff0c\u63a2\u8a0e\u7b26\u5408\u6211\u5011\u7814\u7a76\u4e3b\u984c\u4e4b\u985e\u795e\u7d93\u7db2\u8def\u6a21\u578b\uff0c\u8a73\u7d30\u65b9\u6cd5\u5c07\u5728\u5f8c\u7e8c\u9032\u884c\u8aaa\u660e\u3002 \u5011\u53c3\u8003\u524d\u8ff0\u7814\u7a76\u65b9\u5f0f\uff0c\u5c07\u6587\u4ef6\u53c3\u8003\u77e5\u8b58\u5eab\u53d6\u5f97\u6587\u4ef6\u7684\u591a\u500b\u6982\u5ff5\uff0c\u4e26\u7528\u591a\u500b\u6982\u5ff5\u4f86\u4ee3\u8868 (2)\u3001 \u5169\u5411\u91cf\u8a08\u7b97 similarity \u627e\u51fa\u76f8\u4f3c\u5ea6\u6700\u9ad8\u7684\u76ee\u6a19\u7bc0\u9ede\uff0c\u5982\u5f0f 5\u3002 \u4e3b\u984c\uff0c\u672c\u7814\u7a76\u65e8\u5728\u589e\u52a0\u6aa2\u7d22\u7d50\u679c\u6240\u8003\u616e\u7684\u56e0\u7d20\uff0c\u4f8b\u5982\u985e\u5225\u3001\u6982\u5ff5\u3001\u6392\u5e8f\u7b49\uff0c\u671f\u671b\u80fd\u63d0\u5347 \u6aa2\u7d22\u7d50\u679c\u3002 \u6211\u5011\u91dd \u5c0d \u985e \u795e \u7d93 \u7db2 \u8def\u9810 \u6e2c \u672a \u77e5 \u8cc7 \u6599 \u4e4b \u80fd \u529b\u9032 \u884c \u7814 \u7a76 \uff0c\u4e26\u5229 \u7528 \u8a3b\u91cb \u5de5 \u5177 \u53d6 \u5f97 \u6982\u5ff5 (\u4e09). 
\u6587\u4ef6\u8a9e\u610f\u7814\u7a76 \u8fd1\u5e74\u4f86\u5be6\u9ad4\u3001\u6982\u5ff5\u641c\u7d22\u5df2\u6210\u70ba Web \u7814\u7a76\u7684\u4e00\u9805\u91cd\u8981\u4efb\u52d9\uff0c\u9678\u7e8c\u6709\u7814\u7a76\u4f7f\u7528\u6b64\u7a2e\u65b9\u5f0f\u4f86 \u63a2\u8a0e\u6587\u4ef6\u8a9e\u610f\u5c0d\u65bc\u8cc7\u8a0a\u6aa2\u7d22\u9818\u57df\u4e4b\u767c\u5c55\u3002Yuan Ni \u7b49\u4eba[7] \u65bc 2016 \u5e74\u63d0\u51fa\u4e86\u5229\u7528\u6982\u5ff5 \u5716 \u4e00 \u3001\u7cfb\u7d71\u67b6\u69cb\u5716 \u4e00\u500b\u6587\u4ef6\u7bc0\u9ede\uff0c\u6211\u5011\u5c07\u6982\u5ff5\u76f8\u4f3c\u7684\u6587\u4ef6\u5efa\u7acb\u9023\u7d50\u95dc\u4fc2\uff0c\u5efa\u69cb\u6587\u4ef6\u6982\u5ff5\u5716\uff0c\u9032\u800c\u5229\u7528\u6b64 (3)\u3001 \u53d6\u5f97\u76ee\u6a19\u7bc0\u9ede\u5f8c\uff0c\u6211\u5011\u627e\u51fa\u5176\u9130\u5c45\u7bc0\u9ede\u4e26\u4f9d\u64da weight \u9ad8\u4f4e\u9032\u884c\u6392\u5e8f\uff0c\u5f97\u5230\u8207\u76ee\u6a19 (\u4e00). Category Classification \u5efa\u7acb \u3002\u6211\u5011\u4f7f\u7528\u7121\u5411\u5716(Undirected Graph)\u4f86\u5efa\u69cb\u6587\u4ef6\u6982\u5ff5\u5716\uff0c\u4e26\u5229\u7528\u76f8\u9130\u9663\u5217 \u672c\u7814\u7a76\u53c3\u8003 DOC[5]\u5229\u7528\u5377\u7a4d\u795e\u7d93\u7db2\u8def(Convolutional neural network, CNN)\uff0c\u6b64\u7a2e\u985e\u795e \u5716\u4f86\u8a08\u7b97\u6587\u4ef6\u4e4b\u9593\u7684\u76f8\u4f3c\u5ea6\u3002\u6587\u4ef6\u8868\u793a\u70ba\u7bc0\u9ede \uff0c\u5229\u7528\u5f0f 1 \u53ca\u5f0f 2 \u8a08\u7b97\u6b0a\u91cd \u210e \u4e26 \u7bc0\u9ede\u76f8\u4f3c\u5ea6\u9ad8\u7684\u524d\u5e7e\u7bc7\u6587\u4ef6 \uff0c\u5982\u5f0f 6\u3002</td></tr><tr><td colspan=\"2\">(Concepts)\uff0c\u5efa\u7acb\u6587\u4ef6\u6982\u5ff5\u5716\u4e26\u63a2\u8a0e\u5716\u5f62\u4e4b\u9593\u95dc\u4fc2\uff0c\u6700\u5f8c\u6211\u5011\u7d50\u5408\u985e\u795e\u7d93\u7db2\u8def\u53ca\u6587\u4ef6\u6982 
\u5716\u4e4b\u6587\u4ef6\u8868\u793a\uff0c\u6e2c\u91cf\u6587\u4ef6\u4e4b\u9593\u7684\u8a9e\u610f\u76f8\u95dc\u6027\u3002\u6587\u4ef6\u4f7f\u7528\u591a\u500b\u6982\u5ff5\u7bc0\u9ede(node)\u4f86\u8868\u793a\uff0c \u7d93\u7db2\u8def\u5df2\u88ab\u5be6\u9a57\u8b49\u5be6\uff0c\u5c0d\u65bc\u8655\u7406\u958b\u653e\u5f0f\u60c5\u5883\u554f\u984c\u4e5f\u5177\u6709\u4e00\u5b9a\u7a0b\u5ea6\u7684\u80fd\u529b\uff0c\u56e0\u6b64\uff0c\u672c\u7814 (Adjacency Matrix)\u4f86\u5132\u5b58\u6b0a\u91cd( \u210e )\u3002 \u5ff5\u5716\u8a08\u7b97\uff0c\u5c0d\u4e0d\u540c\u641c\u5c0b\u5167\u5bb9\u4f7f\u7528\u4e0d\u540c\u7b97\u6cd5\u4e4b\u6aa2\u7d22\u7d50\u679c\u9032\u884c\u8a0e\u8ad6\u3002\u900f\u904e\u672c\u7814\u7a76\u65b9\u6cd5\uff0c\u80fd\u7d93 \u7531\u985e\u795e\u7d93\u7db2\u8def\u9810\u6e2c\u6587\u4ef6\u985e\u5225\uff0c\u4e26\u85c9\u7531\u6587\u4ef6\u6982\u5ff5\u5716\u7684\u5716\u5f62\u7d50\u69cb\u95dc\u4fc2\uff0c\u627e\u5c0b\u5177\u6709\u985e\u5225\u8cc7\u8a0a\u53ca \u5176\u7bc0\u9ede\u70ba\u900f\u904e\u5de5\u5177\u5f9e\u6587\u4ef6\u4e2d\u63d0\u53d6\u7684\u6982\u5ff5\u3002\u7bc0\u9ede\u4e4b\u9593\u7684\u908a(edge)\u4ee3\u8868\u6982\u5ff5\u4e4b\u9593\u7684\u8a9e\u610f\u548c\u7d50 \u7a76\u52a0\u5165\u6b64\u6a21\u7d44\uff0c\u4ee5\u5354\u52a9\u6211\u5011\u63a2\u8a0e\u6587\u4ef6\u76f8\u4f3c\u5ea6\u3002 \u210e = \u210e , (1)</td></tr><tr><td colspan=\"2\">\u6982\u5ff5\u76f8\u8fd1\u4e4b\u6aa2\u7d22\u7d50\u679c\uff0c\u7d93\u904e\u5be6\u9a57\u9a57\u8b49\uff0c\u672c\u7cfb\u7d71\u5728\u6aa2\u7d22\u7d50\u679c\u4e4b\u6392\u540d\u4e0a\u5177\u6709\u826f\u597d\u7684\u6548\u679c\uff0c\u5e73 \u5747 nDCG \u5206\u6578\u9ad8\u65bc 0.9\uff0c\u6b64\u5169\u7a2e\u7279\u5fb5\u7d50\u5408\u78ba\u5be6\u80fd\u8f14\u52a9\u6211\u5011\u5f97\u5230\u66f4\u7b26\u5408\u4f7f\u7528\u8005\u67e5\u8a62\u5167\u5bb9\u7684 \u7d50\u679c\u3002 \u69cb\u95dc\u4fc2\u3002\u6b64\u6982\u5ff5\u5716\u4f7f\u7528 closeness centrality 
\u5c0d\u6982\u5ff5\u9032\u884c\u52a0\u6b0a\uff0c\u8a72\u6b0a\u91cd\u53cd\u6620\u4e86\u5b83\u5011\u8207\u6587\u4ef6 \u7684\u76f8\u95dc\u6027\u3002Zhenghao Liu \u7b49\u4eba[8]\u65bc 2018 \u5e74\u63d0\u51fa\u4e86\u4e00\u7a2e Entity-Duet Neural Ranking Model(EDRM) \uff0c\u5b83\u5c07\u77e5\u8b58\u5716(Knowledge graph)\u5f15\u5165\u795e\u7d93\u641c\u7d22\u7cfb\u7d71\uff0c\u901a\u904e\u5176\u55ae\u8a5e\u548c\u5be6 ( , ) = \u210e (2) (\u4e09). Document Similarity Calculation 1. \u81ea\u7136\u8a9e\u8a00\u8655\u7406\u9818\u57df\u4e0a\u5e38\u7528\u7684\u65b9\u6cd5\u6703\u4f7f\u7528 Word2Vec[9]\u4e8b\u5148\u8a13\u7df4\u4e00\u500b\u5b57\u8a5e\u6a21\u578b\u3002\u5b57\u8a5e\u6a21 \u6b64\u6a21\u7d44\u6211\u5011\u5c07\u7d50\u5408\u5206\u985e\u5668\u53ca\u6587\u4ef6\u6982\u5ff5\u5716\u4f86\u9032\u884c\u6aa2\u7d22\u76f8\u4f3c\u5ea6\u8a08\u7b97\u3002\u5efa\u7acb\u5716\u5f62\u6642\uff0c\u6211\u5011\u5c07</td></tr><tr><td colspan=\"2\">\u9ad4\u8a3b\u91cb\u8868\u793a\u67e5\u8a62\u548c\u6587\u4ef6\uff0c\u767c\u73fe\u6b64\u7a2e\u77e5\u8b58\u5716\u8a9e\u610f\u986f\u8457\u63d0\u9ad8\u4e86\u795e\u7d93\u6392\u5e8f\u6a21\u578b\u7684\u6cdb\u5316\u80fd\u529b\u3002 \u578b\u6703\u4ef0\u8cf4\u4e0d\u540c\u8cc7\u6599\u96c6\u7684\u7279\u6027\u7522\u751f\u4e0d\u540c\u5411\u91cf\uff0c\u6240\u4ee5\u672c\u7814\u7a76\u5e0c\u671b\u53ef\u4ee5\u4e0d\u4f7f\u7528\u5b57\u8a5e\u6a21\u578b\u8f49\u63db \u4e2d\u5fc3\u9ede\u7684\u6982\u5ff5\u52a0\u5165\u6587\u4ef6\u6982\u5ff5\u5716\uff0c\u56e0\u70ba\u5176\u6b0a\u91cd\u4ee3\u8868\u5169\u7bc0\u9ede\u76f8\u540c\u6982\u5ff5\u500b\u6578\uff0c\u4e26\u4e14\u6211\u5011\u8981\u5f97</td></tr><tr><td colspan=\"2\">\u4e8c\u3001\u76f8\u95dc\u7814\u7a76 \u800c\u672c\u7814\u7a76\u53c3\u8003\u4e0a\u8ff0\u7814\u7a76\u65b9\u6cd5\uff0c\u63d0\u51fa\u4e00\u500b\u57fa\u65bc\u6587\u4ef6\u4e4b\u9593\u95dc\u4fc2\u7684\u6587\u4ef6\u6982\u5ff5\u5716\uff0c\u4e26\u5229\u7528\u6b64\u5716 \u7684\u65b9\u5f0f\uff0c\u50c5\u4f7f\u7528\u985e\u795e\u7d93\u7db2\u8def\u7684 Embedding layer 
\u8a08\u7b97\u4e5f\u80fd\u9054\u5230\u826f\u597d\u7684\u6548\u679c\u3002 \u5230\u8a72\u5716\u5f62\u7d50\u69cb\u4e2d\u64c1\u6709\u6700\u591a\u6982\u5ff5\u8cc7\u8a0a\u4e4b\u7bc0\u9ede\uff0c\u56e0\u6b64\u6211\u5011\u8a08\u7b97\u7bc0\u9ede\u7684 Degree Centrality\u3002</td></tr><tr><td colspan=\"2\">(\u4e00). \u8cc7\u8a0a\u6aa2\u7d22\u7814\u7a76 \u5f62\u95dc\u4fc2\u8a08\u7b97\u6587\u4ef6\u76f8\u4f3c\u7a0b\u5ea6\u3002\u4e0d\u540c\u65bc\u904e\u53bb\u65b9\u6cd5\uff0c\u6211\u5011\u64f7\u53d6\u51fa\u6587\u4ef6\u7684\u591a\u500b\u6982\u5ff5\u4f86\u4ee3\u8868\u4e00\u500b \u7b2c\u4e8c\u5c64 Convolutional layer\uff0c\u4f7f\u7528\u4e0d\u540c\u5927\u5c0f\u7684 filter \u5206\u5225\u5c0d\u5bc6\u96c6\u5411\u91cf\u9032\u884c\u5377\u7a4d\uff0cfilter \u5728 1. \u8a08\u7b97</td></tr><tr><td colspan=\"2\">\u4e86\u89e3\u4f7f\u7528\u8005\u7684\u9700\u6c42\u4e26\u4e0d\u5bb9\u6613\uff0c\u5982\u4f55\u67e5\u8a62\u5230\u9069\u5408\u7684\u7d50\u679c\uff0c\u662f\u8cc7\u8a0a\u6aa2\u7d22(Information \u6587\u4ef6\uff0c\u4e26\u7528\u4e00\u500b\u7bc0\u9ede\u4f86\u8868\u793a\uff0c\u800c\u7bc0\u9ede\u4e4b\u9593\u7684\u908a\u4ee3\u8868\u5169\u7bc0\u9ede\u4e4b\u9593\u6709\u5171\u540c\u7684\u6982\u5ff5\u3002 \u795e\u7d93\u7db2\u8def\u4e2d\u4ee3\u8868\u5c0d\u61c9\u7684\u904e\u6ffe\u5668\uff0c\u56e0\u6b64\uff0c\u6b64\u8a08\u7b97\u65b9\u5f0f\u53ef\u4ee5\u5f97\u5230\u7d93\u7531\u4e0d\u540c\u904e\u6ffe\u689d\u4ef6\u8a08\u7b97\u5f8c \u5728\u6b64\u5c0f\u7bc0\u6211\u5011\u8a73\u5217 \u7684\u8a08\u7b97\u65b9\u6cd5\uff1a</td></tr><tr><td colspan=\"2\">\u7684\u7d50\u679c\u3002 (1)\u3001 \u5c07\u67e5\u8a62\u5167\u5bb9\u8f38\u5165\u5206\u985e\u5668\uff0c\u4e26\u8f09\u5165\u9810\u5148\u8a13\u7df4\u597d\u7684\u6a21\u578b\uff0c\u5c0d\u67e5\u8a62\u5167\u5bb9\u9032\u884c\u985e\u5225\u9810\u6e2c\u3002</td></tr><tr><td colspan=\"2\">Retrieval, 
IR)\u9818\u57df\u6301\u7e8c\u5728\u7814\u7a76\u7684\u5176\u4e2d\u4e00\u500b\u65b9\u5411\u3002\u8fd1\u5e74\u4f86\u8cc7\u8a0a\u6aa2\u7d22\u9818\u57df\u9010\u6f38\u671d\u8457\u4f7f\u6aa2\u7d22\u7d50</td></tr><tr><td colspan=\"2\">\u679c\u66f4\u7b26\u5408\u4f7f\u7528\u8005\u671f\u5f85\u7684\u65b9\u5411\u767c\u5c55\uff0c\u672c\u7814\u7a76\u5617\u8a66\u63a2\u8a0e\u6aa2\u7d22\u7d50\u679c\u7684\u76f8\u4f3c\u5ea6\uff0c\u8a0e\u8ad6\u4f55\u7a2e\u8a08\u7b97 \u65b9\u5f0f\u80fd\u5f97\u5230\u8207\u67e5\u8a62\u5167\u5bb9\u66f4\u76f8\u4f3c\u7684\u7d50\u679c\u3002 (\u4e8c). \u6587\u4ef6\u5206\u985e\u7814\u7a76 \u5c0d\u65bc\u4e00\u822c\u7684\u76e3\u7763\u5f0f\u6a5f\u5668\u5b78\u7fd2\u6587\u4ef6\u5206\u985e\u800c\u8a00\uff0c\u8a13\u7df4\u96c6\u7684\u8cc7\u6599\u5fc5\u9808\u5305\u542b\u6240\u6709\u7684\u985e\u5225\uff0c\u7136 \u800c\uff0c\u6b64\u7a2e\u5047\u8a2d\u5728\u5f88\u591a\u61c9\u7528\u4e26\u4e0d\u6210\u7acb\uff0c\u6211\u5011\u7121\u6cd5\u78ba\u4fdd\u8cc7\u6599\u90fd\u662f\u7cfb\u7d71\u66fe\u7d93\u78b0\u904e\u7684\u985e\u578b\u3002\u6b64 \u4e09\u3001\u7814\u7a76\u65b9\u6cd5 \u6b64\u7ae0\u7bc0\u8aaa\u660e\u7814\u7a76\u7684\u65b9\u6cd5\u53ca\u67b6\u69cb\uff0c\u5982\u5716\u4e00\u6240\u793a\uff0c\u5f8c\u9762\u7ae0\u7bc0\u5c07\u91dd\u5c0d\u67b6\u69cb\u5404\u6a21\u7d44\u9032\u884c\u8aaa\u660e\u3002 (2)\u3001 \u5229\u7528\u8a72\u985e\u5225\u81f3\u6587\u4ef6\u6982\u5ff5\u5716\u627e\u5230\u5c0d\u61c9\u7684\u985e\u5225\u4e2d\u5fc3\u9ede\u3002\u6839\u64da\u524d\u9762\u5c0d\u4e2d\u5fc3\u9ede\u7684\u63cf\u8ff0\uff0c\u6211\u5011 \u4f9d\u7167\u9130\u5c45\u7bc0\u9ede\u591a\u5be1\u9032\u884c\u6392\u5e8f\uff0c\u6392\u5e8f\u5f8c\u518d\u6311\u9078\u51fa\u524d\u5e7e\u500b\u7bc0\u9ede\uff0c\u4f9d\u64da\u64c1\u6709\u7684 weight \u591a \u5be1\u518d\u6b21\u9032\u884c\u6392\u5e8f\uff0c\u5982\u5f0f 3\uff0c\u627e\u51fa\u6700\u5f8c\u7d93\u904e\u5169\u6b21\u6392\u5e8f\u5f8c\u6700\u524d\u9762\u7684\u7bc0\u9ede\uff0c\u4f5c\u70ba\u985e\u5225\u4e2d\u5fc3 \u9ede\u3002 (3)\u3001 
\u53d6\u5f97\u985e\u5225\u4e2d\u5fc3\u9ede\u5f8c\uff0c\u6211\u5011\u627e\u51fa\u5176\u9130\u5c45\u7bc0\u9ede\u4e26\u4f9d\u64da weight \u9ad8\u4f4e\u9032\u884c\u6392\u5e8f\uff0c\u5f97\u5230\u8207\u76ee \u4e0b\u4e00\u6b65\uff0c\u6211\u5011\u4f7f\u7528 \u50b3\u7d71\u7684\u591a\u5206\u985e\u5668\u4f7f\u7528 \u4f5c\u70ba\u6700\u5f8c\u7684\u8f38\u51fa\u5c64\uff0c\u5982\u6b64\u6bcf\u4e00\u500b\u985e\u5225\u7684\u9810\u6e2c\u6a5f\u7387\u5df2\u7d93\u5728 \u6a19\u7bc0\u9ede\u76f8\u4f3c\u5ea6\u9ad8\u7684\u524d\u5e7e\u7bc7\u6587\u4ef6 \uff0c\u5982\u5f0f 4\u3002</td></tr><tr><td colspan=\"2\">\u7a2e\u554f\u984c\u88ab\u7a31\u4f5c open world classification \u6216 open classification[1]\uff0c\u8b6f\u70ba\u958b\u653e\u5f0f\u5206\u985e\u554f\u984c\u3002 \u8a13\u7df4\u7684\u6642\u5019\u9032\u884c\u4e86\u6b63\u898f\u5316\uff0c\u4fbf\u5c11\u4e86\u5f48\u6027\u8abf\u6574\u7684\u80fd\u529b\u3002\u56e0\u6b64\uff0c\u6211\u5011 \u51fd\u5f0f\u4f5c\u70ba\u8f38 = ( (\u2211 \u210e ( )) \u22c2( (\u2211 \u210e ( ))) (3)</td></tr></table>", |
| "num": null, |
| "html": null, |
| "type_str": "table", |
| "text": "Keywords: Information retrieval, Neural network, Document concept graph, Semantic similarity Shu Lei[5]\u7b49\u4eba\u65bc 2017 \u5e74\u63d0\u51fa\u4e00\u7a2e\u540d\u70ba DOC \u7684\u6df1\u5ea6\u5b78\u7fd2\u7b97\u6cd5\uff0c\u7d93\u5be6\u9a57\u767c\u73fe\uff0c\u7c21\u55ae\u7684 CNN \u6a21\u578b\u5728\u6b64\u7a2e\u985e\u578b\u958b\u653e\u5f0f\u5206\u985e\u554f\u984c\u4e0a\u5177\u6709\u826f\u597d\u7684\u6548\u679c\u3002Hu Xu \u7b49\u4eba[6]\u65bc 2019 \u5e74\uff0c The Architecture of our CNN model \u7b2c\u4e00\u5c64 Embedding layer \u5c07\u8cc7\u6599\u96c6 D \u4e2d\u7684\u55ae\u8a5e\u76f4\u63a5 embedding \u5230\u5bc6\u96c6\u5411\u91cf\u4e2d\u3002\u8fd1\u5e74\u4f86\u5728 Max-over-time pooling layer \u5f9e Convolutional layer \u7684\u7d50\u679c\u4e2d\u6311\u9078\u6700\u5927 \u503c\u4ee5\u5f62\u6210 m \u7dad\u5ea6\u7279\u5fb5\u5411\u91cf f\uff0c\u900f\u904e\u5169\u500b Fully connected layer \u548c\u4e00\u500b\u4e2d\u9593 activation layer \u5c07 f \u964d\u4f4e\u7dad\u5ea6\u5230 n \u7dad\u5411\u91cf x\uff0c\u6700\u5f8c\u8f38\u51fa\u5c64\u662f\u61c9\u7528\u65bc x \u7684 One-vs-rest \u5c64\uff0c \u6b64\u90e8\u5206\u5728\u4e0b\u4e00\u5c0f\u7bc0\u6703\u505a\u66f4\u8a73\u7d30\u7684\u63cf\u8ff0\u3002 2. One-vs-Rest Layer of CNN" |
| } |
| } |
| } |
| } |