Datasets:

ArXiv:
License:
File size: 7,261 Bytes
c574d3a
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package tool;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import tool.sieves.Sieve;
import tool.util.Abbreviation;
import tool.util.AmbiguityResolution;
import tool.util.Concept;
import tool.util.DocumentConcepts;
import tool.util.Documents;
import tool.util.Ling;
import tool.util.Terminology;
import tool.util.Util;
import java.io.*;
import java.io.FileWriter; 

/**
 * Entry point for the multi-pass sieve normalization tool: loads resources,
 * runs the sieve pipeline over the test documents, and reports accuracy.
 */
public class Main {

    /** Directory containing the training data (args[0]). */
    public static File training_data_dir;
    /** Directory containing the test data (args[1]). */
    public static File test_data_dir;
    MultiPassSieveNormalizer multiPassSieve;
    PassAllSievesNormalizer allPassSieve;
    /** Directory where normalized output is written; derived from the test dir in main(). */
    public static File output_data_dir;

    /**
     * Validates the command-line arguments and loads all resource files.
     *
     * @param args see {@link #main(String[])} for the expected arguments
     * @throws IOException if a resource file cannot be read
     */
    public Main(String[] args) throws IOException {
        init(args);
    }

    /**
     * Writes one candidate line per knowledge-base name of {@code concept} that
     * exact-matches a terminology entry. Each line is tab-separated:
     * mention name, gold CUI, candidate name, candidate CUI.
     *
     * @param writer  open writer for the candidate file; not closed here
     * @param concept the concept whose KB names are checked
     * @throws IOException if writing fails
     */
    public static void writeCandidateFileLine(FileWriter writer, Concept concept) throws IOException {
        int sieveLevel = concept.getNormalizingSieve();
        String mentionName = concept.getName();
        List<String> namesKB = concept.getNamesKnowledgeBase();
        List<String> alternateCUIs = concept.getAlternateCuis();

        System.out.println("Concept Name is " + concept.getName());
        for (String name : namesKB) {
            String cuiNameIfExists = Sieve.exactMatchSieve(name);
            // Guard against null as well as "": the original code would NPE if
            // exactMatchSieve ever returned null for a non-matching name.
            if (cuiNameIfExists != null && !cuiNameIfExists.isEmpty()) {
                System.out.println("Candidate is " + name);
                System.out.println("CUI is " + cuiNameIfExists);
                writer.write(concept.getName() + "\t" + concept.getCui() + "\t"
                        + name + "\t" + cuiNameIfExists + "\n");
            }
        }
    }

    /**
     * Validates the four CLI arguments and initializes linguistic resources
     * (spelling corrections, stopwords, abbreviations, affixes) and sieve levels.
     *
     * @param args [0] training dir, [1] test dir, [2] terminology file, [3] max sieve level
     * @throws IOException if a resource file cannot be read
     */
    private void init(String[] args) throws IOException {
        if (new File(args[0]).isDirectory())
            training_data_dir = new File(args[0]);
        else
            Util.throwIllegalDirectoryException(args[0]);
        if (new File(args[1]).isDirectory())
            test_data_dir = new File(args[1]);
        else
            Util.throwIllegalDirectoryException(args[1]);
        if (new File(args[2]).isFile())
            Terminology.terminologyFile = new File(args[2]);
        else
            Util.throwIllegalFileException(args[2]);

        // Choose NCBI- or SemEval-specific resources based on the test dir name.
        boolean ncbi = test_data_dir.toString().contains("ncbi");
        Ling.setSpellingCorrectionMap(ncbi ? new File("resources/ncbi-spell-check.txt") : new File("resources/semeval-spell-check.txt"));
        Ling.setStopwordsList(new File("resources/stopwords.txt"));
        Abbreviation.setWikiAbbreviationExpansionMap(ncbi ? new File("resources/ncbi-wiki-abbreviations.txt") : new File("resources/semeval-wiki-abbreviations.txt"));
        Ling.setDigitToWordformMapAndReverse(new File("resources/number.txt"));
        Ling.setSuffixMap(new File("resources/suffix.txt"));
        Ling.setPrefixMap(new File("resources/prefix.txt"));
        Ling.setAffixMap(new File("resources/affix.txt"));

        // Both normalizers share the same user-specified maximum sieve level.
        MultiPassSieveNormalizer.maxSieveLevel = Integer.parseInt(args[3]);
        PassAllSievesNormalizer.maxSieveLevel = Integer.parseInt(args[3]);
    }

    /**
     * Runs the multi-pass sieve over every concept in the data set, assigns
     * "CUI-less" to unresolved concepts, logs candidate matches to a file, and
     * applies ambiguity resolution per document.
     *
     * @throws IOException if the candidate file cannot be written
     */
    public void runMultiPassSieve() throws IOException {
        Sieve.setStandardTerminology();
        Sieve.setTrainingDataTerminology();
        // NOTE(review): hard-coded developer path kept for behavior parity —
        // consider making this configurable.
        File f = new File("/home/megh/projects/entity-norm/cnnormaliztion/candidate.txt");
        f.createNewFile(); // redundant (FileWriter creates the file), kept intentionally

        System.out.println(Sieve.getStandardTerminology());

        // try-with-resources: the original leaked the FileWriter if any sieve
        // threw before writer.close() was reached.
        try (FileWriter writer = new FileWriter(f)) {
            List<DocumentConcepts> dataset = Documents.getDataSet();

            for (DocumentConcepts concepts : dataset) {
                Map<String, List<String>> cuiNamesMap = new HashMap<>();
                for (Concept concept : concepts.getConcepts()) {
                    System.out.println("==========new concept===========================");
                    MultiPassSieveNormalizer.applyMultiPassSieve(concept);

                    writeCandidateFileLine(writer, concept);

                    // Concepts no sieve could resolve are explicitly marked.
                    if (concept.getCui().equals(""))
                        concept.setCui("CUI-less");

                    cuiNamesMap = Util.setMap(cuiNamesMap, concept.getCui(), concept.getName());
                }

                writer.write("===========================\n");
                AmbiguityResolution.start(concepts, cuiNamesMap);
            }
        }
    }

    /**
     * CLI entry point. Expects exactly four arguments; otherwise prints usage
     * and exits with status 1.
     *
     * @param args training dir, test dir, terminology file, max sieve level
     * @throws IOException if any resource or output file cannot be accessed
     */
    public static void main(String[] args) throws IOException {
        if (args.length == 4) {
            Main main = new Main(args);
            output_data_dir = new File(test_data_dir.toString().replace(test_data_dir.getName(), "output"));
            output_data_dir.mkdirs();
            main.runMultiPassSieve();
            Evaluation.computeAccuracy();
            Evaluation.printResults();
        }
        else {
            System.out.println("Usage: java tool.Main <training-data-dir> <test-data-dir> <terminology/ontology-file> max-sieve-level");
            System.out.println("---------------------");
            System.out.println("Sieve levels:");
            System.out.println("1 for exact match");
            System.out.println("2 for abbreviation expansion");
            System.out.println("3 for subject<->object conversion");
            System.out.println("4 for numbers replacement");
            System.out.println("5 for hyphenation");
            System.out.println("6 for affixation");
            System.out.println("7 for disorder synonyms replacement");
            System.out.println("8 for stemming");
            System.out.println("9 for composite disorder mentions");
            System.out.println("10 for partial match");
            System.out.println("---------------------");
            System.exit(1);
        }
    }

}