bugged
stringlengths 6
599k
| fixed
stringlengths 10
599k
| __index_level_0__
int64 0
1.13M
|
|---|---|---|
public void testSaveIsWellFormed() throws Exception { boolean validate = false; testLoad(); File tmp = File.createTempFile("test", ".architect"); if (deleteOnExit) { tmp.deleteOnExit(); } PrintWriter out = new PrintWriter(tmp); assertNotNull(out); project.setName("FOO<BAR"); // Implicitly testing sanitizeXML method here! project.save(out); System.err.println("Parsing " + tmp + "..."); // Make the document a URL so relative DTD works. String uri = "file:" + tmp.getAbsolutePath(); DocumentBuilderFactory f = DocumentBuilderFactory.newInstance(); if (validate) f.setValidating(true); DocumentBuilder p = f.newDocumentBuilder(); p.parse(uri); System.out.println("Parsed OK"); }
|
public void testSaveIsWellFormed() throws Exception { boolean validate = false; testLoad(); File tmp = File.createTempFile("test", ".architect"); if (deleteOnExit) { tmp.deleteOnExit(); } PrintWriter out = new PrintWriter(tmp); assertNotNull(out); project.setName("FOO<BAR"); // Implicitly testing sanitizeXML method here! project.save(out,ENCODING); System.err.println("Parsing " + tmp + "..."); // Make the document a URL so relative DTD works. String uri = "file:" + tmp.getAbsolutePath(); DocumentBuilderFactory f = DocumentBuilderFactory.newInstance(); if (validate) f.setValidating(true); DocumentBuilder p = f.newDocumentBuilder(); p.parse(uri); System.out.println("Parsed OK"); }
| 1,109,450
|
public void testSavePrintWriter() throws Exception { testLoad(); File tmp = File.createTempFile("test", ".architect"); if (deleteOnExit) { tmp.deleteOnExit(); } PrintWriter out = new PrintWriter(tmp); assertNotNull(out); project.save(out); SwingUIProject p2 = new SwingUIProject("test2"); p2.load(new FileInputStream(tmp)); File tmp2 = File.createTempFile("test2", ".architect"); if (deleteOnExit) { tmp2.deleteOnExit(); } p2.save(new PrintWriter(tmp2)); assertEquals(tmp.length(), tmp2.length()); // Quick test }
|
public void testSavePrintWriter() throws Exception { testLoad(); File tmp = File.createTempFile("test", ".architect"); if (deleteOnExit) { tmp.deleteOnExit(); } PrintWriter out = new PrintWriter(tmp,ENCODING); assertNotNull(out); project.save(out); SwingUIProject p2 = new SwingUIProject("test2"); p2.load(new FileInputStream(tmp)); File tmp2 = File.createTempFile("test2", ".architect"); if (deleteOnExit) { tmp2.deleteOnExit(); } p2.save(new PrintWriter(tmp2)); assertEquals(tmp.length(), tmp2.length()); // Quick test }
| 1,109,451
|
public void testSavePrintWriter() throws Exception { testLoad(); File tmp = File.createTempFile("test", ".architect"); if (deleteOnExit) { tmp.deleteOnExit(); } PrintWriter out = new PrintWriter(tmp); assertNotNull(out); project.save(out); SwingUIProject p2 = new SwingUIProject("test2"); p2.load(new FileInputStream(tmp)); File tmp2 = File.createTempFile("test2", ".architect"); if (deleteOnExit) { tmp2.deleteOnExit(); } p2.save(new PrintWriter(tmp2)); assertEquals(tmp.length(), tmp2.length()); // Quick test }
|
public void testSavePrintWriter() throws Exception { testLoad(); File tmp = File.createTempFile("test", ".architect"); if (deleteOnExit) { tmp.deleteOnExit(); } PrintWriter out = new PrintWriter(tmp); assertNotNull(out); project.save(out,ENCODING); SwingUIProject p2 = new SwingUIProject("test2"); p2.load(new FileInputStream(tmp)); File tmp2 = File.createTempFile("test2", ".architect"); if (deleteOnExit) { tmp2.deleteOnExit(); } p2.save(new PrintWriter(tmp2)); assertEquals(tmp.length(), tmp2.length()); // Quick test }
| 1,109,452
|
public void testSavePrintWriter() throws Exception { testLoad(); File tmp = File.createTempFile("test", ".architect"); if (deleteOnExit) { tmp.deleteOnExit(); } PrintWriter out = new PrintWriter(tmp); assertNotNull(out); project.save(out); SwingUIProject p2 = new SwingUIProject("test2"); p2.load(new FileInputStream(tmp)); File tmp2 = File.createTempFile("test2", ".architect"); if (deleteOnExit) { tmp2.deleteOnExit(); } p2.save(new PrintWriter(tmp2)); assertEquals(tmp.length(), tmp2.length()); // Quick test }
|
public void testSavePrintWriter() throws Exception { testLoad(); File tmp = File.createTempFile("test", ".architect"); if (deleteOnExit) { tmp.deleteOnExit(); } PrintWriter out = new PrintWriter(tmp); assertNotNull(out); project.save(out); SwingUIProject p2 = new SwingUIProject("test2"); p2.load(new FileInputStream(tmp)); File tmp2 = File.createTempFile("test2", ".architect"); if (deleteOnExit) { tmp2.deleteOnExit(); } p2.save(new PrintWriter(tmp2,ENCODING),ENCODING); assertEquals(tmp.length(), tmp2.length()); // Quick test }
| 1,109,453
|
public void testSaveProgressMonitor() throws Exception { System.out.println("TestSwingUIProject.testSaveProgressMonitor()"); MockProgressMonitor mockProgressMonitor = new MockProgressMonitor(null, "Hello", "Hello again", 0, 100); File file = File.createTempFile("test", "architect"); project.setFile(file); project.save(mockProgressMonitor); SwingUIProject p2 = new SwingUIProject("test2"); p2.load(new FileInputStream(file)); File tmp2 = File.createTempFile("test2", ".architect"); if (deleteOnExit) { tmp2.deleteOnExit(); } p2.save(new PrintWriter(tmp2)); assertEquals(file.length(), tmp2.length()); // Quick test }
|
public void testSaveProgressMonitor() throws Exception { System.out.println("TestSwingUIProject.testSaveProgressMonitor()"); MockProgressMonitor mockProgressMonitor = new MockProgressMonitor(null, "Hello", "Hello again", 0, 100); File file = File.createTempFile("test", "architect"); project.setFile(file); project.save(mockProgressMonitor); SwingUIProject p2 = new SwingUIProject("test2"); p2.load(new FileInputStream(file)); File tmp2 = File.createTempFile("test2", ".architect"); if (deleteOnExit) { tmp2.deleteOnExit(); } p2.save(new PrintWriter(tmp2,ENCODING),ENCODING); assertEquals(file.length(), tmp2.length()); // Quick test }
| 1,109,454
|
public void stateChanged(ChangeEvent e) { int tabNum = tabs.getSelectedIndex(); if (tabNum == VIEW_D_NUM || tabNum == VIEW_HAP_NUM){ exportMenuItems[0].setEnabled(true); exportMenuItems[1].setEnabled(true); }else if (tabNum == VIEW_TDT_NUM || tabNum == VIEW_CHECK_NUM){ exportMenuItems[0].setEnabled(true); exportMenuItems[1].setEnabled(false); }else{ exportMenuItems[0].setEnabled(false); exportMenuItems[1].setEnabled(false); } //if we've adjusted the haps display thresh we need to change the haps ass panel if (tabNum == VIEW_TDT_NUM){ JTabbedPane metaAssoc= (JTabbedPane)tabs.getComponentAt(tabNum); //this is the haps ass tab inside the assoc super-tab HaploAssocPanel htp = (HaploAssocPanel) metaAssoc.getComponent(1); if (htp.initialHaplotypeDisplayThreshold != Options.getHaplotypeDisplayThreshold()){ htp.makeTable(theData.getHaplotypes()); } } if (tabNum == VIEW_D_NUM){ keyMenu.setEnabled(true); }else{ keyMenu.setEnabled(false); } viewMenuItems[tabs.getSelectedIndex()].setSelected(true); if (checkPanel != null && checkPanel.changed){ //first store up the current blocks Vector currentBlocks = new Vector(); for (int blocks = 0; blocks < theData.blocks.size(); blocks++){ int thisBlock[] = (int[]) theData.blocks.elementAt(blocks); int thisBlockReal[] = new int[thisBlock.length]; for (int marker = 0; marker < thisBlock.length; marker++){ thisBlockReal[marker] = Chromosome.realIndex[thisBlock[marker]]; } currentBlocks.add(thisBlockReal); } window.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); JTable table = checkPanel.getTable(); boolean[] markerResults = new boolean[table.getRowCount()]; for (int i = 0; i < table.getRowCount(); i++){ markerResults[i] = ((Boolean)table.getValueAt(i,CheckDataPanel.STATUS_COL)).booleanValue(); } Chromosome.doFilter(markerResults); //after editing the filtered marker list, needs to be prodded into //resizing correctly dPrimeDisplay.computePreferredSize(); dPrimeDisplay.colorDPrime(); hapDisplay.theData = theData; if 
(currentBlockDef != BLOX_CUSTOM){ changeBlocks(currentBlockDef); }else{ //adjust the blocks Vector theBlocks = new Vector(); for (int x = 0; x < currentBlocks.size(); x++){ Vector goodies = new Vector(); int currentBlock[] = (int[])currentBlocks.elementAt(x); for (int marker = 0; marker < currentBlock.length; marker++){ for (int y = 0; y < Chromosome.realIndex.length; y++){ //we only keep markers from the input that are "good" from checkdata //we also realign the input file to the current "good" subset since input is //indexed of all possible markers in the dataset if (Chromosome.realIndex[y] == currentBlock[marker]){ goodies.add(new Integer(y)); } } } int thisBlock[] = new int[goodies.size()]; for (int marker = 0; marker < thisBlock.length; marker++){ thisBlock[marker] = ((Integer)goodies.elementAt(marker)).intValue(); } if (thisBlock.length > 1){ theBlocks.add(thisBlock); } } theData.guessBlocks(BLOX_CUSTOM, theBlocks); } if (tdtPanel != null){ tdtPanel.refreshTable(); } setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); checkPanel.changed=false; } if (hapDisplay != null && theData.blocksChanged){ setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); try{ hapDisplay.getHaps(); if(Options.getAssocTest() != ASSOC_NONE) { JTabbedPane metaAssoc= (JTabbedPane)tabs.getComponentAt(VIEW_TDT_NUM); //this is the haps ass tab inside the assoc super-tab HaploAssocPanel hasp = (HaploAssocPanel)metaAssoc.getComponent(1); hasp.makeTable(theData.getHaplotypes()); } }catch(HaploViewException hv){ JOptionPane.showMessageDialog(window, hv.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } hapScroller.setViewportView(hapDisplay); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); theData.blocksChanged = false; } }
|
public void stateChanged(ChangeEvent e) { int tabNum = tabs.getSelectedIndex(); if (tabNum == VIEW_D_NUM || tabNum == VIEW_HAP_NUM){ exportMenuItems[0].setEnabled(true); exportMenuItems[1].setEnabled(true); }else if (tabNum == VIEW_TDT_NUM || tabNum == VIEW_CHECK_NUM){ exportMenuItems[0].setEnabled(true); exportMenuItems[1].setEnabled(false); }else{ exportMenuItems[0].setEnabled(false); exportMenuItems[1].setEnabled(false); } //if we've adjusted the haps display thresh we need to change the haps ass panel if (tabNum == VIEW_TDT_NUM){ JTabbedPane metaAssoc= (JTabbedPane)tabs.getComponentAt(tabNum); //this is the haps ass tab inside the assoc super-tab HaploAssocPanel htp = (HaploAssocPanel) metaAssoc.getComponent(1); if (htp.initialHaplotypeDisplayThreshold != Options.getHaplotypeDisplayThreshold()){ htp.makeTable(theData.getHaplotypes()); } } if (tabNum == VIEW_D_NUM){ keyMenu.setEnabled(true); }else{ keyMenu.setEnabled(false); } viewMenuItems[tabs.getSelectedIndex()].setSelected(true); if (checkPanel != null && checkPanel.changed){ //first store up the current blocks Vector currentBlocks = new Vector(); for (int blocks = 0; blocks < theData.blocks.size(); blocks++){ int thisBlock[] = (int[]) theData.blocks.elementAt(blocks); int thisBlockReal[] = new int[thisBlock.length]; for (int marker = 0; marker < thisBlock.length; marker++){ thisBlockReal[marker] = Chromosome.realIndex[thisBlock[marker]]; } currentBlocks.add(thisBlockReal); } window.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); JTable table = checkPanel.getTable(); boolean[] markerResults = new boolean[table.getRowCount()]; for (int i = 0; i < table.getRowCount(); i++){ markerResults[i] = ((Boolean)table.getValueAt(i,CheckDataPanel.STATUS_COL)).booleanValue(); } Chromosome.doFilter(markerResults); //after editing the filtered marker list, needs to be prodded into //resizing correctly dPrimeDisplay.computePreferredSize(); dPrimeDisplay.colorDPrime(); hapDisplay.theData = theData; if 
(currentBlockDef != BLOX_CUSTOM){ changeBlocks(currentBlockDef); }else{ //adjust the blocks Vector theBlocks = new Vector(); for (int x = 0; x < currentBlocks.size(); x++){ Vector goodies = new Vector(); int currentBlock[] = (int[])currentBlocks.elementAt(x); for (int marker = 0; marker < currentBlock.length; marker++){ for (int y = 0; y < Chromosome.realIndex.length; y++){ //we only keep markers from the input that are "good" from checkdata //we also realign the input file to the current "good" subset since input is //indexed of all possible markers in the dataset if (Chromosome.realIndex[y] == currentBlock[marker]){ goodies.add(new Integer(y)); } } } int thisBlock[] = new int[goodies.size()]; for (int marker = 0; marker < thisBlock.length; marker++){ thisBlock[marker] = ((Integer)goodies.elementAt(marker)).intValue(); } if (thisBlock.length > 1){ theBlocks.add(thisBlock); } } theData.guessBlocks(BLOX_CUSTOM, theBlocks); } if (tdtPanel != null){ tdtPanel.refreshTable(); } setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); checkPanel.changed=false; } if (hapDisplay != null && theData.blocksChanged){ setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); try{ hapDisplay.getHaps(); if(Options.getAssocTest() != ASSOC_NONE) { JTabbedPane metaAssoc= (JTabbedPane)tabs.getComponentAt(VIEW_TDT_NUM); //this is the haps ass tab inside the assoc super-tab HaploAssocPanel hasp = (HaploAssocPanel)metaAssoc.getComponent(1); hasp.makeTable(theData.getHaplotypes()); } }catch(HaploViewException hv){ JOptionPane.showMessageDialog(window, hv.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } hapScroller.setViewportView(hapDisplay); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); theData.blocksChanged = false; } }
| 1,109,455
|
public void stateChanged(ChangeEvent e) { int tabNum = tabs.getSelectedIndex(); if (tabNum == VIEW_D_NUM || tabNum == VIEW_HAP_NUM){ exportMenuItems[0].setEnabled(true); exportMenuItems[1].setEnabled(true); }else if (tabNum == VIEW_TDT_NUM || tabNum == VIEW_CHECK_NUM){ exportMenuItems[0].setEnabled(true); exportMenuItems[1].setEnabled(false); }else{ exportMenuItems[0].setEnabled(false); exportMenuItems[1].setEnabled(false); } //if we've adjusted the haps display thresh we need to change the haps ass panel if (tabNum == VIEW_TDT_NUM){ JTabbedPane metaAssoc= (JTabbedPane)tabs.getComponentAt(tabNum); //this is the haps ass tab inside the assoc super-tab HaploAssocPanel htp = (HaploAssocPanel) metaAssoc.getComponent(1); if (htp.initialHaplotypeDisplayThreshold != Options.getHaplotypeDisplayThreshold()){ htp.makeTable(theData.getHaplotypes()); } } if (tabNum == VIEW_D_NUM){ keyMenu.setEnabled(true); }else{ keyMenu.setEnabled(false); } viewMenuItems[tabs.getSelectedIndex()].setSelected(true); if (checkPanel != null && checkPanel.changed){ //first store up the current blocks Vector currentBlocks = new Vector(); for (int blocks = 0; blocks < theData.blocks.size(); blocks++){ int thisBlock[] = (int[]) theData.blocks.elementAt(blocks); int thisBlockReal[] = new int[thisBlock.length]; for (int marker = 0; marker < thisBlock.length; marker++){ thisBlockReal[marker] = Chromosome.realIndex[thisBlock[marker]]; } currentBlocks.add(thisBlockReal); } window.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); JTable table = checkPanel.getTable(); boolean[] markerResults = new boolean[table.getRowCount()]; for (int i = 0; i < table.getRowCount(); i++){ markerResults[i] = ((Boolean)table.getValueAt(i,CheckDataPanel.STATUS_COL)).booleanValue(); } Chromosome.doFilter(markerResults); //after editing the filtered marker list, needs to be prodded into //resizing correctly dPrimeDisplay.computePreferredSize(); dPrimeDisplay.colorDPrime(); hapDisplay.theData = theData; if 
(currentBlockDef != BLOX_CUSTOM){ changeBlocks(currentBlockDef); }else{ //adjust the blocks Vector theBlocks = new Vector(); for (int x = 0; x < currentBlocks.size(); x++){ Vector goodies = new Vector(); int currentBlock[] = (int[])currentBlocks.elementAt(x); for (int marker = 0; marker < currentBlock.length; marker++){ for (int y = 0; y < Chromosome.realIndex.length; y++){ //we only keep markers from the input that are "good" from checkdata //we also realign the input file to the current "good" subset since input is //indexed of all possible markers in the dataset if (Chromosome.realIndex[y] == currentBlock[marker]){ goodies.add(new Integer(y)); } } } int thisBlock[] = new int[goodies.size()]; for (int marker = 0; marker < thisBlock.length; marker++){ thisBlock[marker] = ((Integer)goodies.elementAt(marker)).intValue(); } if (thisBlock.length > 1){ theBlocks.add(thisBlock); } } theData.guessBlocks(BLOX_CUSTOM, theBlocks); } if (tdtPanel != null){ tdtPanel.refreshTable(); } setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); checkPanel.changed=false; } if (hapDisplay != null && theData.blocksChanged){ setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); try{ hapDisplay.getHaps(); if(Options.getAssocTest() != ASSOC_NONE) { JTabbedPane metaAssoc= (JTabbedPane)tabs.getComponentAt(VIEW_TDT_NUM); //this is the haps ass tab inside the assoc super-tab HaploAssocPanel hasp = (HaploAssocPanel)metaAssoc.getComponent(1); hasp.makeTable(theData.getHaplotypes()); } }catch(HaploViewException hv){ JOptionPane.showMessageDialog(window, hv.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } hapScroller.setViewportView(hapDisplay); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); theData.blocksChanged = false; } }
|
public void stateChanged(ChangeEvent e) { int tabNum = tabs.getSelectedIndex(); if (tabNum == VIEW_D_NUM || tabNum == VIEW_HAP_NUM){ exportMenuItems[0].setEnabled(true); exportMenuItems[1].setEnabled(true); }else if (tabNum == VIEW_TDT_NUM || tabNum == VIEW_CHECK_NUM){ exportMenuItems[0].setEnabled(true); exportMenuItems[1].setEnabled(false); }else{ exportMenuItems[0].setEnabled(false); exportMenuItems[1].setEnabled(false); } //if we've adjusted the haps display thresh we need to change the haps ass panel if (tabNum == VIEW_TDT_NUM){ JTabbedPane metaAssoc= (JTabbedPane)tabs.getComponentAt(tabNum); //this is the haps ass tab inside the assoc super-tab HaploAssocPanel htp = (HaploAssocPanel) metaAssoc.getComponent(1); if (htp.initialHaplotypeDisplayThreshold != Options.getHaplotypeDisplayThreshold()){ htp.makeTable(theData.getHaplotypes()); } } if (tabNum == VIEW_D_NUM){ keyMenu.setEnabled(true); }else{ keyMenu.setEnabled(false); } viewMenuItems[tabs.getSelectedIndex()].setSelected(true); if (checkPanel != null && checkPanel.changed){ //first store up the current blocks Vector currentBlocks = new Vector(); for (int blocks = 0; blocks < theData.blocks.size(); blocks++){ int thisBlock[] = (int[]) theData.blocks.elementAt(blocks); int thisBlockReal[] = new int[thisBlock.length]; for (int marker = 0; marker < thisBlock.length; marker++){ thisBlockReal[marker] = Chromosome.realIndex[thisBlock[marker]]; } currentBlocks.add(thisBlockReal); } window.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); JTable table = checkPanel.getTable(); boolean[] markerResults = new boolean[table.getRowCount()]; for (int i = 0; i < table.getRowCount(); i++){ markerResults[i] = ((Boolean)table.getValueAt(i,CheckDataPanel.STATUS_COL)).booleanValue(); } Chromosome.doFilter(markerResults); //after editing the filtered marker list, needs to be prodded into //resizing correctly dPrimeDisplay.computePreferredSize(); dPrimeDisplay.colorDPrime(); hapDisplay.theData = theData; if 
(currentBlockDef != BLOX_CUSTOM){ changeBlocks(currentBlockDef); }else{ //adjust the blocks Vector theBlocks = new Vector(); for (int x = 0; x < currentBlocks.size(); x++){ Vector goodies = new Vector(); int currentBlock[] = (int[])currentBlocks.elementAt(x); for (int marker = 0; marker < currentBlock.length; marker++){ for (int y = 0; y < Chromosome.realIndex.length; y++){ //we only keep markers from the input that are "good" from checkdata //we also realign the input file to the current "good" subset since input is //indexed of all possible markers in the dataset if (Chromosome.realIndex[y] == currentBlock[marker]){ goodies.add(new Integer(y)); } } } int thisBlock[] = new int[goodies.size()]; for (int marker = 0; marker < thisBlock.length; marker++){ thisBlock[marker] = ((Integer)goodies.elementAt(marker)).intValue(); } if (thisBlock.length > 1){ theBlocks.add(thisBlock); } } theData.guessBlocks(BLOX_CUSTOM, theBlocks); } if (tdtPanel != null){ tdtPanel.refreshTable(); } setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); checkPanel.changed=false; } if (hapDisplay != null && theData.blocksChanged){ setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); try{ hapDisplay.getHaps(); if(Options.getAssocTest() != ASSOC_NONE) { JTabbedPane metaAssoc= (JTabbedPane)tabs.getComponentAt(VIEW_TDT_NUM); //this is the haps ass tab inside the assoc super-tab HaploAssocPanel hasp = (HaploAssocPanel)metaAssoc.getComponent(1); hasp.makeTable(theData.getHaplotypes()); } }catch(HaploViewException hv){ JOptionPane.showMessageDialog(window, hv.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } hapScroller.setViewportView(hapDisplay); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); theData.blocksChanged = false; } }
| 1,109,456
|
public void stateChanged(ChangeEvent e) { int tabNum = tabs.getSelectedIndex(); if (tabNum == VIEW_D_NUM || tabNum == VIEW_HAP_NUM){ exportMenuItems[0].setEnabled(true); exportMenuItems[1].setEnabled(true); }else if (tabNum == VIEW_TDT_NUM || tabNum == VIEW_CHECK_NUM){ exportMenuItems[0].setEnabled(true); exportMenuItems[1].setEnabled(false); }else{ exportMenuItems[0].setEnabled(false); exportMenuItems[1].setEnabled(false); } //if we've adjusted the haps display thresh we need to change the haps ass panel if (tabNum == VIEW_TDT_NUM){ JTabbedPane metaAssoc= (JTabbedPane)tabs.getComponentAt(tabNum); //this is the haps ass tab inside the assoc super-tab HaploAssocPanel htp = (HaploAssocPanel) metaAssoc.getComponent(1); if (htp.initialHaplotypeDisplayThreshold != Options.getHaplotypeDisplayThreshold()){ htp.makeTable(theData.getHaplotypes()); } } if (tabNum == VIEW_D_NUM){ keyMenu.setEnabled(true); }else{ keyMenu.setEnabled(false); } viewMenuItems[tabs.getSelectedIndex()].setSelected(true); if (checkPanel != null && checkPanel.changed){ //first store up the current blocks Vector currentBlocks = new Vector(); for (int blocks = 0; blocks < theData.blocks.size(); blocks++){ int thisBlock[] = (int[]) theData.blocks.elementAt(blocks); int thisBlockReal[] = new int[thisBlock.length]; for (int marker = 0; marker < thisBlock.length; marker++){ thisBlockReal[marker] = Chromosome.realIndex[thisBlock[marker]]; } currentBlocks.add(thisBlockReal); } window.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); JTable table = checkPanel.getTable(); boolean[] markerResults = new boolean[table.getRowCount()]; for (int i = 0; i < table.getRowCount(); i++){ markerResults[i] = ((Boolean)table.getValueAt(i,CheckDataPanel.STATUS_COL)).booleanValue(); } Chromosome.doFilter(markerResults); //after editing the filtered marker list, needs to be prodded into //resizing correctly dPrimeDisplay.computePreferredSize(); dPrimeDisplay.colorDPrime(); hapDisplay.theData = theData; if 
(currentBlockDef != BLOX_CUSTOM){ changeBlocks(currentBlockDef); }else{ //adjust the blocks Vector theBlocks = new Vector(); for (int x = 0; x < currentBlocks.size(); x++){ Vector goodies = new Vector(); int currentBlock[] = (int[])currentBlocks.elementAt(x); for (int marker = 0; marker < currentBlock.length; marker++){ for (int y = 0; y < Chromosome.realIndex.length; y++){ //we only keep markers from the input that are "good" from checkdata //we also realign the input file to the current "good" subset since input is //indexed of all possible markers in the dataset if (Chromosome.realIndex[y] == currentBlock[marker]){ goodies.add(new Integer(y)); } } } int thisBlock[] = new int[goodies.size()]; for (int marker = 0; marker < thisBlock.length; marker++){ thisBlock[marker] = ((Integer)goodies.elementAt(marker)).intValue(); } if (thisBlock.length > 1){ theBlocks.add(thisBlock); } } theData.guessBlocks(BLOX_CUSTOM, theBlocks); } if (tdtPanel != null){ tdtPanel.refreshTable(); } setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); checkPanel.changed=false; } if (hapDisplay != null && theData.blocksChanged){ setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); try{ hapDisplay.getHaps(); if(Options.getAssocTest() != ASSOC_NONE) { JTabbedPane metaAssoc= (JTabbedPane)tabs.getComponentAt(VIEW_TDT_NUM); //this is the haps ass tab inside the assoc super-tab HaploAssocPanel hasp = (HaploAssocPanel)metaAssoc.getComponent(1); hasp.makeTable(theData.getHaplotypes()); } }catch(HaploViewException hv){ JOptionPane.showMessageDialog(window, hv.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } hapScroller.setViewportView(hapDisplay); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); theData.blocksChanged = false; } }
|
public void stateChanged(ChangeEvent e) { int tabNum = tabs.getSelectedIndex(); if (tabNum == VIEW_D_NUM || tabNum == VIEW_HAP_NUM){ exportMenuItems[0].setEnabled(true); exportMenuItems[1].setEnabled(true); }else if (tabNum == VIEW_TDT_NUM || tabNum == VIEW_CHECK_NUM){ exportMenuItems[0].setEnabled(true); exportMenuItems[1].setEnabled(false); }else{ exportMenuItems[0].setEnabled(false); exportMenuItems[1].setEnabled(false); } //if we've adjusted the haps display thresh we need to change the haps ass panel if (tabNum == VIEW_TDT_NUM){ JTabbedPane metaAssoc= (JTabbedPane)tabs.getComponentAt(tabNum); //this is the haps ass tab inside the assoc super-tab HaploAssocPanel htp = (HaploAssocPanel) metaAssoc.getComponent(1); if (htp.initialHaplotypeDisplayThreshold != Options.getHaplotypeDisplayThreshold()){ htp.makeTable(theData.getHaplotypes()); } } if (tabNum == VIEW_D_NUM){ keyMenu.setEnabled(true); }else{ keyMenu.setEnabled(false); } viewMenuItems[tabs.getSelectedIndex()].setSelected(true); if (checkPanel != null && checkPanel.changed){ //first store up the current blocks Vector currentBlocks = new Vector(); for (int blocks = 0; blocks < theData.blocks.size(); blocks++){ int thisBlock[] = (int[]) theData.blocks.elementAt(blocks); int thisBlockReal[] = new int[thisBlock.length]; for (int marker = 0; marker < thisBlock.length; marker++){ thisBlockReal[marker] = Chromosome.realIndex[thisBlock[marker]]; } currentBlocks.add(thisBlockReal); } window.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); JTable table = checkPanel.getTable(); boolean[] markerResults = new boolean[table.getRowCount()]; for (int i = 0; i < table.getRowCount(); i++){ markerResults[i] = ((Boolean)table.getValueAt(i,CheckDataPanel.STATUS_COL)).booleanValue(); } Chromosome.doFilter(markerResults); //after editing the filtered marker list, needs to be prodded into //resizing correctly dPrimeDisplay.computePreferredSize(); dPrimeDisplay.colorDPrime(); hapDisplay.theData = theData; if 
(currentBlockDef != BLOX_CUSTOM){ changeBlocks(currentBlockDef); }else{ //adjust the blocks Vector theBlocks = new Vector(); for (int x = 0; x < currentBlocks.size(); x++){ Vector goodies = new Vector(); int currentBlock[] = (int[])currentBlocks.elementAt(x); for (int marker = 0; marker < currentBlock.length; marker++){ for (int y = 0; y < Chromosome.realIndex.length; y++){ //we only keep markers from the input that are "good" from checkdata //we also realign the input file to the current "good" subset since input is //indexed of all possible markers in the dataset if (Chromosome.realIndex[y] == currentBlock[marker]){ goodies.add(new Integer(y)); } } } int thisBlock[] = new int[goodies.size()]; for (int marker = 0; marker < thisBlock.length; marker++){ thisBlock[marker] = ((Integer)goodies.elementAt(marker)).intValue(); } if (thisBlock.length > 1){ theBlocks.add(thisBlock); } } theData.guessBlocks(BLOX_CUSTOM, theBlocks); } if (tdtPanel != null){ tdtPanel.refreshTable(); } setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); checkPanel.changed=false; } if (hapDisplay != null && theData.blocksChanged){ setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); try{ hapDisplay.getHaps(); if(Options.getAssocTest() != ASSOC_NONE) { JTabbedPane metaAssoc= (JTabbedPane)tabs.getComponentAt(VIEW_TDT_NUM); //this is the haps ass tab inside the assoc super-tab HaploAssocPanel hasp = (HaploAssocPanel)metaAssoc.getComponent(1); hasp.makeTable(theData.getHaplotypes()); } }catch(HaploViewException hv){ JOptionPane.showMessageDialog(window, hv.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } hapScroller.setViewportView(hapDisplay); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); theData.blocksChanged = false; } }
| 1,109,457
|
public void actionPerformed(ActionEvent e) { String command = e.getActionCommand(); if (command.equals(READ_GENOTYPES)){ ReadDataDialog readDialog = new ReadDataDialog("Open new data", this); readDialog.pack(); readDialog.setVisible(true); } else if (command.equals(READ_MARKERS)){ //JFileChooser fc = new JFileChooser(System.getProperty("user.dir")); fc.setSelectedFile(new File("")); int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { readMarkers(fc.getSelectedFile(),null); } }else if (command.equals(READ_ANALYSIS_TRACK)){ fc.setSelectedFile(new File("")); int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION){ readAnalysisFile(fc.getSelectedFile()); } }else if (command.equals(READ_BLOCKS_FILE)){ fc.setSelectedFile(new File("")); if (fc.showOpenDialog(this) == JFileChooser.APPROVE_OPTION){ readBlocksFile(fc.getSelectedFile()); } }else if (command.equals(CUST_BLOCKS)){ TweakBlockDefsDialog tweakDialog = new TweakBlockDefsDialog("Customize Blocks", this); tweakDialog.pack(); tweakDialog.setVisible(true); }else if (command.equals(CLEAR_BLOCKS)){ changeBlocks(BLOX_NONE); //blockdef clauses }else if (command.startsWith("block")){ int method = Integer.valueOf(command.substring(5)).intValue(); changeBlocks(method); /*for (int i = 1; i < colorMenuItems.length; i++){ if (method+1 == i){ colorMenuItems[i].setEnabled(true); }else{ colorMenuItems[i].setEnabled(false); } } colorMenuItems[0].setSelected(true);*/ //zooming clauses }else if (command.startsWith("zoom")){ dPrimeDisplay.zoom(Integer.valueOf(command.substring(4)).intValue()); //coloring clauses }else if (command.startsWith("color")){ Options.setLDColorScheme(Integer.valueOf(command.substring(5)).intValue()+1); dPrimeDisplay.colorDPrime(); changeKey(); //exporting clauses }else if (command.equals(EXPORT_PNG)){ export(tabs.getSelectedIndex(), PNG_MODE, 0, Chromosome.getSize()); }else if (command.equals(EXPORT_TEXT)){ export(tabs.getSelectedIndex(), 
TXT_MODE, 0, Chromosome.getSize()); }else if (command.equals(EXPORT_OPTIONS)){ ExportDialog exDialog = new ExportDialog(this); exDialog.pack(); exDialog.setVisible(true); }else if (command.equals("Select All")){ checkPanel.selectAll(); }else if (command.equals("Rescore Markers")){ String cut = cdc.hwcut.getText(); if (cut.equals("")){ cut = "0"; } CheckData.hwCut = Double.parseDouble(cut); cut = cdc.genocut.getText(); if (cut.equals("")){ cut="0"; } CheckData.failedGenoCut = Integer.parseInt(cut); cut = cdc.mendcut.getText(); if (cut.equals("")){ cut="0"; } CheckData.numMendErrCut = Integer.parseInt(cut); cut = cdc.mafcut.getText(); if (cut.equals("")){ cut="0"; } CheckData.mafCut = Double.parseDouble(cut); checkPanel.redoRatings(); JTable jt = checkPanel.getTable(); jt.repaint(); }else if (command.equals("LD Display Spacing")){ ProportionalSpacingDialog spaceDialog = new ProportionalSpacingDialog(this, "Adjust LD Spacing"); spaceDialog.pack(); spaceDialog.setVisible(true); }else if (command.equals("Tutorial")){ showHelp(); } else if (command.equals("Quit")){ quit(); } else { for (int i = 0; i < viewItems.length; i++) { if (command.equals(viewItems[i])) tabs.setSelectedIndex(i); } } }
|
public void actionPerformed(ActionEvent e) { String command = e.getActionCommand(); if (command.equals(READ_GENOTYPES)){ ReadDataDialog readDialog = new ReadDataDialog("Open new data", this); readDialog.pack(); readDialog.setVisible(true); } else if (command.equals(READ_MARKERS)){ //JFileChooser fc = new JFileChooser(System.getProperty("user.dir")); fc.setSelectedFile(new File("")); int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { readMarkers(fc.getSelectedFile(),null); } }else if (command.equals(READ_ANALYSIS_TRACK)){ fc.setSelectedFile(new File("")); int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION){ readAnalysisFile(fc.getSelectedFile()); } }else if (command.equals(READ_BLOCKS_FILE)){ fc.setSelectedFile(new File("")); if (fc.showOpenDialog(this) == JFileChooser.APPROVE_OPTION){ readBlocksFile(fc.getSelectedFile()); } }else if (command.equals(CUST_BLOCKS)){ TweakBlockDefsDialog tweakDialog = new TweakBlockDefsDialog("Customize Blocks", this); tweakDialog.pack(); tweakDialog.setVisible(true); }else if (command.equals(CLEAR_BLOCKS)){ changeBlocks(BLOX_NONE); //blockdef clauses }else if (command.startsWith("block")){ int method = Integer.valueOf(command.substring(5)).intValue(); changeBlocks(method); /*for (int i = 1; i < colorMenuItems.length; i++){ if (method+1 == i){ colorMenuItems[i].setEnabled(true); }else{ colorMenuItems[i].setEnabled(false); } } colorMenuItems[0].setSelected(true);*/ //zooming clauses }else if (command.startsWith("zoom")){ dPrimeDisplay.zoom(Integer.valueOf(command.substring(4)).intValue()); //coloring clauses }else if (command.startsWith("color")){ Options.setLDColorScheme(Integer.valueOf(command.substring(5)).intValue()); dPrimeDisplay.colorDPrime(); changeKey(); //exporting clauses }else if (command.equals(EXPORT_PNG)){ export(tabs.getSelectedIndex(), PNG_MODE, 0, Chromosome.getSize()); }else if (command.equals(EXPORT_TEXT)){ export(tabs.getSelectedIndex(), 
TXT_MODE, 0, Chromosome.getSize()); }else if (command.equals(EXPORT_OPTIONS)){ ExportDialog exDialog = new ExportDialog(this); exDialog.pack(); exDialog.setVisible(true); }else if (command.equals("Select All")){ checkPanel.selectAll(); }else if (command.equals("Rescore Markers")){ String cut = cdc.hwcut.getText(); if (cut.equals("")){ cut = "0"; } CheckData.hwCut = Double.parseDouble(cut); cut = cdc.genocut.getText(); if (cut.equals("")){ cut="0"; } CheckData.failedGenoCut = Integer.parseInt(cut); cut = cdc.mendcut.getText(); if (cut.equals("")){ cut="0"; } CheckData.numMendErrCut = Integer.parseInt(cut); cut = cdc.mafcut.getText(); if (cut.equals("")){ cut="0"; } CheckData.mafCut = Double.parseDouble(cut); checkPanel.redoRatings(); JTable jt = checkPanel.getTable(); jt.repaint(); }else if (command.equals("LD Display Spacing")){ ProportionalSpacingDialog spaceDialog = new ProportionalSpacingDialog(this, "Adjust LD Spacing"); spaceDialog.pack(); spaceDialog.setVisible(true); }else if (command.equals("Tutorial")){ showHelp(); } else if (command.equals("Quit")){ quit(); } else { for (int i = 0; i < viewItems.length; i++) { if (command.equals(viewItems[i])) tabs.setSelectedIndex(i); } } }
| 1,109,458
|
public void clearDisplays() { if (dPrimeDisplay != null){ dPrimeDisplay.setVisible(false); dPrimeDisplay = null; } if (hapDisplay != null){ hapDisplay.setVisible(false); hapDisplay = null; } if (tdtPanel != null){ tdtPanel.setVisible(false); tdtPanel = null; } }
|
public void clearDisplays() { if (dPrimeDisplay != null){ dPrimeDisplay.setVisible(false); dPrimeDisplay = null; } if (hapDisplay != null){ hapDisplay.setVisible(false); hapDisplay = null; } if (tdtPanel != null){ tdtPanel.setVisible(false); tdtPanel = null; } }
| 1,109,459
|
public void doTag(final XMLOutput output) throws Exception { log.debug("doTag(..):" + name); Goal goal = getProject().getGoal( getName(), true ); goal.setDescription( this.description ); addPrereqs( goal ); Action action = new DefaultAction() { public void performAction() throws Exception { log.debug("Running action of target: " + getName() ); getBody().run(context, output); } }; goal.setAction( action ); }
|
public void doTag(final XMLOutput output) throws Exception { log.debug("doTag(..):" + name); Goal goal = getProject().getGoal( getName(), true ); goal.setDescription( this.description ); addPrereqs( goal ); Action action = new DefaultAction() { public void performAction() throws Exception { log.debug("Running action of target: " + getName() ); invokeBody(output); } }; goal.setAction( action ); }
| 1,109,461
|
public void performAction() throws Exception { log.debug("Running action of target: " + getName() ); getBody().run(context, output); }
|
public void performAction() throws Exception { log.debug("Running action of target: " + getName() ); invokeBody(output); }
| 1,109,462
|
public void doTag(XMLOutput output) throws Exception { // run the body first to configure the task via nested getBody().run(context, output); // output the fileScanner if ( var == null ) { throw new MissingAttributeException( "var" ); } context.setVariable( var, fileScanner ); }
|
public void doTag(XMLOutput output) throws Exception { // run the body first to configure the task via nested invokeBody(output); // output the fileScanner if ( var == null ) { throw new MissingAttributeException( "var" ); } context.setVariable( var, fileScanner ); }
| 1,109,463
|
public void dbChildrenRemoved(SQLObjectEvent e) { if (e.getSource() == this.model.getColumnsFolder()) { int ci[] = e.getChangedIndices(); for (int i = 0; i < ci.length; i++) { columnSelection.remove(ci[i]); } if (columnSelection.size() > 0) { selectNone(); columnSelection.set(Math.min(ci[0], columnSelection.size()-1), Boolean.TRUE); } } try { ArchitectUtils.unlistenToHierarchy(this, e.getChildren()); if (columnSelection.size() != this.model.getColumns().size()) { logger.error("Selection list and children are out of sync: selection="+columnSelection+"; children="+this.model.getColumns()); } } catch (ArchitectException ex) { logger.error("Couldn't remove children", ex); JOptionPane.showMessageDialog(this, "Couldn't delete column: "+ex.getMessage()); } firePropertyChange("model.children", null, null); revalidate(); }
|
public void dbChildrenRemoved(SQLObjectEvent e) { if (e.getSource() == this.model.getColumnsFolder()) { int ci[] = e.getChangedIndices(); for (int i = 0; i < ci.length; i++) { columnSelection.remove(ci[i]); columnHighlight.remove(ci[i]); } if (columnSelection.size() > 0) { selectNone(); columnSelection.set(Math.min(ci[0], columnSelection.size()-1), Boolean.TRUE); } } try { ArchitectUtils.unlistenToHierarchy(this, e.getChildren()); if (columnSelection.size() != this.model.getColumns().size()) { logger.error("Selection list and children are out of sync: selection="+columnSelection+"; children="+this.model.getColumns()); } } catch (ArchitectException ex) { logger.error("Couldn't remove children", ex); JOptionPane.showMessageDialog(this, "Couldn't delete column: "+ex.getMessage()); } firePropertyChange("model.children", null, null); revalidate(); }
| 1,109,465
|
public void dbChildrenRemoved(SQLObjectEvent e) { if (e.getSource() == this.model.getColumnsFolder()) { int ci[] = e.getChangedIndices(); for (int i = 0; i < ci.length; i++) { columnSelection.remove(ci[i]); } if (columnSelection.size() > 0) { selectNone(); columnSelection.set(Math.min(ci[0], columnSelection.size()-1), Boolean.TRUE); } } try { ArchitectUtils.unlistenToHierarchy(this, e.getChildren()); if (columnSelection.size() != this.model.getColumns().size()) { logger.error("Selection list and children are out of sync: selection="+columnSelection+"; children="+this.model.getColumns()); } } catch (ArchitectException ex) { logger.error("Couldn't remove children", ex); JOptionPane.showMessageDialog(this, "Couldn't delete column: "+ex.getMessage()); } firePropertyChange("model.children", null, null); revalidate(); }
|
public void dbChildrenRemoved(SQLObjectEvent e) { if (e.getSource() == this.model.getColumnsFolder()) { int ci[] = e.getChangedIndices(); for (int i = 0; i < ci.length; i++) { columnSelection.remove(ci[i]); } if (columnSelection.size() > 0) { selectNone(); columnSelection.set(Math.min(ci[0], columnSelection.size()-1), Boolean.TRUE); } } try { ArchitectUtils.unlistenToHierarchy(this, e.getChildren()); if (columnSelection.size() != this.model.getColumns().size()) { logger.error("Repairing out-of-sync selection list: selection="+columnSelection+"; children="+this.model.getColumns()); columnSelection = new ArrayList(); for (int j = 0; j < model.getColumns().size(); j++) { columnSelection.add(Boolean.FALSE); } } if (columnHighlight.size() != this.model.getColumns().size()) { logger.error("Repairing out-of-sync highlight list: highlights="+columnHighlight+"; children="+this.model.getColumns()); columnHighlight = new ArrayList(); for (int j = 0; j < model.getColumns().size(); j++) { columnHighlight.add(null); } } } catch (ArchitectException ex) { logger.error("Couldn't remove children", ex); JOptionPane.showMessageDialog(this, "Couldn't delete column: "+ex.getMessage()); } firePropertyChange("model.children", null, null); revalidate(); }
| 1,109,466
|
private void deSelectEverythingElse (MouseEvent evt) { TablePane tp = (TablePane) evt.getSource(); Iterator it = getPlayPen().getSelectedTables().iterator(); while (it.hasNext()) { TablePane t3 = (TablePane)it.next(); logger.debug("(" + tp.getModel().getTableName() + ") zoomed selected table point: " + tp.getLocationOnScreen()); logger.debug("(" + t3.getModel().getTableName() + ") zoomed iterator table point: " + t3.getLocationOnScreen()); if (!tp.getLocationOnScreen().equals(t3.getLocationOnScreen())) { // equals operation might not work so good here // unselect logger.debug("found matching table!"); t3.setSelected(false); t3.selectNone(); } } }
|
private void deSelectEverythingElse (MouseEvent evt) { TablePane tp = (TablePane) evt.getSource(); Iterator it = getPlayPen().getSelectedTables().iterator(); while (it.hasNext()) { TablePane t3 = (TablePane)it.next(); logger.debug("(" + tp.getModel().getTableName() + ") zoomed selected table point: " + tp.getLocationOnScreen()); logger.debug("(" + t3.getModel().getTableName() + ") zoomed iterator table point: " + t3.getLocationOnScreen()); if (!tp.getLocationOnScreen().equals(t3.getLocationOnScreen())) { // equals operation might not work so good here // unselect logger.debug("found matching table!"); t3.setSelected(false); t3.selectNone(); } } }
| 1,109,468
|
protected Action createAction(final StylesheetTag tag, final XMLOutput output) { return new Action() { public void run(Node node) throws Exception { // store the context for use by applyTemplates tag tag.setXPathSource( node ); xpathSource = node; if (log.isDebugEnabled()) { log.debug( "Firing template body for match: " + match + " and node: " + node ); } invokeBody(output); } }; }
|
protected Action createAction(final StylesheetTag tag, final XMLOutput output) { return new Action() { public void run(Node node) throws Exception { // store the context for use by applyTemplates tag tag.setXPathSource( node ); xpathSource = node; if (log.isDebugEnabled()) { log.debug( "Firing template body for match: " + match + " and node: " + node ); } XMLOutput actualOutput = tag.getStylesheetOutput(); if (actualOutput == null) { actualOutput = output; } invokeBody(actualOutput); } }; }
| 1,109,470
|
public void run(Node node) throws Exception { // store the context for use by applyTemplates tag tag.setXPathSource( node ); xpathSource = node; if (log.isDebugEnabled()) { log.debug( "Firing template body for match: " + match + " and node: " + node ); } invokeBody(output); }
|
public void run(Node node) throws Exception { // store the context for use by applyTemplates tag tag.setXPathSource( node ); xpathSource = node; if (log.isDebugEnabled()) { log.debug( "Firing template body for match: " + match + " and node: " + node ); } XMLOutput actualOutput = tag.getStylesheetOutput(); if (actualOutput == null) { actualOutput = output; } invokeBody(actualOutput); }
| 1,109,471
|
private void argHandler(String[] args){ //TODO: -specify values from HaplotypeDisplayController (min hap percentage etc) // -want to be able to output haps file from pedfile boolean nogui = false; String batchMode = ""; String hapsFileName = ""; String pedFileName = ""; String infoFileName = ""; boolean showCheck = false; boolean skipCheck = false; Vector ignoreMarkers = new Vector(); int outputType = -1; int maxDistance = -1; boolean quietMode = false; boolean outputDprime=false; for(int i =0; i < args.length; i++) { if(args[i].equals("-help") || args[i].equals("-h")) { System.out.println("HaploView command line options\n" + "-h, -help print this message\n" + "-n command line output only\n" + "-q quiet mode- doesnt print any warnings or information to screen\n" + "-p <pedfile> specify an input file in pedigree file format\n" + " pedfile specific options (nogui mode only): \n" + " --showcheck displays the results of the various pedigree integrity checks\n" + " --skipcheck skips the various pedfile checks\n" + //TODO: fix ignoremarkers //" --ignoremarkers <markers> ignores the specified markers.<markers> is a comma\n" + //" seperated list of markers. eg. 1,5,7,19,25\n" + "-ha <hapsfile> specify an input file in .haps format\n" + "-i <infofile> specify a marker info file\n" + "-b <batchfile> batch mode. batchfile should contain a list of haps files\n" + "--dprime outputs dprime to <inputfile>.DPRIME\n" + " note: --dprime defaults to no blocks output. use -o to also output blocks\n" + "-o <SFS,GAM,MJD,ALL> output type. SFS, 4 gamete, MJD output or all 3. default is SFS.\n" + "-m <distance> maximum comparison distance in kilobases (integer). 
default is 500"); System.exit(0); } else if(args[i].equals("-n")) { nogui = true; } else if(args[i].equals("-p")) { i++; if( i>=args.length || (args[i].charAt(0) == '-') || args[i].equals("showcheck") ){ System.out.println("-p requires a filename"); System.exit(1); } else{ if(!pedFileName.equals("")){ System.out.println("multiple -p arguments found. only last pedfile listed will be used"); } pedFileName = args[i]; } } else if (args[i].equals("--showcheck")){ showCheck = true; } else if (args[i].equals("--skipcheck")){ skipCheck = true; } /* else if (args[i].equals("--ignoremarkers")){ i++; if(i>=args.length || (args[i].charAt(0) == '-')){ System.out.println("--ignoremarkers requires a list of markers"); System.exit(1); } else { StringTokenizer str = new StringTokenizer(args[i],","); while(str.hasMoreTokens()) { ignoreMarkers.add(str.nextToken()); } } } */ else if(args[i].equals("-ha")) { i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println("-ha requires a filename"); System.exit(1); } else{ if(!hapsFileName.equals("")){ System.out.println("multiple -ha arguments found. only last haps file listed will be used"); } hapsFileName = args[i]; } } else if(args[i].equals("-i")) { i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println("-i requires a filename"); System.exit(1); } else{ if(!infoFileName.equals("")){ System.out.println("multiple -i arguments found. 
only last info file listed will be used"); } infoFileName = args[i]; } } else if(args[i].equals("-o")) { i++; if(!(i>=args.length) && !((args[i].charAt(0)) == '-')){ if(outputType != -1){ System.out.println("only one -o argument is allowed"); System.exit(1); } if(args[i].equals("SFS")){ outputType = 0; } else if(args[i].equals("GAM")){ outputType = 1; } else if(args[i].equals("MJD")){ outputType = 2; } else if(args[i].equals("ALL")) { outputType = 3; } } else { //defaults to SFS output outputType =0; i--; } } else if(args[i].equals("--dprime")) { outputDprime = true; } else if(args[i].equals("-m")) { i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println("-m requires an integer argument"); System.exit(1); } else { if(maxDistance != -1){ System.out.println("only one -m argument allowed"); System.exit(1); } maxDistance = Integer.parseInt(args[i]); if(maxDistance<0){ System.out.println("-m argument must be a positive integer"); System.exit(1); } } } else if(args[i].equals("-b")) { //batch mode i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println("-b requires a filename"); System.exit(1); } else{ if(!batchMode.equals("")){ System.out.println("multiple -b arguments found. only last batch file listed will be used"); } batchMode = args[i]; } } else if(args[i].equals("-q")) { quietMode = true; } else { System.out.println("invalid parameter specified: " + args[i]); } } //mess with vars, set defaults, etc if( outputType == -1 && ( !pedFileName.equals("") || !hapsFileName.equals("") || !batchMode.equals("")) && !outputDprime ) { outputType = 0; if(nogui && !quietMode) { System.out.println("No output type specified. Default of SFS will be used"); } } if(showCheck && !nogui && !quietMode) { System.out.println("pedfile showcheck option only applies in nogui mode. 
ignored."); } if(skipCheck && !quietMode) { System.out.println("Skipping pedigree file check"); } if(maxDistance == -1){ maxDistance = 500; } //set the global variables arg_nogui = nogui; arg_hapsfile = hapsFileName; arg_infoFileName = infoFileName; arg_pedfile = pedFileName; arg_showCheck = showCheck; arg_skipCheck = skipCheck; arg_ignoreMarkers = ignoreMarkers; arg_output = outputType; arg_distance = maxDistance; arg_batchMode = batchMode; arg_quiet = quietMode; arg_dprime = outputDprime; }
|
private void argHandler(String[] args){ //TODO: -specify values from HaplotypeDisplayController (min hap percentage etc) // -want to be able to output haps file from pedfile boolean nogui = false; String batchMode = ""; String hapsFileName = ""; String pedFileName = ""; String infoFileName = ""; boolean showCheck = false; boolean skipCheck = false; Vector ignoreMarkers = new Vector(); int outputType = -1; int maxDistance = -1; boolean quietMode = false; boolean outputDprime=false; for(int i =0; i < args.length; i++) { if(args[i].equals("-help") || args[i].equals("-h")) { System.out.println("HaploView command line options\n" + "-h, -help print this message\n" + "-n command line output only\n" + "-q quiet mode- doesnt print any warnings or information to screen\n" + "-p <pedfile> specify an input file in pedigree file format\n" + " pedfile specific options (nogui mode only): \n" + " --showcheck displays the results of the various pedigree integrity checks\n" + " --skipcheck skips the various pedfile checks\n" + //TODO: fix ignoremarkers //" --ignoremarkers <markers> ignores the specified markers.<markers> is a comma\n" + //" seperated list of markers. eg. 1,5,7,19,25\n" + "-ha <hapsfile> specify an input file in .haps format\n" + "-i <infofile> specify a marker info file\n" + "-b <batchfile> batch mode. batchfile should contain a list of files either all genotype or alternating genotype/info\n" + "--dprime outputs dprime to <inputfile>.DPRIME\n" + " note: --dprime defaults to no blocks output. use -o to also output blocks\n" + "-o <SFS,GAM,MJD,ALL> output type. SFS, 4 gamete, MJD output or all 3. default is SFS.\n" + "-m <distance> maximum comparison distance in kilobases (integer). 
default is 500"); System.exit(0); } else if(args[i].equals("-n")) { nogui = true; } else if(args[i].equals("-p")) { i++; if( i>=args.length || (args[i].charAt(0) == '-') || args[i].equals("showcheck") ){ System.out.println("-p requires a filename"); System.exit(1); } else{ if(!pedFileName.equals("")){ System.out.println("multiple -p arguments found. only last pedfile listed will be used"); } pedFileName = args[i]; } } else if (args[i].equals("--showcheck")){ showCheck = true; } else if (args[i].equals("--skipcheck")){ skipCheck = true; } /* else if (args[i].equals("--ignoremarkers")){ i++; if(i>=args.length || (args[i].charAt(0) == '-')){ System.out.println("--ignoremarkers requires a list of markers"); System.exit(1); } else { StringTokenizer str = new StringTokenizer(args[i],","); while(str.hasMoreTokens()) { ignoreMarkers.add(str.nextToken()); } } } */ else if(args[i].equals("-ha")) { i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println("-ha requires a filename"); System.exit(1); } else{ if(!hapsFileName.equals("")){ System.out.println("multiple -ha arguments found. only last haps file listed will be used"); } hapsFileName = args[i]; } } else if(args[i].equals("-i")) { i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println("-i requires a filename"); System.exit(1); } else{ if(!infoFileName.equals("")){ System.out.println("multiple -i arguments found. 
only last info file listed will be used"); } infoFileName = args[i]; } } else if(args[i].equals("-o")) { i++; if(!(i>=args.length) && !((args[i].charAt(0)) == '-')){ if(outputType != -1){ System.out.println("only one -o argument is allowed"); System.exit(1); } if(args[i].equals("SFS")){ outputType = 0; } else if(args[i].equals("GAM")){ outputType = 1; } else if(args[i].equals("MJD")){ outputType = 2; } else if(args[i].equals("ALL")) { outputType = 3; } } else { //defaults to SFS output outputType =0; i--; } } else if(args[i].equals("--dprime")) { outputDprime = true; } else if(args[i].equals("-m")) { i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println("-m requires an integer argument"); System.exit(1); } else { if(maxDistance != -1){ System.out.println("only one -m argument allowed"); System.exit(1); } maxDistance = Integer.parseInt(args[i]); if(maxDistance<0){ System.out.println("-m argument must be a positive integer"); System.exit(1); } } } else if(args[i].equals("-b")) { //batch mode i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println("-b requires a filename"); System.exit(1); } else{ if(!batchMode.equals("")){ System.out.println("multiple -b arguments found. only last batch file listed will be used"); } batchMode = args[i]; } } else if(args[i].equals("-q")) { quietMode = true; } else { System.out.println("invalid parameter specified: " + args[i]); } } //mess with vars, set defaults, etc if( outputType == -1 && ( !pedFileName.equals("") || !hapsFileName.equals("") || !batchMode.equals("")) && !outputDprime ) { outputType = 0; if(nogui && !quietMode) { System.out.println("No output type specified. Default of SFS will be used"); } } if(showCheck && !nogui && !quietMode) { System.out.println("pedfile showcheck option only applies in nogui mode. 
ignored."); } if(skipCheck && !quietMode) { System.out.println("Skipping pedigree file check"); } if(maxDistance == -1){ maxDistance = 500; } //set the global variables arg_nogui = nogui; arg_hapsfile = hapsFileName; arg_infoFileName = infoFileName; arg_pedfile = pedFileName; arg_showCheck = showCheck; arg_skipCheck = skipCheck; arg_ignoreMarkers = ignoreMarkers; arg_output = outputType; arg_distance = maxDistance; arg_batchMode = batchMode; arg_quiet = quietMode; arg_dprime = outputDprime; }
| 1,109,472
|
private void processFile(String fileName,boolean fileType,String infoFileName){ try { int outputType; long maxDistance; long negMaxDistance; HaploData textData; File OutputFile; File inputFile; inputFile = new File(fileName); if(!inputFile.exists()){ System.out.println("input file: " + fileName + " does not exist"); System.exit(1); } maxDistance = this.arg_distance * 1000; negMaxDistance = -maxDistance; outputType = this.arg_output; textData = new HaploData(); Vector result = null; if(!fileType){ //read in haps file textData.prepareHapsInput(inputFile); } else { //read in ped file PedFile ped; Vector pedFileStrings; BufferedReader reader; String line; boolean[] markerResultArray; ped = new PedFile(); pedFileStrings = new Vector(); reader = new BufferedReader(new FileReader(inputFile)); result = new Vector(); while((line = reader.readLine())!=null){ pedFileStrings.add(line); } ped.parseLinkage(pedFileStrings); if(!arg_skipCheck) { result = ped.check(); } markerResultArray = new boolean[ped.getNumMarkers()]; for (int i = 0; i < markerResultArray.length; i++){ if(this.arg_skipCheck) { markerResultArray[i] = true; } else if(((MarkerResult)result.get(i)).getRating() > 0) { markerResultArray[i] = true; } else { markerResultArray[i] = false; } } /* if(this.arg_ignoreMarkers.size()>0) { for(int i=0;i<this.arg_ignoreMarkers.size();i++){ int index = Integer.parseInt((String)this.arg_ignoreMarkers.get(i)); if(index>0 && index<markerResultArray.length){ markerResultArray[index] = false; if(!this.arg_quiet) { System.out.println("Ignoring marker " + (index)); } } } }*/ textData.linkageToChrom(markerResultArray,ped,null); } String name = fileName; String baseName = fileName.substring(0,name.length()-5); if(!infoFileName.equals("")) { File infoFile = new File(infoFileName); if(infoFile.exists()) { textData.prepareMarkerInput(infoFile,maxDistance,null); if(!arg_quiet){ System.out.println("Using marker file " + infoFile.getName()); } textData.infoKnown = true; } else 
if(!this.arg_quiet) { System.out.println("info file " + infoFileName + " does not exist"); } } else { File maybeInfo = new File(baseName + ".info"); if (maybeInfo.exists()){ textData.prepareMarkerInput(maybeInfo,maxDistance,null); if(!arg_quiet){ System.out.println("Using marker file " + maybeInfo.getName()); } textData.infoKnown = true; } } if(this.arg_showCheck && result != null) { System.out.println("Data check results:\n" + "Name\t\tObsHET\tPredHET\tHWpval\t%Geno\tFamTrio\tMendErr"); for(int i=0;i<result.size();i++){ MarkerResult currentResult = (MarkerResult)result.get(i); System.out.println( Chromosome.getMarker(i).getName() +"\t"+ currentResult.getObsHet() +"\t"+ currentResult.getPredHet() +"\t"+ currentResult.getHWpvalue() +"\t"+ currentResult.getGenoPercent() +"\t"+ currentResult.getFamTrioNum() +"\t"+ currentResult.getMendErrNum()); } } if(outputType != -1){ textData.generateDPrimeTable(maxDistance); Haplotype[][] haplos; switch(outputType){ case 0: OutputFile = new File(fileName + ".SFSblocks"); break; case 1: OutputFile = new File(fileName + ".4GAMblocks"); break; case 2: OutputFile = new File(fileName + ".MJDblocks"); break; default: OutputFile = new File(fileName + ".SFSblocks"); break; } //this handles output type ALL if(outputType == 3) { OutputFile = new File(fileName + ".SFSblocks"); textData.guessBlocks(0); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); OutputFile = new File(fileName + ".4GAMblocks"); textData.guessBlocks(1); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); OutputFile = new File(fileName + ".MJDblocks"); textData.guessBlocks(2); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); }else{ textData.guessBlocks(outputType); haplos = 
textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); } } if(this.arg_dprime) { OutputFile = new File(fileName + ".DPRIME"); if (textData.filteredDPrimeTable != null){ textData.saveDprimeToText(OutputFile); }else{ //this means that we're just writing dprime so we won't //keep the (potentially huge) dprime table in memory but instead //write out one line at a time forget FileWriter saveDprimeWriter = new FileWriter(OutputFile); if (textData.infoKnown){ saveDprimeWriter.write("L1\tL2\tD'\tLOD\tr^2\tCIlow\tCIhi\tDist\n"); long dist; PairwiseLinkage linkageResult; for (int i = 0; i < Chromosome.getFilteredSize(); i++){ for (int j = 0; j < Chromosome.getFilteredSize(); j++){ //many "slots" in table aren't filled in because it is a 1/2 matrix if (i < j){ dist = (Chromosome.getFilteredMarker(j)).getPosition() - (Chromosome.getFilteredMarker(i)).getPosition(); if (maxDistance > 0){ if ((dist > maxDistance || dist < negMaxDistance)){ continue; } } linkageResult = textData.computeDPrime(Chromosome.realIndex[i],Chromosome.realIndex[j]); if(linkageResult != null) { saveDprimeWriter.write(Chromosome.getFilteredMarker(i).getName() + "\t" + Chromosome.getFilteredMarker(j).getName() + "\t" + linkageResult.toString() + "\t" + dist + "\n"); } } } } }else{ saveDprimeWriter.write("L1\tL2\tD'\tLOD\tr^2\tCIlow\tCIhi\n"); long dist; PairwiseLinkage linkageResult; for (int i = 0; i < Chromosome.getFilteredSize(); i++){ for (int j = 0; j < Chromosome.getFilteredSize(); j++){ //many "slots" in table aren't filled in because it is a 1/2 matrix if (i < j){ dist = (Chromosome.getFilteredMarker(j)).getPosition() - (Chromosome.getFilteredMarker(i)).getPosition(); if (maxDistance > 0){ if ((dist > maxDistance || dist < negMaxDistance)){ continue; } } linkageResult = textData.computeDPrime(Chromosome.realIndex[i],Chromosome.realIndex[j]); if(linkageResult != null) { 
saveDprimeWriter.write((Chromosome.realIndex[i]+1) + "\t" + (Chromosome.realIndex[j]+1) + "\t" + linkageResult + "\n"); } } } } } saveDprimeWriter.close(); } } if(fileType){ //TDT.calcTrioTDT(textData.chromosomes); //TODO: Deal with this. why do we calc TDT? and make sure not to do it except when appropriate } } catch(IOException e){ System.err.println("An error has occured. This probably has to do with file input or output"); } catch(HaploViewException e){ System.err.println(e.getMessage()); } catch(PedFileException pfe) { System.err.println(pfe.getMessage()); } }
|
private void processFile(String fileName,boolean fileType,String infoFileName){ try { int outputType; long maxDistance; long negMaxDistance; HaploData textData; File OutputFile; File inputFile; inputFile = new File(fileName); if(!inputFile.exists()){ System.out.println("input file: " + fileName + " does not exist"); System.exit(1); } maxDistance = this.arg_distance * 1000; negMaxDistance = -maxDistance; outputType = this.arg_output; textData = new HaploData(); Vector result = null; if(!fileType){ //read in haps file textData.prepareHapsInput(inputFile); } else { //read in ped file PedFile ped; Vector pedFileStrings; BufferedReader reader; String line; boolean[] markerResultArray; ped = new PedFile(); pedFileStrings = new Vector(); reader = new BufferedReader(new FileReader(inputFile)); result = new Vector(); while((line = reader.readLine())!=null){ pedFileStrings.add(line); } ped.parseLinkage(pedFileStrings); if(!arg_skipCheck) { result = ped.check(); } markerResultArray = new boolean[ped.getNumMarkers()]; for (int i = 0; i < markerResultArray.length; i++){ if(this.arg_skipCheck) { markerResultArray[i] = true; } else if(((MarkerResult)result.get(i)).getRating() > 0) { markerResultArray[i] = true; } else { markerResultArray[i] = false; } } /* if(this.arg_ignoreMarkers.size()>0) { for(int i=0;i<this.arg_ignoreMarkers.size();i++){ int index = Integer.parseInt((String)this.arg_ignoreMarkers.get(i)); if(index>0 && index<markerResultArray.length){ markerResultArray[index] = false; if(!this.arg_quiet) { System.out.println("Ignoring marker " + (index)); } } } }*/ textData.linkageToChrom(markerResultArray,ped,null); } String name = fileName; String baseName = fileName.substring(0,name.length()-5); if(!infoFileName.equals("")) { File infoFile = new File(infoFileName); if(infoFile.exists()) { textData.prepareMarkerInput(infoFile,maxDistance,null); if(!arg_quiet){ System.out.println("Using marker file " + infoFile.getName()); } textData.infoKnown = true; } else 
if(!this.arg_quiet) { System.out.println("info file " + infoFileName + " does not exist"); } } else { File maybeInfo = new File(baseName + ".info"); if (maybeInfo.exists()){ textData.prepareMarkerInput(maybeInfo,maxDistance,null); if(!arg_quiet){ System.out.println("Using marker file " + maybeInfo.getName()); } textData.infoKnown = true; } } if(this.arg_showCheck && result != null) { System.out.println("Data check results:\n" + "Name\t\tObsHET\tPredHET\tHWpval\t%Geno\tFamTrio\tMendErr"); for(int i=0;i<result.size();i++){ MarkerResult currentResult = (MarkerResult)result.get(i); System.out.println( Chromosome.getMarker(i).getName() +"\t"+ currentResult.getObsHet() +"\t"+ currentResult.getPredHet() +"\t"+ currentResult.getHWpvalue() +"\t"+ currentResult.getGenoPercent() +"\t"+ currentResult.getFamTrioNum() +"\t"+ currentResult.getMendErrNum()); } } if(outputType != -1){ textData.generateDPrimeTable(maxDistance); Haplotype[][] haplos; switch(outputType){ case 0: OutputFile = new File(fileName + ".SFSblocks"); break; case 1: OutputFile = new File(fileName + ".4GAMblocks"); break; case 2: OutputFile = new File(fileName + ".MJDblocks"); break; default: OutputFile = new File(fileName + ".SFSblocks"); break; } //this handles output type ALL if(outputType == 3) { OutputFile = new File(fileName + ".SFSblocks"); textData.guessBlocks(0); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); OutputFile = new File(fileName + ".4GAMblocks"); textData.guessBlocks(1); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); OutputFile = new File(fileName + ".MJDblocks"); textData.guessBlocks(2); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); }else{ textData.guessBlocks(outputType); haplos = 
textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); } } if(this.arg_dprime) { OutputFile = new File(fileName + ".DPRIME"); if (textData.filteredDPrimeTable != null){ textData.saveDprimeToText(OutputFile); }else{ //this means that we're just writing dprime so we won't //keep the (potentially huge) dprime table in memory but instead //write out one line at a time forget FileWriter saveDprimeWriter = new FileWriter(OutputFile); if (textData.infoKnown){ saveDprimeWriter.write("L1\tL2\tD'\tLOD\tr^2\tCIlow\tCIhi\tDist\n"); long dist; PairwiseLinkage linkageResult; for (int i = 0; i < Chromosome.getFilteredSize(); i++){ for (int j = 0; j < Chromosome.getFilteredSize(); j++){ //many "slots" in table aren't filled in because it is a 1/2 matrix if (i < j){ dist = (Chromosome.getFilteredMarker(j)).getPosition() - (Chromosome.getFilteredMarker(i)).getPosition(); if (maxDistance > 0){ if ((dist > maxDistance || dist < negMaxDistance)){ continue; } } linkageResult = textData.computeDPrime(Chromosome.realIndex[i],Chromosome.realIndex[j]); if(linkageResult != null) { saveDprimeWriter.write(Chromosome.getFilteredMarker(i).getName() + "\t" + Chromosome.getFilteredMarker(j).getName() + "\t" + linkageResult.toString() + "\t" + dist + "\n"); } } } } }else{ saveDprimeWriter.write("L1\tL2\tD'\tLOD\tr^2\tCIlow\tCIhi\n"); long dist; PairwiseLinkage linkageResult; for (int i = 0; i < Chromosome.getFilteredSize(); i++){ for (int j = 0; j < Chromosome.getFilteredSize(); j++){ //many "slots" in table aren't filled in because it is a 1/2 matrix if (i < j){ dist = (Chromosome.getFilteredMarker(j)).getPosition() - (Chromosome.getFilteredMarker(i)).getPosition(); if (maxDistance > 0){ if ((dist > maxDistance || dist < negMaxDistance)){ continue; } } linkageResult = textData.computeDPrime(Chromosome.realIndex[i],Chromosome.realIndex[j]); if(linkageResult != null) { 
saveDprimeWriter.write((Chromosome.realIndex[i]+1) + "\t" + (Chromosome.realIndex[j]+1) + "\t" + linkageResult + "\n"); } } } } } saveDprimeWriter.close(); } } if(fileType){ //TDT.calcTrioTDT(textData.chromosomes); //TODO: Deal with this. why do we calc TDT? and make sure not to do it except when appropriate } } catch(IOException e){ System.err.println("An error has occured. This probably has to do with file input or output"); } catch(HaploViewException e){ System.err.println(e.getMessage()); } catch(PedFileException pfe) { System.err.println(pfe.getMessage()); } }
| 1,109,473
|
private void processFile(String fileName,boolean fileType,String infoFileName){ try { int outputType; long maxDistance; long negMaxDistance; HaploData textData; File OutputFile; File inputFile; inputFile = new File(fileName); if(!inputFile.exists()){ System.out.println("input file: " + fileName + " does not exist"); System.exit(1); } maxDistance = this.arg_distance * 1000; negMaxDistance = -maxDistance; outputType = this.arg_output; textData = new HaploData(); Vector result = null; if(!fileType){ //read in haps file textData.prepareHapsInput(inputFile); } else { //read in ped file PedFile ped; Vector pedFileStrings; BufferedReader reader; String line; boolean[] markerResultArray; ped = new PedFile(); pedFileStrings = new Vector(); reader = new BufferedReader(new FileReader(inputFile)); result = new Vector(); while((line = reader.readLine())!=null){ pedFileStrings.add(line); } ped.parseLinkage(pedFileStrings); if(!arg_skipCheck) { result = ped.check(); } markerResultArray = new boolean[ped.getNumMarkers()]; for (int i = 0; i < markerResultArray.length; i++){ if(this.arg_skipCheck) { markerResultArray[i] = true; } else if(((MarkerResult)result.get(i)).getRating() > 0) { markerResultArray[i] = true; } else { markerResultArray[i] = false; } } /* if(this.arg_ignoreMarkers.size()>0) { for(int i=0;i<this.arg_ignoreMarkers.size();i++){ int index = Integer.parseInt((String)this.arg_ignoreMarkers.get(i)); if(index>0 && index<markerResultArray.length){ markerResultArray[index] = false; if(!this.arg_quiet) { System.out.println("Ignoring marker " + (index)); } } } }*/ textData.linkageToChrom(markerResultArray,ped,null); } String name = fileName; String baseName = fileName.substring(0,name.length()-5); if(!infoFileName.equals("")) { File infoFile = new File(infoFileName); if(infoFile.exists()) { textData.prepareMarkerInput(infoFile,maxDistance,null); if(!arg_quiet){ System.out.println("Using marker file " + infoFile.getName()); } textData.infoKnown = true; } else 
if(!this.arg_quiet) { System.out.println("info file " + infoFileName + " does not exist"); } } else { File maybeInfo = new File(baseName + ".info"); if (maybeInfo.exists()){ textData.prepareMarkerInput(maybeInfo,maxDistance,null); if(!arg_quiet){ System.out.println("Using marker file " + maybeInfo.getName()); } textData.infoKnown = true; } } if(this.arg_showCheck && result != null) { System.out.println("Data check results:\n" + "Name\t\tObsHET\tPredHET\tHWpval\t%Geno\tFamTrio\tMendErr"); for(int i=0;i<result.size();i++){ MarkerResult currentResult = (MarkerResult)result.get(i); System.out.println( Chromosome.getMarker(i).getName() +"\t"+ currentResult.getObsHet() +"\t"+ currentResult.getPredHet() +"\t"+ currentResult.getHWpvalue() +"\t"+ currentResult.getGenoPercent() +"\t"+ currentResult.getFamTrioNum() +"\t"+ currentResult.getMendErrNum()); } } if(outputType != -1){ textData.generateDPrimeTable(maxDistance); Haplotype[][] haplos; switch(outputType){ case 0: OutputFile = new File(fileName + ".SFSblocks"); break; case 1: OutputFile = new File(fileName + ".4GAMblocks"); break; case 2: OutputFile = new File(fileName + ".MJDblocks"); break; default: OutputFile = new File(fileName + ".SFSblocks"); break; } //this handles output type ALL if(outputType == 3) { OutputFile = new File(fileName + ".SFSblocks"); textData.guessBlocks(0); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); OutputFile = new File(fileName + ".4GAMblocks"); textData.guessBlocks(1); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); OutputFile = new File(fileName + ".MJDblocks"); textData.guessBlocks(2); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); }else{ textData.guessBlocks(outputType); haplos = 
textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); } } if(this.arg_dprime) { OutputFile = new File(fileName + ".DPRIME"); if (textData.filteredDPrimeTable != null){ textData.saveDprimeToText(OutputFile); }else{ //this means that we're just writing dprime so we won't //keep the (potentially huge) dprime table in memory but instead //write out one line at a time forget FileWriter saveDprimeWriter = new FileWriter(OutputFile); if (textData.infoKnown){ saveDprimeWriter.write("L1\tL2\tD'\tLOD\tr^2\tCIlow\tCIhi\tDist\n"); long dist; PairwiseLinkage linkageResult; for (int i = 0; i < Chromosome.getFilteredSize(); i++){ for (int j = 0; j < Chromosome.getFilteredSize(); j++){ //many "slots" in table aren't filled in because it is a 1/2 matrix if (i < j){ dist = (Chromosome.getFilteredMarker(j)).getPosition() - (Chromosome.getFilteredMarker(i)).getPosition(); if (maxDistance > 0){ if ((dist > maxDistance || dist < negMaxDistance)){ continue; } } linkageResult = textData.computeDPrime(Chromosome.realIndex[i],Chromosome.realIndex[j]); if(linkageResult != null) { saveDprimeWriter.write(Chromosome.getFilteredMarker(i).getName() + "\t" + Chromosome.getFilteredMarker(j).getName() + "\t" + linkageResult.toString() + "\t" + dist + "\n"); } } } } }else{ saveDprimeWriter.write("L1\tL2\tD'\tLOD\tr^2\tCIlow\tCIhi\n"); long dist; PairwiseLinkage linkageResult; for (int i = 0; i < Chromosome.getFilteredSize(); i++){ for (int j = 0; j < Chromosome.getFilteredSize(); j++){ //many "slots" in table aren't filled in because it is a 1/2 matrix if (i < j){ dist = (Chromosome.getFilteredMarker(j)).getPosition() - (Chromosome.getFilteredMarker(i)).getPosition(); if (maxDistance > 0){ if ((dist > maxDistance || dist < negMaxDistance)){ continue; } } linkageResult = textData.computeDPrime(Chromosome.realIndex[i],Chromosome.realIndex[j]); if(linkageResult != null) { 
saveDprimeWriter.write((Chromosome.realIndex[i]+1) + "\t" + (Chromosome.realIndex[j]+1) + "\t" + linkageResult + "\n"); } } } } } saveDprimeWriter.close(); } } if(fileType){ //TDT.calcTrioTDT(textData.chromosomes); //TODO: Deal with this. why do we calc TDT? and make sure not to do it except when appropriate } } catch(IOException e){ System.err.println("An error has occured. This probably has to do with file input or output"); } catch(HaploViewException e){ System.err.println(e.getMessage()); } catch(PedFileException pfe) { System.err.println(pfe.getMessage()); } }
|
private void processFile(String fileName,boolean fileType,String infoFileName){ try { int outputType; long maxDistance; long negMaxDistance; HaploData textData; File OutputFile; File inputFile; inputFile = new File(fileName); if(!inputFile.exists()){ System.out.println("input file: " + fileName + " does not exist"); System.exit(1); } maxDistance = this.arg_distance * 1000; negMaxDistance = -maxDistance; outputType = this.arg_output; textData = new HaploData(); Vector result = null; if(!fileType){ //read in haps file textData.prepareHapsInput(inputFile); } else { //read in ped file PedFile ped; Vector pedFileStrings; BufferedReader reader; String line; boolean[] markerResultArray; ped = new PedFile(); pedFileStrings = new Vector(); reader = new BufferedReader(new FileReader(inputFile)); result = new Vector(); while((line = reader.readLine())!=null){ pedFileStrings.add(line); } ped.parseLinkage(pedFileStrings); if(!arg_skipCheck) { result = ped.check(); } markerResultArray = new boolean[ped.getNumMarkers()]; for (int i = 0; i < markerResultArray.length; i++){ if(this.arg_skipCheck) { markerResultArray[i] = true; } else if(((MarkerResult)result.get(i)).getRating() > 0) { markerResultArray[i] = true; } else { markerResultArray[i] = false; } } /* if(this.arg_ignoreMarkers.size()>0) { for(int i=0;i<this.arg_ignoreMarkers.size();i++){ int index = Integer.parseInt((String)this.arg_ignoreMarkers.get(i)); if(index>0 && index<markerResultArray.length){ markerResultArray[index] = false; if(!this.arg_quiet) { System.out.println("Ignoring marker " + (index)); } } } }*/ textData.linkageToChrom(inputFile, 3, arg_skipCheck); } String name = fileName; String baseName = fileName.substring(0,name.length()-5); if(!infoFileName.equals("")) { File infoFile = new File(infoFileName); if(infoFile.exists()) { textData.prepareMarkerInput(infoFile,maxDistance,null); if(!arg_quiet){ System.out.println("Using marker file " + infoFile.getName()); } textData.infoKnown = true; } else 
if(!this.arg_quiet) { System.out.println("info file " + infoFileName + " does not exist"); } } else { File maybeInfo = new File(baseName + ".info"); if (maybeInfo.exists()){ textData.prepareMarkerInput(maybeInfo,maxDistance,null); if(!arg_quiet){ System.out.println("Using marker file " + maybeInfo.getName()); } textData.infoKnown = true; } } if(this.arg_showCheck && result != null) { System.out.println("Data check results:\n" + "Name\t\tObsHET\tPredHET\tHWpval\t%Geno\tFamTrio\tMendErr"); for(int i=0;i<result.size();i++){ MarkerResult currentResult = (MarkerResult)result.get(i); System.out.println( Chromosome.getMarker(i).getName() +"\t"+ currentResult.getObsHet() +"\t"+ currentResult.getPredHet() +"\t"+ currentResult.getHWpvalue() +"\t"+ currentResult.getGenoPercent() +"\t"+ currentResult.getFamTrioNum() +"\t"+ currentResult.getMendErrNum()); } } if(outputType != -1){ textData.generateDPrimeTable(maxDistance); Haplotype[][] haplos; switch(outputType){ case 0: OutputFile = new File(fileName + ".SFSblocks"); break; case 1: OutputFile = new File(fileName + ".4GAMblocks"); break; case 2: OutputFile = new File(fileName + ".MJDblocks"); break; default: OutputFile = new File(fileName + ".SFSblocks"); break; } //this handles output type ALL if(outputType == 3) { OutputFile = new File(fileName + ".SFSblocks"); textData.guessBlocks(0); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); OutputFile = new File(fileName + ".4GAMblocks"); textData.guessBlocks(1); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); OutputFile = new File(fileName + ".MJDblocks"); textData.guessBlocks(2); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); }else{ textData.guessBlocks(outputType); haplos = 
textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); } } if(this.arg_dprime) { OutputFile = new File(fileName + ".DPRIME"); if (textData.filteredDPrimeTable != null){ textData.saveDprimeToText(OutputFile); }else{ //this means that we're just writing dprime so we won't //keep the (potentially huge) dprime table in memory but instead //write out one line at a time forget FileWriter saveDprimeWriter = new FileWriter(OutputFile); if (textData.infoKnown){ saveDprimeWriter.write("L1\tL2\tD'\tLOD\tr^2\tCIlow\tCIhi\tDist\n"); long dist; PairwiseLinkage linkageResult; for (int i = 0; i < Chromosome.getFilteredSize(); i++){ for (int j = 0; j < Chromosome.getFilteredSize(); j++){ //many "slots" in table aren't filled in because it is a 1/2 matrix if (i < j){ dist = (Chromosome.getFilteredMarker(j)).getPosition() - (Chromosome.getFilteredMarker(i)).getPosition(); if (maxDistance > 0){ if ((dist > maxDistance || dist < negMaxDistance)){ continue; } } linkageResult = textData.computeDPrime(Chromosome.realIndex[i],Chromosome.realIndex[j]); if(linkageResult != null) { saveDprimeWriter.write(Chromosome.getFilteredMarker(i).getName() + "\t" + Chromosome.getFilteredMarker(j).getName() + "\t" + linkageResult.toString() + "\t" + dist + "\n"); } } } } }else{ saveDprimeWriter.write("L1\tL2\tD'\tLOD\tr^2\tCIlow\tCIhi\n"); long dist; PairwiseLinkage linkageResult; for (int i = 0; i < Chromosome.getFilteredSize(); i++){ for (int j = 0; j < Chromosome.getFilteredSize(); j++){ //many "slots" in table aren't filled in because it is a 1/2 matrix if (i < j){ dist = (Chromosome.getFilteredMarker(j)).getPosition() - (Chromosome.getFilteredMarker(i)).getPosition(); if (maxDistance > 0){ if ((dist > maxDistance || dist < negMaxDistance)){ continue; } } linkageResult = textData.computeDPrime(Chromosome.realIndex[i],Chromosome.realIndex[j]); if(linkageResult != null) { 
saveDprimeWriter.write((Chromosome.realIndex[i]+1) + "\t" + (Chromosome.realIndex[j]+1) + "\t" + linkageResult + "\n"); } } } } } saveDprimeWriter.close(); } } if(fileType){ //TDT.calcTrioTDT(textData.chromosomes); //TODO: Deal with this. why do we calc TDT? and make sure not to do it except when appropriate } } catch(IOException e){ System.err.println("An error has occured. This probably has to do with file input or output"); } catch(HaploViewException e){ System.err.println(e.getMessage()); } catch(PedFileException pfe) { System.err.println(pfe.getMessage()); } }
| 1,109,474
|
public void testGetDerivedInstance() throws Exception { SQLTable derivedTable; SQLTable table1; assertNotNull(table1 = db.getTableByName("REGRESSION_TEST1")); derivedTable = SQLTable.getDerivedInstance(table1, table1.getParentDatabase()); TreeMap derivedPropertyMap = new TreeMap(BeanUtils.describe(derivedTable)); TreeMap table1PropertyMap = new TreeMap(BeanUtils.describe(table1)); derivedPropertyMap.remove("parent"); derivedPropertyMap.remove("schemaName"); derivedPropertyMap.remove("schema"); derivedPropertyMap.remove("shortDisplayName"); table1PropertyMap.remove("parent"); table1PropertyMap.remove("schemaName"); table1PropertyMap.remove("schema"); table1PropertyMap.remove("shortDisplayName"); assertEquals("Derived table not properly copied", derivedPropertyMap.toString(), table1PropertyMap.toString()); }
|
public void testGetDerivedInstance() throws Exception { SQLTable derivedTable; SQLTable table1; assertNotNull(table1 = db.getTableByName("REGRESSION_TEST1")); derivedTable = SQLTable.getDerivedInstance(table1, pp); TreeMap derivedPropertyMap = new TreeMap(BeanUtils.describe(derivedTable)); TreeMap table1PropertyMap = new TreeMap(BeanUtils.describe(table1)); derivedPropertyMap.remove("parent"); derivedPropertyMap.remove("schemaName"); derivedPropertyMap.remove("schema"); derivedPropertyMap.remove("shortDisplayName"); table1PropertyMap.remove("parent"); table1PropertyMap.remove("schemaName"); table1PropertyMap.remove("schema"); table1PropertyMap.remove("shortDisplayName"); assertEquals("Derived table not properly copied", derivedPropertyMap.toString(), table1PropertyMap.toString()); }
| 1,109,475
|
private void processFile(String fileName, int fileType, String infoFileName){ try { HaploData textData; File OutputFile; File inputFile; if(!quietMode && fileName != null){ System.out.println("Using data file " + fileName); } inputFile = new File(fileName); if(!inputFile.exists()){ System.out.println("input file: " + fileName + " does not exist"); System.exit(1); } textData = new HaploData(); Vector result = null; if(fileType == HAPS){ //read in haps file textData.prepareHapsInput(inputFile); } else if (fileType == PED) { //read in ped file /* if(this.arg_ignoreMarkers.size()>0) { for(int i=0;i<this.arg_ignoreMarkers.size();i++){ int index = Integer.parseInt((String)this.arg_ignoreMarkers.get(i)); if(index>0 && index<markerResultArray.length){ markerResultArray[index] = false; if(!this.quietMode) { System.out.println("Ignoring marker " + (index)); } } } }*/ result = textData.linkageToChrom(inputFile, 3, skipCheck); if(textData.getPedFile().isBogusParents()) { System.out.println("Error: One or more individuals in the file reference non-existent parents.\nThese references have been ignored."); } }else{ //read in hapmapfile result = textData.linkageToChrom(inputFile,4,skipCheck); } File infoFile = null; if (infoFileName != null){ infoFile = new File(infoFileName); } if (result != null){ textData.prepareMarkerInput(infoFile,textData.getPedFile().getHMInfo()); }else{ textData.prepareMarkerInput(infoFile,null); } if(!quietMode && infoFile != null){ System.out.println("Using marker file " + infoFile.getName()); } if(outputCheck && result != null){ CheckDataPanel cp = new CheckDataPanel(textData); cp.printTable(validateOutputFile(fileName + ".CHECK")); } Vector cust = new Vector(); if(outputType != -1){ textData.generateDPrimeTable(); Haplotype[][] haplos; switch(outputType){ case BLOX_GABRIEL: OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; case BLOX_4GAM: OutputFile = validateOutputFile(fileName + ".4GAMblocks"); break; case BLOX_SPINE: OutputFile = 
validateOutputFile(fileName + ".SPINEblocks"); break; case BLOX_CUSTOM: OutputFile = validateOutputFile(fileName + ".CUSTblocks"); //read in the blocks file File blocksFile = new File(blockFileName); cust = textData.readBlocks(blocksFile); break; default: OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; } //this handles output type ALL Haplotype[][] orderedHaplos; Haplotype[][] crossedHaplos; if(outputType == BLOX_ALL) { OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); textData.guessBlocks(BLOX_GABRIEL); haplos = textData.generateHaplotypes(textData.blocks, false); orderedHaplos = orderHaps(haplos); crossedHaplos = textData.generateCrossovers(orderedHaplos); textData.saveHapsToText(crossedHaplos, textData.getMultiDprime(), OutputFile); OutputFile = validateOutputFile(fileName + ".4GAMblocks"); textData.guessBlocks(BLOX_4GAM); haplos = textData.generateHaplotypes(textData.blocks, false); orderedHaplos = orderHaps(haplos); crossedHaplos = textData.generateCrossovers(orderedHaplos); textData.saveHapsToText(crossedHaplos, textData.getMultiDprime(), OutputFile); OutputFile = validateOutputFile(fileName + ".SPINEblocks"); textData.guessBlocks(BLOX_SPINE); haplos = textData.generateHaplotypes(textData.blocks, false); orderedHaplos = orderHaps(haplos); crossedHaplos = textData.generateCrossovers(orderedHaplos); textData.saveHapsToText(crossedHaplos, textData.getMultiDprime(), OutputFile); }else{ textData.guessBlocks(outputType, cust); haplos = textData.generateHaplotypes(textData.blocks, false); orderedHaplos = orderHaps(haplos); crossedHaplos = textData.generateCrossovers(orderedHaplos); textData.saveHapsToText(crossedHaplos, textData.getMultiDprime(), OutputFile); } //todo: should this output hap assoc for each block type if they do more than one? 
if(Options.getAssocTest() == ASSOC_TRIO || Options.getAssocTest() == ASSOC_CC) { //Haplotype[][] orderedHaps = orderHaps(textData.getHaplotypes()); HaploData.saveHapAssocToText(orderedHaplos, fileName + ".HAPASSOC"); } } if(outputDprime) { OutputFile = validateOutputFile(fileName + ".LD"); if (textData.dpTable != null){ textData.saveDprimeToText(OutputFile, TABLE_TYPE, 0, Chromosome.getSize()); }else{ textData.saveDprimeToText(OutputFile, LIVE_TYPE, 0, Chromosome.getSize()); } } if (outputPNG || outputCompressedPNG){ OutputFile = validateOutputFile(fileName + ".LD.PNG"); if (textData.dpTable == null){ textData.generateDPrimeTable(); textData.guessBlocks(BLOX_CUSTOM, new Vector()); } if (trackFileName != null){ textData.readAnalysisTrack(new File(trackFileName)); } DPrimeDisplay dpd = new DPrimeDisplay(textData); BufferedImage i = dpd.export(0,Chromosome.getSize(),outputCompressedPNG); try{ Jimi.putImage("image/png", i, OutputFile.getName()); }catch(JimiException je){ System.out.println(je.getMessage()); } } if(Options.getAssocTest() == ASSOC_TRIO){ Vector tdtResults = TDT.calcTrioTDT(textData.getPedFile()); HaploData.saveMarkerAssocToText(tdtResults, fileName + ".ASSOC"); } else if(Options.getAssocTest() == ASSOC_CC) { Vector ccResults = TDT.calcCCTDT(textData.getPedFile()); HaploData.saveMarkerAssocToText(ccResults, fileName + ".ASSOC"); } } catch(IOException e){ System.err.println("An error has occured. This probably has to do with file input or output"); } catch(HaploViewException e){ System.err.println(e.getMessage()); } catch(PedFileException pfe) { System.err.println(pfe.getMessage()); } }
|
private void processFile(String fileName, int fileType, String infoFileName){ try { HaploData textData; File OutputFile; File inputFile; if(!quietMode && fileName != null){ System.out.println("Using data file " + fileName); } inputFile = new File(fileName); if(!inputFile.exists()){ System.out.println("input file: " + fileName + " does not exist"); System.exit(1); } textData = new HaploData(); Vector result = null; if(fileType == HAPS){ //read in haps file textData.prepareHapsInput(inputFile); } else if (fileType == PED) { //read in ped file /* if(this.arg_ignoreMarkers.size()>0) { for(int i=0;i<this.arg_ignoreMarkers.size();i++){ int index = Integer.parseInt((String)this.arg_ignoreMarkers.get(i)); if(index>0 && index<markerResultArray.length){ markerResultArray[index] = false; if(!this.quietMode) { System.out.println("Ignoring marker " + (index)); } } } }*/ result = textData.linkageToChrom(inputFile, 3, skipCheck); if(textData.getPedFile().isBogusParents()) { System.out.println("Error: One or more individuals in the file reference non-existent parents.\nThese references have been ignored."); } }else{ //read in hapmapfile result = textData.linkageToChrom(inputFile,4,skipCheck); } File infoFile = null; if (infoFileName != null){ infoFile = new File(infoFileName); } if (result != null){ textData.prepareMarkerInput(infoFile,textData.getPedFile().getHMInfo()); }else{ textData.prepareMarkerInput(infoFile,null); } if(!quietMode && infoFile != null){ System.out.println("Using marker file " + infoFile.getName()); } if(outputCheck && result != null){ CheckDataPanel cp = new CheckDataPanel(textData); cp.printTable(validateOutputFile(fileName + ".CHECK")); } Vector cust = new Vector(); if(outputType != -1){ textData.generateDPrimeTable(); Haplotype[][] haplos; switch(outputType){ case BLOX_GABRIEL: OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; case BLOX_4GAM: OutputFile = validateOutputFile(fileName + ".4GAMblocks"); break; case BLOX_SPINE: OutputFile = 
validateOutputFile(fileName + ".SPINEblocks"); break; case BLOX_CUSTOM: OutputFile = validateOutputFile(fileName + ".CUSTblocks"); //read in the blocks file File blocksFile = new File(blockFileName); cust = textData.readBlocks(blocksFile); break; default: OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; } //this handles output type ALL Haplotype[][] orderedHaplos; Haplotype[][] crossedHaplos; if(outputType == BLOX_ALL) { OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); textData.guessBlocks(BLOX_GABRIEL); haplos = textData.generateHaplotypes(textData.blocks, false); orderedHaplos = orderHaps(haplos); crossedHaplos = textData.generateCrossovers(orderedHaplos); textData.saveHapsToText(crossedHaplos, textData.getMultiDprime(), OutputFile); OutputFile = validateOutputFile(fileName + ".4GAMblocks"); textData.guessBlocks(BLOX_4GAM); haplos = textData.generateHaplotypes(textData.blocks, false); orderedHaplos = orderHaps(haplos); crossedHaplos = textData.generateCrossovers(orderedHaplos); textData.saveHapsToText(crossedHaplos, textData.getMultiDprime(), OutputFile); OutputFile = validateOutputFile(fileName + ".SPINEblocks"); textData.guessBlocks(BLOX_SPINE); haplos = textData.generateHaplotypes(textData.blocks, false); orderedHaplos = orderHaps(haplos); crossedHaplos = textData.generateCrossovers(orderedHaplos); textData.saveHapsToText(crossedHaplos, textData.getMultiDprime(), OutputFile); }else{ textData.guessBlocks(outputType, cust); haplos = textData.generateHaplotypes(textData.blocks, false); orderedHaplos = orderHaps(haplos); crossedHaplos = textData.generateCrossovers(orderedHaplos); textData.saveHapsToText(crossedHaplos, textData.getMultiDprime(), OutputFile); } //todo: should this output hap assoc for each block type if they do more than one? 
if(Options.getAssocTest() == ASSOC_TRIO || Options.getAssocTest() == ASSOC_CC) { //Haplotype[][] orderedHaps = orderHaps(textData.getHaplotypes()); HaploData.saveHapAssocToText(orderedHaplos, fileName + ".HAPASSOC"); } } if(outputDprime) { OutputFile = validateOutputFile(fileName + ".LD"); if (textData.dpTable != null){ textData.saveDprimeToText(OutputFile, TABLE_TYPE, 0, Chromosome.getSize()); }else{ textData.saveDprimeToText(OutputFile, LIVE_TYPE, 0, Chromosome.getSize()); } } if (outputPNG || outputCompressedPNG){ OutputFile = validateOutputFile(fileName + ".LD.PNG"); if (textData.dpTable == null){ textData.generateDPrimeTable(); textData.guessBlocks(BLOX_CUSTOM, new Vector()); } if (trackFileName != null){ textData.readAnalysisTrack(new File(trackFileName)); } DPrimeDisplay dpd = new DPrimeDisplay(textData); BufferedImage i = dpd.export(0,Chromosome.getSize(),outputCompressedPNG); try{ Jimi.putImage("image/png", i, OutputFile.getName()); }catch(JimiException je){ System.out.println(je.getMessage()); } } if(Options.getAssocTest() == ASSOC_TRIO){ Vector tdtResults = TDT.calcTrioTDT(textData.getPedFile()); HaploData.saveMarkerAssocToText(tdtResults, fileName + ".ASSOC"); } else if(Options.getAssocTest() == ASSOC_CC) { Vector ccResults = TDT.calcCCTDT(textData.getPedFile()); HaploData.saveMarkerAssocToText(ccResults, fileName + ".ASSOC"); } } catch(IOException e){ System.err.println("An error has occured. This probably has to do with file input or output"); } catch(HaploViewException e){ System.err.println(e.getMessage()); } catch(PedFileException pfe) { System.err.println(pfe.getMessage()); } }
| 1,109,477
|
private void processFile(String fileName, int fileType, String infoFileName){ try { HaploData textData; File OutputFile; File inputFile; if(!quietMode && fileName != null){ System.out.println("Using data file " + fileName); } inputFile = new File(fileName); if(!inputFile.exists()){ System.out.println("input file: " + fileName + " does not exist"); System.exit(1); } textData = new HaploData(); Vector result = null; if(fileType == HAPS){ //read in haps file textData.prepareHapsInput(inputFile); } else if (fileType == PED) { //read in ped file /* if(this.arg_ignoreMarkers.size()>0) { for(int i=0;i<this.arg_ignoreMarkers.size();i++){ int index = Integer.parseInt((String)this.arg_ignoreMarkers.get(i)); if(index>0 && index<markerResultArray.length){ markerResultArray[index] = false; if(!this.quietMode) { System.out.println("Ignoring marker " + (index)); } } } }*/ result = textData.linkageToChrom(inputFile, 3, skipCheck); if(textData.getPedFile().isBogusParents()) { System.out.println("Error: One or more individuals in the file reference non-existent parents.\nThese references have been ignored."); } }else{ //read in hapmapfile result = textData.linkageToChrom(inputFile,4,skipCheck); } File infoFile = null; if (infoFileName != null){ infoFile = new File(infoFileName); } if (result != null){ textData.prepareMarkerInput(infoFile,textData.getPedFile().getHMInfo()); }else{ textData.prepareMarkerInput(infoFile,null); } if(!quietMode && infoFile != null){ System.out.println("Using marker file " + infoFile.getName()); } if(outputCheck && result != null){ CheckDataPanel cp = new CheckDataPanel(textData); cp.printTable(validateOutputFile(fileName + ".CHECK")); } Vector cust = new Vector(); if(outputType != -1){ textData.generateDPrimeTable(); Haplotype[][] haplos; switch(outputType){ case BLOX_GABRIEL: OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; case BLOX_4GAM: OutputFile = validateOutputFile(fileName + ".4GAMblocks"); break; case BLOX_SPINE: OutputFile = 
validateOutputFile(fileName + ".SPINEblocks"); break; case BLOX_CUSTOM: OutputFile = validateOutputFile(fileName + ".CUSTblocks"); //read in the blocks file File blocksFile = new File(blockFileName); cust = textData.readBlocks(blocksFile); break; default: OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; } //this handles output type ALL Haplotype[][] orderedHaplos; Haplotype[][] crossedHaplos; if(outputType == BLOX_ALL) { OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); textData.guessBlocks(BLOX_GABRIEL); haplos = textData.generateHaplotypes(textData.blocks, false); orderedHaplos = orderHaps(haplos); crossedHaplos = textData.generateCrossovers(orderedHaplos); textData.saveHapsToText(crossedHaplos, textData.getMultiDprime(), OutputFile); OutputFile = validateOutputFile(fileName + ".4GAMblocks"); textData.guessBlocks(BLOX_4GAM); haplos = textData.generateHaplotypes(textData.blocks, false); orderedHaplos = orderHaps(haplos); crossedHaplos = textData.generateCrossovers(orderedHaplos); textData.saveHapsToText(crossedHaplos, textData.getMultiDprime(), OutputFile); OutputFile = validateOutputFile(fileName + ".SPINEblocks"); textData.guessBlocks(BLOX_SPINE); haplos = textData.generateHaplotypes(textData.blocks, false); orderedHaplos = orderHaps(haplos); crossedHaplos = textData.generateCrossovers(orderedHaplos); textData.saveHapsToText(crossedHaplos, textData.getMultiDprime(), OutputFile); }else{ textData.guessBlocks(outputType, cust); haplos = textData.generateHaplotypes(textData.blocks, false); orderedHaplos = orderHaps(haplos); crossedHaplos = textData.generateCrossovers(orderedHaplos); textData.saveHapsToText(crossedHaplos, textData.getMultiDprime(), OutputFile); } //todo: should this output hap assoc for each block type if they do more than one? 
if(Options.getAssocTest() == ASSOC_TRIO || Options.getAssocTest() == ASSOC_CC) { //Haplotype[][] orderedHaps = orderHaps(textData.getHaplotypes()); HaploData.saveHapAssocToText(orderedHaplos, fileName + ".HAPASSOC"); } } if(outputDprime) { OutputFile = validateOutputFile(fileName + ".LD"); if (textData.dpTable != null){ textData.saveDprimeToText(OutputFile, TABLE_TYPE, 0, Chromosome.getSize()); }else{ textData.saveDprimeToText(OutputFile, LIVE_TYPE, 0, Chromosome.getSize()); } } if (outputPNG || outputCompressedPNG){ OutputFile = validateOutputFile(fileName + ".LD.PNG"); if (textData.dpTable == null){ textData.generateDPrimeTable(); textData.guessBlocks(BLOX_CUSTOM, new Vector()); } if (trackFileName != null){ textData.readAnalysisTrack(new File(trackFileName)); } DPrimeDisplay dpd = new DPrimeDisplay(textData); BufferedImage i = dpd.export(0,Chromosome.getSize(),outputCompressedPNG); try{ Jimi.putImage("image/png", i, OutputFile.getName()); }catch(JimiException je){ System.out.println(je.getMessage()); } } if(Options.getAssocTest() == ASSOC_TRIO){ Vector tdtResults = TDT.calcTrioTDT(textData.getPedFile()); HaploData.saveMarkerAssocToText(tdtResults, fileName + ".ASSOC"); } else if(Options.getAssocTest() == ASSOC_CC) { Vector ccResults = TDT.calcCCTDT(textData.getPedFile()); HaploData.saveMarkerAssocToText(ccResults, fileName + ".ASSOC"); } } catch(IOException e){ System.err.println("An error has occured. This probably has to do with file input or output"); } catch(HaploViewException e){ System.err.println(e.getMessage()); } catch(PedFileException pfe) { System.err.println(pfe.getMessage()); } }
|
private void processFile(String fileName, int fileType, String infoFileName){ try { HaploData textData; File OutputFile; File inputFile; if(!quietMode && fileName != null){ System.out.println("Using data file " + fileName); } inputFile = new File(fileName); if(!inputFile.exists()){ System.out.println("input file: " + fileName + " does not exist"); System.exit(1); } textData = new HaploData(); Vector result = null; if(fileType == HAPS){ //read in haps file textData.prepareHapsInput(inputFile); } else if (fileType == PED) { //read in ped file /* if(this.arg_ignoreMarkers.size()>0) { for(int i=0;i<this.arg_ignoreMarkers.size();i++){ int index = Integer.parseInt((String)this.arg_ignoreMarkers.get(i)); if(index>0 && index<markerResultArray.length){ markerResultArray[index] = false; if(!this.quietMode) { System.out.println("Ignoring marker " + (index)); } } } }*/ result = textData.linkageToChrom(inputFile, 3, skipCheck); if(textData.getPedFile().isBogusParents()) { System.out.println("Error: One or more individuals in the file reference non-existent parents.\nThese references have been ignored."); } }else{ //read in hapmapfile result = textData.linkageToChrom(inputFile,4,skipCheck); } File infoFile = null; if (infoFileName != null){ infoFile = new File(infoFileName); } if (result != null){ textData.prepareMarkerInput(infoFile,textData.getPedFile().getHMInfo()); }else{ textData.prepareMarkerInput(infoFile,null); } if(!quietMode && infoFile != null){ System.out.println("Using marker file " + infoFile.getName()); } if(outputCheck && result != null){ CheckDataPanel cp = new CheckDataPanel(textData); cp.printTable(validateOutputFile(fileName + ".CHECK")); } Vector cust = new Vector(); if(outputType != -1){ textData.generateDPrimeTable(); Haplotype[][] haplos; switch(outputType){ case BLOX_GABRIEL: OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; case BLOX_4GAM: OutputFile = validateOutputFile(fileName + ".4GAMblocks"); break; case BLOX_SPINE: OutputFile = 
validateOutputFile(fileName + ".SPINEblocks"); break; case BLOX_CUSTOM: OutputFile = validateOutputFile(fileName + ".CUSTblocks"); //read in the blocks file File blocksFile = new File(blockFileName); cust = textData.readBlocks(blocksFile); break; default: OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; } //this handles output type ALL Haplotype[][] orderedHaplos; Haplotype[][] crossedHaplos; if(outputType == BLOX_ALL) { OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); textData.guessBlocks(BLOX_GABRIEL); haplos = textData.generateHaplotypes(textData.blocks, false); orderedHaplos = orderHaps(haplos); crossedHaplos = textData.generateCrossovers(orderedHaplos); textData.saveHapsToText(crossedHaplos, textData.getMultiDprime(), OutputFile); OutputFile = validateOutputFile(fileName + ".4GAMblocks"); textData.guessBlocks(BLOX_4GAM); haplos = textData.generateHaplotypes(textData.blocks, false); orderedHaplos = orderHaps(haplos); crossedHaplos = textData.generateCrossovers(orderedHaplos); textData.saveHapsToText(crossedHaplos, textData.getMultiDprime(), OutputFile); OutputFile = validateOutputFile(fileName + ".SPINEblocks"); textData.guessBlocks(BLOX_SPINE); haplos = textData.generateHaplotypes(textData.blocks, false); orderedHaplos = orderHaps(haplos); crossedHaplos = textData.generateCrossovers(orderedHaplos); textData.saveHapsToText(crossedHaplos, textData.getMultiDprime(), OutputFile); }else{ textData.guessBlocks(outputType, cust); haplos = textData.generateHaplotypes(textData.blocks, false); orderedHaplos = orderHaps(haplos); crossedHaplos = textData.generateCrossovers(orderedHaplos); textData.saveHapsToText(crossedHaplos, textData.getMultiDprime(), OutputFile); } //todo: should this output hap assoc for each block type if they do more than one? 
if(Options.getAssocTest() == ASSOC_TRIO || Options.getAssocTest() == ASSOC_CC) { //Haplotype[][] orderedHaps = orderHaps(textData.getHaplotypes()); HaploData.saveHapAssocToText(orderedHaplos, fileName + ".HAPASSOC"); } } if(outputDprime) { OutputFile = validateOutputFile(fileName + ".LD"); if (textData.dpTable != null){ textData.saveDprimeToText(OutputFile, TABLE_TYPE, 0, Chromosome.getSize()); }else{ textData.saveDprimeToText(OutputFile, LIVE_TYPE, 0, Chromosome.getSize()); } } if (outputPNG || outputCompressedPNG){ OutputFile = validateOutputFile(fileName + ".LD.PNG"); if (textData.dpTable == null){ textData.generateDPrimeTable(); textData.guessBlocks(BLOX_CUSTOM, new Vector()); } if (trackFileName != null){ textData.readAnalysisTrack(new File(trackFileName)); } DPrimeDisplay dpd = new DPrimeDisplay(textData); BufferedImage i = dpd.export(0,Chromosome.getSize(),outputCompressedPNG); try{ Jimi.putImage("image/png", i, OutputFile.getName()); }catch(JimiException je){ System.out.println(je.getMessage()); } } if(Options.getAssocTest() == ASSOC_TRIO){ Vector tdtResults = TDT.calcTrioTDT(textData.getPedFile()); HaploData.saveMarkerAssocToText(tdtResults, fileName + ".ASSOC"); } else if(Options.getAssocTest() == ASSOC_CC) { Vector ccResults = TDT.calcCCTDT(textData.getPedFile()); HaploData.saveMarkerAssocToText(ccResults, fileName + ".ASSOC"); } } catch(IOException e){ System.err.println("An error has occured. This probably has to do with file input or output"); } catch(HaploViewException e){ System.err.println(e.getMessage()); } catch(PedFileException pfe) { System.err.println(pfe.getMessage()); } }
| 1,109,478
|
private void processFile(String fileName, int fileType, String infoFileName){ try { HaploData textData; File OutputFile; File inputFile; if(!quietMode && fileName != null){ System.out.println("Using data file " + fileName); } inputFile = new File(fileName); if(!inputFile.exists()){ System.out.println("input file: " + fileName + " does not exist"); System.exit(1); } textData = new HaploData(); Vector result = null; if(fileType == HAPS){ //read in haps file textData.prepareHapsInput(inputFile); } else if (fileType == PED) { //read in ped file /* if(this.arg_ignoreMarkers.size()>0) { for(int i=0;i<this.arg_ignoreMarkers.size();i++){ int index = Integer.parseInt((String)this.arg_ignoreMarkers.get(i)); if(index>0 && index<markerResultArray.length){ markerResultArray[index] = false; if(!this.quietMode) { System.out.println("Ignoring marker " + (index)); } } } }*/ result = textData.linkageToChrom(inputFile, 3, skipCheck); if(textData.getPedFile().isBogusParents()) { System.out.println("Error: One or more individuals in the file reference non-existent parents.\nThese references have been ignored."); } }else{ //read in hapmapfile result = textData.linkageToChrom(inputFile,4,skipCheck); } File infoFile = null; if (infoFileName != null){ infoFile = new File(infoFileName); } if (result != null){ textData.prepareMarkerInput(infoFile,textData.getPedFile().getHMInfo()); }else{ textData.prepareMarkerInput(infoFile,null); } if(!quietMode && infoFile != null){ System.out.println("Using marker file " + infoFile.getName()); } if(outputCheck && result != null){ CheckDataPanel cp = new CheckDataPanel(textData); cp.printTable(validateOutputFile(fileName + ".CHECK")); } Vector cust = new Vector(); if(outputType != -1){ textData.generateDPrimeTable(); Haplotype[][] haplos; switch(outputType){ case BLOX_GABRIEL: OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; case BLOX_4GAM: OutputFile = validateOutputFile(fileName + ".4GAMblocks"); break; case BLOX_SPINE: OutputFile = 
validateOutputFile(fileName + ".SPINEblocks"); break; case BLOX_CUSTOM: OutputFile = validateOutputFile(fileName + ".CUSTblocks"); //read in the blocks file File blocksFile = new File(blockFileName); cust = textData.readBlocks(blocksFile); break; default: OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; } //this handles output type ALL Haplotype[][] orderedHaplos; Haplotype[][] crossedHaplos; if(outputType == BLOX_ALL) { OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); textData.guessBlocks(BLOX_GABRIEL); haplos = textData.generateHaplotypes(textData.blocks, false); orderedHaplos = orderHaps(haplos); crossedHaplos = textData.generateCrossovers(orderedHaplos); textData.saveHapsToText(crossedHaplos, textData.getMultiDprime(), OutputFile); OutputFile = validateOutputFile(fileName + ".4GAMblocks"); textData.guessBlocks(BLOX_4GAM); haplos = textData.generateHaplotypes(textData.blocks, false); orderedHaplos = orderHaps(haplos); crossedHaplos = textData.generateCrossovers(orderedHaplos); textData.saveHapsToText(crossedHaplos, textData.getMultiDprime(), OutputFile); OutputFile = validateOutputFile(fileName + ".SPINEblocks"); textData.guessBlocks(BLOX_SPINE); haplos = textData.generateHaplotypes(textData.blocks, false); orderedHaplos = orderHaps(haplos); crossedHaplos = textData.generateCrossovers(orderedHaplos); textData.saveHapsToText(crossedHaplos, textData.getMultiDprime(), OutputFile); }else{ textData.guessBlocks(outputType, cust); haplos = textData.generateHaplotypes(textData.blocks, false); orderedHaplos = orderHaps(haplos); crossedHaplos = textData.generateCrossovers(orderedHaplos); textData.saveHapsToText(crossedHaplos, textData.getMultiDprime(), OutputFile); } //todo: should this output hap assoc for each block type if they do more than one? 
if(Options.getAssocTest() == ASSOC_TRIO || Options.getAssocTest() == ASSOC_CC) { //Haplotype[][] orderedHaps = orderHaps(textData.getHaplotypes()); HaploData.saveHapAssocToText(orderedHaplos, fileName + ".HAPASSOC"); } } if(outputDprime) { OutputFile = validateOutputFile(fileName + ".LD"); if (textData.dpTable != null){ textData.saveDprimeToText(OutputFile, TABLE_TYPE, 0, Chromosome.getSize()); }else{ textData.saveDprimeToText(OutputFile, LIVE_TYPE, 0, Chromosome.getSize()); } } if (outputPNG || outputCompressedPNG){ OutputFile = validateOutputFile(fileName + ".LD.PNG"); if (textData.dpTable == null){ textData.generateDPrimeTable(); textData.guessBlocks(BLOX_CUSTOM, new Vector()); } if (trackFileName != null){ textData.readAnalysisTrack(new File(trackFileName)); } DPrimeDisplay dpd = new DPrimeDisplay(textData); BufferedImage i = dpd.export(0,Chromosome.getSize(),outputCompressedPNG); try{ Jimi.putImage("image/png", i, OutputFile.getName()); }catch(JimiException je){ System.out.println(je.getMessage()); } } if(Options.getAssocTest() == ASSOC_TRIO){ Vector tdtResults = TDT.calcTrioTDT(textData.getPedFile()); HaploData.saveMarkerAssocToText(tdtResults, fileName + ".ASSOC"); } else if(Options.getAssocTest() == ASSOC_CC) { Vector ccResults = TDT.calcCCTDT(textData.getPedFile()); HaploData.saveMarkerAssocToText(ccResults, fileName + ".ASSOC"); } } catch(IOException e){ System.err.println("An error has occured. This probably has to do with file input or output"); } catch(HaploViewException e){ System.err.println(e.getMessage()); } catch(PedFileException pfe) { System.err.println(pfe.getMessage()); } }
|
private void processFile(String fileName, int fileType, String infoFileName){ try { HaploData textData; File OutputFile; File inputFile; if(!quietMode && fileName != null){ System.out.println("Using data file " + fileName); } inputFile = new File(fileName); if(!inputFile.exists()){ System.out.println("input file: " + fileName + " does not exist"); System.exit(1); } textData = new HaploData(); Vector result = null; if(fileType == HAPS){ //read in haps file textData.prepareHapsInput(inputFile); } else if (fileType == PED) { //read in ped file /* if(this.arg_ignoreMarkers.size()>0) { for(int i=0;i<this.arg_ignoreMarkers.size();i++){ int index = Integer.parseInt((String)this.arg_ignoreMarkers.get(i)); if(index>0 && index<markerResultArray.length){ markerResultArray[index] = false; if(!this.quietMode) { System.out.println("Ignoring marker " + (index)); } } } }*/ result = textData.linkageToChrom(inputFile, 3, skipCheck); if(textData.getPedFile().isBogusParents()) { System.out.println("Error: One or more individuals in the file reference non-existent parents.\nThese references have been ignored."); } }else{ //read in hapmapfile result = textData.linkageToChrom(inputFile,4,skipCheck); } File infoFile = null; if (infoFileName != null){ infoFile = new File(infoFileName); } if (result != null){ textData.prepareMarkerInput(infoFile,textData.getPedFile().getHMInfo()); }else{ textData.prepareMarkerInput(infoFile,null); } if(!quietMode && infoFile != null){ System.out.println("Using marker file " + infoFile.getName()); } if(outputCheck && result != null){ CheckDataPanel cp = new CheckDataPanel(textData); cp.printTable(validateOutputFile(fileName + ".CHECK")); } Vector cust = new Vector(); if(outputType != -1){ textData.generateDPrimeTable(); Haplotype[][] haplos; switch(outputType){ case BLOX_GABRIEL: OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; case BLOX_4GAM: OutputFile = validateOutputFile(fileName + ".4GAMblocks"); break; case BLOX_SPINE: OutputFile = 
validateOutputFile(fileName + ".SPINEblocks"); break; case BLOX_CUSTOM: OutputFile = validateOutputFile(fileName + ".CUSTblocks"); //read in the blocks file File blocksFile = new File(blockFileName); cust = textData.readBlocks(blocksFile); break; default: OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; } //this handles output type ALL Haplotype[][] orderedHaplos; Haplotype[][] crossedHaplos; if(outputType == BLOX_ALL) { OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); textData.guessBlocks(BLOX_GABRIEL); haplos = textData.generateHaplotypes(textData.blocks, false); orderedHaplos = orderHaps(haplos); crossedHaplos = textData.generateCrossovers(orderedHaplos); textData.saveHapsToText(crossedHaplos, textData.getMultiDprime(), OutputFile); OutputFile = validateOutputFile(fileName + ".4GAMblocks"); textData.guessBlocks(BLOX_4GAM); haplos = textData.generateHaplotypes(textData.blocks, false); orderedHaplos = orderHaps(haplos); crossedHaplos = textData.generateCrossovers(orderedHaplos); textData.saveHapsToText(crossedHaplos, textData.getMultiDprime(), OutputFile); OutputFile = validateOutputFile(fileName + ".SPINEblocks"); textData.guessBlocks(BLOX_SPINE); haplos = textData.generateHaplotypes(textData.blocks, false); orderedHaplos = orderHaps(haplos); crossedHaplos = textData.generateCrossovers(orderedHaplos); textData.saveHapsToText(crossedHaplos, textData.getMultiDprime(), OutputFile); }else{ textData.guessBlocks(outputType, cust); haplos = textData.generateHaplotypes(textData.blocks, false); orderedHaplos = orderHaps(haplos); crossedHaplos = textData.generateCrossovers(orderedHaplos); textData.saveHapsToText(crossedHaplos, textData.getMultiDprime(), OutputFile); } //todo: should this output hap assoc for each block type if they do more than one? 
if(Options.getAssocTest() == ASSOC_TRIO || Options.getAssocTest() == ASSOC_CC) { //Haplotype[][] orderedHaps = orderHaps(textData.getHaplotypes()); HaploData.saveHapAssocToText(haplos, fileName + ".HAPASSOC"); } } if(outputDprime) { OutputFile = validateOutputFile(fileName + ".LD"); if (textData.dpTable != null){ textData.saveDprimeToText(OutputFile, TABLE_TYPE, 0, Chromosome.getSize()); }else{ textData.saveDprimeToText(OutputFile, LIVE_TYPE, 0, Chromosome.getSize()); } } if (outputPNG || outputCompressedPNG){ OutputFile = validateOutputFile(fileName + ".LD.PNG"); if (textData.dpTable == null){ textData.generateDPrimeTable(); textData.guessBlocks(BLOX_CUSTOM, new Vector()); } if (trackFileName != null){ textData.readAnalysisTrack(new File(trackFileName)); } DPrimeDisplay dpd = new DPrimeDisplay(textData); BufferedImage i = dpd.export(0,Chromosome.getSize(),outputCompressedPNG); try{ Jimi.putImage("image/png", i, OutputFile.getName()); }catch(JimiException je){ System.out.println(je.getMessage()); } } if(Options.getAssocTest() == ASSOC_TRIO){ Vector tdtResults = TDT.calcTrioTDT(textData.getPedFile()); HaploData.saveMarkerAssocToText(tdtResults, fileName + ".ASSOC"); } else if(Options.getAssocTest() == ASSOC_CC) { Vector ccResults = TDT.calcCCTDT(textData.getPedFile()); HaploData.saveMarkerAssocToText(ccResults, fileName + ".ASSOC"); } } catch(IOException e){ System.err.println("An error has occured. This probably has to do with file input or output"); } catch(HaploViewException e){ System.err.println(e.getMessage()); } catch(PedFileException pfe) { System.err.println(pfe.getMessage()); } }
| 1,109,479
|
protected void createThumbnail( Volume volume ) { log.debug( "Creating thumbnail" ); ODMGXAWrapper txw = new ODMGXAWrapper(); txw.lock( this, Transaction.WRITE ); // Find the original image to use as a staring point ImageInstance original = null; for ( int n = 0; n < instances.size(); n++ ) { ImageInstance instance = (ImageInstance) instances.get( n ); if ( instance.getInstanceType() == ImageInstance.INSTANCE_TYPE_ORIGINAL ) { original = instance; txw.lock( original, Transaction.READ ); break; } } if ( original == null || original.getImageFile() == null || !original.getImageFile().exists() ) { // If there are uncorrupted instances, no thumbnail can be created log.warn( "Error - no original image was found!!!" ); txw.commit(); return; } log.warn( "Found original, reading it..." ); // Read the image BufferedImage origImage = null; try { origImage = ImageIO.read( original.getImageFile() ); } catch ( IOException e ) { log.warn( "Error reading image: " + e.getMessage() ); txw.abort(); return; } log.warn( "Done, finding name" ); // Find where to store the file in the target volume File thumbnailFile = volume.getInstanceName( this, "jpg" ); log.warn( "name = " + thumbnailFile.getName() ); // Shrink the image to desired state and save it // Find first the correct transformation for doing this int origWidth = origImage.getWidth(); int origHeight = origImage.getHeight(); int maxThumbWidth = 100; int maxThumbHeight = 100; AffineTransform xform = photovault.image.ImageXform.getFittingXform( maxThumbWidth, maxThumbHeight, prefRotation -original.getRotated(), origWidth, origHeight ); // Create the target image AffineTransformOp atOp = new AffineTransformOp( xform, AffineTransformOp.TYPE_NEAREST_NEIGHBOR ); log.warn( "Filtering..." ); BufferedImage thumbImage = atOp.filter( origImage, null ); log.warn( "done Filtering..." 
); // Save it try { ImageIO.write( thumbImage, "jpg", thumbnailFile ); } catch ( IOException e ) { log.warn( "Error writing thumbnail: " + e.getMessage() ); txw.abort(); return; } // add the created instance to this persistent object ImageInstance thumbInstance = addInstance( volume, thumbnailFile, ImageInstance.INSTANCE_TYPE_THUMBNAIL ); thumbInstance.setRotated( prefRotation -original.getRotated() ); log.warn( "Loading thumbnail..." ); thumbnail = Thumbnail.createThumbnail( this, thumbnailFile ); txw.commit(); }
|
protected void createThumbnail( Volume volume ) { log.debug( "Creating thumbnail for " + uid ); ODMGXAWrapper txw = new ODMGXAWrapper(); txw.lock( this, Transaction.WRITE ); // Find the original image to use as a staring point ImageInstance original = null; for ( int n = 0; n < instances.size(); n++ ) { ImageInstance instance = (ImageInstance) instances.get( n ); if ( instance.getInstanceType() == ImageInstance.INSTANCE_TYPE_ORIGINAL ) { original = instance; txw.lock( original, Transaction.READ ); break; } } if ( original == null || original.getImageFile() == null || !original.getImageFile().exists() ) { // If there are uncorrupted instances, no thumbnail can be created log.warn( "Error - no original image was found!!!" ); txw.commit(); return; } log.warn( "Found original, reading it..." ); // Read the image BufferedImage origImage = null; try { origImage = ImageIO.read( original.getImageFile() ); } catch ( IOException e ) { log.warn( "Error reading image: " + e.getMessage() ); txw.abort(); return; } log.warn( "Done, finding name" ); // Find where to store the file in the target volume File thumbnailFile = volume.getInstanceName( this, "jpg" ); log.warn( "name = " + thumbnailFile.getName() ); // Shrink the image to desired state and save it // Find first the correct transformation for doing this int origWidth = origImage.getWidth(); int origHeight = origImage.getHeight(); int maxThumbWidth = 100; int maxThumbHeight = 100; AffineTransform xform = photovault.image.ImageXform.getFittingXform( maxThumbWidth, maxThumbHeight, prefRotation -original.getRotated(), origWidth, origHeight ); // Create the target image AffineTransformOp atOp = new AffineTransformOp( xform, AffineTransformOp.TYPE_NEAREST_NEIGHBOR ); log.warn( "Filtering..." ); BufferedImage thumbImage = atOp.filter( origImage, null ); log.warn( "done Filtering..." 
); // Save it try { ImageIO.write( thumbImage, "jpg", thumbnailFile ); } catch ( IOException e ) { log.warn( "Error writing thumbnail: " + e.getMessage() ); txw.abort(); return; } // add the created instance to this persistent object ImageInstance thumbInstance = addInstance( volume, thumbnailFile, ImageInstance.INSTANCE_TYPE_THUMBNAIL ); thumbInstance.setRotated( prefRotation -original.getRotated() ); log.warn( "Loading thumbnail..." ); thumbnail = Thumbnail.createThumbnail( this, thumbnailFile ); txw.commit(); }
| 1,109,481
|
protected void createThumbnail( Volume volume ) { log.debug( "Creating thumbnail" ); ODMGXAWrapper txw = new ODMGXAWrapper(); txw.lock( this, Transaction.WRITE ); // Find the original image to use as a staring point ImageInstance original = null; for ( int n = 0; n < instances.size(); n++ ) { ImageInstance instance = (ImageInstance) instances.get( n ); if ( instance.getInstanceType() == ImageInstance.INSTANCE_TYPE_ORIGINAL ) { original = instance; txw.lock( original, Transaction.READ ); break; } } if ( original == null || original.getImageFile() == null || !original.getImageFile().exists() ) { // If there are uncorrupted instances, no thumbnail can be created log.warn( "Error - no original image was found!!!" ); txw.commit(); return; } log.warn( "Found original, reading it..." ); // Read the image BufferedImage origImage = null; try { origImage = ImageIO.read( original.getImageFile() ); } catch ( IOException e ) { log.warn( "Error reading image: " + e.getMessage() ); txw.abort(); return; } log.warn( "Done, finding name" ); // Find where to store the file in the target volume File thumbnailFile = volume.getInstanceName( this, "jpg" ); log.warn( "name = " + thumbnailFile.getName() ); // Shrink the image to desired state and save it // Find first the correct transformation for doing this int origWidth = origImage.getWidth(); int origHeight = origImage.getHeight(); int maxThumbWidth = 100; int maxThumbHeight = 100; AffineTransform xform = photovault.image.ImageXform.getFittingXform( maxThumbWidth, maxThumbHeight, prefRotation -original.getRotated(), origWidth, origHeight ); // Create the target image AffineTransformOp atOp = new AffineTransformOp( xform, AffineTransformOp.TYPE_NEAREST_NEIGHBOR ); log.warn( "Filtering..." ); BufferedImage thumbImage = atOp.filter( origImage, null ); log.warn( "done Filtering..." 
); // Save it try { ImageIO.write( thumbImage, "jpg", thumbnailFile ); } catch ( IOException e ) { log.warn( "Error writing thumbnail: " + e.getMessage() ); txw.abort(); return; } // add the created instance to this persistent object ImageInstance thumbInstance = addInstance( volume, thumbnailFile, ImageInstance.INSTANCE_TYPE_THUMBNAIL ); thumbInstance.setRotated( prefRotation -original.getRotated() ); log.warn( "Loading thumbnail..." ); thumbnail = Thumbnail.createThumbnail( this, thumbnailFile ); txw.commit(); }
|
protected void createThumbnail( Volume volume ) { log.debug( "Creating thumbnail" ); ODMGXAWrapper txw = new ODMGXAWrapper(); txw.lock( this, Transaction.WRITE ); // Find the original image to use as a staring point ImageInstance original = null; for ( int n = 0; n < instances.size(); n++ ) { ImageInstance instance = (ImageInstance) instances.get( n ); if ( instance.getInstanceType() == ImageInstance.INSTANCE_TYPE_ORIGINAL ) { original = instance; txw.lock( original, Transaction.READ ); break; } } if ( original == null || original.getImageFile() == null || !original.getImageFile().exists() ) { // If there are uncorrupted instances, no thumbnail can be created log.warn( "Error - no original image was found!!!" ); txw.commit(); return; } log.debug( "Found original, reading it..." ); // Read the image BufferedImage origImage = null; try { origImage = ImageIO.read( original.getImageFile() ); } catch ( IOException e ) { log.warn( "Error reading image: " + e.getMessage() ); txw.abort(); return; } log.warn( "Done, finding name" ); // Find where to store the file in the target volume File thumbnailFile = volume.getInstanceName( this, "jpg" ); log.warn( "name = " + thumbnailFile.getName() ); // Shrink the image to desired state and save it // Find first the correct transformation for doing this int origWidth = origImage.getWidth(); int origHeight = origImage.getHeight(); int maxThumbWidth = 100; int maxThumbHeight = 100; AffineTransform xform = photovault.image.ImageXform.getFittingXform( maxThumbWidth, maxThumbHeight, prefRotation -original.getRotated(), origWidth, origHeight ); // Create the target image AffineTransformOp atOp = new AffineTransformOp( xform, AffineTransformOp.TYPE_NEAREST_NEIGHBOR ); log.warn( "Filtering..." ); BufferedImage thumbImage = atOp.filter( origImage, null ); log.warn( "done Filtering..." 
); // Save it try { ImageIO.write( thumbImage, "jpg", thumbnailFile ); } catch ( IOException e ) { log.warn( "Error writing thumbnail: " + e.getMessage() ); txw.abort(); return; } // add the created instance to this persistent object ImageInstance thumbInstance = addInstance( volume, thumbnailFile, ImageInstance.INSTANCE_TYPE_THUMBNAIL ); thumbInstance.setRotated( prefRotation -original.getRotated() ); log.warn( "Loading thumbnail..." ); thumbnail = Thumbnail.createThumbnail( this, thumbnailFile ); txw.commit(); }
| 1,109,482
|
protected void createThumbnail( Volume volume ) { log.debug( "Creating thumbnail" ); ODMGXAWrapper txw = new ODMGXAWrapper(); txw.lock( this, Transaction.WRITE ); // Find the original image to use as a staring point ImageInstance original = null; for ( int n = 0; n < instances.size(); n++ ) { ImageInstance instance = (ImageInstance) instances.get( n ); if ( instance.getInstanceType() == ImageInstance.INSTANCE_TYPE_ORIGINAL ) { original = instance; txw.lock( original, Transaction.READ ); break; } } if ( original == null || original.getImageFile() == null || !original.getImageFile().exists() ) { // If there are uncorrupted instances, no thumbnail can be created log.warn( "Error - no original image was found!!!" ); txw.commit(); return; } log.warn( "Found original, reading it..." ); // Read the image BufferedImage origImage = null; try { origImage = ImageIO.read( original.getImageFile() ); } catch ( IOException e ) { log.warn( "Error reading image: " + e.getMessage() ); txw.abort(); return; } log.warn( "Done, finding name" ); // Find where to store the file in the target volume File thumbnailFile = volume.getInstanceName( this, "jpg" ); log.warn( "name = " + thumbnailFile.getName() ); // Shrink the image to desired state and save it // Find first the correct transformation for doing this int origWidth = origImage.getWidth(); int origHeight = origImage.getHeight(); int maxThumbWidth = 100; int maxThumbHeight = 100; AffineTransform xform = photovault.image.ImageXform.getFittingXform( maxThumbWidth, maxThumbHeight, prefRotation -original.getRotated(), origWidth, origHeight ); // Create the target image AffineTransformOp atOp = new AffineTransformOp( xform, AffineTransformOp.TYPE_NEAREST_NEIGHBOR ); log.warn( "Filtering..." ); BufferedImage thumbImage = atOp.filter( origImage, null ); log.warn( "done Filtering..." 
); // Save it try { ImageIO.write( thumbImage, "jpg", thumbnailFile ); } catch ( IOException e ) { log.warn( "Error writing thumbnail: " + e.getMessage() ); txw.abort(); return; } // add the created instance to this persistent object ImageInstance thumbInstance = addInstance( volume, thumbnailFile, ImageInstance.INSTANCE_TYPE_THUMBNAIL ); thumbInstance.setRotated( prefRotation -original.getRotated() ); log.warn( "Loading thumbnail..." ); thumbnail = Thumbnail.createThumbnail( this, thumbnailFile ); txw.commit(); }
|
protected void createThumbnail( Volume volume ) { log.debug( "Creating thumbnail" ); ODMGXAWrapper txw = new ODMGXAWrapper(); txw.lock( this, Transaction.WRITE ); // Find the original image to use as a staring point ImageInstance original = null; for ( int n = 0; n < instances.size(); n++ ) { ImageInstance instance = (ImageInstance) instances.get( n ); if ( instance.getInstanceType() == ImageInstance.INSTANCE_TYPE_ORIGINAL ) { original = instance; txw.lock( original, Transaction.READ ); break; } } if ( original == null || original.getImageFile() == null || !original.getImageFile().exists() ) { // If there are uncorrupted instances, no thumbnail can be created log.warn( "Error - no original image was found!!!" ); txw.commit(); return; } log.warn( "Found original, reading it..." ); // Read the image BufferedImage origImage = null; try { Iterator readers = ImageIO.getImageReadersByFormatName( "jpg" ); if ( readers.hasNext() ) { ImageReader reader = (ImageReader)readers.next(); log.debug( "Creating stream" ); ImageInputStream iis = ImageIO.createImageInputStream( original.getImageFile() ); reader.setInput( iis, true ); if ( reader.getNumThumbnails(0) > 0 ) { log.debug( "Original has thumbnail, size " + reader.getThumbnailWidth( 0, 0 ) + " x " + reader.getThumbnailHeight( 0, 0 ) ); origImage = reader.readThumbnail( 0, 0 ); log.debug( "Read thumbnail" ); } else { log.debug( "No thumbnail in original" ); ImageReadParam param = reader.getDefaultReadParam(); param.setSourceSubsampling( 16, 16, 0, 0 ); origImage = reader.read( 0, param ); log.debug( "Read original" ); } } } catch ( IOException e ) { log.warn( "Error reading image: " + e.getMessage() ); txw.abort(); return; } log.warn( "Done, finding name" ); // Find where to store the file in the target volume File thumbnailFile = volume.getInstanceName( this, "jpg" ); log.warn( "name = " + thumbnailFile.getName() ); // Shrink the image to desired state and save it // Find first the correct transformation for doing this int 
origWidth = origImage.getWidth(); int origHeight = origImage.getHeight(); int maxThumbWidth = 100; int maxThumbHeight = 100; AffineTransform xform = photovault.image.ImageXform.getFittingXform( maxThumbWidth, maxThumbHeight, prefRotation -original.getRotated(), origWidth, origHeight ); // Create the target image AffineTransformOp atOp = new AffineTransformOp( xform, AffineTransformOp.TYPE_NEAREST_NEIGHBOR ); log.warn( "Filtering..." ); BufferedImage thumbImage = atOp.filter( origImage, null ); log.warn( "done Filtering..." ); // Save it try { ImageIO.write( thumbImage, "jpg", thumbnailFile ); } catch ( IOException e ) { log.warn( "Error writing thumbnail: " + e.getMessage() ); txw.abort(); return; } // add the created instance to this persistent object ImageInstance thumbInstance = addInstance( volume, thumbnailFile, ImageInstance.INSTANCE_TYPE_THUMBNAIL ); thumbInstance.setRotated( prefRotation -original.getRotated() ); log.warn( "Loading thumbnail..." ); thumbnail = Thumbnail.createThumbnail( this, thumbnailFile ); txw.commit(); }
| 1,109,483
|
protected void createThumbnail( Volume volume ) { log.debug( "Creating thumbnail" ); ODMGXAWrapper txw = new ODMGXAWrapper(); txw.lock( this, Transaction.WRITE ); // Find the original image to use as a staring point ImageInstance original = null; for ( int n = 0; n < instances.size(); n++ ) { ImageInstance instance = (ImageInstance) instances.get( n ); if ( instance.getInstanceType() == ImageInstance.INSTANCE_TYPE_ORIGINAL ) { original = instance; txw.lock( original, Transaction.READ ); break; } } if ( original == null || original.getImageFile() == null || !original.getImageFile().exists() ) { // If there are uncorrupted instances, no thumbnail can be created log.warn( "Error - no original image was found!!!" ); txw.commit(); return; } log.warn( "Found original, reading it..." ); // Read the image BufferedImage origImage = null; try { origImage = ImageIO.read( original.getImageFile() ); } catch ( IOException e ) { log.warn( "Error reading image: " + e.getMessage() ); txw.abort(); return; } log.warn( "Done, finding name" ); // Find where to store the file in the target volume File thumbnailFile = volume.getInstanceName( this, "jpg" ); log.warn( "name = " + thumbnailFile.getName() ); // Shrink the image to desired state and save it // Find first the correct transformation for doing this int origWidth = origImage.getWidth(); int origHeight = origImage.getHeight(); int maxThumbWidth = 100; int maxThumbHeight = 100; AffineTransform xform = photovault.image.ImageXform.getFittingXform( maxThumbWidth, maxThumbHeight, prefRotation -original.getRotated(), origWidth, origHeight ); // Create the target image AffineTransformOp atOp = new AffineTransformOp( xform, AffineTransformOp.TYPE_NEAREST_NEIGHBOR ); log.warn( "Filtering..." ); BufferedImage thumbImage = atOp.filter( origImage, null ); log.warn( "done Filtering..." 
); // Save it try { ImageIO.write( thumbImage, "jpg", thumbnailFile ); } catch ( IOException e ) { log.warn( "Error writing thumbnail: " + e.getMessage() ); txw.abort(); return; } // add the created instance to this persistent object ImageInstance thumbInstance = addInstance( volume, thumbnailFile, ImageInstance.INSTANCE_TYPE_THUMBNAIL ); thumbInstance.setRotated( prefRotation -original.getRotated() ); log.warn( "Loading thumbnail..." ); thumbnail = Thumbnail.createThumbnail( this, thumbnailFile ); txw.commit(); }
|
protected void createThumbnail( Volume volume ) { log.debug( "Creating thumbnail" ); ODMGXAWrapper txw = new ODMGXAWrapper(); txw.lock( this, Transaction.WRITE ); // Find the original image to use as a staring point ImageInstance original = null; for ( int n = 0; n < instances.size(); n++ ) { ImageInstance instance = (ImageInstance) instances.get( n ); if ( instance.getInstanceType() == ImageInstance.INSTANCE_TYPE_ORIGINAL ) { original = instance; txw.lock( original, Transaction.READ ); break; } } if ( original == null || original.getImageFile() == null || !original.getImageFile().exists() ) { // If there are uncorrupted instances, no thumbnail can be created log.warn( "Error - no original image was found!!!" ); txw.commit(); return; } log.warn( "Found original, reading it..." ); // Read the image BufferedImage origImage = null; try { origImage = ImageIO.read( original.getImageFile() ); } catch ( IOException e ) { log.warn( "Error reading image: " + e.getMessage() ); txw.abort(); return; } log.debug( "Done, finding name" ); // Find where to store the file in the target volume File thumbnailFile = volume.getInstanceName( this, "jpg" ); log.warn( "name = " + thumbnailFile.getName() ); // Shrink the image to desired state and save it // Find first the correct transformation for doing this int origWidth = origImage.getWidth(); int origHeight = origImage.getHeight(); int maxThumbWidth = 100; int maxThumbHeight = 100; AffineTransform xform = photovault.image.ImageXform.getFittingXform( maxThumbWidth, maxThumbHeight, prefRotation -original.getRotated(), origWidth, origHeight ); // Create the target image AffineTransformOp atOp = new AffineTransformOp( xform, AffineTransformOp.TYPE_NEAREST_NEIGHBOR ); log.warn( "Filtering..." ); BufferedImage thumbImage = atOp.filter( origImage, null ); log.warn( "done Filtering..." 
); // Save it try { ImageIO.write( thumbImage, "jpg", thumbnailFile ); } catch ( IOException e ) { log.warn( "Error writing thumbnail: " + e.getMessage() ); txw.abort(); return; } // add the created instance to this persistent object ImageInstance thumbInstance = addInstance( volume, thumbnailFile, ImageInstance.INSTANCE_TYPE_THUMBNAIL ); thumbInstance.setRotated( prefRotation -original.getRotated() ); log.warn( "Loading thumbnail..." ); thumbnail = Thumbnail.createThumbnail( this, thumbnailFile ); txw.commit(); }
| 1,109,484
|
protected void createThumbnail( Volume volume ) { log.debug( "Creating thumbnail" ); ODMGXAWrapper txw = new ODMGXAWrapper(); txw.lock( this, Transaction.WRITE ); // Find the original image to use as a staring point ImageInstance original = null; for ( int n = 0; n < instances.size(); n++ ) { ImageInstance instance = (ImageInstance) instances.get( n ); if ( instance.getInstanceType() == ImageInstance.INSTANCE_TYPE_ORIGINAL ) { original = instance; txw.lock( original, Transaction.READ ); break; } } if ( original == null || original.getImageFile() == null || !original.getImageFile().exists() ) { // If there are uncorrupted instances, no thumbnail can be created log.warn( "Error - no original image was found!!!" ); txw.commit(); return; } log.warn( "Found original, reading it..." ); // Read the image BufferedImage origImage = null; try { origImage = ImageIO.read( original.getImageFile() ); } catch ( IOException e ) { log.warn( "Error reading image: " + e.getMessage() ); txw.abort(); return; } log.warn( "Done, finding name" ); // Find where to store the file in the target volume File thumbnailFile = volume.getInstanceName( this, "jpg" ); log.warn( "name = " + thumbnailFile.getName() ); // Shrink the image to desired state and save it // Find first the correct transformation for doing this int origWidth = origImage.getWidth(); int origHeight = origImage.getHeight(); int maxThumbWidth = 100; int maxThumbHeight = 100; AffineTransform xform = photovault.image.ImageXform.getFittingXform( maxThumbWidth, maxThumbHeight, prefRotation -original.getRotated(), origWidth, origHeight ); // Create the target image AffineTransformOp atOp = new AffineTransformOp( xform, AffineTransformOp.TYPE_NEAREST_NEIGHBOR ); log.warn( "Filtering..." ); BufferedImage thumbImage = atOp.filter( origImage, null ); log.warn( "done Filtering..." 
); // Save it try { ImageIO.write( thumbImage, "jpg", thumbnailFile ); } catch ( IOException e ) { log.warn( "Error writing thumbnail: " + e.getMessage() ); txw.abort(); return; } // add the created instance to this persistent object ImageInstance thumbInstance = addInstance( volume, thumbnailFile, ImageInstance.INSTANCE_TYPE_THUMBNAIL ); thumbInstance.setRotated( prefRotation -original.getRotated() ); log.warn( "Loading thumbnail..." ); thumbnail = Thumbnail.createThumbnail( this, thumbnailFile ); txw.commit(); }
|
protected void createThumbnail( Volume volume ) { log.debug( "Creating thumbnail" ); ODMGXAWrapper txw = new ODMGXAWrapper(); txw.lock( this, Transaction.WRITE ); // Find the original image to use as a staring point ImageInstance original = null; for ( int n = 0; n < instances.size(); n++ ) { ImageInstance instance = (ImageInstance) instances.get( n ); if ( instance.getInstanceType() == ImageInstance.INSTANCE_TYPE_ORIGINAL ) { original = instance; txw.lock( original, Transaction.READ ); break; } } if ( original == null || original.getImageFile() == null || !original.getImageFile().exists() ) { // If there are uncorrupted instances, no thumbnail can be created log.warn( "Error - no original image was found!!!" ); txw.commit(); return; } log.warn( "Found original, reading it..." ); // Read the image BufferedImage origImage = null; try { origImage = ImageIO.read( original.getImageFile() ); } catch ( IOException e ) { log.warn( "Error reading image: " + e.getMessage() ); txw.abort(); return; } log.warn( "Done, finding name" ); // Find where to store the file in the target volume File thumbnailFile = volume.getInstanceName( this, "jpg" ); log.debug( "name = " + thumbnailFile.getName() ); // Shrink the image to desired state and save it // Find first the correct transformation for doing this int origWidth = origImage.getWidth(); int origHeight = origImage.getHeight(); int maxThumbWidth = 100; int maxThumbHeight = 100; AffineTransform xform = photovault.image.ImageXform.getFittingXform( maxThumbWidth, maxThumbHeight, prefRotation -original.getRotated(), origWidth, origHeight ); // Create the target image AffineTransformOp atOp = new AffineTransformOp( xform, AffineTransformOp.TYPE_NEAREST_NEIGHBOR ); log.warn( "Filtering..." ); BufferedImage thumbImage = atOp.filter( origImage, null ); log.warn( "done Filtering..." 
); // Save it try { ImageIO.write( thumbImage, "jpg", thumbnailFile ); } catch ( IOException e ) { log.warn( "Error writing thumbnail: " + e.getMessage() ); txw.abort(); return; } // add the created instance to this persistent object ImageInstance thumbInstance = addInstance( volume, thumbnailFile, ImageInstance.INSTANCE_TYPE_THUMBNAIL ); thumbInstance.setRotated( prefRotation -original.getRotated() ); log.warn( "Loading thumbnail..." ); thumbnail = Thumbnail.createThumbnail( this, thumbnailFile ); txw.commit(); }
| 1,109,485
|
protected void createThumbnail( Volume volume ) { log.debug( "Creating thumbnail" ); ODMGXAWrapper txw = new ODMGXAWrapper(); txw.lock( this, Transaction.WRITE ); // Find the original image to use as a staring point ImageInstance original = null; for ( int n = 0; n < instances.size(); n++ ) { ImageInstance instance = (ImageInstance) instances.get( n ); if ( instance.getInstanceType() == ImageInstance.INSTANCE_TYPE_ORIGINAL ) { original = instance; txw.lock( original, Transaction.READ ); break; } } if ( original == null || original.getImageFile() == null || !original.getImageFile().exists() ) { // If there are uncorrupted instances, no thumbnail can be created log.warn( "Error - no original image was found!!!" ); txw.commit(); return; } log.warn( "Found original, reading it..." ); // Read the image BufferedImage origImage = null; try { origImage = ImageIO.read( original.getImageFile() ); } catch ( IOException e ) { log.warn( "Error reading image: " + e.getMessage() ); txw.abort(); return; } log.warn( "Done, finding name" ); // Find where to store the file in the target volume File thumbnailFile = volume.getInstanceName( this, "jpg" ); log.warn( "name = " + thumbnailFile.getName() ); // Shrink the image to desired state and save it // Find first the correct transformation for doing this int origWidth = origImage.getWidth(); int origHeight = origImage.getHeight(); int maxThumbWidth = 100; int maxThumbHeight = 100; AffineTransform xform = photovault.image.ImageXform.getFittingXform( maxThumbWidth, maxThumbHeight, prefRotation -original.getRotated(), origWidth, origHeight ); // Create the target image AffineTransformOp atOp = new AffineTransformOp( xform, AffineTransformOp.TYPE_NEAREST_NEIGHBOR ); log.warn( "Filtering..." ); BufferedImage thumbImage = atOp.filter( origImage, null ); log.warn( "done Filtering..." 
); // Save it try { ImageIO.write( thumbImage, "jpg", thumbnailFile ); } catch ( IOException e ) { log.warn( "Error writing thumbnail: " + e.getMessage() ); txw.abort(); return; } // add the created instance to this persistent object ImageInstance thumbInstance = addInstance( volume, thumbnailFile, ImageInstance.INSTANCE_TYPE_THUMBNAIL ); thumbInstance.setRotated( prefRotation -original.getRotated() ); log.warn( "Loading thumbnail..." ); thumbnail = Thumbnail.createThumbnail( this, thumbnailFile ); txw.commit(); }
|
protected void createThumbnail( Volume volume ) { log.debug( "Creating thumbnail" ); ODMGXAWrapper txw = new ODMGXAWrapper(); txw.lock( this, Transaction.WRITE ); // Find the original image to use as a staring point ImageInstance original = null; for ( int n = 0; n < instances.size(); n++ ) { ImageInstance instance = (ImageInstance) instances.get( n ); if ( instance.getInstanceType() == ImageInstance.INSTANCE_TYPE_ORIGINAL ) { original = instance; txw.lock( original, Transaction.READ ); break; } } if ( original == null || original.getImageFile() == null || !original.getImageFile().exists() ) { // If there are uncorrupted instances, no thumbnail can be created log.warn( "Error - no original image was found!!!" ); txw.commit(); return; } log.warn( "Found original, reading it..." ); // Read the image BufferedImage origImage = null; try { origImage = ImageIO.read( original.getImageFile() ); } catch ( IOException e ) { log.warn( "Error reading image: " + e.getMessage() ); txw.abort(); return; } log.warn( "Done, finding name" ); // Find where to store the file in the target volume File thumbnailFile = volume.getInstanceName( this, "jpg" ); log.warn( "name = " + thumbnailFile.getName() ); // Shrink the image to desired state and save it // Find first the correct transformation for doing this int origWidth = origImage.getWidth(); int origHeight = origImage.getHeight(); int maxThumbWidth = 100; int maxThumbHeight = 100; AffineTransform xform = photovault.image.ImageXform.getFittingXform( maxThumbWidth, maxThumbHeight, prefRotation -original.getRotated(), origWidth, origHeight ); // Create the target image AffineTransformOp atOp = new AffineTransformOp( xform, AffineTransformOp.TYPE_NEAREST_NEIGHBOR ); log.debug( "Filtering..." ); BufferedImage thumbImage = atOp.filter( origImage, null ); log.warn( "done Filtering..." 
); // Save it try { ImageIO.write( thumbImage, "jpg", thumbnailFile ); } catch ( IOException e ) { log.warn( "Error writing thumbnail: " + e.getMessage() ); txw.abort(); return; } // add the created instance to this persistent object ImageInstance thumbInstance = addInstance( volume, thumbnailFile, ImageInstance.INSTANCE_TYPE_THUMBNAIL ); thumbInstance.setRotated( prefRotation -original.getRotated() ); log.warn( "Loading thumbnail..." ); thumbnail = Thumbnail.createThumbnail( this, thumbnailFile ); txw.commit(); }
| 1,109,486
|
protected void createThumbnail( Volume volume ) { log.debug( "Creating thumbnail" ); ODMGXAWrapper txw = new ODMGXAWrapper(); txw.lock( this, Transaction.WRITE ); // Find the original image to use as a staring point ImageInstance original = null; for ( int n = 0; n < instances.size(); n++ ) { ImageInstance instance = (ImageInstance) instances.get( n ); if ( instance.getInstanceType() == ImageInstance.INSTANCE_TYPE_ORIGINAL ) { original = instance; txw.lock( original, Transaction.READ ); break; } } if ( original == null || original.getImageFile() == null || !original.getImageFile().exists() ) { // If there are uncorrupted instances, no thumbnail can be created log.warn( "Error - no original image was found!!!" ); txw.commit(); return; } log.warn( "Found original, reading it..." ); // Read the image BufferedImage origImage = null; try { origImage = ImageIO.read( original.getImageFile() ); } catch ( IOException e ) { log.warn( "Error reading image: " + e.getMessage() ); txw.abort(); return; } log.warn( "Done, finding name" ); // Find where to store the file in the target volume File thumbnailFile = volume.getInstanceName( this, "jpg" ); log.warn( "name = " + thumbnailFile.getName() ); // Shrink the image to desired state and save it // Find first the correct transformation for doing this int origWidth = origImage.getWidth(); int origHeight = origImage.getHeight(); int maxThumbWidth = 100; int maxThumbHeight = 100; AffineTransform xform = photovault.image.ImageXform.getFittingXform( maxThumbWidth, maxThumbHeight, prefRotation -original.getRotated(), origWidth, origHeight ); // Create the target image AffineTransformOp atOp = new AffineTransformOp( xform, AffineTransformOp.TYPE_NEAREST_NEIGHBOR ); log.warn( "Filtering..." ); BufferedImage thumbImage = atOp.filter( origImage, null ); log.warn( "done Filtering..." 
); // Save it try { ImageIO.write( thumbImage, "jpg", thumbnailFile ); } catch ( IOException e ) { log.warn( "Error writing thumbnail: " + e.getMessage() ); txw.abort(); return; } // add the created instance to this persistent object ImageInstance thumbInstance = addInstance( volume, thumbnailFile, ImageInstance.INSTANCE_TYPE_THUMBNAIL ); thumbInstance.setRotated( prefRotation -original.getRotated() ); log.warn( "Loading thumbnail..." ); thumbnail = Thumbnail.createThumbnail( this, thumbnailFile ); txw.commit(); }
|
protected void createThumbnail( Volume volume ) { log.debug( "Creating thumbnail" ); ODMGXAWrapper txw = new ODMGXAWrapper(); txw.lock( this, Transaction.WRITE ); // Find the original image to use as a staring point ImageInstance original = null; for ( int n = 0; n < instances.size(); n++ ) { ImageInstance instance = (ImageInstance) instances.get( n ); if ( instance.getInstanceType() == ImageInstance.INSTANCE_TYPE_ORIGINAL ) { original = instance; txw.lock( original, Transaction.READ ); break; } } if ( original == null || original.getImageFile() == null || !original.getImageFile().exists() ) { // If there are uncorrupted instances, no thumbnail can be created log.warn( "Error - no original image was found!!!" ); txw.commit(); return; } log.warn( "Found original, reading it..." ); // Read the image BufferedImage origImage = null; try { origImage = ImageIO.read( original.getImageFile() ); } catch ( IOException e ) { log.warn( "Error reading image: " + e.getMessage() ); txw.abort(); return; } log.warn( "Done, finding name" ); // Find where to store the file in the target volume File thumbnailFile = volume.getInstanceName( this, "jpg" ); log.warn( "name = " + thumbnailFile.getName() ); // Shrink the image to desired state and save it // Find first the correct transformation for doing this int origWidth = origImage.getWidth(); int origHeight = origImage.getHeight(); int maxThumbWidth = 100; int maxThumbHeight = 100; AffineTransform xform = photovault.image.ImageXform.getFittingXform( maxThumbWidth, maxThumbHeight, prefRotation -original.getRotated(), origWidth, origHeight ); // Create the target image AffineTransformOp atOp = new AffineTransformOp( xform, AffineTransformOp.TYPE_NEAREST_NEIGHBOR ); log.warn( "Filtering..." ); BufferedImage thumbImage = atOp.filter( origImage, null ); log.debug( "done Filtering..." 
); // Save it try { ImageIO.write( thumbImage, "jpg", thumbnailFile ); } catch ( IOException e ) { log.warn( "Error writing thumbnail: " + e.getMessage() ); txw.abort(); return; } // add the created instance to this persistent object ImageInstance thumbInstance = addInstance( volume, thumbnailFile, ImageInstance.INSTANCE_TYPE_THUMBNAIL ); thumbInstance.setRotated( prefRotation -original.getRotated() ); log.warn( "Loading thumbnail..." ); thumbnail = Thumbnail.createThumbnail( this, thumbnailFile ); txw.commit(); }
| 1,109,487
|
protected void createThumbnail( Volume volume ) { log.debug( "Creating thumbnail" ); ODMGXAWrapper txw = new ODMGXAWrapper(); txw.lock( this, Transaction.WRITE ); // Find the original image to use as a staring point ImageInstance original = null; for ( int n = 0; n < instances.size(); n++ ) { ImageInstance instance = (ImageInstance) instances.get( n ); if ( instance.getInstanceType() == ImageInstance.INSTANCE_TYPE_ORIGINAL ) { original = instance; txw.lock( original, Transaction.READ ); break; } } if ( original == null || original.getImageFile() == null || !original.getImageFile().exists() ) { // If there are uncorrupted instances, no thumbnail can be created log.warn( "Error - no original image was found!!!" ); txw.commit(); return; } log.warn( "Found original, reading it..." ); // Read the image BufferedImage origImage = null; try { origImage = ImageIO.read( original.getImageFile() ); } catch ( IOException e ) { log.warn( "Error reading image: " + e.getMessage() ); txw.abort(); return; } log.warn( "Done, finding name" ); // Find where to store the file in the target volume File thumbnailFile = volume.getInstanceName( this, "jpg" ); log.warn( "name = " + thumbnailFile.getName() ); // Shrink the image to desired state and save it // Find first the correct transformation for doing this int origWidth = origImage.getWidth(); int origHeight = origImage.getHeight(); int maxThumbWidth = 100; int maxThumbHeight = 100; AffineTransform xform = photovault.image.ImageXform.getFittingXform( maxThumbWidth, maxThumbHeight, prefRotation -original.getRotated(), origWidth, origHeight ); // Create the target image AffineTransformOp atOp = new AffineTransformOp( xform, AffineTransformOp.TYPE_NEAREST_NEIGHBOR ); log.warn( "Filtering..." ); BufferedImage thumbImage = atOp.filter( origImage, null ); log.warn( "done Filtering..." 
); // Save it try { ImageIO.write( thumbImage, "jpg", thumbnailFile ); } catch ( IOException e ) { log.warn( "Error writing thumbnail: " + e.getMessage() ); txw.abort(); return; } // add the created instance to this persistent object ImageInstance thumbInstance = addInstance( volume, thumbnailFile, ImageInstance.INSTANCE_TYPE_THUMBNAIL ); thumbInstance.setRotated( prefRotation -original.getRotated() ); log.warn( "Loading thumbnail..." ); thumbnail = Thumbnail.createThumbnail( this, thumbnailFile ); txw.commit(); }
|
protected void createThumbnail( Volume volume ) { log.debug( "Creating thumbnail" ); ODMGXAWrapper txw = new ODMGXAWrapper(); txw.lock( this, Transaction.WRITE ); // Find the original image to use as a staring point ImageInstance original = null; for ( int n = 0; n < instances.size(); n++ ) { ImageInstance instance = (ImageInstance) instances.get( n ); if ( instance.getInstanceType() == ImageInstance.INSTANCE_TYPE_ORIGINAL ) { original = instance; txw.lock( original, Transaction.READ ); break; } } if ( original == null || original.getImageFile() == null || !original.getImageFile().exists() ) { // If there are uncorrupted instances, no thumbnail can be created log.warn( "Error - no original image was found!!!" ); txw.commit(); return; } log.warn( "Found original, reading it..." ); // Read the image BufferedImage origImage = null; try { origImage = ImageIO.read( original.getImageFile() ); } catch ( IOException e ) { log.warn( "Error reading image: " + e.getMessage() ); txw.abort(); return; } log.warn( "Done, finding name" ); // Find where to store the file in the target volume File thumbnailFile = volume.getInstanceName( this, "jpg" ); log.warn( "name = " + thumbnailFile.getName() ); // Shrink the image to desired state and save it // Find first the correct transformation for doing this int origWidth = origImage.getWidth(); int origHeight = origImage.getHeight(); int maxThumbWidth = 100; int maxThumbHeight = 100; AffineTransform xform = photovault.image.ImageXform.getFittingXform( maxThumbWidth, maxThumbHeight, prefRotation -original.getRotated(), origWidth, origHeight ); // Create the target image AffineTransformOp atOp = new AffineTransformOp( xform, AffineTransformOp.TYPE_NEAREST_NEIGHBOR ); log.warn( "Filtering..." ); BufferedImage thumbImage = atOp.filter( origImage, null ); log.warn( "done Filtering..." 
); // Save it try { ImageIO.write( thumbImage, "jpg", thumbnailFile ); } catch ( IOException e ) { log.warn( "Error writing thumbnail: " + e.getMessage() ); txw.abort(); return; } // add the created instance to this persistent object ImageInstance thumbInstance = addInstance( volume, thumbnailFile, ImageInstance.INSTANCE_TYPE_THUMBNAIL ); thumbInstance.setRotated( prefRotation -original.getRotated() ); log.debug( "Loading thumbnail..." ); thumbnail = Thumbnail.createThumbnail( this, thumbnailFile ); txw.commit(); }
| 1,109,488
|
public Thumbnail getThumbnail() { log.debug( "getThumbnail: Finding thumbnail for " + uid ); if ( thumbnail == null ) { // First try to find an instance from existing instances ImageInstance original = null; for ( int n = 0; n < instances.size(); n++ ) { ImageInstance instance = (ImageInstance) instances.get( n ); if ( instance.getInstanceType() == ImageInstance.INSTANCE_TYPE_THUMBNAIL && instance.getRotated() == prefRotation ) { thumbnail = Thumbnail.createThumbnail( this, instance.getImageFile() ); break; } } if ( thumbnail == null ) { // Next try to create a new thumbnail instance log.debug( "No thumbnail found, creating" ); createThumbnail(); } } if ( thumbnail == null ) { // Thumbnail creating was not successful, most probably because there is no available instance// return Thumbnail.getDefaultThumbnail(); thumbnail = Thumbnail.getDefaultThumbnail(); } return thumbnail; }
|
public Thumbnail getThumbnail() { log.debug( "getThumbnail: entry, Finding thumbnail for " + uid ); if ( thumbnail == null ) { // First try to find an instance from existing instances ImageInstance original = null; for ( int n = 0; n < instances.size(); n++ ) { ImageInstance instance = (ImageInstance) instances.get( n ); if ( instance.getInstanceType() == ImageInstance.INSTANCE_TYPE_THUMBNAIL && instance.getRotated() == prefRotation ) { thumbnail = Thumbnail.createThumbnail( this, instance.getImageFile() ); break; } } if ( thumbnail == null ) { // Next try to create a new thumbnail instance log.debug( "No thumbnail found, creating" ); createThumbnail(); } } if ( thumbnail == null ) { // Thumbnail creating was not successful, most probably because there is no available instance// return Thumbnail.getDefaultThumbnail(); thumbnail = Thumbnail.getDefaultThumbnail(); } return thumbnail; }
| 1,109,490
|
public boolean hasThumbnail() { log.debug( "hasThumbnail: Finding thumbnail for " + uid ); if ( thumbnail == null ) { // First try to find an instance from existing instances ImageInstance original = null; for ( int n = 0; n < instances.size(); n++ ) { ImageInstance instance = (ImageInstance) instances.get( n ); if ( instance.getInstanceType() == ImageInstance.INSTANCE_TYPE_THUMBNAIL && instance.getRotated() == prefRotation ) { thumbnail = Thumbnail.createThumbnail( this, instance.getImageFile() ); break; } } } return ( thumbnail != null ); }
|
public boolean hasThumbnail() { log.debug( "hasThumbnail: entry, Finding thumbnail for " + uid ); if ( thumbnail == null ) { // First try to find an instance from existing instances ImageInstance original = null; for ( int n = 0; n < instances.size(); n++ ) { ImageInstance instance = (ImageInstance) instances.get( n ); if ( instance.getInstanceType() == ImageInstance.INSTANCE_TYPE_THUMBNAIL && instance.getRotated() == prefRotation ) { thumbnail = Thumbnail.createThumbnail( this, instance.getImageFile() ); break; } } } return ( thumbnail != null ); }
| 1,109,492
|
private void processFile(String fileName, int fileType, String infoFileName){ try { int outputType; long maxDistance; HaploData textData; File OutputFile; File inputFile; inputFile = new File(fileName); if(!inputFile.exists()){ System.out.println("input file: " + fileName + " does not exist"); System.exit(1); } maxDistance = this.arg_distance * 1000; outputType = this.arg_output; textData = new HaploData(0); Vector result = null; if(fileType == 0){ //read in haps file textData.prepareHapsInput(inputFile); } else if (fileType == 1) { //read in ped file /* if(this.arg_ignoreMarkers.size()>0) { for(int i=0;i<this.arg_ignoreMarkers.size();i++){ int index = Integer.parseInt((String)this.arg_ignoreMarkers.get(i)); if(index>0 && index<markerResultArray.length){ markerResultArray[index] = false; if(!this.arg_quiet) { System.out.println("Ignoring marker " + (index)); } } } }*/ result = textData.linkageToChrom(inputFile, 3, arg_skipCheck); }else{ //read in hapmapfile result = textData.linkageToChrom(inputFile,4,arg_skipCheck); } File infoFile; if(infoFileName.equals("")) { infoFile = null; }else{ infoFile = new File(infoFileName); } if (result != null){ textData.prepareMarkerInput(infoFile,maxDistance,textData.getPedFile().getHMInfo()); }else{ textData.prepareMarkerInput(infoFile,maxDistance,null); } if(!arg_quiet && infoFile != null){ System.out.println("Using marker file " + infoFile.getName()); } if(this.arg_showCheck && result != null) { CheckDataPanel cp = new CheckDataPanel(textData); cp.printTable(null); } if(this.arg_check && result != null){ CheckDataPanel cp = new CheckDataPanel(textData); cp.printTable(new File (fileName + ".CHECK")); } Vector cust = new Vector(); if(outputType != -1){ textData.generateDPrimeTable(); Haplotype[][] haplos; switch(outputType){ case BLOX_GABRIEL: OutputFile = new File(fileName + ".GABRIELblocks"); break; case BLOX_4GAM: OutputFile = new File(fileName + ".4GAMblocks"); break; case BLOX_SPINE: OutputFile = new File(fileName + 
".SPINEblocks"); break; case BLOX_CUSTOM: OutputFile = new File(fileName + ".CUSTblocks"); //read in the blocks file File blocksFile = new File(arg_blockfile); cust = textData.readBlocks(blocksFile); break; default: OutputFile = new File(fileName + ".GABRIELblocks"); break; } //this handles output type ALL int start = 0; int stop = Chromosome.getFilteredSize(); if(outputType == BLOX_ALL) { OutputFile = new File(fileName + ".GABRIELblocks"); textData.guessBlocks(BLOX_GABRIEL); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); OutputFile = new File(fileName + ".4GAMblocks"); textData.guessBlocks(BLOX_4GAM); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); OutputFile = new File(fileName + ".SPINEblocks"); textData.guessBlocks(BLOX_SPINE); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); }else{ textData.guessBlocks(outputType, cust); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); } } if(this.arg_dprime) { OutputFile = new File(fileName + ".DPRIME"); if (textData.filteredDPrimeTable != null){ textData.saveDprimeToText(OutputFile, TABLE_TYPE, 0, Chromosome.getFilteredSize()); }else{ textData.saveDprimeToText(OutputFile, LIVE_TYPE, 0, Chromosome.getFilteredSize()); } } if (this.arg_png || this.arg_smallpng){ OutputFile = new File(fileName + ".LD.PNG"); if (textData.filteredDPrimeTable == null){ textData.generateDPrimeTable(); textData.guessBlocks(BLOX_CUSTOM, new Vector()); } if (this.arg_trackName != null){ textData.readAnalysisTrack(new File(arg_trackName)); } DPrimeDisplay dpd = new DPrimeDisplay(textData); BufferedImage i = 
dpd.export(0,Chromosome.getSize(),this.arg_smallpng); try{ Jimi.putImage("image/png", i, OutputFile.getName()); }catch(JimiException je){ System.out.println(je.getMessage()); } } //if(fileType){ //TDT.calcTrioTDT(textData.chromosomes); //TODO: Deal with this. why do we calc TDT? and make sure not to do it except when appropriate //} } catch(IOException e){ System.err.println("An error has occured. This probably has to do with file input or output"); } catch(HaploViewException e){ System.err.println(e.getMessage()); } catch(PedFileException pfe) { System.err.println(pfe.getMessage()); } }
|
private void processFile(String fileName, int fileType, String infoFileName){ try { int outputType; long maxDistance; HaploData textData; File OutputFile; File inputFile; inputFile = new File(fileName); if(!inputFile.exists()){ System.out.println("input file: " + fileName + " does not exist"); System.exit(1); } maxDistance = this.arg_distance * 1000; outputType = this.arg_output; textData = new HaploData(0); Vector result = null; if(fileType == 0){ //read in haps file textData.prepareHapsInput(inputFile); } else if (fileType == 1) { //read in ped file /* if(this.arg_ignoreMarkers.size()>0) { for(int i=0;i<this.arg_ignoreMarkers.size();i++){ int index = Integer.parseInt((String)this.arg_ignoreMarkers.get(i)); if(index>0 && index<markerResultArray.length){ markerResultArray[index] = false; if(!this.arg_quiet) { System.out.println("Ignoring marker " + (index)); } } } }*/ result = textData.linkageToChrom(inputFile, 3, arg_skipCheck); }else{ //read in hapmapfile result = textData.linkageToChrom(inputFile,4,arg_skipCheck); } File infoFile; if(infoFileName.equals("")) { infoFile = null; }else{ infoFile = new File(infoFileName); } if (result != null){ textData.prepareMarkerInput(infoFile,maxDistance,textData.getPedFile().getHMInfo()); }else{ textData.prepareMarkerInput(infoFile,maxDistance,null); } if(!arg_quiet && infoFile != null){ System.out.println("Using marker file " + infoFile.getName()); } if(this.arg_showCheck && result != null) { CheckDataPanel cp = new CheckDataPanel(textData, false); cp.printTable(null); } if(this.arg_check && result != null){ CheckDataPanel cp = new CheckDataPanel(textData, false); cp.printTable(new File (fileName + ".CHECK")); } Vector cust = new Vector(); if(outputType != -1){ textData.generateDPrimeTable(); Haplotype[][] haplos; switch(outputType){ case BLOX_GABRIEL: OutputFile = new File(fileName + ".GABRIELblocks"); break; case BLOX_4GAM: OutputFile = new File(fileName + ".4GAMblocks"); break; case BLOX_SPINE: OutputFile = new 
File(fileName + ".SPINEblocks"); break; case BLOX_CUSTOM: OutputFile = new File(fileName + ".CUSTblocks"); //read in the blocks file File blocksFile = new File(arg_blockfile); cust = textData.readBlocks(blocksFile); break; default: OutputFile = new File(fileName + ".GABRIELblocks"); break; } //this handles output type ALL int start = 0; int stop = Chromosome.getFilteredSize(); if(outputType == BLOX_ALL) { OutputFile = new File(fileName + ".GABRIELblocks"); textData.guessBlocks(BLOX_GABRIEL); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); OutputFile = new File(fileName + ".4GAMblocks"); textData.guessBlocks(BLOX_4GAM); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); OutputFile = new File(fileName + ".SPINEblocks"); textData.guessBlocks(BLOX_SPINE); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); }else{ textData.guessBlocks(outputType, cust); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); } } if(this.arg_dprime) { OutputFile = new File(fileName + ".DPRIME"); if (textData.filteredDPrimeTable != null){ textData.saveDprimeToText(OutputFile, TABLE_TYPE, 0, Chromosome.getFilteredSize()); }else{ textData.saveDprimeToText(OutputFile, LIVE_TYPE, 0, Chromosome.getFilteredSize()); } } if (this.arg_png || this.arg_smallpng){ OutputFile = new File(fileName + ".LD.PNG"); if (textData.filteredDPrimeTable == null){ textData.generateDPrimeTable(); textData.guessBlocks(BLOX_CUSTOM, new Vector()); } if (this.arg_trackName != null){ textData.readAnalysisTrack(new File(arg_trackName)); } DPrimeDisplay dpd = new DPrimeDisplay(textData); BufferedImage i = 
dpd.export(0,Chromosome.getSize(),this.arg_smallpng); try{ Jimi.putImage("image/png", i, OutputFile.getName()); }catch(JimiException je){ System.out.println(je.getMessage()); } } //if(fileType){ //TDT.calcTrioTDT(textData.chromosomes); //TODO: Deal with this. why do we calc TDT? and make sure not to do it except when appropriate //} } catch(IOException e){ System.err.println("An error has occured. This probably has to do with file input or output"); } catch(HaploViewException e){ System.err.println(e.getMessage()); } catch(PedFileException pfe) { System.err.println(pfe.getMessage()); } }
| 1,109,494
|
public Object createObject(Attributes attributes) { int x = Integer.parseInt(attributes.getValue("x")); int y = Integer.parseInt(attributes.getValue("y")); SQLTable tab = (SQLTable) objectIdMap.get(attributes.getValue("table-ref")); TablePane tp = new TablePane(tab); playPen.add(tp, new Point(x, y)); return tp; }
|
public Object createObject(Attributes attributes) { int x = Integer.parseInt(attributes.getValue("x")); int y = Integer.parseInt(attributes.getValue("y")); SQLTable tab = (SQLTable) objectIdMap.get(attributes.getValue("table-ref")); TablePane tp = new TablePane(tab, playPen.getFontRenderContext()); playPen.add(tp, new Point(x, y)); return tp; }
| 1,109,499
|
void drawPicture(HaploData theData){ Container contents = getContentPane(); contents.removeAll(); //remember which tab we're in if they've already been set up int currentTabIndex = 0; if (!(tabs == null)){ currentTabIndex = tabs.getSelectedIndex(); } tabs = new JTabbedPane(); tabs.addChangeListener(new TabChangeListener()); //first, draw the D' picture JPanel panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); dPrimeDisplay = new DPrimeDisplay(theData.dPrimeTable, infoKnown, theData.markerInfo); JScrollPane dPrimeScroller = new JScrollPane(dPrimeDisplay); dPrimeScroller.getViewport().setScrollMode(JViewport.BLIT_SCROLL_MODE); dPrimeScroller.getVerticalScrollBar().setUnitIncrement(60); dPrimeScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(dPrimeScroller); tabs.addTab(viewItems[0], panel); //compute and show haps on next tab panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); hapDisplay = new HaplotypeDisplay(theData); HaplotypeDisplayController hdc = new HaplotypeDisplayController(hapDisplay); JScrollPane hapScroller = new JScrollPane(hapDisplay); panel.add(hapScroller); panel.add(hdc); tabs.addTab(viewItems[1], panel); tabs.setSelectedIndex(currentTabIndex); contents.add(tabs); //next add a little spacer //ontents.add(Box.createRigidArea(new Dimension(0,5))); //and then add the block display //theBlocks = new BlockDisplay(theData.markerInfo, theData.blocks, dPrimeDisplay, infoKnown); //contents.setBackground(Color.black); //put the block display in a scroll pane in case the data set is very large. //JScrollPane blockScroller = new JScrollPane(theBlocks, // JScrollPane.VERTICAL_SCROLLBAR_NEVER, // JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED); //blockScroller.getHorizontalScrollBar().setUnitIncrement(60); //blockScroller.setMinimumSize(new Dimension(800, 100)); //contents.add(blockScroller); repaint(); setVisible(true); }
|
void drawPicture(HaploData theData){ Container contents = getContentPane(); contents.removeAll(); //remember which tab we're in if they've already been set up int currentTabIndex = 0; if (!(tabs == null)){ currentTabIndex = tabs.getSelectedIndex(); } tabs = new JTabbedPane(); tabs.addChangeListener(new TabChangeListener()); //first, draw the D' picture JPanel panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); dPrimeDisplay = new DPrimeDisplay(theData.dPrimeTable, infoKnown); JScrollPane dPrimeScroller = new JScrollPane(dPrimeDisplay); dPrimeScroller.getViewport().setScrollMode(JViewport.BLIT_SCROLL_MODE); dPrimeScroller.getVerticalScrollBar().setUnitIncrement(60); dPrimeScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(dPrimeScroller); tabs.addTab(viewItems[0], panel); //compute and show haps on next tab panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); hapDisplay = new HaplotypeDisplay(theData); HaplotypeDisplayController hdc = new HaplotypeDisplayController(hapDisplay); JScrollPane hapScroller = new JScrollPane(hapDisplay); panel.add(hapScroller); panel.add(hdc); tabs.addTab(viewItems[1], panel); tabs.setSelectedIndex(currentTabIndex); contents.add(tabs); //next add a little spacer //ontents.add(Box.createRigidArea(new Dimension(0,5))); //and then add the block display //theBlocks = new BlockDisplay(theData.markerInfo, theData.blocks, dPrimeDisplay, infoKnown); //contents.setBackground(Color.black); //put the block display in a scroll pane in case the data set is very large. //JScrollPane blockScroller = new JScrollPane(theBlocks, // JScrollPane.VERTICAL_SCROLLBAR_NEVER, // JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED); //blockScroller.getHorizontalScrollBar().setUnitIncrement(60); //blockScroller.setMinimumSize(new Dimension(800, 100)); //contents.add(blockScroller); repaint(); setVisible(true); }
| 1,109,500
|
public static void main(String[] args) {//throws IOException{ boolean nogui = false; HaploView window; for(int i = 0;i<args.length;i++) { if(args[i].equals("-nogui") || args[i].equals("-n") ) { nogui = true; } } if(nogui) { window = new HaploView(args); } else { window = new HaploView(); window.argHandler(args); try { UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); } catch (Exception e) { } //setup view object window.setTitle("HaploView beta"); window.setSize(800,600); //center the window on the screen Dimension screen = Toolkit.getDefaultToolkit().getScreenSize(); window.setLocation((screen.width - window.getWidth()) / 2, (screen.height - window.getHeight()) / 2); window.setVisible(true); ReadDataDialog readDialog = new ReadDataDialog("Welcome to HaploView", window); readDialog.pack(); readDialog.setVisible(true); } }
|
public static void main(String[] args) {//throws IOException{ boolean nogui = false; HaploView window; for(int i = 0;i<args.length;i++) { if(args[i].equals("-nogui") || args[i].equals("-n") ) { nogui = true; } } if(nogui) { window = new HaploView(args); } else { window = new HaploView(); window.argHandler(args); try { UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); } catch (Exception e) { } //setup view object window.setTitle("HaploView beta"); window.setSize(800,600); //center the window on the screen Dimension screen = Toolkit.getDefaultToolkit().getScreenSize(); window.setLocation((screen.width - window.getWidth()) / 2, (screen.height - window.getHeight()) / 2); window.setVisible(true); ReadDataDialog readDialog = new ReadDataDialog("Welcome to HaploView", window); readDialog.pack(); readDialog.setVisible(true); } }
| 1,109,501
|
void readMarkers(File inputFile){ try { int good = theData.prepareMarkerInput(inputFile); if (good == -1){ JOptionPane.showMessageDialog(this, "Number of markers in info file does not match number of markers in dataset.", "Error", JOptionPane.ERROR_MESSAGE); }else{ infoKnown=true; if (dPrimeDisplay != null){ dPrimeDisplay.loadMarkers(theData.markerInfo); } } }catch (IOException ioexec){ JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); }catch (RuntimeException rtexec){ JOptionPane.showMessageDialog(this, "An error has occured. It is probably related to file format:\n"+rtexec.toString(), "Error", JOptionPane.ERROR_MESSAGE); } }
|
void readMarkers(File inputFile){ try { int good = theData.prepareMarkerInput(inputFile); if (good == -1){ JOptionPane.showMessageDialog(this, "Number of markers in info file does not match number of markers in dataset.", "Error", JOptionPane.ERROR_MESSAGE); }else{ infoKnown=true; if (dPrimeDisplay != null){ dPrimeDisplay.loadMarkers(); } } }catch (IOException ioexec){ JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); }catch (RuntimeException rtexec){ JOptionPane.showMessageDialog(this, "An error has occured. It is probably related to file format:\n"+rtexec.toString(), "Error", JOptionPane.ERROR_MESSAGE); } }
| 1,109,503
|
void saveDprimeToText(){ fc.setSelectedFile(null); try{ fc.setSelectedFile(null); int returnVal = fc.showSaveDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { new TextMethods().saveDprimeToText(theData.dPrimeTable, fc.getSelectedFile(), infoKnown, theData.markerInfo); } }catch (IOException ioexec){ JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } }
|
void saveDprimeToText(){ fc.setSelectedFile(null); try{ fc.setSelectedFile(null); int returnVal = fc.showSaveDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { new TextMethods().saveDprimeToText(theData.dPrimeTable, fc.getSelectedFile(), infoKnown, new Vector()); } }catch (IOException ioexec){ JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } }
| 1,109,504
|
private void doColumnProfile(List<SQLColumn> columns, Connection conn) throws SQLException { Statement stmt = null; ResultSet rs = null; String lastSQL = null; try { if ( columns.size() == 0 ) return; SQLColumn col1 = columns.get(0); DDLGenerator ddlg = null; Class generatorClass = null; try { Map ddlGeneratorMap = ArchitectUtils.getDriverDDLGeneratorMap(); generatorClass = (Class) ddlGeneratorMap.get( col1.getParentTable().getParentDatabase().getDataSource().getDriverClass()); // FIXME: make warning user visable if (generatorClass == null) { System.out.println("Unable to create Profile for the target database."); return; } ddlg = (DDLGenerator) generatorClass.newInstance(); } catch (InstantiationException e1) { logger.error("problem running Profile Manager", e1); } catch ( IllegalAccessException e1 ) { logger.error("problem running Profile Manager", e1); } stmt = conn.createStatement(); stmt.setEscapeProcessing(false); int i = 0; for (SQLColumn col : columns ) { ProfileFunctionDescriptor pfd = ddlg.getProfileFunctionMap().get(col.getSourceDataTypeName()); ColumnProfileResult colResult = new ColumnProfileResult(System.currentTimeMillis()); if ( pfd == null ) { System.out.println(col.getName()+ " Unknown DataType:(" + col.getSourceDataTypeName() + "). 
please setup the profile function mapping"); continue; } StringBuffer sql = new StringBuffer(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValus() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n AVG(\""); sql.append(col.getName()); sql.append("\") AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM ").append(DDLUtils.toQualifiedName(table.getCatalogName(),table.getSchemaName(),table.getName())); try { lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { lastSQL = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(lastSQL)); } if (findingMin && pfd.isMinValue() ) { lastSQL = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(lastSQL)); } if (findingMax && pfd.isMaxValus() ) { lastSQL = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(lastSQL)); } if (findingAvg && pfd.isAvgValue() ) { lastSQL = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(lastSQL)); } if (findingMinLength && 
pfd.isMinLength() ) { lastSQL = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(lastSQL)); } if (findingMaxLength && pfd.isMaxLength() ) { lastSQL = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(lastSQL)); } if (findingAvgLength && pfd.isAvgLength() ) { lastSQL = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getInt(lastSQL)); } if ( findingNullCount && pfd.isSumDecode() ) { lastSQL = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(lastSQL)); } } } catch ( SQLException ex ) { colResult.setError(true); colResult.setEx(ex); logger.error("Error in Column Profiling: "+lastSQL, ex); } finally { colResult.setCreateEndTime(System.currentTimeMillis()); putResult(col, colResult); try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } rs = null; } i++; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT \""); sql.append(col.getName()); sql.append("\" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(),table.getSchemaName(),table.getName())); sql.append(" GROUP BY \""); sql.append(col.getName()); sql.append("\" ORDER BY COUNT1 DESC"); colResult = (ColumnProfileResult) getResult(col); try { lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < 10; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } catch ( SQLException ex ) { colResult.setError(true); colResult.setEx(ex); logger.error("Error in Column Profiling: "+lastSQL, ex); } finally { colResult.setCreateEndTime(System.currentTimeMillis()); putResult(col, colResult); try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } rs = null; } } } // XXX: add where filter later } finally { try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } try { if (stmt != null) stmt.close(); } catch (SQLException ex) { 
logger.error("Couldn't clean up statement", ex); } } }
|
private void doColumnProfile(List<SQLColumn> columns, Connection conn) throws SQLException { Statement stmt = null; ResultSet rs = null; String lastSQL = null; try { if ( columns.size() == 0 ) return; SQLColumn col1 = columns.get(0); DDLGenerator ddlg = null; Class generatorClass = null; try { Map ddlGeneratorMap = ArchitectUtils.getDriverDDLGeneratorMap(); generatorClass = (Class) ddlGeneratorMap.get( col1.getParentTable().getParentDatabase().getDataSource().getDriverClass()); // FIXME: make warning user visable if (generatorClass == null) { System.out.println("Unable to create Profile for the target database."); return; } ddlg = (DDLGenerator) generatorClass.newInstance(); } catch (InstantiationException e1) { logger.error("problem running Profile Manager", e1); } catch ( IllegalAccessException e1 ) { logger.error("problem running Profile Manager", e1); } stmt = conn.createStatement(); stmt.setEscapeProcessing(false); int i = 0; for (SQLColumn col : columns ) { ProfileFunctionDescriptor pfd = ddlg.getProfileFunctionMap().get(col.getSourceDataTypeName()); ColumnProfileResult colResult = new ColumnProfileResult(System.currentTimeMillis()); if ( pfd == null ) { System.out.println(col.getName()+ " Unknown DataType:(" + col.getSourceDataTypeName() + "). 
please setup the profile function mapping"); continue; } StringBuffer sql = new StringBuffer(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValus() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n AVG(\""); sql.append(col.getName()); sql.append("\") AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM ").append(DDLUtils.toQualifiedName(table.getCatalogName(),table.getSchemaName(),table.getName())); try { lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { lastSQL = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(lastSQL)); } if (findingMin && pfd.isMinValue() ) { lastSQL = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(lastSQL)); } if (findingMax && pfd.isMaxValus() ) { lastSQL = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(lastSQL)); } if (findingAvg && pfd.isAvgValue() ) { lastSQL = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(lastSQL)); } if (findingMinLength && 
pfd.isMinLength() ) { lastSQL = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(lastSQL)); } if (findingMaxLength && pfd.isMaxLength() ) { lastSQL = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(lastSQL)); } if (findingAvgLength && pfd.isAvgLength() ) { lastSQL = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getInt(lastSQL)); } if ( findingNullCount && pfd.isSumDecode() ) { lastSQL = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(lastSQL)); } } } catch ( SQLException ex ) { colResult.setError(true); colResult.setEx(ex); logger.error("Error in Column Profiling: "+lastSQL, ex); } finally { colResult.setCreateEndTime(System.currentTimeMillis()); putResult(col, colResult); try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } rs = null; } i++; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT \""); sql.append(col.getName()); sql.append("\" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(),table.getSchemaName(),table.getName())); sql.append(" GROUP BY \""); sql.append(col.getName()); sql.append("\" ORDER BY COUNT1 DESC"); colResult = (ColumnProfileResult) getResult(col); try { lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < 10; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } catch ( SQLException ex ) { colResult.setError(true); colResult.setEx(ex); logger.error("Error in Column Profiling: "+lastSQL, ex); } finally { colResult.setCreateEndTime(System.currentTimeMillis()); putResult(col, colResult); try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } rs = null; } } } // XXX: add where filter later } finally { try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } try { if (stmt != null) stmt.close(); } catch (SQLException ex) { 
logger.error("Couldn't clean up statement", ex); } } }
| 1,109,506
|
private void doColumnProfile(List<SQLColumn> columns, Connection conn) throws SQLException { Statement stmt = null; ResultSet rs = null; String lastSQL = null; try { if ( columns.size() == 0 ) return; SQLColumn col1 = columns.get(0); DDLGenerator ddlg = null; Class generatorClass = null; try { Map ddlGeneratorMap = ArchitectUtils.getDriverDDLGeneratorMap(); generatorClass = (Class) ddlGeneratorMap.get( col1.getParentTable().getParentDatabase().getDataSource().getDriverClass()); // FIXME: make warning user visable if (generatorClass == null) { System.out.println("Unable to create Profile for the target database."); return; } ddlg = (DDLGenerator) generatorClass.newInstance(); } catch (InstantiationException e1) { logger.error("problem running Profile Manager", e1); } catch ( IllegalAccessException e1 ) { logger.error("problem running Profile Manager", e1); } stmt = conn.createStatement(); stmt.setEscapeProcessing(false); int i = 0; for (SQLColumn col : columns ) { ProfileFunctionDescriptor pfd = ddlg.getProfileFunctionMap().get(col.getSourceDataTypeName()); ColumnProfileResult colResult = new ColumnProfileResult(System.currentTimeMillis()); if ( pfd == null ) { System.out.println(col.getName()+ " Unknown DataType:(" + col.getSourceDataTypeName() + "). 
please setup the profile function mapping"); continue; } StringBuffer sql = new StringBuffer(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValus() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n AVG(\""); sql.append(col.getName()); sql.append("\") AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM ").append(DDLUtils.toQualifiedName(table.getCatalogName(),table.getSchemaName(),table.getName())); try { lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { lastSQL = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(lastSQL)); } if (findingMin && pfd.isMinValue() ) { lastSQL = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(lastSQL)); } if (findingMax && pfd.isMaxValus() ) { lastSQL = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(lastSQL)); } if (findingAvg && pfd.isAvgValue() ) { lastSQL = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(lastSQL)); } if (findingMinLength && 
pfd.isMinLength() ) { lastSQL = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(lastSQL)); } if (findingMaxLength && pfd.isMaxLength() ) { lastSQL = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(lastSQL)); } if (findingAvgLength && pfd.isAvgLength() ) { lastSQL = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getInt(lastSQL)); } if ( findingNullCount && pfd.isSumDecode() ) { lastSQL = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(lastSQL)); } } } catch ( SQLException ex ) { colResult.setError(true); colResult.setEx(ex); logger.error("Error in Column Profiling: "+lastSQL, ex); } finally { colResult.setCreateEndTime(System.currentTimeMillis()); putResult(col, colResult); try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } rs = null; } i++; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT \""); sql.append(col.getName()); sql.append("\" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(),table.getSchemaName(),table.getName())); sql.append(" GROUP BY \""); sql.append(col.getName()); sql.append("\" ORDER BY COUNT1 DESC"); colResult = (ColumnProfileResult) getResult(col); try { lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < 10; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } catch ( SQLException ex ) { colResult.setError(true); colResult.setEx(ex); logger.error("Error in Column Profiling: "+lastSQL, ex); } finally { colResult.setCreateEndTime(System.currentTimeMillis()); putResult(col, colResult); try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } rs = null; } } } // XXX: add where filter later } finally { try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } try { if (stmt != null) stmt.close(); } catch (SQLException ex) { 
logger.error("Couldn't clean up statement", ex); } } }
|
private void doColumnProfile(List<SQLColumn> columns, Connection conn) throws SQLException { Statement stmt = null; ResultSet rs = null; String lastSQL = null; try { if ( columns.size() == 0 ) return; SQLColumn col1 = columns.get(0); DDLGenerator ddlg = null; Class generatorClass = null; try { Map ddlGeneratorMap = ArchitectUtils.getDriverDDLGeneratorMap(); generatorClass = (Class) ddlGeneratorMap.get( col1.getParentTable().getParentDatabase().getDataSource().getDriverClass()); // FIXME: make warning user visable if (generatorClass == null) { System.out.println("Unable to create Profile for the target database."); return; } ddlg = (DDLGenerator) generatorClass.newInstance(); } catch (InstantiationException e1) { logger.error("problem running Profile Manager", e1); } catch ( IllegalAccessException e1 ) { logger.error("problem running Profile Manager", e1); } stmt = conn.createStatement(); stmt.setEscapeProcessing(false); int i = 0; for (SQLColumn col : columns ) { ProfileFunctionDescriptor pfd = ddlg.getProfileFunctionMap().get(col.getSourceDataTypeName()); ColumnProfileResult colResult = new ColumnProfileResult(System.currentTimeMillis()); if ( pfd == null ) { System.out.println(col.getName()+ " Unknown DataType:(" + col.getSourceDataTypeName() + "). 
please setup the profile function mapping"); continue; } StringBuffer sql = new StringBuffer(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValus() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n AVG(\""); sql.append(col.getName()); sql.append("\") AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM ").append(DDLUtils.toQualifiedName(table.getCatalogName(),table.getSchemaName(),table.getName())); try { lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { lastSQL = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(lastSQL)); } if (findingMin && pfd.isMinValue() ) { lastSQL = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(lastSQL)); } if (findingMax && pfd.isMaxValus() ) { lastSQL = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(lastSQL)); } if (findingAvg && pfd.isAvgValue() ) { lastSQL = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(lastSQL)); } if (findingMinLength && 
pfd.isMinLength() ) { lastSQL = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(lastSQL)); } if (findingMaxLength && pfd.isMaxLength() ) { lastSQL = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(lastSQL)); } if (findingAvgLength && pfd.isAvgLength() ) { lastSQL = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getInt(lastSQL)); } if ( findingNullCount && pfd.isSumDecode() ) { lastSQL = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(lastSQL)); } } } catch ( SQLException ex ) { colResult.setError(true); colResult.setEx(ex); logger.error("Error in Column Profiling: "+lastSQL, ex); } finally { colResult.setCreateEndTime(System.currentTimeMillis()); putResult(col, colResult); try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } rs = null; } i++; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT \""); sql.append(col.getName()); sql.append("\" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(),table.getSchemaName(),table.getName())); sql.append(" GROUP BY \""); sql.append(col.getName()); sql.append("\" ORDER BY COUNT1 DESC"); colResult = (ColumnProfileResult) getResult(col); try { lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < 10; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } catch ( SQLException ex ) { colResult.setError(true); colResult.setEx(ex); logger.error("Error in Column Profiling: "+lastSQL, ex); } finally { colResult.setCreateEndTime(System.currentTimeMillis()); putResult(col, colResult); try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } rs = null; } } } // XXX: add where filter later } finally { try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } try { if (stmt != null) stmt.close(); } catch (SQLException ex) { 
logger.error("Couldn't clean up statement", ex); } } }
| 1,109,507
|
private void doColumnProfile(List<SQLColumn> columns, Connection conn) throws SQLException { Statement stmt = null; ResultSet rs = null; String lastSQL = null; try { if ( columns.size() == 0 ) return; SQLColumn col1 = columns.get(0); DDLGenerator ddlg = null; Class generatorClass = null; try { Map ddlGeneratorMap = ArchitectUtils.getDriverDDLGeneratorMap(); generatorClass = (Class) ddlGeneratorMap.get( col1.getParentTable().getParentDatabase().getDataSource().getDriverClass()); // FIXME: make warning user visable if (generatorClass == null) { System.out.println("Unable to create Profile for the target database."); return; } ddlg = (DDLGenerator) generatorClass.newInstance(); } catch (InstantiationException e1) { logger.error("problem running Profile Manager", e1); } catch ( IllegalAccessException e1 ) { logger.error("problem running Profile Manager", e1); } stmt = conn.createStatement(); stmt.setEscapeProcessing(false); int i = 0; for (SQLColumn col : columns ) { ProfileFunctionDescriptor pfd = ddlg.getProfileFunctionMap().get(col.getSourceDataTypeName()); ColumnProfileResult colResult = new ColumnProfileResult(System.currentTimeMillis()); if ( pfd == null ) { System.out.println(col.getName()+ " Unknown DataType:(" + col.getSourceDataTypeName() + "). 
please setup the profile function mapping"); continue; } StringBuffer sql = new StringBuffer(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValus() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n AVG(\""); sql.append(col.getName()); sql.append("\") AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM ").append(DDLUtils.toQualifiedName(table.getCatalogName(),table.getSchemaName(),table.getName())); try { lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { lastSQL = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(lastSQL)); } if (findingMin && pfd.isMinValue() ) { lastSQL = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(lastSQL)); } if (findingMax && pfd.isMaxValus() ) { lastSQL = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(lastSQL)); } if (findingAvg && pfd.isAvgValue() ) { lastSQL = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(lastSQL)); } if (findingMinLength && 
pfd.isMinLength() ) { lastSQL = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(lastSQL)); } if (findingMaxLength && pfd.isMaxLength() ) { lastSQL = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(lastSQL)); } if (findingAvgLength && pfd.isAvgLength() ) { lastSQL = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getInt(lastSQL)); } if ( findingNullCount && pfd.isSumDecode() ) { lastSQL = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(lastSQL)); } } } catch ( SQLException ex ) { colResult.setError(true); colResult.setEx(ex); logger.error("Error in Column Profiling: "+lastSQL, ex); } finally { colResult.setCreateEndTime(System.currentTimeMillis()); putResult(col, colResult); try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } rs = null; } i++; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT \""); sql.append(col.getName()); sql.append("\" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(),table.getSchemaName(),table.getName())); sql.append(" GROUP BY \""); sql.append(col.getName()); sql.append("\" ORDER BY COUNT1 DESC"); colResult = (ColumnProfileResult) getResult(col); try { lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < 10; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } catch ( SQLException ex ) { colResult.setError(true); colResult.setEx(ex); logger.error("Error in Column Profiling: "+lastSQL, ex); } finally { colResult.setCreateEndTime(System.currentTimeMillis()); putResult(col, colResult); try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } rs = null; } } } // XXX: add where filter later } finally { try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } try { if (stmt != null) stmt.close(); } catch (SQLException ex) { 
logger.error("Couldn't clean up statement", ex); } } }
|
private void doColumnProfile(List<SQLColumn> columns, Connection conn) throws SQLException { Statement stmt = null; ResultSet rs = null; String lastSQL = null; try { if ( columns.size() == 0 ) return; SQLColumn col1 = columns.get(0); DDLGenerator ddlg = null; Class generatorClass = null; try { Map ddlGeneratorMap = ArchitectUtils.getDriverDDLGeneratorMap(); generatorClass = (Class) ddlGeneratorMap.get( col1.getParentTable().getParentDatabase().getDataSource().getDriverClass()); // FIXME: make warning user visable if (generatorClass == null) { System.out.println("Unable to create Profile for the target database."); return; } ddlg = (DDLGenerator) generatorClass.newInstance(); } catch (InstantiationException e1) { logger.error("problem running Profile Manager", e1); } catch ( IllegalAccessException e1 ) { logger.error("problem running Profile Manager", e1); } stmt = conn.createStatement(); stmt.setEscapeProcessing(false); int i = 0; for (SQLColumn col : columns ) { ProfileFunctionDescriptor pfd = ddlg.getProfileFunctionMap().get(col.getSourceDataTypeName()); ColumnProfileResult colResult = new ColumnProfileResult(System.currentTimeMillis()); if ( pfd == null ) { System.out.println(col.getName()+ " Unknown DataType:(" + col.getSourceDataTypeName() + "). 
please setup the profile function mapping"); continue; } StringBuffer sql = new StringBuffer(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValus() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n AVG(\""); sql.append(col.getName()); sql.append("\") AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM ").append(DDLUtils.toQualifiedName(table.getCatalogName(),table.getSchemaName(),table.getName())); try { lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { lastSQL = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(lastSQL)); } if (findingMin && pfd.isMinValue() ) { lastSQL = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(lastSQL)); } if (findingMax && pfd.isMaxValus() ) { lastSQL = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(lastSQL)); } if (findingAvg && pfd.isAvgValue() ) { lastSQL = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(lastSQL)); } if (findingMinLength && 
pfd.isMinLength() ) { lastSQL = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(lastSQL)); } if (findingMaxLength && pfd.isMaxLength() ) { lastSQL = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(lastSQL)); } if (findingAvgLength && pfd.isAvgLength() ) { lastSQL = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getInt(lastSQL)); } if ( findingNullCount && pfd.isSumDecode() ) { lastSQL = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(lastSQL)); } } } catch ( SQLException ex ) { colResult.setError(true); colResult.setEx(ex); logger.error("Error in Column Profiling: "+lastSQL, ex); } finally { colResult.setCreateEndTime(System.currentTimeMillis()); putResult(col, colResult); try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } rs = null; } i++; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT \""); sql.append(col.getName()); sql.append("\" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(),table.getSchemaName(),table.getName())); sql.append(" GROUP BY \""); sql.append(col.getName()); sql.append("\" ORDER BY COUNT1 DESC"); colResult = (ColumnProfileResult) getResult(col); try { lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < 10; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } catch ( SQLException ex ) { colResult.setError(true); colResult.setEx(ex); logger.error("Error in Column Profiling: "+lastSQL, ex); } finally { colResult.setCreateEndTime(System.currentTimeMillis()); putResult(col, colResult); try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } rs = null; } } } // XXX: add where filter later } finally { try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } try { if (stmt != null) stmt.close(); } catch (SQLException ex) { 
logger.error("Couldn't clean up statement", ex); } } }
| 1,109,508
|
private void doColumnProfile(List<SQLColumn> columns, Connection conn) throws SQLException { Statement stmt = null; ResultSet rs = null; String lastSQL = null; try { if ( columns.size() == 0 ) return; SQLColumn col1 = columns.get(0); DDLGenerator ddlg = null; Class generatorClass = null; try { Map ddlGeneratorMap = ArchitectUtils.getDriverDDLGeneratorMap(); generatorClass = (Class) ddlGeneratorMap.get( col1.getParentTable().getParentDatabase().getDataSource().getDriverClass()); // FIXME: make warning user visable if (generatorClass == null) { System.out.println("Unable to create Profile for the target database."); return; } ddlg = (DDLGenerator) generatorClass.newInstance(); } catch (InstantiationException e1) { logger.error("problem running Profile Manager", e1); } catch ( IllegalAccessException e1 ) { logger.error("problem running Profile Manager", e1); } stmt = conn.createStatement(); stmt.setEscapeProcessing(false); int i = 0; for (SQLColumn col : columns ) { ProfileFunctionDescriptor pfd = ddlg.getProfileFunctionMap().get(col.getSourceDataTypeName()); ColumnProfileResult colResult = new ColumnProfileResult(System.currentTimeMillis()); if ( pfd == null ) { System.out.println(col.getName()+ " Unknown DataType:(" + col.getSourceDataTypeName() + "). 
please setup the profile function mapping"); continue; } StringBuffer sql = new StringBuffer(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValus() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n AVG(\""); sql.append(col.getName()); sql.append("\") AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM ").append(DDLUtils.toQualifiedName(table.getCatalogName(),table.getSchemaName(),table.getName())); try { lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { lastSQL = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(lastSQL)); } if (findingMin && pfd.isMinValue() ) { lastSQL = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(lastSQL)); } if (findingMax && pfd.isMaxValus() ) { lastSQL = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(lastSQL)); } if (findingAvg && pfd.isAvgValue() ) { lastSQL = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(lastSQL)); } if (findingMinLength && 
pfd.isMinLength() ) { lastSQL = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(lastSQL)); } if (findingMaxLength && pfd.isMaxLength() ) { lastSQL = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(lastSQL)); } if (findingAvgLength && pfd.isAvgLength() ) { lastSQL = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getInt(lastSQL)); } if ( findingNullCount && pfd.isSumDecode() ) { lastSQL = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(lastSQL)); } } } catch ( SQLException ex ) { colResult.setError(true); colResult.setEx(ex); logger.error("Error in Column Profiling: "+lastSQL, ex); } finally { colResult.setCreateEndTime(System.currentTimeMillis()); putResult(col, colResult); try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } rs = null; } i++; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT \""); sql.append(col.getName()); sql.append("\" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(),table.getSchemaName(),table.getName())); sql.append(" GROUP BY \""); sql.append(col.getName()); sql.append("\" ORDER BY COUNT1 DESC"); colResult = (ColumnProfileResult) getResult(col); try { lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < 10; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } catch ( SQLException ex ) { colResult.setError(true); colResult.setEx(ex); logger.error("Error in Column Profiling: "+lastSQL, ex); } finally { colResult.setCreateEndTime(System.currentTimeMillis()); putResult(col, colResult); try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } rs = null; } } } // XXX: add where filter later } finally { try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } try { if (stmt != null) stmt.close(); } catch (SQLException ex) { 
logger.error("Couldn't clean up statement", ex); } } }
|
private void doColumnProfile(List<SQLColumn> columns, Connection conn) throws SQLException { Statement stmt = null; ResultSet rs = null; String lastSQL = null; try { if ( columns.size() == 0 ) return; SQLColumn col1 = columns.get(0); DDLGenerator ddlg = null; Class generatorClass = null; try { Map ddlGeneratorMap = ArchitectUtils.getDriverDDLGeneratorMap(); generatorClass = (Class) ddlGeneratorMap.get( col1.getParentTable().getParentDatabase().getDataSource().getDriverClass()); // FIXME: make warning user visable if (generatorClass == null) { System.out.println("Unable to create Profile for the target database."); return; } ddlg = (DDLGenerator) generatorClass.newInstance(); } catch (InstantiationException e1) { logger.error("problem running Profile Manager", e1); } catch ( IllegalAccessException e1 ) { logger.error("problem running Profile Manager", e1); } stmt = conn.createStatement(); stmt.setEscapeProcessing(false); int i = 0; for (SQLColumn col : columns ) { ProfileFunctionDescriptor pfd = ddlg.getProfileFunctionMap().get(col.getSourceDataTypeName()); ColumnProfileResult colResult = new ColumnProfileResult(System.currentTimeMillis()); if ( pfd == null ) { System.out.println(col.getName()+ " Unknown DataType:(" + col.getSourceDataTypeName() + "). 
please setup the profile function mapping"); continue; } StringBuffer sql = new StringBuffer(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValus() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n AVG(\""); sql.append(col.getName()); sql.append("\") AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM ").append(DDLUtils.toQualifiedName(table.getCatalogName(),table.getSchemaName(),table.getName())); try { lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { lastSQL = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(lastSQL)); } if (findingMin && pfd.isMinValue() ) { lastSQL = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(lastSQL)); } if (findingMax && pfd.isMaxValus() ) { lastSQL = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(lastSQL)); } if (findingAvg && pfd.isAvgValue() ) { lastSQL = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(lastSQL)); } if (findingMinLength && 
pfd.isMinLength() ) { lastSQL = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(lastSQL)); } if (findingMaxLength && pfd.isMaxLength() ) { lastSQL = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(lastSQL)); } if (findingAvgLength && pfd.isAvgLength() ) { lastSQL = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getInt(lastSQL)); } if ( findingNullCount && pfd.isSumDecode() ) { lastSQL = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(lastSQL)); } } } catch ( SQLException ex ) { colResult.setError(true); colResult.setEx(ex); logger.error("Error in Column Profiling: "+lastSQL, ex); } finally { colResult.setCreateEndTime(System.currentTimeMillis()); putResult(col, colResult); try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } rs = null; } i++; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT \""); sql.append(col.getName()); sql.append("\" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(),table.getSchemaName(),table.getName())); sql.append(" GROUP BY \""); sql.append(col.getName()); sql.append("\" ORDER BY COUNT1 DESC"); colResult = (ColumnProfileResult) getResult(col); try { lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < 10; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } catch ( SQLException ex ) { colResult.setError(true); colResult.setEx(ex); logger.error("Error in Column Profiling: "+lastSQL, ex); } finally { colResult.setCreateEndTime(System.currentTimeMillis()); putResult(col, colResult); try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } rs = null; } } } // XXX: add where filter later } finally { try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } try { if (stmt != null) stmt.close(); } catch (SQLException ex) { 
logger.error("Couldn't clean up statement", ex); } } }
| 1,109,509
|
private void doColumnProfile(List<SQLColumn> columns, Connection conn) throws SQLException { Statement stmt = null; ResultSet rs = null; String lastSQL = null; try { if ( columns.size() == 0 ) return; SQLColumn col1 = columns.get(0); DDLGenerator ddlg = null; Class generatorClass = null; try { Map ddlGeneratorMap = ArchitectUtils.getDriverDDLGeneratorMap(); generatorClass = (Class) ddlGeneratorMap.get( col1.getParentTable().getParentDatabase().getDataSource().getDriverClass()); // FIXME: make warning user visable if (generatorClass == null) { System.out.println("Unable to create Profile for the target database."); return; } ddlg = (DDLGenerator) generatorClass.newInstance(); } catch (InstantiationException e1) { logger.error("problem running Profile Manager", e1); } catch ( IllegalAccessException e1 ) { logger.error("problem running Profile Manager", e1); } stmt = conn.createStatement(); stmt.setEscapeProcessing(false); int i = 0; for (SQLColumn col : columns ) { ProfileFunctionDescriptor pfd = ddlg.getProfileFunctionMap().get(col.getSourceDataTypeName()); ColumnProfileResult colResult = new ColumnProfileResult(System.currentTimeMillis()); if ( pfd == null ) { System.out.println(col.getName()+ " Unknown DataType:(" + col.getSourceDataTypeName() + "). 
please setup the profile function mapping"); continue; } StringBuffer sql = new StringBuffer(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValus() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n AVG(\""); sql.append(col.getName()); sql.append("\") AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM ").append(DDLUtils.toQualifiedName(table.getCatalogName(),table.getSchemaName(),table.getName())); try { lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { lastSQL = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(lastSQL)); } if (findingMin && pfd.isMinValue() ) { lastSQL = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(lastSQL)); } if (findingMax && pfd.isMaxValus() ) { lastSQL = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(lastSQL)); } if (findingAvg && pfd.isAvgValue() ) { lastSQL = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(lastSQL)); } if (findingMinLength && 
pfd.isMinLength() ) { lastSQL = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(lastSQL)); } if (findingMaxLength && pfd.isMaxLength() ) { lastSQL = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(lastSQL)); } if (findingAvgLength && pfd.isAvgLength() ) { lastSQL = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getInt(lastSQL)); } if ( findingNullCount && pfd.isSumDecode() ) { lastSQL = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(lastSQL)); } } } catch ( SQLException ex ) { colResult.setError(true); colResult.setEx(ex); logger.error("Error in Column Profiling: "+lastSQL, ex); } finally { colResult.setCreateEndTime(System.currentTimeMillis()); putResult(col, colResult); try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } rs = null; } i++; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT \""); sql.append(col.getName()); sql.append("\" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(),table.getSchemaName(),table.getName())); sql.append(" GROUP BY \""); sql.append(col.getName()); sql.append("\" ORDER BY COUNT1 DESC"); colResult = (ColumnProfileResult) getResult(col); try { lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < 10; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } catch ( SQLException ex ) { colResult.setError(true); colResult.setEx(ex); logger.error("Error in Column Profiling: "+lastSQL, ex); } finally { colResult.setCreateEndTime(System.currentTimeMillis()); putResult(col, colResult); try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } rs = null; } } } // XXX: add where filter later } finally { try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } try { if (stmt != null) stmt.close(); } catch (SQLException ex) { 
logger.error("Couldn't clean up statement", ex); } } }
|
private void doColumnProfile(List<SQLColumn> columns, Connection conn) throws SQLException { Statement stmt = null; ResultSet rs = null; String lastSQL = null; try { if ( columns.size() == 0 ) return; SQLColumn col1 = columns.get(0); DDLGenerator ddlg = null; Class generatorClass = null; try { Map ddlGeneratorMap = ArchitectUtils.getDriverDDLGeneratorMap(); generatorClass = (Class) ddlGeneratorMap.get( col1.getParentTable().getParentDatabase().getDataSource().getDriverClass()); // FIXME: make warning user visable if (generatorClass == null) { System.out.println("Unable to create Profile for the target database."); return; } ddlg = (DDLGenerator) generatorClass.newInstance(); } catch (InstantiationException e1) { logger.error("problem running Profile Manager", e1); } catch ( IllegalAccessException e1 ) { logger.error("problem running Profile Manager", e1); } stmt = conn.createStatement(); stmt.setEscapeProcessing(false); int i = 0; for (SQLColumn col : columns ) { ProfileFunctionDescriptor pfd = ddlg.getProfileFunctionMap().get(col.getSourceDataTypeName()); ColumnProfileResult colResult = new ColumnProfileResult(System.currentTimeMillis()); if ( pfd == null ) { System.out.println(col.getName()+ " Unknown DataType:(" + col.getSourceDataTypeName() + "). 
please setup the profile function mapping"); continue; } StringBuffer sql = new StringBuffer(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValus() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n AVG(\""); sql.append(col.getName()); sql.append("\") AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); try { lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { lastSQL = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(lastSQL)); } if (findingMin && pfd.isMinValue() ) { lastSQL = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(lastSQL)); } if (findingMax && pfd.isMaxValus() ) { lastSQL = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(lastSQL)); } if (findingAvg && pfd.isAvgValue() ) { lastSQL = "AVGVALUE_"+i; 
colResult.setAvgValue(rs.getObject(lastSQL)); } if (findingMinLength && pfd.isMinLength() ) { lastSQL = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(lastSQL)); } if (findingMaxLength && pfd.isMaxLength() ) { lastSQL = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(lastSQL)); } if (findingAvgLength && pfd.isAvgLength() ) { lastSQL = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getInt(lastSQL)); } if ( findingNullCount && pfd.isSumDecode() ) { lastSQL = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(lastSQL)); } } } catch ( SQLException ex ) { colResult.setError(true); colResult.setEx(ex); logger.error("Error in Column Profiling: "+lastSQL, ex); } finally { colResult.setCreateEndTime(System.currentTimeMillis()); putResult(col, colResult); try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } rs = null; } i++; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT \""); sql.append(col.getName()); sql.append("\" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(),table.getSchemaName(),table.getName())); sql.append(" GROUP BY \""); sql.append(col.getName()); sql.append("\" ORDER BY COUNT1 DESC"); colResult = (ColumnProfileResult) getResult(col); try { lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < 10; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } catch ( SQLException ex ) { colResult.setError(true); colResult.setEx(ex); logger.error("Error in Column Profiling: "+lastSQL, ex); } finally { colResult.setCreateEndTime(System.currentTimeMillis()); putResult(col, colResult); try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } rs = null; } } } // XXX: add where filter later } finally { try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", 
ex); } try { if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up statement", ex); } } }
| 1,109,510
|
private void doColumnProfile(List<SQLColumn> columns, Connection conn) throws SQLException { Statement stmt = null; ResultSet rs = null; String lastSQL = null; try { if ( columns.size() == 0 ) return; SQLColumn col1 = columns.get(0); DDLGenerator ddlg = null; Class generatorClass = null; try { Map ddlGeneratorMap = ArchitectUtils.getDriverDDLGeneratorMap(); generatorClass = (Class) ddlGeneratorMap.get( col1.getParentTable().getParentDatabase().getDataSource().getDriverClass()); // FIXME: make warning user visable if (generatorClass == null) { System.out.println("Unable to create Profile for the target database."); return; } ddlg = (DDLGenerator) generatorClass.newInstance(); } catch (InstantiationException e1) { logger.error("problem running Profile Manager", e1); } catch ( IllegalAccessException e1 ) { logger.error("problem running Profile Manager", e1); } stmt = conn.createStatement(); stmt.setEscapeProcessing(false); int i = 0; for (SQLColumn col : columns ) { ProfileFunctionDescriptor pfd = ddlg.getProfileFunctionMap().get(col.getSourceDataTypeName()); ColumnProfileResult colResult = new ColumnProfileResult(System.currentTimeMillis()); if ( pfd == null ) { System.out.println(col.getName()+ " Unknown DataType:(" + col.getSourceDataTypeName() + "). 
please setup the profile function mapping"); continue; } StringBuffer sql = new StringBuffer(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValus() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n AVG(\""); sql.append(col.getName()); sql.append("\") AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM ").append(DDLUtils.toQualifiedName(table.getCatalogName(),table.getSchemaName(),table.getName())); try { lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { lastSQL = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(lastSQL)); } if (findingMin && pfd.isMinValue() ) { lastSQL = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(lastSQL)); } if (findingMax && pfd.isMaxValus() ) { lastSQL = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(lastSQL)); } if (findingAvg && pfd.isAvgValue() ) { lastSQL = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(lastSQL)); } if (findingMinLength && 
pfd.isMinLength() ) { lastSQL = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(lastSQL)); } if (findingMaxLength && pfd.isMaxLength() ) { lastSQL = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(lastSQL)); } if (findingAvgLength && pfd.isAvgLength() ) { lastSQL = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getInt(lastSQL)); } if ( findingNullCount && pfd.isSumDecode() ) { lastSQL = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(lastSQL)); } } } catch ( SQLException ex ) { colResult.setError(true); colResult.setEx(ex); logger.error("Error in Column Profiling: "+lastSQL, ex); } finally { colResult.setCreateEndTime(System.currentTimeMillis()); putResult(col, colResult); try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } rs = null; } i++; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT \""); sql.append(col.getName()); sql.append("\" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(),table.getSchemaName(),table.getName())); sql.append(" GROUP BY \""); sql.append(col.getName()); sql.append("\" ORDER BY COUNT1 DESC"); colResult = (ColumnProfileResult) getResult(col); try { lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < 10; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } catch ( SQLException ex ) { colResult.setError(true); colResult.setEx(ex); logger.error("Error in Column Profiling: "+lastSQL, ex); } finally { colResult.setCreateEndTime(System.currentTimeMillis()); putResult(col, colResult); try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } rs = null; } } } // XXX: add where filter later } finally { try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } try { if (stmt != null) stmt.close(); } catch (SQLException ex) { 
logger.error("Couldn't clean up statement", ex); } } }
|
private void doColumnProfile(List<SQLColumn> columns, Connection conn) throws SQLException { Statement stmt = null; ResultSet rs = null; String lastSQL = null; try { if ( columns.size() == 0 ) return; SQLColumn col1 = columns.get(0); DDLGenerator ddlg = null; Class generatorClass = null; try { Map ddlGeneratorMap = ArchitectUtils.getDriverDDLGeneratorMap(); generatorClass = (Class) ddlGeneratorMap.get( col1.getParentTable().getParentDatabase().getDataSource().getDriverClass()); // FIXME: make warning user visable if (generatorClass == null) { System.out.println("Unable to create Profile for the target database."); return; } ddlg = (DDLGenerator) generatorClass.newInstance(); } catch (InstantiationException e1) { logger.error("problem running Profile Manager", e1); } catch ( IllegalAccessException e1 ) { logger.error("problem running Profile Manager", e1); } stmt = conn.createStatement(); stmt.setEscapeProcessing(false); int i = 0; for (SQLColumn col : columns ) { ProfileFunctionDescriptor pfd = ddlg.getProfileFunctionMap().get(col.getSourceDataTypeName()); ColumnProfileResult colResult = new ColumnProfileResult(System.currentTimeMillis()); if ( pfd == null ) { System.out.println(col.getName()+ " Unknown DataType:(" + col.getSourceDataTypeName() + "). 
please setup the profile function mapping"); continue; } StringBuffer sql = new StringBuffer(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValus() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n AVG(\""); sql.append(col.getName()); sql.append("\") AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG(LENGTH(\""); sql.append(col.getName()); sql.append("\")) AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM ").append(DDLUtils.toQualifiedName(table.getCatalogName(),table.getSchemaName(),table.getName())); try { lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { lastSQL = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(lastSQL)); } if (findingMin && pfd.isMinValue() ) { lastSQL = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(lastSQL)); } if (findingMax && pfd.isMaxValus() ) { lastSQL = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(lastSQL)); } if (findingAvg && pfd.isAvgValue() ) { lastSQL = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(lastSQL)); } if (findingMinLength && 
pfd.isMinLength() ) { lastSQL = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(lastSQL)); } if (findingMaxLength && pfd.isMaxLength() ) { lastSQL = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(lastSQL)); } if (findingAvgLength && pfd.isAvgLength() ) { lastSQL = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getInt(lastSQL)); } if ( findingNullCount && pfd.isSumDecode() ) { lastSQL = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(lastSQL)); } } } catch ( SQLException ex ) { colResult.setError(true); colResult.setEx(ex); logger.error("Error in Column Profiling: "+lastSQL, ex); } finally { colResult.setCreateEndTime(System.currentTimeMillis()); putResult(col, colResult); try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } rs = null; } i++; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT \""); sql.append(col.getName()); sql.append("\" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY \""); sql.append(col.getName()); sql.append("\" ORDER BY COUNT1 DESC"); colResult = (ColumnProfileResult) getResult(col); try { lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < 10; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } catch ( SQLException ex ) { colResult.setError(true); colResult.setEx(ex); logger.error("Error in Column Profiling: "+lastSQL, ex); } finally { colResult.setCreateEndTime(System.currentTimeMillis()); putResult(col, colResult); try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } rs = null; } } } // XXX: add where filter later } finally { try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } try 
{ if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up statement", ex); } } }
| 1,109,511
|
private void doTableProfile(SQLTable table) throws SQLException, ArchitectException { SQLDatabase db = table.getParentDatabase(); Connection conn = null; Statement stmt = null; ResultSet rs = null; String lastSQL = null; TableProfileResult tableResult = new TableProfileResult(System.currentTimeMillis()); try { conn = db.getConnection(); Map ddlGeneratorMap = ArchitectUtils.getDriverDDLGeneratorMap(); Class generatorClass = (Class) ddlGeneratorMap.get( db.getDataSource().getDriverClass()); if (generatorClass == null) { System.out.println("Unable to create Profile for the target database."); return; } StringBuffer sql = new StringBuffer(); sql.append("SELECT COUNT(*) AS ROWCOUNT"); sql.append("\nFROM ").append(DDLUtils.toQualifiedName(table.getCatalogName(),table.getSchemaName(),table.getName())); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); if ( rs.next() ) { tableResult.setCreateEndTime(System.currentTimeMillis()); tableResult.setRowCount(rs.getInt("ROWCOUNT")); } rs.close(); rs = null; doColumnProfile(table.getColumns(), conn); // XXX: add where filter later } catch (SQLException ex) { logger.error("Error in SQL query: "+lastSQL, ex); tableResult.setError(true); tableResult.setEx(ex); } finally { try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } try { if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up statement", ex); } try { if (conn != null) conn.close(); } catch (SQLException ex) { logger.error("Couldn't clean up connection", ex); } putResult(table, tableResult); } }
|
private void doTableProfile(SQLTable table) throws SQLException, ArchitectException { SQLDatabase db = table.getParentDatabase(); Connection conn = null; Statement stmt = null; ResultSet rs = null; String lastSQL = null; TableProfileResult tableResult = new TableProfileResult(System.currentTimeMillis()); try { conn = db.getConnection(); Map ddlGeneratorMap = ArchitectUtils.getDriverDDLGeneratorMap(); Class generatorClass = (Class) ddlGeneratorMap.get( db.getDataSource().getDriverClass()); if (generatorClass == null) { System.out.println("Unable to create Profile for the target database."); return; } StringBuffer sql = new StringBuffer(); sql.append("SELECT COUNT(*) AS ROWCOUNT"); sql.append("\nFROM ").append(DDLUtils.toQualifiedName(table.getCatalogName(),table.getSchemaName(),table.getName())); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); if ( rs.next() ) { tableResult.setCreateEndTime(System.currentTimeMillis()); tableResult.setRowCount(rs.getInt("ROWCOUNT")); } rs.close(); rs = null; doColumnProfile(table.getColumns(), conn); // XXX: add where filter later } catch (SQLException ex) { logger.error("Error in SQL query: "+lastSQL, ex); tableResult.setError(true); tableResult.setEx(ex); } finally { try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } try { if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up statement", ex); } try { if (conn != null) conn.close(); } catch (SQLException ex) { logger.error("Couldn't clean up connection", ex); } putResult(table, tableResult); } }
| 1,109,513
|
private void doTableProfile(SQLTable table) throws SQLException, ArchitectException { SQLDatabase db = table.getParentDatabase(); Connection conn = null; Statement stmt = null; ResultSet rs = null; String lastSQL = null; TableProfileResult tableResult = new TableProfileResult(System.currentTimeMillis()); try { conn = db.getConnection(); Map ddlGeneratorMap = ArchitectUtils.getDriverDDLGeneratorMap(); Class generatorClass = (Class) ddlGeneratorMap.get( db.getDataSource().getDriverClass()); if (generatorClass == null) { System.out.println("Unable to create Profile for the target database."); return; } StringBuffer sql = new StringBuffer(); sql.append("SELECT COUNT(*) AS ROWCOUNT"); sql.append("\nFROM ").append(DDLUtils.toQualifiedName(table.getCatalogName(),table.getSchemaName(),table.getName())); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); if ( rs.next() ) { tableResult.setCreateEndTime(System.currentTimeMillis()); tableResult.setRowCount(rs.getInt("ROWCOUNT")); } rs.close(); rs = null; doColumnProfile(table.getColumns(), conn); // XXX: add where filter later } catch (SQLException ex) { logger.error("Error in SQL query: "+lastSQL, ex); tableResult.setError(true); tableResult.setEx(ex); } finally { try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } try { if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up statement", ex); } try { if (conn != null) conn.close(); } catch (SQLException ex) { logger.error("Couldn't clean up connection", ex); } putResult(table, tableResult); } }
|
private void doTableProfile(SQLTable table) throws SQLException, ArchitectException { SQLDatabase db = table.getParentDatabase(); Connection conn = null; Statement stmt = null; ResultSet rs = null; String lastSQL = null; TableProfileResult tableResult = new TableProfileResult(System.currentTimeMillis()); try { conn = db.getConnection(); Map ddlGeneratorMap = ArchitectUtils.getDriverDDLGeneratorMap(); Class generatorClass = (Class) ddlGeneratorMap.get( db.getDataSource().getDriverClass()); if (generatorClass == null) { System.out.println("Unable to create Profile for the target database."); return; } StringBuffer sql = new StringBuffer(); sql.append("SELECT COUNT(*) AS ROWCOUNT"); sql.append("\nFROM ").append(DDLUtils.toQualifiedName(table.getCatalogName(),table.getSchemaName(),table.getName())); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); if ( rs.next() ) { tableResult.setCreateEndTime(System.currentTimeMillis()); tableResult.setRowCount(rs.getInt("ROW__COUNT")); } rs.close(); rs = null; doColumnProfile(table.getColumns(), conn); // XXX: add where filter later } catch (SQLException ex) { logger.error("Error in SQL query: "+lastSQL, ex); tableResult.setError(true); tableResult.setEx(ex); } finally { try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } try { if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up statement", ex); } try { if (conn != null) conn.close(); } catch (SQLException ex) { logger.error("Couldn't clean up connection", ex); } putResult(table, tableResult); } }
| 1,109,514
|
public Object getValueAt(int rowIndex, int columnIndex) { ColumnProfileResult columnProfile = resultList.get(rowIndex); SQLColumn col = columnProfile.getProfiledObject(); int rowCount = ((TableProfileResult) profileManager.getResult(col.getParentTable())).getRowCount(); if (columnIndex == 0) { return ArchitectUtils.getAncestor(col,SQLDatabase.class); } else if (columnIndex == 1) { return ArchitectUtils.getAncestor(col,SQLCatalog.class); } else if (columnIndex == 2) { return ArchitectUtils.getAncestor(col,SQLSchema.class); } else if (columnIndex == 3) { return ArchitectUtils.getAncestor(col,SQLTable.class); } else if (columnIndex == 4) { return col; } else if (columnIndex == 5) { // Run date return columnProfile.getCreateStartTime(); } else if (columnIndex == 6) { // Row Count return rowCount; } else if (columnIndex == 7) { // data type DDLGenerator gddl; try { gddl = DDLUtils.createDDLGenerator(col.getParentTable().getParentDatabase().getDataSource()); return gddl.columnType(col); } catch (Exception e) { throw new ArchitectRuntimeException(new ArchitectException( "Unable to get DDL information. Do we have a valid data source?", e)); } } else if (columnIndex == 8) { // Number of null records return columnProfile.getNullCount(); } else if (columnIndex == 9) { // Percent null records return columnProfile.getNullCount() == 0 ? null : columnProfile.getNullCount() * 100D / rowCount ; } else if (columnIndex == 10) { // Number of unique records return columnProfile.getDistinctValueCount(); } else if (columnIndex == 11) { // percent of unique records return columnProfile.getDistinctValueCount() == 0 ? 
null : columnProfile.getDistinctValueCount() * 100D / rowCount; } else if (columnIndex == 12) { // min Length return columnProfile.getMinLength(); } else if (columnIndex == 13) { // Max Length return columnProfile.getMaxLength(); } else if (columnIndex == 14) { // Avg Length return columnProfile.getAvgLength(); } else if (columnIndex == 15) { // min Value return columnProfile.getMinValue(); } else if (columnIndex == 16) { // Max value return columnProfile.getMaxValue(); } else if (columnIndex == 17) { // Avg Value return columnProfile.getAvgValue(); } else { throw new IllegalArgumentException("Column Index out of bounds"); } }
|
public Object getValueAt(int rowIndex, int columnIndex) { ColumnProfileResult columnProfile = resultList.get(rowIndex); SQLColumn col = columnProfile.getProfiledObject(); int rowCount = ((TableProfileResult) profileManager.getResult(col.getParentTable())).getRowCount(); if (columnIndex == 0) { return ArchitectUtils.getAncestor(col,SQLDatabase.class); } else if (columnIndex == 1) { return ArchitectUtils.getAncestor(col,SQLCatalog.class); } else if (columnIndex == 2) { return ArchitectUtils.getAncestor(col,SQLSchema.class); } else if (columnIndex == 3) { return ArchitectUtils.getAncestor(col,SQLTable.class); } else if (columnIndex == 4) { return col; } else if (columnIndex == 5) { // Run date return columnProfile.getCreateStartTime(); } else if (columnIndex == 6) { // Row Count return rowCount; } else if (columnIndex == 7) { // data type DDLGenerator gddl; try { gddl = DDLUtils.createDDLGenerator(col.getParentTable().getParentDatabase().getDataSource()); return gddl.columnType(col); } catch (Exception e) { throw new ArchitectRuntimeException(new ArchitectException( "Unable to get DDL information. Do we have a valid data source?", e)); } } else if (columnIndex == 8) { // Number of null records return columnProfile.getNullCount(); } else if (columnIndex == 9) { // Percent null records return rowCount == 0 ? null : (double)columnProfile.getNullCount() / rowCount ; } else if (columnIndex == 10) { // Number of unique records return columnProfile.getDistinctValueCount(); } else if (columnIndex == 11) { // percent of unique records return columnProfile.getDistinctValueCount() == 0 ? 
null : columnProfile.getDistinctValueCount() * 100D / rowCount; } else if (columnIndex == 12) { // min Length return columnProfile.getMinLength(); } else if (columnIndex == 13) { // Max Length return columnProfile.getMaxLength(); } else if (columnIndex == 14) { // Avg Length return columnProfile.getAvgLength(); } else if (columnIndex == 15) { // min Value return columnProfile.getMinValue(); } else if (columnIndex == 16) { // Max value return columnProfile.getMaxValue(); } else if (columnIndex == 17) { // Avg Value return columnProfile.getAvgValue(); } else { throw new IllegalArgumentException("Column Index out of bounds"); } }
| 1,109,515
|
public Object getValueAt(int rowIndex, int columnIndex) { ColumnProfileResult columnProfile = resultList.get(rowIndex); SQLColumn col = columnProfile.getProfiledObject(); int rowCount = ((TableProfileResult) profileManager.getResult(col.getParentTable())).getRowCount(); if (columnIndex == 0) { return ArchitectUtils.getAncestor(col,SQLDatabase.class); } else if (columnIndex == 1) { return ArchitectUtils.getAncestor(col,SQLCatalog.class); } else if (columnIndex == 2) { return ArchitectUtils.getAncestor(col,SQLSchema.class); } else if (columnIndex == 3) { return ArchitectUtils.getAncestor(col,SQLTable.class); } else if (columnIndex == 4) { return col; } else if (columnIndex == 5) { // Run date return columnProfile.getCreateStartTime(); } else if (columnIndex == 6) { // Row Count return rowCount; } else if (columnIndex == 7) { // data type DDLGenerator gddl; try { gddl = DDLUtils.createDDLGenerator(col.getParentTable().getParentDatabase().getDataSource()); return gddl.columnType(col); } catch (Exception e) { throw new ArchitectRuntimeException(new ArchitectException( "Unable to get DDL information. Do we have a valid data source?", e)); } } else if (columnIndex == 8) { // Number of null records return columnProfile.getNullCount(); } else if (columnIndex == 9) { // Percent null records return columnProfile.getNullCount() == 0 ? null : columnProfile.getNullCount() * 100D / rowCount ; } else if (columnIndex == 10) { // Number of unique records return columnProfile.getDistinctValueCount(); } else if (columnIndex == 11) { // percent of unique records return columnProfile.getDistinctValueCount() == 0 ? 
null : columnProfile.getDistinctValueCount() * 100D / rowCount; } else if (columnIndex == 12) { // min Length return columnProfile.getMinLength(); } else if (columnIndex == 13) { // Max Length return columnProfile.getMaxLength(); } else if (columnIndex == 14) { // Avg Length return columnProfile.getAvgLength(); } else if (columnIndex == 15) { // min Value return columnProfile.getMinValue(); } else if (columnIndex == 16) { // Max value return columnProfile.getMaxValue(); } else if (columnIndex == 17) { // Avg Value return columnProfile.getAvgValue(); } else { throw new IllegalArgumentException("Column Index out of bounds"); } }
|
public Object getValueAt(int rowIndex, int columnIndex) { ColumnProfileResult columnProfile = resultList.get(rowIndex); SQLColumn col = columnProfile.getProfiledObject(); int rowCount = ((TableProfileResult) profileManager.getResult(col.getParentTable())).getRowCount(); if (columnIndex == 0) { return ArchitectUtils.getAncestor(col,SQLDatabase.class); } else if (columnIndex == 1) { return ArchitectUtils.getAncestor(col,SQLCatalog.class); } else if (columnIndex == 2) { return ArchitectUtils.getAncestor(col,SQLSchema.class); } else if (columnIndex == 3) { return ArchitectUtils.getAncestor(col,SQLTable.class); } else if (columnIndex == 4) { return col; } else if (columnIndex == 5) { // Run date return columnProfile.getCreateStartTime(); } else if (columnIndex == 6) { // Row Count return rowCount; } else if (columnIndex == 7) { // data type DDLGenerator gddl; try { gddl = DDLUtils.createDDLGenerator(col.getParentTable().getParentDatabase().getDataSource()); return gddl.columnType(col); } catch (Exception e) { throw new ArchitectRuntimeException(new ArchitectException( "Unable to get DDL information. Do we have a valid data source?", e)); } } else if (columnIndex == 8) { // Number of null records return columnProfile.getNullCount(); } else if (columnIndex == 9) { // Percent null records return columnProfile.getNullCount() == 0 ? null : columnProfile.getNullCount() * 100D / rowCount ; } else if (columnIndex == 10) { // Number of unique records return columnProfile.getDistinctValueCount(); } else if (columnIndex == 11) { // percent of unique records return rowCount == 0 ? 
null : (double)columnProfile.getDistinctValueCount() / rowCount; } else if (columnIndex == 12) { // min Length return columnProfile.getMinLength(); } else if (columnIndex == 13) { // Max Length return columnProfile.getMaxLength(); } else if (columnIndex == 14) { // Avg Length return columnProfile.getAvgLength(); } else if (columnIndex == 15) { // min Value return columnProfile.getMinValue(); } else if (columnIndex == 16) { // Max value return columnProfile.getMaxValue(); } else if (columnIndex == 17) { // Avg Value return columnProfile.getAvgValue(); } else { throw new IllegalArgumentException("Column Index out of bounds"); } }
| 1,109,516
|
/**
 * Finds the TablePane child whose model is the given table.
 *
 * @param t the SQLTable to look for (matched by identity)
 * @return the matching TablePane, or {@code null} if no child shows it
 */
public TablePane findTablePane(SQLTable t) {
    int childCount = getComponentCount();
    for (int i = 0; i < childCount; i++) {
        Component child = getComponent(i);
        if (!(child instanceof TablePane)) {
            continue;
        }
        TablePane tp = (TablePane) child;
        if (tp.getModel() == t) {
            return tp;
        }
    }
    return null;
}
|
/**
 * Finds the TablePane child whose model is the given table.
 *
 * @param t the SQLTable to look for (matched by identity)
 * @return the matching TablePane, or {@code null} if no child shows it
 */
public TablePane findTablePane(SQLTable t) {
    for (int i = 0, n = getComponentCount(); i < n; i++) {
        Component candidate = getComponent(i);
        if (candidate instanceof TablePane) {
            TablePane pane = (TablePane) candidate;
            if (pane.getModel() == t) {
                return pane;
            }
        }
    }
    // No child of this container displays the requested table.
    return null;
}
| 1,109,517
|
public String getTextForCell(int row, int col) { // note: this will only work because we know all the renderers are jlabels JLabel renderer = (JLabel) getCellRenderer(row, col).getTableCellRendererComponent(this, getModel().getValueAt(row, col), false, false, row, col); return renderer.getText(); }
|
public String getTextForCell(int row, int col) { // note: this will only work because we know all the renderers are jlabels JLabel renderer = (JLabel) getCellRenderer(row, col).getTableCellRendererComponent(this, getModel().getValueAt(row, getColumnModel().getColumn(col).getModelIndex()), false, false, row, col); return renderer.getText(); }
| 1,109,518
|
public void run(JellyContext context, XMLOutput output) throws Exception { if ( ! context.isCacheTags() ) { clearTag(); } try { Tag tag = getTag(); if ( tag == null ) { return; } tag.setContext(context); if ( tag instanceof DynaTag ) { DynaTag dynaTag = (DynaTag) tag; // ### probably compiling this to 2 arrays might be quicker and smaller for (Iterator iter = attributes.entrySet().iterator(); iter.hasNext();) { Map.Entry entry = (Map.Entry) iter.next(); String name = (String) entry.getKey(); Expression expression = (Expression) entry.getValue(); Object value = expression.evaluate(context); dynaTag.setAttribute(name, value); } } else { // treat the tag as a bean DynaBean dynaBean = new ConvertingWrapDynaBean( tag ); for (Iterator iter = attributes.entrySet().iterator(); iter.hasNext();) { Map.Entry entry = (Map.Entry) iter.next(); String name = (String) entry.getKey(); Expression expression = (Expression) entry.getValue(); Object value = expression.evaluate(context); dynaBean.set(name, value); } } tag.doTag(output); } catch (JellyException e) { handleException(e); } catch (Exception e) { handleException(e); } }
|
public void run(JellyContext context, XMLOutput output) throws Exception { if ( ! context.isCacheTags() ) { clearTag(); } try { Tag tag = getTag(); if ( tag == null ) { return; } tag.setContext(context); if ( tag instanceof DynaTag ) { DynaTag dynaTag = (DynaTag) tag; // ### probably compiling this to 2 arrays might be quicker and smaller for (Iterator iter = attributes.entrySet().iterator(); iter.hasNext();) { Map.Entry entry = (Map.Entry) iter.next(); String name = (String) entry.getKey(); Expression expression = (Expression) entry.getValue(); Class type = dynaTag.getAttributeType(name); Object value = null; if (type != null && type.isAssignableFrom(Expression.class) && !type.isAssignableFrom(Object.class)) { value = expression; } else { value = expression.evaluate(context); } dynaTag.setAttribute(name, value); } } else { // treat the tag as a bean DynaBean dynaBean = new ConvertingWrapDynaBean( tag ); for (Iterator iter = attributes.entrySet().iterator(); iter.hasNext();) { Map.Entry entry = (Map.Entry) iter.next(); String name = (String) entry.getKey(); Expression expression = (Expression) entry.getValue(); Class type = dynaTag.getAttributeType(name); Object value = null; if (type != null && type.isAssignableFrom(Expression.class) && !type.isAssignableFrom(Object.class)) { value = expression; } else { value = expression.evaluate(context); } dynaBean.set(name, value); } } tag.doTag(output); } catch (JellyException e) { handleException(e); } catch (Exception e) { handleException(e); } }
| 1,109,519
|
private MarkerResult checkMarker(int loc)throws PedFileException{ MarkerResult result = new MarkerResult(); Individual currentInd; //int indivgeno=0, int missing=0, founderHetCount=0, mendErrNum=0; int allele1=0, allele2=0, hom=0, het=0; //Hashtable allgenos = new Hashtable(); Hashtable founderGenoCount = new Hashtable(); Hashtable kidgeno = new Hashtable(); //Hashtable parenthom = new Hashtable(); int[] founderHomCount = new int[5]; //Hashtable count = new Hashtable(); int[] count = new int[5]; for(int i=0;i<5;i++) { founderHomCount[i] =0; count[i]=0; } //loop through each family, check data for marker loc Enumeration famList = _pedFile.getFamList(); while(famList.hasMoreElements()){ Family currentFamily = _pedFile.getFamily((String)famList.nextElement()); Enumeration indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); allele1 = currentInd.getMarkerA(loc); allele2 = currentInd.getMarkerB(loc); //no allele data missing if(allele1 > 0 && allele2 >0){ //make sure entry has parents if (currentFamily.containsMember(currentInd.getMomID()) && currentFamily.containsMember(currentInd.getDadID())){ //do mendel check int momAllele1 = (currentFamily.getMember(currentInd.getMomID())).getMarkerA(loc); int momAllele2 = (currentFamily.getMember(currentInd.getMomID())).getMarkerB(loc); int dadAllele1 = (currentFamily.getMember(currentInd.getDadID())).getMarkerA(loc); int dadAllele2 = (currentFamily.getMember(currentInd.getDadID())).getMarkerB(loc); //don't check if parents are missing any data if (!(momAllele1 == 0 || momAllele2 == 0 || dadAllele1 == 0 || dadAllele2 ==0)){ //mom hom if(momAllele1 == momAllele2){ //both parents hom if (dadAllele1 == dadAllele2){ //both parents hom same allele if (momAllele1 == dadAllele1){ //kid must be hom same allele if (allele1 != momAllele1 || allele2 != momAllele1) { mendErrNum ++; 
currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } //parents hom diff allele }else{ //kid must be het if (allele1 == allele2) { mendErrNum++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom hom dad het }else{ //kid can't be hom for non-momallele if (allele1 != momAllele1 && allele2 != momAllele1){ mendErrNum++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom het }else{ //dad hom if (dadAllele1 == dadAllele2){ //kid can't be hom for non-dadallele if(allele1 != dadAllele1 && allele2 != dadAllele1){ mendErrNum++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //both parents het no mend err poss } } } //end mendel check } } indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); if (currentInd.getZeroed(loc)){ allele1 = 0; allele2 = 0; }else{ allele1 = currentInd.getMarkerA(loc); allele2 = currentInd.getMarkerB(loc); } String familyID = currentInd.getFamilyID(); //no allele data missing if(allele1 > 0 && allele2 >0){ //indiv has no parents -- i.e. 
is a founder if(!currentFamily.hasAncestor(currentInd.getIndividualID())){ //set founderGenoCount if(founderGenoCount.containsKey(familyID)){ int value = ((Integer)founderGenoCount.get(familyID)).intValue() +1; founderGenoCount.put(familyID, new Integer(value)); } else{ founderGenoCount.put(familyID, new Integer(1)); } if(allele1 != allele2) { founderHetCount++; } else{ founderHomCount[allele1]++; } count[allele1]++; count[allele2]++; }else{ if(kidgeno.containsKey(familyID)){ int value = ((Integer)kidgeno.get(familyID)).intValue() +1; kidgeno.put(familyID, new Integer(value)); } else{ kidgeno.put(familyID, new Integer(1)); } } if(allele1 == allele2) { hom++; } else { het++; } } //missing data else missing++; } } double obsHET = getObsHET(het, hom); double freqStuff[] = null; try{ freqStuff = getFreqStuff(count); }catch (PedFileException pfe){ throw new PedFileException("More than two alleles at marker " + (loc+1)); } double preHET = freqStuff[0]; double maf = freqStuff[1]; //HW p value double pvalue = getPValue(founderHomCount, founderHetCount); //geno percent double genopct = getGenoPercent(het, hom, missing); // num of families with a fully genotyped trio //int famTrio =0; int famTrio = getNumOfFamTrio(_pedFile.getFamList(), founderGenoCount, kidgeno); //rating int rating = this.getRating(genopct, pvalue, mendErrNum,maf); result.setObsHet(obsHET); result.setPredHet(preHET); result.setMAF(maf); result.setHWpvalue(pvalue); result.setGenoPercent(genopct); result.setFamTrioNum(famTrio); result.setMendErrNum(mendErrNum); result.setRating(rating); return result; }
|
private MarkerResult checkMarker(int loc)throws PedFileException{ MarkerResult result = new MarkerResult(); Individual currentInd; //int indivgeno=0, int missing=0, founderHetCount=0, mendErrNum=0; int allele1=0, allele2=0, hom=0, het=0; //Hashtable allgenos = new Hashtable(); Hashtable founderGenoCount = new Hashtable(); Hashtable kidgeno = new Hashtable(); //Hashtable parenthom = new Hashtable(); int[] founderHomCount = new int[5]; //Hashtable count = new Hashtable(); int[] count = new int[5]; for(int i=0;i<5;i++) { founderHomCount[i] =0; count[i]=0; } //loop through each family, check data for marker loc Enumeration famList = _pedFile.getFamList(); while(famList.hasMoreElements()){ Family currentFamily = _pedFile.getFamily((String)famList.nextElement()); Enumeration indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); allele1 = currentInd.getMarkerA(loc); allele2 = currentInd.getMarkerB(loc); //no allele data missing if(allele1 > 0 && allele2 >0){ //make sure entry has parents if (currentFamily.containsMember(currentInd.getMomID()) && currentFamily.containsMember(currentInd.getDadID())){ //do mendel check int momAllele1 = (currentFamily.getMember(currentInd.getMomID())).getMarkerA(loc); int momAllele2 = (currentFamily.getMember(currentInd.getMomID())).getMarkerB(loc); int dadAllele1 = (currentFamily.getMember(currentInd.getDadID())).getMarkerA(loc); int dadAllele2 = (currentFamily.getMember(currentInd.getDadID())).getMarkerB(loc); //don't check if parents are missing any data if (!(momAllele1 == 0 || momAllele2 == 0 || dadAllele1 == 0 || dadAllele2 ==0)){ //mom hom if(momAllele1 == momAllele2){ //both parents hom if (dadAllele1 == dadAllele2){ //both parents hom same allele if (momAllele1 == dadAllele1){ //kid must be hom same allele if (allele1 != momAllele1 || allele2 != momAllele1) { mendErrNum ++; 
currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } //parents hom diff allele }else{ //kid must be het if (allele1 == allele2) { mendErrNum++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom hom dad het }else{ //kid can't be hom for non-momallele if (allele1 != momAllele1 && allele2 != momAllele1){ mendErrNum++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom het }else{ //dad hom if (dadAllele1 == dadAllele2){ //kid can't be hom for non-dadallele if(allele1 != dadAllele1 && allele2 != dadAllele1){ mendErrNum++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //both parents het no mend err poss } } } //end mendel check } } indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); if (currentInd.getZeroed(loc)){ allele1 = 0; allele2 = 0; }else{ allele1 = currentInd.getMarkerA(loc); allele2 = currentInd.getMarkerB(loc); } String familyID = currentInd.getFamilyID(); //no allele data missing if(allele1 > 0 && allele2 >0){ //indiv has no parents -- i.e. 
is a founder if(!currentFamily.hasAncestor(currentInd.getIndividualID())){ //set founderGenoCount if(founderGenoCount.containsKey(familyID)){ int value = ((Integer)founderGenoCount.get(familyID)).intValue() +1; founderGenoCount.put(familyID, new Integer(value)); } else{ founderGenoCount.put(familyID, new Integer(1)); } if(allele1 != allele2) { founderHetCount++; } else{ founderHomCount[allele1]++; } count[allele1]++; count[allele2]++; }else{ if(kidgeno.containsKey(familyID)){ int value = ((Integer)kidgeno.get(familyID)).intValue() +1; kidgeno.put(familyID, new Integer(value)); } else{ kidgeno.put(familyID, new Integer(1)); } } if(allele1 == allele2) { hom++; } else { het++; } } //missing data else missing++; } } double obsHET = getObsHET(het, hom); double freqStuff[] = null; try{ freqStuff = getFreqStuff(count); }catch (PedFileException pfe){ throw new PedFileException("More than two alleles at marker " + (loc+1)); } double preHET = freqStuff[0]; double maf = freqStuff[1]; //HW p value double pvalue = getPValue(founderHomCount, founderHetCount); //geno percent double genopct = getGenoPercent(het, hom, missing); // num of families with a fully genotyped trio //int famTrio =0; int famTrio = getNumOfFamTrio(_pedFile.getFamList(), founderGenoCount, kidgeno); //rating int rating = this.getRating(genopct, pvalue, mendErrNum,maf); result.setObsHet(obsHET); result.setPredHet(preHET); result.setMAF(maf); result.setHWpvalue(pvalue); result.setGenoPercent(genopct); result.setFamTrioNum(famTrio); result.setMendErrNum(mendErrNum); result.setRating(rating); return result; }
| 1,109,520
|
private MarkerResult checkMarker(int loc)throws PedFileException{ MarkerResult result = new MarkerResult(); Individual currentInd; //int indivgeno=0, int missing=0, founderHetCount=0, mendErrNum=0; int allele1=0, allele2=0, hom=0, het=0; //Hashtable allgenos = new Hashtable(); Hashtable founderGenoCount = new Hashtable(); Hashtable kidgeno = new Hashtable(); //Hashtable parenthom = new Hashtable(); int[] founderHomCount = new int[5]; //Hashtable count = new Hashtable(); int[] count = new int[5]; for(int i=0;i<5;i++) { founderHomCount[i] =0; count[i]=0; } //loop through each family, check data for marker loc Enumeration famList = _pedFile.getFamList(); while(famList.hasMoreElements()){ Family currentFamily = _pedFile.getFamily((String)famList.nextElement()); Enumeration indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); allele1 = currentInd.getMarkerA(loc); allele2 = currentInd.getMarkerB(loc); //no allele data missing if(allele1 > 0 && allele2 >0){ //make sure entry has parents if (currentFamily.containsMember(currentInd.getMomID()) && currentFamily.containsMember(currentInd.getDadID())){ //do mendel check int momAllele1 = (currentFamily.getMember(currentInd.getMomID())).getMarkerA(loc); int momAllele2 = (currentFamily.getMember(currentInd.getMomID())).getMarkerB(loc); int dadAllele1 = (currentFamily.getMember(currentInd.getDadID())).getMarkerA(loc); int dadAllele2 = (currentFamily.getMember(currentInd.getDadID())).getMarkerB(loc); //don't check if parents are missing any data if (!(momAllele1 == 0 || momAllele2 == 0 || dadAllele1 == 0 || dadAllele2 ==0)){ //mom hom if(momAllele1 == momAllele2){ //both parents hom if (dadAllele1 == dadAllele2){ //both parents hom same allele if (momAllele1 == dadAllele1){ //kid must be hom same allele if (allele1 != momAllele1 || allele2 != momAllele1) { mendErrNum ++; 
currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } //parents hom diff allele }else{ //kid must be het if (allele1 == allele2) { mendErrNum++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom hom dad het }else{ //kid can't be hom for non-momallele if (allele1 != momAllele1 && allele2 != momAllele1){ mendErrNum++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom het }else{ //dad hom if (dadAllele1 == dadAllele2){ //kid can't be hom for non-dadallele if(allele1 != dadAllele1 && allele2 != dadAllele1){ mendErrNum++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //both parents het no mend err poss } } } //end mendel check } } indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); if (currentInd.getZeroed(loc)){ allele1 = 0; allele2 = 0; }else{ allele1 = currentInd.getMarkerA(loc); allele2 = currentInd.getMarkerB(loc); } String familyID = currentInd.getFamilyID(); //no allele data missing if(allele1 > 0 && allele2 >0){ //indiv has no parents -- i.e. 
is a founder if(!currentFamily.hasAncestor(currentInd.getIndividualID())){ //set founderGenoCount if(founderGenoCount.containsKey(familyID)){ int value = ((Integer)founderGenoCount.get(familyID)).intValue() +1; founderGenoCount.put(familyID, new Integer(value)); } else{ founderGenoCount.put(familyID, new Integer(1)); } if(allele1 != allele2) { founderHetCount++; } else{ founderHomCount[allele1]++; } count[allele1]++; count[allele2]++; }else{ if(kidgeno.containsKey(familyID)){ int value = ((Integer)kidgeno.get(familyID)).intValue() +1; kidgeno.put(familyID, new Integer(value)); } else{ kidgeno.put(familyID, new Integer(1)); } } if(allele1 == allele2) { hom++; } else { het++; } } //missing data else missing++; } } double obsHET = getObsHET(het, hom); double freqStuff[] = null; try{ freqStuff = getFreqStuff(count); }catch (PedFileException pfe){ throw new PedFileException("More than two alleles at marker " + (loc+1)); } double preHET = freqStuff[0]; double maf = freqStuff[1]; //HW p value double pvalue = getPValue(founderHomCount, founderHetCount); //geno percent double genopct = getGenoPercent(het, hom, missing); // num of families with a fully genotyped trio //int famTrio =0; int famTrio = getNumOfFamTrio(_pedFile.getFamList(), founderGenoCount, kidgeno); //rating int rating = this.getRating(genopct, pvalue, mendErrNum,maf); result.setObsHet(obsHET); result.setPredHet(preHET); result.setMAF(maf); result.setHWpvalue(pvalue); result.setGenoPercent(genopct); result.setFamTrioNum(famTrio); result.setMendErrNum(mendErrNum); result.setRating(rating); return result; }
|
private MarkerResult checkMarker(int loc)throws PedFileException{ MarkerResult result = new MarkerResult(); Individual currentInd; //int indivgeno=0, int missing=0, founderHetCount=0, mendErrNum=0; int allele1=0, allele2=0, hom=0, het=0; //Hashtable allgenos = new Hashtable(); Hashtable founderGenoCount = new Hashtable(); Hashtable kidgeno = new Hashtable(); //Hashtable parenthom = new Hashtable(); int[] founderHomCount = new int[5]; //Hashtable count = new Hashtable(); int[] count = new int[5]; for(int i=0;i<5;i++) { founderHomCount[i] =0; count[i]=0; } //loop through each family, check data for marker loc Enumeration famList = _pedFile.getFamList(); while(famList.hasMoreElements()){ Family currentFamily = _pedFile.getFamily((String)famList.nextElement()); Enumeration indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); allele1 = currentInd.getMarkerA(loc); allele2 = currentInd.getMarkerB(loc); //no allele data missing if(allele1 > 0 && allele2 >0){ //make sure entry has parents if (currentFamily.containsMember(currentInd.getMomID()) && currentFamily.containsMember(currentInd.getDadID())){ //do mendel check int momAllele1 = (currentFamily.getMember(currentInd.getMomID())).getMarkerA(loc); int momAllele2 = (currentFamily.getMember(currentInd.getMomID())).getMarkerB(loc); int dadAllele1 = (currentFamily.getMember(currentInd.getDadID())).getMarkerA(loc); int dadAllele2 = (currentFamily.getMember(currentInd.getDadID())).getMarkerB(loc); //don't check if parents are missing any data if (!(momAllele1 == 0 || momAllele2 == 0 || dadAllele1 == 0 || dadAllele2 ==0)){ //mom hom if(momAllele1 == momAllele2){ //both parents hom if (dadAllele1 == dadAllele2){ //both parents hom same allele if (momAllele1 == dadAllele1){ //kid must be hom same allele if (allele1 != momAllele1 || allele2 != momAllele1) { mendErrNum ++; 
currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } //parents hom diff allele }else{ //kid must be het if (allele1 == allele2) { mendErrNum++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom hom dad het }else{ //kid can't be hom for non-momallele if (allele1 != momAllele1 && allele2 != momAllele1){ mendErrNum++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom het }else{ //dad hom if (dadAllele1 == dadAllele2){ //kid can't be hom for non-dadallele if(allele1 != dadAllele1 && allele2 != dadAllele1){ mendErrNum++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //both parents het no mend err poss } } } //end mendel check } } indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); if (currentInd.getZeroed(loc)){ allele1 = 0; allele2 = 0; }else{ allele1 = currentInd.getMarkerA(loc); allele2 = currentInd.getMarkerB(loc); } String familyID = currentInd.getFamilyID(); //no allele data missing if(allele1 > 0 && allele2 >0){ //indiv has no parents -- i.e. 
is a founder if(!currentFamily.hasAncestor(currentInd.getIndividualID())){ //set founderGenoCount if(founderGenoCount.containsKey(familyID)){ int value = ((Integer)founderGenoCount.get(familyID)).intValue() +1; founderGenoCount.put(familyID, new Integer(value)); } else{ founderGenoCount.put(familyID, new Integer(1)); } if(allele1 != allele2) { founderHetCount++; } else{ founderHomCount[allele1]++; } count[allele1]++; count[allele2]++; }else{ if(kidgeno.containsKey(familyID)){ int value = ((Integer)kidgeno.get(familyID)).intValue() +1; kidgeno.put(familyID, new Integer(value)); } else{ kidgeno.put(familyID, new Integer(1)); } } if(allele1 == allele2) { hom++; } else { het++; } } //missing data else missing++; } } double obsHET = getObsHET(het, hom); double freqStuff[] = null; try{ freqStuff = getFreqStuff(count); }catch (PedFileException pfe){ throw new PedFileException("More than two alleles at marker " + (loc+1)); } double preHET = freqStuff[0]; double maf = freqStuff[1]; //HW p value double pvalue = getPValue(founderHomCount, founderHetCount); //geno percent double genopct = getGenoPercent(het, hom, missing); // num of families with a fully genotyped trio //int famTrio =0; int famTrio = getNumOfFamTrio(_pedFile.getFamList(), founderGenoCount, kidgeno); //rating int rating = this.getRating(genopct, pvalue, mendErrNum,maf); result.setObsHet(obsHET); result.setPredHet(preHET); result.setMAF(maf); result.setHWpvalue(pvalue); result.setGenoPercent(genopct); result.setFamTrioNum(famTrio); result.setMendErrNum(mendErrNum); result.setRating(rating); return result; }
| 1,109,521
|
/**
 * Creates a new folder event.
 *
 * @param source    the folder that originated the event
 * @param subfolder the subfolder the event concerns
 * @param path      the path from the root to the subfolder; copied
 *                  defensively so later caller-side mutation of the
 *                  array cannot change this event
 */
public PhotoFolderEvent( PhotoFolder source, PhotoFolder subfolder, PhotoFolder[] path ) {
    super( source );
    this.subfolder = subfolder;
    // Defensive copy: never store a caller-owned mutable array directly.
    this.path = (path != null) ? path.clone() : null;
}
|
/**
 * Creates a new folder event.
 *
 * @param source    the folder that originated the event
 * @param subfolder the subfolder the event concerns
 * @param path      the path from the root to the subfolder; a defensive
 *                  copy is stored, or {@code null} if none was given
 */
public PhotoFolderEvent( PhotoFolder source, PhotoFolder subfolder, PhotoFolder[] path ) {
    super( source );
    this.subfolder = subfolder;
    if ( path == null ) {
        this.path = null;
    } else {
        // Store a copy so the caller cannot mutate our state afterwards.
        this.path = path.clone();
    }
}
| 1,109,523
|
public void redo() throws CannotRedoException { try { modifyProperty(event.getNewValue()); } catch (IllegalAccessException e) { logger.error("Couldn't access setter for "+ event.getPropertyName(), e); throw new CannotRedoException(); } catch (InvocationTargetException e) { logger.error("Setter for "+event.getPropertyName()+ " on "+event.getSource()+" threw exception", e); throw new CannotRedoException(); } catch (IntrospectionException e) { logger.error("Couldn't introspect source object "+ event.getSource(), e); throw new CannotRedoException(); } super.redo(); }
|
public void redo() throws CannotRedoException { try { modifyProperty(event.getNewValue()); } finally { event.getSQLSource().setMagicEnabled(true); } catch (IllegalAccessException e) { logger.error("Couldn't access setter for "+ event.getPropertyName(), e); throw new CannotRedoException(); } finally { event.getSQLSource().setMagicEnabled(true); } catch (InvocationTargetException e) { logger.error("Setter for "+event.getPropertyName()+ " on "+event.getSource()+" threw exception", e); throw new CannotRedoException(); } finally { event.getSQLSource().setMagicEnabled(true); } catch (IntrospectionException e) { logger.error("Couldn't introspect source object "+ event.getSource(), e); throw new CannotRedoException(); } finally { event.getSQLSource().setMagicEnabled(true); } super.redo(); } finally { event.getSQLSource().setMagicEnabled(true); }
| 1,109,524
|
public void undo() throws CannotUndoException { try { modifyProperty(event.getOldValue()); } catch (IllegalAccessException e) { logger.error("Couldn't access setter for "+ event.getPropertyName(), e); throw new CannotUndoException(); } catch (InvocationTargetException e) { logger.error("Setter for "+event.getPropertyName()+ " on "+event.getSource()+" threw exception", e); throw new CannotUndoException(); } catch (IntrospectionException e) { logger.error("Couldn't introspect source object "+ event.getSource(), e); throw new CannotUndoException(); } super.undo(); }
|
public void undo() throws CannotUndoException { try { modifyProperty(event.getOldValue()); } finally { event.getSQLSource().setMagicEnabled(true); } catch (IllegalAccessException e) { logger.error("Couldn't access setter for "+ event.getPropertyName(), e); throw new CannotUndoException(); } finally { event.getSQLSource().setMagicEnabled(true); } catch (InvocationTargetException e) { logger.error("Setter for "+event.getPropertyName()+ " on "+event.getSource()+" threw exception", e); throw new CannotUndoException(); } finally { event.getSQLSource().setMagicEnabled(true); } catch (IntrospectionException e) { logger.error("Couldn't introspect source object "+ event.getSource(), e); throw new CannotUndoException(); } finally { event.getSQLSource().setMagicEnabled(true); } super.undo(); } finally { event.getSQLSource().setMagicEnabled(true); }
| 1,109,525
|
public void componentResized( ComponentEvent e) { if ( isFit ) { fit(); } }
|
public void componentResized( ComponentEvent e) { if ( isFit ) { SwingUtilities.invokeLater( new java.lang.Runnable() { public void run() { fit(); } } ); } }
| 1,109,526
|
public SwingUIProject(String name) { this.name = name; this.sourceDatabases = new ArrayList(); this.targetDatabase = new SQLDatabase(); }
|
public SwingUIProject(String name) throws ArchitectException { this.name = name; this.sourceDatabases = new ArrayList(); this.targetDatabase = new SQLDatabase(); }
| 1,109,527
|
public SwingUIProject(String name) { this.name = name; this.sourceDatabases = new ArrayList(); this.targetDatabase = new SQLDatabase(); }
|
public SwingUIProject(String name) { this.name = name; this.sourceDatabases = new ArrayList(); this.targetDatabase = new SQLDatabase(); }
| 1,109,528
|
public List getSourceDatabases() { return this.sourceDatabases; }
|
public DBTree getSourceDatabases() { return this.sourceDatabases; }
| 1,109,529
|
public SQLDatabase getTargetDatabase() { return this.targetDatabase; }
|
public SQLDatabase getTargetDatabase() { return playPen.getDatabase(); }
| 1,109,530
|
public void setSourceDatabases(List argSourceDatabases) { this.sourceDatabases = argSourceDatabases; }
|
public void setSourceDatabases(DBTree argSourceDatabases) { this.sourceDatabases = argSourceDatabases; }
| 1,109,531
|
public void read(File location) throws IOException { final int MODE_READ_DS = 0; // reading a data source section final int MODE_READ_GENERIC = 1; // reading a generic named section int mode = MODE_READ_GENERIC; if (!location.canRead()) { throw new IllegalArgumentException("pl.ini file cannot be read: " + location.getAbsolutePath()); } lastFileRead = location; ArchitectDataSource currentDS = null; Section currentSection = new Section(null); // this accounts for any properties before the first named section fileSections.add(currentSection); fileTime = location.lastModified(); // Can't use Reader to read this file because the encrypted passwords contain non-ASCII characters BufferedInputStream in = new BufferedInputStream(new FileInputStream(location)); byte[] lineBytes = null; while ((lineBytes = readLine(in)) != null) { String line = new String(lineBytes); logger.debug("Read in new line: "+line); if (line.startsWith("[Databases_")) { logger.debug("It's a new database connection spec!"); currentDS = new ArchitectDataSource(); add(currentDS); mode = MODE_READ_DS; } else if (line.startsWith("[")) { logger.debug("It's a new generic section!"); currentSection = new Section(line.substring(1, line.length()-1)); fileSections.add(currentSection); mode = MODE_READ_GENERIC; } else { String key; String value; int equalsIdx = line.indexOf('='); if (equalsIdx > 0) { key = line.substring(0, equalsIdx); value = line.substring(equalsIdx+1, line.length()); } else { key = line; value = null; } logger.debug("key="+key+",val="+value); if (mode == MODE_READ_DS) { if (key.equals("PWD") && value != null) { byte[] cypherBytes = new byte[lineBytes.length - equalsIdx - 1]; System.arraycopy(lineBytes, equalsIdx + 1, cypherBytes, 0, cypherBytes.length); value = decryptPassword(9, cypherBytes); } currentDS.put(key, value); } else if (mode == MODE_READ_GENERIC) { currentSection.put(key, value); } } } in.close(); if (logger.isDebugEnabled()) logger.debug("Finished reading file. 
Parsed contents:\n"+toString()); }
|
public void read(File location) throws IOException { final int MODE_READ_DS = 0; // reading a data source section final int MODE_READ_GENERIC = 1; // reading a generic named section int mode = MODE_READ_GENERIC; if (!location.canRead()) { throw new IllegalArgumentException("pl.ini file cannot be read: " + location.getAbsolutePath()); } lastFileRead = location; ArchitectDataSource currentDS = null; Section currentSection = new Section(null); // this accounts for any properties before the first named section fileSections.add(currentSection); fileTime = location.lastModified(); // Can't use Reader to read this file because the encrypted passwords contain non-ASCII characters BufferedInputStream in = new BufferedInputStream(new FileInputStream(location)); byte[] lineBytes = null; while ((lineBytes = readLine(in)) != null) { String line = new String(lineBytes); logger.debug("Read in new line: "+line); if (line.startsWith("[Databases_")) { logger.debug("It's a new database connection spec!"); currentDS = new ArchitectDataSource(); add(currentDS); mode = MODE_READ_DS; } else if (line.startsWith("[")) { logger.debug("It's a new generic section!"); currentSection = new Section(line.substring(1, line.length()-1)); fileSections.add(currentSection); mode = MODE_READ_GENERIC; } else { String key; String value; int equalsIdx = line.indexOf('='); if (equalsIdx > 0) { key = line.substring(0, equalsIdx); value = line.substring(equalsIdx+1, line.length()); } else { key = line; value = null; } logger.debug("key="+key+",val="+value); if (mode == MODE_READ_DS) { if (key.equals("PWD") && value != null) { byte[] cypherBytes = new byte[lineBytes.length - equalsIdx - 1]; System.arraycopy(lineBytes, equalsIdx + 1, cypherBytes, 0, cypherBytes.length); value = decryptPassword(9, cypherBytes); } currentDS.put(key, value); } else if (mode == MODE_READ_GENERIC) { currentSection.put(key, value); } } } in.close(); if (logger.isDebugEnabled()) logger.debug("Finished reading file. 
Parsed contents:\n"+toString()); }
| 1,109,532
|
public void write(File location) throws IOException { OutputStream out = new BufferedOutputStream(new FileOutputStream(location)); write(out); out.close(); fileTime = location.lastModified(); }
|
public void write(File location) throws IOException { OutputStream out = new BufferedOutputStream(new FileOutputStream(location)); write(out); out.close(); fileTime = location.lastModified(); }
| 1,109,533
|
public void writeSection(OutputStream out, String name, Map properties) throws IOException { if (name != null) { String sectionHeading = "["+name+"]" + DOS_CR_LF; out.write(sectionHeading.getBytes()); } // output LOGICAL first (if it exists) String s = null; if ((s = (String) properties.get("Logical")) != null) { out.write("Logical".getBytes()); out.write("=".getBytes()); out.write(s.getBytes()); out.write(DOS_CR_LF.getBytes()); } // now get everything else, and ignore the LOGICAL property Iterator it = properties.entrySet().iterator(); while (it.hasNext()) { Map.Entry ent = (Map.Entry) it.next(); if (!ent.getKey().equals("Logical")) { out.write(((String) ent.getKey()).getBytes()); if (ent.getValue() != null) { byte[] val; if (ent.getKey().equals("PWD")) { val = encryptPassword(9, ((String) ent.getValue())); } else { val = ((String) ent.getValue()).getBytes(); } out.write("=".getBytes()); out.write(val); } out.write(DOS_CR_LF.getBytes()); } } }
|
private void writeSection(OutputStream out, String name, Map properties) throws IOException { if (name != null) { String sectionHeading = "["+name+"]" + DOS_CR_LF; out.write(sectionHeading.getBytes()); } // output LOGICAL first (if it exists) String s = null; if ((s = (String) properties.get("Logical")) != null) { out.write("Logical".getBytes()); out.write("=".getBytes()); out.write(s.getBytes()); out.write(DOS_CR_LF.getBytes()); } // now get everything else, and ignore the LOGICAL property Iterator it = properties.entrySet().iterator(); while (it.hasNext()) { Map.Entry ent = (Map.Entry) it.next(); if (!ent.getKey().equals("Logical")) { out.write(((String) ent.getKey()).getBytes()); if (ent.getValue() != null) { byte[] val; if (ent.getKey().equals("PWD")) { val = encryptPassword(9, ((String) ent.getValue())); } else { val = ((String) ent.getValue()).getBytes(); } out.write("=".getBytes()); out.write(val); } out.write(DOS_CR_LF.getBytes()); } } }
| 1,109,534
|
public String put(String key, String value) { String oldValue = get(key); properties.put(key,value); getPcs().firePropertyChange(key,oldValue,value); return oldValue; }
|
public String put(String key, String value) { String oldValue = get(key); properties.put(key,value); getPcs().firePropertyChange(key,oldValue,value); return oldValue; }
| 1,109,535
|
public void testVolumeAddition() { PVDatabase db = new PVDatabase(); db.setDbHost( "" ); db.setDbName( "test" ); Volume v = new Volume( "test", "c:/temp" ); db.addVolume( v ); List volumes = db.getVolumes(); assertTrue( volumes.get( 0 ) == v ); assertTrue( volumes.size() == 1 ); }
|
public void testVolumeAddition() { PVDatabase db = new PVDatabase(); db.setDbHost( "" ); db.setDbName( "test" ); Volume v = new Volume( "test", "c:/temp" ); try { db.addVolume( v ); } catch (PhotovaultException ex) { fail( ex.getMessage() ); } List volumes = db.getVolumes(); assertTrue( volumes.get( 0 ) == v ); assertTrue( volumes.size() == 1 ); }
| 1,109,536
|
public void testXMLOutput() { PVDatabase db = new PVDatabase(); db.setDbHost( "" ); db.setDbName( "test" ); Volume v = new Volume( "test", "c:/temp/voltest" ); db.addVolume( v ); ExternalVolume ev = new ExternalVolume( "test_extvol", "c./tem/extvoltest" ); db.addVolume( ev ); File tempFile = null; try { tempFile = File.createTempFile( "pv_settings_", ".xml" ); tempFile.deleteOnExit(); } catch ( Exception e ) { this.fail( e.getMessage() ); } try { BufferedWriter outputWriter = new BufferedWriter( new FileWriter( tempFile )); outputWriter.write("<?xml version='1.0' ?>\n"); BeanWriter beanWriter = new BeanWriter(outputWriter); beanWriter.getXMLIntrospector().setAttributesForPrimitives(true); beanWriter.setWriteIDs(false); beanWriter.enablePrettyPrint(); beanWriter.write("database", db); beanWriter.close(); } catch( Exception e ) { this.fail( e.getMessage() ); } // Now try to read the info BeanReader beanReader = new BeanReader(); beanReader.getXMLIntrospector().getConfiguration().setAttributesForPrimitives(true); beanReader.getBindingConfiguration().setMapIDs(false); try { beanReader.registerBeanClass( "database", PVDatabase.class ); beanReader.registerBeanClass( "volume", Volume.class ); beanReader.registerBeanClass( "external-volume", ExternalVolume.class ); PVDatabase readDB = (PVDatabase) beanReader.parse( tempFile ); assertEquals( readDB.getDbName(), "test" ); List readVolumes = readDB.getVolumes(); assertTrue( readVolumes.size() == 2 ); Volume readVolume = (Volume) readVolumes.get(0); assertEquals( readVolume.getName(), "test" ); assertEquals( readVolume.getBaseDir(), v.getBaseDir() ); ExternalVolume readExtVolume = (ExternalVolume) readVolumes.get(1); assertEquals( ev.getName(), readExtVolume.getName() ); assertEquals( ev.getBaseDir(), readExtVolume.getBaseDir() ); } catch ( Exception e ) { this.fail( e.getMessage() ); } }
|
public void testXMLOutput() { PVDatabase db = new PVDatabase(); db.setDbHost( "" ); db.setDbName( "test" ); Volume v = new Volume( "test", "c:/temp/voltest" ); ExternalVolume ev = new ExternalVolume( "test_extvol", "c./tem/extvoltest" ); db.addVolume( ev ); File tempFile = null; try { tempFile = File.createTempFile( "pv_settings_", ".xml" ); tempFile.deleteOnExit(); } catch ( Exception e ) { this.fail( e.getMessage() ); } try { BufferedWriter outputWriter = new BufferedWriter( new FileWriter( tempFile )); outputWriter.write("<?xml version='1.0' ?>\n"); BeanWriter beanWriter = new BeanWriter(outputWriter); beanWriter.getXMLIntrospector().setAttributesForPrimitives(true); beanWriter.setWriteIDs(false); beanWriter.enablePrettyPrint(); beanWriter.write("database", db); beanWriter.close(); } catch( Exception e ) { this.fail( e.getMessage() ); } // Now try to read the info BeanReader beanReader = new BeanReader(); beanReader.getXMLIntrospector().getConfiguration().setAttributesForPrimitives(true); beanReader.getBindingConfiguration().setMapIDs(false); try { beanReader.registerBeanClass( "database", PVDatabase.class ); beanReader.registerBeanClass( "volume", Volume.class ); beanReader.registerBeanClass( "external-volume", ExternalVolume.class ); PVDatabase readDB = (PVDatabase) beanReader.parse( tempFile ); assertEquals( readDB.getDbName(), "test" ); List readVolumes = readDB.getVolumes(); assertTrue( readVolumes.size() == 2 ); Volume readVolume = (Volume) readVolumes.get(0); assertEquals( readVolume.getName(), "test" ); assertEquals( readVolume.getBaseDir(), v.getBaseDir() ); ExternalVolume readExtVolume = (ExternalVolume) readVolumes.get(1); assertEquals( ev.getName(), readExtVolume.getName() ); assertEquals( ev.getBaseDir(), readExtVolume.getBaseDir() ); } catch ( Exception e ) { this.fail( e.getMessage() ); } }
| 1,109,537
|
public void testXMLOutput() { PVDatabase db = new PVDatabase(); db.setDbHost( "" ); db.setDbName( "test" ); Volume v = new Volume( "test", "c:/temp/voltest" ); db.addVolume( v ); ExternalVolume ev = new ExternalVolume( "test_extvol", "c./tem/extvoltest" ); db.addVolume( ev ); File tempFile = null; try { tempFile = File.createTempFile( "pv_settings_", ".xml" ); tempFile.deleteOnExit(); } catch ( Exception e ) { this.fail( e.getMessage() ); } try { BufferedWriter outputWriter = new BufferedWriter( new FileWriter( tempFile )); outputWriter.write("<?xml version='1.0' ?>\n"); BeanWriter beanWriter = new BeanWriter(outputWriter); beanWriter.getXMLIntrospector().setAttributesForPrimitives(true); beanWriter.setWriteIDs(false); beanWriter.enablePrettyPrint(); beanWriter.write("database", db); beanWriter.close(); } catch( Exception e ) { this.fail( e.getMessage() ); } // Now try to read the info BeanReader beanReader = new BeanReader(); beanReader.getXMLIntrospector().getConfiguration().setAttributesForPrimitives(true); beanReader.getBindingConfiguration().setMapIDs(false); try { beanReader.registerBeanClass( "database", PVDatabase.class ); beanReader.registerBeanClass( "volume", Volume.class ); beanReader.registerBeanClass( "external-volume", ExternalVolume.class ); PVDatabase readDB = (PVDatabase) beanReader.parse( tempFile ); assertEquals( readDB.getDbName(), "test" ); List readVolumes = readDB.getVolumes(); assertTrue( readVolumes.size() == 2 ); Volume readVolume = (Volume) readVolumes.get(0); assertEquals( readVolume.getName(), "test" ); assertEquals( readVolume.getBaseDir(), v.getBaseDir() ); ExternalVolume readExtVolume = (ExternalVolume) readVolumes.get(1); assertEquals( ev.getName(), readExtVolume.getName() ); assertEquals( ev.getBaseDir(), readExtVolume.getBaseDir() ); } catch ( Exception e ) { this.fail( e.getMessage() ); } }
|
public void testXMLOutput() { PVDatabase db = new PVDatabase(); db.setDbHost( "" ); db.setDbName( "test" ); Volume v = new Volume( "test", "c:/temp/voltest" ); db.addVolume( v ); ExternalVolume ev = new ExternalVolume( "test_extvol", "c./tem/extvoltest" ); try { db.addVolume( v ); db.addVolume( ev ); } catch (PhotovaultException ex) { fail( ex.getMessage() ); } File tempFile = null; try { tempFile = File.createTempFile( "pv_settings_", ".xml" ); tempFile.deleteOnExit(); } catch ( Exception e ) { this.fail( e.getMessage() ); } try { BufferedWriter outputWriter = new BufferedWriter( new FileWriter( tempFile )); outputWriter.write("<?xml version='1.0' ?>\n"); BeanWriter beanWriter = new BeanWriter(outputWriter); beanWriter.getXMLIntrospector().setAttributesForPrimitives(true); beanWriter.setWriteIDs(false); beanWriter.enablePrettyPrint(); beanWriter.write("database", db); beanWriter.close(); } catch( Exception e ) { this.fail( e.getMessage() ); } // Now try to read the info BeanReader beanReader = new BeanReader(); beanReader.getXMLIntrospector().getConfiguration().setAttributesForPrimitives(true); beanReader.getBindingConfiguration().setMapIDs(false); try { beanReader.registerBeanClass( "database", PVDatabase.class ); beanReader.registerBeanClass( "volume", Volume.class ); beanReader.registerBeanClass( "external-volume", ExternalVolume.class ); PVDatabase readDB = (PVDatabase) beanReader.parse( tempFile ); assertEquals( readDB.getDbName(), "test" ); List readVolumes = readDB.getVolumes(); assertTrue( readVolumes.size() == 2 ); Volume readVolume = (Volume) readVolumes.get(0); assertEquals( readVolume.getName(), "test" ); assertEquals( readVolume.getBaseDir(), v.getBaseDir() ); ExternalVolume readExtVolume = (ExternalVolume) readVolumes.get(1); assertEquals( ev.getName(), readExtVolume.getName() ); assertEquals( ev.getBaseDir(), readExtVolume.getBaseDir() ); } catch ( Exception e ) { this.fail( e.getMessage() ); } }
| 1,109,538
|
public void run(Context context, XMLOutput output) throws Exception { if ( uri == null ) { throw new JellyException( "<j:include> must have a 'uri' attribute defined" ); } // we need to create a new Context of the URI // take off the script name from the URL context.runScript( uri, output ); }
|
public void run(JellyContext context, XMLOutput output) throws Exception { if ( uri == null ) { throw new JellyException( "<j:include> must have a 'uri' attribute defined" ); } // we need to create a new Context of the URI // take off the script name from the URL context.runScript( uri, output ); }
| 1,109,540
|
public void writeDDLTransactionEnd() { out.println("GO"); }
|
public void writeDDLTransactionEnd() { println("GO"); }
| 1,109,541
|
public void writeHeader() { out.println("-- Created by SQLPower SQLServer 2000 DDL Generator "+GENERATOR_VERSION+" --"); }
|
public void writeHeader() { println("-- Created by SQLPower SQLServer 2000 DDL Generator "+GENERATOR_VERSION+" --"); }
| 1,109,542
|
public void saveResultToFile(File outFile) throws IOException { BufferedWriter bw = new BufferedWriter(new FileWriter(outFile)); bw.write("#tagging with r^2 cutoff: " + minRSquared); bw.newLine(); bw.write("Marker\tBest Tag\tr^2 w/tag"); bw.newLine(); for (int i = 0; i < snps.size(); i++) { StringBuffer line = new StringBuffer(); SNP snp = (SNP) snps.elementAt(i); line.append(snp.getName()).append("\t"); TagSequence theTag = snp.getBestTag(); line.append(theTag.getName()).append("\t"); line.append(getPairwiseCompRsq(snp,theTag.getSequence())).append("\t"); bw.write(line.toString()); bw.newLine(); } bw.newLine(); bw.write("Tag\tMarkers Tagged"); bw.newLine(); for(int i=0;i<tags.size();i++) { StringBuffer line = new StringBuffer(); TagSequence theTag = (TagSequence) tags.get(i); line.append(theTag.getName()).append("\t"); Vector tagged = theTag.getBestTagged(); for (int j = 0; j < tagged.size(); j++) { VariantSequence varSeq = (VariantSequence) tagged.elementAt(j); if(j !=0){ line.append(","); } line.append(varSeq.getName()); } bw.write(line.toString()); bw.newLine(); } bw.close(); }
|
public void saveResultToFile(File outFile) throws IOException { BufferedWriter bw = new BufferedWriter(new FileWriter(outFile)); bw.write("#tagging with r^2 cutoff: " + minRSquared); bw.newLine(); bw.write("Marker\tBest Tag\tr^2 w/tag"); bw.newLine(); for (int i = 0; i < snps.size(); i++) { StringBuffer line = new StringBuffer(); SNP snp = (SNP) snps.elementAt(i); line.append(snp.getName()).append("\t"); TagSequence theTag = snp.getBestTag(); line.append(theTag.getName()).append("\t"); line.append(getPairwiseCompRsq(snp,theTag.getSequence())).append("\t"); bw.write(line.toString()); bw.newLine(); } bw.newLine(); bw.write("Tag\tMarkers Tagged"); bw.newLine(); for(int i=0;i<tags.size();i++) { StringBuffer line = new StringBuffer(); TagSequence theTag = (TagSequence) tags.get(i); line.append(theTag.getName()).append("\t"); Vector tagged = theTag.getBestTagged(); for (int j = 0; j < tagged.size(); j++) { VariantSequence varSeq = (VariantSequence) tagged.elementAt(j); if(j !=0){ line.append(","); } line.append(varSeq.getName()); } bw.write(line.toString()); bw.newLine(); } bw.close(); }
| 1,109,543
|
protected void createUI() { tabPane = new JTabbedPane(); add( tabPane ); // General pane JPanel generalPane = new JPanel(); tabPane.addTab( "General", generalPane ); // Create the fields & their labels // Photographer field JLabel photographerLabel = new JLabel( "Photographer" ); photographerField = new JTextField( 30 ); photographerDoc = photographerField.getDocument(); photographerDoc.addDocumentListener( this ); photographerDoc.putProperty( FIELD_NAME, PhotoInfoController.PHOTOGRAPHER ); // Shooting date field JLabel shootingDayLabel = new JLabel( "Shooting date" ); DateFormat df = DateFormat.getDateInstance(); DateFormatter shootingDayFormatter = new DateFormatter( df ); shootingDayField = new JFormattedTextField( df ); shootingDayField.setColumns( 10 ); shootingDayField.setValue( new Date( )); shootingDayField.addPropertyChangeListener( this ); shootingDayField.putClientProperty( FIELD_NAME, PhotoInfoController.SHOOTING_DATE ); // Shooting place field JLabel shootingPlaceLabel = new JLabel( "Shooting place" ); shootingPlaceField = new JTextField( 30 ); shootingPlaceDoc = shootingPlaceField.getDocument(); shootingPlaceDoc.addDocumentListener( this ); shootingPlaceDoc.putProperty( FIELD_NAME, PhotoInfoController.SHOOTING_PLACE ); // Description text JLabel descLabel = new JLabel( "Description" ); descriptionTextArea = new JTextArea( 5, 40 ); descriptionTextArea.setLineWrap( true ); descriptionTextArea.setWrapStyleWord( true ); JScrollPane descScrollPane = new JScrollPane( descriptionTextArea ); descScrollPane.setVerticalScrollBarPolicy( JScrollPane.VERTICAL_SCROLLBAR_ALWAYS ); Border descBorder = BorderFactory.createEtchedBorder( EtchedBorder.LOWERED ); descBorder = BorderFactory.createTitledBorder( descBorder, "Description" ); descScrollPane.setBorder( descBorder ); descriptionDoc = descriptionTextArea.getDocument(); descriptionDoc.putProperty( FIELD_NAME, PhotoInfoController.DESCRIPTION ); descriptionDoc.addDocumentListener( this ); // Save button JButton 
saveBtn = new JButton( "Save" ); saveBtn.setActionCommand( "save" ); saveBtn.addActionListener( this ); // Discard button JButton discardBtn = new JButton( "Discard" ); discardBtn.setActionCommand( "discard" ); discardBtn.addActionListener( this ); // Lay out the created controls GridBagLayout layout = new GridBagLayout(); GridBagConstraints c = new GridBagConstraints(); generalPane.setLayout( layout ); JLabel[] labels = { photographerLabel, shootingDayLabel, shootingPlaceLabel }; JTextField[] fields = { photographerField, shootingDayField, shootingPlaceField }; addLabelTextRows( labels, fields, layout, generalPane ); generalPane.add( descScrollPane ); c.gridwidth = GridBagConstraints.REMAINDER; c.weighty = 0.5; c.fill = GridBagConstraints.NONE; layout.setConstraints( descScrollPane, c ); c = new GridBagConstraints(); c.gridwidth = 1; c.weighty = 0; c.fill = GridBagConstraints.NONE; c.gridy = GridBagConstraints.RELATIVE; generalPane.add( saveBtn ); layout.setConstraints( saveBtn, c ); c.gridy = GridBagConstraints.RELATIVE; generalPane.add( discardBtn ); layout.setConstraints( discardBtn, c ); createTechDataUI(); }
|
protected void createUI() { tabPane = new JTabbedPane(); add( tabPane, BorderLayout.CENTER ); // General pane JPanel generalPane = new JPanel(); tabPane.addTab( "General", generalPane ); // Create the fields & their labels // Photographer field JLabel photographerLabel = new JLabel( "Photographer" ); photographerField = new JTextField( 30 ); photographerDoc = photographerField.getDocument(); photographerDoc.addDocumentListener( this ); photographerDoc.putProperty( FIELD_NAME, PhotoInfoController.PHOTOGRAPHER ); // Shooting date field JLabel shootingDayLabel = new JLabel( "Shooting date" ); DateFormat df = DateFormat.getDateInstance(); DateFormatter shootingDayFormatter = new DateFormatter( df ); shootingDayField = new JFormattedTextField( df ); shootingDayField.setColumns( 10 ); shootingDayField.setValue( new Date( )); shootingDayField.addPropertyChangeListener( this ); shootingDayField.putClientProperty( FIELD_NAME, PhotoInfoController.SHOOTING_DATE ); // Shooting place field JLabel shootingPlaceLabel = new JLabel( "Shooting place" ); shootingPlaceField = new JTextField( 30 ); shootingPlaceDoc = shootingPlaceField.getDocument(); shootingPlaceDoc.addDocumentListener( this ); shootingPlaceDoc.putProperty( FIELD_NAME, PhotoInfoController.SHOOTING_PLACE ); // Description text JLabel descLabel = new JLabel( "Description" ); descriptionTextArea = new JTextArea( 5, 40 ); descriptionTextArea.setLineWrap( true ); descriptionTextArea.setWrapStyleWord( true ); JScrollPane descScrollPane = new JScrollPane( descriptionTextArea ); descScrollPane.setVerticalScrollBarPolicy( JScrollPane.VERTICAL_SCROLLBAR_ALWAYS ); Border descBorder = BorderFactory.createEtchedBorder( EtchedBorder.LOWERED ); descBorder = BorderFactory.createTitledBorder( descBorder, "Description" ); descScrollPane.setBorder( descBorder ); descriptionDoc = descriptionTextArea.getDocument(); descriptionDoc.putProperty( FIELD_NAME, PhotoInfoController.DESCRIPTION ); descriptionDoc.addDocumentListener( this ); // 
Save button JButton saveBtn = new JButton( "Save" ); saveBtn.setActionCommand( "save" ); saveBtn.addActionListener( this ); // Discard button JButton discardBtn = new JButton( "Discard" ); discardBtn.setActionCommand( "discard" ); discardBtn.addActionListener( this ); // Lay out the created controls GridBagLayout layout = new GridBagLayout(); GridBagConstraints c = new GridBagConstraints(); generalPane.setLayout( layout ); JLabel[] labels = { photographerLabel, shootingDayLabel, shootingPlaceLabel }; JTextField[] fields = { photographerField, shootingDayField, shootingPlaceField }; addLabelTextRows( labels, fields, layout, generalPane ); generalPane.add( descScrollPane ); c.gridwidth = GridBagConstraints.REMAINDER; c.weighty = 0.5; c.fill = GridBagConstraints.NONE; layout.setConstraints( descScrollPane, c ); c = new GridBagConstraints(); c.gridwidth = 1; c.weighty = 0; c.fill = GridBagConstraints.NONE; c.gridy = GridBagConstraints.RELATIVE; generalPane.add( saveBtn ); layout.setConstraints( saveBtn, c ); c.gridy = GridBagConstraints.RELATIVE; generalPane.add( discardBtn ); layout.setConstraints( discardBtn, c ); createTechDataUI(); }
| 1,109,546
|
protected void createUI() { tabPane = new JTabbedPane(); add( tabPane ); // General pane JPanel generalPane = new JPanel(); tabPane.addTab( "General", generalPane ); // Create the fields & their labels // Photographer field JLabel photographerLabel = new JLabel( "Photographer" ); photographerField = new JTextField( 30 ); photographerDoc = photographerField.getDocument(); photographerDoc.addDocumentListener( this ); photographerDoc.putProperty( FIELD_NAME, PhotoInfoController.PHOTOGRAPHER ); // Shooting date field JLabel shootingDayLabel = new JLabel( "Shooting date" ); DateFormat df = DateFormat.getDateInstance(); DateFormatter shootingDayFormatter = new DateFormatter( df ); shootingDayField = new JFormattedTextField( df ); shootingDayField.setColumns( 10 ); shootingDayField.setValue( new Date( )); shootingDayField.addPropertyChangeListener( this ); shootingDayField.putClientProperty( FIELD_NAME, PhotoInfoController.SHOOTING_DATE ); // Shooting place field JLabel shootingPlaceLabel = new JLabel( "Shooting place" ); shootingPlaceField = new JTextField( 30 ); shootingPlaceDoc = shootingPlaceField.getDocument(); shootingPlaceDoc.addDocumentListener( this ); shootingPlaceDoc.putProperty( FIELD_NAME, PhotoInfoController.SHOOTING_PLACE ); // Description text JLabel descLabel = new JLabel( "Description" ); descriptionTextArea = new JTextArea( 5, 40 ); descriptionTextArea.setLineWrap( true ); descriptionTextArea.setWrapStyleWord( true ); JScrollPane descScrollPane = new JScrollPane( descriptionTextArea ); descScrollPane.setVerticalScrollBarPolicy( JScrollPane.VERTICAL_SCROLLBAR_ALWAYS ); Border descBorder = BorderFactory.createEtchedBorder( EtchedBorder.LOWERED ); descBorder = BorderFactory.createTitledBorder( descBorder, "Description" ); descScrollPane.setBorder( descBorder ); descriptionDoc = descriptionTextArea.getDocument(); descriptionDoc.putProperty( FIELD_NAME, PhotoInfoController.DESCRIPTION ); descriptionDoc.addDocumentListener( this ); // Save button JButton 
saveBtn = new JButton( "Save" ); saveBtn.setActionCommand( "save" ); saveBtn.addActionListener( this ); // Discard button JButton discardBtn = new JButton( "Discard" ); discardBtn.setActionCommand( "discard" ); discardBtn.addActionListener( this ); // Lay out the created controls GridBagLayout layout = new GridBagLayout(); GridBagConstraints c = new GridBagConstraints(); generalPane.setLayout( layout ); JLabel[] labels = { photographerLabel, shootingDayLabel, shootingPlaceLabel }; JTextField[] fields = { photographerField, shootingDayField, shootingPlaceField }; addLabelTextRows( labels, fields, layout, generalPane ); generalPane.add( descScrollPane ); c.gridwidth = GridBagConstraints.REMAINDER; c.weighty = 0.5; c.fill = GridBagConstraints.NONE; layout.setConstraints( descScrollPane, c ); c = new GridBagConstraints(); c.gridwidth = 1; c.weighty = 0; c.fill = GridBagConstraints.NONE; c.gridy = GridBagConstraints.RELATIVE; generalPane.add( saveBtn ); layout.setConstraints( saveBtn, c ); c.gridy = GridBagConstraints.RELATIVE; generalPane.add( discardBtn ); layout.setConstraints( discardBtn, c ); createTechDataUI(); }
|
protected void createUI() { tabPane = new JTabbedPane(); add( tabPane ); // General pane JPanel generalPane = new JPanel(); tabPane.addTab( "General", generalPane ); // Create the fields & their labels // Photographer field JLabel photographerLabel = new JLabel( "Photographer" ); photographerField = new JTextField( 30 ); photographerDoc = photographerField.getDocument(); photographerDoc.addDocumentListener( this ); photographerDoc.putProperty( FIELD_NAME, PhotoInfoController.PHOTOGRAPHER ); // Shooting date field JLabel shootingDayLabel = new JLabel( "Shooting date" ); DateFormat df = DateFormat.getDateInstance(); DateFormatter shootingDayFormatter = new DateFormatter( df ); shootingDayField = new JFormattedTextField( df ); shootingDayField.setColumns( 10 ); shootingDayField.setValue( new Date( )); shootingDayField.addPropertyChangeListener( this ); shootingDayField.putClientProperty( FIELD_NAME, PhotoInfoController.SHOOTING_DATE ); // Shooting place field JLabel shootingPlaceLabel = new JLabel( "Shooting place" ); shootingPlaceField = new JTextField( 30 ); shootingPlaceDoc = shootingPlaceField.getDocument(); shootingPlaceDoc.addDocumentListener( this ); shootingPlaceDoc.putProperty( FIELD_NAME, PhotoInfoController.SHOOTING_PLACE ); // Description text JLabel descLabel = new JLabel( "Description" ); descriptionTextArea = new JTextArea( 5, 40 ); descriptionTextArea.setLineWrap( true ); descriptionTextArea.setWrapStyleWord( true ); JScrollPane descScrollPane = new JScrollPane( descriptionTextArea ); descScrollPane.setVerticalScrollBarPolicy( JScrollPane.VERTICAL_SCROLLBAR_ALWAYS ); Border descBorder = BorderFactory.createEtchedBorder( EtchedBorder.LOWERED ); descBorder = BorderFactory.createTitledBorder( descBorder, "Description" ); descScrollPane.setBorder( descBorder ); descriptionDoc = descriptionTextArea.getDocument(); descriptionDoc.putProperty( FIELD_NAME, PhotoInfoController.DESCRIPTION ); descriptionDoc.addDocumentListener( this ); // Save button JButton 
saveBtn = new JButton( "Save" ); saveBtn.setActionCommand( "save" ); saveBtn.addActionListener( this ); // Discard button JButton discardBtn = new JButton( "Discard" ); discardBtn.setActionCommand( "discard" ); discardBtn.addActionListener( this ); // Lay out the created controls GridBagLayout layout = new GridBagLayout(); GridBagConstraints c = new GridBagConstraints(); generalPane.setLayout( layout ); JLabel[] labels = { photographerLabel, shootingDayLabel, shootingPlaceLabel }; JTextField[] fields = { photographerField, shootingDayField, shootingPlaceField }; addLabelTextRows( labels, fields, layout, generalPane ); generalPane.add( descScrollPane ); c.gridwidth = GridBagConstraints.REMAINDER; c.weighty = 0.5; c.fill = GridBagConstraints.BOTH; layout.setConstraints( descScrollPane, c ); c = new GridBagConstraints(); c.gridwidth = 1; c.weighty = 0; c.fill = GridBagConstraints.BOTH; c.gridy = GridBagConstraints.RELATIVE; generalPane.add( saveBtn ); layout.setConstraints( saveBtn, c ); c.gridy = GridBagConstraints.RELATIVE; generalPane.add( discardBtn ); layout.setConstraints( discardBtn, c ); createTechDataUI(); }
| 1,109,547
|
protected void createUI() { tabPane = new JTabbedPane(); add( tabPane ); // General pane JPanel generalPane = new JPanel(); tabPane.addTab( "General", generalPane ); // Create the fields & their labels // Photographer field JLabel photographerLabel = new JLabel( "Photographer" ); photographerField = new JTextField( 30 ); photographerDoc = photographerField.getDocument(); photographerDoc.addDocumentListener( this ); photographerDoc.putProperty( FIELD_NAME, PhotoInfoController.PHOTOGRAPHER ); // Shooting date field JLabel shootingDayLabel = new JLabel( "Shooting date" ); DateFormat df = DateFormat.getDateInstance(); DateFormatter shootingDayFormatter = new DateFormatter( df ); shootingDayField = new JFormattedTextField( df ); shootingDayField.setColumns( 10 ); shootingDayField.setValue( new Date( )); shootingDayField.addPropertyChangeListener( this ); shootingDayField.putClientProperty( FIELD_NAME, PhotoInfoController.SHOOTING_DATE ); // Shooting place field JLabel shootingPlaceLabel = new JLabel( "Shooting place" ); shootingPlaceField = new JTextField( 30 ); shootingPlaceDoc = shootingPlaceField.getDocument(); shootingPlaceDoc.addDocumentListener( this ); shootingPlaceDoc.putProperty( FIELD_NAME, PhotoInfoController.SHOOTING_PLACE ); // Description text JLabel descLabel = new JLabel( "Description" ); descriptionTextArea = new JTextArea( 5, 40 ); descriptionTextArea.setLineWrap( true ); descriptionTextArea.setWrapStyleWord( true ); JScrollPane descScrollPane = new JScrollPane( descriptionTextArea ); descScrollPane.setVerticalScrollBarPolicy( JScrollPane.VERTICAL_SCROLLBAR_ALWAYS ); Border descBorder = BorderFactory.createEtchedBorder( EtchedBorder.LOWERED ); descBorder = BorderFactory.createTitledBorder( descBorder, "Description" ); descScrollPane.setBorder( descBorder ); descriptionDoc = descriptionTextArea.getDocument(); descriptionDoc.putProperty( FIELD_NAME, PhotoInfoController.DESCRIPTION ); descriptionDoc.addDocumentListener( this ); // Save button JButton 
saveBtn = new JButton( "Save" ); saveBtn.setActionCommand( "save" ); saveBtn.addActionListener( this ); // Discard button JButton discardBtn = new JButton( "Discard" ); discardBtn.setActionCommand( "discard" ); discardBtn.addActionListener( this ); // Lay out the created controls GridBagLayout layout = new GridBagLayout(); GridBagConstraints c = new GridBagConstraints(); generalPane.setLayout( layout ); JLabel[] labels = { photographerLabel, shootingDayLabel, shootingPlaceLabel }; JTextField[] fields = { photographerField, shootingDayField, shootingPlaceField }; addLabelTextRows( labels, fields, layout, generalPane ); generalPane.add( descScrollPane ); c.gridwidth = GridBagConstraints.REMAINDER; c.weighty = 0.5; c.fill = GridBagConstraints.NONE; layout.setConstraints( descScrollPane, c ); c = new GridBagConstraints(); c.gridwidth = 1; c.weighty = 0; c.fill = GridBagConstraints.NONE; c.gridy = GridBagConstraints.RELATIVE; generalPane.add( saveBtn ); layout.setConstraints( saveBtn, c ); c.gridy = GridBagConstraints.RELATIVE; generalPane.add( discardBtn ); layout.setConstraints( discardBtn, c ); createTechDataUI(); }
|
protected void createUI() { tabPane = new JTabbedPane(); add( tabPane ); // General pane JPanel generalPane = new JPanel(); tabPane.addTab( "General", generalPane ); // Create the fields & their labels // Photographer field JLabel photographerLabel = new JLabel( "Photographer" ); photographerField = new JTextField( 30 ); photographerDoc = photographerField.getDocument(); photographerDoc.addDocumentListener( this ); photographerDoc.putProperty( FIELD_NAME, PhotoInfoController.PHOTOGRAPHER ); // Shooting date field JLabel shootingDayLabel = new JLabel( "Shooting date" ); DateFormat df = DateFormat.getDateInstance(); DateFormatter shootingDayFormatter = new DateFormatter( df ); shootingDayField = new JFormattedTextField( df ); shootingDayField.setColumns( 10 ); shootingDayField.setValue( new Date( )); shootingDayField.addPropertyChangeListener( this ); shootingDayField.putClientProperty( FIELD_NAME, PhotoInfoController.SHOOTING_DATE ); // Shooting place field JLabel shootingPlaceLabel = new JLabel( "Shooting place" ); shootingPlaceField = new JTextField( 30 ); shootingPlaceDoc = shootingPlaceField.getDocument(); shootingPlaceDoc.addDocumentListener( this ); shootingPlaceDoc.putProperty( FIELD_NAME, PhotoInfoController.SHOOTING_PLACE ); // Description text JLabel descLabel = new JLabel( "Description" ); descriptionTextArea = new JTextArea( 5, 40 ); descriptionTextArea.setLineWrap( true ); descriptionTextArea.setWrapStyleWord( true ); JScrollPane descScrollPane = new JScrollPane( descriptionTextArea ); descScrollPane.setVerticalScrollBarPolicy( JScrollPane.VERTICAL_SCROLLBAR_ALWAYS ); Border descBorder = BorderFactory.createEtchedBorder( EtchedBorder.LOWERED ); descBorder = BorderFactory.createTitledBorder( descBorder, "Description" ); descScrollPane.setBorder( descBorder ); descriptionDoc = descriptionTextArea.getDocument(); descriptionDoc.putProperty( FIELD_NAME, PhotoInfoController.DESCRIPTION ); descriptionDoc.addDocumentListener( this ); // Save button JButton 
saveBtn = new JButton( "Save" ); saveBtn.setActionCommand( "save" ); saveBtn.addActionListener( this ); // Discard button JButton discardBtn = new JButton( "Discard" ); discardBtn.setActionCommand( "discard" ); discardBtn.addActionListener( this ); // Lay out the created controls GridBagLayout layout = new GridBagLayout(); GridBagConstraints c = new GridBagConstraints(); generalPane.setLayout( layout ); JLabel[] labels = { photographerLabel, shootingDayLabel, shootingPlaceLabel }; JTextField[] fields = { photographerField, shootingDayField, shootingPlaceField }; addLabelTextRows( labels, fields, layout, generalPane ); generalPane.add( descScrollPane ); c.gridwidth = GridBagConstraints.REMAINDER; c.weighty = 0.5; c.fill = GridBagConstraints.NONE; layout.setConstraints( descScrollPane, c ); c = new GridBagConstraints(); c.gridwidth = 1; c.weighty = 0; c.fill = GridBagConstraints.NONE; c.gridy = GridBagConstraints.RELATIVE; generalPane.add( saveBtn ); layout.setConstraints( saveBtn, c ); c.gridy = GridBagConstraints.RELATIVE; generalPane.add( discardBtn ); layout.setConstraints( discardBtn, c ); createTechDataUI(); }
| 1,109,548
|
protected void createUI() { tabPane = new JTabbedPane(); add( tabPane ); // General pane JPanel generalPane = new JPanel(); tabPane.addTab( "General", generalPane ); // Create the fields & their labels // Photographer field JLabel photographerLabel = new JLabel( "Photographer" ); photographerField = new JTextField( 30 ); photographerDoc = photographerField.getDocument(); photographerDoc.addDocumentListener( this ); photographerDoc.putProperty( FIELD_NAME, PhotoInfoController.PHOTOGRAPHER ); // Shooting date field JLabel shootingDayLabel = new JLabel( "Shooting date" ); DateFormat df = DateFormat.getDateInstance(); DateFormatter shootingDayFormatter = new DateFormatter( df ); shootingDayField = new JFormattedTextField( df ); shootingDayField.setColumns( 10 ); shootingDayField.setValue( new Date( )); shootingDayField.addPropertyChangeListener( this ); shootingDayField.putClientProperty( FIELD_NAME, PhotoInfoController.SHOOTING_DATE ); // Shooting place field JLabel shootingPlaceLabel = new JLabel( "Shooting place" ); shootingPlaceField = new JTextField( 30 ); shootingPlaceDoc = shootingPlaceField.getDocument(); shootingPlaceDoc.addDocumentListener( this ); shootingPlaceDoc.putProperty( FIELD_NAME, PhotoInfoController.SHOOTING_PLACE ); // Description text JLabel descLabel = new JLabel( "Description" ); descriptionTextArea = new JTextArea( 5, 40 ); descriptionTextArea.setLineWrap( true ); descriptionTextArea.setWrapStyleWord( true ); JScrollPane descScrollPane = new JScrollPane( descriptionTextArea ); descScrollPane.setVerticalScrollBarPolicy( JScrollPane.VERTICAL_SCROLLBAR_ALWAYS ); Border descBorder = BorderFactory.createEtchedBorder( EtchedBorder.LOWERED ); descBorder = BorderFactory.createTitledBorder( descBorder, "Description" ); descScrollPane.setBorder( descBorder ); descriptionDoc = descriptionTextArea.getDocument(); descriptionDoc.putProperty( FIELD_NAME, PhotoInfoController.DESCRIPTION ); descriptionDoc.addDocumentListener( this ); // Save button JButton 
saveBtn = new JButton( "Save" ); saveBtn.setActionCommand( "save" ); saveBtn.addActionListener( this ); // Discard button JButton discardBtn = new JButton( "Discard" ); discardBtn.setActionCommand( "discard" ); discardBtn.addActionListener( this ); // Lay out the created controls GridBagLayout layout = new GridBagLayout(); GridBagConstraints c = new GridBagConstraints(); generalPane.setLayout( layout ); JLabel[] labels = { photographerLabel, shootingDayLabel, shootingPlaceLabel }; JTextField[] fields = { photographerField, shootingDayField, shootingPlaceField }; addLabelTextRows( labels, fields, layout, generalPane ); generalPane.add( descScrollPane ); c.gridwidth = GridBagConstraints.REMAINDER; c.weighty = 0.5; c.fill = GridBagConstraints.NONE; layout.setConstraints( descScrollPane, c ); c = new GridBagConstraints(); c.gridwidth = 1; c.weighty = 0; c.fill = GridBagConstraints.NONE; c.gridy = GridBagConstraints.RELATIVE; generalPane.add( saveBtn ); layout.setConstraints( saveBtn, c ); c.gridy = GridBagConstraints.RELATIVE; generalPane.add( discardBtn ); layout.setConstraints( discardBtn, c ); createTechDataUI(); }
|
protected void createUI() { tabPane = new JTabbedPane(); add( tabPane ); // General pane JPanel generalPane = new JPanel(); tabPane.addTab( "General", generalPane ); // Create the fields & their labels // Photographer field JLabel photographerLabel = new JLabel( "Photographer" ); photographerField = new JTextField( 30 ); photographerDoc = photographerField.getDocument(); photographerDoc.addDocumentListener( this ); photographerDoc.putProperty( FIELD_NAME, PhotoInfoController.PHOTOGRAPHER ); // Shooting date field JLabel shootingDayLabel = new JLabel( "Shooting date" ); DateFormat df = DateFormat.getDateInstance(); DateFormatter shootingDayFormatter = new DateFormatter( df ); shootingDayField = new JFormattedTextField( df ); shootingDayField.setColumns( 10 ); shootingDayField.setValue( new Date( )); shootingDayField.addPropertyChangeListener( this ); shootingDayField.putClientProperty( FIELD_NAME, PhotoInfoController.SHOOTING_DATE ); // Shooting place field JLabel shootingPlaceLabel = new JLabel( "Shooting place" ); shootingPlaceField = new JTextField( 30 ); shootingPlaceDoc = shootingPlaceField.getDocument(); shootingPlaceDoc.addDocumentListener( this ); shootingPlaceDoc.putProperty( FIELD_NAME, PhotoInfoController.SHOOTING_PLACE ); // Description text JLabel descLabel = new JLabel( "Description" ); descriptionTextArea = new JTextArea( 5, 40 ); descriptionTextArea.setLineWrap( true ); descriptionTextArea.setWrapStyleWord( true ); JScrollPane descScrollPane = new JScrollPane( descriptionTextArea ); descScrollPane.setVerticalScrollBarPolicy( JScrollPane.VERTICAL_SCROLLBAR_ALWAYS ); Border descBorder = BorderFactory.createEtchedBorder( EtchedBorder.LOWERED ); descBorder = BorderFactory.createTitledBorder( descBorder, "Description" ); descScrollPane.setBorder( descBorder ); descriptionDoc = descriptionTextArea.getDocument(); descriptionDoc.putProperty( FIELD_NAME, PhotoInfoController.DESCRIPTION ); descriptionDoc.addDocumentListener( this ); // Save button JButton 
saveBtn = new JButton( "Save" ); saveBtn.setActionCommand( "save" ); saveBtn.addActionListener( this ); // Discard button JButton discardBtn = new JButton( "Discard" ); discardBtn.setActionCommand( "discard" ); discardBtn.addActionListener( this ); // Lay out the created controls GridBagLayout layout = new GridBagLayout(); GridBagConstraints c = new GridBagConstraints(); generalPane.setLayout( layout ); JLabel[] labels = { photographerLabel, shootingDayLabel, shootingPlaceLabel }; JTextField[] fields = { photographerField, shootingDayField, shootingPlaceField }; addLabelTextRows( labels, fields, layout, generalPane ); generalPane.add( descScrollPane ); c.gridwidth = GridBagConstraints.REMAINDER; c.weighty = 0.5; c.fill = GridBagConstraints.NONE; layout.setConstraints( descScrollPane, c ); c = new GridBagConstraints(); c.gridwidth = 1; c.weighty = 0; c.fill = GridBagConstraints.NONE; c.gridy = GridBagConstraints.RELATIVE; generalPane.add( saveBtn ); layout.setConstraints( saveBtn, c ); c.gridy = GridBagConstraints.RELATIVE; generalPane.add( discardBtn ); layout.setConstraints( discardBtn, c ); createTechDataUI(); }
| 1,109,549
|
public void doTag(XMLOutput output) throws Exception { Project project = getAntProject(); String tagName = getTagName(); if ( project.getTaskDefinitions().containsKey( tagName ) ) { // check if manifest is contained within a jar // special handling for Ant 1.5 manifest which is a task // but can also be contained within the jar task // There has got to be a better way but I couldn't find it if(tagName.equals(ANT_MANIFEST_TAG)) { TaskSource ancestor = (TaskSource) findAncestorWithClass( TaskSource.class ); if ( ancestor != null ) { Object nested = null; Object parentObject = null; parentObject = ancestor.getTaskObject(); nested = createNestedObject( parentObject, tagName ); // now lets invoke the body String body = getBodyText(); // now lets set any attributes of this tag... setBeanProperties(); // now lets add it to its parent if ( parentObject != null) { IntrospectionHelper ih = IntrospectionHelper.getHelper( parentObject.getClass() ); try { ih.storeElement( project, parentObject, nested, tagName ); } catch (Exception e) { //log.warn( "Caught exception setting nested: " + tagName, e ); } } return; } } // the following algorithm follows the lifetime of a tag // http://jakarta.apache.org/ant/manual/develop.html#writingowntask // kindly recommended by Stefan Bodewig // create and set its project reference task = createTask( tagName ); if ( task instanceof TaskAdapter ) { setObject( ((TaskAdapter)task).getProxy() ); } else { setObject( task ); } // set the task ID if one is given Object id = getAttributes().remove( "id" ); if ( id != null ) { project.addReference( (String) id, task ); } // ### we might want to spoof a Target setting here // now lets initialize task.init(); // now lets invoke the body to call all the createXXX() or addXXX() methods String body = getBodyText(); // now lets set any attributes of this tag... 
setBeanProperties(); // now lets set the addText() of the body content, if its applicaable Method method = MethodUtils.getAccessibleMethod( task.getClass(), "addText", addTaskParamTypes ); if (method != null) { String text = getBodyText(); Object[] args = { text }; method.invoke(this.task, args); } // now lets set all the attributes of the child elements // XXXX: to do! // now we're ready to invoke the task // XXX: should we call execute() or perform()? task.perform(); } else { Object nested = null; Object parentObject = null; // must be a datatype. TaskSource ancestor = (TaskSource) findAncestorWithClass( TaskSource.class ); if ( ancestor != null ) { parentObject = ancestor.getTaskObject(); nested = createNestedObject( parentObject, tagName ); } if ( nested == null ) { nested = createDataType( tagName ); } if ( nested != null ) { setObject( nested ); // set the task ID if one is given Object id = getAttributes().remove( "id" ); if ( id != null ) { project.addReference( (String) id, nested ); } try{ PropertyUtils.setProperty( nested, "name", tagName ); } catch (Exception e) { } } // now lets invoke the body String body = getBodyText(); // now lets set any attributes of this tag... setBeanProperties(); // now lets add it to its parent if ( parentObject != null ) { IntrospectionHelper ih = IntrospectionHelper.getHelper( parentObject.getClass() ); try { ih.storeElement( project, parentObject, nested, tagName ); } catch (Exception e) { //log.warn( "Caught exception setting nested: " + tagName, e ); } } } }
|
public void doTag(XMLOutput output) throws Exception { Project project = getAntProject(); String tagName = getTagName(); if ( project.getTaskDefinitions().containsKey( tagName ) ) { // check if manifest is contained within a jar // special handling for Ant 1.5 manifest which is a task // but can also be contained within the jar task // There has got to be a better way but I couldn't find it if(tagName.equals(ANT_MANIFEST_TAG)) { TaskSource ancestor = (TaskSource) findAncestorWithClass( TaskSource.class ); if ( ancestor != null ) { Object nested = null; Object parentObject = null; parentObject = ancestor.getTaskObject(); nested = createNestedObject( parentObject, tagName ); // now lets invoke the body String body = getBodyText(); // now lets set any attributes of this tag... setBeanProperties(); // now lets add it to its parent if ( parentObject != null) { IntrospectionHelper ih = IntrospectionHelper.getHelper( parentObject.getClass() ); try { ih.storeElement( project, parentObject, nested, tagName ); } catch (Exception e) { //log.warn( "Caught exception setting nested: " + tagName, e ); } } return; } } // the following algorithm follows the lifetime of a tag // http://jakarta.apache.org/ant/manual/develop.html#writingowntask // kindly recommended by Stefan Bodewig // create and set its project reference task = createTask( tagName ); if ( task instanceof TaskAdapter ) { setObject( ((TaskAdapter)task).getProxy() ); } else { setObject( task ); } // set the task ID if one is given Object id = getAttributes().remove( "id" ); if ( id != null ) { project.addReference( (String) id, task ); } // ### we might want to spoof a Target setting here // now lets initialize task.init(); // now lets invoke the body to call all the createXXX() or addXXX() methods String body = getBodyText(); // now lets set any attributes of this tag... 
setBeanProperties(); // now lets set the addText() of the body content, if its applicaable Method method = MethodUtils.getAccessibleMethod( task.getClass(), "addText", addTaskParamTypes ); if (method != null) { String text = getBodyText(); Object[] args = { text }; method.invoke(this.task, args); } // now lets set all the attributes of the child elements // XXXX: to do! // now we're ready to invoke the task // XXX: should we call execute() or perform()? task.perform(); } else { Object nested = null; Object parentObject = null; // must be a datatype. TaskSource ancestor = (TaskSource) findAncestorWithClass( TaskSource.class ); if ( ancestor != null ) { parentObject = ancestor.getTaskObject(); nested = createNestedObject( parentObject, tagName ); } if ( nested == null ) { nested = createDataType( tagName ); } if ( nested != null ) { setObject( nested ); // set the task ID if one is given Object id = getAttributes().remove( "id" ); if ( id != null ) { project.addReference( (String) id, nested ); } try{ PropertyUtils.setProperty( nested, "name", tagName ); } catch (Exception e) { } } // now lets invoke the body String body = getBodyText(); // now lets set any attributes of this tag... setBeanProperties(); // now lets add it to its parent if ( parentObject != null ) { IntrospectionHelper ih = IntrospectionHelper.getHelper( parentObject.getClass() ); try { ih.storeElement( project, parentObject, nested, tagName ); } catch (Exception e) { //log.warn( "Caught exception setting nested: " + tagName, e ); } } } }
| 1,109,551
|
public void doTag(XMLOutput output) throws Exception { Project project = getAntProject(); String tagName = getTagName(); if ( project.getTaskDefinitions().containsKey( tagName ) ) { // check if manifest is contained within a jar // special handling for Ant 1.5 manifest which is a task // but can also be contained within the jar task // There has got to be a better way but I couldn't find it if(tagName.equals(ANT_MANIFEST_TAG)) { TaskSource ancestor = (TaskSource) findAncestorWithClass( TaskSource.class ); if ( ancestor != null ) { Object nested = null; Object parentObject = null; parentObject = ancestor.getTaskObject(); nested = createNestedObject( parentObject, tagName ); // now lets invoke the body String body = getBodyText(); // now lets set any attributes of this tag... setBeanProperties(); // now lets add it to its parent if ( parentObject != null) { IntrospectionHelper ih = IntrospectionHelper.getHelper( parentObject.getClass() ); try { ih.storeElement( project, parentObject, nested, tagName ); } catch (Exception e) { //log.warn( "Caught exception setting nested: " + tagName, e ); } } return; } } // the following algorithm follows the lifetime of a tag // http://jakarta.apache.org/ant/manual/develop.html#writingowntask // kindly recommended by Stefan Bodewig // create and set its project reference task = createTask( tagName ); if ( task instanceof TaskAdapter ) { setObject( ((TaskAdapter)task).getProxy() ); } else { setObject( task ); } // set the task ID if one is given Object id = getAttributes().remove( "id" ); if ( id != null ) { project.addReference( (String) id, task ); } // ### we might want to spoof a Target setting here // now lets initialize task.init(); // now lets invoke the body to call all the createXXX() or addXXX() methods String body = getBodyText(); // now lets set any attributes of this tag... 
setBeanProperties(); // now lets set the addText() of the body content, if its applicaable Method method = MethodUtils.getAccessibleMethod( task.getClass(), "addText", addTaskParamTypes ); if (method != null) { String text = getBodyText(); Object[] args = { text }; method.invoke(this.task, args); } // now lets set all the attributes of the child elements // XXXX: to do! // now we're ready to invoke the task // XXX: should we call execute() or perform()? task.perform(); } else { Object nested = null; Object parentObject = null; // must be a datatype. TaskSource ancestor = (TaskSource) findAncestorWithClass( TaskSource.class ); if ( ancestor != null ) { parentObject = ancestor.getTaskObject(); nested = createNestedObject( parentObject, tagName ); } if ( nested == null ) { nested = createDataType( tagName ); } if ( nested != null ) { setObject( nested ); // set the task ID if one is given Object id = getAttributes().remove( "id" ); if ( id != null ) { project.addReference( (String) id, nested ); } try{ PropertyUtils.setProperty( nested, "name", tagName ); } catch (Exception e) { } } // now lets invoke the body String body = getBodyText(); // now lets set any attributes of this tag... setBeanProperties(); // now lets add it to its parent if ( parentObject != null ) { IntrospectionHelper ih = IntrospectionHelper.getHelper( parentObject.getClass() ); try { ih.storeElement( project, parentObject, nested, tagName ); } catch (Exception e) { //log.warn( "Caught exception setting nested: " + tagName, e ); } } } }
|
public void doTag(XMLOutput output) throws Exception { Project project = getAntProject(); String tagName = getTagName(); if ( project.getTaskDefinitions().containsKey( tagName ) ) { // check if manifest is contained within a jar // special handling for Ant 1.5 manifest which is a task // but can also be contained within the jar task // There has got to be a better way but I couldn't find it if(tagName.equals(ANT_MANIFEST_TAG)) { TaskSource ancestor = (TaskSource) findAncestorWithClass( TaskSource.class ); if ( ancestor != null ) { Object nested = null; Object parentObject = null; parentObject = ancestor.getTaskObject(); nested = createNestedObject( parentObject, tagName ); // now lets invoke the body String body = getBodyText(); // now lets set any attributes of this tag... setBeanProperties(); // now lets add it to its parent if ( parentObject != null) { IntrospectionHelper ih = IntrospectionHelper.getHelper( parentObject.getClass() ); try { ih.storeElement( project, parentObject, nested, tagName ); } catch (Exception e) { //log.warn( "Caught exception setting nested: " + tagName, e ); } } return; } } // the following algorithm follows the lifetime of a tag // http://jakarta.apache.org/ant/manual/develop.html#writingowntask // kindly recommended by Stefan Bodewig // create and set its project reference task = createTask( tagName ); if ( task instanceof TaskAdapter ) { setObject( ((TaskAdapter)task).getProxy() ); } else { setObject( task ); } // set the task ID if one is given Object id = getAttributes().remove( "id" ); if ( id != null ) { project.addReference( (String) id, task ); } // ### we might want to spoof a Target setting here // now lets initialize task.init(); // now lets invoke the body to call all the createXXX() or addXXX() methods String body = getBodyText(); // now lets set any attributes of this tag... 
setBeanProperties(); // now lets set the addText() of the body content, if its applicaable Method method = MethodUtils.getAccessibleMethod( task.getClass(), "addText", addTaskParamTypes ); if (method != null) { String text = getBodyText(); Object[] args = { text }; method.invoke(this.task, args); } // now lets set all the attributes of the child elements // XXXX: to do! // now we're ready to invoke the task // XXX: should we call execute() or perform()? task.perform(); } else { Object nested = null; Object parentObject = null; // must be a datatype. TaskSource ancestor = (TaskSource) findAncestorWithClass( TaskSource.class ); if ( ancestor != null ) { parentObject = ancestor.getTaskObject(); nested = createNestedObject( parentObject, tagName ); } if ( nested == null ) { nested = createDataType( tagName ); } if ( nested != null ) { setObject( nested ); // set the task ID if one is given Object id = getAttributes().remove( "id" ); if ( id != null ) { project.addReference( (String) id, nested ); } try{ PropertyUtils.setProperty( nested, "name", tagName ); } catch (Exception e) { } } // now lets invoke the body String body = getBodyText(); // now lets set any attributes of this tag... setBeanProperties(); // now lets add it to its parent if ( parentObject != null ) { IntrospectionHelper ih = IntrospectionHelper.getHelper( parentObject.getClass() ); try { ih.storeElement( project, parentObject, nested, tagName ); } catch (Exception e) { //log.warn( "Caught exception setting nested: " + tagName, e ); } } } }
| 1,109,552
|
public void doTag(XMLOutput output) throws Exception { Project project = getAntProject(); String tagName = getTagName(); if ( project.getTaskDefinitions().containsKey( tagName ) ) { // check if manifest is contained within a jar // special handling for Ant 1.5 manifest which is a task // but can also be contained within the jar task // There has got to be a better way but I couldn't find it if(tagName.equals(ANT_MANIFEST_TAG)) { TaskSource ancestor = (TaskSource) findAncestorWithClass( TaskSource.class ); if ( ancestor != null ) { Object nested = null; Object parentObject = null; parentObject = ancestor.getTaskObject(); nested = createNestedObject( parentObject, tagName ); // now lets invoke the body String body = getBodyText(); // now lets set any attributes of this tag... setBeanProperties(); // now lets add it to its parent if ( parentObject != null) { IntrospectionHelper ih = IntrospectionHelper.getHelper( parentObject.getClass() ); try { ih.storeElement( project, parentObject, nested, tagName ); } catch (Exception e) { //log.warn( "Caught exception setting nested: " + tagName, e ); } } return; } } // the following algorithm follows the lifetime of a tag // http://jakarta.apache.org/ant/manual/develop.html#writingowntask // kindly recommended by Stefan Bodewig // create and set its project reference task = createTask( tagName ); if ( task instanceof TaskAdapter ) { setObject( ((TaskAdapter)task).getProxy() ); } else { setObject( task ); } // set the task ID if one is given Object id = getAttributes().remove( "id" ); if ( id != null ) { project.addReference( (String) id, task ); } // ### we might want to spoof a Target setting here // now lets initialize task.init(); // now lets invoke the body to call all the createXXX() or addXXX() methods String body = getBodyText(); // now lets set any attributes of this tag... 
setBeanProperties(); // now lets set the addText() of the body content, if its applicaable Method method = MethodUtils.getAccessibleMethod( task.getClass(), "addText", addTaskParamTypes ); if (method != null) { String text = getBodyText(); Object[] args = { text }; method.invoke(this.task, args); } // now lets set all the attributes of the child elements // XXXX: to do! // now we're ready to invoke the task // XXX: should we call execute() or perform()? task.perform(); } else { Object nested = null; Object parentObject = null; // must be a datatype. TaskSource ancestor = (TaskSource) findAncestorWithClass( TaskSource.class ); if ( ancestor != null ) { parentObject = ancestor.getTaskObject(); nested = createNestedObject( parentObject, tagName ); } if ( nested == null ) { nested = createDataType( tagName ); } if ( nested != null ) { setObject( nested ); // set the task ID if one is given Object id = getAttributes().remove( "id" ); if ( id != null ) { project.addReference( (String) id, nested ); } try{ PropertyUtils.setProperty( nested, "name", tagName ); } catch (Exception e) { } } // now lets invoke the body String body = getBodyText(); // now lets set any attributes of this tag... setBeanProperties(); // now lets add it to its parent if ( parentObject != null ) { IntrospectionHelper ih = IntrospectionHelper.getHelper( parentObject.getClass() ); try { ih.storeElement( project, parentObject, nested, tagName ); } catch (Exception e) { //log.warn( "Caught exception setting nested: " + tagName, e ); } } } }
|
public void doTag(XMLOutput output) throws Exception { Project project = getAntProject(); String tagName = getTagName(); if ( project.getTaskDefinitions().containsKey( tagName ) ) { // check if manifest is contained within a jar // special handling for Ant 1.5 manifest which is a task // but can also be contained within the jar task // There has got to be a better way but I couldn't find it if(tagName.equals(ANT_MANIFEST_TAG)) { TaskSource ancestor = (TaskSource) findAncestorWithClass( TaskSource.class ); if ( ancestor != null ) { Object nested = null; Object parentObject = null; parentObject = ancestor.getTaskObject(); nested = createNestedObject( parentObject, tagName ); // now lets invoke the body String body = getBodyText(); // now lets set any attributes of this tag... setBeanProperties(); // now lets add it to its parent if ( parentObject != null) { IntrospectionHelper ih = IntrospectionHelper.getHelper( parentObject.getClass() ); try { ih.storeElement( project, parentObject, nested, tagName ); } catch (Exception e) { //log.warn( "Caught exception setting nested: " + tagName, e ); } } return; } } // the following algorithm follows the lifetime of a tag // http://jakarta.apache.org/ant/manual/develop.html#writingowntask // kindly recommended by Stefan Bodewig // create and set its project reference task = createTask( tagName ); if ( task instanceof TaskAdapter ) { setObject( ((TaskAdapter)task).getProxy() ); } else { setObject( task ); } // set the task ID if one is given Object id = getAttributes().remove( "id" ); if ( id != null ) { project.addReference( (String) id, task ); } // ### we might want to spoof a Target setting here // now lets initialize task.init(); // now lets invoke the body to call all the createXXX() or addXXX() methods String body = getBodyText(); // now lets set any attributes of this tag... 
setBeanProperties(); // now lets set the addText() of the body content, if its applicaable Method method = MethodUtils.getAccessibleMethod( task.getClass(), "addText", addTaskParamTypes ); if (method != null) { String text = getBodyText(); Object[] args = { text }; method.invoke(this.task, args); } // now lets set all the attributes of the child elements // XXXX: to do! // now we're ready to invoke the task // XXX: should we call execute() or perform()? task.perform(); } else { Object nested = null; Object parentObject = null; // must be a datatype. TaskSource ancestor = (TaskSource) findAncestorWithClass( TaskSource.class ); if ( ancestor != null ) { parentObject = ancestor.getTaskObject(); nested = createNestedObject( parentObject, tagName ); } if ( nested == null ) { nested = createDataType( tagName ); } if ( nested != null ) { setObject( nested ); // set the task ID if one is given Object id = getAttributes().remove( "id" ); if ( id != null ) { project.addReference( (String) id, nested ); } try{ PropertyUtils.setProperty( nested, "name", tagName ); } catch (Exception e) { } } // now lets invoke the body String body = getBodyText(); // now lets set any attributes of this tag... setBeanProperties(); // now lets add it to its parent if ( parentObject != null ) { IntrospectionHelper ih = IntrospectionHelper.getHelper( parentObject.getClass() ); try { ih.storeElement( project, parentObject, nested, tagName ); } catch (Exception e) { //log.warn( "Caught exception setting nested: " + tagName, e ); } } } }
| 1,109,553
|
public void doTag(XMLOutput output) throws Exception { Project project = getAntProject(); String tagName = getTagName(); if ( project.getTaskDefinitions().containsKey( tagName ) ) { // check if manifest is contained within a jar // special handling for Ant 1.5 manifest which is a task // but can also be contained within the jar task // There has got to be a better way but I couldn't find it if(tagName.equals(ANT_MANIFEST_TAG)) { TaskSource ancestor = (TaskSource) findAncestorWithClass( TaskSource.class ); if ( ancestor != null ) { Object nested = null; Object parentObject = null; parentObject = ancestor.getTaskObject(); nested = createNestedObject( parentObject, tagName ); // now lets invoke the body String body = getBodyText(); // now lets set any attributes of this tag... setBeanProperties(); // now lets add it to its parent if ( parentObject != null) { IntrospectionHelper ih = IntrospectionHelper.getHelper( parentObject.getClass() ); try { ih.storeElement( project, parentObject, nested, tagName ); } catch (Exception e) { //log.warn( "Caught exception setting nested: " + tagName, e ); } } return; } } // the following algorithm follows the lifetime of a tag // http://jakarta.apache.org/ant/manual/develop.html#writingowntask // kindly recommended by Stefan Bodewig // create and set its project reference task = createTask( tagName ); if ( task instanceof TaskAdapter ) { setObject( ((TaskAdapter)task).getProxy() ); } else { setObject( task ); } // set the task ID if one is given Object id = getAttributes().remove( "id" ); if ( id != null ) { project.addReference( (String) id, task ); } // ### we might want to spoof a Target setting here // now lets initialize task.init(); // now lets invoke the body to call all the createXXX() or addXXX() methods String body = getBodyText(); // now lets set any attributes of this tag... 
setBeanProperties(); // now lets set the addText() of the body content, if its applicaable Method method = MethodUtils.getAccessibleMethod( task.getClass(), "addText", addTaskParamTypes ); if (method != null) { String text = getBodyText(); Object[] args = { text }; method.invoke(this.task, args); } // now lets set all the attributes of the child elements // XXXX: to do! // now we're ready to invoke the task // XXX: should we call execute() or perform()? task.perform(); } else { Object nested = null; Object parentObject = null; // must be a datatype. TaskSource ancestor = (TaskSource) findAncestorWithClass( TaskSource.class ); if ( ancestor != null ) { parentObject = ancestor.getTaskObject(); nested = createNestedObject( parentObject, tagName ); } if ( nested == null ) { nested = createDataType( tagName ); } if ( nested != null ) { setObject( nested ); // set the task ID if one is given Object id = getAttributes().remove( "id" ); if ( id != null ) { project.addReference( (String) id, nested ); } try{ PropertyUtils.setProperty( nested, "name", tagName ); } catch (Exception e) { } } // now lets invoke the body String body = getBodyText(); // now lets set any attributes of this tag... setBeanProperties(); // now lets add it to its parent if ( parentObject != null ) { IntrospectionHelper ih = IntrospectionHelper.getHelper( parentObject.getClass() ); try { ih.storeElement( project, parentObject, nested, tagName ); } catch (Exception e) { //log.warn( "Caught exception setting nested: " + tagName, e ); } } } }
|
public void doTag(XMLOutput output) throws Exception { Project project = getAntProject(); String tagName = getTagName(); if ( project.getTaskDefinitions().containsKey( tagName ) ) { // check if manifest is contained within a jar // special handling for Ant 1.5 manifest which is a task // but can also be contained within the jar task // There has got to be a better way but I couldn't find it if(tagName.equals(ANT_MANIFEST_TAG)) { TaskSource ancestor = (TaskSource) findAncestorWithClass( TaskSource.class ); if ( ancestor != null ) { Object nested = null; Object parentObject = null; parentObject = ancestor.getTaskObject(); nested = createNestedObject( parentObject, tagName ); // now lets invoke the body String body = getBodyText(); // now lets set any attributes of this tag... setBeanProperties(); // now lets add it to its parent if ( parentObject != null) { IntrospectionHelper ih = IntrospectionHelper.getHelper( parentObject.getClass() ); try { ih.storeElement( project, parentObject, nested, tagName ); } catch (Exception e) { //log.warn( "Caught exception setting nested: " + tagName, e ); } } return; } } // the following algorithm follows the lifetime of a tag // http://jakarta.apache.org/ant/manual/develop.html#writingowntask // kindly recommended by Stefan Bodewig // create and set its project reference task = createTask( tagName ); if ( task instanceof TaskAdapter ) { setObject( ((TaskAdapter)task).getProxy() ); } else { setObject( task ); } // set the task ID if one is given Object id = getAttributes().remove( "id" ); if ( id != null ) { project.addReference( (String) id, task ); } // ### we might want to spoof a Target setting here // now lets initialize task.init(); // now lets invoke the body to call all the createXXX() or addXXX() methods String body = getBodyText(); // now lets set any attributes of this tag... 
setBeanProperties(); // now lets set the addText() of the body content, if its applicaable Method method = MethodUtils.getAccessibleMethod( task.getClass(), "addText", addTaskParamTypes ); if (method != null) { String text = getBodyText(); Object[] args = { text }; method.invoke(this.task, args); } // now lets set all the attributes of the child elements // XXXX: to do! // now we're ready to invoke the task // XXX: should we call execute() or perform()? task.perform(); } else { Object nested = null; Object parentObject = null; // must be a datatype. TaskSource ancestor = (TaskSource) findAncestorWithClass( TaskSource.class ); if ( ancestor != null ) { parentObject = ancestor.getTaskObject(); nested = createNestedObject( parentObject, tagName ); } if ( nested == null ) { nested = createDataType( tagName ); } if ( nested != null ) { setObject( nested ); // set the task ID if one is given Object id = getAttributes().remove( "id" ); if ( id != null ) { project.addReference( (String) id, nested ); } try{ PropertyUtils.setProperty( nested, "name", tagName ); } catch (Exception e) { } } // now lets invoke the body String body = getBodyText(); // now lets set any attributes of this tag... setBeanProperties(); // now lets add it to its parent if ( parentObject != null ) { IntrospectionHelper ih = IntrospectionHelper.getHelper( parentObject.getClass() ); try { ih.storeElement( project, parentObject, nested, tagName ); } catch (Exception e) { //log.warn( "Caught exception setting nested: " + tagName, e ); } } } }
| 1,109,554
|
public HaploView(){ Options.setMissingThreshold(0.4); try{ fc = new JFileChooser(System.getProperty("user.dir")); }catch(NullPointerException n){ try{ UIManager.setLookAndFeel(UIManager.getCrossPlatformLookAndFeelClassName()); fc = new JFileChooser(System.getProperty("user.dir")); UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); }catch(Exception e){ } } //menu setup JMenuBar menuBar = new JMenuBar(); setJMenuBar(menuBar); JMenuItem menuItem; //file menu JMenu fileMenu = new JMenu("File"); menuBar.add(fileMenu); menuItem = new JMenuItem(READ_GENOTYPES); setAccelerator(menuItem, 'O', false); menuItem.addActionListener(this); fileMenu.add(menuItem); /* viewGenotypesItem = new JMenuItem(VIEW_GENOTYPES); viewGenotypesItem.addActionListener(this); //viewGenotypesItem.setEnabled(false); fileMenu.add(viewGenotypesItem); */ readMarkerItem = new JMenuItem(READ_MARKERS); setAccelerator(readMarkerItem, 'I', false); readMarkerItem.addActionListener(this); readMarkerItem.setEnabled(false); fileMenu.add(readMarkerItem); analysisItem = new JMenuItem(READ_ANALYSIS_TRACK); setAccelerator(analysisItem, 'A', false); analysisItem.addActionListener(this); analysisItem.setEnabled(false); fileMenu.add(analysisItem); blocksItem = new JMenuItem(READ_BLOCKS_FILE); setAccelerator(blocksItem, 'B', false); blocksItem.addActionListener(this); blocksItem.setEnabled(false); fileMenu.add(blocksItem); /* viewMarkerItem = new JMenuItem(VIEW_MARKERS); viewMarkerItem.addActionListener(this); //viewMarkerItem.setEnabled(false); fileMenu.add(viewMarkerItem); */ fileMenu.addSeparator(); exportMenuItems = new JMenuItem[exportItems.length]; for (int i = 0; i < exportItems.length; i++) { exportMenuItems[i] = new JMenuItem(exportItems[i]); exportMenuItems[i].addActionListener(this); exportMenuItems[i].setEnabled(false); fileMenu.add(exportMenuItems[i]); } fileMenu.addSeparator(); fileMenu.setMnemonic(KeyEvent.VK_F); menuItem = new JMenuItem("Quit"); setAccelerator(menuItem, 'Q', false); 
menuItem.addActionListener(this); fileMenu.add(menuItem); /// display menu JMenu displayMenu = new JMenu("Display"); displayMenu.setMnemonic(KeyEvent.VK_D); menuBar.add(displayMenu); ButtonGroup group = new ButtonGroup(); viewMenuItems = new JRadioButtonMenuItem[viewItems.length]; for (int i = 0; i < viewItems.length; i++) { viewMenuItems[i] = new JRadioButtonMenuItem(viewItems[i], i == 0); viewMenuItems[i].addActionListener(this); KeyStroke ks = KeyStroke.getKeyStroke('1' + i, Toolkit.getDefaultToolkit().getMenuShortcutKeyMask()); viewMenuItems[i].setAccelerator(ks); displayMenu.add(viewMenuItems[i]); viewMenuItems[i].setEnabled(false); group.add(viewMenuItems[i]); } displayMenu.addSeparator(); //a submenu ButtonGroup zg = new ButtonGroup(); JMenu zoomMenu = new JMenu("LD zoom"); zoomMenu.setMnemonic(KeyEvent.VK_Z); zoomMenuItems = new JRadioButtonMenuItem[zoomItems.length]; for (int i = 0; i < zoomItems.length; i++){ zoomMenuItems[i] = new JRadioButtonMenuItem(zoomItems[i], i==0); zoomMenuItems[i].addActionListener(this); zoomMenuItems[i].setActionCommand("zoom" + i); zoomMenu.add(zoomMenuItems[i]); zg.add(zoomMenuItems[i]); } displayMenu.add(zoomMenu); //another submenu ButtonGroup cg = new ButtonGroup(); JMenu colorMenu = new JMenu("LD color scheme"); colorMenu.setMnemonic(KeyEvent.VK_C); colorMenuItems = new JRadioButtonMenuItem[colorItems.length]; for (int i = 0; i< colorItems.length; i++){ colorMenuItems[i] = new JRadioButtonMenuItem(colorItems[i],i==0); colorMenuItems[i].addActionListener(this); colorMenuItems[i].setActionCommand("color" + i); colorMenu.add(colorMenuItems[i]); cg.add(colorMenuItems[i]); } displayMenu.add(colorMenu); //analysis menu JMenu analysisMenu = new JMenu("Analysis"); analysisMenu.setMnemonic(KeyEvent.VK_A); menuBar.add(analysisMenu); //a submenu ButtonGroup bg = new ButtonGroup(); JMenu blockMenu = new JMenu("Define Blocks"); blockMenu.setMnemonic(KeyEvent.VK_B); blockMenuItems = new JRadioButtonMenuItem[blockItems.length]; for (int 
i = 0; i < blockItems.length; i++){ blockMenuItems[i] = new JRadioButtonMenuItem(blockItems[i], i==0); blockMenuItems[i].addActionListener(this); blockMenuItems[i].setActionCommand("block" + i); blockMenuItems[i].setEnabled(false); blockMenu.add(blockMenuItems[i]); bg.add(blockMenuItems[i]); } analysisMenu.add(blockMenu); clearBlocksItem = new JMenuItem(CLEAR_BLOCKS); setAccelerator(clearBlocksItem, 'C', false); clearBlocksItem.addActionListener(this); clearBlocksItem.setEnabled(false); analysisMenu.add(clearBlocksItem); JMenuItem customizeBlocksItem = new JMenuItem(CUST_BLOCKS); customizeBlocksItem.addActionListener(this); analysisMenu.add(customizeBlocksItem); //color key keyMenu = new JMenu("Key"); menuBar.add(Box.createHorizontalGlue()); menuBar.add(keyMenu); /** NEEDS FIXING helpMenu = new JMenu("Help"); menuBar.add(Box.createHorizontalGlue()); menuBar.add(helpMenu); menuItem = new JMenuItem("Tutorial"); menuItem.addActionListener(this); helpMenu.add(menuItem); **/ addWindowListener(new WindowAdapter() { public void windowClosing(WindowEvent e){ quit(); } }); }
|
public HaploView(){ Options.setMissingThreshold(1.0); try{ fc = new JFileChooser(System.getProperty("user.dir")); }catch(NullPointerException n){ try{ UIManager.setLookAndFeel(UIManager.getCrossPlatformLookAndFeelClassName()); fc = new JFileChooser(System.getProperty("user.dir")); UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); }catch(Exception e){ } } //menu setup JMenuBar menuBar = new JMenuBar(); setJMenuBar(menuBar); JMenuItem menuItem; //file menu JMenu fileMenu = new JMenu("File"); menuBar.add(fileMenu); menuItem = new JMenuItem(READ_GENOTYPES); setAccelerator(menuItem, 'O', false); menuItem.addActionListener(this); fileMenu.add(menuItem); /* viewGenotypesItem = new JMenuItem(VIEW_GENOTYPES); viewGenotypesItem.addActionListener(this); //viewGenotypesItem.setEnabled(false); fileMenu.add(viewGenotypesItem); */ readMarkerItem = new JMenuItem(READ_MARKERS); setAccelerator(readMarkerItem, 'I', false); readMarkerItem.addActionListener(this); readMarkerItem.setEnabled(false); fileMenu.add(readMarkerItem); analysisItem = new JMenuItem(READ_ANALYSIS_TRACK); setAccelerator(analysisItem, 'A', false); analysisItem.addActionListener(this); analysisItem.setEnabled(false); fileMenu.add(analysisItem); blocksItem = new JMenuItem(READ_BLOCKS_FILE); setAccelerator(blocksItem, 'B', false); blocksItem.addActionListener(this); blocksItem.setEnabled(false); fileMenu.add(blocksItem); /* viewMarkerItem = new JMenuItem(VIEW_MARKERS); viewMarkerItem.addActionListener(this); //viewMarkerItem.setEnabled(false); fileMenu.add(viewMarkerItem); */ fileMenu.addSeparator(); exportMenuItems = new JMenuItem[exportItems.length]; for (int i = 0; i < exportItems.length; i++) { exportMenuItems[i] = new JMenuItem(exportItems[i]); exportMenuItems[i].addActionListener(this); exportMenuItems[i].setEnabled(false); fileMenu.add(exportMenuItems[i]); } fileMenu.addSeparator(); fileMenu.setMnemonic(KeyEvent.VK_F); menuItem = new JMenuItem("Quit"); setAccelerator(menuItem, 'Q', false); 
menuItem.addActionListener(this); fileMenu.add(menuItem); /// display menu JMenu displayMenu = new JMenu("Display"); displayMenu.setMnemonic(KeyEvent.VK_D); menuBar.add(displayMenu); ButtonGroup group = new ButtonGroup(); viewMenuItems = new JRadioButtonMenuItem[viewItems.length]; for (int i = 0; i < viewItems.length; i++) { viewMenuItems[i] = new JRadioButtonMenuItem(viewItems[i], i == 0); viewMenuItems[i].addActionListener(this); KeyStroke ks = KeyStroke.getKeyStroke('1' + i, Toolkit.getDefaultToolkit().getMenuShortcutKeyMask()); viewMenuItems[i].setAccelerator(ks); displayMenu.add(viewMenuItems[i]); viewMenuItems[i].setEnabled(false); group.add(viewMenuItems[i]); } displayMenu.addSeparator(); //a submenu ButtonGroup zg = new ButtonGroup(); JMenu zoomMenu = new JMenu("LD zoom"); zoomMenu.setMnemonic(KeyEvent.VK_Z); zoomMenuItems = new JRadioButtonMenuItem[zoomItems.length]; for (int i = 0; i < zoomItems.length; i++){ zoomMenuItems[i] = new JRadioButtonMenuItem(zoomItems[i], i==0); zoomMenuItems[i].addActionListener(this); zoomMenuItems[i].setActionCommand("zoom" + i); zoomMenu.add(zoomMenuItems[i]); zg.add(zoomMenuItems[i]); } displayMenu.add(zoomMenu); //another submenu ButtonGroup cg = new ButtonGroup(); JMenu colorMenu = new JMenu("LD color scheme"); colorMenu.setMnemonic(KeyEvent.VK_C); colorMenuItems = new JRadioButtonMenuItem[colorItems.length]; for (int i = 0; i< colorItems.length; i++){ colorMenuItems[i] = new JRadioButtonMenuItem(colorItems[i],i==0); colorMenuItems[i].addActionListener(this); colorMenuItems[i].setActionCommand("color" + i); colorMenu.add(colorMenuItems[i]); cg.add(colorMenuItems[i]); } displayMenu.add(colorMenu); //analysis menu JMenu analysisMenu = new JMenu("Analysis"); analysisMenu.setMnemonic(KeyEvent.VK_A); menuBar.add(analysisMenu); //a submenu ButtonGroup bg = new ButtonGroup(); JMenu blockMenu = new JMenu("Define Blocks"); blockMenu.setMnemonic(KeyEvent.VK_B); blockMenuItems = new JRadioButtonMenuItem[blockItems.length]; for (int 
i = 0; i < blockItems.length; i++){ blockMenuItems[i] = new JRadioButtonMenuItem(blockItems[i], i==0); blockMenuItems[i].addActionListener(this); blockMenuItems[i].setActionCommand("block" + i); blockMenuItems[i].setEnabled(false); blockMenu.add(blockMenuItems[i]); bg.add(blockMenuItems[i]); } analysisMenu.add(blockMenu); clearBlocksItem = new JMenuItem(CLEAR_BLOCKS); setAccelerator(clearBlocksItem, 'C', false); clearBlocksItem.addActionListener(this); clearBlocksItem.setEnabled(false); analysisMenu.add(clearBlocksItem); JMenuItem customizeBlocksItem = new JMenuItem(CUST_BLOCKS); customizeBlocksItem.addActionListener(this); analysisMenu.add(customizeBlocksItem); //color key keyMenu = new JMenu("Key"); menuBar.add(Box.createHorizontalGlue()); menuBar.add(keyMenu); /** NEEDS FIXING helpMenu = new JMenu("Help"); menuBar.add(Box.createHorizontalGlue()); menuBar.add(helpMenu); menuItem = new JMenuItem("Tutorial"); menuItem.addActionListener(this); helpMenu.add(menuItem); **/ addWindowListener(new WindowAdapter() { public void windowClosing(WindowEvent e){ quit(); } }); }
| 1,109,558
|
void readGenotypes(String[] inputOptions, int type){ //input is a 3 element array with //inputOptions[0] = ped file //inputOptions[1] = info file ("" if none) //inputOptions[2] = max comparison distance (don't compute d' if markers are greater than this dist apart) //type is either 3 or 4 for ped and hapmap files respectively final File inFile = new File(inputOptions[0]); //deal with max comparison distance if (inputOptions[2].equals("")){ inputOptions[2] = "0"; } maxCompDist = Long.parseLong(inputOptions[2])*1000; try { if (inFile.length() < 1){ throw new HaploViewException("Genotype file is empty or nonexistent: " + inFile.getName()); } if (type == HAPS){ //these are not available for non ped files viewMenuItems[VIEW_CHECK_NUM].setEnabled(false); viewMenuItems[VIEW_TDT_NUM].setEnabled(false); assocTest = 0; } theData = new HaploData(assocTest); if (type == HAPS){ theData.prepareHapsInput(new File(inputOptions[0])); }else{ theData.linkageToChrom(inFile, type); } //deal with marker information theData.infoKnown = false; File markerFile; if (inputOptions[1].equals("")){ markerFile = null; }else{ markerFile = new File(inputOptions[1]); } checkPanel = null; if (type == HAPS){ readMarkers(markerFile, null); }else{ readMarkers(markerFile, theData.getPedFile().getHMInfo()); checkPanel = new CheckDataPanel(theData, true); checkPanel.setAlignmentX(Component.CENTER_ALIGNMENT); } //let's start the math this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); final SwingWorker worker = new SwingWorker(){ public Object construct(){ dPrimeDisplay=null; changeKey(1); theData.generateDPrimeTable(); theData.guessBlocks(BLOX_GABRIEL); //theData.guessBlocks(BLOX_NONE); //for debugging, doesn't call blocks at first colorMenuItems[0].setSelected(true); blockMenuItems[0].setSelected(true); zoomMenuItems[0].setSelected(true); theData.blocksChanged = false; Container contents = getContentPane(); contents.removeAll(); int currentTab = VIEW_D_NUM; /*if (!(tabs == null)){ currentTab 
= tabs.getSelectedIndex(); } */ tabs = new JTabbedPane(); tabs.addChangeListener(new TabChangeListener()); //first, draw the D' picture JPanel panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); dPrimeDisplay = new DPrimeDisplay(window); JScrollPane dPrimeScroller = new JScrollPane(dPrimeDisplay); dPrimeScroller.getViewport().setScrollMode(JViewport.BLIT_SCROLL_MODE); dPrimeScroller.getVerticalScrollBar().setUnitIncrement(60); dPrimeScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(dPrimeScroller); tabs.addTab(viewItems[VIEW_D_NUM], panel); viewMenuItems[VIEW_D_NUM].setEnabled(true); //compute and show haps on next tab panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); try { hapDisplay = new HaplotypeDisplay(theData); } catch(HaploViewException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } HaplotypeDisplayController hdc = new HaplotypeDisplayController(hapDisplay); hapScroller = new JScrollPane(hapDisplay); hapScroller.getVerticalScrollBar().setUnitIncrement(60); hapScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(hapScroller); panel.add(hdc); tabs.addTab(viewItems[VIEW_HAP_NUM], panel); viewMenuItems[VIEW_HAP_NUM].setEnabled(true); //LOD panel /*panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); LODDisplay ld = new LODDisplay(theData); JScrollPane lodScroller = new JScrollPane(ld); panel.add(lodScroller); tabs.addTab(viewItems[VIEW_LOD_NUM], panel); viewMenuItems[VIEW_LOD_NUM].setEnabled(true);*/ //int optionalTabCount = 1; //check data panel if (checkPanel != null){ //optionalTabCount++; //VIEW_CHECK_NUM = optionalTabCount; //viewItems[VIEW_CHECK_NUM] = VIEW_CHECK_PANEL; JPanel metaCheckPanel = new JPanel(); metaCheckPanel.setLayout(new BoxLayout(metaCheckPanel, BoxLayout.Y_AXIS)); JLabel countsLabel = new JLabel("Using " + theData.numSingletons + " singletons and " + theData.numTrios + " trios 
from " + theData.numPeds + " families."); if (theData.numTrios + theData.numSingletons == 0){ countsLabel.setForeground(Color.red); } countsLabel.setAlignmentX(Component.CENTER_ALIGNMENT); metaCheckPanel.add(countsLabel); metaCheckPanel.add(checkPanel); cdc = new CheckDataController(window); metaCheckPanel.add(cdc); tabs.addTab(viewItems[VIEW_CHECK_NUM], metaCheckPanel); viewMenuItems[VIEW_CHECK_NUM].setEnabled(true); currentTab=VIEW_CHECK_NUM; } //TDT panel if(assocTest > 0) { //optionalTabCount++; //VIEW_TDT_NUM = optionalTabCount; //viewItems[VIEW_TDT_NUM] = VIEW_TDT; tdtPanel = new TDTPanel(theData.chromosomes, assocTest); tabs.addTab(viewItems[VIEW_TDT_NUM], tdtPanel); viewMenuItems[VIEW_TDT_NUM].setEnabled(true); } tabs.setSelectedIndex(currentTab); contents.add(tabs); repaint(); setVisible(true); theData.finished = true; setTitle(TITLE_STRING + " -- " + inFile.getName()); return null; } }; timer = new javax.swing.Timer(50, new ActionListener(){ public void actionPerformed(ActionEvent evt){ if (theData.finished){ timer.stop(); for (int i = 0; i < blockMenuItems.length; i++){ blockMenuItems[i].setEnabled(true); } clearBlocksItem.setEnabled(true); readMarkerItem.setEnabled(true); blocksItem.setEnabled(true); exportMenuItems[2].setEnabled(true); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } } }); worker.start(); timer.start(); }catch(IOException ioexec) { JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } catch(PedFileException pfe){ JOptionPane.showMessageDialog(this, pfe.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); }catch (HaploViewException hve){ JOptionPane.showMessageDialog(this, hve.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } }
|
void readGenotypes(String[] inputOptions, int type){ //input is a 3 element array with //inputOptions[0] = ped file //inputOptions[1] = info file ("" if none) //inputOptions[2] = max comparison distance (don't compute d' if markers are greater than this dist apart) //type is either 3 or 4 for ped and hapmap files respectively final File inFile = new File(inputOptions[0]); //deal with max comparison distance if (inputOptions[2].equals("")){ inputOptions[2] = "0"; } maxCompDist = Long.parseLong(inputOptions[2])*1000; try { if (inFile.length() < 1){ throw new HaploViewException("Genotype file is empty or nonexistent: " + inFile.getName()); } if (type == HAPS){ //these are not available for non ped files viewMenuItems[VIEW_CHECK_NUM].setEnabled(false); viewMenuItems[VIEW_TDT_NUM].setEnabled(false); assocTest = 0; } theData = new HaploData(); if (type == HAPS){ theData.prepareHapsInput(new File(inputOptions[0])); }else{ theData.linkageToChrom(inFile, type); } //deal with marker information theData.infoKnown = false; File markerFile; if (inputOptions[1].equals("")){ markerFile = null; }else{ markerFile = new File(inputOptions[1]); } checkPanel = null; if (type == HAPS){ readMarkers(markerFile, null); }else{ readMarkers(markerFile, theData.getPedFile().getHMInfo()); checkPanel = new CheckDataPanel(theData, true); checkPanel.setAlignmentX(Component.CENTER_ALIGNMENT); } //let's start the math this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); final SwingWorker worker = new SwingWorker(){ public Object construct(){ dPrimeDisplay=null; changeKey(1); theData.generateDPrimeTable(); theData.guessBlocks(BLOX_GABRIEL); //theData.guessBlocks(BLOX_NONE); //for debugging, doesn't call blocks at first colorMenuItems[0].setSelected(true); blockMenuItems[0].setSelected(true); zoomMenuItems[0].setSelected(true); theData.blocksChanged = false; Container contents = getContentPane(); contents.removeAll(); int currentTab = VIEW_D_NUM; /*if (!(tabs == null)){ currentTab = 
tabs.getSelectedIndex(); } */ tabs = new JTabbedPane(); tabs.addChangeListener(new TabChangeListener()); //first, draw the D' picture JPanel panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); dPrimeDisplay = new DPrimeDisplay(window); JScrollPane dPrimeScroller = new JScrollPane(dPrimeDisplay); dPrimeScroller.getViewport().setScrollMode(JViewport.BLIT_SCROLL_MODE); dPrimeScroller.getVerticalScrollBar().setUnitIncrement(60); dPrimeScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(dPrimeScroller); tabs.addTab(viewItems[VIEW_D_NUM], panel); viewMenuItems[VIEW_D_NUM].setEnabled(true); //compute and show haps on next tab panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); try { hapDisplay = new HaplotypeDisplay(theData); } catch(HaploViewException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } HaplotypeDisplayController hdc = new HaplotypeDisplayController(hapDisplay); hapScroller = new JScrollPane(hapDisplay); hapScroller.getVerticalScrollBar().setUnitIncrement(60); hapScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(hapScroller); panel.add(hdc); tabs.addTab(viewItems[VIEW_HAP_NUM], panel); viewMenuItems[VIEW_HAP_NUM].setEnabled(true); //LOD panel /*panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); LODDisplay ld = new LODDisplay(theData); JScrollPane lodScroller = new JScrollPane(ld); panel.add(lodScroller); tabs.addTab(viewItems[VIEW_LOD_NUM], panel); viewMenuItems[VIEW_LOD_NUM].setEnabled(true);*/ //int optionalTabCount = 1; //check data panel if (checkPanel != null){ //optionalTabCount++; //VIEW_CHECK_NUM = optionalTabCount; //viewItems[VIEW_CHECK_NUM] = VIEW_CHECK_PANEL; JPanel metaCheckPanel = new JPanel(); metaCheckPanel.setLayout(new BoxLayout(metaCheckPanel, BoxLayout.Y_AXIS)); JLabel countsLabel = new JLabel("Using " + theData.numSingletons + " singletons and " + theData.numTrios + " trios from 
" + theData.numPeds + " families."); if (theData.numTrios + theData.numSingletons == 0){ countsLabel.setForeground(Color.red); } countsLabel.setAlignmentX(Component.CENTER_ALIGNMENT); metaCheckPanel.add(countsLabel); metaCheckPanel.add(checkPanel); cdc = new CheckDataController(window); metaCheckPanel.add(cdc); tabs.addTab(viewItems[VIEW_CHECK_NUM], metaCheckPanel); viewMenuItems[VIEW_CHECK_NUM].setEnabled(true); currentTab=VIEW_CHECK_NUM; } //TDT panel if(assocTest > 0) { //optionalTabCount++; //VIEW_TDT_NUM = optionalTabCount; //viewItems[VIEW_TDT_NUM] = VIEW_TDT; tdtPanel = new TDTPanel(theData.chromosomes, assocTest); tabs.addTab(viewItems[VIEW_TDT_NUM], tdtPanel); viewMenuItems[VIEW_TDT_NUM].setEnabled(true); } tabs.setSelectedIndex(currentTab); contents.add(tabs); repaint(); setVisible(true); theData.finished = true; setTitle(TITLE_STRING + " -- " + inFile.getName()); return null; } }; timer = new javax.swing.Timer(50, new ActionListener(){ public void actionPerformed(ActionEvent evt){ if (theData.finished){ timer.stop(); for (int i = 0; i < blockMenuItems.length; i++){ blockMenuItems[i].setEnabled(true); } clearBlocksItem.setEnabled(true); readMarkerItem.setEnabled(true); blocksItem.setEnabled(true); exportMenuItems[2].setEnabled(true); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } } }); worker.start(); timer.start(); }catch(IOException ioexec) { JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } catch(PedFileException pfe){ JOptionPane.showMessageDialog(this, pfe.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); }catch (HaploViewException hve){ JOptionPane.showMessageDialog(this, hve.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } }
| 1,109,559
|
void readGenotypes(String[] inputOptions, int type){ //input is a 3 element array with //inputOptions[0] = ped file //inputOptions[1] = info file ("" if none) //inputOptions[2] = max comparison distance (don't compute d' if markers are greater than this dist apart) //type is either 3 or 4 for ped and hapmap files respectively final File inFile = new File(inputOptions[0]); //deal with max comparison distance if (inputOptions[2].equals("")){ inputOptions[2] = "0"; } maxCompDist = Long.parseLong(inputOptions[2])*1000; try { if (inFile.length() < 1){ throw new HaploViewException("Genotype file is empty or nonexistent: " + inFile.getName()); } if (type == HAPS){ //these are not available for non ped files viewMenuItems[VIEW_CHECK_NUM].setEnabled(false); viewMenuItems[VIEW_TDT_NUM].setEnabled(false); assocTest = 0; } theData = new HaploData(assocTest); if (type == HAPS){ theData.prepareHapsInput(new File(inputOptions[0])); }else{ theData.linkageToChrom(inFile, type); } //deal with marker information theData.infoKnown = false; File markerFile; if (inputOptions[1].equals("")){ markerFile = null; }else{ markerFile = new File(inputOptions[1]); } checkPanel = null; if (type == HAPS){ readMarkers(markerFile, null); }else{ readMarkers(markerFile, theData.getPedFile().getHMInfo()); checkPanel = new CheckDataPanel(theData, true); checkPanel.setAlignmentX(Component.CENTER_ALIGNMENT); } //let's start the math this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); final SwingWorker worker = new SwingWorker(){ public Object construct(){ dPrimeDisplay=null; changeKey(1); theData.generateDPrimeTable(); theData.guessBlocks(BLOX_GABRIEL); //theData.guessBlocks(BLOX_NONE); //for debugging, doesn't call blocks at first colorMenuItems[0].setSelected(true); blockMenuItems[0].setSelected(true); zoomMenuItems[0].setSelected(true); theData.blocksChanged = false; Container contents = getContentPane(); contents.removeAll(); int currentTab = VIEW_D_NUM; /*if (!(tabs == null)){ currentTab 
= tabs.getSelectedIndex(); } */ tabs = new JTabbedPane(); tabs.addChangeListener(new TabChangeListener()); //first, draw the D' picture JPanel panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); dPrimeDisplay = new DPrimeDisplay(window); JScrollPane dPrimeScroller = new JScrollPane(dPrimeDisplay); dPrimeScroller.getViewport().setScrollMode(JViewport.BLIT_SCROLL_MODE); dPrimeScroller.getVerticalScrollBar().setUnitIncrement(60); dPrimeScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(dPrimeScroller); tabs.addTab(viewItems[VIEW_D_NUM], panel); viewMenuItems[VIEW_D_NUM].setEnabled(true); //compute and show haps on next tab panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); try { hapDisplay = new HaplotypeDisplay(theData); } catch(HaploViewException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } HaplotypeDisplayController hdc = new HaplotypeDisplayController(hapDisplay); hapScroller = new JScrollPane(hapDisplay); hapScroller.getVerticalScrollBar().setUnitIncrement(60); hapScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(hapScroller); panel.add(hdc); tabs.addTab(viewItems[VIEW_HAP_NUM], panel); viewMenuItems[VIEW_HAP_NUM].setEnabled(true); //LOD panel /*panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); LODDisplay ld = new LODDisplay(theData); JScrollPane lodScroller = new JScrollPane(ld); panel.add(lodScroller); tabs.addTab(viewItems[VIEW_LOD_NUM], panel); viewMenuItems[VIEW_LOD_NUM].setEnabled(true);*/ //int optionalTabCount = 1; //check data panel if (checkPanel != null){ //optionalTabCount++; //VIEW_CHECK_NUM = optionalTabCount; //viewItems[VIEW_CHECK_NUM] = VIEW_CHECK_PANEL; JPanel metaCheckPanel = new JPanel(); metaCheckPanel.setLayout(new BoxLayout(metaCheckPanel, BoxLayout.Y_AXIS)); JLabel countsLabel = new JLabel("Using " + theData.numSingletons + " singletons and " + theData.numTrios + " trios 
from " + theData.numPeds + " families."); if (theData.numTrios + theData.numSingletons == 0){ countsLabel.setForeground(Color.red); } countsLabel.setAlignmentX(Component.CENTER_ALIGNMENT); metaCheckPanel.add(countsLabel); metaCheckPanel.add(checkPanel); cdc = new CheckDataController(window); metaCheckPanel.add(cdc); tabs.addTab(viewItems[VIEW_CHECK_NUM], metaCheckPanel); viewMenuItems[VIEW_CHECK_NUM].setEnabled(true); currentTab=VIEW_CHECK_NUM; } //TDT panel if(assocTest > 0) { //optionalTabCount++; //VIEW_TDT_NUM = optionalTabCount; //viewItems[VIEW_TDT_NUM] = VIEW_TDT; tdtPanel = new TDTPanel(theData.chromosomes, assocTest); tabs.addTab(viewItems[VIEW_TDT_NUM], tdtPanel); viewMenuItems[VIEW_TDT_NUM].setEnabled(true); } tabs.setSelectedIndex(currentTab); contents.add(tabs); repaint(); setVisible(true); theData.finished = true; setTitle(TITLE_STRING + " -- " + inFile.getName()); return null; } }; timer = new javax.swing.Timer(50, new ActionListener(){ public void actionPerformed(ActionEvent evt){ if (theData.finished){ timer.stop(); for (int i = 0; i < blockMenuItems.length; i++){ blockMenuItems[i].setEnabled(true); } clearBlocksItem.setEnabled(true); readMarkerItem.setEnabled(true); blocksItem.setEnabled(true); exportMenuItems[2].setEnabled(true); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } } }); worker.start(); timer.start(); }catch(IOException ioexec) { JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } catch(PedFileException pfe){ JOptionPane.showMessageDialog(this, pfe.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); }catch (HaploViewException hve){ JOptionPane.showMessageDialog(this, hve.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } }
|
void readGenotypes(String[] inputOptions, int type){ //input is a 3 element array with //inputOptions[0] = ped file //inputOptions[1] = info file ("" if none) //inputOptions[2] = max comparison distance (don't compute d' if markers are greater than this dist apart) //type is either 3 or 4 for ped and hapmap files respectively final File inFile = new File(inputOptions[0]); //deal with max comparison distance if (inputOptions[2].equals("")){ inputOptions[2] = "0"; } maxCompDist = Long.parseLong(inputOptions[2])*1000; try { if (inFile.length() < 1){ throw new HaploViewException("Genotype file is empty or nonexistent: " + inFile.getName()); } if (type == HAPS){ //these are not available for non ped files viewMenuItems[VIEW_CHECK_NUM].setEnabled(false); viewMenuItems[VIEW_TDT_NUM].setEnabled(false); assocTest = 0; } theData = new HaploData(assocTest); if (type == HAPS){ theData.prepareHapsInput(new File(inputOptions[0])); }else{ theData.linkageToChrom(inFile, type); } //deal with marker information theData.infoKnown = false; File markerFile; if (inputOptions[1].equals("")){ markerFile = null; }else{ markerFile = new File(inputOptions[1]); } checkPanel = null; if (type == HAPS){ readMarkers(markerFile, null); }else{ readMarkers(markerFile, theData.getPedFile().getHMInfo()); checkPanel = new CheckDataPanel(theData, true); checkPanel.setAlignmentX(Component.CENTER_ALIGNMENT); } //let's start the math this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); final SwingWorker worker = new SwingWorker(){ public Object construct(){ dPrimeDisplay=null; changeKey(1); theData.generateDPrimeTable(); theData.guessBlocks(BLOX_GABRIEL); //theData.guessBlocks(BLOX_NONE); //for debugging, doesn't call blocks at first colorMenuItems[0].setSelected(true); blockMenuItems[0].setSelected(true); zoomMenuItems[0].setSelected(true); theData.blocksChanged = false; Container contents = getContentPane(); contents.removeAll(); int currentTab = VIEW_D_NUM; /*if (!(tabs == null)){ currentTab 
= tabs.getSelectedIndex(); } */ tabs = new JTabbedPane(); tabs.addChangeListener(new TabChangeListener()); //first, draw the D' picture JPanel panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); dPrimeDisplay = new DPrimeDisplay(window); JScrollPane dPrimeScroller = new JScrollPane(dPrimeDisplay); dPrimeScroller.getViewport().setScrollMode(JViewport.BLIT_SCROLL_MODE); dPrimeScroller.getVerticalScrollBar().setUnitIncrement(60); dPrimeScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(dPrimeScroller); tabs.addTab(viewItems[VIEW_D_NUM], panel); viewMenuItems[VIEW_D_NUM].setEnabled(true); //compute and show haps on next tab panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); try { hapDisplay = new HaplotypeDisplay(theData); } catch(HaploViewException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } HaplotypeDisplayController hdc = new HaplotypeDisplayController(hapDisplay); hapScroller = new JScrollPane(hapDisplay); hapScroller.getVerticalScrollBar().setUnitIncrement(60); hapScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(hapScroller); panel.add(hdc); tabs.addTab(viewItems[VIEW_HAP_NUM], panel); viewMenuItems[VIEW_HAP_NUM].setEnabled(true); //LOD panel /*panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); LODDisplay ld = new LODDisplay(theData); JScrollPane lodScroller = new JScrollPane(ld); panel.add(lodScroller); tabs.addTab(viewItems[VIEW_LOD_NUM], panel); viewMenuItems[VIEW_LOD_NUM].setEnabled(true);*/ //int optionalTabCount = 1; //check data panel if (checkPanel != null){ //optionalTabCount++; //VIEW_CHECK_NUM = optionalTabCount; //viewItems[VIEW_CHECK_NUM] = VIEW_CHECK_PANEL; JPanel metaCheckPanel = new JPanel(); metaCheckPanel.setLayout(new BoxLayout(metaCheckPanel, BoxLayout.Y_AXIS)); JLabel countsLabel = new JLabel("Using " + theData.numSingletons + " singletons and " + theData.numTrios + " trios 
from " + theData.numPeds + " families."); if (theData.numTrios + theData.numSingletons == 0){ countsLabel.setForeground(Color.red); } countsLabel.setAlignmentX(Component.CENTER_ALIGNMENT); metaCheckPanel.add(countsLabel); metaCheckPanel.add(checkPanel); cdc = new CheckDataController(window); metaCheckPanel.add(cdc); tabs.addTab(viewItems[VIEW_CHECK_NUM], metaCheckPanel); viewMenuItems[VIEW_CHECK_NUM].setEnabled(true); currentTab=VIEW_CHECK_NUM; } //TDT panel if(assocTest > 0) { //optionalTabCount++; //VIEW_TDT_NUM = optionalTabCount; //viewItems[VIEW_TDT_NUM] = VIEW_TDT; tdtPanel = new TDTPanel(theData.getPedFile(), assocTest); tabs.addTab(viewItems[VIEW_TDT_NUM], tdtPanel); viewMenuItems[VIEW_TDT_NUM].setEnabled(true); } tabs.setSelectedIndex(currentTab); contents.add(tabs); repaint(); setVisible(true); theData.finished = true; setTitle(TITLE_STRING + " -- " + inFile.getName()); return null; } }; timer = new javax.swing.Timer(50, new ActionListener(){ public void actionPerformed(ActionEvent evt){ if (theData.finished){ timer.stop(); for (int i = 0; i < blockMenuItems.length; i++){ blockMenuItems[i].setEnabled(true); } clearBlocksItem.setEnabled(true); readMarkerItem.setEnabled(true); blocksItem.setEnabled(true); exportMenuItems[2].setEnabled(true); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } } }); worker.start(); timer.start(); }catch(IOException ioexec) { JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } catch(PedFileException pfe){ JOptionPane.showMessageDialog(this, pfe.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); }catch (HaploViewException hve){ JOptionPane.showMessageDialog(this, hve.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } }
| 1,109,560
|
public Object construct(){ dPrimeDisplay=null; changeKey(1); theData.generateDPrimeTable(); theData.guessBlocks(BLOX_GABRIEL); //theData.guessBlocks(BLOX_NONE); //for debugging, doesn't call blocks at first colorMenuItems[0].setSelected(true); blockMenuItems[0].setSelected(true); zoomMenuItems[0].setSelected(true); theData.blocksChanged = false; Container contents = getContentPane(); contents.removeAll(); int currentTab = VIEW_D_NUM; /*if (!(tabs == null)){ currentTab = tabs.getSelectedIndex(); } */ tabs = new JTabbedPane(); tabs.addChangeListener(new TabChangeListener()); //first, draw the D' picture JPanel panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); dPrimeDisplay = new DPrimeDisplay(window); JScrollPane dPrimeScroller = new JScrollPane(dPrimeDisplay); dPrimeScroller.getViewport().setScrollMode(JViewport.BLIT_SCROLL_MODE); dPrimeScroller.getVerticalScrollBar().setUnitIncrement(60); dPrimeScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(dPrimeScroller); tabs.addTab(viewItems[VIEW_D_NUM], panel); viewMenuItems[VIEW_D_NUM].setEnabled(true); //compute and show haps on next tab panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); try { hapDisplay = new HaplotypeDisplay(theData); } catch(HaploViewException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } HaplotypeDisplayController hdc = new HaplotypeDisplayController(hapDisplay); hapScroller = new JScrollPane(hapDisplay); hapScroller.getVerticalScrollBar().setUnitIncrement(60); hapScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(hapScroller); panel.add(hdc); tabs.addTab(viewItems[VIEW_HAP_NUM], panel); viewMenuItems[VIEW_HAP_NUM].setEnabled(true); //LOD panel /*panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); LODDisplay ld = new LODDisplay(theData); JScrollPane lodScroller = new JScrollPane(ld); panel.add(lodScroller); 
tabs.addTab(viewItems[VIEW_LOD_NUM], panel); viewMenuItems[VIEW_LOD_NUM].setEnabled(true);*/ //int optionalTabCount = 1; //check data panel if (checkPanel != null){ //optionalTabCount++; //VIEW_CHECK_NUM = optionalTabCount; //viewItems[VIEW_CHECK_NUM] = VIEW_CHECK_PANEL; JPanel metaCheckPanel = new JPanel(); metaCheckPanel.setLayout(new BoxLayout(metaCheckPanel, BoxLayout.Y_AXIS)); JLabel countsLabel = new JLabel("Using " + theData.numSingletons + " singletons and " + theData.numTrios + " trios from " + theData.numPeds + " families."); if (theData.numTrios + theData.numSingletons == 0){ countsLabel.setForeground(Color.red); } countsLabel.setAlignmentX(Component.CENTER_ALIGNMENT); metaCheckPanel.add(countsLabel); metaCheckPanel.add(checkPanel); cdc = new CheckDataController(window); metaCheckPanel.add(cdc); tabs.addTab(viewItems[VIEW_CHECK_NUM], metaCheckPanel); viewMenuItems[VIEW_CHECK_NUM].setEnabled(true); currentTab=VIEW_CHECK_NUM; } //TDT panel if(assocTest > 0) { //optionalTabCount++; //VIEW_TDT_NUM = optionalTabCount; //viewItems[VIEW_TDT_NUM] = VIEW_TDT; tdtPanel = new TDTPanel(theData.chromosomes, assocTest); tabs.addTab(viewItems[VIEW_TDT_NUM], tdtPanel); viewMenuItems[VIEW_TDT_NUM].setEnabled(true); } tabs.setSelectedIndex(currentTab); contents.add(tabs); repaint(); setVisible(true); theData.finished = true; setTitle(TITLE_STRING + " -- " + inFile.getName()); return null; }
|
public Object construct(){ dPrimeDisplay=null; changeKey(1); theData.generateDPrimeTable(); theData.guessBlocks(BLOX_GABRIEL); //theData.guessBlocks(BLOX_NONE); //for debugging, doesn't call blocks at first colorMenuItems[0].setSelected(true); blockMenuItems[0].setSelected(true); zoomMenuItems[0].setSelected(true); theData.blocksChanged = false; Container contents = getContentPane(); contents.removeAll(); int currentTab = VIEW_D_NUM; /*if (!(tabs == null)){ currentTab = tabs.getSelectedIndex(); } */ tabs = new JTabbedPane(); tabs.addChangeListener(new TabChangeListener()); //first, draw the D' picture JPanel panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); dPrimeDisplay = new DPrimeDisplay(window); JScrollPane dPrimeScroller = new JScrollPane(dPrimeDisplay); dPrimeScroller.getViewport().setScrollMode(JViewport.BLIT_SCROLL_MODE); dPrimeScroller.getVerticalScrollBar().setUnitIncrement(60); dPrimeScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(dPrimeScroller); tabs.addTab(viewItems[VIEW_D_NUM], panel); viewMenuItems[VIEW_D_NUM].setEnabled(true); //compute and show haps on next tab panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); try { hapDisplay = new HaplotypeDisplay(theData); } catch(HaploViewException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } HaplotypeDisplayController hdc = new HaplotypeDisplayController(hapDisplay); hapScroller = new JScrollPane(hapDisplay); hapScroller.getVerticalScrollBar().setUnitIncrement(60); hapScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(hapScroller); panel.add(hdc); tabs.addTab(viewItems[VIEW_HAP_NUM], panel); viewMenuItems[VIEW_HAP_NUM].setEnabled(true); //LOD panel /*panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); LODDisplay ld = new LODDisplay(theData); JScrollPane lodScroller = new JScrollPane(ld); panel.add(lodScroller); 
tabs.addTab(viewItems[VIEW_LOD_NUM], panel); viewMenuItems[VIEW_LOD_NUM].setEnabled(true);*/ //int optionalTabCount = 1; //check data panel if (checkPanel != null){ //optionalTabCount++; //VIEW_CHECK_NUM = optionalTabCount; //viewItems[VIEW_CHECK_NUM] = VIEW_CHECK_PANEL; JPanel metaCheckPanel = new JPanel(); metaCheckPanel.setLayout(new BoxLayout(metaCheckPanel, BoxLayout.Y_AXIS)); JLabel countsLabel = new JLabel("Using " + theData.numSingletons + " singletons and " + theData.numTrios + " trios from " + theData.numPeds + " families."); if (theData.numTrios + theData.numSingletons == 0){ countsLabel.setForeground(Color.red); } countsLabel.setAlignmentX(Component.CENTER_ALIGNMENT); metaCheckPanel.add(countsLabel); metaCheckPanel.add(checkPanel); cdc = new CheckDataController(window); metaCheckPanel.add(cdc); tabs.addTab(viewItems[VIEW_CHECK_NUM], metaCheckPanel); viewMenuItems[VIEW_CHECK_NUM].setEnabled(true); currentTab=VIEW_CHECK_NUM; } //TDT panel if(assocTest > 0) { //optionalTabCount++; //VIEW_TDT_NUM = optionalTabCount; //viewItems[VIEW_TDT_NUM] = VIEW_TDT; tdtPanel = new TDTPanel(theData.getPedFile(), assocTest); tabs.addTab(viewItems[VIEW_TDT_NUM], tdtPanel); viewMenuItems[VIEW_TDT_NUM].setEnabled(true); } tabs.setSelectedIndex(currentTab); contents.add(tabs); repaint(); setVisible(true); theData.finished = true; setTitle(TITLE_STRING + " -- " + inFile.getName()); return null; }
| 1,109,561
|
private void buildParenTDTTrioSet(PedFile pf, Vector permuteInd, TreeSet snpsToBeTested) throws PedFileException{ Vector results = new Vector(); Vector indList = pf.getAllIndividuals(); if(permuteInd == null || permuteInd.size() != indList.size()) { permuteInd = new Vector(); for (int i = 0; i < indList.size(); i++){ permuteInd.add(new Boolean(false)); } } //todo: need to make sure each set of parents only used once int numMarkers = Chromosome.getUnfilteredSize(); for (int i = 0; i < numMarkers; i++){ SNP currentMarker = Chromosome.getUnfilteredMarker(i); if (snpsToBeTested.contains(currentMarker)){ int discordantNotTallied=0; int discordantTallied = 0; Individual currentInd; Family currentFam; HashSet usedParents = new HashSet(); AssociationResult.TallyTrio tt = new AssociationResult.TallyTrio(); for (int j = 0; j < indList.size(); j++){ currentInd = (Individual)indList.elementAt(j); currentFam = pf.getFamily(currentInd.getFamilyID()); if (currentFam.containsMember(currentInd.getMomID()) && currentFam.containsMember(currentInd.getDadID()) && currentInd.getAffectedStatus() == 2){ //if he has both parents, and is affected, we can get a transmission Individual mom = currentFam.getMember(currentInd.getMomID()); Individual dad = currentFam.getMember(currentInd.getDadID()); if(usedParents.contains(mom) || usedParents.contains(dad)) { continue; } if(currentInd.getZeroed(i) || dad.getZeroed(i) || mom.getZeroed(i)) { continue; } byte kid1 = currentInd.getMarkerA(i); byte kid2 = currentInd.getMarkerB(i); byte dad1 = dad.getMarkerA(i); byte dad2 = dad.getMarkerB(i); byte mom1 = mom.getMarkerA(i); byte mom2 = mom.getMarkerB(i); byte momT=0, momU=0, dadT=0, dadU=0; if (kid1 == 0 || kid2 == 0 || dad1 == 0 || dad2 == 0 || mom1 == 0 || mom2 == 0) { continue; } else if (kid1 == kid2) { //kid homozygous if (dad1 == kid1) { dadT = dad1; dadU = dad2; } else { dadT = dad2; dadU = dad1; } if (mom1 == kid1) { momT = mom1; momU = mom2; } else { momT = mom2; momU = mom1; } } else { if 
(dad1 == dad2 && mom1 != mom2) { //dad hom mom het dadT = dad1; dadU = dad2; if (kid1 == dad1) { momT = kid2; momU = kid1; } else { momT = kid1; momU = kid2; } } else if (mom1 == mom2 && dad1 != dad2) { //dad het mom hom momT = mom1; momU = mom2; if (kid1 == mom1) { dadT = kid2; dadU = kid1; } else { dadT = kid1; dadU = kid2; } } else if (dad1 == dad2 && mom1 == mom2) { //mom & dad hom dadT = dad1; dadU = dad1; momT = mom1; momU = mom1; } else { //everybody het dadT = (byte)(4+dad1); dadU = (byte)(4+dad2); momT = (byte)(4+mom1); momU = (byte)(4+mom2); } } if(((Boolean)permuteInd.get(j)).booleanValue()) { tt.tallyTrioInd(dadU, dadT); tt.tallyTrioInd(momU, momT); } else { tt.tallyTrioInd(dadT, dadU); tt.tallyTrioInd(momT, momU); } if(mom.getAffectedStatus() != dad.getAffectedStatus()) { //discordant parental phenotypes if(!(dad1 == mom1 && dad2 == mom2) && !(dad1 == mom2 && dad2 == mom1)) { if(mom.getAffectedStatus() == 2) { tt.tallyDiscordantParents(momT,momU,dadT,dadU); } else if(dad.getAffectedStatus() == 2) { tt.tallyDiscordantParents(dadT,dadU,momT,momU); } discordantTallied++; }else { discordantNotTallied++; } } usedParents.add(mom); usedParents.add(dad); } } int[] g1 = {tt.allele1}; int[] g2 = {tt.allele2}; int[] m = {i}; Haplotype thisSNP1 = new Haplotype(g1, 0, m, null); thisSNP1.setTransCount(tt.counts[0][0]); thisSNP1.setUntransCount(tt.counts[1][0]); thisSNP1.setDiscordantAlleleCounts(tt.discordantAlleleCounts); Haplotype thisSNP2 = new Haplotype(g2, 0, m, null); thisSNP2.setTransCount(tt.counts[0][1]); thisSNP2.setUntransCount(tt.counts[1][1]); thisSNP2.setDiscordantAlleleCounts(tt.getDiscordantCountsAllele2()); Haplotype[] daBlock = {thisSNP1, thisSNP2}; results.add(new MarkerAssociationResult(daBlock, currentMarker.getName(), currentMarker)); } } this.results = results; }
|
private void buildParenTDTTrioSet(PedFile pf, Vector permuteInd, TreeSet snpsToBeTested) throws PedFileException{ Vector results = new Vector(); Vector indList = pf.getAllIndividuals(); if(permuteInd == null || permuteInd.size() != indList.size()) { permuteInd = new Vector(); for (int i = 0; i < indList.size(); i++){ permuteInd.add(new Boolean(false)); } } //todo: need to make sure each set of parents only used once int numMarkers = Chromosome.getUnfilteredSize(); for (int i = 0; i < numMarkers; i++){ SNP currentMarker = Chromosome.getUnfilteredMarker(i); if (snpsToBeTested.contains(currentMarker)){ int discordantNotTallied=0; int discordantTallied = 0; Individual currentInd; Family currentFam; HashSet usedParents = new HashSet(); AssociationResult.TallyTrio tt = new AssociationResult.TallyTrio(); for (int j = 0; j < indList.size(); j++){ currentInd = (Individual)indList.elementAt(j); currentFam = pf.getFamily(currentInd.getFamilyID()); if (currentFam.containsMember(currentInd.getMomID()) && currentFam.containsMember(currentInd.getDadID()) && currentInd.getAffectedStatus() == 2){ //if he has both parents, and is affected, we can get a transmission Individual mom = currentFam.getMember(currentInd.getMomID()); Individual dad = currentFam.getMember(currentInd.getDadID()); if(usedParents.contains(mom) || usedParents.contains(dad)) { continue; } if(currentInd.getZeroed(i) || dad.getZeroed(i) || mom.getZeroed(i)) { continue; } byte kid1 = currentInd.getMarkerA(i); byte kid2 = currentInd.getMarkerB(i); byte dad1 = dad.getMarkerA(i); byte dad2 = dad.getMarkerB(i); byte mom1 = mom.getMarkerA(i); byte mom2 = mom.getMarkerB(i); byte momT=0, momU=0, dadT=0, dadU=0; if (kid1 == 0 || kid2 == 0 || dad1 == 0 || dad2 == 0 || mom1 == 0 || mom2 == 0) { continue; } else if (kid1 == kid2) { //kid homozygous if (dad1 == kid1) { dadT = dad1; dadU = dad2; } else { dadT = dad2; dadU = dad1; } if (mom1 == kid1) { momT = mom1; momU = mom2; } else { momT = mom2; momU = mom1; } } else { if 
(dad1 == dad2 && mom1 != mom2) { //dad hom mom het dadT = dad1; dadU = dad2; if (kid1 == dad1) { momT = kid2; momU = kid1; } else { momT = kid1; momU = kid2; } } else if (mom1 == mom2 && dad1 != dad2) { //dad het mom hom momT = mom1; momU = mom2; if (kid1 == mom1) { dadT = kid2; dadU = kid1; } else { dadT = kid1; dadU = kid2; } } else if (dad1 == dad2 && mom1 == mom2) { //mom & dad hom dadT = dad1; dadU = dad1; momT = mom1; momU = mom1; } else { //everybody het dadT = (byte)(4+dad1); dadU = (byte)(4+dad2); momT = (byte)(4+mom1); momU = (byte)(4+mom2); } } if(((Boolean)permuteInd.get(j)).booleanValue()) { tt.tallyTrioInd(dadU, dadT); tt.tallyTrioInd(momU, momT); } else { tt.tallyTrioInd(dadT, dadU); tt.tallyTrioInd(momT, momU); } if(mom.getAffectedStatus() != dad.getAffectedStatus()) { //discordant parental phenotypes if(!(dad1 == mom1 && dad2 == mom2) && !(dad1 == mom2 && dad2 == mom1)) { if(mom.getAffectedStatus() == 2) { tt.tallyDiscordantParents(momT,momU,dadT,dadU); } else if(dad.getAffectedStatus() == 2) { tt.tallyDiscordantParents(dadT,dadU,momT,momU); } discordantTallied++; }else { discordantNotTallied++; } } usedParents.add(mom); usedParents.add(dad); } } int[] g1 = {tt.allele1}; int[] g2 = {tt.allele2}; int[] m = {i}; Haplotype thisSNP1 = new Haplotype(g1, 0, m, null); thisSNP1.setTransCount(tt.counts[0][0]); thisSNP1.setUntransCount(tt.counts[1][0]); thisSNP1.setDiscordantAlleleCounts(tt.discordantAlleleCounts); Haplotype thisSNP2 = new Haplotype(g2, 0, m, null); thisSNP2.setTransCount(tt.counts[0][1]); thisSNP2.setUntransCount(tt.counts[1][1]); thisSNP2.setDiscordantAlleleCounts(tt.getDiscordantCountsAllele2()); Haplotype[] daBlock = {thisSNP1, thisSNP2}; results.add(new MarkerAssociationResult(daBlock, currentMarker.getName(), currentMarker)); } } this.results = results; }
| 1,109,562
|
public String getCountString(int i){ nf.setMinimumFractionDigits(1); nf.setMaximumFractionDigits(1); Haplotype h = (Haplotype) alleles.get(i); StringBuffer countSB = new StringBuffer(); if(Options.getAssocTest() == ASSOC_TRIO) { countSB.append(nf.format(h.getTransCount())).append(" : ").append(nf.format(h.getUntransCount())); } else if(Options.getAssocTest() == ASSOC_CC) { double caseSum = 0, controlSum = 0; for (int j = 0; j < alleles.size(); j++){ if (i!=j){ caseSum += ((Haplotype)alleles.get(j)).getCaseCount(); controlSum += ((Haplotype)alleles.get(j)).getControlCount(); } } countSB.append(nf.format(h.getCaseCount())).append(" : ").append(nf.format(caseSum)).append(", "); countSB.append(nf.format(h.getControlCount())).append(" : ").append(nf.format(controlSum)); } return countSB.toString(); }
|
public String getCountString(int i){ nf.setMinimumFractionDigits(1); nf.setMaximumFractionDigits(1); Haplotype h = (Haplotype) filteredAlleles.get(i); StringBuffer countSB = new StringBuffer(); if(Options.getAssocTest() == ASSOC_TRIO) { countSB.append(nf.format(h.getTransCount())).append(" : ").append(nf.format(h.getUntransCount())); } else if(Options.getAssocTest() == ASSOC_CC) { double caseSum = 0, controlSum = 0; for (int j = 0; j < alleles.size(); j++){ if (i!=j){ caseSum += ((Haplotype)alleles.get(j)).getCaseCount(); controlSum += ((Haplotype)alleles.get(j)).getControlCount(); } } countSB.append(nf.format(h.getCaseCount())).append(" : ").append(nf.format(caseSum)).append(", "); countSB.append(nf.format(h.getControlCount())).append(" : ").append(nf.format(controlSum)); } return countSB.toString(); }
| 1,109,564
|
public String getPValue(int i) { if (pValues == null || i >= pValues.size()){ return ""; } return Util.formatPValue(((Double)pValues.get(i)).doubleValue()); }
|
public Double getPValue(int i) { if (pValues == null || i >= pValues.size()){ return ""; } return Util.formatPValue(((Double)pValues.get(i)).doubleValue()); }
| 1,109,565
|
public String getPValue(int i) { if (pValues == null || i >= pValues.size()){ return ""; } return Util.formatPValue(((Double)pValues.get(i)).doubleValue()); }
|
public String getPValue(int i) { if (pValues == null || i >= pValues.size()){ return new Double(0); } return Util.formatPValue(((Double)pValues.get(i)).doubleValue()); }
| 1,109,566
|
public String getPValue(int i) { if (pValues == null || i >= pValues.size()){ return ""; } return Util.formatPValue(((Double)pValues.get(i)).doubleValue()); }
|
public String getPValue(int i) { if (pValues == null || i >= pValues.size()){ return ""; } return new Double(Util.formatPValue(((Double)pValues.get(i)).doubleValue())); }
| 1,109,567
|
public void parseWGA(String wga, String map, boolean embed) throws PlinkException { markers = new Vector(); results = new Vector(); columns = new Vector(); columns.add("Result"); columns.add("Chrom"); columns.add("Marker"); columns.add("Position"); final File wgaFile = new File(wga); final File mapFile = new File(map); Hashtable markerHash = new Hashtable(1,1); Vector ignoredMarkers = new Vector(); try{ if (wgaFile.length() < 1){ throw new PlinkException("plink file is empty or nonexistent."); } if (!embed){ if (mapFile.length() < 1){ throw new PlinkException("Map file is empty or nonexistent."); } BufferedReader mapReader = new BufferedReader(new FileReader(mapFile)); String mapLine; String unknownChrom = "0"; while((mapLine = mapReader.readLine())!=null) { if (mapLine.length() == 0){ //skip blank lines continue; } StringTokenizer st = new StringTokenizer(mapLine,"\t "); String chrom = st.nextToken(); String chr; if (chrom.equals("0")){ chr = unknownChrom; }else if (chrom.equalsIgnoreCase("x") || chrom.equalsIgnoreCase("xy")){ chr = CHROM_NAMES[22]; } else{ chr = CHROM_NAMES[Integer.parseInt(chrom)-1]; } String marker = new String(st.nextToken()); long mDistance = Long.parseLong(st.nextToken()); long position = Long.parseLong(st.nextToken()); Marker mark = new Marker(chr, marker, mDistance, position); markers.add(mark); markerHash.put(mark.getMarkerID(), mark); } } BufferedReader wgaReader = new BufferedReader(new FileReader(wgaFile)); int numColumns = 0; int markerColumn = -1; int chromColumn = -1; int positionColumn = -1; int morganColumn = -1; String headerLine = wgaReader.readLine(); StringTokenizer headerSt = new StringTokenizer(headerLine); while (headerSt.hasMoreTokens()){ String column = new String(headerSt.nextToken()); if (column.equals("SNP")){ markerColumn = numColumns; numColumns++; }else if (column.equals("CHR")){ chromColumn = numColumns; numColumns++; }else if (column.equals("POS")){ positionColumn = numColumns; numColumns++; }else if 
(column.equals("MORGAN")){ morganColumn = numColumns; numColumns++; } else{ columns.add(column); numColumns++; } } if (markerColumn == -1){ throw new PlinkException("Results file must contain a SNP column."); } if (embed){ if (chromColumn == -1 || positionColumn == -1 || morganColumn == -1){ throw new PlinkException("Results files with embedded map files must contain CHR, POS, and MORGAN columns."); } } String wgaLine; int lineNumber = 0; while((wgaLine = wgaReader.readLine())!=null){ if (wgaLine.length() == 0){ //skip blank lines continue; } int tokenNumber = 0; //StringTokenizer tokenizer = new StringTokenizer(wgaLine,"\t :"); StringTokenizer tokenizer = new StringTokenizer(wgaLine); String marker = null; String chromosome = null; long position = 0; long morganDistance = 0; Vector values = new Vector(); while(tokenizer.hasMoreTokens()){ if (tokenNumber == markerColumn){ marker = new String(tokenizer.nextToken()); }else if (tokenNumber == chromColumn){ chromosome = new String(tokenizer.nextToken()); if(chromosome.equals("23")){ chromosome = "X"; } }else if (tokenNumber == positionColumn){ position = (new Long(new String(tokenizer.nextToken()))).longValue(); }else if (tokenNumber == morganColumn){ morganDistance = (new Long(new String(tokenizer.nextToken()))).longValue(); } else{ values.add(new String(tokenizer.nextToken())); } tokenNumber++; } if (tokenNumber != numColumns){ throw new PlinkException("Inconsistent column number on line " + (lineNumber+1)); } Marker assocMarker; if (!embed){ assocMarker = (Marker)markerHash.get(marker); if (assocMarker == null){ ignoredMarkers.add(marker); lineNumber++; continue; }else if (!(assocMarker.getChromosome().equalsIgnoreCase(chromosome)) && chromosome != null){ throw new PlinkException("Incompatible chromsomes."); } }else{ assocMarker = new Marker(chromosome,marker,morganDistance,position); } AssociationResult result = new AssociationResult(lineNumber,assocMarker,values); results.add(result); lineNumber++; } 
}catch(IOException ioe){ throw new PlinkException("File error."); }catch(NumberFormatException nfe){ throw new PlinkException("File formatting error."); } if (ignoredMarkers.size() != 0){ IgnoredMarkersDialog imd = new IgnoredMarkersDialog(hv,"Ignored Markers",ignoredMarkers,false); imd.pack(); imd.setVisible(true); } hv.setPlinkData(results,columns); }
|
public void parseWGA(String wga, String map, boolean embed) throws PlinkException { markers = new Vector(); results = new Vector(); columns = new Vector(); columns.add("Result"); columns.add("Chrom"); columns.add("Marker"); columns.add("Position"); final File wgaFile = new File(wga); final File mapFile = new File(map); Hashtable markerHash = new Hashtable(1,1); Vector ignoredMarkers = new Vector(); try{ if (wgaFile.length() < 1){ throw new PlinkException("plink file is empty or nonexistent."); } if (!embed){ if (mapFile.length() < 1){ throw new PlinkException("Map file is empty or nonexistent."); } BufferedReader mapReader = new BufferedReader(new FileReader(mapFile)); String mapLine; String unknownChrom = "0"; while((mapLine = mapReader.readLine())!=null) { if (mapLine.length() == 0){ //skip blank lines continue; } StringTokenizer st = new StringTokenizer(mapLine,"\t "); String chrom = st.nextToken(); String chr; if (chrom.equals("0")){ chr = unknownChrom; }else if (chrom.equalsIgnoreCase("x") || chrom.equalsIgnoreCase("xy")){ chr = CHROM_NAMES[22]; } else{ chr = CHROM_NAMES[Integer.parseInt(chrom)-1]; } String marker = new String(st.nextToken()); double mDistance = Double.parseDouble(st.nextToken()); long position = Long.parseLong(st.nextToken()); Marker mark = new Marker(chr, marker, mDistance, position); markers.add(mark); markerHash.put(mark.getMarkerID(), mark); } } BufferedReader wgaReader = new BufferedReader(new FileReader(wgaFile)); int numColumns = 0; int markerColumn = -1; int chromColumn = -1; int positionColumn = -1; int morganColumn = -1; String headerLine = wgaReader.readLine(); StringTokenizer headerSt = new StringTokenizer(headerLine); while (headerSt.hasMoreTokens()){ String column = new String(headerSt.nextToken()); if (column.equals("SNP")){ markerColumn = numColumns; numColumns++; }else if (column.equals("CHR")){ chromColumn = numColumns; numColumns++; }else if (column.equals("POS")){ positionColumn = numColumns; numColumns++; }else if 
(column.equals("MORGAN")){ morganColumn = numColumns; numColumns++; } else{ columns.add(column); numColumns++; } } if (markerColumn == -1){ throw new PlinkException("Results file must contain a SNP column."); } if (embed){ if (chromColumn == -1 || positionColumn == -1 || morganColumn == -1){ throw new PlinkException("Results files with embedded map files must contain CHR, POS, and MORGAN columns."); } } String wgaLine; int lineNumber = 0; while((wgaLine = wgaReader.readLine())!=null){ if (wgaLine.length() == 0){ //skip blank lines continue; } int tokenNumber = 0; //StringTokenizer tokenizer = new StringTokenizer(wgaLine,"\t :"); StringTokenizer tokenizer = new StringTokenizer(wgaLine); String marker = null; String chromosome = null; long position = 0; long morganDistance = 0; Vector values = new Vector(); while(tokenizer.hasMoreTokens()){ if (tokenNumber == markerColumn){ marker = new String(tokenizer.nextToken()); }else if (tokenNumber == chromColumn){ chromosome = new String(tokenizer.nextToken()); if(chromosome.equals("23")){ chromosome = "X"; } }else if (tokenNumber == positionColumn){ position = (new Long(new String(tokenizer.nextToken()))).longValue(); }else if (tokenNumber == morganColumn){ morganDistance = (new Long(new String(tokenizer.nextToken()))).longValue(); } else{ values.add(new String(tokenizer.nextToken())); } tokenNumber++; } if (tokenNumber != numColumns){ throw new PlinkException("Inconsistent column number on line " + (lineNumber+1)); } Marker assocMarker; if (!embed){ assocMarker = (Marker)markerHash.get(marker); if (assocMarker == null){ ignoredMarkers.add(marker); lineNumber++; continue; }else if (!(assocMarker.getChromosome().equalsIgnoreCase(chromosome)) && chromosome != null){ throw new PlinkException("Incompatible chromsomes."); } }else{ assocMarker = new Marker(chromosome,marker,morganDistance,position); } AssociationResult result = new AssociationResult(lineNumber,assocMarker,values); results.add(result); lineNumber++; } 
}catch(IOException ioe){ throw new PlinkException("File error."); }catch(NumberFormatException nfe){ throw new PlinkException("File formatting error."); } if (ignoredMarkers.size() != 0){ IgnoredMarkersDialog imd = new IgnoredMarkersDialog(hv,"Ignored Markers",ignoredMarkers,false); imd.pack(); imd.setVisible(true); } hv.setPlinkData(results,columns); }
| 1,109,568
|
public void parseWGA(String wga, String map, boolean embed) throws PlinkException { markers = new Vector(); results = new Vector(); columns = new Vector(); columns.add("Result"); columns.add("Chrom"); columns.add("Marker"); columns.add("Position"); final File wgaFile = new File(wga); final File mapFile = new File(map); Hashtable markerHash = new Hashtable(1,1); Vector ignoredMarkers = new Vector(); try{ if (wgaFile.length() < 1){ throw new PlinkException("plink file is empty or nonexistent."); } if (!embed){ if (mapFile.length() < 1){ throw new PlinkException("Map file is empty or nonexistent."); } BufferedReader mapReader = new BufferedReader(new FileReader(mapFile)); String mapLine; String unknownChrom = "0"; while((mapLine = mapReader.readLine())!=null) { if (mapLine.length() == 0){ //skip blank lines continue; } StringTokenizer st = new StringTokenizer(mapLine,"\t "); String chrom = st.nextToken(); String chr; if (chrom.equals("0")){ chr = unknownChrom; }else if (chrom.equalsIgnoreCase("x") || chrom.equalsIgnoreCase("xy")){ chr = CHROM_NAMES[22]; } else{ chr = CHROM_NAMES[Integer.parseInt(chrom)-1]; } String marker = new String(st.nextToken()); long mDistance = Long.parseLong(st.nextToken()); long position = Long.parseLong(st.nextToken()); Marker mark = new Marker(chr, marker, mDistance, position); markers.add(mark); markerHash.put(mark.getMarkerID(), mark); } } BufferedReader wgaReader = new BufferedReader(new FileReader(wgaFile)); int numColumns = 0; int markerColumn = -1; int chromColumn = -1; int positionColumn = -1; int morganColumn = -1; String headerLine = wgaReader.readLine(); StringTokenizer headerSt = new StringTokenizer(headerLine); while (headerSt.hasMoreTokens()){ String column = new String(headerSt.nextToken()); if (column.equals("SNP")){ markerColumn = numColumns; numColumns++; }else if (column.equals("CHR")){ chromColumn = numColumns; numColumns++; }else if (column.equals("POS")){ positionColumn = numColumns; numColumns++; }else if 
(column.equals("MORGAN")){ morganColumn = numColumns; numColumns++; } else{ columns.add(column); numColumns++; } } if (markerColumn == -1){ throw new PlinkException("Results file must contain a SNP column."); } if (embed){ if (chromColumn == -1 || positionColumn == -1 || morganColumn == -1){ throw new PlinkException("Results files with embedded map files must contain CHR, POS, and MORGAN columns."); } } String wgaLine; int lineNumber = 0; while((wgaLine = wgaReader.readLine())!=null){ if (wgaLine.length() == 0){ //skip blank lines continue; } int tokenNumber = 0; //StringTokenizer tokenizer = new StringTokenizer(wgaLine,"\t :"); StringTokenizer tokenizer = new StringTokenizer(wgaLine); String marker = null; String chromosome = null; long position = 0; long morganDistance = 0; Vector values = new Vector(); while(tokenizer.hasMoreTokens()){ if (tokenNumber == markerColumn){ marker = new String(tokenizer.nextToken()); }else if (tokenNumber == chromColumn){ chromosome = new String(tokenizer.nextToken()); if(chromosome.equals("23")){ chromosome = "X"; } }else if (tokenNumber == positionColumn){ position = (new Long(new String(tokenizer.nextToken()))).longValue(); }else if (tokenNumber == morganColumn){ morganDistance = (new Long(new String(tokenizer.nextToken()))).longValue(); } else{ values.add(new String(tokenizer.nextToken())); } tokenNumber++; } if (tokenNumber != numColumns){ throw new PlinkException("Inconsistent column number on line " + (lineNumber+1)); } Marker assocMarker; if (!embed){ assocMarker = (Marker)markerHash.get(marker); if (assocMarker == null){ ignoredMarkers.add(marker); lineNumber++; continue; }else if (!(assocMarker.getChromosome().equalsIgnoreCase(chromosome)) && chromosome != null){ throw new PlinkException("Incompatible chromsomes."); } }else{ assocMarker = new Marker(chromosome,marker,morganDistance,position); } AssociationResult result = new AssociationResult(lineNumber,assocMarker,values); results.add(result); lineNumber++; } 
}catch(IOException ioe){ throw new PlinkException("File error."); }catch(NumberFormatException nfe){ throw new PlinkException("File formatting error."); } if (ignoredMarkers.size() != 0){ IgnoredMarkersDialog imd = new IgnoredMarkersDialog(hv,"Ignored Markers",ignoredMarkers,false); imd.pack(); imd.setVisible(true); } hv.setPlinkData(results,columns); }
|
public void parseWGA(String wga, String map, boolean embed) throws PlinkException { markers = new Vector(); results = new Vector(); columns = new Vector(); columns.add("Result"); columns.add("Chrom"); columns.add("Marker"); columns.add("Position"); final File wgaFile = new File(wga); final File mapFile = new File(map); Hashtable markerHash = new Hashtable(1,1); Vector ignoredMarkers = new Vector(); try{ if (wgaFile.length() < 1){ throw new PlinkException("plink file is empty or nonexistent."); } if (!embed){ if (mapFile.length() < 1){ throw new PlinkException("Map file is empty or nonexistent."); } BufferedReader mapReader = new BufferedReader(new FileReader(mapFile)); String mapLine; String unknownChrom = "0"; while((mapLine = mapReader.readLine())!=null) { if (mapLine.length() == 0){ //skip blank lines continue; } StringTokenizer st = new StringTokenizer(mapLine,"\t "); String chrom = st.nextToken(); String chr; if (chrom.equals("0")){ chr = unknownChrom; }else if (chrom.equalsIgnoreCase("x") || chrom.equalsIgnoreCase("xy")){ chr = CHROM_NAMES[22]; } else{ chr = CHROM_NAMES[Integer.parseInt(chrom)-1]; } String marker = new String(st.nextToken()); long mDistance = Long.parseLong(st.nextToken()); long position = Long.parseLong(st.nextToken()); Marker mark = new Marker(chr, marker, mDistance, position); markers.add(mark); markerHash.put(mark.getMarkerID(), mark); } } BufferedReader wgaReader = new BufferedReader(new FileReader(wgaFile)); int numColumns = 0; int markerColumn = -1; int chromColumn = -1; int positionColumn = -1; int morganColumn = -1; String headerLine = wgaReader.readLine(); StringTokenizer headerSt = new StringTokenizer(headerLine); while (headerSt.hasMoreTokens()){ String column = new String(headerSt.nextToken()); if (column.equals("SNP")){ markerColumn = numColumns; numColumns++; }else if (column.equals("CHR")){ chromColumn = numColumns; numColumns++; }else if (column.equals("POS")){ positionColumn = numColumns; numColumns++; }else if 
(column.equals("MORGAN")){ morganColumn = numColumns; numColumns++; } else{ columns.add(column); numColumns++; } } if (markerColumn == -1){ throw new PlinkException("Results file must contain a SNP column."); } if (embed){ if (chromColumn == -1 || positionColumn == -1 || morganColumn == -1){ throw new PlinkException("Results files with embedded map files must contain CHR, POS, and MORGAN columns."); } } String wgaLine; int lineNumber = 0; while((wgaLine = wgaReader.readLine())!=null){ if (wgaLine.length() == 0){ //skip blank lines continue; } int tokenNumber = 0; //StringTokenizer tokenizer = new StringTokenizer(wgaLine,"\t :"); StringTokenizer tokenizer = new StringTokenizer(wgaLine); String marker = null; String chromosome = null; long position = 0; double morganDistance = 0; Vector values = new Vector(); while(tokenizer.hasMoreTokens()){ if (tokenNumber == markerColumn){ marker = new String(tokenizer.nextToken()); }else if (tokenNumber == chromColumn){ chromosome = new String(tokenizer.nextToken()); if(chromosome.equals("23")){ chromosome = "X"; } }else if (tokenNumber == positionColumn){ position = (new Long(new String(tokenizer.nextToken()))).longValue(); }else if (tokenNumber == morganColumn){ morganDistance = (new Long(new String(tokenizer.nextToken()))).longValue(); } else{ values.add(new String(tokenizer.nextToken())); } tokenNumber++; } if (tokenNumber != numColumns){ throw new PlinkException("Inconsistent column number on line " + (lineNumber+1)); } Marker assocMarker; if (!embed){ assocMarker = (Marker)markerHash.get(marker); if (assocMarker == null){ ignoredMarkers.add(marker); lineNumber++; continue; }else if (!(assocMarker.getChromosome().equalsIgnoreCase(chromosome)) && chromosome != null){ throw new PlinkException("Incompatible chromsomes."); } }else{ assocMarker = new Marker(chromosome,marker,morganDistance,position); } AssociationResult result = new AssociationResult(lineNumber,assocMarker,values); results.add(result); lineNumber++; } 
}catch(IOException ioe){ throw new PlinkException("File error."); }catch(NumberFormatException nfe){ throw new PlinkException("File formatting error."); } if (ignoredMarkers.size() != 0){ IgnoredMarkersDialog imd = new IgnoredMarkersDialog(hv,"Ignored Markers",ignoredMarkers,false); imd.pack(); imd.setVisible(true); } hv.setPlinkData(results,columns); }
| 1,109,569
|
public void parseWGA(String wga, String map, boolean embed) throws PlinkException { markers = new Vector(); results = new Vector(); columns = new Vector(); columns.add("Result"); columns.add("Chrom"); columns.add("Marker"); columns.add("Position"); final File wgaFile = new File(wga); final File mapFile = new File(map); Hashtable markerHash = new Hashtable(1,1); Vector ignoredMarkers = new Vector(); try{ if (wgaFile.length() < 1){ throw new PlinkException("plink file is empty or nonexistent."); } if (!embed){ if (mapFile.length() < 1){ throw new PlinkException("Map file is empty or nonexistent."); } BufferedReader mapReader = new BufferedReader(new FileReader(mapFile)); String mapLine; String unknownChrom = "0"; while((mapLine = mapReader.readLine())!=null) { if (mapLine.length() == 0){ //skip blank lines continue; } StringTokenizer st = new StringTokenizer(mapLine,"\t "); String chrom = st.nextToken(); String chr; if (chrom.equals("0")){ chr = unknownChrom; }else if (chrom.equalsIgnoreCase("x") || chrom.equalsIgnoreCase("xy")){ chr = CHROM_NAMES[22]; } else{ chr = CHROM_NAMES[Integer.parseInt(chrom)-1]; } String marker = new String(st.nextToken()); long mDistance = Long.parseLong(st.nextToken()); long position = Long.parseLong(st.nextToken()); Marker mark = new Marker(chr, marker, mDistance, position); markers.add(mark); markerHash.put(mark.getMarkerID(), mark); } } BufferedReader wgaReader = new BufferedReader(new FileReader(wgaFile)); int numColumns = 0; int markerColumn = -1; int chromColumn = -1; int positionColumn = -1; int morganColumn = -1; String headerLine = wgaReader.readLine(); StringTokenizer headerSt = new StringTokenizer(headerLine); while (headerSt.hasMoreTokens()){ String column = new String(headerSt.nextToken()); if (column.equals("SNP")){ markerColumn = numColumns; numColumns++; }else if (column.equals("CHR")){ chromColumn = numColumns; numColumns++; }else if (column.equals("POS")){ positionColumn = numColumns; numColumns++; }else if 
(column.equals("MORGAN")){ morganColumn = numColumns; numColumns++; } else{ columns.add(column); numColumns++; } } if (markerColumn == -1){ throw new PlinkException("Results file must contain a SNP column."); } if (embed){ if (chromColumn == -1 || positionColumn == -1 || morganColumn == -1){ throw new PlinkException("Results files with embedded map files must contain CHR, POS, and MORGAN columns."); } } String wgaLine; int lineNumber = 0; while((wgaLine = wgaReader.readLine())!=null){ if (wgaLine.length() == 0){ //skip blank lines continue; } int tokenNumber = 0; //StringTokenizer tokenizer = new StringTokenizer(wgaLine,"\t :"); StringTokenizer tokenizer = new StringTokenizer(wgaLine); String marker = null; String chromosome = null; long position = 0; long morganDistance = 0; Vector values = new Vector(); while(tokenizer.hasMoreTokens()){ if (tokenNumber == markerColumn){ marker = new String(tokenizer.nextToken()); }else if (tokenNumber == chromColumn){ chromosome = new String(tokenizer.nextToken()); if(chromosome.equals("23")){ chromosome = "X"; } }else if (tokenNumber == positionColumn){ position = (new Long(new String(tokenizer.nextToken()))).longValue(); }else if (tokenNumber == morganColumn){ morganDistance = (new Long(new String(tokenizer.nextToken()))).longValue(); } else{ values.add(new String(tokenizer.nextToken())); } tokenNumber++; } if (tokenNumber != numColumns){ throw new PlinkException("Inconsistent column number on line " + (lineNumber+1)); } Marker assocMarker; if (!embed){ assocMarker = (Marker)markerHash.get(marker); if (assocMarker == null){ ignoredMarkers.add(marker); lineNumber++; continue; }else if (!(assocMarker.getChromosome().equalsIgnoreCase(chromosome)) && chromosome != null){ throw new PlinkException("Incompatible chromsomes."); } }else{ assocMarker = new Marker(chromosome,marker,morganDistance,position); } AssociationResult result = new AssociationResult(lineNumber,assocMarker,values); results.add(result); lineNumber++; } 
}catch(IOException ioe){ throw new PlinkException("File error."); }catch(NumberFormatException nfe){ throw new PlinkException("File formatting error."); } if (ignoredMarkers.size() != 0){ IgnoredMarkersDialog imd = new IgnoredMarkersDialog(hv,"Ignored Markers",ignoredMarkers,false); imd.pack(); imd.setVisible(true); } hv.setPlinkData(results,columns); }
|
public void parseWGA(String wga, String map, boolean embed) throws PlinkException { markers = new Vector(); results = new Vector(); columns = new Vector(); columns.add("Result"); columns.add("Chrom"); columns.add("Marker"); columns.add("Position"); final File wgaFile = new File(wga); final File mapFile = new File(map); Hashtable markerHash = new Hashtable(1,1); Vector ignoredMarkers = new Vector(); try{ if (wgaFile.length() < 1){ throw new PlinkException("plink file is empty or nonexistent."); } if (!embed){ if (mapFile.length() < 1){ throw new PlinkException("Map file is empty or nonexistent."); } BufferedReader mapReader = new BufferedReader(new FileReader(mapFile)); String mapLine; String unknownChrom = "0"; while((mapLine = mapReader.readLine())!=null) { if (mapLine.length() == 0){ //skip blank lines continue; } StringTokenizer st = new StringTokenizer(mapLine,"\t "); String chrom = st.nextToken(); String chr; if (chrom.equals("0")){ chr = unknownChrom; }else if (chrom.equalsIgnoreCase("x") || chrom.equalsIgnoreCase("xy")){ chr = CHROM_NAMES[22]; } else{ chr = CHROM_NAMES[Integer.parseInt(chrom)-1]; } String marker = new String(st.nextToken()); long mDistance = Long.parseLong(st.nextToken()); long position = Long.parseLong(st.nextToken()); Marker mark = new Marker(chr, marker, mDistance, position); markers.add(mark); markerHash.put(mark.getMarkerID(), mark); } } BufferedReader wgaReader = new BufferedReader(new FileReader(wgaFile)); int numColumns = 0; int markerColumn = -1; int chromColumn = -1; int positionColumn = -1; int morganColumn = -1; String headerLine = wgaReader.readLine(); StringTokenizer headerSt = new StringTokenizer(headerLine); while (headerSt.hasMoreTokens()){ String column = new String(headerSt.nextToken()); if (column.equals("SNP")){ markerColumn = numColumns; numColumns++; }else if (column.equals("CHR")){ chromColumn = numColumns; numColumns++; }else if (column.equals("POS")){ positionColumn = numColumns; numColumns++; }else if 
(column.equals("MORGAN")){ morganColumn = numColumns; numColumns++; } else{ columns.add(column); numColumns++; } } if (markerColumn == -1){ throw new PlinkException("Results file must contain a SNP column."); } if (embed){ if (chromColumn == -1 || positionColumn == -1 || morganColumn == -1){ throw new PlinkException("Results files with embedded map files must contain CHR, POS, and MORGAN columns."); } } String wgaLine; int lineNumber = 0; while((wgaLine = wgaReader.readLine())!=null){ if (wgaLine.length() == 0){ //skip blank lines continue; } int tokenNumber = 0; //StringTokenizer tokenizer = new StringTokenizer(wgaLine,"\t :"); StringTokenizer tokenizer = new StringTokenizer(wgaLine); String marker = null; String chromosome = null; long position = 0; long morganDistance = 0; Vector values = new Vector(); while(tokenizer.hasMoreTokens()){ if (tokenNumber == markerColumn){ marker = new String(tokenizer.nextToken()); }else if (tokenNumber == chromColumn){ chromosome = new String(tokenizer.nextToken()); if(chromosome.equals("23")){ chromosome = "X"; } }else if (tokenNumber == positionColumn){ position = (new Long(new String(tokenizer.nextToken()))).longValue(); }else if (tokenNumber == morganColumn){ morganDistance = (new Double(new String(tokenizer.nextToken()))).doubleValue(); } else{ values.add(new String(tokenizer.nextToken())); } tokenNumber++; } if (tokenNumber != numColumns){ throw new PlinkException("Inconsistent column number on line " + (lineNumber+1)); } Marker assocMarker; if (!embed){ assocMarker = (Marker)markerHash.get(marker); if (assocMarker == null){ ignoredMarkers.add(marker); lineNumber++; continue; }else if (!(assocMarker.getChromosome().equalsIgnoreCase(chromosome)) && chromosome != null){ throw new PlinkException("Incompatible chromsomes."); } }else{ assocMarker = new Marker(chromosome,marker,morganDistance,position); } AssociationResult result = new AssociationResult(lineNumber,assocMarker,values); results.add(result); lineNumber++; } 
}catch(IOException ioe){ throw new PlinkException("File error."); }catch(NumberFormatException nfe){ throw new PlinkException("File formatting error."); } if (ignoredMarkers.size() != 0){ IgnoredMarkersDialog imd = new IgnoredMarkersDialog(hv,"Ignored Markers",ignoredMarkers,false); imd.pack(); imd.setVisible(true); } hv.setPlinkData(results,columns); }
| 1,109,570
|
public void actionPerformed(ActionEvent e) { final DDLExportPanel ddlPanel = new DDLExportPanel(architectFrame.getProject()); Action okAction, cancelAction; okAction = new AbstractAction() { public void actionPerformed(ActionEvent evt) { try { if (ddlPanel.applyChanges()) { GenericDDLGenerator ddlg = architectFrame.getProject().getDDLGenerator(); ddlg.setTargetSchema(ddlPanel.getSchemaField().getText()); // XXX is it OK that is this generated but never used?? ddlg.generateDDL(architectFrame.getProject().getPlayPen().getDatabase()); List warnings = ddlg.getWarnings(); if (warnings.size() > 0) { TableSorter sorter = new TableSorter(new DDLWarningTableModel(warnings)); JTable warningTable = new JTable(sorter); sorter.setTableHeader(warningTable.getTableHeader()); JOptionPane.showMessageDialog(d, new JScrollPane(warningTable), "Warnings in generated DDL", JOptionPane.WARNING_MESSAGE); } SQLDatabase ppdb = ArchitectFrame.getMainInstance().getProject().getPlayPen().getDatabase(); SQLScriptDialog ssd = new SQLScriptDialog(d, "Preview SQL Script", "", false, ddlg, ppdb.getDataSource(), true); MonitorableWorker scriptWorker = ssd.getExecuteTask(); ConflictFinderProcess cfp = new ConflictFinderProcess(ssd, ppdb, ddlg, ddlg.getDdlStatements()); ConflictResolverProcess crp = new ConflictResolverProcess(ssd, cfp); cfp.setNextProcess(crp); crp.setNextProcess(scriptWorker); ssd.setExecuteTask(cfp); ssd.setVisible(true); } } catch (Exception ex) { JOptionPane.showMessageDialog (architectFrame, "Can't export DDL: "+ex.getMessage()); logger.error("Got exception while exporting DDL", ex); } } }; cancelAction = new AbstractAction() { public void actionPerformed(ActionEvent evt) { ddlPanel.discardChanges(); d.setVisible(false); } }; d = ArchitectPanelBuilder.createArchitectPanelDialog( ddlPanel, ArchitectFrame.getMainInstance(), "Forward Engineer SQL Script", "OK", okAction, cancelAction); d.pack(); d.setLocationRelativeTo(ArchitectFrame.getMainInstance()); d.setVisible(true); }
|
public void actionPerformed(ActionEvent e) { final DDLExportPanel ddlPanel = new DDLExportPanel(architectFrame.getProject()); Action okAction, cancelAction; okAction = new AbstractAction() { public void actionPerformed(ActionEvent evt) { try { if (ddlPanel.applyChanges()) { GenericDDLGenerator ddlg = architectFrame.getProject().getDDLGenerator(); ddlg.setTargetSchema(ddlPanel.getSchemaField().getText()); // XXX is it OK that is this generated but never used?? ddlg.generateDDL(architectFrame.getProject().getPlayPen().getDatabase()); List warnings = ddlg.getWarnings(); if (warnings.size() > 0) { TableSorter sorter = new TableSorter(new DDLWarningTableModel(warnings)); JTable warningTable = new JTable(sorter); sorter.setTableHeader(warningTable.getTableHeader()); JOptionPane.showMessageDialog(d, new JScrollPane(warningTable), "Warnings in generated DDL", JOptionPane.WARNING_MESSAGE); } SQLDatabase ppdb = ArchitectFrame.getMainInstance().getProject().getPlayPen().getDatabase(); SQLScriptDialog ssd = new SQLScriptDialog(d, "Preview SQL Script", "", false, ddlg, ppdb.getDataSource(), true); MonitorableWorker scriptWorker = ssd.getExecuteTask(); ConflictFinderProcess cfp = new ConflictFinderProcess(ssd, ppdb, ddlg, ddlg.getDdlStatements()); ConflictResolverProcess crp = new ConflictResolverProcess(ssd, cfp); cfp.setNextProcess(crp); crp.setNextProcess(scriptWorker); ssd.setExecuteTask(cfp); ssd.setVisible(true); } } catch (Exception ex) { JOptionPane.showMessageDialog (architectFrame, "Can't export DDL: "+ex.getMessage()); logger.error("Got exception while exporting DDL", ex); } } }; cancelAction = new AbstractAction() { public void actionPerformed(ActionEvent evt) { ddlPanel.discardChanges(); d.setVisible(false); } }; d = ArchitectPanelBuilder.createArchitectPanelDialog( ddlPanel, ArchitectFrame.getMainInstance(), "Forward Engineer SQL Script", "OK", okAction, cancelAction); d.pack(); d.setLocationRelativeTo(ArchitectFrame.getMainInstance()); d.setVisible(true); }
| 1,109,571
|
public void actionPerformed(ActionEvent evt) { try { if (ddlPanel.applyChanges()) { GenericDDLGenerator ddlg = architectFrame.getProject().getDDLGenerator(); ddlg.setTargetSchema(ddlPanel.getSchemaField().getText()); // XXX is it OK that is this generated but never used?? ddlg.generateDDL(architectFrame.getProject().getPlayPen().getDatabase()); List warnings = ddlg.getWarnings(); if (warnings.size() > 0) { TableSorter sorter = new TableSorter(new DDLWarningTableModel(warnings)); JTable warningTable = new JTable(sorter); sorter.setTableHeader(warningTable.getTableHeader()); JOptionPane.showMessageDialog(d, new JScrollPane(warningTable), "Warnings in generated DDL", JOptionPane.WARNING_MESSAGE); } SQLDatabase ppdb = ArchitectFrame.getMainInstance().getProject().getPlayPen().getDatabase(); SQLScriptDialog ssd = new SQLScriptDialog(d, "Preview SQL Script", "", false, ddlg, ppdb.getDataSource(), true); MonitorableWorker scriptWorker = ssd.getExecuteTask(); ConflictFinderProcess cfp = new ConflictFinderProcess(ssd, ppdb, ddlg, ddlg.getDdlStatements()); ConflictResolverProcess crp = new ConflictResolverProcess(ssd, cfp); cfp.setNextProcess(crp); crp.setNextProcess(scriptWorker); ssd.setExecuteTask(cfp); ssd.setVisible(true); } } catch (Exception ex) { JOptionPane.showMessageDialog (architectFrame, "Can't export DDL: "+ex.getMessage()); logger.error("Got exception while exporting DDL", ex); } }
|
public void actionPerformed(ActionEvent evt) { try { if (ddlPanel.applyChanges()) { GenericDDLGenerator ddlg = architectFrame.getProject().getDDLGenerator(); ddlg.setTargetSchema(ddlPanel.getSchemaField().getText()); // XXX is it OK that is this generated but never used?? ddlg.generateDDL(architectFrame.getProject().getPlayPen().getDatabase()); List warnings = ddlg.getWarnings(); if (warnings.size() > 0) { TableSorter sorter = new TableSorter(new DDLWarningTableModel(warnings)); JTable warningTable = new JTable(sorter); sorter.setTableHeader(warningTable.getTableHeader()); JOptionPane.showMessageDialog(d, new JScrollPane(warningTable), "Warnings in generated DDL", JOptionPane.WARNING_MESSAGE); } SQLDatabase ppdb = ArchitectFrame.getMainInstance().getProject().getPlayPen().getDatabase(); SQLScriptDialog ssd = new SQLScriptDialog(d, "Preview SQL Script", "", false, ddlg, ppdb.getDataSource(), true); MonitorableWorker scriptWorker = ssd.getExecuteTask(); ConflictFinderProcess cfp = new ConflictFinderProcess(ssd, ppdb, ddlg, ddlg.getDdlStatements()); ConflictResolverProcess crp = new ConflictResolverProcess(ssd, cfp); cfp.setNextProcess(crp); crp.setNextProcess(scriptWorker); ssd.setExecuteTask(cfp); ssd.setVisible(true); } } catch (Exception ex) { JOptionPane.showMessageDialog (architectFrame, "Can't export DDL: "+ex.getMessage()); logger.error("Got exception while exporting DDL", ex); } }
| 1,109,572
|
public void dbObjectChanged(SQLObjectEvent e) { if (e.getPropertyName() != null && e.getPropertyName().equals("name")) { setToolTipText(model.getName()); } }
|
public void dbObjectChanged(SQLObjectEvent e) { if (e.getPropertyName() != null && e.getPropertyName().equals("name")) { setToolTipText(model.getName()); } }
| 1,109,574
|
protected Project getProject() { ProjectTag tag = (ProjectTag) findAncestorWithClass(ProjectTag.class); if ( tag != null) { return tag.getProject(); } return (Project) context.findVariable( "org.apache.commons.jelly.werkz.Project" ); }
|
protected Project getProject() { ProjectTag tag = (ProjectTag) findAncestorWithClass(ProjectTag.class); if ( tag != null) { answer = tag.getProject(); } return (Project) context.findVariable( "org.apache.commons.jelly.werkz.Project" ); }
| 1,109,575
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.