bugged
stringlengths
6
599k
fixed
stringlengths
10
599k
__index_level_0__
int64
0
1.13M
public void actionPerformed(ActionEvent e) { String command = e.getActionCommand(); if (command == READ_GENOTYPES){ ReadDataDialog readDialog = new ReadDataDialog("Open new data", this); readDialog.pack(); readDialog.setVisible(true); }else if (command == "Continue"){ //TODO: change it so writing to a file is a checkbox on the CheckDataPanel theData = new HaploData(); JTable table = checkPanel.getTable(); checkWindow.dispose(); boolean[] markerResultArray = new boolean[table.getRowCount()]; for (int i = 0; i < table.getRowCount(); i++){ markerResultArray[i] = ((Boolean)table.getValueAt(i,7)).booleanValue(); } /* try{ new TextMethods().linkageToHaps(markerResultArray,checkPanel.getPedFile(),filenames[0]+".haps"); }catch (IOException ioexec){ JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } */ theData.linkageToChrom(markerResultArray,checkPanel.getPedFile()); processData(); //processInput(new File(hapInputFileName+".haps")); } else if (command == READ_MARKERS){ fc.setSelectedFile(null); int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { readMarkers(fc.getSelectedFile()); } }else if (command == "Clear All Blocks"){ //theBlocks.clearBlocks(); }else if (command == DEFINE_BLOCKS){ defineBlocks(); }else if (command == "Tutorial"){ showHelp(); } else if (command == QUIT){ quit(); } else { for (int i = 0; i < viewItems.length; i++) { if (command == viewItems[i]) tabs.setSelectedIndex(i); } } }
public void actionPerformed(ActionEvent e) { String command = e.getActionCommand(); if (command == READ_GENOTYPES){ ReadDataDialog readDialog = new ReadDataDialog("Open new data", this); readDialog.pack(); readDialog.setVisible(true); }else if (command == "Continue"){ //TODO: change it so writing to a file is a checkbox on the CheckDataPanel theData = new HaploData(); JTable table = checkPanel.getTable(); checkWindow.dispose(); boolean[] markerResultArray = new boolean[table.getRowCount()]; for (int i = 0; i < table.getRowCount(); i++){ markerResultArray[i] = ((Boolean)table.getValueAt(i,7)).booleanValue(); } /* try{ new TextMethods().linkageToHaps(markerResultArray,checkPanel.getPedFile(),inputOptions[0]+".haps"); }catch (IOException ioexec){ JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } */ theData.linkageToChrom(markerResultArray,checkPanel.getPedFile()); processData(); //processInput(new File(hapInputFileName+".haps")); } else if (command == READ_MARKERS){ fc.setSelectedFile(null); int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { readMarkers(fc.getSelectedFile()); } }else if (command == "Clear All Blocks"){ //theBlocks.clearBlocks(); }else if (command == DEFINE_BLOCKS){ defineBlocks(); }else if (command == "Tutorial"){ showHelp(); } else if (command == QUIT){ quit(); } else { for (int i = 0; i < viewItems.length; i++) { if (command == viewItems[i]) tabs.setSelectedIndex(i); } } }
1,110,434
void drawPicture(HaploData theData){ Container contents = getContentPane(); contents.removeAll(); //remember which tab we're in if they've already been set up int currentTabIndex = 0; if (!(tabs == null)){ currentTabIndex = tabs.getSelectedIndex(); } tabs = new JTabbedPane(); tabs.addChangeListener(new TabChangeListener()); //first, draw the D' picture JPanel panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); dPrimeDisplay = new DPrimeDisplay(theData.dPrimeTable, infoKnown); JScrollPane dPrimeScroller = new JScrollPane(dPrimeDisplay); dPrimeScroller.getViewport().setScrollMode(JViewport.BLIT_SCROLL_MODE); dPrimeScroller.getVerticalScrollBar().setUnitIncrement(60); dPrimeScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(dPrimeScroller); tabs.addTab(viewItems[0], panel); //compute and show haps on next tab panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); hapDisplay = new HaplotypeDisplay(theData); HaplotypeDisplayController hdc = new HaplotypeDisplayController(hapDisplay); JScrollPane hapScroller = new JScrollPane(hapDisplay); panel.add(hapScroller); panel.add(hdc); tabs.addTab(viewItems[1], panel); tabs.setSelectedIndex(currentTabIndex); contents.add(tabs); //next add a little spacer //ontents.add(Box.createRigidArea(new Dimension(0,5))); //and then add the block display //theBlocks = new BlockDisplay(theData.markerInfo, theData.blocks, dPrimeDisplay, infoKnown); //contents.setBackground(Color.black); //put the block display in a scroll pane in case the data set is very large. //JScrollPane blockScroller = new JScrollPane(theBlocks, // JScrollPane.VERTICAL_SCROLLBAR_NEVER, // JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED); //blockScroller.getHorizontalScrollBar().setUnitIncrement(60); //blockScroller.setMinimumSize(new Dimension(800, 100)); //contents.add(blockScroller); repaint(); setVisible(true); }
void drawPicture(HaploData theData){ Container contents = getContentPane(); contents.removeAll(); //remember which tab we're in if they've already been set up int currentTabIndex = 0; if (!(tabs == null)){ currentTabIndex = tabs.getSelectedIndex(); } tabs = new JTabbedPane(); tabs.addChangeListener(new TabChangeListener()); //first, draw the D' picture JPanel panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); dPrimeDisplay = new DPrimeDisplay(theData.dPrimeTable, infoKnown); JScrollPane dPrimeScroller = new JScrollPane(dPrimeDisplay); dPrimeScroller.getViewport().setScrollMode(JViewport.BLIT_SCROLL_MODE); dPrimeScroller.getVerticalScrollBar().setUnitIncrement(60); dPrimeScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(dPrimeScroller); tabs.addTab(viewItems[0], panel); //compute and show haps on next tab panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); hapDisplay = new HaplotypeDisplay(theData); HaplotypeDisplayController hdc = new HaplotypeDisplayController(hapDisplay); hapScroller = new JScrollPane(hapDisplay); panel.add(hapScroller); panel.add(hdc); tabs.addTab(viewItems[1], panel); tabs.setSelectedIndex(currentTabIndex); contents.add(tabs); //next add a little spacer //ontents.add(Box.createRigidArea(new Dimension(0,5))); //and then add the block display //theBlocks = new BlockDisplay(theData.markerInfo, theData.blocks, dPrimeDisplay, infoKnown); //contents.setBackground(Color.black); //put the block display in a scroll pane in case the data set is very large. //JScrollPane blockScroller = new JScrollPane(theBlocks, // JScrollPane.VERTICAL_SCROLLBAR_NEVER, // JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED); //blockScroller.getHorizontalScrollBar().setUnitIncrement(60); //blockScroller.setMinimumSize(new Dimension(800, 100)); //contents.add(blockScroller); repaint(); setVisible(true); }
1,110,436
void processData(){ maxCompDist = Long.parseLong(filenames[2])*1000; try{ this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); final SwingWorker worker = new SwingWorker(){ public Object construct(){ dPrimeDisplay=null; infoKnown = false; if (!(filenames[1].equals(""))){ readMarkers(new File(filenames[1])); } theData.generateDPrimeTable(maxCompDist); theData.guessBlocks(0); drawPicture(theData); theData.finished = true; return ""; } }; timer = new javax.swing.Timer(50, new ActionListener(){ public void actionPerformed(ActionEvent evt){ if (theData.finished){ timer.stop(); defineBlocksItem.setEnabled(true); readMarkerItem.setEnabled(true); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } } }); worker.start(); timer.start(); }catch (RuntimeException rtexec){ JOptionPane.showMessageDialog(this, "An error has occured. It is probably related to file format:\n"+rtexec.toString(), "Error", JOptionPane.ERROR_MESSAGE); } }
void processData(){ maxCompDist = Long.parseLong(inputOptions[2])*1000; try{ this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); final SwingWorker worker = new SwingWorker(){ public Object construct(){ dPrimeDisplay=null; infoKnown = false; if (!(filenames[1].equals(""))){ readMarkers(new File(filenames[1])); } theData.generateDPrimeTable(maxCompDist); theData.guessBlocks(0); drawPicture(theData); theData.finished = true; return ""; } }; timer = new javax.swing.Timer(50, new ActionListener(){ public void actionPerformed(ActionEvent evt){ if (theData.finished){ timer.stop(); defineBlocksItem.setEnabled(true); readMarkerItem.setEnabled(true); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } } }); worker.start(); timer.start(); }catch (RuntimeException rtexec){ JOptionPane.showMessageDialog(this, "An error has occured. It is probably related to file format:\n"+rtexec.toString(), "Error", JOptionPane.ERROR_MESSAGE); } }
1,110,437
void processData(){ maxCompDist = Long.parseLong(filenames[2])*1000; try{ this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); final SwingWorker worker = new SwingWorker(){ public Object construct(){ dPrimeDisplay=null; infoKnown = false; if (!(filenames[1].equals(""))){ readMarkers(new File(filenames[1])); } theData.generateDPrimeTable(maxCompDist); theData.guessBlocks(0); drawPicture(theData); theData.finished = true; return ""; } }; timer = new javax.swing.Timer(50, new ActionListener(){ public void actionPerformed(ActionEvent evt){ if (theData.finished){ timer.stop(); defineBlocksItem.setEnabled(true); readMarkerItem.setEnabled(true); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } } }); worker.start(); timer.start(); }catch (RuntimeException rtexec){ JOptionPane.showMessageDialog(this, "An error has occured. It is probably related to file format:\n"+rtexec.toString(), "Error", JOptionPane.ERROR_MESSAGE); } }
void processData(){ maxCompDist = Long.parseLong(filenames[2])*1000; try{ this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); final SwingWorker worker = new SwingWorker(){ public Object construct(){ dPrimeDisplay=null; infoKnown = false; if (!(filenames[1].equals(""))){ readMarkers(new File(filenames[1])); } theData.generateDPrimeTable(maxCompDist); theData.guessBlocks(0); drawPicture(theData); theData.finished = true; return ""; } }; timer = new javax.swing.Timer(50, new ActionListener(){ public void actionPerformed(ActionEvent evt){ if (theData.finished){ timer.stop(); defineBlocksItem.setEnabled(true); readMarkerItem.setEnabled(true); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } } }); worker.start(); timer.start(); }catch (RuntimeException rtexec){ JOptionPane.showMessageDialog(this, "An error has occured. It is probably related to file format:\n"+rtexec.toString(), "Error", JOptionPane.ERROR_MESSAGE); } }
1,110,438
public Object construct(){ dPrimeDisplay=null; infoKnown = false; if (!(filenames[1].equals(""))){ readMarkers(new File(filenames[1])); } theData.generateDPrimeTable(maxCompDist); theData.guessBlocks(0); drawPicture(theData); theData.finished = true; return ""; }
public Object construct(){ dPrimeDisplay=null; infoKnown = false; if (!(filenames[1].equals(""))){ readMarkers(new File(filenames[1])); } theData.generateDPrimeTable(maxCompDist); theData.guessBlocks(0); drawPicture(theData); theData.finished = true; return ""; }
1,110,439
void readPedGenotypes(String[] f){ //input is a 3 element array with //filenames[0] = ped file //filenames[1] = info file (null if none) //filenames[2] = max comparison distance (don't compute d' if markers are greater than this dist apart) filenames = f; File pedFile = new File(filenames[0]); //pop open checkdata window checkWindow = new JFrame(); checkPanel = new CheckDataPanel(pedFile); checkWindow.setTitle("Checking markers..." + pedFile.getName()); JPanel metaCheckPanel = new JPanel(); metaCheckPanel.setLayout(new BoxLayout(metaCheckPanel, BoxLayout.Y_AXIS)); JButton checkContinueButton = new JButton("Continue"); checkContinueButton.addActionListener(this); checkPanel.setAlignmentX(Component.CENTER_ALIGNMENT); metaCheckPanel.add(checkPanel); checkContinueButton.setAlignmentX(Component.CENTER_ALIGNMENT); metaCheckPanel.add(checkContinueButton); JLabel infoLabel = new JLabel("(this will create a haplotype file named " + pedFile.getName() + ".haps)"); infoLabel.setAlignmentX(Component.CENTER_ALIGNMENT); metaCheckPanel.add(infoLabel); checkWindow.setContentPane(metaCheckPanel); checkWindow.pack(); checkWindow.setVisible(true); }
void readPedGenotypes(String[] f){ //input is a 3 element array with //filenames[0] = ped file //filenames[1] = info file (null if none) //filenames[2] = max comparison distance (don't compute d' if markers are greater than this dist apart) filenames = f; File pedFile = new File(filenames[0]); //pop open checkdata window checkWindow = new JFrame(); checkPanel = new CheckDataPanel(pedFile); checkWindow.setTitle("Checking markers..." + pedFile.getName()); JPanel metaCheckPanel = new JPanel(); metaCheckPanel.setLayout(new BoxLayout(metaCheckPanel, BoxLayout.Y_AXIS)); JButton checkContinueButton = new JButton("Continue"); checkContinueButton.addActionListener(this); checkPanel.setAlignmentX(Component.CENTER_ALIGNMENT); metaCheckPanel.add(checkPanel); checkContinueButton.setAlignmentX(Component.CENTER_ALIGNMENT); metaCheckPanel.add(checkContinueButton); JLabel infoLabel = new JLabel("(this will create a haplotype file named " + pedFile.getName() + ".haps)"); infoLabel.setAlignmentX(Component.CENTER_ALIGNMENT); metaCheckPanel.add(infoLabel); checkWindow.setContentPane(metaCheckPanel); checkWindow.pack(); checkWindow.setVisible(true); }
1,110,440
void readPhasedGenotypes(String[] f){ //input is a 3 element array with //filenames[0] = haps file //filenames[1] = info file (null if none) //filenames[2] = max comparison distance (don't compute d' if markers are greater than this dist apart) filenames = f; theData = new HaploData(); try{ theData.prepareHapsInput(new File(filenames[0])); processData(); }catch(IOException ioexec) { JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } }
void readPhasedGenotypes(String[] f){ //input is a 3 element array with //filenames[0] = haps file //filenames[1] = info file (null if none) //filenames[2] = max comparison distance (don't compute d' if markers are greater than this dist apart) inputOptions = f; theData = new HaploData(); try{ theData.prepareHapsInput(new File(filenames[0])); processData(); }catch(IOException ioexec) { JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } }
1,110,441
void readPhasedGenotypes(String[] f){ //input is a 3 element array with //filenames[0] = haps file //filenames[1] = info file (null if none) //filenames[2] = max comparison distance (don't compute d' if markers are greater than this dist apart) filenames = f; theData = new HaploData(); try{ theData.prepareHapsInput(new File(filenames[0])); processData(); }catch(IOException ioexec) { JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } }
void readPhasedGenotypes(String[] f){ //input is a 3 element array with //filenames[0] = haps file //filenames[1] = info file (null if none) //filenames[2] = max comparison distance (don't compute d' if markers are greater than this dist apart) filenames = f; theData = new HaploData(); try{ theData.prepareHapsInput(new File(inputOptions[0])); processData(); }catch(IOException ioexec) { JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } }
1,110,442
private List<DiffChunk<SQLObject>> generateColumnDiffs( SQLTable sourceTable, SQLTable targetTable) throws ArchitectException { TreeSet<SQLColumn> sourceColumnList; TreeSet<SQLColumn> targetColumnList; Iterator<SQLColumn> sourceColIter; Iterator<SQLColumn> targetColIter; SQLColumn sourceColumn; SQLColumn targetColumn; boolean sourceColContinue; boolean targetColContinue; sourceColumnList = new TreeSet<SQLColumn>(comparator); targetColumnList = new TreeSet<SQLColumn>(comparator); sourceColContinue = false; targetColContinue = false; sourceColIter = null; targetColIter = null; sourceColumn = null; targetColumn = null; // We store the diffs in here, then return this listS List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); if (sourceTable != null) { sourceColumnList.addAll(sourceTable.getColumns()); } if (targetTable != null) { targetColumnList.addAll(targetTable.getColumns()); } if (sourceColumnList.size() == 0) { sourceColumnList = null; sourceColContinue = false; } else { sourceColIter = sourceColumnList.iterator(); sourceColumn = sourceColIter.next(); sourceColContinue = true; } if (targetColumnList.size() == 0) { targetColumnList = null; targetColContinue = false; } else { targetColIter = targetColumnList.iterator(); targetColumn = targetColIter.next(); targetColContinue = true; } while (sourceColContinue && targetColContinue) { // Comparing Columns while (comparator.compare(sourceColumn, targetColumn) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceColumn, DiffType.LEFTONLY)); if (sourceColIter.hasNext()) { sourceColumn = sourceColIter.next(); } else { sourceColContinue = false; break; } } // Comparing Columns while (comparator.compare(sourceColumn, targetColumn) > 0) { diffs.add(new DiffChunk<SQLObject>(targetColumn, DiffType.RIGHTONLY)); if (targetColIter.hasNext()) { targetColumn = targetColIter.next(); } else { targetColContinue = false; break; } } // Comparing Columns while (comparator.compare(sourceColumn, targetColumn) == 0) { if 
(targetColumn.getType() != sourceColumn.getType() || (targetColumn.getPrecision() != sourceColumn.getPrecision()) || (targetColumn.getScale() != sourceColumn.getScale()) || (targetColumn.getNullable() != sourceColumn.getNullable()) ) { diffs.add(new DiffChunk<SQLObject>(targetColumn, DiffType.MODIFIED)); } else { diffs.add(new DiffChunk<SQLObject>(sourceColumn, DiffType.SAME)); } if (targetColIter.hasNext()) { targetColumn = targetColIter.next(); } else { targetColContinue = false; } if (sourceColIter.hasNext()) { sourceColumn = sourceColIter.next(); } else { sourceColContinue = false; } if (!sourceColContinue || !targetColContinue) { break; } } } while (sourceColContinue) { diffs.add(new DiffChunk<SQLObject>(sourceColumn, DiffType.LEFTONLY)); if (sourceColIter.hasNext()) { sourceColumn = sourceColIter.next(); } else { sourceColContinue = false; } } while (targetColContinue) { diffs.add(new DiffChunk<SQLObject>(targetColumn, DiffType.RIGHTONLY)); if (targetColIter.hasNext()) { targetColumn = targetColIter.next(); } else { targetColContinue = false; } } return diffs; }
private List<DiffChunk<SQLObject>> generateColumnDiffs( SQLTable sourceTable, SQLTable targetTable) throws ArchitectException { TreeSet<SQLColumn> sourceColumnList; TreeSet<SQLColumn> targetColumnList; Iterator<SQLColumn> sourceColIter; Iterator<SQLColumn> targetColIter; SQLColumn sourceColumn; SQLColumn targetColumn; boolean sourceColContinue; boolean targetColContinue; sourceColumnList = new TreeSet<SQLColumn>(comparator); targetColumnList = new TreeSet<SQLColumn>(comparator); sourceColContinue = false; targetColContinue = false; sourceColIter = null; targetColIter = null; sourceColumn = null; targetColumn = null; // We store the diffs in here, then return this listS List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); if (sourceTable != null) { sourceColumnList.addAll(sourceTable.getColumns()); } if (targetTable != null) { targetColumnList.addAll(targetTable.getColumns()); } if (sourceColumnList.size() == 0) { sourceColumnList = null; sourceColContinue = false; } else { sourceColIter = sourceColumnList.iterator(); sourceColumn = sourceColIter.next(); sourceColContinue = true; } if (targetColumnList.size() == 0) { targetColumnList = null; targetColContinue = false; } else { targetColIter = targetColumnList.iterator(); targetColumn = targetColIter.next(); targetColContinue = true; } while (sourceColContinue && targetColContinue) { // Comparing Columns if (comparator.compare(sourceColumn, targetColumn) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceColumn, DiffType.LEFTONLY)); if (sourceColIter.hasNext()) { sourceColumn = sourceColIter.next(); } else { sourceColContinue = false; break; } } // Comparing Columns while (comparator.compare(sourceColumn, targetColumn) > 0) { diffs.add(new DiffChunk<SQLObject>(targetColumn, DiffType.RIGHTONLY)); if (targetColIter.hasNext()) { targetColumn = targetColIter.next(); } else { targetColContinue = false; break; } } // Comparing Columns while (comparator.compare(sourceColumn, targetColumn) == 0) { if 
(targetColumn.getType() != sourceColumn.getType() || (targetColumn.getPrecision() != sourceColumn.getPrecision()) || (targetColumn.getScale() != sourceColumn.getScale()) || (targetColumn.getNullable() != sourceColumn.getNullable()) ) { diffs.add(new DiffChunk<SQLObject>(targetColumn, DiffType.MODIFIED)); } else { diffs.add(new DiffChunk<SQLObject>(sourceColumn, DiffType.SAME)); } if (targetColIter.hasNext()) { targetColumn = targetColIter.next(); } else { targetColContinue = false; } if (sourceColIter.hasNext()) { sourceColumn = sourceColIter.next(); } else { sourceColContinue = false; } if (!sourceColContinue || !targetColContinue) { break; } } } while (sourceColContinue) { diffs.add(new DiffChunk<SQLObject>(sourceColumn, DiffType.LEFTONLY)); if (sourceColIter.hasNext()) { sourceColumn = sourceColIter.next(); } else { sourceColContinue = false; } } while (targetColContinue) { diffs.add(new DiffChunk<SQLObject>(targetColumn, DiffType.RIGHTONLY)); if (targetColIter.hasNext()) { targetColumn = targetColIter.next(); } else { targetColContinue = false; } } return diffs; }
1,110,443
private List<DiffChunk<SQLObject>> generateColumnDiffs( SQLTable sourceTable, SQLTable targetTable) throws ArchitectException { TreeSet<SQLColumn> sourceColumnList; TreeSet<SQLColumn> targetColumnList; Iterator<SQLColumn> sourceColIter; Iterator<SQLColumn> targetColIter; SQLColumn sourceColumn; SQLColumn targetColumn; boolean sourceColContinue; boolean targetColContinue; sourceColumnList = new TreeSet<SQLColumn>(comparator); targetColumnList = new TreeSet<SQLColumn>(comparator); sourceColContinue = false; targetColContinue = false; sourceColIter = null; targetColIter = null; sourceColumn = null; targetColumn = null; // We store the diffs in here, then return this listS List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); if (sourceTable != null) { sourceColumnList.addAll(sourceTable.getColumns()); } if (targetTable != null) { targetColumnList.addAll(targetTable.getColumns()); } if (sourceColumnList.size() == 0) { sourceColumnList = null; sourceColContinue = false; } else { sourceColIter = sourceColumnList.iterator(); sourceColumn = sourceColIter.next(); sourceColContinue = true; } if (targetColumnList.size() == 0) { targetColumnList = null; targetColContinue = false; } else { targetColIter = targetColumnList.iterator(); targetColumn = targetColIter.next(); targetColContinue = true; } while (sourceColContinue && targetColContinue) { // Comparing Columns while (comparator.compare(sourceColumn, targetColumn) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceColumn, DiffType.LEFTONLY)); if (sourceColIter.hasNext()) { sourceColumn = sourceColIter.next(); } else { sourceColContinue = false; break; } } // Comparing Columns while (comparator.compare(sourceColumn, targetColumn) > 0) { diffs.add(new DiffChunk<SQLObject>(targetColumn, DiffType.RIGHTONLY)); if (targetColIter.hasNext()) { targetColumn = targetColIter.next(); } else { targetColContinue = false; break; } } // Comparing Columns while (comparator.compare(sourceColumn, targetColumn) == 0) { if 
(targetColumn.getType() != sourceColumn.getType() || (targetColumn.getPrecision() != sourceColumn.getPrecision()) || (targetColumn.getScale() != sourceColumn.getScale()) || (targetColumn.getNullable() != sourceColumn.getNullable()) ) { diffs.add(new DiffChunk<SQLObject>(targetColumn, DiffType.MODIFIED)); } else { diffs.add(new DiffChunk<SQLObject>(sourceColumn, DiffType.SAME)); } if (targetColIter.hasNext()) { targetColumn = targetColIter.next(); } else { targetColContinue = false; } if (sourceColIter.hasNext()) { sourceColumn = sourceColIter.next(); } else { sourceColContinue = false; } if (!sourceColContinue || !targetColContinue) { break; } } } while (sourceColContinue) { diffs.add(new DiffChunk<SQLObject>(sourceColumn, DiffType.LEFTONLY)); if (sourceColIter.hasNext()) { sourceColumn = sourceColIter.next(); } else { sourceColContinue = false; } } while (targetColContinue) { diffs.add(new DiffChunk<SQLObject>(targetColumn, DiffType.RIGHTONLY)); if (targetColIter.hasNext()) { targetColumn = targetColIter.next(); } else { targetColContinue = false; } } return diffs; }
private List<DiffChunk<SQLObject>> generateColumnDiffs( SQLTable sourceTable, SQLTable targetTable) throws ArchitectException { TreeSet<SQLColumn> sourceColumnList; TreeSet<SQLColumn> targetColumnList; Iterator<SQLColumn> sourceColIter; Iterator<SQLColumn> targetColIter; SQLColumn sourceColumn; SQLColumn targetColumn; boolean sourceColContinue; boolean targetColContinue; sourceColumnList = new TreeSet<SQLColumn>(comparator); targetColumnList = new TreeSet<SQLColumn>(comparator); sourceColContinue = false; targetColContinue = false; sourceColIter = null; targetColIter = null; sourceColumn = null; targetColumn = null; // We store the diffs in here, then return this listS List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); if (sourceTable != null) { sourceColumnList.addAll(sourceTable.getColumns()); } if (targetTable != null) { targetColumnList.addAll(targetTable.getColumns()); } if (sourceColumnList.size() == 0) { sourceColumnList = null; sourceColContinue = false; } else { sourceColIter = sourceColumnList.iterator(); sourceColumn = sourceColIter.next(); sourceColContinue = true; } if (targetColumnList.size() == 0) { targetColumnList = null; targetColContinue = false; } else { targetColIter = targetColumnList.iterator(); targetColumn = targetColIter.next(); targetColContinue = true; } while (sourceColContinue && targetColContinue) { // Comparing Columns while (comparator.compare(sourceColumn, targetColumn) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceColumn, DiffType.LEFTONLY)); if (sourceColIter.hasNext()) { sourceColumn = sourceColIter.next(); } else { sourceColContinue = false; } } // Comparing Columns while (comparator.compare(sourceColumn, targetColumn) > 0) { diffs.add(new DiffChunk<SQLObject>(targetColumn, DiffType.RIGHTONLY)); if (targetColIter.hasNext()) { targetColumn = targetColIter.next(); } else { targetColContinue = false; } } // Comparing Columns while (comparator.compare(sourceColumn, targetColumn) == 0) { if 
(targetColumn.getType() != sourceColumn.getType() || (targetColumn.getPrecision() != sourceColumn.getPrecision()) || (targetColumn.getScale() != sourceColumn.getScale()) || (targetColumn.getNullable() != sourceColumn.getNullable()) ) { diffs.add(new DiffChunk<SQLObject>(targetColumn, DiffType.MODIFIED)); } else { diffs.add(new DiffChunk<SQLObject>(sourceColumn, DiffType.SAME)); } if (targetColIter.hasNext()) { targetColumn = targetColIter.next(); } else { targetColContinue = false; } if (sourceColIter.hasNext()) { sourceColumn = sourceColIter.next(); } else { sourceColContinue = false; } if (!sourceColContinue || !targetColContinue) { } } } while (sourceColContinue) { diffs.add(new DiffChunk<SQLObject>(sourceColumn, DiffType.LEFTONLY)); if (sourceColIter.hasNext()) { sourceColumn = sourceColIter.next(); } else { sourceColContinue = false; } } while (targetColContinue) { diffs.add(new DiffChunk<SQLObject>(targetColumn, DiffType.RIGHTONLY)); if (targetColIter.hasNext()) { targetColumn = targetColIter.next(); } else { targetColContinue = false; } } return diffs; }
1,110,444
private List<DiffChunk<SQLObject>> generateColumnDiffs( SQLTable sourceTable, SQLTable targetTable) throws ArchitectException { TreeSet<SQLColumn> sourceColumnList; TreeSet<SQLColumn> targetColumnList; Iterator<SQLColumn> sourceColIter; Iterator<SQLColumn> targetColIter; SQLColumn sourceColumn; SQLColumn targetColumn; boolean sourceColContinue; boolean targetColContinue; sourceColumnList = new TreeSet<SQLColumn>(comparator); targetColumnList = new TreeSet<SQLColumn>(comparator); sourceColContinue = false; targetColContinue = false; sourceColIter = null; targetColIter = null; sourceColumn = null; targetColumn = null; // We store the diffs in here, then return this listS List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); if (sourceTable != null) { sourceColumnList.addAll(sourceTable.getColumns()); } if (targetTable != null) { targetColumnList.addAll(targetTable.getColumns()); } if (sourceColumnList.size() == 0) { sourceColumnList = null; sourceColContinue = false; } else { sourceColIter = sourceColumnList.iterator(); sourceColumn = sourceColIter.next(); sourceColContinue = true; } if (targetColumnList.size() == 0) { targetColumnList = null; targetColContinue = false; } else { targetColIter = targetColumnList.iterator(); targetColumn = targetColIter.next(); targetColContinue = true; } while (sourceColContinue && targetColContinue) { // Comparing Columns while (comparator.compare(sourceColumn, targetColumn) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceColumn, DiffType.LEFTONLY)); if (sourceColIter.hasNext()) { sourceColumn = sourceColIter.next(); } else { sourceColContinue = false; break; } } // Comparing Columns while (comparator.compare(sourceColumn, targetColumn) > 0) { diffs.add(new DiffChunk<SQLObject>(targetColumn, DiffType.RIGHTONLY)); if (targetColIter.hasNext()) { targetColumn = targetColIter.next(); } else { targetColContinue = false; break; } } // Comparing Columns while (comparator.compare(sourceColumn, targetColumn) == 0) { if 
(targetColumn.getType() != sourceColumn.getType() || (targetColumn.getPrecision() != sourceColumn.getPrecision()) || (targetColumn.getScale() != sourceColumn.getScale()) || (targetColumn.getNullable() != sourceColumn.getNullable()) ) { diffs.add(new DiffChunk<SQLObject>(targetColumn, DiffType.MODIFIED)); } else { diffs.add(new DiffChunk<SQLObject>(sourceColumn, DiffType.SAME)); } if (targetColIter.hasNext()) { targetColumn = targetColIter.next(); } else { targetColContinue = false; } if (sourceColIter.hasNext()) { sourceColumn = sourceColIter.next(); } else { sourceColContinue = false; } if (!sourceColContinue || !targetColContinue) { break; } } } while (sourceColContinue) { diffs.add(new DiffChunk<SQLObject>(sourceColumn, DiffType.LEFTONLY)); if (sourceColIter.hasNext()) { sourceColumn = sourceColIter.next(); } else { sourceColContinue = false; } } while (targetColContinue) { diffs.add(new DiffChunk<SQLObject>(targetColumn, DiffType.RIGHTONLY)); if (targetColIter.hasNext()) { targetColumn = targetColIter.next(); } else { targetColContinue = false; } } return diffs; }
private List<DiffChunk<SQLObject>> generateColumnDiffs( SQLTable sourceTable, SQLTable targetTable) throws ArchitectException { TreeSet<SQLColumn> sourceColumnList; TreeSet<SQLColumn> targetColumnList; Iterator<SQLColumn> sourceColIter; Iterator<SQLColumn> targetColIter; SQLColumn sourceColumn; SQLColumn targetColumn; boolean sourceColContinue; boolean targetColContinue; sourceColumnList = new TreeSet<SQLColumn>(comparator); targetColumnList = new TreeSet<SQLColumn>(comparator); sourceColContinue = false; targetColContinue = false; sourceColIter = null; targetColIter = null; sourceColumn = null; targetColumn = null; // We store the diffs in here, then return this listS List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); if (sourceTable != null) { sourceColumnList.addAll(sourceTable.getColumns()); } if (targetTable != null) { targetColumnList.addAll(targetTable.getColumns()); } if (sourceColumnList.size() == 0) { sourceColumnList = null; sourceColContinue = false; } else { sourceColIter = sourceColumnList.iterator(); sourceColumn = sourceColIter.next(); sourceColContinue = true; } if (targetColumnList.size() == 0) { targetColumnList = null; targetColContinue = false; } else { targetColIter = targetColumnList.iterator(); targetColumn = targetColIter.next(); targetColContinue = true; } while (sourceColContinue && targetColContinue) { // Comparing Columns while (comparator.compare(sourceColumn, targetColumn) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceColumn, DiffType.LEFTONLY)); if (sourceColIter.hasNext()) { sourceColumn = sourceColIter.next(); } else { sourceColContinue = false; break; } } // Comparing Columns if (comparator.compare(sourceColumn, targetColumn) > 0) { diffs.add(new DiffChunk<SQLObject>(targetColumn, DiffType.RIGHTONLY)); if (targetColIter.hasNext()) { targetColumn = targetColIter.next(); } else { targetColContinue = false; break; } } // Comparing Columns while (comparator.compare(sourceColumn, targetColumn) == 0) { if 
(targetColumn.getType() != sourceColumn.getType() || (targetColumn.getPrecision() != sourceColumn.getPrecision()) || (targetColumn.getScale() != sourceColumn.getScale()) || (targetColumn.getNullable() != sourceColumn.getNullable()) ) { diffs.add(new DiffChunk<SQLObject>(targetColumn, DiffType.MODIFIED)); } else { diffs.add(new DiffChunk<SQLObject>(sourceColumn, DiffType.SAME)); } if (targetColIter.hasNext()) { targetColumn = targetColIter.next(); } else { targetColContinue = false; } if (sourceColIter.hasNext()) { sourceColumn = sourceColIter.next(); } else { sourceColContinue = false; } if (!sourceColContinue || !targetColContinue) { break; } } } while (sourceColContinue) { diffs.add(new DiffChunk<SQLObject>(sourceColumn, DiffType.LEFTONLY)); if (sourceColIter.hasNext()) { sourceColumn = sourceColIter.next(); } else { sourceColContinue = false; } } while (targetColContinue) { diffs.add(new DiffChunk<SQLObject>(targetColumn, DiffType.RIGHTONLY)); if (targetColIter.hasNext()) { targetColumn = targetColIter.next(); } else { targetColContinue = false; } } return diffs; }
1,110,445
private List<DiffChunk<SQLObject>> generateColumnDiffs( SQLTable sourceTable, SQLTable targetTable) throws ArchitectException { TreeSet<SQLColumn> sourceColumnList; TreeSet<SQLColumn> targetColumnList; Iterator<SQLColumn> sourceColIter; Iterator<SQLColumn> targetColIter; SQLColumn sourceColumn; SQLColumn targetColumn; boolean sourceColContinue; boolean targetColContinue; sourceColumnList = new TreeSet<SQLColumn>(comparator); targetColumnList = new TreeSet<SQLColumn>(comparator); sourceColContinue = false; targetColContinue = false; sourceColIter = null; targetColIter = null; sourceColumn = null; targetColumn = null; // We store the diffs in here, then return this listS List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); if (sourceTable != null) { sourceColumnList.addAll(sourceTable.getColumns()); } if (targetTable != null) { targetColumnList.addAll(targetTable.getColumns()); } if (sourceColumnList.size() == 0) { sourceColumnList = null; sourceColContinue = false; } else { sourceColIter = sourceColumnList.iterator(); sourceColumn = sourceColIter.next(); sourceColContinue = true; } if (targetColumnList.size() == 0) { targetColumnList = null; targetColContinue = false; } else { targetColIter = targetColumnList.iterator(); targetColumn = targetColIter.next(); targetColContinue = true; } while (sourceColContinue && targetColContinue) { // Comparing Columns while (comparator.compare(sourceColumn, targetColumn) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceColumn, DiffType.LEFTONLY)); if (sourceColIter.hasNext()) { sourceColumn = sourceColIter.next(); } else { sourceColContinue = false; break; } } // Comparing Columns while (comparator.compare(sourceColumn, targetColumn) > 0) { diffs.add(new DiffChunk<SQLObject>(targetColumn, DiffType.RIGHTONLY)); if (targetColIter.hasNext()) { targetColumn = targetColIter.next(); } else { targetColContinue = false; break; } } // Comparing Columns while (comparator.compare(sourceColumn, targetColumn) == 0) { if 
(targetColumn.getType() != sourceColumn.getType() || (targetColumn.getPrecision() != sourceColumn.getPrecision()) || (targetColumn.getScale() != sourceColumn.getScale()) || (targetColumn.getNullable() != sourceColumn.getNullable()) ) { diffs.add(new DiffChunk<SQLObject>(targetColumn, DiffType.MODIFIED)); } else { diffs.add(new DiffChunk<SQLObject>(sourceColumn, DiffType.SAME)); } if (targetColIter.hasNext()) { targetColumn = targetColIter.next(); } else { targetColContinue = false; } if (sourceColIter.hasNext()) { sourceColumn = sourceColIter.next(); } else { sourceColContinue = false; } if (!sourceColContinue || !targetColContinue) { break; } } } while (sourceColContinue) { diffs.add(new DiffChunk<SQLObject>(sourceColumn, DiffType.LEFTONLY)); if (sourceColIter.hasNext()) { sourceColumn = sourceColIter.next(); } else { sourceColContinue = false; } } while (targetColContinue) { diffs.add(new DiffChunk<SQLObject>(targetColumn, DiffType.RIGHTONLY)); if (targetColIter.hasNext()) { targetColumn = targetColIter.next(); } else { targetColContinue = false; } } return diffs; }
private List<DiffChunk<SQLObject>> generateColumnDiffs( SQLTable sourceTable, SQLTable targetTable) throws ArchitectException { TreeSet<SQLColumn> sourceColumnList; TreeSet<SQLColumn> targetColumnList; Iterator<SQLColumn> sourceColIter; Iterator<SQLColumn> targetColIter; SQLColumn sourceColumn; SQLColumn targetColumn; boolean sourceColContinue; boolean targetColContinue; sourceColumnList = new TreeSet<SQLColumn>(comparator); targetColumnList = new TreeSet<SQLColumn>(comparator); sourceColContinue = false; targetColContinue = false; sourceColIter = null; targetColIter = null; sourceColumn = null; targetColumn = null; // We store the diffs in here, then return this listS List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); if (sourceTable != null) { sourceColumnList.addAll(sourceTable.getColumns()); } if (targetTable != null) { targetColumnList.addAll(targetTable.getColumns()); } if (sourceColumnList.size() == 0) { sourceColumnList = null; sourceColContinue = false; } else { sourceColIter = sourceColumnList.iterator(); sourceColumn = sourceColIter.next(); sourceColContinue = true; } if (targetColumnList.size() == 0) { targetColumnList = null; targetColContinue = false; } else { targetColIter = targetColumnList.iterator(); targetColumn = targetColIter.next(); targetColContinue = true; } while (sourceColContinue && targetColContinue) { // Comparing Columns while (comparator.compare(sourceColumn, targetColumn) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceColumn, DiffType.LEFTONLY)); if (sourceColIter.hasNext()) { sourceColumn = sourceColIter.next(); } else { sourceColContinue = false; break; } } // Comparing Columns while (comparator.compare(sourceColumn, targetColumn) > 0) { diffs.add(new DiffChunk<SQLObject>(targetColumn, DiffType.RIGHTONLY)); if (targetColIter.hasNext()) { targetColumn = targetColIter.next(); } else { targetColContinue = false; break; } } // Comparing Columns if (comparator.compare(sourceColumn, targetColumn) == 0) { if 
(targetColumn.getType() != sourceColumn.getType() || (targetColumn.getPrecision() != sourceColumn.getPrecision()) || (targetColumn.getScale() != sourceColumn.getScale()) || (targetColumn.getNullable() != sourceColumn.getNullable()) ) { diffs.add(new DiffChunk<SQLObject>(targetColumn, DiffType.MODIFIED)); } else { diffs.add(new DiffChunk<SQLObject>(sourceColumn, DiffType.SAME)); } if (targetColIter.hasNext()) { targetColumn = targetColIter.next(); } else { targetColContinue = false; } if (sourceColIter.hasNext()) { sourceColumn = sourceColIter.next(); } else { sourceColContinue = false; } if (!sourceColContinue || !targetColContinue) { break; } } } while (sourceColContinue) { diffs.add(new DiffChunk<SQLObject>(sourceColumn, DiffType.LEFTONLY)); if (sourceColIter.hasNext()) { sourceColumn = sourceColIter.next(); } else { sourceColContinue = false; } } while (targetColContinue) { diffs.add(new DiffChunk<SQLObject>(targetColumn, DiffType.RIGHTONLY)); if (targetColIter.hasNext()) { targetColumn = targetColIter.next(); } else { targetColContinue = false; } } return diffs; }
1,110,446
private List<DiffChunk<SQLObject>> generateRelationshipDiffs( Collection<SQLTable> sourceTables, Collection<SQLTable> targetTables) throws ArchitectException { //XXX: This is using an incorrect comparator, it only checks by relationship //names and does not get into a deeper level of checking the mappings. Set<SQLRelationship> sourceRels = new TreeSet<SQLRelationship>(comparator); Set<SQLRelationship> targetRels = new TreeSet<SQLRelationship>(comparator); for (SQLTable t : sourceTables) { sourceRels.addAll(t.getImportedKeys()); } for (SQLTable t : targetTables) { targetRels.addAll(t.getImportedKeys()); } logger.debug("Source relationships: "+sourceRels); logger.debug("Target relationships: "+targetRels); List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); Iterator<SQLRelationship> sourceIter = sourceRels.iterator(); Iterator<SQLRelationship> targetIter = targetRels.iterator(); SQLRelationship targetRel; SQLRelationship sourceRel; boolean sourceContinue; boolean targetContinue; //Checks if both lists of tables contain any tables at all, if they do //the iterator is initialized for the list if (sourceIter.hasNext()) { sourceContinue = true; sourceRel = sourceIter.next(); } else { sourceContinue = false; sourceRel = null; } if (targetIter.hasNext()) { targetContinue = true; targetRel = targetIter.next(); } else { targetContinue = false; targetRel = null; } // Will loop until one or both of the lists reaches its last table while (sourceContinue && targetContinue) { // bring the source table up to the same level as the target while (comparator.compare(sourceRel, targetRel) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } // bring the target table up to the same level as the source while (comparator.compare(sourceRel, targetRel) > 0) { diffs.add(new 
DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); // now do the mappings diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } } while (comparator.compare(sourceRel, targetRel) == 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.SAME)); // now do the columns diffs.addAll(generateMappingDiffs(sourceRel, targetRel)); if (!targetIter.hasNext() && !sourceIter.hasNext()) { targetContinue = false; sourceContinue = false; break; } if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } } // If any tables in the sourceList still exist, the changes are added while (sourceContinue) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; } } //If any remaining tables in the targetList still exist, they are now being added while (targetContinue) { diffs.add(new DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; } } return diffs; }
private List<DiffChunk<SQLObject>> generateRelationshipDiffs( Collection<SQLTable> sourceTables, Collection<SQLTable> targetTables) throws ArchitectException { //XXX: This is using an incorrect comparator, it only checks by relationship //names and does not get into a deeper level of checking the mappings. Set<SQLRelationship> sourceRels = new TreeSet<SQLRelationship>(comparator); Set<SQLRelationship> targetRels = new TreeSet<SQLRelationship>(comparator); for (SQLTable t : sourceTables) { sourceRels.addAll(t.getImportedKeys()); } for (SQLTable t : targetTables) { targetRels.addAll(t.getImportedKeys()); } logger.debug("Source relationships: "+sourceRels); logger.debug("Target relationships: "+targetRels); List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); Iterator<SQLRelationship> sourceIter = sourceRels.iterator(); Iterator<SQLRelationship> targetIter = targetRels.iterator(); SQLRelationship targetRel; SQLRelationship sourceRel; boolean sourceContinue; boolean targetContinue; //Checks if both lists of tables contain any tables at all, if they do //the iterator is initialized for the list if (sourceIter.hasNext()) { sourceContinue = true; sourceRel = sourceIter.next(); } else { sourceContinue = false; sourceRel = null; } if (targetIter.hasNext()) { targetContinue = true; targetRel = targetIter.next(); } else { targetContinue = false; targetRel = null; } // Will loop until one or both of the lists reaches its last table while (sourceContinue && targetContinue) { // bring the source table up to the same level as the target while (comparator.compare(sourceRel, targetRel) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } // bring the target table up to the same level as the source while (comparator.compare(sourceRel, targetRel) > 0) { diffs.add(new 
DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); // now do the mappings diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } } while (comparator.compare(sourceRel, targetRel) == 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.SAME)); // now do the columns diffs.addAll(generateMappingDiffs(sourceRel, targetRel)); if (!targetIter.hasNext() && !sourceIter.hasNext()) { targetContinue = false; sourceContinue = false; break; } if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } } // If any tables in the sourceList still exist, the changes are added while (sourceContinue) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; } } //If any remaining tables in the targetList still exist, they are now being added while (targetContinue) { diffs.add(new DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; } } return diffs; }
1,110,448
private List<DiffChunk<SQLObject>> generateRelationshipDiffs( Collection<SQLTable> sourceTables, Collection<SQLTable> targetTables) throws ArchitectException { //XXX: This is using an incorrect comparator, it only checks by relationship //names and does not get into a deeper level of checking the mappings. Set<SQLRelationship> sourceRels = new TreeSet<SQLRelationship>(comparator); Set<SQLRelationship> targetRels = new TreeSet<SQLRelationship>(comparator); for (SQLTable t : sourceTables) { sourceRels.addAll(t.getImportedKeys()); } for (SQLTable t : targetTables) { targetRels.addAll(t.getImportedKeys()); } logger.debug("Source relationships: "+sourceRels); logger.debug("Target relationships: "+targetRels); List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); Iterator<SQLRelationship> sourceIter = sourceRels.iterator(); Iterator<SQLRelationship> targetIter = targetRels.iterator(); SQLRelationship targetRel; SQLRelationship sourceRel; boolean sourceContinue; boolean targetContinue; //Checks if both lists of tables contain any tables at all, if they do //the iterator is initialized for the list if (sourceIter.hasNext()) { sourceContinue = true; sourceRel = sourceIter.next(); } else { sourceContinue = false; sourceRel = null; } if (targetIter.hasNext()) { targetContinue = true; targetRel = targetIter.next(); } else { targetContinue = false; targetRel = null; } // Will loop until one or both of the lists reaches its last table while (sourceContinue && targetContinue) { // bring the source table up to the same level as the target while (comparator.compare(sourceRel, targetRel) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } // bring the target table up to the same level as the source while (comparator.compare(sourceRel, targetRel) > 0) { diffs.add(new 
DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); // now do the mappings diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } } while (comparator.compare(sourceRel, targetRel) == 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.SAME)); // now do the columns diffs.addAll(generateMappingDiffs(sourceRel, targetRel)); if (!targetIter.hasNext() && !sourceIter.hasNext()) { targetContinue = false; sourceContinue = false; break; } if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } } // If any tables in the sourceList still exist, the changes are added while (sourceContinue) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; } } //If any remaining tables in the targetList still exist, they are now being added while (targetContinue) { diffs.add(new DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; } } return diffs; }
private List<DiffChunk<SQLObject>> generateRelationshipDiffs( Collection<SQLTable> sourceTables, Collection<SQLTable> targetTables) throws ArchitectException { //XXX: This is using an incorrect comparator, it only checks by relationship //names and does not get into a deeper level of checking the mappings. Set<SQLRelationship> sourceRels = new TreeSet<SQLRelationship>(comparator); Set<SQLRelationship> targetRels = new TreeSet<SQLRelationship>(comparator); for (SQLTable t : sourceTables) { sourceRels.addAll(t.getImportedKeys()); } for (SQLTable t : targetTables) { targetRels.addAll(t.getImportedKeys()); } logger.debug("Source relationships: "+sourceRels); logger.debug("Target relationships: "+targetRels); List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); Iterator<SQLRelationship> sourceIter = sourceRels.iterator(); Iterator<SQLRelationship> targetIter = targetRels.iterator(); SQLRelationship targetRel; SQLRelationship sourceRel; boolean sourceContinue; boolean targetContinue; //Checks if both lists of tables contain any tables at all, if they do //the iterator is initialized for the list if (sourceIter.hasNext()) { sourceContinue = true; sourceRel = sourceIter.next(); } else { sourceContinue = false; sourceRel = null; } if (targetIter.hasNext()) { targetContinue = true; targetRel = targetIter.next(); } else { targetContinue = false; targetRel = null; } // Will loop until one or both of the lists reaches its last table while (sourceContinue && targetContinue) { // bring the source table up to the same level as the target while (comparator.compare(sourceRel, targetRel) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } // bring the target table up to the same level as the source while (comparator.compare(sourceRel, targetRel) > 0) { diffs.add(new 
DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); // now do the mappings diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } } while (comparator.compare(sourceRel, targetRel) == 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.SAME)); // now do the columns diffs.addAll(generateMappingDiffs(sourceRel, targetRel)); if (!targetIter.hasNext() && !sourceIter.hasNext()) { targetContinue = false; sourceContinue = false; break; } if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } } // If any tables in the sourceList still exist, the changes are added while (sourceContinue) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; } } //If any remaining tables in the targetList still exist, they are now being added while (targetContinue) { diffs.add(new DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; } } return diffs; }
1,110,449
private List<DiffChunk<SQLObject>> generateRelationshipDiffs( Collection<SQLTable> sourceTables, Collection<SQLTable> targetTables) throws ArchitectException { //XXX: This is using an incorrect comparator, it only checks by relationship //names and does not get into a deeper level of checking the mappings. Set<SQLRelationship> sourceRels = new TreeSet<SQLRelationship>(comparator); Set<SQLRelationship> targetRels = new TreeSet<SQLRelationship>(comparator); for (SQLTable t : sourceTables) { sourceRels.addAll(t.getImportedKeys()); } for (SQLTable t : targetTables) { targetRels.addAll(t.getImportedKeys()); } logger.debug("Source relationships: "+sourceRels); logger.debug("Target relationships: "+targetRels); List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); Iterator<SQLRelationship> sourceIter = sourceRels.iterator(); Iterator<SQLRelationship> targetIter = targetRels.iterator(); SQLRelationship targetRel; SQLRelationship sourceRel; boolean sourceContinue; boolean targetContinue; //Checks if both lists of tables contain any tables at all, if they do //the iterator is initialized for the list if (sourceIter.hasNext()) { sourceContinue = true; sourceRel = sourceIter.next(); } else { sourceContinue = false; sourceRel = null; } if (targetIter.hasNext()) { targetContinue = true; targetRel = targetIter.next(); } else { targetContinue = false; targetRel = null; } // Will loop until one or both of the lists reaches its last table while (sourceContinue && targetContinue) { // bring the source table up to the same level as the target while (comparator.compare(sourceRel, targetRel) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } // bring the target table up to the same level as the source while (comparator.compare(sourceRel, targetRel) > 0) { diffs.add(new 
DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); // now do the mappings diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } } while (comparator.compare(sourceRel, targetRel) == 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.SAME)); // now do the columns diffs.addAll(generateMappingDiffs(sourceRel, targetRel)); if (!targetIter.hasNext() && !sourceIter.hasNext()) { targetContinue = false; sourceContinue = false; break; } if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } } // If any tables in the sourceList still exist, the changes are added while (sourceContinue) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; } } //If any remaining tables in the targetList still exist, they are now being added while (targetContinue) { diffs.add(new DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; } } return diffs; }
private List<DiffChunk<SQLObject>> generateRelationshipDiffs( Collection<SQLTable> sourceTables, Collection<SQLTable> targetTables) throws ArchitectException { //XXX: This is using an incorrect comparator, it only checks by relationship //names and does not get into a deeper level of checking the mappings. Set<SQLRelationship> sourceRels = new TreeSet<SQLRelationship>(comparator); Set<SQLRelationship> targetRels = new TreeSet<SQLRelationship>(comparator); for (SQLTable t : sourceTables) { sourceRels.addAll(t.getImportedKeys()); } for (SQLTable t : targetTables) { targetRels.addAll(t.getImportedKeys()); } logger.debug("Source relationships: "+sourceRels); logger.debug("Target relationships: "+targetRels); List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); Iterator<SQLRelationship> sourceIter = sourceRels.iterator(); Iterator<SQLRelationship> targetIter = targetRels.iterator(); SQLRelationship targetRel; SQLRelationship sourceRel; boolean sourceContinue; boolean targetContinue; //Checks if both lists of tables contain any tables at all, if they do //the iterator is initialized for the list if (sourceIter.hasNext()) { sourceContinue = true; sourceRel = sourceIter.next(); } else { sourceContinue = false; sourceRel = null; } if (targetIter.hasNext()) { targetContinue = true; targetRel = targetIter.next(); } else { targetContinue = false; targetRel = null; } // Will loop until one or both of the lists reaches its last table while (sourceContinue && targetContinue) { // bring the source table up to the same level as the target while (comparator.compare(sourceRel, targetRel) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } // bring the target table up to the same level as the source while (comparator.compare(sourceRel, targetRel) > 0) { diffs.add(new 
DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); // now do the mappings diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } } while (comparator.compare(sourceRel, targetRel) == 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.SAME)); // now do the columns diffs.addAll(generateMappingDiffs(sourceRel, targetRel)); if (!targetIter.hasNext() && !sourceIter.hasNext()) { targetContinue = false; sourceContinue = false; break; } if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } } // If any tables in the sourceList still exist, the changes are added while (sourceContinue) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; } } //If any remaining tables in the targetList still exist, they are now being added while (targetContinue) { diffs.add(new DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; } } return diffs; }
1,110,450
private List<DiffChunk<SQLObject>> generateRelationshipDiffs( Collection<SQLTable> sourceTables, Collection<SQLTable> targetTables) throws ArchitectException { //XXX: This is using an incorrect comparator, it only checks by relationship //names and does not get into a deeper level of checking the mappings. Set<SQLRelationship> sourceRels = new TreeSet<SQLRelationship>(comparator); Set<SQLRelationship> targetRels = new TreeSet<SQLRelationship>(comparator); for (SQLTable t : sourceTables) { sourceRels.addAll(t.getImportedKeys()); } for (SQLTable t : targetTables) { targetRels.addAll(t.getImportedKeys()); } logger.debug("Source relationships: "+sourceRels); logger.debug("Target relationships: "+targetRels); List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); Iterator<SQLRelationship> sourceIter = sourceRels.iterator(); Iterator<SQLRelationship> targetIter = targetRels.iterator(); SQLRelationship targetRel; SQLRelationship sourceRel; boolean sourceContinue; boolean targetContinue; //Checks if both lists of tables contain any tables at all, if they do //the iterator is initialized for the list if (sourceIter.hasNext()) { sourceContinue = true; sourceRel = sourceIter.next(); } else { sourceContinue = false; sourceRel = null; } if (targetIter.hasNext()) { targetContinue = true; targetRel = targetIter.next(); } else { targetContinue = false; targetRel = null; } // Will loop until one or both of the lists reaches its last table while (sourceContinue && targetContinue) { // bring the source table up to the same level as the target while (comparator.compare(sourceRel, targetRel) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } // bring the target table up to the same level as the source while (comparator.compare(sourceRel, targetRel) > 0) { diffs.add(new 
DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); // now do the mappings diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } } while (comparator.compare(sourceRel, targetRel) == 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.SAME)); // now do the columns diffs.addAll(generateMappingDiffs(sourceRel, targetRel)); if (!targetIter.hasNext() && !sourceIter.hasNext()) { targetContinue = false; sourceContinue = false; break; } if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } } // If any tables in the sourceList still exist, the changes are added while (sourceContinue) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; } } //If any remaining tables in the targetList still exist, they are now being added while (targetContinue) { diffs.add(new DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; } } return diffs; }
private List<DiffChunk<SQLObject>> generateRelationshipDiffs( Collection<SQLTable> sourceTables, Collection<SQLTable> targetTables) throws ArchitectException { //XXX: This is using an incorrect comparator, it only checks by relationship //names and does not get into a deeper level of checking the mappings. Set<SQLRelationship> sourceRels = new TreeSet<SQLRelationship>(comparator); Set<SQLRelationship> targetRels = new TreeSet<SQLRelationship>(comparator); for (SQLTable t : sourceTables) { sourceRels.addAll(t.getImportedKeys()); } for (SQLTable t : targetTables) { targetRels.addAll(t.getImportedKeys()); } logger.debug("Source relationships: "+sourceRels); logger.debug("Target relationships: "+targetRels); List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); Iterator<SQLRelationship> sourceIter = sourceRels.iterator(); Iterator<SQLRelationship> targetIter = targetRels.iterator(); SQLRelationship targetRel; SQLRelationship sourceRel; boolean sourceContinue; boolean targetContinue; //Checks if both lists of tables contain any tables at all, if they do //the iterator is initialized for the list if (sourceIter.hasNext()) { sourceContinue = true; sourceRel = sourceIter.next(); } else { sourceContinue = false; sourceRel = null; } if (targetIter.hasNext()) { targetContinue = true; targetRel = targetIter.next(); } else { targetContinue = false; targetRel = null; } // Will loop until one or both of the lists reaches its last table while (sourceContinue && targetContinue) { // bring the source table up to the same level as the target while (comparator.compare(sourceRel, targetRel) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } // bring the target table up to the same level as the source while (comparator.compare(sourceRel, targetRel) > 0) { diffs.add(new 
DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); // now do the mappings diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } } while (comparator.compare(sourceRel, targetRel) == 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.SAME)); // now do the columns diffs.addAll(generateMappingDiffs(sourceRel, targetRel)); if (!targetIter.hasNext() && !sourceIter.hasNext()) { targetContinue = false; sourceContinue = false; break; } if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } } // If any tables in the sourceList still exist, the changes are added while (sourceContinue) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; } } //If any remaining tables in the targetList still exist, they are now being added while (targetContinue) { diffs.add(new DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; } } return diffs; }
1,110,451
private List<DiffChunk<SQLObject>> generateRelationshipDiffs( Collection<SQLTable> sourceTables, Collection<SQLTable> targetTables) throws ArchitectException { //XXX: This is using an incorrect comparator, it only checks by relationship //names and does not get into a deeper level of checking the mappings. Set<SQLRelationship> sourceRels = new TreeSet<SQLRelationship>(comparator); Set<SQLRelationship> targetRels = new TreeSet<SQLRelationship>(comparator); for (SQLTable t : sourceTables) { sourceRels.addAll(t.getImportedKeys()); } for (SQLTable t : targetTables) { targetRels.addAll(t.getImportedKeys()); } logger.debug("Source relationships: "+sourceRels); logger.debug("Target relationships: "+targetRels); List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); Iterator<SQLRelationship> sourceIter = sourceRels.iterator(); Iterator<SQLRelationship> targetIter = targetRels.iterator(); SQLRelationship targetRel; SQLRelationship sourceRel; boolean sourceContinue; boolean targetContinue; //Checks if both lists of tables contain any tables at all, if they do //the iterator is initialized for the list if (sourceIter.hasNext()) { sourceContinue = true; sourceRel = sourceIter.next(); } else { sourceContinue = false; sourceRel = null; } if (targetIter.hasNext()) { targetContinue = true; targetRel = targetIter.next(); } else { targetContinue = false; targetRel = null; } // Will loop until one or both of the lists reaches its last table while (sourceContinue && targetContinue) { // bring the source table up to the same level as the target while (comparator.compare(sourceRel, targetRel) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } // bring the target table up to the same level as the source while (comparator.compare(sourceRel, targetRel) > 0) { diffs.add(new 
DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); // now do the mappings diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } } while (comparator.compare(sourceRel, targetRel) == 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.SAME)); // now do the columns diffs.addAll(generateMappingDiffs(sourceRel, targetRel)); if (!targetIter.hasNext() && !sourceIter.hasNext()) { targetContinue = false; sourceContinue = false; break; } if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } } // If any tables in the sourceList still exist, the changes are added while (sourceContinue) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; } } //If any remaining tables in the targetList still exist, they are now being added while (targetContinue) { diffs.add(new DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; } } return diffs; }
private List<DiffChunk<SQLObject>> generateRelationshipDiffs( Collection<SQLTable> sourceTables, Collection<SQLTable> targetTables) throws ArchitectException { //XXX: This is using an incorrect comparator, it only checks by relationship //names and does not get into a deeper level of checking the mappings. Set<SQLRelationship> sourceRels = new TreeSet<SQLRelationship>(comparator); Set<SQLRelationship> targetRels = new TreeSet<SQLRelationship>(comparator); for (SQLTable t : sourceTables) { sourceRels.addAll(t.getImportedKeys()); } for (SQLTable t : targetTables) { targetRels.addAll(t.getImportedKeys()); } logger.debug("Source relationships: "+sourceRels); logger.debug("Target relationships: "+targetRels); List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); Iterator<SQLRelationship> sourceIter = sourceRels.iterator(); Iterator<SQLRelationship> targetIter = targetRels.iterator(); SQLRelationship targetRel; SQLRelationship sourceRel; boolean sourceContinue; boolean targetContinue; //Checks if both lists of tables contain any tables at all, if they do //the iterator is initialized for the list if (sourceIter.hasNext()) { sourceContinue = true; sourceRel = sourceIter.next(); } else { sourceContinue = false; sourceRel = null; } if (targetIter.hasNext()) { targetContinue = true; targetRel = targetIter.next(); } else { targetContinue = false; targetRel = null; } // Will loop until one or both of the lists reaches its last table while (sourceContinue && targetContinue) { // bring the source table up to the same level as the target while (comparator.compare(sourceRel, targetRel) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; } } // bring the target table up to the same level as the source while (comparator.compare(sourceRel, targetRel) > 0) { diffs.add(new 
DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); // now do the mappings diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; } } while (comparator.compare(sourceRel, targetRel) == 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.SAME)); // now do the columns diffs.addAll(generateMappingDiffs(sourceRel, targetRel)); if (!targetIter.hasNext() && !sourceIter.hasNext()) { targetContinue = false; sourceContinue = false; } if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; } if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; } } } // If any tables in the sourceList still exist, the changes are added while (sourceContinue) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; } } //If any remaining tables in the targetList still exist, they are now being added while (targetContinue) { diffs.add(new DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; } } return diffs; }
1,110,452
private List<DiffChunk<SQLObject>> generateRelationshipDiffs( Collection<SQLTable> sourceTables, Collection<SQLTable> targetTables) throws ArchitectException { //XXX: This is using an incorrect comparator, it only checks by relationship //names and does not get into a deeper level of checking the mappings. Set<SQLRelationship> sourceRels = new TreeSet<SQLRelationship>(comparator); Set<SQLRelationship> targetRels = new TreeSet<SQLRelationship>(comparator); for (SQLTable t : sourceTables) { sourceRels.addAll(t.getImportedKeys()); } for (SQLTable t : targetTables) { targetRels.addAll(t.getImportedKeys()); } logger.debug("Source relationships: "+sourceRels); logger.debug("Target relationships: "+targetRels); List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); Iterator<SQLRelationship> sourceIter = sourceRels.iterator(); Iterator<SQLRelationship> targetIter = targetRels.iterator(); SQLRelationship targetRel; SQLRelationship sourceRel; boolean sourceContinue; boolean targetContinue; //Checks if both lists of tables contain any tables at all, if they do //the iterator is initialized for the list if (sourceIter.hasNext()) { sourceContinue = true; sourceRel = sourceIter.next(); } else { sourceContinue = false; sourceRel = null; } if (targetIter.hasNext()) { targetContinue = true; targetRel = targetIter.next(); } else { targetContinue = false; targetRel = null; } // Will loop until one or both of the lists reaches its last table while (sourceContinue && targetContinue) { // bring the source table up to the same level as the target while (comparator.compare(sourceRel, targetRel) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } // bring the target table up to the same level as the source while (comparator.compare(sourceRel, targetRel) > 0) { diffs.add(new 
DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); // now do the mappings diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } } while (comparator.compare(sourceRel, targetRel) == 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.SAME)); // now do the columns diffs.addAll(generateMappingDiffs(sourceRel, targetRel)); if (!targetIter.hasNext() && !sourceIter.hasNext()) { targetContinue = false; sourceContinue = false; break; } if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } } // If any tables in the sourceList still exist, the changes are added while (sourceContinue) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; } } //If any remaining tables in the targetList still exist, they are now being added while (targetContinue) { diffs.add(new DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; } } return diffs; }
private List<DiffChunk<SQLObject>> generateRelationshipDiffs( Collection<SQLTable> sourceTables, Collection<SQLTable> targetTables) throws ArchitectException { //XXX: This is using an incorrect comparator, it only checks by relationship //names and does not get into a deeper level of checking the mappings. Set<SQLRelationship> sourceRels = new TreeSet<SQLRelationship>(comparator); Set<SQLRelationship> targetRels = new TreeSet<SQLRelationship>(comparator); for (SQLTable t : sourceTables) { sourceRels.addAll(t.getImportedKeys()); } for (SQLTable t : targetTables) { targetRels.addAll(t.getImportedKeys()); } logger.debug("Source relationships: "+sourceRels); logger.debug("Target relationships: "+targetRels); List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); Iterator<SQLRelationship> sourceIter = sourceRels.iterator(); Iterator<SQLRelationship> targetIter = targetRels.iterator(); SQLRelationship targetRel; SQLRelationship sourceRel; boolean sourceContinue; boolean targetContinue; //Checks if both lists of tables contain any tables at all, if they do //the iterator is initialized for the list if (sourceIter.hasNext()) { sourceContinue = true; sourceRel = sourceIter.next(); } else { sourceContinue = false; sourceRel = null; } if (targetIter.hasNext()) { targetContinue = true; targetRel = targetIter.next(); } else { targetContinue = false; targetRel = null; } // Will loop until one or both of the lists reaches its last table while (sourceContinue && targetContinue) { // bring the source table up to the same level as the target while (comparator.compare(sourceRel, targetRel) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } // bring the target table up to the same level as the source if (relComparator.compare(sourceRel, targetRel) > 0) { diffs.add(new 
DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); // now do the mappings diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } } while (comparator.compare(sourceRel, targetRel) == 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.SAME)); // now do the columns diffs.addAll(generateMappingDiffs(sourceRel, targetRel)); if (!targetIter.hasNext() && !sourceIter.hasNext()) { targetContinue = false; sourceContinue = false; break; } if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } } // If any tables in the sourceList still exist, the changes are added while (sourceContinue) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; } } //If any remaining tables in the targetList still exist, they are now being added while (targetContinue) { diffs.add(new DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; } } return diffs; }
1,110,453
private List<DiffChunk<SQLObject>> generateRelationshipDiffs( Collection<SQLTable> sourceTables, Collection<SQLTable> targetTables) throws ArchitectException { //XXX: This is using an incorrect comparator, it only checks by relationship //names and does not get into a deeper level of checking the mappings. Set<SQLRelationship> sourceRels = new TreeSet<SQLRelationship>(comparator); Set<SQLRelationship> targetRels = new TreeSet<SQLRelationship>(comparator); for (SQLTable t : sourceTables) { sourceRels.addAll(t.getImportedKeys()); } for (SQLTable t : targetTables) { targetRels.addAll(t.getImportedKeys()); } logger.debug("Source relationships: "+sourceRels); logger.debug("Target relationships: "+targetRels); List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); Iterator<SQLRelationship> sourceIter = sourceRels.iterator(); Iterator<SQLRelationship> targetIter = targetRels.iterator(); SQLRelationship targetRel; SQLRelationship sourceRel; boolean sourceContinue; boolean targetContinue; //Checks if both lists of tables contain any tables at all, if they do //the iterator is initialized for the list if (sourceIter.hasNext()) { sourceContinue = true; sourceRel = sourceIter.next(); } else { sourceContinue = false; sourceRel = null; } if (targetIter.hasNext()) { targetContinue = true; targetRel = targetIter.next(); } else { targetContinue = false; targetRel = null; } // Will loop until one or both of the lists reaches its last table while (sourceContinue && targetContinue) { // bring the source table up to the same level as the target while (comparator.compare(sourceRel, targetRel) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } // bring the target table up to the same level as the source while (comparator.compare(sourceRel, targetRel) > 0) { diffs.add(new 
DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); // now do the mappings diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } } while (comparator.compare(sourceRel, targetRel) == 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.SAME)); // now do the columns diffs.addAll(generateMappingDiffs(sourceRel, targetRel)); if (!targetIter.hasNext() && !sourceIter.hasNext()) { targetContinue = false; sourceContinue = false; break; } if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } } // If any tables in the sourceList still exist, the changes are added while (sourceContinue) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; } } //If any remaining tables in the targetList still exist, they are now being added while (targetContinue) { diffs.add(new DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; } } return diffs; }
private List<DiffChunk<SQLObject>> generateRelationshipDiffs( Collection<SQLTable> sourceTables, Collection<SQLTable> targetTables) throws ArchitectException { //XXX: This is using an incorrect comparator, it only checks by relationship //names and does not get into a deeper level of checking the mappings. Set<SQLRelationship> sourceRels = new TreeSet<SQLRelationship>(comparator); Set<SQLRelationship> targetRels = new TreeSet<SQLRelationship>(comparator); for (SQLTable t : sourceTables) { sourceRels.addAll(t.getImportedKeys()); } for (SQLTable t : targetTables) { targetRels.addAll(t.getImportedKeys()); } logger.debug("Source relationships: "+sourceRels); logger.debug("Target relationships: "+targetRels); List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); Iterator<SQLRelationship> sourceIter = sourceRels.iterator(); Iterator<SQLRelationship> targetIter = targetRels.iterator(); SQLRelationship targetRel; SQLRelationship sourceRel; boolean sourceContinue; boolean targetContinue; //Checks if both lists of tables contain any tables at all, if they do //the iterator is initialized for the list if (sourceIter.hasNext()) { sourceContinue = true; sourceRel = sourceIter.next(); } else { sourceContinue = false; sourceRel = null; } if (targetIter.hasNext()) { targetContinue = true; targetRel = targetIter.next(); } else { targetContinue = false; targetRel = null; } // Will loop until one or both of the lists reaches its last table while (sourceContinue && targetContinue) { // bring the source table up to the same level as the target while (comparator.compare(sourceRel, targetRel) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } // bring the target table up to the same level as the source while (comparator.compare(sourceRel, targetRel) > 0) { diffs.add(new 
DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); // now do the mappings if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } } while (comparator.compare(sourceRel, targetRel) == 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.SAME)); // now do the columns diffs.addAll(generateMappingDiffs(sourceRel, targetRel)); if (!targetIter.hasNext() && !sourceIter.hasNext()) { targetContinue = false; sourceContinue = false; break; } if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } } // If any tables in the sourceList still exist, the changes are added while (sourceContinue) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; } } //If any remaining tables in the targetList still exist, they are now being added while (targetContinue) { diffs.add(new DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; } } return diffs; }
1,110,454
private List<DiffChunk<SQLObject>> generateRelationshipDiffs( Collection<SQLTable> sourceTables, Collection<SQLTable> targetTables) throws ArchitectException { //XXX: This is using an incorrect comparator, it only checks by relationship //names and does not get into a deeper level of checking the mappings. Set<SQLRelationship> sourceRels = new TreeSet<SQLRelationship>(comparator); Set<SQLRelationship> targetRels = new TreeSet<SQLRelationship>(comparator); for (SQLTable t : sourceTables) { sourceRels.addAll(t.getImportedKeys()); } for (SQLTable t : targetTables) { targetRels.addAll(t.getImportedKeys()); } logger.debug("Source relationships: "+sourceRels); logger.debug("Target relationships: "+targetRels); List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); Iterator<SQLRelationship> sourceIter = sourceRels.iterator(); Iterator<SQLRelationship> targetIter = targetRels.iterator(); SQLRelationship targetRel; SQLRelationship sourceRel; boolean sourceContinue; boolean targetContinue; //Checks if both lists of tables contain any tables at all, if they do //the iterator is initialized for the list if (sourceIter.hasNext()) { sourceContinue = true; sourceRel = sourceIter.next(); } else { sourceContinue = false; sourceRel = null; } if (targetIter.hasNext()) { targetContinue = true; targetRel = targetIter.next(); } else { targetContinue = false; targetRel = null; } // Will loop until one or both of the lists reaches its last table while (sourceContinue && targetContinue) { // bring the source table up to the same level as the target while (comparator.compare(sourceRel, targetRel) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } // bring the target table up to the same level as the source while (comparator.compare(sourceRel, targetRel) > 0) { diffs.add(new 
DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); // now do the mappings diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } } while (comparator.compare(sourceRel, targetRel) == 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.SAME)); // now do the columns diffs.addAll(generateMappingDiffs(sourceRel, targetRel)); if (!targetIter.hasNext() && !sourceIter.hasNext()) { targetContinue = false; sourceContinue = false; break; } if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } } // If any tables in the sourceList still exist, the changes are added while (sourceContinue) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; } } //If any remaining tables in the targetList still exist, they are now being added while (targetContinue) { diffs.add(new DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; } } return diffs; }
private List<DiffChunk<SQLObject>> generateRelationshipDiffs( Collection<SQLTable> sourceTables, Collection<SQLTable> targetTables) throws ArchitectException { //XXX: This is using an incorrect comparator, it only checks by relationship //names and does not get into a deeper level of checking the mappings. Set<SQLRelationship> sourceRels = new TreeSet<SQLRelationship>(comparator); Set<SQLRelationship> targetRels = new TreeSet<SQLRelationship>(comparator); for (SQLTable t : sourceTables) { sourceRels.addAll(t.getImportedKeys()); } for (SQLTable t : targetTables) { targetRels.addAll(t.getImportedKeys()); } logger.debug("Source relationships: "+sourceRels); logger.debug("Target relationships: "+targetRels); List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); Iterator<SQLRelationship> sourceIter = sourceRels.iterator(); Iterator<SQLRelationship> targetIter = targetRels.iterator(); SQLRelationship targetRel; SQLRelationship sourceRel; boolean sourceContinue; boolean targetContinue; //Checks if both lists of tables contain any tables at all, if they do //the iterator is initialized for the list if (sourceIter.hasNext()) { sourceContinue = true; sourceRel = sourceIter.next(); } else { sourceContinue = false; sourceRel = null; } if (targetIter.hasNext()) { targetContinue = true; targetRel = targetIter.next(); } else { targetContinue = false; targetRel = null; } // Will loop until one or both of the lists reaches its last table while (sourceContinue && targetContinue) { // bring the source table up to the same level as the target while (comparator.compare(sourceRel, targetRel) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } // bring the target table up to the same level as the source while (comparator.compare(sourceRel, targetRel) > 0) { diffs.add(new 
DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); // now do the mappings diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } } if (relComparator.compare(sourceRel, targetRel) == 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.SAME)); // now do the columns diffs.addAll(generateMappingDiffs(sourceRel, targetRel)); if (!targetIter.hasNext() && !sourceIter.hasNext()) { targetContinue = false; sourceContinue = false; break; } if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } } // If any tables in the sourceList still exist, the changes are added while (sourceContinue) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; } } //If any remaining tables in the targetList still exist, they are now being added while (targetContinue) { diffs.add(new DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; } } return diffs; }
1,110,455
private List<DiffChunk<SQLObject>> generateRelationshipDiffs( Collection<SQLTable> sourceTables, Collection<SQLTable> targetTables) throws ArchitectException { //XXX: This is using an incorrect comparator, it only checks by relationship //names and does not get into a deeper level of checking the mappings. Set<SQLRelationship> sourceRels = new TreeSet<SQLRelationship>(comparator); Set<SQLRelationship> targetRels = new TreeSet<SQLRelationship>(comparator); for (SQLTable t : sourceTables) { sourceRels.addAll(t.getImportedKeys()); } for (SQLTable t : targetTables) { targetRels.addAll(t.getImportedKeys()); } logger.debug("Source relationships: "+sourceRels); logger.debug("Target relationships: "+targetRels); List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); Iterator<SQLRelationship> sourceIter = sourceRels.iterator(); Iterator<SQLRelationship> targetIter = targetRels.iterator(); SQLRelationship targetRel; SQLRelationship sourceRel; boolean sourceContinue; boolean targetContinue; //Checks if both lists of tables contain any tables at all, if they do //the iterator is initialized for the list if (sourceIter.hasNext()) { sourceContinue = true; sourceRel = sourceIter.next(); } else { sourceContinue = false; sourceRel = null; } if (targetIter.hasNext()) { targetContinue = true; targetRel = targetIter.next(); } else { targetContinue = false; targetRel = null; } // Will loop until one or both of the lists reaches its last table while (sourceContinue && targetContinue) { // bring the source table up to the same level as the target while (comparator.compare(sourceRel, targetRel) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } // bring the target table up to the same level as the source while (comparator.compare(sourceRel, targetRel) > 0) { diffs.add(new 
DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); // now do the mappings diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } } while (comparator.compare(sourceRel, targetRel) == 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.SAME)); // now do the columns diffs.addAll(generateMappingDiffs(sourceRel, targetRel)); if (!targetIter.hasNext() && !sourceIter.hasNext()) { targetContinue = false; sourceContinue = false; break; } if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } } // If any tables in the sourceList still exist, the changes are added while (sourceContinue) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; } } //If any remaining tables in the targetList still exist, they are now being added while (targetContinue) { diffs.add(new DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; } } return diffs; }
private List<DiffChunk<SQLObject>> generateRelationshipDiffs( Collection<SQLTable> sourceTables, Collection<SQLTable> targetTables) throws ArchitectException { //XXX: This is using an incorrect comparator, it only checks by relationship //names and does not get into a deeper level of checking the mappings. Set<SQLRelationship> sourceRels = new TreeSet<SQLRelationship>(comparator); Set<SQLRelationship> targetRels = new TreeSet<SQLRelationship>(comparator); for (SQLTable t : sourceTables) { sourceRels.addAll(t.getImportedKeys()); } for (SQLTable t : targetTables) { targetRels.addAll(t.getImportedKeys()); } logger.debug("Source relationships: "+sourceRels); logger.debug("Target relationships: "+targetRels); List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); Iterator<SQLRelationship> sourceIter = sourceRels.iterator(); Iterator<SQLRelationship> targetIter = targetRels.iterator(); SQLRelationship targetRel; SQLRelationship sourceRel; boolean sourceContinue; boolean targetContinue; //Checks if both lists of tables contain any tables at all, if they do //the iterator is initialized for the list if (sourceIter.hasNext()) { sourceContinue = true; sourceRel = sourceIter.next(); } else { sourceContinue = false; sourceRel = null; } if (targetIter.hasNext()) { targetContinue = true; targetRel = targetIter.next(); } else { targetContinue = false; targetRel = null; } // Will loop until one or both of the lists reaches its last table while (sourceContinue && targetContinue) { // bring the source table up to the same level as the target while (comparator.compare(sourceRel, targetRel) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } // bring the target table up to the same level as the source while (comparator.compare(sourceRel, targetRel) > 0) { diffs.add(new 
DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); // now do the mappings diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } } while (comparator.compare(sourceRel, targetRel) == 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.SAME)); // now do the columns if (!targetIter.hasNext() && !sourceIter.hasNext()) { targetContinue = false; sourceContinue = false; break; } if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } } // If any tables in the sourceList still exist, the changes are added while (sourceContinue) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; } } //If any remaining tables in the targetList still exist, they are now being added while (targetContinue) { diffs.add(new DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; } } return diffs; }
1,110,456
private List<DiffChunk<SQLObject>> generateRelationshipDiffs( Collection<SQLTable> sourceTables, Collection<SQLTable> targetTables) throws ArchitectException { //XXX: This is using an incorrect comparator, it only checks by relationship //names and does not get into a deeper level of checking the mappings. Set<SQLRelationship> sourceRels = new TreeSet<SQLRelationship>(comparator); Set<SQLRelationship> targetRels = new TreeSet<SQLRelationship>(comparator); for (SQLTable t : sourceTables) { sourceRels.addAll(t.getImportedKeys()); } for (SQLTable t : targetTables) { targetRels.addAll(t.getImportedKeys()); } logger.debug("Source relationships: "+sourceRels); logger.debug("Target relationships: "+targetRels); List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); Iterator<SQLRelationship> sourceIter = sourceRels.iterator(); Iterator<SQLRelationship> targetIter = targetRels.iterator(); SQLRelationship targetRel; SQLRelationship sourceRel; boolean sourceContinue; boolean targetContinue; //Checks if both lists of tables contain any tables at all, if they do //the iterator is initialized for the list if (sourceIter.hasNext()) { sourceContinue = true; sourceRel = sourceIter.next(); } else { sourceContinue = false; sourceRel = null; } if (targetIter.hasNext()) { targetContinue = true; targetRel = targetIter.next(); } else { targetContinue = false; targetRel = null; } // Will loop until one or both of the lists reaches its last table while (sourceContinue && targetContinue) { // bring the source table up to the same level as the target while (comparator.compare(sourceRel, targetRel) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } // bring the target table up to the same level as the source while (comparator.compare(sourceRel, targetRel) > 0) { diffs.add(new 
DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); // now do the mappings diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } } while (comparator.compare(sourceRel, targetRel) == 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.SAME)); // now do the columns diffs.addAll(generateMappingDiffs(sourceRel, targetRel)); if (!targetIter.hasNext() && !sourceIter.hasNext()) { targetContinue = false; sourceContinue = false; break; } if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } } // If any tables in the sourceList still exist, the changes are added while (sourceContinue) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; } } //If any remaining tables in the targetList still exist, they are now being added while (targetContinue) { diffs.add(new DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; } } return diffs; }
private List<DiffChunk<SQLObject>> generateRelationshipDiffs( Collection<SQLTable> sourceTables, Collection<SQLTable> targetTables) throws ArchitectException { //XXX: This is using an incorrect comparator, it only checks by relationship //names and does not get into a deeper level of checking the mappings. Set<SQLRelationship> sourceRels = new TreeSet<SQLRelationship>(comparator); Set<SQLRelationship> targetRels = new TreeSet<SQLRelationship>(comparator); for (SQLTable t : sourceTables) { sourceRels.addAll(t.getImportedKeys()); } for (SQLTable t : targetTables) { targetRels.addAll(t.getImportedKeys()); } logger.debug("Source relationships: "+sourceRels); logger.debug("Target relationships: "+targetRels); List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); Iterator<SQLRelationship> sourceIter = sourceRels.iterator(); Iterator<SQLRelationship> targetIter = targetRels.iterator(); SQLRelationship targetRel; SQLRelationship sourceRel; boolean sourceContinue; boolean targetContinue; //Checks if both lists of tables contain any tables at all, if they do //the iterator is initialized for the list if (sourceIter.hasNext()) { sourceContinue = true; sourceRel = sourceIter.next(); } else { sourceContinue = false; sourceRel = null; } if (targetIter.hasNext()) { targetContinue = true; targetRel = targetIter.next(); } else { targetContinue = false; targetRel = null; } // Will loop until one or both of the lists reaches its last table while (sourceContinue && targetContinue) { // bring the source table up to the same level as the target while (comparator.compare(sourceRel, targetRel) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } // bring the target table up to the same level as the source while (comparator.compare(sourceRel, targetRel) > 0) { diffs.add(new 
DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); // now do the mappings diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } } while (comparator.compare(sourceRel, targetRel) == 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.SAME)); // now do the columns diffs.addAll(generateMappingDiffs(sourceRel, targetRel)); if (!targetIter.hasNext() && !sourceIter.hasNext()) { targetContinue = false; sourceContinue = false; break; } if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } } // If any tables in the sourceList still exist, the changes are added while (sourceContinue) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; } } //If any remaining tables in the targetList still exist, they are now being added while (targetContinue) { diffs.add(new DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; } } return diffs; }
1,110,457
private List<DiffChunk<SQLObject>> generateRelationshipDiffs( Collection<SQLTable> sourceTables, Collection<SQLTable> targetTables) throws ArchitectException { //XXX: This is using an incorrect comparator, it only checks by relationship //names and does not get into a deeper level of checking the mappings. Set<SQLRelationship> sourceRels = new TreeSet<SQLRelationship>(comparator); Set<SQLRelationship> targetRels = new TreeSet<SQLRelationship>(comparator); for (SQLTable t : sourceTables) { sourceRels.addAll(t.getImportedKeys()); } for (SQLTable t : targetTables) { targetRels.addAll(t.getImportedKeys()); } logger.debug("Source relationships: "+sourceRels); logger.debug("Target relationships: "+targetRels); List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); Iterator<SQLRelationship> sourceIter = sourceRels.iterator(); Iterator<SQLRelationship> targetIter = targetRels.iterator(); SQLRelationship targetRel; SQLRelationship sourceRel; boolean sourceContinue; boolean targetContinue; //Checks if both lists of tables contain any tables at all, if they do //the iterator is initialized for the list if (sourceIter.hasNext()) { sourceContinue = true; sourceRel = sourceIter.next(); } else { sourceContinue = false; sourceRel = null; } if (targetIter.hasNext()) { targetContinue = true; targetRel = targetIter.next(); } else { targetContinue = false; targetRel = null; } // Will loop until one or both of the lists reaches its last table while (sourceContinue && targetContinue) { // bring the source table up to the same level as the target while (comparator.compare(sourceRel, targetRel) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } // bring the target table up to the same level as the source while (comparator.compare(sourceRel, targetRel) > 0) { diffs.add(new 
DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); // now do the mappings diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } } while (comparator.compare(sourceRel, targetRel) == 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.SAME)); // now do the columns diffs.addAll(generateMappingDiffs(sourceRel, targetRel)); if (!targetIter.hasNext() && !sourceIter.hasNext()) { targetContinue = false; sourceContinue = false; break; } if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } } // If any tables in the sourceList still exist, the changes are added while (sourceContinue) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; } } //If any remaining tables in the targetList still exist, they are now being added while (targetContinue) { diffs.add(new DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; } } return diffs; }
private List<DiffChunk<SQLObject>> generateRelationshipDiffs( Collection<SQLTable> sourceTables, Collection<SQLTable> targetTables) throws ArchitectException { //XXX: This is using an incorrect comparator, it only checks by relationship //names and does not get into a deeper level of checking the mappings. Set<SQLRelationship> sourceRels = new TreeSet<SQLRelationship>(comparator); Set<SQLRelationship> targetRels = new TreeSet<SQLRelationship>(comparator); for (SQLTable t : sourceTables) { sourceRels.addAll(t.getImportedKeys()); } for (SQLTable t : targetTables) { targetRels.addAll(t.getImportedKeys()); } logger.debug("Source relationships: "+sourceRels); logger.debug("Target relationships: "+targetRels); List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); Iterator<SQLRelationship> sourceIter = sourceRels.iterator(); Iterator<SQLRelationship> targetIter = targetRels.iterator(); SQLRelationship targetRel; SQLRelationship sourceRel; boolean sourceContinue; boolean targetContinue; //Checks if both lists of tables contain any tables at all, if they do //the iterator is initialized for the list if (sourceIter.hasNext()) { sourceContinue = true; sourceRel = sourceIter.next(); } else { sourceContinue = false; sourceRel = null; } if (targetIter.hasNext()) { targetContinue = true; targetRel = targetIter.next(); } else { targetContinue = false; targetRel = null; } // Will loop until one or both of the lists reaches its last table while (sourceContinue && targetContinue) { // bring the source table up to the same level as the target while (comparator.compare(sourceRel, targetRel) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } // bring the target table up to the same level as the source while (comparator.compare(sourceRel, targetRel) > 0) { diffs.add(new DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); // now do the 
mappings diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } } while (comparator.compare(sourceRel, targetRel) == 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.SAME)); // now do the columns diffs.addAll(generateMappingDiffs(sourceRel, targetRel)); if (!targetIter.hasNext() && !sourceIter.hasNext()) { targetContinue = false; sourceContinue = false; break; } if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } } // If any tables in the sourceList still exist, the changes are added while (sourceContinue) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; } } //If any remaining tables in the targetList still exist, they are now being added while (targetContinue) { diffs.add(new DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; } } return diffs; }
1,110,458
private List<DiffChunk<SQLObject>> generateRelationshipDiffs( Collection<SQLTable> sourceTables, Collection<SQLTable> targetTables) throws ArchitectException { //XXX: This is using an incorrect comparator, it only checks by relationship //names and does not get into a deeper level of checking the mappings. Set<SQLRelationship> sourceRels = new TreeSet<SQLRelationship>(comparator); Set<SQLRelationship> targetRels = new TreeSet<SQLRelationship>(comparator); for (SQLTable t : sourceTables) { sourceRels.addAll(t.getImportedKeys()); } for (SQLTable t : targetTables) { targetRels.addAll(t.getImportedKeys()); } logger.debug("Source relationships: "+sourceRels); logger.debug("Target relationships: "+targetRels); List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); Iterator<SQLRelationship> sourceIter = sourceRels.iterator(); Iterator<SQLRelationship> targetIter = targetRels.iterator(); SQLRelationship targetRel; SQLRelationship sourceRel; boolean sourceContinue; boolean targetContinue; //Checks if both lists of tables contain any tables at all, if they do //the iterator is initialized for the list if (sourceIter.hasNext()) { sourceContinue = true; sourceRel = sourceIter.next(); } else { sourceContinue = false; sourceRel = null; } if (targetIter.hasNext()) { targetContinue = true; targetRel = targetIter.next(); } else { targetContinue = false; targetRel = null; } // Will loop until one or both of the lists reaches its last table while (sourceContinue && targetContinue) { // bring the source table up to the same level as the target while (comparator.compare(sourceRel, targetRel) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } // bring the target table up to the same level as the source while (comparator.compare(sourceRel, targetRel) > 0) { diffs.add(new 
DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); // now do the mappings diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } } while (comparator.compare(sourceRel, targetRel) == 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.SAME)); // now do the columns diffs.addAll(generateMappingDiffs(sourceRel, targetRel)); if (!targetIter.hasNext() && !sourceIter.hasNext()) { targetContinue = false; sourceContinue = false; break; } if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } } // If any tables in the sourceList still exist, the changes are added while (sourceContinue) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; } } //If any remaining tables in the targetList still exist, they are now being added while (targetContinue) { diffs.add(new DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; } } return diffs; }
private List<DiffChunk<SQLObject>> generateRelationshipDiffs( Collection<SQLTable> sourceTables, Collection<SQLTable> targetTables) throws ArchitectException { //XXX: This is using an incorrect comparator, it only checks by relationship //names and does not get into a deeper level of checking the mappings. Set<SQLRelationship> sourceRels = new TreeSet<SQLRelationship>(comparator); Set<SQLRelationship> targetRels = new TreeSet<SQLRelationship>(comparator); for (SQLTable t : sourceTables) { sourceRels.addAll(t.getImportedKeys()); } for (SQLTable t : targetTables) { targetRels.addAll(t.getImportedKeys()); } logger.debug("Source relationships: "+sourceRels); logger.debug("Target relationships: "+targetRels); List<DiffChunk<SQLObject>> diffs = new ArrayList<DiffChunk<SQLObject>>(); Iterator<SQLRelationship> sourceIter = sourceRels.iterator(); Iterator<SQLRelationship> targetIter = targetRels.iterator(); SQLRelationship targetRel; SQLRelationship sourceRel; boolean sourceContinue; boolean targetContinue; //Checks if both lists of tables contain any tables at all, if they do //the iterator is initialized for the list if (sourceIter.hasNext()) { sourceContinue = true; sourceRel = sourceIter.next(); } else { sourceContinue = false; sourceRel = null; } if (targetIter.hasNext()) { targetContinue = true; targetRel = targetIter.next(); } else { targetContinue = false; targetRel = null; } // Will loop until one or both of the lists reaches its last table while (sourceContinue && targetContinue) { // bring the source table up to the same level as the target while (comparator.compare(sourceRel, targetRel) < 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } // bring the target table up to the same level as the source while (comparator.compare(sourceRel, targetRel) > 0) { diffs.add(new 
DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); // now do the mappings diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } } while (comparator.compare(sourceRel, targetRel) == 0) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.SAME)); // now do the columns diffs.addAll(generateMappingDiffs(sourceRel, targetRel)); if (!targetIter.hasNext() && !sourceIter.hasNext()) { targetContinue = false; sourceContinue = false; break; } if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; break; } if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; break; } } } // If any tables in the sourceList still exist, the changes are added while (sourceContinue) { diffs.add(new DiffChunk<SQLObject>(sourceRel, DiffType.LEFTONLY)); diffs.addAll(generateMappingDiffs(sourceRel, null)); if (sourceIter.hasNext()) { sourceRel = sourceIter.next(); } else { sourceContinue = false; } } //If any remaining tables in the targetList still exist, they are now being added while (targetContinue) { diffs.add(new DiffChunk<SQLObject>(targetRel, DiffType.RIGHTONLY)); diffs.addAll(generateMappingDiffs(null, targetRel)); if (targetIter.hasNext()) { targetRel = targetIter.next(); } else { targetContinue = false; } } return diffs; }
1,110,459
public String draw();
public String draw(String applicationName);
1,110,460
private void applicationChanged(ApplicationConfig appConfig) { removeApplication(appConfig); addApplication(appConfig); }
private void applicationChanged(ApplicationConfig appConfig) { removeApplication(appConfig); addApplication(appConfig); }
1,110,461
public void start() { for (ApplicationConfig appConfig : ApplicationConfigManager .getAllApplications()) { // only add non-cluster applications if(!appConfig.isCluster()) addApplication(appConfig); } // TODO: perfect dependency to be injected via Spring framework --rk EventSystem eventSystem = EventSystem.getInstance(); /* Add the recorder to record the downtimes to the DB */ eventSystem.addListener(recorder, ApplicationEvent.class); /* application event listener to add */ eventSystem.addListener(new EventListener(){ public void handleEvent(EventObject event) { if(!(event instanceof ApplicationEvent)){ throw new IllegalArgumentException("event must be of type ApplicationEvent"); } if(event instanceof NewApplicationEvent){ addApplication(((NewApplicationEvent)event).getApplicationConfig()); }else if(event instanceof ApplicationChangedEvent){ applicationChanged(((ApplicationChangedEvent)event).getApplicationConfig()); }else if(event instanceof ApplicationRemovedEvent){ removeApplication(((ApplicationRemovedEvent)event).getApplicationConfig()); } } }, ApplicationEvent.class); logger.info("ApplicationDowntimeService started."); }
public void start() { for (ApplicationConfig appConfig : ApplicationConfigManager .getAllApplications()) { // only add non-cluster applications if(!appConfig.isCluster()) addApplication(appConfig); } // TODO: perfect dependency to be injected via Spring framework --rk EventSystem eventSystem = EventSystem.getInstance(); /* Add the recorder to record the downtimes to the DB */ eventSystem.addListener(recorder, ApplicationEvent.class); /* application event listener to add */ eventSystem.addListener(new EventListener(){ public void handleEvent(EventObject event) { if(!(event instanceof ApplicationEvent)){ throw new IllegalArgumentException("event must be of type ApplicationEvent"); } if(event instanceof NewApplicationEvent){ addApplication((NewApplicationEvent)event); }else if(event instanceof ApplicationChangedEvent){ applicationChanged(((ApplicationChangedEvent)event).getApplicationConfig()); }else if(event instanceof ApplicationRemovedEvent){ removeApplication(((ApplicationRemovedEvent)event).getApplicationConfig()); } } }, ApplicationEvent.class); logger.info("ApplicationDowntimeService started."); }
1,110,462
public void start() { for (ApplicationConfig appConfig : ApplicationConfigManager .getAllApplications()) { // only add non-cluster applications if(!appConfig.isCluster()) addApplication(appConfig); } // TODO: perfect dependency to be injected via Spring framework --rk EventSystem eventSystem = EventSystem.getInstance(); /* Add the recorder to record the downtimes to the DB */ eventSystem.addListener(recorder, ApplicationEvent.class); /* application event listener to add */ eventSystem.addListener(new EventListener(){ public void handleEvent(EventObject event) { if(!(event instanceof ApplicationEvent)){ throw new IllegalArgumentException("event must be of type ApplicationEvent"); } if(event instanceof NewApplicationEvent){ addApplication(((NewApplicationEvent)event).getApplicationConfig()); }else if(event instanceof ApplicationChangedEvent){ applicationChanged(((ApplicationChangedEvent)event).getApplicationConfig()); }else if(event instanceof ApplicationRemovedEvent){ removeApplication(((ApplicationRemovedEvent)event).getApplicationConfig()); } } }, ApplicationEvent.class); logger.info("ApplicationDowntimeService started."); }
public void start() { for (ApplicationConfig appConfig : ApplicationConfigManager .getAllApplications()) { // only add non-cluster applications if(!appConfig.isCluster()) addApplication(appConfig); } // TODO: perfect dependency to be injected via Spring framework --rk EventSystem eventSystem = EventSystem.getInstance(); /* Add the recorder to record the downtimes to the DB */ eventSystem.addListener(recorder, ApplicationEvent.class); /* application event listener to add */ eventSystem.addListener(new EventListener(){ public void handleEvent(EventObject event) { if(!(event instanceof ApplicationEvent)){ throw new IllegalArgumentException("event must be of type ApplicationEvent"); } if(event instanceof NewApplicationEvent){ addApplication(((NewApplicationEvent)event).getApplicationConfig()); }else if(event instanceof ApplicationChangedEvent){ applicationChanged((ApplicationChangedEvent)event); }else if(event instanceof ApplicationRemovedEvent){ removeApplication(((ApplicationRemovedEvent)event).getApplicationConfig()); } } }, ApplicationEvent.class); logger.info("ApplicationDowntimeService started."); }
1,110,463
public void handleEvent(EventObject event) { if(!(event instanceof ApplicationEvent)){ throw new IllegalArgumentException("event must be of type ApplicationEvent"); } if(event instanceof NewApplicationEvent){ addApplication(((NewApplicationEvent)event).getApplicationConfig()); }else if(event instanceof ApplicationChangedEvent){ applicationChanged(((ApplicationChangedEvent)event).getApplicationConfig()); }else if(event instanceof ApplicationRemovedEvent){ removeApplication(((ApplicationRemovedEvent)event).getApplicationConfig()); } }
public void handleEvent(EventObject event) { if(!(event instanceof ApplicationEvent)){ throw new IllegalArgumentException("event must be of type ApplicationEvent"); } if(event instanceof NewApplicationEvent){ addApplication((NewApplicationEvent)event); }else if(event instanceof ApplicationChangedEvent){ applicationChanged(((ApplicationChangedEvent)event).getApplicationConfig()); }else if(event instanceof ApplicationRemovedEvent){ removeApplication(((ApplicationRemovedEvent)event).getApplicationConfig()); } }
1,110,464
public void handleEvent(EventObject event) { if(!(event instanceof ApplicationEvent)){ throw new IllegalArgumentException("event must be of type ApplicationEvent"); } if(event instanceof NewApplicationEvent){ addApplication(((NewApplicationEvent)event).getApplicationConfig()); }else if(event instanceof ApplicationChangedEvent){ applicationChanged(((ApplicationChangedEvent)event).getApplicationConfig()); }else if(event instanceof ApplicationRemovedEvent){ removeApplication(((ApplicationRemovedEvent)event).getApplicationConfig()); } }
public void handleEvent(EventObject event) { if(!(event instanceof ApplicationEvent)){ throw new IllegalArgumentException("event must be of type ApplicationEvent"); } if(event instanceof NewApplicationEvent){ addApplication(((NewApplicationEvent)event).getApplicationConfig()); }else if(event instanceof ApplicationChangedEvent){ applicationChanged((ApplicationChangedEvent)event); }else if(event instanceof ApplicationRemovedEvent){ removeApplication(((ApplicationRemovedEvent)event).getApplicationConfig()); } }
1,110,465
public ActionForward execute(WebContext context, ActionMapping mapping, ActionForm actionForm, HttpServletRequest request, HttpServletResponse response) throws Exception { makeResponseNotCacheable(response); final ObjectName objectName = context.getObjectName(); final ApplicationConfig config = context.getApplicationConfig(); final MBeanConfig configuredMBean = config.findMBeanByObjectName(objectName.getCanonicalName()); AccessController.checkAccess(context.getServiceContext(), ACL_VIEW_APPLICATIONS); if(configuredMBean != null) AccessController.checkAccess(context.getServiceContext(), ACL_VIEW_MBEANS); List applications = null; if(config.isCluster()){ applications = config.getApplications(); }else{ applications = new ArrayList(1); applications.add(config); } /* the ObjectInfo for the mbean that is being viewed */ ObjectInfo objInfo = null; /* array that will be initialized with all attribute names for this mbean */ String[] attributeNames = null; /* a Map which constains list of attribute values for each application in the cluster. 
ApplicationConfig is the key and attribute List is the value*/ final Map appConfigToAttrListMap = new HashMap(applications.size()); for(Iterator it=applications.iterator(); it.hasNext(); ){ ApplicationConfig childAppConfig = (ApplicationConfig)it.next(); try { ServerConnection serverConnection = ServerConnector.getServerConnection(childAppConfig); /* assuming that all servers in this cluster have exact same object info, we will get the ObjectInfo from the first server in the list */ if(objInfo == null){ objInfo = serverConnection.getObjectInfo(objectName); assert objInfo != null; ObjectAttributeInfo[] attributes = objInfo.getAttributes(); attributeNames = new String[attributes.length]; for (int i = 0; i < attributes.length; i++) { // TODO: we should only add the readable attributes here attributeNames[i] = attributes[i].getName(); } } /* add attribute values of this application to the map*/ appConfigToAttrListMap.put(childAppConfig, serverConnection.getAttributes(objectName, attributeNames)); } catch (Exception e) { logger.log(Level.FINE, "Error retrieving attributes for:" + childAppConfig.getName(), e); /* add null, indicating that the server is down */ appConfigToAttrListMap.put(childAppConfig, null); } } request.setAttribute("objInfo", objInfo); request.setAttribute("appConfigToAttrListMap", appConfigToAttrListMap); /* setup the form to be used in the html form */ MBeanConfigForm mbeanConfigForm = (MBeanConfigForm)actionForm; mbeanConfigForm.setObjectName(objectName.getCanonicalName()); ApplicationConfig appConfig = context.getApplicationConfig(); MBeanConfig mbeanConfig = appConfig.findMBeanByObjectName(objectName.getCanonicalName()); if(mbeanConfig != null){ if(appConfig.isCluster()){ request.setAttribute("mbeanIncludedIn", "cluster"); }else{ request.setAttribute("mbeanIncludedIn", "application"); } request.setAttribute("mbeanConfig", mbeanConfig); }else{ ApplicationConfig clusterConfig = appConfig.getClusterConfig(); if(clusterConfig != null){ mbeanConfig = 
clusterConfig.findMBeanByObjectName(objectName.getCanonicalName()); } if(mbeanConfig != null){ request.setAttribute("mbeanIncludedIn", "cluster"); request.setAttribute("mbeanConfig", mbeanConfig); } } return mapping.findForward(Forwards.SUCCESS); }
public ActionForward execute(WebContext context, ActionMapping mapping, ActionForm actionForm, HttpServletRequest request, HttpServletResponse response) throws Exception { makeResponseNotCacheable(response); final ObjectName objectName = context.getObjectName(); final ApplicationConfig config = context.getApplicationConfig(); final MBeanConfig configuredMBean = config.findMBeanByObjectName(objectName.getCanonicalName()); AccessController.checkAccess(context.getServiceContext(), ACL_VIEW_APPLICATIONS); if(configuredMBean != null) AccessController.checkAccess(context.getServiceContext(), ACL_VIEW_MBEANS); List applications = null; if(config.isCluster()){ applications = config.getApplications(); }else{ applications = new ArrayList(1); applications.add(config); } /* the ObjectInfo for the mbean that is being viewed */ ObjectInfo objInfo = null; /* array that will be initialized with all attribute names for this mbean */ String[] attributeNames = null; /* a Map which constains list of attribute values for each application in the cluster. 
ApplicationConfig is the key and attribute List is the value*/ final Map appConfigToAttrListMap = new HashMap(applications.size()); for(Iterator it=applications.iterator(); it.hasNext(); ){ ApplicationConfig childAppConfig = (ApplicationConfig)it.next(); try { ServerConnection serverConnection = ServerConnector.getServerConnection(childAppConfig); /* assuming that all servers in this cluster have exact same object info, we will get the ObjectInfo from the first server in the list */ if(objInfo == null){ objInfo = serverConnection.getObjectInfo(objectName); assert objInfo != null; ObjectAttributeInfo[] attributes = objInfo.getAttributes(); attributeNames = new String[attributes.length]; for (int i = 0; i < attributes.length; i++) { // TODO: we should only add the readable attributes here attributeNames[i] = attributes[i].getName(); } } /* add attribute values of this application to the map*/ appConfigToAttrListMap.put(childAppConfig, serverConnection.getAttributes(objectName, attributeNames)); } catch (ConnectionFailedException e){ logger.log(Level.FINE, "Error retrieving attributes for:" + childAppConfig.getName(), e); /* add null, indicating that the server is down */ appConfigToAttrListMap.put(childAppConfig, null); } } request.setAttribute("objInfo", objInfo); request.setAttribute("appConfigToAttrListMap", appConfigToAttrListMap); /* setup the form to be used in the html form */ MBeanConfigForm mbeanConfigForm = (MBeanConfigForm)actionForm; mbeanConfigForm.setObjectName(objectName.getCanonicalName()); ApplicationConfig appConfig = context.getApplicationConfig(); MBeanConfig mbeanConfig = appConfig.findMBeanByObjectName(objectName.getCanonicalName()); if(mbeanConfig != null){ if(appConfig.isCluster()){ request.setAttribute("mbeanIncludedIn", "cluster"); }else{ request.setAttribute("mbeanIncludedIn", "application"); } request.setAttribute("mbeanConfig", mbeanConfig); }else{ ApplicationConfig clusterConfig = appConfig.getClusterConfig(); if(clusterConfig != null){ 
mbeanConfig = clusterConfig.findMBeanByObjectName(objectName.getCanonicalName()); } if(mbeanConfig != null){ request.setAttribute("mbeanIncludedIn", "cluster"); request.setAttribute("mbeanConfig", mbeanConfig); } } return mapping.findForward(Forwards.SUCCESS); }
1,110,466
public EditTableAction() { super("Edit Table Properties...", ASUtils.createIcon("TableProperties", "Edit Table Properties", ArchitectFrame.getMainInstance().sprefs.getInt(SwingUserSettings.ICON_SIZE, 24))); }
public EditTableAction() { super("Table Properties...", ASUtils.createIcon("TableProperties", "Edit Table Properties", ArchitectFrame.getMainInstance().sprefs.getInt(SwingUserSettings.ICON_SIZE, 24))); }
1,110,467
public EditTableAction() { super("Edit Table Properties...", ASUtils.createIcon("TableProperties", "Edit Table Properties", ArchitectFrame.getMainInstance().sprefs.getInt(SwingUserSettings.ICON_SIZE, 24))); }
public EditTableAction() { super("Edit Table Properties...", ASUtils.createIcon("TableProperties", "Table Properties", ArchitectFrame.getMainInstance().sprefs.getInt(SwingUserSettings.ICON_SIZE, 24))); }
1,110,468
protected SAXReader createSAXReader() throws Exception { // installs the NekoHTML parser SAXParser parser = new SAXParser(); parser.setProperty( "http://cyberneko.org/html/properties/names/elems", "match" ); parser.setProperty( "http://cyberneko.org/html/properties/names/attrs", "match" ); return new SAXReader( parser ); }
protected SAXReader createSAXReader() throws Exception { // installs the NekoHTML parser SAXParser parser = new SAXParser(); parser.setProperty( "http://cyberneko.org/html/properties/names/elems", element ); parser.setProperty( "http://cyberneko.org/html/properties/names/attrs", element ); return new SAXReader( parser ); }
1,110,470
protected SAXReader createSAXReader() throws Exception { // installs the NekoHTML parser SAXParser parser = new SAXParser(); parser.setProperty( "http://cyberneko.org/html/properties/names/elems", "match" ); parser.setProperty( "http://cyberneko.org/html/properties/names/attrs", "match" ); return new SAXReader( parser ); }
protected SAXReader createSAXReader() throws Exception { // installs the NekoHTML parser SAXParser parser = new SAXParser(); parser.setProperty( "http://cyberneko.org/html/properties/names/elems", attribute ); parser.setProperty( "http://cyberneko.org/html/properties/names/attrs", attribute ); return new SAXReader( parser ); }
1,110,471
public void doTag(XMLOutput output) throws Exception { if ( uri == null ) { throw new JellyException( "<j:include> must have a 'uri' attribute defined" ); } // we need to create a new JellyContext of the URI // take off the script name from the URL context.runScript( uri, output ); }
public void doTag(XMLOutput output) throws Exception { if ( uri == null ) { throw new JellyException( "<j:include> must have a 'uri' attribute defined" ); } // we need to create a new JellyContext of the URI // take off the script name from the URL context.runScript( uri, output ); }
1,110,474
public Object getSource() { return source; }
public String getSource() { return source; }
1,110,475
public void setSource(Object source) { this.source = source; }
public void setSource(String source) { this.source = source; }
1,110,476
public void actionPerformed(ActionEvent e) { logger.debug(getValue(SHORT_DESCRIPTION) + " started"); dbcsPanel.applyChanges(); ArchitectDataSource newDS = dbcsPanel.getDbcs(); if (isNew) { if ("".equals(newDS.getName().trim())) { JOptionPane.showMessageDialog(newConnectionDialog,"A connection must have at least 1 character that is not whitespace"); newConnectionDialog.setVisible(true); } else { PlDotIni plDotIni = ArchitectFrame.getMainInstance().getUserSettings().getPlDotIni(); if (plDotIni.getDataSource(newDS.getName()) == null ) { plDotIni.addDataSource(newDS); } else { JOptionPane.showMessageDialog(newConnectionDialog,"A connection with the name \""+newDS.getName()+"\" already exists"); newConnectionDialog.setVisible(true); } } } }
public void actionPerformed(ActionEvent e) { logger.debug(getValue(SHORT_DESCRIPTION) + " started"); dbcsPanel.applyChanges(); ArchitectDataSource newDS = dbcsPanel.getDbcs(); if (isNew) { if ("".equals(newDS.getName().trim())) { JOptionPane.showMessageDialog(newConnectionDialog,"A connection must have at least 1 character that is not whitespace"); newConnectionDialog.setVisible(true); } else { PlDotIni plDotIni = ArchitectFrame.getMainInstance().getUserSettings().getPlDotIni(); if (plDotIni.getDataSource(newDS.getName()) == null ) { plDotIni.addDataSource(newDS); } else { JOptionPane.showMessageDialog(newConnectionDialog,"A connection with the name \""+newDS.getName()+"\" already exists"); newConnectionDialog.setVisible(true); } } } }
1,110,478
public void actionPerformed(ActionEvent e) { logger.debug(getValue(SHORT_DESCRIPTION) + " started"); dbcsPanel.applyChanges(); ArchitectDataSource newDS = dbcsPanel.getDbcs(); if (isNew) { if ("".equals(newDS.getName().trim())) { JOptionPane.showMessageDialog(newConnectionDialog,"A connection must have at least 1 character that is not whitespace"); newConnectionDialog.setVisible(true); } else { PlDotIni plDotIni = ArchitectFrame.getMainInstance().getUserSettings().getPlDotIni(); if (plDotIni.getDataSource(newDS.getName()) == null ) { plDotIni.addDataSource(newDS); } else { JOptionPane.showMessageDialog(newConnectionDialog,"A connection with the name \""+newDS.getName()+"\" already exists"); newConnectionDialog.setVisible(true); } } } }
public void actionPerformed(ActionEvent e) { logger.debug(getValue(SHORT_DESCRIPTION) + " started"); dbcsPanel.applyChanges(); ArchitectDataSource newDS = dbcsPanel.getDbcs(); if (isNew) { if ("".equals(newDS.getName().trim())) { JOptionPane.showMessageDialog(newConnectionDialog,"A connection must have at least 1 character that is not whitespace"); newConnectionDialog.setVisible(true); } else { PlDotIni plDotIni = ArchitectFrame.getMainInstance().getUserSettings().getPlDotIni(); if (plDotIni.getDataSource(newDS.getName()) == null ) { plDotIni.addDataSource(newDS); } else { JOptionPane.showMessageDialog(newConnectionDialog,"A connection with the name \""+curName+"\" already exists"); newConnectionDialog.setVisible(true); } } } }
1,110,479
public void actionPerformed(ActionEvent e) { logger.debug(getValue(SHORT_DESCRIPTION) + " started"); dbcsPanel.applyChanges(); ArchitectDataSource newDS = dbcsPanel.getDbcs(); if (isNew) { if ("".equals(newDS.getName().trim())) { JOptionPane.showMessageDialog(newConnectionDialog,"A connection must have at least 1 character that is not whitespace"); newConnectionDialog.setVisible(true); } else { PlDotIni plDotIni = ArchitectFrame.getMainInstance().getUserSettings().getPlDotIni(); if (plDotIni.getDataSource(newDS.getName()) == null ) { plDotIni.addDataSource(newDS); } else { JOptionPane.showMessageDialog(newConnectionDialog,"A connection with the name \""+newDS.getName()+"\" already exists"); newConnectionDialog.setVisible(true); } } } }
public void actionPerformed(ActionEvent e) { logger.debug(getValue(SHORT_DESCRIPTION) + " started"); dbcsPanel.applyChanges(); ArchitectDataSource newDS = dbcsPanel.getDbcs(); if (isNew) { if ("".equals(newDS.getName().trim())) { JOptionPane.showMessageDialog(newConnectionDialog,"A connection must have at least 1 character that is not whitespace"); newConnectionDialog.setVisible(true); } else if (curName.equals(oldName)) { System.out.println("The current Name is the same as the old name"); dbcsPanel.applyChanges(); } else { PlDotIni plDotIni = ArchitectFrame.getMainInstance().getUserSettings().getPlDotIni(); if (plDotIni.getDataSource(newDS.getName()) == null ) { dbcsPanel.applyChanges(); } else { JOptionPane.showMessageDialog(newConnectionDialog,"A connection with the name \""+curName+"\" already exists"); newConnectionDialog.setVisible(true); } } else { PlDotIni plDotIni = ArchitectFrame.getMainInstance().getUserSettings().getPlDotIni(); if (plDotIni.getDataSource(newDS.getName()) == null ) { plDotIni.addDataSource(newDS); } else if (curName.equals(oldName)) { System.out.println("The current Name is the same as the old name"); dbcsPanel.applyChanges(); } else { PlDotIni plDotIni = ArchitectFrame.getMainInstance().getUserSettings().getPlDotIni(); if (plDotIni.getDataSource(newDS.getName()) == null ) { dbcsPanel.applyChanges(); } else { JOptionPane.showMessageDialog(newConnectionDialog,"A connection with the name \""+curName+"\" already exists"); newConnectionDialog.setVisible(true); } } else { JOptionPane.showMessageDialog(newConnectionDialog,"A connection with the name \""+newDS.getName()+"\" already exists"); newConnectionDialog.setVisible(true); } else if (curName.equals(oldName)) { System.out.println("The current Name is the same as the old name"); dbcsPanel.applyChanges(); } else { PlDotIni plDotIni = ArchitectFrame.getMainInstance().getUserSettings().getPlDotIni(); if (plDotIni.getDataSource(newDS.getName()) == null ) { dbcsPanel.applyChanges(); } else { 
JOptionPane.showMessageDialog(newConnectionDialog,"A connection with the name \""+curName+"\" already exists"); newConnectionDialog.setVisible(true); } } } else if (curName.equals(oldName)) { System.out.println("The current Name is the same as the old name"); dbcsPanel.applyChanges(); } else { PlDotIni plDotIni = ArchitectFrame.getMainInstance().getUserSettings().getPlDotIni(); if (plDotIni.getDataSource(newDS.getName()) == null ) { dbcsPanel.applyChanges(); } else { JOptionPane.showMessageDialog(newConnectionDialog,"A connection with the name \""+curName+"\" already exists"); newConnectionDialog.setVisible(true); } } } else if (curName.equals(oldName)) { System.out.println("The current Name is the same as the old name"); dbcsPanel.applyChanges(); } else { PlDotIni plDotIni = ArchitectFrame.getMainInstance().getUserSettings().getPlDotIni(); if (plDotIni.getDataSource(newDS.getName()) == null ) { dbcsPanel.applyChanges(); } else { JOptionPane.showMessageDialog(newConnectionDialog,"A connection with the name \""+curName+"\" already exists"); newConnectionDialog.setVisible(true); } } } else if (curName.equals(oldName)) { System.out.println("The current Name is the same as the old name"); dbcsPanel.applyChanges(); } else { PlDotIni plDotIni = ArchitectFrame.getMainInstance().getUserSettings().getPlDotIni(); if (plDotIni.getDataSource(newDS.getName()) == null ) { dbcsPanel.applyChanges(); } else { JOptionPane.showMessageDialog(newConnectionDialog,"A connection with the name \""+curName+"\" already exists"); newConnectionDialog.setVisible(true); } }
1,110,480
private void paintThumbnail( Graphics2D g2, PhotoInfo photo, int startx, int starty, boolean isSelected ) { log.debug( "paintThumbnail entry " + photo.getUid() ); long startTime = System.currentTimeMillis(); long thumbReadyTime = 0; long thumbDrawnTime = 0; long endTime = 0; // Current position in which attributes can be drawn int ypos = starty + rowHeight/2; // Create a transaction which will be used for persisten object operations // during painting (to avoid creating several short-livin transactions) ODMGXAWrapper txw = new ODMGXAWrapper(); if ( photo != null ) { Thumbnail thumbnail = null; log.debug( "finding thumb" ); boolean hasThumbnail = photo.hasThumbnail(); log.debug( "asked if has thumb" ); if ( hasThumbnail ) { log.debug( "Photo " + photo.getUid() + " has thumbnail" ); thumbnail = photo.getThumbnail(); log.debug( "got thumbnail" ); } else { thumbnail = Thumbnail.getDefaultThumbnail(); if ( !thumbCreatorThread.isBusy() ) { log.debug( "Create thumbnail for " + photo.getUid() ); thumbCreatorThread.createThumbnail( photo ); log.debug( "Thumbnail request submitted" ); } } thumbReadyTime = System.currentTimeMillis(); log.debug( "starting to draw" ); // Find the position for the thumbnail BufferedImage img = thumbnail.getImage(); int x = startx + (columnWidth - img.getWidth())/(int)2; int y = starty + (rowHeight - img.getHeight())/(int)2; log.debug( "drawing thumbnail" ); g2.drawImage( img, new AffineTransform( 1f, 0f, 0f, 1f, x, y ), null ); log.debug( "Drawn, drawing decorations" ); if ( isSelected ) { Stroke prevStroke = g2.getStroke(); Color prevColor = g2.getColor(); g2.setStroke( new BasicStroke( 3.0f) ); g2.setColor( Color.BLUE ); g2.drawRect( x, y, img.getWidth(), img.getHeight() ); g2.setColor( prevColor ); g2.setStroke( prevStroke ); } thumbDrawnTime = System.currentTimeMillis(); // Increase ypos so that attributes are drawn under the image ypos += ((int)img.getHeight())/2 + 4; // Draw the attributes Color prevBkg = g2.getBackground(); if ( 
isSelected ) { g2.setBackground( Color.BLUE ); } Font attrFont = new Font( "Arial", Font.PLAIN, 10 ); FontRenderContext frc = g2.getFontRenderContext(); if ( showDate && photo.getShootTime() != null ) { FuzzyDate fd = new FuzzyDate( photo.getShootTime(), photo.getTimeAccuracy() ); String dateStr = fd.format(); TextLayout txt = new TextLayout( dateStr, attrFont, frc ); // Calculate the position for the text Rectangle2D bounds = txt.getBounds(); int xpos = startx + ((int)(columnWidth - bounds.getWidth()))/2 - (int)bounds.getMinX(); g2.clearRect( xpos-2, ypos-2, (int)bounds.getWidth()+4, (int)bounds.getHeight()+4 ); txt.draw( g2, xpos, (int)(ypos + bounds.getHeight()) ); ypos += bounds.getHeight() + 4; } String shootPlace = photo.getShootingPlace(); if ( showPlace && shootPlace != null && shootPlace.length() > 0 ) { TextLayout txt = new TextLayout( photo.getShootingPlace(), attrFont, frc ); // Calculate the position for the text Rectangle2D bounds = txt.getBounds(); int xpos = startx + ((int)(columnWidth-bounds.getWidth()))/2 - (int)bounds.getMinX(); g2.clearRect( xpos-2, ypos-2, (int)bounds.getWidth()+4, (int)bounds.getHeight()+4 ); txt.draw( g2, xpos, (int)(ypos + bounds.getHeight()) ); ypos += bounds.getHeight() + 4; } g2.setBackground( prevBkg ); } txw.commit(); endTime = System.currentTimeMillis(); log.debug( "paintThumbnail: exit " + photo.getUid() ); log.debug( "Thumb fetch " + (thumbReadyTime - startTime ) + " ms" ); log.debug( "Thumb draw " + ( thumbDrawnTime - thumbReadyTime ) + " ms" ); log.debug( "Deacoration draw " + (endTime - thumbDrawnTime ) + " ms" ); log.debug( "Total " + (endTime - startTime ) + " ms" ); }
private void paintThumbnail( Graphics2D g2, PhotoInfo photo, int startx, int starty, boolean isSelected ) { log.debug( "paintThumbnail entry " + photo.getUid() ); long startTime = System.currentTimeMillis(); long thumbReadyTime = 0; long thumbDrawnTime = 0; long endTime = 0; // Current position in which attributes can be drawn int ypos = starty + rowHeight/2; // Create a transaction which will be used for persisten object operations // during painting (to avoid creating several short-livin transactions) ODMGXAWrapper txw = new ODMGXAWrapper(); if ( photo != null ) { Thumbnail thumbnail = null; log.debug( "finding thumb" ); boolean hasThumbnail = photo.hasThumbnail(); log.debug( "asked if has thumb" ); if ( hasThumbnail ) { log.debug( "Photo " + photo.getUid() + " has thumbnail" ); thumbnail = photo.getThumbnail(); log.debug( "got thumbnail" ); } else { g2.setBackground( this.getBackground() ); } else { thumbnail = Thumbnail.getDefaultThumbnail(); if ( !thumbCreatorThread.isBusy() ) { log.debug( "Create thumbnail for " + photo.getUid() ); thumbCreatorThread.createThumbnail( photo ); log.debug( "Thumbnail request submitted" ); } else { g2.setBackground( this.getBackground() ); } } else { g2.setBackground( this.getBackground() ); } thumbReadyTime = System.currentTimeMillis(); log.debug( "starting to draw" ); // Find the position for the thumbnail BufferedImage img = thumbnail.getImage(); int x = startx + (columnWidth - img.getWidth())/(int)2; int y = starty + (rowHeight - img.getHeight())/(int)2; log.debug( "drawing thumbnail" ); g2.drawImage( img, new AffineTransform( 1f, 0f, 0f, 1f, x, y ), null ); log.debug( "Drawn, drawing decorations" ); if ( isSelected ) { Stroke prevStroke = g2.getStroke(); Color prevColor = g2.getColor(); g2.setStroke( new BasicStroke( 3.0f) ); g2.setColor( Color.BLUE ); g2.drawRect( x, y, img.getWidth(), img.getHeight() ); g2.setColor( prevColor ); g2.setStroke( prevStroke ); } else { g2.setBackground( this.getBackground() ); } thumbDrawnTime 
= System.currentTimeMillis(); // Increase ypos so that attributes are drawn under the image ypos += ((int)img.getHeight())/2 + 4; // Draw the attributes Color prevBkg = g2.getBackground(); if ( isSelected ) { g2.setBackground( Color.BLUE ); } else { g2.setBackground( this.getBackground() ); } Font attrFont = new Font( "Arial", Font.PLAIN, 10 ); FontRenderContext frc = g2.getFontRenderContext(); if ( showDate && photo.getShootTime() != null ) { FuzzyDate fd = new FuzzyDate( photo.getShootTime(), photo.getTimeAccuracy() ); String dateStr = fd.format(); TextLayout txt = new TextLayout( dateStr, attrFont, frc ); // Calculate the position for the text Rectangle2D bounds = txt.getBounds(); int xpos = startx + ((int)(columnWidth - bounds.getWidth()))/2 - (int)bounds.getMinX(); g2.clearRect( xpos-2, ypos-2, (int)bounds.getWidth()+4, (int)bounds.getHeight()+4 ); txt.draw( g2, xpos, (int)(ypos + bounds.getHeight()) ); ypos += bounds.getHeight() + 4; } else { g2.setBackground( this.getBackground() ); } String shootPlace = photo.getShootingPlace(); if ( showPlace && shootPlace != null && shootPlace.length() > 0 ) { TextLayout txt = new TextLayout( photo.getShootingPlace(), attrFont, frc ); // Calculate the position for the text Rectangle2D bounds = txt.getBounds(); int xpos = startx + ((int)(columnWidth-bounds.getWidth()))/2 - (int)bounds.getMinX(); g2.clearRect( xpos-2, ypos-2, (int)bounds.getWidth()+4, (int)bounds.getHeight()+4 ); txt.draw( g2, xpos, (int)(ypos + bounds.getHeight()) ); ypos += bounds.getHeight() + 4; } else { g2.setBackground( this.getBackground() ); } g2.setBackground( prevBkg ); } else { g2.setBackground( this.getBackground() ); } txw.commit(); endTime = System.currentTimeMillis(); log.debug( "paintThumbnail: exit " + photo.getUid() ); log.debug( "Thumb fetch " + (thumbReadyTime - startTime ) + " ms" ); log.debug( "Thumb draw " + ( thumbDrawnTime - thumbReadyTime ) + " ms" ); log.debug( "Deacoration draw " + (endTime - thumbDrawnTime ) + " ms" ); 
log.debug( "Total " + (endTime - startTime ) + " ms" ); } else { g2.setBackground( this.getBackground() ); }
1,110,481
public String format() { long lAccuracy = (long) (accuracy * 24 * 3600 * 1000); String dateStr = ""; if ( date == null ) { return ""; } if ( accuracy > 0 ) { // Find the correct format to use String formatStr =accuracyFormatStrings[0]; for ( int i = 1; i < accuracyFormatLimits.length; i++ ) { if ( accuracy < accuracyFormatLimits[i] ) { break; } formatStr =accuracyFormatStrings[i]; } // Show the limits of the accuracy range DateFormat df = new SimpleDateFormat( formatStr ); Date lower = new Date( date.getTime() - lAccuracy ); Date upper = new Date( date.getTime() + lAccuracy ); String lowerStr = df.format( lower ); String upperStr = df.format( upper ); dateStr = lowerStr; if ( !lowerStr.equals( upperStr ) ) { dateStr += " - " + upperStr; } } else { DateFormat df = new SimpleDateFormat( "dd.MM.yyyy k:mm" ); dateStr = df.format( date ); } return dateStr; }
public String format() { long lAccuracy = (long) (accuracy * 24 * 3600 * 1000); String dateStr = ""; if ( date == null ) { return ""; } if ( accuracy > 0 ) { // Find the correct format to use String formatStr =accuracyFormatStrings[0]; for ( int i = 1; i < accuracyFormatLimits.length; i++ ) { if ( accuracy < accuracyFormatLimits[i] ) { break; } formatStr =accuracyFormatStrings[i]; } // Show the limits of the accuracy range DateFormat df = new SimpleDateFormat( formatStr ); Date lower = new Date( date.getTime() - lAccuracy ); Date upper = new Date( date.getTime() + lAccuracy ); String lowerStr = df.format( lower ); String upperStr = df.format( upper ); dateStr = lowerStr; if ( !lowerStr.equals( upperStr ) ) { dateStr += " - " + upperStr; } } else { DateFormat df = new SimpleDateFormat( "dd.MM.yyyy k:mm" ); dateStr = df.format( date ); } return dateStr; }
1,110,483
public String format() { long lAccuracy = (long) (accuracy * 24 * 3600 * 1000); String dateStr = ""; if ( date == null ) { return ""; } if ( accuracy > 0 ) { // Find the correct format to use String formatStr =accuracyFormatStrings[0]; for ( int i = 1; i < accuracyFormatLimits.length; i++ ) { if ( accuracy < accuracyFormatLimits[i] ) { break; } formatStr =accuracyFormatStrings[i]; } // Show the limits of the accuracy range DateFormat df = new SimpleDateFormat( formatStr ); Date lower = new Date( date.getTime() - lAccuracy ); Date upper = new Date( date.getTime() + lAccuracy ); String lowerStr = df.format( lower ); String upperStr = df.format( upper ); dateStr = lowerStr; if ( !lowerStr.equals( upperStr ) ) { dateStr += " - " + upperStr; } } else { DateFormat df = new SimpleDateFormat( "dd.MM.yyyy k:mm" ); dateStr = df.format( date ); } return dateStr; }
public String format() { long lAccuracy = (long) (accuracy * 24 * 3600 * 1000); String dateStr = ""; if ( date == null ) { return ""; } if ( accuracy > 0 ) { // Find the correct format to use String formatStr =accuracyFormatStrings[0]; for ( int i = 1; i < accuracyFormatLimits.length; i++ ) { if ( accuracy < accuracyFormatLimits[i] ) { break; } parser = fdParsers[i]; } // Show the limits of the accuracy range DateFormat df = new SimpleDateFormat( formatStr ); Date lower = new Date( date.getTime() - lAccuracy ); Date upper = new Date( date.getTime() + lAccuracy ); String lowerStr = df.format( lower ); String upperStr = df.format( upper ); dateStr = lowerStr; if ( !lowerStr.equals( upperStr ) ) { dateStr += " - " + upperStr; } } else { DateFormat df = new SimpleDateFormat( "dd.MM.yyyy k:mm" ); dateStr = df.format( date ); } return dateStr; }
1,110,484
public String format() { long lAccuracy = (long) (accuracy * 24 * 3600 * 1000); String dateStr = ""; if ( date == null ) { return ""; } if ( accuracy > 0 ) { // Find the correct format to use String formatStr =accuracyFormatStrings[0]; for ( int i = 1; i < accuracyFormatLimits.length; i++ ) { if ( accuracy < accuracyFormatLimits[i] ) { break; } formatStr =accuracyFormatStrings[i]; } // Show the limits of the accuracy range DateFormat df = new SimpleDateFormat( formatStr ); Date lower = new Date( date.getTime() - lAccuracy ); Date upper = new Date( date.getTime() + lAccuracy ); String lowerStr = df.format( lower ); String upperStr = df.format( upper ); dateStr = lowerStr; if ( !lowerStr.equals( upperStr ) ) { dateStr += " - " + upperStr; } } else { DateFormat df = new SimpleDateFormat( "dd.MM.yyyy k:mm" ); dateStr = df.format( date ); } return dateStr; }
public String format() { long lAccuracy = (long) (accuracy * 24 * 3600 * 1000); String dateStr = ""; if ( date == null ) { return ""; } if ( accuracy > 0 ) { // Find the correct format to use String formatStr =accuracyFormatStrings[0]; for ( int i = 1; i < accuracyFormatLimits.length; i++ ) { if ( accuracy < accuracyFormatLimits[i] ) { break; } formatStr =accuracyFormatStrings[i]; } // Show the limits of the accuracy range DateFormat df = new SimpleDateFormat( formatStr ); Date lower = new Date( date.getTime() - lAccuracy ); Date upper = new Date( date.getTime() + lAccuracy ); String lowerStr = df.format( lower ); String upperStr = df.format( upper ); dateStr = lowerStr; if ( !lowerStr.equals( upperStr ) ) { dateStr += " - " + upperStr; } } else { DateFormat df = new SimpleDateFormat( "dd.MM.yyyy k:mm" ); dateStr = df.format( date ); } return dateStr; }
1,110,485
public Object evaluate(Context context) { try { JexlContext jexlContext = new JexlContext() { Map ctx; public void setVars(Map vars) { ctx = vars; } public Map getVars() { return ctx; } }; jexlContext.setVars(context.getVariables()); if ( log.isDebugEnabled() ) { log.debug( "Evaluating EL: " + expression ); } return expression.evaluate( jexlContext ); } catch (Exception e) { log.warn( "Caught exception evaluating: " + expression + ". Reason: " + e, e ); return null; } }
public Object evaluate(Context context) { try { JexlContext jexlContext = new JexlContext() { Map ctx; public void setVars(Map vars) { ctx = vars; } public Map getVars() { return ctx; } }; jexlContext.setVars(context.getVariables()); if ( log.isDebugEnabled() ) { log.debug( "Evaluating EL: " + expression ); } return expression.evaluate( jexlContext ); } catch (Exception e) { log.warn( "Caught exception evaluating: " + expression + ". Reason: " + e, e ); return null; } }
1,110,488
public Object evaluate(Context context) { try { JexlContext jexlContext = new JexlContext() { Map ctx; public void setVars(Map vars) { ctx = vars; } public Map getVars() { return ctx; } }; jexlContext.setVars(context.getVariables()); if ( log.isDebugEnabled() ) { log.debug( "Evaluating EL: " + expression ); } return expression.evaluate( jexlContext ); } catch (Exception e) { log.warn( "Caught exception evaluating: " + expression + ". Reason: " + e, e ); return null; } }
public Object evaluate(Context context) { try { JexlContext jexlContext = new JexlContext() { Map ctx; public void setVars(Map vars) { ctx = vars; } public Map getVars() { return ctx; } }; jexlContext.setVars(context.getVariables()); if ( log.isDebugEnabled() ) { log.debug( "Evaluating EL: " + expression ); } return expression.evaluate( jexlContext ); } catch (Exception e) { log.warn( "Caught exception evaluating: " + expression + ". Reason: " + e, e ); return null; } }
1,110,489
public Object evaluate(Context context) { try { JexlContext jexlContext = new JexlContext() { Map ctx; public void setVars(Map vars) { ctx = vars; } public Map getVars() { return ctx; } }; jexlContext.setVars(context.getVariables()); if ( log.isDebugEnabled() ) { log.debug( "Evaluating EL: " + expression ); } return expression.evaluate( jexlContext ); } catch (Exception e) { log.warn( "Caught exception evaluating: " + expression + ". Reason: " + e, e ); return null; } }
public Object evaluate(Context context) { try { JexlContext jexlContext = new JexlContext() { Map ctx; public void setVars(Map vars) { ctx = vars; } public Map getVars() { return ctx; } }; jexlContext.setVars(context.getVariables()); if ( log.isDebugEnabled() ) { log.debug( "Evaluating EL: " + expression ); } return expression.evaluate( jexlContext ); } catch (Exception e) { log.warn( "Caught exception evaluating: " + expression + ". Reason: " + e, e ); return null; } }
1,110,490
public Object evaluate(Context context) { try { JexlContext jexlContext = new JexlContext() { Map ctx; public void setVars(Map vars) { ctx = vars; } public Map getVars() { return ctx; } }; jexlContext.setVars(context.getVariables()); if ( log.isDebugEnabled() ) { log.debug( "Evaluating EL: " + expression ); } return expression.evaluate( jexlContext ); } catch (Exception e) { log.warn( "Caught exception evaluating: " + expression + ". Reason: " + e, e ); return null; } }
public Object evaluate(Context context) { try { JexlContext jexlContext = new JexlContext() { Map ctx; public void setVars(Map vars) { ctx = vars; } public Map getVars() { return ctx; } }; jexlContext.setVars(context.getVariables()); if ( log.isDebugEnabled() ) { log.debug( "Evaluating EL: " + expression ); } return expression.evaluate( jexlContext ); } catch (Exception e) { log.warn( "Caught exception evaluating: " + expression + ". Reason: " + e, e ); return null; } }
1,110,491
public Object evaluate(Context context) { try { JexlContext jexlContext = new JexlContext() { Map ctx; public void setVars(Map vars) { ctx = vars; } public Map getVars() { return ctx; } }; jexlContext.setVars(context.getVariables()); if ( log.isDebugEnabled() ) { log.debug( "Evaluating EL: " + expression ); } return expression.evaluate( jexlContext ); } catch (Exception e) { log.warn( "Caught exception evaluating: " + expression + ". Reason: " + e, e ); return null; } }
public Object evaluate(Context context) { try { JexlContext jexlContext = new JexlContext() { Map ctx; public void setVars(Map vars) { ctx = vars; } public Map getVars() { return ctx; } }; public void setVars(Map vars) { if ( log.isDebugEnabled() ) { log.debug( "Evaluating EL: " + expression ); } return expression.evaluate( jexlContext ); } catch (Exception e) { log.warn( "Caught exception evaluating: " + expression + ". Reason: " + e, e ); return null; } }
1,110,492
public Object evaluate(Context context) { try { JexlContext jexlContext = new JexlContext() { Map ctx; public void setVars(Map vars) { ctx = vars; } public Map getVars() { return ctx; } }; jexlContext.setVars(context.getVariables()); if ( log.isDebugEnabled() ) { log.debug( "Evaluating EL: " + expression ); } return expression.evaluate( jexlContext ); } catch (Exception e) { log.warn( "Caught exception evaluating: " + expression + ". Reason: " + e, e ); return null; } }
public Object evaluate(Context context) { try { JexlContext jexlContext = new JexlContext() { Map ctx; public void setVars(Map vars) { ctx = vars; } public Map getVars() { return ctx; } }; jexlContext.setVars(context.getVariables()); if ( log.isDebugEnabled() ) { log.debug( "Evaluating EL: " + expression ); } return expression.evaluate( jexlContext ); } catch (Exception e) { log.warn( "Caught exception evaluating: " + expression + ". Reason: " + e, e ); return null; } }
1,110,493
public Vector check() throws PedFileException{ //before we perform the check we want to prune out individuals with too much missing data //or trios which contain individuals with too much missing data Iterator fitr = families.values().iterator(); Vector useable = new Vector(); while (fitr.hasNext()){ Family curFam = (Family) fitr.next(); Enumeration indIDEnum = curFam.getMemberList(); Vector victor = new Vector(); while (indIDEnum.hasMoreElements()){ victor.add(curFam.getMember((String) indIDEnum.nextElement())); } PedParser pp = new PedParser(); try { SimpleGraph sg = pp.buildGraph(victor, Options.getMissingThreshold()); Vector indStrings = pp.parsePed(sg); if (indStrings != null){ Iterator sitr = indStrings.iterator(); while (sitr.hasNext()){ useable.add(curFam.getMember((String)sitr.next())); } } }catch (PedigreeException pe){ String pem = pe.getMessage(); if (pem.indexOf("one parent") != -1){ indIDEnum = curFam.getMemberList(); while (indIDEnum.hasMoreElements()){ curFam.getMember((String) indIDEnum.nextElement()).setReasonImAxed(pem); } }else{ throw new PedFileException(pem + "\nin family " + curFam.getFamilyName()); } } } unrelatedIndividuals = new Vector(); Vector indList = (Vector)allIndividuals.clone(); Individual currentInd; Family currentFamily; //deal with individuals who are missing too much data for(int x=0; x < indList.size(); x++){ currentInd = (Individual)indList.elementAt(x); currentFamily = getFamily(currentInd.getFamilyID()); if (currentInd.getGenoPC() < 1 - Options.getMissingThreshold()){ allIndividuals.removeElement(currentInd); axedPeople.add(currentInd); currentInd.setReasonImAxed("% Genotypes: " + new Double(currentInd.getGenoPC()*100).intValue()); currentFamily.removeMember(currentInd.getIndividualID()); if (currentFamily.getNumMembers() == 0){ //if everyone in a family is gone, we remove it from the list families.remove(currentInd.getFamilyID()); } }else if (!useable.contains(currentInd)){ axedPeople.add(currentInd); if 
(currentInd.getReasonImAxed() == null){ currentInd.setReasonImAxed("Not a member of maximum unrelated subset."); } } } if (useable.size() == 0){ //todo: this should be more specific about the problems. throw new PedFileException("File contains zero valid individuals."); } CheckData cd = new CheckData(this); Vector results = cd.check(); this.results = results; return results; }
public Vector check() throws PedFileException{ //before we perform the check we want to prune out individuals with too much missing data //or trios which contain individuals with too much missing data Iterator fitr = families.values().iterator(); Vector useable = new Vector(); while (fitr.hasNext()){ Family curFam = (Family) fitr.next(); Enumeration indIDEnum = curFam.getMemberList(); Vector victor = new Vector(); while (indIDEnum.hasMoreElements()){ victor.add(curFam.getMember((String) indIDEnum.nextElement())); } PedParser pp = new PedParser(); try { SimpleGraph sg = pp.buildGraph(victor, Options.getMissingThreshold()); Vector indStrings = pp.parsePed(sg); if (indStrings != null){ Iterator sitr = indStrings.iterator(); while (sitr.hasNext()){ useable.add(curFam.getMember((String)sitr.next())); } } }catch (PedigreeException pe){ String pem = pe.getMessage(); if (pem.indexOf("one parent") != -1){ indIDEnum = curFam.getMemberList(); while (indIDEnum.hasMoreElements()){ curFam.getMember((String) indIDEnum.nextElement()).setReasonImAxed(pem); } }else{ throw new PedFileException(pem + "\nin family " + curFam.getFamilyName()); } } } unrelatedIndividuals = useable; Vector indList = (Vector)allIndividuals.clone(); Individual currentInd; Family currentFamily; //deal with individuals who are missing too much data for(int x=0; x < indList.size(); x++){ currentInd = (Individual)indList.elementAt(x); currentFamily = getFamily(currentInd.getFamilyID()); if (currentInd.getGenoPC() < 1 - Options.getMissingThreshold()){ allIndividuals.removeElement(currentInd); axedPeople.add(currentInd); currentInd.setReasonImAxed("% Genotypes: " + new Double(currentInd.getGenoPC()*100).intValue()); currentFamily.removeMember(currentInd.getIndividualID()); if (currentFamily.getNumMembers() == 0){ //if everyone in a family is gone, we remove it from the list families.remove(currentInd.getFamilyID()); } }else if (!useable.contains(currentInd)){ axedPeople.add(currentInd); if 
(currentInd.getReasonImAxed() == null){ currentInd.setReasonImAxed("Not a member of maximum unrelated subset."); } } } if (useable.size() == 0){ //todo: this should be more specific about the problems. throw new PedFileException("File contains zero valid individuals."); } CheckData cd = new CheckData(this); Vector results = cd.check(); this.results = results; return results; }
1,110,494
public void populate() throws ArchitectException { // SQLColumn doesn't have children, so populate does nothing! return; }
public void populate() throws ArchitectException { // SQLColumn doesn't have children, so populate does nothing! logger.debug("SQLColumn: populate is a no-op"); }
1,110,495
public static void main(String[] args) { JTextField[] fields = { new JTextField(), new JTextField(), new JTextField(), new JTextField() }; String[] labels = { "First Name", "Middle Initial", "Last Name", "Age" }; char[] mnemonics = { 'F', 'M', 'L', 'A' }; int[] widths = { 15, 1, 15, 3 }; String[] descs = { "First Name", "Middle Initial", "Last Name", "Age" }; final TextPanel form = new TextPanel(fields, labels, mnemonics, widths, descs); JButton submit = new JButton("Submit Form"); submit.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { System.out.println(form.getField(0) + " " + form.getField(1) + ". " + form.getField(2) + ", age " + form.getField(3)); } }); JFrame f = new JFrame("Text Form Example"); f.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); f.getContentPane().add(form, BorderLayout.NORTH); JPanel p = new JPanel(); p.add(submit); f.getContentPane().add(p, BorderLayout.SOUTH); f.pack(); f.setVisible(true); }
public static void main(String[] args) { JTextField[] fields = { new JTextField(), new JTextField(), new JTextField(), new JTextField() }; String[] labels = { "First Name", "Middle Initial", "Last Name", "Age" }; char[] mnemonics = { 'F', 'M', 'L', 'A' }; int[] widths = { 15, 1, 15, 3 }; String[] descs = { "First Name", "Middle Initial", "Last Name", "Age" }; final TextPanel form = new TextPanel(fields, labels, mnemonics, widths, descs); JButton submit = new JButton("Submit Form"); submit.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { logger.info(form.getField(0) + " " + form.getField(1) + ". " + form.getField(2) + ", age " + form.getField(3)); } }); JFrame f = new JFrame("Text Form Example"); f.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); f.getContentPane().add(form, BorderLayout.NORTH); JPanel p = new JPanel(); p.add(submit); f.getContentPane().add(p, BorderLayout.SOUTH); f.pack(); f.setVisible(true); }
1,110,496
public void actionPerformed(ActionEvent e) { System.out.println(form.getField(0) + " " + form.getField(1) + ". " + form.getField(2) + ", age " + form.getField(3)); }
public void actionPerformed(ActionEvent e) { logger.info(form.getField(0) + " " + form.getField(1) + ". " + form.getField(2) + ", age " + form.getField(3)); }
1,110,497
public Dimension getPreferredSize(){ return new DrawingMethods().dPrimeGetPreferredSize(table.length); }
public Dimension getPreferredSize(){ return new DrawingMethods().dPrimeGetPreferredSize(table.length,info); }
1,110,500
public void paintComponent(Graphics g){ super.paintComponent(g); new DrawingMethods().dPrimeDraw(table, g); }
public void paintComponent(Graphics g){ super.paintComponent(g); new DrawingMethods().dPrimeDraw(table, info, vec, g); }
1,110,501
public CheckDataPanel(HaploData hd) throws IOException, PedFileException{ STATUS_COL = 8; setLayout(new BoxLayout(this,BoxLayout.Y_AXIS)); pedfile = hd.getPedFile(); theData = hd; Vector result = pedfile.getResults(); int numResults = result.size(); Vector tableColumnNames = new Vector(); tableColumnNames.add("#"); if (theData.infoKnown){ tableColumnNames.add("Name"); tableColumnNames.add("Position"); STATUS_COL += 2; } tableColumnNames.add("ObsHET"); tableColumnNames.add("PredHET"); tableColumnNames.add("HWpval"); tableColumnNames.add("%Geno"); tableColumnNames.add("FamTrio"); tableColumnNames.add("MendErr"); tableColumnNames.add("MAF"); tableColumnNames.add("Rating"); Vector tableData = new Vector(); int[] markerRatings = new int[numResults]; for (int i = 0; i < numResults; i++){ Vector tempVect = new Vector(); MarkerResult currentResult = (MarkerResult)result.get(i); tempVect.add(new Integer(i+1)); if (theData.infoKnown){ tempVect.add(Chromosome.getMarker(i).getName()); tempVect.add(new Long(Chromosome.getMarker(i).getPosition())); } tempVect.add(new Double(currentResult.getObsHet())); tempVect.add(new Double(currentResult.getPredHet())); tempVect.add(new Double(currentResult.getHWpvalue())); tempVect.add(new Double(currentResult.getGenoPercent())); tempVect.add(new Integer(currentResult.getFamTrioNum())); tempVect.add(new Integer(currentResult.getMendErrNum())); tempVect.add(new Double(currentResult.getMAF())); if (currentResult.getRating() > 0){ tempVect.add(new Boolean(true)); }else{ tempVect.add(new Boolean(false)); } //this value is never displayed, just kept for bookkeeping markerRatings[i] = currentResult.getRating(); tableData.add(tempVect.clone()); } final CheckDataTableModel tableModel = new CheckDataTableModel(tableColumnNames, tableData, markerRatings); tableModel.addTableModelListener(this); table = new JTable(tableModel); final CheckDataCellRenderer renderer = new CheckDataCellRenderer(); try{ 
table.setDefaultRenderer(Class.forName("java.lang.Double"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Integer"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Long"), renderer); }catch (Exception e){ } table.getColumnModel().getColumn(0).setPreferredWidth(30); if (theData.infoKnown){ table.getColumnModel().getColumn(1).setPreferredWidth(100); } JScrollPane tableScroller = new JScrollPane(table); add(tableScroller); }
public CheckDataPanel(HaploData hd, boolean disp) throws IOException, PedFileException{ STATUS_COL = 8; setLayout(new BoxLayout(this,BoxLayout.Y_AXIS)); pedfile = hd.getPedFile(); theData = hd; Vector result = pedfile.getResults(); int numResults = result.size(); Vector tableColumnNames = new Vector(); tableColumnNames.add("#"); if (theData.infoKnown){ tableColumnNames.add("Name"); tableColumnNames.add("Position"); STATUS_COL += 2; } tableColumnNames.add("ObsHET"); tableColumnNames.add("PredHET"); tableColumnNames.add("HWpval"); tableColumnNames.add("%Geno"); tableColumnNames.add("FamTrio"); tableColumnNames.add("MendErr"); tableColumnNames.add("MAF"); tableColumnNames.add("Rating"); Vector tableData = new Vector(); int[] markerRatings = new int[numResults]; for (int i = 0; i < numResults; i++){ Vector tempVect = new Vector(); MarkerResult currentResult = (MarkerResult)result.get(i); tempVect.add(new Integer(i+1)); if (theData.infoKnown){ tempVect.add(Chromosome.getMarker(i).getName()); tempVect.add(new Long(Chromosome.getMarker(i).getPosition())); } tempVect.add(new Double(currentResult.getObsHet())); tempVect.add(new Double(currentResult.getPredHet())); tempVect.add(new Double(currentResult.getHWpvalue())); tempVect.add(new Double(currentResult.getGenoPercent())); tempVect.add(new Integer(currentResult.getFamTrioNum())); tempVect.add(new Integer(currentResult.getMendErrNum())); tempVect.add(new Double(currentResult.getMAF())); if (currentResult.getRating() > 0){ tempVect.add(new Boolean(true)); }else{ tempVect.add(new Boolean(false)); } //this value is never displayed, just kept for bookkeeping markerRatings[i] = currentResult.getRating(); tableData.add(tempVect.clone()); } final CheckDataTableModel tableModel = new CheckDataTableModel(tableColumnNames, tableData, markerRatings); tableModel.addTableModelListener(this); table = new JTable(tableModel); final CheckDataCellRenderer renderer = new CheckDataCellRenderer(); try{ 
table.setDefaultRenderer(Class.forName("java.lang.Double"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Integer"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Long"), renderer); }catch (Exception e){ } table.getColumnModel().getColumn(0).setPreferredWidth(30); if (theData.infoKnown){ table.getColumnModel().getColumn(1).setPreferredWidth(100); } JScrollPane tableScroller = new JScrollPane(table); add(tableScroller); }
1,110,502
public CheckDataPanel(HaploData hd) throws IOException, PedFileException{ STATUS_COL = 8; setLayout(new BoxLayout(this,BoxLayout.Y_AXIS)); pedfile = hd.getPedFile(); theData = hd; Vector result = pedfile.getResults(); int numResults = result.size(); Vector tableColumnNames = new Vector(); tableColumnNames.add("#"); if (theData.infoKnown){ tableColumnNames.add("Name"); tableColumnNames.add("Position"); STATUS_COL += 2; } tableColumnNames.add("ObsHET"); tableColumnNames.add("PredHET"); tableColumnNames.add("HWpval"); tableColumnNames.add("%Geno"); tableColumnNames.add("FamTrio"); tableColumnNames.add("MendErr"); tableColumnNames.add("MAF"); tableColumnNames.add("Rating"); Vector tableData = new Vector(); int[] markerRatings = new int[numResults]; for (int i = 0; i < numResults; i++){ Vector tempVect = new Vector(); MarkerResult currentResult = (MarkerResult)result.get(i); tempVect.add(new Integer(i+1)); if (theData.infoKnown){ tempVect.add(Chromosome.getMarker(i).getName()); tempVect.add(new Long(Chromosome.getMarker(i).getPosition())); } tempVect.add(new Double(currentResult.getObsHet())); tempVect.add(new Double(currentResult.getPredHet())); tempVect.add(new Double(currentResult.getHWpvalue())); tempVect.add(new Double(currentResult.getGenoPercent())); tempVect.add(new Integer(currentResult.getFamTrioNum())); tempVect.add(new Integer(currentResult.getMendErrNum())); tempVect.add(new Double(currentResult.getMAF())); if (currentResult.getRating() > 0){ tempVect.add(new Boolean(true)); }else{ tempVect.add(new Boolean(false)); } //this value is never displayed, just kept for bookkeeping markerRatings[i] = currentResult.getRating(); tableData.add(tempVect.clone()); } final CheckDataTableModel tableModel = new CheckDataTableModel(tableColumnNames, tableData, markerRatings); tableModel.addTableModelListener(this); table = new JTable(tableModel); final CheckDataCellRenderer renderer = new CheckDataCellRenderer(); try{ 
table.setDefaultRenderer(Class.forName("java.lang.Double"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Integer"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Long"), renderer); }catch (Exception e){ } table.getColumnModel().getColumn(0).setPreferredWidth(30); if (theData.infoKnown){ table.getColumnModel().getColumn(1).setPreferredWidth(100); } JScrollPane tableScroller = new JScrollPane(table); add(tableScroller); }
public CheckDataPanel(HaploData hd) throws IOException, PedFileException{ STATUS_COL = 8; setLayout(new BoxLayout(this,BoxLayout.Y_AXIS)); pedfile = hd.getPedFile(); theData = hd; Vector result = pedfile.getResults(); int numResults = result.size(); Vector tableColumnNames = new Vector(); tableColumnNames.add("#"); if (theData.infoKnown){ tableColumnNames.add("Name"); tableColumnNames.add("Position"); STATUS_COL += 2; } tableColumnNames.add("ObsHET"); tableColumnNames.add("PredHET"); tableColumnNames.add("HWpval"); tableColumnNames.add("%Geno"); tableColumnNames.add("FamTrio"); tableColumnNames.add("MendErr"); tableColumnNames.add("MAF"); tableColumnNames.add("Rating"); Vector tableData = new Vector(); int[] markerRatings = new int[numResults]; for (int i = 0; i < numResults; i++){ Vector tempVect = new Vector(); MarkerResult currentResult = (MarkerResult)result.get(i); tempVect.add(new Integer(i+1)); if (theData.infoKnown){ tempVect.add(Chromosome.getMarker(i).getName()); tempVect.add(new Long(Chromosome.getMarker(i).getPosition())); } tempVect.add(new Double(currentResult.getObsHet())); tempVect.add(new Double(currentResult.getPredHet())); tempVect.add(new Double(currentResult.getHWpvalue())); tempVect.add(new Double(currentResult.getGenoPercent())); tempVect.add(new Integer(currentResult.getFamTrioNum())); tempVect.add(new Integer(currentResult.getMendErrNum())); tempVect.add(new Double(currentResult.getMAF())); if (currentResult.getRating() > 0){ tempVect.add(new Boolean(true)); }else{ tempVect.add(new Boolean(false)); } //this value is never displayed, just kept for bookkeeping markerRatings[i] = currentResult.getRating(); tableData.add(tempVect.clone()); } tableModel = new CheckDataTableModel(tableColumnNames, tableData, markerRatings); tableModel.addTableModelListener(this); table = new JTable(tableModel); final CheckDataCellRenderer renderer = new CheckDataCellRenderer(); try{ table.setDefaultRenderer(Class.forName("java.lang.Double"), renderer); 
table.setDefaultRenderer(Class.forName("java.lang.Integer"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Long"), renderer); }catch (Exception e){ } table.getColumnModel().getColumn(0).setPreferredWidth(30); if (theData.infoKnown){ table.getColumnModel().getColumn(1).setPreferredWidth(100); } JScrollPane tableScroller = new JScrollPane(table); add(tableScroller); }
1,110,503
public CheckDataPanel(HaploData hd) throws IOException, PedFileException{ STATUS_COL = 8; setLayout(new BoxLayout(this,BoxLayout.Y_AXIS)); pedfile = hd.getPedFile(); theData = hd; Vector result = pedfile.getResults(); int numResults = result.size(); Vector tableColumnNames = new Vector(); tableColumnNames.add("#"); if (theData.infoKnown){ tableColumnNames.add("Name"); tableColumnNames.add("Position"); STATUS_COL += 2; } tableColumnNames.add("ObsHET"); tableColumnNames.add("PredHET"); tableColumnNames.add("HWpval"); tableColumnNames.add("%Geno"); tableColumnNames.add("FamTrio"); tableColumnNames.add("MendErr"); tableColumnNames.add("MAF"); tableColumnNames.add("Rating"); Vector tableData = new Vector(); int[] markerRatings = new int[numResults]; for (int i = 0; i < numResults; i++){ Vector tempVect = new Vector(); MarkerResult currentResult = (MarkerResult)result.get(i); tempVect.add(new Integer(i+1)); if (theData.infoKnown){ tempVect.add(Chromosome.getMarker(i).getName()); tempVect.add(new Long(Chromosome.getMarker(i).getPosition())); } tempVect.add(new Double(currentResult.getObsHet())); tempVect.add(new Double(currentResult.getPredHet())); tempVect.add(new Double(currentResult.getHWpvalue())); tempVect.add(new Double(currentResult.getGenoPercent())); tempVect.add(new Integer(currentResult.getFamTrioNum())); tempVect.add(new Integer(currentResult.getMendErrNum())); tempVect.add(new Double(currentResult.getMAF())); if (currentResult.getRating() > 0){ tempVect.add(new Boolean(true)); }else{ tempVect.add(new Boolean(false)); } //this value is never displayed, just kept for bookkeeping markerRatings[i] = currentResult.getRating(); tableData.add(tempVect.clone()); } final CheckDataTableModel tableModel = new CheckDataTableModel(tableColumnNames, tableData, markerRatings); tableModel.addTableModelListener(this); table = new JTable(tableModel); final CheckDataCellRenderer renderer = new CheckDataCellRenderer(); try{ 
table.setDefaultRenderer(Class.forName("java.lang.Double"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Integer"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Long"), renderer); }catch (Exception e){ } table.getColumnModel().getColumn(0).setPreferredWidth(30); if (theData.infoKnown){ table.getColumnModel().getColumn(1).setPreferredWidth(100); } JScrollPane tableScroller = new JScrollPane(table); add(tableScroller); }
public CheckDataPanel(HaploData hd) throws IOException, PedFileException{ STATUS_COL = 8; setLayout(new BoxLayout(this,BoxLayout.Y_AXIS)); pedfile = hd.getPedFile(); theData = hd; Vector result = pedfile.getResults(); int numResults = result.size(); Vector tableColumnNames = new Vector(); tableColumnNames.add("#"); if (theData.infoKnown){ tableColumnNames.add("Name"); tableColumnNames.add("Position"); STATUS_COL += 2; } tableColumnNames.add("ObsHET"); tableColumnNames.add("PredHET"); tableColumnNames.add("HWpval"); tableColumnNames.add("%Geno"); tableColumnNames.add("FamTrio"); tableColumnNames.add("MendErr"); tableColumnNames.add("MAF"); tableColumnNames.add("Rating"); Vector tableData = new Vector(); int[] markerRatings = new int[numResults]; for (int i = 0; i < numResults; i++){ Vector tempVect = new Vector(); MarkerResult currentResult = (MarkerResult)result.get(i); tempVect.add(new Integer(i+1)); if (theData.infoKnown){ tempVect.add(Chromosome.getMarker(i).getName()); tempVect.add(new Long(Chromosome.getMarker(i).getPosition())); } tempVect.add(new Double(currentResult.getObsHet())); tempVect.add(new Double(currentResult.getPredHet())); tempVect.add(new Double(currentResult.getHWpvalue())); tempVect.add(new Double(currentResult.getGenoPercent())); tempVect.add(new Integer(currentResult.getFamTrioNum())); tempVect.add(new Integer(currentResult.getMendErrNum())); tempVect.add(new Double(currentResult.getMAF())); if (currentResult.getRating() > 0){ tempVect.add(new Boolean(true)); }else{ tempVect.add(new Boolean(false)); } //this value is never displayed, just kept for bookkeeping markerRatings[i] = currentResult.getRating(); tableData.add(tempVect.clone()); } final CheckDataTableModel tableModel = new CheckDataTableModel(tableColumnNames, tableData, markerRatings); tableModel.addTableModelListener(this); table = new JTable(tableModel); final CheckDataCellRenderer renderer = new CheckDataCellRenderer(); try{ 
table.setDefaultRenderer(Class.forName("java.lang.Double"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Integer"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Long"), renderer); }catch (Exception e){ } table.getColumnModel().getColumn(0).setPreferredWidth(30); if (theData.infoKnown){ table.getColumnModel().getColumn(1).setPreferredWidth(100); } JScrollPane tableScroller = new JScrollPane(table); add(tableScroller); }
1,110,504
public CheckDataPanel(HaploData hd) throws IOException, PedFileException{ STATUS_COL = 8; setLayout(new BoxLayout(this,BoxLayout.Y_AXIS)); pedfile = hd.getPedFile(); theData = hd; Vector result = pedfile.getResults(); int numResults = result.size(); Vector tableColumnNames = new Vector(); tableColumnNames.add("#"); if (theData.infoKnown){ tableColumnNames.add("Name"); tableColumnNames.add("Position"); STATUS_COL += 2; } tableColumnNames.add("ObsHET"); tableColumnNames.add("PredHET"); tableColumnNames.add("HWpval"); tableColumnNames.add("%Geno"); tableColumnNames.add("FamTrio"); tableColumnNames.add("MendErr"); tableColumnNames.add("MAF"); tableColumnNames.add("Rating"); Vector tableData = new Vector(); int[] markerRatings = new int[numResults]; for (int i = 0; i < numResults; i++){ Vector tempVect = new Vector(); MarkerResult currentResult = (MarkerResult)result.get(i); tempVect.add(new Integer(i+1)); if (theData.infoKnown){ tempVect.add(Chromosome.getMarker(i).getName()); tempVect.add(new Long(Chromosome.getMarker(i).getPosition())); } tempVect.add(new Double(currentResult.getObsHet())); tempVect.add(new Double(currentResult.getPredHet())); tempVect.add(new Double(currentResult.getHWpvalue())); tempVect.add(new Double(currentResult.getGenoPercent())); tempVect.add(new Integer(currentResult.getFamTrioNum())); tempVect.add(new Integer(currentResult.getMendErrNum())); tempVect.add(new Double(currentResult.getMAF())); if (currentResult.getRating() > 0){ tempVect.add(new Boolean(true)); }else{ tempVect.add(new Boolean(false)); } //this value is never displayed, just kept for bookkeeping markerRatings[i] = currentResult.getRating(); tableData.add(tempVect.clone()); } final CheckDataTableModel tableModel = new CheckDataTableModel(tableColumnNames, tableData, markerRatings); tableModel.addTableModelListener(this); table = new JTable(tableModel); final CheckDataCellRenderer renderer = new CheckDataCellRenderer(); try{ 
table.setDefaultRenderer(Class.forName("java.lang.Double"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Integer"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Long"), renderer); }catch (Exception e){ } table.getColumnModel().getColumn(0).setPreferredWidth(30); if (theData.infoKnown){ table.getColumnModel().getColumn(1).setPreferredWidth(100); } JScrollPane tableScroller = new JScrollPane(table); add(tableScroller); }
public CheckDataPanel(HaploData hd) throws IOException, PedFileException{ STATUS_COL = 8; setLayout(new BoxLayout(this,BoxLayout.Y_AXIS)); pedfile = hd.getPedFile(); theData = hd; Vector result = pedfile.getResults(); int numResults = result.size(); Vector tableColumnNames = new Vector(); tableColumnNames.add("#"); if (theData.infoKnown){ tableColumnNames.add("Name"); tableColumnNames.add("Position"); STATUS_COL += 2; } tableColumnNames.add("ObsHET"); tableColumnNames.add("PredHET"); tableColumnNames.add("HWpval"); tableColumnNames.add("%Geno"); tableColumnNames.add("FamTrio"); tableColumnNames.add("MendErr"); tableColumnNames.add("MAF"); tableColumnNames.add("Rating"); Vector tableData = new Vector(); int[] markerRatings = new int[numResults]; for (int i = 0; i < numResults; i++){ Vector tempVect = new Vector(); MarkerResult currentResult = (MarkerResult)result.get(i); tempVect.add(new Integer(i+1)); if (theData.infoKnown){ tempVect.add(Chromosome.getMarker(i).getName()); tempVect.add(new Long(Chromosome.getMarker(i).getPosition())); } tempVect.add(new Double(currentResult.getObsHet())); tempVect.add(new Double(currentResult.getPredHet())); tempVect.add(new Double(currentResult.getHWpvalue())); tempVect.add(new Double(currentResult.getGenoPercent())); tempVect.add(new Integer(currentResult.getFamTrioNum())); tempVect.add(new Integer(currentResult.getMendErrNum())); tempVect.add(new Double(currentResult.getMAF())); if (currentResult.getRating() > 0){ tempVect.add(new Boolean(true)); }else{ tempVect.add(new Boolean(false)); } //this value is never displayed, just kept for bookkeeping markerRatings[i] = currentResult.getRating(); tableData.add(tempVect.clone()); } final CheckDataTableModel tableModel = new CheckDataTableModel(tableColumnNames, tableData, markerRatings); tableModel.addTableModelListener(this); table = new JTable(tableModel); final CheckDataCellRenderer renderer = new CheckDataCellRenderer(); try{ 
table.setDefaultRenderer(Class.forName("java.lang.Double"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Integer"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Long"), renderer); }catch (Exception e){ } table.getColumnModel().getColumn(0).setPreferredWidth(30); if (theData.infoKnown){ table.getColumnModel().getColumn(1).setPreferredWidth(100); } JScrollPane tableScroller = new JScrollPane(table); add(tableScroller); }
1,110,505
public void printTable(File outfile) throws IOException{ FileWriter checkWriter = null; if (outfile != null){ checkWriter = new FileWriter(outfile); } int numCols = table.getColumnCount(); StringBuffer header = new StringBuffer(); for (int i = 0; i < numCols; i++){ header.append(table.getColumnName(i)).append("\t"); } header.append("\n"); if (outfile != null){ checkWriter.write(header.toString()); }else{ System.out.print(header.toString()); } for (int i = 0; i < table.getRowCount(); i++){ StringBuffer sb = new StringBuffer(); //don't print the true/false vals in last column for (int j = 0; j < numCols-1; j++){ sb.append(table.getValueAt(i,j)).append("\t"); } //print BAD if last column is false if (((Boolean)table.getValueAt(i, numCols-1)).booleanValue()){ sb.append("\n"); }else{ sb.append("BAD\n"); } if (outfile != null){ checkWriter.write(sb.toString()); }else{ System.out.print(sb.toString()); } } if (outfile != null){ checkWriter.close(); } }
public void printTable(File outfile) throws IOException{ FileWriter checkWriter = null; if (outfile != null){ checkWriter = new FileWriter(outfile); } int numCols = tableModel.getColumnCount(); StringBuffer header = new StringBuffer(); for (int i = 0; i < numCols; i++){ header.append(table.getColumnName(i)).append("\t"); } header.append("\n"); if (outfile != null){ checkWriter.write(header.toString()); }else{ System.out.print(header.toString()); } for (int i = 0; i < table.getRowCount(); i++){ StringBuffer sb = new StringBuffer(); //don't print the true/false vals in last column for (int j = 0; j < numCols-1; j++){ sb.append(table.getValueAt(i,j)).append("\t"); } //print BAD if last column is false if (((Boolean)table.getValueAt(i, numCols-1)).booleanValue()){ sb.append("\n"); }else{ sb.append("BAD\n"); } if (outfile != null){ checkWriter.write(sb.toString()); }else{ System.out.print(sb.toString()); } } if (outfile != null){ checkWriter.close(); } }
1,110,506
public void printTable(File outfile) throws IOException{ FileWriter checkWriter = null; if (outfile != null){ checkWriter = new FileWriter(outfile); } int numCols = table.getColumnCount(); StringBuffer header = new StringBuffer(); for (int i = 0; i < numCols; i++){ header.append(table.getColumnName(i)).append("\t"); } header.append("\n"); if (outfile != null){ checkWriter.write(header.toString()); }else{ System.out.print(header.toString()); } for (int i = 0; i < table.getRowCount(); i++){ StringBuffer sb = new StringBuffer(); //don't print the true/false vals in last column for (int j = 0; j < numCols-1; j++){ sb.append(table.getValueAt(i,j)).append("\t"); } //print BAD if last column is false if (((Boolean)table.getValueAt(i, numCols-1)).booleanValue()){ sb.append("\n"); }else{ sb.append("BAD\n"); } if (outfile != null){ checkWriter.write(sb.toString()); }else{ System.out.print(sb.toString()); } } if (outfile != null){ checkWriter.close(); } }
public void printTable(File outfile) throws IOException{ FileWriter checkWriter = null; if (outfile != null){ checkWriter = new FileWriter(outfile); } int numCols = table.getColumnCount(); StringBuffer header = new StringBuffer(); for (int i = 0; i < numCols; i++){ header.append(tableModel.getColumnName(i)).append("\t"); } header.append("\n"); if (outfile != null){ checkWriter.write(header.toString()); }else{ System.out.print(header.toString()); } for (int i = 0; i < table.getRowCount(); i++){ StringBuffer sb = new StringBuffer(); //don't print the true/false vals in last column for (int j = 0; j < numCols-1; j++){ sb.append(table.getValueAt(i,j)).append("\t"); } //print BAD if last column is false if (((Boolean)table.getValueAt(i, numCols-1)).booleanValue()){ sb.append("\n"); }else{ sb.append("BAD\n"); } if (outfile != null){ checkWriter.write(sb.toString()); }else{ System.out.print(sb.toString()); } } if (outfile != null){ checkWriter.close(); } }
1,110,507
public void printTable(File outfile) throws IOException{ FileWriter checkWriter = null; if (outfile != null){ checkWriter = new FileWriter(outfile); } int numCols = table.getColumnCount(); StringBuffer header = new StringBuffer(); for (int i = 0; i < numCols; i++){ header.append(table.getColumnName(i)).append("\t"); } header.append("\n"); if (outfile != null){ checkWriter.write(header.toString()); }else{ System.out.print(header.toString()); } for (int i = 0; i < table.getRowCount(); i++){ StringBuffer sb = new StringBuffer(); //don't print the true/false vals in last column for (int j = 0; j < numCols-1; j++){ sb.append(table.getValueAt(i,j)).append("\t"); } //print BAD if last column is false if (((Boolean)table.getValueAt(i, numCols-1)).booleanValue()){ sb.append("\n"); }else{ sb.append("BAD\n"); } if (outfile != null){ checkWriter.write(sb.toString()); }else{ System.out.print(sb.toString()); } } if (outfile != null){ checkWriter.close(); } }
public void printTable(File outfile) throws IOException{ FileWriter checkWriter = null; if (outfile != null){ checkWriter = new FileWriter(outfile); } int numCols = table.getColumnCount(); StringBuffer header = new StringBuffer(); for (int i = 0; i < numCols; i++){ header.append(table.getColumnName(i)).append("\t"); } header.append("\n"); if (outfile != null){ checkWriter.write(header.toString()); }else{ System.out.print(header.toString()); } for (int i = 0; i < tableModel.getRowCount(); i++){ StringBuffer sb = new StringBuffer(); //don't print the true/false vals in last column for (int j = 0; j < numCols-1; j++){ sb.append(table.getValueAt(i,j)).append("\t"); } //print BAD if last column is false if (((Boolean)table.getValueAt(i, numCols-1)).booleanValue()){ sb.append("\n"); }else{ sb.append("BAD\n"); } if (outfile != null){ checkWriter.write(sb.toString()); }else{ System.out.print(sb.toString()); } } if (outfile != null){ checkWriter.close(); } }
1,110,508
public void printTable(File outfile) throws IOException{ FileWriter checkWriter = null; if (outfile != null){ checkWriter = new FileWriter(outfile); } int numCols = table.getColumnCount(); StringBuffer header = new StringBuffer(); for (int i = 0; i < numCols; i++){ header.append(table.getColumnName(i)).append("\t"); } header.append("\n"); if (outfile != null){ checkWriter.write(header.toString()); }else{ System.out.print(header.toString()); } for (int i = 0; i < table.getRowCount(); i++){ StringBuffer sb = new StringBuffer(); //don't print the true/false vals in last column for (int j = 0; j < numCols-1; j++){ sb.append(table.getValueAt(i,j)).append("\t"); } //print BAD if last column is false if (((Boolean)table.getValueAt(i, numCols-1)).booleanValue()){ sb.append("\n"); }else{ sb.append("BAD\n"); } if (outfile != null){ checkWriter.write(sb.toString()); }else{ System.out.print(sb.toString()); } } if (outfile != null){ checkWriter.close(); } }
public void printTable(File outfile) throws IOException{ FileWriter checkWriter = null; if (outfile != null){ checkWriter = new FileWriter(outfile); } int numCols = table.getColumnCount(); StringBuffer header = new StringBuffer(); for (int i = 0; i < numCols; i++){ header.append(table.getColumnName(i)).append("\t"); } header.append("\n"); if (outfile != null){ checkWriter.write(header.toString()); }else{ System.out.print(header.toString()); } for (int i = 0; i < table.getRowCount(); i++){ StringBuffer sb = new StringBuffer(); //don't print the true/false vals in last column for (int j = 0; j < numCols-1; j++){ sb.append(tableModel.getValueAt(i,j)).append("\t"); } //print BAD if last column is false if (((Boolean)table.getValueAt(i, numCols-1)).booleanValue()){ sb.append("\n"); }else{ sb.append("BAD\n"); } if (outfile != null){ checkWriter.write(sb.toString()); }else{ System.out.print(sb.toString()); } } if (outfile != null){ checkWriter.close(); } }
1,110,509
public void printTable(File outfile) throws IOException{ FileWriter checkWriter = null; if (outfile != null){ checkWriter = new FileWriter(outfile); } int numCols = table.getColumnCount(); StringBuffer header = new StringBuffer(); for (int i = 0; i < numCols; i++){ header.append(table.getColumnName(i)).append("\t"); } header.append("\n"); if (outfile != null){ checkWriter.write(header.toString()); }else{ System.out.print(header.toString()); } for (int i = 0; i < table.getRowCount(); i++){ StringBuffer sb = new StringBuffer(); //don't print the true/false vals in last column for (int j = 0; j < numCols-1; j++){ sb.append(table.getValueAt(i,j)).append("\t"); } //print BAD if last column is false if (((Boolean)table.getValueAt(i, numCols-1)).booleanValue()){ sb.append("\n"); }else{ sb.append("BAD\n"); } if (outfile != null){ checkWriter.write(sb.toString()); }else{ System.out.print(sb.toString()); } } if (outfile != null){ checkWriter.close(); } }
public void printTable(File outfile) throws IOException{ FileWriter checkWriter = null; if (outfile != null){ checkWriter = new FileWriter(outfile); } int numCols = table.getColumnCount(); StringBuffer header = new StringBuffer(); for (int i = 0; i < numCols; i++){ header.append(table.getColumnName(i)).append("\t"); } header.append("\n"); if (outfile != null){ checkWriter.write(header.toString()); }else{ System.out.print(header.toString()); } for (int i = 0; i < table.getRowCount(); i++){ StringBuffer sb = new StringBuffer(); //don't print the true/false vals in last column for (int j = 0; j < numCols-1; j++){ sb.append(table.getValueAt(i,j)).append("\t"); } //print BAD if last column is false if (((Boolean)tableModel.getValueAt(i, numCols-1)).booleanValue()){ sb.append("\n"); }else{ sb.append("BAD\n"); } if (outfile != null){ checkWriter.write(sb.toString()); }else{ System.out.print(sb.toString()); } } if (outfile != null){ checkWriter.close(); } }
1,110,510
public void actionPerformed(ActionEvent evt) { Selectable invoker = pp.getSelection(); if (invoker instanceof TablePane) { TablePane tp = (TablePane) invoker; int idx = tp.getSelectedColumnIndex(); try { if (idx < 0) idx = tp.getModel().getChildCount(); } catch (ArchitectException e) { idx = 0; } tp.getModel().addChild(idx, new SQLColumn(tp.getModel(), "new column", Types.INTEGER, "Integer", 10, 0, DatabaseMetaData.columnNullable, null, null, null, false)); } else { JOptionPane.showMessageDialog((JComponent) invoker, "The selected item type is not recognised"); } }
public void actionPerformed(ActionEvent evt) { Selectable invoker = pp.getSelection(); if (invoker instanceof TablePane) { TablePane tp = (TablePane) invoker; int idx = tp.getSelectedColumnIndex(); try { if (idx < 0) idx = tp.getModel().getColumnsFolder().getChildCount(); } catch (ArchitectException e) { idx = 0; } tp.getModel().addChild(idx, new SQLColumn(tp.getModel(), "new column", Types.INTEGER, "Integer", 10, 0, DatabaseMetaData.columnNullable, null, null, null, false)); } else { JOptionPane.showMessageDialog((JComponent) invoker, "The selected item type is not recognised"); } }
1,110,511
public void actionPerformed(ActionEvent evt) { Selectable invoker = pp.getSelection(); if (invoker instanceof TablePane) { TablePane tp = (TablePane) invoker; int idx = tp.getSelectedColumnIndex(); try { if (idx < 0) idx = tp.getModel().getChildCount(); } catch (ArchitectException e) { idx = 0; } tp.getModel().addChild(idx, new SQLColumn(tp.getModel(), "new column", Types.INTEGER, "Integer", 10, 0, DatabaseMetaData.columnNullable, null, null, null, false)); } else { JOptionPane.showMessageDialog((JComponent) invoker, "The selected item type is not recognised"); } }
public void actionPerformed(ActionEvent evt) { Selectable invoker = pp.getSelection(); if (invoker instanceof TablePane) { TablePane tp = (TablePane) invoker; int idx = tp.getSelectedColumnIndex(); try { if (idx < 0) idx = tp.getModel().getChildCount(); } catch (ArchitectException e) { idx = 0; } tp.getModel().addChild(idx, new SQLColumn(tp.getModel(), "new column", Types.INTEGER, "Integer", 10, 0, DatabaseMetaData.columnNullable, null, null, null, false)); } else { JOptionPane.showMessageDialog((JComponent) invoker, "The selected item type is not recognised"); } }
1,110,512
public static PlayPenComponentUI createUI(JComponent c) { logger.debug("Creating new IERelationshipUI for "+c); return new IERelationshipUI(); }
public static PlayPenComponentUI createUI(PlayPenComponent c) { logger.debug("Creating new IERelationshipUI for "+c); return new IERelationshipUI(); }
1,110,513
public void tallyCCInd(byte a1, byte a2, int cc){ //case = 0, control = 1 for int cc if (a1 >= 5 && a2 >= 5){ counts[cc][0]++; counts[cc][1]++; if (allele1 == 0){ allele1 = (byte)(a1 - 4); allele2 = (byte)(a2 - 4); } }else{ //seed the alleles as soon as they're found if (allele1 == 0){ allele1 = a1; if (a1 != a2){ allele2 = a2; } }else if (allele2 == 0){ if (a1 != allele1){ allele2 = a1; }else if (a2 != allele1){ allele2 = a2; } } if (a1 != 0){ if (a1 == allele1){ counts[cc][0] ++; }else{ counts[cc][1] ++; } } if (a2 != 0){ if (a2 == allele1){ counts[cc][0]++; }else{ counts[cc][1]++; } } } }
public void tallyCCInd(byte[] a, int cc){ if (cc == 2) cc = 0; byte a1 = a[0]; byte a2 = a[1]; //case = 0, control = 1 for int cc if (a1 >= 5 && a2 >= 5){ counts[cc][0]++; counts[cc][1]++; if (allele1 == 0){ allele1 = (byte)(a1 - 4); allele2 = (byte)(a2 - 4); } }else{ //seed the alleles as soon as they're found if (allele1 == 0){ allele1 = a1; if (a1 != a2){ allele2 = a2; } }else if (allele2 == 0){ if (a1 != allele1){ allele2 = a1; }else if (a2 != allele1){ allele2 = a2; } } if (a1 != 0){ if (a1 == allele1){ counts[cc][0] ++; }else{ counts[cc][1] ++; } } if (a2 != 0){ if (a2 == allele1){ counts[cc][0]++; }else{ counts[cc][1]++; } } } }
1,110,516
protected static void addTablesToDatabase(SQLDatabase addTo) throws SQLException, ArchitectException { HashMap catalogs = new HashMap(); HashMap schemas = new HashMap(); synchronized (addTo) { Connection con = addTo.getConnection(); DatabaseMetaData dbmd = con.getMetaData(); ResultSet mdTables = null; try { mdTables = dbmd.getTables(null, null, "%", new String[] {"SYSTEM TABLE", "TABLE", "VIEW"}); while (mdTables.next()) { SQLObject tableParent = addTo; String catName = mdTables.getString(1); SQLCatalog cat = null; if (catName != null) { cat = (SQLCatalog) catalogs.get(catName); if (cat == null) { cat = new SQLCatalog(addTo, catName); addTo.children.add(cat); catalogs.put(catName, cat); } tableParent = cat; } String schName = mdTables.getString(2); SQLSchema schema = null; if (schName != null) { schema = (SQLSchema) schemas.get(catName+"."+schName); if (schema == null) { if (cat == null) { schema = new SQLSchema(addTo, schName); addTo.children.add(schema); } else { schema = new SQLSchema(cat, schName); cat.children.add(schema); } schemas.put(catName+"."+schName, schema); } tableParent = schema; } tableParent.children.add(new SQLTable(addTo, tableParent, cat, schema, mdTables.getString(3), mdTables.getString(5), mdTables.getString(4) )); } } finally { if (mdTables != null) mdTables.close(); } } }
protected static void addTablesToDatabase(SQLDatabase addTo) throws SQLException, ArchitectException { HashMap catalogs = new HashMap(); HashMap schemas = new HashMap(); synchronized (addTo) { Connection con = addTo.getConnection(); DatabaseMetaData dbmd = con.getMetaData(); ResultSet mdTables = null; try { mdTables = dbmd.getTables(null, null, "%", new String[] {"TABLE", "VIEW"}); while (mdTables.next()) { SQLObject tableParent = addTo; String catName = mdTables.getString(1); SQLCatalog cat = null; if (catName != null) { cat = (SQLCatalog) catalogs.get(catName); if (cat == null) { cat = new SQLCatalog(addTo, catName); addTo.children.add(cat); catalogs.put(catName, cat); } tableParent = cat; } String schName = mdTables.getString(2); SQLSchema schema = null; if (schName != null) { schema = (SQLSchema) schemas.get(catName+"."+schName); if (schema == null) { if (cat == null) { schema = new SQLSchema(addTo, schName); addTo.children.add(schema); } else { schema = new SQLSchema(cat, schName); cat.children.add(schema); } schemas.put(catName+"."+schName, schema); } tableParent = schema; } tableParent.children.add(new SQLTable(addTo, tableParent, cat, schema, mdTables.getString(3), mdTables.getString(5), mdTables.getString(4) )); } } finally { if (mdTables != null) mdTables.close(); } } }
1,110,518
public void actionPerformed(ActionEvent evt) { // update the progress bar logger.debug("updating progress bar..."); try { Integer jobSize = monitorable.getJobSize(); if (jobSize == null) { bar.setIndeterminate(true); } else { bar.setIndeterminate(false); bar.setMaximum(jobSize.intValue()); } if (label != null) { label.setVisible(true); } bar.setVisible(true); bar.setValue(monitorable.getProgress()); bar.setIndeterminate(false); } catch (ArchitectException e) { logger.error("Couldn't update progress bar (Monitorable threw an exception)", e); } finally { logger.debug("all done, terminating timer thread..."); try { logger.debug("monitorable.isFinished():" + monitorable.isFinished()); if (monitorable.isFinished()) { if (label != null) { label.setVisible(false); } bar.setVisible(false); logger.debug("trying to stop timer thread..."); timer.stop(); logger.debug("did the timer thread stop???"); } } catch (ArchitectException e1) { logger.error("Couldn't tell if Monitorable was finished (it threw an exception)", e1); } } }
public void actionPerformed(ActionEvent evt) { // update the progress bar logger.debug("updating progress bar..."); try { Integer jobSize = monitorable.getJobSize(); if (jobSize == null) { bar.setIndeterminate(true); } else { bar.setIndeterminate(false); bar.setMaximum(jobSize.intValue()); } if (label != null) { label.setVisible(true); } bar.setVisible(true); bar.setValue(monitorable.getProgress()); bar.setIndeterminate(false); } catch (ArchitectException e) { logger.error("Couldn't update progress bar (Monitorable threw an exception)", e); } finally { logger.debug("all done, terminating timer thread..."); try { logger.debug("monitorable.isFinished():" + monitorable.isFinished()); if (monitorable.isFinished()) { if (label != null) { label.setVisible(false); } bar.setVisible(false); logger.debug("trying to stop timer thread..."); timer.stop(); logger.debug("did the timer thread stop???"); } } catch (ArchitectException e1) { logger.error("Couldn't tell if Monitorable was finished (it threw an exception)", e1); } } }
1,110,519
public void actionPerformed(ActionEvent evt) { // update the progress bar logger.debug("updating progress bar..."); try { Integer jobSize = monitorable.getJobSize(); if (jobSize == null) { bar.setIndeterminate(true); } else { bar.setIndeterminate(false); bar.setMaximum(jobSize.intValue()); } if (label != null) { label.setVisible(true); } bar.setVisible(true); bar.setValue(monitorable.getProgress()); bar.setIndeterminate(false); } catch (ArchitectException e) { logger.error("Couldn't update progress bar (Monitorable threw an exception)", e); } finally { logger.debug("all done, terminating timer thread..."); try { logger.debug("monitorable.isFinished():" + monitorable.isFinished()); if (monitorable.isFinished()) { if (label != null) { label.setVisible(false); } bar.setVisible(false); logger.debug("trying to stop timer thread..."); timer.stop(); logger.debug("did the timer thread stop???"); } } catch (ArchitectException e1) { logger.error("Couldn't tell if Monitorable was finished (it threw an exception)", e1); } } }
public void actionPerformed(ActionEvent evt) { // update the progress bar logger.debug("updating progress bar..."); try { Integer jobSize = monitorable.getJobSize(); if (jobSize == null) { bar.setIndeterminate(true); } else { bar.setIndeterminate(false); bar.setMaximum(jobSize.intValue()); } if (label != null) { label.setVisible(true); } bar.setVisible(true); bar.setValue(monitorable.getProgress()); bar.setIndeterminate(false); } catch (ArchitectException e) { logger.error("Couldn't update progress bar (Monitorable threw an exception)", e); } finally { logger.debug("all done, terminating timer thread..."); try { logger.debug("monitorable.isFinished():" + monitorable.isFinished()); if (monitorable.isFinished()) { if (label != null) { label.setVisible(false); } bar.setVisible(false); logger.debug("trying to stop timer thread..."); timer.stop(); logger.debug("did the timer thread stop???"); } } catch (ArchitectException e1) { logger.error("Couldn't tell if Monitorable was finished (it threw an exception)", e1); } } }
1,110,520
public void actionPerformed(ActionEvent evt) { // update the progress bar logger.debug("updating progress bar..."); try { Integer jobSize = monitorable.getJobSize(); if (jobSize == null) { bar.setIndeterminate(true); } else { bar.setIndeterminate(false); bar.setMaximum(jobSize.intValue()); } if (label != null) { label.setVisible(true); } bar.setVisible(true); bar.setValue(monitorable.getProgress()); bar.setIndeterminate(false); } catch (ArchitectException e) { logger.error("Couldn't update progress bar (Monitorable threw an exception)", e); } finally { logger.debug("all done, terminating timer thread..."); try { logger.debug("monitorable.isFinished():" + monitorable.isFinished()); if (monitorable.isFinished()) { if (label != null) { label.setVisible(false); } bar.setVisible(false); logger.debug("trying to stop timer thread..."); timer.stop(); logger.debug("did the timer thread stop???"); } } catch (ArchitectException e1) { logger.error("Couldn't tell if Monitorable was finished (it threw an exception)", e1); } } }
public void actionPerformed(ActionEvent evt) { // update the progress bar logger.debug("updating progress bar..."); try { Integer jobSize = monitorable.getJobSize(); if (jobSize == null) { bar.setIndeterminate(true); } else { bar.setIndeterminate(false); bar.setMaximum(jobSize.intValue()); } if (label != null) { label.setVisible(true); } bar.setVisible(true); bar.setValue(monitorable.getProgress()); bar.setIndeterminate(false); } catch (ArchitectException e) { logger.error("Couldn't update progress bar (Monitorable threw an exception)", e); } finally { try { logger.debug("monitorable.isFinished():" + monitorable.isFinished()); if (monitorable.isFinished()) { if (label != null) { label.setVisible(false); } bar.setVisible(false); logger.debug("trying to stop timer thread..."); timer.stop(); logger.debug("did the timer thread stop???"); } } catch (ArchitectException e1) { logger.error("Couldn't tell if Monitorable was finished (it threw an exception)", e1); } } }
1,110,521
public void actionPerformed(ActionEvent evt) { // update the progress bar logger.debug("updating progress bar..."); try { Integer jobSize = monitorable.getJobSize(); if (jobSize == null) { bar.setIndeterminate(true); } else { bar.setIndeterminate(false); bar.setMaximum(jobSize.intValue()); } if (label != null) { label.setVisible(true); } bar.setVisible(true); bar.setValue(monitorable.getProgress()); bar.setIndeterminate(false); } catch (ArchitectException e) { logger.error("Couldn't update progress bar (Monitorable threw an exception)", e); } finally { logger.debug("all done, terminating timer thread..."); try { logger.debug("monitorable.isFinished():" + monitorable.isFinished()); if (monitorable.isFinished()) { if (label != null) { label.setVisible(false); } bar.setVisible(false); logger.debug("trying to stop timer thread..."); timer.stop(); logger.debug("did the timer thread stop???"); } } catch (ArchitectException e1) { logger.error("Couldn't tell if Monitorable was finished (it threw an exception)", e1); } } }
public void actionPerformed(ActionEvent evt) { // update the progress bar logger.debug("updating progress bar..."); try { Integer jobSize = monitorable.getJobSize(); if (jobSize == null) { bar.setIndeterminate(true); } else { bar.setIndeterminate(false); bar.setMaximum(jobSize.intValue()); } if (label != null) { label.setVisible(true); } bar.setVisible(true); bar.setValue(monitorable.getProgress()); bar.setIndeterminate(false); } catch (ArchitectException e) { logger.error("Couldn't update progress bar (Monitorable threw an exception)", e); } finally { logger.debug("all done, terminating timer thread..."); try { logger.debug("monitorable.isFinished():" + monitorable.isFinished()); if (monitorable.isFinished()) { if (label != null) { label.setVisible(false); } if (bar != null) { bar.setVisible(false); } if (pm != null) { logger.debug("pm done, max was: " + pm.getMaximum()); } logger.debug("trying to stop timer thread..."); timer.stop(); logger.debug("did the timer thread stop???"); } } catch (ArchitectException e1) { logger.error("Couldn't tell if Monitorable was finished (it threw an exception)", e1); } } }
1,110,522
public Tagger(Vector s, Vector include, Vector exclude, AlleleCorrelator ac){ this(s,include,exclude,ac,DEFAULT_RSQ_CUTOFF,AGGRESSIVE_TRIPLE, DEFAULT_MAXDIST, DEFAULT_MAXNUMTAGS); }
public Tagger(Vector s, Vector include, Vector exclude, AlleleCorrelator ac){ this(s,include,exclude,ac,DEFAULT_RSQ_CUTOFF,AGGRESSIVE_TRIPLE, DEFAULT_MAXDIST, DEFAULT_MAXNUMTAGS,true); }
1,110,524
private HashSet addTag(PotentialTag theTag,Hashtable potentialTagHash, Vector sitesToCapture) { Vector potentialTags = new Vector(potentialTagHash.values()); potentialTags.remove(theTag); potentialTagHash.remove(theTag.sequence); //newlyTagged contains alleles which were not tagged by anything in the set of tags before, //and are now tagged by theTag. HashSet newlyTagged = ((PotentialTag)theTag).tagged; TagSequence tagSeq = new TagSequence(theTag.sequence); tags.add(tagSeq); Iterator itr = potentialTagHash.keySet().iterator(); Vector toRemove = new Vector(); //iterate through the list of available tags, and remove the newly tagged alleles from //the list of alleles that each PotentialTag can tag. (since we want to choose our next tag // according to which will tag the most untagged alleles ) while(itr.hasNext()) { PotentialTag pt = (PotentialTag) potentialTagHash.get(itr.next()); pt.removeTagged(newlyTagged); pt.removeTagged(theTag.sequence); //if a PotentialTag cannot tag any other uncaptured sites, then we want to remove it from contention, //unless its sequence still needs to be captured. if(pt.taggedCount() == 0 && !sitesToCapture.contains(pt.sequence)) { toRemove.add(pt.sequence); } } for(int i=0;i<toRemove.size();i++) { potentialTags.remove(potentialTagHash.remove(toRemove.get(i))); } //loop through the list of alleles the newly added tag can capture, and //add them to the TagSequence object. //we add all the alleles the tag can capture, _not_ just the newly tagged alleles. Iterator ptitr = theTag.allTagged.iterator(); while(ptitr.hasNext()) { tagSeq.addTagged((VariantSequence)ptitr.next()); } return newlyTagged; }
private HashSet addTag(PotentialTag theTag,Hashtable potentialTagHash, Vector sitesToCapture) { Vector potentialTags = new Vector(potentialTagHash.values()); potentialTags.remove(theTag); potentialTagHash.remove(theTag.sequence); //newlyTagged contains alleles which were not tagged by anything in the set of tags before, //and are now tagged by theTag. HashSet newlyTagged = ((PotentialTag)theTag).tagged; TagSequence tagSeq = new TagSequence(theTag.sequence); tags.add(tagSeq); Iterator itr = potentialTagHash.keySet().iterator(); Vector toRemove = new Vector(); //iterate through the list of available tags, and remove the newly tagged alleles from //the list of alleles that each PotentialTag can tag. (since we want to choose our next tag // according to which will tag the most untagged alleles ) while(itr.hasNext()) { PotentialTag pt = (PotentialTag) potentialTagHash.get(itr.next()); pt.removeTagged(newlyTagged); //if a PotentialTag cannot tag any other uncaptured sites, then we want to remove it from contention, //unless its sequence still needs to be captured. if(pt.taggedCount() == 0 && !sitesToCapture.contains(pt.sequence)) { toRemove.add(pt.sequence); } } for(int i=0;i<toRemove.size();i++) { potentialTags.remove(potentialTagHash.remove(toRemove.get(i))); } //loop through the list of alleles the newly added tag can capture, and //add them to the TagSequence object. //we add all the alleles the tag can capture, _not_ just the newly tagged alleles. Iterator ptitr = theTag.allTagged.iterator(); while(ptitr.hasNext()) { tagSeq.addTagged((VariantSequence)ptitr.next()); } return newlyTagged; }
1,110,525
private HashSet addTag(PotentialTag theTag,Hashtable potentialTagHash, Vector sitesToCapture) { Vector potentialTags = new Vector(potentialTagHash.values()); potentialTags.remove(theTag); potentialTagHash.remove(theTag.sequence); //newlyTagged contains alleles which were not tagged by anything in the set of tags before, //and are now tagged by theTag. HashSet newlyTagged = ((PotentialTag)theTag).tagged; TagSequence tagSeq = new TagSequence(theTag.sequence); tags.add(tagSeq); Iterator itr = potentialTagHash.keySet().iterator(); Vector toRemove = new Vector(); //iterate through the list of available tags, and remove the newly tagged alleles from //the list of alleles that each PotentialTag can tag. (since we want to choose our next tag // according to which will tag the most untagged alleles ) while(itr.hasNext()) { PotentialTag pt = (PotentialTag) potentialTagHash.get(itr.next()); pt.removeTagged(newlyTagged); pt.removeTagged(theTag.sequence); //if a PotentialTag cannot tag any other uncaptured sites, then we want to remove it from contention, //unless its sequence still needs to be captured. if(pt.taggedCount() == 0 && !sitesToCapture.contains(pt.sequence)) { toRemove.add(pt.sequence); } } for(int i=0;i<toRemove.size();i++) { potentialTags.remove(potentialTagHash.remove(toRemove.get(i))); } //loop through the list of alleles the newly added tag can capture, and //add them to the TagSequence object. //we add all the alleles the tag can capture, _not_ just the newly tagged alleles. Iterator ptitr = theTag.allTagged.iterator(); while(ptitr.hasNext()) { tagSeq.addTagged((VariantSequence)ptitr.next()); } return newlyTagged; }
private HashSet addTag(PotentialTag theTag,Hashtable potentialTagHash, Vector sitesToCapture) { Vector potentialTags = new Vector(potentialTagHash.values()); potentialTags.remove(theTag); potentialTagHash.remove(theTag.sequence); //newlyTagged contains alleles which were not tagged by anything in the set of tags before, //and are now tagged by theTag. HashSet newlyTagged = ((PotentialTag)theTag).tagged; TagSequence tagSeq = new TagSequence(theTag.sequence); tags.add(tagSeq); Iterator itr = potentialTagHash.keySet().iterator(); Vector toRemove = new Vector(); //iterate through the list of available tags, and remove the newly tagged alleles from //the list of alleles that each PotentialTag can tag. (since we want to choose our next tag // according to which will tag the most untagged alleles ) while(itr.hasNext()) { PotentialTag pt = (PotentialTag) potentialTagHash.get(itr.next()); pt.removeTagged(newlyTagged); pt.removeTagged(theTag.sequence); //if a PotentialTag cannot tag any other uncaptured sites, then we want to remove it from contention, //unless its sequence still needs to be captured. if(pt.taggedCount() == 0){ toRemove.add(pt.sequence); } } for(int i=0;i<toRemove.size();i++) { potentialTags.remove(potentialTagHash.remove(toRemove.get(i))); } //loop through the list of alleles the newly added tag can capture, and //add them to the TagSequence object. //we add all the alleles the tag can capture, _not_ just the newly tagged alleles. Iterator ptitr = theTag.allTagged.iterator(); while(ptitr.hasNext()) { tagSeq.addTagged((VariantSequence)ptitr.next()); } return newlyTagged; }
1,110,526
public Date getCreateTime() { return createTime; }
public Date getCreateTime() { return createTime != null ? (Date) createTime.clone() : null; }
1,110,527
public void run(Context context, XMLOutput output) throws Exception { for ( int i = 0, size = scripts.length; i < size; i++ ) { Script script = scripts[i]; script.run( context, output ); } }
public void run(JellyContext context, XMLOutput output) throws Exception { for ( int i = 0, size = scripts.length; i < size; i++ ) { Script script = scripts[i]; script.run( context, output ); } }
1,110,528
public void doTag(final XMLOutput output) throws JellyTagException { if (file == null && uri == null) { throw new JellyTagException("This tag must define a 'file' or 'uri' attribute"); } InputStream is = null; if (file != null) { File f = new File(file); if (!f.exists()) { throw new JellyTagException("file: " + file + " does not exist!"); } try { is = new FileInputStream(f); } catch (FileNotFoundException e) { throw new JellyTagException(e); } } else { is = context.getResourceAsStream(uri); if (is == null) { throw new JellyTagException( "Could not find: " + uri ); } } Properties props = new Properties(); try { props.load(is); } catch (IOException e) { throw new JellyTagException("properties tag could not load from file",e); } if (var != null) { context.setVariable(var, props); } else { Enumeration enum = props.propertyNames(); while (enum.hasMoreElements()) { String key = (String) enum.nextElement(); String value = props.getProperty(key); // @todo we should parse the value in case its an Expression context.setVariable(key, value); } }
public void doTag(final XMLOutput output) throws JellyTagException { if (file == null && uri == null) { throw new JellyTagException("This tag must define a 'file' or 'uri' attribute"); } InputStream is = null; if (file != null) { File f = new File(file); if (!f.exists()) { throw new JellyTagException("file: " + file + " does not exist!"); } try { is = new FileInputStream(f); } catch (FileNotFoundException e) { throw new JellyTagException(e); } } else { is = context.getResourceAsStream(uri); if (is == null) { throw new JellyTagException( "Could not find: " + uri ); } } Properties props = new Properties(); try { props.load(is); } catch (IOException e) { throw new JellyTagException("properties tag could not load from file",e); } if (var != null) { context.setVariable(var, props); } else { Enumeration enum = props.propertyNames(); while (enum.hasMoreElements()) { String key = (String) enum.nextElement(); String value = props.getProperty(key); // @todo we should parse the value in case its an Expression context.setVariable(key, value); } }
1,110,529
/**
 * Gathers per-marker metadata (name, position, optional third-column extra
 * data) either from a whitespace-delimited info file or from hapmap-derived
 * {name, position} pairs; de-duplicates repeated marker names; sorts markers
 * (and, in lockstep, every chromosome's genotypes and the pedfile results and
 * individuals) by position when the info file is out of order; then, in the
 * finally block, computes each marker's major/minor alleles, MAF, duplicate-
 * position flags and bad-genotype rate, storing SNP objects in Chromosome.markers.
 *
 * @param infile        info file of "markername markerposition [extra]" rows; may be null
 * @param hapmapGoodies per-marker {name, position} pairs from a hapmap load; may be null
 * @throws IOException        if the info file cannot be read
 * @throws HaploViewException on empty/malformed/out-of-order info data or a
 *         marker-count mismatch against the loaded genotype data
 */
void prepareMarkerInput(File infile, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists //An optional third column is supported which is designed to hold //association study data. If there is a third column there will be //a visual indicator in the D' display that there is additional data //and the detailed data can be viewed with a mouse press. Vector names = new Vector(); HashSet dupCheck = new HashSet(); Vector positions = new Vector(); Vector extras = new Vector(); dupsToBeFlagged = false; dupNames = false; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); String extra = null; if (st.hasMoreTokens()) extra = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." 
+ "\n Info file must be of format: <markername> <markerposition>"); } //basically if anyone is crazy enough to load a dataset, then go back and load //an out-of-order info file we tell them to bugger off and start over. if (loc < prevloc && Chromosome.markers != null){ throw new HaploViewException("Info file out of order with preloaded dataset:\n"+ name + "\nPlease reload data file and info file together."); } prevloc = loc; if (names.contains(name)){ dupCheck.add(name); } names.add(name); positions.add(l); extras.add(extra); } //check for duplicate names Iterator ditr = dupCheck.iterator(); while (ditr.hasNext()){ String n = (String) ditr.next(); int numdups = 1; for (int i = 0; i < names.size(); i++){ if (names.get(i).equals(n)){ //leave the first instance of the duplicate name the same if (numdups > 1){ String newName = n + "." + numdups; while (names.contains(newName)){ numdups++; newName = n + "." + numdups; } names.setElementAt(newName,i); dupNames = true; } numdups++; } } } if (lineCount > Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file compared to data file.")); } if (lineCount < Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file compared to data file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); extras.add(null); } infoKnown = true; } else if (infile != null){ //we only sort if we read the info from an info file. if //it is from a hapmap file, then the markers were already sorted //when they were read in (in class Pedfile). 
int numLines = names.size(); class SortingHelper implements Comparable{ long pos; int orderInFile; public SortingHelper(long pos, int order){ this.pos = pos; this.orderInFile = order; } public int compareTo(Object o) { SortingHelper sh = (SortingHelper)o; if (sh.pos > pos){ return -1; }else if (sh.pos < pos){ return 1; }else{ return 0; } } } boolean needSort = false; Vector sortHelpers = new Vector(); for (int k = 0; k < (numLines); k++){ sortHelpers.add(new SortingHelper(Long.parseLong((String)positions.get(k)),k)); } //loop through and check if any markers are out of order for (int k = 1; k < (numLines); k++){ if(((SortingHelper)sortHelpers.get(k)).compareTo(sortHelpers.get(k-1)) < 0) { needSort = true; break; } } //if any were out of order, then we need to put them in order if(needSort) { //sort the positions Collections.sort(sortHelpers); Vector newNames = new Vector(); Vector newExtras = new Vector(); Vector newPositions = new Vector(); int[] realPos = new int[numLines]; //reorder the vectors names and extras so that they have the same order as the sorted markers for (int i = 0; i < sortHelpers.size(); i++){ realPos[i] = ((SortingHelper)sortHelpers.get(i)).orderInFile; newNames.add(names.get(realPos[i])); newPositions.add(positions.get(realPos[i])); newExtras.add(extras.get(realPos[i])); } names = newNames; extras = newExtras; positions = newPositions; byte[] tempGenotype = new byte[sortHelpers.size()]; //now we reorder all the individuals genotypes according to the sorted marker order for(int j=0;j<chromosomes.size();j++){ Chromosome tempChrom = (Chromosome)chromosomes.elementAt(j); for(int i =0;i<sortHelpers.size();i++){ tempGenotype[i] = tempChrom.getUnfilteredGenotype(realPos[i]); } for(int i=0;i<sortHelpers.size();i++){ tempChrom.setGenotype(tempGenotype[i],i); } } //sort pedfile objects //todo: this should really be done before pedfile is subjected to any processing. 
//todo: that would require altering some order of operations in dealing with inputs Vector unsortedRes = pedFile.getResults(); Vector sortedRes = new Vector(); for (int i = 0; i < realPos.length; i++){ sortedRes.add(unsortedRes.elementAt(realPos[i])); } pedFile.setResults(sortedRes); Vector o = pedFile.getAllIndividuals(); for (int i = 0; i < o.size(); i++){ Individual ind = (Individual) o.get(i); Vector unsortedMarkers = ind.getMarkers(); Vector sortedMarkers = new Vector(); for (int j = 0; j < unsortedMarkers.size(); j++){ sortedMarkers.add(unsortedMarkers.elementAt(realPos[j])); } ind.setMarkers(sortedMarkers); } } } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getUnfilteredSize()]; percentBadGenotypes = new double[Chromosome.getUnfilteredSize()]; Vector results = null; if (pedFile != null){ results = pedFile.getResults(); } long prevPosition = Long.MIN_VALUE; SNP prevMarker = null; MarkerResult pmr = null; for (int i = 0; i < Chromosome.getUnfilteredSize(); i++){ MarkerResult mr = null; if (results != null){ mr = (MarkerResult)results.elementAt(i); } //to compute minor/major alleles, browse chrom list and count instances of each allele byte a1 = 0; byte a2 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getUnfilteredGenotype(i); if (!(thisAllele == 0)){ if (thisAllele >= 5){ numa1+=0.5; numa2+=0.5; if (thisAllele < 9){ if (a1==0){ a1 = (byte)(thisAllele-4); }else if (a2 == 0){ if (!(thisAllele-4 == a1)){ a2 = (byte)(thisAllele-4); } } } }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; a2 = thisAllele; } } else { numBadGenotypes[i]++; } } if (numa2 > numa1){ byte temp = a1; double tempnum = numa1; numa1 = numa2; a1 = a2; numa2 = 
tempnum; a2 = temp; } double maf; if (mr != null){ maf = Math.rint(mr.getMAF()*100.0)/100.0; }else{ maf = Math.rint(100.0*(numa2/(numa1+numa2)))/100.0; } if (infoKnown){ long pos = Long.parseLong((String)positions.elementAt(i)); SNP thisMarker = (new SNP((String)names.elementAt(i), pos, maf, a1, a2, (String)extras.elementAt(i))); markerInfo.add(thisMarker); if (mr != null){ double genoPC = mr.getGenoPercent(); //check to make sure adjacent SNPs do not have identical positions if (prevPosition != Long.MIN_VALUE){ //only do this for markers 2..N, since we're comparing to the previous location if (pos == prevPosition){ dupsToBeFlagged = true; if (genoPC >= pmr.getGenoPercent()){ //use this one because it has more genotypes thisMarker.setDup(1); prevMarker.setDup(2); }else{ //use the other one because it has more genotypes thisMarker.setDup(2); prevMarker.setDup(1); } } } prevPosition = pos; prevMarker = thisMarker; pmr = mr; } }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), maf,a1,a2)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo; } }
/**
 * Gathers per-marker metadata (name, position, optional third-column extra
 * data) either from a whitespace-delimited info file or from hapmap-derived
 * {name, position} pairs; de-duplicates repeated marker names; sorts markers
 * (and, in lockstep, every chromosome's genotypes and the pedfile results and
 * individuals) by position when the info file is out of order; then, in the
 * finally block, computes each marker's major/minor alleles, MAF, duplicate-
 * position flags and bad-genotype rate, storing SNP objects in Chromosome.markers.
 *
 * @param infile        info file of "markername markerposition [extra]" rows; may be null
 * @param hapmapGoodies per-marker {name, position} pairs from a hapmap load; may be null
 * @throws IOException        if the info file cannot be read
 * @throws HaploViewException on empty/malformed/out-of-order info data or a
 *         marker-count mismatch against the loaded genotype data
 */
void prepareMarkerInput(File infile, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists //An optional third column is supported which is designed to hold //association study data. If there is a third column there will be //a visual indicator in the D' display that there is additional data //and the detailed data can be viewed with a mouse press. Vector names = new Vector(); HashSet dupCheck = new HashSet(); Vector positions = new Vector(); Vector extras = new Vector(); dupsToBeFlagged = false; dupNames = false; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); String extra = null; if (st.hasMoreTokens()) extra = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." 
+ "\n Info file must be of format: <markername> <markerposition>"); } //basically if anyone is crazy enough to load a dataset, then go back and load //an out-of-order info file we tell them to bugger off and start over. if (loc < prevloc && Chromosome.markers != null){ throw new HaploViewException("Info file out of order with preloaded dataset:\n"+ name + "\nPlease reload data file and info file together."); } prevloc = loc; if (names.contains(name)){ dupCheck.add(name); } names.add(name); positions.add(l); extras.add(extra); } //check for duplicate names Iterator ditr = dupCheck.iterator(); while (ditr.hasNext()){ String n = (String) ditr.next(); int numdups = 1; for (int i = 0; i < names.size(); i++){ if (names.get(i).equals(n)){ //leave the first instance of the duplicate name the same if (numdups > 1){ String newName = n + "." + numdups; while (names.contains(newName)){ numdups++; newName = n + "." + numdups; } names.setElementAt(newName,i); dupNames = true; } numdups++; } } } if (lineCount > Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file compared to data file.")); } if (lineCount < Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file compared to data file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); extras.add(null); } infoKnown = true; } else if (infile != null){ //we only sort if we read the info from an info file. if //it is from a hapmap file, then the markers were already sorted //when they were read in (in class Pedfile). 
int numLines = names.size(); class SortingHelper implements Comparable{ long pos; int orderInFile; public SortingHelper(long pos, int order){ this.pos = pos; this.orderInFile = order; } public int compareTo(Object o) { SortingHelper sh = (SortingHelper)o; if (sh.pos > pos){ return -1; }else if (sh.pos < pos){ return 1; }else{ return 0; } } } boolean needSort = false; Vector sortHelpers = new Vector(); for (int k = 0; k < (numLines); k++){ sortHelpers.add(new SortingHelper(Long.parseLong((String)positions.get(k)),k)); } //loop through and check if any markers are out of order for (int k = 1; k < (numLines); k++){ if(((SortingHelper)sortHelpers.get(k)).compareTo(sortHelpers.get(k-1)) < 0) { needSort = true; break; } } //if any were out of order, then we need to put them in order if(needSort) { //sort the positions Collections.sort(sortHelpers); Vector newNames = new Vector(); Vector newExtras = new Vector(); Vector newPositions = new Vector(); int[] realPos = new int[numLines]; //reorder the vectors names and extras so that they have the same order as the sorted markers for (int i = 0; i < sortHelpers.size(); i++){ realPos[i] = ((SortingHelper)sortHelpers.get(i)).orderInFile; newNames.add(names.get(realPos[i])); newPositions.add(positions.get(realPos[i])); newExtras.add(extras.get(realPos[i])); } names = newNames; extras = newExtras; positions = newPositions; byte[] tempGenotype = new byte[sortHelpers.size()]; //now we reorder all the individuals genotypes according to the sorted marker order for(int j=0;j<chromosomes.size();j++){ Chromosome tempChrom = (Chromosome)chromosomes.elementAt(j); for(int i =0;i<sortHelpers.size();i++){ tempGenotype[i] = tempChrom.getUnfilteredGenotype(realPos[i]); } for(int i=0;i<sortHelpers.size();i++){ tempChrom.setGenotype(tempGenotype[i],i); } } //sort pedfile objects //todo: this should really be done before pedfile is subjected to any processing. 
//todo: that would require altering some order of operations in dealing with inputs Vector unsortedRes = pedFile.getResults(); Vector sortedRes = new Vector(); for (int i = 0; i < realPos.length; i++){ sortedRes.add(unsortedRes.elementAt(realPos[i])); } pedFile.setResults(sortedRes); Vector o = pedFile.getAllIndividuals(); for (int i = 0; i < o.size(); i++){ Individual ind = (Individual) o.get(i); Vector unsortedMarkers = ind.getMarkers(); Vector sortedMarkers = new Vector(); for (int j = 0; j < unsortedMarkers.size(); j++){ sortedMarkers.add(unsortedMarkers.elementAt(realPos[j])); } ind.setMarkers(sortedMarkers); } } } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getUnfilteredSize()]; percentBadGenotypes = new double[Chromosome.getUnfilteredSize()]; Vector results = null; if (pedFile != null){ results = pedFile.getResults(); } long prevPosition = Long.MIN_VALUE; SNP prevMarker = null; MarkerResult pmr = null; for (int i = 0; i < Chromosome.getUnfilteredSize(); i++){ MarkerResult mr = null; if (results != null){ mr = (MarkerResult)results.elementAt(i); } //to compute minor/major alleles, browse chrom list and count instances of each allele byte a1 = 0; byte a2 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getUnfilteredGenotype(i); if (!(thisAllele == 0)){ if (thisAllele >= 5){ numa1+=0.5; numa2+=0.5; if (thisAllele < 9){ if (a1==0){ a1 = (byte)(thisAllele-4); }else if (a2 == 0){ if (!(thisAllele-4 == a1)){ a2 = (byte)(thisAllele-4); } } } }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; a2 = thisAllele; } } else { numBadGenotypes[i]++; } } if (numa2 > numa1){ byte temp = a1; double tempnum = numa1; numa1 = numa2; a1 = a2; numa2 = 
tempnum; a2 = temp; } double maf; if (mr != null){ maf = Math.rint(mr.getMAF()*100.0)/100.0; }else{ maf = Math.rint(100.0*(numa2/(numa1+numa2)))/100.0; } if (infoKnown){ long pos = Long.parseLong((String)positions.elementAt(i)); SNP thisMarker = (new SNP((String)names.elementAt(i), pos, maf, a1, a2, (String)extras.elementAt(i))); markerInfo.add(thisMarker); if (mr != null){ double genoPC = mr.getGenoPercent(); //check to make sure adjacent SNPs do not have identical positions if (prevPosition != Long.MIN_VALUE){ //only do this for markers 2..N, since we're comparing to the previous location if (pos == prevPosition){ dupsToBeFlagged = true; if (genoPC >= pmr.getGenoPercent()){ //use this one because it has more genotypes thisMarker.setDup(1); prevMarker.setDup(2); }else{ //use the other one because it has more genotypes thisMarker.setDup(2); prevMarker.setDup(1); } } } prevPosition = pos; prevMarker = thisMarker; pmr = mr; } }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), maf,a1,a2)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo; } }
1,110,531
/**
 * Gathers per-marker metadata (name, position, optional third-column extra
 * data) either from a whitespace-delimited info file or from hapmap-derived
 * {name, position} pairs; de-duplicates repeated marker names; sorts markers
 * (and, in lockstep, every chromosome's genotypes and the pedfile results and
 * individuals) by position when the info file is out of order; then, in the
 * finally block, computes each marker's major/minor alleles, MAF, duplicate-
 * position flags and bad-genotype rate, storing SNP objects in Chromosome.markers.
 *
 * @param infile        info file of "markername markerposition [extra]" rows; may be null
 * @param hapmapGoodies per-marker {name, position} pairs from a hapmap load; may be null
 * @throws IOException        if the info file cannot be read
 * @throws HaploViewException on empty/malformed/out-of-order info data or a
 *         marker-count mismatch against the loaded genotype data
 */
void prepareMarkerInput(File infile, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists //An optional third column is supported which is designed to hold //association study data. If there is a third column there will be //a visual indicator in the D' display that there is additional data //and the detailed data can be viewed with a mouse press. Vector names = new Vector(); HashSet dupCheck = new HashSet(); Vector positions = new Vector(); Vector extras = new Vector(); dupsToBeFlagged = false; dupNames = false; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); String extra = null; if (st.hasMoreTokens()) extra = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." 
+ "\n Info file must be of format: <markername> <markerposition>"); } //basically if anyone is crazy enough to load a dataset, then go back and load //an out-of-order info file we tell them to bugger off and start over. if (loc < prevloc && Chromosome.markers != null){ throw new HaploViewException("Info file out of order with preloaded dataset:\n"+ name + "\nPlease reload data file and info file together."); } prevloc = loc; if (names.contains(name)){ dupCheck.add(name); } names.add(name); positions.add(l); extras.add(extra); } //check for duplicate names Iterator ditr = dupCheck.iterator(); while (ditr.hasNext()){ String n = (String) ditr.next(); int numdups = 1; for (int i = 0; i < names.size(); i++){ if (names.get(i).equals(n)){ //leave the first instance of the duplicate name the same if (numdups > 1){ String newName = n + "." + numdups; while (names.contains(newName)){ numdups++; newName = n + "." + numdups; } names.setElementAt(newName,i); dupNames = true; } numdups++; } } } if (lineCount > Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file compared to data file.")); } if (lineCount < Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file compared to data file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); extras.add(null); } infoKnown = true; } else if (infile != null){ //we only sort if we read the info from an info file. if //it is from a hapmap file, then the markers were already sorted //when they were read in (in class Pedfile). 
int numLines = names.size(); class SortingHelper implements Comparable{ long pos; int orderInFile; public SortingHelper(long pos, int order){ this.pos = pos; this.orderInFile = order; } public int compareTo(Object o) { SortingHelper sh = (SortingHelper)o; if (sh.pos > pos){ return -1; }else if (sh.pos < pos){ return 1; }else{ return 0; } } } boolean needSort = false; Vector sortHelpers = new Vector(); for (int k = 0; k < (numLines); k++){ sortHelpers.add(new SortingHelper(Long.parseLong((String)positions.get(k)),k)); } //loop through and check if any markers are out of order for (int k = 1; k < (numLines); k++){ if(((SortingHelper)sortHelpers.get(k)).compareTo(sortHelpers.get(k-1)) < 0) { needSort = true; break; } } //if any were out of order, then we need to put them in order if(needSort) { //sort the positions Collections.sort(sortHelpers); Vector newNames = new Vector(); Vector newExtras = new Vector(); Vector newPositions = new Vector(); int[] realPos = new int[numLines]; //reorder the vectors names and extras so that they have the same order as the sorted markers for (int i = 0; i < sortHelpers.size(); i++){ realPos[i] = ((SortingHelper)sortHelpers.get(i)).orderInFile; newNames.add(names.get(realPos[i])); newPositions.add(positions.get(realPos[i])); newExtras.add(extras.get(realPos[i])); } names = newNames; extras = newExtras; positions = newPositions; byte[] tempGenotype = new byte[sortHelpers.size()]; //now we reorder all the individuals genotypes according to the sorted marker order for(int j=0;j<chromosomes.size();j++){ Chromosome tempChrom = (Chromosome)chromosomes.elementAt(j); for(int i =0;i<sortHelpers.size();i++){ tempGenotype[i] = tempChrom.getUnfilteredGenotype(realPos[i]); } for(int i=0;i<sortHelpers.size();i++){ tempChrom.setGenotype(tempGenotype[i],i); } } //sort pedfile objects //todo: this should really be done before pedfile is subjected to any processing. 
//todo: that would require altering some order of operations in dealing with inputs Vector unsortedRes = pedFile.getResults(); Vector sortedRes = new Vector(); for (int i = 0; i < realPos.length; i++){ sortedRes.add(unsortedRes.elementAt(realPos[i])); } pedFile.setResults(sortedRes); Vector o = pedFile.getAllIndividuals(); for (int i = 0; i < o.size(); i++){ Individual ind = (Individual) o.get(i); Vector unsortedMarkers = ind.getMarkers(); Vector sortedMarkers = new Vector(); for (int j = 0; j < unsortedMarkers.size(); j++){ sortedMarkers.add(unsortedMarkers.elementAt(realPos[j])); } ind.setMarkers(sortedMarkers); } } } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getUnfilteredSize()]; percentBadGenotypes = new double[Chromosome.getUnfilteredSize()]; Vector results = null; if (pedFile != null){ results = pedFile.getResults(); } long prevPosition = Long.MIN_VALUE; SNP prevMarker = null; MarkerResult pmr = null; for (int i = 0; i < Chromosome.getUnfilteredSize(); i++){ MarkerResult mr = null; if (results != null){ mr = (MarkerResult)results.elementAt(i); } //to compute minor/major alleles, browse chrom list and count instances of each allele byte a1 = 0; byte a2 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getUnfilteredGenotype(i); if (!(thisAllele == 0)){ if (thisAllele >= 5){ numa1+=0.5; numa2+=0.5; if (thisAllele < 9){ if (a1==0){ a1 = (byte)(thisAllele-4); }else if (a2 == 0){ if (!(thisAllele-4 == a1)){ a2 = (byte)(thisAllele-4); } } } }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; a2 = thisAllele; } } else { numBadGenotypes[i]++; } } if (numa2 > numa1){ byte temp = a1; double tempnum = numa1; numa1 = numa2; a1 = a2; numa2 = 
tempnum; a2 = temp; } double maf; if (mr != null){ maf = Math.rint(mr.getMAF()*100.0)/100.0; }else{ maf = Math.rint(100.0*(numa2/(numa1+numa2)))/100.0; } if (infoKnown){ long pos = Long.parseLong((String)positions.elementAt(i)); SNP thisMarker = (new SNP((String)names.elementAt(i), pos, maf, a1, a2, (String)extras.elementAt(i))); markerInfo.add(thisMarker); if (mr != null){ double genoPC = mr.getGenoPercent(); //check to make sure adjacent SNPs do not have identical positions if (prevPosition != Long.MIN_VALUE){ //only do this for markers 2..N, since we're comparing to the previous location if (pos == prevPosition){ dupsToBeFlagged = true; if (genoPC >= pmr.getGenoPercent()){ //use this one because it has more genotypes thisMarker.setDup(1); prevMarker.setDup(2); }else{ //use the other one because it has more genotypes thisMarker.setDup(2); prevMarker.setDup(1); } } } prevPosition = pos; prevMarker = thisMarker; pmr = mr; } }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), maf,a1,a2)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo; } }
/**
 * Gathers per-marker metadata (name, position, optional third-column extra
 * data) either from a whitespace-delimited info file or from hapmap-derived
 * {name, position} pairs; de-duplicates repeated marker names; sorts markers
 * (and, in lockstep, every chromosome's genotypes and the pedfile results and
 * individuals) by position when the info file is out of order; then, in the
 * finally block, computes each marker's major/minor alleles, MAF, duplicate-
 * position flags and bad-genotype rate, storing SNP objects in Chromosome.markers.
 *
 * @param infile        info file of "markername markerposition [extra]" rows; may be null
 * @param hapmapGoodies per-marker {name, position} pairs from a hapmap load; may be null
 * @throws IOException        if the info file cannot be read
 * @throws HaploViewException on empty/malformed/out-of-order info data or a
 *         marker-count mismatch against the loaded genotype data
 */
void prepareMarkerInput(File infile, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists //An optional third column is supported which is designed to hold //association study data. If there is a third column there will be //a visual indicator in the D' display that there is additional data //and the detailed data can be viewed with a mouse press. Vector names = new Vector(); HashSet dupCheck = new HashSet(); Vector positions = new Vector(); Vector extras = new Vector(); dupsToBeFlagged = false; dupNames = false; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); String extra = null; if (st.hasMoreTokens()) extra = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." 
+ "\n Info file must be of format: <markername> <markerposition>"); } //basically if anyone is crazy enough to load a dataset, then go back and load //an out-of-order info file we tell them to bugger off and start over. if (loc < prevloc && Chromosome.markers != null){ throw new HaploViewException("Info file out of order with preloaded dataset:\n"+ name + "\nPlease reload data file and info file together."); } prevloc = loc; if (names.contains(name)){ dupCheck.add(name); } names.add(name); positions.add(l); extras.add(extra); } //check for duplicate names Iterator ditr = dupCheck.iterator(); while (ditr.hasNext()){ String n = (String) ditr.next(); int numdups = 1; for (int i = 0; i < names.size(); i++){ if (names.get(i).equals(n)){ //leave the first instance of the duplicate name the same if (numdups > 1){ String newName = n + "." + numdups; while (names.contains(newName)){ numdups++; newName = n + "." + numdups; } names.setElementAt(newName,i); dupNames = true; } numdups++; } } } if (lineCount > Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file compared to data file.")); } if (lineCount < Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file compared to data file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); extras.add(null); } infoKnown = true; } else if (infile != null){ //we only sort if we read the info from an info file. if //it is from a hapmap file, then the markers were already sorted //when they were read in (in class Pedfile). 
int numLines = names.size(); class SortingHelper implements Comparable{ long pos; int orderInFile; public SortingHelper(long pos, int order){ this.pos = pos; this.orderInFile = order; } public int compareTo(Object o) { SortingHelper sh = (SortingHelper)o; if (sh.pos > pos){ return -1; }else if (sh.pos < pos){ return 1; }else{ return 0; } } } boolean needSort = false; Vector sortHelpers = new Vector(); for (int k = 0; k < (numLines); k++){ sortHelpers.add(new SortingHelper(Long.parseLong((String)positions.get(k)),k)); } //loop through and check if any markers are out of order for (int k = 1; k < (numLines); k++){ if(((SortingHelper)sortHelpers.get(k)).compareTo(sortHelpers.get(k-1)) < 0) { needSort = true; break; } } //if any were out of order, then we need to put them in order if(needSort) { //sort the positions Collections.sort(sortHelpers); Vector newNames = new Vector(); Vector newExtras = new Vector(); Vector newPositions = new Vector(); int[] realPos = new int[numLines]; //reorder the vectors names and extras so that they have the same order as the sorted markers for (int i = 0; i < sortHelpers.size(); i++){ realPos[i] = ((SortingHelper)sortHelpers.get(i)).orderInFile; newNames.add(names.get(realPos[i])); newPositions.add(positions.get(realPos[i])); newExtras.add(extras.get(realPos[i])); } names = newNames; extras = newExtras; positions = newPositions; byte[] tempGenotype = new byte[sortHelpers.size()]; //now we reorder all the individuals genotypes according to the sorted marker order for(int j=0;j<chromosomes.size();j++){ Chromosome tempChrom = (Chromosome)chromosomes.elementAt(j); for(int i =0;i<sortHelpers.size();i++){ tempGenotype[i] = tempChrom.getUnfilteredGenotype(realPos[i]); } for(int i=0;i<sortHelpers.size();i++){ tempChrom.setGenotype(tempGenotype[i],i); } } //sort pedfile objects //todo: this should really be done before pedfile is subjected to any processing. 
//todo: that would require altering some order of operations in dealing with inputs Vector unsortedRes = pedFile.getResults(); Vector sortedRes = new Vector(); for (int i = 0; i < realPos.length; i++){ sortedRes.add(unsortedRes.elementAt(realPos[i])); } pedFile.setResults(sortedRes); Vector o = pedFile.getAllIndividuals(); for (int i = 0; i < o.size(); i++){ Individual ind = (Individual) o.get(i); Vector unsortedMarkers = ind.getMarkers(); Vector sortedMarkers = new Vector(); for (int j = 0; j < unsortedMarkers.size(); j++){ sortedMarkers.add(unsortedMarkers.elementAt(realPos[j])); } ind.setMarkers(sortedMarkers); } } } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getUnfilteredSize()]; percentBadGenotypes = new double[Chromosome.getUnfilteredSize()]; Vector results = null; if (pedFile != null){ results = pedFile.getResults(); } long prevPosition = Long.MIN_VALUE; SNP prevMarker = null; MarkerResult pmr = null; for (int i = 0; i < Chromosome.getUnfilteredSize(); i++){ MarkerResult mr = null; if (results != null){ mr = (MarkerResult)results.elementAt(i); } //to compute minor/major alleles, browse chrom list and count instances of each allele byte a1 = 0; byte a2 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getUnfilteredGenotype(i); if (!(thisAllele == 0)){ if (thisAllele >= 5){ numa1+=0.5; numa2+=0.5; if (thisAllele < 9){ if (a1==0){ a1 = (byte)(thisAllele-4); }else if (a2 == 0){ if (!(thisAllele-4 == a1)){ a2 = (byte)(thisAllele-4); } } } }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; a2 = thisAllele; } } else { numBadGenotypes[i]++; } } if (numa2 > numa1){ byte temp = a1; double tempnum = numa1; numa1 = numa2; a1 = a2; numa2 = 
tempnum; a2 = temp; } double maf; if (mr != null){ maf = Math.rint(mr.getMAF()*100.0)/100.0; }else{ maf = Math.rint(100.0*(numa2/(numa1+numa2)))/100.0; } if (infoKnown){ long pos = Long.parseLong((String)positions.elementAt(i)); SNP thisMarker = (new SNP((String)names.elementAt(i), pos, maf, a1, a2, (String)extras.elementAt(i))); markerInfo.add(thisMarker); if (mr != null){ double genoPC = mr.getGenoPercent(); //check to make sure adjacent SNPs do not have identical positions if (prevPosition != Long.MIN_VALUE){ //only do this for markers 2..N, since we're comparing to the previous location if (pos == prevPosition){ dupsToBeFlagged = true; if (genoPC >= pmr.getGenoPercent()){ //use this one because it has more genotypes thisMarker.setDup(1); prevMarker.setDup(2); }else{ //use the other one because it has more genotypes thisMarker.setDup(2); prevMarker.setDup(1); } } } prevPosition = pos; prevMarker = thisMarker; pmr = mr; } }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), maf,a1,a2)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo; } }
1,110,532
void prepareMarkerInput(File infile, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists //An optional third column is supported which is designed to hold //association study data. If there is a third column there will be //a visual indicator in the D' display that there is additional data //and the detailed data can be viewed with a mouse press. Vector names = new Vector(); HashSet dupCheck = new HashSet(); Vector positions = new Vector(); Vector extras = new Vector(); dupsToBeFlagged = false; dupNames = false; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); String extra = null; if (st.hasMoreTokens()) extra = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." 
+ "\n Info file must be of format: <markername> <markerposition>"); } //basically if anyone is crazy enough to load a dataset, then go back and load //an out-of-order info file we tell them to bugger off and start over. if (loc < prevloc && Chromosome.markers != null){ throw new HaploViewException("Info file out of order with preloaded dataset:\n"+ name + "\nPlease reload data file and info file together."); } prevloc = loc; if (names.contains(name)){ dupCheck.add(name); } names.add(name); positions.add(l); extras.add(extra); } //check for duplicate names Iterator ditr = dupCheck.iterator(); while (ditr.hasNext()){ String n = (String) ditr.next(); int numdups = 1; for (int i = 0; i < names.size(); i++){ if (names.get(i).equals(n)){ //leave the first instance of the duplicate name the same if (numdups > 1){ String newName = n + "." + numdups; while (names.contains(newName)){ numdups++; newName = n + "." + numdups; } names.setElementAt(newName,i); dupNames = true; } numdups++; } } } if (lineCount > Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file compared to data file.")); } if (lineCount < Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file compared to data file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); extras.add(null); } infoKnown = true; } else if (infile != null){ //we only sort if we read the info from an info file. if //it is from a hapmap file, then the markers were already sorted //when they were read in (in class Pedfile). 
int numLines = names.size(); class SortingHelper implements Comparable{ long pos; int orderInFile; public SortingHelper(long pos, int order){ this.pos = pos; this.orderInFile = order; } public int compareTo(Object o) { SortingHelper sh = (SortingHelper)o; if (sh.pos > pos){ return -1; }else if (sh.pos < pos){ return 1; }else{ return 0; } } } boolean needSort = false; Vector sortHelpers = new Vector(); for (int k = 0; k < (numLines); k++){ sortHelpers.add(new SortingHelper(Long.parseLong((String)positions.get(k)),k)); } //loop through and check if any markers are out of order for (int k = 1; k < (numLines); k++){ if(((SortingHelper)sortHelpers.get(k)).compareTo(sortHelpers.get(k-1)) < 0) { needSort = true; break; } } //if any were out of order, then we need to put them in order if(needSort) { //sort the positions Collections.sort(sortHelpers); Vector newNames = new Vector(); Vector newExtras = new Vector(); Vector newPositions = new Vector(); int[] realPos = new int[numLines]; //reorder the vectors names and extras so that they have the same order as the sorted markers for (int i = 0; i < sortHelpers.size(); i++){ realPos[i] = ((SortingHelper)sortHelpers.get(i)).orderInFile; newNames.add(names.get(realPos[i])); newPositions.add(positions.get(realPos[i])); newExtras.add(extras.get(realPos[i])); } names = newNames; extras = newExtras; positions = newPositions; byte[] tempGenotype = new byte[sortHelpers.size()]; //now we reorder all the individuals genotypes according to the sorted marker order for(int j=0;j<chromosomes.size();j++){ Chromosome tempChrom = (Chromosome)chromosomes.elementAt(j); for(int i =0;i<sortHelpers.size();i++){ tempGenotype[i] = tempChrom.getUnfilteredGenotype(realPos[i]); } for(int i=0;i<sortHelpers.size();i++){ tempChrom.setGenotype(tempGenotype[i],i); } } //sort pedfile objects //todo: this should really be done before pedfile is subjected to any processing. 
//todo: that would require altering some order of operations in dealing with inputs Vector unsortedRes = pedFile.getResults(); Vector sortedRes = new Vector(); for (int i = 0; i < realPos.length; i++){ sortedRes.add(unsortedRes.elementAt(realPos[i])); } pedFile.setResults(sortedRes); Vector o = pedFile.getAllIndividuals(); for (int i = 0; i < o.size(); i++){ Individual ind = (Individual) o.get(i); Vector unsortedMarkers = ind.getMarkers(); Vector sortedMarkers = new Vector(); for (int j = 0; j < unsortedMarkers.size(); j++){ sortedMarkers.add(unsortedMarkers.elementAt(realPos[j])); } ind.setMarkers(sortedMarkers); } } } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getUnfilteredSize()]; percentBadGenotypes = new double[Chromosome.getUnfilteredSize()]; Vector results = null; if (pedFile != null){ results = pedFile.getResults(); } long prevPosition = Long.MIN_VALUE; SNP prevMarker = null; MarkerResult pmr = null; for (int i = 0; i < Chromosome.getUnfilteredSize(); i++){ MarkerResult mr = null; if (results != null){ mr = (MarkerResult)results.elementAt(i); } //to compute minor/major alleles, browse chrom list and count instances of each allele byte a1 = 0; byte a2 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getUnfilteredGenotype(i); if (!(thisAllele == 0)){ if (thisAllele >= 5){ numa1+=0.5; numa2+=0.5; if (thisAllele < 9){ if (a1==0){ a1 = (byte)(thisAllele-4); }else if (a2 == 0){ if (!(thisAllele-4 == a1)){ a2 = (byte)(thisAllele-4); } } } }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; a2 = thisAllele; } } else { numBadGenotypes[i]++; } } if (numa2 > numa1){ byte temp = a1; double tempnum = numa1; numa1 = numa2; a1 = a2; numa2 = 
tempnum; a2 = temp; } double maf; if (mr != null){ maf = Math.rint(mr.getMAF()*100.0)/100.0; }else{ maf = Math.rint(100.0*(numa2/(numa1+numa2)))/100.0; } if (infoKnown){ long pos = Long.parseLong((String)positions.elementAt(i)); SNP thisMarker = (new SNP((String)names.elementAt(i), pos, maf, a1, a2, (String)extras.elementAt(i))); markerInfo.add(thisMarker); if (mr != null){ double genoPC = mr.getGenoPercent(); //check to make sure adjacent SNPs do not have identical positions if (prevPosition != Long.MIN_VALUE){ //only do this for markers 2..N, since we're comparing to the previous location if (pos == prevPosition){ dupsToBeFlagged = true; if (genoPC >= pmr.getGenoPercent()){ //use this one because it has more genotypes thisMarker.setDup(1); prevMarker.setDup(2); }else{ //use the other one because it has more genotypes thisMarker.setDup(2); prevMarker.setDup(1); } } } prevPosition = pos; prevMarker = thisMarker; pmr = mr; } }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), maf,a1,a2)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo; } }
void prepareMarkerInput(File infile, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists //An optional third column is supported which is designed to hold //association study data. If there is a third column there will be //a visual indicator in the D' display that there is additional data //and the detailed data can be viewed with a mouse press. Vector names = new Vector(); HashSet dupCheck = new HashSet(); Vector positions = new Vector(); Vector extras = new Vector(); dupsToBeFlagged = false; dupNames = false; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); String extra = null; if (st.hasMoreTokens()) extra = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." 
+ "\n Info file must be of format: <markername> <markerposition>"); } //basically if anyone is crazy enough to load a dataset, then go back and load //an out-of-order info file we tell them to bugger off and start over. if (loc < prevloc && Chromosome.markers != null){ throw new HaploViewException("Info file out of order with preloaded dataset:\n"+ name + "\nPlease reload data file and info file together."); } prevloc = loc; if (names.contains(name)){ dupCheck.add(name); } names.add(name); positions.add(l); extras.add(extra); } //check for duplicate names Iterator ditr = dupCheck.iterator(); while (ditr.hasNext()){ String n = (String) ditr.next(); int numdups = 1; for (int i = 0; i < names.size(); i++){ if (names.get(i).equals(n)){ //leave the first instance of the duplicate name the same if (numdups > 1){ String newName = n + "." + numdups; while (names.contains(newName)){ numdups++; newName = n + "." + numdups; } names.setElementAt(newName,i); dupNames = true; } numdups++; } } } if (lineCount > Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file compared to data file.")); } if (lineCount < Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file compared to data file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); extras.add(null); } infoKnown = true; } else if (infile != null){ //we only sort if we read the info from an info file. if //it is from a hapmap file, then the markers were already sorted //when they were read in (in class Pedfile). 
int numLines = names.size(); class SortingHelper implements Comparable{ long pos; int orderInFile; public SortingHelper(long pos, int order){ this.pos = pos; this.orderInFile = order; } public int compareTo(Object o) { SortingHelper sh = (SortingHelper)o; if (sh.pos > pos){ return -1; }else if (sh.pos < pos){ return 1; }else{ return 0; } } } boolean needSort = false; Vector sortHelpers = new Vector(); for (int k = 0; k < (numLines); k++){ sortHelpers.add(new SortingHelper(Long.parseLong((String)positions.get(k)),k)); } //loop through and check if any markers are out of order for (int k = 1; k < (numLines); k++){ if(((SortingHelper)sortHelpers.get(k)).compareTo(sortHelpers.get(k-1)) < 0) { needSort = true; break; } } //if any were out of order, then we need to put them in order if(needSort) { //sort the positions Collections.sort(sortHelpers); Vector newNames = new Vector(); Vector newExtras = new Vector(); Vector newPositions = new Vector(); int[] realPos = new int[numLines]; //reorder the vectors names and extras so that they have the same order as the sorted markers for (int i = 0; i < sortHelpers.size(); i++){ realPos[i] = ((SortingHelper)sortHelpers.get(i)).orderInFile; newNames.add(names.get(realPos[i])); newPositions.add(positions.get(realPos[i])); newExtras.add(extras.get(realPos[i])); } names = newNames; extras = newExtras; positions = newPositions; byte[] tempGenotype = new byte[sortHelpers.size()]; //now we reorder all the individuals genotypes according to the sorted marker order for(int j=0;j<chromosomes.size();j++){ Chromosome tempChrom = (Chromosome)chromosomes.elementAt(j); for(int i =0;i<sortHelpers.size();i++){ tempGenotype[i] = tempChrom.getUnfilteredGenotype(realPos[i]); } for(int i=0;i<sortHelpers.size();i++){ tempChrom.setGenotype(tempGenotype[i],i); } } //sort pedfile objects //todo: this should really be done before pedfile is subjected to any processing. 
//todo: that would require altering some order of operations in dealing with inputs Vector unsortedRes = pedFile.getResults(); Vector sortedRes = new Vector(); for (int i = 0; i < realPos.length; i++){ sortedRes.add(unsortedRes.elementAt(realPos[i])); } pedFile.setResults(sortedRes); Vector o = pedFile.getAllIndividuals(); for (int i = 0; i < o.size(); i++){ Individual ind = (Individual) o.get(i); Vector unsortedMarkers = ind.getMarkers(); Vector sortedMarkers = new Vector(); for (int j = 0; j < unsortedMarkers.size(); j++){ sortedMarkers.add(unsortedMarkers.elementAt(realPos[j])); } ind.setMarkers(sortedMarkers); } } } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getUnfilteredSize()]; percentBadGenotypes = new double[Chromosome.getUnfilteredSize()]; Vector results = null; if (pedFile != null){ results = pedFile.getResults(); } long prevPosition = Long.MIN_VALUE; SNP prevMarker = null; MarkerResult pmr = null; for (int i = 0; i < Chromosome.getUnfilteredSize(); i++){ MarkerResult mr = null; if (results != null){ mr = (MarkerResult)results.elementAt(i); } //to compute minor/major alleles, browse chrom list and count instances of each allele byte a1 = 0; byte a2 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getUnfilteredGenotype(i); if (!(thisAllele == 0)){ if (thisAllele >= 5){ numa1+=0.5; numa2+=0.5; if (thisAllele < 9){ if (a1==0){ a1 = (byte)(thisAllele-4); }else if (a2 == 0){ if (!(thisAllele-4 == a1)){ a2 = (byte)(thisAllele-4); } } } }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; a2 = thisAllele; } } else { numBadGenotypes[i]++; } } if (numa2 > numa1){ byte temp = a1; double tempnum = numa1; numa1 = numa2; a1 = a2; numa2 = 
tempnum; a2 = temp; } double maf; if (mr != null){ maf = Math.rint(mr.getMAF()*100.0)/100.0; }else{ maf = Math.rint(100.0*(numa2/(numa1+numa2)))/100.0; } if (infoKnown){ long pos = Long.parseLong((String)positions.elementAt(i)); SNP thisMarker = (new SNP((String)names.elementAt(i), pos, maf, a1, a2, (String)extras.elementAt(i))); markerInfo.add(thisMarker); if (mr != null){ double genoPC = mr.getGenoPercent(); //check to make sure adjacent SNPs do not have identical positions if (prevPosition != Long.MIN_VALUE){ //only do this for markers 2..N, since we're comparing to the previous location if (pos == prevPosition){ dupsToBeFlagged = true; if (genoPC >= pmr.getGenoPercent()){ //use this one because it has more genotypes thisMarker.setDup(1); prevMarker.setDup(2); }else{ //use the other one because it has more genotypes thisMarker.setDup(2); prevMarker.setDup(1); } } } prevPosition = pos; prevMarker = thisMarker; pmr = mr; } }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), maf,a1,a2)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo; } }
1,110,533
public int compareTo(Object o) { SortingHelper sh = (SortingHelper)o; if (sh.pos > pos){ return -1; }else if (sh.pos < pos){ return 1; }else{ return 0; } }
public int compareTo(Object o) { SortingHelper sh = (SortingHelper)o; if (sh.pos > pos){ return -1; }else if (sh.pos < pos){ return 1; }else{ return 0; } }
1,110,534
public ObjectInfo getMBeanInfo(ServiceContext context) throws ServiceException { canAccessThisMBean(context); ServerConnection serverConnection = context.getServerConnection(); ObjectInfo objectInfo = serverConnection.getObjectInfo(context.getObjectName()); return objectInfo; }
public ObjectInfo getMBeanInfo(ServiceContext context) throws ServiceException { canAccessThisMBean(context); ServerConnection serverConnection = ServiceUtils.getServerConnectionEvenIfCluster( context.getApplicationConfig()); ObjectInfo objectInfo = serverConnection.getObjectInfo(context.getObjectName()); return objectInfo; }
1,110,536
public void addExportedKey(SQLRelationship r) { exportedKeys.add(r); }
public void addExportedKey(SQLRelationship r) { exportedKeysFolder.addChild(r); }
1,110,537
public void addImportedKey(SQLRelationship r) { importedKeys.add(r); }
public void addImportedKey(SQLRelationship r) { importedKeysFolder.addChild(r); }
1,110,538
void setupInfo() { URL buildPropertyURL = AboutDlg.class.getClassLoader().getResource( "buildinfo.properties"); Properties prop = new Properties(); try { InputStream is = buildPropertyURL.openStream(); prop.load( is ); } catch (IOException e ) { // Cannot read the properties for some reason. // Do nothing, use the default values instead. } svnrevLabel.setText( prop.getProperty( "svn.revision", "unknown" ) ); svnbranchLabel.setText( prop.getProperty( "svn.info.url", "unknown" ) ); buildTimeLabel.setText( prop.getProperty( "build.time", "unknown" ) ); builderLabel.setText( prop.getProperty( "build.user", "unknown" ) ); String version = prop.getProperty( "build.version", "unknown" ); String versionTag = prop.getProperty( "build.version_tag", "unknown" ); versionLabel.setText( version + " (" + versionTag + ")" ); builderLabel.setText( prop.getProperty( "build.user", "unknown" ) ); // Set up the splash screen image URL splashImageURL = AboutDlg.class.getClassLoader().getResource( "splash.jpg" ); Icon splash = new ImageIcon( splashImageURL ); splashLabel.setIcon( splash ); splashLabel.setText( null ); // Set the copyright text URL copyrightTextURL = AboutDlg.class.getClassLoader().getResource( "copyright.html" ); try { copyrightTextPane.setPage( copyrightTextURL ); } catch ( IOException e ) {} }
void setupInfo() { URL buildPropertyURL = AboutDlg.class.getClassLoader().getResource( "buildinfo.properties"); Properties prop = new Properties(); try { InputStream is = buildPropertyURL.openStream(); prop.load( is ); } catch (IOException e ) { // Cannot read the properties for some reason. // Do nothing, use the default values instead. } svnrevLabel.setText( prop.getProperty( "svn.revision", "unknown" ) ); svnbranchLabel.setText( prop.getProperty( "svn.url", "unknown" ) ); buildTimeLabel.setText( prop.getProperty( "build.time", "unknown" ) ); builderLabel.setText( prop.getProperty( "build.user", "unknown" ) ); String version = prop.getProperty( "build.version", "unknown" ); String versionTag = prop.getProperty( "build.version_tag", "unknown" ); versionLabel.setText( version + " (" + versionTag + ")" ); builderLabel.setText( prop.getProperty( "build.user", "unknown" ) ); // Set up the splash screen image URL splashImageURL = AboutDlg.class.getClassLoader().getResource( "splash.jpg" ); Icon splash = new ImageIcon( splashImageURL ); splashLabel.setIcon( splash ); splashLabel.setText( null ); // Set the copyright text URL copyrightTextURL = AboutDlg.class.getClassLoader().getResource( "copyright.html" ); try { copyrightTextPane.setPage( copyrightTextURL ); } catch ( IOException e ) {} }
1,110,541
void setupInfo() { URL buildPropertyURL = AboutDlg.class.getClassLoader().getResource( "buildinfo.properties"); Properties prop = new Properties(); try { InputStream is = buildPropertyURL.openStream(); prop.load( is ); } catch (IOException e ) { // Cannot read the properties for some reason. // Do nothing, use the default values instead. } svnrevLabel.setText( prop.getProperty( "svn.revision", "unknown" ) ); svnbranchLabel.setText( prop.getProperty( "svn.info.url", "unknown" ) ); buildTimeLabel.setText( prop.getProperty( "build.time", "unknown" ) ); builderLabel.setText( prop.getProperty( "build.user", "unknown" ) ); String version = prop.getProperty( "build.version", "unknown" ); String versionTag = prop.getProperty( "build.version_tag", "unknown" ); versionLabel.setText( version + " (" + versionTag + ")" ); builderLabel.setText( prop.getProperty( "build.user", "unknown" ) ); // Set up the splash screen image URL splashImageURL = AboutDlg.class.getClassLoader().getResource( "splash.jpg" ); Icon splash = new ImageIcon( splashImageURL ); splashLabel.setIcon( splash ); splashLabel.setText( null ); // Set the copyright text URL copyrightTextURL = AboutDlg.class.getClassLoader().getResource( "copyright.html" ); try { copyrightTextPane.setPage( copyrightTextURL ); } catch ( IOException e ) {} }
void setupInfo() { URL buildPropertyURL = AboutDlg.class.getClassLoader().getResource( "buildinfo.properties"); Properties prop = new Properties(); try { InputStream is = buildPropertyURL.openStream(); prop.load( is ); } catch (IOException e ) { // Cannot read the properties for some reason. // Do nothing, use the default values instead. } svnrevLabel.setText( prop.getProperty( "svn.revision", "unknown" ) ); svnbranchLabel.setText( prop.getProperty( "svn.info.url", "unknown" ) ); buildTimeLabel.setText( prop.getProperty( "build.time", "unknown" ) ); builderLabel.setText( prop.getProperty( "build.user", "unknown" ) ); String version = prop.getProperty( "build.version", "unknown" ); String versionTag = prop.getProperty( "build.version_tag", "unknown" ); versionLabel.setText( version + " (" + versionTag + ")" ); builderLabel.setText( prop.getProperty( "build.user", "unknown" ) ); // Set up the splash screen image URL splashImageURL = AboutDlg.class.getClassLoader().getResource( "splash.jpg" ); Icon splash = new ImageIcon( splashImageURL ); splashLabel.setIcon( splash ); splashLabel.setText( null ); // Set the copyright text URL copyrightTextURL = AboutDlg.class.getClassLoader().getResource( "copyright.html" ); try { copyrightTextPane.setPage( copyrightTextURL ); } catch ( IOException e ) {} }
1,110,542
public boolean saveOrSaveAs(boolean showChooser, boolean separateThread) { if (project.getFile() == null || showChooser) { JFileChooser chooser = new JFileChooser(project.getFile()); chooser.addChoosableFileFilter(ASUtils.ARCHITECT_FILE_FILTER); int response = chooser.showSaveDialog(ArchitectFrame.this); if (response != JFileChooser.APPROVE_OPTION) { return false; } else { File file = chooser.getSelectedFile(); if (!file.getPath().endsWith(".architect")) { file = new File(file.getPath()+".architect"); } if (file.exists()) { response = JOptionPane.showConfirmDialog( ArchitectFrame.this, "The file\n\n"+file.getPath()+"\n\nalready exists. Do you want to overwrite it?", "File Exists", JOptionPane.YES_NO_OPTION); if (response == JOptionPane.NO_OPTION) { return saveOrSaveAs(true, separateThread); } } project.setFile(file); String projName = file.getName().substring(0, file.getName().length()-".architect".length()); project.setName(projName); setTitle(projName); } } final boolean finalSeparateThread = separateThread; final ProgressMonitor pm = new ProgressMonitor (ArchitectFrame.this, "Saving Project", "", 0, 100); Runnable saveTask = new Runnable() { public void run() { try { lastSaveOpSuccessful = false; project.setSaveInProgress(true); project.save(finalSeparateThread ? pm : null); lastSaveOpSuccessful = true; } catch (Exception ex) { lastSaveOpSuccessful = false; JOptionPane.showMessageDialog (ArchitectFrame.this, "Can't save project: "+ex.getMessage()); logger.error("Got exception while saving project", ex); } finally { project.setSaveInProgress(false); } } }; if (separateThread) { new Thread(saveTask).start(); return true; // this is an optimistic lie } else { saveTask.run(); return lastSaveOpSuccessful; } }
public boolean saveOrSaveAs(boolean showChooser, boolean separateThread) { if (project.getFile() == null || showChooser) { JFileChooser chooser = new JFileChooser(project.getFile()); chooser.addChoosableFileFilter(ASUtils.ARCHITECT_FILE_FILTER); int response = chooser.showSaveDialog(ArchitectFrame.this); if (response != JFileChooser.APPROVE_OPTION) { return false; } else { File file = chooser.getSelectedFile(); if (!file.getPath().endsWith(".architect")) { file = new File(file.getPath()+".architect"); } if (file.exists()) { response = JOptionPane.showConfirmDialog( ArchitectFrame.this, "The file\n\n"+file.getPath()+"\n\nalready exists. Do you want to overwrite it?", "File Exists", JOptionPane.YES_NO_OPTION); if (response == JOptionPane.NO_OPTION) { return saveOrSaveAs(true, separateThread); } } project.setFile(file); String projName = file.getName().substring(0, file.getName().length()-".architect".length()); project.setName(projName); setTitle(projName); } } final boolean finalSeparateThread = separateThread; final ProgressMonitor pm = new ProgressMonitor (ArchitectFrame.this, "Saving Project", "", 0, 100); Runnable saveTask = new Runnable() { public void run() { try { lastSaveOpSuccessful = false; project.setSaveInProgress(true); project.save(finalSeparateThread ? pm : null); lastSaveOpSuccessful = true; JOptionPane.showMessageDialog(ArchitectFrame.this, "Save successful"); } catch (Exception ex) { lastSaveOpSuccessful = false; JOptionPane.showMessageDialog (ArchitectFrame.this, "Can't save project: "+ex.getMessage()); logger.error("Got exception while saving project", ex); } finally { project.setSaveInProgress(false); } } }; if (separateThread) { new Thread(saveTask).start(); return true; // this is an optimistic lie } else { saveTask.run(); return lastSaveOpSuccessful; } }
1,110,543
public void run() { try { lastSaveOpSuccessful = false; project.setSaveInProgress(true); project.save(finalSeparateThread ? pm : null); lastSaveOpSuccessful = true; } catch (Exception ex) { lastSaveOpSuccessful = false; JOptionPane.showMessageDialog (ArchitectFrame.this, "Can't save project: "+ex.getMessage()); logger.error("Got exception while saving project", ex); } finally { project.setSaveInProgress(false); } }
public void run() { try { lastSaveOpSuccessful = false; project.setSaveInProgress(true); project.save(finalSeparateThread ? pm : null); lastSaveOpSuccessful = true; JOptionPane.showMessageDialog(ArchitectFrame.this, "Save successful"); } catch (Exception ex) { lastSaveOpSuccessful = false; JOptionPane.showMessageDialog (ArchitectFrame.this, "Can't save project: "+ex.getMessage()); logger.error("Got exception while saving project", ex); } finally { project.setSaveInProgress(false); } }
1,110,544
public void run(Context context, XMLOutput output) throws Exception { // initialize all the properties of the tag before its used // if there is a problem abort this tag for ( int i = 0, size = expressions.length; i < size; i++ ) { Expression expression = expressions[i]; Method method = methods[i]; Class type = types[i]; // some types are Expression objects so let the tag // evaluate them Object value = null; if ( type.isAssignableFrom( Expression.class ) ) { value = expression; } else { value = expression.evaluate( context ); } // convert value to correct type if ( value != null ) { value = convertType( value, type ); } Object[] arguments = { value }; method.invoke( tag, arguments ); } tag.run( context, output ); }
public void run(Context context, XMLOutput output) throws Exception { // initialize all the properties of the tag before its used // if there is a problem abort this tag for ( int i = 0, size = expressions.length; i < size; i++ ) { Expression expression = expressions[i]; Method method = methods[i]; Class type = types[i]; // some types are Expression objects so let the tag // evaluate them Object value = null; if ( type.isAssignableFrom( Expression.class ) ) { value = expression; } else { value = expression.evaluate( context ); } // convert value to correct type if ( value != null ) { value = convertType( value, type ); } Object[] arguments = { value }; method.invoke( tag, arguments ); } tag.run( context, output ); }
1,110,545
/**
 * Reacts to a structural change in the SQL object tree by firing a
 * coarse "model.children" property change and scheduling a re-layout.
 *
 * @param e the structure-change event (its details are not inspected)
 */
public void dbStructureChanged(SQLObjectEvent e) {
    // Old and new values are unknown here; a null-to-null change forces
    // listeners to requery the model from scratch.
    firePropertyChange("model.children", null, null);
    revalidate();
}
/**
 * Handles a structural change in the SQL object tree: notifies property
 * listeners with a coarse "model.children" change and revalidates.
 *
 * @param e the structure-change event (its details are not inspected)
 */
public void dbStructureChanged(SQLObjectEvent e) {
    // Fire null-to-null so listeners cannot short-circuit on equality
    // and must re-read the children from the model.
    firePropertyChange("model.children", null, null);
    revalidate();
}
1,110,548
/**
 * SAX callback for a closing element. If the element was a recognised
 * tag, flushes pending literal text into its body and restores the
 * enclosing script block; otherwise emits the literal closing tag.
 * Finally restores the enclosing tag as the current parent.
 */
public void endElement(String namespaceURI, String localName, String qName)
        throws SAXException {
    // Pop the script for the element that just closed; null means the
    // element was plain static markup rather than a recognised tag.
    tagScript = (TagScript) tagScriptStack.remove(tagScriptStack.size() - 1);
    if (tagScript == null) {
        // Static markup: append the literal closing tag.
        textBuffer.append("</").append(qName).append(">");
    } else {
        // Flush buffered literal text into the tag's body first...
        if (textBuffer.length() > 0) {
            script.addScript(new TextScript(textBuffer.toString()));
            textBuffer.setLength(0);
        }
        // ...then restore the enclosing script block.
        script = (ScriptBlock) scriptStack.pop();
    }
    // Restore the enclosing tag as parent (null when back at top level).
    int depth = tagStack.size();
    parentTag = (depth <= 0) ? null : (Tag) tagStack.remove(depth - 1);
}
/**
 * SAX callback for a closing element. For a recognised tag, flushes any
 * pending literal text into its body and pops the enclosing script
 * block; for static markup, appends the literal closing tag. Always
 * restores the enclosing tag as the current parent afterwards.
 */
public void endElement(String namespaceURI, String localName, String qName)
        throws SAXException {
    // null script here marks the element as plain static markup.
    tagScript = (TagScript) tagScriptStack.remove(tagScriptStack.size() - 1);
    if (tagScript == null) {
        textBuffer.append("</").append(qName).append(">");
    } else {
        if (textBuffer.length() > 0) {
            // Buffered literal text belongs to the tag's body.
            script.addScript(new TextScript(textBuffer.toString()));
            textBuffer.setLength(0);
        }
        script = (ScriptBlock) scriptStack.pop();
    }
    // Parent tag reverts to the enclosing one (null at top level).
    int depth = tagStack.size();
    parentTag = (depth <= 0) ? null : (Tag) tagStack.remove(depth - 1);
}
1,110,549
/**
 * SAX callback for a closing element. Recognised tags get their pending
 * literal text flushed into the body before the enclosing script block
 * is restored; unrecognised elements are written out literally. The
 * enclosing tag is restored as the current parent in either case.
 */
public void endElement(String namespaceURI, String localName, String qName)
        throws SAXException {
    // Pop this element's script (null == static markup, not a tag).
    tagScript = (TagScript) tagScriptStack.remove(tagScriptStack.size() - 1);
    if (tagScript == null) {
        textBuffer.append("</").append(qName).append(">");
    } else {
        if (textBuffer.length() > 0) {
            script.addScript(new TextScript(textBuffer.toString()));
            textBuffer.setLength(0);
        }
        script = (ScriptBlock) scriptStack.pop();
    }
    int depth = tagStack.size();
    parentTag = (depth <= 0) ? null : (Tag) tagStack.remove(depth - 1);
}
/**
 * SAX callback for a closing element: finishes a recognised tag's body
 * (flushing literal text, popping the script block) or emits the
 * literal closing tag for static markup, then restores the parent tag.
 */
public void endElement(String namespaceURI, String localName, String qName)
        throws SAXException {
    // A null script means the element was not a recognised tag.
    tagScript = (TagScript) tagScriptStack.remove(tagScriptStack.size() - 1);
    if (tagScript == null) {
        textBuffer.append("</").append(qName).append(">");
    } else {
        if (textBuffer.length() > 0) {
            // Move buffered text into the tag's body before closing it.
            script.addScript(new TextScript(textBuffer.toString()));
            textBuffer.setLength(0);
        }
        script = (ScriptBlock) scriptStack.pop();
    }
    int depth = tagStack.size();
    parentTag = (depth <= 0) ? null : (Tag) tagStack.remove(depth - 1);
}
1,110,550
/**
 * SAX close-element handler. Completes a recognised tag (flush literal
 * text, pop the script block) or writes the closing markup literally,
 * then makes the enclosing tag the current parent again.
 */
public void endElement(String namespaceURI, String localName, String qName)
        throws SAXException {
    tagScript = (TagScript) tagScriptStack.remove(tagScriptStack.size() - 1);
    if (tagScript == null) {
        // Unrecognised element: echo its closing tag verbatim.
        textBuffer.append("</").append(qName).append(">");
    } else {
        if (textBuffer.length() > 0) {
            script.addScript(new TextScript(textBuffer.toString()));
            textBuffer.setLength(0);
        }
        // Return to the script block that encloses this tag.
        script = (ScriptBlock) scriptStack.pop();
    }
    int depth = tagStack.size();
    parentTag = (depth <= 0) ? null : (Tag) tagStack.remove(depth - 1);
}
/**
 * SAX close-element handler: wraps up a recognised tag's body or emits
 * static closing markup, then pops the parent-tag stack.
 */
public void endElement(String namespaceURI, String localName, String qName)
        throws SAXException {
    tagScript = (TagScript) tagScriptStack.remove(tagScriptStack.size() - 1);
    if (tagScript == null) {
        // Plain markup — reproduce the closing tag literally.
        textBuffer.append("</").append(qName).append(">");
    } else {
        if (textBuffer.length() > 0) {
            script.addScript(new TextScript(textBuffer.toString()));
            textBuffer.setLength(0);
        }
        script = (ScriptBlock) scriptStack.pop();
    }
    // Null parent when the tag stack is empty (document root level).
    int depth = tagStack.size();
    parentTag = (depth <= 0) ? null : (Tag) tagStack.remove(depth - 1);
}
1,110,551
/**
 * SAX callback for an opening element. Tries to resolve the element to a
 * tag (dynamic first, then static); recognised tags are pushed onto the
 * parsing stacks and given a fresh body block, while unrecognised
 * elements are echoed literally into the text buffer.
 *
 * @throws SAXException rethrown as-is, or wrapping any other exception
 *                      raised while building the tag
 */
public void startElement(
    String namespaceURI,
    String localName,
    String qName,
    Attributes list)
    throws SAXException {

    try {
        // if this is a tag then create a script to run it
        // otherwise pass the text to the current body
        tagScript = createTag(namespaceURI, localName, list);
        if (tagScript == null) {
            tagScript = createStaticTag(namespaceURI, localName, qName, list);
        }
        // Pushed even when null so endElement can tell tag from markup.
        tagScriptStack.add(tagScript);
        if (tagScript != null) {
            // set parent relationship...
            Tag tag = tagScript.getTag();
            tag.setParent(parentTag);

            // pop another tag onto the stack
            if ( parentTag != null ) {
                tagStack.add( parentTag );
            }
            parentTag = tag;

            // Flush buffered literal text into the current body before
            // descending into the new tag.
            if (textBuffer.length() > 0) {
                script.addScript(new TextScript(textBuffer.toString()));
                textBuffer.setLength(0);
            }
            script.addScript(tagScript);

            // start a new body
            scriptStack.push(script);
            script = new ScriptBlock();
            tag.setBody(script);
        } else {
            // XXXX: might wanna handle empty elements later...
            // Echo the unrecognised element (with attributes) verbatim.
            textBuffer.append("<");
            textBuffer.append(qName);
            int size = list.getLength();
            for (int i = 0; i < size; i++) {
                textBuffer.append(" ");
                textBuffer.append(list.getQName(i));
                textBuffer.append("=");
                textBuffer.append("\"");
                textBuffer.append(list.getValue(i));
                textBuffer.append("\"");
            }
            textBuffer.append(">");
        }
    } catch (SAXException e) {
        throw e;
    } catch (Exception e) {
        log.error( "Caught exception: " + e, e );
        throw new SAXException( "Runtime Exception: " + e, e );
    }
}
/**
 * SAX callback for an opening element. Tries to resolve the element to a
 * tag (dynamic first, then static); recognised tags are pushed onto the
 * parsing stacks and given a fresh body block, while unrecognised
 * elements are echoed literally into the text buffer.
 *
 * @throws SAXException rethrown as-is, or wrapping any other exception
 *                      raised while building the tag
 */
public void startElement(
    String namespaceURI,
    String localName,
    String qName,
    Attributes list)
    throws SAXException {

    try {
        // if this is a tag then create a script to run it
        // otherwise pass the text to the current body
        tagScript = createTag(namespaceURI, localName, list);
        if (tagScript == null) {
            tagScript = createStaticTag(namespaceURI, localName, qName, list);
        }
        // Pushed even when null so endElement can tell tag from markup.
        tagScriptStack.add(tagScript);
        if (tagScript != null) {
            // set parent relationship...
            Tag tag = tagScript.getTag();
            tag.setParent(parentTag);

            // pop another tag onto the stack
            if ( parentTag != null ) {
                tagStack.add( parentTag );
            }
            parentTag = tag;

            // Flush buffered literal text into the current body before
            // descending into the new tag.
            if (textBuffer.length() > 0) {
                addTextScript(textBuffer.toString());
                textBuffer.setLength(0);
            }
            script.addScript(tagScript);

            // start a new body
            scriptStack.push(script);
            script = new ScriptBlock();
            tag.setBody(script);
        } else {
            // XXXX: might wanna handle empty elements later...
            // Echo the unrecognised element (with attributes) verbatim.
            textBuffer.append("<");
            textBuffer.append(qName);
            int size = list.getLength();
            for (int i = 0; i < size; i++) {
                textBuffer.append(" ");
                textBuffer.append(list.getQName(i));
                textBuffer.append("=");
                textBuffer.append("\"");
                textBuffer.append(list.getValue(i));
                textBuffer.append("\"");
            }
            textBuffer.append(">");
        }
    } catch (SAXException e) {
        throw e;
    } catch (Exception e) {
        log.error( "Caught exception: " + e, e );
        throw new SAXException( "Runtime Exception: " + e, e );
    }
}
1,110,552
/**
 * Map-style read access backed by the context's variables.
 *
 * @param key the variable name; assumed to be a String — a non-String
 *            key throws ClassCastException
 * @return the variable's value, or null if it is not set
 */
public Object get(Object key) {
    // NOTE(review): a sibling implementation of this method calls
    // context.findVariable(...) instead of getVariable(...) — confirm
    // whether the scope-walking lookup is the intended one here.
    return context.getVariable( (String) key );
}
/**
 * Map-style read access backed by the context's variables.
 *
 * @param key the variable name; assumed to be a String — a non-String
 *            key throws ClassCastException
 * @return the variable's value, or null if it is not found
 */
public Object get(Object key) {
    String variableName = (String) key;
    return context.findVariable(variableName);
}
1,110,554
/**
 * Formats a cell value as a display string before delegating to the
 * superclass renderer: columns 0-4 hold SQLObjects and are rendered by
 * name; column 5 is an epoch-millis timestamp (boxed Long) rendered as
 * "yyyy-MM-dd hh:mm:ss"; columns 9 and 11 show "N/A" for missing
 * values; every other column falls back to toString().
 *
 * Fixes: replaced the `new String()` anti-idiom with "", and guarded
 * the column-5 branch against a null value (the unguarded
 * `new Date((Long) value)` unboxing threw NullPointerException).
 *
 * @return the renderer component configured with the formatted text
 */
public Component getTableCellRendererComponent(JTable table, Object value,
        boolean isSelected, boolean hasFocus, int row, int column) {
    String formattedValue;
    if (column < 5) {
        // First five columns hold SQLObjects; show their names.
        formattedValue = (value == null) ? "null" : ((SQLObject) value).getName();
    } else if (column == 5) {
        // Timestamp column; format stays identical to the original.
        DateFormat df = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss");
        formattedValue = (value == null) ? "null" : df.format(new Date((Long) value));
    } else if (column == 9 || column == 11) {
        // These columns distinguish "no data" as N/A rather than null.
        formattedValue = (value == null) ? "N/A" : value.toString();
    } else {
        formattedValue = (value == null) ? "null" : value.toString();
    }
    return super.getTableCellRendererComponent(table, formattedValue,
            isSelected, hasFocus, row, column);
}
public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) { String formattedValue = new String(); if ( column < 5) { if (value == null) { formattedValue = "null"; } else { formattedValue = ((SQLObject)value).getName(); } }else if (column == 5) { DateFormat df = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss"); formattedValue =df.format(new Date((Long) value)); }else if (column == 9 || column == 11) { if (value == null) { formattedValue = "N/A"; } else { formattedValue = pctFormat.format(value); } } else if (column == 14 ) { if (value == null) { formattedValue = "null"; } else { formattedValue = aldf.format(value); } } else { if (value == null) { formattedValue = "null"; } else { formattedValue = pctFormat.format(value); } } else if (column == 14 ) { if (value == null) { formattedValue = "null"; } else { formattedValue = aldf.format(value); } } return super.getTableCellRendererComponent(table,formattedValue,isSelected,hasFocus,row,column); }
1,110,556